From a7c21c7fdb9df742f7d54060173b8ddb420c1bcf Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 28 Apr 2021 13:42:23 +0900 Subject: [PATCH 001/253] init --- .env | 6 + .github/semantic.yml | 1 + .github/workflows/main.yml | 44 + .github/workflows/pr.yml | 41 + .gitignore | 28 + .golangci.yml | 10 + .graphqlconfig | 3 + .vscode/extensions.json | 11 + .vscode/launch.json | 15 + .vscode/settings.json | 33 + CODE_OF_CONDUCT.md | 132 + Dockerfile | 24 + LICENSE | 177 + Makefile | 31 + README.md | 7 + cmd/godoc/main.go | 41 + cmd/reearth/debug.go | 5 + cmd/reearth/main.go | 9 + cmd/reearth/release.go | 5 + codecov.yml | 4 + docker-compose.yml | 22 + go.mod | 57 + go.sum | 832 + gqlgen.yml | 35 + internal/adapter/graphql/container.go | 92 + internal/adapter/graphql/controller_asset.go | 75 + .../adapter/graphql/controller_dataset.go | 208 + internal/adapter/graphql/controller_layer.go | 201 + internal/adapter/graphql/controller_plugin.go | 38 + .../adapter/graphql/controller_project.go | 109 + .../adapter/graphql/controller_property.go | 200 + internal/adapter/graphql/controller_scene.go | 130 + internal/adapter/graphql/controller_team.go | 82 + internal/adapter/graphql/controller_user.go | 96 + internal/adapter/graphql/convert.go | 87 + internal/adapter/graphql/convert_asset.go | 20 + internal/adapter/graphql/convert_dataset.go | 124 + internal/adapter/graphql/convert_layer.go | 169 + internal/adapter/graphql/convert_plugin.go | 58 + internal/adapter/graphql/convert_project.go | 61 + internal/adapter/graphql/convert_property.go | 648 + internal/adapter/graphql/convert_scene.go | 76 + internal/adapter/graphql/convert_team.go | 51 + internal/adapter/graphql/convert_user.go | 56 + internal/adapter/graphql/loader_dataset.go | 50 + internal/adapter/graphql/loader_layer.go | 91 + internal/adapter/graphql/loader_plugin.go | 22 + internal/adapter/graphql/loader_project.go | 47 + internal/adapter/graphql/loader_property.go | 46 + internal/adapter/graphql/loader_scene.go | 56 + 
internal/adapter/graphql/loader_team.go | 1 + internal/adapter/graphql/loader_user.go | 33 + internal/adapter/graphql/models.go | 290 + internal/adapter/graphql/models_gen.go | 1725 + internal/adapter/graphql/scalar.go | 122 + internal/adapter/http/user_controller.go | 53 + internal/app/app.go | 161 + internal/app/auth.go | 132 + internal/app/config.go | 62 + internal/app/echo-logrus.go | 217 + internal/app/file.go | 73 + internal/app/graphql.go | 125 + internal/app/jwt.go | 192 + internal/app/main.go | 43 + internal/app/profiler.go | 27 + internal/app/public.go | 109 + internal/app/repo.go | 83 + internal/app/server.go | 89 + internal/app/tracer.go | 69 + internal/app/userAPIs.go | 133 + internal/app/web.go | 41 + internal/graphql/context.go | 62 + internal/graphql/dataloader/context.go | 73 + internal/graphql/dataloader/dataloader.go | 14 + .../graphql/dataloader/datasetloader_gen.go | 225 + .../dataloader/datasetschemaloader_gen.go | 225 + .../dataloader/layergrouploader_gen.go | 225 + .../graphql/dataloader/layeritemloader_gen.go | 225 + .../graphql/dataloader/layerloader_gen.go | 225 + internal/graphql/dataloader/loader.tmpl | 55 + .../dataloader/loader_dataset_schema_gen.go | 54 + internal/graphql/dataloader/loader_gen.go | 348 + .../dataloader/loader_layer_group_gen.go | 54 + .../dataloader/loader_layer_item_gen.go | 54 + .../dataloader/loader_property_schema_gen.go | 54 + .../graphql/dataloader/pluginloader_gen.go | 225 + .../graphql/dataloader/projectloader_gen.go | 225 + .../graphql/dataloader/propertyloader_gen.go | 225 + .../dataloader/propertyschemaloader_gen.go | 225 + .../graphql/dataloader/sceneloader_gen.go | 225 + .../graphql/dataloader/scenelockloader_gen.go | 222 + internal/graphql/dataloader/teamloader_gen.go | 225 + internal/graphql/dataloader/userloader_gen.go | 225 + internal/graphql/generated.go | 39527 ++++++++++++++++ internal/graphql/resolver.go | 35 + internal/graphql/resolver_asset.go | 22 + internal/graphql/resolver_dataset.go | 76 + 
internal/graphql/resolver_dataset_schema.go | 69 + internal/graphql/resolver_layer.go | 327 + internal/graphql/resolver_mutation.go | 427 + internal/graphql/resolver_plugin.go | 92 + internal/graphql/resolver_project.go | 34 + internal/graphql/resolver_property.go | 542 + internal/graphql/resolver_query.go | 319 + internal/graphql/resolver_scene.go | 131 + internal/graphql/resolver_team.go | 43 + internal/graphql/resolver_user.go | 29 + internal/graphql/tracer.go | 151 + internal/infrastructure/adapter/plugin.go | 59 + .../infrastructure/adapter/property_schema.go | 66 + .../infrastructure/auth0/authenticator.go | 223 + .../auth0/authenticator_test.go | 185 + internal/infrastructure/fs/archive.go | 115 + internal/infrastructure/fs/common.go | 50 + internal/infrastructure/fs/common_test.go | 22 + internal/infrastructure/fs/file.go | 236 + internal/infrastructure/fs/plugin.go | 65 + .../infrastructure/fs/plugin_repository.go | 62 + internal/infrastructure/fs/property_schema.go | 85 + internal/infrastructure/gcs/file.go | 326 + internal/infrastructure/gcs/file_test.go | 24 + internal/infrastructure/memory/asset.go | 64 + internal/infrastructure/memory/config.go | 25 + internal/infrastructure/memory/container.go | 35 + internal/infrastructure/memory/dataset.go | 133 + .../infrastructure/memory/dataset_schema.go | 142 + internal/infrastructure/memory/dummy.go | 73 + internal/infrastructure/memory/layer.go | 207 + internal/infrastructure/memory/plugin.go | 68 + internal/infrastructure/memory/project.go | 105 + internal/infrastructure/memory/property.go | 100 + .../infrastructure/memory/property_schema.go | 66 + internal/infrastructure/memory/scene.go | 110 + internal/infrastructure/memory/scene_lock.go | 67 + internal/infrastructure/memory/team.go | 74 + internal/infrastructure/memory/transaction.go | 27 + internal/infrastructure/memory/user.go | 92 + internal/infrastructure/memory/util.go | 27 + internal/infrastructure/mongo/asset.go | 75 + 
internal/infrastructure/mongo/config.go | 46 + internal/infrastructure/mongo/container.go | 42 + internal/infrastructure/mongo/dataset.go | 342 + .../infrastructure/mongo/dataset_schema.go | 182 + internal/infrastructure/mongo/dummy.go | 46 + internal/infrastructure/mongo/layer.go | 324 + .../201217132559_add_scene_widget_id.go | 46 + .../201217193948_add_scene_default_tile.go | 68 + .../210310145844_remove_preview_token.go | 43 + .../infrastructure/mongo/migration/client.go | 101 + .../mongo/migration/migrations.go | 9 + .../infrastructure/mongo/mongodoc/asset.go | 74 + .../infrastructure/mongo/mongodoc/client.go | 333 + .../mongo/mongodoc/clientcol.go | 53 + .../infrastructure/mongo/mongodoc/consumer.go | 42 + .../mongo/mongodoc/consumer_test.go | 60 + .../infrastructure/mongo/mongodoc/dataset.go | 191 + .../mongo/mongodoc/dataset_schema.go | 134 + .../infrastructure/mongo/mongodoc/layer.go | 263 + .../infrastructure/mongo/mongodoc/plugin.go | 114 + .../infrastructure/mongo/mongodoc/project.go | 120 + .../infrastructure/mongo/mongodoc/property.go | 330 + .../mongo/mongodoc/property_schema.go | 313 + .../infrastructure/mongo/mongodoc/scene.go | 230 + .../infrastructure/mongo/mongodoc/team.go | 93 + .../infrastructure/mongo/mongodoc/user.go | 91 + .../infrastructure/mongo/mongodoc/util.go | 33 + internal/infrastructure/mongo/plugin.go | 146 + internal/infrastructure/mongo/project.go | 149 + internal/infrastructure/mongo/property.go | 170 + .../infrastructure/mongo/property_schema.go | 153 + internal/infrastructure/mongo/scene.go | 187 + internal/infrastructure/mongo/scene_lock.go | 75 + internal/infrastructure/mongo/team.go | 129 + internal/infrastructure/mongo/transaction.go | 63 + internal/infrastructure/mongo/user.go | 119 + internal/infrastructure/npm/archive.go | 73 + .../infrastructure/npm/plugin_repository.go | 97 + internal/usecase/cursor.go | 4 + internal/usecase/gateway/authenticator.go | 20 + internal/usecase/gateway/container.go | 9 + 
internal/usecase/gateway/datasouce.go | 18 + internal/usecase/gateway/file.go | 31 + internal/usecase/gateway/mailer.go | 5 + internal/usecase/gateway/plugin_repository.go | 19 + internal/usecase/interactor/asset.go | 120 + internal/usecase/interactor/common.go | 243 + internal/usecase/interactor/dataset.go | 624 + internal/usecase/interactor/layer.go | 983 + internal/usecase/interactor/layer_test.go | 37 + internal/usecase/interactor/plugin.go | 59 + internal/usecase/interactor/project.go | 411 + internal/usecase/interactor/property.go | 525 + internal/usecase/interactor/scene.go | 595 + internal/usecase/interactor/team.go | 301 + internal/usecase/interactor/team_test.go | 37 + internal/usecase/interactor/user.go | 364 + internal/usecase/interfaces/asset.go | 26 + internal/usecase/interfaces/common.go | 17 + internal/usecase/interfaces/dataset.go | 69 + internal/usecase/interfaces/layer.go | 104 + internal/usecase/interfaces/plugin.go | 20 + internal/usecase/interfaces/project.go | 73 + internal/usecase/interfaces/property.go | 96 + internal/usecase/interfaces/scene.go | 36 + internal/usecase/interfaces/team.go | 27 + internal/usecase/interfaces/user.go | 44 + internal/usecase/operator.go | 143 + internal/usecase/pageinfo.go | 63 + internal/usecase/pagination.go | 19 + internal/usecase/repo/asset.go | 16 + internal/usecase/repo/config.go | 12 + internal/usecase/repo/container.go | 18 + internal/usecase/repo/dataset.go | 50 + internal/usecase/repo/dataset_schema.go | 24 + internal/usecase/repo/layer.go | 39 + internal/usecase/repo/plugin.go | 20 + internal/usecase/repo/project.go | 46 + internal/usecase/repo/property.go | 26 + internal/usecase/repo/property_schema.go | 21 + internal/usecase/repo/scene.go | 19 + internal/usecase/repo/scene_lock.go | 15 + internal/usecase/repo/team.go | 18 + internal/usecase/repo/transaction.go | 16 + internal/usecase/repo/user.go | 18 + main.go | 3 + pkg/asset/asset.go | 66 + pkg/asset/asset_test.go | 63 + pkg/asset/builder.go | 94 
+ pkg/asset/builder_test.go | 236 + pkg/builtin/main.go | 57 + pkg/builtin/main_test.go | 105 + pkg/builtin/manifest.yml | 1027 + pkg/builtin/manifest_gen.go | 5 + pkg/builtin/manifest_ja.yml | 472 + pkg/builtin/manifest_ja_gen.go | 5 + pkg/builtin/migration.go | 3 + pkg/config/config.go | 23 + pkg/config/config_test.go | 14 + pkg/czml/czml.go | 45 + pkg/dataset/builder.go | 90 + pkg/dataset/csvparser.go | 233 + pkg/dataset/csvparser_test.go | 66 + pkg/dataset/dataset.go | 114 + pkg/dataset/diff.go | 10 + pkg/dataset/field.go | 61 + pkg/dataset/graph_iterator.go | 72 + pkg/dataset/graph_iterator_test.go | 63 + pkg/dataset/graph_loader.go | 37 + pkg/dataset/list.go | 195 + pkg/dataset/list_test.go | 77 + pkg/dataset/loader.go | 43 + pkg/dataset/schema.go | 135 + pkg/dataset/schema_builder.go | 112 + pkg/dataset/schema_field.go | 62 + pkg/dataset/schema_field_builder.go | 78 + pkg/dataset/schema_field_diff.go | 45 + pkg/dataset/schema_graph_iterator.go | 72 + pkg/dataset/schema_graph_iterator_test.go | 69 + pkg/dataset/schema_list.go | 64 + pkg/dataset/schema_list_test.go | 40 + pkg/dataset/source.go | 9 + pkg/dataset/value.go | 393 + pkg/dataset/value_test.go | 63 + pkg/error/error.go | 73 + pkg/error/error_test.go | 30 + pkg/file/file.go | 28 + pkg/i18n/string.go | 42 + pkg/i18n/string_test.go | 123 + pkg/id/asset_gen.go | 287 + pkg/id/dataset_gen.go | 287 + pkg/id/dataset_schema_field_gen.go | 251 + pkg/id/dataset_schema_gen.go | 287 + pkg/id/gen.go | 13 + pkg/id/id.go | 141 + pkg/id/id.tmpl | 287 + pkg/id/infobox_field_gen.go | 251 + pkg/id/layer_gen.go | 287 + pkg/id/plugin.go | 169 + pkg/id/plugin_extension.go | 41 + pkg/id/plugin_test.go | 24 + pkg/id/project_gen.go | 287 + pkg/id/property_gen.go | 287 + pkg/id/property_item_gen.go | 287 + pkg/id/property_schema.go | 145 + pkg/id/property_schema_field.go | 42 + pkg/id/property_schema_test.go | 8 + pkg/id/scene_gen.go | 287 + pkg/id/team_gen.go | 287 + pkg/id/user_gen.go | 287 + pkg/id/widget_gen.go | 287 + 
pkg/kml/kml.go | 68 + pkg/layer/builder.go | 73 + pkg/layer/decoding/common.go | 531 + pkg/layer/decoding/common_test.go | 59 + pkg/layer/decoding/czml.go | 174 + pkg/layer/decoding/czml_test.go | 145 + pkg/layer/decoding/decoder.go | 60 + pkg/layer/decoding/format.go | 11 + pkg/layer/decoding/geojson.go | 226 + pkg/layer/decoding/geojson_test.go | 129 + pkg/layer/decoding/kml.go | 300 + pkg/layer/decoding/kml_test.go | 754 + pkg/layer/decoding/reearth.go | 312 + pkg/layer/decoding/reearth_test.go | 211 + pkg/layer/decoding/shapetest/point.shp | Bin 0 -> 184 bytes pkg/layer/decoding/shapetest/polygon.shp | Bin 0 -> 236 bytes pkg/layer/decoding/shapetest/polyline.shp | Bin 0 -> 308 bytes pkg/layer/decoding/shapetest/shapes.zip | Bin 0 -> 578 bytes pkg/layer/decoding/shp.go | 101 + pkg/layer/decoding/shp_test.go | 142 + pkg/layer/encoding/common.go | 43 + pkg/layer/encoding/common_test.go | 26 + pkg/layer/encoding/czml.go | 257 + pkg/layer/encoding/czml_test.go | 517 + pkg/layer/encoding/encoder.go | 9 + pkg/layer/encoding/exporter.go | 52 + pkg/layer/encoding/geojson.go | 207 + pkg/layer/encoding/geojson_test.go | 430 + pkg/layer/encoding/kml.go | 420 + pkg/layer/encoding/kml_test.go | 507 + pkg/layer/encoding/shp.go | 204 + pkg/layer/encoding/shp_test.go | 298 + pkg/layer/group.go | 182 + pkg/layer/group_builder.go | 112 + pkg/layer/group_test.go | 137 + pkg/layer/id_list.go | 224 + pkg/layer/id_list_test.go | 133 + pkg/layer/infobox.go | 163 + pkg/layer/infobox_field.go | 56 + pkg/layer/infobox_field_builder.go | 56 + pkg/layer/infobox_test.go | 52 + pkg/layer/initializer.go | 292 + pkg/layer/initializer/initializer.go | 64 + pkg/layer/initializer/initializer_test.go | 82 + pkg/layer/initializer_test.go | 188 + pkg/layer/item.go | 151 + pkg/layer/item_builder.go | 102 + pkg/layer/item_test.go | 3 + pkg/layer/layer.go | 227 + pkg/layer/layer_test.go | 4 + pkg/layer/list.go | 244 + pkg/layer/loader.go | 44 + pkg/layer/merged.go | 152 + pkg/layer/merged_test.go | 379 
+ pkg/layer/merging/merged.go | 96 + pkg/layer/merging/merger.go | 120 + pkg/layer/merging/merger_test.go | 174 + pkg/layer/merging/sealed.go | 74 + pkg/layer/merging/sealer.go | 139 + pkg/log/gceformatter.go | 123 + pkg/log/log.go | 104 + pkg/plugin/builder.go | 72 + pkg/plugin/builder_test.go | 196 + pkg/plugin/extension.go | 84 + pkg/plugin/extension_builder.go | 90 + pkg/plugin/extension_builder_test.go | 195 + pkg/plugin/extension_test.go | 60 + pkg/plugin/loader.go | 9 + pkg/plugin/manifest/convert.go | 296 + pkg/plugin/manifest/convert_test.go | 755 + pkg/plugin/manifest/manifest.go | 12 + pkg/plugin/manifest/parser.go | 98 + pkg/plugin/manifest/parser_test.go | 202 + pkg/plugin/manifest/parser_translation.go | 187 + .../manifest/parser_translation_test.go | 416 + pkg/plugin/manifest/schema_gen.go | 85 + pkg/plugin/manifest/schema_json_gen.go | 365 + .../manifest/schema_json_translation_gen.go | 131 + pkg/plugin/manifest/schema_translation.go | 28 + pkg/plugin/plugin.go | 86 + pkg/plugin/plugin_test.go | 59 + pkg/project/builder.go | 123 + pkg/project/builder_test.go | 369 + pkg/project/project.go | 190 + pkg/project/project_test.go | 270 + pkg/project/publishment_status.go | 13 + pkg/property/builder.go | 97 + pkg/property/builder_test.go | 206 + pkg/property/condition.go | 20 + pkg/property/condition_test.go | 46 + pkg/property/field.go | 200 + pkg/property/field_builder.go | 114 + pkg/property/field_builder_test.go | 232 + pkg/property/field_test.go | 116 + pkg/property/group.go | 286 + pkg/property/group_builder.go | 83 + pkg/property/group_builder_test.go | 153 + pkg/property/group_list.go | 394 + pkg/property/group_list_builder.go | 83 + pkg/property/group_list_builder_test.go | 166 + pkg/property/group_list_test.go | 742 + pkg/property/group_test.go | 471 + pkg/property/initializer.go | 310 + pkg/property/initializer_test.go | 292 + pkg/property/item.go | 56 + pkg/property/item_builder.go | 35 + pkg/property/item_test.go | 86 + pkg/property/link.go | 
443 + pkg/property/link_test.go | 481 + pkg/property/list.go | 96 + pkg/property/list_test.go | 102 + pkg/property/loader.go | 45 + pkg/property/loader_test.go | 46 + pkg/property/merged.go | 309 + pkg/property/merged_test.go | 413 + pkg/property/pointer.go | 201 + pkg/property/pointer_test.go | 77 + pkg/property/property.go | 493 + pkg/property/property_test.go | 275 + pkg/property/schema.go | 175 + pkg/property/schema_builder.go | 78 + pkg/property/schema_builder_test.go | 161 + pkg/property/schema_field.go | 178 + pkg/property/schema_field_builder.go | 143 + pkg/property/schema_field_builder_test.go | 94 + pkg/property/schema_field_test.go | 227 + pkg/property/schema_field_ui.go | 62 + pkg/property/schema_field_ui_test.go | 19 + pkg/property/schema_group.go | 127 + pkg/property/schema_group_builder.go | 83 + pkg/property/schema_group_builder_test.go | 124 + pkg/property/schema_group_test.go | 118 + pkg/property/schema_list.go | 65 + pkg/property/schema_test.go | 142 + pkg/property/sealed.go | 264 + pkg/property/sealed_test.go | 951 + pkg/property/validator.go | 28 + pkg/property/value.go | 240 + pkg/property/value_converter.go | 41 + pkg/property/value_converter_test.go | 120 + pkg/property/value_test.go | 354 + pkg/property/value_type.go | 583 + pkg/property/value_type_test.go | 1426 + pkg/scene/builder.go | 105 + pkg/scene/builder/builder.go | 78 + pkg/scene/builder/builder_test.go | 592 + pkg/scene/builder/encoder.go | 112 + pkg/scene/builder/encoder_test.go | 183 + pkg/scene/builder/scene.go | 81 + pkg/scene/builder/scene_test.go | 46 + pkg/scene/builder_test.go | 325 + pkg/scene/lock.go | 45 + pkg/scene/lock_test.go | 86 + pkg/scene/plugin.go | 38 + pkg/scene/plugin_system.go | 130 + pkg/scene/plugin_system_test.go | 332 + pkg/scene/plugin_test.go | 21 + pkg/scene/scene.go | 113 + pkg/scene/scene_test.go | 86 + pkg/scene/sceneops/dataset_migrator.go | 277 + pkg/scene/sceneops/dataset_migrator_test.go | 46 + pkg/scene/sceneops/plugin_installer.go | 122 + 
pkg/scene/sceneops/plugin_migrator.go | 243 + pkg/scene/widget.go | 63 + pkg/scene/widget_system.go | 112 + pkg/scene/widget_system_test.go | 364 + pkg/scene/widget_test.go | 210 + pkg/shp/errreader.go | 27 + pkg/shp/reader.go | 287 + pkg/shp/sequentialreader.go | 294 + pkg/shp/shapefile.go | 1066 + pkg/shp/shapetype_string.go | 51 + pkg/shp/writer.go | 122 + pkg/shp/zipreader.go | 105 + pkg/user/auth.go | 20 + pkg/user/auth_test.go | 66 + pkg/user/builder.go | 78 + pkg/user/builder_test.go | 200 + pkg/user/initializer/initializer.go | 39 + pkg/user/initializer/initializer_test.go | 110 + pkg/user/members.go | 127 + pkg/user/members_test.go | 261 + pkg/user/role.go | 42 + pkg/user/role_test.go | 80 + pkg/user/team.go | 29 + pkg/user/team_builder.go | 70 + pkg/user/team_builder_test.go | 125 + pkg/user/team_test.go | 38 + pkg/user/theme.go | 9 + pkg/user/user.go | 120 + pkg/user/user_test.go | 278 + pkg/visualizer/visualizer.go | 9 + pkg/writer/seeker_closer.go | 55 + pkg/writer/seeker_closer_test.go | 122 + plugin_manifest_schema.json | 360 + plugin_manifest_schema_translation.json | 126 + schema.graphql | 1385 + tools.go | 9 + tools/cmd/embed/main.go | 178 + tools/cmd/gen/flag.go | 123 + tools/cmd/gen/flag_test.go | 130 + tools/cmd/gen/main.go | 98 + tools/cmd/migrategen/main.go | 115 + tools/cmd/shapefiletest/main.go | 46 + 494 files changed, 114314 insertions(+) create mode 100644 .env create mode 100644 .github/semantic.yml create mode 100644 .github/workflows/main.yml create mode 100644 .github/workflows/pr.yml create mode 100644 .gitignore create mode 100644 .golangci.yml create mode 100644 .graphqlconfig create mode 100644 .vscode/extensions.json create mode 100644 .vscode/launch.json create mode 100644 .vscode/settings.json create mode 100644 CODE_OF_CONDUCT.md create mode 100644 Dockerfile create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 README.md create mode 100644 cmd/godoc/main.go create mode 100644 cmd/reearth/debug.go create 
mode 100644 cmd/reearth/main.go create mode 100644 cmd/reearth/release.go create mode 100644 codecov.yml create mode 100644 docker-compose.yml create mode 100644 go.mod create mode 100644 go.sum create mode 100644 gqlgen.yml create mode 100644 internal/adapter/graphql/container.go create mode 100644 internal/adapter/graphql/controller_asset.go create mode 100644 internal/adapter/graphql/controller_dataset.go create mode 100644 internal/adapter/graphql/controller_layer.go create mode 100644 internal/adapter/graphql/controller_plugin.go create mode 100644 internal/adapter/graphql/controller_project.go create mode 100644 internal/adapter/graphql/controller_property.go create mode 100644 internal/adapter/graphql/controller_scene.go create mode 100644 internal/adapter/graphql/controller_team.go create mode 100644 internal/adapter/graphql/controller_user.go create mode 100644 internal/adapter/graphql/convert.go create mode 100644 internal/adapter/graphql/convert_asset.go create mode 100644 internal/adapter/graphql/convert_dataset.go create mode 100644 internal/adapter/graphql/convert_layer.go create mode 100644 internal/adapter/graphql/convert_plugin.go create mode 100644 internal/adapter/graphql/convert_project.go create mode 100644 internal/adapter/graphql/convert_property.go create mode 100644 internal/adapter/graphql/convert_scene.go create mode 100644 internal/adapter/graphql/convert_team.go create mode 100644 internal/adapter/graphql/convert_user.go create mode 100644 internal/adapter/graphql/loader_dataset.go create mode 100644 internal/adapter/graphql/loader_layer.go create mode 100644 internal/adapter/graphql/loader_plugin.go create mode 100644 internal/adapter/graphql/loader_project.go create mode 100644 internal/adapter/graphql/loader_property.go create mode 100644 internal/adapter/graphql/loader_scene.go create mode 100644 internal/adapter/graphql/loader_team.go create mode 100644 internal/adapter/graphql/loader_user.go create mode 100644 
internal/adapter/graphql/models.go create mode 100644 internal/adapter/graphql/models_gen.go create mode 100644 internal/adapter/graphql/scalar.go create mode 100644 internal/adapter/http/user_controller.go create mode 100644 internal/app/app.go create mode 100644 internal/app/auth.go create mode 100644 internal/app/config.go create mode 100644 internal/app/echo-logrus.go create mode 100644 internal/app/file.go create mode 100644 internal/app/graphql.go create mode 100644 internal/app/jwt.go create mode 100644 internal/app/main.go create mode 100644 internal/app/profiler.go create mode 100644 internal/app/public.go create mode 100644 internal/app/repo.go create mode 100644 internal/app/server.go create mode 100644 internal/app/tracer.go create mode 100644 internal/app/userAPIs.go create mode 100644 internal/app/web.go create mode 100644 internal/graphql/context.go create mode 100644 internal/graphql/dataloader/context.go create mode 100644 internal/graphql/dataloader/dataloader.go create mode 100644 internal/graphql/dataloader/datasetloader_gen.go create mode 100644 internal/graphql/dataloader/datasetschemaloader_gen.go create mode 100644 internal/graphql/dataloader/layergrouploader_gen.go create mode 100644 internal/graphql/dataloader/layeritemloader_gen.go create mode 100644 internal/graphql/dataloader/layerloader_gen.go create mode 100644 internal/graphql/dataloader/loader.tmpl create mode 100644 internal/graphql/dataloader/loader_dataset_schema_gen.go create mode 100644 internal/graphql/dataloader/loader_gen.go create mode 100644 internal/graphql/dataloader/loader_layer_group_gen.go create mode 100644 internal/graphql/dataloader/loader_layer_item_gen.go create mode 100644 internal/graphql/dataloader/loader_property_schema_gen.go create mode 100644 internal/graphql/dataloader/pluginloader_gen.go create mode 100644 internal/graphql/dataloader/projectloader_gen.go create mode 100644 internal/graphql/dataloader/propertyloader_gen.go create mode 100644 
internal/graphql/dataloader/propertyschemaloader_gen.go create mode 100644 internal/graphql/dataloader/sceneloader_gen.go create mode 100644 internal/graphql/dataloader/scenelockloader_gen.go create mode 100644 internal/graphql/dataloader/teamloader_gen.go create mode 100644 internal/graphql/dataloader/userloader_gen.go create mode 100644 internal/graphql/generated.go create mode 100644 internal/graphql/resolver.go create mode 100644 internal/graphql/resolver_asset.go create mode 100644 internal/graphql/resolver_dataset.go create mode 100644 internal/graphql/resolver_dataset_schema.go create mode 100644 internal/graphql/resolver_layer.go create mode 100644 internal/graphql/resolver_mutation.go create mode 100644 internal/graphql/resolver_plugin.go create mode 100644 internal/graphql/resolver_project.go create mode 100644 internal/graphql/resolver_property.go create mode 100644 internal/graphql/resolver_query.go create mode 100644 internal/graphql/resolver_scene.go create mode 100644 internal/graphql/resolver_team.go create mode 100644 internal/graphql/resolver_user.go create mode 100644 internal/graphql/tracer.go create mode 100644 internal/infrastructure/adapter/plugin.go create mode 100644 internal/infrastructure/adapter/property_schema.go create mode 100644 internal/infrastructure/auth0/authenticator.go create mode 100644 internal/infrastructure/auth0/authenticator_test.go create mode 100644 internal/infrastructure/fs/archive.go create mode 100644 internal/infrastructure/fs/common.go create mode 100644 internal/infrastructure/fs/common_test.go create mode 100644 internal/infrastructure/fs/file.go create mode 100644 internal/infrastructure/fs/plugin.go create mode 100644 internal/infrastructure/fs/plugin_repository.go create mode 100644 internal/infrastructure/fs/property_schema.go create mode 100644 internal/infrastructure/gcs/file.go create mode 100644 internal/infrastructure/gcs/file_test.go create mode 100644 internal/infrastructure/memory/asset.go create 
mode 100644 internal/infrastructure/memory/config.go create mode 100644 internal/infrastructure/memory/container.go create mode 100644 internal/infrastructure/memory/dataset.go create mode 100644 internal/infrastructure/memory/dataset_schema.go create mode 100644 internal/infrastructure/memory/dummy.go create mode 100644 internal/infrastructure/memory/layer.go create mode 100644 internal/infrastructure/memory/plugin.go create mode 100644 internal/infrastructure/memory/project.go create mode 100644 internal/infrastructure/memory/property.go create mode 100644 internal/infrastructure/memory/property_schema.go create mode 100644 internal/infrastructure/memory/scene.go create mode 100644 internal/infrastructure/memory/scene_lock.go create mode 100644 internal/infrastructure/memory/team.go create mode 100644 internal/infrastructure/memory/transaction.go create mode 100644 internal/infrastructure/memory/user.go create mode 100644 internal/infrastructure/memory/util.go create mode 100644 internal/infrastructure/mongo/asset.go create mode 100644 internal/infrastructure/mongo/config.go create mode 100644 internal/infrastructure/mongo/container.go create mode 100644 internal/infrastructure/mongo/dataset.go create mode 100644 internal/infrastructure/mongo/dataset_schema.go create mode 100644 internal/infrastructure/mongo/dummy.go create mode 100644 internal/infrastructure/mongo/layer.go create mode 100644 internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go create mode 100644 internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go create mode 100644 internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go create mode 100644 internal/infrastructure/mongo/migration/client.go create mode 100644 internal/infrastructure/mongo/migration/migrations.go create mode 100644 internal/infrastructure/mongo/mongodoc/asset.go create mode 100644 internal/infrastructure/mongo/mongodoc/client.go create mode 100644 
internal/infrastructure/mongo/mongodoc/clientcol.go create mode 100644 internal/infrastructure/mongo/mongodoc/consumer.go create mode 100644 internal/infrastructure/mongo/mongodoc/consumer_test.go create mode 100644 internal/infrastructure/mongo/mongodoc/dataset.go create mode 100644 internal/infrastructure/mongo/mongodoc/dataset_schema.go create mode 100644 internal/infrastructure/mongo/mongodoc/layer.go create mode 100644 internal/infrastructure/mongo/mongodoc/plugin.go create mode 100644 internal/infrastructure/mongo/mongodoc/project.go create mode 100644 internal/infrastructure/mongo/mongodoc/property.go create mode 100644 internal/infrastructure/mongo/mongodoc/property_schema.go create mode 100644 internal/infrastructure/mongo/mongodoc/scene.go create mode 100644 internal/infrastructure/mongo/mongodoc/team.go create mode 100644 internal/infrastructure/mongo/mongodoc/user.go create mode 100644 internal/infrastructure/mongo/mongodoc/util.go create mode 100644 internal/infrastructure/mongo/plugin.go create mode 100644 internal/infrastructure/mongo/project.go create mode 100644 internal/infrastructure/mongo/property.go create mode 100644 internal/infrastructure/mongo/property_schema.go create mode 100644 internal/infrastructure/mongo/scene.go create mode 100644 internal/infrastructure/mongo/scene_lock.go create mode 100644 internal/infrastructure/mongo/team.go create mode 100644 internal/infrastructure/mongo/transaction.go create mode 100644 internal/infrastructure/mongo/user.go create mode 100644 internal/infrastructure/npm/archive.go create mode 100644 internal/infrastructure/npm/plugin_repository.go create mode 100644 internal/usecase/cursor.go create mode 100644 internal/usecase/gateway/authenticator.go create mode 100644 internal/usecase/gateway/container.go create mode 100644 internal/usecase/gateway/datasouce.go create mode 100644 internal/usecase/gateway/file.go create mode 100644 internal/usecase/gateway/mailer.go create mode 100644 
internal/usecase/gateway/plugin_repository.go create mode 100644 internal/usecase/interactor/asset.go create mode 100644 internal/usecase/interactor/common.go create mode 100644 internal/usecase/interactor/dataset.go create mode 100644 internal/usecase/interactor/layer.go create mode 100644 internal/usecase/interactor/layer_test.go create mode 100644 internal/usecase/interactor/plugin.go create mode 100644 internal/usecase/interactor/project.go create mode 100644 internal/usecase/interactor/property.go create mode 100644 internal/usecase/interactor/scene.go create mode 100644 internal/usecase/interactor/team.go create mode 100644 internal/usecase/interactor/team_test.go create mode 100644 internal/usecase/interactor/user.go create mode 100644 internal/usecase/interfaces/asset.go create mode 100644 internal/usecase/interfaces/common.go create mode 100644 internal/usecase/interfaces/dataset.go create mode 100644 internal/usecase/interfaces/layer.go create mode 100644 internal/usecase/interfaces/plugin.go create mode 100644 internal/usecase/interfaces/project.go create mode 100644 internal/usecase/interfaces/property.go create mode 100644 internal/usecase/interfaces/scene.go create mode 100644 internal/usecase/interfaces/team.go create mode 100644 internal/usecase/interfaces/user.go create mode 100644 internal/usecase/operator.go create mode 100644 internal/usecase/pageinfo.go create mode 100644 internal/usecase/pagination.go create mode 100644 internal/usecase/repo/asset.go create mode 100644 internal/usecase/repo/config.go create mode 100644 internal/usecase/repo/container.go create mode 100644 internal/usecase/repo/dataset.go create mode 100644 internal/usecase/repo/dataset_schema.go create mode 100644 internal/usecase/repo/layer.go create mode 100644 internal/usecase/repo/plugin.go create mode 100644 internal/usecase/repo/project.go create mode 100644 internal/usecase/repo/property.go create mode 100644 internal/usecase/repo/property_schema.go create mode 100644 
internal/usecase/repo/scene.go create mode 100644 internal/usecase/repo/scene_lock.go create mode 100644 internal/usecase/repo/team.go create mode 100644 internal/usecase/repo/transaction.go create mode 100644 internal/usecase/repo/user.go create mode 100644 main.go create mode 100644 pkg/asset/asset.go create mode 100644 pkg/asset/asset_test.go create mode 100644 pkg/asset/builder.go create mode 100644 pkg/asset/builder_test.go create mode 100644 pkg/builtin/main.go create mode 100644 pkg/builtin/main_test.go create mode 100644 pkg/builtin/manifest.yml create mode 100644 pkg/builtin/manifest_gen.go create mode 100644 pkg/builtin/manifest_ja.yml create mode 100644 pkg/builtin/manifest_ja_gen.go create mode 100644 pkg/builtin/migration.go create mode 100644 pkg/config/config.go create mode 100644 pkg/config/config_test.go create mode 100644 pkg/czml/czml.go create mode 100644 pkg/dataset/builder.go create mode 100644 pkg/dataset/csvparser.go create mode 100644 pkg/dataset/csvparser_test.go create mode 100644 pkg/dataset/dataset.go create mode 100644 pkg/dataset/diff.go create mode 100644 pkg/dataset/field.go create mode 100644 pkg/dataset/graph_iterator.go create mode 100644 pkg/dataset/graph_iterator_test.go create mode 100644 pkg/dataset/graph_loader.go create mode 100644 pkg/dataset/list.go create mode 100644 pkg/dataset/list_test.go create mode 100644 pkg/dataset/loader.go create mode 100644 pkg/dataset/schema.go create mode 100644 pkg/dataset/schema_builder.go create mode 100644 pkg/dataset/schema_field.go create mode 100644 pkg/dataset/schema_field_builder.go create mode 100644 pkg/dataset/schema_field_diff.go create mode 100644 pkg/dataset/schema_graph_iterator.go create mode 100644 pkg/dataset/schema_graph_iterator_test.go create mode 100644 pkg/dataset/schema_list.go create mode 100644 pkg/dataset/schema_list_test.go create mode 100644 pkg/dataset/source.go create mode 100644 pkg/dataset/value.go create mode 100644 pkg/dataset/value_test.go create mode 
100644 pkg/error/error.go create mode 100644 pkg/error/error_test.go create mode 100644 pkg/file/file.go create mode 100644 pkg/i18n/string.go create mode 100644 pkg/i18n/string_test.go create mode 100644 pkg/id/asset_gen.go create mode 100644 pkg/id/dataset_gen.go create mode 100644 pkg/id/dataset_schema_field_gen.go create mode 100644 pkg/id/dataset_schema_gen.go create mode 100644 pkg/id/gen.go create mode 100644 pkg/id/id.go create mode 100644 pkg/id/id.tmpl create mode 100644 pkg/id/infobox_field_gen.go create mode 100644 pkg/id/layer_gen.go create mode 100644 pkg/id/plugin.go create mode 100644 pkg/id/plugin_extension.go create mode 100644 pkg/id/plugin_test.go create mode 100644 pkg/id/project_gen.go create mode 100644 pkg/id/property_gen.go create mode 100644 pkg/id/property_item_gen.go create mode 100644 pkg/id/property_schema.go create mode 100644 pkg/id/property_schema_field.go create mode 100644 pkg/id/property_schema_test.go create mode 100644 pkg/id/scene_gen.go create mode 100644 pkg/id/team_gen.go create mode 100644 pkg/id/user_gen.go create mode 100644 pkg/id/widget_gen.go create mode 100644 pkg/kml/kml.go create mode 100644 pkg/layer/builder.go create mode 100644 pkg/layer/decoding/common.go create mode 100644 pkg/layer/decoding/common_test.go create mode 100644 pkg/layer/decoding/czml.go create mode 100644 pkg/layer/decoding/czml_test.go create mode 100644 pkg/layer/decoding/decoder.go create mode 100644 pkg/layer/decoding/format.go create mode 100644 pkg/layer/decoding/geojson.go create mode 100644 pkg/layer/decoding/geojson_test.go create mode 100644 pkg/layer/decoding/kml.go create mode 100644 pkg/layer/decoding/kml_test.go create mode 100644 pkg/layer/decoding/reearth.go create mode 100644 pkg/layer/decoding/reearth_test.go create mode 100644 pkg/layer/decoding/shapetest/point.shp create mode 100644 pkg/layer/decoding/shapetest/polygon.shp create mode 100644 pkg/layer/decoding/shapetest/polyline.shp create mode 100644 
pkg/layer/decoding/shapetest/shapes.zip create mode 100644 pkg/layer/decoding/shp.go create mode 100644 pkg/layer/decoding/shp_test.go create mode 100644 pkg/layer/encoding/common.go create mode 100644 pkg/layer/encoding/common_test.go create mode 100644 pkg/layer/encoding/czml.go create mode 100644 pkg/layer/encoding/czml_test.go create mode 100644 pkg/layer/encoding/encoder.go create mode 100644 pkg/layer/encoding/exporter.go create mode 100644 pkg/layer/encoding/geojson.go create mode 100644 pkg/layer/encoding/geojson_test.go create mode 100644 pkg/layer/encoding/kml.go create mode 100644 pkg/layer/encoding/kml_test.go create mode 100644 pkg/layer/encoding/shp.go create mode 100644 pkg/layer/encoding/shp_test.go create mode 100644 pkg/layer/group.go create mode 100644 pkg/layer/group_builder.go create mode 100644 pkg/layer/group_test.go create mode 100644 pkg/layer/id_list.go create mode 100644 pkg/layer/id_list_test.go create mode 100644 pkg/layer/infobox.go create mode 100644 pkg/layer/infobox_field.go create mode 100644 pkg/layer/infobox_field_builder.go create mode 100644 pkg/layer/infobox_test.go create mode 100644 pkg/layer/initializer.go create mode 100644 pkg/layer/initializer/initializer.go create mode 100644 pkg/layer/initializer/initializer_test.go create mode 100644 pkg/layer/initializer_test.go create mode 100644 pkg/layer/item.go create mode 100644 pkg/layer/item_builder.go create mode 100644 pkg/layer/item_test.go create mode 100644 pkg/layer/layer.go create mode 100644 pkg/layer/layer_test.go create mode 100644 pkg/layer/list.go create mode 100644 pkg/layer/loader.go create mode 100644 pkg/layer/merged.go create mode 100644 pkg/layer/merged_test.go create mode 100644 pkg/layer/merging/merged.go create mode 100644 pkg/layer/merging/merger.go create mode 100644 pkg/layer/merging/merger_test.go create mode 100644 pkg/layer/merging/sealed.go create mode 100644 pkg/layer/merging/sealer.go create mode 100644 pkg/log/gceformatter.go create mode 100644 
pkg/log/log.go create mode 100644 pkg/plugin/builder.go create mode 100644 pkg/plugin/builder_test.go create mode 100644 pkg/plugin/extension.go create mode 100644 pkg/plugin/extension_builder.go create mode 100644 pkg/plugin/extension_builder_test.go create mode 100644 pkg/plugin/extension_test.go create mode 100644 pkg/plugin/loader.go create mode 100644 pkg/plugin/manifest/convert.go create mode 100644 pkg/plugin/manifest/convert_test.go create mode 100644 pkg/plugin/manifest/manifest.go create mode 100644 pkg/plugin/manifest/parser.go create mode 100644 pkg/plugin/manifest/parser_test.go create mode 100644 pkg/plugin/manifest/parser_translation.go create mode 100644 pkg/plugin/manifest/parser_translation_test.go create mode 100644 pkg/plugin/manifest/schema_gen.go create mode 100644 pkg/plugin/manifest/schema_json_gen.go create mode 100644 pkg/plugin/manifest/schema_json_translation_gen.go create mode 100644 pkg/plugin/manifest/schema_translation.go create mode 100644 pkg/plugin/plugin.go create mode 100644 pkg/plugin/plugin_test.go create mode 100644 pkg/project/builder.go create mode 100644 pkg/project/builder_test.go create mode 100644 pkg/project/project.go create mode 100644 pkg/project/project_test.go create mode 100644 pkg/project/publishment_status.go create mode 100644 pkg/property/builder.go create mode 100644 pkg/property/builder_test.go create mode 100644 pkg/property/condition.go create mode 100644 pkg/property/condition_test.go create mode 100644 pkg/property/field.go create mode 100644 pkg/property/field_builder.go create mode 100644 pkg/property/field_builder_test.go create mode 100644 pkg/property/field_test.go create mode 100644 pkg/property/group.go create mode 100644 pkg/property/group_builder.go create mode 100644 pkg/property/group_builder_test.go create mode 100644 pkg/property/group_list.go create mode 100644 pkg/property/group_list_builder.go create mode 100644 pkg/property/group_list_builder_test.go create mode 100644 
pkg/property/group_list_test.go create mode 100644 pkg/property/group_test.go create mode 100644 pkg/property/initializer.go create mode 100644 pkg/property/initializer_test.go create mode 100644 pkg/property/item.go create mode 100644 pkg/property/item_builder.go create mode 100644 pkg/property/item_test.go create mode 100644 pkg/property/link.go create mode 100644 pkg/property/link_test.go create mode 100644 pkg/property/list.go create mode 100644 pkg/property/list_test.go create mode 100644 pkg/property/loader.go create mode 100644 pkg/property/loader_test.go create mode 100644 pkg/property/merged.go create mode 100644 pkg/property/merged_test.go create mode 100644 pkg/property/pointer.go create mode 100644 pkg/property/pointer_test.go create mode 100644 pkg/property/property.go create mode 100644 pkg/property/property_test.go create mode 100644 pkg/property/schema.go create mode 100644 pkg/property/schema_builder.go create mode 100644 pkg/property/schema_builder_test.go create mode 100644 pkg/property/schema_field.go create mode 100644 pkg/property/schema_field_builder.go create mode 100644 pkg/property/schema_field_builder_test.go create mode 100644 pkg/property/schema_field_test.go create mode 100644 pkg/property/schema_field_ui.go create mode 100644 pkg/property/schema_field_ui_test.go create mode 100644 pkg/property/schema_group.go create mode 100644 pkg/property/schema_group_builder.go create mode 100644 pkg/property/schema_group_builder_test.go create mode 100644 pkg/property/schema_group_test.go create mode 100644 pkg/property/schema_list.go create mode 100644 pkg/property/schema_test.go create mode 100644 pkg/property/sealed.go create mode 100644 pkg/property/sealed_test.go create mode 100644 pkg/property/validator.go create mode 100644 pkg/property/value.go create mode 100644 pkg/property/value_converter.go create mode 100644 pkg/property/value_converter_test.go create mode 100644 pkg/property/value_test.go create mode 100644 pkg/property/value_type.go 
create mode 100644 pkg/property/value_type_test.go create mode 100644 pkg/scene/builder.go create mode 100644 pkg/scene/builder/builder.go create mode 100644 pkg/scene/builder/builder_test.go create mode 100644 pkg/scene/builder/encoder.go create mode 100644 pkg/scene/builder/encoder_test.go create mode 100644 pkg/scene/builder/scene.go create mode 100644 pkg/scene/builder/scene_test.go create mode 100644 pkg/scene/builder_test.go create mode 100644 pkg/scene/lock.go create mode 100644 pkg/scene/lock_test.go create mode 100644 pkg/scene/plugin.go create mode 100644 pkg/scene/plugin_system.go create mode 100644 pkg/scene/plugin_system_test.go create mode 100644 pkg/scene/plugin_test.go create mode 100644 pkg/scene/scene.go create mode 100644 pkg/scene/scene_test.go create mode 100644 pkg/scene/sceneops/dataset_migrator.go create mode 100644 pkg/scene/sceneops/dataset_migrator_test.go create mode 100644 pkg/scene/sceneops/plugin_installer.go create mode 100644 pkg/scene/sceneops/plugin_migrator.go create mode 100644 pkg/scene/widget.go create mode 100644 pkg/scene/widget_system.go create mode 100644 pkg/scene/widget_system_test.go create mode 100644 pkg/scene/widget_test.go create mode 100644 pkg/shp/errreader.go create mode 100644 pkg/shp/reader.go create mode 100644 pkg/shp/sequentialreader.go create mode 100644 pkg/shp/shapefile.go create mode 100644 pkg/shp/shapetype_string.go create mode 100644 pkg/shp/writer.go create mode 100644 pkg/shp/zipreader.go create mode 100644 pkg/user/auth.go create mode 100644 pkg/user/auth_test.go create mode 100644 pkg/user/builder.go create mode 100644 pkg/user/builder_test.go create mode 100644 pkg/user/initializer/initializer.go create mode 100644 pkg/user/initializer/initializer_test.go create mode 100644 pkg/user/members.go create mode 100644 pkg/user/members_test.go create mode 100644 pkg/user/role.go create mode 100644 pkg/user/role_test.go create mode 100644 pkg/user/team.go create mode 100644 pkg/user/team_builder.go 
create mode 100644 pkg/user/team_builder_test.go create mode 100644 pkg/user/team_test.go create mode 100644 pkg/user/theme.go create mode 100644 pkg/user/user.go create mode 100644 pkg/user/user_test.go create mode 100644 pkg/visualizer/visualizer.go create mode 100644 pkg/writer/seeker_closer.go create mode 100644 pkg/writer/seeker_closer_test.go create mode 100644 plugin_manifest_schema.json create mode 100644 plugin_manifest_schema_translation.json create mode 100644 schema.graphql create mode 100644 tools.go create mode 100644 tools/cmd/embed/main.go create mode 100644 tools/cmd/gen/flag.go create mode 100644 tools/cmd/gen/flag_test.go create mode 100644 tools/cmd/gen/main.go create mode 100644 tools/cmd/migrategen/main.go create mode 100644 tools/cmd/shapefiletest/main.go diff --git a/.env b/.env new file mode 100644 index 000000000..7e9817c1f --- /dev/null +++ b/.env @@ -0,0 +1,6 @@ +REEARTH_AUTH0_DOMAIN= +REEARTH_AUTH0_CLIENTID= +REEARTH_AUTH0_CLIENTSECRET= +REEARTH_AUTH0_WEBCLIENTID= +REEARTH_ASSETBASEURL=http://localhost:8080/assets +REEARTH_SERVEFILES=1 diff --git a/.github/semantic.yml b/.github/semantic.yml new file mode 100644 index 000000000..fd160e519 --- /dev/null +++ b/.github/semantic.yml @@ -0,0 +1 @@ +titleOnly: true diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 000000000..f03af0f8d --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,44 @@ +name: main +on: + push: + branches: + - main +jobs: + build: + name: main + runs-on: ubuntu-latest + steps: + - name: set up + uses: actions/setup-go@v1 + with: + go-version: 1.16 + id: go + - name: checkout + uses: actions/checkout@v2 + - name: cache + uses: actions/cache@v1 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: install golangci-lint + run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin 
v1.39.0 + - name: lint + run: $(go env GOPATH)/bin/golangci-lint run --timeout=10m + - name: test + run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic + - name: Send coverage report + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: coverage.txt + - name: Slack Notification + uses: 8398a7/action-slack@v3 + if: always() + with: + status: ${{ job.status }} + fields: repo,commit,action,workflow + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 000000000..1dc172971 --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,41 @@ +name: pr +on: [pull_request] +jobs: + build: + name: pr + runs-on: ubuntu-latest + steps: + - name: set up + uses: actions/setup-go@v1 + with: + go-version: 1.16 + id: go + - name: checkout + uses: actions/checkout@v2 + - name: cache + uses: actions/cache@v1 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: install lint + run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin v1.39.0 + - name: lint + run: $(go env GOPATH)/bin/golangci-lint run --timeout=10m + - name: test + run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic + - name: Send coverage report + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + file: coverage.txt + - name: Slack Notification + uses: 8398a7/action-slack@v3 + if: always() + with: + status: ${{ job.status }} + fields: repo,commit,action,workflow + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..b2965e261 --- /dev/null +++ b/.gitignore @@ -0,0 +1,28 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +.DS_Store + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Editor +.idea + +# reearth +/reearth +/reearth-backend +__debug_bin +/data +/bin +/debug +/mongo +.env.local +/coverage.txt +/web diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 000000000..63eab2e25 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,10 @@ +issues: + exclude-use-default: false + +linters: + enable: + - gofmt + - goimports + +goimports: + local-prefixes: github.com/reearth/reearth-backend diff --git a/.graphqlconfig b/.graphqlconfig new file mode 100644 index 000000000..5c5a31cea --- /dev/null +++ b/.graphqlconfig @@ -0,0 +1,3 @@ +{ + "schemaPath": "schema.graphql" +} diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..0181837e9 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,11 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations. + // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp + // List of extensions which should be recommended for users of this workspace. 
+ "recommendations": [ + "golang.go", + "redhat.vscode-yaml" + ], + // List of extensions recommended by VS Code that should not be recommended for users of this workspace. + "unwantedRecommendations": [] +} diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 000000000..3ff903694 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,15 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Launch", + "type": "go", + "request": "launch", + "mode": "auto", + "cwd": "${workspaceRoot}", + "program": "${workspaceRoot}/cmd/reearth", + "env": {}, + "args": [] + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 000000000..87392e61f --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,33 @@ +{ + "editor.formatOnSave": true, + "go.lintTool": "golangci-lint", + "go.lintFlags": [ + "--fast" + ], + "yaml.format.enable": true, + "yaml.completion": true, + "yaml.validate": true, + "yaml.hover": true, + "yaml.schemas": { + "./plugin_manifest_schema.json": [ + "/pkg/builtin/manifest.yml" + ], + "./plugin_manifest_schema_translation.json": [ + "/pkg/builtin/manifest_*.yml" + ] + }, + "json.schemas": [ + { + "fileMatch": [ + "/pkg/builtin/manifest.json" + ], + "url": "./plugin_manifest_schema.json" + }, + { + "fileMatch": [ + "/pkg/builtin/manifest_*.json" + ], + "url": "./plugin_manifest_schema_translation.json" + } + ] +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..ae596ec11 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,132 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, 
religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. 
+Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +[INSERT CONTACT METHOD]. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. 
No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available +at [https://www.contributor-covenant.org/translations][translations]. 
+ +[homepage]: https://www.contributor-covenant.org +[v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html +[Mozilla CoC]: https://github.com/mozilla/diversity +[FAQ]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..db1df0115 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,24 @@ +FROM golang:1.16-alpine AS build +ARG TAG=release +ARG VERSION + +RUN apk add --update --no-cache git ca-certificates build-base upx + +COPY go.mod go.sum main.go /reearth/ +WORKDIR /reearth +RUN go mod download + +COPY cmd/ /reearth/cmd/ +COPY pkg/ /reearth/pkg/ +COPY internal/ /reearth/internal/ + +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -buildid=" -trimpath ./cmd/reearth && upx reearth + +FROM scratch + +COPY --from=build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt +COPY --from=build /reearth/reearth /reearth/reearth + +WORKDIR /reearth + +CMD [ "./reearth" ] diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..f433b1a53 --- /dev/null +++ b/LICENSE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..c27e1a738 --- /dev/null +++ b/Makefile @@ -0,0 +1,31 @@ +lint: + golangci-lint run --fix + +test: + go test -race -v ./... + +build: + go build ./cmd/reearth + +run-app: + go run ./cmd/reearth + +run-db: + docker-compose up -d reearth-mongo + +gen: + go generate ./... 
+ +gen/gql: + go generate ./internal/graphql + +gen/builtin: + go generate ./pkg/builtin + +gen/manifest: + go generate ./pkg/plugin/manifest + +gen/id: + go generate ./pkg/id + +.PHONY: lint test build run-app run-db gen gen/gql gen/builtin gen/manifest gen/id diff --git a/README.md b/README.md new file mode 100644 index 000000000..7241a49f8 --- /dev/null +++ b/README.md @@ -0,0 +1,7 @@ +# reearth-backend + +(unfinished) + +## License + +[Apache License 2.0](LICENSE) diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go new file mode 100644 index 000000000..8fac7cf2f --- /dev/null +++ b/cmd/godoc/main.go @@ -0,0 +1,41 @@ +package main + +import ( + "crypto/subtle" + "net/url" + "os" + "os/exec" + + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" +) + +const godocPort = "8080" + +func main() { + e := echo.New() + err := exec.Command("godoc", "-http=:"+godocPort).Start() + if err != nil { + e.Logger.Fatal(err) + } + e.Use(middleware.Logger()) + e.Use(middleware.Recover()) + e.Use(middleware.BasicAuth(func(username, password string, c echo.Context) (bool, error) { + if subtle.ConstantTimeCompare([]byte(username), []byte(os.Getenv("BASIC_AUTH_USERNAME"))) == 1 && + subtle.ConstantTimeCompare([]byte(password), []byte(os.Getenv("BASIC_AUTH_PASSWORD"))) == 1 { + return true, nil + } + return false, nil + })) + t, err := url.Parse("http://localhost:8080/") + if err != nil { + e.Logger.Fatal(err) + } + tl := []*middleware.ProxyTarget{ + { + URL: t, + }, + } + e.Group("", middleware.Proxy(middleware.NewRoundRobinBalancer(tl))) + e.Logger.Fatal(e.Start(":" + os.Getenv("PORT"))) +} diff --git a/cmd/reearth/debug.go b/cmd/reearth/debug.go new file mode 100644 index 000000000..163bd1394 --- /dev/null +++ b/cmd/reearth/debug.go @@ -0,0 +1,5 @@ +// +build !release + +package main + +const debug = true diff --git a/cmd/reearth/main.go b/cmd/reearth/main.go new file mode 100644 index 000000000..7246bdfe8 --- /dev/null +++ b/cmd/reearth/main.go @@ -0,0 +1,9 @@ 
+package main + +import "github.com/reearth/reearth-backend/internal/app" + +var version = "" + +func main() { + app.Start(debug, version) +} diff --git a/cmd/reearth/release.go b/cmd/reearth/release.go new file mode 100644 index 000000000..103b98e7c --- /dev/null +++ b/cmd/reearth/release.go @@ -0,0 +1,5 @@ +// +build release + +package main + +const debug = false diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..dc7e349fc --- /dev/null +++ b/codecov.yml @@ -0,0 +1,4 @@ +comment: + layout: 'reach, diff, flags, files' + behavior: default + require_changes: false diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..d51821c97 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,22 @@ +version: '3' +services: + reearth-backend: + build: + context: . + args: + TAG: debug + ports: + - '8080:8080' + env_file: + - ./.env + environment: + REEARTH_ENV: docker + REEARTH_DB_URL: mongodb://reearth-mongo + depends_on: + - reearth-mongo + reearth-mongo: + image: mongo:4.2.0-bionic + ports: + - 27017:27017 + volumes: + - ./mongo:/data/db diff --git a/go.mod b/go.mod new file mode 100644 index 000000000..0557de0b6 --- /dev/null +++ b/go.mod @@ -0,0 +1,57 @@ +module github.com/reearth/reearth-backend + +// +heroku install golang.org/x/tools/cmd/godoc +require ( + cloud.google.com/go v0.80.0 + cloud.google.com/go/storage v1.14.0 + github.com/99designs/gqlgen v0.13.0 + github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0 + github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect + github.com/agnivade/levenshtein v1.1.0 // indirect + github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 + github.com/blang/semver v3.5.1+incompatible + github.com/dgrijalva/jwt-go v3.2.0+incompatible + github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect + github.com/gedex/inflector 
v0.0.0-20170307190818-16278e9db813 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/iancoleman/strcase v0.1.3 + github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d + github.com/joho/godotenv v1.3.0 + github.com/jonas-p/go-shp v0.1.1 + github.com/kelseyhightower/envconfig v1.4.0 + github.com/klauspost/compress v1.10.10 // indirect + github.com/labstack/echo/v4 v4.2.1 + github.com/labstack/gommon v0.3.0 + github.com/mitchellh/mapstructure v1.4.1 + github.com/oklog/ulid v1.3.1 + github.com/opentracing/opentracing-go v1.2.0 // indirect + github.com/paulmach/go.geojson v1.4.0 + github.com/pkg/errors v0.9.1 + github.com/sirupsen/logrus v1.8.1 + github.com/smartystreets/assertions v1.1.1 // indirect + github.com/stretchr/objx v0.2.0 // indirect + github.com/stretchr/testify v1.7.0 + github.com/tidwall/pretty v1.0.1 // indirect + github.com/twpayne/go-kml v1.5.2 + github.com/uber/jaeger-client-go v2.25.0+incompatible + github.com/uber/jaeger-lib v2.4.1+incompatible + github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e + github.com/vektah/gqlparser/v2 v2.1.0 + github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect + github.com/xeipuuv/gojsonschema v1.2.0 + go.mongodb.org/mongo-driver v1.5.1 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0 + go.opentelemetry.io/otel v0.7.0 + go.uber.org/atomic v1.7.0 // indirect + golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c // indirect + golang.org/x/text v0.3.5 + golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect + golang.org/x/tools v0.1.0 + gopkg.in/go-playground/colors.v1 v1.2.0 + gopkg.in/yaml.v2 v2.4.0 + gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect +) + +go 1.16 diff --git a/go.sum b/go.sum new file mode 100644 index 000000000..4784ad74b --- /dev/null +++ b/go.sum @@ -0,0 
+1,832 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.80.0 h1:kAdyAMrj9CjqOSGiluseVjIgAyQ3uxADYtUYR6MwYeY= +cloud.google.com/go v0.80.0/go.mod h1:fqpb6QRi1CFGAMXDoE72G+b+Ybv7dMB/T1tbExDHktI= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod 
h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0 h1:6RRlFMv1omScs6iq2hfE3IvgE+l6RfJPampq8UZc5TU= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/99designs/gqlgen v0.11.3/go.mod h1:RgX5GRRdDWNkh4pBrdzNpNPFVsdoUFY2+adM6nb1N+4= +github.com/99designs/gqlgen v0.13.0 h1:haLTcUp3Vwp80xMVEg5KRNwzfUrgFdRmtBY8fuB8scA= +github.com/99designs/gqlgen v0.13.0/go.mod h1:NV130r6f4tpRWuAI+zsrSdooO/eWUv+Gyyoi3rEfXIk= 
+github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd h1:jtzFT7TsrvMTGwBn8DvwMFDowJ2INPqtP7HpL1R9TIY= +github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd/go.mod h1:ud8RnaGvSBJFGEIfo0gMid33OUXXb68bNJlWUUZARGY= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/sketches-go v0.0.0-20190923095040-43f19ad77ff7 h1:qELHH0AWCvf98Yf+CNIJx9vOZOfHFDDzgDRYsnNk/vs= +github.com/DataDog/sketches-go v0.0.0-20190923095040-43f19ad77ff7/go.mod h1:Q5DbzQ+3AkgGwymQO7aZFNP7ns2lZKGtvRBzRXfdi60= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0 h1:d/0HrwVskjLkJIz70Gn9ADURRNaNdTGOkQ1TiuCOefU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0/go.mod h1:Ps0PAOihxzMbs4J2PWLffeKwJo3Bka6LHMLP6r/K0l8= +github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= +github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM= +github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= +github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs= +github.com/agnivade/levenshtein v1.1.0 h1:n6qGwyHG61v3ABce1rPVZklEYRT8NFpCMrpZdBUbYGM= +github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template 
v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4 h1:Hs82Z41s6SdL1CELW+XaDYmOH4hkBN4/N9og/AsOv7E= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= +github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 h1:LY/kRH+fCqA090FsM2VfZ+oocD99ogm3HrT1r0WDnCk= +github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63/go.mod h1:mF0ip7kTEFtnhBJbd/gJe62US3jykNN+dcZoZakJCCA= +github.com/aws/aws-sdk-go v1.34.28 h1:sscPpn/Ns3i0F4HPEWAVcwdIRaZZCuL7llJ2/60yPIk= +github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/benbjohnson/clock v1.0.0/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= +github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/blang/semver v3.5.1+incompatible 
h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= +github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod 
h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b h1:8xx0j7yceTAgVxonE+qOOepmwWS/Ic3OLQapY9HJajc= +github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= +github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= +github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod 
h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= +github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= +github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= +github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI= +github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs= +github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI= +github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk= +github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28= +github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo= +github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk= +github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw= +github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod 
h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360= +github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg= +github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE= +github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8= +github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc= +github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4= +github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= +github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= +github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock 
v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0 h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod 
h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1 h1:jAbXjIeW2ZSW2AwFxlGTDoc2CjI2XujLkV3ArsZFCvc= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod 
h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5 h1:zIaiqGYDQwa4HVx5wGRTXbx38Pqxjemn4BP98wpzpXo= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= +github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= +github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/iancoleman/strcase v0.1.3 h1:dJBk1m2/qjL1twPLf68JND55vvivMupZ4wIzE8CTdBw= +github.com/iancoleman/strcase v0.1.3/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE= 
+github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d h1:sQbbvtUoen3Tfl9G/079tXeqniwPH6TgM/lU4y7lQN8= +github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:xVHEhsiSJJnT0jlcQpQUg+GyoLf0i0xciM1kqWTGT58= +github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= +github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/jonas-p/go-shp v0.1.1 h1:LY81nN67DBCz6VNFn2kS64CjmnDo9IP8rmSkTvhO9jE= +github.com/jonas-p/go-shp v0.1.1/go.mod h1:MRIhyxDQ6VVp0oYeD7yPGr5RSTNScUFKCDsI5DR7PtI= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible 
h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= +github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= +github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= +github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.10.10 h1:a/y8CglcM7gLGYmlbP/stPE5sR3hbhFRUjCBfd/0B3I= +github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/labstack/echo/v4 v4.1.16/go.mod h1:awO+5TzAjvL8XpibdsfXxPgHr+orhtXZJZIQCVjogKI= +github.com/labstack/echo/v4 v4.2.1 h1:LF5Iq7t/jrtUuSutNuiEWtB5eiHfZ5gSe2pcu5exjQw= 
+github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= +github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0= +github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.7 h1:bQGKb3vps/j0E9GfJQ03JyhRuxsvdAanXlT9BTw3mdw= +github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= +github.com/mitchellh/mapstructure v1.4.1/go.mod 
h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= +github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.1.1-0.20190913142402-a7454ce5950e/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= +github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/paulmach/go.geojson v1.4.0 h1:5x5moCkCtDo5x8af62P9IOAYGQcYHtxz2QJ3x1DoCgY= +github.com/paulmach/go.geojson 
v1.4.0/go.mod h1:YaKx1hKpWF+T2oj2lFJPsW/t1Q5e1jQI61eoQSTwpIs= +github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= +github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.6.0/go.mod h1:ZLOG9ck3JLRdB5MgO8f+lLTe83AXG6ro35rLTxvnIl4= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.11/go.mod 
h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/assertions 
v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfItdAd1r3cck= +github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.0.1 h1:WE4RBSZ1x6McVVC8S/Md+Qse8YUv6HRObAx6ke00NY8= +github.com/tidwall/pretty v1.0.1/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/twpayne/go-kml v1.5.2 
h1:rFMw2/EwgkVssGS2MT6YfWSPZz6BgcJkLxQ53jnE8rQ= +github.com/twpayne/go-kml v1.5.2/go.mod h1:kz8jAiIz6FIdU2Zjce9qGlVtgFYES9vt7BTPBHf5jl4= +github.com/twpayne/go-polyline v1.0.0/go.mod h1:ICh24bcLYBX8CknfvNPKqoTbe+eg+MX1NPyJmSBo7pU= +github.com/twpayne/go-waypoint v0.0.0-20200706203930-b263a7f6e4e8/go.mod h1:qj5pHncxKhu9gxtZEYWypA/z097sxhFlbTyOyt9gcnU= +github.com/uber/jaeger-client-go v2.25.0+incompatible h1:IxcNZ7WRY1Y3G4poYlx24szfsn/3LvK9QHCq9oQw8+U= +github.com/uber/jaeger-client-go v2.25.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= +github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= +github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= +github.com/urfave/cli/v2 v2.1.1 h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k= +github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= +github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= +github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= +github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e h1:+w0Zm/9gaWpEAyDlU1eKOuk5twTjAjuevXqcJJw8hrg= +github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= +github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqfU= 
+github.com/vektah/gqlparser v1.3.1/go.mod h1:bkVf0FX+Stjg/MHnm8mEyubuaArhNEqfQhF+OTiAL74= +github.com/vektah/gqlparser/v2 v2.0.1/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= +github.com/vektah/gqlparser/v2 v2.1.0 h1:uiKJ+T5HMGGQM2kRKQ8Pxw8+Zq9qhhZhz/lieYvCMns= +github.com/vektah/gqlparser/v2 v2.1.0/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= +github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= +github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= +github.com/xdg-go/stringprep v1.0.2 h1:6iq84/ryjjeRmMJwxutI51F2GIPlP5BfTvXHeYjyhBc= +github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= +github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= 
+github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= +github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +go.mongodb.org/mongo-driver v1.3.4/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= +go.mongodb.org/mongo-driver v1.5.1 h1:9nOVLGDfOaZ9R0tBumx/BcuqkbFpyTCU2r/Po7A2azI= +go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/contrib v0.7.0 h1:6IuKhaeEk+uxX5icJCdsgqlDVbsbDEPFD6NcHCDp9QI= +go.opentelemetry.io/contrib v0.7.0/go.mod h1:g4BXWOrb66AyXbXlSgfGWR7TQzXQX4Oq2NidBrSwZPM= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb h1:Jr4s7kqO0pFyoJmTFm0zC10wr2N7sJ3dibMghcQ3YlE= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb/go.mod h1:r41UB9RpbyhLlJLZ05Avf4ez4znJOWyubPuaO4TAVC8= 
+go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0 h1:ytNCwz7GNLx3dr8kKNfcRjkN5yAoq/zmoQCnxnCrA6Y= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0/go.mod h1:aypkg2vu/qouzRc9dh/qx+0UKOv3oaowY9WxxOfS20Q= +go.opentelemetry.io/otel v0.5.0/go.mod h1:jzBIgIzK43Iu1BpDAXwqOd6UPsSAk+ewVZ5ofSXw4Ek= +go.opentelemetry.io/otel v0.7.0 h1:u43jukpwqR8EsyeJOMgrsUgZwVI1e1eVw7yuzRkD1l0= +go.opentelemetry.io/otel v0.7.0/go.mod h1:aZMyHG5TqDOXEgH2tyLiXSUKly1jT3yqE9PmrzIeCdo= +go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191105034135-c7e5f84aec59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c h1:9HhBz5L/UjnK9XLtiZhYAdue5BVKep3PMmS2LuPDt8k= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod 
h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= +golang.org/x/mod v0.4.1/go.mod 
h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 h1:b0LrWgu8+q7z4J+0Y3Umo5q1dL7NXBkKBWkaVkAq17E= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod 
h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84 h1:duBc5zuJsmJXYOVVE/6PxejI+N3AaCqKjtsoLn1Je5Q= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210314195730-07df6a141424/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4 h1:EZ2mChiOa8udjfp6rRmswTbtZN/QzUQp4ptM4rnjHvc= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= +golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod 
h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200114235610-7ae403b6b589/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod 
h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.42.0 h1:uqATLkpxiBrhrvFoebXUjvyzE9nQf+pVyy0Z0IHE+fc= +google.golang.org/api v0.42.0/go.mod h1:+Oj4s6ch2SEGtPjGqfUfZonBH0GjQH89gTeKKAEGZKI= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto 
v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191009194640-548a555dbc03/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200303153909-beee998c1893/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
+google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210312152112-fc591d9ea70f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210323160006-e668133fea6a h1:XVaQ1+BDKvrRcgppHhtAaniHCKyV5xJAvymwsPHHFaE= +google.golang.org/genproto v0.0.0-20210323160006-e668133fea6a/go.mod h1:f2Bd7+2PlaVKmvKQ52aspJZXIDaRQBVdOOBfJ5i8OEs= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod 
h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/go-playground/colors.v1 v1.2.0 h1:SPweMUve+ywPrfwao+UvfD5Ah78aOLUkT5RlJiZn52c= +gopkg.in/go-playground/colors.v1 v1.2.0/go.mod h1:AvbqcMpNXVl5gBrM20jBm3VjjKBbH/kI5UnqjU7lxFI= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod 
h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k= diff --git a/gqlgen.yml b/gqlgen.yml new file mode 100644 index 000000000..b3eca9d23 --- /dev/null +++ b/gqlgen.yml @@ -0,0 +1,35 @@ +# .gqlgen.yml example +# +# Refer to https://gqlgen.com/config/ +# for detailed .gqlgen.yml documentation. + +schema: + - schema.graphql +exec: + filename: internal/graphql/generated.go +model: + filename: internal/adapter/graphql/models_gen.go +resolver: + filename: internal/graphql/resolver.go + type: Resolver +models: + DateTime: + model: github.com/99designs/gqlgen/graphql.Time + FileSize: + model: github.com/99designs/gqlgen/graphql.Int64 + ID: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.ID + Cursor: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.Cursor + URL: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.URL + PluginID: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.PluginID + PluginExtensionID: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.PluginExtensionID + PropertySchemaID: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaID + PropertySchemaFieldID: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaFieldID + TranslatedString: + 
model: github.com/reearth/reearth-backend/internal/adapter/graphql.Map diff --git a/internal/adapter/graphql/container.go b/internal/adapter/graphql/container.go new file mode 100644 index 000000000..2f04f1d86 --- /dev/null +++ b/internal/adapter/graphql/container.go @@ -0,0 +1,92 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Container struct { + AssetController *AssetController + DatasetController *DatasetController + LayerController *LayerController + PluginController *PluginController + ProjectController *ProjectController + PropertyController *PropertyController + SceneController *SceneController + TeamController *TeamController + UserController *UserController +} + +type ContainerConfig struct { + SignupSecret string +} + +func NewContainer(r *repo.Container, g *gateway.Container, conf ContainerConfig) *Container { + return &Container{ + AssetController: NewAssetController( + AssetControlerConfig{ + AssetInput: func() interfaces.Asset { + return interactor.NewAsset(r, g) + }, + }, + ), + DatasetController: NewDatasetController( + DatasetControllerConfig{ + DatasetInput: func() interfaces.Dataset { + return interactor.NewDataset(r, g) + }, + }, + ), + LayerController: NewLayerController( + LayerControllerConfig{ + LayerInput: func() interfaces.Layer { + return interactor.NewLayer(r) + }, + }, + ), + PluginController: NewPluginController( + PluginControllerConfig{ + PluginInput: func() interfaces.Plugin { + return interactor.NewPlugin(r, g) + }, + }, + ), + ProjectController: NewProjectController( + ProjectControllerConfig{ + ProjectInput: func() interfaces.Project { + return interactor.NewProject(r, g) + }, + }, + ), + PropertyController: NewPropertyController( + PropertyControllerConfig{ + PropertyInput: 
func() interfaces.Property { + return interactor.NewProperty(r, g) + }, + }, + ), + SceneController: NewSceneController( + SceneControllerConfig{ + SceneInput: func() interfaces.Scene { + return interactor.NewScene(r) + }, + }, + ), + TeamController: NewTeamController( + TeamControllerConfig{ + TeamInput: func() interfaces.Team { + return interactor.NewTeam(r) + }, + }, + ), + UserController: NewUserController( + UserControllerConfig{ + UserInput: func() interfaces.User { + return interactor.NewUser(r, g, conf.SignupSecret) + }, + }, + ), + } +} diff --git a/internal/adapter/graphql/controller_asset.go b/internal/adapter/graphql/controller_asset.go new file mode 100644 index 000000000..50c90a552 --- /dev/null +++ b/internal/adapter/graphql/controller_asset.go @@ -0,0 +1,75 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AssetControlerConfig struct { + AssetInput func() interfaces.Asset +} + +type AssetController struct { + config AssetControlerConfig +} + +func NewAssetController(config AssetControlerConfig) *AssetController { + return &AssetController{config: config} +} + +func (c *AssetController) usecase() interfaces.Asset { + if c == nil { + return nil + } + return c.config.AssetInput() +} + +func (c *AssetController) Create(ctx context.Context, i *CreateAssetInput, o *usecase.Operator) (*CreateAssetPayload, error) { + res, err := c.usecase().Create(ctx, interfaces.CreateAssetParam{ + TeamID: id.TeamID(i.TeamID), + File: fromFile(&i.File), + }, o) + if err != nil { + return nil, err + } + + return &CreateAssetPayload{Asset: toAsset(res)}, nil +} + +func (c *AssetController) Remove(ctx context.Context, i *RemoveAssetInput, o *usecase.Operator) (*RemoveAssetPayload, error) { + res, err2 := c.usecase().Remove(ctx, id.AssetID(i.AssetID), o) + if err2 != nil { + return nil, err2 + } + + 
return &RemoveAssetPayload{AssetID: res.ID()}, nil +} + +func (c *AssetController) FindByTeam(ctx context.Context, teamID id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*AssetConnection, error) { + p := usecase.NewPagination(first, last, before, after) + assets, pi, err := c.usecase().FindByTeam(ctx, id.TeamID(teamID), p, operator) + if err != nil { + return nil, err + } + + edges := make([]*AssetEdge, 0, len(assets)) + nodes := make([]*Asset, 0, len(assets)) + for _, a := range assets { + asset := toAsset(a) + edges = append(edges, &AssetEdge{ + Node: asset, + Cursor: usecase.Cursor(asset.ID.String()), + }) + nodes = append(nodes, asset) + } + + return &AssetConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: toPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} diff --git a/internal/adapter/graphql/controller_dataset.go b/internal/adapter/graphql/controller_dataset.go new file mode 100644 index 000000000..df729babe --- /dev/null +++ b/internal/adapter/graphql/controller_dataset.go @@ -0,0 +1,208 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetControllerConfig struct { + DatasetInput func() interfaces.Dataset +} + +type DatasetController struct { + config DatasetControllerConfig +} + +func NewDatasetController(config DatasetControllerConfig) *DatasetController { + return &DatasetController{config: config} +} + +func (c *DatasetController) usecase() interfaces.Dataset { + if c == nil { + return nil + } + return c.config.DatasetInput() +} + +func (c *DatasetController) UpdateDatasetSchema(ctx context.Context, i *UpdateDatasetSchemaInput, operator *usecase.Operator) (*UpdateDatasetSchemaPayload, error) { + res, err := c.usecase().UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ + SchemaId: 
id.DatasetSchemaID(i.SchemaID), + Name: i.Name, + }, operator) + if err != nil { + return nil, err + } + return &UpdateDatasetSchemaPayload{DatasetSchema: toDatasetSchema(res)}, nil +} + +func (c *DatasetController) AddDynamicDatasetSchema(ctx context.Context, i *AddDynamicDatasetSchemaInput) (*AddDynamicDatasetSchemaPayload, error) { + res, err := c.usecase().AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ + SceneId: id.SceneID(i.SceneID), + }) + if err != nil { + return nil, err + } + + return &AddDynamicDatasetSchemaPayload{DatasetSchema: toDatasetSchema(res)}, nil +} + +func (c *DatasetController) AddDynamicDataset(ctx context.Context, i *AddDynamicDatasetInput) (*AddDynamicDatasetPayload, error) { + dss, ds, err := c.usecase().AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ + SchemaId: id.DatasetSchemaID(i.DatasetSchemaID), + Author: i.Author, + Content: i.Content, + Lat: i.Lat, + Lng: i.Lng, + Target: i.Target, + }) + if err != nil { + return nil, err + } + + return &AddDynamicDatasetPayload{DatasetSchema: toDatasetSchema(dss), Dataset: toDataset(ds)}, nil +} + +func (c *DatasetController) ImportDataset(ctx context.Context, i *ImportDatasetInput, o *usecase.Operator) (*ImportDatasetPayload, error) { + res, err := c.usecase().ImportDataset(ctx, interfaces.ImportDatasetParam{ + SceneId: id.SceneID(i.SceneID), + SchemaId: id.DatasetSchemaIDFromRefID(i.DatasetSchemaID), + File: fromFile(&i.File), + }, o) + if err != nil { + return nil, err + } + + return &ImportDatasetPayload{DatasetSchema: toDatasetSchema(res)}, nil +} + +func (c *DatasetController) GraphFetchSchema(ctx context.Context, i id.ID, depth int, operator *usecase.Operator) ([]*DatasetSchema, []error) { + res, err := c.usecase().GraphFetchSchema(ctx, id.DatasetSchemaID(i), depth, operator) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*DatasetSchema, 0, len(res)) + for _, d := range res { + schemas = append(schemas, toDatasetSchema(d)) + } + + 
return schemas, nil +} + +func (c *DatasetController) FindSchemaByScene(ctx context.Context, i id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*DatasetSchemaConnection, error) { + res, pi, err := c.usecase().FindSchemaByScene(ctx, id.SceneID(i), usecase.NewPagination(first, last, before, after), operator) + if err != nil { + return nil, err + } + + edges := make([]*DatasetSchemaEdge, 0, len(res)) + nodes := make([]*DatasetSchema, 0, len(res)) + for _, dataset := range res { + ds := toDatasetSchema(dataset) + edges = append(edges, &DatasetSchemaEdge{ + Node: ds, + Cursor: usecase.Cursor(ds.ID.String()), + }) + nodes = append(nodes, ds) + } + + return &DatasetSchemaConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: toPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +func (c *DatasetController) FindDynamicSchemasByScene(ctx context.Context, sid id.ID) ([]*DatasetSchema, error) { + res, err := c.usecase().FindDynamicSchemaByScene(ctx, id.SceneID(sid)) + if err != nil { + return nil, err + } + + dss := []*DatasetSchema{} + for _, dataset := range res { + dss = append(dss, toDatasetSchema(dataset)) + } + + return dss, nil +} + +func (c *DatasetController) FindBySchema(ctx context.Context, dsid id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*DatasetConnection, error) { + p := usecase.NewPagination(first, last, before, after) + res, pi, err2 := c.usecase().FindBySchema(ctx, id.DatasetSchemaID(dsid), p, operator) + if err2 != nil { + return nil, err2 + } + + edges := make([]*DatasetEdge, 0, len(res)) + nodes := make([]*Dataset, 0, len(res)) + for _, dataset := range res { + ds := toDataset(dataset) + edges = append(edges, &DatasetEdge{ + Node: ds, + Cursor: usecase.Cursor(ds.ID.String()), + }) + nodes = append(nodes, ds) + } + + conn := &DatasetConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: toPageInfo(pi), + TotalCount: 
pi.TotalCount(), + } + + return conn, nil +} + +func (c *DatasetController) Sync(ctx context.Context, input *SyncDatasetInput, operator *usecase.Operator) (*SyncDatasetPayload, error) { + dss, ds, err := c.usecase().Sync(ctx, id.SceneID(input.SceneID), input.URL, operator) + if err != nil { + return nil, err + } + + schemas := make([]*DatasetSchema, 0, len(dss)) + datasets := make([]*Dataset, 0, len(ds)) + for _, d := range dss { + schemas = append(schemas, toDatasetSchema(d)) + } + for _, d := range ds { + datasets = append(datasets, toDataset(d)) + } + + return &SyncDatasetPayload{ + SceneID: input.SceneID, + URL: input.URL, + DatasetSchema: schemas, + Dataset: datasets, + }, nil +} + +func (c *DatasetController) RemoveDatasetSchema(ctx context.Context, i *RemoveDatasetSchemaInput, o *usecase.Operator) (*RemoveDatasetSchemaPayload, error) { + res, err := c.usecase().RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ + SchemaId: id.DatasetSchemaID(i.SchemaID), + Force: i.Force, + }, o) + if err != nil { + return nil, err + } + + return &RemoveDatasetSchemaPayload{SchemaID: res.ID()}, nil +} + +func (c *DatasetController) AddDatasetSchema(ctx context.Context, i *AddDatasetSchemaInput, o *usecase.Operator) (*AddDatasetSchemaPayload, error) { + res, err2 := c.usecase().AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ + SceneId: id.SceneID(i.SceneID), + Name: i.Name, + RepresentativeField: id.DatasetSchemaFieldIDFromRefID(i.Representativefield), + }, o) + if err2 != nil { + return nil, err2 + } + + return &AddDatasetSchemaPayload{DatasetSchema: toDatasetSchema(res)}, nil +} diff --git a/internal/adapter/graphql/controller_layer.go b/internal/adapter/graphql/controller_layer.go new file mode 100644 index 000000000..4bcfc2321 --- /dev/null +++ b/internal/adapter/graphql/controller_layer.go @@ -0,0 +1,201 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type LayerControllerConfig struct { + LayerInput func() interfaces.Layer +} + +type LayerController struct { + config LayerControllerConfig +} + +func NewLayerController(config LayerControllerConfig) *LayerController { + return &LayerController{config: config} +} + +func (c *LayerController) usecase() interfaces.Layer { + if c == nil { + return nil + } + return c.config.LayerInput() +} + +func (c *LayerController) AddItem(ctx context.Context, ginput *AddLayerItemInput, operator *usecase.Operator) (*AddLayerItemPayload, error) { + layer, parent, err := c.usecase().AddItem(ctx, interfaces.AddLayerItemInput{ + ParentLayerID: id.LayerID(ginput.ParentLayerID), + PluginID: &ginput.PluginID, + ExtensionID: &ginput.ExtensionID, + Index: ginput.Index, + Name: refToString(ginput.Name), + LatLng: toPropertyLatLng(ginput.Lat, ginput.Lng), + // LinkedDatasetID: ginput.LinkedDatasetID, + }, operator) + if err != nil { + return nil, err + } + + return &AddLayerItemPayload{ + Layer: toLayerItem(layer, parent.IDRef()), + ParentLayer: toLayerGroup(parent, nil), + Index: ginput.Index, + }, nil +} + +func (c *LayerController) AddGroup(ctx context.Context, ginput *AddLayerGroupInput, operator *usecase.Operator) (*AddLayerGroupPayload, error) { + layer, parent, err := c.usecase().AddGroup(ctx, interfaces.AddLayerGroupInput{ + ParentLayerID: id.LayerID(ginput.ParentLayerID), + PluginID: ginput.PluginID, + ExtensionID: ginput.ExtensionID, + Index: ginput.Index, + Name: refToString(ginput.Name), + LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(ginput.LinkedDatasetSchemaID), + }, operator) + if err != nil { + return nil, err + } + + return &AddLayerGroupPayload{ + Layer: toLayerGroup(layer, parent.IDRef()), + ParentLayer: toLayerGroup(parent, nil), + Index: ginput.Index, + }, nil +} + +func (c *LayerController) Remove(ctx context.Context, ginput *RemoveLayerInput, 
operator *usecase.Operator) (*RemoveLayerPayload, error) { + id, layer, err := c.usecase().Remove(ctx, id.LayerID(ginput.LayerID), operator) + if err != nil { + return nil, err + } + + return &RemoveLayerPayload{ + LayerID: id.ID(), + ParentLayer: toLayerGroup(layer, nil), + }, nil +} + +func (c *LayerController) Update(ctx context.Context, ginput *UpdateLayerInput, operator *usecase.Operator) (*UpdateLayerPayload, error) { + layer, err := c.usecase().Update(ctx, interfaces.UpdateLayerInput{ + LayerID: id.LayerID(ginput.LayerID), + Name: ginput.Name, + Visible: ginput.Visible, + }, operator) + if err != nil { + return nil, err + } + + return &UpdateLayerPayload{ + Layer: toLayer(layer, nil), + }, nil +} + +func (c *LayerController) Move(ctx context.Context, ginput *MoveLayerInput, operator *usecase.Operator) (*MoveLayerPayload, error) { + targetLayerID, layerGroupFrom, layerGroupTo, index, err := c.usecase().Move(ctx, interfaces.MoveLayerInput{ + LayerID: id.LayerID(ginput.LayerID), + DestLayerID: id.LayerIDFromRefID(ginput.DestLayerID), + Index: refToIndex(ginput.Index), + }, operator) + if err != nil { + return nil, err + } + + return &MoveLayerPayload{ + LayerID: targetLayerID.ID(), + FromParentLayer: toLayerGroup(layerGroupFrom, nil), + ToParentLayer: toLayerGroup(layerGroupTo, nil), + Index: index, + }, nil +} + +func (c *LayerController) CreateInfobox(ctx context.Context, ginput *CreateInfoboxInput, operator *usecase.Operator) (*CreateInfoboxPayload, error) { + layer, err := c.usecase().CreateInfobox(ctx, id.LayerID(ginput.LayerID), operator) + if err != nil { + return nil, err + } + + return &CreateInfoboxPayload{ + Layer: toLayer(layer, nil), + }, nil +} + +func (c *LayerController) RemoveInfobox(ctx context.Context, ginput *RemoveInfoboxInput, operator *usecase.Operator) (*RemoveInfoboxPayload, error) { + layer, err := c.usecase().RemoveInfobox(ctx, id.LayerID(ginput.LayerID), operator) + if err != nil { + return nil, err + } + + return 
&RemoveInfoboxPayload{ + Layer: toLayer(layer, nil), + }, nil +} + +func (c *LayerController) AddInfoboxField(ctx context.Context, ginput *AddInfoboxFieldInput, operator *usecase.Operator) (*AddInfoboxFieldPayload, error) { + infoboxField, layer, err := c.usecase().AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ + LayerID: id.LayerID(ginput.LayerID), + PluginID: ginput.PluginID, + ExtensionID: ginput.ExtensionID, + Index: ginput.Index, + }, operator) + if err != nil { + return nil, err + } + + return &AddInfoboxFieldPayload{ + InfoboxField: toInfoboxField(infoboxField, nil), + Layer: toLayer(layer, nil), + }, nil +} + +func (c *LayerController) MoveInfoboxField(ctx context.Context, ginput *MoveInfoboxFieldInput, operator *usecase.Operator) (*MoveInfoboxFieldPayload, error) { + infoboxField, layer, index, err := c.usecase().MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ + LayerID: id.LayerID(ginput.LayerID), + InfoboxFieldID: id.InfoboxFieldID(ginput.InfoboxFieldID), + Index: ginput.Index, + }, operator) + if err != nil { + return nil, err + } + + return &MoveInfoboxFieldPayload{ + InfoboxFieldID: infoboxField.ID(), + Layer: toLayer(layer, nil), + Index: index, + }, nil +} + +func (c *LayerController) RemoveInfoboxField(ctx context.Context, ginput *RemoveInfoboxFieldInput, operator *usecase.Operator) (*RemoveInfoboxFieldPayload, error) { + infoboxField, layer, err := c.usecase().RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ + LayerID: id.LayerID(ginput.LayerID), + InfoboxFieldID: id.InfoboxFieldID(ginput.InfoboxFieldID), + }, operator) + if err != nil { + return nil, err + } + + return &RemoveInfoboxFieldPayload{ + InfoboxFieldID: infoboxField.ID(), + Layer: toLayer(layer, nil), + }, nil +} + +func (c *LayerController) ImportLayer(ctx context.Context, ginput *ImportLayerInput, operator *usecase.Operator) (*ImportLayerPayload, error) { + l, l2, err := c.usecase().ImportLayer(ctx, interfaces.ImportLayerParam{ + LayerID: 
id.LayerID(ginput.LayerID), + File: fromFile(&ginput.File), + Format: fromLayerEncodingFormat(ginput.Format), + }, operator) + if err != nil { + return nil, err + } + + return &ImportLayerPayload{ + Layers: toLayers(l, l2.IDRef()), + ParentLayer: toLayerGroup(l2, nil), + }, err +} diff --git a/internal/adapter/graphql/controller_plugin.go b/internal/adapter/graphql/controller_plugin.go new file mode 100644 index 000000000..0578d8db2 --- /dev/null +++ b/internal/adapter/graphql/controller_plugin.go @@ -0,0 +1,38 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +type PluginControllerConfig struct { + PluginInput func() interfaces.Plugin +} + +type PluginController struct { + config PluginControllerConfig +} + +func NewPluginController(config PluginControllerConfig) *PluginController { + return &PluginController{config: config} +} + +func (c *PluginController) usecase() interfaces.Plugin { + if c == nil { + return nil + } + return c.config.PluginInput() +} + +func (c *PluginController) Upload(ctx context.Context, ginput *UploadPluginInput, operator *usecase.Operator) (*UploadPluginPayload, error) { + res, err := c.usecase().Upload(ctx, ginput.File.File, operator) + if err != nil { + return nil, err + } + + return &UploadPluginPayload{ + Plugin: toPlugin(res), + }, nil +} diff --git a/internal/adapter/graphql/controller_project.go b/internal/adapter/graphql/controller_project.go new file mode 100644 index 000000000..5b9711caa --- /dev/null +++ b/internal/adapter/graphql/controller_project.go @@ -0,0 +1,109 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type ProjectControllerConfig struct { + ProjectInput func() 
interfaces.Project +} + +type ProjectController struct { + config ProjectControllerConfig +} + +func NewProjectController(config ProjectControllerConfig) *ProjectController { + return &ProjectController{config: config} +} + +func (c *ProjectController) usecase() interfaces.Project { + if c == nil { + return nil + } + return c.config.ProjectInput() +} + +func (c *ProjectController) Create(ctx context.Context, i *CreateProjectInput, operator *usecase.Operator) (*ProjectPayload, error) { + res, err := c.usecase().Create(ctx, interfaces.CreateProjectParam{ + TeamID: id.TeamID(i.TeamID), + Visualizer: visualizer.Visualizer(i.Visualizer), + Name: i.Name, + Description: i.Description, + ImageURL: i.ImageURL, + Alias: i.Alias, + Archived: i.Archived, + }, operator) + if err != nil { + return nil, err + } + + return &ProjectPayload{Project: toProject(res)}, nil +} + +func (c *ProjectController) Update(ctx context.Context, ginput *UpdateProjectInput, operator *usecase.Operator) (*ProjectPayload, error) { + deletePublicImage := false + if ginput.DeletePublicImage != nil { + deletePublicImage = *ginput.DeletePublicImage + } + + deleteImageURL := false + if ginput.DeleteImageURL != nil { + deleteImageURL = *ginput.DeleteImageURL + } + + res, err := c.usecase().Update(ctx, interfaces.UpdateProjectParam{ + ID: id.ProjectID(ginput.ProjectID), + Name: ginput.Name, + Description: ginput.Description, + Alias: ginput.Alias, + ImageURL: ginput.ImageURL, + Archived: ginput.Archived, + PublicTitle: ginput.PublicTitle, + PublicDescription: ginput.PublicDescription, + PublicImage: fromFile(ginput.PublicImage), + PublicNoIndex: ginput.PublicNoIndex, + DeletePublicImage: deletePublicImage, + DeleteImageURL: deleteImageURL, + }, operator) + if err != nil { + return nil, err + } + + return &ProjectPayload{Project: toProject(res)}, nil +} + +func (c *ProjectController) CheckAlias(ctx context.Context, alias string) (*CheckProjectAliasPayload, error) { + ok, err := c.usecase().CheckAlias(ctx, 
alias) + if err != nil { + return nil, err + } + + return &CheckProjectAliasPayload{Alias: alias, Available: ok}, nil +} + +func (c *ProjectController) Publish(ctx context.Context, ginput *PublishProjectInput, operator *usecase.Operator) (*ProjectPayload, error) { + res, err := c.usecase().Publish(ctx, interfaces.PublishProjectParam{ + ID: id.ProjectID(ginput.ProjectID), + Alias: ginput.Alias, + Status: fromPublishmentStatus(ginput.Status), + }, operator) + if err != nil { + return nil, err + } + + return &ProjectPayload{Project: toProject(res)}, nil +} + +func (c *ProjectController) Delete(ctx context.Context, ginput *DeleteProjectInput, operator *usecase.Operator) (*DeleteProjectPayload, error) { + err := c.usecase().Delete(ctx, id.ProjectID(ginput.ProjectID), operator) + if err != nil { + return nil, err + } + + return &DeleteProjectPayload{ProjectID: ginput.ProjectID}, nil +} diff --git a/internal/adapter/graphql/controller_property.go b/internal/adapter/graphql/controller_property.go new file mode 100644 index 000000000..ccfe2a2fe --- /dev/null +++ b/internal/adapter/graphql/controller_property.go @@ -0,0 +1,200 @@ +package graphql + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type PropertyControllerConfig struct { + PropertyInput func() interfaces.Property +} + +type PropertyController struct { + config PropertyControllerConfig +} + +func NewPropertyController(config PropertyControllerConfig) *PropertyController { + return &PropertyController{config: config} +} + +func (c *PropertyController) usecase() interfaces.Property { + if c == nil { + return nil + } + return c.config.PropertyInput() +} + +func (c *PropertyController) UpdateValue(ctx context.Context, p id.ID, si *id.PropertySchemaFieldID, ii *id.ID, f id.PropertySchemaFieldID, val 
interface{}, t ValueType, operator *usecase.Operator) (*PropertyFieldPayload, error) { + v, ok := fromPropertyValueAndType(val, t) + if !ok { + return nil, errors.New("invalid value") + } + + pp, pgl, pg, pf, err := c.usecase().UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ + PropertyID: id.PropertyID(p), + Pointer: fromPointer(si, ii, &f), + Value: v, + }, operator) + if err != nil { + return nil, err + } + + return &PropertyFieldPayload{ + Property: toProperty(pp), + PropertyField: toPropertyField(pf, pp, pgl, pg), + }, nil +} + +func (c *PropertyController) RemoveField(ctx context.Context, ginput *RemovePropertyFieldInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { + p, err := c.usecase().RemoveField(ctx, interfaces.RemovePropertyFieldParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), + }, operator) + if err != nil { + return nil, err + } + + return &PropertyFieldPayload{ + Property: toProperty(p), + }, nil +} + +func (c *PropertyController) UploadFile(ctx context.Context, ginput *UploadFileToPropertyInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { + p, pgl, pg, pf, err := c.usecase().UploadFile(ctx, interfaces.UploadFileParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), + File: fromFile(&ginput.File), + }, operator) + if err != nil { + return nil, err + } + + return &PropertyFieldPayload{ + Property: toProperty(p), + PropertyField: toPropertyField(pf, p, pgl, pg), + }, nil +} + +func (c *PropertyController) LinkValue(ctx context.Context, ginput *LinkDatasetToPropertyValueInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { + p, pgl, pg, pf, err := c.usecase().LinkValue(ctx, interfaces.LinkPropertyValueParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), + Links: 
fromPropertyFieldLink( + ginput.DatasetSchemaIds, + ginput.DatasetIds, + ginput.DatasetSchemaFieldIds, + ), + }, operator) + if err != nil { + return nil, err + } + + return &PropertyFieldPayload{ + Property: toProperty(p), + PropertyField: toPropertyField(pf, p, pgl, pg), + }, nil +} + +func (c *PropertyController) UnlinkValue(ctx context.Context, ginput *UnlinkPropertyValueInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { + p, pgl, pg, pf, err := c.usecase().UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), + }, operator) + if err != nil { + return nil, err + } + + return &PropertyFieldPayload{ + Property: toProperty(p), + PropertyField: toPropertyField(pf, p, pgl, pg), + }, nil +} + +func (c *PropertyController) AddItem(ctx context.Context, ginput *AddPropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { + var v *property.Value + if ginput.NameFieldType != nil { + v, _ = fromPropertyValueAndType(ginput.NameFieldValue, *ginput.NameFieldType) + } + + p, pgl, pi, err := c.usecase().AddItem(ctx, interfaces.AddPropertyItemParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(&ginput.SchemaItemID, nil, nil), + Index: ginput.Index, + NameFieldValue: v, + }, operator) + + if err != nil { + return nil, err + } + + return &PropertyItemPayload{ + Property: toProperty(p), + PropertyItem: toPropertyItem(pi, p, pgl), + }, nil +} + +func (c *PropertyController) MoveItem(ctx context.Context, ginput *MovePropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { + p, pgl, pi, err := c.usecase().MoveItem(ctx, interfaces.MovePropertyItemParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(&ginput.SchemaItemID, &ginput.ItemID, nil), + Index: ginput.Index, + }, operator) + if err != nil { + return nil, err + } + + return &PropertyItemPayload{ + 
Property: toProperty(p), + PropertyItem: toPropertyItem(pi, p, pgl), + }, nil +} + +func (c *PropertyController) RemoveItem(ctx context.Context, ginput *RemovePropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { + p, err := c.usecase().RemoveItem(ctx, interfaces.RemovePropertyItemParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(&ginput.SchemaItemID, &ginput.ItemID, nil), + }, operator) + if err != nil { + return nil, err + } + + return &PropertyItemPayload{ + Property: toProperty(p), + }, nil +} + +func (c *PropertyController) UpdateItems(ctx context.Context, ginput *UpdatePropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { + op := make([]interfaces.UpdatePropertyItemsOperationParam, 0, len(ginput.Operations)) + for _, o := range ginput.Operations { + var v *property.Value + if o.NameFieldType != nil { + v, _ = fromPropertyValueAndType(o.NameFieldValue, *o.NameFieldType) + } + + op = append(op, interfaces.UpdatePropertyItemsOperationParam{ + Operation: fromListOperation(o.Operation), + ItemID: id.PropertyItemIDFromRefID(o.ItemID), + Index: o.Index, + NameFieldValue: v, + }) + } + + p, err2 := c.usecase().UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ + PropertyID: id.PropertyID(ginput.PropertyID), + Pointer: fromPointer(&ginput.SchemaItemID, nil, nil), + Operations: op, + }, operator) + + if err2 != nil { + return nil, err2 + } + + return &PropertyItemPayload{ + Property: toProperty(p), + }, nil +} diff --git a/internal/adapter/graphql/controller_scene.go b/internal/adapter/graphql/controller_scene.go new file mode 100644 index 000000000..695b7727c --- /dev/null +++ b/internal/adapter/graphql/controller_scene.go @@ -0,0 +1,130 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type SceneControllerConfig struct 
{ + SceneInput func() interfaces.Scene +} + +type SceneController struct { + config SceneControllerConfig +} + +func NewSceneController(config SceneControllerConfig) *SceneController { + return &SceneController{config: config} +} + +func (c *SceneController) usecase() interfaces.Scene { + if c == nil { + return nil + } + return c.config.SceneInput() +} + +func (c *SceneController) Create(ctx context.Context, ginput *CreateSceneInput, operator *usecase.Operator) (*CreateScenePayload, error) { + res, err := c.usecase().Create( + ctx, + id.ProjectID(ginput.ProjectID), + operator, + ) + if err != nil { + return nil, err + } + + return &CreateScenePayload{Scene: toScene(res)}, nil +} + +func (c *SceneController) AddWidget(ctx context.Context, ginput *AddWidgetInput, operator *usecase.Operator) (*AddWidgetPayload, error) { + scene, widget, err := c.usecase().AddWidget( + ctx, + id.SceneID(ginput.SceneID), + ginput.PluginID, + id.PluginExtensionID(ginput.ExtensionID), + operator, + ) + if err != nil { + return nil, err + } + + return &AddWidgetPayload{Scene: toScene(scene), SceneWidget: toSceneWidget(widget)}, nil +} + +func (c *SceneController) UpdateWidget(ctx context.Context, ginput *UpdateWidgetInput, operator *usecase.Operator) (*UpdateWidgetPayload, error) { + scene, widget, err := c.usecase().UpdateWidget(ctx, interfaces.UpdateWidgetParam{ + SceneID: id.SceneID(ginput.SceneID), + PluginID: ginput.PluginID, + ExtensionID: id.PluginExtensionID(ginput.ExtensionID), + Enabled: ginput.Enabled, + }, operator) + if err != nil { + return nil, err + } + + return &UpdateWidgetPayload{Scene: toScene(scene), SceneWidget: toSceneWidget(widget)}, nil +} + +func (c *SceneController) RemoveWidget(ctx context.Context, ginput *RemoveWidgetInput, operator *usecase.Operator) (*RemoveWidgetPayload, error) { + scene, err := c.usecase().RemoveWidget(ctx, + id.SceneID(ginput.SceneID), + id.PluginID(ginput.PluginID), + id.PluginExtensionID(ginput.ExtensionID), + operator, + ) + if err != 
nil { + return nil, err + } + + return &RemoveWidgetPayload{Scene: toScene(scene), PluginID: ginput.PluginID, ExtensionID: ginput.ExtensionID}, nil +} + +func (c *SceneController) InstallPlugin(ctx context.Context, ginput *InstallPluginInput, operator *usecase.Operator) (*InstallPluginPayload, error) { + scene, pl, pr, err := c.usecase().InstallPlugin(ctx, + id.SceneID(ginput.SceneID), + ginput.PluginID, + operator, + ) + if err != nil { + return nil, err + } + + return &InstallPluginPayload{Scene: toScene(scene), ScenePlugin: &ScenePlugin{ + PluginID: pl, + PropertyID: pr.IDRef(), + }}, nil +} + +func (c *SceneController) UninstallPlugin(ctx context.Context, ginput *UninstallPluginInput, operator *usecase.Operator) (*UninstallPluginPayload, error) { + scene, err := c.usecase().UninstallPlugin(ctx, + id.SceneID(ginput.SceneID), + id.PluginID(ginput.PluginID), + operator, + ) + if err != nil { + return nil, err + } + + return &UninstallPluginPayload{Scene: toScene(scene)}, nil +} + +func (c *SceneController) UpgradePlugin(ctx context.Context, ginput *UpgradePluginInput, operator *usecase.Operator) (*UpgradePluginPayload, error) { + s, err := c.usecase().UpgradePlugin(ctx, + id.SceneID(ginput.SceneID), + ginput.PluginID, + ginput.ToPluginID, + operator, + ) + if err != nil { + return nil, err + } + + return &UpgradePluginPayload{ + Scene: toScene(s), + ScenePlugin: toScenePlugin(s.PluginSystem().Plugin(ginput.ToPluginID)), + }, nil +} diff --git a/internal/adapter/graphql/controller_team.go b/internal/adapter/graphql/controller_team.go new file mode 100644 index 000000000..f511052a5 --- /dev/null +++ b/internal/adapter/graphql/controller_team.go @@ -0,0 +1,82 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type TeamControllerConfig struct { + 
TeamInput func() interfaces.Team +} + +type TeamController struct { + config TeamControllerConfig +} + +func NewTeamController(config TeamControllerConfig) *TeamController { + return &TeamController{config: config} +} + +func (c *TeamController) usecase() interfaces.Team { + if c == nil { + return nil + } + return c.config.TeamInput() +} + +func (c *TeamController) Create(ctx context.Context, i *CreateTeamInput, u *user.User) (*CreateTeamPayload, error) { + res, err := c.usecase().Create(ctx, i.Name, u.ID()) + if err != nil { + return nil, err + } + + return &CreateTeamPayload{Team: toTeam(res)}, nil +} + +func (c *TeamController) Update(ctx context.Context, i *UpdateTeamInput, o *usecase.Operator) (*UpdateTeamPayload, error) { + res, err := c.usecase().Update(ctx, id.TeamID(i.TeamID), i.Name, o) + if err != nil { + return nil, err + } + + return &UpdateTeamPayload{Team: toTeam(res)}, nil +} + +func (c *TeamController) AddMember(ctx context.Context, i *AddMemberToTeamInput, o *usecase.Operator) (*AddMemberToTeamPayload, error) { + res, err := c.usecase().AddMember(ctx, id.TeamID(i.TeamID), id.UserID(i.UserID), fromRole(i.Role), o) + if err != nil { + return nil, err + } + + return &AddMemberToTeamPayload{Team: toTeam(res)}, nil +} + +func (c *TeamController) RemoveMember(ctx context.Context, i *RemoveMemberFromTeamInput, o *usecase.Operator) (*RemoveMemberFromTeamPayload, error) { + res, err := c.usecase().RemoveMember(ctx, id.TeamID(i.TeamID), id.UserID(i.UserID), o) + if err != nil { + return nil, err + } + + return &RemoveMemberFromTeamPayload{Team: toTeam(res)}, nil +} + +func (c *TeamController) UpdateMember(ctx context.Context, i *UpdateMemberOfTeamInput, o *usecase.Operator) (*UpdateMemberOfTeamPayload, error) { + res, err := c.usecase().UpdateMember(ctx, id.TeamID(i.TeamID), id.UserID(i.UserID), fromRole(i.Role), o) + if err != nil { + return nil, err + } + + return &UpdateMemberOfTeamPayload{Team: toTeam(res)}, nil +} + +func (c *TeamController) Remove(ctx 
context.Context, team id.ID, o *usecase.Operator) (*DeleteTeamPayload, error) { + if err := c.usecase().Remove(ctx, id.TeamID(team), o); err != nil { + return nil, err + } + + return &DeleteTeamPayload{TeamID: team}, nil +} diff --git a/internal/adapter/graphql/controller_user.go b/internal/adapter/graphql/controller_user.go new file mode 100644 index 000000000..bf8db6dbc --- /dev/null +++ b/internal/adapter/graphql/controller_user.go @@ -0,0 +1,96 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type UserControllerConfig struct { + UserInput func() interfaces.User +} + +type UserController struct { + config UserControllerConfig +} + +func NewUserController(config UserControllerConfig) *UserController { + return &UserController{config: config} +} + +func (c *UserController) usecase() interfaces.User { + if c == nil { + return nil + } + return c.config.UserInput() +} + +func (c *UserController) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Operator) ([]*User, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + users := make([]*User, 0, len(res)) + for _, u := range res { + users = append(users, ToUser(u)) + } + + return users, nil +} + +func (c *UserController) Signup(ctx context.Context, ginput *SignupInput, sub string) (*SignupPayload, error) { + u, team, err := c.usecase().Signup(ctx, interfaces.SignupParam{ + Sub: sub, + UserID: id.UserIDFromRefID(ginput.UserID), + TeamID: id.TeamIDFromRefID(ginput.TeamID), + }) + if err != nil { + return nil, err + } + return &SignupPayload{User: ToUser(u), Team: toTeam(team)}, nil +} + +func (c *UserController) UpdateMe(ctx context.Context, ginput *UpdateMeInput, operator *usecase.Operator) (*UpdateMePayload, error) { + res, err := c.usecase().UpdateMe(ctx, 
interfaces.UpdateMeParam{ + Name: ginput.Name, + Email: ginput.Email, + Lang: ginput.Lang, + Password: ginput.Password, + PasswordConfirmation: ginput.PasswordConfirmation, + Theme: toTheme(ginput.Theme), + }, operator) + if err != nil { + return nil, err + } + + return &UpdateMePayload{User: ToUser(res)}, nil +} + +func (c *UserController) RemoveMyAuth(ctx context.Context, ginput *RemoveMyAuthInput, operator *usecase.Operator) (*UpdateMePayload, error) { + res, err := c.usecase().RemoveMyAuth(ctx, ginput.Auth, operator) + if err != nil { + return nil, err + } + + return &UpdateMePayload{User: ToUser(res)}, nil +} + +func (c *UserController) SearchUser(ctx context.Context, nameOrEmail string, operator *usecase.Operator) (*SearchedUser, error) { + res, err := c.usecase().SearchUser(ctx, nameOrEmail, operator) + if err != nil { + return nil, err + } + + return toSearchedUser(res), nil +} + +func (c *UserController) DeleteMe(ctx context.Context, user id.ID, operator *usecase.Operator) (*DeleteMePayload, error) { + if err := c.usecase().DeleteMe(ctx, id.UserID(user), operator); err != nil { + return nil, err + } + + return &DeleteMePayload{UserID: operator.User.ID()}, nil +} diff --git a/internal/adapter/graphql/convert.go b/internal/adapter/graphql/convert.go new file mode 100644 index 000000000..f147bba78 --- /dev/null +++ b/internal/adapter/graphql/convert.go @@ -0,0 +1,87 @@ +package graphql + +import ( + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +// func refToBool(s *bool) bool { +// if s == nil { +// return false +// } +// return *s +// } + +// func refToInt(s *int) int { +// if s == nil { +// return 0 +// } +// return *s +// } + +func refToIndex(s *int) int { + if s == nil { + return -1 + } + return *s +} + +func refToString(s *string) 
string { + if s == nil { + return "" + } + return *s +} + +func stringToRef(s string) *string { + if s == "" { + return nil + } + return &s +} + +func toPageInfo(p *usecase.PageInfo) *PageInfo { + if p == nil { + return &PageInfo{} + } + return &PageInfo{ + StartCursor: p.StartCursor(), + EndCursor: p.EndCursor(), + HasNextPage: p.HasNextPage(), + HasPreviousPage: p.HasPreviousPage(), + } +} + +func toVisualizer(v visualizer.Visualizer) Visualizer { + switch v { + case visualizer.VisualizerCesium: + return VisualizerCesium + } + return Visualizer("") +} + +func fromFile(f *graphql.Upload) *file.File { + if f == nil { + return nil + } + return &file.File{ + Content: f.File, + Name: f.Filename, + Size: f.Size, + ContentType: f.ContentType, + } +} + +func fromListOperation(op ListOperation) interfaces.ListOperation { + if op == ListOperationAdd { + return interfaces.ListOperationAdd + } else if op == ListOperationMove { + return interfaces.ListOperationMove + } else if op == ListOperationRemove { + return interfaces.ListOperationRemove + } + return interfaces.ListOperation("") +} diff --git a/internal/adapter/graphql/convert_asset.go b/internal/adapter/graphql/convert_asset.go new file mode 100644 index 000000000..b2ec93df9 --- /dev/null +++ b/internal/adapter/graphql/convert_asset.go @@ -0,0 +1,20 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/pkg/asset" +) + +func toAsset(a *asset.Asset) *Asset { + if a == nil { + return nil + } + return &Asset{ + ID: a.ID().ID(), + CreatedAt: a.CreatedAt(), + TeamID: a.Team().ID(), + Name: a.Name(), + Size: a.Size(), + URL: a.URL(), + ContentType: a.ContentType(), + } +} diff --git a/internal/adapter/graphql/convert_dataset.go b/internal/adapter/graphql/convert_dataset.go new file mode 100644 index 000000000..b2cbc2edf --- /dev/null +++ b/internal/adapter/graphql/convert_dataset.go @@ -0,0 +1,124 @@ +package graphql + +import ( + "net/url" + + "github.com/reearth/reearth-backend/pkg/dataset" + 
"github.com/reearth/reearth-backend/pkg/id" +) + +func toDatasetValue(v *dataset.Value) *interface{} { + var res interface{} + if v == nil { + return nil + } + switch v2 := v.Value().(type) { + case bool: + res = v2 + case float64: + res = v2 + case string: + res = v2 + case id.ID: + res = v2.String() + case *url.URL: + res = v2.String() + case dataset.LatLng: + res = LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case dataset.LatLngHeight: + res = LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + } + return &res +} + +func toDatasetValueType(t dataset.ValueType) ValueType { + switch t { + case dataset.ValueTypeBool: + return ValueTypeBool + case dataset.ValueTypeNumber: + return ValueTypeNumber + case dataset.ValueTypeString: + return ValueTypeString + case dataset.ValueTypeLatLng: + return ValueTypeLatlng + case dataset.ValueTypeLatLngHeight: + return ValueTypeLatlngheight + case dataset.ValueTypeURL: + return ValueTypeURL + case dataset.ValueTypeRef: + return ValueTypeRef + } + return "" +} + +func toDatasetSource(ds dataset.Source) string { + return ds.String() +} + +func toDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { + if f == nil || parent == nil { + return nil + } + + return &DatasetField{ + SchemaID: parent.Schema().ID(), + FieldID: f.Field().ID(), + Type: toDatasetValueType(f.Type()), + Value: toDatasetValue(f.Value()), + Source: toDatasetSource(f.Source()), + } +} + +func toDataset(ds *dataset.Dataset) *Dataset { + if ds == nil { + return nil + } + + dsFields := ds.Fields() + fields := make([]*DatasetField, 0, len(dsFields)) + for _, f := range dsFields { + fields = append(fields, toDatasetField(f, ds)) + } + + return &Dataset{ + ID: ds.ID().ID(), + SchemaID: ds.Schema().ID(), + Source: toDatasetSource(ds.Source()), + Fields: fields, + } +} + +func toDatasetSchema(ds *dataset.Schema) *DatasetSchema { + if ds == nil { + return nil + } + + dsFields := ds.Fields() + fields := make([]*DatasetSchemaField, 0, 
len(dsFields)) + for _, f := range dsFields { + fields = append(fields, &DatasetSchemaField{ + ID: f.ID().ID(), + Name: f.Name(), + Type: toDatasetValueType(f.Type()), + SchemaID: ds.ID().ID(), + Source: toDatasetSource(f.Source()), + RefID: f.Ref().IDRef(), + }) + } + + return &DatasetSchema{ + ID: ds.ID().ID(), + Source: toDatasetSource(ds.Source()), + Name: ds.Name(), + SceneID: ds.Scene().ID(), + RepresentativeFieldID: ds.RepresentativeField().IDRef().IDRef(), + Fields: fields, + } +} diff --git a/internal/adapter/graphql/convert_layer.go b/internal/adapter/graphql/convert_layer.go new file mode 100644 index 000000000..1c76d2360 --- /dev/null +++ b/internal/adapter/graphql/convert_layer.go @@ -0,0 +1,169 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/decoding" +) + +func toLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { + if l == nil { + return nil + } + + return &LayerItem{ + ID: l.ID().ID(), + Name: l.Name(), + IsVisible: l.IsVisible(), + PropertyID: l.Property().IDRef(), + PluginID: l.Plugin(), + ExtensionID: l.Extension(), + Infobox: toInfobox(l.Infobox(), l.ID(), l.LinkedDataset()), + LinkedDatasetID: l.LinkedDataset().IDRef(), + ParentID: parent.IDRef(), + } +} + +func toLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { + if l == nil { + return nil + } + + laLayers := l.Layers().Layers() + layers := make([]*id.ID, 0, len(laLayers)) + for _, lay := range laLayers { + layers = append(layers, lay.IDRef()) + } + + return &LayerGroup{ + ID: l.ID().ID(), + Name: l.Name(), + IsVisible: l.IsVisible(), + PropertyID: l.Property().IDRef(), + PluginID: l.Plugin(), + ExtensionID: l.Extension(), + Infobox: toInfobox(l.Infobox(), l.ID(), nil), + LinkedDatasetSchemaID: l.LinkedDatasetSchema().IDRef(), + LayerIds: layers, + Root: l.IsRoot(), + ParentID: parent.IDRef(), + } +} + +func toLayer(l layer.Layer, parent *id.LayerID) 
Layer { + if l == nil { + return nil + } + switch la := l.(type) { + case *layer.Item: + return toLayerItem(la, parent) + case *layer.Group: + return toLayerGroup(la, parent) + } + return nil +} + +func toLayers(layers layer.List, parent *id.LayerID) []Layer { + if len(layers) == 0 { + return nil + } + + result := make([]Layer, 0, len(layers)) + for _, l := range layers { + if l == nil { + continue + } + result = append(result, toLayer(*l, parent)) + } + + return result +} + +func toInfoboxField(ibf *layer.InfoboxField, parentDatasetID *id.DatasetID) *InfoboxField { + if ibf == nil { + return nil + } + return &InfoboxField{ + ID: ibf.ID().ID(), + PluginID: ibf.Plugin(), + ExtensionID: ibf.Extension(), + PropertyID: ibf.Property().ID(), + LinkedDatasetID: parentDatasetID.IDRef(), + } +} + +func toInfobox(ib *layer.Infobox, parent id.LayerID, parentDatasetID *id.DatasetID) *Infobox { + if ib == nil { + return nil + } + ibFields := ib.Fields() + fields := make([]*InfoboxField, 0, len(ibFields)) + for _, ibf := range ibFields { + fields = append(fields, toInfoboxField(ibf, parentDatasetID)) + } + + return &Infobox{ + PropertyID: ib.Property().ID(), + Fields: fields, + LayerID: parent.ID(), + LinkedDatasetID: parentDatasetID.IDRef(), + } +} + +func toMergedLayer(layer *layer.Merged) *MergedLayer { + if layer == nil { + return nil + } + + return &MergedLayer{ + OriginalID: layer.Original.ID(), + ParentID: layer.Parent.IDRef(), + Infobox: toMergedInfobox(layer.Infobox), + Property: toMergedPropertyFromMetadata(layer.Property), + } +} + +func toMergedInfobox(ib *layer.MergedInfobox) *MergedInfobox { + if ib == nil { + return nil + } + + fields := make([]*MergedInfoboxField, 0, len(ib.Fields)) + for _, f := range ib.Fields { + fields = append(fields, toMergedInfoboxField(f)) + } + + return &MergedInfobox{ + Fields: fields, + Property: toMergedPropertyFromMetadata(ib.Property), + } +} + +func toMergedInfoboxField(ibf *layer.MergedInfoboxField) *MergedInfoboxField { + if ibf 
== nil { + return nil + } + + return &MergedInfoboxField{ + OriginalID: ibf.ID.ID(), + PluginID: ibf.Plugin, + ExtensionID: ibf.Extension, + Property: toMergedPropertyFromMetadata(ibf.Property), + } +} +func fromLayerEncodingFormat(v LayerEncodingFormat) decoding.LayerEncodingFormat { + switch v { + case LayerEncodingFormatKml: + return decoding.LayerEncodingFormatKML + case LayerEncodingFormatCzml: + return decoding.LayerEncodingFormatCZML + case LayerEncodingFormatGeojson: + return decoding.LayerEncodingFormatGEOJSON + case LayerEncodingFormatShape: + return decoding.LayerEncodingFormatSHAPE + case LayerEncodingFormatReearth: + return decoding.LayerEncodingFormatREEARTH + } + + return decoding.LayerEncodingFormat("") +} diff --git a/internal/adapter/graphql/convert_plugin.go b/internal/adapter/graphql/convert_plugin.go new file mode 100644 index 000000000..f74c6f79d --- /dev/null +++ b/internal/adapter/graphql/convert_plugin.go @@ -0,0 +1,58 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/pkg/plugin" +) + +func toPlugin(p *plugin.Plugin) *Plugin { + if p == nil { + return nil + } + + pid := p.ID() + pluginExtensions := p.Extensions() + extensions := make([]*PluginExtension, 0, len(pluginExtensions)) + for _, pe := range pluginExtensions { + extensions = append(extensions, &PluginExtension{ + ExtensionID: pe.ID(), + PluginID: pid, + Type: toPluginExtensionType(pe.Type()), + Visualizer: toVisualizer(pe.Visualizer()), + Name: pe.Name().String(), + Description: pe.Description().String(), + Icon: pe.Icon(), + PropertySchemaID: pe.Schema(), + AllTranslatedDescription: pe.Description(), + AllTranslatedName: pe.Name(), + }) + } + + return &Plugin{ + ID: pid, + Name: p.Name().String(), + Description: p.Description().String(), + AllTranslatedDescription: p.Description(), + AllTranslatedName: p.Name(), + Author: p.Author(), + RepositoryURL: p.RepositoryURL(), + Version: p.Version().String(), + PropertySchemaID: p.Schema(), + Extensions: extensions, + } 
+} + +func toPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { + switch t { + case plugin.ExtensionTypePrimitive: + return PluginExtensionTypePrimitive + case plugin.ExtensionTypeWidget: + return PluginExtensionTypeWidget + case plugin.ExtensionTypeBlock: + return PluginExtensionTypeBlock + case plugin.ExtensionTypeVisualizer: + return PluginExtensionTypeVisualizer + case plugin.ExtensionTypeInfobox: + return PluginExtensionTypeInfobox + } + return PluginExtensionType("") +} diff --git a/internal/adapter/graphql/convert_project.go b/internal/adapter/graphql/convert_project.go new file mode 100644 index 000000000..cbb69f339 --- /dev/null +++ b/internal/adapter/graphql/convert_project.go @@ -0,0 +1,61 @@ +package graphql + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/project" +) + +func fromPublishmentStatus(v PublishmentStatus) project.PublishmentStatus { + switch v { + case PublishmentStatusPublic: + return project.PublishmentStatusPublic + case PublishmentStatusLimited: + return project.PublishmentStatusLimited + case PublishmentStatusPrivate: + return project.PublishmentStatusPrivate + } + return project.PublishmentStatus("") +} + +func toPublishmentStatus(v project.PublishmentStatus) PublishmentStatus { + switch v { + case project.PublishmentStatusPublic: + return PublishmentStatusPublic + case project.PublishmentStatusLimited: + return PublishmentStatusLimited + case project.PublishmentStatusPrivate: + return PublishmentStatusPrivate + } + return PublishmentStatus("") +} + +func toProject(p *project.Project) *Project { + if p == nil { + return nil + } + + var publishedAtRes *time.Time + if publishedAt := p.PublishedAt(); !publishedAt.IsZero() { + publishedAtRes = &publishedAt + } + + return &Project{ + ID: p.ID().ID(), + CreatedAt: p.CreatedAt(), + IsArchived: p.IsArchived(), + Alias: p.Alias(), + Name: p.Name(), + Description: p.Description(), + ImageURL: p.ImageURL(), + PublishedAt: publishedAtRes, + UpdatedAt: p.UpdatedAt(), 
+ Visualizer: Visualizer(p.Visualizer()), + TeamID: p.Team().ID(), + PublishmentStatus: toPublishmentStatus(p.PublishmentStatus()), + PublicTitle: p.PublicTitle(), + PublicDescription: p.PublicDescription(), + PublicImage: p.PublicImage(), + PublicNoIndex: p.PublicNoIndex(), + } +} diff --git a/internal/adapter/graphql/convert_property.go b/internal/adapter/graphql/convert_property.go new file mode 100644 index 000000000..3ca383c94 --- /dev/null +++ b/internal/adapter/graphql/convert_property.go @@ -0,0 +1,648 @@ +package graphql + +import ( + "net/url" + "strings" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +func toPropertyValue(v *property.Value) *interface{} { + var res interface{} + if v == nil { + return nil + } + switch v2 := v.Value().(type) { + case bool: + res = v2 + case float64: + res = v2 + case string: + res = v2 + case id.ID: + res = v2.String() + case *url.URL: + res = v2.String() + case property.LatLng: + res = LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case property.LatLngHeight: + res = LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case property.Camera: + res = Camera{ + Lat: v2.Lat, + Lng: v2.Lng, + Altitude: v2.Altitude, + Heading: v2.Heading, + Pitch: v2.Pitch, + Roll: v2.Roll, + Fov: v2.FOV, + } + case property.Typography: + res = Typography{ + FontFamily: v2.FontFamily, + FontSize: v2.FontSize, + FontWeight: v2.FontWeight, + Color: v2.Color, + TextAlign: toTextAlign(v2.TextAlign), + Bold: v2.Bold, + Italic: v2.Italic, + Underline: v2.Underline, + } + case property.Coordinates: + res2 := make([]LatLngHeight, 0, len(v2)) + for _, c := range v2 { + res2 = append(res2, LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = res2 + case property.Polygon: + res2 := make([][]LatLngHeight, 0, len(v2)) + for _, d := range v2 { + coord := make([]LatLngHeight, 0, len(d)) + for _, c := range d { + coord = append(coord, LatLngHeight{ + Lat: c.Lat, 
+ Lng: c.Lng, + Height: c.Height, + }) + } + res2 = append(res2, coord) + } + res = res2 + } + return &res +} + +func toTextAlign(t *property.TextAlign) *TextAlign { + if t == nil { + return nil + } + var t3 TextAlign + switch *t { + case property.TextAlignLeft: + t3 = TextAlignLeft + case property.TextAlignCenter: + t3 = TextAlignCenter + case property.TextAlignRight: + t3 = TextAlignRight + case property.TextAlignJustify: + t3 = TextAlignJustify + case property.TextAlignJustifyAll: + t3 = TextAlignJustifyAll + default: + return nil + } + return &t3 +} + +func toPropertyValueType(t property.ValueType) ValueType { + switch t { + case property.ValueTypeBool: + return ValueTypeBool + case property.ValueTypeNumber: + return ValueTypeNumber + case property.ValueTypeString: + return ValueTypeString + case property.ValueTypeLatLng: + return ValueTypeLatlng + case property.ValueTypeLatLngHeight: + return ValueTypeLatlngheight + case property.ValueTypeURL: + return ValueTypeURL + case property.ValueTypeRef: + return ValueTypeRef + case property.ValueTypeCamera: + return ValueTypeCamera + case property.ValueTypeTypography: + return ValueTypeTypography + case property.ValueTypeCoordinates: + return ValueTypeCoordinates + case property.ValueTypePolygon: + return ValueTypePolygon + case property.ValueTypeRect: + return ValueTypeRect + } + return "" +} + +func fromPropertyValueType(t ValueType) property.ValueType { + switch t { + case ValueTypeBool: + return property.ValueTypeBool + case ValueTypeNumber: + return property.ValueTypeNumber + case ValueTypeString: + return property.ValueTypeString + case ValueTypeLatlng: + return property.ValueTypeLatLng + case ValueTypeLatlngheight: + return property.ValueTypeLatLngHeight + case ValueTypeURL: + return property.ValueTypeURL + case ValueTypeRef: + return property.ValueTypeRef + case ValueTypeCamera: + return property.ValueTypeCamera + case ValueTypeTypography: + return property.ValueTypeTypography + case ValueTypeCoordinates: + 
return property.ValueTypeCoordinates + case ValueTypePolygon: + return property.ValueTypePolygon + case ValueTypeRect: + return property.ValueTypeRect + } + return "" +} + +func fromPropertyValueAndType(v interface{}, t ValueType) (*property.Value, bool) { + switch v2 := v.(type) { + case LatLng: + v = property.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng} + case LatLngHeight: + v = property.LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height} + case *LatLng: + v = property.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case *LatLngHeight: + v = property.LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case *Camera: + v = property.Camera{ + Lat: v2.Lat, + Lng: v2.Lng, + Altitude: v2.Altitude, + Heading: v2.Heading, + Pitch: v2.Pitch, + Roll: v2.Roll, + FOV: v2.Fov, + } + case *Typography: + v = property.Typography{ + FontFamily: v2.FontFamily, + FontSize: v2.FontSize, + FontWeight: v2.FontWeight, + Color: v2.Color, + TextAlign: fromTextAlign(v2.TextAlign), + Bold: v2.Bold, + Italic: v2.Italic, + Underline: v2.Underline, + } + case []LatLngHeight: + res := make([]property.LatLngHeight, 0, len(v2)) + for _, c := range v2 { + res = append(res, property.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + v = res + case [][]LatLngHeight: + res := make([][]property.LatLngHeight, 0, len(v2)) + for _, d := range v2 { + coord := make([]property.LatLngHeight, 0, len(d)) + for _, c := range d { + coord = append(coord, property.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + v = res + case *Rect: + v = property.Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } + } + return fromPropertyValueType(t).ValueFrom(v) +} + +func fromTextAlign(t *TextAlign) *property.TextAlign { + if t == nil { + return nil + } + var t2 property.TextAlign + switch *t { + case TextAlignLeft: + t2 = property.TextAlignLeft + case TextAlignCenter: + t2 = 
property.TextAlignCenter + case TextAlignRight: + t2 = property.TextAlignRight + case TextAlignJustify: + t2 = property.TextAlignJustify + case TextAlignJustifyAll: + t2 = property.TextAlignJustifyAll + default: + return nil + } + return &t2 +} + +func toPropertyField(f *property.Field, parent *property.Property, gl *property.GroupList, g *property.Group) *PropertyField { + if f == nil { + return nil + } + + var links []*PropertyFieldLink + if flinks := f.Links(); flinks != nil { + links = make([]*PropertyFieldLink, 0, flinks.Len()) + for _, l := range flinks.Links() { + links = append(links, toPropertyFieldLink(l)) + } + } + + return &PropertyField{ + // TODO: PropertySchemaFieldID is mismatched + ID: id.PropertySchemaFieldID(propertyFieldID(parent, gl, g, f)), + ParentID: parent.ID().ID(), + SchemaID: parent.Schema(), + FieldID: f.Field(), + Value: toPropertyValue(f.Value()), + Type: toPropertyValueType(f.Type()), + Links: links, + } +} + +func toPropertyFieldLinks(flinks *property.Links) []*PropertyFieldLink { + if flinks == nil { + return nil + } + var links []*PropertyFieldLink + links = make([]*PropertyFieldLink, 0, flinks.Len()) + for _, l := range flinks.Links() { + links = append(links, toPropertyFieldLink(l)) + } + return links +} + +func fromPropertyFieldLink(datasetSchema, ds, fields []*id.ID) *property.Links { + if len(datasetSchema) != len(fields) || (ds != nil && len(ds) != len(fields) && len(ds) > 1) { + return nil + } + + links := make([]*property.Link, 0, len(datasetSchema)) + for i, dss := range datasetSchema { + f := fields[i] + if dss == nil || f == nil { + return nil + } + dsid := id.DatasetSchemaID(*dss) + dsfid := id.DatasetSchemaFieldID(*f) + if len(ds) == 0 || (len(ds) == 1 && i > 0) { + links = append(links, property.NewLinkFieldOnly(dsid, dsfid)) + } else { + d := ds[i] + if d == nil { + return nil + } + links = append(links, property.NewLink(id.DatasetID(*d), dsid, dsfid)) + } + } + + return property.NewLinks(links) +} + +func 
toPropertyFieldLink(link *property.Link) *PropertyFieldLink { + return &PropertyFieldLink{ + DatasetID: link.Dataset().IDRef(), + DatasetSchemaID: link.DatasetSchema().ID(), + DatasetSchemaFieldID: link.DatasetSchemaField().ID(), + } +} + +func toProperty(property *property.Property) *Property { + if property == nil { + return nil + } + + pitems := property.Items() + items := make([]PropertyItem, 0, len(pitems)) + for _, i := range pitems { + items = append(items, toPropertyItem(i, property, nil)) + } + + return &Property{ + ID: property.ID().ID(), + SchemaID: property.Schema(), + Items: items, + } +} + +func toPropertySchema(propertySchema *property.Schema) *PropertySchema { + if propertySchema == nil { + return nil + } + + pgroups := propertySchema.Groups() + groups := make([]*PropertySchemaGroup, 0, len(pgroups)) + for _, g := range pgroups { + groups = append(groups, toPropertySchemaGroup(g)) + } + + return &PropertySchema{ + ID: propertySchema.ID(), + Groups: groups, + LinkableFields: toPropertyLinkableFields(propertySchema.ID(), propertySchema.LinkableFields()), + } +} + +func toPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields) *PropertyLinkableFields { + return &PropertyLinkableFields{ + SchemaID: sid, + Latlng: l.LatLng.FieldRef(), + URL: l.URL.FieldRef(), + } +} + +func toPropertySchemaField(f *property.SchemaField) *PropertySchemaField { + if f == nil { + return nil + } + + var choices []*PropertySchemaFieldChoice + if c := f.Choices(); c != nil { + choices = make([]*PropertySchemaFieldChoice, 0, len(c)) + for _, k := range c { + choices = append(choices, &PropertySchemaFieldChoice{ + Key: k.Key, + Title: k.Title.String(), + Label: k.Title.String(), // deprecated + AllTranslatedTitle: k.Title, + AllTranslatedLabel: k.Title, // deprecated + Icon: stringToRef(k.Icon), + }) + } + } + + return &PropertySchemaField{ + FieldID: f.ID(), + Type: toPropertyValueType(f.Type()), + Title: f.Title().String(), + Name: f.Title().String(), // 
deprecated + Description: f.Description().String(), + Prefix: stringToRef(f.Prefix()), + Suffix: stringToRef(f.Suffix()), + DefaultValue: toPropertyValue(f.DefaultValue()), + UI: toPropertySchemaFieldUI(f.UI()), + Min: f.Min(), + Max: f.Max(), + Choices: choices, + IsAvailableIf: toPropertyConditon(f.IsAvailableIf()), + AllTranslatedTitle: f.Title(), + AllTranslatedName: f.Title(), // deprecated + AllTranslatedDescription: f.Description(), + } +} + +func toPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI { + if ui == nil { + return nil + } + ui2 := PropertySchemaFieldUI("") + switch *ui { + case property.SchemaFieldUIMultiline: + ui2 = PropertySchemaFieldUIMultiline + case property.SchemaFieldUISelection: + ui2 = PropertySchemaFieldUISelection + case property.SchemaFieldUIColor: + ui2 = PropertySchemaFieldUIColor + case property.SchemaFieldUIRange: + ui2 = PropertySchemaFieldUIRange + case property.SchemaFieldUIImage: + ui2 = PropertySchemaFieldUIImage + case property.SchemaFieldUIVideo: + ui2 = PropertySchemaFieldUIVideo + case property.SchemaFieldUIFile: + ui2 = PropertySchemaFieldUIFile + case property.SchemaFieldUILayer: + ui2 = PropertySchemaFieldUILayer + case property.SchemaFieldUICameraPose: + ui2 = PropertySchemaFieldUICameraPose + } + if ui2 != PropertySchemaFieldUI("") { + return &ui2 + } + return nil +} + +func toMergedPropertyFromMetadata(m *property.MergedMetadata) *MergedProperty { + if m == nil { + return nil + } + return &MergedProperty{ + OriginalID: m.Original.IDRef(), + ParentID: m.Parent.IDRef(), + LinkedDatasetID: m.LinkedDataset.IDRef(), + Groups: nil, // resolved by graphql resolver + } +} + +func toMergedProperty(m *property.Merged) *MergedProperty { + if m == nil { + return nil + } + groups := make([]*MergedPropertyGroup, 0, len(m.Groups)) + for _, f := range m.Groups { + groups = append(groups, toMergedPropertyGroup(f, m)) + } + return &MergedProperty{ + OriginalID: m.Original.IDRef(), + ParentID: m.Parent.IDRef(), 
+ SchemaID: &m.Schema, + LinkedDatasetID: m.LinkedDataset.IDRef(), + Groups: groups, + } +} + +func toMergedPropertyGroup(f *property.MergedGroup, p *property.Merged) *MergedPropertyGroup { + if f == nil { + return nil + } + fields := make([]*MergedPropertyField, 0, len(f.Fields)) + for _, f2 := range f.Fields { + fields = append(fields, toMergedPropertyField(f2, p.Schema)) + } + groups := make([]*MergedPropertyGroup, 0, len(f.Groups)) + for _, f2 := range f.Groups { + groups = append(groups, toMergedPropertyGroup(f2, p)) + } + return &MergedPropertyGroup{ + OriginalPropertyID: p.Original.IDRef(), + ParentPropertyID: p.Parent.IDRef(), + OriginalID: f.Original.IDRef(), + SchemaGroupID: f.SchemaGroup, + ParentID: f.Parent.IDRef(), + SchemaID: p.Schema.Ref(), + LinkedDatasetID: f.LinkedDataset.IDRef(), + Fields: fields, + Groups: groups, + } +} + +func toMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *MergedPropertyField { + if f == nil { + return nil + } + return &MergedPropertyField{ + FieldID: f.ID, + SchemaID: s, + Links: toPropertyFieldLinks(f.Links), + Value: toPropertyValue(f.Value), + Type: toPropertyValueType(f.Type), + Overridden: f.Overridden, + } +} + +func toPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { + if g == nil { + return nil + } + gfields := g.Fields() + fields := make([]*PropertySchemaField, 0, len(gfields)) + var representativeField *PropertySchemaField + representativeFieldID := g.RepresentativeFieldID() + for _, f := range gfields { + f2 := toPropertySchemaField(f) + fields = append(fields, f2) + if representativeFieldID != nil && f.ID() == *representativeFieldID { + representativeField = f2 + } + } + return &PropertySchemaGroup{ + SchemaGroupID: g.ID(), + SchemaID: g.Schema(), + IsList: g.IsList(), + Title: g.Title().StringRef(), + Fields: fields, + Name: representativeFieldID, // deprecated + RepresentativeFieldID: representativeFieldID, + RepresentativeField: representativeField, + 
AllTranslatedTitle: g.Title(), + IsAvailableIf: toPropertyConditon(g.IsAvailableIf()), + } +} + +func toPropertyGroup(g *property.Group, p *property.Property, gl *property.GroupList) *PropertyGroup { + if g == nil { + return nil + } + + gfields := g.Fields() + fields := make([]*PropertyField, 0, len(gfields)) + for _, f := range gfields { + fields = append(fields, toPropertyField(f, p, gl, g)) + } + + return &PropertyGroup{ + ID: g.ID().ID(), + SchemaID: g.Schema(), + SchemaGroupID: g.SchemaGroup(), + Fields: fields, + } +} + +func toPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyGroupList { + if g == nil { + return nil + } + + ggroups := g.Groups() + groups := make([]*PropertyGroup, 0, len(ggroups)) + for _, f := range ggroups { + groups = append(groups, toPropertyGroup(f, p, g)) + } + + return &PropertyGroupList{ + ID: g.ID().ID(), + SchemaID: g.Schema(), + SchemaGroupID: g.SchemaGroup(), + Groups: groups, + } +} + +func toPropertyItem(i property.Item, p *property.Property, pgl *property.GroupList) PropertyItem { + if i == nil { + return nil + } + + if g := property.ToGroup(i); g != nil { + return toPropertyGroup(g, p, pgl) + } else if gl := property.ToGroupList(i); gl != nil { + return toPropertyGroupList(gl, p) + } + return nil +} + +func toPropertyConditon(c *property.Condition) *PropertyCondition { + if c == nil { + return nil + } + + return &PropertyCondition{ + FieldID: c.Field, + Value: toPropertyValue(c.Value), + Type: toPropertyValueType(c.Value.Type()), + } +} + +func fromPointer(schemaItem *id.PropertySchemaFieldID, item *id.ID, field *id.PropertySchemaFieldID) *property.Pointer { + i := id.PropertyItemIDFromRefID(item) + return property.NewPointer(schemaItem, i, field) +} + +func toPropertyLatLng(lat, lng *float64) *property.LatLng { + var latlng *property.LatLng + if lat != nil && lng != nil { + latlng2 := property.LatLng{Lat: *lat, Lng: *lng} + latlng = &latlng2 + } + return latlng +} + +func propertyFieldID(property 
*property.Property, groupList *property.GroupList, group *property.Group, field *property.Field) string { + if property == nil || group == nil || field == nil { + return "" + } + + const sep = "_" + var sb strings.Builder + sb.WriteString(property.ID().String()) + sb.WriteString(sep) + if groupList != nil { + sb.WriteString(groupList.ID().String()) + sb.WriteString(sep) + } + sb.WriteString(group.ID().String()) + sb.WriteString(sep) + sb.WriteString(field.Field().String()) + + return sb.String() +} diff --git a/internal/adapter/graphql/convert_scene.go b/internal/adapter/graphql/convert_scene.go new file mode 100644 index 000000000..877693d4a --- /dev/null +++ b/internal/adapter/graphql/convert_scene.go @@ -0,0 +1,76 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/pkg/scene" +) + +func toSceneWidget(w *scene.Widget) *SceneWidget { + if w == nil { + return nil + } + + return &SceneWidget{ + ID: w.ID().ID(), + PluginID: w.Plugin(), + ExtensionID: w.Extension(), + PropertyID: w.Property().ID(), + Enabled: w.Enabled(), + } +} + +func toScenePlugin(sp *scene.Plugin) *ScenePlugin { + if sp == nil { + return nil + } + + return &ScenePlugin{ + PluginID: sp.Plugin(), + PropertyID: sp.Property().IDRef(), + } +} + +func toScene(scene *scene.Scene) *Scene { + if scene == nil { + return nil + } + + sceneWidgets := scene.WidgetSystem().Widgets() + widgets := make([]*SceneWidget, 0, len(sceneWidgets)) + for _, w := range sceneWidgets { + widgets = append(widgets, toSceneWidget(w)) + } + + scenePlugins := scene.PluginSystem().Plugins() + plugins := make([]*ScenePlugin, 0, len(scenePlugins)) + for _, sp := range scenePlugins { + plugins = append(plugins, toScenePlugin(sp)) + } + + return &Scene{ + ID: scene.ID().ID(), + ProjectID: scene.Project().ID(), + PropertyID: scene.Property().ID(), + TeamID: scene.Team().ID(), + RootLayerID: scene.RootLayer().ID(), + CreatedAt: scene.CreatedAt(), + UpdatedAt: scene.UpdatedAt(), + Widgets: widgets, + Plugins: plugins, + 
} +} + +func toSceneLockMode(lm scene.LockMode) SceneLockMode { + switch lm { + case scene.LockModeFree: + return SceneLockModeFree + case scene.LockModePending: + return SceneLockModePending + case scene.LockModeDatasetSyncing: + return SceneLockModeDatasetSyncing + case scene.LockModePluginUpgrading: + return SceneLockModePluginUpgrading + case scene.LockModePublishing: + return SceneLockModePublishing + } + return SceneLockMode("invalid") +} diff --git a/internal/adapter/graphql/convert_team.go b/internal/adapter/graphql/convert_team.go new file mode 100644 index 000000000..a1eb97d30 --- /dev/null +++ b/internal/adapter/graphql/convert_team.go @@ -0,0 +1,51 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/pkg/user" +) + +func toTeam(t *user.Team) *Team { + if t == nil { + return nil + } + + memberMap := t.Members().Members() + members := make([]*TeamMember, 0, len(memberMap)) + for u, r := range memberMap { + members = append(members, &TeamMember{ + UserID: u.ID(), + Role: toRole(r), + }) + } + + return &Team{ + ID: t.ID().ID(), + Name: t.Name(), + Personal: t.IsPersonal(), + Members: members, + } +} + +func fromRole(r Role) user.Role { + switch r { + case RoleReader: + return user.RoleReader + case RoleWriter: + return user.RoleWriter + case RoleOwner: + return user.RoleOwner + } + return user.Role("") +} + +func toRole(r user.Role) Role { + switch r { + case user.RoleReader: + return RoleReader + case user.RoleWriter: + return RoleWriter + case user.RoleOwner: + return RoleOwner + } + return Role("") +} diff --git a/internal/adapter/graphql/convert_user.go b/internal/adapter/graphql/convert_user.go new file mode 100644 index 000000000..c021cca01 --- /dev/null +++ b/internal/adapter/graphql/convert_user.go @@ -0,0 +1,56 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/pkg/user" +) + +// ToUser _ +func ToUser(user *user.User) *User { + return toUser(user) +} + +func toUser(user *user.User) *User { + if user == nil { + 
return nil + } + auths := user.Auths() + authsgql := make([]string, 0, len(auths)) + for _, a := range auths { + authsgql = append(authsgql, a.Provider) + } + return &User{ + ID: user.ID().ID(), + Name: user.Name(), + Email: user.Email(), + Lang: user.Lang().String(), + MyTeamID: user.Team().ID(), + Auths: authsgql, + } +} + +func toSearchedUser(u *user.User) *SearchedUser { + if u == nil { + return nil + } + return &SearchedUser{ + UserID: u.ID().ID(), + UserName: u.Name(), + UserEmail: u.Email(), + } +} + +func toTheme(t *Theme) *user.Theme { + th := user.ThemeDefault + + if t == nil { + return nil + } + + switch *t { + case ThemeDark: + th = user.ThemeDark + case ThemeLight: + th = user.ThemeLight + } + return &th +} diff --git a/internal/adapter/graphql/loader_dataset.go b/internal/adapter/graphql/loader_dataset.go new file mode 100644 index 000000000..11794914d --- /dev/null +++ b/internal/adapter/graphql/loader_dataset.go @@ -0,0 +1,50 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (c *DatasetController) Fetch(ctx context.Context, ids []id.DatasetID, operator *usecase.Operator) ([]*Dataset, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + datasets := make([]*Dataset, 0, len(res)) + for _, d := range res { + datasets = append(datasets, toDataset(d)) + } + + return datasets, nil +} + +func (c *DatasetController) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID, operator *usecase.Operator) ([]*DatasetSchema, []error) { + res, err := c.usecase().FetchSchema(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*DatasetSchema, 0, len(res)) + for _, d := range res { + schemas = append(schemas, toDatasetSchema(d)) + } + + return schemas, nil +} + +func (c *DatasetController) GraphFetch(ctx context.Context, i id.DatasetID, depth int, operator 
*usecase.Operator) ([]*Dataset, []error) { + res, err := c.usecase().GraphFetch(ctx, i, depth, operator) + if err != nil { + return nil, []error{err} + } + + datasets := make([]*Dataset, 0, len(res)) + for _, d := range res { + datasets = append(datasets, toDataset(d)) + } + + return datasets, nil +} diff --git a/internal/adapter/graphql/loader_layer.go b/internal/adapter/graphql/loader_layer.go new file mode 100644 index 000000000..417df7756 --- /dev/null +++ b/internal/adapter/graphql/loader_layer.go @@ -0,0 +1,91 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (c *LayerController) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*Layer, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + layers := make([]*Layer, 0, len(res)) + for _, l := range res { + if l == nil { + layers = append(layers, nil) + } else { + layer := toLayer(*l, nil) + layers = append(layers, &layer) + } + } + + return layers, nil +} + +func (c *LayerController) FetchGroup(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*LayerGroup, []error) { + res, err := c.usecase().FetchGroup(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + layerGroups := make([]*LayerGroup, 0, len(res)) + for _, l := range res { + layerGroups = append(layerGroups, toLayerGroup(l, nil)) + } + + return layerGroups, nil +} + +func (c *LayerController) FetchItem(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*LayerItem, []error) { + res, err := c.usecase().FetchItem(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + layerItems := make([]*LayerItem, 0, len(res)) + for _, l := range res { + layerItems = append(layerItems, toLayerItem(l, nil)) + } + + return layerItems, nil +} + +func (c *LayerController) FetchParent(ctx context.Context, lid 
id.LayerID, operator *usecase.Operator) (*LayerGroup, error) { + res, err := c.usecase().FetchParent(ctx, id.LayerID(lid), operator) + if err != nil { + return nil, err + } + + return toLayerGroup(res, nil), nil +} + +func (c *LayerController) FetchByProperty(ctx context.Context, pid id.PropertyID, operator *usecase.Operator) (Layer, error) { + res, err := c.usecase().FetchByProperty(ctx, pid, operator) + if err != nil { + return nil, err + } + + return toLayer(res, nil), nil +} + +func (c *LayerController) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID, operator *usecase.Operator) (*MergedLayer, error) { + res, err2 := c.usecase().FetchMerged(ctx, org, parent, operator) + if err2 != nil { + return nil, err2 + } + + return toMergedLayer(res), nil +} + +func (c *LayerController) FetchParentAndMerged(ctx context.Context, org id.LayerID, operator *usecase.Operator) (*MergedLayer, error) { + res, err2 := c.usecase().FetchParentAndMerged(ctx, org, operator) + if err2 != nil { + return nil, err2 + } + + return toMergedLayer(res), nil +} diff --git a/internal/adapter/graphql/loader_plugin.go b/internal/adapter/graphql/loader_plugin.go new file mode 100644 index 000000000..dacc31500 --- /dev/null +++ b/internal/adapter/graphql/loader_plugin.go @@ -0,0 +1,22 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (c *PluginController) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*Plugin, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + plugins := make([]*Plugin, 0, len(res)) + for _, pl := range res { + plugins = append(plugins, toPlugin(pl)) + } + + return plugins, nil +} diff --git a/internal/adapter/graphql/loader_project.go b/internal/adapter/graphql/loader_project.go new file mode 100644 index 000000000..224f35212 --- /dev/null +++ 
b/internal/adapter/graphql/loader_project.go @@ -0,0 +1,47 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (c *ProjectController) Fetch(ctx context.Context, ids []id.ProjectID, operator *usecase.Operator) ([]*Project, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + projects := make([]*Project, 0, len(res)) + for _, project := range res { + projects = append(projects, toProject(project)) + } + + return projects, nil +} + +func (c *ProjectController) FindByTeam(ctx context.Context, teamID id.TeamID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*ProjectConnection, error) { + res, pi, err := c.usecase().FindByTeam(ctx, teamID, usecase.NewPagination(first, last, before, after), operator) + if err != nil { + return nil, err + } + + edges := make([]*ProjectEdge, 0, len(res)) + nodes := make([]*Project, 0, len(res)) + for _, p := range res { + prj := toProject(p) + edges = append(edges, &ProjectEdge{ + Node: prj, + Cursor: usecase.Cursor(prj.ID.String()), + }) + nodes = append(nodes, prj) + } + + return &ProjectConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: toPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} diff --git a/internal/adapter/graphql/loader_property.go b/internal/adapter/graphql/loader_property.go new file mode 100644 index 000000000..08f75e7dc --- /dev/null +++ b/internal/adapter/graphql/loader_property.go @@ -0,0 +1,46 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (c *PropertyController) Fetch(ctx context.Context, ids []id.PropertyID, operator *usecase.Operator) ([]*Property, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + properties := 
make([]*Property, 0, len(res)) + for _, property := range res { + properties = append(properties, toProperty(property)) + } + + return properties, nil +} + +func (c *PropertyController) FetchSchema(ctx context.Context, ids []id.PropertySchemaID, operator *usecase.Operator) ([]*PropertySchema, []error) { + res, err := c.usecase().FetchSchema(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*PropertySchema, 0, len(res)) + for _, propertySchema := range res { + schemas = append(schemas, toPropertySchema(propertySchema)) + } + + return schemas, nil +} + +func (c *PropertyController) FetchMerged(ctx context.Context, org, parent, linked *id.ID, operator *usecase.Operator) (*MergedProperty, error) { + res, err := c.usecase().FetchMerged(ctx, id.PropertyIDFromRefID(org), id.PropertyIDFromRefID(parent), id.DatasetIDFromRefID(linked), operator) + + if err != nil { + return nil, err + } + + return toMergedProperty(res), nil +} diff --git a/internal/adapter/graphql/loader_scene.go b/internal/adapter/graphql/loader_scene.go new file mode 100644 index 000000000..29808c57a --- /dev/null +++ b/internal/adapter/graphql/loader_scene.go @@ -0,0 +1,56 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (c *SceneController) Fetch(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]*Scene, []error) { + res, err := c.usecase().Fetch(ctx, ids, operator) + if err != nil { + return nil, []error{err} + } + + scenes := make([]*Scene, 0, len(res)) + for _, scene := range res { + scenes = append(scenes, toScene(scene)) + } + return scenes, nil +} + +func (c *SceneController) FindByProject(ctx context.Context, projectID id.ProjectID, operator *usecase.Operator) (*Scene, error) { + res, err := c.usecase().FindByProject(ctx, projectID, operator) + if err != nil { + return nil, err + } + + return toScene(res), nil +} + +func (c 
*SceneController) FetchLock(ctx context.Context, sid id.SceneID, operator *usecase.Operator) (*SceneLockMode, error) {
+	res, err := c.usecase().FetchLock(ctx, []id.SceneID{sid}, operator)
+	if err != nil {
+		return nil, err
+	}
+	if len(res) == 0 {
+		return nil, nil
+	}
+	sl := toSceneLockMode(res[0])
+	return &sl, nil
+}
+
+func (c *SceneController) FetchLockAll(ctx context.Context, sid []id.SceneID, operator *usecase.Operator) ([]SceneLockMode, []error) {
+	res, err := c.usecase().FetchLock(ctx, sid, operator)
+	if err != nil {
+		return nil, []error{err}
+	}
+
+	res2 := make([]SceneLockMode, 0, len(res))
+	for _, r := range res {
+		res2 = append(res2, toSceneLockMode(r))
+	}
+
+	return res2, nil
+}
diff --git a/internal/adapter/graphql/loader_team.go b/internal/adapter/graphql/loader_team.go
new file mode 100644
index 000000000..2337ca770
--- /dev/null
+++ b/internal/adapter/graphql/loader_team.go
@@ -0,0 +1 @@
+package graphql
diff --git a/internal/adapter/graphql/loader_user.go b/internal/adapter/graphql/loader_user.go
new file mode 100644
index 000000000..0da555abd
--- /dev/null
+++ b/internal/adapter/graphql/loader_user.go
@@ -0,0 +1,33 @@
+package graphql
+
+import (
+	"context"
+
+	"github.com/reearth/reearth-backend/internal/usecase"
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+func (c *TeamController) Fetch(ctx context.Context, ids []id.TeamID, operator *usecase.Operator) ([]*Team, []error) {
+	res, err := c.usecase().Fetch(ctx, ids, operator)
+	if err != nil {
+		return nil, []error{err}
+	}
+
+	teams := make([]*Team, 0, len(res))
+	for _, t := range res {
+		teams = append(teams, toTeam(t))
+	}
+	return teams, nil
+}
+
+func (c *TeamController) FindByUser(ctx context.Context, uid id.UserID, operator *usecase.Operator) ([]*Team, error) {
+	res, err := c.usecase().FindByUser(ctx, uid, operator)
+	if err != nil {
+		return nil, err
+	}
+	teams := make([]*Team, 0, len(res))
+	for _, t := range res {
+		teams = append(teams, toTeam(t))
+	}
+	return teams,
nil +} diff --git a/internal/adapter/graphql/models.go b/internal/adapter/graphql/models.go new file mode 100644 index 000000000..0b2a450de --- /dev/null +++ b/internal/adapter/graphql/models.go @@ -0,0 +1,290 @@ +package graphql + +import ( + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (LayerItem) IsNode() {} + +func (LayerGroup) IsNode() {} + +func (l *PropertyFieldLink) Copy() *PropertyFieldLink { + if l == nil { + return nil + } + return &PropertyFieldLink{ + DatasetID: l.DatasetID, + DatasetSchemaID: l.DatasetSchemaID, + DatasetSchemaFieldID: l.DatasetSchemaFieldID, + } +} + +func (d *Dataset) Field(id id.ID) *DatasetField { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Fields { + if f.FieldID == id { + return f + } + } + return nil +} + +func (d *DatasetSchema) Field(id id.ID) *DatasetSchemaField { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Fields { + if f.ID == id { + return f + } + } + return nil +} + +func (d *Property) Field(id id.PropertySchemaFieldID) *PropertyField { + if d == nil || id == "" { + return nil + } + for _, g := range d.Items { + if gi, ok := g.(*PropertyGroup); ok { + for _, f := range gi.Fields { + if f.ID == id { + return f + } + } + } + } + return nil +} + +func (d *PropertySchema) Field(id id.PropertySchemaFieldID) *PropertySchemaField { + if d == nil || id == "" { + return nil + } + for _, g := range d.Groups { + for _, f := range g.Fields { + if f.FieldID == id { + return f + } + } + } + return nil +} + +func (d *Plugin) Extension(id id.PluginExtensionID) *PluginExtension { + if d == nil || id == "" { + return nil + } + for _, f := range d.Extensions { + if f.ExtensionID == id { + return f + } + } + return nil +} + +func (d *Infobox) Field(id id.ID) *InfoboxField { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Fields { + if f.ID == id { + return f + } + } + return nil +} + +func (d 
*MergedInfobox) Field(id id.ID) *MergedInfoboxField { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Fields { + if f.OriginalID == id { + return f + } + } + return nil +} + +func AttachParentLayer(layers []*Layer, parent id.ID) []Layer { + if layers == nil { + return nil + } + res := make([]Layer, 0, len(layers)) + for _, l := range layers { + if l == nil { + res = append(res, nil) + continue + } + l2 := *l + if l2 == nil { + res = append(res, nil) + continue + } + if li, ok := l2.(*LayerItem); ok { + li.ParentID = &parent + res = append(res, li) + } else if lg, ok := l2.(*LayerGroup); ok { + lg.ParentID = &parent + res = append(res, lg) + } + } + return res +} + +func NewEmptyPageInfo() *PageInfo { + return toPageInfo(usecase.NewPageInfo(0, nil, nil, false, false)) +} + +func (d *PropertyGroup) Field(id id.PropertySchemaFieldID) *PropertyField { + if d == nil || id == "" { + return nil + } + for _, f := range d.Fields { + if f.ID == id { + return f + } + } + return nil +} + +func (d *PropertySchema) Group(id id.PropertySchemaFieldID) *PropertySchemaGroup { + if d == nil || id == "" { + return nil + } + for _, f := range d.Groups { + if f.SchemaGroupID == id { + return f + } + } + return nil +} + +func (d *Property) Item(id id.ID) PropertyItem { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Items { + switch g := f.(type) { + case *PropertyGroup: + if g.ID == id { + return g + } + case *PropertyGroupList: + if g.ID == id { + return g + } + h := g.Group(id) + if h != nil { + return h + } + } + } + return nil +} + +func (d *PropertyGroupList) Group(id id.ID) *PropertyGroup { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Groups { + if f.ID == id { + return f + } + } + return nil +} + +func (d *MergedProperty) PropertyID() *id.ID { + if d.OriginalID != nil { + return d.OriginalID + } else if d.ParentID != nil { + return d.ParentID + } + return nil +} + +func (d *MergedProperty) GroupByOriginal(id id.ID) 
*MergedPropertyGroup { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Groups { + if f.OriginalID != nil && *f.OriginalID == id { + return f + } + } + return nil +} + +func (d *MergedProperty) GroupByParent(id id.ID) *MergedPropertyGroup { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Groups { + if f.ParentID != nil && *f.ParentID == id { + return f + } + } + return nil +} + +func (d *MergedPropertyGroup) PropertyID() *id.ID { + if d.OriginalID != nil { + return d.OriginalID + } else if d.ParentID != nil { + return d.ParentID + } + return nil +} + +func (d *MergedPropertyGroup) GroupByOriginal(id id.ID) *MergedPropertyGroup { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Groups { + if f.OriginalID != nil && *f.OriginalID == id { + return f + } + } + return nil +} + +func (d *MergedPropertyGroup) GroupByParent(id id.ID) *MergedPropertyGroup { + if d == nil || id.IsNil() { + return nil + } + for _, f := range d.Groups { + if f.ParentID != nil && *f.ParentID == id { + return f + } + } + return nil +} + +func (s *Scene) Widget(pluginID id.PluginID, extensionID id.PluginExtensionID) *SceneWidget { + if s == nil { + return nil + } + for _, w := range s.Widgets { + if w.PluginID == pluginID && w.ExtensionID == extensionID { + return w + } + } + return nil +} + +func (s *Scene) Plugin(pluginID id.PluginID) *ScenePlugin { + if s == nil { + return nil + } + for _, p := range s.Plugins { + if p.PluginID == pluginID { + return p + } + } + return nil +} diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go new file mode 100644 index 000000000..111bd2ada --- /dev/null +++ b/internal/adapter/graphql/models_gen.go @@ -0,0 +1,1725 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. 
+ +package graphql + +import ( + "fmt" + "io" + "net/url" + "strconv" + "time" + + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Layer interface { + IsLayer() +} + +type Layers interface { + IsLayers() +} + +type Node interface { + IsNode() +} + +type PropertyItem interface { + IsPropertyItem() +} + +type AddDatasetSchemaInput struct { + SceneID id.ID `json:"sceneId"` + Name string `json:"name"` + Representativefield *id.ID `json:"representativefield"` +} + +type AddDatasetSchemaPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type AddDynamicDatasetInput struct { + DatasetSchemaID id.ID `json:"datasetSchemaId"` + Author string `json:"author"` + Content string `json:"content"` + Lat *float64 `json:"lat"` + Lng *float64 `json:"lng"` + Target *string `json:"target"` +} + +type AddDynamicDatasetPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` + Dataset *Dataset `json:"dataset"` +} + +type AddDynamicDatasetSchemaInput struct { + SceneID id.ID `json:"sceneId"` +} + +type AddDynamicDatasetSchemaPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type AddInfoboxFieldInput struct { + LayerID id.ID `json:"layerId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` + Index *int `json:"index"` +} + +type AddInfoboxFieldPayload struct { + InfoboxField *InfoboxField `json:"infoboxField"` + Layer Layer `json:"layer"` +} + +type AddLayerGroupInput struct { + ParentLayerID id.ID `json:"parentLayerId"` + PluginID *id.PluginID `json:"pluginId"` + ExtensionID *id.PluginExtensionID `json:"extensionId"` + Index *int `json:"index"` + LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` + Name *string `json:"name"` +} + +type AddLayerGroupPayload struct { + Layer *LayerGroup `json:"layer"` + ParentLayer *LayerGroup `json:"parentLayer"` + Index *int 
`json:"index"` +} + +type AddLayerItemInput struct { + ParentLayerID id.ID `json:"parentLayerId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` + Index *int `json:"index"` + Name *string `json:"name"` + Lat *float64 `json:"lat"` + Lng *float64 `json:"lng"` +} + +type AddLayerItemPayload struct { + Layer *LayerItem `json:"layer"` + ParentLayer *LayerGroup `json:"parentLayer"` + Index *int `json:"index"` +} + +type AddMemberToTeamInput struct { + TeamID id.ID `json:"teamId"` + UserID id.ID `json:"userId"` + Role Role `json:"role"` +} + +type AddMemberToTeamPayload struct { + Team *Team `json:"team"` +} + +type AddPropertyItemInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` + Index *int `json:"index"` + NameFieldValue interface{} `json:"nameFieldValue"` + NameFieldType *ValueType `json:"nameFieldType"` +} + +type AddWidgetInput struct { + SceneID id.ID `json:"sceneId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` +} + +type AddWidgetPayload struct { + Scene *Scene `json:"scene"` + SceneWidget *SceneWidget `json:"sceneWidget"` +} + +type Asset struct { + ID id.ID `json:"id"` + CreatedAt time.Time `json:"createdAt"` + TeamID id.ID `json:"teamId"` + Name string `json:"name"` + Size int64 `json:"size"` + URL string `json:"url"` + ContentType string `json:"contentType"` + Team *Team `json:"team"` +} + +func (Asset) IsNode() {} + +type AssetConnection struct { + Edges []*AssetEdge `json:"edges"` + Nodes []*Asset `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type AssetEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *Asset `json:"node"` +} + +type Camera struct { + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Altitude float64 `json:"altitude"` + Heading float64 `json:"heading"` + Pitch float64 `json:"pitch"` + Roll float64 `json:"roll"` 
+ Fov float64 `json:"fov"` +} + +type CheckProjectAliasPayload struct { + Alias string `json:"alias"` + Available bool `json:"available"` +} + +type CreateAssetInput struct { + TeamID id.ID `json:"teamId"` + File graphql.Upload `json:"file"` +} + +type CreateAssetPayload struct { + Asset *Asset `json:"asset"` +} + +type CreateInfoboxInput struct { + LayerID id.ID `json:"layerId"` +} + +type CreateInfoboxPayload struct { + Layer Layer `json:"layer"` +} + +type CreateProjectInput struct { + TeamID id.ID `json:"teamId"` + Visualizer Visualizer `json:"visualizer"` + Name *string `json:"name"` + Description *string `json:"description"` + ImageURL *url.URL `json:"imageUrl"` + Alias *string `json:"alias"` + Archived *bool `json:"archived"` +} + +type CreateSceneInput struct { + ProjectID id.ID `json:"projectId"` +} + +type CreateScenePayload struct { + Scene *Scene `json:"scene"` +} + +type CreateTeamInput struct { + Name string `json:"name"` +} + +type CreateTeamPayload struct { + Team *Team `json:"team"` +} + +type Dataset struct { + ID id.ID `json:"id"` + Source string `json:"source"` + SchemaID id.ID `json:"schemaId"` + Fields []*DatasetField `json:"fields"` + Schema *DatasetSchema `json:"schema"` + Name *string `json:"name"` +} + +func (Dataset) IsNode() {} + +type DatasetConnection struct { + Edges []*DatasetEdge `json:"edges"` + Nodes []*Dataset `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type DatasetEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *Dataset `json:"node"` +} + +type DatasetField struct { + FieldID id.ID `json:"fieldId"` + SchemaID id.ID `json:"schemaId"` + Source string `json:"source"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` + Schema *DatasetSchema `json:"schema"` + Field *DatasetSchemaField `json:"field"` + ValueRef *Dataset `json:"valueRef"` +} + +type DatasetSchema struct { + ID id.ID `json:"id"` + Source string `json:"source"` + Name string `json:"name"` + 
SceneID id.ID `json:"sceneId"` + Fields []*DatasetSchemaField `json:"fields"` + RepresentativeFieldID *id.ID `json:"representativeFieldId"` + Dynamic *bool `json:"dynamic"` + Datasets *DatasetConnection `json:"datasets"` + Scene *Scene `json:"scene"` + RepresentativeField *DatasetSchemaField `json:"representativeField"` +} + +func (DatasetSchema) IsNode() {} + +type DatasetSchemaConnection struct { + Edges []*DatasetSchemaEdge `json:"edges"` + Nodes []*DatasetSchema `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type DatasetSchemaEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *DatasetSchema `json:"node"` +} + +type DatasetSchemaField struct { + ID id.ID `json:"id"` + Source string `json:"source"` + Name string `json:"name"` + Type ValueType `json:"type"` + SchemaID id.ID `json:"schemaId"` + RefID *id.ID `json:"refId"` + Schema *DatasetSchema `json:"schema"` + Ref *DatasetSchema `json:"ref"` +} + +func (DatasetSchemaField) IsNode() {} + +type DeleteMeInput struct { + UserID id.ID `json:"userId"` +} + +type DeleteMePayload struct { + UserID id.ID `json:"userId"` +} + +type DeleteProjectInput struct { + ProjectID id.ID `json:"projectId"` +} + +type DeleteProjectPayload struct { + ProjectID id.ID `json:"projectId"` +} + +type DeleteTeamInput struct { + TeamID id.ID `json:"teamId"` +} + +type DeleteTeamPayload struct { + TeamID id.ID `json:"teamId"` +} + +type ImportDatasetInput struct { + File graphql.Upload `json:"file"` + SceneID id.ID `json:"sceneId"` + DatasetSchemaID *id.ID `json:"datasetSchemaId"` +} + +type ImportDatasetPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type ImportLayerInput struct { + LayerID id.ID `json:"layerId"` + File graphql.Upload `json:"file"` + Format LayerEncodingFormat `json:"format"` +} + +type ImportLayerPayload struct { + Layers []Layer `json:"layers"` + ParentLayer *LayerGroup `json:"parentLayer"` +} + +type Infobox struct { + LayerID id.ID 
`json:"layerId"` + PropertyID id.ID `json:"propertyId"` + Fields []*InfoboxField `json:"fields"` + LinkedDatasetID *id.ID `json:"linkedDatasetId"` + Layer Layer `json:"layer"` + Property *Property `json:"property"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedInfobox `json:"merged"` +} + +type InfoboxField struct { + ID id.ID `json:"id"` + LayerID id.ID `json:"layerId"` + PropertyID id.ID `json:"propertyId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` + LinkedDatasetID *id.ID `json:"linkedDatasetId"` + Layer Layer `json:"layer"` + Infobox *Infobox `json:"infobox"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedInfoboxField `json:"merged"` +} + +type InstallPluginInput struct { + SceneID id.ID `json:"sceneId"` + PluginID id.PluginID `json:"pluginId"` +} + +type InstallPluginPayload struct { + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type LatLng struct { + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` +} + +type LatLngHeight struct { + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Height float64 `json:"height"` +} + +type LayerGroup struct { + ID id.ID `json:"id"` + Name string `json:"name"` + IsVisible bool `json:"isVisible"` + PropertyID *id.ID `json:"propertyId"` + PluginID *id.PluginID `json:"pluginId"` + ExtensionID *id.PluginExtensionID `json:"extensionId"` + Infobox *Infobox `json:"infobox"` + ParentID *id.ID `json:"parentId"` + LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaId"` + Root bool `json:"root"` + LayerIds []*id.ID `json:"layerIds"` + Parent *LayerGroup `json:"parent"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` + Layers []Layer `json:"layers"` 
+} + +func (LayerGroup) IsLayers() {} +func (LayerGroup) IsLayer() {} + +type LayerItem struct { + ID id.ID `json:"id"` + Name string `json:"name"` + IsVisible bool `json:"isVisible"` + PropertyID *id.ID `json:"propertyId"` + PluginID *id.PluginID `json:"pluginId"` + ExtensionID *id.PluginExtensionID `json:"extensionId"` + Infobox *Infobox `json:"infobox"` + ParentID *id.ID `json:"parentId"` + LinkedDatasetID *id.ID `json:"linkedDatasetId"` + Parent *LayerGroup `json:"parent"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedLayer `json:"merged"` +} + +func (LayerItem) IsLayers() {} +func (LayerItem) IsLayer() {} + +type LinkDatasetToPropertyValueInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + DatasetSchemaIds []*id.ID `json:"datasetSchemaIds"` + DatasetSchemaFieldIds []*id.ID `json:"datasetSchemaFieldIds"` + DatasetIds []*id.ID `json:"datasetIds"` +} + +type MergedInfobox struct { + Property *MergedProperty `json:"property"` + Fields []*MergedInfoboxField `json:"fields"` +} + +type MergedInfoboxField struct { + OriginalID id.ID `json:"originalId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` + Property *MergedProperty `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` +} + +type MergedLayer struct { + OriginalID id.ID `json:"originalId"` + ParentID *id.ID `json:"parentId"` + Property *MergedProperty `json:"property"` + Infobox *MergedInfobox `json:"infobox"` + Original *LayerItem `json:"original"` + Parent *LayerGroup `json:"parent"` +} + +type MergedProperty struct { + OriginalID *id.ID `json:"originalId"` + ParentID *id.ID `json:"parentId"` + SchemaID *id.PropertySchemaID 
`json:"schemaId"` + LinkedDatasetID *id.ID `json:"linkedDatasetId"` + Original *Property `json:"original"` + Parent *Property `json:"parent"` + Schema *PropertySchema `json:"schema"` + LinkedDataset *Dataset `json:"linkedDataset"` + Groups []*MergedPropertyGroup `json:"groups"` +} + +type MergedPropertyField struct { + SchemaID id.PropertySchemaID `json:"schemaId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` + Links []*PropertyFieldLink `json:"links"` + Overridden bool `json:"overridden"` + Schema *PropertySchema `json:"schema"` + Field *PropertySchemaField `json:"field"` + ActualValue interface{} `json:"actualValue"` +} + +type MergedPropertyGroup struct { + OriginalPropertyID *id.ID `json:"originalPropertyId"` + ParentPropertyID *id.ID `json:"parentPropertyId"` + OriginalID *id.ID `json:"originalId"` + ParentID *id.ID `json:"parentId"` + SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + SchemaID *id.PropertySchemaID `json:"schemaId"` + LinkedDatasetID *id.ID `json:"linkedDatasetId"` + Fields []*MergedPropertyField `json:"fields"` + Groups []*MergedPropertyGroup `json:"groups"` + OriginalProperty *Property `json:"originalProperty"` + ParentProperty *Property `json:"parentProperty"` + Original *PropertyGroup `json:"original"` + Parent *PropertyGroup `json:"parent"` + Schema *PropertySchema `json:"schema"` + LinkedDataset *Dataset `json:"linkedDataset"` +} + +type MoveInfoboxFieldInput struct { + LayerID id.ID `json:"layerId"` + InfoboxFieldID id.ID `json:"infoboxFieldId"` + Index int `json:"index"` +} + +type MoveInfoboxFieldPayload struct { + InfoboxFieldID id.ID `json:"infoboxFieldId"` + Layer Layer `json:"layer"` + Index int `json:"index"` +} + +type MoveLayerInput struct { + LayerID id.ID `json:"layerId"` + DestLayerID *id.ID `json:"destLayerId"` + Index *int `json:"index"` +} + +type MoveLayerPayload struct { + LayerID id.ID `json:"layerId"` + FromParentLayer *LayerGroup 
`json:"fromParentLayer"` + ToParentLayer *LayerGroup `json:"toParentLayer"` + Index int `json:"index"` +} + +type MovePropertyItemInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID id.ID `json:"itemId"` + Index int `json:"index"` +} + +type PageInfo struct { + StartCursor *usecase.Cursor `json:"startCursor"` + EndCursor *usecase.Cursor `json:"endCursor"` + HasNextPage bool `json:"hasNextPage"` + HasPreviousPage bool `json:"hasPreviousPage"` +} + +type Plugin struct { + ID id.PluginID `json:"id"` + Name string `json:"name"` + Version string `json:"version"` + Description string `json:"description"` + Author string `json:"author"` + RepositoryURL string `json:"repositoryUrl"` + PropertySchemaID *id.PropertySchemaID `json:"propertySchemaId"` + Extensions []*PluginExtension `json:"extensions"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + AllTranslatedName map[string]string `json:"allTranslatedName"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string `json:"translatedDescription"` + PropertySchema *PropertySchema `json:"propertySchema"` +} + +type PluginExtension struct { + ExtensionID id.PluginExtensionID `json:"extensionId"` + PluginID id.PluginID `json:"pluginId"` + Type PluginExtensionType `json:"type"` + Name string `json:"name"` + Description string `json:"description"` + Icon string `json:"icon"` + Visualizer Visualizer `json:"visualizer"` + PropertySchemaID id.PropertySchemaID `json:"propertySchemaId"` + AllTranslatedName map[string]string `json:"allTranslatedName"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + Plugin *Plugin `json:"plugin"` + SceneWidget *SceneWidget `json:"sceneWidget"` + PropertySchema *PropertySchema `json:"propertySchema"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string 
`json:"translatedDescription"` +} + +type Project struct { + ID id.ID `json:"id"` + IsArchived bool `json:"isArchived"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + PublishedAt *time.Time `json:"publishedAt"` + Name string `json:"name"` + Description string `json:"description"` + Alias string `json:"alias"` + PublicTitle string `json:"publicTitle"` + PublicDescription string `json:"publicDescription"` + PublicImage string `json:"publicImage"` + PublicNoIndex bool `json:"publicNoIndex"` + ImageURL *url.URL `json:"imageUrl"` + TeamID id.ID `json:"teamId"` + Visualizer Visualizer `json:"visualizer"` + PublishmentStatus PublishmentStatus `json:"publishmentStatus"` + Team *Team `json:"team"` + Scene *Scene `json:"scene"` +} + +func (Project) IsNode() {} + +type ProjectConnection struct { + Edges []*ProjectEdge `json:"edges"` + Nodes []*Project `json:"nodes"` + PageInfo *PageInfo `json:"pageInfo"` + TotalCount int `json:"totalCount"` +} + +type ProjectEdge struct { + Cursor usecase.Cursor `json:"cursor"` + Node *Project `json:"node"` +} + +type ProjectPayload struct { + Project *Project `json:"project"` +} + +type Property struct { + ID id.ID `json:"id"` + SchemaID id.PropertySchemaID `json:"schemaId"` + Items []PropertyItem `json:"items"` + Schema *PropertySchema `json:"schema"` + Layer Layer `json:"layer"` + Merged *MergedProperty `json:"merged"` +} + +func (Property) IsNode() {} + +type PropertyCondition struct { + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` +} + +type PropertyField struct { + ID id.PropertySchemaFieldID `json:"id"` + ParentID id.ID `json:"parentId"` + SchemaID id.PropertySchemaID `json:"schemaId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Links []*PropertyFieldLink `json:"links"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` + Parent *Property `json:"parent"` + Schema *PropertySchema `json:"schema"` + Field 
*PropertySchemaField `json:"field"` + ActualValue interface{} `json:"actualValue"` +} + +type PropertyFieldLink struct { + DatasetID *id.ID `json:"datasetId"` + DatasetSchemaID id.ID `json:"datasetSchemaId"` + DatasetSchemaFieldID id.ID `json:"datasetSchemaFieldId"` + Dataset *Dataset `json:"dataset"` + DatasetField *DatasetField `json:"datasetField"` + DatasetSchema *DatasetSchema `json:"datasetSchema"` + DatasetSchemaField *DatasetSchemaField `json:"datasetSchemaField"` +} + +type PropertyFieldPayload struct { + Property *Property `json:"property"` + PropertyField *PropertyField `json:"propertyField"` +} + +type PropertyGroup struct { + ID id.ID `json:"id"` + SchemaID id.PropertySchemaID `json:"schemaId"` + SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + Fields []*PropertyField `json:"fields"` + Schema *PropertySchema `json:"schema"` + SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` +} + +func (PropertyGroup) IsPropertyItem() {} + +type PropertyGroupList struct { + ID id.ID `json:"id"` + SchemaID id.PropertySchemaID `json:"schemaId"` + SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + Groups []*PropertyGroup `json:"groups"` + Schema *PropertySchema `json:"schema"` + SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` +} + +func (PropertyGroupList) IsPropertyItem() {} + +type PropertyItemPayload struct { + Property *Property `json:"property"` + PropertyItem PropertyItem `json:"propertyItem"` +} + +type PropertyLinkableFields struct { + SchemaID id.PropertySchemaID `json:"schemaId"` + Latlng *id.PropertySchemaFieldID `json:"latlng"` + URL *id.PropertySchemaFieldID `json:"url"` + LatlngField *PropertySchemaField `json:"latlngField"` + URLField *PropertySchemaField `json:"urlField"` + Schema *PropertySchema `json:"schema"` +} + +type PropertySchema struct { + ID id.PropertySchemaID `json:"id"` + Groups []*PropertySchemaGroup `json:"groups"` + LinkableFields *PropertyLinkableFields `json:"linkableFields"` +} + +type 
PropertySchemaField struct { + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Type ValueType `json:"type"` + Title string `json:"title"` + Name string `json:"name"` + Description string `json:"description"` + Prefix *string `json:"prefix"` + Suffix *string `json:"suffix"` + DefaultValue interface{} `json:"defaultValue"` + UI *PropertySchemaFieldUI `json:"ui"` + Min *float64 `json:"min"` + Max *float64 `json:"max"` + Choices []*PropertySchemaFieldChoice `json:"choices"` + IsAvailableIf *PropertyCondition `json:"isAvailableIf"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + AllTranslatedName map[string]string `json:"allTranslatedName"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + TranslatedTitle string `json:"translatedTitle"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string `json:"translatedDescription"` +} + +type PropertySchemaFieldChoice struct { + Key string `json:"key"` + Title string `json:"title"` + Label string `json:"label"` + Icon *string `json:"icon"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + AllTranslatedLabel map[string]string `json:"allTranslatedLabel"` + TranslatedTitle string `json:"translatedTitle"` + TranslatedLabel string `json:"translatedLabel"` +} + +type PropertySchemaGroup struct { + SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + SchemaID id.PropertySchemaID `json:"schemaId"` + Fields []*PropertySchemaField `json:"fields"` + IsList bool `json:"isList"` + IsAvailableIf *PropertyCondition `json:"isAvailableIf"` + Title *string `json:"title"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + Name *id.PropertySchemaFieldID `json:"name"` + RepresentativeFieldID *id.PropertySchemaFieldID `json:"representativeFieldId"` + RepresentativeField *PropertySchemaField `json:"representativeField"` + Schema *PropertySchema `json:"schema"` + TranslatedTitle string `json:"translatedTitle"` +} + +type 
PublishProjectInput struct { + ProjectID id.ID `json:"projectId"` + Alias *string `json:"alias"` + Status PublishmentStatus `json:"status"` +} + +type Rect struct { + West float64 `json:"west"` + South float64 `json:"south"` + East float64 `json:"east"` + North float64 `json:"north"` +} + +type RemoveAssetInput struct { + AssetID id.ID `json:"assetId"` +} + +type RemoveAssetPayload struct { + AssetID id.ID `json:"assetId"` +} + +type RemoveDatasetSchemaInput struct { + SchemaID id.ID `json:"schemaId"` + Force *bool `json:"force"` +} + +type RemoveDatasetSchemaPayload struct { + SchemaID id.ID `json:"schemaId"` +} + +type RemoveInfoboxFieldInput struct { + LayerID id.ID `json:"layerId"` + InfoboxFieldID id.ID `json:"infoboxFieldId"` +} + +type RemoveInfoboxFieldPayload struct { + InfoboxFieldID id.ID `json:"infoboxFieldId"` + Layer Layer `json:"layer"` +} + +type RemoveInfoboxInput struct { + LayerID id.ID `json:"layerId"` +} + +type RemoveInfoboxPayload struct { + Layer Layer `json:"layer"` +} + +type RemoveLayerInput struct { + LayerID id.ID `json:"layerId"` +} + +type RemoveLayerPayload struct { + LayerID id.ID `json:"layerId"` + ParentLayer *LayerGroup `json:"parentLayer"` +} + +type RemoveMemberFromTeamInput struct { + TeamID id.ID `json:"teamId"` + UserID id.ID `json:"userId"` +} + +type RemoveMemberFromTeamPayload struct { + Team *Team `json:"team"` +} + +type RemoveMyAuthInput struct { + Auth string `json:"auth"` +} + +type RemovePropertyFieldInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` +} + +type RemovePropertyItemInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID id.ID `json:"itemId"` +} + +type RemoveWidgetInput struct { + SceneID id.ID `json:"sceneId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID 
`json:"extensionId"` +} + +type RemoveWidgetPayload struct { + Scene *Scene `json:"scene"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` +} + +type Scene struct { + ID id.ID `json:"id"` + ProjectID id.ID `json:"projectId"` + TeamID id.ID `json:"teamId"` + PropertyID id.ID `json:"propertyId"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + RootLayerID id.ID `json:"rootLayerId"` + Widgets []*SceneWidget `json:"widgets"` + Plugins []*ScenePlugin `json:"plugins"` + DynamicDatasetSchemas []*DatasetSchema `json:"dynamicDatasetSchemas"` + Project *Project `json:"project"` + Team *Team `json:"team"` + Property *Property `json:"property"` + RootLayer *LayerGroup `json:"rootLayer"` + LockMode SceneLockMode `json:"lockMode"` + DatasetSchemas *DatasetSchemaConnection `json:"datasetSchemas"` +} + +func (Scene) IsNode() {} + +type ScenePlugin struct { + PluginID id.PluginID `json:"pluginId"` + PropertyID *id.ID `json:"propertyId"` + Plugin *Plugin `json:"plugin"` + Property *Property `json:"property"` +} + +type SceneWidget struct { + ID id.ID `json:"id"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` + PropertyID id.ID `json:"propertyId"` + Enabled bool `json:"enabled"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + Property *Property `json:"property"` +} + +type SearchedUser struct { + UserID id.ID `json:"userId"` + UserName string `json:"userName"` + UserEmail string `json:"userEmail"` +} + +type SignupInput struct { + UserID *id.ID `json:"userId"` + TeamID *id.ID `json:"teamId"` + Secret *string `json:"secret"` +} + +type SignupPayload struct { + User *User `json:"user"` + Team *Team `json:"team"` +} + +type SyncDatasetInput struct { + SceneID id.ID `json:"sceneId"` + URL string `json:"url"` +} + +type SyncDatasetPayload struct { + SceneID id.ID `json:"sceneId"` + URL string `json:"url"` + DatasetSchema 
[]*DatasetSchema `json:"datasetSchema"` + Dataset []*Dataset `json:"dataset"` +} + +type Team struct { + ID id.ID `json:"id"` + Name string `json:"name"` + Members []*TeamMember `json:"members"` + Personal bool `json:"personal"` + Assets *AssetConnection `json:"assets"` + Projects *ProjectConnection `json:"projects"` +} + +func (Team) IsNode() {} + +type TeamMember struct { + UserID id.ID `json:"userId"` + Role Role `json:"role"` + User *User `json:"user"` +} + +type Typography struct { + FontFamily *string `json:"fontFamily"` + FontWeight *string `json:"fontWeight"` + FontSize *int `json:"fontSize"` + Color *string `json:"color"` + TextAlign *TextAlign `json:"textAlign"` + Bold *bool `json:"bold"` + Italic *bool `json:"italic"` + Underline *bool `json:"underline"` +} + +type UninstallPluginInput struct { + SceneID id.ID `json:"sceneId"` + PluginID id.PluginID `json:"pluginId"` +} + +type UninstallPluginPayload struct { + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type UnlinkPropertyValueInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` +} + +type UpdateDatasetSchemaInput struct { + SchemaID id.ID `json:"schemaId"` + Name string `json:"name"` +} + +type UpdateDatasetSchemaPayload struct { + DatasetSchema *DatasetSchema `json:"datasetSchema"` +} + +type UpdateLayerInput struct { + LayerID id.ID `json:"layerId"` + Name *string `json:"name"` + Visible *bool `json:"visible"` +} + +type UpdateLayerPayload struct { + Layer Layer `json:"layer"` +} + +type UpdateMeInput struct { + Name *string `json:"name"` + Email *string `json:"email"` + Lang *string `json:"lang"` + Theme *Theme `json:"theme"` + Password *string `json:"password"` + PasswordConfirmation *string `json:"passwordConfirmation"` +} + +type UpdateMePayload struct { + User *User `json:"user"` +} + +type 
UpdateMemberOfTeamInput struct { + TeamID id.ID `json:"teamId"` + UserID id.ID `json:"userId"` + Role Role `json:"role"` +} + +type UpdateMemberOfTeamPayload struct { + Team *Team `json:"team"` +} + +type UpdateProjectInput struct { + ProjectID id.ID `json:"projectId"` + Name *string `json:"name"` + Description *string `json:"description"` + Archived *bool `json:"archived"` + Alias *string `json:"alias"` + ImageURL *url.URL `json:"imageUrl"` + PublicTitle *string `json:"publicTitle"` + PublicDescription *string `json:"publicDescription"` + PublicImage *graphql.Upload `json:"publicImage"` + PublicNoIndex *bool `json:"publicNoIndex"` + DeleteImageURL *bool `json:"deleteImageUrl"` + DeletePublicImage *bool `json:"deletePublicImage"` +} + +type UpdatePropertyItemInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` + Operations []*UpdatePropertyItemOperationInput `json:"operations"` +} + +type UpdatePropertyItemOperationInput struct { + Operation ListOperation `json:"operation"` + ItemID *id.ID `json:"itemId"` + Index *int `json:"index"` + NameFieldValue interface{} `json:"nameFieldValue"` + NameFieldType *ValueType `json:"nameFieldType"` +} + +type UpdatePropertyValueCameraInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Altitude float64 `json:"altitude"` + Heading float64 `json:"heading"` + Pitch float64 `json:"pitch"` + Roll float64 `json:"roll"` + Fov float64 `json:"fov"` +} + +type UpdatePropertyValueInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` +} + +type 
UpdatePropertyValueLatLngHeightInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` + Height float64 `json:"height"` +} + +type UpdatePropertyValueLatLngInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + Lat float64 `json:"lat"` + Lng float64 `json:"lng"` +} + +type UpdatePropertyValueTypographyInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + FontFamily *string `json:"fontFamily"` + FontWeight *string `json:"fontWeight"` + FontSize *int `json:"fontSize"` + Color *string `json:"color"` + TextAlign *TextAlign `json:"textAlign"` + Bold *bool `json:"bold"` + Italic *bool `json:"italic"` + Underline *bool `json:"underline"` +} + +type UpdateTeamInput struct { + TeamID id.ID `json:"teamId"` + Name string `json:"name"` +} + +type UpdateTeamPayload struct { + Team *Team `json:"team"` +} + +type UpdateWidgetInput struct { + SceneID id.ID `json:"sceneId"` + PluginID id.PluginID `json:"pluginId"` + ExtensionID id.PluginExtensionID `json:"extensionId"` + Enabled *bool `json:"enabled"` +} + +type UpdateWidgetPayload struct { + Scene *Scene `json:"scene"` + SceneWidget *SceneWidget `json:"sceneWidget"` +} + +type UpgradePluginInput struct { + SceneID id.ID `json:"sceneId"` + PluginID id.PluginID `json:"pluginId"` + ToPluginID id.PluginID `json:"toPluginId"` +} + +type UpgradePluginPayload struct { + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` +} + +type UploadFileToPropertyInput struct { + PropertyID id.ID `json:"propertyId"` + SchemaItemID 
*id.PropertySchemaFieldID `json:"schemaItemId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + File graphql.Upload `json:"file"` +} + +type UploadPluginInput struct { + File graphql.Upload `json:"file"` +} + +type UploadPluginPayload struct { + Plugin *Plugin `json:"plugin"` +} + +type User struct { + ID id.ID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + Lang string `json:"lang"` + Theme Theme `json:"theme"` + MyTeamID id.ID `json:"myTeamId"` + Auths []string `json:"auths"` + Teams []*Team `json:"teams"` + MyTeam *Team `json:"myTeam"` +} + +func (User) IsNode() {} + +type LayerEncodingFormat string + +const ( + LayerEncodingFormatKml LayerEncodingFormat = "KML" + LayerEncodingFormatCzml LayerEncodingFormat = "CZML" + LayerEncodingFormatGeojson LayerEncodingFormat = "GEOJSON" + LayerEncodingFormatShape LayerEncodingFormat = "SHAPE" + LayerEncodingFormatReearth LayerEncodingFormat = "REEARTH" +) + +var AllLayerEncodingFormat = []LayerEncodingFormat{ + LayerEncodingFormatKml, + LayerEncodingFormatCzml, + LayerEncodingFormatGeojson, + LayerEncodingFormatShape, + LayerEncodingFormatReearth, +} + +func (e LayerEncodingFormat) IsValid() bool { + switch e { + case LayerEncodingFormatKml, LayerEncodingFormatCzml, LayerEncodingFormatGeojson, LayerEncodingFormatShape, LayerEncodingFormatReearth: + return true + } + return false +} + +func (e LayerEncodingFormat) String() string { + return string(e) +} + +func (e *LayerEncodingFormat) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = LayerEncodingFormat(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid LayerEncodingFormat", str) + } + return nil +} + +func (e LayerEncodingFormat) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type ListOperation string + +const ( + ListOperationAdd ListOperation = "ADD" + ListOperationMove ListOperation = 
"MOVE" + ListOperationRemove ListOperation = "REMOVE" +) + +var AllListOperation = []ListOperation{ + ListOperationAdd, + ListOperationMove, + ListOperationRemove, +} + +func (e ListOperation) IsValid() bool { + switch e { + case ListOperationAdd, ListOperationMove, ListOperationRemove: + return true + } + return false +} + +func (e ListOperation) String() string { + return string(e) +} + +func (e *ListOperation) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = ListOperation(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid ListOperation", str) + } + return nil +} + +func (e ListOperation) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type NodeType string + +const ( + NodeTypeUser NodeType = "USER" + NodeTypeTeam NodeType = "TEAM" + NodeTypeProject NodeType = "PROJECT" + NodeTypePlugin NodeType = "PLUGIN" + NodeTypeScene NodeType = "SCENE" + NodeTypePropertySchema NodeType = "PROPERTY_SCHEMA" + NodeTypeProperty NodeType = "PROPERTY" + NodeTypeDatasetSchema NodeType = "DATASET_SCHEMA" + NodeTypeDataset NodeType = "DATASET" + NodeTypeLayerGroup NodeType = "LAYER_GROUP" + NodeTypeLayerItem NodeType = "LAYER_ITEM" +) + +var AllNodeType = []NodeType{ + NodeTypeUser, + NodeTypeTeam, + NodeTypeProject, + NodeTypePlugin, + NodeTypeScene, + NodeTypePropertySchema, + NodeTypeProperty, + NodeTypeDatasetSchema, + NodeTypeDataset, + NodeTypeLayerGroup, + NodeTypeLayerItem, +} + +func (e NodeType) IsValid() bool { + switch e { + case NodeTypeUser, NodeTypeTeam, NodeTypeProject, NodeTypePlugin, NodeTypeScene, NodeTypePropertySchema, NodeTypeProperty, NodeTypeDatasetSchema, NodeTypeDataset, NodeTypeLayerGroup, NodeTypeLayerItem: + return true + } + return false +} + +func (e NodeType) String() string { + return string(e) +} + +func (e *NodeType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") 
+ } + + *e = NodeType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid NodeType", str) + } + return nil +} + +func (e NodeType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type PluginExtensionType string + +const ( + PluginExtensionTypePrimitive PluginExtensionType = "PRIMITIVE" + PluginExtensionTypeWidget PluginExtensionType = "WIDGET" + PluginExtensionTypeBlock PluginExtensionType = "BLOCK" + PluginExtensionTypeVisualizer PluginExtensionType = "VISUALIZER" + PluginExtensionTypeInfobox PluginExtensionType = "INFOBOX" +) + +var AllPluginExtensionType = []PluginExtensionType{ + PluginExtensionTypePrimitive, + PluginExtensionTypeWidget, + PluginExtensionTypeBlock, + PluginExtensionTypeVisualizer, + PluginExtensionTypeInfobox, +} + +func (e PluginExtensionType) IsValid() bool { + switch e { + case PluginExtensionTypePrimitive, PluginExtensionTypeWidget, PluginExtensionTypeBlock, PluginExtensionTypeVisualizer, PluginExtensionTypeInfobox: + return true + } + return false +} + +func (e PluginExtensionType) String() string { + return string(e) +} + +func (e *PluginExtensionType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = PluginExtensionType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid PluginExtensionType", str) + } + return nil +} + +func (e PluginExtensionType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type PropertySchemaFieldUI string + +const ( + PropertySchemaFieldUILayer PropertySchemaFieldUI = "LAYER" + PropertySchemaFieldUIMultiline PropertySchemaFieldUI = "MULTILINE" + PropertySchemaFieldUISelection PropertySchemaFieldUI = "SELECTION" + PropertySchemaFieldUIColor PropertySchemaFieldUI = "COLOR" + PropertySchemaFieldUIRange PropertySchemaFieldUI = "RANGE" + PropertySchemaFieldUIImage PropertySchemaFieldUI = "IMAGE" + PropertySchemaFieldUIVideo PropertySchemaFieldUI = "VIDEO" + 
PropertySchemaFieldUIFile PropertySchemaFieldUI = "FILE" + PropertySchemaFieldUICameraPose PropertySchemaFieldUI = "CAMERA_POSE" +) + +var AllPropertySchemaFieldUI = []PropertySchemaFieldUI{ + PropertySchemaFieldUILayer, + PropertySchemaFieldUIMultiline, + PropertySchemaFieldUISelection, + PropertySchemaFieldUIColor, + PropertySchemaFieldUIRange, + PropertySchemaFieldUIImage, + PropertySchemaFieldUIVideo, + PropertySchemaFieldUIFile, + PropertySchemaFieldUICameraPose, +} + +func (e PropertySchemaFieldUI) IsValid() bool { + switch e { + case PropertySchemaFieldUILayer, PropertySchemaFieldUIMultiline, PropertySchemaFieldUISelection, PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, PropertySchemaFieldUICameraPose: + return true + } + return false +} + +func (e PropertySchemaFieldUI) String() string { + return string(e) +} + +func (e *PropertySchemaFieldUI) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = PropertySchemaFieldUI(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid PropertySchemaFieldUI", str) + } + return nil +} + +func (e PropertySchemaFieldUI) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type PublishmentStatus string + +const ( + PublishmentStatusPublic PublishmentStatus = "PUBLIC" + PublishmentStatusLimited PublishmentStatus = "LIMITED" + PublishmentStatusPrivate PublishmentStatus = "PRIVATE" +) + +var AllPublishmentStatus = []PublishmentStatus{ + PublishmentStatusPublic, + PublishmentStatusLimited, + PublishmentStatusPrivate, +} + +func (e PublishmentStatus) IsValid() bool { + switch e { + case PublishmentStatusPublic, PublishmentStatusLimited, PublishmentStatusPrivate: + return true + } + return false +} + +func (e PublishmentStatus) String() string { + return string(e) +} + +func (e *PublishmentStatus) UnmarshalGQL(v interface{}) error { + 
str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = PublishmentStatus(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid PublishmentStatus", str) + } + return nil +} + +func (e PublishmentStatus) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type Role string + +const ( + RoleReader Role = "READER" + RoleWriter Role = "WRITER" + RoleOwner Role = "OWNER" +) + +var AllRole = []Role{ + RoleReader, + RoleWriter, + RoleOwner, +} + +func (e Role) IsValid() bool { + switch e { + case RoleReader, RoleWriter, RoleOwner: + return true + } + return false +} + +func (e Role) String() string { + return string(e) +} + +func (e *Role) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = Role(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid Role", str) + } + return nil +} + +func (e Role) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type SceneLockMode string + +const ( + SceneLockModeFree SceneLockMode = "FREE" + SceneLockModePending SceneLockMode = "PENDING" + SceneLockModeDatasetSyncing SceneLockMode = "DATASET_SYNCING" + SceneLockModePluginUpgrading SceneLockMode = "PLUGIN_UPGRADING" + SceneLockModePublishing SceneLockMode = "PUBLISHING" +) + +var AllSceneLockMode = []SceneLockMode{ + SceneLockModeFree, + SceneLockModePending, + SceneLockModeDatasetSyncing, + SceneLockModePluginUpgrading, + SceneLockModePublishing, +} + +func (e SceneLockMode) IsValid() bool { + switch e { + case SceneLockModeFree, SceneLockModePending, SceneLockModeDatasetSyncing, SceneLockModePluginUpgrading, SceneLockModePublishing: + return true + } + return false +} + +func (e SceneLockMode) String() string { + return string(e) +} + +func (e *SceneLockMode) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = SceneLockMode(str) + if 
!e.IsValid() { + return fmt.Errorf("%s is not a valid SceneLockMode", str) + } + return nil +} + +func (e SceneLockMode) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type TextAlign string + +const ( + TextAlignLeft TextAlign = "LEFT" + TextAlignCenter TextAlign = "CENTER" + TextAlignRight TextAlign = "RIGHT" + TextAlignJustify TextAlign = "JUSTIFY" + TextAlignJustifyAll TextAlign = "JUSTIFY_ALL" +) + +var AllTextAlign = []TextAlign{ + TextAlignLeft, + TextAlignCenter, + TextAlignRight, + TextAlignJustify, + TextAlignJustifyAll, +} + +func (e TextAlign) IsValid() bool { + switch e { + case TextAlignLeft, TextAlignCenter, TextAlignRight, TextAlignJustify, TextAlignJustifyAll: + return true + } + return false +} + +func (e TextAlign) String() string { + return string(e) +} + +func (e *TextAlign) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = TextAlign(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid TextAlign", str) + } + return nil +} + +func (e TextAlign) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type Theme string + +const ( + ThemeDefault Theme = "DEFAULT" + ThemeLight Theme = "LIGHT" + ThemeDark Theme = "DARK" +) + +var AllTheme = []Theme{ + ThemeDefault, + ThemeLight, + ThemeDark, +} + +func (e Theme) IsValid() bool { + switch e { + case ThemeDefault, ThemeLight, ThemeDark: + return true + } + return false +} + +func (e Theme) String() string { + return string(e) +} + +func (e *Theme) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = Theme(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid Theme", str) + } + return nil +} + +func (e Theme) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type ValueType string + +const ( + ValueTypeBool ValueType = "BOOL" + ValueTypeNumber ValueType = 
"NUMBER" + ValueTypeString ValueType = "STRING" + ValueTypeRef ValueType = "REF" + ValueTypeURL ValueType = "URL" + ValueTypeLatlng ValueType = "LATLNG" + ValueTypeLatlngheight ValueType = "LATLNGHEIGHT" + ValueTypeCamera ValueType = "CAMERA" + ValueTypeTypography ValueType = "TYPOGRAPHY" + ValueTypeCoordinates ValueType = "COORDINATES" + ValueTypePolygon ValueType = "POLYGON" + ValueTypeRect ValueType = "RECT" +) + +var AllValueType = []ValueType{ + ValueTypeBool, + ValueTypeNumber, + ValueTypeString, + ValueTypeRef, + ValueTypeURL, + ValueTypeLatlng, + ValueTypeLatlngheight, + ValueTypeCamera, + ValueTypeTypography, + ValueTypeCoordinates, + ValueTypePolygon, + ValueTypeRect, +} + +func (e ValueType) IsValid() bool { + switch e { + case ValueTypeBool, ValueTypeNumber, ValueTypeString, ValueTypeRef, ValueTypeURL, ValueTypeLatlng, ValueTypeLatlngheight, ValueTypeCamera, ValueTypeTypography, ValueTypeCoordinates, ValueTypePolygon, ValueTypeRect: + return true + } + return false +} + +func (e ValueType) String() string { + return string(e) +} + +func (e *ValueType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = ValueType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid ValueType", str) + } + return nil +} + +func (e ValueType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type Visualizer string + +const ( + VisualizerCesium Visualizer = "CESIUM" +) + +var AllVisualizer = []Visualizer{ + VisualizerCesium, +} + +func (e Visualizer) IsValid() bool { + switch e { + case VisualizerCesium: + return true + } + return false +} + +func (e Visualizer) String() string { + return string(e) +} + +func (e *Visualizer) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = Visualizer(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid Visualizer", str) + } + return nil +} + 
+func (e Visualizer) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} diff --git a/internal/adapter/graphql/scalar.go b/internal/adapter/graphql/scalar.go new file mode 100644 index 000000000..4eaaf9304 --- /dev/null +++ b/internal/adapter/graphql/scalar.go @@ -0,0 +1,122 @@ +package graphql + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "net/url" + "strconv" + + graphql1 "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func MarshalURL(t url.URL) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalURL(v interface{}) (url.URL, error) { + if tmpStr, ok := v.(string); ok { + u, err := url.Parse(tmpStr) + if u != nil { + return *u, err + } + return url.URL{}, err + } + return url.URL{}, errors.New("Invalid URL") +} + +func MarshalID(t id.ID) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalID(v interface{}) (id.ID, error) { + if tmpStr, ok := v.(string); ok { + return id.NewIDWith(tmpStr) + } + return id.ID{}, errors.New("Invalid ID") +} + +func MarshalCursor(t usecase.Cursor) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(string(t))) + }) +} + +func UnmarshalCursor(v interface{}) (usecase.Cursor, error) { + if tmpStr, ok := v.(string); ok { + return usecase.Cursor(tmpStr), nil + } + return usecase.Cursor(""), errors.New("Invalid cursor") +} + +func MarshalPluginID(t id.PluginID) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPluginID(v interface{}) (id.PluginID, error) { + if tmpStr, ok := v.(string); ok { + return id.PluginIDFrom(tmpStr) + } + return id.PluginID{}, 
errors.New("Invalid ID") +} + +func MarshalPluginExtensionID(t id.PluginExtensionID) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPluginExtensionID(v interface{}) (id.PluginExtensionID, error) { + if tmpStr, ok := v.(string); ok { + return id.PluginExtensionID(tmpStr), nil + } + return id.PluginExtensionID(""), errors.New("Invalid ID") +} + +func MarshalPropertySchemaID(t id.PropertySchemaID) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaIDFrom(tmpStr) + } + return id.PropertySchemaID{}, errors.New("Invalid ID") +} + +func MarshalPropertySchemaFieldID(t id.PropertySchemaFieldID) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaFieldID(v interface{}) (id.PropertySchemaFieldID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaFieldID(tmpStr), nil + } + return id.PropertySchemaFieldID(""), errors.New("Invalid ID") +} + +func MarshalMap(val map[string]string) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _ = json.NewEncoder(w).Encode(val) + }) +} + +func UnmarshalMap(v interface{}) (map[string]string, error) { + if m, ok := v.(map[string]string); ok { + return m, nil + } + return nil, fmt.Errorf("%T is not a map", v) +} diff --git a/internal/adapter/http/user_controller.go b/internal/adapter/http/user_controller.go new file mode 100644 index 000000000..4a2205c3c --- /dev/null +++ b/internal/adapter/http/user_controller.go @@ -0,0 +1,53 @@ +package http + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + 
"github.com/reearth/reearth-backend/pkg/id" +) + +type UserController struct { + usecase interfaces.User +} + +func NewUserController(usecase interfaces.User) *UserController { + return &UserController{ + usecase: usecase, + } +} + +type CreateUserInput struct { + Sub string `json:"sub"` + Secret string `json:"secret"` + Name string `json:"name"` + Email string `json:"email"` + UserID *id.UserID `json:"userId"` + TeamID *id.TeamID `json:"teamId"` +} + +type CreateUserOutput struct { + ID string `json:"id"` + Name string `json:"name"` + Email string `json:"email"` +} + +func (c *UserController) CreateUser(ctx context.Context, input CreateUserInput) (interface{}, error) { + u, _, err := c.usecase.Signup(ctx, interfaces.SignupParam{ + Sub: input.Sub, + Name: input.Name, + Email: input.Email, + Secret: input.Secret, + UserID: input.UserID, + TeamID: input.TeamID, + }) + if err != nil { + return nil, err + } + + return CreateUserOutput{ + ID: u.ID().String(), + Name: u.Name(), + Email: u.Email(), + }, nil +} diff --git a/internal/app/app.go b/internal/app/app.go new file mode 100644 index 000000000..58da88efa --- /dev/null +++ b/internal/app/app.go @@ -0,0 +1,161 @@ +package app + +import ( + "errors" + "net/http" + + "github.com/99designs/gqlgen/graphql/playground" + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/reearth/reearth-backend/internal/adapter/graphql" + err1 "github.com/reearth/reearth-backend/pkg/error" +) + +func initAppEcho(cfg *ServerConfig) *echo.Echo { + e := newEcho(cfg) + + controllers := graphql.NewContainer(cfg.Repos, cfg.Gateways, graphql.ContainerConfig{ + SignupSecret: cfg.Config.SignupSecret, + }) + + e.HTTPErrorHandler = func(err error, c echo.Context) { + if c.Response().Committed { + return + } + + code := http.StatusBadRequest + msg := err.Error() + + if err2, ok := err.(*echo.HTTPError); ok { + code = err2.Code + if msg2, ok := err2.Message.(string); ok { + msg = msg2 + } else if msg2, ok := 
err2.Message.(error); ok { + msg = msg2.Error() + } else { + msg = "error" + } + if err2.Internal != nil { + c.Logger().Errorf("echo internal err: %+v", err2) + } + } else if errors.Is(err, err1.ErrNotFound) { + code = http.StatusNotFound + msg = "not found" + } else { + var ierr *err1.ErrInternal + if errors.As(err, &ierr) { + if err2 := ierr.Unwrap(); err2 != nil { + c.Logger().Errorf("internal err: %+v", err2) + } + code = http.StatusInternalServerError + msg = "internal server error" + } + } + + if err := c.JSON(code, map[string]string{ + "error": msg, + }); err != nil { + e.DefaultHTTPErrorHandler(err, c) + } + } + + origins := allowedOrigins(cfg) + if len(origins) > 0 { + e.Use( + middleware.CORSWithConfig(middleware.CORSConfig{ + AllowOrigins: origins, + }), + ) + } + + e.GET("/api/ping", func(c echo.Context) error { + return c.JSON(http.StatusOK, "pong") + }) + + if cfg.Debug || cfg.Config.Dev { + // GraphQL Playground without auth + e.GET("/graphql", echo.WrapHandler( + playground.Handler("reearth-backend", "/api/graphql"), + )) + } + + if cfg.Config.ServeFiles { + files := e.Group("") + serveFiles(e, files, cfg.Gateways.File, cfg.Debug) + } + + e.GET("/api/published/:name", apiPublished(cfg)) + e.GET("/api/published_data/:name", apiPublishedData(cfg)) + api := e.Group("/api") + + privateApi := api.Group("") + jwks := &JwksSyncOnce{} + authRequired(privateApi, jwks, cfg) + + publicRoute(e, api, cfg.Config, cfg.Repos, cfg.Gateways) + graphqlRoute(e, privateApi, cfg, controllers) + userRoute(e, privateApi, cfg.Repos) + web(e, cfg.Config.Web, cfg.Config.Auth0) + + return e +} + +func authRequired(g *echo.Group, jwks Jwks, cfg *ServerConfig) { + g.Use(jwtEchoMiddleware(jwks, cfg)) + g.Use(parseJwtMiddleware(cfg)) + g.Use(authMiddleware(cfg)) +} + +func allowedOrigins(cfg *ServerConfig) []string { + if cfg == nil { + return nil + } + origins := append([]string{}, cfg.Config.Origins...) 
+ if cfg.Debug { + origins = append(origins, "http://localhost:3000", "http://localhost:8080") + } + return origins +} + +func apiPublished(cfg *ServerConfig) echo.HandlerFunc { + return func(c echo.Context) error { + name := c.Param("name") + prj, err := cfg.Repos.Project.FindByPublicName(c.Request().Context(), name) + if err != nil || prj == nil { + return echo.ErrNotFound + } + + title := prj.PublicTitle() + description := prj.PublicDescription() + if title == "" { + title = prj.Name() + } + if description == "" { + description = prj.Description() + } + + return c.JSON(http.StatusOK, map[string]interface{}{ + "title": title, + "description": description, + "image": prj.PublicImage(), + "noindex": prj.PublicNoIndex(), + }) + } +} + +func apiPublishedData(cfg *ServerConfig) echo.HandlerFunc { + return func(c echo.Context) error { + name := c.Param("name") + prj, err := cfg.Repos.Project.FindByPublicName(c.Request().Context(), name) + if err != nil || prj == nil { + return echo.ErrNotFound + } + + r, err := cfg.Gateways.File.ReadBuiltSceneFile(c.Request().Context(), prj.PublicName()) + if err != nil { + return err + } + + return c.Stream(http.StatusOK, echo.MIMEApplicationJSON, r) + } +} diff --git a/internal/app/auth.go b/internal/app/auth.go new file mode 100644 index 000000000..8ad615335 --- /dev/null +++ b/internal/app/auth.go @@ -0,0 +1,132 @@ +package app + +import ( + "context" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + ctx := req.Context() + + var sub, userID string + var u *user.User + + // get sub from context + if s, ok := 
ctx.Value(contextAuth0Sub).(string); ok { + sub = s + } + if u, ok := ctx.Value(contextUser).(string); ok { + userID = u + } + + // attach sub + ctx = context.WithValue(ctx, graphql.ContextSub, sub) + + // debug mode + if cfg.Debug { + if userID := c.Request().Header.Get(debugUserHeader); userID != "" { + if id, err := id.UserIDFrom(userID); err == nil { + user2, err := cfg.Repos.User.FindByID(ctx, id) + if err == nil && user2 != nil { + u = user2 + } + } + } + } + + if u == nil && userID != "" { + if userID2, err := id.UserIDFrom(userID); err == nil { + u, err = cfg.Repos.User.FindByID(ctx, userID2) + if err != nil && err != err1.ErrNotFound { + return err + } + } else { + return err + } + } + + if u == nil && sub != "" { + var err error + // find user + u, err = cfg.Repos.User.FindByAuth0Sub(ctx, sub) + if err != nil && err != err1.ErrNotFound { + return err + } + + // Auth0 accounts are already merged into one so it doesn't need to fetch more info from Auth0 + // + // if u == nil && token != "" { + // // user not found by sub + + // // fetch user profile from Auth0 + // data, err := cfg.Gateways.Authenticator.FetchUser(token) + // if err != nil { + // return err + // } + + // // if !data.EmailVerified { + // // return errors.New("email is not verified") + // // } + + // u, err = cfg.Repos.User.FindByEmail(ctx, data.Email) + // if err != nil && err != err1.ErrNotFound { + // return err + // } + // if u == nil { + // return err1.ErrUserNotFound + // } + // } + } + + // save a new sub + if u != nil && sub != "" { + if err := addAuth0SubToUser(ctx, u, user.AuthFromAuth0Sub(sub), cfg); err != nil { + return err + } + } + + // attach operator + op, err := generateOperator(ctx, cfg, u) + if err != nil { + return err + } + ctx = context.WithValue(ctx, graphql.ContextOperator, op) + + // attach user + ctx = context.WithValue(ctx, graphql.ContextUser, u) + + c.SetRequest(req.WithContext(ctx)) + return next(c) + } + } +} + +func generateOperator(ctx context.Context, cfg 
*ServerConfig, u *user.User) (*usecase.Operator, error) { + if u == nil { + return nil, nil + } + teams, err := cfg.Repos.Team.FindByUser(ctx, u.ID()) + if err != nil { + return nil, err + } + return usecase.OperatorFrom(u.ID(), teams), nil +} + +func addAuth0SubToUser(ctx context.Context, u *user.User, a user.Auth, cfg *ServerConfig) error { + if u.AddAuth(a) { + err := cfg.Repos.User.Save(ctx, u) + if err != nil { + return err + } + } + return nil +} diff --git a/internal/app/config.go b/internal/app/config.go new file mode 100644 index 000000000..35b26229a --- /dev/null +++ b/internal/app/config.go @@ -0,0 +1,62 @@ +package app + +import ( + "os" + + "github.com/joho/godotenv" + "github.com/kelseyhightower/envconfig" +) + +const configPrefix = "reearth" + +type Config struct { + Port string `default:"8080" envconfig:"PORT"` + Dev bool + DB string `default:"mongodb://localhost"` + Auth0 Auth0Config + GraphQL GraphQLConfig + GCPProject string `envconfig:"GOOGLE_CLOUD_PROJECT"` + Profiler string + Tracer string + TracerSample float64 + GCS GCSConfig + ServeFiles bool + AssetBaseURL string + Origins []string + Web WebConfig + SignupSecret string +} + +type Auth0Config struct { + Domain string + ClientID string + ClientSecret string + WebClientID string +} + +type GraphQLConfig struct { + ComplexityLimit int `default:"4000"` +} + +type GCSConfig struct { + BucketName string + PublicationCacheControl string +} + +func ReadConfig(debug bool) (*Config, error) { + envs := []string{} + if debug { + // .env file is only available in debug environment + envs = append(envs, ".env", ".env.local") + } + for _, e := range envs { + if err := godotenv.Load(e); err != nil && !os.IsNotExist(err) { + return nil, err + } + } + + var c Config + err := envconfig.Process(configPrefix, &c) + + return &c, err +} diff --git a/internal/app/echo-logrus.go b/internal/app/echo-logrus.go new file mode 100644 index 000000000..ead1a14eb --- /dev/null +++ b/internal/app/echo-logrus.go @@ -0,0 
+1,217 @@ +package app + +// https://github.com/plutov/echo-logrus with some modifications +// MIT License +// Copyright (c) 2017 Alex Pliutau + +import ( + "io" + "time" + + "github.com/labstack/echo/v4" + "github.com/labstack/gommon/log" + "github.com/sirupsen/logrus" +) + +// Logrus : implement Logger +type Logger struct{} + +var _ echo.Logger = new(Logger) + +// GetEchoLogger for e.Logger +func GetEchoLogger() *Logger { + return &Logger{} +} + +// Level returns logger level +func (l *Logger) Level() log.Lvl { + switch logrus.StandardLogger().Level { + case logrus.DebugLevel: + return log.DEBUG + case logrus.WarnLevel: + return log.WARN + case logrus.ErrorLevel: + return log.ERROR + case logrus.InfoLevel: + return log.INFO + default: + l.Panic("Invalid level") + } + return log.OFF +} + +// SetHeader is a stub to satisfy interface +// It's controlled by Logger +func (l *Logger) SetHeader(_ string) {} + +// SetPrefix It's controlled by Logger +func (l *Logger) SetPrefix(s string) {} + +// Prefix It's controlled by Logger +func (l *Logger) Prefix() string { + return "" +} + +// SetLevel set level to logger from given log.Lvl +func (l *Logger) SetLevel(lvl log.Lvl) { + switch lvl { + case log.DEBUG: + logrus.SetLevel(logrus.DebugLevel) + case log.WARN: + logrus.SetLevel(logrus.WarnLevel) + case log.ERROR: + logrus.SetLevel(logrus.ErrorLevel) + case log.INFO: + logrus.SetLevel(logrus.InfoLevel) + default: + l.Panic("Invalid level") + } +} + +// Output logger output func +func (l *Logger) Output() io.Writer { + return logrus.StandardLogger().Out +} + +// SetOutput change output, default os.Stdout +func (l *Logger) SetOutput(w io.Writer) { + logrus.SetOutput(w) +} + +// Printj print json log +func (l *Logger) Printj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Print() +} + +// Debugj debug json log +func (l *Logger) Debugj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Debug() +} + +// Infoj info json log +func (l *Logger) Infoj(j log.JSON) { + 
logrus.WithFields(logrus.Fields(j)).Info() +} + +// Warnj warning json log +func (l *Logger) Warnj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Warn() +} + +// Errorj error json log +func (l *Logger) Errorj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Error() +} + +// Fatalj fatal json log +func (l *Logger) Fatalj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Fatal() +} + +// Panicj panic json log +func (l *Logger) Panicj(j log.JSON) { + logrus.WithFields(logrus.Fields(j)).Panic() +} + +// Print string log +func (l *Logger) Print(i ...interface{}) { + logrus.Print(i[0].(string)) +} + +// Debug string log +func (l *Logger) Debug(i ...interface{}) { + logrus.Debug(i[0].(string)) +} + +// Info string log +func (l *Logger) Info(i ...interface{}) { + logrus.Info(i[0].(string)) +} + +// Warn string log +func (l *Logger) Warn(i ...interface{}) { + logrus.Warn(i[0].(string)) +} + +// Error string log +func (l *Logger) Error(i ...interface{}) { + logrus.Error(i[0].(string)) +} + +// Fatal string log +func (l *Logger) Fatal(i ...interface{}) { + logrus.Fatal(i[0].(string)) +} + +// Panic string log +func (l *Logger) Panic(i ...interface{}) { + logrus.Panic(i[0].(string)) +} + +// Printf print json log +func (l *Logger) Printf(format string, args ...interface{}) { + logrus.Printf(format, args...) +} + +// Debugf debug json log +func (l *Logger) Debugf(format string, args ...interface{}) { + logrus.Debugf(format, args...) +} + +// Infof info json log +func (l *Logger) Infof(format string, args ...interface{}) { + logrus.Infof(format, args...) +} + +// Warnf warning json log +func (l *Logger) Warnf(format string, args ...interface{}) { + logrus.Warnf(format, args...) +} + +// Errorf error json log +func (l *Logger) Errorf(format string, args ...interface{}) { + logrus.Errorf(format, args...) +} + +// Fatalf fatal json log +func (l *Logger) Fatalf(format string, args ...interface{}) { + logrus.Fatalf(format, args...) 
+} + +// Panicf panic json log +func (l *Logger) Panicf(format string, args ...interface{}) { + logrus.Panicf(format, args...) +} + +// Hook is a function to process middleware. +func (l *Logger) Hook() echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + res := c.Response() + start := time.Now() + if err := next(c); err != nil { + c.Error(err) + } + stop := time.Now() + + logrus.WithFields(map[string]interface{}{ + "time_rfc3339": time.Now().Format(time.RFC3339), + "remote_ip": c.RealIP(), + "host": req.Host, + "uri": req.RequestURI, + "method": req.Method, + "path": req.URL.Path, + "referer": req.Referer(), + "user_agent": req.UserAgent(), + "status": res.Status, + "latency": stop.Sub(start).Microseconds(), + "latency_human": stop.Sub(start).String(), + "bytes_in": req.ContentLength, + "bytes_out": res.Size, + }).Info("Handled request") + + return nil + } + } +} diff --git a/internal/app/file.go b/internal/app/file.go new file mode 100644 index 000000000..3f8a3821c --- /dev/null +++ b/internal/app/file.go @@ -0,0 +1,73 @@ +package app + +import ( + "io" + "mime" + "net/http" + "path" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +func serveFiles( + ec *echo.Echo, + r *echo.Group, + repo gateway.File, + debug bool, +) { + if repo == nil { + return + } + + fileHandler := func(handler func(echo.Context) (io.Reader, string, error)) echo.HandlerFunc { + return func(ctx echo.Context) error { + reader, filename, err := handler(ctx) + if err != nil { + return err + } + ct := "application/octet-stream" + if ext := path.Ext(filename); ext != "" { + ct2 := mime.TypeByExtension(ext) + if ct2 != "" { + ct = ct2 + } + } + return ctx.Stream(http.StatusOK, ct, reader) + } + } + + r.GET( + "/assets/:filename", + fileHandler(func(ctx 
echo.Context) (io.Reader, string, error) { + filename := ctx.Param("filename") + r, err := repo.ReadAsset(ctx.Request().Context(), filename) + return r, filename, err + }), + ) + + r.GET( + "/plugins/:name/:version/:filename", + fileHandler(func(ctx echo.Context) (io.Reader, string, error) { + pid, err := id.PluginIDFrom(ctx.Param("name") + "#" + ctx.Param("version")) + if err != nil { + return nil, "", err1.ErrNotFound + } + filename := ctx.Param("filename") + r, err := repo.ReadPluginFile(ctx.Request().Context(), pid, filename) + return r, filename, err + }), + ) + + r.GET( + "/published/:name", + fileHandler(func(ctx echo.Context) (io.Reader, string, error) { + name := ctx.Param("name") + r, err := repo.ReadBuiltSceneFile(ctx.Request().Context(), name) + return r, name + ".json", err + }), + ) + +} diff --git a/internal/app/graphql.go b/internal/app/graphql.go new file mode 100644 index 000000000..070c75345 --- /dev/null +++ b/internal/app/graphql.go @@ -0,0 +1,125 @@ +package app + +import ( + "context" + "errors" + + "github.com/99designs/gqlgen-contrib/gqlopencensus" + "github.com/99designs/gqlgen-contrib/gqlopentracing" + graphql1 "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql/handler" + "github.com/99designs/gqlgen/graphql/handler/extension" + "github.com/99designs/gqlgen/graphql/handler/lru" + "github.com/99designs/gqlgen/graphql/playground" + "github.com/labstack/echo/v4" + "github.com/vektah/gqlparser/v2/gqlerror" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + infra_graphql "github.com/reearth/reearth-backend/internal/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/usecase" + err1 "github.com/reearth/reearth-backend/pkg/error" +) + +const enableDataLoaders = true + +func getOperator(ctx context.Context) *usecase.Operator { + if v := ctx.Value(infra_graphql.ContextOperator); v != nil { + if v2, ok := v.(*usecase.Operator); ok { + 
return v2 + } + } + return nil +} + +func dataLoaderMiddleware(container *graphql.Container) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(echoCtx echo.Context) error { + req := echoCtx.Request() + ctx := req.Context() + + var dl *dataloader.DataLoaders + if enableDataLoaders { + dl = dataloader.NewDataLoaders(ctx, container, getOperator(ctx)) + } else { + dl = dataloader.NewOrdinaryDataLoaders(ctx, container, getOperator(ctx)) + } + + ctx = context.WithValue(ctx, dataloader.DataLoadersKey(), dl) + echoCtx.SetRequest(req.WithContext(ctx)) + return next(echoCtx) + } + } +} + +func tracerMiddleware(enabled bool) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(echoCtx echo.Context) error { + if !enabled { + return next(echoCtx) + } + req := echoCtx.Request() + ctx := req.Context() + t := &infra_graphql.Tracer{} + echoCtx.SetRequest(req.WithContext(infra_graphql.AttachTracer(ctx, t))) + defer t.Print() + return next(echoCtx) + } + } +} + +func graphqlRoute( + ec *echo.Echo, + r *echo.Group, + conf *ServerConfig, + controllers *graphql.Container, +) { + playgroundEnabled := conf.Debug || conf.Config.Dev + + if playgroundEnabled { + r.GET("/graphql", echo.WrapHandler( + playground.Handler("reearth-backend", "/api/graphql"), + )) + } + + schema := infra_graphql.NewExecutableSchema(infra_graphql.Config{ + Resolvers: infra_graphql.NewResolver(infra_graphql.ResolverConfig{ + Controllers: controllers, + Debug: conf.Debug, + }), + }) + + srv := handler.NewDefaultServer(schema) + srv.Use(gqlopentracing.Tracer{}) + srv.Use(gqlopencensus.Tracer{}) + if conf.Config.GraphQL.ComplexityLimit > 0 { + srv.Use(extension.FixedComplexityLimit(conf.Config.GraphQL.ComplexityLimit)) + } + if playgroundEnabled { + srv.Use(extension.Introspection{}) + } + srv.Use(extension.AutomaticPersistedQuery{ + Cache: lru.New(30), + }) + srv.SetErrorPresenter( + // show more detailed error messgage in debug mode + 
func(ctx context.Context, e error) *gqlerror.Error { + if conf.Debug { + var ierr *err1.ErrInternal + if errors.As(e, &ierr) { + if err2 := ierr.Unwrap(); err2 != nil { + // TODO: display stacktrace with xerrors + ec.Logger.Errorf("%+v", err2) + } + } + return gqlerror.ErrorPathf(graphql1.GetFieldContext(ctx).Path(), e.Error()) + } + return graphql1.DefaultErrorPresenter(ctx, e) + }, + ) + + r.POST("/graphql", func(c echo.Context) error { + srv.ServeHTTP(c.Response(), c.Request()) + return nil + }, dataLoaderMiddleware(controllers), tracerMiddleware(false)) +} diff --git a/internal/app/jwt.go b/internal/app/jwt.go new file mode 100644 index 000000000..11f1d98b6 --- /dev/null +++ b/internal/app/jwt.go @@ -0,0 +1,192 @@ +package app + +import ( + "context" + "encoding/json" + "errors" + "net/http" + "sync" + + jwtmiddleware "github.com/auth0/go-jwt-middleware" + "github.com/dgrijalva/jwt-go" + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/pkg/log" +) + +// TODO: move the authentication logic to infrastructure + +type contextKey string + +const ( + userProfileKey = "auth0_user" + debugUserHeader = "X-Reearth-Debug-User" + contextAuth0AccessToken contextKey = "auth0AccessToken" + contextAuth0Sub contextKey = "auth0Sub" + contextUser contextKey = "reearth_user" +) + +// JSONWebKeys _ +type JSONWebKeys struct { + Kty string `json:"kty"` + Kid string `json:"kid"` + Use string `json:"use"` + N string `json:"n"` + E string `json:"e"` + X5c []string `json:"x5c"` +} + +// Jwks _ +type Jwks interface { + GetJwks(string) ([]JSONWebKeys, error) +} + +// JwksSyncOnce _ +type JwksSyncOnce struct { + jwks []JSONWebKeys + once sync.Once +} + +// GetJwks _ +func (jso *JwksSyncOnce) GetJwks(publicKeyURL string) ([]JSONWebKeys, error) { + var err error + jso.once.Do(func() { + jso.jwks, err = fetchJwks(publicKeyURL) + }) + + if err != nil { + return nil, err + } + + return jso.jwks, nil +} + +func fetchJwks(publicKeyURL string) ([]JSONWebKeys, error) { + resp, err 
// addPathSep returns path with a trailing "/" appended, unless path is
// empty or already ends with a slash, in which case it is returned as-is.
func addPathSep(path string) string {
	if path != "" && path[len(path)-1] != '/' {
		return path + "/"
	}
	return path
}
next(c) + } + } +} + +func jwtEchoMiddleware(jwks Jwks, cfg *ServerConfig) echo.MiddlewareFunc { + jwksURL := addPathSep(cfg.Config.Auth0.Domain) + ".well-known/jwks.json" + + jwtMiddleware := jwtmiddleware.New(jwtmiddleware.Options{ + CredentialsOptional: cfg.Debug, + UserProperty: userProfileKey, + SigningMethod: jwt.SigningMethodRS256, + // Make jwtmiddleware return an error object by not writing ErrorHandler to ResponseWriter + ErrorHandler: func(w http.ResponseWriter, req *http.Request, err string) {}, + ValidationKeyGetter: func(token *jwt.Token) (interface{}, error) { + cert, err := getPemCert(token, jwksURL, jwks) + if err != nil { + log.Errorf("jwt: %s", err) + return nil, err + } + result, _ := jwt.ParseRSAPublicKeyFromPEM([]byte(cert)) + return result, nil + }, + }) + + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + err := jwtMiddleware.CheckJWT(c.Response(), c.Request()) + if err != nil { + return errorResponse(c, err.Error()) + } + return next(c) + } + } +} + +func errorResponse(c echo.Context, err string) error { + res := map[string]string{"error": err} + return c.JSON(http.StatusUnauthorized, res) +} diff --git a/internal/app/main.go b/internal/app/main.go new file mode 100644 index 000000000..3e0915255 --- /dev/null +++ b/internal/app/main.go @@ -0,0 +1,43 @@ +package app + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/log" +) + +func Start(debug bool, version string) { + log.Infof("reearth-backend %s", version) + + ctx := context.Background() + + // Load config + conf, cerr := ReadConfig(debug) + if cerr != nil { + log.Fatal(cerr) + } + + // Init profiler + initProfiler(conf.Profiler, version) + + // Init tracer + closer := initTracer(conf) + defer func() { + if closer != nil { + if err := closer.Close(); err != nil { + log.Errorf("Failed to close tracer: %s\n", err.Error()) + } + } + }() + + // Init repositories + repos, gateways := initReposAndGateways(ctx, conf, debug) + + server 
:= NewServer(&ServerConfig{ + Config: conf, + Debug: debug, + Repos: repos, + Gateways: gateways, + }) + server.Run() +} diff --git a/internal/app/profiler.go b/internal/app/profiler.go new file mode 100644 index 000000000..6b5bb3d17 --- /dev/null +++ b/internal/app/profiler.go @@ -0,0 +1,27 @@ +package app + +import ( + "cloud.google.com/go/profiler" + "github.com/reearth/reearth-backend/pkg/log" +) + +func initProfiler(kind string, version string) { + if kind == "" { + return + } + + if kind == "gcp" { + initGCPProfiler(version) + } + + log.Infof("profiler: %s initialized\n", kind) +} + +func initGCPProfiler(version string) { + if err := profiler.Start(profiler.Config{ + Service: "reearth-backend", + ServiceVersion: version, + }); err != nil { + log.Fatalln(err) + } +} diff --git a/internal/app/public.go b/internal/app/public.go new file mode 100644 index 000000000..66d454954 --- /dev/null +++ b/internal/app/public.go @@ -0,0 +1,109 @@ +package app + +import ( + "errors" + "fmt" + "net/http" + + "github.com/labstack/echo/v4" + http1 "github.com/reearth/reearth-backend/internal/adapter/http" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +type inputJSON struct { + DatasetSchemaID string `json:"datasetSchemaId"` + Author string `json:"author"` + Content string `json:"content"` + Target *string `json:"target"` + Lat *float64 `json:"lat"` + Lng *float64 `json:"lng"` +} + +func toResponseValue(v *dataset.Value) interface{} { + if v == nil { + return nil + } + switch v2 := v.Value().(type) { + case float64: + return v2 + case string: + return v2 + case dataset.LatLng: + return map[string]float64{ + "lat": 
v2.Lat, + "lng": v2.Lng, + } + } + return nil +} + +func publicRoute( + ec *echo.Echo, + r *echo.Group, + conf *Config, + repos *repo.Container, + gateways *gateway.Container, +) { + controller := http1.NewUserController(interactor.NewUser(repos, gateways, conf.SignupSecret)) + + // TODO: move to adapter and usecase layer + r.POST("/comments", func(c echo.Context) error { + var inp inputJSON + if err := c.Bind(&inp); err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: err} + } + + dssid, err := id.DatasetSchemaIDFrom(inp.DatasetSchemaID) + if err != nil { + return &echo.HTTPError{Code: http.StatusNotFound, Message: err1.ErrNotFound} + } + if inp.Author == "" { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: errors.New("require author value")} + } + if inp.Content == "" { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: errors.New("require content value")} + } + interactor := interactor.NewDataset(repos, gateways) + dss, ds, err := interactor.AddDynamicDataset(c.Request().Context(), interfaces.AddDynamicDatasetParam{ + SchemaId: dssid, + Author: inp.Author, + Content: inp.Content, + Lat: inp.Lat, + Lng: inp.Lng, + Target: inp.Target, + }) + + if err != nil { + if errors.Is(err1.ErrNotFound, err) { + return &echo.HTTPError{Code: http.StatusNotFound, Message: err} + } + return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} + } + response := make(map[string]interface{}) + response["id"] = ds.ID().String() + for _, f := range dss.Fields() { + response[f.Name()] = toResponseValue(ds.Field(f.ID()).Value()) + } + return c.JSON(http.StatusOK, response) + }) + + r.POST("/signup", func(c echo.Context) error { + var inp http1.CreateUserInput + if err := c.Bind(&inp); err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} + } + + output, err := controller.CreateUser(c.Request().Context(), inp) + if err != nil { + return err + } + + 
return c.JSON(http.StatusOK, output) + }) +} diff --git a/internal/app/repo.go b/internal/app/repo.go new file mode 100644 index 000000000..dc795a8d4 --- /dev/null +++ b/internal/app/repo.go @@ -0,0 +1,83 @@ +package app + +import ( + "context" + "fmt" + "time" + + "github.com/reearth/reearth-backend/internal/infrastructure/adapter" + "github.com/reearth/reearth-backend/internal/infrastructure/auth0" + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/gcs" + mongorepo "github.com/reearth/reearth-backend/internal/infrastructure/mongo" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/log" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + mongotrace "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver" +) + +func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo.Container, *gateway.Container) { + repos := &repo.Container{} + gateways := &gateway.Container{} + + // memory.InitRepos(repos, true) // DEBUG + + // Mongo + client, err := mongo.Connect( + ctx, + options.Client(). + ApplyURI(conf.DB). + SetConnectTimeout(time.Second*10). 
+ SetMonitor(mongotrace.NewMonitor("reearth-backend")), + ) + if err != nil { + log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) + } + if err := mongorepo.InitRepos(ctx, repos, client, "reearth", debug); err != nil { + log.Fatalln(fmt.Sprintf("Failed to init mongo: %+v", err)) + } + + // Plugin and PropertySchema + if debug { + repos.Plugin = adapter.NewPlugin([]repo.Plugin{ + fs.NewPlugin("data"), + repos.Plugin, + }, repos.Plugin) + repos.PropertySchema = adapter.NewPropertySchema([]repo.PropertySchema{ + fs.NewPropertySchema("data"), + repos.PropertySchema, + }, repos.PropertySchema) + } + + // File + var fileRepo gateway.File + if conf.GCS.BucketName == "" { + log.Infoln("file: local storage is used") + fileRepo, err = fs.NewFile("data", conf.AssetBaseURL) + } else { + log.Infof("file: GCS storage is used: %s\n", conf.GCS.BucketName) + fileRepo, err = gcs.NewFile(conf.GCS.BucketName, conf.AssetBaseURL, conf.GCS.PublicationCacheControl) + if err != nil { + if debug { + log.Warnf("file: failed to init GCS storage: %s\n", err.Error()) + err = nil + } + } + } + if err != nil { + log.Fatalln(fmt.Sprintf("file: init error: %+v", err)) + } + gateways.File = fileRepo + + // Auth0 + gateways.Authenticator = auth0.New(conf.Auth0.Domain, conf.Auth0.ClientID, conf.Auth0.ClientSecret) + + // release lock of all scenes + if err := repos.SceneLock.ReleaseAllLock(context.Background()); err != nil { + log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) + } + + return repos, gateways +} diff --git a/internal/app/server.go b/internal/app/server.go new file mode 100644 index 000000000..b52b7ffc2 --- /dev/null +++ b/internal/app/server.go @@ -0,0 +1,89 @@ +package app + +import ( + "net/http" + _ "net/http/pprof" + "os" + "os/signal" + + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/repo" + 
"github.com/reearth/reearth-backend/pkg/log" + echotracer "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo" +) + +type WebServer struct { + address string + appServer *echo.Echo +} + +type ServerConfig struct { + Config *Config + Debug bool + Repos *repo.Container + Gateways *gateway.Container +} + +func NewServer(cfg *ServerConfig) *WebServer { + port := cfg.Config.Port + if port == "" { + port = "8080" + } + + address := "0.0.0.0:" + port + if cfg.Debug { + address = "localhost:" + port + } + + w := &WebServer{ + address: address, + } + + w.appServer = initAppEcho(cfg) + return w +} + +func (w *WebServer) Run() { + defer log.Infoln("Server shutdown") + + debugLog := "" + if w.appServer.Debug { + debugLog += " with debug mode" + } + log.Infof("Server started%s\n", debugLog) + + go func() { + err := w.appServer.Start(w.address) + log.Fatalln(err.Error()) + }() + + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt) + <-quit +} + +func newEcho(cfg *ServerConfig) *echo.Echo { + if cfg.Config == nil { + log.Fatalln("ServerConfig.Config is nil") + } + + e := echo.New() + e.Debug = cfg.Debug + e.HideBanner = true + e.HidePort = true + + logger := GetEchoLogger() + e.Logger = logger + e.Use(logger.Hook()) + + e.Use(middleware.Recover(), echotracer.Middleware("reearth-backend")) + + if e.Debug { + // enable pprof + e.GET("/debug/pprof/*", echo.WrapHandler(http.DefaultServeMux)) + } + + return e +} diff --git a/internal/app/tracer.go b/internal/app/tracer.go new file mode 100644 index 000000000..ac18bd8f1 --- /dev/null +++ b/internal/app/tracer.go @@ -0,0 +1,69 @@ +package app + +import ( + "io" + + texporter "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace" + "github.com/reearth/reearth-backend/pkg/log" + jaeger "github.com/uber/jaeger-client-go" + jaegercfg "github.com/uber/jaeger-client-go/config" + jaegerlog "github.com/uber/jaeger-client-go/log" + "github.com/uber/jaeger-lib/metrics" + 
"go.opentelemetry.io/otel/api/global" + sdktrace "go.opentelemetry.io/otel/sdk/trace" +) + +func initTracer(conf *Config) io.Closer { + if conf.Tracer == "gcp" { + initGCPTracer(conf) + } else if conf.Tracer == "jaeger" { + return initJaegerTracer(conf) + } + return nil +} + +func initGCPTracer(conf *Config) { + exporter, err := texporter.NewExporter(texporter.WithProjectID(conf.GCPProject)) + if err != nil { + log.Fatalln(err) + } + + tp, err := sdktrace.NewProvider(sdktrace.WithConfig(sdktrace.Config{ + DefaultSampler: sdktrace.ProbabilitySampler(conf.TracerSample), + }), sdktrace.WithSyncer(exporter)) + if err != nil { + log.Fatalln(err) + } + + global.SetTraceProvider(tp) + + log.Infof("tracer: initialized cloud trace with sample fraction: %g", conf.TracerSample) +} + +func initJaegerTracer(conf *Config) io.Closer { + cfg := jaegercfg.Configuration{ + Sampler: &jaegercfg.SamplerConfig{ + Type: jaeger.SamplerTypeConst, + Param: conf.TracerSample, + }, + Reporter: &jaegercfg.ReporterConfig{ + LogSpans: true, + }, + } + + jLogger := jaegerlog.StdLogger + jMetricsFactory := metrics.NullFactory + + closer, err := cfg.InitGlobalTracer( + "Re:Earth", + jaegercfg.Logger(jLogger), + jaegercfg.Metrics(jMetricsFactory), + ) + + if err != nil { + log.Fatalf("Could not initialize jaeger tracer: %s\n", err.Error()) + } + + log.Infof("tracer: initialized jaeger tracer with sample fraction: %g\n", conf.TracerSample) + return closer +} diff --git a/internal/app/userAPIs.go b/internal/app/userAPIs.go new file mode 100644 index 000000000..9f2b11163 --- /dev/null +++ b/internal/app/userAPIs.go @@ -0,0 +1,133 @@ +package app + +import ( + "context" + "errors" + "io" + "net/http" + "strings" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/encoding" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/user" +) + +// TODO: move to adapter and usecase layer + +var ( + ErrOpDenied = errors.New("operation denied") + ErrUnauthorized = errors.New("Unauthorized") + ErrUnknowFormat = errors.New("unknown file format") + ErrBadID = errors.New("bad id") + ErrBadParameter = errors.New("id.ext is needed") +) + +func checkScene(ctx context.Context, id id.SceneID, op *usecase.Operator, sr repo.Scene) error { + res, err := sr.HasSceneTeam(ctx, id, op.ReadableTeams) + if err != nil { + return err + } + if !res { + return ErrOpDenied + } + return nil +} + +func getEncoder(w io.Writer, ext string) (encoding.Encoder, string) { + switch strings.ToLower(ext) { + case "kml": + return encoding.NewKMLEncoder(w), "application/xml" + case "geojson": + return encoding.NewGeoJSONEncoder(w), "application/json" + case "czml": + return encoding.NewCZMLEncoder(w), "application/json" + case "shp": + return encoding.NewSHPEncoder(w), "application/octet-stream" + } + return nil, "" +} +func userRoute( + ec *echo.Echo, + r *echo.Group, + repos *repo.Container, +) { + r.GET("/layers/:param", func(c echo.Context) error { + ctx := c.Request().Context() + user := c.Request().Context().Value(graphql.ContextUser).(*user.User) + if user == nil { + return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrUnauthorized} + } + op := c.Request().Context().Value(graphql.ContextOperator).(*usecase.Operator) + if op == nil { + return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} + } + param := c.Param("param") + params := strings.Split(param, ".") + if len(params) != 2 { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadParameter} + } + + lid, err := id.LayerIDFrom(params[0]) + if err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadID} 
+ } + scenes, err := repos.Scene.FindIDsByTeam(ctx, op.ReadableTeams) + if err != nil { + if errors.Is(err1.ErrNotFound, err) { + return &echo.HTTPError{Code: http.StatusNotFound, Message: err} + } + return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} + } + layer, err := repos.Layer.FindByID(ctx, lid, scenes) + if err != nil { + if errors.Is(err1.ErrNotFound, err) { + return &echo.HTTPError{Code: http.StatusNotFound, Message: err} + } + return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} + } + err = checkScene(ctx, layer.Scene(), op, repos.Scene) + if err != nil { + if errors.Is(ErrOpDenied, err) { + return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} + } + + return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} + } + ext := params[1] + + reader, writer := io.Pipe() + e, mime := getEncoder(writer, strings.ToLower(ext)) + if e == nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrUnknowFormat} + } + + ex := &encoding.Exporter{ + Merger: &merging.Merger{ + LayerLoader: repo.LayerLoaderFrom(repos.Layer, scenes), + PropertyLoader: repo.PropertyLoaderFrom(repos.Property, scenes), + }, + Sealer: &merging.Sealer{ + DatasetGraphLoader: repo.DatasetGraphLoaderFrom(repos.Dataset, scenes), + }, + Encoder: e, + } + + go func() { + defer func() { + _ = writer.Close() + }() + err = ex.ExportLayerByID(ctx, lid) + }() + + if err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: err} + } + return c.Stream(http.StatusOK, mime, reader) + }) +} diff --git a/internal/app/web.go b/internal/app/web.go new file mode 100644 index 000000000..2e9ebdf1f --- /dev/null +++ b/internal/app/web.go @@ -0,0 +1,41 @@ +package app + +import ( + "net/http" + "os" + + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" +) + +type WebConfig map[string]string + +func web(e *echo.Echo, wc WebConfig, ac Auth0Config) { + if _, err := os.Stat("web"); err != 
nil { + return // web won't be delivered + } + + e.Logger.Info("web: web directory will be delivered\n") + + config := map[string]string{} + if ac.Domain != "" { + config["auth0Domain"] = ac.Domain + } + if ac.WebClientID != "" { + config["auth0ClientId"] = ac.WebClientID + } + for k, v := range wc { + config[k] = v + } + + e.GET("/reearth_config.json", func(c echo.Context) error { + return c.JSON(http.StatusOK, config) + }) + + e.Use(middleware.StaticWithConfig(middleware.StaticConfig{ + Root: "web", + Index: "index.html", + Browse: false, + HTML5: true, + })) +} diff --git a/internal/graphql/context.go b/internal/graphql/context.go new file mode 100644 index 000000000..768647a0e --- /dev/null +++ b/internal/graphql/context.go @@ -0,0 +1,62 @@ +package graphql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/user" +) + +// ContextKey _ +type ContextKey string + +const ( + ContextUser ContextKey = "user" + ContextOperator ContextKey = "operator" + ContextSub ContextKey = "sub" +) + +func getUser(ctx context.Context) *user.User { + if v := ctx.Value(ContextUser); v != nil { + if u, ok := v.(*user.User); ok { + return u + } + } + return nil +} + +func getLang(ctx context.Context, lang *string) string { + if lang != nil && *lang != "" { + return *lang + } + + u := getUser(ctx) + if u == nil { + return "en" // default language + } + + l := u.Lang() + if l.IsRoot() { + return "en" // default language + } + + return l.String() +} + +func getOperator(ctx context.Context) *usecase.Operator { + if v := ctx.Value(ContextOperator); v != nil { + if v2, ok := v.(*usecase.Operator); ok { + return v2 + } + } + return nil +} + +func getSub(ctx context.Context) string { + if v := ctx.Value(ContextSub); v != nil { + if v2, ok := v.(string); ok { + return v2 + } + } + return "" +} diff --git a/internal/graphql/dataloader/context.go b/internal/graphql/dataloader/context.go new file mode 100644 index 
000000000..b0e56543f
--- /dev/null
+++ b/internal/graphql/dataloader/context.go
@@ -0,0 +1,73 @@
+package dataloader
+
+//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_gen.go -m=Dataset -m=Layer -m=Plugin -m=Project -m=Property -m=Scene -m=Team -m=User
+//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_layer_item_gen.go -controller=Layer -method=FetchItem -id=LayerID -m=LayerItem
+//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_layer_group_gen.go -controller=Layer -method=FetchGroup -id=LayerID -m=LayerGroup
+//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_dataset_schema_gen.go -controller=Dataset -method=FetchSchema -m=DatasetSchema
+//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_property_schema_gen.go -controller=Property -method=FetchSchema -m=PropertySchema
+
+import (
+	"context"
+
+	"github.com/reearth/reearth-backend/internal/adapter/graphql"
+	"github.com/reearth/reearth-backend/internal/usecase"
+)
+
+// dataLoadersKey is an unexported struct key so no other package can
+// collide with this package's context entry.
+type dataLoadersKey struct{}
+
+// DataLoaders bundles one dataloader per model type for a single request.
+type DataLoaders struct {
+	Dataset        DatasetDataLoader
+	DatasetSchema  DatasetSchemaDataLoader
+	LayerItem      LayerItemDataLoader
+	LayerGroup     LayerGroupDataLoader
+	Layer          LayerDataLoader
+	Plugin         PluginDataLoader
+	Project        ProjectDataLoader
+	Property       PropertyDataLoader
+	PropertySchema PropertySchemaDataLoader
+	Scene          SceneDataLoader
+	Team           TeamDataLoader
+	User           UserDataLoader
+}
+
+// DataLoadersFromContext returns the DataLoaders previously attached to ctx.
+// NOTE(review): the bare type assertion panics when the dataloader
+// middleware did not run for this request — presumably a deliberate
+// invariant; confirm all callers sit behind that middleware.
+func DataLoadersFromContext(ctx context.Context) *DataLoaders {
+	return ctx.Value(dataLoadersKey{}).(*DataLoaders)
+}
+
+// DataLoadersKey returns the context key under which DataLoaders are stored,
+// for use by the middleware that attaches them.
+func DataLoadersKey() interface{} {
+	return dataLoadersKey{}
+}
+
+// NewDataLoaders builds batching/caching loaders bound to ctx and the
+// request operator o, backed by the controllers in c.
+func NewDataLoaders(ctx context.Context, c *graphql.Container, o *usecase.Operator) *DataLoaders {
+	return &DataLoaders{
+		Dataset:        newDataset(ctx, c.DatasetController, o),
+		DatasetSchema:  newDatasetSchema(ctx, c.DatasetController, o),
+		LayerItem:      newLayerItem(ctx, c.LayerController, o),
+		LayerGroup:     newLayerGroup(ctx, c.LayerController, o),
+		Layer:          newLayer(ctx, c.LayerController, o),
+		Plugin:         newPlugin(ctx, c.PluginController, o),
+		Project:        newProject(ctx, c.ProjectController, o),
+		Property:       newProperty(ctx, c.PropertyController, o),
+		PropertySchema: newPropertySchema(ctx, c.PropertyController, o),
+		Scene:          newScene(ctx, c.SceneController, o),
+		Team:           newTeam(ctx, c.TeamController, o),
+		User:           newUser(ctx, c.UserController, o),
+	}
+}
+
+// NewOrdinaryDataLoaders builds pass-through loaders that fetch directly
+// (no batching, no per-request cache), for contexts where caching is
+// undesirable.
+func NewOrdinaryDataLoaders(ctx context.Context, c *graphql.Container, o *usecase.Operator) *DataLoaders {
+	return &DataLoaders{
+		Dataset:        newOrdinaryDataset(ctx, c.DatasetController, o),
+		DatasetSchema:  newOrdinaryDatasetSchema(ctx, c.DatasetController, o),
+		LayerItem:      newOrdinaryLayerItem(ctx, c.LayerController, o),
+		LayerGroup:     newOrdinaryLayerGroup(ctx, c.LayerController, o),
+		Layer:          newOrdinaryLayer(ctx, c.LayerController, o),
+		Plugin:         newOrdinaryPlugin(ctx, c.PluginController, o),
+		Project:        newOrdinaryProject(ctx, c.ProjectController, o),
+		Property:       newOrdinaryProperty(ctx, c.PropertyController, o),
+		PropertySchema: newOrdinaryPropertySchema(ctx, c.PropertyController, o),
+		Scene:          newOrdinaryScene(ctx, c.SceneController, o),
+		Team:           newOrdinaryTeam(ctx, c.TeamController, o),
+		User:           newOrdinaryUser(ctx, c.UserController, o),
+	}
+}
diff --git a/internal/graphql/dataloader/dataloader.go b/internal/graphql/dataloader/dataloader.go
new file mode 100644
index 000000000..c4ddfdc8c
--- /dev/null
+++ b/internal/graphql/dataloader/dataloader.go
@@ -0,0 +1,14 @@
+package dataloader
+
+//go:generate go run github.com/vektah/dataloaden DatasetLoader github.com/reearth/reearth-backend/pkg/id.DatasetID *github.com/reearth/reearth-backend/internal/adapter/graphql.Dataset
+//go:generate go run github.com/vektah/dataloaden DatasetSchemaLoader
github.com/reearth/reearth-backend/pkg/id.DatasetSchemaID *github.com/reearth/reearth-backend/internal/adapter/graphql.DatasetSchema +//go:generate go run github.com/vektah/dataloaden LayerLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/graphql.Layer +//go:generate go run github.com/vektah/dataloaden LayerGroupLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/graphql.LayerGroup +//go:generate go run github.com/vektah/dataloaden LayerItemLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/graphql.LayerItem +//go:generate go run github.com/vektah/dataloaden PluginLoader github.com/reearth/reearth-backend/pkg/id.PluginID *github.com/reearth/reearth-backend/internal/adapter/graphql.Plugin +//go:generate go run github.com/vektah/dataloaden ProjectLoader github.com/reearth/reearth-backend/pkg/id.ProjectID *github.com/reearth/reearth-backend/internal/adapter/graphql.Project +//go:generate go run github.com/vektah/dataloaden PropertyLoader github.com/reearth/reearth-backend/pkg/id.PropertyID *github.com/reearth/reearth-backend/internal/adapter/graphql.Property +//go:generate go run github.com/vektah/dataloaden PropertySchemaLoader github.com/reearth/reearth-backend/pkg/id.PropertySchemaID *github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchema +//go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/pkg/id.SceneID *github.com/reearth/reearth-backend/internal/adapter/graphql.Scene +//go:generate go run github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/pkg/id.TeamID *github.com/reearth/reearth-backend/internal/adapter/graphql.Team +//go:generate go run github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/pkg/id.UserID *github.com/reearth/reearth-backend/internal/adapter/graphql.User diff 
--git a/internal/graphql/dataloader/datasetloader_gen.go b/internal/graphql/dataloader/datasetloader_gen.go new file mode 100644 index 000000000..45559750d --- /dev/null +++ b/internal/graphql/dataloader/datasetloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// DatasetLoaderConfig captures the config to create a new DatasetLoader +type DatasetLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.DatasetID) ([]*graphql.Dataset, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewDatasetLoader creates a new DatasetLoader given a fetch, wait, and maxBatch +func NewDatasetLoader(config DatasetLoaderConfig) *DatasetLoader { + return &DatasetLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// DatasetLoader batches and caches requests +type DatasetLoader struct { + // this method provides the data for the loader + fetch func(keys []id.DatasetID) ([]*graphql.Dataset, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.DatasetID]*graphql.Dataset + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *datasetLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type datasetLoaderBatch struct { + keys []id.DatasetID + data []*graphql.Dataset + error []error + closing bool + done chan struct{} +} + +// Load a Dataset by key, batching and caching will be applied automatically +func (l *DatasetLoader) Load(key id.DatasetID) (*graphql.Dataset, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Dataset. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*graphql.Dataset, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.Dataset, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &datasetLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.Dataset, error) { + <-batch.done + + var data *graphql.Dataset + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *DatasetLoader) LoadAll(keys []id.DatasetID) ([]*graphql.Dataset, []error) { + results := make([]func() (*graphql.Dataset, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + datasets := make([]*graphql.Dataset, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasets[i], errors[i] = thunk() + } + return datasets, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Datasets. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetLoader) LoadAllThunk(keys []id.DatasetID) func() ([]*graphql.Dataset, []error) { + results := make([]func() (*graphql.Dataset, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.Dataset, []error) { + datasets := make([]*graphql.Dataset, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasets[i], errors[i] = thunk() + } + return datasets, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *DatasetLoader) Prime(key id.DatasetID, value *graphql.Dataset) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *DatasetLoader) Clear(key id.DatasetID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *DatasetLoader) unsafeSet(key id.DatasetID, value *graphql.Dataset) { + if l.cache == nil { + l.cache = map[id.DatasetID]*graphql.Dataset{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *datasetLoaderBatch) keyIndex(l *DatasetLoader, key id.DatasetID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *datasetLoaderBatch) startTimer(l *DatasetLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *datasetLoaderBatch) end(l *DatasetLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/datasetschemaloader_gen.go b/internal/graphql/dataloader/datasetschemaloader_gen.go new file mode 100644 index 000000000..cfe201b04 --- /dev/null +++ b/internal/graphql/dataloader/datasetschemaloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// DatasetSchemaLoaderConfig captures the config to create a new DatasetSchemaLoader +type DatasetSchemaLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewDatasetSchemaLoader creates a new DatasetSchemaLoader given a fetch, wait, and maxBatch +func NewDatasetSchemaLoader(config DatasetSchemaLoaderConfig) *DatasetSchemaLoader { + return &DatasetSchemaLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// DatasetSchemaLoader batches and caches requests +type DatasetSchemaLoader struct { + // this method provides the data for the loader + fetch func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.DatasetSchemaID]*graphql.DatasetSchema + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *datasetSchemaLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type datasetSchemaLoaderBatch struct { + keys []id.DatasetSchemaID + data []*graphql.DatasetSchema + error []error + closing bool + done chan struct{} +} + +// Load a DatasetSchema by key, batching and caching will be applied automatically +func (l *DatasetSchemaLoader) Load(key id.DatasetSchemaID) (*graphql.DatasetSchema, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a DatasetSchema. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*graphql.DatasetSchema, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.DatasetSchema, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &datasetSchemaLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.DatasetSchema, error) { + <-batch.done + + var data *graphql.DatasetSchema + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *DatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { + results := make([]func() (*graphql.DatasetSchema, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + datasetSchemas := make([]*graphql.DatasetSchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasetSchemas[i], errors[i] = thunk() + } + return datasetSchemas, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a DatasetSchemas. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *DatasetSchemaLoader) LoadAllThunk(keys []id.DatasetSchemaID) func() ([]*graphql.DatasetSchema, []error) { + results := make([]func() (*graphql.DatasetSchema, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.DatasetSchema, []error) { + datasetSchemas := make([]*graphql.DatasetSchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + datasetSchemas[i], errors[i] = thunk() + } + return datasetSchemas, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *DatasetSchemaLoader) Prime(key id.DatasetSchemaID, value *graphql.DatasetSchema) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *DatasetSchemaLoader) Clear(key id.DatasetSchemaID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *DatasetSchemaLoader) unsafeSet(key id.DatasetSchemaID, value *graphql.DatasetSchema) { + if l.cache == nil { + l.cache = map[id.DatasetSchemaID]*graphql.DatasetSchema{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *datasetSchemaLoaderBatch) keyIndex(l *DatasetSchemaLoader, key id.DatasetSchemaID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *datasetSchemaLoaderBatch) startTimer(l *DatasetSchemaLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *datasetSchemaLoaderBatch) end(l *DatasetSchemaLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/layergrouploader_gen.go b/internal/graphql/dataloader/layergrouploader_gen.go new file mode 100644 index 000000000..1c21aed1d --- /dev/null +++ b/internal/graphql/dataloader/layergrouploader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// LayerGroupLoaderConfig captures the config to create a new LayerGroupLoader +type LayerGroupLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewLayerGroupLoader creates a new LayerGroupLoader given a fetch, wait, and maxBatch +func NewLayerGroupLoader(config LayerGroupLoaderConfig) *LayerGroupLoader { + return &LayerGroupLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// LayerGroupLoader batches and caches requests +type LayerGroupLoader struct { + // this method provides the data for the loader + fetch func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.LayerID]*graphql.LayerGroup + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *layerGroupLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type layerGroupLoaderBatch struct { + keys []id.LayerID + data []*graphql.LayerGroup + error []error + closing bool + done chan struct{} +} + +// Load a LayerGroup by key, batching and caching will be applied automatically +func (l *LayerGroupLoader) Load(key id.LayerID) (*graphql.LayerGroup, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a LayerGroup. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerGroup, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.LayerGroup, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &layerGroupLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.LayerGroup, error) { + <-batch.done + + var data *graphql.LayerGroup + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *LayerGroupLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { + results := make([]func() (*graphql.LayerGroup, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + layerGroups := make([]*graphql.LayerGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerGroups[i], errors[i] = thunk() + } + return layerGroups, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a LayerGroups. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *LayerGroupLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.LayerGroup, []error) { + results := make([]func() (*graphql.LayerGroup, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.LayerGroup, []error) { + layerGroups := make([]*graphql.LayerGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerGroups[i], errors[i] = thunk() + } + return layerGroups, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *LayerGroupLoader) Prime(key id.LayerID, value *graphql.LayerGroup) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *LayerGroupLoader) Clear(key id.LayerID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *LayerGroupLoader) unsafeSet(key id.LayerID, value *graphql.LayerGroup) { + if l.cache == nil { + l.cache = map[id.LayerID]*graphql.LayerGroup{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *layerGroupLoaderBatch) keyIndex(l *LayerGroupLoader, key id.LayerID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *layerGroupLoaderBatch) startTimer(l *LayerGroupLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *layerGroupLoaderBatch) end(l *LayerGroupLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/layeritemloader_gen.go b/internal/graphql/dataloader/layeritemloader_gen.go new file mode 100644 index 000000000..3150226c6 --- /dev/null +++ b/internal/graphql/dataloader/layeritemloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// LayerItemLoaderConfig captures the config to create a new LayerItemLoader +type LayerItemLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.LayerID) ([]*graphql.LayerItem, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewLayerItemLoader creates a new LayerItemLoader given a fetch, wait, and maxBatch +func NewLayerItemLoader(config LayerItemLoaderConfig) *LayerItemLoader { + return &LayerItemLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// LayerItemLoader batches and caches requests +type LayerItemLoader struct { + // this method provides the data for the loader + fetch func(keys []id.LayerID) ([]*graphql.LayerItem, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.LayerID]*graphql.LayerItem + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *layerItemLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type layerItemLoaderBatch struct { + keys []id.LayerID + data []*graphql.LayerItem + error []error + closing bool + done chan struct{} +} + +// Load a LayerItem by key, batching and caching will be applied automatically +func (l *LayerItemLoader) Load(key id.LayerID) (*graphql.LayerItem, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a LayerItem. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerItem, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.LayerItem, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &layerItemLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.LayerItem, error) { + <-batch.done + + var data *graphql.LayerItem + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *LayerItemLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerItem, []error) { + results := make([]func() (*graphql.LayerItem, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + layerItems := make([]*graphql.LayerItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerItems[i], errors[i] = thunk() + } + return layerItems, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a LayerItems. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *LayerItemLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.LayerItem, []error) { + results := make([]func() (*graphql.LayerItem, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.LayerItem, []error) { + layerItems := make([]*graphql.LayerItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layerItems[i], errors[i] = thunk() + } + return layerItems, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *LayerItemLoader) Prime(key id.LayerID, value *graphql.LayerItem) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *LayerItemLoader) Clear(key id.LayerID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *LayerItemLoader) unsafeSet(key id.LayerID, value *graphql.LayerItem) { + if l.cache == nil { + l.cache = map[id.LayerID]*graphql.LayerItem{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *layerItemLoaderBatch) keyIndex(l *LayerItemLoader, key id.LayerID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *layerItemLoaderBatch) startTimer(l *LayerItemLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *layerItemLoaderBatch) end(l *LayerItemLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/layerloader_gen.go b/internal/graphql/dataloader/layerloader_gen.go new file mode 100644 index 000000000..b83418930 --- /dev/null +++ b/internal/graphql/dataloader/layerloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+
+package dataloader
+
+import (
+	"sync"
+	"time"
+
+	"github.com/reearth/reearth-backend/internal/adapter/graphql"
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+// LayerLoaderConfig captures the config to create a new LayerLoader
+type LayerLoaderConfig struct {
+	// Fetch is a method that provides the data for the loader
+	Fetch func(keys []id.LayerID) ([]*graphql.Layer, []error)
+
+	// Wait is how long to wait before sending a batch
+	Wait time.Duration
+
+	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
+	MaxBatch int
+}
+
+// NewLayerLoader creates a new LayerLoader given a fetch, wait, and maxBatch
+func NewLayerLoader(config LayerLoaderConfig) *LayerLoader {
+	return &LayerLoader{
+		fetch:    config.Fetch,
+		wait:     config.Wait,
+		maxBatch: config.MaxBatch,
+	}
+}
+
+// LayerLoader batches and caches requests
+type LayerLoader struct {
+	// this method provides the data for the loader
+	fetch func(keys []id.LayerID) ([]*graphql.Layer, []error)
+
+	// how long to wait before sending a batch
+	wait time.Duration
+
+	// this will limit the maximum number of keys to send in one batch, 0 = no limit
+	maxBatch int
+
+	// INTERNAL
+
+	// lazily created cache
+	cache map[id.LayerID]*graphql.Layer
+
+	// the current batch. keys will continue to be collected until timeout is hit,
+	// then everything will be sent to the fetch method and out to the listeners
+	batch *layerLoaderBatch
+
+	// mutex to prevent races
+	mu sync.Mutex
+}
+
+type layerLoaderBatch struct {
+	keys    []id.LayerID
+	data    []*graphql.Layer
+	error   []error
+	closing bool
+	done    chan struct{}
+}
+
+// Load a Layer by key, batching and caching will be applied automatically
+func (l *LayerLoader) Load(key id.LayerID) (*graphql.Layer, error) {
+	return l.LoadThunk(key)()
+}
+
+// LoadThunk returns a function that when called will block waiting for a Layer.
+// This method should be used if you want one goroutine to make requests to many
+// different data loaders without blocking until the thunk is called.
+func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*graphql.Layer, error) {
+	l.mu.Lock()
+	if it, ok := l.cache[key]; ok {
+		l.mu.Unlock()
+		return func() (*graphql.Layer, error) {
+			return it, nil
+		}
+	}
+	if l.batch == nil {
+		l.batch = &layerLoaderBatch{done: make(chan struct{})}
+	}
+	batch := l.batch
+	pos := batch.keyIndex(l, key)
+	l.mu.Unlock()
+
+	return func() (*graphql.Layer, error) {
+		<-batch.done
+
+		var data *graphql.Layer
+		if pos < len(batch.data) {
+			data = batch.data[pos]
+		}
+
+		var err error
+		// it's convenient to be able to return a single error for everything
+		if len(batch.error) == 1 {
+			err = batch.error[0]
+		} else if batch.error != nil {
+			err = batch.error[pos]
+		}
+
+		if err == nil {
+			l.mu.Lock()
+			l.unsafeSet(key, data)
+			l.mu.Unlock()
+		}
+
+		return data, err
+	}
+}
+
+// LoadAll fetches many keys at once. It will be broken into appropriate sized
+// sub batches depending on how the loader is configured
+func (l *LayerLoader) LoadAll(keys []id.LayerID) ([]*graphql.Layer, []error) {
+	results := make([]func() (*graphql.Layer, error), len(keys))
+
+	for i, key := range keys {
+		results[i] = l.LoadThunk(key)
+	}
+
+	layers := make([]*graphql.Layer, len(keys))
+	errors := make([]error, len(keys))
+	for i, thunk := range results {
+		layers[i], errors[i] = thunk()
+	}
+	return layers, errors
+}
+
+// LoadAllThunk returns a function that when called will block waiting for many Layers.
+// This method should be used if you want one goroutine to make requests to many
+// different data loaders without blocking until the thunk is called.
+func (l *LayerLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.Layer, []error) { + results := make([]func() (*graphql.Layer, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.Layer, []error) { + layers := make([]*graphql.Layer, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + layers[i], errors[i] = thunk() + } + return layers, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *LayerLoader) Prime(key id.LayerID, value *graphql.Layer) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *LayerLoader) Clear(key id.LayerID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *LayerLoader) unsafeSet(key id.LayerID, value *graphql.Layer) { + if l.cache == nil { + l.cache = map[id.LayerID]*graphql.Layer{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *layerLoaderBatch) keyIndex(l *LayerLoader, key id.LayerID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *layerLoaderBatch) startTimer(l *LayerLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + 
// we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *layerLoaderBatch) end(l *LayerLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/loader.tmpl b/internal/graphql/dataloader/loader.tmpl new file mode 100644 index 000000000..5268e4c11 --- /dev/null +++ b/internal/graphql/dataloader/loader.tmpl @@ -0,0 +1,55 @@ +package {{.PackageName}} +{{$controller := ""}}{{if .Flags.controller}}{{$controller = index .Flags.controller 0}}{{end}} +{{$id := ""}}{{if .Flags.id}}{{$id = index .Flags.id 0}}{{end}} +{{$method := "Fetch"}}{{if .Flags.method}}{{$method = index .Flags.method 0}}{{end}} +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) +{{range .Flags.m}} +type {{camel .}}DataLoader interface { + Load(id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) (*graphql.{{camel .}}, error) + LoadAll([]id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) +} + +func new{{camel .}}(ctx context.Context, c *graphql.{{if $controller}}{{$controller}}{{else}}{{camel .}}{{end}}Controller, o *usecase.Operator) *{{camel .}}Loader { + return New{{camel .}}Loader({{camel .}}LoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) { + return c.{{$method}}(ctx, keys, o) + }, + }) +} + +func newOrdinary{{camel .}}(ctx context.Context, c *graphql.{{if $controller}}{{$controller}}{{else}}{{camel .}}{{end}}Controller, o *usecase.Operator) {{camel .}}DataLoader { + return &ordinary{{camel .}}Loader{ + fetch: func(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) { + return c.{{$method}}(ctx, 
keys, o) + }, + } +} + +type ordinary{{camel .}}Loader struct { + fetch func(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) +} + +func (l *ordinary{{camel .}}Loader) Load(key id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) (*graphql.{{camel .}}, error) { + res, errs := l.fetch([]id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinary{{camel .}}Loader) LoadAll(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) { + return l.fetch(keys) +} +{{end}} diff --git a/internal/graphql/dataloader/loader_dataset_schema_gen.go b/internal/graphql/dataloader/loader_dataset_schema_gen.go new file mode 100644 index 000000000..2d90e275f --- /dev/null +++ b/internal/graphql/dataloader/loader_dataset_schema_gen.go @@ -0,0 +1,54 @@ +// Code generated by gen, DO NOT EDIT. + +package dataloader + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetSchemaDataLoader interface { + Load(id.DatasetSchemaID) (*graphql.DatasetSchema, error) + LoadAll([]id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) +} + +func newDatasetSchema(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) *DatasetSchemaLoader { + return NewDatasetSchemaLoader(DatasetSchemaLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { + return c.FetchSchema(ctx, keys, o) + }, + }) +} + +func newOrdinaryDatasetSchema(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) DatasetSchemaDataLoader { + return &ordinaryDatasetSchemaLoader{ + fetch: func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { + return 
c.FetchSchema(ctx, keys, o) + }, + } +} + +type ordinaryDatasetSchemaLoader struct { + fetch func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) +} + +func (l *ordinaryDatasetSchemaLoader) Load(key id.DatasetSchemaID) (*graphql.DatasetSchema, error) { + res, errs := l.fetch([]id.DatasetSchemaID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryDatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { + return l.fetch(keys) +} diff --git a/internal/graphql/dataloader/loader_gen.go b/internal/graphql/dataloader/loader_gen.go new file mode 100644 index 000000000..ad97fd004 --- /dev/null +++ b/internal/graphql/dataloader/loader_gen.go @@ -0,0 +1,348 @@ +// Code generated by gen, DO NOT EDIT. + +package dataloader + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetDataLoader interface { + Load(id.DatasetID) (*graphql.Dataset, error) + LoadAll([]id.DatasetID) ([]*graphql.Dataset, []error) +} + +func newDataset(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) *DatasetLoader { + return NewDatasetLoader(DatasetLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.DatasetID) ([]*graphql.Dataset, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryDataset(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) DatasetDataLoader { + return &ordinaryDatasetLoader{ + fetch: func(keys []id.DatasetID) ([]*graphql.Dataset, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryDatasetLoader struct { + fetch func(keys []id.DatasetID) ([]*graphql.Dataset, []error) +} + +func (l *ordinaryDatasetLoader) Load(key id.DatasetID) (*graphql.Dataset, error) { + res, errs := 
l.fetch([]id.DatasetID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryDatasetLoader) LoadAll(keys []id.DatasetID) ([]*graphql.Dataset, []error) { + return l.fetch(keys) +} + +type LayerDataLoader interface { + Load(id.LayerID) (*graphql.Layer, error) + LoadAll([]id.LayerID) ([]*graphql.Layer, []error) +} + +func newLayer(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) *LayerLoader { + return NewLayerLoader(LayerLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.LayerID) ([]*graphql.Layer, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryLayer(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) LayerDataLoader { + return &ordinaryLayerLoader{ + fetch: func(keys []id.LayerID) ([]*graphql.Layer, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryLayerLoader struct { + fetch func(keys []id.LayerID) ([]*graphql.Layer, []error) +} + +func (l *ordinaryLayerLoader) Load(key id.LayerID) (*graphql.Layer, error) { + res, errs := l.fetch([]id.LayerID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerLoader) LoadAll(keys []id.LayerID) ([]*graphql.Layer, []error) { + return l.fetch(keys) +} + +type PluginDataLoader interface { + Load(id.PluginID) (*graphql.Plugin, error) + LoadAll([]id.PluginID) ([]*graphql.Plugin, []error) +} + +func newPlugin(ctx context.Context, c *graphql.PluginController, o *usecase.Operator) *PluginLoader { + return NewPluginLoader(PluginLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.PluginID) ([]*graphql.Plugin, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryPlugin(ctx context.Context, c *graphql.PluginController, o *usecase.Operator) PluginDataLoader { + return &ordinaryPluginLoader{ + fetch: 
func(keys []id.PluginID) ([]*graphql.Plugin, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryPluginLoader struct { + fetch func(keys []id.PluginID) ([]*graphql.Plugin, []error) +} + +func (l *ordinaryPluginLoader) Load(key id.PluginID) (*graphql.Plugin, error) { + res, errs := l.fetch([]id.PluginID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPluginLoader) LoadAll(keys []id.PluginID) ([]*graphql.Plugin, []error) { + return l.fetch(keys) +} + +type ProjectDataLoader interface { + Load(id.ProjectID) (*graphql.Project, error) + LoadAll([]id.ProjectID) ([]*graphql.Project, []error) +} + +func newProject(ctx context.Context, c *graphql.ProjectController, o *usecase.Operator) *ProjectLoader { + return NewProjectLoader(ProjectLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.ProjectID) ([]*graphql.Project, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryProject(ctx context.Context, c *graphql.ProjectController, o *usecase.Operator) ProjectDataLoader { + return &ordinaryProjectLoader{ + fetch: func(keys []id.ProjectID) ([]*graphql.Project, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryProjectLoader struct { + fetch func(keys []id.ProjectID) ([]*graphql.Project, []error) +} + +func (l *ordinaryProjectLoader) Load(key id.ProjectID) (*graphql.Project, error) { + res, errs := l.fetch([]id.ProjectID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryProjectLoader) LoadAll(keys []id.ProjectID) ([]*graphql.Project, []error) { + return l.fetch(keys) +} + +type PropertyDataLoader interface { + Load(id.PropertyID) (*graphql.Property, error) + LoadAll([]id.PropertyID) ([]*graphql.Property, []error) +} + +func newProperty(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) 
*PropertyLoader { + return NewPropertyLoader(PropertyLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.PropertyID) ([]*graphql.Property, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryProperty(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) PropertyDataLoader { + return &ordinaryPropertyLoader{ + fetch: func(keys []id.PropertyID) ([]*graphql.Property, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryPropertyLoader struct { + fetch func(keys []id.PropertyID) ([]*graphql.Property, []error) +} + +func (l *ordinaryPropertyLoader) Load(key id.PropertyID) (*graphql.Property, error) { + res, errs := l.fetch([]id.PropertyID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPropertyLoader) LoadAll(keys []id.PropertyID) ([]*graphql.Property, []error) { + return l.fetch(keys) +} + +type SceneDataLoader interface { + Load(id.SceneID) (*graphql.Scene, error) + LoadAll([]id.SceneID) ([]*graphql.Scene, []error) +} + +func newScene(ctx context.Context, c *graphql.SceneController, o *usecase.Operator) *SceneLoader { + return NewSceneLoader(SceneLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.SceneID) ([]*graphql.Scene, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryScene(ctx context.Context, c *graphql.SceneController, o *usecase.Operator) SceneDataLoader { + return &ordinarySceneLoader{ + fetch: func(keys []id.SceneID) ([]*graphql.Scene, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinarySceneLoader struct { + fetch func(keys []id.SceneID) ([]*graphql.Scene, []error) +} + +func (l *ordinarySceneLoader) Load(key id.SceneID) (*graphql.Scene, error) { + res, errs := l.fetch([]id.SceneID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + 
+func (l *ordinarySceneLoader) LoadAll(keys []id.SceneID) ([]*graphql.Scene, []error) { + return l.fetch(keys) +} + +type TeamDataLoader interface { + Load(id.TeamID) (*graphql.Team, error) + LoadAll([]id.TeamID) ([]*graphql.Team, []error) +} + +func newTeam(ctx context.Context, c *graphql.TeamController, o *usecase.Operator) *TeamLoader { + return NewTeamLoader(TeamLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.TeamID) ([]*graphql.Team, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryTeam(ctx context.Context, c *graphql.TeamController, o *usecase.Operator) TeamDataLoader { + return &ordinaryTeamLoader{ + fetch: func(keys []id.TeamID) ([]*graphql.Team, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryTeamLoader struct { + fetch func(keys []id.TeamID) ([]*graphql.Team, []error) +} + +func (l *ordinaryTeamLoader) Load(key id.TeamID) (*graphql.Team, error) { + res, errs := l.fetch([]id.TeamID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryTeamLoader) LoadAll(keys []id.TeamID) ([]*graphql.Team, []error) { + return l.fetch(keys) +} + +type UserDataLoader interface { + Load(id.UserID) (*graphql.User, error) + LoadAll([]id.UserID) ([]*graphql.User, []error) +} + +func newUser(ctx context.Context, c *graphql.UserController, o *usecase.Operator) *UserLoader { + return NewUserLoader(UserLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.UserID) ([]*graphql.User, []error) { + return c.Fetch(ctx, keys, o) + }, + }) +} + +func newOrdinaryUser(ctx context.Context, c *graphql.UserController, o *usecase.Operator) UserDataLoader { + return &ordinaryUserLoader{ + fetch: func(keys []id.UserID) ([]*graphql.User, []error) { + return c.Fetch(ctx, keys, o) + }, + } +} + +type ordinaryUserLoader struct { + fetch func(keys []id.UserID) ([]*graphql.User, []error) +} + +func (l 
*ordinaryUserLoader) Load(key id.UserID) (*graphql.User, error) { + res, errs := l.fetch([]id.UserID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryUserLoader) LoadAll(keys []id.UserID) ([]*graphql.User, []error) { + return l.fetch(keys) +} diff --git a/internal/graphql/dataloader/loader_layer_group_gen.go b/internal/graphql/dataloader/loader_layer_group_gen.go new file mode 100644 index 000000000..9724ccc1f --- /dev/null +++ b/internal/graphql/dataloader/loader_layer_group_gen.go @@ -0,0 +1,54 @@ +// Code generated by gen, DO NOT EDIT. + +package dataloader + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +type LayerGroupDataLoader interface { + Load(id.LayerID) (*graphql.LayerGroup, error) + LoadAll([]id.LayerID) ([]*graphql.LayerGroup, []error) +} + +func newLayerGroup(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) *LayerGroupLoader { + return NewLayerGroupLoader(LayerGroupLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { + return c.FetchGroup(ctx, keys, o) + }, + }) +} + +func newOrdinaryLayerGroup(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) LayerGroupDataLoader { + return &ordinaryLayerGroupLoader{ + fetch: func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { + return c.FetchGroup(ctx, keys, o) + }, + } +} + +type ordinaryLayerGroupLoader struct { + fetch func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) +} + +func (l *ordinaryLayerGroupLoader) Load(key id.LayerID) (*graphql.LayerGroup, error) { + res, errs := l.fetch([]id.LayerID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l 
*ordinaryLayerGroupLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { + return l.fetch(keys) +} diff --git a/internal/graphql/dataloader/loader_layer_item_gen.go b/internal/graphql/dataloader/loader_layer_item_gen.go new file mode 100644 index 000000000..50e593848 --- /dev/null +++ b/internal/graphql/dataloader/loader_layer_item_gen.go @@ -0,0 +1,54 @@ +// Code generated by gen, DO NOT EDIT. + +package dataloader + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +type LayerItemDataLoader interface { + Load(id.LayerID) (*graphql.LayerItem, error) + LoadAll([]id.LayerID) ([]*graphql.LayerItem, []error) +} + +func newLayerItem(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) *LayerItemLoader { + return NewLayerItemLoader(LayerItemLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.LayerID) ([]*graphql.LayerItem, []error) { + return c.FetchItem(ctx, keys, o) + }, + }) +} + +func newOrdinaryLayerItem(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) LayerItemDataLoader { + return &ordinaryLayerItemLoader{ + fetch: func(keys []id.LayerID) ([]*graphql.LayerItem, []error) { + return c.FetchItem(ctx, keys, o) + }, + } +} + +type ordinaryLayerItemLoader struct { + fetch func(keys []id.LayerID) ([]*graphql.LayerItem, []error) +} + +func (l *ordinaryLayerItemLoader) Load(key id.LayerID) (*graphql.LayerItem, error) { + res, errs := l.fetch([]id.LayerID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerItemLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerItem, []error) { + return l.fetch(keys) +} diff --git a/internal/graphql/dataloader/loader_property_schema_gen.go b/internal/graphql/dataloader/loader_property_schema_gen.go new file mode 
100644 index 000000000..c7ec6113a --- /dev/null +++ b/internal/graphql/dataloader/loader_property_schema_gen.go @@ -0,0 +1,54 @@ +// Code generated by gen, DO NOT EDIT. + +package dataloader + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +type PropertySchemaDataLoader interface { + Load(id.PropertySchemaID) (*graphql.PropertySchema, error) + LoadAll([]id.PropertySchemaID) ([]*graphql.PropertySchema, []error) +} + +func newPropertySchema(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) *PropertySchemaLoader { + return NewPropertySchemaLoader(PropertySchemaLoaderConfig{ + Wait: 1 * time.Millisecond, + MaxBatch: 100, + Fetch: func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { + return c.FetchSchema(ctx, keys, o) + }, + }) +} + +func newOrdinaryPropertySchema(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) PropertySchemaDataLoader { + return &ordinaryPropertySchemaLoader{ + fetch: func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { + return c.FetchSchema(ctx, keys, o) + }, + } +} + +type ordinaryPropertySchemaLoader struct { + fetch func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) +} + +func (l *ordinaryPropertySchemaLoader) Load(key id.PropertySchemaID) (*graphql.PropertySchema, error) { + res, errs := l.fetch([]id.PropertySchemaID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { + return l.fetch(keys) +} diff --git a/internal/graphql/dataloader/pluginloader_gen.go b/internal/graphql/dataloader/pluginloader_gen.go new file mode 100644 index 000000000..36d763982 --- /dev/null +++ 
b/internal/graphql/dataloader/pluginloader_gen.go
@@ -0,0 +1,225 @@
+// Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
+
+package dataloader
+
+import (
+	"sync"
+	"time"
+
+	"github.com/reearth/reearth-backend/internal/adapter/graphql"
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+// PluginLoaderConfig captures the config to create a new PluginLoader
+type PluginLoaderConfig struct {
+	// Fetch is a method that provides the data for the loader
+	Fetch func(keys []id.PluginID) ([]*graphql.Plugin, []error)
+
+	// Wait is how long to wait before sending a batch
+	Wait time.Duration
+
+	// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
+	MaxBatch int
+}
+
+// NewPluginLoader creates a new PluginLoader given a fetch, wait, and maxBatch
+func NewPluginLoader(config PluginLoaderConfig) *PluginLoader {
+	return &PluginLoader{
+		fetch:    config.Fetch,
+		wait:     config.Wait,
+		maxBatch: config.MaxBatch,
+	}
+}
+
+// PluginLoader batches and caches requests
+type PluginLoader struct {
+	// this method provides the data for the loader
+	fetch func(keys []id.PluginID) ([]*graphql.Plugin, []error)
+
+	// how long to wait before sending a batch
+	wait time.Duration
+
+	// this will limit the maximum number of keys to send in one batch, 0 = no limit
+	maxBatch int
+
+	// INTERNAL
+
+	// lazily created cache
+	cache map[id.PluginID]*graphql.Plugin
+
+	// the current batch. keys will continue to be collected until timeout is hit,
+	// then everything will be sent to the fetch method and out to the listeners
+	batch *pluginLoaderBatch
+
+	// mutex to prevent races
+	mu sync.Mutex
+}
+
+type pluginLoaderBatch struct {
+	keys    []id.PluginID
+	data    []*graphql.Plugin
+	error   []error
+	closing bool
+	done    chan struct{}
+}
+
+// Load a Plugin by key, batching and caching will be applied automatically
+func (l *PluginLoader) Load(key id.PluginID) (*graphql.Plugin, error) {
+	return l.LoadThunk(key)()
+}
+
+// LoadThunk returns a function that when called will block waiting for a Plugin.
+// This method should be used if you want one goroutine to make requests to many
+// different data loaders without blocking until the thunk is called.
+func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*graphql.Plugin, error) {
+	l.mu.Lock()
+	if it, ok := l.cache[key]; ok {
+		l.mu.Unlock()
+		return func() (*graphql.Plugin, error) {
+			return it, nil
+		}
+	}
+	if l.batch == nil {
+		l.batch = &pluginLoaderBatch{done: make(chan struct{})}
+	}
+	batch := l.batch
+	pos := batch.keyIndex(l, key)
+	l.mu.Unlock()
+
+	return func() (*graphql.Plugin, error) {
+		<-batch.done
+
+		var data *graphql.Plugin
+		if pos < len(batch.data) {
+			data = batch.data[pos]
+		}
+
+		var err error
+		// it's convenient to be able to return a single error for everything
+		if len(batch.error) == 1 {
+			err = batch.error[0]
+		} else if batch.error != nil {
+			err = batch.error[pos]
+		}
+
+		if err == nil {
+			l.mu.Lock()
+			l.unsafeSet(key, data)
+			l.mu.Unlock()
+		}
+
+		return data, err
+	}
+}
+
+// LoadAll fetches many keys at once. It will be broken into appropriate sized
+// sub batches depending on how the loader is configured
+func (l *PluginLoader) LoadAll(keys []id.PluginID) ([]*graphql.Plugin, []error) {
+	results := make([]func() (*graphql.Plugin, error), len(keys))
+
+	for i, key := range keys {
+		results[i] = l.LoadThunk(key)
+	}
+
+	plugins := make([]*graphql.Plugin, len(keys))
+	errors := make([]error, len(keys))
+	for i, thunk := range results {
+		plugins[i], errors[i] = thunk()
+	}
+	return plugins, errors
+}
+
+// LoadAllThunk returns a function that when called will block waiting for many Plugins.
+// This method should be used if you want one goroutine to make requests to many
+// different data loaders without blocking until the thunk is called.
+func (l *PluginLoader) LoadAllThunk(keys []id.PluginID) func() ([]*graphql.Plugin, []error) {
+	results := make([]func() (*graphql.Plugin, error), len(keys))
+	for i, key := range keys {
+		results[i] = l.LoadThunk(key)
+	}
+	return func() ([]*graphql.Plugin, []error) {
+		plugins := make([]*graphql.Plugin, len(keys))
+		errors := make([]error, len(keys))
+		for i, thunk := range results {
+			plugins[i], errors[i] = thunk()
+		}
+		return plugins, errors
+	}
+}
+
+// Prime the cache with the provided key and value. If the key already exists, no change is made
+// and false is returned.
+// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
+func (l *PluginLoader) Prime(key id.PluginID, value *graphql.Plugin) bool {
+	l.mu.Lock()
+	var found bool
+	if _, found = l.cache[key]; !found {
+		// make a copy when writing to the cache, it's easy to pass a pointer in from a loop var
+		// and end up with the whole cache pointing to the same value.
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *PluginLoader) Clear(key id.PluginID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *PluginLoader) unsafeSet(key id.PluginID, value *graphql.Plugin) { + if l.cache == nil { + l.cache = map[id.PluginID]*graphql.Plugin{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *pluginLoaderBatch) keyIndex(l *PluginLoader, key id.PluginID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *pluginLoaderBatch) startTimer(l *PluginLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *pluginLoaderBatch) end(l *PluginLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/projectloader_gen.go b/internal/graphql/dataloader/projectloader_gen.go new file mode 100644 index 000000000..34daabef8 --- /dev/null +++ b/internal/graphql/dataloader/projectloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// ProjectLoaderConfig captures the config to create a new ProjectLoader +type ProjectLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.ProjectID) ([]*graphql.Project, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewProjectLoader creates a new ProjectLoader given a fetch, wait, and maxBatch +func NewProjectLoader(config ProjectLoaderConfig) *ProjectLoader { + return &ProjectLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// ProjectLoader batches and caches requests +type ProjectLoader struct { + // this method provides the data for the loader + fetch func(keys []id.ProjectID) ([]*graphql.Project, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.ProjectID]*graphql.Project + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *projectLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type projectLoaderBatch struct { + keys []id.ProjectID + data []*graphql.Project + error []error + closing bool + done chan struct{} +} + +// Load a Project by key, batching and caching will be applied automatically +func (l *ProjectLoader) Load(key id.ProjectID) (*graphql.Project, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Project. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*graphql.Project, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.Project, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &projectLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.Project, error) { + <-batch.done + + var data *graphql.Project + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *ProjectLoader) LoadAll(keys []id.ProjectID) ([]*graphql.Project, []error) { + results := make([]func() (*graphql.Project, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + projects := make([]*graphql.Project, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + projects[i], errors[i] = thunk() + } + return projects, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Projects. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *ProjectLoader) LoadAllThunk(keys []id.ProjectID) func() ([]*graphql.Project, []error) { + results := make([]func() (*graphql.Project, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.Project, []error) { + projects := make([]*graphql.Project, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + projects[i], errors[i] = thunk() + } + return projects, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *ProjectLoader) Prime(key id.ProjectID, value *graphql.Project) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *ProjectLoader) Clear(key id.ProjectID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *ProjectLoader) unsafeSet(key id.ProjectID, value *graphql.Project) { + if l.cache == nil { + l.cache = map[id.ProjectID]*graphql.Project{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *projectLoaderBatch) keyIndex(l *ProjectLoader, key id.ProjectID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *projectLoaderBatch) startTimer(l 
*ProjectLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *projectLoaderBatch) end(l *ProjectLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/propertyloader_gen.go b/internal/graphql/dataloader/propertyloader_gen.go new file mode 100644 index 000000000..b20777acf --- /dev/null +++ b/internal/graphql/dataloader/propertyloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// PropertyLoaderConfig captures the config to create a new PropertyLoader +type PropertyLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.PropertyID) ([]*graphql.Property, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewPropertyLoader creates a new PropertyLoader given a fetch, wait, and maxBatch +func NewPropertyLoader(config PropertyLoaderConfig) *PropertyLoader { + return &PropertyLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// PropertyLoader batches and caches requests +type PropertyLoader struct { + // this method provides the data for the loader + fetch func(keys []id.PropertyID) ([]*graphql.Property, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.PropertyID]*graphql.Property + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *propertyLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type propertyLoaderBatch struct { + keys []id.PropertyID + data []*graphql.Property + error []error + closing bool + done chan struct{} +} + +// Load a Property by key, batching and caching will be applied automatically +func (l *PropertyLoader) Load(key id.PropertyID) (*graphql.Property, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Property. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*graphql.Property, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.Property, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &propertyLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.Property, error) { + <-batch.done + + var data *graphql.Property + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *PropertyLoader) LoadAll(keys []id.PropertyID) ([]*graphql.Property, []error) { + results := make([]func() (*graphql.Property, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + propertys := make([]*graphql.Property, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertys[i], errors[i] = thunk() + } + return propertys, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Propertys. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertyLoader) LoadAllThunk(keys []id.PropertyID) func() ([]*graphql.Property, []error) { + results := make([]func() (*graphql.Property, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.Property, []error) { + propertys := make([]*graphql.Property, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertys[i], errors[i] = thunk() + } + return propertys, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *PropertyLoader) Prime(key id.PropertyID, value *graphql.Property) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *PropertyLoader) Clear(key id.PropertyID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *PropertyLoader) unsafeSet(key id.PropertyID, value *graphql.Property) { + if l.cache == nil { + l.cache = map[id.PropertyID]*graphql.Property{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *propertyLoaderBatch) keyIndex(l *PropertyLoader, key id.PropertyID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *propertyLoaderBatch) startTimer(l *PropertyLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *propertyLoaderBatch) end(l *PropertyLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/propertyschemaloader_gen.go b/internal/graphql/dataloader/propertyschemaloader_gen.go new file mode 100644 index 000000000..5473685b4 --- /dev/null +++ b/internal/graphql/dataloader/propertyschemaloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// PropertySchemaLoaderConfig captures the config to create a new PropertySchemaLoader +type PropertySchemaLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewPropertySchemaLoader creates a new PropertySchemaLoader given a fetch, wait, and maxBatch +func NewPropertySchemaLoader(config PropertySchemaLoaderConfig) *PropertySchemaLoader { + return &PropertySchemaLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// PropertySchemaLoader batches and caches requests +type PropertySchemaLoader struct { + // this method provides the data for the loader + fetch func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.PropertySchemaID]*graphql.PropertySchema + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *propertySchemaLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type propertySchemaLoaderBatch struct { + keys []id.PropertySchemaID + data []*graphql.PropertySchema + error []error + closing bool + done chan struct{} +} + +// Load a PropertySchema by key, batching and caching will be applied automatically +func (l *PropertySchemaLoader) Load(key id.PropertySchemaID) (*graphql.PropertySchema, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a PropertySchema. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*graphql.PropertySchema, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.PropertySchema, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &propertySchemaLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.PropertySchema, error) { + <-batch.done + + var data *graphql.PropertySchema + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *PropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { + results := make([]func() (*graphql.PropertySchema, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + propertySchemas := make([]*graphql.PropertySchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertySchemas[i], errors[i] = thunk() + } + return propertySchemas, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a PropertySchemas. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *PropertySchemaLoader) LoadAllThunk(keys []id.PropertySchemaID) func() ([]*graphql.PropertySchema, []error) { + results := make([]func() (*graphql.PropertySchema, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.PropertySchema, []error) { + propertySchemas := make([]*graphql.PropertySchema, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + propertySchemas[i], errors[i] = thunk() + } + return propertySchemas, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *PropertySchemaLoader) Prime(key id.PropertySchemaID, value *graphql.PropertySchema) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *PropertySchemaLoader) Clear(key id.PropertySchemaID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *PropertySchemaLoader) unsafeSet(key id.PropertySchemaID, value *graphql.PropertySchema) { + if l.cache == nil { + l.cache = map[id.PropertySchemaID]*graphql.PropertySchema{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *propertySchemaLoaderBatch) keyIndex(l *PropertySchemaLoader, key id.PropertySchemaID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *propertySchemaLoaderBatch) startTimer(l *PropertySchemaLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *propertySchemaLoaderBatch) end(l *PropertySchemaLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/sceneloader_gen.go b/internal/graphql/dataloader/sceneloader_gen.go new file mode 100644 index 000000000..1996f82cf --- /dev/null +++ b/internal/graphql/dataloader/sceneloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// SceneLoaderConfig captures the config to create a new SceneLoader +type SceneLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.SceneID) ([]*graphql.Scene, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewSceneLoader creates a new SceneLoader given a fetch, wait, and maxBatch +func NewSceneLoader(config SceneLoaderConfig) *SceneLoader { + return &SceneLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// SceneLoader batches and caches requests +type SceneLoader struct { + // this method provides the data for the loader + fetch func(keys []id.SceneID) ([]*graphql.Scene, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.SceneID]*graphql.Scene + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *sceneLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type sceneLoaderBatch struct { + keys []id.SceneID + data []*graphql.Scene + error []error + closing bool + done chan struct{} +} + +// Load a Scene by key, batching and caching will be applied automatically +func (l *SceneLoader) Load(key id.SceneID) (*graphql.Scene, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Scene. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*graphql.Scene, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.Scene, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &sceneLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.Scene, error) { + <-batch.done + + var data *graphql.Scene + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *SceneLoader) LoadAll(keys []id.SceneID) ([]*graphql.Scene, []error) { + results := make([]func() (*graphql.Scene, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + scenes := make([]*graphql.Scene, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + scenes[i], errors[i] = thunk() + } + return scenes, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Scenes. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *SceneLoader) LoadAllThunk(keys []id.SceneID) func() ([]*graphql.Scene, []error) { + results := make([]func() (*graphql.Scene, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.Scene, []error) { + scenes := make([]*graphql.Scene, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + scenes[i], errors[i] = thunk() + } + return scenes, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *SceneLoader) Prime(key id.SceneID, value *graphql.Scene) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *SceneLoader) Clear(key id.SceneID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *SceneLoader) unsafeSet(key id.SceneID, value *graphql.Scene) { + if l.cache == nil { + l.cache = map[id.SceneID]*graphql.Scene{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *sceneLoaderBatch) keyIndex(l *SceneLoader, key id.SceneID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *sceneLoaderBatch) startTimer(l *SceneLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + 
// we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *sceneLoaderBatch) end(l *SceneLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/scenelockloader_gen.go b/internal/graphql/dataloader/scenelockloader_gen.go new file mode 100644 index 000000000..810765b8e --- /dev/null +++ b/internal/graphql/dataloader/scenelockloader_gen.go @@ -0,0 +1,222 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// SceneLockLoaderConfig captures the config to create a new SceneLockLoader +type SceneLockLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.SceneID) ([]graphql.SceneLockMode, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewSceneLockLoader creates a new SceneLockLoader given a fetch, wait, and maxBatch +func NewSceneLockLoader(config SceneLockLoaderConfig) *SceneLockLoader { + return &SceneLockLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// SceneLockLoader batches and caches requests +type SceneLockLoader struct { + // this method provides the data for the loader + fetch func(keys []id.SceneID) ([]graphql.SceneLockMode, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.SceneID]graphql.SceneLockMode + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *sceneLockLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type sceneLockLoaderBatch struct { + keys []id.SceneID + data []graphql.SceneLockMode + error []error + closing bool + done chan struct{} +} + +// Load a SceneLockMode by key, batching and caching will be applied automatically +func (l *SceneLockLoader) Load(key id.SceneID) (graphql.SceneLockMode, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a SceneLockMode. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *SceneLockLoader) LoadThunk(key id.SceneID) func() (graphql.SceneLockMode, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (graphql.SceneLockMode, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &sceneLockLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (graphql.SceneLockMode, error) { + <-batch.done + + var data graphql.SceneLockMode + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *SceneLockLoader) LoadAll(keys []id.SceneID) ([]graphql.SceneLockMode, []error) { + results := make([]func() (graphql.SceneLockMode, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + sceneLockModes := make([]graphql.SceneLockMode, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + sceneLockModes[i], errors[i] = thunk() + } + return sceneLockModes, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a SceneLockModes. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *SceneLockLoader) LoadAllThunk(keys []id.SceneID) func() ([]graphql.SceneLockMode, []error) { + results := make([]func() (graphql.SceneLockMode, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]graphql.SceneLockMode, []error) { + sceneLockModes := make([]graphql.SceneLockMode, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + sceneLockModes[i], errors[i] = thunk() + } + return sceneLockModes, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
+func (l *SceneLockLoader) Prime(key id.SceneID, value graphql.SceneLockMode) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + l.unsafeSet(key, value) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *SceneLockLoader) Clear(key id.SceneID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *SceneLockLoader) unsafeSet(key id.SceneID, value graphql.SceneLockMode) { + if l.cache == nil { + l.cache = map[id.SceneID]graphql.SceneLockMode{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *sceneLockLoaderBatch) keyIndex(l *SceneLockLoader, key id.SceneID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *sceneLockLoaderBatch) startTimer(l *SceneLockLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *sceneLockLoaderBatch) end(l *SceneLockLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/teamloader_gen.go b/internal/graphql/dataloader/teamloader_gen.go new file mode 100644 index 000000000..a9cfd2a67 --- /dev/null +++ b/internal/graphql/dataloader/teamloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// TeamLoaderConfig captures the config to create a new TeamLoader +type TeamLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.TeamID) ([]*graphql.Team, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTeamLoader creates a new TeamLoader given a fetch, wait, and maxBatch +func NewTeamLoader(config TeamLoaderConfig) *TeamLoader { + return &TeamLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TeamLoader batches and caches requests +type TeamLoader struct { + // this method provides the data for the loader + fetch func(keys []id.TeamID) ([]*graphql.Team, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.TeamID]*graphql.Team + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *teamLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type teamLoaderBatch struct { + keys []id.TeamID + data []*graphql.Team + error []error + closing bool + done chan struct{} +} + +// Load a Team by key, batching and caching will be applied automatically +func (l *TeamLoader) Load(key id.TeamID) (*graphql.Team, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Team. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*graphql.Team, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.Team, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &teamLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.Team, error) { + <-batch.done + + var data *graphql.Team + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TeamLoader) LoadAll(keys []id.TeamID) ([]*graphql.Team, []error) { + results := make([]func() (*graphql.Team, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + teams := make([]*graphql.Team, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + teams[i], errors[i] = thunk() + } + return teams, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Teams. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *TeamLoader) LoadAllThunk(keys []id.TeamID) func() ([]*graphql.Team, []error) { + results := make([]func() (*graphql.Team, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.Team, []error) { + teams := make([]*graphql.Team, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + teams[i], errors[i] = thunk() + } + return teams, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TeamLoader) Prime(key id.TeamID, value *graphql.Team) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TeamLoader) Clear(key id.TeamID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TeamLoader) unsafeSet(key id.TeamID, value *graphql.Team) { + if l.cache == nil { + l.cache = map[id.TeamID]*graphql.Team{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *teamLoaderBatch) keyIndex(l *TeamLoader, key id.TeamID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *teamLoaderBatch) startTimer(l *TeamLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a 
batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *teamLoaderBatch) end(l *TeamLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/dataloader/userloader_gen.go b/internal/graphql/dataloader/userloader_gen.go new file mode 100644 index 000000000..a6d1ebff6 --- /dev/null +++ b/internal/graphql/dataloader/userloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package dataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/pkg/id" +) + +// UserLoaderConfig captures the config to create a new UserLoader +type UserLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.UserID) ([]*graphql.User, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewUserLoader creates a new UserLoader given a fetch, wait, and maxBatch +func NewUserLoader(config UserLoaderConfig) *UserLoader { + return &UserLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// UserLoader batches and caches requests +type UserLoader struct { + // this method provides the data for the loader + fetch func(keys []id.UserID) ([]*graphql.User, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.UserID]*graphql.User + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *userLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type userLoaderBatch struct { + keys []id.UserID + data []*graphql.User + error []error + closing bool + done chan struct{} +} + +// Load a User by key, batching and caching will be applied automatically +func (l *UserLoader) Load(key id.UserID) (*graphql.User, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a User. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *UserLoader) LoadThunk(key id.UserID) func() (*graphql.User, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*graphql.User, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &userLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*graphql.User, error) { + <-batch.done + + var data *graphql.User + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *UserLoader) LoadAll(keys []id.UserID) ([]*graphql.User, []error) { + results := make([]func() (*graphql.User, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + users := make([]*graphql.User, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + users[i], errors[i] = thunk() + } + return users, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Users. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *UserLoader) LoadAllThunk(keys []id.UserID) func() ([]*graphql.User, []error) { + results := make([]func() (*graphql.User, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*graphql.User, []error) { + users := make([]*graphql.User, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + users[i], errors[i] = thunk() + } + return users, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *UserLoader) Prime(key id.UserID, value *graphql.User) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *UserLoader) Clear(key id.UserID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *UserLoader) unsafeSet(key id.UserID, value *graphql.User) { + if l.cache == nil { + l.cache = map[id.UserID]*graphql.User{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *userLoaderBatch) keyIndex(l *UserLoader, key id.UserID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *userLoaderBatch) startTimer(l *UserLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *userLoaderBatch) end(l *UserLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go new file mode 100644 index 000000000..ab8cda7ca --- /dev/null +++ b/internal/graphql/generated.go @@ -0,0 +1,39527 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. 
+ +package graphql + +import ( + "bytes" + "context" + "errors" + "fmt" + "net/url" + "strconv" + "sync" + "sync/atomic" + "time" + + "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql/introspection" + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + gqlparser "github.com/vektah/gqlparser/v2" + "github.com/vektah/gqlparser/v2/ast" +) + +// region ************************** generated!.gotpl ************************** + +// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface. +func NewExecutableSchema(cfg Config) graphql.ExecutableSchema { + return &executableSchema{ + resolvers: cfg.Resolvers, + directives: cfg.Directives, + complexity: cfg.Complexity, + } +} + +type Config struct { + Resolvers ResolverRoot + Directives DirectiveRoot + Complexity ComplexityRoot +} + +type ResolverRoot interface { + Asset() AssetResolver + Dataset() DatasetResolver + DatasetField() DatasetFieldResolver + DatasetSchema() DatasetSchemaResolver + DatasetSchemaField() DatasetSchemaFieldResolver + Infobox() InfoboxResolver + InfoboxField() InfoboxFieldResolver + LayerGroup() LayerGroupResolver + LayerItem() LayerItemResolver + MergedInfoboxField() MergedInfoboxFieldResolver + MergedLayer() MergedLayerResolver + MergedProperty() MergedPropertyResolver + MergedPropertyField() MergedPropertyFieldResolver + MergedPropertyGroup() MergedPropertyGroupResolver + Mutation() MutationResolver + Plugin() PluginResolver + PluginExtension() PluginExtensionResolver + Project() ProjectResolver + Property() PropertyResolver + PropertyField() PropertyFieldResolver + PropertyFieldLink() PropertyFieldLinkResolver + PropertyGroup() PropertyGroupResolver + PropertyGroupList() PropertyGroupListResolver + PropertyLinkableFields() PropertyLinkableFieldsResolver + PropertySchemaField() PropertySchemaFieldResolver + 
PropertySchemaFieldChoice() PropertySchemaFieldChoiceResolver + PropertySchemaGroup() PropertySchemaGroupResolver + Query() QueryResolver + Scene() SceneResolver + ScenePlugin() ScenePluginResolver + SceneWidget() SceneWidgetResolver + Team() TeamResolver + TeamMember() TeamMemberResolver + User() UserResolver +} + +type DirectiveRoot struct { +} + +type ComplexityRoot struct { + AddDatasetSchemaPayload struct { + DatasetSchema func(childComplexity int) int + } + + AddDynamicDatasetPayload struct { + Dataset func(childComplexity int) int + DatasetSchema func(childComplexity int) int + } + + AddDynamicDatasetSchemaPayload struct { + DatasetSchema func(childComplexity int) int + } + + AddInfoboxFieldPayload struct { + InfoboxField func(childComplexity int) int + Layer func(childComplexity int) int + } + + AddLayerGroupPayload struct { + Index func(childComplexity int) int + Layer func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + AddLayerItemPayload struct { + Index func(childComplexity int) int + Layer func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + AddMemberToTeamPayload struct { + Team func(childComplexity int) int + } + + AddWidgetPayload struct { + Scene func(childComplexity int) int + SceneWidget func(childComplexity int) int + } + + Asset struct { + ContentType func(childComplexity int) int + CreatedAt func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + Size func(childComplexity int) int + Team func(childComplexity int) int + TeamID func(childComplexity int) int + URL func(childComplexity int) int + } + + AssetConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + AssetEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + Camera struct { + Altitude func(childComplexity 
int) int + Fov func(childComplexity int) int + Heading func(childComplexity int) int + Lat func(childComplexity int) int + Lng func(childComplexity int) int + Pitch func(childComplexity int) int + Roll func(childComplexity int) int + } + + CheckProjectAliasPayload struct { + Alias func(childComplexity int) int + Available func(childComplexity int) int + } + + CreateAssetPayload struct { + Asset func(childComplexity int) int + } + + CreateInfoboxPayload struct { + Layer func(childComplexity int) int + } + + CreateScenePayload struct { + Scene func(childComplexity int) int + } + + CreateTeamPayload struct { + Team func(childComplexity int) int + } + + Dataset struct { + Fields func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Source func(childComplexity int) int + } + + DatasetConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + DatasetEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + DatasetField struct { + Field func(childComplexity int) int + FieldID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Source func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + ValueRef func(childComplexity int) int + } + + DatasetSchema struct { + Datasets func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + Dynamic func(childComplexity int) int + Fields func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + RepresentativeField func(childComplexity int) int + RepresentativeFieldID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID 
func(childComplexity int) int + Source func(childComplexity int) int + } + + DatasetSchemaConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + DatasetSchemaEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + DatasetSchemaField struct { + ID func(childComplexity int) int + Name func(childComplexity int) int + Ref func(childComplexity int) int + RefID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Source func(childComplexity int) int + Type func(childComplexity int) int + } + + DeleteMePayload struct { + UserID func(childComplexity int) int + } + + DeleteProjectPayload struct { + ProjectID func(childComplexity int) int + } + + DeleteTeamPayload struct { + TeamID func(childComplexity int) int + } + + ImportDatasetPayload struct { + DatasetSchema func(childComplexity int) int + } + + ImportLayerPayload struct { + Layers func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + Infobox struct { + Fields func(childComplexity int) int + Layer func(childComplexity int) int + LayerID func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Merged func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + InfoboxField struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Infobox func(childComplexity int) int + Layer func(childComplexity int) int + LayerID func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Merged func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property 
func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + InstallPluginPayload struct { + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + LatLng struct { + Lat func(childComplexity int) int + Lng func(childComplexity int) int + } + + LatLngHeight struct { + Height func(childComplexity int) int + Lat func(childComplexity int) int + Lng func(childComplexity int) int + } + + LayerGroup struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Infobox func(childComplexity int) int + IsVisible func(childComplexity int) int + LayerIds func(childComplexity int) int + Layers func(childComplexity int) int + LinkedDatasetSchema func(childComplexity int) int + LinkedDatasetSchemaID func(childComplexity int) int + Name func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + Root func(childComplexity int) int + } + + LayerItem struct { + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Infobox func(childComplexity int) int + IsVisible func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Merged func(childComplexity int) int + Name func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + MergedInfobox struct { + Fields func(childComplexity int) int + Property func(childComplexity int) int + } + + MergedInfoboxField struct { + Extension func(childComplexity int) int + 
ExtensionID func(childComplexity int) int + OriginalID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + } + + MergedLayer struct { + Infobox func(childComplexity int) int + Original func(childComplexity int) int + OriginalID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Property func(childComplexity int) int + } + + MergedProperty struct { + Groups func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Original func(childComplexity int) int + OriginalID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + MergedPropertyField struct { + ActualValue func(childComplexity int) int + Field func(childComplexity int) int + FieldID func(childComplexity int) int + Links func(childComplexity int) int + Overridden func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + } + + MergedPropertyGroup struct { + Fields func(childComplexity int) int + Groups func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + Original func(childComplexity int) int + OriginalID func(childComplexity int) int + OriginalProperty func(childComplexity int) int + OriginalPropertyID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + ParentProperty func(childComplexity int) int + ParentPropertyID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + 
MoveInfoboxFieldPayload struct { + Index func(childComplexity int) int + InfoboxFieldID func(childComplexity int) int + Layer func(childComplexity int) int + } + + MoveLayerPayload struct { + FromParentLayer func(childComplexity int) int + Index func(childComplexity int) int + LayerID func(childComplexity int) int + ToParentLayer func(childComplexity int) int + } + + Mutation struct { + AddDatasetSchema func(childComplexity int, input graphql1.AddDatasetSchemaInput) int + AddDynamicDataset func(childComplexity int, input graphql1.AddDynamicDatasetInput) int + AddDynamicDatasetSchema func(childComplexity int, input graphql1.AddDynamicDatasetSchemaInput) int + AddInfoboxField func(childComplexity int, input graphql1.AddInfoboxFieldInput) int + AddLayerGroup func(childComplexity int, input graphql1.AddLayerGroupInput) int + AddLayerItem func(childComplexity int, input graphql1.AddLayerItemInput) int + AddMemberToTeam func(childComplexity int, input graphql1.AddMemberToTeamInput) int + AddPropertyItem func(childComplexity int, input graphql1.AddPropertyItemInput) int + AddWidget func(childComplexity int, input graphql1.AddWidgetInput) int + CreateAsset func(childComplexity int, input graphql1.CreateAssetInput) int + CreateInfobox func(childComplexity int, input graphql1.CreateInfoboxInput) int + CreateProject func(childComplexity int, input graphql1.CreateProjectInput) int + CreateScene func(childComplexity int, input graphql1.CreateSceneInput) int + CreateTeam func(childComplexity int, input graphql1.CreateTeamInput) int + DeleteMe func(childComplexity int, input graphql1.DeleteMeInput) int + DeleteProject func(childComplexity int, input graphql1.DeleteProjectInput) int + DeleteTeam func(childComplexity int, input graphql1.DeleteTeamInput) int + ImportDataset func(childComplexity int, input graphql1.ImportDatasetInput) int + ImportLayer func(childComplexity int, input graphql1.ImportLayerInput) int + InstallPlugin func(childComplexity int, input 
graphql1.InstallPluginInput) int + LinkDatasetToPropertyValue func(childComplexity int, input graphql1.LinkDatasetToPropertyValueInput) int + MoveInfoboxField func(childComplexity int, input graphql1.MoveInfoboxFieldInput) int + MoveLayer func(childComplexity int, input graphql1.MoveLayerInput) int + MovePropertyItem func(childComplexity int, input graphql1.MovePropertyItemInput) int + PublishProject func(childComplexity int, input graphql1.PublishProjectInput) int + RemoveAsset func(childComplexity int, input graphql1.RemoveAssetInput) int + RemoveDatasetSchema func(childComplexity int, input graphql1.RemoveDatasetSchemaInput) int + RemoveInfobox func(childComplexity int, input graphql1.RemoveInfoboxInput) int + RemoveInfoboxField func(childComplexity int, input graphql1.RemoveInfoboxFieldInput) int + RemoveLayer func(childComplexity int, input graphql1.RemoveLayerInput) int + RemoveMemberFromTeam func(childComplexity int, input graphql1.RemoveMemberFromTeamInput) int + RemoveMyAuth func(childComplexity int, input graphql1.RemoveMyAuthInput) int + RemovePropertyField func(childComplexity int, input graphql1.RemovePropertyFieldInput) int + RemovePropertyItem func(childComplexity int, input graphql1.RemovePropertyItemInput) int + RemoveWidget func(childComplexity int, input graphql1.RemoveWidgetInput) int + Signup func(childComplexity int, input graphql1.SignupInput) int + SyncDataset func(childComplexity int, input graphql1.SyncDatasetInput) int + UninstallPlugin func(childComplexity int, input graphql1.UninstallPluginInput) int + UnlinkPropertyValue func(childComplexity int, input graphql1.UnlinkPropertyValueInput) int + UpdateDatasetSchema func(childComplexity int, input graphql1.UpdateDatasetSchemaInput) int + UpdateLayer func(childComplexity int, input graphql1.UpdateLayerInput) int + UpdateMe func(childComplexity int, input graphql1.UpdateMeInput) int + UpdateMemberOfTeam func(childComplexity int, input graphql1.UpdateMemberOfTeamInput) int + UpdateProject 
func(childComplexity int, input graphql1.UpdateProjectInput) int + UpdatePropertyItems func(childComplexity int, input graphql1.UpdatePropertyItemInput) int + UpdatePropertyValue func(childComplexity int, input graphql1.UpdatePropertyValueInput) int + UpdatePropertyValueCamera func(childComplexity int, input graphql1.UpdatePropertyValueCameraInput) int + UpdatePropertyValueLatLng func(childComplexity int, input graphql1.UpdatePropertyValueLatLngInput) int + UpdatePropertyValueLatLngHeight func(childComplexity int, input graphql1.UpdatePropertyValueLatLngHeightInput) int + UpdatePropertyValueTypography func(childComplexity int, input graphql1.UpdatePropertyValueTypographyInput) int + UpdateTeam func(childComplexity int, input graphql1.UpdateTeamInput) int + UpdateWidget func(childComplexity int, input graphql1.UpdateWidgetInput) int + UpgradePlugin func(childComplexity int, input graphql1.UpgradePluginInput) int + UploadFileToProperty func(childComplexity int, input graphql1.UploadFileToPropertyInput) int + UploadPlugin func(childComplexity int, input graphql1.UploadPluginInput) int + } + + PageInfo struct { + EndCursor func(childComplexity int) int + HasNextPage func(childComplexity int) int + HasPreviousPage func(childComplexity int) int + StartCursor func(childComplexity int) int + } + + Plugin struct { + AllTranslatedDescription func(childComplexity int) int + AllTranslatedName func(childComplexity int) int + Author func(childComplexity int) int + Description func(childComplexity int) int + Extensions func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + PropertySchema func(childComplexity int) int + PropertySchemaID func(childComplexity int) int + RepositoryURL func(childComplexity int) int + ScenePlugin func(childComplexity int, sceneID id.ID) int + TranslatedDescription func(childComplexity int, lang *string) int + TranslatedName func(childComplexity int, lang *string) int + Version func(childComplexity int) 
int + } + + PluginExtension struct { + AllTranslatedDescription func(childComplexity int) int + AllTranslatedName func(childComplexity int) int + Description func(childComplexity int) int + ExtensionID func(childComplexity int) int + Icon func(childComplexity int) int + Name func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + PropertySchema func(childComplexity int) int + PropertySchemaID func(childComplexity int) int + SceneWidget func(childComplexity int, sceneID id.ID) int + TranslatedDescription func(childComplexity int, lang *string) int + TranslatedName func(childComplexity int, lang *string) int + Type func(childComplexity int) int + Visualizer func(childComplexity int) int + } + + Project struct { + Alias func(childComplexity int) int + CreatedAt func(childComplexity int) int + Description func(childComplexity int) int + ID func(childComplexity int) int + ImageURL func(childComplexity int) int + IsArchived func(childComplexity int) int + Name func(childComplexity int) int + PublicDescription func(childComplexity int) int + PublicImage func(childComplexity int) int + PublicNoIndex func(childComplexity int) int + PublicTitle func(childComplexity int) int + PublishedAt func(childComplexity int) int + PublishmentStatus func(childComplexity int) int + Scene func(childComplexity int) int + Team func(childComplexity int) int + TeamID func(childComplexity int) int + UpdatedAt func(childComplexity int) int + Visualizer func(childComplexity int) int + } + + ProjectConnection struct { + Edges func(childComplexity int) int + Nodes func(childComplexity int) int + PageInfo func(childComplexity int) int + TotalCount func(childComplexity int) int + } + + ProjectEdge struct { + Cursor func(childComplexity int) int + Node func(childComplexity int) int + } + + ProjectPayload struct { + Project func(childComplexity int) int + } + + Property struct { + ID func(childComplexity int) int + Items func(childComplexity int) 
int + Layer func(childComplexity int) int + Merged func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + PropertyCondition struct { + FieldID func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + } + + PropertyField struct { + ActualValue func(childComplexity int) int + Field func(childComplexity int) int + FieldID func(childComplexity int) int + ID func(childComplexity int) int + Links func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + Type func(childComplexity int) int + Value func(childComplexity int) int + } + + PropertyFieldLink struct { + Dataset func(childComplexity int) int + DatasetField func(childComplexity int) int + DatasetID func(childComplexity int) int + DatasetSchema func(childComplexity int) int + DatasetSchemaField func(childComplexity int) int + DatasetSchemaFieldID func(childComplexity int) int + DatasetSchemaID func(childComplexity int) int + } + + PropertyFieldPayload struct { + Property func(childComplexity int) int + PropertyField func(childComplexity int) int + } + + PropertyGroup struct { + Fields func(childComplexity int) int + ID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroup func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + PropertyGroupList struct { + Groups func(childComplexity int) int + ID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroup func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + } + + PropertyItemPayload struct { + Property func(childComplexity int) int + PropertyItem func(childComplexity int) int + } + + PropertyLinkableFields struct { + Latlng 
func(childComplexity int) int + LatlngField func(childComplexity int) int + Schema func(childComplexity int) int + SchemaID func(childComplexity int) int + URL func(childComplexity int) int + URLField func(childComplexity int) int + } + + PropertySchema struct { + Groups func(childComplexity int) int + ID func(childComplexity int) int + LinkableFields func(childComplexity int) int + } + + PropertySchemaField struct { + AllTranslatedDescription func(childComplexity int) int + AllTranslatedName func(childComplexity int) int + AllTranslatedTitle func(childComplexity int) int + Choices func(childComplexity int) int + DefaultValue func(childComplexity int) int + Description func(childComplexity int) int + FieldID func(childComplexity int) int + IsAvailableIf func(childComplexity int) int + Max func(childComplexity int) int + Min func(childComplexity int) int + Name func(childComplexity int) int + Prefix func(childComplexity int) int + Suffix func(childComplexity int) int + Title func(childComplexity int) int + TranslatedDescription func(childComplexity int, lang *string) int + TranslatedName func(childComplexity int, lang *string) int + TranslatedTitle func(childComplexity int, lang *string) int + Type func(childComplexity int) int + UI func(childComplexity int) int + } + + PropertySchemaFieldChoice struct { + AllTranslatedLabel func(childComplexity int) int + AllTranslatedTitle func(childComplexity int) int + Icon func(childComplexity int) int + Key func(childComplexity int) int + Label func(childComplexity int) int + Title func(childComplexity int) int + TranslatedLabel func(childComplexity int, lang *string) int + TranslatedTitle func(childComplexity int, lang *string) int + } + + PropertySchemaGroup struct { + AllTranslatedTitle func(childComplexity int) int + Fields func(childComplexity int) int + IsAvailableIf func(childComplexity int) int + IsList func(childComplexity int) int + Name func(childComplexity int) int + RepresentativeField func(childComplexity int) 
int + RepresentativeFieldID func(childComplexity int) int + Schema func(childComplexity int) int + SchemaGroupID func(childComplexity int) int + SchemaID func(childComplexity int) int + Title func(childComplexity int) int + TranslatedTitle func(childComplexity int, lang *string) int + } + + Query struct { + Assets func(childComplexity int, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + CheckProjectAlias func(childComplexity int, alias string) int + DatasetSchemas func(childComplexity int, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + Datasets func(childComplexity int, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + DynamicDatasetSchemas func(childComplexity int, sceneID id.ID) int + Layer func(childComplexity int, id id.ID) int + Me func(childComplexity int) int + Node func(childComplexity int, id id.ID, typeArg graphql1.NodeType) int + Nodes func(childComplexity int, id []*id.ID, typeArg graphql1.NodeType) int + Plugin func(childComplexity int, id id.PluginID) int + Plugins func(childComplexity int, id []*id.PluginID) int + Projects func(childComplexity int, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + PropertySchema func(childComplexity int, id id.PropertySchemaID) int + PropertySchemas func(childComplexity int, id []*id.PropertySchemaID) int + Scene func(childComplexity int, projectID id.ID) int + SceneLock func(childComplexity int, sceneID id.ID) int + SearchUser func(childComplexity int, nameOrEmail string) int + } + + Rect struct { + East func(childComplexity int) int + North func(childComplexity int) int + South func(childComplexity int) int + West func(childComplexity int) int + } + + RemoveAssetPayload struct { + AssetID func(childComplexity int) int + } + + RemoveDatasetSchemaPayload struct { + SchemaID func(childComplexity int) int + } + + 
RemoveInfoboxFieldPayload struct { + InfoboxFieldID func(childComplexity int) int + Layer func(childComplexity int) int + } + + RemoveInfoboxPayload struct { + Layer func(childComplexity int) int + } + + RemoveLayerPayload struct { + LayerID func(childComplexity int) int + ParentLayer func(childComplexity int) int + } + + RemoveMemberFromTeamPayload struct { + Team func(childComplexity int) int + } + + RemoveWidgetPayload struct { + ExtensionID func(childComplexity int) int + PluginID func(childComplexity int) int + Scene func(childComplexity int) int + } + + Scene struct { + CreatedAt func(childComplexity int) int + DatasetSchemas func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + DynamicDatasetSchemas func(childComplexity int) int + ID func(childComplexity int) int + LockMode func(childComplexity int) int + Plugins func(childComplexity int) int + Project func(childComplexity int) int + ProjectID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + RootLayer func(childComplexity int) int + RootLayerID func(childComplexity int) int + Team func(childComplexity int) int + TeamID func(childComplexity int) int + UpdatedAt func(childComplexity int) int + Widgets func(childComplexity int) int + } + + ScenePlugin struct { + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + SceneWidget struct { + Enabled func(childComplexity int) int + Extension func(childComplexity int) int + ExtensionID func(childComplexity int) int + ID func(childComplexity int) int + Plugin func(childComplexity int) int + PluginID func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int + } + + SearchedUser struct { + UserEmail func(childComplexity int) int + UserID func(childComplexity int) int + UserName 
func(childComplexity int) int + } + + SignupPayload struct { + Team func(childComplexity int) int + User func(childComplexity int) int + } + + SyncDatasetPayload struct { + Dataset func(childComplexity int) int + DatasetSchema func(childComplexity int) int + SceneID func(childComplexity int) int + URL func(childComplexity int) int + } + + Team struct { + Assets func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + ID func(childComplexity int) int + Members func(childComplexity int) int + Name func(childComplexity int) int + Personal func(childComplexity int) int + Projects func(childComplexity int, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + } + + TeamMember struct { + Role func(childComplexity int) int + User func(childComplexity int) int + UserID func(childComplexity int) int + } + + Typography struct { + Bold func(childComplexity int) int + Color func(childComplexity int) int + FontFamily func(childComplexity int) int + FontSize func(childComplexity int) int + FontWeight func(childComplexity int) int + Italic func(childComplexity int) int + TextAlign func(childComplexity int) int + Underline func(childComplexity int) int + } + + UninstallPluginPayload struct { + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + UpdateDatasetSchemaPayload struct { + DatasetSchema func(childComplexity int) int + } + + UpdateLayerPayload struct { + Layer func(childComplexity int) int + } + + UpdateMePayload struct { + User func(childComplexity int) int + } + + UpdateMemberOfTeamPayload struct { + Team func(childComplexity int) int + } + + UpdateTeamPayload struct { + Team func(childComplexity int) int + } + + UpdateWidgetPayload struct { + Scene func(childComplexity int) int + SceneWidget func(childComplexity int) int + } + + UpgradePluginPayload struct { + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int + } + + 
UploadPluginPayload struct { + Plugin func(childComplexity int) int + } + + User struct { + Auths func(childComplexity int) int + Email func(childComplexity int) int + ID func(childComplexity int) int + Lang func(childComplexity int) int + MyTeam func(childComplexity int) int + MyTeamID func(childComplexity int) int + Name func(childComplexity int) int + Teams func(childComplexity int) int + Theme func(childComplexity int) int + } +} + +type AssetResolver interface { + Team(ctx context.Context, obj *graphql1.Asset) (*graphql1.Team, error) +} +type DatasetResolver interface { + Schema(ctx context.Context, obj *graphql1.Dataset) (*graphql1.DatasetSchema, error) + Name(ctx context.Context, obj *graphql1.Dataset) (*string, error) +} +type DatasetFieldResolver interface { + Schema(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchema, error) + Field(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchemaField, error) + ValueRef(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.Dataset, error) +} +type DatasetSchemaResolver interface { + Datasets(ctx context.Context, obj *graphql1.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) + Scene(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.Scene, error) + RepresentativeField(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.DatasetSchemaField, error) +} +type DatasetSchemaFieldResolver interface { + Schema(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) + Ref(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) +} +type InfoboxResolver interface { + Layer(ctx context.Context, obj *graphql1.Infobox) (graphql1.Layer, error) + Property(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Property, error) + LinkedDataset(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Dataset, error) + Merged(ctx 
context.Context, obj *graphql1.Infobox) (*graphql1.MergedInfobox, error) +} +type InfoboxFieldResolver interface { + Layer(ctx context.Context, obj *graphql1.InfoboxField) (graphql1.Layer, error) + Infobox(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Infobox, error) + Property(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Property, error) + Plugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Plugin, error) + Extension(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.PluginExtension, error) + LinkedDataset(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Dataset, error) + Merged(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.MergedInfoboxField, error) +} +type LayerGroupResolver interface { + Parent(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.LayerGroup, error) + Property(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Property, error) + Plugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Plugin, error) + Extension(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.PluginExtension, error) + LinkedDatasetSchema(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.DatasetSchema, error) + Layers(ctx context.Context, obj *graphql1.LayerGroup) ([]graphql1.Layer, error) +} +type LayerItemResolver interface { + Parent(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) + Property(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Property, error) + Plugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Plugin, error) + Extension(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.PluginExtension, error) + LinkedDataset(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Dataset, error) + Merged(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.MergedLayer, error) +} +type MergedInfoboxFieldResolver interface { + Plugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Plugin, 
error) + Extension(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.PluginExtension, error) +} +type MergedLayerResolver interface { + Original(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerItem, error) + Parent(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerGroup, error) +} +type MergedPropertyResolver interface { + Original(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) + Parent(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) + Schema(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.PropertySchema, error) + LinkedDataset(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Dataset, error) + Groups(ctx context.Context, obj *graphql1.MergedProperty) ([]*graphql1.MergedPropertyGroup, error) +} +type MergedPropertyFieldResolver interface { + Schema(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchema, error) + Field(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchemaField, error) + ActualValue(ctx context.Context, obj *graphql1.MergedPropertyField) (interface{}, error) +} +type MergedPropertyGroupResolver interface { + OriginalProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) + ParentProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) + Original(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) + Parent(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) + Schema(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertySchema, error) + LinkedDataset(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Dataset, error) +} +type MutationResolver interface { + CreateAsset(ctx context.Context, input graphql1.CreateAssetInput) (*graphql1.CreateAssetPayload, error) + RemoveAsset(ctx 
context.Context, input graphql1.RemoveAssetInput) (*graphql1.RemoveAssetPayload, error) + Signup(ctx context.Context, input graphql1.SignupInput) (*graphql1.SignupPayload, error) + UpdateMe(ctx context.Context, input graphql1.UpdateMeInput) (*graphql1.UpdateMePayload, error) + RemoveMyAuth(ctx context.Context, input graphql1.RemoveMyAuthInput) (*graphql1.UpdateMePayload, error) + DeleteMe(ctx context.Context, input graphql1.DeleteMeInput) (*graphql1.DeleteMePayload, error) + CreateTeam(ctx context.Context, input graphql1.CreateTeamInput) (*graphql1.CreateTeamPayload, error) + DeleteTeam(ctx context.Context, input graphql1.DeleteTeamInput) (*graphql1.DeleteTeamPayload, error) + UpdateTeam(ctx context.Context, input graphql1.UpdateTeamInput) (*graphql1.UpdateTeamPayload, error) + AddMemberToTeam(ctx context.Context, input graphql1.AddMemberToTeamInput) (*graphql1.AddMemberToTeamPayload, error) + RemoveMemberFromTeam(ctx context.Context, input graphql1.RemoveMemberFromTeamInput) (*graphql1.RemoveMemberFromTeamPayload, error) + UpdateMemberOfTeam(ctx context.Context, input graphql1.UpdateMemberOfTeamInput) (*graphql1.UpdateMemberOfTeamPayload, error) + CreateProject(ctx context.Context, input graphql1.CreateProjectInput) (*graphql1.ProjectPayload, error) + UpdateProject(ctx context.Context, input graphql1.UpdateProjectInput) (*graphql1.ProjectPayload, error) + PublishProject(ctx context.Context, input graphql1.PublishProjectInput) (*graphql1.ProjectPayload, error) + DeleteProject(ctx context.Context, input graphql1.DeleteProjectInput) (*graphql1.DeleteProjectPayload, error) + UploadPlugin(ctx context.Context, input graphql1.UploadPluginInput) (*graphql1.UploadPluginPayload, error) + CreateScene(ctx context.Context, input graphql1.CreateSceneInput) (*graphql1.CreateScenePayload, error) + AddWidget(ctx context.Context, input graphql1.AddWidgetInput) (*graphql1.AddWidgetPayload, error) + UpdateWidget(ctx context.Context, input graphql1.UpdateWidgetInput) 
(*graphql1.UpdateWidgetPayload, error) + RemoveWidget(ctx context.Context, input graphql1.RemoveWidgetInput) (*graphql1.RemoveWidgetPayload, error) + InstallPlugin(ctx context.Context, input graphql1.InstallPluginInput) (*graphql1.InstallPluginPayload, error) + UninstallPlugin(ctx context.Context, input graphql1.UninstallPluginInput) (*graphql1.UninstallPluginPayload, error) + UpgradePlugin(ctx context.Context, input graphql1.UpgradePluginInput) (*graphql1.UpgradePluginPayload, error) + UpdateDatasetSchema(ctx context.Context, input graphql1.UpdateDatasetSchemaInput) (*graphql1.UpdateDatasetSchemaPayload, error) + SyncDataset(ctx context.Context, input graphql1.SyncDatasetInput) (*graphql1.SyncDatasetPayload, error) + AddDynamicDatasetSchema(ctx context.Context, input graphql1.AddDynamicDatasetSchemaInput) (*graphql1.AddDynamicDatasetSchemaPayload, error) + AddDynamicDataset(ctx context.Context, input graphql1.AddDynamicDatasetInput) (*graphql1.AddDynamicDatasetPayload, error) + RemoveDatasetSchema(ctx context.Context, input graphql1.RemoveDatasetSchemaInput) (*graphql1.RemoveDatasetSchemaPayload, error) + ImportDataset(ctx context.Context, input graphql1.ImportDatasetInput) (*graphql1.ImportDatasetPayload, error) + AddDatasetSchema(ctx context.Context, input graphql1.AddDatasetSchemaInput) (*graphql1.AddDatasetSchemaPayload, error) + UpdatePropertyValue(ctx context.Context, input graphql1.UpdatePropertyValueInput) (*graphql1.PropertyFieldPayload, error) + UpdatePropertyValueLatLng(ctx context.Context, input graphql1.UpdatePropertyValueLatLngInput) (*graphql1.PropertyFieldPayload, error) + UpdatePropertyValueLatLngHeight(ctx context.Context, input graphql1.UpdatePropertyValueLatLngHeightInput) (*graphql1.PropertyFieldPayload, error) + UpdatePropertyValueCamera(ctx context.Context, input graphql1.UpdatePropertyValueCameraInput) (*graphql1.PropertyFieldPayload, error) + UpdatePropertyValueTypography(ctx context.Context, input 
graphql1.UpdatePropertyValueTypographyInput) (*graphql1.PropertyFieldPayload, error) + RemovePropertyField(ctx context.Context, input graphql1.RemovePropertyFieldInput) (*graphql1.PropertyFieldPayload, error) + UploadFileToProperty(ctx context.Context, input graphql1.UploadFileToPropertyInput) (*graphql1.PropertyFieldPayload, error) + LinkDatasetToPropertyValue(ctx context.Context, input graphql1.LinkDatasetToPropertyValueInput) (*graphql1.PropertyFieldPayload, error) + UnlinkPropertyValue(ctx context.Context, input graphql1.UnlinkPropertyValueInput) (*graphql1.PropertyFieldPayload, error) + AddPropertyItem(ctx context.Context, input graphql1.AddPropertyItemInput) (*graphql1.PropertyItemPayload, error) + MovePropertyItem(ctx context.Context, input graphql1.MovePropertyItemInput) (*graphql1.PropertyItemPayload, error) + RemovePropertyItem(ctx context.Context, input graphql1.RemovePropertyItemInput) (*graphql1.PropertyItemPayload, error) + UpdatePropertyItems(ctx context.Context, input graphql1.UpdatePropertyItemInput) (*graphql1.PropertyItemPayload, error) + AddLayerItem(ctx context.Context, input graphql1.AddLayerItemInput) (*graphql1.AddLayerItemPayload, error) + AddLayerGroup(ctx context.Context, input graphql1.AddLayerGroupInput) (*graphql1.AddLayerGroupPayload, error) + RemoveLayer(ctx context.Context, input graphql1.RemoveLayerInput) (*graphql1.RemoveLayerPayload, error) + UpdateLayer(ctx context.Context, input graphql1.UpdateLayerInput) (*graphql1.UpdateLayerPayload, error) + MoveLayer(ctx context.Context, input graphql1.MoveLayerInput) (*graphql1.MoveLayerPayload, error) + CreateInfobox(ctx context.Context, input graphql1.CreateInfoboxInput) (*graphql1.CreateInfoboxPayload, error) + RemoveInfobox(ctx context.Context, input graphql1.RemoveInfoboxInput) (*graphql1.RemoveInfoboxPayload, error) + AddInfoboxField(ctx context.Context, input graphql1.AddInfoboxFieldInput) (*graphql1.AddInfoboxFieldPayload, error) + MoveInfoboxField(ctx context.Context, input 
graphql1.MoveInfoboxFieldInput) (*graphql1.MoveInfoboxFieldPayload, error) + RemoveInfoboxField(ctx context.Context, input graphql1.RemoveInfoboxFieldInput) (*graphql1.RemoveInfoboxFieldPayload, error) + ImportLayer(ctx context.Context, input graphql1.ImportLayerInput) (*graphql1.ImportLayerPayload, error) +} +type PluginResolver interface { + PropertySchema(ctx context.Context, obj *graphql1.Plugin) (*graphql1.PropertySchema, error) +} +type PluginExtensionResolver interface { + Plugin(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.Plugin, error) + SceneWidget(ctx context.Context, obj *graphql1.PluginExtension, sceneID id.ID) (*graphql1.SceneWidget, error) + PropertySchema(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.PropertySchema, error) + TranslatedName(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) + TranslatedDescription(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) +} +type ProjectResolver interface { + Team(ctx context.Context, obj *graphql1.Project) (*graphql1.Team, error) + Scene(ctx context.Context, obj *graphql1.Project) (*graphql1.Scene, error) +} +type PropertyResolver interface { + Schema(ctx context.Context, obj *graphql1.Property) (*graphql1.PropertySchema, error) + Layer(ctx context.Context, obj *graphql1.Property) (graphql1.Layer, error) + Merged(ctx context.Context, obj *graphql1.Property) (*graphql1.MergedProperty, error) +} +type PropertyFieldResolver interface { + Parent(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.Property, error) + Schema(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchema, error) + Field(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchemaField, error) + ActualValue(ctx context.Context, obj *graphql1.PropertyField) (interface{}, error) +} +type PropertyFieldLinkResolver interface { + Dataset(ctx context.Context, obj *graphql1.PropertyFieldLink) 
(*graphql1.Dataset, error) + DatasetField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetField, error) + DatasetSchema(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchema, error) + DatasetSchemaField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchemaField, error) +} +// PropertyGroupResolver declares resolvers for PropertyGroup fields that reference its property schema. +type PropertyGroupResolver interface { + Schema(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchema, error) + SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchemaGroup, error) +} +// PropertyGroupListResolver declares the corresponding schema lookups for PropertyGroupList. +type PropertyGroupListResolver interface { + Schema(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchema, error) + SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchemaGroup, error) +} +// PropertyLinkableFieldsResolver resolves the latlng/url schema fields of PropertyLinkableFields. +type PropertyLinkableFieldsResolver interface { + LatlngField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) + URLField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) + Schema(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchema, error) +} +// PropertySchemaFieldResolver resolves translated title/name/description strings for an optional lang. +type PropertySchemaFieldResolver interface { + TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) + TranslatedName(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) + TranslatedDescription(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) +} +// PropertySchemaFieldChoiceResolver resolves translated title/label strings for a choice value. +type PropertySchemaFieldChoiceResolver interface { + TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) + TranslatedLabel(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) +} +// PropertySchemaGroupResolver resolves the parent schema and translated title of a schema group. +type PropertySchemaGroupResolver interface { + Schema(ctx context.Context, obj *graphql1.PropertySchemaGroup) (*graphql1.PropertySchema, error) +
TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaGroup, lang *string) (string, error) +} +// QueryResolver declares the root Query fields: node/plugin/schema lookups and cursor-paginated listings. +type QueryResolver interface { + Me(ctx context.Context) (*graphql1.User, error) + Node(ctx context.Context, id id.ID, typeArg graphql1.NodeType) (graphql1.Node, error) + Nodes(ctx context.Context, id []*id.ID, typeArg graphql1.NodeType) ([]graphql1.Node, error) + PropertySchema(ctx context.Context, id id.PropertySchemaID) (*graphql1.PropertySchema, error) + PropertySchemas(ctx context.Context, id []*id.PropertySchemaID) ([]*graphql1.PropertySchema, error) + Plugin(ctx context.Context, id id.PluginID) (*graphql1.Plugin, error) + Plugins(ctx context.Context, id []*id.PluginID) ([]*graphql1.Plugin, error) + Layer(ctx context.Context, id id.ID) (graphql1.Layer, error) + Scene(ctx context.Context, projectID id.ID) (*graphql1.Scene, error) + Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) + Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) + DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) + Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) + SceneLock(ctx context.Context, sceneID id.ID) (*graphql1.SceneLockMode, error) + DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*graphql1.DatasetSchema, error) + SearchUser(ctx context.Context, nameOrEmail string) (*graphql1.SearchedUser, error) + CheckProjectAlias(ctx context.Context, alias string) (*graphql1.CheckProjectAliasPayload, error) +} +// SceneResolver resolves Scene relations (project, team, property, root layer, lock state, dataset schemas). +type SceneResolver interface { + Project(ctx context.Context, obj *graphql1.Scene) (*graphql1.Project, error) + Team(ctx
context.Context, obj *graphql1.Scene) (*graphql1.Team, error) + Property(ctx context.Context, obj *graphql1.Scene) (*graphql1.Property, error) + RootLayer(ctx context.Context, obj *graphql1.Scene) (*graphql1.LayerGroup, error) + LockMode(ctx context.Context, obj *graphql1.Scene) (graphql1.SceneLockMode, error) + DatasetSchemas(ctx context.Context, obj *graphql1.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) +} +// ScenePluginResolver resolves the plugin and property of a ScenePlugin. +type ScenePluginResolver interface { + Plugin(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Plugin, error) + Property(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Property, error) +} +// SceneWidgetResolver resolves the plugin, extension and property of a SceneWidget. +type SceneWidgetResolver interface { + Plugin(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Plugin, error) + Extension(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.PluginExtension, error) + Property(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Property, error) +} +// TeamResolver resolves a team's cursor-paginated assets and projects. +type TeamResolver interface { + Assets(ctx context.Context, obj *graphql1.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) + Projects(ctx context.Context, obj *graphql1.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) +} +// TeamMemberResolver resolves the user behind a team membership. +type TeamMemberResolver interface { + User(ctx context.Context, obj *graphql1.TeamMember) (*graphql1.User, error) +} +// UserResolver resolves the teams a user belongs to. +type UserResolver interface { + Teams(ctx context.Context, obj *graphql1.User) ([]*graphql1.Team, error) + MyTeam(ctx context.Context, obj *graphql1.User) (*graphql1.Team, error) +} + +// executableSchema ties the resolver root, directives and per-field complexity functions together. +// NOTE(review): this file appears to be gqlgen-generated (generated.go) — prefer regenerating over hand edits; confirm. +type executableSchema struct { + resolvers ResolverRoot + directives DirectiveRoot + complexity ComplexityRoot +} + +// Schema returns the parsed GraphQL schema definition. +func (e *executableSchema) Schema() *ast.Schema { + return parsedSchema +} + +func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int,
bool) { + ec := executionContext{nil, e} + _ = ec + switch typeName + "." + field { + + case "AddDatasetSchemaPayload.datasetSchema": + if e.complexity.AddDatasetSchemaPayload.DatasetSchema == nil { + break + } + + return e.complexity.AddDatasetSchemaPayload.DatasetSchema(childComplexity), true + + case "AddDynamicDatasetPayload.dataset": + if e.complexity.AddDynamicDatasetPayload.Dataset == nil { + break + } + + return e.complexity.AddDynamicDatasetPayload.Dataset(childComplexity), true + + case "AddDynamicDatasetPayload.datasetSchema": + if e.complexity.AddDynamicDatasetPayload.DatasetSchema == nil { + break + } + + return e.complexity.AddDynamicDatasetPayload.DatasetSchema(childComplexity), true + + case "AddDynamicDatasetSchemaPayload.datasetSchema": + if e.complexity.AddDynamicDatasetSchemaPayload.DatasetSchema == nil { + break + } + + return e.complexity.AddDynamicDatasetSchemaPayload.DatasetSchema(childComplexity), true + + case "AddInfoboxFieldPayload.infoboxField": + if e.complexity.AddInfoboxFieldPayload.InfoboxField == nil { + break + } + + return e.complexity.AddInfoboxFieldPayload.InfoboxField(childComplexity), true + + case "AddInfoboxFieldPayload.layer": + if e.complexity.AddInfoboxFieldPayload.Layer == nil { + break + } + + return e.complexity.AddInfoboxFieldPayload.Layer(childComplexity), true + + case "AddLayerGroupPayload.index": + if e.complexity.AddLayerGroupPayload.Index == nil { + break + } + + return e.complexity.AddLayerGroupPayload.Index(childComplexity), true + + case "AddLayerGroupPayload.layer": + if e.complexity.AddLayerGroupPayload.Layer == nil { + break + } + + return e.complexity.AddLayerGroupPayload.Layer(childComplexity), true + + case "AddLayerGroupPayload.parentLayer": + if e.complexity.AddLayerGroupPayload.ParentLayer == nil { + break + } + + return e.complexity.AddLayerGroupPayload.ParentLayer(childComplexity), true + + case "AddLayerItemPayload.index": + if e.complexity.AddLayerItemPayload.Index == nil { + break + } + + 
return e.complexity.AddLayerItemPayload.Index(childComplexity), true + + case "AddLayerItemPayload.layer": + if e.complexity.AddLayerItemPayload.Layer == nil { + break + } + + return e.complexity.AddLayerItemPayload.Layer(childComplexity), true + + case "AddLayerItemPayload.parentLayer": + if e.complexity.AddLayerItemPayload.ParentLayer == nil { + break + } + + return e.complexity.AddLayerItemPayload.ParentLayer(childComplexity), true + + case "AddMemberToTeamPayload.team": + if e.complexity.AddMemberToTeamPayload.Team == nil { + break + } + + return e.complexity.AddMemberToTeamPayload.Team(childComplexity), true + + case "AddWidgetPayload.scene": + if e.complexity.AddWidgetPayload.Scene == nil { + break + } + + return e.complexity.AddWidgetPayload.Scene(childComplexity), true + + case "AddWidgetPayload.sceneWidget": + if e.complexity.AddWidgetPayload.SceneWidget == nil { + break + } + + return e.complexity.AddWidgetPayload.SceneWidget(childComplexity), true + + case "Asset.contentType": + if e.complexity.Asset.ContentType == nil { + break + } + + return e.complexity.Asset.ContentType(childComplexity), true + + case "Asset.createdAt": + if e.complexity.Asset.CreatedAt == nil { + break + } + + return e.complexity.Asset.CreatedAt(childComplexity), true + + case "Asset.id": + if e.complexity.Asset.ID == nil { + break + } + + return e.complexity.Asset.ID(childComplexity), true + + case "Asset.name": + if e.complexity.Asset.Name == nil { + break + } + + return e.complexity.Asset.Name(childComplexity), true + + case "Asset.size": + if e.complexity.Asset.Size == nil { + break + } + + return e.complexity.Asset.Size(childComplexity), true + + case "Asset.team": + if e.complexity.Asset.Team == nil { + break + } + + return e.complexity.Asset.Team(childComplexity), true + + case "Asset.teamId": + if e.complexity.Asset.TeamID == nil { + break + } + + return e.complexity.Asset.TeamID(childComplexity), true + + case "Asset.url": + if e.complexity.Asset.URL == nil { + break + } + 
+ return e.complexity.Asset.URL(childComplexity), true + + case "AssetConnection.edges": + if e.complexity.AssetConnection.Edges == nil { + break + } + + return e.complexity.AssetConnection.Edges(childComplexity), true + + case "AssetConnection.nodes": + if e.complexity.AssetConnection.Nodes == nil { + break + } + + return e.complexity.AssetConnection.Nodes(childComplexity), true + + case "AssetConnection.pageInfo": + if e.complexity.AssetConnection.PageInfo == nil { + break + } + + return e.complexity.AssetConnection.PageInfo(childComplexity), true + + case "AssetConnection.totalCount": + if e.complexity.AssetConnection.TotalCount == nil { + break + } + + return e.complexity.AssetConnection.TotalCount(childComplexity), true + + case "AssetEdge.cursor": + if e.complexity.AssetEdge.Cursor == nil { + break + } + + return e.complexity.AssetEdge.Cursor(childComplexity), true + + case "AssetEdge.node": + if e.complexity.AssetEdge.Node == nil { + break + } + + return e.complexity.AssetEdge.Node(childComplexity), true + + case "Camera.altitude": + if e.complexity.Camera.Altitude == nil { + break + } + + return e.complexity.Camera.Altitude(childComplexity), true + + case "Camera.fov": + if e.complexity.Camera.Fov == nil { + break + } + + return e.complexity.Camera.Fov(childComplexity), true + + case "Camera.heading": + if e.complexity.Camera.Heading == nil { + break + } + + return e.complexity.Camera.Heading(childComplexity), true + + case "Camera.lat": + if e.complexity.Camera.Lat == nil { + break + } + + return e.complexity.Camera.Lat(childComplexity), true + + case "Camera.lng": + if e.complexity.Camera.Lng == nil { + break + } + + return e.complexity.Camera.Lng(childComplexity), true + + case "Camera.pitch": + if e.complexity.Camera.Pitch == nil { + break + } + + return e.complexity.Camera.Pitch(childComplexity), true + + case "Camera.roll": + if e.complexity.Camera.Roll == nil { + break + } + + return e.complexity.Camera.Roll(childComplexity), true + + case 
"CheckProjectAliasPayload.alias": + if e.complexity.CheckProjectAliasPayload.Alias == nil { + break + } + + return e.complexity.CheckProjectAliasPayload.Alias(childComplexity), true + + case "CheckProjectAliasPayload.available": + if e.complexity.CheckProjectAliasPayload.Available == nil { + break + } + + return e.complexity.CheckProjectAliasPayload.Available(childComplexity), true + + case "CreateAssetPayload.asset": + if e.complexity.CreateAssetPayload.Asset == nil { + break + } + + return e.complexity.CreateAssetPayload.Asset(childComplexity), true + + case "CreateInfoboxPayload.layer": + if e.complexity.CreateInfoboxPayload.Layer == nil { + break + } + + return e.complexity.CreateInfoboxPayload.Layer(childComplexity), true + + case "CreateScenePayload.scene": + if e.complexity.CreateScenePayload.Scene == nil { + break + } + + return e.complexity.CreateScenePayload.Scene(childComplexity), true + + case "CreateTeamPayload.team": + if e.complexity.CreateTeamPayload.Team == nil { + break + } + + return e.complexity.CreateTeamPayload.Team(childComplexity), true + + case "Dataset.fields": + if e.complexity.Dataset.Fields == nil { + break + } + + return e.complexity.Dataset.Fields(childComplexity), true + + case "Dataset.id": + if e.complexity.Dataset.ID == nil { + break + } + + return e.complexity.Dataset.ID(childComplexity), true + + case "Dataset.name": + if e.complexity.Dataset.Name == nil { + break + } + + return e.complexity.Dataset.Name(childComplexity), true + + case "Dataset.schema": + if e.complexity.Dataset.Schema == nil { + break + } + + return e.complexity.Dataset.Schema(childComplexity), true + + case "Dataset.schemaId": + if e.complexity.Dataset.SchemaID == nil { + break + } + + return e.complexity.Dataset.SchemaID(childComplexity), true + + case "Dataset.source": + if e.complexity.Dataset.Source == nil { + break + } + + return e.complexity.Dataset.Source(childComplexity), true + + case "DatasetConnection.edges": + if 
e.complexity.DatasetConnection.Edges == nil { + break + } + + return e.complexity.DatasetConnection.Edges(childComplexity), true + + case "DatasetConnection.nodes": + if e.complexity.DatasetConnection.Nodes == nil { + break + } + + return e.complexity.DatasetConnection.Nodes(childComplexity), true + + case "DatasetConnection.pageInfo": + if e.complexity.DatasetConnection.PageInfo == nil { + break + } + + return e.complexity.DatasetConnection.PageInfo(childComplexity), true + + case "DatasetConnection.totalCount": + if e.complexity.DatasetConnection.TotalCount == nil { + break + } + + return e.complexity.DatasetConnection.TotalCount(childComplexity), true + + case "DatasetEdge.cursor": + if e.complexity.DatasetEdge.Cursor == nil { + break + } + + return e.complexity.DatasetEdge.Cursor(childComplexity), true + + case "DatasetEdge.node": + if e.complexity.DatasetEdge.Node == nil { + break + } + + return e.complexity.DatasetEdge.Node(childComplexity), true + + case "DatasetField.field": + if e.complexity.DatasetField.Field == nil { + break + } + + return e.complexity.DatasetField.Field(childComplexity), true + + case "DatasetField.fieldId": + if e.complexity.DatasetField.FieldID == nil { + break + } + + return e.complexity.DatasetField.FieldID(childComplexity), true + + case "DatasetField.schema": + if e.complexity.DatasetField.Schema == nil { + break + } + + return e.complexity.DatasetField.Schema(childComplexity), true + + case "DatasetField.schemaId": + if e.complexity.DatasetField.SchemaID == nil { + break + } + + return e.complexity.DatasetField.SchemaID(childComplexity), true + + case "DatasetField.source": + if e.complexity.DatasetField.Source == nil { + break + } + + return e.complexity.DatasetField.Source(childComplexity), true + + case "DatasetField.type": + if e.complexity.DatasetField.Type == nil { + break + } + + return e.complexity.DatasetField.Type(childComplexity), true + + case "DatasetField.value": + if e.complexity.DatasetField.Value == nil { + break 
+ } + + return e.complexity.DatasetField.Value(childComplexity), true + + case "DatasetField.valueRef": + if e.complexity.DatasetField.ValueRef == nil { + break + } + + return e.complexity.DatasetField.ValueRef(childComplexity), true + + case "DatasetSchema.datasets": + if e.complexity.DatasetSchema.Datasets == nil { + break + } + + args, err := ec.field_DatasetSchema_datasets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.DatasetSchema.Datasets(childComplexity, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "DatasetSchema.dynamic": + if e.complexity.DatasetSchema.Dynamic == nil { + break + } + + return e.complexity.DatasetSchema.Dynamic(childComplexity), true + + case "DatasetSchema.fields": + if e.complexity.DatasetSchema.Fields == nil { + break + } + + return e.complexity.DatasetSchema.Fields(childComplexity), true + + case "DatasetSchema.id": + if e.complexity.DatasetSchema.ID == nil { + break + } + + return e.complexity.DatasetSchema.ID(childComplexity), true + + case "DatasetSchema.name": + if e.complexity.DatasetSchema.Name == nil { + break + } + + return e.complexity.DatasetSchema.Name(childComplexity), true + + case "DatasetSchema.representativeField": + if e.complexity.DatasetSchema.RepresentativeField == nil { + break + } + + return e.complexity.DatasetSchema.RepresentativeField(childComplexity), true + + case "DatasetSchema.representativeFieldId": + if e.complexity.DatasetSchema.RepresentativeFieldID == nil { + break + } + + return e.complexity.DatasetSchema.RepresentativeFieldID(childComplexity), true + + case "DatasetSchema.scene": + if e.complexity.DatasetSchema.Scene == nil { + break + } + + return e.complexity.DatasetSchema.Scene(childComplexity), true + + case "DatasetSchema.sceneId": + if e.complexity.DatasetSchema.SceneID == nil { + break + } + + return e.complexity.DatasetSchema.SceneID(childComplexity), true + + case 
"DatasetSchema.source": + if e.complexity.DatasetSchema.Source == nil { + break + } + + return e.complexity.DatasetSchema.Source(childComplexity), true + + case "DatasetSchemaConnection.edges": + if e.complexity.DatasetSchemaConnection.Edges == nil { + break + } + + return e.complexity.DatasetSchemaConnection.Edges(childComplexity), true + + case "DatasetSchemaConnection.nodes": + if e.complexity.DatasetSchemaConnection.Nodes == nil { + break + } + + return e.complexity.DatasetSchemaConnection.Nodes(childComplexity), true + + case "DatasetSchemaConnection.pageInfo": + if e.complexity.DatasetSchemaConnection.PageInfo == nil { + break + } + + return e.complexity.DatasetSchemaConnection.PageInfo(childComplexity), true + + case "DatasetSchemaConnection.totalCount": + if e.complexity.DatasetSchemaConnection.TotalCount == nil { + break + } + + return e.complexity.DatasetSchemaConnection.TotalCount(childComplexity), true + + case "DatasetSchemaEdge.cursor": + if e.complexity.DatasetSchemaEdge.Cursor == nil { + break + } + + return e.complexity.DatasetSchemaEdge.Cursor(childComplexity), true + + case "DatasetSchemaEdge.node": + if e.complexity.DatasetSchemaEdge.Node == nil { + break + } + + return e.complexity.DatasetSchemaEdge.Node(childComplexity), true + + case "DatasetSchemaField.id": + if e.complexity.DatasetSchemaField.ID == nil { + break + } + + return e.complexity.DatasetSchemaField.ID(childComplexity), true + + case "DatasetSchemaField.name": + if e.complexity.DatasetSchemaField.Name == nil { + break + } + + return e.complexity.DatasetSchemaField.Name(childComplexity), true + + case "DatasetSchemaField.ref": + if e.complexity.DatasetSchemaField.Ref == nil { + break + } + + return e.complexity.DatasetSchemaField.Ref(childComplexity), true + + case "DatasetSchemaField.refId": + if e.complexity.DatasetSchemaField.RefID == nil { + break + } + + return e.complexity.DatasetSchemaField.RefID(childComplexity), true + + case "DatasetSchemaField.schema": + if 
e.complexity.DatasetSchemaField.Schema == nil { + break + } + + return e.complexity.DatasetSchemaField.Schema(childComplexity), true + + case "DatasetSchemaField.schemaId": + if e.complexity.DatasetSchemaField.SchemaID == nil { + break + } + + return e.complexity.DatasetSchemaField.SchemaID(childComplexity), true + + case "DatasetSchemaField.source": + if e.complexity.DatasetSchemaField.Source == nil { + break + } + + return e.complexity.DatasetSchemaField.Source(childComplexity), true + + case "DatasetSchemaField.type": + if e.complexity.DatasetSchemaField.Type == nil { + break + } + + return e.complexity.DatasetSchemaField.Type(childComplexity), true + + case "DeleteMePayload.userId": + if e.complexity.DeleteMePayload.UserID == nil { + break + } + + return e.complexity.DeleteMePayload.UserID(childComplexity), true + + case "DeleteProjectPayload.projectId": + if e.complexity.DeleteProjectPayload.ProjectID == nil { + break + } + + return e.complexity.DeleteProjectPayload.ProjectID(childComplexity), true + + case "DeleteTeamPayload.teamId": + if e.complexity.DeleteTeamPayload.TeamID == nil { + break + } + + return e.complexity.DeleteTeamPayload.TeamID(childComplexity), true + + case "ImportDatasetPayload.datasetSchema": + if e.complexity.ImportDatasetPayload.DatasetSchema == nil { + break + } + + return e.complexity.ImportDatasetPayload.DatasetSchema(childComplexity), true + + case "ImportLayerPayload.layers": + if e.complexity.ImportLayerPayload.Layers == nil { + break + } + + return e.complexity.ImportLayerPayload.Layers(childComplexity), true + + case "ImportLayerPayload.parentLayer": + if e.complexity.ImportLayerPayload.ParentLayer == nil { + break + } + + return e.complexity.ImportLayerPayload.ParentLayer(childComplexity), true + + case "Infobox.fields": + if e.complexity.Infobox.Fields == nil { + break + } + + return e.complexity.Infobox.Fields(childComplexity), true + + case "Infobox.layer": + if e.complexity.Infobox.Layer == nil { + break + } + + return 
e.complexity.Infobox.Layer(childComplexity), true + + case "Infobox.layerId": + if e.complexity.Infobox.LayerID == nil { + break + } + + return e.complexity.Infobox.LayerID(childComplexity), true + + case "Infobox.linkedDataset": + if e.complexity.Infobox.LinkedDataset == nil { + break + } + + return e.complexity.Infobox.LinkedDataset(childComplexity), true + + case "Infobox.linkedDatasetId": + if e.complexity.Infobox.LinkedDatasetID == nil { + break + } + + return e.complexity.Infobox.LinkedDatasetID(childComplexity), true + + case "Infobox.merged": + if e.complexity.Infobox.Merged == nil { + break + } + + return e.complexity.Infobox.Merged(childComplexity), true + + case "Infobox.property": + if e.complexity.Infobox.Property == nil { + break + } + + return e.complexity.Infobox.Property(childComplexity), true + + case "Infobox.propertyId": + if e.complexity.Infobox.PropertyID == nil { + break + } + + return e.complexity.Infobox.PropertyID(childComplexity), true + + case "InfoboxField.extension": + if e.complexity.InfoboxField.Extension == nil { + break + } + + return e.complexity.InfoboxField.Extension(childComplexity), true + + case "InfoboxField.extensionId": + if e.complexity.InfoboxField.ExtensionID == nil { + break + } + + return e.complexity.InfoboxField.ExtensionID(childComplexity), true + + case "InfoboxField.id": + if e.complexity.InfoboxField.ID == nil { + break + } + + return e.complexity.InfoboxField.ID(childComplexity), true + + case "InfoboxField.infobox": + if e.complexity.InfoboxField.Infobox == nil { + break + } + + return e.complexity.InfoboxField.Infobox(childComplexity), true + + case "InfoboxField.layer": + if e.complexity.InfoboxField.Layer == nil { + break + } + + return e.complexity.InfoboxField.Layer(childComplexity), true + + case "InfoboxField.layerId": + if e.complexity.InfoboxField.LayerID == nil { + break + } + + return e.complexity.InfoboxField.LayerID(childComplexity), true + + case "InfoboxField.linkedDataset": + if 
e.complexity.InfoboxField.LinkedDataset == nil { + break + } + + return e.complexity.InfoboxField.LinkedDataset(childComplexity), true + + case "InfoboxField.linkedDatasetId": + if e.complexity.InfoboxField.LinkedDatasetID == nil { + break + } + + return e.complexity.InfoboxField.LinkedDatasetID(childComplexity), true + + case "InfoboxField.merged": + if e.complexity.InfoboxField.Merged == nil { + break + } + + return e.complexity.InfoboxField.Merged(childComplexity), true + + case "InfoboxField.plugin": + if e.complexity.InfoboxField.Plugin == nil { + break + } + + return e.complexity.InfoboxField.Plugin(childComplexity), true + + case "InfoboxField.pluginId": + if e.complexity.InfoboxField.PluginID == nil { + break + } + + return e.complexity.InfoboxField.PluginID(childComplexity), true + + case "InfoboxField.property": + if e.complexity.InfoboxField.Property == nil { + break + } + + return e.complexity.InfoboxField.Property(childComplexity), true + + case "InfoboxField.propertyId": + if e.complexity.InfoboxField.PropertyID == nil { + break + } + + return e.complexity.InfoboxField.PropertyID(childComplexity), true + + case "InstallPluginPayload.scene": + if e.complexity.InstallPluginPayload.Scene == nil { + break + } + + return e.complexity.InstallPluginPayload.Scene(childComplexity), true + + case "InstallPluginPayload.scenePlugin": + if e.complexity.InstallPluginPayload.ScenePlugin == nil { + break + } + + return e.complexity.InstallPluginPayload.ScenePlugin(childComplexity), true + + case "LatLng.lat": + if e.complexity.LatLng.Lat == nil { + break + } + + return e.complexity.LatLng.Lat(childComplexity), true + + case "LatLng.lng": + if e.complexity.LatLng.Lng == nil { + break + } + + return e.complexity.LatLng.Lng(childComplexity), true + + case "LatLngHeight.height": + if e.complexity.LatLngHeight.Height == nil { + break + } + + return e.complexity.LatLngHeight.Height(childComplexity), true + + case "LatLngHeight.lat": + if e.complexity.LatLngHeight.Lat == 
nil { + break + } + + return e.complexity.LatLngHeight.Lat(childComplexity), true + + case "LatLngHeight.lng": + if e.complexity.LatLngHeight.Lng == nil { + break + } + + return e.complexity.LatLngHeight.Lng(childComplexity), true + + case "LayerGroup.extension": + if e.complexity.LayerGroup.Extension == nil { + break + } + + return e.complexity.LayerGroup.Extension(childComplexity), true + + case "LayerGroup.extensionId": + if e.complexity.LayerGroup.ExtensionID == nil { + break + } + + return e.complexity.LayerGroup.ExtensionID(childComplexity), true + + case "LayerGroup.id": + if e.complexity.LayerGroup.ID == nil { + break + } + + return e.complexity.LayerGroup.ID(childComplexity), true + + case "LayerGroup.infobox": + if e.complexity.LayerGroup.Infobox == nil { + break + } + + return e.complexity.LayerGroup.Infobox(childComplexity), true + + case "LayerGroup.isVisible": + if e.complexity.LayerGroup.IsVisible == nil { + break + } + + return e.complexity.LayerGroup.IsVisible(childComplexity), true + + case "LayerGroup.layerIds": + if e.complexity.LayerGroup.LayerIds == nil { + break + } + + return e.complexity.LayerGroup.LayerIds(childComplexity), true + + case "LayerGroup.layers": + if e.complexity.LayerGroup.Layers == nil { + break + } + + return e.complexity.LayerGroup.Layers(childComplexity), true + + case "LayerGroup.linkedDatasetSchema": + if e.complexity.LayerGroup.LinkedDatasetSchema == nil { + break + } + + return e.complexity.LayerGroup.LinkedDatasetSchema(childComplexity), true + + case "LayerGroup.linkedDatasetSchemaId": + if e.complexity.LayerGroup.LinkedDatasetSchemaID == nil { + break + } + + return e.complexity.LayerGroup.LinkedDatasetSchemaID(childComplexity), true + + case "LayerGroup.name": + if e.complexity.LayerGroup.Name == nil { + break + } + + return e.complexity.LayerGroup.Name(childComplexity), true + + case "LayerGroup.parent": + if e.complexity.LayerGroup.Parent == nil { + break + } + + return 
e.complexity.LayerGroup.Parent(childComplexity), true + + case "LayerGroup.parentId": + if e.complexity.LayerGroup.ParentID == nil { + break + } + + return e.complexity.LayerGroup.ParentID(childComplexity), true + + case "LayerGroup.plugin": + if e.complexity.LayerGroup.Plugin == nil { + break + } + + return e.complexity.LayerGroup.Plugin(childComplexity), true + + case "LayerGroup.pluginId": + if e.complexity.LayerGroup.PluginID == nil { + break + } + + return e.complexity.LayerGroup.PluginID(childComplexity), true + + case "LayerGroup.property": + if e.complexity.LayerGroup.Property == nil { + break + } + + return e.complexity.LayerGroup.Property(childComplexity), true + + case "LayerGroup.propertyId": + if e.complexity.LayerGroup.PropertyID == nil { + break + } + + return e.complexity.LayerGroup.PropertyID(childComplexity), true + + case "LayerGroup.root": + if e.complexity.LayerGroup.Root == nil { + break + } + + return e.complexity.LayerGroup.Root(childComplexity), true + + case "LayerItem.extension": + if e.complexity.LayerItem.Extension == nil { + break + } + + return e.complexity.LayerItem.Extension(childComplexity), true + + case "LayerItem.extensionId": + if e.complexity.LayerItem.ExtensionID == nil { + break + } + + return e.complexity.LayerItem.ExtensionID(childComplexity), true + + case "LayerItem.id": + if e.complexity.LayerItem.ID == nil { + break + } + + return e.complexity.LayerItem.ID(childComplexity), true + + case "LayerItem.infobox": + if e.complexity.LayerItem.Infobox == nil { + break + } + + return e.complexity.LayerItem.Infobox(childComplexity), true + + case "LayerItem.isVisible": + if e.complexity.LayerItem.IsVisible == nil { + break + } + + return e.complexity.LayerItem.IsVisible(childComplexity), true + + case "LayerItem.linkedDataset": + if e.complexity.LayerItem.LinkedDataset == nil { + break + } + + return e.complexity.LayerItem.LinkedDataset(childComplexity), true + + case "LayerItem.linkedDatasetId": + if 
e.complexity.LayerItem.LinkedDatasetID == nil { + break + } + + return e.complexity.LayerItem.LinkedDatasetID(childComplexity), true + + case "LayerItem.merged": + if e.complexity.LayerItem.Merged == nil { + break + } + + return e.complexity.LayerItem.Merged(childComplexity), true + + case "LayerItem.name": + if e.complexity.LayerItem.Name == nil { + break + } + + return e.complexity.LayerItem.Name(childComplexity), true + + case "LayerItem.parent": + if e.complexity.LayerItem.Parent == nil { + break + } + + return e.complexity.LayerItem.Parent(childComplexity), true + + case "LayerItem.parentId": + if e.complexity.LayerItem.ParentID == nil { + break + } + + return e.complexity.LayerItem.ParentID(childComplexity), true + + case "LayerItem.plugin": + if e.complexity.LayerItem.Plugin == nil { + break + } + + return e.complexity.LayerItem.Plugin(childComplexity), true + + case "LayerItem.pluginId": + if e.complexity.LayerItem.PluginID == nil { + break + } + + return e.complexity.LayerItem.PluginID(childComplexity), true + + case "LayerItem.property": + if e.complexity.LayerItem.Property == nil { + break + } + + return e.complexity.LayerItem.Property(childComplexity), true + + case "LayerItem.propertyId": + if e.complexity.LayerItem.PropertyID == nil { + break + } + + return e.complexity.LayerItem.PropertyID(childComplexity), true + + case "MergedInfobox.fields": + if e.complexity.MergedInfobox.Fields == nil { + break + } + + return e.complexity.MergedInfobox.Fields(childComplexity), true + + case "MergedInfobox.property": + if e.complexity.MergedInfobox.Property == nil { + break + } + + return e.complexity.MergedInfobox.Property(childComplexity), true + + case "MergedInfoboxField.extension": + if e.complexity.MergedInfoboxField.Extension == nil { + break + } + + return e.complexity.MergedInfoboxField.Extension(childComplexity), true + + case "MergedInfoboxField.extensionId": + if e.complexity.MergedInfoboxField.ExtensionID == nil { + break + } + + return 
e.complexity.MergedInfoboxField.ExtensionID(childComplexity), true + + case "MergedInfoboxField.originalId": + if e.complexity.MergedInfoboxField.OriginalID == nil { + break + } + + return e.complexity.MergedInfoboxField.OriginalID(childComplexity), true + + case "MergedInfoboxField.plugin": + if e.complexity.MergedInfoboxField.Plugin == nil { + break + } + + return e.complexity.MergedInfoboxField.Plugin(childComplexity), true + + case "MergedInfoboxField.pluginId": + if e.complexity.MergedInfoboxField.PluginID == nil { + break + } + + return e.complexity.MergedInfoboxField.PluginID(childComplexity), true + + case "MergedInfoboxField.property": + if e.complexity.MergedInfoboxField.Property == nil { + break + } + + return e.complexity.MergedInfoboxField.Property(childComplexity), true + + case "MergedLayer.infobox": + if e.complexity.MergedLayer.Infobox == nil { + break + } + + return e.complexity.MergedLayer.Infobox(childComplexity), true + + case "MergedLayer.original": + if e.complexity.MergedLayer.Original == nil { + break + } + + return e.complexity.MergedLayer.Original(childComplexity), true + + case "MergedLayer.originalId": + if e.complexity.MergedLayer.OriginalID == nil { + break + } + + return e.complexity.MergedLayer.OriginalID(childComplexity), true + + case "MergedLayer.parent": + if e.complexity.MergedLayer.Parent == nil { + break + } + + return e.complexity.MergedLayer.Parent(childComplexity), true + + case "MergedLayer.parentId": + if e.complexity.MergedLayer.ParentID == nil { + break + } + + return e.complexity.MergedLayer.ParentID(childComplexity), true + + case "MergedLayer.property": + if e.complexity.MergedLayer.Property == nil { + break + } + + return e.complexity.MergedLayer.Property(childComplexity), true + + case "MergedProperty.groups": + if e.complexity.MergedProperty.Groups == nil { + break + } + + return e.complexity.MergedProperty.Groups(childComplexity), true + + case "MergedProperty.linkedDataset": + if 
e.complexity.MergedProperty.LinkedDataset == nil { + break + } + + return e.complexity.MergedProperty.LinkedDataset(childComplexity), true + + case "MergedProperty.linkedDatasetId": + if e.complexity.MergedProperty.LinkedDatasetID == nil { + break + } + + return e.complexity.MergedProperty.LinkedDatasetID(childComplexity), true + + case "MergedProperty.original": + if e.complexity.MergedProperty.Original == nil { + break + } + + return e.complexity.MergedProperty.Original(childComplexity), true + + case "MergedProperty.originalId": + if e.complexity.MergedProperty.OriginalID == nil { + break + } + + return e.complexity.MergedProperty.OriginalID(childComplexity), true + + case "MergedProperty.parent": + if e.complexity.MergedProperty.Parent == nil { + break + } + + return e.complexity.MergedProperty.Parent(childComplexity), true + + case "MergedProperty.parentId": + if e.complexity.MergedProperty.ParentID == nil { + break + } + + return e.complexity.MergedProperty.ParentID(childComplexity), true + + case "MergedProperty.schema": + if e.complexity.MergedProperty.Schema == nil { + break + } + + return e.complexity.MergedProperty.Schema(childComplexity), true + + case "MergedProperty.schemaId": + if e.complexity.MergedProperty.SchemaID == nil { + break + } + + return e.complexity.MergedProperty.SchemaID(childComplexity), true + + case "MergedPropertyField.actualValue": + if e.complexity.MergedPropertyField.ActualValue == nil { + break + } + + return e.complexity.MergedPropertyField.ActualValue(childComplexity), true + + case "MergedPropertyField.field": + if e.complexity.MergedPropertyField.Field == nil { + break + } + + return e.complexity.MergedPropertyField.Field(childComplexity), true + + case "MergedPropertyField.fieldId": + if e.complexity.MergedPropertyField.FieldID == nil { + break + } + + return e.complexity.MergedPropertyField.FieldID(childComplexity), true + + case "MergedPropertyField.links": + if e.complexity.MergedPropertyField.Links == nil { + break + } 
+ + return e.complexity.MergedPropertyField.Links(childComplexity), true + + case "MergedPropertyField.overridden": + if e.complexity.MergedPropertyField.Overridden == nil { + break + } + + return e.complexity.MergedPropertyField.Overridden(childComplexity), true + + case "MergedPropertyField.schema": + if e.complexity.MergedPropertyField.Schema == nil { + break + } + + return e.complexity.MergedPropertyField.Schema(childComplexity), true + + case "MergedPropertyField.schemaId": + if e.complexity.MergedPropertyField.SchemaID == nil { + break + } + + return e.complexity.MergedPropertyField.SchemaID(childComplexity), true + + case "MergedPropertyField.type": + if e.complexity.MergedPropertyField.Type == nil { + break + } + + return e.complexity.MergedPropertyField.Type(childComplexity), true + + case "MergedPropertyField.value": + if e.complexity.MergedPropertyField.Value == nil { + break + } + + return e.complexity.MergedPropertyField.Value(childComplexity), true + + case "MergedPropertyGroup.fields": + if e.complexity.MergedPropertyGroup.Fields == nil { + break + } + + return e.complexity.MergedPropertyGroup.Fields(childComplexity), true + + case "MergedPropertyGroup.groups": + if e.complexity.MergedPropertyGroup.Groups == nil { + break + } + + return e.complexity.MergedPropertyGroup.Groups(childComplexity), true + + case "MergedPropertyGroup.linkedDataset": + if e.complexity.MergedPropertyGroup.LinkedDataset == nil { + break + } + + return e.complexity.MergedPropertyGroup.LinkedDataset(childComplexity), true + + case "MergedPropertyGroup.linkedDatasetId": + if e.complexity.MergedPropertyGroup.LinkedDatasetID == nil { + break + } + + return e.complexity.MergedPropertyGroup.LinkedDatasetID(childComplexity), true + + case "MergedPropertyGroup.original": + if e.complexity.MergedPropertyGroup.Original == nil { + break + } + + return e.complexity.MergedPropertyGroup.Original(childComplexity), true + + case "MergedPropertyGroup.originalId": + if 
e.complexity.MergedPropertyGroup.OriginalID == nil { + break + } + + return e.complexity.MergedPropertyGroup.OriginalID(childComplexity), true + + case "MergedPropertyGroup.originalProperty": + if e.complexity.MergedPropertyGroup.OriginalProperty == nil { + break + } + + return e.complexity.MergedPropertyGroup.OriginalProperty(childComplexity), true + + case "MergedPropertyGroup.originalPropertyId": + if e.complexity.MergedPropertyGroup.OriginalPropertyID == nil { + break + } + + return e.complexity.MergedPropertyGroup.OriginalPropertyID(childComplexity), true + + case "MergedPropertyGroup.parent": + if e.complexity.MergedPropertyGroup.Parent == nil { + break + } + + return e.complexity.MergedPropertyGroup.Parent(childComplexity), true + + case "MergedPropertyGroup.parentId": + if e.complexity.MergedPropertyGroup.ParentID == nil { + break + } + + return e.complexity.MergedPropertyGroup.ParentID(childComplexity), true + + case "MergedPropertyGroup.parentProperty": + if e.complexity.MergedPropertyGroup.ParentProperty == nil { + break + } + + return e.complexity.MergedPropertyGroup.ParentProperty(childComplexity), true + + case "MergedPropertyGroup.parentPropertyId": + if e.complexity.MergedPropertyGroup.ParentPropertyID == nil { + break + } + + return e.complexity.MergedPropertyGroup.ParentPropertyID(childComplexity), true + + case "MergedPropertyGroup.schema": + if e.complexity.MergedPropertyGroup.Schema == nil { + break + } + + return e.complexity.MergedPropertyGroup.Schema(childComplexity), true + + case "MergedPropertyGroup.schemaGroupId": + if e.complexity.MergedPropertyGroup.SchemaGroupID == nil { + break + } + + return e.complexity.MergedPropertyGroup.SchemaGroupID(childComplexity), true + + case "MergedPropertyGroup.schemaId": + if e.complexity.MergedPropertyGroup.SchemaID == nil { + break + } + + return e.complexity.MergedPropertyGroup.SchemaID(childComplexity), true + + case "MoveInfoboxFieldPayload.index": + if e.complexity.MoveInfoboxFieldPayload.Index == 
nil { + break + } + + return e.complexity.MoveInfoboxFieldPayload.Index(childComplexity), true + + case "MoveInfoboxFieldPayload.infoboxFieldId": + if e.complexity.MoveInfoboxFieldPayload.InfoboxFieldID == nil { + break + } + + return e.complexity.MoveInfoboxFieldPayload.InfoboxFieldID(childComplexity), true + + case "MoveInfoboxFieldPayload.layer": + if e.complexity.MoveInfoboxFieldPayload.Layer == nil { + break + } + + return e.complexity.MoveInfoboxFieldPayload.Layer(childComplexity), true + + case "MoveLayerPayload.fromParentLayer": + if e.complexity.MoveLayerPayload.FromParentLayer == nil { + break + } + + return e.complexity.MoveLayerPayload.FromParentLayer(childComplexity), true + + case "MoveLayerPayload.index": + if e.complexity.MoveLayerPayload.Index == nil { + break + } + + return e.complexity.MoveLayerPayload.Index(childComplexity), true + + case "MoveLayerPayload.layerId": + if e.complexity.MoveLayerPayload.LayerID == nil { + break + } + + return e.complexity.MoveLayerPayload.LayerID(childComplexity), true + + case "MoveLayerPayload.toParentLayer": + if e.complexity.MoveLayerPayload.ToParentLayer == nil { + break + } + + return e.complexity.MoveLayerPayload.ToParentLayer(childComplexity), true + + case "Mutation.addDatasetSchema": + if e.complexity.Mutation.AddDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_addDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddDatasetSchema(childComplexity, args["input"].(graphql1.AddDatasetSchemaInput)), true + + case "Mutation.addDynamicDataset": + if e.complexity.Mutation.AddDynamicDataset == nil { + break + } + + args, err := ec.field_Mutation_addDynamicDataset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddDynamicDataset(childComplexity, args["input"].(graphql1.AddDynamicDatasetInput)), true + + case "Mutation.addDynamicDatasetSchema": + if 
e.complexity.Mutation.AddDynamicDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_addDynamicDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddDynamicDatasetSchema(childComplexity, args["input"].(graphql1.AddDynamicDatasetSchemaInput)), true + + case "Mutation.addInfoboxField": + if e.complexity.Mutation.AddInfoboxField == nil { + break + } + + args, err := ec.field_Mutation_addInfoboxField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddInfoboxField(childComplexity, args["input"].(graphql1.AddInfoboxFieldInput)), true + + case "Mutation.addLayerGroup": + if e.complexity.Mutation.AddLayerGroup == nil { + break + } + + args, err := ec.field_Mutation_addLayerGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddLayerGroup(childComplexity, args["input"].(graphql1.AddLayerGroupInput)), true + + case "Mutation.addLayerItem": + if e.complexity.Mutation.AddLayerItem == nil { + break + } + + args, err := ec.field_Mutation_addLayerItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddLayerItem(childComplexity, args["input"].(graphql1.AddLayerItemInput)), true + + case "Mutation.addMemberToTeam": + if e.complexity.Mutation.AddMemberToTeam == nil { + break + } + + args, err := ec.field_Mutation_addMemberToTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddMemberToTeam(childComplexity, args["input"].(graphql1.AddMemberToTeamInput)), true + + case "Mutation.addPropertyItem": + if e.complexity.Mutation.AddPropertyItem == nil { + break + } + + args, err := ec.field_Mutation_addPropertyItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddPropertyItem(childComplexity, 
args["input"].(graphql1.AddPropertyItemInput)), true + + case "Mutation.addWidget": + if e.complexity.Mutation.AddWidget == nil { + break + } + + args, err := ec.field_Mutation_addWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddWidget(childComplexity, args["input"].(graphql1.AddWidgetInput)), true + + case "Mutation.createAsset": + if e.complexity.Mutation.CreateAsset == nil { + break + } + + args, err := ec.field_Mutation_createAsset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateAsset(childComplexity, args["input"].(graphql1.CreateAssetInput)), true + + case "Mutation.createInfobox": + if e.complexity.Mutation.CreateInfobox == nil { + break + } + + args, err := ec.field_Mutation_createInfobox_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateInfobox(childComplexity, args["input"].(graphql1.CreateInfoboxInput)), true + + case "Mutation.createProject": + if e.complexity.Mutation.CreateProject == nil { + break + } + + args, err := ec.field_Mutation_createProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateProject(childComplexity, args["input"].(graphql1.CreateProjectInput)), true + + case "Mutation.createScene": + if e.complexity.Mutation.CreateScene == nil { + break + } + + args, err := ec.field_Mutation_createScene_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateScene(childComplexity, args["input"].(graphql1.CreateSceneInput)), true + + case "Mutation.createTeam": + if e.complexity.Mutation.CreateTeam == nil { + break + } + + args, err := ec.field_Mutation_createTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateTeam(childComplexity, args["input"].(graphql1.CreateTeamInput)), true + + case 
"Mutation.deleteMe": + if e.complexity.Mutation.DeleteMe == nil { + break + } + + args, err := ec.field_Mutation_deleteMe_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DeleteMe(childComplexity, args["input"].(graphql1.DeleteMeInput)), true + + case "Mutation.deleteProject": + if e.complexity.Mutation.DeleteProject == nil { + break + } + + args, err := ec.field_Mutation_deleteProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DeleteProject(childComplexity, args["input"].(graphql1.DeleteProjectInput)), true + + case "Mutation.deleteTeam": + if e.complexity.Mutation.DeleteTeam == nil { + break + } + + args, err := ec.field_Mutation_deleteTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DeleteTeam(childComplexity, args["input"].(graphql1.DeleteTeamInput)), true + + case "Mutation.importDataset": + if e.complexity.Mutation.ImportDataset == nil { + break + } + + args, err := ec.field_Mutation_importDataset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.ImportDataset(childComplexity, args["input"].(graphql1.ImportDatasetInput)), true + + case "Mutation.importLayer": + if e.complexity.Mutation.ImportLayer == nil { + break + } + + args, err := ec.field_Mutation_importLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.ImportLayer(childComplexity, args["input"].(graphql1.ImportLayerInput)), true + + case "Mutation.installPlugin": + if e.complexity.Mutation.InstallPlugin == nil { + break + } + + args, err := ec.field_Mutation_installPlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.InstallPlugin(childComplexity, args["input"].(graphql1.InstallPluginInput)), true + + case "Mutation.linkDatasetToPropertyValue": + if 
e.complexity.Mutation.LinkDatasetToPropertyValue == nil { + break + } + + args, err := ec.field_Mutation_linkDatasetToPropertyValue_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.LinkDatasetToPropertyValue(childComplexity, args["input"].(graphql1.LinkDatasetToPropertyValueInput)), true + + case "Mutation.moveInfoboxField": + if e.complexity.Mutation.MoveInfoboxField == nil { + break + } + + args, err := ec.field_Mutation_moveInfoboxField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.MoveInfoboxField(childComplexity, args["input"].(graphql1.MoveInfoboxFieldInput)), true + + case "Mutation.moveLayer": + if e.complexity.Mutation.MoveLayer == nil { + break + } + + args, err := ec.field_Mutation_moveLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.MoveLayer(childComplexity, args["input"].(graphql1.MoveLayerInput)), true + + case "Mutation.movePropertyItem": + if e.complexity.Mutation.MovePropertyItem == nil { + break + } + + args, err := ec.field_Mutation_movePropertyItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.MovePropertyItem(childComplexity, args["input"].(graphql1.MovePropertyItemInput)), true + + case "Mutation.publishProject": + if e.complexity.Mutation.PublishProject == nil { + break + } + + args, err := ec.field_Mutation_publishProject_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.PublishProject(childComplexity, args["input"].(graphql1.PublishProjectInput)), true + + case "Mutation.removeAsset": + if e.complexity.Mutation.RemoveAsset == nil { + break + } + + args, err := ec.field_Mutation_removeAsset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveAsset(childComplexity, args["input"].(graphql1.RemoveAssetInput)), 
true + + case "Mutation.removeDatasetSchema": + if e.complexity.Mutation.RemoveDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_removeDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveDatasetSchema(childComplexity, args["input"].(graphql1.RemoveDatasetSchemaInput)), true + + case "Mutation.removeInfobox": + if e.complexity.Mutation.RemoveInfobox == nil { + break + } + + args, err := ec.field_Mutation_removeInfobox_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveInfobox(childComplexity, args["input"].(graphql1.RemoveInfoboxInput)), true + + case "Mutation.removeInfoboxField": + if e.complexity.Mutation.RemoveInfoboxField == nil { + break + } + + args, err := ec.field_Mutation_removeInfoboxField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveInfoboxField(childComplexity, args["input"].(graphql1.RemoveInfoboxFieldInput)), true + + case "Mutation.removeLayer": + if e.complexity.Mutation.RemoveLayer == nil { + break + } + + args, err := ec.field_Mutation_removeLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveLayer(childComplexity, args["input"].(graphql1.RemoveLayerInput)), true + + case "Mutation.removeMemberFromTeam": + if e.complexity.Mutation.RemoveMemberFromTeam == nil { + break + } + + args, err := ec.field_Mutation_removeMemberFromTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveMemberFromTeam(childComplexity, args["input"].(graphql1.RemoveMemberFromTeamInput)), true + + case "Mutation.removeMyAuth": + if e.complexity.Mutation.RemoveMyAuth == nil { + break + } + + args, err := ec.field_Mutation_removeMyAuth_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Mutation.RemoveMyAuth(childComplexity, args["input"].(graphql1.RemoveMyAuthInput)), true + + case "Mutation.removePropertyField": + if e.complexity.Mutation.RemovePropertyField == nil { + break + } + + args, err := ec.field_Mutation_removePropertyField_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemovePropertyField(childComplexity, args["input"].(graphql1.RemovePropertyFieldInput)), true + + case "Mutation.removePropertyItem": + if e.complexity.Mutation.RemovePropertyItem == nil { + break + } + + args, err := ec.field_Mutation_removePropertyItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemovePropertyItem(childComplexity, args["input"].(graphql1.RemovePropertyItemInput)), true + + case "Mutation.removeWidget": + if e.complexity.Mutation.RemoveWidget == nil { + break + } + + args, err := ec.field_Mutation_removeWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveWidget(childComplexity, args["input"].(graphql1.RemoveWidgetInput)), true + + case "Mutation.signup": + if e.complexity.Mutation.Signup == nil { + break + } + + args, err := ec.field_Mutation_signup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.Signup(childComplexity, args["input"].(graphql1.SignupInput)), true + + case "Mutation.syncDataset": + if e.complexity.Mutation.SyncDataset == nil { + break + } + + args, err := ec.field_Mutation_syncDataset_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.SyncDataset(childComplexity, args["input"].(graphql1.SyncDatasetInput)), true + + case "Mutation.uninstallPlugin": + if e.complexity.Mutation.UninstallPlugin == nil { + break + } + + args, err := ec.field_Mutation_uninstallPlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Mutation.UninstallPlugin(childComplexity, args["input"].(graphql1.UninstallPluginInput)), true + + case "Mutation.unlinkPropertyValue": + if e.complexity.Mutation.UnlinkPropertyValue == nil { + break + } + + args, err := ec.field_Mutation_unlinkPropertyValue_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UnlinkPropertyValue(childComplexity, args["input"].(graphql1.UnlinkPropertyValueInput)), true + + case "Mutation.updateDatasetSchema": + if e.complexity.Mutation.UpdateDatasetSchema == nil { + break + } + + args, err := ec.field_Mutation_updateDatasetSchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateDatasetSchema(childComplexity, args["input"].(graphql1.UpdateDatasetSchemaInput)), true + + case "Mutation.updateLayer": + if e.complexity.Mutation.UpdateLayer == nil { + break + } + + args, err := ec.field_Mutation_updateLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateLayer(childComplexity, args["input"].(graphql1.UpdateLayerInput)), true + + case "Mutation.updateMe": + if e.complexity.Mutation.UpdateMe == nil { + break + } + + args, err := ec.field_Mutation_updateMe_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateMe(childComplexity, args["input"].(graphql1.UpdateMeInput)), true + + case "Mutation.updateMemberOfTeam": + if e.complexity.Mutation.UpdateMemberOfTeam == nil { + break + } + + args, err := ec.field_Mutation_updateMemberOfTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateMemberOfTeam(childComplexity, args["input"].(graphql1.UpdateMemberOfTeamInput)), true + + case "Mutation.updateProject": + if e.complexity.Mutation.UpdateProject == nil { + break + } + + args, err := ec.field_Mutation_updateProject_args(context.TODO(), rawArgs) + if err 
!= nil { + return 0, false + } + + return e.complexity.Mutation.UpdateProject(childComplexity, args["input"].(graphql1.UpdateProjectInput)), true + + case "Mutation.updatePropertyItems": + if e.complexity.Mutation.UpdatePropertyItems == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyItems_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyItems(childComplexity, args["input"].(graphql1.UpdatePropertyItemInput)), true + + case "Mutation.updatePropertyValue": + if e.complexity.Mutation.UpdatePropertyValue == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyValue_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyValue(childComplexity, args["input"].(graphql1.UpdatePropertyValueInput)), true + + case "Mutation.updatePropertyValueCamera": + if e.complexity.Mutation.UpdatePropertyValueCamera == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyValueCamera_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyValueCamera(childComplexity, args["input"].(graphql1.UpdatePropertyValueCameraInput)), true + + case "Mutation.updatePropertyValueLatLng": + if e.complexity.Mutation.UpdatePropertyValueLatLng == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyValueLatLng_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyValueLatLng(childComplexity, args["input"].(graphql1.UpdatePropertyValueLatLngInput)), true + + case "Mutation.updatePropertyValueLatLngHeight": + if e.complexity.Mutation.UpdatePropertyValueLatLngHeight == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyValueLatLngHeight_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.Mutation.UpdatePropertyValueLatLngHeight(childComplexity, args["input"].(graphql1.UpdatePropertyValueLatLngHeightInput)), true + + case "Mutation.updatePropertyValueTypography": + if e.complexity.Mutation.UpdatePropertyValueTypography == nil { + break + } + + args, err := ec.field_Mutation_updatePropertyValueTypography_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdatePropertyValueTypography(childComplexity, args["input"].(graphql1.UpdatePropertyValueTypographyInput)), true + + case "Mutation.updateTeam": + if e.complexity.Mutation.UpdateTeam == nil { + break + } + + args, err := ec.field_Mutation_updateTeam_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateTeam(childComplexity, args["input"].(graphql1.UpdateTeamInput)), true + + case "Mutation.updateWidget": + if e.complexity.Mutation.UpdateWidget == nil { + break + } + + args, err := ec.field_Mutation_updateWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateWidget(childComplexity, args["input"].(graphql1.UpdateWidgetInput)), true + + case "Mutation.upgradePlugin": + if e.complexity.Mutation.UpgradePlugin == nil { + break + } + + args, err := ec.field_Mutation_upgradePlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpgradePlugin(childComplexity, args["input"].(graphql1.UpgradePluginInput)), true + + case "Mutation.uploadFileToProperty": + if e.complexity.Mutation.UploadFileToProperty == nil { + break + } + + args, err := ec.field_Mutation_uploadFileToProperty_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UploadFileToProperty(childComplexity, args["input"].(graphql1.UploadFileToPropertyInput)), true + + case "Mutation.uploadPlugin": + if e.complexity.Mutation.UploadPlugin == nil { + break + } + + args, err 
:= ec.field_Mutation_uploadPlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UploadPlugin(childComplexity, args["input"].(graphql1.UploadPluginInput)), true + + case "PageInfo.endCursor": + if e.complexity.PageInfo.EndCursor == nil { + break + } + + return e.complexity.PageInfo.EndCursor(childComplexity), true + + case "PageInfo.hasNextPage": + if e.complexity.PageInfo.HasNextPage == nil { + break + } + + return e.complexity.PageInfo.HasNextPage(childComplexity), true + + case "PageInfo.hasPreviousPage": + if e.complexity.PageInfo.HasPreviousPage == nil { + break + } + + return e.complexity.PageInfo.HasPreviousPage(childComplexity), true + + case "PageInfo.startCursor": + if e.complexity.PageInfo.StartCursor == nil { + break + } + + return e.complexity.PageInfo.StartCursor(childComplexity), true + + case "Plugin.allTranslatedDescription": + if e.complexity.Plugin.AllTranslatedDescription == nil { + break + } + + return e.complexity.Plugin.AllTranslatedDescription(childComplexity), true + + case "Plugin.allTranslatedName": + if e.complexity.Plugin.AllTranslatedName == nil { + break + } + + return e.complexity.Plugin.AllTranslatedName(childComplexity), true + + case "Plugin.author": + if e.complexity.Plugin.Author == nil { + break + } + + return e.complexity.Plugin.Author(childComplexity), true + + case "Plugin.description": + if e.complexity.Plugin.Description == nil { + break + } + + return e.complexity.Plugin.Description(childComplexity), true + + case "Plugin.extensions": + if e.complexity.Plugin.Extensions == nil { + break + } + + return e.complexity.Plugin.Extensions(childComplexity), true + + case "Plugin.id": + if e.complexity.Plugin.ID == nil { + break + } + + return e.complexity.Plugin.ID(childComplexity), true + + case "Plugin.name": + if e.complexity.Plugin.Name == nil { + break + } + + return e.complexity.Plugin.Name(childComplexity), true + + case "Plugin.propertySchema": + if 
e.complexity.Plugin.PropertySchema == nil { + break + } + + return e.complexity.Plugin.PropertySchema(childComplexity), true + + case "Plugin.propertySchemaId": + if e.complexity.Plugin.PropertySchemaID == nil { + break + } + + return e.complexity.Plugin.PropertySchemaID(childComplexity), true + + case "Plugin.repositoryUrl": + if e.complexity.Plugin.RepositoryURL == nil { + break + } + + return e.complexity.Plugin.RepositoryURL(childComplexity), true + + case "Plugin.scenePlugin": + if e.complexity.Plugin.ScenePlugin == nil { + break + } + + args, err := ec.field_Plugin_scenePlugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Plugin.ScenePlugin(childComplexity, args["sceneId"].(id.ID)), true + + case "Plugin.translatedDescription": + if e.complexity.Plugin.TranslatedDescription == nil { + break + } + + args, err := ec.field_Plugin_translatedDescription_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Plugin.TranslatedDescription(childComplexity, args["lang"].(*string)), true + + case "Plugin.translatedName": + if e.complexity.Plugin.TranslatedName == nil { + break + } + + args, err := ec.field_Plugin_translatedName_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Plugin.TranslatedName(childComplexity, args["lang"].(*string)), true + + case "Plugin.version": + if e.complexity.Plugin.Version == nil { + break + } + + return e.complexity.Plugin.Version(childComplexity), true + + case "PluginExtension.allTranslatedDescription": + if e.complexity.PluginExtension.AllTranslatedDescription == nil { + break + } + + return e.complexity.PluginExtension.AllTranslatedDescription(childComplexity), true + + case "PluginExtension.allTranslatedName": + if e.complexity.PluginExtension.AllTranslatedName == nil { + break + } + + return e.complexity.PluginExtension.AllTranslatedName(childComplexity), true + + case "PluginExtension.description": + if 
e.complexity.PluginExtension.Description == nil { + break + } + + return e.complexity.PluginExtension.Description(childComplexity), true + + case "PluginExtension.extensionId": + if e.complexity.PluginExtension.ExtensionID == nil { + break + } + + return e.complexity.PluginExtension.ExtensionID(childComplexity), true + + case "PluginExtension.icon": + if e.complexity.PluginExtension.Icon == nil { + break + } + + return e.complexity.PluginExtension.Icon(childComplexity), true + + case "PluginExtension.name": + if e.complexity.PluginExtension.Name == nil { + break + } + + return e.complexity.PluginExtension.Name(childComplexity), true + + case "PluginExtension.plugin": + if e.complexity.PluginExtension.Plugin == nil { + break + } + + return e.complexity.PluginExtension.Plugin(childComplexity), true + + case "PluginExtension.pluginId": + if e.complexity.PluginExtension.PluginID == nil { + break + } + + return e.complexity.PluginExtension.PluginID(childComplexity), true + + case "PluginExtension.propertySchema": + if e.complexity.PluginExtension.PropertySchema == nil { + break + } + + return e.complexity.PluginExtension.PropertySchema(childComplexity), true + + case "PluginExtension.propertySchemaId": + if e.complexity.PluginExtension.PropertySchemaID == nil { + break + } + + return e.complexity.PluginExtension.PropertySchemaID(childComplexity), true + + case "PluginExtension.sceneWidget": + if e.complexity.PluginExtension.SceneWidget == nil { + break + } + + args, err := ec.field_PluginExtension_sceneWidget_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PluginExtension.SceneWidget(childComplexity, args["sceneId"].(id.ID)), true + + case "PluginExtension.translatedDescription": + if e.complexity.PluginExtension.TranslatedDescription == nil { + break + } + + args, err := ec.field_PluginExtension_translatedDescription_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return 
e.complexity.PluginExtension.TranslatedDescription(childComplexity, args["lang"].(*string)), true + + case "PluginExtension.translatedName": + if e.complexity.PluginExtension.TranslatedName == nil { + break + } + + args, err := ec.field_PluginExtension_translatedName_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PluginExtension.TranslatedName(childComplexity, args["lang"].(*string)), true + + case "PluginExtension.type": + if e.complexity.PluginExtension.Type == nil { + break + } + + return e.complexity.PluginExtension.Type(childComplexity), true + + case "PluginExtension.visualizer": + if e.complexity.PluginExtension.Visualizer == nil { + break + } + + return e.complexity.PluginExtension.Visualizer(childComplexity), true + + case "Project.alias": + if e.complexity.Project.Alias == nil { + break + } + + return e.complexity.Project.Alias(childComplexity), true + + case "Project.createdAt": + if e.complexity.Project.CreatedAt == nil { + break + } + + return e.complexity.Project.CreatedAt(childComplexity), true + + case "Project.description": + if e.complexity.Project.Description == nil { + break + } + + return e.complexity.Project.Description(childComplexity), true + + case "Project.id": + if e.complexity.Project.ID == nil { + break + } + + return e.complexity.Project.ID(childComplexity), true + + case "Project.imageUrl": + if e.complexity.Project.ImageURL == nil { + break + } + + return e.complexity.Project.ImageURL(childComplexity), true + + case "Project.isArchived": + if e.complexity.Project.IsArchived == nil { + break + } + + return e.complexity.Project.IsArchived(childComplexity), true + + case "Project.name": + if e.complexity.Project.Name == nil { + break + } + + return e.complexity.Project.Name(childComplexity), true + + case "Project.publicDescription": + if e.complexity.Project.PublicDescription == nil { + break + } + + return e.complexity.Project.PublicDescription(childComplexity), true + + case 
"Project.publicImage": + if e.complexity.Project.PublicImage == nil { + break + } + + return e.complexity.Project.PublicImage(childComplexity), true + + case "Project.publicNoIndex": + if e.complexity.Project.PublicNoIndex == nil { + break + } + + return e.complexity.Project.PublicNoIndex(childComplexity), true + + case "Project.publicTitle": + if e.complexity.Project.PublicTitle == nil { + break + } + + return e.complexity.Project.PublicTitle(childComplexity), true + + case "Project.publishedAt": + if e.complexity.Project.PublishedAt == nil { + break + } + + return e.complexity.Project.PublishedAt(childComplexity), true + + case "Project.publishmentStatus": + if e.complexity.Project.PublishmentStatus == nil { + break + } + + return e.complexity.Project.PublishmentStatus(childComplexity), true + + case "Project.scene": + if e.complexity.Project.Scene == nil { + break + } + + return e.complexity.Project.Scene(childComplexity), true + + case "Project.team": + if e.complexity.Project.Team == nil { + break + } + + return e.complexity.Project.Team(childComplexity), true + + case "Project.teamId": + if e.complexity.Project.TeamID == nil { + break + } + + return e.complexity.Project.TeamID(childComplexity), true + + case "Project.updatedAt": + if e.complexity.Project.UpdatedAt == nil { + break + } + + return e.complexity.Project.UpdatedAt(childComplexity), true + + case "Project.visualizer": + if e.complexity.Project.Visualizer == nil { + break + } + + return e.complexity.Project.Visualizer(childComplexity), true + + case "ProjectConnection.edges": + if e.complexity.ProjectConnection.Edges == nil { + break + } + + return e.complexity.ProjectConnection.Edges(childComplexity), true + + case "ProjectConnection.nodes": + if e.complexity.ProjectConnection.Nodes == nil { + break + } + + return e.complexity.ProjectConnection.Nodes(childComplexity), true + + case "ProjectConnection.pageInfo": + if e.complexity.ProjectConnection.PageInfo == nil { + break + } + + return 
e.complexity.ProjectConnection.PageInfo(childComplexity), true + + case "ProjectConnection.totalCount": + if e.complexity.ProjectConnection.TotalCount == nil { + break + } + + return e.complexity.ProjectConnection.TotalCount(childComplexity), true + + case "ProjectEdge.cursor": + if e.complexity.ProjectEdge.Cursor == nil { + break + } + + return e.complexity.ProjectEdge.Cursor(childComplexity), true + + case "ProjectEdge.node": + if e.complexity.ProjectEdge.Node == nil { + break + } + + return e.complexity.ProjectEdge.Node(childComplexity), true + + case "ProjectPayload.project": + if e.complexity.ProjectPayload.Project == nil { + break + } + + return e.complexity.ProjectPayload.Project(childComplexity), true + + case "Property.id": + if e.complexity.Property.ID == nil { + break + } + + return e.complexity.Property.ID(childComplexity), true + + case "Property.items": + if e.complexity.Property.Items == nil { + break + } + + return e.complexity.Property.Items(childComplexity), true + + case "Property.layer": + if e.complexity.Property.Layer == nil { + break + } + + return e.complexity.Property.Layer(childComplexity), true + + case "Property.merged": + if e.complexity.Property.Merged == nil { + break + } + + return e.complexity.Property.Merged(childComplexity), true + + case "Property.schema": + if e.complexity.Property.Schema == nil { + break + } + + return e.complexity.Property.Schema(childComplexity), true + + case "Property.schemaId": + if e.complexity.Property.SchemaID == nil { + break + } + + return e.complexity.Property.SchemaID(childComplexity), true + + case "PropertyCondition.fieldId": + if e.complexity.PropertyCondition.FieldID == nil { + break + } + + return e.complexity.PropertyCondition.FieldID(childComplexity), true + + case "PropertyCondition.type": + if e.complexity.PropertyCondition.Type == nil { + break + } + + return e.complexity.PropertyCondition.Type(childComplexity), true + + case "PropertyCondition.value": + if 
e.complexity.PropertyCondition.Value == nil { + break + } + + return e.complexity.PropertyCondition.Value(childComplexity), true + + case "PropertyField.actualValue": + if e.complexity.PropertyField.ActualValue == nil { + break + } + + return e.complexity.PropertyField.ActualValue(childComplexity), true + + case "PropertyField.field": + if e.complexity.PropertyField.Field == nil { + break + } + + return e.complexity.PropertyField.Field(childComplexity), true + + case "PropertyField.fieldId": + if e.complexity.PropertyField.FieldID == nil { + break + } + + return e.complexity.PropertyField.FieldID(childComplexity), true + + case "PropertyField.id": + if e.complexity.PropertyField.ID == nil { + break + } + + return e.complexity.PropertyField.ID(childComplexity), true + + case "PropertyField.links": + if e.complexity.PropertyField.Links == nil { + break + } + + return e.complexity.PropertyField.Links(childComplexity), true + + case "PropertyField.parent": + if e.complexity.PropertyField.Parent == nil { + break + } + + return e.complexity.PropertyField.Parent(childComplexity), true + + case "PropertyField.parentId": + if e.complexity.PropertyField.ParentID == nil { + break + } + + return e.complexity.PropertyField.ParentID(childComplexity), true + + case "PropertyField.schema": + if e.complexity.PropertyField.Schema == nil { + break + } + + return e.complexity.PropertyField.Schema(childComplexity), true + + case "PropertyField.schemaId": + if e.complexity.PropertyField.SchemaID == nil { + break + } + + return e.complexity.PropertyField.SchemaID(childComplexity), true + + case "PropertyField.type": + if e.complexity.PropertyField.Type == nil { + break + } + + return e.complexity.PropertyField.Type(childComplexity), true + + case "PropertyField.value": + if e.complexity.PropertyField.Value == nil { + break + } + + return e.complexity.PropertyField.Value(childComplexity), true + + case "PropertyFieldLink.dataset": + if e.complexity.PropertyFieldLink.Dataset == nil { + 
break + } + + return e.complexity.PropertyFieldLink.Dataset(childComplexity), true + + case "PropertyFieldLink.datasetField": + if e.complexity.PropertyFieldLink.DatasetField == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetField(childComplexity), true + + case "PropertyFieldLink.datasetId": + if e.complexity.PropertyFieldLink.DatasetID == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetID(childComplexity), true + + case "PropertyFieldLink.datasetSchema": + if e.complexity.PropertyFieldLink.DatasetSchema == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchema(childComplexity), true + + case "PropertyFieldLink.datasetSchemaField": + if e.complexity.PropertyFieldLink.DatasetSchemaField == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchemaField(childComplexity), true + + case "PropertyFieldLink.datasetSchemaFieldId": + if e.complexity.PropertyFieldLink.DatasetSchemaFieldID == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchemaFieldID(childComplexity), true + + case "PropertyFieldLink.datasetSchemaId": + if e.complexity.PropertyFieldLink.DatasetSchemaID == nil { + break + } + + return e.complexity.PropertyFieldLink.DatasetSchemaID(childComplexity), true + + case "PropertyFieldPayload.property": + if e.complexity.PropertyFieldPayload.Property == nil { + break + } + + return e.complexity.PropertyFieldPayload.Property(childComplexity), true + + case "PropertyFieldPayload.propertyField": + if e.complexity.PropertyFieldPayload.PropertyField == nil { + break + } + + return e.complexity.PropertyFieldPayload.PropertyField(childComplexity), true + + case "PropertyGroup.fields": + if e.complexity.PropertyGroup.Fields == nil { + break + } + + return e.complexity.PropertyGroup.Fields(childComplexity), true + + case "PropertyGroup.id": + if e.complexity.PropertyGroup.ID == nil { + break + } + + return e.complexity.PropertyGroup.ID(childComplexity), true + + case 
"PropertyGroup.schema": + if e.complexity.PropertyGroup.Schema == nil { + break + } + + return e.complexity.PropertyGroup.Schema(childComplexity), true + + case "PropertyGroup.schemaGroup": + if e.complexity.PropertyGroup.SchemaGroup == nil { + break + } + + return e.complexity.PropertyGroup.SchemaGroup(childComplexity), true + + case "PropertyGroup.schemaGroupId": + if e.complexity.PropertyGroup.SchemaGroupID == nil { + break + } + + return e.complexity.PropertyGroup.SchemaGroupID(childComplexity), true + + case "PropertyGroup.schemaId": + if e.complexity.PropertyGroup.SchemaID == nil { + break + } + + return e.complexity.PropertyGroup.SchemaID(childComplexity), true + + case "PropertyGroupList.groups": + if e.complexity.PropertyGroupList.Groups == nil { + break + } + + return e.complexity.PropertyGroupList.Groups(childComplexity), true + + case "PropertyGroupList.id": + if e.complexity.PropertyGroupList.ID == nil { + break + } + + return e.complexity.PropertyGroupList.ID(childComplexity), true + + case "PropertyGroupList.schema": + if e.complexity.PropertyGroupList.Schema == nil { + break + } + + return e.complexity.PropertyGroupList.Schema(childComplexity), true + + case "PropertyGroupList.schemaGroup": + if e.complexity.PropertyGroupList.SchemaGroup == nil { + break + } + + return e.complexity.PropertyGroupList.SchemaGroup(childComplexity), true + + case "PropertyGroupList.schemaGroupId": + if e.complexity.PropertyGroupList.SchemaGroupID == nil { + break + } + + return e.complexity.PropertyGroupList.SchemaGroupID(childComplexity), true + + case "PropertyGroupList.schemaId": + if e.complexity.PropertyGroupList.SchemaID == nil { + break + } + + return e.complexity.PropertyGroupList.SchemaID(childComplexity), true + + case "PropertyItemPayload.property": + if e.complexity.PropertyItemPayload.Property == nil { + break + } + + return e.complexity.PropertyItemPayload.Property(childComplexity), true + + case "PropertyItemPayload.propertyItem": + if 
e.complexity.PropertyItemPayload.PropertyItem == nil { + break + } + + return e.complexity.PropertyItemPayload.PropertyItem(childComplexity), true + + case "PropertyLinkableFields.latlng": + if e.complexity.PropertyLinkableFields.Latlng == nil { + break + } + + return e.complexity.PropertyLinkableFields.Latlng(childComplexity), true + + case "PropertyLinkableFields.latlngField": + if e.complexity.PropertyLinkableFields.LatlngField == nil { + break + } + + return e.complexity.PropertyLinkableFields.LatlngField(childComplexity), true + + case "PropertyLinkableFields.schema": + if e.complexity.PropertyLinkableFields.Schema == nil { + break + } + + return e.complexity.PropertyLinkableFields.Schema(childComplexity), true + + case "PropertyLinkableFields.schemaId": + if e.complexity.PropertyLinkableFields.SchemaID == nil { + break + } + + return e.complexity.PropertyLinkableFields.SchemaID(childComplexity), true + + case "PropertyLinkableFields.url": + if e.complexity.PropertyLinkableFields.URL == nil { + break + } + + return e.complexity.PropertyLinkableFields.URL(childComplexity), true + + case "PropertyLinkableFields.urlField": + if e.complexity.PropertyLinkableFields.URLField == nil { + break + } + + return e.complexity.PropertyLinkableFields.URLField(childComplexity), true + + case "PropertySchema.groups": + if e.complexity.PropertySchema.Groups == nil { + break + } + + return e.complexity.PropertySchema.Groups(childComplexity), true + + case "PropertySchema.id": + if e.complexity.PropertySchema.ID == nil { + break + } + + return e.complexity.PropertySchema.ID(childComplexity), true + + case "PropertySchema.linkableFields": + if e.complexity.PropertySchema.LinkableFields == nil { + break + } + + return e.complexity.PropertySchema.LinkableFields(childComplexity), true + + case "PropertySchemaField.allTranslatedDescription": + if e.complexity.PropertySchemaField.AllTranslatedDescription == nil { + break + } + + return 
e.complexity.PropertySchemaField.AllTranslatedDescription(childComplexity), true + + case "PropertySchemaField.allTranslatedName": + if e.complexity.PropertySchemaField.AllTranslatedName == nil { + break + } + + return e.complexity.PropertySchemaField.AllTranslatedName(childComplexity), true + + case "PropertySchemaField.allTranslatedTitle": + if e.complexity.PropertySchemaField.AllTranslatedTitle == nil { + break + } + + return e.complexity.PropertySchemaField.AllTranslatedTitle(childComplexity), true + + case "PropertySchemaField.choices": + if e.complexity.PropertySchemaField.Choices == nil { + break + } + + return e.complexity.PropertySchemaField.Choices(childComplexity), true + + case "PropertySchemaField.defaultValue": + if e.complexity.PropertySchemaField.DefaultValue == nil { + break + } + + return e.complexity.PropertySchemaField.DefaultValue(childComplexity), true + + case "PropertySchemaField.description": + if e.complexity.PropertySchemaField.Description == nil { + break + } + + return e.complexity.PropertySchemaField.Description(childComplexity), true + + case "PropertySchemaField.fieldId": + if e.complexity.PropertySchemaField.FieldID == nil { + break + } + + return e.complexity.PropertySchemaField.FieldID(childComplexity), true + + case "PropertySchemaField.isAvailableIf": + if e.complexity.PropertySchemaField.IsAvailableIf == nil { + break + } + + return e.complexity.PropertySchemaField.IsAvailableIf(childComplexity), true + + case "PropertySchemaField.max": + if e.complexity.PropertySchemaField.Max == nil { + break + } + + return e.complexity.PropertySchemaField.Max(childComplexity), true + + case "PropertySchemaField.min": + if e.complexity.PropertySchemaField.Min == nil { + break + } + + return e.complexity.PropertySchemaField.Min(childComplexity), true + + case "PropertySchemaField.name": + if e.complexity.PropertySchemaField.Name == nil { + break + } + + return e.complexity.PropertySchemaField.Name(childComplexity), true + + case 
"PropertySchemaField.prefix": + if e.complexity.PropertySchemaField.Prefix == nil { + break + } + + return e.complexity.PropertySchemaField.Prefix(childComplexity), true + + case "PropertySchemaField.suffix": + if e.complexity.PropertySchemaField.Suffix == nil { + break + } + + return e.complexity.PropertySchemaField.Suffix(childComplexity), true + + case "PropertySchemaField.title": + if e.complexity.PropertySchemaField.Title == nil { + break + } + + return e.complexity.PropertySchemaField.Title(childComplexity), true + + case "PropertySchemaField.translatedDescription": + if e.complexity.PropertySchemaField.TranslatedDescription == nil { + break + } + + args, err := ec.field_PropertySchemaField_translatedDescription_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaField.TranslatedDescription(childComplexity, args["lang"].(*string)), true + + case "PropertySchemaField.translatedName": + if e.complexity.PropertySchemaField.TranslatedName == nil { + break + } + + args, err := ec.field_PropertySchemaField_translatedName_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaField.TranslatedName(childComplexity, args["lang"].(*string)), true + + case "PropertySchemaField.translatedTitle": + if e.complexity.PropertySchemaField.TranslatedTitle == nil { + break + } + + args, err := ec.field_PropertySchemaField_translatedTitle_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaField.TranslatedTitle(childComplexity, args["lang"].(*string)), true + + case "PropertySchemaField.type": + if e.complexity.PropertySchemaField.Type == nil { + break + } + + return e.complexity.PropertySchemaField.Type(childComplexity), true + + case "PropertySchemaField.ui": + if e.complexity.PropertySchemaField.UI == nil { + break + } + + return e.complexity.PropertySchemaField.UI(childComplexity), true + + case 
"PropertySchemaFieldChoice.allTranslatedLabel": + if e.complexity.PropertySchemaFieldChoice.AllTranslatedLabel == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.AllTranslatedLabel(childComplexity), true + + case "PropertySchemaFieldChoice.allTranslatedTitle": + if e.complexity.PropertySchemaFieldChoice.AllTranslatedTitle == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.AllTranslatedTitle(childComplexity), true + + case "PropertySchemaFieldChoice.icon": + if e.complexity.PropertySchemaFieldChoice.Icon == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Icon(childComplexity), true + + case "PropertySchemaFieldChoice.key": + if e.complexity.PropertySchemaFieldChoice.Key == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Key(childComplexity), true + + case "PropertySchemaFieldChoice.label": + if e.complexity.PropertySchemaFieldChoice.Label == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Label(childComplexity), true + + case "PropertySchemaFieldChoice.title": + if e.complexity.PropertySchemaFieldChoice.Title == nil { + break + } + + return e.complexity.PropertySchemaFieldChoice.Title(childComplexity), true + + case "PropertySchemaFieldChoice.translatedLabel": + if e.complexity.PropertySchemaFieldChoice.TranslatedLabel == nil { + break + } + + args, err := ec.field_PropertySchemaFieldChoice_translatedLabel_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaFieldChoice.TranslatedLabel(childComplexity, args["lang"].(*string)), true + + case "PropertySchemaFieldChoice.translatedTitle": + if e.complexity.PropertySchemaFieldChoice.TranslatedTitle == nil { + break + } + + args, err := ec.field_PropertySchemaFieldChoice_translatedTitle_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaFieldChoice.TranslatedTitle(childComplexity, args["lang"].(*string)), true + + 
case "PropertySchemaGroup.allTranslatedTitle": + if e.complexity.PropertySchemaGroup.AllTranslatedTitle == nil { + break + } + + return e.complexity.PropertySchemaGroup.AllTranslatedTitle(childComplexity), true + + case "PropertySchemaGroup.fields": + if e.complexity.PropertySchemaGroup.Fields == nil { + break + } + + return e.complexity.PropertySchemaGroup.Fields(childComplexity), true + + case "PropertySchemaGroup.isAvailableIf": + if e.complexity.PropertySchemaGroup.IsAvailableIf == nil { + break + } + + return e.complexity.PropertySchemaGroup.IsAvailableIf(childComplexity), true + + case "PropertySchemaGroup.isList": + if e.complexity.PropertySchemaGroup.IsList == nil { + break + } + + return e.complexity.PropertySchemaGroup.IsList(childComplexity), true + + case "PropertySchemaGroup.name": + if e.complexity.PropertySchemaGroup.Name == nil { + break + } + + return e.complexity.PropertySchemaGroup.Name(childComplexity), true + + case "PropertySchemaGroup.representativeField": + if e.complexity.PropertySchemaGroup.RepresentativeField == nil { + break + } + + return e.complexity.PropertySchemaGroup.RepresentativeField(childComplexity), true + + case "PropertySchemaGroup.representativeFieldId": + if e.complexity.PropertySchemaGroup.RepresentativeFieldID == nil { + break + } + + return e.complexity.PropertySchemaGroup.RepresentativeFieldID(childComplexity), true + + case "PropertySchemaGroup.schema": + if e.complexity.PropertySchemaGroup.Schema == nil { + break + } + + return e.complexity.PropertySchemaGroup.Schema(childComplexity), true + + case "PropertySchemaGroup.schemaGroupId": + if e.complexity.PropertySchemaGroup.SchemaGroupID == nil { + break + } + + return e.complexity.PropertySchemaGroup.SchemaGroupID(childComplexity), true + + case "PropertySchemaGroup.schemaId": + if e.complexity.PropertySchemaGroup.SchemaID == nil { + break + } + + return e.complexity.PropertySchemaGroup.SchemaID(childComplexity), true + + case "PropertySchemaGroup.title": + if 
e.complexity.PropertySchemaGroup.Title == nil { + break + } + + return e.complexity.PropertySchemaGroup.Title(childComplexity), true + + case "PropertySchemaGroup.translatedTitle": + if e.complexity.PropertySchemaGroup.TranslatedTitle == nil { + break + } + + args, err := ec.field_PropertySchemaGroup_translatedTitle_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.PropertySchemaGroup.TranslatedTitle(childComplexity, args["lang"].(*string)), true + + case "Query.assets": + if e.complexity.Query.Assets == nil { + break + } + + args, err := ec.field_Query_assets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Assets(childComplexity, args["teamId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Query.checkProjectAlias": + if e.complexity.Query.CheckProjectAlias == nil { + break + } + + args, err := ec.field_Query_checkProjectAlias_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.CheckProjectAlias(childComplexity, args["alias"].(string)), true + + case "Query.datasetSchemas": + if e.complexity.Query.DatasetSchemas == nil { + break + } + + args, err := ec.field_Query_datasetSchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.DatasetSchemas(childComplexity, args["sceneId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Query.datasets": + if e.complexity.Query.Datasets == nil { + break + } + + args, err := ec.field_Query_datasets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Datasets(childComplexity, args["datasetSchemaId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true 
+ + case "Query.dynamicDatasetSchemas": + if e.complexity.Query.DynamicDatasetSchemas == nil { + break + } + + args, err := ec.field_Query_dynamicDatasetSchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.DynamicDatasetSchemas(childComplexity, args["sceneId"].(id.ID)), true + + case "Query.layer": + if e.complexity.Query.Layer == nil { + break + } + + args, err := ec.field_Query_layer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Layer(childComplexity, args["id"].(id.ID)), true + + case "Query.me": + if e.complexity.Query.Me == nil { + break + } + + return e.complexity.Query.Me(childComplexity), true + + case "Query.node": + if e.complexity.Query.Node == nil { + break + } + + args, err := ec.field_Query_node_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Node(childComplexity, args["id"].(id.ID), args["type"].(graphql1.NodeType)), true + + case "Query.nodes": + if e.complexity.Query.Nodes == nil { + break + } + + args, err := ec.field_Query_nodes_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Nodes(childComplexity, args["id"].([]*id.ID), args["type"].(graphql1.NodeType)), true + + case "Query.plugin": + if e.complexity.Query.Plugin == nil { + break + } + + args, err := ec.field_Query_plugin_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Plugin(childComplexity, args["id"].(id.PluginID)), true + + case "Query.plugins": + if e.complexity.Query.Plugins == nil { + break + } + + args, err := ec.field_Query_plugins_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Plugins(childComplexity, args["id"].([]*id.PluginID)), true + + case "Query.projects": + if e.complexity.Query.Projects == nil { + break + } + + args, err := 
ec.field_Query_projects_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Projects(childComplexity, args["teamId"].(id.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Query.propertySchema": + if e.complexity.Query.PropertySchema == nil { + break + } + + args, err := ec.field_Query_propertySchema_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.PropertySchema(childComplexity, args["id"].(id.PropertySchemaID)), true + + case "Query.propertySchemas": + if e.complexity.Query.PropertySchemas == nil { + break + } + + args, err := ec.field_Query_propertySchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.PropertySchemas(childComplexity, args["id"].([]*id.PropertySchemaID)), true + + case "Query.scene": + if e.complexity.Query.Scene == nil { + break + } + + args, err := ec.field_Query_scene_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.Scene(childComplexity, args["projectId"].(id.ID)), true + + case "Query.sceneLock": + if e.complexity.Query.SceneLock == nil { + break + } + + args, err := ec.field_Query_sceneLock_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.SceneLock(childComplexity, args["sceneId"].(id.ID)), true + + case "Query.searchUser": + if e.complexity.Query.SearchUser == nil { + break + } + + args, err := ec.field_Query_searchUser_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Query.SearchUser(childComplexity, args["nameOrEmail"].(string)), true + + case "Rect.east": + if e.complexity.Rect.East == nil { + break + } + + return e.complexity.Rect.East(childComplexity), true + + case "Rect.north": + if e.complexity.Rect.North == nil { + break + } 
+ + return e.complexity.Rect.North(childComplexity), true + + case "Rect.south": + if e.complexity.Rect.South == nil { + break + } + + return e.complexity.Rect.South(childComplexity), true + + case "Rect.west": + if e.complexity.Rect.West == nil { + break + } + + return e.complexity.Rect.West(childComplexity), true + + case "RemoveAssetPayload.assetId": + if e.complexity.RemoveAssetPayload.AssetID == nil { + break + } + + return e.complexity.RemoveAssetPayload.AssetID(childComplexity), true + + case "RemoveDatasetSchemaPayload.schemaId": + if e.complexity.RemoveDatasetSchemaPayload.SchemaID == nil { + break + } + + return e.complexity.RemoveDatasetSchemaPayload.SchemaID(childComplexity), true + + case "RemoveInfoboxFieldPayload.infoboxFieldId": + if e.complexity.RemoveInfoboxFieldPayload.InfoboxFieldID == nil { + break + } + + return e.complexity.RemoveInfoboxFieldPayload.InfoboxFieldID(childComplexity), true + + case "RemoveInfoboxFieldPayload.layer": + if e.complexity.RemoveInfoboxFieldPayload.Layer == nil { + break + } + + return e.complexity.RemoveInfoboxFieldPayload.Layer(childComplexity), true + + case "RemoveInfoboxPayload.layer": + if e.complexity.RemoveInfoboxPayload.Layer == nil { + break + } + + return e.complexity.RemoveInfoboxPayload.Layer(childComplexity), true + + case "RemoveLayerPayload.layerId": + if e.complexity.RemoveLayerPayload.LayerID == nil { + break + } + + return e.complexity.RemoveLayerPayload.LayerID(childComplexity), true + + case "RemoveLayerPayload.parentLayer": + if e.complexity.RemoveLayerPayload.ParentLayer == nil { + break + } + + return e.complexity.RemoveLayerPayload.ParentLayer(childComplexity), true + + case "RemoveMemberFromTeamPayload.team": + if e.complexity.RemoveMemberFromTeamPayload.Team == nil { + break + } + + return e.complexity.RemoveMemberFromTeamPayload.Team(childComplexity), true + + case "RemoveWidgetPayload.extensionId": + if e.complexity.RemoveWidgetPayload.ExtensionID == nil { + break + } + + return 
e.complexity.RemoveWidgetPayload.ExtensionID(childComplexity), true + + case "RemoveWidgetPayload.pluginId": + if e.complexity.RemoveWidgetPayload.PluginID == nil { + break + } + + return e.complexity.RemoveWidgetPayload.PluginID(childComplexity), true + + case "RemoveWidgetPayload.scene": + if e.complexity.RemoveWidgetPayload.Scene == nil { + break + } + + return e.complexity.RemoveWidgetPayload.Scene(childComplexity), true + + case "Scene.createdAt": + if e.complexity.Scene.CreatedAt == nil { + break + } + + return e.complexity.Scene.CreatedAt(childComplexity), true + + case "Scene.datasetSchemas": + if e.complexity.Scene.DatasetSchemas == nil { + break + } + + args, err := ec.field_Scene_datasetSchemas_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Scene.DatasetSchemas(childComplexity, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Scene.dynamicDatasetSchemas": + if e.complexity.Scene.DynamicDatasetSchemas == nil { + break + } + + return e.complexity.Scene.DynamicDatasetSchemas(childComplexity), true + + case "Scene.id": + if e.complexity.Scene.ID == nil { + break + } + + return e.complexity.Scene.ID(childComplexity), true + + case "Scene.lockMode": + if e.complexity.Scene.LockMode == nil { + break + } + + return e.complexity.Scene.LockMode(childComplexity), true + + case "Scene.plugins": + if e.complexity.Scene.Plugins == nil { + break + } + + return e.complexity.Scene.Plugins(childComplexity), true + + case "Scene.project": + if e.complexity.Scene.Project == nil { + break + } + + return e.complexity.Scene.Project(childComplexity), true + + case "Scene.projectId": + if e.complexity.Scene.ProjectID == nil { + break + } + + return e.complexity.Scene.ProjectID(childComplexity), true + + case "Scene.property": + if e.complexity.Scene.Property == nil { + break + } + + return e.complexity.Scene.Property(childComplexity), true + + case 
"Scene.propertyId": + if e.complexity.Scene.PropertyID == nil { + break + } + + return e.complexity.Scene.PropertyID(childComplexity), true + + case "Scene.rootLayer": + if e.complexity.Scene.RootLayer == nil { + break + } + + return e.complexity.Scene.RootLayer(childComplexity), true + + case "Scene.rootLayerId": + if e.complexity.Scene.RootLayerID == nil { + break + } + + return e.complexity.Scene.RootLayerID(childComplexity), true + + case "Scene.team": + if e.complexity.Scene.Team == nil { + break + } + + return e.complexity.Scene.Team(childComplexity), true + + case "Scene.teamId": + if e.complexity.Scene.TeamID == nil { + break + } + + return e.complexity.Scene.TeamID(childComplexity), true + + case "Scene.updatedAt": + if e.complexity.Scene.UpdatedAt == nil { + break + } + + return e.complexity.Scene.UpdatedAt(childComplexity), true + + case "Scene.widgets": + if e.complexity.Scene.Widgets == nil { + break + } + + return e.complexity.Scene.Widgets(childComplexity), true + + case "ScenePlugin.plugin": + if e.complexity.ScenePlugin.Plugin == nil { + break + } + + return e.complexity.ScenePlugin.Plugin(childComplexity), true + + case "ScenePlugin.pluginId": + if e.complexity.ScenePlugin.PluginID == nil { + break + } + + return e.complexity.ScenePlugin.PluginID(childComplexity), true + + case "ScenePlugin.property": + if e.complexity.ScenePlugin.Property == nil { + break + } + + return e.complexity.ScenePlugin.Property(childComplexity), true + + case "ScenePlugin.propertyId": + if e.complexity.ScenePlugin.PropertyID == nil { + break + } + + return e.complexity.ScenePlugin.PropertyID(childComplexity), true + + case "SceneWidget.enabled": + if e.complexity.SceneWidget.Enabled == nil { + break + } + + return e.complexity.SceneWidget.Enabled(childComplexity), true + + case "SceneWidget.extension": + if e.complexity.SceneWidget.Extension == nil { + break + } + + return e.complexity.SceneWidget.Extension(childComplexity), true + + case "SceneWidget.extensionId": + if 
e.complexity.SceneWidget.ExtensionID == nil { + break + } + + return e.complexity.SceneWidget.ExtensionID(childComplexity), true + + case "SceneWidget.id": + if e.complexity.SceneWidget.ID == nil { + break + } + + return e.complexity.SceneWidget.ID(childComplexity), true + + case "SceneWidget.plugin": + if e.complexity.SceneWidget.Plugin == nil { + break + } + + return e.complexity.SceneWidget.Plugin(childComplexity), true + + case "SceneWidget.pluginId": + if e.complexity.SceneWidget.PluginID == nil { + break + } + + return e.complexity.SceneWidget.PluginID(childComplexity), true + + case "SceneWidget.property": + if e.complexity.SceneWidget.Property == nil { + break + } + + return e.complexity.SceneWidget.Property(childComplexity), true + + case "SceneWidget.propertyId": + if e.complexity.SceneWidget.PropertyID == nil { + break + } + + return e.complexity.SceneWidget.PropertyID(childComplexity), true + + case "SearchedUser.userEmail": + if e.complexity.SearchedUser.UserEmail == nil { + break + } + + return e.complexity.SearchedUser.UserEmail(childComplexity), true + + case "SearchedUser.userId": + if e.complexity.SearchedUser.UserID == nil { + break + } + + return e.complexity.SearchedUser.UserID(childComplexity), true + + case "SearchedUser.userName": + if e.complexity.SearchedUser.UserName == nil { + break + } + + return e.complexity.SearchedUser.UserName(childComplexity), true + + case "SignupPayload.team": + if e.complexity.SignupPayload.Team == nil { + break + } + + return e.complexity.SignupPayload.Team(childComplexity), true + + case "SignupPayload.user": + if e.complexity.SignupPayload.User == nil { + break + } + + return e.complexity.SignupPayload.User(childComplexity), true + + case "SyncDatasetPayload.dataset": + if e.complexity.SyncDatasetPayload.Dataset == nil { + break + } + + return e.complexity.SyncDatasetPayload.Dataset(childComplexity), true + + case "SyncDatasetPayload.datasetSchema": + if e.complexity.SyncDatasetPayload.DatasetSchema == nil { 
+ break + } + + return e.complexity.SyncDatasetPayload.DatasetSchema(childComplexity), true + + case "SyncDatasetPayload.sceneId": + if e.complexity.SyncDatasetPayload.SceneID == nil { + break + } + + return e.complexity.SyncDatasetPayload.SceneID(childComplexity), true + + case "SyncDatasetPayload.url": + if e.complexity.SyncDatasetPayload.URL == nil { + break + } + + return e.complexity.SyncDatasetPayload.URL(childComplexity), true + + case "Team.assets": + if e.complexity.Team.Assets == nil { + break + } + + args, err := ec.field_Team_assets_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Team.Assets(childComplexity, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "Team.id": + if e.complexity.Team.ID == nil { + break + } + + return e.complexity.Team.ID(childComplexity), true + + case "Team.members": + if e.complexity.Team.Members == nil { + break + } + + return e.complexity.Team.Members(childComplexity), true + + case "Team.name": + if e.complexity.Team.Name == nil { + break + } + + return e.complexity.Team.Name(childComplexity), true + + case "Team.personal": + if e.complexity.Team.Personal == nil { + break + } + + return e.complexity.Team.Personal(childComplexity), true + + case "Team.projects": + if e.complexity.Team.Projects == nil { + break + } + + args, err := ec.field_Team_projects_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Team.Projects(childComplexity, args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + + case "TeamMember.role": + if e.complexity.TeamMember.Role == nil { + break + } + + return e.complexity.TeamMember.Role(childComplexity), true + + case "TeamMember.user": + if e.complexity.TeamMember.User == nil { + break + } + + return e.complexity.TeamMember.User(childComplexity), 
true + + case "TeamMember.userId": + if e.complexity.TeamMember.UserID == nil { + break + } + + return e.complexity.TeamMember.UserID(childComplexity), true + + case "Typography.bold": + if e.complexity.Typography.Bold == nil { + break + } + + return e.complexity.Typography.Bold(childComplexity), true + + case "Typography.color": + if e.complexity.Typography.Color == nil { + break + } + + return e.complexity.Typography.Color(childComplexity), true + + case "Typography.fontFamily": + if e.complexity.Typography.FontFamily == nil { + break + } + + return e.complexity.Typography.FontFamily(childComplexity), true + + case "Typography.fontSize": + if e.complexity.Typography.FontSize == nil { + break + } + + return e.complexity.Typography.FontSize(childComplexity), true + + case "Typography.fontWeight": + if e.complexity.Typography.FontWeight == nil { + break + } + + return e.complexity.Typography.FontWeight(childComplexity), true + + case "Typography.italic": + if e.complexity.Typography.Italic == nil { + break + } + + return e.complexity.Typography.Italic(childComplexity), true + + case "Typography.textAlign": + if e.complexity.Typography.TextAlign == nil { + break + } + + return e.complexity.Typography.TextAlign(childComplexity), true + + case "Typography.underline": + if e.complexity.Typography.Underline == nil { + break + } + + return e.complexity.Typography.Underline(childComplexity), true + + case "UninstallPluginPayload.scene": + if e.complexity.UninstallPluginPayload.Scene == nil { + break + } + + return e.complexity.UninstallPluginPayload.Scene(childComplexity), true + + case "UninstallPluginPayload.scenePlugin": + if e.complexity.UninstallPluginPayload.ScenePlugin == nil { + break + } + + return e.complexity.UninstallPluginPayload.ScenePlugin(childComplexity), true + + case "UpdateDatasetSchemaPayload.datasetSchema": + if e.complexity.UpdateDatasetSchemaPayload.DatasetSchema == nil { + break + } + + return 
e.complexity.UpdateDatasetSchemaPayload.DatasetSchema(childComplexity), true + + case "UpdateLayerPayload.layer": + if e.complexity.UpdateLayerPayload.Layer == nil { + break + } + + return e.complexity.UpdateLayerPayload.Layer(childComplexity), true + + case "UpdateMePayload.user": + if e.complexity.UpdateMePayload.User == nil { + break + } + + return e.complexity.UpdateMePayload.User(childComplexity), true + + case "UpdateMemberOfTeamPayload.team": + if e.complexity.UpdateMemberOfTeamPayload.Team == nil { + break + } + + return e.complexity.UpdateMemberOfTeamPayload.Team(childComplexity), true + + case "UpdateTeamPayload.team": + if e.complexity.UpdateTeamPayload.Team == nil { + break + } + + return e.complexity.UpdateTeamPayload.Team(childComplexity), true + + case "UpdateWidgetPayload.scene": + if e.complexity.UpdateWidgetPayload.Scene == nil { + break + } + + return e.complexity.UpdateWidgetPayload.Scene(childComplexity), true + + case "UpdateWidgetPayload.sceneWidget": + if e.complexity.UpdateWidgetPayload.SceneWidget == nil { + break + } + + return e.complexity.UpdateWidgetPayload.SceneWidget(childComplexity), true + + case "UpgradePluginPayload.scene": + if e.complexity.UpgradePluginPayload.Scene == nil { + break + } + + return e.complexity.UpgradePluginPayload.Scene(childComplexity), true + + case "UpgradePluginPayload.scenePlugin": + if e.complexity.UpgradePluginPayload.ScenePlugin == nil { + break + } + + return e.complexity.UpgradePluginPayload.ScenePlugin(childComplexity), true + + case "UploadPluginPayload.plugin": + if e.complexity.UploadPluginPayload.Plugin == nil { + break + } + + return e.complexity.UploadPluginPayload.Plugin(childComplexity), true + + case "User.auths": + if e.complexity.User.Auths == nil { + break + } + + return e.complexity.User.Auths(childComplexity), true + + case "User.email": + if e.complexity.User.Email == nil { + break + } + + return e.complexity.User.Email(childComplexity), true + + case "User.id": + if 
e.complexity.User.ID == nil { + break + } + + return e.complexity.User.ID(childComplexity), true + + case "User.lang": + if e.complexity.User.Lang == nil { + break + } + + return e.complexity.User.Lang(childComplexity), true + + case "User.myTeam": + if e.complexity.User.MyTeam == nil { + break + } + + return e.complexity.User.MyTeam(childComplexity), true + + case "User.myTeamId": + if e.complexity.User.MyTeamID == nil { + break + } + + return e.complexity.User.MyTeamID(childComplexity), true + + case "User.name": + if e.complexity.User.Name == nil { + break + } + + return e.complexity.User.Name(childComplexity), true + + case "User.teams": + if e.complexity.User.Teams == nil { + break + } + + return e.complexity.User.Teams(childComplexity), true + + case "User.theme": + if e.complexity.User.Theme == nil { + break + } + + return e.complexity.User.Theme(childComplexity), true + + } + return 0, false +} + +func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { + rc := graphql.GetOperationContext(ctx) + ec := executionContext{rc, e} + first := true + + switch rc.Operation.Operation { + case ast.Query: + return func(ctx context.Context) *graphql.Response { + if !first { + return nil + } + first = false + data := ec._Query(ctx, rc.Operation.SelectionSet) + var buf bytes.Buffer + data.MarshalGQL(&buf) + + return &graphql.Response{ + Data: buf.Bytes(), + } + } + case ast.Mutation: + return func(ctx context.Context) *graphql.Response { + if !first { + return nil + } + first = false + data := ec._Mutation(ctx, rc.Operation.SelectionSet) + var buf bytes.Buffer + data.MarshalGQL(&buf) + + return &graphql.Response{ + Data: buf.Bytes(), + } + } + + default: + return graphql.OneShot(graphql.ErrorResponse(ctx, "unsupported GraphQL operation")) + } +} + +type executionContext struct { + *graphql.OperationContext + *executableSchema +} + +func (ec *executionContext) introspectSchema() (*introspection.Schema, error) { + if ec.DisableIntrospection { + return 
nil, errors.New("introspection disabled") + } + return introspection.WrapSchema(parsedSchema), nil +} + +func (ec *executionContext) introspectType(name string) (*introspection.Type, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil +} + +var sources = []*ast.Source{ + {Name: "schema.graphql", Input: `# Built-in + +scalar Upload +scalar Any + +directive @goModel(model: String, models: [String!]) on OBJECT + | INPUT_OBJECT + | SCALAR + | ENUM + | INTERFACE + | UNION + +directive @goField(forceResolver: Boolean, name: String) on INPUT_FIELD_DEFINITION + | FIELD_DEFINITION + +# Basic types + +scalar Cursor +scalar DateTime +scalar URL +scalar FileSize +scalar PluginID +scalar PluginExtensionID +scalar PropertySchemaID +scalar PropertySchemaFieldID +scalar TranslatedString + +type LatLng { + lat: Float! + lng: Float! +} + +type LatLngHeight { + lat: Float! + lng: Float! + height: Float! +} + +type Camera { + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! +} + +type Typography { + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean +} + +type Rect { + west: Float! + south: Float! + east: Float! + north: Float! +} + +enum TextAlign { + LEFT + CENTER + RIGHT + JUSTIFY + JUSTIFY_ALL +} + +enum ValueType { + BOOL + NUMBER + STRING + REF + URL + LATLNG + LATLNGHEIGHT + CAMERA + TYPOGRAPHY + COORDINATES + POLYGON + RECT +} + +enum ListOperation { + ADD, + MOVE, + REMOVE +} + +enum Theme { + DEFAULT + LIGHT + DARK +} + +# Meta Type + +interface Node { + id: ID! +} + +type PageInfo { + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} + +# Asset + +type Asset implements Node { + id: ID! + createdAt: DateTime! + teamId: ID! + name: String! 
+ size: FileSize! + url: String! + contentType: String! + team: Team @goField(forceResolver: true) +} + +# User + +type User implements Node { + id: ID! + name: String! + email: String! + lang: String! + theme: Theme! + myTeamId: ID! + auths: [String!]! + teams: [Team!]! @goField(forceResolver: true) + myTeam: Team! @goField(forceResolver: true) +} + +type SearchedUser { + userId: ID! + userName: String! + userEmail: String! +} + +type CheckProjectAliasPayload { + alias: String! + available: Boolean! +} + +type Team implements Node { + id: ID! + name: String! + members: [TeamMember!]! + personal: Boolean! + assets(first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! @goField(forceResolver: true) + projects(includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! @goField(forceResolver: true) +} + +type TeamMember { + userId: ID! + role: Role! + user: User @goField(forceResolver: true) +} + +enum Role { + # a role who can read project + READER + # a role who can read and write project + WRITER + # a eole who can have full controll of project + OWNER +} + +# Project + +type Project implements Node { + id: ID! + isArchived: Boolean! + createdAt: DateTime! + updatedAt: DateTime! + publishedAt: DateTime + name: String! + description: String! + alias: String! + publicTitle: String! + publicDescription: String! + publicImage: String! + publicNoIndex: Boolean! + imageUrl: URL + teamId: ID! + visualizer: Visualizer! + publishmentStatus: PublishmentStatus! + team: Team @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +enum Visualizer { + CESIUM +} + +enum PublishmentStatus { + PUBLIC + LIMITED + PRIVATE +} + +# Plugin + +type Plugin { + id: PluginID! + name: String! + version: String! + description: String! + author: String! + repositoryUrl: String! + propertySchemaId: PropertySchemaID + extensions: [PluginExtension!]! 
+ scenePlugin(sceneId: ID!): ScenePlugin + allTranslatedDescription: TranslatedString + allTranslatedName: TranslatedString + translatedName(lang: String): String! + translatedDescription(lang: String): String! + propertySchema: PropertySchema @goField(forceResolver: true) +} + +enum PluginExtensionType { + PRIMITIVE + WIDGET + BLOCK + VISUALIZER + INFOBOX +} + +type PluginExtension { + extensionId: PluginExtensionID! + pluginId: PluginID! + type: PluginExtensionType! + name: String! + description: String! + icon: String! + visualizer: Visualizer! + propertySchemaId: PropertySchemaID! + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + plugin: Plugin @goField(forceResolver: true) + sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) +} + +# Scene + +type Scene implements Node { + id: ID! + projectId: ID! + teamId: ID! + propertyId: ID! + createdAt: DateTime! + updatedAt: DateTime! + rootLayerId: ID! + widgets: [SceneWidget!]! + plugins: [ScenePlugin!]! + dynamicDatasetSchemas: [DatasetSchema!]! + project: Project @goField(forceResolver: true) + team: Team @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + rootLayer: LayerGroup @goField(forceResolver: true) + lockMode: SceneLockMode! @goField(forceResolver: true) + datasetSchemas(first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! @goField(forceResolver: true) +} + +enum SceneLockMode { + FREE + PENDING + DATASET_SYNCING + PLUGIN_UPGRADING + PUBLISHING +} + +type SceneWidget { + id: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + propertyId: ID! + enabled: Boolean! 
+ plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +type ScenePlugin { + pluginId: PluginID! + propertyId: ID + plugin: Plugin @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +# Property + +type PropertySchema { + id: PropertySchemaID! + groups: [PropertySchemaGroup!]! + linkableFields: PropertyLinkableFields! +} + +type PropertyLinkableFields { + schemaId: PropertySchemaID! + latlng: PropertySchemaFieldID + url: PropertySchemaFieldID + latlngField: PropertySchemaField @goField(forceResolver: true) + urlField: PropertySchemaField @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) +} + +type PropertySchemaGroup { + schemaGroupId: PropertySchemaFieldID! + schemaId: PropertySchemaID! + fields: [PropertySchemaField!]! + isList: Boolean! + isAvailableIf: PropertyCondition + title: String + allTranslatedTitle: TranslatedString + # For compatibility: "name" field will be removed in the futrue + name: PropertySchemaFieldID + representativeFieldId: PropertySchemaFieldID + representativeField: PropertySchemaField + schema: PropertySchema @goField(forceResolver: true) + translatedTitle(lang: String): String! @goField(forceResolver: true) +} + +type PropertySchemaField { + fieldId: PropertySchemaFieldID! + type: ValueType! + title: String! + # For compatibility: "name" field will be removed in the futrue + name: String! + description: String! + prefix: String + suffix: String + defaultValue: Any + ui: PropertySchemaFieldUI + min: Float + max: Float + choices: [PropertySchemaFieldChoice!] + isAvailableIf: PropertyCondition + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedName" field will be removed in the futrue + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + translatedTitle(lang: String): String! 
@goField(forceResolver: true) + # For compatibility: "translatedName" field will be removed in the futrue + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) +} + +enum PropertySchemaFieldUI { + LAYER + MULTILINE + SELECTION + COLOR + RANGE + IMAGE + VIDEO + FILE + CAMERA_POSE +} + +type PropertySchemaFieldChoice { + key: String! + title: String! + # For compatibility: "label" field will be removed in the futrue + label: String! + icon: String + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedLabel" field will be removed in the futrue + allTranslatedLabel: TranslatedString + translatedTitle(lang: String): String! @goField(forceResolver: true) + # For compatibility: "translatedLabel" field will be removed in the futrue + translatedLabel(lang: String): String! @goField(forceResolver: true) +} + +type PropertyCondition { + fieldId: PropertySchemaFieldID! + type: ValueType! + value: Any +} + +type Property implements Node { + id: ID! + schemaId: PropertySchemaID! + items: [PropertyItem!]! + schema: PropertySchema @goField(forceResolver: true) + layer: Layer @goField(forceResolver: true) + merged: MergedProperty @goField(forceResolver: true) +} + +union PropertyItem = PropertyGroup | PropertyGroupList + +type PropertyGroup { + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + fields: [PropertyField!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyGroupList { + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + groups: [PropertyGroup!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyField { + id: PropertySchemaFieldID! + parentId: ID! + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! 
+ links: [PropertyFieldLink!] + type: ValueType! + value: Any + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +type PropertyFieldLink { + datasetId: ID + datasetSchemaId: ID! + datasetSchemaFieldId: ID! + dataset: Dataset @goField(forceResolver: true) + datasetField: DatasetField @goField(forceResolver: true) + datasetSchema: DatasetSchema @goField(forceResolver: true) + datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) +} + +type MergedProperty { + originalId: ID + parentId: ID + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + original: Property @goField(forceResolver: true) + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + groups: [MergedPropertyGroup!]! @goField(forceResolver: true) +} + +type MergedPropertyGroup { + originalPropertyId: ID + parentPropertyId: ID + originalId: ID + parentId: ID + schemaGroupId: PropertySchemaFieldID! + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + fields: [MergedPropertyField!]! + groups: [MergedPropertyGroup!]! + originalProperty: Property @goField(forceResolver: true) + parentProperty: Property @goField(forceResolver: true) + original: PropertyGroup @goField(forceResolver: true) + parent: PropertyGroup @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) +} + +type MergedPropertyField { + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! + links: [PropertyFieldLink!] + overridden: Boolean! 
+ schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +# Dataset + +type DatasetSchema implements Node { + id: ID! + source: String! + name: String! + sceneId: ID! + fields: [DatasetSchemaField!]! + representativeFieldId: ID + dynamic: Boolean + datasets(first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + representativeField: DatasetSchemaField @goField(forceResolver: true) +} + +type DatasetSchemaField implements Node { + id: ID! + source: String! + name: String! + type: ValueType! + schemaId: ID! + refId: ID + schema: DatasetSchema @goField(forceResolver: true) + ref: DatasetSchema @goField(forceResolver: true) +} + +type Dataset implements Node { + id: ID! + source: String! + schemaId: ID! + fields: [DatasetField!]! + schema: DatasetSchema @goField(forceResolver: true) + name: String @goField(forceResolver: true) +} + +type DatasetField { + fieldId: ID! + schemaId: ID! + source: String! + type: ValueType! + value: Any + schema: DatasetSchema @goField(forceResolver: true) + field: DatasetSchemaField @goField(forceResolver: true) + valueRef: Dataset @goField(forceResolver: true) +} + +# Layer + +interface Layer { + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + parent: LayerGroup + property: Property + plugin: Plugin + extension: PluginExtension +} + +union Layers = LayerItem | LayerGroup + +enum LayerEncodingFormat { + KML + CZML + GEOJSON + SHAPE + REEARTH +} + +type LayerItem implements Layer { + id: ID! + name: String! + isVisible: Boolean! 
+ propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetId: ID + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedLayer @goField(forceResolver: true) +} + +type LayerGroup implements Layer { + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetSchemaId: ID + root: Boolean! + layerIds: [ID!]! + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + layers: [Layer]! @goField(forceResolver: true) +} + +type Infobox { + layerId: ID! + propertyId: ID! + fields: [InfoboxField!]! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfobox @goField(forceResolver: true) +} + +type InfoboxField { + id: ID! + layerId: ID! + propertyId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + infobox: Infobox! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfoboxField @goField(forceResolver: true) +} + +type MergedLayer { + originalId: ID! 
+ parentId: ID + property: MergedProperty + infobox: MergedInfobox + original: LayerItem @goField(forceResolver: true) + parent: LayerGroup @goField(forceResolver: true) +} + +type MergedInfobox { + property: MergedProperty + fields: [MergedInfoboxField!]! +} + +type MergedInfoboxField { + originalId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + property: MergedProperty + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) +} + + +# InputType +input CreateAssetInput { + teamId: ID! + file: Upload! +} + +input RemoveAssetInput { + assetId: ID! +} + +input SignupInput { + userId: ID + teamId: ID + secret: String +} + +input UpdateMeInput { + name: String + email: String + lang: String + theme: Theme + password: String + passwordConfirmation: String +} + +input RemoveMyAuthInput { + auth: String! +} + +input DeleteMeInput { + userId: ID! +} + +input CreateTeamInput { + name: String! +} + +input UpdateTeamInput { + teamId: ID! + name: String! +} + +input AddMemberToTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input RemoveMemberFromTeamInput { + teamId: ID! + userId: ID! +} + +input UpdateMemberOfTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input DeleteTeamInput { + teamId: ID! +} + +input CreateProjectInput { + teamId: ID! + visualizer: Visualizer! + name: String + description: String + imageUrl: URL + alias: String + archived: Boolean +} + +input UpdateProjectInput { + projectId: ID! + name: String + description: String + archived: Boolean + alias: String + imageUrl: URL + publicTitle: String + publicDescription: String + publicImage: Upload + publicNoIndex: Boolean + deleteImageUrl: Boolean + deletePublicImage: Boolean +} + +input UploadPluginInput { + file: Upload! +} + +input CreateSceneInput { + projectId: ID! +} + +input PublishProjectInput { + projectId: ID! + alias: String + status: PublishmentStatus! +} + +input DeleteProjectInput { + projectId: ID! 
+} + +input AddWidgetInput { + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! +} + +input UpdateWidgetInput { + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + enabled: Boolean +} + +input RemoveWidgetInput { + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! +} + +input InstallPluginInput { + sceneId: ID! + pluginId: PluginID! +} + +input UninstallPluginInput { + sceneId: ID! + pluginId: PluginID! +} + +input UpgradePluginInput { + sceneId: ID! + pluginId: PluginID! + toPluginId: PluginID! +} + +input SyncDatasetInput { + sceneId: ID! + url: String! +} + +input UpdatePropertyValueInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! +} + +input UpdatePropertyValueLatLngInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! +} + +input UpdatePropertyValueLatLngHeightInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + height: Float! +} + +input UpdatePropertyValueCameraInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! +} + +input UpdatePropertyValueTypographyInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean +} + +input RemovePropertyFieldInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! +} + +input UploadFileToPropertyInput { + propertyId: ID! 
+ schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + file: Upload! +} + +input LinkDatasetToPropertyValueInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + datasetSchemaIds: [ID!]! + datasetSchemaFieldIds: [ID!]! + datasetIds: [ID!] +} + +input UnlinkPropertyValueInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! +} + +input AddPropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input MovePropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! + index: Int! +} + +input RemovePropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! +} + +input UpdatePropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + operations: [UpdatePropertyItemOperationInput!]! +} + +input UpdatePropertyItemOperationInput { + operation: ListOperation! + itemId: ID + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input AddLayerItemInput { + parentLayerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int + name: String + lat: Float + lng: Float +} + +input AddLayerGroupInput { + parentLayerId: ID! + pluginId: PluginID + extensionId: PluginExtensionID + index: Int + linkedDatasetSchemaID: ID + name: String +} + +input RemoveLayerInput { + layerId: ID! +} + +input UpdateLayerInput { + layerId: ID! + name: String + visible: Boolean +} + +input MoveLayerInput { + layerId: ID! + destLayerId: ID + index: Int +} + +input CreateInfoboxInput { + layerId: ID! +} + +input RemoveInfoboxInput { + layerId: ID! +} + +input AddInfoboxFieldInput { + layerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int +} + +input MoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! + index: Int! 
+} + +input RemoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! +} + +input UpdateDatasetSchemaInput { + schemaId: ID! + name: String! +} + +input AddDynamicDatasetSchemaInput { + sceneId: ID! +} + +input AddDynamicDatasetInput { + datasetSchemaId: ID! + author: String! + content: String! + lat: Float + lng: Float + target: String +} + +input RemoveDatasetSchemaInput { + schemaId: ID! + force: Boolean +} + +input ImportLayerInput { + layerId: ID! + file: Upload! + format: LayerEncodingFormat! +} + +input ImportDatasetInput { + file: Upload! + sceneId: ID! + datasetSchemaId: ID +} + +input AddDatasetSchemaInput { + sceneId: ID! + name: String! + representativefield: ID +} + +# Payload +type CreateAssetPayload { + asset: Asset! +} + +type RemoveAssetPayload { + assetId: ID! +} + +type SignupPayload { + user: User! + team: Team! +} + +type UpdateMePayload { + user: User! +} + +type DeleteMePayload { + userId: ID! +} + +type CreateTeamPayload { + team: Team! +} + +type UpdateTeamPayload { + team: Team! +} + +type AddMemberToTeamPayload { + team: Team! +} + +type RemoveMemberFromTeamPayload { + team: Team! +} + +type UpdateMemberOfTeamPayload { + team: Team! +} + +type DeleteTeamPayload { + teamId: ID! +} + +type ProjectPayload { + project: Project! +} + +type DeleteProjectPayload { + projectId: ID! +} + +type UploadPluginPayload { + plugin: Plugin! +} + +type CreateScenePayload { + scene: Scene! +} + +type AddWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type UpdateWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type RemoveWidgetPayload { + scene: Scene! + pluginId: PluginID! + extensionId: PluginExtensionID! +} + +type InstallPluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type UninstallPluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type UpgradePluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type SyncDatasetPayload { + sceneId: ID! + url: String! 
+ datasetSchema: [DatasetSchema!]! + dataset: [Dataset!]! +} + +type PropertyFieldPayload { + property: Property! + propertyField: PropertyField +} + +type PropertyItemPayload { + property: Property! + propertyItem: PropertyItem +} + +type AddLayerItemPayload { + layer: LayerItem! + parentLayer: LayerGroup! + index: Int +} + +type AddLayerGroupPayload { + layer: LayerGroup! + parentLayer: LayerGroup! + index: Int +} + +type RemoveLayerPayload { + layerId: ID! + parentLayer: LayerGroup! +} + +type UpdateLayerPayload { + layer: Layer! +} + +type MoveLayerPayload { + layerId: ID! + fromParentLayer: LayerGroup! + toParentLayer: LayerGroup! + index: Int! +} + +type CreateInfoboxPayload { + layer: Layer! +} + +type RemoveInfoboxPayload { + layer: Layer! +} + +type AddInfoboxFieldPayload { + infoboxField: InfoboxField! + layer: Layer! +} + +type MoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! + index: Int! +} + +type RemoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! +} + +type UpdateDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type RemoveDatasetSchemaPayload { + schemaId: ID! +} + +type AddDynamicDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type AddDynamicDatasetPayload { + datasetSchema: DatasetSchema + dataset: Dataset +} + +type ImportLayerPayload { + layers: [Layer!]! + parentLayer: LayerGroup! +} + +type ImportDatasetPayload { + datasetSchema: DatasetSchema! +} + +type AddDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +# Connection + +enum NodeType { + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM +} + +type AssetConnection { + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type AssetEdge { + cursor: Cursor! + node: Asset +} + +type ProjectConnection { + edges: [ProjectEdge!]! + nodes: [Project]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ProjectEdge { + cursor: Cursor! 
+ node: Project +} + +type DatasetSchemaConnection { + edges: [DatasetSchemaEdge!]! + nodes: [DatasetSchema]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetSchemaEdge { + cursor: Cursor! + node: DatasetSchema +} + +type DatasetConnection { + edges: [DatasetEdge!]! + nodes: [Dataset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetEdge { + cursor: Cursor! + node: Dataset +} + + +# Query + +type Query { + me: User + node(id: ID!, type: NodeType!): Node + nodes(id: [ID!]!, type: NodeType!): [Node]! + propertySchema(id: PropertySchemaID!): PropertySchema + propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! + plugin(id: PluginID!): Plugin + plugins(id: [PluginID!]!): [Plugin!]! + layer(id: ID!): Layer + scene(projectId: ID!): Scene + assets(teamId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! + projects(teamId: ID!, includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! + datasetSchemas(sceneId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! + datasets(datasetSchemaId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! + sceneLock(sceneId: ID!): SceneLockMode + dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! + searchUser(nameOrEmail: String!): SearchedUser + checkProjectAlias(alias: String!): CheckProjectAliasPayload! +} + +# Mutation + +type Mutation { + # Asset + createAsset(input: CreateAssetInput!): CreateAssetPayload! + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload! + + # User + signup(input: SignupInput!): SignupPayload! + updateMe(input: UpdateMeInput!): UpdateMePayload! + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload! + deleteMe(input: DeleteMeInput!): DeleteMePayload! + + # Team + createTeam(input: CreateTeamInput!): CreateTeamPayload! + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload! + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload! 
+ addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload! + removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload! + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload! + + # Project + createProject(input: CreateProjectInput!): ProjectPayload! + updateProject(input: UpdateProjectInput!): ProjectPayload! + publishProject(input: PublishProjectInput!): ProjectPayload! + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload! + + # Plugin + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload! + + # Scene + createScene(input: CreateSceneInput!): CreateScenePayload! + addWidget(input: AddWidgetInput!): AddWidgetPayload! + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload! + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload! + installPlugin(input: InstallPluginInput!): InstallPluginPayload! + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload! + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload! + + # Dataset + updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload! + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload! + addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload! + addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload! + removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload! + importDataset(input: ImportDatasetInput!): ImportDatasetPayload! + addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload! + + # Property + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload! + updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload! + updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload! + updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload! 
+ updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload! + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload! + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload! + linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload! + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload! + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload! + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload! + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload! + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload! + + # Layer + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload! + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload! + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload! + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload! + moveLayer(input: MoveLayerInput!): MoveLayerPayload! + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload! + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload! + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload! + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload! + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload! + importLayer(input:ImportLayerInput!): ImportLayerPayload! +} + +schema { + query: Query + mutation: Mutation +} +`, BuiltIn: false}, +} +var parsedSchema = gqlparser.MustLoadSchema(sources...) 
+ +// endregion ************************** generated!.gotpl ************************** + +// region ***************************** args.gotpl ***************************** + +func (ec *executionContext) field_DatasetSchema_datasets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg0, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg1 + var arg2 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg2, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Mutation_addDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addDynamicDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddDynamicDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addDynamicDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddDynamicDatasetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddInfoboxFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldInput(ctx, tmp) + if err 
!= nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addLayerGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddLayerGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addLayerItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddLayerItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addMemberToTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddMemberToTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addPropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) 
(map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddPropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddPropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_addWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.AddWidgetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.CreateAssetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createInfobox_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.CreateInfoboxInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) 
+ arg0, err = ec.unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.CreateProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createScene_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.CreateSceneInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateSceneInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.CreateTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil 
+} + +func (ec *executionContext) field_Mutation_deleteMe_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.DeleteMeInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMeInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_deleteProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.DeleteProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_deleteTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.DeleteTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_importDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.ImportDatasetInput + if 
tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_importLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.ImportLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_installPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.InstallPluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_linkDatasetToPropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.LinkDatasetToPropertyValueInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLinkDatasetToPropertyValueInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_moveInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.MoveInfoboxFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_moveLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.MoveLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_movePropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.MovePropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMovePropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + 
args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_publishProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.PublishProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveAssetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) 
{ + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveInfoboxFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeInfobox_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveInfoboxInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeMemberFromTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveMemberFromTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeMyAuth_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveMyAuthInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMyAuthInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removePropertyField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemovePropertyFieldInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyFieldInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removePropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemovePropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_removeWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.RemoveWidgetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_signup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.SignupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_syncDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.SyncDatasetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec 
*executionContext) field_Mutation_uninstallPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UninstallPluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_unlinkPropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UnlinkPropertyValueInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUnlinkPropertyValueInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdateDatasetSchemaInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args 
:= map[string]interface{}{} + var arg0 graphql1.UpdateLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateMe_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdateMeInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMeInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateMemberOfTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdateMemberOfTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdateProjectInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateProjectInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyItems_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdatePropertyItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyValueCamera_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdatePropertyValueCameraInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdatePropertyValueCameraInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueCameraInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyValueLatLngHeight_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdatePropertyValueLatLngHeightInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNUpdatePropertyValueLatLngHeightInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngHeightInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyValueLatLng_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdatePropertyValueLatLngInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdatePropertyValueLatLngInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyValueTypography_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdatePropertyValueTypographyInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdatePropertyValueTypographyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueTypographyInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updatePropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdatePropertyValueInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdateTeamInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_updateWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpdateWidgetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_upgradePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UpgradePluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, 
nil +} + +func (ec *executionContext) field_Mutation_uploadFileToProperty_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UploadFileToPropertyInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadFileToPropertyInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_uploadPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.UploadPluginInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PluginExtension_sceneWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PluginExtension_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok 
:= rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PluginExtension_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Plugin_scenePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Plugin_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Plugin_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaFieldChoice_translatedLabel_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaFieldChoice_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaField_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaField_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaField_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_PropertySchemaGroup_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *string + if tmp, ok := rawArgs["lang"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["lang"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["name"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["name"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_assets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["teamId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + arg0, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["teamId"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field_Query_checkProjectAlias_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["alias"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["alias"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_datasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 
id.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field_Query_datasets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["datasetSchemaId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["datasetSchemaId"] = arg0 + var arg1 *int + if tmp, ok 
:= rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field_Query_dynamicDatasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_layer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_node_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + var arg1 graphql1.NodeType + if tmp, ok := rawArgs["type"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) + arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx, tmp) + if err != nil { + return nil, err + } + } + args["type"] = arg1 + return args, nil +} + +func (ec *executionContext) field_Query_nodes_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 []*id.ID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + var arg1 graphql1.NodeType + if tmp, ok := rawArgs["type"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) + arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx, tmp) + if err != nil { + return nil, err + } + } + args["type"] = arg1 + return args, nil +} + +func (ec *executionContext) field_Query_plugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + 
var err error + args := map[string]interface{}{} + var arg0 id.PluginID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_plugins_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 []*id.PluginID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginIDแš„(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_projects_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["teamId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["teamId"] = arg0 + var arg1 *bool + if tmp, ok := rawArgs["includeArchived"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeArchived")) + arg1, err = ec.unmarshalOBoolean2แš–bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeArchived"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg2 + var arg3 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg3, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg4 + var arg5 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg5, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg5 + return args, nil +} + +func (ec *executionContext) field_Query_propertySchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.PropertySchemaID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_propertySchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 []*id.PropertySchemaID + if tmp, ok := rawArgs["id"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) + arg0, err = ec.unmarshalNPropertySchemaID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaIDแš„(ctx, tmp) + if err != nil { + return nil, err + } + } + args["id"] = arg0 + return args, nil +} + +func (ec *executionContext) 
field_Query_sceneLock_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["sceneId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["sceneId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_scene_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 id.ID + if tmp, ok := rawArgs["projectId"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + if err != nil { + return nil, err + } + } + args["projectId"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Query_searchUser_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["nameOrEmail"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameOrEmail")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["nameOrEmail"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Scene_datasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg0, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["last"]; ok { + ctx 
:= graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg1 + var arg2 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg2, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Team_assets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg0, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg1 + var arg2 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg2, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) 
+ arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg3 + return args, nil +} + +func (ec *executionContext) field_Team_projects_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 *bool + if tmp, ok := rawArgs["includeArchived"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeArchived")) + arg0, err = ec.unmarshalOBoolean2แš–bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeArchived"] = arg0 + var arg1 *int + if tmp, ok := rawArgs["first"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["first"] = arg1 + var arg2 *int + if tmp, ok := rawArgs["last"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + if err != nil { + return nil, err + } + } + args["last"] = arg2 + var arg3 *usecase.Cursor + if tmp, ok := rawArgs["after"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["after"] = arg3 + var arg4 *usecase.Cursor + if tmp, ok := rawArgs["before"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + if err != nil { + return nil, err + } + } + args["before"] = arg4 + return args, nil +} + +func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err 
error + args := map[string]interface{}{} + var arg0 bool + if tmp, ok := rawArgs["includeDeprecated"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeDeprecated"] = arg0 + return args, nil +} + +func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 bool + if tmp, ok := rawArgs["includeDeprecated"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeDeprecated"] = arg0 + return args, nil +} + +// endregion ***************************** args.gotpl ***************************** + +// region ************************** directives.gotpl ************************** + +// endregion ************************** directives.gotpl ************************** + +// region **************************** field.gotpl ***************************** + +func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDatasetSchemaPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddDatasetSchemaPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDynamicDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddDynamicDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDynamicDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddDynamicDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Dataset, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return 
graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddDynamicDatasetSchemaPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InfoboxField, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp 
== nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.InfoboxField) + fc.Result = res + return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxField(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children 
+ return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerItem) + fc.Result = res + return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
+ ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddMemberToTeamPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddMemberToTeamPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + 
ctx = rctx // use context from middleware stack in children + return obj.SceneWidget, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.SceneWidget) + fc.Result = res + return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Size, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int64) + fc.Result = res + return ec.marshalNFileSize2int64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.URL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = 
res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ContentType, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Asset", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Asset().Team(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _AssetConnection_edges(ctx context.Context, 
field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.AssetEdge) + fc.Result = res + return ec.marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.Asset) + fc.Result = res + return ec.marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, field.Selections, res) 
+} + +func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AssetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _AssetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AssetEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AssetEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Asset) + fc.Result = res + return ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_lat(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lat, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_altitude(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Altitude, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Heading, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Pitch, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Roll, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Camera", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware 
stack in children + return obj.Fov, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _CheckProjectAliasPayload_alias(ctx context.Context, field graphql.CollectedField, obj *graphql1.CheckProjectAliasPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CheckProjectAliasPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Alias, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _CheckProjectAliasPayload_available(ctx context.Context, field graphql.CollectedField, obj *graphql1.CheckProjectAliasPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CheckProjectAliasPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Available, nil + }) + if err != 
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateAssetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateAssetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Asset, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Asset) + fc.Result = res + return ec.marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, field.Selections, res) +} + +func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateInfoboxPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateInfoboxPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateScenePayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateScenePayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateTeamPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateTeamPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware 
stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Dataset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Dataset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil 
+ }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Dataset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Dataset", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetField) + fc.Result = res + return ec.marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Dataset", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Dataset().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _Dataset_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Dataset", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Dataset().Name(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + 
if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetEdge) + fc.Result = res + return ec.marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.Dataset) + fc.Result = res + return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return 
graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + 
} + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.ValueType) + fc.Result = res + 
return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func 
(ec *executionContext) _DatasetField_field(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetField().Field(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchemaField) + fc.Result = res + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetField().ValueRef(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _DatasetSchema_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + 
defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetSchemaField) + fc.Result = res + return ec.marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepresentativeFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Dynamic, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_DatasetSchema_datasets_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().Datasets(rctx, obj, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DatasetConnection) + fc.Result = res + return 
ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().RepresentativeField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchemaField) + fc.Result = res + 
return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetSchemaEdge) + fc.Result = res + return ec.marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = 
rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware 
stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.Source, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil 
{ + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RefID, nil + }) + if err != nil { + ec.Error(ctx, err) 
+ return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchemaField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchemaField().Ref(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DeleteMePayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DeleteMePayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DeleteProjectPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DeleteProjectPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ProjectID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DeleteTeamPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DeleteTeamPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.ImportDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ImportDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, field graphql.CollectedField, obj *graphql1.ImportLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ImportLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layers, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.ImportLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ImportLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in 
children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in 
children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.InfoboxField) + fc.Result = res + return ec.marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Layer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Property(rctx, obj) 
+ }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedInfobox) + fc.Result = res + return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be 
null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) + fc.Result = res + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return 
graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Layer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Infobox(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, 
fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Infobox) + fc.Result = res + return ec.marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { 
+ return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return 
graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedInfoboxField) + fc.Result = res + return ec.marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx, field.Selections, res) +} + +func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.InstallPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InstallPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, 
fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.InstallPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InstallPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLng) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LatLng", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lat, nil + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLng) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LatLng", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLngHeight) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LatLngHeight", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lat, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLngHeight) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LatLngHeight", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLngHeight) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LatLngHeight", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Height, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_LayerGroup_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsVisible, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PluginID) + fc.Result = res + return ec.marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PluginExtensionID) + fc.Result = res + return ec.marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx 
= graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Infobox, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Infobox) + fc.Result = res + return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx 
// use context from middleware stack in children + return obj.LinkedDatasetSchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Root, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerIds, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil 
{ + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*id.ID) + fc.Result = res + return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*graphql1.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().LinkedDatasetSchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Layers(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) 
{ + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not 
be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsVisible, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PluginID) + fc.Result = res + return ec.marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PluginExtensionID) + fc.Result = res + return ec.marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Infobox, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Infobox) + fc.Result = res + return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + 
ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedLayer) + fc.Result = res + return ec.marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
+ ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.MergedInfoboxField) + fc.Result = res + return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) + fc.Result = res + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: 
false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: true, + 
IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: false, + 
IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Infobox, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedInfobox) + fc.Result = res + return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_original(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedLayer().Original(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.LayerItem) + fc.Result = res + return ec.marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return ec.resolvers.MergedLayer().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil 
{ + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaID) + fc.Result = res + return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_original(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Original(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return 
ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return 
ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedProperty().Groups(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.MergedPropertyGroup) + fc.Result = res + return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Links, nil + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*graphql1.PropertyFieldLink) + fc.Result = res + return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLinkแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Overridden, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyField().Field(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.MergedPropertyField().ActualValue(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalPropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentPropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaID) + fc.Result 
= res + return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.MergedPropertyField) + 
fc.Result = res + return ec.marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Groups, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.MergedPropertyGroup) + fc.Result = res + return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().OriginalProperty(rctx, obj) + }) + if err 
!= nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().ParentProperty(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.MergedPropertyGroup().Original(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertyGroup) + fc.Result = res + return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertyGroup) + fc.Result = res + return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context 
from middleware stack in children + return ec.resolvers.MergedPropertyGroup().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyGroup().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InfoboxFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FromParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ToParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + 
Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createAsset_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateAsset(rctx, args["input"].(graphql1.CreateAssetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.CreateAssetPayload) + fc.Result = res + return ec.marshalNCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer 
func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeAsset_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveAsset(rctx, args["input"].(graphql1.RemoveAssetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveAssetPayload) + fc.Result = res + return ec.marshalNRemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_signup_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().Signup(rctx, args["input"].(graphql1.SignupInput)) + }) + if err != 
nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.SignupPayload) + fc.Result = res + return ec.marshalNSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateMe_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateMe(rctx, args["input"].(graphql1.UpdateMeInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateMePayload) + fc.Result = res + return ec.marshalNUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + 
IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeMyAuth_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveMyAuth(rctx, args["input"].(graphql1.RemoveMyAuthInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateMePayload) + fc.Result = res + return ec.marshalNUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_deleteMe_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DeleteMe(rctx, args["input"].(graphql1.DeleteMeInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.(*graphql1.DeleteMePayload) + fc.Result = res + return ec.marshalNDeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createTeam_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateTeam(rctx, args["input"].(graphql1.CreateTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.CreateTeamPayload) + fc.Result = res + return ec.marshalNCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := 
ec.field_Mutation_deleteTeam_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DeleteTeam(rctx, args["input"].(graphql1.DeleteTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DeleteTeamPayload) + fc.Result = res + return ec.marshalNDeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateTeam_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateTeam(rctx, args["input"].(graphql1.UpdateTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateTeamPayload) + fc.Result = res + return 
ec.marshalNUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addMemberToTeam_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddMemberToTeam(rctx, args["input"].(graphql1.AddMemberToTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddMemberToTeamPayload) + fc.Result = res + return ec.marshalNAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := 
ec.field_Mutation_removeMemberFromTeam_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveMemberFromTeam(rctx, args["input"].(graphql1.RemoveMemberFromTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveMemberFromTeamPayload) + fc.Result = res + return ec.marshalNRemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateMemberOfTeam_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateMemberOfTeam(rctx, args["input"].(graphql1.UpdateMemberOfTeamInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateMemberOfTeamPayload) 
+ fc.Result = res + return ec.marshalNUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_createProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createProject_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateProject(rctx, args["input"].(graphql1.CreateProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ProjectPayload) + fc.Result = res + return ec.marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateProject_args(ctx, 
rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateProject(rctx, args["input"].(graphql1.UpdateProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ProjectPayload) + fc.Result = res + return ec.marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_publishProject_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().PublishProject(rctx, args["input"].(graphql1.PublishProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ProjectPayload) + fc.Result = res + return 
ec.marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_deleteProject_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DeleteProject(rctx, args["input"].(graphql1.DeleteProjectInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DeleteProjectPayload) + fc.Result = res + return ec.marshalNDeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_uploadPlugin_args(ctx, rawArgs) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UploadPlugin(rctx, args["input"].(graphql1.UploadPluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UploadPluginPayload) + fc.Result = res + return ec.marshalNUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createScene_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateScene(rctx, args["input"].(graphql1.CreateSceneInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.CreateScenePayload) + fc.Result = res + return 
ec.marshalNCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addWidget_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddWidget(rctx, args["input"].(graphql1.AddWidgetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddWidgetPayload) + fc.Result = res + return ec.marshalNAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateWidget_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + 
return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateWidget(rctx, args["input"].(graphql1.UpdateWidgetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateWidgetPayload) + fc.Result = res + return ec.marshalNUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeWidget_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveWidget(rctx, args["input"].(graphql1.RemoveWidgetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveWidgetPayload) + fc.Result = res + return ec.marshalNRemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx, 
field.Selections, res) +} + +func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_installPlugin_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().InstallPlugin(rctx, args["input"].(graphql1.InstallPluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.InstallPluginPayload) + fc.Result = res + return ec.marshalNInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_uninstallPlugin_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UninstallPlugin(rctx, args["input"].(graphql1.UninstallPluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UninstallPluginPayload) + fc.Result = res + return ec.marshalNUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_upgradePlugin_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpgradePlugin(rctx, args["input"].(graphql1.UpgradePluginInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpgradePluginPayload) + fc.Result = res + return ec.marshalNUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateDatasetSchema_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateDatasetSchema(rctx, args["input"].(graphql1.UpdateDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateDatasetSchemaPayload) + fc.Result = res + return ec.marshalNUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_syncDataset_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().SyncDataset(rctx, args["input"].(graphql1.SyncDatasetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.SyncDatasetPayload) + fc.Result = res + return ec.marshalNSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addDynamicDatasetSchema_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddDynamicDatasetSchema(rctx, args["input"].(graphql1.AddDynamicDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddDynamicDatasetSchemaPayload) + fc.Result = res + return ec.marshalNAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addDynamicDataset_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddDynamicDataset(rctx, args["input"].(graphql1.AddDynamicDatasetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddDynamicDatasetPayload) + fc.Result = res + return ec.marshalNAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeDatasetSchema_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveDatasetSchema(rctx, args["input"].(graphql1.RemoveDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveDatasetSchemaPayload) + fc.Result = res + return ec.marshalNRemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_importDataset_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().ImportDataset(rctx, args["input"].(graphql1.ImportDatasetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ImportDatasetPayload) + fc.Result = res + return ec.marshalNImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addDatasetSchema_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddDatasetSchema(rctx, args["input"].(graphql1.AddDatasetSchemaInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddDatasetSchemaPayload) + fc.Result = res + return ec.marshalNAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updatePropertyValue_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyValue(rctx, args["input"].(graphql1.UpdatePropertyValueInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updatePropertyValueLatLng(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updatePropertyValueLatLng_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyValueLatLng(rctx, args["input"].(graphql1.UpdatePropertyValueLatLngInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_Mutation_updatePropertyValueLatLngHeight(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updatePropertyValueLatLngHeight_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyValueLatLngHeight(rctx, args["input"].(graphql1.UpdatePropertyValueLatLngHeightInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updatePropertyValueCamera(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updatePropertyValueCamera_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyValueCamera(rctx, args["input"].(graphql1.UpdatePropertyValueCameraInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updatePropertyValueTypography(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updatePropertyValueTypography_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyValueTypography(rctx, args["input"].(graphql1.UpdatePropertyValueTypographyInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return 
ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removePropertyField_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemovePropertyField(rctx, args["input"].(graphql1.RemovePropertyFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := 
ec.field_Mutation_uploadFileToProperty_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UploadFileToProperty(rctx, args["input"].(graphql1.UploadFileToPropertyInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_linkDatasetToPropertyValue_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().LinkDatasetToPropertyValue(rctx, args["input"].(graphql1.LinkDatasetToPropertyValueInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_unlinkPropertyValue_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UnlinkPropertyValue(rctx, args["input"].(graphql1.UnlinkPropertyValueInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyFieldPayload) + fc.Result = res + return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := 
field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addPropertyItem_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddPropertyItem(rctx, args["input"].(graphql1.AddPropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyItemPayload) + fc.Result = res + return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_movePropertyItem_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().MovePropertyItem(rctx, args["input"].(graphql1.MovePropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyItemPayload) + 
fc.Result = res + return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removePropertyItem_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemovePropertyItem(rctx, args["input"].(graphql1.RemovePropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyItemPayload) + fc.Result = res + return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := 
ec.field_Mutation_updatePropertyItems_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdatePropertyItems(rctx, args["input"].(graphql1.UpdatePropertyItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyItemPayload) + fc.Result = res + return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addLayerItem_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddLayerItem(rctx, args["input"].(graphql1.AddLayerItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddLayerItemPayload) + fc.Result = res + return 
ec.marshalNAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addLayerGroup_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddLayerGroup(rctx, args["input"].(graphql1.AddLayerGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddLayerGroupPayload) + fc.Result = res + return ec.marshalNAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeLayer_args(ctx, rawArgs) + if err != nil 
{ + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveLayer(rctx, args["input"].(graphql1.RemoveLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveLayerPayload) + fc.Result = res + return ec.marshalNRemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateLayer_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateLayer(rctx, args["input"].(graphql1.UpdateLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.UpdateLayerPayload) + fc.Result = res + return 
ec.marshalNUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_moveLayer_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().MoveLayer(rctx, args["input"].(graphql1.MoveLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.MoveLayerPayload) + fc.Result = res + return ec.marshalNMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createInfobox_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + 
return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateInfobox(rctx, args["input"].(graphql1.CreateInfoboxInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.CreateInfoboxPayload) + fc.Result = res + return ec.marshalNCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeInfobox_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveInfobox(rctx, args["input"].(graphql1.RemoveInfoboxInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveInfoboxPayload) + fc.Result = res + return 
ec.marshalNRemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addInfoboxField_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddInfoboxField(rctx, args["input"].(graphql1.AddInfoboxFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AddInfoboxFieldPayload) + fc.Result = res + return ec.marshalNAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_moveInfoboxField_args(ctx, 
rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().MoveInfoboxField(rctx, args["input"].(graphql1.MoveInfoboxFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.MoveInfoboxFieldPayload) + fc.Result = res + return ec.marshalNMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeInfoboxField_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveInfoboxField(rctx, args["input"].(graphql1.RemoveInfoboxFieldInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.RemoveInfoboxFieldPayload) + fc.Result = res + return 
ec.marshalNRemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_importLayer_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().ImportLayer(rctx, args["input"].(graphql1.ImportLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ImportLayerPayload) + fc.Result = res + return ec.marshalNImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = 
rctx // use context from middleware stack in children + return obj.StartCursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*usecase.Cursor) + fc.Result = res + return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.EndCursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*usecase.Cursor) + fc.Result = res + return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.HasNextPage, nil + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.HasPreviousPage, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Version, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func 
(ec *executionContext) _Plugin_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Author, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() 
{ + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepositoryURL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertySchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaID) + fc.Result = res + return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Extensions, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PluginExtension) + fc.Result = res + return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Plugin_scenePlugin_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, 
field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", 
+ Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Plugin_translatedName_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TranslatedName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Plugin_translatedDescription_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_Plugin_propertySchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Plugin().PropertySchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) + fc.Result = res + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} 
+ +func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.PluginExtensionType) + fc.Result = res + return 
ec.marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionType(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return 
ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Icon, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Visualizer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Visualizer) + fc.Result = res + return 
ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertySchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, 
field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PluginExtension_sceneWidget_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().SceneWidget(rctx, obj, args["sceneId"].(id.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.SceneWidget) + fc.Result = res + return ec.marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().PropertySchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return 
graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PluginExtension_translatedName_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().TranslatedName(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := 
field.ArgumentMap(ec.Variables) + args, err := ec.field_PluginExtension_translatedDescription_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PluginExtension().TranslatedDescription(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_isArchived(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsArchived, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UpdatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_publishedAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublishedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*time.Time) + fc.Result = res + return ec.marshalODateTime2แš–timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, 
nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Alias, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, 
"must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_publicTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_publicDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, 
field.Selections, res) +} + +func (ec *executionContext) _Project_publicImage(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicImage, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublicNoIndex, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ImageURL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*url.URL) + fc.Result = res + return ec.marshalOURL2แš–netแš‹urlแšURL(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Visualizer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Visualizer) + fc.Result = res + return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PublishmentStatus, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.PublishmentStatus) + fc.Result = res + return ec.marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { 
+ ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Project().Team(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Project().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Edges, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.ProjectEdge) + fc.Result = res + return ec.marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdgeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Nodes, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.Project) + fc.Result = res + return ec.marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { + defer func() { + 
if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PageInfo, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PageInfo) + fc.Result = res + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TotalCount, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); 
r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cursor, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(usecase.Cursor) + fc.Result = res + return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectEdge) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectEdge", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Node, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Project) + fc.Result = res + return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + 
ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Project, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Project) + fc.Result = res + return ec.marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) _Property_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Property", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Property", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Property_items(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Property", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Items, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]graphql1.PropertyItem) + fc.Result = res + return ec.marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Property_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Property", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Property().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Property", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Property().Layer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _Property_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Property", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Property().Merged(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyCondition) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyCondition", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyCondition) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyCondition", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyCondition) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyCondition", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: 
"PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Links, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*graphql1.PropertyFieldLink) + fc.Result = res + return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLinkแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r 
!= nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Value, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: 
nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_field(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().Field(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyField", + Field: 
field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyField().ActualValue(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchemaFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyFieldLink().Dataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyFieldLink().DatasetField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetField) + fc.Result = res + return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, 
fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyFieldLink().DatasetSchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyFieldLink().DatasetSchemaField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchemaField) + fc.Result = res + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldPayload", + Field: field, + Args: nil, + 
IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyField, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertyField) + fc.Result = res + return ec.marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroup", 
+ Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: 
"PropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PropertyField) + fc.Result = res + return ec.marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroup().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroup().SchemaGroup(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaGroup) + fc.Result = res + return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { + defer func() { + if r 
:= recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Groups, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PropertyGroup) + fc.Result = res + return ec.marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroupแš„(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroupList().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyGroupList().SchemaGroup(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaGroup) + fc.Result = res + return 
ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyItem, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res 
:= resTmp.(graphql1.PropertyItem) + fc.Result = res + return ec.marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Latlng, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if 
resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.URL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyLinkableFields().LatlngField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + 
return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertyLinkableFields().URLField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.PropertyLinkableFields().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.Groups, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PropertySchemaGroup) + fc.Result = res + return ec.marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroupแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkableFields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.PropertyLinkableFields) + fc.Result = res + return ec.marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyLinkableFields(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + 
IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.ValueType) + fc.Result = res + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Title, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + 
IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Prefix, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Suffix, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DefaultValue, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(interface{}) + fc.Result = res + return ec.marshalOAny2interface(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UI, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*graphql1.PropertySchemaFieldUI) + fc.Result = res + return ec.marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldUI(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Min, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*float64) + fc.Result = res + return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Max, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*float64) + fc.Result = res + return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Choices, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*graphql1.PropertySchemaFieldChoice) + fc.Result = res + return ec.marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoiceแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsAvailableIf, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertyCondition) + fc.Result = res + return 
ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyCondition(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedDescription, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PropertySchemaField_translatedTitle_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaField().TranslatedTitle(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, 
"must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_translatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PropertySchemaField_translatedName_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaField().TranslatedName(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PropertySchemaField_translatedDescription_args(ctx, 
rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaField().TranslatedDescription(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Key, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: 
field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Title, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_label(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, 
+ } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Icon, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedLabel(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // 
use context from middleware stack in children + return obj.AllTranslatedLabel, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PropertySchemaFieldChoice_translatedTitle_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaFieldChoice().TranslatedTitle(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaFieldChoice_translatedLabel(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + Args: nil, 
+ IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PropertySchemaFieldChoice_translatedLabel_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaFieldChoice().TranslatedLabel(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaGroupID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx 
context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PropertySchemaID) + fc.Result = res + return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PropertySchemaField) + fc.Result = res + return 
ec.marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsList, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsAvailableIf, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertyCondition) + fc.Result = res + return 
ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyCondition(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Title, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AllTranslatedTitle, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(map[string]string) + fc.Result = res + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_PropertySchemaGroup_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepresentativeFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.PropertySchemaFieldID) + fc.Result = res + return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RepresentativeField, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchemaField) + fc.Result = res + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaGroup().Schema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, 
field.Selections, res) +} + +func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_PropertySchemaGroup_translatedTitle_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.PropertySchemaGroup().TranslatedTitle(rctx, obj, args["lang"].(*string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_me(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Me(rctx) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*graphql1.User) + fc.Result = res + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_node(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_node_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Node(rctx, args["id"].(id.ID), args["type"].(graphql1.NodeType)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(graphql1.Node) + fc.Result = res + return ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_nodes_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + 
ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Nodes(rctx, args["id"].([]*id.ID), args["type"].(graphql1.NodeType)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]graphql1.Node) + fc.Result = res + return ec.marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_propertySchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_propertySchema_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().PropertySchema(rctx, args["id"].(id.PropertySchemaID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PropertySchema) + fc.Result = res + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_propertySchemas_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().PropertySchemas(rctx, args["id"].([]*id.PropertySchemaID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PropertySchema) + fc.Result = res + return ec.marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_plugin_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Plugin(rctx, args["id"].(id.PluginID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = 
res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_plugins_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Plugins(rctx, args["id"].([]*id.PluginID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.Plugin) + fc.Result = res + return ec.marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_layer_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Layer(rctx, args["id"].(id.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_scene_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Scene(rctx, args["projectId"].(id.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, 
fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_assets_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Assets(rctx, args["teamId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AssetConnection) + fc.Result = res + return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_projects_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Projects(rctx, args["teamId"].(id.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ProjectConnection) + fc.Result = res + return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_datasetSchemas_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().DatasetSchemas(rctx, args["sceneId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchemaConnection) + fc.Result = res + return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + 
Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_datasets_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Datasets(rctx, args["datasetSchemaId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DatasetConnection) + fc.Result = res + return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_sceneLock(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_sceneLock_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().SceneLock(rctx, args["sceneId"].(id.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
return graphql.Null + } + res := resTmp.(*graphql1.SceneLockMode) + fc.Result = res + return ec.marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_dynamicDatasetSchemas_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().DynamicDatasetSchemas(rctx, args["sceneId"].(id.ID)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := 
ec.field_Query_searchUser_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().SearchUser(rctx, args["nameOrEmail"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.SearchedUser) + fc.Result = res + return ec.marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSearchedUser(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query_checkProjectAlias_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().CheckProjectAlias(rctx, args["alias"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.CheckProjectAliasPayload) + fc.Result = res + return ec.marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Query___type_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.introspectType(args["name"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.introspectSchema() + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Schema) + fc.Result = res + return ec.marshalO__Schema2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšSchema(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _Rect_west(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Rect", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.West, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Rect", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.South, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Rect", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.East, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Rect", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.North, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(float64) + fc.Result = res + return ec.marshalNFloat2float64(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveAssetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveAssetPayload", + Field: field, + Args: 
nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.AssetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveDatasetSchemaPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + 
Object: "RemoveInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InfoboxFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveInfoboxFieldPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveInfoboxPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, 
r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveInfoboxPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r 
!= nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveMemberFromTeamPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveMemberFromTeamPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.RemoveWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveWidgetPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) + fc.Result = res + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + 
+func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ProjectID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_propertyId(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UpdatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.RootLayerID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Widgets, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.SceneWidget) + fc.Result = res + return ec.marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidgetแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Plugins, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePluginแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DynamicDatasetSchemas, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Project(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Project) + fc.Result = res + return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Team(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: 
"Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().RootLayer(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.LayerGroup) + fc.Result = res + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_lockMode(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().LockMode(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.SceneLockMode) + fc.Result = res + return ec.marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + 
Object: "Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Scene_datasetSchemas_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().DatasetSchemas(rctx, obj, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchemaConnection) + fc.Result = res + return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return 
ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.ScenePlugin().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func 
(ec *executionContext) _ScenePlugin_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.ScenePlugin().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_pluginId(ctx 
context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) + fc.Result = res + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec 
*executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PropertyID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Enabled, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.SceneWidget().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.SceneWidget().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) _SceneWidget_property(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.SceneWidget().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Property) + fc.Result = res + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _SearchedUser_userId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SearchedUser) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SearchedUser", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _SearchedUser_userName(ctx context.Context, field graphql.CollectedField, obj *graphql1.SearchedUser) (ret 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SearchedUser", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserName, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _SearchedUser_userEmail(ctx context.Context, field graphql.CollectedField, obj *graphql1.SearchedUser) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SearchedUser", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserEmail, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graphql.CollectedField, obj *graphql1.SignupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SignupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.User, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.User) + fc.Result = res + return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.SignupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SignupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { 
+ ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.URL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Dataset, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.Dataset) + fc.Result = res + return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Team", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Team", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Team_members(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc 
:= &graphql.FieldContext{ + Object: "Team", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Members, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.TeamMember) + fc.Result = res + return ec.marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMemberแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Team", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Personal, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Team", + Field: field, + Args: 
nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Team_assets_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Team().Assets(rctx, obj, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.AssetConnection) + fc.Result = res + return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Team", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Team_projects_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Team().Projects(rctx, obj, args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ProjectConnection) + fc.Result = res + return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx, field.Selections, res) +} + +func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphql.CollectedField, obj *graphql1.TeamMember) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UserID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *graphql1.TeamMember) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Role, nil + }) 
+ if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Role) + fc.Result = res + return ec.marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx, field.Selections, res) +} + +func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql.CollectedField, obj *graphql1.TeamMember) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TeamMember().User(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.User) + fc.Result = res + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FontFamily, nil + }) + if err != nil { + ec.Error(ctx, 
err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FontWeight, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_fontSize(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.FontSize, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_color(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Color, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_textAlign(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TextAlign, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.TextAlign) + fc.Result = res + return ec.marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + 
Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Bold, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_italic(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Italic, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Typography_underline(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Typography", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return obj.Underline, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UninstallPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _UninstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UninstallPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateDatasetSchemaPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DatasetSchema, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateMePayload_user(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateMePayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateMePayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.User, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.User) + fc.Result = res + return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateMemberOfTeamPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateMemberOfTeamPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateTeamPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateTeamPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Team, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateWidgetPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateWidgetPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneWidget, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.SceneWidget) + fc.Result = res + return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpgradePluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + 
Object: "UpgradePluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpgradePluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpgradePluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UploadPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); 
r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Plugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Plugin) + fc.Result = res + return ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_email(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Email, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_lang(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Lang, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_theme(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Theme, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(graphql1.Theme) + fc.Result = res + return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_myTeamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.MyTeamID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_auths(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Auths, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_teams(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.User().Teams(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _User_myTeam(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "User", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.User().MyTeam(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // 
use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Locations, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) 
{ + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return ec.marshalN__DirectiveLocation2แš•stringแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, 
"must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) 
___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + 
IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children 
+ return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DefaultValue, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + 
return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Schema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Types(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Schema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.QueryType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) 
+ fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Schema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.MutationType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Schema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SubscriptionType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, 
field.Selections, res) +} + +func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Schema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Directives(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.Directive) + fc.Result = res + return ec.marshalN__Directive2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirectiveแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Kind(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalN__TypeKind2string(ctx, field.Selections, res) +} + +func (ec 
*executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: 
field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field___Type_fields_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields(args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Field) + fc.Result = res + return ec.marshalO__Field2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Interfaces(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PossibleTypes(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field___Type_enumValues_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.EnumValues(args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.EnumValue) + fc.Result = res + return ec.marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { 
+ defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InputFields(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalO__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OfType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +// endregion **************************** field.gotpl ***************************** + +// region **************************** input.gotpl ***************************** + +func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Context, obj interface{}) 
(graphql1.AddDatasetSchemaInput, error) { + var it graphql1.AddDatasetSchemaInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "representativefield": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("representativefield")) + it.Representativefield, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Context, obj interface{}) (graphql1.AddDynamicDatasetInput, error) { + var it graphql1.AddDynamicDatasetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "datasetSchemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + it.DatasetSchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "author": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("author")) + it.Author, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "content": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("content")) + it.Content, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("lat")) + it.Lat, err = ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + case "target": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("target")) + it.Target, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx context.Context, obj interface{}) (graphql1.AddDynamicDatasetSchemaInput, error) { + var it graphql1.AddDynamicDatasetSchemaInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Context, obj interface{}) (graphql1.AddInfoboxFieldInput, error) { + var it graphql1.AddInfoboxFieldInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context, obj interface{}) (graphql1.AddLayerGroupInput, error) { + var it graphql1.AddLayerGroupInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "parentLayerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parentLayerId")) + it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetSchemaID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetSchemaID")) + it.LinkedDatasetSchemaID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, obj interface{}) (graphql1.AddLayerItemInput, error) { + var it graphql1.AddLayerItemInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "parentLayerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parentLayerId")) + it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) + it.Lat, err = ec.unmarshalOFloat2แš–float64(ctx, 
v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = ec.unmarshalOFloat2แš–float64(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Context, obj interface{}) (graphql1.AddMemberToTeamInput, error) { + var it graphql1.AddMemberToTeamInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "role": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("role")) + it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Context, obj interface{}) (graphql1.AddPropertyItemInput, error) { + var it graphql1.AddPropertyItemInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + 
it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "nameFieldValue": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldValue")) + it.NameFieldValue, err = ec.unmarshalOAny2interface(ctx, v) + if err != nil { + return it, err + } + case "nameFieldType": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldType")) + it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, obj interface{}) (graphql1.AddWidgetInput, error) { + var it graphql1.AddWidgetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + if err != nil { + return 
it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, obj interface{}) (graphql1.CreateAssetInput, error) { + var it graphql1.CreateAssetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context, obj interface{}) (graphql1.CreateInfoboxInput, error) { + var it graphql1.CreateInfoboxInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context, obj interface{}) (graphql1.CreateProjectInput, error) { + var it graphql1.CreateProjectInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "visualizer": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("visualizer")) 
+ it.Visualizer, err = ec.unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "description": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("description")) + it.Description, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "imageUrl": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("imageUrl")) + it.ImageURL, err = ec.unmarshalOURL2แš–netแš‹urlแšURL(ctx, v) + if err != nil { + return it, err + } + case "alias": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + it.Alias, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "archived": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("archived")) + it.Archived, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, obj interface{}) (graphql1.CreateSceneInput, error) { + var it graphql1.CreateSceneInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (graphql1.CreateTeamInput, error) { + var it graphql1.CreateTeamInput + var asMap = 
obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (graphql1.DeleteMeInput, error) { + var it graphql1.DeleteMeInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (graphql1.DeleteProjectInput, error) { + var it graphql1.DeleteProjectInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (graphql1.DeleteTeamInput, error) { + var it graphql1.DeleteTeamInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context, 
obj interface{}) (graphql1.ImportDatasetInput, error) { + var it graphql1.ImportDatasetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, obj interface{}) (graphql1.ImportLayerInput, error) { + var it graphql1.ImportLayerInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "format": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("format")) + it.Format, err = ec.unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerEncodingFormat(ctx, 
v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context, obj interface{}) (graphql1.InstallPluginInput, error) { + var it graphql1.InstallPluginInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx context.Context, obj interface{}) (graphql1.LinkDatasetToPropertyValueInput, error) { + var it graphql1.LinkDatasetToPropertyValueInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + 
return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaIds": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaIds")) + it.DatasetSchemaIds, err = ec.unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaFieldIds": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaFieldIds")) + it.DatasetSchemaFieldIds, err = ec.unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + if err != nil { + return it, err + } + case "datasetIds": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetIds")) + it.DatasetIds, err = ec.unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Context, obj interface{}) (graphql1.MoveInfoboxFieldInput, error) { + var it graphql1.MoveInfoboxFieldInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "infoboxFieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("infoboxFieldId")) + it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err 
+ } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalNInt2int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, obj interface{}) (graphql1.MoveLayerInput, error) { + var it graphql1.MoveLayerInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "destLayerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("destLayerId")) + it.DestLayerID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Context, obj interface{}) (graphql1.MovePropertyItemInput, error) { + var it graphql1.MovePropertyItemInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = 
ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalNInt2int(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Context, obj interface{}) (graphql1.PublishProjectInput, error) { + var it graphql1.PublishProjectInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "alias": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + it.Alias, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "status": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("status")) + it.Status, err = ec.unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, obj interface{}) (graphql1.RemoveAssetInput, error) { + var it graphql1.RemoveAssetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "assetId": + var err error + + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("assetId")) + it.AssetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.Context, obj interface{}) (graphql1.RemoveDatasetSchemaInput, error) { + var it graphql1.RemoveDatasetSchemaInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "schemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaId")) + it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "force": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("force")) + it.Force, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Context, obj interface{}) (graphql1.RemoveInfoboxFieldInput, error) { + var it graphql1.RemoveInfoboxFieldInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "infoboxFieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("infoboxFieldId")) + it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context, obj interface{}) (graphql1.RemoveInfoboxInput, error) { + var it 
graphql1.RemoveInfoboxInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, obj interface{}) (graphql1.RemoveLayerInput, error) { + var it graphql1.RemoveLayerInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context.Context, obj interface{}) (graphql1.RemoveMemberFromTeamInput, error) { + var it graphql1.RemoveMemberFromTeamInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, obj interface{}) (graphql1.RemoveMyAuthInput, error) { + var it graphql1.RemoveMyAuthInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap 
{ + switch k { + case "auth": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("auth")) + it.Auth, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.Context, obj interface{}) (graphql1.RemovePropertyFieldInput, error) { + var it graphql1.RemovePropertyFieldInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Context, obj interface{}) (graphql1.RemovePropertyItemInput, error) { + var it graphql1.RemovePropertyItemInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, obj interface{}) (graphql1.RemoveWidgetInput, error) { + var it graphql1.RemoveWidgetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj 
interface{}) (graphql1.SignupInput, error) { + var it graphql1.SignupInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "secret": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("secret")) + it.Secret, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, obj interface{}) (graphql1.SyncDatasetInput, error) { + var it graphql1.SyncDatasetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "url": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("url")) + it.URL, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Context, obj interface{}) (graphql1.UninstallPluginInput, error) { + var it graphql1.UninstallPluginInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.Context, obj interface{}) (graphql1.UnlinkPropertyValueInput, error) { + var it graphql1.UnlinkPropertyValueInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.Context, obj 
interface{}) (graphql1.UpdateDatasetSchemaInput, error) { + var it graphql1.UpdateDatasetSchemaInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "schemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaId")) + it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, obj interface{}) (graphql1.UpdateLayerInput, error) { + var it graphql1.UpdateLayerInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "layerId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "visible": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("visible")) + it.Visible, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj interface{}) (graphql1.UpdateMeInput, error) { + var it graphql1.UpdateMeInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = 
ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "email": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("email")) + it.Email, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "lang": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + it.Lang, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "theme": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("theme")) + it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx, v) + if err != nil { + return it, err + } + case "password": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("password")) + it.Password, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "passwordConfirmation": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("passwordConfirmation")) + it.PasswordConfirmation, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Context, obj interface{}) (graphql1.UpdateMemberOfTeamInput, error) { + var it graphql1.UpdateMemberOfTeamInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { 
+ return it, err + } + case "role": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("role")) + it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context, obj interface{}) (graphql1.UpdateProjectInput, error) { + var it graphql1.UpdateProjectInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "description": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("description")) + it.Description, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "archived": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("archived")) + it.Archived, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "alias": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("alias")) + it.Alias, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "imageUrl": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("imageUrl")) + it.ImageURL, err = ec.unmarshalOURL2แš–netแš‹urlแšURL(ctx, v) + if err != nil { + return it, err + } + case "publicTitle": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("publicTitle")) + it.PublicTitle, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "publicDescription": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicDescription")) + it.PublicDescription, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "publicImage": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicImage")) + it.PublicImage, err = ec.unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "publicNoIndex": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicNoIndex")) + it.PublicNoIndex, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "deleteImageUrl": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("deleteImageUrl")) + it.DeleteImageURL, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "deletePublicImage": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("deletePublicImage")) + it.DeletePublicImage, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyItemInput, error) { + var it graphql1.UpdatePropertyItemInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "operations": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("operations")) + it.Operations, err = ec.unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInputแš„(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyItemOperationInput, error) { + var it graphql1.UpdatePropertyItemOperationInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "operation": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("operation")) + it.Operation, err = ec.unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšListOperation(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "nameFieldValue": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldValue")) + it.NameFieldValue, err = ec.unmarshalOAny2interface(ctx, v) + if err != nil { + return it, err + } + case "nameFieldType": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("nameFieldType")) + it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyValueCameraInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueCameraInput, error) { + var it graphql1.UpdatePropertyValueCameraInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) + it.Lat, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = 
ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "altitude": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("altitude")) + it.Altitude, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "heading": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("heading")) + it.Heading, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "pitch": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pitch")) + it.Pitch, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "roll": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("roll")) + it.Roll, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "fov": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fov")) + it.Fov, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueInput, error) { + var it graphql1.UpdatePropertyValueInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx 
:= graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "value": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("value")) + it.Value, err = ec.unmarshalOAny2interface(ctx, v) + if err != nil { + return it, err + } + case "type": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) + it.Type, err = ec.unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyValueLatLngHeightInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueLatLngHeightInput, error) { + var it graphql1.UpdatePropertyValueLatLngHeightInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) + it.Lat, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "height": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("height")) + it.Height, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyValueLatLngInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueLatLngInput, error) { + var it graphql1.UpdatePropertyValueLatLngInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + 
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "lat": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) + it.Lat, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + case "lng": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) + it.Lng, err = ec.unmarshalNFloat2float64(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdatePropertyValueTypographyInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueTypographyInput, error) { + var it graphql1.UpdatePropertyValueTypographyInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "fontFamily": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fontFamily")) + it.FontFamily, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "fontWeight": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fontWeight")) + it.FontWeight, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "fontSize": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fontSize")) + it.FontSize, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "color": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("color")) + it.Color, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "textAlign": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("textAlign")) + it.TextAlign, err = ec.unmarshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx, v) + if err != nil { + return it, err + } + case "bold": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("bold")) + it.Bold, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "italic": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("italic")) + it.Italic, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "underline": + var err error + + ctx 
:= graphql.WithPathContext(ctx, graphql.NewPathWithField("underline")) + it.Underline, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, obj interface{}) (graphql1.UpdateTeamInput, error) { + var it graphql1.UpdateTeamInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "teamId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, obj interface{}) (graphql1.UpdateWidgetInput, error) { + var it graphql1.UpdateWidgetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "extensionId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) + it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + if err != nil { + return it, 
err + } + case "enabled": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("enabled")) + it.Enabled, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context, obj interface{}) (graphql1.UpgradePluginInput, error) { + var it graphql1.UpgradePluginInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "pluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) + it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + case "toPluginId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("toPluginId")) + it.ToPluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context.Context, obj interface{}) (graphql1.UploadFileToPropertyInput, error) { + var it graphql1.UploadFileToPropertyInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "schemaItemId": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("schemaItemId")) + it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "itemId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "fieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) + it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + if err != nil { + return it, err + } + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, obj interface{}) (graphql1.UploadPluginInput, error) { + var it graphql1.UploadPluginInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "file": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) + it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +// endregion **************************** input.gotpl ***************************** + +// region ************************** interface.gotpl *************************** + +func (ec *executionContext) _Layer(ctx context.Context, sel ast.SelectionSet, obj graphql1.Layer) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case graphql1.LayerItem: + return ec._LayerItem(ctx, sel, 
&obj) + case *graphql1.LayerItem: + if obj == nil { + return graphql.Null + } + return ec._LayerItem(ctx, sel, obj) + case graphql1.LayerGroup: + return ec._LayerGroup(ctx, sel, &obj) + case *graphql1.LayerGroup: + if obj == nil { + return graphql.Null + } + return ec._LayerGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _Layers(ctx context.Context, sel ast.SelectionSet, obj graphql1.Layers) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case graphql1.LayerItem: + return ec._LayerItem(ctx, sel, &obj) + case *graphql1.LayerItem: + if obj == nil { + return graphql.Null + } + return ec._LayerItem(ctx, sel, obj) + case graphql1.LayerGroup: + return ec._LayerGroup(ctx, sel, &obj) + case *graphql1.LayerGroup: + if obj == nil { + return graphql.Null + } + return ec._LayerGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj graphql1.Node) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case graphql1.Asset: + return ec._Asset(ctx, sel, &obj) + case *graphql1.Asset: + if obj == nil { + return graphql.Null + } + return ec._Asset(ctx, sel, obj) + case graphql1.User: + return ec._User(ctx, sel, &obj) + case *graphql1.User: + if obj == nil { + return graphql.Null + } + return ec._User(ctx, sel, obj) + case graphql1.Team: + return ec._Team(ctx, sel, &obj) + case *graphql1.Team: + if obj == nil { + return graphql.Null + } + return ec._Team(ctx, sel, obj) + case graphql1.Project: + return ec._Project(ctx, sel, &obj) + case *graphql1.Project: + if obj == nil { + return graphql.Null + } + return ec._Project(ctx, sel, obj) + case graphql1.Scene: + return ec._Scene(ctx, sel, &obj) + case *graphql1.Scene: + if obj == nil { + return graphql.Null + } + return ec._Scene(ctx, sel, obj) + case graphql1.Property: + return 
ec._Property(ctx, sel, &obj) + case *graphql1.Property: + if obj == nil { + return graphql.Null + } + return ec._Property(ctx, sel, obj) + case graphql1.DatasetSchema: + return ec._DatasetSchema(ctx, sel, &obj) + case *graphql1.DatasetSchema: + if obj == nil { + return graphql.Null + } + return ec._DatasetSchema(ctx, sel, obj) + case graphql1.DatasetSchemaField: + return ec._DatasetSchemaField(ctx, sel, &obj) + case *graphql1.DatasetSchemaField: + if obj == nil { + return graphql.Null + } + return ec._DatasetSchemaField(ctx, sel, obj) + case graphql1.Dataset: + return ec._Dataset(ctx, sel, &obj) + case *graphql1.Dataset: + if obj == nil { + return graphql.Null + } + return ec._Dataset(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _PropertyItem(ctx context.Context, sel ast.SelectionSet, obj graphql1.PropertyItem) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case graphql1.PropertyGroup: + return ec._PropertyGroup(ctx, sel, &obj) + case *graphql1.PropertyGroup: + if obj == nil { + return graphql.Null + } + return ec._PropertyGroup(ctx, sel, obj) + case graphql1.PropertyGroupList: + return ec._PropertyGroupList(ctx, sel, &obj) + case *graphql1.PropertyGroupList: + if obj == nil { + return graphql.Null + } + return ec._PropertyGroupList(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +// endregion ************************** interface.gotpl *************************** + +// region **************************** object.gotpl **************************** + +var addDatasetSchemaPayloadImplementors = []string{"AddDatasetSchemaPayload"} + +func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addDatasetSchemaPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids 
uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddDatasetSchemaPayload") + case "datasetSchema": + out.Values[i] = ec._AddDatasetSchemaPayload_datasetSchema(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addDynamicDatasetPayloadImplementors = []string{"AddDynamicDatasetPayload"} + +func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddDynamicDatasetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddDynamicDatasetPayload") + case "datasetSchema": + out.Values[i] = ec._AddDynamicDatasetPayload_datasetSchema(ctx, field, obj) + case "dataset": + out.Values[i] = ec._AddDynamicDatasetPayload_dataset(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addDynamicDatasetSchemaPayloadImplementors = []string{"AddDynamicDatasetSchemaPayload"} + +func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetSchemaPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddDynamicDatasetSchemaPayload") + case "datasetSchema": + out.Values[i] = ec._AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field, obj) + default: + 
panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addInfoboxFieldPayloadImplementors = []string{"AddInfoboxFieldPayload"} + +func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddInfoboxFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addInfoboxFieldPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddInfoboxFieldPayload") + case "infoboxField": + out.Values[i] = ec._AddInfoboxFieldPayload_infoboxField(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "layer": + out.Values[i] = ec._AddInfoboxFieldPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addLayerGroupPayloadImplementors = []string{"AddLayerGroupPayload"} + +func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddLayerGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addLayerGroupPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddLayerGroupPayload") + case "layer": + out.Values[i] = ec._AddLayerGroupPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + out.Values[i] = ec._AddLayerGroupPayload_parentLayer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "index": + out.Values[i] = ec._AddLayerGroupPayload_index(ctx, field, 
obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addLayerItemPayloadImplementors = []string{"AddLayerItemPayload"} + +func (ec *executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddLayerItemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addLayerItemPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddLayerItemPayload") + case "layer": + out.Values[i] = ec._AddLayerItemPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + out.Values[i] = ec._AddLayerItemPayload_parentLayer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "index": + out.Values[i] = ec._AddLayerItemPayload_index(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var addMemberToTeamPayloadImplementors = []string{"AddMemberToTeamPayload"} + +func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddMemberToTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addMemberToTeamPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddMemberToTeamPayload") + case "team": + out.Values[i] = ec._AddMemberToTeamPayload_team(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + 
return out +} + +var addWidgetPayloadImplementors = []string{"AddWidgetPayload"} + +func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddWidgetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addWidgetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddWidgetPayload") + case "scene": + out.Values[i] = ec._AddWidgetPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "sceneWidget": + out.Values[i] = ec._AddWidgetPayload_sceneWidget(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var assetImplementors = []string{"Asset", "Node"} + +func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Asset) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, assetImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Asset") + case "id": + out.Values[i] = ec._Asset_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "createdAt": + out.Values[i] = ec._Asset_createdAt(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "teamId": + out.Values[i] = ec._Asset_teamId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._Asset_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "size": + out.Values[i] = ec._Asset_size(ctx, 
field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "url": + out.Values[i] = ec._Asset_url(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "contentType": + out.Values[i] = ec._Asset_contentType(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "team": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Asset_team(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var assetConnectionImplementors = []string{"AssetConnection"} + +func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AssetConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, assetConnectionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AssetConnection") + case "edges": + out.Values[i] = ec._AssetConnection_edges(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + out.Values[i] = ec._AssetConnection_nodes(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + out.Values[i] = ec._AssetConnection_pageInfo(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + out.Values[i] = ec._AssetConnection_totalCount(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var assetEdgeImplementors = 
[]string{"AssetEdge"} + +func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AssetEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, assetEdgeImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AssetEdge") + case "cursor": + out.Values[i] = ec._AssetEdge_cursor(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + out.Values[i] = ec._AssetEdge_node(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var cameraImplementors = []string{"Camera"} + +func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Camera) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, cameraImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Camera") + case "lat": + out.Values[i] = ec._Camera_lat(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "lng": + out.Values[i] = ec._Camera_lng(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "altitude": + out.Values[i] = ec._Camera_altitude(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "heading": + out.Values[i] = ec._Camera_heading(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "pitch": + out.Values[i] = ec._Camera_pitch(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "roll": + out.Values[i] = ec._Camera_roll(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "fov": + out.Values[i] = ec._Camera_fov(ctx, 
field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var checkProjectAliasPayloadImplementors = []string{"CheckProjectAliasPayload"} + +func (ec *executionContext) _CheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CheckProjectAliasPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, checkProjectAliasPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CheckProjectAliasPayload") + case "alias": + out.Values[i] = ec._CheckProjectAliasPayload_alias(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "available": + out.Values[i] = ec._CheckProjectAliasPayload_available(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createAssetPayloadImplementors = []string{"CreateAssetPayload"} + +func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateAssetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createAssetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateAssetPayload") + case "asset": + out.Values[i] = ec._CreateAssetPayload_asset(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} 
+ +var createInfoboxPayloadImplementors = []string{"CreateInfoboxPayload"} + +func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateInfoboxPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createInfoboxPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateInfoboxPayload") + case "layer": + out.Values[i] = ec._CreateInfoboxPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createScenePayloadImplementors = []string{"CreateScenePayload"} + +func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateScenePayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createScenePayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateScenePayload") + case "scene": + out.Values[i] = ec._CreateScenePayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createTeamPayloadImplementors = []string{"CreateTeamPayload"} + +func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createTeamPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range 
fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateTeamPayload") + case "team": + out.Values[i] = ec._CreateTeamPayload_team(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetImplementors = []string{"Dataset", "Node"} + +func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Dataset) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Dataset") + case "id": + out.Values[i] = ec._Dataset_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + out.Values[i] = ec._Dataset_source(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._Dataset_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + out.Values[i] = ec._Dataset_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Dataset_schema(ctx, field, obj) + return res + }) + case "name": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Dataset_name(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + 
out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetConnectionImplementors = []string{"DatasetConnection"} + +func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetConnectionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetConnection") + case "edges": + out.Values[i] = ec._DatasetConnection_edges(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + out.Values[i] = ec._DatasetConnection_nodes(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + out.Values[i] = ec._DatasetConnection_pageInfo(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + out.Values[i] = ec._DatasetConnection_totalCount(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetEdgeImplementors = []string{"DatasetEdge"} + +func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetEdgeImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetEdge") + case "cursor": + out.Values[i] = ec._DatasetEdge_cursor(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + out.Values[i] = ec._DatasetEdge_node(ctx, field, obj) + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetFieldImplementors = []string{"DatasetField"} + +func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetField") + case "fieldId": + out.Values[i] = ec._DatasetField_fieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._DatasetField_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + out.Values[i] = ec._DatasetField_source(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + out.Values[i] = ec._DatasetField_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "value": + out.Values[i] = ec._DatasetField_value(ctx, field, obj) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetField_schema(ctx, field, obj) + return res + }) + case "field": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetField_field(ctx, field, obj) + return res + }) + case "valueRef": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetField_valueRef(ctx, field, 
obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaImplementors = []string{"DatasetSchema", "Node"} + +func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchema") + case "id": + out.Values[i] = ec._DatasetSchema_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + out.Values[i] = ec._DatasetSchema_source(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._DatasetSchema_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + out.Values[i] = ec._DatasetSchema_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + out.Values[i] = ec._DatasetSchema_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "representativeFieldId": + out.Values[i] = ec._DatasetSchema_representativeFieldId(ctx, field, obj) + case "dynamic": + out.Values[i] = ec._DatasetSchema_dynamic(ctx, field, obj) + case "datasets": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_datasets(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) 
{ + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_scene(ctx, field, obj) + return res + }) + case "representativeField": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_representativeField(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaConnectionImplementors = []string{"DatasetSchemaConnection"} + +func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchemaConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaConnectionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchemaConnection") + case "edges": + out.Values[i] = ec._DatasetSchemaConnection_edges(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + out.Values[i] = ec._DatasetSchemaConnection_nodes(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "pageInfo": + out.Values[i] = ec._DatasetSchemaConnection_pageInfo(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + out.Values[i] = ec._DatasetSchemaConnection_totalCount(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaEdgeImplementors = []string{"DatasetSchemaEdge"} + +func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, 
sel ast.SelectionSet, obj *graphql1.DatasetSchemaEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaEdgeImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchemaEdge") + case "cursor": + out.Values[i] = ec._DatasetSchemaEdge_cursor(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + out.Values[i] = ec._DatasetSchemaEdge_node(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var datasetSchemaFieldImplementors = []string{"DatasetSchemaField", "Node"} + +func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchemaField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DatasetSchemaField") + case "id": + out.Values[i] = ec._DatasetSchemaField_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "source": + out.Values[i] = ec._DatasetSchemaField_source(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._DatasetSchemaField_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + out.Values[i] = ec._DatasetSchemaField_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._DatasetSchemaField_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + case "refId": + out.Values[i] = ec._DatasetSchemaField_refId(ctx, field, obj) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchemaField_schema(ctx, field, obj) + return res + }) + case "ref": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchemaField_ref(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var deleteMePayloadImplementors = []string{"DeleteMePayload"} + +func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DeleteMePayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, deleteMePayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DeleteMePayload") + case "userId": + out.Values[i] = ec._DeleteMePayload_userId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var deleteProjectPayloadImplementors = []string{"DeleteProjectPayload"} + +func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DeleteProjectPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, deleteProjectPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case 
"__typename": + out.Values[i] = graphql.MarshalString("DeleteProjectPayload") + case "projectId": + out.Values[i] = ec._DeleteProjectPayload_projectId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var deleteTeamPayloadImplementors = []string{"DeleteTeamPayload"} + +func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DeleteTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, deleteTeamPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DeleteTeamPayload") + case "teamId": + out.Values[i] = ec._DeleteTeamPayload_teamId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var importDatasetPayloadImplementors = []string{"ImportDatasetPayload"} + +func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ImportDatasetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, importDatasetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ImportDatasetPayload") + case "datasetSchema": + out.Values[i] = ec._ImportDatasetPayload_datasetSchema(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var 
importLayerPayloadImplementors = []string{"ImportLayerPayload"} + +func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ImportLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, importLayerPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ImportLayerPayload") + case "layers": + out.Values[i] = ec._ImportLayerPayload_layers(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + out.Values[i] = ec._ImportLayerPayload_parentLayer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var infoboxImplementors = []string{"Infobox"} + +func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Infobox) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, infoboxImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Infobox") + case "layerId": + out.Values[i] = ec._Infobox_layerId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = ec._Infobox_propertyId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + out.Values[i] = ec._Infobox_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "linkedDatasetId": + out.Values[i] = ec._Infobox_linkedDatasetId(ctx, field, obj) + case "layer": + field := field + out.Concurrently(i, func() (res 
graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_layer(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_property(ctx, field, obj) + return res + }) + case "linkedDataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_linkedDataset(ctx, field, obj) + return res + }) + case "merged": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_merged(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var infoboxFieldImplementors = []string{"InfoboxField"} + +func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.InfoboxField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, infoboxFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("InfoboxField") + case "id": + out.Values[i] = ec._InfoboxField_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "layerId": + out.Values[i] = ec._InfoboxField_layerId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = ec._InfoboxField_propertyId(ctx, field, 
obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + out.Values[i] = ec._InfoboxField_pluginId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "extensionId": + out.Values[i] = ec._InfoboxField_extensionId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "linkedDatasetId": + out.Values[i] = ec._InfoboxField_linkedDatasetId(ctx, field, obj) + case "layer": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_layer(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "infobox": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_infobox(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_property(ctx, field, obj) + return res + }) + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_plugin(ctx, field, obj) + return res + }) + case "extension": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_extension(ctx, field, obj) + return res + }) + case "linkedDataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r 
:= recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_linkedDataset(ctx, field, obj) + return res + }) + case "merged": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_merged(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var installPluginPayloadImplementors = []string{"InstallPluginPayload"} + +func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.InstallPluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, installPluginPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("InstallPluginPayload") + case "scene": + out.Values[i] = ec._InstallPluginPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + out.Values[i] = ec._InstallPluginPayload_scenePlugin(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var latLngImplementors = []string{"LatLng"} + +func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LatLng) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, latLngImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LatLng") + case "lat": + out.Values[i] = ec._LatLng_lat(ctx, 
field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "lng": + out.Values[i] = ec._LatLng_lng(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var latLngHeightImplementors = []string{"LatLngHeight"} + +func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LatLngHeight) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, latLngHeightImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LatLngHeight") + case "lat": + out.Values[i] = ec._LatLngHeight_lat(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "lng": + out.Values[i] = ec._LatLngHeight_lng(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "height": + out.Values[i] = ec._LatLngHeight_height(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var layerGroupImplementors = []string{"LayerGroup", "Layers", "Layer"} + +func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LayerGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, layerGroupImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LayerGroup") + case "id": + out.Values[i] = ec._LayerGroup_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = 
ec._LayerGroup_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isVisible": + out.Values[i] = ec._LayerGroup_isVisible(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = ec._LayerGroup_propertyId(ctx, field, obj) + case "pluginId": + out.Values[i] = ec._LayerGroup_pluginId(ctx, field, obj) + case "extensionId": + out.Values[i] = ec._LayerGroup_extensionId(ctx, field, obj) + case "infobox": + out.Values[i] = ec._LayerGroup_infobox(ctx, field, obj) + case "parentId": + out.Values[i] = ec._LayerGroup_parentId(ctx, field, obj) + case "linkedDatasetSchemaId": + out.Values[i] = ec._LayerGroup_linkedDatasetSchemaId(ctx, field, obj) + case "root": + out.Values[i] = ec._LayerGroup_root(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "layerIds": + out.Values[i] = ec._LayerGroup_layerIds(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_parent(ctx, field, obj) + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_property(ctx, field, obj) + return res + }) + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_plugin(ctx, field, obj) + return res + }) + case "extension": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + 
}() + res = ec._LayerGroup_extension(ctx, field, obj) + return res + }) + case "linkedDatasetSchema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_linkedDatasetSchema(ctx, field, obj) + return res + }) + case "layers": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_layers(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var layerItemImplementors = []string{"LayerItem", "Layers", "Layer"} + +func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LayerItem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, layerItemImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LayerItem") + case "id": + out.Values[i] = ec._LayerItem_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._LayerItem_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isVisible": + out.Values[i] = ec._LayerItem_isVisible(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = ec._LayerItem_propertyId(ctx, field, obj) + case "pluginId": + out.Values[i] = ec._LayerItem_pluginId(ctx, field, obj) + case "extensionId": + out.Values[i] = ec._LayerItem_extensionId(ctx, field, obj) + case "infobox": + out.Values[i] = 
ec._LayerItem_infobox(ctx, field, obj) + case "parentId": + out.Values[i] = ec._LayerItem_parentId(ctx, field, obj) + case "linkedDatasetId": + out.Values[i] = ec._LayerItem_linkedDatasetId(ctx, field, obj) + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_parent(ctx, field, obj) + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_property(ctx, field, obj) + return res + }) + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_plugin(ctx, field, obj) + return res + }) + case "extension": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_extension(ctx, field, obj) + return res + }) + case "linkedDataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_linkedDataset(ctx, field, obj) + return res + }) + case "merged": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_merged(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var mergedInfoboxImplementors = []string{"MergedInfobox"} + +func (ec *executionContext) _MergedInfobox(ctx context.Context, 
sel ast.SelectionSet, obj *graphql1.MergedInfobox) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MergedInfobox") + case "property": + out.Values[i] = ec._MergedInfobox_property(ctx, field, obj) + case "fields": + out.Values[i] = ec._MergedInfobox_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var mergedInfoboxFieldImplementors = []string{"MergedInfoboxField"} + +func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedInfoboxField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MergedInfoboxField") + case "originalId": + out.Values[i] = ec._MergedInfoboxField_originalId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + out.Values[i] = ec._MergedInfoboxField_pluginId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "extensionId": + out.Values[i] = ec._MergedInfoboxField_extensionId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "property": + out.Values[i] = ec._MergedInfoboxField_property(ctx, field, obj) + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = 
ec._MergedInfoboxField_plugin(ctx, field, obj) + return res + }) + case "extension": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedInfoboxField_extension(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var mergedLayerImplementors = []string{"MergedLayer"} + +func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedLayer) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mergedLayerImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MergedLayer") + case "originalId": + out.Values[i] = ec._MergedLayer_originalId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parentId": + out.Values[i] = ec._MergedLayer_parentId(ctx, field, obj) + case "property": + out.Values[i] = ec._MergedLayer_property(ctx, field, obj) + case "infobox": + out.Values[i] = ec._MergedLayer_infobox(ctx, field, obj) + case "original": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedLayer_original(ctx, field, obj) + return res + }) + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedLayer_parent(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + 
+var mergedPropertyImplementors = []string{"MergedProperty"} + +func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedProperty) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MergedProperty") + case "originalId": + out.Values[i] = ec._MergedProperty_originalId(ctx, field, obj) + case "parentId": + out.Values[i] = ec._MergedProperty_parentId(ctx, field, obj) + case "schemaId": + out.Values[i] = ec._MergedProperty_schemaId(ctx, field, obj) + case "linkedDatasetId": + out.Values[i] = ec._MergedProperty_linkedDatasetId(ctx, field, obj) + case "original": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedProperty_original(ctx, field, obj) + return res + }) + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedProperty_parent(ctx, field, obj) + return res + }) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedProperty_schema(ctx, field, obj) + return res + }) + case "linkedDataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedProperty_linkedDataset(ctx, field, obj) + return res + }) + case "groups": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedProperty_groups(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var mergedPropertyFieldImplementors = []string{"MergedPropertyField"} + +func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedPropertyField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MergedPropertyField") + case "schemaId": + out.Values[i] = ec._MergedPropertyField_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fieldId": + out.Values[i] = ec._MergedPropertyField_fieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "value": + out.Values[i] = ec._MergedPropertyField_value(ctx, field, obj) + case "type": + out.Values[i] = ec._MergedPropertyField_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "links": + out.Values[i] = ec._MergedPropertyField_links(ctx, field, obj) + case "overridden": + out.Values[i] = ec._MergedPropertyField_overridden(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyField_schema(ctx, field, obj) + return res + }) + case "field": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer 
func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyField_field(ctx, field, obj) + return res + }) + case "actualValue": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyField_actualValue(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var mergedPropertyGroupImplementors = []string{"MergedPropertyGroup"} + +func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedPropertyGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyGroupImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MergedPropertyGroup") + case "originalPropertyId": + out.Values[i] = ec._MergedPropertyGroup_originalPropertyId(ctx, field, obj) + case "parentPropertyId": + out.Values[i] = ec._MergedPropertyGroup_parentPropertyId(ctx, field, obj) + case "originalId": + out.Values[i] = ec._MergedPropertyGroup_originalId(ctx, field, obj) + case "parentId": + out.Values[i] = ec._MergedPropertyGroup_parentId(ctx, field, obj) + case "schemaGroupId": + out.Values[i] = ec._MergedPropertyGroup_schemaGroupId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._MergedPropertyGroup_schemaId(ctx, field, obj) + case "linkedDatasetId": + out.Values[i] = ec._MergedPropertyGroup_linkedDatasetId(ctx, field, obj) + case "fields": + out.Values[i] = ec._MergedPropertyGroup_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } 
+ case "groups": + out.Values[i] = ec._MergedPropertyGroup_groups(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "originalProperty": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyGroup_originalProperty(ctx, field, obj) + return res + }) + case "parentProperty": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyGroup_parentProperty(ctx, field, obj) + return res + }) + case "original": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyGroup_original(ctx, field, obj) + return res + }) + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyGroup_parent(ctx, field, obj) + return res + }) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyGroup_schema(ctx, field, obj) + return res + }) + case "linkedDataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedPropertyGroup_linkedDataset(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var moveInfoboxFieldPayloadImplementors = []string{"MoveInfoboxFieldPayload"} + +func 
(ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, moveInfoboxFieldPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MoveInfoboxFieldPayload") + case "infoboxFieldId": + out.Values[i] = ec._MoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "layer": + out.Values[i] = ec._MoveInfoboxFieldPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "index": + out.Values[i] = ec._MoveInfoboxFieldPayload_index(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var moveLayerPayloadImplementors = []string{"MoveLayerPayload"} + +func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MoveLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, moveLayerPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("MoveLayerPayload") + case "layerId": + out.Values[i] = ec._MoveLayerPayload_layerId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "fromParentLayer": + out.Values[i] = ec._MoveLayerPayload_fromParentLayer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "toParentLayer": + out.Values[i] = ec._MoveLayerPayload_toParentLayer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "index": + 
out.Values[i] = ec._MoveLayerPayload_index(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var mutationImplementors = []string{"Mutation"} + +func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, mutationImplementors) + + ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ + Object: "Mutation", + }) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Mutation") + case "createAsset": + out.Values[i] = ec._Mutation_createAsset(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeAsset": + out.Values[i] = ec._Mutation_removeAsset(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "signup": + out.Values[i] = ec._Mutation_signup(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updateMe": + out.Values[i] = ec._Mutation_updateMe(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeMyAuth": + out.Values[i] = ec._Mutation_removeMyAuth(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "deleteMe": + out.Values[i] = ec._Mutation_deleteMe(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "createTeam": + out.Values[i] = ec._Mutation_createTeam(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "deleteTeam": + out.Values[i] = ec._Mutation_deleteTeam(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updateTeam": + out.Values[i] = ec._Mutation_updateTeam(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addMemberToTeam": + out.Values[i] = 
ec._Mutation_addMemberToTeam(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeMemberFromTeam": + out.Values[i] = ec._Mutation_removeMemberFromTeam(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updateMemberOfTeam": + out.Values[i] = ec._Mutation_updateMemberOfTeam(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "createProject": + out.Values[i] = ec._Mutation_createProject(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updateProject": + out.Values[i] = ec._Mutation_updateProject(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "publishProject": + out.Values[i] = ec._Mutation_publishProject(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "deleteProject": + out.Values[i] = ec._Mutation_deleteProject(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "uploadPlugin": + out.Values[i] = ec._Mutation_uploadPlugin(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "createScene": + out.Values[i] = ec._Mutation_createScene(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addWidget": + out.Values[i] = ec._Mutation_addWidget(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updateWidget": + out.Values[i] = ec._Mutation_updateWidget(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeWidget": + out.Values[i] = ec._Mutation_removeWidget(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "installPlugin": + out.Values[i] = ec._Mutation_installPlugin(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "uninstallPlugin": + out.Values[i] = ec._Mutation_uninstallPlugin(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "upgradePlugin": + out.Values[i] = ec._Mutation_upgradePlugin(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + 
case "updateDatasetSchema": + out.Values[i] = ec._Mutation_updateDatasetSchema(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "syncDataset": + out.Values[i] = ec._Mutation_syncDataset(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addDynamicDatasetSchema": + out.Values[i] = ec._Mutation_addDynamicDatasetSchema(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addDynamicDataset": + out.Values[i] = ec._Mutation_addDynamicDataset(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeDatasetSchema": + out.Values[i] = ec._Mutation_removeDatasetSchema(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "importDataset": + out.Values[i] = ec._Mutation_importDataset(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addDatasetSchema": + out.Values[i] = ec._Mutation_addDatasetSchema(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatePropertyValue": + out.Values[i] = ec._Mutation_updatePropertyValue(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatePropertyValueLatLng": + out.Values[i] = ec._Mutation_updatePropertyValueLatLng(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatePropertyValueLatLngHeight": + out.Values[i] = ec._Mutation_updatePropertyValueLatLngHeight(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatePropertyValueCamera": + out.Values[i] = ec._Mutation_updatePropertyValueCamera(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatePropertyValueTypography": + out.Values[i] = ec._Mutation_updatePropertyValueTypography(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removePropertyField": + out.Values[i] = ec._Mutation_removePropertyField(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "uploadFileToProperty": + out.Values[i] = 
ec._Mutation_uploadFileToProperty(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "linkDatasetToPropertyValue": + out.Values[i] = ec._Mutation_linkDatasetToPropertyValue(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "unlinkPropertyValue": + out.Values[i] = ec._Mutation_unlinkPropertyValue(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addPropertyItem": + out.Values[i] = ec._Mutation_addPropertyItem(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "movePropertyItem": + out.Values[i] = ec._Mutation_movePropertyItem(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removePropertyItem": + out.Values[i] = ec._Mutation_removePropertyItem(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updatePropertyItems": + out.Values[i] = ec._Mutation_updatePropertyItems(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addLayerItem": + out.Values[i] = ec._Mutation_addLayerItem(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addLayerGroup": + out.Values[i] = ec._Mutation_addLayerGroup(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeLayer": + out.Values[i] = ec._Mutation_removeLayer(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "updateLayer": + out.Values[i] = ec._Mutation_updateLayer(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "moveLayer": + out.Values[i] = ec._Mutation_moveLayer(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "createInfobox": + out.Values[i] = ec._Mutation_createInfobox(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeInfobox": + out.Values[i] = ec._Mutation_removeInfobox(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "addInfoboxField": + out.Values[i] = ec._Mutation_addInfoboxField(ctx, field) + if 
out.Values[i] == graphql.Null { + invalids++ + } + case "moveInfoboxField": + out.Values[i] = ec._Mutation_moveInfoboxField(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "removeInfoboxField": + out.Values[i] = ec._Mutation_removeInfoboxField(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + case "importLayer": + out.Values[i] = ec._Mutation_importLayer(ctx, field) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var pageInfoImplementors = []string{"PageInfo"} + +func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PageInfo) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pageInfoImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PageInfo") + case "startCursor": + out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj) + case "endCursor": + out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj) + case "hasNextPage": + out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "hasPreviousPage": + out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var pluginImplementors = []string{"Plugin"} + +func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Plugin) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pluginImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range 
fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Plugin") + case "id": + out.Values[i] = ec._Plugin_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._Plugin_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "version": + out.Values[i] = ec._Plugin_version(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + out.Values[i] = ec._Plugin_description(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "author": + out.Values[i] = ec._Plugin_author(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "repositoryUrl": + out.Values[i] = ec._Plugin_repositoryUrl(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertySchemaId": + out.Values[i] = ec._Plugin_propertySchemaId(ctx, field, obj) + case "extensions": + out.Values[i] = ec._Plugin_extensions(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "scenePlugin": + out.Values[i] = ec._Plugin_scenePlugin(ctx, field, obj) + case "allTranslatedDescription": + out.Values[i] = ec._Plugin_allTranslatedDescription(ctx, field, obj) + case "allTranslatedName": + out.Values[i] = ec._Plugin_allTranslatedName(ctx, field, obj) + case "translatedName": + out.Values[i] = ec._Plugin_translatedName(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "translatedDescription": + out.Values[i] = ec._Plugin_translatedDescription(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertySchema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_propertySchema(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var pluginExtensionImplementors = []string{"PluginExtension"} + +func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PluginExtension) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pluginExtensionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PluginExtension") + case "extensionId": + out.Values[i] = ec._PluginExtension_extensionId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + out.Values[i] = ec._PluginExtension_pluginId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + out.Values[i] = ec._PluginExtension_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._PluginExtension_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + out.Values[i] = ec._PluginExtension_description(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "icon": + out.Values[i] = ec._PluginExtension_icon(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "visualizer": + out.Values[i] = ec._PluginExtension_visualizer(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertySchemaId": + out.Values[i] = ec._PluginExtension_propertySchemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 
1) + } + case "allTranslatedName": + out.Values[i] = ec._PluginExtension_allTranslatedName(ctx, field, obj) + case "allTranslatedDescription": + out.Values[i] = ec._PluginExtension_allTranslatedDescription(ctx, field, obj) + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_plugin(ctx, field, obj) + return res + }) + case "sceneWidget": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_sceneWidget(ctx, field, obj) + return res + }) + case "propertySchema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_propertySchema(ctx, field, obj) + return res + }) + case "translatedName": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_translatedName(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "translatedDescription": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PluginExtension_translatedDescription(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectImplementors = []string{"Project", "Node"} + +func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, obj 
*graphql1.Project) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Project") + case "id": + out.Values[i] = ec._Project_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isArchived": + out.Values[i] = ec._Project_isArchived(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "createdAt": + out.Values[i] = ec._Project_createdAt(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "updatedAt": + out.Values[i] = ec._Project_updatedAt(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publishedAt": + out.Values[i] = ec._Project_publishedAt(ctx, field, obj) + case "name": + out.Values[i] = ec._Project_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + out.Values[i] = ec._Project_description(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "alias": + out.Values[i] = ec._Project_alias(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicTitle": + out.Values[i] = ec._Project_publicTitle(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicDescription": + out.Values[i] = ec._Project_publicDescription(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicImage": + out.Values[i] = ec._Project_publicImage(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publicNoIndex": + out.Values[i] = ec._Project_publicNoIndex(ctx, field, obj) + if 
out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "imageUrl": + out.Values[i] = ec._Project_imageUrl(ctx, field, obj) + case "teamId": + out.Values[i] = ec._Project_teamId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "visualizer": + out.Values[i] = ec._Project_visualizer(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "publishmentStatus": + out.Values[i] = ec._Project_publishmentStatus(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "team": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Project_team(ctx, field, obj) + return res + }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Project_scene(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectConnectionImplementors = []string{"ProjectConnection"} + +func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ProjectConnection) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectConnectionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectConnection") + case "edges": + out.Values[i] = ec._ProjectConnection_edges(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "nodes": + out.Values[i] = ec._ProjectConnection_nodes(ctx, field, obj) + if out.Values[i] == graphql.Null { 
+ invalids++ + } + case "pageInfo": + out.Values[i] = ec._ProjectConnection_pageInfo(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "totalCount": + out.Values[i] = ec._ProjectConnection_totalCount(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectEdgeImplementors = []string{"ProjectEdge"} + +func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ProjectEdge) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectEdgeImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectEdge") + case "cursor": + out.Values[i] = ec._ProjectEdge_cursor(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "node": + out.Values[i] = ec._ProjectEdge_node(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var projectPayloadImplementors = []string{"ProjectPayload"} + +func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ProjectPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectPayload") + case "project": + out.Values[i] = ec._ProjectPayload_project(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 
{ + return graphql.Null + } + return out +} + +var propertyImplementors = []string{"Property", "Node"} + +func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Property) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Property") + case "id": + out.Values[i] = ec._Property_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._Property_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "items": + out.Values[i] = ec._Property_items(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Property_schema(ctx, field, obj) + return res + }) + case "layer": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Property_layer(ctx, field, obj) + return res + }) + case "merged": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Property_merged(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyConditionImplementors = []string{"PropertyCondition"} + +func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.SelectionSet, obj 
*graphql1.PropertyCondition) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyConditionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyCondition") + case "fieldId": + out.Values[i] = ec._PropertyCondition_fieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "type": + out.Values[i] = ec._PropertyCondition_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "value": + out.Values[i] = ec._PropertyCondition_value(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyFieldImplementors = []string{"PropertyField"} + +func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyField") + case "id": + out.Values[i] = ec._PropertyField_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "parentId": + out.Values[i] = ec._PropertyField_parentId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._PropertyField_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fieldId": + out.Values[i] = ec._PropertyField_fieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "links": + out.Values[i] = ec._PropertyField_links(ctx, field, obj) + 
case "type": + out.Values[i] = ec._PropertyField_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "value": + out.Values[i] = ec._PropertyField_value(ctx, field, obj) + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_parent(ctx, field, obj) + return res + }) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_schema(ctx, field, obj) + return res + }) + case "field": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_field(ctx, field, obj) + return res + }) + case "actualValue": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyField_actualValue(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyFieldLinkImplementors = []string{"PropertyFieldLink"} + +func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyFieldLink) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldLinkImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyFieldLink") + case "datasetId": + out.Values[i] = ec._PropertyFieldLink_datasetId(ctx, field, obj) + case 
"datasetSchemaId": + out.Values[i] = ec._PropertyFieldLink_datasetSchemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "datasetSchemaFieldId": + out.Values[i] = ec._PropertyFieldLink_datasetSchemaFieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "dataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_dataset(ctx, field, obj) + return res + }) + case "datasetField": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_datasetField(ctx, field, obj) + return res + }) + case "datasetSchema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_datasetSchema(ctx, field, obj) + return res + }) + case "datasetSchemaField": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyFieldLink_datasetSchemaField(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyFieldPayloadImplementors = []string{"PropertyFieldPayload"} + +func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + 
switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyFieldPayload") + case "property": + out.Values[i] = ec._PropertyFieldPayload_property(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "propertyField": + out.Values[i] = ec._PropertyFieldPayload_propertyField(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyGroupImplementors = []string{"PropertyGroup", "PropertyItem"} + +func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyGroup") + case "id": + out.Values[i] = ec._PropertyGroup_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._PropertyGroup_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaGroupId": + out.Values[i] = ec._PropertyGroup_schemaGroupId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + out.Values[i] = ec._PropertyGroup_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroup_schema(ctx, field, obj) + return res + }) + case "schemaGroup": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil 
{ + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroup_schemaGroup(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyGroupListImplementors = []string{"PropertyGroupList", "PropertyItem"} + +func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyGroupList) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupListImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyGroupList") + case "id": + out.Values[i] = ec._PropertyGroupList_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._PropertyGroupList_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaGroupId": + out.Values[i] = ec._PropertyGroupList_schemaGroupId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "groups": + out.Values[i] = ec._PropertyGroupList_groups(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroupList_schema(ctx, field, obj) + return res + }) + case "schemaGroup": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyGroupList_schemaGroup(ctx, field, obj) + return res + }) + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyItemPayloadImplementors = []string{"PropertyItemPayload"} + +func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyItemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyItemPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyItemPayload") + case "property": + out.Values[i] = ec._PropertyItemPayload_property(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "propertyItem": + out.Values[i] = ec._PropertyItemPayload_propertyItem(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertyLinkableFieldsImplementors = []string{"PropertyLinkableFields"} + +func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyLinkableFields) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertyLinkableFieldsImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertyLinkableFields") + case "schemaId": + out.Values[i] = ec._PropertyLinkableFields_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "latlng": + out.Values[i] = ec._PropertyLinkableFields_latlng(ctx, field, obj) + case "url": + out.Values[i] = ec._PropertyLinkableFields_url(ctx, field, obj) + case "latlngField": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r 
:= recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyLinkableFields_latlngField(ctx, field, obj) + return res + }) + case "urlField": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyLinkableFields_urlField(ctx, field, obj) + return res + }) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertyLinkableFields_schema(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaImplementors = []string{"PropertySchema"} + +func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchema") + case "id": + out.Values[i] = ec._PropertySchema_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "groups": + out.Values[i] = ec._PropertySchema_groups(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "linkableFields": + out.Values[i] = ec._PropertySchema_linkableFields(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaFieldImplementors = []string{"PropertySchemaField"} + +func (ec *executionContext) 
_PropertySchemaField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchemaField) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchemaField") + case "fieldId": + out.Values[i] = ec._PropertySchemaField_fieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "type": + out.Values[i] = ec._PropertySchemaField_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "title": + out.Values[i] = ec._PropertySchemaField_title(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._PropertySchemaField_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "description": + out.Values[i] = ec._PropertySchemaField_description(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "prefix": + out.Values[i] = ec._PropertySchemaField_prefix(ctx, field, obj) + case "suffix": + out.Values[i] = ec._PropertySchemaField_suffix(ctx, field, obj) + case "defaultValue": + out.Values[i] = ec._PropertySchemaField_defaultValue(ctx, field, obj) + case "ui": + out.Values[i] = ec._PropertySchemaField_ui(ctx, field, obj) + case "min": + out.Values[i] = ec._PropertySchemaField_min(ctx, field, obj) + case "max": + out.Values[i] = ec._PropertySchemaField_max(ctx, field, obj) + case "choices": + out.Values[i] = ec._PropertySchemaField_choices(ctx, field, obj) + case "isAvailableIf": + out.Values[i] = ec._PropertySchemaField_isAvailableIf(ctx, field, obj) + case "allTranslatedTitle": + out.Values[i] = ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) + case 
"allTranslatedName": + out.Values[i] = ec._PropertySchemaField_allTranslatedName(ctx, field, obj) + case "allTranslatedDescription": + out.Values[i] = ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) + case "translatedTitle": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaField_translatedTitle(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "translatedName": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaField_translatedName(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "translatedDescription": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaField_translatedDescription(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaFieldChoiceImplementors = []string{"PropertySchemaFieldChoice"} + +func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchemaFieldChoice) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldChoiceImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchemaFieldChoice") + case "key": + out.Values[i] = 
ec._PropertySchemaFieldChoice_key(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "title": + out.Values[i] = ec._PropertySchemaFieldChoice_title(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "label": + out.Values[i] = ec._PropertySchemaFieldChoice_label(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "icon": + out.Values[i] = ec._PropertySchemaFieldChoice_icon(ctx, field, obj) + case "allTranslatedTitle": + out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) + case "allTranslatedLabel": + out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedLabel(ctx, field, obj) + case "translatedTitle": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaFieldChoice_translatedTitle(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "translatedLabel": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaFieldChoice_translatedLabel(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var propertySchemaGroupImplementors = []string{"PropertySchemaGroup"} + +func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchemaGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaGroupImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := 
range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PropertySchemaGroup") + case "schemaGroupId": + out.Values[i] = ec._PropertySchemaGroup_schemaGroupId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "schemaId": + out.Values[i] = ec._PropertySchemaGroup_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "fields": + out.Values[i] = ec._PropertySchemaGroup_fields(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isList": + out.Values[i] = ec._PropertySchemaGroup_isList(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "isAvailableIf": + out.Values[i] = ec._PropertySchemaGroup_isAvailableIf(ctx, field, obj) + case "title": + out.Values[i] = ec._PropertySchemaGroup_title(ctx, field, obj) + case "allTranslatedTitle": + out.Values[i] = ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) + case "name": + out.Values[i] = ec._PropertySchemaGroup_name(ctx, field, obj) + case "representativeFieldId": + out.Values[i] = ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) + case "representativeField": + out.Values[i] = ec._PropertySchemaGroup_representativeField(ctx, field, obj) + case "schema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaGroup_schema(ctx, field, obj) + return res + }) + case "translatedTitle": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._PropertySchemaGroup_translatedTitle(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var queryImplementors = []string{"Query"} + +func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, queryImplementors) + + ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ + Object: "Query", + }) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Query") + case "me": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_me(ctx, field) + return res + }) + case "node": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_node(ctx, field) + return res + }) + case "nodes": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_nodes(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "propertySchema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_propertySchema(ctx, field) + return res + }) + case "propertySchemas": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_propertySchemas(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "plugin": + field := field + 
out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_plugin(ctx, field) + return res + }) + case "plugins": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_plugins(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "layer": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_layer(ctx, field) + return res + }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_scene(ctx, field) + return res + }) + case "assets": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_assets(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "projects": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_projects(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "datasetSchemas": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_datasetSchemas(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "datasets": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) 
{ + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_datasets(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "sceneLock": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_sceneLock(ctx, field) + return res + }) + case "dynamicDatasetSchemas": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_dynamicDatasetSchemas(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "searchUser": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_searchUser(ctx, field) + return res + }) + case "checkProjectAlias": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_checkProjectAlias(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "__type": + out.Values[i] = ec._Query___type(ctx, field) + case "__schema": + out.Values[i] = ec._Query___schema(ctx, field) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var rectImplementors = []string{"Rect"} + +func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Rect) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, rectImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch 
field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Rect") + case "west": + out.Values[i] = ec._Rect_west(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "south": + out.Values[i] = ec._Rect_south(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "east": + out.Values[i] = ec._Rect_east(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "north": + out.Values[i] = ec._Rect_north(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeAssetPayloadImplementors = []string{"RemoveAssetPayload"} + +func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveAssetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeAssetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveAssetPayload") + case "assetId": + out.Values[i] = ec._RemoveAssetPayload_assetId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeDatasetSchemaPayloadImplementors = []string{"RemoveDatasetSchemaPayload"} + +func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeDatasetSchemaPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + 
out.Values[i] = graphql.MarshalString("RemoveDatasetSchemaPayload") + case "schemaId": + out.Values[i] = ec._RemoveDatasetSchemaPayload_schemaId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeInfoboxFieldPayloadImplementors = []string{"RemoveInfoboxFieldPayload"} + +func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxFieldPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveInfoboxFieldPayload") + case "infoboxFieldId": + out.Values[i] = ec._RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "layer": + out.Values[i] = ec._RemoveInfoboxFieldPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeInfoboxPayloadImplementors = []string{"RemoveInfoboxPayload"} + +func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveInfoboxPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveInfoboxPayload") + case "layer": + out.Values[i] = ec._RemoveInfoboxPayload_layer(ctx, field, obj) + if out.Values[i] == 
graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeLayerPayloadImplementors = []string{"RemoveLayerPayload"} + +func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeLayerPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveLayerPayload") + case "layerId": + out.Values[i] = ec._RemoveLayerPayload_layerId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "parentLayer": + out.Values[i] = ec._RemoveLayerPayload_parentLayer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var removeMemberFromTeamPayloadImplementors = []string{"RemoveMemberFromTeamPayload"} + +func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeMemberFromTeamPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveMemberFromTeamPayload") + case "team": + out.Values[i] = ec._RemoveMemberFromTeamPayload_team(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var 
removeWidgetPayloadImplementors = []string{"RemoveWidgetPayload"} + +func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveWidgetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeWidgetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveWidgetPayload") + case "scene": + out.Values[i] = ec._RemoveWidgetPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "pluginId": + out.Values[i] = ec._RemoveWidgetPayload_pluginId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "extensionId": + out.Values[i] = ec._RemoveWidgetPayload_extensionId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var sceneImplementors = []string{"Scene", "Node"} + +func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Scene) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, sceneImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Scene") + case "id": + out.Values[i] = ec._Scene_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "projectId": + out.Values[i] = ec._Scene_projectId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "teamId": + out.Values[i] = ec._Scene_teamId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = 
ec._Scene_propertyId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "createdAt": + out.Values[i] = ec._Scene_createdAt(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "updatedAt": + out.Values[i] = ec._Scene_updatedAt(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "rootLayerId": + out.Values[i] = ec._Scene_rootLayerId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "widgets": + out.Values[i] = ec._Scene_widgets(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "plugins": + out.Values[i] = ec._Scene_plugins(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "dynamicDatasetSchemas": + out.Values[i] = ec._Scene_dynamicDatasetSchemas(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "project": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_project(ctx, field, obj) + return res + }) + case "team": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_team(ctx, field, obj) + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_property(ctx, field, obj) + return res + }) + case "rootLayer": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_rootLayer(ctx, field, obj) + 
return res + }) + case "lockMode": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_lockMode(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "datasetSchemas": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_datasetSchemas(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var scenePluginImplementors = []string{"ScenePlugin"} + +func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ScenePlugin) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, scenePluginImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ScenePlugin") + case "pluginId": + out.Values[i] = ec._ScenePlugin_pluginId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = ec._ScenePlugin_propertyId(ctx, field, obj) + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._ScenePlugin_plugin(ctx, field, obj) + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._ScenePlugin_property(ctx, field, obj) + 
return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var sceneWidgetImplementors = []string{"SceneWidget"} + +func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SceneWidget) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, sceneWidgetImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SceneWidget") + case "id": + out.Values[i] = ec._SceneWidget_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "pluginId": + out.Values[i] = ec._SceneWidget_pluginId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "extensionId": + out.Values[i] = ec._SceneWidget_extensionId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "propertyId": + out.Values[i] = ec._SceneWidget_propertyId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "enabled": + out.Values[i] = ec._SceneWidget_enabled(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "plugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SceneWidget_plugin(ctx, field, obj) + return res + }) + case "extension": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SceneWidget_extension(ctx, field, obj) + return res + }) + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + 
defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._SceneWidget_property(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var searchedUserImplementors = []string{"SearchedUser"} + +func (ec *executionContext) _SearchedUser(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SearchedUser) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, searchedUserImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SearchedUser") + case "userId": + out.Values[i] = ec._SearchedUser_userId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "userName": + out.Values[i] = ec._SearchedUser_userName(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "userEmail": + out.Values[i] = ec._SearchedUser_userEmail(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var signupPayloadImplementors = []string{"SignupPayload"} + +func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SignupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, signupPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SignupPayload") + case "user": + out.Values[i] = ec._SignupPayload_user(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "team": + out.Values[i] = 
ec._SignupPayload_team(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var syncDatasetPayloadImplementors = []string{"SyncDatasetPayload"} + +func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SyncDatasetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, syncDatasetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("SyncDatasetPayload") + case "sceneId": + out.Values[i] = ec._SyncDatasetPayload_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "url": + out.Values[i] = ec._SyncDatasetPayload_url(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "datasetSchema": + out.Values[i] = ec._SyncDatasetPayload_datasetSchema(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "dataset": + out.Values[i] = ec._SyncDatasetPayload_dataset(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var teamImplementors = []string{"Team", "Node"} + +func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Team) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, teamImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Team") + case "id": + out.Values[i] = ec._Team_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._Team_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "members": + out.Values[i] = ec._Team_members(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "personal": + out.Values[i] = ec._Team_personal(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "assets": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Team_assets(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "projects": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Team_projects(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var teamMemberImplementors = []string{"TeamMember"} + +func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSet, obj *graphql1.TeamMember) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, teamMemberImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TeamMember") + case "userId": + out.Values[i] = ec._TeamMember_userId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "role": + out.Values[i] = ec._TeamMember_role(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case 
"user": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TeamMember_user(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var typographyImplementors = []string{"Typography"} + +func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Typography) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, typographyImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Typography") + case "fontFamily": + out.Values[i] = ec._Typography_fontFamily(ctx, field, obj) + case "fontWeight": + out.Values[i] = ec._Typography_fontWeight(ctx, field, obj) + case "fontSize": + out.Values[i] = ec._Typography_fontSize(ctx, field, obj) + case "color": + out.Values[i] = ec._Typography_color(ctx, field, obj) + case "textAlign": + out.Values[i] = ec._Typography_textAlign(ctx, field, obj) + case "bold": + out.Values[i] = ec._Typography_bold(ctx, field, obj) + case "italic": + out.Values[i] = ec._Typography_italic(ctx, field, obj) + case "underline": + out.Values[i] = ec._Typography_underline(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var uninstallPluginPayloadImplementors = []string{"UninstallPluginPayload"} + +func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UninstallPluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, uninstallPluginPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var 
invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UninstallPluginPayload") + case "scene": + out.Values[i] = ec._UninstallPluginPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + out.Values[i] = ec._UninstallPluginPayload_scenePlugin(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateDatasetSchemaPayloadImplementors = []string{"UpdateDatasetSchemaPayload"} + +func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateDatasetSchemaPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateDatasetSchemaPayload") + case "datasetSchema": + out.Values[i] = ec._UpdateDatasetSchemaPayload_datasetSchema(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateLayerPayloadImplementors = []string{"UpdateLayerPayload"} + +func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateLayerPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateLayerPayload") + case "layer": + out.Values[i] = ec._UpdateLayerPayload_layer(ctx, field, 
obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateMePayloadImplementors = []string{"UpdateMePayload"} + +func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateMePayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateMePayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateMePayload") + case "user": + out.Values[i] = ec._UpdateMePayload_user(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateMemberOfTeamPayloadImplementors = []string{"UpdateMemberOfTeamPayload"} + +func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateMemberOfTeamPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateMemberOfTeamPayload") + case "team": + out.Values[i] = ec._UpdateMemberOfTeamPayload_team(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateTeamPayloadImplementors = []string{"UpdateTeamPayload"} + +func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj 
*graphql1.UpdateTeamPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateTeamPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateTeamPayload") + case "team": + out.Values[i] = ec._UpdateTeamPayload_team(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var updateWidgetPayloadImplementors = []string{"UpdateWidgetPayload"} + +func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateWidgetPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateWidgetPayload") + case "scene": + out.Values[i] = ec._UpdateWidgetPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "sceneWidget": + out.Values[i] = ec._UpdateWidgetPayload_sceneWidget(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var upgradePluginPayloadImplementors = []string{"UpgradePluginPayload"} + +func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpgradePluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, upgradePluginPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch 
field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpgradePluginPayload") + case "scene": + out.Values[i] = ec._UpgradePluginPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + out.Values[i] = ec._UpgradePluginPayload_scenePlugin(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var uploadPluginPayloadImplementors = []string{"UploadPluginPayload"} + +func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UploadPluginPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, uploadPluginPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UploadPluginPayload") + case "plugin": + out.Values[i] = ec._UploadPluginPayload_plugin(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var userImplementors = []string{"User", "Node"} + +func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj *graphql1.User) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, userImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("User") + case "id": + out.Values[i] = ec._User_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + out.Values[i] = ec._User_name(ctx, field, obj) + if out.Values[i] == 
graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "email": + out.Values[i] = ec._User_email(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "lang": + out.Values[i] = ec._User_lang(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "theme": + out.Values[i] = ec._User_theme(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "myTeamId": + out.Values[i] = ec._User_myTeamId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "auths": + out.Values[i] = ec._User_auths(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "teams": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._User_teams(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "myTeam": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._User_myTeam(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __DirectiveImplementors = []string{"__Directive"} + +func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Directive") + case 
"name": + out.Values[i] = ec.___Directive_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + out.Values[i] = ec.___Directive_description(ctx, field, obj) + case "locations": + out.Values[i] = ec.___Directive_locations(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "args": + out.Values[i] = ec.___Directive_args(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __EnumValueImplementors = []string{"__EnumValue"} + +func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__EnumValue") + case "name": + out.Values[i] = ec.___EnumValue_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + out.Values[i] = ec.___EnumValue_description(ctx, field, obj) + case "isDeprecated": + out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __FieldImplementors = []string{"__Field"} + +func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors) + + out := graphql.NewFieldSet(fields) + var invalids 
uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Field") + case "name": + out.Values[i] = ec.___Field_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + out.Values[i] = ec.___Field_description(ctx, field, obj) + case "args": + out.Values[i] = ec.___Field_args(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "type": + out.Values[i] = ec.___Field_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "isDeprecated": + out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __InputValueImplementors = []string{"__InputValue"} + +func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__InputValue") + case "name": + out.Values[i] = ec.___InputValue_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + out.Values[i] = ec.___InputValue_description(ctx, field, obj) + case "type": + out.Values[i] = ec.___InputValue_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "defaultValue": + out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } 
+ return out +} + +var __SchemaImplementors = []string{"__Schema"} + +func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Schema") + case "types": + out.Values[i] = ec.___Schema_types(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "queryType": + out.Values[i] = ec.___Schema_queryType(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "mutationType": + out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) + case "subscriptionType": + out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) + case "directives": + out.Values[i] = ec.___Schema_directives(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var __TypeImplementors = []string{"__Type"} + +func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Type") + case "kind": + out.Values[i] = ec.___Type_kind(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "name": + out.Values[i] = ec.___Type_name(ctx, field, obj) + case "description": + out.Values[i] = ec.___Type_description(ctx, field, obj) + case "fields": + out.Values[i] = ec.___Type_fields(ctx, field, obj) + case "interfaces": + 
out.Values[i] = ec.___Type_interfaces(ctx, field, obj) + case "possibleTypes": + out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) + case "enumValues": + out.Values[i] = ec.___Type_enumValues(ctx, field, obj) + case "inputFields": + out.Values[i] = ec.___Type_inputFields(ctx, field, obj) + case "ofType": + out.Values[i] = ec.___Type_ofType(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +// endregion **************************** object.gotpl **************************** + +// region ***************************** type.gotpl ***************************** + +func (ec *executionContext) unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.AddDatasetSchemaInput, error) { + res, err := ec.unmarshalInputAddDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddDatasetSchemaPayload) graphql.Marshaler { + return ec._AddDatasetSchemaPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetInput(ctx 
context.Context, v interface{}) (graphql1.AddDynamicDatasetInput, error) { + res, err := ec.unmarshalInputAddDynamicDatasetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddDynamicDatasetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddDynamicDatasetPayload) graphql.Marshaler { + return ec._AddDynamicDatasetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddDynamicDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.AddDynamicDatasetSchemaInput, error) { + res, err := ec.unmarshalInputAddDynamicDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddDynamicDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { + return ec._AddDynamicDatasetSchemaPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + 
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddDynamicDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.AddInfoboxFieldInput, error) { + res, err := ec.unmarshalInputAddInfoboxFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddInfoboxFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddInfoboxFieldPayload) graphql.Marshaler { + return ec._AddInfoboxFieldPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupInput(ctx context.Context, v interface{}) (graphql1.AddLayerGroupInput, error) { + res, err := ec.unmarshalInputAddLayerGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddLayerGroupPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddLayerGroupPayload) graphql.Marshaler { + return ec._AddLayerGroupPayload(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalNAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerGroupPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddLayerGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemInput(ctx context.Context, v interface{}) (graphql1.AddLayerItemInput, error) { + res, err := ec.unmarshalInputAddLayerItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddLayerItemPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddLayerItemPayload) graphql.Marshaler { + return ec._AddLayerItemPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerItemPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddLayerItemPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamInput(ctx context.Context, v interface{}) (graphql1.AddMemberToTeamInput, error) { + res, err := ec.unmarshalInputAddMemberToTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
marshalNAddMemberToTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddMemberToTeamPayload) graphql.Marshaler { + return ec._AddMemberToTeamPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddMemberToTeamPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddMemberToTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddPropertyItemInput(ctx context.Context, v interface{}) (graphql1.AddPropertyItemInput, error) { + res, err := ec.unmarshalInputAddPropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetInput(ctx context.Context, v interface{}) (graphql1.AddWidgetInput, error) { + res, err := ec.unmarshalInputAddWidgetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNAddWidgetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddWidgetPayload) graphql.Marshaler { + return ec._AddWidgetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddWidgetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AddWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Asset) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Asset) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Asset(ctx, sel, v) +} + +func (ec *executionContext) marshalNAssetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.AssetConnection) graphql.Marshaler { + return ec._AssetConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.AssetConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) 
{ + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AssetConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.AssetEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.AssetEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._AssetEdge(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v interface{}) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + res := graphql.MarshalBoolean(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) 
marshalNCheckProjectAliasPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CheckProjectAliasPayload) graphql.Marshaler { + return ec._CheckProjectAliasPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CheckProjectAliasPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._CheckProjectAliasPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetInput(ctx context.Context, v interface{}) (graphql1.CreateAssetInput, error) { + res, err := ec.unmarshalInputCreateAssetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNCreateAssetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateAssetPayload) graphql.Marshaler { + return ec._CreateAssetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateAssetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._CreateAssetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxInput(ctx 
context.Context, v interface{}) (graphql1.CreateInfoboxInput, error) { + res, err := ec.unmarshalInputCreateInfoboxInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNCreateInfoboxPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateInfoboxPayload) graphql.Marshaler { + return ec._CreateInfoboxPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateInfoboxPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._CreateInfoboxPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateProjectInput(ctx context.Context, v interface{}) (graphql1.CreateProjectInput, error) { + res, err := ec.unmarshalInputCreateProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateSceneInput(ctx context.Context, v interface{}) (graphql1.CreateSceneInput, error) { + res, err := ec.unmarshalInputCreateSceneInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNCreateScenePayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateScenePayload) graphql.Marshaler { + return ec._CreateScenePayload(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalNCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateScenePayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._CreateScenePayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamInput(ctx context.Context, v interface{}) (graphql1.CreateTeamInput, error) { + res, err := ec.unmarshalInputCreateTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNCreateTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateTeamPayload) graphql.Marshaler { + return ec._CreateTeamPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateTeamPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._CreateTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (usecase.Cursor, error) { + res, err := graphql1.UnmarshalCursor(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, sel ast.SelectionSet, v usecase.Cursor) graphql.Marshaler { + res := graphql1.MarshalCursor(v) + if res == 
graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Dataset) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Dataset) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) 
marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Dataset) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Dataset(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.DatasetConnection) graphql.Marshaler { + return ec._DatasetConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + 
+func (ec *executionContext) marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetEdge(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetField(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchema) graphql.Marshaler { + ret := 
make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchema) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchema) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetSchema(ctx, sel, v) +} + +func (ec *executionContext) 
marshalNDatasetSchemaConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.DatasetSchemaConnection) graphql.Marshaler { + return ec._DatasetSchemaConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetSchemaConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchemaEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetSchemaEdge(ctx, sel, v) +} + +func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchemaField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DatasetSchemaField(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNDateTime2timeแšTime(ctx context.Context, v interface{}) (time.Time, error) { + res, err := graphql.UnmarshalTime(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNDateTime2timeแšTime(ctx context.Context, sel ast.SelectionSet, v time.Time) graphql.Marshaler { + res := graphql.MarshalTime(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return 
res +} + +func (ec *executionContext) unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMeInput(ctx context.Context, v interface{}) (graphql1.DeleteMeInput, error) { + res, err := ec.unmarshalInputDeleteMeInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNDeleteMePayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v graphql1.DeleteMePayload) graphql.Marshaler { + return ec._DeleteMePayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteMePayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DeleteMePayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectInput(ctx context.Context, v interface{}) (graphql1.DeleteProjectInput, error) { + res, err := ec.unmarshalInputDeleteProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNDeleteProjectPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.DeleteProjectPayload) graphql.Marshaler { + return ec._DeleteProjectPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteProjectPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DeleteProjectPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamInput(ctx context.Context, v interface{}) (graphql1.DeleteTeamInput, error) { + res, err := ec.unmarshalInputDeleteTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNDeleteTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.DeleteTeamPayload) graphql.Marshaler { + return ec._DeleteTeamPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNDeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteTeamPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._DeleteTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNFileSize2int64(ctx context.Context, v interface{}) (int64, error) { + res, err := graphql.UnmarshalInt64(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNFileSize2int64(ctx context.Context, sel ast.SelectionSet, v int64) graphql.Marshaler { + res := graphql.MarshalInt64(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNFloat2float64(ctx context.Context, v interface{}) (float64, error) { + res, err := graphql.UnmarshalFloat(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNFloat2float64(ctx context.Context, sel 
ast.SelectionSet, v float64) graphql.Marshaler { + res := graphql.MarshalFloat(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (id.ID, error) { + res, err := graphql1.UnmarshalID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v id.ID) graphql.Marshaler { + res := graphql1.MarshalID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, v interface{}) ([]*id.ID, error) { + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]*id.ID, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.ID) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, sel, v[i]) + } + + return ret +} + +func (ec *executionContext) unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (*id.ID, error) { + 
res, err := graphql1.UnmarshalID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v *id.ID) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := graphql1.MarshalID(*v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetInput(ctx context.Context, v interface{}) (graphql1.ImportDatasetInput, error) { + res, err := ec.unmarshalInputImportDatasetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNImportDatasetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.ImportDatasetPayload) graphql.Marshaler { + return ec._ImportDatasetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportDatasetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ImportDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerInput(ctx context.Context, v interface{}) (graphql1.ImportLayerInput, error) { + res, err := ec.unmarshalInputImportLayerInput(ctx, v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNImportLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.ImportLayerPayload) graphql.Marshaler { + return ec._ImportLayerPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportLayerPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ImportLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v graphql1.Infobox) graphql.Marshaler { + return ec._Infobox(ctx, sel, &v) +} + +func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Infobox(ctx, sel, v) +} + +func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.InfoboxField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil 
{ + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxField(ctx context.Context, sel ast.SelectionSet, v *graphql1.InfoboxField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._InfoboxField(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginInput(ctx context.Context, v interface{}) (graphql1.InstallPluginInput, error) { + res, err := ec.unmarshalInputInstallPluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNInstallPluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.InstallPluginPayload) graphql.Marshaler { + return ec._InstallPluginPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.InstallPluginPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._InstallPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v interface{}) (int, error) { + res, err := graphql.UnmarshalInt(v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.SelectionSet, v int) graphql.Marshaler { + res := graphql.MarshalInt(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v graphql1.Layer) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Layer(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v []graphql1.Layer) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerแš„(ctx context.Context, sel ast.SelectionSet, v []graphql1.Layer) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], 
+ } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerEncodingFormat(ctx context.Context, v interface{}) (graphql1.LayerEncodingFormat, error) { + var res graphql1.LayerEncodingFormat + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerEncodingFormat(ctx context.Context, sel ast.SelectionSet, v graphql1.LayerEncodingFormat) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._LayerGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._LayerItem(ctx, sel, v) +} + +func (ec *executionContext) 
unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLinkDatasetToPropertyValueInput(ctx context.Context, v interface{}) (graphql1.LinkDatasetToPropertyValueInput, error) { + res, err := ec.unmarshalInputLinkDatasetToPropertyValueInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšListOperation(ctx context.Context, v interface{}) (graphql1.ListOperation, error) { + var res graphql1.ListOperation + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšListOperation(ctx context.Context, sel ast.SelectionSet, v graphql1.ListOperation) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.MergedInfoboxField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) 
marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedInfoboxField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._MergedInfoboxField(ctx, sel, v) +} + +func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.MergedPropertyField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedPropertyField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._MergedPropertyField(ctx, sel, v) +} + +func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroupแš„(ctx context.Context, sel 
ast.SelectionSet, v []*graphql1.MergedPropertyGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroup(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedPropertyGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._MergedPropertyGroup(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.MoveInfoboxFieldInput, error) { + res, err := ec.unmarshalInputMoveInfoboxFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNMoveInfoboxFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { + return ec._MoveInfoboxFieldPayload(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalNMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._MoveInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerInput(ctx context.Context, v interface{}) (graphql1.MoveLayerInput, error) { + res, err := ec.unmarshalInputMoveLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNMoveLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.MoveLayerPayload) graphql.Marshaler { + return ec._MoveLayerPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveLayerPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._MoveLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMovePropertyItemInput(ctx context.Context, v interface{}) (graphql1.MovePropertyItemInput, error) { + res, err := ec.unmarshalInputMovePropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx context.Context, sel ast.SelectionSet, 
v []graphql1.Node) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx context.Context, v interface{}) (graphql1.NodeType, error) { + var res graphql1.NodeType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx context.Context, sel ast.SelectionSet, v graphql1.NodeType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *graphql1.PageInfo) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PageInfo(ctx, sel, v) +} + +func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Plugin) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := 
&graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.Plugin) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Plugin(ctx, sel, v) +} + +func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PluginExtension) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.PluginExtension) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PluginExtension(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, v interface{}) (id.PluginExtensionID, error) { + res, err := graphql1.UnmarshalPluginExtensionID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, sel ast.SelectionSet, v id.PluginExtensionID) graphql.Marshaler { + res := graphql1.MarshalPluginExtensionID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionType(ctx context.Context, v interface{}) (graphql1.PluginExtensionType, error) { + var res graphql1.PluginExtensionType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionType(ctx context.Context, sel ast.SelectionSet, v graphql1.PluginExtensionType) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (id.PluginID, error) { + res, err := graphql1.UnmarshalPluginID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v 
id.PluginID) graphql.Marshaler { + res := graphql1.MarshalPluginID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginIDแš„(ctx context.Context, v interface{}) ([]*id.PluginID, error) { + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]*id.PluginID, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.PluginID) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, sel, v[i]) + } + + return ret +} + +func (ec *executionContext) unmarshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (*id.PluginID, error) { + res, err := graphql1.UnmarshalPluginID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v *id.PluginID) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := graphql1.MarshalPluginID(*v) + if res == graphql.Null { + if 
!graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Project) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx context.Context, sel ast.SelectionSet, v *graphql1.Project) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Project(ctx, sel, v) +} + +func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.ProjectConnection) graphql.Marshaler { + return ec._ProjectConnection(ctx, sel, &v) +} + +func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectConnection) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ProjectConnection(ctx, sel, v) +} + +func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.ProjectEdge) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdge(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectEdge) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ProjectEdge(ctx, sel, v) +} + +func (ec *executionContext) marshalNProjectPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.ProjectPayload) graphql.Marshaler { + return ec._ProjectPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectPayload) graphql.Marshaler { + if v == nil { + if 
!graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ProjectPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.Property) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Property(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyField(ctx, sel, v) +} + +func (ec *executionContext) 
marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyFieldLink) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyFieldLink(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.PropertyFieldPayload) graphql.Marshaler { + return ec._PropertyFieldPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyFieldPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx, sel, v[i]) + } + if isLen1 
{ + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v graphql1.PropertyItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyItem(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemแš„(ctx context.Context, sel ast.SelectionSet, v []graphql1.PropertyItem) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPropertyItemPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v 
graphql1.PropertyItemPayload) graphql.Marshaler { + return ec._PropertyItemPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyItemPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyItemPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyLinkableFields) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyLinkableFields(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchema) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) 
marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchema) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertySchema(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchemaField) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaField) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertySchemaField(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.PropertySchemaFieldChoice) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertySchemaFieldChoice(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, v interface{}) (id.PropertySchemaFieldID, error) { + res, err := graphql1.UnmarshalPropertySchemaFieldID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaFieldID) graphql.Marshaler { + res := graphql1.MarshalPropertySchemaFieldID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchemaGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) 
marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertySchemaGroup(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (id.PropertySchemaID, error) { + res, err := graphql1.UnmarshalPropertySchemaID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaID) graphql.Marshaler { + res := graphql1.MarshalPropertySchemaID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNPropertySchemaID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaIDแš„(ctx context.Context, v interface{}) ([]*id.PropertySchemaID, error) { + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]*id.PropertySchemaID, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalNPropertySchemaID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaIDแš„(ctx 
context.Context, sel ast.SelectionSet, v []*id.PropertySchemaID) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, sel, v[i]) + } + + return ret +} + +func (ec *executionContext) unmarshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (*id.PropertySchemaID, error) { + res, err := graphql1.UnmarshalPropertySchemaID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaID) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := graphql1.MarshalPropertySchemaID(*v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishProjectInput(ctx context.Context, v interface{}) (graphql1.PublishProjectInput, error) { + res, err := ec.unmarshalInputPublishProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx context.Context, v interface{}) (graphql1.PublishmentStatus, error) { + var res graphql1.PublishmentStatus + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx 
context.Context, sel ast.SelectionSet, v graphql1.PublishmentStatus) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetInput(ctx context.Context, v interface{}) (graphql1.RemoveAssetInput, error) { + res, err := ec.unmarshalInputRemoveAssetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRemoveAssetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveAssetPayload) graphql.Marshaler { + return ec._RemoveAssetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNRemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveAssetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveAssetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.RemoveDatasetSchemaInput, error) { + res, err := ec.unmarshalInputRemoveDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRemoveDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { + return ec._RemoveDatasetSchemaPayload(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalNRemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.RemoveInfoboxFieldInput, error) { + res, err := ec.unmarshalInputRemoveInfoboxFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRemoveInfoboxFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { + return ec._RemoveInfoboxFieldPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNRemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxInput(ctx context.Context, v interface{}) (graphql1.RemoveInfoboxInput, error) { + res, err := ec.unmarshalInputRemoveInfoboxInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
marshalNRemoveInfoboxPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveInfoboxPayload) graphql.Marshaler { + return ec._RemoveInfoboxPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNRemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveInfoboxPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerInput(ctx context.Context, v interface{}) (graphql1.RemoveLayerInput, error) { + res, err := ec.unmarshalInputRemoveLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRemoveLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveLayerPayload) graphql.Marshaler { + return ec._RemoveLayerPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNRemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveLayerPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamInput(ctx context.Context, v 
interface{}) (graphql1.RemoveMemberFromTeamInput, error) { + res, err := ec.unmarshalInputRemoveMemberFromTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRemoveMemberFromTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { + return ec._RemoveMemberFromTeamPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNRemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveMemberFromTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMyAuthInput(ctx context.Context, v interface{}) (graphql1.RemoveMyAuthInput, error) { + res, err := ec.unmarshalInputRemoveMyAuthInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyFieldInput(ctx context.Context, v interface{}) (graphql1.RemovePropertyFieldInput, error) { + res, err := ec.unmarshalInputRemovePropertyFieldInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyItemInput(ctx context.Context, v interface{}) (graphql1.RemovePropertyItemInput, error) { + res, err := ec.unmarshalInputRemovePropertyItemInput(ctx, v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetInput(ctx context.Context, v interface{}) (graphql1.RemoveWidgetInput, error) { + res, err := ec.unmarshalInputRemoveWidgetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRemoveWidgetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveWidgetPayload) graphql.Marshaler { + return ec._RemoveWidgetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNRemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveWidgetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._RemoveWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx context.Context, v interface{}) (graphql1.Role, error) { + var res graphql1.Role + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx context.Context, sel ast.SelectionSet, v graphql1.Role) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx context.Context, sel ast.SelectionSet, v *graphql1.Scene) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return 
ec._Scene(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, v interface{}) (graphql1.SceneLockMode, error) { + var res graphql1.SceneLockMode + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v graphql1.SceneLockMode) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePluginแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.ScenePlugin) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.ScenePlugin) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ScenePlugin(ctx, sel, v) +} + +func (ec *executionContext) 
marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidgetแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.SceneWidget) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *graphql1.SceneWidget) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._SceneWidget(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupInput(ctx context.Context, v interface{}) (graphql1.SignupInput, error) { + res, err := ec.unmarshalInputSignupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNSignupPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.SignupPayload) graphql.Marshaler { + return ec._SignupPayload(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalNSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SignupPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._SignupPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNString2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNString2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNString2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalNString2แš•stringแš„(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNString2string(ctx, sel, v[i]) + } + + return ret +} + +func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetInput(ctx context.Context, v interface{}) (graphql1.SyncDatasetInput, error) { + res, err := ec.unmarshalInputSyncDatasetInput(ctx, v) + return res, 
graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNSyncDatasetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.SyncDatasetPayload) graphql.Marshaler { + return ec._SyncDatasetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SyncDatasetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._SyncDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNTeam2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v graphql1.Team) graphql.Marshaler { + return ec._Team(ctx, sel, &v) +} + +func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Team) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx 
context.Context, sel ast.SelectionSet, v *graphql1.Team) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Team(ctx, sel, v) +} + +func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMemberแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.TeamMember) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMember(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMember(ctx context.Context, sel ast.SelectionSet, v *graphql1.TeamMember) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._TeamMember(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, v interface{}) (graphql1.Theme, error) { + var res graphql1.Theme + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, sel 
ast.SelectionSet, v graphql1.Theme) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginInput(ctx context.Context, v interface{}) (graphql1.UninstallPluginInput, error) { + res, err := ec.unmarshalInputUninstallPluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUninstallPluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UninstallPluginPayload) graphql.Marshaler { + return ec._UninstallPluginPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UninstallPluginPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UninstallPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUnlinkPropertyValueInput(ctx context.Context, v interface{}) (graphql1.UnlinkPropertyValueInput, error) { + res, err := ec.unmarshalInputUnlinkPropertyValueInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.UpdateDatasetSchemaInput, error) { + res, err := ec.unmarshalInputUpdateDatasetSchemaInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
marshalNUpdateDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { + return ec._UpdateDatasetSchemaPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UpdateDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerInput(ctx context.Context, v interface{}) (graphql1.UpdateLayerInput, error) { + res, err := ec.unmarshalInputUpdateLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpdateLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateLayerPayload) graphql.Marshaler { + return ec._UpdateLayerPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateLayerPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UpdateLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMeInput(ctx 
context.Context, v interface{}) (graphql1.UpdateMeInput, error) { + res, err := ec.unmarshalInputUpdateMeInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpdateMePayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateMePayload) graphql.Marshaler { + return ec._UpdateMePayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMePayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UpdateMePayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamInput(ctx context.Context, v interface{}) (graphql1.UpdateMemberOfTeamInput, error) { + res, err := ec.unmarshalInputUpdateMemberOfTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpdateMemberOfTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { + return ec._UpdateMemberOfTeamPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return 
ec._UpdateMemberOfTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateProjectInput(ctx context.Context, v interface{}) (graphql1.UpdateProjectInput, error) { + res, err := ec.unmarshalInputUpdateProjectInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyItemInput, error) { + res, err := ec.unmarshalInputUpdatePropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInputแš„(ctx context.Context, v interface{}) ([]*graphql1.UpdatePropertyItemOperationInput, error) { + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]*graphql1.UpdatePropertyItemOperationInput, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInput(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInput(ctx context.Context, v interface{}) (*graphql1.UpdatePropertyItemOperationInput, error) { + res, err := ec.unmarshalInputUpdatePropertyItemOperationInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} 
+ +func (ec *executionContext) unmarshalNUpdatePropertyValueCameraInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueCameraInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueCameraInput, error) { + res, err := ec.unmarshalInputUpdatePropertyValueCameraInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueInput, error) { + res, err := ec.unmarshalInputUpdatePropertyValueInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyValueLatLngHeightInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngHeightInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueLatLngHeightInput, error) { + res, err := ec.unmarshalInputUpdatePropertyValueLatLngHeightInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyValueLatLngInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueLatLngInput, error) { + res, err := ec.unmarshalInputUpdatePropertyValueLatLngInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUpdatePropertyValueTypographyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueTypographyInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueTypographyInput, error) { + res, err := ec.unmarshalInputUpdatePropertyValueTypographyInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamInput(ctx context.Context, v interface{}) (graphql1.UpdateTeamInput, error) { + res, err := ec.unmarshalInputUpdateTeamInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpdateTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateTeamPayload) graphql.Marshaler { + return ec._UpdateTeamPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateTeamPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UpdateTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetInput(ctx context.Context, v interface{}) (graphql1.UpdateWidgetInput, error) { + res, err := ec.unmarshalInputUpdateWidgetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpdateWidgetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateWidgetPayload) graphql.Marshaler { + return ec._UpdateWidgetPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateWidgetPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UpdateWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginInput(ctx context.Context, v interface{}) (graphql1.UpgradePluginInput, error) { + res, err := ec.unmarshalInputUpgradePluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpgradePluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpgradePluginPayload) graphql.Marshaler { + return ec._UpgradePluginPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpgradePluginPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UpgradePluginPayload(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (graphql.Upload, error) { + res, err := graphql.UnmarshalUpload(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, sel ast.SelectionSet, v graphql.Upload) graphql.Marshaler { + res := graphql.MarshalUpload(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadFileToPropertyInput(ctx 
context.Context, v interface{}) (graphql1.UploadFileToPropertyInput, error) { + res, err := ec.unmarshalInputUploadFileToPropertyInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginInput(ctx context.Context, v interface{}) (graphql1.UploadPluginInput, error) { + res, err := ec.unmarshalInputUploadPluginInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNUploadPluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UploadPluginPayload) graphql.Marshaler { + return ec._UploadPluginPayload(ctx, sel, &v) +} + +func (ec *executionContext) marshalNUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UploadPluginPayload) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._UploadPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.User) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._User(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, v interface{}) (graphql1.ValueType, error) { + var res graphql1.ValueType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec 
*executionContext) marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, sel ast.SelectionSet, v graphql1.ValueType) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx context.Context, v interface{}) (graphql1.Visualizer, error) { + var res graphql1.Visualizer + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx context.Context, sel ast.SelectionSet, v graphql1.Visualizer) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalN__Directive2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler { + return ec.___Directive(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Directive2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirectiveแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Directive) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Directive2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirective(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2string(ctx context.Context, v interface{}) (string, error) { 
+ res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalN__DirectiveLocation2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalN__DirectiveLocation2แš•stringแš„(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__DirectiveLocation2string(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalN__EnumValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValue(ctx context.Context, sel ast.SelectionSet, v introspection.EnumValue) graphql.Marshaler { + return ec.___EnumValue(ctx, sel, &v) +} + +func (ec *executionContext) 
marshalN__Field2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšField(ctx context.Context, sel ast.SelectionSet, v introspection.Field) graphql.Marshaler { + return ec.___Field(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValue(ctx context.Context, sel ast.SelectionSet, v introspection.InputValue) graphql.Marshaler { + return ec.___InputValue(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalN__Type2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler { + return ec.___Type(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := 
graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +func (ec *executionContext) unmarshalN__TypeKind2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + +func (ec *executionContext) unmarshalOAny2interface(ctx context.Context, v interface{}) (interface{}, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalAny(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOAny2interface(ctx context.Context, sel ast.SelectionSet, v interface{}) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalAny(v) +} + +func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Asset) graphql.Marshaler { + if v == nil { + return graphql.Null + } + 
return ec._Asset(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interface{}) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + return graphql.MarshalBoolean(v) +} + +func (ec *executionContext) unmarshalOBoolean2แš–bool(ctx context.Context, v interface{}) (*bool, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalBoolean(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2แš–bool(ctx context.Context, sel ast.SelectionSet, v *bool) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalBoolean(*v) +} + +func (ec *executionContext) unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (*usecase.Cursor, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalCursor(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, sel ast.SelectionSet, v *usecase.Cursor) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalCursor(*v) +} + +func (ec *executionContext) marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Dataset) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Dataset(ctx, sel, v) +} + +func (ec *executionContext) marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetField) graphql.Marshaler { + if v == nil { + return 
graphql.Null + } + return ec._DatasetField(ctx, sel, v) +} + +func (ec *executionContext) marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DatasetSchema(ctx, sel, v) +} + +func (ec *executionContext) marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DatasetSchemaField(ctx, sel, v) +} + +func (ec *executionContext) unmarshalODateTime2แš–timeแšTime(ctx context.Context, v interface{}) (*time.Time, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalTime(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalODateTime2แš–timeแšTime(ctx context.Context, sel ast.SelectionSet, v *time.Time) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalTime(*v) +} + +func (ec *executionContext) unmarshalOFloat2แš–float64(ctx context.Context, v interface{}) (*float64, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalFloat(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOFloat2แš–float64(ctx context.Context, sel ast.SelectionSet, v *float64) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalFloat(*v) +} + +func (ec *executionContext) unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, v interface{}) ([]*id.ID, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err 
error + res := make([]*id.ID, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.ID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, sel, v[i]) + } + + return ret +} + +func (ec *executionContext) unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (*id.ID, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v *id.ID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalID(*v) +} + +func (ec *executionContext) marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Infobox(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOInt2แš–int(ctx context.Context, v interface{}) (*int, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalInt(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOInt2แš–int(ctx context.Context, sel ast.SelectionSet, v *int) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalInt(*v) +} + +func (ec 
*executionContext) marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v graphql1.Layer) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Layer(ctx, sel, v) +} + +func (ec *executionContext) marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._LayerGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerItem) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._LayerItem(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedInfobox) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedInfobox(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedInfoboxField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedInfoboxField(ctx, sel, v) +} + +func (ec *executionContext) marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedLayer(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedLayer) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedLayer(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedProperty) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MergedProperty(ctx, sel, v) +} + +func (ec *executionContext) marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx context.Context, sel ast.SelectionSet, v graphql1.Node) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Node(ctx, sel, v) +} + +func (ec *executionContext) marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.Plugin) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Plugin(ctx, sel, v) +} + +func (ec *executionContext) marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *graphql1.PluginExtension) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PluginExtension(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, v interface{}) (*id.PluginExtensionID, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalPluginExtensionID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, sel ast.SelectionSet, v *id.PluginExtensionID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalPluginExtensionID(*v) +} + +func (ec *executionContext) unmarshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v 
interface{}) (*id.PluginID, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalPluginID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v *id.PluginID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalPluginID(*v) +} + +func (ec *executionContext) marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx context.Context, sel ast.SelectionSet, v *graphql1.Project) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Project(ctx, sel, v) +} + +func (ec *executionContext) marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.Property) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Property(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyCondition(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyCondition) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyCondition(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyField(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLinkแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyFieldLink) graphql.Marshaler { + if v == nil { + return graphql.Null + } 
+ ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLink(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v graphql1.PropertyItem) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyItem(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertySchema(ctx, sel, v) +} + +func (ec *executionContext) marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaField) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertySchemaField(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoiceแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchemaFieldChoice) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoice(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, v interface{}) (*id.PropertySchemaFieldID, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalPropertySchemaFieldID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaFieldID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalPropertySchemaFieldID(*v) +} + +func (ec *executionContext) unmarshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldUI(ctx context.Context, v interface{}) (*graphql1.PropertySchemaFieldUI, error) { + if v == nil { + return nil, nil + } + var res = new(graphql1.PropertySchemaFieldUI) + err := 
res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldUI(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaFieldUI) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertySchemaGroup(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (*id.PropertySchemaID, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalPropertySchemaID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalPropertySchemaID(*v) +} + +func (ec *executionContext) marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx context.Context, sel ast.SelectionSet, v *graphql1.Scene) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Scene(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, v interface{}) (*graphql1.SceneLockMode, error) { + if v == nil { + return nil, nil + } + var res = new(graphql1.SceneLockMode) + err := res.UnmarshalGQL(v) + 
return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v *graphql1.SceneLockMode) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.ScenePlugin) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ScenePlugin(ctx, sel, v) +} + +func (ec *executionContext) marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *graphql1.SceneWidget) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SceneWidget(ctx, sel, v) +} + +func (ec *executionContext) marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSearchedUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.SearchedUser) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SearchedUser(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + return graphql.MarshalString(v) +} + +func (ec *executionContext) unmarshalOString2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { + if v == nil { + return nil, nil + } + var vSlice []interface{} + if v != nil { + if tmp1, ok := v.([]interface{}); ok { + vSlice = tmp1 + } else { + vSlice = []interface{}{v} + } + } + var err error + res := make([]string, len(vSlice)) + 
for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalNString2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalOString2แš•stringแš„(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + for i := range v { + ret[i] = ec.marshalNString2string(ctx, sel, v[i]) + } + + return ret +} + +func (ec *executionContext) unmarshalOString2แš–string(ctx context.Context, v interface{}) (*string, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalString(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOString2แš–string(ctx context.Context, sel ast.SelectionSet, v *string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalString(*v) +} + +func (ec *executionContext) marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v *graphql1.Team) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Team(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx context.Context, v interface{}) (*graphql1.TextAlign, error) { + if v == nil { + return nil, nil + } + var res = new(graphql1.TextAlign) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx context.Context, sel ast.SelectionSet, v *graphql1.TextAlign) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) 
unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, v interface{}) (*graphql1.Theme, error) { + if v == nil { + return nil, nil + } + var res = new(graphql1.Theme) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, sel ast.SelectionSet, v *graphql1.Theme) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) unmarshalOTranslatedString2map(ctx context.Context, v interface{}) (map[string]string, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalMap(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOTranslatedString2map(ctx context.Context, sel ast.SelectionSet, v map[string]string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalMap(v) +} + +func (ec *executionContext) unmarshalOURL2แš–netแš‹urlแšURL(ctx context.Context, v interface{}) (*url.URL, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalURL(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOURL2แš–netแš‹urlแšURL(ctx context.Context, sel ast.SelectionSet, v *url.URL) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalURL(*v) +} + +func (ec *executionContext) unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (*graphql.Upload, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalUpload(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, sel ast.SelectionSet, v *graphql.Upload) graphql.Marshaler { + if v == 
nil { + return graphql.Null + } + return graphql.MarshalUpload(*v) +} + +func (ec *executionContext) marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.User) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._User(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, v interface{}) (*graphql1.ValueType, error) { + if v == nil { + return nil, nil + } + var res = new(graphql1.ValueType) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, sel ast.SelectionSet, v *graphql1.ValueType) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__EnumValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) 
marshalO__Field2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšFieldแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Field) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Field2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalO__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalO__Schema2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšSchema(ctx context.Context, sel ast.SelectionSet, v *introspection.Schema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return 
ec.___Schema(ctx, sel, v) +} + +func (ec *executionContext) marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +// endregion ***************************** type.gotpl ***************************** diff --git a/internal/graphql/resolver.go b/internal/graphql/resolver.go new file mode 100644 index 000000000..383c41969 --- /dev/null +++ b/internal/graphql/resolver.go @@ -0,0 +1,35 @@ +//go:generate go run github.com/99designs/gqlgen + +package graphql + +import ( + "errors" + + graphql "github.com/reearth/reearth-backend/internal/adapter/graphql" +) + +// THIS CODE IS A STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES. 
+
+// Resolver _
+type Resolver struct {
+	config ResolverConfig
+}
+
+// ErrNotImplemented _
+var ErrNotImplemented = errors.New("not implemented yet")
+
+// ErrUnauthorized _
+var ErrUnauthorized = errors.New("unauthorized")
+
+// ResolverConfig _
+type ResolverConfig struct {
+	Controllers *graphql.Container
+	Debug       bool
+}
+
+// NewResolver _
+func NewResolver(config ResolverConfig) ResolverRoot {
+	return &Resolver{
+		config: config,
+	}
+}
diff --git a/internal/graphql/resolver_asset.go b/internal/graphql/resolver_asset.go
new file mode 100644
index 000000000..d3084aec0
--- /dev/null
+++ b/internal/graphql/resolver_asset.go
@@ -0,0 +1,22 @@
+package graphql
+
+import (
+	"context"
+
+	graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql"
+	"github.com/reearth/reearth-backend/internal/graphql/dataloader"
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+func (r *Resolver) Asset() AssetResolver {
+	return &assetResolver{r}
+}
+
+type assetResolver struct{ *Resolver }
+
+func (r *assetResolver) Team(ctx context.Context, obj *graphql1.Asset) (*graphql1.Team, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID))
+}
diff --git a/internal/graphql/resolver_dataset.go b/internal/graphql/resolver_dataset.go
new file mode 100644
index 000000000..b78a1369c
--- /dev/null
+++ b/internal/graphql/resolver_dataset.go
@@ -0,0 +1,76 @@
+package graphql
+
+import (
+	"context"
+
+	graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql"
+	"github.com/reearth/reearth-backend/internal/graphql/dataloader"
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+func (r *Resolver) Dataset() DatasetResolver {
+	return &datasetResolver{r}
+}
+
+func (r *Resolver) DatasetField() DatasetFieldResolver {
+	return &datasetFieldResolver{r}
+}
+
+type datasetResolver struct{ *Resolver }
+
+func (r *datasetResolver) Schema(ctx context.Context, obj *graphql1.Dataset) (*graphql1.DatasetSchema,
error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID))
+}
+
+func (r *datasetResolver) Name(ctx context.Context, obj *graphql1.Dataset) (*string, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	ds, err := dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID))
+	if err != nil || ds == nil || ds.RepresentativeFieldID == nil {
+		return nil, err
+	}
+	f := obj.Field(*ds.RepresentativeFieldID)
+	if f == nil {
+		return nil, nil
+	}
+	if v, ok := f.Value.(string); ok {
+		v2 := &v
+		return v2, nil
+	}
+	return nil, nil
+}
+
+type datasetFieldResolver struct{ *Resolver }
+
+func (r *datasetFieldResolver) Field(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchemaField, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	ds, err := dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID))
+	// Guard against a nil schema (load error or missing doc) before
+	// dereferencing, matching the pattern used in Name above; the original
+	// called ds.Field on a possibly-nil ds and could panic.
+	if err != nil || ds == nil {
+		return nil, err
+	}
+	return ds.Field(obj.FieldID), nil
+}
+
+func (r *datasetFieldResolver) Schema(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchema, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID))
+}
+
+func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.Dataset, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	if obj.Value == nil {
+		return nil, nil
+	}
+	idstr, ok := (obj.Value).(id.ID)
+	if !ok {
+		return nil, nil
+	}
+	return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(idstr))
+}
diff --git a/internal/graphql/resolver_dataset_schema.go b/internal/graphql/resolver_dataset_schema.go
new file mode 100644
index 000000000..054dca1da
--- /dev/null
+++ b/internal/graphql/resolver_dataset_schema.go
@@ -0,0 +1,69 @@
+package graphql
+
+import (
+	"context"
+
+	graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql"
+	
"github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) DatasetSchema() DatasetSchemaResolver { + return &datasetSchemaResolver{r} +} + +func (r *Resolver) DatasetSchemaField() DatasetSchemaFieldResolver { + return &datasetSchemaFieldResolver{r} +} + +type datasetSchemaResolver struct{ *Resolver } + +func (r *datasetSchemaResolver) Scene(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.DatasetSchemaField, error) { + exit := trace(ctx) + defer exit() + + if obj.RepresentativeFieldID == nil { + return nil, nil + } + nf := *obj.RepresentativeFieldID + for _, f := range obj.Fields { + if f.ID == nf { + return f, nil + } + } + return nil, nil +} + +func (r *datasetSchemaResolver) Datasets(ctx context.Context, obj *graphql1.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.FindBySchema(ctx, obj.ID, first, last, before, after, getOperator(ctx)) +} + +type datasetSchemaFieldResolver struct{ *Resolver } + +func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) +} + +func (r *datasetSchemaFieldResolver) Ref(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + if obj.RefID == nil { + return nil, nil + } + return 
dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.RefID)) +} diff --git a/internal/graphql/resolver_layer.go b/internal/graphql/resolver_layer.go new file mode 100644 index 000000000..36a76ef50 --- /dev/null +++ b/internal/graphql/resolver_layer.go @@ -0,0 +1,327 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) LayerItem() LayerItemResolver { + return &layerItemResolver{r} +} + +func (r *Resolver) LayerGroup() LayerGroupResolver { + return &layerGroupResolver{r} +} + +func (r *Resolver) Infobox() InfoboxResolver { + return &infoboxResolver{r} +} + +func (r *Resolver) InfoboxField() InfoboxFieldResolver { + return &infoboxFieldResolver{r} +} + +func (r *Resolver) MergedLayer() MergedLayerResolver { + return &mergedLayerResolver{r} +} + +func (r *Resolver) MergedInfoboxField() MergedInfoboxFieldResolver { + return &mergedInfoboxFieldResolver{r} +} + +type infoboxResolver struct{ *Resolver } + +func (r *infoboxResolver) Property(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} + +func (r *infoboxResolver) Layer(ctx context.Context, obj *graphql1.Infobox) (graphql1.Layer, error) { + exit := trace(ctx) + defer exit() + + layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil || layer == nil { + return nil, err + } + return *layer, nil +} + +func (r *infoboxResolver) LinkedDataset(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return 
dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (r *infoboxResolver) Merged(ctx context.Context, obj *graphql1.Infobox) (*graphql1.MergedInfobox, error) { + exit := trace(ctx) + defer exit() + + ml, err := r.config.Controllers.LayerController.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID), getOperator(ctx)) + if err != nil || ml == nil { + return nil, err + } + return ml.Infobox, nil +} + +type infoboxFieldResolver struct{ *Resolver } + +func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *graphql1.InfoboxField) (graphql1.Layer, error) { + exit := trace(ctx) + defer exit() + + layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil { + return nil, err + } + return *layer, nil +} + +func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Infobox, error) { + exit := trace(ctx) + defer exit() + + layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil || layer == nil { + return nil, err + } + layer2 := (*layer).(*graphql1.LayerItem) + if layer2 == nil { + return nil, nil + } + return layer2.Infobox, nil +} + +func (r *infoboxFieldResolver) Property(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} + +func (r *infoboxFieldResolver) Plugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *infoboxFieldResolver) Extension(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err 
+ } + return plugin.Extension(obj.ExtensionID), nil +} + +func (r *infoboxFieldResolver) LinkedDataset(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.MergedInfoboxField, error) { + exit := trace(ctx) + defer exit() + + ml, err := r.config.Controllers.LayerController.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID), getOperator(ctx)) + if err != nil || ml == nil || ml.Infobox == nil { + return nil, err + } + return ml.Infobox.Field(obj.ID), nil +} + +type layerGroupResolver struct{ *Resolver } + +func (r *layerGroupResolver) Parent(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID != nil { + return dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + } + return r.config.Controllers.LayerController.FetchParent(ctx, id.LayerID(obj.ID), getOperator(ctx)) +} + +func (r *layerGroupResolver) Property(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.PropertyID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) +} + +func (r *layerGroupResolver) Plugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) +} + +func (r *layerGroupResolver) Extension(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil || obj.ExtensionID 
== nil { + return nil, nil + } + plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(*obj.ExtensionID), nil +} + +func (r *layerGroupResolver) ParentLayer(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.FetchParent(ctx, id.LayerID(obj.ID), getOperator(ctx)) +} + +func (r *layerGroupResolver) LinkedDatasetSchema(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetSchemaID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) +} + +func (r *layerGroupResolver) Layers(ctx context.Context, obj *graphql1.LayerGroup) ([]graphql1.Layer, error) { + exit := trace(ctx) + defer exit() + + layers, err := dataloader.DataLoadersFromContext(ctx).Layer.LoadAll(id.LayerIDsFromIDRef(obj.LayerIds)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return graphql1.AttachParentLayer(layers, obj.ID), nil +} + +type layerItemResolver struct{ *Resolver } + +func (r *layerItemResolver) Parent(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID != nil { + return dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + } + return r.config.Controllers.LayerController.FetchParent(ctx, id.LayerID(obj.ID), getOperator(ctx)) +} + +func (r *layerItemResolver) Property(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.PropertyID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) +} + +func (r *layerItemResolver) Plugin(ctx context.Context, obj 
*graphql1.LayerItem) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) +} + +func (r *layerItemResolver) Extension(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil || obj.ExtensionID == nil { + return nil, nil + } + plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(*obj.ExtensionID), nil +} + +func (r *layerItemResolver) LinkedDataset(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (r *layerItemResolver) Merged(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.MergedLayer, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return r.config.Controllers.LayerController.FetchParentAndMerged(ctx, id.LayerID(obj.ID), getOperator(ctx)) + } + return r.config.Controllers.LayerController.FetchMerged(ctx, id.LayerID(obj.ID), id.LayerIDFromRefID(obj.ParentID), getOperator(ctx)) +} + +type mergedLayerResolver struct{ *Resolver } + +func (r *mergedLayerResolver) Original(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerItem, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).LayerItem.Load(id.LayerID(obj.OriginalID)) +} + +func (r *mergedLayerResolver) Parent(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) +} + +type 
mergedInfoboxFieldResolver struct{ *Resolver } + +func (r *mergedInfoboxFieldResolver) Plugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(obj.ExtensionID), nil +} diff --git a/internal/graphql/resolver_mutation.go b/internal/graphql/resolver_mutation.go new file mode 100644 index 000000000..b10d36587 --- /dev/null +++ b/internal/graphql/resolver_mutation.go @@ -0,0 +1,427 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" +) + +func (r *Resolver) Mutation() MutationResolver { + return &mutationResolver{r} +} + +type mutationResolver struct{ *Resolver } + +func (r *mutationResolver) CreateAsset(ctx context.Context, input graphql1.CreateAssetInput) (*graphql1.CreateAssetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.AssetController.Create(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveAsset(ctx context.Context, input graphql1.RemoveAssetInput) (*graphql1.RemoveAssetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.AssetController.Remove(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input graphql1.UpdateDatasetSchemaInput) (*graphql1.UpdateDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.UpdateDatasetSchema(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddDynamicDatasetSchema(ctx 
context.Context, input graphql1.AddDynamicDatasetSchemaInput) (*graphql1.AddDynamicDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.AddDynamicDatasetSchema(ctx, &input) +} + +func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input graphql1.AddDynamicDatasetInput) (*graphql1.AddDynamicDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.AddDynamicDataset(ctx, &input) +} + +func (r *mutationResolver) Signup(ctx context.Context, input graphql1.SignupInput) (*graphql1.SignupPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.UserController.Signup(ctx, &input, getSub(ctx)) +} + +func (r *mutationResolver) UpdateMe(ctx context.Context, input graphql1.UpdateMeInput) (*graphql1.UpdateMePayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.UserController.UpdateMe(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input graphql1.RemoveMyAuthInput) (*graphql1.UpdateMePayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.UserController.RemoveMyAuth(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) DeleteMe(ctx context.Context, input graphql1.DeleteMeInput) (*graphql1.DeleteMePayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.UserController.DeleteMe(ctx, input.UserID, getOperator(ctx)) +} + +func (r *mutationResolver) CreateTeam(ctx context.Context, input graphql1.CreateTeamInput) (*graphql1.CreateTeamPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.TeamController.Create(ctx, &input, getUser(ctx)) +} + +func (r *mutationResolver) DeleteTeam(ctx context.Context, input graphql1.DeleteTeamInput) (*graphql1.DeleteTeamPayload, error) { + exit := trace(ctx) + defer exit() + + return 
r.config.Controllers.TeamController.Remove(ctx, input.TeamID, getOperator(ctx)) +} + +func (r *mutationResolver) UpdateTeam(ctx context.Context, input graphql1.UpdateTeamInput) (*graphql1.UpdateTeamPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.TeamController.Update(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input graphql1.AddMemberToTeamInput) (*graphql1.AddMemberToTeamPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.TeamController.AddMember(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input graphql1.RemoveMemberFromTeamInput) (*graphql1.RemoveMemberFromTeamPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.TeamController.RemoveMember(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdateMemberOfTeam(ctx context.Context, input graphql1.UpdateMemberOfTeamInput) (*graphql1.UpdateMemberOfTeamPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.TeamController.UpdateMember(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) CreateProject(ctx context.Context, input graphql1.CreateProjectInput) (*graphql1.ProjectPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.ProjectController.Create(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdateProject(ctx context.Context, input graphql1.UpdateProjectInput) (*graphql1.ProjectPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.ProjectController.Update(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) PublishProject(ctx context.Context, input graphql1.PublishProjectInput) (*graphql1.ProjectPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.ProjectController.Publish(ctx, &input, getOperator(ctx)) +} + +func (r 
*mutationResolver) DeleteProject(ctx context.Context, input graphql1.DeleteProjectInput) (*graphql1.DeleteProjectPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.ProjectController.Delete(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UploadPlugin(ctx context.Context, input graphql1.UploadPluginInput) (*graphql1.UploadPluginPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PluginController.Upload(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) CreateScene(ctx context.Context, input graphql1.CreateSceneInput) (*graphql1.CreateScenePayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.Create(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddWidget(ctx context.Context, input graphql1.AddWidgetInput) (*graphql1.AddWidgetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.AddWidget(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdateWidget(ctx context.Context, input graphql1.UpdateWidgetInput) (*graphql1.UpdateWidgetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.UpdateWidget(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveWidget(ctx context.Context, input graphql1.RemoveWidgetInput) (*graphql1.RemoveWidgetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.RemoveWidget(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) InstallPlugin(ctx context.Context, input graphql1.InstallPluginInput) (*graphql1.InstallPluginPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.InstallPlugin(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UninstallPlugin(ctx context.Context, input graphql1.UninstallPluginInput) (*graphql1.UninstallPluginPayload, 
error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.UninstallPlugin(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpgradePlugin(ctx context.Context, input graphql1.UpgradePluginInput) (*graphql1.UpgradePluginPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.UpgradePlugin(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) SyncDataset(ctx context.Context, input graphql1.SyncDatasetInput) (*graphql1.SyncDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.Sync(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input graphql1.UpdatePropertyValueInput) (*graphql1.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.UpdateValue(ctx, + input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, input.Value, input.Type, getOperator(ctx)) +} + +func (r *mutationResolver) UpdatePropertyValueLatLng(ctx context.Context, input graphql1.UpdatePropertyValueLatLngInput) (*graphql1.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.UpdateValue(ctx, + input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.LatLng{ + Lat: input.Lat, + Lng: input.Lng, + }, graphql1.ValueTypeLatlng, getOperator(ctx)) +} + +func (r *mutationResolver) UpdatePropertyValueLatLngHeight(ctx context.Context, input graphql1.UpdatePropertyValueLatLngHeightInput) (*graphql1.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.UpdateValue(ctx, + input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.LatLngHeight{ + Lat: input.Lat, + Lng: input.Lng, + Height: input.Height, + }, graphql1.ValueTypeLatlngheight, getOperator(ctx)) +} + +func (r 
*mutationResolver) UpdatePropertyValueCamera(ctx context.Context, input graphql1.UpdatePropertyValueCameraInput) (*graphql1.PropertyFieldPayload, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return r.config.Controllers.PropertyController.UpdateValue(ctx,
+		input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.Camera{
+			Lat:      input.Lat,
+			Lng:      input.Lng,
+			Altitude: input.Altitude,
+			Heading:  input.Heading,
+			Pitch:    input.Pitch,
+			Roll:     input.Roll,
+			Fov:      input.Fov,
+		}, graphql1.ValueTypeCamera, getOperator(ctx))
+}
+
+func (r *mutationResolver) UpdatePropertyValueTypography(ctx context.Context, input graphql1.UpdatePropertyValueTypographyInput) (*graphql1.PropertyFieldPayload, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return r.config.Controllers.PropertyController.UpdateValue(ctx,
+		input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.Typography{
+			FontFamily: input.FontFamily,
+			FontSize:   input.FontSize,
+			FontWeight: input.FontWeight,
+			Color:      input.Color,
+			TextAlign:  input.TextAlign,
+			Bold:       input.Bold,
+			Italic:     input.Italic,
+			Underline:  input.Underline,
+		}, graphql1.ValueTypeTypography, getOperator(ctx))
+}
+
+func (r *mutationResolver) RemovePropertyField(ctx context.Context, input graphql1.RemovePropertyFieldInput) (*graphql1.PropertyFieldPayload, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return r.config.Controllers.PropertyController.RemoveField(ctx, &input, getOperator(ctx))
+}
+
+func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input graphql1.UploadFileToPropertyInput) (*graphql1.PropertyFieldPayload, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return r.config.Controllers.PropertyController.UploadFile(ctx, &input, getOperator(ctx))
+}
+
+func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input graphql1.LinkDatasetToPropertyValueInput) (*graphql1.PropertyFieldPayload, error) {
+	exit := trace(ctx)
+	defer exit()
+
+	return 
r.config.Controllers.PropertyController.LinkValue(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input graphql1.UnlinkPropertyValueInput) (*graphql1.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.UnlinkValue(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddPropertyItem(ctx context.Context, input graphql1.AddPropertyItemInput) (*graphql1.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.AddItem(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) MovePropertyItem(ctx context.Context, input graphql1.MovePropertyItemInput) (*graphql1.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.MoveItem(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input graphql1.RemovePropertyItemInput) (*graphql1.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.RemoveItem(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input graphql1.UpdatePropertyItemInput) (*graphql1.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PropertyController.UpdateItems(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddLayerItem(ctx context.Context, input graphql1.AddLayerItemInput) (*graphql1.AddLayerItemPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.AddItem(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddLayerGroup(ctx context.Context, input graphql1.AddLayerGroupInput) (*graphql1.AddLayerGroupPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.AddGroup(ctx, 
&input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveLayer(ctx context.Context, input graphql1.RemoveLayerInput) (*graphql1.RemoveLayerPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.Remove(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) UpdateLayer(ctx context.Context, input graphql1.UpdateLayerInput) (*graphql1.UpdateLayerPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.Update(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) MoveLayer(ctx context.Context, input graphql1.MoveLayerInput) (*graphql1.MoveLayerPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.Move(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) CreateInfobox(ctx context.Context, input graphql1.CreateInfoboxInput) (*graphql1.CreateInfoboxPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.CreateInfobox(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveInfobox(ctx context.Context, input graphql1.RemoveInfoboxInput) (*graphql1.RemoveInfoboxPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.RemoveInfobox(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddInfoboxField(ctx context.Context, input graphql1.AddInfoboxFieldInput) (*graphql1.AddInfoboxFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.AddInfoboxField(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input graphql1.MoveInfoboxFieldInput) (*graphql1.MoveInfoboxFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.MoveInfoboxField(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input 
graphql1.RemoveInfoboxFieldInput) (*graphql1.RemoveInfoboxFieldPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.RemoveInfoboxField(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input graphql1.RemoveDatasetSchemaInput) (*graphql1.RemoveDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.RemoveDatasetSchema(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input graphql1.AddDatasetSchemaInput) (*graphql1.AddDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.AddDatasetSchema(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) ImportLayer(ctx context.Context, input graphql1.ImportLayerInput) (*graphql1.ImportLayerPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.LayerController.ImportLayer(ctx, &input, getOperator(ctx)) +} + +func (r *mutationResolver) ImportDataset(ctx context.Context, input graphql1.ImportDatasetInput) (*graphql1.ImportDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.ImportDataset(ctx, &input, getOperator(ctx)) +} diff --git a/internal/graphql/resolver_plugin.go b/internal/graphql/resolver_plugin.go new file mode 100644 index 000000000..50d5a0249 --- /dev/null +++ b/internal/graphql/resolver_plugin.go @@ -0,0 +1,92 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Plugin() PluginResolver { + return &pluginResolver{r} +} + +func (r *Resolver) PluginExtension() PluginExtensionResolver { + return &pluginExtensionResolver{r} +} + +type pluginResolver 
struct{ *Resolver } + +func (r *pluginResolver) PropertySchema(ctx context.Context, obj *graphql1.Plugin) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + if obj.PropertySchemaID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.PropertySchemaID) +} + +func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *graphql1.Plugin, sceneID id.ID) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(sceneID)) + return s.Plugin(obj.ID), err +} + +func (r *pluginResolver) TranslatedName(ctx context.Context, obj *graphql1.Plugin, lang *string) (string, error) { + if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + +func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *graphql1.Plugin, lang *string) (string, error) { + if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Description, nil +} + +type pluginExtensionResolver struct{ *Resolver } + +func (r *pluginExtensionResolver) Plugin(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.PropertySchemaID) +} + +func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) { + if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + +func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *graphql1.PluginExtension, 
sceneID id.ID) (*graphql1.SceneWidget, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(sceneID)) + return s.Widget(obj.PluginID, obj.ExtensionID), err +} + +func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Description, nil +} diff --git a/internal/graphql/resolver_project.go b/internal/graphql/resolver_project.go new file mode 100644 index 000000000..60f2472e6 --- /dev/null +++ b/internal/graphql/resolver_project.go @@ -0,0 +1,34 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Project() ProjectResolver { + return &projectResolver{r} +} + +type projectResolver struct{ *Resolver } + +func (r *projectResolver) Team(ctx context.Context, obj *graphql1.Project) (*graphql1.Team, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) +} + +func (r *projectResolver) Scene(ctx context.Context, obj *graphql1.Project) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + s, err := r.config.Controllers.SceneController.FindByProject(ctx, id.ProjectID(obj.ID), getOperator(ctx)) + if err != nil && err != err1.ErrNotFound { + return nil, err + } + return s, nil +} diff --git a/internal/graphql/resolver_property.go b/internal/graphql/resolver_property.go new file mode 100644 index 000000000..72f6ca1cf --- /dev/null +++ b/internal/graphql/resolver_property.go @@ -0,0 +1,542 @@ +package graphql + +import ( + "context" + "errors" + + graphql1 
"github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Property() PropertyResolver { + return &propertyResolver{r} +} + +func (r *Resolver) PropertyField() PropertyFieldResolver { + return &propertyFieldResolver{r} +} + +func (r *Resolver) PropertySchemaField() PropertySchemaFieldResolver { + return &propertySchemaFieldResolver{r} +} + +func (r *Resolver) PropertySchemaFieldChoice() PropertySchemaFieldChoiceResolver { + return &propertySchemaFieldChoiceResolver{r} +} + +func (r *Resolver) PropertyFieldLink() PropertyFieldLinkResolver { + return &propertyFieldLinkResolver{r} +} + +func (r *Resolver) PropertyLinkableFields() PropertyLinkableFieldsResolver { + return &propertyLinkableFieldsResolver{r} +} + +func (r *Resolver) MergedProperty() MergedPropertyResolver { + return &mergedPropertyResolver{r} +} + +func (r *Resolver) MergedPropertyGroup() MergedPropertyGroupResolver { + return &mergedPropertyGroupResolver{r} +} + +func (r *Resolver) MergedPropertyField() MergedPropertyFieldResolver { + return &mergedPropertyFieldResolver{r} +} + +func (r *Resolver) PropertyGroupList() PropertyGroupListResolver { + return &propertyGroupListResolver{r} +} + +func (r *Resolver) PropertyGroup() PropertyGroupResolver { + return &propertyGroupResolver{r} +} + +func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { + return &propertySchemaGroupResolver{r} +} + +type propertyResolver struct{ *Resolver } + +func (r *propertyResolver) Schema(ctx context.Context, obj *graphql1.Property) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyResolver) Layer(ctx context.Context, obj *graphql1.Property) (graphql1.Layer, error) { + exit := trace(ctx) + 
defer exit() + + l, err := r.config.Controllers.LayerController.FetchByProperty(ctx, id.PropertyID(obj.ID), getOperator(ctx)) + if err != nil || errors.Is(err, err1.ErrNotFound) { + return nil, nil + } + return l, err +} + +func (r *propertyResolver) Merged(ctx context.Context, obj *graphql1.Property) (*graphql1.MergedProperty, error) { + exit := trace(ctx) + defer exit() + + l, err := r.config.Controllers.LayerController.FetchByProperty(ctx, id.PropertyID(obj.ID), getOperator(ctx)) + if err != nil { + if errors.Is(err, err1.ErrNotFound) { + return nil, nil + } + return nil, err + } + li, ok := l.(*graphql1.LayerItem) + if !ok { + return nil, nil + } + merged, err := r.LayerItem().Merged(ctx, li) + if err != nil { + return nil, err + } + if merged == nil { + return nil, nil + } + if merged.Property != nil && merged.Property.OriginalID != nil && *merged.Property.OriginalID == obj.ID { + return merged.Property, nil + } else if merged.Infobox != nil && merged.Infobox.Property != nil && merged.Infobox.Property.OriginalID != nil && *merged.Infobox.Property.OriginalID == obj.ID { + return merged.Infobox.Property, nil + } + return nil, nil +} + +type propertyFieldResolver struct{ *Resolver } +type propertySchemaFieldResolver struct{ *Resolver } +type propertySchemaFieldChoiceResolver struct{ *Resolver } + +func (r *propertyFieldResolver) Parent(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.ParentID)) +} + +func (r *propertyFieldResolver) Schema(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyFieldResolver) Field(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchemaField, error) { + exit := trace(ctx) + defer exit() + + 
schema, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + if err != nil { + return nil, err + } + return schema.Field(obj.FieldID), nil +} + +func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *graphql1.PropertyField) (interface{}, error) { + exit := trace(ctx) + defer exit() + + datasetLoader := dataloader.DataLoadersFromContext(ctx).Dataset + return actualValue(datasetLoader, obj.Value, obj.Links) +} + +func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + +// deprecated +func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + +func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Description, nil +} + +type propertyFieldLinkResolver struct{ *Resolver } + +func (r *propertyFieldLinkResolver) Dataset(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.DatasetID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) +} + +func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetField, error) { + exit := trace(ctx) + defer exit() + + if obj.DatasetID == nil { + return nil, nil + } + d, err := 
dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + if err != nil { + return nil, err + } + return d.Field(obj.DatasetSchemaFieldID), nil +} + +func (r *propertyFieldLinkResolver) DatasetSchema(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) +} + +func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchemaField, error) { + exit := trace(ctx) + defer exit() + + ds, err := dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + return ds.Field(obj.DatasetSchemaFieldID), err +} + +type propertyLinkableFieldsResolver struct{ *Resolver } + +func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { + exit := trace(ctx) + defer exit() + + if obj.Latlng == nil { + return nil, nil + } + ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return ps.Field(*obj.Latlng), err +} + +func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { + exit := trace(ctx) + defer exit() + + if obj.URL == nil { + return nil, nil + } + ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return ps.Field(*obj.URL), err +} + +type mergedPropertyResolver struct{ *Resolver } + +func (r *mergedPropertyResolver) Original(ctx context.Context, obj 
// Schema resolves the property schema of a merged property.
//
// When obj.SchemaID is known it is loaded directly. Otherwise the schema is
// found indirectly: the underlying property reported by obj.PropertyID()
// (presumably the original or parent property — confirm against
// MergedProperty) is loaded and its SchemaID is used. Returns (nil, nil)
// when neither a schema ID nor a property ID is available.
func (r *mergedPropertyResolver) Schema(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.PropertySchema, error) {
	exit := trace(ctx)
	defer exit()

	if obj.SchemaID == nil {
		if propertyID := obj.PropertyID(); propertyID != nil {
			property, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID))
			if err != nil {
				return nil, err
			}
			if property == nil {
				return nil, nil
			}
			return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID)
		}
		return nil, nil
	}
	return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID)
}
*Resolver } + +func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.OriginalID == nil || obj.OriginalPropertyID == nil { + return nil, nil + } + p, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + if err != nil { + return nil, err + } + if i, ok := p.Item(*obj.OriginalID).(*graphql1.PropertyGroup); ok { + return i, nil + } + return nil, nil +} + +func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil || obj.ParentPropertyID == nil { + return nil, nil + } + p, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + if err != nil { + return nil, err + } + if i, ok := p.Item(*obj.ParentID).(*graphql1.PropertyGroup); ok { + return i, nil + } + return nil, nil +} + +func (r *mergedPropertyGroupResolver) OriginalProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.OriginalID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) +} + +func (r *mergedPropertyGroupResolver) ParentProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) +} + +func (r *mergedPropertyGroupResolver) Schema(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + if obj.SchemaID == nil { + if propertyID := obj.PropertyID(); propertyID != nil { + property, err := 
dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) + if err != nil { + return nil, err + } + if property == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) + } + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) +} + +func (r *mergedPropertyGroupResolver) LinkedDataset(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +type mergedPropertyFieldResolver struct{ *Resolver } + +func (r *mergedPropertyFieldResolver) Schema(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *mergedPropertyFieldResolver) Field(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchemaField, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return s.Field(obj.FieldID), err +} + +func (r *mergedPropertyFieldResolver) ActualValue(ctx context.Context, obj *graphql1.MergedPropertyField) (interface{}, error) { + exit := trace(ctx) + defer exit() + + datasetLoader := dataloader.DataLoadersFromContext(ctx).Dataset + return actualValue(datasetLoader, obj.Value, obj.Links) +} + +type propertyGroupListResolver struct{ *Resolver } + +func (*propertyGroupListResolver) Schema(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (*propertyGroupListResolver) SchemaGroup(ctx 
context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchemaGroup, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + if err != nil { + return nil, err + } + return s.Group(obj.SchemaGroupID), nil +} + +type propertyGroupResolver struct{ *Resolver } + +func (*propertyGroupResolver) Schema(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (*propertyGroupResolver) SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchemaGroup, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + if err != nil { + return nil, err + } + return s.Group(obj.SchemaGroupID), nil +} + +func actualValue(datasetLoader dataloader.DatasetDataLoader, value interface{}, links []*graphql1.PropertyFieldLink) (interface{}, error) { + if len(links) == 0 { + return &value, nil + } + // ๅ…ˆ้ ญใฎใƒชใƒณใ‚ฏใซใ—ใ‹DatasetใŒๅ‰ฒใ‚Šๅฝ“ใฆใ‚‰ใ‚Œใฆใ„ใชใ„โ†’ๅ…ˆ้ ญใ‹ใ‚‰้ †ใ€…ใซ่พฟใฃใฆใ„ใ + if len(links) > 1 && links[0].DatasetID != nil && links[len(links)-1].DatasetID == nil { + dsid := *links[0].DatasetID + for i, link := range links { + ds, err := datasetLoader.Load(id.DatasetID(dsid)) + if err != nil { + return nil, err + } + field := ds.Field(link.DatasetSchemaFieldID) + if field != nil { + if i == len(links)-1 { + return &value, nil + } else if field.Type != graphql1.ValueTypeRef { + return nil, nil + } + if field.Value != nil { + val, ok := (field.Value).(id.ID) + if ok { + dsid = val + } else { + return nil, nil + } + } else { + return nil, nil + } + } + } + } else if lastLink := links[len(links)-1]; lastLink.DatasetID != nil { + // ไธ€็•ชๆœ€ๅพŒใฎใƒชใƒณใ‚ฏใ‚’ๅ–ๅพ— + ds, err := 
datasetLoader.Load(id.DatasetID(*lastLink.DatasetID)) + if err != nil { + return nil, err + } + if f := ds.Field(lastLink.DatasetSchemaFieldID); f != nil { + return &f.Value, nil + } + } + return nil, nil +} + +type propertySchemaGroupResolver struct{ *Resolver } + +func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *graphql1.PropertySchemaGroup) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaGroup, lang *string) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + t := obj.Title + return *t, nil +} + +func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Label, nil +} + +// deprecated +func (r *propertySchemaFieldChoiceResolver) TranslatedLabel(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { + if s, ok := obj.AllTranslatedLabel[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Label, nil +} diff --git a/internal/graphql/resolver_query.go b/internal/graphql/resolver_query.go new file mode 100644 index 000000000..8a4d07f33 --- /dev/null +++ b/internal/graphql/resolver_query.go @@ -0,0 +1,319 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Query() QueryResolver { + return &queryResolver{r} +} + +type queryResolver struct{ *Resolver } + +func (r *queryResolver) Assets(ctx 
context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.AssetController.FindByTeam(ctx, teamID, first, last, before, after, getOperator(ctx)) +} + +func (r *queryResolver) Me(ctx context.Context) (*graphql1.User, error) { + exit := trace(ctx) + defer exit() + + u := getUser(ctx) + if u == nil { + return nil, nil + } + return graphql1.ToUser(u), nil +} + +func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg graphql1.NodeType) (graphql1.Node, error) { + exit := trace(ctx) + defer exit() + + dataloaders := dataloader.DataLoadersFromContext(ctx) + switch typeArg { + case graphql1.NodeTypeDataset: + result, err := dataloaders.Dataset.Load(id.DatasetID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeDatasetSchema: + result, err := dataloaders.DatasetSchema.Load(id.DatasetSchemaID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeLayerItem: + result, err := dataloaders.LayerItem.Load(id.LayerID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeLayerGroup: + result, err := dataloaders.LayerGroup.Load(id.LayerID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeProject: + result, err := dataloaders.Project.Load(id.ProjectID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeProperty: + result, err := dataloaders.Property.Load(id.PropertyID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeScene: + result, err := dataloaders.Scene.Load(id.SceneID(i)) + if result == nil { + return nil, nil + } + return result, err + case graphql1.NodeTypeTeam: + result, err := dataloaders.Team.Load(id.TeamID(i)) + if result == nil { + return nil, nil + } + return result, err + case 
graphql1.NodeTypeUser: + result, err := dataloaders.User.Load(id.UserID(i)) + if result == nil { + return nil, nil + } + return result, err + } + return nil, nil +} + +func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg graphql1.NodeType) ([]graphql1.Node, error) { + exit := trace(ctx) + defer exit() + + dataloaders := dataloader.DataLoadersFromContext(ctx) + switch typeArg { + case graphql1.NodeTypeDataset: + data, err := dataloaders.Dataset.LoadAll(id.DatasetIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case graphql1.NodeTypeDatasetSchema: + data, err := dataloaders.DatasetSchema.LoadAll(id.DatasetSchemaIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case graphql1.NodeTypeLayerItem: + data, err := dataloaders.LayerItem.LoadAll(id.LayerIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = *data[i] + } + return nodes, nil + case graphql1.NodeTypeLayerGroup: + data, err := dataloaders.LayerGroup.LoadAll(id.LayerIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = *data[i] + } + return nodes, nil + case graphql1.NodeTypeProject: + data, err := dataloaders.Project.LoadAll(id.ProjectIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case graphql1.NodeTypeProperty: + data, err := dataloaders.Property.LoadAll(id.PropertyIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + 
nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case graphql1.NodeTypeScene: + data, err := dataloaders.Scene.LoadAll(id.SceneIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case graphql1.NodeTypeTeam: + data, err := dataloaders.Team.LoadAll(id.TeamIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case graphql1.NodeTypeUser: + data, err := dataloaders.User.LoadAll(id.UserIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]graphql1.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + default: + return nil, nil + } +} + +func (r *queryResolver) PropertySchema(ctx context.Context, i id.PropertySchemaID) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(i) +} + +func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertySchemaID) ([]*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + ids2 := make([]id.PropertySchemaID, 0, len(ids)) + for _, i := range ids { + if i != nil { + ids2 = append(ids2, *i) + } + } + + data, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.LoadAll(ids2) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + + return data, nil +} + +func (r *queryResolver) Plugin(ctx context.Context, id id.PluginID) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(id) +} + +func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*graphql1.Plugin, error) { + exit := trace(ctx) + 
defer exit() + + ids2 := make([]id.PluginID, 0, len(ids)) + for _, i := range ids { + if i != nil { + ids2 = append(ids2, *i) + } + } + + data, err := dataloader.DataLoadersFromContext(ctx).Plugin.LoadAll(ids2) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + + return data, nil +} + +func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (graphql1.Layer, error) { + exit := trace(ctx) + defer exit() + + dataloaders := dataloader.DataLoadersFromContext(ctx) + result, err := dataloaders.Layer.Load(id.LayerID(layerID)) + if result == nil || *result == nil { + return nil, nil + } + return *result, err +} + +func (r *queryResolver) Scene(ctx context.Context, projectID id.ID) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.SceneController.FindByProject(ctx, id.ProjectID(projectID), getOperator(ctx)) +} + +func (r *queryResolver) Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.ProjectController.FindByTeam(ctx, id.TeamID(teamID), first, last, before, after, getOperator(ctx)) +} + +func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.FindSchemaByScene(ctx, sceneID, first, last, before, after, getOperator(ctx)) +} + +func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*graphql1.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.FindDynamicSchemasByScene(ctx, sceneID) +} + +func (r *queryResolver) Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before 
// SceneLock returns the current lock mode of the given scene, fetched
// through the scene controller with the request's operator.
func (r *queryResolver) SceneLock(ctx context.Context, sceneID id.ID) (*graphql1.SceneLockMode, error) {
	exit := trace(ctx)
	defer exit()

	return r.config.Controllers.SceneController.FetchLock(ctx, id.SceneID(sceneID), getOperator(ctx))
}
+func (r *sceneResolver) Team(ctx context.Context, obj *graphql1.Scene) (*graphql1.Team, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) +} + +func (r *sceneResolver) Property(ctx context.Context, obj *graphql1.Scene) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} + +func (r *sceneResolver) RootLayer(ctx context.Context, obj *graphql1.Scene) (*graphql1.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.RootLayerID)) + if err != nil { + return nil, err + } + if layer == nil { + return nil, nil + } + layerGroup, ok := (*layer).(*graphql1.LayerGroup) + if !ok { + return nil, nil + } + return layerGroup, nil +} + +func (r *sceneResolver) DatasetSchemas(ctx context.Context, obj *graphql1.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.FindSchemaByScene(ctx, obj.ID, first, last, before, after, getOperator(ctx)) +} + +func (r *sceneResolver) LockMode(ctx context.Context, obj *graphql1.Scene) (graphql1.SceneLockMode, error) { + exit := trace(ctx) + defer exit() + + sl, err := r.config.Controllers.SceneController.FetchLock(ctx, id.SceneID(obj.ID), getOperator(ctx)) + if err != nil { + return graphql1.SceneLockModeFree, err + } + return *sl, nil +} + +type scenePluginResolver struct{ *Resolver } + +func (r *scenePluginResolver) Plugin(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} +func (r *scenePluginResolver) Property(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Property, error) { 
+ exit := trace(ctx) + defer exit() + + if obj.PropertyID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) +} + +type sceneWidgetResolver struct{ *Resolver } + +func (r *sceneWidgetResolver) Plugin(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Plugin, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + for _, e := range plugin.Extensions { + if e.ExtensionID == obj.ExtensionID { + return e, nil + } + } + return nil, nil +} + +func (r *sceneWidgetResolver) Property(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Property, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} diff --git a/internal/graphql/resolver_team.go b/internal/graphql/resolver_team.go new file mode 100644 index 000000000..e53d0e476 --- /dev/null +++ b/internal/graphql/resolver_team.go @@ -0,0 +1,43 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Team() TeamResolver { + return &teamResolver{r} +} + +func (r *Resolver) TeamMember() TeamMemberResolver { + return &teamMemberResolver{r} +} + +type teamResolver struct{ *Resolver } + +func (r *teamResolver) Assets(ctx context.Context, obj *graphql1.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, 
// User resolves the user entity behind this team membership entry via the
// User dataloader.
func (r *teamMemberResolver) User(ctx context.Context, obj *graphql1.TeamMember) (*graphql1.User, error) {
	exit := trace(ctx)
	defer exit()

	return dataloader.DataLoadersFromContext(ctx).User.Load(id.UserID(obj.UserID))
}
type tracerKeyStruct struct{}

var tracerKey = tracerKeyStruct{}

// Tracer accumulates timing spans keyed by name; safe for concurrent use.
type Tracer struct {
	Spans sync.Map // string -> []*span
}

// span records one timed region in nanoseconds.
type span struct {
	Name      string
	StartedAt int64
	EndedAt   int64
}

// AddSpan appends s to the bucket for its name. A nil receiver is a no-op;
// a bucket holding an unexpected type is replaced.
func (t *Tracer) AddSpan(s *span) {
	if t == nil {
		return
	}

	bucket := []*span{s}
	if prev, ok := t.Spans.Load(s.Name); ok {
		if prevSpans, ok := prev.([]*span); ok {
			bucket = append(prevSpans, s)
		}
	}
	t.Spans.Store(s.Name, bucket)
}

// Print writes a per-name min/max/avg summary to stdout, slowest average
// first. A nil receiver is a no-op.
func (t *Tracer) Print() {
	if t == nil {
		return
	}

	type result struct {
		Name  string
		Max   int64
		Min   int64
		Avr   float64
		Count int
	}
	var results []result

	t.Spans.Range(func(key, value interface{}) bool {
		name := key.(string)
		ss := value.([]*span)

		var max, min, sum int64
		for i, s := range ss {
			d := s.Duration()
			sum += d
			if i == 0 || d > max {
				max = d
			}
			if i == 0 || d < min {
				min = d
			}
		}

		results = append(results, result{
			Name:  name,
			Max:   max,
			Min:   min,
			Avr:   float64(sum) / float64(len(ss)),
			Count: len(ss),
		})
		return true
	})

	sort.Slice(results, func(i, j int) bool {
		return results[i].Avr > results[j].Avr
	})

	println("\nGraphQL tracing --------------------------------")
	for _, r := range results {
		if r.Count == 1 {
			fmt.Printf("%s: %.2fms\n", r.Name, float64(r.Min)/1000000.0)
		} else {
			fmt.Printf("%s: %.2f~%.2fms (avr:%.2fms) (%d)\n", r.Name, float64(r.Min)/1000000.0, float64(r.Max)/1000000.0, r.Avr/1000000.0, r.Count)
		}
	}
	println("------------------------------------------------\n")
}

// Start stamps the span's begin time.
func (s *span) Start() { s.StartedAt = time.Now().UnixNano() }

// End stamps the span's end time.
func (s *span) End() { s.EndedAt = time.Now().UnixNano() }

// Duration returns the elapsed nanoseconds between Start and End.
func (s *span) Duration() int64 { return s.EndedAt - s.StartedAt }
*Tracer) context.Context { + return context.WithValue(ctx, tracerKey, t) +} + +func ExitTracer(ctx context.Context) { + getTracer(ctx).Print() +} + +func getTracer(ctx context.Context) *Tracer { + if t, ok := ctx.Value(tracerKey).(*Tracer); ok { + return t + } + return nil +} + +func trace(ctx context.Context) func() { + t := getTracer(ctx) + fc := graphql.GetFieldContext(ctx) + + name := fc.Field.Name + if object := fc.Field.ObjectDefinition; object != nil { + name = object.Name + "." + name + } + + s := &span{ + Name: name, + } + s.Start() + t.AddSpan(s) + + return func() { + s.End() + } +} diff --git a/internal/infrastructure/adapter/plugin.go b/internal/infrastructure/adapter/plugin.go new file mode 100644 index 000000000..f38b6cf67 --- /dev/null +++ b/internal/infrastructure/adapter/plugin.go @@ -0,0 +1,59 @@ +package adapter + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +// TODO: ใ“ใ“ใงๅน…ๅ„ชๅ…ˆๆŽข็ดขใ—ใฆใ„ใใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ๆ›ธใ„ใฆmongoใ‹ใ‚‰ใƒ“ใƒซใƒˆใ‚คใƒณใฎๆคœ็ดขใƒญใ‚ธใƒƒใ‚ฏใ‚’้™คๅŽปใ™ใ‚‹ +type pluginRepo struct { + readers []repo.Plugin + writer repo.Plugin +} + +// NewPlugin generates a new repository which has fallback repositories to be used when the plugin is not found +func NewPlugin(readers []repo.Plugin, writer repo.Plugin) repo.Plugin { + return &pluginRepo{ + readers: append([]repo.Plugin{}, readers...), + writer: writer, + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { + for _, re := range r.readers { + if res, err := re.FindByID(ctx, id); err != nil { + if errors.Is(err, err1.ErrNotFound) { + continue + } else { + return nil, err + } + } else { + return res, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *pluginRepo) FindByIDs(ctx context.Context, ids 
[]id.PluginID) ([]*plugin.Plugin, error) { + results := make([]*plugin.Plugin, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil && err != err1.ErrNotFound { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { + if r.writer == nil { + return errors.New("cannot write") + } + return r.writer.Save(ctx, p) +} diff --git a/internal/infrastructure/adapter/property_schema.go b/internal/infrastructure/adapter/property_schema.go new file mode 100644 index 000000000..c06d800b6 --- /dev/null +++ b/internal/infrastructure/adapter/property_schema.go @@ -0,0 +1,66 @@ +package adapter + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +// TODO: ใ“ใ“ใงๅน…ๅ„ชๅ…ˆๆŽข็ดขใ—ใฆใ„ใใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ๆ›ธใ„ใฆmongoใ‹ใ‚‰ใƒ“ใƒซใƒˆใ‚คใƒณใฎๆคœ็ดขใƒญใ‚ธใƒƒใ‚ฏใ‚’้™คๅŽปใ™ใ‚‹ +type propertySchema struct { + readers []repo.PropertySchema + writer repo.PropertySchema +} + +// NewPropertySchema generates a new repository which has fallback repositories to be used when the property schema is not found +func NewPropertySchema(readers []repo.PropertySchema, writer repo.PropertySchema) repo.PropertySchema { + return &propertySchema{ + readers: append([]repo.PropertySchema{}, readers...), + writer: writer, + } +} + +func (r *propertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + for _, re := range r.readers { + if res, err := re.FindByID(ctx, id); err != nil { + if errors.Is(err, err1.ErrNotFound) { + continue + } else { + return nil, err + } + } else { + return res, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) 
(property.SchemaList, error) { + results := make(property.SchemaList, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil && err != err1.ErrNotFound { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { + if r.writer == nil { + return err1.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.Save(ctx, p) +} + +func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + if r.writer == nil { + return err1.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.SaveAll(ctx, p) +} diff --git a/internal/infrastructure/auth0/authenticator.go b/internal/infrastructure/auth0/authenticator.go new file mode 100644 index 000000000..541ab8d6d --- /dev/null +++ b/internal/infrastructure/auth0/authenticator.go @@ -0,0 +1,223 @@ +package auth0 + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/log" +) + +type Auth0 struct { + domain string + client *http.Client + clientID string + clientSecret string + token string + expireAt time.Time + lock sync.Mutex + current func() time.Time +} + +type response struct { + ID string `json:"user_id"` + Name string `json:"name"` + UserName string `json:"username"` + Email string `json:"email"` + EmailVerified bool `json:"email_verified"` + Message string `json:"string"` + Token string `json:"access_token"` + ExpiresIn int64 `json:"expires_in"` +} + +func currentTime() time.Time { + return time.Now() +} + +func (u response) Into() gateway.AuthenticatorUser { + name := u.UserName + if name == "" { + name = u.Name + } + + return gateway.AuthenticatorUser{ + ID: u.ID, + Name: name, + Email: u.Email, + EmailVerified: u.EmailVerified, + } +} + +func New(domain, clientID, clientSecret string) 
*Auth0 { + return &Auth0{ + domain: addPathSep(domain), + clientID: clientID, + clientSecret: clientSecret, + } +} + +func (a *Auth0) FetchUser(id string) (data gateway.AuthenticatorUser, err error) { + err = a.updateToken() + if err != nil { + return + } + + var r response + r, err = a.exec(http.MethodGet, "api/v2/users/"+id, a.token, nil) + if err != nil { + log.Errorf("auth0: fetch user: %s", err) + err = fmt.Errorf("failed to auth") + return + } + data = r.Into() + return +} + +func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway.AuthenticatorUser, err error) { + err = a.updateToken() + if err != nil { + return + } + + payload := map[string]string{} + if p.Name != nil { + payload["name"] = *p.Name + } + if p.Email != nil { + payload["email"] = *p.Email + } + if p.Password != nil { + payload["password"] = *p.Password + } + if len(payload) == 0 { + err = errors.New("nothing is updated") + return + } + + var r response + r, err = a.exec(http.MethodPatch, "api/v2/users/"+p.ID, a.token, payload) + if err != nil { + log.Errorf("auth0: update user: %s", err) + err = fmt.Errorf("failed to update user") + return + } + + data = r.Into() + return +} + +func (a *Auth0) needsFetchToken() bool { + if a == nil { + return false + } + if a.current == nil { + a.current = currentTime + } + return a.expireAt.IsZero() || a.expireAt.Sub(a.current()) <= time.Hour +} + +func (a *Auth0) updateToken() error { + if a == nil || !a.needsFetchToken() { + return nil + } + + if a.clientID == "" || a.clientSecret == "" || a.domain == "" { + return errors.New("auth0 is not set up") + } + + a.lock.Lock() + defer a.lock.Unlock() + + if !a.needsFetchToken() { + return nil + } + + r, err := a.exec(http.MethodPost, "oauth/token", "", map[string]string{ + "client_id": a.clientID, + "client_secret": a.clientSecret, + "audience": a.domain + "api/v2/", + "grant_type": "client_credentials", + }) + if err != nil { + return err + } + + if a.current == nil { + a.current = 
currentTime + } + + a.token = r.Token + a.expireAt = a.current().Add(time.Duration(r.ExpiresIn * int64(time.Second))) + + return nil +} + +func (a *Auth0) exec(method, path, token string, b interface{}) (r response, err error) { + if a == nil || a.domain == "" { + err = errors.New("auth0: domain is not set") + return + } + if a.client == nil { + a.client = http.DefaultClient + } + + var body io.Reader = nil + if b != nil { + if b2, ok := b.([]byte); ok { + body = bytes.NewReader(b2) + } else { + var b2 []byte + b2, err = json.Marshal(b) + if err != nil { + return + } + body = bytes.NewReader(b2) + } + } + + var req *http.Request + req, err = http.NewRequest(method, a.domain+path, body) + if err != nil { + return + } + + req.Header.Set("Content-Type", "application/json") + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + + resp, err := a.client.Do(req) + if err != nil { + return + } + + defer func() { + _ = resp.Body.Close() + }() + + err = json.NewDecoder(resp.Body).Decode(&r) + if err != nil { + return + } + if resp.StatusCode >= 300 { + err = errors.New(r.Message) + return + } + return +} + +func addPathSep(path string) string { + if path == "" { + return path + } + if path[len(path)-1] != '/' { + path += "/" + } + return path +} diff --git a/internal/infrastructure/auth0/authenticator_test.go b/internal/infrastructure/auth0/authenticator_test.go new file mode 100644 index 000000000..fc73ac568 --- /dev/null +++ b/internal/infrastructure/auth0/authenticator_test.go @@ -0,0 +1,185 @@ +package auth0 + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "strings" + "testing" + "time" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/stretchr/testify/assert" +) + +const ( + token = "a" + clientID = "b" + clientSecret = "c" + domain = "https://reearth-dev.auth0.com/" + userID = "x" + expiresIn = 24 * 60 * 60 + userName = "d" + userEmail = "e" +) + +var ( + current = time.Date(2020, time.April, 1, 0, 0, 0, 
// TestAddPathSep verifies trailing-slash normalization of the Auth0 domain.
func TestAddPathSep(t *testing.T) {
	assert.Equal(t, "a/", addPathSep("a"))
	assert.Equal(t, "a/", addPathSep("a/"))
}

// TestAuth0 drives the full token-refresh and user fetch/update flow against
// the mocked HTTP transport built by client(t). The clock is injected via
// a.current so token expiry can be controlled deterministically.
func TestAuth0(t *testing.T) {
	a := New(domain, clientID, clientSecret)
	a.client = client(t) // inject mock
	a.current = func() time.Time { return current }

	// first call must fetch a token and cache it for expiresIn seconds
	assert.True(t, a.needsFetchToken())
	assert.NoError(t, a.updateToken())
	assert.Equal(t, token, a.token)
	assert.Equal(t, current.Add(time.Second*expiresIn), a.expireAt)
	assert.False(t, a.needsFetchToken())

	// moving the clock within one hour of expiry forces a refresh
	a.current = func() time.Time { return current2 }
	assert.True(t, a.needsFetchToken())
	a.current = func() time.Time { return current }

	r, err := a.FetchUser(userID)
	assert.NoError(t, err)
	assert.Equal(t, gateway.AuthenticatorUser{
		ID:            userID,
		Email:         userEmail,
		EmailVerified: true,
		Name:          userName,
	}, r)

	// unknown user ID -> mock returns 404 -> error
	_, err = a.FetchUser(token)
	assert.Error(t, err)

	newEmail := "xxxxx"
	r, err = a.UpdateUser(gateway.AuthenticatorUpdateUserParam{
		ID:    userID,
		Email: &newEmail,
	})
	assert.NoError(t, err)
	assert.Equal(t, gateway.AuthenticatorUser{
		ID:            userID,
		Email:         newEmail,
		EmailVerified: true,
		Name:          userName,
	}, r)

	// a stale clock triggers a token refresh before the (failing) fetch
	a.current = func() time.Time { return current2 }
	_, err = a.FetchUser(token + "a")
	assert.Error(t, err)
	assert.Equal(t, current2.Add(time.Second*expiresIn), a.expireAt)
}

// res wraps a JSON-marshalable value into a response body.
func res(i interface{}) io.ReadCloser {
	b, _ := json.Marshal(i)
	return io.NopCloser(bytes.NewBuffer(b))
}

// client builds an *http.Client whose transport fakes the Auth0 endpoints:
// token grant, user fetch, and user patch. Unauthorized bearer tokens get a
// 401, unknown routes a 404.
func client(t *testing.T) *http.Client {
	t.Helper()

	return &http.Client{
		Transport: RoundTripFunc(func(req *http.Request) *http.Response {
			p := req.URL.Path
			var body map[string]string
			if req.Body != nil {
				_ = json.NewDecoder(req.Body).Decode(&body)
			}

			// client-credentials token grant
			if req.Method == http.MethodPost && p == "/oauth/token" {
				assert.Equal(t, domain+"api/v2/", body["audience"])
				assert.Equal(t, "client_credentials", body["grant_type"])
				assert.Equal(t, clientID, body["client_id"])
				assert.Equal(t, clientSecret, body["client_secret"])
				return &http.Response{
					StatusCode: http.StatusOK,
					Body: res(map[string]interface{}{
						"access_token": token,
						"expires_in":   expiresIn,
					}),
					Header: make(http.Header),
				}
			}

			// fetch user
			if req.Method == http.MethodGet && p == "/api/v2/users/"+userID {
				tok := strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ")
				if token != tok {
					return &http.Response{
						StatusCode: http.StatusUnauthorized,
						Body: res(map[string]interface{}{
							"message": "Unauthorized",
						}),
						Header: make(http.Header),
					}
				}

				return &http.Response{
					StatusCode: http.StatusOK,
					Body: res(map[string]interface{}{
						"user_id":        userID,
						"username":       userName,
						"email":          userEmail,
						"email_verified": true,
					}),
					Header: make(http.Header),
				}
			}

			// patch user: echoes back overridden username/email
			if req.Method == http.MethodPatch && p == "/api/v2/users/"+userID {
				tok := strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ")
				if token != tok {
					return &http.Response{
						StatusCode: http.StatusUnauthorized,
						Body: res(map[string]interface{}{
							"message": "Unauthorized",
						}),
						Header: make(http.Header),
					}
				}

				username := userName
				email := userEmail
				if body["username"] != "" {
					username = body["username"]
				}
				if body["email"] != "" {
					email = body["email"]
				}
				return &http.Response{
					StatusCode: http.StatusOK,
					Body: res(map[string]interface{}{
						"user_id":        userID,
						"username":       username,
						"email":          email,
						"email_verified": true,
					}),
					Header: make(http.Header),
				}
			}

			return &http.Response{
				StatusCode: http.StatusNotFound,
				Body: res(map[string]interface{}{
					"message": "Not found",
				}),
				Header: make(http.Header),
			}
		}),
	}
}

// RoundTripFunc adapts a function to http.RoundTripper for transport mocking.
type RoundTripFunc func(req *http.Request) *http.Response

func (f RoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
	return f(req), nil
}
b/internal/infrastructure/fs/archive.go new file mode 100644 index 000000000..66727f418 --- /dev/null +++ b/internal/infrastructure/fs/archive.go @@ -0,0 +1,115 @@ +package fs + +import ( + "os" + "path" + "strings" + + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" +) + +type archive struct { + p string + files []string + counter int + name string + size int64 + fi *os.File +} + +// NewArchive _ +func NewArchive(p string) (file.Archive, error) { + bp := strings.TrimSuffix(p, "/") + files, size, err := dirwalk(bp, "", 0) + if err != nil { + if os.IsNotExist(err) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return &archive{ + p: bp, + files: files, + counter: 0, + name: path.Base(p), + size: size, + }, nil +} + +// Next _ +func (a *archive) Next() (f *file.File, derr error) { + if len(a.files) <= a.counter { + return nil, file.EOF + } + next := a.files[a.counter] + a.counter++ + fi, err := os.Open(path.Join(a.p, next)) + if err != nil { + derr = err1.ErrInternalBy(err) + return + } + stat, err := fi.Stat() + if err != nil { + derr = err1.ErrInternalBy(err) + return + } + + f = &file.File{ + Content: fi, + Name: stat.Name(), + Fullpath: strings.TrimPrefix(next, a.p+"/"), + Size: stat.Size(), + } + return +} + +// Close _ +func (a *archive) Close() error { + if a.fi != nil { + if err := a.fi.Close(); err != nil { + return err1.ErrInternalBy(err) + } + a.fi = nil + } + return nil +} + +// Name _ +func (a *archive) Name() string { + return a.name +} + +// Size _ +func (a *archive) Size() int64 { + return a.size +} + +func dirwalk(dir string, base string, size int64) ([]string, int64, error) { + files, err := os.ReadDir(dir) + if err != nil { + return []string{}, 0, err + } + + var paths []string + for _, file := range files { + if file.IsDir() { + fname := file.Name() + dfiles, dsize, err := dirwalk(path.Join(dir, fname), path.Join(base, fname), size) + if err != nil { + return 
[]string{}, 0, err + } + paths = append(paths, dfiles...) + size += dsize + continue + } + paths = append(paths, path.Join(base, file.Name())) + fileInfo, err := file.Info() + if err != nil { + return []string{}, 0, err + } + size += fileInfo.Size() + } + + return paths, size, nil +} diff --git a/internal/infrastructure/fs/common.go b/internal/infrastructure/fs/common.go new file mode 100644 index 000000000..cecff5fb6 --- /dev/null +++ b/internal/infrastructure/fs/common.go @@ -0,0 +1,50 @@ +package fs + +import ( + "net/url" + "path" + "strings" + + "github.com/reearth/reearth-backend/pkg/id" +) + +const ( + manifestFilePath = "reearth.json" + assetDir = "assets" + pluginDir = "plugins" + publishedDir = "published" +) + +func getPluginFilePath(base string, pluginID id.PluginID, filename string) string { + return path.Join(base, pluginDir, pluginID.Name(), pluginID.Version().String(), filename) +} + +func getAssetFilePath(base string, filename string) string { + return path.Join(base, assetDir, filename) +} + +func getPublishedDataFilePath(base, name string) string { + return path.Join(base, publishedDir, name+".json") +} + +func getAssetFileURL(base *url.URL, filename string) *url.URL { + if base == nil { + return nil + } + + b := *base + b.Path = path.Join(b.Path, filename) + return &b +} + +func getAssetFilePathFromURL(base string, u *url.URL) string { + if u == nil { + return "" + } + p := strings.Split(u.Path, "/") + if len(p) == 0 { + return "" + } + f := p[len(p)-1] + return getAssetFilePath(base, f) +} diff --git a/internal/infrastructure/fs/common_test.go b/internal/infrastructure/fs/common_test.go new file mode 100644 index 000000000..3cc762b73 --- /dev/null +++ b/internal/infrastructure/fs/common_test.go @@ -0,0 +1,22 @@ +package fs + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetAssetFileURL(t *testing.T) { + e, err := url.Parse("http://hoge.com/assets/xxx.yyy") + assert.NoError(t, err) + b, err := 
url.Parse("http://hoge.com/assets") + assert.NoError(t, err) + assert.Equal(t, e, getAssetFileURL(b, "xxx.yyy")) +} + +func TestGetAssetFilePathFromURL(t *testing.T) { + u, err := url.Parse("http://hoge.com/assets/xxx.yyy") + assert.NoError(t, err) + assert.Equal(t, "a/assets/xxx.yyy", getAssetFilePathFromURL("a", u)) +} diff --git a/internal/infrastructure/fs/file.go b/internal/infrastructure/fs/file.go new file mode 100644 index 000000000..ca4d83c5d --- /dev/null +++ b/internal/infrastructure/fs/file.go @@ -0,0 +1,236 @@ +package fs + +import ( + "context" + "errors" + "io" + "net/url" + "os" + "path" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type fileRepo struct { + basePath string + urlBase *url.URL +} + +func NewFile(basePath, urlBase string) (gateway.File, error) { + var b *url.URL + var err error + b, err = url.Parse(urlBase) + if err != nil { + return nil, errors.New("invalid base URL") + } + + return &fileRepo{ + basePath: basePath, + urlBase: b, + }, nil +} + +func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error) { + filename := getAssetFilePath(f.basePath, name) + file, err := os.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return file, nil +} + +func (f *fileRepo) ReadPluginFile(ctx context.Context, id id.PluginID, p string) (io.Reader, error) { + filename := getPluginFilePath(f.basePath, id, p) + file, err := os.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return file, nil +} + +func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Reader, error) { + filename := 
getPublishedDataFilePath(f.basePath, name) + file, err := os.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return file, nil +} + +func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { + if f == nil || f.urlBase == nil { + return nil, errors.New("cannot upload asset because of url lack") + } + if file == nil { + return nil, gateway.ErrInvalidFile + } + + base := path.Join(f.basePath, "assets") + err := os.MkdirAll(base, 0755) + if err != nil { + return nil, gateway.ErrFailedToUploadFile + // return nil, repo.ErrFailedToUploadFile.CausedBy(err) + } + + // calc checksum + // hasher := sha256.New() + // tr := io.TeeReader(file.Content, hasher) + // checksum := hex.EncodeToString(hasher.Sum(nil)) + + id := id.New().String() + filename := id + path.Ext(file.Name) + name := getAssetFilePath(f.basePath, filename) + + dest, err2 := os.Create(name) + if err2 != nil { + return nil, gateway.ErrFailedToUploadFile + // return nil, repo.ErrFailedToUploadFile.CausedBy(err2) + } + defer func() { + _ = dest.Close() + }() + if _, err := io.Copy(dest, file.Content); err != nil { + return nil, gateway.ErrFailedToUploadFile + // return nil, repo.ErrFailedToUploadFile.CausedBy(err) + } + + return getAssetFileURL(f.urlBase, filename), nil +} + +func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { + if u == nil { + return gateway.ErrInvalidFile + } + + p := getAssetFilePathFromURL(f.basePath, u) + if p != "" { + if err := os.Remove(p); err != nil { + if os.IsNotExist(err) { + return nil + } + return gateway.ErrFailedToRemoveFile + } + } + + return nil +} + +func (f *fileRepo) UploadAndExtractPluginFiles(ctx context.Context, archive file.Archive, plugin *plugin.Plugin) (*url.URL, error) { + defer func() { + _ = archive.Close() + }() + base := getPluginFilePath(f.basePath, plugin.ID(), "") + url, _ := url.Parse(base) + + for { + err := func() error 
{ + f, err := archive.Next() + if errors.Is(err, file.EOF) { + return err + } + name := path.Join(base, f.Fullpath) + fbase := path.Dir(name) + err2 := os.MkdirAll(fbase, 0755) + if err2 != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err2) + } + dest, err2 := os.Create(name) + if err2 != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err2) + } + defer func() { + _ = dest.Close() + }() + if _, err := io.Copy(dest, f.Content); err != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err) + } + return nil + }() + + if errors.Is(err, file.EOF) { + break + } + if err != nil { + return nil, err + } + } + + return url, nil +} + +func (f *fileRepo) UploadBuiltScene(ctx context.Context, reader io.Reader, name string) error { + filename := getPublishedDataFilePath(f.basePath, name) + err := os.MkdirAll(path.Dir(filename), 0755) + if err != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err) + } + + dest, err2 := os.Create(filename) + if err2 != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err2) + } + defer func() { + _ = dest.Close() + }() + if _, err := io.Copy(dest, reader); err != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err) + } + + return nil +} + +func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { + if oldName == name { + return nil + } + + filename := getPublishedDataFilePath(f.basePath, oldName) + newfilename := getPublishedDataFilePath(f.basePath, name) + err := os.MkdirAll(path.Dir(newfilename), 0755) + if err != nil { + return gateway.ErrFailedToUploadFile + // return repo.ErrFailedToUploadFile.CausedBy(err) + } + + if err := os.Rename( + filename, + newfilename, + ); err != nil { + if errors.Is(err, os.ErrNotExist) { + return err1.ErrNotFound + } + 
return err1.ErrInternalBy(err) + } + + return nil +} + +func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { + filename := getPublishedDataFilePath(f.basePath, name) + if err := os.Remove(filename); err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil + } + return err1.ErrInternalBy(err) + } + return nil +} diff --git a/internal/infrastructure/fs/plugin.go b/internal/infrastructure/fs/plugin.go new file mode 100644 index 000000000..1f49c99a3 --- /dev/null +++ b/internal/infrastructure/fs/plugin.go @@ -0,0 +1,65 @@ +package fs + +import ( + "context" + "errors" + "os" + "path" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" +) + +type pluginRepo struct { + basePath string +} + +func NewPlugin(basePath string) repo.Plugin { + return &pluginRepo{ + basePath: basePath, + } +} + +func (r *pluginRepo) manifest(ctx context.Context, id id.PluginID) string { + return path.Join(getPluginFilePath(r.basePath, id, manifestFilePath)) +} + +func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { + filename := r.manifest(ctx, id) + if _, err := os.Stat(filename); err != nil { + return nil, err1.ErrNotFound + } + file, err := os.Open(filename) + if err != nil { + return nil, err1.ErrInternalBy(err) + } + defer func() { + _ = file.Close() + }() + + m, err := manifest.Parse(file) + if err != nil { + return nil, err1.ErrInternalBy(err) + } + + return m.Plugin, nil +} + +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + results := make([]*plugin.Plugin, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r 
*pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { + return err1.ErrInternalBy(errors.New("read only")) +} diff --git a/internal/infrastructure/fs/plugin_repository.go b/internal/infrastructure/fs/plugin_repository.go new file mode 100644 index 000000000..a0ce34f23 --- /dev/null +++ b/internal/infrastructure/fs/plugin_repository.go @@ -0,0 +1,62 @@ +package fs + +import ( + "context" + "errors" + "path" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" +) + +type pluginRepository struct { + basePath string +} + +func NewPluginRepository(basePath string) gateway.PluginRepository { + return &pluginRepository{ + basePath: basePath, + } +} + +func (r *pluginRepository) Data(ctx context.Context, id id.PluginID) (file.Archive, error) { + return r.getArchive(id) +} + +func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manifest.Manifest, error) { + archive, err := r.getArchive(id) + if err != nil { + return nil, err + } + + defer func() { + _ = archive.Close() + }() + + for { + f, err := archive.Next() + if errors.Is(err, file.EOF) { + break + } + if err != nil { + return nil, err1.ErrInternalBy(err) + } + if f.Fullpath == manifestFilePath { + m, err := manifest.Parse(f.Content) + if err != nil { + return nil, err + } + return m, nil + } + } + return nil, manifest.ErrFailedToParseManifest +} + +func (r *pluginRepository) getArchive(id id.PluginID) (file.Archive, error) { + return NewArchive( + path.Join(r.basePath, id.Name()+"_"+id.Version().String()), + ) +} diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go new file mode 100644 index 000000000..7d6d15c8a --- /dev/null +++ b/internal/infrastructure/fs/property_schema.go @@ -0,0 +1,85 @@ +package fs + +import ( 
+ "context" + "errors" + "os" + "path" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/property" +) + +type propertySchema struct { + basePath string +} + +func NewPropertySchema(basePath string) repo.PropertySchema { + return &propertySchema{ + basePath: basePath, + } +} + +func (r *propertySchema) manifest(id id.PluginID) string { + return path.Join(getPluginFilePath(r.basePath, id, manifestFilePath)) +} + +func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (*property.Schema, error) { + pid, err := id.PluginIDFrom(i.Plugin()) + if err != nil { + return nil, err1.ErrNotFound + } + filename := r.manifest(pid) + if _, err := os.Stat(filename); err != nil { + return nil, err1.ErrNotFound + } + file, err2 := os.Open(filename) + if err2 != nil { + return nil, err1.ErrInternalBy(err2) + } + defer func() { + _ = file.Close() + }() + + m, err := manifest.Parse(file) + if err != nil { + return nil, err1.ErrInternalBy(err) + } + + if m.Schema != nil && m.Schema.ID() == i { + return m.Schema, nil + } + for _, ps := range m.ExtensionSchema { + if ps == nil { + continue + } + if ps.ID() == i { + return ps, nil + } + } + + return nil, err1.ErrNotFound +} + +func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + results := make(property.SchemaList, 0, len(ids)) + for _, id := range ids { + res, err := r.FindByID(ctx, id) + if err != nil { + return nil, err + } + results = append(results, res) + } + return results, nil +} + +func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { + return err1.ErrInternalBy(errors.New("read only")) +} + +func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + return 
err1.ErrInternalBy(errors.New("read only")) +} diff --git a/internal/infrastructure/gcs/file.go b/internal/infrastructure/gcs/file.go new file mode 100644 index 000000000..fab52a81f --- /dev/null +++ b/internal/infrastructure/gcs/file.go @@ -0,0 +1,326 @@ +package gcs + +import ( + "context" + "errors" + "fmt" + "io" + "net/url" + "path" + "strings" + + "cloud.google.com/go/storage" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +const ( + gcsAssetBasePath string = "assets" + gcsPluginBasePath string = "plugins" + gcsMapBasePath string = "maps" + fileSizeLimit int64 = 1024 * 1024 * 100 // about 100MB +) + +type fileRepo struct { + bucketName string + base *url.URL + cacheControl string +} + +func NewFile(bucketName, base string, cacheControl string) (gateway.File, error) { + if bucketName == "" { + return nil, errors.New("bucket name is empty") + } + + var u *url.URL + if base == "" { + base = fmt.Sprintf("https://storage.googleapis.com/%s", bucketName) + } + + var err error + u, _ = url.Parse(base) + if err != nil { + return nil, errors.New("invalid base URL") + } + + return &fileRepo{ + bucketName: bucketName, + base: u, + cacheControl: cacheControl, + }, nil +} + +func (f *fileRepo) bucket(ctx context.Context) (*storage.BucketHandle, error) { + client, err := storage.NewClient(ctx) + if err != nil { + return nil, err + } + bucket := client.Bucket(f.bucketName) + return bucket, err +} + +func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error) { + if name == "" { + return nil, err1.ErrNotFound + } + + p := path.Join(gcsAssetBasePath, name) + bucket, err := f.bucket(ctx) + if err != nil { + return nil, err + } + log.Infof("gcs: read asset from gs://%s/%s", f.bucketName, p) + 
reader, err := bucket.Object(p).NewReader(ctx) + if err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return reader, nil +} + +func (f *fileRepo) ReadPluginFile(ctx context.Context, plugin id.PluginID, name string) (io.Reader, error) { + if name == "" { + return nil, err1.ErrNotFound + } + + p := path.Join(gcsPluginBasePath, plugin.Name(), plugin.Version().String(), name) + bucket, err := f.bucket(ctx) + if err != nil { + return nil, err + } + log.Infof("gcs: read plugin from gs://%s/%s", f.bucketName, p) + reader, err := bucket.Object(p).NewReader(ctx) + if err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return reader, nil +} + +func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Reader, error) { + if name == "" { + return nil, err1.ErrNotFound + } + + p := path.Join(gcsMapBasePath, name+".json") + bucket, err := f.bucket(ctx) + if err != nil { + return nil, err + } + + log.Infof("gcs: read scene from gs://%s/%s", f.bucketName, p) + reader, err := bucket.Object(p).NewReader(ctx) + if err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil, err1.ErrNotFound + } + return nil, err1.ErrInternalBy(err) + } + return reader, nil +} + +func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { + if file == nil { + return nil, gateway.ErrInvalidFile + } + if file.Size >= fileSizeLimit { + return nil, gateway.ErrFileTooLarge + } + + bucket, err := f.bucket(ctx) + if err != nil { + return nil, err + } + + // calc checksum + // hasher := sha256.New() + // tr := io.TeeReader(file.Content, hasher) + // checksum := hex.EncodeToString(hasher.Sum(nil)) + + id := id.New().String() + filename := id + path.Ext(file.Name) + name := path.Join(gcsAssetBasePath, filename) + objectURL := getGCSObjectURL(f.base, name) + if objectURL == 
nil { + return nil, gateway.ErrInvalidFile + } + + object := bucket.Object(name) + _, err = object.Attrs(ctx) + if !errors.Is(err, storage.ErrObjectNotExist) { + log.Errorf("gcs: err=%+v\n", err) + return nil, gateway.ErrFailedToUploadFile + } + + writer := object.NewWriter(ctx) + if _, err := io.Copy(writer, file.Content); err != nil { + log.Errorf("gcs: err=%+v\n", err) + return nil, gateway.ErrFailedToUploadFile + } + if err := writer.Close(); err != nil { + log.Errorf("gcs: err=%+v\n", err) + return nil, gateway.ErrFailedToUploadFile + } + + return objectURL, nil +} + +func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { + if u == nil { + return gateway.ErrInvalidFile + } + name := getGCSObjectNameFromURL(f.base, u) + if name == "" { + return gateway.ErrInvalidFile + } + bucket, err := f.bucket(ctx) + if err != nil { + return err + } + object := bucket.Object(name) + if err := object.Delete(ctx); err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil + } + return err1.ErrInternalBy(err) + } + return nil +} + +func (f *fileRepo) UploadAndExtractPluginFiles(ctx context.Context, archive file.Archive, plugin *plugin.Plugin) (*url.URL, error) { + defer func() { + _ = archive.Close() + }() + + basePath := path.Join(gcsPluginBasePath, plugin.ID().Name(), plugin.Version().String()) + objectURL := getGCSObjectURL(f.base, basePath) + if objectURL == nil { + return nil, gateway.ErrInvalidFile + } + + for { + ff, err := archive.Next() + if errors.Is(err, file.EOF) { + break + } + bucket, err := f.bucket(ctx) + if err != nil { + return nil, err + } + name := path.Join(basePath, ff.Fullpath) + object := bucket.Object(name) + _, err2 := object.Attrs(ctx) + if errors.Is(err2, storage.ErrBucketNotExist) { + return nil, gateway.ErrFailedToUploadFile + } else if !errors.Is(err2, storage.ErrObjectNotExist) { + // does not overwrite + continue + } + + writer := object.NewWriter(ctx) + if _, err := io.Copy(writer, ff.Content); err != nil { + 
log.Errorf("gcs: err=%+v\n", err) + return nil, gateway.ErrFailedToUploadFile + } + if err := writer.Close(); err != nil { + log.Errorf("gcs: err=%+v\n", err) + return nil, gateway.ErrFailedToUploadFile + } + } + + return objectURL, nil +} + +func getGCSObjectURL(base *url.URL, objectName string) *url.URL { + if base == nil { + return nil + } + b := *base + b.Path = path.Join(b.Path, objectName) + return &b +} + +func getGCSObjectNameFromURL(base, u *url.URL) string { + if u == nil { + return "" + } + bp := "" + if base != nil { + bp = base.Path + } + return strings.TrimPrefix(strings.TrimPrefix(u.Path, bp), "/") +} + +func (f *fileRepo) UploadBuiltScene(ctx context.Context, reader io.Reader, name string) error { + filename := path.Join(gcsMapBasePath, name+".json") + bucket, err := f.bucket(ctx) + if err != nil { + return err + } + object := bucket.Object(filename) + + if err := object.Delete(ctx); err != nil && !errors.Is(err, storage.ErrObjectNotExist) { + log.Errorf("gcs: err=%+v\n", err) + return gateway.ErrFailedToUploadFile + } + + writer := object.NewWriter(ctx) + writer.ObjectAttrs.CacheControl = f.cacheControl + + if _, err := io.Copy(writer, reader); err != nil { + log.Errorf("gcs: err=%+v\n", err) + return gateway.ErrFailedToUploadFile + } + + if err := writer.Close(); err != nil { + log.Errorf("gcs: err=%+v\n", err) + return gateway.ErrFailedToUploadFile + } + + return nil +} + +func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { + oldFilename := path.Join(gcsMapBasePath, oldName+".json") + filename := path.Join(gcsMapBasePath, name+".json") + bucket, err := f.bucket(ctx) + if err != nil { + return err + } + object := bucket.Object(oldFilename) + destObject := bucket.Object(filename) + if _, err := destObject.CopierFrom(object).Run(ctx); err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return err1.ErrNotFound + } + return err1.ErrInternalBy(err) + } + if err := object.Delete(ctx); err != nil { + return 
err1.ErrInternalBy(err) + } + return nil +} + +func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { + filename := path.Join(gcsMapBasePath, name+".json") + bucket, err := f.bucket(ctx) + if err != nil { + return err + } + object := bucket.Object(filename) + if err := object.Delete(ctx); err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return nil + } + return err1.ErrInternalBy(err) + } + return nil +} diff --git a/internal/infrastructure/gcs/file_test.go b/internal/infrastructure/gcs/file_test.go new file mode 100644 index 000000000..b5687d54e --- /dev/null +++ b/internal/infrastructure/gcs/file_test.go @@ -0,0 +1,24 @@ +package gcs + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetGCSObjectURL(t *testing.T) { + e, err := url.Parse("https://hoge.com/assets/xxx.yyy") + assert.NoError(t, err) + b, err := url.Parse("https://hoge.com/assets") + assert.NoError(t, err) + assert.Equal(t, e, getGCSObjectURL(b, "xxx.yyy")) +} + +func TestGetGCSObjectNameFromURL(t *testing.T) { + u, err := url.Parse("https://hoge.com/assets/xxx.yyy") + assert.NoError(t, err) + b, err := url.Parse("https://hoge.com") + assert.NoError(t, err) + assert.Equal(t, "assets/xxx.yyy", getGCSObjectNameFromURL(b, u)) +} diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go new file mode 100644 index 000000000..e528ebb31 --- /dev/null +++ b/internal/infrastructure/memory/asset.go @@ -0,0 +1,64 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Asset struct { + data map[id.AssetID]*asset.Asset +} + +func NewAsset() repo.Asset { + return &Asset{ + data: map[id.AssetID]*asset.Asset{}, + } +} + +func (r *Asset) 
FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + d, ok := r.data[id] + if ok { + return d, nil + } + return &asset.Asset{}, err1.ErrNotFound +} + +func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { + r.data[a.ID()] = a + return nil +} + +func (r *Asset) Remove(ctx context.Context, id id.AssetID) error { + delete(r.data, id) + return nil +} + +func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + result := []*asset.Asset{} + for _, d := range r.data { + if d.Team() == id { + result = append(result, d) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} diff --git a/internal/infrastructure/memory/config.go b/internal/infrastructure/memory/config.go new file mode 100644 index 000000000..d88f03496 --- /dev/null +++ b/internal/infrastructure/memory/config.go @@ -0,0 +1,25 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/config" +) + +type Config struct { + data *config.Config +} + +func NewConfig() repo.Config { + return &Config{} +} + +func (r *Config) Load(ctx context.Context) (*config.Config, error) { + return r.data, nil +} + +func (r *Config) Save(ctx context.Context, c *config.Config) error { + r.data = c + return nil +} diff --git a/internal/infrastructure/memory/container.go b/internal/infrastructure/memory/container.go new file mode 100644 index 000000000..e5ad118ca --- /dev/null +++ b/internal/infrastructure/memory/container.go @@ -0,0 +1,35 @@ +package memory + +import ( + "context" + + 
"github.com/reearth/reearth-backend/internal/usecase/repo" +) + +// InitRepos _ +func InitRepos(c *repo.Container, dummy bool) *repo.Container { + if c == nil { + c = &repo.Container{} + } + // not supported: File, PluginRepository + c.Asset = NewAsset() + c.Config = NewConfig() + c.DatasetSchema = NewDatasetSchema() + c.Dataset = NewDataset() + c.Layer = NewLayer() + c.Plugin = NewPlugin() + c.Project = NewProject() + c.PropertySchema = NewPropertySchema() + c.Property = NewProperty() + c.Scene = NewScene() + c.Team = NewTeam() + c.User = NewUser() + c.SceneLock = NewSceneLock() + c.Transaction = NewTransaction() + + if dummy { + generateDummyData(context.Background(), c) + } + + return c +} diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go new file mode 100644 index 000000000..77d871003 --- /dev/null +++ b/internal/infrastructure/memory/dataset.go @@ -0,0 +1,133 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Dataset struct { + data map[id.DatasetID]dataset.Dataset +} + +func NewDataset() repo.Dataset { + return &Dataset{ + data: map[id.DatasetID]dataset.Dataset{}, + } +} + +func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID, f []id.SceneID) (*dataset.Dataset, error) { + p, ok := r.data[id] + if ok && isSceneIncludes(p.Scene(), f) { + return &p, nil + } + return nil, err1.ErrNotFound +} + +func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { + result := dataset.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if isSceneIncludes(d.Scene(), f) { + result = append(result, &d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + +func 
(r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, f []id.SceneID, p *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + result := dataset.List{} + for _, d := range r.data { + if d.Schema() == id && isSceneIncludes(d.Scene(), f) { + dd := d + result = append(result, &dd) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) (dataset.List, error) { + result := dataset.List{} + for _, d := range r.data { + if d.Schema() == id { + dd := d + result = append(result, &dd) + } + } + return result, nil +} + +func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, f []id.SceneID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { + result := make(dataset.List, 0, len(fields)) + next := i + for _, nextField := range fields { + d, _ := r.FindByID(ctx, next, f) + if d != nil { + result = append(result, d) + if f := d.Field(nextField); f != nil { + if f.Type() == dataset.ValueTypeRef { + if l := f.Value().ValueRef(); l != nil { + next = id.DatasetID(*l) + continue + } + } + } + } + } + return result, nil +} + +func (r *Dataset) Save(ctx context.Context, d *dataset.Dataset) error { + r.data[d.ID()] = *d + return nil +} + +func (r *Dataset) SaveAll(ctx context.Context, dl dataset.List) error { + for _, d := range dl { + r.data[d.ID()] = *d + } + return nil +} + +func (r *Dataset) Remove(ctx context.Context, id id.DatasetID) error { + delete(r.data, id) + return nil +} + +func (r *Dataset) RemoveAll(ctx context.Context, ids []id.DatasetID) error { + for _, id := range ids { + delete(r.data, id) + } + return nil +} + +func (r *Dataset) 
RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + for did, d := range r.data { + if d.Scene() == sceneID { + delete(r.data, did) + } + } + return nil +} diff --git a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go new file mode 100644 index 000000000..be3772606 --- /dev/null +++ b/internal/infrastructure/memory/dataset_schema.go @@ -0,0 +1,142 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetSchema struct { + data map[id.DatasetSchemaID]dataset.Schema +} + +func NewDatasetSchema() repo.DatasetSchema { + return &DatasetSchema{ + data: map[id.DatasetSchemaID]dataset.Schema{}, + } +} + +func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID, f []id.SceneID) (*dataset.Schema, error) { + p, ok := r.data[id] + if ok { + return &p, nil + } + return nil, err1.ErrNotFound +} + +func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { + result := dataset.SchemaList{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + d2 := d + result = append(result, &d2) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) { + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s { + d2 := d + result = append(result, &d2) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor 
= &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *DatasetSchema) FindBySceneAll(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s { + d2 := d + result = append(result, &d2) + } + } + return result, nil +} + +func (r *DatasetSchema) FindAllDynamicByScene(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s && d.Dynamic() { + d2 := d + result = append(result, &d2) + } + } + return result, nil +} + +func (r *DatasetSchema) FindDynamicByID(ctx context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { + p, ok := r.data[id] + if ok && p.Dynamic() { + return &p, nil + } + return nil, err1.ErrNotFound +} + +func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src dataset.Source) (dataset.SchemaList, error) { + result := dataset.SchemaList{} + for _, d := range r.data { + if d.Scene() == s && d.Source() == src { + d2 := d + result = append(result, &d2) + } + } + return result, nil +} + +func (r *DatasetSchema) Save(ctx context.Context, d *dataset.Schema) error { + r.data[d.ID()] = *d + return nil +} + +func (r *DatasetSchema) SaveAll(ctx context.Context, dl dataset.SchemaList) error { + for _, d := range dl { + r.data[d.ID()] = *d + } + return nil +} + +func (r *DatasetSchema) Remove(ctx context.Context, id id.DatasetSchemaID) error { + delete(r.data, id) + return nil +} + +func (r *DatasetSchema) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) error { + for _, id := range ids { + delete(r.data, id) + } + return nil +} + +func (r *DatasetSchema) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + for did, d := range r.data { + if d.Scene() == sceneID { + delete(r.data, did) + } + } + return nil +} diff --git 
a/internal/infrastructure/memory/dummy.go b/internal/infrastructure/memory/dummy.go new file mode 100644 index 000000000..2f05d885d --- /dev/null +++ b/internal/infrastructure/memory/dummy.go @@ -0,0 +1,73 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +var ( + DummySceneID = id.MustSceneID("01d7yt9zdyb74v2bvx7r5pkp32") + DummyUserID = id.MustUserID("01d7yt9zdyb74v2bvx76vw0jfj") +) + +func generateDummyData(ctx context.Context, c *repo.Container) { + // team + team, _ := user.NewTeam().NewID().Members(map[id.UserID]user.Role{ + DummyUserID: user.RoleOwner, + }).Build() + _ = c.Team.Save(ctx, team) + // user + user, _ := user.New(). + ID(DummyUserID). + Name("dummy"). + Email("dummy@dummy.com"). + Team(team.ID()). + Build() + _ = c.User.Save(ctx, user) + // project + projectID, _ := id.NewIDWith("01d7yt9zdyb74v2bvx7hwq41v1") + prj, _ := project.New(). + ID(id.ProjectID(projectID)). + Team(team.ID()). + Visualizer(visualizer.VisualizerCesium). + Build() + _ = c.Project.Save(ctx, prj) + // scene's property + sceneProperty, _ := property.New(). + NewID(). + Schema(builtin.PropertySchemaIDVisualizerCesium). + Scene(DummySceneID). 
+ Build() + _ = c.Property.Save(ctx, sceneProperty) + + // root layer + rootLayerID, _ := id.NewIDWith("01d7yt9zdyb74v2bvx7ngfy1hc") + rootLayer, _ := layer.NewGroup().ID(id.LayerID(rootLayerID)).Scene(DummySceneID).Build() + _ = c.Layer.Save(ctx, rootLayer) + + widgets := scene.NewWidgetSystem([]*scene.Widget{}) + plugins := scene.NewPluginSystem([]*scene.Plugin{ + scene.NewPlugin(id.OfficialPluginID, nil), + }) + + // scene + scene, _ := scene.New(). + ID(DummySceneID). + Project(prj.ID()). + Team(team.ID()). + Property(sceneProperty.ID()). + RootLayer(rootLayer.ID()). + WidgetSystem(widgets). + PluginSystem(plugins). + Build() + _ = c.Scene.Save(ctx, scene) +} diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go new file mode 100644 index 000000000..fe44513d9 --- /dev/null +++ b/internal/infrastructure/memory/layer.go @@ -0,0 +1,207 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" +) + +type Layer struct { + data map[id.LayerID]layer.Layer +} + +func NewLayer() repo.Layer { + return &Layer{ + data: map[id.LayerID]layer.Layer{}, + } +} + +func (r *Layer) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (layer.Layer, error) { + res, ok := r.data[id] + if ok && isSceneIncludes(res.Scene(), f) { + return res, nil + } + return nil, err1.ErrNotFound +} + +func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { + result := layer.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if isSceneIncludes(d.Scene(), f) { + result = append(result, &d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + +func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { + result := 
layer.GroupList{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if lg := layer.GroupFromLayer(d); lg != nil { + if isSceneIncludes(lg.Scene(), f) { + result = append(result, lg) + continue + } + } + result = append(result, nil) + } + } + return result, nil +} + +func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, error) { + result := layer.ItemList{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if li := layer.ItemFromLayer(d); li != nil { + if isSceneIncludes(li.Scene(), f) { + result = append(result, li) + continue + } + } + result = append(result, nil) + } + } + return result, nil +} + +func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Item, error) { + d, ok := r.data[id] + if !ok { + return &layer.Item{}, nil + } + if li := layer.ItemFromLayer(d); li != nil { + if isSceneIncludes(li.Scene(), f) { + return li, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { + d, ok := r.data[id] + if !ok { + return &layer.Group{}, nil + } + if lg := layer.GroupFromLayer(d); lg != nil { + if isSceneIncludes(lg.Scene(), f) { + return lg, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id.SceneID, ds id.DatasetSchemaID) (layer.GroupList, error) { + result := layer.GroupList{} + for _, l := range r.data { + if l.Scene() != s { + continue + } + if lg, ok := l.(*layer.Group); ok { + if dsid := lg.LinkedDatasetSchema(); dsid != nil && *dsid == ds { + result = append(result, lg) + } + } + } + return result, nil +} + +func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { + for _, l := range r.data { + if !isSceneIncludes(l.Scene(), f) { + continue + } + if pid := l.Property(); pid != nil && *pid == id { + return l, nil + } + if pid := 
l.Infobox().PropertyRef(); pid != nil && *pid == id { + return l, nil + } + for _, f := range l.Infobox().Fields() { + if f.Property() == id { + return l, nil + } + } + } + return nil, err1.ErrNotFound +} + +func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { + for _, l := range r.data { + if !isSceneIncludes(l.Scene(), f) { + continue + } + gl, ok := l.(*layer.Group) + if !ok { + continue + } + for _, cl := range gl.Layers().Layers() { + if cl == id { + return gl, nil + } + } + } + return nil, err1.ErrNotFound +} + +func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List, error) { + res := layer.List{} + for _, l := range r.data { + if l.Scene() == sceneID { + res = append(res, &l) + } + } + return res, nil +} + +func (r *Layer) FindAllByDatasetSchema(ctx context.Context, datasetSchemaID id.DatasetSchemaID) (layer.List, error) { + res := layer.List{} + for _, l := range r.data { + if d := layer.ToLayerGroup(l).LinkedDatasetSchema(); d != nil && *d == datasetSchemaID { + res = append(res, &l) + } + } + return res, nil +} + +func (r *Layer) Save(ctx context.Context, l layer.Layer) error { + r.data[l.ID()] = l + return nil +} + +func (r *Layer) SaveAll(ctx context.Context, ll layer.List) error { + for _, l := range ll { + layer := *l + r.data[layer.ID()] = layer + } + return nil +} + +func (r *Layer) Remove(ctx context.Context, id id.LayerID) error { + delete(r.data, id) + return nil +} + +func (r *Layer) RemoveAll(ctx context.Context, ids []id.LayerID) error { + for _, id := range ids { + delete(r.data, id) + } + return nil +} + +func (r *Layer) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + for lid, p := range r.data { + if p.Scene() == sceneID { + delete(r.data, lid) + } + } + return nil +} diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go new file mode 100644 index 000000000..07a51a012 --- /dev/null +++ 
b/internal/infrastructure/memory/plugin.go @@ -0,0 +1,68 @@ +package memory + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type Plugin struct { + data []*plugin.Plugin +} + +func NewPlugin() repo.Plugin { + return &Plugin{ + data: []*plugin.Plugin{}, + } +} + +func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { + if p := builtin.GetPlugin(id); p != nil { + return p, nil + } + for _, p := range r.data { + if p.ID().Equal(id) { + p2 := *p + return &p2, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + result := []*plugin.Plugin{} + for _, id := range ids { + if p := builtin.GetPlugin(id); p != nil { + result = append(result, p) + continue + } + for _, p := range r.data { + if p.ID().Equal(id) { + p2 := *p + result = append(result, &p2) + } else { + result = append(result, nil) + } + } + } + return result, nil +} + +func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error { + if p.ID().System() { + return errors.New("cannnot save system plugin") + } + for _, p := range r.data { + if p.ID().Equal(p.ID()) { + return nil + } + } + p2 := *p + r.data = append(r.data, &p2) + return nil +} diff --git a/internal/infrastructure/memory/project.go b/internal/infrastructure/memory/project.go new file mode 100644 index 000000000..d2d60d5ff --- /dev/null +++ b/internal/infrastructure/memory/project.go @@ -0,0 +1,105 @@ +package memory + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/project" +) + +type Project struct { + data map[id.ProjectID]*project.Project +} + +func NewProject() repo.Project { + return &Project{ + data: map[id.ProjectID]*project.Project{}, + } +} + +func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + result := []*project.Project{} + for _, d := range r.data { + if d.Team() == id { + result = append(result, d) + } + } + + var startCursor, endCursor *usecase.Cursor + if len(result) > 0 { + _startCursor := usecase.Cursor(result[0].ID().String()) + _endCursor := usecase.Cursor(result[len(result)-1].ID().String()) + startCursor = &_startCursor + endCursor = &_endCursor + } + + return result, usecase.NewPageInfo( + len(r.data), + startCursor, + endCursor, + true, + true, + ), nil +} + +func (r *Project) FindByIDs(ctx context.Context, ids []id.ProjectID, filter []id.TeamID) ([]*project.Project, error) { + result := []*project.Project{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if isTeamIncludes(d.Team(), filter) { + result = append(result, d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + +func (r *Project) FindByID(ctx context.Context, id id.ProjectID, filter []id.TeamID) (*project.Project, error) { + p, ok := r.data[id] + if ok && isTeamIncludes(p.Team(), filter) { + return p, nil + } + return nil, err1.ErrNotFound +} + +func (r *Project) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { + if name == "" { + return nil, nil + } + for _, p := range r.data { + if p.MatchWithPublicName(name) { + return p, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err error) { + for _, p := range r.data { + if p.Team() == team { + c++ + } + } + return +} + +func (r *Project) Save(ctx context.Context, p *project.Project) error { + p.SetUpdatedAt(time.Now()) + 
r.data[p.ID()] = p + return nil +} + +func (r *Project) Remove(ctx context.Context, projectID id.ProjectID) error { + for sid := range r.data { + if sid == projectID { + delete(r.data, sid) + } + } + return nil +} diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go new file mode 100644 index 000000000..0985a2f2e --- /dev/null +++ b/internal/infrastructure/memory/property.go @@ -0,0 +1,100 @@ +package memory + +import ( + "context" + + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Property struct { + data map[id.PropertyID]property.Property +} + +func NewProperty() repo.Property { + return &Property{ + data: map[id.PropertyID]property.Property{}, + } +} + +func (r *Property) FindByID(ctx context.Context, id id.PropertyID, f []id.SceneID) (*property.Property, error) { + p, ok := r.data[id] + if ok && isSceneIncludes(p.Scene(), f) { + return &p, nil + } + return nil, err1.ErrNotFound +} + +func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { + result := property.List{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if isSceneIncludes(d.Scene(), f) { + result = append(result, &d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + +func (r *Property) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, did id.DatasetID) (property.List, error) { + result := property.List{} + for _, p := range r.data { + if p.IsDatasetLinked(sid, did) { + result = append(result, &p) + } + } + return result, nil +} + +func (r *Property) FindLinkedAll(ctx context.Context, s id.SceneID) (property.List, error) { + result := property.List{} + for _, p := range r.data { + if p.Scene() != s { + continue + } + if p.HasLinkedField() { + p2 := p + result = append(result, 
&p2) + } + } + return result, nil +} + +func (r *Property) Save(ctx context.Context, p *property.Property) error { + r.data[p.ID()] = *p + return nil +} + +func (r *Property) SaveAll(ctx context.Context, pl property.List) error { + for _, p := range pl { + r.data[p.ID()] = *p + } + return nil +} + +func (r *Property) Remove(ctx context.Context, id id.PropertyID) error { + delete(r.data, id) + return nil +} + +func (r *Property) RemoveAll(ctx context.Context, ids []id.PropertyID) error { + for _, id := range ids { + delete(r.data, id) + } + return nil +} + +func (r *Property) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + for pid, p := range r.data { + if p.Scene() == sceneID { + delete(r.data, pid) + } + } + return nil +} diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go new file mode 100644 index 000000000..124bfc3de --- /dev/null +++ b/internal/infrastructure/memory/property_schema.go @@ -0,0 +1,66 @@ +package memory + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type PropertySchema struct { + data map[id.PropertySchemaID]property.Schema +} + +func NewPropertySchema() repo.PropertySchema { + return &PropertySchema{ + data: map[id.PropertySchemaID]property.Schema{}, + } +} + +func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + if ps := builtin.GetPropertySchema(id); ps != nil { + return ps, nil + } + p, ok := r.data[id] + if ok { + return &p, nil + } + return nil, err1.ErrNotFound +} + +func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + result := property.SchemaList{} + for _, id := range ids { + if ps := 
builtin.GetPropertySchema(id); ps != nil { + result = append(result, ps) + continue + } + if d, ok := r.data[id]; ok { + result = append(result, &d) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { + if p.ID().System() { + return errors.New("cannnot save system property schema") + } + r.data[p.ID()] = *p + return nil +} + +func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + for _, ps := range p { + if err := r.Save(ctx, ps); err != nil { + return err + } + } + return nil +} diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go new file mode 100644 index 000000000..86ceb9e95 --- /dev/null +++ b/internal/infrastructure/memory/scene.go @@ -0,0 +1,110 @@ +package memory + +import ( + "context" + "time" + + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Scene struct { + data map[id.SceneID]scene.Scene +} + +func NewScene() repo.Scene { + return &Scene{ + data: map[id.SceneID]scene.Scene{}, + } +} + +func (r *Scene) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*scene.Scene, error) { + s, ok := r.data[id] + if ok && isTeamIncludes(s.Team(), f) { + return &s, nil + } + return nil, err1.ErrNotFound +} + +func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) ([]*scene.Scene, error) { + result := []*scene.Scene{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if isTeamIncludes(d.Team(), f) { + result = append(result, &d) + continue + } + } + result = append(result, nil) + + } + return result, nil +} + +func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamID) (*scene.Scene, error) { + for _, d := range r.data { + if d.Project() == id && 
isTeamIncludes(d.Team(), f) { + return &d, nil + } + } + return nil, err1.ErrNotFound +} + +func (r *Scene) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) { + result := []id.SceneID{} + for _, d := range r.data { + if isTeamIncludes(d.Team(), teams) { + result = append(result, d.ID()) + } + } + return result, nil +} + +func (r *Scene) HasSceneTeam(ctx context.Context, id id.SceneID, teams []id.TeamID) (bool, error) { + s, ok := r.data[id] + if !ok { + return false, err1.ErrNotFound + } + return s.IsTeamIncluded(teams), nil +} + +func (r *Scene) HasScenesTeam(ctx context.Context, id []id.SceneID, teams []id.TeamID) ([]bool, error) { + if id == nil { + return nil, nil + } + if len(teams) == 0 { + return make([]bool, len(id)), nil + } + res := make([]bool, 0, len(id)) + for _, i := range id { + if teams == nil { + res = append(res, false) + continue + } + s, ok := r.data[i] + if !ok { + res = append(res, false) + continue + } + res = append(res, s.IsTeamIncluded(teams)) + } + return res, nil +} + +func (r *Scene) Save(ctx context.Context, s *scene.Scene) error { + s.SetUpdatedAt(time.Now()) + r.data[s.ID()] = *s + return nil +} + +func (r *Scene) Remove(ctx context.Context, sceneID id.SceneID) error { + for sid := range r.data { + if sid == sceneID { + delete(r.data, sid) + } + } + return nil +} diff --git a/internal/infrastructure/memory/scene_lock.go b/internal/infrastructure/memory/scene_lock.go new file mode 100644 index 000000000..ab467e0b7 --- /dev/null +++ b/internal/infrastructure/memory/scene_lock.go @@ -0,0 +1,67 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type sceneLock struct { + lock sync.Map +} + +func NewSceneLock() repo.SceneLock { + return &sceneLock{} +} + +func (r *sceneLock) GetLock(ctx context.Context, sceneID id.SceneID) (scene.LockMode, error) { 
+ if id.ID(sceneID).IsNil() { + return "", id.ErrInvalidID + } + if v, ok := r.lock.Load(sceneID); ok { + if v2, ok2 := v.(scene.LockMode); ok2 { + return v2, nil + } + } + return scene.LockModeFree, nil +} + +func (r *sceneLock) GetAllLock(ctx context.Context, sceneID []id.SceneID) ([]scene.LockMode, error) { + res := make([]scene.LockMode, 0, len(sceneID)) + for _, si := range sceneID { + if id.ID(si).IsNil() { + return nil, id.ErrInvalidID + } + if v, ok := r.lock.Load(si); ok { + if v2, ok2 := v.(scene.LockMode); ok2 { + res = append(res, v2) + } else { + res = append(res, scene.LockModeFree) + } + } else { + res = append(res, scene.LockModeFree) + } + } + return res, nil +} + +func (r *sceneLock) SaveLock(ctx context.Context, sceneID id.SceneID, lock scene.LockMode) error { + if lock == scene.LockModeFree { + r.lock.Delete(sceneID) + } else { + r.lock.Store(sceneID, lock) + } + return nil +} + +func (r *sceneLock) ReleaseAllLock(ctx context.Context) error { + r.lock.Range(func(key interface{}, value interface{}) bool { + r.lock.Delete(key) + return true + }) + return nil +} diff --git a/internal/infrastructure/memory/team.go b/internal/infrastructure/memory/team.go new file mode 100644 index 000000000..d6d446e1e --- /dev/null +++ b/internal/infrastructure/memory/team.go @@ -0,0 +1,74 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Team struct { + data map[id.TeamID]user.Team +} + +func NewTeam() repo.Team { + return &Team{ + data: map[id.TeamID]user.Team{}, + } +} + +func (r *Team) FindByUser(ctx context.Context, i id.UserID) ([]*user.Team, error) { + result := []*user.Team{} + for _, d := range r.data { + if d.Members().ContainsUser(i) { + result = append(result, &d) + } + } + return result, nil +} + +func (r *Team) FindByIDs(ctx context.Context, 
ids []id.TeamID) ([]*user.Team, error) { + result := []*user.Team{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + result = append(result, &d) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *Team) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { + d, ok := r.data[id] + if ok { + return &d, nil + } + return &user.Team{}, err1.ErrNotFound +} + +func (r *Team) Save(ctx context.Context, t *user.Team) error { + r.data[t.ID()] = *t + return nil +} + +func (r *Team) SaveAll(ctx context.Context, teams []*user.Team) error { + for _, t := range teams { + r.data[t.ID()] = *t + } + return nil +} + +func (r *Team) Remove(ctx context.Context, id id.TeamID) error { + delete(r.data, id) + return nil +} + +func (r *Team) RemoveAll(ctx context.Context, ids []id.TeamID) error { + for _, id := range ids { + delete(r.data, id) + } + return nil +} diff --git a/internal/infrastructure/memory/transaction.go b/internal/infrastructure/memory/transaction.go new file mode 100644 index 000000000..c0ff8b77b --- /dev/null +++ b/internal/infrastructure/memory/transaction.go @@ -0,0 +1,27 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Transaction struct{} + +type Tx struct{} + +func NewTransaction() *Transaction { + return &Transaction{} +} + +func (t *Transaction) Begin() (repo.Tx, error) { + return &Tx{}, nil +} + +func (t *Tx) Commit() { + // do nothing +} + +func (t *Tx) End(_ context.Context) error { + return nil +} diff --git a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go new file mode 100644 index 000000000..ec26c0064 --- /dev/null +++ b/internal/infrastructure/memory/user.go @@ -0,0 +1,92 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/user" +) + +type User struct { + data map[id.UserID]user.User +} + +func NewUser() repo.User { + return &User{ + data: map[id.UserID]user.User{}, + } +} + +func (r *User) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { + result := []*user.User{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + result = append(result, &d) + } else { + result = append(result, nil) + } + } + return result, nil +} + +func (r *User) FindByID(ctx context.Context, id id.UserID) (*user.User, error) { + d, ok := r.data[id] + if ok { + return &d, nil + } + return &user.User{}, err1.ErrNotFound +} + +func (r *User) Save(ctx context.Context, u *user.User) error { + r.data[u.ID()] = *u + return nil +} + +func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, error) { + if auth0sub == "" { + return nil, err1.ErrInvalidParams + } + + for _, u := range r.data { + if u.ContainAuth(user.AuthFromAuth0Sub(auth0sub)) { + return &u, nil + } + } + + return nil, err1.ErrNotFound +} + +func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error) { + if email == "" { + return nil, err1.ErrInvalidParams + } + + for _, u := range r.data { + if u.Email() == email { + return &u, nil + } + } + + return nil, err1.ErrNotFound +} + +func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { + if nameOrEmail == "" { + return nil, err1.ErrInvalidParams + } + + for _, u := range r.data { + if u.Email() == nameOrEmail || u.Name() == nameOrEmail { + return &u, nil + } + } + + return nil, err1.ErrNotFound +} + +func (r *User) Remove(ctx context.Context, user id.UserID) error { + delete(r.data, user) + return nil +} diff --git a/internal/infrastructure/memory/util.go b/internal/infrastructure/memory/util.go new file mode 100644 index 000000000..01d3a80ef --- /dev/null +++ b/internal/infrastructure/memory/util.go @@ -0,0 +1,27 @@ +package memory + +import 
"github.com/reearth/reearth-backend/pkg/id" + +func isTeamIncludes(id id.TeamID, ids []id.TeamID) bool { + if ids == nil { + return true + } + for _, i := range ids { + if id == i { + return true + } + } + return false +} + +func isSceneIncludes(id id.SceneID, ids []id.SceneID) bool { + if ids == nil { + return true + } + for _, i := range ids { + if id == i { + return true + } + } + return false +} diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go new file mode 100644 index 000000000..12ed59d7a --- /dev/null +++ b/internal/infrastructure/mongo/asset.go @@ -0,0 +1,75 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" +) + +type assetRepo struct { + client *mongodoc.ClientCollection +} + +func NewAsset(client *mongodoc.Client) repo.Asset { + r := &assetRepo{client: client.WithCollection("asset")} + r.init() + return r +} + +func (r *assetRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "asset", i) + } +} + +func (r *assetRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + var c mongodoc.AssetConsumer + pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) + if err2 != nil { + return nil, nil, err1.ErrInternalBy(err2) + } + return c.Rows, pageInfo, nil +} + +func (r *assetRepo) findOne(ctx context.Context, filter bson.D) (*asset.Asset, error) { + dst := make([]*asset.Asset, 0, 1) + c := mongodoc.AssetConsumer{ + Rows: dst, + } + if err := 
r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + filter := bson.D{ + {Key: "id", Value: id.String()}, + } + return r.findOne(ctx, filter) +} + +func (r *assetRepo) Save(ctx context.Context, asset *asset.Asset) error { + doc, id := mongodoc.NewAsset(asset) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *assetRepo) Remove(ctx context.Context, id id.AssetID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + filter := bson.D{ + {Key: "team", Value: id.String()}, + } + return r.paginate(ctx, filter, pagination) +} diff --git a/internal/infrastructure/mongo/config.go b/internal/infrastructure/mongo/config.go new file mode 100644 index 000000000..4b562a5bd --- /dev/null +++ b/internal/infrastructure/mongo/config.go @@ -0,0 +1,46 @@ +package mongo + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/config" + err1 "github.com/reearth/reearth-backend/pkg/error" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var upsert = true + +type configRepo struct { + client *mongodoc.ClientCollection +} + +func NewConfig(client *mongodoc.Client) repo.Config { + return &configRepo{client: client.WithCollection("config")} +} + +func (r *configRepo) Load(ctx context.Context) (*config.Config, error) { + cfg := &config.Config{} + if err := r.client.Collection().FindOne(ctx, nil).Decode(cfg); err != nil { + if errors.Is(err, mongo.ErrNoDocuments) { + return cfg, nil + } + return nil, err1.ErrInternalBy(err) + } + return cfg, nil +} + +func (r *configRepo) Save(ctx context.Context, cfg 
*config.Config) error { + if cfg == nil { + return nil + } + if _, err := r.client.Collection().UpdateOne(ctx, nil, cfg, &options.UpdateOptions{ + Upsert: &upsert, + }); err != nil { + return err1.ErrInternalBy(err) + } + return nil +} diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go new file mode 100644 index 000000000..2bbc9dd33 --- /dev/null +++ b/internal/infrastructure/mongo/container.go @@ -0,0 +1,42 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/migration" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "go.mongodb.org/mongo-driver/mongo" +) + +func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databaseName string, dummy bool) error { + if databaseName == "" { + databaseName = "reearth" + } + client := mongodoc.NewClient(databaseName, mc) + + c.Asset = NewAsset(client) + c.Config = NewConfig(client) + c.DatasetSchema = NewDatasetSchema(client) + c.Dataset = NewDataset(client) + c.Layer = NewLayer(client) + c.Plugin = NewPlugin(client) + c.Project = NewProject(client) + c.PropertySchema = NewPropertySchema(client) + c.Property = NewProperty(client) + c.Scene = NewScene(client) + c.Team = NewTeam(client) + c.User = NewUser(client) + c.SceneLock = NewSceneLock(client) + c.Transaction = NewTransaction(client) + + if err := (migration.Client{Client: client}).Migrate(ctx); err != nil { + return err + } + + if dummy { + generateDummyData(ctx, c) + } + + return nil +} diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go new file mode 100644 index 000000000..7921ef995 --- /dev/null +++ b/internal/infrastructure/mongo/dataset.go @@ -0,0 +1,342 @@ +package mongo + +import ( + "context" + "errors" + + "go.mongodb.org/mongo-driver/bson" + + 
"github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type datasetRepo struct { + client *mongodoc.ClientCollection +} + +func NewDataset(client *mongodoc.Client) repo.Dataset { + r := &datasetRepo{client: client.WithCollection("dataset")} + r.init() + return r +} + +func (r *datasetRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "dataset", i) + } +} + +func (r *datasetRepo) FindByID(ctx context.Context, id2 id.DatasetID, f []id.SceneID) (*dataset.Dataset, error) { + filter := r.sceneFilter(bson.D{{Key: "id", Value: id.ID(id2).String()}}, f) + return r.findOne(ctx, filter) +} + +func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.DatasetIDToKeys(ids)}, + }}, + }, f) + dst := make([]*dataset.Dataset, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterDatasets(ids, res), nil +} + +func (r *datasetRepo) FindBySchema(ctx context.Context, schemaID id.DatasetSchemaID, f []id.SceneID, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + filter := r.sceneFilter(bson.D{ + {Key: "schema", Value: id.ID(schemaID).String()}, + }, f) + return r.paginate(ctx, filter, pagination) +} + +func (r *datasetRepo) FindBySchemaAll(ctx context.Context, schemaID id.DatasetSchemaID) (dataset.List, error) { + filter := bson.D{ + {Key: "schema", Value: id.ID(schemaID).String()}, + } + return r.find(ctx, nil, filter) +} + +func (r *datasetRepo) 
FindGraph(ctx context.Context, did id.DatasetID, f []id.SceneID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { + if len(fields) == 0 { + d, err := r.FindByID(ctx, did, f) + if err != nil { + return nil, err + } + return dataset.List{d}, nil + } + + fieldsstr := id.DatasetSchemaFieldIDToKeys(fields) + firstField := fieldsstr[0] + + aggfilter := bson.D{} + if f != nil { + aggfilter = append(aggfilter, bson.E{Key: "$in", Value: []interface{}{ + "$$g.scene", + id.SceneIDToKeys(f), + }}) + } + + pipeline := bson.D{ + {Key: "$match", Value: r.sceneFilter(bson.D{ + {Key: "id", Value: did.String()}, + {Key: "fields.id", Value: firstField}, + }, f)}, + {Key: "$limit", Value: 1}, + {Key: "$addFields", Value: bson.D{ + {Key: "field", Value: bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$fields"}, + {Key: "as", Value: "f"}, + {Key: "cond", Value: bson.D{ + {Key: "$and", Value: []bson.D{ + {{Key: "$eq", Value: []string{"$$f.id", firstField}}}, + {{Key: "$eq", Value: []string{"$$f.type", "ref"}}}, + }}, + }}, + }}}, + 0, + }}, + }}, + }}, + {Key: "$graphLookup", Value: bson.D{ + {Key: "from", Value: "dataset"}, + {Key: "startWith", Value: "$field.value"}, + {Key: "connectFromField", Value: "fields.value"}, + {Key: "connectToField", Value: "id"}, + {Key: "depthField", Value: "depth"}, + {Key: "as", Value: "graph"}, + {Key: "restrictSearchWithMatch", Value: r.sceneFilter(bson.D{}, f)}, + }}, + {Key: "$addFields", Value: bson.D{ + {Key: "firstGraph", Value: bson.D{ + {Key: "$slice", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$graph"}, + {Key: "as", Value: "g"}, + {Key: "cond", Value: bson.D{ + {Key: "$eq", Value: []interface{}{"$$g.depth", 0}}, + }}, + }}}, + 0, + 1, + }}, + }}, + {Key: "graph", Value: bson.D{ + {Key: "$filter", Value: bson.D{ + {Key: "input", Value: bson.D{ + {Key: "$map", Value: bson.D{ + {Key: "input", Value: bson.D{ + {Key: "$map", 
Value: bson.D{ + {Key: "input", Value: "$graph"}, + {Key: "as", Value: "g"}, + {Key: "in", Value: bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$#g.fields"}, + {Key: "as", Value: "f"}, + {Key: "cond", Value: bson.D{ + {Key: "$and", Value: bson.D{ + {Key: "$eq", Value: []interface{}{ + "$$f.id", + bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + fieldsstr[1.], + "$$g.depth", + }}, + }, + }}, + }}, + {Key: "$eq", Value: []string{"$$f.type", "ref"}}, + }}, + }}}, + 0, + }}, + }}, + }}, + }}, + {Key: "as", Value: ""}, + {Key: "in", Value: bson.D{ + {Key: "$arrayElemAt", Value: []interface{}{ + bson.D{{Key: "$filter", Value: bson.D{ + {Key: "input", Value: "$graph"}, + {Key: "as", Value: "g1"}, + {Key: "cond", Value: bson.D{ + {Key: "$eq", Value: []string{ + "$$g1.id", + "$$g.value", + }}, + }}, + }}}, + 0, + }}, + }}, + }}, + }}, + {Key: "as", Value: "f"}, + {Key: "cond", Value: bson.D{ + {Key: "$ne", Value: []interface{}{"$$f", nil}}, + }}, + }}, + }}, + }}, + {Key: "$sort", Value: bson.D{ + {Key: "graph.depth", Value: 1}, + }}, + {Key: "$addFields", Value: bson.D{ + {Key: "graph", Value: bson.D{ + {Key: "$filter", Value: bson.D{ + {Key: "input", Value: bson.D{ + {Key: "$concatArrays", Value: []string{"$firstGraph", "$graph"}}, + }}, + {Key: "as", Value: "g"}, + {Key: "cond", Value: aggfilter}, + }}, + }}, + }}, + {Key: "$project", Value: bson.D{ + {Key: "firstGraph", Value: 0}, + {Key: "field", Value: 0}, + }}, + } + + cursor, err2 := r.client.Collection().Aggregate(ctx, pipeline) + if err2 != nil { + return nil, err1.ErrInternalBy(err2) + } + defer func() { + _ = cursor.Close(ctx) + }() + + doc := mongodoc.DatasetExtendedDocument{} + if err2 := bson.Unmarshal(cursor.Current, &doc); err2 != nil { + return nil, err1.ErrInternalBy(err2) + } + docs := make([]*mongodoc.DatasetExtendedDocument, 0, len(fields)) + for i := 0; i < len(fields); i++ { + var d2 *mongodoc.DatasetExtendedDocument + 
if i == 0 { + d2 = &doc + } else { + for _, d := range doc.Graph { + if i-1 == d.Depth { + d2 = d + } + } + } + docs = append(docs, d2) + } + res := make(dataset.List, 0, len(docs)) + for i, d := range docs { + if i > 0 && i-1 != d.Depth { + return nil, err1.ErrInternalBy(errors.New("invalid order")) + } + ds, err2 := d.DatasetDocument.Model() + if err2 != nil { + return nil, err1.ErrInternalBy(err2) + } + res = append(res, ds) + } + return res, nil +} + +func (r *datasetRepo) Save(ctx context.Context, dataset *dataset.Dataset) error { + doc, id := mongodoc.NewDataset(dataset) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *datasetRepo) SaveAll(ctx context.Context, datasetList dataset.List) error { + if datasetList == nil || len(datasetList) == 0 { + return nil + } + docs, ids := mongodoc.NewDatasets(datasetList) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *datasetRepo) Remove(ctx context.Context, id id.DatasetID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *datasetRepo) RemoveAll(ctx context.Context, ids []id.DatasetID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.DatasetIDToKeys(ids)) +} + +func (r *datasetRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + _, err := r.client.Collection().DeleteMany(ctx, filter) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (r *datasetRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + var c mongodoc.DatasetConsumer + pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) + if err2 != nil { + return nil, nil, err1.ErrInternalBy(err2) + } + return c.Rows, pageInfo, nil +} + +func (r *datasetRepo) find(ctx context.Context, dst dataset.List, filter bson.D) (dataset.List, error) { + c := mongodoc.DatasetConsumer{ + Rows: dst, + } + if err2 := 
r.client.Find(ctx, filter, &c); err2 != nil { + return nil, err1.ErrInternalBy(err2) + } + return c.Rows, nil +} + +func (r *datasetRepo) findOne(ctx context.Context, filter bson.D) (*dataset.Dataset, error) { + dst := make([]*dataset.Dataset, 0, 1) + c := mongodoc.DatasetConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterDatasets(ids []id.DatasetID, rows []*dataset.Dataset) []*dataset.Dataset { + res := make([]*dataset.Dataset, 0, len(ids)) + for _, id := range ids { + var r2 *dataset.Dataset + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (*datasetRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { + if scenes == nil { + return filter + } + filter = append(filter, bson.E{ + Key: "scene", + Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + }) + return filter +} diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go new file mode 100644 index 000000000..3fc3625ed --- /dev/null +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -0,0 +1,182 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" +) + +type datasetSchemaRepo struct { + client *mongodoc.ClientCollection +} + +func NewDatasetSchema(client *mongodoc.Client) repo.DatasetSchema { + r := &datasetSchemaRepo{client: client.WithCollection("datasetSchema")} + r.init() + return r +} + +func (r *datasetSchemaRepo) init() { + i := 
r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "datasetSchema", i) + } +} + +func (r *datasetSchemaRepo) FindByID(ctx context.Context, id2 id.DatasetSchemaID, f []id.SceneID) (*dataset.Schema, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.ID(id2).String()}, + }, f) + return r.findOne(ctx, filter) +} + +func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.DatasetSchemaIDToKeys(ids)}, + }}, + }, f) + dst := make([]*dataset.Schema, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterDatasetSchemas(ids, res), nil +} + +func (r *datasetSchemaRepo) FindByScene(ctx context.Context, sceneID id.SceneID, pagination *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + return r.paginate(ctx, filter, pagination) +} + +func (r *datasetSchemaRepo) FindBySceneAll(ctx context.Context, sceneID id.SceneID) (dataset.SchemaList, error) { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + return r.find(ctx, nil, filter) +} + +func (r *datasetSchemaRepo) FindDynamicByID(ctx context.Context, sid id.DatasetSchemaID) (*dataset.Schema, error) { + filter := bson.D{ + {Key: "id", Value: id.ID(sid).String()}, + {Key: "dynamic", Value: true}, + } + return r.findOne(ctx, filter) +} + +func (r *datasetSchemaRepo) FindAllDynamicByScene(ctx context.Context, sceneID id.SceneID) (dataset.SchemaList, error) { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + {Key: "dynamic", Value: true}, + } + return r.find(ctx, nil, filter) +} + +func (r *datasetSchemaRepo) FindBySceneAndSource(ctx context.Context, sceneID id.SceneID, source dataset.Source) (dataset.SchemaList, error) { + filter := 
bson.D{ + {Key: "scene", Value: sceneID.String()}, + {Key: "source", Value: string(source)}, + } + return r.find(ctx, nil, filter) +} + +func (r *datasetSchemaRepo) Save(ctx context.Context, datasetSchema *dataset.Schema) error { + doc, id := mongodoc.NewDatasetSchema(datasetSchema) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *datasetSchemaRepo) SaveAll(ctx context.Context, datasetSchemas dataset.SchemaList) error { + if datasetSchemas == nil || len(datasetSchemas) == 0 { + return nil + } + docs, ids := mongodoc.NewDatasetSchemas(datasetSchemas) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *datasetSchemaRepo) Remove(ctx context.Context, datasetSchemaID id.DatasetSchemaID) error { + return r.client.RemoveOne(ctx, datasetSchemaID.String()) +} + +func (r *datasetSchemaRepo) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.DatasetSchemaIDToKeys(ids)) +} + +func (r *datasetSchemaRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + _, err := r.client.Collection().DeleteMany(ctx, filter) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (r *datasetSchemaRepo) find(ctx context.Context, dst []*dataset.Schema, filter bson.D) ([]*dataset.Schema, error) { + c := mongodoc.DatasetSchemaConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *datasetSchemaRepo) findOne(ctx context.Context, filter bson.D) (*dataset.Schema, error) { + dst := make([]*dataset.Schema, 0, 1) + c := mongodoc.DatasetSchemaConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *datasetSchemaRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*dataset.Schema, *usecase.PageInfo, 
error) { + var c mongodoc.DatasetSchemaConsumer + pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) + if err2 != nil { + return nil, nil, err1.ErrInternalBy(err2) + } + return c.Rows, pageInfo, nil +} + +func filterDatasetSchemas(ids []id.DatasetSchemaID, rows []*dataset.Schema) []*dataset.Schema { + res := make([]*dataset.Schema, 0, len(ids)) + for _, id := range ids { + var r2 *dataset.Schema + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (*datasetSchemaRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { + if scenes == nil { + return filter + } + filter = append(filter, bson.E{ + Key: "scene", + Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + }) + return filter +} diff --git a/internal/infrastructure/mongo/dummy.go b/internal/infrastructure/mongo/dummy.go new file mode 100644 index 000000000..8b9452a60 --- /dev/null +++ b/internal/infrastructure/mongo/dummy.go @@ -0,0 +1,46 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + user1 "github.com/reearth/reearth-backend/pkg/user" +) + +func generateDummyData(ctx context.Context, c *repo.Container) { + // check if duumy data are already created + userID, _ := id.UserIDFrom("01d7yt9zdyb74v2bvx76vw0jfj") + if user, err2 := c.User.FindByID(ctx, userID); err2 != nil { + if err2 != err1.ErrNotFound { + panic(err2) + } + } else if user != nil { + return + } + + // team + team, _ := user1.NewTeam().NewID().Personal(true).Members(map[id.UserID]user1.Role{ + userID: user1.RoleOwner, + }).Build() + err := c.Team.Save(ctx, team) + if err != nil { + panic(err) + } + + // user + user, _ := user1.New(). + ID(userID). + Name("dummy"). + Email("dummy@dummy.com"). + Team(team.ID()). 
+ Build() + err = c.User.Save(ctx, user) + if err != nil { + panic(err) + } + + println("dummy user: ", userID.String()) + println("dummy team: ", team.ID().String()) +} diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go new file mode 100644 index 000000000..966808beb --- /dev/null +++ b/internal/infrastructure/mongo/layer.go @@ -0,0 +1,324 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/log" +) + +type layerRepo struct { + client *mongodoc.ClientCollection +} + +func NewLayer(client *mongodoc.Client) repo.Layer { + r := &layerRepo{client: client.WithCollection("layer")} + r.init() + return r +} + +func (r *layerRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "layer", i) + } +} + +func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (layer.Layer, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findOne(ctx, filter) +} + +func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.LayerIDToKeys(ids)}, + }}, + }, f) + dst := make([]*layer.Layer, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterLayers(ids, res), nil +} + +func (r *layerRepo) FindAllByDatasetSchema(ctx context.Context, dsid id.DatasetSchemaID) (layer.List, error) { + filter := bson.D{ + {Key: "group.linkeddatasetschema", Value: dsid.String()}, + } + return 
r.find(ctx, nil, filter) +} + +func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Item, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findItemOne(ctx, filter) +} + +func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.LayerIDToKeys(ids)}, + }}, + }, f) + dst := make([]*layer.Item, 0, len(ids)) + res, err := r.findItems(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterLayerItems(ids, res), nil +} + +func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findGroupOne(ctx, filter) +} + +func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.LayerIDToKeys(ids)}, + }}, + }, f) + dst := make([]*layer.Group, 0, len(ids)) + res, err := r.findGroups(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterLayerGroups(ids, res), nil +} + +func (r *layerRepo) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, sceneID id.SceneID, datasetSchemaID id.DatasetSchemaID) (layer.GroupList, error) { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + {Key: "group.linkeddatasetschema", Value: datasetSchemaID.String()}, + } + return r.findGroups(ctx, nil, filter) +} + +func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { + filter := r.sceneFilter(bson.D{ + {Key: "$or", Value: []bson.D{ + {{Key: "property", Value: id.String()}}, + {{Key: "infobox.property", Value: id.String()}}, + {{Key: "infobox.fields.property", Value: id.String()}}, + }}, 
+ }, f) + return r.findOne(ctx, filter) +} + +func (r *layerRepo) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { + filter := r.sceneFilter(bson.D{ + {Key: "group.layers", Value: id.String()}, + }, f) + return r.findGroupOne(ctx, filter) +} + +func (r *layerRepo) FindByScene(ctx context.Context, id id.SceneID) (layer.List, error) { + filter := bson.D{ + {Key: "scene", Value: id.String()}, + } + return r.find(ctx, nil, filter) +} + +func (r *layerRepo) Save(ctx context.Context, layer layer.Layer) error { + doc, id := mongodoc.NewLayer(layer) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *layerRepo) SaveAll(ctx context.Context, layers layer.List) error { + if layers == nil || len(layers) == 0 { + return nil + } + docs, ids := mongodoc.NewLayers(layers) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *layerRepo) Remove(ctx context.Context, id id.LayerID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *layerRepo) RemoveAll(ctx context.Context, ids []id.LayerID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.LayerIDToKeys(ids)) +} + +func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + _, err := r.client.Collection().DeleteMany(ctx, filter) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (r *layerRepo) find(ctx context.Context, dst layer.List, filter bson.D) (layer.List, error) { + c := mongodoc.LayerConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *layerRepo) findOne(ctx context.Context, filter bson.D) (layer.Layer, error) { + c := mongodoc.LayerConsumer{} + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + if len(c.Rows) == 0 { + return nil, err1.ErrNotFound + } + return *c.Rows[0], nil +} + +func (r 
*layerRepo) findItemOne(ctx context.Context, filter bson.D) (*layer.Item, error) { + c := mongodoc.LayerConsumer{} + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + if len(c.ItemRows) == 0 { + return nil, err1.ErrNotFound + } + return c.ItemRows[0], nil +} + +func (r *layerRepo) findGroupOne(ctx context.Context, filter bson.D) (*layer.Group, error) { + c := mongodoc.LayerConsumer{} + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + if len(c.GroupRows) == 0 { + return nil, err1.ErrNotFound + } + return c.GroupRows[0], nil +} + +// func (r *layerRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.List, *usecase.PageInfo, error) { +// var c mongodoc.LayerConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, err1.ErrInternalBy(err2) +// } +// return c.Rows, pageInfo, nil +// } + +func (r *layerRepo) findItems(ctx context.Context, dst layer.ItemList, filter bson.D) (layer.ItemList, error) { + c := mongodoc.LayerConsumer{ + ItemRows: dst, + } + if c.ItemRows != nil { + c.Rows = make(layer.List, 0, len(c.ItemRows)) + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.ItemRows, nil +} + +// func (r *layerRepo) paginateItems(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.ItemList, *usecase.PageInfo, error) { +// var c mongodoc.LayerConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, err1.ErrInternalBy(err2) +// } +// return c.ItemRows, pageInfo, nil +// } + +func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter bson.D) (layer.GroupList, error) { + c := mongodoc.LayerConsumer{ + GroupRows: dst, + } + if c.GroupRows != nil { + c.Rows = make(layer.List, 0, len(c.GroupRows)) + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, 
err + } + return c.GroupRows, nil +} + +// func (r *layerRepo) paginateGroups(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.GroupList, *usecase.PageInfo, error) { +// var c mongodoc.LayerConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, err1.ErrInternalBy(err2) +// } +// return c.GroupRows, pageInfo, nil +// } + +func filterLayers(ids []id.LayerID, rows []*layer.Layer) []*layer.Layer { + res := make([]*layer.Layer, 0, len(ids)) + for _, id := range ids { + var r2 *layer.Layer + for _, r := range rows { + if r == nil { + continue + } + if r3 := *r; r3 != nil && r3.ID() == id { + r2 = &r3 + break + } + } + res = append(res, r2) + } + return res +} + +func filterLayerItems(ids []id.LayerID, rows []*layer.Item) []*layer.Item { + res := make([]*layer.Item, 0, len(ids)) + for _, id := range ids { + var r2 *layer.Item + for _, r := range rows { + if r != nil && r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func filterLayerGroups(ids []id.LayerID, rows []*layer.Group) []*layer.Group { + res := make([]*layer.Group, 0, len(ids)) + for _, id := range ids { + var r2 *layer.Group + for _, r := range rows { + if r != nil && r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (*layerRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { + if scenes == nil { + return filter + } + filter = append(filter, bson.E{ + Key: "scene", + Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + }) + return filter +} diff --git a/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go b/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go new file mode 100644 index 000000000..95cf2167a --- /dev/null +++ b/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go @@ -0,0 +1,46 @@ +package migration + +import ( + "context" + + 
"github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +func AddSceneWidgetId(ctx context.Context, c DBClient) error { + col := c.WithCollection("scene") + + return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ + Size: 50, + Callback: func(rows []bson.Raw) error { + + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: AddSceneWidgetId: hit scenes: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.SceneDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + widgets := make([]mongodoc.SceneWidgetDocument, 0, len(doc.Widgets)) + for _, w := range doc.Widgets { + if w.ID == "" { + w.ID = id.New().String() + } + widgets = append(widgets, w) + } + doc.Widgets = widgets + + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go new file mode 100644 index 000000000..de2b0cd7b --- /dev/null +++ b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go @@ -0,0 +1,68 @@ +package migration + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" + "go.mongodb.org/mongo-driver/bson" +) + +var scenePropertySchema = builtin.MustPropertySchemaByVisualizer(visualizer.VisualizerCesium) + +func AddSceneDefaultTile(ctx context.Context, c DBClient) error { + col := c.WithCollection("property") + 
+ psid := scenePropertySchema.ID().String() + filter := bson.M{ + "$or": bson.A{ + bson.M{"schema": psid, "items": bson.A{}}, + bson.M{"schema": psid, "items": bson.M{"$exists": false}}, + bson.M{ + "schema": psid, + "items": bson.M{ + "$not": bson.M{ + "$elemMatch": bson.M{ + "schemagroup": "tiles", + }, + }, + }, + }, + bson.M{ + "schema": psid, + "items": bson.M{ + "$elemMatch": bson.M{ + "schemagroup": "tiles", + "groups": bson.A{}, + }, + }, + }, + }, + } + + log.Infof("migration: AddSceneDefaultTile: filter: %+v\n", filter) + + return col.Find(ctx, filter, &mongodoc.PropertyBatchConsumer{ + Size: 50, + Callback: func(properties []*property.Property) error { + log.Infof("migration: AddSceneDefaultTile: hit properties: %d\n", len(properties)) + + for _, p := range properties { + g := p.GetOrCreateGroupList(scenePropertySchema, property.PointItemBySchema(id.PropertySchemaFieldID("tiles"))) + if g == nil || g.Count() > 0 { + continue + } + f := property.NewGroup().NewID().Schema(p.Schema(), id.PropertySchemaFieldID("tiles")).MustBuild() + g.Add(f, -1) + } + + docs, ids := mongodoc.NewProperties(properties) + + return col.SaveAll(ctx, ids, docs) + }, + }) +} diff --git a/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go b/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go new file mode 100644 index 000000000..ab1908d25 --- /dev/null +++ b/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go @@ -0,0 +1,43 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "go.mongodb.org/mongo-driver/bson" +) + +func RemovePreviewToken(ctx context.Context, c DBClient) error { + col := c.WithCollection("project") + + return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ + Size: 50, + Callback: func(rows []bson.Raw) error { + + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 
0, len(rows)) + + log.Infof("migration: RemoveProjectPreviewToken: hit projects: %d\n", len(rows)) + + for _, row := range rows { + doc := bson.M{} + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + if doc["publishmentstatus"] == "limited" { + pt := doc["previewtoken"] + doc["alias"] = pt + } + delete(doc, "previewtoken") + + id := doc["id"].(string) + ids = append(ids, id) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/internal/infrastructure/mongo/migration/client.go b/internal/infrastructure/mongo/migration/client.go new file mode 100644 index 000000000..e716dce1b --- /dev/null +++ b/internal/infrastructure/mongo/migration/client.go @@ -0,0 +1,101 @@ +package migration + +import ( + "context" + "errors" + "fmt" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/config" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/log" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +var upsert = true + +type DBClient = *mongodoc.Client + +type MigrationFunc = func(context.Context, DBClient) error + +type Client struct { + Client *mongodoc.Client +} + +func (c Client) Migrate(ctx context.Context) error { + config, err := c.loadConfig(ctx) + if err != nil { + var ie *err1.ErrInternal + if ok := errors.As(err, &ie); ok { + err = ie.Unwrap() + } + return fmt.Errorf("Failed to load config: %w", err) + } + + nextMigrations := config.NextMigrations(migrationKeys()) + if len(nextMigrations) == 0 { + return nil + } + + for _, m := range nextMigrations { + log.Infof("DB migration: %d\n", m) + + if err := migrations[m](ctx, c.Client); err != nil { + var ie *err1.ErrInternal + if ok := errors.As(err, &ie); ok { + err = ie.Unwrap() + } + return fmt.Errorf("Failed to exec migration %d: %w", m, err) + } + + 
config.Migration = m + if err := c.saveConfig(ctx, config); err != nil { + var ie *err1.ErrInternal + if ok := errors.As(err, &ie); ok { + err = ie.Unwrap() + } + return fmt.Errorf("Failed to save config: %w", err) + } + } + + return nil +} + +func migrationKeys() []int64 { + keys := make([]int64, 0, len(migrations)) + for k := range migrations { + keys = append(keys, k) + } + return keys +} + +func (c *Client) loadConfig(ctx context.Context) (*config.Config, error) { + cfg := &config.Config{} + + if err := c.Client.Collection("config").FindOne(ctx, bson.D{}).Decode(cfg); err != nil { + if errors.Is(err, mongo.ErrNoDocuments) || errors.Is(err, mongo.ErrNilDocument) { + return cfg, nil + } + return nil, err + } + + return cfg, nil +} + +func (c *Client) saveConfig(ctx context.Context, cfg *config.Config) error { + if cfg == nil { + return nil + } + + if _, err := c.Client.Collection("config").UpdateOne(ctx, bson.D{}, bson.M{ + "$set": cfg, + }, &options.UpdateOptions{ + Upsert: &upsert, + }); err != nil { + return err1.ErrInternalBy(err) + } + + return nil +} diff --git a/internal/infrastructure/mongo/migration/migrations.go b/internal/infrastructure/mongo/migration/migrations.go new file mode 100644 index 000000000..7fafb0fd0 --- /dev/null +++ b/internal/infrastructure/mongo/migration/migrations.go @@ -0,0 +1,9 @@ +// Code generated by migrategen, DO NOT EDIT. 
+ +package migration + +var migrations = map[int64]MigrationFunc{ + 201217132559: AddSceneWidgetId, + 201217193948: AddSceneDefaultTile, + 210310145844: RemovePreviewToken, +} diff --git a/internal/infrastructure/mongo/mongodoc/asset.go b/internal/infrastructure/mongo/mongodoc/asset.go new file mode 100644 index 000000000..511538ddc --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/asset.go @@ -0,0 +1,74 @@ +package mongodoc + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +type AssetDocument struct { + ID string + CreatedAt time.Time + Team string + Name string + Size int64 + URL string + ContentType string +} + +type AssetConsumer struct { + Rows []*asset.Asset +} + +func (c *AssetConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc AssetDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + asset, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, asset) + return nil +} + +func NewAsset(asset *asset.Asset) (*AssetDocument, string) { + aid := asset.ID().String() + return &AssetDocument{ + ID: aid, + CreatedAt: asset.CreatedAt(), + Team: asset.Team().String(), + Name: asset.Name(), + Size: asset.Size(), + URL: asset.URL(), + ContentType: asset.ContentType(), + }, aid +} + +func (d *AssetDocument) Model() (*asset.Asset, error) { + aid, err := id.AssetIDFrom(d.ID) + if err != nil { + return nil, err + } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + + return asset.New(). + ID(aid). + CreatedAt(d.CreatedAt). + Team(tid). + Name(d.Name). + Size(d.Size). + URL(d.URL). + ContentType(d.ContentType). 
+ Build() +} diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go new file mode 100644 index 000000000..08d384f8c --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -0,0 +1,333 @@ +package mongodoc + +import ( + "context" + "errors" + "fmt" + "io" + + "github.com/reearth/reearth-backend/internal/usecase" + err1 "github.com/reearth/reearth-backend/pkg/error" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type Client struct { + database string + client *mongo.Client +} + +func NewClient(database string, c *mongo.Client) *Client { + return &Client{ + database: database, + client: c, + } +} + +func (c *Client) WithCollection(col string) *ClientCollection { + return &ClientCollection{ + Client: c, + CollectionName: col, + } +} + +func (c *Client) Collection(col string) *mongo.Collection { + return c.client.Database(c.database).Collection(col) +} + +func (c *Client) Find(ctx context.Context, col string, filter interface{}, consumer Consumer) error { + cursor, err := c.Collection(col).Find(ctx, filter) + if errors.Is(err, mongo.ErrNilDocument) || errors.Is(err, mongo.ErrNoDocuments) { + return err1.ErrNotFound + } + if err != nil { + return err1.ErrInternalBy(err) + } + defer func() { + _ = cursor.Close(ctx) + }() + + for { + c := cursor.Next(ctx) + if err := cursor.Err(); err != nil && !errors.Is(err, io.EOF) { + return err1.ErrInternalBy(err) + } + + if !c { + if err := consumer.Consume(nil); err != nil { + return err1.ErrInternalBy(err) + } + break + } + + if err := consumer.Consume(cursor.Current); err != nil { + return err1.ErrInternalBy(err) + } + } + return nil +} + +func (c *Client) FindOne(ctx context.Context, col string, filter interface{}, consumer Consumer) error { + raw, err := c.Collection(col).FindOne(ctx, filter).DecodeBytes() + if errors.Is(err, mongo.ErrNilDocument) || errors.Is(err, 
mongo.ErrNoDocuments) { + return err1.ErrNotFound + } + if err := consumer.Consume(raw); err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (c *Client) Count(ctx context.Context, col string, filter interface{}) (int64, error) { + count, err := c.Collection(col).CountDocuments(ctx, filter) + if err != nil { + return count, err1.ErrInternalBy(err) + } + return count, nil +} + +func (c *Client) RemoveAll(ctx context.Context, col string, ids []string) error { + if len(ids) == 0 { + return nil + } + filter := bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: ids}, + }}, + } + _, err := c.Collection(col).DeleteMany(ctx, filter) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (c *Client) RemoveOne(ctx context.Context, col string, id string) error { + _, err := c.Collection(col).DeleteOne(ctx, bson.D{{Key: "id", Value: id}}) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +var ( + upsert = true + replaceOption = &options.ReplaceOptions{ + Upsert: &upsert, + } +) + +func (c *Client) SaveOne(ctx context.Context, col string, id string, replacement interface{}) error { + _, err := c.Collection(col).ReplaceOne(ctx, bson.D{{Key: "id", Value: id}}, replacement, replaceOption) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates []interface{}) error { + if len(ids) == 0 || len(updates) == 0 { + return nil + } + if len(ids) != len(updates) { + return err1.ErrInternalBy(errors.New("invalid save args")) + } + + writeModels := make([]mongo.WriteModel, 0, len(updates)) + for i, u := range updates { + id := ids[i] + writeModels = append(writeModels, &mongo.ReplaceOneModel{ + Upsert: &upsert, + Filter: bson.D{{Key: "id", Value: id}}, + Replacement: u, + }) + } + + _, err := c.Collection(col).BulkWrite(ctx, writeModels) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func 
getCursor(raw bson.Raw, key string) (*usecase.Cursor, error) { + val, err := raw.LookupErr(key) + if err != nil { + return nil, fmt.Errorf("failed to lookup cursor: %v", err.Error()) + } + var s string + if err := val.Unmarshal(&s); err != nil { + return nil, fmt.Errorf("failed to unmarshal cursor: %v", err.Error()) + } + c := usecase.Cursor(s) + return &c, nil +} + +func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p *usecase.Pagination, consumer Consumer) (*usecase.PageInfo, error) { + if p == nil { + return nil, nil + } + coll := c.Collection(col) + + key := "id" + + count, err := coll.CountDocuments(ctx, filter) + if err != nil { + return nil, fmt.Errorf("failed to count documents: %v", err.Error()) + } + + reverse := false + var limit int64 + findOptions := options.Find() + if first := p.First; first != nil { + limit = int64(*first) + findOptions.Sort = bson.D{ + {Key: key, Value: 1}, + } + if after := p.After; after != nil { + filter = appendE(filter, bson.E{Key: key, Value: bson.D{ + {Key: "$gt", Value: *after}, + }}) + } + } + if last := p.Last; last != nil { + reverse = true + limit = int64(*last) + findOptions.Sort = bson.D{ + {Key: key, Value: -1}, + } + if before := p.Before; before != nil { + filter = appendE(filter, bson.E{Key: key, Value: bson.D{ + {Key: "$lt", Value: *before}, + }}) + } + } + // ๆ›ดใซ่ชญใ‚ใ‚‹่ฆ็ด ใŒใ‚ใ‚‹ใฎใ‹็ขบใ‹ใ‚ใ‚‹ใŸใ‚ใซไธ€ใคๅคšใ‚ใซ่ชญใฟๅ‡บใ™ + // Read one more element so that we can see whether there's a further one + limit++ + findOptions.Limit = &limit + + cursor, err := coll.Find(ctx, filter, findOptions) + if err != nil { + return nil, fmt.Errorf("failed to find: %v", err.Error()) + } + defer func() { + _ = cursor.Close(ctx) + }() + + results := make([]bson.Raw, 0, limit) + for cursor.Next(ctx) { + raw := make(bson.Raw, len(cursor.Current)) + copy(raw, cursor.Current) + results = append(results, raw) + } + if err := cursor.Err(); err != nil { + return nil, fmt.Errorf("failed to 
read cursor: %v", err.Error()) + } + + hasMore := false + if len(results) == int(limit) { + hasMore = true + // ไฝ™่จˆใซ1ใค่ชญใ‚“ใ ๅˆ†ใ‚’ๅ–ใ‚Š้™คใ + results = results[:len(results)-1] + } + + if reverse { + for i := len(results)/2 - 1; i >= 0; i-- { + opp := len(results) - 1 - i + results[i], results[opp] = results[opp], results[i] + } + } + + for _, result := range results { + if err := consumer.Consume(result); err != nil { + return nil, err + } + } + + var startCursor, endCursor *usecase.Cursor + if len(results) > 0 { + sc, err := getCursor(results[0], key) + if err != nil { + return nil, fmt.Errorf("failed to get start cursor: %v", err.Error()) + } + startCursor = sc + ec, err := getCursor(results[len(results)-1], key) + if err != nil { + return nil, fmt.Errorf("failed to get end cursor: %v", err.Error()) + } + endCursor = ec + } + + // ref: https://facebook.github.io/relay/graphql/connections.htm#sec-undefined.PageInfo.Fields + // firstใŒ่จญๅฎšใ•ใ‚Œใฆใ„ใ‚‹ๅ ดๅˆใงๅ‰ใฎpageใŒใ‚ใ‚‹ใ‹ใฉใ†ใ‹ใฎๅˆคๅฎšใฏๅŠน็Ž‡็š„ใซ่กŒใˆใ‚‹ๅ ดๅˆไปฅๅค–ใฏfalseใ‚’่ฟ”ใ—ใฆใ‚ˆใ„ + // lastใŒ่จญๅฎšใ•ใ‚Œใฆใ„ใ‚‹ๅ ดๅˆใงๆฌกใฎpageใŒใ‚ใ‚‹ใ‹ใฉใ†ใ‹ใฎๅˆคๅฎšใฏๅŠน็Ž‡็š„ใซ่กŒใˆใ‚‹ๅ ดๅˆไปฅๅค–ใฏfalseใ‚’่ฟ”ใ—ใฆใ‚ˆใ„ + // ๆ—ขๅญ˜ใฎๅฎŸ่ฃ…ใงใฏๅŠน็Ž‡็š„ใซๆฑ‚ใ‚ใ‚‹ใ“ใจใŒใงใใชใ„ใฎใง็ตถๅฏพใซfalseใ‚’่ฟ”ใ™ + // If first is set, false can be returned unless it can be efficiently determined whether or not a previous page exists. + // If last is set, false can be returned unless it can be efficiently determined whether or not a next page exists. + // Returning absolutely false because the existing implementation cannot determine it efficiently. 
+ var hasNextPage, hasPreviousPage bool + switch { + case p.First != nil: + hasNextPage = hasMore + case p.Last != nil: + hasPreviousPage = hasMore + } + + return usecase.NewPageInfo(int(count), startCursor, endCursor, hasNextPage, hasPreviousPage), nil +} + +func (c *Client) CreateIndex(ctx context.Context, col string, keys []string) []string { + coll := c.Collection(col) + indexedKeys := indexes(ctx, coll) + + newIndexes := []mongo.IndexModel{} + for _, k := range append([]string{"id"}, keys...) { + if _, ok := indexedKeys[k]; !ok { + indexBg := true + unique := k == "id" + newIndexes = append(newIndexes, mongo.IndexModel{ + Keys: map[string]int{ + k: 1, + }, + Options: &options.IndexOptions{ + Background: &indexBg, + Unique: &unique, + }, + }) + } + } + + if len(newIndexes) > 0 { + index, err := coll.Indexes().CreateMany(ctx, newIndexes) + if err != nil { + panic(err) + } + return index + } + return nil +} + +func indexes(ctx context.Context, coll *mongo.Collection) map[string]struct{} { + c, err := coll.Indexes().List(ctx, nil) + if err != nil { + panic(err) + } + indexes := []struct{ Key map[string]int }{} + err = c.All(ctx, &indexes) + if err != nil { + panic(err) + } + keys := map[string]struct{}{} + for _, i := range indexes { + for k := range i.Key { + keys[k] = struct{}{} + } + } + return keys +} + +func (c *Client) Session() (mongo.Session, error) { + return c.client.StartSession() +} diff --git a/internal/infrastructure/mongo/mongodoc/clientcol.go b/internal/infrastructure/mongo/mongodoc/clientcol.go new file mode 100644 index 000000000..21916b4cf --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/clientcol.go @@ -0,0 +1,53 @@ +package mongodoc + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "go.mongodb.org/mongo-driver/mongo" +) + +type ClientCollection struct { + Client *Client + CollectionName string +} + +func (c *ClientCollection) Collection() *mongo.Collection { + return 
c.Client.Collection(c.CollectionName) +} + +func (c *ClientCollection) FindOne(ctx context.Context, filter interface{}, consumer Consumer) error { + return c.Client.FindOne(ctx, c.CollectionName, filter, consumer) +} + +func (c *ClientCollection) Find(ctx context.Context, filter interface{}, consumer Consumer) error { + return c.Client.Find(ctx, c.CollectionName, filter, consumer) +} + +func (c *ClientCollection) Count(ctx context.Context, filter interface{}) (int64, error) { + return c.Client.Count(ctx, c.CollectionName, filter) +} + +func (c *ClientCollection) Paginate(ctx context.Context, filter interface{}, p *usecase.Pagination, consumer Consumer) (*usecase.PageInfo, error) { + return c.Client.Paginate(ctx, c.CollectionName, filter, p, consumer) +} + +func (c *ClientCollection) SaveOne(ctx context.Context, id string, replacement interface{}) error { + return c.Client.SaveOne(ctx, c.CollectionName, id, replacement) +} + +func (c *ClientCollection) SaveAll(ctx context.Context, ids []string, updates []interface{}) error { + return c.Client.SaveAll(ctx, c.CollectionName, ids, updates) +} + +func (c *ClientCollection) RemoveOne(ctx context.Context, id string) error { + return c.Client.RemoveOne(ctx, c.CollectionName, id) +} + +func (c *ClientCollection) RemoveAll(ctx context.Context, ids []string) error { + return c.Client.RemoveAll(ctx, c.CollectionName, ids) +} + +func (c *ClientCollection) CreateIndex(ctx context.Context, keys []string) []string { + return c.Client.CreateIndex(ctx, c.CollectionName, keys) +} diff --git a/internal/infrastructure/mongo/mongodoc/consumer.go b/internal/infrastructure/mongo/mongodoc/consumer.go new file mode 100644 index 000000000..0550bad31 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/consumer.go @@ -0,0 +1,42 @@ +package mongodoc + +import "go.mongodb.org/mongo-driver/bson" + +type Consumer interface { + // Consume ใงๆธกใ•ใ‚ŒใŸrawใฎๅ‚็…งใ‚’ใƒ•ใ‚ฃใƒผใƒซใƒ‰ใซๆŒใฃใฆใฏใ„ใ‘ใพใ›ใ‚“ + // MUST NOT HAVE A ROW 
REFERENCE PASSED BY Consume METHOD IN THE FIELD + Consume(raw bson.Raw) error +} + +type FuncConsumer func(raw bson.Raw) error + +func (c FuncConsumer) Consume(raw bson.Raw) error { + return c(raw) +} + +type BatchConsumer struct { + Size int + Rows []bson.Raw + Callback func([]bson.Raw) error +} + +func (c *BatchConsumer) Consume(raw bson.Raw) error { + size := c.Size + if size == 0 { + size = 10 + } + + if raw != nil { + c.Rows = append(c.Rows, raw) + } + + if raw == nil || len(c.Rows) >= size { + err := c.Callback(c.Rows) + c.Rows = []bson.Raw{} + if err != nil { + return err + } + } + + return nil +} diff --git a/internal/infrastructure/mongo/mongodoc/consumer_test.go b/internal/infrastructure/mongo/mongodoc/consumer_test.go new file mode 100644 index 000000000..98c59b1f0 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/consumer_test.go @@ -0,0 +1,60 @@ +package mongodoc + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" +) + +var _ Consumer = FuncConsumer(nil) + +func TestBatchConsumer(t *testing.T) { + c := &BatchConsumer{ + Size: 10, + Callback: func(r []bson.Raw) error { + assert.Equal(t, []bson.Raw{[]byte{0}, []byte{1}, []byte{2}, []byte{3}, []byte{4}}, r) + return nil + }, + } + + for i := 0; i < 5; i++ { + r := bson.Raw([]byte{byte(i)}) + assert.Nil(t, c.Consume(r)) + } + assert.Nil(t, c.Consume(nil)) +} + +func TestBatchConsumerWithManyRows(t *testing.T) { + counter := 0 + c := &BatchConsumer{ + Size: 1, + Callback: func(r []bson.Raw) error { + if counter >= 5 { + assert.Equal(t, []bson.Raw{}, r) + return nil + } + assert.Equal(t, []bson.Raw{[]byte{byte(counter)}}, r) + counter++ + return nil + }, + } + + for i := 0; i < 5; i++ { + r := bson.Raw([]byte{byte(i)}) + assert.Nil(t, c.Consume(r)) + } + assert.Nil(t, c.Consume(nil)) +} + +func TestBatchConsumerWithError(t *testing.T) { + c := &BatchConsumer{ + Size: 1, + Callback: func(r []bson.Raw) error { + return errors.New("hoge") + 
}, + } + + assert.EqualError(t, c.Consume(nil), "hoge") +} diff --git a/internal/infrastructure/mongo/mongodoc/dataset.go b/internal/infrastructure/mongo/mongodoc/dataset.go new file mode 100644 index 000000000..194e1ffc5 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/dataset.go @@ -0,0 +1,191 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetFieldDocument struct { + Field string + Type string + Value interface{} + Source string +} + +type DatasetDocument struct { + ID string + Source string + Schema string + Fields []*DatasetFieldDocument + Scene string +} + +type DatasetExtendedDocument struct { + DatasetDocument + Graph []*DatasetExtendedDocument + Depth int +} + +type DatasetConsumer struct { + Rows []*dataset.Dataset +} + +func (c *DatasetConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc DatasetDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + dataset, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, dataset) + return nil +} + +type DatasetMapConsumer struct { + Map dataset.Map +} + +func (c *DatasetMapConsumer) Consume(raw bson.Raw) error { + var doc DatasetDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + m, err := doc.Model() + if err != nil { + return err + } + if c.Map == nil { + c.Map = map[id.DatasetID]*dataset.Dataset{ + m.ID(): m, + } + } else { + c.Map[m.ID()] = m + } + return nil +} + +type DatasetBatchConsumer struct { + Size int + Callback func([]*dataset.Dataset) error + consumer *BatchConsumer +} + +func (c *DatasetBatchConsumer) Consume(raw bson.Raw) error { + if c.consumer == nil { + c.consumer = &BatchConsumer{ + Size: c.Size, + Callback: func(rows []bson.Raw) error { + datasets := make([]*dataset.Dataset, 0, len(rows)) + + for _, r := range rows { + var doc 
DatasetDocument + if err := bson.Unmarshal(r, &doc); err != nil { + return err + } + dataset, err := doc.Model() + if err != nil { + return err + } + + datasets = append(datasets, dataset) + } + + return c.Callback(datasets) + }, + } + } + + return c.consumer.Consume(raw) +} + +func (doc *DatasetDocument) Model() (*dataset.Dataset, error) { + did, err := id.DatasetIDFrom(doc.ID) + if err != nil { + return nil, err + } + scene, err := id.SceneIDFrom(doc.Scene) + if err != nil { + return nil, err + } + ds, err := id.DatasetSchemaIDFrom(doc.Schema) + if err != nil { + return nil, err + } + fields := make([]*dataset.Field, 0, len(doc.Fields)) + for _, field := range doc.Fields { + fid, err := id.DatasetSchemaFieldIDFrom(field.Field) + if err != nil { + return nil, err + } + f := dataset.NewField( + fid, + toModelDatasetValue(field.Value, field.Type), + dataset.Source(field.Source), + ) + fields = append(fields, f) + } + return dataset.New(). + ID(did). + Source(dataset.Source(doc.Source)). + Fields(fields). + Schema(ds). + Scene(scene). 
+ Build() +} + +func NewDataset(dataset *dataset.Dataset) (*DatasetDocument, string) { + did := dataset.ID().String() + var doc DatasetDocument + doc.ID = did + doc.Source = dataset.Source().String() + doc.Scene = id.ID(dataset.Scene()).String() + doc.Schema = id.ID(dataset.Schema()).String() + + fields := dataset.Fields() + doc.Fields = make([]*DatasetFieldDocument, 0, len(fields)) + for _, f := range fields { + doc.Fields = append(doc.Fields, &DatasetFieldDocument{ + Field: f.Field().String(), + Type: string(f.Type()), + Value: f.Value().Interface(), + Source: f.Source().String(), + }) + } + return &doc, did +} + +func NewDatasets(datasets []*dataset.Dataset) ([]interface{}, []string) { + res := make([]interface{}, 0, len(datasets)) + ids := make([]string, 0, len(datasets)) + for _, d := range datasets { + if d == nil { + continue + } + r, id := NewDataset(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func toModelDatasetValue(v interface{}, t string) *dataset.Value { + if v == nil { + return nil + } + if v2, ok := v.(bson.D); ok { + v = v2.Map() + } + vt, ok := dataset.ValueTypeFrom(t) + if !ok { + return nil + } + return vt.ValueFrom(v) +} diff --git a/internal/infrastructure/mongo/mongodoc/dataset_schema.go b/internal/infrastructure/mongo/mongodoc/dataset_schema.go new file mode 100644 index 000000000..0ac53fdc2 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/dataset_schema.go @@ -0,0 +1,134 @@ +package mongodoc + +import ( + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetSchemaDocument struct { + ID string + Source string + Name string + Fields []*DatasetSchemaFieldDocument + RepresentativeField *string + Scene string + Dynamic bool +} + +type DatasetSchemaFieldDocument struct { + ID string + Name string + Type string + Source string +} + +type DatasetSchemaConsumer struct { + Rows []*dataset.Schema +} 
+ +func (c *DatasetSchemaConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc DatasetSchemaDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + dataset, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, dataset) + return nil +} + +func (d *DatasetSchemaDocument) Model() (*dataset.Schema, error) { + did, err := id.DatasetSchemaIDFrom(d.ID) + if err != nil { + return nil, err + } + scene, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + + fields := make([]*dataset.SchemaField, 0, len(d.Fields)) + for _, field := range d.Fields { + fid, err := id.DatasetSchemaFieldIDFrom(field.ID) + if err != nil { + return nil, err + } + vt, ok := dataset.ValueType(field.Type).Validate() + if !ok { + return nil, errors.New("invalid value type") + } + f, err := dataset.NewSchemaField(). + Name(field.Name). + ID(fid). + Type(vt). + Source(dataset.Source(field.Source)). + Build() + if err != nil { + return nil, err + } + fields = append(fields, f) + } + b := dataset.NewSchema(). + ID(did). + Name(d.Name). + Source(dataset.Source(d.Source)). + Scene(scene). 
+ Fields(fields) + if d.RepresentativeField != nil { + dsfid, err := id.DatasetSchemaFieldIDFrom(*d.RepresentativeField) + if err != nil { + return nil, err + } + b.RepresentativeField(dsfid) + } + return b.Build() +} + +func NewDatasetSchema(dataset *dataset.Schema) (*DatasetSchemaDocument, string) { + did := dataset.ID().String() + doc := DatasetSchemaDocument{ + ID: did, + Name: dataset.Name(), + Source: dataset.Source().String(), + Scene: id.ID(dataset.Scene()).String(), + RepresentativeField: dataset.RepresentativeFieldID().StringRef(), + Dynamic: dataset.Dynamic(), + } + + fields := dataset.Fields() + doc.Fields = make([]*DatasetSchemaFieldDocument, 0, len(fields)) + for _, f := range fields { + doc.Fields = append(doc.Fields, &DatasetSchemaFieldDocument{ + ID: f.ID().String(), + Type: string(f.Type()), + Name: f.Name(), + Source: f.Source().String(), + }) + } + + return &doc, did +} + +func NewDatasetSchemas(datasetSchemas []*dataset.Schema) ([]interface{}, []string) { + res := make([]interface{}, 0, len(datasetSchemas)) + ids := make([]string, 0, len(datasetSchemas)) + for _, d := range datasetSchemas { + if d == nil { + continue + } + r, id := NewDatasetSchema(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go new file mode 100644 index 000000000..bcc4cfdc5 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -0,0 +1,263 @@ +package mongodoc + +import ( + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" +) + +type LayerInfoboxFieldDocument struct { + ID string + Plugin string + Extension string + Property string +} + +type LayerInfoboxDocument struct { + Property string + Fields []LayerInfoboxFieldDocument +} + +type LayerItemDocument struct { + LinkedDataset *string +} + +type LayerGroupDocument struct { + 
Layers []string + LinkedDatasetSchema *string + Root bool +} + +type LayerDocument struct { + ID string + Name string + Visible bool + Scene string + Plugin *string + Extension *string + Property *string + Infobox *LayerInfoboxDocument + Item *LayerItemDocument + Group *LayerGroupDocument +} + +type LayerConsumer struct { + Rows []*layer.Layer + GroupRows []*layer.Group + ItemRows []*layer.Item +} + +func (c *LayerConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc LayerDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + li, lg, err := doc.Model() + if err != nil { + return err + } + if li != nil { + var layer layer.Layer = li + c.Rows = append(c.Rows, &layer) + c.ItemRows = append(c.ItemRows, li) + } + if lg != nil { + var layer layer.Layer = lg + c.Rows = append(c.Rows, &layer) + c.GroupRows = append(c.GroupRows, lg) + } + return nil +} + +func NewLayer(l layer.Layer) (*LayerDocument, string) { + var group *LayerGroupDocument + var item *LayerItemDocument + var infobox *LayerInfoboxDocument + + if lg := layer.GroupFromLayer(l); lg != nil { + group = &LayerGroupDocument{ + Layers: id.LayerIDToKeys(lg.Layers().Layers()), + LinkedDatasetSchema: lg.LinkedDatasetSchema().StringRef(), + Root: lg.IsRoot(), + } + } + + if li := layer.ItemFromLayer(l); li != nil { + item = &LayerItemDocument{ + LinkedDataset: li.LinkedDataset().StringRef(), + } + } + + if ib := l.Infobox(); ib != nil { + ibfields := ib.Fields() + fields := make([]LayerInfoboxFieldDocument, 0, len(ibfields)) + for _, f := range ibfields { + fields = append(fields, LayerInfoboxFieldDocument{ + ID: f.ID().String(), + Plugin: f.Plugin().String(), + Extension: string(f.Extension()), + Property: f.Property().String(), + }) + } + infobox = &LayerInfoboxDocument{ + Property: ib.Property().String(), + Fields: fields, + } + } + + id := l.ID().String() + return &LayerDocument{ + ID: id, + Name: l.Name(), + Visible: l.IsVisible(), + Scene: 
l.Scene().String(),
+		Infobox:   infobox,
+		Group:     group,
+		Item:      item,
+		Plugin:    l.Plugin().StringRef(),
+		Extension: l.Extension().StringRef(),
+		Property:  l.Property().StringRef(),
+	}, id
+}
+
+func NewLayers(layers layer.List) ([]interface{}, []string) {
+	res := make([]interface{}, 0, len(layers))
+	ids := make([]string, 0, len(layers))
+	for _, d := range layers {
+		if d == nil {
+			continue
+		}
+		r, id := NewLayer(*d)
+		res = append(res, r)
+		ids = append(ids, id)
+	}
+	return res, ids
+}
+
+func (d *LayerDocument) Model() (*layer.Item, *layer.Group, error) {
+	if d.Item != nil {
+		li, err := d.ModelItem()
+		if err != nil {
+			return nil, nil, err
+		}
+		return li, nil, nil
+	}
+	if d.Group != nil {
+		lg, err := d.ModelGroup()
+		if err != nil {
+			return nil, nil, err
+		}
+		return nil, lg, nil
+	}
+	return nil, nil, errors.New("invalid layer")
+}
+
+func (d *LayerDocument) ModelItem() (*layer.Item, error) {
+	lid, err := id.LayerIDFrom(d.ID)
+	if err != nil {
+		return nil, err
+	}
+	sid, err := id.SceneIDFrom(d.Scene)
+	if err != nil {
+		return nil, err
+	}
+	ib, err2 := ToModelInfobox(d.Infobox)
+	if err2 != nil {
+		return nil, err2
+	}
+
+	return layer.NewItem().
+		ID(lid).
+		Name(d.Name).
+		IsVisible(d.Visible).
+		Plugin(id.PluginIDFromRef(d.Plugin)).
+		Extension(id.PluginExtensionIDFromRef(d.Extension)).
+		Property(id.PropertyIDFromRef(d.Property)).
+		Infobox(ib).
+		Scene(sid).
+		// item
+		LinkedDataset(id.DatasetIDFromRef(d.Item.LinkedDataset)).
+ Build() +} + +func (d *LayerDocument) ModelGroup() (*layer.Group, error) { + lid, err := id.LayerIDFrom(d.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + ib, err2 := ToModelInfobox(d.Infobox) + if err2 != nil { + return nil, err2 + } + + ids := make([]id.LayerID, 0, len(d.Group.Layers)) + for _, lgid := range d.Group.Layers { + lid, err := id.LayerIDFrom(lgid) + if err != nil { + return nil, err + } + ids = append(ids, lid) + } + + return layer.NewGroup(). + ID(lid). + Name(d.Name). + IsVisible(d.Visible). + Plugin(id.PluginIDFromRef(d.Plugin)). + Extension(id.PluginExtensionIDFromRef(d.Extension)). + Property(id.PropertyIDFromRef(d.Property)). + Infobox(ib). + Scene(sid). + // group + Root(d.Group != nil && d.Group.Root). + Layers(layer.NewIDList(ids)). + LinkedDatasetSchema(id.DatasetSchemaIDFromRef(d.Group.LinkedDatasetSchema)). + Build() +} + +func ToModelInfobox(ib *LayerInfoboxDocument) (*layer.Infobox, error) { + if ib == nil { + return nil, nil + } + pid, err := id.PropertyIDFrom(ib.Property) + if err != nil { + return nil, err + } + fields := make([]*layer.InfoboxField, 0, len(ib.Fields)) + for _, f := range ib.Fields { + iid, err := id.InfoboxFieldIDFrom(f.ID) + if err != nil { + return nil, err + } + pid, err := id.PluginIDFrom(f.Plugin) + if err != nil { + return nil, err + } + prid, err := id.PropertyIDFrom(f.Property) + if err != nil { + return nil, err + } + ibf, err := layer.NewInfoboxField(). + ID(iid). + Plugin(pid). + Extension(id.PluginExtensionID(f.Extension)). + Property(prid). 
+ Build() + if err != nil { + return nil, err + } + fields = append(fields, ibf) + } + return layer.NewInfobox(fields, pid), nil +} diff --git a/internal/infrastructure/mongo/mongodoc/plugin.go b/internal/infrastructure/mongo/mongodoc/plugin.go new file mode 100644 index 000000000..6af6a0cd3 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/plugin.go @@ -0,0 +1,114 @@ +package mongodoc + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "go.mongodb.org/mongo-driver/bson" +) + +type PluginExtensionDocument struct { + ID string + Type string + Name map[string]string + Description map[string]string + Icon string + Schema string + Visualizer string +} + +type PluginDocument struct { + ID string + Name map[string]string + Author string + Description map[string]string + RepositoryURL string + Deprecated bool + Public bool + Extensions []PluginExtensionDocument + Schema *string +} + +type PluginConsumer struct { + Rows []*plugin.Plugin +} + +func (c *PluginConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc PluginDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + plugin, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, plugin) + return nil +} + +func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { + extensions := plugin.Extensions() + extensionsDoc := make([]PluginExtensionDocument, 0, len(extensions)) + for _, e := range extensions { + extensionsDoc = append(extensionsDoc, PluginExtensionDocument{ + ID: string(e.ID()), + Type: string(e.Type()), + Name: e.Name(), + Description: e.Description(), + Icon: e.Icon(), + Schema: e.Schema().String(), + Visualizer: string(e.Visualizer()), + }) + } + + pid := plugin.ID().String() + return &PluginDocument{ + ID: pid, + Name: plugin.Name(), + Description: plugin.Description(), + Author: plugin.Author(), + RepositoryURL: plugin.RepositoryURL(), + Extensions: 
extensionsDoc,
+		Schema:        plugin.Schema().StringRef(),
+	}, pid
+}
+
+func (d *PluginDocument) Model() (*plugin.Plugin, error) {
+	pid, err := id.PluginIDFrom(d.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	extensions := make([]*plugin.Extension, 0, len(d.Extensions))
+	for _, e := range d.Extensions {
+		psid, err := id.PropertySchemaIDFrom(e.Schema)
+		if err != nil {
+			return nil, err
+		}
+		extension, err := plugin.NewExtension().
+			ID(id.PluginExtensionID(e.ID)).
+			Type(plugin.ExtensionType(e.Type)).
+			Name(e.Name).
+			Description(e.Description).
+			Icon(e.Icon).
+			Schema(psid).
+			Build()
+		if err != nil {
+			return nil, err
+		}
+		extensions = append(extensions, extension)
+	}
+
+	return plugin.New().
+		ID(pid).
+		Name(d.Name).
+		Description(d.Description).
+		Author(d.Author).
+		RepositoryURL(d.RepositoryURL).
+		Extensions(extensions).
+		Schema(id.PropertySchemaIDFromRef(d.Schema)).
+		Build()
+}
diff --git a/internal/infrastructure/mongo/mongodoc/project.go b/internal/infrastructure/mongo/mongodoc/project.go
new file mode 100644
index 000000000..6e31f7d37
--- /dev/null
+++ b/internal/infrastructure/mongo/mongodoc/project.go
@@ -0,0 +1,120 @@
+package mongodoc
+
+import (
+	"net/url"
+	"time"
+
+	"go.mongodb.org/mongo-driver/bson"
+
+	"github.com/reearth/reearth-backend/pkg/id"
+	"github.com/reearth/reearth-backend/pkg/project"
+	"github.com/reearth/reearth-backend/pkg/visualizer"
+)
+
+type ProjectDocument struct {
+	ID                string
+	Archived          bool
+	UpdatedAt         time.Time
+	PublishedAt       time.Time
+	Name              string
+	Description       string
+	Alias             string
+	ImageURL          string
+	PublicTitle       string
+	PublicDescription string
+	PublicImage       string
+	PublicNoIndex     bool
+	Domains           []*ProjectDomainDocument
+	Team              string
+	Visualizer        string
+	PublishmentStatus string
+}
+
+type ProjectDomainDocument struct {
+	Domain string
+	Ready  bool
+}
+
+type ProjectConsumer struct {
+	Rows []*project.Project
+}
+
+func (c *ProjectConsumer) Consume(raw bson.Raw) error {
+	if raw == nil {
+		return nil
+	}
+
+ var doc ProjectDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + project, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, project) + return nil +} + +func NewProject(project *project.Project) (*ProjectDocument, string) { + pid := project.ID().String() + + imageURL := "" + if u := project.ImageURL(); u != nil { + imageURL = u.String() + } + + return &ProjectDocument{ + ID: pid, + Archived: project.IsArchived(), + UpdatedAt: project.UpdatedAt(), + PublishedAt: project.PublishedAt(), + Name: project.Name(), + Description: project.Description(), + Alias: project.Alias(), + ImageURL: imageURL, + PublicTitle: project.PublicTitle(), + PublicDescription: project.PublicDescription(), + PublicImage: project.PublicImage(), + PublicNoIndex: project.PublicNoIndex(), + Team: project.Team().String(), + Visualizer: string(project.Visualizer()), + PublishmentStatus: string(project.PublishmentStatus()), + }, pid +} + +func (d *ProjectDocument) Model() (*project.Project, error) { + pid, err := id.ProjectIDFrom(d.ID) + if err != nil { + return nil, err + } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + + var imageURL *url.URL + if d.ImageURL != "" { + if imageURL, err = url.Parse(d.ImageURL); err != nil { + imageURL = nil + } + } + + return project.New(). + ID(pid). + IsArchived(d.Archived). + UpdatedAt(d.UpdatedAt). + PublishedAt(d.PublishedAt). + Name(d.Name). + Description(d.Description). + Alias(d.Alias). + ImageURL(imageURL). + PublicTitle(d.PublicTitle). + PublicDescription(d.PublicDescription). + PublicImage(d.PublicImage). + PublicNoIndex(d.PublicNoIndex). + Team(tid). + Visualizer(visualizer.Visualizer(d.Visualizer)). + PublishmentStatus(project.PublishmentStatus(d.PublishmentStatus)). 
+ Build() +} diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go new file mode 100644 index 000000000..16eb1c3b4 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -0,0 +1,330 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +const ( + typePropertyItemGroup = "group" + typePropertyItemGroupList = "grouplist" +) + +type PropertyDocument struct { + ID string + Scene string + Schema string + Items []*PropertyItemDocument +} + +type PropertyFieldDocument struct { + Field string + Type string + Links []*PropertyLinkDocument + Value interface{} +} + +type PropertyLinkDocument struct { + Schema *string + Dataset *string + Field *string +} + +type PropertyItemDocument struct { + Type string + ID string + Schema string + SchemaGroup string + Groups []*PropertyItemDocument + Fields []*PropertyFieldDocument +} + +type PropertyConsumer struct { + Rows []*property.Property +} + +func (c *PropertyConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc PropertyDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + property, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, property) + return nil +} + +type PropertyBatchConsumer struct { + Size int + Callback func([]*property.Property) error + consumer *BatchConsumer +} + +func (c *PropertyBatchConsumer) Consume(raw bson.Raw) error { + if c.consumer == nil { + c.consumer = &BatchConsumer{ + Size: c.Size, + Callback: func(rows []bson.Raw) error { + properties := make([]*property.Property, 0, len(rows)) + + for _, r := range rows { + var doc PropertyDocument + if err := bson.Unmarshal(r, &doc); err != nil { + return err + } + property, err := doc.Model() + if err != nil { + return err + } + + properties = append(properties, property) + } 
+ + return c.Callback(properties) + }, + } + } + + return c.consumer.Consume(raw) +} + +func newPropertyField(f *property.Field) *PropertyFieldDocument { + if f == nil { + return nil + } + + field := &PropertyFieldDocument{ + Field: string(f.Field()), + Type: string(f.Type()), + Value: f.Value().Interface(), + } + + if links := f.Links().Links(); links != nil { + field.Links = make([]*PropertyLinkDocument, 0, len(links)) + for _, l := range links { + field.Links = append(field.Links, &PropertyLinkDocument{ + Schema: l.DatasetSchema().StringRef(), + Dataset: l.Dataset().StringRef(), + Field: l.DatasetSchemaField().StringRef(), + }) + } + } + + return field +} + +func newPropertyItem(f property.Item) *PropertyItemDocument { + if f == nil { + return nil + } + + t := "" + var items []*PropertyItemDocument + var fields []*PropertyFieldDocument + + if g := property.ToGroup(f); g != nil { + t = typePropertyItemGroup + pfields := g.Fields() + fields = make([]*PropertyFieldDocument, 0, len(pfields)) + for _, r := range pfields { + fields = append(fields, newPropertyField(r)) + } + } else if g := property.ToGroupList(f); g != nil { + t = typePropertyItemGroupList + pgroups := g.Groups() + items = make([]*PropertyItemDocument, 0, len(pgroups)) + for _, r := range pgroups { + items = append(items, newPropertyItem(r)) + } + } + + return &PropertyItemDocument{ + Type: t, + ID: f.ID().String(), + Schema: f.Schema().String(), + SchemaGroup: string(f.SchemaGroup()), + Groups: items, + Fields: fields, + } +} + +func NewProperty(property *property.Property) (*PropertyDocument, string) { + if property == nil { + return nil, "" + } + + pid := property.ID().String() + items := property.Items() + doc := PropertyDocument{ + ID: pid, + Schema: property.Schema().String(), + Items: make([]*PropertyItemDocument, 0, len(items)), + Scene: property.Scene().String(), + } + for _, f := range items { + doc.Items = append(doc.Items, newPropertyItem(f)) + } + return &doc, pid +} + +func 
NewProperties(properties []*property.Property) ([]interface{}, []string) { + if properties == nil { + return nil, nil + } + + res := make([]interface{}, 0, len(properties)) + ids := make([]string, 0, len(properties)) + for _, d := range properties { + if d == nil { + continue + } + r, id := NewProperty(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func toModelPropertyField(f *PropertyFieldDocument) *property.Field { + if f == nil { + return nil + } + + var flinks *property.Links + if f.Links != nil { + links := make([]*property.Link, 0, len(f.Links)) + for _, l := range f.Links { + var link *property.Link + d := id.DatasetIDFromRef(l.Dataset) + ds := id.DatasetSchemaIDFromRef(l.Schema) + df := id.DatasetSchemaFieldIDFromRef(l.Field) + if d != nil && ds != nil && df != nil { + link = property.NewLink(*d, *ds, *df) + } else if ds != nil && df != nil { + link = property.NewLinkFieldOnly(*ds, *df) + } else { + continue + } + links = append(links, link) + } + flinks = property.NewLinks(links) + } + + vt, _ := property.ValueTypeFrom(f.Type) + field := property.NewFieldUnsafe(). + FieldUnsafe(id.PropertySchemaFieldID(f.Field)). + TypeUnsafe(vt). + ValueUnsafe(toModelPropertyValue(f.Value, f.Type)). + LinksUnsafe(flinks). + Build() + + return field +} + +func toModelPropertyItem(f *PropertyItemDocument) (property.Item, error) { + if f == nil { + return nil, nil + } + + var i property.Item + var err error + var iid id.PropertyItemID + var sid id.PropertySchemaID + + iid, err = id.PropertyItemIDFrom(f.ID) + if err != nil { + return nil, err + } + sid, err = id.PropertySchemaIDFrom(f.Schema) + if err != nil { + return nil, err + } + gid := id.PropertySchemaFieldID(f.SchemaGroup) + + if f.Type == typePropertyItemGroup { + fields := make([]*property.Field, 0, len(f.Fields)) + for _, i := range f.Fields { + fields = append(fields, toModelPropertyField(i)) + } + + i, err = property.NewGroup(). + ID(iid). + Schema(sid, gid). + Fields(fields). 
+ Build() + } else if f.Type == typePropertyItemGroupList { + items := make([]*property.Group, 0, len(f.Groups)) + for _, i := range f.Groups { + i2, err := toModelPropertyItem(i) + if err != nil { + return nil, err + } + if i3 := property.ToGroup(i2); i3 != nil { + items = append(items, i3) + } + } + + i, err = property.NewGroupList(). + ID(iid). + Schema(sid, gid). + Groups(items). + Build() + } + + return i, err +} + +func (doc *PropertyDocument) Model() (*property.Property, error) { + if doc == nil { + return nil, nil + } + + pid, err := id.PropertyIDFrom(doc.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(doc.Scene) + if err != nil { + return nil, err + } + psid, err := id.PropertySchemaIDFrom(doc.Schema) + if err != nil { + return nil, err + } + + items := make([]property.Item, 0, len(doc.Items)) + for _, f := range doc.Items { + i, err := toModelPropertyItem(f) + if err != nil { + return nil, err + } + items = append(items, i) + } + + return property.New(). + ID(pid). + Scene(sid). + Schema(psid). + Items(items). 
+ Build() +} + +func toModelPropertyValue(v interface{}, t string) *property.Value { + if v == nil { + return nil + } + v = convertDToM(v) + vt, ok := property.ValueTypeFrom(t) + if !ok { + return nil + } + return vt.ValueFromUnsafe(v) +} diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go new file mode 100644 index 000000000..1a491bddd --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -0,0 +1,313 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type PropertySchemaDocument struct { + ID string + Version int + Groups []*PropertySchemaGroupDocument + LinkableFields *PropertyLinkableFieldsDocument +} + +type PropertySchemaFieldDocument struct { + ID string + Type string + Name map[string]string + Description map[string]string + Prefix string + Suffix string + DefaultValue interface{} + UI *string + Min *float64 + Max *float64 + Choices []PropertySchemaFieldChoiceDocument +} + +type PropertySchemaFieldChoiceDocument struct { + Key string + Label map[string]string +} + +type PropertyLinkableFieldsDocument struct { + LatLng *PropertyPointerDocument + URL *PropertyPointerDocument +} + +type PropertyPointerDocument struct { + SchemaGroupID *string + ItemID *string + FieldID *string +} + +type PropertyConditonDocument struct { + Field string + Type string + Value interface{} +} + +type PropertySchemaGroupDocument struct { + ID string + Fields []*PropertySchemaFieldDocument + List bool + IsAvailableIf *PropertyConditonDocument + Title map[string]string +} + +type PropertySchemaConsumer struct { + Rows []*property.Schema +} + +func (c *PropertySchemaConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc PropertySchemaDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + propertySchema, 
err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, propertySchema) + return nil +} + +func NewPropertySchemaField(f *property.SchemaField) *PropertySchemaFieldDocument { + if f == nil { + return nil + } + + field := &PropertySchemaFieldDocument{ + ID: string(f.ID()), + Name: f.Title(), + Suffix: f.Suffix(), + Prefix: f.Prefix(), + Description: f.Description(), + Type: string(f.Type()), + DefaultValue: f.DefaultValue().Value(), + UI: f.UI().StringRef(), + Min: f.Min(), + Max: f.Max(), + } + if choices := f.Choices(); choices != nil { + field.Choices = make([]PropertySchemaFieldChoiceDocument, 0, len(choices)) + for _, c := range choices { + field.Choices = append(field.Choices, PropertySchemaFieldChoiceDocument{ + Key: c.Key, + Label: c.Title, + }) + } + } + return field +} + +func NewPropertySchema(m *property.Schema) (*PropertySchemaDocument, string) { + if m == nil { + return nil, "" + } + + pgroups := m.Groups() + groups := make([]*PropertySchemaGroupDocument, 0, len(pgroups)) + for _, f := range pgroups { + groups = append(groups, newPropertySchemaGroup(f)) + } + + id := m.ID().String() + return &PropertySchemaDocument{ + ID: id, + Version: m.Version(), + Groups: groups, + LinkableFields: ToDocPropertyLinkableFields(m.LinkableFields()), + }, id +} + +func NewPropertySchemas(ps []*property.Schema) ([]interface{}, []string) { + if ps == nil { + return nil, nil + } + + res := make([]interface{}, 0, len(ps)) + ids := make([]string, 0, len(ps)) + for _, d := range ps { + if d == nil { + continue + } + r, id := NewPropertySchema(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} + +func ToModelPropertySchemaField(f *PropertySchemaFieldDocument) (*property.SchemaField, error) { + if f == nil { + return nil, nil + } + + var choices []property.SchemaFieldChoice + if f.Choices != nil { + choices = make([]property.SchemaFieldChoice, 0, len(f.Choices)) + for _, c := range f.Choices { + choices = append(choices, 
property.SchemaFieldChoice{ + Key: c.Key, + Title: c.Label, + }) + } + } + + vt := property.ValueType(f.Type) + return property.NewSchemaField(). + ID(id.PropertySchemaFieldID(f.ID)). + Type(vt). + Name(f.Name). + Description(f.Description). + Prefix(f.Prefix). + Suffix(f.Suffix). + DefaultValue(vt.ValueFromUnsafe(f.DefaultValue)). + UIRef(property.SchemaFieldUIFromRef(f.UI)). + MinRef(f.Min). + MaxRef(f.Max). + Choices(choices). + Build() +} + +func (doc *PropertySchemaDocument) Model() (*property.Schema, error) { + if doc == nil { + return nil, nil + } + + pid, err := id.PropertySchemaIDFrom(doc.ID) + if err != nil { + return nil, err + } + + groups := make([]*property.SchemaGroup, 0, len(doc.Groups)) + for _, g := range doc.Groups { + g2, err := toModelPropertySchemaGroup(g, pid) + if err != nil { + return nil, err + } + groups = append(groups, g2) + } + + return property.NewSchema(). + ID(pid). + Version(doc.Version). + Groups(groups). + LinkableFields(toModelPropertyLinkableFields(doc.LinkableFields)). 
+ Build() +} + +func newPropertyCondition(c *property.Condition) *PropertyConditonDocument { + if c == nil { + return nil + } + + return &PropertyConditonDocument{ + Field: string(c.Field), + Type: string(c.Value.Type()), + Value: c.Value.Interface(), + } +} + +func toModelPropertyCondition(d *PropertyConditonDocument) *property.Condition { + if d == nil { + return nil + } + + return &property.Condition{ + Field: id.PropertySchemaFieldID(d.Field), + Value: toModelPropertyValue(d.Value, d.Type), + } +} + +func newPropertySchemaGroup(p *property.SchemaGroup) *PropertySchemaGroupDocument { + if p == nil { + return nil + } + + pfields := p.Fields() + fields := make([]*PropertySchemaFieldDocument, 0, len(pfields)) + for _, f := range pfields { + fields = append(fields, NewPropertySchemaField(f)) + } + + return &PropertySchemaGroupDocument{ + ID: string(p.ID()), + List: p.IsList(), + IsAvailableIf: newPropertyCondition(p.IsAvailableIf()), + Title: p.Title(), + Fields: fields, + } +} + +func toModelPropertySchemaGroup(d *PropertySchemaGroupDocument, sid id.PropertySchemaID) (*property.SchemaGroup, error) { + if d == nil { + return nil, nil + } + + fields := make([]*property.SchemaField, 0, len(d.Fields)) + for _, f := range d.Fields { + field, err := ToModelPropertySchemaField(f) + if err != nil { + return nil, err + } + fields = append(fields, field) + } + + return property.NewSchemaGroup(). + ID(id.PropertySchemaFieldID(d.ID)). + Schema(sid). + IsList(d.List). + Title(d.Title). + IsAvailableIf(toModelPropertyCondition(d.IsAvailableIf)). + Fields(fields). 
+ Build() +} + +func ToDocPropertyLinkableFields(l property.LinkableFields) *PropertyLinkableFieldsDocument { + return &PropertyLinkableFieldsDocument{ + LatLng: newDocPropertyPointer(l.LatLng), + URL: newDocPropertyPointer(l.URL), + } +} + +func toModelPropertyLinkableFields(l *PropertyLinkableFieldsDocument) property.LinkableFields { + if l == nil { + return property.LinkableFields{} + } + return property.LinkableFields{ + LatLng: toModelPropertyPointer(l.LatLng), + URL: toModelPropertyPointer(l.URL), + } +} + +func toModelPropertyPointer(p *PropertyPointerDocument) *property.Pointer { + if p == nil { + return nil + } + return property.NewPointer( + id.PropertySchemaFieldIDFrom(p.SchemaGroupID), + id.PropertyItemIDFromRef(p.ItemID), + id.PropertySchemaFieldIDFrom(p.FieldID), + ) +} + +func newDocPropertyPointer(p *property.Pointer) *PropertyPointerDocument { + if p == nil { + return nil + } + schemaGroupID, itemID, fieldID := p.GetAll() + return &PropertyPointerDocument{ + SchemaGroupID: schemaGroupID.StringRef(), + ItemID: itemID.StringRef(), + FieldID: fieldID.StringRef(), + } +} diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go new file mode 100644 index 000000000..5c8f9e638 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -0,0 +1,230 @@ +package mongodoc + +import ( + "errors" + "time" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type SceneWidgetDocument struct { + ID string + Plugin string + Extension string + Property string + Enabled bool +} + +type ScenePluginDocument struct { + Plugin string + Property *string +} + +type SceneDocument struct { + ID string + Project string + Team string + RootLayer string + Widgets []SceneWidgetDocument + Plugins []ScenePluginDocument + UpdateAt time.Time + Property string +} + +type SceneConsumer struct { + Rows []*scene.Scene +} + +func (c 
*SceneConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc SceneDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + scene, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, scene) + return nil +} + +type SceneIDDocument struct { + ID string +} + +type SceneIDConsumer struct { + Rows []id.SceneID +} + +func (c *SceneIDConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc SceneIDDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + scene, err := id.SceneIDFrom(doc.ID) + if err != nil { + return err + } + c.Rows = append(c.Rows, scene) + return nil +} + +func NewScene(scene *scene.Scene) (*SceneDocument, string) { + widgets := scene.WidgetSystem().Widgets() + plugins := scene.PluginSystem().Plugins() + + widgetsDoc := make([]SceneWidgetDocument, 0, len(widgets)) + pluginsDoc := make([]ScenePluginDocument, 0, len(plugins)) + + for _, w := range widgets { + widgetsDoc = append(widgetsDoc, SceneWidgetDocument{ + ID: w.ID().String(), + Plugin: w.Plugin().String(), + Extension: string(w.Extension()), + Property: w.Property().String(), + Enabled: w.Enabled(), + }) + } + + for _, sp := range plugins { + pluginsDoc = append(pluginsDoc, ScenePluginDocument{ + Plugin: sp.Plugin().String(), + Property: sp.Property().StringRef(), + }) + } + + id := scene.ID().String() + return &SceneDocument{ + ID: id, + Project: scene.Project().String(), + Team: scene.Team().String(), + RootLayer: scene.RootLayer().String(), + Widgets: widgetsDoc, + Plugins: pluginsDoc, + UpdateAt: scene.UpdatedAt(), + Property: scene.Property().String(), + }, id +} + +func (d *SceneDocument) Model() (*scene.Scene, error) { + sid, err := id.SceneIDFrom(d.ID) + if err != nil { + return nil, err + } + projectID, err := id.ProjectIDFrom(d.Project) + if err != nil { + return nil, err + } + prid, err := id.PropertyIDFrom(d.Property) + if err != nil { + return nil, err 
+ } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + lid, err := id.LayerIDFrom(d.RootLayer) + if err != nil { + return nil, err + } + + ws := make([]*scene.Widget, 0, len(d.Widgets)) + ps := make([]*scene.Plugin, 0, len(d.Plugins)) + + for _, w := range d.Widgets { + pid, err := id.PluginIDFrom(w.Plugin) + if err != nil { + return nil, err + } + prid, err := id.PropertyIDFrom(w.Property) + if err != nil { + return nil, err + } + sw, err := scene.NewWidget( + id.WidgetIDFromRef(&w.ID), + pid, + id.PluginExtensionID(w.Extension), + prid, + w.Enabled, + ) + if err != nil { + return nil, err + } + ws = append(ws, sw) + } + + for _, p := range d.Plugins { + pid, err := id.PluginIDFrom(p.Plugin) + if err != nil { + return nil, err + } + ps = append(ps, scene.NewPlugin(pid, id.PropertyIDFromRef(p.Property))) + } + + return scene.New(). + ID(sid). + Project(projectID). + Team(tid). + RootLayer(lid). + WidgetSystem(scene.NewWidgetSystem(ws)). + PluginSystem(scene.NewPluginSystem(ps)). + UpdatedAt(d.UpdateAt). + Property(prid). 
+ Build() +} + +type SceneLockConsumer struct { + Rows []scene.LockMode +} + +type SceneLockDocument struct { + Scene string + Lock string +} + +func (c *SceneLockConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc SceneLockDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + _, sceneLock, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, sceneLock) + return nil +} + +func NewSceneLock(sceneID id.SceneID, lock scene.LockMode) *SceneLockDocument { + return &SceneLockDocument{ + Scene: sceneID.String(), + Lock: string(lock), + } +} + +func (d *SceneLockDocument) Model() (id.SceneID, scene.LockMode, error) { + sceneID, err := id.SceneIDFrom(d.Scene) + if err != nil { + return sceneID, scene.LockMode(""), err + } + sceneLock, ok := scene.LockMode(d.Lock).Validate() + if !ok { + return sceneID, sceneLock, errors.New("invalid scene lock mode") + } + return sceneID, sceneLock, nil +} diff --git a/internal/infrastructure/mongo/mongodoc/team.go b/internal/infrastructure/mongo/mongodoc/team.go new file mode 100644 index 000000000..261d6188f --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/team.go @@ -0,0 +1,93 @@ +package mongodoc + +import ( + "github.com/reearth/reearth-backend/pkg/id" + user1 "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" +) + +type TeamMemberDocument struct { + Role string +} + +type TeamDocument struct { + ID string + Name string + Members map[string]TeamMemberDocument + Personal bool +} + +type TeamConsumer struct { + Rows []*user1.Team +} + +func (c *TeamConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc TeamDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + project, err := doc.Model() + if err != nil { + return err + } + c.Rows = append(c.Rows, project) + return nil +} + +func NewTeam(team *user1.Team) (*TeamDocument, string) { + membersDoc := 
map[string]TeamMemberDocument{} + for user, r := range team.Members().Members() { + membersDoc[user.String()] = TeamMemberDocument{ + Role: string(r), + } + } + id := team.ID().String() + return &TeamDocument{ + ID: id, + Name: team.Name(), + Members: membersDoc, + Personal: team.IsPersonal(), + }, id +} + +func (d *TeamDocument) Model() (*user1.Team, error) { + tid, err := id.TeamIDFrom(d.ID) + if err != nil { + return nil, err + } + + members := map[id.UserID]user1.Role{} + if d.Members != nil { + for user, member := range d.Members { + uid, err := id.UserIDFrom(user) + if err != nil { + return nil, err + } + members[uid] = user1.Role(member.Role) + } + } + return user1.NewTeam(). + ID(tid). + Name(d.Name). + Members(members). + Personal(d.Personal). + Build() +} + +func NewTeams(teams []*user1.Team) ([]interface{}, []string) { + res := make([]interface{}, 0, len(teams)) + ids := make([]string, 0, len(teams)) + for _, d := range teams { + if d == nil { + continue + } + r, id := NewTeam(d) + res = append(res, r) + ids = append(ids, id) + } + return res, ids +} diff --git a/internal/infrastructure/mongo/mongodoc/user.go b/internal/infrastructure/mongo/mongodoc/user.go new file mode 100644 index 000000000..de3030c23 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/user.go @@ -0,0 +1,91 @@ +package mongodoc + +import ( + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + user1 "github.com/reearth/reearth-backend/pkg/user" +) + +type UserDocument struct { + ID string + Name string + Email string + Auth0Sub string + Auth0SubList []string + Team string + Lang string + Theme string +} + +type UserConsumer struct { + Rows []*user1.User +} + +func (u *UserConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc UserDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + user, err := doc.Model() + if err != nil { + return err + } 
+ u.Rows = append(u.Rows, user) + return nil +} + +func NewUser(user *user1.User) (*UserDocument, string) { + id := user.ID().String() + auths := user.Auths() + authsdoc := make([]string, 0, len(auths)) + for _, a := range auths { + authsdoc = append(authsdoc, a.Sub) + } + + return &UserDocument{ + ID: id, + Name: user.Name(), + Email: user.Email(), + Auth0SubList: authsdoc, + Team: user.Team().String(), + Lang: user.Lang().String(), + Theme: string(user.Theme()), + }, id +} + +func (d *UserDocument) Model() (*user1.User, error) { + uid, err := id.UserIDFrom(d.ID) + if err != nil { + return nil, err + } + tid, err := id.TeamIDFrom(d.Team) + if err != nil { + return nil, err + } + auths := make([]user.Auth, 0, len(d.Auth0SubList)) + for _, s := range d.Auth0SubList { + auths = append(auths, user.AuthFromAuth0Sub(s)) + } + if d.Auth0Sub != "" { + auths = append(auths, user.AuthFromAuth0Sub(d.Auth0Sub)) + } + user, err := user1.New(). + ID(uid). + Name(d.Name). + Email(d.Email). + Auths(auths). + Team(tid). + LangFrom(d.Lang). + Theme(user.Theme(d.Theme)). 
+ Build() + if err != nil { + return nil, err + } + return user, nil +} diff --git a/internal/infrastructure/mongo/mongodoc/util.go b/internal/infrastructure/mongo/mongodoc/util.go new file mode 100644 index 000000000..0b1051762 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/util.go @@ -0,0 +1,33 @@ +package mongodoc + +import "go.mongodb.org/mongo-driver/bson" + +func convertDToM(i interface{}) interface{} { + switch i2 := i.(type) { + case bson.D: + return i2.Map() + case bson.A: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + } + return i +} + +func appendE(f interface{}, elements ...bson.E) interface{} { + switch f2 := f.(type) { + case bson.D: + for _, e := range elements { + f2 = append(f2, e) + } + return f2 + case bson.M: + for _, e := range elements { + f2[e.Key] = e.Value + } + return f2 + } + return f +} diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go new file mode 100644 index 000000000..86558212e --- /dev/null +++ b/internal/infrastructure/mongo/plugin.go @@ -0,0 +1,146 @@ +package mongo + +import ( + "context" + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type pluginRepo struct { + client *mongodoc.ClientCollection +} + +func NewPlugin(client *mongodoc.Client) repo.Plugin { + r := &pluginRepo{client: client.WithCollection("plugin")} + r.init() + return r +} + +func (r *pluginRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "plugin", i) + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, id 
id.PluginID) (*plugin.Plugin, error) {
+	if p := builtin.GetPlugin(id); p != nil {
+		return p, nil
+	}
+	filter := bson.D{
+		{Key: "id", Value: id.String()},
+	}
+	return r.findOne(ctx, filter)
+}
+
+func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) {
+	// exclude built-in
+	b := map[string]*plugin.Plugin{}
+	ids2 := make([]id.PluginID, 0, len(ids))
+	for _, id := range ids {
+		if p := builtin.GetPlugin(id); p != nil {
+			b[id.String()] = p
+		} else {
+			ids2 = append(ids2, id)
+		}
+	}
+
+	res := make([]*plugin.Plugin, 0, len(ids2))
+	var err error
+
+	if len(ids2) > 0 {
+		filter := bson.D{
+			{Key: "id", Value: bson.D{
+				{Key: "$in", Value: id.PluginIDToKeys(ids2)},
+			}},
+		}
+		dst := make([]*plugin.Plugin, 0, len(ids2))
+		res, err = r.find(ctx, dst, filter)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// combine built-in and mongo results
+	results := make([]*plugin.Plugin, 0, len(ids))
+	for _, id := range ids {
+		if p, ok := b[id.String()]; ok {
+			results = append(results, p)
+			continue
+		}
+		found := false
+		for _, p := range res {
+			if p != nil && p.ID().Equal(id) {
+				results = append(results, p)
+				found = true
+				break
+			}
+		}
+		if !found {
+			results = append(results, nil)
+		}
+	}
+
+	return filterPlugins(ids, results), nil
+}
+
+func (r *pluginRepo) Save(ctx context.Context, plugin *plugin.Plugin) error {
+	if plugin.ID().System() {
+		return errors.New("cannot save system plugin")
+	}
+	doc, id := mongodoc.NewPlugin(plugin)
+	return r.client.SaveOne(ctx, id, doc)
+}
+
+func (r *pluginRepo) find(ctx context.Context, dst []*plugin.Plugin, filter bson.D) ([]*plugin.Plugin, error) {
+	c := mongodoc.PluginConsumer{
+		Rows: dst,
+	}
+	if err := r.client.Find(ctx, filter, &c); err != nil {
+		return nil, err
+	}
+	return c.Rows, nil
+}
+
+func (r *pluginRepo) findOne(ctx context.Context, filter bson.D) (*plugin.Plugin, error) {
+	dst := make([]*plugin.Plugin, 0, 1)
+	c := mongodoc.PluginConsumer{
+		Rows: dst,
+	}
+	if 
err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +// func (r *pluginRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*plugin.Plugin, *usecase.PageInfo, error) { +// var c mongodoc.PluginConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, err1.ErrInternalBy(err2) +// } +// return c.Rows, pageInfo, nil +// } + +func filterPlugins(ids []id.PluginID, rows []*plugin.Plugin) []*plugin.Plugin { + res := make([]*plugin.Plugin, 0, len(ids)) + for _, id := range ids { + var r2 *plugin.Plugin + for _, r := range rows { + if r.ID().Equal(id) { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go new file mode 100644 index 000000000..51aa63230 --- /dev/null +++ b/internal/infrastructure/mongo/project.go @@ -0,0 +1,149 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/project" +) + +type projectRepo struct { + client *mongodoc.ClientCollection +} + +func NewProject(client *mongodoc.Client) repo.Project { + r := &projectRepo{client: client.WithCollection("project")} + r.init() + return r +} + +func (r *projectRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "project", i) + } +} + +func (r *projectRepo) FindByIDs(ctx context.Context, ids []id.ProjectID, f []id.TeamID) ([]*project.Project, error) { + filter := 
r.teamFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.ProjectIDToKeys(ids)}, + }}, + }, f) + dst := make([]*project.Project, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterProjects(ids, res), nil +} + +func (r *projectRepo) FindByID(ctx context.Context, id id.ProjectID, f []id.TeamID) (*project.Project, error) { + filter := r.teamFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findOne(ctx, filter) +} + +func (r *projectRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + filter := bson.D{ + {Key: "team", Value: id.String()}, + } + return r.paginate(ctx, filter, pagination) +} + +func (r *projectRepo) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { + var filter bson.D + + if name == "" { + return nil, nil + } + + filter = bson.D{ + {Key: "$or", Value: []bson.D{ + {{Key: "alias", Value: name}, {Key: "publishmentstatus", Value: "limited"}}, + {{Key: "domains.domain", Value: name}, {Key: "publishmentstatus", Value: "public"}}, + {{Key: "alias", Value: name}, {Key: "publishmentstatus", Value: "public"}}, + }}, + } + return r.findOne(ctx, filter) +} + +func (r *projectRepo) CountByTeam(ctx context.Context, team id.TeamID) (int, error) { + count, err := r.client.Count(ctx, bson.D{ + {Key: "team", Value: team.String()}, + }) + return int(count), err +} + +func (r *projectRepo) Save(ctx context.Context, project *project.Project) error { + doc, id := mongodoc.NewProject(project) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *projectRepo) Remove(ctx context.Context, id id.ProjectID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *projectRepo) find(ctx context.Context, dst []*project.Project, filter bson.D) ([]*project.Project, error) { + c := mongodoc.ProjectConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != 
nil { + return nil, err + } + return c.Rows, nil +} + +func (r *projectRepo) findOne(ctx context.Context, filter bson.D) (*project.Project, error) { + dst := make([]*project.Project, 0, 1) + c := mongodoc.ProjectConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func (r *projectRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + var c mongodoc.ProjectConsumer + pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) + if err2 != nil { + return nil, nil, err1.ErrInternalBy(err2) + } + return c.Rows, pageInfo, nil +} + +func filterProjects(ids []id.ProjectID, rows []*project.Project) []*project.Project { + res := make([]*project.Project, 0, len(ids)) + for _, id := range ids { + var r2 *project.Project + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (*projectRepo) teamFilter(filter bson.D, teams []id.TeamID) bson.D { + if teams == nil { + return filter + } + filter = append(filter, bson.E{ + Key: "team", + Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teams)}}, + }) + return filter +} diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go new file mode 100644 index 000000000..d5d4e4735 --- /dev/null +++ b/internal/infrastructure/mongo/property.go @@ -0,0 +1,170 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/property" + "go.mongodb.org/mongo-driver/bson" +) + +type propertyRepo struct { + client *mongodoc.ClientCollection +} + +func NewProperty(client 
*mongodoc.Client) repo.Property { + r := &propertyRepo{client: client.WithCollection("property")} + r.init() + return r +} + +func (r *propertyRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "property", i) + } +} + +func (r *propertyRepo) FindByID(ctx context.Context, id2 id.PropertyID, f []id.SceneID) (*property.Property, error) { + filter := r.sceneFilter(bson.D{{Key: "id", Value: id.ID(id2).String()}}, f) + return r.findOne(ctx, filter) +} + +func (r *propertyRepo) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { + filter := r.sceneFilter(bson.D{{Key: "id", Value: bson.D{{ + Key: "$in", Value: id.PropertyIDToKeys(ids), + }}}}, f) + dst := make(property.List, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterProperties(ids, res), nil +} + +func (r *propertyRepo) FindLinkedAll(ctx context.Context, id id.SceneID) (property.List, error) { + filter := bson.D{ + {Key: "scene", Value: id.String()}, + {Key: "fields", Value: bson.D{ + {Key: "$elemMatch", Value: bson.D{ + {Key: "links", Value: bson.D{{ + Key: "$not", Value: bson.D{{ + Key: "$size", Value: 0, + }}}, + }}, + }}, + }}, + } + return r.find(ctx, nil, filter) +} + +func (r *propertyRepo) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, did id.DatasetID) (property.List, error) { + sids := sid.String() + pids := did.String() + filter := bson.D{ + {Key: "$or", Value: []bson.D{ + {{Key: "fields.links.dataset", Value: pids}}, // for compatibility + {{Key: "items.fields.links.dataset", Value: pids}}, + {{Key: "items.groups.fields.links.dataset", Value: pids}}, + {{Key: "fields.links.schema", Value: sids}}, // for compatibility + {{Key: "items.fields.links.schema", Value: sids}}, + {{Key: "items.groups.fields.links.schema", Value: sids}}, + }}, + } + return r.find(ctx, nil, filter) +} + +func (r *propertyRepo) Save(ctx 
context.Context, property *property.Property) error { + doc, id := mongodoc.NewProperty(property) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *propertyRepo) SaveAll(ctx context.Context, properties property.List) error { + if len(properties) == 0 { + return nil + } + docs, ids := mongodoc.NewProperties(properties) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *propertyRepo) Remove(ctx context.Context, id id.PropertyID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *propertyRepo) RemoveAll(ctx context.Context, ids []id.PropertyID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.PropertyIDToKeys(ids)) +} + +func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + _, err := r.client.Collection().DeleteMany(ctx, filter) + if err != nil { + return err1.ErrInternalBy(err) + } + return nil +} + +func (r *propertyRepo) find(ctx context.Context, dst property.List, filter bson.D) (property.List, error) { + c := mongodoc.PropertyConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *propertyRepo) findOne(ctx context.Context, filter bson.D) (*property.Property, error) { + dst := make(property.List, 0, 1) + c := mongodoc.PropertyConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +// func (r *propertyRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (property.List, *usecase.PageInfo, error) { +// var c propertyConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, err1.ErrInternalBy(err2) +// } +// return c.rows, pageInfo, nil +// } + +func filterProperties(ids []id.PropertyID, rows property.List) property.List { + res := make(property.List, 
0, len(ids)) + for _, id := range ids { + var r2 *property.Property + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (*propertyRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { + if scenes == nil { + return filter + } + filter = append(filter, bson.E{ + Key: "scene", + Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + }) + return filter +} diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go new file mode 100644 index 000000000..9b079181c --- /dev/null +++ b/internal/infrastructure/mongo/property_schema.go @@ -0,0 +1,153 @@ +package mongo + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/property" + "go.mongodb.org/mongo-driver/bson" +) + +type propertySchemaRepo struct { + client *mongodoc.ClientCollection +} + +func NewPropertySchema(client *mongodoc.Client) repo.PropertySchema { + r := &propertySchemaRepo{client: client.WithCollection("propertySchema")} + r.init() + return r +} + +func (r *propertySchemaRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "propertySchema", i) + } +} + +func (r *propertySchemaRepo) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + if ps := builtin.GetPropertySchema(id); ps != nil { + return ps, nil + } + + filter := bson.D{{Key: "id", Value: id.String()}} + return r.findOne(ctx, filter) +} + +func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + // exclude built-in + b := 
map[string]*property.Schema{}
+	ids2 := make([]id.PropertySchemaID, 0, len(ids))
+	for _, id := range ids {
+		if p := builtin.GetPropertySchema(id); p != nil {
+			b[id.String()] = p
+		} else {
+			ids2 = append(ids2, id)
+		}
+	}
+
+	res := make(property.SchemaList, 0, len(ids2))
+	var err error
+
+	if len(ids2) > 0 {
+		filter := bson.D{{Key: "id", Value: bson.D{{
+			Key: "$in", Value: id.PropertySchemaIDToKeys(ids2),
+		}}}}
+		dst := make(property.SchemaList, 0, len(ids2))
+		res, err = r.find(ctx, dst, filter)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// combine built-in and mongo results
+	results := make(property.SchemaList, 0, len(ids))
+	for _, id := range ids {
+		if p, ok := b[id.String()]; ok {
+			results = append(results, p)
+			continue
+		}
+		found := false
+		for _, p := range res {
+			if p != nil && p.ID() == id {
+				results = append(results, p)
+				found = true
+				break
+			}
+		}
+		if !found {
+			results = append(results, nil)
+		}
+	}
+
+	return filterPropertySchemas(ids, results), nil
+}
+
+func (r *propertySchemaRepo) Save(ctx context.Context, m *property.Schema) error {
+	if m.ID().System() {
+		return errors.New("cannot save system property schema")
+	}
+
+	doc, id := mongodoc.NewPropertySchema(m)
+	return r.client.SaveOne(ctx, id, doc)
+}
+
+func (r *propertySchemaRepo) SaveAll(ctx context.Context, m property.SchemaList) error {
+	for _, ps := range m {
+		if ps.ID().System() {
+			return errors.New("cannot save system property schema")
+		}
+	}
+
+	if len(m) == 0 {
+		return nil
+	}
+
+	docs, ids := mongodoc.NewPropertySchemas(m)
+	return r.client.SaveAll(ctx, ids, docs)
+}
+
+func (r *propertySchemaRepo) find(ctx context.Context, dst property.SchemaList, filter bson.D) (property.SchemaList, error) {
+	c := mongodoc.PropertySchemaConsumer{
+		Rows: dst,
+	}
+	if err := r.client.Find(ctx, filter, &c); err != nil {
+		return nil, err
+	}
+	return c.Rows, nil
+}
+
+func (r *propertySchemaRepo) findOne(ctx context.Context, filter bson.D) (*property.Schema, error) {
+	dst 
:= make(property.SchemaList, 0, 1) + c := mongodoc.PropertySchemaConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterPropertySchemas(ids []id.PropertySchemaID, rows property.SchemaList) property.SchemaList { + res := make(property.SchemaList, 0, len(ids)) + for _, id := range ids { + var r2 *property.Schema + if ps := builtin.GetPropertySchema(id); ps != nil { + r2 = ps + } else { + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + } + res = append(res, r2) + } + return res +} diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go new file mode 100644 index 000000000..35feb65dd --- /dev/null +++ b/internal/infrastructure/mongo/scene.go @@ -0,0 +1,187 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/scene" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type sceneRepo struct { + client *mongodoc.ClientCollection +} + +func NewScene(client *mongodoc.Client) repo.Scene { + r := &sceneRepo{client: client.WithCollection("scene")} + r.init() + return r +} + +func (r *sceneRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "scene", i) + } +} + +func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*scene.Scene, error) { + filter := r.teamFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findOne(ctx, filter) +} + +func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) 
([]*scene.Scene, error) {
+	filter := r.teamFilter(bson.D{
+		{Key: "id", Value: bson.D{
+			{Key: "$in", Value: id.SceneIDToKeys(ids)},
+		}},
+	}, f)
+	dst := make([]*scene.Scene, 0, len(ids))
+	res, err := r.find(ctx, dst, filter)
+	if err != nil {
+		return nil, err
+	}
+	return filterScenes(ids, res), nil
+}
+
+func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamID) (*scene.Scene, error) {
+	filter := r.teamFilter(bson.D{
+		{Key: "project", Value: id.String()},
+	}, f)
+	return r.findOne(ctx, filter)
+}
+
+func (r *sceneRepo) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) {
+	filter := bson.D{
+		{Key: "team", Value: bson.D{
+			{Key: "$in", Value: id.TeamIDToKeys(teams)},
+		}},
+	}
+	c := mongodoc.SceneIDConsumer{
+		Rows: []id.SceneID{},
+	}
+	if err := r.client.Find(ctx, filter, &c); err != nil {
+		if err != mongo.ErrNilDocument && err != mongo.ErrNoDocuments {
+			return nil, err
+		}
+	}
+	return c.Rows, nil
+}
+
+func (r *sceneRepo) HasSceneTeam(ctx context.Context, sceneID id.SceneID, teamIDs []id.TeamID) (bool, error) {
+	filter := bson.D{
+		{Key: "id", Value: sceneID.String()},
+		{Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teamIDs)}}},
+	}
+	res, err2 := r.client.Collection().CountDocuments(ctx, filter)
+	if err2 != nil {
+		return false, err1.ErrInternalBy(err2)
+	}
+	return res == 1, nil
+}
+
+func (r *sceneRepo) HasScenesTeam(ctx context.Context, sceneIDs []id.SceneID, teamIDs []id.TeamID) ([]bool, error) {
+	cursor, err2 := r.client.Collection().Find(ctx, bson.D{
+		{Key: "id", Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(sceneIDs)}}},
+		{Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teamIDs)}}},
+	}, &options.FindOptions{
+		Projection: bson.D{{Key: "id", Value: 1}, {Key: "_id", Value: 0}},
+	})
+
+	if err2 != nil {
+		return nil, err1.ErrInternalBy(err2)
+	}
+
+	var res []struct{ ID string }
+	err2 = cursor.All(ctx, &res)
+	if err2 != nil {
+		return nil, err1.ErrInternalBy(err2)
+	}
+
+	res2 := make([]bool, 0, len(sceneIDs))
+	for _, sid := range sceneIDs {
+		ok := false
+		for _, r := range res {
+			if r.ID == sid.String() {
+				ok = true
+				break
+			}
+		}
+		res2 = append(res2, ok)
+	}
+
+	return res2, nil
+}
+
+func (r *sceneRepo) Save(ctx context.Context, scene *scene.Scene) error {
+	doc, id := mongodoc.NewScene(scene)
+	return r.client.SaveOne(ctx, id, doc)
+}
+
+func (r *sceneRepo) Remove(ctx context.Context, id id.SceneID) error {
+	return r.client.RemoveOne(ctx, id.String())
+}
+
+func (r *sceneRepo) find(ctx context.Context, dst []*scene.Scene, filter bson.D) ([]*scene.Scene, error) {
+	c := mongodoc.SceneConsumer{
+		Rows: dst,
+	}
+	if err := r.client.Find(ctx, filter, &c); err != nil {
+		return nil, err
+	}
+	return c.Rows, nil
+}
+
+func (r *sceneRepo) findOne(ctx context.Context, filter bson.D) (*scene.Scene, error) {
+	dst := make([]*scene.Scene, 0, 1)
+	c := mongodoc.SceneConsumer{
+		Rows: dst,
+	}
+	if err := r.client.FindOne(ctx, filter, &c); err != nil {
+		return nil, err
+	}
+	return c.Rows[0], nil
+}
+
+// func (r *sceneRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*scene.Scene, *usecase.PageInfo, error) {
+// var c mongodoc.SceneConsumer
+// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c)
+// if err2 != nil {
+// return nil, nil, err1.ErrInternalBy(err2)
+// }
+// return c.Rows, pageInfo, nil
+// }
+
+func filterScenes(ids []id.SceneID, rows []*scene.Scene) []*scene.Scene {
+	res := make([]*scene.Scene, 0, len(ids))
+	for _, id := range ids {
+		var r2 *scene.Scene
+		for _, r := range rows {
+			if r.ID() == id {
+				r2 = r
+				break
+			}
+		}
+		res = append(res, r2)
+	}
+	return res
+}
+
+func (*sceneRepo) teamFilter(filter bson.D, teams []id.TeamID) bson.D {
+	if teams == nil {
+		return filter
+	}
+	filter = append(filter, bson.E{
+		Key: "team",
+		Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teams)}},
+	})
+	return filter
+}
diff --git 
a/internal/infrastructure/mongo/scene_lock.go b/internal/infrastructure/mongo/scene_lock.go new file mode 100644 index 000000000..1229cf267 --- /dev/null +++ b/internal/infrastructure/mongo/scene_lock.go @@ -0,0 +1,75 @@ +package mongo + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type sceneLockRepo struct { + client *mongodoc.ClientCollection +} + +func NewSceneLock(client *mongodoc.Client) repo.SceneLock { + return &sceneLockRepo{client: client.WithCollection("sceneLock")} +} + +func (r *sceneLockRepo) GetLock(ctx context.Context, sceneID id.SceneID) (scene.LockMode, error) { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + var c mongodoc.SceneLockConsumer + if err2 := r.client.FindOne(ctx, filter, &c); err2 != nil { + if errors.Is(err2, err1.ErrNotFound) { + return scene.LockModeFree, nil + } + return scene.LockMode(""), err2 + } + return c.Rows[0], nil +} + +func (r *sceneLockRepo) GetAllLock(ctx context.Context, ids []id.SceneID) ([]scene.LockMode, error) { + filter := bson.D{ + {Key: "scene", Value: bson.D{ + {Key: "$in", Value: id.SceneIDToKeys(ids)}, + }}, + } + c := mongodoc.SceneLockConsumer{ + Rows: make([]scene.LockMode, 0, len(ids)), + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *sceneLockRepo) SaveLock(ctx context.Context, sceneID id.SceneID, lock scene.LockMode) error { + filter := bson.D{{Key: "scene", Value: sceneID.String()}} + doc := mongodoc.NewSceneLock(sceneID, lock) + upsert := true + if _, err2 := r.client.Collection().UpdateOne(ctx, filter, bson.D{ + 
{Key: "$set", Value: doc}, + }, &options.UpdateOptions{ + Upsert: &upsert, + }); err2 != nil { + return err1.ErrInternalBy(err2) + } + return nil +} + +func (r *sceneLockRepo) ReleaseAllLock(ctx context.Context) error { + if _, err2 := r.client.Collection().DeleteMany(ctx, bson.D{}); err2 != nil { + if err2 != mongo.ErrNilDocument && err2 != mongo.ErrNoDocuments { + return err1.ErrInternalBy(err2) + } + } + return nil +} diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go new file mode 100644 index 000000000..b2ae10819 --- /dev/null +++ b/internal/infrastructure/mongo/team.go @@ -0,0 +1,129 @@ +package mongo + +import ( + "context" + "strings" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" +) + +type teamRepo struct { + client *mongodoc.ClientCollection +} + +func NewTeam(client *mongodoc.Client) repo.Team { + r := &teamRepo{client: client.WithCollection("team")} + r.init() + return r +} + +func (r *teamRepo) init() { + i := r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "team", i) + } +} + +func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) ([]*user.Team, error) { + filter := bson.D{ + {Key: "members." 
+ strings.Replace(id.String(), ".", "", -1), Value: bson.D{ + {Key: "$exists", Value: true}, + }}, + } + return r.find(ctx, nil, filter) +} + +func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) ([]*user.Team, error) { + filter := bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.TeamIDToKeys(ids)}, + }}, + } + dst := make([]*user.Team, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterTeams(ids, res), nil +} + +func (r *teamRepo) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { + filter := bson.D{ + {Key: "id", Value: id.String()}, + } + return r.findOne(ctx, filter) +} + +func (r *teamRepo) Save(ctx context.Context, team *user.Team) error { + doc, id := mongodoc.NewTeam(team) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *teamRepo) SaveAll(ctx context.Context, teams []*user.Team) error { + if len(teams) == 0 { + return nil + } + docs, ids := mongodoc.NewTeams(teams) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *teamRepo) Remove(ctx context.Context, id id.TeamID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *teamRepo) RemoveAll(ctx context.Context, ids []id.TeamID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.TeamIDToKeys(ids)) +} + +func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) ([]*user.Team, error) { + c := mongodoc.TeamConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *teamRepo) findOne(ctx context.Context, filter bson.D) (*user.Team, error) { + dst := make([]*user.Team, 0, 1) + c := mongodoc.TeamConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +// func (r *teamRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*user.Team, *usecase.PageInfo, 
error) { +// var c mongodoc.TeamConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, err1.ErrInternalBy(err2) +// } +// return c.Rows, pageInfo, nil +// } + +func filterTeams(ids []id.TeamID, rows []*user.Team) []*user.Team { + res := make([]*user.Team, 0, len(ids)) + for _, id := range ids { + var r2 *user.Team + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} diff --git a/internal/infrastructure/mongo/transaction.go b/internal/infrastructure/mongo/transaction.go new file mode 100644 index 000000000..074394bd8 --- /dev/null +++ b/internal/infrastructure/mongo/transaction.go @@ -0,0 +1,63 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" +) + +type Transaction struct { + client *mongodoc.Client +} + +func NewTransaction(client *mongodoc.Client) repo.Transaction { + return &Transaction{ + client: client, + } +} + +func (t *Transaction) Begin() (repo.Tx, error) { + s, err := t.client.Session() + if err != nil { + return nil, err1.ErrInternalBy(err) + } + + if err := s.StartTransaction(&options.TransactionOptions{}); err != nil { + return nil, err1.ErrInternalBy(err) + } + + return &Tx{session: s, commit: false}, nil +} + +type Tx struct { + session mongo.Session + commit bool +} + +func (t *Tx) Commit() { + if t == nil { + return + } + t.commit = true +} + +func (t *Tx) End(ctx context.Context) error { + if t == nil { + return nil + } + + if t.commit { + if err := t.session.CommitTransaction(ctx); err != nil { + return err1.ErrInternalBy(err) + } + } else if err := t.session.AbortTransaction(ctx); err != nil { + return err1.ErrInternalBy(err) + } + + 
t.session.EndSession(ctx) + return nil +} diff --git a/internal/infrastructure/mongo/user.go b/internal/infrastructure/mongo/user.go new file mode 100644 index 000000000..1a8cf7271 --- /dev/null +++ b/internal/infrastructure/mongo/user.go @@ -0,0 +1,119 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" +) + +type userRepo struct { + client *mongodoc.ClientCollection +} + +func NewUser(client *mongodoc.Client) repo.User { + r := &userRepo{client: client.WithCollection("user")} + r.init() + return r +} + +func (r *userRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"email", "auth0sublist"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "user", i) + } +} + +func (r *userRepo) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { + filter := bson.D{{Key: "id", Value: bson.D{ + {Key: "$in", Value: id.UserIDToKeys(ids)}, + }}} + dst := make([]*user.User, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterUsers(ids, res), nil +} + +func (r *userRepo) FindByID(ctx context.Context, id2 id.UserID) (*user.User, error) { + filter := bson.D{{Key: "id", Value: id.ID(id2).String()}} + return r.findOne(ctx, filter) +} + +func (r *userRepo) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, error) { + filter := bson.D{ + {Key: "$or", Value: []bson.D{ + {{Key: "auth0sub", Value: auth0sub}}, + {{Key: "auth0sublist", Value: bson.D{ + {Key: "$elemMatch", Value: bson.D{ + {Key: "$eq", Value: auth0sub}, + }}, + }}}, + }}, + } + return r.findOne(ctx, filter) +} + +func (r *userRepo) FindByEmail(ctx context.Context, email string) (*user.User, error) { + 
filter := bson.D{{Key: "email", Value: email}} + return r.findOne(ctx, filter) +} + +func (r *userRepo) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { + filter := bson.D{{Key: "$or", Value: []bson.D{ + {{Key: "email", Value: nameOrEmail}}, + {{Key: "name", Value: nameOrEmail}}, + }}} + return r.findOne(ctx, filter) +} + +func (r *userRepo) Save(ctx context.Context, user *user.User) error { + doc, id := mongodoc.NewUser(user) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *userRepo) Remove(ctx context.Context, user id.UserID) error { + return r.client.RemoveOne(ctx, user.String()) +} + +func (r *userRepo) find(ctx context.Context, dst []*user.User, filter bson.D) ([]*user.User, error) { + c := mongodoc.UserConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows, nil +} + +func (r *userRepo) findOne(ctx context.Context, filter bson.D) (*user.User, error) { + dst := make([]*user.User, 0, 1) + c := mongodoc.UserConsumer{ + Rows: dst, + } + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} + +func filterUsers(ids []id.UserID, rows []*user.User) []*user.User { + res := make([]*user.User, 0, len(ids)) + for _, id := range ids { + var r2 *user.User + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} diff --git a/internal/infrastructure/npm/archive.go b/internal/infrastructure/npm/archive.go new file mode 100644 index 000000000..f8937e0d4 --- /dev/null +++ b/internal/infrastructure/npm/archive.go @@ -0,0 +1,73 @@ +package npm + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" + "strings" + + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" +) + +type archive struct { + gzipReader *gzip.Reader + tarReader *tar.Reader + name string + size int64 +} + +// NewArchive _ +func NewArchive(reader 
io.Reader, name string, size int64) file.Archive { + gzipReader, _ := gzip.NewReader(reader) + tarReader := tar.NewReader(gzipReader) + return &archive{ + gzipReader: gzipReader, + tarReader: tarReader, + name: name, + size: size, + } +} + +// Next _ +func (a *archive) Next() (f *file.File, derr error) { + var head *tar.Header + var err error + for { + head, err = a.tarReader.Next() + if errors.Is(err, io.EOF) { + derr = file.EOF + return + } + if err != nil { + derr = err1.ErrInternalBy(err) + return + } + if strings.HasPrefix(head.Name, "package/") { + break + } + } + f = &file.File{ + Content: a.tarReader, + Name: head.FileInfo().Name(), + Fullpath: strings.TrimPrefix(head.Name, "package/"), + Size: head.Size, + } + return +} + +// Close _ +func (a *archive) Close() error { + return a.gzipReader.Close() +} + +// Name _ +func (a *archive) Name() string { + return a.name +} + +// Size _ +func (a *archive) Size() int64 { + return a.size +} diff --git a/internal/infrastructure/npm/plugin_repository.go b/internal/infrastructure/npm/plugin_repository.go new file mode 100644 index 000000000..d79e8598e --- /dev/null +++ b/internal/infrastructure/npm/plugin_repository.go @@ -0,0 +1,97 @@ +package npm + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" +) + +const ( + manifestFilePath = "reearth.json" +) + +type pluginRepository struct { + registryURL string +} + +func NewPluginRepository() gateway.PluginRepository { + return &pluginRepository{ + registryURL: "https://registry.npmjs.org/", + } +} + +func NewPluginRepositoryOf(url string) gateway.PluginRepository { + return &pluginRepository{ + registryURL: url, + } +} + +func (r *pluginRepository) Data(ctx context.Context, id id.PluginID) 
(file.Archive, error) { + return r.getNpmTarball(ctx, id) +} + +// Manifest _ +func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manifest.Manifest, error) { + archive, err := r.getNpmTarball(ctx, id) + if err != nil { + return nil, err + } + + defer func() { + _ = archive.Close() + }() + + for { + f, err := archive.Next() + if errors.Is(err, file.EOF) { + break + } + if err != nil { + return nil, err1.ErrInternalBy(err) + } + if f.Fullpath == manifestFilePath { + manifest, err := manifest.Parse(f.Content) + if err != nil { + return nil, err + } + return manifest, nil + } + } + return nil, manifest.ErrFailedToParseManifest +} + +func (r *pluginRepository) getNpmTarball(ctx context.Context, id id.PluginID) (file.Archive, error) { + rawPkgName := id.Name() + pkgVersion := id.Version().String() + scopelessPkgName := id.Name() + if rawPkgName[0] == '@' { + splitted := strings.Split(rawPkgName, "/") + if len(splitted) > 1 { + scopelessPkgName = splitted[1] + } + } + tarballURL := fmt.Sprintf("%s%s/-/%s-%s.tgz", r.registryURL, rawPkgName, scopelessPkgName, pkgVersion) + + req, err := http.NewRequestWithContext(ctx, "GET", tarballURL, nil) + if err != nil { + return nil, gateway.ErrFailedToFetchPluiginRepositoryData + } + + res, err := http.DefaultClient.Do(req) + if err != nil || res.StatusCode != http.StatusOK { + if res != nil && res.StatusCode == http.StatusNotFound { + return nil, err1.ErrNotFound + } + return nil, gateway.ErrFailedToFetchPluiginRepositoryData + } + + return NewArchive(res.Body, fmt.Sprintf("%s-%s.tgz", rawPkgName, pkgVersion), res.ContentLength), nil +} diff --git a/internal/usecase/cursor.go b/internal/usecase/cursor.go new file mode 100644 index 000000000..f18fab832 --- /dev/null +++ b/internal/usecase/cursor.go @@ -0,0 +1,4 @@ +package usecase + +// Cursor _ +type Cursor string diff --git a/internal/usecase/gateway/authenticator.go b/internal/usecase/gateway/authenticator.go new file mode 100644 index 000000000..fa4fa0bbc --- /dev/null
+++ b/internal/usecase/gateway/authenticator.go @@ -0,0 +1,20 @@ +package gateway + +type AuthenticatorUpdateUserParam struct { + ID string + Name *string + Email *string + Password *string +} + +type AuthenticatorUser struct { + ID string + Name string + Email string + EmailVerified bool +} + +type Authenticator interface { + FetchUser(string) (AuthenticatorUser, error) + UpdateUser(AuthenticatorUpdateUserParam) (AuthenticatorUser, error) +} diff --git a/internal/usecase/gateway/container.go b/internal/usecase/gateway/container.go new file mode 100644 index 000000000..205988fa0 --- /dev/null +++ b/internal/usecase/gateway/container.go @@ -0,0 +1,9 @@ +package gateway + +type Container struct { + Authenticator Authenticator + Mailer Mailer + PluginRepository PluginRepository + DataSource DataSource + File File +} diff --git a/internal/usecase/gateway/datasouce.go b/internal/usecase/gateway/datasouce.go new file mode 100644 index 000000000..94d72bc9c --- /dev/null +++ b/internal/usecase/gateway/datasouce.go @@ -0,0 +1,18 @@ +package gateway + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrDataSourceInvalidURL error = errors.New("invalid url") +) + +type DataSource interface { + Fetch(context.Context, string, id.SceneID) ([]*dataset.Schema, []*dataset.Dataset, error) + IsURLValid(context.Context, string) bool +} diff --git a/internal/usecase/gateway/file.go b/internal/usecase/gateway/file.go new file mode 100644 index 000000000..e333e4589 --- /dev/null +++ b/internal/usecase/gateway/file.go @@ -0,0 +1,31 @@ +package gateway + +import ( + "context" + "errors" + "io" + "net/url" + + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +var ( + ErrInvalidFile error = errors.New("invalid file") + ErrFailedToUploadFile error = errors.New("failed to upload file") + ErrFileTooLarge 
error = errors.New("file too large") + ErrFailedToRemoveFile error = errors.New("failed to remove file") +) + +type File interface { + ReadAsset(context.Context, string) (io.Reader, error) + ReadPluginFile(context.Context, id.PluginID, string) (io.Reader, error) + ReadBuiltSceneFile(context.Context, string) (io.Reader, error) + UploadAsset(context.Context, *file.File) (*url.URL, error) + UploadAndExtractPluginFiles(context.Context, file.Archive, *plugin.Plugin) (*url.URL, error) + UploadBuiltScene(context.Context, io.Reader, string) error + MoveBuiltScene(context.Context, string, string) error + RemoveAsset(context.Context, *url.URL) error + RemoveBuiltScene(context.Context, string) error +} diff --git a/internal/usecase/gateway/mailer.go b/internal/usecase/gateway/mailer.go new file mode 100644 index 000000000..27f530855 --- /dev/null +++ b/internal/usecase/gateway/mailer.go @@ -0,0 +1,5 @@ +package gateway + +type Mailer interface { + SendMail(to, content string) error +} diff --git a/internal/usecase/gateway/plugin_repository.go b/internal/usecase/gateway/plugin_repository.go new file mode 100644 index 000000000..9212e61a5 --- /dev/null +++ b/internal/usecase/gateway/plugin_repository.go @@ -0,0 +1,19 @@ +package gateway + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" +) + +var ( + ErrFailedToFetchPluiginRepositoryData error = errors.New("failed to fetch repository data") +) + +type PluginRepository interface { + Manifest(context.Context, id.PluginID) (*manifest.Manifest, error) + Data(context.Context, id.PluginID) (file.Archive, error) +} diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go new file mode 100644 index 000000000..92becb9f8 --- /dev/null +++ b/internal/usecase/interactor/asset.go @@ -0,0 +1,120 @@ +package interactor + +import ( + "context" + "net/url" + + 
"github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Asset struct { + common + assetRepo repo.Asset + teamRepo repo.Team + transaction repo.Transaction + file gateway.File +} + +func NewAsset(r *repo.Container, gr *gateway.Container) interfaces.Asset { + return &Asset{ + assetRepo: r.Asset, + teamRepo: r.Team, + transaction: r.Transaction, + file: gr.File, + } +} + +func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { + if err := i.CanWriteTeam(inp.TeamID, operator); err != nil { + return nil, err + } + + if inp.File == nil { + return nil, interfaces.ErrFileNotIncluded + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + url, err := i.file.UploadAsset(ctx, inp.File) + if err != nil { + return nil, err + } + + result, err = asset.New(). + NewID(). + Team(inp.TeamID). + Name(inp.File.Name). + Size(inp.File.Size). + URL(url.String()). 
+ Build() + if err != nil { + return nil, err + } + + if err = i.assetRepo.Save(ctx, result); err != nil { + return + } + + tx.Commit() + return +} + +func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { + asset, err := i.assetRepo.FindByID(ctx, aid) + if err != nil { + return aid, err + } + + if err := i.CanWriteTeam(asset.Team(), operator); err != nil { + return aid, err + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + team, err := i.teamRepo.FindByID(ctx, asset.Team()) + if err != nil { + return aid, err + } + + if !team.Members().ContainsUser(operator.User) { + return aid, interfaces.ErrOperationDenied + } + + if url, _ := url.Parse(asset.URL()); url != nil { + if err = i.file.RemoveAsset(ctx, url); err != nil { + return aid, err + } + } + + if err = i.assetRepo.Remove(ctx, aid); err != nil { + return + } + + tx.Commit() + return aid, nil +} + +func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { + if err := i.CanReadTeam(tid, operator); err != nil { + return nil, nil, err + } + + return i.assetRepo.FindByTeam(ctx, tid, p) +} diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go new file mode 100644 index 000000000..714ff38f0 --- /dev/null +++ b/internal/usecase/interactor/common.go @@ -0,0 +1,243 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + 
"github.com/reearth/reearth-backend/pkg/scene" +) + +type common struct{} + +func (common) OnlyOperator(op *usecase.Operator) error { + if op == nil { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) IsMe(u id.UserID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if op.User != u { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) CanReadTeam(t id.TeamID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if !op.IsReadableTeamIncluded(t) { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i common) CanWriteTeam(t id.TeamID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + if !op.IsWritableTeamIncluded(t) { + return interfaces.ErrOperationDenied + } + return nil +} + +type commonScene struct { + common + sceneRepo repo.Scene +} + +func (i commonScene) OnlyReadableScenes(ctx context.Context, op *usecase.Operator) ([]id.SceneID, error) { + if err := i.OnlyOperator(op); err != nil { + return nil, err + } + scenes, err := i.sceneRepo.FindIDsByTeam(ctx, op.ReadableTeams) + if err != nil { + return nil, err + } + return scenes, nil +} + +func (i commonScene) OnlyWritableScenes(ctx context.Context, op *usecase.Operator) ([]id.SceneID, error) { + if err := i.OnlyOperator(op); err != nil { + return nil, err + } + scenes, err := i.sceneRepo.FindIDsByTeam(ctx, op.WritableTeams) + if err != nil { + return nil, err + } + return scenes, nil +} + +func (i commonScene) CanReadScene(ctx context.Context, s id.SceneID, op *usecase.Operator) error { + if err := i.OnlyOperator(op); err != nil { + return err + } + res, err := i.sceneRepo.HasSceneTeam(ctx, s, op.ReadableTeams) + if err != nil { + return err + } + if !res { + return interfaces.ErrOperationDenied + } + return nil +} + +func (i commonScene) CanWriteScene(ctx context.Context, s id.SceneID, op *usecase.Operator) error { 
+ if err := i.OnlyOperator(op); err != nil { + return err + } + res, err := i.sceneRepo.HasSceneTeam(ctx, s, op.WritableTeams) + if err != nil { + return err + } + if !res { + return interfaces.ErrOperationDenied + } + return nil +} + +type commonSceneLock struct { + sceneLockRepo repo.SceneLock +} + +func (i commonSceneLock) CheckSceneLock(ctx context.Context, s id.SceneID) error { + // check scene lock + if lock, err := i.sceneLockRepo.GetLock(ctx, s); err != nil { + return err + } else if lock.IsLocked() { + return interfaces.ErrSceneIsLocked + } + return nil +} + +func (i commonSceneLock) UpdateSceneLock(ctx context.Context, s id.SceneID, before, after scene.LockMode) error { + // get lock + lm, err := i.sceneLockRepo.GetLock(ctx, s) + if err != nil { + return err + } + + // check lock + if lm != before { + return scene.ErrSceneIsLocked + } + + // change lock + err = i.sceneLockRepo.SaveLock(ctx, s, after) + if err != nil { + return err + } + return nil +} + +func (i commonSceneLock) ReleaseSceneLock(ctx context.Context, s id.SceneID) { + _ = i.sceneLockRepo.SaveLock(ctx, s, scene.LockModeFree) +} + +type SceneDeleter struct { + Scene repo.Scene + SceneLock repo.SceneLock + Layer repo.Layer + Property repo.Property + Dataset repo.Dataset + DatasetSchema repo.DatasetSchema +} + +func (d SceneDeleter) Delete(ctx context.Context, s *scene.Scene, force bool) error { + if s == nil { + return nil + } + + if force { + lock, err := d.SceneLock.GetLock(ctx, s.ID()) + if err != nil { + return err + } + + if lock != scene.LockModeFree { + return scene.ErrSceneIsLocked + } + } + + // Delete layer + if err := d.Layer.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Delete property + if err := d.Property.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Delete dataset + if err := d.Dataset.RemoveByScene(ctx, s.ID()); err != nil { + return err + } + + // Delete dataset schema + if err := d.DatasetSchema.RemoveByScene(ctx, s.ID()); err != nil { + 
return err + } + + // Release scene lock + if err := d.SceneLock.SaveLock(ctx, s.ID(), scene.LockModeFree); err != nil { + return err + } + + // Delete scene + if err := d.Scene.Remove(ctx, s.ID()); err != nil { + return err + } + + return nil +} + +type ProjectDeleter struct { + SceneDeleter + File gateway.File + Project repo.Project +} + +func (d ProjectDeleter) Delete(ctx context.Context, prj *project.Project, force bool, operator *usecase.Operator) error { + if prj == nil { + return nil + } + + // Fetch scene + s, err := d.Scene.FindByProject(ctx, prj.ID(), operator.WritableTeams) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return err + } + + // Delete scene + if err := d.SceneDeleter.Delete(ctx, s, force); err != nil { + return err + } + + // Unpublish project + if prj.PublishmentStatus() != project.PublishmentStatusPrivate { + if err := d.File.RemoveBuiltScene(ctx, prj.Alias()); err != nil { + return err + } + } + + // Delete project + if err := d.Project.Remove(ctx, prj.ID()); err != nil { + return err + } + + return nil +} diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go new file mode 100644 index 000000000..188be97bc --- /dev/null +++ b/internal/usecase/interactor/dataset.go @@ -0,0 +1,624 @@ +package interactor + +import ( + "context" + "errors" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/initializer" + "github.com/reearth/reearth-backend/pkg/property" + 
"github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/scene/sceneops" +) + +var extensionForLinkedLayers = id.PluginExtensionID("marker") + +type Dataset struct { + commonScene + commonSceneLock + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + propertyRepo repo.Property + layerRepo repo.Layer + pluginRepo repo.Plugin + transaction repo.Transaction + datasource gateway.DataSource + file gateway.File +} + +func NewDataset(r *repo.Container, gr *gateway.Container) interfaces.Dataset { + return &Dataset{ + commonScene: commonScene{sceneRepo: r.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + propertyRepo: r.Property, + layerRepo: r.Layer, + pluginRepo: r.Plugin, + transaction: r.Transaction, + datasource: gr.DataSource, + file: gr.File, + } +} + +func (i *Dataset) DynamicSchemaFields() []*dataset.SchemaField { + author, _ := dataset.NewSchemaField().NewID().Name("author").Type(dataset.ValueTypeString).Build() + content, _ := dataset.NewSchemaField().NewID().Name("content").Type(dataset.ValueTypeString).Build() + location, _ := dataset.NewSchemaField().NewID().Name("location").Type(dataset.ValueTypeLatLng).Build() + target, _ := dataset.NewSchemaField().NewID().Name("target").Type(dataset.ValueTypeString).Build() + return []*dataset.SchemaField{author, content, location, target} +} + +func (i *Dataset) UpdateDatasetSchema(ctx context.Context, inp interfaces.UpdateDatasetSchemaParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + schema, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaId, scenes) + if err != nil { + return nil, err + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + schema.Rename(inp.Name) + err = 
i.datasetSchemaRepo.Save(ctx, schema) + if err != nil { + return nil, err + } + + // Commit db transaction + tx.Commit() + return schema, nil +} + +func (i *Dataset) AddDynamicDatasetSchema(ctx context.Context, inp interfaces.AddDynamicDatasetSchemaParam) (_ *dataset.Schema, err error) { + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + schemaBuilder := dataset.NewSchema(). + NewID(). + Scene(inp.SceneId). + Dynamic(true) + fields := i.DynamicSchemaFields() + schemaBuilder = schemaBuilder.Fields(fields) + ds, err := schemaBuilder.Build() + if err != nil { + return nil, err + } + err = i.datasetSchemaRepo.Save(ctx, ds) + if err != nil { + return nil, err + } + + // Commit db transaction + tx.Commit() + return ds, nil +} + +func (i *Dataset) AddDynamicDataset(ctx context.Context, inp interfaces.AddDynamicDatasetParam) (_ *dataset.Schema, _ *dataset.Dataset, err error) { + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + fields := []*dataset.Field{} + dss, err := i.datasetSchemaRepo.FindDynamicByID(ctx, inp.SchemaId) + if err != nil { + return nil, nil, err + } + for _, f := range dss.Fields() { + + if f.Name() == "author" { + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(inp.Author), "")) + } + if f.Name() == "content" { + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(inp.Content), "")) + } + if inp.Target != nil && len(*inp.Target) > 0 && f.Name() == "target" { + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(inp.Target), "")) + } + if inp.Lat != nil && inp.Lng != nil && f.Name() == "location" { + latlng := dataset.LatLng{Lat: *inp.Lat, Lng: *inp.Lng} + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(latlng), "")) + } + } + ds, err := dataset. + New(). + NewID(). + Fields(fields). + Schema(inp.SchemaId). 
+ Build() + if err != nil { + return nil, nil, err + } + err = i.datasetRepo.Save(ctx, ds) + if err != nil { + return nil, nil, err + } + + // Commit db transaction + tx.Commit() + return dss, ds, nil +} + +func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatasetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { + if err := i.CanWriteScene(ctx, inp.SceneId, operator); err != nil { + return nil, err + } + if inp.File == nil { + return nil, interfaces.ErrFileNotIncluded + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + seperator := ',' + if strings.HasSuffix(inp.File.Name, ".tsv") { + seperator = '\t' + } + scenes := []id.SceneID{inp.SceneId} + csv := dataset.NewCSVParser(inp.File.Content, inp.File.Name, seperator) + err = csv.Init() + if err != nil { + return nil, err + } + + // replacment mode + if inp.SchemaId != nil { + dss, err := i.datasetSchemaRepo.FindByID(ctx, *inp.SchemaId, scenes) + if err != nil { + return nil, err + } + err = csv.CheckCompatible(dss) + if err != nil { + return nil, err + } + toreplace, err := i.datasetRepo.FindBySchemaAll(ctx, *inp.SchemaId) + if err != nil { + return nil, err + } + err = i.datasetRepo.RemoveAll(ctx, toreplace.ToDatasetIds()) + if err != nil { + return nil, err + } + } else { + err = csv.GuessSchema(inp.SceneId) + if err != nil { + return nil, err + } + } + + schema, datasets, err := csv.ReadAll() + if err != nil { + return nil, err + } + + err = i.datasetSchemaRepo.Save(ctx, schema) + if err != nil { + return nil, err + } + err = i.datasetRepo.SaveAll(ctx, datasets) + if err != nil { + return nil, err + } + + if inp.SchemaId != nil { + layergroups, err := i.layerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, inp.SceneId, *inp.SchemaId) + if err != nil { + return nil, err + } + + newProperties := make([]*property.Property, 0, len(datasets)) + representativeFieldID := schema.RepresentativeFieldID() + 
removedProperties := []id.PropertyID{} + removedLayers := []id.LayerID{} + updatedLayers := append(layer.List{}, layergroups.ToLayerList()...) + + for _, lg := range layergroups { + if lg.Layers().LayerCount() > 0 { + children, err := i.layerRepo.FindByIDs(ctx, lg.Layers().Layers(), scenes) + if err != nil { + return nil, err + } + for _, c := range children { + if c != nil { + removedProperties = append(removedProperties, (*c).Properties()...) + } + } + removedLayers = append(removedLayers, lg.Layers().Layers()...) + lg.Layers().Empty() + } + + for _, ds := range datasets { + dsid := ds.ID() + name := "" + if rf := ds.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { + name = rf.Value().Value().(string) + } + layerItem, layerProperty, err := initializer.LayerItem{ + SceneID: inp.SceneId, + ParentLayerID: lg.ID(), + Plugin: builtin.Plugin(), + ExtensionID: &extensionForLinkedLayers, + LinkedDatasetID: &dsid, + Name: name, + }.Initialize() + if err != nil { + return nil, err + } + if layerItem != nil { + lg.Layers().AddLayer(layerItem.ID(), -1) + updatedLayers = append(updatedLayers, layerItem.LayerRef()) + } + if layerProperty != nil { + newProperties = append(newProperties, layerProperty) + } + } + } + + err = i.layerRepo.RemoveAll(ctx, removedLayers) + if err != nil { + return nil, err + } + err = i.propertyRepo.RemoveAll(ctx, removedProperties) + if err != nil { + return nil, err + } + err = i.layerRepo.SaveAll(ctx, updatedLayers) + if err != nil { + return nil, err + } + err = i.propertyRepo.SaveAll(ctx, newProperties) + if err != nil { + return nil, err + } + } + + // Commit db transaction + tx.Commit() + return schema, nil +} + +func (i *Dataset) Fetch(ctx context.Context, ids []id.DatasetID, operator *usecase.Operator) (dataset.List, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + return i.datasetRepo.FindByIDs(ctx, ids, scenes) +} + +func (i *Dataset) GraphFetch(ctx 
context.Context, id id.DatasetID, depth int, operator *usecase.Operator) (dataset.List, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + if depth < 0 || depth > 3 { + return nil, interfaces.ErrDatasetInvalidDepth + } + it := dataset.GraphIteratorFrom(id, depth) + res := dataset.List{} + next := id + done := false + for { + d, err := i.datasetRepo.FindByID(ctx, next, scenes) + if err != nil { + return nil, err + } + res = append(res, d) + next, done = it.Next(d) + if next.ID().IsNil() { + return nil, err1.ErrInternalBy(errors.New("next id is nil")) + } + if done { + break + } + } + return res, nil +} + +func (i *Dataset) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID, operator *usecase.Operator) (dataset.SchemaList, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.datasetSchemaRepo.FindByIDs(ctx, ids, scenes) +} + +func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, depth int, operator *usecase.Operator) (dataset.SchemaList, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + if depth < 0 || depth > 3 { + return nil, interfaces.ErrDatasetInvalidDepth + } + + it := dataset.SchemaGraphIteratorFrom(id, depth) + res := dataset.SchemaList{} + next := id + done := false + for { + d, err := i.datasetSchemaRepo.FindByID(ctx, next, scenes) + if err != nil { + return nil, err + } + res = append(res, d) + next, done = it.Next(d) + if next.ID().IsNil() { + return nil, err1.ErrInternalBy(errors.New("next id is nil")) + } + if done { + break + } + } + + return res, nil +} + +func (i *Dataset) FindBySchema(ctx context.Context, ds id.DatasetSchemaID, p *usecase.Pagination, operator *usecase.Operator) (dataset.List, *usecase.PageInfo, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, nil, err + } + + return 
i.datasetRepo.FindBySchema(ctx, ds, scenes, p) +} + +func (i *Dataset) FindSchemaByScene(ctx context.Context, sid id.SceneID, p *usecase.Pagination, operator *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) { + if err := i.CanReadScene(ctx, sid, operator); err != nil { + return nil, nil, err + } + + return i.datasetSchemaRepo.FindByScene(ctx, sid, p) +} + +func (i *Dataset) FindDynamicSchemaByScene(ctx context.Context, sid id.SceneID) (dataset.SchemaList, error) { + return i.datasetSchemaRepo.FindAllDynamicByScene(ctx, sid) +} + +func (i *Dataset) Sync(ctx context.Context, sceneID id.SceneID, url string, operator *usecase.Operator) (dss dataset.SchemaList, ds dataset.List, err error) { + if err := i.CanWriteScene(ctx, sceneID, operator); err != nil { + return nil, nil, err + } + + if i.datasource == nil { + return nil, nil, interfaces.ErrNoDataSourceAvailable + } + + // Check URL + if !i.datasource.IsURLValid(ctx, url) { + return nil, nil, interfaces.ErrDataSourceInvalidURL + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.UpdateSceneLock(ctx, sceneID, scene.LockModeFree, scene.LockModeDatasetSyncing); err != nil { + return nil, nil, err + } + + defer i.ReleaseSceneLock(ctx, sceneID) + + // Fetch + dss, ds, err = i.datasource.Fetch(ctx, url, sceneID) + if err != nil { + return nil, nil, err + } + + // Save + if err := i.datasetSchemaRepo.SaveAll(ctx, dss); err != nil { + return nil, nil, err + } + if err := i.datasetRepo.SaveAll(ctx, ds); err != nil { + return nil, nil, err + } + + // Migrate + result, err := sceneops.DatasetMigrator{ + PropertyRepo: i.propertyRepo, + LayerRepo: i.layerRepo, + DatasetSchemaRepo: i.datasetSchemaRepo, + DatasetRepo: i.datasetRepo, + Plugin: repo.PluginLoaderFrom(i.pluginRepo), + }.Migrate(ctx, sceneID, dss, ds) + if err != nil { + return nil, nil, err + } + + if err := i.propertyRepo.SaveAll(ctx, 
result.Properties.List()); err != nil { + return nil, nil, err + } + if err := i.layerRepo.SaveAll(ctx, result.Layers.List()); err != nil { + return nil, nil, err + } + if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers.All()); err != nil { + return nil, nil, err + } + if err := i.datasetRepo.RemoveAll(ctx, result.RemovedDatasets); err != nil { + return nil, nil, err + } + if err := i.datasetSchemaRepo.RemoveAll(ctx, result.RemovedDatasetSchemas); err != nil { + return nil, nil, err + } + + tx.Commit() + return dss, ds, nil +} + +func (i *Dataset) AddDatasetSchema(ctx context.Context, inp interfaces.AddDatasetSchemaParam, operator *usecase.Operator) (ds *dataset.Schema, err error) { + if err := i.CanWriteScene(ctx, inp.SceneId, operator); err != nil { + return nil, err + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + schemaBuilder := dataset.NewSchema(). + NewID(). + Scene(inp.SceneId). + Name(inp.Name). + Source("reearth"). 
+ RepresentativeField(*inp.RepresentativeField) + ds, err = schemaBuilder.Build() + if err != nil { + return nil, err + } + err = i.datasetSchemaRepo.Save(ctx, ds) + if err != nil { + return nil, err + } + + tx.Commit() + return ds, nil +} + +func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.RemoveDatasetSchemaParam, operator *usecase.Operator) (_ id.DatasetSchemaID, err error) { + if operator == nil { + return inp.SchemaId, interfaces.ErrOperationDenied + } + scenes, err := i.sceneRepo.FindIDsByTeam(ctx, operator.WritableTeams) + if err != nil { + return inp.SchemaId, err + } + s, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaId, scenes) + if err != nil { + return inp.SchemaId, err + } + + if s == nil { + return inp.SchemaId, err1.ErrNotFound + } + + datasets, err := i.datasetRepo.FindBySchemaAll(ctx, inp.SchemaId) + if err != nil { + return inp.SchemaId, err + } + if (inp.Force == nil || !*inp.Force) && len(datasets) != 0 { + return inp.SchemaId, errors.New("can not remove non-empty schema") + } + + // Begin Db transaction + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + // list of datasets attached by the schema + dsids := []id.DatasetID{} + var properties []*property.Property + for _, d := range datasets { + properties, err = i.propertyRepo.FindByDataset(ctx, inp.SchemaId, d.ID()) + if err != nil { + return inp.SchemaId, err + } + + for _, p := range properties { + // unlinking fields + p.UnlinkAllByDataset(inp.SchemaId, d.ID()) + } + + dsids = append(dsids, d.ID()) + } + + // unlink layers (items and groups) and save + layers, err := i.layerRepo.FindAllByDatasetSchema(ctx, inp.SchemaId) + if err != nil { + return inp.SchemaId, err + } + + for _, li := range layers.ToLayerItemList() { + li.Unlink() + } + + for _, lg := range layers.ToLayerGroupList() { + lg.Unlink() + + groupItems, err := i.layerRepo.FindItemByIDs(ctx, lg.Layers().Layers(), scenes) + if err != nil { + 
return inp.SchemaId, err + } + + // unlink layers group items + for _, item := range groupItems { + item.Unlink() + } + + // save the changed layers + layers = append(layers, groupItems.ToLayerList()...) + } + + err = i.propertyRepo.SaveAll(ctx, properties) + if err != nil { + return inp.SchemaId, err + } + + err = i.layerRepo.SaveAll(ctx, layers) + if err != nil { + return inp.SchemaId, err + } + + err = i.datasetRepo.RemoveAll(ctx, dsids) + if err != nil { + return inp.SchemaId, err + } + + err = i.datasetSchemaRepo.Remove(ctx, inp.SchemaId) + if err != nil { + return inp.SchemaId, err + } + + tx.Commit() + return inp.SchemaId, nil +} diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go new file mode 100644 index 000000000..6b53538c0 --- /dev/null +++ b/internal/usecase/interactor/layer.go @@ -0,0 +1,983 @@ +package interactor + +import ( + "context" + "encoding/json" + "encoding/xml" + "errors" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/shp" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/layer/initializer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +// TODO: ใƒฌใ‚คใƒคใƒผไฝœๆˆใฎใƒ‰ใƒกใ‚คใƒณใƒญใ‚ธใƒƒใ‚ฏใŒใ“ใ“ใซๅคšใๆผใ‚Œๅ‡บใ—ใฆใ„ใ‚‹ใฎใงใƒ‰ใƒกใ‚คใƒณๅฑคใซ็งปใ™ + +type Layer struct { + commonScene + commonSceneLock + layerRepo repo.Layer + pluginRepo repo.Plugin + propertyRepo repo.Property + propertySchemaRepo repo.PropertySchema + datasetRepo repo.Dataset + 
datasetSchemaRepo repo.DatasetSchema + sceneRepo repo.Scene + sceneLockRepo repo.SceneLock + transaction repo.Transaction +} + +func NewLayer(r *repo.Container) interfaces.Layer { + return &Layer{ + commonScene: commonScene{sceneRepo: r.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + layerRepo: r.Layer, + pluginRepo: r.Plugin, + propertyRepo: r.Property, + datasetRepo: r.Dataset, + propertySchemaRepo: r.PropertySchema, + datasetSchemaRepo: r.DatasetSchema, + sceneRepo: r.Scene, + sceneLockRepo: r.SceneLock, + transaction: r.Transaction, + } +} + +func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Layer, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.layerRepo.FindByIDs(ctx, ids, scenes) +} + +func (i *Layer) FetchGroup(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Group, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.layerRepo.FindGroupByIDs(ctx, ids, scenes) +} + +func (i *Layer) FetchItem(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Item, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.layerRepo.FindItemByIDs(ctx, ids, scenes) +} + +func (i *Layer) FetchParent(ctx context.Context, pid id.LayerID, operator *usecase.Operator) (*layer.Group, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.layerRepo.FindParentByID(ctx, pid, scenes) +} + +func (i *Layer) FetchByProperty(ctx context.Context, pid id.PropertyID, operator *usecase.Operator) (layer.Layer, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.layerRepo.FindByProperty(ctx, pid, scenes) +} + +func (i *Layer) FetchMerged(ctx context.Context, org 
id.LayerID, parent *id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + ids := []id.LayerID{org} + if parent != nil { + ids = append(ids, *parent) + } + layers, err := i.layerRepo.FindByIDs(ctx, ids, scenes) + if err != nil { + return nil, err + } + layers2 := []*layer.Layer(layers) + + var orgl *layer.Item + var parentl *layer.Group + if parent != nil && len(layers2) == 2 { + l := layers2[0] + orgl = layer.ToLayerItemRef(l) + l = layers2[1] + parentl = layer.ToLayerGroupRef(l) + } else if parent == nil && len(layers2) == 1 { + l := layers2[0] + if l != nil { + orgl = layer.ToLayerItemRef(l) + } + } + + return layer.Merge(orgl, parentl), nil +} + +func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + orgl, err := i.layerRepo.FindItemByID(ctx, org, scenes) + if err != nil { + return nil, err + } + parent, err := i.layerRepo.FindParentByID(ctx, org, scenes) + if err != nil { + return nil, err + } + + return layer.Merge(orgl, parent), nil +} + +func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, operator *usecase.Operator) (_ *layer.Item, _ *layer.Group, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, err + } + + parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID, scenes) + if err != nil { + if errors.Is(err, err1.ErrNotFound) { + return nil, nil, err + } + return nil, nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { + return nil, nil, err + } + + if parentLayer.IsLinked() { + return nil, nil, 
interfaces.ErrCannotAddLayerToLinkedLayerGroup + } + + plugin, extension, err := i.getPlugin(ctx, inp.PluginID, inp.ExtensionID) + if err != nil { + return nil, nil, err + } + + propertySchema, err := i.propertySchemaRepo.FindByID(ctx, extension.Schema()) + if err != nil { + return nil, nil, err + } + + layerItem, property, err := initializer.LayerItem{ + SceneID: parentLayer.Scene(), + ParentLayerID: parentLayer.ID(), + Plugin: plugin, + ExtensionID: inp.ExtensionID, + LinkedDatasetID: inp.LinkedDatasetID, + LinkablePropertySchema: propertySchema, + LatLng: inp.LatLng, + Name: inp.Name, + Index: inp.Index, + }.Initialize() + if err != nil { + return nil, nil, err + } + + index := -1 + if inp.Index != nil { + index = *inp.Index + } + + parentLayer.Layers().AddLayer(layerItem.ID(), index) + + if property != nil { + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, nil, err + } + } + + err = i.layerRepo.Save(ctx, layerItem) + if err != nil { + return nil, nil, err + } + err = i.layerRepo.Save(ctx, parentLayer) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return layerItem, parentLayer, nil +} + +func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, operator *usecase.Operator) (_ *layer.Group, _ *layer.Group, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, err + } + + parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID, scenes) + if err != nil { + return nil, nil, err + } + + layerScenes := []id.SceneID{parentLayer.Scene()} + + // check scene lock + if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { + return nil, nil, err + } + + if parentLayer.IsLinked() { + return nil, nil, interfaces.ErrCannotAddLayerToLinkedLayerGroup + } + + var extensionSchemaID id.PropertySchemaID + var propertySchema 
*property.Schema + + plug, extension, err := i.getPlugin(ctx, inp.PluginID, inp.ExtensionID) + if err != nil { + return nil, nil, err + } + if extension != nil { + if extension.Type() != plugin.ExtensionTypePrimitive { + return nil, nil, interfaces.ErrExtensionTypeMustBePrimitive + } + extensionSchemaID = extension.Schema() + } + + var datasetSchema *dataset.Schema + var ds dataset.List + if inp.LinkedDatasetSchemaID != nil { + datasetSchema2, err := i.datasetSchemaRepo.FindByID(ctx, *inp.LinkedDatasetSchemaID, layerScenes) + if err != nil { + return nil, nil, err + } + datasetSchema = datasetSchema2 + + ds, err = i.datasetRepo.FindBySchemaAll(ctx, + *inp.LinkedDatasetSchemaID, + ) + if err != nil { + return nil, nil, err + } + } else { + ds = []*dataset.Dataset{} + } + + var p *property.Property + builder := layer.NewGroup().NewID().Scene(parentLayer.Scene()) + if inp.Name == "" && datasetSchema != nil { + builder = builder.Name(datasetSchema.Name()) + } else { + builder = builder.Name(inp.Name) + } + if inp.PluginID != nil { + builder = builder.Plugin(inp.PluginID) + } + if inp.PluginID != nil && inp.ExtensionID != nil { + propertySchema, err = i.propertySchemaRepo.FindByID(ctx, extensionSchemaID) + if err != nil { + return nil, nil, err + } + + builder = builder.Extension(inp.ExtensionID) + p, err = property.New(). + NewID(). + Schema(extensionSchemaID). + Scene(parentLayer.Scene()). 
+ Build() + if err != nil { + return nil, nil, err + } + + // auto linking + p.AutoLinkField( + propertySchema, + property.ValueTypeLatLng, + datasetSchema.ID(), + datasetSchema.FieldByType(dataset.ValueTypeLatLng).IDRef(), + nil) + p.AutoLinkField( + propertySchema, + property.ValueTypeURL, + datasetSchema.ID(), + datasetSchema.FieldByType(dataset.ValueTypeURL).IDRef(), + nil) + + builder = builder.Property(p.ID().Ref()) + } + if inp.LinkedDatasetSchemaID != nil { + builder = builder.LinkedDatasetSchema(inp.LinkedDatasetSchemaID) + } + layerGroup, err := builder.Build() + if err != nil { + return nil, nil, err + } + + // create item layers + representativeFieldID := datasetSchema.RepresentativeFieldID() + layerItems := make([]*layer.Item, 0, len(ds)) + layerItemProperties := make([]*property.Property, 0, len(ds)) + index := -1 + for _, ds := range ds { + dsid := ds.ID() + + name := "" + if rf := ds.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { + name = rf.Value().Value().(string) + } + + layerItem, property, err := initializer.LayerItem{ + SceneID: parentLayer.Scene(), + ParentLayerID: layerGroup.ID(), + Plugin: plug, + ExtensionID: inp.ExtensionID, + Index: &index, + LinkedDatasetID: &dsid, + Name: name, + }.Initialize() + + if err != nil { + return nil, nil, err + } + layerItems = append(layerItems, layerItem) + layerItemProperties = append(layerItemProperties, property) + layerGroup.Layers().AddLayer(layerItem.ID(), -1) + } + + // add group to parent + if inp.Index != nil { + index = *inp.Index + } + + parentLayer.Layers().AddLayer(layerGroup.ID(), index) + + // save + var pl layer.Layer = parentLayer + var gl layer.Layer = layerGroup + layers := layer.List{&pl, &gl} + properties := []*property.Property{} + if p != nil { + properties = append(properties, p) + } + + for index, item := range layerItems { + var l layer.Layer = item + layers = append(layers, &l) + if p := layerItemProperties[index]; p != nil { + properties = 
append(properties, p) + } + } + + err = i.propertyRepo.SaveAll(ctx, properties) + if err != nil { + return nil, nil, err + } + + err = i.layerRepo.SaveAll(ctx, layers) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return layerGroup, parentLayer, nil +} + +func (i *Layer) fetchAllChildren(ctx context.Context, l layer.Layer, scenes []id.SceneID) ([]id.LayerID, []id.PropertyID, error) { + lidl := layer.ToLayerGroup(l).Layers().Layers() + layers, err := i.layerRepo.FindByIDs(ctx, lidl, scenes) + if err != nil { + return nil, nil, err + } + properties := append(make([]id.PropertyID, 0), l.Properties()...) + for _, ll := range layers { + lg := layer.ToLayerGroup(*ll) + li := layer.ToLayerItem(*ll) + if lg != nil { + childrenLayers, childrenProperties, err := i.fetchAllChildren(ctx, lg, scenes) + if err != nil { + return nil, nil, err + } + properties = append(properties, childrenProperties...) + lidl = append(lidl, childrenLayers...) + + } + if li != nil { + properties = append(properties, l.Properties()...) 
+ } + + } + return lidl, properties, nil +} + +func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Operator) (_ id.LayerID, _ *layer.Group, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return lid, nil, err + } + + l, err := i.layerRepo.FindByID(ctx, lid, scenes) + if err != nil { + return lid, nil, err + } + + if err := i.CheckSceneLock(ctx, l.Scene()); err != nil { + return lid, nil, err + } + + if gl := layer.GroupFromLayer(l); gl != nil && gl.IsRoot() { + return lid, nil, errors.New("root layer cannot be deleted") + } + + parentLayer, err := i.layerRepo.FindParentByID(ctx, lid, scenes) + if err != nil && err != err1.ErrNotFound { + return lid, nil, err + } + if parentLayer != nil { + if l.Scene() != parentLayer.Scene() { + return lid, nil, errors.New("invalid layer") + } + } + + if parentLayer != nil && parentLayer.IsLinked() { + return lid, nil, interfaces.ErrCannotRemoveLayerToLinkedLayerGroup + } + if parentLayer != nil { + parentLayer.Layers().RemoveLayer(lid) + err = i.layerRepo.Save(ctx, parentLayer) + if err != nil { + return lid, nil, err + } + } + layers, properties, err := i.fetchAllChildren(ctx, l, scenes) + if err != nil { + return lid, nil, err + } + layers = append(layers, l.ID()) + err = i.layerRepo.RemoveAll(ctx, layers) + if err != nil { + return lid, nil, err + } + err = i.propertyRepo.RemoveAll(ctx, properties) + if err != nil { + return lid, nil, err + } + + tx.Commit() + return lid, parentLayer, nil +} + +func (i *Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, operator *usecase.Operator) (_ layer.Layer, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + layer, err := 
i.layerRepo.FindByID(ctx, inp.LayerID, scenes) + if err != nil { + return nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + return nil, err + } + + if inp.Name != nil { + layer.Rename(*inp.Name) + } + + if inp.Visible != nil { + layer.SetVisible(*inp.Visible) + } + + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return nil, err + } + + tx.Commit() + return layer, nil +} + +func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operator *usecase.Operator) (_ id.LayerID, _ *layer.Group, _ *layer.Group, _ int, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return inp.LayerID, nil, nil, -1, err + } + + parentLayer, err := i.layerRepo.FindParentByID(ctx, inp.LayerID, scenes) + if err != nil { + return inp.LayerID, nil, nil, -1, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { + return inp.LayerID, nil, nil, -1, err + } + + var toParentLayer *layer.Group + if inp.DestLayerID == nil || parentLayer.ID() == *inp.DestLayerID { + toParentLayer = parentLayer + } else if parentLayer.IsLinked() { + return inp.LayerID, nil, nil, -1, interfaces.ErrLinkedLayerItemCannotBeMoved + } else { + toParentLayer, err = i.layerRepo.FindGroupByID(ctx, *inp.DestLayerID, scenes) + if err != nil { + return inp.LayerID, nil, nil, -1, err + } + if toParentLayer.Scene() != parentLayer.Scene() { + return inp.LayerID, nil, nil, -1, interfaces.ErrCannotMoveLayerToOtherScene + } + if toParentLayer.IsLinked() { + return inp.LayerID, nil, nil, -1, interfaces.ErrLayerCannotBeMovedToLinkedLayerGroup + } + } + + toParentLayer.MoveLayerFrom(inp.LayerID, inp.Index, parentLayer) + + layers := layer.List{parentLayer.LayerRef()} + if parentLayer.ID() != toParentLayer.ID() { + layers = append(layers, toParentLayer.LayerRef()) + } + err 
= i.layerRepo.SaveAll(ctx, layers) + if err != nil { + return inp.LayerID, nil, nil, -1, err + } + + tx.Commit() + return inp.LayerID, + parentLayer, + toParentLayer, + toParentLayer.Layers().FindLayerIndex(inp.LayerID), + nil +} + +func (i *Layer) CreateInfobox(ctx context.Context, lid id.LayerID, operator *usecase.Operator) (_ layer.Layer, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + l, err := i.layerRepo.FindByID(ctx, lid, scenes) + if err != nil { + return nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, l.Scene()); err != nil { + return nil, err + } + + infobox := l.Infobox() + if infobox != nil { + return nil, interfaces.ErrInfoboxAlreadyExists + } + + schema := builtin.GetPropertySchema(builtin.PropertySchemaIDInfobox) + property, err := property.New().NewID().Schema(schema.ID()).Scene(l.Scene()).Build() + if err != nil { + return nil, err + } + infobox = layer.NewInfobox(nil, property.ID()) + l.SetInfobox(infobox) + + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, err + } + err = i.layerRepo.Save(ctx, l) + if err != nil { + return nil, err + } + + tx.Commit() + return l, nil +} + +func (i *Layer) RemoveInfobox(ctx context.Context, layerID id.LayerID, operator *usecase.Operator) (_ layer.Layer, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + layer, err := i.layerRepo.FindByID(ctx, layerID, scenes) + if err != nil { + return nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + return nil, err + } + + infobox := layer.Infobox() + if infobox == nil { + return nil, interfaces.ErrInfoboxNotFound + } + + 
layer.SetInfobox(nil) + + err = i.propertyRepo.Remove(ctx, infobox.Property()) + if err != nil { + return nil, err + } + + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return nil, err + } + + tx.Commit() + return layer, nil +} + +func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFieldParam, operator *usecase.Operator) (_ *layer.InfoboxField, _ layer.Layer, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, err + } + + l, err := i.layerRepo.FindByID(ctx, inp.LayerID, scenes) + if err != nil { + return nil, nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, l.Scene()); err != nil { + return nil, nil, err + } + + infobox := l.Infobox() + if infobox == nil { + return nil, nil, interfaces.ErrInfoboxNotFound + } + + _, extension, err := i.getPlugin(ctx, &inp.PluginID, &inp.ExtensionID) + if err != nil { + return nil, nil, err + } + if extension.Type() != plugin.ExtensionTypeBlock { + return nil, nil, interfaces.ErrExtensionTypeMustBeBlock + } + + property, err := property.New().NewID().Schema(extension.Schema()).Scene(l.Scene()).Build() + if err != nil { + return nil, nil, err + } + + field, err := layer.NewInfoboxField(). + NewID(). + Plugin(inp.PluginID). + Extension(inp.ExtensionID). + Property(property.ID()). 
+ Build() + if err != nil { + return nil, nil, err + } + + index := -1 + if inp.Index != nil { + index = *inp.Index + } + infobox.Add(field, index) + + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, nil, err + } + + err = i.layerRepo.Save(ctx, l) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return field, l, err +} + +func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfoboxFieldParam, operator *usecase.Operator) (_ id.InfoboxFieldID, _ layer.Layer, _ int, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID, scenes) + if err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + infobox := layer.Infobox() + if infobox == nil { + return inp.InfoboxFieldID, nil, -1, interfaces.ErrInfoboxNotFound + } + + infobox.Move(inp.InfoboxFieldID, inp.Index) + + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return inp.InfoboxFieldID, nil, -1, err + } + + tx.Commit() + return inp.InfoboxFieldID, layer, inp.Index, err +} + +func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInfoboxFieldParam, operator *usecase.Operator) (_ id.InfoboxFieldID, _ layer.Layer, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return inp.InfoboxFieldID, nil, err + } + + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID, scenes) + if err != nil { + return inp.InfoboxFieldID, nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, layer.Scene()); err != nil { + 
return inp.InfoboxFieldID, nil, err
	}

	infobox := layer.Infobox()
	if infobox == nil {
		return inp.InfoboxFieldID, nil, interfaces.ErrInfoboxNotFound
	}

	infobox.Remove(inp.InfoboxFieldID)

	err = i.layerRepo.Save(ctx, layer)
	if err != nil {
		return inp.InfoboxFieldID, nil, err
	}

	tx.Commit()
	return inp.InfoboxFieldID, layer, err
}

// getPlugin resolves a plugin and, optionally, one of its extensions.
// Both p and e are optional: a nil p yields (nil, nil, nil), and a nil e
// yields the plugin without an extension. Repository not-found errors are
// translated into the corresponding interfaces errors.
func (i *Layer) getPlugin(ctx context.Context, p *id.PluginID, e *id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) {
	if p == nil {
		return nil, nil, nil
	}

	plugin, err := i.pluginRepo.FindByID(ctx, *p)
	if err != nil {
		// map a repository-level not-found onto the domain-level error
		if errors.Is(err, err1.ErrNotFound) {
			return nil, nil, interfaces.ErrPluginNotFound
		}
		return nil, nil, err
	}

	if e == nil {
		return plugin, nil, nil
	}

	extension := plugin.Extension(*e)
	if extension == nil {
		return nil, nil, interfaces.ErrExtensionNotFound
	}

	return plugin, extension, nil
}

// ImportLayer decodes the uploaded file (KML, GeoJSON, CZML, Re:Earth JSON,
// or Shapefile) and imports the resulting layers under the layer group
// specified by inp.LayerID, returning the imported root layers and the
// updated parent group.
func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam, operator *usecase.Operator) (_ layer.List, _ *layer.Group, err error) {

	tx, err := i.transaction.Begin()
	if err != nil {
		return
	}
	defer func() {
		err = tx.End(ctx)
	}()

	if inp.File == nil {
		return nil, nil, interfaces.ErrFileNotIncluded
	}
	scenes, err := i.OnlyWritableScenes(ctx, operator)
	if err != nil {
		return nil, nil, err
	}
	parent, err := i.layerRepo.FindGroupByID(ctx, inp.LayerID, scenes)
	if err != nil {
		return nil, nil, err
	}
	// pick a decoder matching the declared encoding format; decoder stays nil
	// for unknown formats and is rejected further below
	var decoder decoding.Decoder
	switch inp.Format {
	case decoding.LayerEncodingFormatKML:
		d := xml.NewDecoder(inp.File.Content)
		decoder = decoding.NewKMLDecoder(d, parent.Scene())
	case decoding.LayerEncodingFormatGEOJSON:
		decoder = decoding.NewGeoJSONDecoder(inp.File.Content, parent.Scene())
	case decoding.LayerEncodingFormatCZML:
		d := json.NewDecoder(inp.File.Content)
		decoder = decoding.NewCZMLDecoder(d, parent.Scene())
	case decoding.LayerEncodingFormatREEARTH:
		d :=
json.NewDecoder(inp.File.Content) + decoder = decoding.NewReearthDecoder(d, parent.Scene()) + case decoding.LayerEncodingFormatSHAPE: + // limit file size to 2m + if inp.File.Size > 2097152 { + return nil, nil, errors.New("file is too big") + } + var reader decoding.ShapeReader + if inp.File.ContentType == "application/octet-stream" && strings.HasSuffix(inp.File.Name, ".shp") { + reader, err = shp.ReadFrom(inp.File.Content) + if err != nil { + return nil, nil, err + } + decoder = decoding.NewShapeDecoder(reader, parent.Scene()) + } else if inp.File.ContentType == "application/zip" && strings.HasSuffix(inp.File.Name, ".zip") { + reader, err = shp.ReadZipFrom(inp.File.Content) + if err != nil { + return nil, nil, err + } + } + decoder = decoding.NewShapeDecoder(reader, parent.Scene()) + } + if decoder == nil { + return nil, nil, errors.New("unsupported format") + } + result, err := decoder.Decode() + if err != nil { + return nil, nil, err + } + + properties := result.Properties.List() + if err := (property.Validator{ + SchemaLoader: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), + }.Validate(ctx, properties)); err != nil { + return nil, nil, err + } + + rootLayers := result.RootLayers() + if len(rootLayers) == 0 { + return nil, nil, errors.New("no layers are imported") + } + + if result.Root.LayerCount() > 0 { + parent.Layers().AppendLayers(result.Root.Layers()...) 
	}

	if err := i.layerRepo.SaveAll(ctx, append(result.Layers.List(), parent.LayerRef())); err != nil {
		return nil, nil, err
	}

	if err := i.propertyRepo.SaveAll(ctx, properties); err != nil {
		return nil, nil, err
	}

	tx.Commit()
	return rootLayers, parent, nil
}
diff --git a/internal/usecase/interactor/layer_test.go b/internal/usecase/interactor/layer_test.go
new file mode 100644
index 000000000..76d36c147
--- /dev/null
+++ b/internal/usecase/interactor/layer_test.go
@@ -0,0 +1,37 @@
package interactor

import (
	"context"
	"testing"

	"github.com/reearth/reearth-backend/internal/infrastructure/memory"
	"github.com/reearth/reearth-backend/internal/usecase"
	"github.com/reearth/reearth-backend/internal/usecase/repo"
	"github.com/reearth/reearth-backend/pkg/id"
	"github.com/reearth/reearth-backend/pkg/layer"
	"github.com/reearth/reearth-backend/pkg/scene"
	"github.com/stretchr/testify/assert"
)

// TestCreateInfobox exercises Layer.CreateInfobox against in-memory repos:
// it creates an infobox on a saved item layer and verifies that the infobox
// and its backing property (with a schema) were persisted.
func TestCreateInfobox(t *testing.T) {
	ctx := context.Background()

	db := memory.InitRepos(&repo.Container{}, false)
	scene, _ := scene.New().NewID().Team(id.NewTeamID()).Project(id.NewProjectID()).RootLayer(id.NewLayerID()).Build()
	_ = db.Scene.Save(ctx, scene)
	il := NewLayer(db)

	l, _ := layer.NewItem().NewID().Scene(scene.ID()).Build()
	_ = db.Layer.Save(ctx, l)

	// the operator must be able to write to the scene's team
	i, _ := il.CreateInfobox(ctx, l.ID(), &usecase.Operator{
		WritableTeams: []id.TeamID{scene.Team()},
	})
	assert.NotNil(t, i)
	l, _ = db.Layer.FindItemByID(ctx, l.ID(), nil)
	infobox := l.Infobox()
	assert.NotNil(t, infobox)
	property, _ := db.Property.FindByID(ctx, infobox.Property(), nil)
	assert.NotNil(t, property)
	assert.NotNil(t, property.Schema())
}
diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go
new file mode 100644
index 000000000..200b289a7
--- /dev/null
+++ b/internal/usecase/interactor/plugin.go
@@ -0,0 +1,59 @@
package interactor

import (
	"context"
	"errors"
	"io"

	"github.com/reearth/reearth-backend/internal/usecase"
	"github.com/reearth/reearth-backend/internal/usecase/gateway"
	"github.com/reearth/reearth-backend/internal/usecase/interfaces"
	"github.com/reearth/reearth-backend/internal/usecase/repo"
	"github.com/reearth/reearth-backend/pkg/id"
	"github.com/reearth/reearth-backend/pkg/plugin"
)

// Plugin is the interactor implementing the plugin use cases
// (interfaces.Plugin).
type Plugin struct {
	common
	pluginRepo         repo.Plugin
	propertySchemaRepo repo.PropertySchema
	file               gateway.File
	pluginRepository   gateway.PluginRepository
	transaction        repo.Transaction
}

// NewPlugin wires a Plugin interactor with the repositories and gateways it
// needs from the given containers.
func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin {
	return &Plugin{
		pluginRepo:         r.Plugin,
		propertySchemaRepo: r.PropertySchema,
		transaction:        r.Transaction,
		pluginRepository:   gr.PluginRepository,
		file:               gr.File,
	}
}

// Fetch returns the plugins for the given IDs. Any authenticated operator may
// read plugins; there is no per-team filtering here.
func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) {
	if err := i.OnlyOperator(operator); err != nil {
		return nil, err
	}
	res, err := i.pluginRepo.FindByIDs(ctx, ids)
	return res, err
}

// Upload is a placeholder for plugin uploading: it validates the operator,
// opens (and immediately commits) a transaction, and always returns a
// "not implemented" error.
func (i *Plugin) Upload(ctx context.Context, r io.Reader, operator *usecase.Operator) (_ *plugin.Plugin, err error) {

	tx, err := i.transaction.Begin()
	if err != nil {
		return
	}
	defer func() {
		err = tx.End(ctx)
	}()

	if err := i.OnlyOperator(operator); err != nil {
		return nil, err
	}

	tx.Commit()
	return nil, errors.New("not implemented")
}
diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go
new file mode 100644
index 000000000..d956d0209
--- /dev/null
+++ b/internal/usecase/interactor/project.go
@@ -0,0 +1,411 @@
package interactor

import (
	"context"
	"errors"
	"io"
	"time"

	"github.com/reearth/reearth-backend/internal/usecase"
	"github.com/reearth/reearth-backend/internal/usecase/gateway"
	"github.com/reearth/reearth-backend/internal/usecase/interfaces"
	"github.com/reearth/reearth-backend/internal/usecase/repo"
"github.com/reearth/reearth-backend/pkg/asset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/scene/builder" +) + +type Project struct { + commonScene + commonSceneLock + assetRepo repo.Asset + projectRepo repo.Project + userRepo repo.User + teamRepo repo.Team + sceneRepo repo.Scene + propertyRepo repo.Property + layerRepo repo.Layer + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + transaction repo.Transaction + file gateway.File +} + +func NewProject(r *repo.Container, gr *gateway.Container) interfaces.Project { + return &Project{ + commonScene: commonScene{sceneRepo: r.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + assetRepo: r.Asset, + projectRepo: r.Project, + userRepo: r.User, + teamRepo: r.Team, + sceneRepo: r.Scene, + propertyRepo: r.Property, + layerRepo: r.Layer, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + transaction: r.Transaction, + file: gr.File, + } +} + +func (i *Project) Fetch(ctx context.Context, ids []id.ProjectID, operator *usecase.Operator) ([]*project.Project, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + return i.projectRepo.FindByIDs(ctx, ids, operator.ReadableTeams) +} + +func (i *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*project.Project, *usecase.PageInfo, error) { + if err := i.CanReadTeam(id, operator); err != nil { + return nil, nil, err + } + return i.projectRepo.FindByTeam(ctx, id, p) +} + +func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = 
tx.End(ctx) + }() + + if err := i.CanWriteTeam(p.TeamID, operator); err != nil { + return nil, err + } + + pb := project.New(). + NewID(). + Team(p.TeamID). + Visualizer(p.Visualizer) + if p.Name != nil { + pb = pb.Name(*p.Name) + } + if p.Description != nil { + pb = pb.Description(*p.Description) + } + if p.ImageURL != nil { + pb = pb.ImageURL(p.ImageURL) + } + if p.Alias != nil { + pb = pb.Alias(*p.Alias) + } + if p.Archived != nil { + pb = pb.IsArchived(*p.Archived) + } + + project, err := pb.Build() + if err != nil { + return nil, err + } + + err = i.projectRepo.Save(ctx, project) + if err != nil { + return nil, err + } + + tx.Commit() + return project, nil +} + +func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + prj, err := i.projectRepo.FindByID(ctx, p.ID, operator.WritableTeams) + if err != nil { + return nil, err + } + + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return nil, err + } + + oldAlias := prj.Alias() + + if p.Name != nil { + prj.UpdateName(*p.Name) + } + + if p.Description != nil { + prj.UpdateDescription(*p.Description) + } + + if p.Alias != nil { + if err := prj.UpdateAlias(*p.Alias); err != nil { + return nil, err + } + } + + if p.ImageURL != nil && !p.DeleteImageURL { + prj.SetImageURL(p.ImageURL) + } + if p.DeleteImageURL { + prj.SetImageURL(nil) + } + + if p.Archived != nil { + prj.SetArchived(*p.Archived) + } + + if p.PublicTitle != nil { + prj.UpdatePublicTitle(*p.PublicTitle) + } + + if p.PublicDescription != nil { + prj.UpdatePublicDescription(*p.PublicDescription) + } + + if p.PublicImage != nil && !p.DeletePublicImage { + asset, err := i.createAsset(ctx, p.PublicImage, prj.Team()) + if err != nil { + return nil, err + } + 
prj.UpdatePublicImage(asset.URL()) + } + + if p.PublicNoIndex != nil { + prj.UpdatePublicNoIndex(*p.PublicNoIndex) + } + + if p.DeletePublicImage { + prj.UpdatePublicImage("") + } + + err = i.projectRepo.Save(ctx, prj) + if err != nil { + return nil, err + } + + if prj.PublishmentStatus() != project.PublishmentStatusPrivate && p.Alias != nil && *p.Alias != oldAlias { + if err := i.file.MoveBuiltScene(ctx, oldAlias, *p.Alias); err != nil { + // ignore ErrNotFound + if !errors.Is(err, err1.ErrNotFound) { + return nil, err + } + } + } + + tx.Commit() + return prj, nil +} + +func (i *Project) CheckAlias(ctx context.Context, alias string) (bool, error) { + if !project.CheckAliasPattern(alias) { + return false, project.ErrInvalidAlias + } + + prj, err := i.projectRepo.FindByPublicName(ctx, alias) + if prj == nil && err == nil || err != nil && errors.Is(err, err1.ErrNotFound) { + return true, nil + } + + return false, err +} + +func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + prj, err := i.projectRepo.FindByID(ctx, params.ID, operator.WritableTeams) + if err != nil { + return nil, err + } + + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return nil, err + } + + s, err := i.sceneRepo.FindByProject(ctx, params.ID, operator.WritableTeams) + if err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, s.ID()); err != nil { + return nil, err + } + + sceneID := s.ID() + + prevAlias := prj.Alias() + if params.Alias == nil && prevAlias == "" && params.Status != project.PublishmentStatusPrivate { + return nil, interfaces.ErrProjectAliasIsNotSet + } + + var prevPublishedAlias string + if prj.PublishmentStatus() != project.PublishmentStatusPrivate { + prevPublishedAlias 
= prevAlias + } + + newAlias := prevAlias + if params.Alias != nil { + if err := prj.UpdateAlias(*params.Alias); err != nil { + return nil, err + } + newAlias = *params.Alias + } + + newPublishedAlias := newAlias + + // Lock + if err := i.UpdateSceneLock(ctx, sceneID, scene.LockModeFree, scene.LockModePublishing); err != nil { + return nil, err + } + + defer i.ReleaseSceneLock(ctx, sceneID) + + if params.Status == project.PublishmentStatusPrivate { + // unpublish + if err = i.file.RemoveBuiltScene(ctx, prevPublishedAlias); err != nil { + return prj, err + } + } else { + // publish + r, w := io.Pipe() + + // Build + scenes := []id.SceneID{sceneID} + go func() { + var err error + + defer func() { + _ = w.CloseWithError(err) + }() + + err = builder.New( + repo.LayerLoaderFrom(i.layerRepo, scenes), + repo.PropertyLoaderFrom(i.propertyRepo, scenes), + repo.DatasetGraphLoaderFrom(i.datasetRepo, scenes), + ).BuildScene(ctx, w, s, time.Now()) + }() + + // Save + if err := i.file.UploadBuiltScene(ctx, r, newPublishedAlias); err != nil { + return nil, err + } + + // If project has been published before and alias is changed, + // remove old published data. + if prevPublishedAlias != "" && newPublishedAlias != prevPublishedAlias { + if err := i.file.RemoveBuiltScene(ctx, prevPublishedAlias); err != nil { + return nil, err + } + } + } + + prj.UpdatePublishmentStatus(params.Status) + prj.SetPublishedAt(time.Now()) + + err = i.projectRepo.Save(ctx, prj) + if err != nil { + return nil, err + } + + tx.Commit() + return prj, nil +} + +func (i *Project) createAsset(ctx context.Context, f *file.File, t id.TeamID) (_ *asset.Asset, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + url, err := i.file.UploadAsset(ctx, f) + if err != nil { + return nil, err + } + + asset, err := asset.New(). + NewID(). + Team(t). + Name(f.Name). + Size(f.Size). + URL(url.String()). 
+ Build() + if err != nil { + return nil, err + } + + err = i.assetRepo.Save(ctx, asset) + if err != nil { + return nil, err + } + + tx.Commit() + return asset, nil +} + +func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator *usecase.Operator) (err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return err + } + + prj, err := i.projectRepo.FindByID(ctx, projectID, operator.WritableTeams) + if err != nil { + return err + } + + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return err + } + + deleter := ProjectDeleter{ + SceneDeleter: SceneDeleter{ + Scene: i.sceneRepo, + SceneLock: i.sceneLockRepo, + Layer: i.layerRepo, + Property: i.propertyRepo, + Dataset: i.datasetRepo, + DatasetSchema: i.datasetSchemaRepo, + }, + File: i.file, + Project: i.projectRepo, + } + if err := deleter.Delete(ctx, prj, true, operator); err != nil { + return err + } + + tx.Commit() + return nil +} diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go new file mode 100644 index 000000000..96dba6901 --- /dev/null +++ b/internal/usecase/interactor/property.go @@ -0,0 +1,525 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Property struct { + commonScene + commonSceneLock + propertyRepo repo.Property + propertySchemaRepo repo.PropertySchema + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + sceneRepo repo.Scene + assetRepo repo.Asset + transaction 
repo.Transaction + file gateway.File +} + +func NewProperty(r *repo.Container, gr *gateway.Container) interfaces.Property { + return &Property{ + commonScene: commonScene{sceneRepo: r.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + propertyRepo: r.Property, + propertySchemaRepo: r.PropertySchema, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + sceneRepo: r.Scene, + assetRepo: r.Asset, + transaction: r.Transaction, + file: gr.File, + } +} + +func (i *Property) Fetch(ctx context.Context, ids []id.PropertyID, operator *usecase.Operator) ([]*property.Property, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.propertyRepo.FindByIDs(ctx, ids, scenes) +} + +func (i *Property) FetchSchema(ctx context.Context, ids []id.PropertySchemaID, operator *usecase.Operator) ([]*property.Schema, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + res, err := i.propertySchemaRepo.FindByIDs(ctx, ids) + return res, err +} + +func (i *Property) FetchMerged(ctx context.Context, org, parent *id.PropertyID, linked *id.DatasetID, operator *usecase.Operator) (*property.Merged, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + ids := []id.PropertyID{} + if org != nil { + ids = append(ids, *org) + } + if parent != nil { + ids = append(ids, *parent) + } + props, err := i.propertyRepo.FindByIDs(ctx, ids, scenes) + if err != nil { + return nil, err + } + + var orgp, parentp *property.Property + if org != nil && parent != nil && len(props) == 2 { + orgp = props[0] + parentp = props[1] + } else if org != nil && parent == nil && len(props) == 1 { + orgp = props[0] + } else if org == nil && parent != nil && len(props) == 1 { + parentp = props[0] + } + + res := property.Merge(orgp, parentp, linked) + return res, nil +} + +func (i *Property) UpdateValue(ctx context.Context, inp 
interfaces.UpdatePropertyValueParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, _ *property.Group, _ *property.Field, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, nil, nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, err := p.UpdateValue(ps, inp.Pointer, inp.Value) + if err != nil { + return nil, nil, nil, nil, err + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) RemoveField(ctx context.Context, inp interfaces.RemovePropertyFieldParam, operator *usecase.Operator) (p *property.Property, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, err + } + + p.RemoveField(inp.Pointer) + p.Prune() + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, err + } + + tx.Commit() + return p, nil +} + +func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFileParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { 
+ err = tx.End(ctx) + }() + + if inp.File == nil { + return nil, nil, nil, nil, interfaces.ErrInvalidFile + } + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, nil, nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + propertyScene, err := i.sceneRepo.FindByID(ctx, p.Scene(), operator.WritableTeams) + if err != nil { + return nil, nil, nil, nil, err + } + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) + + if field.Type() != property.ValueTypeURL { + return nil, nil, nil, nil, interfaces.ErrPropertyInvalidType + } + + url, err := i.file.UploadAsset(ctx, inp.File) + if err != nil { + return nil, nil, nil, nil, err + } + + asset, err := asset.New(). + NewID(). + Team(propertyScene.Team()). + Name(inp.File.Name). + Size(inp.File.Size). + URL(url.String()). 
+ Build() + if err != nil { + return nil, nil, nil, nil, err + } + + err = i.assetRepo.Save(ctx, asset) + if err != nil { + return nil, nil, nil, nil, err + } + + v := property.ValueTypeURL.ValueFromUnsafe(url) + if v == nil { + return nil, nil, nil, nil, interfaces.ErrInvalidPropertyValue + } + if err = field.Update(v, ps.Field(field.Field())); err != nil { + return nil, nil, nil, nil, err + } + + if err = i.propertyRepo.Save(ctx, p); err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyValueParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, nil, nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) + + propertyScenes := []id.SceneID{p.Scene()} + + if inp.Links != nil { + dsids := inp.Links.DatasetSchemaIDs() + dids := inp.Links.DatasetIDs() + dss, err := i.datasetSchemaRepo.FindByIDs(ctx, dsids, propertyScenes) + if err != nil { + return nil, nil, nil, nil, err + } + ds, err := i.datasetRepo.FindByIDs(ctx, dids, propertyScenes) + if err != nil { + return nil, nil, nil, nil, err + } + if !inp.Links.Validate(dss.Map(), ds.Map()) { + return nil, nil, nil, nil, interfaces.ErrInvalidPropertyLinks + } + } + + field.Link(inp.Links) + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, 
nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) UnlinkValue(ctx context.Context, inp interfaces.UnlinkPropertyValueParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, nil, nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, nil, err + } + + field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) + + field.Unlink() + + if field.IsEmpty() { + field = nil + } + p.Prune() + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, nil, err + } + + tx.Commit() + return p, pgl, pg, field, nil +} + +func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, pg *property.Group, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, nil, nil, err + } + + item, gl := p.AddListItem(ps, inp.Pointer, inp.Index) + if item == nil { + return nil, nil, nil, errors.New("failed to create item") + } 
+ + // Set nameFieldValue to the name field + if inp.NameFieldValue != nil { + _ = item.UpdateNameFieldValue(ps, inp.NameFieldValue) + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, err + } + + tx.Commit() + return p, gl, item, nil +} + +func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItemParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, _ *property.Group, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, nil, nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, nil, nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, nil, nil, err + } + + item, gl := p.MoveListItem(inp.Pointer, inp.Index) + if item == nil { + return nil, nil, nil, errors.New("failed to move item") + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, nil, nil, err + } + + tx.Commit() + return p, gl, item, nil +} + +func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemovePropertyItemParam, operator *usecase.Operator) (p *property.Property, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, err + } + + if ok := p.RemoveListItem(inp.Pointer); !ok { + return nil, errors.New("failed to remove item") + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, err + } + + tx.Commit() + return p, nil +} + +func (i *Property) UpdateItems(ctx context.Context, inp 
interfaces.UpdatePropertyItemsParam, operator *usecase.Operator) (*property.Property, error) { + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + p, err := i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) + if err != nil { + return nil, err + } + + if err := i.CheckSceneLock(ctx, p.Scene()); err != nil { + return nil, err + } + + ps, err := i.propertySchemaRepo.FindByID(ctx, p.Schema()) + if err != nil { + return nil, err + } + + for _, op := range inp.Operations { + var ptr *property.Pointer + if op.ItemID != nil { + ptr = property.PointItem(*op.ItemID) + } + + if op.Operation == interfaces.ListOperationAdd { + g, _ := p.AddListItem(ps, inp.Pointer, op.Index) + if op.NameFieldValue != nil { + _ = g.UpdateNameFieldValue(ps, op.NameFieldValue) + } + } else if op.Operation == interfaces.ListOperationMove && ptr != nil && op.Index != nil { + _, _ = p.MoveListItem(ptr, *op.Index) + } else if op.Operation == interfaces.ListOperationRemove && ptr != nil { + _ = p.RemoveListItem(ptr) + } + } + + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, err + } + + return p, nil +} diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go new file mode 100644 index 000000000..f49335d2e --- /dev/null +++ b/internal/usecase/interactor/scene.go @@ -0,0 +1,595 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/builtin" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + 
"github.com/reearth/reearth-backend/pkg/scene/sceneops" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type Scene struct { + commonScene + commonSceneLock + sceneRepo repo.Scene + sceneLockRepo repo.SceneLock + propertyRepo repo.Property + propertySchemaRepo repo.PropertySchema + projectRepo repo.Project + pluginRepo repo.Plugin + layerRepo repo.Layer + datasetRepo repo.Dataset + transaction repo.Transaction +} + +func NewScene(r *repo.Container) interfaces.Scene { + return &Scene{ + commonScene: commonScene{sceneRepo: r.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, + sceneRepo: r.Scene, + sceneLockRepo: r.SceneLock, + propertyRepo: r.Property, + propertySchemaRepo: r.PropertySchema, + projectRepo: r.Project, + pluginRepo: r.Plugin, + layerRepo: r.Layer, + datasetRepo: r.Dataset, + transaction: r.Transaction, + } +} + +func (i *Scene) Fetch(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]*scene.Scene, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + return i.sceneRepo.FindByIDs(ctx, ids, operator.ReadableTeams) +} + +func (i *Scene) FindByProject(ctx context.Context, id id.ProjectID, operator *usecase.Operator) (*scene.Scene, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + res, err := i.sceneRepo.FindByProject(ctx, id, operator.ReadableTeams) + return res, err +} + +func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase.Operator) (_ *scene.Scene, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + prj, err := i.projectRepo.FindByID(ctx, pid, operator.WritableTeams) + if err != nil { + return nil, err + } + + if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + return nil, err + } + + schema := 
builtin.GetPropertySchemaByVisualizer(visualizer.VisualizerCesium) + sceneID := id.NewSceneID() + + rootLayer, err := layer.NewGroup().NewID().Scene(sceneID).Root(true).Build() + if err != nil { + return nil, err + } + + ps := scene.NewPluginSystem([]*scene.Plugin{ + scene.NewPlugin(id.OfficialPluginID, nil), + }) + + p, err := property.New().NewID().Schema(schema.ID()).Scene(sceneID).Build() + if err != nil { + return nil, err + } + + // add default tile + tiles := id.PropertySchemaFieldID("tiles") + g := p.GetOrCreateGroupList(schema, property.PointItemBySchema(tiles)) + g.Add(property.NewGroup().NewID().Schema(schema.ID(), tiles).MustBuild(), -1) + + scene, err := scene.New(). + ID(sceneID). + Project(pid). + Team(prj.Team()). + Property(p.ID()). + RootLayer(rootLayer.ID()). + PluginSystem(ps). + Build() + + if err != nil { + return nil, err + } + + if p != nil { + err = i.propertyRepo.Save(ctx, p) + if err != nil { + return nil, err + } + } + + err = i.layerRepo.Save(ctx, rootLayer) + if err != nil { + return nil, err + } + + err = i.sceneRepo.Save(ctx, scene) + if err != nil { + return nil, err + } + + tx.Commit() + return scene, err +} + +func (s *Scene) FetchLock(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]scene.LockMode, error) { + if err := s.OnlyOperator(operator); err != nil { + return nil, err + } + return s.sceneLockRepo.GetAllLock(ctx, ids) +} + +func (i *Scene) AddWidget(ctx context.Context, id id.SceneID, pid id.PluginID, eid id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, widget *scene.Widget, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, nil, interfaces.ErrOperationDenied + } + + // check scene lock + if err := i.CheckSceneLock(ctx, id); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, id, operator.WritableTeams) + if err != nil { 
+ return nil, nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, nil, err + } + + _, extension, err := i.getPlugin(ctx, pid, eid) + if err != nil { + return nil, nil, err + } + if extension.Type() != plugin.ExtensionTypeWidget { + return nil, nil, interfaces.ErrExtensionTypeMustBeWidget + } + + property, err := property.New().NewID().Schema(extension.Schema()).Scene(id).Build() + if err != nil { + return nil, nil, err + } + + widget, err = scene.NewWidget(nil, pid, eid, property.ID(), true) + if err != nil { + return nil, nil, err + } + + s.WidgetSystem().Add(widget) + + err = i.propertyRepo.Save(ctx, property) + if err != nil { + return nil, nil, err + } + + err = i.sceneRepo.Save(ctx, s) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return s, widget, nil +} + +func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetParam, operator *usecase.Operator) (_ *scene.Scene, _ *scene.Widget, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, nil, interfaces.ErrOperationDenied + } + + // check scene lock + if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { + return nil, nil, err + } + + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.WritableTeams) + if err2 != nil { + return nil, nil, err2 + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, nil, err + } + + ws := scene.WidgetSystem() + widget := ws.Widget(param.PluginID, param.ExtensionID) + if widget == nil { + return nil, nil, err1.ErrNotFound + } + + if param.Enabled != nil { + widget.SetEnabled(*param.Enabled) + } + + err2 = i.sceneRepo.Save(ctx, scene) + if err2 != nil { + return nil, nil, err2 + } + + tx.Commit() + return scene, widget, nil +} + +func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, pid id.PluginID, eid 
id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, interfaces.ErrOperationDenied + } + + scene, err2 := i.sceneRepo.FindByID(ctx, id, operator.WritableTeams) + if err2 != nil { + return nil, err2 + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, id); err != nil { + return nil, err + } + + ws := scene.WidgetSystem() + + widget := ws.Widget(pid, eid) + if widget == nil { + return nil, err1.ErrNotFound + } + + ws.Remove(pid, eid) + + err2 = i.propertyRepo.Remove(ctx, widget.Property()) + if err2 != nil { + return nil, err2 + } + + err2 = i.sceneRepo.Save(ctx, scene) + if err2 != nil { + return nil, err2 + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, _ id.PluginID, _ *id.PropertyID, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil { + return nil, pid, nil, interfaces.ErrOperationDenied + } + + s, err2 := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + if err2 != nil { + return nil, pid, nil, err2 + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, pid, nil, err + } + + // check scene lock + if err2 := i.CheckSceneLock(ctx, sid); err2 != nil { + return nil, pid, nil, err2 + } + + if s.PluginSystem().HasPlugin(pid) { + return nil, pid, nil, interfaces.ErrPluginAlreadyInstalled + } + + plugin, err2 := i.pluginRepo.FindByID(ctx, pid) + if err2 != nil { + if errors.Is(err2, err1.ErrNotFound) { + // + // Install Plugin + // + return nil, pid, nil, interfaces.ErrPluginNotFound + } + return nil, pid, nil, err2 + } + + 
var p *property.Property + var propertyID *id.PropertyID + schema := plugin.Schema() + if schema != nil { + pr, err := property.New().NewID().Schema(*schema).Scene(sid).Build() + if err != nil { + return nil, pid, nil, err + } + prid := pr.ID() + p = pr + propertyID = &prid + } + + s.PluginSystem().Add(scene.NewPlugin(pid, propertyID)) + + if p != nil { + err2 = i.propertyRepo.Save(ctx, p) + if err2 != nil { + return nil, pid, nil, err2 + } + } + + err2 = i.sceneRepo.Save(ctx, s) + if err2 != nil { + return nil, pid, nil, err2 + } + + tx.Commit() + return s, pid, propertyID, nil +} + +func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + scene, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + // check scene lock + if err := i.CheckSceneLock(ctx, sid); err != nil { + return nil, err + } + + ps := scene.PluginSystem() + if !ps.Has(pid) { + return nil, interfaces.ErrPluginNotInstalled + } + + removedProperties := []id.PropertyID{} + + // remove plugin + if p := ps.Property(pid); p != nil { + removedProperties = append(removedProperties, *p) + } + ps.Remove(pid) + + // remove widgets + for _, w := range scene.WidgetSystem().Widgets() { + if w.Plugin().Equal(pid) { + scene.WidgetSystem().Remove(pid, w.Extension()) + removedProperties = append(removedProperties, w.Property()) + } + } + + // remove layers and infobox fields + modifiedLayers := layer.List{} + removedLayers := []id.LayerID{} + layers, err := i.layerRepo.FindByScene(ctx, sid) + if err != nil { + return nil, err + } + for _, l := range layers { + if l == nil { + continue + } + ll := *l + if p 
:= ll.Plugin(); p != nil && pid.Equal(*p) { + removedLayers = append(removedLayers, ll.ID()) + if pp := ll.Property(); pp != nil { + removedProperties = append(removedProperties, *pp) + } + if ib := ll.Infobox(); ib != nil { + removedProperties = append(removedProperties, ib.Property()) + for _, f := range ib.Fields() { + removedProperties = append(removedProperties, f.Property()) + } + } + } else if ib := ll.Infobox(); ib != nil { + removedProperties = append(removedProperties, ib.Property()) + for _, f := range ib.Fields() { + removedProperties = append(removedProperties, f.Property()) + } + var ll2 layer.Layer = ll + modifiedLayers = append(modifiedLayers, &ll2) + } + } + for _, lg := range layers.ToLayerGroupList() { + modified := false + cancel := false + for _, lid := range removedLayers { + if lg.ID() == lid { + cancel = true + break + } + if lg.Layers().HasLayer(lid) { + lg.Layers().RemoveLayer(lid) + modified = true + } + } + if cancel { + continue + } + if modified { + already := false + for _, l := range modifiedLayers { + if l != nil && (*l).ID() == lg.ID() { + already = true + break + } + } + if already { + continue + } + var lg2 layer.Layer = lg + modifiedLayers = append(modifiedLayers, &lg2) + } + } + + if len(modifiedLayers) > 0 { + err = i.layerRepo.SaveAll(ctx, modifiedLayers) + if err != nil { + return nil, err + } + } + if len(removedLayers) > 0 { + err = i.layerRepo.RemoveAll(ctx, removedLayers) + if err != nil { + return nil, err + } + } + err = i.sceneRepo.Save(ctx, scene) + if err != nil { + return nil, err + } + if len(removedProperties) > 0 { + err = i.propertyRepo.RemoveAll(ctx, removedProperties) + if err != nil { + return nil, err + } + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, newPluginID id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = 
tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, err + } + + if err := i.UpdateSceneLock(ctx, sid, scene.LockModeFree, scene.LockModePluginUpgrading); err != nil { + return nil, err + } + + defer i.ReleaseSceneLock(ctx, sid) + + scenes := []id.SceneID{s.ID()} + pluginMigrator := sceneops.PluginMigrator{ + Property: repo.PropertyLoaderFrom(i.propertyRepo, scenes), + PropertySchema: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), + Dataset: repo.DatasetLoaderFrom(i.datasetRepo, scenes), + Layer: repo.LayerLoaderBySceneFrom(i.layerRepo), + Plugin: repo.PluginLoaderFrom(i.pluginRepo), + } + + result, err := pluginMigrator.MigratePlugins(ctx, s, oldPluginID, newPluginID) + + if err := i.sceneRepo.Save(ctx, result.Scene); err != nil { + return nil, err + } + if err := i.propertyRepo.SaveAll(ctx, result.Properties); err != nil { + return nil, err + } + if err := i.layerRepo.SaveAll(ctx, result.Layers); err != nil { + return nil, err + } + if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers); err != nil { + return nil, err + } + if err := i.propertyRepo.RemoveAll(ctx, result.RemovedProperties); err != nil { + return nil, err + } + + tx.Commit() + return result.Scene, err +} + +func (i *Scene) getPlugin(ctx context.Context, p id.PluginID, e id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { + plugin, err2 := i.pluginRepo.FindByID(ctx, p) + if err2 != nil { + if errors.Is(err2, err1.ErrNotFound) { + return nil, nil, interfaces.ErrPluginNotFound + } + return nil, nil, err2 + } + + extension := plugin.Extension(e) + if extension == nil { + return nil, nil, interfaces.ErrExtensionNotFound + } + + return plugin, extension, nil +} diff --git a/internal/usecase/interactor/team.go b/internal/usecase/interactor/team.go new file mode 
100644 index 000000000..a3cd86d3c --- /dev/null +++ b/internal/usecase/interactor/team.go @@ -0,0 +1,301 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Team struct { + common + teamRepo repo.Team + projectRepo repo.Project + userRepo repo.User + transaction repo.Transaction +} + +func NewTeam(r *repo.Container) interfaces.Team { + return &Team{ + teamRepo: r.Team, + projectRepo: r.Project, + userRepo: r.User, + transaction: r.Transaction, + } +} + +func (i *Team) Fetch(ctx context.Context, ids []id.TeamID, operator *usecase.Operator) ([]*user.Team, error) { + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + res, err := i.teamRepo.FindByIDs(ctx, ids) + res2, err := i.filterTeams(res, operator, err) + return res2, err +} + +func (i *Team) FindByUser(ctx context.Context, id id.UserID, operator *usecase.Operator) ([]*user.Team, error) { + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + res, err := i.teamRepo.FindByUser(ctx, id) + res2, err := i.filterTeams(res, operator, err) + return res2, err +} + +func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID) (_ *user.Team, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + team, err := user.NewTeam(). + NewID(). + Name(name). 
+ Build() + if err != nil { + return nil, err + } + + err = team.Members().Join(firstUser, user.RoleOwner) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) Update(ctx context.Context, id id.TeamID, name string, operator *usecase.Operator) (_ *user.Team, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + team.Rename(name) + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) AddMember(ctx context.Context, id id.TeamID, u id.UserID, role user.Role, operator *usecase.Operator) (_ *user.Team, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + _, err = i.userRepo.FindByID(ctx, u) + if err != nil { + return nil, err + } + + err = team.Members().Join(u, role) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) RemoveMember(ctx context.Context, id id.TeamID, u id.UserID, operator *usecase.Operator) (_ *user.Team, err error) { 
+ + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + if u.ID() == operator.User.ID() { + return nil, interfaces.ErrOwnerCannotLeaveTheTeam + } + + err = team.Members().Leave(u) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) UpdateMember(ctx context.Context, id id.TeamID, u id.UserID, role user.Role, operator *usecase.Operator) (_ *user.Team, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil { + return nil, interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return nil, err + } + if team.IsPersonal() { + return nil, user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return nil, interfaces.ErrOperationDenied + } + + if u.ID() == operator.User.ID() { + return nil, interfaces.ErrCannotChangeOwnerRole + } + + err = team.Members().UpdateRole(u, role) + if err != nil { + return nil, err + } + + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + + tx.Commit() + return team, nil +} + +func (i *Team) Remove(ctx context.Context, id id.TeamID, operator *usecase.Operator) (err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil { + return interfaces.ErrOperationDenied + } + + team, err := i.teamRepo.FindByID(ctx, id) + if err != nil { + return err + 
} + if team.IsPersonal() { + return user.ErrCannotModifyPersonalTeam + } + if team.Members().GetRole(operator.User) != user.RoleOwner { + return interfaces.ErrOperationDenied + } + + projects, err := i.projectRepo.CountByTeam(ctx, id) + if err != nil { + return err + } + if projects > 0 { + return interfaces.ErrCannotDeleteTeam + } + + err = i.teamRepo.Remove(ctx, id) + if err != nil { + return err + } + + tx.Commit() + return +} + +func (i *Team) filterTeams(teams []*user.Team, operator *usecase.Operator, err error) ([]*user.Team, error) { + if err != nil { + return nil, err + } + if operator == nil { + return make([]*user.Team, len(teams)), nil + } + for i, t := range teams { + if t == nil || !operator.IsReadableTeamIncluded(t.ID()) { + teams[i] = nil + } + } + return teams, nil +} diff --git a/internal/usecase/interactor/team_test.go b/internal/usecase/interactor/team_test.go new file mode 100644 index 000000000..41496ebeb --- /dev/null +++ b/internal/usecase/interactor/team_test.go @@ -0,0 +1,37 @@ +package interactor + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/stretchr/testify/assert" +) + +func TestCreateTeam(t *testing.T) { + ctx := context.Background() + + db := memory.InitRepos(&repo.Container{}, false) + + user := user.New().NewID().Team(id.NewTeamID()).MustBuild() + + teamUC := NewTeam(db) + + team, err := teamUC.Create(ctx, "team name", user.ID()) + + assert.Nil(t, err) + assert.NotNil(t, team) + + resultTeams, _ := teamUC.Fetch(ctx, []id.TeamID{team.ID()}, &usecase.Operator{ + ReadableTeams: []id.TeamID{team.ID()}, + }) + + assert.NotNil(t, resultTeams) + assert.NotEmpty(t, resultTeams) + assert.Equal(t, resultTeams[0].ID(), team.ID()) + assert.Equal(t, 
resultTeams[0].Name(), "team name") +} diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go new file mode 100644 index 000000000..09be865f8 --- /dev/null +++ b/internal/usecase/interactor/user.go @@ -0,0 +1,364 @@ +package interactor + +import ( + "context" + "errors" + + "golang.org/x/text/language" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/reearth/reearth-backend/pkg/user/initializer" +) + +type User struct { + common + userRepo repo.User + teamRepo repo.Team + projectRepo repo.Project + sceneRepo repo.Scene + sceneLockRepo repo.SceneLock + layerRepo repo.Layer + propertyRepo repo.Property + datasetRepo repo.Dataset + datasetSchemaRepo repo.DatasetSchema + transaction repo.Transaction + file gateway.File + authenticator gateway.Authenticator + signupSecret string +} + +func NewUser(r *repo.Container, g *gateway.Container, signupSecret string) interfaces.User { + return &User{ + userRepo: r.User, + teamRepo: r.Team, + projectRepo: r.Project, + sceneRepo: r.Scene, + sceneLockRepo: r.SceneLock, + layerRepo: r.Layer, + propertyRepo: r.Property, + datasetRepo: r.Dataset, + datasetSchemaRepo: r.DatasetSchema, + transaction: r.Transaction, + file: g.File, + authenticator: g.Authenticator, + signupSecret: signupSecret, + } +} + +func (i *User) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Operator) ([]*user.User, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + res, err := i.userRepo.FindByIDs(ctx, ids) + if err != nil { + return res, err + } + // filter + for k, u := range 
res { + teams, err := i.teamRepo.FindByUser(ctx, u.ID()) + if err != nil { + return res, err + } + teamIDs := make([]id.TeamID, 0, len(teams)) + for _, t := range teams { + if t != nil { + teamIDs = append(teamIDs, t.ID()) + } + } + if !operator.IsReadableTeamsIncluded(teamIDs) { + res[k] = nil + } + } + return res, nil +} + +func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user.User, _ *user.Team, err error) { + if i.signupSecret != "" && inp.Secret != i.signupSecret { + return nil, nil, interfaces.ErrSignupInvalidSecret + } + + if len(inp.Sub) == 0 { + return nil, nil, errors.New("sub is required") + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + // Check if user and team already exists + existed, err := i.userRepo.FindByAuth0Sub(ctx, inp.Sub) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return nil, nil, errors.New("existed user") + } + + if inp.UserID != nil { + existed, err := i.userRepo.FindByID(ctx, *inp.UserID) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return nil, nil, errors.New("existed user") + } + } + + if inp.TeamID != nil { + existed, err := i.teamRepo.FindByID(ctx, *inp.TeamID) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return nil, nil, errors.New("existed team") + } + } + + // Check if user and team already exists + var team *user.Team + existed, err = i.userRepo.FindByEmail(ctx, inp.Email) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return nil, nil, errors.New("existed user") + } + + // Initialize user and team + u, team, err = initializer.InitUser(inp.Email, inp.Name, inp.Sub, inp.UserID, inp.TeamID) + if err != nil { + return nil, nil, err + } + if err := i.userRepo.Save(ctx, u); err != nil { + return nil, 
nil, err + } + if err := i.teamRepo.Save(ctx, team); err != nil { + return nil, nil, err + } + + tx.Commit() + return u, team, nil +} + +func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operator *usecase.Operator) (u *user.User, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + var team *user.Team + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + u, err = i.userRepo.FindByID(ctx, operator.User) + if err != nil { + return nil, err + } + + if p.Name != nil { + oldName := u.Name() + u.UpdateName(*p.Name) + + team, err = i.teamRepo.FindByID(ctx, u.Team()) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return nil, err + } + + tn := team.Name() + if tn == "" || tn == oldName { + team.Rename(*p.Name) + } else { + team = nil + } + } + if p.Email != nil { + u.UpdateEmail(*p.Email) + } + if p.Lang != nil { + if *p.Lang == "" { + u.UpdateLang(language.Tag{}) + } else { + l, err := language.Parse(*p.Lang) + if err != nil { + return nil, interfaces.ErrUserInvalidLang + } + u.UpdateLang(l) + } + } + if p.Password != nil { + if p.PasswordConfirmation == nil || *p.Password != *p.PasswordConfirmation { + return nil, interfaces.ErrUserInvalidPasswordConfirmation + } + } + if p.Theme != nil { + u.UpdateTheme(*p.Theme) + } + + // Update Auth0 users + if p.Name != nil || p.Email != nil || p.Password != nil { + for _, a := range u.Auths() { + if _, err := i.authenticator.UpdateUser(gateway.AuthenticatorUpdateUserParam{ + ID: a.Sub, + Name: p.Name, + Email: p.Email, + Password: p.Password, + }); err != nil { + return nil, err + } + } + } + + if team != nil { + err = i.teamRepo.Save(ctx, team) + if err != nil { + return nil, err + } + } + + err = i.userRepo.Save(ctx, u) + if err != nil { + return nil, err + } + + tx.Commit() + return u, nil +} + +func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator *usecase.Operator) (u 
*user.User, err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + u, err = i.userRepo.FindByID(ctx, operator.User) + if err != nil { + return nil, err + } + + u.RemoveAuthByProvider(authProvider) + + err = i.userRepo.Save(ctx, u) + if err != nil { + return nil, err + } + + tx.Commit() + return u, nil +} + +func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *usecase.Operator) (u *user.User, err error) { + u, err = i.userRepo.FindByNameOrEmail(ctx, nameOrEmail) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return nil, err + } + return u, nil +} + +func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase.Operator) (err error) { + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + err = tx.End(ctx) + }() + + if operator == nil || operator.User.IsNil() || userID.IsNil() || userID != operator.User { + return errors.New("invalid user id") + } + + u, err := i.userRepo.FindByID(ctx, userID) + if err != nil && !errors.Is(err, err1.ErrNotFound) { + return err + } + if u == nil { + return nil + } + + teams, err := i.teamRepo.FindByUser(ctx, u.ID()) + if err != nil { + return err + } + + deleter := ProjectDeleter{ + SceneDeleter: SceneDeleter{ + Scene: i.sceneRepo, + SceneLock: i.sceneLockRepo, + Layer: i.layerRepo, + Property: i.propertyRepo, + Dataset: i.datasetRepo, + DatasetSchema: i.datasetSchemaRepo, + }, + File: i.file, + Project: i.projectRepo, + } + updatedTeams := make([]*user.Team, 0, len(teams)) + deletedTeams := []id.TeamID{} + + for _, team := range teams { + if !team.IsPersonal() && !team.Members().IsOnlyOwner(u.ID()) { + _ = team.Members().Leave(u.ID()) + updatedTeams = append(updatedTeams, team) + continue + } + + // Delete all projects + err := repo.IterateProjectsByTeam(i.projectRepo, ctx, team.ID(), 50, func(projects 
[]*project.Project) error { + for _, prj := range projects { + if err := deleter.Delete(ctx, prj, true, operator); err != nil { + return err + } + } + return nil + }) + if err != nil { + return err + } + + deletedTeams = append(deletedTeams, team.ID()) + } + + // Save teams + if err := i.teamRepo.SaveAll(ctx, updatedTeams); err != nil { + return err + } + + // Delete teams + if err := i.teamRepo.RemoveAll(ctx, deletedTeams); err != nil { + return err + } + + // Delete user + if err := i.userRepo.Remove(ctx, u.ID()); err != nil { + return err + } + + tx.Commit() + return nil +} diff --git a/internal/usecase/interfaces/asset.go b/internal/usecase/interfaces/asset.go new file mode 100644 index 000000000..d4a078939 --- /dev/null +++ b/internal/usecase/interfaces/asset.go @@ -0,0 +1,26 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" +) + +type CreateAssetParam struct { + TeamID id.TeamID + File *file.File +} + +var ( + ErrCreateAssetFailed error = errors.New("failed to create asset") +) + +type Asset interface { + Create(context.Context, CreateAssetParam, *usecase.Operator) (*asset.Asset, error) + Remove(context.Context, id.AssetID, *usecase.Operator) (id.AssetID, error) + FindByTeam(context.Context, id.TeamID, *usecase.Pagination, *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) +} diff --git a/internal/usecase/interfaces/common.go b/internal/usecase/interfaces/common.go new file mode 100644 index 000000000..6069a60f8 --- /dev/null +++ b/internal/usecase/interfaces/common.go @@ -0,0 +1,17 @@ +package interfaces + +import "errors" + +type ListOperation string + +const ( + ListOperationAdd ListOperation = "add" + ListOperationMove ListOperation = "move" + ListOperationRemove ListOperation = "remove" +) + +var ( + ErrSceneIsLocked error = 
errors.New("scene is locked") + ErrOperationDenied error = errors.New("operation denied") + ErrFileNotIncluded error = errors.New("file not included") +) diff --git a/internal/usecase/interfaces/dataset.go b/internal/usecase/interfaces/dataset.go new file mode 100644 index 000000000..8a87593de --- /dev/null +++ b/internal/usecase/interfaces/dataset.go @@ -0,0 +1,69 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AddDatasetSchemaParam struct { + SceneId id.SceneID + Name string + RepresentativeField *id.DatasetSchemaFieldID +} + +type AddDynamicDatasetSchemaParam struct { + SceneId id.SceneID +} + +type AddDynamicDatasetParam struct { + SchemaId id.DatasetSchemaID + Author string + Content string + Target *string + Lat *float64 + Lng *float64 +} + +type ImportDatasetParam struct { + File *file.File + SceneId id.SceneID + SchemaId *id.DatasetSchemaID +} + +type RemoveDatasetSchemaParam struct { + SchemaId id.DatasetSchemaID + Force *bool +} + +type UpdateDatasetSchemaParam struct { + SchemaId id.DatasetSchemaID + Name string +} + +var ( + ErrNoDataSourceAvailable error = errors.New("no datasource available") + ErrDataSourceInvalidURL error = errors.New("invalid url") + ErrDatasetInvalidDepth error = errors.New("invalid depth") +) + +type Dataset interface { + Fetch(context.Context, []id.DatasetID, *usecase.Operator) (dataset.List, error) + GraphFetch(context.Context, id.DatasetID, int, *usecase.Operator) (dataset.List, error) + FetchSchema(context.Context, []id.DatasetSchemaID, *usecase.Operator) (dataset.SchemaList, error) + ImportDataset(context.Context, ImportDatasetParam, *usecase.Operator) (*dataset.Schema, error) + GraphFetchSchema(context.Context, id.DatasetSchemaID, int, *usecase.Operator) (dataset.SchemaList, error) + 
AddDynamicDatasetSchema(context.Context, AddDynamicDatasetSchemaParam) (*dataset.Schema, error) + AddDynamicDataset(context.Context, AddDynamicDatasetParam) (*dataset.Schema, *dataset.Dataset, error) + FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination, *usecase.Operator) (dataset.List, *usecase.PageInfo, error) + FindSchemaByScene(context.Context, id.SceneID, *usecase.Pagination, *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) + FindDynamicSchemaByScene(context.Context, id.SceneID) (dataset.SchemaList, error) + RemoveDatasetSchema(context.Context, RemoveDatasetSchemaParam, *usecase.Operator) (id.DatasetSchemaID, error) + UpdateDatasetSchema(context.Context, UpdateDatasetSchemaParam, *usecase.Operator) (*dataset.Schema, error) + Sync(context.Context, id.SceneID, string, *usecase.Operator) (dataset.SchemaList, dataset.List, error) + AddDatasetSchema(context.Context, AddDatasetSchemaParam, *usecase.Operator) (*dataset.Schema, error) +} diff --git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go new file mode 100644 index 000000000..449671114 --- /dev/null +++ b/internal/usecase/interfaces/layer.go @@ -0,0 +1,104 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/property" +) + +type AddLayerItemInput struct { + ParentLayerID id.LayerID + PluginID *id.PluginID + ExtensionID *id.PluginExtensionID + Index *int + LinkedDatasetID *id.DatasetID + Name string + LatLng *property.LatLng +} + +type AddLayerGroupInput struct { + ParentLayerID id.LayerID + PluginID *id.PluginID + ExtensionID *id.PluginExtensionID + Index *int + LinkedDatasetSchemaID *id.DatasetSchemaID + Name string +} + +type UpdateLayerInput 
struct { + LayerID id.LayerID + Name *string + Visible *bool +} + +type MoveLayerInput struct { + LayerID id.LayerID + DestLayerID *id.LayerID + Index int +} + +type AddInfoboxFieldParam struct { + LayerID id.LayerID + PluginID id.PluginID + ExtensionID id.PluginExtensionID + Index *int +} + +type MoveInfoboxFieldParam struct { + LayerID id.LayerID + InfoboxFieldID id.InfoboxFieldID + Index int +} + +type RemoveInfoboxFieldParam struct { + LayerID id.LayerID + InfoboxFieldID id.InfoboxFieldID +} +type ImportLayerParam struct { + LayerID id.LayerID + File *file.File + Format decoding.LayerEncodingFormat +} + +var ( + ErrParentLayerNotFound error = errors.New("parent layer not found") + ErrPluginNotFound error = errors.New("plugin not found") + ErrExtensionNotFound error = errors.New("extension not found") + ErrInfoboxNotFound error = errors.New("infobox not found") + ErrInfoboxAlreadyExists error = errors.New("infobox already exists") + ErrCannotAddLayerToLinkedLayerGroup error = errors.New("cannot add layer to linked layer group") + ErrCannotRemoveLayerToLinkedLayerGroup error = errors.New("cannot remove layer to linked layer group") + ErrLinkedLayerItemCannotBeMoved error = errors.New("linked layer item cannot be moved") + ErrLayerCannotBeMovedToLinkedLayerGroup error = errors.New("layer cannot be moved to linked layer group") + ErrCannotMoveLayerToOtherScene error = errors.New("layer cannot layer to other scene") + ErrExtensionTypeMustBePrimitive error = errors.New("extension type must be primitive") + ErrExtensionTypeMustBeBlock error = errors.New("extension type must be block") + ErrInvalidExtensionType error = errors.New("invalid extension type") +) + +type Layer interface { + Fetch(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Layer, error) + FetchGroup(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Group, error) + FetchItem(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Item, error) + FetchParent(context.Context, 
id.LayerID, *usecase.Operator) (*layer.Group, error) + FetchByProperty(context.Context, id.PropertyID, *usecase.Operator) (layer.Layer, error) + FetchMerged(context.Context, id.LayerID, *id.LayerID, *usecase.Operator) (*layer.Merged, error) + FetchParentAndMerged(context.Context, id.LayerID, *usecase.Operator) (*layer.Merged, error) + AddItem(context.Context, AddLayerItemInput, *usecase.Operator) (*layer.Item, *layer.Group, error) + AddGroup(context.Context, AddLayerGroupInput, *usecase.Operator) (*layer.Group, *layer.Group, error) + Remove(context.Context, id.LayerID, *usecase.Operator) (id.LayerID, *layer.Group, error) + Update(context.Context, UpdateLayerInput, *usecase.Operator) (layer.Layer, error) + Move(context.Context, MoveLayerInput, *usecase.Operator) (id.LayerID, *layer.Group, *layer.Group, int, error) + CreateInfobox(context.Context, id.LayerID, *usecase.Operator) (layer.Layer, error) + RemoveInfobox(context.Context, id.LayerID, *usecase.Operator) (layer.Layer, error) + AddInfoboxField(context.Context, AddInfoboxFieldParam, *usecase.Operator) (*layer.InfoboxField, layer.Layer, error) + MoveInfoboxField(context.Context, MoveInfoboxFieldParam, *usecase.Operator) (id.InfoboxFieldID, layer.Layer, int, error) + RemoveInfoboxField(context.Context, RemoveInfoboxFieldParam, *usecase.Operator) (id.InfoboxFieldID, layer.Layer, error) + ImportLayer(context.Context, ImportLayerParam, *usecase.Operator) (layer.List, *layer.Group, error) +} diff --git a/internal/usecase/interfaces/plugin.go b/internal/usecase/interfaces/plugin.go new file mode 100644 index 000000000..5bba2b81e --- /dev/null +++ b/internal/usecase/interfaces/plugin.go @@ -0,0 +1,20 @@ +package interfaces + +import ( + "context" + "errors" + "io" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +var ( + ErrPluginAlreadyRegistered error = errors.New("plugin already registered") +) + +type Plugin 
interface { + Fetch(context.Context, []id.PluginID, *usecase.Operator) ([]*plugin.Plugin, error) + Upload(context.Context, io.Reader, *usecase.Operator) (*plugin.Plugin, error) +} diff --git a/internal/usecase/interfaces/project.go b/internal/usecase/interfaces/project.go new file mode 100644 index 000000000..24ca6960a --- /dev/null +++ b/internal/usecase/interfaces/project.go @@ -0,0 +1,73 @@ +package interfaces + +import ( + "context" + "errors" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type CreateProjectParam struct { + TeamID id.TeamID + Visualizer visualizer.Visualizer + Name *string + Description *string + ImageURL *url.URL + Alias *string + Archived *bool +} + +type UpdateProjectParam struct { + ID id.ProjectID + Name *string + Description *string + Alias *string + Archived *bool + ImageURL *url.URL + PublicTitle *string + PublicDescription *string + PublicImage *file.File + PublicNoIndex *bool + DeletePublicImage bool + DeleteImageURL bool +} + +type PublishProjectParam struct { + ID id.ProjectID + Alias *string + Status project.PublishmentStatus +} + +type AddProjectDomainParam struct { + ID id.ProjectID + Domain string +} + +type VerifyProjectDomainParam struct { + ID id.ProjectID + Domain string +} + +type RemoveProjectDomainParam struct { + ID id.ProjectID + Domain string +} + +var ( + ErrProjectAliasIsNotSet error = errors.New("project alias is not set") +) + +type Project interface { + Fetch(context.Context, []id.ProjectID, *usecase.Operator) ([]*project.Project, error) + FindByTeam(context.Context, id.TeamID, *usecase.Pagination, *usecase.Operator) ([]*project.Project, *usecase.PageInfo, error) + Create(context.Context, CreateProjectParam, *usecase.Operator) (*project.Project, error) + Update(context.Context, 
UpdateProjectParam, *usecase.Operator) (*project.Project, error) + Publish(context.Context, PublishProjectParam, *usecase.Operator) (*project.Project, error) + CheckAlias(context.Context, string) (bool, error) + Delete(context.Context, id.ProjectID, *usecase.Operator) error +} diff --git a/internal/usecase/interfaces/property.go b/internal/usecase/interfaces/property.go new file mode 100644 index 000000000..1f295fff3 --- /dev/null +++ b/internal/usecase/interfaces/property.go @@ -0,0 +1,96 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type UpdatePropertyValueParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer + Value *property.Value +} + +type RemovePropertyFieldParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer +} + +type UploadFileParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer + File *file.File +} + +type LinkPropertyValueParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer + Links *property.Links +} + +type UnlinkPropertyValueParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer +} + +type AddPropertyItemParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer + Index *int + NameFieldValue *property.Value +} + +type MovePropertyItemParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer + Index int +} + +type RemovePropertyItemParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer +} + +type UpdatePropertyItemsParam struct { + PropertyID id.PropertyID + Pointer *property.Pointer + Operations []UpdatePropertyItemsOperationParam +} + +type UpdatePropertyItemsOperationParam struct { + Operation ListOperation + ItemID *id.PropertyItemID + Index *int + NameFieldValue *property.Value +} + +var ( + ErrPropertyNotFound 
error = errors.New("property not found") + ErrPropertyInvalidType error = errors.New("property invalid type") + ErrInvalidFile error = errors.New("invalid file") + ErrFailedToUploadFile error = errors.New("failed to upload file") + ErrPropertySchemaMustBeSpecified error = errors.New("property schema must be specified") + ErrInvalidDatasetFieldID error = errors.New("invalid dataset field id") + ErrInvalidPropertyLinks error = errors.New("invalid property links") + ErrInvalidPropertyValue error = errors.New("invalid property value") +) + +type Property interface { + Fetch(context.Context, []id.PropertyID, *usecase.Operator) ([]*property.Property, error) + FetchSchema(context.Context, []id.PropertySchemaID, *usecase.Operator) ([]*property.Schema, error) + FetchMerged(context.Context, *id.PropertyID, *id.PropertyID, *id.DatasetID, *usecase.Operator) (*property.Merged, error) + UpdateValue(context.Context, UpdatePropertyValueParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error) + RemoveField(context.Context, RemovePropertyFieldParam, *usecase.Operator) (*property.Property, error) + UploadFile(context.Context, UploadFileParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error) + LinkValue(context.Context, LinkPropertyValueParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error) + UnlinkValue(context.Context, UnlinkPropertyValueParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, *property.Field, error) + AddItem(context.Context, AddPropertyItemParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, error) + MoveItem(context.Context, MovePropertyItemParam, *usecase.Operator) (*property.Property, *property.GroupList, *property.Group, error) + RemoveItem(context.Context, RemovePropertyItemParam, *usecase.Operator) (*property.Property, error) + 
UpdateItems(context.Context, UpdatePropertyItemsParam, *usecase.Operator) (*property.Property, error) +} diff --git a/internal/usecase/interfaces/scene.go b/internal/usecase/interfaces/scene.go new file mode 100644 index 000000000..04307e292 --- /dev/null +++ b/internal/usecase/interfaces/scene.go @@ -0,0 +1,36 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type UpdateWidgetParam struct { + SceneID id.SceneID + PluginID id.PluginID + ExtensionID id.PluginExtensionID + Enabled *bool +} + +var ( + ErrPluginAlreadyInstalled error = errors.New("plugin already installed") + ErrPluginNotInstalled error = errors.New("plugin not installed") + ErrExtensionTypeMustBeWidget error = errors.New("extension type must be widget") +) + +type Scene interface { + Fetch(context.Context, []id.SceneID, *usecase.Operator) ([]*scene.Scene, error) + FindByProject(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error) + FetchLock(context.Context, []id.SceneID, *usecase.Operator) ([]scene.LockMode, error) + Create(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error) + AddWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, *scene.Widget, error) + UpdateWidget(context.Context, UpdateWidgetParam, *usecase.Operator) (*scene.Scene, *scene.Widget, error) + RemoveWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, error) + InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, id.PluginID, *id.PropertyID, error) + UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error) + UpgradePlugin(context.Context, id.SceneID, id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error) +} diff --git 
a/internal/usecase/interfaces/team.go b/internal/usecase/interfaces/team.go new file mode 100644 index 000000000..a4ba619fa --- /dev/null +++ b/internal/usecase/interfaces/team.go @@ -0,0 +1,27 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +var ( + ErrOwnerCannotLeaveTheTeam = errors.New("owner user cannot leave from the team") + ErrCannotChangeOwnerRole = errors.New("cannot change the role of the team owner") + ErrCannotDeleteTeam = errors.New("cannot delete team because at least one project is left") +) + +type Team interface { + Fetch(context.Context, []id.TeamID, *usecase.Operator) ([]*user.Team, error) + FindByUser(context.Context, id.UserID, *usecase.Operator) ([]*user.Team, error) + Create(context.Context, string, id.UserID) (*user.Team, error) + Update(context.Context, id.TeamID, string, *usecase.Operator) (*user.Team, error) + AddMember(context.Context, id.TeamID, id.UserID, user.Role, *usecase.Operator) (*user.Team, error) + RemoveMember(context.Context, id.TeamID, id.UserID, *usecase.Operator) (*user.Team, error) + UpdateMember(context.Context, id.TeamID, id.UserID, user.Role, *usecase.Operator) (*user.Team, error) + Remove(context.Context, id.TeamID, *usecase.Operator) error +} diff --git a/internal/usecase/interfaces/user.go b/internal/usecase/interfaces/user.go new file mode 100644 index 000000000..c7400ff34 --- /dev/null +++ b/internal/usecase/interfaces/user.go @@ -0,0 +1,44 @@ +package interfaces + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/user" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrUserInvalidPasswordConfirmation = errors.New("invalid password confirmation") + ErrUserInvalidLang = errors.New("invalid lang") + ErrSignupInvalidSecret = errors.New("invalid secret") +) 
+ +type SignupParam struct { + Sub string + Name string + Email string + UserID *id.UserID + TeamID *id.TeamID + Secret string +} + +type UpdateMeParam struct { + Name *string + Email *string + Lang *string + Theme *user.Theme + Password *string + PasswordConfirmation *string +} + +type User interface { + Fetch(context.Context, []id.UserID, *usecase.Operator) ([]*user.User, error) + Signup(context.Context, SignupParam) (*user.User, *user.Team, error) + UpdateMe(context.Context, UpdateMeParam, *usecase.Operator) (*user.User, error) + RemoveMyAuth(context.Context, string, *usecase.Operator) (*user.User, error) + SearchUser(context.Context, string, *usecase.Operator) (*user.User, error) + DeleteMe(context.Context, id.UserID, *usecase.Operator) error +} diff --git a/internal/usecase/operator.go b/internal/usecase/operator.go new file mode 100644 index 000000000..cc9edd35e --- /dev/null +++ b/internal/usecase/operator.go @@ -0,0 +1,143 @@ +package usecase + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +// Operator _ +type Operator struct { + User id.UserID + ReadableTeams []id.TeamID + WritableTeams []id.TeamID + OwningTeams []id.TeamID +} + +// OperatorFrom _ +func OperatorFrom(u id.UserID, teams []*user.Team) *Operator { + rt := []id.TeamID{} + wt := []id.TeamID{} + ot := []id.TeamID{} + for _, t := range teams { + r := t.Members().GetRole(u) + if r == user.Role("") { + continue + } + tid := t.ID() + rt = append(rt, tid) + if r == user.RoleWriter { + wt = append(wt, tid) + } else if r == user.RoleOwner { + wt = append(wt, tid) + ot = append(ot, tid) + } + } + + return &Operator{ + User: u, + ReadableTeams: rt, + WritableTeams: wt, + OwningTeams: ot, + } +} + +// Teams _ +func (o *Operator) Teams(r user.Role) []id.TeamID { + if o == nil { + return nil + } + if r == user.RoleReader { + return o.ReadableTeams + } + if r == user.RoleWriter { + return o.WritableTeams + } + if r == user.RoleOwner { + return 
o.OwningTeams + } + return nil +} + +// IsReadableTeamIncluded _ +func (o *Operator) IsReadableTeamIncluded(team id.TeamID) bool { + if o == nil { + return false + } + for _, t := range o.ReadableTeams { + if t == team { + return true + } + } + return false +} + +// IsWritableTeamIncluded _ +func (o *Operator) IsWritableTeamIncluded(team id.TeamID) bool { + if o == nil { + return false + } + for _, t := range o.WritableTeams { + if t == team { + return true + } + } + return false +} + +// IsOwningTeamIncluded _ +func (o *Operator) IsOwningTeamIncluded(team id.TeamID) bool { + if o == nil { + return false + } + for _, t := range o.OwningTeams { + if t == team { + return true + } + } + return false +} + +// IsReadableTeamsIncluded _ +func (o *Operator) IsReadableTeamsIncluded(teams []id.TeamID) bool { + if o == nil { + return false + } + for _, t := range teams { + for _, t2 := range o.ReadableTeams { + if t == t2 { + return true + } + } + } + return false +} + +// IsWritableTeamsIncluded _ +func (o *Operator) IsWritableTeamsIncluded(teams []id.TeamID) bool { + if o == nil { + return false + } + for _, t := range teams { + for _, t2 := range o.WritableTeams { + if t == t2 { + return true + } + } + } + return false +} + +// IsOwningTeamsIncluded _ +func (o *Operator) IsOwningTeamsIncluded(teams []id.TeamID) bool { + if o == nil { + return false + } + for _, t := range teams { + for _, t2 := range o.OwningTeams { + if t == t2 { + return true + } + } + } + return false +} diff --git a/internal/usecase/pageinfo.go b/internal/usecase/pageinfo.go new file mode 100644 index 000000000..fa6a0ace3 --- /dev/null +++ b/internal/usecase/pageinfo.go @@ -0,0 +1,63 @@ +package usecase + +type PageInfo struct { + totalCount int + startCursor *Cursor + endCursor *Cursor + hasNextPage bool + hasPreviousPage bool +} + +func NewPageInfo(totalCount int, startCursor *Cursor, endCursor *Cursor, hasNextPage bool, hasPreviousPage bool) *PageInfo { + var sc Cursor + var ec Cursor + if 
startCursor != nil { + sc = *startCursor + } + if endCursor != nil { + ec = *endCursor + } + + return &PageInfo{ + totalCount: totalCount, + startCursor: &sc, + endCursor: &ec, + hasNextPage: hasNextPage, + hasPreviousPage: hasPreviousPage, + } +} + +func (p *PageInfo) TotalCount() int { + if p == nil { + return 0 + } + return p.totalCount +} + +func (p *PageInfo) StartCursor() *Cursor { + if p == nil { + return nil + } + return p.startCursor +} + +func (p *PageInfo) EndCursor() *Cursor { + if p == nil { + return nil + } + return p.endCursor +} + +func (p *PageInfo) HasNextPage() bool { + if p == nil { + return false + } + return p.hasNextPage +} + +func (p *PageInfo) HasPreviousPage() bool { + if p == nil { + return false + } + return p.hasPreviousPage +} diff --git a/internal/usecase/pagination.go b/internal/usecase/pagination.go new file mode 100644 index 000000000..46932c09c --- /dev/null +++ b/internal/usecase/pagination.go @@ -0,0 +1,19 @@ +package usecase + +type Pagination struct { + Before *Cursor + After *Cursor + First *int + Last *int +} + +func NewPagination(first *int, last *int, before *Cursor, after *Cursor) *Pagination { + // Relay-Style Cursor Pagination + // ref: https://www.apollographql.com/docs/react/features/pagination/#relay-style-cursor-pagination + return &Pagination{ + Before: before, + After: after, + First: first, + Last: last, + } +} diff --git a/internal/usecase/repo/asset.go b/internal/usecase/repo/asset.go new file mode 100644 index 000000000..37f22216b --- /dev/null +++ b/internal/usecase/repo/asset.go @@ -0,0 +1,16 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Asset interface { + Save(context.Context, *asset.Asset) error + Remove(context.Context, id.AssetID) error + FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) + 
FindByID(context.Context, id.AssetID) (*asset.Asset, error) +} diff --git a/internal/usecase/repo/config.go b/internal/usecase/repo/config.go new file mode 100644 index 000000000..5db2abb4c --- /dev/null +++ b/internal/usecase/repo/config.go @@ -0,0 +1,12 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/config" +) + +type Config interface { + Load(context.Context) (*config.Config, error) + Save(context.Context, *config.Config) error +} diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go new file mode 100644 index 000000000..f329feb87 --- /dev/null +++ b/internal/usecase/repo/container.go @@ -0,0 +1,18 @@ +package repo + +type Container struct { + Asset Asset + Config Config + DatasetSchema DatasetSchema + Dataset Dataset + Layer Layer + Plugin Plugin + Project Project + PropertySchema PropertySchema + Property Property + Scene Scene + Team Team + User User + SceneLock SceneLock + Transaction Transaction +} diff --git a/internal/usecase/repo/dataset.go b/internal/usecase/repo/dataset.go new file mode 100644 index 000000000..9cb0df1bf --- /dev/null +++ b/internal/usecase/repo/dataset.go @@ -0,0 +1,50 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Dataset interface { + FindByID(context.Context, id.DatasetID, []id.SceneID) (*dataset.Dataset, error) + FindByIDs(context.Context, []id.DatasetID, []id.SceneID) (dataset.List, error) + FindBySchema(context.Context, id.DatasetSchemaID, []id.SceneID, *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) + FindBySchemaAll(context.Context, id.DatasetSchemaID) (dataset.List, error) + FindGraph(context.Context, id.DatasetID, []id.SceneID, []id.DatasetSchemaFieldID) (dataset.List, error) + Save(context.Context, *dataset.Dataset) error + SaveAll(context.Context, dataset.List) error + 
Remove(context.Context, id.DatasetID) error + RemoveAll(context.Context, []id.DatasetID) error + RemoveByScene(context.Context, id.SceneID) error +} + +func DatasetLoaderFrom(r Dataset, scenes []id.SceneID) dataset.Loader { + return func(ctx context.Context, ids ...id.DatasetID) (dataset.List, error) { + return r.FindByIDs(ctx, ids, scenes) + } +} + +func DatasetGraphLoaderFrom(r Dataset, scenes []id.SceneID) dataset.GraphLoader { + return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (dataset.List, *dataset.Field, error) { + if len(fields) <= 1 { + d, err := r.FindByID(ctx, root, scenes) + if err != nil { + return nil, nil, err + } + var field *dataset.Field + if len(fields) == 1 { + field = d.Field(fields[0]) + } + return dataset.List{d}, field, nil + } + + list2, err := r.FindGraph(ctx, root, scenes, fields) + if err != nil { + return nil, nil, err + } + return list2, list2.Last().Field(fields[len(fields)-1]), nil + } +} diff --git a/internal/usecase/repo/dataset_schema.go b/internal/usecase/repo/dataset_schema.go new file mode 100644 index 000000000..be94c38ab --- /dev/null +++ b/internal/usecase/repo/dataset_schema.go @@ -0,0 +1,24 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetSchema interface { + FindByID(context.Context, id.DatasetSchemaID, []id.SceneID) (*dataset.Schema, error) + FindByIDs(context.Context, []id.DatasetSchemaID, []id.SceneID) (dataset.SchemaList, error) + FindByScene(context.Context, id.SceneID, *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) + FindBySceneAll(context.Context, id.SceneID) (dataset.SchemaList, error) + FindBySceneAndSource(context.Context, id.SceneID, dataset.Source) (dataset.SchemaList, error) + FindDynamicByID(context.Context, id.DatasetSchemaID) (*dataset.Schema, error) + 
FindAllDynamicByScene(context.Context, id.SceneID) (dataset.SchemaList, error) + Save(context.Context, *dataset.Schema) error + SaveAll(context.Context, dataset.SchemaList) error + Remove(context.Context, id.DatasetSchemaID) error + RemoveAll(context.Context, []id.DatasetSchemaID) error + RemoveByScene(context.Context, id.SceneID) error +} diff --git a/internal/usecase/repo/layer.go b/internal/usecase/repo/layer.go new file mode 100644 index 000000000..b4d03d741 --- /dev/null +++ b/internal/usecase/repo/layer.go @@ -0,0 +1,39 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" +) + +type Layer interface { + FindByID(context.Context, id.LayerID, []id.SceneID) (layer.Layer, error) + FindByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.List, error) + FindItemByID(context.Context, id.LayerID, []id.SceneID) (*layer.Item, error) + FindItemByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.ItemList, error) + FindAllByDatasetSchema(context.Context, id.DatasetSchemaID) (layer.List, error) + FindGroupByID(context.Context, id.LayerID, []id.SceneID) (*layer.Group, error) + FindGroupByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.GroupList, error) + FindGroupBySceneAndLinkedDatasetSchema(context.Context, id.SceneID, id.DatasetSchemaID) (layer.GroupList, error) + FindParentByID(context.Context, id.LayerID, []id.SceneID) (*layer.Group, error) + FindByProperty(context.Context, id.PropertyID, []id.SceneID) (layer.Layer, error) + FindByScene(context.Context, id.SceneID) (layer.List, error) + Save(context.Context, layer.Layer) error + SaveAll(context.Context, layer.List) error + Remove(context.Context, id.LayerID) error + RemoveAll(context.Context, []id.LayerID) error + RemoveByScene(context.Context, id.SceneID) error +} + +func LayerLoaderFrom(r Layer, scenes []id.SceneID) layer.Loader { + return func(ctx context.Context, ids ...id.LayerID) (layer.List, error) { + return 
r.FindByIDs(ctx, ids, scenes) + } +} + +func LayerLoaderBySceneFrom(r Layer) layer.LoaderByScene { + return func(ctx context.Context, s id.SceneID) (layer.List, error) { + return r.FindByScene(ctx, s) + } +} diff --git a/internal/usecase/repo/plugin.go b/internal/usecase/repo/plugin.go new file mode 100644 index 000000000..052564c8f --- /dev/null +++ b/internal/usecase/repo/plugin.go @@ -0,0 +1,20 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type Plugin interface { + FindByID(context.Context, id.PluginID) (*plugin.Plugin, error) + FindByIDs(context.Context, []id.PluginID) ([]*plugin.Plugin, error) + Save(context.Context, *plugin.Plugin) error +} + +func PluginLoaderFrom(r Plugin) plugin.Loader { + return func(ctx context.Context, ids ...id.PluginID) ([]*plugin.Plugin, error) { + return r.FindByIDs(ctx, ids) + } +} diff --git a/internal/usecase/repo/project.go b/internal/usecase/repo/project.go new file mode 100644 index 000000000..c5921a9c6 --- /dev/null +++ b/internal/usecase/repo/project.go @@ -0,0 +1,46 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/project" +) + +type Project interface { + FindByIDs(context.Context, []id.ProjectID, []id.TeamID) ([]*project.Project, error) + FindByID(context.Context, id.ProjectID, []id.TeamID) (*project.Project, error) + FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) + FindByPublicName(context.Context, string) (*project.Project, error) + CountByTeam(context.Context, id.TeamID) (int, error) + Save(context.Context, *project.Project) error + Remove(context.Context, id.ProjectID) error +} + +func IterateProjectsByTeam(repo Project, ctx context.Context, tid id.TeamID, batch int, callback func([]*project.Project) error) error { + pagination 
:= usecase.NewPagination(&batch, nil, nil, nil) + + for { + projects, info, err := repo.FindByTeam(ctx, tid, pagination) + if err != nil { + return err + } + if len(projects) == 0 { + break + } + + if err := callback(projects); err != nil { + return err + } + + if !info.HasNextPage() { + break + } + + c := usecase.Cursor(projects[len(projects)-1].ID().String()) + pagination.After = &c + } + + return nil +} diff --git a/internal/usecase/repo/property.go b/internal/usecase/repo/property.go new file mode 100644 index 000000000..469ea2396 --- /dev/null +++ b/internal/usecase/repo/property.go @@ -0,0 +1,26 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Property interface { + FindByID(context.Context, id.PropertyID, []id.SceneID) (*property.Property, error) + FindByIDs(context.Context, []id.PropertyID, []id.SceneID) (property.List, error) + FindLinkedAll(context.Context, id.SceneID) (property.List, error) + FindByDataset(context.Context, id.DatasetSchemaID, id.DatasetID) (property.List, error) + Save(context.Context, *property.Property) error + SaveAll(context.Context, property.List) error + Remove(context.Context, id.PropertyID) error + RemoveAll(context.Context, []id.PropertyID) error + RemoveByScene(context.Context, id.SceneID) error +} + +func PropertyLoaderFrom(r Property, scenes []id.SceneID) property.Loader { + return func(ctx context.Context, ids ...id.PropertyID) (property.List, error) { + return r.FindByIDs(ctx, ids, scenes) + } +} diff --git a/internal/usecase/repo/property_schema.go b/internal/usecase/repo/property_schema.go new file mode 100644 index 000000000..878cc6b12 --- /dev/null +++ b/internal/usecase/repo/property_schema.go @@ -0,0 +1,21 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type PropertySchema interface { + FindByID(context.Context, 
id.PropertySchemaID) (*property.Schema, error) + FindByIDs(context.Context, []id.PropertySchemaID) (property.SchemaList, error) + Save(context.Context, *property.Schema) error + SaveAll(context.Context, property.SchemaList) error +} + +func PropertySchemaLoaderFrom(r PropertySchema) property.SchemaLoader { + return func(ctx context.Context, ids ...id.PropertySchemaID) (property.SchemaList, error) { + return r.FindByIDs(ctx, ids) + } +} diff --git a/internal/usecase/repo/scene.go b/internal/usecase/repo/scene.go new file mode 100644 index 000000000..a599a6ae7 --- /dev/null +++ b/internal/usecase/repo/scene.go @@ -0,0 +1,19 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type Scene interface { + FindByID(context.Context, id.SceneID, []id.TeamID) (*scene.Scene, error) + FindByIDs(context.Context, []id.SceneID, []id.TeamID) ([]*scene.Scene, error) + FindByProject(context.Context, id.ProjectID, []id.TeamID) (*scene.Scene, error) + FindIDsByTeam(context.Context, []id.TeamID) ([]id.SceneID, error) + HasSceneTeam(context.Context, id.SceneID, []id.TeamID) (bool, error) + HasScenesTeam(context.Context, []id.SceneID, []id.TeamID) ([]bool, error) + Save(context.Context, *scene.Scene) error + Remove(context.Context, id.SceneID) error +} diff --git a/internal/usecase/repo/scene_lock.go b/internal/usecase/repo/scene_lock.go new file mode 100644 index 000000000..d0358adef --- /dev/null +++ b/internal/usecase/repo/scene_lock.go @@ -0,0 +1,15 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type SceneLock interface { + GetLock(context.Context, id.SceneID) (scene.LockMode, error) + GetAllLock(context.Context, []id.SceneID) ([]scene.LockMode, error) + SaveLock(context.Context, id.SceneID, scene.LockMode) error + ReleaseAllLock(context.Context) error +} diff --git a/internal/usecase/repo/team.go 
b/internal/usecase/repo/team.go new file mode 100644 index 000000000..60b75f354 --- /dev/null +++ b/internal/usecase/repo/team.go @@ -0,0 +1,18 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type Team interface { + FindByUser(context.Context, id.UserID) ([]*user.Team, error) + FindByIDs(context.Context, []id.TeamID) ([]*user.Team, error) + FindByID(context.Context, id.TeamID) (*user.Team, error) + Save(context.Context, *user.Team) error + SaveAll(context.Context, []*user.Team) error + Remove(context.Context, id.TeamID) error + RemoveAll(context.Context, []id.TeamID) error +} diff --git a/internal/usecase/repo/transaction.go b/internal/usecase/repo/transaction.go new file mode 100644 index 000000000..80175e237 --- /dev/null +++ b/internal/usecase/repo/transaction.go @@ -0,0 +1,16 @@ +package repo + +import "context" + +type Transaction interface { + Begin() (Tx, error) +} + +type Tx interface { + // Commit informs Tx to commit when End() is called. + // If this was not called once, rollback is done when End() is called. + Commit() + // End finishes the transaction and do commit if Commit() was called once, or else do rollback. + // This method is supposed to be called in the uscase layer using defer. 
+ End(context.Context) error +} diff --git a/internal/usecase/repo/user.go b/internal/usecase/repo/user.go new file mode 100644 index 000000000..2b4152785 --- /dev/null +++ b/internal/usecase/repo/user.go @@ -0,0 +1,18 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +type User interface { + FindByIDs(context.Context, []id.UserID) ([]*user.User, error) + FindByID(context.Context, id.UserID) (*user.User, error) + FindByAuth0Sub(context.Context, string) (*user.User, error) + FindByEmail(context.Context, string) (*user.User, error) + FindByNameOrEmail(context.Context, string) (*user.User, error) + Save(context.Context, *user.User) error + Remove(context.Context, id.UserID) error +} diff --git a/main.go b/main.go new file mode 100644 index 000000000..38dd16da6 --- /dev/null +++ b/main.go @@ -0,0 +1,3 @@ +package main + +func main() {} diff --git a/pkg/asset/asset.go b/pkg/asset/asset.go new file mode 100644 index 000000000..c0b60c755 --- /dev/null +++ b/pkg/asset/asset.go @@ -0,0 +1,66 @@ +package asset + +import ( + "errors" + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + // ErrEmptyTeamID _ + ErrEmptyTeamID = errors.New("require team id") + // ErrEmptyURL _ + ErrEmptyURL = errors.New("require valid url") + // ErrEmptySize _ + ErrEmptySize = errors.New("file size cannot be zero") +) + +// Asset _ +type Asset struct { + id id.AssetID + createdAt time.Time + team id.TeamID + name string // file name + size int64 // file size + url string + contentType string +} + +// ID _ +func (a *Asset) ID() id.AssetID { + return a.id +} + +// Team _ +func (a *Asset) Team() id.TeamID { + return a.team +} + +// Name _ +func (a *Asset) Name() string { + return a.name +} + +// Size _ +func (a *Asset) Size() int64 { + return a.size +} + +// URL _ +func (a *Asset) URL() string { + return a.url +} + +// ContentType _ +func (a *Asset) ContentType() string { + return 
a.contentType +} + +// CreatedAt _ +func (a *Asset) CreatedAt() time.Time { + if a == nil { + return time.Time{} + } + return id.ID(a.id).Timestamp() +} diff --git a/pkg/asset/asset_test.go b/pkg/asset/asset_test.go new file mode 100644 index 000000000..b0b65643a --- /dev/null +++ b/pkg/asset/asset_test.go @@ -0,0 +1,63 @@ +package asset + +import ( + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestAsset(t *testing.T) { + aid := id.NewAssetID() + tid := id.NewTeamID() + d := id.ID(aid).Timestamp() + testCases := []struct { + Name string + Expected struct { + ID id.AssetID + CreatedAt time.Time + Team id.TeamID + Name string + Size int64 + Url string + ContentType string + } + Actual *Asset + }{ + { + Expected: struct { + ID id.AssetID + CreatedAt time.Time + Team id.TeamID + Name string + Size int64 + Url string + ContentType string + }{ + ID: aid, + CreatedAt: d, + Team: tid, + Size: 10, + Url: "tt://xxx.xx", + Name: "xxx", + ContentType: "test", + }, + Actual: New().ID(aid).CreatedAt(d).ContentType("test").Team(tid).Size(10).Name("xxx").URL("tt://xxx.xx").MustBuild(), + }, + } + for _, tc := range testCases { + tc := tc + + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected.ID, tc.Actual.ID()) + assert.Equal(tt, tc.Expected.CreatedAt, tc.Actual.CreatedAt()) + assert.Equal(tt, tc.Expected.Team, tc.Actual.Team()) + assert.Equal(tt, tc.Expected.Url, tc.Actual.URL()) + assert.Equal(tt, tc.Expected.Size, tc.Actual.Size()) + assert.Equal(tt, tc.Expected.Name, tc.Actual.Name()) + assert.Equal(tt, tc.Expected.ContentType, tc.Actual.ContentType()) + }) + } +} diff --git a/pkg/asset/builder.go b/pkg/asset/builder.go new file mode 100644 index 000000000..b75ddc6c8 --- /dev/null +++ b/pkg/asset/builder.go @@ -0,0 +1,94 @@ +package asset + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +// Builder _ +type Builder struct { + a *Asset +} + +// New _ 
+func New() *Builder { + return &Builder{a: &Asset{}} +} + +// Build _ +func (b *Builder) Build() (*Asset, error) { + if id.ID(b.a.id).IsNil() { + return nil, id.ErrInvalidID + } + if id.ID(b.a.team).IsNil() { + return nil, ErrEmptyTeamID + } + if b.a.url == "" { + return nil, ErrEmptyURL + } + if b.a.size <= 0 { + return nil, ErrEmptySize + } + if b.a.createdAt.IsZero() { + b.a.createdAt = b.a.CreatedAt() + } + return b.a, nil +} + +// MustBuild _ +func (b *Builder) MustBuild() *Asset { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +// ID _ +func (b *Builder) ID(id id.AssetID) *Builder { + b.a.id = id + return b +} + +// NewID _ +func (b *Builder) NewID() *Builder { + b.a.id = id.AssetID(id.New()) + return b +} + +// Team _ +func (b *Builder) Team(team id.TeamID) *Builder { + b.a.team = team + return b +} + +// Name _ +func (b *Builder) Name(name string) *Builder { + b.a.name = name + return b +} + +// Size _ +func (b *Builder) Size(size int64) *Builder { + b.a.size = size + return b +} + +// URL _ +func (b *Builder) URL(url string) *Builder { + b.a.url = url + return b +} + +// ContentType _ +func (b *Builder) ContentType(contentType string) *Builder { + b.a.contentType = contentType + return b +} + +// CreatedAt - +func (b *Builder) CreatedAt(createdAt time.Time) *Builder { + b.a.createdAt = createdAt + return b +} diff --git a/pkg/asset/builder_test.go b/pkg/asset/builder_test.go new file mode 100644 index 000000000..0142afb73 --- /dev/null +++ b/pkg/asset/builder_test.go @@ -0,0 +1,236 @@ +package asset + +import ( + "errors" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestBuilder_Build(t *testing.T) { + aid := id.NewAssetID() + tid := id.NewTeamID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + testCases := []struct { + Name, AssetName string + Id id.AssetID + CreatedAt time.Time + Team id.TeamID + Size int64 + Url string + ContentType string + 
ExpectedAssert *Asset + Err error + }{ + { + Name: "Valid asset", + CreatedAt: d, + Id: aid, + Team: tid, + AssetName: "xxx", + Size: 10, + Url: "tt://xxx.zz", + ContentType: "bbb", + ExpectedAssert: &Asset{ + id: aid, + createdAt: d, + team: tid, + size: 10, + name: "xxx", + url: "tt://xxx.zz", + contentType: "bbb", + }, + Err: nil, + }, + { + Name: "failed empty size", + Id: id.NewAssetID(), + CreatedAt: d, + Team: id.NewTeamID(), + Size: 0, + Url: "tt://xxx.zz", + ContentType: "bbb", + ExpectedAssert: nil, + Err: ErrEmptySize, + }, + { + Name: "failed empty url", + Id: id.NewAssetID(), + CreatedAt: d, + Team: id.NewTeamID(), + Size: 10, + Url: "", + ContentType: "bbb", + ExpectedAssert: nil, + Err: ErrEmptyURL, + }, + { + Name: "failed empty team", + Id: id.NewAssetID(), + CreatedAt: d, + Team: id.TeamID{}, + Size: 10, + Url: "tt://xxx.zz", + ContentType: "bbb", + ExpectedAssert: nil, + Err: ErrEmptyTeamID, + }, + { + Name: "failed invalid Id", + Id: id.AssetID{}, + CreatedAt: d, + Team: id.NewTeamID(), + Size: 10, + Url: "tt://xxx.zz", + ContentType: "bbb", + ExpectedAssert: nil, + Err: ErrEmptyTeamID, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + a, err := New(). + ID(tc.Id). + CreatedAt(tc.CreatedAt). + Name(tc.AssetName). + Size(tc.Size). + Team(tc.Team). + ContentType(tc.ContentType). + URL(tc.Url). 
+ Build() + if err == nil { + assert.Equal(tt, tc.ExpectedAssert, a) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + aid := id.NewAssetID() + tid := id.NewTeamID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + testCases := []struct { + name, assetName string + createdAt time.Time + id id.AssetID + team id.TeamID + size int64 + url string + contentType string + expectedAssert *Asset + panic bool + }{ + { + name: "Valid asset", + createdAt: d, + id: aid, + team: tid, + assetName: "xxx", + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + expectedAssert: &Asset{ + id: aid, + createdAt: d, + team: tid, + size: 10, + name: "xxx", + url: "tt://xxx.zz", + contentType: "bbb", + }, + panic: false, + }, + { + name: "failed empty size", + createdAt: d, + id: id.NewAssetID(), + team: id.NewTeamID(), + size: 0, + url: "tt://xxx.zz", + contentType: "bbb", + expectedAssert: nil, + panic: true, + }, + { + name: "failed empty url", + createdAt: d, + id: id.NewAssetID(), + team: id.NewTeamID(), + size: 10, + url: "", + contentType: "bbb", + expectedAssert: nil, + panic: true, + }, + { + name: "failed empty team", + createdAt: d, + id: id.NewAssetID(), + team: id.TeamID{}, + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + expectedAssert: nil, + panic: true, + }, + { + name: "failed invalid Id", + createdAt: d, + id: id.AssetID{}, + team: id.NewTeamID(), + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + expectedAssert: nil, + panic: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + var a *Asset + if tc.panic { + defer func() { + if r := recover(); r != nil { + assert.Nil(tt, a) + } + }() + + a = New(). + ID(tc.id). + CreatedAt(tc.createdAt). + Name(tc.assetName). + Size(tc.size). + Team(tc.team). + ContentType(tc.contentType). + URL(tc.url). + MustBuild() + } else { + a = New(). + ID(tc.id). + CreatedAt(tc.createdAt). 
+ Name(tc.assetName). + Size(tc.size). + Team(tc.team). + ContentType(tc.contentType). + URL(tc.url). + MustBuild() + assert.Equal(tt, tc.expectedAssert, a) + + } + + }) + } +} + +func TestNewID(t *testing.T) { + a := New().NewID().URL("tt://xxx.bb").Team(id.NewTeamID()).Size(10).MustBuild() + assert.False(t, a.id.IsNil()) +} diff --git a/pkg/builtin/main.go b/pkg/builtin/main.go new file mode 100644 index 000000000..ebb36c2a1 --- /dev/null +++ b/pkg/builtin/main.go @@ -0,0 +1,57 @@ +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/embed -all -n pluginManifestJSON -i manifest.yml -yaml2json + +package builtin + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +var pluginTranslationList = map[string]*manifest.TranslationRoot{"ja": manifest.MustParseTransSystemFromStaticJSON(pluginManifestJSON_ja)} +var pluginManifest = manifest.MergeManifestTranslation(manifest.MustParseSystemFromStaticJSON(pluginManifestJSON), pluginTranslationList) + +// MUST NOT CHANGE +var PropertySchemaIDVisualizerCesium = id.MustPropertySchemaID("reearth/cesium") + +// MUST NOT CHANGE +var PropertySchemaIDInfobox = id.MustPropertySchemaID("reearth/infobox") + +func GetPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { + for _, p := range pluginManifest.ExtensionSchema { + if p.ID().String() == "reearth/"+string(v) { + return p + } + } + return nil +} + +func MustPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { + ps := GetPropertySchemaByVisualizer(v) + if ps == nil { + panic("property schema not found: " + v) + } + return ps +} + +func GetPropertySchema(id id.PropertySchemaID) *property.Schema { + for _, p := range pluginManifest.ExtensionSchema { + if id == p.ID() { + return p + } + } + return nil +} + +func 
Plugin() *plugin.Plugin { + return pluginManifest.Plugin +} + +func GetPlugin(id id.PluginID) *plugin.Plugin { + if id.Equal(pluginManifest.Plugin.ID()) { + return pluginManifest.Plugin + } + return nil +} diff --git a/pkg/builtin/main_test.go b/pkg/builtin/main_test.go new file mode 100644 index 000000000..816a5e0d7 --- /dev/null +++ b/pkg/builtin/main_test.go @@ -0,0 +1,105 @@ +package builtin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestGetPropertySchemaByVisualizer(t *testing.T) { + testCases := []struct { + name string + visualizer visualizer.Visualizer + expectedNil bool + }{ + { + name: "cesium", + visualizer: visualizer.VisualizerCesium, + expectedNil: false, + }, + { + name: "unsupported visualizer", + visualizer: "foo", + expectedNil: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := GetPropertySchemaByVisualizer(tc.visualizer) + if tc.expectedNil { + assert.Nil(tt, res) + } else { + assert.NotNil(tt, res) + } + }) + } +} + +func TestPlugin(t *testing.T) { + assert.NotNil(t, Plugin()) +} +func TestGetPlugin(t *testing.T) { + testCases := []struct { + name string + pluginID id.PluginID + expectedNil bool + }{ + { + name: "Official Plugin", + pluginID: id.OfficialPluginID, + expectedNil: false, + }, + { + name: "foo plugin", + pluginID: id.MustPluginID("foo#1.1.1"), + expectedNil: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := GetPlugin(tc.pluginID) + if tc.expectedNil { + assert.Nil(tt, res) + } else { + assert.NotNil(tt, res) + } + }) + } +} + +func TestGetPropertySchema(t *testing.T) { + testCases := []struct { + name string + psId id.PropertySchemaID + expectedNil bool + }{ + { + name: "Infobox", + psId: PropertySchemaIDInfobox, + expectedNil: false, + }, + { + name: 
"unknown propertySchemaId", + psId: id.MustPropertySchemaID("xxx#1.1.1/aa"), + expectedNil: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := GetPropertySchema(tc.psId) + if tc.expectedNil { + assert.Nil(tt, res) + } else { + assert.NotNil(tt, res) + } + }) + } +} diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml new file mode 100644 index 000000000..c1a9cf33c --- /dev/null +++ b/pkg/builtin/manifest.yml @@ -0,0 +1,1027 @@ +id: reearth +system: true +title: Re:Earth Official Plugin +description: Official Plugin +author: Re:Earth +extensions: + - id: cesium + title: Cesium + description: Select here to find scene settings in the right panel. This includes setting map tiles, atmospheric conditions, real lighting, and more. + visualizer: cesium + type: visualizer + schema: + groups: + - id: default + title: Scene + fields: + - id: camera + type: camera + title: Initial camera position + description: The starting position of your project. + - id: terrain + type: bool + title: Terrain + description: Show elevation when close to the surface. + - id: skybox + type: bool + title: Sky + defaultValue: true + description: Show the stars. + - id: bgcolor + type: string + title: Background color + description: With Sky disabled, choose a background color. + ui: color + - id: ion + type: string + title: Cesium Ion API access token + description: Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project. + - id: tiles + title: Tiles + description: You may change the look of the Earth by obtaining map tile data and setting it here. 
+ list: true + representativeField: tile_type + fields: + - id: tile_type + type: string + title: Tile Type + defaultValue: default + choices: + - key: default + label: Default + - key: default_label + label: Labelled + - key: default_road + label: Road Map + - key: stamen_watercolor + label: Stamen Watercolor + - key: stamen_toner + label: Stamen Toner + - key: open_street_map + label: Open Street Map + - key: esri_world_topo + label: ESRI Topography + - key: black_marble + label: Earth at night + - key: japan_gsi_standard + label: Japan GSI Standard Map + - key: url + label: URL + - id: tile_url + type: string + title: Tile map URL + availableIf: + field: tile_type + type: string + value: url + - id: tile_minLevel + type: number + title: Minimum zoom level + min: 0 + max: 30 + - id: tile_maxLevel + type: number + title: Maximum zoom level + min: 0 + max: 30 + - id: atmosphere + title: Atmospheric Conditions + description: Set the look and feel of the Earth. + fields: + - id: enable_sun + type: bool + title: Sun + defaultValue: true + description: Display the Sun. + - id: enable_lighting + type: bool + title: Lighting + defaultValue: false + description: Display natural lighting from the sun. + - id: ground_atmosphere + type: bool + title: Ground atmosphere + defaultValue: true + description: Display a lower atmospheric layer. + - id: sky_atmosphere + type: bool + title: Sky atmosphere + defaultValue: true + description: Display an upper atmospheric layer. + - id: fog + type: bool + title: Fog + defaultValue: true + description: Display customizable fog. + - id: fog_density + type: number + title: Fog density + defaultValue: 2.0e-4 + description: "Set a thickness to the fog. Min: 0 Max: 1" + min: 0 + max: 1 + - id: brightness_shift + type: number + title: Fog Brightness + defaultValue: 0.03 + description: "Set brightness of the fog. Min: -1 Max: 1" + min: -1 + max: 1 + - id: hue_shift + type: number + title: Fog Hue + description: "Set hue of the fog. 
Min: -1 Max: 1" + min: -1 + max: 1 + - id: surturation_shift + type: number + title: Fog Saturation + description: "Set saturation of the fog. Min: -1 Max: 1" + min: -1 + max: 1 + - id: infobox + title: Infobox + visualizer: cesium + type: infobox + description: Create an information area that appears when a layer is highlighted. Text, pictures, video, etc can be added to an infobox. + schema: + groups: + - id: default + title: Basic + fields: + - id: title + type: string + title: Title + - id: size + type: string + title: Size Type + defaultValue: small + choices: + - key: small + label: Small + - key: large + label: Large + - id: bgcolor + type: string + title: Background Color + ui: color + - id: typography + type: typography + title: Font + - id: marker + visualizer: cesium + type: primitive + title: Marker + description: A standard map marker. + schema: + groups: + - id: default + title: Marker + fields: + - id: location + type: latlng + title: Location + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: style + type: string + title: Style + defaultValue: image + choices: + - key: point + label: Point + - key: image + label: Icon + - id: pointColor + type: string + title: Point color + ui: color + availableIf: + field: style + type: string + value: point + - id: pointSize + type: number + title: Point size + defaultValue: 10 + min: 0 + suffix: px + availableIf: + field: style + type: string + value: point + - id: image + type: url + title: Image URL + ui: image + availableIf: + field: style + type: string + value: image + - id: imageSize + type: number + title: Image scale + defaultValue: 1 + min: 0 + availableIf: + field: style + type: string + value: image + - id: imageHorizontalOrigin + type: string + title: Image horizontal origin + defaultValue: center + choices: + - key: left + label: Left + - key: center + label: Center + - key: right + label: Right + availableIf: + field: style + type: string + value: image + - 
id: imageVerticalOrigin + type: string + title: Image vertical origin + defaultValue: center + choices: + - key: top + label: Top + - key: center + label: Center + - key: baseline + label: Baseline + - key: bottom + label: Bottom + availableIf: + field: style + type: string + value: image + - id: imageCrop + type: string + title: Image crop + defaultValue: none + choices: + - key: none + label: None + # - key: rounded + # label: Rounded + - key: circle + label: Circle + availableIf: + field: style + type: string + value: image + - id: imageShadow + type: bool + title: Image shadow + availableIf: + field: style + type: string + value: image + - id: imageShadowColor + type: string + title: Shadow color + ui: color + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowBlur + type: number + title: Shadow radius + defaultValue: 3 + suffix: px + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowPositionX + type: number + title: Shadow X + suffix: px + defaultValue: 0 + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowPositionY + type: number + title: Shadow Y + suffix: px + defaultValue: 0 + availableIf: + field: imageShadow + type: bool + value: true + - id: label + type: bool + title: Label + - id: labelText + type: string + title: Label text + availableIf: + field: label + type: bool + value: true + - id: labelTypography + type: typography + title: Label font + availableIf: + field: label + type: bool + value: true + - id: extrude + type: bool + title: Extruded + linkable: + latlng: + schemaGroupId: default + fieldId: location + url: + schemaGroupId: default + fieldId: image + - id: polyline + visualizer: cesium + type: primitive + title: Polyline + description: Polyline primitive + schema: + groups: + - id: default + title: Polyline + fields: + - id: coordinates + type: coordinates + title: Coordinates + - id: strokeColor + type: string + title: Stroke color + ui: color + - id: 
strokeWidth + type: number + title: Stroke width + min: 0 + defaultValue: 1 + suffix: px + - id: polygon + visualizer: cesium + type: primitive + title: Polygon + description: Polygon primitive + schema: + groups: + - id: default + title: Polygon + fields: + - id: polygon + type: polygon + title: Polygon + - id: fill + type: bool + title: Fill + defaultValue: true + - id: fillColor + type: string + title: Fill color + ui: color + availableIf: + field: fill + type: bool + value: true + - id: stroke + type: bool + title: Stroke + - id: strokeColor + type: string + title: Stroke color + ui: color + availableIf: + field: stroke + type: bool + value: true + - id: strokeWidth + type: number + title: Stroke width + min: 0 + defaultValue: 1 + suffix: px + availableIf: + field: stroke + type: bool + value: true + - id: rect + visualizer: cesium + type: primitive + title: Rectangle + description: Rectangle primitive + schema: + groups: + - id: default + title: Rectangle + fields: + - id: rect + type: rect + title: Rect + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: extrudedHeight + type: number + title: Extruded height + min: 0 + - id: style + type: string + title: Style + defaultValue: color + choices: + - key: color + label: Color + - key: image + label: Image + - id: fillColor + type: string + title: Fill + ui: color + availableIf: + field: style + type: string + value: color + - id: image + type: url + title: Image URL + ui: image + availableIf: + field: style + type: string + value: image + - id: photooverlay + visualizer: cesium + type: primitive + title: Photo overlay + description: An Icon marker that allows you to set a photo that will appear after reaching its location. 
+ schema: + groups: + - id: default + title: Photo overlay + fields: + - id: location + type: latlng + title: Location + - id: height + type: number + title: Height + - id: camera + type: camera + title: Camera + description: Set the camera position for the overlay. + - id: image + type: url + ui: image + title: Icon + - id: imageSize + type: number + title: Icon size + prefix: x + defaultValue: 1 + - id: imageHorizontalOrigin + type: string + title: Image horizontal origin + defaultValue: center + choices: + - key: left + label: Left + - key: center + label: Center + - key: right + label: Right + - id: imageVerticalOrigin + type: string + title: Image vertical origin + defaultValue: center + choices: + - key: top + label: Top + - key: center + label: Center + - key: baseline + label: Baseline + - key: bottom + label: Bottom + - id: imageCrop + type: string + title: Icon crop + defaultValue: none + choices: + - key: none + label: None + # - key: rounded + # label: Rounded + - key: circle + label: Circle + - id: imageShadow + type: bool + title: Icon shadow + - id: imageShadowColor + type: string + title: Shadow color + ui: color + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowBlur + type: number + title: Shadow radius + defaultValue: 3 + suffix: px + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowPositionX + type: number + title: Shadow X + suffix: px + defaultValue: 0 + availableIf: + field: imageShadow + type: bool + value: true + - id: imageShadowPositionY + type: number + title: Shadow Y + suffix: px + defaultValue: 0 + availableIf: + field: imageShadow + type: bool + value: true + - id: photoOverlayImage + type: url + title: Photo + ui: image + - id: photoOverlayDescription + type: string + ui: multiline + title: Photo description + linkable: + latlng: + schemaGroupId: default + fieldId: location + - id: ellipsoid + visualizer: cesium + type: primitive + title: Sphere + description: A ball-like 
marker. + schema: + groups: + - id: default + title: Ellipsoid + fields: + - id: position + type: latlng + title: Position + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: radius + type: number + title: Radius + defaultValue: 1000 + min: 0 + suffix: m + - id: fillColor + type: string + title: Fill + ui: color + linkable: + latlng: + schemaGroupId: default + fieldId: position + - id: resource + visualizer: cesium + type: primitive + title: File + description: Import your own primitives to be used instead of Re:Earth's built in ones. + schema: + groups: + - id: default + title: File + fields: + - id: url + type: url + title: File URL + ui: file + - id: type + type: string + title: File format + defaultValue: auto + choices: + - key: auto + label: Auto + - key: kml + label: KML + - key: geojson + label: GeoJSON / TopoJSON + - key: czml + label: CZML + - id: textblock + visualizer: cesium + type: block + title: Text + description: Text block + schema: + groups: + - id: default + title: Text block + fields: + - id: title + type: string + title: Title + - id: text + type: string + title: Content + ui: multiline + - id: markdown + type: bool + title: Use markdown + - id: typography + type: typography + title: Font + - id: imageblock + visualizer: cesium + type: block + title: Image + description: Image block + schema: + groups: + - id: default + title: Image block + fields: + - id: image + type: url + title: Image + ui: image + - id: title + type: string + title: Title + - id: fullSize + type: bool + title: Full size + - id: imageSize + type: string + title: Image size + defaultValue: cover + choices: + - key: cover + label: Cover + - key: contain + label: Contain + - id: imagePositionX + type: string + title: Image horizontal position + defaultValue: center + choices: + - key: left + label: Left + - key: center + label: Center + - key: right + label: Right + - id: imagePositionY + type: string + title: Image vertical position + 
defaultValue: center + choices: + - key: top + label: Top + - key: center + label: Center + - key: bottom + label: Bottom + linkable: + url: + schemaGroupId: default + fieldId: image + - id: videoblock + visualizer: cesium + type: block + title: Video + description: Video block + schema: + groups: + - id: default + title: Video block + fields: + - id: url + type: url + title: Video URL + ui: video + - id: title + type: string + title: Title + - id: fullSize + type: bool + title: Full size + - id: locationblock + visualizer: cesium + type: block + title: Location + description: Location block + schema: + groups: + - id: default + title: Location block + fields: + - id: location + type: latlng + title: Location + - id: title + type: string + title: Title + - id: fullSize + type: bool + title: Full size + - id: dlblock + visualizer: cesium + type: block + title: Table + description: Table block + schema: + groups: + - id: default + title: Table block + fields: + - id: title + type: string + title: Title + - id: typography + type: typography + title: Font + - id: items + list: true + title: Items + fields: + - id: item_title + type: string + title: Title + - id: item_datatype + type: string + title: Type + defaultValue: string + choices: + - key: string + label: String + - key: number + label: Number + - id: item_datastr + type: string + title: Data + availableIf: + field: item_datatype + type: string + value: string + - id: item_datanum + type: number + title: Data + availableIf: + field: item_datatype + type: string + value: number + # - id: navigator + # visualizer: cesium + # type: widget + # title: Navigator + # description: Navigator widget + - id: menu + visualizer: cesium + type: widget + title: Menu + description: Menu widgets + schema: + groups: + - id: buttons + title: Buttons + list: true + fields: + - id: buttonInvisible + type: bool + title: Hide + - id: buttonTitle + type: string + title: Title + - id: buttonPosition + type: string + title: Position + 
defaultValue: topleft + choices: + - key: topleft + label: Top-Left + - key: topright + label: Top-Right + - key: bottomleft + label: Bottom-Left + - key: bottomright + label: Bottom-Right + - id: buttonStyle + type: string + title: Style + defaultValue: text + choices: + - key: text + label: Text only + - key: icon + label: Icon only + - key: texticon + label: Text and icon + - id: buttonIcon + type: url + title: Icon + ui: image + - id: buttonColor + type: string + title: Text color + ui: color + - id: buttonBgcolor + type: string + title: Background color + ui: color + - id: buttonType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: menu + label: Menu + - key: camera + label: Camera flight + - id: buttonLink + type: url + title: Link + availableIf: + field: buttonType + type: string + value: link + - id: buttonCamera + type: camera + title: Camera flight + availableIf: + field: buttonType + type: string + value: camera + - id: menu + title: Menu + list: true + fields: + - id: menuTitle + type: string + title: Title + - id: menuIcon + type: url + title: Icon + - id: menuType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: camera + label: Camera + - key: border + label: Break + - id: menuLink + type: url + title: Link + availableIf: + field: menuType + type: string + value: link + - id: menuCamera + type: camera + title: Camera + availableIf: + field: menuType + type: string + value: camera + - id: splashscreen + visualizer: cesium + type: widget + title: Splash screen + description: A unique start screen that will display on load of your archive(ex. display the archive's title). 
+ schema: + groups: + - id: overlay + title: Overlay screen + fields: + - id: overlayEnabled + type: bool + title: Enabled + - id: overlayDelay + type: number + title: Delay + suffix: s + min: 0 + - id: overlayDuration + type: number + title: Duration + suffix: s + min: 0 + - id: overlayTransitionDuration + type: number + title: Fade out + suffix: s + min: 0 + - id: overlayImage + type: url + title: Image + ui: image + - id: overlayImageW + type: number + title: Image width + - id: overlayImageH + type: number + title: Image height + - id: overlayBgcolor + type: string + title: Background color + ui: color + - id: camera + title: Camera flight sequence + list: true + fields: + - id: cameraPosition + type: camera + title: Camera position + - id: cameraDuration + type: number + title: Duration + suffix: s + min: 0 + - id: cameraDelay + type: number + title: Delay + suffix: s + min: 0 + - id: storytelling + visualizer: cesium + type: widget + title: Storytelling + description: A feature that enables you to create a story. Connect points in a meaningful way so that your information can be consumed and understood easily.
+ schema: + groups: + - id: default + title: Storytelling + fields: + - id: duration + title: Duration + type: number + suffix: s + defaultValue: 3 + - id: range + title: Range + type: number + suffix: m + defaultValue: 50000 + - id: camera + title: Camera pose + type: camera + ui: camera_pose + - id: autoStart + title: Auto start + type: bool + - id: stories + title: Stories + representativeField: layer + list: true + fields: + - id: layer + title: Layer + type: ref + ui: layer + - id: layerDuration + title: Duration + type: number + suffix: s + - id: layerRange + title: Range + type: number + suffix: m + - id: layerCamera + title: Camera position + type: camera diff --git a/pkg/builtin/manifest_gen.go b/pkg/builtin/manifest_gen.go new file mode 100644 index 000000000..9a4ddbcb9 --- /dev/null +++ b/pkg/builtin/manifest_gen.go @@ -0,0 +1,5 @@ +// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. + +package builtin + +const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline 
primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo 
that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo 
overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image 
size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table 
block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera 
flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","title":"Image width","type":"number"},{"id":"overlayImageH","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml new file mode 100644 index 000000000..5588e3903 --- /dev/null +++ b/pkg/builtin/manifest_ja.yml @@ -0,0 +1,472 @@ +title: Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ +description: ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ +extensions: + cesium: + title: Cesium + description: ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ + propertySchema: + default: + title: ใ‚ทใƒผใƒณ + fields: + camera: + title: ใ‚ซใƒกใƒฉๅˆๆœŸไฝ็ฝฎ + description: ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒๆœ€ๅˆใซ่กจ็คบใ™ใ‚‹ใ‚ซใƒกใƒฉใฎไฝ็ฝฎใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + terrain: + title: ๅœฐๅฝข + description: ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + skybox: + title: ๅฎ‡ๅฎ™ใฎ่กจ็คบ + description: ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + bgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + description: ๅฎ‡ๅฎ™็ฉบ้–“ใŒ้ž่กจ็คบใฎๅ ดๅˆใฎใ€่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ 
+ ion: + title: Cesium Ion APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ + description: ่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚ + tiles: + title: ใ‚ฟใ‚คใƒซ + description: ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + fields: + tile_title: + title: ๅๅ‰ + tile_type: + title: ็จฎ้กž + choices: + default: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + default_label: ใƒฉใƒ™ใƒซไป˜ใๅœฐๅ›ณ + default_road: ้“่ทฏๅœฐๅ›ณ + stamen_watercolor: Stamen Watercolor + stamen_toner: Stamen Toner + open_street_map: Open Street Map + esri_world_topo: ESRI Topography + black_marble: Black Marble + japan_gsi_standard: ๅœฐ็†้™ขๅœฐๅ›ณ ๆจ™ๆบ–ๅœฐๅ›ณ + url: URL + tile_url: + title: URL + tile_minLevel: + title: ๆœ€ๅฐใƒฌใƒ™ใƒซ + tile_maxLevel: + title: ๆœ€ๅคงใƒฌใƒ™ใƒซ + atmosphere: + title: ๅคงๆฐ— + description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚ + fields: + enable_sun: + title: ๅคช้™ฝ + description: ๅฎ‡ๅฎ™็ฉบ้–“ใซๅญ˜ๅœจใ™ใ‚‹ๅคช้™ฝใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + enable_lighting: + title: ๅคช้™ฝๅ…‰ + description: ๅคช้™ฝๅ…‰ใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใ‚‹ใ“ใจใงใ€ๆ˜ผๅคœใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + ground_atmosphere: + title: ๅœฐ่กจใฎๅคงๆฐ— + description: ๅœฐ่กจใฎๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + sky_atmosphere: + title: ไธŠ็ฉบใฎๅคงๆฐ— + description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + fog: + title: ้œง + description: ้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + fog_density: + title: ๆฟƒๅบฆ + description: ้œงใฎๆฟƒๅบฆใ‚’0ไปฅไธŠใ‹ใ‚‰่จญๅฎšใ—ใพใ™ใ€‚ + brightness_shift: + title: ๆ˜Žๅบฆ + hue_shift: + title: ่‰ฒ็›ธ + surturation_shift: + title: ๅฝฉๅบฆ + infobox: + title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น + description: 
้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น + fields: + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + size: + title: ใ‚ตใ‚คใ‚บ + choices: + small: ๅฐ + large: ๅคง + bgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + typography: + title: ใƒ•ใ‚ฉใƒณใƒˆ + marker: + title: ใƒžใƒผใ‚ซใƒผ + description: ใƒ‰ใƒฉใƒƒใ‚ฐ&ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใงใ€ๅœฐๅ›ณไธŠใซใƒžใƒผใ‚ซใƒผใ‚’่ฟฝๅŠ ใ—ใพใ™ใ€‚ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใใ‚Œใ‚‰ใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: ใƒžใƒผใ‚ซใƒผ + fields: + location: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + style: + title: ่กจ็คบๆ–นๆณ• + choices: + point: ใƒใ‚คใƒณใƒˆ + image: ใ‚ขใ‚คใ‚ณใƒณ + pointColor: + title: ใƒใ‚คใƒณใƒˆ่‰ฒ + pointSize: + title: ใƒใ‚คใƒณใƒˆใ‚ตใ‚คใ‚บ + image: + title: ็”ปๅƒURL + imageSize: + title: ็”ปๅƒใ‚ตใ‚คใ‚บ + imageCrop: + title: ๅˆ‡ใ‚ŠๆŠœใ + choices: + none: ใชใ— + circle: ๅ††ๅฝข + imageShadow: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆ + imageShadowColor: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ + imageShadowBlur: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„ + imageShadowPositionX: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆX + imageShadowPositionY: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆY + label: + title: ใƒฉใƒ™ใƒซ + labelText: + title: ใƒฉใƒ™ใƒซๆ–‡ๅญ— + labelTypography: + title: ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ + extrude: + title: ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™ + polyline: + title: ็›ด็ทš + description: Polyline primitive + propertySchema: + default: + title: ็›ด็ทš + fields: + coordinates: + title: ้ ‚็‚น + strokeColor: + title: ็ทš่‰ฒ + strokeWidth: + title: ็ทšๅน… + polygon: + title: ใƒใƒชใ‚ดใƒณ + description: Polygon primitive + propertySchema: + default: + title: ใƒใƒชใ‚ดใƒณ + fields: + polygon: + title: ใƒใƒชใ‚ดใƒณ + fill: + title: ๅก—ใ‚Š + fillColor: + title: ๅก—ใ‚Š่‰ฒ + stroke: 
+ title: ็ทš + strokeColor: + title: ็ทš่‰ฒ + strokeWidth: + title: ็ทšๅน… + rect: + title: ้•ทๆ–นๅฝข + description: Rectangle primitive + propertySchema: + default: + title: ้•ทๆ–นๅฝข + fields: + rect: + title: ้•ทๆ–นๅฝข + height: + title: ้ซ˜ๅบฆ + extrudedHeight: + title: ้ซ˜ใ• + style: + title: ใ‚นใ‚ฟใ‚คใƒซ + choices: + color: ่‰ฒ + image: ็”ปๅƒ + fillColor: + title: ๅก—ใ‚Š่‰ฒ + image: + title: ็”ปๅƒURL + photooverlay: + title: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค + description: ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ใ•ใ‚ŒใŸใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค + fields: + location: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + camera: + title: ใ‚ซใƒกใƒฉ + description: ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚ + image: + title: ใ‚ขใ‚คใ‚ณใƒณ + imageSize: + title: ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ + imageCrop: + title: ๅˆ‡ใ‚ŠๆŠœใ + choices: + none: ใชใ— + circle: ๅ††ๅฝข + imageShadow: + title: ใ‚ขใ‚คใ‚ณใƒณใ‚ทใƒฃใƒ‰ใ‚ฆ + imageShadowColor: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ + imageShadowBlur: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„ + imageShadowPositionX: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆX + imageShadowPositionY: + title: ใ‚ทใƒฃใƒ‰ใ‚ฆY + photoOverlayImage: + title: ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ + ellipsoid: + title: ็ƒไฝ“ใƒ„ใƒผใƒซ + description: ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใ‚’ๅœฐๅ›ณไธŠใซใƒ‰ใƒฉใƒƒใ‚ฐ&ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใง่ฟฝๅŠ ใงใใพใ™ใ€‚ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใซใ‚ˆใฃใฆ็ซ‹ไฝ“็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚ + propertySchema: + default: + title: ็ƒไฝ“ใƒ„ใƒผใƒซ + fields: + position: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + radius: + title: ๅŠๅพ„ + fillColor: + title: ๅก—ใ‚Š่‰ฒ + resource: + title: ใƒ•ใ‚กใ‚คใƒซ + description: ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟ๏ผˆKMLใ€GeoJSON / TopoJSONใ€CZMLใชใฉ๏ผ‰ใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ + propertySchema: 
+ default: + title: ใƒ•ใ‚กใ‚คใƒซ + fields: + url: + title: ใƒ•ใ‚กใ‚คใƒซ URL + choices: + auto: ่‡ชๅ‹• + kml: KML + geojson: GeoJSON / TopoJSON + czml: CZML + textblock: + title: ใƒ†ใ‚ญใ‚นใƒˆ + description: Text block + propertySchema: + default: + title: ใƒ†ใ‚ญใ‚นใƒˆใƒ–ใƒญใƒƒใ‚ฏ + fields: + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + text: + title: ใ‚ณใƒณใƒ†ใƒณใƒ„ + markdown: + title: ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ + typography: + title: ใƒ•ใ‚ฉใƒณใƒˆ + imageblock: + title: ็”ปๅƒ + description: ็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ + fields: + image: + title: ็”ปๅƒ + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + fullSize: + title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ + imageSize: + title: ็”ปๅƒใ‚ตใ‚คใ‚บ + choices: + cover: ใ‚ซใƒใƒผ + contain: ๅซใ‚€ + imagePositionX: + title: ๆฐดๅนณไฝ็ฝฎ + choices: + left: ๅทฆ + center: ไธญๅคฎ + right: ๅณ + imagePositionY: + title: ๅž‚็›ดไฝ็ฝฎ + choices: + top: ไธŠ + center: ไธญๅคฎ + bottom: ไธ‹ + videoblock: + title: ๅ‹•็”ป + description: ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ + fields: + url: + title: ๅ‹•็”ป URL + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + fullSize: + title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ + locationblock: + title: ไฝ็ฝฎๆƒ…ๅ ฑ + description: ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ + fields: + location: + title: ไฝ็ฝฎๆƒ…ๅ ฑ + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + fullSize: + title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ + dlblock: + title: ่กจ + description: ่กจใƒ–ใƒญใƒƒใ‚ฏ + propertySchema: + default: + title: ่กจใƒ–ใƒญใƒƒใ‚ฏ + fields: + title: + title: ใ‚ฟใ‚คใƒˆใƒซ + typography: + title: ใƒ•ใ‚ฉใƒณใƒˆ + items: + title: ใ‚ขใ‚คใƒ†ใƒ  + fields: + item_title: + title: ใ‚ฟใ‚คใƒˆใƒซ + item_datatype: + title: ็จฎ้กž + choices: + string: ๆ–‡ๅญ— + number: ๆ•ฐๅญ— + item_datastr: + title: ใƒ‡ใƒผใ‚ฟ(ๆ–‡ๅญ—) + item_datanum: + title: ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—) + menu: + title: ใƒกใƒ‹ใƒฅใƒผ + description: ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ 
ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚\nใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚\nใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™\nใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚ + propertySchema: + buttons: + title: ใƒœใ‚ฟใƒณ + fields: + buttonInvisible: + title: ้ž่กจ็คบ + buttonTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + buttonPosition: + title: ่กจ็คบไฝ็ฝฎ + choices: + topleft: ไธŠๅทฆ + topright: ไธŠๅณ + bottomleft: ไธ‹ๅทฆ + bottomright: ไธ‹ๅณ + buttonStyle: + title: ่กจ็คบๆ–นๆณ• + choices: + text: ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ + icon: ใ‚ขใ‚คใ‚ณใƒณใฎใฟ + texticon: ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ + buttonIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + buttonColor: + title: ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ + buttonBgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + buttonType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + menu: ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + buttonLink: + title: ใƒชใƒณใ‚ฏ + buttonCamera: + title: ใ‚ซใƒกใƒฉ + menu: + title: ใƒกใƒ‹ใƒฅใƒผ + fields: + menuTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + menuIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + menuType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + border: ๅŒบๅˆ‡ใ‚Š็ทš + menuLink: + title: ใƒชใƒณใ‚ฏ + menuCamera: + title: ใ‚ซใƒกใƒฉ + splashscreen: + title: ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ + description: ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + overlay: + title: ใ‚ชใƒผใƒใƒผใƒฌใ‚ค + fields: + overlayEnabled: + title: ๆœ‰ๅŠน + overlayDelay: + title: ้–‹ๅง‹ๆ™‚้–“ + overlayDuration: + title: ่กจ็คบๆ™‚้–“ + overlayTransitionDuration: + title: ใƒ•ใ‚งใƒผใƒ‰ๆ™‚้–“ + overlayImage: + title: ใ‚ชใƒผใƒใƒผใƒฌใ‚ค็”ปๅƒ + overlayImageW: + title: ็”ปๅƒๅน… + overlayImageH: + title: ็”ปๅƒ้ซ˜ใ• + overlayBgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + 
camera: + title: ใ‚ซใƒกใƒฉใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ + fields: + cameraPosition: + title: ใ‚ซใƒกใƒฉไฝ็ฝฎ + cameraDuration: + title: ใ‚ซใƒกใƒฉ้–‹ๅง‹ๆ™‚้–“ + cameraDelay: + title: ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“ + storytelling: + title: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ + description: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + fields: + duration: + title: ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“ + range: + title: ็”ป่ง’ + camera: + title: ใ‚ซใƒกใƒฉ + autoStart: + title: ่‡ชๅ‹•ๅ†็”Ÿ + stories: + title: ใ‚นใƒˆใƒผใƒชใƒผ + fields: + layer: + title: ใƒฌใ‚คใƒคใƒผ + layerDuration: + title: ็งปๅ‹•ๆ™‚้–“ + layerRange: + title: ใ‚ซใƒกใƒฉ็”ป่ง’ + layerCamera: + title: ใ‚ซใƒกใƒฉ diff --git a/pkg/builtin/manifest_ja_gen.go b/pkg/builtin/manifest_ja_gen.go new file mode 100644 index 000000000..1ba2a559b --- /dev/null +++ b/pkg/builtin/manifest_ja_gen.go @@ -0,0 +1,5 @@ +// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. 
+ +package builtin + +const pluginManifestJSON_ja string = `{"description":"ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ","extensions":{"cesium":{"description":"ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚","propertySchema":{"atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚","fields":{"brightness_shift":{"title":"ๆ˜Žๅบฆ"},"enable_lighting":{"description":"ๅคช้™ฝๅ…‰ใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใ‚‹ใ“ใจใงใ€ๆ˜ผๅคœใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅคช้™ฝๅ…‰"},"enable_sun":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใซๅญ˜ๅœจใ™ใ‚‹ๅคช้™ฝใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅคช้™ฝ"},"fog":{"description":"้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"้œง"},"fog_density":{"description":"้œงใฎๆฟƒๅบฆใ‚’0ไปฅไธŠใ‹ใ‚‰่จญๅฎšใ—ใพใ™ใ€‚","title":"ๆฟƒๅบฆ"},"ground_atmosphere":{"description":"ๅœฐ่กจใฎๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅœฐ่กจใฎๅคงๆฐ—"},"hue_shift":{"title":"่‰ฒ็›ธ"},"sky_atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ไธŠ็ฉบใฎๅคงๆฐ—"},"surturation_shift":{"title":"ๅฝฉๅบฆ"}},"title":"ๅคงๆฐ—"},"default":{"fields":{"bgcolor":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใŒ้ž่กจ็คบใฎๅ ดๅˆใฎใ€่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"่ƒŒๆ™ฏ่‰ฒ"},"camera":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒๆœ€ๅˆใซ่กจ็คบใ™ใ‚‹ใ‚ซใƒกใƒฉใฎไฝ็ฝฎใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉๅˆๆœŸไฝ็ฝฎ"},"ion":{"description":"่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚","title":"Cesium Icon 
APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ"},"skybox":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅฎ‡ๅฎ™ใฎ่กจ็คบ"},"terrain":{"description":"ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅœฐๅฝข"}},"title":"ใ‚ทใƒผใƒณ"},"tiles":{"description":"ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","fields":{"tile_maxLevel":{"title":"ๆœ€ๅคงใƒฌใƒ™ใƒซ"},"tile_minLevel":{"title":"ๆœ€ๅฐใƒฌใƒ™ใƒซ"},"tile_title":{"title":"ๅๅ‰"},"tile_type":{"choices":{"black_marble":"Black Marble","default":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ","default_label":"ใƒฉใƒ™ใƒซไป˜ใๅœฐๅ›ณ","default_road":"้“่ทฏๅœฐๅ›ณ","esri_world_topo":"ESRI Topography","japan_gsi_standard":"ๅœฐ็†้™ขๅœฐๅ›ณ ๆจ™ๆบ–ๅœฐๅ›ณ","open_street_map":"Open Street Map","stamen_toner":"Stamen Toner","stamen_watercolor":"Stamen Watercolor","url":"URL"},"title":"็จฎ้กž"},"tile_url":{"title":"URL"}},"title":"ใ‚ฟใ‚คใƒซ"}},"title":"Cesium"},"dlblock":{"description":"่กจใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"่กจใƒ–ใƒญใƒƒใ‚ฏ"},"items":{"fields":{"item_datanum":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—)"},"item_datastr":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ–‡ๅญ—)"},"item_datatype":{"choices":{"number":"ๆ•ฐๅญ—","string":"ๆ–‡ๅญ—"},"title":"็จฎ้กž"},"item_title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ใ‚ขใ‚คใƒ†ใƒ "}},"title":"่กจ"},"ellipsoid":{"description":"ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใ‚’ๅœฐๅ›ณไธŠใซใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใง่ฟฝๅŠ 
ใงใใพใ™ใ€‚ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใซใ‚ˆใฃใฆ็ซ‹ไฝ“็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"position":{"title":"ไฝ็ฝฎ"},"radius":{"title":"ๅŠๅพ„"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"},"imageblock":{"description":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"image":{"title":"็”ปๅƒ"},"imagePositionX":{"choices":{"center":"ไธญๅคฎ","left":"ๅทฆ","right":"ๅณ"},"title":"ๆฐดๅนณไฝ็ฝฎ"},"imagePositionY":{"choices":{"bottom":"ไธ‹","center":"ไธญๅคฎ","top":"ไธŠ"},"title":"ๅž‚็›ดไฝ็ฝฎ"},"imageSize":{"choices":{"contain":"ๅซใ‚€","cover":"ใ‚ซใƒใƒผ"},"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"็”ปๅƒ"},"infobox":{"description":"้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"bgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"size":{"choices":{"large":"ๅคง","small":"ๅฐ"},"title":"ใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"},"locationblock":{"description":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"marker":{"description":"ใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใงใ€ๅœฐๅ›ณไธŠใซใƒžใƒผใ‚ซใƒผใ‚’่ฟฝๅŠ 
ใ—ใพใ™ใ€‚ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใใ‚Œใ‚‰ใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"extrude":{"title":"ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"label":{"title":"ใƒฉใƒ™ใƒซ"},"labelText":{"title":"ใƒฉใƒ™ใƒซๆ–‡ๅญ—"},"labelTypography":{"title":"ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ"},"location":{"title":"ไฝ็ฝฎ"},"pointColor":{"title":"ใƒใ‚คใƒณใƒˆ่‰ฒ"},"pointSize":{"title":"ใƒใ‚คใƒณใƒˆใ‚ตใ‚คใ‚บ"},"style":{"choices":{"image":"ใ‚ขใ‚คใ‚ณใƒณ","point":"ใƒใ‚คใƒณใƒˆ"},"title":"่กจ็คบๆ–นๆณ•"}},"title":"ใƒžใƒผใ‚ซใƒผ"}},"title":"ใƒžใƒผใ‚ซใƒผ"},"menu":{"description":"ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚\\nใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚\\nใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ 
ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™\\nใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚","propertySchema":{"buttons":{"fields":{"buttonBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"buttonCamera":{"title":"ใ‚ซใƒกใƒฉ"},"buttonColor":{"title":"ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ"},"buttonIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"buttonInvisible":{"title":"้ž่กจ็คบ"},"buttonLink":{"title":"ใƒชใƒณใ‚ฏ"},"buttonPosition":{"choices":{"bottomleft":"ไธ‹ๅทฆ","bottomright":"ไธ‹ๅณ","topleft":"ไธŠๅทฆ","topright":"ไธŠๅณ"},"title":"่กจ็คบไฝ็ฝฎ"},"buttonStyle":{"choices":{"icon":"ใ‚ขใ‚คใ‚ณใƒณใฎใฟ","text":"ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ","texticon":"ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ"},"title":"่กจ็คบๆ–นๆณ•"},"buttonTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"buttonType":{"choices":{"camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ","menu":"ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒœใ‚ฟใƒณ"},"menu":{"fields":{"menuCamera":{"title":"ใ‚ซใƒกใƒฉ"},"menuIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"menuLink":{"title":"ใƒชใƒณใ‚ฏ"},"menuTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"menuType":{"choices":{"border":"ๅŒบๅˆ‡ใ‚Š็ทš","camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"},"photooverlay":{"description":"ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ใ•ใ‚ŒใŸใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"camera":{"description":"ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎ"},"photoOverlayImage":{"title":"ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"},"polygon":{"description":"Polygon primitive","propertySchema":{"default":{"fields":{"fill":{"title":"ๅก—ใ‚Š"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"polygon":{"title":"ใƒใƒชใ‚ดใƒณ"},"stroke":{"title":"็ทš"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"ใƒใƒชใ‚ดใƒณ"}},"title":"ใƒใƒชใ‚ดใƒณ"},"polyline":{"description":"Polyline primitive","propertySchema":{"default":{"fields":{"coordinates":{"title":"้ ‚็‚น"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"็›ด็ทš"}},"title":"็›ด็ทš"},"rect":{"description":"Rectangle primitive","propertySchema":{"default":{"fields":{"extrudedHeight":{"title":"้ซ˜ใ•"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"rect":{"title":"้•ทๆ–นๅฝข"},"style":{"choices":{"color":"่‰ฒ","image":"็”ปๅƒ"},"title":"ใ‚นใ‚ฟใ‚คใƒซ"}},"title":"้•ทๆ–นๅฝข"}},"title":"้•ทๆ–นๅฝข"},"resource":{"description":"ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟ๏ผˆๅฝขๅผไฝ•๏ผŸ๏ผŸ๏ผŸ๏ผ‰ใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚","propertySchema":{"default":{"fields":{"url":{"choices":{"auto":"่‡ชๅ‹•","czml":"CZML","geojson":"GeoJSON / TopoJSON","kml":"KML"},"title":"ใƒ•ใ‚กใ‚คใƒซ URL"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"},"splashscreen":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"camera":{"fields":{"cameraDelay":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"cameraDuration":{"title":"ใ‚ซใƒกใƒฉ้–‹ๅง‹ๆ™‚้–“"},"cameraPosition":{"title":"ใ‚ซใƒกใƒฉไฝ็ฝฎ"}},"title":"ใ‚ซใƒกใƒฉใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ"},"overlay":{"fields":{"overlayBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"overlayDelay":{"title":"้–‹ๅง‹ๆ™‚้–“"},"overlayDuration":{"title":"่กจ็คบๆ™‚้–“"},"overlayEnabled":{"title":"ๆœ‰ๅŠน"},"overlayImage":{"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค็”ปๅƒ"},"overlayImageH":{"title":"็”ปๅƒ้ซ˜ใ•"},"overlayImageW":{"title":"็”ปๅƒๅน…"},"overlayTransitionDuration":{"title":"ใƒ•ใ‚งใƒผใƒ‰ๆ™‚้–“"}},"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ"},"storytelling":{"description":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚","propertySchema":{"default":{"fields":{"autoStart":{"title":"่‡ชๅ‹•ๅ†็”Ÿ"},"camera":{"title":"ใ‚ซใƒกใƒฉ"},"duration":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"range":{"title":"็”ป่ง’"}},"title":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"},"stories":{"fields":{"layer":{"title":"ใƒฌใ‚คใƒคใƒผ"},"layerCamera":{"title":"ใ‚ซใƒกใƒฉ"},"layerDuration":{"title":"็งปๅ‹•ๆ™‚้–“"},"layerRange":{"title":"ใ‚ซใƒกใƒฉ็”ป่ง’"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผ"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ"},"textblock":{"description":"Text 
block","propertySchema":{"default":{"fields":{"markdown":{"title":"ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ"},"text":{"title":"ใ‚ณใƒณใƒ†ใƒณใƒ„"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆ"},"videoblock":{"description":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"url":{"title":"ๅ‹•็”ป URL"}},"title":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ๅ‹•็”ป"}},"title":"Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ"}` diff --git a/pkg/builtin/migration.go b/pkg/builtin/migration.go new file mode 100644 index 000000000..ff11e348a --- /dev/null +++ b/pkg/builtin/migration.go @@ -0,0 +1,3 @@ +package builtin + +// TODO: migration code diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 000000000..fd069478a --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,23 @@ +package config + +import "sort" + +type Config struct { + Migration int64 +} + +func (c *Config) NextMigrations(migrations []int64) []int64 { + migrations2 := append([]int64{}, migrations...) 
+	sort.SliceStable(migrations2, func(i, j int) bool { return migrations2[i] < migrations2[j] }) // sort ascending so the first migration newer than c.Migration starts the pending suffix
+
+	for i, m := range migrations2 {
+		if len(migrations2) <= i { // NOTE(review): unreachable — a range index is always < len(migrations2); candidate for removal
+			return nil
+		}
+		if c.Migration < m { // first migration newer than the applied one: everything from here on is still pending
+			return migrations2[i:]
+		}
+	}
+
+	return nil
+}
diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go
new file mode 100644
index 000000000..5492aaebe
--- /dev/null
+++ b/pkg/config/config_test.go
@@ -0,0 +1,14 @@
+package config
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestConfigNextMigrations(t *testing.T) {
+	c := &Config{
+		Migration: 100,
+	}
+	assert.Equal(t, []int64{200, 500}, c.NextMigrations([]int64{1, 100, 500, 200, 2}))
+}
diff --git a/pkg/czml/czml.go b/pkg/czml/czml.go
new file mode 100644
index 000000000..bc6d24285
--- /dev/null
+++ b/pkg/czml/czml.go
@@ -0,0 +1,45 @@
+package czml
+
+type Feature struct { // a CZML packet: one entity and its optional graphical properties
+	Id       string    `json:"id"`
+	Name     string    `json:"name"`
+	Polygon  *Polygon  `json:"polygon,omitempty"`
+	Polyline *Polyline `json:"polyline,omitempty"`
+	Position *Position `json:"position,omitempty"`
+	Point    *Point    `json:"point,omitempty"`
+}
+type Polyline struct {
+	Positions Position  `json:"positions"`
+	Material  *Material `json:"material,omitempty"`
+	Width     float64   `json:"width,omitempty"`
+}
+type Polygon struct {
+	Positions   Position  `json:"positions"`
+	Fill        bool      `json:"fill,omitempty"`
+	Material    *Material `json:"material,omitempty"`
+	Stroke      bool      `json:"outline,omitempty"` // JSON names follow CZML ("outline*"), Go names follow the app ("Stroke*")
+	StrokeColor *Color    `json:"outlineColor,omitempty"`
+	StrokeWidth float64   `json:"outlineWidth,omitempty"`
+}
+type Point struct {
+	Color     string  `json:"color,omitempty"`
+	PixelSize float64 `json:"pixelSize,omitempty"`
+}
+type Position struct {
+	CartographicDegrees []float64 `json:"cartographicDegrees"` // flat lng/lat/height triples per CZML convention — TODO confirm ordering against writers
+}
+type Material struct {
+	SolidColor      *SolidColor      `json:"solidColor,omitempty"`
+	PolylineOutline *PolylineOutline `json:"polylineOutline,omitempty"`
+}
+type PolylineOutline struct {
+	Color *Color `json:"color"`
+}
+type 
SolidColor struct { + Color *Color `json:"color"` +} +type Color struct { + RGBA []int64 `json:"rgba,omitempty"` + RGBAF []float64 `json:"rgbaf,omitempty"` + Reference string `json:"reference,omitempty"` +} diff --git a/pkg/dataset/builder.go b/pkg/dataset/builder.go new file mode 100644 index 000000000..576f85638 --- /dev/null +++ b/pkg/dataset/builder.go @@ -0,0 +1,90 @@ +package dataset + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// Builder _ +type Builder struct { + d *Dataset +} + +// New _ +func New() *Builder { + return &Builder{d: &Dataset{}} +} + +// Build _ +func (b *Builder) Build() (*Dataset, error) { + if id.ID(b.d.id).IsNil() { + return nil, id.ErrInvalidID + } + if b.d.fields == nil || b.d.order == nil { + b.d.fields = map[id.DatasetSchemaFieldID]*Field{} + b.d.order = []id.DatasetSchemaFieldID{} + } + return b.d, nil +} + +// MustBuild _ +func (b *Builder) MustBuild() *Dataset { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +// ID _ +func (b *Builder) ID(id id.DatasetID) *Builder { + b.d.id = id + return b +} + +// NewID _ +func (b *Builder) NewID() *Builder { + b.d.id = id.DatasetID(id.New()) + return b +} + +// Scene _ +func (b *Builder) Scene(scene id.SceneID) *Builder { + b.d.scene = scene + return b +} + +// Source _ +func (b *Builder) Source(source Source) *Builder { + b.d.source = source + return b +} + +// Schema _ +func (b *Builder) Schema(schema id.DatasetSchemaID) *Builder { + b.d.schema = schema + return b +} + +// Fields _ +func (b *Builder) Fields(fields []*Field) *Builder { + b.d.fields = map[id.DatasetSchemaFieldID]*Field{} + b.d.order = make([]id.DatasetSchemaFieldID, 0, len(fields)) + sources := map[Source]struct{}{} + for _, f := range b.d.fields { + if source := f.Source(); source != "" { + sources[source] = struct{}{} + } + } + for _, f := range fields { + source := f.Source() + if source == "" { + b.d.fields[f.Field()] = f + b.d.order = append(b.d.order, f.Field()) + } else if _, 
ok := sources[source]; !ok {
+			b.d.fields[f.Field()] = f
+			b.d.order = append(b.d.order, f.Field())
+			sources[source] = struct{}{}
+		}
+	}
+	return b
+}
diff --git a/pkg/dataset/csvparser.go b/pkg/dataset/csvparser.go
new file mode 100644
index 000000000..2edb43381
--- /dev/null
+++ b/pkg/dataset/csvparser.go
@@ -0,0 +1,233 @@
+package dataset
+
+import (
+	"encoding/csv"
+	"errors"
+	"io"
+	"strconv"
+
+	"github.com/reearth/reearth-backend/pkg/id"
+)
+
+var (
+	// ErrFailedToParseCSVorTSVFile is returned when the content cannot be read as CSV/TSV.
+	ErrFailedToParseCSVorTSVFile error = errors.New("failed to parse file content")
+	// ErrIncompatibleSchema is returned when a schema does not match the CSV headers/values.
+	ErrIncompatibleSchema error = errors.New("schema is not compatible with csv")
+	// ErrDuplicatiedNameFields is returned when two schema fields share a name (NOTE(review): misspells "Duplicated"; exported, so not renamed here).
+	ErrDuplicatiedNameFields error = errors.New("failed to parse, name-duplicated fields")
+)
+
+type DatasetCSVParser struct { // reads CSV/TSV and builds a dataset Schema plus its Datasets
+	reader    *csv.Reader
+	firstline []string // first data row, read ahead in Init so value types can be guessed
+	headers   []string
+	schema    *Schema
+	name      string
+}
+
+func NewCSVParser(r io.Reader, n string, separator rune) *DatasetCSVParser { // separator: ',' for CSV, '\t' for TSV
+	r2 := csv.NewReader(r)
+	r2.Comma = separator
+	obj := &DatasetCSVParser{
+		reader: r2,
+		name:   n,
+	}
+	return obj
+}
+
+func (p *DatasetCSVParser) Init() error { // reads the header row and the first data row
+	headers, err := p.reader.Read()
+	if err != nil {
+		return ErrFailedToParseCSVorTSVFile
+	}
+	p.headers = headers
+	p.firstline, err = p.reader.Read()
+	if err != nil {
+		return ErrFailedToParseCSVorTSVFile
+	}
+	return nil
+}
+func (p *DatasetCSVParser) validateLine(line []string) bool { // a row is valid iff it has exactly one cell per header
+	return len(p.headers) == len(line)
+}
+
+func (p *DatasetCSVParser) getRecord(rec string) *Value { // guesses a cell's type: int, then float, then bool, falling back to string
+	var v *Value
+	vint, err := strconv.Atoi(rec)
+	if err == nil {
+		v = ValueFrom(vint)
+		return v
+	}
+
+	vfloat64, err := strconv.ParseFloat(rec, 64)
+	if err == nil {
+		v = ValueFrom(vfloat64)
+		return v
+	}
+	vbool, err := strconv.ParseBool(rec)
+	if err == nil {
+		v = ValueFrom(vbool)
+		return v
+	}
+	v = ValueFrom(rec)
+	return v
+}
+
+func (p *DatasetCSVParser) GuessSchema(sid id.SceneID) error { // builds a Schema from the headers; "lat"+"lng" columns collapse into one "location" field
+	if 
!p.validateLine(p.firstline) { + return ErrFailedToParseCSVorTSVFile + } + schemafields := []*SchemaField{} + haslat, haslng := false, false + for k, h := range p.headers { + if h == "lat" { + haslat = true + } + if h == "lng" { + haslng = true + } + if h != "lng" && h != "lat" { + t := p.getRecord(p.firstline[k]).Type() + field, _ := NewSchemaField().NewID().Name(h).Type(t).Build() + schemafields = append(schemafields, field) + } + } + if haslat && haslng { + field, _ := NewSchemaField().NewID().Name("location").Type(ValueTypeLatLng).Build() + schemafields = append(schemafields, field) + } + schema, err := NewSchema(). + NewID(). + Scene(sid). + Name(p.name). + Source(Source("file:///" + p.name)). + Fields(schemafields). + Build() + if err != nil { + return err + } + p.schema = schema + return nil +} + +func (p *DatasetCSVParser) ReadAll() (*Schema, []*Dataset, error) { + if p.schema == nil { + return nil, nil, errors.New("schema is not generated yet") + } + var fields []*Field + schemafieldmap := make(map[string]id.DatasetSchemaFieldID) + for _, f := range p.schema.Fields() { + if _, ok := schemafieldmap[f.Name()]; !ok { + schemafieldmap[f.Name()] = f.ID() + } else { + return nil, nil, ErrDuplicatiedNameFields + } + } + datasets := []*Dataset{} + i := 0 + for { + var line []string + var err error + if i == 0 { + // process first line + line = p.firstline + } else { + line, err = p.reader.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, nil, err + } + } + if !p.validateLine(line) { + return nil, nil, ErrFailedToParseCSVorTSVFile + } + + fields, err = p.getFields(line, schemafieldmap) + if err != nil { + return nil, nil, err + } + ds, err := New().NewID(). + Fields(fields). 
+ Scene(p.schema.Scene()).Schema(p.schema.ID()).Build() + if err != nil { + return nil, nil, err + } + datasets = append(datasets, ds) + i++ + } + + return p.schema, datasets, nil +} + +func (p *DatasetCSVParser) getFields(line []string, sfm map[string]id.DatasetSchemaFieldID) ([]*Field, error) { + fields := []*Field{} + var lat, lng *float64 + for i, record := range line { + value := p.getRecord(record).Value() + if p.headers[i] == "lng" { + value, err := strconv.ParseFloat(record, 64) + if err != nil { + return nil, ErrFailedToParseCSVorTSVFile + } + lng = &value + } + if p.headers[i] == "lat" { + value, err := strconv.ParseFloat(record, 64) + if err != nil { + return nil, ErrFailedToParseCSVorTSVFile + } + lat = &value + } + + if p.headers[i] != "lat" && p.headers[i] != "lng" { + fields = append(fields, NewField(sfm[p.headers[i]], ValueFrom(value), "")) + } + } + if lat != nil && lng != nil { + latlng := LatLng{Lat: *lat, Lng: *lng} + fields = append(fields, NewField(sfm["location"], ValueFrom(latlng), "")) + } + return append([]*Field{}, fields...), nil +} + +func (p *DatasetCSVParser) CheckCompatible(s *Schema) error { + fieldsmap := make(map[string]*SchemaField) + for _, f := range s.Fields() { + fieldsmap[f.Name()] = f + } + haslat, haslng := false, false + for i, h := range p.headers { + if h != "lat" && h != "lng" { + if fieldsmap[h] == nil { + return ErrIncompatibleSchema + } + t := fieldsmap[h].Type() + v := p.getRecord(p.firstline[i]) + if !t.ValidateValue(v) { + return ErrIncompatibleSchema + } + } + if h == "lat" { + haslat = true + } + if h == "lng" { + haslng = true + } + } + // check for location fields + if haslat && haslng { + if fieldsmap["location"] == nil { + return ErrIncompatibleSchema + } + } else { + if fieldsmap["location"] != nil { + return ErrIncompatibleSchema + } + } + + p.schema = s + return nil +} diff --git a/pkg/dataset/csvparser_test.go b/pkg/dataset/csvparser_test.go new file mode 100644 index 000000000..d6a37b823 --- /dev/null 
+++ b/pkg/dataset/csvparser_test.go @@ -0,0 +1,66 @@ +package dataset + +import ( + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +const ( + csvmock = `hoge,foo,bar,lat,lng +1,foo,bar,12,15` +) + +func TestCSVParser(t *testing.T) { + r := strings.NewReader(csvmock) + p := NewCSVParser(r, "hoge.csv", ',') + err := p.Init() + assert.NoError(t, err) + sceneID := id.NewSceneID() + err = p.GuessSchema(sceneID) + assert.NoError(t, err) + + schema, datasets, err := p.ReadAll() + assert.NoError(t, err) + + assert.NotEmpty(t, schema) + assert.Equal(t, "hoge.csv", schema.Name()) + assert.Equal(t, Source("file:///hoge.csv"), schema.Source()) + + assert.Equal(t, 1, len(datasets)) + sfm := make(map[string]string) + for _, sf := range schema.Fields() { + sfm[sf.ID().String()] = sf.Name() + } + dsfm := make(map[string]interface{}) + for _, dsf := range datasets[0].Fields() { + dsfm[sfm[dsf.Field().String()]] = dsf.Value().Interface() + } + latlng := map[string]interface{}{"lat": 12.0, "lng": 15.0} + assert.Equal(t, map[string]interface{}{ + "hoge": 1.0, + "foo": "foo", + "bar": "bar", + "location": latlng, + }, dsfm) + +} + +func TestCSVParserCheckCompatible(t *testing.T) { + r := strings.NewReader(csvmock) + p := NewCSVParser(r, "hoge", ',') + err := p.Init() + assert.NoError(t, err) + f1 := NewSchemaField().NewID().Name("hoge").Type(ValueTypeNumber).MustBuild() + f2 := NewSchemaField().NewID().Name("foo").Type(ValueTypeString).MustBuild() + f3 := NewSchemaField().NewID().Name("bar").Type(ValueTypeString).MustBuild() + f4 := NewSchemaField().NewID().Name("location").Type(ValueTypeLatLng).MustBuild() + fields := []*SchemaField{f1, f2, f3, f4} + ds, err := NewSchema().NewID().Fields(fields).Build() + assert.NoError(t, err) + result := p.CheckCompatible(ds) + assert.NoError(t, result) + +} diff --git a/pkg/dataset/dataset.go b/pkg/dataset/dataset.go new file mode 100644 index 000000000..abb09d844 --- /dev/null +++ 
b/pkg/dataset/dataset.go
@@ -0,0 +1,114 @@
+package dataset
+
+import "github.com/reearth/reearth-backend/pkg/id"
+
+// Dataset is one row of values conforming to a dataset Schema, with a stable field order.
+type Dataset struct {
+	id     id.DatasetID
+	source Source
+	schema id.DatasetSchemaID
+	fields map[id.DatasetSchemaFieldID]*Field
+	order  []id.DatasetSchemaFieldID
+	scene  id.SceneID
+}
+
+// ID returns the dataset ID; zero value for a nil receiver.
+func (d *Dataset) ID() (i id.DatasetID) {
+	if d == nil {
+		return
+	}
+	return d.id
+}
+
+// Scene returns the owning scene ID; zero value for a nil receiver.
+func (d *Dataset) Scene() (i id.SceneID) {
+	if d == nil {
+		return
+	}
+	return d.scene
+}
+
+// Source returns where this dataset was imported from; empty for a nil receiver.
+func (d *Dataset) Source() (i Source) {
+	if d == nil {
+		return
+	}
+	return d.source
+}
+
+// Schema returns the ID of the schema this dataset conforms to.
+func (d *Dataset) Schema() (i id.DatasetSchemaID) {
+	if d == nil {
+		return
+	}
+	return d.schema
+}
+
+// Fields returns the fields in schema order; nil when the receiver or its order is nil.
+func (d *Dataset) Fields() []*Field {
+	if d == nil || d.order == nil {
+		return nil
+	}
+	fields := make([]*Field, 0, len(d.fields))
+	for _, id := range d.order {
+		fields = append(fields, d.fields[id])
+	}
+	return fields
+}
+
+// Field returns the field with the given schema field ID, or nil.
+func (d *Dataset) Field(id id.DatasetSchemaFieldID) *Field {
+	if d == nil || d.fields == nil {
+		return nil
+	}
+	return d.fields[id]
+}
+
+// FieldRef is Field for a nillable ID; returns nil when the receiver or id is nil.
+func (d *Dataset) FieldRef(id *id.DatasetSchemaFieldID) *Field {
+	if d == nil || id == nil {
+		return nil
+	}
+	return d.fields[*id]
+}
+
+// NameField returns this dataset's value for ds's representative field, or nil when ds is not this dataset's schema.
+func (d *Dataset) NameField(ds *Schema) *Field {
+	if d == nil {
+		return nil
+	}
+	if d.Schema() != ds.ID() {
+		return nil
+	}
+	f := ds.RepresentativeField()
+	if f == nil {
+		return nil
+	}
+	return d.fields[f.ID()]
+}
+
+// FieldBySource returns a field imported from the given source, or nil. Map iteration, so ties are non-deterministic.
+func (d *Dataset) FieldBySource(source Source) *Field {
+	if d == nil {
+		return nil
+	}
+	for _, f := range d.fields {
+		if f.source == source {
+			return f
+		}
+	}
+	return nil
+}
+
+// FieldByType returns a field whose value has the given type, or nil. Map iteration, so ties are non-deterministic.
+func (d *Dataset) FieldByType(t ValueType) *Field {
+	if d == nil {
+		return nil
+	}
+	for _, f := range d.fields {
+		if f.Type() == t {
+			return f
+		}
+	}
+	return nil
+}
diff --git a/pkg/dataset/diff.go b/pkg/dataset/diff.go
new file 
mode 100644
index 000000000..bbd41a726
--- /dev/null
+++ b/pkg/dataset/diff.go
@@ -0,0 +1,10 @@
+package dataset
+
+import "github.com/reearth/reearth-backend/pkg/id"
+
+// Diff is the result of comparing two dataset lists: added and removed datasets, plus surviving pairs keyed by the old dataset's ID.
+type Diff struct {
+	Added   List
+	Removed List
+	Others  map[id.DatasetID]*Dataset
+}
diff --git a/pkg/dataset/field.go b/pkg/dataset/field.go
new file mode 100644
index 000000000..e03407cbc
--- /dev/null
+++ b/pkg/dataset/field.go
@@ -0,0 +1,61 @@
+package dataset
+
+import "github.com/reearth/reearth-backend/pkg/id"
+
+// Field is a single value of a dataset, keyed by its schema field ID.
+type Field struct {
+	field  id.DatasetSchemaFieldID
+	dtype  ValueType
+	value  *Value
+	source Source
+}
+
+// NewField creates a Field whose type is derived from the given value.
+func NewField(field id.DatasetSchemaFieldID, value *Value, source Source) *Field {
+	return &Field{
+		dtype:  value.Type(), // NOTE(review): assumes Value.Type is nil-receiver-safe — confirm
+		field:  field,
+		value:  value,
+		source: source,
+	}
+}
+
+// Field returns the schema field ID; zero value for a nil receiver.
+func (d *Field) Field() (i id.DatasetSchemaFieldID) {
+	if d == nil {
+		return
+	}
+	return d.field
+}
+
+// FieldRef returns the schema field ID as a pointer, or nil for a nil receiver.
+func (d *Field) FieldRef() *id.DatasetSchemaFieldID {
+	if d == nil {
+		return nil
+	}
+	return d.field.Ref()
+}
+
+// Type returns the value type; zero value for a nil receiver.
+func (d *Field) Type() (v ValueType) {
+	if d == nil {
+		return
+	}
+	return d.dtype
+}
+
+// Value returns the field value, or nil for a nil receiver.
+func (d *Field) Value() *Value {
+	if d == nil {
+		return nil
+	}
+	return d.value
+}
+
+// Source returns the source this field was imported from; empty for a nil receiver.
+func (d *Field) Source() (s Source) {
+	if d == nil {
+		return
+	}
+	return d.source
+}
diff --git a/pkg/dataset/graph_iterator.go b/pkg/dataset/graph_iterator.go
new file mode 100644
index 000000000..cb8e6b981
--- /dev/null
+++ b/pkg/dataset/graph_iterator.go
@@ -0,0 +1,72 @@
+package dataset
+
+import "github.com/reearth/reearth-backend/pkg/id"
+
+// GraphIterator is an iterator for traversing a dataset graph level by level.
+type GraphIterator struct {
+	m                 Map
+	ids               [][]id.DatasetID // discovered dataset IDs, grouped by depth
+	currentIndex      int
+	currentDepthIndex int
+	maxDepth          int
+}
+
+// GraphIteratorFrom creates an iterator starting at root that descends at most depth levels.
+func GraphIteratorFrom(root id.DatasetID, depth int) *GraphIterator {
+	return &GraphIterator{
+		ids: 
[][]id.DatasetID{{root}}, + maxDepth: depth, + } +} + +// Next _ +func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { + if di == nil || di.maxDepth == 0 || di.ids == nil || len(di.ids) == 0 || d == nil { + return id.DatasetID{}, false + } + if di.currentDepthIndex >= len(di.ids) { + return id.DatasetID{}, true + } + + if di.m == nil { + di.m = Map{} + } + di.m[d.ID()] = d + + // add fields + if len(di.ids) <= di.currentDepthIndex+1 { + di.ids = append(di.ids, []id.DatasetID{}) + } + nextDepthIDs := di.ids[di.currentDepthIndex+1] + currentIDs := di.ids[di.currentDepthIndex] + for _, f := range d.Fields() { + if r := f.Value().ValueRef(); r != nil { + nextDepthIDs = append(nextDepthIDs, id.DatasetID(*r)) + } + } + di.ids[di.currentDepthIndex+1] = nextDepthIDs + + // next + if di.currentIndex == len(currentIDs)-1 { + di.currentIndex = 0 + // next depth + if di.maxDepth <= di.currentDepthIndex || len(nextDepthIDs) == 0 { + // done + di.currentDepthIndex++ + return id.DatasetID{}, true + } + di.currentDepthIndex++ + } else { + di.currentIndex++ + } + + return di.ids[di.currentDepthIndex][di.currentIndex], false +} + +// Result _ +func (di *GraphIterator) Result() Map { + if di == nil { + return nil + } + return di.m +} diff --git a/pkg/dataset/graph_iterator_test.go b/pkg/dataset/graph_iterator_test.go new file mode 100644 index 000000000..73435ecc3 --- /dev/null +++ b/pkg/dataset/graph_iterator_test.go @@ -0,0 +1,63 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestDatasetGraphIterator(t *testing.T) { + sid := id.NewSceneID() + dsid := id.NewDatasetSchemaID() + + d0id := id.NewDatasetID() + d11id := id.NewDatasetID() + d12id := id.NewDatasetID() + d21id := id.NewDatasetID() + d31id := id.NewDatasetID() + d32id := id.NewDatasetID() + + d0, _ := New().ID(d0id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(id.NewDatasetSchemaFieldID(), 
ValueTypeRef.ValueFrom(d11id.ID()), ""), + NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d12id.ID()), ""), + }).Build() + d11, _ := New().ID(d11id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d21id.ID()), ""), + }).Build() + d12, _ := New().ID(d12id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(id.NewDatasetSchemaFieldID(), ValueTypeString.ValueFrom("hoge"), ""), + }).Build() + d21, _ := New().ID(d21id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d31id.ID()), ""), + NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d32id.ID()), ""), + }).Build() + d31, _ := New().ID(d31id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(id.NewDatasetSchemaFieldID(), ValueTypeString.ValueFrom("foo"), ""), + }).Build() + d32, _ := New().ID(d32id).Schema(dsid).Scene(sid).Fields([]*Field{ + NewField(id.NewDatasetSchemaFieldID(), ValueTypeString.ValueFrom("bar"), ""), + }).Build() + + it := GraphIteratorFrom(d0id, 3) + testTestDatasetGraphIteratorNext( + t, it, []*Dataset{d0, d11, d12, d21, d31, d32}, + ) + it = GraphIteratorFrom(d0id, 2) + testTestDatasetGraphIteratorNext( + t, it, []*Dataset{d0, d11, d12, d21}, + ) +} + +func testTestDatasetGraphIteratorNext(t *testing.T, it *GraphIterator, ds List) { + for i, d := range ds { + next, done := it.Next(d) + if i == len(ds)-1 { + assert.Equal(t, true, done) + } else { + assert.Equal(t, ds[i+1].ID(), next, "next %d", i) + assert.Equal(t, false, done, "next done %d", i) + } + } + assert.Equal(t, ds.Map(), it.Result()) +} diff --git a/pkg/dataset/graph_loader.go b/pkg/dataset/graph_loader.go new file mode 100644 index 000000000..a613abf42 --- /dev/null +++ b/pkg/dataset/graph_loader.go @@ -0,0 +1,37 @@ +package dataset + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type GraphLoader func(context.Context, id.DatasetID, ...id.DatasetSchemaFieldID) (List, 
*Field, error) + +func GraphLoaderFromMap(m Map) GraphLoader { + return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field, error) { + list, field := m.GraphSearchByFields(root, fields...) + return list, field, nil + } +} + +func GraphLoaderFromMapAndGraph(m Map, g GraphLoader) GraphLoader { + return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field, error) { + if m != nil { + if len(fields) == 0 { + return List{m[root]}, nil, nil + } + if len(fields) == 1 { + ds := m[root] + return List{ds}, ds.Field(fields[0]), nil + } + list, field := m.GraphSearchByFields(root, fields...) + if list != nil && field != nil { + return list, field, nil + } + } + + // it needs looking up dataset graph + return g(ctx, root, fields...) + } +} diff --git a/pkg/dataset/list.go b/pkg/dataset/list.go new file mode 100644 index 000000000..850e70906 --- /dev/null +++ b/pkg/dataset/list.go @@ -0,0 +1,195 @@ +package dataset + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// List _ +type List []*Dataset + +// First _ +func (l List) First() *Dataset { + if l == nil || len(l) == 0 { + return nil + } + return l[0] +} + +// Last _ +func (l List) Last() *Dataset { + if l == nil || len(l) == 0 { + return nil + } + return l[len(l)-1] +} + +// FindDataset _ +func (l List) FindDataset(id id.DatasetID) *Dataset { + for _, t := range l { + if t.ID() == id { + return t + } + } + return nil +} + +// ToDatasetIds _ +func (l List) ToDatasetIds() []id.DatasetID { + if l == nil { + return nil + } + + ids := []id.DatasetID{} + for _, t := range l { + ids = append(ids, t.ID()) + } + return ids +} + +// FindDatasetBySource _ +func (l List) FindDatasetBySource(s Source) *Dataset { + for _, t := range l { + if t.Source() == s { + return t + } + } + return nil +} + +// FilterByDatasetSchema _ +func (l List) FilterByDatasetSchema(s id.DatasetSchemaID) List { + n := List{} + for _, t := range l { + if t.Schema() == 
s { + n = append(n, t) + } + } + return n +} + +// DiffBySource _ +func (l List) DiffBySource(l2 List) Diff { + // l is old, l2 is new + added := []*Dataset{} + removed := []*Dataset{} + // others := map[DatasetSource]DatasetDiffTouple{} + others2 := map[id.DatasetID]*Dataset{} + + s1 := map[Source]*Dataset{} + for _, d1 := range l { + s1[d1.Source()] = d1 + } + + for _, d2 := range l2 { + if d1, ok := s1[d2.Source()]; ok { + // others + // others[d2.Source()] = DatasetDiffTouple{Old: d1, New: d2} + others2[d1.ID()] = d2 + } else { + // added + added = append(added, d2) + } + } + + for _, d1 := range l { + if _, ok := others2[d1.ID()]; !ok { + // removed + removed = append(removed, d1) + } + } + + return Diff{ + Added: added, + Removed: removed, + Others: others2, + // Others: others, + } +} + +// Map _ +func (l List) Map() Map { + if l == nil { + return nil + } + m := Map{} + for _, d := range l { + if d != nil { + m[d.ID()] = d + } + } + return m +} + +func (l List) Loader() Loader { + return LoaderFrom(l) +} + +func (l List) GraphLoader() GraphLoader { + return GraphLoaderFromMap(l.Map()) +} + +// Map _ +type Map map[id.DatasetID]*Dataset + +// Add _ +func (dm Map) Add(dss ...*Dataset) { + if dss == nil { + return + } + if dm == nil { + dm = map[id.DatasetID]*Dataset{} + } + for _, ds := range dss { + if ds == nil { + continue + } + dm[ds.ID()] = ds + } +} + +// Slice _ +func (dm Map) Slice() List { + if dm == nil { + return nil + } + res := make(List, 0, len(dm)) + for _, d := range dm { + res = append(res, d) + } + return res +} + +// GraphSearchByFields _ +func (dm Map) GraphSearchByFields(root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field) { + res := make(List, 0, len(fields)) + currentD := dm[root] + if currentD == nil { + return res, nil + } + for i, f := range fields { + if currentD == nil { + return res, nil + } + res = append(res, currentD) + field := currentD.Field(f) + if field == nil { + return res, nil + } + if len(fields)-1 == i { 
+ return res, field + } else if fid := field.Value().ValueRef(); fid != nil { + currentD = dm[id.DatasetID(*fid)] + } else { + return res, nil + } + } + return res, nil +} + +func (dm Map) Loader() Loader { + return LoaderFromMap(dm) +} + +func (dm Map) GraphLoader() GraphLoader { + return GraphLoaderFromMap(dm) +} diff --git a/pkg/dataset/list_test.go b/pkg/dataset/list_test.go new file mode 100644 index 000000000..9c5fd3723 --- /dev/null +++ b/pkg/dataset/list_test.go @@ -0,0 +1,77 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestDatasetListDiff(t *testing.T) { + sid := id.SceneID(id.New()) + source1 := Source("hogehoge/1") + source2 := Source("hogehoge/2") + source3 := Source("hogehoge/3") + d1, _ := New().NewID().Scene(sid).Source(source1).Build() + d2, _ := New().NewID().Scene(sid).Source(source2).Build() + d3, _ := New().NewID().Scene(sid).Source(source2).Build() + d4, _ := New().NewID().Scene(sid).Source(source3).Build() + d5, _ := New().NewID().Scene(sid).Source(source2).Build() // duplicated source + + l1 := List{d1, d2} + l2 := List{d3, d4} + diff := l1.DiffBySource(l2) + expected := Diff{ + Added: []*Dataset{d4}, + Removed: []*Dataset{d1}, + Others: map[id.DatasetID]*Dataset{ + d2.ID(): d3, + }, + } + assert.Equal(t, expected, diff) + + l1 = List{d1, d2, d5} + l2 = List{d3, d4} + diff = l1.DiffBySource(l2) + expected = Diff{ + Added: []*Dataset{d4}, + Removed: []*Dataset{d1, d2}, + Others: map[id.DatasetID]*Dataset{ + d5.ID(): d3, + }, + } + assert.Equal(t, expected, diff) +} + +func TestDatasetMapGraphSearchByFields(t *testing.T) { + did1 := id.NewDatasetID() + did2 := id.NewDatasetID() + did3 := id.NewDatasetID() + fid1 := id.NewDatasetSchemaFieldID() + fid2 := id.NewDatasetSchemaFieldID() + fid3 := id.NewDatasetSchemaFieldID() + sid := id.NewSceneID() + v1 := ValueTypeRef.ValueFrom(did2.ID()) + v2 := ValueTypeRef.ValueFrom(did3.ID()) + v3 := 
ValueTypeString.ValueFrom("value") + f3 := NewField(fid3, v3, "") + d1, _ := New().ID(did1).Scene(sid).Fields([]*Field{ + NewField(fid1, v1, ""), + }).Build() + d2, _ := New().ID(did2).Scene(sid).Fields([]*Field{ + NewField(fid2, v2, ""), + }).Build() + d3, _ := New().ID(did3).Scene(sid).Fields([]*Field{ + f3, + }).Build() + + m := List{d1, d2, d3}.Map() + + res, resf := m.GraphSearchByFields(did1, fid1, fid2, fid3) + assert.Equal(t, List{d1, d2, d3}, res) + assert.Equal(t, f3, resf) + + res2, resf2 := m.GraphSearchByFields(did1, fid1, fid3, fid2) + assert.Equal(t, List{d1, d2}, res2) + assert.Nil(t, resf2) +} diff --git a/pkg/dataset/loader.go b/pkg/dataset/loader.go new file mode 100644 index 000000000..5092aa34e --- /dev/null +++ b/pkg/dataset/loader.go @@ -0,0 +1,43 @@ +package dataset + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type Loader func(context.Context, ...id.DatasetID) (List, error) + +func LoaderFrom(data []*Dataset) Loader { + return func(ctx context.Context, ids ...id.DatasetID) (List, error) { + res := make(List, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[id.DatasetID]*Dataset) Loader { + return func(ctx context.Context, ids ...id.DatasetID) (List, error) { + res := make(List, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} diff --git a/pkg/dataset/schema.go b/pkg/dataset/schema.go new file mode 100644 index 000000000..b3b9208e1 --- /dev/null +++ b/pkg/dataset/schema.go @@ -0,0 +1,135 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +// Schema _ +type Schema struct { + id id.DatasetSchemaID + source Source + name string + fields 
map[id.DatasetSchemaFieldID]*SchemaField + order []id.DatasetSchemaFieldID + representativeField *id.DatasetSchemaFieldID + scene id.SceneID + dynamic bool +} + +// ID _ +func (d *Schema) ID() (i id.DatasetSchemaID) { + if d == nil { + return + } + return d.id +} + +// IDRef _ +func (d *Schema) IDRef() *id.DatasetSchemaID { + if d == nil { + return nil + } + return d.id.Ref() +} + +// Scene _ +func (d *Schema) Scene() (i id.SceneID) { + if d == nil { + return + } + return d.scene +} + +// Source _ +func (d *Schema) Source() (s Source) { + if d == nil { + return + } + return d.source +} + +// Name _ +func (d *Schema) Name() string { + if d == nil { + return "" + } + return d.name +} + +// RepresentativeFieldID _ +func (d *Schema) RepresentativeFieldID() *id.DatasetSchemaFieldID { + if d == nil { + return nil + } + return d.representativeField +} + +// RepresentativeField _ +func (d *Schema) RepresentativeField() *SchemaField { + if d == nil || d.representativeField == nil { + return nil + } + return d.fields[*d.representativeField] +} + +// Fields _ +func (d *Schema) Fields() []*SchemaField { + if d == nil || d.order == nil { + return nil + } + fields := make([]*SchemaField, 0, len(d.fields)) + for _, id := range d.order { + fields = append(fields, d.fields[id]) + } + return fields +} + +// Field _ +func (d *Schema) Field(id id.DatasetSchemaFieldID) *SchemaField { + if d == nil { + return nil + } + return d.fields[id] +} + +// FieldRef _ +func (d *Schema) FieldRef(id *id.DatasetSchemaFieldID) *SchemaField { + if d == nil || id == nil { + return nil + } + return d.fields[*id] +} + +// FieldBySource _ +func (d *Schema) FieldBySource(source Source) *SchemaField { + if d == nil { + return nil + } + for _, f := range d.fields { + if f.source == source { + return f + } + } + return nil +} + +// FieldByType _ +func (d *Schema) FieldByType(t ValueType) *SchemaField { + if d == nil { + return nil + } + for _, f := range d.fields { + if f.Type() == t { + return f + } + } + 
return nil +} + +// Dynamic _ +func (d *Schema) Dynamic() bool { + return d.dynamic +} + +// Rename _ +func (u *Schema) Rename(name string) { + u.name = name +} diff --git a/pkg/dataset/schema_builder.go b/pkg/dataset/schema_builder.go new file mode 100644 index 000000000..7999703f1 --- /dev/null +++ b/pkg/dataset/schema_builder.go @@ -0,0 +1,112 @@ +package dataset + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// SchemaBuilder _ +type SchemaBuilder struct { + d *Schema +} + +// NewSchema _ +func NewSchema() *SchemaBuilder { + return &SchemaBuilder{d: &Schema{}} +} + +// Build _ +func (b *SchemaBuilder) Build() (*Schema, error) { + if id.ID(b.d.id).IsNil() { + return nil, id.ErrInvalidID + } + if b.d.fields == nil || b.d.order == nil { + b.d.fields = map[id.DatasetSchemaFieldID]*SchemaField{} + b.d.order = []id.DatasetSchemaFieldID{} + } + return b.d, nil +} + +// MustBuild _ +func (b *SchemaBuilder) MustBuild() *Schema { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +// ID _ +func (b *SchemaBuilder) ID(id id.DatasetSchemaID) *SchemaBuilder { + b.d.id = id + return b +} + +// NewID _ +func (b *SchemaBuilder) NewID() *SchemaBuilder { + b.d.id = id.DatasetSchemaID(id.New()) + return b +} + +// Scene _ +func (b *SchemaBuilder) Scene(scene id.SceneID) *SchemaBuilder { + b.d.scene = scene + return b +} + +// Name _ +func (b *SchemaBuilder) Name(name string) *SchemaBuilder { + b.d.name = name + return b +} + +// Dynamic _ +func (b *SchemaBuilder) Dynamic(dynamic bool) *SchemaBuilder { + b.d.dynamic = dynamic + return b +} + +// Source _ +func (b *SchemaBuilder) Source(source Source) *SchemaBuilder { + b.d.source = source + return b +} + +// RepresentativeField _ +func (b *SchemaBuilder) RepresentativeField(representativeField id.DatasetSchemaFieldID) *SchemaBuilder { + rf := representativeField + b.d.representativeField = &rf + return b +} + +// Fields _ +func (b *SchemaBuilder) Fields(fields []*SchemaField) *SchemaBuilder { + 
b.d.fields = map[id.DatasetSchemaFieldID]*SchemaField{} + b.d.order = []id.DatasetSchemaFieldID{} + sources := map[string]struct{}{} + for _, f := range b.d.fields { + if f == nil { + continue + } + source := f.Source().String() + if source != "" { + sources[source] = struct{}{} + } + } + for _, f := range fields { + if f == nil { + continue + } + source := f.Source().String() + if source == "" { + copied := *f + b.d.fields[f.ID()] = &copied + b.d.order = append(b.d.order, f.ID()) + } else if _, ok := sources[source]; !ok { + copied := *f + b.d.fields[f.ID()] = &copied + b.d.order = append(b.d.order, f.ID()) + sources[source] = struct{}{} + } + } + return b +} diff --git a/pkg/dataset/schema_field.go b/pkg/dataset/schema_field.go new file mode 100644 index 000000000..b090395dd --- /dev/null +++ b/pkg/dataset/schema_field.go @@ -0,0 +1,62 @@ +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/idgen --name DatasetSchemaField --output ../id + +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +// SchemaField _ +type SchemaField struct { + id id.DatasetSchemaFieldID + name string + dataType ValueType + source Source + ref *id.DatasetSchemaID +} + +// ID _ +func (d *SchemaField) ID() (i id.DatasetSchemaFieldID) { + if d == nil { + return + } + return d.id +} + +// IDRef _ +func (d *SchemaField) IDRef() *id.DatasetSchemaFieldID { + if d == nil { + return nil + } + return d.id.Ref() +} + +// Name _ +func (d *SchemaField) Name() (n string) { + if d == nil { + return + } + return d.name +} + +// Ref _ +func (d *SchemaField) Ref() *id.DatasetSchemaID { + if d == nil { + return nil + } + return d.ref +} + +// Type _ +func (d *SchemaField) Type() (v ValueType) { + if d == nil { + return + } + return d.dataType +} + +// Source _ +func (d *SchemaField) Source() (s Source) { + if d == nil { + return + } + return d.source +} diff --git a/pkg/dataset/schema_field_builder.go b/pkg/dataset/schema_field_builder.go new file mode 100644 index 
000000000..07adf3b9b --- /dev/null +++ b/pkg/dataset/schema_field_builder.go @@ -0,0 +1,78 @@ +package dataset + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" +) + +// SchemaFieldBuilder _ +type SchemaFieldBuilder struct { + d *SchemaField +} + +// NewSchemaField _ +func NewSchemaField() *SchemaFieldBuilder { + return &SchemaFieldBuilder{d: &SchemaField{}} +} + +// Build _ +func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { + if id.ID(b.d.id).IsNil() { + return nil, id.ErrInvalidID + } + if _, ok := b.d.dataType.Validate(); !ok { + return nil, errors.New("invalid value type") + } + return b.d, nil +} + +// MustBuild _ +func (b *SchemaFieldBuilder) MustBuild() *SchemaField { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +// ID _ +func (b *SchemaFieldBuilder) ID(id id.DatasetSchemaFieldID) *SchemaFieldBuilder { + b.d.id = id + return b +} + +// NewID _ +func (b *SchemaFieldBuilder) NewID() *SchemaFieldBuilder { + b.d.id = id.DatasetSchemaFieldID(id.New()) + return b +} + +// Name _ +func (b *SchemaFieldBuilder) Name(name string) *SchemaFieldBuilder { + b.d.name = name + return b +} + +// Type _ +func (b *SchemaFieldBuilder) Type(dataType ValueType) *SchemaFieldBuilder { + b.d.dataType = dataType + return b +} + +// Source _ +func (b *SchemaFieldBuilder) Source(source Source) *SchemaFieldBuilder { + b.d.source = source + return b +} + +// Ref _ +func (b *SchemaFieldBuilder) Ref(ref *id.DatasetSchemaID) *SchemaFieldBuilder { + if ref == nil { + b.d.ref = nil + } else { + ref2 := *ref + b.d.ref = &ref2 + } + return b +} diff --git a/pkg/dataset/schema_field_diff.go b/pkg/dataset/schema_field_diff.go new file mode 100644 index 000000000..c6dab0ac4 --- /dev/null +++ b/pkg/dataset/schema_field_diff.go @@ -0,0 +1,45 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +// SchemaFieldDiff _ +type SchemaFieldDiff struct { + Added []*SchemaField + Removed []*SchemaField + Replaced 
map[id.DatasetSchemaFieldID]*SchemaField +} + +// FieldDiffBySource _ +func (d *Schema) FieldDiffBySource(d2 *Schema) SchemaFieldDiff { + added := []*SchemaField{} + removed := []*SchemaField{} + // others := map[DatasetSource]DatasetDiffTouple{} + others2 := map[id.DatasetSchemaFieldID]*SchemaField{} + + s1 := map[Source]*SchemaField{} + for _, d1 := range d.fields { + s1[d1.Source()] = d1 + } + + for _, d2 := range d2.fields { + if d1, ok := s1[d2.Source()]; ok { + others2[d1.ID()] = d2 + } else { + // added + added = append(added, d2) + } + } + + for _, d1 := range d.fields { + if _, ok := others2[d1.ID()]; !ok { + // removed + removed = append(removed, d1) + } + } + + return SchemaFieldDiff{ + Added: added, + Removed: removed, + Replaced: others2, + } +} diff --git a/pkg/dataset/schema_graph_iterator.go b/pkg/dataset/schema_graph_iterator.go new file mode 100644 index 000000000..8c5e7e159 --- /dev/null +++ b/pkg/dataset/schema_graph_iterator.go @@ -0,0 +1,72 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +// SchemaGraphIterator ใฏใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ใ‚ฐใƒฉใƒ•ๆŽข็ดขใ™ใ‚‹ใŸใ‚ใฎใ‚คใƒ†ใƒฌใƒผใ‚ฟใงใ™ใ€‚ +type SchemaGraphIterator struct { + m SchemaMap + ids [][]id.DatasetSchemaID + currentIndex int + currentDepthIndex int + maxDepth int +} + +// SchemaGraphIteratorFrom _ +func SchemaGraphIteratorFrom(root id.DatasetSchemaID, depth int) *SchemaGraphIterator { + return &SchemaGraphIterator{ + ids: [][]id.DatasetSchemaID{{root}}, + maxDepth: depth, + } +} + +// Next _ +func (di *SchemaGraphIterator) Next(d *Schema) (id.DatasetSchemaID, bool) { + if di == nil || di.maxDepth == 0 || di.ids == nil || len(di.ids) == 0 || d == nil { + return id.DatasetSchemaID{}, false + } + if di.currentDepthIndex >= len(di.ids) { + return id.DatasetSchemaID{}, true + } + + if di.m == nil { + di.m = SchemaMap{} + } + di.m[d.ID()] = d + + // add fields + if len(di.ids) <= di.currentDepthIndex+1 { + di.ids = append(di.ids, []id.DatasetSchemaID{}) + } + 
nextDepthIDs := di.ids[di.currentDepthIndex+1] + currentIDs := di.ids[di.currentDepthIndex] + for _, f := range d.Fields() { + if r := f.Ref(); r != nil { + nextDepthIDs = append(nextDepthIDs, *r) + } + } + di.ids[di.currentDepthIndex+1] = nextDepthIDs + + // next + if di.currentIndex == len(currentIDs)-1 { + di.currentIndex = 0 + // next depth + if di.maxDepth <= di.currentDepthIndex || len(nextDepthIDs) == 0 { + // done + di.currentDepthIndex++ + return id.DatasetSchemaID{}, true + } + di.currentDepthIndex++ + } else { + di.currentIndex++ + } + + return di.ids[di.currentDepthIndex][di.currentIndex], false +} + +// Result _ +func (di *SchemaGraphIterator) Result() SchemaMap { + if di == nil { + return nil + } + return di.m +} diff --git a/pkg/dataset/schema_graph_iterator_test.go b/pkg/dataset/schema_graph_iterator_test.go new file mode 100644 index 000000000..fef53de19 --- /dev/null +++ b/pkg/dataset/schema_graph_iterator_test.go @@ -0,0 +1,69 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestDatasetSchemaGraphIterator(t *testing.T) { + sid := id.NewSceneID() + d0id := id.NewDatasetSchemaID() + d11id := id.NewDatasetSchemaID() + d12id := id.NewDatasetSchemaID() + d21id := id.NewDatasetSchemaID() + d31id := id.NewDatasetSchemaID() + d32id := id.NewDatasetSchemaID() + + d0f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d11id).Build() + d0f1, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d12id).Build() + d11f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeString).Build() + d12f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d21id).Build() + d21f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d31id).Build() + d21f1, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d32id).Build() + 
d31f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeString).Build() + d32f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeString).Build() + + d0, _ := NewSchema().ID(d0id).Scene(sid).Fields([]*SchemaField{ + d0f0, d0f1, + }).Build() + d11, _ := NewSchema().ID(d11id).Scene(sid).Fields([]*SchemaField{ + d11f0, + }).Build() + d12, _ := NewSchema().ID(d12id).Scene(sid).Fields([]*SchemaField{ + d12f0, + }).Build() + d21, _ := NewSchema().ID(d21id).Scene(sid).Fields([]*SchemaField{ + d21f0, + d21f1, + }).Build() + d31, _ := NewSchema().ID(d31id).Scene(sid).Fields([]*SchemaField{ + d31f0, + }).Build() + d32, _ := NewSchema().ID(d32id).Scene(sid).Fields([]*SchemaField{ + d32f0, + }).Build() + + it := SchemaGraphIteratorFrom(d0id, 3) + testTestDatasetSchemaGraphIteratorNext( + t, it, []*Schema{d0, d11, d12, d21, d31, d32}, + ) + it = SchemaGraphIteratorFrom(d0id, 2) + testTestDatasetSchemaGraphIteratorNext( + t, it, []*Schema{d0, d11, d12, d21}, + ) +} + +func testTestDatasetSchemaGraphIteratorNext(t *testing.T, it *SchemaGraphIterator, ds SchemaList) { + for i, d := range ds { + next, done := it.Next(d) + if i == len(ds)-1 { + assert.Equal(t, true, done) + } else { + assert.Equal(t, ds[i+1].ID(), next, "next %d", i) + assert.Equal(t, false, done, "next done %d", i) + } + } + assert.Equal(t, ds.Map(), it.Result()) +} diff --git a/pkg/dataset/schema_list.go b/pkg/dataset/schema_list.go new file mode 100644 index 000000000..6e55b0262 --- /dev/null +++ b/pkg/dataset/schema_list.go @@ -0,0 +1,64 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +// SchemaList _ +type SchemaList []*Schema + +// Map _ +func (dsl SchemaList) Map() SchemaMap { + if dsl == nil { + return nil + } + m := SchemaMap{} + for _, d := range dsl { + if d != nil { + m[d.ID()] = d + } + } + return m +} + +// SchemaMap _ +type SchemaMap map[id.DatasetSchemaID]*Schema + +// Slice _ +func (dsm SchemaMap) Slice() SchemaList { + if dsm == 
nil { + return nil + } + res := make(SchemaList, 0, len(dsm)) + for _, ds := range dsm { + if ds != nil { + res = append(res, ds) + } + } + return res +} + +// GraphSearchByFields _ +func (dsm SchemaMap) GraphSearchByFields(root id.DatasetSchemaID, fields ...id.DatasetSchemaFieldID) (SchemaList, *SchemaField) { + res := make(SchemaList, 0, len(fields)) + currentDs := dsm[root] + if currentDs == nil { + return res, nil + } + for i, f := range fields { + if currentDs == nil { + return res, nil + } + res = append(res, currentDs) + field := currentDs.Field(f) + if field == nil { + return res, nil + } + if len(fields)-1 == i { + return res, field + } else if r := field.Ref(); r != nil { + currentDs = dsm[*r] + } else { + return res, nil + } + } + return res, nil +} diff --git a/pkg/dataset/schema_list_test.go b/pkg/dataset/schema_list_test.go new file mode 100644 index 000000000..c9f17451f --- /dev/null +++ b/pkg/dataset/schema_list_test.go @@ -0,0 +1,40 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestDatasetSchemaMapGraphSearchByFields(t *testing.T) { + did1 := id.NewDatasetSchemaID() + did2 := id.NewDatasetSchemaID() + did3 := id.NewDatasetSchemaID() + fid1 := id.NewDatasetSchemaFieldID() + fid2 := id.NewDatasetSchemaFieldID() + fid3 := id.NewDatasetSchemaFieldID() + sid := id.NewSceneID() + f1, _ := NewSchemaField().ID(fid1).Type(ValueTypeString).Ref(&did2).Build() + f2, _ := NewSchemaField().ID(fid2).Type(ValueTypeString).Ref(&did3).Build() + f3, _ := NewSchemaField().ID(fid3).Type(ValueTypeString).Build() + d1, _ := NewSchema().ID(did1).Scene(sid).Fields([]*SchemaField{ + f1, + }).Build() + d2, _ := NewSchema().ID(did2).Scene(sid).Fields([]*SchemaField{ + f2, + }).Build() + d3, _ := NewSchema().ID(did3).Scene(sid).Fields([]*SchemaField{ + f3, + }).Build() + + m := SchemaList{d1, d2, d3}.Map() + + res, resf := m.GraphSearchByFields(did1, fid1, fid2, fid3) + 
assert.Equal(t, SchemaList{d1, d2, d3}, res) + assert.Equal(t, f3, resf) + + res2, resf2 := m.GraphSearchByFields(did1, fid1, fid3, fid2) + assert.Equal(t, SchemaList{d1, d2}, res2) + assert.Nil(t, resf2) +} diff --git a/pkg/dataset/source.go b/pkg/dataset/source.go new file mode 100644 index 000000000..068773b58 --- /dev/null +++ b/pkg/dataset/source.go @@ -0,0 +1,9 @@ +package dataset + +// Source _ +type Source string + +// String implements Stringer +func (d Source) String() string { + return string(d) +} diff --git a/pkg/dataset/value.go b/pkg/dataset/value.go new file mode 100644 index 000000000..100555cfb --- /dev/null +++ b/pkg/dataset/value.go @@ -0,0 +1,393 @@ +package dataset + +import ( + "net/url" + + "github.com/mitchellh/mapstructure" + "github.com/reearth/reearth-backend/pkg/id" +) + +// LatLng _ +type LatLng struct { + Lat float64 `mapstructure:"lat"` + Lng float64 `mapstructure:"lng"` +} + +// LatLngFrom _ +func LatLngFrom(m interface{}) (LatLng, bool) { + l := LatLng{} + err := mapstructure.Decode(m, &l) + return l, err == nil +} + +// LatLngHeight _ +type LatLngHeight struct { + Lat float64 `mapstructure:"lat"` + Lng float64 `mapstructure:"lng"` + Height float64 `mapstructure:"height"` +} + +// LatLngHeightFrom _ +func LatLngHeightFrom(m interface{}) (LatLngHeight, bool) { + l := LatLngHeight{} + err := mapstructure.Decode(m, &l) + return l, err == nil +} + +// ValueType _ +type ValueType string + +const ( + // ValueTypeBool _ + ValueTypeBool ValueType = "bool" + // ValueTypeNumber _ + ValueTypeNumber ValueType = "number" + // ValueTypeString _ + ValueTypeString ValueType = "string" + // ValueTypeRef _ + ValueTypeRef ValueType = "ref" + // ValueTypeURL _ + ValueTypeURL ValueType = "url" + // ValueTypeLatLng _ + ValueTypeLatLng ValueType = "latlng" + // ValueTypeLatLngHeight _ + ValueTypeLatLngHeight ValueType = "latlngheight" +) + +// ValueTypeFrom _ +func ValueTypeFrom(t string) (ValueType, bool) { + switch ValueType(t) { + case ValueTypeBool: 
+ return ValueTypeBool, true + case ValueTypeNumber: + return ValueTypeNumber, true + case ValueTypeString: + return ValueTypeString, true + case ValueTypeRef: + return ValueTypeRef, true + case ValueTypeURL: + return ValueTypeURL, true + case ValueTypeLatLng: + return ValueTypeLatLng, true + case ValueTypeLatLngHeight: + return ValueTypeLatLngHeight, true + } + return ValueType(""), false +} + +// Validate _ +func (t ValueType) Validate() (ValueType, bool) { + switch t { + case ValueTypeBool: + fallthrough + case ValueTypeNumber: + fallthrough + case ValueTypeString: + fallthrough + case ValueTypeRef: + fallthrough + case ValueTypeURL: + fallthrough + case ValueTypeLatLng: + fallthrough + case ValueTypeLatLngHeight: + return t, true + } + return t, false +} + +// Value _ +type Value struct { + v interface{} + t ValueType +} + +// Value _ +func (v *Value) Value() interface{} { + if v == nil { + return nil + } + return v.v +} + +// ValueBool _ +func (v *Value) ValueBool() *bool { + if v == nil { + return nil + } + if v2, ok := v.v.(bool); ok { + return &v2 + } + return nil +} + +// ValueNumber _ +func (v *Value) ValueNumber() *float64 { + if v == nil { + return nil + } + if v2, ok := v.v.(float64); ok { + return &v2 + } + return nil +} + +// ValueString _ +func (v *Value) ValueString() *string { + if v == nil { + return nil + } + if v2, ok := v.v.(string); ok { + return &v2 + } + return nil +} + +// ValueRef _ +func (v *Value) ValueRef() *id.ID { + if v == nil { + return nil + } + if v2, ok := v.v.(id.ID); ok { + return &v2 + } + return nil +} + +// ValueURL _ +func (v *Value) ValueURL() *url.URL { + if v == nil { + return nil + } + if v2, ok := v.v.(*url.URL); ok { + return v2 + } + return nil +} + +// ValueLatLng _ +func (v *Value) ValueLatLng() *LatLng { + if v == nil { + return nil + } + if v2, ok := v.v.(LatLng); ok { + return &v2 + } + return nil +} + +// ValueLatLngHeight _ +func (v *Value) ValueLatLngHeight() *LatLngHeight { + if v == nil { + return nil + } 
+ if v2, ok := v.v.(LatLngHeight); ok { + return &v2 + } + return nil +} + +// Type _ +func (v *Value) Type() ValueType { + if v == nil { + return ValueType("") + } + return v.t +} + +// ValueFrom _ +func (t ValueType) ValueFrom(v interface{}) *Value { + if v == nil { + return nil + } + switch t { + case ValueTypeBool: + if v2, ok := v.(bool); ok { + return &Value{v: v2, t: ValueTypeBool} + } + case ValueTypeNumber: + if v2, ok := v.(float64); ok { + return &Value{v: v2, t: ValueTypeNumber} + } + if v2, ok := v.(int); ok { + return &Value{v: float64(v2), t: ValueTypeNumber} + } + case ValueTypeString: + if v2, ok := v.(string); ok { + return &Value{v: v2, t: ValueTypeString} + } + case ValueTypeRef: + if v2, ok := v.(id.ID); ok { + return &Value{v: v2, t: ValueTypeRef} + } + if v2, ok := v.(string); ok { + if id, err := id.NewIDWith(v2); err == nil { + return &Value{v: id, t: ValueTypeRef} + } + } + case ValueTypeURL: + if v2, ok := v.(*url.URL); ok { + return &Value{v: v2, t: ValueTypeURL} + } + if v2, ok := v.(string); ok { + if u, err := url.Parse(v2); err == nil { + return &Value{v: u, t: ValueTypeURL} + } + } + case ValueTypeLatLng: + if v2, ok := v.(LatLng); ok { + return &Value{v: v2, t: ValueTypeLatLng} + } else if v2, ok := v.(*LatLng); ok { + if v2 == nil { + return nil + } + return &Value{v: *v2, t: ValueTypeLatLng} + } + v2 := LatLng{} + if err := mapstructure.Decode(v, &v2); err != nil { + return nil + } + return &Value{v: v2, t: ValueTypeLatLng} + case ValueTypeLatLngHeight: + if v2, ok := v.(LatLngHeight); ok { + return &Value{v: v2, t: ValueTypeLatLngHeight} + } else if v2, ok := v.(*LatLngHeight); ok { + if v2 == nil { + return nil + } + return &Value{v: *v2, t: ValueTypeLatLngHeight} + } + v2 := LatLngHeight{} + if err := mapstructure.Decode(v, &v2); err != nil { + return nil + } + return &Value{v: v2, t: ValueTypeLatLng} + } + return nil +} + +// ValidateValue _ +func (t ValueType) ValidateValue(v *Value) bool { + if v == nil { + return true + } 
+ vv := v.Value() + if vv == nil { + return true + } + switch t { + case ValueTypeBool: + if _, ok := vv.(bool); ok { + return true + } + case ValueTypeNumber: + if _, ok := vv.(float64); ok { + return true + } + case ValueTypeString: + if _, ok := vv.(string); ok { + return true + } + case ValueTypeRef: + if _, ok := vv.(id.ID); ok { + return true + } + case ValueTypeURL: + if _, ok := vv.(*url.URL); ok { + return true + } + case ValueTypeLatLng: + if _, ok := vv.(LatLng); ok { + return true + } + case ValueTypeLatLngHeight: + if _, ok := vv.(LatLngHeight); ok { + return true + } + } + return false +} + +// Clone _ +func (v *Value) Clone() *Value { + if v == nil { + return nil + } + var v3 interface{} + switch v2 := v.v.(type) { + case bool: + v3 = v2 + case float64: + v3 = v2 + case string: + v3 = v2 + case id.ID: + v3 = v2 + case *url.URL: + v3, _ = url.Parse(v2.String()) + case LatLng: + v3 = LatLng{Lat: v2.Lat, Lng: v2.Lng} + case LatLngHeight: + v3 = LatLngHeight{Lat: v2.Lat, Lng: v2.Lng, Height: v2.Height} + } + return &Value{v: v3, t: v.t} +} + +// ValueFrom _ +func ValueFrom(v interface{}) *Value { + if v == nil { + return nil + } + switch v2 := v.(type) { + case bool: + return &Value{v: v2, t: ValueTypeBool} + case int: + return &Value{v: float64(v2), t: ValueTypeNumber} + case float64: + return &Value{v: v2, t: ValueTypeNumber} + case string: + return &Value{v: v2, t: ValueTypeString} + case id.ID: + return &Value{v: v2, t: ValueTypeRef} + case *url.URL: + return &Value{v: v2, t: ValueTypeURL} + case LatLng: + return &Value{v: v2, t: ValueTypeLatLng} + case LatLngHeight: + return &Value{v: v2, t: ValueTypeLatLngHeight} + } + return nil +} + +// Interface converts the value into generic representation +func (v *Value) Interface() interface{} { + if v == nil { + return nil + } + switch v2 := v.Value().(type) { + case bool: + return v2 + case float64: + return v2 + case string: + return v2 + case id.ID: + return v2.String() + case *url.URL: + return 
v2.String() + case LatLng: + return encodeValue(&v2) + case LatLngHeight: + return encodeValue(&v2) + } + return nil +} + +func encodeValue(v interface{}) map[string]interface{} { + var v3 map[string]interface{} + err := mapstructure.Decode(v, &v3) + if err != nil { + return nil + } + return v3 +} diff --git a/pkg/dataset/value_test.go b/pkg/dataset/value_test.go new file mode 100644 index 000000000..6d2ea89e8 --- /dev/null +++ b/pkg/dataset/value_test.go @@ -0,0 +1,63 @@ +package dataset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValueInterface(t *testing.T) { + assert.Equal( + t, + map[string]interface{}{ + "lat": 1.2, + "lng": 1.3, + }, + ValueTypeLatLng.ValueFrom(LatLng{ + Lat: 1.2, + Lng: 1.3, + }).Interface(), + ) + + assert.Equal( + t, + map[string]interface{}{ + "lat": 1.2, + "lng": 1.3, + "height": 1.4, + }, + ValueTypeLatLngHeight.ValueFrom(LatLngHeight{ + Lat: 1.2, + Lng: 1.3, + Height: 1.4, + }).Interface(), + ) +} + +func TestValueFromInterface(t *testing.T) { + assert.Equal( + t, + LatLng{ + Lat: 1.2, + Lng: 1.3, + }, + ValueTypeLatLng.ValueFrom(map[string]interface{}{ + "lat": 1.2, + "lng": 1.3, + }).Value(), + ) + + assert.Equal( + t, + LatLngHeight{ + Lat: 1.2, + Lng: 1.3, + Height: 1.4, + }, + ValueTypeLatLngHeight.ValueFrom(map[string]interface{}{ + "lat": 1.2, + "lng": 1.3, + "height": 1.4, + }).Value(), + ) +} diff --git a/pkg/error/error.go b/pkg/error/error.go new file mode 100644 index 000000000..5f16e8061 --- /dev/null +++ b/pkg/error/error.go @@ -0,0 +1,73 @@ +package error + +import ( + "fmt" + + "github.com/pkg/errors" +) + +var ( + // ErrNotFound _ + ErrNotFound = errors.New("not found") + // ErrInvalidParams represents the params are invalid, such as empty string. 
+ ErrInvalidParams = errors.New("invalid params") + // ErrNotImplemented _ + ErrNotImplemented = errors.New("not implemented") + // ErrUserNotFound _ + ErrUserNotFound = errors.New("user is not found") +) + +// ErrInternal is an error struct that can hold an internal error but hides users the details. +type ErrInternal struct { + Err error +} + +func ErrInternalBy(err error) error { + return &ErrInternal{ + Err: err, + } +} + +func (e *ErrInternal) Error() string { + if e == nil { + return "" + } + return "internal" +} + +func (e *ErrInternal) Unwrap() error { + if e == nil { + return nil + } + return e.Err +} + +// Error can hold an error together with any label. This is useful for displaying a hierarchical error. +type Error struct { + Label string + Err error +} + +func New(label string, err error) *Error { + return &Error{ + Label: label, + Err: err, + } +} + +func (e *Error) Error() string { + if e == nil { + return "" + } + if e2, ok := e.Err.(*Error); ok { + return fmt.Sprintf("%s.%s", e.Label, e2) + } + return fmt.Sprintf("%s: %s", e.Label, e.Err) +} + +func (e *Error) Unwrap() error { + if e == nil { + return nil + } + return e.Err +} diff --git a/pkg/error/error_test.go b/pkg/error/error_test.go new file mode 100644 index 000000000..61f7f003a --- /dev/null +++ b/pkg/error/error_test.go @@ -0,0 +1,30 @@ +package error + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestErrInternal(t *testing.T) { + werr := errors.New("wrapped") + err := ErrInternalBy(werr) + var err2 *ErrInternal + assert.Equal(t, "internal", err.Error()) + assert.True(t, errors.As(err, &err2)) + assert.Same(t, werr, errors.Unwrap(err)) +} + +func TestError(t *testing.T) { + werr := errors.New("wrapped") + err := New("label", werr) + var err2 *Error + assert.Equal(t, "label: wrapped", err.Error()) + assert.True(t, errors.As(err, &err2)) + assert.Same(t, werr, errors.Unwrap(err)) + err3 := New("foo", err) + assert.Equal(t, "foo.label: wrapped", 
err3.Error()) + err4 := New("bar", err3) + assert.Equal(t, "bar.foo.label: wrapped", err4.Error()) +} diff --git a/pkg/file/file.go b/pkg/file/file.go new file mode 100644 index 000000000..d087bf15f --- /dev/null +++ b/pkg/file/file.go @@ -0,0 +1,28 @@ +package file + +import ( + "errors" + "io" +) + +var ( + // EOF _ + EOF error = errors.New("eof") +) + +// File _ +type File struct { + Content io.Reader + Name string + Fullpath string + Size int64 + ContentType string +} + +// Archive is a file like tarball. +type Archive interface { + Name() string + Size() int64 + Next() (*File, error) + Close() error +} diff --git a/pkg/i18n/string.go b/pkg/i18n/string.go new file mode 100644 index 000000000..a6d0dfb94 --- /dev/null +++ b/pkg/i18n/string.go @@ -0,0 +1,42 @@ +package i18n + +type String map[string]string // key should use BCP 47 representation + +func StringFrom(s string) String { + return String{"en": s} +} + +func (s String) Translated(lang ...string) string { + if s == nil { + return "" + } + for _, l := range lang { + if s, ok := s[l]; ok { + return s + } + } + return s.String() +} + +func (s String) Copy() String { + s2 := make(String, len(s)) + for k, v := range s { + s2[k] = v + } + return s2 +} + +func (s String) String() string { + if s == nil { + return "" + } + return s["en"] +} + +func (s String) StringRef() *string { + if s == nil { + return nil + } + st := s["en"] + return &st +} diff --git a/pkg/i18n/string_test.go b/pkg/i18n/string_test.go new file mode 100644 index 000000000..263478334 --- /dev/null +++ b/pkg/i18n/string_test.go @@ -0,0 +1,123 @@ +package i18n + +import ( + "reflect" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestString_String(t *testing.T) { + testCases := []struct { + Name, ExpectedStr string + I18nString String + }{ + { + Name: "en string", + ExpectedStr: "foo", + I18nString: String{"en": "foo"}, + }, + { + Name: "nil string", + ExpectedStr: "", + I18nString: nil, + }, + } + for _, tc := range testCases { 
+ tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.ExpectedStr, tc.I18nString.String()) + }) + } +} + +func TestStringTranslated(t *testing.T) { + testCases := []struct { + Name, Lang, ExpectedStr string + I18nString String + }{ + { + Name: "ja string", + Lang: "ja", + ExpectedStr: "fooJA", + I18nString: String{"ja": "fooJA"}, + }, + { + Name: "default string", + ExpectedStr: "foo", + Lang: "", + I18nString: String{"en": "foo"}, + }, + { + Name: "nil string", + ExpectedStr: "", + Lang: "", + I18nString: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.ExpectedStr, tc.I18nString.Translated(tc.Lang)) + }) + } +} + +func TestStringFrom(t *testing.T) { + assert.Equal(t, String{"en": "foo"}, StringFrom("foo")) +} + +func TestStringCopy(t *testing.T) { + testCases := []struct { + Name string + SourceString String + }{ + { + Name: "String with content", + SourceString: String{"ja": "foo"}, + }, + { + Name: "empty String", + SourceString: String{}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.True(tt, reflect.DeepEqual(tc.SourceString, tc.SourceString.Copy())) + }) + } +} + +func TestString_StringRef(t *testing.T) { + stringRef := func(s string) *string { + return &s + } + + testCases := []struct { + Name string + I18nString String + Expected *string + }{ + { + Name: "en string", + I18nString: String{"en": "foo"}, + Expected: stringRef("foo"), + }, + { + Name: "nil string", + I18nString: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.I18nString.StringRef()) + }) + } +} diff --git a/pkg/id/asset_gen.go b/pkg/id/asset_gen.go new file mode 100644 index 000000000..624d889e6 --- /dev/null +++ b/pkg/id/asset_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import "encoding/json" + +// AssetID is an ID for Asset. +type AssetID ID + +// NewAssetID generates a new AssetId. +func NewAssetID() AssetID { + return AssetID(New()) +} + +// AssetIDFrom generates a new AssetID from a string. +func AssetIDFrom(i string) (nid AssetID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = AssetID(did) + return +} + +// MustAssetID generates a new AssetID from a string, but panics if the string cannot be parsed. +func MustAssetID(i string) AssetID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return AssetID(did) +} + +// AssetIDFromRef generates a new AssetID from a string ref. +func AssetIDFromRef(i *string) *AssetID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := AssetID(*did) + return &nid +} + +// AssetIDFromRefID generates a new AssetID from a ref of a generic ID. +func AssetIDFromRefID(i *ID) *AssetID { + if i == nil { + return nil + } + nid := AssetID(*i) + return &nid +} + +// ID returns a domain ID. +func (d AssetID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d AssetID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d AssetID) GoString() string { + return "id.AssetID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d AssetID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d AssetID) Ref() *AssetID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *AssetID) CopyRef() *AssetID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *AssetID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *AssetID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *AssetID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *AssetID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = AssetIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *AssetID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *AssetID) UnmarshalText(text []byte) (err error) { + *d, err = AssetIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d AssetID) IsNil() bool { + return ID(d).IsNil() +} + +// AssetIDToKeys converts IDs into a string slice. +func AssetIDToKeys(ids []AssetID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// AssetIDsFrom converts a string slice into a ID slice. +func AssetIDsFrom(ids []string) ([]AssetID, error) { + dids := make([]AssetID, 0, len(ids)) + for _, i := range ids { + did, err := AssetIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// AssetIDsFromID converts a generic ID slice into a ID slice. +func AssetIDsFromID(ids []ID) []AssetID { + dids := make([]AssetID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, AssetID(i)) + } + return dids +} + +// AssetIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func AssetIDsFromIDRef(ids []*ID) []AssetID { + dids := make([]AssetID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, AssetID(*i)) + } + } + return dids +} + +// AssetIDsToID converts a ID slice into a generic ID slice. +func AssetIDsToID(ids []AssetID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// AssetIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func AssetIDsToIDRef(ids []*AssetID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// AssetIDSet represents a set of AssetIDs +type AssetIDSet struct { + m map[AssetID]struct{} + s []AssetID +} + +// NewAssetIDSet creates a new AssetIDSet +func NewAssetIDSet() *AssetIDSet { + return &AssetIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *AssetIDSet) Add(p ...AssetID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[AssetID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []AssetID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *AssetIDSet) AddRef(p *AssetID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *AssetIDSet) Has(p AssetID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *AssetIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *AssetIDSet) All() []AssetID { + if s == nil { + return nil + } + return append([]AssetID{}, s.s...) +} + +// Clone returns a cloned set +func (s *AssetIDSet) Clone() *AssetIDSet { + if s == nil { + return NewAssetIDSet() + } + s2 := NewAssetIDSet() + s2.Add(s.s...) 
+ return s2 +} + +// Merge returns a merged set +func (s *AssetIDSet) Merge(s2 *AssetIDSet) *AssetIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/dataset_gen.go b/pkg/id/dataset_gen.go new file mode 100644 index 000000000..b6fe12c24 --- /dev/null +++ b/pkg/id/dataset_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// DatasetID is an ID for Dataset. +type DatasetID ID + +// NewDatasetID generates a new DatasetId. +func NewDatasetID() DatasetID { + return DatasetID(New()) +} + +// DatasetIDFrom generates a new DatasetID from a string. +func DatasetIDFrom(i string) (nid DatasetID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = DatasetID(did) + return +} + +// MustDatasetID generates a new DatasetID from a string, but panics if the string cannot be parsed. +func MustDatasetID(i string) DatasetID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return DatasetID(did) +} + +// DatasetIDFromRef generates a new DatasetID from a string ref. +func DatasetIDFromRef(i *string) *DatasetID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := DatasetID(*did) + return &nid +} + +// DatasetIDFromRefID generates a new DatasetID from a ref of a generic ID. +func DatasetIDFromRefID(i *ID) *DatasetID { + if i == nil { + return nil + } + nid := DatasetID(*i) + return &nid +} + +// ID returns a domain ID. +func (d DatasetID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d DatasetID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d DatasetID) GoString() string { + return "id.DatasetID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. 
+func (d DatasetID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d DatasetID) Ref() *DatasetID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *DatasetID) CopyRef() *DatasetID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *DatasetID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *DatasetID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *DatasetID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *DatasetID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = DatasetIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *DatasetID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *DatasetID) UnmarshalText(text []byte) (err error) { + *d, err = DatasetIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d DatasetID) IsNil() bool { + return ID(d).IsNil() +} + +// DatasetIDToKeys converts IDs into a string slice. +func DatasetIDToKeys(ids []DatasetID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// DatasetIDsFrom converts a string slice into a ID slice. 
+func DatasetIDsFrom(ids []string) ([]DatasetID, error) { + dids := make([]DatasetID, 0, len(ids)) + for _, i := range ids { + did, err := DatasetIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// DatasetIDsFromID converts a generic ID slice into a ID slice. +func DatasetIDsFromID(ids []ID) []DatasetID { + dids := make([]DatasetID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, DatasetID(i)) + } + return dids +} + +// DatasetIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func DatasetIDsFromIDRef(ids []*ID) []DatasetID { + dids := make([]DatasetID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, DatasetID(*i)) + } + } + return dids +} + +// DatasetIDsToID converts a ID slice into a generic ID slice. +func DatasetIDsToID(ids []DatasetID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// DatasetIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
+func DatasetIDsToIDRef(ids []*DatasetID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// DatasetIDSet represents a set of DatasetIDs +type DatasetIDSet struct { + m map[DatasetID]struct{} + s []DatasetID +} + +// NewDatasetIDSet creates a new DatasetIDSet +func NewDatasetIDSet() *DatasetIDSet { + return &DatasetIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *DatasetIDSet) Add(p ...DatasetID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[DatasetID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []DatasetID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *DatasetIDSet) AddRef(p *DatasetID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *DatasetIDSet) Has(p DatasetID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *DatasetIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *DatasetIDSet) All() []DatasetID { + if s == nil { + return nil + } + return append([]DatasetID{}, s.s...) +} + +// Clone returns a cloned set +func (s *DatasetIDSet) Clone() *DatasetIDSet { + if s == nil { + return NewDatasetIDSet() + } + s2 := NewDatasetIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *DatasetIDSet) Merge(s2 *DatasetIDSet) *DatasetIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) 
+ return s3 +} diff --git a/pkg/id/dataset_schema_field_gen.go b/pkg/id/dataset_schema_field_gen.go new file mode 100644 index 000000000..b11285aec --- /dev/null +++ b/pkg/id/dataset_schema_field_gen.go @@ -0,0 +1,251 @@ +// Code generated by idgen, DO NOT EDIT. + +package id + +// DatasetSchemaFieldID is an ID for DatasetSchemaField. +type DatasetSchemaFieldID ID + +// NewDatasetSchemaFieldID generates a new DatasetSchemaFieldId. +func NewDatasetSchemaFieldID() DatasetSchemaFieldID { + return DatasetSchemaFieldID(New()) +} + +// DatasetSchemaFieldIDFrom generates a new DatasetSchemaFieldID from a string. +func DatasetSchemaFieldIDFrom(i string) (nid DatasetSchemaFieldID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = DatasetSchemaFieldID(did) + return +} + +// MustDatasetSchemaFieldID generates a new DatasetSchemaFieldID from a string, but panics if the string cannot be parsed. +func MustDatasetSchemaFieldID(i string) DatasetSchemaFieldID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return DatasetSchemaFieldID(did) +} + +// DatasetSchemaFieldIDFromRef generates a new DatasetSchemaFieldID from a string ref. +func DatasetSchemaFieldIDFromRef(i *string) *DatasetSchemaFieldID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := DatasetSchemaFieldID(*did) + return &nid +} + +// DatasetSchemaFieldIDFromRefID generates a new DatasetSchemaFieldID from a ref of a generic ID. +func DatasetSchemaFieldIDFromRefID(i *ID) *DatasetSchemaFieldID { + if i == nil { + return nil + } + nid := DatasetSchemaFieldID(*i) + return &nid +} + +// ID returns a domain ID. +func (d DatasetSchemaFieldID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d DatasetSchemaFieldID) String() string { + return ID(d).String() +} + +// RefString returns a reference of string representation. 
+func (d DatasetSchemaFieldID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d DatasetSchemaFieldID) Ref() *DatasetSchemaFieldID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *DatasetSchemaFieldID) CopyRef() *DatasetSchemaFieldID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *DatasetSchemaFieldID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *DatasetSchemaFieldID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// Ref returns true if a ID is nil or zero-value +func (d DatasetSchemaFieldID) IsNil() bool { + return ID(d).IsNil() +} + +// DatasetSchemaFieldIDToKeys converts IDs into a string slice. +func DatasetSchemaFieldIDToKeys(ids []DatasetSchemaFieldID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// DatasetSchemaFieldIDsFrom converts a string slice into a ID slice. +func DatasetSchemaFieldIDsFrom(ids []string) ([]DatasetSchemaFieldID, error) { + dids := make([]DatasetSchemaFieldID, 0, len(ids)) + for _, i := range ids { + did, err := DatasetSchemaFieldIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// DatasetSchemaFieldIDsFromID converts a generic ID slice into a ID slice. +func DatasetSchemaFieldIDsFromID(ids []ID) []DatasetSchemaFieldID { + dids := make([]DatasetSchemaFieldID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, DatasetSchemaFieldID(i)) + } + return dids +} + +// DatasetSchemaFieldIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func DatasetSchemaFieldIDsFromIDRef(ids []*ID) []DatasetSchemaFieldID { + dids := make([]DatasetSchemaFieldID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, DatasetSchemaFieldID(*i)) + } + } + return dids +} + +// DatasetSchemaFieldIDsToID converts a ID slice into a generic ID slice. +func DatasetSchemaFieldIDsToID(ids []DatasetSchemaFieldID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// DatasetSchemaFieldIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func DatasetSchemaFieldIDsToIDRef(ids []*DatasetSchemaFieldID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// DatasetSchemaFieldIDSet represents a set of DatasetSchemaFieldIDs +type DatasetSchemaFieldIDSet struct { + m map[DatasetSchemaFieldID]struct{} + s []DatasetSchemaFieldID +} + +// NewDatasetSchemaFieldIDSet creates a new DatasetSchemaFieldIDSet +func NewDatasetSchemaFieldIDSet() *DatasetSchemaFieldIDSet { + return &DatasetSchemaFieldIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *DatasetSchemaFieldIDSet) Add(p ...DatasetSchemaFieldID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[DatasetSchemaFieldID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []DatasetSchemaFieldID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *DatasetSchemaFieldIDSet) AddRef(p *DatasetSchemaFieldID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *DatasetSchemaFieldIDSet) Has(p DatasetSchemaFieldID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *DatasetSchemaFieldIDSet) Clear() { + if s == nil { + 
return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *DatasetSchemaFieldIDSet) All() []DatasetSchemaFieldID { + if s == nil { + return nil + } + return append([]DatasetSchemaFieldID{}, s.s...) +} + +// Clone returns a cloned set +func (s *DatasetSchemaFieldIDSet) Clone() *DatasetSchemaFieldIDSet { + if s == nil { + return NewDatasetSchemaFieldIDSet() + } + s2 := NewDatasetSchemaFieldIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *DatasetSchemaFieldIDSet) Merge(s2 *DatasetSchemaFieldIDSet) *DatasetSchemaFieldIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/dataset_schema_gen.go b/pkg/id/dataset_schema_gen.go new file mode 100644 index 000000000..a9d0b132a --- /dev/null +++ b/pkg/id/dataset_schema_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// DatasetSchemaID is an ID for DatasetSchema. +type DatasetSchemaID ID + +// NewDatasetSchemaID generates a new DatasetSchemaId. +func NewDatasetSchemaID() DatasetSchemaID { + return DatasetSchemaID(New()) +} + +// DatasetSchemaIDFrom generates a new DatasetSchemaID from a string. +func DatasetSchemaIDFrom(i string) (nid DatasetSchemaID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = DatasetSchemaID(did) + return +} + +// MustDatasetSchemaID generates a new DatasetSchemaID from a string, but panics if the string cannot be parsed. +func MustDatasetSchemaID(i string) DatasetSchemaID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return DatasetSchemaID(did) +} + +// DatasetSchemaIDFromRef generates a new DatasetSchemaID from a string ref. 
+func DatasetSchemaIDFromRef(i *string) *DatasetSchemaID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := DatasetSchemaID(*did) + return &nid +} + +// DatasetSchemaIDFromRefID generates a new DatasetSchemaID from a ref of a generic ID. +func DatasetSchemaIDFromRefID(i *ID) *DatasetSchemaID { + if i == nil { + return nil + } + nid := DatasetSchemaID(*i) + return &nid +} + +// ID returns a domain ID. +func (d DatasetSchemaID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d DatasetSchemaID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d DatasetSchemaID) GoString() string { + return "id.DatasetSchemaID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d DatasetSchemaID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d DatasetSchemaID) Ref() *DatasetSchemaID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *DatasetSchemaID) CopyRef() *DatasetSchemaID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *DatasetSchemaID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *DatasetSchemaID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *DatasetSchemaID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *DatasetSchemaID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = DatasetSchemaIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *DatasetSchemaID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *DatasetSchemaID) UnmarshalText(text []byte) (err error) { + *d, err = DatasetSchemaIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d DatasetSchemaID) IsNil() bool { + return ID(d).IsNil() +} + +// DatasetSchemaIDToKeys converts IDs into a string slice. +func DatasetSchemaIDToKeys(ids []DatasetSchemaID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// DatasetSchemaIDsFrom converts a string slice into a ID slice. +func DatasetSchemaIDsFrom(ids []string) ([]DatasetSchemaID, error) { + dids := make([]DatasetSchemaID, 0, len(ids)) + for _, i := range ids { + did, err := DatasetSchemaIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// DatasetSchemaIDsFromID converts a generic ID slice into a ID slice. +func DatasetSchemaIDsFromID(ids []ID) []DatasetSchemaID { + dids := make([]DatasetSchemaID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, DatasetSchemaID(i)) + } + return dids +} + +// DatasetSchemaIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func DatasetSchemaIDsFromIDRef(ids []*ID) []DatasetSchemaID { + dids := make([]DatasetSchemaID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, DatasetSchemaID(*i)) + } + } + return dids +} + +// DatasetSchemaIDsToID converts a ID slice into a generic ID slice. +func DatasetSchemaIDsToID(ids []DatasetSchemaID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// DatasetSchemaIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func DatasetSchemaIDsToIDRef(ids []*DatasetSchemaID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// DatasetSchemaIDSet represents a set of DatasetSchemaIDs +type DatasetSchemaIDSet struct { + m map[DatasetSchemaID]struct{} + s []DatasetSchemaID +} + +// NewDatasetSchemaIDSet creates a new DatasetSchemaIDSet +func NewDatasetSchemaIDSet() *DatasetSchemaIDSet { + return &DatasetSchemaIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *DatasetSchemaIDSet) Add(p ...DatasetSchemaID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[DatasetSchemaID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []DatasetSchemaID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *DatasetSchemaIDSet) AddRef(p *DatasetSchemaID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *DatasetSchemaIDSet) Has(p DatasetSchemaID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *DatasetSchemaIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *DatasetSchemaIDSet) All() []DatasetSchemaID { + if s 
== nil { + return nil + } + return append([]DatasetSchemaID{}, s.s...) +} + +// Clone returns a cloned set +func (s *DatasetSchemaIDSet) Clone() *DatasetSchemaIDSet { + if s == nil { + return NewDatasetSchemaIDSet() + } + s2 := NewDatasetSchemaIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *DatasetSchemaIDSet) Merge(s2 *DatasetSchemaIDSet) *DatasetSchemaIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/gen.go b/pkg/id/gen.go new file mode 100644 index 000000000..a14d534d6 --- /dev/null +++ b/pkg/id/gen.go @@ -0,0 +1,13 @@ +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=asset_gen.go --name=Asset +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_gen.go --name=Dataset +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_gen.go --name=DatasetSchema +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=layer_gen.go --name=Layer +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=project_gen.go --name=Project +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=property_gen.go --name=Property +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=property_item_gen.go --name=PropertyItem +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=scene_gen.go --name=Scene +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=widget_gen.go --name=Widget +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=team_gen.go --name=Team +//go:generate go run 
github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=user_gen.go --name=User + +package id diff --git a/pkg/id/id.go b/pkg/id/id.go new file mode 100644 index 000000000..8645ad070 --- /dev/null +++ b/pkg/id/id.go @@ -0,0 +1,141 @@ +package id + +import ( + "errors" + "math/rand" + "strings" + "sync" + "time" + + "github.com/oklog/ulid" +) + +var ( + entropyLock sync.Mutex + // not safe for concurrent + entropy = ulid.Monotonic(rand.New(rand.NewSource(time.Now().UnixNano())), 0) + ErrInvalidID = errors.New("invalid ID") +) + +type ID struct { + id ulid.ULID +} + +func New() ID { + return ID{generateID()} +} + +func NewAllID(n int) []ID { + if n <= 0 { + return []ID{} + } + if n == 1 { + return []ID{New()} + } + ids := make([]ID, 0, n) + generated := generateAllID(n) + for _, id := range generated { + ids = append(ids, ID{id}) + } + return ids +} + +func NewIDWith(id string) (ID, error) { + return FromID(id) +} + +func FromID(id string) (ID, error) { + parsedID, e := parseID(id) + if e != nil { + return ID{}, ErrInvalidID + } + return ID{parsedID}, nil +} + +func FromIDRef(id *string) *ID { + if id == nil { + return nil + } + parsedID, err := parseID(*id) + if err != nil { + return nil + } + nid := ID{parsedID} + return &nid +} + +func MustBeID(id string) ID { + parsedID, err := parseID(id) + if err != nil { + panic("invalid id") + } + return ID{parsedID} +} + +func (i ID) Copy() ID { + return ID{i.id} +} + +func (i ID) Timestamp() time.Time { + return ulid.Time(i.id.Time()) +} + +// String implements fmt.Stringer interface. +func (i ID) String() string { + return strings.ToLower(ulid.ULID(i.id).String()) +} + +// GoString implements fmt.GoStringer interface. 
+func (i ID) GoString() string { + return "id.ID(" + i.String() + ")" +} + +func (i ID) IsNil() bool { + return i.id.Compare(ulid.ULID{}) == 0 +} + +func (i ID) Compare(i2 ID) int { + return i.id.Compare(i2.id) +} + +func (i ID) Equal(i2 ID) bool { + return i.id.Compare(i2.id) == 0 +} + +func (i *ID) IsEmpty() bool { + return i == nil || (*i).IsNil() +} + +func generateID() ulid.ULID { + entropyLock.Lock() + newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) + entropyLock.Unlock() + return newID +} + +func generateAllID(n int) []ulid.ULID { + ids := make([]ulid.ULID, 0, n) + entropyLock.Lock() + for i := 0; i < n; i++ { + newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) + ids = append(ids, newID) + } + entropyLock.Unlock() + return ids +} + +func parseID(id string) (parsedID ulid.ULID, e error) { + if includeUpperCase(id) { + return parsedID, ErrInvalidID + } + return ulid.Parse(id) +} + +func includeUpperCase(s string) bool { + for _, c := range s { + if 'A' <= c && c <= 'Z' { + return true + } + } + return false +} diff --git a/pkg/id/id.tmpl b/pkg/id/id.tmpl new file mode 100644 index 000000000..402f4d405 --- /dev/null +++ b/pkg/id/id.tmpl @@ -0,0 +1,287 @@ +{{ $name := index .Flags.name 0 }} + +package {{.PackageName}} + +import "encoding/json" + +// {{$name}}ID is an ID for {{$name}}. +type {{$name}}ID ID + +// New{{$name}}ID generates a new {{$name}}Id. +func New{{$name}}ID() {{$name}}ID { + return {{$name}}ID(New()) +} + +// {{$name}}IDFrom generates a new {{$name}}ID from a string. +func {{$name}}IDFrom(i string) (nid {{$name}}ID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = {{$name}}ID(did) + return +} + +// Must{{$name}}ID generates a new {{$name}}ID from a string, but panics if the string cannot be parsed. 
+func Must{{$name}}ID(i string) {{$name}}ID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return {{$name}}ID(did) +} + +// {{$name}}IDFromRef generates a new {{$name}}ID from a string ref. +func {{$name}}IDFromRef(i *string) *{{$name}}ID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := {{$name}}ID(*did) + return &nid +} + +// {{$name}}IDFromRefID generates a new {{$name}}ID from a ref of a generic ID. +func {{$name}}IDFromRefID(i *ID) *{{$name}}ID { + if i == nil { + return nil + } + nid := {{$name}}ID(*i) + return &nid +} + +// ID returns a domain ID. +func (d {{$name}}ID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d {{$name}}ID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d {{$name}}ID) GoString() string { + return "id.{{$name}}ID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d {{$name}}ID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d {{$name}}ID) Ref() *{{$name}}ID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *{{$name}}ID) CopyRef() *{{$name}}ID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *{{$name}}ID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *{{$name}}ID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *{{$name}}ID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *{{$name}}ID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = {{$name}}IDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *{{$name}}ID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *{{$name}}ID) UnmarshalText(text []byte) (err error) { + *d, err = {{$name}}IDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d {{$name}}ID) IsNil() bool { + return ID(d).IsNil() +} + +// {{$name}}IDToKeys converts IDs into a string slice. +func {{$name}}IDToKeys(ids []{{$name}}ID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// {{$name}}IDsFrom converts a string slice into a ID slice. +func {{$name}}IDsFrom(ids []string) ([]{{$name}}ID, error) { + dids := make([]{{$name}}ID, 0, len(ids)) + for _, i := range ids { + did, err := {{$name}}IDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// {{$name}}IDsFromID converts a generic ID slice into a ID slice. +func {{$name}}IDsFromID(ids []ID) []{{$name}}ID { + dids := make([]{{$name}}ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, {{$name}}ID(i)) + } + return dids +} + +// {{$name}}IDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func {{$name}}IDsFromIDRef(ids []*ID) []{{$name}}ID { + dids := make([]{{$name}}ID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, {{$name}}ID(*i)) + } + } + return dids +} + +// {{$name}}IDsToID converts a ID slice into a generic ID slice. +func {{$name}}IDsToID(ids []{{$name}}ID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// {{$name}}IDsToIDRef converts a ID ref slice into a generic ID ref slice. +func {{$name}}IDsToIDRef(ids []*{{$name}}ID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// {{$name}}IDSet represents a set of {{$name}}IDs +type {{$name}}IDSet struct { + m map[{{$name}}ID]struct{} + s []{{$name}}ID +} + +// New{{$name}}IDSet creates a new {{$name}}IDSet +func New{{$name}}IDSet() *{{$name}}IDSet { + return &{{$name}}IDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *{{$name}}IDSet) Add(p ...{{$name}}ID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[{{$name}}ID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []{{$name}}ID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *{{$name}}IDSet) AddRef(p *{{$name}}ID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *{{$name}}IDSet) Has(p {{$name}}ID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *{{$name}}IDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *{{$name}}IDSet) All() []{{$name}}ID { + if s == nil { + return nil + } + return append([]{{$name}}ID{}, s.s...) 
+} + +// Clone returns a cloned set +func (s *{{$name}}IDSet) Clone() *{{$name}}IDSet { + if s == nil { + return New{{$name}}IDSet() + } + s2 := New{{$name}}IDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *{{$name}}IDSet) Merge(s2 *{{$name}}IDSet) *{{$name}}IDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/infobox_field_gen.go b/pkg/id/infobox_field_gen.go new file mode 100644 index 000000000..408b04af0 --- /dev/null +++ b/pkg/id/infobox_field_gen.go @@ -0,0 +1,251 @@ +// Code generated by idgen, DO NOT EDIT. + +package id + +// InfoboxFieldID is an ID for InfoboxField. +type InfoboxFieldID ID + +// NewInfoboxFieldID generates a new InfoboxFieldId. +func NewInfoboxFieldID() InfoboxFieldID { + return InfoboxFieldID(New()) +} + +// InfoboxFieldIDFrom generates a new InfoboxFieldID from a string. +func InfoboxFieldIDFrom(i string) (nid InfoboxFieldID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = InfoboxFieldID(did) + return +} + +// MustInfoboxFieldID generates a new InfoboxFieldID from a string, but panics if the string cannot be parsed. +func MustInfoboxFieldID(i string) InfoboxFieldID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return InfoboxFieldID(did) +} + +// InfoboxFieldIDFromRef generates a new InfoboxFieldID from a string ref. +func InfoboxFieldIDFromRef(i *string) *InfoboxFieldID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := InfoboxFieldID(*did) + return &nid +} + +// InfoboxFieldIDFromRefID generates a new InfoboxFieldID from a ref of a generic ID. +func InfoboxFieldIDFromRefID(i *ID) *InfoboxFieldID { + if i == nil { + return nil + } + nid := InfoboxFieldID(*i) + return &nid +} + +// ID returns a domain ID. +func (d InfoboxFieldID) ID() ID { + return ID(d) +} + +// String returns a string representation. 
+func (d InfoboxFieldID) String() string { + return ID(d).String() +} + +// RefString returns a reference of string representation. +func (d InfoboxFieldID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d InfoboxFieldID) Ref() *InfoboxFieldID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *InfoboxFieldID) CopyRef() *InfoboxFieldID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *InfoboxFieldID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *InfoboxFieldID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// Ref returns true if a ID is nil or zero-value +func (d InfoboxFieldID) IsNil() bool { + return ID(d).IsNil() +} + +// InfoboxFieldIDToKeys converts IDs into a string slice. +func InfoboxFieldIDToKeys(ids []InfoboxFieldID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// InfoboxFieldIDsFrom converts a string slice into a ID slice. +func InfoboxFieldIDsFrom(ids []string) ([]InfoboxFieldID, error) { + dids := make([]InfoboxFieldID, 0, len(ids)) + for _, i := range ids { + did, err := InfoboxFieldIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// InfoboxFieldIDsFromID converts a generic ID slice into a ID slice. +func InfoboxFieldIDsFromID(ids []ID) []InfoboxFieldID { + dids := make([]InfoboxFieldID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, InfoboxFieldID(i)) + } + return dids +} + +// InfoboxFieldIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func InfoboxFieldIDsFromIDRef(ids []*ID) []InfoboxFieldID { + dids := make([]InfoboxFieldID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, InfoboxFieldID(*i)) + } + } + return dids +} + +// InfoboxFieldIDsToID converts a ID slice into a generic ID slice. +func InfoboxFieldIDsToID(ids []InfoboxFieldID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// InfoboxFieldIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func InfoboxFieldIDsToIDRef(ids []*InfoboxFieldID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// InfoboxFieldIDSet represents a set of InfoboxFieldIDs +type InfoboxFieldIDSet struct { + m map[InfoboxFieldID]struct{} + s []InfoboxFieldID +} + +// NewInfoboxFieldIDSet creates a new InfoboxFieldIDSet +func NewInfoboxFieldIDSet() *InfoboxFieldIDSet { + return &InfoboxFieldIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *InfoboxFieldIDSet) Add(p ...InfoboxFieldID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[InfoboxFieldID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []InfoboxFieldID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *InfoboxFieldIDSet) AddRef(p *InfoboxFieldID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *InfoboxFieldIDSet) Has(p InfoboxFieldID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *InfoboxFieldIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *InfoboxFieldIDSet) All() []InfoboxFieldID { + if s == nil { + return nil + } + 
return append([]InfoboxFieldID{}, s.s...) +} + +// Clone returns a cloned set +func (s *InfoboxFieldIDSet) Clone() *InfoboxFieldIDSet { + if s == nil { + return NewInfoboxFieldIDSet() + } + s2 := NewInfoboxFieldIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *InfoboxFieldIDSet) Merge(s2 *InfoboxFieldIDSet) *InfoboxFieldIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/layer_gen.go b/pkg/id/layer_gen.go new file mode 100644 index 000000000..0d10e9eb9 --- /dev/null +++ b/pkg/id/layer_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// LayerID is an ID for Layer. +type LayerID ID + +// NewLayerID generates a new LayerId. +func NewLayerID() LayerID { + return LayerID(New()) +} + +// LayerIDFrom generates a new LayerID from a string. +func LayerIDFrom(i string) (nid LayerID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = LayerID(did) + return +} + +// MustLayerID generates a new LayerID from a string, but panics if the string cannot be parsed. +func MustLayerID(i string) LayerID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return LayerID(did) +} + +// LayerIDFromRef generates a new LayerID from a string ref. +func LayerIDFromRef(i *string) *LayerID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := LayerID(*did) + return &nid +} + +// LayerIDFromRefID generates a new LayerID from a ref of a generic ID. +func LayerIDFromRefID(i *ID) *LayerID { + if i == nil { + return nil + } + nid := LayerID(*i) + return &nid +} + +// ID returns a domain ID. +func (d LayerID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d LayerID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. 
+func (d LayerID) GoString() string { + return "id.LayerID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d LayerID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d LayerID) Ref() *LayerID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *LayerID) CopyRef() *LayerID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *LayerID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *LayerID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *LayerID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *LayerID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = LayerIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *LayerID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *LayerID) UnmarshalText(text []byte) (err error) { + *d, err = LayerIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d LayerID) IsNil() bool { + return ID(d).IsNil() +} + +// LayerIDToKeys converts IDs into a string slice. +func LayerIDToKeys(ids []LayerID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// LayerIDsFrom converts a string slice into a ID slice. 
+func LayerIDsFrom(ids []string) ([]LayerID, error) { + dids := make([]LayerID, 0, len(ids)) + for _, i := range ids { + did, err := LayerIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// LayerIDsFromID converts a generic ID slice into a ID slice. +func LayerIDsFromID(ids []ID) []LayerID { + dids := make([]LayerID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, LayerID(i)) + } + return dids +} + +// LayerIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func LayerIDsFromIDRef(ids []*ID) []LayerID { + dids := make([]LayerID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, LayerID(*i)) + } + } + return dids +} + +// LayerIDsToID converts a ID slice into a generic ID slice. +func LayerIDsToID(ids []LayerID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// LayerIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
+func LayerIDsToIDRef(ids []*LayerID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// LayerIDSet represents a set of LayerIDs +type LayerIDSet struct { + m map[LayerID]struct{} + s []LayerID +} + +// NewLayerIDSet creates a new LayerIDSet +func NewLayerIDSet() *LayerIDSet { + return &LayerIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *LayerIDSet) Add(p ...LayerID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[LayerID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []LayerID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *LayerIDSet) AddRef(p *LayerID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *LayerIDSet) Has(p LayerID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *LayerIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *LayerIDSet) All() []LayerID { + if s == nil { + return nil + } + return append([]LayerID{}, s.s...) +} + +// Clone returns a cloned set +func (s *LayerIDSet) Clone() *LayerIDSet { + if s == nil { + return NewLayerIDSet() + } + s2 := NewLayerIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *LayerIDSet) Merge(s2 *LayerIDSet) *LayerIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) 
+ return s3 +} diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go new file mode 100644 index 000000000..5755f0cf3 --- /dev/null +++ b/pkg/id/plugin.go @@ -0,0 +1,169 @@ +package id + +import ( + "regexp" + "strings" + + "github.com/blang/semver" +) + +// MUST NOT CHANGE +const officialPluginIDStr = "reearth" + +// OfficialPluginID _ +var OfficialPluginID = PluginID{name: officialPluginIDStr, sys: true} + +// PluginID is an ID for Plugin. +type PluginID struct { + name string + version string + sys bool +} + +var pluginNameRe = regexp.MustCompile("^[a-zA-Z0-9._-]+$") + +func validatePluginName(s string) bool { + if len(s) == 0 || len(s) > 100 || s == "reearth" || strings.Contains(s, "/") { + return false + } + return pluginNameRe.MatchString(s) +} + +// PluginIDFrom generates a new id.PluginID from a string. +func PluginIDFrom(id string) (PluginID, error) { + if id == officialPluginIDStr { + // official plugin + return PluginID{name: id, sys: true}, nil + } + + ids := strings.Split(id, "#") + if len(ids) != 2 || !validatePluginName(ids[0]) { + return PluginID{}, ErrInvalidID + } + v, err2 := semver.Parse(ids[1]) + if err2 != nil { + return PluginID{}, ErrInvalidID + } + return PluginID{name: ids[0], version: v.String()}, nil +} + +// MustPluginID generates a new id.PluginID from a string, but panics if the string cannot be parsed. +func MustPluginID(id string) PluginID { + did, err := PluginIDFrom(id) + if err != nil { + panic(err) + } + return did +} + +// PluginIDFromRef generates a new id.PluginID from a string ref. +func PluginIDFromRef(id *string) *PluginID { + if id == nil { + return nil + } + did, err := PluginIDFrom(*id) + if err != nil { + return nil + } + return &did +} + +// Name returns a name. +func (d PluginID) Name() string { + return d.name +} + +// Version returns a version. 
+func (d PluginID) Version() semver.Version { + if d.version == "" { + return semver.Version{} + } + v, err := semver.Parse(d.version) + if err != nil { + return semver.Version{} + } + return v +} + +// System returns if the ID is built-in. +func (d PluginID) System() bool { + return d.sys +} + +// Validate returns true if id is valid. +func (d PluginID) Validate() bool { + if d.sys { + return true + } + return validatePluginName(d.name) +} + +// String returns a string representation. +func (d PluginID) String() string { + if d.sys { + return d.name + } + return d.name + "#" + d.version +} + +// Ref returns a reference. +func (d PluginID) Ref() *PluginID { + d2 := d + return &d2 +} + +// CopyRef _ +func (d *PluginID) CopyRef() *PluginID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// StringRef returns a reference of a string representation. +func (d *PluginID) StringRef() *string { + if d == nil { + return nil + } + id := (*d).String() + return &id +} + +// Equal returns if two IDs are quivarent. +func (d PluginID) Equal(d2 PluginID) bool { + return d.name == d2.name && d.version == d2.version +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *PluginID) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *PluginID) UnmarshalText(text []byte) (err error) { + *d, err = PluginIDFrom(string(text)) + return +} + +// PluginIDToKeys converts IDs into a string slice. +func PluginIDToKeys(ids []PluginID) []string { + keys := make([]string, 0, len(ids)) + for _, id := range ids { + keys = append(keys, id.String()) + } + return keys +} + +// PluginIDsFrom converts a string slice into a ID slice. 
+func PluginIDsFrom(ids []string) ([]PluginID, error) { + dids := make([]PluginID, 0, len(ids)) + for _, id := range ids { + did, err := PluginIDFrom(id) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} diff --git a/pkg/id/plugin_extension.go b/pkg/id/plugin_extension.go new file mode 100644 index 000000000..4be0d7c25 --- /dev/null +++ b/pkg/id/plugin_extension.go @@ -0,0 +1,41 @@ +package id + +// PluginExtensionID _ +type PluginExtensionID string + +// PluginExtensionIDFromRef _ +func PluginExtensionIDFromRef(id *string) *PluginExtensionID { + if id == nil { + return nil + } + id2 := PluginExtensionID(*id) + return &id2 +} + +// Ref _ +func (id PluginExtensionID) Ref() *PluginExtensionID { + return &id +} + +// CopyRef _ +func (id *PluginExtensionID) CopyRef() *PluginExtensionID { + if id == nil { + return nil + } + id2 := *id + return &id2 +} + +// String _ +func (id PluginExtensionID) String() string { + return string(id) +} + +// StringRef _ +func (id *PluginExtensionID) StringRef() *string { + if id == nil { + return nil + } + id2 := string(*id) + return &id2 +} diff --git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go new file mode 100644 index 000000000..8a3fca172 --- /dev/null +++ b/pkg/id/plugin_test.go @@ -0,0 +1,24 @@ +package id + +import ( + "encoding" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ encoding.TextMarshaler = (*PluginID)(nil) +var _ encoding.TextUnmarshaler = (*PluginID)(nil) + +func TestPluginIDValidator(t *testing.T) { + assert.True(t, validatePluginName("1cc.1_c-d"), "1cc.1_c-d") + assert.True(t, validatePluginName(strings.Repeat("a", 100)), "100 chars") + assert.False(t, validatePluginName(""), "empty") + assert.False(t, validatePluginName(" "), "space") + assert.False(t, validatePluginName("@bbb/aa-a_a"), "@bbb/aa-a_a") + assert.False(t, validatePluginName("bbb a"), "bbb a") + assert.False(t, validatePluginName("cccd="), "cccd=") + assert.False(t, 
validatePluginName("reearth"), "reearth") + assert.False(t, validatePluginName(strings.Repeat("a", 101)), "over 100 chars") +} diff --git a/pkg/id/project_gen.go b/pkg/id/project_gen.go new file mode 100644 index 000000000..41c10b7ff --- /dev/null +++ b/pkg/id/project_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// ProjectID is an ID for Project. +type ProjectID ID + +// NewProjectID generates a new ProjectId. +func NewProjectID() ProjectID { + return ProjectID(New()) +} + +// ProjectIDFrom generates a new ProjectID from a string. +func ProjectIDFrom(i string) (nid ProjectID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = ProjectID(did) + return +} + +// MustProjectID generates a new ProjectID from a string, but panics if the string cannot be parsed. +func MustProjectID(i string) ProjectID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return ProjectID(did) +} + +// ProjectIDFromRef generates a new ProjectID from a string ref. +func ProjectIDFromRef(i *string) *ProjectID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := ProjectID(*did) + return &nid +} + +// ProjectIDFromRefID generates a new ProjectID from a ref of a generic ID. +func ProjectIDFromRefID(i *ID) *ProjectID { + if i == nil { + return nil + } + nid := ProjectID(*i) + return &nid +} + +// ID returns a domain ID. +func (d ProjectID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d ProjectID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d ProjectID) GoString() string { + return "id.ProjectID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d ProjectID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. 
+func (d ProjectID) Ref() *ProjectID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *ProjectID) CopyRef() *ProjectID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *ProjectID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *ProjectID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *ProjectID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *ProjectID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = ProjectIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *ProjectID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *ProjectID) UnmarshalText(text []byte) (err error) { + *d, err = ProjectIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d ProjectID) IsNil() bool { + return ID(d).IsNil() +} + +// ProjectIDToKeys converts IDs into a string slice. +func ProjectIDToKeys(ids []ProjectID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// ProjectIDsFrom converts a string slice into a ID slice. 
+func ProjectIDsFrom(ids []string) ([]ProjectID, error) { + dids := make([]ProjectID, 0, len(ids)) + for _, i := range ids { + did, err := ProjectIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// ProjectIDsFromID converts a generic ID slice into a ID slice. +func ProjectIDsFromID(ids []ID) []ProjectID { + dids := make([]ProjectID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, ProjectID(i)) + } + return dids +} + +// ProjectIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func ProjectIDsFromIDRef(ids []*ID) []ProjectID { + dids := make([]ProjectID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, ProjectID(*i)) + } + } + return dids +} + +// ProjectIDsToID converts a ID slice into a generic ID slice. +func ProjectIDsToID(ids []ProjectID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// ProjectIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
+func ProjectIDsToIDRef(ids []*ProjectID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// ProjectIDSet represents a set of ProjectIDs +type ProjectIDSet struct { + m map[ProjectID]struct{} + s []ProjectID +} + +// NewProjectIDSet creates a new ProjectIDSet +func NewProjectIDSet() *ProjectIDSet { + return &ProjectIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *ProjectIDSet) Add(p ...ProjectID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[ProjectID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []ProjectID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *ProjectIDSet) AddRef(p *ProjectID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *ProjectIDSet) Has(p ProjectID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *ProjectIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *ProjectIDSet) All() []ProjectID { + if s == nil { + return nil + } + return append([]ProjectID{}, s.s...) +} + +// Clone returns a cloned set +func (s *ProjectIDSet) Clone() *ProjectIDSet { + if s == nil { + return NewProjectIDSet() + } + s2 := NewProjectIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *ProjectIDSet) Merge(s2 *ProjectIDSet) *ProjectIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/property_gen.go b/pkg/id/property_gen.go new file mode 100644 index 000000000..7c8ea8759 --- /dev/null +++ b/pkg/id/property_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import "encoding/json" + +// PropertyID is an ID for Property. +type PropertyID ID + +// NewPropertyID generates a new PropertyId. +func NewPropertyID() PropertyID { + return PropertyID(New()) +} + +// PropertyIDFrom generates a new PropertyID from a string. +func PropertyIDFrom(i string) (nid PropertyID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = PropertyID(did) + return +} + +// MustPropertyID generates a new PropertyID from a string, but panics if the string cannot be parsed. +func MustPropertyID(i string) PropertyID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return PropertyID(did) +} + +// PropertyIDFromRef generates a new PropertyID from a string ref. +func PropertyIDFromRef(i *string) *PropertyID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := PropertyID(*did) + return &nid +} + +// PropertyIDFromRefID generates a new PropertyID from a ref of a generic ID. +func PropertyIDFromRefID(i *ID) *PropertyID { + if i == nil { + return nil + } + nid := PropertyID(*i) + return &nid +} + +// ID returns a domain ID. +func (d PropertyID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d PropertyID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d PropertyID) GoString() string { + return "id.PropertyID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d PropertyID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d PropertyID) Ref() *PropertyID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *PropertyID) CopyRef() *PropertyID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. 
+func (d *PropertyID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *PropertyID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *PropertyID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *PropertyID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = PropertyIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *PropertyID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *PropertyID) UnmarshalText(text []byte) (err error) { + *d, err = PropertyIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d PropertyID) IsNil() bool { + return ID(d).IsNil() +} + +// PropertyIDToKeys converts IDs into a string slice. +func PropertyIDToKeys(ids []PropertyID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// PropertyIDsFrom converts a string slice into a ID slice. +func PropertyIDsFrom(ids []string) ([]PropertyID, error) { + dids := make([]PropertyID, 0, len(ids)) + for _, i := range ids { + did, err := PropertyIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// PropertyIDsFromID converts a generic ID slice into a ID slice. 
+func PropertyIDsFromID(ids []ID) []PropertyID { + dids := make([]PropertyID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, PropertyID(i)) + } + return dids +} + +// PropertyIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func PropertyIDsFromIDRef(ids []*ID) []PropertyID { + dids := make([]PropertyID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, PropertyID(*i)) + } + } + return dids +} + +// PropertyIDsToID converts a ID slice into a generic ID slice. +func PropertyIDsToID(ids []PropertyID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// PropertyIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func PropertyIDsToIDRef(ids []*PropertyID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// PropertyIDSet represents a set of PropertyIDs +type PropertyIDSet struct { + m map[PropertyID]struct{} + s []PropertyID +} + +// NewPropertyIDSet creates a new PropertyIDSet +func NewPropertyIDSet() *PropertyIDSet { + return &PropertyIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *PropertyIDSet) Add(p ...PropertyID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[PropertyID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []PropertyID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *PropertyIDSet) AddRef(p *PropertyID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *PropertyIDSet) Has(p PropertyID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *PropertyIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil 
+} + +// All returns stored all IDs as a slice +func (s *PropertyIDSet) All() []PropertyID { + if s == nil { + return nil + } + return append([]PropertyID{}, s.s...) +} + +// Clone returns a cloned set +func (s *PropertyIDSet) Clone() *PropertyIDSet { + if s == nil { + return NewPropertyIDSet() + } + s2 := NewPropertyIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *PropertyIDSet) Merge(s2 *PropertyIDSet) *PropertyIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/property_item_gen.go b/pkg/id/property_item_gen.go new file mode 100644 index 000000000..8344f3a05 --- /dev/null +++ b/pkg/id/property_item_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// PropertyItemID is an ID for PropertyItem. +type PropertyItemID ID + +// NewPropertyItemID generates a new PropertyItemId. +func NewPropertyItemID() PropertyItemID { + return PropertyItemID(New()) +} + +// PropertyItemIDFrom generates a new PropertyItemID from a string. +func PropertyItemIDFrom(i string) (nid PropertyItemID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = PropertyItemID(did) + return +} + +// MustPropertyItemID generates a new PropertyItemID from a string, but panics if the string cannot be parsed. +func MustPropertyItemID(i string) PropertyItemID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return PropertyItemID(did) +} + +// PropertyItemIDFromRef generates a new PropertyItemID from a string ref. +func PropertyItemIDFromRef(i *string) *PropertyItemID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := PropertyItemID(*did) + return &nid +} + +// PropertyItemIDFromRefID generates a new PropertyItemID from a ref of a generic ID. 
+func PropertyItemIDFromRefID(i *ID) *PropertyItemID { + if i == nil { + return nil + } + nid := PropertyItemID(*i) + return &nid +} + +// ID returns a domain ID. +func (d PropertyItemID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d PropertyItemID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d PropertyItemID) GoString() string { + return "id.PropertyItemID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d PropertyItemID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d PropertyItemID) Ref() *PropertyItemID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *PropertyItemID) CopyRef() *PropertyItemID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *PropertyItemID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *PropertyItemID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *PropertyItemID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *PropertyItemID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = PropertyItemIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *PropertyItemID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *PropertyItemID) UnmarshalText(text []byte) (err error) { + *d, err = PropertyItemIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d PropertyItemID) IsNil() bool { + return ID(d).IsNil() +} + +// PropertyItemIDToKeys converts IDs into a string slice. +func PropertyItemIDToKeys(ids []PropertyItemID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// PropertyItemIDsFrom converts a string slice into a ID slice. +func PropertyItemIDsFrom(ids []string) ([]PropertyItemID, error) { + dids := make([]PropertyItemID, 0, len(ids)) + for _, i := range ids { + did, err := PropertyItemIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// PropertyItemIDsFromID converts a generic ID slice into a ID slice. +func PropertyItemIDsFromID(ids []ID) []PropertyItemID { + dids := make([]PropertyItemID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, PropertyItemID(i)) + } + return dids +} + +// PropertyItemIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func PropertyItemIDsFromIDRef(ids []*ID) []PropertyItemID { + dids := make([]PropertyItemID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, PropertyItemID(*i)) + } + } + return dids +} + +// PropertyItemIDsToID converts a ID slice into a generic ID slice. +func PropertyItemIDsToID(ids []PropertyItemID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// PropertyItemIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func PropertyItemIDsToIDRef(ids []*PropertyItemID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// PropertyItemIDSet represents a set of PropertyItemIDs +type PropertyItemIDSet struct { + m map[PropertyItemID]struct{} + s []PropertyItemID +} + +// NewPropertyItemIDSet creates a new PropertyItemIDSet +func NewPropertyItemIDSet() *PropertyItemIDSet { + return &PropertyItemIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *PropertyItemIDSet) Add(p ...PropertyItemID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[PropertyItemID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []PropertyItemID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *PropertyItemIDSet) AddRef(p *PropertyItemID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *PropertyItemIDSet) Has(p PropertyItemID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *PropertyItemIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *PropertyItemIDSet) All() []PropertyItemID { + if s == nil { + return nil + } + 
return append([]PropertyItemID{}, s.s...) +} + +// Clone returns a cloned set +func (s *PropertyItemIDSet) Clone() *PropertyItemIDSet { + if s == nil { + return NewPropertyItemIDSet() + } + s2 := NewPropertyItemIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *PropertyItemIDSet) Merge(s2 *PropertyItemIDSet) *PropertyItemIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go new file mode 100644 index 000000000..6f9b4228c --- /dev/null +++ b/pkg/id/property_schema.go @@ -0,0 +1,145 @@ +package id + +import ( + "regexp" + "strings" +) + +const schemaSystemIDPrefix = "reearth" + +var schemaNameRe = regexp.MustCompile("^[a-zA-Z0-9._-]+$") + +// PropertySchemaID is an ID for PropertySchema. +type PropertySchemaID struct { + plugin string + id string +} + +// PropertySchemaIDFrom generates a new PropertySchemaID from a string. +func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { + ids := strings.Split(id, "/") + if len(ids) < 2 || !schemaNameRe.MatchString(ids[len(ids)-1]) { + return PropertySchemaID{}, ErrInvalidID + } + return PropertySchemaID{plugin: strings.Join(ids[:len(ids)-1], "/"), id: ids[len(ids)-1]}, nil +} + +// PropertySchemaIDFromExtension generates a new PropertySchemaID from a plugin ID and an extension ID. +func PropertySchemaIDFromExtension(p PluginID, e PluginExtensionID) (PropertySchemaID, error) { + return PropertySchemaID{plugin: p.String(), id: e.String()}, nil +} + +// MustPropertySchemaID generates a new PropertySchemaID from a string, but panics if the string cannot be parsed. 
+func MustPropertySchemaID(id string) PropertySchemaID { + did, err := PropertySchemaIDFrom(id) + if err != nil { + panic(err) + } + return did +} + +// MustPropertySchemaIDFromExtension generates a new PropertySchemaID from a plugin ID and an extension ID, but panics if the string cannot be parsed. +func MustPropertySchemaIDFromExtension(p PluginID, e PluginExtensionID) PropertySchemaID { + did, err := PropertySchemaIDFromExtension(p, e) + if err != nil { + panic(err) + } + return did +} + +// PropertySchemaIDFromRef generates a new PropertySchemaID from a string ref. +func PropertySchemaIDFromRef(id *string) *PropertySchemaID { + if id == nil { + return nil + } + did, err := PropertySchemaIDFrom(*id) + if err != nil { + return nil + } + return &did +} + +// ID returns a fragment of just ID. +func (d PropertySchemaID) ID() string { + return d.id +} + +// Plugin returns a fragment of plugin ID. +func (d PropertySchemaID) Plugin() string { + return d.plugin +} + +// System returns if it is system ID +func (d PropertySchemaID) System() bool { + return d.id == schemaSystemIDPrefix || strings.HasPrefix(d.id, schemaSystemIDPrefix+"/") +} + +// String returns a string representation. +func (d PropertySchemaID) String() string { + if d.plugin == "" { + return d.id + } + return d.plugin + "/" + d.id +} + +// Ref returns a reference. +func (d PropertySchemaID) Ref() *PropertySchemaID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *PropertySchemaID) CopyRef() *PropertySchemaID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IsNil checks if ID is empty or not. +func (d PropertySchemaID) IsNil() bool { + return d.plugin == "" && d.id == "" +} + +// StringRef returns a reference of a string representation. 
+func (d *PropertySchemaID) StringRef() *string { + if d == nil { + return nil + } + id := d.String() + return &id +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *PropertySchemaID) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *PropertySchemaID) UnmarshalText(text []byte) (err error) { + *d, err = PropertySchemaIDFrom(string(text)) + return +} + +// PropertySchemaIDToKeys converts IDs into a string slice. +func PropertySchemaIDToKeys(ids []PropertySchemaID) []string { + keys := make([]string, 0, len(ids)) + for _, id := range ids { + keys = append(keys, id.String()) + } + return keys +} + +// PropertySchemaIDsFrom converts a string slice into a ID slice. +func PropertySchemaIDsFrom(ids []string) ([]PropertySchemaID, error) { + dids := make([]PropertySchemaID, 0, len(ids)) + for _, id := range ids { + did, err := PropertySchemaIDFrom(id) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} diff --git a/pkg/id/property_schema_field.go b/pkg/id/property_schema_field.go new file mode 100644 index 000000000..64d208ec1 --- /dev/null +++ b/pkg/id/property_schema_field.go @@ -0,0 +1,42 @@ +package id + +// PropertySchemaFieldID _ +type PropertySchemaFieldID string + +// PropertySchemaFieldIDFrom _ +func PropertySchemaFieldIDFrom(str *string) *PropertySchemaFieldID { + if str == nil { + return nil + } + id := PropertySchemaFieldID(*str) + return &id +} + +// Ref _ +func (id PropertySchemaFieldID) Ref() *PropertySchemaFieldID { + id2 := id + return &id2 +} + +// CopyRef _ +func (id *PropertySchemaFieldID) CopyRef() *PropertySchemaFieldID { + if id == nil { + return nil + } + id2 := *id + return &id2 +} + +// String _ +func (id PropertySchemaFieldID) String() string { + return string(id) +} + +// StringRef _ +func (id *PropertySchemaFieldID) StringRef() *string { + if id == nil { + return nil + } + str := 
string(*id) + return &str +} diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go new file mode 100644 index 000000000..b8dc2eb56 --- /dev/null +++ b/pkg/id/property_schema_test.go @@ -0,0 +1,8 @@ +package id + +import ( + "encoding" +) + +var _ encoding.TextMarshaler = (*PropertySchemaID)(nil) +var _ encoding.TextUnmarshaler = (*PropertySchemaID)(nil) diff --git a/pkg/id/scene_gen.go b/pkg/id/scene_gen.go new file mode 100644 index 000000000..2bbd669dd --- /dev/null +++ b/pkg/id/scene_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// SceneID is an ID for Scene. +type SceneID ID + +// NewSceneID generates a new SceneId. +func NewSceneID() SceneID { + return SceneID(New()) +} + +// SceneIDFrom generates a new SceneID from a string. +func SceneIDFrom(i string) (nid SceneID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = SceneID(did) + return +} + +// MustSceneID generates a new SceneID from a string, but panics if the string cannot be parsed. +func MustSceneID(i string) SceneID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return SceneID(did) +} + +// SceneIDFromRef generates a new SceneID from a string ref. +func SceneIDFromRef(i *string) *SceneID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := SceneID(*did) + return &nid +} + +// SceneIDFromRefID generates a new SceneID from a ref of a generic ID. +func SceneIDFromRefID(i *ID) *SceneID { + if i == nil { + return nil + } + nid := SceneID(*i) + return &nid +} + +// ID returns a domain ID. +func (d SceneID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d SceneID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d SceneID) GoString() string { + return "id.SceneID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. 
+func (d SceneID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d SceneID) Ref() *SceneID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *SceneID) CopyRef() *SceneID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *SceneID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *SceneID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *SceneID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *SceneID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = SceneIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *SceneID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *SceneID) UnmarshalText(text []byte) (err error) { + *d, err = SceneIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d SceneID) IsNil() bool { + return ID(d).IsNil() +} + +// SceneIDToKeys converts IDs into a string slice. +func SceneIDToKeys(ids []SceneID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// SceneIDsFrom converts a string slice into a ID slice. 
+func SceneIDsFrom(ids []string) ([]SceneID, error) { + dids := make([]SceneID, 0, len(ids)) + for _, i := range ids { + did, err := SceneIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// SceneIDsFromID converts a generic ID slice into a ID slice. +func SceneIDsFromID(ids []ID) []SceneID { + dids := make([]SceneID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, SceneID(i)) + } + return dids +} + +// SceneIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func SceneIDsFromIDRef(ids []*ID) []SceneID { + dids := make([]SceneID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, SceneID(*i)) + } + } + return dids +} + +// SceneIDsToID converts a ID slice into a generic ID slice. +func SceneIDsToID(ids []SceneID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// SceneIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
+func SceneIDsToIDRef(ids []*SceneID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// SceneIDSet represents a set of SceneIDs +type SceneIDSet struct { + m map[SceneID]struct{} + s []SceneID +} + +// NewSceneIDSet creates a new SceneIDSet +func NewSceneIDSet() *SceneIDSet { + return &SceneIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *SceneIDSet) Add(p ...SceneID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[SceneID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []SceneID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *SceneIDSet) AddRef(p *SceneID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *SceneIDSet) Has(p SceneID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *SceneIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *SceneIDSet) All() []SceneID { + if s == nil { + return nil + } + return append([]SceneID{}, s.s...) +} + +// Clone returns a cloned set +func (s *SceneIDSet) Clone() *SceneIDSet { + if s == nil { + return NewSceneIDSet() + } + s2 := NewSceneIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *SceneIDSet) Merge(s2 *SceneIDSet) *SceneIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/team_gen.go b/pkg/id/team_gen.go new file mode 100644 index 000000000..f814bda17 --- /dev/null +++ b/pkg/id/team_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// TeamID is an ID for Team. 
+type TeamID ID + +// NewTeamID generates a new TeamId. +func NewTeamID() TeamID { + return TeamID(New()) +} + +// TeamIDFrom generates a new TeamID from a string. +func TeamIDFrom(i string) (nid TeamID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = TeamID(did) + return +} + +// MustTeamID generates a new TeamID from a string, but panics if the string cannot be parsed. +func MustTeamID(i string) TeamID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return TeamID(did) +} + +// TeamIDFromRef generates a new TeamID from a string ref. +func TeamIDFromRef(i *string) *TeamID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := TeamID(*did) + return &nid +} + +// TeamIDFromRefID generates a new TeamID from a ref of a generic ID. +func TeamIDFromRefID(i *ID) *TeamID { + if i == nil { + return nil + } + nid := TeamID(*i) + return &nid +} + +// ID returns a domain ID. +func (d TeamID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d TeamID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d TeamID) GoString() string { + return "id.TeamID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d TeamID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d TeamID) Ref() *TeamID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *TeamID) CopyRef() *TeamID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *TeamID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *TeamID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *TeamID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *TeamID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = TeamIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *TeamID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *TeamID) UnmarshalText(text []byte) (err error) { + *d, err = TeamIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d TeamID) IsNil() bool { + return ID(d).IsNil() +} + +// TeamIDToKeys converts IDs into a string slice. +func TeamIDToKeys(ids []TeamID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// TeamIDsFrom converts a string slice into a ID slice. +func TeamIDsFrom(ids []string) ([]TeamID, error) { + dids := make([]TeamID, 0, len(ids)) + for _, i := range ids { + did, err := TeamIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// TeamIDsFromID converts a generic ID slice into a ID slice. +func TeamIDsFromID(ids []ID) []TeamID { + dids := make([]TeamID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, TeamID(i)) + } + return dids +} + +// TeamIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func TeamIDsFromIDRef(ids []*ID) []TeamID { + dids := make([]TeamID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, TeamID(*i)) + } + } + return dids +} + +// TeamIDsToID converts a ID slice into a generic ID slice. +func TeamIDsToID(ids []TeamID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// TeamIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func TeamIDsToIDRef(ids []*TeamID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// TeamIDSet represents a set of TeamIDs +type TeamIDSet struct { + m map[TeamID]struct{} + s []TeamID +} + +// NewTeamIDSet creates a new TeamIDSet +func NewTeamIDSet() *TeamIDSet { + return &TeamIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *TeamIDSet) Add(p ...TeamID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[TeamID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []TeamID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *TeamIDSet) AddRef(p *TeamID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *TeamIDSet) Has(p TeamID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *TeamIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *TeamIDSet) All() []TeamID { + if s == nil { + return nil + } + return append([]TeamID{}, s.s...) +} + +// Clone returns a cloned set +func (s *TeamIDSet) Clone() *TeamIDSet { + if s == nil { + return NewTeamIDSet() + } + s2 := NewTeamIDSet() + s2.Add(s.s...) 
+ return s2 +} + +// Merge returns a merged set +func (s *TeamIDSet) Merge(s2 *TeamIDSet) *TeamIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/user_gen.go b/pkg/id/user_gen.go new file mode 100644 index 000000000..df4b8977f --- /dev/null +++ b/pkg/id/user_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// UserID is an ID for User. +type UserID ID + +// NewUserID generates a new UserId. +func NewUserID() UserID { + return UserID(New()) +} + +// UserIDFrom generates a new UserID from a string. +func UserIDFrom(i string) (nid UserID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = UserID(did) + return +} + +// MustUserID generates a new UserID from a string, but panics if the string cannot be parsed. +func MustUserID(i string) UserID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return UserID(did) +} + +// UserIDFromRef generates a new UserID from a string ref. +func UserIDFromRef(i *string) *UserID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := UserID(*did) + return &nid +} + +// UserIDFromRefID generates a new UserID from a ref of a generic ID. +func UserIDFromRefID(i *ID) *UserID { + if i == nil { + return nil + } + nid := UserID(*i) + return &nid +} + +// ID returns a domain ID. +func (d UserID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d UserID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d UserID) GoString() string { + return "id.UserID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d UserID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d UserID) Ref() *UserID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. 
+func (d *UserID) CopyRef() *UserID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *UserID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *UserID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *UserID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *UserID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = UserIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *UserID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *UserID) UnmarshalText(text []byte) (err error) { + *d, err = UserIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d UserID) IsNil() bool { + return ID(d).IsNil() +} + +// UserIDToKeys converts IDs into a string slice. +func UserIDToKeys(ids []UserID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// UserIDsFrom converts a string slice into a ID slice. +func UserIDsFrom(ids []string) ([]UserID, error) { + dids := make([]UserID, 0, len(ids)) + for _, i := range ids { + did, err := UserIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// UserIDsFromID converts a generic ID slice into a ID slice. 
+func UserIDsFromID(ids []ID) []UserID { + dids := make([]UserID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, UserID(i)) + } + return dids +} + +// UserIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func UserIDsFromIDRef(ids []*ID) []UserID { + dids := make([]UserID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, UserID(*i)) + } + } + return dids +} + +// UserIDsToID converts a ID slice into a generic ID slice. +func UserIDsToID(ids []UserID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// UserIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func UserIDsToIDRef(ids []*UserID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// UserIDSet represents a set of UserIDs +type UserIDSet struct { + m map[UserID]struct{} + s []UserID +} + +// NewUserIDSet creates a new UserIDSet +func NewUserIDSet() *UserIDSet { + return &UserIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *UserIDSet) Add(p ...UserID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[UserID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []UserID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *UserIDSet) AddRef(p *UserID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *UserIDSet) Has(p UserID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *UserIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *UserIDSet) All() []UserID { + if s == nil { + return nil + } + return 
append([]UserID{}, s.s...) +} + +// Clone returns a cloned set +func (s *UserIDSet) Clone() *UserIDSet { + if s == nil { + return NewUserIDSet() + } + s2 := NewUserIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *UserIDSet) Merge(s2 *UserIDSet) *UserIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/widget_gen.go b/pkg/id/widget_gen.go new file mode 100644 index 000000000..73eb8712b --- /dev/null +++ b/pkg/id/widget_gen.go @@ -0,0 +1,287 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// WidgetID is an ID for Widget. +type WidgetID ID + +// NewWidgetID generates a new WidgetId. +func NewWidgetID() WidgetID { + return WidgetID(New()) +} + +// WidgetIDFrom generates a new WidgetID from a string. +func WidgetIDFrom(i string) (nid WidgetID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = WidgetID(did) + return +} + +// MustWidgetID generates a new WidgetID from a string, but panics if the string cannot be parsed. +func MustWidgetID(i string) WidgetID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return WidgetID(did) +} + +// WidgetIDFromRef generates a new WidgetID from a string ref. +func WidgetIDFromRef(i *string) *WidgetID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := WidgetID(*did) + return &nid +} + +// WidgetIDFromRefID generates a new WidgetID from a ref of a generic ID. +func WidgetIDFromRefID(i *ID) *WidgetID { + if i == nil { + return nil + } + nid := WidgetID(*i) + return &nid +} + +// ID returns a domain ID. +func (d WidgetID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d WidgetID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. 
+func (d WidgetID) GoString() string { + return "id.WidgetID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d WidgetID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d WidgetID) Ref() *WidgetID { + d2 := d + return &d2 +} + +// CopyRef returns a copy of a reference. +func (d *WidgetID) CopyRef() *WidgetID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *WidgetID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. +func (d *WidgetID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *WidgetID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *WidgetID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = WidgetIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *WidgetID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *WidgetID) UnmarshalText(text []byte) (err error) { + *d, err = WidgetIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d WidgetID) IsNil() bool { + return ID(d).IsNil() +} + +// WidgetIDToKeys converts IDs into a string slice. +func WidgetIDToKeys(ids []WidgetID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// WidgetIDsFrom converts a string slice into a ID slice. 
+func WidgetIDsFrom(ids []string) ([]WidgetID, error) { + dids := make([]WidgetID, 0, len(ids)) + for _, i := range ids { + did, err := WidgetIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// WidgetIDsFromID converts a generic ID slice into a ID slice. +func WidgetIDsFromID(ids []ID) []WidgetID { + dids := make([]WidgetID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, WidgetID(i)) + } + return dids +} + +// WidgetIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func WidgetIDsFromIDRef(ids []*ID) []WidgetID { + dids := make([]WidgetID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, WidgetID(*i)) + } + } + return dids +} + +// WidgetIDsToID converts a ID slice into a generic ID slice. +func WidgetIDsToID(ids []WidgetID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// WidgetIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
+func WidgetIDsToIDRef(ids []*WidgetID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// WidgetIDSet represents a set of WidgetIDs +type WidgetIDSet struct { + m map[WidgetID]struct{} + s []WidgetID +} + +// NewWidgetIDSet creates a new WidgetIDSet +func NewWidgetIDSet() *WidgetIDSet { + return &WidgetIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *WidgetIDSet) Add(p ...WidgetID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[WidgetID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []WidgetID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *WidgetIDSet) AddRef(p *WidgetID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *WidgetIDSet) Has(p WidgetID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *WidgetIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *WidgetIDSet) All() []WidgetID { + if s == nil { + return nil + } + return append([]WidgetID{}, s.s...) +} + +// Clone returns a cloned set +func (s *WidgetIDSet) Clone() *WidgetIDSet { + if s == nil { + return NewWidgetIDSet() + } + s2 := NewWidgetIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *WidgetIDSet) Merge(s2 *WidgetIDSet) *WidgetIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) 
+ return s3 +} diff --git a/pkg/kml/kml.go b/pkg/kml/kml.go new file mode 100644 index 000000000..0422cc9e8 --- /dev/null +++ b/pkg/kml/kml.go @@ -0,0 +1,68 @@ +package kml + +import ( + "github.com/reearth/reearth-backend/pkg/property" +) + +type Collection struct { + Folders []Collection `xml:"Folder"` + Placemarks []Placemark `xml:"Placemark"` + Styles []Style `xml:"Style"` + Name string `xml:"name"` +} +type Placemark struct { + Point Point `xml:"Point"` + Polygon Polygon `xml:"Polygon"` + Polyline LineString `xml:"LineString"` + Name string `xml:"name"` + StyleUrl string `xml:"styleUrl"` +} +type BoundaryIs struct { + LinearRing LinearRing `xml:"LinearRing"` +} +type LinearRing struct { + Coordinates string `xml:"coordinates"` +} +type Point struct { + Coordinates string `xml:"coordinates"` +} + +type Polygon struct { + OuterBoundaryIs BoundaryIs `xml:"outerBoundaryIs"` + InnerBoundaryIs []BoundaryIs `xml:"innerBoundaryIs"` +} +type LineString struct { + Coordinates string `xml:"coordinates"` +} +type PointFields struct { + Latlng *property.LatLng + Height *float64 +} + +type IconStyle struct { + Icon *Icon `xml:"Icon"` + Color string `xml:"color"` + Scale float64 `xml:"scale"` +} +type Icon struct { + Href string `xml:"href"` +} + +// Marker Styling +type Style struct { + Id string `xml:"id,attr"` + IconStyle IconStyle `xml:"IconStyle"` + LineStyle LineStyle `xml:"LineStyle"` + PolyStyle PolyStyle `xml:"PolyStyle"` +} + +// Polyline Styling +type LineStyle struct { + Color string `xml:"color"` + Width float64 `xml:"width"` +} +type PolyStyle struct { + Color string `xml:"color"` + Fill bool `xml:"fill"` + Stroke bool `xml:"outline"` +} diff --git a/pkg/layer/builder.go b/pkg/layer/builder.go new file mode 100644 index 000000000..b726cb936 --- /dev/null +++ b/pkg/layer/builder.go @@ -0,0 +1,73 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type Builder struct { + base layerBase +} + +func New() *Builder { + return 
&Builder{base: layerBase{visible: true}} +} + +func (b *Builder) Group() *GroupBuilder { + return NewGroup().base(b.base) +} + +func (b *Builder) Item() *ItemBuilder { + return NewItem().base(b.base) +} + +func (b *Builder) ID(id id.LayerID) *Builder { + b.base.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.base.id = id.NewLayerID() + return b +} + +func (b *Builder) Scene(s id.SceneID) *Builder { + b.base.scene = s + return b +} + +func (b *Builder) Name(name string) *Builder { + b.base.name = name + return b +} + +func (b *Builder) IsVisible(visible bool) *Builder { + b.base.visible = visible + return b +} + +func (b *Builder) IsVisibleRef(visible *bool) *Builder { + if visible != nil { + b.base.visible = *visible + } + return b +} + +func (b *Builder) Plugin(plugin *id.PluginID) *Builder { + b.base.plugin = plugin.CopyRef() + return b +} + +func (b *Builder) Extension(extension *id.PluginExtensionID) *Builder { + b.base.extension = extension.CopyRef() + return b +} + +func (b *Builder) Property(p *id.PropertyID) *Builder { + b.base.property = p.CopyRef() + return b +} + +func (b *Builder) Infobox(infobox *Infobox) *Builder { + b.base.infobox = infobox + return b +} diff --git a/pkg/layer/decoding/common.go b/pkg/layer/decoding/common.go new file mode 100644 index 000000000..67a0ebd53 --- /dev/null +++ b/pkg/layer/decoding/common.go @@ -0,0 +1,531 @@ +package decoding + +import ( + "errors" + "fmt" + "math" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + ErrBadColor = errors.New("bad color") + ErrFieldType = errors.New("incompatible field Type") +) + +var ( + extensions = map[string]id.PluginExtensionID{ + "Point": id.PluginExtensionID("marker"), + "Polygon": id.PluginExtensionID("polygon"), + "Polyline": 
id.PluginExtensionID("polyline"), + } + propertySchemas = map[string]id.PropertySchemaID{ + "Point": id.MustPropertySchemaID("reearth/marker"), + "Polygon": id.MustPropertySchemaID("reearth/polygon"), + "Polyline": id.MustPropertySchemaID("reearth/polyline"), + } + propertyItems = id.PropertySchemaFieldID("default") + propertyFields = map[string]id.PropertySchemaFieldID{ + "Point": id.PropertySchemaFieldID("location"), + "Polygon": id.PropertySchemaFieldID("polygon"), + "Polyline": id.PropertySchemaFieldID("coordinates"), + } +) + +func rgbaToHex(rgba []int64) (string, error) { + hex := "" + if len(rgba) != 4 { + return "", ErrBadColor + } + for _, i := range rgba { + if i > 255 || i < 0 { + return "", ErrBadColor + } + h := fmt.Sprintf("%x", i) + if len(h) == 1 { + h += "0" + } + hex += h + } + return hex, nil +} + +func rgbafToHex(rgbaf []float64) (string, error) { + var rgba []int64 + if len(rgbaf) != 4 { + return "", ErrBadColor + } + for _, f := range rgbaf { + var i int64 + if f > 1.0 { + return "", ErrBadColor + } else if f == 1.0 { + i = 255 + } else { + i = int64(math.Floor(f * 256)) + } + + rgba = append(rgba, i) + } + return rgbaToHex(rgba) +} + +func MustCreateProperty(t string, v interface{}, sceneID id.SceneID, styleItem interface{}, extension string) *property.Property { + p, err := createProperty(t, v, sceneID, styleItem, extension) + if err != nil { + panic(err) + } + return p +} + +func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem interface{}, extension string) (*property.Property, error) { + propertySchema := propertySchemas[t] + item := propertyItems + field := propertyFields[t] + ps := builtin.GetPropertySchema(propertySchema) + p, err := property. + New(). + NewID(). + Scene(sceneID). + Schema(propertySchema). 
+ Build() + if err != nil { + return nil, err + } + f, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, field), + ) + + switch t { + case "Point": + if pf, ok := v.(property.LatLngHeight); ok { + v2, ok := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) + if !ok { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + + v3, ok := property.ValueTypeNumber.ValueFrom(pf.Height) + if !ok { + return nil, ErrFieldType + } + f2, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "height"), + ) + + f2.UpdateUnsafe(v3) + } else if pf, ok := v.(property.LatLng); ok { + v2, ok := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) + if !ok { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + } + + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + if s.IconStyle.Icon != nil && len(s.IconStyle.Icon.Href) > 0 { + imageValue, ok := property.ValueTypeURL.ValueFrom(s.IconStyle.Icon.Href) + if !ok { + return nil, ErrFieldType + } + imageField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "image"), + ) + imageField.UpdateUnsafe(imageValue) + } + if s.IconStyle.Scale != 0 { + scaleValue, ok := property.ValueTypeNumber.ValueFrom(s.IconStyle.Scale) + if !ok { + return nil, ErrFieldType + } + scaleField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "imageSize"), + ) + scaleField.UpdateUnsafe(scaleValue) + } + if len(s.IconStyle.Color) > 0 { + colorValue, ok := property.ValueTypeString.ValueFrom(s.IconStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + case "geojson": + s, ok := styleItem.(string) + if !ok { + return nil, ErrFieldType + } + if len(s) > 0 { + colorValue, ok := 
property.ValueTypeString.ValueFrom(s) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + case "czml": + s, ok := styleItem.(*czml.Point) + if !ok { + return nil, ErrFieldType + } + if len(s.Color) > 0 { + colorValue, ok := property.ValueTypeString.ValueFrom(s.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + if s.PixelSize != 0 { + sizeValue, ok := property.ValueTypeNumber.ValueFrom(s.PixelSize) + if !ok { + return nil, ErrFieldType + } + sizeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointSize"), + ) + sizeField.UpdateUnsafe(sizeValue) + } + } + case "Polygon": + v2, ok := property.ValueTypePolygon.ValueFrom(v) + if !ok { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + if s.PolyStyle.Stroke { + stroke, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Stroke) + if !ok { + return nil, ErrFieldType + } + strokeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "stroke"), + ) + strokeField.UpdateUnsafe(stroke) + } + if s.LineStyle.Width != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + if len(s.LineStyle.Color) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + 
colorField.UpdateUnsafe(color) + } + if s.PolyStyle.Fill { + fill, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Fill) + if !ok { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fill"), + ) + fillField.UpdateUnsafe(fill) + } + if len(s.PolyStyle.Color) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.PolyStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + colorField.UpdateUnsafe(color) + } + + case "czml": + s, ok := styleItem.(*czml.Polygon) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + if s.Stroke { + stroke, ok := property.ValueTypeBool.ValueFrom(s.Stroke) + if !ok { + return nil, ErrFieldType + } + strokeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "stroke"), + ) + strokeField.UpdateUnsafe(stroke) + } + if s.StrokeWidth != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + if s.StrokeColor != nil { + var colorValue string + var err error + if len(s.StrokeColor.RGBA) > 0 { + colorValue, err = rgbaToHex(s.StrokeColor.RGBA) + if err != nil { + return nil, err + } + } + if len(s.StrokeColor.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.StrokeColor.RGBAF) + if err != nil { + return nil, err + } + } + color, ok := property.ValueTypeString.ValueFrom(colorValue) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + if s.Fill { + fill, ok := property.ValueTypeBool.ValueFrom(s.Fill) + if !ok { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + 
ps, + property.PointFieldBySchemaGroup(item, "fill"), + ) + fillField.UpdateUnsafe(fill) + } + if s.Material.SolidColor.Color != nil { + var colorValue string + var err error + if len(s.Material.SolidColor.Color.RGBA) > 0 { + colorValue, err = rgbaToHex(s.Material.SolidColor.Color.RGBA) + if err != nil { + return nil, err + } + } + if len(s.Material.SolidColor.Color.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.Material.SolidColor.Color.RGBAF) + if err != nil { + return nil, err + } + } + color, ok := property.ValueTypeString.ValueFrom(colorValue) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + colorField.UpdateUnsafe(color) + } + case "geojson": + s, ok := styleItem.(GeoStyle) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if s.StrokeWidth > 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if len(s.FillColor) > 0 { + fill, ok := property.ValueTypeString.ValueFrom(s.FillColor) + if !ok { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + fillField.UpdateUnsafe(fill) + } + + if len(s.StrokeColor) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + } + + case "Polyline": + v2, ok := property.ValueTypeCoordinates.ValueFrom(v) + if !ok { + return nil, ErrFieldType + } + f.UpdateUnsafe(v2) + + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if 
len(s.LineStyle.Color) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + + if s.LineStyle.Width != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + case "czml": + s, ok := styleItem.(*czml.Polyline) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if s.Width != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.Width) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if s.Material.PolylineOutline.Color != nil { + var colorValue string + var err error + + if len(s.Material.PolylineOutline.Color.RGBA) > 0 { + colorValue, err = rgbaToHex(s.Material.PolylineOutline.Color.RGBA) + if err != nil { + return nil, err + } + } + + if len(s.Material.PolylineOutline.Color.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.Material.PolylineOutline.Color.RGBAF) + if err != nil { + return nil, err + } + } + + color, ok := property.ValueTypeString.ValueFrom(colorValue) + if !ok { + return nil, ErrFieldType + } + + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + case "geojson": + s, ok := styleItem.(GeoStyle) + if !ok && styleItem != nil { + return nil, ErrFieldType + } + + if s.StrokeWidth > 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, 
"strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if len(s.StrokeColor) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + } + } + + return p, nil +} diff --git a/pkg/layer/decoding/common_test.go b/pkg/layer/decoding/common_test.go new file mode 100644 index 000000000..0df6b097c --- /dev/null +++ b/pkg/layer/decoding/common_test.go @@ -0,0 +1,59 @@ +package decoding + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRgbafToHex(t *testing.T) { + testCases := []struct { + name string + rgba []float64 + expected string + err error + }{ + {name: "orange", rgba: []float64{1, 0.6471, 0, 1}, expected: "ffa500ff", err: nil}, + {name: "RGBA length error", rgba: []float64{1, 0.6471, 0, 1, 1}, expected: "", err: ErrBadColor}, + {name: "RGBA greater than 1 error", rgba: []float64{1, 1.6471, 0, 1, 1}, expected: "", err: ErrBadColor}, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res, err := rgbafToHex(tc.rgba) + if tc.err == nil { + assert.NoError(tt, err) + assert.Equal(tt, tc.expected, res) + } else { + assert.True(tt, errors.As(err, &tc.err)) + } + }) + } +} + +func TestRgbaToHex(t *testing.T) { + testCases := []struct { + name string + rgba []int64 + expected string + err error + }{ + {name: "orange", rgba: []int64{255, 165, 0, 255}, expected: "ffa500ff", err: nil}, + {name: "RGBA length error", rgba: []int64{255, 165, 0}, expected: "", err: ErrBadColor}, + {name: "RGBA bad boundaries ", rgba: []int64{400, 165, 0, 1}, expected: "", err: ErrBadColor}, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res, err := rgbaToHex(tc.rgba) + if err == nil { + assert.Equal(tt, tc.expected, res) + } else { + 
assert.True(tt, errors.As(err, &tc.err)) + } + }) + } +} diff --git a/pkg/layer/decoding/czml.go b/pkg/layer/decoding/czml.go new file mode 100644 index 000000000..b17f266b1 --- /dev/null +++ b/pkg/layer/decoding/czml.go @@ -0,0 +1,174 @@ +package decoding + +import ( + "encoding/json" + "errors" + + "github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type CZMLDecoder struct { + decoder *json.Decoder + sceneId id.SceneID + groupName string +} + +func NewCZMLDecoder(d *json.Decoder, s id.SceneID) *CZMLDecoder { + return &CZMLDecoder{ + decoder: d, + sceneId: s, + groupName: "", + } +} + +func (d *CZMLDecoder) Decode() (Result, error) { + var features []czml.Feature + var layers layer.Map + lg, err := layer.NewGroup().NewID().Scene(d.sceneId).Build() + if err != nil { + return Result{}, err + } + var properties property.Map + err = d.decoder.Decode(&features) + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + + for _, v := range features { + var li *layer.Item + var p *property.Property + if v.Id == "document" { + d.groupName = v.Name + } + // case Polygon + //ff,_:=v.Polygon.StrokeColor.(map[string][]int64) + if v.Polygon != nil { + li, p, err = d.decodeLayer("Polygon", v.Polygon.Positions.CartographicDegrees, v.Polygon, v.Name) + } + // case Polyline + if v.Polyline != nil { + li, p, err = d.decodeLayer("Polyline", v.Polyline.Positions.CartographicDegrees, v.Polyline, v.Name) + } + // case Point + if v.Point != nil { + li, p, err = d.decodeLayer("Point", v.Position.CartographicDegrees, v.Point, v.Name) + } + if err != nil { + return Result{}, err + } + if li != nil { + var l layer.Layer = li + lg.Layers().AddLayer(l.ID(), -1) + lg.Rename(d.groupName) + layers = layers.Add(&l) + } + if p != nil { + properties = properties.Add(p) + } + } + + return resultFrom(lg, layers, properties) +} + 
+func (d *CZMLDecoder) decodeLayer(t string, coords []float64, style interface{}, layerName string) (*layer.Item, *property.Property, error) { + var p *property.Property + var l *layer.Item + var ex id.PluginExtensionID + var err error + switch t { + case "Point": + var latlng property.LatLng + var height float64 + latlng = property.LatLng{ + Lng: coords[0], + Lat: coords[1], + } + + if len(coords) > 2 { + height = coords[2] + } + + p, err = createProperty("Point", property.LatLngHeight{ + Lat: latlng.Lat, + Lng: latlng.Lng, + Height: height, + }, d.sceneId, style, "czml") + + if err != nil { + return nil, nil, err + } + + ex = extensions["Point"] + if layerName == "" { + layerName = "Point" + } + case "Polyline": + var crds []property.LatLngHeight + if len(coords)%3 != 0 { + return nil, nil, errors.New("unable to parse coordinates") + } + + for { + crds = append(crds, property.LatLngHeight{Lng: coords[0], Lat: coords[1], Height: coords[2]}) + if len(coords) == 3 { + break + } else { + coords = coords[3:] + } + } + + ex = extensions["Polyline"] + p, err = createProperty("Polyline", crds, d.sceneId, style, "czml") + if err != nil { + return nil, nil, err + } + + if layerName == "" { + layerName = "Polyline" + } + case "Polygon": + var poly [][]property.LatLngHeight + if len(coords)%3 != 0 { + return nil, nil, errors.New("unable to parse coordinates") + } + + for { + var crds []property.LatLngHeight + crds = append(crds, property.LatLngHeight{Lng: coords[0], Lat: coords[1], Height: coords[2]}) + poly = append(poly, crds) + if len(coords) == 3 { + break + } else { + coords = coords[3:] + } + } + + ex = extensions["Polygon"] + p, err = createProperty("Polygon", poly, d.sceneId, style, "czml") + if err != nil { + return nil, nil, err + } + + if layerName == "" { + layerName = "Polygon" + } + } + + l, err = layer. + NewItem(). + NewID(). + Name(layerName). + Scene(d.sceneId). + Property(p.IDRef()). + Extension(&ex). + Plugin(&id.OfficialPluginID). 
+ Build() + if err != nil { + return nil, nil, err + } + + return l, p, nil +} diff --git a/pkg/layer/decoding/czml_test.go b/pkg/layer/decoding/czml_test.go new file mode 100644 index 000000000..ed4a49723 --- /dev/null +++ b/pkg/layer/decoding/czml_test.go @@ -0,0 +1,145 @@ +package decoding + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &CZMLDecoder{} + +const ( + czmlmock = `[{ + "id" : "document", + "name" : "CZML Geometries", + "version" : "1.0" +}, +{ + "id" : "point 1", + "name": "point", + "position" : { + "cartographicDegrees" : [-111.0, 40.0, 0] + }, + "point": { + "color": "red", + "outlineColor": { + "rgba": [255, 0, 0, 255] + }, + "outlineWidth" : 4, + "pixelSize": 20 + } +}, +{ + "id" : "purpleLine", + "name" : "Purple arrow at height", + "polyline" : { + "positions" : { + "cartographicDegrees" : [ + -75, 43, 500000, + -125, 43, 500000 + ] + }, + "material" : { + "polylineOutline" : { + "color" : { + "rgba" : [148, 0, 211, 255] + } + } + }, + "arcType" : "NONE", + "width" : 10 + } +},{ + "id" : "testPoly", + "name" : "Red polygon on surface", + "polygon" : { + "positions" : { + "cartographicDegrees" : [ + -115.0, 37.0, 0, + -115.0, 32.0, 0, + -107.0, 33.0, 0, + -102.0, 31.0, 0, + -102.0, 35.0, 0 + ] + }, + "fill":true, + "outline":true, + "outlineWidth":4, + "material" : { + "solidColor" : { + "color" : { + "rgba" : [4, 190, 32, 144] + } + } + }, + "outlineColor":{ + "rgbaf" : [0.434,0.6,0.8766,0] + } + } +}]` +) + +func TestCZMLDecoder_Decode(t *testing.T) { + r := strings.NewReader(czmlmock) + d := json.NewDecoder(r) + s := id.NewSceneID() + p := NewCZMLDecoder(d, s) + result, err := p.Decode() + assert.NoError(t, err) + assert.Equal(t, 4, len(result.Layers)) + assert.Equal(t, 3, len(result.Properties)) + + // Root layer + rootLayer := result.RootLayers().ToLayerGroupList()[0] + 
assert.NotNil(t, rootLayer) + assert.Equal(t, 3, rootLayer.Layers().LayerCount()) + + // marker + prop := result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(0)).Property()] + field := propertyFields["Point"] + f, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, field)) + fColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "pointColor")) + fSize, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "pointSize")) + assert.Equal(t, "red", fColor.Value().Value()) + assert.Equal(t, 20.0, fSize.Value().Value()) + assert.Equal(t, f.Value().Value(), property.LatLng{Lng: -111.0, Lat: 40.0}) + + // Polyline + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(1)).Property()] + field2 := propertyFields["Polyline"] + f2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, field2)) + plist := property.Coordinates{{Lng: -75, Lat: 43, Height: 500000}, {Lng: -125, Lat: 43, Height: 500000}} + assert.Equal(t, f2.Value().Value(), plist) + strokeColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "9400d3ff", strokeColor.Value().Value()) + strokeWidth, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 10.0, strokeWidth.Value().Value()) + + // Polygon + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(2)).Property()] + field3 := propertyFields["Polygon"] + f3, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, field3)) + plist2 := property.Polygon{property.Coordinates{property.LatLngHeight{Lng: -115, Lat: 37, Height: 0}}, property.Coordinates{property.LatLngHeight{Lng: -115, Lat: 32, Height: 0}}, property.Coordinates{property.LatLngHeight{Lng: -107, Lat: 33, Height: 0}}, property.Coordinates{property.LatLngHeight{Lng: -102, Lat: 31, Height: 0}}, 
property.Coordinates{property.LatLngHeight{Lng: -102, Lat: 35, Height: 0}}} + assert.Equal(t, f3.Value().Value(), plist2) + fill, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fill")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, true, fill.Value().Value()) + stroke, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "stroke")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, true, stroke.Value().Value()) + fillColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fillColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "40be2090", fillColor.Value().Value()) + strokeColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "6f99e000", strokeColor2.Value().Value()) + strokeWidth2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 4.0, strokeWidth2.Value().Value()) +} diff --git a/pkg/layer/decoding/decoder.go b/pkg/layer/decoding/decoder.go new file mode 100644 index 000000000..641e3baa4 --- /dev/null +++ b/pkg/layer/decoding/decoder.go @@ -0,0 +1,60 @@ +package decoding + +import ( + "fmt" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Decoder interface { + Decode() (Result, error) +} + +type Result struct { + Root *layer.IDList + Layers layer.Map + Properties property.Map +} + +func (r Result) RootLayers() layer.List { + return r.Layers.Pick(r.Root) +} + +func (r Result) Merge(r2 Result) Result { + root := r.Root.Clone() + root.Merge(r2.Root) + return Result{ + Root: root, + Layers: r.Layers.Merge(r2.Layers), + Properties: r.Properties.Merge(r2.Properties), + } +} + +func (r Result) MergeInitializerResult(r2 layer.InitializerResult) Result { + return Result{ 
+ Root: r.Root.Clone().AppendLayers(r2.Root), + Layers: r.Layers.Merge(r2.Layers), + Properties: r.Properties.Merge(r2.Properties), + } +} + +func (r Result) Validate() error { + for _, l := range r.Layers.List().Deref() { + if err := l.ValidateProperties(r.Properties); err != nil { + return fmt.Errorf("layer %s is invalid: %w", l.ID(), err) + } + } + return nil +} + +func resultFrom(lg *layer.Group, layers layer.Map, properties property.Map) (r Result, err error) { + r = Result{ + Root: layer.NewIDList([]id.LayerID{lg.ID()}), + Layers: layers.Add(lg.LayerRef()), + Properties: properties, + } + err = r.Validate() + return +} diff --git a/pkg/layer/decoding/format.go b/pkg/layer/decoding/format.go new file mode 100644 index 000000000..abf4d1caa --- /dev/null +++ b/pkg/layer/decoding/format.go @@ -0,0 +1,11 @@ +package decoding + +type LayerEncodingFormat string + +const ( + LayerEncodingFormatKML LayerEncodingFormat = "kml" + LayerEncodingFormatCZML LayerEncodingFormat = "czml" + LayerEncodingFormatGEOJSON LayerEncodingFormat = "geojson" + LayerEncodingFormatSHAPE LayerEncodingFormat = "shape" + LayerEncodingFormatREEARTH LayerEncodingFormat = "reearth" +) diff --git a/pkg/layer/decoding/geojson.go b/pkg/layer/decoding/geojson.go new file mode 100644 index 000000000..b7f9e34ed --- /dev/null +++ b/pkg/layer/decoding/geojson.go @@ -0,0 +1,226 @@ +package decoding + +import ( + "errors" + "io" + + geojson "github.com/paulmach/go.geojson" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type GeoStyle struct { + StrokeColor string `json:"stroke"` + StrokeWidth float64 `json:"stroke-width"` + FillColor string `json:"fill"` +} +type GeoJSONDecoder struct { + reader io.Reader + features []*geojson.Feature + sceneId id.SceneID + groupName string +} + +func NewGeoJSONDecoder(r io.Reader, s id.SceneID) *GeoJSONDecoder { + return &GeoJSONDecoder{ + reader: r, + sceneId: s, + 
groupName: "", + } +} + +func (d *GeoJSONDecoder) Decode() (Result, error) { + lg, err := layer.NewGroup().NewID().Scene(d.sceneId).Name("GeoJSON").Build() + if err != nil { + return Result{}, err + } + + con, err := io.ReadAll(d.reader) + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + fc, err := geojson.UnmarshalFeatureCollection(con) + + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + + // if feature collection > append it to features list, else try to decode a single feature (layer) + if len(fc.Features) > 0 { + d.features = fc.Features + } else { + f, err := geojson.UnmarshalFeature(con) + if err != nil { + return Result{}, errors.New("unable to parse file content") + } + d.features = append(d.features, f) + } + + var layers layer.Map + var properties property.Map + for range d.features { + li, p, err := d.decodeLayer() + if errors.Is(err, io.EOF) { + return resultFrom(lg, layers, properties) + } + if err != nil { + return Result{}, err + } + + if li != nil { + var l layer.Layer = li + lg.Layers().AddLayer(l.ID(), -1) + layers = layers.Add(&l) + } + + if p != nil { + properties = properties.Add(p) + } + } + + return resultFrom(lg, layers, properties) +} + +func (d *GeoJSONDecoder) decodeLayer() (*layer.Item, *property.Property, error) { + var feat *geojson.Feature + var p *property.Property + var l *layer.Item + var ex id.PluginExtensionID + var err error + var stroke, fillColor string + var strokeWidth float64 + var ok bool + var layerName string + + if len(d.features) > 0 { + feat, d.features = d.features[0], d.features[1:] + } else { + return nil, nil, io.EOF + } + + switch feat.Geometry.Type { + case "Point": + var latlng property.LatLng + var height float64 + if len(feat.Geometry.Point) > 2 { + height = feat.Geometry.Point[2] + } + latlng = property.LatLng{ + Lat: feat.Geometry.Point[1], + Lng: feat.Geometry.Point[0], + } + + p, err = createProperty("Point", property.LatLngHeight{ + 
Lat: latlng.Lat, + Lng: latlng.Lng, + Height: height, + }, d.sceneId, feat.Properties["marker-color"], "geojson") + if err != nil { + return nil, nil, err + } + ex = extensions["Point"] + + layerName = "Point" + case "LineString": + var coords []property.LatLngHeight + for _, c := range feat.Geometry.LineString { + var height float64 + if len(c) == 2 { + height = 0 + } else if len(c) == 3 { + height = c[3] + } else { + return nil, nil, errors.New("unable to parse coordinates") + } + coords = append(coords, property.LatLngHeight{Lat: c[1], Lng: c[0], Height: height}) + } + + if feat.Properties["stroke"] != nil { + stroke, ok = feat.Properties["stroke"].(string) + if !ok { + return nil, nil, errors.New("unable to parse") + } + } + if feat.Properties["stroke-width"] != nil { + + strokeWidth, ok = feat.Properties["stroke-width"].(float64) + if !ok { + return nil, nil, errors.New("unable to parse") + } + } + ex = extensions["Polyline"] + p, err = createProperty("Polyline", coords, d.sceneId, GeoStyle{StrokeColor: stroke, StrokeWidth: strokeWidth}, "geojson") + if err != nil { + return nil, nil, err + } + + layerName = "Polyline" + case "Polygon": + var poly [][]property.LatLngHeight + for _, r := range feat.Geometry.Polygon { + var coords []property.LatLngHeight + for _, c := range r { + var height float64 + if len(c) == 2 { + height = 0 + } else if len(c) == 3 { + height = c[3] + } else { + return nil, nil, errors.New("unable to parse coordinates") + } + coords = append(coords, property.LatLngHeight{Lat: c[1], Lng: c[0], Height: height}) + } + poly = append(poly, coords) + } + + ex = extensions["Polygon"] + if feat.Properties["stroke"] != nil { + + stroke, ok = feat.Properties["stroke"].(string) + if !ok { + return nil, nil, errors.New("unable to parse") + } + } + + if feat.Properties["stroke-width"] != nil { + strokeWidth, ok = feat.Properties["stroke-width"].(float64) + if !ok { + return nil, nil, errors.New("unable to parse") + } + } + + if 
feat.Properties["stroke-width"] != nil { + fillColor, ok = feat.Properties["fill"].(string) + if !ok { + return nil, nil, errors.New("unable to parse") + } + } + + p, err = createProperty("Polygon", poly, d.sceneId, GeoStyle{StrokeColor: stroke, StrokeWidth: strokeWidth, FillColor: fillColor}, "geojson") + if err != nil { + return nil, nil, err + } + + layerName = "Polygon" + } + + if feat.Properties["name"] != nil { + // name is not required, so no need to return error if name is not decoded + layerName, _ = feat.Properties["name"].(string) + } + + l, err = layer. + NewItem(). + NewID(). + Name(layerName). + Scene(d.sceneId). + Property(p.IDRef()). + Extension(&ex). + Plugin(&id.OfficialPluginID). + Build() + if err != nil { + return nil, nil, err + } + return l, p, nil +} diff --git a/pkg/layer/decoding/geojson_test.go b/pkg/layer/decoding/geojson_test.go new file mode 100644 index 000000000..62992ccfd --- /dev/null +++ b/pkg/layer/decoding/geojson_test.go @@ -0,0 +1,129 @@ +package decoding + +import ( + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &GeoJSONDecoder{} + +const geojsonmock = `{ + "type": "FeatureCollection", + "crs": { + "type": "name", + "properties": { + "name": "EPSG:3857" + } + }, + "features": [ + { + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [102.0, 0.5] + }, + "properties": { + "marker-color": "red" + } + }, + { + "type": "Feature", + "geometry": { + "type": "LineString", + "coordinates": [ + [102.0, 0.0], [103.0, 1.0], [104.0, 0.0] + ] + }, + "properties": { + "stroke": "#b55e5e", + "stroke-width": 1.6, + "prop0": "value0", + "prop1": 0.0 + } + }, + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], + [100.0, 1.0], [100.0, 0.0] + ] + ] + }, + "properties": { + "stroke": "#ffffff", + "stroke-width": 2, 
+ "stroke-opacity": 1, + "fill": "#7c3b3b", + "fill-opacity": 0.5, + "prop0": "value0", + "prop1": { "this": "that" } + } + } + ] +}` + +func TestGeoJSONDecoder_Decode(t *testing.T) { + r := strings.NewReader(geojsonmock) + s := id.NewSceneID() + p := NewGeoJSONDecoder(r, s) + result, err := p.Decode() + assert.NoError(t, err) + assert.Equal(t, 4, len(result.Layers)) + assert.Equal(t, 3, len(result.Properties)) + + // Root layer + rootLayer := result.RootLayers().ToLayerGroupList()[0] + assert.NotNil(t, rootLayer) + assert.Equal(t, 3, rootLayer.Layers().LayerCount()) + + // marker + prop := result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(0)).Property()] + items := prop.Items() + assert.NotEqual(t, 0, len(items)) + item := propertyItems + field := propertyFields["Point"] + f, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field)) + fColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "pointColor")) + assert.Equal(t, "red", fColor.Value().Value()) + assert.Equal(t, f.Value().Value(), property.LatLng{Lng: 102.0, Lat: 0.5}) + + // Polyline + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(1)).Property()] + items2 := prop.Items() + assert.NotEqual(t, 0, len(items2)) + field2 := propertyFields["Polyline"] + f2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field2)) + plist := property.Coordinates{{Lng: 102.0, Lat: 0.0, Height: 0}, {Lng: 103.0, Lat: 1.0, Height: 0}, {Lng: 104.0, Lat: 0.0, Height: 0}} + assert.Equal(t, f2.Value().Value(), plist) + strokeColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "#b55e5e", strokeColor.Value().Value()) + strokeWidth, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 1.6, strokeWidth.Value().Value()) + + // Polygon + prop = 
result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(2)).Property()] + items3 := prop.Items() + assert.NotEqual(t, 0, len(items3)) + field3 := propertyFields["Polygon"] + f3, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, field3)) + plist2 := property.Polygon{property.Coordinates{property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}}} + assert.Equal(t, f3.Value().Value(), plist2) + fillColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "fillColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "#7c3b3b", fillColor.Value().Value()) + strokeColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeColor")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, "#ffffff", strokeColor2.Value().Value()) + strokeWidth2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) + assert.Equal(t, plist, f2.Value().Value()) + assert.Equal(t, 2.0, strokeWidth2.Value().Value()) +} diff --git a/pkg/layer/decoding/kml.go b/pkg/layer/decoding/kml.go new file mode 100644 index 000000000..8ccb4218e --- /dev/null +++ b/pkg/layer/decoding/kml.go @@ -0,0 +1,300 @@ +package decoding + +import ( + "encoding/xml" + "errors" + "io" + "regexp" + "strconv" + "strings" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type KMLDecoder struct { + decoder *xml.Decoder + sceneId id.SceneID + styles map[string]kml.Style +} + +func NewKMLDecoder(d *xml.Decoder, s id.SceneID) *KMLDecoder { + return &KMLDecoder{ + decoder: d, + sceneId: s, + styles: make(map[string]kml.Style), + } +} + +func coordinatesToLatLngHeight(c string) (*property.LatLng, float64, 
error) { + parts := strings.Split(strings.TrimSpace(c), ",") + lng, err := strconv.ParseFloat(parts[0], 64) + if err != nil { + return nil, 0, err + } + lat, err := strconv.ParseFloat(parts[1], 64) + if err != nil { + return nil, 0, err + } + LatLng := property.LatLng{Lat: lat, Lng: lng} + var height float64 + if len(parts) > 2 { + height, err = strconv.ParseFloat(parts[2], 64) + if err != nil { + return nil, 0, err + } + } + return &LatLng, height, nil +} + +func coordinatesToLatLngHeightList(c string) ([]property.LatLngHeight, error) { + var LatLngHeighList []property.LatLngHeight + coords := strings.Split(c, "\n") + for _, llh := range coords { + reg, err := regexp.Compile(`\s+`) + if err != nil { + return nil, err + } + processed := reg.ReplaceAllString(llh, "") + parts := strings.Split(processed, ",") + lng, err := strconv.ParseFloat(parts[0], 64) + if err != nil { + return nil, err + } + lat, err := strconv.ParseFloat(parts[1], 64) + if err != nil { + return nil, err + } + heigh, err := strconv.ParseFloat(parts[2], 64) + if err != nil { + return nil, err + } + LatLngHeigh := property.LatLngHeight{Lat: lat, Lng: lng, Height: heigh} + LatLngHeighList = append(LatLngHeighList, LatLngHeigh) + } + + return LatLngHeighList, nil +} + +func getPolygon(p *kml.Polygon) ([][]property.LatLngHeight, error) { + var pol [][]property.LatLngHeight + outer, err := coordinatesToLatLngHeightList(p.OuterBoundaryIs.LinearRing.Coordinates) + if err != nil { + return nil, err + } + pol = append(pol, outer) + if len(p.InnerBoundaryIs) > 0 { + for _, ib := range p.InnerBoundaryIs { + coords, err := coordinatesToLatLngHeightList(ib.LinearRing.Coordinates) + if err != nil { + return nil, err + } + pol = append(pol, coords) + } + } + return pol, nil +} + +func (d *KMLDecoder) parseKML() (interface{}, error) { + for { + token, err := d.decoder.Token() + if errors.Is(err, io.EOF) || token == nil { + return nil, io.EOF + } + if err != nil { + return nil, err + } + switch startElement := 
token.(type) { + case xml.StartElement: + st := startElement.Name.Local + switch st { + case "Document", "Folder": + var c kml.Collection + err := d.decoder.DecodeElement(&c, &startElement) + if err != nil { + return nil, err + } + return c, nil + case "Placemark": + var p kml.Placemark + err := d.decoder.DecodeElement(&p, &startElement) + if err != nil { + return nil, err + } + return p, nil + } + } + } +} + +func (d *KMLDecoder) decodeCollection(c kml.Collection, depth int) (*layer.Group, layer.Map, property.Map, error) { + var ll layer.Map + var pl property.Map + lg, err := layer.NewGroup().NewID().Name(c.Name).Scene(d.sceneId).Build() + if err != nil { + return nil, nil, nil, err + } + + if len(c.Styles) > 0 { + for _, s := range c.Styles { + d.styles[s.Id] = s + } + } + + for _, f := range c.Folders { + flg, flil, fpl, err := d.decodeCollection(f, depth+1) + if err != nil { + return nil, nil, nil, err + } + + if depth >= 4 { + lg.Layers().AppendLayers(flg.Layers().Layers()...) + } else { + lg.Layers().AppendLayers(flg.ID()) + ll = ll.Add(flg.LayerRef()) + } + + ll = ll.Merge(flil) + pl = pl.Merge(fpl) + } + + for _, p := range c.Placemarks { + pli, pp, err := d.decodePlacemark(p) + if err != nil { + return nil, nil, nil, err + } + lg.Layers().AppendLayers(pli.ID()) + var l layer.Layer = pli + ll = ll.Add(&l) + pl = pl.Add(pp) + } + + return lg, ll, pl, nil +} + +func (d *KMLDecoder) decodePlacemark(p kml.Placemark) (*layer.Item, *property.Property, error) { + var layerItem *layer.Item + var prop *property.Property + var ex id.PluginExtensionID + var styleId string + var layerName string + + if len(p.StyleUrl) > 0 { + styleId = p.StyleUrl[1:] + } + + if len(p.Point.Coordinates) > 0 { + latlng, height, err := coordinatesToLatLngHeight(p.Point.Coordinates) + if err != nil { + return nil, nil, err + } + prop, err = createProperty("Point", property.LatLngHeight{ + Lat: latlng.Lat, + Lng: latlng.Lng, + Height: height, + }, d.sceneId, d.styles[styleId], "kml") + if 
err != nil { + return nil, nil, err + } + ex = extensions["Point"] + layerName = "Point" + } else if len(p.Polygon.OuterBoundaryIs.LinearRing.Coordinates) > 0 { + coordslist, err := getPolygon(&p.Polygon) + if err != nil { + return nil, nil, err + } + ex = extensions["Polygon"] + layerName = "Polygon" + prop, err = createProperty("Polygon", coordslist, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + } else if len(p.Polyline.Coordinates) > 0 { + coords, err := coordinatesToLatLngHeightList(p.Polyline.Coordinates) + if err != nil { + return nil, nil, err + } + ex = extensions["Polyline"] + layerName = "Polyline" + prop, err = createProperty("Polyline", coords, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + } else { + var err error + prop, err = createProperty("Point", nil, d.sceneId, d.styles[styleId], "kml") + if err != nil { + return nil, nil, err + } + ex = extensions["Point"] + layerName = "Point" + } + + if len(p.Name) > 0 { + layerName = p.Name + } + + layerItem, err := layer. + NewItem(). + NewID(). + Name(layerName). + Scene(d.sceneId). + Property(prop.IDRef()). + Extension(&ex). + Plugin(&id.OfficialPluginID). 
+ Build() + if err != nil { + return nil, nil, err + } + + return layerItem, prop, nil +} + +func (d *KMLDecoder) Decode() (Result, error) { + var ll layer.Map + var lg *layer.Group + var pl property.Map + + for { + parsed, err := d.parseKML() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + return Result{}, err + } + + switch p := parsed.(type) { + case kml.Collection: + lg, ll, pl, err = d.decodeCollection(p, 0) + if err != nil { + return Result{}, err + } + case kml.Placemark: + if lg == nil { + lg, err = layer.NewGroup().NewID().Scene(d.sceneId).Name("KML").Build() + if err != nil { + return Result{}, err + } + } + + li, pp, err := d.decodePlacemark(p) + if err != nil { + return Result{}, err + } + + if li != nil { + lg.Layers().AddLayer(li.ID(), -1) + ll = ll.Add(li.LayerRef()) + } + + if pp != nil { + pl = pl.Add(pp) + } + } + } + + return resultFrom(lg, ll, pl) +} diff --git a/pkg/layer/decoding/kml_test.go b/pkg/layer/decoding/kml_test.go new file mode 100644 index 000000000..62be12afa --- /dev/null +++ b/pkg/layer/decoding/kml_test.go @@ -0,0 +1,754 @@ +package decoding + +import ( + "encoding/xml" + "errors" + "io" + "net/url" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/layer" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &KMLDecoder{} + +const kmlmock = ` + + + + + + + + CZML Geometries + + -122.0822035425683,37.42228990140251,43 + + #CZMLGeometries_01e65 + + + #examplePolyStyle + + + + -77.05788457660967,38.87253259892824,100 + -77.05465973756702,38.87291016281703,100 + -77.05315536854791,38.87053267794386,100 + + + + + + #exampleLineStyle + + 1 + -112.0814237830345,36.10677870477137,0 + -112.0870267752693,36.0905099328766,0 + + + + + +` + +func TestNewKMLDecoder(t *testing.T) { + d := 
NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), id.NewSceneID()) + assert.NotNil(t, d) +} + +func TestKMLDecoder_Decode(t *testing.T) { + r := strings.NewReader(kmlmock) + d := xml.NewDecoder(r) + s := id.NewSceneID() + k := NewKMLDecoder(d, s) + + result, err := k.Decode() + assert.NoError(t, err) + assert.Equal(t, 6, len(result.Layers)) + assert.Equal(t, 4, len(result.Properties)) + + // Root layer + rootLayer := result.RootLayers().ToLayerGroupList()[0] + assert.NotNil(t, rootLayer) + assert.Equal(t, 1, rootLayer.Layers().LayerCount()) + + // Folder + folder := result.Layers.Group(rootLayer.Layers().LayerAt(0)) + assert.NotNil(t, folder) + assert.Equal(t, 4, folder.Layers().LayerCount()) + + // Marker Test + l := result.Layers.Layer(folder.Layers().LayerAt(0)) + prop := result.Properties[*l.Property()] + fPoint, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) + assert.Equal(t, property.LatLng{Lng: -122.0822035425683, Lat: 37.42228990140251}, fPoint.Value().Value()) + fColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "pointColor")) + assert.Equal(t, "ff00ff7f", fColor.Value().Value()) + fSize, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "imageSize")) + assert.Equal(t, 4.0, fSize.Value().Value()) + fImage, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "image")) + actUrl, _ := url.Parse("http://maps.google.com/mapfiles/kml/pal3/icon19.png") + assert.Equal(t, actUrl, fImage.Value().Value()) + fh, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, id.PropertySchemaFieldID("height"))) + assert.Equal(t, 43.0, fh.Value().Value()) + + // Polygon test + l = result.Layers.Layer(folder.Layers().LayerAt(1)) + prop = result.Properties[*l.Property()] + polygon, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Polygon"])) + assert.Equal(t, property.Polygon{{ + {Lng: -77.05788457660967, Lat: 38.87253259892824, 
Height: 100}, + {Lng: -77.05465973756702, Lat: 38.87291016281703, Height: 100}, + {Lng: -77.0531553685479, Lat: 38.87053267794386, Height: 100}, + }}, polygon.Value().Value()) + fill, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fill")) + assert.Equal(t, true, fill.Value().Value()) + stroke, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "stroke")) + assert.Equal(t, true, stroke.Value().Value()) + fillColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "fillColor")) + assert.Equal(t, "FF0000", fillColor.Value().Value()) + strokeColor, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, "4DFF0000", strokeColor.Value().Value()) + strokeWidth, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, 4.0, strokeWidth.Value().Value()) + + // Polyline test + l = result.Layers.Layer(folder.Layers().LayerAt(2)) + prop = result.Properties[*l.Property()] + polyline, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Polyline"])) + assert.Equal(t, property.Coordinates{ + {Lng: -112.0814237830345, Lat: 36.10677870477137, Height: 0}, + {Lng: -112.0870267752693, Lat: 36.0905099328766, Height: 0}, + }, polyline.Value().Value()) + strokeColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeColor")) + assert.Equal(t, "4DFF0000", strokeColor2.Value().Value()) + strokeWidth2, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "strokeWidth")) + assert.Equal(t, 4.0, strokeWidth2.Value().Value()) + + // Empty test + l = result.Layers.Layer(folder.Layers().LayerAt(3)) + prop = result.Properties[*l.Property()] + assert.Equal(t, propertySchemas["Point"], prop.Schema()) + point, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) + assert.Nil(t, point.Value().Value()) +} + +//func TestKMLCoordinatesToLatLng(t *testing.T) 
{ +// testCases := []struct { +// name, cords string +// expectedLatLng *property.LatLng +// expectedHeight float64 +// err error +// }{ +// { +// name: "Valid LatLng", cords: "-122.0822035425683,37.42228990140251,43", +// expectedLatLng: &property.LatLng{ +// Lng: -122.0822035425683, +// Lat: 37.42228990140251, +// }, +// expectedHeight: 43, +// err: nil, +// }, +// { +// name: "Failed to parse Lat", cords: "-122.0822035425683,xxx,43", +// expectedLatLng: nil, +// expectedHeight: 0, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Lng", cords: "xxx,-122.0822035425683,43", +// expectedLatLng: nil, +// expectedHeight: 0, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Height", cords: "-122.0822035425683,43,xxx", +// expectedLatLng: nil, +// expectedHeight: 0, +// err: strconv.ErrSyntax, +// }, +// } +// for _, tc := range testCases { +// tc := tc +// t.Run(tc.name, func(tt *testing.T) { +// tt.Parallel() +// ll, h, err := coordinatesToLatLngHeight(tc.cords) +// if tc.err == nil { +// assert.True(tt, reflect.DeepEqual(ll, tc.expectedLatLng)) +// assert.Equal(tt, tc.expectedHeight, h) +// } else { +// assert.True(tt, errors.As(err, &tc.err)) +// } +// }) +// } +//} +// +//func TestKMLCoordinatesToLatLngList(t *testing.T) { +// testCases := []struct { +// name, cords string +// expected []property.LatLngHeight +// err error +// }{ +// { +// name: "Valid Cords", cords: ` -112.0814237830345,36.10677870477137,0 +// -112.0870267752693,36.0905099328766,0 `, +// expected: []property.LatLngHeight{ +// { +// Lat: 36.10677870477137, +// Lng: -112.0814237830345, +// Height: 0, +// }, +// { +// Lat: 36.0905099328766, +// Lng: -112.0870267752693, +// Height: 0, +// }, +// }, +// err: nil, +// }, +// { +// name: "Failed to parse Lng", cords: ` xxx,36.10677870477137,0 +// -112.0870267752693,36.0905099328766,0 `, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Lat", cords: ` -112.0814237830345,xxx,0 +// 
-112.0870267752693,36.0905099328766,0 `, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Height", cords: ` -112.0814237830345,36.10677870477137,xxx +// -112.0870267752693,36.0905099328766,0 `, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// } +// for _, tc := range testCases { +// tc := tc +// t.Run(tc.name, func(tt *testing.T) { +// tt.Parallel() +// res, err := coordinatesToLatLngHeightList(tc.cords) +// if tc.err == nil { +// assert.True(tt, reflect.DeepEqual(res, tc.expected)) +// } else { +// assert.True(tt, errors.As(err, &tc.err)) +// } +// }) +// } +//} +//func TestKMLGetPolygon(t *testing.T) { +// cl1 := []property.LatLngHeight{ +// { +// Lng: 36, +// Lat: -112, +// Height: 0, +// }, +// { +// Lng: 34, +// Lat: -112, +// Height: 0, +// }, +// { +// Lng: 35, +// Lat: -111, +// Height: 0, +// }, +// } +// cl2 := []property.LatLngHeight{ +// { +// Lng: 35, +// Lat: -111, +// Height: 10, +// }, +// { +// Lng: 32, +// Lat: -109, +// Height: 10, +// }, +// { +// Lng: 34, +// Lat: -119, +// Height: 10, +// }, +// } +// expected := [][]property.LatLngHeight{cl1, cl2} +// testCases := []struct { +// name string +// polygon *kml.Polygon +// expected [][]property.LatLngHeight +// err error +// }{ +// { +// name: "Valid Polygon", +// polygon: &kml.Polygon{ +// OuterBoundaryIs: kml.BoundaryIs{ +// LinearRing: kml.LinearRing{ +// Coordinates: ` 36,-112,0 +// 34,-112,0 +// 35,-111,0`, +// }, +// }, +// InnerBoundaryIs: []kml.BoundaryIs{ +// { +// LinearRing: kml.LinearRing{ +// Coordinates: ` 35,-111,10 +// 32,-109,10 +// 34,-119,10 `, +// }, +// }, +// }, +// }, +// expected: expected, +// err: nil, +// }, +// { +// name: "Failed to parse Outer", +// polygon: &kml.Polygon{ +// OuterBoundaryIs: kml.BoundaryIs{ +// LinearRing: kml.LinearRing{ +// Coordinates: ` xxx,-112,0 +// 34,-112,0 +// 35,-111,0`, +// }, +// }, +// InnerBoundaryIs: []kml.BoundaryIs{ +// { +// LinearRing: kml.LinearRing{ +// Coordinates: ` 35,-111,10 +// 
32,-109,10 +// 34,-119,10 `, +// }, +// }, +// }, +// }, +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// { +// name: "Failed to parse Inner", +// polygon: &kml.Polygon{ +// OuterBoundaryIs: kml.BoundaryIs{ +// LinearRing: kml.LinearRing{ +// Coordinates: ` 36,-112,0 +// 34,-112,0 +// 35,-111,0`, +// }, +// }, +// InnerBoundaryIs: []kml.BoundaryIs{ +// { +// LinearRing: kml.LinearRing{ +// Coordinates: ` xxx,-111,10 +// 32,-109,10 +// 34,-119,10 `, +// }, +// }, +// }, +// }, +// +// expected: nil, +// err: strconv.ErrSyntax, +// }, +// } +// for _, tc := range testCases { +// tc := tc +// t.Run(tc.name, func(tt *testing.T) { +// tt.Parallel() +// res, err := getPolygon(tc.polygon) +// if tc.err == nil { +// assert.True(tt, reflect.DeepEqual(res, tc.expected)) +// } else { +// assert.True(tt, errors.As(err, &tc.err)) +// } +// }) +// } +//} +// +func TestKMLparseKML(t *testing.T) { + s := id.NewSceneID() + + testCases := []struct { + name, KMLstr string + expected interface{} + err error + }{ + { + name: "parse document", + KMLstr: ` + + + test_doc + + `, + expected: kml.Collection{ + Folders: nil, + Placemarks: nil, + Styles: nil, + Name: "test_doc", + }, + err: io.EOF, + }, + { + name: "parse folder", + KMLstr: ` + + + test_fol + + `, + expected: kml.Collection{ + Folders: nil, + Placemarks: nil, + Styles: nil, + Name: "test_fol", + }, + err: io.EOF, + }, + { + name: "parse placemark", + KMLstr: ` + + + test_place + + `, + expected: kml.Placemark{ + Point: kml.Point{Coordinates: ""}, + Polygon: kml.Polygon{ + OuterBoundaryIs: kml.BoundaryIs{ + LinearRing: kml.LinearRing{Coordinates: ""}, + }, + InnerBoundaryIs: []kml.BoundaryIs(nil)}, + Polyline: kml.LineString{Coordinates: ""}, + Name: "test_place", + StyleUrl: "", + }, + err: io.EOF, + }, + { + name: "err parse token", + KMLstr: ` + + + test_place + + + `, + expected: nil, + err: errors.New("XML syntax error on line 5: element closed by "), + }, + } + for _, tc := range testCases { + tc := tc + 
t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(tc.KMLstr)), s) + for { + res, err := d.parseKML() + if res != nil { + assert.Equal(tt, tc.expected, res) + break + } else { + assert.Equal(tt, tc.err.Error(), err.Error()) + break + } + } + }) + } +} +func TestKMLdecodePlacemark(t *testing.T) { + s := id.NewSceneID() + point := MustCreateProperty("Point", property.LatLngHeight{ + Lat: 23, + Lng: 40, + Height: 0, + }, s, nil, "kml") + polyline := MustCreateProperty("Polyline", property.Coordinates{ + property.LatLngHeight{ + Lat: 23, + Lng: 40, + Height: 0, + }, + property.LatLngHeight{ + Lat: 66, + Lng: 34, + Height: 10, + }, + }, s, nil, "kml") + polygon := MustCreateProperty("Polygon", []property.Coordinates{{ + property.LatLngHeight{ + Lat: 23, + Lng: 40, + Height: 0, + }, + property.LatLngHeight{ + Lat: 66, + Lng: 34, + Height: 10, + }, + property.LatLngHeight{ + Lat: 50, + Lng: 12, + Height: 3, + }, + }, + }, s, nil, "kml") + pointExt := extensions["Point"] + polylineExt := extensions["Polyline"] + polygonExt := extensions["Polygon"] + testCases := []struct { + name, pt string + placemark kml.Placemark + expectedLayer *layer.Item + expectedProperty *property.Property + err error + }{ + { + name: "parse point", + pt: "Point", + placemark: kml.Placemark{ + Point: kml.Point{ + Coordinates: "40,23,0", + }, + Polygon: kml.Polygon{}, + Polyline: kml.LineString{}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Point"). + Scene(s). + Property(point.IDRef()). + Extension(&pointExt). + Plugin(&id.OfficialPluginID). + MustBuild(), + expectedProperty: point, + err: nil, + }, + { + name: "parse polyline", + pt: "Polyline", + placemark: kml.Placemark{ + Point: kml.Point{}, + Polygon: kml.Polygon{}, + Polyline: kml.LineString{ + Coordinates: `40,23,0 + 34,66,10`}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Polyline"). + Scene(s). 
+ Property(polyline.IDRef()). + Extension(&polylineExt). + Plugin(&id.OfficialPluginID). + MustBuild(), + expectedProperty: polyline, + err: nil, + }, + { + name: "parse polygon", + pt: "Polygon", + placemark: kml.Placemark{ + Point: kml.Point{}, + Polygon: kml.Polygon{ + OuterBoundaryIs: kml.BoundaryIs{ + LinearRing: kml.LinearRing{ + Coordinates: `40,23,0 + 34,66,10 + 12,50,3`, + }, + }, + InnerBoundaryIs: nil, + }, + Polyline: kml.LineString{}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Polygon"). + Scene(s). + Property(polygon.IDRef()). + Extension(&polygonExt). + Plugin(&id.OfficialPluginID). + MustBuild(), + expectedProperty: polygon, + err: nil, + }, + { + name: "parse other", + pt: "Point", + placemark: kml.Placemark{ + Point: kml.Point{}, + Polygon: kml.Polygon{}, + Polyline: kml.LineString{}, + Name: "", + StyleUrl: "", + }, + expectedLayer: layer. + NewItem(). + NewID(). + Name("Point"). + Scene(s). + Extension(&pointExt). + Plugin(&id.OfficialPluginID). 
+ MustBuild(), + expectedProperty: nil, + err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), s) + l, p, err := d.decodePlacemark(tc.placemark) + if err == nil { + assert.NotNil(tt, l) + assert.NotNil(tt, p) + assert.Equal(tt, l.Name(), tc.expectedLayer.Name()) + ps := builtin.GetPropertySchema(propertySchemas[tc.pt]) + fa, _, _, _ := p.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tc.pt])) + fe, _, _, _ := tc.expectedProperty.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tc.pt])) + assert.Equal(tt, fe.Value(), fa.Value()) + } else { + assert.True(tt, errors.As(err, &tc.err)) + } + }) + } +} + +// @todo not finished yet +//func TestKMLdecodeCollection(t *testing.T) { +// // @todo err and style cases +// s := id.NewSceneID() +// pointExt := extensions["Point"] +// point := MustCreateProperty("Point", property.LatLngHeight{ +// Lat: 39, +// Lng: 20, +// Height: 4, +// }, s, nil, "kml") +// li := layer. +// NewItem(). +// NewID(). +// Name("test_placemark"). +// Scene(s). +// Property(point.IDRef()). +// Extension(&pointExt). +// Plugin(&id.OfficialPluginID). 
+// MustBuild() +// var ll layer.Layer = li +// testCases := []struct { +// name string +// collection *kml.Collection +// expectedLayers []*layer.Layer +// expectedProperties []*property.Property +// expectedGroupLayer *layer.Group +// err error +// }{ +// { +// name: "Folders", +// collection: &kml.Collection{ +// Folders: []kml.Collection{}, +// Placemarks: nil, +// Styles: nil, +// Name: "test_folder", +// }, +// expectedLayers: nil, +// expectedProperties: nil, +// expectedGroupLayer: layer.NewGroup().NewID().Name("test_folder").MustBuild(), +// err: nil, +// }, +// { +// name: "Placemarks", +// collection: &kml.Collection{ +// Folders: nil, +// Placemarks: []kml.Placemark{ +// { +// Point: kml.Point{Coordinates: `20,39,4`}, +// Name: "test_placemark", +// StyleUrl: "", +// }, +// }, +// Styles: nil, +// Name: "test_placemark_group", +// }, +// expectedGroupLayer: layer.NewGroup().NewID().Name("test_placemark_group").MustBuild(), +// expectedLayers: []*layer.Layer{&ll}, +// expectedProperties: []*property.Property{point}, +// err: nil, +// }, +// } +// for _, tc := range testCases { +// tc := tc +// t.Run(tc.name, func(tt *testing.T) { +// d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), s) +// _, lm, pm, _ := d.decodeCollection(*tc.collection, 0) +// //if err == nil { +// // if tc.expectedGroupLayer != nil { +// // assert.NotNil(tt, lg) +// // assert.Equal(tt, tc.expectedGroupLayer.Name(), lg.Name()) +// // } +// // if tc.expectedLayers != nil { +// // assert.NotNil(tt, ll) +// // assert.True(tt, len(ll) == 1) +// // el := *tc.expectedLayers[0] +// // al := *ll[0] +// // assert.Equal(tt, el.Name(), al.Name()) +// // assert.NotNil(tt, al.Property()) +// // } +// // if tc.expectedProperties != nil { +// // assert.NotNil(tt, pl) +// // assert.True(tt, len(pl) == 1) +// // ep := *tc.expectedProperties[0] +// // ap := pl.Keys()[0] +// // fa, _, _, _ := ap.GetOrCreateField(builtin.GetPropertySchema(propertySchemas["Point"]), 
property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) +// // fe, _, _, _ := ep.GetOrCreateField(builtin.GetPropertySchema(propertySchemas["Point"]), property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) +// // assert.Equal(tt, fe.Value(), fa.Value()) +// // } +// //} else { +// // assert.True(tt, errors.As(err, &tc.err)) +// //} +// }) +// } +// +//} diff --git a/pkg/layer/decoding/reearth.go b/pkg/layer/decoding/reearth.go new file mode 100644 index 000000000..3b7fefaed --- /dev/null +++ b/pkg/layer/decoding/reearth.go @@ -0,0 +1,312 @@ +package decoding + +import ( + "encoding/json" + "errors" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type ReearthDecoder struct { + d *json.Decoder + scene id.SceneID +} + +func NewReearthDecoder(d *json.Decoder, scene id.SceneID) *ReearthDecoder { + return &ReearthDecoder{d: d, scene: scene} +} + +func (d *ReearthDecoder) Decode() (r Result, err error) { + if d == nil || d.d == nil { + return + } + + var root ReearthRoot + if err = d.d.Decode(&root); err != nil { + return + } + + r, err = root.Result(d.scene) + if err != nil { + return + } + + err = r.Validate() + return +} + +type ReearthRoot struct { + Reearth int `json:"reearth"` + Layers []*ReearthLayer `json:"layers"` +} + +func (r *ReearthRoot) Result(scene id.SceneID) (result Result, err error) { + if r == nil { + return + } + + if r.Reearth != 1 { + err = errors.New("not supported version") + return + } + + for _, l := range r.Layers { + var result2 layer.InitializerResult + i := l.layer() + if result2, err = i.Layer(scene); err != nil { + return + } + result = result.MergeInitializerResult(result2) + } + + return +} + +type ReearthLayer struct { + Plugin *id.PluginID `json:"plugin"` + Extension *id.PluginExtensionID `json:"extension"` + Name string `json:"name"` + Infobox 
*ReearthInfobox `json:"infobox"` + Property *ReearthProperty `json:"property"` + Layers []ReearthLayer `json:"layers"` + IsVisible *bool `json:"isVisible"` + LinkedDatasetSchema *id.DatasetSchemaID `json:"linkedDatasetSchema"` + LinkedDataset *id.DatasetID `json:"linkedDataset"` +} + +func (l *ReearthLayer) layer() *layer.Initializer { + if l == nil { + return nil + } + + var layers []*layer.Initializer + if l.Layers != nil { + layers = make([]*layer.Initializer, 0, len(l.Layers)) + for _, l2 := range l.Layers { + if l3 := l2.layer(); l3 != nil { + layers = append(layers, l3) + } + } + } + + var psid *id.PropertySchemaID + if l.Plugin != nil || l.Extension != nil { + psid2, err := id.PropertySchemaIDFromExtension(*l.Plugin, *l.Extension) + if err == nil { + // if there is an error, property schema id will be nil. + psid = psid2.Ref() + } + } + + var pr *property.Initializer + if l.Property != nil { + pr = l.Property.property(psid) + } + + return &layer.Initializer{ + Plugin: l.Plugin, + Extension: l.Extension, + Name: l.Name, + Infobox: l.Infobox.infobox(), + IsVisible: l.IsVisible, + Property: pr, + LinkedDatasetSchema: l.LinkedDatasetSchema, + LinkedDataset: l.LinkedDataset, + Layers: layers, + } +} + +type ReearthInfobox struct { + Property *ReearthProperty `json:"property"` + Blocks []*ReearthInfoboxField `json:"blocks"` +} + +func (i *ReearthInfobox) infobox() *layer.InitializerInfobox { + if i == nil { + return nil + } + + var blocks []*layer.InitializerInfoboxField + if i.Blocks != nil { + blocks = make([]*layer.InitializerInfoboxField, 0, len(i.Blocks)) + for _, f := range i.Blocks { + if f2 := f.infoboxField(); f2 != nil { + blocks = append(blocks, f2) + } + } + } + + var pr *property.Initializer + if i.Property != nil { + pr = i.Property.property(builtin.PropertySchemaIDInfobox.Ref()) + } + + return &layer.InitializerInfobox{ + Property: pr, + Fields: blocks, + } +} + +type ReearthInfoboxField struct { + Plugin id.PluginID `json:"plugin"` + Extension 
id.PluginExtensionID `json:"extension"` + Property *ReearthProperty `json:"property"` +} + +func (f *ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { + if f == nil { + return nil + } + + var psid *id.PropertySchemaID + { + psid2, err := id.PropertySchemaIDFromExtension(f.Plugin, f.Extension) + if err == nil { + // if there is an error, property schema id will be nil. + psid = psid2.Ref() + } + } + + var pr *property.Initializer + if f.Property != nil { + pr = f.Property.property(psid) + } + + return &layer.InitializerInfoboxField{ + Plugin: f.Plugin, + Extension: f.Extension, + Property: pr, + } +} + +type ReearthProperty map[id.PropertySchemaFieldID]ReearthPropertyItem + +func (p ReearthProperty) property(schema *id.PropertySchemaID) *property.Initializer { + if schema == nil || p == nil { + return nil + } + + var items []*property.InitializerItem + items = make([]*property.InitializerItem, 0, len(p)) + for k, i := range p { + items = append(items, i.propertyItem(k)) + } + + return &property.Initializer{ + Schema: *schema, + Items: items, + } +} + +type ReearthPropertyItem struct { + Groups []ReearthPropertyGroup `json:"groups"` + Fields ReearthPropertyGroup `json:"fields"` +} + +func (p *ReearthPropertyItem) propertyItem(key id.PropertySchemaFieldID) *property.InitializerItem { + if p == nil { + return nil + } + + if p.Groups != nil { + groups := make([]*property.InitializerGroup, 0, len(p.Groups)) + for _, g := range p.Groups { + if g == nil { + continue + } + if g2 := g.propertyGroup(); g2 != nil { + groups = append(groups, g2) + } + } + + return &property.InitializerItem{ + SchemaItem: key, + Groups: groups, + } + } + + var fields []*property.InitializerField + if p.Fields != nil { + fields = make([]*property.InitializerField, 0, len(p.Fields)) + for k, f := range p.Fields { + if f2 := f.propertyField(k); f2 != nil { + fields = append(fields, f2) + } + } + } + + return &property.InitializerItem{ + SchemaItem: key, + Fields: fields, + } +} + 
+type ReearthPropertyGroup map[id.PropertySchemaFieldID]*ReearthPropertyField + +func (p ReearthPropertyGroup) propertyGroup() *property.InitializerGroup { + if p == nil || len(p) == 0 { + return nil + } + + var fields []*property.InitializerField + fields = make([]*property.InitializerField, 0, len(p)) + for k, f := range p { + if f2 := f.propertyField(k); f2 != nil { + fields = append(fields, f2) + } + } + + return &property.InitializerGroup{ + Fields: fields, + } +} + +type ReearthPropertyField struct { + Type string `json:"type"` + Links []*ReearthPropertyLink `json:"links"` + Value interface{} `json:"value"` +} + +func (f *ReearthPropertyField) propertyField(key id.PropertySchemaFieldID) *property.InitializerField { + if f == nil || f.Type == "" { + return nil + } + + vt, ok := property.ValueTypeFrom(f.Type) + if !ok { + return nil + } + + v, ok := vt.ValueFrom(f.Value) + if !ok { + return nil + } + + var links []*property.InitializerLink + if len(links) > 0 { + links = make([]*property.InitializerLink, 0, len(f.Links)) + for _, l := range f.Links { + links = append(links, &property.InitializerLink{ + Dataset: l.Dataset, + Schema: l.Schema, + Field: l.Field, + }) + } + } + + return &property.InitializerField{ + Field: key, + Type: vt, + Value: v, + Links: links, + } +} + +type ReearthPropertyLink struct { + Dataset *id.DatasetID `json:"dataset"` + Schema id.DatasetSchemaID `json:"schema"` + Field id.DatasetSchemaFieldID `json:"field"` +} diff --git a/pkg/layer/decoding/reearth_test.go b/pkg/layer/decoding/reearth_test.go new file mode 100644 index 000000000..e34ccb101 --- /dev/null +++ b/pkg/layer/decoding/reearth_test.go @@ -0,0 +1,211 @@ +package decoding + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &ReearthDecoder{} + +func 
TestReearthDecoder_Decode(t *testing.T) { + sid := id.NewSceneID() + dsid := id.NewDatasetSchemaID() + did := id.NewDatasetID() + reearthjson := `{ + "reearth": 1, + "layers": [ + { + "plugin": "reearth", + "extension": "marker", + "name": "ABC", + "infobox": { + "blocks": [ + { + "plugin": "reearth", + "extension": "textblock" + } + ] + }, + "property": { + "default": { + "fields": { + "latlng": { + "type": "latlng", + "value": { + "lat": 1, + "lng": 2 + } + } + } + } + }, + "layers": [ + { + "name": "abc", + "isVisible": true, + "linkedDataset": "` + did.String() + `", + "plugin": "reearth", + "extension": "marker", + "property": { + "hoge": { + "groups": [ + { + "foobar": { + "type": "string", + "value": "bar" + } + }, + { + "foobar": { + "type": "string", + "value": "foo" + } + } + ] + } + } + } + ], + "isVisible": false, + "linkedDatasetSchema": "` + dsid.String() + `" + } + ] + }` + + p := NewReearthDecoder(json.NewDecoder(strings.NewReader(reearthjson)), sid) + result, err := p.Decode() + + assert.NoError(t, err) + assert.Equal(t, 2, len(result.Layers)) // 2 layers + assert.Equal(t, 4, len(result.Properties)) // 3 properties for 2 layers, 1 infobox, and 1 infobox field + + tr := true + f := false + + // root layer + rootLayer := result.Layers.Group(result.Root.LayerAt(0)) + assert.Equal(t, (&layer.Initializer{ + ID: rootLayer.IDRef(), + Plugin: id.MustPluginID("reearth").Ref(), + Extension: id.PluginExtensionID("marker").Ref(), + PropertyID: rootLayer.Property().Ref(), + Name: "ABC", + Infobox: &layer.InitializerInfobox{ + PropertyID: rootLayer.Infobox().Property().Ref(), + Fields: []*layer.InitializerInfoboxField{ + { + ID: rootLayer.Infobox().FieldAt(0).ID().Ref(), + Plugin: id.MustPluginID("reearth"), + Extension: id.PluginExtensionID("textblock"), + PropertyID: rootLayer.Infobox().FieldAt(0).Property().Ref(), + }, + }, + }, + LayerIDs: rootLayer.Layers().Layers(), + IsVisible: &f, + LinkedDatasetSchema: &dsid, + }).MustBeLayer(sid).RootLayer(), 
rootLayer) + + // second layer + secondLayer := result.Layers.Item(rootLayer.Layers().LayerAt(0)) + assert.Equal(t, (&layer.Initializer{ + ID: secondLayer.IDRef(), + Plugin: id.MustPluginID("reearth").Ref(), + Extension: id.PluginExtensionID("marker").Ref(), + PropertyID: secondLayer.Property().Ref(), + Name: "abc", + IsVisible: &tr, + LinkedDataset: &did, + }).MustBeLayer(sid).RootLayer(), secondLayer) + + // property of root layer + prop := result.Properties[*rootLayer.Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: prop.ID().Ref(), + Schema: id.MustPropertySchemaID("reearth/marker"), + Items: []*property.InitializerItem{ + { + ID: prop.Items()[0].ID().Ref(), + SchemaItem: id.PropertySchemaFieldID("default"), + Fields: []*property.InitializerField{ + { + Field: id.PropertySchemaFieldID("latlng"), + Type: property.ValueTypeLatLng, + Value: property.ValueTypeLatLng.MustBeValue(property.LatLng{Lat: 1, Lng: 2}), + }, + }, + }, + }, + }).MustBeProperty(sid), + prop, + ) + + // property of infobox of root layer + prop = result.Properties[rootLayer.Infobox().Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: rootLayer.Infobox().PropertyRef(), + Schema: id.MustPropertySchemaID("reearth/infobox"), + }).MustBeProperty(sid), + prop, + ) + + // property of infobox field of root layer + prop = result.Properties[rootLayer.Infobox().FieldAt(0).Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: rootLayer.Infobox().FieldAt(0).PropertyRef(), + Schema: id.MustPropertySchemaID("reearth/textblock"), + }).MustBeProperty(sid), + prop, + ) + + // property of second layer + prop = result.Properties[*secondLayer.Property()] + assert.Equal( + t, + (&property.Initializer{ + ID: prop.ID().Ref(), + Schema: id.MustPropertySchemaID("reearth/marker"), + Items: []*property.InitializerItem{ + { + ID: prop.Items()[0].ID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + Groups: []*property.InitializerGroup{ + { + ID: 
property.ToGroupList(prop.Items()[0]).GroupAt(0).IDRef(), + Fields: []*property.InitializerField{ + { + Field: id.PropertySchemaFieldID("foobar"), + Type: property.ValueTypeString, + Value: property.ValueTypeString.MustBeValue("bar"), + }, + }, + }, + { + ID: property.ToGroupList(prop.Items()[0]).GroupAt(1).IDRef(), + Fields: []*property.InitializerField{ + { + Field: id.PropertySchemaFieldID("foobar"), + Type: property.ValueTypeString, + Value: property.ValueTypeString.MustBeValue("foo"), + }, + }, + }, + }, + }, + }, + }).MustBeProperty(sid), + prop, + ) +} diff --git a/pkg/layer/decoding/shapetest/point.shp b/pkg/layer/decoding/shapetest/point.shp new file mode 100644 index 0000000000000000000000000000000000000000..310419cbd1cb9b9905971e2910a76d3acaed158a GIT binary patch literal 184 zcmZQzQ0HR64q{#~GcYh>$cZ>WgjCRI428%7AY-@?W})b1LY0G=52k@GK-LE{QpEuP D!|Vi+ literal 0 HcmV?d00001 diff --git a/pkg/layer/decoding/shapetest/polygon.shp b/pkg/layer/decoding/shapetest/polygon.shp new file mode 100644 index 0000000000000000000000000000000000000000..624a9db63f242e59050428eded06e0ff9902b515 GIT binary patch literal 236 zcmZQzQ0HR64$59IGcd4XmjjB5I6$OeG){#e2}U4xAjT|^Lfq;=M!^8gUR*Rx9fAe` DSi%H^ literal 0 HcmV?d00001 diff --git a/pkg/layer/decoding/shapetest/polyline.shp b/pkg/layer/decoding/shapetest/polyline.shp new file mode 100644 index 0000000000000000000000000000000000000000..938bc5acc7a346c701b8ad3cce825540cc21d315 GIT binary patch literal 308 zcmZQzQ0HR64raYzW?%p!9Kg~6A_1duD1!(y0x1LBW~n$pl&Ls?6yjC~QYGR5A`vt~ kFHn#Ph>`5ka{%&z07k>yYKg@jm>L*u;sDml0HYCl0s1xx*#H0l literal 0 HcmV?d00001 diff --git a/pkg/layer/decoding/shapetest/shapes.zip b/pkg/layer/decoding/shapetest/shapes.zip new file mode 100644 index 0000000000000000000000000000000000000000..0e20d65f5f8fb2529a2f9f3da2d3202abe06a467 GIT binary patch literal 578 zcmWIWW@Zs#U|`^2m|oZDcbfCz0(BrS1c*5qWEcwaGxJLHQj*d_LpT|j=W#BI0pZdL zZU#n{7t9O{U?N#B>1k?OLP9}GLh=EIhdey2KWA2|ew?T0Fi+m2L)4dZJrmmmCWZhM zn{4UHuTNs(-m~jQ51kg4J 
z*wP51k%E&I5}as3iEbQfC?Ok{1vd^HX2cnX8Vbn9-9$1D7$&%kV`T$*gBb|l0%?0j G5Dx&f;)neJ literal 0 HcmV?d00001 diff --git a/pkg/layer/decoding/shp.go b/pkg/layer/decoding/shp.go new file mode 100644 index 000000000..e808ce823 --- /dev/null +++ b/pkg/layer/decoding/shp.go @@ -0,0 +1,101 @@ +package decoding + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/shp" +) + +type ShapeReader interface { + Next() bool + Shape() (int, shp.Shape) + Err() error +} +type ShapeDecoder struct { + reader ShapeReader + sceneId id.SceneID +} + +func NewShapeDecoder(r ShapeReader, s id.SceneID) *ShapeDecoder { + return &ShapeDecoder{ + reader: r, + sceneId: s, + } +} + +func (shd *ShapeDecoder) getLayer(t string, coords interface{}) (*layer.Item, *property.Property, error) { + var p *property.Property + var l *layer.Item + var ex id.PluginExtensionID + var err error + p, err = createProperty(t, coords, shd.sceneId, nil, "") + if err != nil { + return nil, nil, err + } + ex = extensions[t] + l, err = layer. + NewItem(). + NewID(). + Scene(shd.sceneId). + Property(p.IDRef()). + Extension(&ex). + Plugin(&id.OfficialPluginID). 
+ Build() + if err != nil { + return nil, nil, err + } + return l, p, nil +} +func (shd *ShapeDecoder) pointsToCoords(pl []shp.Point) []property.LatLngHeight { + var ls []property.LatLngHeight + for _, p := range pl { + ls = append(ls, property.LatLngHeight{ + Lat: p.Y, + Lng: p.X, + }) + } + return ls +} + +func (shd *ShapeDecoder) Decode() (Result, error) { + lg, err := layer.NewGroup().NewID().Scene(shd.sceneId).Name("ShapeFile").Build() + if err != nil { + return Result{}, err + } + var properties property.Map + var layers layer.Map + for shd.reader.Next() { + _, shape := shd.reader.Shape() + var li *layer.Item + var p *property.Property + point, okPoint := shape.(*shp.Point) + polyline, okPolyLine := shape.(*shp.PolyLine) + polygon, okPolygon := shape.(*shp.Polygon) + if okPoint { + li, p, err = shd.getLayer("Point", property.LatLng{ + Lat: point.Y, + Lng: point.X, + }) + } + if okPolyLine { + li, p, err = shd.getLayer("Polyline", shd.pointsToCoords(polyline.Points)) + } + if okPolygon { + li, p, err = shd.getLayer("Polygon", append(make([][]property.LatLngHeight, 1), shd.pointsToCoords(polygon.Points))) + } + if err != nil { + return Result{}, err + } + if li != nil { + var l layer.Layer = li + lg.Layers().AddLayer(l.ID(), -1) + layers = layers.Add(&l) + } + if p != nil { + properties = properties.Add(p) + } + } + + return resultFrom(lg, layers, properties) +} diff --git a/pkg/layer/decoding/shp_test.go b/pkg/layer/decoding/shp_test.go new file mode 100644 index 000000000..d246c81f5 --- /dev/null +++ b/pkg/layer/decoding/shp_test.go @@ -0,0 +1,142 @@ +package decoding + +import ( + "os" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/shp" + "github.com/stretchr/testify/assert" +) + +var _ Decoder = &ShapeDecoder{} +var _ ShapeReader = &shp.ZipReader{} +var _ ShapeReader = &shp.Reader{} + +type identityTestFunc func(*testing.T, [][]float64, []shp.Shape) +type shapeGetterFunc func(string, *testing.T) []shp.Shape +type testCaseData struct { + 
points [][]float64 + tester identityTestFunc +} + +var dataForReadTests = map[string]testCaseData{ + "shapetest/shapes.zip": { + points: [][]float64{ + {10, 10}, + {5, 5}, + {0, 10}, + }, + tester: testPoint, + }, + "shapetest/point.shp": { + points: [][]float64{ + {10, 10}, + {5, 5}, + {0, 10}, + }, + tester: testPoint, + }, + "shapetest/polyline.shp": { + points: [][]float64{ + {0, 0}, + {5, 5}, + {10, 10}, + {15, 15}, + {20, 20}, + {25, 25}, + }, + tester: testPolyLine, + }, + "shapetest/polygon.shp": { + points: [][]float64{ + {0, 0}, + {0, 5}, + {5, 5}, + {5, 0}, + {0, 0}, + }, + tester: testPolygon, + }, +} + +func testPoint(t *testing.T, points [][]float64, shapes []shp.Shape) { + for n, s := range shapes { + p, ok := s.(*shp.Point) + if !ok { + t.Fatal("Failed to type assert.") + } + assert.Equal(t, []float64{p.X, p.Y}, points[n]) + } +} + +func testPolyLine(t *testing.T, points [][]float64, shapes []shp.Shape) { + for n, s := range shapes { + p, ok := s.(*shp.PolyLine) + if !ok { + t.Fatal("Failed to type assert.") + } + for k, point := range p.Points { + assert.Equal(t, points[n*3+k], []float64{point.X, point.Y}) + } + } +} + +func testPolygon(t *testing.T, points [][]float64, shapes []shp.Shape) { + for n, s := range shapes { + p, ok := s.(*shp.Polygon) + if !ok { + t.Fatal("Failed to type assert.") + } + for k, point := range p.Points { + assert.Equal(t, points[n*3+k], []float64{point.X, point.Y}) + } + } +} + +func TestSHPReadZip(t *testing.T) { + testshapeIdentity(t, "shapetest/shapes.zip", getShapesFromFile) +} + +func TestSHPReadPoint(t *testing.T) { + testshapeIdentity(t, "shapetest/point.shp", getShapesFromFile) +} + +func TestSHPReadPolyLine(t *testing.T) { + testshapeIdentity(t, "shapetest/polyline.shp", getShapesFromFile) +} + +func TestSHPReadPolygon(t *testing.T) { + testshapeIdentity(t, "shapetest/polygon.shp", getShapesFromFile) +} + +func testshapeIdentity(t *testing.T, prefix string, getter shapeGetterFunc) { + shapes := getter(prefix, t) 
+ d := dataForReadTests[prefix] + d.tester(t, d.points, shapes) +} + +func getShapesFromFile(filename string, t *testing.T) (shapes []shp.Shape) { + var reader ShapeReader + var err error + osr, err := os.Open(filename) + assert.NoError(t, err) + if strings.HasSuffix(filename, ".shp") { + reader, err = shp.ReadFrom(osr) + } else { + reader, err = shp.ReadZipFrom(osr) + } + if err != nil { + t.Fatal("Failed to open shapefile: " + filename + " (" + err.Error() + ")") + } + + for reader.Next() { + _, shape := reader.Shape() + shapes = append(shapes, shape) + } + if reader.Err() != nil { + t.Errorf("error while getting shapes for %s: %v", filename, reader.Err()) + } + + return shapes +} diff --git a/pkg/layer/encoding/common.go b/pkg/layer/encoding/common.go new file mode 100644 index 000000000..632754126 --- /dev/null +++ b/pkg/layer/encoding/common.go @@ -0,0 +1,43 @@ +package encoding + +import ( + "errors" + "image/color" + "strconv" + "strings" + + "gopkg.in/go-playground/colors.v1" +) + +var ErrInvalidColor = errors.New("invalid color") + +func getColor(str string) (*color.RGBA, error) { + cs := str + a := "" + + if str[0] == '#' { + if len(str) == 5 { + cs = str[:len(str)-1] + a = strings.Repeat(str[len(str)-1:], 2) + } else if len(str) == 9 { + cs = str[:len(str)-2] + a = str[len(str)-2:] + } + } + + b, err := colors.Parse(cs) + if err != nil || b == nil { + return nil, ErrInvalidColor + } + + c := b.ToRGBA() + var alpha uint8 + if a != "" { + a2, _ := strconv.ParseUint(a, 16, 8) + alpha = uint8(a2) + } else { + alpha = uint8(c.A * 255) + } + + return &color.RGBA{R: c.R, G: c.G, B: c.B, A: alpha}, nil +} diff --git a/pkg/layer/encoding/common_test.go b/pkg/layer/encoding/common_test.go new file mode 100644 index 000000000..99da072f8 --- /dev/null +++ b/pkg/layer/encoding/common_test.go @@ -0,0 +1,26 @@ +package encoding + +import ( + "image/color" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetColor(t *testing.T) { + c, err := 
getColor("#ffffff") + assert.NoError(t, err) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, c) + + c, err = getColor("#fff") + assert.NoError(t, err) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, c) + + c, err = getColor("#fffa") + assert.NoError(t, err) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 170}, c) + + c, err = getColor("#ff0000aa") + assert.NoError(t, err) + assert.Equal(t, &color.RGBA{R: 255, G: 0, B: 0, A: 170}, c) +} diff --git a/pkg/layer/encoding/czml.go b/pkg/layer/encoding/czml.go new file mode 100644 index 000000000..c8ec42048 --- /dev/null +++ b/pkg/layer/encoding/czml.go @@ -0,0 +1,257 @@ +package encoding + +import ( + "encoding/json" + "errors" + "io" + + "github.com/reearth/reearth-backend/pkg/czml" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" +) + +type CZMLEncoder struct { + writer io.Writer +} + +func NewCZMLEncoder(w io.Writer) *CZMLEncoder { + return &CZMLEncoder{ + writer: w, + } +} + +func (e *CZMLEncoder) stringToCZMLColor(s string) (*czml.Color, error) { + c, err := getColor(s) + if err != nil || c == nil { + if err == nil { + err = ErrInvalidColor + } + return nil, err + } + return &czml.Color{ + RGBA: []int64{int64(c.R), int64(c.G), int64(c.B), int64(c.A)}, + }, nil +} + +func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feature, error) { + if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + return nil, nil + } + + var ok bool + var err error + var pointSize float64 + var pointColor string + feature := czml.Feature{ + Id: "", + Name: "", + Point: nil, + } + feature.Name = li.Name + switch li.ExtensionID.String() { + case "marker": + latlng := property.LatLng{} + var height float64 + if li.Property.Field("location") != nil { + latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if !ok { + return nil, 
errors.New("invalid value type") + } + + if li.Property.Field("height") != nil { + height, ok = li.Property.Field("height").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + position := czml.Position{ + CartographicDegrees: []float64{latlng.Lng, latlng.Lat, height}, + } + feature.Position = &position + } else { + position := czml.Position{ + CartographicDegrees: []float64{latlng.Lng, latlng.Lat}, + } + feature.Position = &position + } + } + if li.Property.Field("pointColor") != nil { + pointColor, ok = li.Property.Field("pointColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + } + if li.Property.Field("pointSize") != nil { + pointSize, ok = li.Property.Field("pointSize").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + } + if pointSize != 0 || len(pointColor) > 0 { + point := czml.Point{ + Color: pointColor, + PixelSize: pointSize, + } + feature.Point = &point + } + case "polygon": + var polygon property.Polygon + position := czml.Position{} + var fill, stroke bool + var fillColor, strokeColor *czml.Color + var strokeWidth float64 + if li.Property.Field("polygon") != nil { + polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if !ok { + return nil, errors.New("invalid value type") + } + for _, c := range polygon { + for _, l := range c { + position.CartographicDegrees = append(position.CartographicDegrees, []float64{l.Lng, l.Lat, l.Height}...) 
+ } + } + } + if li.Property.Field("fill") != nil { + fill, ok = li.Property.Field("fill").PropertyValue.ValueBool() + if !ok { + return nil, errors.New("invalid value type") + } + } + if li.Property.Field("stroke") != nil { + stroke, ok = li.Property.Field("stroke").PropertyValue.ValueBool() + if !ok { + return nil, errors.New("invalid value type") + } + } + if li.Property.Field("fillColor") != nil { + fillStr, ok := li.Property.Field("fillColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + fillColor, err = e.stringToCZMLColor(fillStr) + if err != nil { + return nil, err + } + } + if li.Property.Field("strokeColor") != nil { + strokeStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + strokeColor, err = e.stringToCZMLColor(strokeStr) + if err != nil { + return nil, err + } + } + if li.Property.Field("strokeWidth") != nil { + strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + } + polygonCZML := czml.Polygon{ + Positions: position, + Fill: fill, + Material: &czml.Material{SolidColor: &czml.SolidColor{Color: fillColor}}, + Stroke: stroke, + StrokeColor: strokeColor, + StrokeWidth: strokeWidth, + } + feature.Polygon = &polygonCZML + case "polyline": + var polyline property.Coordinates + position := czml.Position{} + var strokeColor *czml.Color + var strokeWidth float64 + if li.Property.Field("coordinates") != nil { + polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if !ok { + return nil, errors.New("invalid value type") + } + for _, l := range polyline { + position.CartographicDegrees = append(position.CartographicDegrees, []float64{l.Lng, l.Lat, l.Height}...) 
+ } + } + + if li.Property.Field("strokeColor") != nil { + strokeStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + strokeColor, err = e.stringToCZMLColor(strokeStr) + if err != nil { + return nil, err + } + } + if li.Property.Field("strokeWidth") != nil { + strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + } + polylineCZML := czml.Polyline{ + Positions: position, + Material: &czml.Material{ + PolylineOutline: &czml.PolylineOutline{Color: strokeColor}, + }, + Width: strokeWidth, + } + feature.Polyline = &polylineCZML + + } + return &feature, nil +} + +func (e *CZMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup) ([]*czml.Feature, error) { + groupFeature := czml.Feature{ + Id: "", + Name: "", + } + groupFeature.Id = "document" + groupFeature.Name = li.Name + res := []*czml.Feature{} + res = append(res, &groupFeature) + + for _, ch := range li.Children { + sl := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: ch.Common().Merged, + Property: ch.Common().Property, + Infobox: ch.Common().Infobox, + }, + } + l, err := e.encodeSingleLayer(&sl) + if err != nil { + return nil, err + } + if l != nil { + res = append(res, l) + } + } + + return res, nil +} + +func (e *CZMLEncoder) Encode(layer merging.SealedLayer) error { + var res []*czml.Feature + var err error + if i, ok := layer.(*merging.SealedLayerItem); ok { + feature, err := e.encodeSingleLayer(i) + if err != nil { + return err + } + res = append(res, feature) + + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + res, err = e.encodeLayerGroup(g) + if err != nil { + return err + } + } + en := json.NewEncoder(e.writer) + err = en.Encode(res) + if err != nil { + return err + } + return nil +} diff --git a/pkg/layer/encoding/czml_test.go b/pkg/layer/encoding/czml_test.go new file mode 100644 
index 000000000..feb141e22 --- /dev/null +++ b/pkg/layer/encoding/czml_test.go @@ -0,0 +1,517 @@ +package encoding + +import ( + "bytes" + "encoding/json" + "io" + "testing" + + "github.com/reearth/reearth-backend/pkg/czml" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Encoder = (*GeoJSONEncoder)(nil) + +func TestEncodeCZMLPoint(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("marker") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.LatLng{ + Lat: 4.4, + Lng: 53.4, + } + + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("location"), + Type: "latlng", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + v2 := property.ValueTypeString + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("pointColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe("#7fff00ff"), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeNumber + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("height"), + Type: "number", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe(34), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := 
property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + v4 := property.ValueTypeNumber + f4 := property.SealedField{ + ID: id.PropertySchemaFieldID("pointSize"), + Type: "number", + DatasetValue: nil, + PropertyValue: v4.ValueFromUnsafe(2.4), + } + fl4 := []*property.SealedField{} + fl4 = append(fl4, &f4) + item4 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl4, + Groups: nil, + } + il = append(il, &item4) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewCZMLEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + colorStr, _ := f2.PropertyValue.ValueString() + height, _ := f3.PropertyValue.ValueNumber() + size, _ := f4.PropertyValue.ValueNumber() + expected := []*czml.Feature{} + exPos := czml.Position{CartographicDegrees: []float64{v1.Lng, v1.Lat, height}} + exPoint := czml.Point{ + Color: colorStr, + PixelSize: size, + } + exValue := czml.Feature{ + Id: "", + Name: "test", + Position: &exPos, + Point: &exPoint, + } + expected = append(expected, &exValue) + reader2, writer2 := io.Pipe() + exEn := json.NewEncoder(writer2) + go func() { + defer func() { + _ = writer2.Close() + }() + err = exEn.Encode(expected) + assert.NoError(t, err) + }() + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + 
assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} + +func TestEncodeCZMLPolygon(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polygon") + iid := id.MustPropertyItemID(id.New().String()) + vc := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + v1 := property.Polygon{vc} + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("polygon"), + Type: "polygon", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + v2 := property.ValueTypeBool + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("fill"), + Type: "bool", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe(true), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeString + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("fillColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe("#ff000000"), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + v4 := property.ValueTypeBool + f4 := 
property.SealedField{ + ID: id.PropertySchemaFieldID("stroke"), + Type: "bool", + DatasetValue: nil, + PropertyValue: v4.ValueFromUnsafe(true), + } + fl4 := []*property.SealedField{} + fl4 = append(fl4, &f4) + item4 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl4, + Groups: nil, + } + il = append(il, &item4) + v5 := property.ValueTypeString + f5 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v5.ValueFromUnsafe("#ff554555"), + } + fl5 := []*property.SealedField{} + fl5 = append(fl5, &f5) + item5 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl5, + Groups: nil, + } + il = append(il, &item5) + v6 := property.ValueTypeNumber + f6 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeWidth"), + Type: "number", + DatasetValue: nil, + PropertyValue: v6.ValueFromUnsafe(3), + } + fl6 := []*property.SealedField{} + fl6 = append(fl6, &f6) + item6 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl6, + Groups: nil, + } + il = append(il, &item6) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewCZMLEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + expected := []*czml.Feature{} + exPos := czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 
34.66, 100}} + exPoint := czml.Polygon{ + Positions: exPos, + Fill: true, + Material: &czml.Material{SolidColor: &czml.SolidColor{Color: &czml.Color{ + RGBA: []int64{255, 0, 0, 0}, + }}}, + Stroke: true, + StrokeColor: &czml.Color{ + RGBA: []int64{255, 85, 69, 85}, + }, + StrokeWidth: 3, + } + exValue := czml.Feature{ + Id: "", + Name: "test", + Polygon: &exPoint, + } + expected = append(expected, &exValue) + reader2, writer2 := io.Pipe() + exEn := json.NewEncoder(writer2) + go func() { + defer func() { + _ = writer2.Close() + }() + err = exEn.Encode(expected) + assert.NoError(t, err) + }() + + assert.NoError(t, err) + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} + +func TestEncodeCZMLPolyline(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polyline") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("coordinates"), + Type: "coordinates", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + v2 := property.ValueTypeString + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: 
v2.ValueFromUnsafe("#ff224222"), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeNumber + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeWidth"), + Type: "number", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe(3), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewCZMLEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + expected := []*czml.Feature{} + exPos := czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}} + exPolyline := czml.Polyline{ + Positions: exPos, + Material: &czml.Material{PolylineOutline: &czml.PolylineOutline{Color: &czml.Color{ + RGBA: []int64{255, 34, 66, 34}, + }}}, + Width: 3, + } + exValue := czml.Feature{ + Id: "", + Name: "test", + Polyline: &exPolyline, + } + expected = append(expected, &exValue) + reader2, writer2 := io.Pipe() + exEn := json.NewEncoder(writer2) + go func() { + defer func() { + _ = writer2.Close() + }() + err = exEn.Encode(expected) + assert.NoError(t, err) + }() + + assert.NoError(t, err) + buf := new(bytes.Buffer) + _, 
err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} diff --git a/pkg/layer/encoding/encoder.go b/pkg/layer/encoding/encoder.go new file mode 100644 index 000000000..20b1d2d88 --- /dev/null +++ b/pkg/layer/encoding/encoder.go @@ -0,0 +1,9 @@ +package encoding + +import ( + "github.com/reearth/reearth-backend/pkg/layer/merging" +) + +type Encoder interface { + Encode(merging.SealedLayer) error +} diff --git a/pkg/layer/encoding/exporter.go b/pkg/layer/encoding/exporter.go new file mode 100644 index 000000000..b01dceaf6 --- /dev/null +++ b/pkg/layer/encoding/exporter.go @@ -0,0 +1,52 @@ +package encoding + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" +) + +type Exporter struct { + Merger *merging.Merger + Sealer *merging.Sealer + Encoder Encoder +} + +func (e *Exporter) ExportLayerByID(ctx context.Context, l id.LayerID) error { + if e == nil { + return nil + } + m, err := e.Merger.MergeLayerFromID(ctx, l, nil) + if err != nil { + return err + } + return e.Encode(ctx, m) +} + +func (e *Exporter) ExportLayer(ctx context.Context, l layer.Layer) error { + if e == nil { + return nil + } + m, err := e.Merger.MergeLayer(ctx, l, nil) + if err != nil { + return err + } + return e.Encode(ctx, m) +} + +func (e *Exporter) Encode(ctx context.Context, m merging.MergedLayer) error { + if e == nil { + return nil + } + s, err := e.Sealer.Seal(ctx, m) + if err != nil { + return err + } + err = e.Encoder.Encode(s) + if err != nil { + return err + } + return nil +} diff --git a/pkg/layer/encoding/geojson.go b/pkg/layer/encoding/geojson.go new file mode 100644 index 000000000..c7c36d908 --- /dev/null +++ b/pkg/layer/encoding/geojson.go @@ -0,0 +1,207 @@ +package encoding + +import ( + "errors" + "io" + + 
geojson "github.com/paulmach/go.geojson" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" +) + +type GeoJSONEncoder struct { + writer io.Writer +} + +func NewGeoJSONEncoder(w io.Writer) *GeoJSONEncoder { + return &GeoJSONEncoder{ + writer: w, + } +} + +func (e *GeoJSONEncoder) polygonToFloat(p property.Polygon) [][][]float64 { + var res [][][]float64 + for _, c := range p { + t := e.coordsToFloat(c) + res = append(res, t) + } + return res +} + +func (e *GeoJSONEncoder) coordsToFloat(c property.Coordinates) [][]float64 { + var res [][]float64 + for _, l := range c { + t := []float64{} + t = append(t, []float64{l.Lng, l.Lat, l.Height}...) + res = append(res, t) + } + return res +} + +func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojson.Feature, error) { + if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + return nil, nil + } + + var ok bool + var geo *geojson.Geometry + var res *geojson.Feature + switch li.ExtensionID.String() { + case "marker": + latlng := property.LatLng{} + var height float64 + if li.Property.Field("location") != nil { + latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if !ok { + return nil, errors.New("invalid value type") + } + if li.Property.Field("height") != nil { + height, ok = li.Property.Field("height").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + geo = geojson.NewPointGeometry([]float64{latlng.Lng, latlng.Lat, height}) + } else { + geo = geojson.NewPointGeometry([]float64{latlng.Lng, latlng.Lat}) + } + res = geojson.NewFeature(geo) + res.SetProperty("name", li.Name) + } + if li.Property.Field("pointColor") != nil { + pointColor, ok := li.Property.Field("pointColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + if res != nil { + res.SetProperty("marker-color", pointColor) 
+ } + } + case "polygon": + var polygon property.Polygon + if li.Property.Field("polygon") != nil { + polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if !ok { + return nil, errors.New("invalid value type") + } + fl := e.polygonToFloat(polygon) + + geo = geojson.NewPolygonGeometry(fl) + res = geojson.NewFeature(geo) + res.SetProperty("name", li.Name) + } + if li.Property.Field("fillColor") != nil { + fillColor, ok := li.Property.Field("fillColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + if res != nil { + res.SetProperty("fill", fillColor) + } + } + if li.Property.Field("strokeColor") != nil { + strokeColor, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + if res != nil { + res.SetProperty("stroke", strokeColor) + } + } + if li.Property.Field("strokeWidth") != nil { + strokeWidth, ok := li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + if res != nil { + res.SetProperty("stroke-width", strokeWidth) + } + } + case "polyline": + var polyline property.Coordinates + if li.Property.Field("coordinates") != nil { + polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if !ok { + return nil, errors.New("invalid value type") + } + fl := e.coordsToFloat(polyline) + geo = geojson.NewLineStringGeometry(fl) + res = geojson.NewFeature(geo) + res.SetProperty("name", li.Name) + } + if li.Property.Field("strokeColor") != nil { + strokeColor, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if !ok { + return nil, errors.New("invalid value type") + } + if res != nil { + res.SetProperty("stroke", strokeColor) + } + } + if li.Property.Field("strokeWidth") != nil { + strokeWidth, ok := li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + if 
res != nil { + res.SetProperty("stroke-width", strokeWidth) + } + } + } + return res, nil +} + +func (e *GeoJSONEncoder) encodeLayerGroup(li *merging.SealedLayerGroup) (*geojson.FeatureCollection, error) { + layers := geojson.NewFeatureCollection() + for _, ch := range li.Flatten() { + sl := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: ch.Common().Merged, + Property: ch.Common().Property, + Infobox: ch.Common().Infobox, + }, + } + l, err := e.encodeSingleLayer(&sl) + if err != nil { + return nil, err + } + if l != nil { + layers.AddFeature(l) + } + } + return layers, nil +} + +func (e *GeoJSONEncoder) Encode(layer merging.SealedLayer) error { + var data []byte + if i, ok := layer.(*merging.SealedLayerItem); ok { + geo, err := e.encodeSingleLayer(i) + if err != nil { + return err + } + if geo != nil { + data, err = geo.MarshalJSON() + if err != nil { + return err + } + } + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + fc, err := e.encodeLayerGroup(g) + if err != nil { + return err + } + if fc != nil { + data, err = fc.MarshalJSON() + if err != nil { + return err + } + } + } + if len(data) > 0 { + _, err := e.writer.Write(data) + if err != nil { + return err + } + } + return nil +} diff --git a/pkg/layer/encoding/geojson_test.go b/pkg/layer/encoding/geojson_test.go new file mode 100644 index 000000000..e2c1100f7 --- /dev/null +++ b/pkg/layer/encoding/geojson_test.go @@ -0,0 +1,430 @@ +package encoding + +import ( + "bytes" + "io" + "testing" + + geojson "github.com/paulmach/go.geojson" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Encoder = (*GeoJSONEncoder)(nil) + +func TestPointEncodeGeoJSON(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := 
id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("marker") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.LatLng{ + Lat: 4.4, + Lng: 53.4, + } + + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("location"), + Type: "latlng", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + v2 := property.ValueTypeString + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("pointColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe("#7fff00ff"), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeNumber + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("height"), + Type: "number", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe(34), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewGeoJSONEncoder(writer) + var err error + go func() { + defer func() { 
+ _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + colorStr, _ := f2.PropertyValue.ValueString() + height, _ := f3.PropertyValue.ValueNumber() + expected := geojson.NewFeature(geojson.NewPointGeometry([]float64{v1.Lng, v1.Lat, height})) + expected.SetProperty("marker-color", colorStr) + expected.SetProperty("name", l.Name) + reader2, writer2 := io.Pipe() + var data []byte + data, err = expected.MarshalJSON() + go func() { + defer func() { + _ = writer2.Close() + }() + _, err = writer2.Write(data) + assert.NoError(t, err) + }() + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} + +func TestPolygonEncodeGeoJSON(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polygon") + iid := id.MustPropertyItemID(id.New().String()) + vc := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + v1 := property.Polygon{vc} + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("polygon"), + Type: "polygon", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + v2 := property.ValueTypeString + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("fillColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe("#7c3b3b"), + } + fl2 := 
[]*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeString + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe("#ff3343"), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + v4 := property.ValueTypeNumber + f4 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeWidth"), + Type: "number", + DatasetValue: nil, + PropertyValue: v4.ValueFromUnsafe(3), + } + fl4 := []*property.SealedField{} + fl4 = append(fl4, &f4) + item4 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl4, + Groups: nil, + } + il = append(il, &item4) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewGeoJSONEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + fillStr, _ := f2.PropertyValue.ValueString() + strokeStr, _ := f3.PropertyValue.ValueString() + width, _ := f4.PropertyValue.ValueNumber() + expected := geojson.NewFeature(geojson.NewPolygonGeometry([][][]float64{{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}}})) + 
expected.SetProperty("name", l.Name) + expected.SetProperty("fill", fillStr) + expected.SetProperty("stroke", strokeStr) + expected.SetProperty("stroke-width", width) + reader2, writer2 := io.Pipe() + var data []byte + data, err = expected.MarshalJSON() + go func() { + defer func() { + _ = writer2.Close() + }() + _, err = writer2.Write(data) + }() + assert.NoError(t, err) + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} + +func TestPolylineEncodeGeoJSON(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polyline") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("coordinates"), + Type: "coordinates", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + v2 := property.ValueTypeString + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe("#ff3343"), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, 
+ Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeNumber + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeWidth"), + Type: "number", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe(3), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewGeoJSONEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + strokeStr, _ := f2.PropertyValue.ValueString() + width, _ := f3.PropertyValue.ValueNumber() + expected := geojson.NewFeature(geojson.NewLineStringGeometry([][]float64{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}})) + expected.SetProperty("name", l.Name) + expected.SetProperty("stroke", strokeStr) + expected.SetProperty("stroke-width", width) + reader2, writer2 := io.Pipe() + var data []byte + data, err = expected.MarshalJSON() + go func() { + defer func() { + _ = writer2.Close() + }() + _, err = writer2.Write(data) + assert.NoError(t, err) + }() + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} diff --git a/pkg/layer/encoding/kml.go b/pkg/layer/encoding/kml.go new file mode 100644 index 000000000..803af9166 --- /dev/null +++ 
b/pkg/layer/encoding/kml.go @@ -0,0 +1,420 @@ +package encoding + +import ( + "errors" + "image/color" + "io" + "math/rand" + "net/url" + "strings" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + kml "github.com/twpayne/go-kml" +) + +type KMLEncoder struct { + writer io.Writer + styles []*kml.SharedElement +} + +func NewKMLEncoder(w io.Writer) *KMLEncoder { + return &KMLEncoder{ + writer: w, + } +} + +// generates a composite string of layer name and id to be used as style tag id +func (e *KMLEncoder) generateStyleId(id string, name string) (string, error) { + if len(id) > 0 { + subid := id[len(id)-5:] + trimmedName := "" + if len(name) > 0 { + trimmedName = strings.Join(strings.Fields(name), "") + "_" + + } + b := make([]byte, 8) + if _, err := rand.Read(b); err != nil { + return "", err + } + return trimmedName + subid, nil + } + + return "", nil +} + +func (e *KMLEncoder) getName(str string) *kml.SimpleElement { + return kml.Name(str) +} + +// encodes style features and return style kml element and used id +func (e *KMLEncoder) encodePointStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string, error) { + var image *url.URL + var styleId string + var err error + var ok bool + var imageSize float64 + var pointColor color.Color + if li.Property.Field("image") != nil { + if li.Property.Field("image").PropertyValue != nil { + image, ok = li.Property.Field("image").PropertyValue.ValueURL() + if !ok { + return nil, "", errors.New("invalid value type") + } + if len(image.String()) == 0 { + return nil, "", errors.New("empty URL") + } + } + } + if li.Property.Field("imageSize") != nil { + imageSize, ok = li.Property.Field("imageSize").PropertyValue.ValueNumber() + if !ok { + return nil, "", errors.New("invalid value type") + } + } + if li.Property.Field("pointColor") != nil { + colorStr, ok := li.Property.Field("pointColor").PropertyValue.ValueString() + 
if !ok { + return nil, "", errors.New("invalid value type") + } + pointColor, err = getColor(colorStr) + if err != nil { + return nil, "", err + } + } + styleId, err = e.generateStyleId(li.Original.String(), li.Name) + if err != nil { + return nil, "", err + } + if imageSize != 0 || pointColor != nil || (image != nil && len(image.String()) > 0) { + iconStyle := kml.IconStyle() + if imageSize != 0 { + iconStyle.Add(kml.Scale(imageSize)) + } + if pointColor != nil { + iconStyle.Add(kml.Color(pointColor)) + } + if image != nil { + iconStyle.Add(kml.Icon( + kml.Href(image.String()))) + } + return kml.SharedStyle(styleId, iconStyle), styleId, nil + } + return nil, "", nil +} + +func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string, error) { + var styleId string + var fill, stroke bool + var fillColor, strokeColor color.Color + var strokeWidth float64 + var err error + var ok bool + if li.Property.Field("fill") != nil { + fill, ok = li.Property.Field("fill").PropertyValue.ValueBool() + if !ok { + return nil, "", errors.New("invalid value type") + } + } + if li.Property.Field("stroke") != nil { + stroke, ok = li.Property.Field("stroke").PropertyValue.ValueBool() + if !ok { + return nil, "", errors.New("invalid value type") + } + } + + if li.Property.Field("fillColor") != nil { + colorStr, ok := li.Property.Field("fillColor").PropertyValue.ValueString() + if !ok { + return nil, "", errors.New("invalid value type") + } + fillColor, err = getColor(colorStr) + if err != nil { + return nil, "", err + } + } + if li.Property.Field("strokeColor") != nil { + colorStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if !ok { + return nil, "", errors.New("invalid value type") + } + strokeColor, err = getColor(colorStr) + if err != nil { + return nil, "", err + } + } + if li.Property.Field("strokeWidth") != nil { + strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if !ok { + return nil, "", 
errors.New("invalid value type") + } + } + styleId, err = e.generateStyleId(li.Original.String(), li.Name) + if err != nil { + return nil, "", err + } + polyStyle := kml.PolyStyle() + lineStyle := kml.LineStyle() + if fill || fillColor != nil { + if fill { + polyStyle.Add(kml.Fill(fill)) + } + if fillColor != nil { + polyStyle.Add(kml.Color(fillColor)) + } + } + if stroke || strokeColor != nil || strokeWidth != 0 { + if stroke { + lineStyle.Add(kml.Outline(stroke)) + } + if strokeColor != nil { + lineStyle.Add(kml.Color(strokeColor)) + } + if strokeWidth != 0 { + lineStyle.Add(kml.Width(strokeWidth)) + } + } + style := kml.SharedStyle(styleId) + if polyStyle != nil { + style.Add(polyStyle) + } + if lineStyle != nil { + style.Add(lineStyle) + } + return style, styleId, nil +} + +func (e *KMLEncoder) encodePolylineStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string, error) { + var styleId string + var strokeColor color.Color + var strokeWidth float64 + var err error + var ok bool + + if li.Property.Field("strokeColor") != nil { + colorStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if !ok { + return nil, "", errors.New("invalid value type") + } + strokeColor, err = getColor(colorStr) + if err != nil { + return nil, "", err + } + } + if li.Property.Field("strokeWidth") != nil { + strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if !ok { + return nil, "", errors.New("invalid value type") + } + } + styleId, err = e.generateStyleId(li.Original.String(), li.Name) + if err != nil { + return nil, "", err + } + lineStyle := kml.LineStyle() + if strokeColor != nil || strokeWidth != 0 { + if strokeColor != nil { + lineStyle.Add(kml.Color(strokeColor)) + } + if strokeWidth != 0 { + lineStyle.Add(kml.Width(strokeWidth)) + } + } + style := kml.SharedStyle(styleId) + if lineStyle != nil { + style.Add(lineStyle) + } + return style, styleId, nil +} + +func (e *KMLEncoder) encodeStyle(li *merging.SealedLayerItem) 
(*kml.SharedElement, string, error) { + switch li.ExtensionID.String() { + case "marker": + return e.encodePointStyle(li) + case "polygon": + return e.encodePolygonStyle(li) + case "polyline": + return e.encodePolylineStyle(li) + } + return nil, "", nil +} + +// encodes non style layer features +func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundElement, error) { + if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + return nil, nil + } + + var layerTag *kml.CompoundElement + var ok bool + name := e.getName(li.Name) + switch li.ExtensionID.String() { + case "marker": + layerTag = kml.Point() + latlng := property.LatLng{} + var height float64 + if li.Property.Field("location") != nil { + latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if !ok { + return nil, errors.New("invalid value type") + } + } + if li.Property.Field("height") != nil { + height, ok = li.Property.Field("height").PropertyValue.ValueNumber() + if !ok { + return nil, errors.New("invalid value type") + } + } + layerTag = layerTag.Add( + kml.Coordinates( + kml.Coordinate{ + Lon: latlng.Lng, + Lat: latlng.Lat, + Alt: height, + })) + case "polygon": + layerTag = kml.Polygon() + polygon := property.Polygon{} + if li.Property.Field("polygon") != nil { + polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if !ok { + return nil, errors.New("invalid value type") + } + } + // by default, first polygon coords set is for outer boundaries... 
the second is for inner + if len(polygon) > 0 { + var coords []kml.Coordinate + for _, c := range polygon[0] { + coords = append(coords, kml.Coordinate{ + Lon: c.Lng, + Lat: c.Lat, + Alt: c.Height, + }) + } + layerTag = layerTag.Add(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } + //encode inner boundaries + if len(polygon) == 2 { + var coords []kml.Coordinate + for _, c := range polygon[1] { + coords = append(coords, kml.Coordinate{ + Lon: c.Lng, + Lat: c.Lat, + Alt: c.Height, + }) + } + layerTag.Add(kml.InnerBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } + case "polyline": + layerTag = kml.LineString() + polyline := property.Coordinates{} + if li.Property.Field("coordinates") != nil { + polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if !ok { + return nil, errors.New("invalid value type") + } + } + if len(polyline) > 0 { + var coords []kml.Coordinate + for _, c := range polyline { + coords = append(coords, kml.Coordinate{ + Lon: c.Lng, + Lat: c.Lat, + Alt: c.Height, + }) + } + layerTag = layerTag.Add(kml.Coordinates(coords...)) + } + } + placemark := kml.Placemark() + if len(li.Name) != 0 { + placemark.Add(name) + } + placemark = placemark.Add(layerTag) + + return placemark, nil +} + +func (e *KMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup, parent *kml.CompoundElement) (*kml.CompoundElement, error) { + name := e.getName(li.Name) + if len(li.Name) != 0 { + parent.Add(name) + } + + for _, ch := range li.Children { + if g, ok := ch.(*merging.SealedLayerGroup); ok { + folder := kml.Folder() + + folder, err := e.encodeLayerGroup(g, folder) + if err != nil { + return nil, err + } + + parent.Add(folder) + } else if i, ok := ch.(*merging.SealedLayerItem); ok { + placemark, err := e.encodeLayerTag(i) + if err != nil { + return nil, err + } + if placemark == nil { + return nil, nil + } + + style, styleId, err := e.encodeStyle(i) + if err != nil { + return nil, err + } + if style != nil { + 
e.styles = append(e.styles, style) + placemark.Add(kml.StyleURL("#" + styleId)) + } + + parent = parent.Add(placemark) + } + } + + return parent, nil +} + +func (e *KMLEncoder) Encode(layer merging.SealedLayer) error { + var res *kml.CompoundElement + var err error + + if i, ok := layer.(*merging.SealedLayerItem); ok { + style, styleId, err := e.encodeStyle(i) + if err != nil { + return err + } + l, err := e.encodeLayerTag(i) + if err != nil { + return err + } + if style != nil { + res = kml.KML(style) + res = res.Add(l) + l.Add(kml.StyleURL("#" + styleId)) + } else { + res = kml.KML(l) + } + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + doc := kml.Document() + + doc, err := e.encodeLayerGroup(g, doc) + if err != nil { + return err + } + if len(e.styles) > 0 { + for _, s := range e.styles { + doc.Add(s) + } + } + res = kml.KML(doc) + } + + err = res.WriteIndent(e.writer, "", " ") + if err != nil { + return err + } + return nil +} diff --git a/pkg/layer/encoding/kml_test.go b/pkg/layer/encoding/kml_test.go new file mode 100644 index 000000000..5cc46b0c6 --- /dev/null +++ b/pkg/layer/encoding/kml_test.go @@ -0,0 +1,507 @@ +package encoding + +import ( + "bytes" + "io" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" + "github.com/twpayne/go-kml" +) + +var _ Encoder = (*KMLEncoder)(nil) + +func TestEncodeKMLMarker(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("marker") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.LatLng{ + Lat: 4.4, + Lng: 53.4, + } + + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("location"), + Type: "latlng", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + 
fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + v2 := property.ValueTypeNumber + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("imageSize"), + Type: "number", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe(4), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeURL + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("image"), + Type: "url", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe("http://maps.google.com/mapfiles/kml/pal4/icon28.png"), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + v4 := property.ValueTypeString + f4 := property.SealedField{ + ID: id.PropertySchemaFieldID("pointColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v4.ValueFromUnsafe("#7fff00ff"), + } + fl4 := []*property.SealedField{} + fl4 = append(fl4, &f4) + item4 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl4, + Groups: nil, + } + il = append(il, &item4) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + 
ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewKMLEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + + colorStr, _ := f4.PropertyValue.ValueString() + sizeFloat, _ := f2.PropertyValue.ValueNumber() + urlValue, _ := f3.PropertyValue.ValueURL() + b, _ := getColor(colorStr) + stid, err := en.generateStyleId(l.Original.String(), l.Name) + assert.NoError(t, err) + expected := kml.KML(kml.SharedStyle(stid, kml.IconStyle( + kml.Scale(sizeFloat), + kml.Color(b), + kml.Icon( + kml.Href(urlValue.String()))))) + expected = expected.Add(kml.Placemark(kml.Name("test"), + kml.Point(kml.Coordinates(kml.Coordinate{ + Lon: v1.Lng, + Lat: v1.Lat, + })), + kml.StyleURL("#"+stid))) + reader2, writer2 := io.Pipe() + go func() { + defer func() { + _ = writer2.Close() + }() + err = expected.WriteIndent(writer2, "", " ") + assert.NoError(t, err) + }() + + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + + s2 := buf2.String() + assert.Equal(t, s2, s) +} +func TestEncodeKMLPolygon(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polygon") + iid := id.MustPropertyItemID(id.New().String()) + vc := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + v1 := property.Polygon{vc} + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("polygon"), + Type: "polygon", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := 
property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + v2 := property.ValueTypeBool + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("fill"), + Type: "bool", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe(true), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeString + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("fillColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe("#ff334353"), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + il = append(il, &item3) + v4 := property.ValueTypeBool + f4 := property.SealedField{ + ID: id.PropertySchemaFieldID("stroke"), + Type: "bool", + DatasetValue: nil, + PropertyValue: v4.ValueFromUnsafe(true), + } + fl4 := []*property.SealedField{} + fl4 = append(fl4, &f4) + item4 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl4, + Groups: nil, + } + il = append(il, &item4) + v5 := property.ValueTypeString + f5 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v5.ValueFromUnsafe("#ff554555"), + } + fl5 := []*property.SealedField{} + fl5 = append(fl5, &f5) + item5 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl5, + Groups: 
nil, + } + il = append(il, &item5) + v6 := property.ValueTypeNumber + f6 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeWidth"), + Type: "number", + DatasetValue: nil, + PropertyValue: v6.ValueFromUnsafe(3), + } + fl6 := []*property.SealedField{} + fl6 = append(fl6, &f6) + item6 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl6, + Groups: nil, + } + il = append(il, &item6) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewKMLEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + }() + fillColorStr, _ := f3.PropertyValue.ValueString() + strokeColorStr, _ := f5.PropertyValue.ValueString() + b1, _ := getColor(fillColorStr) + b2, _ := getColor(strokeColorStr) + stid, err := en.generateStyleId(l.Original.String(), l.Name) + assert.NoError(t, err) + expected := kml.KML(kml.SharedStyle(stid, + kml.PolyStyle( + kml.Fill(true), + kml.Color(b1), + ), + kml.LineStyle( + kml.Outline(true), + kml.Color(b2), + kml.Width(3), + ))) + expected = expected.Add(kml.Placemark(kml.Name("test"), + kml.Polygon(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates([]kml.Coordinate{ + {Lon: 5.34, Lat: 3.4, Alt: 100}, + {Lon: 2.34, Lat: 45.4, Alt: 100}, + {Lon: 654.34, Lat: 34.66, Alt: 100}, + }...)))), + kml.StyleURL("#"+stid))) + reader2, writer2 := io.Pipe() + go func() { + defer func() { + _ = writer2.Close() + }() + err = expected.WriteIndent(writer2, "", " ") + }() + assert.NoError(t, err) + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := 
buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} +func TestEncodeKMLPolyline(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polyline") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("coordinates"), + Type: "coordinates", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + v2 := property.ValueTypeString + f2 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeColor"), + Type: "string", + DatasetValue: nil, + PropertyValue: v2.ValueFromUnsafe("#ff224222"), + } + fl2 := []*property.SealedField{} + fl2 = append(fl2, &f2) + item2 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl2, + Groups: nil, + } + il = append(il, &item2) + v3 := property.ValueTypeNumber + f3 := property.SealedField{ + ID: id.PropertySchemaFieldID("strokeWidth"), + Type: "number", + DatasetValue: nil, + PropertyValue: v3.ValueFromUnsafe(3), + } + fl3 := []*property.SealedField{} + fl3 = append(fl3, &f3) + item3 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl3, + Groups: nil, + } + 
il = append(il, &item3) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewKMLEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + }() + strokeColorStr, _ := f2.PropertyValue.ValueString() + b1, _ := getColor(strokeColorStr) + stid, err := en.generateStyleId(l.Original.String(), l.Name) + assert.NoError(t, err) + expected := kml.KML(kml.SharedStyle(stid, + kml.LineStyle( + kml.Color(b1), + kml.Width(3), + ))) + expected = expected.Add(kml.Placemark(kml.Name("test"), + kml.LineString(kml.Coordinates([]kml.Coordinate{ + {Lon: 5.34, Lat: 3.4, Alt: 100}, + {Lon: 2.34, Lat: 45.4, Alt: 100}, + {Lon: 654.34, Lat: 34.66, Alt: 100}, + }...)), + kml.StyleURL("#"+stid))) + reader2, writer2 := io.Pipe() + go func() { + defer func() { + _ = writer2.Close() + }() + err = expected.WriteIndent(writer2, "", " ") + }() + assert.NoError(t, err) + buf := new(bytes.Buffer) + _, err = buf.ReadFrom(reader) + assert.NoError(t, err) + s := buf.String() + buf2 := new(bytes.Buffer) + _, err = buf2.ReadFrom(reader2) + assert.NoError(t, err) + s2 := buf2.String() + assert.Equal(t, s2, s) +} diff --git a/pkg/layer/encoding/shp.go b/pkg/layer/encoding/shp.go new file mode 100644 index 000000000..ce866c2ac --- /dev/null +++ b/pkg/layer/encoding/shp.go @@ -0,0 +1,204 @@ +package encoding + +import ( + "errors" + "io" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + shp "github.com/reearth/reearth-backend/pkg/shp" + wsc "github.com/reearth/reearth-backend/pkg/writer" +) + +type SHPEncoder 
struct { + writer io.Writer +} + +func NewSHPEncoder(w io.Writer) *SHPEncoder { + return &SHPEncoder{ + writer: w, + } +} + +func coordsToPoints(coords property.Coordinates) []shp.Point { + var res []shp.Point + for _, l := range coords { + res = append(res, shp.Point{ + X: l.Lng, + Y: l.Lat, + }) + } + return res +} +func polygonToPoints(poly property.Polygon) ([]shp.Point, []int32) { + var res []shp.Point + parts := []int32{0} + for i, c := range poly { + partPoints := coordsToPoints(c) + res = append(res, partPoints...) + if i > 0 { + parts = append(parts, int32(len(partPoints)-1)) + + } + } + return res, parts +} +func getMaxMinPoints(points []shp.Point) (shp.Point, shp.Point) { + var max, min shp.Point + max = points[0] + min = points[0] + for _, p := range points { + if p.X > max.X && p.Y > max.Y { + max = p + } + if p.X < min.X && p.Y < min.Y { + min = p + } + } + return max, min +} +func polygonToSHP(poly property.Polygon) *shp.Polygon { + points, parts := polygonToPoints(poly) + max, min := getMaxMinPoints(points) + res := shp.Polygon{ + Box: shp.Box{ + MinX: min.X, + MinY: min.Y, + MaxX: max.X, + MaxY: max.Y, + }, + NumParts: int32(len(poly)), + NumPoints: int32(len(points)), + Parts: parts, + Points: points, + } + return &res +} +func (e *SHPEncoder) encodeLayer(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType, error) { + if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + return nil, 0, nil + } + var shapeType shp.ShapeType + var ok bool + var sh shp.Shape + switch li.ExtensionID.String() { + case "marker": + shapeType = shp.POINT + latlng := property.LatLng{} + if li.Property.Field("location") != nil { + latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if !ok { + return nil, 0, errors.New("invalid value type") + } + sh = &shp.Point{ + X: latlng.Lng, + Y: latlng.Lat, + } + + } + case "polygon": + shapeType = shp.POLYGON + polygon := property.Polygon{} + if li.Property.Field("polygon") != nil { + polygon, 
ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if !ok { + return nil, 0, errors.New("invalid value type") + } + } + if len(polygon) > 0 { + shpPoly := polygonToSHP(polygon) + sh = shpPoly + } + + case "polyline": + shapeType = shp.POLYLINE + polyline := property.Coordinates{} + if li.Property.Field("coordinates") != nil { + polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if !ok { + return nil, 0, errors.New("invalid value type") + } + } + if len(polyline) > 0 { + points := coordsToPoints(polyline) + sh = &shp.PolyLine{ + Box: shp.Box{MinX: 102, MinY: 0, MaxX: 104, MaxY: 0}, + NumParts: 1, + NumPoints: int32(len(points)), + Parts: []int32{0}, + Points: points, + } + } + } + return sh, shapeType, nil +} + +func (e *SHPEncoder) encodeLayerGroup(w *wsc.WriterSeeker, li *merging.SealedLayerGroup, shape *shp.Writer) error { + for _, ch := range li.Children { + if g, ok := ch.(*merging.SealedLayerGroup); ok { + err := e.encodeLayerGroup(w, g, shape) + if err != nil { + return err + } + } else if i, ok := ch.(*merging.SealedLayerItem); ok { + l, t, err := e.encodeLayer(i) + if err != nil { + return err + } + if shape == nil { + shape, err = shp.CreateFrom(w, t) + if err != nil { + return err + } + defer func() { + err = shape.Close() + + }() + if err != nil { + return err + } + } + _, err = shape.Write(l) + if err != nil { + return err + } + } + } + return nil +} +func (e *SHPEncoder) Encode(layer merging.SealedLayer) error { + var err error + var w wsc.WriterSeeker + if i, ok := layer.(*merging.SealedLayerItem); ok { + l, t, err := e.encodeLayer(i) + if err != nil { + return err + } + shape, err := shp.CreateFrom(&w, t) + if err != nil { + return err + } + defer func() { + err = shape.Close() + + }() + if err != nil { + return err + } + _, err = shape.Write(l) + if err != nil { + return err + } + } else if g, ok := layer.(*merging.SealedLayerGroup); ok { + err := e.encodeLayerGroup(&w, g, nil) + if err != nil { + return 
err + } + } + _, err = w.WriteTo(e.writer) + if err != nil { + return err + } + return nil +} diff --git a/pkg/layer/encoding/shp_test.go b/pkg/layer/encoding/shp_test.go new file mode 100644 index 000000000..87fb1c610 --- /dev/null +++ b/pkg/layer/encoding/shp_test.go @@ -0,0 +1,298 @@ +package encoding + +import ( + "io" + "os" + "testing" + + "github.com/jonas-p/go-shp" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +var _ Encoder = (*SHPEncoder)(nil) + +func TestEncodeSHPMarker(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("marker") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.LatLng{ + Lat: 4.4, + Lng: 53.4, + } + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("location"), + Type: "latlng", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewSHPEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + assert.NoError(t, err) + }() + tmpFile, err := os.CreateTemp(os.TempDir(), 
"*.shp") + assert.NoError(t, err) + defer func() { + err := os.Remove(tmpFile.Name()) + assert.NoError(t, err) + }() + b, err := io.ReadAll(reader) + assert.NoError(t, err) + _, err = tmpFile.Write(b) + assert.NoError(t, err) + err = tmpFile.Close() + assert.NoError(t, err) + shape, err := shp.Open(tmpFile.Name()) + assert.NoError(t, err) + defer func() { + err := shape.Close() + assert.NoError(t, err) + }() + var expected *shp.Point + var ok bool + for shape.Next() { + _, p := shape.Shape() + expected, ok = p.(*shp.Point) + } + assert.Equal(t, true, ok) + assert.Equal(t, expected, &shp.Point{ + X: 53.4, + Y: 4.4, + }) +} +func TestEncodeSHPPolygon(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polygon") + iid := id.MustPropertyItemID(id.New().String()) + vc := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + v1 := property.Polygon{vc} + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("polygon"), + Type: "polygon", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := 
NewSHPEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + }() + tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") + assert.NoError(t, err) + defer func() { + err := os.Remove(tmpFile.Name()) + assert.NoError(t, err) + }() + b, err := io.ReadAll(reader) + assert.NoError(t, err) + _, err = tmpFile.Write(b) + assert.NoError(t, err) + err = tmpFile.Close() + assert.NoError(t, err) + shape, err := shp.Open(tmpFile.Name()) + assert.NoError(t, err) + defer func() { + err := shape.Close() + assert.NoError(t, err) + }() + var expected *shp.Polygon + var ok bool + for shape.Next() { + _, p := shape.Shape() + expected, ok = p.(*shp.Polygon) + } + assert.Equal(t, true, ok) + assert.Equal(t, expected, &shp.Polygon{Box: shp.Box{MinX: 5.34, MinY: 3.4, MaxX: 654.34, MaxY: 34.66}, NumParts: 1, NumPoints: 3, Parts: []int32{0}, Points: []shp.Point{{X: 5.34, Y: 3.4}, {X: 2.34, Y: 45.4}, {X: 654.34, Y: 34.66}}}) +} + +func TestEncodeSHPPolyline(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("polyline") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.Coordinates{ + property.LatLngHeight{ + Lat: 3.4, + Lng: 5.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 45.4, + Lng: 2.34, + Height: 100, + }, property.LatLngHeight{ + Lat: 34.66, + Lng: 654.34, + Height: 100, + }, + } + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("coordinates"), + Type: "coordinates", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + sp := property.Sealed{ + Original: &pid, + Items: il, + } + l := 
merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + + reader, writer := io.Pipe() + en := NewSHPEncoder(writer) + var err error + go func() { + defer func() { + _ = writer.Close() + }() + err = en.Encode(&l) + }() + tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") + assert.NoError(t, err) + defer func() { + err := os.Remove(tmpFile.Name()) + assert.NoError(t, err) + + }() + b, err := io.ReadAll(reader) + assert.NoError(t, err) + _, err = tmpFile.Write(b) + assert.NoError(t, err) + err = tmpFile.Close() + assert.NoError(t, err) + shape, err := shp.Open(tmpFile.Name()) + assert.NoError(t, err) + defer func() { + err := shape.Close() + assert.NoError(t, err) + }() + var expected *shp.PolyLine + var ok bool + for shape.Next() { + _, p := shape.Shape() + expected, ok = p.(*shp.PolyLine) + } + assert.Equal(t, true, ok) + assert.Equal(t, expected, &shp.PolyLine{Box: shp.Box{MinX: 102, MinY: 0, MaxX: 104, MaxY: 0}, NumParts: 1, NumPoints: 3, Parts: []int32{0}, Points: []shp.Point{{X: 5.34, Y: 3.4}, {X: 2.34, Y: 45.4}, {X: 654.34, Y: 34.66}}}) +} diff --git a/pkg/layer/group.go b/pkg/layer/group.go new file mode 100644 index 000000000..ad37499dd --- /dev/null +++ b/pkg/layer/group.go @@ -0,0 +1,182 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Group struct { + layerBase + layers *IDList + linkedDatasetSchema *id.DatasetSchemaID + root bool +} + +func (l *Group) ID() id.LayerID { + return l.layerBase.ID() +} + +func (l *Group) IDRef() *id.LayerID { + if l == nil { + return nil + } + return l.layerBase.IDRef() +} + +func (l *Group) Name() string { + return l.layerBase.Name() +} + +func (l *Group) Plugin() *id.PluginID { + if l == nil { + return 
nil + } + return l.layerBase.Plugin() +} + +func (l *Group) Extension() *id.PluginExtensionID { + if l == nil { + return nil + } + return l.layerBase.Extension() +} + +func (l *Group) UsesPlugin() bool { + return l.layerBase.UsesPlugin() +} + +func (l *Group) Property() *id.PropertyID { + if l == nil { + return nil + } + return l.layerBase.Property() +} + +func (l *Group) Infobox() *Infobox { + if l == nil { + return nil + } + return l.layerBase.Infobox() +} + +func (l *Group) IsVisible() bool { + if l == nil { + return false + } + return l.layerBase.IsVisible() +} + +func (l *Group) Rename(name string) { + if l == nil { + return + } + l.layerBase.Rename(name) +} + +func (l *Group) SetInfobox(infobox *Infobox) { + if l == nil { + return + } + l.layerBase.SetInfobox(infobox) +} + +func (l *Group) SetVisible(visible bool) { + if l == nil { + return + } + l.layerBase.SetVisible(visible) +} + +func (l *Group) SetPlugin(plugin *id.PluginID) { + if l == nil { + return + } + l.layerBase.SetPlugin(plugin) +} + +func (l *Group) IsLinked() bool { + if l == nil { + return false + } + return l.linkedDatasetSchema != nil +} + +func (l *Group) LinkedDatasetSchema() *id.DatasetSchemaID { + if l == nil || l.linkedDatasetSchema == nil { + return nil + } + id := *l.linkedDatasetSchema + return &id +} + +func (l *Group) Link(ds id.DatasetSchemaID) { + if l == nil { + return + } + ds2 := ds + l.linkedDatasetSchema = &ds2 +} + +func (l *Group) Unlink() { + if l == nil { + return + } + l.linkedDatasetSchema = nil +} + +func (l *Group) Layers() *IDList { + if l == nil { + return nil + } + if l.layers == nil { + l.layers = NewIDList(nil) + } + return l.layers +} + +func (l *Group) MoveLayerFrom(id id.LayerID, index int, fromLayerGroup *Group) { + if l == nil { + return + } + + if fromLayerGroup == nil || fromLayerGroup.id == l.id { + l.layers.MoveLayer(id, index) + return + } + + fromLayerGroup.layers.RemoveLayer(id) + + if l.layers == nil { + l.layers = NewIDList(nil) + } + 
l.layers.AddLayer(id, index) +} + +func (l *Group) LayerRef() *Layer { + if l == nil { + return nil + } + var layer Layer = l + return &layer +} + +func (l *Group) IsRoot() bool { + if l == nil { + return false + } + return l.root +} + +func (l *Group) Properties() []id.PropertyID { + if l == nil { + return nil + } + return l.layerBase.Properties() +} + +func (l *Group) ValidateProperties(pm property.Map) error { + if l == nil { + return nil + } + return l.layerBase.ValidateProperties(pm) +} diff --git a/pkg/layer/group_builder.go b/pkg/layer/group_builder.go new file mode 100644 index 000000000..71fb7a281 --- /dev/null +++ b/pkg/layer/group_builder.go @@ -0,0 +1,112 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +func GroupFromLayer(l Layer) *Group { + li, ok := l.(*Group) + if !ok { + return nil + } + return li +} + +func GroupFromLayerRef(l *Layer) *Group { + if l == nil { + return nil + } + li, ok := (*l).(*Group) + if !ok { + return nil + } + return li +} + +type GroupBuilder struct { + l *Group +} + +func NewGroup() *GroupBuilder { + return &GroupBuilder{l: &Group{layerBase: layerBase{visible: true}}} +} + +func (b *GroupBuilder) Build() (*Group, error) { + if id.ID(b.l.id).IsNil() { + return nil, id.ErrInvalidID + } + return b.l, nil +} + +func (b *GroupBuilder) MustBuild() *Group { + group, err := b.Build() + if err != nil { + panic(err) + } + return group +} + +func (b *GroupBuilder) base(layer layerBase) *GroupBuilder { + b.l.layerBase = layer + return b +} + +func (b *GroupBuilder) ID(id id.LayerID) *GroupBuilder { + b.l.id = id + return b +} + +func (b *GroupBuilder) NewID() *GroupBuilder { + b.l.id = id.NewLayerID() + return b +} + +func (b *GroupBuilder) Scene(s id.SceneID) *GroupBuilder { + b.l.scene = s + return b +} + +func (b *GroupBuilder) Root(root bool) *GroupBuilder { + b.l.root = root + return b +} + +func (b *GroupBuilder) Name(name string) *GroupBuilder { + b.l.name = name + return b +} + +func (b 
*GroupBuilder) IsVisible(visible bool) *GroupBuilder { + b.l.visible = visible + return b +} + +func (b *GroupBuilder) Plugin(plugin *id.PluginID) *GroupBuilder { + b.l.plugin = plugin.CopyRef() + return b +} + +func (b *GroupBuilder) Extension(extension *id.PluginExtensionID) *GroupBuilder { + b.l.extension = extension.CopyRef() + return b +} + +func (b *GroupBuilder) Property(property *id.PropertyID) *GroupBuilder { + b.l.property = property.CopyRef() + return b +} + +func (b *GroupBuilder) Layers(ll *IDList) *GroupBuilder { + b.l.layers = ll + return b +} + +func (b *GroupBuilder) Infobox(infobox *Infobox) *GroupBuilder { + b.l.infobox = infobox + return b +} + +func (b *GroupBuilder) LinkedDatasetSchema(linkedDatasetSchema *id.DatasetSchemaID) *GroupBuilder { + b.l.linkedDatasetSchema = linkedDatasetSchema.CopyRef() + return b +} diff --git a/pkg/layer/group_test.go b/pkg/layer/group_test.go new file mode 100644 index 000000000..94a7ebd56 --- /dev/null +++ b/pkg/layer/group_test.go @@ -0,0 +1,137 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var _ Layer = &Group{} + +var l1 = id.MustLayerID(id.New().String()) +var l2 = id.MustLayerID(id.New().String()) +var group = Group{ + layerBase: layerBase{ + id: id.MustLayerID(id.New().String()), + name: "xxx", + visible: false, + plugin: id.MustPluginID("aaa#1.1.1").Ref(), + extension: id.PluginExtensionID("foo").Ref(), + property: nil, + infobox: nil, + scene: id.SceneID{}, + }, + layers: &IDList{ + layers: append(make([]id.LayerID, 0), l1, l2), + layerIDs: map[id.LayerID]struct{}{l1: {}, l2: {}}, + }, + linkedDatasetSchema: nil, + root: true, +} + +func TestGroup_ID(t *testing.T) { + assert.NotNil(t, group.ID()) + assert.IsType(t, id.MustLayerID(id.New().String()), group.ID()) +} + +func TestGroup_Name(t *testing.T) { + assert.Equal(t, "xxx", group.Name()) +} + +func TestGroup_Plugin(t *testing.T) { + assert.NotNil(t, 
group.Plugin()) + assert.True(t, id.MustPluginID("aaa#1.1.1").Equal(*group.Plugin())) +} + +func TestGroup_IDRef(t *testing.T) { + assert.NotNil(t, group.IDRef()) + assert.IsType(t, id.MustLayerID(id.New().String()), group.ID()) +} + +func TestGroup_Extension(t *testing.T) { + assert.NotNil(t, group.Extension()) + assert.Equal(t, "foo", group.Extension().String()) +} + +func TestGroup_Infobox(t *testing.T) { + assert.Nil(t, group.Infobox()) +} + +func TestGroup_IsVisible(t *testing.T) { + assert.False(t, group.IsVisible()) +} + +func TestGroup_Property(t *testing.T) { + assert.Nil(t, group.Property()) +} + +func TestGroup_IsLinked(t *testing.T) { + assert.False(t, group.IsLinked()) +} + +func TestGroup_IsRoot(t *testing.T) { + assert.True(t, group.IsRoot()) +} + +func TestGroup_Rename(t *testing.T) { + group.Rename("fff") + assert.Equal(t, "fff", group.Name()) +} + +func TestGroup_SetInfobox(t *testing.T) { + inf := Infobox{ + property: id.MustPropertyID(id.New().String()), + fields: nil, + ids: nil, + } + group.SetInfobox(&inf) + assert.NotNil(t, group.Infobox()) +} + +func TestGroup_SetPlugin(t *testing.T) { + group.SetPlugin(id.MustPluginID("ccc#1.1.1").Ref()) + assert.NotNil(t, group.Plugin()) + assert.True(t, id.MustPluginID("ccc#1.1.1").Equal(*group.Plugin())) +} + +func TestGroup_SetVisible(t *testing.T) { + group.SetVisible(true) + assert.True(t, group.IsVisible()) +} + +func TestGroup_Properties(t *testing.T) { + assert.NotNil(t, group.Properties()) + assert.Equal(t, 1, len(group.Properties())) +} + +func TestGroup_UsesPlugin(t *testing.T) { + assert.True(t, group.UsesPlugin()) +} + +func TestGroup_LayerRef(t *testing.T) { + assert.NotNil(t, group.LayerRef()) +} + +func TestGroup_Layers(t *testing.T) { + assert.Equal(t, 2, len(group.Layers().Layers())) +} + +func TestGroup_LinkedDatasetSchema(t *testing.T) { + assert.Nil(t, group.LinkedDatasetSchema()) +} + +func TestGroup_Link(t *testing.T) { + group.Link(id.MustDatasetSchemaID(id.New().String())) + 
assert.NotNil(t, group.LinkedDatasetSchema()) +} + +func TestGroup_Unlink(t *testing.T) { + group.Unlink() + assert.Nil(t, group.LinkedDatasetSchema()) +} + +func TestGroup_MoveLayerFrom(t *testing.T) { + group.MoveLayerFrom(l1, 1, &group) + assert.Equal(t, l1, group.Layers().Layers()[1]) +} diff --git a/pkg/layer/id_list.go b/pkg/layer/id_list.go new file mode 100644 index 000000000..ec7273cba --- /dev/null +++ b/pkg/layer/id_list.go @@ -0,0 +1,224 @@ +package layer + +import "github.com/reearth/reearth-backend/pkg/id" + +// IDList _ +type IDList struct { + layers []id.LayerID + // for checking duplication + layerIDs map[id.LayerID]struct{} +} + +// NewIDList _ +func NewIDList(layers []id.LayerID) *IDList { + ll := IDList{} + if len(layers) == 0 { + return &ll + } + + ll.layers = append([]id.LayerID{}, layers...) + ll.layerIDs = make(map[id.LayerID]struct{}, len(layers)) + for _, l := range layers { + ll.layerIDs[l] = struct{}{} + } + return &ll +} + +// Layers _ +func (l *IDList) Layers() []id.LayerID { + if l == nil { + return nil + } + result := append([]id.LayerID{}, l.layers...) 
+ return result +} + +// HasLayer _ +func (l *IDList) HasLayer(id id.LayerID) bool { + if l == nil || len(l.layerIDs) == 0 { + return false + } + _, ok := l.layerIDs[id] + return ok +} + +// LayerAt _ +func (l *IDList) LayerAt(index int) id.LayerID { + if l == nil || index < 0 || len(l.layers) <= index { + return id.LayerID{} + } + return l.layers[index] +} + +func (l *IDList) AtRef(index int) *id.LayerID { + if l == nil || index < 0 || len(l.layers) <= index { + return nil + } + return &l.layers[index] +} + +// FindLayerIndex _ +func (l *IDList) FindLayerIndex(id id.LayerID) int { + if l == nil { + return -1 + } + for i, l := range l.layers { + if l == id { + return i + } + } + return -1 +} + +// LayerCount _ +func (l *IDList) LayerCount() int { + if l == nil { + return 0 + } + return len(l.layers) +} + +// AddLayer _ +func (l *IDList) AddLayer(lid id.LayerID, index int) { + if l == nil || l.HasLayer(lid) { + return + } + if l.layerIDs == nil { + l.layerIDs = make(map[id.LayerID]struct{}) + } + + l.layerIDs[lid] = struct{}{} + + le := len(l.layers) + if index < 0 || le <= index { + l.layers = append(l.layers, lid) + } else { + l.layers = append(l.layers[:index], append([]id.LayerID{lid}, l.layers[index:]...)...) + } +} + +// AppendLayers _ +func (l *IDList) AppendLayers(lid ...id.LayerID) *IDList { + if l == nil { + return NewIDList(lid) + } + for _, i := range lid { + l.AddLayer(i, -1) + } + return l +} + +func (l *IDList) Merge(l2 *IDList) { + l.AppendLayers(l2.layers...) +} + +func (l *IDList) Clone() (l2 *IDList) { + if l == nil { + return l2 + } + return NewIDList(l.layers) +} + +// AddOrMoveLayer _ +func (l *IDList) AddOrMoveLayer(lid id.LayerID, index int) { + if l == nil { + return + } + + le := len(l.layers) + if index < 0 || le <= index { + index = le + } + + if l.HasLayer(lid) { + l.MoveLayer(lid, index) + return + } + l.layers = append(l.layers[:index], append([]id.LayerID{lid}, l.layers[index:]...)...) 
+ l.layerIDs[lid] = struct{}{} +} + +// MoveLayer _ +func (l *IDList) MoveLayer(id id.LayerID, toIndex int) { + if l == nil { + return + } + + for fromIndex, layer := range l.layers { + if layer == id { + l.MoveLayerAt(fromIndex, toIndex) + return + } + } +} + +// MoveLayerAt _ +func (l *IDList) MoveLayerAt(fromIndex int, toIndex int) { + if l == nil || len(l.layers) == 0 { + return + } + + le := len(l.layers) + if fromIndex < 0 || le <= fromIndex { + return + } + if toIndex < 0 || le <= toIndex { + toIndex = le - 1 + } + if fromIndex == toIndex { + return + } + + f := l.layers[fromIndex] + l.layers = append(l.layers[:fromIndex], l.layers[fromIndex+1:]...) + newSlice := make([]id.LayerID, toIndex+1) + copy(newSlice, l.layers[:toIndex]) + newSlice[toIndex] = f + l.layers = append(newSlice, l.layers[toIndex:]...) +} + +// RemoveLayer _ +func (l *IDList) RemoveLayer(id id.LayerID) { + if l == nil { + return + } + + for index, layer := range l.layers { + if layer == id { + l.RemoveLayerAt(index) + return + } + } +} + +// RemoveLayerAt _ +func (l *IDList) RemoveLayerAt(index int) { + if l == nil || len(l.layers) == 0 { + return + } + + le := len(l.layers) + if index < 0 || le <= index { + return + } + + layer := l.layers[index] + var layers []id.LayerID + if index == le { + layers = []id.LayerID{} + } else { + layers = l.layers[index+1:] + } + l.layers = append(l.layers[:index], layers...) 
+ delete(l.layerIDs, layer) +} + +// Empty _ +func (l *IDList) Empty() { + if l == nil { + return + } + + l.layers = nil + l.layerIDs = nil +} diff --git a/pkg/layer/id_list_test.go b/pkg/layer/id_list_test.go new file mode 100644 index 000000000..5edcf2bfd --- /dev/null +++ b/pkg/layer/id_list_test.go @@ -0,0 +1,133 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + + "github.com/stretchr/testify/assert" +) + +func TestLayerIDList(t *testing.T) { + l1 := id.LayerID(id.New()) + l2 := id.LayerID(id.New()) + l3 := id.LayerID(id.New()) + l4 := id.LayerID(id.New()) + rawLayers := []id.LayerID{l1, l3} + layers := NewIDList(rawLayers) + + assert.NotNil(t, layers) + + // 1, 3 + + assert.Equal(t, rawLayers, layers.Layers()) + assert.Equal(t, 2, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) + assert.True(t, layers.HasLayer(l1)) + assert.False(t, layers.HasLayer(l2)) + assert.True(t, layers.HasLayer(l3)) + assert.False(t, layers.HasLayer(l4)) + + // 1, 2, 3 + + layers.AddLayer(l2, 1) + assert.Equal(t, 3, layers.LayerCount()) + assert.True(t, layers.HasLayer(l2)) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + + // 1, 2, 3 (ignored) + + layers.AddLayer(l1, 2) + assert.Equal(t, 3, layers.LayerCount()) + assert.True(t, layers.HasLayer(l2)) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + + // 1, 2, 3, 4 + + layers.AddLayer(l4, 10) + assert.Equal(t, 4, layers.LayerCount()) + assert.True(t, layers.HasLayer(l4)) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 3, 1, 2, 4 + + layers.MoveLayer(l3, 0) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l3, layers.LayerAt(0)) + assert.Equal(t, l1, 
layers.LayerAt(1)) + assert.Equal(t, l2, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 2, 4, 3 + + layers.MoveLayer(l3, 3) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l4, layers.LayerAt(2)) + assert.Equal(t, l3, layers.LayerAt(3)) + + // 1, 2, 3, 4 + + layers.MoveLayer(l4, 4) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 2, 3, 4 + + layers.MoveLayer(l4, 10) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 2, 3, 4 + + layers.MoveLayer(l4, -1) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + + // 1, 3, 4 + + layers.RemoveLayer(l2) + assert.Equal(t, 3, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) + assert.Equal(t, l4, layers.LayerAt(2)) + assert.False(t, layers.HasLayer(l2)) + + // 1, 3, 4, 2 + + layers.AddOrMoveLayer(l2, 3) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) + assert.Equal(t, l4, layers.LayerAt(2)) + assert.Equal(t, l2, layers.LayerAt(3)) + assert.True(t, layers.HasLayer(l2)) + + // 1, 2, 3, 4 + + layers.AddOrMoveLayer(l2, 1) + assert.Equal(t, 4, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l2, layers.LayerAt(1)) + assert.Equal(t, l3, layers.LayerAt(2)) + assert.Equal(t, l4, layers.LayerAt(3)) + assert.True(t, layers.HasLayer(l2)) +} diff --git a/pkg/layer/infobox.go 
b/pkg/layer/infobox.go new file mode 100644 index 000000000..28907e8d9 --- /dev/null +++ b/pkg/layer/infobox.go @@ -0,0 +1,163 @@ +package layer + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Infobox struct { + property id.PropertyID + fields []*InfoboxField + // for checking duplication + ids map[id.InfoboxFieldID]struct{} +} + +func NewInfobox(fields []*InfoboxField, p id.PropertyID) *Infobox { + infobox := Infobox{ + property: p, + fields: make([]*InfoboxField, len(fields)), + ids: make(map[id.InfoboxFieldID]struct{}, len(fields)), + } + for i, f := range fields { + if f == nil { + continue + } + infobox.fields[i] = f + infobox.ids[f.ID()] = struct{}{} + } + return &infobox +} + +func (i *Infobox) Property() id.PropertyID { + return i.property +} + +func (i *Infobox) PropertyRef() *id.PropertyID { + if i == nil { + return nil + } + pid := i.property + return &pid +} + +func (i *Infobox) Fields() []*InfoboxField { + if i == nil { + return nil + } + return append([]*InfoboxField{}, i.fields...) +} + +func (i *Infobox) Field(field id.InfoboxFieldID) *InfoboxField { + for _, f := range i.fields { + if f.ID() == field { + return f + } + } + return nil +} + +func (i *Infobox) FieldAt(index int) *InfoboxField { + if i == nil || index < 0 || len(i.fields) <= index { + return nil + } + return i.fields[index] +} + +func (i *Infobox) Has(id id.InfoboxFieldID) bool { + _, ok := i.ids[id] + return ok +} + +func (i *Infobox) Count() int { + return len(i.fields) +} + +func (i *Infobox) Add(field *InfoboxField, index int) { + l := len(i.fields) + if index < 0 || l <= index { + index = l + } + + id := field.ID() + if i.Has(id) { + return + } + i.fields = append(i.fields[:index], append([]*InfoboxField{field}, i.fields[index:]...)...) 
+ i.ids[id] = struct{}{} +} + +func (i *Infobox) Move(field id.InfoboxFieldID, toIndex int) { + for fromIndex, f := range i.fields { + if f.ID() == field { + i.MoveAt(fromIndex, toIndex) + return + } + } +} + +func (i *Infobox) MoveAt(fromIndex int, toIndex int) { + l := len(i.fields) + if fromIndex < 0 || l <= fromIndex { + return + } + if toIndex < 0 || l <= toIndex { + toIndex = l - 1 + } + f := i.fields[fromIndex] + + i.fields = append(i.fields[:fromIndex], i.fields[fromIndex+1:]...) + newSlice := make([]*InfoboxField, toIndex+1) + copy(newSlice, i.fields[:toIndex]) + newSlice[toIndex] = f + i.fields = append(newSlice, i.fields[toIndex:]...) +} + +func (i *Infobox) Remove(field id.InfoboxFieldID) { + for index, f := range i.fields { + if f.ID() == field { + i.RemoveAt(index) + return + } + } +} + +func (i *Infobox) RemoveAt(index int) { + l := len(i.fields) + if index < 0 || l <= index { + index = l + } + + f := i.fields[index] + if index == l { + i.fields = i.fields[:index] + } else { + i.fields = append(i.fields[:index], i.fields[index+1:]...) 
+ } + delete(i.ids, f.ID()) +} + +func (i *Infobox) ValidateProperties(pm property.Map) error { + if i == nil || pm == nil { + return nil + } + + lp := pm[i.property] + if lp == nil { + return errors.New("property does not exist") + } + if lp.Schema() != builtin.PropertySchemaIDInfobox { + return errors.New("property has a invalid schema") + } + + for i, f := range i.fields { + if err := f.ValidateProperty(pm); err != nil { + return fmt.Errorf("field[%d](%s): %w", i, f.ID(), err) + } + } + + return nil +} diff --git a/pkg/layer/infobox_field.go b/pkg/layer/infobox_field.go new file mode 100644 index 000000000..48a2e1b6c --- /dev/null +++ b/pkg/layer/infobox_field.go @@ -0,0 +1,56 @@ +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/idgen --name InfoboxField --output ../id + +package layer + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type InfoboxField struct { + id id.InfoboxFieldID + plugin id.PluginID + extension id.PluginExtensionID + property id.PropertyID +} + +func (i *InfoboxField) ID() id.InfoboxFieldID { + return i.id +} + +func (i *InfoboxField) Plugin() id.PluginID { + return i.plugin +} + +func (i *InfoboxField) Extension() id.PluginExtensionID { + return i.extension +} + +func (i *InfoboxField) Property() id.PropertyID { + return i.property +} + +func (i *InfoboxField) PropertyRef() *id.PropertyID { + if i == nil { + return nil + } + return i.property.Ref() +} + +func (i *InfoboxField) ValidateProperty(pm property.Map) error { + if i == nil || pm == nil { + return nil + } + + lp := pm[i.property] + if lp == nil { + return errors.New("property does not exist") + } + if lp.Schema() != id.MustPropertySchemaIDFromExtension(i.plugin, i.extension) { + return errors.New("property has a invalid schema") + } + + return nil +} diff --git a/pkg/layer/infobox_field_builder.go b/pkg/layer/infobox_field_builder.go new file mode 100644 index 000000000..4a8af2c1f --- 
/dev/null +++ b/pkg/layer/infobox_field_builder.go @@ -0,0 +1,56 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// InfoboxFieldBuilder _ +type InfoboxFieldBuilder struct { + i *InfoboxField +} + +func NewInfoboxField() *InfoboxFieldBuilder { + return &InfoboxFieldBuilder{i: &InfoboxField{}} +} + +func (b *InfoboxFieldBuilder) Build() (*InfoboxField, error) { + if id.ID(b.i.id).IsNil() || + string(b.i.extension) == "" || + id.ID(b.i.property).IsNil() { + return nil, id.ErrInvalidID + } + return b.i, nil +} + +func (b *InfoboxFieldBuilder) MustBuild() *InfoboxField { + i, err := b.Build() + if err != nil { + panic(err) + } + return i +} + +func (b *InfoboxFieldBuilder) ID(id id.InfoboxFieldID) *InfoboxFieldBuilder { + b.i.id = id + return b +} + +func (b *InfoboxFieldBuilder) NewID() *InfoboxFieldBuilder { + b.i.id = id.InfoboxFieldID(id.New()) + return b +} + +func (b *InfoboxFieldBuilder) Plugin(plugin id.PluginID) *InfoboxFieldBuilder { + b.i.plugin = plugin + return b +} + +func (b *InfoboxFieldBuilder) Extension(extension id.PluginExtensionID) *InfoboxFieldBuilder { + b.i.extension = extension + return b +} + +func (b *InfoboxFieldBuilder) Property(p id.PropertyID) *InfoboxFieldBuilder { + b.i.property = p + return b +} diff --git a/pkg/layer/infobox_test.go b/pkg/layer/infobox_test.go new file mode 100644 index 000000000..462c99f5a --- /dev/null +++ b/pkg/layer/infobox_test.go @@ -0,0 +1,52 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestInfobox(t *testing.T) { + f1 := &InfoboxField{id: id.NewInfoboxFieldID()} + f2 := &InfoboxField{id: id.NewInfoboxFieldID()} + f3 := &InfoboxField{id: id.NewInfoboxFieldID()} + f4 := &InfoboxField{id: id.NewInfoboxFieldID()} + fields := []*InfoboxField{f1, f2, f3} + infobox := NewInfobox(fields, id.NewPropertyID()) + + assert.NotNil(t, infobox) + assert.Equal(t, fields, infobox.Fields()) + 
assert.Equal(t, f1, infobox.Field(f1.ID())) + assert.Equal(t, f3, infobox.FieldAt(2)) + assert.Equal(t, 3, infobox.Count()) + assert.True(t, infobox.Has(f1.ID())) + assert.False(t, infobox.Has(f4.ID())) + + infobox.Add(f4, 3) + assert.True(t, infobox.Has(f4.ID())) + assert.Equal(t, 4, infobox.Count()) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f2, infobox.FieldAt(1)) + assert.Equal(t, f3, infobox.FieldAt(2)) + assert.Equal(t, f4, infobox.FieldAt(3)) + + infobox.Move(f4.ID(), 2) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f2, infobox.FieldAt(1)) + assert.Equal(t, f4, infobox.FieldAt(2)) + assert.Equal(t, f3, infobox.FieldAt(3)) + + infobox.Remove(f2.ID()) + assert.Equal(t, 3, infobox.Count()) + assert.False(t, infobox.Has(f2.ID())) + assert.Nil(t, infobox.Field(f2.ID())) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f4, infobox.FieldAt(1)) + assert.Equal(t, f3, infobox.FieldAt(2)) + + infobox.Move(f4.ID(), 2) + assert.Equal(t, f1, infobox.FieldAt(0)) + assert.Equal(t, f3, infobox.FieldAt(1)) + assert.Equal(t, f4, infobox.FieldAt(2)) +} diff --git a/pkg/layer/initializer.go b/pkg/layer/initializer.go new file mode 100644 index 000000000..78aefb589 --- /dev/null +++ b/pkg/layer/initializer.go @@ -0,0 +1,292 @@ +package layer + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/builtin" + perror "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type InitializerResult struct { + Root id.LayerID + Layers Map + Properties property.Map +} + +func (r InitializerResult) RootLayer() Layer { + return r.Layers.Layer(r.Root) +} + +func (r InitializerResult) RootLayerRef() *Layer { + return r.Layers[r.Root] +} + +func (r InitializerResult) RootLayerGroup() *Group { + return r.Layers.Group(r.Root) +} + +func (r InitializerResult) RootLayerItem() *Item { + return r.Layers.Item(r.Root) +} + +type Initializer struct 
{ + ID *id.LayerID `json:"id"` + Plugin *id.PluginID `json:"plugin"` + Extension *id.PluginExtensionID `json:"extension"` + Name string `json:"name"` + Infobox *InitializerInfobox `json:"infobox"` + PropertyID *id.PropertyID `json:"propertyId"` + Property *property.Initializer `json:"property"` + Layers []*Initializer `json:"layers"` + LayerIDs []id.LayerID `json:"layerIds"` + IsVisible *bool `json:"isVisible"` + LinkedDatasetSchema *id.DatasetSchemaID `json:"linkedDatasetSchema"` + LinkedDataset *id.DatasetID `json:"linkedDataset"` +} + +func (i *Initializer) Clone() *Initializer { + if i == nil { + return nil + } + + var isVisible *bool + if i.IsVisible != nil { + isVisible2 := *i.IsVisible + isVisible = &isVisible2 + } + + var layers []*Initializer + if i.Layers != nil { + layers = make([]*Initializer, 0, len(i.Layers)) + for _, l := range i.Layers { + layers = append(layers, l.Clone()) + } + } + + var layerIDs []id.LayerID + if len(i.LayerIDs) > 0 { + layerIDs = append([]id.LayerID{}, i.LayerIDs...) + } + + return &Initializer{ + ID: i.ID.CopyRef(), + Plugin: i.Plugin.CopyRef(), + Extension: i.Extension.CopyRef(), + Name: i.Name, + Infobox: i.Infobox.Clone(), + PropertyID: i.PropertyID.CopyRef(), + Property: i.Property.Clone(), + Layers: layers, + LayerIDs: layerIDs, + IsVisible: isVisible, + LinkedDatasetSchema: i.LinkedDatasetSchema.CopyRef(), + LinkedDataset: i.LinkedDataset.CopyRef(), + } +} + +func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { + if i == nil { + return + } + + ib, pm, err2 := i.Infobox.Infobox(sid) + if err2 != nil { + err = perror.New("infobox", err2) + return + } + r.Properties = r.Properties.Merge(pm) + + lid := i.ID + if i.ID == nil { + lid = id.NewLayerID().Ref() + } + + pid := i.PropertyID + lp, err2 := i.Property.Property(sid) + if err2 != nil { + err = perror.New("property", err2) + return + } + if lp != nil { + pid = lp.IDRef() + r.Properties = r.Properties.Add(lp) + } + + lay := New(). + ID(*lid). 
+ Plugin(i.Plugin). + Extension(i.Extension). + Infobox(ib). + Scene(sid). + Property(pid). + Name(i.Name). + IsVisibleRef(i.IsVisible) + + var l Layer + if i.Layers != nil { + layers := NewIDList(nil) + + for i, lay2 := range i.Layers { + r2, err2 := lay2.Layer(sid) + if err2 != nil { + err = perror.New(fmt.Sprint(i), err2) + return + } + if rootLayer := r2.RootLayer(); rootLayer != nil { + layers = layers.AppendLayers(rootLayer.ID()) + r.Layers = r.Layers.Merge(r2.Layers) + r.Properties = r.Properties.Merge(r2.Properties) + } + } + + l, err = lay.Group().LinkedDatasetSchema(i.LinkedDatasetSchema).Layers(layers).Build() + } else if i.LayerIDs != nil { + l, err = lay.Group().LinkedDatasetSchema(i.LinkedDatasetSchema).Layers(NewIDList(i.LayerIDs)).Build() + } else { + l, err = lay.Item().LinkedDataset(i.LinkedDataset).Build() + } + + if err != nil { + err = fmt.Errorf("failed to initialize layer: %w", err) + return + } + + r.Layers = r.Layers.Add(&l) + r.Root = l.ID() + return +} + +func (i *Initializer) MustBeLayer(sid id.SceneID) InitializerResult { + r, err := i.Layer(sid) + if err != nil { + panic(err) + } + return r +} + +type InitializerInfobox struct { + PropertyID *id.PropertyID `json:"propertyId"` + Property *property.Initializer `json:"property"` + Fields []*InitializerInfoboxField `json:"fields"` +} + +func (i *InitializerInfobox) Clone() *InitializerInfobox { + if i == nil { + return nil + } + + var fields []*InitializerInfoboxField + if i.Fields != nil { + fields = make([]*InitializerInfoboxField, 0, len(i.Fields)) + for _, f := range i.Fields { + fields = append(fields, f.Clone()) + } + } + + return &InitializerInfobox{ + PropertyID: i.PropertyID.CopyRef(), + Property: i.Property.Clone(), + Fields: fields, + } +} + +func (i *InitializerInfobox) Infobox(scene id.SceneID) (*Infobox, property.Map, error) { + if i == nil { + return nil, nil, nil + } + + pm := property.Map{} + var fields []*InfoboxField + if i.Fields != nil { + fields = 
make([]*InfoboxField, 0, len(i.Fields)) + for i, f := range i.Fields { + ibf, ibfp, err := f.InfoboxField(scene) + if err != nil { + return nil, nil, perror.New(fmt.Sprint(i), err) + } + fields = append(fields, ibf) + pm = pm.Add(ibfp) + } + } + + var ibp *property.Property + ibpid := i.PropertyID + if ibpid == nil { + var err error + ibp, err = i.Property.PropertyIncludingEmpty(scene, builtin.PropertySchemaIDInfobox) + if err != nil { + return nil, nil, perror.New("property", err) + } + if ibp != nil { + ibpid = ibp.IDRef() + pm = pm.Add(ibp) + } + } + if ibpid == nil { + return nil, nil, errors.New("infobox property id is empty") + } + + return NewInfobox(fields, *ibpid), pm, nil +} + +type InitializerInfoboxField struct { + ID *id.InfoboxFieldID `json:"id"` + Plugin id.PluginID `json:"plugin"` + Extension id.PluginExtensionID `json:"extension"` + PropertyID *id.PropertyID `json:"propertyId"` + Property *property.Initializer `json:"property"` +} + +func (i *InitializerInfoboxField) Clone() *InitializerInfoboxField { + if i == nil { + return nil + } + + return &InitializerInfoboxField{ + ID: i.ID.CopyRef(), + Plugin: i.Plugin, + Extension: i.Extension, + PropertyID: i.PropertyID.CopyRef(), + Property: i.Property.Clone(), + } +} + +func (i *InitializerInfoboxField) InfoboxField(scene id.SceneID) (*InfoboxField, *property.Property, error) { + if i == nil { + return nil, nil, nil + } + + psid, err := id.PropertySchemaIDFromExtension(i.Plugin, i.Extension) + if err != nil { + return nil, nil, err + } + + fid := i.ID + if i.ID == nil { + fid = id.NewInfoboxFieldID().Ref() + } + + pid := i.PropertyID + var p *property.Property + if pid == nil { + p, err = i.Property.PropertyIncludingEmpty(scene, psid) + if err != nil { + return nil, nil, perror.New("property", err) + } + if p != nil { + pid = p.IDRef() + } + } + if pid == nil { + return nil, nil, errors.New("infobox field property id is empty") + } + + f, err := 
NewInfoboxField().ID(*fid).Plugin(i.Plugin).Extension(i.Extension).Property(*pid).Build() + if err != nil { + return nil, nil, err + } + return f, p, nil +} diff --git a/pkg/layer/initializer/initializer.go b/pkg/layer/initializer/initializer.go new file mode 100644 index 000000000..d510bc825 --- /dev/null +++ b/pkg/layer/initializer/initializer.go @@ -0,0 +1,64 @@ +package initializer + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type LayerItem struct { + SceneID id.SceneID + ParentLayerID id.LayerID + Plugin *plugin.Plugin + ExtensionID *id.PluginExtensionID + Index *int + LinkedDatasetID *id.DatasetID + Name string + LinkablePropertySchema *property.Schema + LatLng *property.LatLng +} + +var ( + ErrExtensionTypeMustBePrimitive error = errors.New("extension type must be primitive") +) + +func (i LayerItem) Initialize() (*layer.Item, *property.Property, error) { + builder := layer.NewItem().NewID().Scene(i.SceneID) + + var p *property.Property + var err error + if i.Plugin != nil && i.ExtensionID != nil { + extension := i.Plugin.Extension(*i.ExtensionID) + if extension == nil || extension.Type() != plugin.ExtensionTypePrimitive { + return nil, nil, ErrExtensionTypeMustBePrimitive + } + + p, err = property.New(). + NewID(). + Schema(extension.Schema()). + Scene(i.SceneID). + Build() + + if err != nil { + return nil, nil, err + } + + p.UpdateLinkableValue(i.LinkablePropertySchema, property.ValueTypeLatLng.ValueFromUnsafe(i.LatLng)) + + builder. + Plugin(i.Plugin.ID().Ref()). + Extension(i.ExtensionID). + Property(p.ID().Ref()). 
+ Name(i.Name) + } + + layerItem, err := builder.LinkedDataset(i.LinkedDatasetID).Build() + if err != nil { + return nil, nil, err + } + + return layerItem, p, nil +} diff --git a/pkg/layer/initializer/initializer_test.go b/pkg/layer/initializer/initializer_test.go new file mode 100644 index 000000000..30aee20a4 --- /dev/null +++ b/pkg/layer/initializer/initializer_test.go @@ -0,0 +1,82 @@ +package initializer + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/stretchr/testify/assert" +) + +func TestInitialize(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + eid := id.PluginExtensionID("foo") + eid2 := id.PluginExtensionID("foo2") + e := plugin.NewExtension(). + ID("foo"). + Description(i18n.StringFrom("foo/des")). + Name(i18n.StringFrom("foo/name")). + Schema(ps). + Type(plugin.ExtensionTypePrimitive). + MustBuild() + e2 := plugin.NewExtension(). + ID("foo2"). + Type("not primitive"). + MustBuild() + es := append(make([]*plugin.Extension, 0), e) + es = append(es, e2) + p := plugin.New(). + ID(id.MustPluginID("xxx#1.1.1")). + Schema(&ps). + Extensions(es). 
+ MustBuild() + s := id.NewSceneID() + testCases := []struct { + name string + sceneID *id.SceneID + parentLayerID *id.LayerID + plugin *plugin.Plugin + extID *id.PluginExtensionID + err error + }{ + { + name: "Success", + sceneID: &s, + parentLayerID: &lid, + plugin: p, + extID: &eid, + err: nil, + }, + { + name: "extension type error", + sceneID: &s, + parentLayerID: &lid, + plugin: p, + extID: &eid2, + err: ErrExtensionTypeMustBePrimitive, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + layerItem, property, err := LayerItem{ + SceneID: *tc.sceneID, + ParentLayerID: *tc.parentLayerID, + Plugin: tc.plugin, + ExtensionID: tc.extID, + Name: tc.name, + }.Initialize() + if tc.err == nil { + assert.NoError(tt, err) + assert.NotNil(tt, layerItem) + assert.NotNil(tt, property) + } else { + assert.True(t, errors.As(err, &tc.err)) + } + }) + } +} diff --git a/pkg/layer/initializer_test.go b/pkg/layer/initializer_test.go new file mode 100644 index 000000000..960bf26c7 --- /dev/null +++ b/pkg/layer/initializer_test.go @@ -0,0 +1,188 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestInitializer_Clone(t *testing.T) { + isVisible := false + i := &Initializer{ + ID: id.NewLayerID().Ref(), + Plugin: id.MustPluginID("reearth").Ref(), + Extension: id.PluginExtensionID("marker").Ref(), + Name: "hoge", + Infobox: &InitializerInfobox{}, + PropertyID: id.NewPropertyID().Ref(), + Property: &property.Initializer{ + ID: id.NewPropertyID().Ref(), + }, + Layers: []*Initializer{{}}, + IsVisible: &isVisible, + LinkedDatasetSchema: id.NewDatasetSchemaID().Ref(), + LinkedDataset: id.NewDatasetID().Ref(), + } + + actual := i.Clone() + + assert.NotSame(t, i, actual) + assert.NotSame(t, i.ID, actual.ID) + assert.NotSame(t, i.Plugin, actual.Plugin) + assert.NotSame(t, i.Extension, 
actual.Extension) + assert.NotSame(t, i.Infobox, actual.Infobox) + assert.NotSame(t, i.PropertyID, actual.PropertyID) + assert.NotSame(t, i.Property, actual.Property) + assert.NotSame(t, i.Layers, actual.Layers) + assert.NotSame(t, i.Layers[0], actual.Layers[0]) + assert.NotSame(t, i.IsVisible, actual.IsVisible) + assert.NotSame(t, i.LinkedDatasetSchema, actual.LinkedDatasetSchema) + assert.NotSame(t, i.LinkedDataset, actual.LinkedDataset) + assert.Equal(t, i, actual) +} + +func TestInitializer_Layer(t *testing.T) { + sid := id.NewSceneID() + isVisible := false + i := &Initializer{ + ID: id.NewLayerID().Ref(), + Plugin: id.MustPluginID("reearth").Ref(), + Extension: id.PluginExtensionID("marker").Ref(), + Name: "hoge", + Infobox: &InitializerInfobox{ + PropertyID: id.NewPropertyID().Ref(), + }, + PropertyID: id.NewPropertyID().Ref(), + IsVisible: &isVisible, + LinkedDatasetSchema: id.NewDatasetSchemaID().Ref(), + LinkedDataset: id.NewDatasetID().Ref(), + Layers: []*Initializer{{ + ID: id.NewLayerID().Ref(), + Layers: []*Initializer{{ + ID: id.NewLayerID().Ref(), + }}, + }}, + } + + expected1 := New(). + ID(*i.ID). + Scene(sid). + Plugin(i.Plugin). + Extension(i.Extension). + Name(i.Name). + IsVisibleRef(i.IsVisible). + Infobox(NewInfobox(nil, *i.Infobox.PropertyID)). + Property(i.PropertyID). + Group(). + Layers(NewIDList([]id.LayerID{*i.Layers[0].ID})). + LinkedDatasetSchema(i.LinkedDatasetSchema). 
+ MustBuild() + expected2 := New().ID(*i.Layers[0].ID).Scene(sid).Group().Layers(NewIDList([]id.LayerID{*i.Layers[0].Layers[0].ID})).MustBuild() + expected3 := New().ID(*i.Layers[0].Layers[0].ID).Scene(sid).Item().MustBuild() + + actual, err := i.Layer(sid) + assert.NoError(t, err) + assert.Equal(t, Map{ + expected1.ID(): expected1.LayerRef(), + expected2.ID(): expected2.LayerRef(), + expected3.ID(): expected3.LayerRef(), + }, actual.Layers) + + // check if a new id is generated + i.ID = nil + actual, err = i.Layer(sid) + assert.NoError(t, err) + assert.False(t, actual.RootLayer().ID().IsNil()) +} + +func TestInitializerInfobox_Clone(t *testing.T) { + i := &InitializerInfobox{ + PropertyID: id.NewPropertyID().Ref(), + Property: &property.Initializer{ + ID: id.NewPropertyID().Ref(), + }, + Fields: []*InitializerInfoboxField{{ + ID: id.NewInfoboxFieldID().Ref(), + Plugin: id.MustPluginID("reearth"), + Extension: id.PluginExtensionID("marker"), + PropertyID: id.NewPropertyID().Ref(), + }}, + } + + actual := i.Clone() + + assert.NotSame(t, i, actual) + assert.NotSame(t, i.Property, actual.Property) + assert.NotSame(t, i.Fields, actual.Fields) + assert.NotSame(t, i.Fields[0], actual.Fields[0]) + assert.Equal(t, i, actual) +} + +func TestInitializerInfobox_Infobox(t *testing.T) { + sid := id.NewSceneID() + i := &InitializerInfobox{ + PropertyID: id.NewPropertyID().Ref(), + Fields: []*InitializerInfoboxField{{ + ID: id.NewInfoboxFieldID().Ref(), + Plugin: id.MustPluginID("reearth"), + Extension: id.PluginExtensionID("marker"), + PropertyID: id.NewPropertyID().Ref(), + }}, + } + + expected := NewInfobox([]*InfoboxField{ + NewInfoboxField(). + ID(*i.Fields[0].ID). + Plugin(i.Fields[0].Plugin). + Extension(i.Fields[0].Extension). + Property(*i.Fields[0].PropertyID). 
+ MustBuild(), + }, *i.PropertyID) + actual, _, err := i.Infobox(sid) + + assert.NoError(t, err) + assert.Equal(t, expected, actual) +} + +func TestInitializerInfoboxField_Clone(t *testing.T) { + i := &InitializerInfoboxField{ + ID: id.NewInfoboxFieldID().Ref(), + Plugin: id.MustPluginID("reearth"), + Extension: id.PluginExtensionID("marker"), + PropertyID: id.NewPropertyID().Ref(), + Property: &property.Initializer{ + ID: id.NewPropertyID().Ref(), + }, + } + + actual := i.Clone() + + assert.NotSame(t, i, actual) + assert.NotSame(t, i.Property, actual.Property) + assert.NotSame(t, i.ID, actual.ID) + assert.Equal(t, i, actual) +} + +func TestInitializerInfoboxField_InfoboxField(t *testing.T) { + sid := id.NewSceneID() + i := &InitializerInfoboxField{ + ID: id.NewInfoboxFieldID().Ref(), + Plugin: id.MustPluginID("reearth"), + Extension: id.PluginExtensionID("marker"), + PropertyID: id.NewPropertyID().Ref(), + } + + expected := NewInfoboxField().ID(*i.ID).Plugin(i.Plugin).Extension(i.Extension).Property(*i.PropertyID).MustBuild() + actual, _, err := i.InfoboxField(sid) + + assert.NoError(t, err) + assert.Equal(t, expected, actual) + + // check if a new id is generated + i.ID = nil + actual, _, err = i.InfoboxField(sid) + assert.NoError(t, err) + assert.False(t, actual.ID().IsNil()) +} diff --git a/pkg/layer/item.go b/pkg/layer/item.go new file mode 100644 index 000000000..6cec341ca --- /dev/null +++ b/pkg/layer/item.go @@ -0,0 +1,151 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Item struct { + layerBase + linkedDataset *id.DatasetID +} + +func (l *Item) ID() id.LayerID { + return l.layerBase.ID() +} + +func (l *Item) IDRef() *id.LayerID { + if l == nil { + return nil + } + return l.layerBase.IDRef() +} + +func (l *Item) Name() string { + if l == nil { + return "" + } + return l.layerBase.Name() +} + +func (l *Item) IsVisible() bool { + if l == nil { + return false + } + 
return l.layerBase.IsVisible() +} + +func (l *Item) Plugin() *id.PluginID { + if l == nil { + return nil + } + return l.layerBase.Plugin() +} + +func (l *Item) Extension() *id.PluginExtensionID { + if l == nil { + return nil + } + return l.layerBase.Extension() +} + +func (l *Item) UsesPlugin() bool { + if l == nil { + return false + } + return l.layerBase.UsesPlugin() +} + +func (l *Item) Property() *id.PropertyID { + if l == nil { + return nil + } + return l.layerBase.Property() +} + +func (l *Item) Infobox() *Infobox { + if l == nil { + return nil + } + return l.layerBase.Infobox() +} + +func (l *Item) Rename(name string) { + if l == nil { + return + } + l.layerBase.Rename(name) +} + +func (l *Item) SetVisible(visible bool) { + if l == nil { + return + } + l.layerBase.SetVisible(visible) +} + +func (l *Item) SetInfobox(infobox *Infobox) { + if l == nil { + return + } + l.layerBase.SetInfobox(infobox) +} + +func (l *Item) SetPlugin(plugin *id.PluginID) { + if l == nil { + return + } + l.layerBase.SetPlugin(plugin) +} + +func (l *Item) IsLinked() bool { + if l == nil { + return false + } + return l.linkedDataset != nil +} + +func (l *Item) LinkedDataset() *id.DatasetID { + if l == nil || l.linkedDataset == nil { + return nil + } + id := *l.linkedDataset + return &id +} + +func (l *Item) Link(ds id.DatasetID) { + if l == nil { + return + } + ds2 := ds + l.linkedDataset = &ds2 +} + +func (l *Item) Unlink() { + if l == nil { + return + } + l.linkedDataset = nil +} + +func (l *Item) LayerRef() *Layer { + if l == nil { + return nil + } + var layer Layer = l + return &layer +} + +func (l *Item) Properties() []id.PropertyID { + if l == nil { + return nil + } + return l.layerBase.Properties() +} + +func (l *Item) ValidateProperties(pm property.Map) error { + if l == nil { + return nil + } + return l.layerBase.ValidateProperties(pm) +} diff --git a/pkg/layer/item_builder.go b/pkg/layer/item_builder.go new file mode 100644 index 000000000..84d999efc --- /dev/null +++ 
b/pkg/layer/item_builder.go @@ -0,0 +1,102 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +func ItemFromLayer(l Layer) *Item { + li, ok := l.(*Item) + if !ok { + return nil + } + return li +} + +func ItemFromLayerRef(l *Layer) *Item { + if l == nil { + return nil + } + li, ok := (*l).(*Item) + if !ok { + return nil + } + return li +} + +type ItemBuilder struct { + l *Item +} + +func NewItem() *ItemBuilder { + return &ItemBuilder{l: &Item{layerBase: layerBase{visible: true}}} +} + +func (b *ItemBuilder) Build() (*Item, error) { + if id.ID(b.l.id).IsNil() { + return nil, id.ErrInvalidID + } + return b.l, nil +} + +func (b *ItemBuilder) MustBuild() *Item { + item, err := b.Build() + if err != nil { + panic(err) + } + return item +} + +func (b *ItemBuilder) base(layer layerBase) *ItemBuilder { + b.l.layerBase = layer + return b +} + +func (b *ItemBuilder) ID(id id.LayerID) *ItemBuilder { + b.l.id = id + return b +} + +func (b *ItemBuilder) NewID() *ItemBuilder { + b.l.id = id.NewLayerID() + return b +} + +func (b *ItemBuilder) Scene(s id.SceneID) *ItemBuilder { + b.l.scene = s + return b +} + +func (b *ItemBuilder) Name(name string) *ItemBuilder { + b.l.name = name + return b +} + +func (b *ItemBuilder) IsVisible(visible bool) *ItemBuilder { + b.l.visible = visible + return b +} + +func (b *ItemBuilder) Plugin(plugin *id.PluginID) *ItemBuilder { + b.l.plugin = plugin.CopyRef() + return b +} + +func (b *ItemBuilder) Extension(extension *id.PluginExtensionID) *ItemBuilder { + b.l.extension = extension.CopyRef() + return b +} + +func (b *ItemBuilder) Property(p *id.PropertyID) *ItemBuilder { + b.l.property = p.CopyRef() + return b +} + +func (b *ItemBuilder) Infobox(infobox *Infobox) *ItemBuilder { + b.l.infobox = infobox + return b +} + +func (b *ItemBuilder) LinkedDataset(linkedDataset *id.DatasetID) *ItemBuilder { + b.l.linkedDataset = linkedDataset.CopyRef() + return b +} diff --git a/pkg/layer/item_test.go b/pkg/layer/item_test.go 
new file mode 100644 index 000000000..a803e2dec --- /dev/null +++ b/pkg/layer/item_test.go @@ -0,0 +1,3 @@ +package layer + +var _ Layer = &Item{} diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go new file mode 100644 index 000000000..921dfd997 --- /dev/null +++ b/pkg/layer/layer.go @@ -0,0 +1,227 @@ +package layer + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Layer interface { + ID() id.LayerID + Name() string + IsVisible() bool + Plugin() *id.PluginID + Extension() *id.PluginExtensionID + UsesPlugin() bool + Property() *id.PropertyID + HasInfobox() bool + Infobox() *Infobox + Scene() id.SceneID + Rename(string) + SetVisible(bool) + SetInfobox(*Infobox) + SetPlugin(*id.PluginID) + Properties() []id.PropertyID + ValidateProperties(property.Map) error +} + +func ToLayerGroup(l Layer) *Group { + if lg, ok := l.(*Group); ok { + return lg + } + return nil +} + +func ToLayerGroupRef(l *Layer) *Group { + if l == nil { + return nil + } + l2 := *l + if lg, ok := l2.(*Group); ok { + return lg + } + return nil +} + +func ToLayerItem(l Layer) *Item { + if li, ok := l.(*Item); ok { + return li + } + return nil +} + +func ToLayerItemRef(l *Layer) *Item { + if l == nil { + return nil + } + l2 := *l + if li, ok := l2.(*Item); ok { + return li + } + return nil +} + +type layerBase struct { + id id.LayerID + name string + visible bool + plugin *id.PluginID + extension *id.PluginExtensionID + property *id.PropertyID + infobox *Infobox + scene id.SceneID +} + +func (l *layerBase) ID() id.LayerID { + return l.id +} + +func (l *layerBase) IDRef() *id.LayerID { + if l == nil { + return nil + } + return l.id.Ref() +} + +func (l *layerBase) Name() string { + if l == nil { + return "" + } + return l.name +} + +func (l *layerBase) IsVisible() bool { + if l == nil { + return false + } + return l.visible +} + +func (l *layerBase) UsesPlugin() bool { + if l == nil { + return false + } + return 
l.plugin != nil && l.extension != nil +} + +func (l *layerBase) Plugin() *id.PluginID { + if l == nil { + return nil + } + return l.plugin.CopyRef() +} + +func (l *layerBase) Extension() *id.PluginExtensionID { + if l == nil { + return nil + } + return l.extension.CopyRef() +} + +func (l *layerBase) Property() *id.PropertyID { + if l == nil { + return nil + } + return l.property.CopyRef() +} + +func (l *layerBase) HasInfobox() bool { + if l == nil { + return false + } + return l.infobox != nil +} + +func (l *layerBase) Infobox() *Infobox { + if l == nil { + return nil + } + return l.infobox +} + +func (l *layerBase) Scene() id.SceneID { + return l.scene +} + +func (l *layerBase) Rename(name string) { + if l == nil { + return + } + l.name = name +} + +func (l *layerBase) SetVisible(visible bool) { + if l == nil { + return + } + l.visible = visible +} + +func (l *layerBase) SetInfobox(infobox *Infobox) { + if l == nil { + return + } + l.infobox = infobox +} + +func (l *layerBase) SetPlugin(plugin *id.PluginID) { + if l == nil { + return + } + l.plugin = plugin.CopyRef() +} + +func (l *layerBase) Properties() []id.PropertyID { + if l == nil { + return nil + } + res := []id.PropertyID{} + if l.property != nil { + res = append(res, *l.property) + } + if l.infobox != nil { + res = append(res, l.infobox.property) + for _, f := range l.infobox.fields { + res = append(res, f.property) + } + } + return res +} + +func (l *layerBase) ValidateProperties(pm property.Map) error { + if l == nil || pm == nil { + return nil + } + + // property + if l.property != nil { + if l.plugin == nil || l.extension == nil { + return errors.New("layer should have plugin id and extension id") + } + + psid, err := id.PropertySchemaIDFromExtension(*l.plugin, *l.extension) + if err != nil { + return errors.New("layer has invalid plugin id and extension id") + } + + lp := pm[*l.property] + if lp == nil { + return errors.New("layer property does not exist") + } + + if lp.Schema() != psid { + return 
errors.New("layer property has a invalid schema") + } + } else if l.plugin != nil || l.extension != nil { + return errors.New("layer should have property id") + } + + // infobox + if err := l.infobox.ValidateProperties(pm); err != nil { + return fmt.Errorf("infobox: %w", err) + } + + return nil +} diff --git a/pkg/layer/layer_test.go b/pkg/layer/layer_test.go new file mode 100644 index 000000000..3c208be26 --- /dev/null +++ b/pkg/layer/layer_test.go @@ -0,0 +1,4 @@ +package layer + +var _ Layer = &Item{} +var _ Layer = &Group{} diff --git a/pkg/layer/list.go b/pkg/layer/list.go new file mode 100644 index 000000000..f44ab3b31 --- /dev/null +++ b/pkg/layer/list.go @@ -0,0 +1,244 @@ +package layer + +import ( + "sort" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type List []*Layer + +func (ll List) Pick(il *IDList) List { + if il == nil { + return nil + } + + layers := make(List, 0, il.LayerCount()) + for _, lid := range il.Layers() { + if l := ll.Find(lid); l != nil { + layers = append(layers, l) + } + } + return layers +} + +func (ll List) Find(lid id.LayerID) *Layer { + for _, l := range ll { + if l == nil { + continue + } + if (*l).ID() == lid { + return l + } + } + return nil +} + +func (ll List) FindByDataset(ds id.DatasetID) *Item { + for _, l := range ll { + if li := ItemFromLayerRef(l); li != nil { + dsid := li.LinkedDataset() + if dsid != nil && *dsid == ds { + return li + } + } + } + return nil +} + +func (ll List) ToLayerItemList() ItemList { + res := make(ItemList, 0, len(ll)) + for _, l := range ll { + if li := ItemFromLayerRef(l); li != nil { + res = append(res, li) + } + } + return res +} + +func (ll List) ToLayerGroupList() GroupList { + res := make(GroupList, 0, len(ll)) + for _, l := range ll { + if lg := GroupFromLayerRef(l); lg != nil { + res = append(res, lg) + } + } + return res +} + +func (ll List) SeparateLayerItemAndGroup() (ItemList, GroupList) { + resi := make(ItemList, 0, len(ll)) + resg := make(GroupList, 0, len(ll)) + for _, l 
:= range ll { + if lg := GroupFromLayerRef(l); lg != nil { + resg = append(resg, lg) + } else if li := ItemFromLayerRef(l); li != nil { + resi = append(resi, li) + } + } + return resi, resg +} + +func (ll List) Deref() []Layer { + if ll == nil { + return nil + } + res := make([]Layer, 0, len(ll)) + for _, l := range ll { + if l != nil { + res = append(res, *l) + } else { + res = append(res, nil) + } + } + return res +} + +func (ll List) Loader() Loader { + return LoaderFrom(ll.Deref()) +} + +func (ll List) Map() Map { + m := make(Map, len(ll)) + m.Add(ll...) + return m +} + +type ItemList []*Item + +func (ll ItemList) FindByDataset(ds id.DatasetID) *Item { + for _, li := range ll { + dsid := li.LinkedDataset() + if dsid != nil && *dsid == ds { + return li + } + } + return nil +} + +func (ll ItemList) ToLayerList() List { + res := make(List, 0, len(ll)) + for _, l := range ll { + var layer Layer = l + res = append(res, &layer) + } + return res +} + +type GroupList []*Group + +func (ll GroupList) ToLayerList() List { + res := make(List, 0, len(ll)) + for _, l := range ll { + var layer Layer = l + res = append(res, &layer) + } + return res +} + +type Map map[id.LayerID]*Layer + +func MapFrom(l Layer) Map { + return List{&l}.Map() +} + +func (m Map) Add(layers ...*Layer) Map { + if m == nil { + m = map[id.LayerID]*Layer{} + } + for _, l := range layers { + if l == nil { + continue + } + l2 := *l + if l2 == nil { + continue + } + m[l2.ID()] = l + } + return m +} + +func (m Map) List() List { + if m == nil { + return nil + } + list := make(List, 0, len(m)) + for _, l := range m { + list = append(list, l) + } + return list +} + +func (m Map) Clone() Map { + if m == nil { + return Map{} + } + m2 := make(Map, len(m)) + for k, v := range m { + m2[k] = v + } + return m2 +} + +func (m Map) Merge(m2 Map) Map { + if m == nil { + return m2.Clone() + } + m3 := m.Clone() + if m2 == nil { + return m3 + } + + return m3.Add(m2.List()...) 
+} + +func (m Map) Pick(il *IDList) List { + if il == nil { + return nil + } + + layers := make(List, 0, il.LayerCount()) + for _, lid := range il.Layers() { + if l := m[lid]; l != nil { + layers = append(layers, l) + } + } + return layers +} + +func (m Map) Layer(i id.LayerID) Layer { + if l := m[i]; l != nil { + return *l + } + return nil +} + +func (m Map) Item(i id.LayerID) *Item { + if l := ToLayerItem(m.Layer(i)); l != nil { + return l + } + return nil +} + +func (m Map) Group(i id.LayerID) *Group { + if l := ToLayerGroup(m.Layer(i)); l != nil { + return l + } + return nil +} + +func (m Map) Keys() []id.LayerID { + keys := make([]id.LayerID, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.SliceStable(keys, func(i, j int) bool { + return id.ID(keys[i]).Compare(id.ID(keys[j])) < 0 + }) + return keys +} + +func (m Map) Len() int { + return len(m) +} diff --git a/pkg/layer/loader.go b/pkg/layer/loader.go new file mode 100644 index 000000000..afa5b2c04 --- /dev/null +++ b/pkg/layer/loader.go @@ -0,0 +1,44 @@ +package layer + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type Loader func(context.Context, ...id.LayerID) (List, error) +type LoaderByScene func(context.Context, id.SceneID) (List, error) + +func LoaderFrom(data []Layer) Loader { + return func(ctx context.Context, ids ...id.LayerID) (List, error) { + res := make([]*Layer, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, &d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[id.LayerID]Layer) Loader { + return func(ctx context.Context, ids ...id.LayerID) (List, error) { + res := make([]*Layer, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, &d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} diff --git a/pkg/layer/merged.go 
b/pkg/layer/merged.go new file mode 100644 index 000000000..566109c7a --- /dev/null +++ b/pkg/layer/merged.go @@ -0,0 +1,152 @@ +package layer + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +// Merged represents a merged layer from two layers +type Merged struct { + Original id.LayerID + Parent *id.LayerID + Name string + Scene id.SceneID + Property *property.MergedMetadata + Infobox *MergedInfobox + PluginID *id.PluginID + ExtensionID *id.PluginExtensionID +} + +// MergedInfobox represents a merged info box from two layers +type MergedInfobox struct { + Property *property.MergedMetadata + Fields []*MergedInfoboxField +} + +// MergedInfoboxField represents a field of MergedInfobox +type MergedInfoboxField struct { + ID id.InfoboxFieldID + Plugin id.PluginID + Extension id.PluginExtensionID + Property *property.MergedMetadata +} + +// Merge merges two layers +func Merge(o Layer, p *Group) *Merged { + if o == nil || p != nil && o.Scene() != p.Scene() { + return nil + } + + return &Merged{ + Original: o.ID(), + Parent: p.IDRef().CopyRef(), + Scene: o.Scene(), + Name: o.Name(), + PluginID: o.Plugin().CopyRef(), + ExtensionID: o.Extension().CopyRef(), + Property: &property.MergedMetadata{ + Original: o.Property(), + Parent: p.Property(), + LinkedDataset: ToLayerItem(o).LinkedDataset(), + }, + Infobox: MergeInfobox(o.Infobox(), p.Infobox(), ToLayerItem(o).LinkedDataset()), + } +} + +// MergeInfobox merges two infoboxes +func MergeInfobox(o *Infobox, p *Infobox, linked *id.DatasetID) *MergedInfobox { + if o == nil && p == nil { + return nil + } + + var ibf []*InfoboxField + if o != nil && o.Count() > 0 { + ibf = o.Fields() + } else if p != nil { + ibf = p.Fields() + } + + fields := make([]*MergedInfoboxField, 0, len(ibf)) + for _, f := range ibf { + p := f.Property() + fields = append(fields, &MergedInfoboxField{ + ID: f.ID(), + Plugin: f.Plugin(), + Extension: f.Extension(), + Property: 
&property.MergedMetadata{ + Original: &p, + Parent: nil, + LinkedDataset: linked, + }, + }) + } + + return &MergedInfobox{ + Fields: fields, + Property: &property.MergedMetadata{ + Original: o.PropertyRef(), + Parent: p.PropertyRef(), + LinkedDataset: linked, + }, + } +} + +// Properties returns all property IDs in Merged +func (m *Merged) Properties() []id.PropertyID { + if m == nil { + return nil + } + added := map[id.PropertyID]struct{}{} + result := []id.PropertyID{} + if m.Property != nil { + if m.Property.Original != nil { + t := *m.Property.Original + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + if m.Property.Parent != nil { + t := *m.Property.Parent + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + } + if m.Infobox != nil && m.Infobox.Property != nil { + if m.Infobox.Property.Original != nil { + t := *m.Infobox.Property.Original + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + if m.Infobox.Property.Parent != nil { + t := *m.Infobox.Property.Parent + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + } + if m.Infobox != nil { + for _, f := range m.Infobox.Fields { + if f.Property.Original != nil { + t := *f.Property.Original + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + if f.Property.Parent != nil { + t := *f.Property.Parent + if _, ok := added[t]; !ok { + result = append(result, t) + added[t] = struct{}{} + } + } + } + } + return result +} diff --git a/pkg/layer/merged_test.go b/pkg/layer/merged_test.go new file mode 100644 index 000000000..2c8b18f34 --- /dev/null +++ b/pkg/layer/merged_test.go @@ -0,0 +1,379 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestMerge(t *testing.T) { + scene := 
id.NewSceneID() + dataset1 := id.NewDatasetID() + p := id.MustPluginID("xxx#1.1.1") + e := id.PluginExtensionID("foo") + + itemProperty := id.NewPropertyID() + groupProperty := id.NewPropertyID() + ib1pr := id.NewPropertyID() + ib2pr := id.NewPropertyID() + f1pr := id.NewPropertyID() + f2pr := id.NewPropertyID() + f3pr := id.NewPropertyID() + + f1 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f1pr).MustBuild() + f2 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f2pr).MustBuild() + f3 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f3pr).MustBuild() + + // no-infobox and no-linked + itemLayer1 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + MustBuild() + // no-infobox + itemLayer2 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + LinkedDataset(&dataset1). + MustBuild() + // infobox + itemLayer3 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + LinkedDataset(&dataset1). + Infobox(NewInfobox([]*InfoboxField{f1, f3}, ib1pr)). + MustBuild() + // infobox but field is empty + itemLayer4 := NewItem(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&itemProperty). + LinkedDataset(&dataset1). + Infobox(NewInfobox(nil, ib1pr)). + MustBuild() + // no-infobox + groupLayer1 := NewGroup(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&groupProperty). + MustBuild() + // infobox + groupLayer2 := NewGroup(). + NewID(). + Scene(scene). + Plugin(&p). + Extension(&e). + Property(&groupProperty). + Infobox(NewInfobox([]*InfoboxField{f2, f3}, ib2pr)). 
+ MustBuild() + + expected1 := &Merged{ + Original: itemLayer1.ID(), + Parent: nil, + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: nil, + LinkedDataset: nil, + }, + } + + expected2 := &Merged{ + Original: itemLayer3.ID(), + Parent: nil, + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: nil, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + + expected3 := &Merged{ + Original: itemLayer2.ID(), + Parent: groupLayer1.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + } + + expected4 := &Merged{ + Original: itemLayer3.ID(), + Parent: groupLayer1.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + 
+ expected5 := &Merged{ + Original: itemLayer2.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: nil, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f2.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f2pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + + expected6 := &Merged{ + Original: itemLayer3.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + + expected7 := &Merged{ + Original: itemLayer4.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + ID: f2.ID(), + Plugin: p, + Extension: e, + 
Property: &property.MergedMetadata{ + Original: &f2pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + + actual := Merge(nil, nil) + assert.Nil(t, actual) + actual = Merge(nil, groupLayer1) + assert.Nil(t, actual) + actual = Merge(itemLayer1, nil) + assert.Equal(t, expected1, actual) + actual = Merge(itemLayer3, nil) + assert.Equal(t, expected2, actual) + actual = Merge(itemLayer2, groupLayer1) + assert.Equal(t, expected3, actual) + actual = Merge(itemLayer3, groupLayer1) + assert.Equal(t, expected4, actual) + actual = Merge(itemLayer2, groupLayer2) + assert.Equal(t, expected5, actual) + actual = Merge(itemLayer3, groupLayer2) + assert.Equal(t, expected6, actual) + actual = Merge(itemLayer4, groupLayer2) + assert.Equal(t, expected7, actual) +} + +func TestMergedProperties(t *testing.T) { + itemProperty := id.NewPropertyID() + groupProperty := id.NewPropertyID() + ib1pr := id.NewPropertyID() + ib2pr := id.NewPropertyID() + f1pr := id.NewPropertyID() + f2pr := id.NewPropertyID() + f3pr := id.NewPropertyID() + + merged := &Merged{ + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + }, + Infobox: &MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + }, + Fields: []*MergedInfoboxField{ + { + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: &f2pr, + }, + }, + { + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + }, + }, + }, + }, + } + + assert.Equal(t, []id.PropertyID{ + itemProperty, groupProperty, ib1pr, ib2pr, f1pr, f2pr, f3pr, + }, merged.Properties()) +} diff --git a/pkg/layer/merging/merged.go b/pkg/layer/merging/merged.go new file mode 100644 index 000000000..c9ed65dfc --- /dev/null +++ b/pkg/layer/merging/merged.go @@ -0,0 +1,96 @@ +package merging + +import ( + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + _ MergedLayer = &MergedLayerGroup{} // must implement Layer + _ MergedLayer = &MergedLayerItem{} // must implement Layer +) + +type MergedLayer interface { + Common() *MergedLayerCommon + AllDatasets() []id.DatasetID +} + +type MergedLayerGroup struct { + MergedLayerCommon + Children []MergedLayer +} + +type MergedLayerItem struct { + MergedLayerCommon +} + +type MergedLayerCommon struct { + layer.Merged + Property *property.Merged + Infobox *MergedInfobox +} + +type MergedInfobox struct { + layer.MergedInfobox + Property *property.Merged + Fields []*MergedInfoboxField +} + +type MergedInfoboxField struct { + layer.MergedInfoboxField + Property *property.Merged +} + +func (l *MergedLayerGroup) Common() *MergedLayerCommon { + if l == nil { + return nil + } + return &l.MergedLayerCommon +} + +func (l *MergedLayerItem) Common() *MergedLayerCommon { + if l == nil { + return nil + } + return &l.MergedLayerCommon +} + +func (l *MergedLayerCommon) Datasets() []id.DatasetID { + return l.datasetIDSet().All() +} + +func (l *MergedLayerCommon) datasetIDSet() *id.DatasetIDSet { + if l == nil { + return nil + } + res := id.NewDatasetIDSet() + res.Add(l.Property.Datasets()...) + res.Add(l.Infobox.Property.Datasets()...) + for _, f := range l.Infobox.Fields { + res.Add(f.Property.Datasets()...) + } + return res +} + +func (l *MergedLayerItem) AllDatasets() []id.DatasetID { + if l == nil { + return nil + } + return l.Datasets() +} + +func (l *MergedLayerGroup) AllDatasets() []id.DatasetID { + return l.allDatasetIDSet().All() +} + +func (l *MergedLayerGroup) allDatasetIDSet() *id.DatasetIDSet { + if l == nil { + return nil + } + d := l.datasetIDSet() + for _, l := range l.Children { + d.Add(l.AllDatasets()...) 
+ } + return d +} diff --git a/pkg/layer/merging/merger.go b/pkg/layer/merging/merger.go new file mode 100644 index 000000000..0c59a42d3 --- /dev/null +++ b/pkg/layer/merging/merger.go @@ -0,0 +1,120 @@ +package merging + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Merger struct { + LayerLoader layer.Loader + PropertyLoader property.Loader +} + +func (m *Merger) MergeLayer(ctx context.Context, l layer.Layer, parent *layer.Group) (MergedLayer, error) { + if m == nil || l == nil { + return nil, nil + } + + common, err := m.mergeCommon(ctx, l, parent) + if err != nil { + return nil, err + } + if common == nil { + return nil, nil + } + + if li := layer.ToLayerItem(l); li != nil { + // item + return &MergedLayerItem{*common}, nil + } else if lg := layer.ToLayerGroup(l); lg != nil { + // group + layers, err := m.LayerLoader(ctx, lg.Layers().Layers()...) + if err != nil { + return nil, err + } + + children := make([]MergedLayer, 0, len(layers)) + for _, c := range layers { + if c == nil { + continue + } + ml, err := m.MergeLayer(ctx, *c, lg) + if err != nil { + return nil, err + } + children = append(children, ml) + } + + return &MergedLayerGroup{ + MergedLayerCommon: *common, + Children: children, + }, nil + } + + return nil, nil +} + +func (m *Merger) MergeLayerFromID(ctx context.Context, i id.LayerID, parent *layer.Group) (MergedLayer, error) { + l, err := m.LayerLoader(ctx, i) + if err != nil { + return nil, err + } + if len(l) == 0 || l[0] == nil { + return nil, nil + } + return m.MergeLayer(ctx, *l[0], parent) +} + +func (m *Merger) mergeCommon(ctx context.Context, original layer.Layer, parent *layer.Group) (p *MergedLayerCommon, e error) { + ml := layer.Merge(original, parent) + if ml == nil { + return + } + properties, err := m.PropertyLoader(ctx, ml.Properties()...) 
+ if err != nil { + e = err + return + } + + var infobox *MergedInfobox + if ml.Infobox != nil { + fields := make([]*MergedInfoboxField, 0, len(ml.Infobox.Fields)) + for _, f := range ml.Infobox.Fields { + fields = append(fields, &MergedInfoboxField{ + MergedInfoboxField: *f, + Property: mergeProperty(f.Property, properties), + }) + } + infobox = &MergedInfobox{ + MergedInfobox: *ml.Infobox, + Fields: fields, + Property: mergeProperty(ml.Infobox.Property, properties), + } + } + + p = &MergedLayerCommon{ + Merged: *ml, + Property: mergeProperty(ml.Property, properties), + Infobox: infobox, + } + return +} + +func mergeProperty(ml *property.MergedMetadata, properties []*property.Property) *property.Merged { + var op, pp *property.Property + for _, p := range properties { + if ml.Original != nil && p.ID() == *ml.Original { + op = p + } + if ml.Parent != nil && p.ID() == *ml.Parent { + pp = p + } + if (ml.Original == nil || op != nil) && (ml.Parent == nil || pp != nil) { + break + } + } + return ml.Merge(op, pp) +} diff --git a/pkg/layer/merging/merger_test.go b/pkg/layer/merging/merger_test.go new file mode 100644 index 000000000..58606d0d9 --- /dev/null +++ b/pkg/layer/merging/merger_test.go @@ -0,0 +1,174 @@ +package merging + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestMergeLayer(t *testing.T) { + // ids + scene := id.NewSceneID() + dataset1 := id.NewDatasetID() + ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + p := id.MustPluginID("xxx#1.1.1") + e := id.PluginExtensionID("foo") + itemProperty := id.NewPropertyID() + groupProperty := id.NewPropertyID() + ib1pr := id.NewPropertyID() + ib2pr := id.NewPropertyID() + fpr := id.NewPropertyID() + l1 := id.NewLayerID() + l2 := id.NewLayerID() + l1if1 := id.NewInfoboxFieldID() + + // property loader + ploader := 
property.LoaderFrom([]*property.Property{ + property.New().ID(itemProperty).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(groupProperty).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(ib1pr).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(ib2pr).Scene(scene).Schema(ps).MustBuild(), + property.New().ID(fpr).Scene(scene).Schema(ps).MustBuild(), + }) + + // layer loader + lloader := layer.LoaderFrom([]layer.Layer{ + layer.NewItem(). + ID(l1). + Scene(scene). + Property(&itemProperty). + LinkedDataset(&dataset1). + Infobox(layer.NewInfobox(nil, ib1pr)). + MustBuild(), + layer.NewGroup(). + ID(l2). + Scene(scene). + Property(&groupProperty). + Infobox(layer.NewInfobox([]*layer.InfoboxField{ + layer.NewInfoboxField().ID(l1if1).Plugin(p).Extension(e).Property(fpr).MustBuild(), + }, ib2pr)). + Layers(layer.NewIDList([]id.LayerID{l1})). + MustBuild(), + }) + + // assert + expectedInfoboxField := layer.MergedInfoboxField{ + ID: l1if1, + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &fpr, + LinkedDataset: &dataset1, + }, + } + expectedInfobox := layer.MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib1pr, + Parent: &ib2pr, + LinkedDataset: &dataset1, + }, + Fields: []*layer.MergedInfoboxField{&expectedInfoboxField}, + } + expectedInfoboxField2 := layer.MergedInfoboxField{ + ID: l1if1, + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &fpr, + }, + } + expectedInfobox2 := layer.MergedInfobox{ + Property: &property.MergedMetadata{ + Original: &ib2pr, + }, + Fields: []*layer.MergedInfoboxField{&expectedInfoboxField2}, + } + + expected := &MergedLayerGroup{ + MergedLayerCommon: MergedLayerCommon{ + Merged: layer.Merged{ + Original: l2, + Scene: scene, + Property: &property.MergedMetadata{ + Original: &groupProperty, + }, + Infobox: &expectedInfobox2, + }, + Infobox: &MergedInfobox{ + MergedInfobox: expectedInfobox2, + Property: &property.Merged{ + Original: &ib2pr, + Schema: 
ps, + }, + Fields: []*MergedInfoboxField{ + { + MergedInfoboxField: expectedInfoboxField2, + Property: &property.Merged{ + Original: &fpr, + Schema: ps, + }, + }, + }, + }, + Property: &property.Merged{ + Original: &groupProperty, + Schema: ps, + }, + }, + Children: []MergedLayer{ + &MergedLayerItem{ + MergedLayerCommon{ + Merged: layer.Merged{ + Original: l1, + Parent: &l2, + Scene: scene, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &expectedInfobox, + }, + Infobox: &MergedInfobox{ + MergedInfobox: expectedInfobox, + Property: &property.Merged{ + Original: &ib1pr, + Parent: &ib2pr, + Schema: ps, + LinkedDataset: &dataset1, + }, + Fields: []*MergedInfoboxField{ + { + MergedInfoboxField: expectedInfoboxField, + Property: &property.Merged{ + Original: &fpr, + Schema: ps, + LinkedDataset: &dataset1, + }, + }, + }, + }, + Property: &property.Merged{ + Original: &itemProperty, + Parent: &groupProperty, + Schema: ps, + LinkedDataset: &dataset1, + }, + }, + }, + }, + } + + merger := Merger{ + PropertyLoader: ploader, + LayerLoader: lloader, + } + actual, err := merger.MergeLayerFromID(context.Background(), l2, nil) + + assert.NoError(t, err) + assert.Equal(t, expected, actual) +} diff --git a/pkg/layer/merging/sealed.go b/pkg/layer/merging/sealed.go new file mode 100644 index 000000000..2ca70a018 --- /dev/null +++ b/pkg/layer/merging/sealed.go @@ -0,0 +1,74 @@ +package merging + +import ( + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" +) + +var ( + _ SealedLayer = &SealedLayerGroup{} // must implement SealedLayer + _ SealedLayer = &SealedLayerItem{} // must implement SealedLayer +) + +type SealedLayer interface { + Common() *SealedLayerCommon + Flatten() []*SealedLayerItem +} + +type SealedLayerGroup struct { + SealedLayerCommon + Children []SealedLayer +} + +type SealedLayerItem struct { + SealedLayerCommon +} + +type 
SealedLayerCommon struct { + layer.Merged + Property *property.Sealed + Infobox *SealedInfobox +} + +type SealedInfobox struct { + layer.MergedInfobox + Property *property.Sealed + Fields []*SealedInfoboxField +} + +type SealedInfoboxField struct { + layer.MergedInfoboxField + Property *property.Sealed +} + +func (l *SealedLayerGroup) Common() *SealedLayerCommon { + if l == nil { + return nil + } + return &l.SealedLayerCommon +} + +func (l *SealedLayerGroup) Flatten() []*SealedLayerItem { + if l == nil { + return nil + } + layers := []*SealedLayerItem{} + for _, c := range l.Children { + layers = append(layers, c.Flatten()...) + } + return layers +} + +func (l *SealedLayerItem) Common() *SealedLayerCommon { + if l == nil { + return nil + } + return &l.SealedLayerCommon +} + +func (l *SealedLayerItem) Flatten() []*SealedLayerItem { + if l == nil { + return nil + } + return []*SealedLayerItem{l} +} diff --git a/pkg/layer/merging/sealer.go b/pkg/layer/merging/sealer.go new file mode 100644 index 000000000..94c0948c4 --- /dev/null +++ b/pkg/layer/merging/sealer.go @@ -0,0 +1,139 @@ +package merging + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Sealer struct { + DatasetGraphLoader dataset.GraphLoader +} + +func (s *Sealer) Seal(ctx context.Context, m MergedLayer) (SealedLayer, error) { + if s == nil || m == nil { + return nil, nil + } + return s.sealLayer(ctx, m) +} + +func (s *Sealer) sealLayer(ctx context.Context, m MergedLayer) (SealedLayer, error) { + if s == nil || m == nil { + return nil, nil + } + if g, ok := m.(*MergedLayerGroup); ok { + return s.sealLayerGroup(ctx, g) + } + if i, ok := m.(*MergedLayerItem); ok { + return s.sealLayerItem(ctx, i) + } + return nil, nil +} + +func (s *Sealer) sealLayerGroup(ctx context.Context, m *MergedLayerGroup) (*SealedLayerGroup, error) { + if s == nil || m == nil { + return nil, nil + } + + c, err := s.sealLayerCommon(ctx, 
&m.MergedLayerCommon) + if err != nil { + return nil, err + } + if c == nil { + return nil, nil + } + + children := make([]SealedLayer, 0, len(m.Children)) + for _, c := range m.Children { + s, err := s.sealLayer(ctx, c) + if err != nil { + return nil, err + } + children = append(children, s) + } + + return &SealedLayerGroup{ + SealedLayerCommon: *c, + Children: children, + }, nil +} + +func (s *Sealer) sealLayerItem(ctx context.Context, m *MergedLayerItem) (*SealedLayerItem, error) { + if s == nil || m == nil { + return nil, nil + } + c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon) + if err != nil { + return nil, err + } + if c == nil { + return nil, nil + } + return &SealedLayerItem{ + SealedLayerCommon: *c, + }, nil +} + +func (s *Sealer) sealLayerCommon(ctx context.Context, m *MergedLayerCommon) (*SealedLayerCommon, error) { + if s == nil || m == nil { + return nil, nil + } + p, err := s.sealProperty(ctx, m.Property) + if err != nil { + return nil, err + } + ib, err := s.sealInfobox(ctx, m.Infobox) + if err != nil { + return nil, err + } + return &SealedLayerCommon{ + Merged: m.Merged, + Property: p, + Infobox: ib, + }, nil +} + +func (s *Sealer) sealInfobox(ctx context.Context, m *MergedInfobox) (*SealedInfobox, error) { + if s == nil || m == nil { + return nil, nil + } + p, err := s.sealProperty(ctx, m.Property) + if err != nil { + return nil, err + } + fields := make([]*SealedInfoboxField, 0, len(m.Fields)) + for _, f := range m.Fields { + s, err := s.sealInfoboxField(ctx, f) + if err != nil { + return nil, err + } + fields = append(fields, s) + } + return &SealedInfobox{ + MergedInfobox: m.MergedInfobox, + Property: p, + Fields: fields, + }, nil +} + +func (s *Sealer) sealInfoboxField(ctx context.Context, m *MergedInfoboxField) (*SealedInfoboxField, error) { + if s == nil || m == nil { + return nil, nil + } + p, err := s.sealProperty(ctx, m.Property) + if err != nil { + return nil, err + } + return &SealedInfoboxField{ + MergedInfoboxField: 
m.MergedInfoboxField, + Property: p, + }, nil +} + +func (s *Sealer) sealProperty(ctx context.Context, m *property.Merged) (*property.Sealed, error) { + if s == nil { + return nil, nil + } + return property.Seal(ctx, m, s.DatasetGraphLoader) +} diff --git a/pkg/log/gceformatter.go b/pkg/log/gceformatter.go new file mode 100644 index 000000000..0df77cc26 --- /dev/null +++ b/pkg/log/gceformatter.go @@ -0,0 +1,123 @@ +package log + +// https://github.com/znly/logrus-gce with some modifications +// Apache License 2.0 + +import ( + "encoding/json" + "errors" + "fmt" + "runtime" + "strings" + "sync" + "time" + + "github.com/sirupsen/logrus" +) + +type severity string + +const ( + severityDEBUG severity = "DEBUG" + severityINFO severity = "INFO" + severityWARNING severity = "WARNING" + severityERROR severity = "ERROR" + severityCRITICAL severity = "CRITICAL" + severityALERT severity = "ALERT" +) + +var ( + levelsLogrusToGCE = map[logrus.Level]severity{ + logrus.DebugLevel: severityDEBUG, + logrus.InfoLevel: severityINFO, + logrus.WarnLevel: severityWARNING, + logrus.ErrorLevel: severityERROR, + logrus.FatalLevel: severityCRITICAL, + logrus.PanicLevel: severityALERT, + } +) + +var ( + stackSkips = map[logrus.Level]int{} + stackSkipsMu = sync.RWMutex{} +) + +var ( + ErrSkipNotFound = errors.New("could not find skips for log level") +) + +func getSkipLevel(level logrus.Level) (int, error) { + stackSkipsMu.RLock() + if skip, ok := stackSkips[level]; ok { + defer stackSkipsMu.RUnlock() + return skip, nil + } + stackSkipsMu.RUnlock() + + stackSkipsMu.Lock() + defer stackSkipsMu.Unlock() + if skip, ok := stackSkips[level]; ok { + return skip, nil + } + + // detect until we escape logrus back to the client package + // skip out of runtime and logrusgce package, hence 3 + stackSkipsCallers := make([]uintptr, 20) + runtime.Callers(3, stackSkipsCallers) + for i, pc := range stackSkipsCallers { + f := runtime.FuncForPC(pc) + if strings.HasPrefix(f.Name(), 
"github.com/sirupsen/logrus") { + continue + } + stackSkips[level] = i + 1 + return i + 1, nil + } + return 0, ErrSkipNotFound +} + +type GCEFormatter struct { + withSourceInfo bool +} + +func NewGCEFormatter(withSourceInfo bool) *GCEFormatter { + return &GCEFormatter{withSourceInfo: withSourceInfo} +} + +func (f *GCEFormatter) Format(entry *logrus.Entry) ([]byte, error) { + data := make(logrus.Fields, len(entry.Data)+3) + for k, v := range entry.Data { + switch v := v.(type) { + case error: + // Otherwise errors are ignored by `encoding/json` + // https://github.com/Sirupsen/logrus/issues/137 + data[k] = v.Error() + default: + data[k] = v + } + } + + data["time"] = entry.Time.Format(time.RFC3339Nano) + data["severity"] = levelsLogrusToGCE[entry.Level] + data["logMessage"] = entry.Message + + if f.withSourceInfo { + skip, err := getSkipLevel(entry.Level) + if err != nil { + return nil, err + } + if pc, file, line, ok := runtime.Caller(skip); ok { + f := runtime.FuncForPC(pc) + data["sourceLocation"] = map[string]interface{}{ + "file": file, + "line": line, + "functionName": f.Name(), + } + } + } + + serialized, err := json.Marshal(data) + if err != nil { + return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err) + } + return append(serialized, '\n'), nil +} diff --git a/pkg/log/log.go b/pkg/log/log.go new file mode 100644 index 000000000..23ed1c844 --- /dev/null +++ b/pkg/log/log.go @@ -0,0 +1,104 @@ +package log + +import ( + "os" + + "github.com/sirupsen/logrus" +) + +func init() { + gcp, _ := os.LookupEnv("GOOGLE_CLOUD_PROJECT") + logrus.SetLevel(logrus.InfoLevel) + if gcp != "" { + logrus.SetFormatter(NewGCEFormatter(false)) + } else { + logrus.SetFormatter(&logrus.TextFormatter{ + DisableColors: false, + FullTimestamp: true, + }) + } +} + +func Tracef(format string, args ...interface{}) { + logrus.Tracef(format, args...) +} + +func Debugf(format string, args ...interface{}) { + logrus.Debugf(format, args...) 
// Infof logs a formatted message at the info level.
func Infof(format string, args ...interface{}) {
	logrus.Infof(format, args...)
}

// Printf logs a formatted message at the standard (info) level.
func Printf(format string, args ...interface{}) {
	logrus.Printf(format, args...)
}

// Warnf logs a formatted message at the warning level.
func Warnf(format string, args ...interface{}) {
	logrus.Warnf(format, args...)
}

// Errorf logs a formatted message at the error level.
func Errorf(format string, args ...interface{}) {
	logrus.Errorf(format, args...)
}

// Fatalf logs a formatted message at the fatal level and then exits
// the process (logrus calls os.Exit(1) after a fatal log).
func Fatalf(format string, args ...interface{}) {
	logrus.Fatalf(format, args...)
}

// Trace logs its arguments at the trace level.
func Trace(args ...interface{}) {
	logrus.Trace(args...)
}

// Debug logs its arguments at the debug level.
func Debug(args ...interface{}) {
	logrus.Debug(args...)
}

// Info logs its arguments at the info level.
func Info(args ...interface{}) {
	logrus.Info(args...)
}

// Print logs its arguments at the standard (info) level.
func Print(args ...interface{}) {
	logrus.Print(args...)
}

// Warn logs its arguments at the warning level.
func Warn(args ...interface{}) {
	logrus.Warn(args...)
}

// Error logs its arguments at the error level.
func Error(args ...interface{}) {
	logrus.Error(args...)
}

// Fatal logs its arguments at the fatal level and then exits the
// process (logrus calls os.Exit(1) after a fatal log).
func Fatal(args ...interface{}) {
	logrus.Fatal(args...)
}

// Traceln logs its arguments at the trace level, spaced and newline-terminated.
func Traceln(args ...interface{}) {
	logrus.Traceln(args...)
}

// Debugln logs its arguments at the debug level, spaced and newline-terminated.
func Debugln(args ...interface{}) {
	logrus.Debugln(args...)
}

// Infoln logs its arguments at the info level, spaced and newline-terminated.
func Infoln(args ...interface{}) {
	logrus.Infoln(args...)
}

// Println logs its arguments at the standard (info) level, spaced and
// newline-terminated.
func Println(args ...interface{}) {
	logrus.Println(args...)
}

// Warnln logs its arguments at the warning level, spaced and newline-terminated.
func Warnln(args ...interface{}) {
	logrus.Warnln(args...)
}

// Errorln logs its arguments at the error level, spaced and newline-terminated.
func Errorln(args ...interface{}) {
	logrus.Errorln(args...)
}
+} diff --git a/pkg/plugin/builder.go b/pkg/plugin/builder.go new file mode 100644 index 000000000..bc6117113 --- /dev/null +++ b/pkg/plugin/builder.go @@ -0,0 +1,72 @@ +package plugin + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Builder struct { + p *Plugin +} + +func New() *Builder { + return &Builder{p: &Plugin{}} +} + +func (b *Builder) Build() (*Plugin, error) { + // TODO: check extensions duplication ...etc + return b.p, nil +} + +func (b *Builder) MustBuild() *Plugin { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *Builder) ID(id id.PluginID) *Builder { + b.p.id = id + return b +} + +func (b *Builder) Name(name i18n.String) *Builder { + b.p.name = name.Copy() + return b +} + +func (b *Builder) Author(author string) *Builder { + b.p.author = author + return b +} + +func (b *Builder) Description(description i18n.String) *Builder { + b.p.description = description.Copy() + return b +} + +func (b *Builder) RepositoryURL(repositoryURL string) *Builder { + b.p.repositoryURL = repositoryURL + return b +} + +func (b *Builder) Extensions(extensions []*Extension) *Builder { + b.p.extensions = make(map[id.PluginExtensionID]*Extension, len(extensions)) + b.p.extensionOrder = make([]id.PluginExtensionID, 0, len(extensions)) + for _, e := range extensions { + b.p.extensions[e.ID()] = e + b.p.extensionOrder = append(b.p.extensionOrder, e.ID()) + } + return b +} + +func (b *Builder) Schema(schema *id.PropertySchemaID) *Builder { + if schema == nil { + b.p.schema = nil + } else { + sid := *schema + b.p.schema = &sid + } + return b +} diff --git a/pkg/plugin/builder_test.go b/pkg/plugin/builder_test.go new file mode 100644 index 000000000..36b13cf25 --- /dev/null +++ b/pkg/plugin/builder_test.go @@ -0,0 +1,196 @@ +package plugin + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/stretchr/testify/assert" +) + +func TestBuilder_ID(t *testing.T) { + var b = New() + res := b.ID(id.MustPluginID("aaa#1.1.1")).MustBuild() + assert.Equal(t, id.MustPluginID("aaa#1.1.1"), res.ID()) +} + +func TestBuilder_Name(t *testing.T) { + var b = New() + res := b.Name(i18n.StringFrom("fooo")).MustBuild() + assert.Equal(t, i18n.StringFrom("fooo"), res.Name()) +} + +func TestBuilder_Author(t *testing.T) { + var b = New() + res := b.Author("xxx").MustBuild() + assert.Equal(t, "xxx", res.Author()) +} + +func TestBuilder_Description(t *testing.T) { + var b = New() + res := b.Description(i18n.StringFrom("ddd")).MustBuild() + assert.Equal(t, i18n.StringFrom("ddd"), res.Description()) +} + +func TestBuilder_Schema(t *testing.T) { + testCases := []struct { + name string + sid, expected *id.PropertySchemaID + }{ + { + name: "nil schema", + sid: nil, + expected: nil, + }, + { + name: "build schema", + sid: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + expected: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := New().Schema(tc.sid).MustBuild() + assert.Equal(tt, tc.expected, res.Schema()) + }) + } +} + +func TestBuilder_Extensions(t *testing.T) { + b := New() + ext := []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + } + res := b.Extensions(ext).MustBuild() + assert.Equal(t, ext, res.Extensions()) +} + +func TestBuilder_RepositoryURL(t *testing.T) { + var b = New() + res := b.RepositoryURL("hoge").MustBuild() + assert.Equal(t, "hoge", res.RepositoryURL()) +} + +func TestBuilder_Build(t *testing.T) { + testCases := []struct { + name, author, repositoryURL string + id id.PluginID + pname, description i18n.String + ext []*Extension + schema *id.PropertySchemaID + expected *Plugin + err error // skip for now as error is always nil + }{ + { + id: id.MustPluginID("hoge#0.1.0"), + name: "success build 
new plugin", + author: "aaa", + repositoryURL: "uuu", + pname: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + ext: []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + }, + schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + expected: &Plugin{ + id: id.MustPluginID("hoge#0.1.0"), + name: i18n.StringFrom("nnn"), + author: "aaa", + description: i18n.StringFrom("ddd"), + repositoryURL: "uuu", + extensions: map[id.PluginExtensionID]*Extension{ + id.PluginExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), + id.PluginExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), + }, + extensionOrder: []id.PluginExtensionID{id.PluginExtensionID("xxx"), id.PluginExtensionID("yyy")}, + schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + p, err := New(). + ID(tc.id). + Extensions(tc.ext). + RepositoryURL(tc.repositoryURL). + Description(tc.description). + Name(tc.pname). + Schema(tc.schema). + Author(tc.author). 
+ Build() + if err == nil { + assert.Equal(tt, tc.expected, p) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + testCases := []struct { + name, author, repositoryURL string + id id.PluginID + pname, description i18n.String + ext []*Extension + schema *id.PropertySchemaID + expected *Plugin + }{ + { + id: id.MustPluginID("hoge#0.1.0"), + name: "success build new plugin", + author: "aaa", + repositoryURL: "uuu", + pname: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + ext: []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + }, + schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + expected: &Plugin{ + id: id.MustPluginID("hoge#0.1.0"), + name: i18n.StringFrom("nnn"), + author: "aaa", + description: i18n.StringFrom("ddd"), + repositoryURL: "uuu", + extensions: map[id.PluginExtensionID]*Extension{ + id.PluginExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), + id.PluginExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), + }, + extensionOrder: []id.PluginExtensionID{id.PluginExtensionID("xxx"), id.PluginExtensionID("yyy")}, + schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + p := New(). + ID(tc.id). + Extensions(tc.ext). + RepositoryURL(tc.repositoryURL). + Description(tc.description). + Name(tc.pname). + Schema(tc.schema). + Author(tc.author). 
+ MustBuild() + assert.Equal(tt, tc.expected, p) + + }) + } +} + +func TestNew(t *testing.T) { + assert.NotNil(t, New()) +} diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go new file mode 100644 index 000000000..f7f276bde --- /dev/null +++ b/pkg/plugin/extension.go @@ -0,0 +1,84 @@ +package plugin + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +// ExtensionType _ +type ExtensionType string + +var ( + // ErrPluginExtensionDuplicated _ + ErrPluginExtensionDuplicated error = errors.New("plugin extension duplicated") + // ExtensionTypePrimitive _ + ExtensionTypePrimitive ExtensionType = "primitive" + // ExtensionTypeWidget _ + ExtensionTypeWidget ExtensionType = "widget" + // ExtensionTypeBlock _ + ExtensionTypeBlock ExtensionType = "block" + // ExtensionTypeVisualizer _ + ExtensionTypeVisualizer ExtensionType = "visualizer" + // ExtensionTypeInfobox _ + ExtensionTypeInfobox ExtensionType = "infobox" +) + +// Extension _ +type Extension struct { + id id.PluginExtensionID + extensionType ExtensionType + name i18n.String + description i18n.String + icon string + schema id.PropertySchemaID + visualizer visualizer.Visualizer +} + +// ID _ +func (w *Extension) ID() id.PluginExtensionID { + return w.id +} + +// Type _ +func (w *Extension) Type() ExtensionType { + return w.extensionType +} + +// Name _ +func (w *Extension) Name() i18n.String { + return w.name.Copy() +} + +// Description _ +func (w *Extension) Description() i18n.String { + return w.description.Copy() +} + +// Icon _ +func (w *Extension) Icon() string { + return w.icon +} + +// Schema _ +func (w *Extension) Schema() id.PropertySchemaID { + return w.schema +} + +// Visualizer _ +func (w *Extension) Visualizer() visualizer.Visualizer { + return w.visualizer +} + +// Rename _ +func (w *Extension) Rename(name i18n.String) { + w.name = name.Copy() + +} + +// SetDescription _ 
+func (w *Extension) SetDescription(des i18n.String) { + w.description = des.Copy() +} diff --git a/pkg/plugin/extension_builder.go b/pkg/plugin/extension_builder.go new file mode 100644 index 000000000..daab6ace2 --- /dev/null +++ b/pkg/plugin/extension_builder.go @@ -0,0 +1,90 @@ +package plugin + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +// ExtensionBuilder _ +type ExtensionBuilder struct { + p *Extension + s bool +} + +// NewExtension _ +func NewExtension() *ExtensionBuilder { + return &ExtensionBuilder{p: &Extension{}} +} + +// Build _ +func (b *ExtensionBuilder) Build() (*Extension, error) { + if string(b.p.id) == "" { + return nil, id.ErrInvalidID + } + if !b.s { + if b.p.extensionType == ExtensionTypeVisualizer || b.p.extensionType == ExtensionTypeInfobox { + return nil, errors.New("cannot build system extension") + } + } + return b.p, nil +} + +// MustBuild _ +func (b *ExtensionBuilder) MustBuild() *Extension { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +// ID _ +func (b *ExtensionBuilder) ID(id id.PluginExtensionID) *ExtensionBuilder { + b.p.id = id + return b +} + +// Name _ +func (b *ExtensionBuilder) Name(name i18n.String) *ExtensionBuilder { + b.p.name = name.Copy() + return b +} + +// Type _ +func (b *ExtensionBuilder) Type(extensionType ExtensionType) *ExtensionBuilder { + b.p.extensionType = extensionType + return b +} + +// Description _ +func (b *ExtensionBuilder) Description(description i18n.String) *ExtensionBuilder { + b.p.description = description.Copy() + return b +} + +// Icon _ +func (b *ExtensionBuilder) Icon(icon string) *ExtensionBuilder { + b.p.icon = icon + return b +} + +// Schema _ +func (b *ExtensionBuilder) Schema(schema id.PropertySchemaID) *ExtensionBuilder { + b.p.schema = schema + return b +} + +// Visualizer _ +func (b *ExtensionBuilder) Visualizer(visualizer 
visualizer.Visualizer) *ExtensionBuilder { + b.p.visualizer = visualizer + return b +} + +// System _ +func (b *ExtensionBuilder) System(s bool) *ExtensionBuilder { + b.s = s + return b +} diff --git a/pkg/plugin/extension_builder_test.go b/pkg/plugin/extension_builder_test.go new file mode 100644 index 000000000..86d0d43fb --- /dev/null +++ b/pkg/plugin/extension_builder_test.go @@ -0,0 +1,195 @@ +package plugin + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestExtensionBuilder_Name(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Name(i18n.StringFrom("nnn")).MustBuild() + assert.Equal(t, i18n.StringFrom("nnn"), res.Name()) +} + +func TestExtensionBuilder_Description(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Description(i18n.StringFrom("ddd")).MustBuild() + assert.Equal(t, i18n.StringFrom("ddd"), res.Description()) +} + +func TestExtensionBuilder_ID(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").MustBuild() + assert.Equal(t, id.PluginExtensionID("xxx"), res.ID()) +} + +func TestExtensionBuilder_Type(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Type("ppp").MustBuild() + assert.Equal(t, ExtensionType("ppp"), res.Type()) +} + +func TestExtensionBuilder_Icon(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Icon("ccc").MustBuild() + assert.Equal(t, "ccc", res.Icon()) +} + +func TestExtensionBuilder_Schema(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Schema(id.MustPropertySchemaID("hoge#0.1.0/fff")).MustBuild() + assert.Equal(t, id.MustPropertySchemaID("hoge#0.1.0/fff"), res.Schema()) +} + +func TestExtensionBuilder_Visualizer(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").Visualizer("ccc").MustBuild() + assert.Equal(t, visualizer.Visualizer("ccc"), res.Visualizer()) +} + 
+func TestExtensionBuilder_Build(t *testing.T) { + testCases := []struct { + name, icon string + id id.PluginExtensionID + extensionType ExtensionType + system bool + ename i18n.String + description i18n.String + schema id.PropertySchemaID + visualizer visualizer.Visualizer + expected *Extension + err error + }{ + { + name: "success not system", + icon: "ttt", + id: "xxx", + extensionType: "ppp", + system: false, + ename: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + visualizer: "vvv", + expected: &Extension{ + id: "xxx", + extensionType: "ppp", + name: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + icon: "ttt", + schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + visualizer: "vvv", + }, + err: nil, + }, + { + name: "fail not system type visualizer", + extensionType: ExtensionTypeVisualizer, + err: errors.New("cannot build system extension"), + }, + { + name: "fail not system type infobox", + extensionType: ExtensionTypeInfobox, + err: errors.New("cannot build system extension"), + }, + { + name: "fail nil id", + err: id.ErrInvalidID, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + e, err := NewExtension(). + ID(tc.id). + Visualizer(tc.visualizer). + Schema(tc.schema). + System(tc.system). + Type(tc.extensionType). + Description(tc.description). + Name(tc.ename). + Icon(tc.icon). 
+ Build() + if err == nil { + assert.Equal(tt, tc.expected, e) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + + }) + } +} + +func TestExtensionBuilder_MustBuild(t *testing.T) { + testCases := []struct { + name, icon string + id id.PluginExtensionID + extensionType ExtensionType + system bool + ename i18n.String + description i18n.String + schema id.PropertySchemaID + visualizer visualizer.Visualizer + expected *Extension + }{ + { + name: "success not system", + icon: "ttt", + id: "xxx", + extensionType: "ppp", + system: false, + ename: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + visualizer: "vvv", + expected: &Extension{ + id: "xxx", + extensionType: "ppp", + name: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + icon: "ttt", + schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + visualizer: "vvv", + }, + }, + { + name: "fail not system type visualizer", + extensionType: ExtensionTypeVisualizer, + }, + { + name: "fail not system type infobox", + extensionType: ExtensionTypeInfobox, + }, + { + name: "fail nil id", + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + var e *Extension + defer func() { + if r := recover(); r == nil { + assert.Equal(tt, tc.expected, e) + + } + }() + e = NewExtension(). + ID(tc.id). + Visualizer(tc.visualizer). + Schema(tc.schema). + System(tc.system). + Type(tc.extensionType). + Description(tc.description). + Name(tc.ename). + Icon(tc.icon). 
+ MustBuild() + }) + } +} diff --git a/pkg/plugin/extension_test.go b/pkg/plugin/extension_test.go new file mode 100644 index 000000000..9e0410fe1 --- /dev/null +++ b/pkg/plugin/extension_test.go @@ -0,0 +1,60 @@ +package plugin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestExtension(t *testing.T) { + expected := struct { + Id id.PluginExtensionID + Type ExtensionType + Name i18n.String + Description i18n.String + Icon string + Schema id.PropertySchemaID + Visualizer visualizer.Visualizer + }{ + Id: "xxx", + Type: ExtensionTypePrimitive, + Name: i18n.StringFrom("aaa"), + Description: i18n.StringFrom("ddd"), + Icon: "test", + Schema: id.MustPropertySchemaID("hoge#0.1.0/fff"), + Visualizer: "vvv", + } + + actual := NewExtension(). + ID("xxx"). + Name(i18n.StringFrom("aaa")). + Description(i18n.StringFrom("ddd")). + Schema(id.MustPropertySchemaID("hoge#0.1.0/fff")). + Icon("test"). + Visualizer("vvv"). + Type(ExtensionTypePrimitive). 
+ MustBuild() + + assert.Equal(t, expected.Visualizer, actual.Visualizer()) + assert.Equal(t, expected.Type, actual.Type()) + assert.Equal(t, expected.Description, actual.Description()) + assert.Equal(t, expected.Name, actual.Name()) + assert.Equal(t, expected.Icon, actual.Icon()) + assert.Equal(t, expected.Schema, actual.Schema()) + assert.Equal(t, expected.Id, actual.ID()) +} + +func TestExtension_Rename(t *testing.T) { + p := NewExtension().ID("aaa").Name(i18n.StringFrom("x")).MustBuild() + p.Rename(i18n.StringFrom("z")) + assert.Equal(t, i18n.StringFrom("z"), p.Name()) +} + +func TestExtension_SetDescription(t *testing.T) { + p := NewExtension().ID("xx").MustBuild() + p.SetDescription(i18n.StringFrom("xxx")) + assert.Equal(t, i18n.StringFrom("xxx"), p.Description()) +} diff --git a/pkg/plugin/loader.go b/pkg/plugin/loader.go new file mode 100644 index 000000000..d93ba8e42 --- /dev/null +++ b/pkg/plugin/loader.go @@ -0,0 +1,9 @@ +package plugin + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type Loader func(context.Context, ...id.PluginID) ([]*Plugin, error) diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go new file mode 100644 index 000000000..a30cbf50e --- /dev/null +++ b/pkg/plugin/manifest/convert.go @@ -0,0 +1,296 @@ +package manifest + +import ( + "fmt" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +func (i *Root) manifest() (*Manifest, error) { + var pid id.PluginID + var err error + if i.System && string(i.ID) == id.OfficialPluginID.Name() { + pid = id.OfficialPluginID + } else { + pid, err = id.PluginIDFrom(string(i.ID) + "#" + i.Version) + if err != nil { + return nil, ErrInvalidManifest + } + } + + if i.Title == "" { + i.Title = string(i.ID) + } + + var pluginSchema *property.Schema + if 
i.Schema != nil { + schema, err := i.Schema.schema(pid, "@") + if err != nil { + return nil, err + } + pluginSchema = schema + } + + extensions := make([]*plugin.Extension, 0, len(i.Extensions)) + extensionSchemas := make([]*property.Schema, 0, len(i.Extensions)) + for _, e := range i.Extensions { + extension, extensionSchema, err2 := e.extension(pid, i.System) + if err2 != nil { + return nil, err2 + } + extensions = append(extensions, extension) + extensionSchemas = append(extensionSchemas, extensionSchema) + } + + var author, desc, repository string + if i.Author != nil { + author = *i.Author + } + if i.Description != nil { + desc = *i.Description + } + if i.Repository != nil { + repository = *i.Repository + } + + p, err := plugin.New(). + ID(pid). + Name(i18n.StringFrom(i.Title)). + Author(author). + Description(i18n.StringFrom(desc)). + RepositoryURL(repository). + Schema(pluginSchema.IDRef()). + Extensions(extensions). + Build() + if err != nil { + return nil, err + } + + return &Manifest{ + Plugin: p, + Schema: pluginSchema, + ExtensionSchema: extensionSchemas, + }, nil +} + +func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, *property.Schema, error) { + eid := string(i.ID) + schema, err := i.Schema.schema(pluginID, eid) + if err != nil { + return nil, nil, err + } + + var viz visualizer.Visualizer + switch i.Visualizer { + case "cesium": + viz = visualizer.VisualizerCesium + default: + return nil, nil, ErrInvalidManifest + } + + var typ plugin.ExtensionType + switch i.Type { + case "primitive": + typ = plugin.ExtensionTypePrimitive + case "widget": + typ = plugin.ExtensionTypeWidget + case "block": + typ = plugin.ExtensionTypeBlock + case "visualizer": + typ = plugin.ExtensionTypeVisualizer + case "infobox": + typ = plugin.ExtensionTypeInfobox + default: + return nil, nil, ErrInvalidManifest + } + + var desc, icon string + if i.Description != nil { + desc = *i.Description + } + if i.Icon != nil { + icon = *i.Icon + } + + ext, 
err := plugin.NewExtension(). + ID(id.PluginExtensionID(eid)). + Name(i18n.StringFrom(i.Title)). + Description(i18n.StringFrom(desc)). + Visualizer(viz). + Type(typ). + Icon(icon). + Schema(schema.ID()). + System(sys). + Build() + + if err != nil { + return nil, nil, err + } + return ext, schema, nil +} + +func (i *PropertySchema) schema(pluginID id.PluginID, idstr string) (*property.Schema, error) { + psid, err := id.PropertySchemaIDFrom(pluginID.String() + "/" + idstr) + if err != nil { + return nil, err + } + + if i == nil { + return property.NewSchema(). + ID(psid). + Build() + } + + // items + items := make([]*property.SchemaGroup, 0, len(i.Groups)) + for _, d := range i.Groups { + item, err := d.schemaGroup(psid) + if err != nil { + return nil, err + } + items = append(items, item) + } + + // schema + schema, err := property.NewSchema(). + ID(psid). + Version(int(i.Version)). + Groups(items). + LinkableFields(i.Linkable.linkable()). + Build() + if err != nil { + return nil, err + } + return schema, nil +} + +func (p *PropertyLinkableFields) linkable() property.LinkableFields { + if p == nil { + return property.LinkableFields{} + } + return property.LinkableFields{ + LatLng: p.Latlng.pointer(), + URL: p.URL.pointer(), + } +} + +func (p *PropertyPointer) pointer() *property.Pointer { + if p == nil || p.FieldID == "" && p.SchemaGroupID == "" { + return nil + } + return property.NewPointer( + id.PropertySchemaFieldIDFrom(&p.SchemaGroupID), + nil, + id.PropertySchemaFieldIDFrom(&p.FieldID), + ) +} + +func (i PropertySchemaGroup) schemaGroup(sid id.PropertySchemaID) (*property.SchemaGroup, error) { + title := i.Title + var representativeField *id.PropertySchemaFieldID + if i.RepresentativeField != nil { + representativeField2 := id.PropertySchemaFieldID(*i.RepresentativeField) + representativeField = &representativeField2 + } + + // fields + fields := make([]*property.SchemaField, 0, len(i.Fields)) + for _, d := range i.Fields { + field, err := d.schemaField() + if 
err != nil { + return nil, err + } + fields = append(fields, field) + } + + return property.NewSchemaGroup(). + ID(id.PropertySchemaFieldID(i.ID)). + Schema(sid). + IsList(i.List). + Fields(fields). + Title(i18n.StringFrom(title)). + RepresentativeField(representativeField). + IsAvailableIf(i.AvailableIf.condition()). + Build() +} + +func (o *PropertyCondition) condition() *property.Condition { + if o == nil { + return nil + } + return &property.Condition{ + Field: id.PropertySchemaFieldID(o.Field), + Value: toValue(o.Value, o.Type), + } +} + +func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { + t, ok := property.ValueTypeFrom(string(i.Type)) + if !ok { + return nil, fmt.Errorf("schema field: invalid value type") + } + + var title, desc, prefix, suffix string + if i.Title != nil { + title = *i.Title + } + if i.Description != nil { + desc = *i.Description + } + if i.Prefix != nil { + prefix = *i.Prefix + } + if i.Suffix != nil { + suffix = *i.Suffix + } + + var choices []property.SchemaFieldChoice + if len(i.Choices) > 0 { + choices = make([]property.SchemaFieldChoice, 0, len(i.Choices)) + for _, c := range i.Choices { + if c.Key == "" { + continue + } + choices = append(choices, *c.choice()) + } + } + + f, err := property.NewSchemaField(). + ID(id.PropertySchemaFieldID(i.ID)). + Name(i18n.StringFrom(title)). + Description(i18n.StringFrom(desc)). + Type(t). + Prefix(prefix). + Suffix(suffix). + DefaultValue(toValue(i.DefaultValue, i.Type)). + MinRef(i.Min). + MaxRef(i.Max). + Choices(choices). + UIRef(property.SchemaFieldUIFromRef(i.UI)). + IsAvailableIf(i.AvailableIf.condition()). 
+ Build() + return f, err +} + +func (c *Choice) choice() *property.SchemaFieldChoice { + if c == nil { + return nil + } + return &property.SchemaFieldChoice{ + Key: c.Key, + Title: i18n.StringFrom(c.Label), + Icon: c.Icon, + } +} + +func toValue(v interface{}, t Valuetype) *property.Value { + vt, ok := property.ValueTypeFrom(string(t)) + if !ok { + return nil + } + return vt.ValueFromUnsafe(v) +} diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go new file mode 100644 index 000000000..33830295e --- /dev/null +++ b/pkg/plugin/manifest/convert_test.go @@ -0,0 +1,755 @@ +package manifest + +import ( + "errors" + "fmt" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestToValue(t *testing.T) { + v := property.ValueTypeBool + var vv *property.Value = nil + assert.Equal(t, toValue(false, "bool"), v.ValueFromUnsafe(false)) + assert.Equal(t, toValue("xx", "xxx"), vv) +} + +func TestChoice(t *testing.T) { + testCases := []struct { + name string + ch *Choice + expected *property.SchemaFieldChoice + }{ + { + name: "success", + ch: &Choice{ + Icon: "aaa", + Key: "nnn", + Label: "vvv", + }, + expected: &property.SchemaFieldChoice{ + Key: "nnn", + Title: i18n.StringFrom("vvv"), + Icon: "aaa", + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, *tc.expected, *tc.ch.choice()) + }) + } + +} + +func TestManifest(t *testing.T) { + a := "aaa" + d := "ddd" + r := "rrr" + testCases := []struct { + name string + root *Root + expected *Manifest + err error + }{ + { + name: "success official plugin", + root: &Root{ + Author: &a, + Title: "aaa", + ID: "reearth", + Description: &d, + Extensions: []Extension{{ + Description: nil, + ID: "cesium", + Title: "", + Schema: 
nil, + Type: "visualizer", + Visualizer: "cesium", + }}, + Repository: &r, + System: true, + Version: "1.1.1", + }, + expected: &Manifest{ + Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), + ExtensionSchema: nil, + Schema: nil, + }, + err: nil, + }, + { + name: "success empty name", + root: &Root{ + Title: "", + ID: "reearth", + System: true, + }, + expected: &Manifest{ + Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), + ExtensionSchema: nil, + Schema: nil, + }, + err: nil, + }, + { + name: "fail invalid manifest - extension", + root: &Root{ + Author: &a, + Title: "aaa", + ID: "reearth", + Description: &d, + Extensions: []Extension{{ + Description: nil, + ID: "cesium", + Title: "", + Schema: nil, + Type: "visualizer", + Visualizer: "", + }}, + Repository: &r, + System: true, + Version: "1.1.1", + }, + expected: &Manifest{ + Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), + ExtensionSchema: nil, + Schema: nil, + }, + err: ErrInvalidManifest, + }, + { + name: "fail invalid manifest - id", + root: &Root{ + Title: "", + ID: "", + System: false, + }, + expected: &Manifest{ + Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), + }, + err: ErrInvalidManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m, err := tc.root.manifest() + if err == nil { + assert.Equal(tt, tc.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(tt, tc.expected.Plugin.Name(), m.Plugin.Name()) + assert.Equal(tt, len(tc.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) + 
//assert.Equal(tt,tc.expected.Schema..) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestExtension(t *testing.T) { + d := "ddd" + i := "xx:/aa.bb" + testCases := []struct { + name string + ext Extension + sys bool + pid id.PluginID + expectedPE *plugin.Extension + expectedPS *property.Schema + err error + }{ + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Icon: &i, + Schema: nil, + Type: "visualizer", + Visualizer: "cesium", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeVisualizer).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + err: nil, + }, + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "primitive", + Visualizer: "cesium", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypePrimitive).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + err: nil, + }, + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "widget", + Visualizer: "cesium", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + err: nil, + }, + { + name: "success official extension", + ext: 
Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "block", + Visualizer: "cesium", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeBlock).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + err: nil, + }, + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "infobox", + Visualizer: "cesium", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeInfobox).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + err: nil, + }, + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "visualizer", + Visualizer: "", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: nil, + expectedPS: nil, + err: ErrInvalidManifest, + }, + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "", + Visualizer: "cesium", + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: nil, + expectedPS: nil, + err: ErrInvalidManifest, + }, + { + name: "success official extension", + ext: Extension{ + Description: &d, + ID: "cesium", + Title: "Cesium", + Schema: nil, + Type: "visualizer", + Visualizer: "cesium", + }, + sys: false, + pid: id.OfficialPluginID, + expectedPE: nil, + expectedPS: nil, + err: ErrInvalidManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + pe, ps, err := 
tc.ext.extension(tc.pid, tc.sys) + if err == nil { + assert.Equal(tt, tc.expectedPE.ID(), pe.ID()) + assert.Equal(tt, tc.expectedPE.Visualizer(), pe.Visualizer()) + assert.Equal(tt, tc.expectedPE.Type(), pe.Type()) + assert.Equal(tt, tc.expectedPE.Name(), pe.Name()) + assert.Equal(tt, tc.expectedPS.ID(), ps.ID()) + assert.Equal(tt, tc.expectedPS.ID(), ps.ID()) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestPointer(t *testing.T) { + sg := "aaa" + f := "xxx" + testCases := []struct { + name string + pp *PropertyPointer + expected *property.Pointer + }{ + { + name: "failed nil PropertyPointer", + pp: nil, + expected: nil, + }, + { + name: "failed empty FieldID and SchemaGroupID", + pp: &PropertyPointer{ + FieldID: "", + SchemaGroupID: "", + }, + expected: nil, + }, + { + name: "success", + pp: &PropertyPointer{ + FieldID: "xxx", + SchemaGroupID: "aaa", + }, + expected: property.NewPointer(id.PropertySchemaFieldIDFrom(&sg), nil, id.PropertySchemaFieldIDFrom(&f)), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.pp.pointer()) + }) + } +} +func TestCondition(t *testing.T) { + v := toValue("xxx", "string") + testCases := []struct { + name string + con *PropertyCondition + expected *property.Condition + }{ + { + name: "failed nil condition", + con: nil, + expected: nil, + }, + { + name: "success", + con: &PropertyCondition{ + Field: "aaa", + Type: "string", + Value: "xxx", + }, + expected: &property.Condition{ + Field: "aaa", + Value: v, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.con.condition()) + }) + } +} + +func TestLinkable(t *testing.T) { + l := "location" + d := "default" + u := "url" + testCases := []struct { + name string + p *PropertyLinkableFields + expected property.LinkableFields + }{ + { + name: "nil linkable fields", + p: nil, + 
expected: property.LinkableFields{}, + }, + { + name: "success linkable fields", + p: &PropertyLinkableFields{ + Latlng: &PropertyPointer{ + FieldID: "location", + SchemaGroupID: "default", + }, + URL: &PropertyPointer{ + FieldID: "url", + SchemaGroupID: "default", + }, + }, + expected: property.LinkableFields{ + LatLng: property.NewPointer(id.PropertySchemaFieldIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&l)), + URL: property.NewPointer(id.PropertySchemaFieldIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&u)), + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.p.linkable()) + }) + } +} + +func TestSchema(t *testing.T) { + str := "ddd" + testCases := []struct { + name, psid string + ps *PropertySchema + pid id.PluginID + expected *property.Schema + err error + }{ + { + name: "fail invalid id", + psid: "@", + ps: &PropertySchema{ + Groups: nil, + Linkable: nil, + Version: 0, + }, + pid: id.MustPluginID("aaa#1.1.1"), + expected: nil, + err: id.ErrInvalidID, + }, + { + name: "success nil PropertySchema", + psid: "marker", + ps: nil, + pid: id.OfficialPluginID, + expected: property.NewSchema().ID(id.MustPropertySchemaID("reearth/marker")).MustBuild(), + }, + { + name: "success ", + psid: "marker", + ps: &PropertySchema{ + Groups: []PropertySchemaGroup{{ + AvailableIf: nil, + Description: &str, + Fields: []PropertySchemaField{{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "location", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "latlng", + UI: nil, + }}, + ID: "default", + List: false, + Title: "marker", + }}, + Linkable: nil, + Version: 0, + }, + pid: id.OfficialPluginID, + expected: 
property.NewSchema().ID(id.MustPropertySchemaID("reearth/marker")).Groups([]*property.SchemaGroup{property.NewSchemaGroup().ID("default").Schema(id.MustPropertySchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild()}).MustBuild(), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res, err := tc.ps.schema(tc.pid, tc.psid) + if err == nil { + assert.Equal(tt, len(tc.expected.Groups()), len(res.Groups())) + assert.Equal(tt, tc.expected.LinkableFields(), res.LinkableFields()) + assert.Equal(tt, tc.expected.Version(), res.Version()) + if len(res.Groups()) > 0 { + exg := tc.expected.Group(res.Groups()[0].ID()) + assert.NotNil(tt, exg) + } + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestSchemaGroup(t *testing.T) { + str := "marker" + des := "ddd" + testCases := []struct { + name string + psg PropertySchemaGroup + sid id.PropertySchemaID + expected *property.SchemaGroup + err error + }{ + { + name: "success reearth/cesium", + psg: PropertySchemaGroup{ + AvailableIf: nil, + Description: &des, + Fields: []PropertySchemaField{{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "location", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "latlng", + UI: nil, + }}, + ID: "default", + List: false, + Title: "marker", + }, + sid: id.MustPropertySchemaID("reearth/cesium"), + expected: property.NewSchemaGroup().ID("default").Title(i18n.StringFrom("marker")).Title(i18n.StringFrom(str)).Schema(id.MustPropertySchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild(), + err: nil, + }, + { + name: "fail invalid schema field", + psg: PropertySchemaGroup{ + AvailableIf: nil, + Description: &des, + Fields: []PropertySchemaField{{ + 
AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "location", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "xx", + UI: nil, + }}, + ID: "default", + List: false, + Title: "marker", + }, + sid: id.MustPropertySchemaID("reearth/cesium"), + expected: nil, + err: fmt.Errorf("schema field: invalid value type"), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res, err := tc.psg.schemaGroup(tc.sid) + if err == nil { + assert.Equal(tt, tc.expected.Title().String(), res.Title().String()) + assert.Equal(tt, tc.expected.Title(), res.Title()) + assert.Equal(tt, tc.expected.Schema(), res.Schema()) + assert.Equal(tt, len(tc.expected.Fields()), len(res.Fields())) + if len(res.Fields()) > 0 { + exf := res.Fields()[0] + assert.NotNil(tt, tc.expected.Field(exf.ID())) + } + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestSchemaField(t *testing.T) { + str := "xx" + testCases := []struct { + name string + psg PropertySchemaField + expected *property.SchemaField + err error + }{ + { + name: "success name not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: &str, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField().ID("aaa").Name(i18n.StringFrom("xx")).Description(i18n.StringFrom("")).Type(property.ValueTypeString).MustBuild(), + err: nil, + }, + { + name: "success description not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: &str, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField().ID("aaa").Name(i18n.StringFrom("")).Description(i18n.StringFrom("xx")).Type(property.ValueTypeString).MustBuild(), + err: nil, + }, + { + 
name: "success prefix not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: &str, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField().ID("aaa").Prefix("xx").Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).Type(property.ValueTypeString).MustBuild(), + err: nil, + }, + { + name: "success suffix not nil", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: nil, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: &str, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField().ID("aaa").Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).Suffix("xx").Type(property.ValueTypeString).MustBuild(), + err: nil, + }, + { + name: "success choices not empty", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: []Choice{ + { + Icon: "aaa", + Key: "nnn", + Label: "vvv", + }, + }, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField().ID("aaa").Choices([]property.SchemaFieldChoice{ + { + Key: "nnn", + Title: i18n.StringFrom("vvv"), + Icon: "aaa", + }, + }).Type(property.ValueTypeString).Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).MustBuild(), + err: nil, + }, + { + name: "success choices empty key", + psg: PropertySchemaField{ + AvailableIf: nil, + Choices: []Choice{ + { + Icon: "aaa", + Key: "", + Label: "vvv", + }, + }, + DefaultValue: nil, + Description: nil, + ID: "aaa", + Max: nil, + Min: nil, + Title: nil, + Prefix: nil, + Suffix: nil, + Type: "string", + UI: nil, + }, + expected: property.NewSchemaField().ID("aaa").Choices([]property.SchemaFieldChoice{}).Type(property.ValueTypeString).Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).MustBuild(), + 
err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res, err := tc.psg.schemaField() + if err == nil { + assert.Equal(tt, tc.expected.Title(), res.Title()) + assert.Equal(tt, tc.expected.Description(), res.Description()) + assert.Equal(tt, tc.expected.Suffix(), res.Suffix()) + assert.Equal(tt, tc.expected.Prefix(), res.Prefix()) + assert.Equal(tt, tc.expected.Choices(), res.Choices()) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} diff --git a/pkg/plugin/manifest/manifest.go b/pkg/plugin/manifest/manifest.go new file mode 100644 index 000000000..29b4db008 --- /dev/null +++ b/pkg/plugin/manifest/manifest.go @@ -0,0 +1,12 @@ +package manifest + +import ( + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Manifest struct { + Plugin *plugin.Plugin + ExtensionSchema []*property.Schema + Schema *property.Schema +} diff --git a/pkg/plugin/manifest/parser.go b/pkg/plugin/manifest/parser.go new file mode 100644 index 000000000..246d78385 --- /dev/null +++ b/pkg/plugin/manifest/parser.go @@ -0,0 +1,98 @@ +package manifest + +//go:generate go run github.com/idubinskiy/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/embed -o schema_json_gen.go -n SchemaJSON -i ../../../plugin_manifest_schema.json + +import ( + "encoding/json" + "errors" + "fmt" + "io" + + "github.com/xeipuuv/gojsonschema" +) + +var ( + ErrInvalidManifest error = errors.New("invalid manifest") + ErrFailedToParseManifest error = errors.New("failed to parse plugin manifest") + ErrSystemManifest = errors.New("cannot build system manifest") + schemaLoader = gojsonschema.NewStringLoader(SchemaJSON) +) + +func Parse(source io.Reader) (*Manifest, error) { + // TODO: When using gojsonschema.NewReaderLoader, gojsonschema.Validate returns io.EOF error. 
+ doc, err := io.ReadAll(source) + if err != nil { + return nil, ErrFailedToParseManifest + } + + documentLoader := gojsonschema.NewBytesLoader(doc) + if err := validate(documentLoader); err != nil { + return nil, err + } + + root := Root{} + // err = json.NewDecoder(reader2).Decode(&root) + if err = json.Unmarshal(doc, &root); err != nil { + return nil, ErrFailedToParseManifest + } + + manifest, err := root.manifest() + if err != nil { + return nil, err + } + if manifest.Plugin.ID().System() { + return nil, ErrSystemManifest + } + + return manifest, nil +} + +func ParseSystemFromStaticJSON(source string) (*Manifest, error) { + src := []byte(source) + documentLoader := gojsonschema.NewBytesLoader(src) + if err := validate(documentLoader); err != nil { + return nil, err + } + + root := Root{} + if err := json.Unmarshal(src, &root); err != nil { + return nil, ErrFailedToParseManifest + } + + manifest, err := root.manifest() + if err != nil { + return nil, err + } + + return manifest, nil +} + +func MustParseSystemFromStaticJSON(source string) *Manifest { + m, err := ParseSystemFromStaticJSON(source) + if err != nil { + panic(err) + } + return m +} + +func validate(ld gojsonschema.JSONLoader) error { + // documentLoader, reader2 := gojsonschema.NewReaderLoader(source) + result, err := gojsonschema.Validate(schemaLoader, ld) + if err != nil { + return ErrFailedToParseManifest + } + + if !result.Valid() { + var errstr string + for i, e := range result.Errors() { + if i > 0 { + errstr += ", " + } + errstr += e.String() + } + return fmt.Errorf("invalid manifest: %w", errors.New(errstr)) + } + + return nil +} diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go new file mode 100644 index 000000000..155d9774b --- /dev/null +++ b/pkg/plugin/manifest/parser_test.go @@ -0,0 +1,202 @@ +package manifest + +import ( + "io" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + 
"github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" + "github.com/xeipuuv/gojsonschema" +) + +func TestParse(t *testing.T) { + testCases := []struct { + name string + input io.Reader + expected *Manifest + err error + }{ + { + name: "success create manifest", + input: strings.NewReader(`{ + "id": "aaa", + "title": "bbb", + "version": "1.1.1" + }`), + expected: &Manifest{ + Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), + ExtensionSchema: []*property.Schema{}, + Schema: nil, + }, + err: nil, + }, + { + name: "fail not valid JSON", + input: strings.NewReader(""), + expected: nil, + err: ErrFailedToParseManifest, + }, + { + name: "fail system manifest", + input: strings.NewReader(`{ + "system":true, + "id": "reearth", + "title": "bbb", + "version": "1.1.1" + }`), + expected: nil, + err: ErrSystemManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m, err := Parse(tc.input) + if err == nil { + assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) + } else { + assert.Equal(t, tc.err, err) + } + }) + } + +} + +func TestParseSystemFromStaticJSON(t *testing.T) { + testCases := []struct { + name, input string + expected *Manifest + err error + }{ + { + name: "success create manifest", + input: `{ + "id": "aaa", + "title": "bbb", + "version": "1.1.1" + }`, + expected: &Manifest{ + Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), + ExtensionSchema: []*property.Schema{}, + Schema: nil, + }, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: nil, + err: ErrFailedToParseManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m, err := ParseSystemFromStaticJSON(tc.input) + if err == nil { + assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) + 
} else { + assert.Equal(t, tc.err, err) + } + }) + } + +} + +func TestMustParseSystemFromStaticJSON(t *testing.T) { + testCases := []struct { + name, input string + expected *Manifest + err error + }{ + { + name: "success create manifest", + input: `{ + "id": "aaa", + "name": "bbb", + "version": "1.1.1" + }`, + expected: &Manifest{ + Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), + ExtensionSchema: []*property.Schema{}, + Schema: nil, + }, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: nil, + err: ErrFailedToParseManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + var m *Manifest + defer func() { + if r := recover(); r == nil { + assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) + } + }() + m = MustParseSystemFromStaticJSON(tc.input) + + }) + } + +} + +func TestValidate(t *testing.T) { + testCases := []struct { + name, input string + err bool + }{ + { + name: "success create manifest", + input: `{ + "id": "aaa", + "title": "bbb", + "version": "1.1.1" + }`, + + err: false, + }, + { + name: "fail not valid JSON", + input: "", + err: true, + }, + { + name: "fail invalid name type", + input: `{ + "id": "aaa", + "title": 123, + "version": "1.1.1" + }`, + + err: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + err := validate(gojsonschema.NewBytesLoader([]byte(tc.input))) + if tc.err { + assert.Error(tt, err) + } else { + assert.NoError(tt, err) + } + }) + } + +} diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go new file mode 100644 index 000000000..f4422a398 --- /dev/null +++ b/pkg/plugin/manifest/parser_translation.go @@ -0,0 +1,187 @@ +package manifest + +// Generating types with schema typer for translation schema is disabled because some fields are wrongly typed. 
+// DISABLED go:generate go run github.com/idubinskiy/schematyper -o schema_translation_gen.go --package manifest --prefix Translation ../../../plugin_manifest_schema_translation.json +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/embed -o schema_json_translation_gen.go -n SchemaTranslationJSON -i ../../../plugin_manifest_schema_translation.json + +import ( + "encoding/json" + "errors" + "fmt" + "io" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/xeipuuv/gojsonschema" +) + +var ( + ErrInvalidManifestTranslation error = errors.New("invalid manifest translation") + ErrFailedToParseManifestTranslation error = errors.New("failed to parse plugin manifest translation") + schemaTranslationLoader gojsonschema.JSONLoader = gojsonschema.NewStringLoader(SchemaTranslationJSON) +) + +func ParseTranslation(source io.Reader) (*TranslationRoot, error) { + // TODO: When using gojsonschema.NewReaderLoader, gojsonschema.Validate returns io.EOF error. 
+ doc, err := io.ReadAll(source) + if err != nil { + return nil, ErrFailedToParseManifestTranslation + } + + documentLoader := gojsonschema.NewBytesLoader(doc) + if err := validateTranslation(documentLoader); err != nil { + return nil, err + } + + root := TranslationRoot{} + // err = json.NewDecoder(reader2).Decode(&root) + if err = json.Unmarshal(doc, &root); err != nil { + return nil, ErrInvalidManifestTranslation + } + + return &root, nil +} + +func ParseTranslationFromStaticJSON(source string) (*TranslationRoot, error) { + src := []byte(source) + + documentLoader := gojsonschema.NewBytesLoader(src) + if err := validateTranslation(documentLoader); err != nil { + return nil, err + } + + tr := TranslationRoot{} + if err := json.Unmarshal(src, &tr); err != nil { + return nil, ErrFailedToParseManifest + } + return &tr, nil +} + +func MustParseTransSystemFromStaticJSON(source string) *TranslationRoot { + m, err := ParseTranslationFromStaticJSON(source) + if err != nil { + panic(err) + } + return m +} + +func validateTranslation(ld gojsonschema.JSONLoader) error { + // documentLoader, reader2 := gojsonschema.NewReaderLoader(source) + result, err := gojsonschema.Validate(schemaTranslationLoader, ld) + if err != nil { + return ErrFailedToParseManifest + } + + if !result.Valid() { + var errstr string + for i, e := range result.Errors() { + if i > 0 { + errstr += ", " + } + errstr += e.String() + } + return fmt.Errorf("invalid manifest translation: %w", errors.New(errstr)) + } + + return nil +} + +func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Manifest { + for lang, t := range tl { + if t == nil { + continue + } + + if t.Title != nil { + name := m.Plugin.Name() + name[lang] = *t.Title + m.Plugin.Rename(name) + } + + if t.Description != nil { + des := m.Plugin.Description() + des[lang] = *t.Description + m.Plugin.SetDescription(des) + } + + for key, te := range t.Extensions { + ext := m.Plugin.Extension(id.PluginExtensionID(key)) + if ext == 
nil { + continue + } + + if te.Title != nil { + name := ext.Name() + name[lang] = *te.Title + ext.Rename(name) + } + + if te.Description != nil { + des := ext.Description() + des[lang] = *te.Description + ext.SetDescription(des) + } + + var ps *property.Schema + for _, s := range m.ExtensionSchema { + if s.ID() == ext.Schema() { + ps = s + break + } + } + if ps == nil { + continue + } + + for key, tsg := range te.PropertySchema { + psg := ps.Group(id.PropertySchemaFieldID(key)) + if psg == nil { + continue + } + + if tsg.Title != nil { + t := psg.Title() + t[lang] = *tsg.Title + psg.SetTitle(t) + } + + // PropertySchemaGroup does not have description for now + // if tsg.Description != nil { + // t := psg.Description() + // t[lang] = *tsg.Description + // psg.SetDescription(t) + // } + + for key, tsf := range tsg.Fields { + psf := psg.Field(id.PropertySchemaFieldID(key)) + if psf == nil { + continue + } + + if tsf.Title != nil { + t := psf.Title() + t[lang] = *tsf.Title + psf.SetTitle(t) + } + + if tsf.Description != nil { + t := psf.Description() + t[lang] = *tsf.Description + psf.SetDescription(t) + } + + for key, label := range tsf.Choices { + psfc := psf.Choice(key) + if psfc == nil { + continue + } + + psfc.Title[lang] = label + } + } + } + } + } + + return m +} diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go new file mode 100644 index 000000000..7747fe81f --- /dev/null +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -0,0 +1,416 @@ +package manifest + +import ( + "errors" + "strings" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/xeipuuv/gojsonschema" + + "github.com/stretchr/testify/assert" +) + +const translatedManifest = `{ + "description": "test plugin desc", + "title": "test plugin name", + "extensions": { + "test_ext": { + "title": "test ext name", + "propertySchema": { + "test_ps": { + "description": "test ps desc", + "title": "test ps title", + "fields": { 
+ "test_field": { + "title": "test field name", + "description": "test field desc", + "choices": { + "test_key": "test choice value" + } + } + } + } + } + } + } +}` + +func TestParseTranslation(t *testing.T) { + desc := "test plugin desc" + name := "test plugin name" + ext_name := "test ext name" + ps_title := "test ps title" + ps_desc := "test ps desc" + psf_desc := "test field desc" + psf_name := "test field name" + testCases := []struct { + name string + input string + expected *TranslationRoot + err error + }{ + { + name: "success create translation", + input: translatedManifest, + expected: &TranslationRoot{ + Description: &desc, + Extensions: map[string]TranslationExtension{ + "test_ext": { + Title: &ext_name, + PropertySchema: TranslationPropertySchema{ + "test_ps": TranslationPropertySchemaGroup{ + Description: &ps_desc, + Fields: map[string]TranslationPropertySchemaField{ + "test_field": { + Choices: map[string]string{ + "test_key": "test choice value"}, + Description: &psf_desc, + Title: &psf_name, + }, + }, + Title: &ps_title, + }, + }, + }, + }, + Title: &name, + Schema: nil, + }, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: nil, + err: ErrFailedToParseManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + r := strings.NewReader(tc.input) + res, err := ParseTranslation(r) + if err == nil { + assert.Equal(tt, *tc.expected.Title, *res.Title) + assert.Equal(tt, *res.Description, *tc.expected.Description) + assert.Equal(tt, res.Schema, tc.expected.Schema) + if len(res.Extensions) > 0 { + for k, v := range res.Extensions { + assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) + if len(v.PropertySchema) > 0 { + for kk, vv := range v.PropertySchema { + assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) + assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) + if len(vv.Fields) > 0 { + for 
kkk, vvv := range vv.Fields { + assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) + assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) + if len(vvv.Choices) > 0 { + for kkkk, vvvv := range vvv.Choices { + assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) + } + } + } + } + } + } + } + } + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestParseTranslationFromStaticJSON(t *testing.T) { + desc := "test plugin desc" + name := "test plugin name" + ext_name := "test ext name" + ps_title := "test ps title" + ps_desc := "test ps desc" + psf_desc := "test field desc" + psf_name := "test field name" + testCases := []struct { + name string + input string + expected *TranslationRoot + err error + }{ + { + name: "success create translation", + input: translatedManifest, + expected: &TranslationRoot{ + Description: &desc, + Extensions: map[string]TranslationExtension{ + "test_ext": { + Title: &ext_name, + PropertySchema: TranslationPropertySchema{ + "test_ps": TranslationPropertySchemaGroup{ + Description: &ps_desc, + Fields: map[string]TranslationPropertySchemaField{ + "test_field": { + Choices: map[string]string{ + "test_key": "test choice value"}, + Description: &psf_desc, + Title: &psf_name, + }, + }, + Title: &ps_title, + }, + }, + }, + }, + Title: &name, + Schema: nil, + }, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: nil, + err: ErrFailedToParseManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res, err := ParseTranslationFromStaticJSON(tc.input) + if err == nil { + assert.Equal(tt, *tc.expected.Title, *res.Title) + assert.Equal(tt, *res.Description, *tc.expected.Description) + assert.Equal(tt, res.Schema, tc.expected.Schema) + if len(res.Extensions) > 0 { + for k, v := range res.Extensions { + 
assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) + if len(v.PropertySchema) > 0 { + for kk, vv := range v.PropertySchema { + assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) + assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) + if len(vv.Fields) > 0 { + for kkk, vvv := range vv.Fields { + assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) + assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) + if len(vvv.Choices) > 0 { + for kkkk, vvvv := range vvv.Choices { + assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) + } + } + } + } + } + } + } + } + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestMustParseTransSystemFromStaticJSON(t *testing.T) { + desc := "test plugin desc" + name := "test plugin name" + ext_name := "test ext name" + ps_title := "test ps title" + ps_desc := "test ps desc" + psf_desc := "test field desc" + psf_name := "test field name" + testCases := []struct { + name string + input string + expected *TranslationRoot + err error + }{ + { + name: "success create translation", + input: translatedManifest, + expected: &TranslationRoot{ + Description: &desc, + Extensions: map[string]TranslationExtension{ + "test_ext": { + Title: &ext_name, + PropertySchema: TranslationPropertySchema{ + "test_ps": TranslationPropertySchemaGroup{ + Description: &ps_desc, + Fields: map[string]TranslationPropertySchemaField{ + "test_field": { + Choices: map[string]string{ + "test_key": "test choice value"}, + Description: &psf_desc, + Title: &psf_name, + }, + }, + Title: &ps_title, + }, + }, + }, + }, + Title: &name, + Schema: nil, + }, + err: nil, + }, + { + name: "fail not valid JSON", + input: "", + expected: nil, + err: ErrFailedToParseManifest, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, 
func(tt *testing.T) { + tt.Parallel() + tc := tc + var res *TranslationRoot + defer func() { + if r := recover(); r == nil { + assert.Equal(tt, *tc.expected.Title, *res.Title) + assert.Equal(tt, *res.Description, *tc.expected.Description) + assert.Equal(tt, res.Schema, tc.expected.Schema) + if len(res.Extensions) > 0 { + for k, v := range res.Extensions { + assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) + if len(v.PropertySchema) > 0 { + for kk, vv := range v.PropertySchema { + assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) + assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) + if len(vv.Fields) > 0 { + for kkk, vvv := range vv.Fields { + assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) + assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) + if len(vvv.Choices) > 0 { + for kkkk, vvvv := range vvv.Choices { + assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) + } + } + } + } + } + } + } + } + + } + }() + res = MustParseTransSystemFromStaticJSON(tc.input) + }) + } +} + +func TestMergeManifestTranslation(t *testing.T) { + const manifest = `{ + "id": "xxx", + "title": "aaa", + "version": "1.1.1", + "description": "ddd", + "extensions": [ + { + "id": "test_ext", + "title": "ttt", + "visualizer": "cesium", + "type": "primitive", + "schema": { + "groups": [ + { + "id": "test_ps", + "title": "sss", + "fields": [ + { + "id": "test_field", + "title": "nnn", + "type": "string", + "description": "kkk" + } + ] + } + ] + } + } + ] +}` + + testCases := []struct { + name string + tl map[string]*TranslationRoot + m *Manifest + Expected *struct { + PluginName, PluginDesc, ExtName, PsTitle, FieldName, FieldDesc i18n.String + } + }{ + { + name: "nil translition list", + tl: nil, + m: nil, + Expected: nil, + }, + { + name: "nil translition list", + tl: 
map[string]*TranslationRoot{"xx": MustParseTransSystemFromStaticJSON(translatedManifest)}, + m: MustParseSystemFromStaticJSON(manifest), + Expected: &struct{ PluginName, PluginDesc, ExtName, PsTitle, FieldName, FieldDesc i18n.String }{ + PluginName: i18n.String{"en": "aaa", "xx": "test plugin name"}, + PluginDesc: i18n.String{"en": "ddd", "xx": "test plugin desc"}, + ExtName: i18n.String{"en": "ttt", "xx": "test ext name"}, + PsTitle: i18n.String{"en": "sss", "xx": "test ps title"}, + FieldName: i18n.String{"en": "nnn", "xx": "test field name"}, + FieldDesc: i18n.String{"en": "kkk", "xx": "test field desc"}, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := MergeManifestTranslation(tc.m, tc.tl) + if res == nil { + assert.Nil(tt, tc.Expected) + } else { + assert.Equal(tt, tc.Expected.PluginDesc, res.Plugin.Description()) + assert.Equal(tt, tc.Expected.PluginName, res.Plugin.Name()) + //assert.Equal(tt, tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) + //assert.Equal(tt, tc.Expected.FieldName, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Name()) + //assert.Equal(tt, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) + //assert.Equal(tt, tc.Expected.ExtName, res.ExtensionSchema[0]) + } + }) + } +} + +func TestValidatTranslation(t *testing.T) { + testCases := []struct { + name, input string + err bool + }{ + { + name: "success create translation", + input: translatedManifest, + + err: false, + }, + { + name: "fail not valid JSON", + input: "", + err: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + err := validateTranslation(gojsonschema.NewBytesLoader([]byte(tc.input))) + if tc.err { + assert.Error(tt, err) + } else { + assert.NoError(tt, err) + } + }) + } + +} diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go new 
file mode 100644 index 000000000..815a1567e --- /dev/null +++ b/pkg/plugin/manifest/schema_gen.go @@ -0,0 +1,85 @@ +package manifest + +// generated by "/var/folders/qf/bs41lxts47s6y_d62bj5zgym0000gn/T/go-build906758225/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT + +type Choice struct { + Icon string `json:"icon,omitempty"` + Key string `json:"key"` + Label string `json:"label,omitempty"` +} + +type Extension struct { + Description *string `json:"description,omitempty"` + ID ID `json:"id"` + Icon *string `json:"icon,omitempty"` + Schema *PropertySchema `json:"schema,omitempty"` + Title string `json:"title"` + Type string `json:"type"` + Visualizer string `json:"visualizer"` +} + +type ID string + +type Id string + +type PropertyCondition struct { + Field string `json:"field"` + Type Valuetype `json:"type"` + Value interface{} `json:"value"` +} + +type PropertyLinkableFields struct { + Latlng *PropertyPointer `json:"latlng,omitempty"` + URL *PropertyPointer `json:"url,omitempty"` +} + +type PropertyPointer struct { + FieldID string `json:"fieldId"` + SchemaGroupID string `json:"schemaGroupId"` +} + +type PropertySchema struct { + Groups []PropertySchemaGroup `json:"groups,omitempty"` + Linkable *PropertyLinkableFields `json:"linkable,omitempty"` + Version float64 `json:"version,omitempty"` +} + +type PropertySchemaField struct { + AvailableIf *PropertyCondition `json:"availableIf,omitempty"` + Choices []Choice `json:"choices,omitempty"` + DefaultValue interface{} `json:"defaultValue,omitempty"` + Description *string `json:"description,omitempty"` + ID ID `json:"id"` + Max *float64 `json:"max,omitempty"` + Min *float64 `json:"min,omitempty"` + Prefix *string `json:"prefix,omitempty"` + Suffix *string `json:"suffix,omitempty"` + Title *string `json:"title"` + Type Valuetype `json:"type"` + UI *string `json:"ui,omitempty"` +} + +type PropertySchemaGroup struct { + AvailableIf *PropertyCondition 
`json:"availableIf,omitempty"` + Description *string `json:"description,omitempty"` + Fields []PropertySchemaField `json:"fields,omitempty"` + ID ID `json:"id"` + List bool `json:"list,omitempty"` + RepresentativeField *Id `json:"representativeField,omitempty"` + Title string `json:"title"` +} + +type Root struct { + Author *string `json:"author,omitempty"` + Description *string `json:"description,omitempty"` + Extensions []Extension `json:"extensions,omitempty"` + ID ID `json:"id"` + Main *string `json:"main,omitempty"` + Repository *string `json:"repository,omitempty"` + Schema *PropertySchema `json:"schema,omitempty"` + System bool `json:"system,omitempty"` + Title string `json:"title"` + Version string `json:"version,omitempty"` +} + +type Valuetype string diff --git a/pkg/plugin/manifest/schema_json_gen.go b/pkg/plugin/manifest/schema_json_gen.go new file mode 100644 index 000000000..3a80fc46d --- /dev/null +++ b/pkg/plugin/manifest/schema_json_gen.go @@ -0,0 +1,365 @@ +// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. 
+ +package manifest + +const SchemaJSON string = `{ + "$id": "https://app.reearth.io/schemas/plugin-manifest", + "$schema": "http://json-schema.org/draft-04/schema", + "description": "Re:Earth plugin manifest schema", + "definitions": { + "id": { + "$id": "#id", + "type": "string", + "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" + }, + "id?": { + "$id": "#id?", + "type": [ + "string", + "null" + ], + "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" + }, + "valuetype": { + "$id": "#valuetype", + "type": "string", + "enum": [ + "bool", + "number", + "string", + "url", + "latlng", + "latlngheight", + "camera", + "typography", + "coordinates", + "polygon", + "rect", + "ref" + ] + }, + "propertyPointer": { + "$id": "#propertyPointer", + "type": [ + "object", + "null" + ], + "properties": { + "schemaGroupId": { + "type": "string" + }, + "fieldId": { + "type": "string" + } + }, + "required": [ + "schemaGroupId", + "fieldId" + ], + "additionalProperties": false + }, + "propertyLinkableFields": { + "$id": "#propertyLinkableFields", + "type": [ + "object", + "null" + ], + "properties": { + "latlng": { + "$ref": "#/definitions/propertyPointer" + }, + "url": { + "$ref": "#/definitions/propertyPointer" + } + }, + "additionalProperties": false + }, + "propertyCondition": { + "$id": "#propertyCondition", + "type": [ + "object", + "null" + ], + "properties": { + "field": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/valuetype" + }, + "value": {} + }, + "required": [ + "field", + "type", + "value" + ], + "additionalProperties": false + }, + "propertySchemaField": { + "$id": "#propertySchemaField", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "type": { + "$ref": "#/definitions/valuetype" + }, + "prefix": { + "type": [ + "string", + "null" + ] + }, + "suffix": { + "type": [ + "string", + "null" + ] + }, + "defaultValue": {}, + 
"ui": { + "type": [ + "string", + "null" + ], + "enum": [ + "layer", + "color", + "multiline", + "selection", + "buttons", + "range", + "image", + "video", + "file", + "camera_pose" + ] + }, + "min": { + "type": [ + "number", + "null" + ] + }, + "max": { + "type": [ + "number", + "null" + ] + }, + "choices": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "label": { + "type": "string" + }, + "icon": { + "type": "string" + } + }, + "required": [ + "key" + ], + "additionalProperties": false + } + }, + "availableIf": { + "$ref": "#/definitions/propertyCondition" + } + }, + "required": [ + "id", + "type", + "title" + ], + "additionalProperties": false + }, + "propertySchemaGroup": { + "$id": "#propertySchemaGroup", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "list": { + "type": "boolean" + }, + "availableIf": { + "$ref": "#/definitions/propertyCondition" + }, + "representativeField": { + "$ref": "#/definitions/id?" 
+ }, + "fields": { + "type": "array", + "items": { + "$ref": "#/definitions/propertySchemaField" + } + } + }, + "required": [ + "id", + "title" + ], + "additionalProperties": false + }, + "propertySchema": { + "$id": "#propertySchema", + "type": [ + "object", + "null" + ], + "properties": { + "version": { + "type": "number" + }, + "linkable": { + "$ref": "#/definitions/propertyLinkableFields" + }, + "groups": { + "type": "array", + "items": { + "$ref": "#/definitions/propertySchemaGroup" + } + } + }, + "additionalProperties": false + }, + "extension": { + "$id": "#extension", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "icon": { + "type": [ + "string", + "null" + ] + }, + "visualizer": { + "type": "string", + "enum": [ + "cesium" + ] + }, + "type": { + "type": "string", + "enum": [ + "primitive", + "widget", + "block", + "visualizer", + "infobox" + ] + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + }, + "required": [ + "id", + "title", + "visualizer", + "type" + ], + "additionalProperties": false + }, + "root": { + "$id": "#root", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "system": { + "type": "boolean" + }, + "version": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "repository": { + "type": [ + "string", + "null" + ] + }, + "author": { + "type": [ + "string", + "null" + ] + }, + "main": { + "type": [ + "string", + "null" + ] + }, + "extensions": { + "type": "array", + "items": { + "$ref": "#/definitions/extension" + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + }, + "required": [ + "id", + "title" + ], + "additionalProperties": false + } + }, + "$ref": "#/definitions/root" +} +` diff --git a/pkg/plugin/manifest/schema_json_translation_gen.go 
b/pkg/plugin/manifest/schema_json_translation_gen.go new file mode 100644 index 000000000..2942c4834 --- /dev/null +++ b/pkg/plugin/manifest/schema_json_translation_gen.go @@ -0,0 +1,131 @@ +// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. + +package manifest + +const SchemaTranslationJSON string = `{ + "$id": "https://app.reearth.io/schemas/plugin-manifest-translation", + "$schema": "http://json-schema.org/draft-04/schema", + "description": "Re:Earth plugin manifest schema translation", + "definitions": { + "propertySchemaField": { + "$id": "#propertySchemaField", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "choices": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "type": "string" + } + } + } + } + }, + "propertySchemaGroup": { + "$id": "#propertySchemaGroup", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "fields": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/propertySchemaField" + } + } + } + } + }, + "propertySchema": { + "$id": "#propertySchema", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/propertySchemaGroup" + } + } + }, + "extension": { + "$id": "#extension", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "propertySchema": { + "$ref": "#/definitions/propertySchema" + } + } + }, + "root": { + "$id": "#root", + "type": "object", + "additionalProperties": 
false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "extensions": { + "type": "object", + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/extension" + } + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + } + } + }, + "$ref": "#/definitions/root" +} +` diff --git a/pkg/plugin/manifest/schema_translation.go b/pkg/plugin/manifest/schema_translation.go new file mode 100644 index 000000000..c8604bc89 --- /dev/null +++ b/pkg/plugin/manifest/schema_translation.go @@ -0,0 +1,28 @@ +package manifest + +type TranslationExtension struct { + Description *string `json:"description,omitempty"` + Title *string `json:"title,omitempty"` + PropertySchema TranslationPropertySchema `json:"propertySchema,omitempty"` +} + +type TranslationPropertySchema map[string]TranslationPropertySchemaGroup + +type TranslationPropertySchemaField struct { + Choices map[string]string `json:"choices,omitempty"` + Description *string `json:"description,omitempty"` + Title *string `json:"title,omitempty"` +} + +type TranslationPropertySchemaGroup struct { + Description *string `json:"description,omitempty"` + Fields map[string]TranslationPropertySchemaField `json:"fields,omitempty"` + Title *string `json:"title,omitempty"` +} + +type TranslationRoot struct { + Description *string `json:"description,omitempty"` + Extensions map[string]TranslationExtension `json:"extensions,omitempty"` + Title *string `json:"title,omitempty"` + Schema TranslationPropertySchema `json:"schema,omitempty"` +} diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go new file mode 100644 index 000000000..9bb3fc28d --- /dev/null +++ b/pkg/plugin/plugin.go @@ -0,0 +1,86 @@ +package plugin + +import ( + "github.com/blang/semver" + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" +) + +// Plugin _ +type Plugin struct { + id id.PluginID + name 
i18n.String + author string + description i18n.String + repositoryURL string + extensions map[id.PluginExtensionID]*Extension + extensionOrder []id.PluginExtensionID + schema *id.PropertySchemaID +} + +// ID _ +func (p *Plugin) ID() id.PluginID { + return p.id +} + +// Version _ +func (p *Plugin) Version() semver.Version { + return p.id.Version() +} + +// Name _ +func (p *Plugin) Name() i18n.String { + return p.name.Copy() +} + +// Author _ +func (p *Plugin) Author() string { + return p.author +} + +// Description _ +func (p *Plugin) Description() i18n.String { + return p.description.Copy() +} + +// RepositoryURL _ +func (p *Plugin) RepositoryURL() string { + return p.repositoryURL +} + +// Extensions _ +func (p *Plugin) Extensions() []*Extension { + if p.extensionOrder == nil { + return []*Extension{} + } + list := make([]*Extension, 0, len(p.extensions)) + for _, id := range p.extensionOrder { + list = append(list, p.extensions[id]) + } + return list +} + +// Extension _ +func (p *Plugin) Extension(id id.PluginExtensionID) *Extension { + e, ok := p.extensions[id] + if ok { + return e + } + return nil +} + +// Schema _ +func (p *Plugin) Schema() *id.PropertySchemaID { + return p.schema +} + +// Rename _ +func (p *Plugin) Rename(name i18n.String) { + p.name = name.Copy() + +} + +// SetDescription _ +func (p *Plugin) SetDescription(des i18n.String) { + p.description = des.Copy() +} diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go new file mode 100644 index 000000000..8285e989f --- /dev/null +++ b/pkg/plugin/plugin_test.go @@ -0,0 +1,59 @@ +package plugin + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestPlugin_Extension(t *testing.T) { + testCases := []struct { + name string + plugin *Plugin + key id.PluginExtensionID + expected *Extension + }{ + { + name: "exiting extension", + key: "yyy", + plugin: 
New().Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), + expected: NewExtension().ID("yyy").MustBuild(), + }, + { + name: "not exiting extension", + key: "zzz", + plugin: New().Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), + expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.plugin.Extension(tc.key)) + }) + } +} + +func TestPlugin_Rename(t *testing.T) { + p := New().Name(i18n.StringFrom("x")).MustBuild() + p.Rename(i18n.StringFrom("z")) + assert.Equal(t, i18n.StringFrom("z"), p.Name()) +} + +func TestPlugin_SetDescription(t *testing.T) { + p := New().MustBuild() + p.SetDescription(i18n.StringFrom("xxx")) + assert.Equal(t, i18n.StringFrom("xxx"), p.Description()) +} + +func TestPlugin_Author(t *testing.T) { + p := New().Author("xx").MustBuild() + assert.Equal(t, "xx", p.Author()) +} + +func TestPlugin_ID(t *testing.T) { + assert.Equal(t, New().ID(id.MustPluginID("xxx#1.1.1")).MustBuild().ID(), id.MustPluginID("xxx#1.1.1")) +} diff --git a/pkg/project/builder.go b/pkg/project/builder.go new file mode 100644 index 000000000..76a6b9962 --- /dev/null +++ b/pkg/project/builder.go @@ -0,0 +1,123 @@ +package project + +import ( + "net/url" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +type Builder struct { + p *Project +} + +func New() *Builder { + return &Builder{p: &Project{publishmentStatus: PublishmentStatusPrivate}} +} + +func (b *Builder) Build() (*Project, error) { + if id.ID(b.p.id).IsNil() { + return nil, id.ErrInvalidID + } + if b.p.alias != "" && !CheckAliasPattern(b.p.alias) { + return nil, ErrInvalidAlias + } + if b.p.updatedAt.IsZero() { + b.p.updatedAt = b.p.CreatedAt() + } + return b.p, nil +} + +func (b *Builder) MustBuild() *Project { + r, err := 
b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id id.ProjectID) *Builder { + b.p.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.p.id = id.ProjectID(id.New()) + return b +} + +func (b *Builder) IsArchived(isArchived bool) *Builder { + b.p.isArchived = isArchived + return b +} + +func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { + b.p.updatedAt = updatedAt + return b +} + +func (b *Builder) PublishedAt(publishedAt time.Time) *Builder { + b.p.publishedAt = publishedAt + return b +} + +func (b *Builder) Name(name string) *Builder { + b.p.name = name + return b +} + +func (b *Builder) Description(description string) *Builder { + b.p.description = description + return b +} + +func (b *Builder) Alias(alias string) *Builder { + b.p.alias = alias + return b +} + +func (b *Builder) ImageURL(imageURL *url.URL) *Builder { + if imageURL == nil { + b.p.imageURL = nil + } else { + imageURL2 := *imageURL + b.p.imageURL = &imageURL2 + } + return b +} + +func (b *Builder) PublicTitle(publicTitle string) *Builder { + b.p.publicTitle = publicTitle + return b +} + +func (b *Builder) PublicDescription(publicDescription string) *Builder { + b.p.publicDescription = publicDescription + return b +} + +func (b *Builder) PublicImage(publicImage string) *Builder { + b.p.publicImage = publicImage + return b +} + +func (b *Builder) PublicNoIndex(publicNoIndex bool) *Builder { + b.p.publicNoIndex = publicNoIndex + return b +} + +func (b *Builder) Team(team id.TeamID) *Builder { + b.p.team = team + return b +} + +func (b *Builder) Visualizer(visualizer visualizer.Visualizer) *Builder { + b.p.visualizer = visualizer + return b +} + +func (b *Builder) PublishmentStatus(publishmentStatus PublishmentStatus) *Builder { + b.p.publishmentStatus = publishmentStatus + return b +} diff --git a/pkg/project/builder_test.go b/pkg/project/builder_test.go new file mode 100644 index 000000000..0a6d5d822 --- /dev/null +++ 
b/pkg/project/builder_test.go @@ -0,0 +1,369 @@ +package project + +import ( + "errors" + "net/url" + "reflect" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestNew(t *testing.T) { + var tb = New() + assert.NotNil(t, tb) +} + +func TestBuilder_ID(t *testing.T) { + var tb = New() + res := tb.ID(id.NewProjectID()).MustBuild() + assert.NotNil(t, res.ID()) +} + +func TestBuilder_Name(t *testing.T) { + var tb = New().NewID() + res := tb.Name("foo").MustBuild() + assert.Equal(t, "foo", res.Name()) +} + +func TestBuilder_NewID(t *testing.T) { + var tb = New() + res := tb.NewID().MustBuild() + assert.NotNil(t, res.ID()) +} + +func TestBuilder_Alias(t *testing.T) { + var tb = New().NewID() + res := tb.Alias("xxxxx").MustBuild() + assert.Equal(t, "xxxxx", res.Alias()) +} + +func TestBuilder_Description(t *testing.T) { + var tb = New().NewID() + res := tb.Description("desc").MustBuild() + assert.Equal(t, "desc", res.Description()) +} + +func TestBuilder_IsArchived(t *testing.T) { + var tb = New().NewID() + res := tb.IsArchived(true).MustBuild() + assert.True(t, res.IsArchived()) +} + +func TestBuilder_ImageURL(t *testing.T) { + testCases := []struct { + name string + image *url.URL + expectedNil bool + }{ + { + name: "image not nil", + image: &url.URL{}, + expectedNil: false, + }, + { + name: "image is nil", + image: nil, + expectedNil: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tb := New().NewID() + res := tb.ImageURL(tc.image).MustBuild() + if res.imageURL == nil { + assert.True(tt, tc.expectedNil) + } else { + assert.False(tt, tc.expectedNil) + } + }) + } +} + +func TestBuilder_Visualizer(t *testing.T) { + var tb = New().NewID() + res := tb.Visualizer(visualizer.VisualizerCesium).MustBuild() + assert.Equal(t, visualizer.VisualizerCesium, res.Visualizer()) +} + +func 
TestBuilder_Team(t *testing.T) { + var tb = New().NewID() + res := tb.Team(id.NewTeamID()).MustBuild() + assert.NotNil(t, res.Team()) +} + +func TestBuilder_PublicImage(t *testing.T) { + var tb = New().NewID() + res := tb.PublicImage("xxxxx").MustBuild() + assert.Equal(t, "xxxxx", res.PublicImage()) +} + +func TestBuilder_PublishedAt(t *testing.T) { + var tb = New().NewID() + d := time.Date(1986, 12, 11, 19, 30, 0, 0, time.UTC) + res := tb.PublishedAt(d).MustBuild() + assert.True(t, reflect.DeepEqual(res.PublishedAt(), d)) +} + +func TestBuilder_UpdatedAt(t *testing.T) { + var tb = New().NewID() + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + res := tb.UpdatedAt(d).MustBuild() + assert.True(t, reflect.DeepEqual(res.UpdatedAt(), d)) +} + +func TestBuilder_PublicTitle(t *testing.T) { + var tb = New().NewID() + res := tb.PublicTitle("xxx.aaa").MustBuild() + assert.Equal(t, "xxx.aaa", res.PublicTitle()) +} + +func TestBuilder_PublishmentStatus(t *testing.T) { + var tb = New().NewID() + var x PublishmentStatus = "xxx.aaa" + res := tb.PublishmentStatus("xxx.aaa").MustBuild() + assert.Equal(t, x, res.PublishmentStatus()) +} + +func TestBuilder_PublicDescription(t *testing.T) { + var tb = New().NewID() + res := tb.PublicDescription("pdesc").MustBuild() + assert.Equal(t, "pdesc", res.PublicDescription()) +} + +func TestBuilder_PublicNoIndex(t *testing.T) { + var tb = New().NewID() + res := tb.PublicNoIndex(true).MustBuild() + assert.Equal(t, true, res.PublicNoIndex()) +} + +func TestBuilder_Build(t *testing.T) { + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + i, _ := url.Parse("ttt://xxx.aa/") + pid := id.NewProjectID() + tid := id.NewTeamID() + testCases := []struct { + name, pname, description, + alias, publicTitle, publicDescription, + publicImage string + id id.ProjectID + isArchived bool + updatedAt time.Time + publishedAt time.Time + imageURL *url.URL + publicNoIndex bool + team id.TeamID + visualizer visualizer.Visualizer + publishmentStatus 
PublishmentStatus + expected *Project + err error + }{ + { + name: "build normal project", + pname: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + id: pid, + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + expected: &Project{ + id: pid, + description: "ddd", + name: "xxx.aaa", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, + err: nil, + }, + { + name: "zero updated at", + id: id.NewProjectID(), + updatedAt: time.Time{}, + expected: nil, + err: nil, + }, + { + name: "failed invalid id", + + expected: nil, + err: id.ErrInvalidID, + }, + { + name: "failed invalid alias", + id: id.NewProjectID(), + alias: "xxx.aaa", + expected: nil, + err: ErrInvalidAlias, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + p, err := New(). + ID(tc.id). + PublicNoIndex(tc.publicNoIndex). + PublicDescription(tc.publicDescription). + PublishmentStatus(tc.publishmentStatus). + PublicTitle(tc.publicTitle). + UpdatedAt(tc.updatedAt). + PublishedAt(tc.publishedAt). + PublicImage(tc.publicImage). + Team(tc.team). + ImageURL(tc.imageURL). + Name(tc.pname). + Alias(tc.alias). + Visualizer(tc.visualizer). + UpdatedAt(tc.updatedAt). + Description(tc.description). 
+ Build() + if err == nil { + if tc.expected == nil { + assert.Equal(tt, p.UpdatedAt(), p.CreatedAt()) + + } else { + assert.Equal(tt, tc.expected, p) + } + } else { + assert.True(tt, errors.As(err, &tc.err)) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) + i, _ := url.Parse("ttt://xxx.aa/") + pid := id.NewProjectID() + tid := id.NewTeamID() + testCases := []struct { + name, pname, description, + alias, publicTitle, publicDescription, + publicImage string + id id.ProjectID + isArchived bool + updatedAt time.Time + publishedAt time.Time + imageURL *url.URL + publicNoIndex bool + team id.TeamID + visualizer visualizer.Visualizer + publishmentStatus PublishmentStatus + expected *Project + err error + }{ + { + name: "build normal project", + pname: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + id: pid, + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + expected: &Project{ + id: pid, + description: "ddd", + name: "xxx.aaa", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, + err: nil, + }, + { + name: "zero updated at", + id: id.NewProjectID(), + updatedAt: time.Time{}, + expected: nil, + err: nil, + }, + { + name: "failed invalid id", + + expected: nil, + err: id.ErrInvalidID, + }, + { + name: "failed invalid alias", + id: id.NewProjectID(), + alias: "xxx.aaa", + expected: nil, + err: ErrInvalidAlias, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + var p *Project + defer func() { + if r := recover(); r == nil { + if 
tc.expected == nil { + assert.Equal(tt, p.UpdatedAt(), p.CreatedAt()) + } else { + assert.Equal(tt, tc.expected, p) + } + } + }() + p = New(). + ID(tc.id). + PublicNoIndex(tc.publicNoIndex). + PublicDescription(tc.publicDescription). + PublishmentStatus(tc.publishmentStatus). + PublicTitle(tc.publicTitle). + UpdatedAt(tc.updatedAt). + PublishedAt(tc.publishedAt). + PublicImage(tc.publicImage). + Team(tc.team). + ImageURL(tc.imageURL). + Name(tc.pname). + Alias(tc.alias). + Visualizer(tc.visualizer). + UpdatedAt(tc.updatedAt). + Description(tc.description). + MustBuild() + }) + } +} diff --git a/pkg/project/project.go b/pkg/project/project.go new file mode 100644 index 000000000..bebfa06c5 --- /dev/null +++ b/pkg/project/project.go @@ -0,0 +1,190 @@ +package project + +import ( + "errors" + "net/url" + "regexp" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +var ( + ErrInvalidAlias error = errors.New("invalid alias") + aliasRegexp = regexp.MustCompile("^[a-zA-Z0-9_-]{5,32}$") +) + +// Project _ +type Project struct { + id id.ProjectID + isArchived bool + updatedAt time.Time + publishedAt time.Time + name string + description string + alias string + imageURL *url.URL + publicTitle string + publicDescription string + publicImage string + publicNoIndex bool + team id.TeamID + visualizer visualizer.Visualizer + publishmentStatus PublishmentStatus +} + +func (p *Project) ID() id.ProjectID { + return p.id +} + +func (p *Project) IsArchived() bool { + return p.isArchived +} + +func (p *Project) UpdatedAt() time.Time { + return p.updatedAt +} + +func (p *Project) PublishedAt() time.Time { + return p.publishedAt +} + +func (p *Project) Name() string { + return p.name +} + +func (p *Project) Description() string { + return p.description +} + +func (p *Project) Alias() string { + return p.alias +} + +func (p *Project) ImageURL() *url.URL { + if p == nil || p.imageURL == nil { + return nil + } + imageURL2 := 
*p.imageURL + return &imageURL2 +} + +func (p *Project) PublicTitle() string { + return p.publicTitle +} + +func (p *Project) PublicDescription() string { + return p.publicDescription +} + +func (p *Project) PublicImage() string { + return p.publicImage +} + +func (p *Project) PublicNoIndex() bool { + return p.publicNoIndex +} + +func (p *Project) PublishmentStatus() PublishmentStatus { + return p.publishmentStatus +} + +func (p *Project) Team() id.TeamID { + return p.team +} + +func (p *Project) CreatedAt() time.Time { + return id.ID(p.id).Timestamp() +} + +func (p *Project) Visualizer() visualizer.Visualizer { + return p.visualizer +} + +func (p *Project) SetArchived(isArchived bool) { + p.isArchived = isArchived +} + +func (p *Project) SetUpdatedAt(updatedAt time.Time) { + p.updatedAt = updatedAt +} + +func (p *Project) SetPublishedAt(publishedAt time.Time) { + p.publishedAt = publishedAt +} + +func (p *Project) SetImageURL(imageURL *url.URL) { + if imageURL == nil { + p.imageURL = nil + } else { + imageURL2 := *imageURL + p.imageURL = &imageURL2 + } +} + +func (p *Project) UpdateName(name string) { + p.name = name +} + +func (p *Project) UpdateDescription(description string) { + p.description = description +} + +func (p *Project) UpdateAlias(alias string) error { + if CheckAliasPattern(alias) { + p.alias = alias + } else { + return ErrInvalidAlias + } + return nil +} + +func (p *Project) UpdatePublicTitle(publicTitle string) { + p.publicTitle = publicTitle +} + +func (p *Project) UpdatePublicDescription(publicDescription string) { + p.publicDescription = publicDescription +} + +func (p *Project) UpdatePublicImage(publicImage string) { + p.publicImage = publicImage +} + +func (p *Project) UpdatePublicNoIndex(publicNoIndex bool) { + p.publicNoIndex = publicNoIndex +} + +func (p *Project) UpdateTeam(team id.TeamID) { + p.team = team +} + +func (p *Project) UpdateVisualizer(visualizer visualizer.Visualizer) { + p.visualizer = visualizer +} + +func (p *Project) 
UpdatePublishmentStatus(publishmentStatus PublishmentStatus) { + p.publishmentStatus = publishmentStatus +} + +func (p *Project) PublicName() string { + if p == nil || p.publishmentStatus == PublishmentStatusPrivate { + return "" + } + return p.alias +} + +func (p *Project) MatchWithPublicName(name string) bool { + if p == nil || name == "" || p.publishmentStatus == PublishmentStatusPrivate { + return false + } + if p.publishmentStatus != PublishmentStatusPrivate && p.alias == name { + return true + } + return false +} + +func CheckAliasPattern(alias string) bool { + return alias != "" && aliasRegexp.Match([]byte(alias)) +} diff --git a/pkg/project/project_test.go b/pkg/project/project_test.go new file mode 100644 index 000000000..adc6dc9e9 --- /dev/null +++ b/pkg/project/project_test.go @@ -0,0 +1,270 @@ +package project + +import ( + "net/url" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" + "github.com/stretchr/testify/assert" +) + +func TestCheckAliasPattern(t *testing.T) { + testCase := []struct { + name, alias string + expexted bool + }{ + { + name: "accepted regex", + alias: "xxxxx", + expexted: true, + }, + { + name: "refused regex", + alias: "xxx", + expexted: false, + }, + } + for _, tc := range testCase { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expexted, CheckAliasPattern(tc.alias)) + }) + } +} + +func TestProject_MatchWithPublicName(t *testing.T) { + testCase := []struct { + name, n string + p *Project + expexted bool + }{ + { + name: "alias = name, publishmentStatus = public", + n: "aaaaa", + p: &Project{ + publishmentStatus: PublishmentStatusPublic, + alias: "aaaaa", + }, + expexted: true, + }, + { + name: "nil project", + n: "xx", + p: nil, + expexted: false, + }, + { + name: "nil project", + n: "", + p: &Project{ + publishmentStatus: PublishmentStatusPublic, + alias: "aaaaa", + }, + expexted: false, + }, + { + name: "nil 
project", + n: "aaaaa", + p: &Project{ + publishmentStatus: PublishmentStatusPrivate, + alias: "aaaaa", + }, + expexted: false, + }, + } + for _, tc := range testCase { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expexted, tc.p.MatchWithPublicName(tc.n)) + }) + } +} + +func TestProject_SetArchived(t *testing.T) { + p := &Project{isArchived: false} + p.SetArchived(true) + assert.Equal(t, true, p.IsArchived()) +} + +func TestProject_SetPublishedAt(t *testing.T) { + p := &Project{} + p.SetPublishedAt(time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC), p.publishedAt) +} + +func TestProject_SetUpdatedAt(t *testing.T) { + p := &Project{} + p.SetUpdatedAt(time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(1900, 1, 1, 00, 00, 1, 1, time.UTC), p.UpdatedAt()) +} + +func TestProject_SetImageURL(t *testing.T) { + testCase := []struct { + name string + image *url.URL + p *Project + expectedNil bool + }{ + { + name: "nil image", + image: nil, + p: &Project{}, + expectedNil: true, + }, + { + name: "set new image", + image: &url.URL{}, + p: &Project{}, + expectedNil: false, + }, + } + for _, tc := range testCase { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.p.SetImageURL(tc.image) + if tc.expectedNil { + assert.Nil(tt, tc.p.ImageURL()) + } else { + assert.NotNil(tt, tc.p.ImageURL()) + } + }) + } +} + +func TestProject_UpdateName(t *testing.T) { + p := &Project{} + p.UpdateName("foo") + assert.Equal(t, "foo", p.Name()) +} + +func TestProject_UpdateDescription(t *testing.T) { + p := &Project{} + p.UpdateDescription("aaa") + assert.Equal(t, "aaa", p.Description()) +} + +func TestProject_UpdatePublishmentStatus(t *testing.T) { + p := &Project{} + p.UpdatePublishmentStatus(PublishmentStatusPrivate) + assert.Equal(t, PublishmentStatusPrivate, p.PublishmentStatus()) +} + +func TestProject_UpdatePublicNoIndex(t *testing.T) { + p := 
&Project{} + p.UpdatePublicNoIndex(true) + assert.Equal(t, true, p.PublicNoIndex()) +} + +func TestProject_UpdatePublicDescription(t *testing.T) { + p := &Project{} + p.UpdatePublicDescription("ppp") + assert.Equal(t, "ppp", p.PublicDescription()) +} + +func TestProject_UpdatePublicTitle(t *testing.T) { + p := &Project{} + p.UpdatePublicTitle("ttt") + assert.Equal(t, "ttt", p.PublicTitle()) +} + +func TestProject_UpdateTeam(t *testing.T) { + p := &Project{} + p.UpdateTeam(id.NewTeamID()) + assert.NotNil(t, p.Team()) +} + +func TestProject_UpdateVisualizer(t *testing.T) { + p := &Project{} + var v visualizer.Visualizer = "ttt" + p.UpdateVisualizer(v) + assert.Equal(t, v, p.Visualizer()) +} + +func TestProject_UpdateAlias(t *testing.T) { + testCases := []struct { + name, a string + expected string + err error + }{ + { + name: "accepted alias", + a: "xxxxx", + expected: "xxxxx", + err: nil, + }, + { + name: "fail: invalid alias", + a: "xxx", + expected: "", + err: ErrInvalidAlias, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + p := &Project{} + err := p.UpdateAlias(tc.a) + if err == nil { + assert.Equal(tt, tc.expected, p.Alias()) + } else { + assert.Equal(tt, tc.err, err) + } + }) + } +} + +func TestProject_UpdatePublicImage(t *testing.T) { + p := &Project{} + p.UpdatePublicImage("xxx") + assert.Equal(t, "xxx", p.PublicImage()) +} + +func TestProject_PublicName(t *testing.T) { + testCases := []struct { + name string + p *Project + expected string + }{ + { + name: "private publishment status", + p: &Project{ + publishmentStatus: PublishmentStatusLimited, + alias: "aaaaa", + }, + expected: "aaaaa", + }, + { + name: "not private nor limited publishment status", + p: &Project{ + alias: "aaaaa", + }, + expected: "aaaaa", + }, + { + name: "nil project", + p: nil, + expected: "", + }, + { + name: "private publishment status", + p: &Project{ + publishmentStatus: PublishmentStatusPrivate, + }, + expected: "", + }, 
+ } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := tc.p.PublicName() + assert.Equal(tt, tc.expected, res) + }) + } +} diff --git a/pkg/project/publishment_status.go b/pkg/project/publishment_status.go new file mode 100644 index 000000000..e465e2c34 --- /dev/null +++ b/pkg/project/publishment_status.go @@ -0,0 +1,13 @@ +package project + +// PublishmentStatus _ +type PublishmentStatus string + +const ( + // PublishmentStatusPublic _ + PublishmentStatusPublic PublishmentStatus = "public" + // PublishmentStatusLimited _ + PublishmentStatusLimited PublishmentStatus = "limited" + // PublishmentStatusPrivate _ + PublishmentStatusPrivate PublishmentStatus = "private" +) diff --git a/pkg/property/builder.go b/pkg/property/builder.go new file mode 100644 index 000000000..22e768601 --- /dev/null +++ b/pkg/property/builder.go @@ -0,0 +1,97 @@ +package property + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + // ErrInvalidItem _ + ErrInvalidItem = errors.New("invalid item") +) + +// Builder _ +type Builder struct { + p *Property +} + +// New _ +func New() *Builder { + return &Builder{p: &Property{}} +} + +// Build _ +func (b *Builder) Build() (*Property, error) { + if id.ID(b.p.id).IsNil() { + return nil, id.ErrInvalidID + } + if id.ID(b.p.scene).IsNil() { + return nil, ErrInvalidSceneID + } + if b.p.schema.IsNil() { + return nil, ErrInvalidPropertySchemaID + } + for _, i := range b.p.items { + if i.Schema() != b.p.schema { + return nil, ErrInvalidItem + } + } + return b.p, nil +} + +// MustBuild _ +func (b *Builder) MustBuild() *Property { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +// ID _ +func (b *Builder) ID(id id.PropertyID) *Builder { + b.p.id = id + return b +} + +// NewID _ +func (b *Builder) NewID() *Builder { + b.p.id = id.PropertyID(id.New()) + return b +} + +// Scene _ +func (b *Builder) Scene(s id.SceneID) *Builder { + b.p.scene = s + 
return b +} + +// Schema _ +func (b *Builder) Schema(schema id.PropertySchemaID) *Builder { + b.p.schema = schema + return b +} + +// Items _ +func (b *Builder) Items(items []Item) *Builder { + if len(items) == 0 { + b.p.items = nil + return b + } + + newItems := []Item{} + ids := map[id.PropertyItemID]struct{}{} + for _, f := range items { + if f == nil { + continue + } + if _, ok := ids[f.ID()]; ok { + continue + } + ids[f.ID()] = struct{}{} + newItems = append(newItems, f) + } + b.p.items = newItems + return b +} diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go new file mode 100644 index 000000000..2df04a556 --- /dev/null +++ b/pkg/property/builder_test.go @@ -0,0 +1,206 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestBuilder_New(t *testing.T) { + b := New() + assert.NotNil(t, b) +} + +func TestBuilder_ID(t *testing.T) { + pid := id.NewPropertyID() + p := New().ID(pid).Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + assert.Equal(t, pid, p.ID()) +} + +func TestBuilder_NewID(t *testing.T) { + p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + assert.False(t, p.ID().IsNil()) +} + +func TestBuilder_Schema(t *testing.T) { + p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + assert.Equal(t, id.MustPropertySchemaID("xxx#1.1.1/aa"), p.Schema()) +} + +func TestBuilder_Scene(t *testing.T) { + sid := id.NewSceneID() + p := New().NewID().Scene(sid).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + assert.Equal(t, sid, p.Scene()) +} + +func TestBuilder_Items(t *testing.T) { + iid := id.NewPropertyItemID() + propertySchemaID := id.MustPropertySchemaID("xxx#1.1.1/aa") + propertySchemaField1ID := id.PropertySchemaFieldID("a") + propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + + 
testCases := []struct { + Name string + Input, Expected []Item + }{ + { + Name: "has nil item", + Input: []Item{nil}, + Expected: []Item{}, + }, + { + Name: "has duplicated item", + Input: []Item{ + NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + }).MustBuild(), + NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + }).MustBuild(), + }, + Expected: []Item{NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + }).MustBuild()}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := New().NewID(). + Scene(id.NewSceneID()). + Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")). + Items(tc.Input). + MustBuild() + assert.Equal(tt, tc.Expected, res.Items()) + }) + } +} + +func TestBuilder_Build(t *testing.T) { + pid := id.NewPropertyID() + sid := id.NewSceneID() + scid := id.MustPropertySchemaID("xxx#1.1.1/aa") + iid := id.NewPropertyItemID() + propertySchemaField1ID := id.PropertySchemaFieldID("a") + propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + + testCases := []struct { + Name string + Id id.PropertyID + Scene id.SceneID + Schema id.PropertySchemaID + Items []Item + Err error + Expected struct { + Id id.PropertyID + Scene id.SceneID + Schema id.PropertySchemaID + Items []Item + } + }{ + { + Name: "success", + Id: pid, + Scene: sid, + Schema: scid, + Items: []Item{ + NewGroup().ID(iid).Schema(scid, propertySchemaGroup1ID). 
+ Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + }).MustBuild()}, + Expected: struct { + Id id.PropertyID + Scene id.SceneID + Schema id.PropertySchemaID + Items []Item + }{ + Id: pid, + Scene: sid, + Schema: scid, + Items: []Item{ + NewGroup().ID(iid).Schema(scid, propertySchemaGroup1ID). + Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + }).MustBuild()}, + }, + }, + { + Name: "fail invalid id", + Id: id.PropertyID{}, + Items: nil, + Err: id.ErrInvalidID, + }, + { + Name: "fail invalid scene", + Id: pid, + Items: nil, + Err: ErrInvalidSceneID, + }, + { + Name: "fail invalid schema", + Id: pid, + Scene: sid, + Items: nil, + Err: ErrInvalidPropertySchemaID, + }, + { + Name: "fail invalid item", + Id: pid, + Scene: sid, + Schema: scid, + Items: []Item{ + NewGroup().ID(iid).Schema(id.MustPropertySchemaID("zzz#1.1.1/aa"), propertySchemaGroup1ID). + Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). 
+ Build(), + }).MustBuild()}, + Err: ErrInvalidItem, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := New().ID(tc.Id).Items(tc.Items).Scene(tc.Scene).Schema(tc.Schema).Build() + if err == nil { + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.Schema, res.Schema()) + assert.Equal(tt, tc.Expected.Items, res.Items()) + assert.Equal(tt, tc.Expected.Scene, res.Scene()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} diff --git a/pkg/property/condition.go b/pkg/property/condition.go new file mode 100644 index 000000000..14217f30e --- /dev/null +++ b/pkg/property/condition.go @@ -0,0 +1,20 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +// Condition _ +type Condition struct { + Field id.PropertySchemaFieldID + Value *Value +} + +// Clone _ +func (c *Condition) Clone() *Condition { + if c == nil { + return nil + } + return &Condition{ + Field: c.Field, + Value: c.Value.Clone(), + } +} diff --git a/pkg/property/condition_test.go b/pkg/property/condition_test.go new file mode 100644 index 000000000..236d827fa --- /dev/null +++ b/pkg/property/condition_test.go @@ -0,0 +1,46 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCondition_Clone(t *testing.T) { + testCases := []struct { + Name string + Con, Expected *Condition + }{ + { + Name: "nil condition", + Con: nil, + Expected: nil, + }, + { + Name: "nil condition", + Con: &Condition{ + Field: "a", + Value: &Value{ + v: true, + t: ValueTypeBool, + }, + }, + Expected: &Condition{ + Field: "a", + Value: &Value{ + v: true, + t: ValueTypeBool, + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Con.Clone() + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/property/field.go b/pkg/property/field.go new file mode 100644 index 
000000000..0ecd7fb3c --- /dev/null +++ b/pkg/property/field.go @@ -0,0 +1,200 @@ +package property + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrInvalidPropertyValue = errors.New("invalid property value") + ErrCannotLinkDataset = errors.New("cannot link dataset") + ErrInvalidPropertyType = errors.New("invalid property type") + ErrInvalidPropertyField = errors.New("invalid property field") +) + +type Field struct { + field id.PropertySchemaFieldID + ptype ValueType + links *Links + value *Value +} + +func (p *Field) Clone() *Field { + return &Field{ + field: p.field, + ptype: p.ptype, + value: p.value.Clone(), + links: p.links.Clone(), + } +} + +func (p *Field) Field() id.PropertySchemaFieldID { + return p.field +} + +func (p *Field) Links() *Links { + if p == nil { + return nil + } + return p.links +} + +func (p *Field) Type() ValueType { + return p.ptype +} + +func (p *Field) Value() *Value { + if p == nil { + return nil + } + return p.value +} + +func (p *Field) ActualValue(ds *dataset.Dataset) *Value { + if p.links != nil { + if l := p.links.Last(); l != nil { + ldid := l.Dataset() + ldsfid := l.DatasetSchemaField() + if ldid != nil || ldsfid != nil || ds.ID() == *ldid { + if f := ds.Field(*ldsfid); f != nil { + v1, _ := valueFromDataset(f.Value()) + return v1 + } + } + } + return nil + } + return p.value +} + +func (p *Field) HasLinkedField() bool { + return p.Links().IsLinked() +} + +func (p *Field) CollectDatasets() []id.DatasetID { + if p == nil { + return nil + } + res := []id.DatasetID{} + + if p.Links().IsLinkedFully() { + dsid := p.Links().Last().Dataset() + if dsid != nil { + res = append(res, *dsid) + } + } + + return res +} + +func (p *Field) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { + return p.Links().HasDatasetOrSchema(s, i) +} + +func (p *Field) Update(value *Value, field *SchemaField) error { + if field == nil || p.field != 
field.ID() || !field.Validate(value) { + return ErrInvalidPropertyValue + } + p.value = value + return nil +} + +func (p *Field) UpdateUnsafe(value *Value) { + p.value = value +} + +func (p *Field) Link(links *Links) { + p.links = links.Clone() +} + +func (p *Field) Unlink() { + p.links = nil +} + +func (p *Field) UpdateField(field id.PropertySchemaFieldID) { + p.field = field +} + +func (p *Field) IsEmpty() bool { + return p != nil && p.Value().IsEmpty() && p.Links().IsEmpty() +} + +func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) bool { + if p == nil || dl == nil || newSchema == nil { + return false + } + + fid := p.Field() + schemaField := newSchema.Field(fid) + + // If field is not found in new schema, this field should be removed + invalid := schemaField == nil + + // if value is not compatible for type, value will be cleared + if !schemaField.Validate(p.Value()) { + p.UpdateUnsafe(nil) + } + + // If linked dataset is not compatible for type, it will be unlinked + l := p.Links() + if dl != nil && l.IsLinkedFully() { + if dsid, dsfid := l.Last().Dataset(), l.Last().DatasetSchemaField(); dsid != nil && dsfid != nil { + dss, _ := dl(ctx, *dsid) + if dsf := dss[0].Field(*dsfid); dsf != nil { + if schemaField.Type() != valueTypeFromDataset(dsf.Type()) { + p.Unlink() + } + } + } + } + + return !invalid +} + +func (p *Field) DatasetValue(ctx context.Context, d dataset.GraphLoader) (*dataset.Value, error) { + if p == nil { + return nil, nil + } + return p.links.DatasetValue(ctx, d) +} + +func (p *Field) MigrateDataset(q DatasetMigrationParam) { + if p == nil { + return + } + link := p.Links() + link.Replace(q.OldDatasetSchemaMap, q.OldDatasetMap, q.DatasetFieldIDMap) + if !link.Validate(q.NewDatasetSchemaMap, q.NewDatasetMap) { + p.Unlink() + } +} + +func (p *Field) ValidateSchema(ps *SchemaField) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("schema not found") + } + if p.ptype != ps.Type() { + 
return errors.New("invalid field type") + } + if p.ptype != p.value.Type() { + return errors.New("invalid field value type") + } + if !p.ptype.ValidateValue(p.value) { + return errors.New("invalid field value") + } + return nil +} + +type DatasetMigrationParam struct { + OldDatasetSchemaMap map[id.DatasetSchemaID]id.DatasetSchemaID + OldDatasetMap map[id.DatasetID]id.DatasetID + DatasetFieldIDMap map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID + NewDatasetSchemaMap map[id.DatasetSchemaID]*dataset.Schema + NewDatasetMap map[id.DatasetID]*dataset.Dataset +} diff --git a/pkg/property/field_builder.go b/pkg/property/field_builder.go new file mode 100644 index 000000000..3bd7ab99f --- /dev/null +++ b/pkg/property/field_builder.go @@ -0,0 +1,114 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +// FieldBuilder _ +type FieldBuilder struct { + p *Field + psf *SchemaField +} + +// FieldUnsafeBuilder _ +type FieldUnsafeBuilder struct { + p *Field +} + +// NewField _ +func NewField(p *SchemaField) *FieldBuilder { + b := &FieldBuilder{ + p: &Field{}, + } + return b.schemaField(p) +} + +// Build _ +func (b *FieldBuilder) Build() (*Field, error) { + if b.p.field == id.PropertySchemaFieldID("") { + return nil, id.ErrInvalidID + } + if b.psf != nil && !b.psf.Validate(b.p.value) { + return nil, ErrInvalidPropertyValue + } + return b.p, nil +} + +// MustBuild _ +func (b *FieldBuilder) MustBuild() *Field { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *FieldBuilder) schemaField(p *SchemaField) *FieldBuilder { + if p != nil { + b.psf = p + b.p.field = p.ID() + b.p.ptype = p.Type() + if dv := p.DefaultValue(); dv != nil { + dv2 := *dv + b.p.value = &dv2 + } + } + return b +} + +// Value _ +func (b *FieldBuilder) Value(v *Value) *FieldBuilder { + if b.p.field == id.PropertySchemaFieldID("") { + return b + } + v2 := *v + b.p.value = &v2 + return b +} + +// Link _ +func (b *FieldBuilder) Link(l *Links) *FieldBuilder { + 
b.p.links = l.Clone() + return b +} + +// NewFieldUnsafe _ +func NewFieldUnsafe() *FieldUnsafeBuilder { + return &FieldUnsafeBuilder{ + p: &Field{}, + } +} + +// Build _ +func (b *FieldUnsafeBuilder) Build() *Field { + return b.p +} + +// FieldUnsafe _ +func (b *FieldUnsafeBuilder) FieldUnsafe(f id.PropertySchemaFieldID) *FieldUnsafeBuilder { + b.p.field = f + return b +} + +// TypeUnsafe _ +func (b *FieldUnsafeBuilder) TypeUnsafe(t ValueType) *FieldUnsafeBuilder { + b.p.ptype = t + return b +} + +// ValueUnsafe _ +func (b *FieldUnsafeBuilder) ValueUnsafe(v *Value) *FieldUnsafeBuilder { + if v == nil { + b.p.value = nil + return b + } + + v2 := *v + b.p.value = &v2 + b.p.ptype = v.Type() + return b +} + +// LinksUnsafe _ +func (b *FieldUnsafeBuilder) LinksUnsafe(l *Links) *FieldUnsafeBuilder { + b.p.links = l.Clone() + return b +} diff --git a/pkg/property/field_builder_test.go b/pkg/property/field_builder_test.go new file mode 100644 index 000000000..f8c850c3f --- /dev/null +++ b/pkg/property/field_builder_test.go @@ -0,0 +1,232 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestFieldBuilder_Value(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + b := NewField(p).Value(v).MustBuild() + assert.Equal(t, v, b.Value()) +} + +func TestFieldBuilder_Link(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + b := NewField(p).Link(ls).MustBuild() + assert.Equal(t, ls, b.Links()) +} + +func TestFieldBuilder_Build(t *testing.T) { + l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + testCases := []struct { + Name string + Links *Links + Value *Value + SF *SchemaField + Expected struct { + PType ValueType + 
Links *Links + Value *Value + } + Err error + }{ + { + Name: "fail invalid property id", + Expected: struct { + PType ValueType + Links *Links + Value *Value + }{}, + Err: id.ErrInvalidID, + }, + { + Name: "fail invalid property type", + SF: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + Expected: struct { + PType ValueType + Links *Links + Value *Value + }{}, + Err: ErrInvalidPropertyType, + }, + { + Name: "success", + SF: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), + Links: NewLinks([]*Link{l}), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + Expected: struct { + PType ValueType + Links *Links + Value *Value + }{ + PType: ValueTypeString, + Links: NewLinks([]*Link{l}), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + }, + Err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewField(tc.SF).Value(tc.Value).Link(tc.Links).Build() + if err == nil { + assert.Equal(tt, tc.Expected.Links, res.Links()) + assert.Equal(tt, tc.Expected.PType, res.Type()) + assert.Equal(tt, tc.Expected.Value, res.Value()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} + +func TestFieldBuilder_MustBuild(t *testing.T) { + l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + testCases := []struct { + Name string + Fails bool + Links *Links + Value *Value + SF *SchemaField + Expected struct { + PType ValueType + Links *Links + Value *Value + } + }{ + { + Name: "fail invalid property id", + Fails: true, + Expected: struct { + PType ValueType + Links *Links + Value *Value + }{}, + }, + { + Name: "fail invalid property type", + SF: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + Fails: true, + Expected: struct { + PType ValueType + Links *Links + Value *Value + }{}, + }, + { + Name: "success", + SF: 
NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), + Links: NewLinks([]*Link{l}), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + Expected: struct { + PType ValueType + Links *Links + Value *Value + }{ + PType: ValueTypeString, + Links: NewLinks([]*Link{l}), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *Field + if tc.Fails { + defer func() { + if r := recover(); r != nil { + assert.Nil(tt, res) + } + }() + res = NewField(tc.SF).Value(tc.Value).Link(tc.Links).MustBuild() + } else { + res = NewField(tc.SF).Value(tc.Value).Link(tc.Links).MustBuild() + assert.Equal(tt, tc.Expected.Links, res.Links()) + assert.Equal(tt, tc.Expected.PType, res.Type()) + assert.Equal(tt, tc.Expected.Value, res.Value()) + } + }) + } +} + +func TestNewFieldUnsafe(t *testing.T) { + p := NewFieldUnsafe().Build() + assert.NotNil(t, p) +} + +func TestFieldUnsafeBuilder_Build(t *testing.T) { + l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + testCases := []struct { + Name string + Links *Links + Value *Value + Type ValueType + Field id.PropertySchemaFieldID + Expected struct { + PType ValueType + Field id.PropertySchemaFieldID + Links *Links + Value *Value + } + }{ + { + Name: "success", + Links: NewLinks([]*Link{l}), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + Type: ValueTypeString, + Field: "a", + Expected: struct { + PType ValueType + Field id.PropertySchemaFieldID + Links *Links + Value *Value + }{ + PType: ValueTypeString, + Field: "a", + Links: NewLinks([]*Link{l}), + Value: ValueTypeString.ValueFromUnsafe("vvv"), + }, + }, + { + Name: "nil value", + Links: NewLinks([]*Link{l}), + Value: nil, + Type: ValueTypeString, + Field: "a", + Expected: struct { + PType ValueType + Field id.PropertySchemaFieldID + Links *Links + Value *Value + }{ + PType: ValueTypeString, + Field: "a", + Links: NewLinks([]*Link{l}), + 
Value: nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := NewFieldUnsafe().ValueUnsafe(tc.Value).LinksUnsafe(tc.Links).TypeUnsafe(tc.Type).FieldUnsafe(tc.Field).Build() + assert.Equal(tt, tc.Expected.Links, res.Links()) + assert.Equal(tt, tc.Expected.PType, res.Type()) + assert.Equal(tt, tc.Expected.Value, res.Value()) + }) + } +} diff --git a/pkg/property/field_test.go b/pkg/property/field_test.go new file mode 100644 index 000000000..35d26480f --- /dev/null +++ b/pkg/property/field_test.go @@ -0,0 +1,116 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestField_ActualValue(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + dssfid := id.NewDatasetSchemaFieldID() + l := NewLink(dsid, dssid, dssfid) + ls := NewLinks([]*Link{l}) + + testCases := []struct { + Name string + Field *Field + DS *dataset.Dataset + Expected *Value + }{ + { + Name: "nil links", + Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).MustBuild(), + Expected: ValueTypeString.ValueFromUnsafe("vvv"), + }, + { + Name: "nil last link", + Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(&Links{}).MustBuild(), + Expected: nil, + }, + { + Name: "dataset value", + Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(ls).MustBuild(), + DS: dataset.New().ID(dsid).Schema(dssid).Fields([]*dataset.Field{dataset.NewField(dssfid, dataset.ValueFrom("xxx"), "")}).MustBuild(), + Expected: ValueTypeString.ValueFromUnsafe("xxx"), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Field.ActualValue(tc.DS) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func 
TestField_CollectDatasets(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + dssfid := id.NewDatasetSchemaFieldID() + l := NewLink(dsid, dssid, dssfid) + ls := NewLinks([]*Link{l}) + + testCases := []struct { + Name string + Field *Field + Expected []id.DatasetID + }{ + { + Name: "list of one datasets", + Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(ls).MustBuild(), + Expected: []id.DatasetID{dsid}, + }, + { + Name: "nil field", + Expected: nil, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Field.CollectDatasets() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestField_Clone(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + b := NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(ls).MustBuild() + r := b.Clone() + assert.Equal(t, b, r) +} + +func TestField(t *testing.T) { + did := id.NewDatasetID() + dsid := id.NewDatasetSchemaID() + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + b := NewField(p).MustBuild() + assert.True(t, b.IsEmpty()) + l := NewLink(did, dsid, id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + b.Link(ls) + assert.True(t, b.IsDatasetLinked(dsid, did)) + b.Unlink() + assert.False(t, b.HasLinkedField()) +} + +func TestField_Update(t *testing.T) { + p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() + b := NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).MustBuild() + v := ValueTypeString.ValueFromUnsafe("xxx") + b.UpdateUnsafe(v) + assert.Equal(t, v, b.Value()) +} diff --git a/pkg/property/group.go b/pkg/property/group.go new file mode 100644 index 000000000..b07976461 --- /dev/null +++ b/pkg/property/group.go @@ -0,0 +1,286 @@ 
+package property + +import ( + "context" + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +// Group represents a group of property +type Group struct { + itemBase + fields []*Field +} + +// Group implements Item interface +var _ Item = &Group{} + +// ID returns id +func (g *Group) ID() id.PropertyItemID { + if g == nil { + return id.PropertyItemID{} + } + return g.itemBase.ID +} + +// IDRef returns a reference of id +func (g *Group) IDRef() *id.PropertyItemID { + if g == nil { + return nil + } + return g.itemBase.ID.Ref() +} + +// SchemaGroup returns id of schema group +func (g *Group) SchemaGroup() id.PropertySchemaFieldID { + if g == nil { + return id.PropertySchemaFieldID("") + } + return g.itemBase.SchemaGroup +} + +// SchemaGroupRef _ +func (g *Group) SchemaGroupRef() *id.PropertySchemaFieldID { + if g == nil { + return nil + } + return g.itemBase.SchemaGroup.Ref() +} + +// Schema _ +func (g *Group) Schema() id.PropertySchemaID { + if g == nil { + return id.PropertySchemaID{} + } + return g.itemBase.Schema +} + +// SchemaRef _ +func (g *Group) SchemaRef() *id.PropertySchemaID { + if g == nil { + return nil + } + return g.itemBase.Schema.Ref() +} + +// HasLinkedField _ +func (g *Group) HasLinkedField() bool { + if g == nil { + return false + } + for _, f := range g.fields { + if f.HasLinkedField() { + return true + } + } + return false +} + +// CollectDatasets _ +func (g *Group) CollectDatasets() []id.DatasetID { + if g == nil { + return nil + } + res := []id.DatasetID{} + + for _, f := range g.fields { + res = append(res, f.CollectDatasets()...) 
+ } + + return res +} + +// FieldsByLinkedDataset _ +func (g *Group) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { + if g == nil { + return nil + } + res := []*Field{} + for _, f := range g.fields { + if f.Links().IsDatasetLinked(s, i) { + res = append(res, f) + } + } + return res +} + +// IsDatasetLinked _ +func (g *Group) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { + if g == nil { + return false + } + for _, f := range g.fields { + if f.IsDatasetLinked(s, i) { + return true + } + } + return false +} + +// IsEmpty _ +func (g *Group) IsEmpty() bool { + if g != nil { + for _, f := range g.fields { + if !f.IsEmpty() { + return false + } + } + } + return true +} + +// Prune _ +func (g *Group) Prune() { + if g == nil { + return + } + for _, f := range g.fields { + if f.IsEmpty() { + g.RemoveField(f.Field()) + } + } +} + +// TODO: group migration +func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { + if g == nil || dl == nil { + return + } + + g.itemBase.Schema = newSchema.ID() + + for _, f := range g.fields { + if !f.MigrateSchema(ctx, newSchema, dl) { + g.RemoveField(f.Field()) + } + } + + g.Prune() +} + +// GetOrCreateField _ +func (g *Group) GetOrCreateField(ps *Schema, fid id.PropertySchemaFieldID) (*Field, bool) { + if g == nil || ps == nil || g.Schema() != ps.ID() { + return nil, false + } + psg := ps.Group(g.SchemaGroup()) + if psg == nil { + return nil, false + } + + psf := psg.Field(fid) + if psf == nil { + return nil, false + } + + psfid := psf.ID() + field := g.Field(psfid) + if field != nil { + return field, false + } + + // if the field does not exist, create it here + field, _ = NewField(psf).Build() + if field == nil { + return nil, false + } + + g.fields = append(g.fields, field) + return field, true +} + +// RemoveField _ +func (g *Group) RemoveField(fid id.PropertySchemaFieldID) { + if g == nil { + return + } + for i, f := range g.fields { + if f.Field() == fid { + 
g.fields = append(g.fields[:i], g.fields[i+1:]...) + return + } + } +} + +// FieldIDs _ +func (g *Group) FieldIDs() []id.PropertySchemaFieldID { + if g == nil { + return nil + } + fields := make([]id.PropertySchemaFieldID, 0, len(g.fields)) + for _, f := range g.fields { + fields = append(fields, f.Field()) + } + return fields +} + +// Fields returns a slice of fields +func (g *Group) Fields() []*Field { + if g == nil { + return nil + } + return append([]*Field{}, g.fields...) +} + +// Field returns a field whose id is specified +func (g *Group) Field(fid id.PropertySchemaFieldID) *Field { + if g == nil { + return nil + } + for _, f := range g.fields { + if f.Field() == fid { + return f + } + } + return nil +} + +// MigrateDataset _ +func (g *Group) MigrateDataset(q DatasetMigrationParam) { + if g == nil { + return + } + for _, f := range g.fields { + f.MigrateDataset(q) + } +} + +func (g *Group) UpdateNameFieldValue(ps *Schema, value *Value) error { + if g == nil || ps == nil || g.Schema() != ps.ID() { + return nil + } + if psg := ps.GroupByPointer(NewPointer(&g.itemBase.SchemaGroup, nil, nil)); psg != nil { + if representativeField := psg.RepresentativeFieldID(); representativeField != nil { + if f, _ := g.GetOrCreateField(ps, *representativeField); f != nil { + return f.Update(value, psg.Field(*representativeField)) + } + } + } + return ErrInvalidPropertyField +} + +func (p *Group) ValidateSchema(ps *SchemaGroup) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("invalid schema") + } + if p.Schema() != ps.Schema() { + return errors.New("invalid schema id") + } + if p.SchemaGroup() != ps.ID() { + return errors.New("invalid schema group id") + } + + for _, i := range p.fields { + if err := i.ValidateSchema(ps.Field(i.Field())); err != nil { + return fmt.Errorf("%s: %w", i.Field(), err) + } + } + + return nil +} diff --git a/pkg/property/group_builder.go b/pkg/property/group_builder.go new file mode 100644 index 000000000..f7c4a63e9 --- 
/dev/null +++ b/pkg/property/group_builder.go @@ -0,0 +1,83 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +// GroupBuilder _ +type GroupBuilder struct { + p *Group +} + +// NewGroup _ +func NewGroup() *GroupBuilder { + return &GroupBuilder{ + p: &Group{}, + } +} + +// InitGroupFrom _ +func InitGroupFrom(g *SchemaGroup) *Group { + if g == nil { + return nil + } + g2, _ := NewGroup().NewID().Schema(g.Schema(), g.ID()).Build() + return g2 +} + +// Build _ +func (b *GroupBuilder) Build() (*Group, error) { + if id.ID(b.p.itemBase.ID).IsNil() { + return nil, id.ErrInvalidID + } + return b.p, nil +} + +// MustBuild _ +func (b *GroupBuilder) MustBuild() *Group { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *GroupBuilder) base(base itemBase) *GroupBuilder { + b.p.itemBase = base + return b +} + +// ID _ +func (b *GroupBuilder) ID(id id.PropertyItemID) *GroupBuilder { + b.p.itemBase.ID = id + return b +} + +// NewID _ +func (b *GroupBuilder) NewID() *GroupBuilder { + b.p.itemBase.ID = id.NewPropertyItemID() + return b +} + +// Schema _ +func (b *GroupBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaFieldID) *GroupBuilder { + b.p.itemBase.Schema = s + b.p.itemBase.SchemaGroup = g + return b +} + +// Fields _ +func (b *GroupBuilder) Fields(fields []*Field) *GroupBuilder { + var newFields []*Field + ids := map[id.PropertySchemaFieldID]struct{}{} + for _, f := range fields { + if f == nil { + continue + } + if _, ok := ids[f.Field()]; ok { + continue + } + ids[f.Field()] = struct{}{} + newFields = append(newFields, f) + } + b.p.fields = newFields + return b +} diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go new file mode 100644 index 000000000..fb7ae6615 --- /dev/null +++ b/pkg/property/group_builder_test.go @@ -0,0 +1,153 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) 
+ +func TestGroupBuilder_Build(t *testing.T) { + iid := id.NewPropertyItemID() + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + testCases := []struct { + Name string + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Fields []*Field + Expected struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Fields []*Field + } + Err error + }{ + { + Name: "fail invalid id", + Err: id.ErrInvalidID, + }, + { + Name: "success", + Id: iid, + Schema: sid, + SchemaGroup: "a", + Fields: []*Field{f}, + Expected: struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Fields []*Field + }{ + Id: iid, + Schema: sid, + SchemaGroup: "a", + Fields: []*Field{f}, + }, + Err: nil, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewGroup().ID(tc.Id).Fields(tc.Fields).Schema(tc.Schema, tc.SchemaGroup).Build() + if err == nil { + assert.Equal(tt, tc.Expected.Fields, res.Fields()) + assert.Equal(tt, tc.Expected.Schema, res.Schema()) + assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) + assert.Equal(tt, tc.Expected.Id, res.ID()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} + +func TestGroupBuilder_MustBuild(t *testing.T) { + iid := id.NewPropertyItemID() + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + testCases := []struct { + Name string + Fail bool + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Fields []*Field + Expected struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup 
id.PropertySchemaFieldID + Fields []*Field + } + }{ + { + Name: "fail invalid id", + Fail: true, + }, + { + Name: "success", + Id: iid, + Schema: sid, + SchemaGroup: "a", + Fields: []*Field{f}, + Expected: struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Fields []*Field + }{ + Id: iid, + Schema: sid, + SchemaGroup: "a", + Fields: []*Field{f}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *Group + if tc.Fail { + defer func() { + if r := recover(); r != nil { + assert.Nil(tt, res) + } + }() + res = NewGroup().ID(tc.Id).Fields(tc.Fields).Schema(tc.Schema, tc.SchemaGroup).MustBuild() + } else { + res = NewGroup().ID(tc.Id).Fields(tc.Fields).Schema(tc.Schema, tc.SchemaGroup).MustBuild() + assert.Equal(tt, tc.Expected.Fields, res.Fields()) + assert.Equal(tt, tc.Expected.Schema, res.Schema()) + assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) + assert.Equal(tt, tc.Expected.Id, res.ID()) + } + + }) + } +} + +func TestGroupBuilder_NewID(t *testing.T) { + g := NewGroup().NewID().MustBuild() + assert.False(t, g.ID().IsNil()) +} + +func TestGroupBuilder_InitGroupFrom(t *testing.T) { + var sg *SchemaGroup + assert.Nil(t, InitGroupFrom(sg)) + sg = NewSchemaGroup().ID("a").Schema(id.MustPropertySchemaID("xx/aa")).MustBuild() + g := InitGroupFrom(sg) + assert.Equal(t, sg.ID(), g.SchemaGroup()) + assert.Equal(t, sg.Schema(), g.Schema()) +} diff --git a/pkg/property/group_list.go b/pkg/property/group_list.go new file mode 100644 index 000000000..c8b7e4fe8 --- /dev/null +++ b/pkg/property/group_list.go @@ -0,0 +1,394 @@ +package property + +import ( + "context" + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +// GroupList _ +type GroupList struct { + itemBase + groups []*Group +} + +// List implements Item interface +var _ Item = &GroupList{} + +// ID returns id +func 
(g *GroupList) ID() id.PropertyItemID { + if g == nil { + return id.PropertyItemID{} + } + return g.itemBase.ID +} + +// IDRef returns a reference of id +func (g *GroupList) IDRef() *id.PropertyItemID { + if g == nil { + return nil + } + return g.itemBase.ID.Ref() +} + +// SchemaGroup returns id of schema group +func (g *GroupList) SchemaGroup() id.PropertySchemaFieldID { + if g == nil { + return id.PropertySchemaFieldID("") + } + return g.itemBase.SchemaGroup +} + +// SchemaGroupRef _ +func (g *GroupList) SchemaGroupRef() *id.PropertySchemaFieldID { + if g == nil { + return nil + } + return g.itemBase.SchemaGroup.Ref() +} + +// Schema _ +func (g *GroupList) Schema() id.PropertySchemaID { + if g == nil { + return id.PropertySchemaID{} + } + return g.itemBase.Schema +} + +// SchemaRef _ +func (g *GroupList) SchemaRef() *id.PropertySchemaID { + if g == nil { + return nil + } + return g.itemBase.Schema.Ref() +} + +// HasLinkedField _ +func (g *GroupList) HasLinkedField() bool { + if g == nil { + return false + } + for _, f := range g.groups { + if f.HasLinkedField() { + return true + } + } + return false +} + +// CollectDatasets _ +func (g *GroupList) CollectDatasets() []id.DatasetID { + if g == nil { + return nil + } + res := []id.DatasetID{} + + for _, f := range g.groups { + res = append(res, f.CollectDatasets()...) + } + + return res +} + +// FieldsByLinkedDataset _ +func (g *GroupList) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { + if g == nil { + return nil + } + res := []*Field{} + for _, g := range g.groups { + res = append(res, g.FieldsByLinkedDataset(s, i)...) 
+ } + return res +} + +// IsDatasetLinked _ +func (g *GroupList) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { + if g == nil { + return false + } + for _, d := range g.groups { + if d.IsDatasetLinked(s, i) { + return true + } + } + return false +} + +// IsEmpty _ +func (g *GroupList) IsEmpty() bool { + return g != nil && (g.groups == nil || len(g.groups) == 0) +} + +// Prune _ +func (g *GroupList) Prune() { + if g == nil { + return + } + for _, f := range g.groups { + f.Prune() + } +} + +// MigrateSchema _ +func (g *GroupList) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { + if g == nil || dl == nil { + return + } + + g.itemBase.Schema = newSchema.ID() + + for _, f := range g.groups { + f.MigrateSchema(ctx, newSchema, dl) + } + + g.Prune() +} + +// Groups returns a slice of groups +func (g *GroupList) Groups() []*Group { + if g == nil { + return nil + } + return append([]*Group{}, g.groups...) +} + +// GetGroup returns a group whose id is specified +func (g *GroupList) GetGroup(gid id.PropertyItemID) *Group { + if g == nil { + return nil + } + for _, f := range g.groups { + if f.ID() == gid { + return f + } + } + return nil +} + +// GroupAt returns a group whose index is specified +func (g *GroupList) GroupAt(i int) *Group { + if g == nil || i < 0 || i > len(g.groups)-1 { + return nil + } + return g.groups[i] +} + +// Has _ +func (g *GroupList) Has(i id.PropertyItemID) bool { + if g == nil { + return false + } + for _, gg := range g.groups { + if gg.ID() == i { + return true + } + } + return false +} + +// Count _ +func (g *GroupList) Count() int { + if g == nil { + return 0 + } + return len(g.groups) +} + +// Add _ +func (g *GroupList) Add(gg *Group, index int) { + if g == nil || g.Has(gg.ID()) { + return + } + + le := len(g.groups) + if index < 0 || le <= index { + g.groups = append(g.groups, gg) + } else { + g.groups = append(g.groups[:index], append([]*Group{gg}, g.groups[index:]...)...) 
+ } +} + +// AddOrMove _ +func (g *GroupList) AddOrMove(gg *Group, index int) { + if g == nil { + return + } + + le := len(g.groups) + if index < 0 || le <= index { + index = le + } + + gid := gg.ID() + if g.Has(gid) { + g.Move(gid, index) + return + } + g.groups = append(g.groups[:index], append([]*Group{gg}, g.groups[index:]...)...) +} + +// Move _ +func (g *GroupList) Move(id id.PropertyItemID, toIndex int) { + if g == nil { + return + } + + for fromIndex, gg := range g.groups { + if gg.ID() == id { + g.MoveAt(fromIndex, toIndex) + return + } + } +} + +// MoveAt _ +func (g *GroupList) MoveAt(fromIndex int, toIndex int) { + if g == nil { + return + } + + le := len(g.groups) + if fromIndex < 0 || le <= fromIndex { + return + } + if toIndex < 0 || le <= toIndex { + toIndex = le - 1 + } + if fromIndex == toIndex { + return + } + + f := g.groups[fromIndex] + g.groups = append(g.groups[:fromIndex], g.groups[fromIndex+1:]...) + newSlice := make([]*Group, toIndex+1) + copy(newSlice, g.groups[:toIndex]) + newSlice[toIndex] = f + g.groups = append(newSlice, g.groups[toIndex:]...) +} + +// Remove _ +func (g *GroupList) Remove(id id.PropertyItemID) bool { + if g == nil { + return false + } + + for index, gg := range g.groups { + if gg.ID() == id { + g.RemoveAt(index) + return true + } + } + + return false +} + +// RemoveAt _ +func (g *GroupList) RemoveAt(index int) { + if g == nil { + return + } + + le := len(g.groups) + if index < 0 || le <= index { + return + } + var groups []*Group + if index == le { + groups = []*Group{} + } else { + groups = g.groups[index+1:] + } + g.groups = append(g.groups[:index], groups...) 
+} + +// Empty _ +func (g *GroupList) Empty() { + if g == nil { + return + } + + g.groups = []*Group{} +} + +// GetOrCreateField _ +func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { + if g == nil || ptr == nil || ps == nil || ps.ID() != g.Schema() { + return nil, false + } + psg := ps.Group(g.SchemaGroup()) + if psg == nil { + return nil, false + } + + item, fid, ok := ptr.FieldByItem() + if !ok { + return nil, false + } + + i := g.GetGroup(item) + if i == nil { + return nil, false + } + + return i.GetOrCreateField(ps, fid) +} + +// CreateAndAddListItem _ +func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { + if g == nil || ps == nil || g.Schema() != ps.ID() { + return nil + } + psg := ps.Group(g.SchemaGroup()) + if psg == nil { + return nil + } + + index2 := -1 + if index != nil { + index2 = *index + } + + if ni := InitGroupFrom(psg); ni != nil { + g.Add(ni, index2) + return ni + } + + return nil +} + +// MigrateDataset _ +func (g *GroupList) MigrateDataset(q DatasetMigrationParam) { + if g == nil { + return + } + for _, f := range g.groups { + f.MigrateDataset(q) + } +} + +func (p *GroupList) ValidateSchema(ps *SchemaGroup) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("invalid schema") + } + if p.Schema() != ps.Schema() { + return errors.New("invalid schema id") + } + if p.SchemaGroup() != ps.ID() { + return errors.New("invalid schema group id") + } + + for _, i := range p.groups { + if err := i.ValidateSchema(ps); err != nil { + return fmt.Errorf("%s: %w", i.ID(), err) + } + } + + return nil +} diff --git a/pkg/property/group_list_builder.go b/pkg/property/group_list_builder.go new file mode 100644 index 000000000..0f6d55b2e --- /dev/null +++ b/pkg/property/group_list_builder.go @@ -0,0 +1,83 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +// GroupListBuilder _ +type GroupListBuilder struct { + p *GroupList +} + +// NewGroupList _ +func 
NewGroupList() *GroupListBuilder { + return &GroupListBuilder{ + p: &GroupList{}, + } +} + +// InitGroupListFrom _ +func InitGroupListFrom(g *SchemaGroup) *GroupList { + if g == nil || !g.IsList() { + return nil + } + g2, _ := NewGroupList().NewID().Schema(g.Schema(), g.ID()).Build() + return g2 +} + +// Build _ +func (b *GroupListBuilder) Build() (*GroupList, error) { + if id.ID(b.p.itemBase.ID).IsNil() { + return nil, id.ErrInvalidID + } + return b.p, nil +} + +// MustBuild _ +func (b *GroupListBuilder) MustBuild() *GroupList { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *GroupListBuilder) base(base itemBase) *GroupListBuilder { + b.p.itemBase = base + return b +} + +// ID _ +func (b *GroupListBuilder) ID(id id.PropertyItemID) *GroupListBuilder { + b.p.itemBase.ID = id + return b +} + +// NewID _ +func (b *GroupListBuilder) NewID() *GroupListBuilder { + b.p.itemBase.ID = id.NewPropertyItemID() + return b +} + +// Schema _ +func (b *GroupListBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaFieldID) *GroupListBuilder { + b.p.itemBase.Schema = s + b.p.itemBase.SchemaGroup = g + return b +} + +// Groups _ +func (b *GroupListBuilder) Groups(fields []*Group) *GroupListBuilder { + newGroups := []*Group{} + ids := map[id.PropertyItemID]struct{}{} + for _, f := range fields { + if f == nil { + continue + } + if _, ok := ids[f.ID()]; ok { + continue + } + ids[f.ID()] = struct{}{} + newGroups = append(newGroups, f) + } + b.p.groups = newGroups + return b +} diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go new file mode 100644 index 000000000..95adc1407 --- /dev/null +++ b/pkg/property/group_list_builder_test.go @@ -0,0 +1,166 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestGroupListBuilder_Build(t *testing.T) { + pid := id.NewPropertyItemID() + scid := 
id.MustPropertySchemaID("xx/aa") + groups := []*Group{NewGroup().ID(pid).MustBuild()} + testCases := []struct { + Name string + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Groups []*Group + Expected struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Groups []*Group + } + Err error + }{ + { + Name: "success", + Id: pid, + Schema: scid, + SchemaGroup: "aa", + Groups: groups, + Expected: struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Groups []*Group + }{ + Id: pid, + Schema: scid, + SchemaGroup: "aa", + Groups: groups, + }, + }, + { + Name: "fail invalid id", + Err: id.ErrInvalidID, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewGroupList().ID(tc.Id).Schema(tc.Schema, tc.SchemaGroup).Groups(tc.Groups).Build() + if err == nil { + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) + assert.Equal(tt, tc.Expected.Schema, res.Schema()) + assert.Equal(tt, tc.Expected.Groups, res.Groups()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} + +func TestGroupListBuilder_NewID(t *testing.T) { + b := NewGroupList().NewID().MustBuild() + assert.NotNil(t, b.ID()) +} + +func TestGroupListBuilder_MustBuild(t *testing.T) { + pid := id.NewPropertyItemID() + scid := id.MustPropertySchemaID("xx/aa") + groups := []*Group{NewGroup().ID(pid).MustBuild()} + testCases := []struct { + Name string + Fails bool + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Groups []*Group + Expected struct { + Id id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Groups []*Group + } + }{ + { + Name: "success", + Id: pid, + Schema: scid, + SchemaGroup: "aa", + Groups: groups, + Expected: struct { + Id id.PropertyItemID + Schema 
id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID + Groups []*Group + }{ + Id: pid, + Schema: scid, + SchemaGroup: "aa", + Groups: groups, + }, + }, + { + Name: "fail invalid id", + Fails: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *GroupList + if tc.Fails { + defer func() { + if r := recover(); r != nil { + assert.Nil(tt, res) + } + }() + res = NewGroupList().ID(tc.Id).Schema(tc.Schema, tc.SchemaGroup).Groups(tc.Groups).MustBuild() + } else { + res = NewGroupList().ID(tc.Id).Schema(tc.Schema, tc.SchemaGroup).Groups(tc.Groups).MustBuild() + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) + assert.Equal(tt, tc.Expected.Schema, res.Schema()) + assert.Equal(tt, tc.Expected.Groups, res.Groups()) + } + + }) + } +} + +func TestInitGroupListFrom(t *testing.T) { + testCases := []struct { + Name string + SchemaGroup *SchemaGroup + ExpectedSG id.PropertySchemaFieldID + ExpectedSchema id.PropertySchemaID + }{ + { + Name: "nil schema group", + }, + { + Name: "success", + SchemaGroup: NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).MustBuild(), + ExpectedSG: "aa", + ExpectedSchema: id.MustPropertySchemaID("xx/aa"), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := InitGroupFrom(tc.SchemaGroup) + assert.Equal(tt, tc.ExpectedSG, res.SchemaGroup()) + assert.Equal(tt, tc.ExpectedSchema, res.Schema()) + }) + } +} diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go new file mode 100644 index 000000000..6579e20df --- /dev/null +++ b/pkg/property/group_list_test.go @@ -0,0 +1,742 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestGroupList_IDRef(t *testing.T) { + var b *GroupList + assert.Nil(t, b.IDRef()) + b = 
NewGroupList().NewID().MustBuild() + assert.NotNil(t, b.IDRef()) +} + +func TestGroupList_SchemaRef(t *testing.T) { + testCases := []struct { + Name string + GL *GroupList + ExpectedSG *id.PropertySchemaFieldID + ExpectedSchema *id.PropertySchemaID + }{ + { + Name: "nil group list", + }, + { + Name: "success", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("aa/xx"), id.PropertySchemaFieldID("xx")).MustBuild(), + ExpectedSG: id.PropertySchemaFieldID("xx").Ref(), + ExpectedSchema: id.MustPropertySchemaID("aa/xx").Ref(), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.ExpectedSG, tc.GL.SchemaGroupRef()) + assert.Equal(tt, tc.ExpectedSchema, tc.GL.SchemaRef()) + }) + } +} + +func TestGroupList_HasLinkedField(t *testing.T) { + pid := id.NewPropertyItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).MustBuild()} + testCases := []struct { + Name string + GL *GroupList + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "has linked field", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + Expected: true, + }, + { + Name: "no linked field", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups2).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.HasLinkedField()) + assert.Equal(tt, tc.Expected, tc.GL.IsDatasetLinked(dssid, dsid)) + }) + } +} + +func TestGroupList_CollectDatasets(t 
*testing.T) { + pid := id.NewPropertyItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).MustBuild()} + testCases := []struct { + Name string + GL *GroupList + Expected []id.DatasetID + }{ + { + Name: "nil group list", + }, + { + Name: "one dataset", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + Expected: []id.DatasetID{dsid}, + }, + { + Name: "empty list", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups2).MustBuild(), + Expected: []id.DatasetID{}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.CollectDatasets()) + }) + } +} + +func TestGroupList_FieldsByLinkedDataset(t *testing.T) { + pid := id.NewPropertyItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).MustBuild()} + testCases := []struct { + Name string + GL *GroupList + Expected []*Field + }{ + { + Name: "nil group list", + }, + { + Name: "one field list", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + Expected: []*Field{f}, + }, + { + Name: "empty list", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), 
"xx").Groups(groups2).MustBuild(), + Expected: []*Field{}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.FieldsByLinkedDataset(dssid, dsid)) + }) + } +} + +func TestGroupList_IsEmpty(t *testing.T) { + pid := id.NewPropertyItemID() + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + testCases := []struct { + Name string + GL *GroupList + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "is empty", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").MustBuild(), + Expected: true, + }, + { + Name: "is not empty", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.IsEmpty()) + }) + } +} + +func TestGroupList_Prune(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + f2 := NewField(sf).MustBuild() + pid := id.NewPropertyItemID() + groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f, f2}).MustBuild()} + pruned := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + testCases := []struct { + Name string + GL *GroupList + Expected []*Group + }{ + { + Name: "nil group list", + }, + { + Name: "pruned list", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + Expected: pruned, + }, + } + for _, tc := range testCases { + tc := 
tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.GL.Prune() + assert.Equal(tt, tc.Expected, tc.GL.Groups()) + }) + } +} + +func TestGroupList_GetGroup(t *testing.T) { + pid := id.NewPropertyItemID() + g := NewGroup().ID(pid).MustBuild() + testCases := []struct { + Name string + Input id.PropertyItemID + GL *GroupList + Expected *Group + }{ + { + Name: "nil group list", + }, + { + Name: "found", + Input: pid, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g}).MustBuild(), + Expected: g, + }, + { + Name: "not found", + Input: id.NewPropertyItemID(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g}).MustBuild(), + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.GetGroup(tc.Input)) + }) + } +} + +func TestGroupList_GroupAt(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + Index int + GL *GroupList + Expected *Group + }{ + { + Name: "nil group list", + }, + { + Name: "index < 0", + Index: -1, + }, + { + Name: "index > len(g)-1", + Index: 4, + }, + { + Name: "found", + Index: 2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: g3, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.GroupAt(tc.Index)) + }) + } +} + +func TestGroupList_Has(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := 
NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + Input id.PropertyItemID + GL *GroupList + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: "found", + Input: g2.ID(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: true, + }, + { + Name: "not found", + Input: g3.ID(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g4}).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.Has(tc.Input)) + }) + } +} + +func TestGroupList_Count(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Expected int + }{ + { + Name: "nil group list", + }, + { + Name: "not found", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: 4, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.GL.Count()) + }) + } +} + +func TestGroupList_Add(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Gr *Group + Index int + Expected struct { + Gr *Group + Index int + } + }{ + { + Name: "nil group list", + }, + { + Name: "index < 0", + Index: -1, + Gr: g2, + GL: 
NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 3, + }, + }, + { + Name: "len(g) > index > 0 ", + Index: 2, + Gr: g2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 2, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.GL.Add(tc.Gr, tc.Index) + assert.Equal(tt, tc.Expected.Gr, tc.GL.GroupAt(tc.Expected.Index)) + }) + } +} + +func TestGroupList_AddOrMove(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Gr *Group + Index int + Expected struct { + Gr *Group + Index int + } + }{ + { + Name: "nil group list", + }, + { + Name: "index < 0", + Index: -1, + Gr: g2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 3, + }, + }, + { + Name: "len(g) > index > 0 ", + Index: 2, + Gr: g2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g2, + Index: 2, + }, + }, + { + Name: "move group", + Index: 2, + Gr: g1, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + Expected: struct { + Gr *Group + Index int + }{ + Gr: g1, + Index: 2, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.GL.AddOrMove(tc.Gr, 
tc.Index) + assert.Equal(tt, tc.Expected.Gr, tc.GL.GroupAt(tc.Expected.Index)) + }) + } +} + +func TestGroupList_Move(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Id id.PropertyItemID + ToIndex int + Expected struct { + Id id.PropertyItemID + Index int + } + }{ + { + Name: "nil group list", + }, + { + Name: "success", + Id: g1.ID(), + ToIndex: 2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: struct { + Id id.PropertyItemID + Index int + }{Id: g1.ID(), Index: 2}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.GL.Move(tc.Id, tc.ToIndex) + assert.Equal(tt, tc.Expected.Id, tc.GL.GroupAt(tc.Expected.Index).ID()) + }) + } +} + +func TestGroupList_MoveAt(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + FromIndex, ToIndex int + Expected []*Group + }{ + { + Name: "nil group list", + }, + { + Name: "from = to", + FromIndex: 2, + ToIndex: 2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + { + Name: "from < 0", + FromIndex: -1, + ToIndex: 2, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + { + Name: "success move", + FromIndex: 0, + ToIndex: 2, + GL: 
NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g2, g3, g1, g4}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.GL.MoveAt(tc.FromIndex, tc.ToIndex) + assert.Equal(tt, tc.Expected, tc.GL.Groups()) + }) + } +} + +func TestGroupList_RemoveAt(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Index int + Expected []*Group + }{ + { + Name: "nil group list", + }, + { + Name: "success", + Index: 1, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g3, g4}, + }, + { + Name: "index < 0", + Index: -1, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + { + Name: "index > length", + Index: 5, + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: []*Group{g1, g2, g3, g4}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.GL.RemoveAt(tc.Index) + assert.Equal(tt, tc.Expected, tc.GL.Groups()) + }) + } +} +func TestGroupList_Remove(t *testing.T) { + g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Input id.PropertyItemID + Expected bool + }{ + { + Name: "nil group list", + }, + { + Name: 
"success", + Input: g1.ID(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + Expected: true, + }, + { + Name: "not found", + Input: g4.ID(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3}).MustBuild(), + Expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.GL.Remove(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroupList_GetOrCreateField(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sf.ID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Schema *Schema + Ptr *Pointer + Expected struct { + Ok bool + Field *Field + } + }{ + { + Name: "success", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + Expected: struct { + Ok bool + Field *Field + }{ + Ok: true, + Field: NewField(sf).MustBuild(), + }, + }, + { + Name: "can't get a group", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + }, + { + Name: "FieldByItem not ok: sg!=nil", + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Ptr: NewPointer(sg.IDRef(), g.IDRef(), sf.ID().Ref()), + }, + { + Name: 
"psg == nil", + GL: NewGroupList().NewID().Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, ok := tc.GL.GetOrCreateField(tc.Schema, tc.Ptr) + assert.Equal(tt, tc.Expected.Field, res) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestGroupList_CreateAndAddListItem(t *testing.T) { + getIntRef := func(i int) *int { return &i } + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sf.ID()).MustBuild() + testCases := []struct { + Name string + GL *GroupList + Schema *Schema + Index *int + Expected *Group + }{ + { + Name: "success", + Index: getIntRef(0), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Expected: g, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.GL.CreateAndAddListItem(tc.Schema, tc.Index) + assert.Equal(tt, tc.Expected.Schema(), res.Schema()) + assert.Equal(tt, tc.Expected.Fields(), res.Fields()) + assert.Equal(tt, tc.Expected.SchemaGroup(), res.SchemaGroup()) + }) + } +} diff --git a/pkg/property/group_test.go b/pkg/property/group_test.go new file mode 100644 index 000000000..8a041f333 --- /dev/null +++ b/pkg/property/group_test.go @@ -0,0 +1,471 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestGroup_IDRef(t *testing.T) { + gid := id.NewPropertyItemID() + var g *Group + assert.Nil(t, 
g.IDRef()) + g = NewGroup().ID(gid).MustBuild() + assert.Equal(t, gid.Ref(), g.IDRef()) +} + +func TestGroup_SchemaGroup(t *testing.T) { + var g *Group + assert.Nil(t, g.SchemaGroupRef()) + assert.Equal(t, id.PropertySchemaFieldID(""), g.SchemaGroup()) + pfid := id.PropertySchemaFieldID("aa") + g = NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), pfid).MustBuild() + assert.Equal(t, pfid, g.SchemaGroup()) + assert.Equal(t, pfid.Ref(), g.SchemaGroupRef()) +} + +func TestGroup_HasLinkedField(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + f := NewField(sf).Value(v).Link(ls).MustBuild() + f2 := NewField(sf).Value(v).MustBuild() + + testCases := []struct { + Name string + Group *Group + Expected bool + }{ + { + Name: "nil group", + Group: nil, + Expected: false, + }, + { + Name: "true", + Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Expected: true, + }, + { + Name: "false", + Group: NewGroup().NewID().Fields([]*Field{f2}).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.HasLinkedField() + assert.Equal(tt, tc.Expected, res) + }) + } +} +func TestGroup_IsDatasetLinked(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + l := NewLink(dsid, dssid, id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + f := NewField(sf).Value(v).Link(ls).MustBuild() + f2 := NewField(sf).Value(v).MustBuild() + + testCases := []struct { + Name string + Group *Group + DatasetSchema id.DatasetSchemaID + Dataset id.DatasetID + Expected bool + }{ + { + Name: "nil group", + }, + { + Name: "true", + Group: 
NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Dataset: dsid, + DatasetSchema: dssid, + Expected: true, + }, + { + Name: "false", + Group: NewGroup().NewID().Fields([]*Field{f2}).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.IsDatasetLinked(tc.DatasetSchema, tc.Dataset) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroup_CollectDatasets(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + l := NewLink(dsid, id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + f := NewField(sf).Value(v).Link(ls).MustBuild() + + testCases := []struct { + Name string + Group *Group + Expected []id.DatasetID + }{ + { + Name: "nil group", + Group: nil, + Expected: nil, + }, + { + Name: "normal case", + Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Expected: []id.DatasetID{dsid}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.CollectDatasets() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroup_FieldsByLinkedDataset(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + dsid := id.NewDatasetID() + dssid := id.NewDatasetSchemaID() + l := NewLink(dsid, dssid, id.NewDatasetSchemaFieldID()) + ls := NewLinks([]*Link{l}) + f := NewField(sf).Value(v).Link(ls).MustBuild() + + testCases := []struct { + Name string + Group *Group + DatasetSchema id.DatasetSchemaID + DataSet id.DatasetID + Expected []*Field + }{ + { + Name: "nil group", + }, + { + Name: "normal case", + DataSet: dsid, + DatasetSchema: dssid, + Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Expected: []*Field{f}, + }, + } + for _, tc := range testCases { + tc := tc + 
t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.FieldsByLinkedDataset(tc.DatasetSchema, tc.DataSet) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroup_IsEmpty(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + f2 := NewField(sf).MustBuild() + + testCases := []struct { + Name string + Group *Group + Expected bool + }{ + + { + Name: "true case", + Group: NewGroup().NewID().Fields([]*Field{f2}).MustBuild(), + Expected: true, + }, + { + Name: "false case", + Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.IsEmpty() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroup_Prune(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + f2 := NewField(sf).MustBuild() + + testCases := []struct { + Name string + Group *Group + Expected []*Field + }{ + + { + Name: "nil group", + }, + { + Name: "normal case", + Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Expected: []*Field{f}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.Group.Prune() + assert.Equal(tt, tc.Expected, tc.Group.Fields()) + }) + } +} + +func TestGroup_GetOrCreateField(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + f := NewField(sf).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + testCases := []struct { + Name string + Group *Group + PS *Schema + FID id.PropertySchemaFieldID + Expected struct { + Field *Field + Bool bool + } + }{ + { + Name: "nil group", + }, + { + Name: "nil 
ps", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + }, + { + Name: "group schema doesn't equal to ps", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xxx/aaa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + }, + { + Name: "create field", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + FID: "aa", + Expected: struct { + Field *Field + Bool bool + }{ + Field: NewField(sf).MustBuild(), + Bool: true, + }, + }, + { + Name: "get field", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").Fields([]*Field{f}).MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + FID: "aa", + Expected: struct { + Field *Field + Bool bool + }{ + Field: NewField(sf).MustBuild(), + Bool: false, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, b := tc.Group.GetOrCreateField(tc.PS, tc.FID) + assert.Equal(tt, tc.Expected.Field, res) + assert.Equal(tt, tc.Expected.Bool, b) + }) + } +} + +func TestGroup_RemoveField(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + f2 := NewField(sf2).MustBuild() + + testCases := []struct { + Name string + Group *Group + Input id.PropertySchemaFieldID + Expected []*Field + }{ + + { + Name: "nil group", + }, + { + Name: "normal case", + Input: "b", + Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Expected: []*Field{f}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.Group.RemoveField(tc.Input) + 
assert.Equal(tt, tc.Expected, tc.Group.Fields()) + }) + } +} + +func TestGroup_FieldIDs(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + f2 := NewField(sf2).MustBuild() + + testCases := []struct { + Name string + Group *Group + Expected []id.PropertySchemaFieldID + }{ + + { + Name: "nil group", + }, + { + Name: "normal case", + Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Expected: []id.PropertySchemaFieldID{"a", "b"}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.FieldIDs() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroup_Field(t *testing.T) { + sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() + v := ValueTypeString.ValueFromUnsafe("vvv") + f := NewField(sf).Value(v).MustBuild() + f2 := NewField(sf2).MustBuild() + + testCases := []struct { + Name string + Group *Group + Input id.PropertySchemaFieldID + Expected *Field + }{ + + { + Name: "nil group", + }, + { + Name: "normal case", + Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Input: "a", + Expected: f, + }, + { + Name: "normal case", + Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Input: "x", + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.Field(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestGroup_UpdateNameFieldValue(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + //f := NewField(sf).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := 
NewSchemaGroup().ID("bb").Schema(id.MustPropertySchemaID("xx/bb")).Fields([]*SchemaField{sf}).MustBuild() + testCases := []struct { + Name string + Group *Group + PS *Schema + Value *Value + FID id.PropertySchemaFieldID + Expected *Field + Err error + }{ + { + Name: "nil group", + }, + { + Name: "nil ps", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + }, + { + Name: "group schema doesn't equal to ps", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xxx/aaa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + }, + { + Name: "update value", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Value: ValueTypeString.ValueFromUnsafe("abc"), + FID: "aa", + Expected: NewField(sf).Value(ValueTypeString.ValueFromUnsafe("abc")).MustBuild(), + }, + { + Name: "invalid property field", + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), + Value: ValueTypeString.ValueFromUnsafe("abc"), + FID: "aa", + Expected: nil, + Err: ErrInvalidPropertyField, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Group.UpdateNameFieldValue(tc.PS, tc.Value) + if res == nil { + assert.Equal(tt, tc.Expected, tc.Group.Field(tc.FID)) + } else { + assert.True(tt, errors.As(res, &tc.Err)) + } + }) + } +} diff --git a/pkg/property/initializer.go b/pkg/property/initializer.go new file mode 100644 index 000000000..a47144dc2 --- /dev/null +++ b/pkg/property/initializer.go @@ -0,0 +1,310 @@ +//go:generate go run github.com/globusdigital/deep-copy --type Initializer --pointer-receiver -o initializer_gen.go . 
+ +package property + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ErrSchemaDoesNotMatch = errors.New("schema of the initializer does not match schema of the argument") + +type Initializer struct { + ID *id.PropertyID `json:"id"` + Schema id.PropertySchemaID `json:"schema"` + Items []*InitializerItem `json:"items"` +} + +func (p *Initializer) Clone() *Initializer { + if p == nil { + return nil + } + + var items []*InitializerItem + if p.Items != nil { + items = make([]*InitializerItem, 0, len(p.Items)) + for _, i := range p.Items { + items = append(items, i.Clone()) + } + } + + return &Initializer{ + ID: p.ID.CopyRef(), + Schema: p.Schema, + Items: items, + } +} + +func (p *Initializer) Property(scene id.SceneID) (*Property, error) { + if p == nil { + return nil, nil + } + + i := p.ID + if i == nil { + i = id.NewPropertyID().Ref() + } + + var items []Item + if p.Items != nil { + items = make([]Item, 0, len(p.Items)) + for _, i := range p.Items { + item, err := i.PropertyItem(p.Schema) + if err != nil { + return nil, err + } + items = append(items, item) + } + } + + return New().ID(*i).Schema(p.Schema).Scene(scene).Items(items).Build() +} + +// PropertyIncludingEmpty generates a new property, but even if the initializer is empty, an empty property will be generated. 
+func (p *Initializer) PropertyIncludingEmpty(scene id.SceneID, schema id.PropertySchemaID) (*Property, error) { + if p != nil && p.Schema != schema { + return nil, ErrSchemaDoesNotMatch + } + + pr, err := p.Property(scene) + if err != nil { + return nil, err + } + + if pr == nil { + pr, err = New().NewID().Schema(schema).Scene(scene).Build() + if err != nil { + return nil, err + } + } + + return pr, nil +} + +func (p *Initializer) MustBeProperty(scene id.SceneID) *Property { + r, err := p.Property(scene) + if err != nil { + panic(err) + } + return r +} + +type InitializerItem struct { + ID *id.PropertyItemID `json:"id"` + SchemaItem id.PropertySchemaFieldID `json:"schemaItem"` + Groups []*InitializerGroup `json:"groups"` + Fields []*InitializerField `json:"fields"` +} + +func (p *InitializerItem) Clone() *InitializerItem { + if p == nil { + return nil + } + + var groups []*InitializerGroup + if p.Groups != nil { + groups = make([]*InitializerGroup, 0, len(p.Groups)) + for _, g := range p.Groups { + groups = append(groups, g.Clone()) + } + } + + var fields []*InitializerField + if p.Fields != nil { + fields = make([]*InitializerField, 0, len(p.Fields)) + for _, f := range p.Fields { + fields = append(fields, f.Clone()) + } + } + + return &InitializerItem{ + ID: p.ID.CopyRef(), + SchemaItem: p.SchemaItem, + Groups: groups, + Fields: fields, + } +} + +func (p *InitializerItem) PropertyItem(parent id.PropertySchemaID) (Item, error) { + if p == nil { + return nil, nil + } + + i := p.ID + if i == nil { + i = id.NewPropertyItemID().Ref() + } + + pi := NewItem().ID(*i).Schema(parent, p.SchemaItem) + + if p.Groups != nil { + groups := make([]*Group, 0, len(p.Groups)) + for _, g := range p.Groups { + g2, err := g.PropertyGroup(parent, p.SchemaItem) + if err != nil { + return nil, err + } + if g2 != nil { + groups = append(groups, g2) + } + } + + return pi.GroupList().Groups(groups).Build() + } + + var fields []*Field + if p.Fields != nil { + fields = make([]*Field, 0, 
len(p.Fields)) + for _, f := range p.Fields { + if f2 := f.PropertyField(); f2 != nil { + fields = append(fields, f2) + } + } + } + + return pi.Group().Fields(fields).Build() +} + +func (p *InitializerItem) PropertyGroupList(parent id.PropertySchemaID) *GroupList { + i, _ := p.PropertyItem(parent) + if g := ToGroupList(i); g != nil { + return g + } + return nil +} + +func (p *InitializerItem) PropertyGroup(parent id.PropertySchemaID) *Group { + i, _ := p.PropertyItem(parent) + if g := ToGroup(i); g != nil { + return g + } + return nil +} + +type InitializerGroup struct { + ID *id.PropertyItemID `json:"id"` + Fields []*InitializerField `json:"fields"` +} + +func (p *InitializerGroup) Clone() *InitializerGroup { + if p == nil { + return nil + } + + var fields []*InitializerField + if p.Fields != nil { + fields = make([]*InitializerField, 0, len(p.Fields)) + for _, f := range p.Fields { + fields = append(fields, f.Clone()) + } + } + + return &InitializerGroup{ + ID: p.ID.CopyRef(), + Fields: fields, + } +} + +func (p *InitializerGroup) PropertyGroup(parent id.PropertySchemaID, parentItem id.PropertySchemaFieldID) (*Group, error) { + if p == nil { + return nil, nil + } + + i := p.ID + if i == nil { + i = id.NewPropertyItemID().Ref() + } + + pi := NewItem().ID(*i).Schema(parent, parentItem) + + var fields []*Field + if p.Fields != nil { + fields = make([]*Field, 0, len(p.Fields)) + for _, f := range p.Fields { + if f2 := f.PropertyField(); f2 != nil { + fields = append(fields, f2) + } + } + } + + return pi.Group().Fields(fields).Build() +} + +type InitializerField struct { + Field id.PropertySchemaFieldID `json:"field"` + Type ValueType `json:"type"` + Value *Value `json:"value"` + Links []*InitializerLink `json:"links"` +} + +func (p *InitializerField) Clone() *InitializerField { + if p == nil { + return nil + } + + var links []*InitializerLink + if p.Links != nil { + links = make([]*InitializerLink, 0, len(p.Links)) + for _, l := range p.Links { + links = 
append(links, l.Clone()) + } + } + + return &InitializerField{ + Field: p.Field, + Type: p.Type, + Value: p.Value.Clone(), + Links: links, + } +} + +func (p *InitializerField) PropertyField() *Field { + if p == nil || p.Field == "" || p.Type == "" { + return nil + } + + var plinks *Links + if p.Links != nil { + links := make([]*Link, 0, len(p.Links)) + for _, l := range p.Links { + link := l.PropertyLink() + if link != nil { + links = append(links, link) + } + } + plinks = NewLinks(links) + } + + return NewFieldUnsafe().LinksUnsafe(plinks).FieldUnsafe(p.Field).TypeUnsafe(p.Type).ValueUnsafe(p.Value.Clone()).Build() +} + +type InitializerLink struct { + Dataset *id.DatasetID `json:"dataset"` + Schema id.DatasetSchemaID `json:"schema"` + Field id.DatasetSchemaFieldID `json:"field"` +} + +func (p *InitializerLink) Clone() *InitializerLink { + if p == nil { + return nil + } + + return &InitializerLink{ + Dataset: p.Dataset.CopyRef(), + Schema: p.Schema, + Field: p.Field, + } +} + +func (p *InitializerLink) PropertyLink() *Link { + if p == nil { + return nil + } + + if p.Dataset == nil { + return NewLinkFieldOnly(p.Schema, p.Field) + } + + return NewLink(*p.Dataset, p.Schema, p.Field) +} diff --git a/pkg/property/initializer_test.go b/pkg/property/initializer_test.go new file mode 100644 index 000000000..1b0ab59aa --- /dev/null +++ b/pkg/property/initializer_test.go @@ -0,0 +1,292 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestInitializer_Clone(t *testing.T) { + initializer := &Initializer{ + ID: id.NewPropertyID().Ref(), + Schema: id.MustPropertySchemaID("reearth/marker"), + Items: []*InitializerItem{{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + }}, + } + + cloned := initializer.Clone() + + assert.NotSame(t, cloned, initializer) + assert.NotSame(t, cloned.Items, initializer.Items) + assert.NotSame(t, cloned.Items[0], 
initializer.Items[0]) + assert.Equal(t, cloned, initializer) +} + +func TestInitializer_Property(t *testing.T) { + sid := id.NewSceneID() + initializer := &Initializer{ + ID: id.NewPropertyID().Ref(), + Schema: id.MustPropertySchemaID("reearth/marker"), + Items: []*InitializerItem{{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + }}, + } + + expected := New().ID(*initializer.ID).Schema(initializer.Schema).Scene(sid).Items([]Item{ + NewItem().ID(*initializer.Items[0].ID).Schema(initializer.Schema, initializer.Items[0].SchemaItem).Group().MustBuild(), + }).MustBuild() + + actual, err := initializer.Property(sid) + assert.NoError(t, err) + assert.Equal(t, expected, actual) + + // test if a new id is generated + initializer.ID = nil + actual, err = initializer.Property(sid) + assert.NoError(t, err) + assert.False(t, actual.ID().IsNil()) +} + +func TestInitializer_PropertyIncludingEmpty(t *testing.T) { + sid := id.NewSceneID() + psid := id.MustPropertySchemaID("reearth/hoge") + psid2 := id.MustPropertySchemaID("reearth/marker") + + // test case 1: should generate an empty property + var initializer *Initializer + actual, err := initializer.PropertyIncludingEmpty(sid, psid) + expected := New().ID(actual.ID()).Schema(psid).Scene(sid).MustBuild() + assert.NoError(t, err) + assert.Equal(t, expected, actual) + + // test case 2: should return an error when schema does not match + initializer = &Initializer{ + ID: id.NewPropertyID().Ref(), + Schema: psid2, + Items: []*InitializerItem{{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + }}, + } + + _, err = initializer.PropertyIncludingEmpty(sid, psid) + assert.Equal(t, ErrSchemaDoesNotMatch, err) + + // test case 3: should generate a property normally + actual, err = initializer.PropertyIncludingEmpty(sid, psid2) + expected = New().ID(actual.ID()).Schema(initializer.Schema).Scene(sid).Items([]Item{ +
NewItem().ID(*initializer.Items[0].ID).Schema(initializer.Schema, initializer.Items[0].SchemaItem).Group().MustBuild(), + }).MustBuild() + assert.NoError(t, err) + assert.Equal(t, expected, actual) +} + +func TestInitializerItem_Clone(t *testing.T) { + item := &InitializerItem{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + Groups: []*InitializerGroup{{ + ID: id.NewPropertyItemID().Ref(), + Fields: []*InitializerField{{ + Field: id.PropertySchemaFieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Links: []*InitializerLink{{ + Dataset: id.NewDatasetID().Ref(), + Schema: id.NewDatasetSchemaID(), + Field: id.NewDatasetSchemaFieldID(), + }}, + }}, + }}, + } + + cloned := item.Clone() + + assert.NotSame(t, cloned, item) + assert.NotSame(t, cloned.Groups, item.Groups) + assert.NotSame(t, cloned.Groups[0], item.Groups[0]) + assert.NotSame(t, cloned.Groups[0].Fields, item.Groups[0].Fields) + assert.NotSame(t, cloned.Groups[0].Fields[0], item.Groups[0].Fields[0]) + assert.Equal(t, cloned, item) +} + +func TestInitializerItem_PropertyItem(t *testing.T) { + parent := id.MustPropertySchemaID("reearth/marker") + item := &InitializerItem{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + } + + expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().MustBuild() + + created, err := item.PropertyItem(parent) + assert.NoError(t, err) + assert.Equal(t, expected, created) + + item.ID = nil + created, err = item.PropertyItem(parent) + assert.NoError(t, err) + assert.False(t, created.ID().IsNil()) +} + +func TestInitializerItem_PropertyGroup(t *testing.T) { + parent := id.MustPropertySchemaID("reearth/marker") + item := &InitializerItem{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + Fields: []*InitializerField{{ + Field: id.PropertySchemaFieldID("name"), + Type: ValueTypeString, + Value: 
ValueTypeString.ValueFromUnsafe("aaa"), + }}, + } + + expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().Fields([]*Field{ + NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).TypeUnsafe(item.Fields[0].Type).ValueUnsafe(item.Fields[0].Value).Build(), + }).MustBuild() + + assert.Equal(t, expected, item.PropertyGroup(parent)) + + // check if a new id is generated + item.ID = nil + assert.False(t, item.PropertyGroup(parent).ID().IsNil()) +} + +func TestInitializerItem_PropertyGroupList(t *testing.T) { + parent := id.MustPropertySchemaID("reearth/marker") + item := &InitializerItem{ + ID: id.NewPropertyItemID().Ref(), + SchemaItem: id.PropertySchemaFieldID("hoge"), + Groups: []*InitializerGroup{{ + ID: id.NewPropertyItemID().Ref(), + }}, + } + + expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).GroupList().Groups([]*Group{ + NewItem().ID(*item.Groups[0].ID).Schema(parent, item.SchemaItem).Group().MustBuild(), + }).MustBuild() + + assert.Equal(t, expected, item.PropertyGroupList(parent)) + + // check if a new id is generated + item.ID = nil + assert.False(t, item.PropertyGroupList(parent).ID().IsNil()) +} + +func TestInitializerGroup_Clone(t *testing.T) { + item := &InitializerGroup{ + ID: id.NewPropertyItemID().Ref(), + Fields: []*InitializerField{{ + Field: id.PropertySchemaFieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Links: []*InitializerLink{{ + Dataset: id.NewDatasetID().Ref(), + Schema: id.NewDatasetSchemaID(), + Field: id.NewDatasetSchemaFieldID(), + }}, + }}, + } + + cloned := item.Clone() + + assert.NotSame(t, cloned, item) + assert.NotSame(t, cloned.Fields, item.Fields) + assert.NotSame(t, cloned.Fields[0], item.Fields[0]) + assert.Equal(t, cloned, item) +} + +func TestInitializerGroup_PropertyGroup(t *testing.T) { + parent := id.MustPropertySchemaID("reearth/marker") + parentItem := id.PropertySchemaFieldID("hoge") + item := &InitializerGroup{ + ID: 
id.NewPropertyItemID().Ref(), + Fields: []*InitializerField{{ + Field: id.PropertySchemaFieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFromUnsafe("aaa"), + }}, + } + + expected := NewItem().ID(*item.ID).Schema(parent, parentItem).Group().Fields([]*Field{ + NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).TypeUnsafe(item.Fields[0].Type).ValueUnsafe(item.Fields[0].Value).Build(), + }).MustBuild() + + p, err := item.PropertyGroup(parent, parentItem) + assert.NoError(t, err) + assert.Equal(t, expected, p) + + // check if a new id is generated + item.ID = nil + p, err = item.PropertyGroup(parent, parentItem) + assert.NoError(t, err) + assert.False(t, p.ID().IsNil()) +} + +func TestInitializerField_Clone(t *testing.T) { + field := &InitializerField{ + Field: id.PropertySchemaFieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Links: []*InitializerLink{{ + Dataset: id.NewDatasetID().Ref(), + Schema: id.NewDatasetSchemaID(), + Field: id.NewDatasetSchemaFieldID(), + }}, + } + cloned := field.Clone() + + assert.NotSame(t, cloned, field) + assert.NotSame(t, cloned.Links, field.Links) + assert.Equal(t, cloned, field) +} + +func TestInitializerField_PropertyField(t *testing.T) { + field := &InitializerField{ + Field: id.PropertySchemaFieldID("name"), + Type: ValueTypeString, + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Links: []*InitializerLink{{ + Dataset: id.NewDatasetID().Ref(), + Schema: id.NewDatasetSchemaID(), + Field: id.NewDatasetSchemaFieldID(), + }}, + } + + expected := NewFieldUnsafe(). + FieldUnsafe(field.Field). + TypeUnsafe(field.Type). + ValueUnsafe(field.Value). + LinksUnsafe(NewLinks([]*Link{NewLink(*field.Links[0].Dataset.CopyRef(), field.Links[0].Schema, field.Links[0].Field)})). 
+ Build() + + assert.Equal(t, expected, field.PropertyField()) +} + +func TestInitializerLink_Clone(t *testing.T) { + link := &InitializerLink{ + Dataset: id.NewDatasetID().Ref(), + Schema: id.NewDatasetSchemaID(), + Field: id.NewDatasetSchemaFieldID(), + } + cloned := link.Clone() + + assert.NotSame(t, cloned, link) + assert.Equal(t, cloned, link) +} + +func TestInitializerLink_PropertyLink(t *testing.T) { + link := &InitializerLink{ + Dataset: id.NewDatasetID().Ref(), + Schema: id.NewDatasetSchemaID(), + Field: id.NewDatasetSchemaFieldID(), + } + + expected := NewLink(*link.Dataset.CopyRef(), link.Schema, link.Field) + + assert.Equal(t, expected, link.PropertyLink()) +} diff --git a/pkg/property/item.go b/pkg/property/item.go new file mode 100644 index 000000000..0c24addba --- /dev/null +++ b/pkg/property/item.go @@ -0,0 +1,56 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +// Item _ +type Item interface { + ID() id.PropertyItemID + IDRef() *id.PropertyItemID + SchemaGroup() id.PropertySchemaFieldID + SchemaGroupRef() *id.PropertySchemaFieldID + Schema() id.PropertySchemaID + SchemaRef() *id.PropertySchemaID + HasLinkedField() bool + CollectDatasets() []id.DatasetID + FieldsByLinkedDataset(id.DatasetSchemaID, id.DatasetID) []*Field + IsDatasetLinked(id.DatasetSchemaID, id.DatasetID) bool + IsEmpty() bool + Prune() + MigrateSchema(context.Context, *Schema, dataset.Loader) + MigrateDataset(DatasetMigrationParam) + ValidateSchema(*SchemaGroup) error +} + +type itemBase struct { + ID id.PropertyItemID + Schema id.PropertySchemaID + SchemaGroup id.PropertySchemaFieldID +} + +// ToGroup _ +func ToGroup(i Item) *Group { + g, _ := i.(*Group) + return g +} + +// ToGroupList _ +func ToGroupList(i Item) *GroupList { + g, _ := i.(*GroupList) + return g +} + +// InitItemFrom _ +func InitItemFrom(psg *SchemaGroup) Item { + if psg == nil { + return nil + } + if psg.IsList() { + 
return InitGroupListFrom(psg) + } + return InitGroupFrom(psg) +} diff --git a/pkg/property/item_builder.go b/pkg/property/item_builder.go new file mode 100644 index 000000000..09ead6c9c --- /dev/null +++ b/pkg/property/item_builder.go @@ -0,0 +1,35 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +type ItemBuilder struct { + base itemBase +} + +func NewItem() *ItemBuilder { + return &ItemBuilder{} +} + +func (b *ItemBuilder) Group() *GroupBuilder { + return NewGroup().base(b.base) +} + +func (b *ItemBuilder) GroupList() *GroupListBuilder { + return NewGroupList().base(b.base) +} + +func (b *ItemBuilder) ID(id id.PropertyItemID) *ItemBuilder { + b.base.ID = id + return b +} + +func (b *ItemBuilder) NewID() *ItemBuilder { + b.base.ID = id.NewPropertyItemID() + return b +} + +func (b *ItemBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaFieldID) *ItemBuilder { + b.base.Schema = s + b.base.SchemaGroup = g + return b +} diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go new file mode 100644 index 000000000..230550f25 --- /dev/null +++ b/pkg/property/item_test.go @@ -0,0 +1,86 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestInitItemFrom(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + iid := id.NewPropertyItemID() + propertySchemaID := id.MustPropertySchemaID("xx/aa") + propertySchemaField1ID := id.PropertySchemaFieldID("aa") + testCases := []struct { + Name string + SG *SchemaGroup + Expected Item + }{ + { + Name: "nil psg", + }, + { + Name: "init item from group", + SG: sg, + Expected: NewGroup().ID(iid).Schema(propertySchemaID, 
propertySchemaField1ID).MustBuild(), + }, + { + Name: "init item from group list", + SG: sgl, + Expected: NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaField1ID).MustBuild(), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := InitItemFrom(tc.SG) + if res != nil { + assert.Equal(tt, tc.Expected.Schema(), res.Schema()) + assert.Equal(tt, tc.Expected.SchemaGroup(), res.SchemaGroup()) + } else { + assert.Nil(tt, tc.Expected) + } + }) + } +} + +func TestToGroup(t *testing.T) { + iid := id.NewPropertyItemID() + propertySchemaID := id.MustPropertySchemaID("xxx#1.1.1/aa") + propertySchemaField1ID := id.PropertySchemaFieldID("a") + propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + il := []Item{ + NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*Field{ + NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(ValueTypeString). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). 
+ Build(), + }).MustBuild(), + } + p := New().NewID().Scene(id.NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() + g := ToGroup(p.ItemBySchema(propertySchemaGroup1ID)) + assert.Equal(t, propertySchemaID, g.Schema()) + assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) + assert.Equal(t, iid, g.ID()) +} + +func TestToGroupList(t *testing.T) { + iid := id.NewPropertyItemID() + propertySchemaID := id.MustPropertySchemaID("xxx#1.1.1/aa") + propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + il := []Item{ + NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID).MustBuild(), + } + p := New().NewID().Scene(id.NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() + g := ToGroupList(p.ItemBySchema(propertySchemaGroup1ID)) + assert.Equal(t, propertySchemaID, g.Schema()) + assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) + assert.Equal(t, iid, g.ID()) +} diff --git a/pkg/property/link.go b/pkg/property/link.go new file mode 100644 index 000000000..e1f62a069 --- /dev/null +++ b/pkg/property/link.go @@ -0,0 +1,443 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +// Links _ +type Links struct { + links []*Link +} + +// Link _ +type Link struct { + dataset *id.DatasetID + schema *id.DatasetSchemaID + field *id.DatasetSchemaFieldID +} + +// NewLinks _ +func NewLinks(links []*Link) *Links { + if links == nil { + return nil + } + links2 := make([]*Link, 0, len(links)) + for _, l := range links { + l2 := *l + links2 = append(links2, &l2) + } + return &Links{ + links: links2, + } +} + +// Clone _ +func (l *Links) Clone() *Links { + if l == nil { + return nil + } + return &Links{ + links: append([]*Link{}, l.links...), + } +} + +// IsLinked _ +func (l *Links) IsLinked() bool { + return l != nil && l.links != nil && len(l.links) > 0 +} + +// IsLinkedFully _ +func (l *Links) IsLinkedFully() bool { + return l != nil && l.links != nil && 
len(l.links) > 0 && len(l.DatasetIDs()) == len(l.links) +} + +// Len _ +func (l *Links) Len() int { + if l == nil || l.links == nil { + return 0 + } + return len(l.links) +} + +// First _ +func (l *Links) First() *Link { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + return l.links[0] +} + +// Last _ +func (l *Links) Last() *Link { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + return l.links[len(l.links)-1] +} + +// LastValue _ +func (l *Links) LastValue(ds *dataset.Dataset) *dataset.Value { + return l.Last().Value(ds) +} + +// Validate _ +func (l *Links) Validate(dsm dataset.SchemaMap, dm dataset.Map) bool { + if l == nil || l.links == nil { + return false + } + firstDatasetSchema := l.First().DatasetSchema() + if firstDatasetSchema == nil { + return false + } + fields := l.DatasetSchemaFieldIDs() + if fields == nil || len(fields) != len(l.links) { + return false + } + firstDataset := l.First().Dataset() + + res, resf := dsm.GraphSearchByFields(*firstDatasetSchema, fields...) + if len(res) != len(l.links) || resf == nil { + return false + } + + if firstDataset != nil { + res2, resf2 := dm.GraphSearchByFields(*firstDataset, fields...) 
+ return len(res2) == len(l.links) && resf2 != nil + } + + return true +} + +// Replace _ +func (l *Links) Replace( + dsm map[id.DatasetSchemaID]id.DatasetSchemaID, + dm map[id.DatasetID]id.DatasetID, + fm map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID, +) { + if l == nil || l.links == nil { + return + } + + links := make([]*Link, 0, len(l.links)) + + for _, link := range l.links { + nl := &Link{} + + if link.schema != nil { + if nds, ok := dsm[*link.schema]; ok { + nid := nds + nl.schema = &nid + } else { + // Datasetใฏๅ…จใฆIDใŒๅ†ๅ‰ฒใ‚Šๅฝ“ใฆใ•ใ‚Œใ‚‹ใŸใ‚ใ€ใƒชใƒณใ‚ฏใŒ้€”ๅˆ‡ใ‚Œใฆใ„ใ‚‹ใ“ใจใซใชใ‚‹ + // ใ‚ˆใฃใฆใƒชใƒณใ‚ฏ่‡ชไฝ“ใŒ็„กๅŠนใซใชใ‚‹ + l.links = nil + return + } + } + + if link.dataset != nil { + if nds, ok := dm[*link.dataset]; ok { + nid := nds + nl.dataset = &nid + } else { + // Datasetใฏๅ…จใฆIDใŒๅ†ๅ‰ฒใ‚Šๅฝ“ใฆใ•ใ‚Œใ‚‹ใŸใ‚ใ€ใƒชใƒณใ‚ฏใŒ้€”ๅˆ‡ใ‚Œใฆใ„ใ‚‹ใ“ใจใซใชใ‚‹ + // ใ‚ˆใฃใฆใƒชใƒณใ‚ฏ่‡ชไฝ“ใŒ็„กๅŠนใซใชใ‚‹ + l.links = nil + return + } + } + + if link.field != nil { + if nf, ok := fm[*link.field]; ok { + nid := nf + nl.field = &nid + } else { + // Datasetใฏๅ…จใฆIDใŒๅ†ๅ‰ฒใ‚Šๅฝ“ใฆใ•ใ‚Œใ‚‹ใŸใ‚ใ€ใƒชใƒณใ‚ฏใŒ้€”ๅˆ‡ใ‚Œใฆใ„ใ‚‹ใ“ใจใซใชใ‚‹ + // ใ‚ˆใฃใฆใƒชใƒณใ‚ฏ่‡ชไฝ“ใŒ็„กๅŠนใซใชใ‚‹ + l.links = nil + return + } + } + + links = append(links, nl) + } + + l.links = links +} + +// Links _ +func (l *Links) Links() []*Link { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + links2 := make([]*Link, 0, len(l.links)) + for _, l := range l.links { + l2 := *l + links2 = append(links2, &l2) + } + return links2 +} + +// DatasetIDs _ +func (l *Links) DatasetIDs() []id.DatasetID { + if l == nil { + return nil + } + datasets := make([]id.DatasetID, 0, len(l.links)) + for _, i := range l.links { + if i.dataset != nil { + datasets = append(datasets, *i.dataset) + } else { + return datasets + } + } + return datasets +} + +// DatasetSchemaIDs _ +func (l *Links) DatasetSchemaIDs() []id.DatasetSchemaID { + 
if l == nil { + return nil + } + schemas := make([]id.DatasetSchemaID, 0, len(l.links)) + for _, i := range l.links { + if i.schema != nil { + schemas = append(schemas, *i.schema) + } else { + return schemas + } + } + return schemas +} + +// IsDatasetLinked _ +func (l *Links) IsDatasetLinked(s id.DatasetSchemaID, dsid id.DatasetID) bool { + if l == nil { + return false + } + for _, id := range l.DatasetSchemaIDs() { + if id == s { + return true + } + } + for _, id := range l.DatasetIDs() { + if id == dsid { + return true + } + } + return false +} + +// DatasetSchemaFieldIDs _ +func (l *Links) DatasetSchemaFieldIDs() []id.DatasetSchemaFieldID { + if l == nil { + return nil + } + fields := make([]id.DatasetSchemaFieldID, 0, len(l.links)) + for _, i := range l.links { + if i.field != nil { + fields = append(fields, *i.field) + } else { + return fields + } + } + return fields +} + +// HasDataset _ +func (l *Links) HasDataset(did id.DatasetID) bool { + if l == nil { + return false + } + for _, l2 := range l.links { + if l2 != nil && l2.dataset != nil && *l2.dataset == did { + return true + } + } + return false +} + +// HasDatasetSchema _ +func (l *Links) HasDatasetSchema(dsid id.DatasetSchemaID) bool { + if l == nil { + return false + } + for _, l2 := range l.links { + if l2 != nil && l2.schema != nil && *l2.schema == dsid { + return true + } + } + return false +} + +func (l *Links) HasDatasetOrSchema(dsid id.DatasetSchemaID, did id.DatasetID) bool { + if l == nil { + return false + } + for _, l2 := range l.links { + if l2 != nil && (l2.schema != nil && *l2.schema == dsid || l2.dataset != nil && *l2.dataset == did) { + return true + } + } + return false +} + +// NewLink _ +func NewLink(d id.DatasetID, ds id.DatasetSchemaID, f id.DatasetSchemaFieldID) *Link { + dataset := d + schema := ds + field := f + return &Link{ + dataset: &dataset, + schema: &schema, + field: &field, + } +} + +// NewLinkFieldOnly _ +func NewLinkFieldOnly(ds id.DatasetSchemaID, f 
id.DatasetSchemaFieldID) *Link { + schema := ds + field := f + return &Link{ + schema: &schema, + field: &field, + } +} + +// Dataset _ +func (l *Link) Dataset() *id.DatasetID { + if l == nil || l.dataset == nil { + return nil + } + dataset := *l.dataset + return &dataset +} + +// DatasetSchema _ +func (l *Link) DatasetSchema() *id.DatasetSchemaID { + if l == nil || l.schema == nil { + return nil + } + datasetSchema := *l.schema + return &datasetSchema +} + +// DatasetSchemaField _ +func (l *Link) DatasetSchemaField() *id.DatasetSchemaFieldID { + if l == nil || l.field == nil { + return nil + } + field := *l.field + return &field +} + +// Value _ +func (l *Link) Value(ds *dataset.Dataset) *dataset.Value { + if l == nil || ds == nil || l.dataset == nil || l.field == nil || ds.ID() != *l.dataset { + return nil + } + f := ds.Field(*l.field) + if f == nil { + return nil + } + return f.Value() +} + +// Validate _ +func (l *Link) Validate(dss *dataset.Schema, ds *dataset.Dataset) bool { + if l == nil || l.field == nil || l.schema == nil || dss == nil { + return false + } + + // DS + if dss.ID() != *l.schema { + return false + } + if f := dss.Field(*l.field); f == nil { + return false + } + + // D + if l.dataset != nil { + if ds == nil || ds.ID() != *l.dataset || ds.Schema() != dss.ID() { + return false + } + if f := ds.Field(*l.field); f == nil { + return false + } + } + + return true +} + +// IsEmpty _ +func (l *Links) IsEmpty() bool { + return l == nil || l.links == nil || len(l.links) == 0 +} + +// Clone _ +func (l *Link) Clone() *Link { + if l == nil { + return nil + } + return &Link{ + dataset: l.Dataset(), + schema: l.DatasetSchema(), + field: l.DatasetSchemaField(), + } +} + +// ApplyDataset _ +func (l *Link) ApplyDataset(ds *id.DatasetID) *Link { + if l == nil { + return nil + } + // if dataset is already set, it will not be overriden + if ds == nil || l.Dataset() != nil { + return l.Clone() + } + ds2 := *ds + return &Link{ + dataset: &ds2, + schema: 
l.DatasetSchema(), + field: l.DatasetSchemaField(), + } +} + +// ApplyDataset _ +func (l *Links) ApplyDataset(ds *id.DatasetID) *Links { + if l == nil || l.links == nil || len(l.links) == 0 { + return nil + } + + links := l.Clone() + first := links.First() + // if dataset is already set, it will not be overriden + if ds == nil || first.Dataset() != nil { + return links + } + + links.links[0] = first.ApplyDataset(ds) + return links +} + +func (l *Links) DatasetValue(ctx context.Context, d dataset.GraphLoader) (*dataset.Value, error) { + if l == nil || d == nil { + return nil, nil + } + dsid := l.First().Dataset() + dsfid := l.DatasetSchemaFieldIDs() + if dsid != nil && dsfid != nil { + _, dsf, err := d(ctx, *dsid, dsfid...) + if err != nil { + return nil, err + } + if dsf != nil { + return dsf.Value(), nil + } + } + return nil, nil +} diff --git a/pkg/property/link_test.go b/pkg/property/link_test.go new file mode 100644 index 000000000..2e1927404 --- /dev/null +++ b/pkg/property/link_test.go @@ -0,0 +1,481 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewLinks(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + dsid2 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + did2 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + dfid2 := id.NewDatasetSchemaFieldID() + + var lin *Links + assert.Nil(t, lin) + assert.Nil(t, lin.Clone()) + assert.Nil(t, lin.Links()) + assert.Nil(t, lin.DatasetIDs()) + assert.Nil(t, lin.DatasetSchemaIDs()) + assert.False(t, lin.IsLinked()) + assert.Equal(t, 0, lin.Len()) + + lin = NewLinks([]*Link{}) + assert.Equal(t, []id.DatasetID{}, lin.DatasetIDs()) + assert.Equal(t, []id.DatasetSchemaID{}, lin.DatasetSchemaIDs()) + assert.Equal(t, []id.DatasetSchemaFieldID{}, lin.DatasetSchemaFieldIDs()) + + ll := []*Link{ + NewLink(did1, dsid1, dfid1), + NewLink(did2, dsid2, dfid2), + 
} + dl := []id.DatasetID{did1, did2} + dsl := []id.DatasetSchemaID{dsid1, dsid2} + dsfl := []id.DatasetSchemaFieldID{dfid1, dfid2} + lin = NewLinks(ll) + assert.NotNil(t, lin) + assert.Equal(t, ll, lin.Links()) + assert.Equal(t, ll, lin.Clone().Links()) + assert.Equal(t, dl, lin.DatasetIDs()) + assert.Equal(t, dsl, lin.DatasetSchemaIDs()) + assert.Equal(t, dsfl, lin.DatasetSchemaFieldIDs()) + assert.True(t, lin.IsLinked()) + assert.Equal(t, 2, lin.Len()) +} + +func TestLinks_IsDatasetLinked(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + dsid2 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + did2 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + ll := []*Link{ + NewLink(did1, dsid1, dfid1), + } + + testCases := []struct { + Name string + DSS id.DatasetSchemaID + DS id.DatasetID + Links *Links + Expected bool + }{ + { + Name: "nil links", + Expected: false, + }, + { + Name: "true", + DSS: dsid1, + DS: did1, + Links: NewLinks(ll), + Expected: true, + }, + { + Name: "false", + DSS: dsid2, + DS: did2, + Links: NewLinks(ll), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Links.IsDatasetLinked(tc.DSS, tc.DS) + res2 := tc.Links.HasDataset(tc.DS) + res3 := tc.Links.HasDatasetSchema(tc.DSS) + assert.Equal(tt, tc.Expected, res) + assert.Equal(tt, tc.Expected, res2) + assert.Equal(tt, tc.Expected, res3) + }) + } +} + +func TestLinks_Validate(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + DSM dataset.SchemaMap + DM dataset.Map + Links *Links + Expected bool + }{ + { + Name: "nil links", + Expected: false, + }, + { + Name: "nil dataset schema for first link", + Links: NewLinks([]*Link{}), + Expected: false, + }, + { + Name: "len(res) != len(l.links)", + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: false, + }, + { + Name: 
"success", + DSM: dataset.SchemaMap{ + dsid1: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + }, + DM: dataset.Map{ + did1: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueFrom("vvv"), ""), + }).MustBuild(), + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Links.Validate(tc.DSM, tc.DM) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestLinks_Replace(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + dsid2 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + did2 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + dfid2 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + DSM map[id.DatasetSchemaID]id.DatasetSchemaID + DM map[id.DatasetID]id.DatasetID + FM map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID + Expected, Links *Links + }{ + { + Name: "nil links", + }, + { + Name: "success", + DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{ + dsid1: dsid2, + }, + DM: map[id.DatasetID]id.DatasetID{ + did1: did2, + }, + FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{ + dfid1: dfid2, + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLink(did2, dsid2, dfid2)}), + }, + { + Name: "dataset = nil", + DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{ + dsid1: dsid2, + }, + DM: map[id.DatasetID]id.DatasetID{}, + FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{ + dfid1: dfid2, + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + { + Name: "datasetschema = nil", + DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{}, + DM: map[id.DatasetID]id.DatasetID{ + did1: did2, + }, + FM: 
map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{ + dfid1: dfid2, + }, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + { + Name: "dataset schema field = nil", + DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{ + dsid1: dsid2, + }, + DM: map[id.DatasetID]id.DatasetID{ + did1: did2, + }, + FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{}, + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.Links.Replace(tc.DSM, tc.DM, tc.FM) + assert.Equal(tt, tc.Expected.Links(), tc.Links.Links()) + }) + } +} + +func TestLinks_ApplyDataset(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + Input *id.DatasetID + Expected, Links *Links + }{ + { + Name: "nil links", + }, + { + Name: "nil input dataset", + Links: NewLinks([]*Link{NewLinkFieldOnly(dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLinkFieldOnly(dsid1, dfid1)}), + }, + { + Name: "not nil dataset", + Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + { + Name: "apply new dataset", + Input: did1.Ref(), + Links: NewLinks([]*Link{NewLinkFieldOnly(dsid1, dfid1)}), + Expected: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Links.ApplyDataset(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestLink_Dataset(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + Link *Link + Expected *id.DatasetID + }{ + { + Name: "nil link", + }, + { + Name: "nil dataset", + Link: NewLinkFieldOnly(dsid1, dfid1), + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + 
Expected: did1.Ref(), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + res := tc.Link.Dataset() + assert.Equal(tt, tc.Expected, res) + }) + } + +} + +func TestLink_DatasetSchema(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + Link *Link + Expected *id.DatasetSchemaID + }{ + { + Name: "nil link", + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Expected: dsid1.Ref(), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + res := tc.Link.DatasetSchema() + assert.Equal(tt, tc.Expected, res) + }) + } + +} + +func TestLink_DatasetSchemaField(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + Link *Link + Expected *id.DatasetSchemaFieldID + }{ + { + Name: "nil link", + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Expected: dfid1.Ref(), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + res := tc.Link.DatasetSchemaField() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestLink_Value(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + dsf := []*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("aaa"), ""), + } + + testCases := []struct { + Name string + Link *Link + Input *dataset.Dataset + Expected *dataset.Value + }{ + { + Name: "nil link", + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Input: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{}).MustBuild(), + }, + { + Name: "success", + Link: NewLink(did1, dsid1, dfid1), + Input: dataset.New().ID(did1).Schema(dsid1).Fields(dsf).MustBuild(), + Expected: dataset.ValueTypeString.ValueFrom("aaa"), + }, + } + 
for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + res := tc.Link.Value(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} +func TestLink_Validate(t *testing.T) { + dsid1 := id.NewDatasetSchemaID() + did1 := id.NewDatasetID() + dfid1 := id.NewDatasetSchemaFieldID() + + testCases := []struct { + Name string + DS *dataset.Dataset + DSS *dataset.Schema + Link *Link + Expected bool + }{ + { + Name: "nil links", + Expected: false, + }, + { + Name: "input schema id != link schema", + DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueFrom("vvv"), "")}).MustBuild(), + DSS: dataset.NewSchema().NewID().Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: false, + }, + { + Name: "nil input dataset", + DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: false, + }, + { + Name: "nil dataset field", + DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{}).MustBuild(), + DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). + MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: false, + }, + { + Name: "valid", + DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ + dataset.NewField(dfid1, dataset.ValueFrom("vvv"), "")}).MustBuild(), + DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ + dataset.NewSchemaField(). + ID(dfid1). + Ref(dsid1.Ref()).Type(dataset.ValueTypeString). 
+ MustBuild(), + }).MustBuild(), + Link: NewLink(did1, dsid1, dfid1), + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Link.Validate(tc.DSS, tc.DS) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestLink_Clone(t *testing.T) { + var l *Link + assert.Nil(t, l.Clone()) + l = NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + assert.Equal(t, l, l.Clone()) +} diff --git a/pkg/property/list.go b/pkg/property/list.go new file mode 100644 index 000000000..9fe0af64f --- /dev/null +++ b/pkg/property/list.go @@ -0,0 +1,96 @@ +package property + +import ( + "sort" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type List []*Property + +func (l List) Schemas() []id.PropertySchemaID { + schemas := make([]id.PropertySchemaID, 0, len(l)) + m := map[id.PropertySchemaID]struct{}{} + for _, p := range l { + s := p.Schema() + if _, ok := m[s]; ok { + continue + } + schemas = append(schemas, s) + m[s] = struct{}{} + } + return schemas +} + +func (l List) Map() Map { + m := make(Map, len(l)) + return m.Add(l...) +} + +type Map map[id.PropertyID]*Property + +func MapFrom(properties ...*Property) Map { + return Map{}.Add(properties...) +} + +func (m Map) Add(properties ...*Property) Map { + if m == nil { + m = Map{} + } + for _, p := range properties { + if p == nil { + continue + } + m[p.ID()] = p + } + return m +} + +func (m Map) List() List { + if m == nil { + return nil + } + list := make(List, 0, len(m)) + for _, l := range m { + list = append(list, l) + } + return list +} + +func (m Map) Clone() Map { + if m == nil { + return Map{} + } + m2 := make(Map, len(m)) + for k, v := range m { + m2[k] = v + } + return m2 +} + +func (m Map) Merge(m2 Map) Map { + if m == nil { + return m2.Clone() + } + m3 := m.Clone() + if m2 == nil { + return m3 + } + + return m3.Add(m2.List()...) 
+} + +func (m Map) Keys() []id.PropertyID { + keys := make([]id.PropertyID, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.SliceStable(keys, func(i, j int) bool { + return id.ID(keys[i]).Compare(id.ID(keys[j])) < 0 + }) + return keys +} + +func (m Map) Len() int { + return len(m) +} diff --git a/pkg/property/list_test.go b/pkg/property/list_test.go new file mode 100644 index 000000000..1868622ee --- /dev/null +++ b/pkg/property/list_test.go @@ -0,0 +1,102 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var ( + sf = NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg = NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + p = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() + p2 = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() +) + +func TestMap_Add(t *testing.T) { + testCases := []struct { + Name string + Input *Property + M, Expected Map + }{ + { + Name: "nil map", + }, + { + Name: "add property list", + Input: p, + M: Map{}, + Expected: Map{p.ID(): p}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.M.Add(tc.Input) + assert.Equal(tt, tc.Expected, tc.M) + assert.Equal(tt, tc.Expected.List(), tc.M.List()) + }) + } +} + +func TestMapFrom(t *testing.T) { + m := MapFrom(p) + e := Map{p.ID(): p} + assert.Equal(t, e, m) +} + +func TestMap_Clone(t *testing.T) { + testCases := []struct { + Name string + M, Expected Map + }{ + { + Name: "nil map", + Expected: Map{}, + }, + { + Name: "add property list", + M: Map{p.ID(): p}, + Expected: Map{p.ID(): p}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.M.Clone() + 
assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestMap_Merge(t *testing.T) { + testCases := []struct { + Name string + M1, M2, Expected Map + }{ + { + Name: "nil map", + Expected: Map{}, + }, + { + Name: "add property list", + M1: Map{p.ID(): p}, + M2: Map{p2.ID(): p2}, + Expected: Map{p.ID(): p, p2.ID(): p2}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.M1.Merge(tc.M2) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/property/loader.go b/pkg/property/loader.go new file mode 100644 index 000000000..f2bf32801 --- /dev/null +++ b/pkg/property/loader.go @@ -0,0 +1,45 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type Loader func(context.Context, ...id.PropertyID) (List, error) + +type SchemaLoader func(context.Context, ...id.PropertySchemaID) (SchemaList, error) + +func LoaderFrom(data []*Property) Loader { + return func(ctx context.Context, ids ...id.PropertyID) (List, error) { + res := make([]*Property, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[id.PropertyID]*Property) Loader { + return func(ctx context.Context, ids ...id.PropertyID) (List, error) { + res := make([]*Property, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} diff --git a/pkg/property/loader_test.go b/pkg/property/loader_test.go new file mode 100644 index 000000000..f2d1f56a9 --- /dev/null +++ b/pkg/property/loader_test.go @@ -0,0 +1,46 @@ +package property + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestLoaderFrom(t *testing.T) { + scene 
:= id.NewSceneID() + ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + pid1 := id.NewPropertyID() + pid2 := id.NewPropertyID() + p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() + p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() + pl := LoaderFrom([]*Property{ + p1, + p2, + New().NewID().Scene(scene).Schema(ps).MustBuild(), + }) + res, err := pl(context.Background(), pid1, pid2) + + assert.Equal(t, List{p1, p2}, res) + assert.NoError(t, err) +} + +func TestLoaderFromMap(t *testing.T) { + scene := id.NewSceneID() + ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + pid1 := id.NewPropertyID() + pid2 := id.NewPropertyID() + pid3 := id.NewPropertyID() + p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() + p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() + p3 := New().ID(pid3).Scene(scene).Schema(ps).MustBuild() + pl := LoaderFromMap(map[id.PropertyID]*Property{ + pid1: p1, + pid2: p2, + pid3: p3, + }) + res, err := pl(context.Background(), pid1, pid2) + assert.Equal(t, List{p1, p2}, res) + assert.NoError(t, err) +} diff --git a/pkg/property/merged.go b/pkg/property/merged.go new file mode 100644 index 000000000..955756750 --- /dev/null +++ b/pkg/property/merged.go @@ -0,0 +1,309 @@ +package property + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +// Merged represents a merged property from two properties +type Merged struct { + Original *id.PropertyID + Parent *id.PropertyID + Schema id.PropertySchemaID + LinkedDataset *id.DatasetID + Groups []*MergedGroup +} + +// MergedGroup represents a group of Merged +type MergedGroup struct { + Original *id.PropertyItemID + Parent *id.PropertyItemID + SchemaGroup id.PropertySchemaFieldID + LinkedDataset *id.DatasetID + Groups []*MergedGroup + Fields []*MergedField +} + +// MergedField represents a field of Merged +type MergedField struct { + ID id.PropertySchemaFieldID + Type ValueType + Value *Value + Links *Links + 
Overridden bool +} + +// Datasets returns associated dataset IDs +func (m *Merged) Datasets() []id.DatasetID { + if m == nil { + return nil + } + ids := []id.DatasetID{} + for _, g := range m.Groups { + ids = append(ids, g.Datasets()...) + } + return ids +} + +// Datasets returns associated dataset IDs +func (m *MergedGroup) Datasets() []id.DatasetID { + if m == nil { + return nil + } + ids := []id.DatasetID{} + for _, f := range m.Fields { + if f == nil { + continue + } + ids = append(ids, f.Links.DatasetIDs()...) + } + return ids +} + +// MergedMetadata _ +type MergedMetadata struct { + Original *id.PropertyID + Parent *id.PropertyID + LinkedDataset *id.DatasetID +} + +// MergedMetadataFrom generates MergedMetadata from single property +func MergedMetadataFrom(p id.PropertyID) MergedMetadata { + p2 := p + return MergedMetadata{ + Original: &p2, + } +} + +// Properties returns associated property IDs +func (m MergedMetadata) Properties() []id.PropertyID { + ids := make([]id.PropertyID, 0, 2) + if m.Original != nil { + ids = append(ids, *m.Original) + } + if m.Parent != nil { + ids = append(ids, *m.Parent) + } + return ids +} + +// Merge merges two properties +func (m MergedMetadata) Merge(o *Property, p *Property) *Merged { + if m.Original != nil && (o == nil || *m.Original != o.ID()) { + return nil + } + if m.Parent != nil && (p == nil || *m.Parent != p.ID()) { + return nil + } + return Merge(o, p, m.LinkedDataset) +} + +func (f *MergedField) DatasetValue(ctx context.Context, d dataset.GraphLoader) (*dataset.Value, error) { + if f == nil { + return nil, nil + } + return f.Links.DatasetValue(ctx, d) +} + +// Merge merges two properties +func Merge(o *Property, p *Property, linked *id.DatasetID) *Merged { + if o == nil && p == nil || o != nil && p != nil && o.Schema() != p.Schema() { + return nil + } + + // copy id + var linked2 *id.DatasetID + if linked != nil { + linked3 := *linked + linked2 = &linked3 + } + + var schema id.PropertySchemaID + if p != nil { + 
schema = p.Schema() + } else if o != nil { + schema = o.Schema() + } + + return &Merged{ + Original: o.IDRef(), + Parent: p.IDRef(), + Schema: schema, + Groups: mergeItems(o.Items(), p.Items(), linked2), + LinkedDataset: linked2, + } +} + +func mergeItems(i1, i2 []Item, linked *id.DatasetID) []*MergedGroup { + if i1 == nil && i2 == nil || len(i1) == 0 && len(i2) == 0 { + return nil + } + + consumed := map[id.PropertyItemID]struct{}{} + groups := []*MergedGroup{} + + for _, item := range i1 { + sgid := item.SchemaGroup() + + var parentItem Item + for _, item2 := range i2 { + if item2.SchemaGroup() == sgid { + parentItem = item2 + consumed[item2.ID()] = struct{}{} + } + } + + if mg := mergeItem(item, parentItem, linked); mg != nil { + groups = append(groups, mg) + } + } + + for _, item := range i2 { + if _, ok := consumed[item.ID()]; ok { + continue + } + + if mg := mergeItem(nil, item, linked); mg != nil { + groups = append(groups, mg) + } + } + + return groups +} + +func groupList(o, p Item) (*GroupList, *GroupList) { + return ToGroupList(o), ToGroupList(p) +} + +func group(o, p Item) (*Group, *Group) { + return ToGroup(o), ToGroup(p) +} + +func mergeItem(o, p Item, linked *id.DatasetID) *MergedGroup { + if o == nil && p == nil || o != nil && p != nil && o.SchemaGroup() != p.SchemaGroup() { + return nil + } + + var mgroups []*MergedGroup + var mfields []*MergedField + + if og, pg := groupList(o, p); og != nil || pg != nil { + // List merging + var groups []*Group + // if original exists, original is used + if og != nil { + groups = og.Groups() + } else { + groups = pg.Groups() + } + mgroups = make([]*MergedGroup, 0, len(groups)) + for _, gg := range groups { + var mi *MergedGroup + if og != nil { + mi = mergeItem(gg, nil, linked) + } else { + mi = mergeItem(nil, gg, linked) + } + if mi != nil { + mgroups = append(mgroups, mi) + } + } + } else if og, pg := group(o, p); og != nil || pg != nil { + // Group merging + fieldKeys := allFields(og.FieldIDs(), pg.FieldIDs()) 
+ mfields = make([]*MergedField, 0, len(fieldKeys)) + for _, k := range fieldKeys { + mf := mergeField(og.Field(k), pg.Field(k), linked) + if mf != nil { + mfields = append(mfields, mf) + } + } + } + + var oid, pid *id.PropertyItemID + var sg id.PropertySchemaFieldID + if o != nil { + oid = o.IDRef() + sg = o.SchemaGroup() + } + if p != nil { + pid = p.IDRef() + sg = p.SchemaGroup() + } + + return &MergedGroup{ + Original: oid, + Parent: pid, + SchemaGroup: sg, + Fields: mfields, + Groups: mgroups, + LinkedDataset: linked, + } +} + +func mergeField(original, parent *Field, linked *id.DatasetID) *MergedField { + if original == nil && parent == nil || original != nil && parent != nil && (original.Field() != parent.Field() || original.Type() != parent.Type()) { + return nil + } + + var t ValueType + if original != nil { + t = original.Type() + } else if parent != nil { + t = parent.Type() + } + + var fid id.PropertySchemaFieldID + if original != nil { + fid = original.Field() + } else if parent != nil { + fid = parent.Field() + } + + var v *Value + overridden := false + + if original == nil && parent != nil { + // parent value is used + v = parent.Value().Clone() + } else if original != nil { + // overrided value is used + v = original.Value().Clone() + overridden = parent != nil + } + + var links *Links + if l := original.Links(); l != nil { + // original links are used but dataset is overrided + links = l.ApplyDataset(linked) + overridden = parent != nil + } else if l := parent.Links(); l != nil { + // parent links are used and dataset is overrided + links = l.ApplyDataset(linked) + } + + return &MergedField{ + ID: fid, + Value: v, + Type: t, + Links: links, + Overridden: overridden, + } +} + +func allFields(args ...[]id.PropertySchemaFieldID) []id.PropertySchemaFieldID { + consumedKeys := map[id.PropertySchemaFieldID]struct{}{} + result := []id.PropertySchemaFieldID{} + for _, fields := range args { + for _, f := range fields { + if _, ok := consumedKeys[f]; ok { + 
continue + } + consumedKeys[f] = struct{}{} + result = append(result, f) + } + } + return result +} diff --git a/pkg/property/merged_test.go b/pkg/property/merged_test.go new file mode 100644 index 000000000..dcb02f695 --- /dev/null +++ b/pkg/property/merged_test.go @@ -0,0 +1,413 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + + "github.com/stretchr/testify/assert" +) + +func TestMerge(t *testing.T) { + ds := id.NewDatasetSchemaID() + df := id.NewDatasetSchemaFieldID() + d := id.NewDatasetID() + d2 := id.NewDatasetID() + opid := id.NewPropertyID() + ppid := id.NewPropertyID() + psid := id.MustPropertySchemaID("hoge#0.1.0/fff") + psid2 := id.MustPropertySchemaID("hoge#0.1.0/aaa") + psgid1 := id.PropertySchemaFieldID("group1") + psgid2 := id.PropertySchemaFieldID("group2") + psgid3 := id.PropertySchemaFieldID("group3") + psgid4 := id.PropertySchemaFieldID("group4") + i1id := id.NewPropertyItemID() + i2id := id.NewPropertyItemID() + i3id := id.NewPropertyItemID() + i4id := id.NewPropertyItemID() + i5id := id.NewPropertyItemID() + i6id := id.NewPropertyItemID() + i7id := id.NewPropertyItemID() + i8id := id.NewPropertyItemID() + + fields1 := []*Field{ + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("a")).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("b")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("b")).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("e")).TypeUnsafe(ValueTypeString).LinksUnsafe(NewLinks([]*Link{NewLink(d2, ds, df)})).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).TypeUnsafe(ValueTypeNumber).Build(), + } + + fields2 := []*Field{ + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("1")).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("c")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("2")).Build(), + 
NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("d")).TypeUnsafe(ValueTypeString).LinksUnsafe(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).TypeUnsafe(ValueTypeString).Build(), + } + + groups1 := []*Group{ + NewGroup().ID(i7id).Schema(psid, psgid1).Fields(fields1).MustBuild(), + } + + groups2 := []*Group{ + NewGroup().ID(i8id).Schema(psid, psgid1).Fields(fields2).MustBuild(), + } + + items1 := []Item{ + NewGroupList().ID(i1id).Schema(psid, psgid1).Groups(groups1).MustBuild(), + NewGroup().ID(i2id).Schema(psid, psgid2).Fields(fields1).MustBuild(), + NewGroup().ID(i3id).Schema(psid, psgid3).Fields(fields1).MustBuild(), + } + + items2 := []Item{ + NewGroupList().ID(i4id).Schema(psid, psgid1).Groups(groups2).MustBuild(), + NewGroup().ID(i5id).Schema(psid, psgid2).Fields(fields2).MustBuild(), + NewGroup().ID(i6id).Schema(psid, psgid4).Fields(fields2).MustBuild(), + } + + sid := id.NewSceneID() + op := New().ID(opid).Scene(sid).Schema(psid).Items(items1).MustBuild() + pp := New().NewID().Scene(sid).Schema(psid2).MustBuild() + pp2 := New().ID(ppid).Scene(sid).Schema(psid).Items(items2).MustBuild() + + // Merge(op, pp2, &d) + expected1 := &Merged{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i1id, + Parent: &i4id, + SchemaGroup: psgid1, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i7id, + Parent: nil, + SchemaGroup: psgid1, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + 
}, + }, + { + Original: &i2id, + Parent: &i5id, + SchemaGroup: psgid2, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + Overridden: true, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("c"), + Value: ValueTypeString.ValueFromUnsafe("2"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + { + Original: &i3id, + Parent: nil, + SchemaGroup: psgid3, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + { + Original: nil, + Parent: &i6id, + SchemaGroup: psgid4, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("1"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("c"), + Value: ValueTypeString.ValueFromUnsafe("2"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeString, + }, + }, + }, + }, + } + + // Merge(op, nil, &d) + expected2 := &Merged{ + Original: opid.Ref(), + Parent: nil, + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + 
Original: &i1id, + Parent: nil, + SchemaGroup: psgid1, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: &i7id, + Parent: nil, + SchemaGroup: psgid1, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + }, + }, + { + Original: &i2id, + Parent: nil, + SchemaGroup: psgid2, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + { + Original: &i3id, + Parent: nil, + SchemaGroup: psgid3, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("e"), + Links: NewLinks([]*Link{NewLink(d2, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeNumber, + }, + }, + }, + }, + } + + // Merge(nil, pp2, &d) + expected3 := &Merged{ + Original: nil, + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + Original: nil, + Parent: &i4id, + SchemaGroup: psgid1, + LinkedDataset: &d, + 
Groups: []*MergedGroup{ + { + Original: nil, + Parent: &i8id, + SchemaGroup: psgid1, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("1"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("c"), + Value: ValueTypeString.ValueFromUnsafe("2"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeString, + }, + }, + }, + }, + }, + { + Original: nil, + Parent: &i5id, + SchemaGroup: psgid2, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("1"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("c"), + Value: ValueTypeString.ValueFromUnsafe("2"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeString, + }, + }, + }, + { + Original: nil, + Parent: &i6id, + SchemaGroup: psgid4, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("1"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("c"), + Value: ValueTypeString.ValueFromUnsafe("2"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("d"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("f"), + Type: ValueTypeString, + }, + }, + }, + }, + } + + merged0 := Merge(nil, nil, nil) + assert.Nil(t, merged0) + merged1 := Merge(op, pp, nil) + assert.Nil(t, merged1) + merged2 := Merge(op, pp2, &d) + assert.Equal(t, expected1, merged2) + merged3 := Merge(op, nil, &d) + assert.Equal(t, expected2, merged3) + merged4 := Merge(nil, pp2, &d) + assert.Equal(t, expected3, 
merged4) +} diff --git a/pkg/property/pointer.go b/pkg/property/pointer.go new file mode 100644 index 000000000..c7933a6e6 --- /dev/null +++ b/pkg/property/pointer.go @@ -0,0 +1,201 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +// Pointer _ +type Pointer struct { + schemaGroup *id.PropertySchemaFieldID + item *id.PropertyItemID + field *id.PropertySchemaFieldID +} + +// NewPointer _ +func NewPointer(sg *id.PropertySchemaFieldID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) *Pointer { + if sg == nil && i == nil && f == nil { + return nil + } + return &Pointer{ + schemaGroup: sg.CopyRef(), + item: i.CopyRef(), + field: f.CopyRef(), + } +} + +// PointField _ +func PointField(sg *id.PropertySchemaFieldID, i *id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { + return &Pointer{ + schemaGroup: sg.CopyRef(), + item: i.CopyRef(), + field: &f, + } +} + +// PointFieldOnly _ +func PointFieldOnly(fid id.PropertySchemaFieldID) *Pointer { + return &Pointer{ + field: &fid, + } +} + +// PointItemBySchema _ +func PointItemBySchema(sg id.PropertySchemaFieldID) *Pointer { + return &Pointer{ + schemaGroup: &sg, + } +} + +// PointItem _ +func PointItem(i id.PropertyItemID) *Pointer { + return &Pointer{ + item: &i, + } +} + +// PointFieldBySchemaGroup _ +func PointFieldBySchemaGroup(sg id.PropertySchemaFieldID, f id.PropertySchemaFieldID) *Pointer { + return &Pointer{ + schemaGroup: &sg, + field: &f, + } +} + +// PointFieldBySchemaGroupAndItem _ +func PointFieldBySchemaGroupAndItem(sg id.PropertySchemaFieldID, i id.PropertyItemID) *Pointer { + return &Pointer{ + schemaGroup: &sg, + item: &i, + } +} + +// PointFieldByItem _ +func PointFieldByItem(i id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { + return &Pointer{ + item: &i, + field: &f, + } +} + +// Clone _ +func (p *Pointer) Clone() *Pointer { + if p == nil { + return nil + } + return &Pointer{ + field: p.field.CopyRef(), + item: p.item.CopyRef(), + schemaGroup: 
p.schemaGroup.CopyRef(), + } +} + +// ItemBySchemaGroupAndItem _ +func (p *Pointer) ItemBySchemaGroupAndItem() (i id.PropertySchemaFieldID, i2 id.PropertyItemID, ok bool) { + if p == nil || p.schemaGroup == nil || p.item == nil { + ok = false + return + } + i = *p.schemaGroup + i2 = *p.item + ok = true + return +} + +// ItemBySchemaGroup _ +func (p *Pointer) ItemBySchemaGroup() (i id.PropertySchemaFieldID, ok bool) { + if p == nil || p.schemaGroup == nil { + ok = false + return + } + i = *p.schemaGroup + ok = true + return +} + +// SchemaGroupAndItem _ +func (p *Pointer) SchemaGroupAndItem() (i id.PropertySchemaFieldID, i2 id.PropertyItemID, ok bool) { + ok = false + if p == nil { + return + } + if p.schemaGroup != nil { + i = *p.schemaGroup + ok = true + } + if p.item != nil { + i2 = *p.item + ok = true + } + return +} + +// Item _ +func (p *Pointer) Item() (i id.PropertyItemID, ok bool) { + if p == nil || p.item == nil { + ok = false + return + } + i = *p.item + ok = true + return +} + +func (p *Pointer) ItemRef() *id.PropertyItemID { + if p == nil || p.item == nil { + return nil + } + f := *p.item + return &f +} + +// FieldByItem _ +func (p *Pointer) FieldByItem() (i id.PropertyItemID, f id.PropertySchemaFieldID, ok bool) { + if p == nil || p.item == nil || p.schemaGroup != nil || p.field == nil { + ok = false + return + } + i = *p.item + f = *p.field + ok = true + return +} + +// FieldBySchemaGroup _ +func (p *Pointer) FieldBySchemaGroup() (sg id.PropertySchemaFieldID, f id.PropertySchemaFieldID, ok bool) { + if p == nil || p.schemaGroup == nil || p.item != nil || p.field == nil { + ok = false + return + } + sg = *p.schemaGroup + f = *p.field + ok = true + return +} + +// Field _ +func (p *Pointer) Field() (f id.PropertySchemaFieldID, ok bool) { + if p == nil || p.field == nil { + ok = false + return + } + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldRef() *id.PropertySchemaFieldID { + if p == nil || p.field == nil { + return nil + } + f := 
*p.field + return &f +} + +// GetAll _ +func (p *Pointer) GetAll() (sg *id.PropertySchemaFieldID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) { + if p == nil { + return + } + sg = p.schemaGroup.CopyRef() + i = p.item.CopyRef() + f = p.field.CopyRef() + return +} diff --git a/pkg/property/pointer_test.go b/pkg/property/pointer_test.go new file mode 100644 index 000000000..bce148810 --- /dev/null +++ b/pkg/property/pointer_test.go @@ -0,0 +1,77 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestPointer(t *testing.T) { + iid := id.NewPropertyItemID() + sgid := id.PropertySchemaFieldID("foo") + fid := id.PropertySchemaFieldID("hoge") + + var p *Pointer + var ok bool + + p = PointItem(iid) + i, ok := p.Item() + assert.True(t, ok) + assert.Equal(t, iid, i) + _, ok = p.ItemBySchemaGroup() + assert.False(t, ok) + _, _, ok = p.FieldByItem() + assert.False(t, ok) + _, _, ok = p.FieldBySchemaGroup() + assert.False(t, ok) + + p = PointItemBySchema(sgid) + _, ok = p.Item() + assert.False(t, ok) + sg, ok := p.ItemBySchemaGroup() + assert.True(t, ok) + assert.Equal(t, sgid, sg) + _, _, ok = p.FieldByItem() + assert.False(t, ok) + _, _, ok = p.FieldBySchemaGroup() + assert.False(t, ok) + + p = PointFieldByItem(iid, fid) + i, ok = p.Item() + assert.True(t, ok) + assert.Equal(t, iid, i) + _, ok = p.ItemBySchemaGroup() + assert.False(t, ok) + i, f, ok := p.FieldByItem() + assert.True(t, ok) + assert.Equal(t, iid, i) + assert.Equal(t, fid, f) + _, _, ok = p.FieldBySchemaGroup() + assert.False(t, ok) + + p = PointFieldBySchemaGroup(sgid, fid) + _, ok = p.Item() + assert.False(t, ok) + sg, ok = p.ItemBySchemaGroup() + assert.True(t, ok) + assert.Equal(t, sgid, sg) + _, _, ok = p.FieldByItem() + assert.False(t, ok) + sg, f, ok = p.FieldBySchemaGroup() + assert.True(t, ok) + assert.Equal(t, sgid, sg) + assert.Equal(t, fid, f) + + p = PointField(&sgid, &iid, fid) + i, ok = p.Item() + 
assert.True(t, ok) + assert.Equal(t, iid, i) + sg, ok = p.ItemBySchemaGroup() + assert.True(t, ok) + assert.Equal(t, sgid, sg) + _, _, ok = p.FieldByItem() + assert.False(t, ok) + _, _, ok = p.FieldBySchemaGroup() + assert.False(t, ok) +} diff --git a/pkg/property/property.go b/pkg/property/property.go new file mode 100644 index 000000000..55be1412f --- /dev/null +++ b/pkg/property/property.go @@ -0,0 +1,493 @@ +package property + +import ( + "context" + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Property struct { + id id.PropertyID + scene id.SceneID + schema id.PropertySchemaID + items []Item +} + +func (p *Property) ID() id.PropertyID { + return p.id +} + +func (p *Property) IDRef() *id.PropertyID { + if p == nil { + return nil + } + return p.id.Ref() +} + +func (p *Property) Scene() id.SceneID { + return p.scene +} + +func (p *Property) Schema() id.PropertySchemaID { + return p.schema +} + +func (p *Property) Field(ptr *Pointer) (*Field, *GroupList, *Group) { + if p == nil || ptr == nil { + return nil, nil, nil + } + + if iid, fid, ok := ptr.FieldByItem(); ok { + if i, gl := p.Item(iid); i != nil { + g := ToGroup(i) + return g.Field(fid), gl, g + } + } else if sgid, fid, ok := ptr.FieldBySchemaGroup(); ok { + if i := p.ItemBySchema(sgid); i != nil { + g := ToGroup(i) + return g.Field(fid), nil, g + } + } + + return nil, nil, nil +} + +func (p *Property) Items() []Item { + if p == nil { + return nil + } + return append([]Item{}, p.items...) +} + +func (p *Property) Item(id id.PropertyItemID) (Item, *GroupList) { + if p == nil { + return nil, nil + } + for _, f := range p.items { + if f.ID() == id { + return f, nil + } + if gl := ToGroupList(f); gl != nil { + if i := gl.GetGroup(id); i != nil { + return i, gl + } + } + } + return nil, nil +} + +// ItemBySchema returns a root item by a schema field ID. 
+func (p *Property) ItemBySchema(id id.PropertySchemaFieldID) Item { + if p == nil { + return nil + } + for _, f := range p.items { + if f.SchemaGroup() == id { + return f + } + } + return nil +} + +func (p *Property) GroupBySchema(id id.PropertySchemaFieldID) *Group { + i := p.ItemBySchema(id) + if i == nil { + return nil + } + if g := ToGroup(i); g != nil { + return g + } + return nil +} + +func (p *Property) GroupListBySchema(id id.PropertySchemaFieldID) *GroupList { + i := p.ItemBySchema(id) + if i == nil { + return nil + } + if g := ToGroupList(i); g != nil { + return g + } + return nil +} + +func (p *Property) ItemByPointer(ptr *Pointer) (Item, *GroupList) { + if p == nil || ptr == nil { + return nil, nil + } + if pid, ok := ptr.Item(); ok { + return p.Item(pid) + } else if sgid, ok := ptr.ItemBySchemaGroup(); ok { + return p.ItemBySchema(sgid), nil + } + return nil, nil +} + +func (p *Property) ListItem(ptr *Pointer) (*Group, *GroupList) { + if p == nil { + return nil, nil + } + if sgid, i, ok := ptr.ItemBySchemaGroupAndItem(); ok { + if item := ToGroupList(p.ItemBySchema(sgid)); item != nil { + return item.GetGroup(i), item + } + } else if iid, ok := ptr.Item(); ok { + for _, item := range p.items { + litem := ToGroupList(item) + if g := litem.GetGroup(iid); g != nil { + return g, litem + } + } + } else if sgid, ok := ptr.ItemBySchemaGroup(); ok { + if item := ToGroupList(p.ItemBySchema(sgid)); item != nil { + return nil, item + } + } + return nil, nil +} + +func (p *Property) HasLinkedField() bool { + if p == nil { + return false + } + for _, f := range p.items { + if f.HasLinkedField() { + return true + } + } + return false +} + +func (p *Property) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { + if p == nil { + return nil + } + res := []*Field{} + for _, g := range p.items { + res = append(res, g.FieldsByLinkedDataset(s, i)...) 
+ } + return res +} + +func (p *Property) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { + if p == nil { + return false + } + for _, g := range p.items { + if g.IsDatasetLinked(s, i) { + return true + } + } + return false +} + +func (p *Property) CollectDatasets() []id.DatasetID { + if p == nil { + return nil + } + res := []id.DatasetID{} + + for _, f := range p.items { + res = append(res, f.CollectDatasets()...) + } + + return res +} + +func (p *Property) RemoveItem(ptr *Pointer) { + if p == nil { + return + } + sgid, iid, ok := ptr.SchemaGroupAndItem() + if !ok { + return + } + for i, item := range p.items { + if item.ID() == iid || item.SchemaGroup() == sgid { + p.items = append(p.items[:i], p.items[i+1:]...) + return + } + } +} + +func (p *Property) RemoveField(ptr *Pointer) { + if p == nil { + return + } + + fid, ok := ptr.Field() + if !ok { + return + } + + item, _ := p.ItemByPointer(ptr) + if group := ToGroup(item); group != nil { + group.RemoveField(fid) + } +} + +func (p *Property) Prune() { + if p == nil { + return + } + for _, f := range p.items { + if f.IsEmpty() { + p.RemoveItem(PointItem(f.ID())) + } + } +} + +func (p *Property) UpdateValue(ps *Schema, ptr *Pointer, v *Value) (*Field, *GroupList, *Group, error) { + field, gl, g, created := p.GetOrCreateField(ps, ptr) + if field == nil || created && v == nil { + // The field is empty and will be removed by prune, so it does not make sense + return nil, nil, nil, nil + } + + if err := field.Update(v, ps.Field(field.Field())); err != nil { + return nil, nil, nil, err + } + + if v == nil { + p.Prune() + if field.IsEmpty() { + field = nil + } + } + + return field, gl, g, nil +} + +func (p *Property) UnlinkAllByDataset(s id.DatasetSchemaID, ds id.DatasetID) { + fields := p.FieldsByLinkedDataset(s, ds) + for _, f := range fields { + f.Unlink() + } +} + +func (p *Property) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, *GroupList, *Group, bool) { + if p == nil || ps == nil || ptr == nil || 
ps.ID() != p.Schema() { + return nil, nil, nil, false + } + + if field, pgl, pg := p.Field(ptr); field != nil { + return field, pgl, pg, false + } + + // if the field does not exist, create it here + + fid, ok := ptr.Field() + if !ok { + return nil, nil, nil, false + } + g, gl := p.GetOrCreateGroup(ps, ptr) + f2, ok := g.GetOrCreateField(ps, fid) + return f2, gl, g, ok +} + +func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) { + if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() { + return nil, nil + } + + if item, pgl := p.ItemByPointer(ptr); item != nil { + return item, pgl + } + + psgid, ok := ptr.ItemBySchemaGroup() + if !ok { + return nil, nil + } + + psg := ps.Group(psgid) + if psg == nil { + return nil, nil + } + + ni := InitItemFrom(psg) + if ni != nil { + if p.items == nil { + p.items = []Item{ni} + } else { + p.items = append(p.items, ni) + } + } + + return ni, nil // root item +} + +func (p *Property) GetOrCreateGroup(ps *Schema, ptr *Pointer) (*Group, *GroupList) { + if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() { + return nil, nil + } + + var psg *SchemaGroup + if psgid, ok := ptr.ItemBySchemaGroup(); ok { + psg = ps.Group(psgid) + } else if f, ok := ptr.Field(); ok { + psg = ps.GroupByField(f) + } + if psg == nil { + return nil, nil + } + + item, gl := p.GetOrCreateItem(ps, ptr) + return ToGroup(item), gl +} + +func (p *Property) GetOrCreateGroupList(ps *Schema, ptr *Pointer) *GroupList { + if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() { + return nil + } + + var psg *SchemaGroup + if psgid, ok := ptr.ItemBySchemaGroup(); ok { + psg = ps.Group(psgid) + } else if f, ok := ptr.Field(); ok { + psg = ps.GroupByField(f) + } + if psg == nil { + return nil + } + + item, _ := p.GetOrCreateItem(ps, ptr) + return ToGroupList(item) +} + +func (p *Property) AddListItem(ps *Schema, ptr *Pointer, index *int) (*Group, *GroupList) { + item, _ := p.GetOrCreateItem(ps, ptr) + pgl := 
ToGroupList(item) + if pgl == nil { + return nil, nil + } + return pgl.CreateAndAddListItem(ps, index), pgl +} + +func (p *Property) MoveListItem(ptr *Pointer, i int) (*Group, *GroupList) { + if ptr == nil { + return nil, nil + } + g, l := p.ListItem(ptr) + if g == nil || l == nil { + return nil, nil + } + l.Move(g.ID(), i) + return g, l +} + +func (p *Property) RemoveListItem(ptr *Pointer) bool { + if p == nil || ptr == nil { + return false + } + g, l := p.ListItem(ptr) + if g == nil || l == nil { + return false + } + ok := l.Remove(g.ID()) + if ok { + p.Prune() + } + return ok +} + +func (p *Property) UpdateLinkableValue(s *Schema, v *Value) { + if s == nil || p == nil || v == nil { + return + } + + var ptr *Pointer + switch v.Type() { + case ValueTypeLatLng: + ptr = s.linkable.LatLng + case ValueTypeURL: + ptr = s.linkable.URL + } + + sf := s.FieldByPointer(ptr) + if sf == nil { + return + } + + f, _, _, ok := p.GetOrCreateField(s, ptr) + if ok { + if err := f.Update(v, sf); err != nil { + p.Prune() + } + } +} + +func (p *Property) AutoLinkField(s *Schema, v ValueType, d id.DatasetSchemaID, df *id.DatasetSchemaFieldID, ds *id.DatasetID) { + if s == nil || p == nil || df == nil { + return + } + + var ptr *Pointer + switch v { + case ValueTypeLatLng: + ptr = s.linkable.LatLng + case ValueTypeURL: + ptr = s.linkable.URL + } + + sf := s.FieldByPointer(ptr) + if sf == nil { + return + } + + f, _, _, ok := p.GetOrCreateField(s, ptr) + if ok { + if ds == nil { + f.Link(NewLinks([]*Link{NewLinkFieldOnly(d, *df)})) + } else { + f.Link(NewLinks([]*Link{NewLink(*ds, d, *df)})) + } + } +} + +// TODO: group migration +func (p *Property) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { + if p == nil || dl == nil { + return + } + p.schema = newSchema.ID() + + for _, f := range p.items { + f.MigrateSchema(ctx, newSchema, dl) + } + + p.Prune() +} + +func (p *Property) MigrateDataset(q DatasetMigrationParam) { + if p == nil { + return + } + for _, f := 
range p.items { + f.MigrateDataset(q) + } + p.Prune() +} + +func (p *Property) ValidateSchema(ps *Schema) error { + if p == nil { + return nil + } + if ps == nil { + return errors.New("invalid schema") + } + if p.schema != ps.ID() { + return errors.New("invalid schema id") + } + + for _, i := range p.items { + sg := i.SchemaGroup() + if err := i.ValidateSchema(ps.Group(sg)); err != nil { + return fmt.Errorf("%s (%s): %w", p.ID(), sg, err) + } + } + + return nil +} diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go new file mode 100644 index 000000000..6a5cb0bbf --- /dev/null +++ b/pkg/property/property_test.go @@ -0,0 +1,275 @@ +package property + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestPropertyMigrateSchema(t *testing.T) { + sceneID := id.NewSceneID() + oldSchema, _ := id.PropertySchemaIDFrom("hoge/test") + newSchema, _ := id.PropertySchemaIDFrom("hoge/test2") + schemaField1ID := id.PropertySchemaFieldID("a") + schemaField2ID := id.PropertySchemaFieldID("b") + schemaField3ID := id.PropertySchemaFieldID("c") + schemaField4ID := id.PropertySchemaFieldID("d") + schemaField5ID := id.PropertySchemaFieldID("e") + schemaField6ID := id.PropertySchemaFieldID("f") + schemaField7ID := id.PropertySchemaFieldID("g") + schemaField8ID := id.PropertySchemaFieldID("h") + schemaField9ID := id.PropertySchemaFieldID("i") + datasetID := id.NewDatasetID() + datasetSchemaID := id.NewDatasetSchemaID() + datasetFieldID := id.NewDatasetSchemaFieldID() + + schemaField1, _ := NewSchemaField().ID(schemaField1ID).Type(ValueTypeString).Build() + schemaField2, _ := NewSchemaField().ID(schemaField2ID).Type(ValueTypeNumber).Min(0).Max(100).Build() + schemaField3, _ := NewSchemaField().ID(schemaField3ID).Type(ValueTypeNumber).Min(0).Max(100).Build() + schemaField4, _ := 
NewSchemaField().ID(schemaField4ID).Type(ValueTypeString).Choices([]SchemaFieldChoice{ + {Title: i18n.StringFrom("x"), Key: "x"}, + {Title: i18n.StringFrom("y"), Key: "y"}, + }).Build() + schemaField5, _ := NewSchemaField().ID(schemaField5ID).Type(ValueTypeString).Build() + schemaField6, _ := NewSchemaField().ID(schemaField6ID).Type(ValueTypeNumber).Build() + schemaField7, _ := NewSchemaField().ID(schemaField7ID).Type(ValueTypeNumber).Build() + schemaFields := []*SchemaField{ + schemaField1, + schemaField2, + schemaField3, + schemaField4, + schemaField5, + schemaField6, + schemaField7, + } + schemaGroups := []*SchemaGroup{ + NewSchemaGroup().ID(schemaField9ID).Schema(oldSchema).Fields(schemaFields).MustBuild(), + } + + fields := []*Field{ + // should remain + NewFieldUnsafe().FieldUnsafe(schemaField1ID). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("foobar")). + Build(), + // should be removed because of max + NewFieldUnsafe().FieldUnsafe(schemaField2ID). + ValueUnsafe(ValueTypeNumber.ValueFromUnsafe(101)). + Build(), + // should remain + NewFieldUnsafe().FieldUnsafe(schemaField3ID). + ValueUnsafe(ValueTypeNumber.ValueFromUnsafe(1)). + Build(), + // should be removed because of choices + NewFieldUnsafe().FieldUnsafe(schemaField4ID). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("z")). + Build(), + // should remain + NewFieldUnsafe().FieldUnsafe(schemaField5ID). + LinksUnsafe(NewLinks([]*Link{ + NewLink(datasetID, datasetSchemaID, datasetFieldID), + })). + Build(), + // should be removed because of linked dataset field value type + NewFieldUnsafe().FieldUnsafe(schemaField6ID). + LinksUnsafe(NewLinks([]*Link{ + NewLink(datasetID, datasetSchemaID, datasetFieldID), + })). + Build(), + // should be removed because of type + NewFieldUnsafe().FieldUnsafe(schemaField7ID). + ValueUnsafe(ValueTypeString.ValueFromUnsafe("hogehoge")). + Build(), + // should be removed because of not existing field + NewFieldUnsafe().FieldUnsafe(schemaField8ID). 
+ ValueUnsafe(ValueTypeString.ValueFromUnsafe("hogehoge")). + Build(), + } + items := []Item{ + NewGroup().NewID().Schema(oldSchema, schemaField9ID).Fields(fields).MustBuild(), + } + + datasetFields := []*dataset.Field{ + dataset.NewField(datasetFieldID, dataset.ValueTypeString.ValueFrom("a"), ""), + } + + schema, _ := NewSchema().ID(newSchema).Groups(schemaGroups).Build() + property, _ := New().NewID().Scene(sceneID).Schema(oldSchema).Items(items).Build() + ds, _ := dataset.New().ID(datasetID).Schema(datasetSchemaID).Scene(sceneID).Fields(datasetFields).Build() + + property.MigrateSchema(context.Background(), schema, dataset.LoaderFrom([]*dataset.Dataset{ds})) + + newGroup := ToGroup(property.ItemBySchema(schemaField9ID)) + newFields := newGroup.Fields() + + assert.Equal(t, schema.ID(), property.Schema()) + assert.Equal(t, 1, len(property.Items())) + assert.Equal(t, schema.ID(), newGroup.Schema()) + assert.Equal(t, 3, len(newFields)) + assert.NotNil(t, newGroup.Field(schemaField1ID)) + assert.NotNil(t, newGroup.Field(schemaField3ID)) + assert.NotNil(t, newGroup.Field(schemaField5ID)) +} + +func TestGetOrCreateItem(t *testing.T) { + sceneID := id.NewSceneID() + sid, _ := id.PropertySchemaIDFrom("hoge/test") + sf1id := id.PropertySchemaFieldID("a") + sf2id := id.PropertySchemaFieldID("b") + sg1id := id.PropertySchemaFieldID("c") + sg2id := id.PropertySchemaFieldID("d") + + sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() + sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() + sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() + sg2 := NewSchemaGroup().ID(sg2id).Schema(sid).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() + s := NewSchema().ID(sid).Groups([]*SchemaGroup{sg1, sg2}).MustBuild() + + p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() + + // group + assert.Nil(t, p.ItemBySchema(sg1id)) + assert.Equal(t, []Item{}, p.Items()) + + i, _ := p.GetOrCreateItem(s, 
PointItemBySchema(sg1id)) + assert.NotNil(t, i) + assert.Equal(t, sid, i.Schema()) + assert.Equal(t, sg1id, i.SchemaGroup()) + assert.Equal(t, i, ToGroup(p.ItemBySchema(sg1id))) + assert.Equal(t, []Item{i}, p.Items()) + + i2, _ := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + assert.NotNil(t, i2) + assert.Equal(t, i, i2) + assert.Equal(t, i2, ToGroup(p.ItemBySchema(sg1id))) + assert.Equal(t, []Item{i2}, p.Items()) + + // group list + assert.Nil(t, p.ItemBySchema(sg2id)) + + i3, _ := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + assert.NotNil(t, i3) + assert.Equal(t, sid, i3.Schema()) + assert.Equal(t, sg2id, i3.SchemaGroup()) + assert.Equal(t, i3, ToGroupList(p.ItemBySchema(sg2id))) + assert.Equal(t, []Item{i, i3}, p.Items()) + + i4, _ := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + assert.NotNil(t, i4) + assert.Equal(t, i3, i4) + assert.Equal(t, i4, ToGroupList(p.ItemBySchema(sg2id))) + assert.Equal(t, []Item{i2, i4}, p.Items()) +} + +func TestGetOrCreateField(t *testing.T) { + sceneID := id.NewSceneID() + sid, _ := id.PropertySchemaIDFrom("hoge/test") + sf1id := id.PropertySchemaFieldID("a") + sf2id := id.PropertySchemaFieldID("b") + sg1id := id.PropertySchemaFieldID("c") + sg2id := id.PropertySchemaFieldID("d") + + sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() + sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() + sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() + sg2 := NewSchemaGroup().ID(sg2id).Schema(sid).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() + s := NewSchema().ID(sid).Groups([]*SchemaGroup{sg1, sg2}).MustBuild() + + p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() + + // field and group will be created + assert.Nil(t, p.ItemBySchema(sg1id)) + assert.Equal(t, []Item{}, p.Items()) + + f, _, _, created := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf1id)) + assert.NotNil(t, f) + assert.True(t, created) + assert.Equal(t, sf1id, f.Field()) + i 
:= ToGroup(p.ItemBySchema(sg1id)) + assert.Equal(t, sid, i.Schema()) + assert.Equal(t, sg1id, i.SchemaGroup()) + assert.Equal(t, []*Field{f}, i.Fields()) + field, _, _ := p.Field(PointFieldBySchemaGroup(sg1id, sf1id)) + assert.Equal(t, f, field) + + f2, _, _, created := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf1id)) + assert.NotNil(t, f2) + assert.False(t, created) + assert.Equal(t, f, f2) + i2 := ToGroup(p.ItemBySchema(sg1id)) + assert.Equal(t, i, i2) + field, _, _ = p.Field(PointFieldBySchemaGroup(sg1id, sf1id)) + assert.Equal(t, f2, field) + + // field will not be created if field is incorrect + f3, _, _, _ := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf2id)) + assert.Nil(t, f3) + + // field and group list will not be created + assert.Nil(t, p.ItemBySchema(sg2id)) + f4, _, _, _ := p.GetOrCreateField(s, PointFieldBySchemaGroup(sg1id, sf2id)) + assert.Nil(t, f4) + assert.Nil(t, p.ItemBySchema(sg2id)) + assert.Equal(t, []Item{i}, p.Items()) +} + +func TestAddListItem(t *testing.T) { + sceneID := id.NewSceneID() + sid, _ := id.PropertySchemaIDFrom("hoge/test") + sfid := id.PropertySchemaFieldID("a") + sgid := id.PropertySchemaFieldID("b") + sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID(sgid).Schema(sid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() + ps := NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild() + p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() + + item, _ := p.AddListItem(ps, PointItemBySchema(sgid), nil) + assert.Equal(t, sgid, item.SchemaGroup()) + _, list := p.ListItem(PointItemBySchema(sgid)) + assert.Equal(t, sgid, list.SchemaGroup()) + assert.Equal(t, []*Group{item}, list.Groups()) + + index := 0 + item2, _ := p.AddListItem(ps, PointItem(list.ID()), &index) + assert.Equal(t, sgid, item2.SchemaGroup()) + assert.Equal(t, []*Group{item2, item}, list.Groups()) +} + +func TestMoveListItem(t *testing.T) { + sceneID := id.NewSceneID() + sid, _ := 
id.PropertySchemaIDFrom("hoge/test") + sgid := id.PropertySchemaFieldID("b") + g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() + g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() + gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() + p := New().NewID().Scene(sceneID).Schema(sid).Items([]Item{gl}).MustBuild() + + assert.Equal(t, []*Group{g1, g2}, gl.Groups()) + i, _ := p.MoveListItem(PointItem(g1.ID()), 1) + assert.Equal(t, g1, i) + assert.Equal(t, []*Group{g2, g1}, gl.Groups()) +} + +func TestRemoveListItem(t *testing.T) { + sceneID := id.NewSceneID() + sid, _ := id.PropertySchemaIDFrom("hoge/test") + sgid := id.PropertySchemaFieldID("b") + g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() + g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() + gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() + p := New().NewID().Scene(sceneID).Schema(sid).Items([]Item{gl}).MustBuild() + + assert.Equal(t, []*Group{g1, g2}, gl.Groups()) + ok := p.RemoveListItem(PointItem(g1.ID())) + assert.True(t, ok) + assert.Equal(t, []*Group{g2}, gl.Groups()) + assert.Equal(t, 1, len(p.Items())) + + ok = p.RemoveListItem(PointItem(g2.ID())) + assert.True(t, ok) + assert.Equal(t, []*Group{}, gl.Groups()) + assert.Equal(t, 0, len(p.Items())) +} diff --git a/pkg/property/schema.go b/pkg/property/schema.go new file mode 100644 index 000000000..f3d6d09c4 --- /dev/null +++ b/pkg/property/schema.go @@ -0,0 +1,175 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +// Schema _ +type Schema struct { + id id.PropertySchemaID + version int + groups []*SchemaGroup + linkable LinkableFields +} + +// LinkableFields _ +type LinkableFields struct { + LatLng *Pointer + URL *Pointer +} + +// ID _ +func (p *Schema) ID() id.PropertySchemaID { + return p.id +} + +// IDRef _ +func (p *Schema) IDRef() *id.PropertySchemaID { + if p == nil { + return nil + } + return p.id.Ref() +} + +// Version _ +func (p 
*Schema) Version() int { + return p.version +} + +// Fields _ +func (p *Schema) Fields() []*SchemaField { + if p == nil { + return nil + } + fields := []*SchemaField{} + for _, g := range p.groups { + fields = append(fields, g.Fields()...) + } + return fields +} + +// Field _ +func (p *Schema) Field(id id.PropertySchemaFieldID) *SchemaField { + if p == nil { + return nil + } + for _, g := range p.groups { + if f := g.Field(id); f != nil { + return f + } + } + return nil +} + +// FieldByPointer _ +func (p *Schema) FieldByPointer(ptr *Pointer) *SchemaField { + if p == nil { + return nil + } + g := p.GroupByPointer(ptr) + if g == nil { + return nil + } + return g.FieldByPointer(ptr) +} + +// Groups _ +func (p *Schema) Groups() []*SchemaGroup { + if p == nil { + return nil + } + return append([]*SchemaGroup{}, p.groups...) +} + +// Group _ +func (p *Schema) Group(id id.PropertySchemaFieldID) *SchemaGroup { + if p == nil { + return nil + } + for _, f := range p.groups { + if f.ID() == id { + return f + } + } + return nil +} + +// GroupByField _ +func (p *Schema) GroupByField(id id.PropertySchemaFieldID) *SchemaGroup { + if p == nil { + return nil + } + for _, f := range p.groups { + if f.HasField(id) { + return f + } + } + return nil +} + +// GroupByPointer _ +func (p *Schema) GroupByPointer(ptr *Pointer) *SchemaGroup { + if p == nil { + return nil + } + + if gid, ok := ptr.ItemBySchemaGroup(); ok { + return p.Group(gid) + } + if fid, ok := ptr.Field(); ok { + for _, g := range p.groups { + if g.HasField(fid) { + return g + } + } + } + + return nil +} + +// DetectDuplicatedFields _ +func (s *Schema) DetectDuplicatedFields() []id.PropertySchemaFieldID { + duplicated := []id.PropertySchemaFieldID{} + ids := map[id.PropertySchemaFieldID]struct{}{} + for _, f := range s.Fields() { + i := f.ID() + if _, ok := ids[i]; ok { + duplicated = append(duplicated, i) + return duplicated + } + ids[i] = struct{}{} + } + return nil +} + +// LinkableFields _ +func (p *Schema) 
LinkableFields() LinkableFields { + if p == nil { + return LinkableFields{} + } + return p.linkable.Clone() +} + +// Clone _ +func (l LinkableFields) Clone() LinkableFields { + return LinkableFields{ + LatLng: l.LatLng.Clone(), + URL: l.URL.Clone(), + } +} + +// Validate _ +func (l LinkableFields) Validate(s *Schema) bool { + if s == nil { + return false + } + if l.LatLng != nil { + if f := s.FieldByPointer(l.LatLng); f == nil { + return false + } + } + if l.URL != nil { + if f := s.FieldByPointer(l.URL); f == nil { + return false + } + } + return true +} diff --git a/pkg/property/schema_builder.go b/pkg/property/schema_builder.go new file mode 100644 index 000000000..deb598630 --- /dev/null +++ b/pkg/property/schema_builder.go @@ -0,0 +1,78 @@ +package property + +import ( + "errors" + "fmt" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrInvalidSceneID error = errors.New("invalid scene id") + ErrInvalidPropertySchemaID error = errors.New("invalid property schema id") + ErrInvalidValue error = errors.New("invalid value") + ErrInvalidPropertyLinkableField error = errors.New("invalid property linkable field") + ErrInvalidVersion error = errors.New("invalid version") + ErrDuplicatedField = errors.New("duplicated field") +) + +type SchemaBuilder struct { + p *Schema +} + +func NewSchema() *SchemaBuilder { + return &SchemaBuilder{p: &Schema{}} +} + +func (b *SchemaBuilder) Build() (*Schema, error) { + if b.p.id.IsNil() { + return nil, id.ErrInvalidID + } + if d := b.p.DetectDuplicatedFields(); len(d) > 0 { + return nil, fmt.Errorf("%s: %s %s", ErrDuplicatedField, b.p.id, d) + } + if !b.p.linkable.Validate(b.p) { + return nil, ErrInvalidPropertyLinkableField + } + return b.p, nil +} + +func (b *SchemaBuilder) MustBuild() *Schema { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *SchemaBuilder) ID(id id.PropertySchemaID) *SchemaBuilder { + b.p.id = id + return b +} + +func (b *SchemaBuilder) Version(version int) 
*SchemaBuilder { + b.p.version = version + return b +} + +func (b *SchemaBuilder) Groups(groups []*SchemaGroup) *SchemaBuilder { + newGroups := []*SchemaGroup{} + ids := map[id.PropertySchemaFieldID]struct{}{} + for _, f := range groups { + if f == nil { + continue + } + if _, ok := ids[f.ID()]; ok { + continue + } + ids[f.ID()] = struct{}{} + newGroups = append(newGroups, f) + } + b.p.groups = newGroups + return b +} + +func (b *SchemaBuilder) LinkableFields(l LinkableFields) *SchemaBuilder { + b.p.linkable = l + return b +} diff --git a/pkg/property/schema_builder_test.go b/pkg/property/schema_builder_test.go new file mode 100644 index 000000000..845e5f624 --- /dev/null +++ b/pkg/property/schema_builder_test.go @@ -0,0 +1,161 @@ +package property + +import ( + "errors" + "fmt" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchemaBuilder_Build(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + testCases := []struct { + Name string + Id id.PropertySchemaID + Version int + Groups []*SchemaGroup + Linkable LinkableFields + Expected struct { + Id id.PropertySchemaID + Version int + Groups []*SchemaGroup + Linkable LinkableFields + } + Err error + }{ + { + Name: "fail: invalid id", + Err: id.ErrInvalidID, + }, + { + Name: "fail: invalid linkable field", + Id: id.MustPropertySchemaID("xx/aa"), + Linkable: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + Err: ErrInvalidPropertyLinkableField, + }, + { + Name: "fail: duplicated field", + Id: id.MustPropertySchemaID("xx/aa"), + Groups: []*SchemaGroup{sg, sg2}, + Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, id.MustPropertySchemaID("xx/aa"), 
[]id.PropertySchemaFieldID{"aa"}), + }, + { + Name: "success", + Id: id.MustPropertySchemaID("xx/aa"), + Groups: []*SchemaGroup{sg}, + Version: 1, + Expected: struct { + Id id.PropertySchemaID + Version int + Groups []*SchemaGroup + Linkable LinkableFields + }{Id: id.MustPropertySchemaID("xx/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewSchema(). + ID(tc.Id). + Groups(tc.Groups). + Version(tc.Version). + LinkableFields(tc.Linkable). + Build() + if err == nil { + assert.Equal(tt, tc.Expected.Linkable, res.LinkableFields()) + assert.Equal(tt, tc.Expected.Groups, res.Groups()) + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.Version, res.Version()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} + +func TestSchemaBuilder_MustBuild(t *testing.T) { + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + testCases := []struct { + Name string + Fails bool + Id id.PropertySchemaID + Version int + Groups []*SchemaGroup + Linkable LinkableFields + Expected struct { + Id id.PropertySchemaID + Version int + Groups []*SchemaGroup + Linkable LinkableFields + } + }{ + { + Name: "fail: invalid id", + Fails: true, + }, + { + Name: "fail: invalid linkable field", + Id: id.MustPropertySchemaID("xx/aa"), + Linkable: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + Fails: true, + }, + { + Name: "fail: duplicated field", + Id: id.MustPropertySchemaID("xx/aa"), + Groups: []*SchemaGroup{sg, sg2}, + Fails: true, + }, + { + Name: "success", + Id: id.MustPropertySchemaID("xx/aa"), + Groups: []*SchemaGroup{sg}, + Version: 1, + Expected: 
struct { + Id id.PropertySchemaID + Version int + Groups []*SchemaGroup + Linkable LinkableFields + }{Id: id.MustPropertySchemaID("xx/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *Schema + if tc.Fails { + defer func() { + if r := recover(); r != nil { + assert.Nil(tt, res) + } + }() + res = NewSchema(). + ID(tc.Id). + Groups(tc.Groups). + Version(tc.Version). + LinkableFields(tc.Linkable). + MustBuild() + } else { + res = NewSchema(). + ID(tc.Id). + Groups(tc.Groups). + Version(tc.Version). + LinkableFields(tc.Linkable). + MustBuild() + assert.Equal(tt, tc.Expected.Linkable, res.LinkableFields()) + assert.Equal(tt, tc.Expected.Groups, res.Groups()) + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.Version, res.Version()) + } + }) + } +} diff --git a/pkg/property/schema_field.go b/pkg/property/schema_field.go new file mode 100644 index 000000000..2b470fd7e --- /dev/null +++ b/pkg/property/schema_field.go @@ -0,0 +1,178 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" +) + +type SchemaField struct { + id id.PropertySchemaFieldID + propertyType ValueType + title i18n.String + description i18n.String + prefix string + suffix string + defaultValue *Value + ui SchemaFieldUI + min *float64 + max *float64 + choices []SchemaFieldChoice + cond *Condition +} + +type SchemaFieldChoice struct { + Key string + Title i18n.String + Icon string +} + +func (p *SchemaField) ID() id.PropertySchemaFieldID { + return p.id +} + +func (p *SchemaField) Type() ValueType { + return p.propertyType +} + +func (p *SchemaField) Title() i18n.String { + return p.title.Copy() +} + +func (p *SchemaField) Description() i18n.String { + return p.description.Copy() +} + +func (p *SchemaField) Prefix() string { + return p.prefix +} + +func (p *SchemaField) Suffix() string { + return 
p.suffix +} + +func (p *SchemaField) DefaultValue() *Value { + if p == nil || p.defaultValue == nil { + return nil + } + v := *p.defaultValue + return &v +} + +func (p *SchemaField) UI() *SchemaFieldUI { + if p == nil || p.ui == SchemaFieldUI("") { + return nil + } + ui := p.ui + return &ui +} + +func (p *SchemaField) Min() *float64 { + if p == nil || p.min == nil { + return nil + } + min := *p.min + return &min +} + +func (p *SchemaField) Max() *float64 { + if p == nil || p.max == nil { + return nil + } + max := *p.max + return &max +} + +func (p *SchemaField) MinMax() (*float64, *float64) { + if p == nil { + return nil, nil + } + return p.Min(), p.Max() +} + +func (p *SchemaField) Choices() []SchemaFieldChoice { + if p == nil { + return nil + } + if p.choices == nil { + return p.choices + } + return append([]SchemaFieldChoice{}, p.choices...) +} + +func (p *SchemaField) Choice(key string) *SchemaFieldChoice { + if p == nil || p.choices == nil { + return nil + } + for _, c := range p.choices { + if c.Key == key { + return &c + } + } + return nil +} + +func (p *SchemaField) IsAvailableIf() *Condition { + if p == nil { + return nil + } + return p.cond.Clone() +} + +func (p *SchemaField) Validate(value *Value) bool { + if p == nil { + return false + } + if value == nil { + return true + } + if p.propertyType != value.Type() { + return false + } + switch v := value.Value().(type) { + case float64: + if min := p.Min(); min != nil { + if v < *min { + return false + } + } + if max := p.Max(); max != nil { + if v > *max { + return false + } + } + case string: + if choices := p.Choices(); choices != nil { + ok := false + for _, k := range choices { + if k.Key == v { + ok = true + break + } + } + if !ok { + return false + } + } + } + return true +} + +func (p *SchemaField) SetTitle(title i18n.String) { + p.title = title.Copy() +} + +func (p *SchemaField) SetDescription(des i18n.String) { + p.description = des.Copy() +} + +func (c *SchemaFieldChoice) SetTitle(l i18n.String) 
{ + c.Title = l.Copy() +} + +func (c SchemaFieldChoice) Copy() SchemaFieldChoice { + return SchemaFieldChoice{ + Icon: c.Icon, + Key: c.Key, + Title: c.Title.Copy(), + } +} diff --git a/pkg/property/schema_field_builder.go b/pkg/property/schema_field_builder.go new file mode 100644 index 000000000..4fcf1f9c1 --- /dev/null +++ b/pkg/property/schema_field_builder.go @@ -0,0 +1,143 @@ +package property + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" +) + +type SchemaFieldBuilder struct { + p *SchemaField +} + +func NewSchemaField() *SchemaFieldBuilder { + return &SchemaFieldBuilder{p: &SchemaField{}} +} + +func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { + if b.p.id.String() == "" || b.p.id.String() == "id" { + return nil, id.ErrInvalidID + } + if b.p.ui != SchemaFieldUI("") && SchemaFieldUIFrom(string(b.p.ui)) == SchemaFieldUI("") { + return nil, errors.New("invalid property schema field ui") + } + if b.p.min != nil && b.p.max != nil && *b.p.min > *b.p.max { + return nil, errors.New("invalid min and max") + } + if _, ok := b.p.propertyType.Validate(); !ok { + return nil, errors.New("invalid value type") + } + return b.p, nil +} + +func (b *SchemaFieldBuilder) MustBuild() *SchemaField { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *SchemaFieldBuilder) ID(id id.PropertySchemaFieldID) *SchemaFieldBuilder { + b.p.id = id + return b +} + +func (b *SchemaFieldBuilder) Type(propertyType ValueType) *SchemaFieldBuilder { + b.p.propertyType = propertyType + return b +} + +func (b *SchemaFieldBuilder) Name(name i18n.String) *SchemaFieldBuilder { + b.p.title = name.Copy() + return b +} + +func (b *SchemaFieldBuilder) Description(description i18n.String) *SchemaFieldBuilder { + b.p.description = description.Copy() + return b +} + +func (b *SchemaFieldBuilder) Prefix(prefix string) *SchemaFieldBuilder { + b.p.prefix = prefix + return b +} + +func (b 
*SchemaFieldBuilder) Suffix(suffix string) *SchemaFieldBuilder { + b.p.suffix = suffix + return b +} + +func (b *SchemaFieldBuilder) DefaultValue(v *Value) *SchemaFieldBuilder { + if v == nil { + b.p.defaultValue = nil + } else { + v2 := *v + b.p.defaultValue = &v2 + } + return b +} + +func (b *SchemaFieldBuilder) UI(ui SchemaFieldUI) *SchemaFieldBuilder { + b.p.ui = ui + return b +} + +func (b *SchemaFieldBuilder) UIRef(ui *SchemaFieldUI) *SchemaFieldBuilder { + if ui == nil { + b.p.ui = SchemaFieldUI("") + } else { + b.p.ui = *ui + } + return b +} + +func (b *SchemaFieldBuilder) Min(min float64) *SchemaFieldBuilder { + m := min + b.p.min = &m + return b +} + +func (b *SchemaFieldBuilder) Max(max float64) *SchemaFieldBuilder { + m := max + b.p.max = &m + return b +} + +func (b *SchemaFieldBuilder) MinRef(min *float64) *SchemaFieldBuilder { + if min == nil { + b.p.min = nil + } else { + m := *min + b.p.min = &m + } + return b +} + +func (b *SchemaFieldBuilder) MaxRef(max *float64) *SchemaFieldBuilder { + if max == nil { + b.p.max = nil + } else { + m := *max + b.p.max = &m + } + return b +} + +func (b *SchemaFieldBuilder) Choices(choices []SchemaFieldChoice) *SchemaFieldBuilder { + if choices == nil { + b.p.choices = nil + } else { + b.p.choices = make([]SchemaFieldChoice, 0, len(choices)) + for _, c := range choices { + b.p.choices = append(b.p.choices, c.Copy()) + } + } + return b +} + +func (b *SchemaFieldBuilder) IsAvailableIf(cond *Condition) *SchemaFieldBuilder { + b.p.cond = cond.Clone() + return b +} diff --git a/pkg/property/schema_field_builder_test.go b/pkg/property/schema_field_builder_test.go new file mode 100644 index 000000000..ef1507466 --- /dev/null +++ b/pkg/property/schema_field_builder_test.go @@ -0,0 +1,94 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchemaFieldBuilder_Build(t 
*testing.T) { + testCases := []struct { + Name string + Id id.PropertySchemaFieldID + PropertyType ValueType + Fname i18n.String + Description i18n.String + Prefix string + Suffix string + DefaultValue *Value + Ui SchemaFieldUI + Min float64 + MinRef *float64 + Max float64 + MaxRef *float64 + Choices []SchemaFieldChoice + Cond *Condition + Expected struct { + Id id.PropertySchemaFieldID + PropertyType ValueType + Fname i18n.String + Description i18n.String + Prefix string + Suffix string + DefaultValue *Value + Ui SchemaFieldUI + Min *float64 + Max *float64 + Choices []SchemaFieldChoice + Cond *Condition + } + Err error + }{ + { + Name: "nil field", + Err: id.ErrInvalidID, + }, + { + Name: "fail min > max", + Id: id.PropertySchemaFieldID("aa"), + Min: 10, + Max: 1, + Err: errors.New("invalid min and max"), + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewSchemaField(). + ID(tc.Id).Name(tc.Fname). + IsAvailableIf(tc.Cond). + Type(tc.PropertyType). + Description(tc.Description). + Choices(tc.Choices). + Prefix(tc.Prefix). + Suffix(tc.Suffix). + DefaultValue(tc.DefaultValue). + MaxRef(tc.MaxRef). + MinRef(tc.MinRef). + Min(tc.Min). + Max(tc.Max). + UI(tc.Ui). + UIRef(&tc.Ui). 
+ Build() + if err == nil { + assert.Equal(tt, tc.Expected.Ui, res.UI()) + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.Min, res.Min()) + assert.Equal(tt, tc.Expected.Max, res.Max()) + assert.Equal(tt, tc.Expected.DefaultValue, res.DefaultValue()) + assert.Equal(tt, tc.Expected.Description, res.Description()) + assert.Equal(tt, tc.Expected.Prefix, res.Prefix()) + assert.Equal(tt, tc.Expected.Suffix, res.Suffix()) + assert.Equal(tt, tc.Expected.Choices, res.Choices()) + assert.Equal(tt, tc.Expected.Cond, res.IsAvailableIf()) + assert.Equal(tt, tc.Expected.Fname, res.Title()) + assert.Equal(tt, tc.Expected.PropertyType, res.Type()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} diff --git a/pkg/property/schema_field_test.go b/pkg/property/schema_field_test.go new file mode 100644 index 000000000..dce9058d6 --- /dev/null +++ b/pkg/property/schema_field_test.go @@ -0,0 +1,227 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +func TestSchemaField_MinMax(t *testing.T) { + getFloatRef := func(f float64) *float64 { + return &f + } + testCases := []struct { + Name string + SF *SchemaField + Expected struct { + Min, Max *float64 + } + }{ + { + Name: "get minmax", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Min(10.0).Max(20.0).MustBuild(), + Expected: struct { + Min, Max *float64 + }{ + Min: getFloatRef(10.0), + Max: getFloatRef(20.0), + }, + }, + { + Name: "nil sf", + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + min, max := tc.SF.MinMax() + assert.Equal(tt, tc.Expected.Min, min) + assert.Equal(tt, tc.Expected.Max, max) + }) + } +} + +func TestSchemaField_Choice(t *testing.T) { + testCases := []struct { + Name, Key string + SF *SchemaField + Expected *SchemaFieldChoice + }{ + { + Name: "found", + Key: "xxx", + SF: 
NewSchemaField().ID("A").Type(ValueTypeNumber).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Expected: &SchemaFieldChoice{ + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + }, + { + Name: "not found", + Key: "aaa", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Expected: nil, + }, + { + Name: "nil sf", + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + ch := tc.SF.Choice(tc.Key) + assert.Equal(tt, tc.Expected, ch) + }) + } +} + +func TestSchemaField_SetDescription(t *testing.T) { + sf := NewSchemaField().ID("A").Type(ValueTypeNumber).Description(i18n.StringFrom("xx")).MustBuild() + sf.SetDescription(i18n.StringFrom("aa")) + assert.Equal(t, i18n.StringFrom("aa"), sf.Description()) +} + +func TestSchemaField_SetTitle(t *testing.T) { + sf := NewSchemaField().ID("A").Type(ValueTypeNumber).Name(i18n.StringFrom("abc")).MustBuild() + sf.SetTitle(i18n.StringFrom("bb")) + assert.Equal(t, i18n.StringFrom("bb"), sf.Title()) +} + +func TestSchemaField_Validate(t *testing.T) { + testCases := []struct { + Name string + SF *SchemaField + Input *Value + Expected bool + }{ + { + Name: "nil sf", + }, + { + Name: "nil value", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Expected: true, + }, + { + Name: "property type != value type", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: ValueTypeBool.ValueFromUnsafe(true), + Expected: false, + }, + { + Name: "validate min", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Min(10).MustBuild(), + Input: ValueTypeNumber.ValueFromUnsafe(9), + Expected: false, + }, + { + Name: "validate max", + 
SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Max(10).MustBuild(), + Input: ValueTypeNumber.ValueFromUnsafe(11), + Expected: false, + }, + { + Name: "valid string", + SF: NewSchemaField().ID("a").Type(ValueTypeString).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Input: ValueTypeString.ValueFromUnsafe("xxx"), + Expected: true, + }, + { + Name: "invalid string", + SF: NewSchemaField().ID("a").Type(ValueTypeString).Choices([]SchemaFieldChoice{ + { + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + }, + { + Key: "zzz", + Title: i18n.StringFrom("abc"), + Icon: "", + }, + }).MustBuild(), + Input: ValueTypeString.ValueFromUnsafe("aaa"), + Expected: false, + }, + { + Name: "validate other", + SF: NewSchemaField().ID("A").Type(ValueTypeLatLng).MustBuild(), + Input: ValueTypeLatLng.ValueFromUnsafe(LatLng{Lat: 10, Lng: 11}), + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.SF.Validate(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestSchemaFieldChoice_SetLabel(t *testing.T) { + sfc := &SchemaFieldChoice{ + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + } + sfc.SetTitle(i18n.StringFrom("aa")) + assert.Equal(t, i18n.StringFrom("aa"), sfc.Title) +} + +func TestSchemaFieldChoice_Copy(t *testing.T) { + sfc := SchemaFieldChoice{ + Key: "xxx", + Title: i18n.StringFrom("lll"), + Icon: "", + } + copy := sfc.Copy() + assert.Equal(t, sfc, copy) +} + +func TestSchemaField_Nil(t *testing.T) { + var sf *SchemaField + assert.Nil(t, sf.UI()) + assert.Nil(t, sf.DefaultValue()) + assert.Nil(t, sf.IsAvailableIf()) + assert.Nil(t, sf.Max()) + assert.Nil(t, sf.Min()) +} diff --git a/pkg/property/schema_field_ui.go b/pkg/property/schema_field_ui.go new file mode 100644 index 000000000..c03f282a9 --- /dev/null +++ 
b/pkg/property/schema_field_ui.go @@ -0,0 +1,62 @@ +package property + +type SchemaFieldUI string + +const ( + SchemaFieldUIMultiline SchemaFieldUI = "multiline" + SchemaFieldUISelection SchemaFieldUI = "selection" + SchemaFieldUIColor SchemaFieldUI = "color" + SchemaFieldUIRange SchemaFieldUI = "range" + SchemaFieldUIImage SchemaFieldUI = "image" + SchemaFieldUIVideo SchemaFieldUI = "video" + SchemaFieldUIFile SchemaFieldUI = "file" + SchemaFieldUILayer SchemaFieldUI = "layer" + SchemaFieldUICameraPose SchemaFieldUI = "camera_pose" + // DON'T FORGET ADDING A NEW UI TO schemaFieldUIs ALSO! +) + +var ( + schemaFieldUIs = []SchemaFieldUI{ + SchemaFieldUIMultiline, + SchemaFieldUISelection, + SchemaFieldUIColor, + SchemaFieldUIRange, + SchemaFieldUIImage, + SchemaFieldUIVideo, + SchemaFieldUIFile, + SchemaFieldUILayer, + SchemaFieldUICameraPose, + // DON'T FORGET ADDING A NEW UI HERE ALSO! + } +) + +func SchemaFieldUIFrom(ui string) SchemaFieldUI { + psfui := SchemaFieldUI(ui) + for _, u := range schemaFieldUIs { + if u == psfui { + return u + } + } + return "" +} + +func SchemaFieldUIFromRef(ui *string) *SchemaFieldUI { + if ui == nil { + return nil + } + ui2 := SchemaFieldUIFrom(*ui) + return &ui2 +} + +func (p SchemaFieldUI) String() string { + return string(p) +} + +// StringRef _ +func (p *SchemaFieldUI) StringRef() *string { + if p == nil { + return nil + } + p2 := string(*p) + return &p2 +} diff --git a/pkg/property/schema_field_ui_test.go b/pkg/property/schema_field_ui_test.go new file mode 100644 index 000000000..1025e1302 --- /dev/null +++ b/pkg/property/schema_field_ui_test.go @@ -0,0 +1,19 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaFieldUI(t *testing.T) { + var uir *SchemaFieldUI + assert.Equal(t, SchemaFieldUI(""), SchemaFieldUIFrom("")) + assert.Equal(t, uir, SchemaFieldUIFromRef(nil)) + ui := SchemaFieldUILayer + assert.Equal(t, SchemaFieldUILayer, SchemaFieldUIFrom("layer")) + 
assert.Equal(t, "layer", SchemaFieldUIFrom("layer").String()) + str := "layer" + assert.Equal(t, &ui, SchemaFieldUIFromRef(&str)) + assert.Equal(t, &str, SchemaFieldUIFromRef(&str).StringRef()) +} diff --git a/pkg/property/schema_group.go b/pkg/property/schema_group.go new file mode 100644 index 000000000..af9377860 --- /dev/null +++ b/pkg/property/schema_group.go @@ -0,0 +1,127 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" +) + +// SchemaGroup represents a group of property that has some fields +type SchemaGroup struct { + id id.PropertySchemaFieldID + sid id.PropertySchemaID + fields []*SchemaField + list bool + isAvailableIf *Condition + title i18n.String + representativeField *id.PropertySchemaFieldID +} + +// ID returns id +func (s *SchemaGroup) ID() id.PropertySchemaFieldID { + if s == nil { + return id.PropertySchemaFieldID("") + } + return s.id +} + +func (s *SchemaGroup) IDRef() *id.PropertySchemaFieldID { + if s == nil { + return nil + } + return s.id.Ref() +} + +func (s *SchemaGroup) Schema() id.PropertySchemaID { + if s == nil { + return id.PropertySchemaID{} + } + return s.sid +} + +func (s *SchemaGroup) SchemaRef() *id.PropertySchemaID { + if s == nil { + return nil + } + return &s.sid +} + +// Fields returns a slice of fields +func (s *SchemaGroup) Fields() []*SchemaField { + if s == nil { + return nil + } + return append([]*SchemaField{}, s.fields...) 
+} + +// Field returns a field whose id is specified +func (s *SchemaGroup) Field(fid id.PropertySchemaFieldID) *SchemaField { + if s == nil { + return nil + } + for _, f := range s.fields { + if f.ID() == fid { + return f + } + } + return nil +} + +// FieldByPointer returns a field whose id is specified +func (s *SchemaGroup) FieldByPointer(ptr *Pointer) *SchemaField { + if s == nil { + return nil + } + fid, ok := ptr.Field() + if !ok { + return nil + } + return s.Field(fid) +} + +func (s *SchemaGroup) HasField(i id.PropertySchemaFieldID) bool { + return s.Field(i) != nil +} + +// IsList returns true if this group is list +func (s *SchemaGroup) IsList() bool { + if s == nil { + return false + } + return s.list +} + +// IsAvailableIf returns condition of availability +func (s *SchemaGroup) IsAvailableIf() *Condition { + if s == nil { + return nil + } + return s.isAvailableIf.Clone() +} + +// Title returns a title of the group +func (s *SchemaGroup) Title() i18n.String { + if s == nil { + return nil + } + return s.title.Copy() +} + +// RepresentativeFieldID returns the representative field ID of the group +func (s *SchemaGroup) RepresentativeFieldID() *id.PropertySchemaFieldID { + if s == nil { + return nil + } + return s.representativeField +} + +// RepresentativeField returns the representative field of the group +func (s *SchemaGroup) RepresentativeField() *SchemaField { + if s == nil || s.representativeField == nil { + return nil + } + return s.Field(*s.representativeField) +} + +func (s *SchemaGroup) SetTitle(t i18n.String) { + s.title = t.Copy() +} diff --git a/pkg/property/schema_group_builder.go b/pkg/property/schema_group_builder.go new file mode 100644 index 000000000..d26729e00 --- /dev/null +++ b/pkg/property/schema_group_builder.go @@ -0,0 +1,83 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" +) + +type SchemaGroupBuilder struct { + p *SchemaGroup +} + +func NewSchemaGroup() 
*SchemaGroupBuilder { + return &SchemaGroupBuilder{ + p: &SchemaGroup{}, + } +} + +func (b *SchemaGroupBuilder) Build() (*SchemaGroup, error) { + if b.p.sid.IsNil() { + return nil, id.ErrInvalidID + } + return b.p, nil +} + +func (b *SchemaGroupBuilder) MustBuild() *SchemaGroup { + p, err := b.Build() + if err != nil { + panic(err) + } + return p +} + +func (b *SchemaGroupBuilder) ID(id id.PropertySchemaFieldID) *SchemaGroupBuilder { + b.p.id = id + return b +} + +func (b *SchemaGroupBuilder) Schema(sid id.PropertySchemaID) *SchemaGroupBuilder { + b.p.sid = sid + return b +} + +func (b *SchemaGroupBuilder) Fields(fields []*SchemaField) *SchemaGroupBuilder { + newFields := []*SchemaField{} + ids := map[id.PropertySchemaFieldID]struct{}{} + for _, f := range fields { + if f == nil { + continue + } + if _, ok := ids[f.ID()]; ok { + continue + } + ids[f.ID()] = struct{}{} + newFields = append(newFields, f) + } + b.p.fields = newFields + return b +} + +func (b *SchemaGroupBuilder) IsList(list bool) *SchemaGroupBuilder { + b.p.list = list + return b +} + +func (b *SchemaGroupBuilder) IsAvailableIf(cond *Condition) *SchemaGroupBuilder { + b.p.isAvailableIf = cond.Clone() + return b +} + +func (b *SchemaGroupBuilder) Title(title i18n.String) *SchemaGroupBuilder { + b.p.title = title.Copy() + return b +} + +func (b *SchemaGroupBuilder) RepresentativeField(representativeField *id.PropertySchemaFieldID) *SchemaGroupBuilder { + if representativeField == nil { + b.p.representativeField = nil + return b + } + representativeField2 := *representativeField + b.p.representativeField = &representativeField2 + return b +} diff --git a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go new file mode 100644 index 000000000..acd92277a --- /dev/null +++ b/pkg/property/schema_group_builder_test.go @@ -0,0 +1,124 @@ +package property + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchemaGroupBuilder_Build(t *testing.T) { + sid := id.MustPropertySchemaID("xx/aa") + gid := id.PropertySchemaFieldID("xx") + sf := NewSchemaField().ID("ff").Type(ValueTypeString).MustBuild() + testCases := []struct { + Name string + Id id.PropertySchemaFieldID + Sid id.PropertySchemaID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + Expected struct { + Id id.PropertySchemaFieldID + Sid id.PropertySchemaID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + } + Err error + }{ + { + Name: "fail: invalid id", + Err: id.ErrInvalidID, + }, + { + Name: "success", + Id: gid, + Sid: sid, + Fields: []*SchemaField{sf, nil, sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFromUnsafe("abc"), + }, + Title: i18n.StringFrom("tt"), + Expected: struct { + Id id.PropertySchemaFieldID + Sid id.PropertySchemaID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + }{ + Id: gid, + Sid: sid, + Fields: []*SchemaField{sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFromUnsafe("abc"), + }, + Title: i18n.StringFrom("tt"), + }, + }, + { + Name: "success: nil name", + Id: gid, + Sid: sid, + Fields: []*SchemaField{sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFromUnsafe("abc"), + }, + Title: i18n.StringFrom("tt"), + Expected: struct { + Id id.PropertySchemaFieldID + Sid id.PropertySchemaID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + }{ + Id: gid, + Sid: sid, + Fields: []*SchemaField{sf}, + List: true, + IsAvailableIf: &Condition{ + Field: "ff", + Value: ValueTypeString.ValueFromUnsafe("abc"), + }, + Title: i18n.StringFrom("tt"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) 
{ + tt.Parallel() + res, err := NewSchemaGroup(). + ID(tc.Id). + Schema(tc.Sid). + Fields(tc.Fields). + IsList(tc.List). + Title(tc.Title). + IsAvailableIf(tc.IsAvailableIf). + Build() + if err == nil { + assert.Equal(tt, tc.Expected.IsAvailableIf, res.IsAvailableIf()) + assert.Equal(tt, tc.Expected.Sid, res.Schema()) + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.Title, res.Title()) + assert.Equal(tt, tc.Expected.List, res.IsList()) + assert.Equal(tt, tc.Expected.Fields, res.Fields()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go new file mode 100644 index 000000000..e9ddd01ca --- /dev/null +++ b/pkg/property/schema_group_test.go @@ -0,0 +1,118 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchemaGroup(t *testing.T) { + scid := id.PropertySchemaFieldID("aa") + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + + testCases := []struct { + Name string + G *SchemaGroup + Expected struct { + GIDRef *id.PropertySchemaFieldID + SIDRef *id.PropertySchemaID + GID id.PropertySchemaFieldID + SID id.PropertySchemaID + Fields []*SchemaField + Title i18n.String + IsAvailableIf *Condition + IsList bool + } + }{ + { + Name: "nil schema group", + }, + { + Name: "success", + G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), + Expected: struct { + GIDRef *id.PropertySchemaFieldID + SIDRef *id.PropertySchemaID + GID id.PropertySchemaFieldID + SID id.PropertySchemaID + Fields []*SchemaField + Title i18n.String + IsAvailableIf *Condition + IsList bool + }{ + GIDRef: scid.Ref(), + SIDRef: sid.Ref(), + GID: scid, + SID: sid, + Fields: []*SchemaField{sf}, + Title: make(i18n.String), + }, + }, + } + + for _, tc := range 
testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.Expected.GID, tc.G.ID()) + assert.Equal(tt, tc.Expected.GIDRef, tc.G.IDRef()) + assert.Equal(tt, tc.Expected.SID, tc.G.Schema()) + assert.Equal(tt, tc.Expected.SIDRef, tc.G.SchemaRef()) + assert.Equal(tt, tc.Expected.Fields, tc.G.Fields()) + assert.Equal(tt, tc.Expected.IsList, tc.G.IsList()) + assert.Equal(tt, tc.Expected.IsAvailableIf, tc.G.IsAvailableIf()) + assert.Equal(tt, tc.Expected.Title, tc.G.Title()) + }) + } +} + +func TestSchemaGroup_Field(t *testing.T) { + scid := id.PropertySchemaFieldID("aa") + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + + testCases := []struct { + Name string + G *SchemaGroup + PTR *Pointer + Input id.PropertySchemaFieldID + Expected *SchemaField + }{ + { + Name: "nil schema group", + }, + { + Name: "found", + G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), + PTR: NewPointer(nil, nil, sf.ID().Ref()), + Input: sf.ID(), + Expected: sf, + }, + { + Name: "not found", + G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), + PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), + Input: id.PropertySchemaFieldID("zz"), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.G.Field(tc.Input)) + assert.Equal(tt, tc.Expected, tc.G.FieldByPointer(tc.PTR)) + assert.Equal(tt, tc.Expected != nil, tc.G.HasField(tc.Input)) + }) + } +} + +func TestSchemaGroup_SetTitle(t *testing.T) { + sg := NewSchemaGroup().ID(id.PropertySchemaFieldID("aa")).Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg.SetTitle(i18n.StringFrom("ttt")) + assert.Equal(t, i18n.StringFrom("ttt"), sg.Title()) +} diff --git a/pkg/property/schema_list.go b/pkg/property/schema_list.go new file mode 100644 index 
000000000..8666f60c8 --- /dev/null +++ b/pkg/property/schema_list.go @@ -0,0 +1,65 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +type SchemaList []*Schema + +func (l SchemaList) Map() SchemaMap { + return SchemaMapFrom(l) +} + +type SchemaMap map[id.PropertySchemaID]*Schema + +func SchemaMapFrom(l []*Schema) SchemaMap { + m := make(SchemaMap, len(l)) + m.Add(l...) + return m +} + +func (m SchemaMap) Add(schemas ...*Schema) { + if m == nil { + return + } + for _, p := range schemas { + if p == nil { + continue + } + m[p.ID()] = p + } +} + +func (m SchemaMap) List() SchemaList { + if m == nil { + return nil + } + list := make(SchemaList, 0, len(m)) + for _, l := range m { + list = append(list, l) + } + return list +} + +func (m SchemaMap) Clone() SchemaMap { + if m == nil { + return SchemaMap{} + } + m2 := make(SchemaMap, len(m)) + for k, v := range m { + m2[k] = v + } + return m2 +} + +func (m SchemaMap) Merge(m2 SchemaMap) SchemaMap { + if m == nil { + return nil + } + m3 := m.Clone() + if m2 == nil { + return m3 + } + + m3.Add(m2.List()...) 
+ + return m3 +} diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go new file mode 100644 index 000000000..efc677612 --- /dev/null +++ b/pkg/property/schema_test.go @@ -0,0 +1,142 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchema_Nil(t *testing.T) { + var s *Schema + assert.Nil(t, s.IDRef()) + assert.Nil(t, s.Fields()) + assert.Nil(t, s.Groups()) + assert.Equal(t, LinkableFields{}, s.LinkableFields()) +} + +func TestSchema_Field(t *testing.T) { + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() + + testCases := []struct { + Name string + S *Schema + PTR *Pointer + Input id.PropertySchemaFieldID + Expected *SchemaField + }{ + { + Name: "nil schema", + }, + { + Name: "found", + S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + PTR: NewPointer(nil, nil, sf.ID().Ref()), + Input: sf.ID(), + Expected: sf, + }, + { + Name: "not found", + S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), + Input: id.PropertySchemaFieldID("zz"), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.S.Field(tc.Input)) + assert.Equal(tt, tc.Expected, tc.S.FieldByPointer(tc.PTR)) + }) + } +} + +func TestSchema_Group(t *testing.T) { + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() + + testCases := []struct { + Name string + S *Schema + PTR *Pointer + Input, InputField id.PropertySchemaFieldID + Expected *SchemaGroup + }{ + { + Name: "nil schema", + }, + { + Name: "found", + S: 
NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + PTR: NewPointer(sg.IDRef(), nil, sf.ID().Ref()), + InputField: sf.ID(), + Input: sg.ID(), + Expected: sg, + }, + { + Name: "not found", + S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), + Input: id.PropertySchemaFieldID("zz"), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.S.Group(tc.Input)) + assert.Equal(tt, tc.Expected, tc.S.GroupByPointer(tc.PTR)) + assert.Equal(tt, tc.Expected, tc.S.GroupByField(tc.InputField)) + }) + } +} + +func TestSchema_DetectDuplicatedFields(t *testing.T) { + sid := id.MustPropertySchemaID("xx/aa") + sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() + + testCases := []struct { + Name string + S *Schema + LF LinkableFields + Expected bool + }{ + { + Name: "nil schema", + }, + { + Name: "invalid: URL", + S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + LF: LinkableFields{URL: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + Expected: false, + }, + { + Name: "invalid: Lng", + S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + LF: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + Expected: false, + }, + { + Name: "success", + S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + LF: LinkableFields{}, + Expected: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.LF.Validate(tc.S) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/property/sealed.go b/pkg/property/sealed.go new file mode 100644 index 000000000..a876f81d4 --- /dev/null +++ b/pkg/property/sealed.go @@ -0,0 +1,264 @@ +package property + +import ( + "context" + + 
"github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" +) + +type Sealed struct { + Original *id.PropertyID + Parent *id.PropertyID + Schema id.PropertySchemaID + LinkedDataset *id.DatasetID + Items []*SealedItem +} + +type SealedItem struct { + Original *id.PropertyItemID + Parent *id.PropertyItemID + SchemaGroup id.PropertySchemaFieldID + LinkedDataset *id.DatasetID + Fields []*SealedField + Groups []*SealedItem +} + +type SealedField struct { + ID id.PropertySchemaFieldID + Type ValueType + DatasetValue *dataset.Value + PropertyValue *Value +} + +func Seal(ctx context.Context, p *Merged, d dataset.GraphLoader) (*Sealed, error) { + if p == nil { + return nil, nil + } + items := make([]*SealedItem, 0, len(p.Groups)) + for _, g := range p.Groups { + i, err := sealedItemFrom(ctx, g, d) + if err != nil { + return nil, err + } + items = append(items, i) + } + + return &Sealed{ + Original: p.Original.CopyRef(), + Parent: p.Parent.CopyRef(), + Schema: p.Schema, + LinkedDataset: p.LinkedDataset.CopyRef(), + Items: items, + }, nil +} + +func SealProperty(ctx context.Context, p *Property) *Sealed { + if p == nil { + return nil + } + m := Merge(p, nil, nil) + s, _ := Seal(ctx, m, nil) + return s +} + +func sealedItemFrom(ctx context.Context, g *MergedGroup, d dataset.GraphLoader) (item *SealedItem, err error) { + if g == nil { + return + } + + item = &SealedItem{ + Original: g.Original.CopyRef(), + Parent: g.Parent.CopyRef(), + SchemaGroup: g.SchemaGroup, + LinkedDataset: g.LinkedDataset.CopyRef(), + } + + if len(g.Groups) > 0 { + item.Groups, err = sealedGroupList(ctx, g.Groups, d) + } else if len(g.Fields) > 0 { + item.Fields, err = sealedGroup(ctx, g.Fields, d) + } + + return +} + +func sealedGroupList(ctx context.Context, gl []*MergedGroup, d dataset.GraphLoader) ([]*SealedItem, error) { + res := make([]*SealedItem, 0, len(gl)) + for _, g := range gl { + sg, err := sealedItemFrom(ctx, g, d) + if err != nil { + return nil, err + } 
+ res = append(res, sg) + } + return res, nil +} + +func sealedGroup(ctx context.Context, fields []*MergedField, d dataset.GraphLoader) ([]*SealedField, error) { + res := []*SealedField{} + for _, f := range fields { + dv, err := f.DatasetValue(ctx, d) + if err != nil { + return nil, err + } + res = append(res, &SealedField{ + ID: f.ID, + Type: f.Type, + PropertyValue: f.Value.Clone(), + DatasetValue: dv.Clone(), + }) + } + return res, nil +} + +func (s *Sealed) Interface() map[string]interface{} { + if s == nil { + return nil + } + + res := map[string]interface{}{} + for _, item := range s.Items { + i := item.Interface() + if i != nil { + res[item.SchemaGroup.String()] = i + } + } + + return res +} + +func (s *SealedItem) Interface() interface{} { + if s == nil { + return nil + } + + if len(s.Groups) > 0 { + items := make([]map[string]interface{}, 0, len(s.Groups)) + for _, g := range s.Groups { + i := sealedFieldsInterface(g.Fields) + if g.Original != nil { + i["id"] = g.Original.String() + } + items = append(items, i) + } + return items + } + + return sealedFieldsInterface(s.Fields) +} + +func sealedFieldsInterface(fields []*SealedField) map[string]interface{} { + item := map[string]interface{}{} + + for _, f := range fields { + var v interface{} + if f.DatasetValue != nil { + v = f.DatasetValue.Interface() + } else { + v = f.PropertyValue.Interface() + } + item[f.ID.String()] = v + } + + return item +} + +func (s *Sealed) Item(i id.PropertyItemID) *SealedItem { + if s == nil { + return nil + } + for _, item := range s.Items { + if item.Match(i) { + return item + } + if g := item.Group(i); g != nil { + return g + } + } + return nil +} + +func (s *Sealed) ItemBy(ptr *Pointer) *SealedItem { + if s == nil || ptr == nil { + return nil + } + if sg, ok := ptr.ItemBySchemaGroup(); ok { + return s.ItemBySchemaGroup(sg) + } + if i, ok := ptr.Item(); ok { + return s.Item(i) + } + return nil +} + +func (s *Sealed) ItemBySchemaGroup(i id.PropertySchemaFieldID) *SealedItem { 
+ if s == nil { + return nil + } + for _, item := range s.Items { + if item.SchemaGroup == i { + return item + } + } + return nil +} + +func (s *Sealed) Field(id id.PropertySchemaFieldID) *SealedField { + if s == nil { + return nil + } + for _, i := range s.Items { + if f := i.Field(id); f != nil { + return f + } + } + return nil +} + +func (s *Sealed) FieldBy(ptr *Pointer) *SealedField { + if s == nil || ptr == nil { + return nil + } + if sg, f, ok := ptr.FieldBySchemaGroup(); ok { + return s.ItemBySchemaGroup(sg).Field(f) + } + if i, f, ok := ptr.FieldByItem(); ok { + return s.Item(i).Field(f) + } + if f, ok := ptr.Field(); ok { + return s.Field(f) + } + return nil +} + +func (s *SealedItem) Match(id id.PropertyItemID) bool { + if s == nil { + return false + } + return s.Original != nil && *s.Original == id || s.Parent != nil && *s.Parent == id +} + +func (s *SealedItem) Group(id id.PropertyItemID) *SealedItem { + if s == nil { + return nil + } + for _, g := range s.Groups { + if g.Match(id) { + return g + } + } + return nil +} + +func (s *SealedItem) Field(id id.PropertySchemaFieldID) *SealedField { + if s == nil { + return nil + } + for _, f := range s.Fields { + if f.ID == id { + return f + } + } + return nil +} diff --git a/pkg/property/sealed_test.go b/pkg/property/sealed_test.go new file mode 100644 index 000000000..aefc4f525 --- /dev/null +++ b/pkg/property/sealed_test.go @@ -0,0 +1,951 @@ +package property + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var ( + sid = id.NewSceneID() + ds = id.NewDatasetSchemaID() + df = id.NewDatasetSchemaFieldID() + d = id.NewDatasetID() + opid = id.NewPropertyID() + ppid = id.NewPropertyID() + psid = id.MustPropertySchemaID("hoge#0.1.0/fff") + psiid1 = id.PropertySchemaFieldID("x") + psiid2 = id.PropertySchemaFieldID("y") + i1id = id.NewPropertyItemID() + i2id = id.NewPropertyItemID() + 
i3id = id.NewPropertyItemID() + i4id = id.NewPropertyItemID() + i5id = id.NewPropertyItemID() +) + +func TestSeal(t *testing.T) { + testCases := []struct { + Name string + MD *Merged + DSGL dataset.GraphLoader + Expected *Sealed + Err error + }{ + { + Name: "nil group", + }, + { + Name: "seal", + MD: &Merged{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + DSGL: dataset.GraphLoaderFromMap(map[id.DatasetID]*dataset.Dataset{ + d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ + dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), + }).MustBuild(), + }), + Expected: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: 
ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Err: nil, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := Seal(context.Background(), tc.MD, tc.DSGL) + if err == nil { + assert.Equal(tt, tc.Expected, res) + } + }) + } +} + +func TestSealProperty(t *testing.T) { + pid := id.NewPropertyID() + ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + testCases := []struct { + Name string + Input *Property + Expected *Sealed + }{ + { + Name: "nil property", + }, + { + Name: "seal property", + Input: New().ID(pid).Scene(id.NewSceneID()).Schema(ps).MustBuild(), + Expected: &Sealed{ + Original: pid.Ref(), + Parent: nil, + Schema: ps, + LinkedDataset: nil, + Items: []*SealedItem{}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := SealProperty(context.Background(), tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestSealedItemFrom(t *testing.T) { + + testCases := []struct { + Name string + MG *MergedGroup + DSGL dataset.GraphLoader + Expected *SealedItem + Err error + }{ + { + Name: "nil group", + }, + { + Name: "groups != nil", + MG: &MergedGroup{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: 
id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("a"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("b"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + DSGL: dataset.GraphLoaderFromMap(map[id.DatasetID]*dataset.Dataset{ + d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ + dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), + }).MustBuild(), + }), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Err: nil, + }, + { + Name: "groups == nil", + MG: &MergedGroup{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*MergedGroup{ + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*MergedField{ + { + ID: id.PropertySchemaFieldID("a"), + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Type: ValueTypeString, + }, + { + ID: id.PropertySchemaFieldID("b"), + Value: ValueTypeString.ValueFromUnsafe("aaa"), + Links: NewLinks([]*Link{NewLink(d, ds, df)}), + Type: ValueTypeString, + }, + }, + }, + }, + }, + DSGL: dataset.GraphLoaderFromMap(map[id.DatasetID]*dataset.Dataset{ + d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ + dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), + }).MustBuild(), + }), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + 
SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Err: nil, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := sealedItemFrom(context.Background(), tc.MG, tc.DSGL) + if err == nil { + assert.Equal(tt, tc.Expected, res) + } + }) + } +} + +func TestSealed_Interface(t *testing.T) { + + testCases := []struct { + Name string + S *Sealed + Expected map[string]interface{} + }{ + { + Name: "nil sealed", + }, + { + Name: "get sealed interface", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Expected: map[string]interface{}{ + "x": []map[string]interface{}{ + { + "a": "a", + "b": "bbb", + "id": i5id.String(), + }, + }, + "y": 
map[string]interface{}{ + "a": "aaa", + "b": "bbb", + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.S.Interface() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestSealedItem_Match(t *testing.T) { + testCases := []struct { + Name string + SI *SealedItem + Input id.PropertyItemID + Expected bool + }{ + { + Name: "nil sealed", + }, + { + Name: "", + SI: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: i2id, + Expected: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.SI.Match(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestSealed_ItemBy(t *testing.T) { + + testCases := []struct { + Name string + S *Sealed + Input *Pointer + Expected *SealedItem + }{ + { + Name: "nil sealed", + }, + { + Name: "get group", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: 
&i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: NewPointer(psiid1.Ref(), i1id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + }, + { + Name: "get item", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: NewPointer(nil, i1id.Ref(), 
id.PropertySchemaFieldID("a").Ref()), + Expected: &SealedItem{ + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + }, + { + Name: "nil ptr sg", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: NewPointer(nil, nil, id.PropertySchemaFieldID("a").Ref()), + Expected: nil, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.S.ItemBy(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestSealed_FieldBy(t *testing.T) { + + testCases := []struct { + Name string + S *Sealed + Input *Pointer + Expected *SealedField + }{ + { + Name: "nil 
sealed", + }, + { + Name: "get group", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: NewPointer(psiid1.Ref(), i1id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Expected: &SealedField{ + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + }, + { + Name: "get item", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + 
Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: NewPointer(nil, i3id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Expected: &SealedField{ + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + }, + { + Name: "nil ptr sg", + S: &Sealed{ + Original: opid.Ref(), + Parent: ppid.Ref(), + Schema: psid, + LinkedDataset: &d, + Items: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i1id, + Parent: &i2id, + LinkedDataset: &d, + Groups: []*SealedItem{ + { + SchemaGroup: psiid1, + Original: &i5id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("b"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + { + SchemaGroup: psiid2, + Original: &i3id, + Parent: &i4id, + LinkedDataset: &d, + Fields: []*SealedField{ + { + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + { + ID: "b", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + }, + }, + }, + }, + }, + Input: NewPointer(nil, nil, id.PropertySchemaFieldID("a").Ref()), + Expected: &SealedField{ + ID: "a", + Type: ValueTypeString, + PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.S.FieldBy(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/property/validator.go b/pkg/property/validator.go new file mode 100644 index 000000000..59b05da72 --- /dev/null 
+++ b/pkg/property/validator.go @@ -0,0 +1,28 @@ +package property + +import ( + "context" + "fmt" +) + +type Validator struct { + SchemaLoader SchemaLoader +} + +func (v Validator) Validate(ctx context.Context, properties List) error { + schemaIDs := properties.Schemas() + schemas, err := v.SchemaLoader(ctx, schemaIDs...) + if err != nil { + return err + } + schemaMap := schemas.Map() + + for _, p := range properties { + schema := schemaMap[p.Schema()] + if err := p.ValidateSchema(schema); err != nil { + return fmt.Errorf("invalid property: %s (%s): %w", p.ID(), p.Schema(), err) + } + } + + return nil +} diff --git a/pkg/property/value.go b/pkg/property/value.go new file mode 100644 index 000000000..e22c1d3c4 --- /dev/null +++ b/pkg/property/value.go @@ -0,0 +1,240 @@ +package property + +type ValueInner interface { + Value() *Value +} + +// LatLng _ +type LatLng struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` +} + +// Clone _ +func (l *LatLng) Clone() *LatLng { + if l == nil { + return nil + } + return &LatLng{ + Lat: l.Lat, + Lng: l.Lng, + } +} + +// LatLngHeight _ +type LatLngHeight struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` + Height float64 `json:"height" mapstructure:"height"` +} + +// Clone _ +func (l *LatLngHeight) Clone() *LatLngHeight { + if l == nil { + return nil + } + return &LatLngHeight{ + Lat: l.Lat, + Lng: l.Lng, + Height: l.Height, + } +} + +// Camera _ +type Camera struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` + Altitude float64 `json:"altitude" mapstructure:"altitude"` + Heading float64 `json:"heading" mapstructure:"heading"` + Pitch float64 `json:"pitch" mapstructure:"pitch"` + Roll float64 `json:"roll" mapstructure:"roll"` + FOV float64 `json:"fov" mapstructure:"fov"` +} + +// Clone _ +func (c *Camera) Clone() *Camera { + if c == nil { + return nil + } + return &Camera{ + Lat: 
c.Lat, + Lng: c.Lng, + Altitude: c.Altitude, + Heading: c.Heading, + Pitch: c.Pitch, + Roll: c.Roll, + FOV: c.FOV, + } +} + +// Typography _ +type Typography struct { + FontFamily *string `json:"fontFamily" mapstructure:"fontFamily"` + FontWeight *string `json:"fontWeight" mapstructure:"fontWeight"` + FontSize *int `json:"fontSize" mapstructure:"fontSize"` + Color *string `json:"color" mapstructure:"color"` + TextAlign *TextAlign `json:"textAlign" mapstructure:"textAlign"` + Bold *bool `json:"bold" mapstructure:"bold"` + Italic *bool `json:"italic" mapstructure:"italic"` + Underline *bool `json:"underline" mapstructure:"underline"` +} + +// Clone _ +func (t *Typography) Clone() *Typography { + if t == nil { + return nil + } + return &Typography{ + FontFamily: t.FontFamily, + FontWeight: t.FontWeight, + FontSize: t.FontSize, + Color: t.Color, + TextAlign: t.TextAlign, + Bold: t.Bold, + Italic: t.Italic, + Underline: t.Underline, + } +} + +// TextAlign _ +type TextAlign string + +const ( + // TextAlignLeft _ + TextAlignLeft TextAlign = "left" + // TextAlignCenter _ + TextAlignCenter TextAlign = "center" + // TextAlignRight _ + TextAlignRight TextAlign = "right" + // TextAlignJustify _ + TextAlignJustify TextAlign = "justify" + // TextAlignJustifyAll _ + TextAlignJustifyAll TextAlign = "justify_all" +) + +// TextAlignFrom _ +func TextAlignFrom(t string) (TextAlign, bool) { + switch TextAlign(t) { + case TextAlignLeft: + return TextAlignLeft, true + case TextAlignCenter: + return TextAlignCenter, true + case TextAlignRight: + return TextAlignRight, true + case TextAlignJustify: + return TextAlignJustify, true + case TextAlignJustifyAll: + return TextAlignJustifyAll, true + } + return TextAlign(""), false +} + +// TextAlignFromRef _ +func TextAlignFromRef(t *string) *TextAlign { + if t == nil { + return nil + } + var t2 TextAlign + switch TextAlign(*t) { + case TextAlignLeft: + t2 = TextAlignLeft + case TextAlignCenter: + t2 = TextAlignCenter + case TextAlignRight: + t2 
= TextAlignRight + case TextAlignJustify: + t2 = TextAlignJustify + case TextAlignJustifyAll: + t2 = TextAlignJustifyAll + default: + return nil + } + return &t2 +} + +// String _ +func (t TextAlign) String() string { + return string(t) +} + +// StringRef _ +func (t *TextAlign) StringRef() *string { + if t == nil { + return nil + } + t2 := string(*t) + return &t2 +} + +// Coordinates _ +type Coordinates []LatLngHeight + +// CoordinatesFrom generates a new Coordinates from slice such as [lon, lat, alt, lon, lat, alt, ...] +func CoordinatesFrom(coords []float64) Coordinates { + if len(coords) == 0 { + return nil + } + + r := make([]LatLngHeight, 0, len(coords)/3) + l := LatLngHeight{} + for i, c := range coords { + switch i % 3 { + case 0: + l = LatLngHeight{} + l.Lng = c + case 1: + l.Lat = c + case 2: + l.Height = c + r = append(r, l) + } + } + + return r +} + +// Polygon _ +type Polygon []Coordinates + +// Rect _ +type Rect struct { + West float64 `json:"west" mapstructure:"west"` + South float64 `json:"south" mapstructure:"south"` + East float64 `json:"east" mapstructure:"east"` + North float64 `json:"north" mapstructure:"north"` +} + +// Value _ +func (l LatLng) Value() *Value { + return ValueTypeLatLng.ValueFromUnsafe(l) +} + +// Value _ +func (l LatLngHeight) Value() *Value { + return ValueTypeLatLngHeight.ValueFromUnsafe(l) +} + +// Value _ +func (c Camera) Value() *Value { + return ValueTypeCamera.ValueFromUnsafe(c) +} + +// Value _ +func (t Typography) Value() *Value { + return ValueTypeTypography.ValueFromUnsafe(t) +} + +// Value _ +func (t Coordinates) Value() *Value { + return ValueTypeCoordinates.ValueFromUnsafe(t) +} + +// Value _ +func (t Polygon) Value() *Value { + return ValueTypePolygon.ValueFromUnsafe(t) +} + +// Value _ +func (t Rect) Value() *Value { + return ValueTypeRect.ValueFromUnsafe(t) +} diff --git a/pkg/property/value_converter.go b/pkg/property/value_converter.go new file mode 100644 index 000000000..d81937aec --- /dev/null +++ 
b/pkg/property/value_converter.go @@ -0,0 +1,41 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/dataset" + +func valueFromDataset(v *dataset.Value) (*Value, bool) { + v2 := v.Value() + switch v3 := v2.(type) { + case *dataset.LatLng: + return ValueTypeLatLng.ValueFrom(LatLng{ + Lat: v3.Lat, + Lng: v3.Lng, + }) + case *dataset.LatLngHeight: + return ValueTypeLatLngHeight.ValueFrom(LatLngHeight{ + Lat: v3.Lat, + Lng: v3.Lng, + Height: v3.Height, + }) + } + return valueTypeFromDataset(v.Type()).ValueFrom(v2) +} + +func valueTypeFromDataset(v dataset.ValueType) ValueType { + switch v { + case dataset.ValueTypeBool: + return ValueTypeBool + case dataset.ValueTypeLatLng: + return ValueTypeLatLng + case dataset.ValueTypeLatLngHeight: + return ValueTypeLatLngHeight + case dataset.ValueTypeNumber: + return ValueTypeNumber + case dataset.ValueTypeRef: + return ValueTypeRef + case dataset.ValueTypeString: + return ValueTypeString + case dataset.ValueTypeURL: + return ValueTypeURL + } + return ValueType("") +} diff --git a/pkg/property/value_converter_test.go b/pkg/property/value_converter_test.go new file mode 100644 index 000000000..87abbdf7c --- /dev/null +++ b/pkg/property/value_converter_test.go @@ -0,0 +1,120 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/stretchr/testify/assert" +) + +func TestValueFromDataset(t *testing.T) { + testCases := []struct { + Name string + Input *dataset.Value + Expected struct { + V *Value + Ok bool + } + }{ + { + Name: "latlng", + Input: dataset.ValueFrom(dataset.LatLng{ + Lat: 10, + Lng: 12, + }), + Expected: struct { + V *Value + Ok bool + }{ + V: ValueTypeLatLng.ValueFromUnsafe(LatLng{ + Lat: 10, + Lng: 12, + }), + Ok: true, + }, + }, + { + Name: "LatLngHeight", + Input: dataset.ValueFrom(dataset.LatLngHeight{ + Lat: 10, + Lng: 12, + Height: 14, + }), + Expected: struct { + V *Value + Ok bool + }{ + V: 
ValueTypeLatLngHeight.ValueFromUnsafe(LatLngHeight{ + Lat: 10, + Lng: 12, + Height: 14, + }), + Ok: true, + }, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, ok := valueFromDataset(tc.Input) + assert.Equal(tt, tc.Expected.V, res) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValueTypeFromDataset(t *testing.T) { + testCases := []struct { + Name string + Input dataset.ValueType + Expected ValueType + }{ + { + Name: "latlng", + Input: dataset.ValueTypeLatLng, + Expected: ValueTypeLatLng, + }, + { + Name: "latlngheight", + Input: dataset.ValueTypeLatLngHeight, + Expected: ValueTypeLatLngHeight, + }, + { + Name: "string", + Input: dataset.ValueTypeString, + Expected: ValueTypeString, + }, + { + Name: "bool", + Input: dataset.ValueTypeBool, + Expected: ValueTypeBool, + }, + { + Name: "ref", + Input: dataset.ValueTypeRef, + Expected: ValueTypeRef, + }, + { + Name: "url", + Input: dataset.ValueTypeURL, + Expected: ValueTypeURL, + }, + { + Name: "number", + Input: dataset.ValueTypeNumber, + Expected: ValueTypeNumber, + }, + { + Name: "undefined", + Input: dataset.ValueType("xxx"), + Expected: ValueType(""), + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := valueTypeFromDataset(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/property/value_test.go b/pkg/property/value_test.go new file mode 100644 index 000000000..ce09faa2b --- /dev/null +++ b/pkg/property/value_test.go @@ -0,0 +1,354 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func getStrRef(i string) *string { + return &i +} +func getBoolRef(i bool) *bool { + return &i +} + +func TestLatLng_Clone(t *testing.T) { + testCases := []struct { + Name string + LL, Expected *LatLng + }{ + { + Name: "nil latlng", + }, + { + Name: "cloned", + LL: &LatLng{ + Lat: 10, + Lng: 11, + }, + Expected: &LatLng{ + Lat: 10, + Lng: 11, + }, + 
}, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.LL.Clone() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestLatLngHeight_Clone(t *testing.T) { + testCases := []struct { + Name string + LL, Expected *LatLngHeight + }{ + { + Name: "nil LatLngHeight", + }, + { + Name: "cloned", + LL: &LatLngHeight{ + Lat: 10, + Lng: 11, + Height: 12, + }, + Expected: &LatLngHeight{ + Lat: 10, + Lng: 11, + Height: 12, + }, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.LL.Clone() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestCamera_Clone(t *testing.T) { + testCases := []struct { + Name string + Camera, Expected *Camera + }{ + { + Name: "nil Camera", + }, + { + Name: "cloned", + Camera: &Camera{ + Lat: 1, + Lng: 1, + Altitude: 2, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Expected: &Camera{ + Lat: 1, + Lng: 1, + Altitude: 2, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Camera.Clone() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestTypography_Clone(t *testing.T) { + + i := 10 + + testCases := []struct { + Name string + Typography, Expected *Typography + }{ + { + Name: "nil typography", + }, + { + Name: "cloned", + Typography: &Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: &i, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef(TextAlignCenter.String())), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + }, + Expected: &Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: &i, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef("center")), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + }, + }, + } + for _, tc := range testCases { + 
t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Typography.Clone() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestTextAlignFrom(t *testing.T) { + testCases := []struct { + Name string + Expected struct { + TA TextAlign + Bool bool + } + }{ + { + Name: "left", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignLeft, + Bool: true, + }, + }, + { + Name: "right", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignRight, + Bool: true, + }, + }, + { + Name: "center", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignCenter, + Bool: true, + }, + }, + { + Name: "justify", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignJustify, + Bool: true, + }, + }, + { + Name: "justify_all", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignJustifyAll, + Bool: true, + }, + }, + { + Name: "undefined", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlign(""), + Bool: false, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, ok := TextAlignFrom(tc.Name) + assert.Equal(tt, tc.Expected.TA, res) + assert.Equal(tt, tc.Expected.Bool, ok) + }) + } +} + +func TestTextAlignFromRef(t *testing.T) { + ja := TextAlignJustifyAll + j := TextAlignJustify + c := TextAlignCenter + l := TextAlignLeft + r := TextAlignRight + testCases := []struct { + Name string + Input *string + Expected *TextAlign + }{ + { + Name: "left", + Input: getStrRef("left"), + Expected: &l, + }, + { + Name: "right", + Input: getStrRef("right"), + Expected: &r, + }, + { + Name: "center", + Input: getStrRef("center"), + Expected: &c, + }, + { + Name: "justify", + Input: getStrRef("justify"), + Expected: &j, + }, + { + Name: "justify_all", + Input: getStrRef("justify_all"), + Expected: &ja, + }, + { + Name: "undefined", + Input: getStrRef("undefined"), + }, + { + Name: "nil input", + }, + } + + for _, tc := range testCases { + tc 
:= tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := TextAlignFromRef(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestTextAlign_StringRef(t *testing.T) { + var ta *TextAlign + assert.Nil(t, ta.StringRef()) +} + +func TestValue(t *testing.T) { + ll := LatLng{ + Lat: 1, + Lng: 2, + } + assert.True(t, ValueTypeLatLng.ValidateValue(ll.Value())) + + llh := LatLngHeight{ + Lat: 1, + Lng: 2, + Height: 3, + } + assert.True(t, ValueTypeLatLngHeight.ValidateValue(llh.Value())) + + ca := Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + } + assert.True(t, ValueTypeCamera.ValidateValue(ca.Value())) + + ty := Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: nil, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef(TextAlignCenter.String())), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + } + assert.True(t, ValueTypeTypography.ValidateValue(ty.Value())) + + co := Coordinates{ + llh, + } + assert.True(t, ValueTypeCoordinates.ValidateValue(co.Value())) + + po := Polygon{ + co, + } + assert.True(t, ValueTypePolygon.ValidateValue(po.Value())) + + rc := Rect{ + West: 10, + South: 3, + East: 5, + North: 2, + } + assert.True(t, ValueTypeRect.ValidateValue(rc.Value())) +} diff --git a/pkg/property/value_type.go b/pkg/property/value_type.go new file mode 100644 index 000000000..19f27fe02 --- /dev/null +++ b/pkg/property/value_type.go @@ -0,0 +1,583 @@ +package property + +import ( + "encoding/json" + "fmt" + "net/url" + + "github.com/mitchellh/mapstructure" + "github.com/reearth/reearth-backend/pkg/id" +) + +// ValueType _ +type ValueType string + +const ( + // ValueTypeBool _ + ValueTypeBool ValueType = "bool" + // ValueTypeNumber _ + ValueTypeNumber ValueType = "number" + // ValueTypeString _ + ValueTypeString ValueType = "string" + // ValueTypeRef _ + ValueTypeRef ValueType = "ref" + // ValueTypeURL _ + ValueTypeURL 
// ValueType names the dynamic type carried by a property Value.
type ValueType string

// All value types supported by properties.
const (
	ValueTypeBool         ValueType = "bool"
	ValueTypeNumber       ValueType = "number"
	ValueTypeString       ValueType = "string"
	ValueTypeRef          ValueType = "ref"
	ValueTypeURL          ValueType = "url"
	ValueTypeLatLng       ValueType = "latlng"
	ValueTypeLatLngHeight ValueType = "latlngheight"
	ValueTypeCamera       ValueType = "camera"
	ValueTypeTypography   ValueType = "typography"
	ValueTypeCoordinates  ValueType = "coordinates"
	ValueTypePolygon      ValueType = "polygon"
	ValueTypeRect         ValueType = "rect"
)

// ValueTypeFrom converts a string into a known ValueType.
// The second result is false when t does not name a supported type.
func ValueTypeFrom(t string) (ValueType, bool) {
	switch ValueType(t) {
	case ValueTypeBool:
		return ValueTypeBool, true
	case ValueTypeNumber:
		return ValueTypeNumber, true
	case ValueTypeString:
		return ValueTypeString, true
	case ValueTypeRef:
		return ValueTypeRef, true
	case ValueTypeURL:
		return ValueTypeURL, true
	case ValueTypeLatLng:
		return ValueTypeLatLng, true
	case ValueTypeLatLngHeight:
		return ValueTypeLatLngHeight, true
	case ValueTypeCamera:
		return ValueTypeCamera, true
	case ValueTypeTypography:
		return ValueTypeTypography, true
	case ValueTypeCoordinates:
		return ValueTypeCoordinates, true
	case ValueTypePolygon:
		return ValueTypePolygon, true
	case ValueTypeRect:
		return ValueTypeRect, true
	}
	return ValueType(""), false
}

// Validate reports whether t is one of the supported value types.
// The type itself is returned unchanged either way.
func (t ValueType) Validate() (ValueType, bool) {
	switch t {
	case ValueTypeBool:
		fallthrough
	case ValueTypeNumber:
		fallthrough
	case ValueTypeString:
		fallthrough
	case ValueTypeRef:
		fallthrough
	case ValueTypeURL:
		fallthrough
	case ValueTypeLatLng:
		fallthrough
	case ValueTypeLatLngHeight:
		fallthrough
	case ValueTypeCamera:
		fallthrough
	case ValueTypeTypography:
		fallthrough
	case ValueTypeCoordinates:
		fallthrough
	case ValueTypePolygon:
		fallthrough
	case ValueTypeRect:
		return t, true
	}
	return t, false
}

// Value is a dynamically typed property value: a payload v tagged with its
// ValueType t. All accessors are nil-receiver safe.
type Value struct {
	v interface{} // payload; concrete type is dictated by t
	t ValueType   // type tag used by accessors and marshalling
}

// IsEmpty reports whether the value is nil or carries no payload.
func (v *Value) IsEmpty() bool {
	return v == nil || v.v == nil
}

// Clone returns a copy of the value (nil in, nil out).
// The copy is made by re-running ValueFrom on the current payload.
func (v *Value) Clone() *Value {
	if v == nil {
		return nil
	}
	return v.t.ValueFromUnsafe(v.v)
}

// Value returns the raw payload, or nil if v is nil.
func (v *Value) Value() interface{} {
	if v == nil {
		return nil
	}
	return v.v
}

// ValueBool returns the payload as a bool; ok is false if v is nil or the
// payload has a different type.
func (v *Value) ValueBool() (vv bool, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(bool)
	return
}

// ValueNumber returns the payload as a float64 (numbers are always stored
// as float64); ok is false on nil or type mismatch.
func (v *Value) ValueNumber() (vv float64, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(float64)
	return
}

// ValueString returns the payload as a string; ok is false on nil or type
// mismatch.
func (v *Value) ValueString() (vv string, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(string)
	return
}

// ValueRef returns the payload as an id.ID; ok is false on nil or type
// mismatch.
func (v *Value) ValueRef() (vv id.ID, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(id.ID)
	return
}

// ValueURL returns the payload as a *url.URL; ok is false on nil or type
// mismatch.
func (v *Value) ValueURL() (vv *url.URL, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(*url.URL)
	return
}

// ValueLatLng returns the payload as a LatLng; ok is false on nil or type
// mismatch.
func (v *Value) ValueLatLng() (vv LatLng, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(LatLng)
	return
}

// ValueLatLngHeight returns the payload as a LatLngHeight; ok is false on
// nil or type mismatch.
func (v *Value) ValueLatLngHeight() (vv LatLngHeight, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(LatLngHeight)
	return
}

// ValueCamera returns the payload as a Camera; ok is false on nil or type
// mismatch.
func (v *Value) ValueCamera() (vv Camera, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(Camera)
	return
}

// ValueTypography returns the payload as a Typography; ok is false on nil
// or type mismatch.
func (v *Value) ValueTypography() (vv Typography, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(Typography)
	return
}

// ValueCoordinates returns the payload as Coordinates; ok is false on nil
// or type mismatch.
func (v *Value) ValueCoordinates() (vv Coordinates, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(Coordinates)
	return
}

// ValuePolygon returns the payload as a Polygon; ok is false on nil or
// type mismatch.
func (v *Value) ValuePolygon() (vv Polygon, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(Polygon)
	return
}

// ValueRect returns the payload as a Rect; ok is false on nil or type
// mismatch.
func (v *Value) ValueRect() (vv Rect, ok bool) {
	if v == nil {
		return
	}
	vv, ok = v.v.(Rect)
	return
}

// Type returns the value's type tag, or the empty ValueType if v is nil.
func (v *Value) Type() ValueType {
	if v == nil {
		return ValueType("")
	}
	return v.t
}

// ValueFromUnsafe is like ValueFrom but discards the ok flag, returning
// nil on failure.
func (t ValueType) ValueFromUnsafe(v interface{}) *Value {
	v2, _ := t.ValueFrom(v)
	return v2
}

// MustBeValue is like ValueFrom but panics when the input is incompatible
// with the type. Intended for programmer-controlled inputs only.
func (t ValueType) MustBeValue(v interface{}) *Value {
	v2, ok := t.ValueFrom(v)
	if !ok {
		panic("incompatible value for property value")
	}
	return v2
}

// ValueFrom coerces an arbitrary input into a Value of type t.
// It returns (nil, false) when t is empty or the input cannot be coerced,
// and (nil, true) for a nil input (an empty but valid value).
// Each struct-like type accepts its concrete type, a non-nil pointer to
// it, or any map/struct decodable by mapstructure; Coordinates also
// accepts a flat []float64 (see CoordinatesFrom).
func (t ValueType) ValueFrom(v interface{}) (*Value, bool) {
	if t == "" {
		return nil, false
	}
	if v == nil {
		return nil, true
	}

	switch t {
	case ValueTypeBool:
		if v2, ok := v.(bool); ok {
			return &Value{v: v2, t: ValueTypeBool}, true
		}
	case ValueTypeNumber:
		// json.Number is handled first so JSON-decoded input keeps precision
		// handling consistent; ints are widened to float64.
		if v2, ok := v.(json.Number); ok {
			if v3, err := v2.Float64(); err == nil {
				return &Value{v: v3, t: ValueTypeNumber}, true
			}
		} else if v2, ok := v.(float64); ok {
			return &Value{v: v2, t: ValueTypeNumber}, true
		} else if v2, ok := v.(int); ok {
			return &Value{v: float64(v2), t: ValueTypeNumber}, true
		}
	case ValueTypeString:
		if v2, ok := v.(string); ok {
			return &Value{v: v2, t: ValueTypeString}, true
		}
	case ValueTypeRef:
		// Accepts an id.ID directly or a string parseable as an ID.
		if v2, ok := v.(id.ID); ok {
			return &Value{v: v2, t: ValueTypeRef}, true
		} else if v2, ok := v.(string); ok {
			if id, err := id.NewIDWith(v2); err == nil {
				return &Value{v: id, t: ValueTypeRef}, true
			}
		}
	case ValueTypeURL:
		if v2, ok := v.(*url.URL); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: v2, t: ValueTypeURL}, true
		} else if v2, ok := v.(string); ok {
			if u, err := url.Parse(v2); err == nil {
				return &Value{v: u, t: ValueTypeURL}, true
			}
		}
	case ValueTypeLatLng:
		if v2, ok := v.(LatLng); ok {
			return &Value{v: v2, t: ValueTypeLatLng}, true
		} else if v2, ok := v.(*LatLng); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypeLatLng}, true
		}
		// Fallback: decode maps/structs (e.g. from JSON or BSON).
		v2 := LatLng{}
		if err := mapstructure.Decode(v, &v2); err != nil {
			return nil, false
		}
		return &Value{v: v2, t: ValueTypeLatLng}, true
	case ValueTypeLatLngHeight:
		if v2, ok := v.(LatLngHeight); ok {
			return &Value{v: v2, t: ValueTypeLatLngHeight}, true
		} else if v2, ok := v.(*LatLngHeight); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypeLatLngHeight}, true
		}
		v2 := LatLngHeight{}
		if err := mapstructure.Decode(v, &v2); err != nil {
			return nil, false
		}
		return &Value{v: v2, t: ValueTypeLatLngHeight}, true
	case ValueTypeCamera:
		if v2, ok := v.(Camera); ok {
			return &Value{v: v2, t: ValueTypeCamera}, true
		} else if v2, ok := v.(*Camera); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypeCamera}, true
		}
		v2 := Camera{}
		if err := mapstructure.Decode(v, &v2); err != nil {
			return nil, false
		}
		return &Value{v: v2, t: ValueTypeCamera}, true
	case ValueTypeTypography:
		if v2, ok := v.(Typography); ok {
			return &Value{v: v2, t: ValueTypeTypography}, true
		} else if v2, ok := v.(*Typography); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypeTypography}, true
		}
		v2 := Typography{}
		if err := mapstructure.Decode(v, &v2); err != nil {
			return nil, false
		}
		return &Value{v: v2, t: ValueTypeTypography}, true
	case ValueTypeCoordinates:
		if v2, ok := v.(Coordinates); ok {
			return &Value{v: v2, t: ValueTypeCoordinates}, true
		} else if v2, ok := v.(*Coordinates); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypeCoordinates}, true
		} else if v2, ok := v.([]float64); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: CoordinatesFrom(v2), t: ValueTypeCoordinates}, true
		}

		// Two decode fallbacks: first try a flat float slice, then a list of
		// point objects.
		v2 := []float64{}
		if err := mapstructure.Decode(v, &v2); err == nil {
			return &Value{v: CoordinatesFrom(v2), t: ValueTypeCoordinates}, true
		}

		v3 := Coordinates{}
		if err := mapstructure.Decode(v, &v3); err != nil {
			return nil, false
		}
		return &Value{v: v3, t: ValueTypeCoordinates}, true
	case ValueTypePolygon:
		if v2, ok := v.(Polygon); ok {
			return &Value{v: v2, t: ValueTypePolygon}, true
		} else if v2, ok := v.(*Polygon); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypePolygon}, true
		}
		v2 := Polygon{}
		if err := mapstructure.Decode(v, &v2); err != nil {
			return nil, false
		}
		return &Value{v: v2, t: ValueTypePolygon}, true
	case ValueTypeRect:
		if v2, ok := v.(Rect); ok {
			return &Value{v: v2, t: ValueTypeRect}, true
		} else if v2, ok := v.(*Rect); ok {
			if v2 == nil {
				return nil, false
			}
			return &Value{v: *v2, t: ValueTypeRect}, true
		}
		v2 := Rect{}
		if err := mapstructure.Decode(v, &v2); err != nil {
			return nil, false
		}
		return &Value{v: v2, t: ValueTypeRect}, true
	}
	return nil, false
}

// ValidateValue reports whether v's payload matches the concrete Go type
// expected for t. Nil or empty values are always considered valid.
// Note it checks the payload's type, not v's own type tag.
func (t ValueType) ValidateValue(v *Value) bool {
	if v == nil {
		return true
	}
	vv := v.Value()
	if vv == nil {
		return true
	}
	switch t {
	case ValueTypeBool:
		if _, ok := vv.(bool); ok {
			return true
		}
	case ValueTypeNumber:
		if _, ok := vv.(float64); ok {
			return true
		}
	case ValueTypeString:
		if _, ok := vv.(string); ok {
			return true
		}
	case ValueTypeRef:
		if _, ok := vv.(id.ID); ok {
			return true
		}
	case ValueTypeURL:
		if _, ok := vv.(*url.URL); ok {
			return true
		}
	case ValueTypeLatLng:
		if _, ok := vv.(LatLng); ok {
			return true
		}
	case ValueTypeLatLngHeight:
		if _, ok := vv.(LatLngHeight); ok {
			return true
		}
	case ValueTypeCamera:
		if _, ok := vv.(Camera); ok {
			return true
		}
	case ValueTypeTypography:
		if _, ok := vv.(Typography); ok {
			return true
		}
	case ValueTypeCoordinates:
		if _, ok := vv.(Coordinates); ok {
			return true
		}
	case ValueTypePolygon:
		if _, ok := vv.(Polygon); ok {
			return true
		}
	case ValueTypeRect:
		if _, ok := vv.(Rect); ok {
			return true
		}
	}
	return false
}

// MarshalJSON encodes the type as a JSON string.
// NOTE(review): a nil receiver returns (nil, nil), which encoding/json
// would report as an error if it invoked this method; in practice json
// emits null for nil pointers without calling it — confirm no direct
// callers rely on the nil result.
func (t *ValueType) MarshalJSON() ([]byte, error) {
	if t == nil {
		return nil, nil
	}
	return json.Marshal(string(*t))
}

// UnmarshalJSON decodes a JSON string into a ValueType, rejecting strings
// that do not name a supported type.
func (t *ValueType) UnmarshalJSON(bs []byte) (err error) {
	var vtstr string
	if err = json.Unmarshal(bs, &vtstr); err != nil {
		return
	}
	var ok bool
	*t, ok = ValueTypeFrom(vtstr)
	if !ok {
		return fmt.Errorf("invalid property value type: %s", vtstr)
	}
	return
}

// MarshalText encodes the type as plain text (see the nil-receiver note on
// MarshalJSON, which applies here too).
func (t *ValueType) MarshalText() ([]byte, error) {
	if t == nil {
		return nil, nil
	}
	return []byte(*t), nil
}

// UnmarshalText decodes plain text into a ValueType, rejecting unknown
// type names.
func (t *ValueType) UnmarshalText(text []byte) (err error) {
	var ok bool
	*t, ok = ValueTypeFrom(string(text))
	if !ok {
		return fmt.Errorf("invalid property value type: %s", text)
	}
	return
}

// Interface converts the value into a generic representation built only
// from JSON-friendly types: primitives stay as-is, IDs and URLs become
// strings, and struct-like payloads are decoded into maps (slices of maps
// for Coordinates/Polygon) via mapstructure. Unknown payloads and decode
// failures yield nil.
func (v *Value) Interface() interface{} {
	if v == nil {
		return nil
	}
	switch v2 := v.Value().(type) {
	case bool:
		return v2
	case float64:
		return v2
	case string:
		return v2
	case id.ID:
		return v2.String()
	case *url.URL:
		return v2.String()
	case LatLng:
		var v3 map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	case LatLngHeight:
		var v3 map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	case Camera:
		var v3 map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	case Typography:
		var v3 map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	case Coordinates:
		var v3 []map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	case Polygon:
		var v3 [][]map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	case Rect:
		var v3 map[string]interface{}
		if err := mapstructure.Decode(&v2, &v3); err != nil {
			return nil
		}
		return v3
	}
	return nil
}

// MarshalJSON encodes the value via its generic Interface representation.
func (v *Value) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.Interface())
}
a/pkg/property/value_type_test.go b/pkg/property/value_type_test.go new file mode 100644 index 000000000..5f003f680 --- /dev/null +++ b/pkg/property/value_type_test.go @@ -0,0 +1,1426 @@ +package property + +import ( + "encoding/json" + "net/url" + "strconv" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestValueTypeFrom(t *testing.T) { + testCases := []struct { + Name, Input string + Expected struct { + V ValueType + B bool + } + }{ + { + Name: "bool", + Input: "bool", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeBool, + B: true, + }, + }, + { + Name: "number", + Input: "number", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeNumber, + B: true, + }, + }, + { + Name: "ref", + Input: "ref", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeRef, + B: true, + }, + }, + { + Name: "url", + Input: "url", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeURL, + B: true, + }, + }, + { + Name: "string", + Input: "string", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeString, + B: true, + }, + }, { + Name: "camera", + Input: "camera", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeCamera, + B: true, + }, + }, + { + Name: "bool", + Input: "bool", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeBool, + B: true, + }, + }, + { + Name: "LatLngHeight", + Input: "latlngheight", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeLatLngHeight, + B: true, + }, + }, + { + Name: "latlng", + Input: "latlng", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeLatLng, + B: true, + }, + }, + { + Name: "polygon", + Input: "polygon", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypePolygon, + B: true, + }, + }, + { + Name: "rect", + Input: "rect", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeRect, + B: true, + }, + }, + { + Name: "coordinates", + Input: "coordinates", 
+ Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeCoordinates, + B: true, + }, + }, + { + Name: "typography", + Input: "typography", + Expected: struct { + V ValueType + B bool + }{ + V: ValueTypeTypography, + B: true, + }, + }, + { + Name: "unknown", + Input: "", + Expected: struct { + V ValueType + B bool + }{ + V: ValueType(""), + B: false, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, b := ValueTypeFrom(tc.Input) + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.B, b) + + v2, b2 := v.Validate() + assert.Equal(tt, tc.Expected.V, v2) + assert.Equal(tt, tc.Expected.B, b2) + }) + } +} + +func TestValue_IsEmpty(t *testing.T) { + var v *Value + assert.True(t, v.IsEmpty()) +} + +func TestValue_Clone(t *testing.T) { + var v *Value + assert.Nil(t, v.Clone()) + v, _ = ValueTypeBool.ValueFrom(true) + assert.Equal(t, v, v.Clone()) +} + +func TestValue_Value(t *testing.T) { + var v *Value + assert.Nil(t, v.Value()) + v, _ = ValueTypeBool.ValueFrom(true) + assert.Equal(t, true, v.Value()) +} + +func TestValue_Type(t *testing.T) { + var v *Value + assert.Equal(t, ValueType(""), v.Type()) + v, _ = ValueTypeBool.ValueFrom(true) + assert.Equal(t, ValueTypeBool, v.Type()) +} + +func TestValue_ValueBool(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V, Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeBool.ValueFromUnsafe(true), + Expected: struct { + V, Ok bool + }{ + V: true, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueBool() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueString(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V string + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: 
"success", + V: ValueTypeString.ValueFromUnsafe("xxx"), + Expected: struct { + V string + Ok bool + }{V: "xxx", Ok: true}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueString() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueNumber(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V float64 + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeNumber.ValueFromUnsafe(5.5), + Expected: struct { + V float64 + Ok bool + }{V: 5.5, Ok: true}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueNumber() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueLatLng(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V LatLng + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeLatLng.ValueFromUnsafe(map[string]interface{}{ + "Lat": 1, + "Lng": 2, + }), + Expected: struct { + V LatLng + Ok bool + }{ + V: LatLng{ + Lat: 1, + Lng: 2, + }, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueLatLng() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueLatLngHeight(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V LatLngHeight + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeLatLngHeight.ValueFromUnsafe(map[string]interface{}{ + "Lat": 1, + "Lng": 2, + "Height": 22, + }), + Expected: struct { + V LatLngHeight + Ok bool + }{ + V: LatLngHeight{ + Lat: 1, + Lng: 2, + Height: 22, + }, + Ok: true, + }, + }, + } + + for _, tc := range 
testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueLatLngHeight() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueCamera(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V Camera + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeCamera.ValueFromUnsafe( + map[string]interface{}{ + "Lat": 1, + "Lng": 2, + "Altitude": 3, + "Heading": 4, + "Pitch": 5, + "Roll": 6, + "FOV": 7, + }), + Expected: struct { + V Camera + Ok bool + }{ + V: Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueCamera() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueCoordinates(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V Coordinates + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeCoordinates.ValueFromUnsafe( + []map[string]interface{}{ + { + "lat": 1, + "lng": 2, + "height": 3, + }, + }), + Expected: struct { + V Coordinates + Ok bool + }{ + V: Coordinates{ + LatLngHeight{ + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueCoordinates() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValuePolygon(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V Polygon + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypePolygon.ValueFromUnsafe( + [][]map[string]interface{}{ + { + { + "lat": 1, + "lng": 2, + "height": 3, + }, + 
}, + }), + Expected: struct { + V Polygon + Ok bool + }{ + V: []Coordinates{ + []LatLngHeight{ + { + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + }, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValuePolygon() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueRect(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V Rect + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeRect.ValueFromUnsafe( + map[string]interface{}{ + "West": 2, + "South": 3, + "East": 4, + "North": 5, + }), + Expected: struct { + V Rect + Ok bool + }{ + V: Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueRect() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueRef(t *testing.T) { + uid := id.New() + testCases := []struct { + Name string + V *Value + Expected struct { + V id.ID + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeRef.ValueFromUnsafe(uid), + Expected: struct { + V id.ID + Ok bool + }{V: uid, Ok: true}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueRef() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueURL(t *testing.T) { + testCases := []struct { + Name string + V *Value + Expected struct { + V *url.URL + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeURL.ValueFromUnsafe(map[string]interface{}{ + "Scheme": "xx", + "Opaque": "aa.hh", + "Path": "zz/vv.bb", + "ForceQuery": false, + }), + }, + } + + for _, tc := range testCases { + tc 
:= tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueURL() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_ValueTypography(t *testing.T) { + ff, fs, ts := "Times New Roman", 10, TextAlignLeft + var c, fw *string + var b, i, u *bool + + testCases := []struct { + Name string + V *Value + Expected struct { + V Typography + Ok bool + } + }{ + { + Name: "nil value", + }, + { + Name: "success", + V: ValueTypeTypography.ValueFromUnsafe(map[string]interface{}{ + "fontFamily": &ff, + "fontSize": &fs, + "textAlign": &ts, + "color": c, + "fontWeight": fw, + "bold": b, + "italic": i, + "underline": u, + }), + Expected: struct { + V Typography + Ok bool + }{ + V: Typography{ + FontFamily: &ff, + FontWeight: fw, + FontSize: &fs, + Color: c, + TextAlign: &ts, + Bold: b, + Italic: i, + Underline: u, + }, + Ok: true, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.V.ValueTypography() + assert.Equal(tt, tc.Expected.V, v) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValueType_ValueFrom(t *testing.T) { + var llh *LatLngHeight + var ll *LatLng + var ca *Camera + var rc *Rect + var cords *Coordinates + var p *Polygon + var ty *Typography + iid := id.New() + testCases := []struct { + Name string + Input interface{} + VT ValueType + Expected struct { + V interface{} + Ok bool + } + }{ + { + Name: "valueType is nil", + VT: "", + Expected: struct { + V interface{} + Ok bool + }{ + V: nil, + Ok: false, + }, + }, + { + Name: "input is nil", + VT: ValueTypeBool, + Expected: struct { + V interface{} + Ok bool + }{ + V: nil, + Ok: true, + }, + }, + { + Name: "bool", + Input: true, + VT: ValueTypeBool, + Expected: struct { + V interface{} + Ok bool + }{ + V: true, + Ok: true, + }, + }, + { + Name: "string", + Input: "xxx", + VT: ValueTypeString, + Expected: struct { + V interface{} + Ok bool + }{ + V: "xxx", + Ok: 
true, + }, + }, + { + Name: "number: json number", + Input: json.Number(strconv.FormatFloat(10, 'e', 0, 64)), + VT: ValueTypeNumber, + Expected: struct { + V interface{} + Ok bool + }{ + V: float64(10), + Ok: true, + }, + }, + { + Name: "number: float64", + Input: float64(11), + VT: ValueTypeNumber, + Expected: struct { + V interface{} + Ok bool + }{ + V: float64(11), + Ok: true, + }, + }, + { + Name: "number: int64", + Input: 12, + VT: ValueTypeNumber, + Expected: struct { + V interface{} + Ok bool + }{ + V: float64(12), + Ok: true, + }, + }, + { + Name: "ref: string", + Input: iid.String(), + VT: ValueTypeRef, + Expected: struct { + V interface{} + Ok bool + }{ + V: iid, + Ok: true, + }, + }, + { + Name: "ref: id", + Input: iid, + VT: ValueTypeRef, + Expected: struct { + V interface{} + Ok bool + }{ + V: iid, + Ok: true, + }, + }, + { + Name: "latlng", + Input: LatLng{ + Lat: 10, + Lng: 11, + }, + VT: ValueTypeLatLng, + Expected: struct { + V interface{} + Ok bool + }{ + V: LatLng{ + Lat: 10, + Lng: 11, + }, + Ok: true, + }, + }, + { + Name: "latlng: nil", + Input: ll, + VT: ValueTypeLatLng, + Expected: struct { + V interface{} + Ok bool + }{ + Ok: false, + }, + }, + { + Name: "latlng: ref", + Input: &LatLng{ + Lat: 10, + Lng: 11, + }, + VT: ValueTypeLatLng, + Expected: struct { + V interface{} + Ok bool + }{ + V: LatLng{ + Lat: 10, + Lng: 11, + }, + Ok: true, + }, + }, + { + Name: "latlng: map", + Input: map[string]interface{}{ + "lat": 10, + "lng": 11, + }, + VT: ValueTypeLatLng, + Expected: struct { + V interface{} + Ok bool + }{ + V: LatLng{ + Lat: 10, + Lng: 11, + }, + Ok: true, + }, + }, + { + Name: "latlngheight: map", + Input: map[string]interface{}{ + "lng": 11, + "lat": 12, + "height": 13, + }, + VT: ValueTypeLatLngHeight, + Expected: struct { + V interface{} + Ok bool + }{ + V: LatLngHeight{ + Lat: 12, + Lng: 11, + Height: 13, + }, + Ok: true, + }, + }, + { + Name: "latlngheight: nil", + Input: llh, + VT: ValueTypeLatLngHeight, + Expected: struct { + V 
interface{} + Ok bool + }{ + Ok: false, + }, + }, + { + Name: "latlngheight", + Input: LatLngHeight{ + Lat: 12, + Lng: 11, + Height: 13, + }, + VT: ValueTypeLatLngHeight, + Expected: struct { + V interface{} + Ok bool + }{ + V: LatLngHeight{ + Lat: 12, + Lng: 11, + Height: 13, + }, + Ok: true, + }, + }, + { + Name: "latlngheight: ref", + Input: &LatLngHeight{ + Lat: 12, + Lng: 11, + Height: 13, + }, + VT: ValueTypeLatLngHeight, + Expected: struct { + V interface{} + Ok bool + }{ + V: LatLngHeight{ + Lat: 12, + Lng: 11, + Height: 13, + }, + Ok: true, + }, + }, + { + Name: "camera: map", + Input: map[string]interface{}{ + "Lat": 1, + "Lng": 2, + "Altitude": 3, + "Heading": 4, + "Pitch": 5, + "Roll": 6, + "FOV": 7, + }, + VT: ValueTypeCamera, + Expected: struct { + V interface{} + Ok bool + }{ + V: Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Ok: true, + }, + }, + { + Name: "camera", + Input: Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + VT: ValueTypeCamera, + Expected: struct { + V interface{} + Ok bool + }{ + V: Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Ok: true, + }, + }, + { + Name: "camera: ref", + Input: &Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + VT: ValueTypeCamera, + Expected: struct { + V interface{} + Ok bool + }{ + V: Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Ok: true, + }, + }, + { + Name: "camera: nil", + Input: ca, + VT: ValueTypeCamera, + Expected: struct { + V interface{} + Ok bool + }{}, + }, + { + Name: "rect: nil", + Input: rc, + VT: ValueTypeRect, + Expected: struct { + V interface{} + Ok bool + }{}, + }, + { + Name: "rect: map", + Input: map[string]interface{}{ + "West": 2, + "South": 3, + "East": 4, + "North": 5, + }, + VT: ValueTypeRect, + Expected: struct { + V interface{} + Ok 
bool + }{ + V: Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }, + Ok: true, + }, + }, + { + Name: "rect", + Input: Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }, + VT: ValueTypeRect, + Expected: struct { + V interface{} + Ok bool + }{ + V: Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }, + Ok: true, + }, + }, + { + Name: "rect: ref", + Input: &Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }, + VT: ValueTypeRect, + Expected: struct { + V interface{} + Ok bool + }{ + V: Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }, + Ok: true, + }, + }, + { + Name: "cods: map", + Input: []map[string]interface{}{ + { + "lat": 1, + "lng": 2, + "height": 3, + }, + }, + VT: ValueTypeCoordinates, + Expected: struct { + V interface{} + Ok bool + }{ + V: Coordinates{ + { + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + Ok: true, + }, + }, + { + Name: "cods: ref", + Input: &Coordinates{ + { + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + VT: ValueTypeCoordinates, + Expected: struct { + V interface{} + Ok bool + }{ + V: Coordinates{ + { + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + Ok: true, + }, + }, + { + Name: "cods: nil", + Input: cords, + VT: ValueTypeCoordinates, + }, + { + Name: "polygon: nil", + Input: p, + VT: ValueTypePolygon, + }, + { + Name: "polygon: nil", + Input: &Polygon{ + Coordinates{ + { + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + }, + VT: ValueTypePolygon, + Expected: struct { + V interface{} + Ok bool + }{ + V: Polygon{ + Coordinates{ + { + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + }, + Ok: true, + }, + }, + { + Name: "typography: nil", + Input: ty, + VT: ValueTypeTypography, + }, + { + Name: "undefined", + Input: "ttt", + VT: "xxx", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + v, ok := tc.VT.ValueFrom(tc.Input) + assert.Equal(tt, tc.Expected.V, v.Value()) + assert.Equal(tt, tc.Expected.Ok, ok) + }) + } +} + +func TestValue_Interface(t *testing.T) { + ff, fs, ts := 
"Times New Roman", 10, TextAlignLeft + var c, fw *string + var b, i, u *bool + testCases := []struct { + Name string + V *Value + Expected interface{} + }{ + { + Name: "nil", + }, + { + Name: "undefined", + V: ValueType("uu").ValueFromUnsafe("xx"), + Expected: nil, + }, + { + Name: "bool", + V: ValueTypeBool.ValueFromUnsafe(true), + Expected: true, + }, + { + Name: "latlng", + V: ValueTypeLatLng.ValueFromUnsafe( + LatLng{ + Lat: 1, + Lng: 2, + }), + Expected: map[string]interface{}{ + "lat": 1.0, + "lng": 2.0, + }, + }, + { + Name: "Typography", + V: ValueTypeTypography.ValueFromUnsafe( + Typography{ + FontFamily: &ff, + FontWeight: fw, + FontSize: &fs, + Color: c, + TextAlign: &ts, + Bold: b, + Italic: i, + Underline: u, + }), + Expected: map[string]interface{}{ + "fontFamily": &ff, + "fontSize": &fs, + "textAlign": &ts, + "color": c, + "fontWeight": fw, + "bold": b, + "italic": i, + "underline": u, + }, + }, + { + Name: "camera", + V: ValueTypeCamera.ValueFromUnsafe( + Camera{ + Lat: 1, + Lng: 2, + Altitude: 3, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }), + Expected: map[string]interface{}{ + "lat": 1.0, + "lng": 2.0, + "altitude": 3.0, + "heading": 4.0, + "pitch": 5.0, + "roll": 6.0, + "fov": 7.0, + }, + }, + { + Name: "rect", + V: ValueTypeRect.ValueFromUnsafe( + Rect{ + West: 2, + South: 3, + East: 4, + North: 5, + }), + Expected: map[string]interface{}{ + "west": 2.0, + "south": 3.0, + "east": 4.0, + "north": 5.0, + }, + }, + { + Name: "latlngheight", + V: ValueTypeLatLngHeight.ValueFromUnsafe( + LatLngHeight{ + Lat: 1, + Lng: 2, + Height: 3, + }), + Expected: map[string]interface{}{ + "lat": 1.0, + "lng": 2.0, + "height": 3.0, + }, + }, + { + Name: "coordinates", + V: ValueTypeCoordinates.ValueFromUnsafe( + Coordinates{ + LatLngHeight{ + Lat: 1, + Lng: 2, + Height: 3, + }, + }), + Expected: []map[string]interface{}{ + { + "lat": 1.0, + "lng": 2.0, + "height": 3.0, + }, + }, + }, + { + Name: "polygon", + V: ValueTypePolygon.ValueFromUnsafe( + Polygon{ 
+ Coordinates{ + LatLngHeight{ + Lat: 1, + Lng: 2, + Height: 3, + }, + }, + }), + Expected: [][]map[string]interface{}{ + {{ + "lat": 1.0, + "lng": 2.0, + "height": 3.0, + }}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + i := tc.V.Interface() + assert.Equal(tt, tc.Expected, i) + }) + } +} diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go new file mode 100644 index 000000000..d9ebd0eea --- /dev/null +++ b/pkg/scene/builder.go @@ -0,0 +1,105 @@ +package scene + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +// Builder _ +type Builder struct { + scene *Scene +} + +// New _ +func New() *Builder { + return &Builder{scene: &Scene{}} +} + +// Build _ +func (b *Builder) Build() (*Scene, error) { + if b.scene.id.ID().IsNil() { + return nil, id.ErrInvalidID + } + if b.scene.team.ID().IsNil() { + return nil, id.ErrInvalidID + } + if b.scene.rootLayer.ID().IsNil() { + return nil, id.ErrInvalidID + } + if b.scene.widgetSystem == nil { + b.scene.widgetSystem = NewWidgetSystem(nil) + } + if b.scene.pluginSystem == nil { + b.scene.pluginSystem = NewPluginSystem(nil) + } + if b.scene.updatedAt.IsZero() { + b.scene.updatedAt = b.scene.CreatedAt() + } + return b.scene, nil +} + +// MustBuild _ +func (b *Builder) MustBuild() *Scene { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +// ID _ +func (b *Builder) ID(id id.SceneID) *Builder { + b.scene.id = id + return b +} + +// NewID _ +func (b *Builder) NewID() *Builder { + b.scene.id = id.SceneID(id.New()) + return b +} + +// Project _ +func (b *Builder) Project(prj id.ProjectID) *Builder { + b.scene.project = prj + return b +} + +// Team _ +func (b *Builder) Team(team id.TeamID) *Builder { + b.scene.team = team + return b +} + +// UpdatedAt _ +func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { + b.scene.updatedAt = updatedAt + return b +} + +// WidgetSystem _ +func (b *Builder) 
WidgetSystem(widgetSystem *WidgetSystem) *Builder { + widgetSystem2 := *widgetSystem + b.scene.widgetSystem = &widgetSystem2 + return b +} + +// RootLayer _ +func (b *Builder) RootLayer(rootLayer id.LayerID) *Builder { + b.scene.rootLayer = rootLayer + return b +} + +// PluginSystem _ +func (b *Builder) PluginSystem(pluginSystem *PluginSystem) *Builder { + pluginSystem2 := *pluginSystem + b.scene.pluginSystem = &pluginSystem2 + return b +} + +// Property _ +func (b *Builder) Property(p id.PropertyID) *Builder { + b.scene.property = p + return b +} diff --git a/pkg/scene/builder/builder.go b/pkg/scene/builder/builder.go new file mode 100644 index 000000000..2d6eff88f --- /dev/null +++ b/pkg/scene/builder/builder.go @@ -0,0 +1,78 @@ +package builder + +import ( + "context" + "encoding/json" + "io" + "time" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/encoding" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" +) + +const ( + // schema version + version = 1 +) + +type Builder struct { + ploader property.Loader + exporter *encoding.Exporter + encoder *encoder +} + +func New(ll layer.Loader, pl property.Loader, dl dataset.GraphLoader) *Builder { + e := &encoder{} + return &Builder{ + ploader: pl, + encoder: e, + exporter: &encoding.Exporter{ + Merger: &merging.Merger{ + LayerLoader: ll, + PropertyLoader: pl, + }, + Sealer: &merging.Sealer{ + DatasetGraphLoader: dl, + }, + Encoder: e, + }, + } +} + +func (b *Builder) BuildScene(ctx context.Context, w io.Writer, s *scene.Scene, publishedAt time.Time) error { + if b == nil { + return nil + } + + res, err := b.buildScene(ctx, s, publishedAt) + if err != nil { + return err + } + + return json.NewEncoder(w).Encode(res) +} + +func (b *Builder) buildScene(ctx context.Context, s *scene.Scene, publishedAt time.Time) 
(*sceneJSON, error) { + if b == nil { + return nil, nil + } + + // properties + p, err := b.ploader(ctx, s.Properties()...) + if err != nil { + return nil, err + } + + // layers + if err := b.exporter.ExportLayerByID(ctx, s.RootLayer()); err != nil { + return nil, err + } + layers := b.encoder.Result() + + res := b.scene(ctx, s, publishedAt, layers, p) + return res, nil +} diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go new file mode 100644 index 000000000..38e326981 --- /dev/null +++ b/pkg/scene/builder/builder_test.go @@ -0,0 +1,592 @@ +package builder + +import ( + "context" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/stretchr/testify/assert" +) + +func TestSceneBuilder(t *testing.T) { + // ids + sceneID := id.NewSceneID() + scenePropertyID := id.NewPropertyID() + propertySchemaID := id.MustPropertySchemaID("hoge#0.1.0/foobar") + pluginID := id.MustPluginID("hoge#0.1.0") + pluginExtension1ID := id.PluginExtensionID("ext") + pluginExtension2ID := id.PluginExtensionID("ext2") + propertySchemaField1ID := id.PropertySchemaFieldID("a") + propertySchemaField2ID := id.PropertySchemaFieldID("b") + propertySchemaField3ID := id.PropertySchemaFieldID("c") + propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + propertySchemaGroup2ID := id.PropertySchemaFieldID("B") + propertyItemID1 := id.NewPropertyItemID() + propertyItemID2 := id.NewPropertyItemID() + + // datasets + dss1id := id.NewDatasetSchemaID() + dss2id := id.NewDatasetSchemaID() + dss3id := id.NewDatasetSchemaID() + ds1id := id.NewDatasetID() + ds2id := id.NewDatasetID() + ds3id := id.NewDatasetID() + ds1f1 := id.NewDatasetSchemaFieldID() + ds1f2 := id.NewDatasetSchemaFieldID() + ds2f1 := id.NewDatasetSchemaFieldID() + ds3f1 := 
id.NewDatasetSchemaFieldID() + ds1 := dataset.New().ID(ds1id).Fields([]*dataset.Field{ + dataset.NewField( + ds1f1, + dataset.ValueTypeRef.ValueFrom(ds2id.ID()), + "ds1f1", + ), + dataset.NewField( + ds1f2, + dataset.ValueTypeString.ValueFrom("a"), + "ds1f2", + ), + }).Scene(sceneID).Schema(dss1id).Source("ds1").MustBuild() + ds2 := dataset.New().ID(ds2id).Fields([]*dataset.Field{ + dataset.NewField( + ds2f1, + dataset.ValueTypeRef.ValueFrom(ds3id.ID()), + "ds2", + ), + }).Scene(sceneID).Schema(dss2id).Source("ds2").MustBuild() + ds3 := dataset.New().ID(ds3id).Fields([]*dataset.Field{ + dataset.NewField( + ds3f1, + dataset.ValueTypeString.ValueFrom("b"), + "ds3", + ), + }).Scene(sceneID).Schema(dss3id).Source("ds3").MustBuild() + + // layer1: normal layer item + layer1p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField2ID). + TypeUnsafe(property.ValueTypeNumber). + ValueUnsafe(property.ValueTypeNumber.ValueFromUnsafe(1)). + Build(), + }).MustBuild(), + }). + MustBuild() + layer1 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer1p.IDRef()). + MustBuild() + + // layer2: normal layer group + layer21p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("yyy")). 
+ Build(), + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField2ID). + TypeUnsafe(property.ValueTypeNumber). + ValueUnsafe(property.ValueTypeNumber.ValueFromUnsafe(1)). + Build(), + }).MustBuild(), + }). + MustBuild() + layer21 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer21p.IDRef()). + MustBuild() + layer2p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("xxx")). + Build(), + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField3ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("test")). + Build(), + }).MustBuild(), + }). + MustBuild() + layer2ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(layer2p.ID()).MustBuild() + layer2ib := layer.NewInfobox([]*layer.InfoboxField{ + layer2ibf1, + }, scenePropertyID) + layer2 := layer.NewGroup(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer2p.IDRef()). + Infobox(layer2ib). + Layers(layer.NewIDList([]id.LayerID{layer21.ID()})). + MustBuild() + + // layer3: full-linked layer item with infobox + layer3p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewLink(ds2id, dss2id, ds2f1), + property.NewLink(ds3id, dss3id, ds3f1), + })). 
+ Build(), + }).MustBuild(), + }). + MustBuild() + layer3ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(scenePropertyID).MustBuild() + layer3ib := layer.NewInfobox([]*layer.InfoboxField{ + layer3ibf1, + }, scenePropertyID) + layer3 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer3p.IDRef()). + Infobox(layer3ib). + MustBuild() + + // layer4: linked layer group with infobox and children + layer41p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField2ID). + TypeUnsafe(property.ValueTypeNumber). + ValueUnsafe(property.ValueTypeNumber.ValueFromUnsafe(1)). + Build(), + }).MustBuild(), + }). + MustBuild() + layer41ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(layer41p.ID()).MustBuild() + layer41ib := layer.NewInfobox([]*layer.InfoboxField{ + layer41ibf1, + }, layer41p.ID()) + layer41 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer41p.IDRef()). + Infobox(layer41ib). + LinkedDataset(&ds3id). + MustBuild() + layer4p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss3id, ds3f1), + })). + Build(), + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField3ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("xxx")). 
+ Build(), + }).MustBuild(), + }). + MustBuild() + layer4ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(layer4p.ID()).MustBuild() + layer4ib := layer.NewInfobox([]*layer.InfoboxField{ + layer4ibf1, + }, scenePropertyID) + layer4 := layer.NewGroup(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer4p.IDRef()). + Infobox(layer4ib). + LinkedDatasetSchema(&dss3id). + Layers(layer.NewIDList([]id.LayerID{layer41.ID()})). + MustBuild() + + // layer5: linked layer group and children with overrided property + layer51p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss1id, ds1f2), + })). + Build(), + }).MustBuild(), + }). + MustBuild() + layer51 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer51p.IDRef()). + LinkedDataset(&ds1id). + MustBuild() + layer5p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss1id, ds1f1), + property.NewLinkFieldOnly(dss2id, ds2f1), + property.NewLinkFieldOnly(dss3id, ds3f1), + })). + Build(), + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField2ID). + TypeUnsafe(property.ValueTypeString). 
+ LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewLinkFieldOnly(dss1id, ds1f1), + property.NewLinkFieldOnly(dss2id, ds2f1), + property.NewLinkFieldOnly(dss3id, ds3f1), + })). + Build(), + }).MustBuild(), + }). + MustBuild() + layer5 := layer.NewGroup(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer5p.IDRef()). + LinkedDatasetSchema(&dss1id). + Layers(layer.NewIDList([]id.LayerID{layer51.ID()})). + MustBuild() + layer6p := property.New(). + NewID(). + Scene(sceneID). + Schema(propertySchemaID). + Items([]property.Item{ + property.NewGroupList().NewID().Schema(propertySchemaID, propertySchemaGroup2ID).Groups([]*property.Group{ + property.NewGroup().ID(propertyItemID1).Schema(propertySchemaID, propertySchemaGroup2ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("XYZ")). + Build(), + }).MustBuild(), + property.NewGroup().ID(propertyItemID2).Schema(propertySchemaID, propertySchemaGroup2ID). + Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("ZYX")). + Build(), + }).MustBuild(), + }).MustBuild(), + }). + MustBuild() + layer6 := layer.NewItem(). + NewID(). + Scene(sceneID). + Plugin(&pluginID). + Extension(&pluginExtension1ID). + Property(layer6p.IDRef()). + MustBuild() + + // root layer + rootLayer := layer.NewGroup().NewID().Scene(sceneID).Layers(layer.NewIDList([]id.LayerID{ + layer1.ID(), + layer2.ID(), + layer3.ID(), + layer4.ID(), + layer5.ID(), + layer6.ID(), + })).MustBuild() + + // scene + scenep := property.New(). + ID(scenePropertyID). + Scene(sceneID). + Schema(propertySchemaID). 
+ Items([]property.Item{ + property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID).Fields([]*property.Field{ + property.NewFieldUnsafe(). + FieldUnsafe(propertySchemaField1ID). + TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("hogehoge")). + Build(), + }).MustBuild(), + }). + MustBuild() + + sceneWidgetID1 := id.NewWidgetID() + sceneWidgetID2 := id.NewWidgetID() + sceneWidget1 := scene.MustNewWidget(&sceneWidgetID1, pluginID, pluginExtension1ID, scenePropertyID, false) + sceneWidget2 := scene.MustNewWidget(&sceneWidgetID2, pluginID, pluginExtension2ID, scenePropertyID, true) + scenePlugin1 := scene.NewPlugin(pluginID, &scenePropertyID) + + assert.Equal(t, sceneWidgetID1, sceneWidget1.ID()) + assert.Equal(t, sceneWidgetID2, sceneWidget2.ID()) + + scene := scene.New(). + ID(sceneID). + Project(id.NewProjectID()). + Team(id.NewTeamID()). + Property(scenep.ID()). + WidgetSystem(scene.NewWidgetSystem([]*scene.Widget{ + sceneWidget1, sceneWidget2, + })). + PluginSystem(scene.NewPluginSystem([]*scene.Plugin{scenePlugin1})). + RootLayer(rootLayer.ID()). 
+ MustBuild() + + // loaders + dloader := dataset.List{ + ds1, ds2, ds3, + }.GraphLoader() + lloader := layer.LoaderFrom([]layer.Layer{ + rootLayer, + layer1, + layer2, + layer21, + layer3, + layer4, + layer41, + layer5, + layer51, + layer6, + }) + ploader := property.LoaderFrom([]*property.Property{ + scenep, + layer1p, + layer2p, + layer21p, + layer3p, + layer4p, + layer41p, + layer5p, + layer51p, + layer6p, + }) + + // exec + sb := New(lloader, ploader, dloader) + publishedAt := time.Date(2019, time.August, 15, 0, 0, 0, 0, time.Local) + result, err := sb.buildScene(context.Background(), scene, publishedAt) + + // general + assert.NoError(t, err) + assert.Equal(t, sceneID.String(), result.ID) + assert.Equal(t, version, result.SchemaVersion) + assert.Equal(t, publishedAt, result.PublishedAt) + + // property + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, result.Property, "property") + + // plugins + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, result.Plugins[pluginID.String()], "plugin1 property") + + // widgets + assert.Equal(t, 1, len(result.Widgets), "widgets len") + resWidget1 := result.Widgets[0] + assert.Equal(t, pluginID.String(), resWidget1.PluginID, "widget1 plugin") + assert.Equal(t, string(pluginExtension2ID), resWidget1.ExtensionID, "widget1 extension") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, resWidget1.Property, "widget1 property") + + // layers + assert.Equal(t, 6, len(result.Layers), "layers len") + + // layer1 + resLayer1 := result.Layers[0] + assert.Equal(t, layer1.ID().String(), resLayer1.ID, "layer1 id") + assert.Equal(t, pluginID.StringRef(), resLayer1.PluginID, "layer1 plugin id") + assert.Equal(t, pluginExtension1ID.StringRef(), resLayer1.ExtensionID, "layer1 extension id") + assert.Nil(t, resLayer1.Infobox, "layer1 infobox") + assert.Equal(t, map[string]interface{}{ + "A": 
map[string]interface{}{ + "a": "xxx", + "b": float64(1), + }, + }, resLayer1.Property, "layer1 prpperty") + + // layer2 + resLayer2 := result.Layers[1] + assert.Equal(t, layer21.ID().String(), resLayer2.ID, "layer๏ผ’ id") + assert.Equal(t, pluginID.StringRef(), resLayer2.PluginID, "layer๏ผ’ plugin id") + assert.Equal(t, pluginExtension1ID.StringRef(), resLayer2.ExtensionID, "layer๏ผ’ extension id") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, resLayer2.Infobox.Property, "layer2 infobox property") + assert.Equal(t, 1, len(resLayer2.Infobox.Fields), "layer2 infobox fields len") + assert.Equal(t, pluginID.String(), resLayer2.Infobox.Fields[0].PluginID, "layer2 infobox field1 plugin") + assert.Equal(t, string(pluginExtension1ID), resLayer2.Infobox.Fields[0].ExtensionID, "layer2 infobox field1 extension") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, + }, resLayer2.Infobox.Fields[0].Property, "layer2 infobox field1 property") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "yyy", + "b": float64(1), + "c": "test", + }, + }, resLayer2.Property, "layer2 prpperty") + + // layer3 + resLayer3 := result.Layers[2] + assert.Equal(t, layer3.ID().String(), resLayer3.ID, "layer3 id") + assert.Equal(t, pluginID.StringRef(), resLayer3.PluginID, "layer3 plugin id") + assert.Equal(t, pluginExtension1ID.StringRef(), resLayer3.ExtensionID, "layer3 extension id") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, resLayer3.Infobox.Property, "layer3 infobox property") + assert.Equal(t, 1, len(resLayer3.Infobox.Fields), "layer3 infobox fields len") + assert.Equal(t, pluginID.String(), resLayer3.Infobox.Fields[0].PluginID, "layer3 infobox field1 plugin") + assert.Equal(t, string(pluginExtension1ID), resLayer3.Infobox.Fields[0].ExtensionID, "layer3 infobox field1 extension") + assert.Equal(t, 
map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, resLayer3.Infobox.Fields[0].Property, "layer3 infobox field1 property") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "b", + }, + }, resLayer3.Property, "layer3 prpperty") + + // layer4 + resLayer4 := result.Layers[3] + assert.Equal(t, layer41.ID().String(), resLayer4.ID, "layer4 id") + assert.Equal(t, pluginID.StringRef(), resLayer4.PluginID, "layer4 plugin id") + assert.Equal(t, pluginExtension1ID.StringRef(), resLayer4.ExtensionID, "layer4 extension id") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + "b": float64(1), + }, + }, resLayer4.Infobox.Property, "layer4 infobox property") + assert.Equal(t, 1, len(resLayer4.Infobox.Fields), "layer4 infobox fields len") + assert.Equal(t, pluginID.String(), resLayer4.Infobox.Fields[0].PluginID, "layer4 infobox field1 plugin") + assert.Equal(t, string(pluginExtension1ID), resLayer4.Infobox.Fields[0].ExtensionID, "layer4 infobox field1 extension") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "b": float64(1), + }, + }, resLayer4.Infobox.Fields[0].Property, "layer4 infobox field1 property") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "b", + "b": float64(1), + "c": "xxx", + }, + }, resLayer4.Property, "layer4 prpperty") + + // layer5 + resLayer5 := result.Layers[4] + assert.Equal(t, layer51.ID().String(), resLayer5.ID, "layer5 id") + assert.Equal(t, pluginID.StringRef(), resLayer5.PluginID, "layer5 plugin id") + assert.Equal(t, pluginExtension1ID.StringRef(), resLayer5.ExtensionID, "layer5 extension id") + assert.Nil(t, resLayer5.Infobox, "layer5 infobox") + assert.Equal(t, map[string]interface{}{ + "A": map[string]interface{}{ + "a": "a", + "b": "b", + }, + }, resLayer5.Property, "layer5 prpperty") + + // layer6 + resLayer6 := result.Layers[5] + assert.Equal(t, layer6.ID().String(), resLayer6.ID, 
"layer6 id") + assert.Equal(t, pluginID.StringRef(), resLayer6.PluginID, "layer6 plugin id") + assert.Equal(t, pluginExtension1ID.StringRef(), resLayer6.ExtensionID, "layer6 extension id") + assert.Nil(t, resLayer6.Infobox, "layer6 infobox") + assert.Equal(t, map[string]interface{}{ + "B": []map[string]interface{}{ + { + "a": "XYZ", + "id": propertyItemID1.String(), + }, + { + "a": "ZYX", + "id": propertyItemID2.String(), + }, + }, + }, resLayer6.Property, "layer6 property") +} diff --git a/pkg/scene/builder/encoder.go b/pkg/scene/builder/encoder.go new file mode 100644 index 000000000..2dd030c4d --- /dev/null +++ b/pkg/scene/builder/encoder.go @@ -0,0 +1,112 @@ +package builder + +import ( + "github.com/reearth/reearth-backend/pkg/layer/encoding" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" +) + +var _ encoding.Encoder = &encoder{} + +type encoder struct { + res []*layerJSON +} + +func (e *encoder) Result() []*layerJSON { + if e == nil { + return nil + } + return e.res +} + +func (e *encoder) Encode(l merging.SealedLayer) (err error) { + if e == nil { + return + } + e.res = e.layers(l) + return +} + +func (e *encoder) layers(l merging.SealedLayer) []*layerJSON { + if l == nil { + return nil + } + if i, ok := l.(*merging.SealedLayerItem); ok { + layer := e.layer(i) + if layer == nil { + return nil + } + return []*layerJSON{layer} + } else if g, ok := l.(*merging.SealedLayerGroup); ok { + // This encoder does not print group layer representation. + layers := make([]*layerJSON, 0, len(g.Children)) + for _, c := range g.Children { + l := e.layers(c) + if l != nil { + layers = append(layers, l...) 
+ } + } + return layers + } + return nil +} + +func (e *encoder) layer(l *merging.SealedLayerItem) *layerJSON { + if l == nil { + return nil + } + return &layerJSON{ + ID: l.Original.String(), + PluginID: l.PluginID.StringRef(), + ExtensionID: l.ExtensionID.StringRef(), + Name: l.Name, + Property: e.property(l.Property), + Infobox: e.infobox(l.Infobox), + } +} + +func (e *encoder) infobox(i *merging.SealedInfobox) *infoboxJSON { + if i == nil { + return nil + } + fields := make([]infoboxFieldJSON, 0, len(i.Fields)) + for _, f := range i.Fields { + fields = append(fields, infoboxFieldJSON{ + ID: f.ID.String(), + PluginID: f.Plugin.String(), + ExtensionID: string(f.Extension), + Property: e.property(f.Property), + }) + } + return &infoboxJSON{ + Fields: fields, + Property: e.property(i.Property), + } +} + +func (e *encoder) property(p *property.Sealed) propertyJSON { + return p.Interface() +} + +type layerJSON struct { + ID string `json:"id"` + PluginID *string `json:"pluginId,omitempty"` + ExtensionID *string `json:"extensionId,omitempty"` + Name string `json:"name,omitempty"` + Property propertyJSON `json:"property,omitempty"` + Infobox *infoboxJSON `json:"infobox,omitempty"` +} + +type infoboxJSON struct { + Fields []infoboxFieldJSON `json:"fields"` + Property propertyJSON `json:"property"` +} + +type infoboxFieldJSON struct { + ID string `json:"id"` + PluginID string `json:"pluginId"` + ExtensionID string `json:"extensionId"` + Property propertyJSON `json:"property"` +} + +type propertyJSON = map[string]interface{} diff --git a/pkg/scene/builder/encoder_test.go b/pkg/scene/builder/encoder_test.go new file mode 100644 index 000000000..71b4deffd --- /dev/null +++ b/pkg/scene/builder/encoder_test.go @@ -0,0 +1,183 @@ +package builder + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/property" + 
"github.com/stretchr/testify/assert" +) + +func TestEncoder_Result(t *testing.T) { + testCases := []struct { + Name string + E *encoder + Expected []*layerJSON + }{ + { + Name: "nil encoder", + E: nil, + Expected: nil, + }, + { + Name: "success", + E: &encoder{ + res: []*layerJSON{ + { + ID: "xxx", + PluginID: nil, + ExtensionID: nil, + Name: "aaa", + Property: nil, + Infobox: nil, + }, + }, + }, + Expected: []*layerJSON{ + { + ID: "xxx", + PluginID: nil, + ExtensionID: nil, + Name: "aaa", + Property: nil, + Infobox: nil, + }, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.E.Result() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestEncoder_Encode(t *testing.T) { + testCases := []struct { + Name string + E *encoder + SL merging.SealedLayer + Expected error + }{ + { + Name: "nil encoder", + E: nil, + SL: nil, + Expected: nil, + }, + { + Name: "success encoding", + E: &encoder{}, + SL: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.E.Encode(tc.SL) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestEncoder_Layers(t *testing.T) { + lid := id.MustLayerID(id.New().String()) + sid := id.MustSceneID(id.New().String()) + pid := id.MustPropertyID(id.New().String()) + ex := id.PluginExtensionID("marker") + iid := id.MustPropertyItemID(id.New().String()) + v1 := property.LatLng{ + Lat: 4.4, + Lng: 53.4, + } + + f1 := property.SealedField{ + ID: id.PropertySchemaFieldID("location"), + Type: "latlng", + DatasetValue: nil, + PropertyValue: v1.Value(), + } + fl1 := []*property.SealedField{} + fl1 = append(fl1, &f1) + item1 := property.SealedItem{ + Original: &iid, + Parent: nil, + SchemaGroup: id.PropertySchemaFieldID("default"), + LinkedDataset: nil, + Fields: fl1, + Groups: nil, + } + il := []*property.SealedItem{} + il = append(il, &item1) + + sp := property.Sealed{ + Original: &pid, + 
Items: il, + } + sealed := &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Parent: nil, + Name: "test", + Scene: sid, + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: &ex, + }, + Property: &sp, + Infobox: nil, + }} + testCases := []struct { + Name string + E *encoder + SL *merging.SealedLayerItem + Expected *layerJSON + }{ + { + Name: "nil layers", + E: &encoder{}, + SL: nil, + Expected: nil, + }, + { + Name: "success", + E: &encoder{}, + SL: sealed, + Expected: &layerJSON{ + ID: lid.String(), + PluginID: id.OfficialPluginID.StringRef(), + ExtensionID: ex.StringRef(), + Name: "test", + Property: map[string]interface{}{"default": map[string]interface{}{"location": map[string]interface{}{"lat": 4.4, "lng": 53.4}}}, + Infobox: nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.E.layer(tc.SL) + if res == nil { + assert.Equal(tt, tc.Expected, res) + } else { + assert.Equal(tt, tc.Expected.Property, res.Property) + assert.Equal(tt, tc.Expected.Infobox, res.Infobox) + assert.Equal(tt, *tc.Expected.ExtensionID, *res.ExtensionID) + assert.Equal(tt, tc.Expected.ID, res.ID) + assert.Equal(tt, tc.Expected.Name, res.Name) + assert.Equal(tt, *tc.Expected.PluginID, *res.PluginID) + } + + }) + } +} diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go new file mode 100644 index 000000000..fb7cd9833 --- /dev/null +++ b/pkg/scene/builder/scene.go @@ -0,0 +1,81 @@ +package builder + +import ( + "context" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type sceneJSON struct { + SchemaVersion int `json:"schemaVersion"` + ID string `json:"id"` + PublishedAt time.Time `json:"publishedAt"` + Property propertyJSON `json:"property"` + Plugins map[string]propertyJSON `json:"plugins"` + 
Layers []*layerJSON `json:"layers"` + Widgets []*widgetJSON `json:"widgets"` +} + +type widgetJSON struct { + PluginID string `json:"pluginId"` + ExtensionID string `json:"extensionId"` + Property propertyJSON `json:"property"` +} + +func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Time, l []*layerJSON, p []*property.Property) *sceneJSON { + return &sceneJSON{ + SchemaVersion: version, + ID: s.ID().String(), + PublishedAt: publishedAt, + Property: b.property(ctx, findProperty(p, s.Property())), + Plugins: b.plugins(ctx, s, p), + Widgets: b.widgets(ctx, s, p), + Layers: l, + } +} + +func (b *Builder) plugins(ctx context.Context, s *scene.Scene, p []*property.Property) map[string]propertyJSON { + scenePlugins := s.PluginSystem().Plugins() + res := map[string]propertyJSON{} + for _, sp := range scenePlugins { + if sp == nil { + continue + } + if pp := sp.Property(); pp != nil { + res[sp.Plugin().String()] = b.property(ctx, findProperty(p, *pp)) + } + } + return res +} + +func (b *Builder) widgets(ctx context.Context, s *scene.Scene, p []*property.Property) []*widgetJSON { + sceneWidgets := s.WidgetSystem().Widgets() + res := make([]*widgetJSON, 0, len(sceneWidgets)) + for _, w := range sceneWidgets { + if !w.Enabled() { + continue + } + res = append(res, &widgetJSON{ + PluginID: w.Plugin().String(), + ExtensionID: string(w.Extension()), + Property: b.property(ctx, findProperty(p, w.Property())), + }) + } + return res +} + +func (b *Builder) property(ctx context.Context, p *property.Property) propertyJSON { + return property.SealProperty(ctx, p).Interface() +} + +func findProperty(pp []*property.Property, i id.PropertyID) *property.Property { + for _, p := range pp { + if p.ID() == i { + return p + } + } + return nil +} diff --git a/pkg/scene/builder/scene_test.go b/pkg/scene/builder/scene_test.go new file mode 100644 index 000000000..8c0e741d1 --- /dev/null +++ b/pkg/scene/builder/scene_test.go @@ -0,0 +1,46 @@ +package builder + +import ( 
+ "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestScene_FindProperty(t *testing.T) { + p1 := id.NewPropertyID() + sid := id.NewSceneID() + scid := id.MustPropertySchemaID("xx/aa") + pl := []*property.Property{ + property.New().NewID().Scene(sid).Schema(scid).MustBuild(), + property.New().ID(p1).Scene(sid).Schema(scid).MustBuild(), + } + testCases := []struct { + Name string + PL []*property.Property + Input id.PropertyID + Expected *property.Property + }{ + { + Name: "Found", + PL: pl, + Input: p1, + Expected: property.New().Scene(sid).Schema(scid).ID(p1).MustBuild(), + }, + { + Name: " NotFound", + PL: pl, + Input: id.NewPropertyID(), + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := findProperty(tc.PL, tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go new file mode 100644 index 000000000..596ac9e6b --- /dev/null +++ b/pkg/scene/builder_test.go @@ -0,0 +1,325 @@ +package scene + +import ( + "errors" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestBuilder_IDs(t *testing.T) { + tid := id.NewTeamID() + lid := id.NewLayerID() + b := New().NewID().RootLayer(lid).Team(tid).MustBuild() + assert.NotNil(t, b.ID()) + assert.Equal(t, tid, b.Team()) + assert.Equal(t, lid, b.RootLayer()) + sid := id.NewSceneID() + b2 := New().ID(sid).RootLayer(lid).Team(tid).MustBuild() + assert.Equal(t, sid, b2.ID()) +} + +func TestBuilder_UpdatedAt(t *testing.T) { + ti := time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC) + b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).UpdatedAt(ti).MustBuild() + assert.Equal(t, ti, b.UpdatedAt()) +} + +func TestBuilder_Property(t *testing.T) { + pid := id.NewPropertyID() + b := 
New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).Property(pid).MustBuild() + assert.Equal(t, pid, b.Property()) +} + +func TestBuilder_PluginSystem(t *testing.T) { + ps := NewPluginSystem([]*Plugin{ + NewPlugin(id.OfficialPluginID, id.NewPropertyID().Ref()), + }) + b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).PluginSystem(ps).MustBuild() + assert.Equal(t, ps, b.PluginSystem()) +} + +func TestBuilder_Project(t *testing.T) { + pid := id.NewProjectID() + b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).Project(pid).MustBuild() + assert.Equal(t, pid, b.Project()) +} + +func TestBuilder_WidgetSystem(t *testing.T) { + nid := id.New() + ws := NewWidgetSystem([]*Widget{ + MustNewWidget(id.WidgetIDFromRefID(&nid), id.OfficialPluginID, "xxx", id.NewPropertyID(), true), + }) + b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).WidgetSystem(ws).MustBuild() + assert.Equal(t, ws, b.WidgetSystem()) +} + +func TestBuilder_Build(t *testing.T) { + tid := id.NewTeamID() + sid := id.NewSceneID() + pid := id.NewProjectID() + ppid := id.NewPropertyID() + lid := id.NewLayerID() + nid := id.New() + ws := NewWidgetSystem([]*Widget{ + MustNewWidget(id.WidgetIDFromRefID(&nid), id.OfficialPluginID, "xxx", ppid, true), + }) + ps := NewPluginSystem([]*Plugin{ + NewPlugin(id.OfficialPluginID, ppid.Ref()), + }) + testCases := []struct { + Name string + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + Expected struct { + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + } + err error + }{ + { + Name: "fail nil scene id", + Id: id.SceneID{}, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 
0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, + }, + { + Name: "fail nil team id", + Id: sid, + Project: pid, + Team: id.TeamID{}, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, + }, + { + Name: "fail nil root layer id", + Id: sid, + Project: pid, + Team: tid, + RootLayer: id.LayerID{}, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, + }, + { + Name: "success build new scene", + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + Expected: struct { + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + }{ + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := New(). + ID(tc.Id). + WidgetSystem(tc.WidgetSystem). + Project(tc.Project). + PluginSystem(tc.PluginSystem). + Property(tc.Property). + RootLayer(tc.RootLayer). + Team(tc.Team). + UpdatedAt(tc.UpdatedAt). 
+ Build() + if err == nil { + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.UpdatedAt, res.UpdatedAt()) + assert.Equal(tt, tc.Expected.Team, res.Team()) + assert.Equal(tt, tc.Expected.RootLayer, res.RootLayer()) + assert.Equal(tt, tc.Expected.Property, res.Property()) + assert.Equal(tt, tc.Expected.PluginSystem, res.PluginSystem()) + assert.Equal(tt, tc.Expected.WidgetSystem, res.WidgetSystem()) + assert.Equal(tt, tc.Expected.Project, res.Project()) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + tid := id.NewTeamID() + sid := id.NewSceneID() + pid := id.NewProjectID() + ppid := id.NewPropertyID() + lid := id.NewLayerID() + nid := id.New() + ws := NewWidgetSystem([]*Widget{ + MustNewWidget(id.WidgetIDFromRefID(&nid), id.OfficialPluginID, "xxx", ppid, true), + }) + ps := NewPluginSystem([]*Plugin{ + NewPlugin(id.OfficialPluginID, ppid.Ref()), + }) + testCases := []struct { + Name string + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + Expected struct { + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + } + err error + }{ + { + Name: "fail nil scene id", + Id: id.SceneID{}, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, + }, + { + Name: "fail nil team id", + Id: sid, + Project: pid, + Team: id.TeamID{}, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, + }, + { + Name: "fail nil root layer id", + Id: sid, + Project: pid, + Team: tid, + RootLayer: id.LayerID{}, 
+ WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, + }, + { + Name: "success build new scene", + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + Expected: struct { + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + }{ + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *Scene + defer func() { + if r := recover(); r == nil { + assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.UpdatedAt, res.UpdatedAt()) + assert.Equal(tt, tc.Expected.Team, res.Team()) + assert.Equal(tt, tc.Expected.RootLayer, res.RootLayer()) + assert.Equal(tt, tc.Expected.Property, res.Property()) + assert.Equal(tt, tc.Expected.PluginSystem, res.PluginSystem()) + assert.Equal(tt, tc.Expected.WidgetSystem, res.WidgetSystem()) + assert.Equal(tt, tc.Expected.Project, res.Project()) + } + }() + + res = New(). + ID(tc.Id). + WidgetSystem(tc.WidgetSystem). + Project(tc.Project). + PluginSystem(tc.PluginSystem). + Property(tc.Property). + RootLayer(tc.RootLayer). + Team(tc.Team). + UpdatedAt(tc.UpdatedAt). 
+ MustBuild() + }) + } +} diff --git a/pkg/scene/lock.go b/pkg/scene/lock.go new file mode 100644 index 000000000..03afcd1a3 --- /dev/null +++ b/pkg/scene/lock.go @@ -0,0 +1,45 @@ +package scene + +// LockMode ใฏใ‚ทใƒผใƒณใฎใƒญใƒƒใ‚ฏ็Šถๆ…‹ใ‚’่กจใ—ใพใ™ใ€‚ +type LockMode string + +const ( + // LockModeFree ใฏใƒญใƒƒใ‚ฏใŒใ‹ใ‹ใฃใฆใ„ใชใ„็Šถๆ…‹ใงใ™ใ€‚ + LockModeFree LockMode = "" + // LockModePending ใฏๅ‡ฆ็†ๅพ…ใกไธญใงใ™ใ€‚ใƒ‡ใƒผใ‚ฟใฎๅค‰ๆ›ดใฏ็„กๅˆถ้™ใซๅค‰ๆ›ดใฏใงใใพใ™ใ€‚ + LockModePending LockMode = "pending" + // LockModePluginUpgrading ใฏใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ใ‚ขใƒƒใƒ—ใ‚ฐใƒฌใƒผใƒ‰ไธญใงใ™ใ€‚ใ‚ทใƒผใƒณใธใฎๅ„็จฎๆ“ไฝœใŒใงใใพใ›ใ‚“ใ€‚ + LockModePluginUpgrading LockMode = "plugin upgrading" + // LockModeDatasetSyncing ใฏใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๅŒๆœŸไธญใงใ™ใ€‚ใ‚ทใƒผใƒณใธใฎๅ„็จฎๆ“ไฝœใŒใงใใพใ›ใ‚“ใ€‚ + LockModeDatasetSyncing LockMode = "dataset syncing" + // LockModePublishing ใฏใ‚ทใƒผใƒณใ‚’ๆ›ธใๅ‡บใ—ไธญใงใ™ใ€‚ใ‚ทใƒผใƒณใธใฎๅ„็จฎๆ“ไฝœใŒใงใใพใ›ใ‚“ใ€‚ + LockModePublishing LockMode = "publishing" +) + +// IsLocked _ +func (l LockMode) IsLocked() bool { + switch l { + case LockModeFree: + return false + case LockModePending: + return false + } + return true +} + +// Validate _ +func (l LockMode) Validate() (LockMode, bool) { + switch l { + case LockModeFree: + fallthrough + case LockModePending: + fallthrough + case LockModePluginUpgrading: + fallthrough + case LockModeDatasetSyncing: + fallthrough + case LockModePublishing: + return l, true + } + return l, false +} diff --git a/pkg/scene/lock_test.go b/pkg/scene/lock_test.go new file mode 100644 index 000000000..fcd36a364 --- /dev/null +++ b/pkg/scene/lock_test.go @@ -0,0 +1,86 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLockMode_IsLocked(t *testing.T) { + testCases := []struct { + Name string + LM LockMode + Expected bool + }{ + { + Name: "unlocked free", + LM: LockModeFree, + Expected: false, + }, + { + Name: "unlocked 
pending", + LM: LockModePending, + Expected: false, + }, + { + Name: "locked", + LM: LockModePublishing, + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.LM.IsLocked() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestLockMode_Validate(t *testing.T) { + testCases := []struct { + Name string + LM LockMode + Expected bool + }{ + { + Name: "valid free", + LM: LockModeFree, + Expected: true, + }, + { + Name: "valid pending", + LM: LockModePending, + Expected: true, + }, + { + Name: "valid publishing", + LM: LockModePublishing, + Expected: true, + }, + { + Name: "valid upgrading", + LM: LockModePluginUpgrading, + Expected: true, + }, + { + Name: "valid syncing", + LM: LockModeDatasetSyncing, + Expected: true, + }, + { + Name: "invalid", + LM: "xxx", + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + _, res := tc.LM.Validate() + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/scene/plugin.go b/pkg/scene/plugin.go new file mode 100644 index 000000000..63e863d21 --- /dev/null +++ b/pkg/scene/plugin.go @@ -0,0 +1,38 @@ +package scene + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// Plugin _ +type Plugin struct { + plugin id.PluginID + property *id.PropertyID +} + +// NewPlugin _ +func NewPlugin(plugin id.PluginID, property *id.PropertyID) *Plugin { + if property != nil { + property2 := *property + property = &property2 + } + return &Plugin{ + plugin: plugin, + property: property, + } +} + +// Plugin _ +func (s Plugin) Plugin() id.PluginID { + return s.plugin +} + +// Property _ +func (s Plugin) Property() *id.PropertyID { + property := s.property + if property != nil { + property2 := *property + property = &property2 + } + return property +} diff --git a/pkg/scene/plugin_system.go b/pkg/scene/plugin_system.go new file mode 100644 index 000000000..cf36ac598 --- 
/dev/null +++ b/pkg/scene/plugin_system.go @@ -0,0 +1,130 @@ +package scene + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// PluginSystem _ +type PluginSystem struct { + plugins []*Plugin +} + +// NewPluginSystem _ +func NewPluginSystem(p []*Plugin) *PluginSystem { + if p == nil { + return &PluginSystem{plugins: []*Plugin{}} + } + p2 := make([]*Plugin, 0, len(p)) + for _, p1 := range p { + if p1 == nil { + continue + } + duplicated := false + for _, p3 := range p2 { + if p1.plugin.Equal(p3.plugin) { + duplicated = true + break + } + } + if !duplicated { + p3 := *p1 + p2 = append(p2, &p3) + } + } + return &PluginSystem{plugins: p2} +} + +// Plugins _ +func (p *PluginSystem) Plugins() []*Plugin { + return append([]*Plugin{}, p.plugins...) +} + +// Property _ +func (p *PluginSystem) Property(id id.PluginID) *id.PropertyID { + for _, p := range p.plugins { + if p.plugin.Equal(id) { + p2 := *p.property + return &p2 + } + } + return nil +} + +// Has _ +func (p *PluginSystem) Has(id id.PluginID) bool { + for _, p2 := range p.plugins { + if p2.plugin.Equal(id) { + return true + } + } + return false +} + +// HasPlugin _ +func (p *PluginSystem) HasPlugin(id id.PluginID) bool { + name := id.Name() + for _, p2 := range p.plugins { + if p2.plugin.Name() == name { + return true + } + } + return false +} + +// Add _ +func (p *PluginSystem) Add(sp *Plugin) { + if sp == nil || p.Has(sp.plugin) || sp.plugin.Equal(id.OfficialPluginID) { + return + } + sp2 := *sp + p.plugins = append(p.plugins, &sp2) +} + +// Remove _ +func (p *PluginSystem) Remove(pid id.PluginID) { + if pid.Equal(id.OfficialPluginID) { + return + } + for i, p2 := range p.plugins { + if p2.plugin.Equal(pid) { + p.plugins = append(p.plugins[:i], p.plugins[i+1:]...) 
+ return + } + } +} + +// Upgrade _ +func (p *PluginSystem) Upgrade(pid, newID id.PluginID) { + for i, p2 := range p.plugins { + if p2.plugin.Equal(id.OfficialPluginID) { + continue + } + if p2.plugin.Equal(pid) { + p.plugins[i] = &Plugin{plugin: newID, property: p2.property} + return + } + } +} + +// Properties _ +func (p *PluginSystem) Properties() []id.PropertyID { + if p == nil { + return nil + } + res := make([]id.PropertyID, 0, len(p.plugins)) + for _, pp := range p.plugins { + if pp.property != nil { + res = append(res, *pp.property) + } + } + return res +} + +func (p *PluginSystem) Plugin(pluginID id.PluginID) *Plugin { + for _, pp := range p.plugins { + if pp.plugin == pluginID { + return pp + } + } + return nil +} diff --git a/pkg/scene/plugin_system_test.go b/pkg/scene/plugin_system_test.go new file mode 100644 index 000000000..acf0a0491 --- /dev/null +++ b/pkg/scene/plugin_system_test.go @@ -0,0 +1,332 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewPluginSystem(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input []*Plugin + Expected *PluginSystem + }{ + { + Name: "nil plugin list", + Input: nil, + Expected: &PluginSystem{plugins: []*Plugin{}}, + }, + { + Name: "plugin list with nil", + Input: []*Plugin{nil}, + Expected: &PluginSystem{plugins: []*Plugin{}}, + }, + { + Name: "plugin list with matched values", + Input: []*Plugin{ + { + plugin: pid, + property: pr, + }, + }, + Expected: &PluginSystem{plugins: []*Plugin{ + NewPlugin(pid, pr), + }}, + }, + { + Name: "plugin list with duplicated values", + Input: []*Plugin{ + { + plugin: pid, + property: pr, + }, + { + plugin: pid, + property: pr, + }, + }, + Expected: &PluginSystem{plugins: []*Plugin{ + NewPlugin(pid, pr), + }}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + 
tt.Parallel() + res := NewPluginSystem(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestPluginSystem_Property(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input id.PluginID + PS *PluginSystem + Expected *id.PropertyID + }{ + { + Name: "property is found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: pr, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.PS.Property(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestPluginSystem_Plugin(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input id.PluginID + PS *PluginSystem + Expected *Plugin + }{ + { + Name: "plugin is found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugin(pid, pr), + }, + { + Name: "plugin is not found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.PS.Plugin(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestPluginSystem_Properties(t *testing.T) { + pr := id.NewPropertyID().Ref() + pr2 := id.NewPropertyID().Ref() + testCases := []struct { + Name string + PS *PluginSystem + Expected []id.PropertyID + }{ + { + Name: "pluginSystem is nil", + PS: nil, + Expected: nil, + }, + { + Name: "get properties", + PS: NewPluginSystem([]*Plugin{ + NewPlugin(id.MustPluginID("zzz#1.1.1"), pr), + NewPlugin(id.MustPluginID("xxx#1.1.1"), pr2), + }), + Expected: []id.PropertyID{*pr, *pr2}, + }, + } + for _, tc := range 
testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.PS.Properties() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestPluginSystem_Has(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input id.PluginID + PS *PluginSystem + Expected bool + }{ + { + Name: "property is found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.PS.Has(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestPluginSystem_HasPlugin(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input id.PluginID + PS *PluginSystem + Expected bool + }{ + { + Name: "property is found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.PS.HasPlugin(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestPluginSystem_Add(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input *Plugin + PS, Expected *PluginSystem + }{ + { + Name: "add nil plugin", + Input: nil, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add existing plugin", + Input: NewPlugin(pid, pr), + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, 
pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add official plugin", + Input: NewPlugin(id.OfficialPluginID, pr), + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add new plugin", + Input: NewPlugin(pid, pr), + PS: NewPluginSystem([]*Plugin{}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.PS.Add(tc.Input) + assert.Equal(tt, tc.Expected, tc.PS) + }) + } +} + +func TestPluginSystem_Remove(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + Input id.PluginID + PS, Expected *PluginSystem + }{ + { + Name: "remove official plugin", + Input: id.OfficialPluginID, + PS: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), + }, + { + Name: "remove a plugin", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPluginSystem([]*Plugin{}), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.PS.Remove(tc.Input) + assert.Equal(tt, tc.Expected, tc.PS) + }) + } +} + +func TestPluginSystem_Upgrade(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + nid := id.MustPluginID("zzz#1.1.1") + pr := id.NewPropertyID().Ref() + testCases := []struct { + Name string + PID, NewID id.PluginID + PS, Expected *PluginSystem + }{ + { + Name: "upgrade official plugin", + PID: id.OfficialPluginID, + PS: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), + }, + { + Name: "upgrade a plugin", + PID: pid, + NewID: nid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), + Expected: 
NewPluginSystem([]*Plugin{NewPlugin(nid, pr)}), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.PS.Upgrade(tc.PID, tc.NewID) + assert.Equal(tt, tc.Expected, tc.PS) + }) + } +} diff --git a/pkg/scene/plugin_test.go b/pkg/scene/plugin_test.go new file mode 100644 index 000000000..93fbb58c3 --- /dev/null +++ b/pkg/scene/plugin_test.go @@ -0,0 +1,21 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestPlugin(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID().Ref() + res := NewPlugin(pid, pr) + p := Plugin{ + plugin: pid, + property: pr, + } + assert.Equal(t, &p, res) + assert.Equal(t, pid, p.Plugin()) + assert.Equal(t, pr, p.Property()) +} diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go new file mode 100644 index 000000000..bad56a386 --- /dev/null +++ b/pkg/scene/scene.go @@ -0,0 +1,113 @@ +package scene + +import ( + "errors" + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ErrSceneIsLocked error = errors.New("scene is locked") + +type Scene struct { + id id.SceneID + project id.ProjectID + team id.TeamID + rootLayer id.LayerID + widgetSystem *WidgetSystem + pluginSystem *PluginSystem + updatedAt time.Time + property id.PropertyID +} + +func (s *Scene) ID() id.SceneID { + if s == nil { + return id.SceneID{} + } + return s.id +} + +func (s *Scene) CreatedAt() time.Time { + if s == nil { + return time.Time{} + } + return id.ID(s.id).Timestamp() +} + +func (s *Scene) Project() id.ProjectID { + if s == nil { + return id.ProjectID{} + } + return s.project +} + +func (s *Scene) Team() id.TeamID { + if s == nil { + return id.TeamID{} + } + return s.team +} + +func (s *Scene) Property() id.PropertyID { + if s == nil { + return id.PropertyID{} + } + return s.property +} + +func (s *Scene) RootLayer() id.LayerID { + if s == nil { + return id.LayerID{} + } + return 
s.rootLayer +} + +func (s *Scene) WidgetSystem() *WidgetSystem { + if s == nil { + return nil + } + return s.widgetSystem +} + +func (s *Scene) PluginSystem() *PluginSystem { + if s == nil { + return nil + } + return s.pluginSystem +} + +func (s *Scene) UpdatedAt() time.Time { + if s == nil { + return time.Time{} + } + return s.updatedAt +} + +func (s *Scene) SetUpdatedAt(updatedAt time.Time) { + if s == nil { + return + } + s.updatedAt = updatedAt +} + +func (s *Scene) IsTeamIncluded(teams []id.TeamID) bool { + if s == nil || teams == nil { + return false + } + for _, t := range teams { + if t == s.team { + return true + } + } + return false +} + +func (s *Scene) Properties() []id.PropertyID { + if s == nil { + return nil + } + ids := []id.PropertyID{s.property} + ids = append(ids, s.pluginSystem.Properties()...) + ids = append(ids, s.widgetSystem.Properties()...) + return ids +} diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go new file mode 100644 index 000000000..1dd71405a --- /dev/null +++ b/pkg/scene/scene_test.go @@ -0,0 +1,86 @@ +package scene + +import ( + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestScene_IsTeamIncluded(t *testing.T) { + tid := id.NewTeamID() + testCases := []struct { + Name string + Teams []id.TeamID + S *Scene + Expected bool + }{ + { + Name: "nil scene", + Teams: []id.TeamID{id.NewTeamID()}, + S: nil, + Expected: false, + }, + { + Name: "nil teams", + Teams: nil, + S: New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).MustBuild(), + Expected: false, + }, + { + Name: "teams exist", + Teams: []id.TeamID{tid}, + S: New().NewID().Team(tid).RootLayer(id.NewLayerID()).MustBuild(), + Expected: true, + }, + { + Name: "teams not exist", + Teams: []id.TeamID{tid}, + S: New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).MustBuild(), + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt 
*testing.T) { + tt.Parallel() + res := tc.S.IsTeamIncluded(tc.Teams) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestScene_SetUpdatedAt(t *testing.T) { + s := New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).UpdatedAt(time.Date(1999, 1, 1, 00, 00, 1, 1, time.UTC)).MustBuild() + s.SetUpdatedAt(time.Date(2021, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.Equal(t, time.Date(2021, 1, 1, 00, 00, 1, 1, time.UTC), s.UpdatedAt()) + s = nil + s.SetUpdatedAt(time.Date(2020, 1, 1, 00, 00, 1, 1, time.UTC)) + assert.NotEqual(t, time.Date(2020, 1, 1, 00, 00, 1, 1, time.UTC), s.UpdatedAt()) +} + +func TestScene_Properties(t *testing.T) { + pid1 := id.NewPropertyID() + pid2 := id.NewPropertyID() + s := New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).Property(pid1).WidgetSystem( + NewWidgetSystem([]*Widget{ + MustNewWidget(id.NewWidgetID().Ref(), id.MustPluginID("xxx#1.1.1"), "eee", pid2, true), + })).MustBuild() + + assert.Equal(t, []id.PropertyID{pid1, pid2}, s.Properties()) + +} + +func TestSceneNil(t *testing.T) { + var s *Scene + assert.Nil(t, s.Properties()) + assert.True(t, s.ID().IsNil()) + assert.Nil(t, s.WidgetSystem()) + assert.True(t, s.Project().IsNil()) + assert.True(t, s.Team().IsNil()) + assert.True(t, s.RootLayer().IsNil()) + assert.True(t, s.CreatedAt().IsZero()) + assert.Nil(t, s.PluginSystem()) + assert.True(t, s.Property().IsNil()) +} diff --git a/pkg/scene/sceneops/dataset_migrator.go b/pkg/scene/sceneops/dataset_migrator.go new file mode 100644 index 000000000..08453c61a --- /dev/null +++ b/pkg/scene/sceneops/dataset_migrator.go @@ -0,0 +1,277 @@ +package sceneops + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/initializer" + "github.com/reearth/reearth-backend/pkg/plugin" + 
"github.com/reearth/reearth-backend/pkg/property" +) + +// TODO: define new loader types and use them instead of repos +type DatasetMigrator struct { + PropertyRepo repo.Property + LayerRepo repo.Layer + DatasetSchemaRepo repo.DatasetSchema + DatasetRepo repo.Dataset + Plugin plugin.Loader +} + +type MigrateDatasetResult struct { + Layers layer.Map + Properties property.Map + RemovedLayers *id.LayerIDSet + RemovedDatasetSchemas []id.DatasetSchemaID + RemovedDatasets []id.DatasetID +} + +func (r MigrateDatasetResult) Merge(r2 MigrateDatasetResult) MigrateDatasetResult { + return MigrateDatasetResult{ + Layers: r.Layers.Merge(r2.Layers), + Properties: r.Properties.Merge(r2.Properties), + RemovedLayers: r.RemovedLayers.Merge(r2.RemovedLayers), + } +} + +// NOTE: DatasetSchemaใฎๅ‰Š้™คใซใฏๅฏพๅฟœใ—ใฆใ„ใชใ„๏ผˆ่‡ชๅ‹•็š„ใซๅ‰Š้™คใ•ใ‚Œใชใ„๏ผ‰ +func (srv DatasetMigrator) Migrate(ctx context.Context, sid id.SceneID, newdsl []*dataset.Schema, newdl dataset.List) (MigrateDatasetResult, error) { + scenes := []id.SceneID{sid} + result := MigrateDatasetResult{} + + // ๅ‰Š้™คๅฏพ่ฑก + noLogerUsedDS := []id.DatasetSchemaID{} + noLogerUsedD := []id.DatasetID{} + + // ๅคใ„DatasetSchema + oldDatasetSchemaMap := map[id.DatasetSchemaID]*dataset.Schema{} + // ๆ–ฐใ—ใ„DatasetSchema + newDatasetSchemaMap := map[id.DatasetSchemaID]*dataset.Schema{} + // ๆ–ฐใ—ใ„DatasetSchemaใ‹ใ‚‰ๅคใ„DatasetSchemaIDใธใฎๅฏพๅฟœ + datasetSchemaMapNewOld := map[id.DatasetSchemaID]id.DatasetSchemaID{} + // ๅคใ„DatasetSchemaใ‹ใ‚‰ๆ–ฐใ—ใ„DatasetSchemaIDใธใฎๅฏพๅฟœ + datasetSchemaMapOldNew := map[id.DatasetSchemaID]id.DatasetSchemaID{} + // ๅคใ„DatasetFieldIDใ‹ใ‚‰ๆ–ฐใ—ใ„DatasetSchemaFieldIDใธใฎๅฏพๅฟœ + datasetSchemaFieldIDMap := map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{} + // ๅคใ„Datasetใ‹ใ‚‰ๆ–ฐใ—ใ„Datasetใธใฎๅฏพๅฟœ + newDatasetMap := map[id.DatasetID]*dataset.Dataset{} + datasetMapOldNew := map[id.DatasetID]*dataset.Dataset{} + datasetIDMapOldNew := 
map[id.DatasetID]id.DatasetID{} + // ๆ–ฐใ—ใ„DatasetSchemaใ‹ใ‚‰DatasetDiffใธใฎๅฏพๅฟœ + datasetDiffMap := map[id.DatasetSchemaID]dataset.Diff{} + + // ใƒžใƒƒใƒ—ใฎไฝœๆˆ + for _, newds := range newdsl { + newDatasetSchemaMap[newds.ID()] = newds + + // ใ‚ฝใƒผใ‚นๅ…ƒใŒๅŒใ˜ๅคใ„DSใ‚’ๅ–ๅพ— + olddsl, err := srv.DatasetSchemaRepo.FindBySceneAndSource(ctx, sid, newds.Source()) + if err != nil { + return MigrateDatasetResult{}, err + } + + // ๅคใ„ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ๆŽขใ™๏ผˆๆ–ฐใ—ใ่ฟฝๅŠ ใ•ใ‚ŒใŸใ‚‚ใฎใ‚‚ๅ…ฅใ‚Š่พผใ‚“ใงใ„ใ‚‹ใฎใง๏ผ‰ + var oldds *dataset.Schema + for _, o := range olddsl { + if o.ID() != newds.ID() { + oldds = o + } + } + if oldds == nil { + // ใชใ„ใชใ‚‰ใƒชใƒณใ‚ฏใ•ใ‚Œใฆใ„ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚„ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚‚ไฝœๆˆใ•ใ‚Œใฆใ„ใชใ„ใฏใšใชใฎใง็„ก่ฆ– + continue + } + + oldDatasetSchemaMap[oldds.ID()] = oldds + datasetSchemaMapNewOld[newds.ID()] = oldds.ID() + datasetSchemaMapOldNew[oldds.ID()] = newds.ID() + + // ใƒ•ใ‚ฃใƒผใƒซใƒ‰ใฎๅทฎๅˆ†ใ‚’ๅ–ใ‚‹ + fieldDiff := oldds.FieldDiffBySource(newds) + for of, f := range fieldDiff.Replaced { + datasetSchemaFieldIDMap[of] = f.ID() + } + + // ๅคใ„DSใฎDใ‚’ๆŽขใ—ๅ‡บใ™ + olddl, _, err := srv.DatasetRepo.FindBySchema(ctx, oldds.ID(), scenes, nil) + if err != nil { + return MigrateDatasetResult{}, err + } + + // ๅ‰Š้™คๅฏพ่ฑกใซ่ฟฝๅŠ  + noLogerUsedDS = append(noLogerUsedDS, oldds.ID()) + for _, oldd := range olddl { + noLogerUsedD = append(noLogerUsedD, oldd.ID()) + } + + // ๆ–ฐใ—ใ„DSใฎDใฎใฟๆŠฝๅ‡บ + currentNewdl := newdl.FilterByDatasetSchema(newds.ID()) + + // ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๅทฎๅˆ†ใ‚’ใจใ‚‹ + diff := dataset.List(olddl).DiffBySource(currentNewdl) + datasetDiffMap[newds.ID()] = diff + for od, d := range diff.Others { + datasetMapOldNew[od] = d + datasetIDMapOldNew[od] = d.ID() + newDatasetMap[d.ID()] = d + } + } + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใฎใƒžใ‚คใ‚ฐใƒฌใƒผใ‚ทใƒงใƒณ + propeties, err := srv.PropertyRepo.FindLinkedAll(ctx, sid) + if err != nil { + return MigrateDatasetResult{}, err + } + for _, 
p := range propeties { + p.MigrateDataset(property.DatasetMigrationParam{ + OldDatasetSchemaMap: datasetSchemaMapOldNew, + OldDatasetMap: datasetIDMapOldNew, + DatasetFieldIDMap: datasetSchemaFieldIDMap, + NewDatasetSchemaMap: newDatasetSchemaMap, + NewDatasetMap: newDatasetMap, + }) + } + result.Properties = propeties.Map() + + // ๆ–ฐใ—ใ„DSใงใƒซใƒผใƒ— + for _, newds := range newdsl { + oldds := oldDatasetSchemaMap[datasetSchemaMapNewOld[newds.ID()]] + if oldds == nil { + // ใƒชใƒณใ‚ฏใ•ใ‚Œใฆใ„ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚„ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚‚ไฝœๆˆใ•ใ‚Œใฆใ„ใชใ„ใฏใšใชใฎใง็„ก่ฆ– + continue + } + diff, ok := datasetDiffMap[newds.ID()] + if !ok { + continue + } + + // ใƒฌใ‚คใƒคใƒผใฎใƒžใ‚คใ‚ฐใƒฌใƒผใ‚ทใƒงใƒณ + result2, err := srv.migrateLayer(ctx, sid, oldds, newds, diff) + if err != nil { + return MigrateDatasetResult{}, err + } + + result = result.Merge(result2) + } + + result.RemovedDatasetSchemas = append(result.RemovedDatasetSchemas, noLogerUsedDS...) + result.RemovedDatasets = append(result.RemovedDatasets, noLogerUsedD...) 
+ return result, nil +} + +func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, oldds *dataset.Schema, newds *dataset.Schema, diff dataset.Diff) (MigrateDatasetResult, error) { + scenes := []id.SceneID{sid} + + // ๅ‰ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚นใ‚ญใƒผใƒžใซ็ดใฅใ„ใŸใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—ใ‚’ๅ–ๅพ— + layerGroups, err := srv.LayerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, sid, oldds.ID()) + if err != nil { + return MigrateDatasetResult{}, err + } + + addedAndUpdatedLayers := layer.List{} + addedProperties := property.List{} + removedLayers := []id.LayerID{} + + for _, lg := range layerGroups { + layers, err := srv.LayerRepo.FindByIDs(ctx, lg.Layers().Layers(), scenes) + if err != nil { + return MigrateDatasetResult{}, err + } + + // ใ‚นใ‚ญใƒผใƒžใŒๆถˆๆป…ใ—ใŸๅ ดๅˆ + if newds == nil { + // ใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—่‡ชไฝ“ใ‚’ใ‚ขใƒณใƒชใƒณใ‚ฏ + lg.Unlink() + // ๅญใƒฌใ‚คใƒคใƒผใ‚’ๅ…จใฆๅ‰Š้™ค + for _, l := range layers { + if l == nil { + continue + } + lid := (*l).ID() + removedLayers = append(removedLayers, lid) + } + lg.Layers().Empty() + continue + } + + // ใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—ใฎใƒชใƒณใ‚ฏๅผตใ‚Šๆ›ฟใˆใจๅๅ‰ๅค‰ๆ›ด + lg.Link(newds.ID()) + if lg.Name() == oldds.Name() { + lg.Rename(newds.Name()) + } + + // ๆถˆใˆใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆโ†’ใƒฌใ‚คใƒคใƒผใ‚’ๅ‰Š้™ค + for _, d := range diff.Removed { + if l := layers.FindByDataset(d.ID()); l != nil { + lg.Layers().RemoveLayer(l.ID()) + removedLayers = append(removedLayers, l.ID()) + } + } + + // ่ฟฝๅŠ ใ•ใ‚ŒใŸใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆโ†’ใƒฌใ‚คใƒคใƒผใ‚’ไฝœๆˆใ—ใฆ่ฟฝๅŠ  + if len(diff.Added) > 0 { + // ใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ๅ–ๅพ— + var plug *plugin.Plugin + if pid := lg.Plugin(); pid != nil { + plug2, err := srv.Plugin(ctx, *pid) + if err != nil || len(plug2) < 1 { + return MigrateDatasetResult{}, err + } + plug = plug2[0] + } + + representativeFieldID := newds.RepresentativeFieldID() + for _, added := range diff.Added { + did := added.ID() + + name := "" + if rf := 
added.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { + name = rf.Value().Value().(string) + } + + layerItem, property, err := initializer.LayerItem{ + SceneID: sid, + ParentLayerID: lg.ID(), + LinkedDatasetID: &did, + Plugin: plug, + ExtensionID: lg.Extension(), + Name: name, + }.Initialize() + if err != nil { + return MigrateDatasetResult{}, err + } + + var l layer.Layer = layerItem + lg.Layers().AddLayer(layerItem.ID(), -1) + addedAndUpdatedLayers = append(addedAndUpdatedLayers, &l) + addedProperties = append(addedProperties, property) + } + } + + // ๆฎ‹ใ‚Šใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆโ†’ใƒฌใ‚คใƒคใƒผใฎใƒชใƒณใ‚ฏใ‚’ๅผตใ‚Šๆ›ฟใˆ + for olddsid, newds := range diff.Others { + if il := layers.FindByDataset(olddsid); il != nil { + var il2 layer.Layer = il + il.Link(newds.ID()) + addedAndUpdatedLayers = append(addedAndUpdatedLayers, &il2) + } + } + } + + layers := append( + addedAndUpdatedLayers, + layerGroups.ToLayerList()..., + ) + + set := id.NewLayerIDSet() + set.Add(removedLayers...) + + return MigrateDatasetResult{ + Layers: layers.Map(), + Properties: addedProperties.Map(), + RemovedLayers: set, + }, nil +} diff --git a/pkg/scene/sceneops/dataset_migrator_test.go b/pkg/scene/sceneops/dataset_migrator_test.go new file mode 100644 index 000000000..a601dd2c7 --- /dev/null +++ b/pkg/scene/sceneops/dataset_migrator_test.go @@ -0,0 +1,46 @@ +package sceneops + +//import ( +// "github.com/reearth/reearth-backend/pkg/dataset" +// "github.com/reearth/reearth-backend/pkg/id" +// "testing" +//) +// +//func TestDatasetMigrator_Migrate(t *testing.T) { +// sid := id.NewSceneID() +// dsid:=id.NewDatasetID() +// dssid:=id.NewDatasetSchemaID() +// dssfid:=id.NewDatasetSchemaFieldID() +// testCases := []struct { +// Name string +// SID id.SceneID +// NewDSL []*dataset.Schema +// NewDL dataset.List +// Expected MigrateDatasetResult +// Err error +// }{ +// { +// Name: "", +// SID: sid, +// NewDSL: []*dataset.Schema{ +// dataset.NewSchema(). 
+// ID(dssid). +// Fields([]*dataset.SchemaField{ +// dataset.NewSchemaField(). +// ID(dssfid).MustBuild(), +// }).Scene(sid).MustBuild()}, +// NewDL: dataset.List{ +// dataset.New().ID(dsid).MustBuild(), +// }, +// Expected: MigrateDatasetResult{}, +// Err: nil, +// }, +// } +// for _,tc:=range testCases{ +// tc:=tc +// t.Run(tc.Name, func(tt *testing.T) { +// tt.Parallel() +// res,err:=tc +// }) +// } +//} diff --git a/pkg/scene/sceneops/plugin_installer.go b/pkg/scene/sceneops/plugin_installer.go new file mode 100644 index 000000000..4678cf307 --- /dev/null +++ b/pkg/scene/sceneops/plugin_installer.go @@ -0,0 +1,122 @@ +package sceneops + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type PluginInstaller struct { + // PluginRepo repo.Plugin + // PluginRepositoryRepo gateway.PluginRepository + // PropertySchemaRepo repo.PropertySchema +} + +func (s PluginInstaller) InstallPluginFromRepository(pluginID id.PluginID) error { + return errors.New("not implemented") + + // manifest, err := s.PluginRepositoryRepo.Manifest(pluginID) + // if err != nil { + // return err + // } + + // // save + // if manifest.Schema != nil { + // err = s.PropertySchemaRepo.SaveAll(manifest.Schema) + // if err != nil { + // return err + // } + // } + + // for _, s := range manifest.ExtensionSchema { + // err = i.propertySchemaRepo.Save(&s) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + // } + + // err = i.pluginRepo.Save(plugin) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // // Download and extract plugin files to storage + // data, err := i.pluginRepositoryRepo.Data(inp.Name, inp.Version) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // _, err = i.fileRepo.UploadAndExtractPluginFiles(data, plugin) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // return nil + // } + + // 
// UploadPlugin _ + // func (s PluginInstaller) UploadPlugin(reader io.Reader) error { + // panic("not implemented") + + // manifest, err := s.PluginRepositoryRepo.Manifest(inp.Name, inp.Version) + // if err != nil { + // i.output.Upload(nil, err) + // return + // } + + // // build plugin + // plugin, err := plugin.New(). + // NewID(). + // FromManifest(manifest). + // Developer(operator.User). + // PluginSeries(pluginSeries.ID()). + // CreatedAt(time.Now()). + // Public(inp.Public). + // Build() + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // // save + // if manifest.Schema != nil { + // err = i.propertySchemaRepo.Save(manifest.Schema) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + // } + + // for _, s := range manifest.ExtensionSchema { + // err = i.propertySchemaRepo.Save(&s) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + // } + + // err = i.pluginRepo.Save(plugin) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // // Download and extract plugin files to storage + // data, err := i.pluginRepositoryRepo.Data(inp.Name, inp.Version) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // _, err = i.fileRepo.UploadAndExtractPluginFiles(data, plugin) + // if err != nil { + // i.output.Upload(nil, err1.ErrInternalBy(err)) + // return + // } + + // return nil +} diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go new file mode 100644 index 000000000..04d174373 --- /dev/null +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -0,0 +1,243 @@ +package sceneops + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/pkg/dataset" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + 
"github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type PluginMigrator struct { + Property property.Loader + PropertySchema property.SchemaLoader + Dataset dataset.Loader + Layer layer.LoaderByScene + Plugin plugin.Loader +} + +type MigratePluginsResult struct { + Scene *scene.Scene + Layers layer.List + Properties []*property.Property + RemovedLayers []id.LayerID + RemovedProperties []id.PropertyID +} + +var ( + ErrPluginNotInstalled error = errors.New("plugin not installed") + ErrInvalidPlugins error = errors.New("invalid plugins") +) + +func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, oldPluginID, newPluginID id.PluginID) (MigratePluginsResult, error) { + if s == nil { + return MigratePluginsResult{}, err1.ErrInternalBy(errors.New("scene is nil")) + } + + if oldPluginID.Equal(newPluginID) || oldPluginID.Name() != newPluginID.Name() { + return MigratePluginsResult{}, ErrInvalidPlugins + } + + plugins, err := s.Plugin(ctx, oldPluginID, newPluginID) + if err != nil || len(plugins) < 2 { + return MigratePluginsResult{}, ErrInvalidPlugins + } + + oldPlugin := plugins[0] + newPlugin := plugins[1] + + if !sc.PluginSystem().Has(oldPluginID) { + return MigratePluginsResult{}, ErrPluginNotInstalled + } + + // ๅ…จใƒฌใ‚คใƒคใƒผใฎๅ–ๅพ— + layers, err := s.Layer(ctx, sc.ID()) + if err != nil { + return MigratePluginsResult{}, err + } + + modifiedLayers := layer.List{} + removedLayers := []id.LayerID{} + propertyIDs := []id.PropertyID{} + removedPropertyIDs := []id.PropertyID{} + schemaMap := map[id.PropertySchemaID]*property.Schema{} + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚นใ‚ญใƒผใƒžใฎๅ–ๅพ—ใจใ€ๅคใ„ใ‚นใ‚ญใƒผใƒžใจๆ–ฐใ—ใ„ใ‚นใ‚ญใƒผใƒžใฎใƒžใƒƒใƒ—ไฝœๆˆ + schemaIDs := []id.PropertySchemaID{} + if oldPlugin.Schema() != nil { + if pps := newPlugin.Schema(); pps != nil { + schemaIDs = append(schemaIDs, *pps) + } + } + for _, e := range newPlugin.Extensions() { 
+ schemaIDs = append(schemaIDs, e.Schema()) + } + schemas, err := s.PropertySchema(ctx, schemaIDs...) + if err != nil { + return MigratePluginsResult{}, err + } + if oops := oldPlugin.Schema(); oops != nil { + if pps := newPlugin.Schema(); pps != nil { + for _, s := range schemas { + if s.ID() == *pps { + schemaMap[*oops] = s + } + } + } + } + for _, e := range oldPlugin.Extensions() { + if ne := newPlugin.Extension(e.ID()); ne != nil { + for _, s := range schemas { + if s.ID() == ne.Schema() { + schemaMap[e.Schema()] = s + } + } + } + } + + // ใ‚ทใƒผใƒณใฎใƒ—ใƒฉใ‚ฐใ‚คใƒณ + sc.PluginSystem().Upgrade(oldPluginID, newPluginID) + for _, sp := range sc.PluginSystem().Plugins() { + if sp.Plugin().Equal(newPluginID) && sp.Property() != nil { + propertyIDs = append(propertyIDs, *sp.Property()) + } + } + + // ใ‚ทใƒผใƒณใฎใ‚ฆใ‚ฃใ‚ธใ‚งใƒƒใƒˆ + sc.WidgetSystem().Replace(oldPluginID, newPluginID) + for _, w := range sc.WidgetSystem().Widgets() { + if w.Plugin().Equal(newPluginID) { + if newPlugin.Extension(w.Extension()) == nil { + sc.WidgetSystem().Remove(oldPluginID, w.Extension()) + } else { + propertyIDs = append(propertyIDs, w.Property()) + } + } + } + + // ใƒฌใ‚คใƒคใƒผ + for _, l := range layers { + if l == nil { + continue + } + ll := *l + llp := ll.Plugin() + lle := ll.Extension() + + // ไธๆญฃใชใƒฌใ‚คใƒคใƒผใฎๆคœๅ‡บ + if llp != nil && lle != nil && (*llp).Equal(oldPluginID) { + if newPlugin.Extension(*lle) == nil { + // ๅ‰Š้™ค + removedLayers = append(removedLayers, ll.ID()) + if p := ll.Property(); p != nil { + removedPropertyIDs = append(removedPropertyIDs, *p) + } + if ib := ll.Infobox(); ib != nil { + removedPropertyIDs = append(removedPropertyIDs, ib.Property()) + for _, f := range ib.Fields() { + removedPropertyIDs = append(removedPropertyIDs, f.Property()) + } + } + continue + } + } + + if p := ll.Property(); p != nil { + propertyIDs = append(propertyIDs, *p) + } + + // ไธๆญฃใชInfoboxFieldใฎๅ‰Š้™ค + if ib := ll.Infobox(); ib != nil { + removeFields := 
[]id.InfoboxFieldID{} + for _, f := range ib.Fields() { + if newPlugin.Extension(f.Extension()) == nil { + removeFields = append(removeFields, f.ID()) + removedPropertyIDs = append(removedPropertyIDs, f.Property()) + } else { + propertyIDs = append(propertyIDs, f.Property()) + } + } + for _, f := range removeFields { + ib.Remove(f) + } + } + + ll.SetPlugin(&newPluginID) + modifiedLayers = append(modifiedLayers, l) + } + + // ไธๆญฃใชใƒฌใ‚คใƒคใƒผใฎใ‚ฐใƒซใƒผใƒ—ใ‹ใ‚‰ใฎๅ‰Š้™ค + for _, lg := range layers.ToLayerGroupList() { + modified := false + canceled := false + for _, l := range removedLayers { + if l == lg.ID() { + canceled = true + break + } + if lg.Layers().HasLayer(l) { + lg.Layers().RemoveLayer(l) + modified = true + } + } + if canceled { + continue + } + if modified { + already := false + for _, l := range modifiedLayers { + if l != nil && (*l).ID() == lg.ID() { + already = true + break + } + } + if already { + continue + } + var lg2 layer.Layer = lg + modifiedLayers = append(modifiedLayers, &lg2) + } + } + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใฎๅ–ๅพ— + properties, err := s.Property(ctx, propertyIDs...) + if err != nil { + return MigratePluginsResult{}, err + } + + // ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใฎๅ–ๅพ— + datasetIDs := collectDatasetIDs(properties) + datasets, err := s.Dataset(ctx, datasetIDs...) + if err != nil { + return MigratePluginsResult{}, err + } + datasetLoader := datasets.Map().Loader() + + // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใฎ็งป่กŒไฝœๆฅญ + for _, p := range properties { + if schema := schemaMap[p.Schema()]; schema != nil { + p.MigrateSchema(ctx, schema, datasetLoader) + } + } + + return MigratePluginsResult{ + Scene: sc, + Layers: modifiedLayers, + Properties: properties, + RemovedLayers: removedLayers, + RemovedProperties: removedPropertyIDs, + }, nil +} + +func collectDatasetIDs(properties []*property.Property) []id.DatasetID { + res := []id.DatasetID{} + for _, p := range properties { + res = append(res, p.CollectDatasets()...) 
+ } + return res +} diff --git a/pkg/scene/widget.go b/pkg/scene/widget.go new file mode 100644 index 000000000..8d3f3902c --- /dev/null +++ b/pkg/scene/widget.go @@ -0,0 +1,63 @@ +package scene + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type Widget struct { + id id.WidgetID + plugin id.PluginID + extension id.PluginExtensionID + property id.PropertyID + enabled bool +} + +func NewWidget(wid *id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) (*Widget, error) { + if !plugin.Validate() || string(extension) == "" || id.ID(property).IsNil() { + return nil, id.ErrInvalidID + } + + if wid == nil || (*wid).IsNil() { + wid = id.NewWidgetID().Ref() + } + + return &Widget{ + id: *wid, + plugin: plugin, + extension: extension, + property: property, + enabled: enabled, + }, nil +} + +func MustNewWidget(wid *id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) *Widget { + w, err := NewWidget(wid, plugin, extension, property, enabled) + if err != nil { + panic(err) + } + return w +} + +func (w *Widget) ID() id.WidgetID { + return w.id +} + +func (w *Widget) Plugin() id.PluginID { + return w.plugin +} + +func (w *Widget) Extension() id.PluginExtensionID { + return w.extension +} + +func (w *Widget) Property() id.PropertyID { + return w.property +} + +func (w *Widget) Enabled() bool { + return w.enabled +} + +func (w *Widget) SetEnabled(enabled bool) { + w.enabled = enabled +} diff --git a/pkg/scene/widget_system.go b/pkg/scene/widget_system.go new file mode 100644 index 000000000..00418ae03 --- /dev/null +++ b/pkg/scene/widget_system.go @@ -0,0 +1,112 @@ +package scene + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrDuplicatedWidgetInstance = errors.New("duplicated widget instance") +) + +type WidgetSystem struct { + widgets []*Widget +} + +func NewWidgetSystem(w []*Widget) *WidgetSystem { + if w == nil { + return 
&WidgetSystem{widgets: []*Widget{}} + } + w2 := make([]*Widget, 0, len(w)) + for _, w1 := range w { + if w1 == nil { + continue + } + duplicated := false + for _, w3 := range w2 { + if w1.ID() == w3.ID() { + duplicated = true + break + } + } + if !duplicated { + w3 := *w1 + w2 = append(w2, &w3) + } + } + return &WidgetSystem{widgets: w2} +} + +func (w *WidgetSystem) Widgets() []*Widget { + if w == nil { + return nil + } + return append([]*Widget{}, w.widgets...) +} + +func (w *WidgetSystem) Widget(p id.PluginID, e id.PluginExtensionID) *Widget { + if w == nil { + return nil + } + for _, ww := range w.widgets { + if ww.plugin.Equal(p) && ww.extension == e { + return ww + } + } + return nil +} + +func (w *WidgetSystem) Has(p id.PluginID, e id.PluginExtensionID) bool { + if w == nil { + return false + } + for _, w2 := range w.widgets { + if w2.plugin.Equal(p) && w2.extension == e { + return true + } + } + return false +} + +func (w *WidgetSystem) Add(sw *Widget) { + if w == nil || sw == nil || w.Has(sw.plugin, sw.extension) { + return + } + sw2 := *sw + w.widgets = append(w.widgets, &sw2) +} + +func (w *WidgetSystem) Remove(p id.PluginID, e id.PluginExtensionID) { + if w == nil { + return + } + for i, ww := range w.widgets { + if ww.plugin.Equal(p) && ww.extension == e { + w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) 
+ return + } + } +} + +func (w *WidgetSystem) Replace(oldp, newp id.PluginID) { + if w == nil || w.widgets == nil { + return + } + for _, ww := range w.widgets { + if ww.plugin.Equal(oldp) { + ww.plugin = newp + } + } +} + +func (w *WidgetSystem) Properties() []id.PropertyID { + if w == nil { + return nil + } + res := make([]id.PropertyID, 0, len(w.widgets)) + for _, ww := range w.widgets { + res = append(res, ww.property) + } + return res +} diff --git a/pkg/scene/widget_system_test.go b/pkg/scene/widget_system_test.go new file mode 100644 index 000000000..3ca3659a9 --- /dev/null +++ b/pkg/scene/widget_system_test.go @@ -0,0 +1,364 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetSystem(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + testCases := []struct { + Name string + Input []*Widget + Expected *WidgetSystem + }{ + { + Name: "nil widget list", + Input: nil, + Expected: &WidgetSystem{widgets: []*Widget{}}, + }, + { + Name: "widget list with nil", + Input: []*Widget{nil}, + Expected: &WidgetSystem{widgets: []*Widget{}}, + }, + { + Name: "widget list with matched values", + Input: []*Widget{ + { + id: *wid, + plugin: pid, + extension: "eee", + property: pr, + enabled: true, + }, + }, + Expected: &WidgetSystem{widgets: []*Widget{ + MustNewWidget(wid, pid, "eee", pr, true), + }}, + }, + { + Name: "widget list with matched values", + Input: []*Widget{ + { + id: *wid, + plugin: pid, + extension: "eee", + property: pr, + enabled: true, + }, + { + id: *wid, + plugin: pid, + extension: "eee", + property: pr, + enabled: true, + }, + }, + Expected: &WidgetSystem{widgets: []*Widget{ + MustNewWidget(wid, pid, "eee", pr, true), + }}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := NewWidgetSystem(tc.Input) + assert.Equal(tt, tc.Expected, 
res) + }) + } +} + +func TestWidgetSystem_Add(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + testCases := []struct { + Name string + Input *Widget + WS, Expected *WidgetSystem + }{ + { + Name: "add new widget", + Input: &Widget{ + id: *wid, + plugin: pid, + extension: "eee", + property: pr, + enabled: true, + }, + WS: NewWidgetSystem([]*Widget{}), + Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + }, + { + Name: "add nil widget", + Input: nil, + WS: NewWidgetSystem([]*Widget{}), + Expected: NewWidgetSystem([]*Widget{}), + }, + { + Name: "add to nil widgetSystem", + Input: &Widget{ + id: *wid, + plugin: pid, + extension: "eee", + property: pr, + enabled: true, + }, + WS: nil, + Expected: nil, + }, + { + Name: "add existing widget", + Input: &Widget{ + id: *wid, + plugin: pid, + extension: "eee", + property: pr, + enabled: true, + }, + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.WS.Add(tc.Input) + assert.Equal(tt, tc.Expected, tc.WS) + }) + } +} + +func TestWidgetSystem_Remove(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + testCases := []struct { + Name string + PID id.PluginID + EID id.PluginExtensionID + WS, Expected *WidgetSystem + }{ + { + Name: "remove a widget", + PID: pid, + EID: "eee", + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Expected: NewWidgetSystem([]*Widget{}), + }, + { + Name: "remove from nil widgetSystem", + WS: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.WS.Remove(tc.PID, tc.EID) + assert.Equal(tt, tc.Expected, tc.WS) + }) + } +} + +func 
TestWidgetSystem_Replace(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pid2 := id.MustPluginID("zzz#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + testCases := []struct { + Name string + PID, NewID id.PluginID + WS, Expected *WidgetSystem + }{ + { + Name: "replace a widget", + PID: pid, + NewID: pid2, + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true)}), + }, + { + Name: "replace with nil widget", + PID: pid, + WS: NewWidgetSystem(nil), + Expected: NewWidgetSystem(nil), + }, + { + Name: "replace from nil widgetSystem", + WS: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + tc.WS.Replace(tc.PID, tc.NewID) + assert.Equal(tt, tc.Expected, tc.WS) + }) + } +} + +func TestWidgetSystem_Properties(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + pr2 := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + wid2 := id.NewWidgetID().Ref() + testCases := []struct { + Name string + WS *WidgetSystem + Expected []id.PropertyID + }{ + { + Name: "get properties", + WS: NewWidgetSystem([]*Widget{ + MustNewWidget(wid, pid, "eee", pr, true), + MustNewWidget(wid2, pid, "eee", pr2, true), + }), + Expected: []id.PropertyID{pr, pr2}, + }, + { + Name: "get properties from nil widgetSystem", + WS: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.WS.Properties() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestWidgetSystem_Widgets(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + pr2 := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + wid2 := id.NewWidgetID().Ref() + testCases := []struct { + Name string + WS *WidgetSystem + Expected []*Widget + }{ + { + Name: "get widgets", + WS: 
NewWidgetSystem([]*Widget{ + MustNewWidget(wid, pid, "eee", pr, true), + MustNewWidget(wid2, pid, "eee", pr2, true), + }), + Expected: []*Widget{ + MustNewWidget(wid, pid, "eee", pr, true), + MustNewWidget(wid2, pid, "eee", pr2, true), + }, + }, + { + Name: "get widgets from nil widgetSystem", + WS: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.WS.Widgets() + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestWidgetSystem_Widget(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + testCases := []struct { + Name string + PID id.PluginID + EID id.PluginExtensionID + WS *WidgetSystem + Expected *Widget + }{ + { + Name: "get a widget", + PID: pid, + EID: "eee", + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Expected: MustNewWidget(wid, pid, "eee", pr, true), + }, + { + Name: "dont has the widget", + PID: pid, + EID: "eee", + WS: NewWidgetSystem([]*Widget{}), + Expected: nil, + }, + { + Name: "get widget from nil widgetSystem", + WS: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.WS.Widget(tc.PID, tc.EID) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestWidgetSystem_Has(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID().Ref() + testCases := []struct { + Name string + PID id.PluginID + EID id.PluginExtensionID + WS *WidgetSystem + Expected bool + }{ + { + Name: "has a widget", + PID: pid, + EID: "eee", + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Expected: true, + }, + { + Name: "dont has a widget", + PID: pid, + EID: "eee", + WS: NewWidgetSystem([]*Widget{}), + Expected: false, + }, + { + Name: "has from nil widgetSystem", + WS: nil, + Expected: false, + }, + } + for _, tc := range 
testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.WS.Has(tc.PID, tc.EID) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go new file mode 100644 index 000000000..f0719836f --- /dev/null +++ b/pkg/scene/widget_test.go @@ -0,0 +1,210 @@ +package scene + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewWidget(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID() + testCases := []struct { + Name string + Id *id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + Expected struct { + Id id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + } + err error + }{ + { + Name: "success new widget", + Id: wid.Ref(), + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + Expected: struct { + Id id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + }{ + Id: wid, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + }, + err: nil, + }, + { + Name: "success nil id", + Id: nil, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + Expected: struct { + Id id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + }{ + Id: wid, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + }, + err: nil, + }, + { + Name: "fail empty extension", + Id: wid.Ref(), + Plugin: pid, + Extension: "", + Property: pr, + Enabled: true, + err: id.ErrInvalidID, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewWidget(tc.Id, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) + if err == nil { + if tc.Id == nil { + 
assert.NotNil(tt, res.ID()) + } else { + assert.Equal(tt, tc.Expected.Id, res.ID()) + } + assert.Equal(tt, tc.Expected.Property, res.Property()) + assert.Equal(tt, tc.Expected.Extension, res.Extension()) + assert.Equal(tt, tc.Expected.Enabled, res.Enabled()) + assert.Equal(tt, tc.Expected.Plugin, res.Plugin()) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} +func TestMustNewWidget(t *testing.T) { + pid := id.MustPluginID("xxx#1.1.1") + pr := id.NewPropertyID() + wid := id.NewWidgetID() + testCases := []struct { + Name string + Id *id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + Expected struct { + Id id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + } + err error + }{ + { + Name: "success new widget", + Id: wid.Ref(), + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + Expected: struct { + Id id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + }{ + Id: wid, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + }, + err: nil, + }, + { + Name: "success nil id", + Id: nil, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + Expected: struct { + Id id.WidgetID + Plugin id.PluginID + Extension id.PluginExtensionID + Property id.PropertyID + Enabled bool + }{ + Id: wid, + Plugin: pid, + Extension: "eee", + Property: pr, + Enabled: true, + }, + err: nil, + }, + { + Name: "fail empty extension", + Id: wid.Ref(), + Plugin: pid, + Extension: "", + Property: pr, + Enabled: true, + err: id.ErrInvalidID, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *Widget + defer func() { + if r := recover(); r == nil { + if tc.Id == nil { + assert.NotNil(tt, res.ID()) + } else { + assert.Equal(tt, tc.Expected.Id, res.ID()) + } + assert.Equal(tt, tc.Expected.Property, 
res.Property()) + assert.Equal(tt, tc.Expected.Extension, res.Extension()) + assert.Equal(tt, tc.Expected.Enabled, res.Enabled()) + assert.Equal(tt, tc.Expected.Plugin, res.Plugin()) + } + }() + res = MustNewWidget(tc.Id, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) + + }) + } +} + +func TestWidget_SetEnabled(t *testing.T) { + res := MustNewWidget(id.NewWidgetID().Ref(), id.MustPluginID("xxx#1.1.1"), "eee", id.NewPropertyID(), false) + res.SetEnabled(true) + assert.True(t, res.Enabled()) +} diff --git a/pkg/shp/errreader.go b/pkg/shp/errreader.go new file mode 100644 index 000000000..4f2d63c3a --- /dev/null +++ b/pkg/shp/errreader.go @@ -0,0 +1,27 @@ +package shp + +import ( + "fmt" + "io" +) + +// errReader is a helper to perform multiple successive read from another reader +// and do the error checking only once afterwards. It will not perform any new +// reads in case there was an error encountered earlier. +type errReader struct { + io.Reader + e error + n int64 +} + +func (er *errReader) Read(p []byte) (n int, err error) { + if er.e != nil { + return 0, fmt.Errorf("unable to read after previous error: %v", er.e) + } + n, err = er.Reader.Read(p) + if n < len(p) && err != nil { + er.e = err + } + er.n += int64(n) + return n, er.e +} diff --git a/pkg/shp/reader.go b/pkg/shp/reader.go new file mode 100644 index 000000000..e391625ce --- /dev/null +++ b/pkg/shp/reader.go @@ -0,0 +1,287 @@ +package shp + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "math" + "os" + "strings" +) + +// Reader provides a interface for reading Shapefiles. Calls +// to the Next method will iterate through the objects in the +// Shapefile. After a call to Next the object will be available +// through the Shape method. 
+type Reader struct { + GeometryType ShapeType + bbox Box + err error + + shp io.ReadSeeker + shape Shape + num int32 + filename string + filelength int64 + + dbf io.ReadSeeker + dbfFields []Field + dbfNumRecords int32 + dbfHeaderLength int16 + dbfRecordLength int16 +} + +// ReadFrom read from io.Reader +func ReadFrom(r io.Reader) (*Reader, error) { + buf := new(bytes.Buffer) + _, err := buf.ReadFrom(r) + if err != nil { + return nil, err + } + reader := bytes.NewReader(buf.Bytes()) + sr := &Reader{shp: reader} + return sr, sr.readHeaders() +} + +// BBox returns the bounding box of the shapefile. +func (r *Reader) BBox() Box { + return r.bbox +} + +// Read and parse headers in the Shapefile. This will +// fill out GeometryType, filelength and bbox. +func (r *Reader) readHeaders() error { + er := &errReader{Reader: r.shp} + // don't trust the the filelength in the header + r.filelength, _ = r.shp.Seek(0, io.SeekEnd) + + var filelength int32 + _, err := r.shp.Seek(24, 0) + if err != nil { + return err + } + // file length + err = binary.Read(er, binary.BigEndian, &filelength) + if err != nil { + return err + } + _, err = r.shp.Seek(32, 0) + if err != nil { + return err + } + err = binary.Read(er, binary.LittleEndian, &r.GeometryType) + if err != nil { + return err + } + r.bbox.MinX = readFloat64(er) + r.bbox.MinY = readFloat64(er) + r.bbox.MaxX = readFloat64(er) + r.bbox.MaxY = readFloat64(er) + _, err = r.shp.Seek(100, 0) + if err != nil { + return err + } + return er.e +} + +func readFloat64(r io.Reader) float64 { + var bits uint64 + _ = binary.Read(r, binary.LittleEndian, &bits) + return math.Float64frombits(bits) +} + +// Close closes the Shapefile. + +// Shape returns the most recent feature that was read by +// a call to Next. It returns two values, the int is the +// object index starting from zero in the shapefile which +// can be used as row in ReadAttribute, and the Shape is the object. 
+func (r *Reader) Shape() (int, Shape) { + return int(r.num) - 1, r.shape +} + +// Attribute returns value of the n-th attribute of the most recent feature +// that was read by a call to Next. +func (r *Reader) Attribute(n int) string { + return r.ReadAttribute(int(r.num)-1, n) +} + +// newShape creates a new shape with a given type. +func newShape(shapetype ShapeType) (Shape, error) { + switch shapetype { + case NULL: + return new(Null), nil + case POINT: + return new(Point), nil + case POLYLINE: + return new(PolyLine), nil + case POLYGON: + return new(Polygon), nil + case MULTIPOINT: + return new(MultiPoint), nil + case POINTZ: + return new(PointZ), nil + case POLYLINEZ: + return new(PolyLineZ), nil + case POLYGONZ: + return new(PolygonZ), nil + case MULTIPOINTZ: + return new(MultiPointZ), nil + case POINTM: + return new(PointM), nil + case POLYLINEM: + return new(PolyLineM), nil + case POLYGONM: + return new(PolygonM), nil + case MULTIPOINTM: + return new(MultiPointM), nil + case MULTIPATCH: + return new(MultiPatch), nil + default: + return nil, fmt.Errorf("Unsupported shape type: %v", shapetype) + } +} + +// Next reads in the next Shape in the Shapefile, which +// will then be available through the Shape method. It +// returns false when the reader has reached the end of the +// file or encounters an error. 
+func (r *Reader) Next() bool { + cur, _ := r.shp.Seek(0, io.SeekCurrent) + if cur >= r.filelength { + return false + } + + var size int32 + var shapetype ShapeType + er := &errReader{Reader: r.shp} + err1 := binary.Read(er, binary.BigEndian, &r.num) + if err1 != nil { + r.err = err1 + } + err1 = binary.Read(er, binary.BigEndian, &size) + if err1 != nil { + r.err = err1 + } + err1 = binary.Read(er, binary.LittleEndian, &shapetype) + if err1 != nil { + r.err = err1 + } + if er.e != nil { + if er.e != io.EOF { + r.err = fmt.Errorf("error when reading metadata of next shape: %v", er.e) + } else { + r.err = io.EOF + } + return false + } + + var err error + r.shape, err = newShape(shapetype) + if err != nil { + r.err = fmt.Errorf("error decoding shape type: %v", err) + return false + } + err = r.shape.read(er) + if err != nil { + r.err = fmt.Errorf("error while reading next shape: %v", err) + return false + } + if er.e != nil { + r.err = fmt.Errorf("error while reading next shape: %v", er.e) + return false + } + + // move to next object + _, err = r.shp.Seek(int64(size)*2+cur+8, 0) + if err != nil { + r.err = fmt.Errorf("error while seeking: %v", err) + return false + } + return true +} + +// Opens DBF file using r.filename + "dbf". This method +// will parse the header and fill out all dbf* values int +// the f object. 
+func (r *Reader) openDbf() (err error) { + if r.dbf != nil { + return + } + + r.dbf, err = os.Open(r.filename + ".dbf") + if err != nil { + return + } + + // read header + _, err = r.dbf.Seek(4, io.SeekStart) + if err != nil { + return err + } + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfNumRecords) + if err != nil { + return err + } + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfHeaderLength) + if err != nil { + return err + } + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfRecordLength) + if err != nil { + return err + } + _, err = r.dbf.Seek(20, io.SeekCurrent) // skip padding + if err != nil { + return err + } + numFields := int(math.Floor(float64(r.dbfHeaderLength-33) / 32.0)) + r.dbfFields = make([]Field, numFields) + err = binary.Read(r.dbf, binary.LittleEndian, &r.dbfFields) + if err != nil { + return err + } + return +} + +// Fields returns a slice of Fields that are present in the +// DBF table. +func (r *Reader) Fields() []Field { + err := r.openDbf() // make sure we have dbf file to read from + if err != nil { + return nil + } + return r.dbfFields +} + +// Err returns the last non-EOF error encountered. +func (r *Reader) Err() error { + if r.err == io.EOF { + return nil + } + return r.err +} + +// ReadAttribute returns the attribute value at row for field in +// the DBF table as a string. Both values starts at 0. 
+func (r *Reader) ReadAttribute(row int, field int) string { + err := r.openDbf() // make sure we have a dbf file to read from + if err != nil { + return "" + } + seekTo := 1 + int64(r.dbfHeaderLength) + (int64(row) * int64(r.dbfRecordLength)) + for n := 0; n < field; n++ { + seekTo += int64(r.dbfFields[n].Size) + } + _, err = r.dbf.Seek(seekTo, io.SeekStart) + if err != nil { + return "" + } + buf := make([]byte, r.dbfFields[field].Size) + _, err = r.dbf.Read(buf) + if err != nil { + return "" + } + return strings.Trim(string(buf[:]), " ") +} diff --git a/pkg/shp/sequentialreader.go b/pkg/shp/sequentialreader.go new file mode 100644 index 000000000..7f3b22104 --- /dev/null +++ b/pkg/shp/sequentialreader.go @@ -0,0 +1,294 @@ +package shp + +import ( + "encoding/binary" + "fmt" + "io" + "math" + "strings" +) + +// SequentialReader is the interface that allows reading shapes and attributes one after another. It also embeds io.Closer. +type SequentialReader interface { + // Close() frees the resources allocated by the SequentialReader. + io.Closer + + // Next() tries to advance the reading by one shape and one attribute row + // and returns true if the read operation could be performed without any + // error. + Next() bool + + // Shape returns the index and the last read shape. If the SequentialReader + // encountered any errors, nil is returned for the Shape. + Shape() (int, Shape) + + // Attribute returns the value of the n-th attribute in the current row. If + // the SequentialReader encountered any errors, the empty string is + // returned. + Attribute(n int) string + + // Fields returns the fields of the database. If the SequentialReader + // encountered any errors, nil is returned. + Fields() []Field + + // Err returns the last non-EOF error encountered. + Err() error +} + +// Attributes returns all attributes of the shape that sr was last advanced to. 
+func Attributes(sr SequentialReader) []string { + if sr.Err() != nil { + return nil + } + s := make([]string, len(sr.Fields())) + for i := range s { + s[i] = sr.Attribute(i) + } + return s +} + +// AttributeCount returns the number of fields of the database. +func AttributeCount(sr SequentialReader) int { + return len(sr.Fields()) +} + +// seqReader implements SequentialReader based on external io.ReadCloser +// instances +type seqReader struct { + shp, dbf io.ReadCloser + err error + + geometryType ShapeType + bbox Box + + shape Shape + num int32 + filelength int64 + + dbfFields []Field + dbfNumRecords int32 + dbfHeaderLength int16 + dbfRecordLength int16 + dbfRow []byte +} + +// Read and parse headers in the Shapefile. This will fill out GeometryType, +// filelength and bbox. +func (sr *seqReader) readHeaders() { + // contrary to Reader.readHeaders we cannot seek with the ReadCloser, so we + // need to trust the filelength in the header + + er := &errReader{Reader: sr.shp} + // shp headers + _, err := io.CopyN(io.Discard, er, 24) + if err != nil { + sr.err = fmt.Errorf("error when copy : %v", err) + return + } + var l int32 + err = binary.Read(er, binary.BigEndian, &l) + if err != nil { + sr.err = fmt.Errorf("error when reading : %v", err) + return + } + sr.filelength = int64(l) * 2 + _, err = io.CopyN(io.Discard, er, 4) + if err != nil { + sr.err = fmt.Errorf("error when copy : %v", err) + return + } + err = binary.Read(er, binary.LittleEndian, &sr.geometryType) + if err != nil { + sr.err = fmt.Errorf("error when reading : %v", err) + return + } + sr.bbox.MinX = readFloat64(er) + sr.bbox.MinY = readFloat64(er) + sr.bbox.MaxX = readFloat64(er) + sr.bbox.MaxY = readFloat64(er) + _, err = io.CopyN(io.Discard, er, 32) // skip four float64: Zmin, Zmax, Mmin, Max + if err != nil { + sr.err = fmt.Errorf("error when reading SHP header: %v", err) + return + } + if er.e != nil { + sr.err = fmt.Errorf("error when reading SHP header: %v", er.e) + return + } + + // dbf 
header + er = &errReader{Reader: sr.dbf} + if sr.dbf == nil { + return + } + _, err = io.CopyN(io.Discard, er, 4) + if err != nil { + sr.err = err + return + } + err = binary.Read(er, binary.LittleEndian, &sr.dbfNumRecords) + if err != nil { + sr.err = err + return + } + err = binary.Read(er, binary.LittleEndian, &sr.dbfHeaderLength) + if err != nil { + sr.err = err + return + } + err = binary.Read(er, binary.LittleEndian, &sr.dbfRecordLength) + if err != nil { + sr.err = err + return + } + _, err = io.CopyN(io.Discard, er, 20) // skip padding + if err != nil { + sr.err = err + return + } + numFields := int(math.Floor(float64(sr.dbfHeaderLength-33) / 32.0)) + sr.dbfFields = make([]Field, numFields) + err = binary.Read(er, binary.LittleEndian, &sr.dbfFields) + if err != nil { + sr.err = err + return + } + buf := make([]byte, 1) + _, err = er.Read(buf[:]) + if err != nil { + sr.err = fmt.Errorf("error when reading DBF header: %v", err) + return + } + if er.e != nil { + sr.err = fmt.Errorf("error when reading DBF header: %v", er.e) + return + } + if buf[0] != 0x0d { + sr.err = fmt.Errorf("Field descriptor array terminator not found") + return + } + sr.dbfRow = make([]byte, sr.dbfRecordLength) +} + +// Next implements a method of interface SequentialReader for seqReader. 
+func (sr *seqReader) Next() bool { + if sr.err != nil { + return false + } + var num, size int32 + var shapetype ShapeType + + // read shape + er := &errReader{Reader: sr.shp} + err1 := binary.Read(er, binary.BigEndian, &num) + if err1 != nil { + return false + } + err1 = binary.Read(er, binary.BigEndian, &size) + if err1 != nil { + return false + } + err1 = binary.Read(er, binary.LittleEndian, &shapetype) + if err1 != nil { + return false + } + if er.e != nil { + if er.e != io.EOF { + sr.err = fmt.Errorf("error when reading shapefile header: %v", er.e) + } else { + sr.err = io.EOF + } + return false + } + sr.num = num + var err error + sr.shape, err = newShape(shapetype) + if err != nil { + sr.err = fmt.Errorf("error decoding shape type: %v", err) + return false + } + err = sr.shape.read(er) + if err != nil { + sr.err = fmt.Errorf("error reading shape : %v", err) + return false + } + switch { + case er.e == io.EOF: + // io.EOF means end-of-file was reached gracefully after all + // shape-internal reads succeeded, so it's not a reason stop + // iterating over all shapes. + er.e = nil + case er.e != nil: + sr.err = fmt.Errorf("error while reading next shape: %v", er.e) + return false + } + skipBytes := int64(size)*2 + 8 - er.n + _, ce := io.CopyN(io.Discard, er, skipBytes) + if er.e != nil { + sr.err = er.e + return false + } + if ce != nil { + sr.err = fmt.Errorf("error when discarding bytes on sequential read: %v", ce) + return false + } + if _, err := io.ReadFull(sr.dbf, sr.dbfRow); err != nil { + sr.err = fmt.Errorf("error when reading DBF row: %v", err) + return false + } + if sr.dbfRow[0] != 0x20 && sr.dbfRow[0] != 0x2a { + sr.err = fmt.Errorf("Attribute row %d starts with incorrect deletion indicator", num) + } + return sr.err == nil +} + +// Shape implements a method of interface SequentialReader for seqReader. 
+func (sr *seqReader) Shape() (int, Shape) { + return int(sr.num) - 1, sr.shape +} + +// Attribute implements a method of interface SequentialReader for seqReader. +func (sr *seqReader) Attribute(n int) string { + if sr.err != nil { + return "" + } + start := 1 + f := 0 + for ; f < n; f++ { + start += int(sr.dbfFields[f].Size) + } + s := string(sr.dbfRow[start : start+int(sr.dbfFields[f].Size)]) + return strings.Trim(s, " ") +} + +// Err returns the first non-EOF error that was encountered. +func (sr *seqReader) Err() error { + if sr.err == io.EOF { + return nil + } + return sr.err +} + +// Close closes the seqReader and free all the allocated resources. +func (sr *seqReader) Close() error { + if err := sr.shp.Close(); err != nil { + return err + } + if err := sr.dbf.Close(); err != nil { + return err + } + return nil +} + +// Fields returns a slice of the fields that are present in the DBF table. +func (sr *seqReader) Fields() []Field { + return sr.dbfFields +} + +// SequentialReaderFromExt returns a new SequentialReader that interprets shp +// as a source of shapes whose attributes can be retrieved from dbf. +func SequentialReaderFromExt(shp, dbf io.ReadCloser) SequentialReader { + sr := &seqReader{shp: shp, dbf: dbf} + sr.readHeaders() + return sr +} diff --git a/pkg/shp/shapefile.go b/pkg/shp/shapefile.go new file mode 100644 index 000000000..61b2e3660 --- /dev/null +++ b/pkg/shp/shapefile.go @@ -0,0 +1,1066 @@ +package shp + +import ( + "encoding/binary" + "io" + "strings" +) + +//go:generate stringer -type=ShapeType + +// ShapeType is a identifier for the the type of shapes. +type ShapeType int32 + +// These are the possible shape types. 
+const ( + NULL ShapeType = 0 + POINT ShapeType = 1 + POLYLINE ShapeType = 3 + POLYGON ShapeType = 5 + MULTIPOINT ShapeType = 8 + POINTZ ShapeType = 11 + POLYLINEZ ShapeType = 13 + POLYGONZ ShapeType = 15 + MULTIPOINTZ ShapeType = 18 + POINTM ShapeType = 21 + POLYLINEM ShapeType = 23 + POLYGONM ShapeType = 25 + MULTIPOINTM ShapeType = 28 + MULTIPATCH ShapeType = 31 +) + +// Box structure made up from four coordinates. This type +// is used to represent bounding boxes +type Box struct { + MinX, MinY, MaxX, MaxY float64 +} + +// Extend extends the box with coordinates from the provided +// box. This method calls Box.ExtendWithPoint twice with +// {MinX, MinY} and {MaxX, MaxY} +func (b *Box) Extend(box Box) { + b.ExtendWithPoint(Point{box.MinX, box.MinY}) + b.ExtendWithPoint(Point{box.MaxX, box.MaxY}) +} + +// ExtendWithPoint extends box with coordinates from point +// if they are outside the range of the current box. +func (b *Box) ExtendWithPoint(p Point) { + if p.X < b.MinX { + b.MinX = p.X + } + if p.Y < b.MinY { + b.MinY = p.Y + } + if p.X > b.MaxX { + b.MaxX = p.X + } + if p.Y > b.MaxY { + b.MaxY = p.Y + } +} + +// BBoxFromPoints returns the bounding box calculated +// from points. +func BBoxFromPoints(points []Point) (box Box) { + for k, p := range points { + if k == 0 { + box = Box{p.X, p.Y, p.X, p.Y} + } else { + box.ExtendWithPoint(p) + } + } + return +} + +// Shape interface +type Shape interface { + BBox() Box + + read(io.Reader) error + write(io.Writer) error +} + +// Null is an empty shape. +type Null struct { +} + +// BBox Returns an empty BBox at the geometry origin. 
+func (n Null) BBox() Box { + return Box{0.0, 0.0, 0.0, 0.0} +} + +func (n *Null) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, n) + if err != nil { + return err + } + return nil +} + +func (n *Null) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, n) + if err != nil { + return err + } + return nil +} + +// Point is the shape that consists of single a geometry point. +type Point struct { + X, Y float64 +} + +// BBox returns the bounding box of the Point feature, i.e. an empty area at +// the point location itself. +func (p Point) BBox() Box { + return Box{p.X, p.Y, p.X, p.Y} +} + +func (p *Point) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func (p *Point) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func flatten(points [][]Point) []Point { + n, i := 0, 0 + for _, v := range points { + n += len(v) + } + r := make([]Point, n) + for _, v := range points { + for _, p := range v { + r[i] = p + i++ + } + } + return r +} + +// PolyLine is a shape type that consists of an ordered set of vertices that +// consists of one or more parts. A part is a connected sequence of two ore +// more points. Parts may or may not be connected to another and may or may not +// intersect each other. +type PolyLine struct { + Box + NumParts int32 + NumPoints int32 + Parts []int32 + Points []Point +} + +// NewPolyLine returns a pointer a new PolyLine created +// with the provided points. The inner slice should be +// the points that the parent part consists of. 
+func NewPolyLine(parts [][]Point) *PolyLine { + points := flatten(parts) + + p := &PolyLine{} + p.NumParts = int32(len(parts)) + p.NumPoints = int32(len(points)) + p.Parts = make([]int32, len(parts)) + var marker int32 + for i, part := range parts { + p.Parts[i] = marker + marker += int32(len(part)) + } + p.Points = points + p.Box = p.BBox() + + return p +} + +// BBox returns the bounding box of the PolyLine feature +func (p PolyLine) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolyLine) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + return nil +} + +func (p *PolyLine) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + return nil +} + +// Polygon is identical to the PolyLine struct. However the parts must form +// rings that may not intersect. 
+type Polygon PolyLine + +// BBox returns the bounding box of the Polygon feature +func (p Polygon) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *Polygon) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + return nil +} + +func (p *Polygon) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + return nil +} + +// MultiPoint is the shape that consists of multiple points. 
+type MultiPoint struct { + Box Box + NumPoints int32 + Points []Point +} + +// BBox returns the bounding box of the MultiPoint feature +func (p MultiPoint) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPoint) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Points = make([]Point, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + return nil +} + +func (p *MultiPoint) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + return nil +} + +// PointZ is a triplet of double precision coordinates plus a measure. +type PointZ struct { + X float64 + Y float64 + Z float64 + M float64 +} + +// BBox eturns the bounding box of the PointZ feature which is an zero-sized area +// at the X and Y coordinates of the feature. +func (p PointZ) BBox() Box { + return Box{p.X, p.Y, p.X, p.Y} +} + +func (p *PointZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func (p *PointZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +// PolyLineZ is a shape which consists of one or more parts. A part is a +// connected sequence of two or more points. Parts may or may not be connected +// and may or may not intersect one another. 
+type PolyLineZ struct { + Box Box + NumParts int32 + NumPoints int32 + Parts []int32 + Points []Point + ZRange [2]float64 + ZArray []float64 + MRange [2]float64 + MArray []float64 +} + +// BBox eturns the bounding box of the PolyLineZ feature. +func (p PolyLineZ) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolyLineZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolyLineZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + 
err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// PolygonZ structure is identical to the PolyLineZ structure. +type PolygonZ PolyLineZ + +// BBox returns the bounding box of the PolygonZ feature +func (p PolygonZ) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolygonZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolygonZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return 
err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// MultiPointZ consists of one ore more PointZ. +type MultiPointZ struct { + Box Box + NumPoints int32 + Points []Point + ZRange [2]float64 + ZArray []float64 + MRange [2]float64 + MArray []float64 +} + +// BBox eturns the bounding box of the MultiPointZ feature. +func (p MultiPointZ) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPointZ) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *MultiPointZ) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + 
return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// PointM is a point with a measure. +type PointM struct { + X float64 + Y float64 + M float64 +} + +// BBox returns the bounding box of the PointM feature which is a zero-sized +// area at the X- and Y-coordinates of the point. +func (p PointM) BBox() Box { + return Box{p.X, p.Y, p.X, p.Y} +} + +func (p *PointM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +func (p *PointM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p) + if err != nil { + return err + } + return nil +} + +// PolyLineM is the polyline in which each point also has a measure. +type PolyLineM struct { + Box Box + NumParts int32 + NumPoints int32 + Parts []int32 + Points []Point + MRange [2]float64 + MArray []float64 +} + +// BBox returns the bounding box of the PolyLineM feature. 
+func (p PolyLineM) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolyLineM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolyLineM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// PolygonM structure is identical to the PolyLineZ structure. +type PolygonM PolyLineZ + +// BBox returns the bounding box of the PolygonM feature. 
+func (p PolygonM) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *PolygonM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *PolygonM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// MultiPointM is the collection of multiple points with measures. 
+type MultiPointM struct { + Box Box + NumPoints int32 + Points []Point + MRange [2]float64 + MArray []float64 +} + +// BBox eturns the bounding box of the MultiPointM feature +func (p MultiPointM) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPointM) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Points = make([]Point, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *MultiPointM) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// MultiPatch consists of a number of surfaces patches. Each surface path +// descries a surface. The surface patches of a MultiPatch are referred to as +// its parts, and the type of part controls how the order of vertices of an +// MultiPatch part is interpreted. 
+type MultiPatch struct { + Box Box + NumParts int32 + NumPoints int32 + Parts []int32 + PartTypes []int32 + Points []Point + ZRange [2]float64 + ZArray []float64 + MRange [2]float64 + MArray []float64 +} + +// BBox returns the bounding box of the MultiPatch feature +func (p MultiPatch) BBox() Box { + return BBoxFromPoints(p.Points) +} + +func (p *MultiPatch) read(file io.Reader) error { + err := binary.Read(file, binary.LittleEndian, &p.Box) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumParts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.NumPoints) + if err != nil { + return err + } + p.Parts = make([]int32, p.NumParts) + p.PartTypes = make([]int32, p.NumParts) + p.Points = make([]Point, p.NumPoints) + p.ZArray = make([]float64, p.NumPoints) + p.MArray = make([]float64, p.NumPoints) + err = binary.Read(file, binary.LittleEndian, &p.Parts) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.PartTypes) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.Points) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.ZArray) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MRange) + if err != nil { + return err + } + err = binary.Read(file, binary.LittleEndian, &p.MArray) + if err != nil { + return err + } + return nil +} + +func (p *MultiPatch) write(file io.Writer) error { + err := binary.Write(file, binary.LittleEndian, p.Box) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumParts) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.NumPoints) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Parts) + if err != nil { + return err + } + err = 
binary.Write(file, binary.LittleEndian, p.PartTypes) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.Points) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.ZArray) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MRange) + if err != nil { + return err + } + err = binary.Write(file, binary.LittleEndian, p.MArray) + if err != nil { + return err + } + return nil +} + +// Field representation of a field object in the DBF file +type Field struct { + Name [11]byte + Fieldtype byte + Addr [4]byte // not used + Size uint8 + Precision uint8 + Padding [14]byte +} + +// Returns a string representation of the Field. Currently +// this only returns field name. +func (f Field) String() string { + return strings.TrimRight(string(f.Name[:]), "\x00") +} + +// StringField returns a Field that can be used in SetFields to initialize the +// DBF file. +func StringField(name string, length uint8) Field { + // TODO: Error checking + field := Field{Fieldtype: 'C', Size: length} + copy(field.Name[:], []byte(name)) + return field +} + +// NumberField returns a Field that can be used in SetFields to initialize the +// DBF file. +func NumberField(name string, length uint8) Field { + field := Field{Fieldtype: 'N', Size: length} + copy(field.Name[:], []byte(name)) + return field +} + +// FloatField returns a Field that can be used in SetFields to initialize the +// DBF file. Used to store floating points with precision in the DBF. +func FloatField(name string, length uint8, precision uint8) Field { + field := Field{Fieldtype: 'F', Size: length, Precision: precision} + copy(field.Name[:], []byte(name)) + return field +} + +// DateField feturns a Field that can be used in SetFields to initialize the +// DBF file. Used to store Date strings formatted as YYYYMMDD. 
Data wise this +// is the same as a StringField with length 8. +func DateField(name string) Field { + field := Field{Fieldtype: 'D', Size: 8} + copy(field.Name[:], []byte(name)) + return field +} diff --git a/pkg/shp/shapetype_string.go b/pkg/shp/shapetype_string.go new file mode 100644 index 000000000..6b9bf309f --- /dev/null +++ b/pkg/shp/shapetype_string.go @@ -0,0 +1,51 @@ +// Code generated by "stringer -type=ShapeType"; DO NOT EDIT. + +package shp + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[NULL-0] + _ = x[POINT-1] + _ = x[POLYLINE-3] + _ = x[POLYGON-5] + _ = x[MULTIPOINT-8] + _ = x[POINTZ-11] + _ = x[POLYLINEZ-13] + _ = x[POLYGONZ-15] + _ = x[MULTIPOINTZ-18] + _ = x[POINTM-21] + _ = x[POLYLINEM-23] + _ = x[POLYGONM-25] + _ = x[MULTIPOINTM-28] + _ = x[MULTIPATCH-31] +} + +const _ShapeType_name = "NULLPOINTPOLYLINEPOLYGONMULTIPOINTPOINTZPOLYLINEZPOLYGONZMULTIPOINTZPOINTMPOLYLINEMPOLYGONMMULTIPOINTMMULTIPATCH" + +var _ShapeType_map = map[ShapeType]string{ + 0: _ShapeType_name[0:4], + 1: _ShapeType_name[4:9], + 3: _ShapeType_name[9:17], + 5: _ShapeType_name[17:24], + 8: _ShapeType_name[24:34], + 11: _ShapeType_name[34:40], + 13: _ShapeType_name[40:49], + 15: _ShapeType_name[49:57], + 18: _ShapeType_name[57:68], + 21: _ShapeType_name[68:74], + 23: _ShapeType_name[74:83], + 25: _ShapeType_name[83:91], + 28: _ShapeType_name[91:102], + 31: _ShapeType_name[102:112], +} + +func (i ShapeType) String() string { + if str, ok := _ShapeType_map[i]; ok { + return str + } + return "ShapeType(" + strconv.FormatInt(int64(i), 10) + ")" +} diff --git a/pkg/shp/writer.go b/pkg/shp/writer.go new file mode 100644 index 000000000..4969c6b0e --- /dev/null +++ b/pkg/shp/writer.go @@ -0,0 +1,122 @@ +package shp + +import ( + "encoding/binary" + "io" + "math" +) + +// Writer is the type that is used to write a new 
shapefile. +type Writer struct { + shp io.WriteSeeker + GeometryType ShapeType + num int32 + bbox Box +} + +func CreateFrom(ws io.WriteSeeker, t ShapeType) (*Writer, error) { + _, err := ws.Seek(100, io.SeekStart) + if err != nil { + return nil, err + } + w := &Writer{ + shp: ws, + GeometryType: t, + } + return w, nil +} + +// Write shape to the writer. +// Returns the index of the written object +// which can be used in WriteAttribute. +func (w *Writer) Write(shape Shape) (int32, error) { + // increate bbox + if w.num == 0 { + w.bbox = shape.BBox() + } else { + w.bbox.Extend(shape.BBox()) + } + + w.num++ + err := binary.Write(w.shp, binary.BigEndian, w.num) + if err != nil { + return 0, err + } + _, err = w.shp.Seek(4, io.SeekCurrent) + if err != nil { + return 0, err + } + start, err := w.shp.Seek(0, io.SeekCurrent) + if err != nil { + return 0, err + } + err = binary.Write(w.shp, binary.LittleEndian, w.GeometryType) + if err != nil { + return 0, err + } + err = shape.write(w.shp) + if err != nil { + return 0, err + } + finish, err := w.shp.Seek(0, io.SeekCurrent) + if err != nil { + return 0, err + } + length := int32(math.Floor((float64(finish) - float64(start)) / 2.0)) + _, err = w.shp.Seek(start-4, io.SeekStart) + if err != nil { + return 0, err + } + err = binary.Write(w.shp, binary.BigEndian, length) + if err != nil { + return 0, err + } + _, err = w.shp.Seek(finish, io.SeekStart) + if err != nil { + return 0, err + } + return w.num - 1, nil +} + +// Close closes the writer. +func (w *Writer) Close() error { + return w.writeHeader(w.shp) +} + +// writeHeader writes SHP to ws. 
+func (w *Writer) writeHeader(ws io.WriteSeeker) error { + filelength, _ := ws.Seek(0, io.SeekEnd) + if filelength == 0 { + filelength = 100 + } + _, err := ws.Seek(0, io.SeekStart) + if err != nil { + return err + } + // file code + err = binary.Write(ws, binary.BigEndian, []int32{9994, 0, 0, 0, 0, 0}) + if err != nil { + return err + } + // file length + err = binary.Write(ws, binary.BigEndian, int32(filelength/2)) + if err != nil { + return err + } + // version and shape type + err = binary.Write(ws, binary.LittleEndian, []int32{1000, int32(w.GeometryType)}) + if err != nil { + return err + } + // bounding box + err = binary.Write(ws, binary.LittleEndian, w.bbox) + if err != nil { + return err + } + // elevation, measure + err = binary.Write(ws, binary.LittleEndian, []float64{0.0, 0.0, 0.0, 0.0}) + if err != nil { + return err + } + return nil +} diff --git a/pkg/shp/zipreader.go b/pkg/shp/zipreader.go new file mode 100644 index 000000000..33fc968fe --- /dev/null +++ b/pkg/shp/zipreader.go @@ -0,0 +1,105 @@ +package shp + +import ( + "archive/zip" + "bytes" + "fmt" + "io" + "strings" +) + +// ZipReader provides an interface for reading Shapefiles that are compressed in a ZIP archive. +type ZipReader struct { + sr SequentialReader + z *zip.Reader +} + +// openFromZIP is convenience function for opening the file called name that is +// compressed in z for reading. 
+func openFromZIP(z *zip.Reader, name string) (io.ReadCloser, error) { + for _, f := range z.File { + if f.Name == name { + return f.Open() + + } + } + return nil, fmt.Errorf("No such file in archive: %s", name) +} + +// ReadZipFrom read zip file from io.Reader, zip file must contain only one shape file +func ReadZipFrom(r io.Reader) (*ZipReader, error) { + zipBytes, err := io.ReadAll(r) + if err != nil { + return nil, err + } + reader, err := zip.NewReader(bytes.NewReader(zipBytes), int64(len(zipBytes))) + if err != nil { + return nil, err + } + zr := &ZipReader{ + z: reader, + } + shapeFiles := shapesInZip(reader) + if len(shapeFiles) == 0 { + return nil, fmt.Errorf("archive does not contain a .shp file") + } + if len(shapeFiles) > 1 { + return nil, fmt.Errorf("archive does contain multiple .shp files") + } + shp, err := openFromZIP(zr.z, shapeFiles[0].Name) + if err != nil { + return nil, err + } + withoutExt := strings.TrimSuffix(shapeFiles[0].Name, ".shp") + // dbf is optional, so no error checking here + dbf, _ := openFromZIP(zr.z, withoutExt+".dbf") + zr.sr = SequentialReaderFromExt(shp, dbf) + return zr, nil +} + +func shapesInZip(z *zip.Reader) []*zip.File { + var shapeFiles []*zip.File + for _, f := range z.File { + if strings.HasSuffix(f.Name, ".shp") { + shapeFiles = append(shapeFiles, f) + } + } + return shapeFiles +} + +// Close closes the ZipReader and frees the allocated resources. +func (zr *ZipReader) Close() error { + err := zr.sr.Close() + if err != nil { + return err + } + return nil +} + +// Next reads the next shape in the shapefile and the next row in the DBF. Call +// Shape() and Attribute() to access the values. +func (zr *ZipReader) Next() bool { + return zr.sr.Next() +} + +// Shape returns the shape that was last read as well as the current index. +func (zr *ZipReader) Shape() (int, Shape) { + return zr.sr.Shape() +} + +// Attribute returns the n-th field of the last row that was read. 
If there +// were any errors before, the empty string is returned. +func (zr *ZipReader) Attribute(n int) string { + return zr.sr.Attribute(n) +} + +// Fields returns a slice of Fields that are present in the +// DBF table. +func (zr *ZipReader) Fields() []Field { + return zr.sr.Fields() +} + +// Err returns the last non-EOF error that was encountered by this ZipReader. +func (zr *ZipReader) Err() error { + return zr.sr.Err() +} diff --git a/pkg/user/auth.go b/pkg/user/auth.go new file mode 100644 index 000000000..ed1498591 --- /dev/null +++ b/pkg/user/auth.go @@ -0,0 +1,20 @@ +package user + +import "strings" + +type Auth struct { + Provider string + Sub string +} + +func AuthFromAuth0Sub(sub string) Auth { + s := strings.Split(sub, "|") + if len(s) != 2 { + return Auth{} + } + return Auth{Provider: s[0], Sub: sub} +} + +func (a Auth) IsAuth0() bool { + return a.Provider == "auth0" +} diff --git a/pkg/user/auth_test.go b/pkg/user/auth_test.go new file mode 100644 index 000000000..f125b3e4b --- /dev/null +++ b/pkg/user/auth_test.go @@ -0,0 +1,66 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAuthFromAuth0Sub(t *testing.T) { + testCases := []struct { + Name, Sub string + Expected Auth + }{ + { + Name: "Create Auth", + Sub: "xx|yy", + Expected: Auth{ + Provider: "xx", + Sub: "yy", + }, + }, + { + Name: "Create empty Auth", + Sub: "", + Expected: Auth{}, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, AuthFromAuth0Sub(tc.Sub)) + }) + } +} + +func TestAuth_IsAuth0(t *testing.T) { + testCases := []struct { + Name string + Auth Auth + Expected bool + }{ + { + Name: "is Auth", + Auth: Auth{ + Provider: "auth0", + Sub: "xxx", + }, + Expected: true, + }, + { + Name: "is not Auth", + Auth: Auth{ + Provider: "foo", + Sub: "hoge", + }, + Expected: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + 
tt.Parallel() + assert.Equal(tt, tc.Expected, tc.Auth.IsAuth0()) + }) + } +} diff --git a/pkg/user/builder.go b/pkg/user/builder.go new file mode 100644 index 000000000..495983d5f --- /dev/null +++ b/pkg/user/builder.go @@ -0,0 +1,78 @@ +package user + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "golang.org/x/text/language" +) + +type Builder struct { + u *User +} + +func New() *Builder { + return &Builder{u: &User{}} +} + +func (b *Builder) Build() (*User, error) { + if id.ID(b.u.id).IsNil() { + return nil, id.ErrInvalidID + } + return b.u, nil +} + +func (b *Builder) MustBuild() *User { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *Builder) ID(id id.UserID) *Builder { + b.u.id = id + return b +} + +func (b *Builder) NewID() *Builder { + b.u.id = id.UserID(id.New()) + return b +} + +func (b *Builder) Name(name string) *Builder { + b.u.name = name + return b +} + +func (b *Builder) Email(email string) *Builder { + b.u.email = email + return b +} + +func (b *Builder) Team(team id.TeamID) *Builder { + b.u.team = team + return b +} + +func (b *Builder) Lang(lang language.Tag) *Builder { + b.u.lang = lang + return b +} + +func (b *Builder) Theme(t Theme) *Builder { + b.u.theme = t + return b +} + +func (b *Builder) LangFrom(lang string) *Builder { + if lang == "" { + b.u.lang = language.Tag{} + } else if l, err := language.Parse(lang); err == nil { + b.u.lang = l + } + return b +} + +func (b *Builder) Auths(auths []Auth) *Builder { + b.u.auths = append([]Auth{}, auths...) 
+ return b +} diff --git a/pkg/user/builder_test.go b/pkg/user/builder_test.go new file mode 100644 index 000000000..01a106e5e --- /dev/null +++ b/pkg/user/builder_test.go @@ -0,0 +1,200 @@ +package user + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestBuilder_ID(t *testing.T) { + uid := id.NewUserID() + b := New().ID(uid).MustBuild() + assert.Equal(t, uid, b.ID()) +} + +func TestBuilder_Name(t *testing.T) { + b := New().NewID().Name("xxx").MustBuild() + assert.Equal(t, "xxx", b.Name()) +} + +func TestBuilder_NewID(t *testing.T) { + b := New().NewID().MustBuild() + assert.NotNil(t, b.ID()) +} + +func TestBuilder_Team(t *testing.T) { + tid := id.NewTeamID() + b := New().NewID().Team(tid).MustBuild() + assert.Equal(t, tid, b.Team()) +} + +func TestBuilder_Auths(t *testing.T) { + b := New().NewID().Auths([]Auth{ + { + Provider: "xxx", + Sub: "aaa", + }, + }).MustBuild() + assert.Equal(t, []Auth{ + { + Provider: "xxx", + Sub: "aaa", + }, + }, b.Auths()) +} + +func TestBuilder_Email(t *testing.T) { + b := New().NewID().Email("xx@yy.zz").MustBuild() + assert.Equal(t, "xx@yy.zz", b.Email()) +} + +func TestBuilder_Lang(t *testing.T) { + l := language.Make("en") + b := New().NewID().Lang(l).MustBuild() + assert.Equal(t, l, b.Lang()) +} + +func TestBuilder_LangFrom(t *testing.T) { + testCases := []struct { + Name, Lang string + Expected language.Tag + }{ + { + Name: "success creating language", + Lang: "en", + Expected: language.Make("en"), + }, + { + Name: "empty language and empty tag", + Lang: "", + Expected: language.Tag{}, + }, + { + Name: "empty tag of parse err", + Lang: "xxxxxxxxxxx", + Expected: language.Tag{}, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + b := New().NewID().LangFrom(tc.Lang).MustBuild() + assert.Equal(t, tc.Expected, b.Lang()) + }) + } +} + +func TestNew(t *testing.T) { + b := 
New() + assert.NotNil(t, b) + assert.IsType(t, &Builder{}, b) +} + +func TestBuilder_Build(t *testing.T) { + uid := id.NewUserID() + tid := id.NewTeamID() + testCases := []struct { + Name, UserName, Lang, Email string + UID id.UserID + TID id.TeamID + Auths []Auth + Expected *User + err error + }{ + { + Name: "Success build user", + UserName: "xxx", + Email: "xx@yy.zz", + Lang: "en", + UID: uid, + TID: tid, + Auths: []Auth{ + { + Provider: "ppp", + Sub: "sss", + }, + }, + Expected: New(). + ID(uid). + Team(tid). + Email("xx@yy.zz"). + Name("xxx"). + Auths([]Auth{{Provider: "ppp", Sub: "sss"}}). + LangFrom("en"). + MustBuild(), + err: nil, + }, { + Name: "failed invalid id", + Expected: nil, + err: id.ErrInvalidID, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := New().ID(tc.UID).Name(tc.UserName).Auths(tc.Auths).LangFrom(tc.Lang).Email(tc.Email).Team(tc.TID).Build() + if err == nil { + assert.Equal(tt, tc.Expected, res) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestBuilder_MustBuild(t *testing.T) { + uid := id.NewUserID() + tid := id.NewTeamID() + testCases := []struct { + Name, UserName, Lang, Email string + UID id.UserID + TID id.TeamID + Auths []Auth + Expected *User + err error + }{ + { + Name: "Success build user", + UserName: "xxx", + Email: "xx@yy.zz", + Lang: "en", + UID: uid, + TID: tid, + Auths: []Auth{ + { + Provider: "ppp", + Sub: "sss", + }, + }, + Expected: New(). + ID(uid). + Team(tid). + Email("xx@yy.zz"). + Name("xxx"). + Auths([]Auth{{Provider: "ppp", Sub: "sss"}}). + LangFrom("en"). 
+ MustBuild(), + err: nil, + }, { + Name: "failed invalid id", + Expected: nil, + err: id.ErrInvalidID, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *User + defer func() { + if r := recover(); r == nil { + assert.Equal(tt, tc.Expected, res) + } + }() + + res = New().ID(tc.UID).Name(tc.UserName).Auths(tc.Auths).LangFrom(tc.Lang).Email(tc.Email).Team(tc.TID).MustBuild() + }) + } +} diff --git a/pkg/user/initializer/initializer.go b/pkg/user/initializer/initializer.go new file mode 100644 index 000000000..197df1ed4 --- /dev/null +++ b/pkg/user/initializer/initializer.go @@ -0,0 +1,39 @@ +package initializer + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" +) + +func InitUser(email, username, auth0sub string, userID *id.UserID, teamID *id.TeamID) (*user.User, *user.Team, error) { + if userID == nil { + userID = id.NewUserID().Ref() + } + if teamID == nil { + teamID = id.NewTeamID().Ref() + } + + u, err := user.New(). + ID(*userID). + Name(username). + Email(email). + Auths([]user.Auth{user.AuthFromAuth0Sub(auth0sub)}). + Build() + if err != nil { + return nil, nil, err + } + + // create a user's own team + t, err := user.NewTeam(). + ID(*teamID). + Name(username). + Members(map[id.UserID]user.Role{u.ID(): user.RoleOwner}). + Personal(true). 
+ Build() + if err != nil { + return nil, nil, err + } + u.UpdateTeam(t.ID()) + + return u, t, err +} diff --git a/pkg/user/initializer/initializer_test.go b/pkg/user/initializer/initializer_test.go new file mode 100644 index 000000000..321c8449d --- /dev/null +++ b/pkg/user/initializer/initializer_test.go @@ -0,0 +1,110 @@ +package initializer + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/stretchr/testify/assert" +) + +func TestInitUser(t *testing.T) { + uid := id.NewUserID() + tid := id.NewTeamID() + testCases := []struct { + Name, Email, Username, Sub string + UID *id.UserID + TID *id.TeamID + ExpectedUser *user.User + ExpectedTeam *user.Team + Err error + }{ + { + Name: "Success create user", + Email: "xx@yy.zz", + Username: "nnn", + Sub: "###", + UID: &uid, + TID: &tid, + ExpectedUser: user.New(). + ID(uid). + Email("xx@yy.zz"). + Name("nnn"). + Team(tid). + Auths([]user.Auth{user.AuthFromAuth0Sub("###")}). + MustBuild(), + ExpectedTeam: user.NewTeam(). + ID(tid). + Name("nnn"). + Members(map[id.UserID]user. + Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + Err: nil, + }, + { + Name: "Success nil team id", + Email: "xx@yy.zz", + Username: "nnn", + Sub: "###", + UID: &uid, + TID: nil, + ExpectedUser: user.New(). + ID(uid). + Email("xx@yy.zz"). + Name("nnn"). + Team(tid). + Auths([]user.Auth{user.AuthFromAuth0Sub("###")}). + MustBuild(), + ExpectedTeam: user.NewTeam(). + NewID(). + Name("nnn"). + Members(map[id.UserID]user. + Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + Err: nil, + }, + { + Name: "Success nil id", + Email: "xx@yy.zz", + Username: "nnn", + Sub: "###", + UID: nil, + TID: &tid, + ExpectedUser: user.New(). + NewID(). + Email("xx@yy.zz"). + Name("nnn"). + Team(tid). + Auths([]user.Auth{user.AuthFromAuth0Sub("###")}). + MustBuild(), + ExpectedTeam: user.NewTeam(). + ID(tid). + Name("nnn"). + Members(map[id.UserID]user. 
+ Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + Err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + u, t, err := InitUser(tc.Email, tc.Username, tc.Sub, tc.UID, tc.TID) + if err == nil { + assert.Equal(tt, tc.ExpectedUser.Email(), u.Email()) + assert.Equal(tt, tc.ExpectedUser.Name(), u.Name()) + assert.Equal(tt, tc.ExpectedUser.Auths(), u.Auths()) + + assert.Equal(tt, tc.ExpectedTeam.Name(), t.Name()) + assert.Equal(tt, tc.ExpectedTeam.IsPersonal(), t.IsPersonal()) + } else { + assert.True(tt, errors.As(tc.Err, &err)) + } + }) + } +} diff --git a/pkg/user/members.go b/pkg/user/members.go new file mode 100644 index 000000000..a60d80acc --- /dev/null +++ b/pkg/user/members.go @@ -0,0 +1,127 @@ +package user + +import ( + "errors" + "sort" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrUserAlreadyJoined = errors.New("user already joined") + ErrCannotModifyPersonalTeam = errors.New("personal team cannot be modified") + ErrTeamWithProjects = errors.New("target team still has some project") + ErrTargetUserNotInTheTeam = errors.New("target user does not exist in the team") +) + +type Members struct { + members map[id.UserID]Role + fixed bool +} + +func NewMembers() *Members { + m := &Members{members: map[id.UserID]Role{}} + return m +} + +func NewFixedMembers(u id.UserID) *Members { + m := &Members{members: map[id.UserID]Role{u: RoleOwner}, fixed: true} + return m +} + +func NewMembersWith(members map[id.UserID]Role) *Members { + m := &Members{members: map[id.UserID]Role{}} + for k, v := range members { + m.members[k] = v + } + return m +} + +func CopyMembers(members *Members) *Members { + return NewMembersWith(members.members) +} + +func (m *Members) Members() map[id.UserID]Role { + members := make(map[id.UserID]Role) + for k, v := range m.members { + members[k] = v + } + return members +} + +func (m *Members) ContainsUser(u id.UserID) bool { + for k := range m.members 
{ + if k == u { + return true + } + } + return false +} + +func (m *Members) Count() int { + return len(m.members) +} + +func (m *Members) GetRole(u id.UserID) Role { + return m.members[u] +} + +func (m *Members) UpdateRole(u id.UserID, role Role) error { + if m.fixed { + return ErrCannotModifyPersonalTeam + } + if role == Role("") { + return nil + } + if _, ok := m.members[u]; ok { + m.members[u] = role + } else { + return ErrTargetUserNotInTheTeam + } + return nil +} + +func (m *Members) Join(u id.UserID, role Role) error { + if m.fixed { + return ErrCannotModifyPersonalTeam + } + if _, ok := m.members[u]; ok { + return ErrUserAlreadyJoined + } + if role == Role("") { + role = RoleReader + } + m.members[u] = role + return nil +} + +func (m *Members) Leave(u id.UserID) error { + if m.fixed { + return ErrCannotModifyPersonalTeam + } + if _, ok := m.members[u]; ok { + delete(m.members, u) + } else { + return ErrTargetUserNotInTheTeam + } + return nil +} + +func (m *Members) UsersByRole(role Role) []id.UserID { + users := make([]id.UserID, 0, len(m.members)) + for u, r := range m.members { + if r == role { + users = append(users, u) + } + } + + sort.SliceStable(users, func(a, b int) bool { + return users[a].ID().Compare(users[b].ID()) > 0 + }) + + return users +} + +func (m *Members) IsOnlyOwner(u id.UserID) bool { + return len(m.UsersByRole(RoleOwner)) == 1 && m.members[u] == RoleOwner +} diff --git a/pkg/user/members_test.go b/pkg/user/members_test.go new file mode 100644 index 000000000..6216f90a0 --- /dev/null +++ b/pkg/user/members_test.go @@ -0,0 +1,261 @@ +package user + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewMembers(t *testing.T) { + m := NewMembers() + assert.NotNil(t, m) + assert.IsType(t, &Members{}, m) +} + +func TestNewMembersWith(t *testing.T) { + uid := id.NewUserID() + m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + assert.NotNil(t, m) + 
assert.Equal(t, map[id.UserID]Role{uid: RoleOwner}, m.Members()) +} + +func TestMembers_ContainsUser(t *testing.T) { + uid1 := id.NewUserID() + uid2 := id.NewUserID() + testCases := []struct { + Name string + M *Members + UID id.UserID + Expected bool + }{ + { + Name: "existing user", + M: NewMembersWith(map[id.UserID]Role{uid1: RoleOwner, uid2: RoleReader}), + UID: uid1, + Expected: true, + }, + { + Name: "not existing user", + M: NewMembersWith(map[id.UserID]Role{uid2: RoleReader}), + UID: uid1, + Expected: false, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.M.ContainsUser(tc.UID) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestCopyMembers(t *testing.T) { + uid := id.NewUserID() + m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + m2 := CopyMembers(m) + assert.Equal(t, m, m2) +} + +func TestMembers_Count(t *testing.T) { + m := NewMembersWith(map[id.UserID]Role{id.NewUserID(): RoleOwner}) + assert.Equal(t, len(m.Members()), m.Count()) +} + +func TestMembers_GetRole(t *testing.T) { + uid := id.NewUserID() + m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + assert.Equal(t, RoleOwner, m.GetRole(uid)) +} + +func TestMembers_IsOnlyOwner(t *testing.T) { + uid := id.NewUserID() + m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner, id.NewUserID(): RoleReader}) + assert.True(t, m.IsOnlyOwner(uid)) +} + +func TestMembers_Leave(t *testing.T) { + uid := id.NewUserID() + testCases := []struct { + Name string + M *Members + UID id.UserID + err error + }{ + { + Name: "success user left", + M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter, id.NewUserID(): RoleOwner}), + UID: uid, + err: nil, + }, + { + Name: "fail personal team", + M: NewFixedMembers(uid), + UID: uid, + err: ErrCannotModifyPersonalTeam, + }, + { + Name: "fail user not in the team", + M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter, id.NewUserID(): RoleOwner}), + UID: id.NewUserID(), + err: 
ErrTargetUserNotInTheTeam, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + err := tc.M.Leave(tc.UID) + if err == nil { + assert.False(tt, tc.M.ContainsUser(tc.UID)) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestMembers_Members(t *testing.T) { + uid := id.NewUserID() + m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + assert.Equal(t, map[id.UserID]Role{uid: RoleOwner}, m.Members()) +} + +func TestMembers_UpdateRole(t *testing.T) { + uid := id.NewUserID() + testCases := []struct { + Name string + M *Members + UID id.UserID + NewRole, Expected Role + err error + }{ + { + Name: "success role updated", + M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter}), + UID: uid, + NewRole: RoleOwner, + Expected: RoleOwner, + err: nil, + }, + { + Name: "nil role", + M: NewMembersWith(map[id.UserID]Role{uid: RoleOwner}), + UID: uid, + NewRole: "", + Expected: RoleOwner, + err: nil, + }, + { + Name: "fail personal team", + M: NewFixedMembers(uid), + UID: uid, + NewRole: Role("xxx"), + err: ErrCannotModifyPersonalTeam, + }, + { + Name: "fail user not in the team", + M: NewMembersWith(map[id.UserID]Role{uid: RoleOwner}), + UID: id.NewUserID(), + NewRole: "", + err: ErrTargetUserNotInTheTeam, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + err := tc.M.UpdateRole(tc.UID, tc.NewRole) + if err == nil { + assert.Equal(tt, tc.Expected, tc.M.GetRole(tc.UID)) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestMembers_Join(t *testing.T) { + uid := id.NewUserID() + uid2 := id.NewUserID() + testCases := []struct { + Name string + M *Members + UID id.UserID + JoinRole, ExpectedRole Role + err error + }{ + { + Name: "success join user", + M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter}), + UID: uid2, + JoinRole: "xxx", + ExpectedRole: "xxx", + err: nil, + }, + { + Name: "success join user", + 
M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter}), + UID: uid2, + JoinRole: "", + ExpectedRole: RoleReader, + err: nil, + }, + { + Name: "fail personal team", + M: NewFixedMembers(uid), + UID: uid2, + JoinRole: "xxx", + err: ErrCannotModifyPersonalTeam, + }, + { + Name: "fail user already joined", + M: NewMembersWith(map[id.UserID]Role{uid: RoleOwner}), + UID: uid, + JoinRole: "", + err: ErrUserAlreadyJoined, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + err := tc.M.Join(tc.UID, tc.JoinRole) + if err == nil { + assert.True(tt, tc.M.ContainsUser(tc.UID)) + assert.Equal(tt, tc.ExpectedRole, tc.M.GetRole(tc.UID)) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestMembers_UsersByRole(t *testing.T) { + uid := id.NewUserID() + uid2 := id.NewUserID() + testCases := []struct { + Name string + M *Members + Role Role + Expected []id.UserID + err error + }{ + { + Name: "success join user", + M: NewMembersWith(map[id.UserID]Role{uid: "xxx", uid2: "xxx"}), + Role: "xxx", + Expected: []id.UserID{uid2, uid}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.M.UsersByRole(tc.Role) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/user/role.go b/pkg/user/role.go new file mode 100644 index 000000000..b6359d347 --- /dev/null +++ b/pkg/user/role.go @@ -0,0 +1,42 @@ +package user + +import ( + "errors" + "strings" +) + +var ( + // RoleReader is a role who can read project + RoleReader = Role("reader") + // RoleWriter is a role who can read and write project + RoleWriter = Role("writer") + // RoleOwner is a role who can have full controll of project + RoleOwner = Role("owner") + // ErrInvalidRole _ + ErrInvalidRole = errors.New("invalid role") +) + +// Role _ +type Role string + +func checkRole(role Role) bool { + switch role { + case RoleReader: + return true + case RoleWriter: + return true + case 
RoleOwner: + return true + } + return false +} + +// RoleFromString _ +func RoleFromString(r string) (Role, error) { + role := Role(strings.ToLower(r)) + + if checkRole(role) { + return role, nil + } + return role, ErrInvalidRole +} diff --git a/pkg/user/role_test.go b/pkg/user/role_test.go new file mode 100644 index 000000000..446b97eec --- /dev/null +++ b/pkg/user/role_test.go @@ -0,0 +1,80 @@ +package user + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRoleFromString(t *testing.T) { + testCases := []struct { + Name, Role string + Expected Role + Err error + }{ + { + Name: "Success reader", + Role: "reader", + Expected: Role("reader"), + Err: nil, + }, + { + Name: "fail invalid role", + Role: "xxx", + Expected: Role("xxx"), + Err: ErrInvalidRole, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := RoleFromString(tc.Role) + if err == nil { + assert.Equal(tt, tc.Expected, res) + } else { + assert.True(tt, errors.As(err, &tc.Err)) + } + }) + } +} + +func TestCheckRole(t *testing.T) { + testCases := []struct { + Name string + Input Role + Expected bool + }{ + { + Name: "check reader", + Input: Role("reader"), + Expected: true, + }, + { + Name: "check writer", + Input: Role("writer"), + Expected: true, + }, + { + Name: "check owner", + Input: Role("owner"), + Expected: true, + }, + { + Name: "check unknown role", + Input: Role("xxx"), + Expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := checkRole(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/user/team.go b/pkg/user/team.go new file mode 100644 index 000000000..021a392f7 --- /dev/null +++ b/pkg/user/team.go @@ -0,0 +1,29 @@ +package user + +import "github.com/reearth/reearth-backend/pkg/id" + +type Team struct { + id id.TeamID + name string + members Members +} + +func (t *Team) ID() 
id.TeamID { + return t.id +} + +func (t *Team) Name() string { + return t.name +} + +func (t *Team) Members() *Members { + return &t.members +} + +func (t *Team) Rename(name string) { + t.name = name +} + +func (t *Team) IsPersonal() bool { + return t.members.fixed +} diff --git a/pkg/user/team_builder.go b/pkg/user/team_builder.go new file mode 100644 index 000000000..ec62b6897 --- /dev/null +++ b/pkg/user/team_builder.go @@ -0,0 +1,70 @@ +package user + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +// TeamBuilder _ +type TeamBuilder struct { + t *Team + members map[id.UserID]Role + personal bool +} + +// NewTeam _ +func NewTeam() *TeamBuilder { + return &TeamBuilder{t: &Team{}} +} + +// Build _ +func (b *TeamBuilder) Build() (*Team, error) { + if id.ID(b.t.id).IsNil() { + return nil, id.ErrInvalidID + } + if b.members == nil { + b.t.members = *NewMembers() + } else { + b.t.members = *NewMembersWith(b.members) + } + b.t.members.fixed = b.personal + return b.t, nil +} + +// MustBuild _ +func (b *TeamBuilder) MustBuild() *Team { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +// ID _ +func (b *TeamBuilder) ID(id id.TeamID) *TeamBuilder { + b.t.id = id + return b +} + +// NewID _ +func (b *TeamBuilder) NewID() *TeamBuilder { + b.t.id = id.TeamID(id.New()) + return b +} + +// Name _ +func (b *TeamBuilder) Name(name string) *TeamBuilder { + b.t.name = name + return b +} + +// Members _ +func (b *TeamBuilder) Members(members map[id.UserID]Role) *TeamBuilder { + b.members = members + return b +} + +// Personal _ +func (b *TeamBuilder) Personal(p bool) *TeamBuilder { + b.personal = p + return b +} diff --git a/pkg/user/team_builder_test.go b/pkg/user/team_builder_test.go new file mode 100644 index 000000000..eb630becc --- /dev/null +++ b/pkg/user/team_builder_test.go @@ -0,0 +1,125 @@ +package user + +import ( + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func 
TestTeamBuilder_ID(t *testing.T) { + tid := id.NewTeamID() + tm := NewTeam().ID(tid).MustBuild() + assert.Equal(t, tid, tm.ID()) +} + +func TestTeamBuilder_Members(t *testing.T) { + m := map[id.UserID]Role{id.NewUserID(): RoleOwner} + tm := NewTeam().NewID().Members(m).MustBuild() + assert.Equal(t, m, tm.Members().Members()) +} + +func TestTeamBuilder_Personal(t *testing.T) { + tm := NewTeam().NewID().Personal(true).MustBuild() + assert.True(t, tm.IsPersonal()) +} + +func TestTeamBuilder_Name(t *testing.T) { + tm := NewTeam().NewID().Name("xxx").MustBuild() + assert.Equal(t, "xxx", tm.Name()) +} + +func TestTeamBuilder_NewID(t *testing.T) { + tm := NewTeam().NewID().MustBuild() + assert.NotNil(t, tm.ID()) +} + +func TestTeamBuilder_Build(t *testing.T) { + tid := id.NewTeamID() + testCases := []struct { + Name, UserName string + TID id.TeamID + Personal bool + Members map[id.UserID]Role + Expected *Team + err error + }{ + { + Name: "success create team", + UserName: "xxx", + TID: tid, + Personal: true, + Expected: NewTeam().ID(tid).Members(map[id.UserID]Role{id.NewUserID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), + err: nil, + }, { + Name: "success create team with nil members", + UserName: "xxx", + Members: nil, + Expected: NewTeam().ID(tid).MustBuild(), + err: nil, + }, + { + Name: "fail invalid id", + Expected: nil, + err: id.ErrInvalidID, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewTeam().ID(tc.TID).Members(tc.Members).Personal(tc.Personal).Name(tc.UserName).Build() + if err == nil { + assert.Equal(tt, tc.Expected, res) + } else { + assert.True(tt, errors.As(tc.err, &err)) + } + }) + } +} + +func TestTeamBuilder_MustBuild(t *testing.T) { + tid := id.NewTeamID() + testCases := []struct { + Name, UserName string + TID id.TeamID + Personal bool + Members map[id.UserID]Role + Expected *Team + err error + }{ + { + Name: "success create team", + UserName: "xxx", + TID: tid, + Personal: 
true, + Expected: NewTeam().ID(tid).Members(map[id.UserID]Role{id.NewUserID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), + err: nil, + }, { + Name: "success create team with nil members", + UserName: "xxx", + Members: nil, + Expected: NewTeam().ID(tid).MustBuild(), + err: nil, + }, + { + Name: "fail invalid id", + Expected: nil, + err: id.ErrInvalidID, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + var res *Team + defer func() { + if r := recover(); r == nil { + assert.Equal(tt, tc.Expected, res) + } + }() + res = NewTeam().ID(tc.TID).Members(tc.Members).Personal(tc.Personal).Name(tc.UserName).MustBuild() + }) + } +} diff --git a/pkg/user/team_test.go b/pkg/user/team_test.go new file mode 100644 index 000000000..f01e4f843 --- /dev/null +++ b/pkg/user/team_test.go @@ -0,0 +1,38 @@ +package user + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestTeam_ID(t *testing.T) { + tid := id.NewTeamID() + tm := NewTeam().ID(tid).MustBuild() + assert.Equal(t, tid, tm.ID()) +} + +func TestTeam_Name(t *testing.T) { + tm := NewTeam().NewID().Name("ttt").MustBuild() + assert.Equal(t, "ttt", tm.Name()) +} + +func TestTeam_Members(t *testing.T) { + m := map[id.UserID]Role{ + id.NewUserID(): RoleOwner, + } + tm := NewTeam().NewID().Members(m).MustBuild() + assert.Equal(t, m, tm.Members().Members()) +} + +func TestTeam_Rename(t *testing.T) { + tm := NewTeam().NewID().Name("ttt").MustBuild() + tm.Rename("ccc") + assert.Equal(t, "ccc", tm.Name()) +} + +func TestTeam_IsPersonal(t *testing.T) { + tm := NewTeam().NewID().Personal(true).MustBuild() + assert.Equal(t, true, tm.IsPersonal()) +} diff --git a/pkg/user/theme.go b/pkg/user/theme.go new file mode 100644 index 000000000..8ec4e75e9 --- /dev/null +++ b/pkg/user/theme.go @@ -0,0 +1,9 @@ +package user + +type Theme string + +const ( + ThemeDefault Theme = "default" + ThemeLight Theme = "light" + 
ThemeDark Theme = "dark" +) diff --git a/pkg/user/user.go b/pkg/user/user.go new file mode 100644 index 000000000..24359fb92 --- /dev/null +++ b/pkg/user/user.go @@ -0,0 +1,120 @@ +package user + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "golang.org/x/text/language" +) + +type User struct { + id id.UserID + name string + email string + team id.TeamID + auths []Auth + lang language.Tag + theme Theme +} + +func (u *User) ID() id.UserID { + return u.id +} + +func (u *User) Name() string { + return u.name +} + +func (u *User) Email() string { + return u.email +} + +func (u *User) Team() id.TeamID { + return u.team +} + +func (u *User) Lang() language.Tag { + return u.lang +} + +func (u *User) Theme() Theme { + return u.theme +} + +func (u *User) UpdateName(name string) { + u.name = name +} + +func (u *User) UpdateEmail(email string) { + u.email = email +} + +func (u *User) UpdateTeam(team id.TeamID) { + u.team = team +} + +func (u *User) UpdateLang(lang language.Tag) { + u.lang = lang +} + +func (u *User) UpdateTheme(t Theme) { + u.theme = t +} + +func (u *User) Auths() []Auth { + if u == nil { + return nil + } + return append([]Auth{}, u.auths...) +} + +func (u *User) ContainAuth(a Auth) bool { + if u == nil { + return false + } + for _, b := range u.auths { + if a == b || a.Provider == b.Provider { + return true + } + } + return false +} + +func (u *User) AddAuth(a Auth) bool { + if u == nil { + return false + } + if !u.ContainAuth(a) { + u.auths = append(u.auths, a) + return true + } + return false +} + +func (u *User) RemoveAuth(a Auth) bool { + if u == nil || a.IsAuth0() { + return false + } + for i, b := range u.auths { + if a == b { + u.auths = append(u.auths[:i], u.auths[i+1:]...) 
+ return true + } + } + return false +} + +func (u *User) RemoveAuthByProvider(provider string) bool { + if u == nil || provider == "auth0" { + return false + } + for i, b := range u.auths { + if provider == b.Provider { + u.auths = append(u.auths[:i], u.auths[i+1:]...) + return true + } + } + return false +} + +func (u *User) ClearAuths() { + u.auths = []Auth{} +} diff --git a/pkg/user/user_test.go b/pkg/user/user_test.go new file mode 100644 index 000000000..9f782760c --- /dev/null +++ b/pkg/user/user_test.go @@ -0,0 +1,278 @@ +package user + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestUser(t *testing.T) { + uid := id.NewUserID() + tid := id.NewTeamID() + testCases := []struct { + Name string + User *User + Expected struct { + Id id.UserID + Name string + Email string + Team id.TeamID + Auths []Auth + Lang language.Tag + } + }{ + { + Name: "create user", + User: New().ID(uid). + Team(tid). + Name("xxx"). + LangFrom("en"). + Email("ff@xx.zz"). 
+ Auths([]Auth{{ + Provider: "aaa", + Sub: "sss", + }}).MustBuild(), + Expected: struct { + Id id.UserID + Name string + Email string + Team id.TeamID + Auths []Auth + Lang language.Tag + }{ + Id: uid, + Name: "xxx", + Email: "ff@xx.zz", + Team: tid, + Auths: []Auth{{ + Provider: "aaa", + Sub: "sss", + }}, + Lang: language.Make("en"), + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected.Id, tc.User.ID()) + assert.Equal(tt, tc.Expected.Name, tc.User.Name()) + assert.Equal(tt, tc.Expected.Team, tc.User.Team()) + assert.Equal(tt, tc.Expected.Auths, tc.User.Auths()) + assert.Equal(tt, tc.Expected.Email, tc.User.Email()) + assert.Equal(tt, tc.Expected.Lang, tc.User.Lang()) + }) + } +} + +func TestUser_AddAuth(t *testing.T) { + testCases := []struct { + Name string + User *User + A Auth + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "add new auth", + User: New().NewID().MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: true, + }, + { + Name: "existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.User.AddAuth(tc.A) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestUser_RemoveAuth(t *testing.T) { + testCases := []struct { + Name string + User *User + A Auth + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "remove auth0", + User: New().NewID().MustBuild(), + A: Auth{ + Provider: "auth0", + Sub: "zzz", + }, + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: true, + }, + } + for 
_, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.User.RemoveAuth(tc.A) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestUser_ContainAuth(t *testing.T) { + testCases := []struct { + Name string + User *User + A Auth + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "not existing auth", + User: New().NewID().MustBuild(), + A: Auth{ + Provider: "auth0", + Sub: "zzz", + }, + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + A: Auth{ + Provider: "xxx", + Sub: "zzz", + }, + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.User.ContainAuth(tc.A) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestUser_RemoveAuthByProvider(t *testing.T) { + testCases := []struct { + Name string + User *User + Provider string + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "remove auth0", + User: New().NewID().MustBuild(), + Provider: "auth0", + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + Provider: "xxx", + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.User.RemoveAuthByProvider(tc.Provider) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestUser_ClearAuths(t *testing.T) { + u := New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild() + u.ClearAuths() + assert.Equal(t, 0, len(u.Auths())) +} + +func TestUser_Auths(t *testing.T) { + var u *User + assert.Equal(t, []Auth(nil), u.Auths()) +} + +func TestUser_UpdateEmail(t *testing.T) { + u := New().NewID().MustBuild() + u.UpdateEmail("ff@xx.zz") + assert.Equal(t, "ff@xx.zz", u.Email()) +} 
+ +func TestUser_UpdateLang(t *testing.T) { + u := New().NewID().MustBuild() + u.UpdateLang(language.Make("en")) + assert.Equal(t, language.Make("en"), u.Lang()) +} + +func TestUser_UpdateTeam(t *testing.T) { + tid := id.NewTeamID() + u := New().NewID().MustBuild() + u.UpdateTeam(tid) + assert.Equal(t, tid, u.Team()) +} + +func TestUser_UpdateName(t *testing.T) { + u := New().NewID().MustBuild() + u.UpdateName("xxx") + assert.Equal(t, "xxx", u.Name()) +} diff --git a/pkg/visualizer/visualizer.go b/pkg/visualizer/visualizer.go new file mode 100644 index 000000000..3e7b2235b --- /dev/null +++ b/pkg/visualizer/visualizer.go @@ -0,0 +1,9 @@ +package visualizer + +// Visualizer _ +type Visualizer string + +const ( + // VisualizerCesium _ + VisualizerCesium Visualizer = "cesium" +) diff --git a/pkg/writer/seeker_closer.go b/pkg/writer/seeker_closer.go new file mode 100644 index 000000000..c1ea7a72a --- /dev/null +++ b/pkg/writer/seeker_closer.go @@ -0,0 +1,55 @@ +package writer + +import ( + "errors" + "io" +) + +// reference: https://stackoverflow.com/questions/45836767/using-an-io-writeseeker-without-a-file-in-go +type WriterSeeker struct { + buffer []byte + position int +} + +func (sc *WriterSeeker) Write(p []byte) (int, error) { + minCap := sc.position + len(p) + if minCap > cap(sc.buffer) { + b2 := make([]byte, len(sc.buffer), minCap+len(p)) + copy(b2, sc.buffer) + sc.buffer = b2 + } + if minCap > len(sc.buffer) { + sc.buffer = sc.buffer[:minCap] + } + copy(sc.buffer[sc.position:], p) + sc.position += len(p) + return len(p), nil +} + +func (sc *WriterSeeker) Seek(offset int64, whence int) (int64, error) { + newPos, offs := 0, int(offset) + switch whence { + case io.SeekStart: + newPos = offs + case io.SeekCurrent: + newPos = sc.position + offs + case io.SeekEnd: + newPos = len(sc.buffer) + offs + } + if newPos < 0 { + return 0, errors.New("negative result pos") + } + sc.position = newPos + return int64(newPos), nil +} + +func (sc *WriterSeeker) WriteTo(w io.Writer) 
(int64, error) { + i, err := w.Write(sc.buffer) + return int64(i), err +} + +func (sc *WriterSeeker) Buffer() []byte { + b := make([]byte, len(sc.buffer)) + copy(b, sc.buffer) + return b +} diff --git a/pkg/writer/seeker_closer_test.go b/pkg/writer/seeker_closer_test.go new file mode 100644 index 000000000..503539794 --- /dev/null +++ b/pkg/writer/seeker_closer_test.go @@ -0,0 +1,122 @@ +package writer + +import ( + "bytes" + "errors" + "io" + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ io.WriteSeeker = (*WriterSeeker)(nil) + +//reference: https://github.com/orcaman/writerseeker/blob/master/writerseeker_test.go + +func TestWrite(t *testing.T) { + testCases := []struct { + Name string + Input []byte + WS *WriterSeeker + ExpectedBuffer []byte + ExpectedPosition int + err error + }{ + { + Name: "write a string", + Input: []byte("xxxx"), + WS: &WriterSeeker{}, + ExpectedBuffer: []byte("xxxx"), + ExpectedPosition: 4, + err: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + n, err := tc.WS.Write(tc.Input) + if err == nil { + assert.Equal(tt, tc.ExpectedBuffer, tc.WS.Buffer()) + assert.Equal(tt, tc.ExpectedPosition, n) + } else { + assert.True(tt, errors.As(err, &tc.err)) + } + }) + } +} + +func TestSeek(t *testing.T) { + ws := &WriterSeeker{} + _, _ = ws.Write([]byte("xxxxxx")) + + testCases := []struct { + Name string + WS *WriterSeeker + Whence int + Offset, ExpectedPosition int64 + err error + }{ + { + Name: "whence start", + WS: ws, + Offset: 1, + Whence: 0, // could use io.SeekStart as well + ExpectedPosition: 1, + err: nil, + }, + { + Name: "whence current position", + WS: ws, + Offset: 1, + Whence: 1, + ExpectedPosition: 2, + err: nil, + }, + { + Name: "end position", + WS: ws, + Offset: 1, + Whence: 2, + ExpectedPosition: 7, + err: nil, + }, + { + Name: "fail negative position", + WS: ws, + Offset: -100, + Whence: 0, + ExpectedPosition: 0, + err: errors.New("negative result pos"), 
+ }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + // this test is sequential + //tt.Parallel() + n, err := tc.WS.Seek(tc.Offset, tc.Whence) + if err == nil { + assert.Equal(tt, tc.ExpectedPosition, n) + } else { + assert.True(tt, errors.As(err, &tc.err)) + } + }) + } +} + +func TestWriterSeeker_WriteTo(t *testing.T) { + ws := &WriterSeeker{} + buf := bytes.NewBufferString("") + _, _ = ws.Write([]byte("xxxx")) + n, err := ws.WriteTo(buf) + assert.NoError(t, err) + assert.Equal(t, int64(4), n) + assert.Equal(t, "xxxx", buf.String()) +} + +func TestWriterSeeker_Buffer(t *testing.T) { + ws := &WriterSeeker{} + _, _ = ws.Write([]byte("xxxx")) + assert.Equal(t, []byte("xxxx"), ws.Buffer()) +} diff --git a/plugin_manifest_schema.json b/plugin_manifest_schema.json new file mode 100644 index 000000000..59c787a59 --- /dev/null +++ b/plugin_manifest_schema.json @@ -0,0 +1,360 @@ +{ + "$id": "https://app.reearth.io/schemas/plugin-manifest", + "$schema": "http://json-schema.org/draft-04/schema", + "description": "Re:Earth plugin manifest schema", + "definitions": { + "id": { + "$id": "#id", + "type": "string", + "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" + }, + "id?": { + "$id": "#id?", + "type": [ + "string", + "null" + ], + "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" + }, + "valuetype": { + "$id": "#valuetype", + "type": "string", + "enum": [ + "bool", + "number", + "string", + "url", + "latlng", + "latlngheight", + "camera", + "typography", + "coordinates", + "polygon", + "rect", + "ref" + ] + }, + "propertyPointer": { + "$id": "#propertyPointer", + "type": [ + "object", + "null" + ], + "properties": { + "schemaGroupId": { + "type": "string" + }, + "fieldId": { + "type": "string" + } + }, + "required": [ + "schemaGroupId", + "fieldId" + ], + "additionalProperties": false + }, + "propertyLinkableFields": { + "$id": "#propertyLinkableFields", + "type": [ + "object", + "null" + ], + "properties": { + "latlng": { + "$ref": 
"#/definitions/propertyPointer" + }, + "url": { + "$ref": "#/definitions/propertyPointer" + } + }, + "additionalProperties": false + }, + "propertyCondition": { + "$id": "#propertyCondition", + "type": [ + "object", + "null" + ], + "properties": { + "field": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/valuetype" + }, + "value": {} + }, + "required": [ + "field", + "type", + "value" + ], + "additionalProperties": false + }, + "propertySchemaField": { + "$id": "#propertySchemaField", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "type": { + "$ref": "#/definitions/valuetype" + }, + "prefix": { + "type": [ + "string", + "null" + ] + }, + "suffix": { + "type": [ + "string", + "null" + ] + }, + "defaultValue": {}, + "ui": { + "type": [ + "string", + "null" + ], + "enum": [ + "layer", + "color", + "multiline", + "selection", + "buttons", + "range", + "image", + "video", + "file", + "camera_pose" + ] + }, + "min": { + "type": [ + "number", + "null" + ] + }, + "max": { + "type": [ + "number", + "null" + ] + }, + "choices": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "label": { + "type": "string" + }, + "icon": { + "type": "string" + } + }, + "required": [ + "key" + ], + "additionalProperties": false + } + }, + "availableIf": { + "$ref": "#/definitions/propertyCondition" + } + }, + "required": [ + "id", + "type", + "title" + ], + "additionalProperties": false + }, + "propertySchemaGroup": { + "$id": "#propertySchemaGroup", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "list": { + "type": "boolean" + }, + "availableIf": { + "$ref": "#/definitions/propertyCondition" + }, + "representativeField": { + "$ref": 
"#/definitions/id?" + }, + "fields": { + "type": "array", + "items": { + "$ref": "#/definitions/propertySchemaField" + } + } + }, + "required": [ + "id", + "title" + ], + "additionalProperties": false + }, + "propertySchema": { + "$id": "#propertySchema", + "type": [ + "object", + "null" + ], + "properties": { + "version": { + "type": "number" + }, + "linkable": { + "$ref": "#/definitions/propertyLinkableFields" + }, + "groups": { + "type": "array", + "items": { + "$ref": "#/definitions/propertySchemaGroup" + } + } + }, + "additionalProperties": false + }, + "extension": { + "$id": "#extension", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "icon": { + "type": [ + "string", + "null" + ] + }, + "visualizer": { + "type": "string", + "enum": [ + "cesium" + ] + }, + "type": { + "type": "string", + "enum": [ + "primitive", + "widget", + "block", + "visualizer", + "infobox" + ] + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + }, + "required": [ + "id", + "title", + "visualizer", + "type" + ], + "additionalProperties": false + }, + "root": { + "$id": "#root", + "type": "object", + "properties": { + "id": { + "$ref": "#/definitions/id" + }, + "title": { + "type": "string" + }, + "system": { + "type": "boolean" + }, + "version": { + "type": "string" + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "repository": { + "type": [ + "string", + "null" + ] + }, + "author": { + "type": [ + "string", + "null" + ] + }, + "main": { + "type": [ + "string", + "null" + ] + }, + "extensions": { + "type": "array", + "items": { + "$ref": "#/definitions/extension" + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + }, + "required": [ + "id", + "title" + ], + "additionalProperties": false + } + }, + "$ref": "#/definitions/root" +} diff --git a/plugin_manifest_schema_translation.json 
b/plugin_manifest_schema_translation.json new file mode 100644 index 000000000..de2a81652 --- /dev/null +++ b/plugin_manifest_schema_translation.json @@ -0,0 +1,126 @@ +{ + "$id": "https://app.reearth.io/schemas/plugin-manifest-translation", + "$schema": "http://json-schema.org/draft-04/schema", + "description": "Re:Earth plugin manifest schema translation", + "definitions": { + "propertySchemaField": { + "$id": "#propertySchemaField", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "choices": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "type": "string" + } + } + } + } + }, + "propertySchemaGroup": { + "$id": "#propertySchemaGroup", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "fields": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/propertySchemaField" + } + } + } + } + }, + "propertySchema": { + "$id": "#propertySchema", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/propertySchemaGroup" + } + } + }, + "extension": { + "$id": "#extension", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "propertySchema": { + "$ref": "#/definitions/propertySchema" + } + } + }, + "root": { + "$id": "#root", + "type": "object", + "additionalProperties": false, + "properties": { + "title": { + "type": [ + "string", + "null" + ] + }, + "description": { + "type": [ + "string", + "null" + ] + }, + "extensions": { + "type": 
"object", + "patternProperties": { + "^[A-Za-z]{1}[\\w-:.]{0,}$": { + "$ref": "#/definitions/extension" + } + } + }, + "schema": { + "$ref": "#/definitions/propertySchema" + } + } + } + }, + "$ref": "#/definitions/root" +} diff --git a/schema.graphql b/schema.graphql new file mode 100644 index 000000000..98d6ac6be --- /dev/null +++ b/schema.graphql @@ -0,0 +1,1385 @@ +# Built-in + +scalar Upload +scalar Any + +directive @goModel(model: String, models: [String!]) on OBJECT + | INPUT_OBJECT + | SCALAR + | ENUM + | INTERFACE + | UNION + +directive @goField(forceResolver: Boolean, name: String) on INPUT_FIELD_DEFINITION + | FIELD_DEFINITION + +# Basic types + +scalar Cursor +scalar DateTime +scalar URL +scalar FileSize +scalar PluginID +scalar PluginExtensionID +scalar PropertySchemaID +scalar PropertySchemaFieldID +scalar TranslatedString + +type LatLng { + lat: Float! + lng: Float! +} + +type LatLngHeight { + lat: Float! + lng: Float! + height: Float! +} + +type Camera { + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! +} + +type Typography { + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean +} + +type Rect { + west: Float! + south: Float! + east: Float! + north: Float! +} + +enum TextAlign { + LEFT + CENTER + RIGHT + JUSTIFY + JUSTIFY_ALL +} + +enum ValueType { + BOOL + NUMBER + STRING + REF + URL + LATLNG + LATLNGHEIGHT + CAMERA + TYPOGRAPHY + COORDINATES + POLYGON + RECT +} + +enum ListOperation { + ADD, + MOVE, + REMOVE +} + +enum Theme { + DEFAULT + LIGHT + DARK +} + +# Meta Type + +interface Node { + id: ID! +} + +type PageInfo { + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} + +# Asset + +type Asset implements Node { + id: ID! + createdAt: DateTime! + teamId: ID! + name: String! + size: FileSize! + url: String! + contentType: String! 
+ team: Team @goField(forceResolver: true) +} + +# User + +type User implements Node { + id: ID! + name: String! + email: String! + lang: String! + theme: Theme! + myTeamId: ID! + auths: [String!]! + teams: [Team!]! @goField(forceResolver: true) + myTeam: Team! @goField(forceResolver: true) +} + +type SearchedUser { + userId: ID! + userName: String! + userEmail: String! +} + +type CheckProjectAliasPayload { + alias: String! + available: Boolean! +} + +type Team implements Node { + id: ID! + name: String! + members: [TeamMember!]! + personal: Boolean! + assets(first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! @goField(forceResolver: true) + projects(includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! @goField(forceResolver: true) +} + +type TeamMember { + userId: ID! + role: Role! + user: User @goField(forceResolver: true) +} + +enum Role { + # a role who can read project + READER + # a role who can read and write project + WRITER + # a role who can have full control of project + OWNER +} + +# Project + +type Project implements Node { + id: ID! + isArchived: Boolean! + createdAt: DateTime! + updatedAt: DateTime! + publishedAt: DateTime + name: String! + description: String! + alias: String! + publicTitle: String! + publicDescription: String! + publicImage: String! + publicNoIndex: Boolean! + imageUrl: URL + teamId: ID! + visualizer: Visualizer! + publishmentStatus: PublishmentStatus! + team: Team @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) +} + +enum Visualizer { + CESIUM +} + +enum PublishmentStatus { + PUBLIC + LIMITED + PRIVATE +} + +# Plugin + +type Plugin { + id: PluginID! + name: String! + version: String! + description: String! + author: String! + repositoryUrl: String! + propertySchemaId: PropertySchemaID + extensions: [PluginExtension!]! 
+ scenePlugin(sceneId: ID!): ScenePlugin + allTranslatedDescription: TranslatedString + allTranslatedName: TranslatedString + translatedName(lang: String): String! + translatedDescription(lang: String): String! + propertySchema: PropertySchema @goField(forceResolver: true) +} + +enum PluginExtensionType { + PRIMITIVE + WIDGET + BLOCK + VISUALIZER + INFOBOX +} + +type PluginExtension { + extensionId: PluginExtensionID! + pluginId: PluginID! + type: PluginExtensionType! + name: String! + description: String! + icon: String! + visualizer: Visualizer! + propertySchemaId: PropertySchemaID! + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + plugin: Plugin @goField(forceResolver: true) + sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) +} + +# Scene + +type Scene implements Node { + id: ID! + projectId: ID! + teamId: ID! + propertyId: ID! + createdAt: DateTime! + updatedAt: DateTime! + rootLayerId: ID! + widgets: [SceneWidget!]! + plugins: [ScenePlugin!]! + dynamicDatasetSchemas: [DatasetSchema!]! + project: Project @goField(forceResolver: true) + team: Team @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + rootLayer: LayerGroup @goField(forceResolver: true) + lockMode: SceneLockMode! @goField(forceResolver: true) + datasetSchemas(first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! @goField(forceResolver: true) +} + +enum SceneLockMode { + FREE + PENDING + DATASET_SYNCING + PLUGIN_UPGRADING + PUBLISHING +} + +type SceneWidget { + id: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + propertyId: ID! + enabled: Boolean! 
+ plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +type ScenePlugin { + pluginId: PluginID! + propertyId: ID + plugin: Plugin @goField(forceResolver: true) + property: Property @goField(forceResolver: true) +} + +# Property + +type PropertySchema { + id: PropertySchemaID! + groups: [PropertySchemaGroup!]! + linkableFields: PropertyLinkableFields! +} + +type PropertyLinkableFields { + schemaId: PropertySchemaID! + latlng: PropertySchemaFieldID + url: PropertySchemaFieldID + latlngField: PropertySchemaField @goField(forceResolver: true) + urlField: PropertySchemaField @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) +} + +type PropertySchemaGroup { + schemaGroupId: PropertySchemaFieldID! + schemaId: PropertySchemaID! + fields: [PropertySchemaField!]! + isList: Boolean! + isAvailableIf: PropertyCondition + title: String + allTranslatedTitle: TranslatedString + # For compatibility: "name" field will be removed in the future + name: PropertySchemaFieldID + representativeFieldId: PropertySchemaFieldID + representativeField: PropertySchemaField + schema: PropertySchema @goField(forceResolver: true) + translatedTitle(lang: String): String! @goField(forceResolver: true) +} + +type PropertySchemaField { + fieldId: PropertySchemaFieldID! + type: ValueType! + title: String! + # For compatibility: "name" field will be removed in the future + name: String! + description: String! + prefix: String + suffix: String + defaultValue: Any + ui: PropertySchemaFieldUI + min: Float + max: Float + choices: [PropertySchemaFieldChoice!] + isAvailableIf: PropertyCondition + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedName" field will be removed in the future + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + translatedTitle(lang: String): String! 
@goField(forceResolver: true) + # For compatibility: "translatedName" field will be removed in the future + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) +} + +enum PropertySchemaFieldUI { + LAYER + MULTILINE + SELECTION + COLOR + RANGE + IMAGE + VIDEO + FILE + CAMERA_POSE +} + +type PropertySchemaFieldChoice { + key: String! + title: String! + # For compatibility: "label" field will be removed in the future + label: String! + icon: String + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedLabel" field will be removed in the future + allTranslatedLabel: TranslatedString + translatedTitle(lang: String): String! @goField(forceResolver: true) + # For compatibility: "translatedLabel" field will be removed in the future + translatedLabel(lang: String): String! @goField(forceResolver: true) +} + +type PropertyCondition { + fieldId: PropertySchemaFieldID! + type: ValueType! + value: Any +} + +type Property implements Node { + id: ID! + schemaId: PropertySchemaID! + items: [PropertyItem!]! + schema: PropertySchema @goField(forceResolver: true) + layer: Layer @goField(forceResolver: true) + merged: MergedProperty @goField(forceResolver: true) +} + +union PropertyItem = PropertyGroup | PropertyGroupList + +type PropertyGroup { + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + fields: [PropertyField!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyGroupList { + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + groups: [PropertyGroup!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) +} + +type PropertyField { + id: PropertySchemaFieldID! + parentId: ID! + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! 
+ links: [PropertyFieldLink!] + type: ValueType! + value: Any + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +type PropertyFieldLink { + datasetId: ID + datasetSchemaId: ID! + datasetSchemaFieldId: ID! + dataset: Dataset @goField(forceResolver: true) + datasetField: DatasetField @goField(forceResolver: true) + datasetSchema: DatasetSchema @goField(forceResolver: true) + datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) +} + +type MergedProperty { + originalId: ID + parentId: ID + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + original: Property @goField(forceResolver: true) + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + groups: [MergedPropertyGroup!]! @goField(forceResolver: true) +} + +type MergedPropertyGroup { + originalPropertyId: ID + parentPropertyId: ID + originalId: ID + parentId: ID + schemaGroupId: PropertySchemaFieldID! + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + fields: [MergedPropertyField!]! + groups: [MergedPropertyGroup!]! + originalProperty: Property @goField(forceResolver: true) + parentProperty: Property @goField(forceResolver: true) + original: PropertyGroup @goField(forceResolver: true) + parent: PropertyGroup @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) +} + +type MergedPropertyField { + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! + links: [PropertyFieldLink!] + overridden: Boolean! 
+ schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) +} + +# Dataset + +type DatasetSchema implements Node { + id: ID! + source: String! + name: String! + sceneId: ID! + fields: [DatasetSchemaField!]! + representativeFieldId: ID + dynamic: Boolean + datasets(first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + representativeField: DatasetSchemaField @goField(forceResolver: true) +} + +type DatasetSchemaField implements Node { + id: ID! + source: String! + name: String! + type: ValueType! + schemaId: ID! + refId: ID + schema: DatasetSchema @goField(forceResolver: true) + ref: DatasetSchema @goField(forceResolver: true) +} + +type Dataset implements Node { + id: ID! + source: String! + schemaId: ID! + fields: [DatasetField!]! + schema: DatasetSchema @goField(forceResolver: true) + name: String @goField(forceResolver: true) +} + +type DatasetField { + fieldId: ID! + schemaId: ID! + source: String! + type: ValueType! + value: Any + schema: DatasetSchema @goField(forceResolver: true) + field: DatasetSchemaField @goField(forceResolver: true) + valueRef: Dataset @goField(forceResolver: true) +} + +# Layer + +interface Layer { + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + parent: LayerGroup + property: Property + plugin: Plugin + extension: PluginExtension +} + +union Layers = LayerItem | LayerGroup + +enum LayerEncodingFormat { + KML + CZML + GEOJSON + SHAPE + REEARTH +} + +type LayerItem implements Layer { + id: ID! + name: String! + isVisible: Boolean! 
+ propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetId: ID + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedLayer @goField(forceResolver: true) +} + +type LayerGroup implements Layer { + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetSchemaId: ID + root: Boolean! + layerIds: [ID!]! + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + layers: [Layer]! @goField(forceResolver: true) +} + +type Infobox { + layerId: ID! + propertyId: ID! + fields: [InfoboxField!]! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfobox @goField(forceResolver: true) +} + +type InfoboxField { + id: ID! + layerId: ID! + propertyId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + infobox: Infobox! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfoboxField @goField(forceResolver: true) +} + +type MergedLayer { + originalId: ID! 
+ parentId: ID + property: MergedProperty + infobox: MergedInfobox + original: LayerItem @goField(forceResolver: true) + parent: LayerGroup @goField(forceResolver: true) +} + +type MergedInfobox { + property: MergedProperty + fields: [MergedInfoboxField!]! +} + +type MergedInfoboxField { + originalId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + property: MergedProperty + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) +} + + +# InputType +input CreateAssetInput { + teamId: ID! + file: Upload! +} + +input RemoveAssetInput { + assetId: ID! +} + +input SignupInput { + userId: ID + teamId: ID + secret: String +} + +input UpdateMeInput { + name: String + email: String + lang: String + theme: Theme + password: String + passwordConfirmation: String +} + +input RemoveMyAuthInput { + auth: String! +} + +input DeleteMeInput { + userId: ID! +} + +input CreateTeamInput { + name: String! +} + +input UpdateTeamInput { + teamId: ID! + name: String! +} + +input AddMemberToTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input RemoveMemberFromTeamInput { + teamId: ID! + userId: ID! +} + +input UpdateMemberOfTeamInput { + teamId: ID! + userId: ID! + role: Role! +} + +input DeleteTeamInput { + teamId: ID! +} + +input CreateProjectInput { + teamId: ID! + visualizer: Visualizer! + name: String + description: String + imageUrl: URL + alias: String + archived: Boolean +} + +input UpdateProjectInput { + projectId: ID! + name: String + description: String + archived: Boolean + alias: String + imageUrl: URL + publicTitle: String + publicDescription: String + publicImage: Upload + publicNoIndex: Boolean + deleteImageUrl: Boolean + deletePublicImage: Boolean +} + +input UploadPluginInput { + file: Upload! +} + +input CreateSceneInput { + projectId: ID! +} + +input PublishProjectInput { + projectId: ID! + alias: String + status: PublishmentStatus! +} + +input DeleteProjectInput { + projectId: ID! 
+} + +input AddWidgetInput { + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! +} + +input UpdateWidgetInput { + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + enabled: Boolean +} + +input RemoveWidgetInput { + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! +} + +input InstallPluginInput { + sceneId: ID! + pluginId: PluginID! +} + +input UninstallPluginInput { + sceneId: ID! + pluginId: PluginID! +} + +input UpgradePluginInput { + sceneId: ID! + pluginId: PluginID! + toPluginId: PluginID! +} + +input SyncDatasetInput { + sceneId: ID! + url: String! +} + +input UpdatePropertyValueInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! +} + +input UpdatePropertyValueLatLngInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! +} + +input UpdatePropertyValueLatLngHeightInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + height: Float! +} + +input UpdatePropertyValueCameraInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! +} + +input UpdatePropertyValueTypographyInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean +} + +input RemovePropertyFieldInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! +} + +input UploadFileToPropertyInput { + propertyId: ID! 
+ schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + file: Upload! +} + +input LinkDatasetToPropertyValueInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + datasetSchemaIds: [ID!]! + datasetSchemaFieldIds: [ID!]! + datasetIds: [ID!] +} + +input UnlinkPropertyValueInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! +} + +input AddPropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input MovePropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! + index: Int! +} + +input RemovePropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! +} + +input UpdatePropertyItemInput { + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + operations: [UpdatePropertyItemOperationInput!]! +} + +input UpdatePropertyItemOperationInput { + operation: ListOperation! + itemId: ID + index: Int + nameFieldValue: Any + nameFieldType: ValueType +} + +input AddLayerItemInput { + parentLayerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int + name: String + lat: Float + lng: Float +} + +input AddLayerGroupInput { + parentLayerId: ID! + pluginId: PluginID + extensionId: PluginExtensionID + index: Int + linkedDatasetSchemaID: ID + name: String +} + +input RemoveLayerInput { + layerId: ID! +} + +input UpdateLayerInput { + layerId: ID! + name: String + visible: Boolean +} + +input MoveLayerInput { + layerId: ID! + destLayerId: ID + index: Int +} + +input CreateInfoboxInput { + layerId: ID! +} + +input RemoveInfoboxInput { + layerId: ID! +} + +input AddInfoboxFieldInput { + layerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int +} + +input MoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! + index: Int! 
+} + +input RemoveInfoboxFieldInput { + layerId: ID! + infoboxFieldId: ID! +} + +input UpdateDatasetSchemaInput { + schemaId: ID! + name: String! +} + +input AddDynamicDatasetSchemaInput { + sceneId: ID! +} + +input AddDynamicDatasetInput { + datasetSchemaId: ID! + author: String! + content: String! + lat: Float + lng: Float + target: String +} + +input RemoveDatasetSchemaInput { + schemaId: ID! + force: Boolean +} + +input ImportLayerInput { + layerId: ID! + file: Upload! + format: LayerEncodingFormat! +} + +input ImportDatasetInput { + file: Upload! + sceneId: ID! + datasetSchemaId: ID +} + +input AddDatasetSchemaInput { + sceneId: ID! + name: String! + representativefield: ID +} + +# Payload +type CreateAssetPayload { + asset: Asset! +} + +type RemoveAssetPayload { + assetId: ID! +} + +type SignupPayload { + user: User! + team: Team! +} + +type UpdateMePayload { + user: User! +} + +type DeleteMePayload { + userId: ID! +} + +type CreateTeamPayload { + team: Team! +} + +type UpdateTeamPayload { + team: Team! +} + +type AddMemberToTeamPayload { + team: Team! +} + +type RemoveMemberFromTeamPayload { + team: Team! +} + +type UpdateMemberOfTeamPayload { + team: Team! +} + +type DeleteTeamPayload { + teamId: ID! +} + +type ProjectPayload { + project: Project! +} + +type DeleteProjectPayload { + projectId: ID! +} + +type UploadPluginPayload { + plugin: Plugin! +} + +type CreateScenePayload { + scene: Scene! +} + +type AddWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type UpdateWidgetPayload { + scene: Scene! + sceneWidget: SceneWidget! +} + +type RemoveWidgetPayload { + scene: Scene! + pluginId: PluginID! + extensionId: PluginExtensionID! +} + +type InstallPluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type UninstallPluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type UpgradePluginPayload { + scene: Scene! + scenePlugin: ScenePlugin! +} + +type SyncDatasetPayload { + sceneId: ID! + url: String! 
+ datasetSchema: [DatasetSchema!]! + dataset: [Dataset!]! +} + +type PropertyFieldPayload { + property: Property! + propertyField: PropertyField +} + +type PropertyItemPayload { + property: Property! + propertyItem: PropertyItem +} + +type AddLayerItemPayload { + layer: LayerItem! + parentLayer: LayerGroup! + index: Int +} + +type AddLayerGroupPayload { + layer: LayerGroup! + parentLayer: LayerGroup! + index: Int +} + +type RemoveLayerPayload { + layerId: ID! + parentLayer: LayerGroup! +} + +type UpdateLayerPayload { + layer: Layer! +} + +type MoveLayerPayload { + layerId: ID! + fromParentLayer: LayerGroup! + toParentLayer: LayerGroup! + index: Int! +} + +type CreateInfoboxPayload { + layer: Layer! +} + +type RemoveInfoboxPayload { + layer: Layer! +} + +type AddInfoboxFieldPayload { + infoboxField: InfoboxField! + layer: Layer! +} + +type MoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! + index: Int! +} + +type RemoveInfoboxFieldPayload { + infoboxFieldId: ID! + layer: Layer! +} + +type UpdateDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type RemoveDatasetSchemaPayload { + schemaId: ID! +} + +type AddDynamicDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +type AddDynamicDatasetPayload { + datasetSchema: DatasetSchema + dataset: Dataset +} + +type ImportLayerPayload { + layers: [Layer!]! + parentLayer: LayerGroup! +} + +type ImportDatasetPayload { + datasetSchema: DatasetSchema! +} + +type AddDatasetSchemaPayload { + datasetSchema: DatasetSchema +} + +# Connection + +enum NodeType { + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM +} + +type AssetConnection { + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type AssetEdge { + cursor: Cursor! + node: Asset +} + +type ProjectConnection { + edges: [ProjectEdge!]! + nodes: [Project]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ProjectEdge { + cursor: Cursor! 
+ node: Project +} + +type DatasetSchemaConnection { + edges: [DatasetSchemaEdge!]! + nodes: [DatasetSchema]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetSchemaEdge { + cursor: Cursor! + node: DatasetSchema +} + +type DatasetConnection { + edges: [DatasetEdge!]! + nodes: [Dataset]! + pageInfo: PageInfo! + totalCount: Int! +} + +type DatasetEdge { + cursor: Cursor! + node: Dataset +} + + +# Query + +type Query { + me: User + node(id: ID!, type: NodeType!): Node + nodes(id: [ID!]!, type: NodeType!): [Node]! + propertySchema(id: PropertySchemaID!): PropertySchema + propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! + plugin(id: PluginID!): Plugin + plugins(id: [PluginID!]!): [Plugin!]! + layer(id: ID!): Layer + scene(projectId: ID!): Scene + assets(teamId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! + projects(teamId: ID!, includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! + datasetSchemas(sceneId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! + datasets(datasetSchemaId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! + sceneLock(sceneId: ID!): SceneLockMode + dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! + searchUser(nameOrEmail: String!): SearchedUser + checkProjectAlias(alias: String!): CheckProjectAliasPayload! +} + +# Mutation + +type Mutation { + # Asset + createAsset(input: CreateAssetInput!): CreateAssetPayload! + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload! + + # User + signup(input: SignupInput!): SignupPayload! + updateMe(input: UpdateMeInput!): UpdateMePayload! + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload! + deleteMe(input: DeleteMeInput!): DeleteMePayload! + + # Team + createTeam(input: CreateTeamInput!): CreateTeamPayload! + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload! + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload! 
+ addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload! + removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload! + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload! + + # Project + createProject(input: CreateProjectInput!): ProjectPayload! + updateProject(input: UpdateProjectInput!): ProjectPayload! + publishProject(input: PublishProjectInput!): ProjectPayload! + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload! + + # Plugin + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload! + + # Scene + createScene(input: CreateSceneInput!): CreateScenePayload! + addWidget(input: AddWidgetInput!): AddWidgetPayload! + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload! + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload! + installPlugin(input: InstallPluginInput!): InstallPluginPayload! + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload! + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload! + + # Dataset + updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload! + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload! + addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload! + addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload! + removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload! + importDataset(input: ImportDatasetInput!): ImportDatasetPayload! + addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload! + + # Property + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload! + updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload! + updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload! + updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload! 
+ updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload! + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload! + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload! + linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload! + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload! + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload! + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload! + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload! + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload! + + # Layer + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload! + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload! + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload! + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload! + moveLayer(input: MoveLayerInput!): MoveLayerPayload! + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload! + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload! + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload! + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload! + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload! + importLayer(input:ImportLayerInput!): ImportLayerPayload! 
+} + +schema { + query: Query + mutation: Mutation +} diff --git a/tools.go b/tools.go new file mode 100644 index 000000000..f4aea7332 --- /dev/null +++ b/tools.go @@ -0,0 +1,9 @@ +// +build tools + +package main + +import ( + _ "github.com/99designs/gqlgen/cmd" + _ "github.com/idubinskiy/schematyper" + _ "github.com/vektah/dataloaden" +) diff --git a/tools/cmd/embed/main.go b/tools/cmd/embed/main.go new file mode 100644 index 000000000..a8cfd97a4 --- /dev/null +++ b/tools/cmd/embed/main.go @@ -0,0 +1,178 @@ +package main + +import ( + "bytes" + "encoding/json" + "flag" + "fmt" + "log" + "os" + "path/filepath" + "strings" + "text/template" + "unsafe" + + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/imports" + "gopkg.in/yaml.v2" +) + +func main() { + log.SetPrefix("embed: ") + log.SetFlags(0) + + wd, err := os.Getwd() + if err != nil { + log.Fatal(err) + } + + if err := run(wd); err != nil { + log.Fatal(err) + } +} + +func run(wd string) error { + input := flag.String("i", "", "input") + output := flag.String("o", "", "output") + name := flag.String("n", "", "const name") + pkgname := flag.String("p", "", "package name") + yamltojson := flag.Bool("yaml2json", false, "convert YAML to JSON") + all := flag.Bool("all", false, "read all files") + flag.Parse() + + if *input == "" { + return errors.New("input option is required") + } + + if *output == "" && !*all { + return errors.New("output option is required") + } + + if *name == "" { + return errors.New("name option is required") + } + + pkgs, err := packages.Load(&packages.Config{Dir: wd}, ".") + if err != nil { + return errors.Wrap(err, "failed to load package") + } + + if *pkgname == "" { + pkgname = &pkgs[0].Name + } + + if *all { + filename, ext := getFileNameExt(*input) + filenames, err := os.ReadDir(".") + if err != nil { + return err + } + for _, f := range filenames { + if strings.HasPrefix(f.Name(), filename) && strings.HasSuffix(f.Name(), ext) { + if err != nil { + return
errors.Wrap(err, fmt.Sprintf("failed to read file %s", f.Name())) + } + fstr, _ := getFileNameExt(f.Name()) + err = handleOneFile(f.Name(), fstr+"_gen.go", *pkgname, *name+fstr[len(filename):], yamltojson) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("failed to handle file %s", f.Name())) + } + } + } + } else { + return handleOneFile(*input, *output, *pkgname, *name, yamltojson) + } + return nil +} + +type templateData struct { + PackageName string + Name string + Content string +} + +var templ = template.Must(template.New("generated").Parse(` + "\n" + `
// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. + +package {{.PackageName}} + +const {{.Name}} string = ` + "`{{.Content}}`" + ``)) + +// https://stackoverflow.com/questions/40737122/convert-yaml-to-json-without-struct +func convert(i interface{}) interface{} { + switch x := i.(type) { + case map[interface{}]interface{}: + m2 := map[string]interface{}{} + for k, v := range x { + m2[k.(string)] = convert(v) + } + return m2 + case []interface{}: + for i, v := range x { + x[i] = convert(v) + } + } + return i +} + +func yaml2json(content []byte) ([]byte, error) { + var y interface{} + if err := yaml.Unmarshal([]byte(content), &y); err != nil { + return nil, errors.Wrap(err, "failed to parse YAML") + } + y = convert(y) + b, err := json.Marshal(&y) + if err != nil { + return nil, errors.Wrap(err, "failed to marshal JSON") + } + return b, nil +} + +func processAndWriteOneFile(data templateData, output string) error { + buf := &bytes.Buffer{} + + if err := templ.Execute(buf, data); err != nil { + return errors.Wrap(err, "unable to generate code") + } + + src, err := imports.Process("", buf.Bytes(), nil) + if err != nil { + return errors.Wrap(err, "unable to gofmt") + } + + err = os.WriteFile(output, src, 0644) + if err != nil { + return errors.Wrap(err, "unable to write file") + } + return nil +} + +func handleOneFile(input, output, pkgname, name string, yamltojson *bool) error { + 
content, err := os.ReadFile(input) + if err != nil { + return errors.Wrap(err, "failed to read file") + } + + if yamltojson != nil && *yamltojson { + content, err = yaml2json(content) + if err != nil { + return errors.Wrap(err, "failed to convert YAML to JSON") + } + } + + contentstr := *(*string)(unsafe.Pointer(&content)) + + data := templateData{ + PackageName: pkgname, + Name: name, + Content: strings.ReplaceAll(contentstr, "`", "` + \"`\" + `"), + } + return processAndWriteOneFile(data, output) +} + +func getFileNameExt(input string) (string, string) { + ext := filepath.Ext(input) + fname := input[0 : len(input)-len(ext)] + return fname, ext +} diff --git a/tools/cmd/gen/flag.go b/tools/cmd/gen/flag.go new file mode 100644 index 000000000..5211e1b50 --- /dev/null +++ b/tools/cmd/gen/flag.go @@ -0,0 +1,123 @@ +package main + +import ( + "fmt" +) + +type Flags map[string][]string + +func (f Flags) Bool(keys ...string) bool { + for _, k := range keys { + _, ok := f[k] + if !ok { + continue + } + return true + } + return false +} + +func (f Flags) String(keys ...string) string { + for _, k := range keys { + v, ok := f[k] + if !ok || len(v) == 0 { + continue + } + return v[0] + } + return "" +} + +func (f Flags) Strings(keys ...string) []string { + for _, k := range keys { + v, ok := f[k] + if !ok || len(v) == 0 { + continue + } + return v + } + return nil +} + +type flagSet struct { + args []string + parsed bool + flags Flags +} + +func Parse(args []string) (Flags, []string, error) { + fs := flagSet{} + if err := fs.parse(args); err != nil { + return nil, nil, err + } + if len(fs.args) == 0 { + fs.args = nil + } + return fs.flags, fs.args, nil +} + +func (f *flagSet) parse(arguments []string) error { + f.parsed = true + f.args = arguments + for { + seen, err := f.parseOne() + if seen { + continue + } + if err == nil { + break + } + } + return nil +} + +func (f *flagSet) parseOne() (bool, error) { + if len(f.args) == 0 { + return false, nil + } + s := f.args[0] + if len(s) < 2
|| s[0] != '-' { + return false, nil + } + numMinuses := 1 + if s[1] == '-' { + numMinuses++ + if len(s) == 2 { // "--" terminates the flags + f.args = f.args[1:] + return false, nil + } + } + name := s[numMinuses:] + if len(name) == 0 || name[0] == '-' || name[0] == '=' { + return false, fmt.Errorf("bad flag syntax: %s", s) + } + + // it's a flag. does it have an argument? + f.args = f.args[1:] + hasValue := false + value := "" + for i := 1; i < len(name); i++ { // equals cannot be first + if name[i] == '=' { + value = name[i+1:] + hasValue = true + name = name[0:i] + break + } + } + + var actualValue string + if hasValue { + actualValue = value + } + + if existingValue, alreadythere := f.flags[name]; alreadythere { + f.flags[name] = append(existingValue, actualValue) + } else { + if f.flags == nil { + f.flags = make(map[string][]string) + } + f.flags[name] = []string{actualValue} + } + + return true, nil +} diff --git a/tools/cmd/gen/flag_test.go b/tools/cmd/gen/flag_test.go new file mode 100644 index 000000000..1d99fba63 --- /dev/null +++ b/tools/cmd/gen/flag_test.go @@ -0,0 +1,130 @@ +package main + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParse(t *testing.T) { + flags, args, err := Parse(nil) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{}) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"-a=b"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {"b"}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"-a"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {""}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"-a", "-b"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {""}, + 
"b": {""}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"--hoge=a", "--hoge=b"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "hoge": {"a", "b"}, + }), flags) + assert.Equal(t, []string(nil), args) + + flags, args, err = Parse([]string{"aaa", "bbb"}) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string{"aaa", "bbb"}, args) + + flags, args, err = Parse([]string{"aaa", "-a", "--", "-b", "bbb"}) + assert.NoError(t, err) + assert.Equal(t, Flags(nil), flags) + assert.Equal(t, []string{"aaa", "-a", "--", "-b", "bbb"}, args) + + flags, args, err = Parse([]string{"-a", "--", "-b", "bbb"}) + assert.NoError(t, err) + assert.Equal(t, Flags(map[string][]string{ + "a": {""}, + }), flags) + assert.Equal(t, []string{"-b", "bbb"}, args) +} + +func TestFlags_Bool(t *testing.T) { + assert.Equal(t, false, Flags(nil).Bool("hoge")) + assert.Equal(t, true, Flags(map[string][]string{ + "hoge": nil, + }).Bool("hoge")) + assert.Equal(t, true, Flags(map[string][]string{ + "hoge": {""}, + }).Bool("hoge")) + assert.Equal(t, true, Flags(map[string][]string{ + "hoge": {"a"}, + "h": {"b"}, + }).Bool("hoge")) + + assert.Equal(t, false, Flags(nil).Bool("hoge", "h")) + assert.Equal(t, true, Flags(map[string][]string{ + "h": nil, + }).Bool("hoge", "h")) + assert.Equal(t, true, Flags(map[string][]string{ + "h": {""}, + }).Bool("hoge", "h")) + assert.Equal(t, true, Flags(map[string][]string{ + "h": {"a"}, + }).Bool("hoge", "h")) +} + +func TestFlags_String(t *testing.T) { + assert.Equal(t, "", Flags(nil).String("hoge")) + assert.Equal(t, "", Flags(map[string][]string{ + "hoge": nil, + }).String("hoge")) + assert.Equal(t, "a", Flags(map[string][]string{ + "hoge": {"a"}, + "h": {"b"}, + }).String("hoge")) + assert.Equal(t, "a", Flags(map[string][]string{ + "hoge": {"a", "b"}, + }).String("hoge")) + + assert.Equal(t, "", Flags(nil).String("hoge", "h")) + assert.Equal(t, "", 
Flags(map[string][]string{ + "h": nil, + }).String("hoge", "h")) + assert.Equal(t, "a", Flags(map[string][]string{ + "h": {"a"}, + }).String("hoge", "h")) + assert.Equal(t, "a", Flags(map[string][]string{ + "h": {"a", "b"}, + }).String("hoge", "h")) +} + +func TestFlags_Strings(t *testing.T) { + assert.Equal(t, []string(nil), Flags(nil).Strings("hoge")) + assert.Equal(t, []string{""}, Flags(map[string][]string{ + "hoge": {""}, + }).Strings("hoge")) + assert.Equal(t, []string{"a", "b"}, Flags(map[string][]string{ + "hoge": {"a", "b"}, + "h": {"a"}, + }).Strings("hoge")) + assert.Equal(t, []string(nil), Flags(nil).Strings("hoge", "h")) + assert.Equal(t, []string{"a"}, Flags(map[string][]string{ + "h": {"a"}, + }).Strings("hoge", "h")) +} diff --git a/tools/cmd/gen/main.go b/tools/cmd/gen/main.go new file mode 100644 index 000000000..def8f0e70 --- /dev/null +++ b/tools/cmd/gen/main.go @@ -0,0 +1,98 @@ +package main + +import ( + "bytes" + "html/template" + "log" + "os" + "path/filepath" + + "github.com/iancoleman/strcase" + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/imports" +) + +var funcs = template.FuncMap{ + "snake": func(str string) string { + return strcase.ToSnake(str) + }, + "kebab": func(str string) string { + return strcase.ToKebab(str) + }, + "camel": func(str string) string { + return strcase.ToCamel(str) + }, + "lowercamel": func(str string) string { + return strcase.ToLowerCamel(str) + }, +} + +type Data struct { + PackageName string + Output string + Flags Flags + Args []string +} + +func main() { + log.SetPrefix("gen: ") + log.SetFlags(0) + + if err := run(); err != nil { + log.Fatal(err) + } +} + +func run() error { + flags, args, err := Parse(os.Args[1:]) + if err != nil { + return err + } + + output := flags.String("", "output") + if output == "" { + return errors.New("output option is required") + } + + templatePaths := flags.Strings("", "template") + if len(templatePaths) == 0 { + return errors.New("template 
option is required") + } + + pkgs, err := packages.Load(&packages.Config{Dir: filepath.Dir(output)}, ".") + if err != nil { + return errors.Wrap(err, "failed to load package") + } + + templ, err := template.New(filepath.Base(templatePaths[0])).Funcs(funcs).ParseFiles(templatePaths...) + if err != nil { + return errors.Wrap(err, "unable to load templates") + } + + data := Data{ + PackageName: pkgs[0].Name, + Output: output, + Flags: flags, + Args: args, + } + + buf := &bytes.Buffer{} + buf.WriteString("// Code generated by gen, DO NOT EDIT.\n\n") + + if err := templ.Execute(buf, data); err != nil { + return errors.Wrap(err, "unable to generate code") + } + + src, err := imports.Process("", buf.Bytes(), nil) + if err != nil { + return errors.Wrap(err, "unable to gofmt") + } + + err = os.WriteFile(output, src, 0644) + if err != nil { + return errors.Wrap(err, "unable to write file") + } + + return nil +} diff --git a/tools/cmd/migrategen/main.go b/tools/cmd/migrategen/main.go new file mode 100644 index 000000000..5ecb63456 --- /dev/null +++ b/tools/cmd/migrategen/main.go @@ -0,0 +1,115 @@ +package main + +import ( + "bytes" + "fmt" + "log" + "os" + "path/filepath" + "strings" + "text/template" + "time" + + "github.com/iancoleman/strcase" +) + +var dest = []string{"internal", "infrastructure", "mongo", "migration"} + +func main() { + log.SetPrefix("migrategen: ") + log.SetFlags(0) + + if err := run(); err != nil { + log.Fatal(err) + } +} + +func run() error { + name := strings.Join(os.Args[1:], " ") + snake := strcase.ToSnake(name) + camel := strcase.ToCamel(name) + key := time.Now().Format("060102150405") + + data := migration{ + Key: key, + Name: camel, + } + + files, err := os.ReadDir(filepath.Join(dest...)) + if err != nil { + return fmt.Errorf("unable to get dir: %w", err) + } + + migrations := make([]migration, 0, len(files)+1) + for _, file := range files { + if file.IsDir() { + continue + } + m := migrationFromFileName(file.Name()) + if m == nil { + continue + 
} + migrations = append(migrations, *m) + } + migrations = append(migrations, data) + + buf := bytes.NewBuffer(nil) + if err := templ.Execute(buf, data); err != nil { + return fmt.Errorf("unable to generate code: %w", err) + } + + if err := os.WriteFile(filepath.Join(append(dest, key+"_"+snake+".go")...), buf.Bytes(), 0644); err != nil { + return fmt.Errorf("unable to write file: %w", err) + } + + buf = bytes.NewBuffer(nil) + if err := templ2.Execute(buf, migrations); err != nil { + return fmt.Errorf("unable to generate code: %w", err) + } + + if err := os.WriteFile(filepath.Join(append(dest, "migrations.go")...), buf.Bytes(), 0644); err != nil { + return fmt.Errorf("unable to write file: %w", err) + } + + return nil +} + +type migration struct { + Key string + Name string +} + +func migrationFromFileName(n string) (m *migration) { + if filepath.Ext(n) != ".go" { + return + } + s := strings.SplitN(n[:len(n)-3], "_", 2) + if len(s) != 2 { + return + } + m = &migration{ + Key: s[0], + Name: strcase.ToCamel(s[1]), + } + return +} + +var templ = template.Must(template.New("generated").Parse(`package migration + +import "context" + +func {{.Name}}(ctx context.Context, c DBClient) error { + // TODO: Write your migration code here + + return nil +} +`)) + +var templ2 = template.Must(template.New("generated2").Parse(`// Code generated by migrategen, DO NOT EDIT. 
+ +package migration + +var migrations = map[int64]MigrationFunc{ +{{range .}} {{.Key}}: {{.Name}}, +{{end}}} +`)) diff --git a/tools/cmd/shapefiletest/main.go b/tools/cmd/shapefiletest/main.go new file mode 100644 index 000000000..da7b0af6c --- /dev/null +++ b/tools/cmd/shapefiletest/main.go @@ -0,0 +1,46 @@ +package main + +import ( + "log" + "strconv" + + "github.com/jonas-p/go-shp" +) + +func main() { + // points to write + points := []shp.Point{ + {X: 10.0, Y: 10.0}, + {X: 10.0, Y: 15.0}, + {X: 15.0, Y: 15.0}, + {X: 15.0, Y: 10.0}, + } + + // fields to write + fields := []shp.Field{ + // String attribute field with length 25 + shp.StringField("NAME", 25), + } + + // create and open a shapefile for writing points + shape, err := shp.Create("points.shp", shp.POINT) + if err != nil { + log.Fatal(err) + } + defer shape.Close() + + // setup fields for attributes + if err := shape.SetFields(fields); err != nil { + log.Fatal(err) + } + + // write points and attributes + for n, point := range points { + shape.Write(&point) + + // write attribute for object n for field 0 (NAME) + if err := shape.WriteAttribute(n, 0, "Point "+strconv.Itoa(n+1)); err != nil { + log.Fatal(err) + } + } +} From f4c3b00aa41b875b6ea1e6957f3bdff1cdc8b946 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 13 May 2021 17:02:47 +0900 Subject: [PATCH 002/253] fix: add mutex for each memory repo (#2) --- internal/infrastructure/memory/asset.go | 14 ++++++ internal/infrastructure/memory/config.go | 8 +++ internal/infrastructure/memory/dataset.go | 32 ++++++++++++ .../infrastructure/memory/dataset_schema.go | 38 ++++++++++++++ internal/infrastructure/memory/layer.go | 50 +++++++++++++++++++ internal/infrastructure/memory/plugin.go | 11 ++++ internal/infrastructure/memory/project.go | 23 +++++++++ internal/infrastructure/memory/property.go | 29 +++++++++++ .../infrastructure/memory/property_schema.go | 14 ++++++ internal/infrastructure/memory/scene.go | 26 ++++++++++ 
internal/infrastructure/memory/team.go | 23 +++++++++ internal/infrastructure/memory/user.go | 23 +++++++++ 12 files changed, 291 insertions(+) diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index e528ebb31..2f0fabff7 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" @@ -11,6 +12,7 @@ import ( ) type Asset struct { + lock sync.Mutex data map[id.AssetID]*asset.Asset } @@ -21,6 +23,9 @@ func NewAsset() repo.Asset { } func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + r.lock.Lock() + defer r.lock.Unlock() + d, ok := r.data[id] if ok { return d, nil @@ -29,16 +34,25 @@ func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, erro } func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data[a.ID()] = a return nil } func (r *Asset) Remove(ctx context.Context, id id.AssetID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, id) return nil } func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*asset.Asset{} for _, d := range r.data { if d.Team() == id { diff --git a/internal/infrastructure/memory/config.go b/internal/infrastructure/memory/config.go index d88f03496..8b8cd2bb9 100644 --- a/internal/infrastructure/memory/config.go +++ b/internal/infrastructure/memory/config.go @@ -2,12 +2,14 @@ package memory import ( "context" + "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/config" ) type Config struct { + lock sync.Mutex data *config.Config } @@ -16,10 +18,16 @@ func NewConfig() 
repo.Config { } func (r *Config) Load(ctx context.Context) (*config.Config, error) { + r.lock.Lock() + defer r.lock.Unlock() + return r.data, nil } func (r *Config) Save(ctx context.Context, c *config.Config) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data = c return nil } diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go index 77d871003..f7ada2dea 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" @@ -11,6 +12,7 @@ import ( ) type Dataset struct { + lock sync.Mutex data map[id.DatasetID]dataset.Dataset } @@ -21,6 +23,9 @@ func NewDataset() repo.Dataset { } func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID, f []id.SceneID) (*dataset.Dataset, error) { + r.lock.Lock() + defer r.lock.Unlock() + p, ok := r.data[id] if ok && isSceneIncludes(p.Scene(), f) { return &p, nil @@ -29,6 +34,9 @@ func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID, f []id.SceneID) } func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.List{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -43,6 +51,9 @@ func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.Scen } func (r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, f []id.SceneID, p *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.List{} for _, d := range r.data { if d.Schema() == id && isSceneIncludes(d.Scene(), f) { @@ -69,6 +80,9 @@ func (r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, f []i } func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) 
(dataset.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.List{} for _, d := range r.data { if d.Schema() == id { @@ -80,6 +94,9 @@ func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) (d } func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, f []id.SceneID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := make(dataset.List, 0, len(fields)) next := i for _, nextField := range fields { @@ -100,11 +117,17 @@ func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, f []id.SceneID, } func (r *Dataset) Save(ctx context.Context, d *dataset.Dataset) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data[d.ID()] = *d return nil } func (r *Dataset) SaveAll(ctx context.Context, dl dataset.List) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, d := range dl { r.data[d.ID()] = *d } @@ -112,11 +135,17 @@ func (r *Dataset) SaveAll(ctx context.Context, dl dataset.List) error { } func (r *Dataset) Remove(ctx context.Context, id id.DatasetID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, id) return nil } func (r *Dataset) RemoveAll(ctx context.Context, ids []id.DatasetID) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, id := range ids { delete(r.data, id) } @@ -124,6 +153,9 @@ func (r *Dataset) RemoveAll(ctx context.Context, ids []id.DatasetID) error { } func (r *Dataset) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + for did, d := range r.data { if d.Scene() == sceneID { delete(r.data, did) diff --git a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go index be3772606..45008e6b4 100644 --- a/internal/infrastructure/memory/dataset_schema.go +++ b/internal/infrastructure/memory/dataset_schema.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "github.com/reearth/reearth-backend/internal/usecase" 
"github.com/reearth/reearth-backend/internal/usecase/repo" @@ -11,6 +12,7 @@ import ( ) type DatasetSchema struct { + lock sync.Mutex data map[id.DatasetSchemaID]dataset.Schema } @@ -21,6 +23,9 @@ func NewDatasetSchema() repo.DatasetSchema { } func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID, f []id.SceneID) (*dataset.Schema, error) { + r.lock.Lock() + defer r.lock.Unlock() + p, ok := r.data[id] if ok { return &p, nil @@ -29,6 +34,9 @@ func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID, f [ } func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.SchemaList{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -42,6 +50,9 @@ func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, } func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.SchemaList{} for _, d := range r.data { if d.Scene() == s { @@ -68,6 +79,9 @@ func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecas } func (r *DatasetSchema) FindBySceneAll(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.SchemaList{} for _, d := range r.data { if d.Scene() == s { @@ -79,6 +93,9 @@ func (r *DatasetSchema) FindBySceneAll(ctx context.Context, s id.SceneID) (datas } func (r *DatasetSchema) FindAllDynamicByScene(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.SchemaList{} for _, d := range r.data { if d.Scene() == s && d.Dynamic() { @@ -90,6 +107,9 @@ func (r *DatasetSchema) FindAllDynamicByScene(ctx context.Context, s id.SceneID) } func (r *DatasetSchema) FindDynamicByID(ctx 
context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { + r.lock.Lock() + defer r.lock.Unlock() + p, ok := r.data[id] if ok && p.Dynamic() { return &p, nil @@ -98,6 +118,9 @@ func (r *DatasetSchema) FindDynamicByID(ctx context.Context, id id.DatasetSchema } func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src dataset.Source) (dataset.SchemaList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := dataset.SchemaList{} for _, d := range r.data { if d.Scene() == s && d.Source() == src { @@ -109,11 +132,17 @@ func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, } func (r *DatasetSchema) Save(ctx context.Context, d *dataset.Schema) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data[d.ID()] = *d return nil } func (r *DatasetSchema) SaveAll(ctx context.Context, dl dataset.SchemaList) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, d := range dl { r.data[d.ID()] = *d } @@ -121,11 +150,17 @@ func (r *DatasetSchema) SaveAll(ctx context.Context, dl dataset.SchemaList) erro } func (r *DatasetSchema) Remove(ctx context.Context, id id.DatasetSchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, id) return nil } func (r *DatasetSchema) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, id := range ids { delete(r.data, id) } @@ -133,6 +168,9 @@ func (r *DatasetSchema) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) } func (r *DatasetSchema) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + for did, d := range r.data { if d.Scene() == sceneID { delete(r.data, did) diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index fe44513d9..1e7ce7012 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" 
"github.com/reearth/reearth-backend/internal/usecase/repo" err1 "github.com/reearth/reearth-backend/pkg/error" @@ -10,6 +11,7 @@ import ( ) type Layer struct { + lock sync.Mutex data map[id.LayerID]layer.Layer } @@ -20,6 +22,9 @@ func NewLayer() repo.Layer { } func (r *Layer) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (layer.Layer, error) { + r.lock.Lock() + defer r.lock.Unlock() + res, ok := r.data[id] if ok && isSceneIncludes(res.Scene(), f) { return res, nil @@ -28,6 +33,9 @@ func (r *Layer) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (la } func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := layer.List{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -42,6 +50,9 @@ func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) } func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := layer.GroupList{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -58,6 +69,9 @@ func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.Sce } func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := layer.ItemList{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -74,6 +88,9 @@ func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.Scen } func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Item, error) { + r.lock.Lock() + defer r.lock.Unlock() + d, ok := r.data[id] if !ok { return &layer.Item{}, nil @@ -87,6 +104,9 @@ func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) } func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { + 
r.lock.Lock() + defer r.lock.Unlock() + d, ok := r.data[id] if !ok { return &layer.Group{}, nil @@ -100,6 +120,9 @@ func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID } func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id.SceneID, ds id.DatasetSchemaID) (layer.GroupList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := layer.GroupList{} for _, l := range r.data { if l.Scene() != s { @@ -115,6 +138,9 @@ func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id } func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { + r.lock.Lock() + defer r.lock.Unlock() + for _, l := range r.data { if !isSceneIncludes(l.Scene(), f) { continue @@ -135,6 +161,9 @@ func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID, f []id.Sce } func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() + for _, l := range r.data { if !isSceneIncludes(l.Scene(), f) { continue @@ -153,6 +182,9 @@ func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneI } func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + res := layer.List{} for _, l := range r.data { if l.Scene() == sceneID { @@ -163,6 +195,9 @@ func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List } func (r *Layer) FindAllByDatasetSchema(ctx context.Context, datasetSchemaID id.DatasetSchemaID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + res := layer.List{} for _, l := range r.data { if d := layer.ToLayerGroup(l).LinkedDatasetSchema(); d != nil && *d == datasetSchemaID { @@ -173,11 +208,17 @@ func (r *Layer) FindAllByDatasetSchema(ctx context.Context, datasetSchemaID id.D } func (r *Layer) Save(ctx context.Context, l layer.Layer) error { + r.lock.Lock() 
+ defer r.lock.Unlock() + r.data[l.ID()] = l return nil } func (r *Layer) SaveAll(ctx context.Context, ll layer.List) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, l := range ll { layer := *l r.data[layer.ID()] = layer @@ -186,11 +227,17 @@ func (r *Layer) SaveAll(ctx context.Context, ll layer.List) error { } func (r *Layer) Remove(ctx context.Context, id id.LayerID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, id) return nil } func (r *Layer) RemoveAll(ctx context.Context, ids []id.LayerID) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, id := range ids { delete(r.data, id) } @@ -198,6 +245,9 @@ func (r *Layer) RemoveAll(ctx context.Context, ids []id.LayerID) error { } func (r *Layer) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + for lid, p := range r.data { if p.Scene() == sceneID { delete(r.data, lid) diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index 07a51a012..74a67eb98 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -3,6 +3,7 @@ package memory import ( "context" "errors" + "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" @@ -12,6 +13,7 @@ import ( ) type Plugin struct { + lock sync.Mutex data []*plugin.Plugin } @@ -22,6 +24,9 @@ func NewPlugin() repo.Plugin { } func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { + r.lock.Lock() + defer r.lock.Unlock() + if p := builtin.GetPlugin(id); p != nil { return p, nil } @@ -35,6 +40,9 @@ func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, } func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*plugin.Plugin{} for _, id := range ids { if p := builtin.GetPlugin(id); p != nil { @@ -54,6 +62,9 @@ 
func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Pl } func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error { + r.lock.Lock() + defer r.lock.Unlock() + if p.ID().System() { return errors.New("cannnot save system plugin") } diff --git a/internal/infrastructure/memory/project.go b/internal/infrastructure/memory/project.go index d2d60d5ff..54bae3677 100644 --- a/internal/infrastructure/memory/project.go +++ b/internal/infrastructure/memory/project.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "time" "github.com/reearth/reearth-backend/internal/usecase" @@ -12,6 +13,7 @@ import ( ) type Project struct { + lock sync.Mutex data map[id.ProjectID]*project.Project } @@ -22,6 +24,9 @@ func NewProject() repo.Project { } func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*project.Project{} for _, d := range r.data { if d.Team() == id { @@ -47,6 +52,9 @@ func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagin } func (r *Project) FindByIDs(ctx context.Context, ids []id.ProjectID, filter []id.TeamID) ([]*project.Project, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*project.Project{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -61,6 +69,9 @@ func (r *Project) FindByIDs(ctx context.Context, ids []id.ProjectID, filter []id } func (r *Project) FindByID(ctx context.Context, id id.ProjectID, filter []id.TeamID) (*project.Project, error) { + r.lock.Lock() + defer r.lock.Unlock() + p, ok := r.data[id] if ok && isTeamIncludes(p.Team(), filter) { return p, nil @@ -69,6 +80,9 @@ func (r *Project) FindByID(ctx context.Context, id id.ProjectID, filter []id.Tea } func (r *Project) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { + r.lock.Lock() + defer r.lock.Unlock() + if name == "" { return nil, nil } @@ -81,6 
+95,9 @@ func (r *Project) FindByPublicName(ctx context.Context, name string) (*project.P } func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err error) { + r.lock.Lock() + defer r.lock.Unlock() + for _, p := range r.data { if p.Team() == team { c++ @@ -90,12 +107,18 @@ func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err e } func (r *Project) Save(ctx context.Context, p *project.Project) error { + r.lock.Lock() + defer r.lock.Unlock() + p.SetUpdatedAt(time.Now()) r.data[p.ID()] = p return nil } func (r *Project) Remove(ctx context.Context, projectID id.ProjectID) error { + r.lock.Lock() + defer r.lock.Unlock() + for sid := range r.data { if sid == projectID { delete(r.data, sid) diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index 0985a2f2e..4ece98327 100644 --- a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" @@ -11,6 +12,7 @@ import ( ) type Property struct { + lock sync.Mutex data map[id.PropertyID]property.Property } @@ -21,6 +23,9 @@ func NewProperty() repo.Property { } func (r *Property) FindByID(ctx context.Context, id id.PropertyID, f []id.SceneID) (*property.Property, error) { + r.lock.Lock() + defer r.lock.Unlock() + p, ok := r.data[id] if ok && isSceneIncludes(p.Scene(), f) { return &p, nil @@ -29,6 +34,9 @@ func (r *Property) FindByID(ctx context.Context, id id.PropertyID, f []id.SceneI } func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := property.List{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -43,6 +51,9 @@ func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.Sc } func (r *Property) FindByDataset(ctx 
context.Context, sid id.DatasetSchemaID, did id.DatasetID) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := property.List{} for _, p := range r.data { if p.IsDatasetLinked(sid, did) { @@ -53,6 +64,9 @@ func (r *Property) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, di } func (r *Property) FindLinkedAll(ctx context.Context, s id.SceneID) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := property.List{} for _, p := range r.data { if p.Scene() != s { @@ -67,11 +81,17 @@ func (r *Property) FindLinkedAll(ctx context.Context, s id.SceneID) (property.Li } func (r *Property) Save(ctx context.Context, p *property.Property) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data[p.ID()] = *p return nil } func (r *Property) SaveAll(ctx context.Context, pl property.List) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, p := range pl { r.data[p.ID()] = *p } @@ -79,11 +99,17 @@ func (r *Property) SaveAll(ctx context.Context, pl property.List) error { } func (r *Property) Remove(ctx context.Context, id id.PropertyID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, id) return nil } func (r *Property) RemoveAll(ctx context.Context, ids []id.PropertyID) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, id := range ids { delete(r.data, id) } @@ -91,6 +117,9 @@ func (r *Property) RemoveAll(ctx context.Context, ids []id.PropertyID) error { } func (r *Property) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + for pid, p := range r.data { if p.Scene() == sceneID { delete(r.data, pid) diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index 124bfc3de..3c2cda5ad 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -3,6 +3,7 @@ package memory import ( "context" "errors" + "sync" 
"github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" @@ -12,6 +13,7 @@ import ( ) type PropertySchema struct { + lock sync.Mutex data map[id.PropertySchemaID]property.Schema } @@ -22,6 +24,9 @@ func NewPropertySchema() repo.PropertySchema { } func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { + r.lock.Lock() + defer r.lock.Unlock() + if ps := builtin.GetPropertySchema(id); ps != nil { return ps, nil } @@ -33,6 +38,9 @@ func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) ( } func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := property.SchemaList{} for _, id := range ids { if ps := builtin.GetPropertySchema(id); ps != nil { @@ -49,6 +57,9 @@ func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaI } func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { + r.lock.Lock() + defer r.lock.Unlock() + if p.ID().System() { return errors.New("cannnot save system property schema") } @@ -57,6 +68,9 @@ func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { } func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, ps := range p { if err := r.Save(ctx, ps); err != nil { return err diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index 86ceb9e95..616b89202 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "time" err1 "github.com/reearth/reearth-backend/pkg/error" @@ -12,6 +13,7 @@ import ( ) type Scene struct { + lock sync.Mutex data map[id.SceneID]scene.Scene } @@ -22,6 +24,9 @@ func NewScene() repo.Scene { } func (r *Scene) 
FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*scene.Scene, error) { + r.lock.Lock() + defer r.lock.Unlock() + s, ok := r.data[id] if ok && isTeamIncludes(s.Team(), f) { return &s, nil @@ -30,6 +35,9 @@ func (r *Scene) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*sc } func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) ([]*scene.Scene, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*scene.Scene{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -45,6 +53,9 @@ func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) } func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamID) (*scene.Scene, error) { + r.lock.Lock() + defer r.lock.Unlock() + for _, d := range r.data { if d.Project() == id && isTeamIncludes(d.Team(), f) { return &d, nil @@ -54,6 +65,9 @@ func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamI } func (r *Scene) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []id.SceneID{} for _, d := range r.data { if isTeamIncludes(d.Team(), teams) { @@ -64,6 +78,9 @@ func (r *Scene) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.Scen } func (r *Scene) HasSceneTeam(ctx context.Context, id id.SceneID, teams []id.TeamID) (bool, error) { + r.lock.Lock() + defer r.lock.Unlock() + s, ok := r.data[id] if !ok { return false, err1.ErrNotFound @@ -72,6 +89,9 @@ func (r *Scene) HasSceneTeam(ctx context.Context, id id.SceneID, teams []id.Team } func (r *Scene) HasScenesTeam(ctx context.Context, id []id.SceneID, teams []id.TeamID) ([]bool, error) { + r.lock.Lock() + defer r.lock.Unlock() + if id == nil { return nil, nil } @@ -95,12 +115,18 @@ func (r *Scene) HasScenesTeam(ctx context.Context, id []id.SceneID, teams []id.T } func (r *Scene) Save(ctx context.Context, s *scene.Scene) error { + r.lock.Lock() + defer r.lock.Unlock() + 
s.SetUpdatedAt(time.Now()) r.data[s.ID()] = *s return nil } func (r *Scene) Remove(ctx context.Context, sceneID id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + for sid := range r.data { if sid == sceneID { delete(r.data, sid) diff --git a/internal/infrastructure/memory/team.go b/internal/infrastructure/memory/team.go index d6d446e1e..9f0b5addc 100644 --- a/internal/infrastructure/memory/team.go +++ b/internal/infrastructure/memory/team.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" err1 "github.com/reearth/reearth-backend/pkg/error" @@ -10,6 +11,7 @@ import ( ) type Team struct { + lock sync.Mutex data map[id.TeamID]user.Team } @@ -20,6 +22,9 @@ func NewTeam() repo.Team { } func (r *Team) FindByUser(ctx context.Context, i id.UserID) ([]*user.Team, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*user.Team{} for _, d := range r.data { if d.Members().ContainsUser(i) { @@ -30,6 +35,9 @@ func (r *Team) FindByUser(ctx context.Context, i id.UserID) ([]*user.Team, error } func (r *Team) FindByIDs(ctx context.Context, ids []id.TeamID) ([]*user.Team, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*user.Team{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -42,6 +50,9 @@ func (r *Team) FindByIDs(ctx context.Context, ids []id.TeamID) ([]*user.Team, er } func (r *Team) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { + r.lock.Lock() + defer r.lock.Unlock() + d, ok := r.data[id] if ok { return &d, nil @@ -50,11 +61,17 @@ func (r *Team) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { } func (r *Team) Save(ctx context.Context, t *user.Team) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data[t.ID()] = *t return nil } func (r *Team) SaveAll(ctx context.Context, teams []*user.Team) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, t := range teams { r.data[t.ID()] = *t } @@ -62,11 +79,17 @@ func (r *Team) 
SaveAll(ctx context.Context, teams []*user.Team) error { } func (r *Team) Remove(ctx context.Context, id id.TeamID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, id) return nil } func (r *Team) RemoveAll(ctx context.Context, ids []id.TeamID) error { + r.lock.Lock() + defer r.lock.Unlock() + for _, id := range ids { delete(r.data, id) } diff --git a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go index ec26c0064..34ae4b1dc 100644 --- a/internal/infrastructure/memory/user.go +++ b/internal/infrastructure/memory/user.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" err1 "github.com/reearth/reearth-backend/pkg/error" @@ -10,6 +11,7 @@ import ( ) type User struct { + lock sync.Mutex data map[id.UserID]user.User } @@ -20,6 +22,9 @@ func NewUser() repo.User { } func (r *User) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + result := []*user.User{} for _, id := range ids { if d, ok := r.data[id]; ok { @@ -32,6 +37,9 @@ func (r *User) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, er } func (r *User) FindByID(ctx context.Context, id id.UserID) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + d, ok := r.data[id] if ok { return &d, nil @@ -40,11 +48,17 @@ func (r *User) FindByID(ctx context.Context, id id.UserID) (*user.User, error) { } func (r *User) Save(ctx context.Context, u *user.User) error { + r.lock.Lock() + defer r.lock.Unlock() + r.data[u.ID()] = *u return nil } func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + if auth0sub == "" { return nil, err1.ErrInvalidParams } @@ -59,6 +73,9 @@ func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, } func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error) { + r.lock.Lock() + 
defer r.lock.Unlock() + if email == "" { return nil, err1.ErrInvalidParams } @@ -73,6 +90,9 @@ func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error } func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + if nameOrEmail == "" { return nil, err1.ErrInvalidParams } @@ -87,6 +107,9 @@ func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user } func (r *User) Remove(ctx context.Context, user id.UserID) error { + r.lock.Lock() + defer r.lock.Unlock() + delete(r.data, user) return nil } From c3758e7e1fceccb44289904d386f63f0e4b9af9c Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 18 May 2021 13:36:05 +0900 Subject: [PATCH 003/253] feat: support Auth0 audience (#3) --- internal/app/config.go | 1 + internal/app/jwt.go | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/app/config.go b/internal/app/config.go index 35b26229a..6931376d9 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -29,6 +29,7 @@ type Config struct { type Auth0Config struct { Domain string + Audience string ClientID string ClientSecret string WebClientID string diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 11f1d98b6..6f7a2ad79 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -116,7 +116,7 @@ func addPathSep(path string) string { func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { iss := addPathSep(cfg.Config.Auth0.Domain) - aud := iss + "api/v2/" + aud := cfg.Config.Auth0.Audience return func(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { @@ -134,8 +134,7 @@ func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } // Verify 'aud' claim - checkAud := claims.VerifyAudience(aud, false) - if !checkAud { + if aud != "" && !claims.VerifyAudience(aud, true) { return errorResponse(c, "invalid audience") } From 16c0370c1a7e9e74697986bb836e0dd399049e49 Mon 
Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 24 May 2021 18:37:37 +0900 Subject: [PATCH 004/253] chore: enable nightly release workflow --- .github/workflows/main.yml | 24 ++++++++++---------- .github/workflows/nightly.yml | 41 +++++++++++++++++++++++++++++++++++ .github/workflows/pr.yml | 24 ++++++++++---------- 3 files changed, 65 insertions(+), 24 deletions(-) create mode 100644 .github/workflows/nightly.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f03af0f8d..642e1a444 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,19 +4,19 @@ on: branches: - main jobs: - build: + main: name: main runs-on: ubuntu-latest steps: - name: set up - uses: actions/setup-go@v1 + uses: actions/setup-go@v2 with: go-version: 1.16 id: go - name: checkout uses: actions/checkout@v2 - name: cache - uses: actions/cache@v1 + uses: actions/cache@v2 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} @@ -33,12 +33,12 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt - - name: Slack Notification - uses: 8398a7/action-slack@v3 - if: always() - with: - status: ${{ job.status }} - fields: repo,commit,action,workflow - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + # - name: Slack Notification + # uses: 8398a7/action-slack@v3 + # if: always() + # with: + # status: ${{ job.status }} + # fields: repo,commit,action,workflow + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml new file mode 100644 index 000000000..a864e99d1 --- /dev/null +++ b/.github/workflows/nightly.yml @@ -0,0 +1,41 @@ +name: nightly +on: + workflow_run: + workflows: + - main + types: + - completed +jobs: + nightly: + name: nightly + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + steps: + - 
name: set up + uses: actions/setup-go@v2 + with: + go-version: 1.16 + id: go + - name: checkout + uses: actions/checkout@v2 + - name: cache + uses: actions/cache@v2 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - name: Build + run: GOOS=linux GOARCH=amd64 go build -o reearth-backend -tags "release" "-ldflags=-X main.version=nightly -s -buildid=" -trimpath ./cmd/reearth + - name: Package + run: tar -zcvf reearth-backend_linux_amd64.tar.gz reearth-backend + - uses: ncipollo/release-action@v1 + with: + allowUpdates: true + artifacts: reearth-backend_*.tar.gz + artifactContentType: application/gzip + commit: ${{ env.GITHUB_SHA }} + name: Nightly release + prerelease: true + tag: nightly + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 1dc172971..cae4c437e 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -1,19 +1,19 @@ name: pr on: [pull_request] jobs: - build: + pr: name: pr runs-on: ubuntu-latest steps: - name: set up - uses: actions/setup-go@v1 + uses: actions/setup-go@v2 with: go-version: 1.16 id: go - name: checkout uses: actions/checkout@v2 - name: cache - uses: actions/cache@v1 + uses: actions/cache@v2 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} @@ -30,12 +30,12 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt - - name: Slack Notification - uses: 8398a7/action-slack@v3 - if: always() - with: - status: ${{ job.status }} - fields: repo,commit,action,workflow - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + # - name: Slack Notification + # uses: 8398a7/action-slack@v3 + # if: always() + # with: + # status: ${{ job.status }} + # fields: repo,commit,action,workflow + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} From 
72e3edc85fcd17eea9a903319e2f55f0a6511f6d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 26 May 2021 14:45:05 +0900 Subject: [PATCH 005/253] fix: auth0 audience in reearth_config.json --- internal/app/web.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/internal/app/web.go b/internal/app/web.go index 2e9ebdf1f..6bf1d6d50 100644 --- a/internal/app/web.go +++ b/internal/app/web.go @@ -24,6 +24,9 @@ func web(e *echo.Echo, wc WebConfig, ac Auth0Config) { if ac.WebClientID != "" { config["auth0ClientId"] = ac.WebClientID } + if ac.Audience != "" { + config["auth0Audience"] = ac.Audience + } for k, v := range wc { config[k] = v } From d6c70b11ce4f453f16b8d3af1956d99f7ef384e7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 27 May 2021 14:27:24 +0900 Subject: [PATCH 006/253] fix load .env.local --- .env | 4 ++-- internal/app/app.go | 7 ++----- internal/app/config.go | 11 ++++------- internal/app/file.go | 9 +++------ 4 files changed, 11 insertions(+), 20 deletions(-) diff --git a/.env b/.env index 7e9817c1f..89a0c0ccc 100644 --- a/.env +++ b/.env @@ -1,6 +1,6 @@ +# PLEASE COPY THIS TO .env.local AND EDIT IT. DO NOT EDIT THIS FILE. 
REEARTH_AUTH0_DOMAIN= +REEARTH_AUTH0_AUDIENCE= REEARTH_AUTH0_CLIENTID= REEARTH_AUTH0_CLIENTSECRET= REEARTH_AUTH0_WEBCLIENTID= -REEARTH_ASSETBASEURL=http://localhost:8080/assets -REEARTH_SERVEFILES=1 diff --git a/internal/app/app.go b/internal/app/app.go index 58da88efa..f3cb0d272 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -79,11 +79,6 @@ func initAppEcho(cfg *ServerConfig) *echo.Echo { )) } - if cfg.Config.ServeFiles { - files := e.Group("") - serveFiles(e, files, cfg.Gateways.File, cfg.Debug) - } - e.GET("/api/published/:name", apiPublished(cfg)) e.GET("/api/published_data/:name", apiPublishedData(cfg)) api := e.Group("/api") @@ -95,6 +90,8 @@ func initAppEcho(cfg *ServerConfig) *echo.Echo { publicRoute(e, api, cfg.Config, cfg.Repos, cfg.Gateways) graphqlRoute(e, privateApi, cfg, controllers) userRoute(e, privateApi, cfg.Repos) + + serveFiles(e, cfg.Gateways.File) web(e, cfg.Config.Web, cfg.Config.Auth0) return e diff --git a/internal/app/config.go b/internal/app/config.go index 6931376d9..c2c8205bd 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -5,6 +5,7 @@ import ( "github.com/joho/godotenv" "github.com/kelseyhightower/envconfig" + "github.com/reearth/reearth-backend/pkg/log" ) const configPrefix = "reearth" @@ -20,7 +21,6 @@ type Config struct { Tracer string TracerSample float64 GCS GCSConfig - ServeFiles bool AssetBaseURL string Origins []string Web WebConfig @@ -45,15 +45,12 @@ type GCSConfig struct { } func ReadConfig(debug bool) (*Config, error) { - envs := []string{} if debug { - // .env file is only available in debug environment - envs = append(envs, ".env", ".env.local") - } - for _, e := range envs { - if err := godotenv.Load(e); err != nil && !os.IsNotExist(err) { + // .env.local file is only available in debug environment + if err := godotenv.Load(".env.local"); err != nil && !os.IsNotExist(err) { return nil, err } + log.Infof("config: .env.local loaded") } var c Config diff --git a/internal/app/file.go 
b/internal/app/file.go index 3f8a3821c..1d6ba1f03 100644 --- a/internal/app/file.go +++ b/internal/app/file.go @@ -14,9 +14,7 @@ import ( func serveFiles( ec *echo.Echo, - r *echo.Group, repo gateway.File, - debug bool, ) { if repo == nil { return @@ -39,7 +37,7 @@ func serveFiles( } } - r.GET( + ec.GET( "/assets/:filename", fileHandler(func(ctx echo.Context) (io.Reader, string, error) { filename := ctx.Param("filename") @@ -48,7 +46,7 @@ func serveFiles( }), ) - r.GET( + ec.GET( "/plugins/:name/:version/:filename", fileHandler(func(ctx echo.Context) (io.Reader, string, error) { pid, err := id.PluginIDFrom(ctx.Param("name") + "#" + ctx.Param("version")) @@ -61,7 +59,7 @@ func serveFiles( }), ) - r.GET( + ec.GET( "/published/:name", fileHandler(func(ctx echo.Context) (io.Reader, string, error) { name := ctx.Param("name") @@ -69,5 +67,4 @@ func serveFiles( return r, name + ".json", err }), ) - } From 835a0242c9f75e2156924ae30174cac1c31d1d04 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 27 May 2021 14:28:28 +0900 Subject: [PATCH 007/253] fix: auth0 domain and multiple auds --- internal/app/jwt.go | 66 +++++++++++++++++++++++++++++++++------------ 1 file changed, 49 insertions(+), 17 deletions(-) diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 6f7a2ad79..44c2f018f 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -5,6 +5,7 @@ import ( "encoding/json" "errors" "net/http" + "strings" "sync" jwtmiddleware "github.com/auth0/go-jwt-middleware" @@ -25,7 +26,6 @@ const ( contextUser contextKey = "reearth_user" ) -// JSONWebKeys _ type JSONWebKeys struct { Kty string `json:"kty"` Kid string `json:"kid"` @@ -35,18 +35,15 @@ type JSONWebKeys struct { X5c []string `json:"x5c"` } -// Jwks _ type Jwks interface { GetJwks(string) ([]JSONWebKeys, error) } -// JwksSyncOnce _ type JwksSyncOnce struct { jwks []JSONWebKeys once sync.Once } -// GetJwks _ func (jso *JwksSyncOnce) GetJwks(publicKeyURL string) ([]JSONWebKeys, error) { var err error 
jso.once.Do(func() { @@ -104,18 +101,8 @@ func getPemCert(token *jwt.Token, publicKeyURL string, jwks Jwks) (string, error return cert, nil } -func addPathSep(path string) string { - if path == "" { - return path - } - if path[len(path)-1] != '/' { - path += "/" - } - return path -} - func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { - iss := addPathSep(cfg.Config.Auth0.Domain) + iss := urlFromDomain(cfg.Config.Auth0.Domain) aud := cfg.Config.Auth0.Audience return func(next echo.HandlerFunc) echo.HandlerFunc { @@ -134,7 +121,7 @@ func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } // Verify 'aud' claim - if aud != "" && !claims.VerifyAudience(aud, true) { + if !verifyAudience(claims, aud) { return errorResponse(c, "invalid audience") } @@ -155,7 +142,7 @@ func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } func jwtEchoMiddleware(jwks Jwks, cfg *ServerConfig) echo.MiddlewareFunc { - jwksURL := addPathSep(cfg.Config.Auth0.Domain) + ".well-known/jwks.json" + jwksURL := urlFromDomain(cfg.Config.Auth0.Domain) + ".well-known/jwks.json" jwtMiddleware := jwtmiddleware.New(jwtmiddleware.Options{ CredentialsOptional: cfg.Debug, @@ -185,6 +172,51 @@ func jwtEchoMiddleware(jwks Jwks, cfg *ServerConfig) echo.MiddlewareFunc { } } +func urlFromDomain(path string) string { + if path == "" { + return path + } + if !strings.HasPrefix(path, "http://") && !strings.HasPrefix(path, "https://") { + path = "https://" + path + } + if path[len(path)-1] != '/' { + path += "/" + } + return path +} + +// WORKAROUND: https://github.com/dgrijalva/jwt-go/pull/308 should be merged +func verifyAudience(claims jwt.MapClaims, aud string) bool { + if aud == "" { + return true + } + + auds, ok := claims["aud"].([]string) + if !ok { + auds2, ok := claims["aud"].([]interface{}) + if ok { + for _, a := range auds2 { + if aa, ok := a.(string); ok { + auds = append(auds, aa) + } + } + } else { + a, ok := claims["aud"].(string) + if !ok || a == "" { + return false 
+ } + auds = append(auds, a) + } + } + + for _, a := range auds { + if jwt.MapClaims(map[string]interface{}{"aud": a}).VerifyAudience(aud, true) { + return true + } + } + return false +} + func errorResponse(c echo.Context, err string) error { res := map[string]string{"error": err} return c.JSON(http.StatusUnauthorized, res) From 8196397ba2d443fdeec5373ea49ec3abc4b1a29a Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 28 May 2021 18:53:51 +0900 Subject: [PATCH 008/253] chore: set up workflows --- .github/workflows/deploy_test.yml | 48 +++++++++++++++++++++++++++++++ .github/workflows/main.yml | 22 ++++++++------ .github/workflows/nightly.yml | 42 +++++++++++++++++++++++++++ .github/workflows/pr.yml | 22 ++++++++------ 4 files changed, 116 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/deploy_test.yml diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml new file mode 100644 index 000000000..7b24428ba --- /dev/null +++ b/.github/workflows/deploy_test.yml @@ -0,0 +1,48 @@ +name: deploy_test +on: + workflow_run: + workflows: + - nightly + types: + - completed +env: + IMAGE: reearth/reearth-backend:nightly + IMAGE_GCP: us.gcr.io/reearth-oss/reearth-backend:nightly + GCP_REGION: us-central1 +jobs: + deploy_test: + name: deploy_test + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + steps: + - uses: google-github-actions/setup-gcloud@master + with: + project_id: ${{ secrets.GCP_PROJECT }} + service_account_key: ${{ secrets.GCP_SA_KEY }} + export_default_credentials: true + - name: docker push + run: | + docker pull $IMAGE + docker tag $IMAGE $IMAGE_GCP + docker push $IMAGE_GCP + - name: Deploy to Cloud Run + run: | + gcloud run deploy reearth-backend \ + --image $IMAGE_GCP \ + --project ${{ secrets.GCP_PROJECT }} \ + --region $GCP_REGION \ + --platform managed \ + --quiet + slack-notification: + if: always() + name: Slack Notification + needs: + - deploy_test + runs-on: 
ubuntu-latest + steps: + - name: Slack Notification + uses: Gamesight/slack-workflow-status@master + if: always() + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 642e1a444..c38e87b46 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -33,12 +33,16 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt - # - name: Slack Notification - # uses: 8398a7/action-slack@v3 - # if: always() - # with: - # status: ${{ job.status }} - # fields: repo,commit,action,workflow - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + slack-notification: + if: always() + name: Slack Notification + needs: + - main + runs-on: ubuntu-latest + steps: + - name: Slack Notification + uses: Gamesight/slack-workflow-status@master + if: always() + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index a864e99d1..55c9b5167 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -39,3 +39,45 @@ jobs: prerelease: true tag: nightly token: ${{ secrets.GITHUB_TOKEN }} + nightly_docker: + name: nightly_docker + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + steps: + - name: checkout + uses: actions/checkout@v2 + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: . 
+ push: true + build-args: VERSION=nightly + tags: reearth/reearth-backend:nightly + cache-from: type=registry,ref=reearth/reearth-backend:nightly + cache-to: type=inline + - name: Image digest + run: echo ${{ steps.docker_build.outputs.digest }} + slack-notification: + if: always() + name: Slack Notification + needs: + - nightly + - nightly_docker + runs-on: ubuntu-latest + steps: + - name: Slack Notification + uses: Gamesight/slack-workflow-status@master + if: always() + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index cae4c437e..1abbcd204 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -30,12 +30,16 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt - # - name: Slack Notification - # uses: 8398a7/action-slack@v3 - # if: always() - # with: - # status: ${{ job.status }} - # fields: repo,commit,action,workflow - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + slack-notification: + if: always() + name: Slack Notification + needs: + - pr + runs-on: ubuntu-latest + steps: + - name: Slack Notification + uses: Gamesight/slack-workflow-status@master + if: always() + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} From c022a49a24c1986d906b3c1b862975f3055d03c7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 28 May 2021 19:25:19 +0900 Subject: [PATCH 009/253] chore: fix workflows --- .github/workflows/deploy_test.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 7b24428ba..b44ca3c36 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -20,6 +20,8 @@ jobs: project_id: ${{ secrets.GCP_PROJECT }} service_account_key: ${{ secrets.GCP_SA_KEY }} export_default_credentials: true + - 
name: Configure docker + run: gcloud auth configure-docker --quiet - name: docker push run: | docker pull $IMAGE From 0125aad5def1caaf10d7ae2cd71d9b14e0778929 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 17:57:39 +0900 Subject: [PATCH 010/253] chore: print config --- .env | 6 ------ .gitignore | 3 ++- internal/app/config.go | 13 +++++++++++++ internal/app/main.go | 1 + internal/usecase/interactor/user.go | 3 --- 5 files changed, 16 insertions(+), 10 deletions(-) delete mode 100644 .env diff --git a/.env b/.env deleted file mode 100644 index 89a0c0ccc..000000000 --- a/.env +++ /dev/null @@ -1,6 +0,0 @@ -# PLEASE COPY THIS TO .env.local AND EDIT IT. DO NOT EDIT THIS FILE. -REEARTH_AUTH0_DOMAIN= -REEARTH_AUTH0_AUDIENCE= -REEARTH_AUTH0_CLIENTID= -REEARTH_AUTH0_CLIENTSECRET= -REEARTH_AUTH0_WEBCLIENTID= diff --git a/.gitignore b/.gitignore index b2965e261..27eed16d7 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,7 @@ __debug_bin /bin /debug /mongo -.env.local +/.env +/.env.local /coverage.txt /web diff --git a/internal/app/config.go b/internal/app/config.go index c2c8205bd..4fb0bf287 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -1,7 +1,9 @@ package app import ( + "fmt" "os" + "strings" "github.com/joho/godotenv" "github.com/kelseyhightower/envconfig" @@ -58,3 +60,14 @@ func ReadConfig(debug bool) (*Config, error) { return &c, err } + +func (c Config) Print() string { + s := fmt.Sprintf("%+v", c) + for _, secret := range []string{c.DB, c.Auth0.ClientSecret} { + if secret == "" { + continue + } + s = strings.ReplaceAll(s, secret, "***") + } + return s +} diff --git a/internal/app/main.go b/internal/app/main.go index 3e0915255..b76c94880 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -16,6 +16,7 @@ func Start(debug bool, version string) { if cerr != nil { log.Fatal(cerr) } + log.Infof("config: %s", conf.Print()) // Init profiler initProfiler(conf.Profiler, version) diff --git 
a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 09be865f8..57af3774f 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -152,7 +152,6 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. } func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operator *usecase.Operator) (u *user.User, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -242,7 +241,6 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato } func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator *usecase.Operator) (u *user.User, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -280,7 +278,6 @@ func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *use } func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase.Operator) (err error) { - tx, err := i.transaction.Begin() if err != nil { return From 37b2c29a9630943eb6edb795d7cb979f50059335 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 18:29:33 +0900 Subject: [PATCH 011/253] refactor: remove unused code --- internal/app/repo.go | 4 +- internal/infrastructure/memory/container.go | 10 +-- internal/infrastructure/memory/dummy.go | 73 --------------------- internal/infrastructure/mongo/container.go | 6 +- internal/infrastructure/mongo/dummy.go | 46 ------------- internal/usecase/interactor/layer_test.go | 3 +- internal/usecase/interactor/team_test.go | 3 +- 7 files changed, 5 insertions(+), 140 deletions(-) delete mode 100644 internal/infrastructure/memory/dummy.go delete mode 100644 internal/infrastructure/mongo/dummy.go diff --git a/internal/app/repo.go b/internal/app/repo.go index dc795a8d4..1a2d2eeb8 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -22,8 +22,6 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. 
repos := &repo.Container{} gateways := &gateway.Container{} - // memory.InitRepos(repos, true) // DEBUG - // Mongo client, err := mongo.Connect( ctx, @@ -35,7 +33,7 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. if err != nil { log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) } - if err := mongorepo.InitRepos(ctx, repos, client, "reearth", debug); err != nil { + if err := mongorepo.InitRepos(ctx, repos, client, "reearth"); err != nil { log.Fatalln(fmt.Sprintf("Failed to init mongo: %+v", err)) } diff --git a/internal/infrastructure/memory/container.go b/internal/infrastructure/memory/container.go index e5ad118ca..933116833 100644 --- a/internal/infrastructure/memory/container.go +++ b/internal/infrastructure/memory/container.go @@ -1,17 +1,14 @@ package memory import ( - "context" - "github.com/reearth/reearth-backend/internal/usecase/repo" ) // InitRepos _ -func InitRepos(c *repo.Container, dummy bool) *repo.Container { +func InitRepos(c *repo.Container) *repo.Container { if c == nil { c = &repo.Container{} } - // not supported: File, PluginRepository c.Asset = NewAsset() c.Config = NewConfig() c.DatasetSchema = NewDatasetSchema() @@ -26,10 +23,5 @@ func InitRepos(c *repo.Container, dummy bool) *repo.Container { c.User = NewUser() c.SceneLock = NewSceneLock() c.Transaction = NewTransaction() - - if dummy { - generateDummyData(context.Background(), c) - } - return c } diff --git a/internal/infrastructure/memory/dummy.go b/internal/infrastructure/memory/dummy.go deleted file mode 100644 index 2f05d885d..000000000 --- a/internal/infrastructure/memory/dummy.go +++ /dev/null @@ -1,73 +0,0 @@ -package memory - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase/repo" - "github.com/reearth/reearth-backend/pkg/builtin" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/layer" - "github.com/reearth/reearth-backend/pkg/project" - 
"github.com/reearth/reearth-backend/pkg/property" - "github.com/reearth/reearth-backend/pkg/scene" - "github.com/reearth/reearth-backend/pkg/user" - "github.com/reearth/reearth-backend/pkg/visualizer" -) - -var ( - DummySceneID = id.MustSceneID("01d7yt9zdyb74v2bvx7r5pkp32") - DummyUserID = id.MustUserID("01d7yt9zdyb74v2bvx76vw0jfj") -) - -func generateDummyData(ctx context.Context, c *repo.Container) { - // team - team, _ := user.NewTeam().NewID().Members(map[id.UserID]user.Role{ - DummyUserID: user.RoleOwner, - }).Build() - _ = c.Team.Save(ctx, team) - // user - user, _ := user.New(). - ID(DummyUserID). - Name("dummy"). - Email("dummy@dummy.com"). - Team(team.ID()). - Build() - _ = c.User.Save(ctx, user) - // project - projectID, _ := id.NewIDWith("01d7yt9zdyb74v2bvx7hwq41v1") - prj, _ := project.New(). - ID(id.ProjectID(projectID)). - Team(team.ID()). - Visualizer(visualizer.VisualizerCesium). - Build() - _ = c.Project.Save(ctx, prj) - // scene's property - sceneProperty, _ := property.New(). - NewID(). - Schema(builtin.PropertySchemaIDVisualizerCesium). - Scene(DummySceneID). - Build() - _ = c.Property.Save(ctx, sceneProperty) - - // root layer - rootLayerID, _ := id.NewIDWith("01d7yt9zdyb74v2bvx7ngfy1hc") - rootLayer, _ := layer.NewGroup().ID(id.LayerID(rootLayerID)).Scene(DummySceneID).Build() - _ = c.Layer.Save(ctx, rootLayer) - - widgets := scene.NewWidgetSystem([]*scene.Widget{}) - plugins := scene.NewPluginSystem([]*scene.Plugin{ - scene.NewPlugin(id.OfficialPluginID, nil), - }) - - // scene - scene, _ := scene.New(). - ID(DummySceneID). - Project(prj.ID()). - Team(team.ID()). - Property(sceneProperty.ID()). - RootLayer(rootLayer.ID()). - WidgetSystem(widgets). - PluginSystem(plugins). 
- Build() - _ = c.Scene.Save(ctx, scene) -} diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index 2bbc9dd33..a112483d3 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -9,7 +9,7 @@ import ( "go.mongodb.org/mongo-driver/mongo" ) -func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databaseName string, dummy bool) error { +func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databaseName string) error { if databaseName == "" { databaseName = "reearth" } @@ -34,9 +34,5 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas return err } - if dummy { - generateDummyData(ctx, c) - } - return nil } diff --git a/internal/infrastructure/mongo/dummy.go b/internal/infrastructure/mongo/dummy.go deleted file mode 100644 index 8b9452a60..000000000 --- a/internal/infrastructure/mongo/dummy.go +++ /dev/null @@ -1,46 +0,0 @@ -package mongo - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" - "github.com/reearth/reearth-backend/pkg/id" - user1 "github.com/reearth/reearth-backend/pkg/user" -) - -func generateDummyData(ctx context.Context, c *repo.Container) { - // check if duumy data are already created - userID, _ := id.UserIDFrom("01d7yt9zdyb74v2bvx76vw0jfj") - if user, err2 := c.User.FindByID(ctx, userID); err2 != nil { - if err2 != err1.ErrNotFound { - panic(err2) - } - } else if user != nil { - return - } - - // team - team, _ := user1.NewTeam().NewID().Personal(true).Members(map[id.UserID]user1.Role{ - userID: user1.RoleOwner, - }).Build() - err := c.Team.Save(ctx, team) - if err != nil { - panic(err) - } - - // user - user, _ := user1.New(). - ID(userID). - Name("dummy"). - Email("dummy@dummy.com"). - Team(team.ID()). 
- Build() - err = c.User.Save(ctx, user) - if err != nil { - panic(err) - } - - println("dummy user: ", userID.String()) - println("dummy team: ", team.ID().String()) -} diff --git a/internal/usecase/interactor/layer_test.go b/internal/usecase/interactor/layer_test.go index 76d36c147..7c9b084f6 100644 --- a/internal/usecase/interactor/layer_test.go +++ b/internal/usecase/interactor/layer_test.go @@ -6,7 +6,6 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/memory" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/scene" @@ -16,7 +15,7 @@ import ( func TestCreateInfobox(t *testing.T) { ctx := context.Background() - db := memory.InitRepos(&repo.Container{}, false) + db := memory.InitRepos(nil) scene, _ := scene.New().NewID().Team(id.NewTeamID()).Project(id.NewProjectID()).RootLayer(id.NewLayerID()).Build() _ = db.Scene.Save(ctx, scene) il := NewLayer(db) diff --git a/internal/usecase/interactor/team_test.go b/internal/usecase/interactor/team_test.go index 41496ebeb..d160009cd 100644 --- a/internal/usecase/interactor/team_test.go +++ b/internal/usecase/interactor/team_test.go @@ -6,7 +6,6 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/memory" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/user" "github.com/stretchr/testify/assert" @@ -15,7 +14,7 @@ import ( func TestCreateTeam(t *testing.T) { ctx := context.Background() - db := memory.InitRepos(&repo.Container{}, false) + db := memory.InitRepos(nil) user := user.New().NewID().Team(id.NewTeamID()).MustBuild() From f17b9d01bd31bbf2770c384eea44cd773b3eebe4 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 18:30:13 +0900 
Subject: [PATCH 012/253] fix: signing up and deleting user --- internal/adapter/graphql/controller_user.go | 7 ++++++- internal/usecase/interactor/user.go | 5 +---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/internal/adapter/graphql/controller_user.go b/internal/adapter/graphql/controller_user.go index bf8db6dbc..b5a94fa80 100644 --- a/internal/adapter/graphql/controller_user.go +++ b/internal/adapter/graphql/controller_user.go @@ -42,10 +42,15 @@ func (c *UserController) Fetch(ctx context.Context, ids []id.UserID, operator *u } func (c *UserController) Signup(ctx context.Context, ginput *SignupInput, sub string) (*SignupPayload, error) { + secret := "" + if ginput.Secret != nil { + secret = *ginput.Secret + } u, team, err := c.usecase().Signup(ctx, interfaces.SignupParam{ Sub: sub, UserID: id.UserIDFromRefID(ginput.UserID), TeamID: id.TeamIDFromRefID(ginput.TeamID), + Secret: secret, }) if err != nil { return nil, err @@ -92,5 +97,5 @@ func (c *UserController) DeleteMe(ctx context.Context, user id.ID, operator *use return nil, err } - return &DeleteMePayload{UserID: operator.User.ID()}, nil + return &DeleteMePayload{UserID: user}, nil } diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 57af3774f..98dd6df61 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -291,12 +291,9 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase } u, err := i.userRepo.FindByID(ctx, userID) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil || u == nil { return err } - if u == nil { - return nil - } teams, err := i.teamRepo.FindByUser(ctx, u.ID()) if err != nil { From e9b8c9ca8383ceffa5d5c4c9eb85d0e898f7aab0 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 18:43:59 +0900 Subject: [PATCH 013/253] fix: deleting user --- internal/usecase/interactor/user.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git 
a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 98dd6df61..57af3774f 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -291,9 +291,12 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase } u, err := i.userRepo.FindByID(ctx, userID) - if err != nil || u == nil { + if err != nil && !errors.Is(err, err1.ErrNotFound) { return err } + if u == nil { + return nil + } teams, err := i.teamRepo.FindByUser(ctx, u.ID()) if err != nil { From e5ab87eb712dcd5ff019924fe2b250644b3e55a3 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 20:13:11 +0900 Subject: [PATCH 014/253] fix: sign up and update user --- gqlgen.yml | 2 + internal/adapter/graphql/controller_user.go | 4 +- internal/adapter/graphql/convert_user.go | 2 +- internal/adapter/graphql/models_gen.go | 39 ++++++----- internal/adapter/graphql/scalar.go | 35 ++++++++-- internal/adapter/http/user_controller.go | 4 -- internal/graphql/generated.go | 60 +++++++++++++++-- .../infrastructure/auth0/authenticator.go | 33 ++++++---- .../auth0/authenticator_test.go | 7 +- internal/usecase/interactor/user.go | 66 ++++++++++--------- internal/usecase/interfaces/user.go | 7 +- pkg/user/initializer.go | 58 ++++++++++++++++ pkg/user/initializer/initializer.go | 39 ----------- .../{initializer => }/initializer_test.go | 44 +++++++------ schema.graphql | 7 +- 15 files changed, 260 insertions(+), 147 deletions(-) create mode 100644 pkg/user/initializer.go delete mode 100644 pkg/user/initializer/initializer.go rename pkg/user/{initializer => }/initializer_test.go (68%) diff --git a/gqlgen.yml b/gqlgen.yml index b3eca9d23..30993f753 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -33,3 +33,5 @@ models: model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaFieldID TranslatedString: model: github.com/reearth/reearth-backend/internal/adapter/graphql.Map + Lang: + model: 
github.com/reearth/reearth-backend/internal/adapter/graphql.Lang diff --git a/internal/adapter/graphql/controller_user.go b/internal/adapter/graphql/controller_user.go index b5a94fa80..3522bbba6 100644 --- a/internal/adapter/graphql/controller_user.go +++ b/internal/adapter/graphql/controller_user.go @@ -48,6 +48,8 @@ func (c *UserController) Signup(ctx context.Context, ginput *SignupInput, sub st } u, team, err := c.usecase().Signup(ctx, interfaces.SignupParam{ Sub: sub, + Lang: ginput.Lang, + Theme: toTheme(ginput.Theme), UserID: id.UserIDFromRefID(ginput.UserID), TeamID: id.TeamIDFromRefID(ginput.TeamID), Secret: secret, @@ -63,9 +65,9 @@ func (c *UserController) UpdateMe(ctx context.Context, ginput *UpdateMeInput, op Name: ginput.Name, Email: ginput.Email, Lang: ginput.Lang, + Theme: toTheme(ginput.Theme), Password: ginput.Password, PasswordConfirmation: ginput.PasswordConfirmation, - Theme: toTheme(ginput.Theme), }, operator) if err != nil { return nil, err diff --git a/internal/adapter/graphql/convert_user.go b/internal/adapter/graphql/convert_user.go index c021cca01..687635002 100644 --- a/internal/adapter/graphql/convert_user.go +++ b/internal/adapter/graphql/convert_user.go @@ -22,7 +22,7 @@ func toUser(user *user.User) *User { ID: user.ID().ID(), Name: user.Name(), Email: user.Email(), - Lang: user.Lang().String(), + Lang: user.Lang(), MyTeamID: user.Team().ID(), Auths: authsgql, } diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index 111bd2ada..24ecaa99d 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -12,6 +12,7 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" + "golang.org/x/text/language" ) type Layer interface { @@ -893,9 +894,11 @@ type SearchedUser struct { } type SignupInput struct { - UserID *id.ID `json:"userId"` - TeamID *id.ID `json:"teamId"` - Secret 
*string `json:"secret"` + Lang *language.Tag `json:"lang"` + Theme *Theme `json:"theme"` + UserID *id.ID `json:"userId"` + TeamID *id.ID `json:"teamId"` + Secret *string `json:"secret"` } type SignupPayload struct { @@ -980,12 +983,12 @@ type UpdateLayerPayload struct { } type UpdateMeInput struct { - Name *string `json:"name"` - Email *string `json:"email"` - Lang *string `json:"lang"` - Theme *Theme `json:"theme"` - Password *string `json:"password"` - PasswordConfirmation *string `json:"passwordConfirmation"` + Name *string `json:"name"` + Email *string `json:"email"` + Lang *language.Tag `json:"lang"` + Theme *Theme `json:"theme"` + Password *string `json:"password"` + PasswordConfirmation *string `json:"passwordConfirmation"` } type UpdateMePayload struct { @@ -1137,15 +1140,15 @@ type UploadPluginPayload struct { } type User struct { - ID id.ID `json:"id"` - Name string `json:"name"` - Email string `json:"email"` - Lang string `json:"lang"` - Theme Theme `json:"theme"` - MyTeamID id.ID `json:"myTeamId"` - Auths []string `json:"auths"` - Teams []*Team `json:"teams"` - MyTeam *Team `json:"myTeam"` + ID id.ID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + Lang language.Tag `json:"lang"` + Theme Theme `json:"theme"` + MyTeamID id.ID `json:"myTeamId"` + Auths []string `json:"auths"` + Teams []*Team `json:"teams"` + MyTeam *Team `json:"myTeam"` } func (User) IsNode() {} diff --git a/internal/adapter/graphql/scalar.go b/internal/adapter/graphql/scalar.go index 4eaaf9304..928b25304 100644 --- a/internal/adapter/graphql/scalar.go +++ b/internal/adapter/graphql/scalar.go @@ -11,6 +11,7 @@ import ( graphql1 "github.com/99designs/gqlgen/graphql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" + "golang.org/x/text/language" ) func MarshalURL(t url.URL) graphql1.Marshaler { @@ -27,7 +28,27 @@ func UnmarshalURL(v interface{}) (url.URL, error) { } return url.URL{}, err } - return url.URL{}, 
errors.New("Invalid URL") + return url.URL{}, errors.New("invalid URL") +} + +func MarshalLang(t language.Tag) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalLang(v interface{}) (language.Tag, error) { + if tmpStr, ok := v.(string); ok { + if tmpStr == "" { + return language.Tag{}, nil + } + l, err := language.Parse(tmpStr) + if err != nil { + return language.Tag{}, err + } + return l, nil + } + return language.Tag{}, errors.New("invalid lang") } func MarshalID(t id.ID) graphql1.Marshaler { @@ -40,7 +61,7 @@ func UnmarshalID(v interface{}) (id.ID, error) { if tmpStr, ok := v.(string); ok { return id.NewIDWith(tmpStr) } - return id.ID{}, errors.New("Invalid ID") + return id.ID{}, errors.New("invalid ID") } func MarshalCursor(t usecase.Cursor) graphql1.Marshaler { @@ -53,7 +74,7 @@ func UnmarshalCursor(v interface{}) (usecase.Cursor, error) { if tmpStr, ok := v.(string); ok { return usecase.Cursor(tmpStr), nil } - return usecase.Cursor(""), errors.New("Invalid cursor") + return usecase.Cursor(""), errors.New("invalid cursor") } func MarshalPluginID(t id.PluginID) graphql1.Marshaler { @@ -66,7 +87,7 @@ func UnmarshalPluginID(v interface{}) (id.PluginID, error) { if tmpStr, ok := v.(string); ok { return id.PluginIDFrom(tmpStr) } - return id.PluginID{}, errors.New("Invalid ID") + return id.PluginID{}, errors.New("invalid ID") } func MarshalPluginExtensionID(t id.PluginExtensionID) graphql1.Marshaler { @@ -79,7 +100,7 @@ func UnmarshalPluginExtensionID(v interface{}) (id.PluginExtensionID, error) { if tmpStr, ok := v.(string); ok { return id.PluginExtensionID(tmpStr), nil } - return id.PluginExtensionID(""), errors.New("Invalid ID") + return id.PluginExtensionID(""), errors.New("invalid ID") } func MarshalPropertySchemaID(t id.PropertySchemaID) graphql1.Marshaler { @@ -92,7 +113,7 @@ func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { if tmpStr, 
ok := v.(string); ok { return id.PropertySchemaIDFrom(tmpStr) } - return id.PropertySchemaID{}, errors.New("Invalid ID") + return id.PropertySchemaID{}, errors.New("invalid ID") } func MarshalPropertySchemaFieldID(t id.PropertySchemaFieldID) graphql1.Marshaler { @@ -105,7 +126,7 @@ func UnmarshalPropertySchemaFieldID(v interface{}) (id.PropertySchemaFieldID, er if tmpStr, ok := v.(string); ok { return id.PropertySchemaFieldID(tmpStr), nil } - return id.PropertySchemaFieldID(""), errors.New("Invalid ID") + return id.PropertySchemaFieldID(""), errors.New("invalid ID") } func MarshalMap(val map[string]string) graphql1.Marshaler { diff --git a/internal/adapter/http/user_controller.go b/internal/adapter/http/user_controller.go index 4a2205c3c..414ca04eb 100644 --- a/internal/adapter/http/user_controller.go +++ b/internal/adapter/http/user_controller.go @@ -20,8 +20,6 @@ func NewUserController(usecase interfaces.User) *UserController { type CreateUserInput struct { Sub string `json:"sub"` Secret string `json:"secret"` - Name string `json:"name"` - Email string `json:"email"` UserID *id.UserID `json:"userId"` TeamID *id.TeamID `json:"teamId"` } @@ -35,8 +33,6 @@ type CreateUserOutput struct { func (c *UserController) CreateUser(ctx context.Context, input CreateUserInput) (interface{}, error) { u, _, err := c.usecase.Signup(ctx, interfaces.SignupParam{ Sub: input.Sub, - Name: input.Name, - Email: input.Email, Secret: input.Secret, UserID: input.UserID, TeamID: input.TeamID, diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index ab8cda7ca..1df008d83 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -20,6 +20,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" gqlparser "github.com/vektah/gqlparser/v2" "github.com/vektah/gqlparser/v2/ast" + "golang.org/x/text/language" ) // region ************************** generated!.gotpl ************************** @@ -5275,6 +5276,7 @@ directive @goField(forceResolver: 
Boolean, name: String) on INPUT_FIELD_DEFINITI scalar Cursor scalar DateTime scalar URL +scalar Lang scalar FileSize scalar PluginID scalar PluginExtensionID @@ -5388,7 +5390,7 @@ type User implements Node { id: ID! name: String! email: String! - lang: String! + lang: Lang! theme: Theme! myTeamId: ID! auths: [String!]! @@ -5919,6 +5921,8 @@ input RemoveAssetInput { } input SignupInput { + lang: Lang + theme: Theme userId: ID teamId: ID secret: String @@ -5927,7 +5931,7 @@ input SignupInput { input UpdateMeInput { name: String email: String - lang: String + lang: Lang theme: Theme password: String passwordConfirmation: String @@ -26343,9 +26347,9 @@ func (ec *executionContext) _User_lang(ctx context.Context, field graphql.Collec } return graphql.Null } - res := resTmp.(string) + res := resTmp.(language.Tag) fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) + return ec.marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, field.Selections, res) } func (ec *executionContext) _User_theme(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { @@ -28844,6 +28848,22 @@ func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj i for k, v := range asMap { switch k { + case "lang": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) + it.Lang, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, v) + if err != nil { + return it, err + } + case "theme": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("theme")) + it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx, v) + if err != nil { + return it, err + } case "userId": var err error @@ -29064,7 +29084,7 @@ func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - 
it.Lang, err = ec.unmarshalOString2แš–string(ctx, v) + it.Lang, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, v) if err != nil { return it, err } @@ -36556,6 +36576,21 @@ func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.Selecti return res } +func (ec *executionContext) unmarshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, v interface{}) (language.Tag, error) { + res, err := graphql1.UnmarshalLang(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, sel ast.SelectionSet, v language.Tag) graphql.Marshaler { + res := graphql1.MarshalLang(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v graphql1.Layer) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { @@ -38850,6 +38885,21 @@ func (ec *executionContext) marshalOInt2แš–int(ctx context.Context, sel ast.Sele return graphql.MarshalInt(*v) } +func (ec *executionContext) unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, v interface{}) (*language.Tag, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalLang(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, sel ast.SelectionSet, v *language.Tag) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalLang(*v) +} + func (ec *executionContext) marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v 
graphql1.Layer) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/infrastructure/auth0/authenticator.go b/internal/infrastructure/auth0/authenticator.go index 541ab8d6d..ac5603bed 100644 --- a/internal/infrastructure/auth0/authenticator.go +++ b/internal/infrastructure/auth0/authenticator.go @@ -7,6 +7,7 @@ import ( "fmt" "io" "net/http" + "strings" "sync" "time" @@ -15,14 +16,15 @@ import ( ) type Auth0 struct { - domain string - client *http.Client - clientID string - clientSecret string - token string - expireAt time.Time - lock sync.Mutex - current func() time.Time + domain string + client *http.Client + clientID string + clientSecret string + token string + expireAt time.Time + lock sync.Mutex + current func() time.Time + disableLogging bool } type response struct { @@ -56,7 +58,7 @@ func (u response) Into() gateway.AuthenticatorUser { func New(domain, clientID, clientSecret string) *Auth0 { return &Auth0{ - domain: addPathSep(domain), + domain: urlFromDomain(domain), clientID: clientID, clientSecret: clientSecret, } @@ -71,7 +73,9 @@ func (a *Auth0) FetchUser(id string) (data gateway.AuthenticatorUser, err error) var r response r, err = a.exec(http.MethodGet, "api/v2/users/"+id, a.token, nil) if err != nil { - log.Errorf("auth0: fetch user: %s", err) + if !a.disableLogging { + log.Errorf("auth0: fetch user: %s", err) + } err = fmt.Errorf("failed to auth") return } @@ -103,7 +107,9 @@ func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway var r response r, err = a.exec(http.MethodPatch, "api/v2/users/"+p.ID, a.token, payload) if err != nil { - log.Errorf("auth0: update user: %s", err) + if !a.disableLogging { + log.Errorf("auth0: update user: %s", err) + } err = fmt.Errorf("failed to update user") return } @@ -212,10 +218,13 @@ func (a *Auth0) exec(method, path, token string, b interface{}) (r response, err return } -func addPathSep(path string) string { +func urlFromDomain(path string) string { if path == "" 
{ return path } + if !strings.HasPrefix(path, "http://") && !strings.HasPrefix(path, "https://") { + path = "https://" + path + } if path[len(path)-1] != '/' { path += "/" } diff --git a/internal/infrastructure/auth0/authenticator_test.go b/internal/infrastructure/auth0/authenticator_test.go index fc73ac568..0e7157e64 100644 --- a/internal/infrastructure/auth0/authenticator_test.go +++ b/internal/infrastructure/auth0/authenticator_test.go @@ -29,15 +29,16 @@ var ( current2 = time.Date(2020, time.April, 1, 23, 0, 0, 0, time.UTC) ) -func TestAddPathSep(t *testing.T) { - assert.Equal(t, "a/", addPathSep("a")) - assert.Equal(t, "a/", addPathSep("a/")) +func TestURLFromDomain(t *testing.T) { + assert.Equal(t, "https://a/", urlFromDomain("a")) + assert.Equal(t, "https://a/", urlFromDomain("a/")) } func TestAuth0(t *testing.T) { a := New(domain, clientID, clientSecret) a.client = client(t) // inject mock a.current = func() time.Time { return current } + a.disableLogging = true assert.True(t, a.needsFetchToken()) assert.NoError(t, a.updateToken()) diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 57af3774f..78e1b43f9 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -4,8 +4,6 @@ import ( "context" "errors" - "golang.org/x/text/language" - "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" @@ -14,7 +12,6 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" "github.com/reearth/reearth-backend/pkg/user" - "github.com/reearth/reearth-backend/pkg/user/initializer" ) type User struct { @@ -125,9 +122,15 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
} } + // Fetch user info + ui, err := i.authenticator.FetchUser(inp.Sub) + if err != nil { + return nil, nil, err + } + // Check if user and team already exists var team *user.Team - existed, err = i.userRepo.FindByEmail(ctx, inp.Email) + existed, err = i.userRepo.FindByEmail(ctx, ui.Email) if err != nil && !errors.Is(err, err1.ErrNotFound) { return nil, nil, err } @@ -136,7 +139,15 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. } // Initialize user and team - u, team, err = initializer.InitUser(inp.Email, inp.Name, inp.Sub, inp.UserID, inp.TeamID) + u, team, err = user.Init(user.InitParams{ + Email: ui.Email, + Name: ui.Name, + Auth0Sub: inp.Sub, + Lang: inp.Lang, + Theme: inp.Theme, + UserID: inp.UserID, + TeamID: inp.TeamID, + }) if err != nil { return nil, nil, err } @@ -152,6 +163,16 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. } func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operator *usecase.Operator) (u *user.User, err error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + if p.Password != nil { + if p.PasswordConfirmation == nil || *p.Password != *p.PasswordConfirmation { + return nil, interfaces.ErrUserInvalidPasswordConfirmation + } + } + tx, err := i.transaction.Begin() if err != nil { return @@ -162,10 +183,6 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato var team *user.Team - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - u, err = i.userRepo.FindByID(ctx, operator.User) if err != nil { return nil, err @@ -191,20 +208,7 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato u.UpdateEmail(*p.Email) } if p.Lang != nil { - if *p.Lang == "" { - u.UpdateLang(language.Tag{}) - } else { - l, err := language.Parse(*p.Lang) - if err != nil { - return nil, interfaces.ErrUserInvalidLang - } - u.UpdateLang(l) - } - } - if p.Password != nil { - 
if p.PasswordConfirmation == nil || *p.Password != *p.PasswordConfirmation { - return nil, interfaces.ErrUserInvalidPasswordConfirmation - } + u.UpdateLang(*p.Lang) } if p.Theme != nil { u.UpdateTheme(*p.Theme) @@ -241,6 +245,10 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato } func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator *usecase.Operator) (u *user.User, err error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + tx, err := i.transaction.Begin() if err != nil { return @@ -249,10 +257,6 @@ func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator * err = tx.End(ctx) }() - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - u, err = i.userRepo.FindByID(ctx, operator.User) if err != nil { return nil, err @@ -278,6 +282,10 @@ func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *use } func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase.Operator) (err error) { + if operator == nil || operator.User.IsNil() || userID.IsNil() || userID != operator.User { + return errors.New("invalid user id") + } + tx, err := i.transaction.Begin() if err != nil { return @@ -286,10 +294,6 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase err = tx.End(ctx) }() - if operator == nil || operator.User.IsNil() || userID.IsNil() || userID != operator.User { - return errors.New("invalid user id") - } - u, err := i.userRepo.FindByID(ctx, userID) if err != nil && !errors.Is(err, err1.ErrNotFound) { return err diff --git a/internal/usecase/interfaces/user.go b/internal/usecase/interfaces/user.go index c7400ff34..c933d8961 100644 --- a/internal/usecase/interfaces/user.go +++ b/internal/usecase/interfaces/user.go @@ -5,6 +5,7 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" "github.com/reearth/reearth-backend/internal/usecase" 
"github.com/reearth/reearth-backend/pkg/id" @@ -18,8 +19,8 @@ var ( type SignupParam struct { Sub string - Name string - Email string + Lang *language.Tag + Theme *user.Theme UserID *id.UserID TeamID *id.TeamID Secret string @@ -28,7 +29,7 @@ type SignupParam struct { type UpdateMeParam struct { Name *string Email *string - Lang *string + Lang *language.Tag Theme *user.Theme Password *string PasswordConfirmation *string diff --git a/pkg/user/initializer.go b/pkg/user/initializer.go new file mode 100644 index 000000000..f28c19a10 --- /dev/null +++ b/pkg/user/initializer.go @@ -0,0 +1,58 @@ +package user + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "golang.org/x/text/language" +) + +type InitParams struct { + Email string + Name string + Auth0Sub string + Lang *language.Tag + Theme *Theme + UserID *id.UserID + TeamID *id.TeamID +} + +func Init(p InitParams) (*User, *Team, error) { + if p.UserID == nil { + p.UserID = id.NewUserID().Ref() + } + if p.TeamID == nil { + p.TeamID = id.NewTeamID().Ref() + } + if p.Lang == nil { + p.Lang = &language.Tag{} + } + if p.Theme == nil { + t := ThemeDefault + p.Theme = &t + } + + u, err := New(). + ID(*p.UserID). + Name(p.Name). + Email(p.Email). + Auths([]Auth{AuthFromAuth0Sub(p.Auth0Sub)}). + Lang(*p.Lang). + Theme(*p.Theme). + Build() + if err != nil { + return nil, nil, err + } + + // create a user's own team + t, err := NewTeam(). + ID(*p.TeamID). + Name(p.Name). + Members(map[id.UserID]Role{u.ID(): RoleOwner}). + Personal(true). 
+ Build() + if err != nil { + return nil, nil, err + } + u.UpdateTeam(t.ID()) + + return u, t, err +} diff --git a/pkg/user/initializer/initializer.go b/pkg/user/initializer/initializer.go deleted file mode 100644 index 197df1ed4..000000000 --- a/pkg/user/initializer/initializer.go +++ /dev/null @@ -1,39 +0,0 @@ -package initializer - -import ( - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/user" -) - -func InitUser(email, username, auth0sub string, userID *id.UserID, teamID *id.TeamID) (*user.User, *user.Team, error) { - if userID == nil { - userID = id.NewUserID().Ref() - } - if teamID == nil { - teamID = id.NewTeamID().Ref() - } - - u, err := user.New(). - ID(*userID). - Name(username). - Email(email). - Auths([]user.Auth{user.AuthFromAuth0Sub(auth0sub)}). - Build() - if err != nil { - return nil, nil, err - } - - // create a user's own team - t, err := user.NewTeam(). - ID(*teamID). - Name(username). - Members(map[id.UserID]user.Role{u.ID(): user.RoleOwner}). - Personal(true). 
- Build() - if err != nil { - return nil, nil, err - } - u.UpdateTeam(t.ID()) - - return u, t, err -} diff --git a/pkg/user/initializer/initializer_test.go b/pkg/user/initializer_test.go similarity index 68% rename from pkg/user/initializer/initializer_test.go rename to pkg/user/initializer_test.go index 321c8449d..769b87aba 100644 --- a/pkg/user/initializer/initializer_test.go +++ b/pkg/user/initializer_test.go @@ -1,23 +1,22 @@ -package initializer +package user import ( "errors" "testing" "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/user" "github.com/stretchr/testify/assert" ) -func TestInitUser(t *testing.T) { +func TestInit(t *testing.T) { uid := id.NewUserID() tid := id.NewTeamID() testCases := []struct { Name, Email, Username, Sub string UID *id.UserID TID *id.TeamID - ExpectedUser *user.User - ExpectedTeam *user.Team + ExpectedUser *User + ExpectedTeam *Team Err error }{ { @@ -27,18 +26,17 @@ func TestInitUser(t *testing.T) { Sub: "###", UID: &uid, TID: &tid, - ExpectedUser: user.New(). + ExpectedUser: New(). ID(uid). Email("xx@yy.zz"). Name("nnn"). Team(tid). - Auths([]user.Auth{user.AuthFromAuth0Sub("###")}). + Auths([]Auth{AuthFromAuth0Sub("###")}). MustBuild(), - ExpectedTeam: user.NewTeam(). + ExpectedTeam: NewTeam(). ID(tid). Name("nnn"). - Members(map[id.UserID]user. - Role{uid: user.RoleOwner}). + Members(map[id.UserID]Role{uid: RoleOwner}). Personal(true). MustBuild(), Err: nil, @@ -50,18 +48,17 @@ func TestInitUser(t *testing.T) { Sub: "###", UID: &uid, TID: nil, - ExpectedUser: user.New(). + ExpectedUser: New(). ID(uid). Email("xx@yy.zz"). Name("nnn"). Team(tid). - Auths([]user.Auth{user.AuthFromAuth0Sub("###")}). + Auths([]Auth{AuthFromAuth0Sub("###")}). MustBuild(), - ExpectedTeam: user.NewTeam(). + ExpectedTeam: NewTeam(). NewID(). Name("nnn"). - Members(map[id.UserID]user. - Role{uid: user.RoleOwner}). + Members(map[id.UserID]Role{uid: RoleOwner}). Personal(true). 
MustBuild(), Err: nil, @@ -73,18 +70,17 @@ func TestInitUser(t *testing.T) { Sub: "###", UID: nil, TID: &tid, - ExpectedUser: user.New(). + ExpectedUser: New(). NewID(). Email("xx@yy.zz"). Name("nnn"). Team(tid). - Auths([]user.Auth{user.AuthFromAuth0Sub("###")}). + Auths([]Auth{AuthFromAuth0Sub("###")}). MustBuild(), - ExpectedTeam: user.NewTeam(). + ExpectedTeam: NewTeam(). ID(tid). Name("nnn"). - Members(map[id.UserID]user. - Role{uid: user.RoleOwner}). + Members(map[id.UserID]Role{uid: RoleOwner}). Personal(true). MustBuild(), Err: nil, @@ -94,7 +90,13 @@ func TestInitUser(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - u, t, err := InitUser(tc.Email, tc.Username, tc.Sub, tc.UID, tc.TID) + u, t, err := Init(InitParams{ + Email: tc.Email, + Name: tc.Username, + Auth0Sub: tc.Sub, + UserID: tc.UID, + TeamID: tc.TID, + }) if err == nil { assert.Equal(tt, tc.ExpectedUser.Email(), u.Email()) assert.Equal(tt, tc.ExpectedUser.Name(), u.Name()) diff --git a/schema.graphql b/schema.graphql index 98d6ac6be..b0e7addd2 100644 --- a/schema.graphql +++ b/schema.graphql @@ -18,6 +18,7 @@ directive @goField(forceResolver: Boolean, name: String) on INPUT_FIELD_DEFINITI scalar Cursor scalar DateTime scalar URL +scalar Lang scalar FileSize scalar PluginID scalar PluginExtensionID @@ -131,7 +132,7 @@ type User implements Node { id: ID! name: String! email: String! - lang: String! + lang: Lang! theme: Theme! myTeamId: ID! auths: [String!]! 
@@ -662,6 +663,8 @@ input RemoveAssetInput { } input SignupInput { + lang: Lang + theme: Theme userId: ID teamId: ID secret: String @@ -670,7 +673,7 @@ input SignupInput { input UpdateMeInput { name: String email: String - lang: String + lang: Lang theme: Theme password: String passwordConfirmation: String From 9b1c4a3aee384fc392129bd2ecd66895af5d2e3f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 21:13:47 +0900 Subject: [PATCH 015/253] fix: make gql mutation payloads optional --- internal/graphql/generated.go | 1419 ++++++++++----------------------- schema.graphql | 110 +-- 2 files changed, 456 insertions(+), 1073 deletions(-) diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index 1df008d83..6749845fa 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -6567,77 +6567,77 @@ type Query { type Mutation { # Asset - createAsset(input: CreateAssetInput!): CreateAssetPayload! - removeAsset(input: RemoveAssetInput!): RemoveAssetPayload! + createAsset(input: CreateAssetInput!): CreateAssetPayload + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload # User - signup(input: SignupInput!): SignupPayload! - updateMe(input: UpdateMeInput!): UpdateMePayload! - removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload! - deleteMe(input: DeleteMeInput!): DeleteMePayload! + signup(input: SignupInput!): SignupPayload + updateMe(input: UpdateMeInput!): UpdateMePayload + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload + deleteMe(input: DeleteMeInput!): DeleteMePayload # Team - createTeam(input: CreateTeamInput!): CreateTeamPayload! - deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload! - updateTeam(input: UpdateTeamInput!): UpdateTeamPayload! - addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload! - removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload! - updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload! 
+ createTeam(input: CreateTeamInput!): CreateTeamPayload + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload + addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload + removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload # Project - createProject(input: CreateProjectInput!): ProjectPayload! - updateProject(input: UpdateProjectInput!): ProjectPayload! - publishProject(input: PublishProjectInput!): ProjectPayload! - deleteProject(input: DeleteProjectInput!): DeleteProjectPayload! + createProject(input: CreateProjectInput!): ProjectPayload + updateProject(input: UpdateProjectInput!): ProjectPayload + publishProject(input: PublishProjectInput!): ProjectPayload + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload # Plugin - uploadPlugin(input: UploadPluginInput!): UploadPluginPayload! + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload # Scene - createScene(input: CreateSceneInput!): CreateScenePayload! - addWidget(input: AddWidgetInput!): AddWidgetPayload! - updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload! - removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload! - installPlugin(input: InstallPluginInput!): InstallPluginPayload! - uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload! - upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload! 
+ createScene(input: CreateSceneInput!): CreateScenePayload + addWidget(input: AddWidgetInput!): AddWidgetPayload + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload + installPlugin(input: InstallPluginInput!): InstallPluginPayload + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload # Dataset - updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload! - syncDataset(input: SyncDatasetInput!): SyncDatasetPayload! - addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload! - addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload! - removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload! - importDataset(input: ImportDatasetInput!): ImportDatasetPayload! - addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload! + updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload + addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload + addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload + removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload + importDataset(input: ImportDatasetInput!): ImportDatasetPayload + addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload # Property - updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload! - updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload! - updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload! - updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload! 
- updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload! - removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload! - uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload! - linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload! - unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload! - addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload! - movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload! - removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload! - updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload! + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload + updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload + updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload + updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload + updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload + linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload # Layer - addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload! - addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload! 
- removeLayer(input: RemoveLayerInput!): RemoveLayerPayload! - updateLayer(input: UpdateLayerInput!): UpdateLayerPayload! - moveLayer(input: MoveLayerInput!): MoveLayerPayload! - createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload! - removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload! - addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload! - moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload! - removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload! - importLayer(input:ImportLayerInput!): ImportLayerPayload! + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload + moveLayer(input: MoveLayerInput!): MoveLayerPayload + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload + importLayer(input:ImportLayerInput!): ImportLayerPayload } schema { @@ -15277,14 +15277,11 @@ func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.CreateAssetPayload) fc.Result = res - return ec.marshalNCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx, field.Selections, res) + return ec.marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx, field.Selections, res) } func (ec 
*executionContext) _Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15319,14 +15316,11 @@ func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveAssetPayload) fc.Result = res - return ec.marshalNRemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx, field.Selections, res) + return ec.marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15361,14 +15355,11 @@ func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql. return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.SignupPayload) fc.Result = res - return ec.marshalNSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx, field.Selections, res) + return ec.marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15403,14 +15394,11 @@ func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphq return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateMePayload) fc.Result = res - return 
ec.marshalNUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) + return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15445,14 +15433,11 @@ func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field gr return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateMePayload) fc.Result = res - return ec.marshalNUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) + return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15487,14 +15472,11 @@ func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphq return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.DeleteMePayload) fc.Result = res - return ec.marshalNDeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx, field.Selections, res) + return ec.marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15529,14 +15511,11 @@ func (ec *executionContext) 
_Mutation_createTeam(ctx context.Context, field grap return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.CreateTeamPayload) fc.Result = res - return ec.marshalNCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx, field.Selections, res) + return ec.marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15571,14 +15550,11 @@ func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field grap return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.DeleteTeamPayload) fc.Result = res - return ec.marshalNDeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx, field.Selections, res) + return ec.marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15613,14 +15589,11 @@ func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field grap return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateTeamPayload) fc.Result = res - return ec.marshalNUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx, field.Selections, res) + return 
ec.marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15655,14 +15628,11 @@ func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddMemberToTeamPayload) fc.Result = res - return ec.marshalNAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx, field.Selections, res) + return ec.marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15697,14 +15667,11 @@ func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveMemberFromTeamPayload) fc.Result = res - return ec.marshalNRemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) + return ec.marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15739,14 +15706,11 @@ func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx 
context.Context, fi return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateMemberOfTeamPayload) fc.Result = res - return ec.marshalNUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) + return ec.marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15781,14 +15745,11 @@ func (ec *executionContext) _Mutation_createProject(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.ProjectPayload) fc.Result = res - return ec.marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15823,14 +15784,11 @@ func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.ProjectPayload) fc.Result = res - return ec.marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) + return 
ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15865,14 +15823,11 @@ func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.ProjectPayload) fc.Result = res - return ec.marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15907,14 +15862,11 @@ func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.DeleteProjectPayload) fc.Result = res - return ec.marshalNDeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx, field.Selections, res) + return ec.marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15949,14 +15901,11 @@ func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field gr return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { 
- ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UploadPluginPayload) fc.Result = res - return ec.marshalNUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx, field.Selections, res) + return ec.marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -15991,14 +15940,11 @@ func (ec *executionContext) _Mutation_createScene(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.CreateScenePayload) fc.Result = res - return ec.marshalNCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx, field.Selections, res) + return ec.marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16033,14 +15979,11 @@ func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graph return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddWidgetPayload) fc.Result = res - return ec.marshalNAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx, field.Selections, res) + return ec.marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx, field.Selections, res) } func (ec 
*executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16075,14 +16018,11 @@ func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field gr return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateWidgetPayload) fc.Result = res - return ec.marshalNUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx, field.Selections, res) + return ec.marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16117,14 +16057,11 @@ func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field gr return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveWidgetPayload) fc.Result = res - return ec.marshalNRemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx, field.Selections, res) + return ec.marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16159,14 +16096,11 @@ func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.InstallPluginPayload) fc.Result = res - return 
ec.marshalNInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx, field.Selections, res) + return ec.marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16201,14 +16135,11 @@ func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UninstallPluginPayload) fc.Result = res - return ec.marshalNUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx, field.Selections, res) + return ec.marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16243,14 +16174,11 @@ func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpgradePluginPayload) fc.Result = res - return ec.marshalNUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx, field.Selections, res) + return ec.marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, field 
graphql.CollectedField) (ret graphql.Marshaler) { @@ -16285,14 +16213,11 @@ func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, f return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateDatasetSchemaPayload) fc.Result = res - return ec.marshalNUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16327,14 +16252,11 @@ func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.SyncDatasetPayload) fc.Result = res - return ec.marshalNSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx, field.Selections, res) + return ec.marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16369,14 +16291,11 @@ func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Contex return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddDynamicDatasetSchemaPayload) fc.Result = res - return 
ec.marshalNAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16411,14 +16330,11 @@ func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, fie return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddDynamicDatasetPayload) fc.Result = res - return ec.marshalNAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx, field.Selections, res) + return ec.marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16453,14 +16369,11 @@ func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, f return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveDatasetSchemaPayload) fc.Result = res - return ec.marshalNRemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) } func 
(ec *executionContext) _Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16495,14 +16408,11 @@ func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.ImportDatasetPayload) fc.Result = res - return ec.marshalNImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) + return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16537,14 +16447,11 @@ func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, fiel return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddDatasetSchemaPayload) fc.Result = res - return ec.marshalNAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16579,14 +16486,11 @@ func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, f return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := 
resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyValueLatLng(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16621,14 +16525,11 @@ func (ec *executionContext) _Mutation_updatePropertyValueLatLng(ctx context.Cont return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyValueLatLngHeight(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16663,14 +16564,11 @@ func (ec *executionContext) _Mutation_updatePropertyValueLatLngHeight(ctx contex return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec 
*executionContext) _Mutation_updatePropertyValueCamera(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16705,14 +16603,11 @@ func (ec *executionContext) _Mutation_updatePropertyValueCamera(ctx context.Cont return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyValueTypography(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16747,14 +16642,11 @@ func (ec *executionContext) _Mutation_updatePropertyValueTypography(ctx context. 
return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16789,14 +16681,11 @@ func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, f return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16831,14 +16720,11 @@ func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return 
ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16873,14 +16759,11 @@ func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Con return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16915,14 +16798,11 @@ func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, f return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyFieldPayload) fc.Result = res - return ec.marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16957,14 +16837,11 @@ func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field return 
graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyItemPayload) fc.Result = res - return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16999,14 +16876,11 @@ func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, fiel return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyItemPayload) fc.Result = res - return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17041,14 +16915,11 @@ func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, fi return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyItemPayload) fc.Result = res - return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return 
ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17083,14 +16954,11 @@ func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, f return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.PropertyItemPayload) fc.Result = res - return ec.marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17125,14 +16993,11 @@ func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field gr return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddLayerItemPayload) fc.Result = res - return ec.marshalNAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx, field.Selections, res) + return ec.marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17167,14 +17032,11 @@ func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field g return graphql.Null } if resTmp == nil { - 
if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddLayerGroupPayload) fc.Result = res - return ec.marshalNAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx, field.Selections, res) + return ec.marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17209,14 +17071,11 @@ func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveLayerPayload) fc.Result = res - return ec.marshalNRemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx, field.Selections, res) + return ec.marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17251,14 +17110,11 @@ func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.UpdateLayerPayload) fc.Result = res - return ec.marshalNUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx, field.Selections, res) + return 
ec.marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17293,14 +17149,11 @@ func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graph return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.MoveLayerPayload) fc.Result = res - return ec.marshalNMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx, field.Selections, res) + return ec.marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17335,14 +17188,11 @@ func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.CreateInfoboxPayload) fc.Result = res - return ec.marshalNCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx, field.Selections, res) + return ec.marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17377,14 +17227,11 @@ func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field g return graphql.Null } if resTmp == nil { - if 
!graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveInfoboxPayload) fc.Result = res - return ec.marshalNRemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx, field.Selections, res) + return ec.marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17419,14 +17266,11 @@ func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.AddInfoboxFieldPayload) fc.Result = res - return ec.marshalNAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx, field.Selections, res) + return ec.marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17461,14 +17305,11 @@ func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, fiel return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.MoveInfoboxFieldPayload) fc.Result = res - return ec.marshalNMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx, field.Selections, res) + return 
ec.marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17503,14 +17344,11 @@ func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, fi return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.RemoveInfoboxFieldPayload) fc.Result = res - return ec.marshalNRemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) + return ec.marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17545,14 +17383,11 @@ func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field gra return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } res := resTmp.(*graphql1.ImportLayerPayload) fc.Result = res - return ec.marshalNImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx, field.Selections, res) + return ec.marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { @@ -32310,279 +32145,114 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel 
ast.SelectionSet) out.Values[i] = graphql.MarshalString("Mutation") case "createAsset": out.Values[i] = ec._Mutation_createAsset(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeAsset": out.Values[i] = ec._Mutation_removeAsset(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "signup": out.Values[i] = ec._Mutation_signup(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateMe": out.Values[i] = ec._Mutation_updateMe(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeMyAuth": out.Values[i] = ec._Mutation_removeMyAuth(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "deleteMe": out.Values[i] = ec._Mutation_deleteMe(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "createTeam": out.Values[i] = ec._Mutation_createTeam(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "deleteTeam": out.Values[i] = ec._Mutation_deleteTeam(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateTeam": out.Values[i] = ec._Mutation_updateTeam(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addMemberToTeam": out.Values[i] = ec._Mutation_addMemberToTeam(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeMemberFromTeam": out.Values[i] = ec._Mutation_removeMemberFromTeam(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateMemberOfTeam": out.Values[i] = ec._Mutation_updateMemberOfTeam(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "createProject": out.Values[i] = ec._Mutation_createProject(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateProject": out.Values[i] = ec._Mutation_updateProject(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "publishProject": out.Values[i] = ec._Mutation_publishProject(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - 
} case "deleteProject": out.Values[i] = ec._Mutation_deleteProject(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "uploadPlugin": out.Values[i] = ec._Mutation_uploadPlugin(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "createScene": out.Values[i] = ec._Mutation_createScene(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addWidget": out.Values[i] = ec._Mutation_addWidget(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateWidget": out.Values[i] = ec._Mutation_updateWidget(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeWidget": out.Values[i] = ec._Mutation_removeWidget(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "installPlugin": out.Values[i] = ec._Mutation_installPlugin(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "uninstallPlugin": out.Values[i] = ec._Mutation_uninstallPlugin(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "upgradePlugin": out.Values[i] = ec._Mutation_upgradePlugin(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateDatasetSchema": out.Values[i] = ec._Mutation_updateDatasetSchema(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "syncDataset": out.Values[i] = ec._Mutation_syncDataset(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addDynamicDatasetSchema": out.Values[i] = ec._Mutation_addDynamicDatasetSchema(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addDynamicDataset": out.Values[i] = ec._Mutation_addDynamicDataset(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeDatasetSchema": out.Values[i] = ec._Mutation_removeDatasetSchema(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "importDataset": out.Values[i] = ec._Mutation_importDataset(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - 
} case "addDatasetSchema": out.Values[i] = ec._Mutation_addDatasetSchema(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updatePropertyValue": out.Values[i] = ec._Mutation_updatePropertyValue(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updatePropertyValueLatLng": out.Values[i] = ec._Mutation_updatePropertyValueLatLng(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updatePropertyValueLatLngHeight": out.Values[i] = ec._Mutation_updatePropertyValueLatLngHeight(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updatePropertyValueCamera": out.Values[i] = ec._Mutation_updatePropertyValueCamera(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updatePropertyValueTypography": out.Values[i] = ec._Mutation_updatePropertyValueTypography(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removePropertyField": out.Values[i] = ec._Mutation_removePropertyField(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "uploadFileToProperty": out.Values[i] = ec._Mutation_uploadFileToProperty(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "linkDatasetToPropertyValue": out.Values[i] = ec._Mutation_linkDatasetToPropertyValue(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "unlinkPropertyValue": out.Values[i] = ec._Mutation_unlinkPropertyValue(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addPropertyItem": out.Values[i] = ec._Mutation_addPropertyItem(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "movePropertyItem": out.Values[i] = ec._Mutation_movePropertyItem(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removePropertyItem": out.Values[i] = ec._Mutation_removePropertyItem(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updatePropertyItems": out.Values[i] = 
ec._Mutation_updatePropertyItems(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addLayerItem": out.Values[i] = ec._Mutation_addLayerItem(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addLayerGroup": out.Values[i] = ec._Mutation_addLayerGroup(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeLayer": out.Values[i] = ec._Mutation_removeLayer(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "updateLayer": out.Values[i] = ec._Mutation_updateLayer(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "moveLayer": out.Values[i] = ec._Mutation_moveLayer(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "createInfobox": out.Values[i] = ec._Mutation_createInfobox(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeInfobox": out.Values[i] = ec._Mutation_removeInfobox(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "addInfoboxField": out.Values[i] = ec._Mutation_addInfoboxField(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "moveInfoboxField": out.Values[i] = ec._Mutation_moveInfoboxField(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "removeInfoboxField": out.Values[i] = ec._Mutation_removeInfoboxField(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } case "importLayer": out.Values[i] = ec._Mutation_importLayer(ctx, field) - if out.Values[i] == graphql.Null { - invalids++ - } default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -35506,134 +35176,36 @@ func (ec *executionContext) unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reear return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddDatasetSchemaPayload) 
graphql.Marshaler { - return ec._AddDatasetSchemaPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDatasetSchemaPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddDatasetSchemaPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetInput(ctx context.Context, v interface{}) (graphql1.AddDynamicDatasetInput, error) { res, err := ec.unmarshalInputAddDynamicDatasetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddDynamicDatasetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddDynamicDatasetPayload) graphql.Marshaler { - return ec._AddDynamicDatasetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddDynamicDatasetPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.AddDynamicDatasetSchemaInput, error) { res, err := ec.unmarshalInputAddDynamicDatasetSchemaInput(ctx, v) 
return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddDynamicDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { - return ec._AddDynamicDatasetSchemaPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddDynamicDatasetSchemaPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.AddInfoboxFieldInput, error) { res, err := ec.unmarshalInputAddInfoboxFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddInfoboxFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddInfoboxFieldPayload) graphql.Marshaler { - return ec._AddInfoboxFieldPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddInfoboxFieldPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddInfoboxFieldPayload(ctx, sel, v) -} - func 
(ec *executionContext) unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupInput(ctx context.Context, v interface{}) (graphql1.AddLayerGroupInput, error) { res, err := ec.unmarshalInputAddLayerGroupInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddLayerGroupPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddLayerGroupPayload) graphql.Marshaler { - return ec._AddLayerGroupPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerGroupPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddLayerGroupPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemInput(ctx context.Context, v interface{}) (graphql1.AddLayerItemInput, error) { res, err := ec.unmarshalInputAddLayerItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddLayerItemPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddLayerItemPayload) graphql.Marshaler { - return ec._AddLayerItemPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerItemPayload) graphql.Marshaler { - if v == nil { - if 
!graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddLayerItemPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamInput(ctx context.Context, v interface{}) (graphql1.AddMemberToTeamInput, error) { res, err := ec.unmarshalInputAddMemberToTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddMemberToTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddMemberToTeamPayload) graphql.Marshaler { - return ec._AddMemberToTeamPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddMemberToTeamPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddMemberToTeamPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddPropertyItemInput(ctx context.Context, v interface{}) (graphql1.AddPropertyItemInput, error) { res, err := ec.unmarshalInputAddPropertyItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -35644,20 +35216,6 @@ func (ec *executionContext) unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹re return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAddWidgetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.AddWidgetPayload) 
graphql.Marshaler { - return ec._AddWidgetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddWidgetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._AddWidgetPayload(ctx, sel, v) -} - func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Asset) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup @@ -35800,39 +35358,11 @@ func (ec *executionContext) unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNCreateAssetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateAssetPayload) graphql.Marshaler { - return ec._CreateAssetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateAssetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._CreateAssetPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxInput(ctx context.Context, v interface{}) (graphql1.CreateInfoboxInput, error) { res, err := ec.unmarshalInputCreateInfoboxInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } 
-func (ec *executionContext) marshalNCreateInfoboxPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateInfoboxPayload) graphql.Marshaler { - return ec._CreateInfoboxPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateInfoboxPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._CreateInfoboxPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateProjectInput(ctx context.Context, v interface{}) (graphql1.CreateProjectInput, error) { res, err := ec.unmarshalInputCreateProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -35843,39 +35373,11 @@ func (ec *executionContext) unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNCreateScenePayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateScenePayload) graphql.Marshaler { - return ec._CreateScenePayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateScenePayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._CreateScenePayload(ctx, sel, v) -} - func (ec 
*executionContext) unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamInput(ctx context.Context, v interface{}) (graphql1.CreateTeamInput, error) { res, err := ec.unmarshalInputCreateTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNCreateTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CreateTeamPayload) graphql.Marshaler { - return ec._CreateTeamPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateTeamPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._CreateTeamPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (usecase.Cursor, error) { res, err := graphql1.UnmarshalCursor(v) return res, graphql.ErrorOnPath(ctx, err) @@ -36295,58 +35797,16 @@ func (ec *executionContext) unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹ree return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNDeleteMePayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v graphql1.DeleteMePayload) graphql.Marshaler { - return ec._DeleteMePayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNDeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteMePayload) graphql.Marshaler { - if v == nil { - if 
!graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._DeleteMePayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectInput(ctx context.Context, v interface{}) (graphql1.DeleteProjectInput, error) { res, err := ec.unmarshalInputDeleteProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNDeleteProjectPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.DeleteProjectPayload) graphql.Marshaler { - return ec._DeleteProjectPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNDeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteProjectPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._DeleteProjectPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamInput(ctx context.Context, v interface{}) (graphql1.DeleteTeamInput, error) { res, err := ec.unmarshalInputDeleteTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNDeleteTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.DeleteTeamPayload) graphql.Marshaler { - return ec._DeleteTeamPayload(ctx, sel, &v) -} - -func (ec *executionContext) 
marshalNDeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteTeamPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._DeleteTeamPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNFileSize2int64(ctx context.Context, v interface{}) (int64, error) { res, err := graphql.UnmarshalInt64(v) return res, graphql.ErrorOnPath(ctx, err) @@ -36448,51 +35908,23 @@ func (ec *executionContext) unmarshalNImportDatasetInput2githubแš—comแš‹reearth return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNImportDatasetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.ImportDatasetPayload) graphql.Marshaler { - return ec._ImportDatasetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportDatasetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._ImportDatasetPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerInput(ctx context.Context, v interface{}) (graphql1.ImportLayerInput, error) { res, err := ec.unmarshalInputImportLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNImportLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx context.Context, 
sel ast.SelectionSet, v graphql1.ImportLayerPayload) graphql.Marshaler { - return ec._ImportLayerPayload(ctx, sel, &v) +func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v graphql1.Infobox) graphql.Marshaler { + return ec._Infobox(ctx, sel, &v) } -func (ec *executionContext) marshalNImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportLayerPayload) graphql.Marshaler { +func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._ImportLayerPayload(ctx, sel, v) -} - -func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v graphql1.Infobox) graphql.Marshaler { - return ec._Infobox(ctx, sel, &v) -} - -func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._Infobox(ctx, sel, v) + return ec._Infobox(ctx, sel, v) } func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.InfoboxField) graphql.Marshaler { @@ -36547,20 +35979,6 @@ func (ec *executionContext) 
unmarshalNInstallPluginInput2githubแš—comแš‹reearth return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNInstallPluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.InstallPluginPayload) graphql.Marshaler { - return ec._InstallPluginPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.InstallPluginPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._InstallPluginPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNInt2int(ctx context.Context, v interface{}) (int, error) { res, err := graphql.UnmarshalInt(v) return res, graphql.ErrorOnPath(ctx, err) @@ -36866,39 +36284,11 @@ func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reear return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNMoveInfoboxFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { - return ec._MoveInfoboxFieldPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._MoveInfoboxFieldPayload(ctx, sel, v) -} - func (ec *executionContext) 
unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerInput(ctx context.Context, v interface{}) (graphql1.MoveLayerInput, error) { res, err := ec.unmarshalInputMoveLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNMoveLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.MoveLayerPayload) graphql.Marshaler { - return ec._MoveLayerPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveLayerPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._MoveLayerPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMovePropertyItemInput(ctx context.Context, v interface{}) (graphql1.MovePropertyItemInput, error) { res, err := ec.unmarshalInputMovePropertyItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -37254,20 +36644,6 @@ func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reea return ec._ProjectEdge(ctx, sel, v) } -func (ec *executionContext) marshalNProjectPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.ProjectPayload) graphql.Marshaler { - return ec._ProjectPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectPayload) 
graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._ProjectPayload(ctx, sel, v) -} - func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.Property) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { @@ -37335,20 +36711,6 @@ func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearth return ec._PropertyFieldLink(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.PropertyFieldPayload) graphql.Marshaler { - return ec._PropertyFieldPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyFieldPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._PropertyFieldPayload(ctx, sel, v) -} - func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyGroup) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup @@ -37443,20 +36805,6 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree return ret } -func (ec *executionContext) marshalNPropertyItemPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx 
context.Context, sel ast.SelectionSet, v graphql1.PropertyItemPayload) graphql.Marshaler { - return ec._PropertyItemPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyItemPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._PropertyItemPayload(ctx, sel, v) -} - func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyLinkableFields) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { @@ -37719,115 +37067,31 @@ func (ec *executionContext) unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveAssetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveAssetPayload) graphql.Marshaler { - return ec._RemoveAssetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNRemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveAssetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveAssetPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaInput(ctx 
context.Context, v interface{}) (graphql1.RemoveDatasetSchemaInput, error) { res, err := ec.unmarshalInputRemoveDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { - return ec._RemoveDatasetSchemaPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNRemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveDatasetSchemaPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.RemoveInfoboxFieldInput, error) { res, err := ec.unmarshalInputRemoveInfoboxFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveInfoboxFieldPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { - return ec._RemoveInfoboxFieldPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNRemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveInfoboxFieldPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxInput(ctx context.Context, v interface{}) (graphql1.RemoveInfoboxInput, error) { res, err := ec.unmarshalInputRemoveInfoboxInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveInfoboxPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveInfoboxPayload) graphql.Marshaler { - return ec._RemoveInfoboxPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNRemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveInfoboxPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerInput(ctx context.Context, v interface{}) (graphql1.RemoveLayerInput, error) { res, err := ec.unmarshalInputRemoveLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveLayerPayload) graphql.Marshaler { - return ec._RemoveLayerPayload(ctx, sel, &v) -} - -func (ec *executionContext) 
marshalNRemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveLayerPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveLayerPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamInput(ctx context.Context, v interface{}) (graphql1.RemoveMemberFromTeamInput, error) { res, err := ec.unmarshalInputRemoveMemberFromTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveMemberFromTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { - return ec._RemoveMemberFromTeamPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNRemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveMemberFromTeamPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMyAuthInput(ctx context.Context, v interface{}) (graphql1.RemoveMyAuthInput, error) { res, err := ec.unmarshalInputRemoveMyAuthInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -37848,20 +37112,6 @@ func (ec *executionContext) 
unmarshalNRemoveWidgetInput2githubแš—comแš‹reearth return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRemoveWidgetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.RemoveWidgetPayload) graphql.Marshaler { - return ec._RemoveWidgetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNRemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveWidgetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._RemoveWidgetPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx context.Context, v interface{}) (graphql1.Role, error) { var res graphql1.Role err := res.UnmarshalGQL(v) @@ -37991,20 +37241,6 @@ func (ec *executionContext) unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reear return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNSignupPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.SignupPayload) graphql.Marshaler { - return ec._SignupPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SignupPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._SignupPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNString2string(ctx 
context.Context, v interface{}) (string, error) { res, err := graphql.UnmarshalString(v) return res, graphql.ErrorOnPath(ctx, err) @@ -38055,20 +37291,6 @@ func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNSyncDatasetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.SyncDatasetPayload) graphql.Marshaler { - return ec._SyncDatasetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SyncDatasetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._SyncDatasetPayload(ctx, sel, v) -} - func (ec *executionContext) marshalNTeam2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v graphql1.Team) graphql.Marshaler { return ec._Team(ctx, sel, &v) } @@ -38182,20 +37404,6 @@ func (ec *executionContext) unmarshalNUninstallPluginInput2githubแš—comแš‹reeart return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUninstallPluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UninstallPluginPayload) graphql.Marshaler { - return ec._UninstallPluginPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UninstallPluginPayload) graphql.Marshaler { - if v == nil { - 
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UninstallPluginPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUnlinkPropertyValueInput(ctx context.Context, v interface{}) (graphql1.UnlinkPropertyValueInput, error) { res, err := ec.unmarshalInputUnlinkPropertyValueInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -38206,77 +37414,21 @@ func (ec *executionContext) unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹re return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUpdateDatasetSchemaPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { - return ec._UpdateDatasetSchemaPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpdateDatasetSchemaPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerInput(ctx context.Context, v interface{}) (graphql1.UpdateLayerInput, error) { res, err := ec.unmarshalInputUpdateLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUpdateLayerPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx context.Context, sel 
ast.SelectionSet, v graphql1.UpdateLayerPayload) graphql.Marshaler { - return ec._UpdateLayerPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateLayerPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpdateLayerPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMeInput(ctx context.Context, v interface{}) (graphql1.UpdateMeInput, error) { res, err := ec.unmarshalInputUpdateMeInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUpdateMePayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateMePayload) graphql.Marshaler { - return ec._UpdateMePayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMePayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpdateMePayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamInput(ctx context.Context, v interface{}) (graphql1.UpdateMemberOfTeamInput, error) { res, err := ec.unmarshalInputUpdateMemberOfTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
marshalNUpdateMemberOfTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { - return ec._UpdateMemberOfTeamPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpdateMemberOfTeamPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateProjectInput(ctx context.Context, v interface{}) (graphql1.UpdateProjectInput, error) { res, err := ec.unmarshalInputUpdateProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -38343,58 +37495,16 @@ func (ec *executionContext) unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹r return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUpdateTeamPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateTeamPayload) graphql.Marshaler { - return ec._UpdateTeamPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateTeamPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpdateTeamPayload(ctx, sel, v) -} - func (ec 
*executionContext) unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetInput(ctx context.Context, v interface{}) (graphql1.UpdateWidgetInput, error) { res, err := ec.unmarshalInputUpdateWidgetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUpdateWidgetPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpdateWidgetPayload) graphql.Marshaler { - return ec._UpdateWidgetPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateWidgetPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpdateWidgetPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginInput(ctx context.Context, v interface{}) (graphql1.UpgradePluginInput, error) { res, err := ec.unmarshalInputUpgradePluginInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUpgradePluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UpgradePluginPayload) graphql.Marshaler { - return ec._UpgradePluginPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpgradePluginPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UpgradePluginPayload(ctx, sel, v) -} - func (ec *executionContext) unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (graphql.Upload, error) { res, err := graphql.UnmarshalUpload(v) return res, graphql.ErrorOnPath(ctx, err) @@ -38420,20 +37530,6 @@ func (ec *executionContext) unmarshalNUploadPluginInput2githubแš—comแš‹reearth return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUploadPluginPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.UploadPluginPayload) graphql.Marshaler { - return ec._UploadPluginPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UploadPluginPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._UploadPluginPayload(ctx, sel, v) -} - func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.User) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { @@ -38693,6 +37789,62 @@ func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel a return res } +func (ec *executionContext) marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } 
+ return ec._AddDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddDynamicDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddDynamicDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddLayerGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerItemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddLayerItemPayload(ctx, sel, v) +} + +func (ec *executionContext) 
marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddMemberToTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddMemberToTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddWidgetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddWidgetPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOAny2interface(ctx context.Context, v interface{}) (interface{}, error) { if v == nil { return nil, nil @@ -38739,6 +37891,34 @@ func (ec *executionContext) marshalOBoolean2แš–bool(ctx context.Context, sel ast return graphql.MarshalBoolean(*v) } +func (ec *executionContext) marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateAssetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateAssetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateInfoboxPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateInfoboxPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateScenePayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateScenePayload(ctx, sel, v) +} + +func (ec 
*executionContext) marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateTeamPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (*usecase.Cursor, error) { if v == nil { return nil, nil @@ -38797,6 +37977,27 @@ func (ec *executionContext) marshalODateTime2แš–timeแšTime(ctx context.Context, return graphql.MarshalTime(*v) } +func (ec *executionContext) marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteMePayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DeleteMePayload(ctx, sel, v) +} + +func (ec *executionContext) marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteProjectPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DeleteProjectPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DeleteTeamPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOFloat2แš–float64(ctx context.Context, v interface{}) (*float64, error) { if v == nil { return nil, nil @@ -38863,6 +38064,20 @@ func (ec *executionContext) marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘bac return graphql1.MarshalID(*v) } +func (ec 
*executionContext) marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportDatasetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ImportDatasetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ImportLayerPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { if v == nil { return graphql.Null @@ -38870,6 +38085,13 @@ func (ec *executionContext) marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearth return ec._Infobox(ctx, sel, v) } +func (ec *executionContext) marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.InstallPluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._InstallPluginPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOInt2แš–int(ctx context.Context, v interface{}) (*int, error) { if v == nil { return nil, nil @@ -38949,6 +38171,20 @@ func (ec *executionContext) marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹r return ec._MergedProperty(ctx, sel, v) } +func (ec *executionContext) marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + 
} + return ec._MoveInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._MoveLayerPayload(ctx, sel, v) +} + func (ec *executionContext) marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx context.Context, sel ast.SelectionSet, v graphql1.Node) graphql.Marshaler { if v == nil { return graphql.Null @@ -39007,6 +38243,13 @@ func (ec *executionContext) marshalOProject2แš–githubแš—comแš‹reearthแš‹reearth return ec._Project(ctx, sel, v) } +func (ec *executionContext) marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._ProjectPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.Property) graphql.Marshaler { if v == nil { return graphql.Null @@ -39068,6 +38311,13 @@ func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reear return ret } +func (ec *executionContext) marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyFieldPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyFieldPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.PropertyGroup) graphql.Marshaler { if v == nil { return graphql.Null @@ -39082,6 +38332,13 @@ func (ec *executionContext) marshalOPropertyItem2githubแš—comแš‹reearthแš‹reeart return ec._PropertyItem(ctx, sel, v) } +func (ec *executionContext) marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyItemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._PropertyItemPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchema) graphql.Marshaler { if v == nil { return graphql.Null @@ -39189,6 +38446,55 @@ func (ec *executionContext) marshalOPropertySchemaID2แš–githubแš—comแš‹reearth return graphql1.MarshalPropertySchemaID(*v) } +func (ec *executionContext) marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveAssetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveAssetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { + if v == nil { + return 
graphql.Null + } + return ec._RemoveInfoboxFieldPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveInfoboxPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveMemberFromTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveWidgetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveWidgetPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx context.Context, sel ast.SelectionSet, v *graphql1.Scene) graphql.Marshaler { if v == nil { return graphql.Null @@ -39233,6 +38539,13 @@ func (ec *executionContext) marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹ree return ec._SearchedUser(ctx, sel, v) } +func (ec *executionContext) 
marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SignupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SignupPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) { res, err := graphql.UnmarshalString(v) return res, graphql.ErrorOnPath(ctx, err) @@ -39293,6 +38606,13 @@ func (ec *executionContext) marshalOString2แš–string(ctx context.Context, sel as return graphql.MarshalString(*v) } +func (ec *executionContext) marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SyncDatasetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._SyncDatasetPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v *graphql1.Team) graphql.Marshaler { if v == nil { return graphql.Null @@ -39362,6 +38682,62 @@ func (ec *executionContext) marshalOURL2แš–netแš‹urlแšURL(ctx context.Context, return graphql1.MarshalURL(*v) } +func (ec *executionContext) marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UninstallPluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UninstallPluginPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { + if v == nil { + return 
graphql.Null + } + return ec._UpdateDatasetSchemaPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMePayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateMePayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateMemberOfTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateTeamPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateTeamPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateWidgetPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateWidgetPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.UpgradePluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpgradePluginPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (*graphql.Upload, error) { if v == nil { return nil, nil @@ -39377,6 +38753,13 @@ func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgen return graphql.MarshalUpload(*v) } +func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UploadPluginPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UploadPluginPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.User) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/schema.graphql b/schema.graphql index b0e7addd2..ec1f67b1c 100644 --- a/schema.graphql +++ b/schema.graphql @@ -1309,77 +1309,77 @@ type Query { type Mutation { # Asset - createAsset(input: CreateAssetInput!): CreateAssetPayload! - removeAsset(input: RemoveAssetInput!): RemoveAssetPayload! + createAsset(input: CreateAssetInput!): CreateAssetPayload + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload # User - signup(input: SignupInput!): SignupPayload! - updateMe(input: UpdateMeInput!): UpdateMePayload! - removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload! - deleteMe(input: DeleteMeInput!): DeleteMePayload! + signup(input: SignupInput!): SignupPayload + updateMe(input: UpdateMeInput!): UpdateMePayload + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload + deleteMe(input: DeleteMeInput!): DeleteMePayload # Team - createTeam(input: CreateTeamInput!): CreateTeamPayload! 
- deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload! - updateTeam(input: UpdateTeamInput!): UpdateTeamPayload! - addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload! - removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload! - updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload! + createTeam(input: CreateTeamInput!): CreateTeamPayload + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload + addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload + removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload # Project - createProject(input: CreateProjectInput!): ProjectPayload! - updateProject(input: UpdateProjectInput!): ProjectPayload! - publishProject(input: PublishProjectInput!): ProjectPayload! - deleteProject(input: DeleteProjectInput!): DeleteProjectPayload! + createProject(input: CreateProjectInput!): ProjectPayload + updateProject(input: UpdateProjectInput!): ProjectPayload + publishProject(input: PublishProjectInput!): ProjectPayload + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload # Plugin - uploadPlugin(input: UploadPluginInput!): UploadPluginPayload! + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload # Scene - createScene(input: CreateSceneInput!): CreateScenePayload! - addWidget(input: AddWidgetInput!): AddWidgetPayload! - updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload! - removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload! - installPlugin(input: InstallPluginInput!): InstallPluginPayload! - uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload! - upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload! 
+ createScene(input: CreateSceneInput!): CreateScenePayload + addWidget(input: AddWidgetInput!): AddWidgetPayload + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload + installPlugin(input: InstallPluginInput!): InstallPluginPayload + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload # Dataset - updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload! - syncDataset(input: SyncDatasetInput!): SyncDatasetPayload! - addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload! - addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload! - removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload! - importDataset(input: ImportDatasetInput!): ImportDatasetPayload! - addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload! + updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload + addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload + addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload + removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload + importDataset(input: ImportDatasetInput!): ImportDatasetPayload + addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload # Property - updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload! - updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload! - updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload! - updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload! 
- updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload! - removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload! - uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload! - linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload! - unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload! - addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload! - movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload! - removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload! - updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload! + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload + updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload + updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload + updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload + updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload + linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload # Layer - addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload! - addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload! 
- removeLayer(input: RemoveLayerInput!): RemoveLayerPayload! - updateLayer(input: UpdateLayerInput!): UpdateLayerPayload! - moveLayer(input: MoveLayerInput!): MoveLayerPayload! - createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload! - removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload! - addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload! - moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload! - removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload! - importLayer(input:ImportLayerInput!): ImportLayerPayload! + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload + moveLayer(input: MoveLayerInput!): MoveLayerPayload + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload + importLayer(input:ImportLayerInput!): ImportLayerPayload } schema { From 6a27c62934062c661f7dec60fa1e20f4f4494cab Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 23:21:43 +0900 Subject: [PATCH 016/253] fix: auth0 --- .../infrastructure/auth0/authenticator.go | 82 ++++++++++++------- .../auth0/authenticator_test.go | 1 + 2 files changed, 54 insertions(+), 29 deletions(-) diff --git a/internal/infrastructure/auth0/authenticator.go b/internal/infrastructure/auth0/authenticator.go index ac5603bed..91847d9a1 100644 --- a/internal/infrastructure/auth0/authenticator.go +++ b/internal/infrastructure/auth0/authenticator.go @@ -4,7 +4,6 @@ import ( "bytes" "encoding/json" "errors" - "fmt" "io" "net/http" "strings" @@ 
-27,21 +26,23 @@ type Auth0 struct { disableLogging bool } -type response struct { - ID string `json:"user_id"` - Name string `json:"name"` - UserName string `json:"username"` - Email string `json:"email"` - EmailVerified bool `json:"email_verified"` - Message string `json:"string"` - Token string `json:"access_token"` - ExpiresIn int64 `json:"expires_in"` -} - func currentTime() time.Time { return time.Now() } +type response struct { + ID string `json:"user_id"` + Name string `json:"name"` + UserName string `json:"username"` + Email string `json:"email"` + EmailVerified bool `json:"email_verified"` + Message string `json:"message"` + Token string `json:"access_token"` + Scope string `json:"scope"` + ExpiresIn int64 `json:"expires_in"` + ErrorDescription string `json:"error_description"` +} + func (u response) Into() gateway.AuthenticatorUser { name := u.UserName if name == "" { @@ -56,6 +57,13 @@ func (u response) Into() gateway.AuthenticatorUser { } } +func (u response) Error() string { + if u.ErrorDescription != "" { + return u.ErrorDescription + } + return u.Message +} + func New(domain, clientID, clientSecret string) *Auth0 { return &Auth0{ domain: urlFromDomain(domain), @@ -64,23 +72,20 @@ func New(domain, clientID, clientSecret string) *Auth0 { } } -func (a *Auth0) FetchUser(id string) (data gateway.AuthenticatorUser, err error) { - err = a.updateToken() - if err != nil { - return +func (a *Auth0) FetchUser(id string) (gateway.AuthenticatorUser, error) { + if err := a.updateToken(); err != nil { + return gateway.AuthenticatorUser{}, err } var r response - r, err = a.exec(http.MethodGet, "api/v2/users/"+id, a.token, nil) + r, err := a.exec(http.MethodGet, "api/v2/users/"+id, a.token, nil) if err != nil { if !a.disableLogging { - log.Errorf("auth0: fetch user: %s", err) + log.Errorf("auth0: fetch user: %+v", err) } - err = fmt.Errorf("failed to auth") - return + return gateway.AuthenticatorUser{}, errors.New("failed to auth") } - data = r.Into() - return + 
return r.Into(), nil } func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway.AuthenticatorUser, err error) { @@ -108,9 +113,9 @@ func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway r, err = a.exec(http.MethodPatch, "api/v2/users/"+p.ID, a.token, payload) if err != nil { if !a.disableLogging { - log.Errorf("auth0: update user: %s", err) + log.Errorf("auth0: update user: %+v", err) } - err = fmt.Errorf("failed to update user") + err = errors.New("failed to update user") return } @@ -147,17 +152,27 @@ func (a *Auth0) updateToken() error { r, err := a.exec(http.MethodPost, "oauth/token", "", map[string]string{ "client_id": a.clientID, "client_secret": a.clientSecret, - "audience": a.domain + "api/v2/", + "audience": urlFromDomain(a.domain) + "api/v2/", "grant_type": "client_credentials", + "scope": "read:users update:users", }) if err != nil { - return err + if !a.disableLogging { + log.Errorf("auth0: access token error: %+v", err) + } + return errors.New("failed to auth") } if a.current == nil { a.current = currentTime } + if r.Token == "" { + if !a.disableLogging { + log.Errorf("auth0: no token: %+v", r) + } + return errors.New("failed to auth") + } a.token = r.Token a.expireAt = a.current().Add(time.Duration(r.ExpiresIn * int64(time.Second))) @@ -188,7 +203,7 @@ func (a *Auth0) exec(method, path, token string, b interface{}) (r response, err } var req *http.Request - req, err = http.NewRequest(method, a.domain+path, body) + req, err = http.NewRequest(method, urlFromDomain(a.domain)+path, body) if err != nil { return } @@ -207,12 +222,21 @@ func (a *Auth0) exec(method, path, token string, b interface{}) (r response, err _ = resp.Body.Close() }() - err = json.NewDecoder(resp.Body).Decode(&r) + respb, err := io.ReadAll(resp.Body) if err != nil { return } + + if !a.disableLogging { + log.Infof("auth0: path: %s, status: %d, resp: %s", path, resp.StatusCode, respb) + } + + if err = json.Unmarshal(respb, &r); err != 
nil { + return + } + if resp.StatusCode >= 300 { - err = errors.New(r.Message) + err = errors.New(r.Error()) return } return diff --git a/internal/infrastructure/auth0/authenticator_test.go b/internal/infrastructure/auth0/authenticator_test.go index 0e7157e64..ac293563c 100644 --- a/internal/infrastructure/auth0/authenticator_test.go +++ b/internal/infrastructure/auth0/authenticator_test.go @@ -106,6 +106,7 @@ func client(t *testing.T) *http.Client { StatusCode: http.StatusOK, Body: res(map[string]interface{}{ "access_token": token, + "scope": "read:users update:users", "expires_in": expiresIn, }), Header: make(http.Header), From 2d08c503b132b970941f4aad633e7f33f5b87212 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 23:25:15 +0900 Subject: [PATCH 017/253] fix: errors are be overwriten by tx --- internal/usecase/interactor/asset.go | 8 +++-- internal/usecase/interactor/dataset.go | 28 ++++++++++++---- internal/usecase/interactor/layer.go | 44 ++++++++++++++++++------- internal/usecase/interactor/plugin.go | 4 ++- internal/usecase/interactor/project.go | 20 ++++++++--- internal/usecase/interactor/property.go | 32 +++++++++++++----- internal/usecase/interactor/scene.go | 28 ++++++++++++---- internal/usecase/interactor/team.go | 24 ++++++++++---- internal/usecase/interactor/user.go | 16 ++++++--- 9 files changed, 153 insertions(+), 51 deletions(-) diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index 92becb9f8..b4d0aa5e7 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -43,7 +43,9 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() url, err := i.file.UploadAsset(ctx, inp.File) @@ -85,7 +87,9 @@ func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Op return } defer func() { - err = tx.End(ctx) + 
if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() team, err := i.teamRepo.FindByID(ctx, asset.Team()) diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index 188be97bc..a1a0791b4 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -75,7 +75,9 @@ func (i *Dataset) UpdateDatasetSchema(ctx context.Context, inp interfaces.Update return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() schema.Rename(inp.Name) @@ -97,7 +99,9 @@ func (i *Dataset) AddDynamicDatasetSchema(ctx context.Context, inp interfaces.Ad return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() schemaBuilder := dataset.NewSchema(). @@ -128,7 +132,9 @@ func (i *Dataset) AddDynamicDataset(ctx context.Context, inp interfaces.AddDynam return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() fields := []*dataset.Field{} @@ -184,7 +190,9 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() seperator := ',' @@ -433,7 +441,9 @@ func (i *Dataset) Sync(ctx context.Context, sceneID id.SceneID, url string, oper return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.UpdateSceneLock(ctx, sceneID, scene.LockModeFree, scene.LockModeDatasetSyncing); err != nil { @@ -499,7 +509,9 @@ func (i *Dataset) AddDatasetSchema(ctx context.Context, inp interfaces.AddDatase return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() schemaBuilder := dataset.NewSchema(). 
@@ -552,7 +564,9 @@ func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.Remove return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() // list of datasets attached by the schema diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index 6b53538c0..79d7a0207 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -158,7 +158,9 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -242,7 +244,9 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -452,7 +456,9 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -518,7 +524,9 @@ func (i *Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, ope return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -560,7 +568,9 @@ func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operato return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -622,7 +632,9 @@ func (i *Layer) CreateInfobox(ctx context.Context, lid id.LayerID, operator *use return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); 
err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -673,7 +685,9 @@ func (i *Layer) RemoveInfobox(ctx context.Context, layerID id.LayerID, operator return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -719,7 +733,9 @@ func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFi return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -792,7 +808,9 @@ func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfobox return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -833,7 +851,9 @@ func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInf return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -899,7 +919,9 @@ func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if inp.File == nil { diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index 200b289a7..3992764ff 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -47,7 +47,9 @@ func (i *Plugin) Upload(ctx context.Context, r io.Reader, operator *usecase.Oper return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go 
index d956d0209..8d0c914ff 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -74,7 +74,9 @@ func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, o return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.CanWriteTeam(p.TeamID, operator); err != nil { @@ -122,7 +124,9 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -227,7 +231,9 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -339,7 +345,9 @@ func (i *Project) createAsset(ctx context.Context, f *file.File, t id.TeamID) (_ return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() url, err := i.file.UploadAsset(ctx, f) @@ -374,7 +382,9 @@ func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator * return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index 96dba6901..1017bb70e 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -97,7 +97,9 @@ func (i *Property) UpdateValue(ctx context.Context, inp interfaces.UpdatePropert return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -140,7 +142,9 @@ func (i *Property) 
RemoveField(ctx context.Context, inp interfaces.RemovePropert return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -175,7 +179,9 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if inp.File == nil { @@ -255,7 +261,9 @@ func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyVal return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -315,7 +323,9 @@ func (i *Property) UnlinkValue(ctx context.Context, inp interfaces.UnlinkPropert return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -362,7 +372,9 @@ func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemPa return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -410,7 +422,9 @@ func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItem return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) @@ -448,7 +462,9 @@ func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemoveProperty return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() scenes, err := i.OnlyWritableScenes(ctx, operator) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index f49335d2e..941442bba 100644 --- a/internal/usecase/interactor/scene.go +++ 
b/internal/usecase/interactor/scene.go @@ -70,7 +70,9 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -156,7 +158,9 @@ func (i *Scene) AddWidget(ctx context.Context, id id.SceneID, pid id.PluginID, e return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -217,7 +221,9 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -263,7 +269,9 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, pid id.PluginID return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -313,7 +321,9 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if operator == nil { @@ -386,7 +396,9 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { @@ -525,7 +537,9 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if err := i.OnlyOperator(operator); err != nil { diff --git a/internal/usecase/interactor/team.go b/internal/usecase/interactor/team.go index 
a3cd86d3c..fdff3135a 100644 --- a/internal/usecase/interactor/team.go +++ b/internal/usecase/interactor/team.go @@ -52,7 +52,9 @@ func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID) (_ return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() team, err := user.NewTeam(). @@ -84,7 +86,9 @@ func (i *Team) Update(ctx context.Context, id id.TeamID, name string, operator * return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if operator == nil { @@ -120,7 +124,9 @@ func (i *Team) AddMember(ctx context.Context, id id.TeamID, u id.UserID, role us return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if operator == nil { @@ -164,7 +170,9 @@ func (i *Team) RemoveMember(ctx context.Context, id id.TeamID, u id.UserID, oper return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if operator == nil { @@ -207,7 +215,9 @@ func (i *Team) UpdateMember(ctx context.Context, id id.TeamID, u id.UserID, role return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if operator == nil { @@ -250,7 +260,9 @@ func (i *Team) Remove(ctx context.Context, id id.TeamID, operator *usecase.Opera return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() if operator == nil { diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 78e1b43f9..b2a9e9285 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -90,7 +90,9 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() // Check if user and team already exists @@ -178,7 +180,9 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() var team *user.Team @@ -254,7 +258,9 @@ func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator * return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() u, err = i.userRepo.FindByID(ctx, operator.User) @@ -291,7 +297,9 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase return } defer func() { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } }() u, err := i.userRepo.FindByID(ctx, userID) From f531bd0a89123f05b4f1d93ac526b16e9898bc75 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 23:25:42 +0900 Subject: [PATCH 018/253] fix: deleting user --- internal/usecase/interactor/user.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index b2a9e9285..69b972662 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -288,7 +288,7 @@ func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *use } func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase.Operator) (err error) { - if operator == nil || operator.User.IsNil() || userID.IsNil() || userID != operator.User { + if userID.IsNil() || operator != nil && !operator.User.IsNil() && userID != operator.User { return errors.New("invalid user id") } From 0815d37dad041845e5c6b7d2526df77a7649e673 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Jun 2021 23:26:10 +0900 Subject: [PATCH 019/253] fix: always enable dev 
mode in debug --- internal/app/config.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/internal/app/config.go b/internal/app/config.go index 4fb0bf287..21484efde 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -58,6 +58,10 @@ func ReadConfig(debug bool) (*Config, error) { var c Config err := envconfig.Process(configPrefix, &c) + if debug { + c.Dev = true + } + return &c, err } From a5eeaeb373a757191335adf3f5959fef7cf5a106 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 2 Jun 2021 16:09:01 +0900 Subject: [PATCH 020/253] fix: user deletion --- internal/usecase/interactor/user.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 69b972662..67abe52c6 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -288,7 +288,11 @@ func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *use } func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase.Operator) (err error) { - if userID.IsNil() || operator != nil && !operator.User.IsNil() && userID != operator.User { + if operator == nil || operator.User.IsNil() { + return nil + } + + if userID.IsNil() || userID != operator.User { return errors.New("invalid user id") } From 72ed8e15baf57bee7279169e1f78d523265d4a4a Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 2 Jun 2021 11:56:27 +0300 Subject: [PATCH 021/253] test: pkg/shp (#5) * - comment out unused code * - add test data * - reader test * - sequential reader test * - shp type test * - shp test * - writer test * - zip reader test * - add test data * fix PR comments Co-authored-by: YK --- pkg/shp/reader.go | 22 +- pkg/shp/reader_test.go | 220 ++++++++++++++++++ pkg/shp/sequentialreader.go | 38 +-- pkg/shp/sequentialreader_test.go | 52 +++++ pkg/shp/shapefile.go | 4 +- pkg/shp/shapefile_test.go | 196 ++++++++++++++++ 
pkg/shp/shapetype_string_test.go | 99 ++++++++ pkg/shp/test_files/empty.zip | Bin 0 -> 6738 bytes pkg/shp/test_files/multi.zip | Bin 0 -> 331042 bytes pkg/shp/test_files/multipatch.shp | Bin 0 -> 1192 bytes pkg/shp/test_files/multipoint.shp | Bin 0 -> 196 bytes pkg/shp/test_files/multipointm.shp | Bin 0 -> 236 bytes pkg/shp/test_files/multipointz.shp | Bin 0 -> 276 bytes .../test_files/ne_110m_admin_0_countries.zip | Bin 0 -> 196764 bytes pkg/shp/test_files/point.shp | Bin 0 -> 184 bytes pkg/shp/test_files/pointm.shp | Bin 0 -> 208 bytes pkg/shp/test_files/pointz.shp | Bin 0 -> 232 bytes pkg/shp/test_files/polygon.shp | Bin 0 -> 236 bytes pkg/shp/test_files/polygonm.shp | Bin 0 -> 292 bytes pkg/shp/test_files/polygonz.shp | Bin 0 -> 348 bytes pkg/shp/test_files/polyline.shp | Bin 0 -> 308 bytes pkg/shp/test_files/polylinem.shp | Bin 0 -> 388 bytes pkg/shp/test_files/polylinez.shp | Bin 0 -> 468 bytes pkg/shp/testdata_test.go | 201 ++++++++++++++++ pkg/shp/writer_test.go | 154 ++++++++++++ pkg/shp/zipreader.go | 8 +- pkg/shp/zipreader_test.go | 84 +++++++ 27 files changed, 1047 insertions(+), 31 deletions(-) create mode 100644 pkg/shp/reader_test.go create mode 100644 pkg/shp/sequentialreader_test.go create mode 100644 pkg/shp/shapefile_test.go create mode 100644 pkg/shp/shapetype_string_test.go create mode 100644 pkg/shp/test_files/empty.zip create mode 100644 pkg/shp/test_files/multi.zip create mode 100644 pkg/shp/test_files/multipatch.shp create mode 100644 pkg/shp/test_files/multipoint.shp create mode 100644 pkg/shp/test_files/multipointm.shp create mode 100644 pkg/shp/test_files/multipointz.shp create mode 100644 pkg/shp/test_files/ne_110m_admin_0_countries.zip create mode 100644 pkg/shp/test_files/point.shp create mode 100644 pkg/shp/test_files/pointm.shp create mode 100644 pkg/shp/test_files/pointz.shp create mode 100644 pkg/shp/test_files/polygon.shp create mode 100644 pkg/shp/test_files/polygonm.shp create mode 100644 pkg/shp/test_files/polygonz.shp create 
mode 100644 pkg/shp/test_files/polyline.shp create mode 100644 pkg/shp/test_files/polylinem.shp create mode 100644 pkg/shp/test_files/polylinez.shp create mode 100644 pkg/shp/testdata_test.go create mode 100644 pkg/shp/writer_test.go create mode 100644 pkg/shp/zipreader_test.go diff --git a/pkg/shp/reader.go b/pkg/shp/reader.go index e391625ce..9687b6740 100644 --- a/pkg/shp/reader.go +++ b/pkg/shp/reader.go @@ -6,8 +6,6 @@ import ( "fmt" "io" "math" - "os" - "strings" ) // Reader provides a interface for reading Shapefiles. Calls @@ -19,17 +17,18 @@ type Reader struct { bbox Box err error - shp io.ReadSeeker - shape Shape - num int32 - filename string + shp io.ReadSeeker + shape Shape + num int32 + // filename string filelength int64 + /* Note: not used dbf io.ReadSeeker dbfFields []Field dbfNumRecords int32 dbfHeaderLength int16 - dbfRecordLength int16 + dbfRecordLength int16*/ } // ReadFrom read from io.Reader @@ -101,11 +100,12 @@ func (r *Reader) Shape() (int, Shape) { return int(r.num) - 1, r.shape } +/* Note: not used // Attribute returns value of the n-th attribute of the most recent feature // that was read by a call to Next. func (r *Reader) Attribute(n int) string { return r.ReadAttribute(int(r.num)-1, n) -} +}*/ // newShape creates a new shape with a given type. func newShape(shapetype ShapeType) (Shape, error) { @@ -202,6 +202,7 @@ func (r *Reader) Next() bool { return true } +/* Note: not used // Opens DBF file using r.filename + "dbf". This method // will parse the header and fill out all dbf* values int // the f object. @@ -253,7 +254,7 @@ func (r *Reader) Fields() []Field { return nil } return r.dbfFields -} +}*/ // Err returns the last non-EOF error encountered. func (r *Reader) Err() error { @@ -263,6 +264,7 @@ func (r *Reader) Err() error { return r.err } +/* Note: not used // ReadAttribute returns the attribute value at row for field in // the DBF table as a string. Both values starts at 0. 
func (r *Reader) ReadAttribute(row int, field int) string { @@ -284,4 +286,4 @@ func (r *Reader) ReadAttribute(row int, field int) string { return "" } return strings.Trim(string(buf[:]), " ") -} +}*/ diff --git a/pkg/shp/reader_test.go b/pkg/shp/reader_test.go new file mode 100644 index 000000000..0dbcab2c9 --- /dev/null +++ b/pkg/shp/reader_test.go @@ -0,0 +1,220 @@ +package shp + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func assertPointsEqual(t *testing.T, a, b []float64, msgAndArgs ...interface{}) bool { + if !assert.True(t, len(a) == len(b), msgAndArgs...) { + return false + } + for k, v := range a { + if !assert.True(t, v == b[k], msgAndArgs...) { + return false + } + } + return true +} + +func getShapesFromFile(prefix string, t *testing.T) (shapes []Shape) { + filename := prefix + ".shp" + ior, _ := os.Open(filename) + file, err := ReadFrom(ior) + assert.Nil(t, err, "Failed to open shapefile") + + defer func() { + err := ior.Close() + assert.Nil(t, err, "Failed to close shapefile") + }() + + for file.Next() { + _, shape := file.Shape() + shapes = append(shapes, shape) + } + assert.Nil(t, file.Err(), "Error while getting shapes") + + return shapes +} + +func testPoint(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*Point) + assert.True(t, ok, "Failed to type assert.") + assertPointsEqual(t, []float64{p.X, p.Y}, points[n], "Points did not match.") + } +} + +func testPolyLine(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolyLine) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y}, "Points did not match.") + } + } +} + +func testPolygon(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*Polygon) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + 
assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y}, "Points did not match.") + } + } +} + +func testMultiPoint(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPoint) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y}, "Points did not match.") + } + } +} + +func testPointZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PointZ) + assert.True(t, ok, "Failed to type assert.") + assertPointsEqual(t, []float64{p.X, p.Y, p.Z}, points[n], "Points did not match.") + } +} + +func testPolyLineZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolyLineZ) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func testPolygonZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolygonZ) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func testMultiPointZ(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPointZ) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func testPointM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PointM) + assert.True(t, ok, "Failed to type assert.") + assertPointsEqual(t, []float64{p.X, p.Y, p.M}, points[n], "Points did not match.") + } +} + +func testPolyLineM(t *testing.T, points [][]float64, shapes []Shape) { + 
for n, s := range shapes { + p, ok := s.(*PolyLineM) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}, "Points did not match.") + } + } +} + +func testPolygonM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*PolygonM) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}, "Points did not match.") + } + } +} + +func testMultiPointM(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPointM) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}, "Points did not match.") + } + } +} + +func testMultiPatch(t *testing.T, points [][]float64, shapes []Shape) { + for n, s := range shapes { + p, ok := s.(*MultiPatch) + assert.True(t, ok, "Failed to type assert.") + for k, point := range p.Points { + assertPointsEqual(t, points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}, "Points did not match.") + } + } +} + +func TestReadBBox(t *testing.T) { + tests := []struct { + filename string + want Box + }{ + {"test_files/multipatch.shp", Box{0, 0, 10, 10}}, + {"test_files/multipoint.shp", Box{0, 5, 10, 10}}, + {"test_files/multipointm.shp", Box{0, 5, 10, 10}}, + {"test_files/multipointz.shp", Box{0, 5, 10, 10}}, + {"test_files/point.shp", Box{0, 5, 10, 10}}, + {"test_files/pointm.shp", Box{0, 5, 10, 10}}, + {"test_files/pointz.shp", Box{0, 5, 10, 10}}, + {"test_files/polygon.shp", Box{0, 0, 5, 5}}, + {"test_files/polygonm.shp", Box{0, 0, 5, 5}}, + {"test_files/polygonz.shp", Box{0, 0, 5, 5}}, + {"test_files/polyline.shp", Box{0, 0, 25, 25}}, + {"test_files/polylinem.shp", Box{0, 0, 25, 25}}, + {"test_files/polylinez.shp", Box{0, 0, 25, 25}}, + } + for _, tt := 
range tests { + f, _ := os.Open(tt.filename) + r, err := ReadFrom(f) + if err != nil { + t.Fatalf("%v", err) + } + if got := r.BBox().MinX; got != tt.want.MinX { + t.Errorf("got MinX = %v, want %v", got, tt.want.MinX) + } + if got := r.BBox().MinY; got != tt.want.MinY { + t.Errorf("got MinY = %v, want %v", got, tt.want.MinY) + } + if got := r.BBox().MaxX; got != tt.want.MaxX { + t.Errorf("got MaxX = %v, want %v", got, tt.want.MaxX) + } + if got := r.BBox().MaxY; got != tt.want.MaxY { + t.Errorf("got MaxY = %v, want %v", got, tt.want.MaxY) + } + } +} + +func TestReader(t *testing.T) { + t.Parallel() + testCases := testsData + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + shapes := getShapesFromFile(tc.name, tt) + assert.Equal(tt, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + tc.tester(tt, tc.points, shapes) + }) + } + +} diff --git a/pkg/shp/sequentialreader.go b/pkg/shp/sequentialreader.go index 7f3b22104..ca6a7da11 100644 --- a/pkg/shp/sequentialreader.go +++ b/pkg/shp/sequentialreader.go @@ -4,16 +4,14 @@ import ( "encoding/binary" "fmt" "io" - "math" - "strings" ) // SequentialReader is the interface that allows reading shapes and attributes one after another. It also embeds io.Closer. type SequentialReader interface { - // Close() frees the resources allocated by the SequentialReader. + // Closer frees the resources allocated by the SequentialReader. io.Closer - // Next() tries to advance the reading by one shape and one attribute row + // Next tries to advance the reading by one shape and one attribute row // and returns true if the read operation could be performed without any // error. Next() bool @@ -22,6 +20,7 @@ type SequentialReader interface { // encountered any errors, nil is returned for the Shape. Shape() (int, Shape) + /* Note: not used // Attribute returns the value of the n-th attribute in the current row. 
If // the SequentialReader encountered any errors, the empty string is // returned. @@ -29,12 +28,13 @@ type SequentialReader interface { // Fields returns the fields of the database. If the SequentialReader // encountered any errors, nil is returned. - Fields() []Field + Fields() []Field*/ // Err returns the last non-EOF error encountered. Err() error } +/* Note: not used // Attributes returns all attributes of the shape that sr was last advanced to. func Attributes(sr SequentialReader) []string { if sr.Err() != nil { @@ -50,13 +50,13 @@ func Attributes(sr SequentialReader) []string { // AttributeCount returns the number of fields of the database. func AttributeCount(sr SequentialReader) int { return len(sr.Fields()) -} +}*/ // seqReader implements SequentialReader based on external io.ReadCloser // instances type seqReader struct { - shp, dbf io.ReadCloser - err error + shp/*, dbf*/ io.ReadCloser + err error geometryType ShapeType bbox Box @@ -65,11 +65,12 @@ type seqReader struct { num int32 filelength int64 + /* Note: not used dbfFields []Field dbfNumRecords int32 dbfHeaderLength int16 dbfRecordLength int16 - dbfRow []byte + dbfRow []byte*/ } // Read and parse headers in the Shapefile. This will fill out GeometryType, @@ -116,6 +117,7 @@ func (sr *seqReader) readHeaders() { return } + /* Note: not used // dbf header er = &errReader{Reader: sr.dbf} if sr.dbf == nil { @@ -167,7 +169,7 @@ func (sr *seqReader) readHeaders() { sr.err = fmt.Errorf("Field descriptor array terminator not found") return } - sr.dbfRow = make([]byte, sr.dbfRecordLength) + sr.dbfRow = make([]byte, sr.dbfRecordLength)*/ } // Next implements a method of interface SequentialReader for seqReader. 
@@ -232,13 +234,14 @@ func (sr *seqReader) Next() bool { sr.err = fmt.Errorf("error when discarding bytes on sequential read: %v", ce) return false } + /* Note: not used if _, err := io.ReadFull(sr.dbf, sr.dbfRow); err != nil { sr.err = fmt.Errorf("error when reading DBF row: %v", err) return false } if sr.dbfRow[0] != 0x20 && sr.dbfRow[0] != 0x2a { sr.err = fmt.Errorf("Attribute row %d starts with incorrect deletion indicator", num) - } + }*/ return sr.err == nil } @@ -247,6 +250,7 @@ func (sr *seqReader) Shape() (int, Shape) { return int(sr.num) - 1, sr.shape } +/* Note: not used // Attribute implements a method of interface SequentialReader for seqReader. func (sr *seqReader) Attribute(n int) string { if sr.err != nil { @@ -259,7 +263,7 @@ func (sr *seqReader) Attribute(n int) string { } s := string(sr.dbfRow[start : start+int(sr.dbfFields[f].Size)]) return strings.Trim(s, " ") -} +}*/ // Err returns the first non-EOF error that was encountered. func (sr *seqReader) Err() error { @@ -274,21 +278,23 @@ func (sr *seqReader) Close() error { if err := sr.shp.Close(); err != nil { return err } + /* Note: not used if err := sr.dbf.Close(); err != nil { return err - } + }*/ return nil } +/* Note: not used // Fields returns a slice of the fields that are present in the DBF table. func (sr *seqReader) Fields() []Field { return sr.dbfFields -} +}*/ // SequentialReaderFromExt returns a new SequentialReader that interprets shp // as a source of shapes whose attributes can be retrieved from dbf. 
-func SequentialReaderFromExt(shp, dbf io.ReadCloser) SequentialReader { - sr := &seqReader{shp: shp, dbf: dbf} +func SequentialReaderFromExt(shp /*, dbf*/ io.ReadCloser) SequentialReader { + sr := &seqReader{shp: shp /*, dbf: dbf*/} sr.readHeaders() return sr } diff --git a/pkg/shp/sequentialreader_test.go b/pkg/shp/sequentialreader_test.go new file mode 100644 index 000000000..5df4ec854 --- /dev/null +++ b/pkg/shp/sequentialreader_test.go @@ -0,0 +1,52 @@ +package shp + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func openFile(name string, t *testing.T) *os.File { + f, err := os.Open(name) + if err != nil { + t.Fatalf("Failed to open %s: %v", name, err) + } + return f +} + +func getShapesSequentially(prefix string, t *testing.T) (shapes []Shape) { + shp := openFile(prefix+".shp", t) + // dbf := openFile(prefix+".dbf", t) + + sr := SequentialReaderFromExt(shp /*, dbf*/) + err := sr.Err() + assert.Nil(t, err, "Error when iterating over the shapefile header") + + for sr.Next() { + _, shape := sr.Shape() + shapes = append(shapes, shape) + } + err = sr.Err() + assert.Nil(t, err, "Error when iterating over the shapes") + + err = sr.Close() + assert.Nil(t, err, "Could not close sequential reader") + + return shapes +} + +func TestSequentialReader(t *testing.T) { + t.Parallel() + testCases := testsData + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + shapes := getShapesSequentially(tc.name, tt) + assert.Equal(tt, tc.count, len(shapes), "Number of shapes for %s read was wrong. 
Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + tc.tester(tt, tc.points, shapes) + }) + } +} diff --git a/pkg/shp/shapefile.go b/pkg/shp/shapefile.go index 61b2e3660..a1c661c0b 100644 --- a/pkg/shp/shapefile.go +++ b/pkg/shp/shapefile.go @@ -3,7 +3,6 @@ package shp import ( "encoding/binary" "io" - "strings" ) //go:generate stringer -type=ShapeType @@ -1025,6 +1024,7 @@ type Field struct { Padding [14]byte } +/* Note: not used // Returns a string representation of the Field. Currently // this only returns field name. func (f Field) String() string { @@ -1063,4 +1063,4 @@ func DateField(name string) Field { field := Field{Fieldtype: 'D', Size: 8} copy(field.Name[:], []byte(name)) return field -} +}*/ diff --git a/pkg/shp/shapefile_test.go b/pkg/shp/shapefile_test.go new file mode 100644 index 000000000..d50142ffa --- /dev/null +++ b/pkg/shp/shapefile_test.go @@ -0,0 +1,196 @@ +package shp + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBox_ExtendWithPoint(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input struct { + b Box + p Point + } + expected Box + }{ + { + name: "MaxY", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{0, 2}}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 1, + MaxY: 2, + }, + }, + { + name: "MaxX", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{2, 0}}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 2, + MaxY: 1, + }, + }, + { + name: "MinX", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{-1, 0}}, + expected: Box{ + MinX: -1, + MinY: 0, + MaxX: 1, + MaxY: 1, + }, + }, + { + name: "MinY", + input: struct { + b Box + p Point + }{ + b: Box{0, 0, 1, 1}, + p: Point{0, -1}}, + expected: Box{ + MinX: 0, + MinY: -1, + MaxX: 1, + MaxY: 1, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.input.b.ExtendWithPoint(tc.input.p) + assert.Equal(tt, 
tc.expected, tc.input.b) + }) + } + +} + +func TestBox_Extend(t *testing.T) { + a := Box{-124.763068, 45.543541, -116.915989, 49.002494} + b := Box{-92.888114, 42.49192, -86.805415, 47.080621} + a.Extend(b) + c := Box{-124.763068, 42.49192, -86.805415, 49.002494} + if a.MinX != c.MinX { + t.Errorf("a.MinX = %v, want %v", a.MinX, c.MinX) + } + if a.MinY != c.MinY { + t.Errorf("a.MinY = %v, want %v", a.MinY, c.MinY) + } + if a.MaxX != c.MaxX { + t.Errorf("a.MaxX = %v, want %v", a.MaxX, c.MaxX) + } + if a.MaxY != c.MaxY { + t.Errorf("a.MaxY = %v, want %v", a.MaxY, c.MaxY) + } +} + +func TestNewPolyLine(t *testing.T) { + points := [][]Point{ + {Point{0.0, 0.0}, Point{5.0, 5.0}}, + {Point{10.0, 10.0}, Point{15.0, 15.0}}, + } + polyLine := NewPolyLine(points) + + expected := &PolyLine{ + Box: Box{MinX: 0, MinY: 0, MaxX: 15, MaxY: 15}, + NumParts: 2, + NumPoints: 4, + Parts: []int32{0, 2}, + Points: []Point{ + {X: 0, Y: 0}, + {X: 5, Y: 5}, + {X: 10, Y: 10}, + {X: 15, Y: 15}, + }, + } + + assert.Equal(t, expected, polyLine) +} + +func TestBBoxFromPoints(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []Point + expected Box + }{ + { + name: "Single point", + input: []Point{{ + X: 1, + Y: 1, + }}, + expected: Box{ + MinX: 1, + MinY: 1, + MaxX: 1, + MaxY: 1, + }, + }, + { + name: "Tow points", + input: []Point{{ + X: 1, + Y: 1, + }, { + X: 0, + Y: 0, + }}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 1, + MaxY: 1, + }, + }, + { + name: "Multi points", + input: []Point{{ + X: 2, + Y: 2, + }, { + X: 0, + Y: 0, + }, { + X: 1, + Y: 3, + }}, + expected: Box{ + MinX: 0, + MinY: 0, + MaxX: 2, + MaxY: 3, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, BBoxFromPoints(tc.input)) + }) + } +} diff --git a/pkg/shp/shapetype_string_test.go b/pkg/shp/shapetype_string_test.go new file mode 100644 index 000000000..ee910829f --- /dev/null +++ 
b/pkg/shp/shapetype_string_test.go @@ -0,0 +1,99 @@ +package shp + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestShapeType_String(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input ShapeType + expected string + }{ + { + name: "NULL", + input: 0, + expected: "NULL", + }, + { + name: "POINT", + input: 1, + expected: "POINT", + }, + { + name: "POLYLINE", + input: 3, + expected: "POLYLINE", + }, + { + name: "POLYGON", + input: 5, + expected: "POLYGON", + }, + { + name: "MULTIPOINT", + input: 8, + expected: "MULTIPOINT", + }, + { + name: "POINTZ", + input: 11, + expected: "POINTZ", + }, + { + name: "POLYLINEZ", + input: 13, + expected: "POLYLINEZ", + }, + { + name: "POLYGONZ", + input: 15, + expected: "POLYGONZ", + }, + { + name: "MULTIPOINTZ", + input: 18, + expected: "MULTIPOINTZ", + }, + { + name: "POINTM", + input: 21, + expected: "POINTM", + }, + { + name: "POLYLINEM", + input: 23, + expected: "POLYLINEM", + }, + { + name: "POLYGONM", + input: 25, + expected: "POLYGONM", + }, + { + name: "MULTIPOINTM", + input: 28, + expected: "MULTIPOINTM", + }, + { + name: "MULTIPATCH", + input: 31, + expected: "MULTIPATCH", + }, + { + name: "MULTIPATCH", + input: -1, + expected: "ShapeType(-1)", + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.String()) + }) + } +} diff --git a/pkg/shp/test_files/empty.zip b/pkg/shp/test_files/empty.zip new file mode 100644 index 0000000000000000000000000000000000000000..acf7655b553c3d629f9d371a986d1291677ffc57 GIT binary patch literal 6738 zcma*sMNk|H)Ft2s8Yj5ByEN|Z?(WjK1R8?72Pe2EXpp4Q;M&36-6c2#4La|i*-h0{ zP2I)0tFt?+PfHa6hz|e&PypEo&1$to7u8-Q06>iy0Kfns2e^YRxw%1ZmezJ|PVSZ< zOIr^gcW*BzFoaV_URq9Fp3~9W%~f9y6@b9$18>@A4{z$@hYo-P-opU^|GRcw)J1nq zsipWU>-*Qzv%&jB%B(&q3@_Fl?g4kkkq>r<8VoFkrp~JH%5~bR#9TYs8y*kY8<2Je zQvoE5k;QQfH0XQ^M59>Z(*5VpOL1|(U!hOW&$77to^qR^L9d5?#f~xH*Oqz@W8_NY 
z{hm;X*MkrB69U2$j09mr&o0fP1I9!Mc||%TS%w%-|=4>l1AAmV}m0HrD;p{ zc@`H=uS}C$!Ye&kH>FYv;WomRk0w7HV=7)qfYYgsEynS9rd&g<@r8uhA z;{#$guZ9-njAiOYl55Ay_na$7^63sVc>8SrsyDQyIKpMdlxN1y5d~z?Bf=v_`;N5R z#utRbsBn!65s69UZgN~#3sIq@BjKEi=(Z=kr?aU8PY`Oyj7Ija!Zm}ld`U9dw7zv} zaa3GsVBC(;pzw#Nbn)6CA2;zCVa|iYO5q()U(n%3~YA-ZCQ>RuJ z5%56pcb5ujlL%aQ!U})o?)?{%u99kfI$FqOtguFs*JZ7k6*u1E+JI#p$g{LRGMm>r zzF*v7g&9BGpRwt!sC=95tup4Cg*=U!X(-n28J!R3@%wP~9@a{WC!<<1MAC-yAxDmr z6ZbR%3@^ONy*5FEvZ631BC)QZ_p7!>Lz<#+NP!)~_s?@}{qpx0zH7yu1ND#~n|Pw+ zZeoSv@dOu%_#71CF~g1113o383bw;sx-HwoXnar`iLG+En^(ec4Kr$?EMvpkN+GtgUcGXEn1Y0`g{OtCj3(C$ z{P(|DpT@jK_j;)!q)q4S5He>)Rps%{eI$h@;-G~VPPjS#;GyH>ilIUBQtf+f zW28)nbMKxOTlscStAd*vcmjh8)0c(OPj(Xhuf5h%k(w3Gl>-2W!!5J-TknSrrXxj>gom|x?*k801!Jj z(sR@%gs=!o{>uP6*33zkb$N(=Hh`hNxNa&7H1wxN!gO{%VJd5k_kJb)i=^pV;n?}4 zgX$cAFXD~bw3#1H=dfAD-4DK04XbK$^N36>58HkRz41}pAS7LCvVCua`y{w4HQRe$E1r9 zU6A8OlyKtM@Kvao@;VOTx;dIU^-!r3qwhFfInjyGwMOIgI7(M%T^hLD9-B;uj z>WEK}*#);dzl`gusmR;Or!g4EtVh13c5_#KTRkiLVQcZKWk?kG{ZB|kO{~Tp^jiL* zNCea6E-w#dkI%;WgP_u~g1xIZ!Rn?C+Ih992f}d2F?^m$-KOqy0IRqwtGM7ngAYufSx@Fb@I8RPjWu zd-Tdk4hO>QuY-n!ZvR>-ivoE7^!#$Z(?bnODgM0?ly+JWL#L;oX$;jc>`gj z0Wmv4?FP{pUnMKiiOf;SV6Mhps}DhlGxN7fz8I7?rRAKk2djxiP9Io=;Wh^aU~sy7ejn0w z-(lqjqxYmBE1LFs?5Ibb5yA*g_(ujpD!b(;lWtvrf$M&n|8i&clC3ZODwqLdZ_x1y zy8pyzwQsdOHP{=3I&2!fB;YV{9czIbWpR&ny}d9bF(H+lO(fOwk! z03^n>abNwaaDhgVfaYnNG)!vX2r|XnWGWK zKRo3S{I;!MPeEJ@2biYZN5V&GATHOrmLnHvD5XAsND&L_V}drns0gm0U(5+`XG#a$ zzC^6omS=?^zf{70TuQLa?FPZqYbR7zW_iz|75e;hNCBU{FnxoJwUK#zk)n;3zVnav zMm4?4&uZYceTWfQAEK zsWp4ATe<2_aNy>XA`(lN2!HuwPcT*Z*VN*7?qJ~cf} z?7!f+q9<7VN@PY~q$5SG$or9fBhTSIn8TFwdD7$TOW**hzmSF0{gESGa9t)pC2F(y=P~? z{f3FrCfKLBK0OWLQ;Tm7K)|%irFZ;;pT4lp^q~h)sq=;D9Fv9j^kc&~vM! 
zrYk#YoKXU|M1s7p5s;+xS$C)NQ*k>MhgE=cPGa#-|0lArDM zScI&I)(i@fUrJ;N)jgw&Vm&E);ZjZB757oW9KM#%f=GuxVE<|O$uyS=&HW&%4q zqr_}HOuGgctC)K+-4?8VgK6j`j|A^R5Ucg0Sk3oi9~6*bW4#n3Hs#O$pinJ`qpnD% z9T#2Ybz`1!qV0!@FK4|FH-Kb4X)_{>P@JfKF0Js#fsz-D@=Xh=*0--^4GYdf;S3&1 zo|#f(dy?vO16aS7TKv-omWT>7Zc{#`(wk!Y0+m-`5>M@_CV#=cm4%Lm{o@a+?Y=;E zqgqa&MMD2XFKMXIP~Uuq&5oTdyGh;1Qr9CHXdJ9$M=QiJiZs|6Afp6L zGFGPxELmtjNbGSVGk8w%ANU@bW2Z^P={Y2h$=wN+e)%iWG(ec_JVHxiV;^14ffjX^8CC|*GkxyK- z&`@M5Rwdutm?GMYzHc?&t9O`*COmJQhpZGKPT$uy$@M@5k?fa{yp_(4jjulTqIt1o z4pe6#z^}jwVAv&jPyg_|v$U?}V5qB&r4NFY#-&ttm??`!63<_F`?gQRS@Sv-0r|jw za7IiLd&ob1;IGe)uV-+y%XhLA{AeNUis*`EVtc&kC=2~%Vv;UtZHBX*J#Wt*Rwz*t zb|YUY<<$MS;Hg-v9$%QWg&IY9N#qga?tw7qSow#*rjWVKO+H_X)X;ACwCceYrla0RyM`;~gdgbnYp7d_nAi6hdX&Q0#r*`C8D5?y9wv)y}@#{*|uqPY( zU5nEi8;Z}WVXxh7Kd-^lj@LS$b*DZ;NO+vXiSAtfh0f-;vmW6;M=CmIKI$!l+@=g3qcCL}D%WsM|8T*8E91srSCL6V=uq<%Rqv99Ce6kK zQ%vy=GBDNXn59HLJ|HKqKzpiIC|GHugzp*;7frckmOw_IPv-~zqZEXe?Rh4Od93n6 zV4@D5P@8^Oy|b42GNZS~;rr)Q=V!ELf>DR+|OW&;U<>GXR-t!nXRiMr}MBhb1xu_X~ey1B&babObZ)hc2#pa(2ACtcJO5M zp<#pF(BONBm3V27hle66y?a=hz)LXhh1ur*D8X^%H;GDAbp9!iQr8dy)9NL<$NR;nI(<9TcVU3_gTo~3$nuA|~Pvc_N#B*~|^@5V;MiOg$v||-)jbj!wut{De z@C*rw6ns~c;zFUP9f-50>>8#5WeFa>n5*`LdvKN~6{(rIVsd{SOV?9Ouf7w-I*f6H zN2US=>`9lNBPY3UahniEEtXesp5$R&R1vFCG7=qGv9{^+ET@ulZcr0m#2Bz+3Dv$9 zTXwx3Urlrrcdz^o;zdM3`5W?WHNO3f(v#|yqpQR^>d3-TD>U6``28)#TM;Wif=3%M zYmR66tSczuaAho@4$3H#^1;c#AhC=DwoAR)lvVk%p+FKNK;3#{tT8_X5G8xs#!LnZb8D2c75?GMmqVXgYp*@(^!|3zYG;JNM_ zv5kMIk%)jdeU0fE?H%Z;v0}v{jZ|0=T0j2qguNW$*!ov2+$y*y5nZK1g(eF=DAKb~ zk^6!fwyJrsVzD{a+GUESeJ3(+V5t9M&i*;FUzWbh}A;#mqu>ye;f5H}i~{aPXj5^@(RVj>&`SgJ7q@VE@9b6g{)wVoGr1Z9s2= z>kYq*$G6BmLw`87ka)Z=DI-0K5evejNp$~)7}`X$Qou{$SONtfdiM__t+|1(WZu6- zY8fjIj@8T2pL2*a?@U+jytCkz_FJE+IEO0vM3^3>e!r2C!o>fW#-<3tzh=K>PP3G+>8?fi z8(hpqj5#MgIX6v!5Pfej!Rm3JS?XcAh3wCesx+9cD{NGU@09vgj{rRJ4YZttiysoK z;-37qSNv>$HXX(hCAIU;58+pJ8<2X$(EK_A$C-&bEsMPB&9mc>!>GpC6iv1V^V$`t zH)qo*sqB{Bf@;ogWZJR7!6Wh=e+29|`x>S%ZM`|_N?^r;9D=gwq5HlqgAC?i%c)6} 
zT2~RFk8?88?>2^G7)qK~zwUm8k*=$e)pe6vG?uKo9EQOq0IwK?5Xr~%{0ibcqj<=A zo*K)AtV|_GT?68xPPi2IPZt={NQSm?Nw-?d+)0~bP&4YCKX%F751iQ3^T=N_;IVq0E+x3(T=Ya*eL*1jsEj1;vS!pw?H@4LC6>-8%$Gp z9N$B*8?j_WIw4&UZLjZ`HtV}-I;#-dz6U8$f|+lsiC$!9K+ZZtlU~s+e8yd2q*#{W zv1<%ZA;e!+1zPM)rrPxsz@b7?s&}ks*q9Bwe@14QbSn^vZb+q`tEH00I( za~Muv0Ag{JaJDgqAG`oh5b_`M-bY?c1AXL^UAX*O|FJ&2bm+Y`J@`CVV9Dy`rGl}?o?oqVP)>u9#bp4h@vJ4y zs<{2Q7mO*vco#t(7Z4VH_MG6eumQ^MEZ&$W|CV$>o}8C2P0L*lU!-A#Q9u0LE{UY| z8bB}&>thLZZ<*vw%-*lso_BN?vn+-~$x`o2knZa|*7Iz`q*H9pBC)9395cXBI|t=j z>rxcg*tZ$oiP(=<2FRv;NB~i7AVIDq5O6Y`TNbvH1*6kUvsCy{GW!>A22jSzzgiVy zONmmWO_~_+eWL&Cv<-2F9>>P@@-fifw|5bgx=f5r4k0ErQYJmm-!gIlw&lyw{@cNL zenan=_Vr0;Y|acc#7y}}#l2rd4>NgTkB)U}>XI(>Z69{kM+~fIzdpDU;CKuY<-n4c z+a?e?rVuo>_!XSY0h6XnVj!&rXzC8v% z-It4c5c+eKLSrlpEgGRWGi7j}q;m;ICTGL9xE}Zl;^u!&&=M|fS8!4Yc9x*G``pbzvJr(0EJ``YB0^&KDcwFayxgZ+4uS6H3 zDL8^~LC>LZTB>mHlnDPX(joqjdH{g`bOuls{y*RUC-5NtAK>}Fa7RlO5$S(Q;Ql+8 M|8nL(IRXIw7a}#HxBvhE literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/multi.zip b/pkg/shp/test_files/multi.zip new file mode 100644 index 0000000000000000000000000000000000000000..4d79e4318ffadd74d9ebc8bdf17d1c4cba9b4ce8 GIT binary patch literal 331042 zcmagFV{m3s6D}ItPTttIC$??dPA0Z(dor;-G2fUI+qR86-*?W%Ik)QGAFFErSoQ4M z-K+cQUe9hNS#Ssp5D*X;kPPq^xoYgIN*7!ZkSaY85DE}{5C?NZ78YiELt`_0YX?JS zLsQ3}4sI^i=B|t?;=*F`;*3^q_IB!Oupr=!Kf!eSEx>etdcuQ%LOg+jfczJBReyFWT>4zb}TLVhPjxg%Dh5cUcAaO|uJ^uu}y}yg19XN|^1^9j*c@|iOn}4=8IBG+e zK_B!62z(wQHcWByOj(f*@EyR$(VyScNHi1A%;%fOjhMQ28Q)F*Q4rM1fSDK`JN%ur zY>{nnW&KHY?t`{(j!!Y54L0I`yBC&Sn-ode^Sc4~;u*~5%=nv7noR(Xj5F_Yq=M<# zARcX^l_ELwFx9BH|BHG6UUL0ddOCT7JFi%dtk&DLIamJ(%VeFxL5Qv2<#_)zso^)O ztnuU^pUJ1D0X}81e7@lN>BalpTB?2Fk?Vr{|;ZJXd+K-d`1nAU+6 z>5ldl77r|FlSFWA9KO96)6HUN0Nz*-qa?iP8Qb|>;^3Pru~ljl{V$%X;W_p=kqlCg z8o5X!rX+K;&fimi#RPJ56Um0V;8^a&Q|%NNq{6}ZvsLF08#sdd(@liqg4yLhNPfb$;jJ(R+JE4h0p@9%38 zg&p66BT+CJV=EAq34=4Cxp{ubF2!9+ca%l8ZF&@j!Q75(DqUjl5;IywO{4v%ka;5+j(#T#lRZxAGTuZo;W8k35IRINF{&)(Zz}52Cw4;R%Z27H%rZkdLoP`2 
zX;jW90*Ojrf?H0FYt9G7o1AHJOzFjOQ254{vl~VVtxqc?0XsU2LkuE#7hA>$=bD{p z(PtViq&u4R__ox>z6)FK*WAeJkVdS`h#`8UbnvxNHASNyJaSO8vUtgzx}qhwuuEVSsgbB zfsz&CJZ|ETm0yqlM*}57&svmrWrTjto1&qhW+t6^RQV9l`UAyGQxa_Y}?e>`*UC||SRpV-F(x05Z%u<`^osbT_m&(9$2Z6|5Qhhnjz%_%~KXC_L8R zia+P`BH2D>XT$8Xo7f<7Nv%j&*tub@ZK=RrR_Z!}k9MAdW$V;zslI#D^4rn!a~+nd z)hyN87G3}Duw~Q~@Qc)%@Y;JkNPTnc&*tJlKyAS?;^>D63Rp)T7`5%Fu?2F^`Gx8g z(*R8e+Erptyj0Wsu0ke_LQ?b64FVe=oj3YR&v3KLf&(wvDy;Ou`=PJ>UqQs=a?3co z%wO7&DKSK;EGt!Ns^0oIr>jO;F&SnGma`MH+ry*N+lOgL40}00z4Tno*n_5jN%LRd zfVDkCZJ^3(JvDkY)l`5cnO}TB#nP-8ZY~!D2hLNN{@qCbB(`8Cov8qf3d_e)nR~!H z&pb<=_34L{ArYTgW*d+Z#uDC*M$+VPW0zmotb-BS9Wtv!}()mqxNhr{=tr^{r!M6KqJacL=t5HdXD0l5oPCe3#3I5fN31 z*jC%IPegFk2WWBYJHq{NXC(atg4I9sp^rc3@^VN4r2}6SEkpvv^D*)-rz1$; zGFCXfGS@3Cok#J1Hyb(;Tf*XO-~r~`>q@(Jrg*%0P$XhuBx0dqfkIIG!v$sv!Un=J ze4w?k2-X3|Y6s+Ta62sRC&AcTq)@M!Qa{6mqA1*OgduDbY0t8D5X1!|1B>TX?!!tN zu|hO(=rOA|u_xHeXtB>kjthsg6dv1tV*1-q7nQO10`i4X7GL(vI(N%1VnV)q{A()c zN4?PjQ-b)9w~yy#Obl!cVzUs^$vR|xFk{oB4FOUsnSswnQ%j6Lfx((P34%xUfUl#PiTJ$IP?^T>QUQRxkH1{X1Qs!<<{x;tqiXZjiaQJ(_inyRlOeZJI|ar zvmA%ad`z29m7fw>V4ig(qTMG%``?x3A~U*|MuEz>>y?EU0*_DMvK zuSAQ-8)20&j#^o7)2C+M% zV&1Wgqt>2n1TMZ@NPqepbA`p;6l+tackkPWv=cjt)4YD)uOLBU%h^d#O6Pp4B9A4)%+E$O^Xx?13b)fE8q`q z#_#sj0nPdnzOQPFugf{kqGiJc|nM5k-*)2XqfOt*-gZNuN72mFo zrRK;#uU#+iO2z6*C4O+xi&B|&edTEze8 zgl&n;?^hLJf(hGYtiJJ;A+&CdBzNK&*fiItK;cuG< z6Y@skgv%1mZn^_Y5&oLODyo@ey>g?YMCK>QYt<9>)1meN6i<tz(amv5I^9F zp8lZ-389^K@|$t;W9>K8+-Urho)XuP1i@9O%mx~a(yvd~MZsE!wm z8O4POigUrlJ|P&(`yTFUV+dQbg%4JkAaVA>QlPDyvcz!$=er~d02R{`ov(hf9AuT8 zMIJU;+DhSq5MrCx*X1Nix$$DLLwxkP!vnvIV?ZInXnHuQRiDsWN3LSpF;BK@M{o?!de+yJ5F!5JP^A^ zPFP$zV?6h4P@E-pkf*UK9W8XAl2T^654yJ6TU`_Bj4xSP9^!O`xCAx?L4uc0Zob|r z``#^s`rA&-F7>&!(Y~2~{NJbJC>v)0KHF`=9f4eK$BxvoqlwF>G&PWONYr6 zN=9m--ejP16-0z&*41pgpZq41W%^apk398zozjYlM)Ij0_ ztR6otq000u(fS@_<5pPg!@{n2lyD1k`=}ota}n1NG6- zN)d-m z(pyQWw{mYrJ-3O*Uw!dIse$-A2ItIx^03HF_r=WUQuSO@0I*kH==vTe5lPA0Xr=)T z<2y}RnasItp!_Vb&jL;1Ji~G5ajcJ$BoL`)8M8i~Hnvg$LC!!EPfGfMeH15-VC;p1 
zO5bf7Yu0rlmjHj`vLMuo#A$Z+B5AWgD7{>ela(q#O5yXi^3Q}0L2`}U>(YCh!bQ+2agZ#@^3#WIwe)TOrejZ+_- z3r*T!Whyw>Dkvv{S)AK!t@ERyaU}yqO?3phFYtF{LRqJt^p6mng)29Yj#*G+HtT!{ zc5_cqEh>S1=(?X^w-=|ke^Hf7cGD$1NnPnB;U$at_SxWJ76*#?1Z|VrHRrl|zup~< zU_u4V#(ois$$K(^5z&^vd{8NKH~nD46$cl4f&kV}lpy*v`_FG}v3nRK2J|>2RSvgW zw@z_F7%_v)Oxft%XJD*@5*qvczHxB7jQcih{INf7s2hBIQayId9a@Yr*F|C=`D-AOVN<8-ede5>PRWP};~}o%2Q4lAcIB4jW_EfSyb?h; zHi$9eoqjzx`&pdU=P)sjV)7B@bZ|EYoP_z-aEE9p&bo9q%*>LYK?1 zFRVlw5LQNm4Q>Y4`))tQ1LFD=R$qiWICR0h{329erE76%?$%~@e@}%kV5DV+sBc+8 zkz{j6qq?gI&=8gnQg^KDl$Ck0MW+sRoUDuoHn zS~^qyB%uS|lVCXV6}l)-1_eM$eLEWIfC*6U2byNp7NXj5=!C{f+SG}|)HM17)%z+P zp0f_RyEGIeV#WL5)Bp-5w85m1i=#?$^I)rBNles===QdS&i?)(T;Fm+Ofcl6gOSFB-J?Xz>0C!2I&0Sj{f#$vtb*3|$c-v#hL^_ayz&&^ z2RPp146k%z@y`&F_h0B?_g5v)vVyYjiyCStnvpJ!dU==KdhTAlE&sizc4K(yA-h%Lqi?h$V_r0W~67K1ZNjN!s59(38E+|dx0p^(3YQcHNmrhr3j1+ zzSk7tn0N(f@p8J6SLt4mJ~F>Gm982j;qh?QH%vaip{xX3wcYUr8Ts|b!poG(kfek8 zhB)U*vRqLE*Ax#|4YnrQx^>}{A9)uvG}V96_AHzaVug?1*{sW+Da~}2C~>{B5?xHu zoM237PB9!M4hd*=J2QeVutr%Q)vL?mx2`p@DSEa2$%T|%s!6x^IB>@E*PiSHR#@f` zU@hYjI|#vbCvD(d&^LVlPmw;)msj&8>(1yz1%;H$78dzC!jjy+G;wqtH_l#(E`#7kS3x$X-V%+JB@cm@fw*bLJxU9UO%ivp{?T z-2U)ZQaISzrGqXXw7nBCj+C+UQoV!`(paO+c0xuj2dgU_u??(3 zO(}E`GKS|KeaS8ORVSwENBB${}k%DD;A$_-hQ5F%5QB6nf2#b=NYJQjW4Y46qM$ z+2d@`r&A~_>k-|qSDf2SGot|ogJeH-0y${$&`e(5{<2aPK#pLx^i3zPKk#TBrqDMx zoSBBHw&Ue?nimoNHqo3wkW#$)^Y|waZ$l1H(}QQwRJdk)6bKpvaYMlki9ey{nH%XG z$_nUxYbxP3(iI%H^NtKXW0E*HU!+LF9oa#{+ioj%z-tMIO>MAo>K1t#gc-Ez)FKCp z!xi-t_QlmWQ*R&td;tX?rHV@RWRVd8$@iRwJH3&hCjcR8@>*EscXW^6X1*u(g+?gc zq?$ozsPIQ=LKflea_x3icBw!z+1N``S#ve*e-`?I`L{?m)&+0@!C0fI)AwT=gL01t zDuQNo;ugV~8}g4)t`dE-u5tqb#Bw&$mvwt8(KqrtaKeP%D>W@pxD5h|Y(%Dksk_xI z!2i=CIuNzr8?qpjCqo;-6O5D97y3lM?}ZIXLmm2T4>YIRYvN?^VcLh$oQ6Bx^b9>s zd{0?$>sKGMHqARJnR!W~k0gi(pD+W#g;o$2O4rQ&sb1(dMS2uv<;9or%gqZv-G)3E zs(qKJF$^~Cu31P*lBqO-B3r$Z$nxu#tQLk5mvk|bb1+vhBSK&<@{FT;6V8EcGYg*x z5eftT2p^p#6S{gvl?fE6D2qqi(*xwhGKg+dy?Waah#{?uiwwd(eNLsyVF?LBI+F%- zm2)*-dg+7Hz8{hR;5}9O6I<7&@)`SLv 
z+$D3Cu_{49l|{SOBd^6|nYU;XVhl6UCY0-q061!zP3z)LJXctfZn_LROxnQG{UFRl z$uFZk6rmr)aML;(?C#`u);q2?^`{Y$ee4vZPaWNuzip?+rbi&-nh4|G7ak}XKz77S z;QqzIWKLt>g!1iKS48IDdVf9XV;P45UNxln#eFi`nVD;{fUg7KO+OAq1O4sc4JX5k z?+*rKaj_jt?o$FTU4uV<@eDvQGOC(z0OPZYaSs#qaRZ$ z)sK&rH@VzeVN7&O)v}l)U(FDyR&qVp=IYh>96m;q3Jk7>HY&7#UnA9zkw0)bOir!d zV81Ez@_WGtvZMmS4Kxj!>hJ#+gSrc6VUA7D1#L4uv*$)G)J>6ME$@^v5^!}D z!gu^U8ah%wOF52awcxs=!9EQ9`0)}Kci=M z$0(GqWrd1!;KUA#{@T_Snh%emow12KyXBl}YLmo!{-Hz1o1lcoAD`yxuvW?_*pr=e z)>rV=?dI>{bNX?`d0AWLChH1-S7rMvP(7YUS+uS&0);VkIs-q13@A*IL|69<}5LZ!^R#aeg^K|>K zpu_M_(CPYr3p(tKER4*EaR2vP(EiCrN_;^`kpJHBpGENhDI1wOS^gKD-3?5aOa`tORuh>e?dU9J0Smmu+2;?{x9tR;Qw#f3;LU?fHC)D|KjBF z8uGLVUkyUuw1q7nRcv#K?KM7Fl6tH}kRZQhbs8IlCSm}&p!hXX|p$ z(;5!w7Zk!uU_T%AJ8vY{Z7D-dTPOS%kxPfA1ei7ZJX$a<(Jsl@B6V;lUJH%GXaUkeu8PnjY#s( zUd=sGio?6lPL($UMV|6T`U!6|imFU5DSKzkR_S&L~VlwfE;b)iDw`p0o{t}aK&y>4k#Ng?ZyLZ|v zw;zXJ+2H!0Y40P39rCysWVyhlc#xJExby_zk?^wUb3 zTmQDMe8$e6Qi-&>JO8K;#$y)D9$oR?`R?9t>gJg!-geLNx6Hbw@7x>iPQAwix(3Bv zVv0NISJ}JUALM0%`h6L1*NhpO=Dwi*WmwxaA0K94QM~2?>;*^}>!<14+XGL3uk75s zHn_da1s<-8B7xsj=6<6F?S9dpY*$;U+e{ljt8RzB(xj;Gez_1$Xwxiy<8%?!xNrE; z+#5e!@}Ax{AG`?C9ihi6M>~Fh3jD|FS9ZMxfpb{AiTr0og=|GxvF z-?rlZgay>`jzp`Auk!i$y;i%FkAk(pXTq7=Z}F(@ zqyT~FnQPCjFAa*XF;gCY(7#l@#3=l(z2y_Rgc{!J97q`o5#?O_vRc@d8Qpe zvF%(U)z5wG($K6p<8zEgCDq< zJhZy(FWwwOtPOyy6EVt;)To;DWUuGNv#STM#aOEX#D$x6^*4oB)?G8c_EbHo9vMdE zGT*+*3R}HXr+UBpwWl{fjwq20qmzgC0pFP(=bDgvVZ^#37YLB6#to-IH?_#*bC@;VFamY!+ZV2t z6hk(`hULqIaY)=1i8lg*1Cj@kA{LE8MLQ@x`z=^TkDnp9A~{fpEAaDI82a z`=CFE>>Ll&YBJ9EgY{77;Q}blH~R1p(m8XsU{orobg@;LROfZEK?UV9l(Qbx`1l_= z`5z;Npl{*(BC{SAX!X*Q648WzApC6>%{B;Zm6i?eDkV-t!9EJ*=tF#QmB{@4_t!5s zzWjJe7OTFKU3=cINTW#H7lS+mCkpiawWGpjyf`h&j{K-8%c45gh6cHZUqzFhD>qkM z%kIMx9*zh^#eC+{M?iFBj>8n>l%}zk1KYdy=2sr`GmNYZ+Nx1NvBMv8Waw2N)%E@5 zrlzXxx((%YY9L}dde;hK6u&}4@UnmG-_8$y5b2P@@vyOp;(un=P><#RrJ7!-R5!0H zYfXd|^=f?JG!S#MOzbfoaUpz#@v%k-;|AT%yc2<+Sk8=IQ|a^irHB$i)S>Wt6+j{> zcnk<^cwZp4frS9y+yw0?h99$H*)wbvvKTJp!W9buZ)CB^i`s$W`!X#MrD)p?h382i 
zDSO7j2SM8r+AE~ng9MNjY@=_?nm9&K>ccuXK?m;&Q0&Z-zF9@C?}97Kr8hK4nEi8= zvL%b@Wa1#AclWQUYA%*QKQpcqLIGQui#O8!T3{zqE=Pi^lr zBw+-moh3A3l!*h^Cy*P^KZ7XZfZ|gzvGlUe_2t3$LA`*AX7c{Pe}1}I93CC6ucxnu zVM=5snY7nK%eU9#W)74ZDJb5rn@cGu+4q1k@eJxlI(<>$?U8c=UVx&8`Y_K;gxPkB z;ti+8Dnk<)ryWGcRt0wWXX?#}VY4;Nur(wj+IYNNIXtX(0t~M=RG*|1p*OF8_=PW@ z<&Zx~*LE_^{^rm1=4k-Ko8j|R zk^*2ty)s2dDn!N)VGerI$pAj#K0TJg=1pBT>K&RNT*bBrqVet@I_b1Z zT$E6byGzZ(8zlif)#t{OoFY8&jzS>=;gxv_@MK0DQHa2aLvJfzah}j>jA;(7?9!tI zrhv6(CdcY#?wYliV!d0qF+}8KNmjOPT*jA2NnN6Rh&WtoNa)d0HPs7`WEXGkXH-Za3Rbeo85JQ)WDmQOWoE;D4LkzIzYyImFE3d{WR?ZW`qM zVqQjvF;@1am5-W8lqttLXf~wXO|Nft*4Dzz2lv*3I3VUjSqkGCxchgksV5`<)Pe^% z?+2fx6^dd5n^1<((NnOQ!~5bRxY0Hd$PYkkEqA;_JxLi+hj~E(sW>4k#|X!w(_6v< zsaluFzky+2{tm=W64FRsV->{{pnxsO{9xqA51sa=Q86M5ll`m}bP$wP+uwq*hLc~; zH9{7a7N=!H$FzsTh+u1?$V>vZf^Vt!$8g$OazA{&8MZ<3~yOiaqj- zU^ZGZwe|+k5Dl$7>3U*gk~x15E-c(!5h&^?G*e=7j)hIaIN`v&Lz4O0U4Hf$34UMt zZR2hPJl*g5w{BslmeB5`7aVq7G>@+G~mzgJf0=JjSkB9p=KaHl89X(M8Jdci4|a= zjLVU-3MqVZIT1BvTgGA;& zg$+dAhLoj|?K$@SMWh3;1j5ZgEPiF=!$6Eea&4rVl4pp4kVx?1Q-kY`^~@%ga!|(! 
z1AWg-R<;8}01jqF6DHN74U<=Q1OLcoYqPV}hvJK|6lE`vt;ibXPzuc@@P&x9%})+O z4H((G%aNbG(iQkiq>&amH?vMm*;+0KyV{6*)Su^Vutjn;so^q*?H>ujp$$1FiI#b` zu>Q=iR*jBXnEihm{`6q-c|V*s{Q!kd-F?nGBnrrNVvqHE(WbG%sUqT*F9M*IqYloM zYM=&Oeq-+Kw+gB634?;&DlbrPt9JzgWR}*xe;w| z+hqksoSB=?Fbfb}EEKhjtR+1a)BDP`?s-k7s;8iXheZ$BV?qEJKTDNo~YPH7}fLxSFBGMtL1x0EbM4@DBc z$Eev@g^Aes;90=AeSIy;uUyKP3X00ro@CdR_Q4h>T*SB2EX?x*I8g>#^^iG*} zfil%|Es@v{7Ox-qJ5UXjlFdjdL$!q;d|?KbB3OCS1Blf_vlgwp$LZdw*-#72+1U~m zX*G0njc{m$uk;+?*d>&7QhEovA5q5?2j&=ejc|P!e}EM+(dpL7?po(ZWZh>|iKW;( z)07%Qlmh|d6|FWX2x4uC33dNU?OB!gR~`C%&+461EJ7I9K-?uR0raCpCq>VAMZ@-@XX&M9q30gb0Fo%4U5r*5aHwPgPU3ca<^*jO`qy4a_OK;(VK&sj0MCs1}9g?{6JNRPoN<4})bdy}6lr_ZVI)1K7sKs4zF2)K?=10Z=+ zy$%Dr;$JaE7>Dy;@4+5Z@#5=UdCwRGh+@jRM5fXHux?t7)xtWRiX9u*Lf~8qMnKpJ zFM5Fzf}REySjmw@Br3wvRE@xMa9SjDY1?Ja{12Tt`wv*9&s@VgpxCNJ_4WfIL*H#& zt61(uXy3V3c4nHHdirx?Vw&;f?Y+_S%cSJ@+D2=X!ElZyv|_UIJ3^H~-I?V|%0@bOi6b)&N44M}ei_fN-I zz)%=>4>8sCqG{A!q$coFHGZQ`i0yk4SX{VJK50|F={VCIU3Hh_%|v*F?{}JdU}l(% z<5J{TVoRmt^UAM=9p-m*_}acl4Kh7FNYM(kKe%f(%vZm$f ze;T{!d(l{+I@Ck^>~hu57}C=DbSC!-%gUndADDB`Qd9jAW!Mp#ho9v<{aCM*A9##|ALy#5 zu1;Fav>kwlqktntO=toRIb{Tf!`^-Rd*5}nP45-rMyAQ}Ewe(GELs60%XY0PJ7i`D zArD`|0fRqM;IAF{y}39$(LX!fB-Z+bQ}K&02dL??yIQ>jc#t!V?b&4b5Y)BRf2Ftc zdu9)Ifm0Mo2>N&Td>Zk*AN@Q{jaf2&dQH_>I~sMa*0v@n4NoujWH)g;!%$H#@%4{C z`n*L{rynEHsJtka$D!W&HD=PV_Fw@Y6~msnTI5$i+mDEZ%}M4TJCd!PLWp##1BBP; z_VrhE3eigWJ%C90p*gT^)Qr3Kz)3goE>MC6!hn#`ZCc!AKR7YC!j#PT^r84s*&!~A>Y%h10yEs|M3jIJ8sONsTcO#9sH4+{RH*fng#c^z z@G4sDzm;y~a?3eBa9?Trs*uK%Ld-3oEZuu3RZ)kv0}D3}1cTnw(i;4WVIo-`bkZVx z6CK;S+O1bglFVgce*^~F)_g~ZYW=UYPiH7L&g)0d4wOoY%#&Yos|W2-`W90%2kyiB z9^mQiU)T9-%7Wx8+)|fJH=|WT)z$1bRU;f*co~BVo$l`Nbu>R|+C-=`4f(@;& zE%g?2TY*+Tf`@hKoh86(1T_G zUp3HCAN=gMhC`Wx<)pKX(?7v@qc+TUQ>)T~;w(*(RoRx750PRF!6I=bLz1N8N&c0( zWEAa;j2;%lkp`>F%yK;ec#$Iy%|@$p$~9vKhN10u7qOV797D#`FhyN*FlySZ%em!W zV7J%v`+UBaaIGe_Utnv(Yk*wU@_$)>fv#m~Z{}%<3r;chbbc=Ogk_PptWg#SE{ue! 
z1!zw|5?&An7wow3bE|km4kb^K)n8{(^~x|(YNj}*(0Tr*x^P|TO4ZxwO4LIICm>9O z#|L4F*oEwgTQ=J9ihDJaW>QX(sfk9ojzoA65AG2b=ps_!?(uO2_{Tm{7>Hn^Ase?{ zy8C}ge0-~Vf`jYzj)z7r&lmldYKMNK{#GqZtfUq8=jCw?#d{$n z9BI40oU2qS`fm|3tg!kGos=(k1L>&7I77@ns@9IVtY|b0* zH^Uz!Rr&zI<7?qw(9*EKO}(Of*b;98Iw%$eh!BI^9-icT_V7hVwk~S#a=JTA zf{Mlv8%7h?*wz;7TEkY!&oK~%%)z~B%D9c*CURRtyXKf&SrQMI|G2f#ey)g^-~@{j z1f?3zbs)#)ik0DNJJ5Lm9t_k#!z;y__*%(`n5EVpS*Gf5qtCR|G9rT1!doP3I8s;8t=(z%l_z{Q)Dxo zh7~x|_><12Xkap6bVF&8d@bJ(M-4+mwrVz+V!sXkN> z^C>aBB4qXz(XRg7T3wQ1(_X?;>z1ff*}2%=qhG1}NY|^l)W?K2@U;?w`GR(y;|a}<{ss9g?xH{R@VvK>XTGq>IR*rQA=@+E-W}jJlU>KXM|AZCIhw6FH!N^ zXcOi3uhg;F*~P>p3fw_$kYk8X77e#{R6-Yil^=c=k+)S^t_W+N>psu(3N4 z7|Q^AEB*tW@W1b&*R1EVk}I5~;?$TW}!QSNNlMvr&8Zzge7cEEKXF z3HLFZK>a;~gOZ!`|LIw(Z>TE1wZ>&Kk2iL1L$Kb_#R~PNdnZ?sUa3+R9MGxDOp%Jg z;xs9(x&sP=B95rkJS}O_@Mugbsvi*l_DCu0I<%}W_;|D;2Z=S#R|S;)1Lh6Z^)C_$&NHjycm5k(xuTjQYOL< zd4}jRTZMIsxMlMgJ_8VrWsdvhXHYl5=sYt+)g}D-Gvfr&mwfS{j2bS5n!<%oJSre0 zVyt?eC{(qmMR`)rxi{JqF8|2|;}f8dR8d$fI?mXq#i~((+=|PFToEJa<9s+dUZDe2 zrSER%3Xe|!cUX=)C^D+BON2!y#)o3?Y^}{*>*%?;t+sX}^K-#>Fcxz+9T}08%hH0N zlJ@pF7#`a%e{!yWJy;!XGv~Gr+nP-_BC3z0bO58DH95hy)|A1hCecmG%F@`VcB?6; z_ytYc+??{JWlq*o-db77Auif0wYopD{Z+^{D!`)^w5=rXADzyKLMCVW4l1s(N^#ZR zvvYe=*D2<>LspTp)#uh0h(SLGU@zse^5zShSwDr&(D>~M7aD|jDLuMwgou|MDONA{t$+J|AyFSuKTaw?G|Pq=gga38&oG*0JXa$F@tlH!ttz{4zC3 zi&UtHjD2r8(wdGHJ3s^160u(~;1a1;8&QOcc8d@>LFR$`NebEy`ZIikXsH2%o%Vra z2qG7Q6g!b(@5elIG*}u?HIhULZ#%QC)$8$Nc;jw3OLrI_(?3byk4DzFqE>Vh$~#1# zM!8>vr;1p z`;_A*#_nuw*{+1kjRlo(z?yI2QnmX6?RlfF|9sxiBkEuB8+e3D(kjZG6u|ON!Zge| zF6$#JzT;bqzS`bN`my?Bc<3RFMUp#-_q2j>6Eu>ZOe+EhQ*@_kUF*`3jQpqJUMbzDnz$16H=MDM$DSB0IrZIdZn+UYFrhwUnJw+ zAk$*e(U?&eS;wiHsWi|`hRCdsed;r#wf^_!(-Ly79(-091~P?z>J+-)_m8IvCKqR1 zx`h#02zqFI*?qhAA`r_;XW|~9I#r4aJMAp!ImN}D!7{2A+|-r<*&QS&FE6)ZADvR% z!~QyO+fvxx98Wu7o`{8GuZ7KL6S=YmqrvYToX&!0An(u@Y9R*>G{ zf;FCfDXMZsJjPE`3m~O+JMDB1PD;{}$)sOtN}Q-?M7b%wlNT@JLhk_1}K8>M*~ zX&~CGDbr_5`$SnbHHcq`pNkbF62!dY0_w4EQLCDcv=zTWBr#6`SbVST6GU}-Q;Jz4 
zg16ycX}oa{y*_)@$BaiuE(xpO6IPa5q4;ZD#Qlrsp!qey8^kTnU;6a98`2aS5U(_h zaekRu9EauHWhmC?&rJ+nRL)(EeOr{ceZM>vN(bM_o5M{aIWe=$WU) ztjJ8`!VKk@4gDu{Iozv*{w#JBER0m*(bPQ=-B3~xz>Fw}?e8v7Mc_RFVzD%ZLmt$7 zA-2DB7VzOE6Pncf1gM45(7g55unx)+yFLJcS>Xt=H=M*=LN0XLke#4D>Z!q7N;h^w zn8tO6jPn2?>%`gKpfQx%*3S!-E;(5ksgGY1GgM~k0h<)w;6&1SH>Bp`Ptu@UTD!R- z%Ycu@4cvzcQ0_7AR@|N1W)_&u^q$@DHe2L_#cMq0Ox* zykB*LR(zOI+ACf7b(%4pY{(|4W^zK_#OH!b7mpVqX>L#I?$YB5wYf6_Gd)g2@^@1} z$4rEKc0xry5kqL0IwM25y;UGO!h9F9Gqzk4$UHFlHf;@0k4S^g4#1gg#*dLKWG7Ic^tn*p$)E7MaPTkpIL_-aJ1%#e9Se@)ze*;L}an zzYD^q>$+oA=hK;(0RG;b`m7ieZ!Rxm)0d0kn~ly4bU=bX$4+@PiGTm*R@J6Dyx z0rb&cRY^FOcLB>*+(?jApPI>61c($LL%10pWE6?5!df+qZrA&pC zhC{e?aLYbS7M41wkk&__B1QuizOlP1+cjDQ-{SRI;~3+jcg`~!%%GUwCpZr#zaNxT za;Mqka@t*K-TRI2s4P>DDBEN$ zghbKblO@2>_$=}^c8gPmsLB>tx|R<1mU^z9kpkycgW+5C`gs9mw~b=H%fMeYJNXE8?+QmL z+P1>(-r~hi+X>|V!$uAIkK|VXFH)5}Wh=vu)O1F_xP%Po4Ewi~qTh?HUCO{{X8X68 z6BrYbkMGZ{~j zV3^`i=D9~T~^fBlyo3O9mX7K~XOT7%}j2)@>U*A_v{9PCWO#A5ZQUrLa zkBd*YYPkuc%&%53M)gf}z_suU0u{|1`Uy7sZ8*!>)E`wX?w9T&4D;;}asblf4^VY= z9JujBtxDCpod`2DE3x(Rxb~&>5x-C=wg3u*P`)pmFObVPHjiynOT<69QUgRUW2xM}qy8+f%r2qWq!lZXXT?5IuFT7FbD{8FYA{tYoYsf47o>)LTP~N;eEbA9>-6Tc+Gi_{JYIJ?=R|x;>zRI2xIsP|r zbwuL@QI27@Z(Z!~3{CVWz}KK%RU?=%0o&DBp++EW2`USWC73ap4aJ~V)mr4entc`zyumCCbK50?mnA^0j+l_c)>ezYS<5t9t@fLO1cbT5 z)iNS5__h7~x&L0k6`-S|*c-_8oNpI9W53b$dV5##5f`ItNqP4tz4&9+&EG*{d`3U; zPk^8<8aydx($^~XWfZVz8$U)W$qjn6FBxMjoDX^H>sV1Kp?w)KEh)vpgU7=GGEv) zp!?;pe!-dD^?iM6M3eK#v0nQnG5^cS5=czhG|&sW(Nv=J;5h(1C0(#M_-`rfmVwCg z)GV&jUE&?TdR#IDt5>TMX^r5$U#jilzE**U6s`30w2LNs{1dRED)nRo>foak;$Jh# zG>6~YkI%%4@!yv3=Q{>bXR%TQ2JdYD43+PH62yPH0I=t=m8kUtQ(xw*e^UaEY`bu*W3|7}GZo4#|S!@?q&iE4g^^Dg33ENCU9$_T>X;0NaWBfi* zv|JqbOp7-D#dX_u4h-aN3-o@+_0{l6_svXk{ahB~8EWZrETi&6y4oMrxhak47gMbB zMD+5#oDj9}JIzYmWgweIU zJipDpY3h9(KYNw0l(MpByo2jpt}lTa)S~1l?fI8mK3kaw_u4U2Ys$x4F;jm!G%7Im z)MDRhRNuk3b47Vx`gPXeZwYMN#JRrQ4TRhEHO&7N+Rh+OpJg(_q~Ws<%`^Wfufw6J zCdE`jB@=mAJprUiHMJX%+83#o7M$L}f;b(Y`5tC_ma`9ZcL&T9FOb|7$@q{}upu{s 
zJO!3JGao5=f2_v&xey}}-r|y3i|A+8j0lg0M9>~X5vPDyu>T`x1`)LFP)=I%uMpFK zVDixrqY$7H1>V7B1kco0ctCyFVN%&=F57B<@cQuJFqsy%3mUA6xh?)o8#M|d{`(RI z?TPr5@FQ@1eUP2`X7}t5fGHgQfJ==@hxu&ian>(zJ3)|`kGaReFkKa&+)u){Jbu{* zij)Qt*eST(tPwunU{XX^o-^QppPL&70VvH$o({sTLc#qoAQ7VP_dnh@vvP#7eazy) z-T?VoWN**{;E@>tv*iKW^ydcbm-G{!(?kg^ z!3y|pX5pR`Avc0<1cJSioAF(p$>oFG~A$~z3&gsS3hINvklb7BHYn2OZjHR%^UJaAPFwU{a&%B#!c97&~Yj;e&Cc^c=DZs~j^ z+SaHwiHg**fS^4FccNcHXe*hSzKwQf=NIa#(a4@X{)4_!=pXBx)@oT${|F3y?>Y0A zRK>JX+yx-E$5OpMj!5^b?Ow5~gn(21ANlo5h~V5{IHKfM-B-z|F=BS3ve#P$a0o!W zIzs<24@^b{8&2KK<5`NBBsJ~0Ao$}EEOEKTc~Pl@LA$y!TMxK5fc#Tz(D8#S3T-RV?RROWT+TV(LtXF1 zjBEpxL2q{5JDNv);rXrIH~SqG9mb%qhoFh_yf<)1(#D{=_GNX~OzP~-?jGCT1Cx$Z z9Msl?h~Hfi@70Fs@l9Xm`>*J~$b58QIgkH{I^WkrJdd7imi!~JU+b`u>8`~gFO;&3 z7SmABy-f&xGXusmaJAH2Vh7u@eg|7F#ID-n@QytTe(sOfM~Ap_f3!Z9k621uHe+SN z_R)oy7Q^wh9iDx`fJSERb75Et{fbDarq&$0aiC+~4&GE@e7%4B>c@H!DsmCphj*j!B ztejBhWep1TNz1)I1)4?%(DYA~8m#>YvT~5{H}Q#?N1(&Z@1HNT<~-QTB+D=~pBSf% zSM@p7e!4qLgx94~aN;!55<@cUc!Y|aql5jS)9V&t)+}+NYlqU&Wtjq^ru@_kS7dPp z!;%|ihVuLOhm0EaH4rDY;RF_LjJ4JOVti zFVP5ACLrIka>YLFUo7C?@RR}gMwR_}rec(IZk8{xalD8Zavzjjp!ef(t>f>K9+kya z1ROw85IfDRYXn+>He;;=z0@UulZ<}qQFJZ`Q}ef@KzErlH&-ZtyR9}H}F z!>I1dN)B&EU$IR7OJ~(B>kgPfGMxD;?@v0El!UFWB?#(fDxMD!>WjKhLwqIQKk4A+ zTs^X*3^Bgo6^xbs3g5O11e_;^Q(M~Z$P0IVb%Jv(Xvv;du}o>Q&pkw2QaI{^-Kj+f zu{9{-@hLlb-`P2a;OsBWY)vxPc>At7edrB{wrR&~wrqX`Xk(Y@kIa2Inw^?mOY4TC zi=P@ee}C>;=(<8Gm8LM~2zu?^6%IR&2Be9}T^Q(UDJ~@?;q8u8OU0`0J*^yd{%R`b z`PYv6qi@3NXR|=M|FtfWZ#FsF{2%wx5jZ8_+q&>xVP~qIxJ6i=nR5`V|LSL+AEQ4H z^RzL0b#wU-EuWUPeI*Ue(bkna^rX;%cM($0xd7~+2~iHr`bfk&eD>Ny$`B3ty*%1I zR<%$Anl%6o;>CZ@gQo>XR-SpWGYKBB#&1ewbTd3r|a`oP2 z%j<Df%kk`;;I6*D(oaWo*#c8DwCu(?Oa(`I@-|R zm~3kt`SJ~&rNi6jM{;gAOARGaqny#qgxmPwioWW9kNd7^e{J_R z8#n@SFO^%N~bt`0QC(aF4w@9YJN535A{c>!kPRfqRx{*7}!gk_<-ql4vF@F@YhozLKpjurKe%Sp{W?>yL-Yvx&Jy{oTStbW{nLj{Ta?(yj< zrB}>S7?m-lm=Uu!v4HGNd zLPHVOyju178=w<-w8%fJnym!94Ig*3hs!fYya!a9rOe6xyf-ecZEMYUKhN#X`7df4N4Lm%e;P=P^8sIOcIxp)dx#7A1zPndqEp>iWDSbo;voaaE$JXEj 
zd}gTO?Vo`0P|G5HQ_a>iMJ5ukr>?vf+YH(iNe}(1?#GwkHu^ZtL=_fvg5}tarK+M` z`k`ySlTyBOR>o0%ZNaSq`Sg=?B=94 zUcLiTxJ_M5kZuc*OUof3QO*C1P#pZy1H`wn z+kqV(nHjCsUXmq=hso|aBe|ntnRb`{0AHlKFtVh$+(nzrGRxvG{DvA{z5#Qr;aGPR zzEixnb38Cj0X}Z7TCHC00fcHlS!{AUw%)@)!bjA`}dkL%Ibfa!Y{}IhOox{O6L( zQ!@Rkpo=N=vVu1Sd^iFN@jM@RB-2CT1kb)+v7f5Ws*HIT3$xO08(m!vk!*ZO3FxBL zEQC?tM4?KTJf^wv(R!!N5Kb(sKGJI%cfCjFp|1V2cPs&m_fSE1HA-;)L+khRsUj0| zhP&?GX$22W!f=AZ!l}r;9Uva$@u7@>53f;Z8k;6(yS|Oo&i9p8(wNpD<>=%s@L>`} zXM|nMTU*zWd}{0j*c6L*FBDD0W7Xv$f1M>mX6XlDTB-C&y8qNtSFh?kr~8h35lUu= zd9SPf`K@n+NPd(5CAsd=p&@DfJaq=It`gU|E=`kvV_xD8_8N|r$*MlBhgsM_Pj4*} z2@V$IjBZ-at{P0YLBPjtF)WG0y|e&6bh=U~=Xahd_r*v)+|@#<9>@)Gs6x_wg~^P5 z$gblw=h=Q3Dx*}?E{y8(tP=eL?5B${%dDnxia6wa*AA=jhA;@W**Pv)QiYmz5=RcN>7y@0;_s%P*` zKz6mP07^Se$6pYT^+o>nh#=hGGi}#lfJ^z&-Hb}SeX#b;<(D5w7Wr@g4eeYX@_TTv9%M#YvXHlJrR0oICid!$cnw@DHKVh`MFNUVx&q1+15;0-$r^d-P8<#Y)8xM#mgTR zT`HrFd&_$GrVtCAH-Rj&ng`M>5`u!zr)4~$u)MZ!!B^!a!>vqT4}aWtytiqMdNe3i z4gr2=v*|`!{1J6b!jNIns77I6J#9XOq@@y^Kw~H7isT#Eo2pi=oKdS$hp_;<@$%60 z_YDK0>Yya__LK!(7v}0tSI=Y3p8ZgiH!+`F)7jReL`b7wiR!k=QCiP;26gVTz-$G6xOr@vW;yM1~%s zZAE6VV}$IN20{~ose1&H z?PFT0!iK`8dsoi4H?oOQ#v<3Xkx_K047iU5$%`QTcTMHbBkggy6XdHRzN_f%SGy-` zosF>GZ9RvVbXpgyZACE2`R@95VQt3Gg@lhgjI888+>)6n%c8dgt_5AG0=2NwIg%qo3Dm^WkrzOpGE9pM4u7e@-vfMcdwVv~QYIhZ4W^@O z(U}e_2h_)`aiDOJQ7V&~GgC$0BNtb*iZ)>SuFs1zQ*AT)u#BOrULf3K-LUa`kLvyw z!NyrmyQfY67On?-Eo;|TC-~0S*V$?33WuLYE|Ztsql5QYgu(3aClC{;Q_~YDAr_xAvq% zZPYXU@THovlN2!N8DspU)6ab6O?ymXmG@qFv{stngZT_=n;D`U`QL=%b3f*uTsnE>#5M?#Wh-O zj5q$g`>Pd8Y6ca>#vC}I2p`~)@URfux5(fAal30LP^?;LbHBDk-hb=(TSy$p1~|O4 zP!53#Y52Rs1W^~&5~PM|gT+x+-1%EqoK7H4Zp{Oa(eG-UI$cc~ov(*h4#|5lO4hIa zfH2hefJfNJs;$lyH=XqHDAvz9)}jgC(pQmYK0?|1+iePVu(mQ1PuT7}N{q}8078~B zY!I!b(vccAnNI-ELM2I?2bluH`sQFVK6)(%C*yjenBl=nFif|_m6EdJ>wn1j9xUud zac1iJRqR3GtduH?e8pbUF_I44hJh{&7UItvxK$c_Cy@pV1uFN(8WNoKW}_JXedRkw ztW0cdv{U=M)CPS#PhXQ1QA7m7l^A({qdu*@Z=c&)4H6K1?J61@ac{_k;OE3_(iXz< z^^)bLJVC}?h2ck3{{)dAwD7Iz_ zU&ZZf8j%08grIYR-?rH52$U!Po+$i=Dx4FSHeqH?{9{PWqPJbO!KY8}0f9Q}DZe>( 
zL;jX!$|a?mRi#gO1R4oHl`yV>+np8ukJp&rVBoxInaOS33?{kL5?(|8-61FMh592> zl;oJbKyP_Vq^VVWPm46tY*S-{sCLCp?GBEc%G|K?-yQ+hq3W)_?}W<5`9itFYDz5} zf|lA_9iQowCAHuSPn2vcZUJ0;@VpxrFlXEXU$Bz{lz1)`v({(34jen49zRX+vBM|T zI7V#8QATfZUUPovt9Ow3Z=F8d;=2CYTOa@aA2M1};b2KFG12K1cP<3qZPKcU(^tQL z3jWp0OilD6L{7`p{*o?@Cec9O2K(~2>Ck`;)lQ_oPZFoBmEeqSUep|>TbyeodlD55 z=zR#~ye5ynx}(mD$ScSScbBgAn~8Hg%Z9Bt3itRbG}5~rXW+qw)$ysY_TIsAz@{-x z8w1Z0rOhO*z52DHaC|s{NZS6LjDz~Zs$agb?>pmX>!`_~uiy|C4@IJIB3_HgvJRdl zLx13sN_bGcaxOKExeN|%r6{)UXCT$gA6Te|X@b_3F9FX!h9ER`;jyYEL<5~JdN{#v z73P4yBe{1a@%ww2$f*aJPf^`O5v2h3f23Boz1YV4`~qZJZeg2nyOzNkJ=Ykb+baGu zXJb~CtC95^?7kiB)B>jv=ZhY%&vJ37yq)2F1?7d>DV;vBYayXc2dts@0LNe&JYA0h zweR8Qu{Fq9$Od>_5Iyo3?~$mbBMje;eaMjZLE69Me zVM!V8fz7pKtNAu$Pmi6O1gqtm8FuL6tZ#X>3}-iR!yXCtaL0YjOX{ywZXzqEy zLPkwb!Fj9WUiTPMvOz39Mor}WR<>!XE(#NztO8DxU3;~AcDXoudH>>}lTPfD`ndmn zVa`hEw_X@uC^~f|L&9>)_k-^N37M*#Twr zw?Qc8Q+7y!Y61zPTV(%m(jRhOoLG8xYqoPAJZf45>hBNe_@5B&{?O7XN^j~%L&^qw zLTk!=dmf%&I6l{Hhc8-8w!?Nc?Yet4%SBw_g2dh~bFZq-*6v@ll+WP`KpaJFU-!3U z)FUV)ugN3X(}}IEL#-Ydpr5iH+4q1YU-m@5iw;r^gm-5Q#{Nxkh9#wA#kifw3!q2g zIwB0>&&4YD3~=Z<$q~?0ek3F*j9m9!c-D{LzdFNFVqu_RnyJo4vvWDbjQt1*X9jx) z8U^?F1dL80&kSw~Wb=o$Nh*tNhw!J|1nFQ{zd>a_WMup;09Ntu0k7hz?QN9RFXE#ZlE{mB(|PM5 zj-$l?`VLMzZUrr}>*lg$GC063-UAV(0iJ~jTvS32s12>#Y1Tj#>K*KCgw?`{!SSl! 
z=17r&uzJR>3{I4&MzT`l`tWdl90Mq+e@U8J08s7;fn^69%_sSth$?{%6fELvbjQPA z$X+|y5E~+PBV~IjZY5WH?v?XD7;vnx@>_AG@W<0MX@Hprl984Sik4riB5%f?Ojs1% z`|rL%$I=QdT=8>O*!6OUyk~M_^SlP^^TawU1oEvP>zuc#BPSaUo)QqScIlgLVN zxX#EEDS1G|4Vb*;3&h)e53!=@kzsNX9G;rxG`%U)t;|Jnn1%32`kZzCsxp9<6oq=V09 zxjy#Qr2oSaD7=T-#H*@t?1gZILv0r3#4{I`djTwoFZ@ak^CtMt> z+4hw#sbZ?`^c08hiL!uTeE$&7ecb`ldRvZ=?8qk*$81KeVKs{ zULz70KxxI`{Cyb!tfJ=X21MwcE*^t!#OxGak3)m(wnN^dceZ!+e1O*y*X2wh#4NB6 zOq)VZ4pmMLonS|gXBTXIF_Ro4f^v~VfF$-AdzTZ_xznkPE7AhlJZ5KdG;$jV?u9t5 zaYGi3$7EN85=>an#OZV-#)d2$rznYqIQ-~jB-{5l#px(BAJLivv}5QUR6`qr!qIM* zB}naijPz4i!m@({zfK_+TC5290bGQOPuOiyOdw}{IssGfYa>f%q;MeRNPgl;w@`TS z&4?rNzeESMp6=8DwM*QYu8)$uqbM8{)9N^amZaocj$g| zh-KXzR@csW|C0XCBUG;RVZG7$B?bbc=P6C_%c=@wB&JJm=yQ|q+$Kkhu>wT&iH^9! z6h+5UHJWsEhK^p=we{o6$NA8!pV&eSErTmY$~1<<(YXJs8Q-RXLxnIAVrzWYp!&kgq<_keFwhi#L}g>le`@pLyEGS+loQ98Vva|g zNF2q5`eP3?4OFFtFZbvm9vlUvqinROf{vsM_AXEJhJINzIyooGt4Nlb^| zqa(XMcXC$L#opQTtfJgLaEzh?2X|{zal#V9-2Vkgn3h&UOO;=6 z$qMJ{J1i>qO`s(ihT2yQPgw%++YE~<57$lGs>aAe^FW&*gHP7efsh$^Gk-kPb6u7u z=#MJ0ErYSn+6?#hk< z0F_5NqWy>_|4l_@@Bx{ptHAx#?cojNmKd9lR$tF0B$2#|aI>?eKN z{yQ6~7b(M1xKc6N+Gg;Ks22eYKtzn@?eFB7yGwz$`FH(2+amt7rRY78U6drfpfhw| zgd%X8EHuflW_a>X>IRD;_acL{j%S_SF6iDV(M4IQ5ApVOjlH2wS_|rxb%~R;eg67*0f)iZ??rP9(+FSFHcUd9Bjx;MS9#_UqoKS2PtW6 zc9%lZ7M{XDVaa)tcG1@SaUn&o#JS@aZ}7+eG4z~$PksL5eaxu8=!UmiJVa>bRjXNW z4N~AB+2mbPj^Z24SH{UOIz|Yr!;=U&~VtFp(%^ zgZ}MXh7jaci*K7S*z2&18i&{Uq^A|s#m^C7a~a!cI+H~jpV>?#(oL#*3s^Wfr@DB& zeWPexVHf%xlR@M5VC~09%B&(YL{o!98h#_r#V-gzWj>eXH6O+P6Q*HxD+M8f=(}j2 z&S_6X6rt#a$5x(~#zh?$=eHefNawEQIZ4|V|ZOrhupT0{UgSp+0Spb6(r)h85 zIK@skc!OW7IJ7Fh6g>c3OViiD?&iaSmu4#z&ehHh_twFa&>?1GyoWva< z1emD1?8()^JB*Z-QH@9gx`P2L*EE`99<~s0S|_uZrmc(+E>2#n*F8(uH9osJB-Afy z#MFeLq5`D6q;lJ@2G`!Vp>6WKn4Rw|O}sfT$CJJ%dSrNf7ee@%jG^Kw7_CQ~WP}Lz zuW7_AQ^n#S%}Vc4edtQ2Vi-zn-+hnDi|WhEIWGKQZ9HDFh$em)O4)X}<&6ELZ7tz+ zj2QKa$irIb($r8k4A{@6)F)4s#>^GFq|wp)nHW+I+H1~CDtJ5G?b$%=@tzZz9c-GGWJ7QA8D=J|YTe)R`5cRv zu{>O0Uw5;5ypPu)nqNm(>L6Tj&2FYJh2uefYtuQ=w0ynu8E?Yi~L 
zKpm>yDcm+__zeT(oOL8@Fhc&4V+7=j6_6$(I?=^DAs34=A{IB3$95?@*xxes7XajY z$avI)=$h>bZI&$*i(DvoTx=|*L&CiQ->7BgpMJMe66dQoh2a8IvuwHM+!;_`wBGJTlYVuoWO z=yg~c)PS2_wz&TF(SXs@XYi}B7HgI5+EICNjFW3=o1{FiIkwayb5de8pJCl09Nl|W zTCK{BkPXSEu7pIjD%I%7nGJK49AR4bHi3!+GI-+&`1Zk`fj_I8VTf0RUh*4Tst`rq zzixvtE*jtHc7t!;usr~CyuH;BAgg%aLqGP9xomvU^e!|#uJV-rR+ysfpLr)_iXrhhzGyuJ=%&@@?k&!mlo%V4i7@=m1e@l-=6DmO1;QU z&|+!=W4!>MWrroGQ@VAITYiL8oM<4eV=fqBAGhCZxna$4N$E>_qOLZn^jHPW;Yefo z(x)3JQ0xpvH6k@#%f%ah&dY(bwrfQxAx8Y{@9}QQ@^M&+6+rn*$tP@==DN?nUgw(7 zNH?c8>|62G5JL_y#Ur~6+fN*R143Sg5H-eybQ+7w2?C5n6S)$zjX57w&#Ng--`<_e zDp}8M4}^}K)$3O_Yl63q53G%+LS62dJ%B7Cm}m<))Sr*6J5Iu*1hbE<#V?Z$N=bEJ z>ND?x4MKP?3hn069Tj$L*Wi?F?hR@b;vZPU14UoMuF7WluwQj<(z#x%KOMtV38&l` zOwtU1aV3djFIe!WhE0U!N*mTav4uw+I?!r*rcl|u?!fH46$zDGn9+S|4RNjn2XD2$ z5k{?R&2LV!Ye;=gOefp2v@Ext+|W7-Qv@5z($CQiNF1Ef!AYpK8Y-O^p3p+v>2?Qu zSfE%d;e0-v_u*tly_-ga&njh;|8<_$9YVMP&ljGB9I8l+*8vVh!?s4g5BJ6Qs2~A^85%!~u~}xZScVq4prB!R1ZHcX!@U`dV=Mp3{q=`vGkTE+Avu zXpL1GItw25c>}X7xp=0&9t|P>VLN-EEH5%OC7X{!kF?hZWc)tAAM;WCNNiGAlM?1Y z)7Slc6o*E2dPm%xrp2qan~DZWvLl+rY+5b>UrUSIU3|-Rd5fb=)K!`lc$u{@O`zXe z1TF^YUBOK-Pcm3X=IS(f_f&Uuv37GFSE3B+Llp};wyvS?UJF9Hw~9|hOI6G5T|03f z1CZ|wgnDnVk$B1>7u9kjau?u#B}-GwN;QDu1y=Y)<++$bkAq*+Q{cLSvt)E2LlKbi zkMuHcfn?3TS2!b`T!0$`cCOmQxqPEHB?JhutzhIi=`#)#IE)yj*VRq3i1K)diS&sw z52%}f(}6*5Z%QoEv0hhpvQ6EK9TG;baK+8_#&+%YG-2Q7Q5XJ|?y&Ves{^DEg2dCZ z0P@iAi^IFlie>h4`pUuYObYXT;#d2@=qT@3##YHPR5^p@b&jT8Qq7S%M8uYTC|78{ zX>0XIM1nyaOf4Ud-{prad|#k!^}d&}W-N^##{Q@=(FqRQ!R~}*ih!O5v97HtH`oO> zYA=o3VTZgfsJ7|YJ%33JFGW$Aj^D4BIz3gWS7?SPZ{hl=go4r64c@WZ0y(~keG@jX zE-wg+q zXjsp#aBk4sZ3I-Ty@*T7q=+p!C%tRhL;@zOt;LUK89eElQ7enJzsqjbPzP#{ra54a zNQjD^*G&_LDO7X6YE>*Yv)Uo=#u3B703#nO22D%vo9yFwIuaBYA%+hS4Bl`CXZZv6 zdoj9$-`|Xi=`2G9%G3okjj3ZwGls9{T{xdfkRF%;NatS(T~iGhEnB)ev<})s?kIMq zr6mGLDV82l_hoBNtPw^Xkw(VUVkwTdgAp<`1n8r>k~5nUJM8&=jZQ`e$%=>)af8Or z$2yisZXePH*thaa)45s&EuA89XqJ>0@)XR^7x}9zREc%W4Eo2Mi);~F_}>YCpdQIB z_-XE)M?)^u=RtG<*h{z^K^1H@7&pN9$%LPpZ1Nk>>HSDUE|}=^lSE?{YtIbhEtW2K 
zN>qsT2vI8Dlon|;NyKH*-7e@N!{T?8#Lr+))CYP(AH8N`L8dyIojJ}vk9lA8MpYQ& z8uweL>kMwZTuJjfI1o0&xaXWyO*X^#DE=I<(jXLQZflbLJA>&7`*xauba2kX~J5`Nk6OAA})C*&^=ir?UX@ zVV<&lRvDV0Jyn?6r9WUw#30$kN;FgNhVm5f+rv&ub^8Ys6JQ$#F5Ya#Oe>mDz-_4A zEZi;JQHtFJ;dry9A-igMAw=5u6zd+Kmy$DzppTMwVwa*qS|8UgPnHexMMnCXWEIN7 zM=<30H27nit-QM9SoSaxc7c0qIi%ZOZ`$==*YG}j5V~bH7{Lc#v3}NX3|}ZqP7iOQ z0R}+Z{WONk@#3TXWF_w{cXXiI5#!_%;@WN&gBO8vp0K(9r@o;4vQR|3r65*c_khp|m{U5DRi?Ygc&ZU1&|sIJS!jC~5y;Id*l5+; zZ17$U{iV*lXQ4xGsWXfEEi?Js6EZE4p#}i0k&?~Q3KlB)T@P)X+GSX*`2?IJODVIx zX2y0aFk1qI56zH=(YjZKuq>=bvDnp*gtmcv`@qEMW7*7!x0XxoQo=h;(kC_a`cKbX;^r)En6nWJ z&J*}h98k~a_prLWPz`7SN&c@qh^&h>Zmc$)T0-!iq{3Vz(7Ct|jaJK}^m1s^?h8E_Ar8>K$1BlE*#HM?SDU%N^RK%r{k3UAtOk>8jEa{b9)|Jib2lHkqwT65_ z0?`t$$P4&flj(YC`6FTrw|=`Jh@s0kP6TJ4UNqa)aojfT9My*R-(RK*!kNz$HzWIve&dB9sI=^|(sr(t+;_Z|7 z<@J_*YQO(jBu=DQxQ_8RkL{p@9m$WiaqV6IB6Cw3-q!r- z!v1hEwV|4vsap5zrIvBeA_lxrRX;pqC1dk{irFwErk3uliV@X1dAsKb9|PFeLwLh1 zf-{nTCTCu3gwYf^C)-aTVSSg-{-=B?$GR;g-SKk4E+8OaX`7_>pUhXDD6J!{hY%z| z_)f0q*W56Zb(Xa>m_eZ2BkTEfzh7(DAncT~Cy|W9>1HMN?S=ejtIJV&7P*7}&u4^$ z5VsMeS`~j-7JAIU^l*|2>-zx@9V`e+DJ4e~VjZ}u?<_Be8#fT}KB{!Rlx(;sgfN-c z*nuKHf|^7z^VfM6OwJ+a@cj&eEChTyY3}Fp1sw&&$cp~qXvJ@ASkHSd;5yNo!FcqO3jyJIYW+|LBE-l{V=I8<73Hr$ynbllc6G8C-5~W z6+r=f(n|3)jv0+no$*T(qC)cNkepYy+#3+QLh`M$>XJw%QRqi}WdT8CLWC<<<~C_& z&-V#Eo~tcwHjz_>dnwmR%wC#oF(8c`Tl3?6*jN}`k$!pJY11I!r~g@})6#eYEU0Pr zA2uo^6KRz{=u^3xYbPu>PkCGy$%q1~dkH3q*@dA6vnB(s zOBf@IvHXmJL{*lfb0y3Y0%ihp;_;;JVB|Ul6~q|@1uTAxe1b75pJf$Z5q&w>o}Wv$ z=*`365x>)Les6~&LB=qe!dKKBODAt0Rc3kflEU+U=QubB4?ptV+-&4tZ_KU;6Az)K zG+iamfr37cl?cWa(Mekf+64WdNIR_*ZUme-Q+eE}%@X(lY`FiCVejKOTck7B@l248 zC?qCwo4|_pt@7s9fe^e{#nVr^8a;hGUiweG=@3D6E|*tzKxjl1Z!y0|wk>aDsIj_3 z(G?n2(`2I}0}M(dl^>V9qGHNKrOI#sM~VLUrF6rkJyy}ipC!w23S1t;lF?PVL$awu zm&*G=U%$tshM1x9GiI-Voh`cnhkkH?_agJ1k2IZ#wWORF=pkK|Fe~(CJdG+=tJg!P z?SsPcEPPD&GDN5mns3_Ss>{fVesSg?!rl)&^o2hhJv~g@9qalY-ME$elmzR@eZZ<3lZLS$5HFZ+t-;2(@A*-}&{oE_GyiZl)@rKYVZ zhV4dhYKEI$E8_V5CNzSqJ5lV7aI>A7nnliw4iM3K$)EkyV~ZiBz*2!JF?bp|AAPXy 
zeDaH|#b*ckk-q5xn6lpj8->S&N0+_pN3M9{ohUNehl!cNWdYh(6@LjU4PMPGX={h^ zaleY;s?sX(AS#1>K=bnc^Wp#9GVO!J_H{a+WZ?O?n#_?NSYfjaPv|1_wyyvI!Wdt* zt89y&`B5q5HriqBsD2}vj}h7iD*h#@O!ttBQ4DfyPuyUi60=|))(G{eFt`PrjloL2 zc)UtVOiehzf1T2h@QN$7de0ivz@UK!`5P7RS>*dBt6uIZ#rz ze2Srzp;=F3aS((~MYv`pD*IT(6>wUPAO83j z`zD_2jX?lbfnSrq4q~bu;l~)aN8}c`r_m#FlfTS72OMG}1~mVYgd#A-zdD2c+K(aV z83WCw&tt*)K_5qoALHvs7?MqIk196wQ@YM0hpB1CA7lpNr4l5GIkfNk(gG<)|AEW> zc!aJ^%7t}AVL5%TcY-qIvFsRFl6C9-fLaef23JWnS;RYS;2aa%LExpF7fcL&0o%Ji zO0krO<=}@tB-W@__mMh^g-s<-O*K|g0dIy!%w`I_uIG5|J7)UlYer9w^%b>*JYfwI zuU6J1;&u!AYAn#8JQ&MzI)E{!_$f9-mVTviNr6j!7AL3X`*-Sp=h-}%C zE}tu+OM;J%#Ym*RoHm~ZjbB?jQ^RMpU3*Z7eaD>cvQSB}xbWBU7t2u| zv2!8H42zb3FIi|glcEmNN3H+I%h51KqC=vdr@R-J*nGl%m-Pw+HxjKlp^XUPl}#+0 zUj!lCitAan@*kDIIR^0oY{3YhL+Nsa`}(J%wxWW6+=SFRdMu+mVm`MOht_=087P?h z#a7jT#(y{ixmYKQ*@DK3^pO>BZuNG$wqD$lm#db0Em~(23(|>rMQF31UFP-Lop~SDSZ%FA!8r>TX&np&s>a?g z8!l7PKFKJDzXcHL0;I7Y^Ug+W7@A49e2BeLGj4CT<*(N9@uhW%IAUohK(RmRt|Y#9 z92_CprdW_@@XwEt^zg92$IaLZ{Kwuw!@B{Z&2oV&#j|yzTwduP5vtsm7-zd{MqNUk ze{gL&C*`7-^C{mer9PVNKj?S|d|pLRcjvlC=%TrA&B@C;7R96lXU#m*{N&BzySs~2lBbB8WeMihD<>k zPYFxZTVO_h;tVE!`g%QoMzR(W!h35c66leN zFWwja&Y}>467sq(i9Ot3u6r(Mdt@taX^s^ZpojT*+k$WfyZK?7YV0*tI(FI<-Wfx}o5*!Lf zj%}Nv`X3f*ag*Lo|J5~g(G6K)85Hf(&A7Z=oJ(uycjQ9Br~*?a=MpPrM0!-0U*68c z7m(#Oy$J;6+jx$!d3>9{Sx_`^kC~JU1%jx;Q5QUzIcdadeQi?Y%Vy6=G&XS9Y-$_=oom>UrsLp|zNe*p z@Pk^1gv2LNs;F{$S*rVXYFh(VvX~>!)F&KoI^0>rB6uu@m}Dl$D_vVq8^(?vBB>8> z@Mrtojs9iKmS;a^{sSPru!k+O;^Ayx(nptOB@V-aJ>XTPQGm*bbFLXrlYy4Byut^ z@Dk@jheE~_-&vC6YBhfbhKzHbA(Mz-Y-9(O(j%6CY3Na&y)9oTlst?crMriR?B%!P ztO+s?rmu+4K+HP>l8#>rzy$6RtC(*>Wh99X4L-S36&}q>-C252KL&t9d<>iGhHu$_ zvw2jKFBN9grbBhjLyWRan{rb#sTyIOQ8dM=b*{fP9Mny!ca@z=0R`u}ngk@hz_ z!3#yQ z2Ce^nHEL7ns@!1PR~}o5e!H(Zm+X8u=KEC#t^m2xD|Ql~D5t55%t|rOHIaFEH|48U zN{{rfSNS%D8U&`AA~DE#aj@OND1)nhqkxKgRS}vm8&^H2t_wxM6ju%@y<(fTyNxG> zgR2BXf|`Rm>_-*cyl}y=?(Gnq?N}Htv^5}-tNQG9wDKYN7rSC~O-LKXVc!eVYG!!( zPW9IK7h*2iF%m)t4t!7ccxx_zWzZ4Sv#3VlEJX;g3)Ep|GIH)UpGQ)8R$Tt&%@z0Q 
z-ZwvUdk>Xic~61w8sHS|K=FBEcZtik8J|3Y6?VJn0x692m4_5DeuqAWgc2g0I5zpP zSpWGPr|n4ygQS%IGkB(A|Be5}z_HShHg~;w>fs%8iO=BzsrH_xy7MnrY#ue=P)j43 zgs+vV8Ig&dCzzjDVB5)VXZ8GNMaVSqXWuzh_teGY%J*YT#{HH?E@7AH=l{ZSqki5D zY0MnR1lt9|8I&Hbo8RXvi327*kvjn~)zG6xlNgyJm# zl{_)(e9ne|XQik4kt?k5v~L4C24(iI!;1x?Ss_2XFvxSi*Gw1ghDho`8J4@8Ap^?f z8ers@n8|Qoz+Zih=#LuD=J0u?)BR=CB4~zYXGUrOKs-$&kC1F&h}iNRC0-Db~2uj1Rf@P`1e@yQE=C$=xPUd^>P#n<}-7n^$N-p#;e zJ?2ZFJw2-0FD%W285EfO2j=y!i=(yH5E`{5`-iK*HIz+FD(&sk&i<=3?LTOZ3I*`w zH&n;*lYNx0?gO&$_O3^e@s>pTV+&|ypgZK^Pk&wzJ_!BYEcu%NWUu+&LJYC4F#5pE zy87Z@BK?xpr=ESO2bp!0DE@V*C7CXB}2q1~Lt1qE@=ch(W0^Stx=uJ7{(@#caGmvhi!uV5vho`Vo)E z@{L*C1T~#A%%#!Q04AV&$oHI3lksDNRXoB3N$z;|V!WBrX;=Rv4KR=;k{Tc>QGV)Z zzr^`wtWOc#$Z9Ng={Z987%WuU@nL&+d!*QbUG?;Q5G~1x$cv;U#dGrZwxliaf}(A3 zp*-TV`JBXHp(g_vtIoNF15s$yIBjB&R3 zuf1M#=Z|v^Lk{{p4<9Q5CV<~J`>qO{6BomN96*-1TNzRuw8hg zO#0M2J%PC3BQvFu$(-H%Yi(^hz13@HeeG2*?is^Dq8qOK&*^8{oR_OmJLqX^J_&c; zn1Mb*XCdE>SRr>QI$uW6#u>MAv|_0I*nxl;M?6BAB1y|S{aG+M*Hq;VS zTah2or6kSVccpvm@r`FBKE#GD{v0U=x+d0f+`uoqY)ff zSwGEJm`$4)FUYW*{T#eYR-Hq9*mPU!9`qA2IJ2^!L?}{<5kMHgJ`6qOB3D2^?AB%qn`?-$qMh zcim29S8!o_w%_c=EhWJC=nIb=_OcxZbASYw{!Cmo;uJuorL=YwFgJT6kxLIBoV};tG+ya;kRe1vBoyFTyE{%CTbkZ0^HFu%sOOgcDSl|p| z{RBx&Hd`Y2VF2mxa25vobsCPSIjekScir~URheDL48)q&`7ayB{rSdBgAGsD;M7Kr zSa7T9mv0uS2pWGHe4I*>CV{Npvz_mx>z2R2_!-eo%Eh5}vBUSb;D`zu{%ydF(#sCD zuiWc*7s?s%-Cv?CX2-E_>Fv9v@7d&goa$a*5UBJ($1Y%=`_NYT{hJx>XVM5k`sto4t4;EH ztQocO@cc*Wm0k9WH=kM+Kik$-WOyeLI~g?PqlnUh(}c26vS9sAl3q>no?TjBI|d82 zbBVh#XI0O$6IbZ}BY6L{PwgPc@!#8IInEFF0P|%Eul|JsQFZ9P3p-*&6}Uc9Y%S;X zQ8+Q^e)c^71?D3)78;LcJRZg%E%9$ymR#1&B52*$}%Hu?aJK>4)z}T8m@rQ6j77 z+k$pTr?PmQG7N4mxuQRPz;W8_{PIb_ke<6TZApfJ|JlpWlfV2qrDq0ro(`AHHySc{ zp$G;R^sX!9s}UL9V+})@mfwV&w`M4+iROR$1e8w&zxlAI$>T~sw^lgnAi=kJQ@uVp zn$Qyghra|R4bvg!*x6ORxY7bNt`Ee-%sn#R?4-*+y>x=PxD^SNzg6odWazVETgn3k zsbpLPL#dyE93By+8L`iiEJP+ZH)sdjHp@Rl>5OSKFDB3rd)INqvR}I)SOh653BxTS?wJnr$c1+ zSO~1fL$;3e8jA_r8F#}O%uhtbTZ(tXvS3!(z>c~fup22z0~W3)p#9@5g4A;tz<;e9 
z`)2b^AMTsSgv6vm*$X=a1VrC~o!1$n^1MW*KlC$R+GHel$o=p6>yysxX``pdV{3T6 z87q~q&tRM!3*Ujs$5F#+sp6@f26!VhW)PqsaP-CKet7#OO@K%yrN5abSLNl9o5j_O zXB~f^f4+L-vvcslCa7JEBlorh=$8Ss8gCmF5_SI-QG(Z0Fn8R04?>}0b0wp`evi9Z ziL@a)ZrHm7#Sjx>K4i6&Jw0DD)K^zb)5V-odL^$MchZd5;D^+WVaNL_K}k6G4SqJ6 z)liKHf5VC1z->yoKgippR1hg0RVIK8LV4SHTDN6z-jGce%C?Z0?)t$wnmF%jShR@U z>&Qu}NYF9BSqMjT0awuE^O{>*qIbZ5z{2yrFI-H)OYKxR0hYxJuV_Cm6#f-OUyJ2< zXGuKc6dU&gUQpu3Nd}!TN_@M*yF7($xcq^L3m5UMKcwb}USsal5pW~#p{}-ARY1sy z-(N&|9kq(Ai6_R6D+DRihZUz}3P@g16QVyrRDZ$Ya0w@x$j`cj%pp}N+`3Vp9h9j? z>psl0`h(4r*>E>GDja!|^F3k}bBqG}>-=X^?!zE5G~5UCvYW+5v(+Una~dCcr{a}_ zt2T6eP7G@-?Ak^yE66%HA69MAxzSuD!*9#CtZxrK5K4_tHz_qjD}~AyfQJcH=ncM) z9#iox-V32X`qOz(*IjPwXr2DyT5cBpAO2(ObFYX`I-q$e9dm%nR-s{l&-)-kMu1lN zvc7^D4M!@}Hh1pPh-m4AF7MZURa30N$)qQme5|Qvg4b$?c#N0cEeIIuiRS4p*+>qH zRiu%xR-qemgRHAF{NoPl4gpBUgFc3l;9_=Ai(5)AlMLz}Zn3`U)iBRgXnqB$zPjFM z_|Jh_nhCiSn$b3%L84-cVtr+tiotmiN0O5nzgelv2`rV~&x|!aH)2HGDjVIQwB<`p z`iIj$>I#%?lVm0em9quvo?{v@P0Lik8)qLCi1&*rw8WwG>h8>al^N z6!4K3d1r8cCUtJC#S4f!dwTLWM`O$HZ`MXf?<$g}VFVD@U(as#`6u>Th0Z;BM3An% z;DdiaM>s(5Zp2VkLp91=BAN+skp^4X^1ulOv&67l>(>6p8B89Ub&QP=rp5QBKXJ1c5R zG9iNA+3CsHRK}S)t$p@WJKud%5bK6+)8EwA@iFdzRM|?!nI{??l|8SGMjR{%CT=&y zi}c4xFdestGf;8Blh48aB@N40{i>gDLuFt51tdFEQs3i)z-Nz5WXFfrBUczpo2sut z^cz`U^&e)G*aNye;gBrFRnzRrGzQxd70INka5NhI#85a%lL(R}&FPs?Bnbjzf0lc+ z6ijnYxZQT_naKYvkj5*=%E*_8+zHAv*AAP(V-i}&AiK24Nd8Oe(ma@9>7N z(bXggOnk?6&P*zIsq3~sBfxuy0>?0GDf^hBxM~;>3f5iuNT*UutNXo4XVYd4mN#VX z$va}Qg7|_ixJkfc**lS9Fker#bIovDJRIBT8DlGUMsktznR*$}7V^jJ9=1?z#iK6} z78L%EEk57lJ_qlq*|OBdak&7b%3GTAUgTzwlkKkScl;_A!3IqIUV57B&y{GIC}l7_ z0EQUs#p3^$3I_>GKW26l;?ao5E<@kywzYC~%qvM_EFW-WRYoosG_LVVqqFnP8&$gy zv17)W4(+s}8z$Fwg-qFN|1=+*$*u2%kZ(JY%g8u3r06uH;~rXJ{oquL z&(^erLAS@nJ`~mbyT@O)q1wJk2_|DRoM?b9UM zUvZCB(wQA3?Bq&|0y!IN)?N^0w)R95hn-<5jVj%KHV-!KTg_!kF&jYO@kzgDG)hx|MsEK4MDUCI9o z7QFdUN!)UAH45NvkiW*k$1xGE!Qu`WF*|`veOGatbauExY|I(q86+*3kF6X>;+vQjuyN22NqOh{)ikqN zr_LRq(4Fcibww!?+jU6SR=c^pUp-qC|NQ0`N?P&RJT=ccy0R?tm}tIS+4#m)_;J)= 
zQ?cGUOlGn@?$9GcNqD)d2B;I>e>-q%=<#s-Q>HQqMe=^r#?g3GX#!~ei)#8lSL=lFKX>Nl_I>Wcy~xQw z)RB(TUn*TyRgF%wpxT~_@{SJk5j;rrLjwz)iZb4`73@vzwe3{7iaa0xdI$^@y>!m| z1gL1;$6|*EKX0ebTI*4_!5IYiH+Oz@CYI1J{F5KcW%dP`7F)fSX7>&Q6C|^Vv)E>; zF^Mh1krxIJ+-%T*=leVWyuZod@!JiT29QDAJxMR~z_4vie-pQMb>I`DHn^}lnV zL_z35&d#x>UbyHN->h;!oHr-D4zGAaLcobi$nu4j&b#@MoC6UHm|=QW5~w zn|NTj;2Ui7P6ekuoiCW$jmwPW01?fr`YYqKv4P%&X!*f^S#N$jQ_ZQ69iPW^NWoYU z|6h>}U$r72CZ!q`lT?by9(7RURW#Ti5IX)G^D}K!kyId@EvZ~f{!8n3A-?Dif`W=2 zL}A5Dkt!#abxvlPGDXKQvS}Ii%E-rO{dlk%xy)bB{^Y}*Lo&l_9vU8iLyr}V%wtSB ztcC&Kv6V;Of8~uU3|XsRdlg3m-4{5yYmD_`;bTLmM7$Ygq}kzIUrTa9)6~&hDVYmp{{{rA8Zv3&aL}H zb)V2s5OC#zB>DSmo`1r*Klrjw-oki{z%^xv*S{;!K8(LuKmXk)v<`M6H3W_2V-GVM zvrlycHKSdu{Z>LLRelCfM{ECSZi;$X*-Z)2<{(Ev~;xmym9+hLZ zV>u=6Ht8QjsLcI}y!f64um#Z7mfhR2cYAgC{CI1yZIzY(yth}NnpOzO5u9T*ZCcxHCqx0>?xKXS1iqtXl(fFz;b#=Y2wkp{VswEjZ*)x3YK*4l zkFzFiBYl)xDhJe~5#T^jI5JebDi~(pDa>?Xm)p^EPKL(@KdYNHIrb};P2(TFFSVaH zNTmy00t_W46akk2FP~bf-BQ8~a0(1#GNq~b8SsBW7=Q{`pi1AT2S7a7zCTqE;21;1 z5r94-`~oZf0!RdbCu8uQVL-+!iSr8ek@~rMioTNC&W~j~rsDWrFef3+i;d8}NS)ITwY3Z>>_kFbh>O<)+qhlCqs5 zFCrSwS~{TlgJ0H^7`1!}8#{9J2lEOJ%w%2a=mUKwUqhF#`$czFwXNc%woVnfmm&Wv7gCfgsBrjxOUbt7hga76cd)b(C86pN1mVfm9lM-%Xg#7~t zYo#ly1N{-rk@x+mGeA6Ioxpx!dOhC_QcqO>btR8a(}mp!iY{qgdn#_JuPlsneyTiZ zaQdb@S#hApc7{B>%AX7G=i0pTBgGFjE;aC1XFPUQ-ml_~*?LxeOR+Akh*dH@3gZsg zF|vK*uaKo*PY!!uO2=3|f8V+`sB4w75!Ti)1Z#(F%>GyKCo6W#tf-A zU7by&EV9MN%`z3K_SllLVoGSHMVX&hKvrv-zX+=(2^Idb^vfGhupP(X6bt<~=WFJm zX)SdGZ;F!p%pqdM4wrHux#k02osotL^rpIYf$Y3fbO&vor}5}--kW=)*&_v_DXLJh zx1O8(d4+@$hVla$rad~@3rV@x-u|z_OxWzFH+w%BL*2_p zTTRw-ayjZHe&0DXQA@JnQpUYnHXsQB;5|$xRX|7tcWKCPU5!R}kC$6Vt|Iq`YAmjn z6=l2EHf(~n$=^E}-#~wPgp(UT;vv3@C`&JJByHU);z>KlCKgl5>l zLxsqS#3tHD=TDLXlVEby&JY4VGH3KnNy4D~u%gCxw-LH2X4g6yi`n#^h-**fB)%ZO zN)`)|(?(Hv`!$;2c_(r=B}%j`kPN&X(g^=i~YE0v#yNWz`K^~KiM#mp0v zxk0QbsRvsuBt3%M!(WLtmv2E17U>Y|?kOQ#~oVMxl*_tUX5pZJgHO zuf1qW&7&UAt_tc4p;MMZkuv?6z4mQ>{2j^duAW{HQi`O0)-vC3qV0R@^DaDcKH@_@ zn0I7ckx0NSMbK{d+qIz|R!z7l)t^GOiyksPKPzR1Y-7u{m+voiYjm7m8Xqi4%=gT- 
zdGDG#ZZ?6XffvVKwe)dv`~wV?_SKWn5Zx2l6jjeFSyg#f4sry{3xmV+o`io)2X$7= zL?OQ6BZHypV)pIW;SHnIBi^@5&_%Ej@fs&@okWPF=Ejxcw^hjB)+5i~*fkuibNW?m zc{L?V0=>4FC;IX6>_HgKvzR^h{}9 zdQ!L9@t1QJiAC5nBU0hEk?xr^5lhG;B+POVvtR?);QfQWgGN|$l7ULuY z^g&N|6`pTP>X2&rz#ZyFok{8-jWrnH%gV-*dbVCsP^oqY%d)K9wW#L-J59!<8yly= zq3Ze5d~Ijfh2|c17UJEQg^cbd@8=HLZ&1d1?hYet7P+Zw`_|4W>=twti1S~sjS%tY zlJdHH)rxXuV`tt;-_?)Kbluiv&G@w6J__z+q0^nhE{~*Wr}ysX7b=`{BS@PViePiO=SMic_cZ?I3634-P@>t z>HSEM4eJvpxsflW*p}Q=aSVD*Ry~?3p|zvxJ7m(fK4lnAl^@WDAFF26Bk7B2Na;SE zw7r3)<51Xmf`&mG+|EwNM@si%SXj#bd2OqoL(CmcPEiU&YkpGfcXUz0>e+8W0WG8W zBKrf!Yq4u%r~BbfeX$ekA>0tU#vJ}5%*8aUg{>m&x^>VONtkt1cAV~@8!3bba+m;- zOP5DVJzcCLSH%x%^*N`!S*pB|r1bWc(xSWJ7@2^^yb63xzOnXF)Q=o7uga6$K)0dd zl|Pv1&+|Py542ayng>_{l(sx_(DDB}Ek7Hyb13Q9p(HyDnu;~)} z)8S7t`OqM6M)U-b#cv%$>@1nF_A3tdF7NR+K~Ul}d}z1s5rfji@;C46Al7k>svvwe zu({*R4@fc0xS#IxWfA(S%2&s^moK)r#pE6sa}-@SR&oP{Akm$m2DD0abcLNHu2al)_fq|yGzzg&iu7bzWI;+q1d zAn?kP(gzWt(5Uzzsdc?z71r!pP!TC2Q;kD8S{uD`c0VVwuM1pqII3gXRUJ^9?J_$1 zgE2TP3Wh@yCz)wR&N3Z)Lhy^$2nhKHYr;ppQ10U_fX6+{+fsC#NzfUntP^4pKovj%n%``$|gIwd+$- z9rGE6^4dhF7Kk1BHLe;K`t1MP5Wc`2iLds8uylnlnR9SdLiWNWCX zVcJnz%k5LCyU@rnbyzrpF?#;?gHPnbecy!U#U*VgEN`8{z%og{**(4P%9q{6%`j%% zS_fNL(tE(sOS8$Yv#^jA)fA|24DAQ3UiP>LM~RU0-h7WR_d!)ELjE%4_H1L=7Y%vG z*IEsFTZZ{HqH)+@nQmjL-b(0cdUGy(b=^5q43C&M8XR^#PgcdU?rJD#sn+@q0Z)dlJC^r> zXU1fUZ!7}-GbSVj$TsQF5`Ut%`xm2^cr3|nr6KUh-0uPnF$=#kdrRtEf=oeHvdctFTtdI(#fzWP4TK4*n%) zNg4Gh=M*TM2cO^dv*G8Cr!dZO=qAMu#n#T|I&5yLgj++!k^G)f%mNr(gokoKNGcz! 
zvslM-)+1B{)SOGzII_1m8oBlG7jaXdu7$XBJdTV{49b`FJ`?yBiGC$(gmI1+G!SHC zq)h&ysy>~JM~tFxs~vOMAPwky9xqyE(7)Yd6G`#04*$LIqTPS(00GxBxwXSj(h<^p zbw&3h-B->e?CMY@R=iV`PPQK$8tj*#Z;yDu`fcf_>@D-FJ;^O}Xa2^Rp`DuWMPSen zOae%L8daiE?TAin@SitQDd6-R)PG9(kt+Bdihv#?3#j4|clwPAZLOgO zAa(WHH?}3Pwd#^2f;+lwKTj6)%|ZZ9qU|Gmx{z_FzYa-J`r2NRFk+}=>)I&(GYnFO zNnFA2|N6b($>nj^$6+w-w|swKo%5CZETTaKKhLv?b5CK}yECS%u)>F`h-0FKc~Flm zVDcYezE*$Jj#B&%Dyc{A!wbAp74*7aly<6Gu;==X4RMS;S;kSuL5zETLpT8}aN?In zv_Tw!J?ZdYMHZvB1NjymgrBIN(v49{2K_t?amzTQ+nAI`f-wV!UJ$MNzI}y3tf^vZ zc}{P!=&NG^9E~`@S1Orr3H;tKX#Z>t!{rPt5CiFKhhm@a;Y8gyBlv@hdF69IFki)XJrtmarpC*M=pF$A!Gb{ zmC9^@Z)GYRWK=bLaD*+V5MtK~pEpR@qkM|Yh5ci4Ke?-sa8*n|<_7wf-INjNwsJKD z9I97%?4Lt*6ZP2cIab+Mp~&6>J;L(uChbrcZvKlWfszD~FxnCRMYLC&J^7A>nqwMc zlr{F_0e*+BvF#rYslk|xAkaA{Y=LL|^{@NPHKeS6Pc{hr_8d>7<9&TGRVQc!8iV9S z2Zd#>cWW&xsA>;tXvTPjcqkbvKRZY=50q;eWDd8Afea3S2`XXpE7#UNKU&mqZv01MY#KEPJ zgE8bjAHF$PrPO%8l0Cl$lfh0D+<^55Y!Y{y&}yPz{}rw*ss4d3CDBpV#q6W!HY34S zdpQKJP(S2uJ>OGNi}LD8!Dl?ZcV$LVVP4m`l!kf@MX{}Yf&Xf-?~%**VE+zFb)A}T zlYJ*^90UKyA-KQ(6pB60;qqGRdn@Vi7-=-MZ_m?25Wefcv;nO>pCpY4|8^O**+%!^ z0Qjx!T?>&B!}N*#k`kyBiBo-$!DCxiOk3 zgA{&d8MntI?uPExkKI@p%Z`39Kv_8yI(V~yn(quBPUA2AvsZ;!CNQZC#b z2f^X5u*)!SBV%PhM6OCr6KY80(%lU+A~(zC?*RIj6-X81e3OzI!Y8`k>dfjSPAV^& ziZUepdft>Q%h@X~k{LVBz|0wq-RhGv9dd6Th$F8zW6~vzz`1&@6xRvIR^_WDd8fMX zUb=~U=reP@k#$G#gipB!J-nW5N#5|e5^y`YX~LqpH0p3?oPgFA;4Y~pbJvwGDKAt0 zS72+*%q@2*Y878sWExD^sd(^+=?}+{7^hbXa>g$eT|EPO6cBkPz&0kNcC^l6qRc2y zS7M*_^uN@*YRkAq9G3gV$N(Y!G5Im(Z)u@Ki~KOdHFo~nJs!|LIlE9b={KvWuDoY` z7~&#_{1!-OWOyxX0CVxR-Vy#>$odt*Hl7^aV{CV?S8ky89ZWt3!h*SNFkEk+l7EU^ zSlUQ!pfVxjDA|Aw>Ff?mrWh1x4)jhTX0or64`|X60a=KG`2CTsMNq8}KF6Ufc4jsc zLGBw9^abd^5Y>uYugk2iHuKPVDH|rrzT+O&G zgUy-tZ|l@ulrCHy3z7aa(*ZF3_?=_n*4STQryFEY9`j4=ir7z;qw|3JA||0#>kp#| zj$B2L8~&Z@-929Rm*yZ}w+Ui%%=*-9UazW#Q)jQ8$u`oy#-piTRhAOB0jct0aBTtL zZI*RxKx&NLhp{6jE8cbquFTO>FqZ~Gb?oV>J4Xm3N6v6SpzR^0pvhjFw45dor~(1;-?L>sG#;mR5XkAkgukP^ z@ixa{*B}MS)0lt--j%0IwROZBjp7o9D;HH#;&)Fs?;Tmf>+LzZ=}o*4CIa*7Eq^~E 
zgBSnPA;wQGtGagIag`iIUOh$%Xr0?W_9k)2nWX?pUI`LFXz7Y~y_W_~b%O>C791q! zyi?XHjfer!>6PIvfDwwKM#%pdK;k{4hI;)ROyg36 z=N&G}JVtuc>`N@ljqqP_-6f%WSrZ#8z0l_mYw-?TKU{6|nlC;kp_SKm2wn=`MH zs0weATlpWRe?^li;MU7Vo>LN5dS}T(#Z7cfY46YvQaVH{_o17jMC}^C7XGe~qdP;= z1&WA&zlIDy+j-hsdyeM^s4_rWD8_?$Le0%=M7eY z?Mmma^w6jJ0(jFhECt%R_d4O#_{j2igQ7>g$}^SIN>p#R8aHo84$-Wq9_3=5qsc^D z2csk=mXs#Rn~^5XWAud#DQwE4p`q17l4amdf5e13dzzueFq=zZf%8IcArc&7AW z@si;!aE)TL`_@Rm&+k`%@~loD(}LCloHpQ!9XYsNd)bISQqz zs<(Y@rxo%$EStKx?DO!*mEY=k)x#qpuK5t({dkJ>l6e4>YZ}3xA6)fr#erx3aZqua6F^h6~a&ME&DlGJ!nNKTc!(28x^n)cbh@D8 zpIFcfq>Gpm5*BaCPK8Avu^RuRZDkiQCQ)i|7CZc#Sbtm*Ac3dI{ql7O)F1o3<`c2F zi7Yu8?0m05BaF~7uL)!!H?>*i!_vM}NGo7%O?9ve%uA;**0z)uvNxE|H}vh8DcB-?8u?nix6^2(pWC563FA5;Fo4#cijL-+(+2A=3`&Ze zT%#RL^GoA2xW2uhqy|Sw*a&}acwY=sQ=FBzL=ketIr&lFtY=#{!5YRopY6MfghOHc zdjNaoSP2ERhx~VMIS2Vaz2#)MSxYpyX1vVG*`3BlsJPqYf;Bw4Ppg0#ER8}1mKH&3 zjaAait7g)#XBlR6?$+Kg_kP0+LZGUr4}B(%0c*ImXCQ${VSVKcG%`)5zL_f!-M@_y z*=Sez(<8pg0q7eLKlT8*hR1-&y=BK;hvHcT4=wQut-GdF2z1BJ3gxF5#cBCz6s4t7 z-`xDB^++{uTezAs{;lUQ9DJ^Yt(4i0`Hes|%dyznMb#Oo+_S#u*9LYw#$zX%9j zq~ghknPT!oJzZp|?Hc2}GBdun4}D|3+$!gsJG?O#H4JMXvmiQP9WDb}v0Q%4XBdU=7|FG&br- ztBtQM$gBEx2h28Cy}trMJ5ncmr9tmGCszb*c0A5#UC81S8YGK`zpIya|vm z3xi**VlU}Gd|P^CZ5;lfUk)`{$#WBtlamr;)c8WC`^DwDT+St+Qpg4vWy7ZOeB{e& zQ2d^Die&D-r-MpcFx{w-&8zV7os>h79xF4)i;qwfI&6Y*m_$rMmCDJJ5V9IZX- z!kt(3<;tI?r^v2I_ad&l-<$OV{@K;F#x=um^UJ)!^+SXzQp=af7ub#dCP7cQm?f-V zJ9eVGrK&QIjepX28e+R29s&QAr zBn2sMw9p8m#}ZF8N9|9JYwd~YAT`(i)*Q}%`RVL5p)E1PP>#y;1r}o_x9_TwrK0nB z;4;a0**ZpdFcmUsD_T@)le>+I@Oh9V_TcR&AI*{>Hide*H{D;Ix|Sw8AGZ8%w!D$a z`l&`W{Sb=lxQnI>f&BSf&eggo5Ldf*pz9P57BBd8UCiUj_OMqUi4v`V8aEP zCcP%4^JNFM!KL1SFdbw{R}ymWA8l$=hiCWpxR)Y_(5;5b)sw?=@nzH|N`K(5FP>MM zWn2+5B0iw?PRL+Tg{_VAE@o?>f=FyWlrQ9C@;*|3`Kw`Pbttwu@y)q~oh%Lp$rT_L zxhV_>fr+H5!c?fZYNv=W{QMCRP$w0jtNqWqH86ezXms);gSQ;%Z!?ULNk{iU()qPSXhAmpJC+|yYZg{N_Jdr=4sUiHzw0W{K>Quc2|k>m z5)i@MKI#uH!B*#gd+_Hwu$0D}>zZelAia;h@vgKnlT7+#E&PR~MdX`s2)aGHPr3=H 
zimZb5+-m!%D5#5l#zT}fA`SC(jyFQ*xw%puo|RPE*dgNq)JTGjia{96%I`M<4b$Lx zL-fawG^h%!6ZXr@=Au1fv5Ddz9%V%*bJtgFRG!^NNY6ug(A{2DF=HRsDEMrDK<2dk zI}9-*Lo{3qR`LA3;Am>B5^Nj;8d*i8MWX5%hW+{tu^*%}VZ~YslLwW8U&2~jyQ+H5 zM|b0WPhkl%*iAsU-~QXAw zAo%=p?o|5KyLo$NyYVg1Wk!qq@-h5lT836xMuv`$L;rTV{|GqkxfA>rXiVmUj4>BN z=`@q%Q`5R;!7+5cIS9WA(z2CsM=+WvXd^4r|9t=HX>2(A)hc5r2s{!a?QcKk07Pa* zqKm{}V3<_r7Ne%?1VdIU9xeR%vc{#pCw28YVqBdDN}($y0Z@Hr(CDt zjx`E;F*3^1k_F=KcKg?~>FcbFFYK_p4_1)vu$l3w3|En}NKtuiy|AA??Joa-hS(Nl zX^p?45xp0%ES7m3QMDxEiehx#vzWK6jPLZTMpG?vZda6es7@wCAw(fi@pBFMIF08E zW&@YTK5>yM=Lw=zwkp-gT;|7ryTt3&&#THI+ZK8+n<+kSh^yrJaLZ6Elj-@@bq9k% z01xpeFWLv!wbc*LC0%zO2j2}V?RO)6FJn9?a^JeTmy{hF935LPJJ%x2tcVbC02+~` zMfeRB6+lB_x36oQ(EbD9=~ycL3E8S5Po;8^?iCtPC!`c2l(6du?J;%L-d_9#`6JIn zZ|!4fJJ?x*#dyDQAWHR~Q&woYfShT-GOnTCq5$f(cQI1|h`$Mpr0VPK8C`(dJduiW zs30|V?WK3qBaujQIK#W8AnPFLYBZiaAm%V&Jje#3L)zUQggui)=9^jd7%gH>po zps7R7g}ki22lG?pB8?P|b!M!;y=|VMQh*vYxK_#{_;RJB{8@$T+X7tPVV7y|`egKK z24vNp3*EU_ z-i7W8VO=0fEsp3sTUL8G!BO9!)~?Es$Xtpo5KuxYM*kt{S?J{b*65Fs0?vC=!JHnL zr_`6Q&!}v_T!UnB$p)bEl0_>XTDU;NSiEg4y47(Jt5^f*Hr4hsHX6ZL?xdfATA;ax z=Tt<7DY`-fa6KlTlVP6^cI(jN!?5xoIGny$^v|%E`Z=`w;NSCi>WTKaBhBiubPl)` zijRUzM7YIo{dW!-(|&F&z1QU%ZLhK&c(HInRr-9SkAnTBh{31RfyKqWO*7`Hhl`03 z1N*l+9T&!jL-#(J->PSxQkT}_EYAYR56lrio!^xJT^=G|7E4IJVJ6kr{t6YK{m6Z_ z{!SDD#J+Oz%*OXDOpmD4mvw#^p8g!*lm73@NxFNxO&!2u^{QF2no1nAbY^yukbwi!fp0BK5dEPxm;1iEA zwY|gZ0{Zg=rCMVe+Ddgi_~fEmwdJ)uGlpJ#>AI*6bSA~1uqXV|xl%U?Yy9^8<+8TdQzRfD~>{EDT@DRX1P!$8kETgeLz8%EAAiiU-o+}(7(f zm6y@HFSFL%Ni^|+knE9Ng%svyV=LH*T{v!XDD zoaeA__2|l_q-qE7Ud^{_Uy9g1JRmDX0Yj%e>#m=_va$-KIfzWjfbtb|2VS)Qlf}?! 
zWmZ5N8oa|h=nAcb0#<*enHnhl=|6Z>3LRZHa$IUB!9~5gI2ymN9c6 zN8YjhnX>1D+@bQU;|GKNb*lI2BTtGUn^GGEB?Q@v2O~$b&t&P#i#C;_PlgUKn5|+Kwe4zTx2595SrDvCbl$RT z|7;pTkhBzY1lQ4wA-%LFG@m9<=Zp+Yt;E8N;!avrxWW%TsNIciS+A>H#U2a2Un^%gJ{a z!fw&>j{OC=u2nz({8_7-7*WtA%C^@J7KC4#dh;B-C9rSE!GOx;VUG%X0N|e?M>VHo z(0CKHGA#+IHZpx9hgPlN)1Q0hSe_uJ)S}uj6Y@S0w{&63?d|>`__bI=^$bZw=d(mA z!6Zio>de;zKi=82Omz@)f;J4MdKVl>DK|AcDbQf;Hxz5dVd7O%jM;&o9yvVe=i}s@ zcpsDuxyinF$cbdFqa$wFGXWek5X_-9sO1^fN>-?Xvn7Lrppl(#6!Rc>;Vm{}Z-~7< z@gp$fJ3S@e=h=`v;V>pl^z1F)>6E|{6IofYAH>mh1v(8g7MWk)9L$-%qaB@jnWP3! zq(`>S%S+chRqr%C@EAzTszE&1yj zgKnc<2|F}rTD8Y2Nm?8w`nVM%6~bGu(l9!~+Lb4KLAIYYzg-3v7{4G>++1yT$gbOV zi#Ch2H!o82^m0Y7OL!h^e92MH;aX<%Y8%#D)|R!0am z7hv$`fm=qv)Gr73*JVOIK9Xlzi>pRTf03E+?6)W=TWb!g32UOQMS)(;9Q`rbs}ELa zwlDkN?E=X<5%0Qk1Xv)hC(}|^tI&bGzEF4Gwusscf0bn^syQ~D^DIA+c=qb(i@EcL9yqziezd z!6Ln&Goq|@T=)>e5rnJyzDD>(2Mf2u$Ij$sQ+7Mz; zof83L_7iM}NZJ$4nTFWa4VlP4Ml|Qxq_BKF%cX&@K6mzTb>srVOw;h@JC;wg`AIo2 zPq|K)dd?i->w9a=v{ir|{eLwGh@+0mYLcZUu|Wx?Y85q@*%AnzYgh{6I+r;W0cCQZ zH=uHTxAw*5e|EEsKV$%p>LGQ6n0D?T;C#7QrPN`>Y^R#wkVhL}V`)Wd!!c*@Q8(5X z3N{C5K4209QxGYMeNW8N}JZ}*c*R_u-LjGwtA4@*%tRVA-YuHk72zbcl+Q-PL!1{N>|zhvHTFTLFd3J6f{$(Js9Lb7&Q*!IXTp zEB}qC&xgN^qpvX8%QaXh2td&p)$-n2um*GtVlSlLt5g1h`cvP5z^9>WfbQ_@#S^Fw z_u%bxW6mx%Fga`E5thx~%l44E|Efy#)+_nX+>6`?kwea}{m#eSWV5-1siH;LeTI#k zEICies+PYzeb8tzD_A&1?{7aJ>61K)aBYH?@7b19f-k?kG-^LWh|+%Rl0I&??R`0O zFmmwaV5D!kPrtS~owa|jIz}Yo5!MZZEat-_yuTv(R>T^h*pls)dJ{ETqdahpCsWC9 zX5pL)`hMCuwe{%v>vlIxli{%PjY|20z~jaHN-wV{epwdI_sS33FN$RCc&^|tz{^!W z>>nyubW+%w^FCeS!-_bVrn?AaJegbtuRl3@n62Fpn^>#rX0JGi9jdmByJy+-bx5GGLIcV6;%mEaDc;uWo&{BJWN=OCvqLc;kE&d5e!6)C zSGQ7WCmw`Z^Xm3!Dw%s{rK|5gAL%LV=y;>#TA+@-ImW}EgRKHkvQktjQY&aYQ$eXT z!8$)cNGLEq2r72{PM%8&^oXr48~hWPyt7IlvHnV0107=C-PRl#=<*)5{F(ZlfDZ9J zpbrU_3);bZ^qn0PBj3;^(M$73onL(z(lp^XrMp6%3!Y8^?z9pEKtP6NhGWE%8oa!j zSafh~{M*>za%|o3(#4fpPto)>R3ZcU$f6mm4+BHY28*$c=EK}qWorVumMuT);zb9e zCZC^S7KeRU)>r-0x0T$#s`5Nq^t(e2nnv7|PXr*J&upp=(dirVW)BT7=Erv^Y;o+L 
z*@d`rC>omN;|d$Yz<=+8jDqs&X@a(Uv$tgJ+Um3_-tyG{Zv<&@R% zO_qT3w~;t{5lJU<=>9L9Phh?)gsu&4j2NcDFHo#WVLg7t+|e^p>;Yl#E=3aigY#p^!^YOKN$i{%vazZz;0)$Mce;&PX=gDS8DsjP43;lOsN70+e*OQ3RL zg5s0D_dlLHL{Svlgu6qsut4Vs-`eKNlfh0N*028}vVILB4s9n;hSEP;K8(`|&FP_; z@ZUUh`x}_w0s^?dW}?{T6Fh{;_q9nHQ2O|G3e`?_9{(;3Du?P4t0aPg@BfKSg{AHK zO%MV|L+8XbjqHv&|0MMyCrpUWih=ZyGY35N7VxZ&Fmt!ASG{G&Fk%C6i8)A zTa?i~`Q#YvHNCF=3?*-2zyKN@iJX6jvWlvMt06vfe|*h|zg{5uRDqg}S6Wm?f!z>y zWly)EXhRD@C}5RS+S;oZxB$<-qJvL(SkJ!wr0S0X(3ITYDdwXdVb7pd5zvqx=-(wT zaVeZ~hllM+oHgoNqd<~kT=oRA1Azayfi~W^!4kM1PnL})dbX!sJ^zzkJ$HV<48>z0 z+$5HHf4}`tfli3}?rOxHz3IvDV-y@y;IoI9;tYa~f2>R;VQ0w6RRw!5|O69C| zhFzJZVC}D_;r68hqTpT;f!+bC$D-6_FO*iVnH#i;#1Aed%)Y;>z1^wubvJ~2V@X>N z{1qUwD_I8KdRCtH;Q%0jBtV?1Y#Y{*Pq|%B%DfBdyy?q~mW`wjW+1|e?mKWAwwDLG z4C=#M@dwIF3ttzrrF-UyvyBo@jj)sfAfvcf?E82ao}6NwfA;0NM@b|u=JyfB8PrIZ zq25=6ywJlv{N|ntOh!#Y4sAz>luD?Jh3i4mKbC)iLBO@OCVWq9g`|#P76A(m0D?2XSMvNg&mD|u^ z8vMlQ>$!h|uOi93nR=P>r2NHv3!mrDw|8Y)J6~nmU(90Rg3><=* z2ojfP&%Zq1YJz6|CQc!1hNGE%@m~*3Eq&qYzk9d_H+J=SlFgen<2&tEEOtuOsk9rq zE?W-3>o(5nds9KF5Th&(8yGae{v&U}bGdHi&!^qS@eKqL$x-;F9_*KJDUV%}ZVuL! 
z=z&x5KC@2+ov2NPL2*)(|EGy+*C#bLC!b`peryRUkHg~~CKQjuKy$pX5+6r@HC2FD z@dtn5I7D8RON6FIV%giQua#Sez3=kny1KQ?aEjaDZ@O=>FchWF;Y=76wbBsBV^V)3 zr6;a$8d-U2@02)=l0@1B>5o(-ZfK4%Fg7Jx*|DLF3BIFC`_WgAl)G6pvfyJy2knM= zwth4fX>7y>?xq!$uDy3AE(0h3ZBgNPiL}<8t{h@lHY-vVdGwst%tT-`+fZ1>4Eb8q zM8BDuD1PUn9#iwyizv_81~lslq!`;@w{}q6kd1}Exin(;!!^~m44gUC1R2o!Vdg9T z&ngh>0UVQ(&a=c};-NkR-^iPf9T0mf9`^42^TIkfP}6X+L#3L3ep?_A0avw5-|KH~XIG;r>7Ie?=0G?uMl4-w_zgG4>xTbzP{k{pOZ#9Z-f$n0K>l!U(ALgv zFGw@+;;v;K+AkG3!4<)GeR>woICyP&`uOQ>nUt-mn|&J^1q|I!;9IjWUU;QrF%T;) zZ)L|mVlAXqAN&!9&>tZ#ymI7f_^7YFk==4+5iIx&^X0c<7qwq1_Stk1h8R}KIK;fY ztgKhWdq+HFp=UN=jt#fdwF48Qc>h9x1Y&te z(QH-7>ta+9yu?T#B{HakoKxj`VUJb#prsJRkR4*2*it> zXpYAkY>}hBj=V|rQ+fu!L;X-%AknR3uAP*vY0fHQlsAE&?kFcMsgXm zo=rdpzjmEkt<`m-NN0cAXQX!?ohtv9sFhpC%a45)oofC~{fxz-U^u}=R$Gg> zyCH(YO}!=v5UfO6%m0bHM3Z=~{&ejGFd*=7FQn*+UqK4eH?{wBDK~C0l;huC-4;Ze z;&F456?c;(z=xlQ%z2O$1y!X+S9=E`ifYyje{#>BpQIV+RTBLAct1z9 z<8qAdp*}v_@v+m%ce<74U9i>haRa`vRN2q0;#h$kTAw7(r&ll`M6G}1 za-39!Y2^Kz;8YQ@wIH9sW z@yb~dUfv$Xz9f34e$26hVWw5jf$Owmke^?~ha?fFVM_t{017MWHJI_SVGSShZ_-WY z&JxyHKyN53&{R#W``4+?0&gnQ;+O(DFoRO9-!(wwsc?v+bTPrYM}L^dNH*Rx!jKrr^dkD+a| zyjEJ;CQSJ`J>lT>yft%e55XiG`ou<8ldOwc%|11LqERY(WIvcVsIxKmegj`IG5Wvr zXt)2+qZ^%aNFs_hK-S~aCT4z-*e{Ak#|}jJTG~M6?E00P{r9^vVVaqw1gVp#6J|b_ z5M9{QvCfS%W!xz8q`7^JslV<>$HiAB?w5}{3Fo83Q~Ds$?>S}lFXd4jJdfbIllkiK zNy~CiFIvEZQc^zs=tFSay9;@EC#J>^ojwe=8OCbcZwFd(9s`>&<^6p9iKdyfw`G~} z8qhD`-})fhf+hAy>KA>#SI&-PkL~F1?1!z~)uPlnf_I^%`=z;d(RbKk1)gZ=nVZm4 zx*2@RRi3YZQFwnmPWtOx5DR?P#caeiy%+PfSD|(Fh<)0)%|&b(`Yq-D7blc|^c!9* z{Efo7vahM7!XijKC=)M1T%uVbqV4q-zQTu7AY< z*&KpWKCo7Sd?LAn2}6J?$`Xug4bTk-g`Scj>;bXPR{?9eQ~o%F_i%SAmmP_6Pu*5B zU+VW2M0B=cOeP0}*=NjkL%EP#;g}&@HaK-k%6@vvCv8kP~pqBs{Q>L;Zv=h))O zDEshaef09(c?(mRv2{BCw6?e$I?saFi;nk=1K@bIwF1g)o>j(B=KgO$xqeB%FU7}M zId7&X`x4|A&4)Ojp^2>T>-xm6Mle3eO~=>V;?*P($cj6iQejQHf^w9ieA>%9I3MTR zOMIINs{aFs3bmW_*!+T;21{0fKG~GS3{OnsOkG-&>Yzg9&s5&E(4fLSxO=8X>C+q+SugscMcAdcMcDIt?p)S^bbQ-Yzm~vmA%bEWPhj5-+>Qk= 
zhFw#dLr)r6?Jr8$jrr_Y;kTE{sU;U%TEM<{&{IZlszr%qn2(}Dz<_Y}R?STJykYJ^ z6tVdpu{r!xE(h8CUZYfGqHpH7pZ&4Bp0Pa_7PmdF^6rl;Y&#~}0Yvq%DD}*4Gun#= z_p*@rp&<6#B@KF>tm7~mj}D?|!pIvS1hT)OpdGb^KV$+a!8ylbL$lt0mIgHAbJpog z>^6+qH-CWLvV}dWul%_nV;`U(9GB(Y0vWR^3{7DAz!G)vSX5xEESa?t$w zQ%GDSelqb2@a|v&=WJieJ`S>3eGCmBpDVqmjpzD}0WRK*OQ+HomLKbsFjpji`359| zN~3fujm@G&eUS{sA9c^1{<$tRF{0|&5)*VjJwJKYwi-u<^}T`9rAcxbs%lNUucf;F z9PodKy^H!$tIZ@nXm*Hw>36l)MV3G5<_WFn8wCr!Rgj1bLPAs53?)uo z(AIN4k4CflDs=DBx^d}|f#81=>+`}LX%Apjstn7bm8&`_VJEc1BMu~?B13c$K9SQ@ zWed<6L~gD#f)MT_{DW^r!}*8*)z?-Chd=Q=lJ8M| z-=|(uP-CzbL;Hp1|ATgSed9PD3j>4x={6ky_@H^$kC{6YT?^WFYT@?=NtW8JQ-Rig zbY0(x>%Slaa{X&)pa1pk8+TzKy@P}WqIfbI$nf z5b26Hp{OMd5qm@$Z)dKy*!7psVg^Z2@2CW02oKpS{6vt2f>po$f7bo#DCKD_TEtQd zb=0ynET!x*ff6&}I=I{kja+t^HEN5=vt7I`Y!H`G}P+s8ucFd{TV+d1oo!FI^ z77JrOQ|UEAdx&KT-`A4!i`Sa*JxQylq?fIm^#Y;ZrH&Vm8VdQzZBVmt(1>nSM_oU^rOm0Ldr&v2t>! zc+aP_-PN3bYVOjRDhjeMqIk-I4%P!amlhusM{=`^U(3nEf2;Q$6W>f5@m zB>x5RI8>VY_buUQ(!!{`zK&@Fyo0p<-$CaO(B@nk8rH1EdR!5_r4Ewlao;sxFl5^u zRu@zwn^0ek5vauOy*T=^9}yIKKM5KUbMr75yPww$dhr2d%6Y=jrex!catB6+ImEGWmJgX~v%{_{HG ze;o@5cH7_?j|r1&sVEq~`aY)~_5{N9OH?^86Ljv`xwEYGY1g-6jhBsNgix7lTwR@$ zJL1^zk-Svsz;vnC(OURD!#;HAesg}Hvg$|erVrBY`x+tBE1~pdiA-ga`IQv?PSg@I zI$>9{ESf0jPR_^IhAE1P-on+XZ8Gb4%A4zFLDac5u2o6@v_F0dQl6U`98Fs_)put?wUMF*Sv9R>;B2+)STAoc_R zM)Sk$OdMcj9!4dy(9wcfELnWF;JiRl8~`QIbgwt+7!HSn^P4&Ah!Hf;zKJZ(?S3}O z1;f)pC-L_5j;i(N-A7x_;T?DOLfHi2ce=^wkW;i-oESIFldxMPZ|0bjD)mu^dZm!^ zqQm#P%~-AO;a-n8b(k+lCm!DC$Grqz?|`Bw~Oo&H}3J_e?soqLBA_P4^;#bNi(Tm&@n3Fo_K`W8ZbR32!zEs zmOMZF+~DX9y4jP1xaN?=0AHsO{mU^#Ldbd19*I^q7VgxucsL3Ydo%V0SWT=m zQh3r04v-b99?{sZs+jwI_pj;e$lczk52fSJ07PhY=!^ocTq1M`E~Wp}WNB?@U#a*L z4)M^rEMLdaMf!7>F}`E;l6s_3BInOj+obh-voYa@I4l!t4$F_@1v?l5d}R^04IL2> zi%T>oir-^BjSFIoV06g2a7#U}`)tPPunzSp4k}dt@Q+Tef9!o^+QEgXctd8W_hs!M zdesg96*(;j(>NIZF90g+Jt|sO8!$iQ1T3A-rPGK4;&*S-du%Ag33h&QUIKKFFa**Ki!|Liy#+t*;e_;Q1FBW!SofK7+kG0-0t;6T zgM<|$eDKHF7si=JqYz;cdFB5nYFHV{DY_Pyz^%qO7L!(ERY+A-ZpEs19~-KVW=?N1 
zA#uR`LXL4H+=cd1`*Z7p1&hR+l@cob*XlnW!L{H?lGY#jW^anlnzmoAlD$55G8IJL z5^$qf6i}P(~%?wR?=~_z5ZI}a%RKFyuDJXV_|8G3C`QLdcsbEMT);oC2T;B)N$7T2C zCrlT--Skr*-wVo4mm)ToDbICfLXW>8cMT_$uHW@7(q+-V-DBR_a3-=%!jt3YHdOnV zN5feMmnF=3W6609frq5ysVislB_6tDk878tp!m*2J`|!6*Tin&t zt=TAu3h7i5?*Mz98g}jwp{4)EU@O{IM*d$E@Q<9&T?={>iul5qUxHq%s)_^6sHWTx zO7Z&X+I8NifPWdGP%BVcfNV^apW-GNsg~&+enRgNm5tNZKpT`ojc%ZO)Zm+d!{wNvnZV{BVHQ!;GyMHhGY#R((3AMP5rq@xO+N7m^s#8GIs&- zM87ttGgOAFoZq9MU+-UexVF4}v$we3Oec)3c}rrAcPurLN1NGYtf`+XS`4J|FMk0u ztxvG*CI3|N1pP2bzy!v^+)rjWiV|P#!@w@M&TW!%E^uD&XPpsYQiBf9huSk>#h#H2 zcq(Wfd$tAz9@#5xRoMf2?@n-rb2<)0a@A&uua*w@cF(9Sy2=MP*qawSTUF<(~NMK0f#rfIwpd+8lBg@3CBG!53LiXhOY7dMAT0k?927VMkkw+3%nl5P&vR2 zi&BsUv6B!kdAT4lj3IOaPq-chDR~d&dJF8A5lGg~2}^i@2%R*!?T1imo$@-KNxG(? zg!WjPaYO^E#Kb1Flc@D6o0?EQB%tWyl&uspq`ZaPfjuj`vZQXiYscEW(y!*8s%ddE zZsS5}1S$!M)z*-Xe48oy5z~n{raM1BUeazKCIE?8nr7)|A#I&AiA1A|{U?W=zXc$0 zputWbwjpGGAo3;hq-b?_ePt|Z)u@wwP%VRy^3!)mOumte4-GQw9-z>0AK{A)Jf`bY z-za;Or&uK>fvrv2+V<%h&$_QJ{mD7u&G#GVF_r86+x@MS@GzHzZP^H+W7Q}=Qmz1Q zih0`^aevox1(iuazDp4xkaG>!chzOF?_;ho?d&(tHBAsF`T#)>uPYUaffNr%v;G%f z_43KeZA1!UxAG}&5FP@=0b#&4~36(hbk-@+8UV7 zu`TMp!=90#NjGeQR32-FGrhm-l}OhHH?kT6Pi8C3`VU5p;FZ3Eb_$~%3?Ie;T+L5^ z6w<@t??EOi#k5pv7#u#!Jl2nRZBuk=#|dPJfvn%MnT5{A3CD>MGT$t{yA=kG*R?0K zY>n_PXb^q_d4BSoj$DkbBzdhmc_=p2oTICS7)fu_^DFyPvUB6BQxSafbl(pD&8&#m z?boij&ATmf@`y;!(hn+7IXvFA8KyYzlCX_vPb-!CpKDK@6 z{t2NZ3kih@1_lNPh7Q>vM_2SK!3*>&0rU`p5rEm5889<5*%}y`+FIEeFd3NGyV|)p zS(!OAI5=6W{eTC9WOISgVFx{~9*AJzP>h6vH}10a_qZ zgS^$AkR1W`e+a zR~a2rZVxIj`d@>>7&o3HCg&md))y~+x>tW>NyF`r+8^!OT)*P~!LWM>9dw+)h70^Z z4T?^pVU|FG0Bi4s03-hYGsu92!P(N`f1C6lv;JQu*>u3_PHjDL>C4E@GGJYZ&us41 z8(b_|S8MPFqa%p#6)lAmb7FPFiRd@=c5se#bxO;NtG1>)osUYVSedQs8Oa zd#Xc`h{zT#`)i5V-qeGU7-j1DYx@ru!P~=-qX`pR`$N~+XDHxn>Q%QgZvNhXjb)AF z%O|OACWm9sa1gNY5U>Y`br~UlD@jF_M*k{@MAVLY98Y9=);U@RmS9+CAYIpXTi231 z?{0G_-g-@}!QiHNy(T&e5!kHzifV*Tp}M;VJejH!z^z^0tocuaZEcLnBt}L$GVSJg?)(mxKI%1~h5%~jrse3qamXS4f@V%-!K^mWLmShbxMu1{bo 
zTkaaJ$l6<=AupEXy0`rjY%9rHj&!qHG<{ZD>=^sA5Vwwf_B!ViR)wc6Ke>O$pDVHf zD-KbMD6Fo7Qb0fT#{K=7*zYswD+9&dH(oY5u_<0V$4izOFAhxSSd+WM$od}#hoU@g zPURlbjpO`Q7``>B{3o4s%+V7sl|bKCduGL5NOmuBH^y^^=X**3{^avH#vKmjw zeE<(BpS#D1{02^Rtl!Jf*jOFaE+6AoH%?pD_Doz#sL#s{Clg{abX+>u1Az?v$cZPM>aqxHJS@T<+3n-@5F7(DJir*9N5ZS&vtxf4A z`2$%NTG8T#?U#udT`xbFqzzZcEZQ=2%NH+RAC0i&mN)bMU9@8Sr8c#ntoRT!WXb*f)U>ios_z9)1I9@W(hc5sY>sat>4D&tv+Vaf-*Wa}&nf3GO; z!kNev@-b}x-K|a^?70#FX-|-MEUCBF=?bNQ)nu;53jrF^1PL}r1(~6+)8`YCy31E+ zwFhtrRO#_QH3sb&tT)-~_?zzkhO~LwoS9z~KP6sQ^_-mmcWQF`9rT+6n_?xRzLXct z0+Siv^5#@WYPsNjjzoTG(` zZnEHMLm=hfQBS(phA@yd=xx&kvv|qaUoKqfDK$h)v+eXreUeyC2=vtELRf(+?oih) zQTYkd1NY;1d%_Q^%SRd)WLpdPxC^z(yeXgz>hj1NB2f8kC*oIpQw3Cwuah_jb*C}f z9qORWvn@>+>iQX@@X;2EeT#>&Zhi@=L8Z71rRn4a<~NQ?tfNBy46FHSg5PITWLxc4 z7blttIEOD<6uQZpCGI?nuw1PA67>8^@*VW|bGlOai7TbX$49G+MUU`(*4LymPuBj* z&^`G3DZBaOzCaUHy46?sFL2v?-4T19>mGD;Iwu1Gui_|&e!d;GU~ZouSOlAT1(6{k znP>J^he|~2*q&?V_%a$DJnUDYcrIyKb%J+NIP8-akGiI5y}d)yAv+f^9u2z-G>thl z4pY$GWh2(@q{%4v7PJ)UfyXJb2n~Fp&psk;4=voqO4YTtqqj)D745ncvy)6An`IMt zvP2C-FX`aEe70S_A1>>@_G?QT2Ztn8>MSPEF$}Ya z4G{HqnqX5nO&{zDF0mFL1&>6+w7FQelg(TPz>L+f>)1f7+`G-5I4}BrvCi0ePa|k} zH1>aw2eNK~w?Y8V_0BND zDyjGx3XRI8>^t|V(p-3kFe!P*v}?`^)vD0*qktnfA|fT*=wm2S5K1p0o_N9 zT9uQrJoYkEBy*5Yh{c0lU45aYg?}KnmRn`f$x`LwVWhpjrNx@c$FoWk#P3R{OU|0d zBgfB6&T7Oz|8~J4w_=fQ{iYQsMrFk<-SW+r!b(uuLi$x?mN7n9ItTq5;4Ij#iXSI> zw}2-Y=1Rm18%7U)XwEL2hYlj=%qiTVCZfAiwv`J0JwL#rMSQ2}8ICPAX>F9rHS8#- z8lK-DGoU;tamsAmEi5v$G49Myayt;8N@9lGec7O^(sWV@3Oj9#fIamP+oFwB+ zDi^FXHdUsrp-9>)^i&nJyhnmW8fWj$`hJXO+Es=XNZ5m*?DmFt5kuLm;l4=jWxtWn z6Gsh3+^(I~%inqgKJQ%q2A*v=ro|x(4$;onYo^?_bQt!QuOKl+F@9LJAGSR7hn_)} zYdzoAN(jcgeD8E`!0new7hl)@z8Ae(L1m4fuoPmiqYQ?|w7s(#pGe-)%v!886jo z!>S8N(fL=@dpH;89@ae3WnV#EbXTqP+j1>$*>N%N{zf9;}|4}~ALg2fdn-|k&W zY$hH_U@S35)t~echbSO0{AC81x^Hbhc4>$DZXQdCvdbg!x?3hPtni4U0m2t3{VKSq zqd56^T{5CN9PHl=F7%0NuL0p8Ji&^J7h8Krg*m^Q;)_p;Ft3mjupvw!1dRhK+5xU^! 
zm>ALttGx>F485251Et6P6iF)YS%nhUzoD>WX4kCS5u_O4O9f}|m0s0U3fa^GiUiEtmU3D3JLJIGd_?AP;~Kl>XidWf!S3`jCP~y z8De#lwtYJnN~FYBmP!CGQuV{iL`4!y{&=?W#(dCJ1c3%-m&nfB$U!VPdF1{uZ2@AH4BY88$6fE^3$XbHL7w|VUPu^tPk(8| zZnN?c2mq5%=+S{GM&vK%S6d^YANkfDea`--RoN8wlE*DB@8efw2;iw*M$08Map1f5 z44J#U$1#6L1y<2|NLrnreo+N{_|eseXN-bnzmz}Qd%W%WnQ7!rxWH66?rS*hKMa*d zl+X6o<2gK!HE|Ry5`NOZ~wKt;2tkGtZ$Y&K+SE z9?s-lkdo{rx5|X|uBEC*eo! zdvN&WbBn&AD(~58sN?6eX0L@va}s;g-#L7N z7xKFUJ2N*U;bU+Cg#gLIphVt;c?QIjcW-#Y1zKcNOZ~Pux%bK52YbaT9<6QU-F~b& zJHxypztxUkXO|9t8V~Q&`{*)1>o*S*GGI-8wP+q+C=qEmR1@!m`FoCZ4mxY5BjY3e zvQZrkKKFa%>Gr#7SD0KQsa(Y993t=N3Xe@2_qFV(qtYwHp*utU_o-b^BxkqDKa0r zsCp5qKG}7(U&H0f{(UE}xj~FH7 z)rvt!tzRjp%gGTIV2n97eUgWz>vO^&=E6f%vPHAb3m|Tfo21j?^5@HCZJaM+{LA7n z!O4J!(wbjZ?&K-7`jmk=Al*ebh4DmyYXX*xp*1qv`yvtr(W{uC^ggsTn8t5&I}*O* ztu5cQy~o5pRNXcN#m}u7_~ej=vwq9pxL!5{YjuzIl6azj9j?N=DDeF4c-DI;CHva= zOg4w}Ve_xZ{KGtiV($FY1{xzl@grC=s#nqeF*Bo&)O*H&;WB0CeXGw5C~$cl2gt$rCMYa1U&~kA z(|>5$87huc^1%!$pEdN_$`PNTr@qeR2VjEa+E1kFPYHJD*|;VZkyNO7-0X7neSb2Z zFzS8VD}W=sgTsT9oX+*(Rc^L(d2jt&t|XEQvwNy#7cuC9M4U*I);Btnh7$+4$H+$eL1Qr>+1AP8+A(_dzaq8ykc**pYHw0(1CN%=rUBQ`Ugeyz-CS$k5 z&nonW9@{ZT-Z^FM!}<0Rs?`&sHD+caSq!E9Uwd5^E4_Ho>01j7BWb0SLPw~0u!%Q4 zZ)L-1eVDbstf>N?_;92Q;`U&I4hMGI6L6(WrgBm}W&z9do8c6YCsQdhDHl#byX@~R ztUh?ncyw8qWz-=uB6ThayqWSYZ!K^xqUuIM7g+6 zULWdfSbnwIp7xj6)DJUx^gLf6d>_+Dp7Nd;EFRvA?22gV8BbKHcHH`eb?;ESyu`l> zlkQcs7v2D1*He~^A-?5B*lum|!E&M&A(`R{?zbG&Mp^B`NPmuvP5ykzn^Z0}w?O1v z_OsfUpkx;$P#C9sK0k-yyG;PFukvs(i(*!p>oGN-cJcbZhx7 z(;rq_D;>iGH2Lw^tek~GM3&nN{B?%xJ7>&bCw_~%NJ4JwIcZG38NBV+w>~>4CBHS> zcYc`t5i01mL7$+_84Uwp5I95(3zxp(VZ|h~bo36Qc8Y>&?eBP>l|9!h6NbmHSM^vz z?&h;5q_cAHRm&!#A_c=B^R1L@x2m~eHL4*5x8yzPBQ*AmTA+HIquv_N1CLsTym%!qrye{1ofDGiA+TG@jA-tcQ zrEWqnBf3&*9G9-zu>=Esg){!19G}@h?S6{My!9^CEs-_)nwo539QX+u+dDKVMtu%T zVxxBsQJWl6f5~dsl-{^8e@GZ9G@npcpeGL(j#})GMfcVST472Y&@EEyvPW_^`jhmL z-HP*>m!DY*#AH5O`VGn^2p+7M98{-A^?pZhlQhB-kDU2*IIXLF|C2WEKd4cY#loF*#p9cYs`99?4Pj!3!k(j{625qZV=+&}TH$S$Dc{yXx&71!O096P`6aRi)A{&@O;y=G;c 
zX?upiqwxxIMpU456Os$G+aojg*p0M*1o4AEo6#svjhM)xu2mFvY<@#!v}Kq>>FN)@ zKcTy08xgWGOn#oSwCDv0_l40MLDuYH5gY6RDP2am2W@qPkNN3e=bPTf{a+LKdkr+( zsKE@f*FC%G`A zrZPHyL&rmVmQ$X?cOR4I86-29_Pq7$Esown#8%rJMZQBV3SJO5w$li+l6=}VCSh(_ zVcR&bTX>zBfraWi3%C{hg5C*A11@wyZDSrJcg8Q!Ete0;*cw6a_M6SHCSccXN4(U> z_A~@|$i_4-dIJ#E@v7=%rD}AG=yfEMQjC z*tFw3&r1hR-vzW_XqV5f497CmSYWb%!v-RuY#Hsvh_L64Py`bnuG2P*Jocpn&<$)6 zQ@<*f26(Yx-_k{|edE%V_IR)zJiOi&>5+TdW(CixD@}W<$#Z~V9KqY;S=k%j=vod_ zp{_kijF>6xG@HzDl+OpQj4Xc;2GmMjtzwR9h3xaZ9Q*$$+5G0kxsp4MY5TZ=Eqlyv zCJ2<35$&Sm?F)p88{0}S58I~u>_l7%)9OyWqon9$f$DgR=nO~o@_WiqchRQDRLuIB zT_|Pv$xbOzRF=mjc}O*JNP;-4?^Jx}60)7KuRGOz_tohCwEi%KlFm1?u=4Z}t>Uh9 zejGOwcir2sWBc&|ei{42lm9@g0tY|o!QlNd9p?RE7$vdFt$Okf#5 zb4h4P=_f6~)GII{bIZecaMoJ*LWgZyvjLh`g@4Ava~kVLC13DqB%8YSra#2ORhH0h zR_KrY%i`7ok;l=;tt5UK+fhE3#X3oc+l)6tIdx2|trE4&d@ zvu1lNYMPho`~xOtb!9uI zwJjC$Z`?=93`mA|HGVrrC|svc;2BrWClnd!;x_ifNHV@JuK)16)VG7|K; z_K$&rsJqCBJl>{GeH{*=!;!6n=y@g5E ze1|ja)GLH!_(M|8a=~c?2J)omHsYKqRW)&$Nx}TR^#VJDOO4KJuT#}8;>JrQ^ZSK2 zkp2Jxq1TVdX;jq7(IAH5=5a+l9W*EotOO~6j3M7;+0 zH3dPWwq}t@X97Yu#q4$g^9155hIhC3RCq!^B1Pb+?44*!PI`L%iwoDA?V4(F7;&e7 z?|$904G*pW|EGCC7O|I|zVlTUCru6)W4>_&F;56f$tFVXR}iD}K@|S@9Nmx4%f2@K zWcVOSTI5A~)19WAD+V9(N8z9S{8_C;l$+!985x4%@DUliC|LH!yEhZr8@I%G)6E^J zc&QWp35Ky%PqE<8D3;=Q38~FzdPh6)DM-1lrV8yF{X(+wU+-j)NNy!rPt3-KLwIpH zZIO7>eqoCZC%iXX@IK!3kl!dcldlc(3T*Z5Evl&?1$3WFTNhIUG=T36BNK+*Iww}% z_=qPSocp@%ID(Labyp|2sR_3b1k!D!JnL(X(8iN*y9-er2;nKm*Lhz~3;j<59uupE zVpkA^(F6Pb{-hU?bMm1a+!ZB36KmOo8C}o2ZT-h-_7^8gv5a>DYgy4qfs99}JoDNe z(lay#1-G6@CMb6&+rABE@2Dt{co+!k97i*Gx4-O5euElrd~dai(xhMya8rIRa2`Jk zl%Ie6q9uH74c+BEnwg2OwKdq^MYG|bWQ@n1*gS+uIs-ORg0^j36^KEn9o)3oaqzNi)eYoO<>ryG7QSgIu~ zc?q;`S*%Xd!1HM4*ET;BG2mJxOr%p-RQxq&NVjy zk%n8dSm&KgO$Z@qlY#Fa9bT*$(Mg}K5Y|%;9iVKH=0U%`en!O7weID-RoFu3f;-e= zz#st&IdyKbhVst=ZD+~^--T;EiQDlK9s6{Ndeh1Y-BGHD*?U}brG7h{RVin~JNthCdq9N0JZUh64*9_*3znoaamORR)m_^NzkCy{ zI-$!%Ya4@fv-@<2J>Hk+Y|X@(XODbd^MMW-pKovY$YSD?yPHds3H?Or@YW2I@lSlp 
z$G1|Q0nQeSg7%xU@ZJ&QASX)(eBwDY^MDu&gM1w(E^=qUf`ISNF-#V&;nmWFWCpAd zaO`PZ&qBTf-=@D>Na|N#Tj+F|g>Dy}rgj{y~#D-DXL`=d)o!w&ZX z2Fzby_2d!9A78yJvb<5wfb+T=HTtIzdnaol4&7!zM!tSpTbe(vnU(8lcZUI8_X`45 zH~OP}apXdUaX?vkEiXo-AIgLf+Vpoo-Oo4)!*+f zB&RT8V(hL<-w*oXh4j6JQXWijTvY0{IoS`@W0+HCsxiT@M$0b2%n!%MGN;}o{!BGf zz1V0w8y^lzZ=Xc`7i}`%cr7+csEBnw(r3bo*n6cPX0lOVi{>`O$NWeByr`6UZ(S6C zKg*ka59BZ*`D^vk=L!M%xxD#qTs9N#CYmfLD)Pr~GcwP%EM`KT7V9hU1Yp~}Paj)t znZPodrkLjyfM-@(u@VfKP}^xf$zn?YCOa66Y~^Kw)rn$B>&^g--`3mth4gRm;0p;WdfG?cTyc&Qnj1+qoVf6!v>2Bi?AB&jWS}x9+Mv#6g|FYgezzc>XiKI|eFz(k9>kK~5f;`c$@twiJnDT~?ukIr2pYa(yu6S~=D;$B7 zEKGBtfy&_*#t(QRFn{i=m#H&p5KKFN@!hNl42%)O-Lq&Qe@dx%>hcI|UXZCEvWo^= zBYBs^osYmuqer4`*JyCZ;?S&jS0hloe`h_Tmj*vu?PYp+IVhB-d@uMn4dO*ogsjBK zddxY#c$|wnJXrNsIc)+54L04*FNtyoqt|_nH4jMpr}{5mbajVmgPd1OPmu8-PZQnP z9b9Er3o?@;Q02U?&NWSUDA;)~YIAS|mOJ?heD_<4_@67bH1ABg6pR8hzl+@G zVZpJF_r0Uig2&I6YfxP0*N6w8l!fe z3BOOq+;kNS#fbi2X{U&PIJ(b1KNOok*;HB+|FATm(dc?=C{`p`Sx+uw!Y|<&{w<&W z)Nf?M$kW62KcvEN`|SOjZq_nkJhfpnHw?dc1Z7*?VZwDP4n;N$N6(Q(Sult8IQmUT zD2|>368STF?x7iqsqyd6PY`Cor+(uZN@qim3WL#n$%IeQO<|tSA^0sPU(%h-=X=~9 z#RvOC@I>R4>6wZw(0M=cia~4$UIIv^T?kyodrUCcQ3D<7=mfy zWB{apZhW@iW=&a89E{B(qCrobnQ$U~=j*FM!I(~sXF3xydNaT8G$i{C^E;=o}*bM3yx!)t=@#0Vj1HwNTW=OTN8aQjDs2m=P>QSnWaf>E5>5VvB$ z=z+xY;4%6k_HLgNUq#!^VCGMLdz09%EHKrwd%+KcQ)EUIy5T)V@nnE#mmb{%Bn+eSq zn%-2b4@FM4Me7YWB44QUr(>b`IOYD2I$b6tQh@+Up~yZg;;KpP35r}SuZ3X#+BLxs zl$g+c;S}HNr6H)~9k0Eb?dxPYF{L0pQ01ep zPv$ea&mR>ucAgSI__eoX$6eVV{MKpgIgZ#vZ!7ZK(;EZP>9y(NbqP!uUG~6096cu@ z_SmSra|=XDJW3OJsv3m+^Hv9b}V-xY|* zsSV$I4EX5zv-)UD;8=WLD+At;;e6r=!rb`gX8V4|*nWIb&{+Q}ME+3+7W;xQn>x2J z{daz&#~X)($K+8QbL@PJ9*m>s5(#7b-8?G6W9N6lOc*_f4-6h_-+&3BqcP^eW9Lg@ z|F$=J{#Nv7{xet*PsESZ{6O3kc3|loaTW}HKCC7)HxN^(c(i^dOrYkyK5*=OfymqL z%d=x^KL_Fivb^)Fm_UUsZL|nNUjes<fn{wt2;VWw&iNELF8Le{F#;jv|K4AB4Nb_uXThow*}y|fV5p3?AT-R zJ*NWj*bkl(GjBG$*W7r|irCjI>0YTNbT%~n6mwZs?~jMJTJd_%V2|mSP=B0PmV7B+ zg*_I}_1SMMK2w?vFDP=l>W8CvddnJ%4&0Vqq<-Zx@@J*FGpd6pi4UGbq7 
z;^i#387(mF<+1?0QeqhFc9R9U)Ex%*0Gv(5CAG6)(KdO0|0@BQQ4oB-wT%UpOMR9; z;}0CWv+Vzp9VY)~sbi5HU;SulV8KgaFl7TmIaw;rLB>@j+EW#cHDUS)&t8_v)D4_H{Zo-H3z|EGO%;%`!ax7aWm zNBV(@XDCH^oei2)oT(}EZ#?q0g}1x+9AjX~%qith8rgr&pBZ=};M(x*IyOv|C74@C z$5qQ0zHKD^jmniaU;J}!I~I@F=Yzadd`2l7OkLlV&i>?s-jsaYMd-_XodZXPj^5Po zGB%*_8X=2vI=WGTI`h~tO80N+cx#w2I)@Fe>DqCZ!|7N|#Ss&D^gQbe9Y@azE!nX9 z($2g|;S6-42v_?*d@;u2A3w9k;yZkqxW1ey@Ltv!|EDpDz7e~*_7Mw4lDKOl>S`nhwNLe-_?kG7^R~gzcKlzzyg}Ftap5bKf2jEAG9X+ zm+|$}Gv>wnWB!X{75z3WsAYV5tMt(iSxc;IXL+*5_^~Z!4@cu{YgpiCFzu+6s~;YBnwPEE%7Wn$1IM%?KcvEDr}mTjl%Tuhhmuqrqc|Ie zH=eRRBkG5p3ufH#o5+R}0(T{JI@x3VBftiwh-Go^i`m%Ub5yKq${&6a@EeQo&}5Ir z-xB=%Si;&OqRNKnA5J`3Of2QMgpID5{h{Yv|1o_*q|SZi&001Ji}CdRbLjK zaU~1#iVmZ3cE&6$h`7z0Da?QXrx#N_zcDeIikFjQfNaN|udJI)9H8_l4+FAK%==PU z#~jn2q&|25c@jO2OT+^)v(}?DG=c%g)Sv80c@u!^Z%mHsc4a`-x5p3dB?7T%W_(Zz zu{Yz-`Dd(-55%2GTi3NY5qnfGtN6g4K#aRjcWt*}08?<%ej%dIa(91_F9>2lLGQXV z=Pw4Lfry@Z@>~YgNwxj_x-k%!>GoWkxsCz4$5%d<^a#X;gEy*0QWVf!_ zF>&9TIgEeCcjJVdlcg#h>qXghHT^VbLw{k}33Ox-v7Ysv2K~073m(t)#nF2Ooir%m z<4+lU>w{6nttR~?G#F~y%47Q42c;+J>{+mp2BUb_^uCa$QNa|-EIx0@9`?e>`4Jh>>_RtqcH|HBpbP+^C{^f7zc$PL`roL-|cnT`sx zj#f_zbNi=%p3y6sVxxCSzCD?7lfEPt<@>VdZg^`1`4kacrep3gk$8M$K}xnm)@VP*BIscPi1`V(5OX6LjnTiD($3W^z zN|CNBjNX5BBJEQH|LFoJPJ6QFuVtVPWxTh!0OzaV65pc?EV{4uA$NldP$FLYESWDA z<4o3*Qr%4iPNU{K%>|sPaqnl0+0L(>A!~G@8CXj7^U`_DSJ}Y8W@^19UBF_5$fGne zU!sDQhnzuYw!y?}%NZz7#d_Z(@Tj~YJhrzpmx=9-2SR70I)m4u`rAp9NIqo)Y<31w z$_E%>U>DUc$v;84RD>SVMr{*k;Ge!$wqpeoi>N(80w=BC)cq-)iAmK9x5;yyfeNvo zo5w^IO1IcJgPz>Sz4^soESMQmpaGK@PlPGfiB2t8+v{*MztMB70A zbuz!K`^A%EoS=`GTE*AM_^CY_V<*t?`H-s9&K#4^yiPEvpK?)egtSNDeUBrkPbu);0N&XysHVNE#fz>@CGYj?)c`m0uj8 zd_6%3@ud>x-uP9q%n|hRiiQn;F;Rzi_Uw)Yj!^Qt(x{1>-wiW86SELA6il8 zFm^Xaj)me^m26kUJ3uLQhc1YP^~9B5=imS(hdwRNTFt_N^Am0_8Uo;`C8jMs$U+Ok z{_p|7YjGm~i)%!lynlz5H30casUCv2S%{Rau^pg$3UN-P*f>ng10x53#6HP~A?9q% zpnPb40Fz(K>(=>-VqywMRvo>P|tF4{o?wYR}$V>xxFL)Z?KD8XRI#^%wz z16$aB$49_Tjg4K4er7&NvjG|<4^-KxGAgfa0Kb3o-^a&Bo>2i~1t$(36FxAWw12dN z@1%(pzfBPo0S+^iu3NowPT)S6~Uw({lCsJxM+_ 
zZevTxq4<@Ie^f3Uv;>n;ej(+laXz(#Txzd$Gsz#7Z&qWr^+{6SY~06-E3Bc9qVw}? z1Zuu7tRbJ$KS$X(Flyu5jP-ws%%A%A$+i&Ft=Bi}J;|r`aQX@S$9EW@rYmO$CGNVt zL&R6Xpf6RO2W;W`zQx0Hn10x-n!NeYDO*UO)-RdpW6G)2wu9|dySx1`W7LkYgQhx( zIUk>s^3)v+S36)6SLw2XKk`^7-N}iv1Fz=ynj1a+vE1OicsJb+RF0*yd5Qgz$NnDn zMaK@(sJ)P2Qhrw5fj8Z@WAZ%2ACs~kWXdhKg^U(0pAX*tC{A4LEDu|#zFV23LdtWd zEHHkoV+-^>7FExZiM>Kyp7XH<>T1-hb^h2)of%9e^AWV&<5x*w-<#0_TDH)o{m^a- zv5zvSOQ;*7sV$gLdjmZI2<;Oe$zHYr9%^$@i})gXoN*U?Y(RyoKnonBhZgHG`5h2AW`9Lm z!`VN3m81QYTaC$czd#hH3^`M)|JDCPA6^u_Mh1?pU)z85Li!usW4D6GqkAaCm!Zms zTaDST4+6*JZQGyx=YeB;@$0Rjh5C2FAk3iVrDOx46W1-%n-YY1)cYS5HjsBTli_X~ zgy{I%baA9DoHZY=jhP*U12^WHWZbmy) zKKZxZlwfS8=xnPkRJx0fo30;>1|Ksjbd~MK_TC+X5s0{XT4Fawj|st;N5sY=8GG=e z_F&cpqqy*T<>)wjI6?XDMI>LJ-%{tQ{hzaj5bSE`lCvW)e^2knWdzO>etdCHfc@BA zXh)Jy-SH;y;9^2(S3^*RaoM!o&mQWRJ#5Tkg<=tPCvH59jsI*Y%I_b?u_5B0K6?nJ zmxW^ch_{Y&C_o=|ruRD(2Piq<1E4bB%|f3RhV|4t97G*j%qb}uV1}W%yMFoGH2@ma zUFmeHpY&DcF=9MkO0F9Lf1CO=CG3RIO*KThIptjf# z&Ll8(w|Kh)4C>uoeQ9nu(kOW*?FdCtg*Czl!?AZ?kYrJvBP96jx^cZI9P250-0e7K zA6*V7_Q+n*O=3<^KI>`n`VZk)xv>1#-6c*?{q*O@@UP*>GlS^47fxXEeAo9SlOwQ_ z@_%%QJyBwoykJ5ECY39m*WXI)g@$X6CnrXr_{+&AgTI|Yh&roKi$E3Xj$@Py@IMK^ zFE1Q{Jk;gbP8X055n4d!jli6!szVQbTtTPfv_#IgaMUp-gjDMa)Yh8(_y{be-Z`1) z23bT6GD-jSH}9{V@p#ntgdfu*DE};DkKCBQXRQ%|LIf@_p#8De34SR098dhYE3rPi z4iWsAhF^5gbsIY?+7XVUXG&#mfA;pn$M{L^-&lL=jc3_U6QgnTPQ0`Utkd1%JTpET zRa}KOoBc9|=|9SZenm!O*e|E=?;jaMi`Yu<)}UzoKL&Zx|$JiVdez8UJrg)V*#{(FaN5gf@y;e-MAVb=l4$Iv?mXF>{4@X2y^fX9V|$6$Ou#VR z*=rr?|7}5o`J)mONTc-n#^|xLo2@2LWJDC@)@Upw%ekQ51kT(qkzTkn8k;^4!g*o> z)R9y*fy-^rXux|DcnSudzIW~vjy)O}rkBenVjrBfh3%_^pq|4Q#>*>rKb*t~ZeNSj0AU*#2+-`h>mD zyo*M?0%GWxn}MU&WXlNz7CxaA9P`5zDEFoH1EIImgT)hGn8LGbnm6};BluphCbdf3 z46JWv^nLjnjfZ?dDpK7H##N7(l>QuzYQxVw7TcKp)4u4V8QQ~lVlbK^vsBN}2lyOz zyx)E<233In&2~*6U^WM=&Z&sOzEch4gv_ihUEUz`B4NAcqZq6f zGWor7*c&WQnHF#P5Q8JKr5!GcKL4~IU>t%B?PwgT)723i@d5+B8`mTyqwzsBXE^e+ z7wA2h;g-c8joXW6-#v263vO*)TV<>gjkc;)Zy#*(f{%KtXLNzk!^&4Lg~PpIc#Tv< zHa!{-Cltg=uJnRsTCZ=U`9&ixJ-%4zk{29q-cp`m5si`6%&axNUjOvZ{dNWS+ZNn! 
zUvR&T!Tojy_uCrWZ*OqF&B6V42lv|^+;4wyzYW6ub_n;|BHV9}aKBB${dNiW+a}y^ zpNI!<8qH5LFWyhVXHUHxUlYZ_%9w)Ko}Hecvv_gr=M@}mPB2%xP~!>VYuC#1XwAI$or9)^rJ@Ou80q$TNr7d`W1;l;8(xB;YWX8CJdMih$gH+W(B(;EuScj!BB zjl#SI=7oG;yg^SptI*>Dso%bL9IMkCHrwC5xcEU78gR5WsGjnMHFvj^_YFm%PF?QD zkK4S#plNVP#t+iJgn!CKH5G0}VROmte6K7Ym^$u; zdKYPrmYyt}P~rpQ&4$k(8zJLMZMiQU<^y*ucbr^B_{n0wK~%S(51i_KEVh^MOY_d5 z!G|ZjVZzh#p2oU_KN1UKPZ9c=TKr>Ml3O&^H`aw$OM1hPSN6lLv!b!Rp)OqKi5G~^ zukU)Yj?|Chq)j>R1p_xP=HA*8jroH$8cEB&;9l%U8@v{coO^pS1FXFub?N@=Pi{wJ zm%zM4`0WXrOs7RYME-Pb**1M$y(h4h#iLw=V~}TJX$R-NC-6;fHBr}&!E%e@{in)2 z0VNA>?s1F3Uah=M@~b_;(p$LKB8tePigVuXfu7K)Djuc2o0Pwl`|*;oC)l69e?eSdxsVU+se+uNP$Qy(-{1j?}laYt1zEf_-wWgFO$UalnFqmRhD4XlZ)* zUcD2ILfvIj*Y|iqjfO|R%7tj;k$+M5rQQpiLKHgooF@3%cROETnm0V$b2ZMdI2tQk z_m11Q$QwEdzO3sA{^pw(-uLkafrg8s8=HwfJS-^C8siNb&4ZJ>o=2n3)&7f~X5O%> zdGOnfp=hK(;At{X^@i1D_bfdNo*wS*-r)c0KxFuY7-A27-=eV28*UY77=%ub zK?Bm?Q$KI$=hxBawU5ElwV4Xv2pndpno{c!gW}2Nh0};Ui6Hp@W*vh%1HBmv_q<`z z!z}~b=rJhoo#u4vmp9alMs?0yO7O+1DA(#C@Q$}H40gm|;IP^U8#Ny|cj@RJnd31y z5Yo5z3;2M!$PGR#0+%Nj#4cLr1I>IJ{9jy)K?~R7{XFOcPGzRW85d$OZKBD_-NL@W z9-+UPwU>W87+z) zOg{${uJwi6Z|u|jX#{?^P%Hd~FL?59FjFuj^doi8H1MJ?EdKdEqEepp7q_lu(hCAt z%r84OH3k#I``M0SbkK4%;@@r%gC(ZhTy9RLgMZq%O)p9LbW4NuG$T5M;mgwXdNFA8 z<(>HzLptPbe;c*ZCx-aH@*i)j(?NFa;(GhQ7%UVuKYv_?4&Nd#OiE8C>ml%y)T?XLRr9SF;InoyEjE_)ZxR%U0SvhJ^^DQYuo#rKH%NcG zm_dw`3#VjkW6(>3=H|D90Y(#|I^Rqo{a-lp`EVfv!fkiF2f`2Hmz`T*-u&C(^j{?ntPF>$8y^hFH}2rjr9$LC4xwf&)Awwz!9 z{q|r-lMUfNVb0bsl?*uHaX7qEAQ~TEU9oHGIR7VED96Oh}f2` zWq|eJ`xld@L}AydxdNdp8E}4)&6UTWBGGH`bYty021rh@-?QDXN+IMIE zfA$EsePUU2^wO-Go*O_%M%e^xjbtR4X&u|4rU(q1IeoB9iFZIZc+(3M`oan||Y{-c@zS!=V8%$pnB@(fh z4ZXP~Hc~fS;l=gCcM8+l5b!iKWP7PA)F*v7)$)@C-3swW$%|Z}P$uI2d~-Hzi_KJ! 
z-Q@ya{rk7Coxp~HXFKb!FLD8%b(UoZ#Yy`^&+D4>T)@iqQo7+A7P#&@=e^C|1@8Zl znm@UL1tu5U=5L?t0$439uRfav{_}&bd|m1SDj#fajpt!O-5zFE>IxUwy5*$M^mnHmBRV^H!`75s`1Cn!=(S`u}8dSF(IRzr>Sg%3zW|KFf%ZM2{9{Y`%VED zsB~%CC^kg+mAC4xX`%}(u67#VcZ&fglk{D*f=PSQW?Ob%X25L0&jAu@E+F3gv8g+s z0a|sQ9tk4Eub+Q|wal3TMZYTF&N%H19}2GQUaG@@a^rREU4G6WrtYS8K%DV+E$c(i zI?Dg1!^L|w8VOsRA^ghnPZcNW5HFOsY4Z+eNLyT(T2@4dK*eI^Ilr7CVZDd_%2jko z>-@Hb{@WS4SW6$eWYeL7-oiYy(gg-qq$nP-q=P_m&=tccF0iph`{~VHzL0iAx7753 z3$P>KJ~+ec3uQAa%D18mByF7{n7Y;ncCMa%_oFdcFaPqm3sZd{J5iYt65|S$GKV)a z0=r0nL5mm2SC0w_`_b&k(VXn>;`?oEt zjyOY5-rzBpi^Sd-a29tec80Tu?R8IaJVC83^ik1x7ns|=Aat6DCtTpEnw_re0@s(k z-2F7o13q;(n|6x1!0oh`yXoKojZQzyoVuMM@Qq4|s-*{5gwjqf;UnwCo2a%c&;#h7 zo%8Nlkogykubx552kv3=&yFSZabf+DmgDZwm7vTRzr_VOZ!UMrEN~}8&;MqakI?JE zB|8(Qy8}(7cz>{lE5PPkKFZqeke4qnH_68p@*cbM96sm{%?pCA^fIIy2bAY8>k^vk z2JNopacLyKtLU8f%(-r0@zk9sa=Zs{Bt8fHigN>=wS&jDbiiXOSaf3l^#GI~df5e>>yrU>p@L&kM=@3_U=NIPq9 z`IvA>y$!eXx16K_z2I?#U!5B?E9Gr6wjy}_Xn1bOod%_P#md9|G*Ef2Qt~>823@?R z9SgnPVIcZ`j%t)Ul-_Qef1=$T%4yzvrKWj6+BKf0kw@;3*0CVe)Y%g_Q7!jFR+ILK zJ9hn%CnPj2HQeq?>QO<-(O%H?v}?k)H3TnI4C4%MXs6zK%5jJGgSULl?t8<4$s^Hw zF79A(jcyh9)f?nbGWl=MA@I{Zl?KOsApfP>2e&w~&UW>uoo@L+j%MSso&#ij&mU^G z{~-Q_N}gE%cQT%n}banS;G}w7U>C&k6g{mwVuz4g}D(eYFc7eu@ zDJ)39x~JF9c)<3}DSKSUv4L|zSEqcQJH-5ynjfjb2A#USvB|;=@7jN`q4X+G)BR+3$ekSV{*ZznR957Ed_?%JJmUr{ z*3A#<`*+s!O(JxEJwS=#)D2Hg6Ns6as;9^QIN zZ~iBnoL4>7-MrNi7MwI1+~CCgE4wJYY4%5Na?34lxyCK`xaA_Z+~k(4+;W#&E_2Im zZn@4a_qp`~x8C5^E8KdATQ70zEpENWt@q}&Y+AjmnE^3YS{qVjT%hDj`Jv|p3{c;3 z&U{Fa*+TBNoF8#hK-i=NGb8oP`AJbu9nU&?h7ALqUpY+RV0t56yGZo@e zoIq&Ws!4K_8Q`z&8$VgviP-nG=cj(5|E0Bgvvl@$T&IJ)M6vSDla6pqba$sgF&zqO z*Ibxi<_O}JTU7=R&_PK>W@+t+BgFAXyuVMqv3PWE<_2RY2z@8j-z`Fi14#x^Rtubf zb>fzfPpvO()v7R9S>^;1jE>sZGkxJxLeQ000?uHl;#4;>-~+XJ^1B|dcZTFl(IMeF zA6U^dH&Olo!N06

Hn{0h|uqk5pNj3&f)V$ zB^OBlyn60xx;N<1TkaciTtN8J8NQR+Gb+pO# z&n15U{Go*oN1gw%19(P1Au#SR#g{@t_Eo$^kr1mYChm*fzUg2_G$bi>OyT9> zM}AU<@rN(fNHQSF_5aipZ&LZKc$*&&pLwIcwk;9{6h0=*q>JFOvu|FWl8ge-x$t8~ zw;29{NiQ$!L_vP4lIFP;68N;px^Saa6rhFmv7A*>cz6x#^&I;sP<66CR{vNAIRpOu zmlsBXN>W(UF+X`+!BQDqogM`%>AbeMPyy|}>z7JIL_zu7Hy^*fP(Y218(Y-l2z*)J z%zxt)tX|~ke=ai$7UrxC+##!ifm7&>%XdY=*_ZaIJ5ALvEBx!Odq<-nbbhSaHVrjw zrLVTlDUE_Or@NLML3J!yp6+COH44%VR6m7>tE1?XQk#+sQPBMOR^i7h>R4CE+kdzt z3Ji2k%7yRL!24O}4JNlnLH)k9fkD$Wap?ZN{9cUui&u+6-cv$VQLtRk!zZ{w1AW53 z2CUy31&;%qq#th9z_>Yc7ML$5<#)YRZE)1UqO@nZelbx{n6I%}bwmxDayLrrF$q4@ zUuW%np^DYt^mR5mM1jz`jHy{_Dkv*rZ`mvt1>)Vlfv{#OYJAE2cy@%Whwj%{ucOMi z$$Rxr-8Ye-G`>*a^l}w6^XRFwdmjmULci~_zpA3rerqMAmPjZRD-`fMqJ~dTdFjur zjRYah*IDNoYMAcgPM0YrbmEkl6l$Y}8Uvp%ZXoNQu-GK&;CwmcriZDY{rQ(kM}taM z#`HsO6Y=ozMGXg4qG5l;+ZmE=lTa`FyumV!=)Z5;*Prv!U!o8V3C%4|_r;}V@tvQ9_q1j;@Vih@nQ%<+mBXqTuv+;j`Wq6L8)qe}1n%LQl6( zXl$Sf;RoRZnKK_o0bM56DeuF06rVfCNcw5iU%IZSMZwU1vVQyD-%79*z}cSedA8pP z{`5MgeEcnl*>z#RP6|eY_~ppM>)r|fGoK;G>YpWV$a-IvdG|V04l8uD=V`P@L6NFs z_YFd?C+?cOxNtKH_IF-0igcNbnIHZ6E6)@9%$Xpqe;rlozl);5>m3HM4dKE|CK=hNv=J2m=VwVVi zb&H)oFkTivnC_uHuZbcyX^YcE1!>F+34We-owQfSnH9+^jY};pYPOT{;M8=+FBMsw z7H4>)mC$eVW#7R1`!cv9;_I%bFDQPS_himOX$)R-KIzh66p@+f3_C$7eBAv)*X&~y zWQ>%Ye0W?ORa)-lbNHiyGs|UFXudcm&c5@bRfMcheT&mlkigKP&limd+&d67Gd)cT zGd-c^oJ-o6`HMc(N^{!d}c*{ZDRyR!_`$UE@*_x;E>~j zM82qK=n}aj9P2ELC9ady*Ct0pk=MSIwA=E3<<{_X&a7`b3K)EM*L2+Mhy? zhJoJ(4;F5c#=iD>zR%gwU>W&+T*`Fmf5t~dqA(>^DVcieOZ@=e4}j|%7Q4|aHU9EW z`P*1s|93Jl@lT8~^`}MB%s;kI7)fy?C6cs+q{AegB=r!S7m#t=DBeQ|@Hp$Zy>ZWl8Rjf5ulg%85lRPb%M;fIM91f?6gRy1g4);XPBMi zKn81V>{ee<6m2QBxqh4j12yIoXHOPKaR>Lj^!*&5Pq#i+o+6HAyR9Ev4}vbVnZ8>FV7Reoaair)DLjLfOpgW{xUwSSeVW? 
zRLKF(nFSXvw(_A*=0KR+c@BV_)P~3lyy$T{yI}rW(!Z$9YdFJ;6(2u!9K;FLa5$^^yOu&>q zJ&Gj1^oLQIn+B$a9-A}uRrp`pA8C5MDCtQ!JeU-AAwx%3tA-UXnj+#dfuS`j7FwJiDrntZ(-?wC5UG-%rP1zt|#=hn2PG zc{C9EdQ{Nbp(=;9%Gw`B?s6c{eUV(%O&Q$OcrV|njRW}&>u%6K%i{Ohhm;KnoUwY| zlMRpL|7rj5wzG~YX<>luv(BBl;_%2!lSh;t1{o{2yi)8EhTpkMAIhx`gCgy#%M857 zL1*+-R}oU4C+gfZe!HLc{OpP68f!@Tc-^w^Py6hR;xDCtNeP3M_iRHm<=@yJY`!vi zZ#XIcSfKHy^+o%p%9K4(k)(WcWz*!!H#OlKmOgA#4}&h1SMwI#{#tWDSnr~)8p(hE zEWqw5A9m9xnl2R%1EJyyRiTOE=(@ORq8LvY4E);OQRpxc71X8s122R^{@`rbI9D9| z8dRfy?F|L-=={aTUngLlWyJf?q)^~Ia}SxbdII*m6wY5j4}~P%{pIjk2)`eE;Mwoc>=m%SDOA1kiUw~Q6SA=ji8tW6;RX@lc? z(ge}OW1{K$tPoI{m76{*{acNQllEqvYKuZ3 zLp1X%&#W)@p+P=>|FO z*N=n(Z~k2O$=yTtIWA@r+v`IC`94a-ZWRDM*`n|Grw8L||_ET6*Y|*|X_HzKBn6y*pa~j~$Q%8D9?6?6A)gL?K+U zc=Gr2!9+eBymeITsTleMY~NI2PvpbKEj9vMCt&$^{-&=QM4kvash0JK;CbKR=RJ~y zo|Qi)^v|7u$%fMN*M5rt73cD+S2ZVKaQq?VAbuk6?yauBRU?4h{VV2lcBtEd3lj*eA(*KUe#d_6rIbB;#}W9D;!fe>uA2KimCs&TheOc= zzQL~bLp47*-R&viWW4Iv4%+VeP?OOa(tPV7DZeq)cGsDgH5Ny7_O5FtFlV9Rbc>HQ zjS76>N6v=B$`il&Jl8^a;zmia`5YDs+bTj6Llfh&*M zx!ojol*tF-e9O{sD1S!$fOWEXY`mYW>gI5mUr*$-stjg_9Gf$d;GbJ0)2x^)TD={ZkmISl+z5`LryWzD|r>)<)#F+W9NLvScu^@WZK= zH(?-de<~}vRRTu>j=k%976zv$I;n0NoPb_$h4cAtgu%1dtLx{ynt#|f9xD_Tf@yN%Z`Km4V}v-|F}Cxb-3i1(WnUb$x9 zBi=ATBkP~>%uH>E$_x99>Xbd@7U2NKPqVju8ni#LXsgP}+2Md^JKrc-4%l@MOsnvUj%wFV<}~StL(7T$S;vU}=WhRW@rIr!egy8P=iHT4 z#ht$tj;3W0{O--4)%s2aA1!a%_@F8rgpMCwYHy%|K^q`pK{=sk!{Hrgyi_sFRl5RS`5(Ep&f`LFhYK#r}P&Sj0$LG4MSM^lC0{xc*WFPkNe3Tqom6oHK4N zYl+G~{eKc`7&w_1289Y9KI^MgaO<8Arz|SNpmb8ov2&YMu${p2^H9GXv@vVX*L*w%&lW8lE{>m>NDa47$CXq@ReZ;o~PpgCQ6M9te&E5??yO$7szviv#PcmQb@q7xsl&+&30xM+= zA1IYi#-&>CBwuTXfVjov9DgN6EQ|^~lX-yXU*hM?&Q!$q8>%l0%R^wv>w?yx$CL58 zqnSjIb|{n{_uqQCUJ?CmM$}8}h`t--%v!=B_WidD&kpYkg|EC86Q?OEBRcL5)_xxf zxgj4zIxnlB{3D}5m6A|M4`FM5$WXYQq|QzS^S5v42@fITonw70X=o~bV0O31 z=!Jk#XWH4{c~i0H(}ibs5+R^_EB>IEgfa?Bb@Gbu4Tg;Du%_8_reN85kD4!Af}!`h z)`Dlq}U(Ej%4{*HJ>)OvV#n$PNBFc?v;owGv@&9-jn`O+K=rB>5+zrH7j 
z17;)Yd-+44x#rRGNl}yk!SBi6^#dp6NPnLngk^<%tmzZ)=6!}{v16V36#P!w@7-(?;j9$0kH@N;%BK!WR)tQA7IskAkwYjH5t zKhE6n$U*|QSG``uUKk9+#P7YEK*l2{-M?5r7_Mp%zk7uojyyhmC+}+zSWI&4Hr0|w z`J2PpF^7VH_9%11hUela)VSmFuXRB%ux3isjW?pWZt6S94M9QBJY8zT$5;Veb2w#B ztz{6X3{7k8kK@4@?OHQg;UMB~NYos<+g;O=t25|q6#VBK|K|(SX{$DW`4IqltAE!{s+EDE zhlhQHB1nJfQMuM5(vY#cFtunG8PEIjBX=bw;Obt>GUbLq;5_fK60Q=3j*8cdX7>dE zTR^10LXpP)Ac0t6SX%C3KT$UbVE+P6&Mh9;_wK?orP)D{VR%1x)^Q#Ps>(X?azB~B z?Xt^Kkwf;2?-&ify-xCd7wVlDdTyU>^T4h2OA!2Kt#GivcG154VPfRC$D}-KS*>Gz zPfbkL+1hd*vYt96H`jg|sNtp$;-ZW2@bJB$_5q$2JX*Okm^l_#S>%agW!jhLHhW-? z)oCxuho+<7i(T?(A7BDW(~S@8-=@RB2N}$r3mG0`^~4!t->Ay{0}s&0;69JB-$MG> z8v~Y}V{bP&(Q)*;OpxbTdCs5w-JXBT|2Lj;>R#YwI`)RHKi;^&6L%~R;Q#%SKK9MC z`JR8?+F{_{{v&ms1m+&!_jdOP{$?-SuD4=xgt;A_y+7&G&wej#_Td|E%Wsb#oV?Sm zCB4z%aJOg0c)-e&^xfaZy>UTlx2NG6z)iMNlTgna`2+Oy%ZmYvKWz^?X6B8@ZZz@C z8U%C}bUHNKp1^_&#jMR8@U4UQ$m>Ec%oScJ_O!$So9xPOM3#A>n$j9&#h(t?3f?2J z;a(`6;p6+<#1SiY+voJec%i?XXGL6!BSMFJME5gNzUzs^IyontKV#gAlY2@3(R%s@ zp-xy@IDSveJ|E1oYTnzr*a>%QZ#u{`-4~ha`cF}xu=L|Yt(ORQ?CC(Xt?5TLU(T$Ezy`0mmbDWWLHMzRTj*hj>Tel?& zy5RhccEWR{=qQ(NYd5Ixg5B9udTP6Tv1{|YHM_>U;Q4}ECyOuoqKS5IuQbgC71kFs z?d0jW-Zb=D^L!UPm@jkkfF~VCx(}8l)VQF;lc3}Yesr8_8G20*Tu~vZDoj6yj?9G( z(dV*U@#>LvZU>9#n7Z@b8p$=TxX|l+*6sU*{^I@CmDIVSrMC46?N@Yc9+q2Kq~(TJ zLnru~9j4>;zJnzJS#G#H#eciNEIQ7;Jh9YrpBtX~QgUj-N;+QkY2G_Yf`<1C$aos* zSUn*($T5h921nQ3v>K%2iM>9)jD8wE`#8Jy;0y+;TlDtcQ+3A}zVruAXbk+4u;Nv1 zm^+3C-qSHMVW9OLw{>S;xMN_4a@KBP2J&Cp?a-v>ftTyWjJInru&86IK3mHJtsJta zy^3I<*J8h)E|MOYw;^6L_d5d@(s!0<^Lk((y!k9G!^F2oa;n#ycgIl0X?t3wn3x}a z#G%yR9k&{^A6Q_=L}i_}w%6hA7_!T*;TM~UzUzww(+%A5KYo9<|J|C}QW}~Ww7*E# zW#WcOx_OI}XqaW|b=i=#A0y9W;-pMN<{JI9VkOewT3frEr*0_3_bl`*8IN9R1jjMi z4I9JfDYIWPus-j!eBDh~%u9S>Y4nzXR-SWPiqlvo`g(u8AwIEKED!_=$lkuFZS16kO5yN`d^DCk&idk(Aur?Sd*kVLm%af4}6~ z+C0y?pv9-#8`cUlv2nJZevyg`PP_E8_*Opyo1EviY!h(7_?hiddr0}H?9=k=XFKCU z58Soo9Rou?GWwEVJKd4%<>m~ye zWXhI3Y;eSbvKzab2&`me9B<2Z#C#X^$iQ6;wDA8_bC|~wW7;!sKg?&~ChtmSx8}}v>2$dYwOMHtE|xKq?X-T 
zc?N#BDY6cmX@$GTr9Zf+z`*fQyLmCo0&~>&nT~%*_^srL%TfUg9L_D46sw`*zVZ+L zt47T5hPl_}2X%COU$J3hY_%EsNNkK*v4#FuUpO!R+=#YxylmK6I!DS3FMofkq)Pa^ zcG0TyzD;I0OM`u${>vAm?uajm_+^SWej7>$CHi8Idfc5sAv27+asBhD0AJ*rsuzj& zX87RY0_)I3A2ig|i(LHN96LHs=$zy8L5p)A{9}bIFvi7u#5dj>%W?vPSKP3`pDNQk zm%a4DK-m?OH|ttrynXo%#d}_uXjNpruD}vaD&G65_Kn3iG}#VUw^+9 zww+j?Ell^smXd?*_LnWOK66IuA#qQ9=q4>|lxz7{pUw->)c&e$g^6*lwZacPu;PN; znS?$oT=}p^rrFLDm%4YBu3KV_F`cWMZfy5N{*oq>8_%uL#p$h)jiwhGhxx4b>9qMr zpN*+yzGIXwFK=kMVf%)Oqj$sZ{7-idPU$RNw%u@4moNCE%ccJxx|~~=a_d@dUED3~ zX;VaGZ9erq#KWE_PQ8OiWZQrJhi>36{6gBR+)*TG*XW67sk^j34D1a*XY{F^k+tMJdk?s=-?tIRwkaDDVgmt_HLje6Vq<$T5WygfzoHfMPJo1 z5H?vKS}p8}H0qA*A;SBg7H2)UesL?z1{B!UQLIgo#OsF|aVPd*#Lzrl|bGp@-)S9is+~_M|@hm-o1N zlbd(Bd7GQ}xn%>l?BJFy+_HyTHgU@?ZrR2y`?zHzx9sGWt=zJgTQ+mcZf@DmE&I83 z1Gnzr)-Bw+hg&ys>n?8H#;yCfbtAX#Y#VtY)i}mzX z9=V~yVx`xUbC|ewUFSl}G&lTD9)rWTZ?BkcXraSv?{$KS-uBX(5`OsMYmoG{4+ zr>t)*dqL*6&n)!Xy)qZ9r-yB_+rh-|T>|rN6%yNf+CEdQRZM*NL||U^D`z}|1^Iq+ zm{{8(Ft1?P32k=S3BUJdVs+oyb4tZd7#uS}HiW}O{nv6UO*NdbV#4Ic1F=jT`0zoE zce^8|S7@y`?!iQzxkZBQn;kK=rXW9#;J+g!^|a4 z(b&NBKf1NvQcOl9!1iz4NafDGZBy3R0*^N9i&&Wq%16&G$(mye?Wc>yH`mJH7S8Rt z7x&phX(fN>3TXut&(-~DWNZf<+sQdS3?)>tY&nvA*$%FYwx-CGso+n2MK=nbTm004 zf)m8jeCT(b(=2N{^FRNH_?R>--{KaZuz@7o9Zl9|>R1$Vd+x6lHsG~p|G_!iHL>|j%{R{o8^FlJ zPXc#pV{em4+M*I0IH%W|lKfE{IS-DM%_s9cyLsH)GnG0xytmD{Vy`U(=68tahv*=u z-TBqzGq!(mm+Dm=s^4z|oc5zjQ&&zyq11C~LfK?I4~{N96|0LYv+In{Y1;s8)uyeP z8+1_U;Rv^C`3IlA<#lMd#riJmPt%^HFX#Gjuu*T#H1WvyKst)N*>M~~j2jcIn( zp}Wu$1|3yD^=#3`cBi2R;W?J@*gr8T>8KVidEU^xyxsyz`p1iyE!D!Dm~(1bbPF(f zv1R6#Y%QE@nAzXrU;)ovhIh<)t%-RvGyAjOm_z%c!Qq@vO-y^iXE(6W90Fycb_Z5# zqE1Geb7-p>Y)??A951Yed1gZmQ{v4)Na{w^!Vygjl4?0pqiqIRSLRQgGhGYoT?HQ{ znVLc0jI2v)jq1qpm+zad50F_{j0LDR>oxk2u^@#g0dOcHcjlg81S2 z6ZaRYVEZSZYPlLys1%!YdRmz>W|WDf#a=W8;90P=Q+EnlTvS|Fx7q~qcg}lq%V-L! 
zIF(396qrC-fy=7LR~2zUj+VJl&IH8YaAsBAP(&VM1M^a06DarG7^R)6guQ&`$6(R#!oT%$je)a)yvIZ;@QXjHxKTTO`f& zu>n{Nn@>FOQVGTF<@+veHGm+~$vHchE1-}JSx=S$&@89zUYjY0AZ+L5gVUgQ3E{7W za>yyPI(12O8VIG|JIpsAhh5#3oP;!8NcgE-`{=P8Ca0deCUZy!+E2zGToEUa|M7C~ zQ09i0I&%2&Tus4_XCyy7Tjyf6EGFYlyM6sS;QKgvDf5>!-dn4?FUm<5RP=A&o%B@- z#dSFI&i>SeO6OBq9m-M|5p!G2B5N8LY|8tsnInZN+05?@}hw37OKe`0ZcBR{kv}*T+5o0o!bkW2!MEZ4_^z zsYpY6|GQ7KJP7@sysPJST@vVfckW&M-UO0{*Lvo7NWk#YHs|V6Q=sRmzN&g94!nCq z4bBsOM1!qrkEe)3prm=S!&x&J;4qfiKbW9Lglj))dV_KpQ~T|C@;bEg#KPcE?fp<)SL!{tX-o|A&% zJ-vPrZkDjw>e7mpWzwM1$7h#y*z&KgcwN-%rxrFAeg(Lk-WqTZ2Koj#I ziq@__e&ql!tkb!+lh$bmJo>LtW!F!8gJ;hb2T}>lW=1*(e6tUnx=vS2mefC}cfk7h zfPK=fW)b5qGM-?G=;|pi?T6p^4K>~)u!c-rfm>sZ$_vGH={pIWStB6W^rEJ{&G}Wp zc3a?xirwO0{H3NrI8DCF!xnzu_B&DY?psYmtl%RX2Qr@vJzF+Z@}dyQmzrt|Ni(kA zU*JCuY4o6Nkpi}$@?v{O(|G}`B>7ieZ6Tvu<@dWF5#-j_M1@b};o${J??0gKkt}xx z8|u;358rDHzI;Bcw#6Op1X8cu>Wu1r%|CkIeb8wAtH%G8ZCHF|zrspq*tcxbE~V6W zHUG6mJ>65`DgYsB&q7n)JHs(C;}su*Cc)@i65=i}_Fkzt=u?mNMY%vN^_}N&5}>m% zJbG7)3&B&>$l4YuSVnz_bD1mrlnOmj$}0_{-&uI&2BmeL=IeM>K&5FxsJ9&rbOOEx zJ~^QZ9El4>(rGjRhe;)#uhk&$$QeF)0vkNMw|m|vbr4dXzD$L{v@+|xpNBLdhLqn% z;DL9gwoe+g;CJWvy6`152v;qgYja-zg3#CA8Ucsp3QTd zp3p$d_tf@S9c}nXeScfk9hOjUOND5I=eU-ZiZFM`&zrQ=Y=RC@Pt93x`X6rp!?iW_ zF8)ewxY6^nlfK3szP?Fo0zWNqmTS4tL|`|*v)oyq;DZbUED#vJPshv*3oVjeU%pk3ciJJ2Qo71TTa^{!xBX+7sc7!OcS=T!;0 zj=j~?K42eAJ=ng*1rmj+huH7iv+d_kV~%%$2TbY#=Jz#As0qJzhS^M!iB0`cLw)!C%Ro(!=l44&KEv4i(s3VZ{_C5E zQMsoH+@NjN!&u{SlBcG;`kpC2LU?*Pypq>E?C^K4lsSH&*8x`Pw`T?g%lw6>SBtzXRFs06+aBei(vI+W|HJOWMhT#) ze?Ofv_kXZ6^`7`7C%8|&(bgaeCC7vhd@y%{;?D|~oX?5Et*nT*ul<~$Jfd9j{CiQD zaWeb5?E)uAikbRtHLoaqIpq9$_f02I+U@p>!Z<7iQn|lh5zQ-MSZVEUl+tnb64&cHiIIU-4l)8ko-fH$ND@; zed>+9E7L%ndXRbs$*10%-#UHl4a(J|J=!<-b2Ie-D0$~i@vi%E>zMYu@|AVQw6X!Ypg?T0_eAmu* zBzk^L@B@BRP%&LrB$Mt4U7^=gqgqWsXs-YAvQ4D?(xlYZJI0_x(cNW75Wl^Z$8@g| zh-)RioA-g#Uv*MaFWl%atr%Fnklgay5Skl{UCw`Y1kR=Rjwy2up^N&C#tuhNaoML4 z=0xnxc|>0jzE$}-w5V&)0K{WXrLN>S0*-U?zu-aa-?LpV5xS)RZFLD#4(WsVk?}LX 
zo00O=V^!^X&>nhZ%T6`Y{(1ezM3w0<`WDy+2f)YubZH%8zf<4aD0To9>bp$SbO5P0 zJ7*I(wYbXXpcc?D7B7NzA^=pZ5}$ovLHwH?iw(p5NqfHrub(ISROptF+ah67Uao=`_(>b` zGkM!BbV&cq+?;oz+CZOJzrmEac|6(=pStO40nhNmjQS3HVmEP4j}zB~a+!eRy$BxYV=-VwmQHy)$8iNiCx9FV(#E}4M@7RY9wW!E#RyL=}V_; z0M9GC44Euj=qmB5p14yTGGYcKZFy_~PM#PHGgSvpZPuFwUu{4|s+h0)7O_vFSr)m& z)_-B~MI!SwNBhqh&WAszUQQsl+lvk{@n;R&EiIU;O;?5){8SXPSB2ssgwC zqar1UKJTUEXvS^hMP<^{L42vePto!z|Ks(nfrOU{=A&}d>OVQk#rNEDfm?2H%N1_9 z!!4J%<<>uPja%+<$wh9t$t_p8UXVYo-%NLJUDXcEb*U}I{DTLO@#qLT4w!1B7e*^GZO{~T+3${MdT5W(}?CQer15E z+jxu=4FJxi?B)MS)>gDJS!VN$ziSP1hi1Dyn*kY>DSMM{Oa&F0Vwo4VGoUDs*Wl`BG+^IxzOvwd}m74+Hm1qKAJFJ`l{~-`GC`I7=Mc z28rAe_efs(q+K6`k__uY>It8Z%j4huP9Lh$=NK9r}^hF<^ zU(?|~+Rsqk@@e>z9`N`Oi|`|v523c$@R@-7boaHcm;uct%2Rd~82~47X!e8M1YWvM zWz&127w)RXOpcfdU6+SY_zU3&&a0rscV-fK5&z37+60Qq_NLAGHUsFQix!MrG=U82 zJ7ynA{pC|E;zdoN=m?MF9>W=6FxgA?hmR>_Ods0y>q#vcmKqJxn-Mf+VE)x?zTOB7H9t(>*_{_X`@qiRspTKVLJj{Lpd7 z*>8=u~B`eWj1Q*0v z7t%)#neTo@D6T&m93ll&VS<28ks{cr65&Bd9X7koOv z2=-5z_UObeCMtc3jx;}J1ZU$*y>5guv2Tlc!trb}SK~z;OZ1p{VoBc{WMcOO$)gI-2!6QmM^GwB(x`m>4_CR9vR7O0HUy)r z#u-7<42*GC%2@YeY#kK-tOGaRxbe!3ckXrhXWh8QWwIeYcIT`A^{uz1ShDyG15ez% z9TWWA7&>1KX;*G!Anr6zShU|5@(A3S&cN;Gs#z}&8H1M0#u)2l29~V7wo%H{7|d(T zea(FsSpQ@1of&jv$Ul9}#Es6t;p3S-FZqnYw|gl5wlM?QyIS^@d^h?#_p}W^zgHF- zL6z`%oTA4-dKkZHmWdHu|01|#lLrF_54}2NlxPI>+s9}1$1{*d)^%~D5q$66_H=}d zC*fs(_My2(u(G(~Wp)k&uiLG3AG9+9MT=8)^2H47E_roIf2I-4+iutJ@E8M~uilQK zk@6aA8)y8kVW9M{HtQUk5qJns7TMeJzqOIL^ix&tEy1_VO80@QhX2qpSt2R&-^KC& z>BEdTREf+ih}`B3Lsb4q^~3ikTR1< z(k02BqyUm4NV21x@_;yyFeAoa9pi8NiH>I%^FIF{d~-?~I-X-d(uoa^dPC#DHm=C% z*J%cD*XMqJCBtF)twJUZnm1g@G~aD$@IUryh> zU16B|X4ow^2&BF})8Ya;bhB3?HEvLz{2XX2T%bAf(Sarp8X)y<^J{0=cxS5|1q(!( z^HK0%RitOcFE`LxZ1Cda8E44P8($sY>jwYf?_81W?fO%0P&%!`K&8+bzHGPGy+`^N z;`1+l_Qn|s+gHzhNXA_jm02+DgEN#;@1MSOgC~P|+wV@WzNwd|?1IbKyV?Q7 zS0DXGp)1gd>&kn>5dysl!q~3RPJOJ?-x284ro?_%poPgw?K$HJz0@}z&$tpB-P-5g z6eqBtHlwBzyt98Sv}tvO@;inq%NTCZ>vg50XR8zB*_(!6T;m3y?y+q3W+%vBM?D-$ 
z@MQO+yHLpqGGy{LNpExmgS3P-M^v1ke!D};zzsL3q~6Joasv9|b#G6;aRZCwEzegz zCHyat@`b>OTb_qjIKsd> zgD6cycgW}2Hdpb3fw_Qs-w0}$pPv&7(`t!a)+eHLP2_x4q#DGVv|wt4niB~RvJbQ z5cp!vh1oCMAx-~kYG4;Y`7Nuem(x6eqp?N#aE=Q^MIroH~eB(+C$*XgMssQctC06_7&f?0CIA-P5+ta0rdPx z&xSw%p@GnlA16Fu;l}kxh^m2-7;`11yB?rYCQ)HRz3dur?dpM79&mJXZe2m6J$zq3 zG>9V}(6uqQE-}g;n#avcRFL+BR0ZGa_gC$}Io@2UH{KI+`1tJ_hwWew>gpt|_k;j9 zT4v=lJ6IB15Sv}+3FT+j;p@p>KjjEpc(%5B?Tv??VDMpc^i?Zc zxPA7VckUNY$V1~68+{v~IXn_QEaOGSr*`dgr#0lxF)uWX^n#j`YS)x+Si?f`(hjHn zUZAsU;j%h+Yj~{lIpA`&7j)UYs!Y3J4UWehK8L>Y0-(OPGRGQvTPn_73H65NYt14T zRBa&qY1{l&hrOXFMYn9)4{PAT9ewT9-q1V2M0GlWOLR5~br2Yci>x};7K&I4HFoj& zfLdY5p?4BC5PW?HJq3J#Gy1(H8yHCH37}y6GEe0RW_BQS*5Py8OCKl~EMC~EZVzhJ z%ew0IeWB~t&b?E!>>>D&L6rD>U&!#t?4KcQ2d>9%=hxQx0?!=5N3F?r!18{2O=cpU z*v`Jy5gTk_k-$%(3pR8}`8Mgz1#>&N=%cQ`BZUrmlPcDHn`jI2^P^|ZuO{Vh?A&Yd z-4@s>JU!)obO`jQ4%J;?12tRM-1C!T0KG_}B4?Kk_+C@=(R60O0QKFCI9n(mKl8hY z2LlXFJnDHe*&f85i{4*|roO5Bi_=Bm+!$V|N&?fbO&lH(2N+H{C-Y+o18{ZwZpW7Z z^1GB$Jy;Crs`{GL;NS=eOKT-3k@n@OZv|=+xm+QdeBuWkGI)pX9bM}TfxHI$9G=ht zs2H?7XFw6%|BtONfs3*G9-lTPNu^TSrhTtz-<#7u?X$J1R9YnzAqv@&Jwjy-Q7I%M ziLUGsWh+|>5u%VJ(f>}#`~6=(pU=GB>v^8Lob8@-?z!jCODmbsu6gwAIDS`%;g#{& z$^>3HAwd%txDs^CXJ9#AA1`0`H-OrY&hz(HFd>_l-#-uYSC2wOdI=NqJJe@*B{)IF zi;LoW7UT7c@~bRJb^;Nn#CPvQnIOTdJ)PzR$$RR?ubIjOoi7^ib}3FU^`FE04>oDnYOqFqY1BXY>?K&x#B-(2 z^Z)~1{TfU!*3UBUPP*<6BQ>61WB5N_4>JMrT6IMrr{aKK~ zqZ29%4ED&@b_ubdyuQ^r#~bTKm>lD@S@0S!CrgwCsk~axp7`IBZ>AKB;CVfobG|8% z1*?y`%*i^B>1OwOu3R_^Zkr`^?Qg*LIZ*Sx&>nrz75 z1hvOa*jUarX@4Qxpz*og%u zeqWjgj{+3hoiC+;`$K8tCT?It(#zM&)|#`SJ^G!?l!Hu&;nfIU!UnNO`&C(3 zKIrfYL2Y1zqUMP^Cij@2$jj$3~$);JAF%ML(HQM z2liwJ0EKr5xR(vd(zgtcH3UHB)~22Fud{*abT-diIuO!o474n7V!ZRtx^e=+V5A-) zd$bOTO%RCi>PCHLLu5g(_bJOrvS`s3Lg z*vRtSn<+aDBJUioKAOaV+gh0MS5E^bFCTvp2S)1C7*2x4745w@^3?6q7{)66di2@y^bi!l0E` zhh`ZY(zI6{dL0!8g{qquqy(@bfmfq?Sr|NX&RHde>Ap9!Qa9*z7&Mt)Icb!P>E32s z%!!lXKy3^*-y4hDBXxzs!Dpll3jRJ`bZ^V;>7)CFBO53`F8D5Qnhp}3uIEMP;_*l7 z8b!cJUAk!eKCiIH#R#~K)dpG&8=lRcvg?CXBs}0-X|n{&qm8^;#j24|EAhH;wfvu& 
z>Ee;2fB(k($-sF|HWhRziJUeVf4A6LXXd>R9c=fa{;`di>kg=GK^*v#MyB7vyz<%~{gI6q`AFoc5 zX)Ijf`3?yu*VSOSsS5D9?4qSP4^91qp zd%iOy!s8&}=H^XM=nqXB-R?i06$cf(`Z7!XfmaSNXKoxMpFBA3KEoe|N-0bjRJ`1DTU`a~L6|&FI^lNJMMlC0A84vhrfT5#^M$wjp1b1%G@c$2 zh1HQhUEFtOX^)Peenve&Gv>a zte)MTfalB8#|*q-;Ju&|a`x*vT4!@Pe*PFEX1f=ZSN5!LpB)WNv3_}1EAaRs z=RZ0xjfR1NCuZI=d|;$rP(E%a1ui{)+y}(gt3-do{YR$9d1k)H>zN*%S8^*F7?GV# zYQnxC!m|eiV<3lTH$d zfma53?$#N=rFSOppis-c{VA3~AD6jE_c3%CL=~2*b-ec{y z!DqB?*y~8hzqryoI@}vtOV%c&%0^*&2&FptV*CBJ`f{396o~NT)Gse+<$dQC1r?J& z>J_zn0p6Y0)`diYb%CDT4yre7Y+tVbI0MU5p57_v4N3bqUl%LG-}8ctHh7K3NR?yx z%exb$>;i1u#qS4S77-bixm|c1_<)bURY5; zJvX$rZ#lMGCc8ct#`3-it5u2G9x%kK52X+Vl%;fw>D%0)X*r{AX>TMjc(EwI-9U^- zA5SA8GA(bzK&2aO#M`vd$w)wWJ8z431L;|xbt!xC{=}2dac(fQT>NV%-j77uy%r`F zxxurUMTftHm|b?kQZj{Vg~W>YB=ZbQRq|C&0de*LL>wwHj&j_IKP)9;wUc z1~t>>Rr1<9c5>W#&qw3}H*!>;H+6&Y+@tNh_T*i+v~zI#^%e7j4BQr*-0*Y9d^gyp zzngI&2Jg2f{%^-Ga)UkG?=5zC|BM^#aCaAXhaorT&^wWlAn^ekr0MRkal>^dw~R<= z4R~-*q0}9$f5h!pI1~vwEm`YRFnqvjUUy-2B#hNr&hH-=k$*W7M(TF%_5dbNur@_P zjbF=XKW@VECSo-Vs~P015f|EmH3z zphALgEwh6G`43i?$KH#8gt?4kC#Pfi;yt9UP#Xc^uKw{p$2_6+sE(3KQ3MofHKfmY z?Fk~;<9_C3L_q8C?4%{JUXWw`VOzLs1YGp)R@#f@4MVr9f7|Qn(0VH5+qoJq*tc~_ zK+TrvV11o(+xaYhzih^OQIrURO+ zczUw47f39zEORuT4*4&h_Qn6i^vir091i(YB31T!d4jvk zkov-V;V`Jy@Y7Su6I4oOtanQb2Vd}i8-Ixb)jL>=Q}n|@BxG0Y`Ogd(2=q88^CAo) z&7O)$yk$V1|3r)23t?bocwqlmNjzTk`3%#%Fxa(@HsgkhCrG$hZcVibgKE>K8*{HR z;NmsaSEikzaLppkC8~k}h0l|srXLN3{HT=iB{>YZ!Dmo&3G1VB$9ig(A7;P|t0DE7 z%ArubRX9tphymLc9Wu%JJ`FZY2dd_GGC=InUA>pp)3DtVsH&*$30;xj-d#wW26r1; z<_?%)dEdX}`*rPUfWB`!;eemh+FjQ8d$77@n8N zO_3Pq1=?`*Lu+a^mhns_8&UITGNlQ;?;@RKKU`XKY$IlqBXIs!) 
zxjt;a@M3U&F(B~M=2h0GgWy%bFQaNcPoR9(uSr=K1o`;;l@6Xzv}9IfnLBO^mXyZQ zJz>R%&8vha1%biR+O@~xJc0gEzoz_HAneOYyql)y2|`#^*=!aFs@sIe+14`PihTVU z>D~Z1Qxm;I^vJ*YyvcmuWIk{*UpSdhoXj^)<|8Nbm6Q3*$$aNzK6El)I+;(M%(qVF zV<+>qllk1qeD7pFcrsr+nNOa~H&5oHC-c>l`RvJj_hdeNGG9KKPoK=UPv+w%^YxSY z{Kf0FPhTGj`g>lQs*yLGlNJn_yn2boTsX+{cTEU^`sY}2{LF#MHB@2E zpeqQc(>i1QJfJgb&V#+-Z=wS2wVbC;~ zd%h{G_!tMIc*mfw(_q5%KAECi4$QkUEkpEKFwB1Yv-I;4&WO9bj~N0FVogr|xWgGK z7oW8z1Qug5(0|1Fr~iwz0=@icQE>FJz%e9(*Ne@}DJP;pSOgn>LL3Od22s7*447Wy zDaQYr4I_DK#?OF}x_+nGaAa8@K%#2i` zKvZ_ESzQww9(Jz?jHE@ucwT#m{m=M3@G7pI0a*Lxs6ArCGb8hsnJqJb=N^r^$_8G2 z)Y+9Yfagw|QqBgZE@fkUlgkV+ID2}k)Gap5e3f05<~;*=>dUPc z?0@>dl=JY|K5WlV<;89Wv7vsDzV=5!6l~}Dm6O>pQh)Ojp6_g%yq62v@QQbi(-SpX zpK&^S^ql)^6d3dB&HLj11m`?&td9bod83KjHHRPOSYv&&^4Yya2KV3b7*u|hnMv+o z^W;lPohBEOVoMFY;ymERX*2nF6D~YErQLQ}hXHrSbz2#lav@~HEx!-W3^4I)to%gf zg5Xz^s$@*>I!y|GPBboLgyt(u!Fukc`_+PXZMl%iy|kzY?|bt2`3(auXb2tIGH0tN zxM`*w8mErm&kRbj~`!FKj1p zAv^cr7ol4o5bM%^=Lm%h1+!iignQFv-10@|>#$`9jl4G--?lUdnZe^A4w-w$HEE3r`bjcFD&h^+9Vf!;7 zrz@cBh$Se7Tz1Oy^8RQ1q7T%fNte9fZcXvmp}B0V7rR_r_0=0ft3@y9l(3;St<>OE zlQ&!!z5LVy)8FbxyQ__Re4uz;Dp20CA+7$Dz?B_-ASrhFsj>tI#GPikcB(MJ+ntsn zugC$x&lQI=H!)%9!fR>w%{UtLWdTJ11)x!pjbI&3w8vI~GVh^_x^Ap;rBrNIR3-iB8 zZ@-?)h8DT@A-`olkpArE4kpi^S1H?AUgZsk$}l3Q;CAan&2x9X;o-KfsRb`tko;;= z?$#?luqn1jTJbmw>>92}e@pX$Umq7eUsl3`ges=(=|^6$;KZS(}+>f-q`+E^ekJWDOG#uG}9H{Jb8VS|JTzlybk7Zex$&_99q z>4?09HBYR);Fw#Z=?)Y8oV!-E7US9Jtmp;Z1ol7p7`lMgx;~GBecK!Jjh}eI%E_|N zgS(>O;+`V==ILHg>F&u~`!))WuMBZ~((VcCm&z}x?TdnCHP@z}jPQilUT>~_lAZzi zXRb{j%)t6^je2uVXA~6PJ3iT=m;qAz?e?@cM?vX+xhafK9&qJn!*r?7QIKTo|5kpM z2dw=3)mFb3kGHtL?}(%aw%cCner$=Qk7@uxe?!SfZm z5Ct`HjAJ_S9uWLHUM3GkK@4|S?9*4+{;_^&@wqk%O6GLF`Z1dUM?~n0gRfxuu&1r` z!X3OH+RaO@!7g$Pzlp-}eB2AMwlRp^^7L@QjuSwmJPg z7M<1rU&BTw{9Kk;o7x_WYM!kB6?>QovE%e!y=aIlyj&w;v1q28quP-fOvskS8+s1@ zo_CHL#vHAKyA_Wo9kgSq5)%@_tR=SXi$%wI=c*D+NYfGRTUQ&4A{Y8A%StgJk>}TY z5Q|3c=3u^HNq0?NJRFPCdGQ){OmJ4^7e6pA4poOI9KRpP1fDrh)s90Od3SuGm|(=a 
zQ|29qt|=6F{0?S9fNk*b+psvaPd9aX^js#C&-u2eFCY#rdo}B>!CEGK?G$SD$cRHD zccd0ENAFI}jzc5$)I*qK{z;_#$3HpNUi!y>xtq6Nn#7IfbrWO#1Mj;WrFmw7aY(H! z)Is~)UwLkQ3(eeeGia3Ug-v zR7DSCy$L4vJ!Bj=?%L>j8! z-M7LCP|&LDv)gcg-lYbmE1jU^X`M#seRt6D6zli6gx}9?D_wHh9ZD1y{&?g7YCx60 z;tUk^1N(Jq@%shZZ7q7v(DbeN@zTxiFt9huyLz7!6rQHsW~JfpMfg?vdjZlOs(o@b za)-i2bln@J0M@acuY4Kqz))5sF^H<}xr+c)TEq<;dIP-m}J5;~PFuTYE ztk*J*rPjMcm)XzPj9a*^7;j4X=>88pD5I%@uJB^%RCs3&7dhzrNNsnkQQQCrO&GZl zW(NZQ6WcnDGIELX?>8gwG}w;b^(y3`k@Jf-+tE8_3pr?{ix%5ay(*Q1WJk_FZDHiQ z8V>5_o$t)D1+u@(JU`F8|K>0@r;%o!Ezr(uPnzJ(K?**XdnG$;M#2>C26B*g;kt?r zKHE|L3*exUdiQH>L4bE>1^4H;RokktZ@6D8djL1@J^7?E(kvUdL7Q37bKs5f&mv99WK3{rWUhSeI_Fv?~?c z2E4PJd=nTRZxr|-fdka&Pt%;0O@LP_ybAWm`(B|cn z3g$pOPiGG_gwXr7ucYU4z#vxsWpI!lD3uS-&tJiTE1K8$-b>K}Ug*fdvmD4aDw$jG zN&`%{3|&I+IIz!sn%-Re{vn=E1?!?bld5B`J|Ny1?t2c9@E~KC$k-+__9=~edFPOT z1Gr3+GZ9b0&u6wRTXfzI9?PewuUyZ9uGsXdpN6(D*SWE>Ih6&BZUM*S3L97xB4^U+ z#)5O_MbiRmts&aGC~%uDmc3MQnT%2^nCaALDr&<5TH%j4(@;w|T(@k>3kw#A1XZb> zcC-LPpQ6C3Agr6|hvd2)Hiu39ewS{$vtZzEoN(GWb5L&xu+zu$i*#{fylXcDr`gxi zCQW65&bw^gIloO{id>31Q-cNV!}p8JYD^%)xVEF5!h+Vm0^5gIXmG--DDX9IgGbMn z2MRQZ{h2){^ga`){^Dm=SQ&x8Vi_}GBNKWR{KHo%>VrQdmQFWiLd3PO=3kHW;iN_G z)fI|Zh_36Q-P6#A7qg;IXb3Pt()0o2qmC}Le9!)5FNpU-$?n|w&$K~ra&uKujz2U7 z&{}(bYJ;EZOZRzi{2+56u0+#C7oNvozLa~|58CI;ZMs>Y1AUX`B*{(k1M1B5z)L+^ z;91#L8hq9ldR9@K1;n+$`Fxe{&e^_jcfRhv&1W>=v*d(BYPG%)$&%Z&<1!WUoE;y@ z>)`eV+SCswihubi(l_N&Q>iQzWipO2ynO+JSKd2dCj(~$^KQI;;0t4X*LJq8)N{xW z3eTrbzk6f~r1?J;EB5mTa4V_|R#1h4yBj|%*7(ESbuFF7;+imPX(}jSouA%gV7K_J z4qRVze70{IhR<@Rcc<4F!V6!gO&wpEP$9muCx0#t?ppS5o`CU_IX^XB(bxo$54$nG1ED-+kkN&Z6U8feTIHk`cp0uPi3kbrt?rT{tBuuYT<9@LGEBA6BBsXuK z4O|#l^js3#s^oliqmF!xIA;xNNoh+CFXVt)?#4Z9mRbVuh<{#nvbI|79Q_kUlxvtRj=`SroVP@63o1HyaA8yU!6ma=&5;_<`Ei^Jzt3({QaEXeX7TR* z?BK%t%KX-h)wby6yB(Ecx!h5mf4(oya!zju)P2aWWPD;=LPXaaiO!Oi;Xb$%;EP3f5O4wZB zZWWE5;7LQb^QJ_qXmUq=RF7$+do`w`aNc#@3Fhdq$uy6E30yEgc-m^9#suwQW@oyH zjIGynbWg@~&AYC*N?dQIW zn4`?ec&6BY~Vp} zVwpLz;azCI$p!=7nSX>Cs?IZa|FDe>lX&-e^DU4C&uO)p4L2K;1svy^qY*kyWkda( 
z)-yVn%@Oa2y*iu?emv)(fhAhKukq+z4jbI>Kfc+XV}%@+&Aq=Lw?B(b9n@x8qH4bV zD(*UL7{?11^RYzis3g@J6WCzL(+@nX(ed+(kGg2F!FfFIg0D4t$2(J&VM8hJj6TH* z9oUl)q$I9_Z-m@Qh*OQ*1z z1@}@Hnp)Rbp)0(Y6D-GOU^Wa1!St4$FXfrd0)AdBnXnB~#HwWulLfp`1x}qEYF~{R zLK*L)Lp0~Kaz|w0^k6APgavlVce3wYqoY%qWrp%7}c43m~f95BO>pF zlz3<9WlV4koA*rD&>5w#GWI!wZCoKm=N-G&I3rm5;|qTVb5viVxs2i`i3v*EY4`iz zx}X!h`_|!1=qZ1`@IZhoy7YFfWW5Cw+A3zV;(}aJXoSY;P7x-Q@nQ(_T~RzQZ^s3H z*tmRuN=JelGF#udGcd>>wqHw73b1fP%D;D&v%dPl5ne3L95>{~%j>Yt53J)|XU?r~ zLnFRwF+UjL;~4k9vS=^=v;l8^2SqM3sR$zFrpTz-^&pDCgl`2D;I* zFGT9JAMCq#X^{SvF$xb8f5?>+3>0tnL~E}&>~w7M2bVcn3cZqEDCCgx!DdM&FwA6q zFy^Vc%ejI%xxAE>UsCak9eVxJS|mNPf)_f>=6E*wQ8o^Z6@62 zov|@OqR&71q!9~hd5L1yd!krO?h8Cwu;T6{_0@AdQTegf zh=u7a;F-1;_`DD=?dh!;7R2&m{ye-8FBH)|lm&ekKUc-qc%k1|?bTxA?|Dw|Aa5kb zi&2Ybf%>DD97?1&66M`Z+Qx!ybh)%ohTiC`VM9{RX%;B(?qHntLPJXH`IllF-Icfg zlb&ePl5F=yMJ(vy@xvxBlz!O2MX`|uyMK3ktfzRRigLW8yc)CVy%GI#;VHotY?%qA?4cp*rtmHsGiBo7S-yo5gtE3Rguv>A^!91!6^A}?5s%0hE_v19@qyl)G< z-I&NiBe79(9GEAICHG?%S~bUb=0@PaV&!Lht4vs^j(3*EyOSe}mUl~2vQ`5&z0Ky8;k!_13~E|fCXoZQU;2ZIN@-UP8m;a$Ul zsk~U#*-VtoI~2h3nztR)aG5BJCqQ~RkgJ48;A5dKTEz5BVJ>Xp#opXxqLF+as$B4! zArWlT>W?n+`uF8R*IZ{+er10&vMw$c{OyJ;LOA~D_JoQpeks_lIQQa!j)ot~UZ3(? 
zYb6&(;I_*Ljl|61{qcVPiE?vEZ^S$LxW?Nrc?nC_cp+W{<&!in+`5PXVd9DGdBL1} zxUjopT*fjA15Mc5(yFyipOatC9X-o&c1Le{_DLcax{-~#*L)8& z;#kjqx<23Kke-Q`Wp3NFwlrCzm+wb2j>Z6)Q4dF-`Hbw8Sw?|XN=Ob6>HR| zk@%;-{}~_uS04Y8dFvRfF*jDjE-C(X>+**YuzjBWldl`7BkP#(XB}icy2ykTlh(LJ zz=K|9HixN-Dt0V+xF;h5o_%6w+dZeE)SXKnI+sR(VBC%2H@B!r>;P-A%Z2~?9vjQY z!$o>Fett!*aQj#MTw?P4qnd%LD0_bJDW~)ZNQ&LO&E4|fdV6F&K5~APk*l`?qP%sY zH9z8*!8AC%FFEB&f)i5oa*uXO1lY6txm)BE%aNSb)BmiK{Xg&DnUURlFxMWzr-;Tc z_h?`{sJdQ6#vWxlrdLg{Hi2l3W%4Sa_DH91NWd)G1nO-KqPE|#M<~u#T>Oy4oz@CB~j6d8z^QVDmz=`%YZ#EAx7D_|h=L#Iel=^&0ix+P-+-(8LAKxneK4*(2#csStRj`DlMH8Q0 z^sq%}Zx}6XyCtk%d{pYfpbcu5vPu!SZwaQwFiAPn2I*LZn0t4mhtQXNXh-w#-D`RO58c*jt=a$)^3dfM#lyNpd@j2;ZUu}ajokDW8kJ`YK zt5*m2XV{>WuMTD6S8QPWB-iqK0~-XlIus;6+W=LeI?Jun8c8%SV^7Plh5exs+pq7n zMhRlwUsyM7LH>o;o4pR!2xU_aY*n@c`CB#*$K_g~R_~D9nS%BpYVoAx?R`sR(&E~A zXpX~Q{O&LJR=M@a0mNb_KGU0Oi86~)(@WTnaAIljv}`v^ME70U(_P{S*`+LMqn0Hq zyc|ZOn9yO!J-c@XSfbt?*Ua{p(ZTN043o+dEA&0JNYd^T9ga9l%Wj))jdBY2w(Lc8 zs2n$2WH8+Zu^M;oO?yNKdy|`auUl*oSE-TS4>0ntk(E0gy-LSNChoQxz# z1lyt$UBn&1v0=)oE_FJ3r}azVbGJRb%uh}ck_F^3Pp;d^+8(O9HiX>emBL=!v!!>T z9jrVPtTk~jey+01HfokFC`x!&p1A`k=djx47hN{scX3;8)I&gBwX>aCuG@fbcD?TV z0YI(24rO3q0}~4F+YWaE%7^}&YmZrj#h0tsLVx4uQoC#?=Uc(WscL~I@$>4RAHMt; zvIIs%+%Dg5fS9}Le*D~K3Hxk{opyZ!#CZLrqi?+hETq^TP*rt8g`3qb$5xobjcd0l zaTZR9@g-Z=QOX>^T6DtoSSJMXoz=JZn*EhG0}CZ9mzbDA_uk|bOFJi2QR3S9?TIP0 z;O}2pJ0ZD<=)|fdQ`mI$wRg=h9#1EivMWnUxmeN(<o!Fv&K}?UAY!9m|54HYyY>%!_ODtk% z+riS%lRj#W4#*nw@7-bBzx~q+Qqx=O>>w%VNr~oROfM(YE~hBi!#Ja5@>Lrgk+oRm znpIx*u(0V<;5!>f)auk(O?zw)N8j2!{3Pj!P>k@?GlCAV@5Ibm@fRJC&XW!WM~=f^ zdL`$JMZ5z3rSbNLvAK+mJ@)O`T6lY)*q^8FnZ(2Y zv2H~QMQFsYPsL`5AtM|y*L@G}m~05IqXt)hKNWylhkSKh!(c>TIQnPZWAEPJ-DhzH z=(fg2^l%vJH7&k1!w-PT^HCoU`}^(4eYewLNRD^k$_3y?`oo6RoG?`Q6DyZO0DTKr z-(Ptt6j6D(lR{Ar?_Mcx=kV_T;CB1X_9>2+=un<= zWm-nzG?Zf~XrqJMK1W2|HP=l;664paM90zL3NOiB0)Cz(_;P^^eKc31^E3nlhnKY# zIYKp7h4l@mA=>B6N?jF4Fks3LJ1R{>O}y{6IgHMKCZJv4E0dh)LvOFE|bLY1f3!`v<6W661DB=Ei@yV6; z@POB!a0tpAi7&Q?XS{1Y9l>ZMUVXjYsBU*Z7`6X7uNrxNDHzds<4(7QjXZd42}Xq@ 
zaIpm+o{kt5jCx1jw*jBx_|9*PU_^Bc*RF1}fuxPgdvD?Ak+$L5zkXRmcAM3tUrBhp zbu#YNZq^X?AhPUeN-(O(yS(YbCre1R(tG+~Q83E%a(i|*1q%@GR<#TG{X(^@RT6J3 zps+ga%yBE;pwMML9V+~R^dq? z>J5wk?cQz*f_2@@xRZe>r$R-kmSG0cizWnW9SlU&Qz}Z6+RT7EGwS=)-+ae@{zoXmk{pch*AHUK3v6f;*JHiO$c46DCB4Z!Q0Ft;bo4A9lf=Ic8H z5bZ+lcQ;Wp2vd+>zx`VPl9=?)XO*)lh^<_n+$e3rE zUTz9jj&=sqRRWQOSK!CO3{#letK#Zy8HoClWZdOnn!v!W2BUXQf#_L*xL7pB6!goV zE-MBI*^tb#rhX#HAitApDR9RK^{9TV8}_<>N!YW%%#&a}E_6DY}6d@F|n(di>A z-}vJ86y07!P29e6(}`~4WCHVq?@XBZJP_S3>^!jNFaB+8-Y)i}4lKq1!i#dx5-bc@Ux;Qc;?3MuV7rhSdU1m|kzREqF|&L5|XkJ&6LrSTA^WRFaot`}EV{ za&^of{V2P@&lp_0#80&`F@FxA>~#-~p(rIU{b6P>V)`~*Pg9^l@=0@91kO%GssB=MP&uXV%(ntIE0 zZ!QTz?T_SMt$uF;3{%e80xZ9pG|g`{{4jywx-?6Pb}Wys3*Wo%WeT;agIB5tvHYrU zj5%0``73LJ>{9wPl&l)A{fTJ?_T#60-V}l5?OLs%h%aW4F@BoK-8s_`l6&W)I@cUp zwO=SM2*GXtb7z#Z%^^%b$=BtmUA2g;EgJ}wK(-@R4|YbRR(L&I510Jq!o zehrEzS%A-u<+NRKxSf1pmcR=vU+g!%j!eSiO%~dk44MKr3tCw^?DKNJ54$U;jR~TZ+GW9c70iC|pO4NwEcMITIQ0`Jt$H(tXxV6Y0+Rw(v?iLS2%(8VdFARP zyg%w5KA|2OhH{>$C}rNq``a4xFYzq=eZ+*hHka%{Oy+xhKLdY%tIXMJraj21-0|M& z5QdU>%eX&IvWLP&sO17J3{@m7_pH5c2R_o@Z9h-#5i(Bs5 zuWtue7Kd+6eH)4jI~!w;m)e4Tji93Sy->vTZsyx?-Uj4OHO#TD3q`cwZ00O)8|bz= zgOZPhqU0%dLJRI&!$9o;b~nZsHOK!{vX3=n(lRDXY{2?Pbl}H97i;L9IPG(63dXOJ zRB7p5D>yxldc|m3D9RMLt`{0^1?AtwPt}HoBJ@7Je5J7!#I&qiTec97|I&%Rq}dXJ zX&D^;m6*O#1TCuWSwe>Rw3L_IFg;b3IdkV&f%QD~16OMC{EzH1p;N3OX(7sc{~pta zb-1=bhBYV_yj*-_0)|J$gMA_wtYOGEMr7T148J|=sMFGHK;y%%R>#R0|MBO}ND0_N z)5Ht5btV{I_NaFj+ZJ+Cvh*&5WBe-ayHk7x(=R%3j~0jFckaQyQ-^F}Nc_SecOj-H z&%Eu4Z)~Bw{A`b z;MDDHnMJsL>epcN1v|(VS=D%HBkoUa-<@P7jDPi6E2po=`r*8DXS#FjA#D6A-$R(* zdgb-{Hh4IIfq-YBNe8CaMd!}U-s}Jp=_s#Z0Dr$wF_U5B2t#pFP1zFRNMdSlmf9mn za2-s^FfdSm*$(N2uW5QI=-?zQd;w3{3c~2bCjSIV{ zxO;k|xY(0E`OD}?uZUUj^tBhVFcqC3HV*!dUAU%RS0fxy#nJWS-0pj!ND)c5rv-p0 zL(ZBHPI#fs1H)$1UILO@Y4qLttrzMyoBg1{)Cr~TS-hZOk@w%VycS-%jq%7ECCjf- z35s+=o0asRY*z3=^u;ITCe3j|>Td5%JGT0u+qyex@>Wjh_rvm%6J0*&0sBjxIkr2= zx}s#=QL-*6S+|s|Yx;lGiBqOf#*O$E!xo==ZnT{JcWq1F_9i8VW82usqiEWo^oV_N 
z>kl4CIFN86;Yh-n!~=;Z5|1RF$#szR@W^_3WIa8y-X2+xkF3{6*7IX%??3(4(i`36 zWcRMtwLwjWj}PQ%dZU3%<&P@ctkDa{_m#_Wd(W2v!zdwZR3x-iDu?5Zl5}q7eR^bt zI>t}++_uUaS++o-NnN29Qz z&Idi)w_#JBfhn5ya~9vYHXjt6^et!3CKGh<=&2c>)qT+y@whvi-As^dy3(666JPYe z#nO6xI}M5dxZ~BX?2F{eY#vtl&``|E8bx<$KXm0o`%7hK8oIY!>CJLCKNRe=XPgYp z2&qhPIjl+bLwj6p9M)+Yp}v+$^B$`Dp|G2mu3qpmM8SMXeAjLLP-;q2GE^C$g7BX% zN#KWER-gUkxk(?zdHH5sS?`DB>MmVP`KE`K&|*Ejp86rh;}xgV2KCVE<(bB=68J|QU@kE0rTBy3uA}x%GxOu)Go(XB9dJVho zk^m-RoDp+X&r(O**ydm2N|}guzH~{;Gj-HoYqdq?786m8->#P3tATvp({{)Ruu$ZZ z8^ha@G|~A?L+da}7SiEA&6+E$g&42<(%(^Z=zU;FJWr6=UYv5 zJJ)eMC5wd~tJ*j$rfZ=n=d7bjhggX5bMforf?CMYWI;&FJ{G!hE2zGAPz@;$2%npF zpM}y^+CM4N(Lk#Tp3)yqV56Ltt;Q(|8t7<1-gyxXHsT(;_b5EFJ9@#M!hB% zb&9*yP}lKw!b8AD8hy;{yAo>XenX1=#uzrr_HdDAJ*J{D*~4rQd=e+8jB@n$S@_Ih zqe!@4vrDpx?`POMvd+=GqkWVi1TTQe0M+4Zz;5*|+_yTpqd zDu;9$QZi~B@cVIShv;`%bi*43$=Klj_s1X7l9xeB9_k&PJuHNNG>&(OltNF=MC#8w z&qCSk@Y}Y#B~kkct!2+ES*Yph$_?Ipl1NGMI?H4+3l$!e`4D6xiS(SZ^pe6@h*GqZ zv+2!19n;pw;oUSAg1sHhj{TqOUKUm!`V_;${84;9yP%`4f3M7J_6q!drk|D<6h$8jyooHieW;0v;G=Yq(wd+2m$^^tbgwWG<-5oFUgnd|v5q z`i-_p^UP29qs)powkf;$K)UteYU&w(1S@2lpL+_x61%LU9+G%IYflNc{SW|zQtb7n z;`O*)N-q%@582ntX6~4R$7jE9IT0xirzdTSu{CF+WV@NyZrez}s`ATyb~;Sdek3V+ zLcRp}h>f?d`s|O|y_d8I$wHm!FM^LrtFt|}}6+oak< zdqnYk41%i!j!3}!XXQGV)BRAz{Nef_O)1#fJ1zOEj~~j}quQxjJ{g7=xYu`v`=JxN z-Y$}9l!nO2{J{_fzrTB_Xvp-5Ilp{SQ{)u!IB{vHToc5x5cflc)h@zUe57Eyrv5bB zm%b?Tz>d13CnaI3G5@l*E57JO?DWdQvPoc6ym&!GmoF;ZxV2N^i5R@QKhN&MkT0TW zS0tVl9S5s!Tl@WZ>Wd;*G94X<`CukY-g5GuFQN~97*xs|qNh1mxP4vai_qQ4$})hd_Q6wgP(ggE~sz*MOUofohRys_Y0@~ z=9HM<^u9d0$oye%M7h+Vt|#-0zD{rd>Gxmpeim$`C}v23XPlod(c^s(weX=X?Ft_( z+kUWl2i6Cmo?2ofBM3GP^Ui<7ObuJsmpMiVKup22i%U0pqs)NJ2M1|_u=cmJroM|e zD)f4__vkWy2*dlc+XOt`l2gJ)trXa;a3kl)D=$QuD}*ux1YrNnR|A6AvHr4#>DYFR zAIACpbjhj0`pd>B|Df3vI5{ixgnJr(p8PDjYM25Q;-9;%J-krnqdPqZ#|uLHhdW;H zRq*rPJ3aLu$AgLL{?n!pJQ1ZhDf;Jr37{?B3i&0TsN(pMridm9c*^+c@+sRBq4j;o z1X@x2(x2wLqxL;v zZ@TXf)SdP#-H>YGj*@R~f2=r_4_S)yFH7-qM=KtGKX`ks5E9&dKksxi5N8e%Lr8iK9=K3MWgW+NT-YR^N6)wBu=~rxZw|n5qjls{5Q! 
z{H5;Aa?fN?%?u%hlk=R=+pyO5uCG)7>aN*_vtU7l651KC6=vo*p*=_`Kmusu{7Q+7*Vp?O#4XSche ze$|_Kucuog>-9|e%-^o);w#nlfdiJvCne$amUV7O^X)}c^W73H>_4;o1KSi_SoDr>2}*-EM=TFHV{##%7?b1<8kx?zcsDpPeHErZ zE(1AF^ILVq*d9ehyc(!ZVW6o_ad)|E1y{|`G2 zTP!LR3g1Z0&arx)WA&5Abk{N6i)ZK9|1on&y@}MTNWF{H%SgSA)ayvSkJJlEy^+)_ zNxhTQOG&+z)N4t-m(+_%y_wXjNxhrY%SpYR)ayyTpR@}|yMeSTNV|iyOGvwgv};Ja zhibt8JAH~TGFm-zmVcWWs+8xRIk(vt`5m8OGCkA`p=vCrgfM)1#?vnbnW0lo3xkim z_Cw1Se9N&vXoj*MJ`|jw=8qKby;WSj${eZw+#c?x>5rDDD1Wq?Xz@=Sd$P{`ObzZC z=5K#A6`Jr;^^UwwXtitayl4(}+G zLg61dH*UE4tHWm+_ivrwv2Q7iO+unx$Le@|@AHT6NqwKR2S|H?v?oY=gS1CTdxf-T zNPCC0he&&gw5LdWi?qi`dyTZ`NPCa82T6O8v?obCka!~TNaC5K1CmZiIwI+eq(hQU z$r{gO4QLzF1@eNeEObZtNy(IJCg_V$#^i0!S*UIBY}UkZ6I5#g9e6~+0mCYkZjuW{VXgW+lXGEOeT${i}nG^21U)U;+VlC{tMU=THf3WCSmAwS2 z_!x51SCNZKtQJi9d`c2oGu00~=HsHm;i6-)GbE8yOmoTbTMpi@rTpZZCDHoY%c<&( z97Nst^@ir7$>`Cd;;YvdT&_DaT-0;#8vCy-__V&k$Yl?1pZsF8WT?HadHVtO!?kRLJfo?* zCiK>cGQ<5W64@xzqpEd4udgn~KiqGHD;u@nGm*HE#*a#dicUP1W+Q6)L0^x>0_gMP z-eWsHvrux$yt6f-d?;Dp&S3Eq7NYfYOebCBM-A6gh9;fH@}uX>PjPX6RNmQNJaHA4 zH$(DQl^cZ+XP)9)ZaLN~4kon6eV&Nrkcyyn0SoPKpLcdpRs0|S1Qubc6pFyeotyJv zdFiw2IDemw+4gCxXPLcuF7_xzXIuZApH`g--7gOkbV!+FGBiF zNWTi{cOm^Uq~C^w8wpnu?j&AFypeb%@lMhONjD^2k#tAWB}um=UH3_Ith~SCcJGXk z^;K2$m6hu}s-!rGVxL~Uy6_48ad&_GD-GO!@%sFMlqUN0_aUE6Lpf;p-pTAw?8o%D z_LZ9&67h4b5ZAM_-_r-hGC1cJ;+Sn@^gB1Lw$z^CASdg#EElx*$)Z{>HI%MksO@+OVQqnK|wHBKAU`Qgn5!-4SKm~X$x&*6?20;=++ zn0@KEz4?^zk!?cI{;~h<(MbG#_x6^YdWaBm<7F|V0N`vj>% z<%w>IjTV>}>+=jmJyfCDR*Jb+h=Y1Bh1bn5R|6#Ae8k`b#<#<~*`Hsk!RFa+tJc3~ zBbvb#O|OmW&^xolr>+gR`_K7x+iOCh>h-OnHEeWarE|+_JuT=c?`EFa%0}8}7Kl2U zYr*2aM(_O_*{C)2jYt1JEvT>5TDD;texCF_*e+EI!0Fw5BONv>JR0-k%0g}UHehp6ued>F&pW?;GHtv+c#J!=jiqNRvR5~ zbjdnu{Sezjjado{Hfe(()-$-jvECpn)Og5B8x-nx<>!cFJw{5XvDiur9z9vPDNvh@ z67CItdt9Umd#Wy*_ww|n8z?*XjRu@M$Uj!CB2=BRS565BGf4 zU@)#^;}TOgDmm_RJX>E4ZhUFn;HJk$ps>DL6I4OW>D?(UMZEq4Qcjt7sZgxRH!L@W zjneX3?}zYF;dZ^%q*Kall&o{I_{3UeSkh6uWFCH>@@;Ba-$^By>lc3Kq!{Mc>#Cgv zVM?%*HMn~IFBTd`s+}{xD}bi#rXsB}EML0X9bShhLh*ynM+0kEh-wnilhmRB3@m>> 
zV!b3Yc}a^?oE%Kk+_zv79mDIz)Uqr0eI_bw zqn4O#6a`g_6XPCzWuo>EhjUNmjDx!``?dEz!~22$TJgw6L2%&9;OxToY@x7yjvpUC zkoyJRLe#LGUCrBf_ME7UzI%a-Q0DW(haHN4c_rt2&KC_E^^twY{Lj8a@(IaDB%hId zNb)Jk$Dzq?t1fNgpr+K@TQ-U*gU^@T>5H)3Y4Vw-c6cfk63<^wE#83ji>J#wW*wpe zrKPO2jVG5X)!8eqQQ=C#fqP9@jv>lHf#Y*kp-=RM;;HLc4n>$;8~&vVk5b%RPQSwA z9g%X1e69*(`dD!A_APGum=5y=pJj=1QSfE+=#ryUVBSl~SSrUwtp?VVoB33jC-!2` zWvo9HYVW>jZ-nP_;c}`yg^MI7#pxcUDZ>z-$dA*KTvYMiMfkxnB`n9zK;Kj@8h&Kj zFDt7A*VcY*sImSJ)>6qO8}GVsQ5}DP+$J{_aDBbfSHp*kVA1U@sd>tf`k}Ul9)9hl_G*2EX03qQa0tO!mxdE|QpdO1d$G3eR+}O9~Zm5w$aLO~no> z>@P{P%s93pDD`S)}+E1?&U;f+bri(K{P5aJ*9?=nwm=- zT3V@as3fg_Y84k{4+%DwVm@7;6)2s57LU(&P=LKx6^d|w!pCv{UaOY%KURg=n{rkJ zp2zLR<^pjoHBf$-l5rZhv)R-tYM&YmvDG{J4r2Jt5lMWest(($x|@D%;36F+waPM4 z4JPCMJU8LzhZcy|Vtk1=T5YMD&qc|PN*vC`6?U|D?ku#&{Y^Kyw&|=Y^sfqh5U0vTA`_k^w^^$}+sl=W*@IY)Z@pfx zR;UKq=d^xTy#9~Y18XmvhhD<;b-4Auyn;Gpr7IR+xQzD!v)wmqyVO8y&DZtfh1fp0 zZ6593q6V^>*IC zxGyW;ko^(kWwvvR4-@m*1+Cw@Z@7p?FMncOstUf3#m$y>VLPRz#KC{68Z18BontqM z`+KV>_jgk1+%?m)) z&*n7swrjwWcym)z{66*L%cE8+HDL@_g{Im)Q4N}~E6vSi*K9nV%Hm>%p%#QT{EGS( z9q<=-@nPY28f*fP>W_1N+uv$}v#V6jWc>g{!TUWM@Be}Z*SB7m4M3S;tCsB=)`U0q ztuOV(0+77MDQWsnP1v&kz&6GYF7h}d<)o3S3HR22UH|PJhVPa$Cl9x4!e(u~-F1&K zoh!+XxP15xuQ?5u7GK*D~zZx+7Sfw8sr0>*o@E_^kO$v*Z9&u_tn z4`Ql%@L6B)sp4b&efBJ~!TY*keZE`5^+5n?lI%OLGoTBqGX_^rN0{#L`{on%Ac_CZ zgwW~$1O+c9YOmCTOtHf~{m1eAXL?GW&eDUyH=Ti;iU72=ElVMNr9L#7#4tE`zJ+C+ zt{t28!GYeYHkgayf%s@W>-FLGgu}+VS-9=W^xibt0OVhk&17J@9JstMZc@2EF!LHd zokD7FTcit~uVKQ@=&6SAIATyx-wv<$n0)&A?S`NcotK_&9)PB}%C;3AG6aUg z8MF=aA^3h=5g%X()mI}wTvEh*U`@AmbuiOyPk#W5dQ92A(o+z=?+46Wz=#{1bq zPsua5{qkW-tu5yJ{NfiA3+fGF+v1l$LSOtxTVu4=)veBc{>u=`ou!x`?_hZ@TIz8A zwjuoRd3VbA9v9m^&ywd%GlcKv`@R+5;v(zECl*QXHvnfb=eW2hc>lV)K=gR8KHMpn z`RLlgMK2D@rUH!^MbMtzY>#-hB@(X4mP0V-gJie>Y+w^)*VBlZc8iPes zjEwsXKH3Q1+5F)<5>6x>NjQ^uAn`=vk;F4e2PB>RLq`Oik#tDXDM`m9os)b(@(IaD zB%hIdNb)Jk$0VP>9xt+({Q~!&ylPosz8(lrOakh^;CAle!Y5bsU|aH>7rvhYkjRN% zFdMuLH8K*&Ay)kl3;sd6UMi5iG#Z&3ED3VrDI 
zK9O}uEf9I^ac+6^O&{kGrc`d8o`>D2Up|715s<+0@3_bW7s1q@&gIu_aE$w%ZoII)dF|C&;G#pS@p(a zSB)_+KdkhPe~Z_zl-cq5k}>ohaa^~k1LMPJ>lgEUW9YsSIcr@DhVP|b<%1`Up`G6P zvhf^-hhk>O+*QVq%zQWBdJo3GolxW59Ngc9b!!i-#r@&;=iM|0O4OhrH8bEZ?e_|c zEIvOA_kZ%mM7J(u5No#Dk{E{P^YPv5%&W%W)8DUs%p241{D+oHn~fndwZ!Ke)(6m% zNk#8|8AJIY$MH3)xJ^GO@aBv$ygu@D`PM18KYO9Zf-*e+xf5i^3*z}Q78k$qHirD4 zw1?TBv0gn*yGP}*5j2S$KH-ApO5skEYp2%#E2l6^YT@6={^@?PJ4Y!i5T&_m_jvpK zM;l|f;_WVHU3G%9V6j>AVW3#9}{DnnN zp24y-eNf!fa9n+LAd++cxMJ&NeOMTjrDr}T5LI`f>~pRL@LS6H2yF(2N9&12!AlKb z+~WsAd9ir>_IIy~&izaG7=+qbe$sewAG>QZHr5zTkKK*o8&$4H{*d=f$a^T{Jr(jE z3wh6lyaz+xlOgZXkoRoJdpP7h9r7LzdC!Nu2SnZznj95==Ll{qZcBMMBwzw++Pxo3 zv3ywm-siaDHDkz_)2y?_E)acQPpx9$wuPen`jc81ZP9SRD z?c5^v6w6=S*60qzdd91x`@!I!dq})3cZ|PD*u}Oy5PCH3zdK(4%ikm($opL6eJ}Dp z7hjqgD+Sg*8Bpgj)spIwCPA~uFLW9Qs5R-PJKt!EcnQ_;S_77c=up{Y` zgck;(E``EB67MnQGaQRujEKlzGO5EC1@Gl_$#X`}Kf?aNos8QfPckl)^aqmuLeif|`Ws1q zBjP0Ri8hCZ(sCjFNSC&eX!&ZfkVl4XT_w;^(W#DP z4-Nyb^qP@faFTU0SquZ=q;i1~#G-E?8^)VuFU5BO_^ml|mc4p&YIq zM$GN!`?LBme9M@0Lr5h8s166~^sE+6Q4TxAwsPzU81*{vS0g-#0j+ja#JF(v&*c+C zGUt9psM=ED7^51YSdGf?k)P9~?3I4_T{f?LY+U_LKK6J|Q*0>tQKkBpg7uK7X6^kW z@~YtmtM1w?DR)}vy~h)MlZd6*lw`F`^Jc}wWuU%Vb^7Mmo7WeW0a+>Bhx4tKfCEc_hc=f-qirrfb% zdZ{Ci0t_Pq`BQ+liML4fU3m?3I`t#|l0S?-F05*WlQ zHUgG)WB1hZl!@Q#5In@0g{pv05}WsPNk^xa;^5O>x5|Uhvp0e^wIOU2TAeEcLzBb! 
zzZ?c~azM54D#x2C0`mHdCh)sl$r8;Pj&U_#oN@UrmxIv3$Djavin*fwiJ($55dMhA7GOKEIky=q4z4M_YYuE*&stsGw=B`L*_FCvp z7QE>|C0h_yJMFMCeD4|_H|YT*S@9pl>oqW&WE0|URW)i0>*)Z|;&SgIp!X97tj$sii*-pHA`NzvS;A*EKD8PKP#q=`lxQ=#g^SaI|fL7)Hm_s$nKvL!$Na3 zH}T?%UQ4+x)1BZ+aVLA2TY{8iFWrgm1|mXVKaS2(Bf^gjQ}2&yg}62avlN*TvJ;RL zf7O5Lqp-8}`@UpWw8B;U@QM1l>9Fpt;J7pBN-Rw9ti6ZF zuK}g)h|Fo^l1>SemV7e2tn!* z^HoO7OXWPwnk*(CCmxnJ4xLp$%|^!RQ7q9NEAso;52J%$RsHSc?;p?`r-o9JM#{%M zo>O{qEOq8q`m6Zxls!i1%VT}a#5@0FsMrK(I^Y>JWN$LlBsnh%79R07RmeOO9zbDq zAld2Olu!uILUHsU+3D@Ld%`X8lKMb=FgAn%!2_Lza@YOvweIa8){;d8Jd?)`Tj5Yf zwdQyGQ~Exbv(ySvE{b7izj+hPay9yU1H;j0>fA|nzhGie0|(w=nNw40!~|ffRMN+@ z!a8(o+vFhlp!FpVlkLP%xm&s9^)o?gZ@9nLZN&~i*WUNarC?S)HYD*6YqDAHPP9Ie z*x4-?hinV<)3Wz_TPDpz4lm|%zwlbO7W~vEO0tN838)1~=lRx&c;?0gFuzFcQjh}i z5lE>#-o9p&L*N%6KNf&}thsOM{s6@_igvLUJ1&yFIY? zz@{uIIW2&(NC|dQ_kdKzM*+nY3ME6LEApo~A*4xxcH!0d+<&~E6Q15pA0Qpu(2l|h+=k?j1 zOXyICLMsj}j=^sz z1i41N1}{E)erkZzYa0Mt*wGt&+T!wz_O=P>4WL&F#^8Qf`Vq<65L^cTohT9^3VlfMq_A7_6wg!=0P(siS}}Rbk02(H;)0w zZzhvDw0ZYqy^|>-Xsz7q*}t6@owrmsiI=1et(9g|7@`co`X{Xnj=jsh=Ax+sc;wDz z>3u|KBI7>4d6t(om)j4V4tF6+(0H408*=iCM>}L4d9gh7CTitq zrO>N+;kNb9z9I3f>y%Xnfd1?2&5rC)L;DW(SSA3y^r%2)C3@gSAV>w{-Ud zJ*k}A{;ku8sbQ2~Rb`1UAK41(DwU;q6?nurgVll=)F^){@={hK;$5Yy$OPuj`z23`b0>FM9FVWuc-Q*v z)1Xh;>$e4tkkeqaPzwuR1iG%<9(4;KpI4ZDHa-T1^62XXuQ>Z`LW~6EUA)X-#668> zb$?ZuV)YGs;`W^m>SZ*7oMF{JCb=lW2cB<0*G0bKOf)A&)87TRm=kNefK|K<{NTNo0J_w!WlRH}=IF_;%HTnw7h4FpNBLjJs?;=(W-U1HR*i#Rc^j7wp{@TF8%62#=<-zvm?9 zG>F3Me6laZH_i!J)GzF`jaG4H{}nX+mVsj1JhuCe9c<3ksL&&FY0GDTSJRaW25mF| zVg}R6+KEas=t^v1ck{t}*p@B}Sl55ms}MCD>l(8=9%0~99j^RApN&+wOd=q?ed4?} z;4iu*-e2?D?#$c%G-0ZKOj1^eNFh04p2qR=WlVbQbbPtcH%%LIo zREA5IdgZ4Bc0jv5$62GEQhi-?EM|UbdcLO-9IOr-&#bfwA2qf5CU^xoDOuJ&oYLiU z(7D<|<4}-?v7FmrcUf?W@P<)?(STq;Ou~f_jeTl{%t+VwYQ`3|>Phd05B!JEwi@EL zKO>CV$*Ma4hr$+`&8F8|h6qrwPSCNJipWUHXy?@k6K1Fo8S7*Ev{KbQ@`inXWFM_ zxI7~sfual_Gc>hF_*MJ!!Vq%|Sl#>Jm0^NVCV(He_c|@voa46qmYwE!al3RvKciRD zw0`${Lh%=^e*(K}b3(MBzkPQBg&i!V(7l?y+**IJO_+bCM`HBMUQUy|7#Au}5>!Z9 
zJPR>8D{%)+#cO!&#CwyezYco)*W$zOV}7@3L6j7YG4sHy2@PJ zdNxv~gOTDes^!HriEWCPwkn`2uQUjhbQUVNE9?%ad_VTbNICkRM8GzHdcngyp>)Y5 zX7ixB)Pi^=8rOL?;>&^&^88C?2;bNB4(BqLsR}4Cs1*`%`ze#S*`$7vXI0 zWrTR}bS{UAGbrp2frY?poja6{)!!3S7L(=xnv=lLt|P;BVERK{5B}MIY_T@M0V@O) zb0^L_y>PD5>D@i$4V$>I`G!q*4`TfPq56ID>qdp~S9N_+%bPE}o>Dti$3{7s{$7PO z4?}B>{2?q97G}TpH>-#6==renbrybh{_{|W;r;1eBA9wySY#3`4F9{lw)wAT2OWKR zZy(7%{QmYoe4Kj<`E?k7!fgghE8t+^>p!qnS_!gP&)8ONe4zu97P&Mn>s7jTvqB9LjAJKd*nYC-eR zmi)Ri@eVZg|0geQq~tK`sOn(5@)p|LILIT45+oElxM!r0Ry*^Y$S@9PcY7b2L&Fy@ zNN@g|(a1E3pq-hVYV==iTlkU3=z75%U8FsXJRGTKl4Pu>aj1gM2mNHbH595PufH|I z?ytP)$pC#bD8A^q>T)`rq8zZi8IH}fnm>vD=aR zw&v=5J}LO+W`(yaHBK?C4opB-XlH=a0&}8!ZJ92btGhpADX4eqk)6m)^FN3V2)xwo8DBC?L2%&gg6VKnGoCj&vFlA;$xk@Ba&z28M!6b5O5g@E(uq0zF0pobLDF3LOWG<%<H#pW+P5qg& zz`w9zq>T;UR^1HalRA zJmlQ2^h*gvw*vVTm7Kq4TURU=VwKl!?{EmpmwvybiJm$0Q2cNMR}pt7!;us2RCYC^ zKxSKHI^CV}E1~)Q{*v_Em5Q0?G4#D72{q?#PSa4x2AhAT8Vi?p{wLE}&b(vymCxpf z2PN!z!4-yu0{0f4c#{*>2ZuhqI%Ci603cl$(^=&qKxpA7UzqDwWsNr}ptp@hNsJ&; zj%)GB?Cc|SB~Ut+#HEy3ZSJ)}R$q(Y1UT;416G0O=UfCvHZ&^_R0{?xYP#)TD}+*T zBksVYT6 z5Quib8aXdcm=%Pn3Sy?*`VWnB3XcMzE1uz5;9T5l)QM~(c$Ah#m(od83+K&!Oy6#? zr0|%lXuZUr6cUD`RK5B2iAdy zj$(O}@uay?hh#}N_lil5g&%g7>BiNM^c+jIeq{aQjcvPaF4qj(vIzR021Gb7B`sv? 
zDVHYgvpx*cW2ktTIxHZAL%EaSD!E96tFe$e*{lqHQGtK@B2kcMXiA1&5KBp&%CFaJ z{&m`{NWhw*Wj^+nA&U>>xJB<>(@{tL#{{o-Pd_HGl}5qjJNH|o>gh}?E!UrC%{r3W zSXC*6-SQHFn1sARD}_!u3flGGYX#7S;4} z|A*i9<%>#YR}TURVM?Xq55PxrR?`jKoTJyOhj^Qa7-UAAQTLpm6!X8zGbpeAUa$>6Fk$8S>>8Ln5DGjGCpzJ0M3m38qI41o@@sr?k_$EPTn9IzgmXGiK}t&pS!Nx zcRDkF#6K?CUl>zS$d*2W0ZX>Rrgk5U+76k}q#GNr@y!1fn`2Sz9QyiIfZbz=A4I>Z zGxpvF(HEm-8iH_h+0i9nHax=kC)xb2TOK!96}~Lgu3!6s@Y+ab^=iw=BjM;Dxu44~ zpm#+>EYxL+QtD+S(ac;6g?hcykA7LH@&1EZ%$iyX$<3lo3}Ec{rd2XxngiJdPu`YL z@mzn0xXh4m?3pE-Pz#e4HP2)ges1s+Q%TC2t@NRl(7F}dU)0R6fwNe(f; zQO>b1JpKnQrWc|V(eG!9f|9!r}XX5&QtL)Nsj{4$>?S6@!nex%CL`*vFzpd%A%YX zfyf9r))K9CI^f2xD$Bc8WKDPpB1?K5(}RnWFvQoCW9#P&(bGUSkaz>WG4pP(WS1QL zJ$!73=E}0Nquu4l>jHm*e(U%jcL^dc2Yf#n5SgefRlfm9Ci6CdO=v_tb zdHeb{pj!w%yQS8vxk)(6BeM!GyG!TAUPq#HSc*hR4*3Z#9YC7zB|eApn3m2$hEHzN zN+mAY`IF=$cwSNZBXrq8`XhWfUiu?)xla1yvK4ek0pMO#(pqIsUOC|`pwny-=vtU3 zo=R82d6EkxKj5_gzDiR?hjgm1&Kk;)Kd0Dp^+Td<1%BxAWSZ2+a%;pSkyEXGiC&C# z51CuIljPS^0hv!Wf{UUYTX@E|7A)aQOWOM%fXIlNQ_)2nYCeWM*~wzn$#VQL5{u5W z#AYy0n|&(Gx7Fy02+Z)oZRE8ft74~w`+zgfho8EDT3fP$ZG|XoVwQN&$F<8?%jX-I z$qX70_OwOvCVOd`|dWb_76)tvgrJwpElU_4Fxw_*D-0LB{JU$9PJbHG-L1W;~sWdxR;gi z%IZ(%>R(!3In`>{w)#hI#j96xbMIXLB03r{ZY6O#@;ATD|GiA}ubaJ-nI(+%AUIcu zgmMl{Ot@Ky;#gVpFaZyp;JK#PZ}9K+&DtYEX{f!=rY`z3OZQb4IY;Zk{Gs6v>_a}Y zcN`UTm!2-Qhi&t3NHFf&+p@IM+HEkbefQNxmhH`ZNEWVwmgX$$Xtg9C*PmATuiRQx z-$}c4?BuS$o%@C9GZkO-ZnFO?iBW;=;#S5*EnG?hj3OV3c!J+QL^L#zUdebd?H2_5 z%drRKG{_a}1^yE=iYtwW;*JbcG7(@$a4>Fy-cKz$xAD z3U*qiGyfp`rEZHd^nuC(=DI$zyj0U3SN3q8lecj>|>Ow^Xcg+CUkw*ucWB;SD! z)f9AZ-U%IEX_+;V!Wp<)GO0I%;MjvE(}fAoy?p15Ndr9s%gDT>395EKi;cBLG5H8J z&c&y`GI)mF(Ny1MN^UTaXjErP8{t+N|EYOOJ$Dg>Gy?W{}EWd z@mtPKKvi`=vL)D^zmZE_jPkMiZHEwv_khUQfamW=I;g@WgoEDoN$k++@aDWoBVl4M zLFK{}R1 z^-}&&B{}w`!4jHV`@R0_^g+i6>feE4jJH3*Q^LLY=B22%i+n_O zZJsxf9gfRvhLMSPOB;cc@5r0e424LLoVaZxdxP{4~L`y-WIgfDbO!h0E?U;&5!A` z26{PSzY1eDs-IJ{qj$pq#!?k~BX3b7WPGJEDimW`?Bwy{=s^CSbiiIUQl|tCox|Q? 
zp@SI>EB|9Kzg8vm_<6<#S!5ha^5=UDwIr6(c6l;`84ZlvuN#J?F6Cb21E({e<=nZL?@nOSGoRE2xk;^BTvpn>Dtb|6WM9)}o$xy;r5 zC&&GJF2n1=q*!P1N7QzVZ!&WSEDz7<1s^*@p1#+t*zlrP4wxemkL}s(W&10=qc0Wv zbKe)hD@*UZO-xB5E3&B0-D&CS415G(IiBbC+7;-@vmm2PzOGrC{o{rjsa8c(#cK zCIv@jeEz6L%Rrly*yy?$7cCSzC9vI7%{cORQZFgYotwWg$%j@VYOP!lmVuTZ6M0`D zu!Vr?o)qj80}Zx!Nki8lnK~+tnuNcVi#=!cDGc-~`f1I$OD4gqq>|CT zl79ZsRc#WT)$qMgGNqun@uK`##P`QN-b6TM{^!ux*L(;C{C|96liyWl-zmbS`6MUV z#AiP!;1T#_CY4oYlbkbBmP$Tf(I6_c?k}1H$uL814kVIHOf}JJ1dD1y0w;Cyu=LT< zE~D|MW|@NS%%bZeC4N6P6(Q@IAs>>P79}6g@qVC9!@Z8a^iPjkc$2-#Jx@i=$~|Sx z%9xWaF%&@VZ4zP#vDYwT@&-OR&kmyjlx3I7SF z39k1+0@)+#~sPP)+CRh&jv{npWQ4O zB^iLa7F)xmTBpPPa>twUKGnu#>B!Ej&hp*FEJi}tUtS?{ml4LK-e{Mok0JLDh^cN+ zSJ`ebLWbo(8-tp!tSlSkz8$3KnIFc7@1~4%BiTi>R|Mfz03znAk@t{_+UMli|B8

iAd^WThgc2qT#PM7swq5(N-{FNf6J>A4yN*VaRdnV_vI35O2Uhcyo!|jh6G{ zz_dT@N6*1t@a11N2>yHfZH*av(FgmozKa4;SafQ-LOOmC`?` zd;d%Fd(D0AKzQCnV5b5EotPyE@n3aNxw^cz=y&*>4rA2x!uEH^N*CoCu;M9xp% zN);uyw3rEZr@wgmRra^L0$KliQ@N(Y01(vvU-}x|P;51*&Q_tYs2Rj($a$tld-eAG zvmr1l!whJEL|h`iDW~b5{FUDNf@_(}O9~dN4;zcF7I6|(q_JXCIf#Q)wMtLeR|0q#%bl7#*DYUQ(s$KQ2bz< zepYDFd=pq*$D@vC*@oQ0diBcW1tbBAk6-D$dS&zalKh<(9TeVHWcmco(`i1QOAPZL z!O4Ix#~sjKwW4_f2>|rtju8<6{%{Ap7qfU?b^?GNlGF9glji(S2|-e=bmu>eu0%Jf zR;JUT2orCmsFkI#^2aY57#7Wl9+~0WEA6bTi;}M~W{6T6JIWFj z&lzXbuz!JbtYF_FJ(Q;?f@SV}W$4odN$eJem4$u{;jDxhdd?o+tOVwwpiP?b))B$d zFRIKRyScFVv?M2Z$h)O~o~|Q6vF};Z#8)=W0<1_Q`+)<6E^1=TG`|~* z`=}>hAD;x96tTRqBX4*wDk9lag=^;x%i(xJEVqZoIF-8+luXnpSF=4FCYBr5#IYKq zqdCfc_oQr~yS)^0SDE+TeM?^oY{PI#^?x}lc!siGnQgl0&mlP#c#TArP4G>7FO?&r zUvGW0k#kl4gmru7hj**@*9fqdPhy_bo;lZX6-S|jT9QcQnJyl>sSy91V9IPO zzod}%VOxNtPHR3>?p7$@6eUPK?_= zJQQf+3hd>cZnFp@OH$I)6O5}w2nFT8`}sQy(8 zAuFQPd)cRrJ6|ia1Kmx*orRGtbtxzj9Q#v2mYq07Yu?IB6y=t}Jjq7=qksCPbRUci zV-;=|xO6yP-Xr^&7Pqu}zWM*tt7%E*>lMXUbVf%@3bF-+Y+Rtttw>FuGKMluxqe2b z_mhVSm+i%kh{w#>{Ugh?q97{{6YU7>wSQu|NRQaZy%HWM*Tia*LtqeS(Bmj`WCVXS zZXnS(#VV^wqz#6GbmpnZ_2wxhBtlgX1RjQ)2m0s0uc*{0T$z4)3r+A_*I#1aRNwC& z?lVXfcWor>A~q3YdpJ`dM<)wdV*HTd)ZYnr{yl=c(VT1f^!97w6Il$DJ;{`in^Y6Q z(cq>*65ix@`5T|(Ug?|c<6haDuH#<0o2}zsg&XjMAMpWRE`7jS(qFIG%*nbiN@9Bh zjHwZCDZpj88EEBuOS;ItLI@-76#qzT-3VBMpFy8n3U&1NfOiI_pS}e2>8do%>#$WJ zp-lKFp3FLNY1y9YF!VM@XN+3CZh2yf67N4&9#gtJI{F ziht65>M+{ROpMQ+{R1z#)|eVQ+gg*x=x~b-5l`xqZ*=9-d_O1oR8W(glTwOHa;Q#N z>B?pJ6ehjU(J1qz1(Q6f&-X`fUq@aeH)*{l9tM#n)@S*Fjzce%s6~F6I&c`NqEb|M zi(Lx(kFO~xH4{Hzvwo=Qv7dYY=jy(9b)p1Gyu=-zH358>G!U6|zO84DY5kpq5&`nZ9Y7Jl;BW`qInT@3C*`&3pg+~M?&9aim@xGf=VBbiwcsM^`} z&G9C0-?+5FxJd6Zo1wX53`Q~AAnLIx8lo%P&kxeLfN|F=Je_~DO{o;VAhOD)AoaQ! 
zaD+mhL_>CHYp@n7)KhoAxW~WI7N-@`2}`I+(g2O%-^bnM#LimZ#XveZ1lPKrXW07Q z&UriqZ2XXE33?8SIy_NNCeu}8P>g;u4-_BTRlC&@5lHLYPj?AWp{iexE z{Tq+9OOjN?H=!b$>oa4k5l!_t+k;=QM-DrnqCuxn?j(?udXvEf>+3UQX>rH|z0g`I zq?!~MM8#CwLWSKwt$Z!GMh89JBLHH4w?v&--<^I=3R%rVGc@u%=$1Kz4yH%pZ#xW`63G;=15ev*#FvQHp2)zP!LTT${@(v7P`VJfovG;h<T{E9x%7~ z%j%KJ=B#A)mzM7j%BWbbrUKP9dzs=((?g+un5#(1k`|v2Yw4tM-*@5m)C)cC?wAqy zIKFAO(FN9&hF#Zl5eQLdO1Ka7wqqSFlX{-qG22AJ9@}nXI=)roBO_b=e9LJHyn15{ zh2mng@XN)0b59ec+rnuBH&LIxzu%6WcAOX*)N3$3go(*1rXu|SPOpBw7^5L_;MN{E ztZcMP4a0X7(yQ(AY{HVB7SZ#hkR!5WfhD9et|{>FnU3eMSs_(rE43T>!hk#9UX;04 zia6d2DFYBO1a98)Uqf{V`07$h?OCJnPkE;X<0z4>C!431n8IL{k(6}#s*ytElF!_* z2rR7;&8AdR3fa!*kslc7j+Lm(Q*3OpUMznYO^= zY}JA{&z*Rfw2T0(Fiw;iv!G*(>BCx22>tN{tYD?#J15iG%mm)ea?($eW}%;5f*rr_ zrI-_3^K0;ve0WAN$&io;M+!z&S%m(~hKan}GiX+WhwQ3*zlGaBMtmi<6ul=B z%%Uc^rfIs|-|XVvM9W#LmhXYtP$S@%@TOnDg(i^|pe@75_27waEDK@vuhl|rCV`MG z46`4qmWr45l+Eq3W(UtIx}mVK4wv`w>T5OrapzU9GP?jF!%K38K3pxTc~W$7K@y=? z?R4Kihk~!sY{xnO7WwN{{%bK0TX?=T>WpdQeO{43Hruqk94}CzG=O#GH+vCLgOJgQ8B`Da zlERW?={K%up>g&l_}mh77L+K=s0b^}HiH`Y<{7@Btc~Qv5QXEMr9Rif&8OjuG5X~1 zs!xq8Fz~i@IbF7$s0VkbM21pAB``hK-mq)QjsYtoAvnVGpV8TDdJE=?Bmrjwl*5Gn zjrRsvyK8xAHtSRhqUn#SX@(a|e{yiPNYE-{Bob1Hag|C?)XqP!FsZ~sWV;ux4f&N` zY*$qzxS%mJ9ej*~)0ltQ#*`^=5!OauX+5it;9q{1=s`$6{jZ?xlO;Av;!Wv$E$7f- z6jV_JNdg-aD^%(Y3yTk2*kwiaA`FQzcVJvP%{3~VZ}qQUjkahY>8{Bze_d71S;a5^ z9c7=7uR!1IF45+vZ@J{@?)JOX@9!@6*&jNF=bZ!%> z4C@OI{9}H_s;H&(rw>L#`$d;y-++d$_kyv44is}o;}85QH%ZQ!DO)9aH0LBuAq1xs z2*f0k#JL~Ny1f^M+c5*?SB&vvQuF~Y5rS(Jb06a_V)xq#|M!;v-wY!%{M6?;$1qiW zj-wpvC})X%WNBV9xP^=|3Zwd5zgr`6c1vc$M;()2D+@p?PjhaW{&w`frR^UBg*p&b zO(K5V)&emF6rkau{1>OWOYig+*9&0(wV25kn0n?hYC3Ipo{Vv=9CKoB?YeY{(K>=m?an-;A>r7gTvUYtCuUe zpm`kzpa+rzk8zarKn@V%=AIs%bLpF(_`FLXHgQ56RY8jqGSq(WS?cnep@aU2hT>6D z`j|JSf7{~?H4H-}ro$mLdJf{I$vje9CIw=0NQ9%#oo9v0aH^-cuU&~Pn)90yVz{i# zsYv$NQXMckLtVF)eHoZj`+j{z@T-Y7P+SDZY9iFS!kw^hvbP6nL*2Ocd}$dHNA#*7 zt>#`tJsH7~gv0=?wPS<{VpYUIaY_wAvxO`8^me~0E#;Pd8MJC(hLd)CyjS%v3{E1R 
zQ2{q12i_I9%FJu%r=-wr(PALqK8AuP@@3v4x*}=`x(NCK{htzE`A^E=!sSPnu(;o| zclTSV7B6#tCilYZUWS?7b8@>L)If=3Q0WAC`Pk3qewW6JB(9r9YPXyMxRnUHqx{MO zeD4`Se`(9{m-3R3FljEx@ZIu_=0s8F^kvJ~Hh~twXa@4Ll&M+owy`-ATR2bl$eEHd1G68ZB{#loih)DJ3nQz zLTD*di0^WszuUVW`%v~4tjQN_FPuk zDk4tB-VN-gIuv^wwQl)_$E2FcY$j%TKon-IS3P2q%$>r~+q7L4@Ks0Z=+sGL&4q0a z-pnT*n4{%ctPAq1LFuC|;XJX@Ms(0Ehmo!P1$XqiR07)gTX&5$Hk#F_DGH1y^P6^`kOTIGgwda)=|j)To63X-f+fI zPv0{7?GY$u3wFC+HJa#HcgJB{rj>A=k$(a#k%`~>=~=xxFJ~~Py4?F&G{IOtuJ)fD zx+ILr3OSo8452eC0Vtaqm1y7L!UZ0iDeF@TJE>opEQR|_F)0W~*eH04S}lWxrCTkk z3H8as)Zy!&xX_VFZTc;Jl+yo+f7!!871amEj7LWx&|J+KP_b`2REti)36QNt^S$dM zZE^86svEbu&q?%_1$F-@W0r>2&FCAcf9Oo;HqBwiELa#g;{v7S)6|;N_f>RXNbRa~Zh_=Dv6sh*imkv8FZfpyn#bEp8C+|ap$8O2_cVp&B z60aI*+FzzHL2Pe-4F;Rr?C+Bg|20A+vV^tA` z&oJoqCJ`^*c-qfk0Z+ZbBW&We+xGA=NkRKVCoBgRQ*n+Eh<3oP?niV`bv1kpLM-Lg zGmR|LNIqO7;DM(zUHY@3YsRqa=z2BVWqN5XGbpd1;=G>1t?>rKREC!*~|)x{F5YL?m9td$`L)O{IoayJ*`jH z7W}4bkl~~A+^HZ0gMTHz5Fp%Wh?uO8!OLGQ4}*L#G)IYdW%=^hp`nVecH^89l11UDk7U*a7lPY0$m>I!*WS}nW_>);kP*es3d=`j1b-kk$CI1`CV zxDu|moCyHjVrA{GT_*7qnD)!&mpeo3(^ViB+d za>%4r3I>*zugp5|N2xT3LgW*J{8Ed6cf?$O`LSPTl-UKXJv#;I^oba10;<)CMZ;cs zQ}fexR~r1;s*8wu*tcEh7umLpgFf0v={br(~Xw)HC;=Qp+46oIU zV&~FLtcnf2*-G?iN=7!Y`D$xuusV9kI7h##PXiO~(v;p8ftr@zT&v0ui9A*gNNK%U z9@wqth{QxnFlv6CZL8Dg&ln}k9JuI#GKw4|Q4w-w6I1wTcI60kPV6*Yx$Mj^bu-g# zqzIg)TTL=sq?~5ELvK3J2#m=EXsEWx&??BkPmtB0RlnNOt}(-l447l9ytG#@a2;T@(AjGg7kyn2S8{8Ae;dZ$pDCE0K_r? z;u!#m41lBtK#BtoM1my$-(iUF^3)qW;wE0ZYmc8pdg%_DV}JyBSwQdo2eF!YEw9t8 zW>MC?>?V6@IEJmrIA?m>Wk5xxC~N9mhg@HVf~LZ~m@Yblf3kK79lB;E5syAK)lB8v zH}kGWUSU6(;2;ON=nkJN9j_sQ`1}3LFD*$CT->a0G2d+en<4o&%P2X9g_rwy@>8k( zep;hqZRE^Skb|apq~rYv)ZX4dhbF)AF<>>#ssbWpq@ij z4=nL8_C+tkHMatu($&Ig-=#hE=1Z~~NEW$+xVd-*zPW6S7=HbQz^7+&b1@Ek(=Q;EhponiBNC z5PHoA);M3X;0-YjY^Z(XF?`W5ed95G(Xf2uv3$|6edDox(Qtg@(?dN|Yy3O63zH%? 
z{$m?N5cAwnbh{UB!|AcN>yeWNiqrqGQ}m6 z!g~+h4HkAIYOM)AAob=tlb&&VyktZQa=Y@(DOGC`J2$+?j#-$oZ5g2UUUR~FQk2`w zxBkQ?&{;_r-K>NvC2+sq$!3YgyT--bxGIJMR=R}SwaOS3`48{z0cRZ}9r!G^DI;@# z3wO9riW2ju{L6T%P_q6(rih6s1al$rjqpaAAQwm7J@`4#2fo{l_^|z5cRWeO5;zWe zxOpT@ufV0wGQ@Oo6Gc5{?`3;>MKBpA*15m5H^`BB?u8Pda_MYf_#&G{C89}J9c*6YmBHUrT;=@iUcvU6O3kzBDd4K-URz3G;pWcWR zv~`l&z7|ih01~keN0#!3k+ms0VHgW|h9I*MNZ1~KQwNCO&+xLBtp4hHM8y?zyr4Su z2sItwqrzqJT>9_cFmHBQa?DE?Ni94&reIrisn?xg@|^sf$Gww43>fdad5O=C8(p?H zzi#$bumgS;c_nG!?(jwHd;a%nKc_S8zUw|0&CkNVn1)1TqAG>q@)@{qak4RcHsIk2 zi2Tp7|Ay@udp>l-J~)W_PC{GS*z?WPCGX8XxqjFULlfFj)dFQWzUUOzjVMSAJEf!_ zHxY9DN#RMsmV(3E&5L7p5$9V@OJVulfZ=2>`Ez>wXCR-XjI$U%p1ZXT(k3Ak?#1K| zQ=ViPgDs6>3gM4!I(l9!R3~###z0e$v_kUU+`{%B-njRg?0Cg5oa5a-(>%|HJ1px( z0pNY@14pPL8PBCs3(FqDuvs(Wf1m*=hBk|1TBr}pHp9=G9TNr{M8WiA;S6)d&wPWD z7w{n?}a1qC|Fw+LSkCqhWBh}ArH4U1V6hCmNe)FnW-^& zU#f`qelYM) z%keYBDrV(d{yTW)coi{35-H3}Yc z^L_J#m65euCVmEakyBbiMJ>CG($EvF_b2(QZj};I(@k7A&le4V~DLr)5G1m26n0foj6Khm~#O zv!+1L+_*_MKnvqNxvo>7#T3|C&umq0)yBHS0!PVdrXU&?aeCz^9Zd4HElF0=6v{=N zZd_2(#Zo-Nn?f#|Eb4ZxjDA{8hzU%Z@T@CvMaRFK&Ro0H1bFWHsj~dg!BR4e;)tTYDk4|7VdTePqR+cDoPJYKU>3!A*) zYq9OBF`Sku){}77#J2p3QxBLj1|L1^efQ-wF&mI1Hz4`QUX`Eu-5OY-#rHR3DkkuJ zecO@b)f!k_PQOp29-426@P{4Y>KLKZjDQK50GGF^nemJo78-HgARf&(fI8p)#8D0V z{E6Mox33t0Ytdobg18GkJv*YUWI&kOjxfzKFFI`F-%@V&3_y|M7U zv+%vO@V&S2y}9tcyYRie@V&qAy}|Ik!|=Vu@V&?Iy~+L`y}axfe%p-yp?;N{PZ&fq zH~g%(MqEXA*p2c{8s=oji#jSP|MT1CWR(B%z8UewbE|Lc{=8jK9IhWLVvn2j!1oZAaHF!cgEfGrhbLVBy~SQmNhBILW~E zYWp>1x+u6Cc0cNKAVYB|fpQ5G1)a-PIiV=e44v$%NDdc;yl+!3QQ8y$<{zagJ)%(K z-LPMB2L&RAm|qAjl~|nn%@=KvNt+~Le%Xeah$3`+(*$w1rzCJPDH)&lq(IjJ(uKP# zr9sp*a#{O93R-(YmX^9S=s)<%t-gf<7<+2Fy1Eo7ZX&+gsX+m;cmm~?iWFRL^XxPJ zN`}3?PDj{ZNch0?KCXfLi9q_n$&E7tbA?< z+W~1feleQzZob?p^C z6NV8C8O(&t3QB^l$B|spj3Eq-6t69tlmORT1*+T!3;}OnWO9dvXUrD*2`%&yTj+-% z8PUlrlS%oXXUy=uhf=y$s2y|^!(Oml;Tz4{lhuz*UBd~m*Jq?u=EHPT$fqfnGBM=GVGrEoetv4I`jWXa z&;iN9tnynlXPV}uq?hN1A^9SIFR9^s6Z^Sn>1$t*EbaP=yL!IKb*16cLsT-vTS{I1 
z?y&@mDJf#gI7|k@;G>yv=g&=|y9eds`;g2yHW_M*HGLGWsc!m&WIO8Jn<7I-S%%tz zS<;k0?|ANsnKhdEHr114h5YhL!!22hWpMwA`Y-1I3Jm46taGNWfkYGs*TN_u%43+( zX}T7s9fn_D@t`36xozX;k`={q0t|uVAXCS_(Ain1yzE>DNo>7Q968^pmvQUdfyFXl#sb&Dh)O>-Z zN2wrujL;EN4gd2l2tL2#>zKH;`f1G}6bM;n=)6)E@qt;skWGRA9cy9is*;|GV?4-@ zwkY!!$5Vk&ZP9MM1l2m0oHHiz6vXSZ++`Asp!M;r>GNL{=aRtH+cOZl{pdqz9Z1RfBKwKyBSE# z%S~(V(Sj90-{0I*vxHCW32)U_lK$r%frWGDniJmk8tOpS&iJm+LuPP>^v#QIp#uyp z{j{oKGkDnA;IL+?4m2#MwhWk=L$~niv!O~l!1XnE_mxZL;F)Q2sX~^9PqCGh4jo|G$~mK2X91IM*&c0*(1p002hMOU`|sRh663oz zidll9_|~Y&E4qtwkAF`#svp;Z%d_s>5uYr;zxY;q{d*nI>5uaK@YVukBirw?80$c^ z5PwMi4-3$#tDtNW(*aJtUFzp{NVk$0t-0VBqHQgs|BG*9*nnG)MessAvRuuIh6CQK5IeHhBsFGIk+E~ zL5kRg7pItY0IyG^V&?_^++lxnyN(aCOSa9WO&L1y<584nd8ZlFPm+zCgh>CxpX1kF zExxz`)u?NZ-`BdS0}I#g(D#ywkJSa1+duc#?lptrJuI5-OGxmBo!WA)^uJ>XcIZq5 z-7|v~+7j2-cIqzPtFDiztfj3B!Dya?7tKIa|C?7~u@1q^ebWPM;fSd~N3L^W(Si>Amb!~a>wknw(w8}p{%5w*wc!B+#&xp}NE zYCRoPnO|&uoMHe^!|DFcUGUG*QC{NQ=s)L#oje%lau`LwwPXfcl|GK~N;#@52btaC@ z8N<5~8dpa#1w2Xb9%o9SI+E+;2~~t+OZsisj`*3t%DWjuU59_?-T1DjwI)!0hNLU{ zgABf@!AWA5#Q{rY}>rX6!JxFMkWrD5zZZ}BRn<%Ccd@bntjN~pP59Q z(e)vPgpQ%_0DRw0XZ~7Y3cN~;7_9`vKeg5npPn;?(bhcr3Y11^ld82fQPj;Ui_3ZtBPdHWBon@OX#4O!=l-}#c)yggiF#c zX;d|~SW_5KMsO}bw zfBM?!-~2!zxFNqd^xH2q{54U{V-A@19@Tq20CD6bi%3hk|nT*DrLVP(uV5C zKlkqEwt^V;0NFkBp(-(5=L6$aP!QNH;N33AJy!-fypIShzU)Nc9%?i4t zlJ~Kl)mmJCgRaKCf)p(f?UWf*LGozst@PbCS}^zQSl!V&H2%R<(eW#qz&@ZeAysMx z;qIR$)`V*U9Ls*G8)gONir>6cg)|{1Qqz8$p%n_7h9!?__%v(x490oT1{=Z!Cd9*<)tOtq#Fg*LA+^ zwFJ(sHF3S4)WGval;?brC5+a(9k?*12K}!4IhI*lg5}2+dFC87C>G`q$s}7sL`cjk zt95E1iez(1lykO^j~NfCf+ez7MYjbw&iCj!zfuLL==Vu4v4DJ;hf7|ZR0W-Goe7mw z7Er3VJbWuj6^JX!GK~UJ&SSdpg7KLM(V~t$Cxnsg*Vnsmxhlx=@`tdtnM3@Tzpq>^ z5xT#t)()rt*T0I*tcrvKP);FVyOQFj3JdYo4 z%=tG~exDzGY3s7dw*j#9+B2@=R+Yu`HG0*K`EDm-y`t;9;wJw)-lMqFCz^)?z>>!x z>WeH9!(Sh&5UkySJ)b?~)j>tL@GXP6 zE5RMEIrw@C-%#IGa#^J;h{p{af3d|Kx?X7b9~YMaV*j%IZ<+2eH1P0f)VLII#z62rOLvGluUF)7 zR|>*94&Mk|>%O@5f^|IMk6*Zf?aVdnL8=tY3tIPg-f@G>7Y~nCT1x|d{YWLA1OgN9 
zZ#|Ch`*>lct1E!Y3hi&V*1^zV+5MV8cX*svpgI?#@kfvFSC4o-;Pr&pBVNz=dCZ-9 z8t`qC8;HvHM;teog90XIZ+%TSARdXMag*gh16F9BFm!{_&edew$MP_RdDv1|-5};9 zU#v}+JZy9SGN!h~4ZeOZH&7{(1wUiclCnWp=q(BO>U>cOf+8Wfr_B|rA8Poo`yc^P z+K=z*-FF3@_$wFp>=cJ6KI{JRSyzy~H1;MmL>vZR4Al>FBR!JenhGBg1(Ry?rT`%~ zz*@zIUtSdgw}S`Y3Xt8P`hnU^*k%FXXqihM+TaFU-xL}GXZT?kd4=Y(RJ0!Nj?C#- zyznmKl&Vm)8%#KNWYP@RLm}7pxNC88tpaU{rBF z#i+>@FtX1?>q!x~dhR^CVKG|&+7Ekg+!6)*=hUS)4!eTr+^*I%Hr z5k-wlqLA$RW$bX4D-f7Vn9HWbKx6c(&*oFEFz0sTedT}z^r&lPM>M-ao$!t`9q*;! zMfWn^ykRu{o=xh!r~-Ryv3FH=hbx#S-k)bHl!Nq!#-rJtt`MoUZg4179-5*Dj+0x^ z{c_nus?I1t;~~EThmIln7<3l*h9SM^SMYF zzF!>K+I7tpXb*T_Cn*u(wASOhM^Cr{{d#9W+*2Yr|6&^azReZ7wvLYlWUIo0Z+m*P zXtGDC0b}IqidC2v21&8=&>tDrwxatW}4R%sUR7ZlG95mUg*?sY3xv zaqgX0t`G;t{kK&#Ai!6}C6vu=F*YojmqprZJgB`sCorwaQg9M>7| zcY`?&%XJ?GP+@s3u`;Xx9d~NK+x}4%x|SQl@vm@)CW=Ry!@2pq<+^dcbYbHjLHVb!@BMU|Lh--jhmAu(sX1inG%V<~N<( zWN=#pw(MQc*(2Z%l?S9<^qVzc2+5~4+@X9@Zu;9Rb=c~6sJLOdI}p3cM(<9jL!SQQ zyNs_02hS(GeMMD=?#zR4SD3g%?#8~}#&c@ml>YI8QLH=6F$_x#;#5K2%CzM0DR(HO zXSvrD5TWPORUhVVcfh=5CKMj3Ec#N!in9j$HYop-PmxP`FZ5oD4mzb8yvAh_qZV|P z6($%@aMu5y-b;8r=P@ePlEcznd;Cmvt%_>0G9JbvTx z{g97}3#G^vbgy05-ryz$u}vPfAFEw~uRP%E&xnyB#y>)*f z$}g>(-kO#h%EQZ{T~Q~KU4eKp-RYev;zhrhyjvn&K}3AIzPeHg-Wr|Tq!r`}{WsVi zHS!@Yewypiesfn~Kks(n<_99EO=j%#7Df8@G%WGiOa%43!tJMeTp(E5CF%BN6-Z1x z__pDa3!Dh>_4Gcb0+!#Iyw|3=fa@iq@|9&om@+agkt}h62)QtwuO_Ns!enr-E!PF| zH}v(EYpFr5S4a8@KNpC3I=J;8DxS&M1sc5C@4kDk z4vB4{zIp3iU|QqhlBNm`Sn##sJl_S`$GY9S~S!-QS(;0(4C;nf7Pe(0bOq$@`QGsO~zsslQSOSi!X9SQ*lf z(wOB@IbC2sbDsTUn+tHw##TBJNRYAdlc9JdcdZpm3_+%@d+lr*B@T$!J}0?;S-Lo za6;R1-7G~PN|rre;=R*#G1hv|n;SJ}x&q@-!dtT@J=n4NlyoIN509T3vWFt#JJk@)yoI zd)tO#Z6LNBzClLk|4>NYr_QAV_N7|{l2AOAn=B7cmDGk8!~AlbD4rFfSTv`nv_SEi z*z1r7u5df1uh&di3-TT6<39aFeD8fNpK7BfhzEA0m-D-U=et@yu1rmEQkj=1K>W;B z{YY;9GtGZ+7mv$$JV6lSv+&Ka1d_>a#I`22y-FZld}&rkULl_hBHX>=ah ziET;#mfw3mCkkiHhTLFncV~dyWi7C@e$$&M><*!QY>zHIL~*xy<85vy_eI^er`qj! 
zkMuWM-hF)SH!W~n^L)wXDs(>MvxL(qXBb~tg)M8+~LiPzi%?iBYiWU zEWJbAfe|-8_Ijl{c$O5-+8%QUOVX>=E0?Ijz*Tc2`5vUt<;nZfYKRaT3&BR*9*}0c z;pavkB6R%7*e9;z0TBdE|8wcei@qc~so^jBTpqmd&#^wR^nh~e&qHBOGSJzC_{(|^ zpbtJgN@0P$jbN5AG6=?CW zeWQl_<$Jr&##nJsbE}UVqM_?i9YuR8@lXBvdv6-~eZgzRq<>+VrCzt2CM=V0>t{IKvB zoSl{oh%S|r?U67BB9g}p8Nj>Qt#S7p4K827*gP~DU^%z@$7BZ$?gy}sWoa?sW6Nkf zA(94Z$G0w9Dvj<>92dIcNQ3F|ELA5_2H0^Z-4QpWLBgwR=`1&*pwe2hwRL9h&&-MC%&Nz+B z;TRR>3{%X+r+h%4*YN4SWGW>0?HM92XF!73+HYRgRG3T)h&jy40A)wI(nb|@eqSTs z_$MF8-NDBCU78B>98T*l-u3~O6D{7YqEyhg*Wzxj@PX&0gbt<=3Y^}!HYnE zv)@x7t;5_%yD&F(uOF|y(*LW zgx!DbJ;&~p=Vjvqml+NFuYI6^{ylS}HbWnHYEv;2^_2pp7yLMKm3+YZNgjQ01r_RE z%q8<1`GA_N%?1q;HIvMIqoz`1>2}#b3Lk_U-q9xd=Aa52|FiqF3oQul!$I|KHze@pi)75pQR_AK?82 z??-q)YkpX+--Y~s=Sz-Lu2(4_zwQH}0nP1EREP~DfU-DOF6Q>{V_jtEa?Z^d7iU1> z>{L(20WzpPERUO7%7D5(u|uyfEQzDv?gFfaq>j~V-PF1x^Mryt67OU$6~kjn_~v@aPF!R|r1&RAZu_4$XzS%1<&}Aq-v5tz`x>aPEC?v@5hfvoH_dYX;)O{Ab0l zx-PEem$U1oZp3+f>El8&56u9pHaGg9;|2#TQXOTR&7j*QD@ZZX4az_B*V9g#!MMwo zct(~R)Y)HRb0?btafq5^`5x7}w~Ck&mzx31?3}$TgSf(M`UL|CGZVGf==h0>PWADUiE^!{K2y)UYIWW+UkcE?b6AnaCTWH^OkpB5RqBhaMl%#kItV)=XY#( zYh1s<6e78UWPZ4Nfwz;kprErUaBc5M|D@yv&-KTPs#Q%P`8wC5<-a^Z*gbODx&ae- ztN4EPwjxhBNE#Q~Q(*$%Brl$>_VR?7#E=K68%&^Va#z%-fF~4fOeom3*95v7%#Gg6 zdVpk8&jeSx31|(M-M4t^0h8(EE$Ji^n6X%)+3?N-xE#74c`Y*m%ksk6^A2}q}TsDTv!w28`6nH>xrT^i?aAS~{dB6II4H~aANqZLs;i2wP z_j42O;Jzosro_M)28~zLnxwb`YeRIIDz`D5svFtbY~a2an|-=k+?&PR!EavRGHsd$ z&P$oS=O4R42Rbf;WJcBb6x*$CpxEo_F+w&5+Y#jdT5b@hCw*|%(HNS31j;bKbcOL8 z-t}@+W7ziy*~=5v*c#(SeLlwUP(&);bDb-6Ao-=IF^JOluIiX{fukc`wXe&KL5udL z_taw-kbENk@wKoC>>a*=)$Xt;6shFGIB1o0}}sfsj( zSCQ*Ezh*mw{j((PmR?gheA%O`Qp*_v_vTdeAb+Z~JyiUP;0!;-H>lNH(gBZ82(tVU zv-gT_5SF&zlo{qe#YUKa@May!8OO$XM^pU74G>m4l9jyAz_Be+jOfla9-&?zG_w%hE`bjUvqYbhzkX()yyP- z^-_dolFk4@g7G#{B*;{(Df6ZgF=ybBPu_Rv zq&CEyy7P`ux;lBEt(6+W9aC|^4o7f&*CPLYY>1E)BP&#`<_M!1 zH{J)2s6bWIGTwVU==iU1Uwk`MAir&>zF>(Xw0%FK*lVl|uC;2@A6BCCZEj1wt55(s z(X_;VwId8oJUptwEDzF2g|q6j4ls9fHMvz#7C4GVwvNp>fatNi&Er=jLHbSWWcw2b z7$V-55UjybtM)MIphI{OD+dMR 
z`W7)~?19+P`&fNm0lMr@sRkF?gKONr-7NY_h>vsG6aDO=y5nxM)^`G($pR} zlNh(Jd{BiD{>OJyHS9r9LF1maraDOKx~ktsa+jx4cEBbLFmL5Q>n>{#9fr%pB|J2N zX^(D|_B}fw48>NC+|&RNi1;aR+zyU@O1qIiqyfeIwg~Jsw}U4ud-eL)X@N6;#BP&~ zb|BG_GaI-?3%cIw-}7;{14mTTKkw0k1?=Oh>^?4eQ5!}-S4(}+w1ep$qVlAN4v62{ z6?MDW4idG!WQ>t~oYs-P>WUql+2H(vOIsI8FE7jIKV%2kxgXzN*Xe?=^{I;w4%h*x zk6FIoMFQIs9$oJlcCfW1RrK{F3C!~nE2GcY!JaocvwA=DKweeYanw~XmQ zoWdK|I<$T>?F%pDXY`@n?=q#w(;gC4d-Z0%8o>(oYv&55chLBuw_{O)R z_7Kg){>b*C5imV+%uDaG2ijr(!k6&wuka18`Ki& z076~{%KQqq2gw#sk8`qQP_|)>^~tn{LAQqeeLiH64gGjw%Otu!sn+Twkl`C^idnn1 z14K)wI-YSLgY=~Sz5J~Xi@73dn{1KabqBcrxJy%0jtnzqDm|l@96&9&llLA5Q0mAU z>)q)9mr33B^gRG9uBR><-*W)>4)s0b1^}YrA5~n69H76jh)L82KrL`Rr}+g3pdCB8 zX?3Cz#6Q0JLjI!z7%42t|N7Dp6mz*A37a}X;9SVDIb}nbKGS$KLD&&`)wf3VUN?ZD zqvlQJOHurN>;0D>Tni9U;O!OeZNsAFfa9-}|-45x7!VG{?vFU}u#_SIlEa z(4Qe2*?Q^0pnSw`bp zXl8N-+qHM)#iH~e>-8tgS~i5IUrN~nhxFj*GybzTl%1jBBICCHI(>Mt?BZ!b4QGgX znKOIBQV+D6AIQgQI>QTZ<9;VTJ(zb=aVZOT21!eed-8Q8nCl*@e|-eytJ%S=Dnlgb zKy}FbPG=a6QOagU_s{59mM_WV0z{qUeaD`XAYHgw>lCXCydfp;8{DD?ERU&6^Tttr zQM>RW>y92M*EaI0i@HGZgW(HDhmrsCrIj;KZi@k!IJjOqAms|98(*!i@79M`wPLTw zSGj^{e}jYWM*V;Gl7Ig^Lm({C`ORAva?`@T`$Fzs$aTMYi{xM4g3s&tJ$d||J$_H0 z#Qc1T^->pbO|ZG7uBr(_UQAlFV3)o@4}02Wi2rB=t#f# z+!;#k_n7tG(t-$it?b4V&S0@YDSPdGEvU3~y_ALcU35mOXhwh*v`Qa!k3oEntEfO# z@Tn$n?K${Xe;dl>qeEi|#6Cc0$_{PUS9v@gM5-YcO zJA?LDve6WWKG2<2T$uMdFU~av)fHHkGsI1?Ju2qX2Sw`MRoPwW`eK_)f#`hv{Lzwv z&|lY|ca#5O>jKs-VB3i)qOx}Af1WX3|5p!qT*KoY9vAVriN{qu?&5J7kK1@$$KyUe zFW~bAKCj^O&QFQQcguVo7uPA4}R@s_t)KUf;iVOosD1r%^OJN8wdm@+23}`z1r_H5^lZ7aTap6_)$4ow(S2r zLrwW>CwxA~=X*RK!1Dz>pTP4CJRiaH6+EB8^Bp`N!t*6OpThGkJRigJH9Vig^F2Ht z#PdZwpUg-(&mP!s3&s1_n!Nm^4I#qk-dnfXf(WW#U!BzfcPj|)KVl1(?@HO9hUx+< z(N%p)%oYeyO4(KVy1+GKM)(=TfWnHL(HLcR91^$d5~}luU)S*udPH$>cZN4S@EXX)tD)Ep+X0{$S){0IjZ< zDVx{XLZR?odGAaE`1W;w+b@bOyheOrD8>Mi7I%jQu>Tjkklo+QUjon-MoB3RF@L6}xac0A~pC)z7Ja`Tg&o?hvm0dA3Xc`i{I) zRO?0CFO01(wsxnX;eURIF#d}x=Qxi(C!Vy0g65>F;Z+m}+bCP~?6xiNa-S5*J5Par z=H@HHUu=Q4cTg_#5Cuxf9V^bW+X3TzPxu!!Z%JgACE9ji$gx3fO(g}YZw%FETG~Ot 
z#+(Y*9166rdwf^j7u9uc2?d`!DL_Yd*|y&fZf_>OGQUTGe@r0M89>(6xg}*7glO(4`p>#IRQ-+5S&>Pel^Y>g3$cG)KXye zPb<&5MpQS)mg+ax{%c3Po$-Ev_Y=Gy;r$Hnhj>55`!U|n@o|8U6MP)u;|w2%_&CMK zF+R@mIDp3qJdWUT29HB{oWkQ69_R2lh{s7hj+V1{aQ}L34Fdhc7Z?qOkhV5deBU!` znB#S@mGL!#`R}5o*)i5|?0)xg>ogx%^GIvlC%qU z0XXb@l;R$14Wh4Kwp8B&ScuiEdwH#gpCWts?dNz9WDR~K?ZRv7WFV@4S}|W>4RIY` zvo(>u8InWuNkTRdBsng0x`Yh5(=(ev~zzWSbGWIRP3BIt?5j#SqS&YVkG|p+WnKd5Ilo5zal!Yi*gOLd|sb=`MdeC=G02(^^4; z451aZn3o+CJoNMkxkZK1orT+_P9YpbvU4XD5+%-U(wVY@v;^{&JEst?8GPBOyU891 z>`Hgku2Mm<)WbF^)*dpLvN9Th3XYMoLa|v0XAar3lvq(0_y4BNBA4wZQsJX^eVlNu zJ=pT~rPclX7pL$zhQ~QP4&reVkE3{;#p5s@r|~$B$9a4nz~>2k9>M1sd>+E*DSRHo z`yX=25ah?i-+sJcy9L`2Qgy3NaXG^Hn)dH1WyXK}SmLiA&+uqv=iRV_5Vp@Q+~UTt zt%169{GJ_XtWq+Tb~j$sTmO=4?=(Y=p?u><6;~g72-p*1BLv1^`7-c~gv7_k`zeHU^SGeVpnT;tfY?t#XWv7uS+9^RlHw(HKTieO1cl z2!<^d<^!vYq0&sn=4ufz9G4>d96@zT13JHA zH4UQ9t4$xIqPUC9QFtCk1606#xiD)79in|{<~209&3)9JALZ9=lQb^hL+JW1)u&$; z*n!01TC2=v8iaLc>^p+;cK}KH;P$o1o;d$NF0%>w*`IgMJ7ZoSwFUD?>Ys3~pJ|=ZvpgKpmPl^&|~ORa-B9_-+9od%tGyJwpRdihi1N znk7hw&qR~Y(m>Yv-DCUhmXLgzq+35mgJt!Xn9sklgcMKu`EInXxX=4G@3Xa9oGZI? zb

DD_ELJ{+upo3=G4-(Q_QuAf!-3+;j!$aosYvQax)R9T+^TUPyz;1kRadE!MF6 z?dDM45*nzAQd{OHt)V%K_elQ%8gSb^YaUUtfvI4(#$i?(I8@%Vwr;k8L((D-^SBkf zs<&wGzf1v)6gc`=+X`0GBbW6xQ-FH=z!{5kmSA|pqWuOl6(V+<_?y~TLeSeY&r(-W zA^${h(z^AQusU~wR5MEf4a@4F*qauxTcU;-{2k%p^C-`VAPcCOpXwnY{je{2@P6it zInWW$-FB1$`)h4UQiJAD#-e0AkMKTDI)m0cX%1NiKa>Y|P=P3`5c@IB97=QS6ID-B zA*KGnnU3@3&}H;P`RP0r1hbWSyIsw}Hgsm%iUs+{iIOSlt>%#Oqwbw*1Px5vf9~!1 zU=D%_r44s%(eYCyQ?}j~(9rGKcYOy9o@k$N;#y(>hi+D_$!MWLu7AFOZ@)PNCY3ha zMmRPt`e}tL8c)19EmC-b1_Aeu)tLsHLxE-YBZq7n;NuB}SSYHe{yYn;yIYy}`wElA z+J2#yXARG(p!WTzp0fNKm-u(F@$YKm-{r=?>y3XG9RIF3{#|nXyXN?J(edx9Yjx^bKSc8Mm(!WUf+i5396RKR;;11hIB8PT1d?~Ku`ZFM!h)`X z!g3TkuTUXwt4s9BK$AsX#h7_lecE9H{k(?G-_B8?ZrRo1yz3^w+ho!HI-CleTdy1V ztu_S>sha8+-c;!B-M6_O(0o^HP&?&^uD9{7sz&;n>;B5U`X&{M*>|a5WTV4MnVRZk z#QXY}4$bdXqJy{1N|-67g3nubZU)l-m5(n?N&>0C`~0nzx*Z+5awo1I6Q_b?+CA%) zk#v|pP3ZU`i{|hA{Y~0YI^=VoJaMd@0AH|JZWOpHrx1H-%p9EOoWzo2|BVxT9O2^(ABXrj#m6x|&ha>a z#|b=+;Bf|Df8gsAeEov2Z}9aGzCOa&Px$%@Uw`5Ex$yg7_1Xx z<0WY+#17oIfoA>=8@ezg_h|(s z&Jq}$={M|WN#JQ+9rV-C67=i4k6)0~gRZbgDJA_D!08yKGc`kkQOQp$Qami+@|(^8 z&R7yCR;}w4YBC4L746{J2PC+-;Ze$419PZcKABvTs|SafmbsPxe(vvPLTxFETQvT47Ye*0 zcRw-^qd*Kjyy*<8V|dGo5=w(9K%2c%d=S+!qZgvfnrtW_8>!42vli7=m$(Pqx1jS? 
z_jBaAQDJ6MK!faoj&lc&zHUZ&tU9{v43a0WM|mna)1c&V+NY|66tJ-mZ<0DjgJgso ziK+kgEW!6Q!S_7D_e8<>Ou_e5!S{IjfAq2qLw|LDzEj1-A9AGK_#e#O=T1N0eBwWK zI?rD_yZ9lw zNR&y09B%^dmwLkM2LH_$%hxKzs;iqo+PgE)P7!DjVU;hy+HDN1&+}TNMUlKqnb&2= z7+92SMjE|ou&|a%RM+1-hLfHuOF-Xnr8xzSk)TQ-88U0x+Cw}T@zq1 z68OJvph05UhlsD|Oy#2dh7Zpmc_*%R+Fol#?n?pC&|C_&%3d0fK|MPs`#6ALrd8IU%XtHdy zs=m?y zNdX(LeS!*L10iR<)oel3X62Q=+R9)fSYaEShORfA$m|v%Lg4q5FptN!&^vvU^yD@X zE|`}(zaiMc(67g5Dz2-7k4nSKN+ipUrO1W-RE71tkrK9&s4l0H>0?9cK;GOEvx?si z<{kvG#jt9CM7xuo0?iI06x?!*Pirjdideasy!V?X*hCY8cFNd6e0tR={{d~#6|Tq{ z`ECmYm%-fwS=ulVNqFmunBIb}mn|>Y6B%y{^r7PDH_BS@%X;vZU6?HdpYGx~tECBI z%#79N$msgH6uDw=O&IGh){Wsu>&g8Z6mVA^UNy!JExm68T|uOk?9WtTo5p9CaUUDt zT`MAeOjs2rvqbDFDy)Igvz_;Gw<-e4wqn=_OAusINq5l)I(8^Ke>4K*>T<`5hjQfhd>2zaKxW`Fla$4_l_)NC^XqT|_T%qz@6 zScbb(3&|c`CU<_zt_G2VTRs@VeGS8>o0ggb*Zzsj+~bDe`mL+hZp;jVMN{PjGYl7X z{ok#22m23d5=W4CQFPMSO&*EtLpZc(_&!T-*rx~cKM-B=c z*9TXNRL7<(X5e}(I`+ANJ}4BFHW;>{>+hYZGKkiL$;{6Du6t&XHB}s)=uCpS&Ytj; zcV-}Shc-OsfNGx8v5c%~G@oVCjQn&RNG#`Ff9;bQFgB;PST50lQJT#L_6KHAjJ*{2 zn5+$RQ+cf%?PgGI+_{-EK^wlYls{u-Hire@dCd9B#K{oVf07jz`5oFYc{T0RR#g9G zQHk1I_qCyIw}@TbL1Yj1TX~%OwV~`<+9$;Zq%XahwuxA6;D126)-i1kzbH8b#ugp0 z)$il{_!-$JGdk8fRTt7vk+(=oT0kp3hfsH07e?ijj5mfN|G32wb2N_xQTL0Of(}~% zuLeCKZ8wrbyC2oxw}2P{Mkk41502}eJVAYf;s)ox$fYAumG`F~Pq0zv=67qgAe=NV zbnvtZ=^RQ-KUxiDJdq~ zx%^Rm)9_a7@Od)yZcQk7X=VobgbbRrE#hOx2jzwY&44#F`=uwUS0+=5ep0GPu8s2a zT!QkuTS1`47dqn6t8!#5si=-bcrinV(Up6wp0d+`@%z1(4*H^Q-~so5+Zj5f{Ql0^ zQ`j#@A*?xsGd3LSJ)&(2c5|5mwON&(7D{=&Gp$7>Q0?oR)X@U?Y7i+ z=f_Ne7!#ayMigD2OWrbh#}wENzrQIIq(c1KtPGxXQ=sdAe-mbok`nspl2t)^( zBrD`iVaP6FVyMyxVlqqhgWS>lymJUuO@`2Wkd5_}Ihy~t)66-BA;g#1vq&TUL9i5& zjw~=(^u-fG(hNswgbV)k^9l7PuxuuXjkQW2oOs(~-y!}*T$d`>uBQ)kUuj(NjV6%Q zm{&xK(}$=m@|F!9CJ;eOn3yitho?*S?6R9f{HXV(0Q-bKL=1U)h>4-~)rjAE->(m< zrnhc#>X^c(NyUP#s|_GlC5B3+m;zjF`g%dZ0IGN8R2(Fl!r9d|N$Jf7U}#qm*r8|& zli!2bY*`IqZcSDORSW6IEMX$|sR7s=jU6(ent~{mo^Wo|0J;v5x7ax&drC7pO)?FD z^<-^l-!{ar3@R*4LJWZEQp=!sr73VdI#acDg#p+ajSJb_G=)(!5$TO029VzxEq!4e 
z>2pO*Qd5OK6tnKxC9;YRMCI-?k*Hpz2UTnvTSJFB(I3orlJuZr^lNsz9+DsJwlC-- z!Q<@>`^8ky`R;6eX~#(*Y;o%*2-1Pk@lqg7jRZl8hEI)=+#1)SE;pqMjiJM@-G3r` zd@`s|*3^Zaa~u*Yr;xvPJI%BV=s@odH^U8sXnmR@(jS6!Kx1!?Le&qn9;Q^e`^^Xk zPw=jnMc4N}RV>&jsSWA|HY1LlC|(|B`VZG?!PMD_>+dI#e{M)?;VjjHYLz}tJv9FX zEXqdn~XjO9tUM;!C>Y+A!KT8tg3mknyoAqN_`HVCd-%MF&ztzXiqE_Fyo|?h1nE%=e5g1e z=cJ6Z;-W4WbWo4ki&ZL_Gp{=RliIx&f`ghy=y+Ha^Ol`0CR)RsV zg1~*tk)If8uU;{v2$JV`*MGM`_WA28`E5l^1-p>_Q)_PxUseRGzN$61PoTX2nwD&8 zpa?Oxsg7@l%)u_b_SV{CN|5X*eQ+161(c_$cXN6Xq1ESy@&&R5z$w~r$mm#rj#+H2$XRt%hg^G?s)hKDQ>}Q&8x7dG zSL7i<)&fRt$aJ}SEfB4nxIQjt0lo9@r?whuLv>=qemy~pfA|-2=`9OAr9dGetMQ9C z)IG3hH`Yd&_f;$M-ZvslB@_J!N%a5eDYbCyp5IFBKEmT-+0MY# zfmEUk5xOq5cmtAM>Ng=}9tquDVnh+nTZ|iDPC&CSZ_V4_gQGQ7L8Z^&DgabTT z8SySCKP8aIh$hOg_hO5;t)nUId2`0*F;!`CZbMeXZtn^e!1Y|(Cn6c;)0VD^W=s}( zkH#{L{7fM`iZ|WYS{9zN32NNQK=~|>+abVF8dx{b&nLy8wH@Ym=;xJ&<)!wCiVT#` zf@*J_eI*I!nn>4tq7c5nVScf3Kms0dTnl-=6_6BXgs-!@qtcBIJ5lPyQ^hrycddk zZR@09luzl-o>r6(nAT3(KbM9MBy*taFY-w*VbEn^)f~sEpU)8g`TK3ZjO&CBuBWCT zqS;N~J0J(P2SOh7)tf@(NF76eg**r=7&;d|ME8pt6ghia4!X`wTxXJ^LsLww^U*kY zi1J?vR82Z?OQg1os49T+z8nS92s&^nNiRt}uLz~FZiYOjbP(b1OE}P}1n1(HCsbJI&;=5#gwz&4x)YgfljBKf3l1VP@FV<1pfbyvRSEd?CW#mDk>F zw4_71`9!A4OCpRq6$HxZ(*f^4$fcr@AO7iyxZoQR0rc})FHIJ6CqK%C&u#wWhoXP| z5T7^kc@>{`i+@Tn4)AF~+0s3`RwF%fS&-@e#hO62`JqhRV+#3$1{KGbYQog(ui0B= zkzdpaqo50o1_P}r? z@A>xegA*DMau!=QfzHqC;@I4W z<|np6jprpAzuTa~8sX1n#A|LL{iXD6=Y8Lz2EB>)C@G>Q8Ve<2RS-`OoON14h^8& zFk+U|k-a(;3&c7#7Hy3UKYwZdLKD`N)^<+I{>zh)3v6B7@BYj27C7+&M_u5A|98I| z&j;~*5zi;_d=t+{@q87}XYqU&&xi4R8PBKjd>hZl@q8W6=ka_WUk~8x1$;e$uQ!5g zn!Zk~&<9ze4Qiw4e4r#u%*g6Pb1S!3rV6^B;H|tCQGJ-!s;R!Q0nJacfW()s2ZqOD zhn`cJ1xYdd-Tk-aIZ1& zqS`62*kF;%|KHwD_<7^&D14oTufrC`=2Wk#-k)d!W=w_^AZY-TH>1nugH50=r>JMw zQ+>D}D)R7avMFK7jn+><;j3J&j(^j9YySO&lTQ3D#_;etSTj|d8L1Tad@mt(Z+7R!y z5*|(&!{}{+)S*vWu&##C5jkcIG3`z>q;f6D*g7-KEo=f}rFliJJG20M@ip65!vth0 zotw{HMtJyE{nvLh6Byka9hGp;F!Cc-RCnX? 
z6OXTW{Kexl9>4#ot8jG}zAnSpZTPwlU-#kbLcAa1{S@!Vct8Iq4sdaTk0ZRlA{X9H z_}ia`AdxUZU#YpcSGKSh#{aO0>4J*o|MaK9?~CF0&G7qb_a)wLwU!XiY}2qcMHe2WH0*!MY5C8-AyQck;qNy@7Vf=pY+(;k zFgbx=dEWAWz9AxeBs#XmOb_~HKfBy$u!N@b;!ik{AWKBzh8-;7k=LOb>Y{I*9%^6c)jXqZcr$

N-iwE{*Ocg?v~IuQFJy6lsv6^LHw;!sC*y|9?zN6>s>rtN%z4w#?pW(7Lh z6&AnO&yLwA{?-RxKS*UeQGWgN&0Fjp%M;q`!K`*Sq+7 z7+){r>*;^$ZCpK$uh)leCMju3D#6LCa^;rsW)MLt=pz;>!nXC!Cz&#l%+B(nO;8cA z{8IhX+fe=QopVy;ygVc*pFHv0%M6OA74?=*$U*drB<&dkRR6mYdnb0vg3g$y$9i#e zeL>G~PPZ(OJENsJ=TY5rgmmE+ha4!c5WLLYg6byIVK)vgdC0n1#AJQu-#Q9kXW{EG ze4U1`*S2^U%cFXW@T0U_O%$RkC$5_$AYOcv^og~E zA9f2V-FbEt@wQo7YkMOv5F&W|ACNtN(pnFA@I!H*h50HZW5E^5d_Dreh;P_$ zAcXv9l5A<`rVl%lM(J7mH+z(wg8baj0F`V@;<(B)U2Vu<4VxI`c(DlBpt4UlBVm#?ef~$=o{}USd_qU-g z7BTgX(SS=fa!^5l1o=-qJ^0QcT~KD)V%2mPbyHo^^i^oL4iL??1#h&|Ao#m=gTh5^ z&~vGn$vi^?l~B1K_dja^(>9_XwSoq=F0r*IUuwZ9Fy=;o(O^N>Wa${{PgykoI@)m3 zL3Es#Wo(5Wn%^6Pire8@!0KKxV``4{x2K2JU7!Wy>4RsPwxIQ+@ixtCfvaw+Bik`! zDBj&e}E96y-XY}5k#Yb0IvB_{vyeq_Qpgy-#l>S8wS$PLmOK+Bbr zZQe=-#@30<%7Xvu;&)Hbh3C?LfAd;2iEz@uoCF__`L7KsBLlk+nQkCYf_cH3>QnJZ zw&>iPe@$m`JW~PfNVx_bC{<<54ILoENDSfRlYQE7dtJ<}r>Dq3K%7MNk~Tal^gnFy zoD3p-+L2O5IuPCJ=^=1|45D%VAsbi`r%}E|4!S~y{I!hEcZi#cnpbSIJ&%qj7xi3g z)Pbp2LS&$%OEz8E9?KIFW z6Tg+Sk_0Vju|shi(ebBFGX+t)0I#|MNz>zkhM(d&aYL-m!v8z0^VvvZ@>C4@$Xca3_@(jn`yh zZl6yD;`uHPp|4ty&zzNE5{G0}5$Oy2wL$hy(p6J36|!Ciu?4g0Kwa*{by-1lKju_9 zLk=C_&9YDIc}#)$bL!m@LfT+^SNvo17YY!)c*j2nYePg`RnE@e&#nhdWCpZofo#{; zY>qP&u$|(uiIvfYt_?XAJX#dsN{Eh)+NA}=zu$|Ept1EOS7-uc*eA+{qVXQJbtPG7 z!qAx_rp!m^xYtDHbzw~qb)=uKlR^4u&GdhLT>~H`b|~ZbGg(FeuIjLB6NIC;4;rj8gzYc&g-bu0NJLhHC5U)Af}Ew{raT=u03C~ z4a|{!mWPy>{o|CS-s0nYP1~n!tW{l%Rp^4V(oerIlL1$gS;E&O!bY)}lU^ zuLZV9zLZ5n-0z78^DQml9lLsg`Tv0xD8zOV2u!5kvB~98%FVZ&wD_Fk!t-+rxb-_W zX@6t$cZ}kn@5X-%4gak){I}Hb&n@DgYs5eIh<`5fpXVlV&sE}|yTm`2iGOYr|6C{j zxlhIFC{KYmWH_+4k5gR&9Zy-;d6l&kMI(PZ+|=cP~wDJ5xYsdEZOn07I~Sb;j*1ns4=I#RAn!24J09YqfJVTK~$Lq!ab} zK;&fESc1-fCdBBJEY$cL8-RL7Gjc4T_$nzq@I0HbINmhz)m zyRP0%xKDy9*I|F9-BkFr=2l*v0tq=-%w!BiLrwebhsn@?h|GoqMeF%I!;jtgjBk(-K zMdAl@j*vbm7fxKi{(uIqqPOxme&|E>CO5-?G8(LZRo4`CUmw(8sQ+T$PlNJs-tq0n z^Ux>=FIy(>Z0OzKD|xxL5aW_by)7|Gclp`xkrgn25(rJf`9?7mvxr zH#{~kzo`R3Q~KbSUMdVdGpK0(q7JTWSjL3EP@&pW?#J{7be#FK3m@`>p^Zg7=k3%X zAFX?>7g}dg7l#cx9&0h4tO 
zBv%W>&+Kc(OX(;F{pEM@kpe4*k~E;ppH=>~fH54%z8}bcL<1i0tHwPgT>_O=Nr+;#BF9)BG ztpbSldU`|{AZ#ZRwP*a*7k@MQZ8wF#cEkUkg#Wz>|9cev_bUAFS>Pe}qj_hy?A(6X|MD9?`Hr@_-3s`4{yR^{YbG z^(> z_ykeyy~^VGsDKS&j#mL4kUn@X81dd=DTccNx?VnpI;Cg=lQUY8&!UMyU;f#}bSb+2 z%TZG5J0c7{7XP^Y39|crg9=egRmfMVsTR9tjPl3hGj}V|{I2&*oY{x$+We=HJrLE= zKL=y@th2>hV~ZN>|JvsFTla*`J+$BQf-Q(1{vqiJs-Pou@PctRX6a&Q`ZZ<@wh-gqK_yuFa5SFLfaY^&bmsOMWnf40@HG=qHSeYmoK^;B z`998y0mMrrW3D}NQGsZR^uastOu$FQ#_LF`3Vbk1b=)cZ@9&++W#bp{3A;l2B)y&& zm=3V9Zh398_#6GgUd8{#C;Yw!{`YzO@AvrM_wm2~6P-$HIIl~>{lkN2hYd`jxa=$2 zZdAv2NS-`lEo%ylb6piBUg8kEb7tCA2gQ|k&dH{aVjyYtL-{%jx?hO@^1g4PQ1Lde zwfy(?1ukX=p<`iAlfEv}V89e?uFM*uv6 zhhGo;M0S|kEXhyc2iH^fiNCZ=VNo}b>i9|A6yp28nwd%SLVJXW-CIv||4H?$a%%)& zLi*&1$E8TNVN@GU@k7WCw??-+=>BiT%ern0!u;}U@3OD`yU&B)2g2_Y{qwv7fv^|- z+5UNsBy^^dRI?6lZxgXQTle2RpM^c1-*#jBYd8G<4}O0Lzkd`jyH?Hm$oj>x7w*+c z9T#f3h0YHUS*9ko9_G5Z2QrtN{jZka&%`wh$Zr)f$Z0xXNyF6=!;^i<_^Hkkd- zH!T0zm%@*WyDxrT#iczm8`Rc-^h&|YqCe5R`*&-;mgELkLisbPw?knjsvxe?t<4Qefk|L)5nmFz=*S%38SckIPu3wv{saFtR8M>HavdmQst7;pKH{dn~u0dfc?|#w6Yr9xtA2qUyV&C*{ z-P9YV!NdGa!!y-FnCQH~Wj`c~@Q@{3mIz^A8@avGzR)0&wRF6!k{<)>?nkRX(H7?d znyHR2s?q(n(XjLl0$5ep%(Qj}I{(X;(wor&*rW4BOz!fCzj%8t=M)gcl)voWb$mVI zUn6gFh%`YgQ8b1sf_OQ>kPvtDrw~@wo7Z}{3)SGyBd+-e2x5m`ZVr96j|z<2DJ-k^ z2w}U!iGG+0;)8`juf7WjVsBn?oT^zy1s~Oxm3z+#Vzlxirnhe?kmYRg?WdG5c8vuin@fb;DfbRiD{_L;TREq0+g?LowMtqw8F zwY5o7GaQ{CMq^sCTMP?surT*Qby=`)mkdvp80IV#L*)^t0+;z{lyZ(ZCVrKB;FS`p z)Bf7dufA$cfHxH;ee$mNy%EP0FOYOqYN#N~Yrn0YR~##GAAVgngpTu1RBo6S!*U-q zNe1vB93%PHF?j>(1){nWma0h=MP57@H9navIR+g|*~kJJ}_pTX$~uSdL| z@pi!532#Tdo$-Ev_Y=Gy;r$Hnhj>55`!U|n@o|8U6MP)u^(>2IwmC8E!1An>Lx|@u z=sNd$Pk8WeKIp=oer`+zThR3+5v`v^0DBF$$QN4?|8qZ+dgt_7>?z`t@ok8k8*~j5 z1GurDS4q01mk{66+kJLKf)jhTWoFv_IvS5<-hN`!&!)ITu?*oW=y+OamfP~VrpZeG z!|sVlR(lbjS@Oi__-9&{vN^J?+EeMUEpLrhB7XTi5FM|!%s5E;VU!)xz&5Rb?&q-~ z%;7Z~#2VhZc~IYY(Kk3=s{bNRp?WcHW!(A(0!SM%VkQfbUqyvVJe(LanocNfc!lbB z!ii_MB-f4@6^?R9Ja$Gh*SE{!w)IAWtXV4uRv~#he`dqehlnL{d!0Xn>|aexO54?7 zdmtF 
zpMJDm8qv-r?Yx-B%@%LY3G>Bse`XS2737!$*Yy;Zb{PRI&sO@Ng_t>Xo!G9KCL*>t z_QJgmAem*%3{oCOd>3~T#~SM8WGlmdzb`apmdudAY}V}A)ir4jlV4fqKAaK7cBjou zuXM72u5^Kbw`RhNbHgq?Z|K@4gjv?wC(bI zgO%LO#B|Eq`52Fvqb>H>f- zxkDqCM-wXwa2Hd5901XtkHa)BX)a#-%S=kwwFD!R_As^`CB57%rpTip@>JLA7( zX^3fIhRcK3DqjeI;^Os}zMa>^`1>CArWXW2zF^xCcWF(mT=9T5BPjq1k6#{p9;tyX z%ynUI(g`m(>oNjT z^C$fIM8}dp-#R6^ifUZT^atYby+Xb-QdshXZFre|54J z{RyML!T1-P52yhUuPrtuJuCU&YyXZF_3qW#Agut<%~`X%_mmX2!trg3o_qj|_GYU* z36{jPJVZ4-3<6*-=<3JXH}&}P zmfrK$HrX&lTqwFC5kyNy*C&k^j|r%KkUI5*#M~Xc9e=T z5dUZV%KP7}l z)&`w_@3$}>t$BJ5I7G4F)keC6d4F(RrfX1JDT-xj_Vgw6BxPbA*+hoK~| zjv7-TEPB{@i@|e$DBgM@G15TzpZjlntnq4RsXs8D*|^HC6U2B($684j{eg?Uazs8r z2phC}o7{iSAI4U_b&NeOgozg~2_Go*2VNt-6yaJyY{9n}$r=$3Tmsl`zu6w|Z2v{y z!jF$!DwRNBlK<^<%XYAjnRf-i*_Ryz9(z+P>mkOLW*Y>~eLLwFbjjH7-w+o15dZGm z-WPP^oU33EWcg*zM6%Pc&SuN=WUe5DDF@v`tY{c*H~Uy1AqXt=)i5(g8rIBjq^tNU5NdA}gA&6&rf zRuT;hE#+ zyln}16bQV$Rk8d*R4gCezr80A#+MK@GMTBEtND{JR!;*VnY6jPwVHw@tXXR5+7<|J z`Ukj|dQdQRkGCe8?SUX=B_Oz!M!}9P&5j&H_sj1a;4WH4!2+XH@6?|RguVPpBJRh@ zm^-_&tb1-C5G6csU89mQ-s~&Yy2XKu`jdQDu3vf)jj!5O)E-a9%JL_@4xsVxOM2e= zev*vI$zIAi{5TMvXm9Rj;h|uL5xE(6-UUKLo$$3=1r&^=B@-6;J`fJr*4evLDcDc3 z9YL?Y2BKOwrGXkm!G=B>f3El$2q)d9n%3Q=VCP;wFWI(T+(9mS1}JCWkVz zmueo7<2wcWUfcV~5rUwb*P=$dow8`}mLGmUmOP_iyI(!bZ>I;rfsH{0N6nGGc}tj9 z8U;aPgYY%;!&J<&V#$RQ(m{~%<4#LC(zi4DryY-Q5NP^l&SYJqVue-5hgXONfh_Wm zlao{o?=OVW@&<3sK=79v9p$-ejD6F(E!klk2yqX-c&V-h@+IX;{^!S(mXh$RBRkPwYNN!_Jr7C08^C0*g$N$Z^Cc zLzt#X<97pL3dt8MX_&KP<+IO7{>DW*nThT%j^yPHfpAd!>(^c(V~qdok_)0}yd?u0 zwdLcCG4K1>{^q--~V3k5D1~1EiEq7CfE+15~gFM zK-jjonC*a)DdzO}TXczh;9~s74TU|BTM-EIY-w)pHl|oz|HJ% zDif?eG&jRnI1s|W*^Rf(8vkQY1Q7=a1h$et&qD5n;EPV4P%mB;OL~ZGIWD+fR}JC1 zU1)@$v(z7K`ScfCe(#kuzIwHB)tJ#PKd35fmRoZS={}5oj4R3y67=(k ztTDzI-MI4EGY`MT`KayT`=;yU2Wi|zA#Ux)7_mFdCXMU|=CQ?W-Yv#hbIPRGBPBlw zzqF#f)72Pz>+0?sDdz{LJ?rePSd6g)d#^-%TZhK~uzt_|TpIRv3*~9YdOx7Nf<6Iv z8dmp%SCPf9(u(07hLuD3zkoly8P@GM}4y8^Ct?}}Mbcfx;-0?IFzkb5Y zx#YxDAF%y+r=?T?utb^XODc_hAg@aJnyD9HlF6*GDl2`UJLGwE*G|BK3ocWhFYy8P 
zq06-&Z=my&Sift(^M(f=ZT_D=0Y>^lT{?Qn8?Nh~A+PuXSk&1&4p$F)L;8xU*l+?F zGh5v@Ddpe|E$I67V}Kc#$_hzHd4qURF&oz`VDYR|mTF(UK=|e5+KG9i!6??(U&v#mm_>i$zCth8`5I7sgg0C$sxU~H$CB& z0WC&`kBoJ8&m{}q@`P{&^$i^l0IORvW%>G*Cur!?V!RvC`gNMMZhY{B9rsU_>YfCQ zpR;Xp=X+21(6f_nv<0y8RY5Y=pFJUeSyikNKVY8gigU%sJz-dZR@qu;gmtIxbh2#s zgq7Yr>zE1*F~zMN=?|JbA!3V9?f{!1#u3N*J+s*ptczUTIkp*KCAC`w)}8ePpHx$` zO%L=j-VLsoqR)6jP2>7YYKi)oxc#~J0tY=o_{xAWNmw6yweicC=svW*C0~^;Iq6}y zu5#IDdV7LwukI6$gCuMreq2Y)-#!r{VeN&oMO~4eu7Fo}u=k2tn=W?t z;CfEEFi!|rerD_SUpm;NqiM;~U{9b~zrV(NK?ig3tB+H0L-UCj{;Isw8ADul`X=A&-@Sl|k_k{U3jac0d zEo@!wmScxgJs~b_$^!EW4lb*$dxZl#2f~;|)_%7cDEfRk0(P1Hzg^-te;=RW$iTte58V!F<{q z3?n);_6QQO+X)+Q@8|M?%S|JK(+)%|TZt#!iq{7)n=R*@#8t3jcBgeFdOlz&XP2@? zS{W<$H7&Vp?gLh&Uk^Mzl`x|%u9segpyTgekH{=l!lIWst$VY{2edD)+0DC65xe|r zR}@o<59qHQIREsbB33E;e2FB&FOl!rDpIP7n5D(MgxP5y@T}No+C8Fx!T1At?b|*e z8?o$6M~MQaT`4QH=d%yodaPO8lCFrgCZy*kEN1|=>2L@04JB+&`e$~k2m^SmxL)qX zl(3m2dV;Uj8IUw`cIW;{B~0-{R-}Lu15PbxZ6dIJM) z90=56^OnU(dnX5ywxj!VtQY7jki~4BW_yAUF+hKnU5YWF0sT`;IT|%& zF#U6uAN+SRAo}DxF{LzV?E1hSM%Y0#p5KspCs`UBHQi@WT+D!u7t?cfm^AjZ4%=UR zkpYU^9HnXw$zbcooLO=^(D72M_tk44@@E#KDU%iyUWX<*&q>rKXgbtaT z?UBV;9xZ1znrA@h*Al22m%-epmC9-CzCc@k*^bI5i&e?^dNi-{g?)#galN@BgAsJ2 zDz^yxf-LXBFY1?Nv6ch!2gld?LjN8|DN2hRCRjc8`pz<65G{GeC3sjKqbxPL*7B19 z&nV~DOP9%G;^CC1u}r=YqwIIIJ4yjN#NMy6_A3JjPKotZQHmI4-(=C%4-B~4^iIqw zObN?hQj)M_k^v3p^`H0rRKn)ve{x5m@oC?LoErF4Fhwz$u=V1;5KpK-*nL9<;}AZe zoulRp$=g{=R%sJ4dh@sGOJ=?httKVYbf1Vd4!k;h&E6N@at7VsjbzunB`Q1oePL$a z@y2p4RV>%Z{iJiSFKDeR))Vtl#m@2Cn!ir=h50#t+u2%GOnk}G4(38%Sm|PF7VE8w z@toe8YEt40&#Rv^F))a#us=8!dGk%QNkLIEKza3>J2oXlt3-&O&KhAR;j%GmM^?V{`KXIH1-YiGCX?Q7obcs^W|D;ENW>9)2=Sx zMZOhnVwSb2Fj0_vHT>5kw|?%ICm$}L2;CbnOCd^F=Ib?1M}_Qn^iIX)XAb{6hF9sN%u*O zsT%f^!$>#C(hmwm4WjJ6s9}Z1m3(W${ea&&;xt#RI>z$))mT-MAGA6?4y&71$5Id(|uA+(Zsa4`X?^eqVa-O z^jEoPVS=tV-tA~ceDHN67AB&FMGutgM?dg`>s23}eKNGL(ZhFd8u$3Y71yInST<;3 zmOMM8+xq>W&PGbcX`432&x>k>&wdcZTbG=)Mh8pb?hkio@`u6_Sf#U72Me-!+u|kQ 
z50%p%vpZ?Jn0)e;2oFhrFnaK7gU)Iaw)*&z3(8Xd0Q=tTQ7R>2{iS+>U&Z}lph)Fs z2}N&_pG)8U=&U5Dk1>+35dDe%FtR)^;KomVOm@k=!DsUR(A&KJlI2zd?A%HtU2Syz zl7}Ci&$=06-k+UW29eylbH07cxFOa%ajkQ|pg$0$IEYkm^al@)s@NbC zz*^r6uD>|r2hUbo)R-p$rqv|zaLIeL{?O;qTSfufc{Cu#dB_inqj_FklOba}zkDV= zf9eN}tMwD=d1Q>q`fW>myB}1(|0qs)O2$g4t0aPveqUaAW6`Eb!CKbrC^KpFgZ+2%IYPl= zzA4FShx!3$qP5K`K`KVXFt!B5*TYxz^psdpF;3?jA&Tb6e`2>+3gn;e zN9XehmQ<{G?-e2!^6!0(=zf}1?0$#FD>gCYAFp=OuS`*}>$*>B53KTok8wP&-g8hf z*F=YrL=;a!UuCljH&8J`+1AwOJ-+ZWL+bM|jf#oB-ZOOhjxY3iXU>!osaSdPq|nAo z2p0KwX%E6f=Aw|COBC##;?L{{I}pCzK9u8Y zPr;n+Z-n?n`GQU_^wn}xFfHzNUS)nL{5hxM=2>K{J#o^j)CA#2)_k5J;^&Q@vLczaeW65IeZ!MfGDa}px9b2uI{*7G zb+?mbEHw1=2@?fhi0d8TKHyHq#!>zdA)@)IbQM){kulbk^zK-FUoe^brT+aPVAEEj z8Xt9i!ML3@Odav{n%9Es--`HxlOOX#_aVSAvwc5Cmixl(b8jrfILTQ44KKs8UkqUK zZu6grBxAglmgmpDWx#yT0JpIL852bEnGpuit({VxzXEo-Ku_@GV+MTs?Du0|A*zp9 zjIK2`Gr;4A?#c8}R8MqZ*_0~jIsI&^;znO5mxZyWOFSW11QFs@?BF# z*yV{(-@`3FpkZ2PFD429)O)BTK8XH;f7jBs+hvP(jXMB1tGE@uSPzNH=iYCQas-C> zhrJ(8OZ@pgf#Yv2y>N_ctgP$UdSQ5YTI_XOup>n5wTpHBzz3I)oM)f5a)hqsJdrQj zc;T%RYplgO$HlRQtLBoQ&N@K6#!=6<6#}quZ`t=*Q{6$Lz$K!Uy>;1t@7?QmO7()K zBj`LUHwaG^hlb?B*&Pn(e!8nlUxn?cIL&J)Z{XJH6?3uJHi&^<$RL z)@Z=PbGlWfmcwcg%p425fbK7?yKLQY@;|GFc_&>HdaLiY~`c*DOzEpn>~yt-X> zzc9`bTn0Toa?4FXVXq&DPmv>xrj_cixnlxcA?rEQ@1gMnDz+J(GX=v3L-qU2oggSu z#7-pM6pB$g6by5M7CoDhZ`VwrbwuI1e54b|G9s5bljzVWe$?H*%n4Rs^FM51LWggu zzp!OJPQbBj&#uH{|9+Dk|IKpzH_h?iJjZ_%9skXA{5RF{-(1IklO6xfcKkQp@!x#M ze-j@6&3ODb*H|_D?ydN}PQEQUo4y+B)Wvbl9@Kg7w`?(2saNiSR zQ(|BY@-pvNAF=U($<8G0T@+)eJbdu2Pk{&IR{9@K3^#@y>djXyTRougBZtJA%gBGq z3uo7V@c^C9(Rkq(#xP^CLbKtW2XHxbKk{0J^f6p^-{L76FP*$4on!*t4dzB~W<5Z% zsb_*K-2}=ecSVf~ctX*}go0grP2ij4#naVZo)D85@*s7C3A|N&zj|AdCmbY=3+<^e zf#mC4kCy-P1mXX$t~Y^)`uqOJrzjFalXkmAxf4=60Isll1kG5dcQO8?`Qh`Js$Ju{e0i;+_T+tUNf(IJhm)b^$hD~Y2lfW z+qQ=Nc2Ds9FE@?cUFi*P%%u++e6)ZBnVQnnaW6=aWDfm3XaOhI*$r|&_X4Ydz1mLP zWY`dNJ;9va88!-I zE^n9-Z~JMnrMNQC4^9(tDRZy1J&ow_xpBP0@rq}b4JQOkn{ZM z)cT*6(0}HxW2BM~pp`J+3RqK?{>(`5h5p}Ro3;m8 
zL4Sp;uxN=d6iK(m`@gb;YMfIGHu!=!$$q`fc}vLY*;5(RHfY2bGM@r?Lu zY|p`y9aXsGDyoH|DXl`{^^@vF&ne|_f%+i5LBGEgFFikEiMAVb-v zjIGTAa;BI1=G>vd_kCdp)lOIdXm1@Ws>5fd-??qyI4wZuNAL;3(=?Fn+n2f}(;U(c z-ZM5!!2aL4r%UjxIn0`wcs`)ffa}9=p*Qj7kni-ZPd}6f!80S~=A7n`iF5cNq^z0rHYCLd#_G7eA00CVe#F(a z!_)`bln*!!Z#9Knp4N$^ARnktHjY%OGKKfrDXl_oJ`ntol)*Y|3SK#vP1;g@AS9+E zhewM9$lId$XNV6B9j&#Mh$8{7(QPeNRUg>j*(f<>O#+i=#|+0uz31<=-uY$H6e?wS z!;@}#Ln5BvrpOegYaa(TBx8H6jVhDMGlh0R!~3iKz2WxTGfy`7n1bb_n`^pudqdhi z@y$alreIYcc|yI;8*~$xW!vqfh#QrNA^TT4x0fl-K;Qh2L$KsR^ z2;?{Wh{Tw{RNAw`3UeRGwB$rSjwX;<^&2(G`anxnRGBHK2{>KWRZ7ABNLp&Ffa8HN zC|!LVc)ryerubq_w?!F)OJZnKI;Rg9G<$i9DjCE6s7$fTGI+jd<4B!qfXA;Nt}D00 z>z~xS_CXI|IB?er(d|CaR4?tSRzn00R}M|SI3IxX_Lo?{7{R46?7t{JkiXe%Q1rYJ zuwr@Y(tMyM`~^?8q!DQCKdt(DuMY?n#;y?SH3VZkeno;0(4LA47cd)wP@HmK$3`De zyq2eWUDp5tGq(4#k@5T;XnieEo#6|>KGzez#Nz&Y4~W^5HDH61RFZs%CoKBWyiRbjCa`;oG&{z40wFGTg*YA`vn8}C zdM%E(@cjH&H9?#uSVoJ<6K1n$O(BOgVestZz z)xSps_kYULb?K27NS@69W;yH*d_5h3a+kFrL&(mHc-|c-EQ@j?YqemURIzb1)g5Yg z#IBgxrUja#0%z+(-J$XlIWC&21zop2DZ!EMFev!W=Cpz)bltqUhD>#bw4tHslj@of z4jh_$t=&Op(3p59Toas?XC?A1@%-w?vvQu`{RXukDi?4EulKcUxY9Kt$MJf?=Q%eV zi`TAEy{QQb*Vew=@X!sKVxRU~ifF;}*8+0vFWf+WE3@X*q!u`oL<;V`<_6{F%fk;y zYJ*Diu^Yw}ZtzQf-)?m-9q5{pufG)R2JGJ*C=IW*L030o&UdpLXp9*XL-}-}yy$V@ zZ`_}vXtPS?gDxBz*)f@U-VMUcX1v{Q>OoxCY1Nu@ZXlt*Z?|zW*2nW!#M|j^KS$1~QJXRJNXSeu@)c0FTl`*mwlETcSJAz{-_OG1eO zDAnFvqb}wOMGs$J$bGHZL$Ua^^WkMyHitB?(f>QjJ0hUYu_@~RAsDX$XM%;u@)j@ ztwhFJii|bp7;Dfm)~I8wVaHhGj{wumlBls+_sLWG6ZLRyyGl69bxpPuz)4eS|z;aJicsx)CRtD}$ zH?;PFHP{bEw`l=CU$9K4@_%#X`M&Ds!{_3toJ4(Irxq-%Wy;sDzy7c`T=B9fj#lu2 zxU8{veFt>Fd!p|_47NVfCO1Q_4BVYri%@y<@hJ3Sfi{A9cnl&h=r+~y#8 zsCkp!BDL58GW4{0xQFGTO_Vvb?7lfPD*P%*BCi7-)@$#r=kF?8eV|RhToyKMNER5X zHizMKg2}-qS!gax`|P;I9B7{u^q=jOg`A|0oIx9NpaiibR4dBC^wId?n&swDoi10{ z%pnhYjZtNK_sw8Sbjd~13I#Bk6TBSaX9g-S!d}dgl^`kCf!Sn+1QZv}E1Cn!Aa|zO zhy5oBGG0m166IB3Cs*6gTSFvx)X44Vc25~hOn)hv+{JP1?}(wVEh>-`C`$d6O#-(T zCZo|ks!*0(VqhLYg7VL|vp3V!AWL~nc&{RfE`y=H$Z3+`rb&YT2 
zRiSE(l;L&76gq31r}uHF0%u<~`ADQGeCH9BK31#(4u-#!W;U9Fj>UL-{~8tWzbF1l z72CC#>8}#-Q3lS9N9CQT{>26~W!U_2q&Y^=6o#{F@6@y?gF}SM3lW+rBvG5ye+jC< zs__x?ZSkf+xykLw8lenn`-bF7?wUgN(hTOi-AXXIB;&BkQW9_#HTUTKRs?`wN)oF` zP~;_gk0(SC>J;qX`q^UsPSnx3L=|A0=u`GHfh6e6Bbe+rmIs?kLi?H=5`1p!Ja@5K z4z}+uF$gLm!EhVfnv+_x(6e&An$iFX#5V9BGWM4S*E>~56BW&%^V|;>k)={FcW6jX z59cjK8^={P1WUrI7uhYhzvA|^ssoklCBO~i5@vINhQu!t_tt_+NV32yC38?oD1gEJ6^mX1!Xz7}nQ6Hdv_4x4mpP*4NR-!SDV;5X|59v$z7c7slu0YYKvY zwENAA1Ao6`#`q2z<2!1M@31kxINt%a(eKaTJowt>z?(ua#0&AmEA8<7 z(pz>e>z0BROwVOKynbZb`uEOKP^h4IcLUph{n_ux$vW_3yrsFP%|-&)Z>D{gZTS0M zCoay5(r<6b!u#Bg91~6(IzQF?e<|@k!2Er1#(t%eES=xuO=s+_S4qR>DX*uOs;q&q zvGz{K11U(8%{biq^6&ZPUmqh77X2f;h2Qd%Szefr%#eXlKhvA`c7`|{@~g^%YWt7u zbpFVW_5bm1!e77rXLvW@|L@&_g?9_!qTTW6aRF4%^TJ##+Zw80Wy>!i%OIDEd^*k}FffWtE-g~ua9@oN2S~?2?Pg*SR9PS^dLK=O~ zj;h6$?)-q)Cs;b$0viYVp5lJ9&JM5NoB1@1wi3y@NFOu`!uUz%(_K!yC}m&6{uldk z|5vBSe1>_D%37CGe7A7>8ohUGRe90yvgN1UUt!(kOgk0pwhFbdEp{7>hIfca=+M(;i?O2XQ*35OPPq@prZ?`_e*I3&*vN_6 z*)jIKSG?xo1QER!|77~s5(tJKi_6r$5#5|=-kb@{4t($>2g(n z*T|}7^jIwb<0p|_yFW0YesAr30$4)Q;O}o+%UBS(i`!e>9per-nKHqSQta>C(i64> zkQ!ClFVBPOVnwO9~F#*zE4;Q8N1-K_UpjwY(Ud@Y{G?jzH`##f>Yx%C&P?_=yUGF{8fhstL< zHQ&t5w>v+oddY=!f_l0#rL6$G1OpR!SE9PF+2p+&vHqUXviU&8`t~iG?A7?UzGD+S zLikgw&yA+np5&;5^xXKiA851(Zmsw(B^`ORayR$4E2k*`Q>!mFExSIFSrPppm$)bv zP+(MP!>yX_ipcY*c*^Za3Vp0h-3U#POu_riuJ3)Mh-5q+L~K`6V2|fsE82Zkw0JKk zj6AjnH!ZpAzt5=A*DjZdk6ijg9Wkl9obvCohx=9OYaQ-rq4!Kk^6iX0ka>^Zw7RE> z4%!ZxHmsol->2DIJEpXever>ER$U6XdGsyTRMtYD@p=(v6c|H>p*nh6D6DQ|SkDc& z|JkelP|-Ev^%S^#F@0@HnKoJ*T_DtvL4m+RL5oyzEwtX-LBuQ%S(!O0p;s1s)*&2ASYuIh`fD>Cc#C6oxy7Qwhsj`0q)4TQ09bX4cYzv;YkYQ4iZ+j|^b(!DX7&v~B47~UDYKvbog%1JYjhXpm zxR_qsVYAN^dS2ZaC=Me7Yp2&!zmUJ{<}%jRWvsi)SeKWvZZBh9U&gwLjCBn<|Z zWn`?|$XM5rvF;;dT}Z~dks8ac7WJGk2eRlt^9RHc>wjX6w3vd#9%5IEveEVy2y9#(L!0WDc+nzzZ9)o@`*3o3Fi_BOz zneiSY#yXmebv7C6a5C2EWLzJs@KM5_do+YDoTxLLG5u?AC$+|e+nfLQo}Y!jsqLeL zs;g#D{^j}XlVv1&-w5|m2exe_pz)0`it+v`#`~?Do@XU4!ntAotIk?u zoWr-r-?v*{kJnMPUw_oW3~~<+`mVsadFB);%bun2$)!HfBJpvT^|S=s8$Na; 
zv(^Vj)cw4!?3Kj+vd*`-!3R!Vc+Lmh>@B$)4ogDHk74~6Z+&3c{*A@fHxgho za_mNLw-2EG6CzW^;_&p?>a>PNOyB4=JFn&9aQGHimqU&Zq?oQKF$|OVi(Y-w?JL4L zzQEO_5#ZD#2CAcKQzr|3fpUYZ$BQ6J=lSpGrN72*2|<*F@5ke5zA$kzvS65h4T!l# z%xOmZ0xiI;_Kt!u&}?%(EM0vesq}HMmy{r69fr}mjlQ5#W@Y?DjUQgk?bu3l_x(?% zMp6WBPQ3oiVwzqlKlJQf`g7$h=1;`ZU$j&~;8-Lmm$L%5%h(N>ZV?8GTudDY-ha|% zU6mAPQE18B-aFal1B9gt4Ie9U`%>psy_bF9vsDDaqemF@@cv`>`T*71qGWNdAnfbC zxn_8~51i~(o33vX0=JW&F8n&`1LWP9|J-6gXtg3zu)I2dFn7E=B?ikjh?r|(dGX&l zuF&Pb7V-NayV>88}eRb=^$)mVE zaHD#0#@fH+vox-q!@9^D_%FYEHG4`N7Hp(wEMro(K@27Y4e#&U;0>{T<%UFYaag~ObB8OMC!a_rcDL!F zKU4+Td}Iizf=0<24%S8d#Y_51E2=c++v2oKz~ z!ja|!&yQrKza=Wcjp$6V(#<}=wf4xQSh)hQejnD)#N)-XHmcW!E5IyXf20!2XOD-t zeWe1}&@74{Vf~p1*cuj@ArA)HA9(7geIR1Hhxm=Sbs#x2te>~s7phAfN3YMxfq^1# z_!PD`=#y}`$0G-6QUi-tVtcB-)8DNcg#QTYTV?p3hgru}*g}w?t@Vowy^M>7uP&1-7B_N5%zuG7G zx()BI=-``O%$Q#qOFH;ID?$-9B5`8I8;Eags@OY~Am!Py8|mM@A%#PtUeH?wr20HN zoq2p9P-c1fGgcL7xqUumn%@UDeGKsP&Q*n>#L%WrMIX9PCHH$$GMsS#&jEgH7$?sO zoL!WF?IDko{fo95c-U$kxtZ<*8C^rs`Pby9;T1iHDXRh#}_VmS#lR; z%fP0LXN5f6zR;3x(w8YL2MszOcn;Y4f}40TlNHua#{JCyoRC0R%r<{srf2!H?O5Ut zgtQ;46K{+X$9p{~CHK8?{&qdl@rvL-F}~5C^K#Aq=}$W-D|-6r8ff??uUEk34)9js z9ApZ>RJ9ecm*fsdo(r67dBhL-TrqV+mbhJ+Lwn}VDtI4pT2(m89mZYS(@93FVTUn? 
z=CT9sAm-DaKJ|hhLdLE>N5|bEDO*>Cyqh1qSc|gKZn(olzLhcGnpIFy$<-Cq?+(Zj zHtgNF686SDE94$@2d*K5j_GS$VCH%~!MGRGXR_^=c_#k zH`mAxV{Fxxw#aEI+zem(^WrpKkK?3!nJ6>ZWy%T!%WEv?l7D+Rpq&R1te)|9jTPS;|E$MYq(c}Z`}6YY<9e# z;MJUSL_P=$32n+<=?+E3=h+U(@dG3O%Z^?)iH!9Ixsa|dNEg#?up%ce74 zuqY?Ya?Pu0V%cebwxf|=kfi6}#~c5J*m6Qvm_^$Q2^8yYORt(Rpql zeVXWW?L6!9FP@N8-g#Z=^B7U6!;_-ihVeBvIeX$b(d+9~-#0~`bbZSd<_#B*#_jCM z-w2^!i2??>16w!X_TTy$MjQlaFzvSR$n^xItwQW7oFTsQy`Jy@&xfL#^)>T<5g#W) z$b~*n7EHeY_k3a35|UoQ&tf<`Y% zvr(xPAYB`Ig7c6UMEHNINfh7#u4`OfS{1l|N6X|R7A^=rb+^0hD&~)_vU_D9HzX~Z zY&Up}`EhmF(164ZXKfhdh&6wP4ya38Y_FSW|KFJ z#rwB z=HvCY$ckQlwi0|SwT@(-@y0pp+SljbasQ=1^WpFOBbRyKu)v!IUNG)$(Vu6f^M8zO zjC^6_8zWyC`OYX8>DbVwpfWEQHg8YQ&;6Uvn1ti_Q~uBW(0{(eT)1Cx;l9L$`frQ= z#QpPsjNgAU)=XrqsmNG!k+HTMW9>P{yBZnqa%8;gk?}4_#=9aJ?~?rMu1SWwDF3>v zlHvPY#_xL>`GHxKLm({rM;{{O-H)1;YQT%*(^Jl4wlI7@yFg?Q5uU8sB9WX*{fDoz zfA~r%;=Y)2+yLzF2--=JsNiPR;3c=!02(v*K5|}7#rw)G(3vp=r%1w^>M;srthl)M z2Ztf>hX*K!H&7t*$BK7XwT+>GR!8Uju!Sg#sTfUuF}>EkU%;AlH^ zhmV5-Nmj|@O3RGEyrLuLcC$U5Hjh#jf39$QsAf48^;m)k;tlb`EsFL)tg;qeu4M$Q zmyN&ly|e>boygt~+y*ca@y0b0>_9xheC^NL0tN({Hsf)Fa-_H}A@P&9WtJJCTC zW-}Z1D|y)g@^o-dIH(Cx)g=bx0XuMeS`;O_LKDu3nsGJS+C%5EJU!h5noxhK#K1Dg z9$0J7RJm@^g6$&(i>pR4=B9NlzODn^%kq9V>QaDmTKvx2AYFJCS&+Zhf&y#K{|Gj> z)&=8c=j&?z6v%zn+G+Y$8ML&YoJhh;- zF>&>CO)5MT6)UwCE+8CrzhkM2FF3gsCrOVQ zd{nivG!oNSLVEr5ohp36_|Q%=d^Z|Od@i5{-GQ8NYA?nT)2*fQYGD3Z{WmJccz@%M zpkOr!Y3UoMmXjf3@^&`QVKvyCKXlfs@$Xv5f7ME6sHMzUYnibYGh?l0##+vd^2aKX zhxJJ1pL;$2)ID0L_qssaLLH?=f1ZQ?bFMJdU}CJP%vf`ou_iNP&1S}$&Wtsm8EZl_ z){JK4BO{*~)DuQMV$?InTDXk0av5vsGS=E#4szzZ=uX(oUco4Q=J zQUSu|nM8~{!y!o@ffR5I%GDJ{1UopRZOFn*v1kYK8YJL-n{xv)(9fS5lJWc&-E!9P;s# z*(4bDJ<+=PC#KKr=PRBe5)@IIq%!;Dp-z6KO8ER8lcdx-(du;oVfBVF)@Fd*d$qI) z>*#xjspTx%!E!KRr|ED%z>LnD6f2ewzZGzM*ID_Kd~y(S@|yqJ4CmsK^KxHD@mR~FNQ2PrLP?n{ zL|}FI@bgpk2fJ$pi;p*(|MPo5<3DnrzcWvB-X*PwE3<3x1L*m2MEbZX6dJ9NxLoK5 zDZ7WxnyoU08r_<^KeUeL=A z#xTge>X|@_A6&Vy_v-m0#&D&3`95u5KNudbdmrg+42kk{N2aCyAnEl;bDpg+Y}-8{ 
zO#49tx2+`&iPr%tUI?vz{*ned=efPB#sG%LzSO6_pn-p0z%fr7V^}xcHuSQB2HU*c zZ;Ghk_1?OWnD^4a;I7xxo-Y7zlyr6;_N77QXXjJ2&j2vXY(q$*0Z+8?7qz1R`8X&k z+du==jmyKWB8f0?NrF1<{2$Nw7ZY{2o6~^(K~Ev~Mk2(rukv=Zqd|kD^ubrM0EaEb zrnvQJP`>okNfA4M1)g8bkdu8~3Xr{IFx=6a20a@$Jap;+(DGC@6Y`?Lo)m{YZNkPN zE#-H@I)MgW%TJx0_BDpsm^CwZj?$pwN!X^itHvPNZ>Ap8O@qnvQDvpijG^hmyMU^% zG*J5Lyf*HRF)aPwRw%dH53(xst}Q!b0#~Zvz1*qb2a}b3<8MPvV08PM?OQzkU{i(z z^M`jPU?b-zSQ_RBA(x`cN(8Z8y%y?2yZk_H(U-n;b*3OL-BdzK@B?Z7Qzr$;B*-}$ z_`T(TA4EJ5|MWG71Pv}8ryiX11E2b+vZ!jT*D_5d#y9-H?emEF$2n7A)SrZ1S5!`| z^oPaoIyL8J%z$;sOr5mOADg%MCnHsJx=s$+Y73>EG6V4n9yGehABK0c>`P8G1C57| z_+PyCgIM16Y5~7VAU3z&`N$5IZjW79pd4(B#zcUl1Sm_68mkSo(tuceC zHjUz>T0hwFaFo#NZw}%i8z-l4`q6D>Or|L?^OYZ*`WUv!(byc?7cXHK`{f528uo9= ztIYq}Z)%0j`Qd{!xY^Hk+70V7O}#fdtAGa8=eeIA?)bD$Z2C#%VwDUH=p2nG}Z{;+6t~Zu&VSP*yOuZUcL<0ftSW{&WfLFRJ zP8VYOikp9Mmn6bp^kSD#un_n9-);JG!POoqZ0p{8g(Y zZ`^xRcbl6HpDj{3`pnEgQ3EBO95mWGZi9a958Zg=x(3qv&0070%?9ndB-_NdS__#s z>)7>1*q~1NB7J?b7LvH`S8>e12JOJ@t5<2E7t)IpCEi#g;B~&?>#2owh2#g;J+MZJ z%%sZ4XSC4v>5Yp_Zd;=Zf!&)oXKEvXElC%wFIl6(?Ypl0n9@cg%LFVlcUvQ^KCU=N zf(|NKvV$kX(Hd=%Z9V?!u{M1#`IFxj{g&vUPQ@puGBEz0B=S)-QwNb{PF{YiWsO=L zQzrJv>7u^M)P2h+)~KiCbU*QiE=p?Is{btzk4K+e6i@0Qy?shC+)u2L*lr6;%K&}k zc3H;I#>j@wy9-%MPOv|)L1wJ#C)|Vdk;cW3_G0$7Xp|$2wNg|cttVFB6?|laHt7}V zAIQ^1zr^GR;$PYzw^fO2(((F%X125T>uph0iknB`T^;mfpi;J$$qwn%esT$7)j{38 zTlKv**&$Ua1;1@0+Gs&`Od<+?h^U3u{><_=ld?xUw}^by=Fmhby}iuK#qE($*IS`I zyEV`y<85wbuk27_^w$i7Vs(V-QuiHQVn>(t#WhY>t{t{Tc2)kWKA!4GOkwd{{AydI zv?p|<{g66hls{I9!iC!5ui#e9CR;NoFSt^(NI?#IeyY~KK4b;Vs+)BP1oi)^9j+0e z=|CX-*H&a{TObl>1?9%quB5oD(#In0otEdyRG{hg?l3!5D^Lluzx3Kq1zw$r%TDFD zg71>`cH4)QfsEVt@mYbJ-6h@;Rb}Yc-}Zwy+Y)MS9&)aCR|3tn}RiWFsuOFEc4Xe)^II zpYs+#^xqn$tt=0$$B1&Ro)&OT^uBLt`#SjL%RcQiVGdkl#}zm);&^00tYlHAIdJ#A zdo`W44p=#y`j%cc2h|Jf(waHe0hg(y@%Bt}2pNBryJ1ug)`SbQoJ}-`qI*2XDmZ@O zdhyM|j%W^W|3XZ{mid~LO8gtJA6nq;jjXurM>otMPBHEbo4Fip(%;XvKi>>m4zq3$ z+p-S$x4K3t7nwmd|I~HD0eOgU32!t$Xa@aoiI-bU0n*}?_%*lU_I%b2C{GaW z#zHSl2|As_8`;*I!D#M-SP^sb*Uhbfiv(odUJ 
z!Oj1yg8C(Mp!|y3>g=ovUI|kbpOh>hEAh^udVN($3s36&?P~!^0Z;o=$T+{i@**Tz zVE^hZqy9w&(yLO>G+eiUCjY1XL&B<%P`s)mW;GcUj&JF$Sf>UC2bB2750jz9%H`v; z0ySXNSFFM(aXj{q-t*d-2TV|m>3c6ZuEg)$YYkP!ed8y1;UB%X`J?xn8h3~JZa0QH z;d|>WJPl#==LAPLs|iGTJX(~lW(YZTm8~*h0!i@B#%-4&xJla(9Kj{IrgU0cvMw>HtCK)AQ7Gy0OCm^`|9tiS zGmgVswL?l&h;aGoje$-Nb5N8id~jdU5E?eV%)L*x0H^3{SKQYag5vW1hqa-RXv9v{JWTRkPX^wfL(vDT^?BI4$8a=e>31pLKyE)lzdw#1h&!eUUhOLKnceI(V0T8xQM1$o{E{ zp&Fc*8_am;7wJF`#)@s0@cnl3n|>o5cxAhvjr$?a)7M^jews-K_9P0k@MFCxU&C|O znxGAvm>zx@O0outS-C0gom%h||Cow*PUVO7d!-l7VuhE2mjt_4f*#s zocR4){VzRa)Kf-1X4G>=J7BaEMmu7(Ge$dPv{ObqX0&rgKfvfG82w1yY=XL!qZwS* z7fP~gRsrwTL8GZSj~jDZKev9RDvXD_MpcUAGe>B~dw(3JCnGL9eV7DUT>EyL^s0iG zs6m=bJ_#rv?;4hUI=H@>}r^S(Y6rB#+#-yV2yKRSxz z`G@YQcc1D2O~$sMGtUI#RQByI;nRiD*QM#i7!wFQedmy8o-Xv0x>AOyCh*+3ymWWB zE}+M%wJ$|+{9mgbGIc}`vQ%%ye12pM)n4VL0W^JZBePGR%`k?`x3~0aa~Z&bucD}5 zcs|Q%019Vx#(M*eA!J8}N74xcNcoVUzHfsuY&vRGEd12~82uG?VMW;EJ(%}3O@3&j3ft>GHUZ}dDB^t0 z+_^hfrrHQ|6tb(Xu7S@9ZM&dj*9I0yGmP!?GHEBYuUN)#n<^RIzwK+jO4A9+ zC*D6_YiNmne7N3QWbA~>@2!~R2(v;Sa?J`uhEAyLVpHT&K5JBz%XIaitP^@8zBqB) zoE74#v8&bObV3_GZQP~)(Gqbg6Y|c?IwHa%f!3FamT0(XcGmf=BdUs=T6Rv!5@m>` z5pUjiL;~B6z~ZlDM^wE-I9RXM7D;8cT4pYGLjIHAq)(34TFf4q2B_N_X*(e`ipEw$Q3`q`B{xmbcS3!~VzU*C_EI3C+(ry}RI-jV>$SHfU^(YmI@5*)YvdYyXjH_%<4c?9eJaW$H|m&~P|=+`J!YC2R1~;HkZh?e6;aWO z)Aj=|gs3Pevq)Xx1{HM*2c~%QQIYQ%yPr%0R22OkOI(bKuKidlu;vSOtbu0zk`Z~rb7cRuXI41hnc;eHc`=XvF1r5F$dHfu~Vgw-vK>7(7Iwy z1oP!?&yrm~s3<|3`}61$2gEhQqI-qpfOf>z&0XMhK--Tn=gXxzAhzcsuVYp_Am63W z^Vx4WAd6GHoaLC0x?0gs9v}x)SLe&_@SggYJWQRUbJrbnM*$79v${Y2!yS#eKjv}I zOmYWDm^O08jrRRIbu!is zH97n!-Hq3e3-vx6rsjrzoUx1lHA_WN27xJE>~84h`ZHmkYaP(N%TIrQHF87mIBQ2- zg&fef^7TRI)7+4ge$|;Lc>iK|?FGu7{@2d7USh4Jnmao1s3KJW^J|SOPbW3f9eI`8 z{Y=OFWZch>&Zj%u&iJ5jN_s47HaMV+4x`=k*iWsGX&zrwFManr*9V;`<~`i(Raxm9_ip}4xl2R;rcq4^Zg#V&8u zaWbaQFWLd|e592Qa{8dWgRLtr>~%oF*A=%8uJ=JrYJI!UB{}>>PruTG`yySw$Y@yP zA|+x7iqUg z6ugYV`l4$>E0yu3(^GtI=Q_XbzDVm}Yr0VQ-|ID=&0*`@7Ko0uX>AZKbEM4*~Q&OQwv 
z4?yY9U#Wh(js0Kuk%+Tz0uZl&L)XLSj;K)D%AQX$5Uuk3@jM&*r=Q37>#np5MADji z`OLsQbpwf=$VxXs?Z=dU{cMeo8nc6og^bGf239WR`7 zjOX40Ug&TBE@VnJF&H)Ubc-n{Iiq+Ls|&+^!D#k_i0BxrGZHP@AEzxFjHVi;p3Dk4 zqy4LYC+w08Mq(N1X^jHTNQCR|>DtA?NM-t~iN{iB6#07kru|$HA{>c1&duVCR@`I{ zIy@7E(nfcU9>my-_24$c_8`QJq}!xw;BzGcR77dWBr)sY8}2nHaH6+8xAw)*Lan09U} zXG-=*2H0M%zjH*{Jdd50)c7OT#VkEzj~$U!xLQRe+ZB67kot*(FZTZj*GPchR+ZKO1Rvv&dwELKJu>HE< z6<$?-D*%;n^Pel(h1ZW+taRZ%w%5$y=20C-#Ok)=1ycj&S9|vxC88rLD*ky%`6R|g z14Tr~|JGv<4omuB4@wq!l|yN{)h|rKPuvg8qTw~94Gms37%TN#-)zP((vKTb9;Z}lu$SK;S=_!ZdV=R z%>7Y_%a2mGeg||vq~)}rus_;#C-(eAEexMKto)SO4a-PP(Ioy2EWYW0 zMD8oKsj2uO;2*b}u5v&F$+qd&IQ-DCTlV0(Qx1sv!2ZE!Z)hkX*KmXHQ3s@#{nMqr zgNFFM)K`ihcR-ytU4sX&(ooXCs~G8g2b36YP^XvjU;mPK6zP0#8X8_YWLRE^{e4GB zO@k^8U5*&>SYP0PbiCy6-DIJmN@YFUqAa|>voF5}F2;AfbW*v3DmiJyUr+S4cke{;%XVHcXFs)BBSQ*RZvo^G1@b z-NF3_asMldI@~_qXyUW-w$5ER?#M1@@8|GFHZ>`uC(`kGS$0j>@0J&G6Ol7;*n`j8 z2YesH6THxOVY!CJFb9-BByGgw=tUntXctIMHt<4}N#$w(90wHD&Zj83(hIRCPj0L| z?|=d?wJFthdeZkd;jCY2w8s+-54`$TcM;3e{2uo$MNd?@H(i`q?|=}Ws&o5G4`iTN zu(+1xrFsHD&0)~yByL^-1vo7U%!xR{Oin>IS2 zkR={lICr_z`}a?hX%snk#M*CR?9}amHl>&`#j>~~9rtXn8&@3=f2_vEUKMx5JD%CN zvDg8zUoo;&pt%3XuW;`NZ;IU!C1PZtXCIy~a%KoW zpx}!65p!HW?}Iz)X^SY>E8~Fn$R@0;t9M8J%lXgQsNng`77-hQ-BHv74yP3wc)#6u zq6obIwI{Hv_O`XSA$A;JIO#bcp+Jd|6Z_pzyIAv9Nn5;M&Ik8dH@P8=wat@P0x*9! z%`~O*xgoD*@lo5?WBFWsO6jQ|$_xe%K!RIltsqIH9bQF|OA`98lBtoe>f~PH4)fvvuD#2Q)lL7W*yl zL?2H!w=w*L^Q@3!UQRJGmY;^c0s;Hkh55e!++boH?}$`}MP844IG`LyZ#y3Brxxa? 
zwDp*&bK?HX;-}aXG5@IEho|eX9SEfDZNc%Gmkv+He!OlAjxT3J9nfq#bH1CV1D!vV z{8+71Z>eaOHGb+X19lb;Ei%OYpSy1f zc}>B2mz%>mJgx-w-1U1(LH(C~*^e)yqVLNcwco#`AbzbX8Nw$#?ugNW@No)ic=?s} zLl1>cKYCzcyt0mhCfFRm5zgcPpJBMcw3LEUQkVyecT-Tv46sBT!u$9g-+jjw=dXnL zD4#eAlD$tZ@{y#V%2DNQ>A@7Fk)RlBvz&r>14udFtthBPNWgM^r#(uzu2mAMi{}s3 ztWiv|M+y~EM+N0DzSH!@d5Jwrd8_>1O$yV=J?tq|Ylkvc%BhgKG4AT!Gw5N50^he6 z-C(AmjFhQmnV)Tu%1+YQ<{5h|H`T0%S+*#}OT4Y?i9L!fiHjDn!T2(-OHGYE>c5n_ zulubHT6iu)+3D(7m~Vrsca4Y}-?2xX6Isuo-3GC`jC#HLXpcrax9aEH*`lV9#3i?v z;r)H|m}uH-i)3Z{Vh*jw{HR!^uq-jBS-Px_ zyUY&d9E-7jERN|teqXU?u{|0qOYxFar=W&jbv{4tiby;+H0+phV`;r=Yi0Ad(?C-H%|Wq1$Ej~ z-`)BgxAUYeAr@hJWcrJ{cqquN*e?D^Eyhld%ENT9d@0#pr&=lKvOpkfDh>1X>rw$- ztT#Ej?=EF*!16oDw1XY%ea~6xn*=AkuHBDPztuRuBue*==~kyO-h!#B+g`?awz8%(yAQISm<@8J`&jwnTor&FH;`?oF*Ct-OX*1)mvZa`rA9waZbXu0!MuuXT~IQ*P7#aRVtA6@Dt3Zo4)M7 zsE$bX@cL4LOnj~ehY`{OaJ;7x{p1(+oBcT66xr^GB-^(bY^lNf){D|p#`!687PAl% zbwnD{&6Dr;JJS92LcA;cO0vhE zoQ~_e4`O~*@RHRPoKUsBaDm`DAT`}>|l%0wy9mE@)oSab0cjGraVH6Z)zjv=U zK9~ACPaMBngzf23chQG%C-mK-`fho%9X)@EXxFhz>bFHxg+(7Za-0zB%!4**Pg^8r zWt}5^1+RBHE_%C=Eg~;v>6z(pLb6+4eqXlB2DuGe#IL`K={;IZjjXap8O4Sr$4i`0 z%B)GL(?cti|3K^gz6>W+C-Y=&_FYTV^5RvEZjuus>+F9QIYLGT`h7PlHski2m9ku? zEf9O@z(8o26Dqp1yS+Np9I?LHHL9}B36X}d{1g7lHH~mSdykx+m&jq(zCn>v#n$q*9x|3V=#l=lfid?fon+2wSe^X?%sxgw> z5LQ2@>x4%8*0f%g0VE{XT&b-9mq2L0>Vz_08iqdQH$t-6!Gupz^El>lW10ayzsjg& z(=s+TM4R?FhjaYKa?bPmlySijvF~vn-#+>uj`--7cBT{SXDWBZmy<+#ekDe(&)4h& z#P#{BiRv>)G(2|b^ynjFIxqO9NAyc$B)fdb@X9f)N76q{ z+s_))^%1Fri?mJxD(Z|Vcoyr3`hVzuP!a@`RWu;h6^iGVBtz0~Bh=~NX}Q=Hk9Xm( zC~h&N=Tn`$oZY`h4CwX-8XfYBck3f^P*_qA)_e4A_SSMaJybWhG>?6?BTCxu_32QY zE_zkU%ejmP=WkuS98n*&(HPEm_N{P46Po-L!O>du{3(KSdjDQsP1N+xtxIFZ0c8-I zKc88wffTZscF2Cf@seKi=V$NKP=-;JjL;yCFOR3MeX&=S9uJEJ{*WY=sLfMQqF`aL$l?NQ>x^(WV%VeD_)6tTa&S5s`BEsI89yE$B2;0x@(*JsyniT(F$5z{(#aWpGiunP6y^Wb3X$ECV! 
zk?hf;OWf~q|CJ?Ug%!dmqcJz`=ol3dY`G65Ckxa2gI2*s?l?hI^7z%aitALAv@iI@ zH@VeF=R+oO;uh{7uMka?T7_t5?BbiAVBD$9Gvv*UW+_`?XQA0t`_G%l(v+G}`2)u3#m%ux=U|XimrDmpuAd^IZgl%*B({<1oKQ29kq+ zh(WBY`q5nns7S0rJ$i^;0wyNfE3aWcD7HuM!iSxbP<~5GrMnQ1FA)#CRxAbVx3#K+ zYBA;>JL)+q1H(B3Vo%HQd@Qlo<{rq>*GsZ`?Y^yl9qei9exsd;`?odbQDYTBY=3L| z%r4x&FWcx1C;{1Y@BIh4I8Od0xQ^?MGQFK}PVm5krK;dIC=wL49mfX=qt2(*)nM#` z)XB-1d3i7y1)HeT_mgq_N3aS_gHC^vDa+I8NKN2>vP)^lp8591AJ$`<^!3C}{RnP4 zq6MtG(u)f#=i`rGOb08pf%lX0w9_e!W17_i`gG{~r{q+Ps?6xp@x<4nsJYepbbiyy zN1ZRNHh|Hl?%=j8y#7xnqixcLAS57XK*D(eA-AB9kJSiLwhEGK<8ge`#}<3%yAg;j ziI3V8f&0H8y^jAyggq1O3Njn0C?{!E#~vEMcN`z(xMBWzxkgVV1BmV+g!>ySE9bNXH_)v4PhFW?A^< zAKByaaZ4KVa!nxXKyY)r6>hIvadhB|2@q0)&3*mwdM8rPh&q~r+ld(0^ILE{^`SI< zN3SU~n29ew<&X1>6?Z@&Fw zL}wxyBo7x|8oG<)QCmr4nQvrplPt&m%gI2Om}tA}T|DB1vNr@|BeL<%b3X&zvZWka98oTn`PPLT~{ zm42v6IY&XU3ZFTSaoa-EigCLEL_xGe!Ynqvwm`W$YZBUp^P9T3Z2j-HAm)E2Z2SoY ziJd$$5@TxzmCI`V5*o<+Wr=fg(*0Y`h{4vg&_ogxZ zd$}A~e^4Oe>(adYKXIOTd~LwA4wa763^I#Ogkel$(>5`}`7BrNz?zLzScvamrJhg8 z3ZMeo*SaEPBaXw`oL0$6Q|b0XX7T;VG(~}GrM}$fOI)}#>dQxKasZg>yZOPUqD(Xp&6dVztLQA)WvH9SCC93 zf$ra;E*SL~{IZU)kPq=k%LTjZxW0wKCCaX$(BD<0pl4JRC-zi!s#k^&VOngmm$_LY!ynS`;&Dx`3qZ zkJ8mQuzu!6p7^lAg+AZZ&Vz4U%W?Zdi(91+aQ~pws!gw5=>8yK^s~*Rv@6sV+x?Wl zdDb2tyFsA_P6Z>7DYn%SW+%cw%(SYu({;c1$08AkN<{EsBeW+`-`2 zvl~5|Fu$q0R_t5hLH9o?Bo0kUJrB?*{h&IXh37l;=|bT_4+uPB_`df#_P5KJhOFB> z==^jl@K4oT;z`F|tidueR-Ul%JfO$6;=}56PiWU4&1$X0{*`}V(ak%au<$$~K z!MG=|CZrc{Ou_W&_=a(5djZ$FKBjkjv3x7?zg-OWqWjAXao+I9IbP6zbU>_p4+Rkd zuP20E^MdyEBLj6gc)z}>?7|b1h z{k%WUs;>a;QfSOPcXLf1wz-W7`7_x4{ttxwP~L(eS9oS zo$qS&AMW2QV!rdcFP(nMB4XU-H8l7xQLySzH3bdxP^=DU(dhCo(%f3P$el*#Z~L-1 z>D)LPoj=1ItDa33{D=2!C>0I4P6Go+^`nuQ6lC+xWsgBW4HlliwJ;N}Egqpk)X{+= zzRj3_dHVULFKF(T$l|=W(hnr-bANQyV0%2c?qI!=A6GzCz9sS^YAW72c8kUcC?^o^} zesq4fkl!@F$(^q&m)U548SCqA$G5NU{D*Di{Z7n`_yLWQJ;-?p&)2=#{`+!&x;|>8 z9m=Xv_NU7qRb7}iHutC7e|00;|BL8Pk0)LcLXK5B`2!b@S1f;E{z{~NlZ^3)a*+Tz z7giiU4wf4X<@v*`a+F>PKNV@TAOE1=k 
z!xaEM)?VYY+Ef%6T`IasBLKt{`*ss`aeQ~hMp4i*fIdFSRqV1|Pyju?!Iqd((MKo%vb@8Rp7~IbtcKsyw$}kbW92^=5RBt3p;g}7 zz6QX;^RW!EODj9q1cFVql<3~W*xw(S@HSQp1cS$~V)#ojz0WqU_~{%7vFB4l#4b=# zf{;V&A^Gmlk&3jCPpvR`MZlx|huMo|3p6Y;f5=u|9;{7aqf0bij z`K0i2Ae56vd1GrFkV-7;yQ;!K=p>X-Y+4)Q zm;KT=94~MeFIL`)=l_$xqowzK*uw)M>Tv4{Uj;{$|Ek>JJjM&p3yOljtsdBg`AcoA zGxv5x8R|`ejVXaJ%Mm{%5bcP1I%lHVjt9box}I&-K}YPLbbdQ^2LjDrJUJ@M5w#z> z67lSLAk=-yB(hgKqKIE#^fbx>Vb8r>b>4bML=lWDqekQT_f3{QzlYmb5_R|KVEpI% zXpNWU4Vl7$ke|b(jXpUd5>cn|(`vk5)2fkvb|<8wGj~Li4X^(@RQSB46H<7-e5vk_ z0N`(sa^Gd(gbccl%lAnH0$Eyax{KXzIWk`*a z-R*>0L{A>$_6oP=6pFim7loN`2S3b*k0P9mFfip@{SU>-Q?B=I>*e z|BWVj<+dLc-T+xwtgV(O83UKr&sVLM#xVMQvTwTA_@CdVbN%^k`a<7zIn6#}M~p$t z(>3a9FaS?A_p_6kI6vESiAgICprK2B=gt~q7$mgRtnUSIGu?9F?uEbSHh5l|Zuz4SZsFV2rE8dDZIST-TY!W;68w3A?ti-pU04}pKzneK|3`OqshLMi}Ok@r?StjHD z>8WQd?*P2_cE9;$k1;eP#$~^4_-`)=w|my(`QwZuf2LvjaNfDL20+Ef{bqNTF_fRy z8Q+ovuqV#=i)^+r46_@gU5>_isAIB#5nhjRznBHZ_>=L^9p3A1#SDl6kPbL7tK&UI zl$VWy#FVH@)d4s*$4rA!S^lsSk z-`?^ag;{PJ0Yn+~bUk#(a{sgU;v0LfY90E2^64~zu$XI}PXK!DdP`K%$nt*u+Krk( z*!<94ZtioVPMx)t#mK*WBL3l1l8w~WpPq|QY;nQj!^iME(rz!ft7jYCIE@}>nqpid z(686a420e1PmEs^;+)wtpFq-)t) zfTR?W9S=S+BUytj2UfBJ5SMxFH#@|NDy!`;#a<%<{L~Fvsl&n>OjN9H(s=9u7p*XUTnL%y^fr(ueY6tsfI(B@l1@wUM z)#7C13Ry%*ecJzqMGv^dOOz4`a)^>I?J9j&2PjL(Cy6ccDEHCPZ?CJhp-yP}jlmWL zlppuBKiFOiMz5tO&6+5piOf5Pf(JFBMe?ZdjwD4?ccZ~kCP@?6zg`!(P54h27IdR} zVtk*_CQZmV8GKjzhYBiE@RmU@G=Va<>-?i^6{J$h$(}5$1q7*aTMkndgo^T1-<{V2 zu(N)a?x=!>uaM)?n6;rOlyH;(n=&fm(O;r;O&e$%_2h*Iln__HiqeFq4$vg0-!v#H zp=zGO2g-YOpop6GQoBqM$IGI^y99JW>~`x%o;*b~eDj>ii(*|MgmFJ4jwqmrZHhwAGmR8=k zx#CEci8<7QjM*zkxLHJ5i~8?)d7?sWA9XQbh6_Z{aBtWq-6yy`eq?$}nIMYT#lKpT z6WiCbU2PgO{HO)n^*QX%fqQ%drEWD^u+^l-3h6z~yeK7N!^5+~*e@jxG^lvuLuWX-EJJIbP3}xS+WQidi|NF1*U-m56ea;#|(nQG1lnxyb8!V`Ox{3&F z^*2gX*XhEQJqewcju64JK6tVtQIAghKjWkIfx0`C-^hUo2a-(79U={&Rr~2+&Qc<1 zzF#Zw?41E5zqprkeaHwZH-F8asxkzUvYxOqVo^(;pHOUYXNHzx~9gHFI2wni|@jThN zTEW)pcKvK#X$(K_e`>wcrT;IVus^qc&#b{+rVo|6pki?!U}s4R`?>G> 
z;CP9}W7iRYG6hqi4;u`i%QYid0h@oG#T;$;i6NvtruH(+;q&daSL!)TkCHBCoDV>( zEZfw9%Lb6O^5eRaN&u<$%fI9=>BC&mw^x1~#!#ca_rXRtDpf{@|pn2EBU(4M4g0?};}28?R{qLH^c@h{xLWxy3r71wx;w zL-o-i?VE|3Ahxg7=~kUG(Dq=34$+3B`8zExDMG~j{nCTjjuy9VIcuo^MK?u-@9x0s zZDvWRmXimrCmT-quxY~uYj=&t-!kA-d(NaWRvW^dJ2<&SWkAF5cox?h9Z=~MTsM&_ z1xLQsu34O+1Lgo*L^N&!C|NcuF8ocvk>E?4BK4vy;pdT(fB-X#G^l;z=v zy>(%MH?iO1E2a4LfYy`Fd}Ff&AdYoYqPz8=$nfZ?l_nCPbE#fPikN~$kd8*v07#mJ+_$DL?6wl*XdecUL_Vm5p{YC-~ z@LgGBP9s9m{lqV;&80x&898q3Hpcr3Vz0lHg70hW-^Q{5WaQvL;F&ZOUfrv$qm1Jb z-Pi6WGtzWfgN~I?fukJAZY%4I3j(06UCeoE-8$eu5U;Q!0{5TE*7bfY4+xvXT2I`+ zcdhoF9ZC=zsww#i$2Ud!N(T>wDZ@mY)lmmb4=wA7*wK6yP?^ ztkLBVcL&Ea>;~UNR;lB-=ZLr($q0&acr2$f)j{RhRf+t4M$kEbCkclJBwgOpZiIhp=)Ts1oC)h+4OG0pn#Qq018s=7b?{~~_D>Y1W1q)QXhZqfb@A`3@qF0) zBCx$>V7Dp081H{sW6#46_}q5-8D;S1Z@S;5lluzzdTk)}*D(@E&&$JO1z_ztWz}NgzWjJpUP<|%4bQSPFEIPH*1IHKhSL4tGAhyXPflUm@ z3q54(*S3N{_!d2!zYL#GVqM|BZ-pS%0c%1FKL4*S?}#1{fd$zh?;i>eB}4%>pI+2Z zit(wP$p>r2plH+EpJ(4;{dya2vi7zZWQegywc~s!$>w`$qR(1jJ+AA$>L+ej5BuG= z4*LtmwW3paeZ;du_JoNPkPU7)WP4+J@_aiMbx8qI<{8+!0p~?Ar>@zZmWH~Roei$1 zFt#=*+O$p<_{a8z9sGpH4~|oKW#ypX>&%gCaTB0jQEb~JCjZx6K8$z$Fx~~kcvleP zT|$g^4KdzD#CTT`<6TC7-F3up7ZT%LN&lWVW{@-=hpEd#&Ktc$mwQM+i``zTU@8MC zy2oEs3}O7uZKktU8e*|}Um3=jSA3XDKpOg$s5dlavHr6kJ)6ue4HGQ}lPnf^zWM%| z(l8uzWF$NQg%3|a(9v@t!Hs>u4IX9Cv0^%$HNx2C>DFYIMuZ5==J zfD7gS-6QpX^L$3WFy1l6cn8(OTuDz*%B_32{YI};c?oV8SkfW)MFw)5!y8X|ngQ*{ z&XLks8PJ(vb|xJ*!;% zmwXxJ&!``a`opMSjQYnY2Szz1HLW;m)PQl!gIVRn(m?Px4{+|qyn3P$5LG|#KUS*n zt}utBv-$QlC6OlAsHbB-P z7-D`i$^(n&EaA@`a{Yaz*RENH0G{J!PgZ|x?3s^w_ZS1$^~uL0zq$VB4!JkhGY=}i z0IUz5|NGR~*^CwN84(DHE7`asCK?fTzkGM`xjUce=ub2zF`tUE+KBNL1xkj@WFy?~ zWV7%3Z>@#3Y3!W$nYVpy6ni0fAR!x{)5(oXS=wJTet+jY-S!A$NxtAbi?+rHJYIbm zV-N0t2H}@PH<1H!oU3r0c3PoW|Mvt@<@t!T3y2X%`U(grRi}SydxV*bO>|pg(#bX|q z?;J^Q!5C@CEqP`%ABXI1bu#7zu@%c}%SQ12FE2KAiD3uUv$pwzuK1j3AP${#;r#cm z*qZsg9REw@{5Rhq{Ks2a5?bqIi1TLC^Pxjq*g=OYbBT=&#&0eetk}#73$kcCu#L|l zagI2D9J06=!?%iKI8vG<;^@hcZM8AP&+GU0VIrmOK@4pYuSaX%azm(r$S5x?!isq} 
z_Rn)+iu+N&rC1hGt`G0JvY=3Z741)*|H*1z?wo(*&M3!)zQmE%eMsqNBl)f10kPXg zFi|)+Xlb(;WxQKvZ4it5_s|6P*)yR~7mbv8e8EG9?dF^;D~KHlKc6ATy^NQ%L^JS zF#pFMRxj{n{@hJhqxVI>yU*OKRE3QCzVS1=%|PXW;W;rIRmf4kSiSZd_8(t!^$xsO zf#GiPPchg(OiXKSc@m`p$Xx=Amh58?*1fm=oFf6{hWfw$4pQ0oxuGSHk70hRO$UQ0t)&H1~Glz zd#|2LF$Wcb_Lh@})Sx_~VgHs?b0A7uM66$=4kk`hazdNUfzl!=F^`A$m@xl?Cp)yX zebj-bd%0yB#+_9-fD}=O@~{4~GFs+zUcp4;u`yHJr!d03{dmi-EtG<&}g#cXwCo zZz##!rU4PseA|aMl0bQr+^+{;w7^(8)^twR?0@d=X7Q>O-{ALO8^txg_SA?Li~{ZZ z=Je#)@eh3Xf{S#66eY0isyXI*GULzQ=g@Q{^*hGhpNgX3=?^ z_x$sH(bbN&pzt0w`q*L2tBtc-5VXzR=qKO5ccQEO;p;-*oim?4I!~EH%H}>&1jbn# zlLcHbF8U%x%g#}!kDaI!Jm90F0fRdBZz)_Bz*R$fEsXaOvS{ayr9-&?ZRhE(=NgdE zVclk3i}m}b6s;v*6Y6RLj;S2S`nL9U-Eg`l7~JK4w#gr_mlPE*DWwI&rD>lH49#J5 zS5$m*v=-!72k?!#n}e)LM_9#WEtq_x{`;q;Ijk+oE^v#}0`&aH5kC!ch;4J8)+^V7 z!_m`I+#=ZC*nTms4AlaMYkIn@i!eQjeI&LdE$EMMziH9+A1~Z=4>+-$Faz10eWac| zEf_mAbarv18FaofJl7Pi1xr27OE30N7WU7SB$Ti<#H5jLdN^^yUx2{ z`PSkJPM&Dfp{Z~$bE_0s>#mar%+_t2LqwUV0f-7EE6TMEYW55T50AZ5s@ zR31Ij%OWp3d856_67QrxKlIcfQO4Y%MpSit7a zgfSlfPFj^U1jn^Xzt{e9!Tp0*T%P9-<>N$O;8%61W1YL2c?gf+jt{O=>cAT2Bk7Ij zOB!U?9*$RsB9AMNSD*jeUoiR;Mt{TTj~M+Gqd#Nxcc7x8JsqG9XrNKjybxngxnD&* z>L7MJei+?02S$CzCQ!BDkG|+c+G#|pft%QwEXN5FeeVmj|6hO9*)1QmhDQV2gQK3C zHI6&43LenvP>0wrBrYbvauZG-pJ>2*nn~Qj?}pod_;yU%szZ(!S&?&%8R(ncQ&aF& zr>_+;nmm&&hw=RL=NY_qqQl4T5j7}kT(*~0(hL@8+yA50Kf>^Dzsl%$8T~S&92n)q zC`U#)GwK1Oo-pzQv*-f;pe6r@R|~#xLFX5Aa-OezfB4GiHs|_C zIvr}oOUT%7{;@yCbs6bm)Ne-pXS9#M?1#a=810YIJ{j$o(Y_h&pV2=s`j5ZP1%`8j z@myi#-~2I4_*3H}Z~ocy4?FZPrzPReyb5i#L*x-G{CiUWQ{$t5J}&LEL;L2xr`an7 zM)S}44*l16VeB;i=XR|W{PX`~?9e}9vW_G)&9}e!Z>@#3=c8O+B}w3$|BoI!lrSIn z)=L88dU#OH!uQiE%*_1UFSY40{KB&+F}+RCXA))q=lf~Kct1z-PB#A(jx)Q1to2o#f2_#$(p}F3v9=9f*JKSrW%@mD{$XxxKWwq?7YzWm{=WVD5jU_t6Fkrr zW&niGwnNLMxq;t)y7l>tKB%mh71b{02Ddy!ncKhIk9^L<_5B6hbvdM^k$S3245 ziRwaUr>Dazf(W=-s%UQ#)`e=d54@u`qCof>!%sY-1G0$|B5u)Qu)rHyXR?;9;acdd zUAd`BRR@NB^e?ZDUJDt^$!u8%bfC!PY5cw3wScV0A?ZGDH?Mo$S117!UN&iuxOIW% z$3t3|CJD3(vB}^zT|isI)8o5vUfZj8%Y7QRQ)<8Wn@K{o^8qEb?Rrqu92)yuKmvw? 
z-^A@kdVo&YpJg8xhw4L}3J-+zf%bW~#iY3ea6K)_4CvJdIA1yCtto-yS4W++@A@FN z@)Xmn`{F>mz31K5Zw7Rn6ga_SNHC=LC%iUF;@e{g!zDHi9N7{OaevorWT7FT(5(sE zBq)_Mep#Jc#qjsI#PJBhvJC z^n?(1F+vK4=S1Z~2l0GlXOvQg`RkGYx$Og9KYwiZjcEx`*=oE#XZ|^;(eP)h>@6aWAK2mlfTwM-i?nahp?008U-0012T z8~|=*UokN-ZC_zzZE0>_FkfSDb#8QWX=QURb7*)~R0#kBG;{)6G;{)6b$AN^0R;8| z000CO0001ORBK3-Q4l@zxvjE=NX1eiQXv{vW+vMclYW#HY4lLDGRd&0sE85~kqRs6 zg-KaiL`Z~Lm`P8ux21`dge)dyAwBH1NZD(9(7E5#kD!HRICtjGoH;Z110r4c@6Y<& z6#(@53pZt7SDSL(Qk-!2P0EAmujeKF|NcL}SpJ&<;Z?cB+*gUe0s*zaz@b3sGa#6D zxRYlGf#`C^H3Bhf7<&R3GXMzUm5HJBcLI}{C!i9TQccS|Q%^IVJf?N4ogT+~$AFn_ z#9hFwdR`{~q#|147punt(gi1nz~ z+Lgq)v@fX;?`@o;`E%TWlX3YwSeLvCwlN>$HXFXoR~WDTZ)H6p-tJLc^nsQd?C>%V z@y-(B7+}{O#k;pK5Azil@H`FJ+pc|-&~L2<_M1F|fzov4TY6Rb9$UZ3$_QXxb(N$r1QTvm58dsS?Keau@c#GOq zdDLG;49?^XH3jEcU8(uc{$d;ldX8EO=XAb`HUzl9{Gk7$TkBjNq`@CPWgTl6&`E4D@AFc|kOu*8JT9591`uYb(B$=$}^_W)?Y?nviq0VbG#Q0iF>Smv-Z?a z#^&2|k@3{1|9wbOlf8eEoUMH)o4JKR<^*f}T=TskpZWAlHzYM(<5GS0UQ1fL)>~lo zEP|vjwf968dX(ovGdJd6T%b53%KjcnhEMxhGQ-sS4Nyx11QY-O00;mQQ?*Qvl_@dU z3jzSV!U6yt0001PWnVEdFl}F9WNm3~Uoc-|Z*^{Ta%p9AE^}ybguMw=jolY8ep6CO znoC9VMA1ACyZJWXsd>81A(Wv)NJ0pilTd_ANv0y&LMkDo$rO^1gd~LjdCKeeeb=|v z|E~9}-tEl$?CG4d&wgk$Z4ui4>&F)v-osC$jr|YKh{Msp{y1h7hWfYucirD`fcF3Y z<0Gl@|7^=_+r0CY?nZywjDzVnRM!WNH9*oOP4-VQA(ABiR|n>|810!MVeq%ku{Ta9 z)HWI5W5HEoM`p*tZCdhr&D#bTmwNh?aX=hw+#k5m4>KMIlOkj zGYeUy8hM0NddE+T&95g6=5#Oj~gvRxCL2b5i@ZX&0 z%H_W^(Tf9ZX|+!AsYZD8s)?wwZX7%xzwKydr6FFs6nZ_?G!Al%6l1nV8vfIN|ALOr zsKs&kAI@TJMdU)3#o;Wk36B?M{2f33hoAr8|K`J6pX8Dy<*T69r_NGLMkK%%+E+}g zpOUPNO;ZxauF-LZ_&DmrOuLn_mT!1gE}yw#x}RYuv7HA5y`D?d>Ytt~-H>!kW$5 zmTD-jp|d>an-g$56w@P(G;pg??W@&f?w^K!90)$3@fR1M9NkC3g?p~6l+`#v#_c5wWQ`?hWCj|?AifbHHp8e2O_{eycdPP#cj&enEu5YfiM zBwLYIbqC;%AVvc%-aQJRGX#JQHRig}>PVlYq6@Th0 zx+Pz>gE>V{9b`&XP{p$O2nAmkZCOCU;yJoMjg0MpYda~so2i6bxVPtA+-D0V6#^a0 zr4>*?rojC+7@Wq>@Q-aGWZ|u^+et~{NuL-!sSqdZnTAr zN&VC8?@D0*(`%YHC))u&zCVi|Dvq7^Y9(f`w1W)S`r`|7M3Hf2uyHFAn6LfalV>8> zx!duz=@kOU+q0Zqg^-bOt5SL`fv=@6=1UR8nf@l(HfeTnD%e71n=e1+z7h7MU|KU* 
zVGV)luNBv+)Yw7)zF&{sy~g0oC+GRE;Y# zH^|%|aOR7fdDi{aO|{dRm!H@JZMl?)L{fh>FTW5LqQu=0#dl{z)K=Pv8)9J1Ab$95 z0@+Oyj5csoktW)|W_*b?P<*HKhwnNgnzZZwkG%iNJ`*@Usb{A(^rtEwo*Ax+7Ww7r zzOmNOske8aD?%0Z8Zx_co?8Ol^uvjOer@zzAz<3yX9)=wIWHeQ(?&RAp71Kd96qX! z8`@p1ivu>4iP!`-8dD~_xN>{!*HTQ-Ybef&BKl4ZzHO^;tUu2uY!pBJk;cW(Z zwfxm7j-M&;&IQ1#(Q2$Ami2#Fqm*b0qF*j4)X)s^`S>>LbF)pMPGj5TWf}Tdak+}E zonZ<+m76n$9rdw)>$Qz>k4>RMX|UTSSq}>qKWdBfH3P<2`hNSmX}J5|wy&?s%t2hO zc=~dhE;0(V0<>;e0PTz0{_2T3NL&A?t$Umm&)O{`d~zva+RE6^!TH67+^;DqU?p2a=3f|4Ee@0X{k|Ao2B6%Q|d zpoTQJ?_q~!twDaGyzq~ms<>Eu>cFch)*wFCA@<^d|E&-2dXXg*pwT9&QS0uxjn(vi ze-R}sTU+u&!eB=)M>$Vlo9*n?3bD5(CQ$0ZK zc!;gG1{W&?&wG?{J-{*TZ1QzoE`~l2oBraa2Q1kbc3=sEi?v5+@^>{ni6|bttO>w+ z;bPk*`=`fUy5*7W$?gT-AZe(%wZ|h8cXE?0KHv0)nOokc+eb!@>TH@1s6YC)d{#yzE=@{pnXB#t z|Iv%qoBVonZ66q<)`!4m4$aGG84QTAe&)N8z^Y~Q6SmA@LQ?)qQwpXVRz*_qd`6Ae zqv%My)-PeG8P0^SOTOM*Y8Z)U^4iiv?U^w5$!+#z@kso+p9o1yCKPu)v=kiRqS1D# z_ft7cIPh8FlJjjYj^H_g36II}s`heGCSb?104@_scc-7ZJ%@`YXGnI;PiI2a;$u#8 zEx9=S^75?M944#}nz3fSIu{)b+%`W;W`h4m*X}Ljx#<0fQ;j?(bg^kU@ktS=b#w3it7QCR<)+OtP>jI7cW9};p0vO1 z$K>^rguV`ht==BQ1dRgOs-A1%xL9z)PpKFtSe~s*xpF)lZ=dR>T}@#E_cmXqWq3H& z2emyATfl@+{t(T;;V^u>_uQlDflT0ErslgfE(|%f)psj^2^>>`Xt6LH!J{b?mIRiV z-o6`(#Wz39`A+cVne5k|x;PXEcRks*b`leGDgv3S9)#k>8e-IaW5D?PIW7+(LNRNG z*I|)f1~4}6nk#uG1j}^;(sq4jK;VK5?xyq*3>5esQvZtqO6ieCeik7}H8Zn~0U_@; zWS##MjLjeORyGVVAbz>&I3be|94FM>^HGWkN-Q|{W3n~K>%)5 zGdl5NHWO}2%>Jl5B@jK8FBV=|%>>OwWAcS21>!?3pT5*aCiJwQ_#S#Y0Cnw;4!-%$ zgaJa{U(y0_=_;~7{Y;Rdy_{rX5im->%}j_&RIPJ(?vJchGv)0FJ(rA~@m)kA0P{8s z`7|anLHx~yQ~U@05mh-y+as7nckb@@B(UF)g*I_hm_VDiQs9QSKkip{723>T!fHnW zB}YDgoT9igElisUhtF)g%9QrUt9Dk4UVSC_%$z%8W{V%baguAk(8z!w$-=`sxB1~d zjWFYH-x**Kv*`Hi7(X1Ir)h0w#DvYy9u34t`e8(P#`@e|&+U9~Kp@$lO5a z+lbnW9N{3@ne?g5VuJ74Hg2bWASLJ*-IO|7%fBsZ5z8F5Kie()1 z_@EfEREP=5DjMVS7#u9!x9zEUHv=T7g$m@LBiRbc-DE&vXOzo?77kX~KRxcB&wv`T zwUL2{^HXyf(0h;_V3+KNgDnef82HBdiTp&z92~vj7I==J6{sf z<@bddND#j{02jJ$9~4~R1MeLd>~$>(z}-YVDvJ5QlXF?=O@#i;UWt5|zt|fTe%rmQ 
z@eV+lTG>pc7;jjUTABU9AOI~sAGd^FFQCNH7^wivd`*nk4PLN$_$n@_^v8p5dSjA` zy6})=ueH77i?NK2V==HFFNP}cfYb<9tz@MaUc=6(jK66UcVvnI~&)( zJ-PB^zB`DMt(bN_8!h$I?gdUp@sarguJtD<-iYpr_bQ!$!1Xst9JBQTR7~?YG zaHO*r%%k>*+H6$GJ9zWMZZCLM!L|xyvvH2)t{2Ayy}|rv`)27PHjc@(TrwQz4Jn74 z7H{-p;{;_1w%U7dxM91~ZecPTlQpf)#zgvnB4O{AHEf(p@w<*MoTl~?!R*mJx2rFt zQ~2#=;q3g>f&=Ql@Ppcm9%G^7vRehy<$OVvkUuVB;ffztNp~!K;b90-7*?!NdYIq~ zhYW+={N6E9e2Lj{yK6qsW16z=Z7UPU{tl_%w8aNHE59by$1?E=>GttjAJ|GM%y=eN zQd>ABADFJHJdns@;ywGTH`XulhEDz5!XY^(Qqx&S=J#Rq*JC^0FiA}+d%j(P2i5A(Yx zAF1$$+Jwn|-r$P~J^MSOWW2#+Z-!P)xetDJxEuFysTbVxJ+{oM+Xv~4Vfh8qyxdx5`Vz9oE~pUySSXMVCt=y7rpx@IY#38Uzr0K72iE znwC58Jsl&wem4XApOcABc8C5s-_I!DWgsQ)mt3Wf>SGlqM(f*+#cVqC8&6jfTE@f? zI(_B_CIh}nr)!z`CRX~E{!BNpr{q>H6XU4$TJAQgk97%5{j}&HMG@3N1`@40TX+i{ zjtdZj)QN!|+ox6C+DnI!bdijYA`HwWWFQ!QvA31K>jt7o|>8Z$6_ zK&E)o3O9K9EI!m?3Inyxlv~$Kaf1@dklyKwnHywG^PalGwHaSNAFlDi+q>kr${$@p z(#+;E%h3nbi2CAR=?1#gKrDUG$7O1czJ}YVJ>Kn&S^I7uyZglzI-Z}ZS6JeOiFJQ`eF~YSNC&;$rR&GWZB;r4))IQo zRN8KE*9F8V1+|`mJ41peth06}In#ZaxFgE#C8N>}Ze5v{`FjNuN7gye4SJcR!Zs#SCs9+# z{6#4H{}>aOQilCSSD;ZFof}Mq3uhH1kGKMhYQLR{l-Y8y&=u5+^k1_&nfT((j&jkJ zuHZ+^AxD1}E@!=e@G-*`7ABAlzy%goXk}lVNZ=zxoP{;B@NojM91>hd&syc#xJjJ| zZc*2fa{-zP8*5f3J`Nq@3Wpc1NLp&b#-YX@T6&WU)cu&1X*rLLrmfXoSp>d*x>tNs zDI1#}sP&B@aBQ>Knc)UDR-WBE=JX{1bwG;07f7bYi9{ZLAsdSWE-*STHd51Fz0?I1 ze$L9AQbpu>g7Urfb6tS3rD1x@L=LigMGiij?gC1-i+0x%dm_JDBJ8Sy3p72Yt4uE7 z{N3NSnOi4LJ@G%7mT)hj=PC!=R3C}%e(MY$?lrHLt>>VYaqr}p!_Gjb*5xJ#NAN7- zGRiNN92^17U0`HfM4#DF1(vx$UhPlSyV<0D&-~a)^)A4Tw0g652M3XouNJP5Ww$8E z@gN7YKg>y7dcqYrZGHyUsAAi@H`l=pL@5JV#t&(? 
zJhrh++#tz+S?PH#Kh!xY)#Pm92HP*KyVC zKLwqZupspxsr8vIaD!z1n@pVPhh1X+ss;sa5P0jS!pft0nSf_;b~gRHi?6-Yu0{H--)gBZ9{t`w5!e_~!C#EJ}m*k8Dki0$7FVgGk|+W*c!c+W#b9mG8HeseIr=}gcUOJDGla(*(d ziGwd1$p&+gFWk2}mame=!Mi?!ZuJX%VFAV8i#fRKckc2Z!M>nP)O7T94#K>ttVn5J z_`P(z+&XDGyUjR|?BEE3yj-krZA8a%`ZWj<@=nG?+U-{_I z*=V|Vujr=fzN2>m_t;38t-b^w_a|S!BmI_*=AECf3ElI7px>_zA_O@&V#hx60m_Wj zm*b!S*|sZ<@%^X2@EIFp%;N*l@7x^MiYqLL*!j6ZWPJec81~_NbBG0Yi|6hi=nlXy zOK9CEkFh{(vy%3DlR&(*xl}*=5)0C&vCY#Y1F@b}>_5GY1vkbSK3s4<0NY>j^C!Mx zL8qK!+_vR}-qWH_eY?;4r$5;v0tGdT0`TR@od?_^*-&fCXa8<%09tlz)|nr{hBK3& z6+99N#OE!aKCX#o!>zZ+XZ9)v;$+?9xw96rK{7vg<@h}T7=0x4%aVm`c%Qh-T6t~& z@|W&>k(a>!r~Sj0Hznu>WWT${N{at98@4&_GrjdP07I@hAG?vyhVX&Rjp|uwU~M4Y(xb2SUd@K(lMc;@BKvp0a^v01y=+)> zsA2B>l0aN$l6m&_1vW(Vh#Bwj3&dy5e%AKq{_Q`r=av6{VShBY9o(e2n*~~R*GxVp z`J=YTwMV}SS)lQ_*+p0}0KcuYda(B#3wFMLwR4(b0KRG6TymA*`}Q7vivhC#u2gDH zTsB1Lt>^emlf?mOeo{25jJu zL%FgUffySlZ^-drgR`%scpcflEmloF=;X?VF{yKvFU64d5@k>tm;{(G?lUmZg!rq2Hu`B*)Yf)8HCBTLwWAAL(ztA5~DK+;x3=(?q zUo-War$4q=9r-N!ngwk$ZZZ#l@xxucwn;W!ESM~M=#+Dw9|q~a`uXfB3%a)ITdXtk z!$--wHPyW=P!T#b&!UWjzI?srMxR*l(PmM*C7pxjG3Q>Ne$N8VuUU?z{2a6vSl74d zAq(dA4{Aq{{gv+RiY#eDuS+s2Cwooi;Jv`7P7;Kk56l_gQkl*{rgXkNvw#KIA7ps= z4hMe}s0mFfCj2n_UWv^bKdjy$@zpk;_0M`;yiJ0w+HU+^r}3yn%YT>SQ$ z4g#DXPE1KJ6f=1wYA)ptLIRzSf~q_*yPDQm_mvJZ-N8xDZ#*$$N!2**TXfJ^xdqEp zJ<(@b&=v6#qCfY^o+!5Q#D!;dOB`;}L5zRd*56egc<24jx|jRuFtwO5si4aPm)@3m zqUlKN`Oe@Z!3`d0x69$Pa3CGBExd=%F7d$W%LfN0OVj_}7byHV(w=kO4W6#LrQeb5 zfoE45M7?Gcd;OiEbajmf&i45^UBlN64EQ=jUdwu7U58qqcf1=+eWbE#$`22;eY>+x zu*nV5!{$w?o8pOrmgQw0oo;Y=r+v298BaWGEVINd*$qn1Ute(YH<=$RKelp_8?3L0 zJmz}H6O|c1r$0IE23HTr>M5vrVdj=%o3Erj-Smq}MMuegZE?^QWd$(?^xb40v(2H+Ob42gQ8tTB3jYLhz3GL%t>)Or9Rpc9Nd~ z;gKev_7QzGt8j3j_pvW1f7$jl(Vl~~QuodsJnIWwCH7YVVvnjTjz2j2fiFyzowRG3!7?flSl^Xhx=yh-`Qxe?`) z{m?0tHRVvMd-lT7Iy0AsfH_e0Sj+u@71z?&;Kav+!n4 z>72ZYEKuAOf2e2^3y*L1?2`Y;gof=!tiUxaJP8KrdJmb9|A1bYF_DG4-gO)FDufLf?g-s%wpzc(vZ|lH7DAyp^81r=fs>cSFv_dkHXMEa#q%fjKEJ$d$AkWWc>` 
zTOj7VFJ9f)qmX!s0ev4XS5A2EgV(GLhV?Em;8;xF_ZQN>IL%~mlf!xj*u3rDvH6QH zzDtar@z$OJ`FS$$*SIr~N$}+y!hjm9Hw$!18K`{T`Pjoe26$g>9y>^5B6EEHil$r! zR2=Z_5D{kLtxrBHH4_=29OH7W>IMVfXO+J1cV$3t=U{xtTn1V)mo8bM%Ya|E=!MHT z3|#H~^xATH1}NOm+}Jvmfm(8N6Ftj)!LIJO?h9bxCx`fQPk&zs>M`)k-@-uui6*~? z(tIKC%eYEqf=Np)VZk3Q86y>nBL) zv6qaec86Vk#Sjx6H@|u*-0BNCmE37&fh-JI>)AC^lmUT#+or^yW1;Dj!f%VFF<{_o zeqhvD7G4WD_qvdDJLYIZsHU{Zt_Zbj-yf@d`nuRmY9{Iewj{zB zX5o{&n@f@j{Y2^TRS%KzPk74Dze1e}&K8S;_M5Zu-Vx& zxievYz<1{u78_UdX=y?-6P61)b~mhNBmaSK(_Sqk^{cKebh^w&w~NkGOV=}@^xDtJ zEMjjjbG%)CE0xf5;`o*vHV03e1O?{KW5V8wUhxzgKdg-Tza32A9c5QYPi#hJrbl|q@v)D zr|q}hNQ`2EB(X1^E%Zax-|sFYFJQrh*j<;tAN0cu>3a*LJXqkksKje?vLC9)u%^sV zV}W0_mR*9GAC8S>O}R<-GgU10BBSjbd^jMzePR*|w8?zqwKyoDBG&Opp9RZf@0Ik; z;Gn)1-EEMc^^g2{Q6cl*x-bBLmNoev$Yw$E*QzDY6$0>cS<~IPH7vNBXfnUB&>z1| z&pg|l!h%{Y_E(? zi6TksjsT3`*3{khJVK-{yhdbz$03$_kqdQamE#3y9Dzd~3rXS=`jrDw$c zW)`;?rv5v=En0>$7tV*H;d|SpMdR3@rM_MM|Afmm3@6HXgyZ76<#HTzHpm+s`f#{5 z3>6j4&FZ*h|M0bW?6R0J6h0EVsEh0`Oz0bn&xwZNyBp8BUNhOS)`9(XnM@d3t0t?f zSg;|P9g|(}8iw9KPK)%avH?w>DecS*!)JFYvTjXbgVdzp*_*zF;i*%`?eY9jGA((#t;=7p<7#Jgl zyJylt{*+SDlw}dvG(S^8WEUN_M)EC=J0F1+Mvp{WuhHR-#i5z+u128vhn;oI9y&*)?@bZlrb*u@L=Uz<+Sl!G}v@IuQESHMWi`FmGz`N5%16wH1!Zo}mq_0U!%pfvOqB&T)Vbp1aFnO~HU^k~;m+ULeK~DR z(7E~|sryDa(hMEb&Q>x<&nsVt<1lruLhP4;+W1!smPX)E&GKD&UzsrcoBSdOtbRJ~ ztRxGX9c2R$~vi_OBO6!{S$pgXY zIquzWC9?lqzNKmBf=j_DIODs>eHt5%eZ21-l@>heZ}5u+Q8M#lrpyT*^{*lO&&yjD zcTLU*&mLav(F2~rcX8%)?|NJ z63}3DJv9`|6RfN!m9pTM@O1y?Pk-t+uweM-l*buxBmQeb@hz-;* zmDEGfQOqN6<}@}4@!!3?c0vfIiIV|{vj4$n+imvb`9;ClBqAF0#F+&r(s#bT8WfD_ z)Oe<`AfqSq`%XiG&*yJ;l=l*SP5Bv24n`A#{+wzi6fm+goXUf6A$5_vp9xviJ(gV% zcIqr%v}q+11gMF*1YsU^Zy!tOiMr1y4Z>bkg(nVNCN$OTOBq@ngeQgxLAx;_kGdDx z6NKA85=0m@ zkc?-;wdFHjgrI;+@k^Et6FOb_)5dc`u`V&!-BpeB-==x&LP{u>P22x!F);t|M<@o4 z956E>d#T>C9>Fjarw-^mn4{;Teqrc5G7cs%da7!tPYXkp=)PdbEMiX#s=LJ0g<|tV z|K1s!nUDtU+o#P9#kNw%2HzYeBz*0PEK&$X3QhBOF-Om*cZTA~-@{BO%=;3hqa21b z@2m;F#NO`@cC_nR9EN3)L)m!;n6U7%?&K4D!Z0J!$j{&+6OztX8rqHv!+wo7`SlN& 
zAVgi5&LsSuyna*HeI_I}JDFT;55u-pm;UOPOwdyjsvlDvh80=qO=qN7kU-s|rjq&D ze4R3#*c+mhUqNFi=FF70Q{%9p=|bb1^7Wy}U1QO5!;Q!n>i+3iC_Y|r|3|GZ3lb?u z0Hsjm92RlaB=!VFE|%9qFmLVZ;0H=9=(=!<|Mij(RPv73UPbZ~)>doHh7cUKO1hJ+ zL-KP71&fAY-Tk%GOWu(6YTNDj`W>OS1A-3>1Xz&0S9f9fq!6sg5Pp2og#|Wz1ToKp zvAFP&r-JXl^!x4WBsnpqAUsg%qpnZpGjh%!6*PLE5PEqBi2aew7yk)|OZ~Lx5JWd_>-ebZ?&!1ICn*&Gv`&yXrh79KuEeLbso0{xD zFh|eh3xh`cS0VC`y0F+6glnjK3)6q+H*&pkICxYZ#j!^3w-~`VaxalEdfrV_2_C(_ z3ueK{J$zvBX!`~%2p#b;4<5Z=3j4Rck^8s8Kl7i?hIk@=q~-_)BHSdKosHN?lPE5D|owyTS%3 z_UL`_uRxr!X8wUNT{i5a{7Q=haT?_~q{;qg{JPY|E3sF1w5x2((c*x}x5D@{%>ih+ zLP|u!iUVHjG?q--7JyR&(spgIyH?e(VeyhfdQOi1xJI%6TjW;usDJ1ge{>jJ^7ZCMHe4=^ zC{wfuK-qaQQ@-tB!()cwooDF**cl&cAzsFYo6&+(UoH*6E5(MvZa3MGL%m^e55QTJ zUs4+z7HyLk@V^p(8TrBITUyyrvBYP|Gl9U-HyiKS;6>dd>y!1~8t__Pg!2#nT-Nwi zZF%L719i%xHj6o<3yTySKKhi!Ho}m=wbq;7!ex|0Zzy8SE7T)gKbBu|_ zGbWckY2f_1e`ex|fNMjyYdJ7QmSAon16M9v__l%cHzHSBeeut|?WjLuuMhH3{uw14 zFm-)bGV7BMdQ>u03fhDimxKOk%{DU1EEXB8u^;*Kl5x+nO zHvCX4(tl0b+fTinF=UVW8>n;eY1tfR+;}#8J$1rF_ZDZ=zjZRx) z*Ua=}kIGYV|Izg&^BMX3#t-?neVQV_oDFA_uLq~w`QgXbn2k%K*rV&S!*BFHnBaqA zo7|s%*iPAbyVwvslO=Mo-VaTv`#{2pTX3lQm%e@+-dF_#TGUU z4I4P775X9NHaq15sZR;IOMWOx`7w%fU})nh+cTnm*fD?l4ZjH-I3ajfLZ^c>%0Ge} zP>NU@=bpmB58X$_DkuNp7eT*K{|-&gsQ)d&&yU6IEh4HMc;0v7NeZ!)-x4;uX7-1k zbNom31(A1&M4fGD^~Z}H>wm^Z|I7d77auOTf18ERd^hfz`rO;%8^f#{7uee3`;b2yl8a#q%2r zqbYwmNhZj)-}%bE$-;h0kJ6a1=EU4D1+}bE{rQUVPye|R-HwaJ12MD4qa-wf3CGl* z>{;+80N3A`6xZd-gvxJ^AKFUU>&~3N7>EWUdg{q@m{2R#`t$3?KwPTZeQm}% zCg>ht@mSI$5bF=#s1iwK!ZqhfDJRqe@hNk{zSXmt|BUa(2{|WARR-3Ha%!tT(4iIm zg=NPxkV(XP)^|F5unnF6c#bcQyesISLq5O2f`PX_7**6_@}ZaxgN<8hrmuZadZNyr z`5Wmlf_F_{9N~)%bSR#!^(^t0FOIxzJVXDdKZBJash?>KRGeD-ZBLdPQ0hVKp)Zd3 zk-56Th~HV7FOK*vnYzJ^)TovXe7>Xp;}&kxw)|MX8Ad7>#c@|NV=lj%1Zi(^r~ zcg>s)Z;c?2B7(~d%sFPf`xUXJsE3{-5BSLM4N2dr2S@}KB1wWId6JY#(jn;#-@k7^ zczq#weI5eMNYEN&fnp{P8v6^+n_g6MCfj`N~9tk#f$EPK`s7g*ZY_&d`6SV~(5!u|YX!nTG3q0>{H!D~_7?WBn$pRxfqJA){-2N-5zC)F;=KS6D&2tA~Y*e1>(Fm0`D 
z`*Ic*QfGn$PFlaI>r*-lld2YOljk}EN}huo3d>ySr|ysU78aN9or#XeTB47>9E~S(0|~R%I!uLW>dD?ekZ8Y zwc2{?ISVbwFz!@4jlP8=^qevBKTZGW+zu=FRmpaFyaSX_Z|H*9SVy+< z>l_@Q_|T`6tW|96KR^EVqCo(zT4LIggKV@Q><@haUMYzJFRl@J^8OuKRs-ZIrFx9J z%|@hbjqLzklaKV!r8qc5%mX6_fW%(Ohau)1%%Jwr0stnzmesEI|!Fg(qzJMpmr^an;3E33ClJSqog@cx0GQuyUJT=az zmXJf8m2M{aBl68^)V4lJ>YI)EcyYNk^ip(wo`XQm_k}g&QTpd72m42Ce4Ek!FOm6E z|31kUV!HHtXTB%-)EUkP0{_`N^i$K7vx8!H-JU^LKg6Ifl^q9c;rhOmq1h}yY*J0$ zeCU)dBv9*@O!P6esnoWE?NqzF{V-$1jw#mSE$ExKDIzT zje51tADgHw3|%qqYUa1>PcHjI~BaUUpW9r-Y9Ie zg&yiG(I)^isO71%g+%HNMoIv7CLdW?cHVaMtnEa===vE7|vVqVE>z3+G4#He2{zthDBz0;>N=0_|Jx-{Qfaq8zTN0vj$;WX(+Z0d+Rud0`yXMdcQ-lpOOPU z04np`EcEGNSVzU-AnMR!c5!h(D-6Zm^~>I_2GF41N~eb*E$7vVth)d@N3Z+1<%eO9 zukhrSr=&dV!+=Xs7{cajft&apU}T?gBMdDzB);PHu6TiwlChbYg@6OkAfXCxh z&aIOR$Baj-yR9r8Kx2hL|6}cN%>EhswaeQ9s3Z1+GYCw*E#B?`1A2E?U78b)bV{B{ zJ3?VpLACI~aO~L^Bw1MN2njyBZd@-6$2v+LcR7yQN0-BiJ+fDHlb92f&3u}?zAqdr z7MA_GyVwb;p8ot8{xuwF(-&OSd*K8o&v$)aJShSzsQr%)u_ubnlIM?)z@#$8^ZHwz zp}+o`FM@5>8EAdPw)+u;K8Awu&Rd=Z!( zRe9*4k1Oc3pO(n}7LGc`gpg`nfjU}~9~*%sRGgE!ZjeRPAdB>0ck}+*>5oVDp73K@ z1hqfQ*dsT(-?P?;Kp_I>8_@sQ>jXa(eU2sjxht_gyABcjn1)|;&v6^QE7}o`BX>%r zZhy}9!$y2kQPZOhYdwW7sdJ z@9!TOL$lZl@0Orw?4!=4GmRn4_*>kb;Ar$EW`xK>j|J_ehPqMx(i^hHD9!)oyXbdA~Y6Pa96r#T|?4sWKXGY@_QKCT4 z7=wk;=HaUu(MWCmlbVcSDHSJXX*8OiOFTK_yD_XH&CewD52{xVuhlbw6O?^G^2bem z`7GYq1dND!QeGU5UDTT=dy{|WKXTTwB6{>J@tO%3raODBBmKY4uQz{GYyxSNUf&o! 
zdUvzc1PYCaqTCvd1!Otr*O|bX`^C}=cSd7lA0eD4CO}*A?2|g~|qdk55Z~r5Aoq5stW`Adti@qs{QHOK8qjBU7gM=v*dEB0o za4Z^+txZ@sZjvb^QExI!qLE1VSThULf9j9CvBYSkudgdxf58-P%=bL5dL2sg$rQY0ay;HvM324=btQ13UFn4D(daiA zCNb=5I*Q-AXxzXdoUq<>^zC{*S&u~=Q-|&U_OE|;)ul7zf(9U%p1;As>*6R5yb$ zRbwTkKS!h5&@+z|8?%4f7kxBcd+1IKMl)rW=o$I|zr&9A+t0*dn~KM|fkC@Ww9Tjcvjk`$Rl=(@1`rdC`6fK6~or*y<=Q zR>b7TcJK5Aos^W=&&#>klwhuOq1qF|*RGYNWpgn%IaxS#h8MIgD;t8-Tx9&-S@&wE z7o?coUACfzizO@QCvl=Ttf>wVw0zFRj77=9|N?lcUl5`|?l6+l7Z;DH`kX&V$gn!CJH5G0}VN>z#Jg+Ptm@?*udM9a*o}Mh6Q0xO^&4$h&8z$pR zZN4uZ<^y*ucbr^F_{n0wK~&c`A2`+XSZpuhm!_SA0}oGl!}zCTJ&kpv5fk%cPZ9c= zQuJe6l3O&^HPnVzNqWPNSN20KGo!JszBXLvi5G~^tLuESj?|ChrcFNY1^qWK=G@v6 zjd=sr8cEB%;9l%U8@v{c+5;6^~NiP0C-&`FP3L6YNjlzo4}*2J??xIRH+>@GYq%&XEJ?=mL9K5bpBH5A zy(;K9hSaySYsobAf_-u=1Kkg!vEM>qrdp;KXlZ)*UcD2ILS3a&*Y|iqwT8zBl?%~G zlYdeBrOpeSLKNEfoF@3%cRNpTsy95`b2ZMdC>kqT_Kw-N$QwEczN~8r{^pq%-1qSY z!TO7$8=HteJUmXYCB_>xng%9yK95G7s~;|Unt8*@rh#ua2BVSjfYxZ9>J6((?_Y@f z9gS&kJw4pry}|$0fynUjF~lDFzC~f3H{8nCFbJI%g9fC(r+(h>K|n{J&prlA)@CYv zBXF3Z>Vg`F7!*%7FPKW?Nd&?FH|rSG>F>!fp@%pVXz|x1BcZ5Y}9<<+@+&?WRAyRe@O4%FW>{_A~*Q02waw&AG>Is z4>a*_@PBbF1}$8R_S4V@oJviLGA_hm+60poyM=v$Gt781b1xb1n}vskn7**RWs5j% zD``KkYHih4U+BJd(MvIntY5hAYuN%{_;%>W87+z)Og{${to4Q4Z|u|j=>&eaP%Hd~ zFL?58FjFuj^doi8H1MJ?r2KpzQ6W$Ii(A(`@dbg)=an9t5`&51A2^O;4A62k64-7K zgTHzLk8q-e;c*JCx+~Qmm4)UQhB%ufKfchRfOozjFl3pWiAYO=SiQ~LE2WLlk7seqS6L=J=HD=wL+pX{57OILSuo+Kk*aY$sn0d2 zQq8ksg3sC%dTcD|-y|py1DJ5{>lve)VKFFeZ;<{rg-MK*3#VjkW6(>3?&i0H2}a|i zI^Iks{a-lp`EUUf!fkiF2f`2HmzmwP=)X^s}xz%Y@(i!YA+C8vXbF z>C(^j{?nqOF>!|Sv_7xpQP_EEj$r5tCY)bn zbLH`;Nc0*w-B7cR36kS2`B*iPSQmCqdkw)y&AT)Imp#I3pIBBOy)-j577u41Ptoo2 z1%r+)Nk^k&amkFzw~e<5zj!AaUhs{@;KIRyfi_>L*it6bY8s2CPiT$PFZjZ^`vET} z%EqF?f}f?*hkfD9q43LR74e_AQoREk)O+aVX0A;jhA~YTE(v8 zyd?Rxowt52^&{|awLXP1Ur6^!JYAj~i>^DAnI}tp;lKOA%*1O~Kb`UYXS_5TO^`<8 z&!g@VJKtg?eg?OXnMXpdF6Y+kYIS01IW-7?;asjUo`?s$h&w>7DJL|45ask>p z%hH45r2WC?wT*f%U}bwL-S76$7!^%os?gCr4oD{m=!-4~=#tB9wxxiqBaNhonEa;VL_%Y)! 
z>Hm4`5wDpn$S9*VmTqu?l9_!o0wY)uvtpL-WN?8Bm&T1^gM?rCD&LwWxm)7aiy zOfZ?K@1hk<+LJchvhy+%W{vwCAfe_0;!PhLyYiTzRqN@IAmRc7^KP)0Iy0g0SH;`u zr=6iM|H|$qI!q`tUdP$x=L}-%ZfXa_nSa-^F7&LU{BH(ayjQJ}u*Dg|uPpmiev$$4 zLb;nZ?{J2+l!DaKLIwmX7AepEw=FA=I73kGz%iGL#NOz47I!LghO>w5 zbx(0UL9I3PQQ=q@nA0{tbgGCaT%c9XO4oIP>x*CRewyY1pSqe%JH%YzcG}C`4Df&k zr=O)xUCt2rMx|KQ(gQ3)=_eQSll9_DRNEEk0gTVix%Vu{{PV|FO(*38_pk(J#kzpX zh4n|8kGn%>f--aL78l^Yx!fT$-<=S>z?&g{SExIicc7~j?GM&)1=xJcM_Jn) za`WWnCi=KS?qhe_;e+nbG(YG{w}UGT-Fc{)TjCDf(%Pr#`&|jXCy$#W;{n-Aw@q_B z>k3I1-X~eF_JG{#)_FQbuFx54%5o|2fU>-$okCOGpv|=`E{)`O7M}B-F~%g2OE>TS54x8)=q82OJQ{A%5x zNhx=ev6VY?el$Ec=uU?cy&~nI4|Gs@u2TFuhz^~6CG88n-Jw7Fezt0qJCxjRop++m z9m?q5d!?p&K-x80+^FXJA*)DxWIJ~KktZZHE-~EhOX^Wh$){Z)l@pJ!QK?+re8tX7|0J-{g_#Jr{Q{xW=%G`|1tyCs_iwXA}78 zo(hBGK9KiPtZ9mBVLM2!1!*?>ClVhvOQhXrS zCe6v6&{^Ao>GBofK45T~m8BEv0r={n=I(YMXxlw*P0}(#cY;H1hRb|__Od5q%0myx zy8xLlE)aW;x~HI#ec&l!`8l2p=&`i(5774noR<0fB(Ya4r~_9oPe@CST7O@J2_+4* zM$>eH2kox5P-o^}*$%}+Yq%Gfkjq}zT)Bhbg+9}HEs;k(QY)5Clpy=i80O+YF%~3c z2ich!c!I`}p*0)0ETEs1m)k5r_-=zZ%Pxckke;bfe#HZ9sF+K6EU4(APyc&IELp|h(equ$OFN|r>eFHmJezs)1j5?N0uv6|tbKazj0bGrykL*(7!GhR z=<1Zsb%&UrQu87;IG|Izcg(rd1TGsA+(7gJBQ{xhFrKWpv76eeHV%kuHvH%y>(yg$ z>|NUr4wPJ_HQrBlhnz_f?++>XK}C7a$47+k$}(=SW8M6q?!(SH{)vR{uiTwl9OVZk z2jzF2YNLZmuH|&;bAA9;GE17?(1BOq5fwO&M&qla^yYuE$$8~d-OXDaVg5;@felWq zzp{(cn`VFXCa>J$m213mk5?}8%1vIm$}4wy`-N6bCOh_SeKXR)x z!1mW)PJLxUqspTkk8t98B~lfrL*=jfz?#4vC-Zc3Oefgj~*j-@OXWfo+Zwp(O#D% z>&FDO-XlS}Va{N8MmX;ZFyT(#2di=8onhxn0ECtpi{qh zvUh_Mz?>Uw@5c<-S88S1zr_hI$|wDD8qWm1&`gE61x_F|b>&34Nlft9_Klw;?F5hc zYtB#k!uU&Tb7$)8ZNJU{d5I$BohKdPnCR{fgCYjx*Q~xUuhbF5Ew`!+9AJQwip-Ll zVMmA)hGG;v?-_Du&fq9IA4eKH?S00l@H<{!RPBgudy*Z-rQc%#a1 z#oGdS_{pwfvS`BvAV}H$nEzRxV$h5RFc9PkNL^ta<a!I-8&iuq4Q$RwrQwg3uBdSc1aYZIo-A7j#J0tW$8|~ zSEC@^K=o62xH^hHDX}TO5Cu(-ZxwvJqK>r%d>;@Az*qCmXMHxO1&L5(lDAI}bx_0atq>vdEaH+ir6srx1pl*Seao?fPcW**(O zcJCuWPw4ku&R11b+Hb9-)Eo%~Vg-VJN7V4?DKGsQHIX2s`8w-7Qw`Hy+!-=Ogif3i zlR|COP^16z#SLWr6H-i)4$hN9UV51F*5+!uG 
z{OH;krWk4jU4EPKD+*4J6+Y`-J|5?O@)z*xCG>RrgvJKC5cUZl$ei&o3K%l6PPu(! zQGCv9Bk8A6f9bls2FHbdAnUjP{jCIBL7e63o@@J^;7_l8^2gufa7}I4uao1VLHu&$ z;dSqX|C!GqbJfq{H)Opp%e;FXDu?Ad+H*D9qM%UKvFiq**AsV5UR<~t1^YX$8AZBG z!px8U0u|>8eP)lBR&t(%!SjS>E-i?H-0o(lV+!)9Gw}JM$@(bhd3XG^I!M zM>$cTlkk0v6UyMiw@SNm2)&AJN#hbri|XxUJUAtt`AbC>r^XrHXd(35blEqs?!F8zkNCRl z=?jYA=02IdP#S|*pHI3p5JhBWI@4~P6h7{Hp=@jw;Rf^0)%gz@6!` zGBi&d6KCD|(IP_Dr>@y)2}oe*;OC1*1n%h%nvtF+g_)l2;tPI90sSs_=DIgB=&1Id zcDgSLPKB<~xo9i<4?eRZzqT?1qv7hx7Z)@_WpL1OejT8pu zq0npJg0$Q6f92NDbMDMc*NJ=DWF-eC>I!V%b>PZwyt^W_X%gcwnd|IX(`DkZWBy^Um zIZ$Bp=T}_lxhHm7q(cJJ!b&T%1wy*75MuPBN(mjGWtpugB$ zKhA~zYV!%RCW)iCgL`iJel9SkSsyD~AdaQGtsh(DaX~ybTsV4Y0%jI25>_T~Q@w@9 z`sE_1k~(S0i4|O8LmP@O%N4}z=SsWO4{*VNZ`1w{rTkdFFr9y}f(zU;^DkU%;YXj$ z{xG-mTmU(#4Urf4(Bt%){CR6h|Drap;S3*^XKm!vEa5_-7Tx02vv1Y+FNR&27S08S z^{X8V-VIipZHlodWN?9|!R@%s*H<0DP|sS ztq#q%E?8I1h2MQ0eM`UgRGZ|+r0u*;$}dq{EIRj7^}4-aiY4BXCl zs<@BL_vD#yosS}@wRU53lyD^EPSYs3ziH6v15-nf z&7Sfq{4ebfH@;q!^duY}OpMyyxkLlGp1XsMUxvepvimJ}8r8AnTtRBZuW(2&&YQV> zyV^hf$M{~JRar~cx9gfw<#t8nYDGTUbB(O;r{k|*Y>~&q%Gz^1>Ir>4%5P~`mBSik z?Y`l=T*!4_Bv*M;1~)a_%X4bwLSFs48}!ez_Vk6g!3Ccg~WBa;w6iQ2Xjq1Me}=5k19Kgp{X6otr9P z_tRd0GvQoAH7OsjTl)QJuf0+HrSvZg!eGTc+t5t;H}(gcu1wk+PRc(PY`AHC(Y~=_ z!Jeo{QogC8aZ<&b>hKLq9=587L8r>Axr=Uptv(>EcTrc3~V})?gHEB6}QwTuXz}W7zacJT(!E}9A2&l}= zNuQbity)CN_Evrj$zLdu@JRSe^^bLD9iO{|0PXei37Zn0R4dJ%5in#!V3lqBt9EqQ zC-UiCOmrsYhaQ~Toc6_D!De^I{8=Rbw{@=F1R89eEIm(cQ3zy+W`3p3{9+&audkPo>O^JX#8gamo#0P{;}8IV7;8EzkDdr%Kd(I zY#y|)+oLmioH>DwXPx*o_|^XK!6=b!3qm2oiWMon^s{}sLALw#BcZ^TH^+Ta*Pwm2 zib&&gv(PVeLo*ezn}(HJWBEYryMU+~jqqbeX@82?cY8AaB0(uKzJex6-6KQx_ph0# z3CfcE?_CH}KG{#-7*i1>P4MCBsV20c$sUxn=eF?^m|-Q6dbh=XVD1#xfuE%RklH>W zwWjKdW|z-8qFh+fx;d%gOHXxi=0=V&DW9>km3gDFx4O5StamRdKRkTp)5Lq#J-0pV zx3ot9NRN9t#-QK6#Q(v~x!nZbSlYjHXxN^2{IaKiL)fKo$oSwhXB%IyJtu9e%Ai05 z{8ptKcL%<=7kztoswI)1)n}ipH+X!v4>a`&jqxS@#aPK6zA$M2tXVaBh{#XcJc)Dh zQIG69J3<)G-i5<&&Ekr;+IQ^Fa*}?zmWBhZ)2I5%u1EG_ZxSPmoCtiSF!f1no4t0Z 
z+kp$?39K2_GB6A^Vp31cfR|eDqu4Xf%NgWbxQoKUvky;V`d`$Y)gm}OCf6# ze>giL97+lwEer6H#5`-wjKX-*-x1$H+EsBp|KaeRrIz8a<=fW-O7}$ZPR^5k)^_2r z@<`<2nPbOd!?CFKIa1-!|NQHL-UZ{ZuI6O+pggJnSh?n1v^Y*M@Ohl87!C$=c3Hkt zlEo4avn}60g@KCY&AXdV%cAM)#K@(sVbG~|{>rZ`8B8qbJJtLq48-kEWhJ*r;BdgP zcfHTT;PeD1)lCEA(d(^n9{-Inc=mc#-RxK6F@64474_3$pgNqsX*uCXS)+H7J8qHs zbk{4+`h55};Zk~eGwE-)@%m+lzf^m6-JSYmfXEl|4`u~duGx2s*Z0%O`e!^dQ`@2P z!ak#F!JaaUZ~)_{YqotFus^YAtIEk);ecm5-Y8l2+w;7A5`b!l&>1if$os1`O_Lw>B#VY^w|4FRA|730$ z6exK3tglqTt$X@TSyY5U$;1W6&TUq~HUi7ng+Y_7`18n_s+hxFIZ<(W7z7@lH^I0` z6`dE(2&iR-!NOnKdi~OBc;;k5YWR#W=<;%sej=`hkDnL~gt&x(1}$p0_*)gsr}@b) zNC*SYk$DrwcdOu=y&JlBFAjr@)o)dQ8mRm;o=>5d(sh(WV1=yV1EsP_xJ2unc|GgIJ@rsXh?-WPk1B|*Mhj;?4}rR0 zk-N+C$^K(AbMdM}Az;%yR;1QW1@pFV=nfAd3b_^B&b-wgf}ZbFE?dGm41OTUBBL zf}!p0&;9N3im3JQ?o^*u!C)|~Tr+!z9GY$2(EX(;7)q?B?tXnw4*Si9)%Oa7KvVUj zWfP+&{e$0=zw7!>%8~v)KM2bT`B>d6-odw4Eg1HPJN(*}HC)}-)oJ~}A{de+Y+f5w zj={RBtP@S~!B7}+G0$Z#KOR_g(D3t`V1NYID_P5ha8pT3OlL|k)IHAJ@W?^}w^zPi z#90^&LuB83H-U^tPWnTNelT3sAp7p+ayb0>@SWVRL0~b_vCC9T8s%>et%*4l1oTIl z8#X)_N1=utmw&Abg8tQ$qi(zr#dTBONp1)Vf~ILw8$QMg;_AZ-_S9Ggfy&_2mJe|> zjM1(!lNAmk`wfZeLwCEXn{#v~KW_>Inpe;8^yHiN@JivR^p!vW`{?^a4i+ZF(2A~hA#FQnkVz43p0VFrEW<}W`2Aa~X8nu#?sF!=DWk5B~ZFFh*9 zdRQ7Vb{C`;?jqxPUv}iKqy${uYgwvX9|+v%-B!YtqR?LcdeN-jKq%DMA0!wn3`@#9 z>?i030qmd8&Avs0eeW(jQ<@b78HV?BW*(DLg7cxCY>xnZ*zfqO*2kvK#!o41&e+w9+F$OF>M`JfQF>vI$Opxbj zdG4S5-JXBT|2Lj8>RsSv2KI!mKi)9k6L%~N5cvI)G5XE3d7gh_?J#lghaWg?m#5)sz)iMN6H(6_1p@T+%ZdPtK5Y*>X6B8@ZZy(n4gfljb2>E3p1|W4idmaG z;9Cdp;nxLTm?OMU>}jzBHrkclh%EI&HKoC!^iizi6fTpw$JX4 z@j`z&&+@nhjtK4U5nazn`OYU2>*Snp-t;lcPwplCN9*YuggRkK!Pq@9`+P9Vs%dXa ziWBbE-gJ;Q%@?Cr`}lsE>4bb^6Zs~I`{LslPbA#WJK?S1t1UiLeK9cG$5&3)8RZ{O z{B%mv7d`Z=bBg_)@zRdzsVNb@xGlY7Vd*MoET6P&>;4zMsP}_#=niLeyJ)_-V>$z` zXsuSh66cIR_LRTe=*GaOUe0OO+0Mwlnp{}t9Olc!SngIP8MDCMHB6w9%;G@Dy%PJ*~v3-y=myRrg<)SFi+;> z0Z#@FcO5KFsCGe#Cqc>M{TMjKGW41rxS~Q*Wte^p16d2}qt9iz;?*PT+zu8pFm>m< z)sm}SaiQ1utlReq{l)vOE3S1#OKs~D+OHVcG$gm8P|FRkhK~0)JIuiAy$6c}vfOa@ 
z0{`uTGZ{GN@`Mu0eQtQ_OYy1kD;Rjyr)lp*2|C`-C*x^gVAc4XAjcp&8XR4B(`tZ$ zC-(aIGC$Dq*~eKm2d6Vp-J+-Go~k>>@TWg`LTBQagypYl!rU=D@Scv52@|dFxUD<$ z!W{$Km9usWGg08uZihxa54>C_X1ra4iG}S`^f_7{XyveG>Z=GQdZqaNbdmJH+zs)X zIp3MMkg>B^o6iG#;mv1h85X`hl3lg>ygP;}PTkWY#lpPsBMv40?zq*U?ZA9H7Aots zw!RK`$BtIT=H#Jb$m^0hZz zF*osrrO{g^T6xZCE=qUBeFL+`INo96;l3vlQ8BLg{LHn5iVvAMdu{4*T@zPaQIK}N z;S&>8T$}c0DY&BZm3;X#PnbBjJSn-U%LP?@!hCj;{(i}|wtAj*L5okfH>?$AVZ$sv z{X!KNoON}yiL#r<7c!*?IGo()|{4KKg$^xdf=`t@0b|!k=dL4+6lLM zhl!ZVZ9?Bl-=0XNMI!+ z<9J(+Bj&lNM+WXcccgjA_ff{V1nvs~;T;6|-|Uuv?LD5w&8?ZgJa`${*r*d1V^h^ZnYIm{k6DaQ!iTf)Rk zwAE|{e>-&lJZsF;3?}+_L>U@{+hWAHNg}^yGI8I9Cobz3+29rCE^BFE;<7VOT;{H} z#+WylmJ}GF!?FFMofkq)Pa^X3@&?zKv!$Q-gD!@yi#Z z?uajr_+^SWej7>$CHi8wdfc4>Av27+asBhD0AJ*vq8EwwX87RYeCyCeA2ig|i%fZL zj_n;Mbk6bnpvAd9|5zaljB)WE_Ko+(((J(CXBr7yiOP2VgdH~>+iS1))VX32s1pfx%gn4{bft6 z%bcEiNZb=2x=G6#lRyMOvkFm8{0impt#ZG#&c_Qae8ZHqv?glVLq#TI&A*YXQOJF{}`pq%j%nN*uG)m z$lI_x|I?iVlRHY5ZZ{m!<@5jOa_RqvF6Y&yytQ@vtX~Q*rQw znSc8a-5^l#g|t_(qi~#EgD0M)-qQLou_yeT(er9g9MY}`WG-W3lS{gG+-^@y+I(nY z7&0-B`o7;9Ps|Q}Q1w!rg&EXa40}&ZG|9LCEMlu1=;P1*?tukIth(Zku}~VbOH8zX-rn`+h^32^lI$F?{xaz2Cl9^8NUb@nrvKg*Sg z$Lu4v?{YN4!h^oSowiJLerdGF_lFUR!<$~Iov$159oWhbv}<(0j>vYA(Q^U8K!+0UyRcy$M_ZsFBEyt;{3ck${r zUfsv58+mmnuWsely}Y`aS9kO3c3$1jYa8&|4!pJnukFEWoABB$ytd84)eWVq!4BoV9P&cgWjGwjA*_rSxF?62PvSvZbfo%3?m|6mh`)!Qhz zx32ldt|}JNNqzH-E?pZFboSzxQTg zRqxqzN<~f>95Y@vgv&zx*K#XNHJq?~{G^5hu`KNG>r>;~?uhB-TFZ}nuux}C;kdTV zj+k1VpBG2)-@YLAxaw60lzz8upVn6;{H9D-1KNMTaM7o{r{0nD^t`$h%*0Gz3&FNF#k1aCCy1{YZT2?dBcG;KQvKUkR zRP5WadOd0XmPhxItH!7``-Np2ksmQT&c2ZjGQvhHugm=@OjMh{_2xx6qkrT%QIVoV zw~l<{Uu6@`F(ga}^tQ*Eomn*%E$%gm?9j6Z@i9J6tp#O9d;`<}=+-(*F&U8n+rNGz z6+8E~PF`&bG;Q`5u~He7kDgVWHQN^2P8W%9u93qn+}m?5?z4rG3W1L0(h4Y^qx;jy z*bcb1ld`*+N~mJld?fj@9b6Y}Ss+uYf079+~gi&12@AsnEfpy{*pWdu<^wuU$MZLruD`=9_(POl0W13x6=q|K`0Y}wO-CMM=&1tY+ zc(x@x_D@VoI;w?>pVv1ntFwUO4`W5lmS|yi%sI6zh6R|s*fL|w8ZDe;nE9dE!2+JU z4DFcxS`%|;WPVul#vIxn4Gd*>Xkyw6e!KpK<`5_owL7p%6Lm6LokLs9V0(f>#aLl2 
z%rzUVpB!%nLQ*%P77lA-kW}-LYHc&fx-xIV>}gt9=Q{3DlBpT=PS3ie)}W4DfBD|= zorFPB#>`O{RY#i3xJQn+O~ETaeAwZhDz-o3xBLFd6vPkDo3Ot?1=~LPRLNDFLWS7G z(^E^8F{4x@E%u@*0B!z~4&BLUaZzzy?J5(<+d22iEu+b(;#4dlk#7QN`7SFTUsc3@ zIeO+oITH|n!<|`qLlJ4l2IeKgCQ#lb_ErhE}fiF`fNNn0@7w-jS)s z05=;FyFX9C?1a6o?gGY8$2fmw@Ut??>lv6grx?M}CuwH`qLmT7SV5s`Mv$A~bt#hM zgZGEc(u)lNR=-u9q^^PnnbqHf^?5f}_86GVZk7_dy4IA15zi{gTFeYjyWUIq8Cm{>{4+ze=IF4tMU^pSn=td@8G5 zSqdX!Zi`uDO$CEZxxY2DrBG!J>pRnU8t5dJcxcch(crMvDRU1!=s%El)?P#s%XaAQ z6P&FFI_18BDtc1bv_E>h=Tv=YlTK%@c`k+3=GV+~0;U5T^9{rZ3Cyt6;%izt0~k*G z%ebv#*p?A$P{3yZo$;n8Kf8%y(kYsw)TbGcdujfK@8x5WTmDJUJi!3C5aiy{@uNEL zWJPt>69b^#>)7?N;B$3dLaVdn8bhEhvr=4i`*n4aj)D2|g9M%+Ba&?Js5-l^X6MtR z1TI{$Wya-`)inA!wY&!e_AGsOVcC8Al3}xC!&*c5oxjs(Q{+4Qv~glG3f+c)jh{?5 zJpEx$zf-tSP2LD#QrWEEy<;F-HE7%27$e{&4U{UHjDsFwF`3~7BtJ7C?T3&k$hUSp zr)@9-n%&IW!aL%ib1QD#i9RFfcb!?A-69FtYI_`0jUj2Hcr#r^8rnX*`!v&o(C^8+ zdT!Sxfw6bz-jw$ykTkT`GuuM~hL*HCSCyCoBUklR)2Z8qaPe@Vpy-(^@4V(80i5*2^4drxK&tNs>@E`@?4WnHC`a zfFmC}Pa3jCw79ZÐq^;ij8Ar66xozSR#EOXwUbJF?=O6b$X@@r!V?gw0l$maiz4 z29;iZyR5^Oe|5#{!X7`xcq`yD=;~OWFAZ*b$xq$JTSNbq!%gopr6DlJfqpx}8gP-0 z^5zfHAn!X^|LnUp7{u!+n;(z>p(l##?$5CS+DGrF)-wMNw=Crj62DAf+eD> zCcm^FdgC|PaF4(mGI9BC4b>_y6xXHiByeW6;JC&Y)orcLuL8E)0#{V*mO#pv>U!Zc z`AQF4_uJ7@PX$yO=ZAXR(LpERYv7X;s=$@FP$->72XL5J z?D<*^a*v$hmnX2n!+X2ueo_Y^fIOz&g|n{ikkHg*e>GMO`Weza zQo`=wY;ti`o308dENGSdR7oHGF1ooQOn&#mOu>r|OQ{cEgvfw={yUTVPu+-Ks);$) zDGt%&9mG87Za}}joqnKI5Xz}I{&lWkTVXx>LKqFG(0P?YuA{M<+WPH-setWUTp&@H z3dDZjo?}06Dr>9@JYZ1)nBP|~rY8K_8D_CWE?(C7TFpy8grvm&qeDi;omB2882!fT zIZ6DFb_x|{Jkb!8{lDmNF7@60-A;eYo;$qwrtS+zNWc8!<>grukUMtGiMj2Lkl~wj z`arD^W>ay~g&lxKefRxKe|5L#_d6#(!)SczxW4ND`lfMflP@b(*^h*TlXDo2skx-n zUgiG0h;jZ7Kz-J{y7`lR^D-K1PnH8TtV@w$E&pY2LB+lHa)6BPGd2_F3W1I@s&Z@z z>~wPG(`3=nSe!Hm5Wn=P<@s-M@Vy?fE6UdaHq^Y-SQsq@yJB9+YaVv^J6FmaKhWa< zEA`tlgMwxL!qcimUKS`yLG^8qvQTM9c)b5%S3!dW(AB@6UNGl>urn1;e4-QFr((3# zi$d`+;RAiA@%FW!6O=`iDV~2X3e!)nxo$h(36f%_d|SmQ3SSO6 zzutY*36yrb{o-=RLjr}*_f9~CU}`-l28XFwpHa>*aNH6M#z_Db-v8$gXV5xS_{cMO 
z;y--PD;Id>1}|NV#P~^e1{b&a-zFZVVQOcA#380L_YIKvDocGnI8 zl%>w)COX5&cVjF>(1uWJLbnr`R8tX_C*Xw^IpI0Co!}X#xTY;i0(t3&;CCX8Hl~oG z!=~JdoA*DMg3j{(DS?YrAVdG{gJmX8f8q<90pI`e7K8RBRo;G!#ow}MGg$9_#~h$4 ztXkVo6*82PpRQbF4nrE(HS>B@!GPG+;<4t?zdh#L`L$|rGfcks$R=|TUvQ|fE>s;< z-gi9j-e?Ys6=a{5>uZ38>Tyr_PV{>DB9S|DG(qRbyHA!+%s^iD0Dli%3z{aG4ET1M zfy!LD{CC&2fP0%S(=yWxI%iYC6Sbl9p`~EV3NxTtM(NwF)`6yX-^X5ZHv`bUC4SRG z7yg@TC-uD=eO(YQ$yu>q*bE9?c26*RL-G$*9P9NY^{E(pSEhnE6_9#5$*1DYZ=E(8 zgK`yVkN(a5+zdScO5S;s{E*booFjUWaqYph*ezy|LB$4psShf5FR%3!G=~h5y*6|P zaH$wYFU^6gZ#Nb#W&my4)fY)E1b@C;OLuQKhLKCuc7mS~WSfA>$X`py*j{CIlfbk; z@PBKQF%rwm_AfjVJ97d252j)#&UO3?(#-Kyd-DO7*zrB@ay4MKAwUXY=?IZP9o|M!JH~LE}239X5 zxBNDQriLPy^Pe4od+EL7f;ooJNqt9Sha;%C?9&KyG6dROqAv*Fs{9;W)Hz@P;xVUE zS8yEx$2j?4@F4c@*-n=TUDE%y+Jwo6^g;Z{*cso=NO>w)Rhu5Pg&x_mQ;oELUcVtx zWg3jc0_$@CeEfkStwZd0>U$eS4xmDPmuadFAQiK77J*ZXDt!)W0o~>@YiF1P(5Qgz z_ch7*Rh0)+NPWqPI(r;7L1>p{nCuS#FrM?fVX8Xhs*KZY90tgsVuFUNg2nBX3mH`Y zRh?Vs->ZSae24hFHw3o3bid`R8k7YWd~A3J&{@#C>hhpEbpBG-o%xuo2i*^|4{Jc# zm9$Uk`2g5XTU?%`37t{Hx{4b}`wNdcKY6b8H=ld@vgQ2{9gw$p5v&sdpkkHy?E7-E zzuA#u817Hn`!#U=yr>Qc-4b$JBuvW7mD2-1X+vHnUz>#v>7SL8{Vr4+7!&F?n38QC zP5a?fH(f2D4L!`LYquwM6ZiBOaZMo#jDnRgdgkrE~dBGfyxr2_X@Mr z0j{ZDSN~1yD*okj2DWQJ(yf)l3-;Lp&YYjVWSR!hUfE^HWZ6Pzu~*fEo$8Pg(=TaD zvjsSLVj#>^9k?}FZ{~ls0Trnt{;peUkQ>dm$QiQ!3yVK0-WvVT`fvHNjZJ!Yl&pdK zah2OASAxH|?!~Ln3UH6_QPB?szkchc8!fSd!rGZ>zZJ-MA6i`v8ngs1!NCEh6$uKtNuC;@753GjCf};FW)rrv%aGJ(L{HxNW?sRC*eSFA@AH zS~mHAyq?vc@G`-CM2=eh5u?0(&np*rL1@1x96iK3=L~?y(yJn4|pPD8FW|sC?&`cpWXQSWQ&uWmA zy*kHjq6uX0-WRu0kl1;POMArSjX`MLMn9Gop&Oy(r_b&Zd8GGiD|}T29obhE7H^E8 zY389q0Vl$zg2_)Sqm4l2_V{t*a*0mk(u&4@H3T^IyT&tD8Mr?l9JzGX0BA}b{A-1# zK>s*;X5B&}f6O&A6Z#2U!*3Tw3j8E%E837Ovw8a8 zwT3x^v)rCdhm48^dy{TV0Tr1dnHRRxp)i-v;OI3Ku;0r1e)Wq!!E?;F{9UThe@23R zT~;5;h8iMP_o+he(<8o{d!_;G+ZPuwNgevP=p6if zDYI1_nEPv+ciz*7{(C0TLq7-~jLQ|+_+dJ57dy5N5V<4nk-Xwbn?4978P5k4Q2 zE3o^WK2)a9HeRTtP39f3G_GDB3aQ}TNjhX54cc0N>cJM~p87Erx}fr4r=5r)>Cbt6 
zh5pg0fK{vYPm}Sf{4BBkdX~Tj+_}wfrU6a*qL0t7Y49KIXQ*!ZG;~Q1Xg9LUY_>I2{Zo zdCC6pF@=n2gS(!b(T5}|=HO#Omuu=dS1V@xp`(3N*~xQ+d!BCY@ThR@0017kB423xrP6 zWMS8M$)gI-jDZ(_1f`PH(|l?j{=-$y#5Jp|cN>CHR>SllX(qW@C(XG82o}UfU?;X$NG31@TX5z+R z;?VKT?w9<=;M+AAf7_UeoL$ZPioYBEoqO7bpWiDAjG$8ZJWkeQA|p(|G|R*Yu74S~ zc#{Vc2M)bDWt3!^9k^e4^{ZAie#Gy)LW`5*0XBaaI!_A8o z8wD90ARA^{@|e+LRQcOvq1#A$Mp6$+A4vL2k{~HFktAJ`>`4kBDS{+BYEvE%2NGt) z_^W09Z9mcRYzp7=|G_sWr=jCHCM2EM@Tey=4s7EJjeebG0&jiZ_$wI>%Wf60=+Ly` zN~XzcR}iA&cel8Ke28YC(tTH`yRlxuE5Hp3Lw`Aa_jZLL>YHJ=+#rzp_Dr)2=rGJ) ziB!8mS@LtBFL!~a$VUenJ?MZ`+~(KLu<_1TISLkxGUun@fyzkFh+l4?lVb4V;~8hj z%N<)4-{S`V;qP3b?CrW!Zcs9{+(4zk8NO_{*S$yj7vlFXdiKT{3ffl9c}T`x8kLzp zwa*z!sQ9NZU7?54<>Or-iJ7NpROJdjCcTqGzy*4&-acq@b_JE|rQ6g(T%e+ZY$Ct9 zK%nQV_d;$i028jcUGH>(GRo&7+yyeIaQj9skb5Mc!Ki?=7c`S4lHv?pN*0&7K*{H4 z!|lS(u-~kAa$~IvD4p6U1_o&f ztB;`G~iw6jv%z}scxr| zJM>g_o9#I10Bwsh^PjuBL&B`~;D|B@$j)|66_0iYaq}NtEtw8LKmFn2mV9^6k!r80 zRdRs34F*xy3*8~-%a3r9zlT<&>SGTrdhz9H>pUQ7 z`VGI>74{G~<6z+29Uf59uzmS=Er9HtZPR|{dH^FY(z8AgK&U@78=N;luDGFP?22&u3bIw$^(vW&Z*6Bu!ryK2M2K213EY6)FwvRL(`bKi3-x5 zkgDKY_5P|IILDhS^~8HZHot&f!;l^9L0z4s^_~#mM$fExW(SL7^JCZ4dP3Qm)j11Z z*+P}7zv_fnp78Z#kDqdcEj(LWwf4qCPcZ1)9DUWw7H*$C=biJ#6LQhG*+$<6=njuW z56gIw@u^+=++htlv&{<(BfX&dq}ny*8`iK;yrkV}zZdB2TDY{<-5MV2d=9u=1yvgef7&{4gw8sAZhh$kW#ft#wy4{KTGi6dI(=X0ytQ-hlq`D)K4cIj zKF=32JTgB_7q$b}W4H5aYJGt=d)%XzWIJGcKfNY1fdMKqzEu$$Y+;e$PoWDo3|R1O z;+qTRc5u;0U4O^_$JUpC)zp0t-zFtVB`Hm}d9K?$cUSXVcWP4Urcpu>qL3+>BUHu^ zijas9ZJ9HbsZ1$Eh>9eMzI{sG-?#mHp8I#)dAc z?nb;Ll#7`3i~F#FQTFWZ3k4=fxfOjn6GPS1{mpN|?RBvNZz^9-CM|l@k&+b%iS-NB#Pi;Pp}Y zy1xNbe{z|#tDFTnRDS<_%wL^~Q5hvHC}`D~>XYaUlfy1bAepE&FOpN@82aE^xkrXl$-rkC0L3`bbs1JCb>7i;6?q)&#X>5cp!}|hNGjTf$ z7*sy#AMS9|yh9(KHbuTNc#u}|C~xjA*shR1ll#dzWJ9>7;? 
z(|I(&f*Y9&1bWZ917*^#(qs?&(&&Hk*-NpZo^qv5^8`bxehsDsQ&g8>*epJ!EFP;!Oef#{KRTw{mPOodFy&#$L z8Mor~_%G?zQ1*uUs65LLgDgOl3;mf7l#h#Ky|iM3F6Co(@P*q_Si>>L?U$>=OfLGs zn{C0auO!%zi%pW=2wymL8Fw&>4K%-B)umOwAiDw+q!OOL+%lytC7ACi-?TCtpki0& zp?LpcId8*;EXomv(IC?*|CNvAIXN>=85fl z>M#`eQ8ELe)|}%7*H>)`4@&@c6|8#ZNC`eob!($yH~=P~D=RY{=P| zqqoVA4WZ>WJAYXIsS%vXhM~GXE*yA6<;~aPK*4&bK4!|na;{$Iix~&xslyCO4){&h zpXJ%hh5@R6(NnDdVKq!n1^<6V&$6pm+3;{17MS>bSqdJ7SRPPkQ<@yOLDgGv#`5C# zz37Wc97q$~E~Yq%4eMeepL+yyAav2O(^ES!eNytWfelJSHDNJ*_2m@EDB}K5`lzvM zS&;nd&Ei!S9B7Go?>ccG3u37n!3#Mc!FO1mjpc(bRS0S=2b8pq-!;9@0wpS+-(?O| zw?(Wu(uLtf<;&mAft;f%@m=tT@1d?K^9fZ z|2hX)&S&y1WP>67oT0YWO^kQytSdJd42S9wa)#@W*oJ@@RX3`e1N_1+-(LYCaATr2 zS44;lZ-(lOg}_jqF)=QzNc&KiU=RxOqi5LW_j88rf%Bn29cdOSae+bMfe{8RRQ(f0 zF1Xn?9CL9Ag8{0RtTh*ssk%o?!k`MHKY_!Ab!_ilS#pzsfA>Ju;bbn{*2av#VluF( zeEcC?7^+WWG#Sb%-;Wm;cAnALDR*%)++Hc97Usl-0xHk_y~)G%ZdAA+oYV8HadbHJ zQMLVzasO03yDQrPgCc+`$!;sjh4Snxj@xn~hV$q5;`OX=usfU{0XB=; zm-Roy@JX(l&}Wr0xII)?C=&dJ%Anxy6U28l z-kvhNUpR4q_Tz&8lKLqi)#i3yd=?&msIE~I4ArHJ!S7RrJuXJUZLBshVma`9#>DL( zW%%$=V43YgERWVvwTjjFP%ZVQXobR`n(31K;eUT){`A6(WyOaqs#x!BESIQSwbS_U zMmk{3lnN|&O~uNTa`@mkZC2OHZ&*G}o%%KPJb!q9YQpjrZ^J*k`5;f_1$~eA54ZkL zA5Eg6&*SYT=3~rXb%BQ$c}9c8xeLXn?RdYT>djh=$O!wdW(O{?uqMpE{i1QUc?zN5(`1;2rBSo;VH5FUk&E8Vd}{4o$-Q6ID;^B7T3_ z+6k6Zu)ZfMbnilcEI7;~s%6DkU28V*|R*lJVP)zO5z=lk!P7B1r z4YAgSYtyj&8ripy z|IqQD8u?lAkaPK|b(!OTZOzy8btj!zkfXU!;p&ojKz!@uaNJJ${w8{Fa6IHvwU*Uc z&`0sPQT(tzrH%E8v^;rXpLlSi>gEaK=lA`mN=3#);>``~p(qgQ*SSAi{`zPRb_=%bh?Lc@=)wDK<2L@Hx#w8Gx7MJd^z8?pD z&l;D`c@+R1RBh$TILKN#|7^~KfZ;lgE8?hnY=wmj0>A>3z9z47~d+7_hTjD>W{-hG4hl%e`>u^>NGM=k)wsGy9U z_&Ie3{=$D)FH4Pu63Xw;=?}b9M{>W!$3jl7AzyHZ|8Siih=uZ3>u+nQVm%0x#|dZL zuDs}#_|XsQt5WEi`27OWE&gZk`T>K|BcgGes!=cN2OZaXR%p4!0`03{$Nq7Ckp9^* zVzyN*6j6Fbu`i^|iCI|dis_3gxHiKV+Oc|edkmg0rH>i7R5kc-&1qnseUk2FQ@>wlYJLU%x zYgA)C@!nY<@Oq}id}{vH$yLv_~f z2fz)ghQClOu#g(B)E)Py{Cj)Zdpv&&2~}xqA6iiQc0Ps=<+nK#0Gcx2L;F2qpxm)u zT%y%~SPzZH@S+@mo3TCG{-ms5AqKjF_diSp 
zG3M*qrN|%FrT*5IJv$XzvO2zfe&7#O8RS`;rUHwqH?-9sim19*CR2y?r6c~(bPp?{ z*P?;NTqmWn(jQu=`Y%(Wp~a%p#(k~daNV#sd?>iM%pxYz7n({|C8o(mV|oauJNx@W z?rn`F4DDzTqvX^tA84Y!bB~7d@t^dITYSKA4MNL*E;Jv}!X5~;$n zBHpl$lJ`roe2>G5imew2Q)e&iXrP}RSknn?0a@iC4{{EXMEtN=dbOnaEEnxn>Ied2ZXot<^*?; zo!+fS+lBWhN+@#rFq(I` zg=&xAeoH48x8Gc`*yn}Y665QBZk^)}n+Ea#`&&Ic zBt2lj-6i}kpAS+Wu|b;Q0qfRYcXrR@LsQVh`--I=VDlq>hvI%d=r(4rNyYF1>)9Pe zReTt!vs}eP-MAl2Q zL-_sjBS)K}?t6p0CEkD=BB5X+Uv-y{H+Z-XXw17G3H|DIKfPtVLA7M+8u#=_@Q1*6 z375Q}YAbs|szD@(g>8>J-|Yo`!Jhjjyo><8`7;TrcV3VmIMy=nLIhYF?cMWL8jlxq zKGQ5e0=BPaOueD%4N|UFo6>9}pvvsoy1c7iaPgYjYqPd+xMrE|8eQ%MMK6-0ryLH4 zg6P!ICAnU3L%{IdC9IFiAMHH1eG->?L+wsm z<3}4pVdix^TWKqAsHg02RwyJ=`|)!x*tt36oqP|rU#J+IUtSRWdBbv>Qz7s==$CPo zfH%;(4bG*m4uJyv{W3>yC|)?7f5Zc~g-c4~nBK7T{gd?o$a(=}Jpr=b09lWKtXDwR zGa&07ko6GAdI@Ab1+v}(S&xCN*Fe^DAkQ1eKb>uSH8o^7#?+YyvXs6)5c2oDG)*&q zFgHCEvZ#8ACOp_j`Mbu1LG25yICgWPVkKQvYakS;(B0$_2eZY!$EfLJ7RGe`3vT&C#Ywp+n4$#zV} z(+RwP=I7&aw{~}B7NQQ-#iML471MHM;toJ1sUoX)NL}1nbI?%IFAdn zuT0Jqe;x`m-u*1?UdSDCm-n#3;9;EUi63{lL*?SLSBAj?Oa=yzx&QQkkx{5$FgY3y zKM^{L#JE72nG=skgQ(cs_HGgGP+k67^{FuBoVSGF8x9QRsTn;LhU)sA;=sYh&Aei) zZ~08w6$7U^;6v?)SRYhbBe+QY5C^t)cbXfkMuWKAD)X9p4m|2u8q8-z!)U5K!1-r< z3cO0IrhSG)`-r3Y2>^`YH#g`lvH1rUKq`JH&*Q^3z}81g_<(~ZVV4{^7efZx#bCQuDy2;(s)of{dHjk zc*3s&-#7Qi;eF%rTPqLDmo+=vx8Kp={jO9w>hvjcjrFzmE3s|^R zwSlJV3pU;vm8zxYz*9W?VYL;O-|tSWob%coR5ZSZG|%*faUR9eXWcBJKtbtLbb&9F z9G&6(fMo^uDk}ZIE%k*^G5(!_OIF}>w$rY729|TV?LkKlT7gp7W#@b!-+#t0{zxw# zcgY9tojde(U=|1K#je+ufAxj%D)9@tB^+o%quWP~w7cclm*=^(K^bpfz8sgqZM`3bF47L|7NhdJM5w_MNTK%;!iK)_-@$asEpD~s~y zRmj~btMrBaM=&BM;&#&`t+V%h;nC*yNrkW2kn(z5-li*lus*I+R_PcU?CY+`eoOa* zU!Uf`SX{z}#7dUksmDGr_u#gv_sZF@K1HIp`jij6yZ5Er_6{5T)^2JQzvB)5RfpzG zZDxbe;B@uibKX#Ttp46t8V96I1yyYvec({>4};@)pN`5;T=~?-2adYmG23d2pYv9U zS7SUopAo;Hm&o}C9|ITADz_KWuzSm$0+Xjcuxz~Ci_rFHxVW>}p<#*-RCsu^R=tac zW6Q#vp0;?ynnel=t9znh@wsbLPDFXb8=tq=KFdypg45Tg^k-sy_?$*VZd)`I-9I+o z@sJnD?6Ke3(hv=$d*mm2efESaKkKH*bVoz7UEn)~>7KBx`>UNn7angxZ_hz#PY`Q) 
zrT3{Z8s02rOl_Fr3A?9@z3;~S?$jf`|Kd*%n2F~rav>Vd#d{sqP4I-!-w6})Q8dKz zw#PktjqM+sN0!~y(NHq8?e&itUT{#1xghikmJd6dOE28T`=R~p=Sr%)OA*SQ!nQt(_hpJm>?l%AaV-52B%JGQV$qs}BS($aFo8 z?ek4jx5qtK@`Y@XeS$p?@%K@`j2{{M{sYe#sZiTfFXGTCP4G9m!-AiSld98N;?TLL zYktKYU_sm{{nsz+;*c739B@ zgW)$F;t<}Q6xJ?gfghD?Bq9z?lXp@-IF$uCa!>Al&&A(U=eQB9;X1gR@OZKzTNf#_ zATh#5YV+Nv!o7pNj9!-6ErulFzx4c*Pbe8HA&AHQHQ4rNgB z8ul!3Q4^HhJ1QPkMJ6755X=I~oTux=qjl6BpJ)~sQ+LXI6tqs6bM-!ojrg0F2NO`e(YXy}gAeAe*YsTuKTsGfQlYs5c^ zRQ~uUN7{@2_%C-*`=x38a9%eF_CN5x$5ozf9vqL zzs%#(N4<94*e^6bTC@yPwWxE`~C4JI@93piNDqi=l| z{FmHd|1(~Zip|`+X8=lmKfIYR-~qW2uHRQZ0idy_^-faq1e%uZm>dgd$hz;TS4#5) zM)b#v6_L(RALG@z9n*V(U5G{COlL^6^S#4;etkwW?$5W>uymO- zlsv1^41eGOy517Ko|o|ZdCjE@PkBIz;=CV^U0r~tv3HMdHGaQPr@2wz1?s;Yda`JP z2lVZV_O06O3`M7Cx7q3Vdoe-P-Y$UjN9v#5j6I-eK2z^TDS%B}+iQO>5Aag;-z9Jj zp#A8zDRWlfwzo%&x*dM5Dm?8&sRvZO%{0Hr0<7119ZjqCfOhkrZ@g~dwo-x_?UTnp z@c5G|=n54}r%IhUT;!tfL$%$lhj9a3G-l{Rm^}#nPi*U`;Y*Z%zZrU`&TjawR}mKt zonJKD4c{@F$3;V3G};a8RcTx#H+24K2SeYTM2m-LJ9(A?nTw?$2qHj(20n zzx^kCxnpI$j*I-lvTYt)+JYWsC#C_kP%cJqBru%lkw*KUcZc&bdXLo6{*!0XdpIv+p8s$gc@OvL;s3XW%b@=}sZ)p6m|K|FC%tww!6Ko#38jil5lsdck#Q;MCe;;Qd9z4p4E2i31-XK}$WPT^H(h(0Kn z4bCZ8%7rUh*LU4d)dnhbWZxMs6f<+2swi5WO-A`RQ!Be^WypE?cs?+s>ZT4Y-o?m zsQhVU2eVx6R5YZq!K*{aDW%*NR))!&wz;$6?0NC@plTb4@huMCtcPVUU2;NZsWnV< zzGEhC%LYc#k9f0iD>zWIc;ZV-Hi(5(s-JSQ1S7xV;K~rJn;C@VxgW5A^}PX?ZhNqy z?_RuU`X~#~s0*?;!1Lp~I(xluF$d=v*V4yLVuSAc9KD&pO<|&Zss>Aw4K0HY4jnmX z3Q;E2tz|SeH1!nPJ-Whx<37c~Z*UttJ2yU5WI)`{oSESdSU?YyJiXM~7y^}!uoBm? zpi40@a=DTL1VU2j6f+h?U5jY=_1FMTSms?_s)U8;>Q2UeO#^s2J?6Nk5DTQu9(sM! 
z)q}?GIiDSb@m?t1kvHeL4(N|>s7%fcg!&*xQ|C_|2vB?FG5c)*Wc9_DXu0aai-gOU z@(u(*%N+UjHw$&4XWYzW`Edb2pOz7PsZ$%gE1F9~&-g>*kJ+zY?GJpm{Q9kz>5%W@^jJX`x7RWzeKb}2%SU|w)JyfHa!{P*b=1q( zA0Twu{k`@R;Iwf5jW-YdVTA9R&oq~M4+KEb`Lrqb4o-yhz-JPN0s;Ztiz`AE)u8a+ zx^AU&fpBkiW1ESj7EE8128yaIV0IeXFF2zM*H<2!;h&D-v&8xRsg*|X(%*S~>sJ<( zOD^jyn8kp5R=pd>XtE(|PFjYNi8%y)4-jBu`nhq^t?eb>5}w`ta)OtO>1k)pkMX0d zp>Rjd#n0>5P`;|s?Za9dP<^nm49P#BO+Cz8GQ|+BtKHRwO)>heJ z3%_FJO*LD&@b3P`7*{->xxY`;6+YoYR?fnJUC7C5iYcs|v+h=+EDx%VyS1H9vj8FS)y7`hJZSMM zmY&sa2_d68tl!A-AaP2iI`5;^U%8hHVR`wpZQ(-S{1?)bc>l~-FZ$r8sIxYpo}9k$ zz&tLf=dIhha*-8KNAwFKxKOfbTivs>7BGd1y_?R37Ap2C$r3&)2~{*^a3Oflk)uub zEI^04r<}_L3q6q+Q(jqt;E%Q%K zV;)eUGK=GwDDQQ^d;U@$%McQXeTQt%T;V-y=KFEGNxM2h&m!~yT<_vdSj1RAY1A}`%MlQQfK~A=BO&)!sFv+4veGj z^A=blOUh}rfde=1qzF09vOq(0n#O_JnN6p4FIyn$h`lP30|AtC(9jC4*nQ{lJuU}4 z9z40(l533|7teaI2e-Q=CiUyEtWcG}9#s!r4veCL#r&)gCpuZ}#uyG5QTls>749y-BGk-=*BE{8 zktI58fERL@4Oh!1y4tR`Mnm}vcC+E8^|R*%0X8U|ig7&6hD+=(AC%VGqS^Z*S8jR6 z9@bmTZP5wJ_gc<|QB-V1gdJK#rBm3zhWlyr%xunCqbpR*36^71F&l=3VS39cknzr8 zgCG@4CTfe6#vf>uE?xUfMK1vlLhyw7!d_$q)eTqA7O!0#O&vKMlL90xryIFY~zY3xoq9O z(gne)A72DBS;P7g!*v)x$t+Ob!g$d8&J`V}?psH)ptJ17yuCqg=+e7Y(zTW>XfB_@ zjt_A|;Zd5W+Qe9Jgo+_3a6<`H-i`}_ux`no)Ye3IWWJ_tTX0AqY`K=G9AxQ^RDN$S zV}A{RgH$ZeOn2l?<#kvc05%D3(`J>sqak0lL;wu&ajeH*Sv2e0&Yp{Eo`^b9x0w+D zXA-svm#BH6)~}bvT5((CD9U}b%M0CT+#M!!DgbuhztqqC>NN}x(?H0R7Y>$e@J6ey zIBs*Q4+Phl*@|7#J}7Lz%Dx6^7I>M<`6p=jAVEs+^JRhh>M0&49{QjpD&{tu1+~=u z@q<2SC{Ifz3uwyNm>t01vzJekzs-W%)EWCCAEel%f9aJR8}7Wv0;<{@S@r~`oG@lX zHI*o4jW>$J4Ypz-(>m&W%+;?&)w&1~4rlu!R`lK2q*Hti?}Y$#{EpFcvb?4rPH z95^-a*~+&*{;0!m@7GDK9GFW5;qD7SpSv1k*N@^3g&?U_1fu+*JTzSJ5&bx*w1S1w zr#@c0SBwiuRInJGjb>4?WI|l1Ixh5XT@o7&#YV|wpUjRG#mu zG-ab2>MV`N1wpSFuiliiP~rJ)x#O_juedZlvZj`ehSI`dc{;LJJ~Q_fe7KAY)$Ime z<~|&Bq10>Ti5*;UG<>-IZ3uf9-siY5iHcR7!9pq2p#YZG)OK)=$3oeZ0O{gFo-!Um zfQ{N2QB$%+d9aa+y}8LkL-{<^cn~mED%7?q5M8AD_vbQnDUJQso1VK`*N?YW&p}rllohG84rfww%rd6#mwRT@j>tLG7D*6L>+xxqxMTG zVaZA#L`6_OP3OU_ix?25-pGLp=G@7H9j&7>7t_4Zm|cxcS}S<4HEGX=>wcc-U8&?* 
zg(bY^p3Jml6cU8Y&Cr5c%mWS8t$Le7ce!+Q&GM{bq%SoFylXQb+5fpG38d+ z&mPh@Xe_TGn|Smey<_N^{eRDfLRjOl-9p7^rh5(VhkO6DbN=gV*oWbThHUvw?BP5( zPrZhH2qyoHJu;UeU%(!(VS2V^5BoHdybw7){;vZ5rBLe_sWCTF!!9}DP1BM`QLtsU z!_%*8=|k%n^Jg7oJvx5k(s3)@qu^l|D~HQcL*-i+KH8ZX1R3c`X|_7VGp?f)U@v=Q*H)J@w*j+)irsdAt6K4eE;0`<1M30om1S*AITMLAq89 zYl~V7_)<63)T!APbs6{G+;!Ljb}$U1?H1ah&S^187hYo>TyR^g;zK)>Zk0z<+F=PP zpWZ3`K5K`@#jU$WSG0oU`D347^t3~0R|F$sixsR`a9HL-zb$H!u}&3wUg_`<$v2MRBJ-tKa=K`4i|caw@eDBQArG%C*; zHTj0+O%rwiam%MA?;cno(?++p{WBf^;&)G(uj;MGjvx^?_PPEfE0k57mQliSg5!%q zC+E0ZA*TPb&W;i%$SGyh?`T`0qRSBsnkf?oJaW3Gf)(o8dd+;#5hmDQnrd26VvWA1 z6-(QHX2L-iS-H(qY*22|uEt%62^FJei1laKBKDnayV4&s!NK%q{+mWy}23 zUWtn{1xszw4MYANuQ5#6DS2ovoVP`xLDdn;U!9=#(+ciw5j%7vnqh5q)Ctx|ADY`9 zXNUG|x@NxPxD!asOF58y(GE%B{$jkCFjaV4tckZhYS?zo+_sqs-{+?sSZ8F93Pkq= z8IA_1J1}bBv-5U;>0PG$g*y#@?fr;>BqPp@NVi92bD4TN_5kOWAC?*C?|^Pp z(~HF>0XRvT@7RSLP{}Gz7=I-bHut?#dgJVf>QT*)5H1sz%r6vZk9S1!i{(3>ymf-I z#GI}#?T(09DqWGX#|e6hzvX5oJ0aK{ljJJt1Wt7mPqu3?(R=M*Lfst>@TwpsRYVSu z=WO{7XB!8oY+oC8k1B<|pmSr_JbPGnI#he?F8o||yIu5jJ5ZAHtvG!bQ0@Ws%P-q) zA>iWXyy!=O+N)~CO1D0Q|UJL(? zpUZ5w8(&}z7bmF)pTN(net!J&W55c$qT;vve*?tYUi0JUW-Hijd&qhFXFy(Wp0@U^ zv4nXvyS-{^&ZuaE`sKKC3%GIZHZ9)L8F_ul(Q}fq0I(4sb3M)(fkIo=?Oo=757G>=FqV#CDqE_8I_l~wS9YP295apmp0BwJ}M@uGT98)AAaL|ZV->Bn@8KtHv{)$ zOP4K>c18uv=EeC=W&mT4Wyo{_y0K%1vj~`hz-xAgNHK2PIeR_nFa?v&tJjBh}F-f!f9YFEX)&9Lbm_DY8J}X<}08dpgKOb;F z{M=oQqT?N4MaBN6A5R?6^~p)aoE&>t6n?@_-N_NzVE(-~X!o~&Mqye;Q;j_&hdeFO zI)LfrxccQ(MF$vVyjY=ftrN15s93q&#{uTme-3_c>x7z|+o~8(9N_Rf+ee?Joe+u@ zeRf*d5q2M+Ha+2@Bhr1^s_4XZ{7bLoe6fgE#J_ZEZy1@&$k-#_j;uvsI6?E!AK4E% z4ml5Up5!{nb&_x(;Y7lbgfoeU|6|=sG@8heU!RW65+koj#9IA5v~|1@yov5#@%>~F zY8vp@b&G%@ec>>*=>K{526dms4WPru-E;O}1nM$7bZcq=0E_ZbAB*_=?a+O zx^LwQa3kYU-3o35D*B0)OA&ycc`F_)yA+P-RPHz|7bPfN#U&iIQ27Nym>^8uhZq-* za;bZzxSdPg|H18+n=KQaE-|4j^~&VTqRA-NNZ3{vxBU)^duXkmjHE`dQH_ab!WAmX zT_S#-Ec|Nj1m13`VK5^E_Q+{ZwWyIqsfTTomHWy>I8-?g+V9f$*7+CZnNX? 
z{0G9&(7hyg$Km_`*Tc}zy_!CU;d?Q=ahp1asda=?)_t|HNnxn#>I{pl>yD6Hx{U4Z zh@ZQ>vz!-=`y0DD!%!LbN5v;sIKV@yKhZFhH56a$0MDswJ*}Z=C|-Sy{jhHLAQZLy zIj%)o*xQZV>8VXinSEPRs2E`LcQq7J_{_6 zR*UXe!{hIhl#qR64#{WIttw=2{|@fYg*fJ5=s#DXhM-)lk^@stnSp#qo1Eg)VAK_n@Y|!s41{YsSn(%k83su-n9ImZ*~TwBArMbdZh*A-22t~a8od9`P`Y^J7@ukB6r7R#s{PN=H|IH zS4+5k?eeL+>cI%nvzJFNwuCe*fh8Irf>6upWpC%rwS+A<_y(NIL5S{JazJaX3qp(wdEec|%^^ZjVa=9rK}c%cd%xu_W+1U_Ny;4& zy#8CoVy-L9pulc-rTO?^R9uky==Tycuy(RHoT3_xqhD)pokL4WS9YKixHj^-ms( zgss5e`{z!sX)uM79Hn>iC>WhOxa_SzZco(fGSb5BD>t2)rp~4?TlDUju`hzr?V`5h z8^9EbgiI&cUB&aYS>mnskpbt}ev3974Mr;(4}#*3Y8kc^SaAvd6FT2Wuxrv%V`23 zA*jeIahrm@3GDo)dPOij1fe;PtRh=YpviZxf?HV#qU~2zo@35{*xg1|LiLzlZ#2(+ zLT5m(^2?n`LZJv0ULTgGa%`V^Hdv;C`J)%*6b6`pTf5}RW)|kpK9sZikqH#1=4U+0 z3Pmjcy6fqR3`p5$RCTZ#f3H2$rac$0Z()P(zK&2tdwWg)`Y8q&PME$-&N2+8Jms)% z8JL3X0t>UfNnuE;n^vHG&=l&sj_BQ77=~IN%fDXn!4$mAxMvEn{HoWoxK;PV6hdp# zt)yD8Ji0D=|ACJgRHyY{sp=0ysP<0mz8cJ5*<<7uF(;!GwMd=MEOT%eJ-K^*6qdKE zv_qo4m_z31$)@*aPDV)ny`S1F3uw}LsWdkXw*$|fR>`q|2!qTA;{3^o{>4 zclIkcE6APdx9IWG$%vKiv`_Y$6%G=&R3R zPB?<1)%4g@JFt;Ao!~ns9CfXG)}(OM4ocd7smTz? zf#?<5Axg**l;&!vmB&ROsnh%`SI6P~QSZQUjqnJR`&3mq>jB>1R$6>XVB_zj#>}$4 z3Bro*y=3#pB&fk*s)(*o( z?F$cg3!laM$j(=`FD~N#H$7_#zHJY}leRQx7322FU;Pai>>)>N`JGGaaDVE%@1`hY z{A)~KHf0Ug4`-h}-I4165u=y;@5l7krJ&!l*3%ITg}jSQTQR-PKYMz{21k&}K>2lj z`1^TESzg9YFc2?OpCc8Cq$YJ`t3P%ExBk>jBjZR^FLzo2?PCI|k75;7c_cU9NbO8n z|0+(`0%sI(@YB5q+kKF5>tKSck`wyh+<0W2GltiyX`eIw{=WNEBhE8#z!%AUnd2h; z$_Z(XndJTZwl7++aG);#sS~KBAem*x-)-LcpkDJC59`dF zQQFQ0bL-~&{$0x((M6lR9{Zvcg_Wuyd}p*l+2H90ML)z`a6*3EOlPFw{=uwulOMXR zw~e7-?TmguDl0kO?uQ<7zSLOQJO5Kxl&m{S)+HtDmXdW%|BpIxDm2=tA>U%eg0nA- zmvH{BZQ;AF6x>NjQ^uAn`=vk;F5(4zeB|Suc;Q zr$^S?BkS>z_4>$qeqK6zPQA18MK`%QT`Tl#QN7WVy}4Sxs4q+9lj>$0^wQ}=#S+}! z`K8Y&TEqqwi!74K<@%yz-JAKJA6ui=(UZJ4FZV@OjZzD^G#fN2HS4(VNniAQ;nLGT zdaTjH-su7^wZ7cYV=fVYjl!hpf;mZNbGFZ+#KS23?2>w?Z1{ zqYrO&@I#AE>Ry>BXN5ej*#taF!FV?)vbXoLLRN!fRXyMR&{F(-!bB^yc+YdE*_bYV zt(rD{(|HSY(RN;_-voce$a~@LU}S+ZpLj=kfAT}S-nsg{ZMG~x7?ZQP0_x? 
zC#QC6_@gh9@pm`4n5QHfnt}PQ}U1v zKvzDtyi##tp!-Xd-!5?vK%ve%M@?WDBh@jk2ejw`Xs4U4<7ypa)YCX__9L|b6mj#? z)eAmGC{!R>;JRG^N=r>nfl5PE82QsR83K^&iZh?R*BhXCAOFlNYXXpb&84fU-}KQ! zMx3Y5vjC)Yto&4Zzdm}iB+JB2Di9^F-mv*voIdg>ymhoLJ`nNxZtV=B=_7|W@0Z$z zf#~6-f@$K;dgyhX7o91~LOrTir{;9%qUtBr{TIVnNWSu?*=IE!bl1}FS0RsuxyTIhV1kxhg&8|ez3V$TxQMqY1v6IfD;i!-E4H@R!D7EzdBOs6Focm0UM<+b9j10R}(ESe8zk=sA59(4K*2QvA zj;E_E`w1P5$R1Xm@Y8sC6_l&L+tP0)2l3rcUo?(XMiNZFUy-<7zOBTkxJ(g^I=*_< zF;5Pvk=wFUV&x=saQm~GadOBUvX^E{N(kTokv!S4rbx@EiGv`ZYwj%H&U_(_b1@q2~P{& zGN-s{wCb?%bGvo(KX|=n)`vFe{`SZ1sq(u@*K{(K65N+Bo`d`EI?QcJ?q;qNRq*jn z!|TaixutQ|Pv&cd>zlTHXQ8YIH=BHAe=;XLuUZ$Lhwm8 zM0w>;=BvDCHF{TAi1yucO_%H!CVUk=JE4k&kXgos%VT?(b0fwCKiPtxi?>ZXZTyaD zXChNC7sEn>kDhMc?EIeDA2M^#$zVKQi1ay+>~3a&-1UY@IxOT|7W8|e%Qxo6{0TEI zN#OR?n@#6meP{MpKXhk{u+S1#QrYa%-^?4$<7QhN4@6nzZ|zcd2!L$UqZRbife4n$ zHN5Z^f`#_khdrh7d{&(lZT=wy2&FpcPr~bQzm!oTG#YZQADOmwA|9Xfq479h5>Aa< zA8Tj9LMissuHCkkg5_nGd+c>tsO4aC%$Ncx@RJyAQ`sGeT6`BaicFA#nJbmwep(xd z(7U&GYWk9p@@;ZgKj!x${9H{`3O38Mgm;SL`51;)3LTV!56{bVFJ}ax@;QUGAzCu9 zt!r}1SHA$1yHl-At!z9D&h@Bmiwr==x4)Y|;f^fu`33!9iUEK3Qt_}UV{?D`qk8^C z$#_Xws8|`owUi7%MOCh%SNvpPik87-yI1}wYwy;Y!zZL+l8NBr<}3c_W!#jCq9fzL z_|Ss6QSJVyXx*kZ#itVR{=sbf3j_X$rc<7DMtl@3zikunkWx@xJqB@afFOLru+aLNN=*ux}{@BlC?SAewaM%yktlag|(rA!b z`0@KelUV%R`_A0jhF?sj+8z1g?s&g&?rlho{mtyjXNt`k^hLBwts43helb_;?>Y71 zE8fpSjg=&fXz-l-(=}$aAEFmM(qmi^fW=$(HEbQg&ui%=wiATGwr=+MPnfA;WhkeUhg`*SP&xc{_H*mkGJrosBsewb|~J+ zJ^0!O(PoLDOd%oIGwpSs@O7-etYkSg9~FdA0Y6=HD}6A0qXR=`(BQ=Mtm7W(_<73n zn94yKluLGZ*m(M&tjBjd_l*{YmXCLRKB(g7U3WWcKaB=cwLPcI9(p6%q2!pKd!&G| zU=tLSc%$-T2kWEirQn&@PuI^m-UzMfIVNN=7Ph2>JWzM`Mm3_VP46cEW-dARXvMJs zF9e>VulL>f$vnG*RcQCa3!&fVV@)q~F(1p`kXjms`=2Zwc5%&Z=G3K$Z^}(^JFRV1 zm+YyUu5+``&kXcJw9_$5mucRuIsH@NX1x#oK0D&A#NDqocDZtf1@Ao(tv$zT*Uncp zC-XnuTj_<{`=x8zls?z6noMOs&+$MlJ0sq9Jm{-A6;Qf1&C&y<+}!d+X_5f4k`!E= z>f?cyKKZ`y_9_u1yyME(?Un8**%iO%;wliWJOFuQSP!`2jUa(c$?TivGb!?D-J^`JZDx!E|wljJc(bUrZb<$tm zHOFW=%#Bh;+k!U1v|MMj6DbGj{#HYqEoQ0YG&m!PBbG%o{WMTNCxqLx(gl^hHjaNH 
zi}%a&w_CFsUC^N`_a7e&(M9?dZ?`&Ixgys7d7dlA>m!$@1D~CPUC~b6sf8<)_0a&D z+$Frw6*+u9x7K2t0aAbNqScw=ibkJ#B9#+lg1q8x9o@af713hPYfQE@MTv{#3~lzg zqAkaE?$&HJMTbG+QF)y!TJic`gO;lq@;FvjGPTtedBnb15>;r9VDz3-`Y~=O!CY2u z!#fL9F{%9Or*t<|7F;OM(`kvq^RLXwX>minYB%%WOtC^XYgh_dzunNq*J^8m`>c>( zYT}!XtKE^-yNl@DcPlin_wqsx2#c+{gm}H z96S*}DdoU5IUBTi*P3kU2cAf*ZZKizD;rd~NzQOO(+kO;oOp6Yi!F+|IBvEC#|vf8 zO*wFQj~%k_cG0@x;DySzeajUyvPX-}K2((PypYS}faM2G98gr$>%OW~FEq(H{_f^7 z2c$9m+)krg_QYX^MZiVQd*<4JMXFWvM;c|k??6R$JTX7h6Q z>vSKK`FNpMqAH-1XLn504);O1_HOBV|A(DMT{)rA1cqvMj@0uUsh>QeyN>8yl%3=7 z$IK!1CQ`2=^)6B`BlR{?uOsz7QZFR+MpCaN^-fYRCG}QPuO;T*v^=?ux zC-rtxuP60>(k>wF2GXt|?GDl|A?+5@t|9Fnx}o6jjEVlpc*V5ofz9TqLV;``n zaBQmSlyGx|s<50A!SLxE&Ab?5j!rty3qANI04<*TE!Sb6Im&tTNO+8TAX2*jPHDw* z3#9gQOQgG2AX<{D^2vIv;P zqqy^25F4#_wRIfbZiGHuwBC5mfsK^*3C?hBH$*WhN(t%qxPOk?m(3OiDD9i5E!T#P zShHJvN^k3-Ks;WqJ{v`OtT;baRqr1=hdK(S(FBLijq9%dYW17U`&;LC5!yTvc@x61KQSXu7YqA8{JiTS~BsPDf%Lk zIezmCHfrualRY-l6qOm-8_tQ~py1To7K7mo6q{p|ZhMb|>ZkW@70to(Io46XjYi!c zlyIBnXN2Ut@>C`%a#4GCNNxH71GMAek!kr-TvRe+^r0?k1GJB>|Lm3r7qPcGJQba- zi$cFK9<{CHqP{^^PRu1uB$vJU?30i^)lHi>*P_~J5@Jtdmdu3 zWdhVL%b^k8U_D4(IQxqXn)D!bfbYRWi3#pY-L$08n@lC0$niY1X|eKDEypqFw3y4= zYh!pQYs@{53!5ZSoTYt-mmT@aNF6d!*g%R%&veg2*cgi!bRuA^JK*(jxC_L+0x0w~46-f+QFHe&R0 z&Bk36M0M9w2gaRZBh-2Nr=+AHDr@UKG`QEk?;eZg zkgBjvAsg*!nSG{TP4XZA1QuaxG@8)RotyI!`5Dt|xPPCG*!JlwrklTgIb5qW_K(~n z?S9fPK>7_xzXIuZApH`g--7gOkbV!+FGBiFNWTi{cOm^Uq~C^w8wpnu?j&AFypeb% z@lMhONjD^2k#tAWB}um=UH8axt$n}ZcGuLfHI>%qk+V&6wyS(EbA;BiPPu)0GVUicW=6@nHHUCpf7y42>j3k!%6d)y zVa&JObIxlu1mW#kqrS);m~X!*%;b$00lLci*xebpz2T(j!ObGj@~QXTVLtx8W6QFA zh2kKid0pDji;MVYPKrw2mIRrL{ExL!xIYg_stl8Y0T;gu$GNzD&Q(;nOa^}LaPp>4 z<)Xb0Cmp%XlY=5T?)(HE7nRFtEh~SAM_5ah8M{?A(_=Z&xrEv94Lf_=TuJ#qkcQb(WYHYx51oJ=LJWPKLEggp0Z^ zMb^wIQwJpEa?tQ2#<%0_8QrhcVZ#jfmeeQjth>tLPU#6dcz=ZZU7Xv2b@JHC6?aZpqEThHFz+E81ey?E_p z{5<)4sC}9?fb;t~#=0C-bU60Mm3cbwZR(h9PUAS}iGs)-$M-q_)7*tzyV!qaMft2H 
z<=dXHQRphMq#UFR{dbS(-M+y_xreXUHreWelWX>2n@4P%#(Dd+rwvTmUn2bCQ2JCO_vmm)OJGobA_t}CH$4awpu_E2>v1PlI4DK;#G&J> zRA6Ci^}^ZsecHE4M|w^u!>oYFyC)uzL3$gCBI6BzLLF^h~rP5B^&YZdctUy!0Zq_SVn8V7qFY93;DcjBJV!|>Y8PcG8 z`yH0Y3rZO-XCz@&cEW+}xIg}t-z(1qOMuDrC5(gzEL7A?FEL#w4r-RiM?L<^LMg;-s_XC4flKeZu;3$yE-Hz?qB2kCj00BWD_X`@0rcR>?R8jlR&f^s^ z_b%`d%6d`ss8#7NujGEu{i12BF|_Yk{MmO%J|X#tTgs7|-XzJb&gMT@%cPX7 zyXVS7HG)C%>)lns?aeZOO+Oxj`L{Qw<*Pv2$Le#;Xso|^S>tkhor`yl)V#1hv$0Nr9}#Ph~5^wvV1EY_LQVsWuD?8uVy9rjxBVk ztJ^L=?=%nbpDW4V)}q4*?&Zbhnyu#0K|DG?BlR2))i;zlHa5{=e@S}pq)Hyj84$ix ziurU+cCc*089ctgJ|WI7H7LgYi5|oK`z&AF`$P?9tj}E*&&G!#l|tAom;)XY=3 zox`D5(tFflfTPjcvk${>rdZN*H4WHO*-`&vEf49k=#^GU>M$Pn=e-_3-#=Hp8skgy zj`hZxIXsl|xWw_yTQx9klstKA1`pjhe0+X{lsf2Ft@c!o!20jpOt>UMR|Uio#6WD&LxZd=57G^&G~)^+xlok|Ju~Pp>_=cza8AZqf0p(T z-)a8gI}&arTuHc-cp>pd;+4cZNf+o*s>7NVOjjXWHa>VmhX-;Bbvd6fUS_y7`mr#d zUC{om_m+nk%(AB@rE1{+MACdwJGN6AOB@3ysl$RZ9l7@XxW9Kw@^7}N!?mq-pOeN1 zp&Q4J&(Hp$4(^JlX>;^~P@l${DjjGY(?)Q8n9tP zCf6q^2qEn?RWDy?Ky&Jt;L&q}P>$}Js&Vd`V5F}feq?qKs(L=NzNjxGFN!>5Q(^g6-+M%U&wl~o( zVf+|h(MlZ9_=nGiNoiQ#%hhi$xbY$AZ{NE6oJJ?#Cx?fx#guED^mTtVO4X3@2XCK-dgi?|&d{o(`14u5Dd+~S>Dv`Yv zRq3q@(R0RZtEvk^q5(T@7Jbl#k+pUK#(RSvd^L;BIrWJcvTRB!k1%pmgz&5 z#DUJqGzBwqR~~5Zct7t(dXQ0P0O+y|{S3MMt>pTQ?YhBeO}pKM%tL z2{1a>7{Ki@2Tb&`aodgMyMDYOD7-u}%?s0I-{sx$DAXWFId8Up(kzswh%R@KBOC8VOHi931?@yZC=V80& zdCJVmM)2KY_qRj0c*y3-@%gfQ48cXhB|iQs-oNh66+hNx0C&qKd~$2$p_ls50o1dlc(JN_mBb;#Xg0*oQPv;D(& zB%DY%l5i&RK;ntSBZ+5{4oEurhmHt3Bk7Q&Q<9EJIw$#nQMNhBj!{(HkFa19UA+h6KDql7kz`FS*euCeF z5dS{bBX1gjR`K4=#=^maV?d*Ag~1|z-n<%_#K4WVdeL&-(gU=(s@eUSVyLl~izA`DNRCm4RyEoQT*djTIb zI9lOG5V<8+ASoai+n>)<_SG1{7bpAVcKkf+c&u;m+PEs5R7sIS-zepjA3Qt z!>jR}0r@V(TfvhRcmv@n}q-8qZlp_J7+Yq<%e zu-?zH*@^LQFLLKzF7EHb>Q#GJ;r{UZvu~OJExKQro)z?$_Pa#I7M!1s`#RYm>+RJ!>Wk@j&LgWu4JN=(EAjh=^#QbST=Dx~CQ!EDY4kZY z+-B|*dVAUg-W+_kWYa|4pM%Jq!XtS8v&P7c7RK}ST5#yCuL%_VWIW30#(MQ+oley! 
z#!xSI;J7Q6D@EH(ubo`;ubjdxsf~X_`=`gnj$GyJV3h8m)9LH?A8m}_O5jh@J=*{9 z{KzvB&mGz$)Mmqa@S;JtPz?Xxf>%etXvQMk@rl&*LGl8r&uNFhKVB}S>W#hL>lYis|DNjgw zL&_skUXgeu@lMhONjD^2k#zSl*`nerhCgG~mW_8B3}AYr^}UyOgHgi{mqv+aSpMR+ zW=AmAGhQFw1BU-1$>*{}S~XWvA6lAv(j|l{YRJS(ua@upey-N7Kdx*T`Wy ztP#E6vI^@Yk!T`Y1F!FPM%g!42HfclGi@;rM)YYFnfL4&|Iif)JCZI*cwrFg(P)B0 z@g5^S!;#pMd;}XfZ zMKZ3DjC&;GBFVT(GOm)0yCmZ>$+%52u9J-WB;!I!e<0~EB>jn`zmfDulKx84pGo>V zNq;EmFD3n{q`#H)$CCb9(w|HEdr5yV=`SYz$)vxT^hcBaYSN!g`nySgIO#7Z{pqB? zo%F|({(91%Px|}2J~4vC8`+3LUV@NxSaO(=Px>Q*C+=R0A+A*=Hm(7vi}=E@E>3T=LMcu-Rv zG)F6}`CO0t*N={H@=%ATrgOtK*5dKE$NpedX+YnVhgTCGvr$vw_1b00nsC@srhfku zHaZkVuT;Y0eR0lC>ROHQn>1OSiLl+D9?ZMDnT_}f^vcaAbfB+2Pi0Cb`>$-6p=WOx z7so~g+q+aaAGM+JqV>4tHf&^T#PSt-tqreF@;`o(#QmSa__(SCPLr;0N|wOu)%~>e zr=k{gTu!b2cAtg3PRte0oTvp1S|)ev1{TU`o>?#Nhv#$S$h6LBEEHIit#~h96RgVl z2ES}Dy`RDO)6s-WW4eswCSZEaR>)XzT@$8e8re*iWuZidPG^Fb7Cf%ianxCZu=rDj2bnec!5XlInzL7llgLNMwb=bGJctiZzkiP$@pk8ewvK0CgZQk_-rzMn~d)!aLE;hjkRK}AbW}XZR~9179HxtY^Fv5-KuZnAgN*IGP{q%Fh#?x)SsWk(Wyz&$ zt3)t8h#xXhxz>b308TSUS3bv zzN|e8NP5F0Xn=q16y1*WJrWxa+IuYa_WK}YnnHW4AU{O6lm5^x8Rt&M!IN?FWE?#i zXHUlAlX3dwyvcQu>n7ns!i|J033n1NB;K~+`7i(Bix{uE17=K|2=up;Q))i?A|(A$ zmR3Iz#>JPebzb577j`4C>bY$^C^0{nX_{krHapxo z{X_~57PAyy(|i%%Vo%uPZxZ0pwEVWqMBLw;bIwOrjD|-}A1YR=`y#c9M|xe)ME<#V zfKg?L34(dvLwWLj{@`^y{<-2Gd5?g+XF%RVAnz%V z_ZY}~4&*%u@}2~_9&)|^A3Ro!{s&$pJV|(y_$XOwJYVk~o}Wi(<(O@9aI9;(z=sqc zl#;#k$MO9WK;pr2znJ&lC|9VqD%4fxpL-Ux?`}8hF3JNfrR{a(c`sT{y)Tr3U)pIm>z*eR zy;VP0e?8vL}AitdXWfCMU2)h_|6}P+0&*+U;gih_fdma>fLe}#i-rm~s zP?w_0ejDKl=(Jt8_C$FQgw?!mb^QL98Jd%5lOS|IXT_kEC-7Z69zBjygz&_+*Drp1 zK+Eoof}NI%Fm-O&#fqmMz;Ca4-ZxbV;^pgp?mLX1%jbIZ3{HaGCKDDOTHzIgQ&{_rh*!_14h+wzs_z^Z?q{?u=UsUu%Lx9^P*k z!0=N1vfS&g0FsPM9qpKd;dytha@{FW^gH&*JlakCoVVC$@^@*JeRN&7eLsfhviVn7YQXC1L)GJAvgWLOBlI&jmsPPHj9+%0%_|EZ}ncw-lMt5bN zLDXw681(sc_grab&D;%1_vV#R_0@!r=1%{{%s3)2Hgt zJYHvXTT%Eg4`;dO*3EVkfdY>X<-s`IZkFdx9}t7;lk2))#drfeO&YWPlmvX{KfJ$; zf!og)TfWwmhEQ6`6Wty!C@OB$NiZ7^+LB7A=)b)Hl6%YSD&*jXYlrf+%XmJz6U+kg 
zC&HC^s_fD#uYc&B@8kTwXOSArJ5KY9XJPopb~27#SBFfGU&fwG-T%s*=*Ea=E5^G6 zzqM$>zPak~tIJKQxZDlk+s8bE^=hDhly>`Mog0)V#f;f6rv{mv)YXsZ?*H_UMU)ZN zx9F67oA@oSxzmyNH->X0=5i!9Okg-Dap;eXQzPRu$@on&zLSjqB;!NL_)#*xl#D+m z<5S7_RWiPnjDIELV~ZP7GyS@`s663Z!B!U$bZocNO^rc3{($~tlPdy9xVu+p`$Qhv z7*e{*B0~g?z^=UBRO-SH0Ti4(X4@AA4>7$}pFc_&tO4EY(wFSOPf6aMgs(Zg?;g*EBV7wlI7g6>1#x*hpXO*nGErP!{ zxTDgyt(}=P=H<@nS6oDkt9)Ra*7;8y9~tLI#sQLXf@B;a8D~hwA(HsVBw;*~t5s-u z-RI@^SV;a@HJAOx6s9c96?hQGLKa6=l|5FP!T;uJUDeu~Fn%)&$xhun{rYkm3TQWoFN3>Ge}`)oarh3#{_o`*k7p%B|+aR*tbSXSd)=ucCa zaG-;A^VWZ3kIbcP%fsEzqs$=8;&^uIwg2Wqj*q8gGLow`>SLJcuB}{@;-5P?^Q0*_ zoV{#uU4o0eE;5YGv(4atbG5cSZwpR~#qT=2lF3_S25%Z9&8OOOk>M0!%RTXCaCg~P z@u?HJXv@QlG7UYmzkNC2d5?Z=3RjMI{<@m>0oioi(x&qPW?aYY&=#Gqqm9o_u0`GUPZ6M za63dPOGL#Qe7k$!73%QN6`kzmYKqoyzoLV+=pz@Et?PLI;j$GNFFE@{=supe|0@}@ zcq`Cdd^XZy8-`PDHCK7L70jF3pu2eq7eyFH>hyY8!KJ&Y15>>5y!-Te5=$)M;pOe2 z*=k(mxOz4Hi-jf3?C3p|hT+(=?N|Ri8%sFhc-+JLEeG)fl(L+KE#dZjr9)pnaFC%U z>N2jjfI8RrU$30!Alps+EBjAa{9UWCUVr2ZFAJEW8S6E$nS&nQO4`vTVF8a?RIjYL zz(G|9j=02km_xhO+``@M9HjI-qwM%gb12k~y>VI=%P|R=(yp)Okdj<-J1m9!7nYm6 zvy*1KV>z|%^7@8p7I1fg(xFvRTr@D#X&NuU-e?I{S!b0$j^&|AH6Hq5*C4QXv=uC#F(!CT8V}7m@Nl=jsTDllmY~(ThKF8@2oxy9Sb_K* z>%i98JhWotYP!^ZD==`8VU?WVq2fdM{c{{XEnn zEKsmX#|pMDT9Ohh6olqV&z-IK9>aU>*ENC~L1^IU5f|k2rnXtaVRrW6 zok~GS9*;lY!U`5%PW#=-SKRh;Om0?cbcU^&#gRZzKbVEE#=h zK*(h1o}ktrx+e3_lKE-L{Iz6$TQdJ`mg?ngGjBOS(#<(8C-+;TdONRW=^_pgJaz2z z#7s*x_`z_y!8v=lcKhh@aZfFfSZ%%d>a+GBZh6A5c8LXoF@e?ZeC@a_SPSGi zDzMr_-wqaQJuMOSHAglVW;maGVhab0b+5cWZH7{u!}3fH+QQ=bCckY~o1u}lORCR* zxxU&I6_|T1yB2K+a$Z(8+nP*K;_vzLou}-etap*Z_+C?#>lv08HO3xl*DcIc?=nTJ z3eqyP3++LkKL6z$A5(<(NLSpL?*PXF56irt&p`b2O7-Li4q%t?E%#as1JU0G^{)Hk z01;$oQR)zkmH9_7> zDOb{OE{yIa3fm*BDgH)d~q4~=s)>SqG<<1nT7SabO zz7zg^N-P82KDT_v5{%o6C+E2AJOY>**0QO0K`P$}hfm+aor%KKMmyG7W zG>x-B!Y^H}g{uNcoStfWU)l1X{Kc50%triJ>66;39NNtP=7t`jmHK-ZQF5s%`d>d5 zNk?R!Wik&lnWve|<4opxCi6g(d7{ZY(qx`#G7mMGr<%-TP3E~K^I(&CvdKK!WS;GR z@^BM*y2(7=Bd~vv5juJ56HDY}o=4lT5Z2#h#WlX`{p?~4(XHVhx+URG=2s{4uao)N$^7kP 
zes?neJDDGz%$F?9N?oF6VFKIV%yDtNZ;$u~V#ZvaZVX9^t*^W$+M_Jpz{vCcMiA^H zD<`31hjPP~>uy|3O7gio;PQBSSW3F}|QnO6c@5D4Q|tc_Kt&dE2PcP2<+v{cJB7_$MUzd zy_Ypp7u2;+_-#`%Mdf)kmwQEee{slzxGZ^unyDdo^87} z@GU{CWU8stXH&#Ti%A-5Z4C^woURWm@%!eg+DF7}z)s9Cy4u$iHTm3`aqO54%-q7J zFTnl(nD9&Js*`s^KXv@ERK=KL6MA&tTBX}Kks@lLEXQ&fc;g^ zSI*K$H|pQNY0}n%#BGOe6~`N(u(o_dx}hGV*oyWw$eSQ3`%g>vIO{=a<}neueI_V3 zM=YsNR1bP~#9yt~VxW|j)6LW5_24#!|BRy;e|Ual@O&7SF}gDw8K{K$Y3Xu*eVFf) zasU2v6V%t$PQdwTt)_vFXOex;I=VpYLIbWR%L+ctw!z^rWf5pmqrh!0g&{HDcEx4e*FmUTV_o+ z4>r|=qU-gaPRL5b<=VF6i+5^pLRL@nlA!n$Cte|#dak6dTo1z;b=(K?vLDX0=HkvbDz16 zhVmD`FtN*#|j7=yt5uRBx?dYS3gB6ms;7z0gZvfx4JV$w_# zw^eo;;rY*QvH#u3Y)mVATKEjN86X@X-cXY(Dj~{zg4?sWmaR)$YGAa4=mLBEz33?Y z2jjYHN+x8r=6}KXop=7EiXaWa+4VtIDn^jy@zMKPjsUW;d7h%b*btE1jDp7(L{WpP zvEHEq1E~3Slo|U_43#gq@R2^j0J6UBG(Kr8f%am9}&=s`vZ!dNZ)bdrS45C9-CW;O6Hau$$vd@31+{F|UpLiP6H+A^GRB3f=w0RU`7Q;T zAb;h-72Y^Hx-pRZ<>E9=;9s`daQX-x_076F5b{eCT0U-8-jqv6@2+cb?rqhA@-gzG z*Vfa~rLA2mYZ|m5CCBj0c1Jbjta(z_Xpa`a;eaPadTOX(?%jb)?%I%o-#>!rD5bIg z+uEDjkegq$DPyx5%FVbtAX=mYIsWP`i|?tS1)DS79F26LzIeW+o3lD95>S>uw@VkY z`b>Bi4yhy7f-M_$j_5)`zX|WEx&{iZZh9~$Mi+99%(rCR&_E3vUXJ~)q6=a_zP;VX z(?q$}YpM*&b)ZW{e)M@0EtF!)@*V#~2i|!!M!&eAg{1l3(z4-L-so}WosrQ-VV7l` zb(3-XXwjzd0&Ns}zUje79cn%|KW%ugjpRShtPjR*QeI&ZW`*~A7A5yig{v=hkTU%% zdq#NIW_v|uV4VGs+|!|i-v7KCTHA{{2Q47&dCJZyO$>CYqU|^@$P!wtDw!vr=^@S2 z%if;jT0;5c#^^f^`bhR{+wqxKt)N>?w6Cy65AEE^zw*J$8WL^Ry?*vm2f-oh#I`7F z@UrYI3+d89zDtBHyFS}Oi{Y-GH&JTHux{Dgb*;A0HDB@Iv?*$T=SIpIQVx-Fij-rd zoFnDnKXQ_gqokZAwyQ zk-gn?MHvGaDwQoR?hLfk>XpnzeN&)GIggsJh4nYu+}T$Z&EQJ+`AL_Z8R)h~e`Kw^ zIphp%zIc?+Kt-abIJK_{ZChM{TkIlD;s> zSyL)0MW?jS=bV0=ZfEit4{mPSBo82}ADhXSgrdAmw*%?JagYjs=rrp_bKCYD5un{p zju9QLfcO0^vqm-j`ui-pyMTru3dYSEGnReR`YCx7c-VdC*nNl^`itocTVPtNh$)?xFcH{OJ zk64?V9n6-TOt<~NwIJ(J!aQjE%1qagYu;j`4Owcl{fl+JGut&59GWSw16dcdf8P4> zn_0ffrP0kx2T(oU?;HdmW%l>b@KzmwOU-RhKMH}=`-Vc!d@NtmI>JQiM*UrTP0ZK4 ziq{h0%$_|%fP>%P$+vi$Cjlm3ULAgSNc%6W7TEvZ%t*)bj`M0+_iYI%`Fb`$_OK=( 
z@z{py;;|rqFjnwAl0|vcE+D1w6R9H$xTvfj^CAgXPUod_zj(`^=HK^7(CH^X{4e zukwgXP5Q9E-EIVrR4v<+MvyXLozI8oR#0?^=CZ>S^PPg9+|K|@$ZD^?X54KI^^25k z5+p34=-96G>6lMh5_4w{oU{1bcWz9=Z@U;wPxsVg6{1X_NWs4S9CFMiV0-hF`s)(7Vp1mo4)j&$NMyc zKl9;xtVhb83kdja0H}JU(TR2gp#Lgy{C-Lw=nu9ziJKZh3)Yv7O7)>-I-{xammzFo z@3<*grHA*O#y0K@Ln!%R5mUTf52B8yDK$mleg~Jd?){GCbkmGiyRm-Q68F}V)1eE< zbo(>I{rb=(*K_`<68``1s`xYmeaPA{-Q3R9089<7WlFc>{l@5diuicE?@zpJ?I5WG zj3jUAloH&&yZGyoel6fD8S4dJ(}$(+;!8foYe9kTKB45rSbxuN@4aD%>9@WA+t>B_ zkkS_rd09dOatB`>zI;*-vbY(+2Sn8Y31+o2u|40iPxRtXYgI_7yk7h2I9?w|+x0j{ z8LF<&6*n`}hb2ArpVYh*LHcKj<0m_P;2S)LV-reghbxZTXt(wXM9QO^gs2SAx%X{>h zqM7A-1|an$X~z_CYseC;OD**_grbY8%2yXz0m+ZUSM!GXdW7dw^20e(dZi#YnbEXd z$LKF^|3AJa`9*XbQhkH9?e@x=w{V5`E@4X-_ zk8b4~(!4Z)e_vd|9OEJVX1?LuM;I<&4DDN&>Va-uL*e3CYQUeQEg+SG_sMm!Kep_o z14@-&e}RtqJ!{l2nz<@;_6WT^dsGML8&0`x-lhzE8!wH=i*$gdeBw}Nha$kco2d#a z*bX>2XV;s&Nx-jL{PoapO`u1GRvx!e0+xm9^$iZ1z?eAS?xcZM6^+pV)l(~u zMb;~T6sK7=dcHbHO})KQ_qqaKgQ zX(*b(n6!M(1Srq4Wq7S04=r^+yrb1+fyOck%bYC-v}wgX)BUA^VPci1H39S6HxBxk zF9Er{Zy%S4Dg0miHD*;o+R%Qz{fDSWKOLA);>WEk{>jW0JmTvk#Q3|1QGD(Ahp)-~ zjiwZp5r5MNim2Mx7$4<=xx$4Xjo@tNbn`SN9gy-#UGk#I2*grMuZ7>x0=j6f@cK?; z;4g4#?CI42_MU4Gw>20+SA#`N-%pHpEB;KnGnOg6U(YXV)PSx{>k{fh3?b{2lKdBO zO~`FN6u1%Zzp#6Ee24;meo~ryMa~Gy`|6@repUnKW={X5Z$?m`8~bB>z8cUc?49t+ z(HM$y1sJ85=>Ri&lRRHyJ-ceou9T}Pz?Y~?ov_guq!y0)B~gy?qa7HTzupM=pSlD5 z_bC7}))uIeF#`JdfaZjDc_@$VO|t%I2&gQav;FP_D00AjgYA17Yt%2Esw}h!7}utIlFQq<(qoSw%4br_0t`>P_HM#kl;Oxxz4ln=B1YcexXW6d z5&X#qukTUS_3g=nOvdWU%iqfIKDEo{>+$;n(6yqIabFwDhi5KzLK=dQ(qm{p;hi4P zgU&f0*(?Y(@x>LiD$G~L1*)xU1c3I_(eG&%<{OUk81r3%Q1om5*){96p{4pl^IkbY zkUC;McZ#0|(4HK~3wIX;{_@5)jeHH*Bhf7#v`q~7TUHv4zNZc?e*Sv{TgC!d^d`+} z!}?^C&DTTr;~?u`{gL=ZYQTusRXvg@4I{LO5_MIV|CIiRk4Qcv`HUmDGxx~@yffx()0kE0ir z*6`H_wA9W_{A*Kz>ViYLqlfddjQ=ACNc)Ghk4XFJANz{1zexLxwBP=*@7@F&xb${0 z|IT$^+9P%8G5HxUiu_9yVesp-*G3`v^|}#Trm#_QTnbBaONuzlYY?VE#4yap7CzIK=p6q_^^{1`HljSu{mn0=36% z+4%jo#y@x=@kZj6#5+kBB;C-0i*6|_5=V@Rdxjr7#{+6V^-FZL6e{A3`bDpj1^UUc 
zUCRq)P;O&)fPa}R)K6b*lpQ<~LEWSy>wb)ftj=%6bCo9{+Wx*A!TT~$pCQhT8?W*g zm$c(Wf$#+51EyVL&6Nh#oYvl5Yg7zs6eEM@P9BJ}0&)gH*`2b01sis4F0aw(Oz`v`5XDQneAk-{~q^ zR-*zpu0FW3A(f8OKjePVWYVFi{o7lmLONm{dH+UuA05)Cxe1yk(oubvk)G*$I;aLT zs><+GQGpM=vQ}FSZkVPOEY-#RCyON&A-tbHTPl&gn~t(-QI6vl4dBammQ9~aNAj3{ zC0+jY&tnpIqR|AshU8m8y-t;jF`^me_@+51{~c@O43pvuk3W1to^O)ppXB){d45Ws zuaf7lq+gu$o0EQZ((g|CDMRy{^Yp;d2T?SE0E_75#2665|o{Ny@ zCgiyaiB}TuBwdhnL(&yVcO+esbW74TN%!@%V&ikESmrtpXkBVjfxb=gnp#iQ{?c0C ztECcelQ29x7p0{+D}&c!#e>W5sv}f=RD8Xn3h<|zcONfPM_rPJ)eWarfpvJkW%V)* z)VE21Au>S?>R%pC%n;K=Eo0=GW1rE1C7*Hs?p#e&fbtFd@6o~LcGQPEf?7x_TYufY z3M|LM)m!9#X(FlM@1fx{)j&QsXuMOP7Rvqn^*pOb4LawGZ$0d!g|d`CEj3uJ4mU&? zyaPwIP>YDjooGJRn|Ex?(ss~B<#G!SWlqF;DXTI1z#|=0#d_;0DX$6d#^x#i@Y6*t zLG;RPXEcB{C&GXHQ(crO{XFH%5e+Ek*cq%e)k6grG&rjrHNfV1?a7tNdMI}Vw|)L` zb&wWndt|>@AMtNk#6;XxgMt$qvu;l@K#=}&Y~U|-SXI<&^IppkWwpIJeCDbK%>Ph( zaBNQ7kPedhqo3lcPo` zaL(Ovk4}<0kYd}sN**sCWDi*$0zGRf{521%R2@rW$nwcOT%?Q_j1vuC7A}uR(rvrDSLF_x>F@{e2D>~ z>n%7W=8gFy!p>lDmH}clWGil5tou*A1Qs#O5&d=~7Gh*>BkzvHQv9!eOU6x*aTR3T z1sRt?&V!sMxejujBpgUMk#Hp8OyYsh;mjMyGeOaH*@_oFXhXZ0Xkq;X1}c(C+?Fq) z1BL;^FfkB6FN^*0^n(smhp8{s+{HizKVBVP8lwA8TnUw$ z8~?f}IEyvZo;=i|jZ(ChO8X{D|J^G`cue3=v3%-}i!d+hO}MafzFt zRxFk;(Qlp#)~fy^fBunAg#04q8!7)t`AEu7QofS-!6uM`X*$+D7I&BD*L& z%l*MKDThcoManT!&XIDEl#`?!CFLwBheYByWN1(goW*3+K zk6fp&tPP*{(`X~H9V5E>$eG7TtjhnM_mg%285csvjgWCAWZVfEmqNy^kZ~LKLV?0|yPtGhXz43Iw4`!8C%1^TasRumyxLL9g>ddf~^RveLLvjo^MF!}zu9;77w1Tc#SAz^1 zWgt|>I@S3UZu8k0PB}6luq%RbI>8DMW3&2mWhv-e*yvVWVhOBVwYIZuQo!1^ZP&a{ z7LbDN%^V+TXzKjuQ3}9JE`cE+lsE?xU4Z3FvgDg#VuM7!jF)x-L zr)~-;a!N($gRzjJpH!T=ngK6$9dbu)8~u-8ne;pVA7o094M|CHzYnx@v`-rur~|<~pD?O;{DWM7@^XNMk^Ywbru4E^G(< zd~oReDg0dNsn9Ah70BY#_KJKr1voxf>zk_zHltFT&&HSm{mvLs(Of#{%C%M5yf=gT zX$OjgUDZHJEVcRNJu{Fp{Jj~zt3lU-W9?G%<{-9ueGo@m9a3^uuJasc0Xc=c;D_RBZYJePiZ+hRg2B?m#(Fwx)$G7?N%uP0+ zySPgwbS@o2y&2ZY%C=B$_EEOoPZbJI*Nb0QwT1fnaE=^J1)8?|s&?PP{Kb55v*Cjx zsLEd6w$I8AAYt0IDw9dTTD_~$eZCz?O~~<0xj7N4j@A6wYlZn&aX`z+LJmlN#w_ke 
zqX`sIasf~3mmotMx^i;|+q7lT2yKthuE6jmxS>Ci?sL?_^6tf&0Y9bZn4*O!c-<{Z zQ5J62@5saMz zm@X46kAC%(Kv}lh0`*HxfaSWUoZl#kYOF6?uM088c79+eijqe4dt%1;>Kj3mY$iLq zbv!EHs<(U8bVJas2n?kgjz^4xcE_!K454dZ`YEQk45DXq`q}0NpqpIcqe+uNQL%R< zZC2<5V?*wsR?c`NrOTgr>X#l=xgB5eM0Gr3sa>{y=%@#|XD^$rMtHpa$5zw#>q3{h ze&O|G++WoX(RmMb{^Di?SBx1`M(=+(4zUKGF1ufe_qAtbn)XR!&?SS^C6{`%A@}8$ zCj+XZ5F}>=2Nml8U3O17O<5SB5?^%tmo5}NpT5XQT?p|D?2fP1)`O^ZG4x4BgEjot z#a~rzupTkcl{jvu5UTI}az|;v0Lmkkr&?YSLNv*}6U+(?LAB${tIoc`8v62#;73mk zq4m`X`+Y&*YErJ%Y!}#v_ZW$`tE$IaYf|2{R$o6(op1BziZ|_N(&W?6&R&n7cc;AF zqy3dx^vW&!&{#vDsq{9S;Po==Ev#iKbM;{=>lCx`yX zb3O9hk31J7&kad9kZ>a5NWz)K1BoXRk0hQ+Iw0wUq$85fNIE3xl%!*l&PhI?2a0As zSJQ+}g&PuCmqZcDmgZ*OQwLU~*s1k80%!y`S)U8LUSIF7$qHyz6)45+kB(IapWAED z0rRrJ z*W%T0O_b|qN_7dn{I*FOvK~i0GVbVN{)69P7wnL|Ko8CcUa<4OzDB+sIn$-+MD!1x zkmtQ*zvQ^&yvTWvj14pJ^Sm!ivVT0J=SZA_iOj=8-mfkH-s#=4@}!W#MSvS9q;_Bd z9sO^d!Y?C5&(sS}FysHx_2uzYeck`p5Rycx%=0{6GuO24fNP%T=^B$Hq*7E8qKqj? zDM?Z!4N4IwN{CV^%8-<(6iKL5zwPtH=Xp-wU$2)xR{NZN)?UMVt+nsj>zo=OudWow z+62f!0rDo$Zz59^PL#l6A8K}mXPQ9#1umLWiUc;{D8in_4=_m5U~%u0z=+r1>S+1{ z6xmYwc(+Pob!^OHCkg>ltGQ@#o>Gg?X1rU%xn{Wx79XO@?_CCv^h!14=@A)h&RK;0 zR3?D<&%O&oI&xU(q*J%vS%4U0OIy*G@>uw@4U&_)0O(!kcDUszVBraCI##;_^u9E^ z^dnpebKYj65PS^4`qatOM?NcIgQsKSCRL0<%gNHV;E2j!=Z*2_k@4r1@#mTG=biEA zq4DRX@#m@W=dJPQKJn+_@aN_5=jrh0?eOREcvzeAer4$ZbJo>>tU&!4`xg+qJUz zh*(wIr1Gk9ZTKd9;Op=E3Rvj3OUV+fdY~M-^lZ#y1xzoia@78lE>QO=wO*4@z&OT? 
z%47IYey_3l{AYi8Ec86e$47J_n`y_J2T^hu_Q9z95V}8`Tra)}+>UL#6){E+ZCK!n5hx!D*157*5t}XSrd=*H0{8sq zJo#So7~{#=%1v%Ya5am0%7wN&C1UZF#pni_dGeQ z$g5-HuU&>_>VKt~Eo9js|k+>H47EvFpmEQy(T5Ou}t z3_-drNA2ix39KrJy|-zWydTpAgk>%c03)5mHqn3Ey>;By|}l*6VxZyL>y_dGne!!JvIalDW8Z1Y!n zC5(goI8`Ku3^5*Fe!C@MB(?s~IM}R&O|3Lcf8R}pisLQ6U+AeU@=i;m z@~Pif!+0hH!a~Z)KtguCM~H~==>B-)ltf-!tFYxcoXD3WW@YM!z4jva4b(KQd@fNn{w*xEs)?uY&cwi`TT~AVVa>%g?Gt z8FSY9@rG|0fFtL#m+Oe~U$}$EB|L87aSe}qcwEHeCLUMu`a&wSK=_|MulEHETv^p` zN~kAccRo{^DS-+x%_B~@mK35RQiLYnQK0BYbN7ifS&Y@` zobpW_DvEbYIN1)$VnxU9h*vdIA>rlh3(fm7Sf^;{k`#FwfLP$CdEO3cG!G{udUEzh z@jU%aYU{UT`G0v&&so!9U_^sGsfI5`_Q_!KtAa;obZLMgeE5|thaKHCb-u-%1{FJ< zWhmwf*qxiv-k3EF?(7`2T)$3vQ5I?55|gR{D%egD!}K+1zG0sAPnLA5VsDx(2pgBt zK(sN*sa=4Gr5^nl`SAu7dhR4SrPmNKZv)@TQ5qGBts>9#38-U)dJ96)RSHa+>F5dP zsbhndT+;+;3eFn5I#}tkxt&Zc$nQ?0|5&So#RzQu=5~h+1X~$7v6Z@*)(L;B zJaoTGw7JFdG+iuw@Akag9c1uXC1n5VsxB69eNMUNCK)(g8+`)B^sxNoH61$1WH>Q3 ztFmQ>9`=TVS*%Ei45N*Azklo0!?X^{4OHy`h)5FsnC7LA33C~yXIKD~iPfa2-_*yX zt14PkoB{MsvlTCuC1K77u+8pzRD>ac?aN z%Vu52?mcG$f>wNME?bf?hmO3$*k>kSsNQzrulEWc?k zx7fpBA*Wl|yI}s$#RS5ier))?jz{|xP6SX<4XbkR;fk46SQM6Ld}3T4?E0d1IR4_p zK36vs9DXSeJ-#XOH|y45;+X`BmybM*PA49CEx-1!y%79f34SjH|NN+J)lI9!;D~=z$}+JQgQdpSwwNM}ZhutzBW5ke zu^Q!&PDG&VD<0jKxEA9v;UB9#NCXdUfz-U6f*8+H&RFd}BIwQPE-!v9j1dzXzAas= z4yh4a*0!pOVqi0##kXA@Ao;z7fRs3fSvz`oOKL!Nf_1I`7cq=qnW)>fRRhlT$6j>{ z6vI+O3%e~?HKE1+TKzN{PpzL+KC7q+aIY==?H5sOltnk{RhTApGYrnlpA^Ojnnc~Q zrCLyBl@TDfO&FV4OHYhhs|7so80&A`6~e@+CA_=^S`b@cx~K295TS(T zG-dClbZCQhf$tpokp#wY>dM9fZv5pg_I4nB`;TD~by4HMt)_}$b*9^MJ`U$}f*1xr{Gd*i)_ zJmhCM%S;a|V>vmrFUOv=YD^eR9d zvbW_QB(Nf5?rY1pD+1TOsaYOz2~0e{F!p-C0w`pDXYLIU#faUsl-Dr|z|+>d)2~_x ztJ>Gx6%r#4v3nks?o|=^3lILZqe%GYT~m9HTz@tr81;Wt`r|h*W?w(ueh@Q;DpLN| zE3M4`VEXbuShwC)E7dU87%qD1td9nY!-TtA1^*u^G$*b*5}9Gxh5Dv(2t4bD?X4R@lj`W;%WGw z-s5*3%$Upekzv+B>`}8P%H1wTd-vQWL%c+qa|AN`R1XpM14qaZyFScL0j26Zh`t?9 zhO}B)x1N3z2u-`f^8Pp(@^=a+2;Vk={8wkP^wBtfOM&`up$U{3X**VLMKN70&p%zkz1Uc2>Q-c^4ZE@iAIL%X^Q 
z@mZr0Y(gc*{MtTq34WK;GXaTPOYY3muy4A3o?cjc3(FY}c|>0oTt(DHIe@dh)Xm-L zCO|vxZ?(f1zZEXtWZV+YO!BRXAiA7^7T%5fB%Za+B% zP$&2PiNL#mWxt?{zHI~NL^lBt%{Db$d5+f0Y3y3+1%THFQjzUK&a&|Ag=fVoynkY2 zOMw>(vIo)~$^X-f6#ra2UwFRp>l%$Mo~*e=2IZ?(ZNpyxoFC4Z2mU03qup@e8e1~B zAKriV{%`W)v#+9B(N~`Wif!L`*{sQnyzPI|sof<&hVT(e^BxWga2y!FnfLB*x%{Ur z{m*Y(T)*Nq(~3+XfS`G7=BPeqU1*~4+t31TYV z!yQc-IT;~!Z0;wAN8@#Lzv)q?xi9Ki*Dkjhk7PRVJ3NU971hKBPj1iCF{6W84y@2Z zHDS%8idM-PbD(Ff-=H6)gYlnQ(;?Js4jgP}x2U%0Vx1DfqvUjR5RZsF!~IzglRjfl zl9x7zGI5t%7ghAJRIg19VOPx-WxHC=F#U3v8BCb*ttoIv<6qBYtzKdVeD?y>S%2zb zshK7UO|ML$+CFvS(-u_o%~G1}pPPa^sv*A})Wu@dt3r$`OhNMFtjgnZv{Ehbg=Xs+F1yRmU*pt>1phnZZE`zBMDf+E9I$6 z#9}ObE5F2|`T2~ZKd2Bf;ndJ%56ldvP)+h`QVlDkNt5fx&A|TB$~O`&zq4$ z@-50d=$oa69c_vB{t#&n49an~!VRieomk5y@(Xjw-W(oDC#qnKy*aPW6X;OK!Nz$_ zRT+~$R6J=oZVpKqnFrrGDPi=Mr!vQ7=rDD2pL+dC=Tfd(m!_q!s}7KiI6^VR2MlWwfdp>5vuF#zF8Cse5R+CRRAnJ+fXL0 zYbzHSc!sv!P^F85r*YTgUMDgXg%c=OFmcekTA3S;;>_Tw&hnH^;*j@!(k)t-0>JXK zB(+-{F8kK)lip5&$U&BuB1@zedB6R#H7a?7G|VkscR8{Ujc*ty?(&ufE@lc5 zv(>xT^cxxW^tc@1cqIdokzsbR&&eQ8q)VMBl!1VYygpT7Wau=W4m)Bk4Q+GEcU=|9 zkbU=6-N*%L(9-y+n)?RHD-o^lnheBLRjvvd0(e*|8I++b3zmva4X%xHx;P!?l%U!ev!(Z7Tz&i=qI$$ zM{J=Vf^=jDzg!mOf8H^}_Z~{^T&{7zSps{>dQD(7&j1GfI^|jrP2ieHSypBM+27XR zmhq6lv|?^t^X)c-ggv`dN)3fEakO0eAtNx^EEKfHm>-K}kFT(=H3G)|7j+Xed|3Fe z5)Ki9F^Hc#y4pN)6}GV>>Zwfv;#)RvYDvm+47=ZT;>55qzz_Z4RjSJ|dX?Q3nbRg< zvebK@X$A*Yf#}#9CXkRSy7R#&R!rV7Ds2@Pz#hMm61k644Pl=r-AW~p9Y;L$Q26z^ zq4H}Mcd!$pMcEWLYfm@K%E&Iuk3jS#!5&iGj|PtOF|s$lB3jmch_`C4!JXUq*+D89 z60Bve|L|hMVv7rzGY^x2Fz|SKlk1lT@m&K73B8DB9GeKY#~MC~Uao5RjA#eyz1w1g zCfUZi!r8Kvf6jRBj-4@?{yy2AY=i9bT3wU8)lzuyRP(oM5CsNvo7cEfS3wf;gBuYP z5a%<_>@Z&qQ%=LLuX$0B{5-Y_atT1i;q=c6nG`rVoV>~2QDjkf1ogHncP2&Q?G@gB z->AR+3h#G#zr_13-mmd~kH-Z(Zs2hRZJDnN;@qHu`_WMGW;aK-Wyq)Jx24}FWZ#qqYnw6UL+{V3;{G)#=gRS5d)0kKm^abmv=Gz{KNGG#Pl8x!C580$gyj)cxHgBEa>^xcPU zr3VZh!}Q8w3wYF0=d@~x9@H(PHuqatLYL^ubKxp_!1L|Uu4`8;!8^>N+s_h=D zsUF0L2!`eVv;w`Fa>@n?J>U}9sd>K13T8jXRS?E?fk1iQXp88&%BZJ4DY_sXwW)#S 
zgcXdgFjx{MtP8y?4?mnQu!2(!Vj?cpIuMTNfds2XStjigVdp1ULYctIb5;ahcw@7V zllP$oq)J?Td74EJ@bW|=c3sGy+Z}Ij)AK`m$+5e#AyW^2K92S->#%^@39^Zc2-9V6A9jMP@B(}{5zL$ zyWV)neG6EwD|K^qhyLQTYI=#PI=cE0iufG3WC7xa-+hi3>4E&3>qUW?|MG@^K4Qsj zVI6fFULU(W zd(#~3d&NY~5R71f7w0WEu6YO;!7F!d$GTGUMPBgFpMR|#<+K0fDGQjfa4*Y;8S^W2 zI)JyBgZ-ud22By_eW&>&%S*Ut;uWDx#BX_mP}16f=8h44;P_l$0k zTz!TH!Y?^be^f!Z@bgwumvs}HGlos5PCbh1)=QsrXVejGeXP%~pNS62xvbh>V}pOO`zq_6&6iR z8mR9QPI&g(F%~DFD1gEv!TfmBQ`)^t~litOakUV zAM_#o=Y!G&JZ9iA1&=v+Ou}Op9@Frc_ZKGemS(AhTZ5FxZi@jUJ+Lu3=~8mV8aTpL zay|^|Le-OBd-n0#KrBa)e{2JyZ>;HX^+D_D4LkNQTNkiU-%7zFHZWJ#bz=E19iY~( z={SDF20CR@_OhSTSzLdezSjMMR2>lSkQ-1#^ys0cj9qp*F#G&i&Cwb(|A92|v1{7E z(XTfyQ(^;~Jikb-+N2F|Ea#PegbkD_fA>)r(T3P4ZO5&~HXzJuu;lSZEfA&|rU$Xw zKzrQfc#}9S;Cr(oxA~nlY&)tOs%@+VCE6{Qbe~#7UkCf+usThc;tH3XdSeX?%5Dp5 zUQGzSzNX_@xg6i~Sp&RZMtjc{TEl3y$Nr0x8qnvyk8`Q5HCTUYR%FT5fFe=B zuq?7QM25w#uvw!4;)u4CMlomG*gMlfb+AVID(|ua=eceJ*CBO)@;<+eVk^j(d&Kne zlsf2j>5Z$Mwt^DnWt+B;)PcCXG|MCy#XRPVFBxBm5F_r~eNq(B0lhtYm#Kq1zhD?! zt0g473-nj0CPLQ__3Gh_f9abvctm+YZ1cD~;SyhGn-facP z?}r`x{aX`Cj;QjxJhg&BtD%(yg3ezt3m-%AF_!1j_B>Zr>qUF6I=!S$(byVnHfMWE z+L6HUs4Bmp2D;z1?2O;A{$KMUllMe_JO26I8+-R!qALhqyrWid9VKFKetjV={SgFF zkq`CnfXY9=2Xp=LmHF#qu54L4@iqvS+<4AY)S|X{y;hILG5>8utVeu}PyEEc^F59~ zbFy(b2(0;xqQA-$G5r0JdtaveKM#VsW1qbuT-C7AvJ+?SBYyICgh%qUs$vWLE%5Uu z$kRXibrA3$S87!pP{CLYfAN-#20`am4VE`M)UaRZ`lW+GP;&3R#0Apd^P?qr&4+`3 zdYdO+rI_b=fZWxUWZ`ep07bts=MhgJWE{$pl2-XAe(?TTyVk`EshiGt0LSY4erbO4 zuy8Nk^;qkRRz+a#E48IGctEQ4{+gW4%5dcPZ;W}RC+Mcmxcfa-h5|xsB*E4bIPy8d zh7Ksh#qXIcor#`s!^z)Uv`GnotLV_3lm9-u^up$2yLvo9{91-f={n^_-t-Kw@Tsp< zf>6;*GMis|Lgby5aA~=rI}KiiObo z)}9c1!JyFTo(x2^AHEg5+H-O31#9>=J$dN?_R}|P2dFYICv4l-an}Q~UOqZnVJi#x z_5bJd`po?EavaD3J{zX>j~wBT9Px6%%Ly+> zyqxiT%$|N0^nHT|h%5F*p0HGa0%jIpLv0Tr9*L*%k`+J;mTR9h_JGljm1O%TiZF?J z*;Cj&Aoi3%oL#3PZ1w#5PJ_tawNYs7d;{l zW>uCAK_VW2wMY!Vx-J492M)XyB6~p9LyhT(jY7cLJexAO&I5S9E7b*03&KwFa_yyQ zXg$8|SyMy&@ILaix=4%%jJvjH(Tvx^ah`280~Q{OvYx84Gl+p>9Tr 
zRLFzIM+D2^pde%&3gFzi!~+-y$%34>%A;tacyTjS;Kwv3mDV>x6t$EA@ioj2Tp_K^Q|vI-H-=sdZ1^rSn`Z*~O5KO=(cZ{~p?Tiu~^%h+zI}p&G#Icd+Oo75T>uX^G}u zb=WuVyvB5&2h4I>ulXdT0dOO!BBB6|yR_YF`=kz?%Z%>7NB-4!uS@EKg2Kd&DY#CDVgqA3~E3F&Vfz}`qIH>N4VqEMB z(J11guUovixQ!sK2GhvDp9LVC3Xk4+Hd^H`+(QuHv+ymp1z9X$Alq4eRf#vAP`xvD z#rLf`@ZaBJ`wI_oc!|eT{B!Yq;rYg|3%_o>T<~(k%M~woyk78n!|N5Vcf4KT?FMgG ze1?&~R;>1by6vmYOj@;I6T-n{D-`Sc$RvKjF& z^v?RUvNmXs3ZA>U-UBMG(6?`K(1CH<`3J8MceG&hp0!-vLY`2uU)IgAQ40nUeMZX@$|e-1z7J`_mVkprb;~?~*hMyZ ze^L|j44>R%eDeUF3yE)ssG87~b>QuCGfz0QzPG3Tyau>re7a~7=Lxe6<6@(Dbx^c1 zFFt(Q6OPlfJuep!q5Jc7Kb9^}zrO+MpZ(Q6~=OF`e4PN%2s@y@KEa=bABmtbk-67b9Ld|;xY%za5j69TZ#&)X2BETU z$#*uYK~mCzw{=(C;AD`$x9>4Eu>Qg9yE@$s+^-N-uPr6Qq=|X4bg>&mDn#ggGgAjM zW~2M9huk24U2jjBjs_g^Y0o$r;0CeJ2DUu*(FAe-^>?Df+(2H+M^5mjCg=#uBru!0 zL7h+Az4tFPA*nUoKX0uYOldu0YADx&1zQ_Fp&NQ*yEd@>Jb9>I!40&6+U`v`YC{UI zZQpVwH(=djT%5_N10_*Ao}RLFgRdEB;uhvQFoni@scvw4SL}-4S9HMfh`y?mqZ=@a z3)EGX>B6?5>n|TVqxn)-n+fyjfzT@3zOEEEplkccwLRB`mUEU3zNg(lednnSeHD7Z z2Ij@bN|Ah2-dP`2&#6BF@$?`GR$h~M{Y=mu z22ZDn*HI1N`bu+j9pk9$1{%8H%^{XReMU)F1gAM`T_E?DOK)zFL{O4aN-}_K4pkA*HlELj6Wr7~y{k~d&CrcY# z)aIlLP=02wdE`+3bM3!y7mv$$JemK*@WMC87BDw|$F)nVG$nw`ZO@7=E?#h_pZ&48 zr|$oJa}1x)+kTo zcrMDmEzM#32PD7IvaS=Wzw3bWsuxTfE7A3gFH$b|b->eXxwdTy%0CiToAv4HK;OA% zLH8OxA@@nxv7U$8Ajuvgr^Dg}bB7prLNl~s?p9Jo(*>j-rV|f>E^EPT6kFWJSWkE} z9q6Be;z;lGXKUXuPhiB4y?f2A3EsuWXY7x8f;DMqB{!1>^k26$QS3(YT$Zvo{W1~4 z;~><8*9+3^*Zo@0M}+pDnR_MmydaXG9e6%NbONyOHvO!fo`#)r0}LR)N!!*emv?4_RCC2Z7ZZ8h#dJ# z+|qN=R)w0q?B8f0d->7kw?0l1G(2kK2WjYjR7cUCN&QuS&hJelyDylx`%u@&U%z?$ zV5>)>V98SoOv(Lv^wpgK%%uwQ61zg-0Wl>C^4(IVKt%LAV+QbV^r+u8OM|P|Fm^9(23XJT`Z>`~g9kwz z@3M6m@Tqw;fe=N5^b=c_E|EpgCyt3+bEd)6Shl*0I0GCwRqjd}(;#u^Mh1sd-$w@yj&a%{7I7*VI_mH?mixhr5<)xk2nEip z-;+hmLi1yz2{|7qklt=-qILk$UxcoHeMy1Ha8WbCa6bsO<_5=E3iM2s+WH6i!I~rK zQM5q{bgjr@IqC4PzGpdH@_g+4;3}hT-;IwHFuZSR(rWAn&+N*lqrXw0*8W+8H<>IWZ|xBVEW)8NdGgGHriy`hSiiMKLn;2gGR#mVb_5YG0+EqRm%sh0!x z6Q23Oxv5uX>o6L~`#gv%NAezw&Q)rtr2$<<|ottJCRk5qRA z>zKkXWG|0Q{DcVr~0q1|NA#uyq@rS 
z#OoPv2Y5Td+Y#Q*8XuJzb|Smq@rv`b`*jK^uK7r)L%cnX4zptfQ5JdSVQKsEu9FO% zu7@(mBpGmgX0khTKN&P0mBmjkVL;99xWSnyGF1E=tgSV z_M+vJ)}M~}!3k^ng_GOJpiI~m-F6yXznPuwWH%WWWOZKeXJwx^1u#yRJT9cSMz=)4 z@aVto7>O)^Kw$po-P=0?jo0`lTtN7D&HHW0$E#;|PHy?^1N~+EYkAl#f$6`_(JWkh z?PlO%&Qdxo34zc|O;?aO$j(Npql3vurJEHOT;W7^*uz*w3)u4^bMKNBZlJugZr_Cv z3n8{j$pVfabQ|HH zas!Us=1`HJbm-I!9X)C94xy)>X^r>Mq4BuB>eEbjh(P!A>R5mrod3`h;|^`ltt>r?dpD|3*|ii^f3{+M;3rpS(<#*^ML(UY0mPE7SQFE9ip7%0cBqV zYiXw}V9afE0wdc4Y8*&(_caA>bzO`dATq0@?wmWC zA6-9%u5aJwQNMPbIYjY>$o=&60bdtgVPRKu;Mvxm@ma+OUKoxQR;imq%1xff%YJ)< zsAtsDHT`DrR{6uqt%crjfHWquyW9-EOJ6!u<>L*pNnsDu)|o-+#LnnZA#W&LpIESS zj~R5;S(?0=@dD|F?s1+BGte0>ePH#>3nntin=?peFm1J5yY9Ug@Hlln_E~BM)@8?M z)_(N@y^hfY(U+#MU9<6;b&D4q|HLV^>Z&PJ96s>YufPiqRRkVR+GGlfavxS6u|xB9 zB^AUxDR>UnNV5^HMRp&K@(={Vo8jU;`eY00zhj*id zCj`t1U8POYz;y|W@7xm)Xh-96h-Oq?NVVVM0m?nzUL#~vupdG8uj2vn2C@fcoK2zO zXRsX0D|Z;n|1^i}8L}==2g! z$X}@ktgYd%vcJ2+9niWTAfX2lyFOi9#qS2;ogVu+zU#r7xs!*KzqrDfg7unu8GXom z``P;ObyuLbiHS@~=))bygGFOIT*3N5ofEgIJ|s>v`Ic_cuE)I)gpJT)^mk*-_vicc%=h+lfh?~QCz|OM&ervw+I8$tJ2H|dq~SQ1Ov1SubiIeC)_6R31dgZ~_upA^5WaFw%Hz5t zOgQNgUdAav!I+^{>{&-3w)Z^IoKu2M$J6Se#~r~ve(x?;Llu;d^EeU%9HFZHUZc(r zHE6uPG{3^!5x9~WcdmU@hcLk>_tLZ+L0C!azOA+O;xZrTKyf9RNH3^ZV-> zeGs)hed*zT2LR1?)*p6~!2YCH=Ld!ZY$;9?e?370%e(i`_0w0;ZSi!T+Y4WTXIDy7@o5t7t<45q&s!LQ_JZ)OS{Au{QU z)YI?AkWzm5*7u{15W~#z*#43UFh6$A%jk3j+Tp;%S0n+1J=Q^YUa+wPTC(%W{MPkGg%meq@jj|8#Nl1iC-D z+U69H;X7NZMVqb@#K@*OpLHUG?1bU{{4Gw4u_Ah_d||*%CwTCrQ(Ies4AT~B-J@5W zKqIt+|2_s#;>;H3+u;OPNnMWg-2klar!Se_cLL9L&D~>00OFfIsks$8LErI0W^p?J zjo`IhmKU9XcI?!Kl}RR$@Z|bS#ZOLPqQsQ{?UgYoAL4l|YVHidvth?(RgGclZ2i$h zQD^AU+!Eb$(+CESS~ip|LH_rtBPig$A=oG`*FHyehDgr{z2q=MxH)Bb|MzZZ;7MiG z9vd@&9hF|4u}_@AaGGpl?_&T1ijliCm0V!=(aZ6Yh6d0hGAAWs?*ccXzDSMUAi?OT z;ImnKT!4|ts-1d@1gh}|-ex{$5 zZMbo5>q8Qp-~Q=hBeN^mufC@!5p4k3uRmK?vm-qHTEY=LXaK*S3!b~B>I!w27^=631R0`@I;Yv(;0-Bd z@4#jQV0}Vek~fCpi^j#5*>?>8HPnUnjK{W9(_ zx_)S7ZI>YoRZF}cTj37keRWRy>ka?fOP>FEhCpD_o6lPoV$;ID`$Ftqh;{RMi}ato 
z1s~V(d-C`_d;Fe0iRA^8?GiU|Pqe$DsjdxtE(gU6ncN`Da%g426>Uh6Kl&o#t1E05 zxcIV5RR>JG+A}V_aD@`b-4;DfIuNO-lT&}v6|B~&uz1#DtNFN%|#m(D!! zbcF+s!U=9O+JD7ad>qEdX?z^V$9a4nfX@@~c?3Ssz~>?OJO!V};PV`O9)!=6@OczI z&%)k7Kx$R?AVhCp{!b7R@#y2xuRsw=QcSBRfve_X_42+GtwD{?x~ z{Y7?Hg3`&|l}g92T&30qYj9?c^j;RX6-U?-;NBBL_UL;c*X-i+J3`<0>9^ z@wkk~Z9J~yaUUNS@NoklSMYJ?m(-JcrT)%~YwzE8eCF#RXSkZfs+}le3>wphR=H1| z!M$Hh*H_-yMuVTKuRd%L*5eFI;WL(rpNOnfjIW-~uLhqn@TC zTs;n=W|v}IpsTVeLqy9E0@_L+thwa^@$M0N>%aXSH;~BJ5eUrk^LjdTz0YrClkH-T zvk4#FRaiB#MsLHby<_e7x`d>-it%GNai z=gErJOAI^kD@hY)@;88RTf=k(A3HdX;%h95uh`g%-1Z?`=os|a&wj-aI$r$RV|vjR zIH)OmPum#5f-M>PJYurwGJ<>_jcR{0J8*xWJClLzxa3;#WJI$a%&wY9d9%$3XfK!t zVwc)O=XTeRCT>R1;(nE~ag9A37rm$Gn`H#wzwK-NO|gg9C?6P%HG-6c&Bv7U>>>S8 zImQ095tQBk+`c~F9>li`C&=G22E{jq_t(_f1JQWpIc{4Mc<^CsP2C53cw&X{)5ZkE z#m;Z2owSDwdtE=04x2!cifD6*lmlp9xEz1%nh8W4N~*9maDecH8`tifFaeHCOA}XT z2S|`noN9b)0(o}?6*}x4;PQyjRTDt=`Dv@(_YDqke0bN-gbaYK=IifNA98>St%Be; z7XbKbsY^2B93b^L!8(_j42roUTQ=1=z%S{vu7ke-=s({d0 z`nF1W^KV5u9a)d_l%R~il3LLUpf5$P&5xVwQcd&1uz-4Z~ zjlCs~K)EaV>3kvuQYi<;McNz}eLv4;{hg3+j=(K#H{#(!fePxuA~!B4;0hxS{hItc zzn}l<4&ll_?{*p9+@5!uYP+cWg}DtSR_`!2{?G3arhjndJlD||#8dWA(3pIEQzZo= z*2@<@zhe*lyr;zSE>NJ4rSY2RS9_rC8BoYNNP!Y^`|=AM4#2q3z3D6BTN>$wN!J04 zIoD~ds-QsCt-;zXYX=BgpIh#pOM$jEPwr{@qq@!`vEWMw1?WgGTlYD@osGmH%li~a zX?I@ZkmCT~lj8;j1}Gr(ig_$210uY~l+AYS8%gJ_n$$?d4jRifGgJ3~r<+ zVZ(ypcwY+KMAv(AIRek8T|51^BRxF++}?=l+JoW5fHf%;*sI{Fyr1K70FM)R9KquZ9*6Kag~u^G&f#$okCS*DEo1fK{r%b& zg!+arGU|*WeRa6x-siS3%kN|_=WhaYKg3INVr}8rgRT>{=_au8@J*xi2wSjaW_`DQ zEdY=7PR$cETbQm%)-Bix;I!j$s%M-nh`)Z-T-5}y;H%m9^I8r+L;48V$N4bC76M4R z$8TtofvEX;`CNf5#J7LT(SAY(hV z$qn4iw}Txni3Qg`l3^y{aq7dLb|6pZ##ossz}=C#_nV|WtjTa>os^>h7vqx}re+Uj zYO>4hGbtdrm&fr^ustNqO}g=(r9iXR*Y!1<>_IV>o$XW?1vUHYyzaQgy~z(gAvm zf2z{&P{H`h()@aGfCu^GB=%)Akbd40xyHo-B*)HrDEQFe_=}c_bLSlvc@x*Hjdv_` z0LRV-=?oJZlr-*)-dXAZ=Vyhk&I-|Bz@`7hD28zEbhEFy7!BH%&q-}Rhj8wBUQ6=~ z6)sOzo#_m8fRf-kcAe!k$P`&#jrll0!6R?4uqG;u?l`_p<}|`VM7wrSAxY}|2E9oK 
zNKYhhzIz(sn$g$w`Wqa9z@c(i<2n_TOT6r(;~XKAIXkl+sNftWFA|rHaOR*RYq1S= zasO}1B5LU_A{9RA*2argJA%DHZ+gwIzi|qWV|bjy;~*X<@i>adSv(HoaT<@~c$~+_ z0eqam#}Ry-!N(zdoWjR3y!|1Q3`2HIoVViz-7V;bkfvXGn#UQ&R<-?5D>ePcj-~$C z@id=KPTnmC2xI@^#w%$GTkEJx#_l_S)(RCsP^Nt=Bt3LKUIC@_;GAjivb~GaMJ6bui}w$DL6}DBjD?<`-!S;-3ufFCjQV z^D)PyZM#eXt6R!173Ku4n>^}gc9_EBAFDPAv^s&-&F)QyHkty-s5V~x9m*SyRNLg5 zm@cj*b^29vyRs>aqWY?Y-5HFVtt|Ujm_mhxn%n(iCm@b&97}1V0prjGjvoh{V8a8+ zh?*lbC}8=p^0mGba9ZddPfVo&=|v_>m4p-YU+o@m)S0i^!dhaQTFSd|cPxet`EA zydUBH4DW|{KgIiT3DVDz&kj(Bt}kCngXjwyQwONX@1k;*UPRCU6);~f&Nx83cyGGp zWg6V!J?bfl;_KE48jt@$bpO|?Gp`C9KC7A1e_h)~{aHzUgZS9~`shHc zB}~5Q)PBKj3LAT(y%jmF;5y+(SiFKMTrD+GxX7>q$!_o7q9vx_kv(P^4Uh1M1SW$(tl_BM;Wa=2E7{Ft+WC28a@8N#N(Fb*C)&uI%HN|%Wnt|2+D zS?W<@U<;)E1Lri4(;zC5YkFz3E$n)`F`U1c2I}I}=D7)5Xw2q6(zl-mybjMBN7U?K zGSs7fn2iQb758mz8|~nr>}BHaN-B7NST+8J(;kW=i289zHJO<0d|?;^QtpF5}}iKCa{Aev&A)8OySTn@1GN#*8VD ze(z(XE72AvZdh6Fp;BNl)VGrBgbfVUTDA3Er2s|>9(|%~11srKOM4qBK)tj7tkrpI zFurBgc8i4yk-N+S&F!oqX7)>R>Kx2<58)MaAm z4}^y=qP-(Stl;w8WH$-PhlARu$7Zg!l2XnY6|U zOUO3*sXDNo3PgFOxKHVpP?GDIq<)49skQshwqLM>PLrRi&*rEgoTJL${B#!nYd+WT5TU6*(7&FwUJs(aFf zhsg>K-mYAg*-V2&f%!uIeU=cMTvB%j;nPYV;*V=1=d}U zopNY^_ow-`{R9H@KkowT?^5OevD|F2wqK~_**4{tQ+xkYPg(!*OZ-`E{8?@MS#JDU zZ~R$s{8@4QS#tbYbNpF!{8@GUS$6zccl=rSZtMEzZxOB2VPN|R(NkZ~WDN_OK|)I0 zpf~cPy5!L032ie-+0M?!Bu#|{S%pO8Ds^0=Li`rDm{Y-Ki?WKf@U8s3-3qU7t&49nbs_peADsXMNX%w*19JFLESH1M5LRZh;jkSRIUA|7^bO5^F&bP7( z$!oUj8}G{7R4C%usdp{*bm%-Ze)E_l6{OSe+j2+IVeSl}{ii(Q-}T3v^rLjh=RI}uSQ`ae!p|wc zUuglOrwQ$!FCv@}%cMQtU;%5A;utlXC{V;+nsr~o65OY1!p?e7;GTj~+?98h;5zFf zkrMZ}pWyun?`L>F#QQ1UkMVwv#{oP};Bf?xGx+)gU!UOX7kqt#uYd6M5x#!H*H`%Z z3%}2W-v`6*li~N#@cV4|eK`C+9ey7#Sw|_Z|A7r0*TW_p{Pm$_;Hiu?)dp%LwnWPw z*9U&CaLMPFt)a{|LNDx=J`5^+UQUU(1_oEgEyo!Wc-vNm{4%x%!`iMB7o`oLGvaY- zai0}%IY;PCPLp6%`t$NsFDtnErXz?ejs(h;YdS<4EP-)NH+1G92`;UBoch+t5-OHW zq+C8^0Ed~EdX&w-_jfz$X<@$s%#y1@R5n^bP}ZppNu7ppVDsyvHQW{ehfi(jk}(EG zg+1w6D;?zTM?H0$GXiC@P2tX1Ivi_{daCU3ckc)UVH+2|i@eYeb)ioXzRw2Je|;Bu 
zzQ@pi_ZY(WQpNXL#rGV=_aw#lEXDUU#rHgo-?yft`wJNcMJ^NLu9ATZU9YqX**{+| zmppoob?1zEv^E8F%DcxkmLR`H^WSu%z!16Xv5^D?V(FV2&Z0VozqBy1B$NWQnQKJ{ zP#rURF{ZS^jso&gs{FC5QC)R~x8HL!x?X)BXPyTYrYD57$X;liH+b}QBZ^~HF{Nh_ zJ#jPITg8+$#Ux=;C6xd$mwZw=cdZ#MC+n_11s@RRx`_f=xEfc7&zkdMb3B~rL9r0$1 zYdL=NSk1RQGjPT#SBdYUf&J0#c{lEv0fUh!_-!2xl1e{De!E}>gHOD@UYgUOujmSk z;-ncIyimwIgX(_aT{*A#9-w&Hr}`Zlnd!;58YKLR?bP;$-R>a~$Mqz-Ki zUtjxo{6Zq!L?AHz^M2p>UIK-MTNcc;S=ZZCTEWr&)=uh(E@VvG-flN@_($&Y^Fsfx zYbmwWHhCLuKzx0i>+Nw_u(&*M&i%R#3~uJnXgw+qQ!66E`B&IN!fZ&H3{?U8*EQYF zJ!1=6%YxaKh^O+J{($2lRN7x=FgzS9w3M+vfgq`!%um^EF?rVE=Rl!cU z+&(lN-ETgg)g?rP;2)_GUQg_yXX+^F=^Y|mv@CIbLvVn>-%rk#-&6-bwYpaoh?aks zsu1x@9oF(kN!d#~0E0@VzZ=v9^2X-a6@m^h`!Iw(mQ4$!+FT5jXbupmRv#E`|`YL(TybGAc&}_v?bbXnFR?4|^cE4eaXA)`k8k!dv&F_KUJ!x~yP# zRDwOw2a96fsOrFP+kqyB2zv-U)5&>GM;jzq7%R_{(fzZj3Prx!@UE{&KUNT}=g_y1 zpnIAyR3A6E9ob#N9k z&5m=h1wHF($+A`rczi5ZDIV2DjDbtZ$ph-p{H&09^$r`D$mfhT?IJ?`dn-#RZyTUL z+QwfPO$5P&PNJK@Kgp$z!=S5K0IXHy0JqOH3@dCSVOFX580uG?n2% zPOg&1RV&E8lB{xfNMX^|u&QL0<)8pyXSdE)4#lf;$toLj<>2a$@~s&))_}LS`M%c# z0@FXgvyb+Nu&>nAh4agI?;I|(0Q-D@t5$aaI=PptbhY(AcB``R`!c54WvF7fI7a-G z6?mJ3;`-HUoAj?%FmqwlMa<6#Uhb>gclW6kR9#6<-#lytbrP-;%ZIJN&77WC^U@f$ z2NwjVG+RL`3#)P4tT7zJ6yziKTfsI#ySk6JQQo{xS9o-t6%1yiH-`t9KqqrVxFDYu zoXbjYE@(4>@B62xusKT*ujIX~Rcitax2D_0!>JNIbix?izjszUyt9B%@iYbDOyfmazdWGulP+%zVR`Iqv}z0BIY>A~B^yD@ z`Dz=8v^ghlLC~t}ljar%_Y6-C_x3{?u z!s6T<2kc4G8YmAMO1>U8WC4+V93ILm$)HzIVwlik0rIP!H#Q5Cq2&NE;7$?Jv+j>K z&l1T%zchX`rvT|O?76336&X@f&9?CbqWY%ptKWa!zFSn$fi0`du&G+BF;kDVA$ z7!M+S{86#dnW_V6CroXt}W>SKgY6>PUna({vc+-fi=Y zg9eQGb1&`mMcKeZ-hPj>bV!~5&e$^)Kfl%JI0w-o_A&p3)03#4ITdicL4*!^Q8}-6 zqkKT`YO^oT7jvjNeST?i02S=FroF%L&K!ubp~+{((fxL{84>WTgbf0!yFhHvQ#ZHBs?}(I7cvtY;Wq!zUL^|>%L5IZg!3q2wG?Tcshm z@VCXiFGc*XNmFPuFofA}G@gWdGsv#bDTsPn8+MGp5};9cEE} z)bmP+W84rT2fe){B+&XUOE!JzGX!<>rrTV4=J0t!xnRpmBZyOrrBW&80M{G7T~soH zs-3yz2Z-iyZsq0Vj7B3cb|?sLS2l-_Tm_l=km+R4dsSV#I}2<*a|ujRlCkcp?Z-XQoi-w zDmv7N|75wFYyjn>-*Vav5dC!4@bbrq?<%0Fnx}a%fH{#5N{N+(r;Bd7L 
zOr9IR`C$Ut=eqP}t`Z%nQtRb1K>ROYQ4ZpdONR~#7dDcI&2=E{2zhhqdc<$tE1{uG z9dHgU-}-Go9Xio`P0c#cWpKG_&t5t_R~D1Kk)#8WJU8C6-J-(;f7e;Hjk*wPIwrDH z4&iuzYp4F0E{yh$CIskM!2I`5FR<%Dm0oOvC@w*$PL;O!{%1E1Z|W7-g)zj}vq9UW#*cXB$s)`HHdPVLp_ zkt}+OVq(IyARghq{&hNF+j%c*J<)_P?bxQaV|3_IBkBq|YN9&8*j1qw#f>uB@bgg( z*jP_!Z$;Pluy&nY_d)~2_e{qmHPFFbK9`UeqyZdh#DD|8(EN8p*eRvzU=}s}+Kdg^ zUFj>K*$j1v#R%=5swhs)JvqB>fCyD7xk}}RC@uMgqdCx%zJ<&&6JbAJc4mSbicg8;cSJK)*mJ4b*WTG2cE34m_k^ml z$lIXJu*du3O5lDz{WFn_;%Redc_StdJxAjhCIRM<6V0FDZz~Va*oC$3W}(b^94I`#3(!m<*_BxQyR%y}d1 z#TIi|kj4E&X_0#lA96*wVP1<=x>c`ZIgnY z4a2XgZD>A)^0EF7X*j#=#{27~Xug-q2CZvkU{pZm?(P;8513a^IKGgDc0_Zc`!5N| zGBN1#uws_;^sndUu<&ia%$tOEo@eGDrrkx~)2{&b`@43MN`BqZM4*%(i zxL_NRLG%kcugn%>ryz=jFYNwfhvI+i5Fa=3aTOnTi+;&4_6z7h>5|<$S0Xv`Sdr<0 zMcP2N`>9IZZ4UVZM&&1#Xv5^|Z#i4!&0(TOD9u+-3;OizM(lae{a<7ldHI?Uyw$__ z$7&S!Bf4pkT^ev`L!zMTlo?#_Xbawi=-uBxeRM%I#zRlUVl)=l5{dB6`jk3IqxqkF zGlR~h^u#eFmolpCff}MYv}lp(N3xMzBv{|=0K}hbVAykCBGkL+92ugAn$)JDQOAA+P1L z039THjmkBQwBT_g>BdI|I)GfAx50KTpxZHGm(h{F+La3=IfySKXr<%S?HvQA?ZT@RGR@o9NOXl&v2 z$x=hl6K=|D7B_?`oy%3X)*=3+3rGSP24H+FZt%qwWXFV9{+9*@K;m&d^#s*}Rf3F; zyfG3CB0Idd;_rMNsch>)Z>I&H^o*IF674YsepEXJ7a1+)^8el23C}maj>6Yj_&RK1 zZZ6HsRr`|6z=GMh9Hfn4;&x2wT&Nk;z@ z!W7n%6UW_W4WJ0|`fJ1#vO8$Q%Z&{n?p#4obl`YZ z;Oa3CgtZAVandq6FnA{Yv!M~fCyC*j!7Obsyvy4k9$*H8KV%p!aoSLGDey4yh#3?$ zx=cTQiSok^t2V1@GvK*I8y0EOf>2~fYy)P0;U^wn@%W3!XFPuYRafEaE__{vuiNl- z9lq|v*M)dH#M>#}j`4Q>mmlE#1n)<9dqpO^ZJ|F6K`L>a&aJ(;SGKShCitk3`J$Tj z|MaK9?~CF0&G7qb_C;=31&2TZ$%vp2-(|kItypah2`qMX zTT=DmacbSZXI$2Q?HeMI#V&k9WZ~HhV+(tT!YPS@s&m%=^9>RCBQbH!76#BK|HbWA zoi#LEkZcmWYXC7kWzVT6twFp}vgykU14!SXdtBY$`mg;+{C*{#FFfD)b>Y{ImkVBQ zc)99n zK^Hh@y}jBKtl?r{wdAO%9^4Eq7`E7p?mr>fwANk^k{sCitFqvDCyw+##$4%ft9HvmdG`HT1P-+bZu#;*&%ZoFI^%!s-VHtIo${pG4v>Ndbg z=e>M>g&xGcj4AyrZUf>MJ2^E`T`wvjd^N(>1{T&nE4q8<%n`&dOt)Sr&;!d;9sH+i zZGhi@V3$F^{-P` zDH|*qSAdw8$-2`<7C>+(_Kfe42fcURUTY=M{RQ2_xn1%=?ue1)nnQKZ5z@sbP6be1 
zE_{`@8P!eZ!ycSGijaM~klFU^-*pte&cfGW_&N<=$KmTdd>x3d6Y+Hca|So_*^C8zaaXTP`HpXKgfU4{2j<+0SVt<{R%7M2M)_L=fMyQSg^&xM2~u= zk4T@tXf6A_1fi(c%5nvwvCwi=0Y4#NB-HIQ5<&LEy<3~=D-0ZMh0Ku$Edi5R_rC9h z5X7$Bz4Q7lOGwb|z4J6g7#MOYcOUm4nqr(;T`vYTw-n@CUZU$sVW#%R;xMsV_-ba8 zCBO@om*0*`K*G*M!E?+Ak6C|~j7dv?y^HLD5)CUT;x8ZDxLg8y7(Z1F;t*|Bayc|x z5^UNC?H*w$?(T~bX6=z$^qr!IE=PD?Ndb^!n|!UUK-_;oY|C322;D}%uwT^*pu+nR z(<>P$dd}M~%wz?aPWjl;cXA;Aq{|UKh-2CUq=KY%8yIL~LUk9GO<2=oBg0`5eF64^yel7gRukm>#KF`GGq4+!% zpU2|!Tznpk&y(?a^k_`ABr~TW9IB5gO*vx<;ycE(k}3?KI6ICpYheoK0!w%mej7jp zi>t(Ef+=)DD+6{YBYS6*qhn*`)_WOI_hxZ8Qk|VOytkNf!*<%BQo=(m*X-;pc-dI>5Y@7(gwj zfxTN?^{H1nFba&>(cd&!kTqF0mikK`@n1t5PCkIf`B~pBH$eQoF)F{aNe9?G%cspP zk^FXd)4B?DU@T+c9P?(hel*{PIUR7K}cveNhzp`NdulLgDzj-a5 zOgLp^NrF$u0#}EWl7YjIOgB;_!JP2rs?!OGw(8iJe?xC^K64@6D1|ybC{bt34(=zz zNG##h)4jTIXH9I=v(scCpqxbgiY`1p9(dU31sTKybfaWU^dP3i+e_#o8N}lQ!`88( zoJO^Y9CD2e`KuWn?@?|lZdtz7{sJ0LDeS&cuMd;)0}7l+$bjb$uM51c@Vdn78gC1D zTUn5odug-pmwPn8YCAbMSdk!eHS7BBZ8Xp?m2Ar8CP8z0++h5AH2%zGx*%E~V5n1D zd?O8t%D;uMe$)f;ATbC1?T8kM8u)ol7x>4{KL6r|_^Ze(j1JXGqf3Ki{y-c}P z&|6*T>C^lz`GX1*oQ#eKZphc9SASs|Ug+c$`KAN;EZLc6@rYIzlfAf4 z7v%3IUpFUHA^TMbdnlV8)EpYWDKCtk$C9RC%&7KJuNq%bl zN&%t||JavMU5Koy%-u2n?t0L8R#3AJ$aj9r;XF$L`$;~#I5}PDT$fwUr$Ygr#F)6~ zojO4L^S#(e8hdX_xi&ziW0HJ0n(uLIXR?(x44y4y&U%c-ea5qHifV(nGyOu19Fj*% zR^aQKS^%kWgPHU1WMwyetJT$l$k?!l>E%>du=l~CjbpzbBL4e~$|DQW_^i-XK5iOx z{%Fi=uh0VdhRRiyx-=lBjk^5)tp)Dg-*SvBk$x2Fyd_YL8Y#Pa$G`@pf6eLE!xh@V zac`8Mh4c+v1tb-29bg=)?oiD|_7l;p`7U1v>=At>n+6jrpL(%0=>Y$`>la!653E2g zwv#|$Ce8aMk5>t=z%tU}dyWh5&n@8Ayl>Lx9rKTm;@|Jae+v!&tu*|%)bQ^u;@@k; zzxRlLFY>SVCUNgo;@`W(zn6)BZxjDsC;q)p<*I0Jp*LjMzq*%8QwoizuIV_;M*)he z>@ssFWGlz+@Fh2sfpSMOf_0L5Z9nvKRAo@6u?(9qf^DGi~ zJeL7*tE4%fLcSx{UDL4Dfdm*uSGYtF`Pxm*F2Vy6Ou7#Ts_de|=T%L4HA*DldeBhf zR!fDG!&!kB%?*IHvXHq+8`;_s68HN@hTv8cQ~HWT19`UISCK|WAW^JPCV!j;3cLBo zykty(LiwqxJOBP<{Pw*4$^cqwb|XduG>BJ_CSSP?FxsC--^gqVL}P8oZHLLwcaVRr z>=ILm8RGDG6+;1&1a`KM2n*A@X3P_mso=Uy#dHJ0F5wte{=(Z-IHQ&3ESG_>?tyqo 
zyBZDBcC)i#duUKsU{4xrra?5a_3S$|*pSudXJcmy1FClG&b0mgekcC@PyG9#`1eQg z@0a4wE8x#F;LkhY&qLt-36K5wJOZC*xJmtF$rUjK)#KwgZ$6}fyLeL`=TAeZ+TdXv zR7!)jLp2T24-7%`rRHyreKaWB#6Py}gaPbEvEln}M2{*Ln5Y{-)5htTAa|tS+?cph zFA}`4iAsxEMS}%=;1}ynUupM$#2Q>I!pAE7dgj|Z&3~eP9(%;U`E^X%XfER90`Ap( z-?fw9{Xgf``2S!J9ux7HiN{nt=HfA#_=eB!)pt!GXv-e>+CzoG=SJm?Up2vf73(|E zuT-e=R`@x!4vn*XaT7pxFu1<3`+|ceL4WeQgealqgUMRZ8OWyi zTF4ak=R61&Jfa0p_6l9Zc zXhDl>`PQWe{*FON1-lXY|IJQew+a#%?^K^;163xdZA&7f0wy7hA^ z5o)$gPpv*?27{c#HPH@4$gU;n?@K_}pBN`}t1 zFh%j>$=Q1qh`*cN<7f9Gy*B>S$R3EA=%14*e9_x%tF>7J_I+y&n3p|aV>fM{UeE>6 zD=;i0Q62O|PMvHIH-#!S_FleENZu#MNv{gf{I^+*Cs>G3RolI33`6%n{l@O-p$f4< z!>`4~5Pw&@hfiawP?ka7d`aF67`|fnG`dxw?0G{;imw@npKR_r(V+tQ%fGnQrJ?n^ zBxdhOQeKot5xsmYV?+t`*0%j94>5xUTd6wAzjj{^((lUH8!x<+AT_ZgfA*Fc$~}3V zgd>!o)lv62Cx-N68nyf5V`X5A5OXN6Me`pVCtfD1fZi65decW}d~29#&wvUnc{G~9 zs6+hR9i6^&Ruwo9J$%Cq)GfQ{{by9cRk4?=ydUKy(y=!lyQx78MfSkm_h#UyX6JJx zO$|Ppq&e>p{rmUM`CecP_=H^}eU@Fz56t`7**3p6Tl|fFVXxx9@d>}Lf&YCT|NA}u z_kH~D|3sIP%Un05;lbg7bHhgFP*nPjeHW_Z+oeyPw3Rmp#`(_jVjoEe-7!7ou7~_e zH}_P-Ckc?Y`Kfx76+JI3a9QtnaVUSA*HSkBJ|R~ZU8GnXOfyRiC;8BPD@|J@*~DOO z-SF$wcStX%2mHhNMHbh}ppHF#<|hPRp~J8Hf0+SHZj=@z2!i`*$E4pn=CCLmNOS%y zX$}c}-z+R-`JpXR%;Bv!dj5pwb%j+zFfM!Q{@&-o?*rlYiT--ufk4=U{_OvGk0gA$f^>Ne+}SGTaIWUxJ)ea=o_W2o z|Ir(M{|CQ6gx^0(kYBA~dt~k6+zZcYp^k|(H=*l;#FlDEtcBT5-u|p*7Juz8p+_bz ze8V!~S-MKQ5bNT-3wuwI-Ici=br%2g4a>jwrSS9Oo{Q(JsH9tNoyIDVUN=OS9c=g5j=PN>r9O{Y{?ARKsbOB&Jk@mZmiihsw&d4;c%5smc< zg=-RdfVit}pLQ|gcXjL!pW#)DYbQh#1rKao0}T0f8qG<6@5>>P?43Uz@Q)n-%)L0a zum|T~FzkH$Kkrem_#;RBe%xRCb-4Y!zxMlZ`+tE|55Ry>b7N3_>wq5eMg5^?}a5yo* zdIJSIheVk+W(r`|^n&2GQvb=fB>u=L|AqOMns*wvHwk z@tOk9fGCxJpZMavovd+>>)FJy?}kmc4Tfp(C_l^iY?TNmJ|}cF0MTN6WGOc$5$sz% zuTT0{8bq;`jFnahVqn|#c;#o>A}^qw=KQh>J#Q-w%UCCbRYput>1LwqzkV&b9V3K2 zzEH^QsYrt=U+-mHLc*Bp*IheLtfj%k$eUauO&CiOkEM#CyqsW6h(G#E1S{>yYdPGB zYVa44Hv)r%v4gKRhCkm+1;(9J)|IQeuxNTZ-zKeUtUTDKlNtr zJ?DinT3I3U+qV?RcD4HcOGXqs&u8rV_8J8kbM6s^og!H1fohwibX4bY=Jv{ji!I7F 
zb*Oeb;f^>Kfa+MQofMFN82Lk|T>^7&X^__5gszXEF*EIwz(VV+Ed5Yj7V6(A$5$zV zxr)S6`6Q{pV>ueFnk$J(Ugzx}QlY|v?gDBnR|WY}VZtx(X73wGO!*>7U+pp#+cUqU?m1k zl93(#^E+7nn>}3et&*607%@QO8yT=^0!P0+ z2b|qlx3Bf(f9y!}j~(IVgqI^;&UiiG^@P_WUe9PAa1h#)!HuoUP^A}`&=w|n((D{7OjW^@`J27lQ))PduepVsuH8hbg zwV?dZ^K9DPGpn&@D4$GdH3cy093}?wV!y7F^h>Uwe9vIlxe+NY?D^*DDbJf|KGr$M z@eRKk;t$3#M6aRo^zdwtWwQ+v6@iC6lMt=(G9jz@smX~iv}{#Nq+5+=vJso#nsAGp zI{6|PjaOM`9w7ZR$qB1tpHf24^I8|-^qL*wjGJyBFf?7X4bE4ZzloEmUX16CUt322 zSrbO=#BpR-(cw~$#^0GtC6?3;q57S0@_Cc=>JgLUqnuJtToKLl{i>vWt%)#OHh2FD zL{H^UuY2|gWl6j~7tSL6R}qubch;G>zu`Q+5BUYb*4@r?xW8d#u!nKD45BMmW;zWF zH^^R&Db;&S0Ybgu_v0)x4Q4T6cGqesfF&Pv8&_h&obP*kS-PU}Wy!CToR(pYl&;6F z&J-Bj!>4$AJ1>@aM>3)bjYrB>HVOV%g+-c1EzMRy7#g@sMJI9fqAzrwOJyZ(TZ>hC z*RelhLH3*(GUPYOkCmv%F5^){_XlVNvtc521A7Ps+ZUbmZlxyCIQulCb=DWY)&zk-O1oAZpEgz)pG0U~(O$gww}p({S9u*Q zTzrjBW%s{x4`1G5v9dV`b|iew){@Y{jF*M3R=pSmMMZ0`e7~TL3HCnf$tVbdeBst3 zp0e6lneu*JMsg4wKXLWlizqE@f!77zWD{R<)no?Ef4@lJhNu?SW3@u6q$mjLFW&ur zdy5uEE;|uiQyBz_UzeIZXxGB<{Qu_@9Zdgx>y+d!u5}|T5QxL~j|-fY!BQT6zk1s` z2#9^Mb2Y(&*pmNxKbq~2OjggI9YcQu2`{-GQiC8tS7K6jM*81-|I8Kre&}3?P7vtl zuG-aeS_WJ0{I=OZF$hL`a@3xNN@F@+;#yutK`#0ifh)&AP}CFQ|>t}jyW&6 zKM*PMFP+sF@$jpB5Y+fO%S0PV{xyHagYPk>LP5~kcmMe3dMPaR<+79xZp3en%?I~3 zDJxv2Pq2z%S6`LY-EIy9 z_lL&jzb}elb9{Z{N%sR`FxjX5vbhKrGiloVU59+d1A@b_T(@SM3BoM{_Lu5yqZw z9|EquJLnhn$r$s0^&y_`+uj>;>%6;g2xJFjO-FIiu#QIS3uK-UgeeC+!fa?5Z5PM8 zU_uC38EURud4`4!T;Gy*ek2%-Y-zEAJv6LQ&_rK(C>W}5i{99KjfR_}&Z#;i*rOlDDat z2J_TS)7oI*x_Ew|=^z#J@Os-E^f(y!`77fDL#S9jdVX7XFpMz~w6a*Jn7ifEuQtzu zA%(QDtEGy9C9Yax?cN#;Z~FRqmv~VyO|Q3R+HJugVlf^twkTFjVRe8@t!9bMqZn{AwWBfVSs`QJ37v(4YzRa-X5}IGV zv#>3JjFsk3`0PjXKalor`f-YkDac>RJ^UmXp6YJwV&$V?#*v3I@4gR)$QscbO$8K; zq$3v*^&uGc+t)aHQYqLkiR~dn--1!Cn_5Q=p3Wif2lMQR`QLyu`9`T3s zhrp=t=xF-{1@8+KYKbf9Wuw6rs z^4sVkuz!6>!BI;jZ~kItZj%tGuM@ptd6mor^FDH{TrKkqh|A$hx!e>w1phJdzz z)^zp_Dt5f`#PD*75RgaqacY8!;q8SmT2|+)9SngAqoaKHOtJ4ecck0xgCYLmS8whl zQ|tj6CjbXtB=vyAd3GYSr zvx0QWe+LcoymfIp<7hDKYIc_;nb5G4@f`0;kv`OJ?MXbng@#pbm~ne|7TLp{+VNfI 
zY1oD0d*t%^U|^L?7CV9R$uQT=D!1qQCQyPGzCzOCow%T`(Nb z{r0U##1s=e$8=E~&BxTgURN>R6!U$6?Q6^nhG6Anv9lLVu_DSbdUkOzSUvjcP5Nkx zxdz>O|1vEYipKhRAAT~$n5EYE?1~5m&iByEDQ1Snh;1(&3l4_5gSK{E(Pmg|%!41b zPQeh))!ghhWrl6%D`q}M3WlwFirDw7m}4$azQ+_R1~2+={9wdGh2_DZ$e!-uX=jer z^gYUtoDG6k)I6e$j5+4N_a)c4r$Nx>e3mRLY>qAXf8$%W2$_#&NIr*b<6i~AhQJy} z%}g`QT|+LSW@#`~Fr|A8tT4lB!w+Tpiw47{?+#-vGp2v(6G6m&0)f5wpLZemKW{5j3Yflx*QW;tFI9V91h`;!mcv^V9V$E>;CMO)DI1<=iV9s&$`+PTh`Gq`5eV% zkQM+AijuC}ku>Z)M=^8KfdIJANznSmZi;b*KB&z|2!OXg4|oV@nPLM&L+?y>20-QU zMukR4$?%QmNHKtDZJXQ&SO;?teb-0^iZ{0opqZ9(*jCYNr4XY`(f6ukZ?`zQfAJ^`F zaEOMz-AsAbzBT|TL(nVaNyExslo{432f)U%C#2|L8Wu6Ud$80h0J5I86N++Z*uk_3 zkySnckm0mzm^Xoj;nz=iHJg%@<_Gql?lzYQ0hT29f~msP5ArHSZfJvb;;-zU%07%mc0Bc zVA1F9I$b~D3mMBR<2Dh{k2?(u;;`x_N3QpwoD_xI|LJt0x{lCjGgsjW=%KCm{^kD-HT!JMlU(Kp_p zYD|mWs!qoECI$_z-}Z(kBU-GS02%A(noSXI@`g=Hn(Nvh0#?H`Y5jW08?+2*vA*?a z{d$c$w?2Bq_6Mg+^iKgM$kjTrXZ*cQ5b za&9%kimNvZtvTlnere_w8y*^B{OjDW#GLhp%k^uoXe1e8l8)y;2p#YS(QEytBvC_b zX#LlB;(O8hn7*l8aWTM}uJbr%`FewVkN#8610-z0f80kb-#!%~VQt6d3p=B{Ve5^n z@6yvr7>P~!rk=Mq$kO@NaGli0B=bJ0O~-jd=So_I**kqqFMTc7NTxUFeDj(Ku_j@~ z${!9s&G3fN#68z6TJ^DW2i9^aM0i8cva?%m{?@}LoXv}ugn9$b_QMVSi+Y$_KyAF5 z2jV9|^yBUTJ!~7IB^cfSpS)(mALwFtcvyT*HhRO#;(pVxEL}{Prjzr1i#JSEeRB0$ zp^NSODtJz6lQ+!0smE%z>tJiDHy=Bk<_+=b>o@GVt&N39@@?9H`1|x?M0iJ%4mKdk zHy(rJ*MY{py0o#;p>Y4r{oYXU@WREWUD{aKx!>5SId4#0R+Dm=7mZ)vEObD{2cGX+ zzv0vsEiA>z?_-Ij55%m>QY<;Cg)ss}%_!bJ!04~Xv>LQ9uXjuNFJ<~b!lM!4c0VnQ z;&y&RVzm#1cO^N6Qnj$isX+t11|Rt4wY<-MnHJWlYTGB@=>rosN13L+XkvRll|D%Q z;sc6LlAPwgX=3-fz3kg{d_nIH&vy2Unwaf9OOqoWzHs8|@e>RqO-$7O$-R&3eW5XM z&$X?Bnr+s|_Qb4Zj~?cAL+; zNUC8)94>3j4E(@a!69|CtSVOIZ(e-W(hqD%zaM>6(OZ-LDLVq4D>xN92~MU@=Qw z*1Xx^2fCM5?c(36j9vY`GnzTo4-8lLUwC#&8LN+I&{^AEsPqd4gGnBEG#Ee6U%NT%dINZ*1 zO9h*i{gsm@#sEGWo>zM?6>R#5f$(ch1|*N1+p%v#1ylZ*9VMi~fYZy^I0L$sF~gfa z^_vVC@RYm%LbIU~#FJREJgDi`Un(s9# zDq=wU%cLVGRJ`MZ^ckKCWm=Wsg%(;{DHRYssmL(9;=k| z_i9|>4|@+k=XrBY4kPGCS8Nvb2YLPjUp24FW6k>&4~(t$hrZp;GL&WoOt|XZ>$^++ 
zLA>}mkMLndjIzY!M)NNQJf~b(D_g3FNp7M%i(~eOSk-`|UC~O|L5@DP)!!IEa7n7I zj8?`ddnXF7e`LV*hW8RS5h_?dQ*k2G1Ow_W7{2KKrGm{V{^E^B^V7bIxYP-#VagJ6 z5o;y=A%RePpzD?z#wogAH&??SQns-buh1o8^v3T~S1kM?Mnguf;Q{xCh~d}|qxI(Ep#^OS3-Kj^F}GLZ07$IkQHTfR>5hq+ln`&7QLjHd1t5pV!jn)W|6)3xj)zo{eGBjFN?i+x-6xx7x5cR<-40BhcTo4 zrTVo$-2UBnq4J>|*1n`Tan-OtjPxV@l*(g8&&v#}Cy>5XjO-l)ne*YT@hiC?onFI^b`bED zEdU9P80nQ>i{SaH;Q)ns(~Fhtq@qXDF6grBhT=}X=1Fe zhu&2t2SAJSlZcurO^nT1Tub&C;vWD%K)}Dn3)vto%rQY=$Je|7kdy8XjunDRC+Zs^4;q6r8m^DJdV!x}%>xKsaSCXyW3SlZn#4z?mp8(jjyt}*DhKg~y z-U?H;MD`Q6tvol0iWN+LA(bQh^gKG3N3f=1MSHFhd60eYtw+z(reY7;wT9RwkbMm8 zpkJG$U^n%jR_|XC0H5OdhCXmoG4~{=ktF0#A>ZV)kFTR*gwic(FS`BVSEkIDVHy<^ zf4zI~>Ro^6_05_tAyTojlnIgbSNtI*xHWL692Mh4__XPeKjgn%yL&q`72{zoZfw|% z@Q|f2EcXfpd$0T}=izpQZ+8yn`a4oESI1jne$oD**8{!PycA4_ca2YJ0P_EBd_!&h zWNg2~EyaGSKYY3>dc*Gl8GB>7_ou(DKRnxhiQh7tjI||A_>`FW1Aq2ho-xYL>py2l zvFQ3kv8v{}r)gx2V7YhaenE8okKdXer^r}%_?eStO8yYv)6cu#lZ=g__#s9_{Hb*o zR`8H9w$zNSI75FhoBgf%;}Kv}HsV^J^!>rKjV(eGVqoI)jUO)9uWrU-8q9fvAQwsw=o!^tlWQXdl z@$9IpCmFD+Uaf5t6JWFT-=}ob7|_n$fMxzL!6r7nw2lg6056aC%-Ajy>?V_mzKl8p z%wY11%n=i;;OD8vYIX)tOtTa_Crz-c< z`NExJP5`cI9>-sEh;jIf>oYflV#koZ*XH%ZdI6;EeQSa8}La^{` z`48EXT_NJYBc_wHW$C}q?(sOSeo@;Q^q!X)ZAz1bx|HKH+nvz!^jDO;6qf)zKZwP2 z0)f3?eq9Sa{)2|bQ;utBKwp~Tf$l*D^jz`ydgG?$KkqP4%rENC`VQ~i@pQef*W&ex zT7DltDR@CV^OoqDJsPlOZ0F=Ec`r!1koZ=`R}-=>Za#Lx+zWyfj~r5atO)^0JD%>( z@PaPiwtF80wBY~M^(F97cJ2RDDiX3)vS*il-?=at`wU~>#u}9d*(i0Lzh>*t{1$3ZN)pg zCw!Xls9dkI7;>!j-q~$8)c`m(12spy;1%~fyR+xifyfUL zQ^9WZMAn`qbQpV{-P{;s0eQ>(K1S%!;c#$@;PRsu&>Z>ceAjPhcqO<-ePfIzyl5ZPJC?w`b_K_QdwBk!vzv{}t-$!<;I$n#E)cR& zj3$!H6yD$hO$Lr{C$CyT^IL_w)f-(vjh*?g=G$&%76EV?=nCV1Jbt2|E5tE&W*-pfCCt|)6G2w}r z@kC5{BIZ01lb(oKPsFq*V%`%m@rjuEL`;1m<~|XVpNQE{#Pla({&&^en7sP#1yT(? 
zqud89p>%BPrePs($PY_7wspHDOiERkRQY&A+|IQR_pY&oH;Ny4H|Kl9e*F=VZD%cE zcO7@fk{NFh_1w64`7>;9vG)&r#x-`I;Yfa!niXv136cHn=>xtlI>N%PR?z)ieSG|i z58RLSxUpi56`Wa|&Jy+B2O`sp>(97bLCue?Ut&J{0PiE)R+-&a(24uU;QrY;17}V= zw*tEj-`)xC^nnt&lPC6JdzyPDZUU&wj>bbQS(Yv?_F$2m&b7Z9KDB|bA7sC6%2=Ysnui+*+6 z#;}2mvjO|m!52=dE_kO@Xafa@wz$-#`-1LW(tWclHb9Xdp2&RV%Zzs!x*sxq$bN9c zc0?pU%Ldp|7yZge^n>1+@bz1QZJ@VYPDHHO4+>>k69QgYLlv&6h3fslhwQkYwXVE?pf$XDngL!p!Rdg8|$hW`y4)SVA8C zdyhdF141T;EG@Y#Arsf|CxaN!rRMl{ajXTrYSb*E&>6tN*ZS*7lm#rNmk(~iG~cm% zvKi_YFkKL{_>l?D5lZFdRwY@$jUcvnD>N7&dbFl3Sla?9O1cea=P{u5{;n(J88XQE zJV-dZhyjC}4l6WVBSY$ifPJLLesE@d$Z`!L1AXoGr6;cV!TLpC-F6L=p=3*DVd;J> z|J4^%uOA>o#g4E>5eq-~z}fn1ltPA3*U0G^Za=tC)HKjLVGizLqL#a__(Dn|>!MSS z%;Czk;F-`=Ul^(7@p*K_90U+|$5wM+XjR!uAKYXPxx6i7$-%x*mtq>FT4@gNby8b| z-F+eCBRPX@&>XyTE}6CN^M%mZwj5qUpesp7H#||GzyDPC_fYlsq>Y|Qo)cSz#yo`fSznDQrqm7Asoe!i6 z#hI_UY6ioGw=PeP_y9viX8(dLGmv@uWA{Z(U*LD9D6JhegZ`tJ%yxxi|4K5AI&$3% z4*tUND;vwhYU5tTIy2z^v^dA=q%R2OHTjChn!$M5vx0Iv z=z!+?(g*W?dV}tQApf<^+VCK4McRuhZ(yxYd9&!OHXJX!<9vIsH;5mSaii#IL!wou zRZ)~TkiyqY`=x0^@)homa7S+_-gt7I45v11Uh^&NH$I;f6?|krrfC5JZ5|7pPh}Wv z&^;Th1qbw|B-iD6!NQc0clK_Ykm!3Y@oOCJziY3!BSjO|DoZCTgnGfepG~WU=4%0m zmuQo7tQU~t-Bd<)Mna77CwSVLsBS-fB>i_sW*KnwcMJPx{h%M%WF zu07uSPzyXQIkg;$J%KHGnPtC@HqiO{=7m>!!g|J6Ntb)t(7Klv{~^i~hReDNmQ8BI zcz9SNkBcWXK4a|=_tXJ9hgZEjMREV9tQ{90X@k^>yzkb797He?7pc$=K_ z04nReoTwUY7$ujl8{X#uHCyABPHxr)tzp44bzvS*agh=qL(_(iTVB+VC=citdS`b^ zQ42b5+*nDWc|h90K+FjZErXb)cW8`z+G{PU4bNW-%5%JM2Zc=w zw8p<^gHv&o(5|cQP-eL#Vy~1As5TwBZd&dRzZG`x(BRgEj&X&$iy`j7@xzH)|5^w1 z^dhJIHn@Z4h^a}~GCe3Od>k}``%{(dmaBfyg9Afbzhs_shj5EYANL#j5FdU@t@^Ax zNE+)+V%L~MB?_B;`ro`_vf z#I`45-xIO%iP+ghY;7X;HW8bfh}})Z_MUzsc;>pYE7Vsr8V#2l{)GWe#E2$hNE0!p zi5S#GjA|l=H4)>Qh=EPS$WHFO@_h9tXE0Him-BPT7}SGTaM+x82J-Qf>v(sXfOy0w z6}Nn6=+DcaFYXAi!2MLU#XV>6ysxos#0a36z9_-B%^5D~cR5kE0Zdq{^bB8g2KCT3 zfqMvG(`LE+fI4S*(Aln~CI2^eEfL$6h zBVyPQG46;MctngmB8DCjV~>czN317FB+`M|pXUDaE|;j$y?cv%AwqT1C-#RH^j_9e 
z9!m5DtBPZ4r)>59c_y%Uu69liYrMPB7g+C#iHrv60$E*v-=nn$gmur5riOrj~iZ&W}sTg9`|{RI`jTb7Tm{e zb`$^p7Ri40nE>&f8^WEO#5+5QcX|@<{3PB9%H7d^c0;fNRKLk?mY#0~8TvZBJcA0* zDz+f3^qwU&DE=-^rmO~CwyW=LXYVRovA5NrOb*tsO%WWZvV_5OlG*-7IcO?P`|P~Y z5*VKp4W8|igPi2HoPIk?pa!!hRw>ED#NmX&>Lr#?l`db<#Hj%K4bi3g_bgyzOz{Qs zQbjPE7P=JbZvm<=!e2~Nlp#6SX@S`!8K|yYm$mv-K>l=-FUK!3WW17QBq^xEHtyD6 zHwVb@sDa1X{jLg_ng3QcyMy!CnaF`}&8mlpzAC`A?y!Q( z_IMfs?WDbMbHMgsqRlq4y^@S+I9Fl2G8oz~9VfpBg<>mx)px)qdW{XsT zwA};p#dpl1YEj05JDtk#Wnsoa)kS3BE^O-3pHTvU-^!B9$x!Gmc9%C)32GG`-}>8Q z`Hs~xxWyD?==b z%Rv|48g=D9GKjAgIA9te18%n~4<{*EK>OLBtfGsgVfw&;ygsg53fGOQt__ibFBkt^4z z=ZRawoDUYLEZb7L!x9+74gEg?gds$r^;c0jZZAm4$ofEbS(7yABw$Bg(68u1-9 z;yY}_cif2Yz!Be(BfdjNe8-L$4@kWlF}HRp0avnV)LLF_e-d$5H?XU~i?O7w*&^mZ zTJ7kw#7P-ASX&bw*P6}Vk1X2zuC0rr)r&MLx`TgtIR z+sq&mHg2;%tpI|Z*%bc+X3$=Y_0=y2lPwKWF%Qguv^egnlaxG+Xx=te;x>m!tz%E) z5)>fMjtj*vF#n6c_V}LHJJzj&ukshF)T6My;r;JkCjtAOYs?HDo5jr3;jJlRq=XfPdk2~i9@Kc@#EZh@?ZRrNThW9>HFv1 z@3xkWJj&vbayDTwB*Yd}Ls%0j{_@Zt@>_Z1Gdr07U+-4(&c=7*Jw}9kj)?alC8tRQ zSh5Mx*7cpz_zxL;MHr zpnaaqJo*y}_+S}lz9`!cs3kvHmmiP>iqT0i*Q<8GJ>HixAt%k`chJ~F^Y{vBcrN9- zD*rfcUo10^lq>~r#+(K4Fm00nab9aogffX=uj$T++wuN2RTK z?;;Haib{9Zvj6GNaa&%_NdVIuo4Q)P9RAKlIUmY>4Q629KOYv-5sCt?Cw7X2SrK?K;`pa=|9DmM@OutaCBA6e2fRMX+QlB&Inj?) 
zj~lfPc>V6or{N4fB(m}(D@mbxIbmm9$qffXs1_CTr( z`(?t;hUWA^T9|(!*OnXgUUS;n(uBuv)ow11WJ9UmGW(r+F`fAMu(rocgLu8u&eJ`1 zIF7Z*?!7e+q3fe{-VvBq6&k$ql?|~)b3w@z=2tSrcV#6Bfy>^mE#sIz-BopB2??=z zZRKKpi}(N1@)4=$a|4@9#=#j!Ojmi0a36TukP%Bc7ioa$oR=u}++7pO5)1BQXJ+5^ zpx#)?KVU*?)7N8$jp=+GA+q_b4CAkY96Q;vS}bhbC%xmZ7rAcC9zlv+XXADqnIV-Qcc{ zOc@&hZ=s+hem+$DEt|4y9k$=o+IAmk*xtTpQ@k7gws-7;hvu;Q+!^}!lbv;uzWcH* zdm9{qM?0ZISyut^?cn)-`6Ts!u=?WCvg@K2D50N}VppYnDhw;Hy;;3Q33(lsNWB$B zWsa4t9byPksCb{*b={AYkgS)JsQn5m?DX1Y!?>r0=I`Qyp~sHkt}TCU=CnF$nb5 z(LQX!rbh*L&z|{OD%$8XUN6#u3M0rUOjlnUh1U)Z>bp~6Zm$Ldg;zz^P~p;r^i`>) zI%ri)zHnOx6@m(ctoBK0qcuKGq87(6-Fu6^=ddP9=jj^h+du`WrELlaxHM4y2!Geb zLsTdpjLh3~UkzOut+qM7jS6KQ+A0&PRhhCQ$_tAy6UWRgvvQxiH)akub~ubX^Y8pc&dL_EyNOnx{0d&k#) zGyD9f%@p_|wQS2ioa-)lbG`5A1q$%r-K8UO(HuSmMl@vRQQ$&)O`F|rbLe_?y{{;o z0&MNxPyIvx#?2+->Jo8xiMYH(++HHCFA+D9h^t7%T_oZ%5^)=exQ;~JM*(^{C{-V6FyO>m9>Ol|YeQkoUe z#h-GR9wme0)#>f|yjI|S$F_B6KN*PjLB!D{;vy4qlZp2j5pgt$IGaQqP9jd{|E~WJ zPUYdbdo+a4AFnloJ?J>M=gtvQ%#b0tJ>Vc>y zq8^EQCfWhfPKftY5$~@e-fu;Jo|QBY*M@no+G|XIk>P&AJ%=TAcpWvzHHV!nAa{Si z-%?zgXO5FIZfwW)kW5{V`29~_KMFRHAq4@6%0k^04H zIYbME$OaerL7vpnEZ6Po!1^x;Ea~6(4SD~?DboJ;SqlSyvJ>tf@YV7Kag%+u7enQN zt42@xBEK(Kz42*i`?MO0WMXyq*!n`1G1Ycn|7yrb+#Sf@7sj4SI-QhU4V_$yb$6-0 zpnGASs!5m}5a)BJbP?A2*+Ql;Qx@%}ec3;ZCCZ(*BktPFS` zyJR*v?F;UX);#qGr6Bd^puvl`zA)(c#%j|WNw6C_a=p9L7to$D(eWY)czR?-T73iN zZ}_T%_Yw&>c$2%sDaRL5&6gG%g-ia$uL1eiWsw{|;BM3mq<4vf+OYcgi2^^MUgz%e zCW$d+J`=O(_sC6Qh_>?kcr?uq#!f`#4+^XVarek+tr$OG1iIJURuln-eXggqn;#^X zJPz@e7J{sUFkHLN4^&HSOrNL=z^m!4n;0H`e{^arP2%Cg>rc*S=$8mU*RDms_@=OY zA{YH;>=Odcc|!6zOL4ob!+`ll5unP))^g(gCtuQ2O?452=FBbKUpjn&v`Df3V+C$s z#o+cBtaYw z?Ov3SB;yC6PhOOpu&#oP0}wi@>IW2+<1KX`R)N6IFtUQLALKlHQSSD06_9MEBpIGq zzMYS9ZFWh*@LTn9IOqq1`wqCoWlF&P6)j^Y4&(Nqbs9w(tNzl@qWJrqwuL?*aOvHv zsgn{g=Ocw98DHe;#bHdy=-%$NJ`mSaW@I8E0jqYLQoEhw1I^=KuWj~~gq=ll`Kk>* zP!;su{S1Lee0hP+jukrVmi#&3Xpr zN&U5cc7*%Ij3d5qDQ@K5Aw6|Su6rCLyWbb!)a&yb9;$&{9xa}965F@A>&MX*YOqw@ zpN_I>AoOJ%df$k=ZCV=-%{Tk 
zc5mkjuGNrv=NwzaHEdtpbq)eT3J`()=X<#?G+WA!O8cz_!0YRz;Psje@=qU9fF%W= z&M&9OeO7`( zT4d7Lqz{<9v8&{0SBBJQN3N&;@PSlL$vPn)RgmuSYIouFg&^4_5zp9Eq50Oi)CmD! zSpPB5-zQfM29m-W+m(EoF_qHmMa`h&{+|Q=*)g3mEqG>LBKC)4TpV9@)WOqU`_PSa zU&!bfh{@Zc{+Ip>aR2BOEU(txLhrg{peSnvM_QaOFbc**vzleW&)o0hM1e2p-8>)5 zzgiY3v6+R{cX2#Z`?$Ru(-4~}9^HiZw=a?WjV=Q_7k{y9FT?URKHid+APv^ZVU4Sk zeW7=e@0Y3bSQlea5o_ULHSaPB-hx6p@E| z-4DEb9sIyuqKL(YSN^a4%>JB^M4Hb&dtRnz1+Z^j=mDg(pDU8C51Wj3dr^z;`9QMA zwIt`uLjUCW#{Zs|EB~uMW51l(si!NU{=0&HKDP(JTfwuCDG1|LHYVL<4>Uuw&Y`g-tnsRC_-s=J4zW39|UkE_x$d%{l zs0Sox>#0(92!Jc4Pj`HvhJ4=Y(53F5OCaifK`WZ6e%D!1Csem2!iaw&_TlM_&XtA{PC+ zFoD*XDuXL^&H+gY=)l0J>y#55I z2=|GvCUF^ZBA1IX%{DS=A3tOAoI6&RJ3JUa z$daPy0fSlNm0mlRLb8_jp$bVnzOVU9H4h*7#c%1(=D_<2S-~}JvJApQ!y0q>JfN`X z9Q$5*0U*l1^zbFKs5l=$^h@NvG;SCU5ViaXI5yARtc}#;flxQAqM+~IfJCf{-(y;F zd103BkT)=%h?nJhE{0SKZJw-V+|KfuCpdEvB&w!bH=g!}c{$2Rl%5J;KOE%^ z$@)(I{0U!8nvctgurrmx=Ets{2-OxC&tUg3EMY`h*Gn>N&P0U%eo?to@qu z=MfX(HZQ75E2giq%R8Ekns|S^;`gS|i)n9}BK#2&F}R&0`0 zJSWYrZ~kkTx||gP4IqM1UFoI#0Don4PGk@_l5>M3Q$!0kO|rN zET*Y=T+lL+8{P1ecwM3~ryxvARu`AE`z}H&) zQ08eLT(hoveeNC4U-mPb{?0yfnT-u|vYC?wac}efeO5aAM{Fa?g(x?oT#0fg>P03l ztTDLM8wM@!r|0GVEoaQaQT!>J<$LZs%(?p&=k80K!+)FiZ{9!qNBsVih?z*lR3u_9 z60zlo*mK0Y8i{u~67PB>-UUg#E0TDZ>#j<|_qoLHdx`Rxy~>6}n)i=A zL@Ib3wkXkr7e^LwmlZU;uk3u?Nh6>~k=|5|P$6UKgsX&Y-e?TY_5-(Yw za%A%6S)v-i9pMO7tS6%%OPYX0eZpX~k|UT@+KMgFHU_p!reAwrIsl_qbk_$SLl}#E z;}!)DAd%-)GD~|l9-+?CRg`+~S^CnQQT6=MT#RJ+*Sutn>Nf9>M5K3FNWwp8k6i(mDPIA(Msm%I4%H9ruyqr7|_iI6P zRk0zZ&jH+@7Dmf0)q=BP7TgW?j?ljNn7-a#EvUO#Y-pY12y8W{E8R9~!w{S2|R)dSNemuu<)RLFhS(r*4% z2No|mwryiFmZ#f5(z6U5c(#%B#yuCy^FZaW;65GD6Fr;p_7@dYE5c2;PilcOKuNg3HEl&J)_7sZ|n0pGSac!gl(cZ8L=FZ(AIESM-htD{N=R1eL``>p660v@X zn9@WnMIzQB5sQ(C)kws0Bw{@hu^@?9kwh#>BG%+zSd;{;N+Ome5o45yL7I_&x>86` z4XAf}A~HiM&@AF{c-MY4D9d17v?c`8?M7$EUZ}y1N0odfQJBAC^6RJX)ZixAcVSv`qNPMZ9V|a<%%CDn?`ilM9HI`P%{`;EG73a~xs}a(P#rep z4V>|A_!|rPFRWw&mNF4*nTW+q#A+sDITQ7dP2w1~Bh^_un!`Ps!+V|MZ4O6i-oMYm zXa9&8Ohim&BIYs?lbMLwOvH31Vm=cwp^2E$L^%@WOw_|)dLrnNsAr-b5bcC$M?^a# 
zV&M|8a*0^FM66vR7B3NFiikl)#Hb=-SP?Od?yD~P5b=deHNgq|AP0>jN1QT4soAes4}T-2I41e+C+O5VQwtx za8syAi4t%JosZqdFatE--1Ul$Dv&lzUK9(*{eMry434Y9;2!0&h$EP`D3J0oSA+J} zlNOi1ngQe5N3*DKHAq%^-?>%OoVm8{)`j)=lgxn;y3A6eM-@Wh*Q=#P<^WOM+7Z1f zP&I1Sq(6+?m6hc=pDV){r$WM)Y%&b`9dFt23-f33>lN<+849V5(wRL9P^-YF8Zmpv zBza%0Sk-EP@H(SdTMIxQ-P$^&)y%!a_!3r~5P2AL&~myLXu*_CstxOh8AaUQaYo_9 zGIM#W=Ar)$Lq#Z@y}O)+3{d8Dk(FHuBHix@<(@T%LI&MQmRkwp@`@y{rkR7d?#|U( z2}&?%6z^VUY7Qjb8x!~ED?!F*4#7z*zrthHq52<`fLd|qH0eH;@4=XjLg9)))Yt6S zX1ErYn$`Pis^=40C2dPKmTZxknstBNc@zvRVC95z`~lUcGw?m z&PSKBDw@H4v3)vsa{NI`|5)(L52n!1v;3K0sy|%5zU#`lL#A-KbIEQUKYth;t$iQm zVhTwL(}yNx{2}@EP}4DcQ`o#?OoZ{10q&cM>yxell)n&O@%$wNbkFhlRE_`)j(n}# z_ksZd#{!Rd*_p!XiPnLa;CJ*5czl`V1&rbn=9#1HhcjFJ#Eey)FUBUf3VuY|DVI zb!#8e+W@q^)GUO(8L%_eX=kg5Dac6sAGb|pfcKJZKK_a+ zNcCE1#C9^^%em;%l4qvSc>Y~re{)$Cf7=um{b((aU*Qi~<@#3_pEiTbRqtMI zQ}l-~6+NSG!^~iK%gQYqz5HQ)hSP!%@65nX-e0IB+#f)dKo- zy0v7B{9(+5@uRbq3>l+e^)r(E;l|5$t+&l&7^sz`$#3z8Ojj|^bZ_*Bn;gF|HQ%gUxOf8|HghAsxAY!_G|$Y%u(v+y^}EUdShygp{T9H^hRN~oWej|- zHxzT10Hg}-yAof>070HOa}`g3S9(iN6=42~ntt*WC&6F*;*gL(m-hzTY5aQL%@HYX z?%c38%>X?&et%(`v@H@CU2Jm0O#}UR-g`rDv%B3gd!%~!nT4T}CQ3TdZ@g*L4*lK} zw(ih1O{6`;Ry+RP4sE|E*SKtjHnMEeb?AJR^AL}TeM!T<>;r!I?TP~efgo}zfc#o zD?K@xf$1N~q94UFbrD7O#HF{|wy4E9b!?}+9_p#sw|gNUBphmxB&8GH}I zQUy?PlGqz*O?tIq5;~Wp{kOiB@agSgFq-7r__8!wizr_{$5?9bNRKuR^Yd#R2Kod~ywD(?y-#n+&|yJ0LY_MgPr1I%rOJETW43$V3~h`jzEl zA?=8^Z4~{e!>NT*ySo=Gk#I!99dCtq?$AURO*gxjzH&fGG2b!_i!>0b-M9PjLIGK41e2)HdjnNE-ix9j+OulH_l>GDhsS#ZpoDG^{n{p zN7pSNUMc=GyQMs=H`v3zC(i5S~a^(qVSmVGpDOO_hc@+l-xUt0jlbPI{tS?fs6^^eyvKn7iA-!_n>H2F{&=~NvcR)l95{s6% z#jc=$;?a%W<*U`faIdn!=s^k;+qiywmah&(d&MSvf<&7CkKOY-SO$(!O__TyKB_F> z(rpWsMLna(`QabCxBJ)b*Ej44_uFC$wIX*{TX`A5@UJn>PBt@$_IxxiUEK(BYAae~ z!3>h&ot^u3BXF0o&u{THgPe>7T05Q@f#S}7zZhf&vE7vT?<8ZW-CFczXSW#?Ut=_0 za5aYJ&hJ)zGiK1=&c&gWVhnM1921twW}vt)!&5fJ1V~ZJ0)@%5cZ=J$I?MwoG|v%Q zm}}1DwK?k|i-ran)RKggAHOsKDa&84-hU=TVT(>^v8oANdV0OD-P013WD6eLQ!;}3 zbuV-8QLKO-bM>;vN+Xau`!VuqofS}@iiwnvj3Ca~KL1*u6-YI6aa8O!1jfUqht0Hb 
zooMYNOFdu+bfJS}zcm!#|1}V^zsdl{i{jJ%T* zHUQPmrH51atReR-qwzqMJ`~b!#+u@~*Kk#@!3Rryh`w{YHkr#BE^*ymUG`ZIsM5{V z7e%e%{`#+yXO8Oum{x^sH@60>kESLcob`ZPX};J2IvF?MpS(g{=)$y8t2O+%mGY+7SQlQ| z?_uY8i0kxK=bxWq(S@B!BCG<~Zpv2jp0OqAznh;gKLDp5n)*#a&Z2q!xe$go|Mu86x@L+3icz z0BW}<&qXmZ=tMs4y`-oK{oXHg^WT_*+(1G_gR3SO&N%n<95RQm3Ck?&Y_wp3-pyFH z9p(^BvFJZFr3J#6-kOT*+AmJjpe$_|OEHn>Gr{@t-Fj!^DjguTUGL*$F^7towChs* zx*%*5(eU;H?Z?a@UUm15;$?a;{JJFF zB-RXqPTfA>bxaR>$sMT!G&6YaQdY8~QxDK%wVIb=IRCHF2^~MA4_RtAV?RGKg(~l| zl0b$5xKlVL&SaRvrCS@jb+`>-E>=-B&Oe{xG6cobx})7erVzR{!!!A~A*6mt)Y!e& z6xJU$E)w}>2*h|b`&u22cn@ad-t-Lq+ZKZU32e!(|Na{SW4;Zfk|IYm`<>5``TvP~ z%m01mWrRPr_~U{~-wL^^-7agtrv~Ih`M{C5TLOOPO${CU7 z3AVgUvPOfAQ&TQ)ol#}f_~Nt5)+j?f&E&>CXC%1!5X}EZLH-96pS-{5jEYA~HrSk| zpnVrUsCnc&qdECS6vw*F%8w||H<4GLrDswRU+hqpFE<@22fc_7n@>ear}{hxdGUVN zXsL01c0j3_EiF&*Fd6!7J{OxGuSWRtFVSV@wOR4!0+BlsN2;*Q8XP{ZC> zhmO>#nwyNosOXin`~=B>j(U#7ZH|qgqMB>c9_OY1&~x(<`ER}3bti9&6B-X-|7`e( z#-x2iPJf)Gla1}JUw3dqnb)RnU2dVFH*Y6on-@5t-*3OA>fWcJv~zo{eto8)p7)t1 z`DbZJ#$8ig_Y(~n)3f_q4%3jt_KfZEqcr5QQoi1oK|=ytzqX3qqoHGz23-p?8oGVE z%R(!IhJrQeM zo@}qI-$_H0PsGR($~3fY>z8%(LK<3n%CYZ-FbxG~7HTM7r=fO{pj4k_G~{>M;TKCE z4aNMx8W*Rbt3MYBuKbGm$XoVNO`V3G)!cIV$nJ!SCs=>3*-AqL6Jddu_?!^e!3Ew= z8);~Xc+(eSaVOLixlOf4zzIFx+p=_86wBpK*TU^TX(&;L=kxGFC&WF;s&|>}gto@j zPM_y;LR$_k$dgZVLhR2)U&pR+LVk;$=W$$jLRKgFxyrB{^|WK2JU~vUw$_is={@Z) zeVEf@a#tVmK!NpBQ+hxD&}|L5KWFK#$&@zEa1Ye2sBb^^5zD8Cp^zoyfdUQ}YSd29 zkgH`u&7&LcNM|Io;o2`68t(ah@PsxN3&ngq_glvNgfy(%ccs{{k5N;?N7J0p7urGmGxOyu605gZN@uh zX@B(|mRb6$M#;OGTwip$i2q;{-3h_tWo!1@`=S$C(fZsBCse=pXv~92AJokupdf(z zZ@RU>+r82U4aCVY_ii|{PZe6xF_1-hl~eBPaN=ZLJ?I0zt~R1(R1jM3_49c)j!(ai?$P6O2tqPi`g>S9aQqCW?;d0Xq1&4JtQ)W4^Pw^M#HxTG zbkuIwu5(wM|KfM<9J{n8dqJ6!3zMEZ=ZNRt96i_H@?Ge-Tv71hpuE=ZL7&Z(ODAxL%N zo0;b#7Zmk+;)dgNFd`j_J<7xCf|lOk2tGI&jM9d;5AVmcH{1TrM)!l!0wnFBF5!Yk zY#;X3o%+-MlYHbrR9-O33lFb*%i@Bx-Y#4hX&j6S`#8A@e$dhW#+So0Vt@L-Kl(o4 zJ2$3DZ6Rt)T~Lb7jm0&CK?tpFXW1d;g3M{9qFuX!P=>37VY{Xa+CFdpWn>hDQdi4Q 
z3{qUslPCACQ*H&KvJxYyPi`)V;d5nFCL|CEPphc21Pvz5C|sW{olRow&U~70Hk{R*3xla7gVZzqvch90P2lo|2*)Mj(F=| z%H2{AMA6mNNqi%8bi={?n}|{%O3q`sV(^fT7+)?rX{`!GV;3E2USEx!1yCH_w(oI* zyK4doZoz`PhTs<5-Q5QQ!Ciy91P$(x;O;QEJIoMtuwfqGIrqJ)`>O7#+g-bQS6BD` z^}pBdu9m&lDk|m;y5aX4M^L+lO!(vl2%Z|?HNz5BY3B?CasAo8$NNxIMV}X%J&Jd< z&dCb;ZIrUwQxQjw*sp$#_nn)K_jd|`Bp|;#-61~fPWNa^V!2@N4Ww06lJBs#mXed0 zCT7R8Mtco#9CrAc)#8Eub0t((2QGS_JaW>jkZvk%-6|siwV>f{s_z50U^4zT_O=m0 z@Z3G}x8CZqV!BLq@}4{Rkn3^*C9*!9fQVWS_zFO2N=iUEx8R_g^|1D!_`;UHGPaGUtUhnI z$(CLhO-}wBe!=3VG&O6_R^+0l_lQ8IWig;r9_&Q^EA80KiRtX%%bBa3h40FwxaI_% zON)#eR4L}-gK`%`gWkuiyuvPs9Ux|xB8X+O%h zJELNtoFv^bt1+rv@d37tCWYMd*u=3o?rdoUHQ-O(R$?)0k8K6U?yEpJyRKBhuK9kl z%b1*}dcqy3aO^}~v#0D%?YDO01T*N`=aT9rZ`lW|ciHa62&#jhXMx@7QzM1K-wv&) zgWFtT{Y6t1FO+iEYHo1kdo`ENy8E%!an#D9-k9Py1FI)E`7Xkqy}N|YWWkb#fb=$q zV4;8EWO@Fwj!H3v z5VeRXMTUh8v;3g6c^a$^Ye=2wNf`0@#yspC3-SFSdCjfRdDu-OpncQXoEF|0OEjuj zxdIxlFApO^Q3U%Fv7{V@CBMV8ztw=t7KXpL>@J6`i4vK|*T*+90r9F2%M&Z#|0UBl%@6sDZM zo}RbsXa`s$q}PJ}+`|wdBsF!+ZoJ!=F=_F5g6N)JC57l|3fGc=uR88iXvl^RTD7(i zGqCf-U_qZxISKi>|84gj!czHxVP3YPQH7aKN0>q`f#mpt0l4=@I$ZH#oMzo!GJ4To z@$TYQTEJcWQ@mIMlfMibyi&Uek!oQB-XaZ@Z23A~LTA6kmm zOc1+@{_FdFT%n?=_BGVT;(8x*?bBsdr&u#w&9jdD%)DSM>hf7ac`)_UXLYs#NSo& zCY@JTj3h6sY0FJ2_HZgO<`b*};D1L25#%q4yv7n>n8;7qSAaL=Cln0oEgBg@tA8CD zecOWr%;v+tp7!6qMF^nISv!bs0LvU^%;zSq-YHMGCF*3S9x^KY(0$&NdI z+?LM5)$q&A_$_a1PJDy%K!+iQr(~Rdi;Uvq+-#e(|2TM3+2&XNJJDis>-cirRkFZ7 zPi-4Kv5Zg1XDajJNed?vITyoq%~oY-Q}UjwH@6k#1Y@puuG@>=R#PgDpoW?4J}3!8 ztUJBw!)ySy@`9NduF~2fWeoI72ERFuQ7hR|hK8xK4RhdYlCg9G4bg?JPoNZMTbpP$ z(`PoKv(~gTmOZ#y4G&*Y?&`}I(PlPZb8+)A2G<=$ita5{CP5PsWF4{|HQsL)fk9RvJi?N7S>jJMc$%*wYz_MEs zB5UaVoUg8S@m?&xIL7vP7~eV<;`~!yyi+~>1}#CzqKn@wHTu*z$oy$W#_}qG`Zcca zqWUap=e75oz%z_Hb}1`ERn6}7+e;CZCZQl&brR}dQ7e2w=gytSv{YOWg^G+Dgz)aWr1O-r(_9 zqHvire~{;cQq2OAXm-bTpjWCj&V`HsCh4nE}A7i0k5h5=6w+G)J|@}-*pQ-)#_yfZpZY@ z&UTl`qpDmLr8o6k!)s2k6bXIuik$RZep-~1<5}++rJepm%^UuKt*e>z;-dIScaPnl zo&iYUZ-SgI_uw`-SO-$8kn}iER%SM%fEk^hZwCkckXLH@tEM(pG}tU$ip_W=Ukx%x 
zo4ANDLiaQmoOC>`=iS$*vRh_K zaIfjs^cVhomI2FPqea+pegS9_*T&p_p1c!Xk(9|m3SGYA`eo3|hQHA7l~N1=n+@qz z_d;3q1%7RK=O(pF6zzFZ1*PBD6%^VKC;(o65Adg=Y%n5|lN;!DnC@`MA-S^iDm?O( zbZ|*7@Q_?LszoO6h)aRjQnccc1&U%3UP1@$ZQdNL3$>1{;w}2Mv<^7--wD?Y^%~9L zx#oYS5={&RHM;pCcX^8cdglndcIQEQ45v|a7A#<258A=T;?iAmCaHE4f7XG1Qk4G` z6gAhJawky0EO!z4$pDB$qy9pjeIC5*((NlgiKdlyv-S7YL3U_7=LNmJ)fD>;NG%|_ zX){WI<~m?Tz;*O81Cd*-B3X=aMg1E{i_sA{Y{pO)0L)&+rZ?`tT=wk0u0B}qoff3` zXxj6!Y4WczVI^Y9TER}m6nfh}-mBBTG5lg~;M;K(Log56C?`R4dMi*|03>i0D7LA1 z=`hYhj<=^X24*=jm%i;(e+eBv$Wg20bMAy^h)08-%s!0W$F60< zn;1BdUf`MBSrw&+EJg$I8#*rQ5~lf^ zgAHnHtFF9p?*pSKUiRg~*s#HQV(PA7Aaq-;8{?>Z9k1L{WuxC!|B4+)i{ctu(&ZX3 zLq?XhVz?CtnT=n|2AKGUvHEtE45L9+-(XJcmC;?2?#x@e8X9o`OLX7K%blIbVF^Fh zca1$Tkw<%I9$Z#bGh_}`la!^urr|j%C`4z7@)OLB&jS4D*}*C=vKcn)3XocR- z3VmPBr}iWciYzFm(bn=F%cA^o`*}amFk;`xpT|h`K8ZUbe)$7U)0oc+oh!u}W#+g~ zx5g!xei#VP63-rJK|wX@81P_z=_Rx;YLoWs5d9G1g2z@lM{G$*MnuC_R=mK}i2o=> zkqQxif`TyxE7JlSn4jW#?Sd{V^*A!)89V$pWw6u|3niv+udv#nvfn8oOe_Oxb3hVe zWx4OI-s^SfjnWRzEjU$9WyKBaLltej#PYLZ{_U6S0kuorU%hg-5We^LlzIuq#{bUg zKahnZOCy5#-nH#|tyvK1aJgGSHT}&}Lc+qx6w{p59h&=|FKlC@)wpHU?fcKlZjWu+ ztS!fhzKZu$#O)$_9mrObn$T`Y4N`aJUzts4b&9sJ|-PTw@&REewq*Q5xJ=h%eUvM<7PhJ zK8k&*2$gl|ZuI}jO+Js$h!ZtywWN|{K;Z9nVp}-m4W`OUQD*EfIk`T0$dc!Zv8-B0 z&EYuV4-C3frm`f}8AFHcJzv>8R+4ls{CN1~Go!+kVy0#Fg5@fVa{#R1JVU!IwU(k- zP0PGw4W+DMAg&slNI)m4#qwdVj^1DJ_j0)Uv-f1)tY^%`eVzZO20s)#flzIIjvO`| z-X@}F_6s~<$K$h@P^^VEJl?q|lfL3+bKl%KznG@X=-WR{)qr7{c}aRn!s2X|A+g!N zr4H%+ed+(j%Wy!AX#u`c#moMCl35aXRG;hRIa2hpW%zm%P;RI z;9=4sX`C6bq_Y0f4-hoOwa3DqG7sUPo#KQ=F=>fq^>;FK)Z_LQ9PjtbKMl0oQf~HS zgb$4KnxTB6H+47<8q}gh9rQlUhyD73KIUXsq`#i|vm! 
z-e;nme?R|Gn8tp6@dVXv;x^gQ$ln2Mh$TcJv~^6s^HPH~Y~!j(+f)8N&`k)umE!n} zk{V*xbG8|3ki!5Y)|-62>Q|HnS(+e!-toFzkYL4ce!C7GFwM{)Kjm~YdK}1D{u=m; zG~$OXCnUx7HHjs&RLUrzLEvmYijfW1r@_$oY-;5$JimobZ{l`9aKh6JTAGtcHh@b}1kDnHc3u2MebI;nU5K0D8X`Qyqe6|2+0!1PUj zT<+VNuRc&?)}d_6SaDs|&BWL<1a+sN>5qKo@*DKhKNWsIe8KTw2hj}C_He%S3#13b zzC8=>w4$9;cGfX=riZ&FXS9MW;GTa!sKpDV=eO$L9HG2rwQ<9@oAlrZm zu#DK^hjG$M^yRW8Ey%~Un-_T&&ffNs8`+^tkT`krBVe8A$?z;(Xa6Cr3htwm6BZ3J zh1MHzkE~M6d2FGy0@+2@A&Z#uPm-+A=S#m~NG`EEA1Y+8-sy;MH{euC;c>~yy`nYo z2jC+dknA%d{?(;I`-=syZ#~h{R?&1$F_(pI!Du7iP6sMTR~KfIjA_W0V@P3P!>HBF zEX_BCFcMNOmL5Ihl9!d{@nal68oKN$?ZYXyA=LN8_Z6Z2S4Yy2q*Q;KL-I*$*hh+s zH!-d`7k*wP;h{v!AnOMz7z$7!?MZp7viV#4BVwG`{V$~?gp4L;l?cd03n11(r6_pe zPhSk8w~MG9hb8kH{m*yUE?!P*-3=vC1sTntKg~_R zr5X0(tvdlKAcbhJ5ta9^kln56*roG=cVI%ir8ZPPPMtc<+!ILS`?bDr4GX5zHeD}{ zEATty`W3p_9`f|qM@K9u<$TO5@V?o*DZ%v2%xhL;Ha{$fb9!T2d7xf}R2FFg70M!? zWn824UqiGmOYP06MTknv#3H_{_6oWa@BU~>ycBYly?XBuN^?g`?IIX(gGLmU|A~jt z{sN9D#{M})bQ_B8eAVN*0)KdQK~tz+)@~O+L!>1HO_Y{e?-cm(>`s|^ReB#Nc=1Ql za!PK)-n?Nx4BfmpxEah=OOuyBqW+}_;UP$%^MsMew$dQ`s8b9Yby}9=c0pi^0!3r>divI+I9?l_gZ8#P$S>EeEP(9(&D-!{^=(Z+J0?0_Ng(EN8z>h zy@BDhEjW@uX|}O@n$93!dgElF!R1@KpLCiS(7iK;Kvir@r%6arYExJvIj>%M;+{Bg z$JBQMjiC&7iNlvLiX-&rZ+M9mIv!8S)4`}PynywOst6>reQDPIoIg+}i=aFltfWDq znk)6(5JHXYrij=*qB`UI)xD`1eIZLPwWM&&>&+qEEzfZFs_<2bAcHLcR~N+bEfuj( z1Otsx@a-Pu(z3I7gpp_7w1e0$kzU*QoWG@^znLP!#yMQNA4PT3J3^;!_}zBaoV6A` zNavfW-q;lvjbMlo=iVztKaI%%3U8i z(2>1KH-{T;K~l`*0`BoOv{yJ-ArPFBJWuN>owL&**WJDRyPfLbz!e1Qf=K9vJXO4y z79Uo$A2lcBnSPj~F_CT=aj92hCE$c}3CFp*)-n#&U@RWA^QP~%vsp`RHW-!kCCIDW zUL>90@o8@3vq8ewe?-I9j?y0WXTMUU->8(~kmsad#(UGsy6t@eh6L4cUIi-+4#aPSPGb64is&qqh&)q-Qh&ChZ)x@j7-I8*8M2;MYh&mQGcFL^|`nxyC-%>X3BZddW|Kj)@yl}~!-klun=PVr59MnqV9zbbd zvxR;W0p6gsYX?QpqySEZQ|>>rko}eV2zUr#W+vY+T|Hm4;=DDkX#Q!IhZ@SX^%S#b z>9$9v^nmHc4!C;{!|8T+jM6>kMIjZmr;HyfdDrP(D}?1w_G-Nv-;mM82Pw=*(RH0R z2gLL(u2A7?5=w051aX-_v)-y?5-Q~%dyX0J zdMFEMG@2Jg z&Af6d>$3Gr5sG2SSlRYLTRX!(YOQmE6@mz#6I8Gt5JR&zrakedf$e`Thm+yTMC{6_ 
zMk=+@OipRE^71*GF-;x;T8OLHYxKdHDqZisQ~xYLh2x*nsRo!mD1x8Zi74PKi$bny z9XdnrmTg-l$II_+jqenZTB$ot9#b{HazpiHYme;hAI^imT89nxlD{#ruH2>-D!2AleGfEz2`w87VCyl)k!6WUjMN zuB7`pgeemfbF#KtAH@7&7NuG*7SvFKxeOdX$NR1YC)UFflzxN2wDU_VH3@mnYhfZ| zmt%Khan9?m(lE^gO0B=-$04!f9c>)$ow@tsBFfHuVGhN|=s7hyvH@ z18lLH(?zEkrHZ*K-txXF`$RB=Y@Va0^JL+1HcLx7Khs*bei6Hu0>i8vOJZU-)RRRQ5B8X?UCVY6R>anuLJ@3k{`eS?)TOoN_+C(Z019QBezS#8=CFtJh zkl=&_HXV*PJSrv8j*OemO62F@tw^`-%%gW1Uyl_UVGF7+=8Dn}#sl-w2m}~p5VY~o?EIToQDTMb;ffWIp|2umYqXn{K^02d zP%`aqhf2j8!%i}~PKmUu_W;6f_u%PbN%{`a#Fb}b-#qe;pIXoV-RS1F|3y%bL#|nX zjuP{EadCch!z0z5`FL1o&_tJ(7WdgBC%$Ma?F-vl4Cp6AL%Bx}#wywMc80GX=LGac z5;AkZ{C)lpClPgRugnaxmIKE>?*F8O8exYl^_>NMyud9SRY~zNt)fl&g#*}Q_t@yl z6R8!TSYc=Fxp5tp_N<|1GTB07d+Fu2a&VO}91w7ldCUhrYCTzR+=EqJoaTJkN|Gw0 z1s@OxzDEjfjXqk5QZmQ~b&V8J;izsE);sDqGu@PB^A~ z67Mttzf9)*sKvfUh$4VF&px;aHV=jBP|;BL<>$~L@mmRbbB3J9uMikvZq>X&5|TJ^ad@7 z`H#C3n|51b;ySJ`Ue5Fr9^M&w1Okabh_Lb;yvAjDd>p9sk90% zSwp5(^u7shl+0;;d>G!OVc9=n5S~v{?ZAf%T#5FZquWH(Qd#;>(+v|rSJ2&k-$u@x zjWcFOA4kkxYqyNAGFOFvTjDnEMu&bc19imia%pPv*{ulPl7%Xys?NBX!{^|<*H*0~ zleoI0FIAT^rJveUDzzY!7@Gc-Jx5)YXmztV-Xjvuc?@JtsXMi+n`hE176&#lfep3qSqwfVhWMhP7y5~$0|6NA0ICHq%iO^;Kf(IS9C;c z$nFbQ$q$bT<9;^+30qByI**F|R#95FmwU}dOtbsm?KJ;FoYcOX{f;+ZJ$k(n(%zgR zo@6gZg_pWTHeJ|WP-q8kr!#6RzZ<2q%ZtsPpfW6AFUjGw)fR70zy<&eOA(v<2%Itb znTxva%e&vkuW|`+W(@+Gd-h(f`pH zS6M&1(RbENyC*$Dm6=~zp*kx^+&zPEI&$Zp(4duMhfE?Dox_({A_o$f)WTyHF1rKg z@CCnq>GiPI;_=DE&uxTwIRh$9m=74>XvM?0q9A1C564Vlxyoe7{(Tr}5 zbA?)Ex3fRm=rT z4O@V!)XnbkASXSCH@o08j5E@PH*G^+?qepwT!YIlimr zoXPd4$viCsSiP^I>onX$O6!;#3YZzay{lZ(!sKU}Mnqn|OP`1AmmJFvO|grqi5F zki-*qO`({~R}qe#Pr7b`q9Q6xKp|pwC3b-_)y(x|x82KVGB-+N(_DlR_u~feviKB} z=lhVVt^aelq!=N=iJ0@_HvywQ8fNw~cCPU9dAazJfcfv{4wA?{*0FDWS#K=uve_dc zM?hgiCxmlPV9cWS*`I^lC=dxwX0WH>NJa~N;D-xXYyubDYCI>rIRn9Rwu8|jq2(Jk zL6s+W;>_TsC#`SOn`Dw>F`WWHl-NSE{?@KbMYfo*JAQHBsXwVL;3FZ}zfOExA zcCeS^&c_f1^H22l8|L0KNLj<)iZR-m;$@Jg2a7f|p8{lxyg=>{_ow{uWZ5{=SzcJV z3<6x1i0R;as}G6wQ_t~WzTkJlQqNWLhHl7yTI{UZ9-1;up@1O`@^3)M4W)QWi 
z{NdK3H3X(!IN^?Bs`J)x;8U5iFaWytZ>XHe-bOD&6TW`oM#(3YST)y!AovFKbl+`y z6JHlsM<5?z;4%!0_CS)~T3iMi7#hIS2Eyi?zsI0(QawY{#LAg8?a%#wDJ#;1)iA$a zr99vFULD*}+Wk1kl4pUW4(Jqd%l^vsCn2krV4d&XWhVUh_nA9#R5m|X%ZoQn&^PoI zl!ueBC$gfh9W_jvCO^*-EWk$$Tl!0GOm0JyFeoW_Var#56=SrPCjdBtdIA#E1W82- zHn*~LN=KPbADGs;0*Tz@dm|qGrH}Mtec{N>(W(Pvl%O$^z(i(4VEgu2;_7V=@2={x z-JLF5NdNp|AXoMD5kHbmCXT#tD%`5pf*+cypUEf`OceGnAC!3?nbU3DLGv{GycB0$ zO;q?d{KrUcjZZgLb(Gc>`fsQO$ko`3>XY+X;R+`m#!XhE6_%?Y?#|rMAlnCwELpk6 zHl5RNcAYb)^?`FP$NrOJ4`dyQMyJr@8CND!iZ2zoH^A@vDuGN)b?D6eNdOsr zu!LibxA@R5g|A6hYouNE5iYS%e(YYcFSdWS+LTFZn;Mg;cyagPt4>eX!zAtu*k)%y zSb=n}t*gPR8!y%o-QdbY5@BE-sATNU8UCmB`?%sT{U7KZ7jItR556qjCqrHI3M0A3 z8-`eT(7A8-UL5_op_gyz`+g+!i}!(src78_(~OsNt(@ILV&K)!w)gdOmyVP|;0Du> z4kG!(=)u2)s;~N>ojND$o0XWZrh4l6EvtLm#7CHSPVr6btqe4_w=+=A)#UP4P5T!{*B=DGvG;2FdvmZNVs)n z!V!Lw@}?|Y&FOEl518sU3G={!Tp?dLy|1+@@F|GqZRkX`sBi{COCF*qyalOP3?dT? zP8j>TVf{|Afbh>l1<=o(Z_e-cTid-SGrL&w*EVdLMk(KgN&~<@hT$PzkKKSXGi!7k5%I7lxdwyoI7{_51s7s}96XEWGbuRD1$pjA_Qo)PCJ6G-K(Uh(ng8OSik| zfQQwz@{2-`MFDX0TN?~(_7=3@{+HYgRIW)5t`Rmy#O|@CGd;KE7Xj>i zg`K33daHhpYHgpMl^4B_oTUCXjPG#g7rS#$0_T2z@D7+~U&q!W%DS4Hn>mLy9luy% z;KJ;5?SDz5kD*tRe^umir1HC97F(hLlrGMg3yYSh2Cm8>xH*;BpbY85Cwq|ualS{9 zh7Dv3i0B+mgDjmK4FTi7uPp)Kq{j=vd`6O8Dvbf5io-+2pl|BVn}Z-=XX$vfYv*KCHd}xb+B}x; z5qpfBcZCzjUt=(TMKRGwnrEroCc-==$v-L^Tj2d}vV#<_V;R$jggjlFlPhHqul1zC zT|Q-Zf#R5GkM1_0Uk59DZ$2-G&5B0eNi}g4LGfK$V+;qv=!*#-jf<-eL2 zN&K0^)#2|JnI1*o9@x5C&ZNJb)eu8P_x9gYbi)wmaLJ z-<^;FOZ==&G2R6P`AbvLC0O^9+cJEdw{Bc4~;#^3z5U)UXuzQ8hU6u3~<*zUPI zxSYX+z}DYFhPFh%M$I2q>Zx~pJ=%TIdT0aVD|4`?zr?B zoziDE3<0afRotjydrY&{6oy|$`=wEx$40)j2cJ}gN_8zbxvh#G1LSR3iRwxvM%=VNDr4LQ)c9HHxGi}ltDh0OK#1X^IN!UPApapb-Ik(8TJkctC; zEi8I3VPoe2F59^>uhCqJ8~4-Ix)vxd3;dF1Cq~ zc|PzWd^*!FC8V*|0M^dZ;@84H44Z3v(QrDJAcQBztJ$AEZZ))tn z$&LyC%>V084!a)2$>=bZ$MK^-WkW|+ALNSelg6Xjw?+oKJEF2gPWvr=lrI&{IA&zV zfBj<1;(*`JzC=FuF|+1lnZtxh%KRwrjKe(fKzD!RDOk`kR2Lr>UgUHd{f-`WE~G`K zZh6V z#2>OJ55no|?soDSunVgX{nix?K$nXlW5u1{Fn5Jy(j%`&zYvAYQ>3`uTQ~R=vAA*9 
z%Pz6lt)e59jX%>16z5iac7p{|WtJ{h+NSZ9N~+%hC+6X}AKSYA zqI@pk`>ZfE7b-MK{452qonN|KgbUv+J)HdER5yTDQh|~hh>@7G*kUn;K13cL%lZpR z7Ou6^s0rmJ-2Q{HZA);hp)Ad?2amY8X$|v{BWpemM-%c2&o*ki_}CG8e5>}!biR@L zuqUgssvBD{qcQ7MbbDuK73LDLwbN3Ve5R#7%2JYqW%u(1s&Vrl%@0!ndQ-aqFxcw0 zHc9skRvvenuIlb4PZ1t+Y3_44z5I}^g*Y*tY#Q6T7IVyB!1ff z;WD(zT5EoeO_gwgP0zX;>41)?BY$;LHXls_O>-?{he2Ohj}*=~>ys&}Z|4d>i1WYv zsI!qI0xqUe$c_Zt7;j>1Eg}7=c_KFLZ0twwE?)wiew0vRDXn4qRE=!zZRm{@Z7O8X z-|l-PQidv~C7e6N=*QomSnV68Pw5=EJ#8S&*_)Sqk5j zPoU7I0$A+Swq{KEtt0HW+$MI$+xCxygm7OMvlc+pW><#ZIO^{Edi9MBA%xqH)_H@5 zy*~WfrYirl^%~~sdj)^x!^AhDww~ICw&z<&^@gZ9QJp|Ba{n~Y=j?R%)+sv|!+s6{ zQj?Z}xXu~4@-CxM9^>O?{#wu!(G&387yl>nx?BfwgwyXA3sUt8rzp_J?X#i3Hr0?{ z89#Z|_5SwzZ>=n8Dou;RQ2@gh;QcOZFqN{qx|+KU2^f=b)P8;~7bW@?oAg6@P-OCG$g=I-sqblLX(^eKRO~cJxbwCBpu-~^H zTXD9vo3sf;pJ(D1#i|tY-^5^+gV3YT;G-EbrI}#K>G1@eoF^e8+&wg|P24>^E+f1> zGOo>#%E~F}0+fOt6sBuq}17ZKi2Zmk{SeX z8?@Ybjl>^owWbLUF{bl0I*5Ft`CWv@hsFagk*8;0{JPxzn|gP7Ds&3s3$v7l zV1doMX9u)Mo=2ft{C_>(zdjHUs3k~?5P*Vh7^Hno@N$P~n*R1$@RB?oIs{F9us-NG zH*Uw@v$~RfJN3sfYlgbb=Jxa3iE@oNX)fg9NdPpVjXjB@D3Ab@e@p{-!1>b~`673t zQUt??W*@@_?!{wi0J#`p?x8-lxCi|H6H}%El-YrI&+|KCy}Yyve?VUeR`-J$rF0*> zOJS&c#ol}zAXo8D>)!H}_gF)X(+}9sCqn#Q2r&05S@M?$SUvV}KF?PjbaLx860&V8HkEGlm2HV!EMyknj#Qr)9E*41Yr)(9|F(ojf}3)+i8ZB{g1f}sbfmFVrwJm2JcmhS+f!*tl4R~QAtNh9<(J{dKxOOB1Ns4ZNj{r-SQw9jvmk<__EYdA(( zW!At{8aoz>puB~lrkT{H%*!jOMO68?Kg&)FsSxs` zX8Fk9@3-UJ%GGf%=?!mN(>p673)aG4)}Y1jlE88_?@k@~2CDD- z{%so#cb+OC^s&Bcr6Rkth~Z)!zMffxoMVH%VNIv-tAt5;Ga+ZIPS9*>g0@_}>tdiJ zdZob~bCvUKL|@sjHZlnXQcPchUc?hbL9kD_xh6s-b`y&zE+5>`5tX(v>-&%=TURTS zR`}oT(XKRX3OT)kXJt@MIM<#O);h2P+}_fZcXkiO$CLd6E+e{RyhgE;X4k8RTv<+C z2c79mHnb#2i;4^pDLndGYMYD%`u5$cS05Rjyp;kv&2-;0AE5GcfCJ{P{(=~KZCV&} zbW%=CIhuQK3C+gnq>7S@VP8ewG#i%Y)FD*w{{HQ#7mn`exVZ9Imyw~sZ`TiOx1p`9 zzc$Ppqrb+ELx@Ni!NwM*_aj!k#yaZ&zpl%V5tMI^d;X?gBbKpgZ$v&N`#9XSKX`Uy za|n=P{zxWSO*k7p=H9oK`Z(cr6GjMlt4@wmKDgiico{%QlBySdaFz*azeO|>bG8`1 zMN?4Su;}<;eJ4iv4^y+PXjdfL6gUENVj z*ipy8QP03pUwmAU93tfxQ&>%g`fNx5U~zoFFH6^%;LO#mTlwY!hN}7*9)TK>5DpIR 
z4O|9dvr;t)xYCUb4z9`o4(>f1>FatpIQZ9667o;Y$;IJhj+%Wvfd3h#Eu*fXprXv~;p6cy6x)AB;lHX<6%N5he#QJ#sQ#NOe)9i>GIz25 z7lOy*e^B%96Za3oZ|T83Y!(hKs{{G}Azo>4ppcaRh_EoT`rjg0OM_GVUYkt$|5SiN zoX-D~3KuuK|1BaujfqPZ69KNP4*`zmzfRP@RfvK91H$+tySt6c{}w{QkvSfZ2nTom zYL5SFjekQn9}xb7;oWU~{u6=*?!T;|(X8~3j%AF0>!5<8|EKlWiT1VhL;o|v1^r)} x{nP7Tdq2MO;KJW|cwzRqcf?Xac%t^$e=Su%BcuG&;G5T(?^VeX+CM$({{RGfW+VUr literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/multipatch.shp b/pkg/shp/test_files/multipatch.shp new file mode 100644 index 0000000000000000000000000000000000000000..0e6c58de2486b4375080c12780de78185dd44ee6 GIT binary patch literal 1192 zcmdsxD-M7_3`EOUU@#aA7!t0*{WXW+7?i*>lWi7*VUzMQ(`l*M+F9}y`%#(A$=+oy z+uee<3ciI|IlbA{`hw|M(m1c olT(ck=||L@YMrC6)SPO~t0u30PBRCiJbn0ej{oEs-SL0_1RrP=&;S4c literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/multipoint.shp b/pkg/shp/test_files/multipoint.shp new file mode 100644 index 0000000000000000000000000000000000000000..ee30de9b4069f64d171ee591a836058a233d8ee9 GIT binary patch literal 196 ycmZQzQ0HR64w7ClGca&q$cZ>WgjCRI428%7j6jYKF=hd+gaUN)VfMmkggO8q4h72q literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/multipointm.shp b/pkg/shp/test_files/multipointm.shp new file mode 100644 index 0000000000000000000000000000000000000000..232f560b2049f2839960626786e27cbdc45f4acd GIT binary patch literal 236 zcmZQzQ0HR64$59IGcd?t$cZ>WgjCRI428%7j6jY9F=hd+gaUN)VfMmkbakE%5RphU K8Y+Gu$N>OwDG9Lv literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/multipointz.shp b/pkg/shp/test_files/multipointz.shp new file mode 100644 index 0000000000000000000000000000000000000000..e696e68c7933279fbbb15bf1a77c8b7b6a0fdadd GIT binary patch literal 276 zcmZQzQ0HR64!T}2GcX8Y$cZ>WgjCRIPbfbUN<&mZNk$+YLX25JE1>}0e3-p38eJW_ Y{V;bx#Sa8IKy-YlhtmHc;5`Nn0B)lt{Qv*} literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/ne_110m_admin_0_countries.zip b/pkg/shp/test_files/ne_110m_admin_0_countries.zip new file mode 100644 index 
0000000000000000000000000000000000000000..09d3194f00026244a5135fd083d57c228514daab GIT binary patch literal 196764 zcmagFV{m3s6D}ItPTttIC$??dPA0Z(dor;-G2fUI+qR86-*?W%Ik)QGAFFErSoQ4M z-K+cQUe9hNS#Ssp5D*X;kPPq^xoYgIN*7!ZkSaY85DE}{5C?NZ78YiELt`_0YX?JS zLsQ3}4sI^i=B|t?;=*F`;*3^q_IB!Oupr=!Kf!eSEx>etdcuQ%LOg+jfczJBReyFWT>4zb}TLVhPjxg%Dh5cUcAaO|uJ^uu}y}yg19XN|^1^9j*c@|iOn}4=8IBG+e zK_B!62z(wQHcWByOj(f*@EyR$(VyScNHi1A%;%fOjhMQ28Q)F*Q4rM1fSDK`JN%ur zY>{nnW&KHY?t`{(j!!Y54L0I`yBC&Sn-ode^Sc4~;u*~5%=nv7noR(Xj5F_Yq=M<# zARcX^l_ELwFx9BH|BHG6UUL0ddOCT7JFi%dtk&DLIamJ(%VeFxL5Qv2<#_)zso^)O ztnuU^pUJ1D0X}81e7@lN>BalpTB?2Fk?Vr{|;ZJXd+K-d`1nAU+6 z>5ldl77r|FlSFWA9KO96)6HUN0Nz*-qa?iP8Qb|>;^3Pru~ljl{V$%X;W_p=kqlCg z8o5X!rX+K;&fimi#RPJ56Um0V;8^a&Q|%NNq{6}ZvsLF08#sdd(@liqg4yLhNPfb$;jJ(R+JE4h0p@9%38 zg&p66BT+CJV=EAq34=4Cxp{ubF2!9+ca%l8ZF&@j!Q75(DqUjl5;IywO{4v%ka;5+j(#T#lRZxAGTuZo;W8k35IRINF{&)(Zz}52Cw4;R%Z27H%rZkdLoP`2 zX;jW90*Ojrf?H0FYt9G7o1AHJOzFjOQ254{vl~VVtxqc?0XsU2LkuE#7hA>$=bD{p z(PtViq&u4R__ox>z6)FK*WAeJkVdS`h#`8UbnvxNHASNyJaSO8vUtgzx}qhwuuEVSsgbB zfsz&CJZ|ETm0yqlM*}57&svmrWrTjto1&qhW+t6^RQV9l`UAyGQxa_Y}?e>`*UC||SRpV-F(x05Z%u<`^osbT_m&(9$2Z6|5Qhhnjz%_%~KXC_L8R zia+P`BH2D>XT$8Xo7f<7Nv%j&*tub@ZK=RrR_Z!}k9MAdW$V;zslI#D^4rn!a~+nd z)hyN87G3}Duw~Q~@Qc)%@Y;JkNPTnc&*tJlKyAS?;^>D63Rp)T7`5%Fu?2F^`Gx8g z(*R8e+Erptyj0Wsu0ke_LQ?b64FVe=oj3YR&v3KLf&(wvDy;Ou`=PJ>UqQs=a?3co z%wO7&DKSK;EGt!Ns^0oIr>jO;F&SnGma`MH+ry*N+lOgL40}00z4Tno*n_5jN%LRd zfVDkCZJ^3(JvDkY)l`5cnO}TB#nP-8ZY~!D2hLNN{@qCbB(`8Cov8qf3d_e)nR~!H z&pb<=_34L{ArYTgW*d+Z#uDC*M$+VPW0zmotb-BS9Wtv!}()mqxNhr{=tr^{r!M6KqJacL=t5HdXD0l5oPCe3#3I5fN31 z*jC%IPegFk2WWBYJHq{NXC(atg4I9sp^rc3@^VN4r2}6SEkpvv^D*)-rz1$; zGFCXfGS@3Cok#J1Hyb(;Tf*XO-~r~`>q@(Jrg*%0P$XhuBx0dqfkIIG!v$sv!Un=J ze4w?k2-X3|Y6s+Ta62sRC&AcTq)@M!Qa{6mqA1*OgduDbY0t8D5X1!|1B>TX?!!tN zu|hO(=rOA|u_xHeXtB>kjthsg6dv1tV*1-q7nQO10`i4X7GL(vI(N%1VnV)q{A()c zN4?PjQ-b)9w~yy#Obl!cVzUs^$vR|xFk{oB4FOUsnSswnQ%j6Lfx((P34%xUfUl#PiTJ$IP?^T>QUQRxkH1{X1Qs!<<{x;tqiXZjiaQJ(_inyRlOeZJI|ar 
zvmA%ad`z29m7fw>V4ig(qTMG%``?x3A~U*|MuEz>>y?EU0*_DMvK zuSAQ-8)20&j#^o7)2C+M% zV&1Wgqt>2n1TMZ@NPqepbA`p;6l+tackkPWv=cjt)4YD)uOLBU%h^d#O6Pp4B9A4)%+E$O^Xx?13b)fE8q`q z#_#sj0nPdnzOQPFugf{kqGiJc|nM5k-*)2XqfOt*-gZNuN72mFo zrRK;#uU#+iO2z6*C4O+xi&B|&edTEze8 zgl&n;?^hLJf(hGYtiJJ;A+&CdBzNK&*fiItK;cuG< z6Y@skgv%1mZn^_Y5&oLODyo@ey>g?YMCK>QYt<9>)1meN6i<tz(amv5I^9F zp8lZ-389^K@|$t;W9>K8+-Urho)XuP1i@9O%mx~a(yvd~MZsE!wm z8O4POigUrlJ|P&(`yTFUV+dQbg%4JkAaVA>QlPDyvcz!$=er~d02R{`ov(hf9AuT8 zMIJU;+DhSq5MrCx*X1Nix$$DLLwxkP!vnvIV?ZInXnHuQRiDsWN3LSpF;BK@M{o?!de+yJ5F!5JP^A^ zPFP$zV?6h4P@E-pkf*UK9W8XAl2T^654yJ6TU`_Bj4xSP9^!O`xCAx?L4uc0Zob|r z``#^s`rA&-F7>&!(Y~2~{NJbJC>v)0KHF`=9f4eK$BxvoqlwF>G&PWONYr6 zN=9m--ejP16-0z&*41pgpZq41W%^apk398zozjYlM)Ij0_ ztR6otq000u(fS@_<5pPg!@{n2lyD1k`=}ota}n1NG6- zN)d-m z(pyQWw{mYrJ-3O*Uw!dIse$-A2ItIx^03HF_r=WUQuSO@0I*kH==vTe5lPA0Xr=)T z<2y}RnasItp!_Vb&jL;1Ji~G5ajcJ$BoL`)8M8i~Hnvg$LC!!EPfGfMeH15-VC;p1 zO5bf7Yu0rlmjHj`vLMuo#A$Z+B5AWgD7{>ela(q#O5yXi^3Q}0L2`}U>(YCh!bQ+2agZ#@^3#WIwe)TOrejZ+_- z3r*T!Whyw>Dkvv{S)AK!t@ERyaU}yqO?3phFYtF{LRqJt^p6mng)29Yj#*G+HtT!{ zc5_cqEh>S1=(?X^w-=|ke^Hf7cGD$1NnPnB;U$at_SxWJ76*#?1Z|VrHRrl|zup~< zU_u4V#(ois$$K(^5z&^vd{8NKH~nD46$cl4f&kV}lpy*v`_FG}v3nRK2J|>2RSvgW zw@z_F7%_v)Oxft%XJD*@5*qvczHxB7jQcih{INf7s2hBIQayId9a@Yr*F|C=`D-AOVN<8-ede5>PRWP};~}o%2Q4lAcIB4jW_EfSyb?h; zHi$9eoqjzx`&pdU=P)sjV)7B@bZ|EYoP_z-aEE9p&bo9q%*>LYK?1 zFRVlw5LQNm4Q>Y4`))tQ1LFD=R$qiWICR0h{329erE76%?$%~@e@}%kV5DV+sBc+8 zkz{j6qq?gI&=8gnQg^KDl$Ck0MW+sRoUDuoHn zS~^qyB%uS|lVCXV6}l)-1_eM$eLEWIfC*6U2byNp7NXj5=!C{f+SG}|)HM17)%z+P zp0f_RyEGIeV#WL5)Bp-5w85m1i=#?$^I)rBNles===QdS&i?)(T;Fm+Ofcl6gOSFB-J?Xz>0C!2I&0Sj{f#$vtb*3|$c-v#hL^_ayz&&^ z2RPp146k%z@y`&F_h0B?_g5v)vVyYjiyCStnvpJ!dU==KdhTAlE&sizc4K(yA-h%Lqi?h$V_r0W~67K1ZNjN!s59(38E+|dx0p^(3YQcHNmrhr3j1+ zzSk7tn0N(f@p8J6SLt4mJ~F>Gm982j;qh?QH%vaip{xX3wcYUr8Ts|b!poG(kfek8 zhB)U*vRqLE*Ax#|4YnrQx^>}{A9)uvG}V96_AHzaVug?1*{sW+Da~}2C~>{B5?xHu zoM237PB9!M4hd*=J2QeVutr%Q)vL?mx2`p@DSEa2$%T|%s!6x^IB>@E*PiSHR#@f` 
zU@hYjI|#vbCvD(d&^LVlPmw;)msj&8>(1yz1%;H$78dzC!jjy+G;wqtH_l#(E`#7kS3x$X-V%+JB@cm@fw*bLJxU9UO%ivp{?T z-2U)ZQaISzrGqXXw7nBCj+C+UQoV!`(paO+c0xuj2dgU_u??(3 zO(}E`GKS|KeaS8ORVSwENBB${}k%DD;A$_-hQ5F%5QB6nf2#b=NYJQjW4Y46qM$ z+2d@`r&A~_>k-|qSDf2SGot|ogJeH-0y${$&`e(5{<2aPK#pLx^i3zPKk#TBrqDMx zoSBBHw&Ue?nimoNHqo3wkW#$)^Y|waZ$l1H(}QQwRJdk)6bKpvaYMlki9ey{nH%XG z$_nUxYbxP3(iI%H^NtKXW0E*HU!+LF9oa#{+ioj%z-tMIO>MAo>K1t#gc-Ez)FKCp z!xi-t_QlmWQ*R&td;tX?rHV@RWRVd8$@iRwJH3&hCjcR8@>*EscXW^6X1*u(g+?gc zq?$ozsPIQ=LKflea_x3icBw!z+1N``S#ve*e-`?I`L{?m)&+0@!C0fI)AwT=gL01t zDuQNo;ugV~8}g4)t`dE-u5tqb#Bw&$mvwt8(KqrtaKeP%D>W@pxD5h|Y(%Dksk_xI z!2i=CIuNzr8?qpjCqo;-6O5D97y3lM?}ZIXLmm2T4>YIRYvN?^VcLh$oQ6Bx^b9>s zd{0?$>sKGMHqARJnR!W~k0gi(pD+W#g;o$2O4rQ&sb1(dMS2uv<;9or%gqZv-G)3E zs(qKJF$^~Cu31P*lBqO-B3r$Z$nxu#tQLk5mvk|bb1+vhBSK&<@{FT;6V8EcGYg*x z5eftT2p^p#6S{gvl?fE6D2qqi(*xwhGKg+dy?Waah#{?uiwwd(eNLsyVF?LBI+F%- zm2)*-dg+7Hz8{hR;5}9O6I<7&@)`SLv z+$D3Cu_{49l|{SOBd^6|nYU;XVhl6UCY0-q061!zP3z)LJXctfZn_LROxnQG{UFRl z$uFZk6rmr)aML;(?C#`u);q2?^`{Y$ee4vZPaWNuzip?+rbi&-nh4|G7ak}XKz77S z;QqzIWKLt>g!1iKS48IDdVf9XV;P45UNxln#eFi`nVD;{fUg7KO+OAq1O4sc4JX5k z?+*rKaj_jt?o$FTU4uV<@eDvQGOC(z0OPZYaSs#qaRZ$ z)sK&rH@VzeVN7&O)v}l)U(FDyR&qVp=IYh>96m;q3Jk7>HY&7#UnA9zkw0)bOir!d zV81Ez@_WGtvZMmS4Kxj!>hJ#+gSrc6VUA7D1#L4uv*$)G)J>6ME$@^v5^!}D z!gu^U8ah%wOF52awcxs=!9EQ9`0)}Kci=M z$0(GqWrd1!;KUA#{@T_Snh%emow12KyXBl}YLmo!{-Hz1o1lcoAD`yxuvW?_*pr=e z)>rV=?dI>{bNX?`d0AWLChH1-S7rMvP(7YUS+uS&0);VkIs-q13@A*IL|69<}5LZ!^R#aeg^K|>K zpu_M_(CPYr3p(tKER4*EaR2vP(EiCrN_;^`kpJHBpGENhDI1wOS^gKD-3?5aOa`tORuh>e?dU9J0Smmu+2;?{x9tR;Qw#f3;LU?fHC)D|KjBF z8uGLVUkyUuw1q7nRcv#K?KM7Fl6tH}kRZQhbs8IlCSm}&p!hXX|p$ z(;5!w7Zk!uU_T%AJ8vY{Z7D-dTPOS%kxPfA1ei7ZJX$a<(Jsl@B6V;lUJH%GXaUkeu8PnjY#s( zUd=sGio?6lPL($UMV|6T`U!6|imFU5DSKzkR_S&L~VlwfE;b)iDw`p0o{t}aK&y>4k#Ng?ZyLZ|v zw;zXJ+2H!0Y40P39rCysWVyhlc#xJExby_zk?^wUb3 zTmQDMe8$e6Qi-&>JO8K;#$y)D9$oR?`R?9t>gJg!-geLNx6Hbw@7x>iPQAwix(3Bv zVv0NISJ}JUALM0%`h6L1*NhpO=Dwi*WmwxaA0K94QM~2?>;*^}>!<14+XGL3uk75s 
zHn_da1s<-8B7xsj=6<6F?S9dpY*$;U+e{ljt8RzB(xj;Gez_1$Xwxiy<8%?!xNrE; z+#5e!@}Ax{AG`?C9ihi6M>~Fh3jD|FS9ZMxfpb{AiTr0og=|GxvF z-?rlZgay>`jzp`Auk!i$y;i%FkAk(pXTq7=Z}F(@ zqyT~FnQPCjFAa*XF;gCY(7#l@#3=l(z2y_Rgc{!J97q`o5#?O_vRc@d8Qpe zvF%(U)z5wG($K6p<8zEgCDq< zJhZy(FWwwOtPOyy6EVt;)To;DWUuGNv#STM#aOEX#D$x6^*4oB)?G8c_EbHo9vMdE zGT*+*3R}HXr+UBpwWl{fjwq20qmzgC0pFP(=bDgvVZ^#37YLB6#to-IH?_#*bC@;VFamY!+ZV2t z6hk(`hULqIaY)=1i8lg*1Cj@kA{LE8MLQ@x`z=^TkDnp9A~{fpEAaDI82a z`=CFE>>Ll&YBJ9EgY{77;Q}blH~R1p(m8XsU{orobg@;LROfZEK?UV9l(Qbx`1l_= z`5z;Npl{*(BC{SAX!X*Q648WzApC6>%{B;Zm6i?eDkV-t!9EJ*=tF#QmB{@4_t!5s zzWjJe7OTFKU3=cINTW#H7lS+mCkpiawWGpjyf`h&j{K-8%c45gh6cHZUqzFhD>qkM z%kIMx9*zh^#eC+{M?iFBj>8n>l%}zk1KYdy=2sr`GmNYZ+Nx1NvBMv8Waw2N)%E@5 zrlzXxx((%YY9L}dde;hK6u&}4@UnmG-_8$y5b2P@@vyOp;(un=P><#RrJ7!-R5!0H zYfXd|^=f?JG!S#MOzbfoaUpz#@v%k-;|AT%yc2<+Sk8=IQ|a^irHB$i)S>Wt6+j{> zcnk<^cwZp4frS9y+yw0?h99$H*)wbvvKTJp!W9buZ)CB^i`s$W`!X#MrD)p?h382i zDSO7j2SM8r+AE~ng9MNjY@=_?nm9&K>ccuXK?m;&Q0&Z-zF9@C?}97Kr8hK4nEi8= zvL%b@Wa1#AclWQUYA%*QKQpcqLIGQui#O8!T3{zqE=Pi^lr zBw+-moh3A3l!*h^Cy*P^KZ7XZfZ|gzvGlUe_2t3$LA`*AX7c{Pe}1}I93CC6ucxnu zVM=5snY7nK%eU9#W)74ZDJb5rn@cGu+4q1k@eJxlI(<>$?U8c=UVx&8`Y_K;gxPkB z;ti+8Dnk<)ryWGcRt0wWXX?#}VY4;Nur(wj+IYNNIXtX(0t~M=RG*|1p*OF8_=PW@ z<&Zx~*LE_^{^rm1=4k-Ko8j|R zk^*2ty)s2dDn!N)VGerI$pAj#K0TJg=1pBT>K&RNT*bBrqVet@I_b1Z zT$E6byGzZ(8zlif)#t{OoFY8&jzS>=;gxv_@MK0DQHa2aLvJfzah}j>jA;(7?9!tI zrhv6(CdcY#?wYliV!d0qF+}8KNmjOPT*jA2NnN6Rh&WtoNa)d0HPs7`WEXGkXH-Za3Rbeo85JQ)WDmQOWoE;D4LkzIzYyImFE3d{WR?ZW`qM zVqQjvF;@1am5-W8lqttLXf~wXO|Nft*4Dzz2lv*3I3VUjSqkGCxchgksV5`<)Pe^% z?+2fx6^dd5n^1<((NnOQ!~5bRxY0Hd$PYkkEqA;_JxLi+hj~E(sW>4k#|X!w(_6v< zsaluFzky+2{tm=W64FRsV->{{pnxsO{9xqA51sa=Q86M5ll`m}bP$wP+uwq*hLc~; zH9{7a7N=!H$FzsTh+u1?$V>vZf^Vt!$8g$OazA{&8MZ<3~yOiaqj- zU^ZGZwe|+k5Dl$7>3U*gk~x15E-c(!5h&^?G*e=7j)hIaIN`v&Lz4O0U4Hf$34UMt zZR2hPJl*g5w{BslmeB5`7aVq7G>@+G~mzgJf0=JjSkB9p=KaHl89X(M8Jdci4|a= zjLVU-3MqVZIT1BvTgGA;& zg$+dAhLoj|?K$@SMWh3;1j5ZgEPiF=!$6Eea&4rVl4pp4kVx?1Q-kY`^~@%ga!|(! 
z1AWg-R<;8}01jqF6DHN74U<=Q1OLcoYqPV}hvJK|6lE`vt;ibXPzuc@@P&x9%})+O z4H((G%aNbG(iQkiq>&amH?vMm*;+0KyV{6*)Su^Vutjn;so^q*?H>ujp$$1FiI#b` zu>Q=iR*jBXnEihm{`6q-c|V*s{Q!kd-F?nGBnrrNVvqHE(WbG%sUqT*F9M*IqYloM zYM=&Oeq-+Kw+gB634?;&DlbrPt9JzgWR}*xe;w| z+hqksoSB=?Fbfb}EEKhjtR+1a)BDP`?s-k7s;8iXheZ$BV?qEJKTDNo~YPH7}fLxSFBGMtL1x0EbM4@DBc z$Eev@g^Aes;90=AeSIy;uUyKP3X00ro@CdR_Q4h>T*SB2EX?x*I8g>#^^iG*} zfil%|Es@v{7Ox-qJ5UXjlFdjdL$!q;d|?KbB3OCS1Blf_vlgwp$LZdw*-#72+1U~m zX*G0njc{m$uk;+?*d>&7QhEovA5q5?2j&=ejc|P!e}EM+(dpL7?po(ZWZh>|iKW;( z)07%Qlmh|d6|FWX2x4uC33dNU?OB!gR~`C%&+461EJ7I9K-?uR0raCpCq>VAMZ@-@XX&M9q30gb0Fo%4U5r*5aHwPgPU3ca<^*jO`qy4a_OK;(VK&sj0MCs1}9g?{6JNRPoN<4})bdy}6lr_ZVI)1K7sKs4zF2)K?=10Z=+ zy$%Dr;$JaE7>Dy;@4+5Z@#5=UdCwRGh+@jRM5fXHux?t7)xtWRiX9u*Lf~8qMnKpJ zFM5Fzf}REySjmw@Br3wvRE@xMa9SjDY1?Ja{12Tt`wv*9&s@VgpxCNJ_4WfIL*H#& zt61(uXy3V3c4nHHdirx?Vw&;f?Y+_S%cSJ@+D2=X!ElZyv|_UIJ3^H~-I?V|%0@bOi6b)&N44M}ei_fN-I zz)%=>4>8sCqG{A!q$coFHGZQ`i0yk4SX{VJK50|F={VCIU3Hh_%|v*F?{}JdU}l(% z<5J{TVoRmt^UAM=9p-m*_}acl4Kh7FNYM(kKe%f(%vZm$f ze;T{!d(l{+I@Ck^>~hu57}C=DbSC!-%gUndADDB`Qd9jAW!Mp#ho9v<{aCM*A9##|ALy#5 zu1;Fav>kwlqktntO=toRIb{Tf!`^-Rd*5}nP45-rMyAQ}Ewe(GELs60%XY0PJ7i`D zArD`|0fRqM;IAF{y}39$(LX!fB-Z+bQ}K&02dL??yIQ>jc#t!V?b&4b5Y)BRf2Ftc zdu9)Ifm0Mo2>N&Td>Zk*AN@Q{jaf2&dQH_>I~sMa*0v@n4NoujWH)g;!%$H#@%4{C z`n*L{rynEHsJtka$D!W&HD=PV_Fw@Y6~msnTI5$i+mDEZ%}M4TJCd!PLWp##1BBP; z_VrhE3eigWJ%C90p*gT^)Qr3Kz)3goE>MC6!hn#`ZCc!AKR7YC!j#PT^r84s*&!~A>Y%h10yEs|M3jIJ8sONsTcO#9sH4+{RH*fng#c^z z@G4sDzm;y~a?3eBa9?Trs*uK%Ld-3oEZuu3RZ)kv0}D3}1cTnw(i;4WVIo-`bkZVx z6CK;S+O1bglFVgce*^~F)_g~ZYW=UYPiH7L&g)0d4wOoY%#&Yos|W2-`W90%2kyiB z9^mQiU)T9-%7Wx8+)|fJH=|WT)z$1bRU;f*co~BVo$l`Nbu>R|+C-=`4f(@;& zE%g?2TY*+Tf`@hKoh86(1T_G zUp3HCAN=gMhC`Wx<)pKX(?7v@qc+TUQ>)T~;w(*(RoRx750PRF!6I=bLz1N8N&c0( zWEAa;j2;%lkp`>F%yK;ec#$Iy%|@$p$~9vKhN10u7qOV797D#`FhyN*FlySZ%em!W zV7J%v`+UBaaIGe_Utnv(Yk*wU@_$)>fv#m~Z{}%<3r;chbbc=Ogk_PptWg#SE{ue! 
z1!zw|5?&An7wow3bE|km4kb^K)n8{(^~x|(YNj}*(0Tr*x^P|TO4ZxwO4LIICm>9O z#|L4F*oEwgTQ=J9ihDJaW>QX(sfk9ojzoA65AG2b=ps_!?(uO2_{Tm{7>Hn^Ase?{ zy8C}ge0-~Vf`jYzj)z7r&lmldYKMNK{#GqZtfUq8=jCw?#d{$n z9BI40oU2qS`fm|3tg!kGos=(k1L>&7I77@ns@9IVtY|b0* zH^Uz!Rr&zI<7?qw(9*EKO}(Of*b;98Iw%$eh!BI^9-icT_V7hVwk~S#a=JTA zf{Mlv8%7h?*wz;7TEkY!&oK~%%)z~B%D9c*CURRtyXKf&SrQMI|G2f#ey)g^-~@{j z1f?3zbs)#)ik0DNJJ5Lm9t_k#!z;y__*%(`n5EVpS*Gf5qtCR|G9rT1!doP3I8s;8t=(z%l_z{Q)Dxo zh7~x|_><12Xkap6bVF&8d@bJ(M-4+mwrVz+V!sXkN> z^C>aBB4qXz(XRg7T3wQ1(_X?;>z1ff*}2%=qhG1}NY|^l)W?K2@U;?w`GR(y;|a}<{ss9g?xH{R@VvK>XTGq>IR*rQA=@+E-W}jJlU>KXM|AZCIhw6FH!N^ zXcOi3uhg;F*~P>p3fw_$kYk8X77e#{R6-Yil^=c=k+)S^t_W+N>psu(3N4 z7|Q^AEB*tW@W1b&*R1EVk}I5~;?$TW}!QSNNlMvr&8Zzge7cEEKXF z3HLFZK>a;~gOZ!`|LIw(Z>TE1wZ>&Kk2iL1L$Kb_#R~PNdnZ?sUa3+R9MGxDOp%Jg z;xs9(x&sP=B95rkJS}O_@Mugbsvi*l_DCu0I<%}W_;|D;2Z=S#R|S;)1Lh6Z^)C_$&NHjycm5k(xuTjQYOL< zd4}jRTZMIsxMlMgJ_8VrWsdvhXHYl5=sYt+)g}D-Gvfr&mwfS{j2bS5n!<%oJSre0 zVyt?eC{(qmMR`)rxi{JqF8|2|;}f8dR8d$fI?mXq#i~((+=|PFToEJa<9s+dUZDe2 zrSER%3Xe|!cUX=)C^D+BON2!y#)o3?Y^}{*>*%?;t+sX}^K-#>Fcxz+9T}08%hH0N zlJ@pF7#`a%e{!yWJy;!XGv~Gr+nP-_BC3z0bO58DH95hy)|A1hCecmG%F@`VcB?6; z_ytYc+??{JWlq*o-db77Auif0wYopD{Z+^{D!`)^w5=rXADzyKLMCVW4l1s(N^#ZR zvvYe=*D2<>LspTp)#uh0h(SLGU@zse^5zShSwDr&(D>~M7aD|jDLuMwgou|MDONA{t$+J|AyFSuKTaw?G|Pq=gga38&oG*0JXa$F@tlH!ttz{4zC3 zi&UtHjD2r8(wdGHJ3s^160u(~;1a1;8&QOcc8d@>LFR$`NebEy`ZIikXsH2%o%Vra z2qG7Q6g!b(@5elIG*}u?HIhULZ#%QC)$8$Nc;jw3OLrI_(?3byk4DzFqE>Vh$~#1# zM!8>vr;1p z`;_A*#_nuw*{+1kjRlo(z?yI2QnmX6?RlfF|9sxiBkEuB8+e3D(kjZG6u|ON!Zge| zF6$#JzT;bqzS`bN`my?Bc<3RFMUp#-_q2j>6Eu>ZOe+EhQ*@_kUF*`3jQpqJUMbzDnz$16H=MDM$DSB0IrZIdZn+UYFrhwUnJw+ zAk$*e(U?&eS;wiHsWi|`hRCdsed;r#wf^_!(-Ly79(-091~P?z>J+-)_m8IvCKqR1 zx`h#02zqFI*?qhAA`r_;XW|~9I#r4aJMAp!ImN}D!7{2A+|-r<*&QS&FE6)ZADvR% z!~QyO+fvxx98Wu7o`{8GuZ7KL6S=YmqrvYToX&!0An(u@Y9R*>G{ zf;FCfDXMZsJjPE`3m~O+JMDB1PD;{}$)sOtN}Q-?M7b%wlNT@JLhk_1}K8>M*~ zX&~CGDbr_5`$SnbHHcq`pNkbF62!dY0_w4EQLCDcv=zTWBr#6`SbVST6GU}-Q;Jz4 
zg16ycX}oa{y*_)@$BaiuE(xpO6IPa5q4;ZD#Qlrsp!qey8^kTnU;6a98`2aS5U(_h zaekRu9EauHWhmC?&rJ+nRL)(EeOr{ceZM>vN(bM_o5M{aIWe=$WU) ztjJ8`!VKk@4gDu{Iozv*{w#JBER0m*(bPQ=-B3~xz>Fw}?e8v7Mc_RFVzD%ZLmt$7 zA-2DB7VzOE6Pncf1gM45(7g55unx)+yFLJcS>Xt=H=M*=LN0XLke#4D>Z!q7N;h^w zn8tO6jPn2?>%`gKpfQx%*3S!-E;(5ksgGY1GgM~k0h<)w;6&1SH>Bp`Ptu@UTD!R- z%Ycu@4cvzcQ0_7AR@|N1W)_&u^q$@DHe2L_#cMq0Ox* zykB*LR(zOI+ACf7b(%4pY{(|4W^zK_#OH!b7mpVqX>L#I?$YB5wYf6_Gd)g2@^@1} z$4rEKc0xry5kqL0IwM25y;UGO!h9F9Gqzk4$UHFlHf;@0k4S^g4#1gg#*dLKWG7Ic^tn*p$)E7MaPTkpIL_-aJ1%#e9Se@)ze*;L}an zzYD^q>$+oA=hK;(0RG;b`m7ieZ!Rxm)0d0kn~ly4bU=bX$4+@PiGTm*R@J6Dyx z0rb&cRY^FOcLB>*+(?jApPI>61c($LL%10pWE6?5!df+qZrA&pC zhC{e?aLYbS7M41wkk&__B1QuizOlP1+cjDQ-{SRI;~3+jcg`~!%%GUwCpZr#zaNxT za;Mqka@t*K-TRI2s4P>DDBEN$ zghbKblO@2>_$=}^c8gPmsLB>tx|R<1mU^z9kpkycgW+5C`gs9mw~b=H%fMeYJNXE8?+QmL z+P1>(-r~hi+X>|V!$uAIkK|VXFH)5}Wh=vu)O1F_xP%Po4Ewi~qTh?HUCO{{X8X68 z6BrYbkMGZ{~j zV3^`i=D9~T~^fBlyo3O9mX7K~XOT7%}j2)@>U*A_v{9PCWO#A5ZQUrLa zkBd*YYPkuc%&%53M)gf}z_suU0u{|1`Uy7sZ8*!>)E`wX?w9T&4D;;}asblf4^VY= z9JujBtxDCpod`2DE3x(Rxb~&>5x-C=wg3u*P`)pmFObVPHjiynOT<69QUgRUW2xM}qy8+f%r2qWq!lZXXT?5IuFT7FbD{8FYA{tYoYsf47o>)LTP~N;eEbA9>-6Tc+Gi_{JYIJ?=R|x;>zRI2xIsP|r zbwuL@QI27@Z(Z!~3{CVWz}KK%RU?=%0o&DBp++EW2`USWC73ap4aJ~V)mr4entc`zyumCCbK50?mnA^0j+l_c)>ezYS<5t9t@fLO1cbT5 z)iNS5__h7~x&L0k6`-S|*c-_8oNpI9W53b$dV5##5f`ItNqP4tz4&9+&EG*{d`3U; zPk^8<8aydx($^~XWfZVz8$U)W$qjn6FBxMjoDX^H>sV1Kp?w)KEh)vpgU7=GGEv) zp!?;pe!-dD^?iM6M3eK#v0nQnG5^cS5=czhG|&sW(Nv=J;5h(1C0(#M_-`rfmVwCg z)GV&jUE&?TdR#IDt5>TMX^r5$U#jilzE**U6s`30w2LNs{1dRED)nRo>foak;$Jh# zG>6~YkI%%4@!yv3=Q{>bXR%TQ2JdYD43+PH62yPH0I=t=m8kUtQ(xw*e^UaEY`bu*W3|7}GZo4#|S!@?q&iE4g^^Dg33ENCU9$_T>X;0NaWBfi* zv|JqbOp7-D#dX_u4h-aN3-o@+_0{l6_svXk{ahB~8EWZrETi&6y4oMrxhak47gMbB zMD+5#oDj9}JIzYmWgweIU zJipDpY3h9(KYNw0l(MpByo2jpt}lTa)S~1l?fI8mK3kaw_u4U2Ys$x4F;jm!G%7Im z)MDRhRNuk3b47Vx`gPXeZwYMN#JRrQ4TRhEHO&7N+Rh+OpJg(_q~Ws<%`^Wfufw6J zCdE`jB@=mAJprUiHMJX%+83#o7M$L}f;b(Y`5tC_ma`9ZcL&T9FOb|7$@q{}upu{s 
zJO!3JGao5=f2_v&xey}}-r|y3i|A+8j0lg0M9>~X5vPDyu>T`x1`)LFP)=I%uMpFK zVDixrqY$7H1>V7B1kco0ctCyFVN%&=F57B<@cQuJFqsy%3mUA6xh?)o8#M|d{`(RI z?TPr5@FQ@1eUP2`X7}t5fGHgQfJ==@hxu&ian>(zJ3)|`kGaReFkKa&+)u){Jbu{* zij)Qt*eST(tPwunU{XX^o-^QppPL&70VvH$o({sTLc#qoAQ7VP_dnh@vvP#7eazy) z-T?VoWN**{;E@>tv*iKW^ydcbm-G{!(?kg^ z!3y|pX5pR`Avc0<1cJSioAF(p$>oFG~A$~z3&gsS3hINvklb7BHYn2OZjHR%^UJaAPFwU{a&%B#!c97&~Yj;e&Cc^c=DZs~j^ z+SaHwiHg**fS^4FccNcHXe*hSzKwQf=NIa#(a4@X{)4_!=pXBx)@oT${|F3y?>Y0A zRK>JX+yx-E$5OpMj!5^b?Ow5~gn(21ANlo5h~V5{IHKfM-B-z|F=BS3ve#P$a0o!W zIzs<24@^b{8&2KK<5`NBBsJ~0Ao$}EEOEKTc~Pl@LA$y!TMxK5fc#Tz(D8#S3T-RV?RROWT+TV(LtXF1 zjBEpxL2q{5JDNv);rXrIH~SqG9mb%qhoFh_yf<)1(#D{=_GNX~OzP~-?jGCT1Cx$Z z9Msl?h~Hfi@70Fs@l9Xm`>*J~$b58QIgkH{I^WkrJdd7imi!~JU+b`u>8`~gFO;&3 z7SmABy-f&xGXusmaJAH2Vh7u@eg|7F#ID-n@QytTe(sOfM~Ap_f3!Z9k621uHe+SN z_R)oy7Q^wh9iDx`fJSERb75Et{fbDarq&$0aiC+~4&GE@e7%4B>c@H!DsmCphj*j!B ztejBhWep1TNz1)I1)4?%(DYA~8m#>YvT~5{H}Q#?N1(&Z@1HNT<~-QTB+D=~pBSf% zSM@p7e!4qLgx94~aN;!55<@cUc!Y|aql5jS)9V&t)+}+NYlqU&Wtjq^ru@_kS7dPp z!;%|ihVuLOhm0EaH4rDY;RF_LjJ4JOVti zFVP5ACLrIka>YLFUo7C?@RR}gMwR_}rec(IZk8{xalD8Zavzjjp!ef(t>f>K9+kya z1ROw85IfDRYXn+>He;;=z0@UulZ<}qQFJZ`Q}ef@KzErlH&-ZtyR9}H}F z!>I1dN)B&EU$IR7OJ~(B>kgPfGMxD;?@v0El!UFWB?#(fDxMD!>WjKhLwqIQKk4A+ zTs^X*3^Bgo6^xbs3g5O11e_;^Q(M~Z$P0IVb%Jv(Xvv;du}o>Q&pkw2QaI{^-Kj+f zu{9{-@hLlb-`P2a;OsBWY)vxPc>At7edrB{wrR&~wrqX`Xk(Y@kIa2Inw^?mOY4TC zi=P@ee}C>;=(<8Gm8LM~2zu?^6%IR&2Be9}T^Q(UDJ~@?;q8u8OU0`0J*^yd{%R`b z`PYv6qi@3NXR|=M|FtfWZ#FsF{2%wx5jZ8_+q&>xVP~qIxJ6i=nR5`V|LSL+AEQ4H z^RzL0b#wU-EuWUPeI*Ue(bkna^rX;%cM($0xd7~+2~iHr`bfk&eD>Ny$`B3ty*%1I zR<%$Anl%6o;>CZ@gQo>XR-SpWGYKBB#&1ewbTd3r|a`oP2 z%j<Df%kk`;;I6*D(oaWo*#c8DwCu(?Oa(`I@-|R zm~3kt`SJ~&rNi6jM{;gAOARGaqny#qgxmPwioWW9kNd7^e{J_R z8#n@SFO^%N~bt`0QC(aF4w@9YJN535A{c>!kPRfqRx{*7}!gk_<-ql4vF@F@YhozLKpjurKe%Sp{W?>yL-Yvx&Jy{oTStbW{nLj{Ta?(yj< zrB}>S7?m-lm=Uu!v4HGNd zLPHVOyju178=w<-w8%fJnym!94Ig*3hs!fYya!a9rOe6xyf-ecZEMYUKhN#X`7df4N4Lm%e;P=P^8sIOcIxp)dx#7A1zPndqEp>iWDSbo;voaaE$JXEj 
zd}gTO?Vo`0P|G5HQ_a>iMJ5ukr>?vf+YH(iNe}(1?#GwkHu^ZtL=_fvg5}tarK+M` z`k`ySlTyBOR>o0%ZNaSq`Sg=?B=94 zUcLiTxJ_M5kZuc*OUof3QO*C1P#pZy1H`wn z+kqV(nHjCsUXmq=hso|aBe|ntnRb`{0AHlKFtVh$+(nzrGRxvG{DvA{z5#Qr;aGPR zzEixnb38Cj0X}Z7TCHC00fcHlS!{AUw%)@)!bjA`}dkL%Ibfa!Y{}IhOox{O6L( zQ!@Rkpo=N=vVu1Sd^iFN@jM@RB-2CT1kb)+v7f5Ws*HIT3$xO08(m!vk!*ZO3FxBL zEQC?tM4?KTJf^wv(R!!N5Kb(sKGJI%cfCjFp|1V2cPs&m_fSE1HA-;)L+khRsUj0| zhP&?GX$22W!f=AZ!l}r;9Uva$@u7@>53f;Z8k;6(yS|Oo&i9p8(wNpD<>=%s@L>`} zXM|nMTU*zWd}{0j*c6L*FBDD0W7Xv$f1M>mX6XlDTB-C&y8qNtSFh?kr~8h35lUu= zd9SPf`K@n+NPd(5CAsd=p&@DfJaq=It`gU|E=`kvV_xD8_8N|r$*MlBhgsM_Pj4*} z2@V$IjBZ-at{P0YLBPjtF)WG0y|e&6bh=U~=Xahd_r*v)+|@#<9>@)Gs6x_wg~^P5 z$gblw=h=Q3Dx*}?E{y8(tP=eL?5B${%dDnxia6wa*AA=jhA;@W**Pv)QiYmz5=RcN>7y@0;_s%P*` zKz6mP07^Se$6pYT^+o>nh#=hGGi}#lfJ^z&-Hb}SeX#b;<(D5w7Wr@g4eeYX@_TTv9%M#YvXHlJrR0oICid!$cnw@DHKVh`MFNUVx&q1+15;0-$r^d-P8<#Y)8xM#mgTR zT`HrFd&_$GrVtCAH-Rj&ng`M>5`u!zr)4~$u)MZ!!B^!a!>vqT4}aWtytiqMdNe3i z4gr2=v*|`!{1J6b!jNIns77I6J#9XOq@@y^Kw~H7isT#Eo2pi=oKdS$hp_;<@$%60 z_YDK0>Yya__LK!(7v}0tSI=Y3p8ZgiH!+`F)7jReL`b7wiR!k=QCiP;26gVTz-$G6xOr@vW;yM1~%s zZAE6VV}$IN20{~ose1&H z?PFT0!iK`8dsoi4H?oOQ#v<3Xkx_K047iU5$%`QTcTMHbBkggy6XdHRzN_f%SGy-` zosF>GZ9RvVbXpgyZACE2`R@95VQt3Gg@lhgjI888+>)6n%c8dgt_5AG0=2NwIg%qo3Dm^WkrzOpGE9pM4u7e@-vfMcdwVv~QYIhZ4W^@O z(U}e_2h_)`aiDOJQ7V&~GgC$0BNtb*iZ)>SuFs1zQ*AT)u#BOrULf3K-LUa`kLvyw z!NyrmyQfY67On?-Eo;|TC-~0S*V$?33WuLYE|Ztsql5QYgu(3aClC{;Q_~YDAr_xAvq% zZPYXU@THovlN2!N8DspU)6ab6O?ymXmG@qFv{stngZT_=n;D`U`QL=%b3f*uTsnE>#5M?#Wh-O zj5q$g`>Pd8Y6ca>#vC}I2p`~)@URfux5(fAal30LP^?;LbHBDk-hb=(TSy$p1~|O4 zP!53#Y52Rs1W^~&5~PM|gT+x+-1%EqoK7H4Zp{Oa(eG-UI$cc~ov(*h4#|5lO4hIa zfH2hefJfNJs;$lyH=XqHDAvz9)}jgC(pQmYK0?|1+iePVu(mQ1PuT7}N{q}8078~B zY!I!b(vccAnNI-ELM2I?2bluH`sQFVK6)(%C*yjenBl=nFif|_m6EdJ>wn1j9xUud zac1iJRqR3GtduH?e8pbUF_I44hJh{&7UItvxK$c_Cy@pV1uFN(8WNoKW}_JXedRkw ztW0cdv{U=M)CPS#PhXQ1QA7m7l^A({qdu*@Z=c&)4H6K1?J61@ac{_k;OE3_(iXz< z^^)bLJVC}?h2ck3{{)dAwD7Iz_ zU&ZZf8j%08grIYR-?rH52$U!Po+$i=Dx4FSHeqH?{9{PWqPJbO!KY8}0f9Q}DZe>( 
zL;jX!$|a?mRi#gO1R4oHl`yV>+np8ukJp&rVBoxInaOS33?{kL5?(|8-61FMh592> zl;oJbKyP_Vq^VVWPm46tY*S-{sCLCp?GBEc%G|K?-yQ+hq3W)_?}W<5`9itFYDz5} zf|lA_9iQowCAHuSPn2vcZUJ0;@VpxrFlXEXU$Bz{lz1)`v({(34jen49zRX+vBM|T zI7V#8QATfZUUPovt9Ow3Z=F8d;=2CYTOa@aA2M1};b2KFG12K1cP<3qZPKcU(^tQL z3jWp0OilD6L{7`p{*o?@Cec9O2K(~2>Ck`;)lQ_oPZFoBmEeqSUep|>TbyeodlD55 z=zR#~ye5ynx}(mD$ScSScbBgAn~8Hg%Z9Bt3itRbG}5~rXW+qw)$ysY_TIsAz@{-x z8w1Z0rOhO*z52DHaC|s{NZS6LjDz~Zs$agb?>pmX>!`_~uiy|C4@IJIB3_HgvJRdl zLx13sN_bGcaxOKExeN|%r6{)UXCT$gA6Te|X@b_3F9FX!h9ER`;jyYEL<5~JdN{#v z73P4yBe{1a@%ww2$f*aJPf^`O5v2h3f23Boz1YV4`~qZJZeg2nyOzNkJ=Ykb+baGu zXJb~CtC95^?7kiB)B>jv=ZhY%&vJ37yq)2F1?7d>DV;vBYayXc2dts@0LNe&JYA0h zweR8Qu{Fq9$Od>_5Iyo3?~$mbBMje;eaMjZLE69Me zVM!V8fz7pKtNAu$Pmi6O1gqtm8FuL6tZ#X>3}-iR!yXCtaL0YjOX{ywZXzqEy zLPkwb!Fj9WUiTPMvOz39Mor}WR<>!XE(#NztO8DxU3;~AcDXoudH>>}lTPfD`ndmn zVa`hEw_X@uC^~f|L&9>)_k-^N37M*#Twr zw?Qc8Q+7y!Y61zPTV(%m(jRhOoLG8xYqoPAJZf45>hBNe_@5B&{?O7XN^j~%L&^qw zLTk!=dmf%&I6l{Hhc8-8w!?Nc?Yet4%SBw_g2dh~bFZq-*6v@ll+WP`KpaJFU-!3U z)FUV)ugN3X(}}IEL#-Ydpr5iH+4q1YU-m@5iw;r^gm-5Q#{Nxkh9#wA#kifw3!q2g zIwB0>&&4YD3~=Z<$q~?0ek3F*j9m9!c-D{LzdFNFVqu_RnyJo4vvWDbjQt1*X9jx) z8U^?F1dL80&kSw~Wb=o$Nh*tNhw!J|1nFQ{zd>a_WMup;09Ntu0k7hz?QN9RFXE#ZlE{mB(|PM5 zj-$l?`VLMzZUrr}>*lg$GC063-UAV(0iJ~jTvS32s12>#Y1Tj#>K*KCgw?`{!SSl! 
z=17r&uzJR>3{I4&MzT`l`tWdl90Mq+e@U8J08s7;fn^69%_sSth$?{%6fELvbjQPA z$X+|y5E~+PBV~IjZY5WH?v?XD7;vnx@>_AG@W<0MX@Hprl984Sik4riB5%f?Ojs1% z`|rL%$I=QdT=8>O*!6OUyk~M_^SlP^^TawU1oEvP>zuc#BPSaUo)QqScIlgLVN zxX#EEDS1G|4Vb*;3&h)e53!=@kzsNX9G;rxG`%U)t;|Jnn1%32`kZzCsxp9<6oq=V09 zxjy#Qr2oSaD7=T-#H*@t?1gZILv0r3#4{I`djTwoFZ@ak^CtMt> z+4hw#sbZ?`^c08hiL!uTeE$&7ecb`ldRvZ=?8qk*$81KeVKs{ zULz70KxxI`{Cyb!tfJ=X21MwcE*^t!#OxGak3)m(wnN^dceZ!+e1O*y*X2wh#4NB6 zOq)VZ4pmMLonS|gXBTXIF_Ro4f^v~VfF$-AdzTZ_xznkPE7AhlJZ5KdG;$jV?u9t5 zaYGi3$7EN85=>an#OZV-#)d2$rznYqIQ-~jB-{5l#px(BAJLivv}5QUR6`qr!qIM* zB}naijPz4i!m@({zfK_+TC5290bGQOPuOiyOdw}{IssGfYa>f%q;MeRNPgl;w@`TS z&4?rNzeESMp6=8DwM*QYu8)$uqbM8{)9N^amZaocj$g| zh-KXzR@csW|C0XCBUG;RVZG7$B?bbc=P6C_%c=@wB&JJm=yQ|q+$Kkhu>wT&iH^9! z6h+5UHJWsEhK^p=we{o6$NA8!pV&eSErTmY$~1<<(YXJs8Q-RXLxnIAVrzWYp!&kgq<_keFwhi#L}g>le`@pLyEGS+loQ98Vva|g zNF2q5`eP3?4OFFtFZbvm9vlUvqinROf{vsM_AXEJhJINzIyooGt4Nlb^| zqa(XMcXC$L#opQTtfJgLaEzh?2X|{zal#V9-2Vkgn3h&UOO;=6 z$qMJ{J1i>qO`s(ihT2yQPgw%++YE~<57$lGs>aAe^FW&*gHP7efsh$^Gk-kPb6u7u z=#MJ0ErYSn+6?#hk< z0F_5NqWy>_|4l_@@Bx{ptHAx#?cojNmKd9lR$tF0B$2#|aI>?eKN z{yQ6~7b(M1xKc6N+Gg;Ks22eYKtzn@?eFB7yGwz$`FH(2+amt7rRY78U6drfpfhw| zgd%X8EHuflW_a>X>IRD;_acL{j%S_SF6iDV(M4IQ5ApVOjlH2wS_|rxb%~R;eg67*0f)iZ??rP9(+FSFHcUd9Bjx;MS9#_UqoKS2PtW6 zc9%lZ7M{XDVaa)tcG1@SaUn&o#JS@aZ}7+eG4z~$PksL5eaxu8=!UmiJVa>bRjXNW z4N~AB+2mbPj^Z24SH{UOIz|Yr!;=U&~VtFp(%^ zgZ}MXh7jaci*K7S*z2&18i&{Uq^A|s#m^C7a~a!cI+H~jpV>?#(oL#*3s^Wfr@DB& zeWPexVHf%xlR@M5VC~09%B&(YL{o!98h#_r#V-gzWj>eXH6O+P6Q*HxD+M8f=(}j2 z&S_6X6rt#a$5x(~#zh?$=eHefNawEQIZ4|V|ZOrhupT0{UgSp+0Spb6(r)h85 zIK@skc!OW7IJ7Fh6g>c3OViiD?&iaSmu4#z&ehHh_twFa&>?1GyoWva< z1emD1?8()^JB*Z-QH@9gx`P2L*EE`99<~s0S|_uZrmc(+E>2#n*F8(uH9osJB-Afy z#MFeLq5`D6q;lJ@2G`!Vp>6WKn4Rw|O}sfT$CJJ%dSrNf7ee@%jG^Kw7_CQ~WP}Lz zuW7_AQ^n#S%}Vc4edtQ2Vi-zn-+hnDi|WhEIWGKQZ9HDFh$em)O4)X}<&6ELZ7tz+ zj2QKa$irIb($r8k4A{@6)F)4s#>^GFq|wp)nHW+I+H1~CDtJ5G?b$%=@tzZz9c-GGWJ7QA8D=J|YTe)R`5cRv zu{>O0Uw5;5ypPu)nqNm(>L6Tj&2FYJh2uefYtuQ=w0ynu8E?Yi~L 
zKpm>yDcm+__zeT(oOL8@Fhc&4V+7=j6_6$(I?=^DAs34=A{IB3$95?@*xxes7XajY z$avI)=$h>bZI&$*i(DvoTx=|*L&CiQ->7BgpMJMe66dQoh2a8IvuwHM+!;_`wBGJTlYVuoWO z=yg~c)PS2_wz&TF(SXs@XYi}B7HgI5+EICNjFW3=o1{FiIkwayb5de8pJCl09Nl|W zTCK{BkPXSEu7pIjD%I%7nGJK49AR4bHi3!+GI-+&`1Zk`fj_I8VTf0RUh*4Tst`rq zzixvtE*jtHc7t!;usr~CyuH;BAgg%aLqGP9xomvU^e!|#uJV-rR+ysfpLr)_iXrhhzGyuJ=%&@@?k&!mlo%V4i7@=m1e@l-=6DmO1;QU z&|+!=W4!>MWrroGQ@VAITYiL8oM<4eV=fqBAGhCZxna$4N$E>_qOLZn^jHPW;Yefo z(x)3JQ0xpvH6k@#%f%ah&dY(bwrfQxAx8Y{@9}QQ@^M&+6+rn*$tP@==DN?nUgw(7 zNH?c8>|62G5JL_y#Ur~6+fN*R143Sg5H-eybQ+7w2?C5n6S)$zjX57w&#Ng--`<_e zDp}8M4}^}K)$3O_Yl63q53G%+LS62dJ%B7Cm}m<))Sr*6J5Iu*1hbE<#V?Z$N=bEJ z>ND?x4MKP?3hn069Tj$L*Wi?F?hR@b;vZPU14UoMuF7WluwQj<(z#x%KOMtV38&l` zOwtU1aV3djFIe!WhE0U!N*mTav4uw+I?!r*rcl|u?!fH46$zDGn9+S|4RNjn2XD2$ z5k{?R&2LV!Ye;=gOefp2v@Ext+|W7-Qv@5z($CQiNF1Ef!AYpK8Y-O^p3p+v>2?Qu zSfE%d;e0-v_u*tly_-ga&njh;|8<_$9YVMP&ljGB9I8l+*8vVh!?s4g5BJ6Qs2~A^85%!~u~}xZScVq4prB!R1ZHcX!@U`dV=Mp3{q=`vGkTE+Avu zXpL1GItw25c>}X7xp=0&9t|P>VLN-EEH5%OC7X{!kF?hZWc)tAAM;WCNNiGAlM?1Y z)7Slc6o*E2dPm%xrp2qan~DZWvLl+rY+5b>UrUSIU3|-Rd5fb=)K!`lc$u{@O`zXe z1TF^YUBOK-Pcm3X=IS(f_f&Uuv37GFSE3B+Llp};wyvS?UJF9Hw~9|hOI6G5T|03f z1CZ|wgnDnVk$B1>7u9kjau?u#B}-GwN;QDu1y=Y)<++$bkAq*+Q{cLSvt)E2LlKbi zkMuHcfn?3TS2!b`T!0$`cCOmQxqPEHB?JhutzhIi=`#)#IE)yj*VRq3i1K)diS&sw z52%}f(}6*5Z%QoEv0hhpvQ6EK9TG;baK+8_#&+%YG-2Q7Q5XJ|?y&Ves{^DEg2dCZ z0P@iAi^IFlie>h4`pUuYObYXT;#d2@=qT@3##YHPR5^p@b&jT8Qq7S%M8uYTC|78{ zX>0XIM1nyaOf4Ud-{prad|#k!^}d&}W-N^##{Q@=(FqRQ!R~}*ih!O5v97HtH`oO> zYA=o3VTZgfsJ7|YJ%33JFGW$Aj^D4BIz3gWS7?SPZ{hl=go4r64c@WZ0y(~keG@jX zE-wg+q zXjsp#aBk4sZ3I-Ty@*T7q=+p!C%tRhL;@zOt;LUK89eElQ7enJzsqjbPzP#{ra54a zNQjD^*G&_LDO7X6YE>*Yv)Uo=#u3B703#nO22D%vo9yFwIuaBYA%+hS4Bl`CXZZv6 zdoj9$-`|Xi=`2G9%G3okjj3ZwGls9{T{xdfkRF%;NatS(T~iGhEnB)ev<})s?kIMq zr6mGLDV82l_hoBNtPw^Xkw(VUVkwTdgAp<`1n8r>k~5nUJM8&=jZQ`e$%=>)af8Or z$2yisZXePH*thaa)45s&EuA89XqJ>0@)XR^7x}9zREc%W4Eo2Mi);~F_}>YCpdQIB z_-XE)M?)^u=RtG<*h{z^K^1H@7&pN9$%LPpZ1Nk>>HSDUE|}=^lSE?{YtIbhEtW2K 
zN>qsT2vI8Dlon|;NyKH*-7e@N!{T?8#Lr+))CYP(AH8N`L8dyIojJ}vk9lA8MpYQ& z8uweL>kMwZTuJjfI1o0&xaXWyO*X^#DE=I<(jXLQZflbLJA>&7`*xauba2kX~J5`Nk6OAA})C*&^=ir?UX@ zVV<&lRvDV0Jyn?6r9WUw#30$kN;FgNhVm5f+rv&ub^8Ys6JQ$#F5Ya#Oe>mDz-_4A zEZi;JQHtFJ;dry9A-igMAw=5u6zd+Kmy$DzppTMwVwa*qS|8UgPnHexMMnCXWEIN7 zM=<30H27nit-QM9SoSaxc7c0qIi%ZOZ`$==*YG}j5V~bH7{Lc#v3}NX3|}ZqP7iOQ z0R}+Z{WONk@#3TXWF_w{cXXiI5#!_%;@WN&gBO8vp0K(9r@o;4vQR|3r65*c_khp|m{U5DRi?Ygc&ZU1&|sIJS!jC~5y;Id*l5+; zZ17$U{iV*lXQ4xGsWXfEEi?Js6EZE4p#}i0k&?~Q3KlB)T@P)X+GSX*`2?IJODVIx zX2y0aFk1qI56zH=(YjZKuq>=bvDnp*gtmcv`@qEMW7*7!x0XxoQo=h;(kC_a`cKbX;^r)En6nWJ z&J*}h98k~a_prLWPz`7SN&c@qh^&h>Zmc$)T0-!iq{3Vz(7Ct|jaJK}^m1s^?h8E_Ar8>K$1BlE*#HM?SDU%N^RK%r{k3UAtOk>8jEa{b9)|Jib2lHkqwT65_ z0?`t$$P4&flj(YC`6FTrw|=`Jh@s0kP6TJ4UNqa)aojfT9My*R-(RK*!kNz$HzWIve&dB9sI=^|(sr(t+;_Z|7 z<@J_*YQO(jBu=DQxQ_8RkL{p@9m$WiaqV6IB6Cw3-q!r- z!v1hEwV|4vsap5zrIvBeA_lxrRX;pqC1dk{irFwErk3uliV@X1dAsKb9|PFeLwLh1 zf-{nTCTCu3gwYf^C)-aTVSSg-{-=B?$GR;g-SKk4E+8OaX`7_>pUhXDD6J!{hY%z| z_)f0q*W56Zb(Xa>m_eZ2BkTEfzh7(DAncT~Cy|W9>1HMN?S=ejtIJV&7P*7}&u4^$ z5VsMeS`~j-7JAIU^l*|2>-zx@9V`e+DJ4e~VjZ}u?<_Be8#fT}KB{!Rlx(;sgfN-c z*nuKHf|^7z^VfM6OwJ+a@cj&eEChTyY3}Fp1sw&&$cp~qXvJ@ASkHSd;5yNo!FcqO3jyJIYW+|LBE-l{V=I8<73Hr$ynbllc6G8C-5~W z6+r=f(n|3)jv0+no$*T(qC)cNkepYy+#3+QLh`M$>XJw%QRqi}WdT8CLWC<<<~C_& z&-V#Eo~tcwHjz_>dnwmR%wC#oF(8c`Tl3?6*jN}`k$!pJY11I!r~g@})6#eYEU0Pr zA2uo^6KRz{=u^3xYbPu>PkCGy$%q1~dkH3q*@dA6vnB(s zOBf@IvHXmJL{*lfb0y3Y0%ihp;_;;JVB|Ul6~q|@1uTAxe1b75pJf$Z5q&w>o}Wv$ z=*`365x>)Les6~&LB=qe!dKKBODAt0Rc3kflEU+U=QubB4?ptV+-&4tZ_KU;6Az)K zG+iamfr37cl?cWa(Mekf+64WdNIR_*ZUme-Q+eE}%@X(lY`FiCVejKOTck7B@l248 zC?qCwo4|_pt@7s9fe^e{#nVr^8a;hGUiweG=@3D6E|*tzKxjl1Z!y0|wk>aDsIj_3 z(G?n2(`2I}0}M(dl^>V9qGHNKrOI#sM~VLUrF6rkJyy}ipC!w23S1t;lF?PVL$awu zm&*G=U%$tshM1x9GiI-Voh`cnhkkH?_agJ1k2IZ#wWORF=pkK|Fe~(CJdG+=tJg!P z?SsPcEPPD&GDN5mns3_Ss>{fVesSg?!rl)&^o2hhJv~g@9qalY-ME$elmzR@eZZ<3lZLS$5HFZ+t-;2(@A*-}&{oE_GyiZl)@rKYVZ zhV4dhYKEI$E8_V5CNzSqJ5lV7aI>A7nnliw4iM3K$)EkyV~ZiBz*2!JF?bp|AAPXy 
zeDaH|#b*ckk-q5xn6lpj8->S&N0+_pN3M9{ohUNehl!cNWdYh(6@LjU4PMPGX={h^ zaleY;s?sX(AS#1>K=bnc^Wp#9GVO!J_H{a+WZ?O?n#_?NSYfjaPv|1_wyyvI!Wdt* zt89y&`B5q5HriqBsD2}vj}h7iD*h#@O!ttBQ4DfyPuyUi60=|))(G{eFt`PrjloL2 zc)UtVOiehzf1T2h@QN$7de0ivz@UK!`5P7RS>*dBt6uIZ#rz ze2Srzp;=F3aS((~MYv`pD*IT(6>wUPAO83j z`zD_2jX?lbfnSrq4q~bu;l~)aN8}c`r_m#FlfTS72OMG}1~mVYgd#A-zdD2c+K(aV z83WCw&tt*)K_5qoALHvs7?MqIk196wQ@YM0hpB1CA7lpNr4l5GIkfNk(gG<)|AEW> zc!aJ^%7t}AVL5%TcY-qIvFsRFl6C9-fLaef23JWnS;RYS;2aa%LExpF7fcL&0o%Ji zO0krO<=}@tB-W@__mMh^g-s<-O*K|g0dIy!%w`I_uIG5|J7)UlYer9w^%b>*JYfwI zuU6J1;&u!AYAn#8JQ&MzI)E{!_$f9-mVTviNr6j!7AL3X`*-Sp=h-}%C zE}tu+OM;J%#Ym*RoHm~ZjbB?jQ^RMpU3*Z7eaD>cvQSB}xbWBU7t2u| zv2!8H42zb3FIi|glcEmNN3H+I%h51KqC=vdr@R-J*nGl%m-Pw+HxjKlp^XUPl}#+0 zUj!lCitAan@*kDIIR^0oY{3YhL+Nsa`}(J%wxWW6+=SFRdMu+mVm`MOht_=087P?h z#a7jT#(y{ixmYKQ*@DK3^pO>BZuNG$wqD$lm#db0Em~(23(|>rMQF31UFP-Lop~SDSZ%FA!8r>TX&np&s>a?g z8!l7PKFKJDzXcHL0;I7Y^Ug+W7@A49e2BeLGj4CT<*(N9@uhW%IAUohK(RmRt|Y#9 z92_CprdW_@@XwEt^zg92$IaLZ{Kwuw!@B{Z&2oV&#j|yzTwduP5vtsm7-zd{MqNUk ze{gL&C*`7-^C{mer9PVNKj?S|d|pLRcjvlC=%TrA&B@C;7R96lXU#m*{N&BzySs~2lBbB8WeMihD<>k zPYFxZTVO_h;tVE!`g%QoMzR(W!h35c66leN zFWwja&Y}>467sq(i9Ot3u6r(Mdt@taX^s^ZpojT*+k$WfyZK?7YV0*tI(FI<-Wfx}o5*!Lf zj%}Nv`X3f*ag*Lo|J5~g(G6K)85Hf(&A7Z=oJ(uycjQ9Br~*?a=MpPrM0!-0U*68c z7m(#Oy$J;6+jx$!d3>9{Sx_`^kC~JU1%jx;Q5QUzIcdadeQi?Y%Vy6=G&XS9Y-$_=oom>UrsLp|zNe*p z@Pk^1gv2LNs;F{$S*rVXYFh(VvX~>!)F&KoI^0>rB6uu@m}Dl$D_vVq8^(?vBB>8> z@Mrtojs9iKmS;a^{sSPru!k+O;^Ayx(nptOB@V-aJ>XTPQGm*bbFLXrlYy4Byut^ z@Dk@jheE~_-&vC6YBhfbhKzHbA(Mz-Y-9(O(j%6CY3Na&y)9oTlst?crMriR?B%!P ztO+s?rmu+4K+HP>l8#>rzy$6RtC(*>Wh99X4L-S36&}q>-C252KL&t9d<>iGhHu$_ zvw2jKFBN9grbBhjLyWRan{rb#sTyIOQ8dM=b*{fP9Mny!ca@z=0R`u}ngk@hz_ z!3#yQ z2Ce^nHEL7ns@!1PR~}o5e!H(Zm+X8u=KEC#t^m2xD|Ql~D5t55%t|rOHIaFEH|48U zN{{rfSNS%D8U&`AA~DE#aj@OND1)nhqkxKgRS}vm8&^H2t_wxM6ju%@y<(fTyNxG> zgR2BXf|`Rm>_-*cyl}y=?(Gnq?N}Htv^5}-tNQG9wDKYN7rSC~O-LKXVc!eVYG!!( zPW9IK7h*2iF%m)t4t!7ccxx_zWzZ4Sv#3VlEJX;g3)Ep|GIH)UpGQ)8R$Tt&%@z0Q 
z-ZwvUdk>Xic~61w8sHS|K=FBEcZtik8J|3Y6?VJn0x692m4_5DeuqAWgc2g0I5zpP zSpWGPr|n4ygQS%IGkB(A|Be5}z_HShHg~;w>fs%8iO=BzsrH_xy7MnrY#ue=P)j43 zgs+vV8Ig&dCzzjDVB5)VXZ8GNMaVSqXWuzh_teGY%J*YT#{HH?E@7AH=l{ZSqki5D zY0MnR1lt9|8I&Hbo8RXvi327*kvjn~)zG6xlNgyJm# zl{_)(e9ne|XQik4kt?k5v~L4C24(iI!;1x?Ss_2XFvxSi*Gw1ghDho`8J4@8Ap^?f z8ers@n8|Qoz+Zih=#LuD=J0u?)BR=CB4~zYXGUrOKs-$&kC1F&h}iNRC0-Db~2uj1Rf@P`1e@yQE=C$=xPUd^>P#n<}-7n^$N-p#;e zJ?2ZFJw2-0FD%W285EfO2j=y!i=(yH5E`{5`-iK*HIz+FD(&sk&i<=3?LTOZ3I*`w zH&n;*lYNx0?gO&$_O3^e@s>pTV+&|ypgZK^Pk&wzJ_!BYEcu%NWUu+&LJYC4F#5pE zy87Z@BK?xpr=ESO2bp!0DE@V*C7CXB}2q1~Lt1qE@=ch(W0^Stx=uJ7{(@#caGmvhi!uV5vho`Vo)E z@{L*C1T~#A%%#!Q04AV&$oHI3lksDNRXoB3N$z;|V!WBrX;=Rv4KR=;k{Tc>QGV)Z zzr^`wtWOc#$Z9Ng={Z987%WuU@nL&+d!*QbUG?;Q5G~1x$cv;U#dGrZwxliaf}(A3 zp*-TV`JBXHp(g_vtIoNF15s$yIBjB&R3 zuf1M#=Z|v^Lk{{p4<9Q5CV<~J`>qO{6BomN96*-1TNzRuw8hg zO#0M2J%PC3BQvFu$(-H%Yi(^hz13@HeeG2*?is^Dq8qOK&*^8{oR_OmJLqX^J_&c; zn1Mb*XCdE>SRr>QI$uW6#u>MAv|_0I*nxl;M?6BAB1y|S{aG+M*Hq;VS zTah2or6kSVccpvm@r`FBKE#GD{v0U=x+d0f+`uoqY)ff zSwGEJm`$4)FUYW*{T#eYR-Hq9*mPU!9`qA2IJ2^!L?}{<5kMHgJ`6qOB3D2^?AB%qn`?-$qMh zcim29S8!o_w%_c=EhWJC=nIb=_OcxZbASYw{!Cmo;uJuorL=YwFgJT6kxLIBoV};tG+ya;kRe1vBoyFTyE{%CTbkZ0^HFu%sOOgcDSl|p| z{RBx&Hd`Y2VF2mxa25vobsCPSIjekScir~URheDL48)q&`7ayB{rSdBgAGsD;M7Kr zSa7T9mv0uS2pWGHe4I*>CV{Npvz_mx>z2R2_!-eo%Eh5}vBUSb;D`zu{%ydF(#sCD zuiWc*7s?s%-Cv?CX2-E_>Fv9v@7d&goa$a*5UBJ($1Y%=`_NYT{hJx>XVM5k`sto4t4;EH ztQocO@cc*Wm0k9WH=kM+Kik$-WOyeLI~g?PqlnUh(}c26vS9sAl3q>no?TjBI|d82 zbBVh#XI0O$6IbZ}BY6L{PwgPc@!#8IInEFF0P|%Eul|JsQFZ9P3p-*&6}Uc9Y%S;X zQ8+Q^e)c^71?D3)78;LcJRZg%E%9$ymR#1&B52*$}%Hu?aJK>4)z}T8m@rQ6j77 z+k$pTr?PmQG7N4mxuQRPz;W8_{PIb_ke<6TZApfJ|JlpWlfV2qrDq0ro(`AHHySc{ zp$G;R^sX!9s}UL9V+})@mfwV&w`M4+iROR$1e8w&zxlAI$>T~sw^lgnAi=kJQ@uVp zn$Qyghra|R4bvg!*x6ORxY7bNt`Ee-%sn#R?4-*+y>x=PxD^SNzg6odWazVETgn3k zsbpLPL#dyE93By+8L`iiEJP+ZH)sdjHp@Rl>5OSKFDB3rd)INqvR}I)SOh653BxTS?wJnr$c1+ zSO~1fL$;3e8jA_r8F#}O%uhtbTZ(tXvS3!(z>c~fup22z0~W3)p#9@5g4A;tz<;e9 
z`)2b^AMTsSgv6vm*$X=a1VrC~o!1$n^1MW*KlC$R+GHel$o=p6>yysxX``pdV{3T6 z87q~q&tRM!3*Ujs$5F#+sp6@f26!VhW)PqsaP-CKet7#OO@K%yrN5abSLNl9o5j_O zXB~f^f4+L-vvcslCa7JEBlorh=$8Ss8gCmF5_SI-QG(Z0Fn8R04?>}0b0wp`evi9Z ziL@a)ZrHm7#Sjx>K4i6&Jw0DD)K^zb)5V-odL^$MchZd5;D^+WVaNL_K}k6G4SqJ6 z)liKHf5VC1z->yoKgippR1hg0RVIK8LV4SHTDN6z-jGce%C?Z0?)t$wnmF%jShR@U z>&Qu}NYF9BSqMjT0awuE^O{>*qIbZ5z{2yrFI-H)OYKxR0hYxJuV_Cm6#f-OUyJ2< zXGuKc6dU&gUQpu3Nd}!TN_@M*yF7($xcq^L3m5UMKcwb}USsal5pW~#p{}-ARY1sy z-(N&|9kq(Ai6_R6D+DRihZUz}3P@g16QVyrRDZ$Ya0w@x$j`cj%pp}N+`3Vp9h9j? z>psl0`h(4r*>E>GDja!|^F3k}bBqG}>-=X^?!zE5G~5UCvYW+5v(+Una~dCcr{a}_ zt2T6eP7G@-?Ak^yE66%HA69MAxzSuD!*9#CtZxrK5K4_tHz_qjD}~AyfQJcH=ncM) z9#iox-V32X`qOz(*IjPwXr2DyT5cBpAO2(ObFYX`I-q$e9dm%nR-s{l&-)-kMu1lN zvc7^D4M!@}Hh1pPh-m4AF7MZURa30N$)qQme5|Qvg4b$?c#N0cEeIIuiRS4p*+>qH zRiu%xR-qemgRHAF{NoPl4gpBUgFc3l;9_=Ai(5)AlMLz}Zn3`U)iBRgXnqB$zPjFM z_|Jh_nhCiSn$b3%L84-cVtr+tiotmiN0O5nzgelv2`rV~&x|!aH)2HGDjVIQwB<`p z`iIj$>I#%?lVm0em9quvo?{v@P0Lik8)qLCi1&*rw8WwG>h8>al^N z6!4K3d1r8cCUtJC#S4f!dwTLWM`O$HZ`MXf?<$g}VFVD@U(as#`6u>Th0Z;BM3An% z;DdiaM>s(5Zp2VkLp91=BAN+skp^4X^1ulOv&67l>(>6p8B89Ub&QP=rp5QBKXJ1c5R zG9iNA+3CsHRK}S)t$p@WJKud%5bK6+)8EwA@iFdzRM|?!nI{??l|8SGMjR{%CT=&y zi}c4xFdestGf;8Blh48aB@N40{i>gDLuFt51tdFEQs3i)z-Nz5WXFfrBUczpo2sut z^cz`U^&e)G*aNye;gBrFRnzRrGzQxd70INka5NhI#85a%lL(R}&FPs?Bnbjzf0lc+ z6ijnYxZQT_naKYvkj5*=%E*_8+zHAv*AAP(V-i}&AiK24Nd8Oe(ma@9>7N z(bXggOnk?6&P*zIsq3~sBfxuy0>?0GDf^hBxM~;>3f5iuNT*UutNXo4XVYd4mN#VX z$va}Qg7|_ixJkfc**lS9Fker#bIovDJRIBT8DlGUMsktznR*$}7V^jJ9=1?z#iK6} z78L%EEk57lJ_qlq*|OBdak&7b%3GTAUgTzwlkKkScl;_A!3IqIUV57B&y{GIC}l7_ z0EQUs#p3^$3I_>GKW26l;?ao5E<@kywzYC~%qvM_EFW-WRYoosG_LVVqqFnP8&$gy zv17)W4(+s}8z$Fwg-qFN|1=+*$*u2%kZ(JY%g8u3r06uH;~rXJ{oquL z&(^erLAS@nJ`~mbyT@O)q1wJk2_|DRoM?b9UM zUvZCB(wQA3?Bq&|0y!IN)?N^0w)R95hn-<5jVj%KHV-!KTg_!kF&jYO@kzgDG)hx|MsEK4MDUCI9o z7QFdUN!)UAH45NvkiW*k$1xGE!Qu`WF*|`veOGatbauExY|I(q86+*3kF6X>;+vQjuyN22NqOh{)ikqN zr_LRq(4Fcibww!?+jU6SR=c^pUp-qC|NQ0`N?P&RJT=ccy0R?tm}tIS+4#m)_;J)= 
zQ?cGUOlGn@?$9GcNqD)d2B;I>e>-q%=<#s-Q>HQqMe=^r#?g3GX#!~ei)#8lSL=lFKX>Nl_I>Wcy~xQw z)RB(TUn*TyRgF%wpxT~_@{SJk5j;rrLjwz)iZb4`73@vzwe3{7iaa0xdI$^@y>!m| z1gL1;$6|*EKX0ebTI*4_!5IYiH+Oz@CYI1J{F5KcW%dP`7F)fSX7>&Q6C|^Vv)E>; zF^Mh1krxIJ+-%T*=leVWyuZod@!JiT29QDAJxMR~z_4vie-pQMb>I`DHn^}lnV zL_z35&d#x>UbyHN->h;!oHr-D4zGAaLcobi$nu4j&b#@MoC6UHm|=QW5~w zn|NTj;2Ui7P6ekuoiCW$jmwPW01?fr`YYqKv4P%&X!*f^S#N$jQ_ZQ69iPW^NWoYU z|6h>}U$r72CZ!q`lT?by9(7RURW#Ti5IX)G^D}K!kyId@EvZ~f{!8n3A-?Dif`W=2 zL}A5Dkt!#abxvlPGDXKQvS}Ii%E-rO{dlk%xy)bB{^Y}*Lo&l_9vU8iLyr}V%wtSB ztcC&Kv6V;Of8~uU3|XsRdlg3m-4{5yYmD_`;bTLmM7$Ygq}kzIUrTa9)6~&hDVYmp{{{rA8Zv3&aL}H zb)V2s5OC#zB>DSmo`1r*Klrjw-oki{z%^xv*S{;!K8(LuKmXk)v<`M6H3W_2V-GVM zvrlycHKSdu{Z>LLRelCfM{ECSZi;$X*-Z)2<{(Ev~;xmym9+hLZ zV>u=6Ht8QjsLcI}y!f64um#Z7mfhR2cYAgC{CI1yZIzY(yth}NnpOzO5u9T*ZCcxHCqx0>?xKXS1iqtXl(fFz;b#=Y2wkp{VswEjZ*)x3YK*4l zkFzFiBYl)xDhJe~5#T^jI5JebDi~(pDa>?Xm)p^EPKL(@KdYNHIrb};P2(TFFSVaH zNTmy00t_W46akk2FP~bf-BQ8~a0(1#GNq~b8SsBW7=Q{`pi1AT2S7a7zCTqE;21;1 z5r94-`~oZf0!RdbCu8uQVL-+!iSr8ek@~rMioTNC&W~j~rsDWrFef3+i;d8}NS)ITwY3Z>>_kFbh>O<)+qhlCqs5 zFCrSwS~{TlgJ0H^7`1!}8#{9J2lEOJ%w%2a=mUKwUqhF#`$czFwXNc%woVnfmm&Wv7gCfgsBrjxOUbt7hga76cd)b(C86pN1mVfm9lM-%Xg#7~t zYo#ly1N{-rk@x+mGeA6Ioxpx!dOhC_QcqO>btR8a(}mp!iY{qgdn#_JuPlsneyTiZ zaQdb@S#hApc7{B>%AX7G=i0pTBgGFjE;aC1XFPUQ-ml_~*?LxeOR+Akh*dH@3gZsg zF|vK*uaKo*PY!!uO2=3|f8V+`sB4w75!Ti)1Z#(F%>GyKCo6W#tf-A zU7by&EV9MN%`z3K_SllLVoGSHMVX&hKvrv-zX+=(2^Idb^vfGhupP(X6bt<~=WFJm zX)SdGZ;F!p%pqdM4wrHux#k02osotL^rpIYf$Y3fbO&vor}5}--kW=)*&_v_DXLJh zx1O8(d4+@$hVla$rad~@3rV@x-u|z_OxWzFH+w%BL*2_p zTTRw-ayjZHe&0DXQA@JnQpUYnHXsQB;5|$xRX|7tcWKCPU5!R}kC$6Vt|Iq`YAmjn z6=l2EHf(~n$=^E}-#~wPgp(UT;vv3@C`&JJByHU);z>KlCKgl5>l zLxsqS#3tHD=TDLXlVEby&JY4VGH3KnNy4D~u%gCxw-LH2X4g6yi`n#^h-**fB)%ZO zN)`)|(?(Hv`!$;2c_(r=B}%j`kPN&X(g^=i~YE0v#yNWz`K^~KiM#mp0v zxk0QbsRvsuBt3%M!(WLtmv2E17U>Y|?kOQ#~oVMxl*_tUX5pZJgHO zuf1qW&7&UAt_tc4p;MMZkuv?6z4mQ>{2j^duAW{HQi`O0)-vC3qV0R@^DaDcKH@_@ zn0I7ckx0NSMbK{d+qIz|R!z7l)t^GOiyksPKPzR1Y-7u{m+voiYjm7m8Xqi4%=gT- 
zdGDG#ZZ?6XffvVKwe)dv`~wV?_SKWn5Zx2l6jjeFSyg#f4sry{3xmV+o`io)2X$7= zL?OQ6BZHypV)pIW;SHnIBi^@5&_%Ej@fs&@okWPF=Ejxcw^hjB)+5i~*fkuibNW?m zc{L?V0=>4FC;IX6>_HgKvzR^h{}9 zdQ!L9@t1QJiAC5nBU0hEk?xr^5lhG;B+POVvtR?);QfQWgGN|$l7ULuY z^g&N|6`pTP>X2&rz#ZyFok{8-jWrnH%gV-*dbVCsP^oqY%d)K9wW#L-J59!<8yly= zq3Ze5d~Ijfh2|c17UJEQg^cbd@8=HLZ&1d1?hYet7P+Zw`_|4W>=twti1S~sjS%tY zlJdHH)rxXuV`tt;-_?)Kbluiv&G@w6J__z+q0^nhE{~*Wr}ysX7b=`{BS@PViePiO=SMic_cZ?I3634-P@>t z>HSEM4eJvpxsflW*p}Q=aSVD*Ry~?3p|zvxJ7m(fK4lnAl^@WDAFF26Bk7B2Na;SE zw7r3)<51Xmf`&mG+|EwNM@si%SXj#bd2OqoL(CmcPEiU&YkpGfcXUz0>e+8W0WG8W zBKrf!Yq4u%r~BbfeX$ekA>0tU#vJ}5%*8aUg{>m&x^>VONtkt1cAV~@8!3bba+m;- zOP5DVJzcCLSH%x%^*N`!S*pB|r1bWc(xSWJ7@2^^yb63xzOnXF)Q=o7uga6$K)0dd zl|Pv1&+|Py542ayng>_{l(sx_(DDB}Ek7Hyb13Q9p(HyDnu;~)} z)8S7t`OqM6M)U-b#cv%$>@1nF_A3tdF7NR+K~Ul}d}z1s5rfji@;C46Al7k>svvwe zu({*R4@fc0xS#IxWfA(S%2&s^moK)r#pE6sa}-@SR&oP{Akm$m2DD0abcLNHu2al)_fq|yGzzg&iu7bzWI;+q1d zAn?kP(gzWt(5Uzzsdc?z71r!pP!TC2Q;kD8S{uD`c0VVwuM1pqII3gXRUJ^9?J_$1 zgE2TP3Wh@yCz)wR&N3Z)Lhy^$2nhKHYr;ppQ10U_fX6+{+fsC#NzfUntP^4pKovj%n%``$|gIwd+$- z9rGE6^4dhF7Kk1BHLe;K`t1MP5Wc`2iLds8uylnlnR9SdLiWNWCX zVcJnz%k5LCyU@rnbyzrpF?#;?gHPnbecy!U#U*VgEN`8{z%og{**(4P%9q{6%`j%% zS_fNL(tE(sOS8$Yv#^jA)fA|24DAQ3UiP>LM~RU0-h7WR_d!)ELjE%4_H1L=7Y%vG z*IEsFTZZ{HqH)+@nQmjL-b(0cdUGy(b=^5q43C&M8XR^#PgcdU?rJD#sn+@q0Z)dlJC^r> zXU1fUZ!7}-GbSVj$TsQF5`Ut%`xm2^cr3|nr6KUh-0uPnF$=#kdrRtEf=oeHvdctFTtdI(#fzWP4TK4*n%) zNg4Gh=M*TM2cO^dv*G8Cr!dZO=qAMu#n#T|I&5yLgj++!k^G)f%mNr(gokoKNGcz! 
zvslM-)+1B{)SOGzII_1m8oBlG7jaXdu7$XBJdTV{49b`FJ`?yBiGC$(gmI1+G!SHC zq)h&ysy>~JM~tFxs~vOMAPwky9xqyE(7)Yd6G`#04*$LIqTPS(00GxBxwXSj(h<^p zbw&3h-B->e?CMY@R=iV`PPQK$8tj*#Z;yDu`fcf_>@D-FJ;^O}Xa2^Rp`DuWMPSen zOae%L8daiE?TAin@SitQDd6-R)PG9(kt+Bdihv#?3#j4|clwPAZLOgO zAa(WHH?}3Pwd#^2f;+lwKTj6)%|ZZ9qU|Gmx{z_FzYa-J`r2NRFk+}=>)I&(GYnFO zNnFA2|N6b($>nj^$6+w-w|swKo%5CZETTaKKhLv?b5CK}yECS%u)>F`h-0FKc~Flm zVDcYezE*$Jj#B&%Dyc{A!wbAp74*7aly<6Gu;==X4RMS;S;kSuL5zETLpT8}aN?In zv_Tw!J?ZdYMHZvB1NjymgrBIN(v49{2K_t?amzTQ+nAI`f-wV!UJ$MNzI}y3tf^vZ zc}{P!=&NG^9E~`@S1Orr3H;tKX#Z>t!{rPt5CiFKhhm@a;Y8gyBlv@hdF69IFki)XJrtmarpC*M=pF$A!Gb{ zmC9^@Z)GYRWK=bLaD*+V5MtK~pEpR@qkM|Yh5ci4Ke?-sa8*n|<_7wf-INjNwsJKD z9I97%?4Lt*6ZP2cIab+Mp~&6>J;L(uChbrcZvKlWfszD~FxnCRMYLC&J^7A>nqwMc zlr{F_0e*+BvF#rYslk|xAkaA{Y=LL|^{@NPHKeS6Pc{hr_8d>7<9&TGRVQc!8iV9S z2Zd#>cWW&xsA>;tXvTPjcqkbvKRZY=50q;eWDd8Afea3S2`XXpE7#UNKU&mqZv01MY#KEPJ zgE8bjAHF$PrPO%8l0Cl$lfh0D+<^55Y!Y{y&}yPz{}rw*ss4d3CDBpV#q6W!HY34S zdpQKJP(S2uJ>OGNi}LD8!Dl?ZcV$LVVP4m`l!kf@MX{}Yf&Xf-?~%**VE+zFb)A}T zlYJ*^90UKyA-KQ(6pB60;qqGRdn@Vi7-=-MZ_m?25Wefcv;nO>pCpY4|8^O**+%!^ z0Qjx!T?>&B!}N*#k`kyBiBo-$!DCxiOk3 zgA{&d8MntI?uPExkKI@p%Z`39Kv_8yI(V~yn(quBPUA2AvsZ;!CNQZC#b z2f^X5u*)!SBV%PhM6OCr6KY80(%lU+A~(zC?*RIj6-X81e3OzI!Y8`k>dfjSPAV^& ziZUepdft>Q%h@X~k{LVBz|0wq-RhGv9dd6Th$F8zW6~vzz`1&@6xRvIR^_WDd8fMX zUb=~U=reP@k#$G#gipB!J-nW5N#5|e5^y`YX~LqpH0p3?oPgFA;4Y~pbJvwGDKAt0 zS72+*%q@2*Y878sWExD^sd(^+=?}+{7^hbXa>g$eT|EPO6cBkPz&0kNcC^l6qRc2y zS7M*_^uN@*YRkAq9G3gV$N(Y!G5Im(Z)u@Ki~KOdHFo~nJs!|LIlE9b={KvWuDoY` z7~&#_{1!-OWOyxX0CVxR-Vy#>$odt*Hl7^aV{CV?S8ky89ZWt3!h*SNFkEk+l7EU^ zSlUQ!pfVxjDA|Aw>Ff?mrWh1x4)jhTX0or64`|X60a=KG`2CTsMNq8}KF6Ufc4jsc zLGBw9^abd^5Y>uYugk2iHuKPVDH|rrzT+O&G zgUy-tZ|l@ulrCHy3z7aa(*ZF3_?=_n*4STQryFEY9`j4=ir7z;qw|3JA||0#>kp#| zj$B2L8~&Z@-929Rm*yZ}w+Ui%%=*-9UazW#Q)jQ8$u`oy#-piTRhAOB0jct0aBTtL zZI*RxKx&NLhp{6jE8cbquFTO>FqZ~Gb?oV>J4Xm3N6v6SpzR^0pvhjFw45dor~(1;-?L>sG#;mR5XkAkgukP^ z@ixa{*B}MS)0lt--j%0IwROZBjp7o9D;HH#;&)Fs?;Tmf>+LzZ=}o*4CIa*7Eq^~E 
zgBSnPA;wQGtGagIag`iIUOh$%Xr0?W_9k)2nWX?pUI`LFXz7Y~y_W_~b%O>C791q! zyi?XHjfer!>6PIvfDwwKM#%pdK;k{4hI;)ROyg36 z=N&G}JVtuc>`N@ljqqP_-6f%WSrZ#8z0l_mYw-?TKU{6|nlC;kp_SKm2wn=`MH zs0weATlpWRe?^li;MU7Vo>LN5dS}T(#Z7cfY46YvQaVH{_o17jMC}^C7XGe~qdP;= z1&WA&zlIDy+j-hsdyeM^s4_rWD8_?$Le0%=M7eY z?Mmma^w6jJ0(jFhECt%R_d4O#_{j2igQ7>g$}^SIN>p#R8aHo84$-Wq9_3=5qsc^D z2csk=mXs#Rn~^5XWAud#DQwE4p`q17l4amdf5e13dzzueFq=zZf%8IcArc&7AW z@si;!aE)TL`_@Rm&+k`%@~loD(}LCloHpQ!9XYsNd)bISQqz zs<(Y@rxo%$EStKx?DO!*mEY=k)x#qpuK5t({dkJ>l6e4>YZ}3xA6)fr#erx3aZqua6F^h6~a&ME&DlGJ!nNKTc!(28x^n)cbh@D8 zpIFcfq>Gpm5*BaCPK8Avu^RuRZDkiQCQ)i|7CZc#Sbtm*Ac3dI{ql7O)F1o3<`c2F zi7Yu8?0m05BaF~7uL)!!H?>*i!_vM}NGo7%O?9ve%uA;**0z)uvNxE|H}vh8DcB-?8u?nix6^2(pWC563FA5;Fo4#cijL-+(+2A=3`&Ze zT%#RL^GoA2xW2uhqy|Sw*a&}acwY=sQ=FBzL=ketIr&lFtY=#{!5YRopY6MfghOHc zdjNaoSP2ERhx~VMIS2Vaz2#)MSxYpyX1vVG*`3BlsJPqYf;Bw4Ppg0#ER8}1mKH&3 zjaAait7g)#XBlR6?$+Kg_kP0+LZGUr4}B(%0c*ImXCQ${VSVKcG%`)5zL_f!-M@_y z*=Sez(<8pg0q7eLKlT8*hR1-&y=BK;hvHcT4=wQut-GdF2z1BJ3gxF5#cBCz6s4t7 z-`xDB^++{uTezAs{;lUQ9DJ^Yt(4i0`Hes|%dyznMb#Oo+_S#u*9LYw#$zX%9j zq~ghknPT!oJzZp|?Hc2}GBdun4}D|3+$!gsJG?O#H4JMXvmiQP9WDb}v0Q%4XBdU=7|FG&br- ztBtQM$gBEx2h28Cy}trMJ5ncmr9tmGCszb*c0A5#UC81S8YGK`zpIya|vm z3xi**VlU}Gd|P^CZ5;lfUk)`{$#WBtlamr;)c8WC`^DwDT+St+Qpg4vWy7ZOeB{e& zQ2d^Die&D-r-MpcFx{w-&8zV7os>h79xF4)i;qwfI&6Y*m_$rMmCDJJ5V9IZX- z!kt(3<;tI?r^v2I_ad&l-<$OV{@K;F#x=um^UJ)!^+SXzQp=af7ub#dCP7cQm?f-V zJ9eVGrK&QIjepX28e+R29s&QAr zBn2sMw9p8m#}ZF8N9|9JYwd~YAT`(i)*Q}%`RVL5p)E1PP>#y;1r}o_x9_TwrK0nB z;4;a0**ZpdFcmUsD_T@)le>+I@Oh9V_TcR&AI*{>Hide*H{D;Ix|Sw8AGZ8%w!D$a z`l&`W{Sb=lxQnI>f&BSf&eggo5Ldf*pz9P57BBd8UCiUj_OMqUi4v`V8aEP zCcP%4^JNFM!KL1SFdbw{R}ymWA8l$=hiCWpxR)Y_(5;5b)sw?=@nzH|N`K(5FP>MM zWn2+5B0iw?PRL+Tg{_VAE@o?>f=FyWlrQ9C@;*|3`Kw`Pbttwu@y)q~oh%Lp$rT_L zxhV_>fr+H5!c?fZYNv=W{QMCRP$w0jtNqWqH86ezXms);gSQ;%Z!?ULNk{iU()qPSXhAmpJC+|yYZg{N_Jdr=4sUiHzw0W{K>Quc2|k>m z5)i@MKI#uH!B*#gd+_Hwu$0D}>zZelAia;h@vgKnlT7+#E&PR~MdX`s2)aGHPr3=H 
zimZb5+-m!%D5#5l#zT}fA`SC(jyFQ*xw%puo|RPE*dgNq)JTGjia{96%I`M<4b$Lx zL-fawG^h%!6ZXr@=Au1fv5Ddz9%V%*bJtgFRG!^NNY6ug(A{2DF=HRsDEMrDK<2dk zI}9-*Lo{3qR`LA3;Am>B5^Nj;8d*i8MWX5%hW+{tu^*%}VZ~YslLwW8U&2~jyQ+H5 zM|b0WPhkl%*iAsU-~QXAw zAo%=p?o|5KyLo$NyYVg1Wk!qq@-h5lT836xMuv`$L;rTV{|GqkxfA>rXiVmUj4>BN z=`@q%Q`5R;!7+5cIS9WA(z2CsM=+WvXd^4r|9t=HX>2(A)hc5r2s{!a?QcKk07Pa* zqKm{}V3<_r7Ne%?1VdIU9xeR%vc{#pCw28YVqBdDN}($y0Z@Hr(CDt zjx`E;F*3^1k_F=KcKg?~>FcbFFYK_p4_1)vu$l3w3|En}NKtuiy|AA??Joa-hS(Nl zX^p?45xp0%ES7m3QMDxEiehx#vzWK6jPLZTMpG?vZda6es7@wCAw(fi@pBFMIF08E zW&@YTK5>yM=Lw=zwkp-gT;|7ryTt3&&#THI+ZK8+n<+kSh^yrJaLZ6Elj-@@bq9k% z01xpeFWLv!wbc*LC0%zO2j2}V?RO)6FJn9?a^JeTmy{hF935LPJJ%x2tcVbC02+~` zMfeRB6+lB_x36oQ(EbD9=~ycL3E8S5Po;8^?iCtPC!`c2l(6du?J;%L-d_9#`6JIn zZ|!4fJJ?x*#dyDQAWHR~Q&woYfShT-GOnTCq5$f(cQI1|h`$Mpr0VPK8C`(dJduiW zs30|V?WK3qBaujQIK#W8AnPFLYBZiaAm%V&Jje#3L)zUQggui)=9^jd7%gH>po zps7R7g}ki22lG?pB8?P|b!M!;y=|VMQh*vYxK_#{_;RJB{8@$T+X7tPVV7y|`egKK z24vNp3*EU_ z-i7W8VO=0fEsp3sTUL8G!BO9!)~?Es$Xtpo5KuxYM*kt{S?J{b*65Fs0?vC=!JHnL zr_`6Q&!}v_T!UnB$p)bEl0_>XTDU;NSiEg4y47(Jt5^f*Hr4hsHX6ZL?xdfATA;ax z=Tt<7DY`-fa6KlTlVP6^cI(jN!?5xoIGny$^v|%E`Z=`w;NSCi>WTKaBhBiubPl)` zijRUzM7YIo{dW!-(|&F&z1QU%ZLhK&c(HInRr-9SkAnTBh{31RfyKqWO*7`Hhl`03 z1N*l+9T&!jL-#(J->PSxQkT}_EYAYR56lrio!^xJT^=G|7E4IJVJ6kr{t6YK{m6Z_ z{!SDD#J+Oz%*OXDOpmD4mvw#^p8g!*lm73@NxFNxO&!2u^{QF2no1nAbY^yukbwi!fp0BK5dEPxm;1iEA zwY|gZ0{Zg=rCMVe+Ddgi_~fEmwdJ)uGlpJ#>AI*6bSA~1uqXV|xl%U?Yy9^8<+8TdQzRfD~>{EDT@DRX1P!$8kETgeLz8%EAAiiU-o+}(7(f zm6y@HFSFL%Ni^|+knE9Ng%svyV=LH*T{v!XDD zoaeA__2|l_q-qE7Ud^{_Uy9g1JRmDX0Yj%e>#m=_va$-KIfzWjfbtb|2VS)Qlf}?! 
zWmZ5N8oa|h=nAcb0#<*enHnhl=|6Z>3LRZHa$IUB!9~5gI2ymN9c6 zN8YjhnX>1D+@bQU;|GKNb*lI2BTtGUn^GGEB?Q@v2O~$b&t&P#i#C;_PlgUKn5|+Kwe4zTx2595SrDvCbl$RT z|7;pTkhBzY1lQ4wA-%LFG@m9<=Zp+Yt;E8N;!avrxWW%TsNIciS+A>H#U2a2Un^%gJ{a z!fw&>j{OC=u2nz({8_7-7*WtA%C^@J7KC4#dh;B-C9rSE!GOx;VUG%X0N|e?M>VHo z(0CKHGA#+IHZpx9hgPlN)1Q0hSe_uJ)S}uj6Y@S0w{&63?d|>`__bI=^$bZw=d(mA z!6Zio>de;zKi=82Omz@)f;J4MdKVl>DK|AcDbQf;Hxz5dVd7O%jM;&o9yvVe=i}s@ zcpsDuxyinF$cbdFqa$wFGXWek5X_-9sO1^fN>-?Xvn7Lrppl(#6!Rc>;Vm{}Z-~7< z@gp$fJ3S@e=h=`v;V>pl^z1F)>6E|{6IofYAH>mh1v(8g7MWk)9L$-%qaB@jnWP3! zq(`>S%S+chRqr%C@EAzTszE&1yj zgKnc<2|F}rTD8Y2Nm?8w`nVM%6~bGu(l9!~+Lb4KLAIYYzg-3v7{4G>++1yT$gbOV zi#Ch2H!o82^m0Y7OL!h^e92MH;aX<%Y8%#D)|R!0am z7hv$`fm=qv)Gr73*JVOIK9Xlzi>pRTf03E+?6)W=TWb!g32UOQMS)(;9Q`rbs}ELa zwlDkN?E=X<5%0Qk1Xv)hC(}|^tI&bGzEF4Gwusscf0bn^syQ~D^DIA+c=qb(i@EcL9yqziezd z!6Ln&Goq|@T=)>e5rnJyzDD>(2Mf2u$Ij$sQ+7Mz; zof83L_7iM}NZJ$4nTFWa4VlP4Ml|Qxq_BKF%cX&@K6mzTb>srVOw;h@JC;wg`AIo2 zPq|K)dd?i->w9a=v{ir|{eLwGh@+0mYLcZUu|Wx?Y85q@*%AnzYgh{6I+r;W0cCQZ zH=uHTxAw*5e|EEsKV$%p>LGQ6n0D?T;C#7QrPN`>Y^R#wkVhL}V`)Wd!!c*@Q8(5X z3N{C5K4209QxGYMeNW8N}JZ}*c*R_u-LjGwtA4@*%tRVA-YuHk72zbcl+Q-PL!1{N>|zhvHTFTLFd3J6f{$(Js9Lb7&Q*!IXTp zEB}qC&xgN^qpvX8%QaXh2td&p)$-n2um*GtVlSlLt5g1h`cvP5z^9>WfbQ_@#S^Fw z_u%bxW6mx%Fga`E5thx~%l44E|Efy#)+_nX+>6`?kwea}{m#eSWV5-1siH;LeTI#k zEICies+PYzeb8tzD_A&1?{7aJ>61K)aBYH?@7b19f-k?kG-^LWh|+%Rl0I&??R`0O zFmmwaV5D!kPrtS~owa|jIz}Yo5!MZZEat-_yuTv(R>T^h*pls)dJ{ETqdahpCsWC9 zX5pL)`hMCuwe{%v>vlIxli{%PjY|20z~jaHN-wV{epwdI_sS33FN$RCc&^|tz{^!W z>>nyubW+%w^FCeS!-_bVrn?AaJegbtuRl3@n62Fpn^>#rX0JGi9jdmByJy+-bx5GGLIcV6;%mEaDc;uWo&{BJWN=OCvqLc;kE&d5e!6)C zSGQ7WCmw`Z^Xm3!Dw%s{rK|5gAL%LV=y;>#TA+@-ImW}EgRKHkvQktjQY&aYQ$eXT z!8$)cNGLEq2r72{PM%8&^oXr48~hWPyt7IlvHnV0107=C-PRl#=<*)5{F(ZlfDZ9J zpbrU_3);bZ^qn0PBj3;^(M$73onL(z(lp^XrMp6%3!Y8^?z9pEKtP6NhGWE%8oa!j zSafh~{M*>za%|o3(#4fpPto)>R3ZcU$f6mm4+BHY28*$c=EK}qWorVumMuT);zb9e zCZC^S7KeRU)>r-0x0T$#s`5Nq^t(e2nnv7|PXr*J&upp=(dirVW)BT7=Erv^Y;o+L 
z*@d`rC>omN;|d$Yz<=+8jDqs&X@a(Uv$tgJ+Um3_-tyG{BkZk$A`6zTQQRB%#+|_j zcXx-uT?QN6-DQBm-5myp!QI{6-QC@8pY#3qzxHq+_CrT>?1;==Rk<=NbFISQD95Y@ z3Rwcq-$vr-MI@cbq5J+gpTK+<2wiL37%@x(e^9JRVLg7t+|e^p>;Yl#E=3Z%(unwfTHF=;k_O=@U_s6nm z%ib2mA)>gY#p^c+YOKN$i{%vayBcZ{)$OzQpW5cilfh0N*028}vVILB4sAzJhSEP;K8(`|&FQY0 z@ZUUhn*z*t0Rh}kGg0jF2_C}a+u9^8D1Cf8g=!}|kAD{il|yxjRT4qL_y5GE!qT?= zCI|tfp>yJzMz+VCf0BBU6DCAw#Xx$@~;3BN|u4(Xv*fu z!bkj&iD^4`E0eGn&~8tHK1>cg|Fu_qm;u5;seiYwd7e%;#F-daF0_Bv_n~nUSbcFl zks#+)u48?^tT%(C3425~bBUTjqq920!1Mnu_5bFt&s6-Z@B zTa?i~_~aPvG(E3<4JB`3zyKN@iJX6jvWlvMt06vfe|*h|zg{4DSAm+1S6Wm?f!z>y zWly)EXhRD@C}5RS+S;oZxB!p7qJvL(SdYH_r0S0X(3ITYDVC!iVUM6y5zvqx=-(wT zaVeZ~hllM+oHgoNqd<~kT=oRA1AyPSfi@n+UdblMK_M__GX@6Gilw^{%9=BBZxH$?;$3XqO4N$f!v2QkTJn>kK9Cirkc zy2t@0mP4tjRkz|inO2rzY0qEhdK)0cp?_Z*uP1!8bXivwcTmIQ&jAPylO5|#aNO}| z@N@xbL5i{dfxCEuCz%^0>Pp8_edvy&9(Oc>%JRF1by{wsCDpC6bhD!Qafp# zVOM4;Sox`GxPGaCD7aTdpm%`ku_$%f3#HX-W(I8{@q1&DB>`wpCj?d5?k zgZl7N{DJb)!q>%O;g)&gWUa(gBP?YA$SCd=`!*hiC#M+amwmbJUJ{9m`E5jT1~t-o zsQ1+%FZ6H^zqzLZlSz}1L)!r&r4s65;d+qtkHueL5O8fR3ITc4ae*=J%I+FU%uh(n zD~JS2%+7xghGp9g()M}*uB5jnyxNA?6P52cth7~mA3nQs0)l90WyhJ@h$+OeavM5K zgP$0EJ@-%WRph%leLjoqNqg27TFcj-;nYh(kCGP+_O}<*MOr@3<{l;v==Yk=dhS~8 zG5bDiyc=K@*$>S%*)$siOxQ9a6*KTMnUXM*j*W}jd(56~k`0|J1L8z%5@2!1V`w=X z)?(w(&@p{Kw{8L_;uOzKhA0DVi;E(200^{fkRLe zLE`f4`IpCAP0-BW#3^LWaI~La{MSQMOJBJ9@9r+aja@w+Wb;3p@f~+77CWWtRN4() zmMsS0bsJ~(y{Ms7h*1`Y4GbD!|B<)gIbXN(=hJQD_yhupzx{#lTR{PKehyw$Kn1C6N<-SpgCSxiI1bdnkvAn z_=CT293rpEIYLt-vFvTu$I`XK&S&{@UERugIK_4FH$6ox3}xwaI5S2?tu(~(nAG1$ z>51!`MpmBMJ0(t|B#|~jh9eb;8`@(Gj7^DFc5EnPf^X>3z6{kP<*w$9-|?}cgLcC_ zT0feKG&W)bchibW*WNo5mw}W2wy1GDMOy1lR}Qf&n-!^w+}k1Df>&QjG1cTRSLk$i~9ooEx$G;hJh&2F~njf(+<t4tq+aLI3?`BmPuGjl}6+NgC3b zIycKJL+jKlq$qlN+E0^C&|Th}y$Y=ZE&Fqf!mcNJxc^W5Uy+2PyJ4xzk^{xkpZtPy zuM@(PS0Y=fPT1K*OT~^NQ5!yNWaD zou%d^ve1AyZ)H-QUA1z6PfD>4-w_zgG4>xTbzP{k{pbNgT>-f$n0K>l!U(ALgv zFGw@+;;v;K+BX$B!3Du*eR>woICyP&`uOQ>nUt-mn|&J^1q|I+fTCF#FTB#B7>JdY 
zx3Xgwu@=&*5B>;4=!Xy&UO93#eAL(8$Zj#R2o`*X`SM$_i^ew<`)s-hLkz2A9Ae&1 zR@O7(y(6Bg&?6f#N7bK;$|(J=f+iazV%ihRsO-C?8XU<@t8W@_;@Lw6JsifJnDN?^ zi*D6bX-`I2vf=X5$;ZSLghSO~<@EiOPS0-4o8Y|@#Gd~*FF&b56-t!f1}%>Ro2S}l z8h+8Y@<7o@B?6}c8Cb~`Fs2^(Xyv{qVgzBs%d65OtW~v2=V?TlwF48Qc>h9x1Y&te z(QH-7>ta$7yu?T#B{HakoKxj`VUJ3L48g zBvW)AU{cM9)Dg|7zKP-&@r#JN7#X=3VW%9yDs1{EtCAZgsf+?R5)16SMTx6hlPXPG z40fkM$Q+~_n`O1cW&k+N=GRkvkV!R7zh58vk71|6JGpy#J9_0SN;^WIR3az#g3`OaA5t5O=}6W^xt)x{+qb(a_y80Z^oY$4vtvkAA;dfh+( z0hYf!YN)GM2c^IH>Ue%*`)a%|i?r`TusoXZSzf>HTb+E^*-B}6Tu-i0op*H&Gp^jw z=B-JDII<#~+{>~t;?SziCq2+HRsX-7Z+ubu!kkeyN0x=}#nVczcTPIDiWZ?8fq1bK z&GA@+EppV?kvFM+%D_m~B-t;C(dp_=5C}AQ6$g85J>lVT|rp7Lz7YYSeq|`(&@nNG@a6 zvkBX`9E=&XcN!XpROGN1_bVIg_J$+k!|_ z+;2{@;%;&TSZ>fB(Z4G>xkh*na~>o`K~<^I*WN*hqW)}#Ke=bmPtpwZED3&nyq_c5 zaXvJKak2D%k4yxB=c8nVx!q8s~~Ss_bS~aV$X&txuBY(<_(|qR~Hc zK2EB_H1hgQaH@#d+mgLJAl2Ss(GfU(1*yusexpbV#n~b8w$X+7;7_>B^mW%EPN=L; zymD5Am$yf`FNvP1A9HMPm}%K_;4n{?Bu zvxIdP&>PAMG*wgU_CM8`{8ym;!fZ5Q!6X>c#H?S~w}+mQ$yjZG*@%v6>FK1;!Z3a? 
zrzU54pZ|M0D&G%br5RCorV`$qf zucel@33Gl%plmNrm1yZm#r|9(>@O#3M*LFy>#h?&nN zL?8BataIZ;6*r1JX=WE=>Zd!>aq*Rj`{nIM!ue?bls-uGdrn#XOL-Ir&po*AWWG9l z(xTkMlMe8pl$6gf`VbuV?o1xuiK+2Jrw_w*hN;?y;y_EzeP9!&yq~W>(KM6pwk$JV z1NsG=q7R}iSYnT)e$nS!o6M0q*9uVt%6|j~&=8r>o4R@z<*^(&t)NLj6 zrG8sML}weuWVT0`ea2ijlncofjv2ybgHxxX>IXQJm4&@NHz~0eIY3@alEc*DA`jdE zkB{?E*({tA5BvTM=2up}a<l42k!FVG#9ba>cSCd2_EADhkg*E94%2A2(X)o{Ke4KAD z@ogrk{tqB3)NamW^9^bmELjD5XHyX~J~59ocWF(kg9??uQhC=xlQw-4iNM6!wx|@+ zpc%zlTWP1_?wK2>P^ZaZA%-5*)lw#;+`i0WZc>Y3d?=`I@F z%0lLcg4l1DG#GfYj>Bl(JBXeMBX58Z$bN=`wlwB`kO`y&=NyX-&3gY?8qiG7S*I_t z+b};V`~bGg=60w)^5=p~eSn&@osa_yP%(3p#5T2&Mb0!^2!(d?XQCE~$UUH!gZ9s# zLgFIvlZjV=R|hjVXZuR_agg=uV`%vJTJg{8-0?xgr6~Hy{~Q z8kK8lY!(&ji)1MNs9WaruXUk`5mkqln4t6N`N^}k)i^S&Zw;KzO_Iw{RcqRPE!Fks zfd4z}UDS_SZ6@(XvqkJnzpK42viLAACz{Pbn0d2#ZicnmQrQJ*lqm8YlP6b_vshC)j-e#Ph9H&zuW~ zVrni6f9S{**P*n_=t9+CCN2jup`CPlqwCO5xxl=zHW-IuL4%x?52N?Hjh`1>b25H# z(*d(Z#;y@U`x-z zz3U|fH3n-jv|nidKWKN?CywK>Ffiz!Zo~19588MAn7K32wV-Xs7Je^~WU1{sHE8We z*Y%ya{0lN5*T07L{$Jm|aTf;CD@a&CE^LT_1KPVL$HfW%0wB5_WNFEKj1ce$b6tiT zMuYq~Q|tcAYrnmP4E??#LOWs}l+|U4Qv3W|Rc=j!H0v@Q}U2PXw7OSoYifXWg%kQk~YKMJ%<@ zL@mqoSV)$jKhwIG;d1_{qh5SxPRb5(|6+KYJ$RsTDDGL`2HS!M$6?}g7L@Z@H#`hW z6C}Q}zBsl-915|J%s>sZHg<^R4#E-$Q|0S5;Ce=!q3#B2TNcIU$@?CmZ~SNcAFX?F z9HH7jPQ!PY%divm587LY9KHZ54zn1Y<;*R7l`N(v7!t_=stdf|4mnkOjA3dn6T9-# zVqq+2D!oQ%53%3F_qF7F7!OuDK=R4Hteo5_ zUh^q!cQxmqn!EI-ih}HmC?0a4gY^Kl+bTBHfuVdN(uOO}echLC*v_6-HhW*)MHLeKWQU^)&xbKoL7_#jK zs|zZbB-SbPnEsYz;C)Ai<$Z40^*eDYKN2;zQ<6}rbcrw)9N+x_FgBG-J9l@ic{j4x zw?J9+I0}p#Nl7V#p;@nQ2`z|@(hI4bu<|<}q!3l3$k1BCqeAxkkztXcGAiDiAYZ6S z6#&}EhtAgz3=0fPt#Roc@zF$ch^8)s(iB-CTh3M$QvXZ_mvRp6VoI2%bacQ6?^DHK5=8PE>u2?L21f6?!?kp>Py7jGC<7FcmAyk$c7Z<1G zjyN`aBu^E3FkPB;v=)Akun!&j-<%()tol*A>4S9pK1Rq4N+^9DE`6D_)H{ zm)9^`_YuYDyODC>2Pk1{CmD7l`L;iZ z;;d`^`Sf|Fig0G#F`PwO*1CcrDK68qSZR zwocJMDE<>&jpSvcz}tehD&;x})bbXQ?KS3L8aAPR+z!n6FZK@hW|yR%5G^Fe2}bmc z>ZZ`fs?-(@L*(&y+fZmRSd-t#kL2(Psk7l|ssk 
z4&UoGW3{@6d)?#IVZIz3d3c{6_Y!!$0*aoL!#naU`t-Tfcf0YetLC&zm}x=bW~}%w zPCw4)KZQs1`Vz3P!|hI=xW|Y83Atkj{VoVS)DcJ|&7{6T$Eb*V;t@aBfax(oAS}+Y zjfo3SgvYGR#{ z!jo>WhpbR_kH&sg#oX_^e@$OU?)E}`C>?(WAVRA{XB2Sd5}`wIDgCD=OJ_6tO3j~e zh=E%;>zC!|joP^~J*y3d^3?%kLcSh#W+ zB&-4N~rW-%l~);*PJIwT7QJX&J>+BZNFS4dwuL=Du}!# z;70KZ+tCF76Z1a2^wW`M5w9awVL)n1#dzTMuGd=aJc;4C=kHBIp5|YM`<8C!v7nGeYMQkopp6kkl9zR3w8crx(-|JhX%c6g~$Go%Q%w(H{C&$lisCF@r zhO_q0OPKSq z*(it#=~NQ006U!;cJ2_NrT@lYOS)Djet$~%N6zQ21-%JHd|}KlLC;lH#ers2Q|Q$kI0*6(}u0Hm1riag&Txi*ycOq4$W&#%U{{HAj3Zm@2TbTe5{@W@rS71;B1 zTNB*(VQH6jZUN{$)0*E|R8AHVFO7Tf(DErmvW5|9b@eu;eq1EnJ)I#e9P2NcyMTBi z|IO(PmEkI<_bBMs`&S;WEl;29Ev`4y31chXl33#%3r*zFW_B4Xn&*lZ18MwAe_*E7 z36`DYpGuyf9|j4Sz*w03$qWZk;;Vfa*aerlO;XMU&g=cGGa^hH(Bb(|dj_o7Gm-&M z15Y3C?g%$AL($+AQ$Z(g76rOxmKWd~k!kd9kxqb>5hNFn*h| zIW-B#KHd37^DJ5HiQn$ygI@s%G&Z2kAZPI&%XJofkoANn)SIMtG6)lyU*G|wq4In! z%kwL7<#68~uiFkTb3yZqV16;AF z1ivG862c`f7bJ!;hECuK*P|dM@1b08f%zMOWbGWWga?SwNt4@t2&L93uj84dYZ^*u zkEIz$G@wdMY(hJVTAi|~3FSiqiat)+N+CnaTgdI%v$88o>bARftjsEXYwoF=7B}NI zE|f-~l8{(!4C%?YnWG;u9f@PQ^Yh~+ZTDdUkcg#emwpw})j5$!G&Hz`|8qPoD*Jrzk%*kxn95B-bx7%b4l2ijSxCkjp8Ha3gD(# zww)07cP&;>nFZv#6afM`*KmDTUFQ4VW(w0zzVlqu1aYDd5Df6TQjr))@o=>3fALi> zpDbNRq#$-HU(yaeCe1;_vp3PG?E|DtjxdH0{6*cY2+U*7L`eBi_(*rC!lI$Af$1FE zqHa6v83~&7!zM`Ou|IL9_jf%L>D%B&Rzu*)Y=l|=!Ke|uGIY>QVYGwc!`OqX`Rb2C zy4(Lf$V8=_mP!qS!~Z^y^&?)}6rILl0vTc;>$hxXp_6gKabkoFg@sqQ!ocym_Jo#= z5#9wY!fznYFP_tpi?Nj?&s9fv#fF-5bhQv8>1_snWj`u*ZhUoWf={0A+u^^N74f?L z+7-8Xw?&TL@m1aj-_EFMp-`6m){T2TlLYCp^JM~!n0yHcE>3<5WshCm?^ctKZ6CUQ zLMX{XLSce|fx&^HLpI3K7x^c6f_^1{9zrkzFx#I7EG*1621cefmbM1W1}1hcw$6^0 zKb;uu9j(-Uz=J`uIYa2MgB}-mL@;ouM{qE(|GrLsycW5a1T$pE&4>z$aUG%nEfA?e z-f~aKmIpVV5t*^GvtK+VA;IohHFLK1$D(;x;Xk@7q`jRQTa%wVF{9oQV~L_a1%dT0 zGCHK(?$lrm{)568Hy$G<=OOo27caiLSAS$l!|jgRA8p%QzT*GEu)7N#bezD33;aI~ zicX?skwAg~Ywv~l|1rqP!v237^dF=CUj|us!0JwIJ#p#F$j&liU5L+Y?$sMyELl}+ z@CKtJkV=aTD~O=|i?1N#5&1@1_%nn;j1+#yFX!Om?xXA7v;AuCLEuv0Y20h7Ly?Hc 
z1}*z*iP+B6orxG_>iKK?4;R7P!;pgsGh6#Z*V$(%;A`qtw=!=2-f!*u8poG+Qrk=p z$DZLJVBsNP4-o4-LjG2giYkr%RSt=$9rZY#$o#BxvE}=SO~x1S zen|`)aWPlbN%Osb95|Jsz_OdGyc78>!9dPt`xnKkDJtmekWaB{J1bnDz(Tg%C0voU zw?IQ)EXieW`z6>$lC>P^X0>SgthCr6_E#Zp9sBHc&L^x2Pg{O+|BfG5WCKdEA`JJ)|3l z`K>T~D^mGSdg+*>CthlSzODAmio1~PUgU0!=MInelmPt6=W~oZ9s<%n&XfB99#TFx z_YwIGoak8Jm!Yw-I_h0Mrmb$Awyf=$xRg-uBMO$TcOC4RxSX6qqgL40?HRo8LwH{)5eTb&Ci=G89r#wp6*6K)XMK0B>V=_UCCS>{^N z;)U&(i5OikKbWNrSI5lTGIPrpFJ2#wu;i9E^Zs45V*I5swVSN{A?g6g!fM&d^q(tD zC9*6G(1|+LuOxN)Vtd~cx(1KxY6d$vM#0prKWdfnsKhYk175Oq7KOi86nNrHWD0p3 zw*T%{X9)IKiGZ{t$UBzQTkCXzQow35Q{#mIjc9`8nxle#qOddM6Oy{gS7@~da0pcC z@jo>N?HQ~$+3EP1?*E3ge%hRwUlczjURU*)od9=ia{V3jn**D2C8EBR7t9=!1>fT4 zQu%q^2+X>IeJM&^a@CY_LRAs`JJoCU>ZcL_z<8gECkcW1!X+(BuigiKh}ko73#O+o z;T;g8#&S{RAiGQDB_&9E%EzyR1*GQf7du(*!|SOAo$6?w!j_?X;nk3$%k{oLkFVfvoc3+i z^meKwP`_MXfviDqnT(zqboTENF$sCDK|0aZ|!d)^R%%4a(fzv7!JplW=b#3`sdjmh><2W6gZ zX~Iy~*BFJ5u27639>%KqC8P$G@-mdRlNXrZI4ZG@3i&In=Bo*QpG}c%wOd`BXeQts zzGzYCCTo_s^DM$*vFc0E<15K$(9hTLO5qo-lo}r&oh}vw!nau;lgd0*?{wJ zh}N+^*39r_G&*?LuR`&h)3WLW@1$_pC(R#qP1AaNhonPxE@0dnb{A+Hb7<|Spu5XP ztlCMFQSQy@DANOvQ)Ceu_(Gq(McN)(xQmslYi&kvk$fuJbth&gnL{?qCh*9kU%6@? 
zRtKAhij_*)=1meR!a`(;8irod!F~8_x_myI*M02PmNX6yNvhPpn?T1f<EI)Z1!; zP2n_suqQajntv2L5((4gV%bbKa~S|LR>Q7i1F>@NHhbbc8TQ3GW9L1LpyAQj|2-n4 zYqp5_jpgO2=c*=K@$}AghqEg1M|xR97xCh6R!gaVAv&qk5b56v0X)__!w8cbU|yte zdj(y~cF}JvgQ|7~ zh=2a=j6-huUAmP*D^85sl3Tik!iLgPP}*GjRb-YaK3F;jodR$cY+J>T6TMr&6AW`D z;)xBT2R}4t8_q)yk#pu4ZeJ78T`Aj24ga1WVBR9W)AS6-mYTFS%Ip$$lv54Q?}r&s zo|8EBbKEs7GPE)7%vW+d5T9CNhTLu0psUh!QV9w>ZH$0D^$^@@NYuK4olBF}ezPER z4?Nn^JfZr`k@?~@jAy!4#uZ4|gP`p8hIbJ|*{tEdNbY6dkK zoz=_VdIa9@Tz&=~Z8)aIAq)1=PSkp*FaiQ+b$mnPj=LJ1vS#FB>7 z?OMh7FG$B`H~WLs;<<|x9Z%KqH!&$00&7An5Q6QZvJVf1A2x!;6)6<=E+p0yk0dY_ zn4{`X`iMi65Ey7dW-`zK*7_l!|Ufcu^Cfkc3!v-?x895@Qhu2N#OD|OEhtE;rl z+qqC86~3}m0(gARbRStLAkK6`mn>!kqU?;`J<7kaR(sTkCD z*6eBW0nQ=F>0+8%<0xj*EEgn{?)lSb_RSv~>* zU=|8JIxxkE^k;dsF%tTbZ`IM~0VgSzJ$<5kqH1Q@+2{51h~{{1-XP9E#%H5oY1xOx_g< zT{q(j3ibZph%D?0^U&KJm3lI9^z4BZPxq=_&fvo94n~+jwXq$KHK#kXbJ+k&mmmI; z+jJ~4HD1EygTjNPA&>bR{PfNW51CT(Yk|%bw%3b;vy|fxLTcApVro-wOtt1c#Mad; z-}<>vT(@DpCbv>1rPG636u)(sJ@LW4V&OJs?-^!n0tVgQZ?=m z7;uZ5muAX)4{*xpL1}2i!(Ybjw%36Z7PhhILo z=o_l?o}Gp|d_HUTnu|0?;V2mwhmQNjyU%r`bnzRddEKwbLm8MpU=#bE!xwlWzuU93 za5E7;1{Y8YkSq*J^0A+D6{($C|S>%q#L; z?eIUlwExq1c%R-!pZQt8d6R|A> z-y=`I-&MQ9>=H@sEJpvBbvi17pa*ls*R#N~KS|1tJ=iSAM{sfueMd{IoDuTv1=jBI z>M>9wiN0J28}Uf{($hh%8#wOgYMsnzw$m&(Rz&WwS@Udv7a!$IpZS-*N5$(3z-jHa z${=TE^xY`NAx-ocj+c%E|Cds9RIFG&xbUt*Nzwiz5$y^=_!Obk;(H< zc=VVZ)@1A*w3^6g^Y3svKTXPw+Ut-oQZXYN>9jci`Ep(x=ZhHsGCxdkG~l7K z;+K^>c?zvQWn>9Rch*f|IuYQSfF)yWjg0oXh(tm3EG8(u4{Z&m_1)Z#gztE3%QtQB zF|i9(w+TVat3Wq^cJj}R_KTfZClWe$IWH_kN{T< zTGOg*KjGq9v%#OY?MTMpj|=vVoq+jCHaL=J?`O|PX9l-zA07qw4_h8&K5C4tU1Uh7 zt{mk~SxK37rVbh4+|W0qKZ{Mk7;ZLP=IlI*`pkd==htz79E?wb!V=51e8oM(hnB6O z;z%VQ%%JjFL$8e-@fmvR>s)>SCP=LPM5_LjV26Q?Yf=$Og__6JHb>v*7t;xo9>rb( z9N`@t9-QQKt~al8v#s-c>)&!EkyM!7Q!U$wL1!f5MB23WS%AJkBuYc7jHC_gLF0L; zt&$AhEU!|qTbHxI8zSF((Cqxo+H6QOcjZxnN!-yagddm&rPKxq@EQ?vcU1) zcLoiEKAUl(tDC_JpQf0Nk6nbUDMfag3Xt5iamrmzq@EFW5od&VZOZM!hprLAKy$*e zyH)=97@C>}YB3UZput7+*liT2Zmy1}Oka6Ja5igFBXHdXoJB^svSe>Ec02s6LT~7? 
z9dqQJQ`Rn=Zy%vrJt10SW+w8xp|sy?uk&K1Cl5M9Yk^@Tos?4O2sIBj@utVEY#5z4 zi}sflb-)uJj+8;%9!${Tz;1g2u9V4CPOAGXV0nHsoD%Y6Dn%yc!ZB!<{k?_N8?Tvs z^n$Wu6kw~Dmi^*I2`Rb6Jq>upE$rohQQ=iq9eEe0dHceJOi>Q~5ZBnwBgg_G1tXec zQN%PuBv^F?1thLG)(m6KM?M&^w&6bSPDCTjs8wxCj+09IQB@gs`dzd1VPR9D!fr!g z&^0(RDv({Hkxlk$r#^E9Xd=T`nRr#+(ytdPKsEh6JMILFK|uVaK`DStc!Us!Qz@ry z*4w*Nte<^vhV%D}g?G+T08xI;ss9ky ziY`eSJ~g_B)Ozs*tgfN8K7C*-;CakL=!`MQLl($8Z8~ zjy~u_Yn+QJ@<%b@D6wTVXdDY)8PznE;m@51>PIMRO%glkt;Qh#&QAljgLjV z*QR$fg*LGq zJa_DV1zUS?g~El54&LS8iQWzNA@ptH!qKoRsdIvy4lD2)hFxl|pU)9d&Tf;}hx!^8 zU#&K${Uz4*!_4kI&ld>a#x#nrEN3BBg1O2}t6BlGC z%;l~f0glW?!HV@(s+kkIQ8r!hCM}Sj>&)d>-z@Q+r>RlPIS`PB(VOjm=)-($#3-ee zJ8LI1;U4hcnty0YLt>0pwqU@)GbBIfz+@R6yTKKFk1s)<>Dxx&4Dr5wG0 zT>s?z`)^RHzJkVf_DzaWpTm;a=$%6} zCWka%vf4GJH?Axn5=IKmCo~o4$-{-C=KEvOy)}ZCm=Xu{i!{3Ik=%`bBzu!gR%*N2P-BA)#*{a-_YA6jj+TcXFlyu>uTTsq>cLxYSd(LG0$4xBa$HK z_v0vRu>8<^3<~MzL4aeq54rhM-ClnrCUCJq*J>GhwXD_6kF8=>&eSvXLA!J%)%7#a zW9jIZ$dfn8@zbv$IB<^W)!G#ju(onQAj0!}7>JiNhPvog-KJwo8o zcm+8lDp0x!$pzZ&kXd?cN7_Gv_`#ol(kf1kn8=~7RTOq?QlK)~FwUWL^#|Xd(BH9* z2w58@KTlbh_X33b!f1~mYxb~+4R(Q4&LiA|Hafz`{0y)2O>g6VuL=CU2AXX&Umk-I<7(ws$|D0h>z^>blc&U%=X$Wwa zjcHu;0wAi(39zw7JYV@@4YuKVX~p{db=VA?71Fhh_jr7S#umGez`MFe>^EodU3@(H zyb!E=O8W9O@?&cvTrF(QfY*Llk`thMDQFySknD;`&An+J5Z7b@>#F?cS@T4I;o9lf z;M8ga)@iw>>zTCu_DJLtDZJ~aLEAvVNKAFV=;fU}5)a1KMf1-)n+-9MgSKPx3(vng zoBg~-!?=f=^9*BY1HwUl=uib768m}}Pxob9=ucEUr>R#>_lkx}d-k)P?2iN#o|b=x zx~F^xvMhxi{h!I-MX{yYQF&EY_F~j<#I1=@xyU4GeNSlpybV(=RU}t6me-zz&m#m^ zwSVAf+|_LlKl1_~neK;FWf**rMT3dn>q>m??uVjJ$#cXFSo3@i z6xPo09j>3vU8Qw@vai+tiHTZ#KNGO_91iw?6kSE@@0#t{Jp(WqDkZhtv~?B#5*6j>UJ*A=?@Ix>L<}UyXiG>km^X>3lN_D^Cy6DsD>W$8j@p z*S-BZHXk40m$5%Q_z$!yaPX5J4Bj8pVcyS)6_BKvFj+k)tQZfFMc(yd0?YVWNbscUsK*JnJa?@T58rHQ#O*_^G~GCU#hk#0sur7O=CNC_9OsD`gX}l#)ob^ zx=@%BskzT=9GZ8o**LEaKz3DU(N@}CfVl>qSce&!*GE>KvwS7&VJkAmCY41p$x`+kW%jw~17~W~+2@%J2A^$!SH*cU1YBw!C*2ZHpW6 zFxaLF^usNiCb_G9s}uF=P}be5dZ6o(w2T0JkIGvZ72VXt%e6Cm_s$>>=%f-7mk$ph#5>U;^UutogJpQh(8V63iD!ZT)zwX=oQ(LyL>jf#!*CR0(LSd>NU8nDF`C9 
zHH%C-5fHj6X15ENB@kCJzPrAs!V~%uDFR1j??hX2($ni-oVng?)>Mnbh&u&*_UoRl zd2j{zKg|NNh&^rfovyMtX>+)k@{J>ic|yLIY$D`-1u-cfMB$Im(f|0o>}%6ch7XdY zLtbPs-D%3XV)Q0|6#muEpVdl4wK>j^ks%lkACa+(f@NpCdoz)}aZ8Lh-Q1Cimpaj( zU>IBV6blZGVj+%~klK8vceE3qf|ToGs?fgCFC-iP^-czfpxDjyEsvbWx5ks%Zf${WI96Snb+=+o}n!$ zxb-+PLAg8G_G$R}j*0?_hk>BZaWs>6>u*=`8`N&&bE{R9CIx$doAPUc^Y~ey{QSe8 zj_|cLbeH#NW+uMY`b0*L=e<>&PJ6O=5C5uUNqu-uc*pl}u+h?aK0YJmSZfE-^4X@o99*W`wM^OA!2|5RCyx&Csh7hF}?dC;?q5jOJd{9&0?_L zL;6UkVUCWs-#RkSqGgrtX5J;Dm!ebj5ylsvrfskAMYRA}0|RG0{qS?aQY~T0OQ2QD zVs(-Ro_jODw%M780oNj7BE7<*qW`oBF`nWEa$D%A5e$#5?}JM71dd}l*W3g|8g9*E zomVmqA%viH2EJc(c(GzcCqueISWh{0fUV&ap#n@f`k{Y2^T)(n&JPkhS9w^E$} z&K8S;_M5Zt-Vx&tk(a%aGTfbY&ROct);)zXAy2CNWp>}g!j zLcRmvroUQ9>Q`S|=yaKdZWo=Wm91w$*|ndMS;XF6?s&W6Rw|+A#0jmrEH<7t2@1@c z&w#y^ed3F4{IDvfpZ#Px17_bXkZ-%@hfBs^dvq|50Tr1m4T`7xqf1A_4)+2E%wJ#i zrYm{Vt}F~P4!%Pzsp568zcr`{y~Of^%z*l0T& z9}Y@ypG5o@Z8G0@EjCK1h;=^FXTplud!-*{vQb})<~GE~{73%0sFZncT@-*n%bR@< znrdCVB5V{khJ#K-{yh zW`({D6SfX!dQayK#3y9Dze1QWce}szrDw$cW|XuVrv5v=En0>$7tV*H;d|SpMS?8Q zQr|BBf8h!Z!$~q8;kcxJg&fSv`mN4_{lxFOLaB;Ul4ox`}^b zLfcq!PBaYP-FVLNn#F>(4y>=sWx~)}HCbK7f(6N}n4AXJF!cU$TBJ{v1!($AX=i2_ zKD$$yb!#dMq$UT?+4L<8Pn|OEi05O$LxtdXzazqNF0n^$OZCwW4!>D_=Gt2l_AWl};bJfQV^j6nA9qi*rIG8^p-e{oD19l3x?y5b+L7l*BSFg%={xiNi3E9V1MFeBjwBvN| z2P`nmb-DKJc@Q>ymzLhq$O8Lo714K1f-oZN=`|~2FHSzvIHQv<2>Gh!*41xkL0nY! 
zv4OXNxQ^goW+@9wiu10fu!3;Ht2j~fRmA>{%Rbgn9fV)WdVJl%g3puZ)Vg>Ecyh2S9D$Q8Omm=t z%HbEr4|pOlf9|W7sWWL1Ogn$^-K+=uIiE;;{*L{sO4@mo``Y&E|b%$w#oL5Uvkntc-6W!PyTxC`Z zGLs@u<-D%WHBEOY*m*B%b8rNfJNNC~{f!1!o>V+}Zxw;{n`YmASWSaom9LQ^Kf;lg zaD4IawX}ccE97ZYlo%C`9MM^LtIhFdU>}#C(`g?)H4C^qny5 zqV~g7nP5ZhD^3YVc`9yWkntDp`knJ3x19kxS6?Lc+z3Y=L&vnURgAIy%GcpILhY*% z`(>~${#DAd2pq0mu`B;814e$6zX$@WpN=~#$%Gd9lLwC(M_@_r%Kga>Wc*aX%1Ag4 zj_#Kc|B;HXAo%H~ez!9JnZHU~_WI<(VDuDx_gjhhpDVUB?@YNAi~=*ii`?g7!Lg6` zy`$2C$KnlsF(FE3e$3Rl!DI0?#Q(g!Wl8s>?ZKE&ZQLUh?oe?8i6J-|qjsJNzfZ>8 zbQKH5i2h$`r-*+zy3am86q`TUR9X}Nur#32=z3}>RwP(iPcCD^FX0*fEua3>Z)C#A z)5G>Zq{49f?ERZ=)-qu{wP7#IS*m`;snIukN_Gr#XNB=~&(W=BOI(brU*!IWS$ zA?VMoVL%~0C&Q^C2p3WZx%(NAMV({W1!0%Yl0}dIQn~-0Y&*=qI8tQkjFb~qA#)c2Z9~#dY6P@dE{_T{s9Ioe5^a= z#GWwBh&1vuApSwp`6@$O!7v=qcvH~ufB{0(f$1#5-^uGYb>C+|VvCc>#f~s+Pjwln zA^wG)l2F6Ak}#~yN^d?R#e@Xv95t29&*tmY8N}WYrQ!;jLNRxiyqy}G3C$Op-c+m) zMNYOw>kT&|U#Rn^W1;vs<^GR4T_z+_fdERO$UZFMs!8k#id-zOg<$^LHNg**n9zOU z6yNKmA*kdXuf3Y&C#(##7@%1}GZwCY( z81OS8XRq$U@W~-qnIZi6q6?8%yaX}NgR!LOk*9+1zx4a<>ts1Gr64>|<)f}o<}d7 z3q(phN)vgi8if4wRtKV9CXpQWf7(|I9E*2TXTm9}+>F4n^PIN~7>#Y;6^O^F4c~hV z_~`kw`e;kwSbSeA1KyC~eBueh-1z2Z`+mmQetc2TSpOm%Rv|StQ*kkcMrvmWU z51tY;Z#KNw+<4E5*w-xSUa2K?HZ=Scb6HjIkB7Ee@p{i-kLj0Cf1FpAd?{arJr>XP z*>5a9Q<@DgC~~^$hog9U%NmOhA^rmuHnsU73ub4Q{@`iy!=5tD?$xy{ShDnxo|B_L zW-AVSi`>c@iw`~Hj}Ajizuw%)g3DzQ<%$*oC`;7dH)4N1rW@XQmL7mz@u3#t2M5PZJ1jRloUeU?7s4;;I* z@ty@<)H$*~S?{d@ujNJ9|KQIh+pl`dD}NlUR~EHd!XAtL&i2PqIlYDrr-;})X6=u^ zs*lpvtYt$GF-zpH_~G7!0M%Dp{@ktM_+i^_y^bqK*)SRhCGIyye}~y)=h3&=I2u>F zojrD5TfoNAbGi%cF?w}n<0zY6WrOb<&d>c1SXj88Egw?{Bv$Q7LVBHgS=FHMkyOiUEh_?{^Wz+lziMp=*xVa14o9A-qi0hHlXkt zA&YW4x>12T^Vl#-_iyQVYnU)PhYhaj+Hse|=~zs~5fgayJnIV`N6!f@*|7W4&b&$C z40NFgSNlJFF~;H_KeNW-JA9eAzMLrVUe*}@r!k4X5xcqe5er7+%B=pxKee!6LjMAZ zXV00qiHg5!VnJ7yBfk>y=M#5uk}a;XVCs=x+Kdh+S}i4(PcaKNm>X>#kzwJ|sgL)b z+rWZ*lpIcD;rEA1D)~!Tutsw#-E0>Nk2p(S&L#evNKMP#Jp(M{FFhJN$D7zcwo3v_ 
zU$bzbXnlCU9SbbQw~qH(%EHmOKnE85P%GAdP1@T}-JLOHjl~|3qh)rfo;rK29dG5M#!0-CX`cYK6Dy4gA(v?lhK@%7U) z=EeJC{)=N3{WdJ9Wqf+8^wAGlORQ^Wd9udjskr~x`jYvK{=V@;-ff?z%CBI-+2rfN z>2`kju`Oof(kRy0`t0xW7^RX58?b$c7UFcO`T>*<<`8zy_s=WpVC{+1TH6RIF;sAAS+=8;kGIWRJz)68!vF z!rCID%7*73PCQvmEakU^jjoyfq32xxF?~VgT_RCu8`}KwV(0pwvC;qXf5pXzDfe$P z@tN<&UGpB(;Y8l2ZL_~J@zguHzU&4%Jm_fX(y=G&OFOWOQAdY7!&H4$UlyKmB@6P3 z4x@2)#w;v|xXqg>%zyx=7gIgIF)^Bomy=|GY{#9iteZ?6p!6sY1F}!d`%+lP9Mhkq zK6n3l5FZMR_nQ*hFLA)?Q6cYlyC2x34%@47SRF9xE4 zh@N`#Tn5xhwf+3MF%XyO_FS8}jsd#IS3Z{X2*ieiH>yQa8F0;c^5PTff%ud$ao?Ib zjDN;=SrE0Do(5WwkOLCDD@!r&=*JJ$XwlEH0~^o$U7=-$WVTZ!>F?-p_4cyn9UZXOZjtaAmR!<3Y z`=@`N(JPu_qjyQZJ(+Qnz9bgq`?BY5cxwdt6cJpeW9~8I-LHr(MP2k9y}(EQ-jMX2 zBt8NQkt9KqJW0wV>5z1W_uso8+_4bcu@c;|6x^{E+_4zku^Qa59Ne)U+_50su_D~D zB;2tk+_5O!u`1lLEZng!+_5m+u`=ATG~BT^+_5;^u{zwbJlwH9+_6C1u|nLjMBK4P z+_6a9u}a*rOx&?f+_6yHu~OWzRNS#v+_6~Pv0B`*T->o<+_7NXv0~h@WZbc4+_7lf zv1;70Y}~PK+_7-nv2xt8blkCa+_8Avv3lIGe4}*ze`5W(V*$Bi1-WAhxnm8vV-dMy z6}e*>xnmu?NbB)=>jKCd$Q-RWuOjaytlak=d0in-=hpHy07*jcY_O1B3}C}nJ*RNOxBZ9 z-Ax2eqvku!1)QmI?`MqJ&aa&zYjmL*SW5Nt(s|5R*}%YNYP}_0z+!~Rqck#KqJou& zoIz){!NhCJ87NQ1dfy}PsJtQNNAKVe`lR*<2}|=_wN&@0A@oS~^Ob=Hqvf0-of?NE z6LFNDoMGTj=Uh1pVv7=0U{~lowzo5viS3OCLT98pgV&<^+ewp1K4k-Jb_P+(2N+>s z7u7DwKS8-vgdWmHZ4+nUpT1VMV+9k7s69afC#~Pq{VAP^N!1It$#a~63bCJ?$3zuM zx7av?p4`X1`OAqd5tFpN+RS;(R@uNrgBeOf4V_MqL)o@fObn#xF3kyskL?hyxkB2f zbl7eu7&vfB<#rPjb12(wzZ2B!T5Y{WY-bBHj5{?>V|U>QJ!g#mj}t&d+d%$xGQX_* z#gk*4ppTeZ#n;IAsXZEFC(!WukgC(p9FxzyPB5sSa#3%Dv`686k0Yp1cNxa9u#||q z_992<;x~~nBfd;?2+;sv9YK_G(ul1N6&b>hb)3fJwJZw_C_S{z3H0V2lm(G`G`KLWynGHTjWfqcVh)l=*1`lmb@A9@WRT2bdPb~i?j zh2mG0Y*)lPKq+;HE{KKo#Fby?-~c6uJ}u5#&BB566K*dW0^q16rY$|lLJPwF@BzSU zaU%bVYeb&Be}|Se0QpL(9)h=7h?K3d9iV#(aZaSzI84j~BL{%QKFNn6=4{NMd}w|E zlV8j0*7&i<_VSYKA&r>4(Gf(RQ9T@9@>nO`$%(Q9ujcof8$JE8+~B-;H{A|Yj-|7CiT#ns{vP&4#}3k{ zy^vs1epcOqH{G^l@;t;Jld>LU$}P8rj211Q58nPLPF(CP4_m0dTbZOn%5$bHFn+9K z3-mn}RnL-%y+U1{^RWf$YSgQB{@6^N8B8Vf5wzXoS4m*so6!PVw$P>h&~6H`k20uB 
zs3&b9?NspY0p$Q3y`!+v7J8|@M4tf6pq8iJ780pD7>fh2EBVO6^7FQ1du=BI#@1)4 zEtpVy13du|WS+n@aBfn$5|>#d=M`gg$~%%J9_WCNiS*DceV5`=lw`yUlHkaskb;cgp*==j=n zailGrH6N~xnH_`!H|CmT+_e3>w>hd0YHi2t(VIbI_S3M<7`{eGd(^yKY(bAc`M2Ga zU~H!7Y^yC)x{HpRt{;pBA2TX+mF>p%-W`Jxh`4!LVmC&Q3Bj00#Ks~Sd+?(6VAchr zxbS-A=s0^gLHX`QBwwH3Qs=7upR}u$evm-EnPw&TN1kMwFd~r{J{n%Y-N0Lw7 z@h0%#VnS$FLr{fr*|glx9_p7pY|LVXVi9#GZaj>Q|7yAtdkChNg<|`N zw~litKp%Cc_d65^C^_H*pfcahLZ23f_0&5YL>*epDJdCXhM~B-e)-!q022%_M zfK#^X>K^uD0wFB2t`qaHNpqOv3FmPWKo?XB>3#QalI%U>nVBM?Koy1T@EMq z$X?M+Vop##>uK`(58+t3u>9BEB~DQN^ykO$ui?lugXp;zPGItU*Y_opBe0V4e{_gF zQDT<7U_t~Yl`Edt-%9L-hHH)|Cq|(7%gH8#znwvdI;&8NKo#naW0VWzsT!l89 z{W6B>KgxuDMMh)TFQ@PC9~nc7*h=r#plJL+?MY`ELzwZmxIMwq=u6B9k%h)%dv~P% zVj~NWa9d+Iry%(?FgY5pPHG2y6c}AjZZ|00zG347Q{Tdnh}kZ>z~wY49lo@VwOdt>AA#{Grt?dYSR2HQvaZO z)yP^s6F5QH2P9u`+RJD0&L&_))RXd(XzZr$JlUK4Gyl=Oj+N14dx_Ufz%bp}YaQwT zZ9#+iqY@KHqxAa5=&`e#ttL=pL=@%LXe=blxuD(z&fG7NUbr(Fn?4Z2d13<8kyJH- z%WcnSz z-B4EoC)$-wydI5yLtzplzNTaNt&heHY{CiaO~>x8H<0yM#5Q%<{%`;KguTzai$=Wy zV(6Hgfuq)B%LxP)KA{vG^TQM<_oej%p|{h6#S>nb!n13dH}`%c_+GFkwMyI!tZ!!Y zefb)VhkQUPQr!&3RgagH{v3^J!_Pbx+nD{+zUZSF+QWBZFq$E=RL{@{_#Ae;-+nFz zRe=A^c1<5(HV3TEsffY8Qw}Lh2fe|3o44Ndx)?l!%&aY4-XQZLVY}v|7_1gD`Mq-3 z8!S(m7H|0wgCnw~9WIJK|Fj=q9D)q(XdJ53)e#)=0t3Dq*CZvQ@j*0aIP$X>=slR> zmc<{9+lyx3J#xznZf#v#Wvmj7wyITcA8hi1k9w+Sbb-*r%2zLi!@Xd5jZ{Q7JsJ-u z6vRrd^nzttuWzLJMI$afzF6pz7aVThQl4NDjgi&NtTnw}|MbuOb_Ms_7Tj-NaKDYg z{dNZT+Zx<&Z*afO!Tojz_uC%aZ+~#V4Z{6)2>06}+;5L?zfHpZb_w^}CfskIhzD;P z%}+Bg-cP}2PrV#p6UD*Gn1a}zot~hxcya9K6&!3%Fju-z;|bww*UIwba4;`9SvYj2 z7ql-gABNK$r2pPo|7xcfEH=Bld}S>MOIOlP;v{d#t_cvZe9pm)MajZ~^4<`ot|~iO zIug4`zOJG-NdCBfc(-LFT1b_4{I>Uo@IdyP>&qgs`F36S!|UEq(0QOFp&$~4-g|l| zKkdu#57NKngo7c^yn*-0_&G}NN%`DNh1a2kf67HQ6>ddgbII*|uPh&!I_`#g7io`{ zo-CYD;sfK&hR+`xA>&JJxi1~&19vQUoLoit$zs1jRJWiHoa%inwwLfr^Uk5chbO#Y z!qf4d#=3+*5({Ea5&D^0{9{{^TQt@;)`eF~dc%)b_QS2SqOrZ9E?nn{7l_ZV?|QP1 z)Q{q%O*!uc12-?`-r5q4`GYkYNz1+9UhGF3ycUg|dwVkjti2$0>Hh0aZbxI6z`R8G 
z?FpJpr$s(Q{&a2GHho>aC$N>pqg;ezkY{3P2j{*g@J((tQP+;aa*N{qr^-A5B@1uv zaf`uTt-MY0t3AQeTe#OEipZmibKdTOp3tZ&9;LpUl)sew@shD8*q^?CL2F+O<{x=_ z)Xv=#D#I0aYc7kygj4czqH8=MdA!cVwvZUiSVB9Qd%_bQ@$1~aWEq1wQ>)%?`b5Up zcu`bXl8mQc?Smk%7i8|eD&RPd)VH&1%{29beR8dXJrARCz=D64TBa9hX?plxy%UW> z-DOhO_jo~#hDX23g=plFe^K|P-V2;U6gu{tCivQSJ6~X$H$2>PHO{X%8Y^4(j@!4$ z8#)QTtm_E==9?GZ_wfdShKr&bn~6R=EGW<#;|&_kgOj_SN2AWw{)?Vw-mt29@Y{`{ zXrw>jX);gshSg>FFU0+h#DRQtJ?d;>qTP(}+BYAo%}g9fLXpy%`GkykXJ9Ed$%= zF(~hy=5*?pH`I$pbF6Gr<1si8(zo{u z_<*^{4L&OZmnRp*E?VaU&3qgDUtEhp3)kZPJm>>XWv0a$7h*7NqRGnL!oI*Bp}(26 zmyGw#!oxxgU)bKdMVx0VX+OVuZS_`P=(%;#OEHbCU%2mU*+O6VcId|$Es7pYKL-@9 z^@ZDS?9=>d1b(+rEBuBpc=B#AQ!pg-BX!R-@S-m){`o$lQl9h|x2|Q<3j$ZnFFQ6h z1{1^k*^Xj#&~h~5-)<0tC8pb4Zce6yf7-ZBFG=}yOM~<@BRYiP%hL6FF=+GUo%t0* zI^=DC8@19WhWNkoA8)JEL3Zupdi%f_EEF|Ae_VzR-y$zeN>3*1A@GydK=MoFET`5G z`V18=s3}mQ!>5KRLwjzM`iar=KTW5DQ9RpKgy_$VRT8P?E_8V4?{sb*Uo0lo`B^`o zO$Q;R*6mwbV^Cv4!SBOs=umRS^}fMovYrHfT|x&*%Mi}De-k6 ziw+*5m7<@dTV5QV4I@}NFdMB6~gGmXx<5F(YA>!=( zvc`=uXwupsv*0xyPP*N-T7Du1QI&SHdK?4RZ1eV7+)n7nv!wN_ECU|%Z5YZW_CR{S z^!C*j3^;0}YTQ8Ta}26g^Q{=*vvx5pHkR~n5)_C547m68jM2@o7?id*NPoMSL5!3O zr(|qn&`X2n=C^|ZMiZhs-%KI>UpVsla3KT2ZFjr}!Vlt?oR5iDF@SYe^hOJjH_}ah z*7au@@OxkQl%2%h{O|nf($Dq&)1#s>ai;S0MGXuHF1Q-U=Sl3f{h?pBoL~U`_FzYo z4dFjw&ekuL3^?I&IJ{CI8XsR>v1{r%25k5s`}m*`;lHef8B{Eo&Eoz1Wo=PtvdJ@M z%^n8q=e^-1Lgc$ri2Uhy#SFNQyNKUZiW!6Sweb_neY=V#4cW3^8_6WCq zVp((a(yY{2Je+fUv2M377<6t)IvO2|OJ`QSZMsGH#XHgPf^RGa7Yz*#w);ZmmU5Xk z(^xcp!qYV4f-eZ(4|q9AHWn39ewIle_JuQt!Y`kZkHus5jX%`$eL*0kYx+lnSbU8{ zelGWgWky*xUhc7I6}yi8lH}KQ-TJxAkHEjxJ}8v?Lb^}l>5Al7blsuMI9W>g^UwKU zX5zK0pU(LHGhSj83Gndnzedjz{>Vgy5SoZxb8aVz0KYQ?*EXQKe>Sg zCKuc0Z=dS|SS>8CKAQ#p^MkH@UFrfVA8c-o=V3wJ9%feR3K!VA<)qN{UM3t^EhrF? 
z}TXtS%z-+6VC<8|y^e$F7K?xuD?obh)p>qE~v%KxUr z#d|dx30s^Y{L1oA6({KsFO;`w^A2Z7TU?l0Rz!zD#bV_-znmdqy@&nERdh(}{I-Su z+Zno8OCP#q)1iXi!aTFm1qN26C?2t-gFtf76~iYku(3t^>CIifkak43)bxQ1up{3- zIK%4;WiuRy4DAFuAY7OqcK@8|MIvCQ+*&iQJE1E;|i5Bhc`0np?H4eN1&3)sQZn;9YV@GYz5V4P&JU!A@xw5bx(0TL9H$HQPFr8nA^S}bef1KT;Qpiov!Nw*O$E9{WQ%3 zK6N*nc8a;c?X;J>>EHp4PCv_>x}72LjY^5Cr3YAq(oQbnBkRSRsJ1K61L&Wf^X^%Y z`4^0@oKFNBG2jtbX&DSY*g|1jrrc0p*l;E{hlV2MUXc3 zF77ioP@&#*DRqPTIl|wH9=XAPvZcH-dPj;84a!rd2<}=##&vb?xW(H@J8N(Gm~cqF z4Y%{RoTLH0;Bkasof|YOYxScJ-&7ZuvltX5+G+17v*9A8NM$ApV6) zo>>2PGM9&KEU&`H)HBU56Hg&nJ+F7dyYD%;30nCDPj4!o^j_16fyRz0EJ(n* zr`OJS!1m24dtAq{fpbAur+l6}#Qc<+AF06xow~i_&YdQ3`LMtSq7Ue?$-+bNWW9~u z)K<5%L0q%(M<-dYUV~%r+JCU2^eRu&{bYB@ogDH0kb)mnR^)zsMEI^e;|43%%@6AP zch>VwB6NS{?zEC9KPWvYzw1;x4NUSZXGov(1F(`=+WdwF-1?5FKtUcJ-g-)J{wJH9 zS3T9;ywwpFoHQET;KckZyC}VB_D64W%PnrX#x3`__vW>1TD_~80Wnru8&YLlpyW&Wq2~n*P~UORdvmQb zT$k7#tkB4S#YFB$ZgmFO{`$+QuMB8Xd6er>;rw?kHYIPH-Z{@1N{HUw^^pP9vDJdd zC7t1^-;;g4NZLPG7w)D^@@tnbSWfKBW5bGPhZUVc#rRS>b1ws!%{3Yu?VX{pvwq;{ zF=7W#(0A!w>I@nk^+~dR3{dMk5~LgE40dOP^REB{?&SAd2~Kc^t)I&evDFyRZufHr z?Ya}}Q=Dkp-A9Km{k|#QjZOe_Z?L={(_vqkmF2(|C%7n|^vh`i1N1^O72;ByKxo>k zNph1J;IHi)KUvy|*!Q*Pr+%UTrL}ppboO>!r-QsivGUH7j&MwLcc(!y9SUmKT$o?x z2;!DoRR#~xK}khsY3+z3#PLVGzfZlfcyw>(24g1(eJ9o5EkcI_Nd{3?3!H#;;+Bt3 ztuJiVsxVkt<^&Rqj@s8Vec@9=(3MvL&S0qGR5vo<1GRbbyB@E1hU84qA>le7SkW^# zQT_nIzpQXjiM0<{t$%vdxX>9mcCo$3M0{Y%^a_JH=bb@_(C?2CZy3MM;qyi%7fAoS zdhTkvH|Wq??i+DjK={%bzLVWvkVEut3t4X+|F;jAsa}wM*2?nI8)p!8w8{0)C4T?> zp@j}do&T`|ct$@VFzztLmqJ4JRlG%!5VL=6poEVm?u*^N>0m}QBq?-E;pO2+eo}_< zhcDGgG9bzI|I`z2Qu(cTn;#FKd859zEfNM4J|@hhi{P=dZ(g2~i~`WP@MA`|82*Au zFE8svL4K-|=D8IT__WBnaHCZepoR6ZoK;eIcn$0I9Q!Cxb+SHI|5ye&1OEJ%7e;|f zQdrY5KY3iiQW;#G9tA7uytcSd0qwr)mr6uLLHXP_AHTg&K#h$XTh!wSd|BVjf8!La zUgYS1E;9-i=By3eA*+IcQ|OJ$cSXV3m-eYUP1P_f{OhiJN24HgeyrIx4K-|~ueQx8 zje<0%yOtb5bu3w)?qqv43epW!KZS>@qv(@To01Dr(ERvT;m0fLSXao~f4CzG40KM) zh40kB`&s7=CbvdG{l2wEO@UX1#SSBpa4Q$kfyuw2i>C%8caeZs#6tlt|2 
zj{}^fA8yvbxH)qcm@g;gcfD0@aMZw}v}d_~F;P&Mud!KmL=BsAH%jX<2|mCdc<1R>4W zS?3vQnC{|EmnkN6;*^*aYNLi41D`K$AnTv7*d*!Td^zN%hpC_a`IkvYgGyG$^h0hF z@$m6Q4F^@CVSmKi8Io<2P%rzu!7`2Lzi-;tpYzgRq7V%U%`Hy%#ifvc!hvx~q<(vo z*y-T2;^_bCh3+p=LYFI!u8m=cp+?Zey=`4Pq$BKY@i9@ z2jK&mGap6)T_)Bk@56W$pF77$`f1c(x~`~2!O(uPe*53wO0X5c*`Drsw%-Z<^g5<| z{4I#tbz#3w3PywY<;cV9-UO zQPBJD_-o0ZvUp(o&8v@cqd+I&`#2|*!G&*?cI6U!6-VDd^pnNr@ULxRmk57#i=93& zUKT%??x8)ei6S;>i_=8~Y0L`=ex7!nv{%QO70D}&OD!#Gwv+MT)O5x#6#nCSD1MvwWX?iq3|@0S>C#{nk(ucXJ3%RY-2Fn=>|+#UjFg;w zcw8J+TJGg@_@jX{%Vkw)zBne%zVoA1gse|}i_=n&z|f)37mWzqI}kK8JxvNTJ>A7q zen$cAE@#%dH!|p`_MYeThbTA|nyquuR`wr!W<`E&V+2OS)m1MpXoSk(kmG_xzNl#E z61gKB>nw{Uu9MW)CPzb&*S?gr+wy;(&)Lyn8Tox&%5>>}#z#b=FeO$gnR@C={Q%w%fa@C;yU{B({_;-w+gM%y zcQP>XPmD43r$y4tKekU8NpU14lC*@R!z7&~={!kP2W|fkxy#Lm+ ze+<#eS3!}dFOJH_5IInNaf6fkPzZZ`oyO9Wxaxo9+e1`b9!luhvMf+Z0st3of4M9SI!Q7=fe@^0-}G_DQ*I zB=GEOOuVyI2FryW_aFJqfwW84jE>Hf#HgDlQO>V8(0fnpv`D7}rk_)1n4RN525W8X zR$oyRZ7H?6ew+gXHRcm%PZmdU2lu@6{T!fAw?0;$B93Ldtsh(Db3i;cTsV4oB4!pX z5>_T~bAyG)`V}Imk~(?miIp5;LmP@O&lA9$=SsWO4{*SMchmm|4$K zi(yx$hjYMT{Tj!@cSAL1n_?`A=p5kD;B?;R{ZJG9rSWO)G6L`4dCc6Qr-t46;*D-D z2Y4rGDP|sSs|hWzE?if`f!`lGKP>y&TVs+JleY6ZDZf;0iRipfH47iT(9OQhf%5Yf z9v<2P7`UDHRLKW2-;-y;bv}xq*4m9NQNodsH(jIf{=#wi^!|iwg&C0$;dxtqm&1=5 z{^u)IHhM%thN8K)iQSJH&bP*=Z(|9)vH<@a0fG^u0hxx&=S zU*V8ml0R$3cC~-{kMX@cyQ+??Z}&B$s_lx%(TaSu=NehxPsd-s*dmXIm9^)2G!XiF zRM6U?Du=bo+8;*lav;xrkzCbH8Qj!(FW;$+1NjZ>ZqPo<;`iBylnn@+v3lN<4Ugpi zY5(xHvyLfgVSwzj&Yij9@W@P)N0c1~87sHEQtT3j-?>X4%B>EABJHco47|rdXY^E8 z5mKHf>fAJbyPx*_?1|?ZYe@Nc-Lmgb`|OS4FQtD;34@jQY(q2U-`F2)zA|}lI4S>F zpz)^lMf;}8ls!?Aqc2Qlm6yX27rw zfmOEgt=`dTpUA6sG0~ZnAAWFZbJ`bs1)JTS3ucr2-`06{6M10k6zTbDi$WknH1jLZ ztS|PVUoJ%S6p{SgZ*nUx_u4Dn4x!I%B>7b$y1w@P_Ux*YLK8j{xU~7wjE{Zx2J7WS z{pCY}r^4@7=jI{%`aL>R1kDL-Jp06_p|AFb4@QY>O9_PxD`uqhvd{ME208B6kAwnm z{#^IT-9z>{E@l$j>q7zgK1#%H6#zZiqU{#X2|hbiN_PhfLqdStftar(e~MelTT2P( zjhgEE4aj=`zFL_?U~c+adgzqdv*|>>h)-|5J6iyc9gqYWUk=plu+I`iAzZO|^7r$> zL_Qq6byVxA82SWk-&A2wdEemYJ(7f;l|Lr* 
z&z*qDhSKxbev1GV=klvpH78(j{2}Eaej@Mgt**aSBY@ofEB)-c^y@+d(A5-fd|-*8 zP3o6f*EU3ec!jJ^?;}wxI(dWTwK4)ej_(Q(ncT4k|y|Y^;8qu&})=n)e@NX2A+_e3$`+T;I-(p{*|s^U@k?(_N#;hjF)5$1vyE}1sjsH5f~Wv=X?E_7HLSuYMe=%0Fhc67-KWkBq z9wzdWXTHR__^3zrU7aEHXYaz{w`NJ@TkSjcXW2==T+70Nr^~12%C1NDVs8>7i=7C3 zr6~1DY`eX7sM~=H69}v!n5htc$A0Ujls$XL5%`eePT}INn)^JJ&t6%FL(v1i!LIc~ zH9t7r?J418yz18u+V1*LlhGN{eCr`8zcJKy*O`|!7DsgUu4^VRXQAPAi;p#p3Vh*5 z&W6Lv7UxfajqhtL?q;2Mk{=G7g3umOm*EM-lh@$R_rimXL!y)IE`9y_kNqqEM;b=60E05W^-6VFD z$p_(l%hGTte@6U(b+UMDyq~P<=5Ux_PvoL@_VY>k z7?Y%H@lwc~%oomz2#3<5N6Q1eBr)GwGovV;^moKJkmss6p6@?=XPIRuXQu49S!FkCkiRMT_G^1E0rv zis4`|cbDZmC0Q)>Fx&F|Qy8dN-n_f{v@DvwPK;dEM&!5J`76J&WH7Pt!>N`xVIXdQ zDl55F0!IRlz3Y1x2B#-FscssafL?Ef^Z9Os!L!$^>*u_hfawdis;Hk11J#lAO)CgL z${M|s+;NN4r@3Bn*5}2?3766!0z=Ol^nC3;T@fls)AZ;Q+=@v$uU3v_G+EtIEmQ;ecm5-zZrQ*mKiwkjUoM&n1ce zK5+HM+$;_3G<>&p;Bpw`MX@!@rl{l6rmPeDzJ&q3{Z=7wl^W(L7HzjD_()i`|KMk~ zIyTmb-!C>K_zLXuSTdlFYS&KYH0g&!%ZdD1$B6#tZvS-ghMp&W1n#Hj+?70 zrezTP?#-Xo`c4HOEpOWRpeh`MjvrlWZ=ixf8z5mpIiY96;T>nZR58rt-Bx!xp$F#j zRl-kI5j0aRbbo|F=sQ`({(Y)g#7J5(@I4IlYA$ZL{!#@`dYVaGC*w(+Gj1+xiON6y ze-djLIGGm)g$f=%>#J07>z)s%EGomGbW+N(bDLGLoxt+-VbClq{ycJ)D&}%lO;TJD z27$-tPc&{;MdyVx1L_!Iu<)0*-hi|ko;g{V8a^`&y1ksFpNOmB<0nRgAueH{!4tJx z{H+QW@c7B5B!mI`$oz>DdQ|Yu-VHswmk@ow=B?^aGGFfTd|RD`gEID3wpf zrCRSKUu%bexW(ige4sP>O&!5(=uM9&Q1mMw{PeP4bhK%g6rrC3*VA*+(nlD>|q4&Agi2NBv z#OSRmF#*BQ{`Tkoj(A1XdU$u5&+1?>7*VdBvqKKewr=S8(i{w>R?~LBz9)wRW+Uo* z`9q+&=F##=QIr3{@5$fw11IH3f1e+OWrcjK=@akdU8@!h`@=)zgRyQu4)j!dxad1JU)CU?`sfPOmgft)sjZ}o5R^L zhk}6iD09Px=i(^Txa0D#bwMz&W=hnJH=?+1>O08|K|#6Cs(dVh0xqbtmeK!dUtlxQv@S6ay0&S*m|DaKA{|cSS)Qc6xs*wKnh4O|uw~U1 z17YRC@tF8o43s^mx^mhBpj@Q3Qu>7y{O240=L^$mt2Tf65de9sf7ec`m4Ts$hkb-1 zNPp>3xz;1nkg>ZkwP+U^&-?NtcO@m@>R!t-<%U4uJnyj*t`dcgir0%~_XR?c#{M7x zTR^10fmmT!TJB*#Q8x%+{{l|VEgsnS?!q&r*+GzDct3a6aUKY&$~y6KKbgPnvddDD zL-vdB7!AI?PV#*h>YW&RZl7)Qz^(L45d3DXaIn92(Z2j)V&u5Tq&#a`tz&&pO-$C= z+HxMUo;oEr*M1tP;ieDbqKokG@V%h+0iG5-TDdcrITlx0K-QUH%aY1Rfr{NmFO}0{#P|q9r1N8LEivf#2Z4WzU=8ea0H1W(D1aua3 
zIyBp!z=8|Ktj!(pt%LW->q0Ng6<#Rzw8R0M?8xslVIVYSyW88|9drAM%din;TPFPwv zeoxFkAI!39-rKs^33qF6I>(R_2~3_4!XTBCd=&KZB~sd%~3jgC*foYSmxoRM=ixw^=XjmG|-Pu!mYP)=~YxBD`yT-fV`GQ*~i!b`3iFR+VG|dGS))zDF1^JFLUyMCmlz+50)g%!Lin=dxV! z>XCJB2aD*Ky7S!{$u+LH(Cd5F?fZoO;{Dc@)VZRiw)F|^S9EM1mRnh*<%U;7C-|Ek zrsMU#gCzl3Zn!(ef4jgeI?la3vD9*(8=m@7a%#d#I$rf@-aAQxhW87|cpB+gJs~&9 zF^Gl+N7vo78l>Ziy*|E-ei}aeIJ@@X3+(`q14|U zw;Hq`SYXFQWu3OR*WvCMvdgaF7n_N`>x%@_4czfRet)+A-J04`8k!ljzev|*;)Y4O zd5e>1m}TpA*^sm!BhO>vq)bER8vV3lCDPwoTf3a6ZYad}Ec7fHk6vj6$1&Lr8^h-* zvtKf>KJT=A-Az}_OMGEz^p=5Eo^xA@(_L}j;OudZcNlp1!xM?97*~9L=GsEVhYXyv zHubo!i7T!wOgrEBiGeDv&3m&HT+#VTf&7^#44hYyl-%6yf+{{?K08T&zvSB5JkPqI z#i!dF)(SJRakid*k%|jWyY#d8RzCxqoaeS|6L7)!ne9<~NcpJj)AH+QJL5tR+_mK$ z14BMC`jTHe;a2Z3vD8im%C^ihheb{puDx%O`dNa1otoTwS0^;;$lSQ=CIb^>%9cHB zaKwYM8@rnbtYl;yZ_9SXd>8e|z+DWq@c&eEn8y)g+B0uI%xB;x?@DIn4hJk)_PNn# zAp?`vXnwAfcfe-PFdt0tVD(#J??1+1N}Rr3=P6&@${tkCMDmfcx-27b3G zvJRVRg}cY4Ke(vC!0}PLc`?fZbJX{lj(lTzQUMDb&MlS{tD)n*@(=#2M$GVr zx!2_fb##1Rv0-CuwHf+IY>Zj4h5lDxI4}O(h_-aRY}i>kN6HK@e}Af^O8C2W(W>*l zO=dVtgMFU<%NL{Wh%brwWr{a`8%hTy`eKiI+?_!oGmN@%{qv~+U*wyr7m4;}_~79J z>(E3WG}P3KT>RV|J33G3oa6IBi*q0RV}&d*#>IQYH{Kh|asq=_+_1o(D$_fcz4XFB z*%gyF>sn&GefbT=dtR7mRb;)cz!FU=-utTfdSmazxk4wDt?|_M@q0w?c%h4s@t&Yq zYYaTAZR=&{g?fu0%$uESjaJz+QYAV(QDwE8yL+@XX4EgR{u<+nh4{f=f4>#BomihO zO!vf=l7sE`mo2eAb4KbRaZh~cCM|1}Yx!57&I{4h{;F(+iE*yA!Vf&K;)2|nggz@= z`LIW(+0GM}x_6eYTVjndovWK}Z1+U|k|vWI&#lqL>8+8CrWYEA`KsoGI+%4>BQ$%EK zKJ`7s!=5Nky@N+&+kgFsZs0HcLfWg`Q6y;B=!s{kyR<$G>^t{FshqWsM8Os^i z?2@h>M|6ME=0lUhkb(Ks_x-XxF(>#z^-FOkW>9xA>^(8jB;x`wiLG+r17F^E4=g-l z)g5W=Io!uy{VXFa&%fgI|6%`FTZpzchidY~S4$H_dU5-XrbkH}r%a(!8FOBy2{xCvu$e!)3#K08`zvNZK8{^hO znYoJ$7}#y@vpO!q1SfffiAjkuurRTE<;E4JsQkmBhvy3&qXv!kq(1wX_qchJn|HZ+ zo16E!WdpbD;Fc}ivWHtXamy}l*~TsVxMd@^?BtfM+_INjHgn5vZrRQ)`?+-kx9;H9 zE!?_?TQ_m*E^gh%t^2rjBe(A4)~(#Sms>Y;>uzq{&aL~oZ3AxGf!ns=wmrCQ6K>mu z+qPM_rm<{I7z3plJ4?bfJy3Mlqm};Y41BSpv$xaP1Ebf+Ys#J{^P|=9OiuB@X&SsW 
z@kAb2dDQTfzV*PyjJ&I>9x-s{Z}2U8?}5J26a3GqGco@COuO;dJ@Bw3>+82fCJNGO za$nB=A8g{VW*Y_f)wkT(Rn0^i>91JT19hJ(JC`&w@uInB#inL={IDrrbJZ0lswqbQ z+_=UaTULGcIQx`|d4^@n<`Mif<>*(w>0#nj_niiI{xrN>_-JLtEkX~A_4HL9xuL>h zrPq^ln7DOa=R(UgH~dc?gTuFPub6IVp~GwMb%KfB_TRoWJa@(Tw?pxnJDJ#GQj=>i z(G|B;X#I-xWuocbKrI_9SDZ}NYe1ig3Ws*zmN0a|qQ>Ovmw`-7sP6WhFv$g{tZyuP zLFTv5EcDvFG8e3;hi$Uk!Nl)f0`qPa65D&)K2xn#Onmr6U|#hrXFP)i`F?YlSlb~m zuVB~-ZFboSzxQThb>G=@O2tkX95X>Sgu_Jr*K#XOHJq?w!sNySu}mEJ@Ij4tyCbGo zXstNz!9<<8MS|^{9Wk}0AU}@azau5}xaw60lzz8upVn6fj;jpR3M0PJ(=EkYCp~7M zTc_O0eWrloQ*Sbw2z`G{sSP{z&JJ_eHqJZM#=!9I!hI{`?XXyCU~cd;2JY$;U$XYG z4fe*A=Dx3D;F3*K-}fK1Mu+wBhhobZNVBtDaN#V`wfyPZI(IX$C4K7q@x|s?KP33A zbqxb;mw(ACk1@qhCBB_&)|2*cdGs8)YK&TQURbsh`4O|@>>KGIBW$wrx;(I$foco3 z-n=Mh^p89zDpHi_)?rGws%+wM3<=W#z3s7PXI4)`i+jx?JM`>9e4NiyYXOe zC4c7%X$2I|)%|H?YzG|M$vHg?B~-C&Ig)(Y4z7!~rpT13;7@%;HwvCx{M3Ph6U5Tw zcL&-*JqFZ{EK$Mo7{NzZcx)l>iG6BZvkKY=8JMe7+ki&nmYKUJs$x!8WzDn{8_4;s zt(U}B!{JNc;ufEQx=8-){q)_M=NvS58Br)N^V=*x|E7+W>9VrmdM9bWrHw z3T95UHRRqoy7a4)4(6#(X|!v>PFPU5lsw|YB^G)Z3bCa=1-h6T?^}71s^4unnB-; ztV?Q*>d5hz@0-v?7-aFdx$2_o$m1gT$nmx*col??INVdkjz@fU-#?jx_~H2z_ZO;Q z`zN1jxf)Zb6q|HIpUNi;ZS+KNIcM4iuR9siL+63}<&U&h0mbp;Q1jOHPW>wu#L>^-U^HO0GDEHhLrJbsTy?o}$&u1G$8^`*X z&VD7#x$;Tx$TVYsn~jM*pQmC@!rnG_eq*SopT9EnSsCT^49r^=8^O^hX=ekXl@Yy| zL7{3!keA_gDU#%acmHPTC58ZN-l|SkS3!f!ns35#hLAIFkzB)!sVKf%B+c}(0ay&1 zPdxBa3B~Q@`z~!YfFRS!IXjmtppXn%PnH4DET`>Wn<9or=Gheb4Ul;PsSfy5hsuT@pA7_=7yI# za`^FFO~H<5BtJY`=VG-iCgV=Kef>J%`#5MQbvX0R z{?vs^=Tli7%2F5+b6d7bKX>Y>3Si3W$QPMLe?!N7sE zv-TpASiVDdpTHbF(5dhZRMC^d=KawVJg4bHyL37u`?(a>m|ru`4VVFN%r_7tBrwBL zi??~zOrSgMFXyz0VS7fXK_RaJbj6#V{Ol%*NvC)mr9RDsyh{r%e6JXfoQh9+<_QMC zfgtzR&L1`TCo5~Ro)`enz0O@93qRM?C$u?BW*Y*}ax29}w_n#J=@^)=I7r}$G9t+a zk7{y0)b4zGl)yzRx6HhJvWACtPA&fdfj!IKU08nKzI4Pa*|5$Kei!WY*%bNCK21vGxOfH}OyKfxis0MAj8)F2Vq`@*p6G7+|7LyrCA^DjB zX+MNSLB6f?InM?o;IW%kS9C`lbZ*57p7>w{1Fo~`a#|$;+iZ_xsxc&O6mOxaNJD%7 zyHB$`2>qVCtLJuI66kw(?p^%e1d@i=dggdY!0^&G=ju{Zpy#Q+s(K|3yn90p&J%t_ zgRN?hr-(zKq~1UeX)%X*mu&#A;{R+1#t 
z&*|SRKg$BdAF$a$JHWZ)QMz&*{HObh)~-K( zM1Ynhu`=OHQpnzhD=<6TVsvN3&nNmI|-awBOuuHqNcsg`BlJnTi}R_ z-Qr*TrKUkRO}@&*7JlFMJ5lrQTTMf(;3FFcGM@`QTQ*ekq7ccKnraJ4Gp^oW;6Dy& z^q_5#0=A&?VtYr^c>$~>`Bze1B? z-)juMd_JtU#U1VhQm@?VjOu;OKYHJN&}jXu#{ZRVSbSx_!b)e@w`|fbrPOyd|FuOu z-BaNz03mA6LQ~#5!!a@A6(52o!RT8O;w~`uUa2_fQ;+pUxj-%To#$~9ptCSMdRL1J z!Bf@9+7>BTMtz8LnJfI13O!NED-EOHS$O3JrFEX>>v&Z_rD;K^w;c_10=@=5IiU(1 zi3>&2X*2+bNhO}I)gbT489sRe8$7(Xd)_B?5K^ALOohO-GV8schcqFEl;1|+fp?|0 zPa3q~cjx%J@Fg?|S1p}ub6*Q4JsN|f{=i3ep8Vo+fCi_(RhqFMYk}0B&2yZd&_K-h z)b>~%ZTLuie_PcZmQZg?g=mB4xR#cRFn7q$o3zwyf(}qm%~@~yA8!A{wKerF{z`4Q z(etvCzQ!HCzDa8WKP_;UYq`)wU^l+B2^us_pq^x@o9zzcq$(GgZ`Oc(hTes{69*uKRD5{0RU*zeo3?dMNpj(33vOzHvV_ccqX3BPuR*-Vj(mo>iDaMKSVDY5_P zkY0HwmGcS4zOi~v691!}B88bxGz4V-e{?v{%Qx`FZl}Lx&mCTJQ}=}zjvDmwZ{N z+J5v&I5~&0H#L`b*{j^2A0g=P0MuvAYg#_pw=Cyj?#Xh1#&wHjm@9tSTTt&_dpSTx z&l#IZ^MpXh8CBV~1a>+(>uIv+*jt=D4j_K%Q|t5J;^2EdVpo)}18k^$sj)Cx3UeqnE(kCKHob5^#oJfIWag)z4aO841>ol!9Y*~ zsOSBE?r;XJLq(4~gD3sN_uO)UTW)aE#poM9$UB zdz1un(+|P#Bp#k|MHC%2=T+Xk-){;!D+ZeLE{kIR6n>fK3<(ZAr;eU5Ac)p~{ z+i$V>TNZ7G8r<)g15}6AX#1%`hEnp=Rg27FSmU~8ey=JR5W8AD)*J@5$9y}#Rt;{3 z$@d-EWDepfhl=V$)j{Qb=kuP8=CDLT_GyK_23V*b_k{06uU9M*xieQ2bbh@1WckDl zW#fC z(?Fbhka`Bmr{0_2I(_U7%GIPj+Bf%eGxY!{dFM^?LsCDpkLW?hwFlEtEM&I(X{R@x2otXmvgQ>R@=Q;j`c_u1+*Uon&dVWpt z1AbFbFi~5en4o6UN*{2caMC{FZ zL|+iTRrxuzsB6#w#A8mSuH-lZj&t(A;6d!)vt2F`x}^VYbqP}r>4W%@@iV`hk@D1I zRqcAv9(rWUPBqg0dHu#jmFY107T5;|z{mY`X&qv}Q{UStb^sOXyG+w`0I4@SXA?NJ zxXS0C7SL=iGk1m&xkx?OeqWP}UsZWfh18dvq_f9S6NGkIhROZ_0OPs88>gv5o{FGm z(+EHY^(JVzDp=fJwUAEbU)8yF{=FI)EO3a=e?wrqOZQv9szG^h;m5{z09}QBt1l0! zL)R~5-C2*xdeHnZ=dcEpUrGCvUI2g{JWDE)G@&bML|1VGX@B8S=O@p#{^oN|U$(p- zq66|4FM@R<0933JpM76J{F@z%4a5COd%p&+pC|fM=$4S%B4JWqu7Vc$NgMJrdD|^? 
zNdL^-oOhwxK%ZE@!IZdpJlYSRy6I{G&+x;H`VM0s6>*~LWUB$Oz?%;L}NV>IZBxRp1;H(AdOQ&l9&nvqO znJioAD)FkGxKkZ6Vg@8_d29hro)`=>RR>OO)|&-iZ9qk;n6LX5u}`8|7P-ULe_`=Q zC0nB(TK_FyzOh;Fj*>NSKCX8AhKRL?9_uO)UTW)a66>ho1Etk0E z)<1HMTkdhmMQ*vtEmyhaF1K9fmfPjt4z%gkr2g@JaeF>$!tjVD$E(T|a%!^`{nRue zFte=Jg2xo{ayR;&{j3H_Icsv=CYeCa?tO8q1c;rtq^wt5-WY_|ZS-Sm5xNmde){Y# zkw<#Jw!&9c(2;#rY4OGgnr9s<;&&o^Dv7C`PK?eg#kfYX8l4Uf6O&A69x!e%V!rwm;OI3Ku;0r3 ze)Wq!!E?;Ff?cXGa7Kc4T~;5;hZ`f-d{BkFr$>A@_f7}cw=XVWvN{ZG$&GIeAo}FJ zlKdwTLf2-Tc`6S2&^7e=Qf8YvF!tBA?7XKB1NTg#hkp=05X|G>*gpd}OB~wcJMq zo`!Lix}fr4r=5r)>Cbt6rT)=rfYodCPm}Sf{4BNodX~TjoOvy8rUQ@kMIWDE)8Rkb z&rsd+Y50;J@c0mm@FSTIp|;rYnSlFr_qDE=0nH`KQ+5>@04H&1_JiF7Ub;?Y(|e*9 z?yAL1j+hBumxoaJ3*iUOtDwbqW)gW3|H~@c1d7V`rp@^_1L&fQ7K~gpfeh+9W*;O*-Q3^k11qKAKLZgj6Nh$Zw@{tbeY}2zFIl+FE6UxUZgOq z!x+l9JvG0w!t@_KOgL16B&7JdVTN@geJ>N!JwL1W3mUDGNeUo)`$&~eAvZ;eO! zTILU1yDs%-?|RR`KHXj0d(IpGEeozn8B3=dgR9&|jTe0k9GK`OE74#C7sOc?(nlDW z{-JK#d!k<*Is{)r$Y08pEbbJN#7gg1xC=-Bz9(=4C%jh z*@-E~jUe2hJ?fYd6LX)Po6N6Z47qhJH)arc*s}Cc(=uaNG4Dmy=3plBH_qNubQP%25%sC@koSGkk2S6lBk1f#6R89~wv zjB!`WSodOV9Tfho12^8d@yd;N?sfTR-MGeOvLQZp=d1tqt+%CEviJ-GPu#p66a3s5 zI$sTGS8ik=?lezWwBH!=2;7;@!0qR%SuYP6gO<$380%yPmaM(DQOeU8%xlbj&3zbH z|6}i+8FXXFKYh)_jn2T~@)~O! 
zXZ)^Vp!Bac>l~U9cnD7x+1v5IwUM~=Q&sLQ!MDvy_kpX1|Ijg6A}R9U#qs~?!;Cmo ziOejB+~y49#$dR4v2vpToejic=1CqmR*Witn=EuYNzX{?C8?jJuOtbOGLuNsCCQ$o z0Foj|vZI{xfH;saBgS7H<8S+kj%OG1KK~zlb4nUIo?}4Li4Bi>L*u|UuE^-uX$ElD z=YD@B!(sWYLM9EGH(bdydF={9)cf77ZXh3`8K`vM73y!SSMUmOgQC!1PT##P&#$6VbSupK`Gn7*ApT2a3UP_lwaDgO7zM@gJEBKi7O$h-P=(T$LpxN0K zRIZn8Qwwo{%1+`$eszIB&sXn-+*|-AUUR$NxZDLwKR+Ak5O#+BW_?qd>RdqS)SgP)?@qA3sh6khg3H*u+5yB@AN@w5 zE6|DS%6r2R0=)^s*sjn{eXP^p5$M#W#C}(xg~>|oIpYYu)Hfc_xDp%P+UMRBC$OM4 zqoxwPvwtkKX?2A1JBBLD7;ez(b)}J>&sRPr{hh4~ z7ys-AY4_z%9{l74SR7SYGld2@%F!Q!TS897Evu@R(>#Equ~uXsbrmlmSvY&D2k2OsGOw5rQ*`an;Aahz-&(ph zK^mZvT2Da_n5r#t+pNJJLYiG~@J;jp1L}ca0voW8c2ICa;_MQ;_4XkB>D!hc;Us?# zPqC_xJ+$h@SEQ};fTS5W{9;$yL*UGVf%A5FKxyOl72mZ0a&os#|C#3j^!!NAhCl$J zfzXg2Cp=)`#`Q;ts)3Rib0wv_9-vYtQDH*8>>6Va1taCCEST|uKgd|y8_h$9}* zwK2CYG0GmA$IVMrkoJUB1>frTSM9(#-dw3S-V<{8`0W~p?O+e;>LjiAga9{MX5}+G zSQ1+hn_cG#&ad}jE7hraRxpuV><#~ONDD$ZRA^@iqa%_0|6Z6N$< z+x%6By`d;Yw`|%EYv92heeKoW&^y6Ibvl7dbT$cf5EzJytUA>eidYLZcJcXuT4Bhc zcM>)be0>K!1$=-r`n@F^7)a^~pkVtlPvr?_b|7@t;d9$dA1D_rUf8N`4{FuRy6W|P zq3hPpy;HO7A^4C%l=ysK$nePQpCN1quE%cY*Vg$0&m6%=t;u%4@_u?vW+I)~&c4+V z8*E{bz)ztIHgrh&HtEd;b33@`qprUrg${X>D%O0PXbbZ5qi4>qCgpGJ+-vdO7T75~ zJ>`9L2=u59)m>l%HCxx*^OIu$y-1=WXO|85UQ_hZbY{Q+_1%p)TPPnt^Sg)#0}M_) z>UlER9>krC-d~8OzN!0*(?#Ii7+$GL0@JTe93Bw|7*07S^J57EaCQ4`$Cm)|yOdHr zSPbZ@`kK_>;0OszYb7U<_T{N>1!@wxTp^l#;s+fvc!%yCUF!^iyaxLmp3niP7_>ZR zKoQ;lkF76(i?RD2pEe~)rBd3aeXnWXo6|n+v$d&IS|t=A3fYo9LS+q6DI_9^uIv$I zD_aT?qL3uf|4zyK{a-(y&%EC2d7itR?VfY)x#!SJE1A%)dGzc!epiU$mGRih1YS8I zK@%6a5_HUGU^!kNFJJdJfZC7F^Y>OTA)A-qKM(U)k3vLx2@~==)Mt1lI6=jWi{g70 zh^~lJm1bXXMlH0U(J$XLPZJxM+rA)NV<0LT{jKSH)hk4?48b#gSR<7PyGGc zME(9-E|9cl`+lw}6G|ixU$|uD`uBag?+e=ZbhtqGu;QCr5lrZ6SRL^m?=yY88if0p z&~yeHp)2vez^j?KiwQJdKItEBaN2Cmoms1yAd<06P8IJz61+U@c>Rozy)7}1-5{wg zW&P$yc)dk<;d1T}soJUiaEJ*vG8XgopTqkPHfh&tutt4p)Ia&`C0NkJbEVGo00UnA z8cZ+N&ob{$y6z4mHJ)E%_&;9c){EN@xR*+b5Qg8W^e+_#9>C=}laJ#0@rw3+!Ta~D zU5oZqWBd#|zN(jGKoZYq+=18QyS!gr$rG9)axLBuGXe2j=uf?%Vp2Hsg(VAgcs^!( 
zZ@4XiH5@bCez7jZ_@WoQ-WllfQjCRdL92`XVcu~1GVWj;3n)InYD=rVL3%YNNJTt< z*_DdhOEBN_eA7xSfXcl+#c{r)a^9K+nV2mz2YewDgP~ax_s^@byv`2{W<78*w`Yyk zX~KNnHBy)3PfcfcztO)J;^%pDq*ft+kQ0?m7QyttUC5{Xku`=BR!4;WS&+e_6DkV~ z_Q=+D39+EOzSTL$8|y`w9OJZE@ER{COOyqvyjstm_}`OnrWA|dc|DqQzA2CetB<v^Qc3v&zV=Sm$VCBD{9FJc#RP^K$ z=GV05pd2+8jMOdq$%5=H*}7YOSP)!cz2}F;pBlj#EEuWlvtWo&tpEoOt>2_pu=xv#q!chV!=6#@FZZx)&Ze=SRnI zdH&w!-2yh)U+llV4a=tyIS|JN*I~ga`Ux10JfCO*9-mi(vV;X9yh1qcY%ov$FlXO0 z7Ig8>49>A(`;k+*mxS5S{ptzRBzHzj2cVkt?ru-2TA^4c)Oz%qct_0H-lKeM@IU%%cql_GAVC zg?9+Jmkr6%w+xRp1VHB2rk(Szvw`V!HqTr-5YlQ4v@CC8yz|bwast6%q#hxAv<`_) z5Qy;VMtx>OWI?a@FTWtTF-40bB*1~!BX!1tV5H8N2nSZDzHf-v4+gpMvu*MQ*`xNr z`C#B3X%;AQfX2fEEd<(m^-mNy;A+!++}Sw%StW((zBjW{H|TX3G?`vGX_SoV-ez3PiId?# zZ45Tw8;jc`b%nygXQT`Y{ytuGZ_DlJqx*#;8z?_6_%3gn4icTN=SAn@@kia23iaop3R=J>w{DzJm6btvjoedjl5dLs*zAD@w#xe{GXcX;*q0& z|Hk~uz>H-X37NcNy|=Mk;?=61840f?{U%JW#B$d}q+BsO5`1RP?OpQ?%cmJLzNVay z9NnKYfc)#J* zn@Nv?Yzlj~^$Qk=<)dTgW1>LfJT^q9vVnJW{&RB_1fQ!vA7=TdMtjAK(fpQT7=P`L zp4;D=0TR5Ts&DZ3JiS6C8e;g*etLuL>DMpf!>kLVfqGJQKYu-*F9REnFQS2mhP1OR z2;-e`4n~jKd3#xqw!AjzSbsDm^Xf9q!TUF_PL6U6WZs6@p->E;nHCzK7%{*--fJ{v zCYE13J8VS^(0F!eBHo{P^|UVH_g8L^v6znaJz;^n7Y1X%e)e#KS1g_%uTGL_EL`FF z4h>jfpP$vCJ}nlCc>6Qp?|F4vAQocUPN#A3koOhO;#u}|d#fkev$1M2c-+=7PPprxv{%dQzqHa9pi1j&*CGuC7 z#{r78N(#m8$L-LQb=_M2 z@Qhc}T0ahGyuvol{-C(5bidWTSQvQPvSR*AKj`MwR<4SL%r%S7W#9K3t<$(VcC;Sb zVm~m)DDGi{JlRG(4P4H{hf=GZ^l3x z&)$8F^^}qNZ!sV@Qb*1YM0i0Nd+>AK8TfPGQN1iB21GaRm8>@d~cZ_J%I3p52~+ z=gZT_47_3By`n%RB?f|d^0fflr-CmR{A!8@z-s-OCD=ZlTW~-2dNjzXwM!fw@PrCp zOie-bC_Gkpj>^$Z(NH*Y_Uk!XXLC7z{um=>yBCyK_N;H89Su#fetB0b@c1F;KRPds zhJk@6X5KS=V5DA9K5i!kESknI4^2aw{4bk)2Iy!oDEF zvj+rYActo+l=wom{2cVw4zJf`L(*G}zY_6t@^co%Ks%+4fBZH-DCg}HTW~uG1H#=O zKw4qyAIxY9g2+#6a;)+VINMqzpgr8@ay`~9~1a++2Yi16gpFE422ediVh6_Y>e6}5W- z-ksOhg+zgMfu7tBsyA$GU#|Z+1ItsM-YMq|N&7cn7c0Zx^MZ>uc#XzLm1FtKyA!4C z1+Kh#2=dNeSW!SdH?+2I zIksCSyFM4j^1cbHRf*ajFvP15r4R*_rF4tw+uWgPIiqfAZzM2yu_(XYK#WHpPa`2R zEpNj>r5kL-+qBWiNI-ZyZ;N*W=~s}H`u1Xn{glp 
z@3$uYZ^tikgFW2uEp~YSj2rB5cNceuAvfpHJCTtf@c|p8>F%&`!*wUOj7Vq=cyLdl z)E%sU#O+o%6bU*lS?g0Ue86g6cVTrTjMZ7r?;jVDe>oCH>UQq-047hcHbp{?vCk3F z1`l|)PH#g?dn8=`A$rKtfdS?;>*alAkw7bE3#LzDKyBQYM#~($KV$M3dhG!>wlfaq zMMOgIEo_K)dVti9eVPZ^k&qs^D>g2i0Z+{5b=MeR_@0m65nIgw3HkXgQtu<6LV|BC zvx5Qo4_24Q-iv^Qxr}2cr(^lzJ*2Ks8v)_2{_#G?JfZcdj*?1I1QcpDq|bQm2_o6! ze&%FEK(^JY5R7z&7cS{QgU+{k$e~AIrJ6MZT^us|UWLNC@&kPs{^f)N!WhVdTN#*X21-qA@!Ncp-{b5 zI7_dH0oxWGGRgTq4K_;$s^)hxKU6J43T}Yb-cN<#f4wzwi z-@oMhb?s?@zHd6=fS=RaUDo(~2m!|Bz5J^uctSTe-y?IdKF>ZSJk7}y9t7pY8LSS0 z5?(CZ0Z%{`Ui6RFA+X+i9apE`6NUqWPd)e@3{i}3CGBQUQ0tsps&+OQo|ni?kr?L% z+HmwkYickwJoffV!23=89vh>Fn}cD_bz2)rOHXLx+271yNZ{?q&ls?0ThLp%K5W16 zVsL&jAn?=XRo172;8nmcqiQ}+pnTS^Nm&;J`S|;l4xUi7WL9LEJ8lb>l*ZCMVa12d ztAr*6fx*(+wa4N-f&NjyrunpUk&U=Hn;x^^^Jh$$bB0 zJpi&^09j9ftT#Z`BOvP)ko646dIw}Z1hQTNSxr3S8C^{Y z8jUe^;(|0!UmptkdtREVkvE)^77UrZdWptdILPyNO$dSd=U8$4%z?@^RAJ4bVBm!= zM^$q`UFMqX%H&{}$cyQ&;K0#c)c1}TgTV4VR=n#uQ1tqt%{Xol?Bm@bf64)8o_|Iy zXtaLjcrJuW6&~4nD-fpIOh|ts!~JJ|Uha45_f>^}NBPxa$~_L~VDtQ8&@`BPzA3Eu z7zdm|o*4#{ZfP zBYA4Z&w!D-ey7=RWLX=x2DzE2Xs#eT~9iMy5j8vjPRCcXd zT@xD~cCQGGq(#AaUVDiB&-gs>Dz2RYSo`IuJz~Q%BlDJ-Ei-`U9*w%n23~#C*_AVZ z=T4ha&IZh_F&@ijKpf9*I>`nuWn+Ak%M36$dwQzWEjG-2m0gwQJp*{^%dHpefBL_a z^YGX{Y|l^S#cl?%p?;9Q_D4Y!Z0Grvli4s*fAbQa?`)gAmkZhOig%9F6E#|&aXNeS zocn7O81w4Q`{Mot=R9w$j{=@~qlw!!hacuxV|}yo*}X&t_uugtRDPA2N$z0tNEE<8J>-F8`r0e8lATN#>iA!NfXzYop~F!5`w{6yt~;8&BX zWK8cmO$vTaG%jR><||CWdhVtB)q;0zxsb`dw5SK~d-C}C4FfJ{2p!onXR9Z;X{H<+ zr;gvx3`)(z`*33td-~3)TnOlzCYdph0anUZ3(sTvR5E*9e;^X;3GD_iY$tFbJNMuh zp<5mh>(YPc2!#s;vtAX1fd~A`_kMkE65cl+y|Hx1d|9`rYgZ%gpIl#;zY|0Eyd_Yp zduqCMvHT6FQcHYo4og<3G*fiE!P+yuN~P2cxC-aqud~GR`|Zg!^Iw^PvijGcwmIG~ z$-PMOoT~-o%PXFa%J+tnW3!#^GcDn6WtH!@72Xgm64^L($r8NI_1G3+`!gY@E1>L% zB`AhmcFObe{%8E657eSbm%QL^P4U;ExooT#yIfoK)f+;qMK9=-u%R`r)ZkTP~Ng3t^Sn2l^uQ{DR%j(vIGajoo2drsxZOZot7c5$N|C6 z6^Ao7F=6S#Yiak*I3Vpd({&==Z@wCJ{!+K+z%ILOh3hR@Q1;P*)~3dRM-%@bH4 z=}sH^D$W7ZBGbN`I!vf~Y7k-7!v>9W&mt-s{9r?354RHY6WXXGEa~72^S?=Nzn;s6 
z7PrxOY?zW9~V7eR>FdWDyHq}M_#bt$j%veD_F28S**YIv=_X+`{lDu zBMW>sZ0!(j^aS7P;`uY$SRgPwOD(X*6H1Ra-Tg{ogMOo0VIj?#W#HHVTff3~_wY?g{Ic$}g$yi-Kh}*QTG0@PyZ1Z?1iko&ou1u1z1z z!1{2FdUH-^6cpY&KG~s|0aE+z_Ov%gLFs9%`D`+#Cr+dJ@St9Q~V}5t+6FqeCr#sBS^A)-f1vPPu zV>*qD9lRgf z%}c7kiobs!7W!tfCuCmFE8{;E1q*%FacjSO!ZOZ<#FnZk*ki$?C| zV7_2UcTHYA9E;L<@fvnaa8~6PKQJy1Rfi`WzaPj1o;gp|jzb%HcYLClV8pvq<{gKw zDHM484rW4tZSe5husF0&H+6dSTqcyy`L?GoAPy~iHS4ayS|)t$6l(Rzh(jZHq!uwp z?@rB*LnHOnLzrX!Nu>P8KRMQ3`p18{o3~$@#Es^46Jz}Y@4FnOd1ireNUbc?LHpca zemI6DnS@V9>M5|9WH`+LdvtaT9Jw*{{l4W*Udw&Wq~O zn^|By=MbO0MI8DzaKU%^4c0&76)IaVxN{buVTxL6tP1WnP5o?V#U^KFC7{pUDAf~|KW=cPMD8miyjx55cf z(5maR+i-v0r3R%douK4tokr+=chK<^>-V^X-_LC;U2@tTN)#6Uc;w;?6!ilDYCx60 z`*mvZ`vuxI_*MFQ0n#3-eR4H&hr&g4 z-5aF<*0G(hd>QV*Q1RW%cMYKH*tO~NSL3#)d$gJ@ey$=o^L?p1RKLkEyT}Br*D{W! z*1JQO+0WOETez(lZ%X;-{trATqp5Ijy2;O5SuR`1a(8xZdblZt$OV@dAEb@sEjr zBlk-^jbLVwYNz%G4xAE8Dc>-`7)p6H>-o6wgm-rFiv|YzzFtlOTzIJE_h}KBLf@|k zY45r@khCK*#d?DU@I)f*0tf6~$5rzQn?a=!79vF)SdLZw`ZQBmmu|4MD;3)YytABq z6Br(E6!;*41JvkG)0~w}fLAKK5$ifU6Dj+?G4QUh`kQbdZk3Peyexgt=H-(L=0H48 zXAd-l(EGKoq~~(LAXffmaF8A-l@HI)U%`PZn%DQ!Li9s$;G`Al@17dk&EBAY+%v*d{XeDUEu0=a7H{xJ;8X z5l_L-XSOX{blwgg%crQXT+f28*z~HOhPE)*xv{c2l?9A$0mtMD8(0$}XVU4$f^+9Z z(*kO(A=C|W{YQq9r;g2}eP)j&mw`|G_3l@k3RjHkJv;ae& zqQI&ktefeF;IUyt=wDeUs)S$xZSD>df@OOFdfPS=m+^ zeAX9wR#BV<#I?Zre3kFc*}iahzV5!wXEflm1j{hkICb#pkR01#ZMbqP_eef_5B8GP`SUQzv6c2;%Uz^@oN6O9fIyuG(2+6B*N!S9m|1&=w9nZ0HseX==t z&h2^N{+a_Khh{sS>NJCy3w52}edB=6qT^kG3r*pY5yL~|I|nMR3piR%H-R9n99MDXs0CF-D??*s38zT=MEzgYc8hOb`n zw(WA12inX#8!O?C`jAe$q1C(#{dc(Vh8OGh#Rci}E?D2 z`Ua=JeMfCpt~)Dlht@a?Dmr&?VN>|QC9_)1ks8nWahwak&u&vvIBAJy@$UWX;KKXL z{ML-sw&>-%9hG9a+)=7I-x zdW&(ZH5y3+Fpmo>RIzgT))MhTKl;~j;bR8fV!D(i^5w-W%;mxqUSb4&E5th*yXMIS z-WGpkzd7pX1@T33p|@k(E5CKdXe4jK5-#vU$pY5vqLF=Q4)-rCdFdcZ*j(Ul6^)+Y zNkg~urbMb}az}ksk7=WOHKwC*-gVsx=IF4=G>?D@TrfX)+G?Q21npsFXS#@tt=DvP zPsVi3yRNt7v>AGxT{R$G!2w=cillNg6oH3p!~5I_oEDp+k$sEKf!~qs=e~=Wqs+;8 zrr4%e<;6@Lvp^Y9rU5h^+~(2YLo*~PANbjQ90$(s@lFXSFhjf}^3J<#;6ZOe 
z5$}k-I-CuDJm;W+C0f0&@#tL+8{F?dzS*8*g&da6y}uu~KZ{Kr)Mi?uYQFs{?mBE3 z#|swou|({sB-I-e*kH)h4?L{V@$-w1x@fV%c|7leuQhtdJ5!cnLn-f!KE(X zD#V6Q+r=LZt6HLheHB*(+F0-kqwgKEKu7iQLXNWFYWWlwn{`%bB!9s^7TmOY`Yhki z8l~}K9M7=e66?!*#SJ!S-ofxS+n=&V^%gT5bdu+Ltzf}8UTj2|En3e@r?8m?_fi*{ zTGv>iE4-K!EXQVGHVg^D^p>43<(bU_eqJn@unkhgs$~w71-wuNPMsZUUyT_;8SkS* zH0QK(M`YpjU@1j}1$N1IvhQ7^qf?pVpVbJn;Ca>RjHY-P)tP&kaE})wBJYHhcxUNl zOmGaF_e|H&8Kti>_Bn!WTp>m09lO>zBUt<63x5W4R9~XGjN&JW2};{(_xs}eFvuUaUrSI5uy8}lzju|hzWTutUM$WWH{{04>#)ubtm9p0&aH4mBfe@e zKN#WT827)jXf8H!FRFSV-jTZXY(F>~zf-V8m4Q0GUKZ)VZJ}c*=iy!ky3w*PMC!C3 z?7Meqkp7i13J(*1$dwZe6mRxKYp*!$bZqhmmpNGqy^>xij`LWiBh3PEd znYI`Bybv$#>8%(R#PVYPJiHJu6wy7D1$`GkSH;(Oq2E~T)neoCc~0*jZzRTxQHy7R z`lFW|N~AXu<=su%#)55hxwKD)-sr4hLsHIZ7AWxUV4U?rLrUxUmtq^;mAC$ro@mpO zZ1+V)Ea>6!!zM43e%Qc8v5^J4e|LMVr+A}^a=fFw8nfxW5&d%EDZwW!7|-O~di35K z&E@5tc*g=;9lWuGdXL&vnryJF2v~Ww)Eni!z4Sc9fenK^+h~#x;``b4V8={02}{;^AzlRKlQb^ex`+W`;)(2e!JK=zu)AYi z#xe>6P1xJgs=1mAI}-PAzV73J-j<4=lV8pqJh8uei?(1jMB3eYt*Na_@}@B z86W>w9{-Yg>lmvsH&(+gDgJfq@`n+yeV+Z3uN$Z%>zMFo9b`Sa$b=P>*0@E$gI;Df zhpCDxb}V_gCnEx$ePU+YJ*T47ol71%mqvhK+>PNkx2Q<$0Bfnt`e)dw%dKr}PL&iru`;-SXdhdt^O6a(TM08w%@QvD9%<~{E-Q`&xzP>_tYL$ z%%B_)SY!%HVmo6FO>jUZQPGJTC}tokJ6q(-fIVt|CG9^E>-5$9Vc$g;+N1paO`W$F-m8545S+yHB4ZpWWI+ipm^Llgm(lEosvCRhc8uj1Yd(<3u(+r|) zm)N47nbC|09Bzn%dOHHNi;8GPs^}{{h<-tukW=+31Zz} zST}7!{)N|@y$;q0Wm67pRkj29TQ(2JBL4 z?l1RNx%J2a#9}8t)0=9EGK*5vOW2NZVrlTSY&T0p_g&f3UE&DYr7UWrmL)2@97dy< z&|%0uyLSdyqTU_X%=VYj!S2!wlgbh+^gXpm((V%-jyOxpZkuk6atimh>_v2_95-8J zFx>{R8h7qZdqf9&lbd<3TWk>H_su5H06M%B6{Yi++MpW-k&TQAbl4+aya3MIpx}Vo zFr}}KQ2%i?=eCe7x)DXQvOMMp>m`d9bj8}D{ade@?LOfMVhfWGCtb8f61cx;1|4Py z?u;?^v_s81ubJ7j(c$}|0xPhsC)rkSKAJtX4O%tNxt^zMlH2SWGaB8xY_Q#$R3rfWrswrp~JR;w~DWw z98eRg`w_&U!}3K1d|h!4NN$;2_v1H?P@a(8`=!eP(Mu&OllMD9U(vUmj3h?{+oBU) z#2vx0Valm4bvk;d^-JJ$w>`YfPfih%1>`YLuG`7l9;&)Fgxuwo!d~37rFWqntUME} zHE}O~uCmKEYL+c1N_bbExdSNYu-fGpT{hr%aa(TGLqJ`%vz=P5+kkI&z3%$~K&`zF zWnf?f6AJFz4tD~|hyI&uk6DAom#f!8f8*y;yKE=tTfxPtYJn&5^Xi`;zWf-n1V%*M 
zF5ho}n7itJ{M=><`)rDxc6|cGc>ScKZ@mR9q}U!%Rdqszo7FDIR+z($Yqu$J7EXxq zC0o}~${fI2bi(ymCj|1H)wlPW{gpQZ3neR;n3zHL-sBWZJ1108;@bJ`i7B+;?_XFu zA-Ra?#Hu7y*mU%@cg-*!PbZhMFVYm;j;~m`SkejQ)7zHiIhq1YJf1Gq1L(%?*-k=W z3Vg3v-9kmUZR^B%+-(BJJy)+e`T^?wJ|wV+ZVER(UA^|v5>T*GZP-<7Q)oW6?RsP> z9f>?Yxkh!X2@I~s{Cv(4RVakyE|oBW#8-RveQI|=AXgivfZLT(eybdDo0Ny^Hbp1S z9kd69hgSy=^kMp#A^fy_y*)fp#{7KP9!2KtZ4sVq534H=wf=Z)kFHNkEMjNd!P3x^ zK5C8*$QtwS-C^6m{nH9k(_8E8ASviciRNKUFDKM4rzqINIHP6qRT~_UwOHkvRbKY6 zu<29aI~zyT>eN|Hdu$I!-`YIBJiCFhGp zyaN8E@%DzXxr~iH_U+hOczdAOpS^+Xha88T2RTo29ppMmIFN86;Yh-n#KZrwZbb@3 zXvD8i#b${iBOEc;eGl%KYzVKT23LPS6@XfYe05yIU_@Uy`e)r^@800uXK@ASw#G*E za2V<}Ext9w4}i(@Q6CTc`|Zemx6@%rj(6Y61>i>d!-mzIFjV*xE0;n5eG6CLUwJ7M zQF*!Juv`@5=_<~lsGXNzAczivy!#N7LQxLyUMX(p@b3TMcKgltDUO%uP@ZyST1MeC zlw&Apql4Q%M?~E<*G)qbmSaN261J|-~)^$(8o-fY+aJ z2+ACZFSdtgylXuj!Du92eZAeNZg)Qzwf{M<8hL&x7}0p+PPc`PJa}vgMuj7Au>~KV zju;h;dPm;30iWXd&TouhM0E_;u5Pn|q>amaZ{g>Ww&B{pepy3yo7JRWNqD?tH-T6tdoWrUevf%8M%d1S5p{QQ1LrERfa-A5z8R9~2jper*OxXVWYzrEvfDZqEeR zW?Ug2+wpWhI^7n)@XeVt|RqWObRj%CT=>8DLWuDerK;YlFs4U7Nn z-fjwlb=}OklYuCwLPe>TVFuERCIo673`EpZDoT^u%z!&H@8|11fv8YBT$@^D4%v5q zbv)P_h}u8(Wc3f5LxRwq2^n#LsHv@O0ma1vZeP26`i@#4Le#8PQOhhK)skOXbtyTlxxwNuZ!z`4wcEHBz}43aRX-DeB30FEavqyQbtr0C z+lt$9f)+EJ%zpKGw?LzK% zH&HVPQ;=W3{aXN%nDowPm9r^`tz4emD1_I4t4PFUwJGG=?yEAJ9EghYQy%_aZVFb8 zb_UZ`0+EDQ;K#xYQ<&PT;_7V~i29Ob+~r@Iz`(8sqjye$=vjfdSTw~H^vjV~Cf<6;y{aS0QgWC`s|NG+|6WH|lfl$b5{Jn3^w7O;!D9KiQD~AHn=_4!O_~P~y z-CjdY+`e+tiEiR#0`r9LOqlpQ5Zx~9Jh2%}pisa>#`Y?nuk~_I)ekhNVfifGd@K;H zZkBuX`veU%o;+|%J%Zt1xbn^FG#Y%`A0I1=@fFsg+c%UzgMizSAI|L!M70-$@7L5^R2+tZWr{?1K$7G}Q?aQfrK|!d{GGV8@oiXhBrgDWpEeN6c4=uwxjG@(g zfxK&Z5TYDXQJQZ?gP47W)dEeJUT?H5cub{1j?#-gi2}h`FL-rSl9yxq^wZ&Tb<7|A zD7(PV7+kx=Pqi^Ie-5DRbq|f9C?zlbVP-I5`ZiooQ=mceLBr}JwfK9jIo4e{czsKn zbq;n1Bg&g=de=|Wz(8i!N?D5#l>CIvyrpjf(u>Va4`6vD@tKmZb;JakddqZgE(t;H zkK|shes2N{Q_k4}EWesG&2KgQFoEE@G)swgERU`W-@ET+3bm<&SE>iG{Hkw^Iar7J zD{F%6Qu;KMtQxNUiD?G*jO zfX|NQv|VwyoqS-HzzZy2>^HrROv2+$UNG;at0m;j@LBq3#WciBb37=0%@Xn@w<|l; 
zVEN8;d$#e46_{U1JF{~T&o}DanKc`&!RNiz-D>$zL^&LJMG@cL}6*EIo#arQf z2e;cmD{J4kSz)0_LWVD2|30=`iobdtWrrdtTt|&bu?1^66B+OMp{RG`(^mOowouaf zOI4~M6h(RrhG**9fyk_P^T(Cq{pnK5L+Ud-=#u&#KmQiq-xhsU|JG{<$@-CrAH`@(~Lg6FV4vgf7E^NYCutxsD6Z`*<3)a`AV zMYw(H*I@GnJIEGU)p%(m?oVyson$49fAv`_r?1EQ;k;Mw!D6e4vf4@*MlVRitLvd0~*%IMMVrp-e+9OAB9ZbnE zGzv#evS;McK{}B7C{|IGD1UO}jn&SS_N`|3E_OnGM?T)Yzsm~=b_~Z$D>|b8&5cLa zIioSw&is_&^Y`5+>aiZVL*7X0%Y0|amySqd!c@=Sx4qHgB|{B)PaM&W3%jSddwQd| z*poi_%jih2h*|LTwHLB56`de94*rf^xTaoLBOFl0(e>lp?t7s~5lOeF1%N0+&YBNS zc%jV$!)DW70+L#3^xgWc7wR{g{h-0r38n5?yr5x`_usX=7GAoI@yHt`%db%higZGo zmGqx%R`5ae#V6$^&2d8NZtqPyw)&vkx;ts|R!->m!}5|7T|Vdm`%9fUwmZqXqGa7s zvMwoEx0I}F`hV1kQ>IYHjrbPB7N2`=w4D8SZA;$vCMAbs+t|pXXxgCkh<$PE4<1N3 zkZ>a5NWz)K1BoXRk0hSSb&&P&$a;BXJw3AC9$Al%tk*}@^J8f5KmFFy8{On&_pa8p zK~08_59DZiqk&B2k1E@&(F@1-mCJE^&zAwiC?RW9B(zj2hvSWsbZ+K-dSrz<#!vO! zw#pk>wn!}IP^{6^l*|*}r@Ya#B`ePS=(9pg`e*Su*L$NAG56=p*=B`o6!o9z-SI}t z1YOG?6T)bfIBOjExn?<$jH$})tqp+dQ2R+-j zVN;%gDVp|k7T>rw9~7MQEoaUq6Lj$CsTrTuebE>3xI3HOOpt84(wj09U-ZDm(t3S6 z4T=A_M?}y~-E?rIeriYf$Vm-W``XR;R6{ph%_0a3(nZ~XX{wQhP=55zv z^^kkPtz!*w{)jtpYfk`057~EmzR)W0M-MLL&lGjiMXwqdRJt@1^{HH)k=?C>Y9H4Q zUJPL(xvHP0pH#Kc9Sfgd1zaZT75;SbM1v+;sJhQ0EsTk{dA=W>32CBw4ZH4=048Fb z5pz|~Qb*g^=3nAUnTU42bVo7?c(&0bNnk%e@7_a-|nWik{!c9spl+r@$)%fUdqF9J8VQRJKTTOI3*Ks^0 zi-jJm+BhtxYoRFTtfNYYScvg+@$2J)TFB94K}gF!7P@gOsJ?el4Ji)@pPP1{h0<2q zKPl7EK&uL#(jQJ>qnwtl#wiLK=x9LRc@YgZ;vT#AC{R=#*$oOWUg^h1y(SlRio4ZN z*YS12L%>EFea!5;5^CsvLyG;z7&glGaFJ#`rlK*~!)y?I5+|pOa`g6D_{?FWNVhW= zjlz|X7~SVrIBr+$Eb%HTS3u)VtXq5BgN^EBx9<^KGZh`#^|Wph9#18^#ETm$hjbcJ zGHM*~`*CQ8=yzFk!y5(3*x>&6#~;#?mqAJ%>K&auEQEeEj(3QZLQl>_>d!mRLfP!_ z+qSzUQTqw4WzQ>FsOjm-4c>f`NJ;TJ%VaSN6&{rN5M&~W^qjKvlEPSsQnZt^>CHeL z)7Hn~-82@0y&cVt{h#Yz7FHhm6vM*&QG7nTprfvTugq-r3jBWM@nN%5*X#C_trH&3 z#O+&}lCGQ%dZKr@cpvVM&#ecZhi7dH&sELW-qjZqcnxFKSxlikKuP_niyT|%o=`VEnDtu0+nu(BU z`i09A`{)b8CImj-j-QKm&OBrEmTqe-)g&9uM8glCY}@Aajy@PPXaA`{JYJAwjeFK- zx}WUz=Be6DC2Wua=v4ogtIWw_*Ug>Z8jkZbi 
z%uo2E%!)U*DZBYVy7l2|>KT6oD`cCWdkVl3yR4%gl6XFAPYJjE5CDWy?DeMN^|)P1 zFA*3I+1JZv?wEqdXTNVb5h)I*CvA$cHD{t^yP4N++epBw^2>d8I!x4lBq@4Az6AJ) zjkm7)?2p>Lm$V4UNWh#mN^d@H@JHzF8(UR9aY+6)t#=Ufdm(B26W zDsK4u_~X5@UVZc>JB1W}V7&vnH52@jzSCEnUZ1r_9Dkp?^KhEd0G;9fzH-+C9|Qy0 zCT$-E>CAo49EXnjpt?1CUsxCp(+fU)KVlq%pL;egsBiv7SFGQiC+ddx3#b0(l$hW2 zzC60f{9$iIxzwSqC-aNGPH+F|_h0dT7Hp&_W=Mf&oS!by<9!gd@S!g43Lh-nez185 z)(4=TT4Ez32sRD#&VR&A4O`ZiIYtOTOu@5@OE-F>%z(@X2Wf(^_P4X9zKb_1^m?`T z=rVo?!~3(_1U%l7Q^H2A6xgkBBj?B~FGQIugfavKVE@cl1A^DF{<4PY*mjH`#`*np z$*IEn%f=}GpxG2SIVDccjF^?k<$%qPP3 zX%PM)Yvc%8|eq~G-AH4j%GA7UWz5Po&=#!vdW-OK{p9}I+kpN}!Q&`Ww0VEouA{+K$J7lD^_aUsXOyi{$`UG{yr=0 zjo6*9b+$RO2Knzi5Tz^Ia_^p(b*J(^-d)4M?L(4vor<68n5`z#pXR%x_B~;5y6+Fv zo%So;kZR$Ml5cK*tT>eqS&H*7OYw3?D;|G8czdl765M^|>#iy{lzfFcyjwy5-6?)3 zm|5$J{1swdn!JV4Y1?~mU4OZtUe3K&0z>1F;NbNfF>pam2AlWy?3##jveswi?r}zP zn{tK^GbSUsFM3aY*f=AJqfeI#CrhK+ry1K;-*!T@<7ua-6iB0(stY!%`>y6x8fR4g$|&x!G~O>O-t5S1aYn^g?mapZq=WP--|TR*bV1BR3q4j( z)7gMstyge~3$p)Iv%!3)K2m$;tl5+7g2tbHERh{xj2N-E zj_up-f+#WP)u&mQpoFEe2G$2%(Dvhd_Gz@4prau6u%f{Qt$ua4S<}T7xgRetnbF~b z++$uZk0>xhFn<4Oy=YeyZze6f`K>vsoLX`9W11@}4=mv8>#;zgc~|CVx4WW#)th;* zr&}WH^-TH9->&H5E7kRZ1D41qCE@j!b#6%W?L}1c-4ZSAKePM;+a0|QE8v?t%?j=A zlfNl-%pE=ddAIr3Eh`jYH+|D=dk++un0)w}tTkG;cYT)ReGeqkFdV<vqZ-m|^;^AM$!5G~|ek zrp8{sbkh^%1{B&&xlTvh%qrZjQ@v2eqa}<46+oxX?Vh3)>V2Qv3j0k^^?bR*D>9TXXn`eF>^@0iPWn|y^GY#NWG2J>qxzi)C)q)(zv6?YQXf3%ut@lPFlvd;ZX4elA{Z+|r7 zj>O`K-xlb?V4I?<923rabWUX?tmN{7)_aI~)O~mgn8{}Rv%%q{4+3}s9gITCIBPn@^ zWsDTNa+O`YSSa=LtG6!>8=;6FMLjhEEVRzW#$kMyA$ot&YDNjBUezb^M~(AeV?=kNPB^_CrEpPv`0vLg|uf#dxx}# zNPCI2r$~E?w8uz$jkM=TdyljSNqdpBCrLbzcp~vg;+doal1@lEBI%5zLy}I(8qZ`6 zXdBZ7@`9}_bVvC~$&_m*=!;OswS+kIH!W1I!ZB*^>Wb7SyFy{*A9Jmi^h0^c|T>zyf0E{>iv|VNOvwuh<97zswsh9 zXDDiiPv)Yn%amqlI!r)kM4aDTo4`ex6YjcS*eZ@u;^Hoy#%WG7;@5A zk&8;K7EJkkN)lN!)ek)8spX{DIbx0g749&AET);sfy?MX!I1ywq ztHkH14hP$-Yh$;y38JkA`z$(MveEFOy_=>D4b*kmNHM?NVIxZB#`_$-k9E$)m9Jj5 
z;P&m}tJfA>t~)ba)N}6|`>!ncw7$W}We;wj{9?0YsJ*Uv`vLaDwQPhuqp7|8@1mvk+_h?k4lD$PCS-oBWn6VUysEC==0>>V>>>x zP;$wIo2x8cXm)!{2%`W7GbIsionR7oAY6L>9guM zf1i!n_GznUnZ0>2TB|hXkK7~ee$p>M`VC0G0_k@k{Su_#g7j;Ueh<--d)630D&CBwk3ok$5HXPSOQQHzZw=bVt%9Nw*|j_epcCyuadh?~IW3RaNwr zmFqmJq&SFTpI*JX@Cp5KcYpjV4cvb5`uu^ECi?XEA)ie{IcWIa$?Q+;$Mm@Nm75w8 z@pG*Z*R!+V(+9;eIOi7Pwh^uC?fZB1J2$Pi)SlrWC+oH>7rB1=7_DWVcK!Hx(ocHy z?2?V^i*Z|T<#(FP5dD+#CXL9Wm~S~XP8-zu;mroaf$-g!Z@rgvH$JSNc?^G_LT<1TUubSlEO%?+Qdk~xU>wcVlpkusFrdf=9a;nSS-?L)*S zWteNU&mzkozwcw2d+ve?OxYp$EDHBmaj(Q-u9zz9IheL^Zy*OTubD^t1gS#hiEfFF z7MK_7^9)2iRH4~cin&&ZgL*H8*Uc|i10>*l#NY$Qx5K>IpI@rM=Gktm*1u;Xn!y!K zuZ`-^JF~>6t_`>Q&-r!RYeJ#w^{t{cY;#P_>D_!I9X2XF8uR1ILT&gqW5Q0yNo@33 zUZ~OGoi@NsHvyMk)?ZmsF?V^z&c`eiyjCPJ8|lE{oig3qH&`g===J(m8y#?T$vSHN z5ZgnISqcj_X@elvGq}I8-XJT~c*sf{6zX>6=ZIrHMoOr$*h&i?Jz2RaP@9bs?hSr> zT%-wmsxF)N^7N(~C_DF!2ApnFalPV%`QygmH=D)kFd&@4SqxRIqq{jTVD-sd}-X^rpHF0u)bOoR6)$?-6<_ay#51HPMLS9P^`%}EH{OX((+pG zhwxG1cD>c4Q_5_VtaGyX#9C!o(owr)9)6$lZE9KHNhO%;7k=lY80OdOs+|R4O0bhP zxO)CC78*vXoio2HfTrxGBCRqkU%J{IUWX_`@q^As18Z1_Y7)_t)S>_kEPp;?y(BYv zNsCjQ98A;Pw_p+-!|TP=vMcxHK;>=jzJ3W7k~kO2{<237)?Kjj&*)}i{yWliUQ!;i zv*Ir`%KMl| z!e*PAh@cdR2Q};5ev9St;!>LPS#emK6@Pda?l1Dn?=@!w#lU#ha$5X-CMs;BmY8f5 z1yzd^;~srwqV^Amb5G@rgS#*Lwf8>5`+@#i@yJF&aNx_}?85eJp|E|9A0I!E`vu-Y z)Ucgh&D(eOoT!Yxdx48k=JUdb9g2T>CFgt27Y!Trk$uPf&%Q(Q3CTw!pOJh>@+ry3 zp~-HmE^XqVrqtV8Hi{{O&zIcki?G~j@|mV~cq$bV&tFb0-hlOsr^`EL9ijrIrL44# zCzmSK*(wKH%{#SSX$FG;h^ zIL$?jHbuGa?Nn%J*d@2{4Aw87Dazf}q{0~P2{x8uK3$&`D4l;6kI#2dfW220ig16z$8rB&tCsaYR)yJ{a#jSM$L+@E z0&y)hP=1(_aT>R?+0-g(pBfCY)jRqQV))GwNqnZN4%@4`n|^HIA{{2R$}&+6Cgc7* zH{s`p7Kqkje2F(&ZK<2jMahp!9L~N`1(O!>Q>SNh(T$@g7KKTufnN1G52Y}y|1QXI z-Ly>=cC>fyEVRe{O*gr=>8vXBuL^t+r^-bl6P_ivS*t8-{*TrJYcHFJUc&Tsxb?ogf;wcSD;8h4jQ0Vv-8XBy)Ie*^*Y)Cs*gm*z9_`+u z2C|yhS=(1}Q1-{bHIJ65fu+rYkcv4Rq+`7M=8f0??kkvu)G^xt>3zDxQIqCe_~v!3cioU&6ajyJEf(>!GEe6EI!+vV>gKVd#fn- zdb=82+tKhTadH5kd8{+4Uc?R80>L8zM zXr1C1fOPi6{D?EqfFH`7?;P6zWIcP9*~UC|IPxQRy1YjKYVAJfXRf3UcaF;#)&v9~ 
ziP|g$&oXt`ER(_UN(?|qYkl>L=jzawG9hsM`~Z}#v%Y$gn+6!_>4lcf3qaM+<}~%T zYrv9tb5m3NKK0|vqgE?5VGLJ=rrJGG4Vthk&CO-kY&@RI;$nuO7KAqZiux8E@E3RS zVc~ZgYyyz#k8^(8-)e%ht5nWp{QyM4`#l@)|AGeBw_cYGK$&5ymhBqWgg5rBFZIO& zki5nzY5Go0*s}k?HpUMw@;D>qq>-u#_tt-1|Lq-y@0K$s54UT=W^KLQb&oNfE6KJk zY{2kn*wsq8gz;l^MKfVY{U1IXC8be$FWa;$|Hk`(zkTZ*bR3^_lZ&bc^9;BmM*p9@ zC&wk{Mb4XC7rAZ{E+pJYxN1Ip;C69U0AkQSuGn%%3(k0+$SOV%fRYn0d{Ex24M;YF zbMZs~Dv`bwQRS%vQS&G4tZoQE!hXAN7QWYkv9%u!>^rYBpbM%q23Jo#R zf!?Y%n2X_o_-H-r_2Kq}!^XNzh`Op%&jtj#{uf9Ydizci>hAbd&-8e z{YP$Fqzj&}VZzSnsfO@4Vo*@u4zKr^eERwAhM*Chm!57OfTp<0wiO;S1ct&Hv<>qi z_^v5``JQI$uqe9 z@?lD?E#~|D;ujMO>J4Gr;+H={U;IZ~W3<-QtTv$HA^h-p zcgpx47u!A0lIKh_gzx72z7^l%BJ0N|7D?|n0B14hxVR^H|GK+C^mwm6+$op&=-Rt`(AB$KJS|gwms5; zqFcM z@+ry3B%i+?FS3~Z0{5T1YFS{u9tcoO0_wlucJATACs*`fTk@P2zMle+$cbL%FI)9t z8i;6bQ1+_|edzW+k#$Hd z5P9rzZh7=g9|8ya-}YGqBHgrA%X&Qwpm0ue$wiky6m)h|fZTBd7^9U!3{UOH7=E+N z=P`Nr0^X~$HA4*{e0vUGqF*4kKc6KZtTTi!j&@01_<823Gbj1fjG#U@$0+FK0gch zfAYmdw=QE4Yqr{w7>4Kb@!jjptH$8d->-el8`JOnhn7p5jUh6%#OE8<2hfs9MelwY zL-`@c@inTrO+P5`=8Q4CKJs+=)+x9@d!fdHGCcpe6J*B=;`uTb7r*f~hWwwjhuNR8 zUOi2_N9C~*G>IHO;ezE#;ZBolr`G=~r!Y%u;or#q>3*?0M=2{1rMYYOc>DZE8)LZQ z@v-zDJ|^)<;+doal1@lEBI%5zLy}HOI*!D6e}?fqHg~a?Wivl1V>*~VZ}w*n-oFyf z=Y7ZXVQLCB{#>mG&nAd0p2Ng+_kB*&mG^qUwxo6dOATP$;|D@{ zv3UIUcdv`i{Y&>4gxXks(s*zmyK6Ew))-BX-HqWJRjx<=koQc;dnn{R74jYndC!Hs z2SeVIA@9+U_iV^}IOIJY@*WR)&xgDRMBWpc92I`&2yQEGOL;gXU;=8|y&p@ld|3Y8 z=eXiEW5}4(th2=~5Pe=xtzzJ|g`)iWlUf*Wm>-VLG=|JhM!SJ(AY$Zi-}0@>_#b&e z$`exFkn)I>S0r9Zypwc6(hW&hB;9>TGOzrK;ZIw;eM@7rKFn&dy8GfzAZp(2+#>cA z%U|5q=nlks#;c?I!Qh{JNW3j~jK4|P#kM>UdNl68J6`|G-y|N$`&{IGFY-PZd0&jY zPe#Gdze@!oF)<(61&>R`0+HbEGH1V~CU9DfvuHZ*U#uy;{EVszM5VjAa3=*KbK{c3cH>RpSn7np zI$2DIb;9@B*J7O{98F=V8~XH znWVpy^oNrEQqrGF`ddkVEa|T${kf#Sm-Gjd{$kRfO!}Kie>CZ@CjHr@znk=jlm2qj zpHBMQNq=meWmg{u~3JBtOtWZ9h22vTC{d=p=ezOHlhtQJmD4m-rQa_k5g^*ZoZBRq%!t#(wzxN!B)dqZ*-Djmq$mpVOr5m45hLHm`haT>VZy_IOWIY$*9rrTUhF^^mA$?foP2s^JE! 
z?%FIVcUtJZ#}j>%h^5$+WVKB7U+wYHgmb1_StNtpsP|iCLQnwq(NPsOuR>2Rmke^9 zCg3P4A2Dm*kFWH3OX%NUydG&5niQ-HRKw@CC|c@1|Z3WZ9>$ST2UOm^Pwbb*af4i4O`LXu1IJ0TIfy|yy-wC zTM$+|?XWU@?;0LA=>a2I@gKzNH87iG6XI@FHEIj%=>XBH}T@g?w=FGLUS}X@#2bJ zOSvu6o#07vCwrJ%f|O)0-HGl7B0^t3j?Pgd!jBD8?~iGPxHbi|6qymS6Oa^t)qm=v zu(S31zGPOk!d3g_DkbBBPjDCKgjG|zwox>$3tN8GgJx1uuV|~oTJO5;;*aT=g;2AVzZ!*&)IWGzp9`QC+$UGAsKw)$s+3DVt zPzcXLar7YB>Fv0C!Y%QV`apazHiQAe1D%C(*ZuId?(HDfl0^hOlgAEQ;ZR1k=6Cy3 z`aYPm)Cy59ieYEJc@xZXHTru4!_jBz+(~u6U}8`M2i{?sQ&Vch1YoLE(#NyHI&^E> zRpavL4HJ+Sw{rYtEr zEr78|33gKVfKJ8uS5*79@SxKvPs?;qJDa8om@YD4Fe5UE zH~My;6VvZOle&}@t0@=%N69?B!lDO&`-U$P{veCHNW@~e`8$5E?slVl z;2Foy#<<2Yzn9=PQ+P3p1A?Njci#U=)9})99zD$jPqGN|bDLqiAH0s0;UmL+DJgQnl~(;JwCR9=zoEp|~lDmM(Jo;Frqv-XH+1%%7@k{~)bxR3h=I3!`| zgX!uq0>09Mf@a+k++l!R6j_w=&;?*dV`xhD3!g{kK{GIk_Huo6&OI16j{(PTCX+d| zdG}+zlPMx-t=#L`znvDHw^TQYm!u7?m1a{Iq712shr#X zt<#68VU%B0Wr;5z*$V0_Bn5U|*)1a(^2$EVp(vlyw=^aPQPS;8ZPM?2T}|aa?#ud9 zL+Sr6T91l65}%(u);d(uGYX2`MVc7Dj6TlC8EOV2gA9KXGi-lO=2WEBmsittiJ3|j zP7lCZ-&U`w7gY>m?QezaeaKf8c*Hn^)q)w+D1R#QQdT44U8T68RoX)y<0VtbR@*8B zU%5|o?RDc}Kn}lWOb81m@2BB~OZTCwEyKkgwZ#*ZS?#pikNB zw*`)n(_pkv3kzQax~|(EbqgS$SD1Y^J_d&J=<5WpIQwiuj0ELfyv$(4J&k5{e^r=b z^$mOC_MHyuWi*1EVbYL?NLu7{d?js19o|!N?1{Dh)&hx$7XgOM953tT8@;NhnYXc? 
zCt_@9z*h>^gz zJ=5)V+1D_3s?z&0&};F%?3=MaIm5|jfD75!m@0eO`@{Y4YpU zoIZCS^Qc%%G$%#V-vzgr6KlJGRlE%R;Juary40>^Oaq?i?3wxl(%|zy*x)0%gW1Wt zB91c0`Dgg^lZ(!>0C3~j12JUsYi-WiY)8yqKr6*bp-nadt~^g2C+J_QE2v;#)j689SyKFw_wbB9uzT<|)1@#vf?A;bx$d6M9kEXM~=OpJeh{Ef9 zvM79Y|hoF&?9nb%V&UB)0GMaZ8QL42Ghyf ziApl)N^D_w^TB)AmM#le*MHTk5H%d@8nZhdVc=68uKYotja0ZyA|Sne;=DHCFS;e( zU-R1T%-jAnVXA&iQdX%{j5!N`o9y`DL-9gwKwY|nr2=#kOG0d!nc44%q4Q8y1BT$E&SZ8^^*H*c?~hgPYf|Sn>}{j@++I`_JSfhb+L2aZ#8j z{FN+;;Hj=U&Do~JnbmQIaD^<0es#gk?-!RaKDo6BBV;?1V)!<6509wKp&|EFhD(-u z<);I7K)XH1S)-m(eO+`cW`1dUzNZl!tPUH`th5OqHMROCcm+8rS=K(B(&cl|x!OYG zP>_eQoZDb`S#XK)hEapjfM7sO!i5lxeQJixNZ0pj#ul~eN$-Xa{D;rB8sfG;BaGU~ zsyhFN!WNp%rq^4B2vD$2(6N__$Vkd)=hX-kW~dMwm9+YA%$pW0e>5V;3H3Xl=ycor zqW3J|T~*<{fVO4cYj(tBv~h(Hbg9~5`FQ{jew1T8SeW&kiF?v}l#O696y8MtK~VE9 zaO$*h!E{-H9?mGXryZs3oD>}ubqB^u`=*g)?)3tT<76=c(O7E>IS7v9af9!u&`)3q zsBWdY(&-fBQzC_8O3-6fJRk}Kh1FfYuE5OCH?Uij{zSqpu31G>zvtj*+NWi>JR=@~ zq6{B1G_^ko#uFPyL3W7qgT?je)oGq z@fWRs0=sK-LbRa2eRlzc9W15Ly_&t;T7R)kn17~6V)V>jPLsSC7b;H@R7hGp3o$w? 
zaR*JsYk2L%dy}cZ4toGo=3d%#Kz60G7a&LXIlL^E4UV$brMu&!}~&a*SKcaING1B8E=Z9dUw7*cfIC8xPWIU=cOC%3RxeHd3a8 zk>W6_<;66KZHkw+DxfQ`GzgS*7Am(Z><*}WKlaB+Ir^SNz&3z-!NWYEbjc-V^Psxa zf_Nnw*LgPL%YqT|{7Yk3%M@+wBVM*g_lHEHmAbVI=z4(rQ+Kw-62GVy;cV|^gn00D zE{BRUDC`e`g}`f_JCu&q-xE_7ljZ=LlfclfBg1uI`a@k0{@H(Qu{Oa0D+CmCC(b*) zaIVtn-96q}ES(cvPNT#S+|M^^tmt6dw zX^}ZSBzI~>{lJ^-l{XrXPn|7w#;-5aqH0=+Sl*f*UUky+_$>oz~%H^|R$RM|AnZ1ZI zM$)2vJgM$(p^JJazb3JR`L_7TVi5$RmmpBosQhXQYr;JM)~#Fb-#TdmoxZ!xt||Z~mLn z$TW$dotd0!^j~dT_>srxdchoBq&tFU|< z*2Opto;NwM>rGwBPdK@9u*b~~p%pYa zRmPlfCaI3K>){UuhJs9UP_JR|9*^n*Jw^kZ?)Tsd9S4l%ixpn*i<3=8Q?8_b3>ZT8 z@N0kBlz?`l&Sk_A-1k3TkCUAPOEf2?EiY+z+61g$_1&YRJI5n@UtSIM!87{h6}Bzp!DX zjSb#b-3^v2dq6+)>XyGScnCn($jW;Qvw~a+fj382uYw^kK z>?3p~P&$^xrIcB1?zKTyUyI-bIPTa3R)Oc|Tm(ioG%F8O3kEA{y6s;pgi>%L@YA|X z)19p5a;GT-<8ZiEJZyl$J)9(n(Yc=goag-)^y_@R+M< zTIIy`fIAFwWr7>v>5p;iYl*$U@1kQ_5%TFYLT@Ofb)4!C+~qLgoZr%O+|)+wTp|zCr0>&?VtJGC zq`6UtWJx#oib;-zA9j}M#?_DX980x+Wc}lfZM$tQ*9_aT2>PD}L^v-cEoAB`mnQAA zJ`B=hsCbwI;N;zG_|kZk*NN6CcUn%0d(4T@iFhGfCYx>?AJ0N8@#N-^+_!H^SWMf_z?8J-4sMGvp_He9~><5V@l(xR> ztpy5OS1dQ}vR+$H`SXE;P7fR2F65Y2(+%95qt~j3c$NaeV^QoJ`ubIX-D8L!M8B#t_TC23 z7o%kwf^c)$(IsFuJi_=V+5E0s9yeGOzAV(PU;BaZ+DK;gYRkwY;piW^pUW?xcSS=i z)MbiN>SZL+%v=kFdcD(+ep#vU{)1V}npz6U&7w^VVC?s%RWf3l1K9;n-j-1DTz`kU zM6c`07wAa~^BCnL>m0n#H@<#3jg=4m+^AJml4+T{W1+*JukLAUT$a0G0s7`GE!24_ zqMAAIx+GDB^(kJTIee1Ed-FL6y6-Lp9!n@%tHvuxk~5<*x#S-J{kkkk4l%z`&ap2& z%m*gt8v^nSs$w=h&g^myYM|*3Iq8bOU_G{843h(VQ4+eGA&4I6SUb3rug*?oa6b%M zw|&cEqTo(<@etoip|43=>Fj0-i`v#!Hat8m#Yet&+!16|oGr!~oIK5BCFIySx~B)z zJ1_E5*|i&D>{sj^iR-JU^zF~iQ}HlKj{?)l=w*ov6(?B+mcmuvM^KP$XmmK^(d~Ap2 z%CfSf-Q~yY0)K*j>-Zmc2_h~Bf3~}>M%0M+muGVtO_&p{n!44x`7i^sjypgsmvSS* zlFz+~o#*8)`qTR#>ZF**loZsF`=9AOw|`Tx5jz-9N|Hmi#t_^7Wj2kv8lFDk?RI_b z{kPv0kS6>xmM|l>O1cNhr_q5Rxx8%wqN>(GgTdpvE4(B9OZ`;5&jxo2XGL)QHh~XM zdoRfoa<;@p)3MN!uGN`}834gP5Vl!YV?j}9MH1gaqqp)vOLU|;t>UMT5T#%3!*EV@ z+O4*_XF-7i&h^H>m1v21!QL&2AZM}YT}AGB`}#JZ zTL?Y7rPiytNjS{;o 
zk_#k1;I#j~N>fFLbgHk;8p@DAr`U7#L!xa3e(3UKn$*W~Ys4gxQ>}f8UW|1QnOnG% zpyj zv_QCnC zUs_%{)oRza`bTcXt5PIvOx;C2>0PH^0sQy-f43o4u2nC5-hTI9G^-at=&P zxLJteSXuKh0S}$vxu(}|@bC4_+9N_~sJ+joF8VV|_f-}-N9)1-q2UhfLq4;292Inz zo-VbAZS!wPFz(vhvb56LZ7{5T_tiy~?ag~g7OsMp<}B-IwImnmwTuK6rA|Hx)g5N(xG&GQ2$#^mC7XcpJy&SUs6@x%l(ry4A$v|Icy z&C7Xv(cBvpS1682Ie~vZDJr z%#&v1kcTZq1qgJ-|<>qX_Dc$c1c3P%0 z{~-LOZi_PXfyx5py(OADU)^8<8GY0XJ;%a#>B1*W)Rx1AKNhIB0^czt-+>O*6m)Oi z2_0T(nKhBZ8Ms<9sW*e**n=k1g$d8SeCLfx13d!E$h@Qps&+q%jkQKG`3N=6#izb9 zc!u54RNt7T5Z~P3FEUg0pLuI50YlhJ4r}{ub=i-kUi~cZjT!vM$bEv|uPMx?&WkW; zNY&7Xb5{i>KD{3Wwu zQo2GB%K_e!=V5}~PaQ?)93cQd8r%DcYWDfh6+!P!FJ!4uH{;Q0Dzc`SCG{5Mp`E<% zUG{NbmGp#bU-%G))*kh*ftxY~#%{9S*8=(lyvGR7WpW|nYYbV9h@FsI=m{E1l0@II zv}lVQ`C}OGi83hv{>#|?$PTh2{D zRdqkICD@(6kxN~S^0E4DhY*SPfXLW@=kG^4sKOq<-!Uo z6h}R!mbv9?5(yoJ=EKid?-)SxgZRc0n2MApaz2hHBGOBJ0z)Pu{VG05q9UAl zAD!X=85a>3PI8r4#FFu#)K7B#wa6s{qBKnKmAJ>6@SrqI^8IzsH32HoBRzqm)0FOz zn50s%k3IMtwN4yfddmRj&G>tjjX|N$u3xuwRiiv|jIE`LutIGLr3fiwJP=X!QvOgS zIrgQ&5}I54nq0F+9C5Q0+~s5JFK2f2LB|N{-+^L`w?DyC!oB$BrKq-xd_;C_o;Q#k zj>~L_e8sPE_D4a&WuUR6>KmVWv?4hkFqy!_-*!&|Hif+H{0r8D&uP0MCI{BpIfkdx zO$N6TPrmz3t11F{4cnU|z+N>{rvwh2!`@+`gBcAg z|6?$}RweZKdBz4=WE@KJ=X(sbB$m>4c`}0;4UF5b8-}GZStV(tT6|$fgp{6tpYR=? 
zIq}J=I(f2R^wrqT{fCzOx>H{(J#FN7F)Ql3dSD}E#1CvYm(?H43donjM%OvuT$NhUQ z!|TDMSZDD^)OL(-GIIwk56|cYA3H;yzSpeS@S;}^m?II7?b+*P`zyVpFBSW9-x*(i z{|8_<&n%_44i3?rya&{TktW$XGd+e;D}}mz<$1ytJ2nQS_4>R0zV3VeZP7&b$ek^P z_CD;{7Lz04HppHIxqMA!x|r{BGx3#3fHw74O97KtLG#&-eCpnQB7y%|o+pj~<&=Dy zrYMER1!TrEH0lKoCTIq4=Ksm_v5B~!RDMxE4V?Z3PeD6FVOu zC%KMZqC`s~Y8sm&L5uw+T?=+JJ}tm=ms;ZAz_T$2Di9>4VCuD|lPFMlwuuHN1xICk z{-{RFK%11<=(-sfEfhK>u-#M5IP!N=FDcBOo4+y1hgKqLty~b6ftDW=d0!#0g@Edw z6zmfN4YqelL)RdgIx3Euguj)GJ!kbP4D>4GPMnl~m-0ku<_~#GFIAph@Nh%B&cjYaQ_Lm!y8KGkG1sqfTEnaaW1TbEdMBZ;r%Y9f|B$NOD?8>b zT=4FC>y;Gw8dk$OV9O$c`KjY-|3S@US=Te~E}BL{IXR6j2U3QgJR0gKw15BBQB|rc zq)7A}?U#nUPWL5oYYjK9uNv-JxEKC?i{?uPh-UVB_|3HXY0bDxCc&$ulF`1Be*Vx^ zZ4#Z;@V!tnrJ%U+qWoCI_s2cnL^x&s=g`>Kd#l~rbwoHJ6ENk+xZJ|4U+1&F@uSX=4YgHUOez6u>}`DA%)@GzkS3Q2{|ToFuMZ9f zXZ_($NDvZqTfn~TppZQ9PGjtuKdXlQ$TkGo)ZAsq9m&7eB#)ra21yd1-7Fa;8GyPL zTf?PVr^Een$D8s#)y8D$$j+i6scukL*={gG zhUGsSgPO0bEF0v$9i-@)AI698ri^nV*+sKg1mRTxBIc`+_mGO(=j7S{ijNsCt};=y zSdObr{Bm?a4DUv*Hqx-`h!+89{%g)U|359rR>$}+2Enw7y3@lRN#89{(t z*mRzQBvMKl`D9^`?Wd2*docjA^|HY`_`gelMqa})8mH# zuB2rTPW6ARB8*v>MfxC{$+o*s2w*<+8&`kN=@GvlHiES*H!!g$EGQyG&QIP-6(zT{ zmh?2bgj>X>&p-dbW4*@Ne zGYmIAD=V7|O}ekgJ+YtX-D@LNNTV**wLkQIddVHeY2};7jJLg0Ut3#H{9v1YR%p?D z6IflxqmF0UhTOt>^~&T0Bms(#U+KJhW%K%y{GApZ6y8>3`UK9?X+EAy4D%ns$$&4% z9nfC2qIm)d0QBRI5fK3Xa0k2>vv^*10)QTp)Ah}h=KN0yK~k-B=Rb_DL^r8crqiJa z6K|#DcjhH$iC3jM9$f3Fm8G!q$1fWg7R`trnc>?j?X0YelCLplh*BCm$`TdN8E4e6 ze}QwXVBaD=l&2_yW$t`s=+gyB>=uTVg?ckO`7r65y8?gs>~m| zxv==OBqw;tyQP1gt|LIP?^)91zZ{MRWEscAS2oQ8tVkpKfdhpuYGTYZzZ;ADs3%_^ zp9Gr}vAnS(Z+I^%BH2@gYv&Ei;dnwUw}-|!mAev@Ow=e>vppOpmK)c^u^OYJIm&+b zq->zOy%cg+nfKm(OJ53X!*EIUe>p37hO%CnZMx{sAvqOzjYO4A@J)L!l_R2GZ+)|o zb5;I?b$jNAcdPf;2(Xq%Pu zTY#kF|7E(W_b;~!WEU_b7V=Ri!tSXP;nFh>bUuxwjEb}j9M5FQ^KeB@jN3ju6lmfK z?B$+rvj`(gQqt2CjH^Tl1?9i{`%Zs*o$KRRj^H$k0bywHx2Z|nh=sPO{#6ShE27kU z*{6*=Un{c%-A%!rg^?|FDJT&f`%^)doj65n-pWf9<(9%c$wvL7fBL0#AB+rR6>b)| zbU0q#Bm0>ax3qh{`Tx_aX-Vem6~$L{Mn_8uvIT@}T%gUZNKKzIhB8gLenzJElZOeH 
z?Zu6V$IRIMBg?d+AS(_N?Fj6(e`2~wkJ!h(5*{em#A=g6U=V1~<0x}v1b;McAkjF* zDyvDP4Tger=Bdc_<|!p4LRAn19)_C-`scu}sMIK2nSOc;P4HXSUt-@>-|rsoGe{J7 zZ6xd>HW6ccI8z`;Ckt3&{E*?)-wAjAJ%YT^oNM{?_G{u3Sqzjt$&`?rR1?9`;HE(m z-sE@r8=vD|>6`51UfG+j<6gO&t>a#W8}Ni5@c~{geZX4MU$5BA$+|E~VtWIOsS$4} zz-71@XytoLy2!mk2qW$k|43`y2v~xjL7!U+b@caucLt`Pz6A8?sx-~(uvHMCxE0;;VZ>0A}NT0z8$?HSBFZnSJ-JbNT)TES(f6{&G zFxt;djL)6@124JOm>N6VT9d};aElEQPwJCzbmh`~KPUNAP?MaKQi@A*s7_ev%4PTz zCcV(nDD$KRlRT-<_eXDEM_wa0X}u;M29YM#XZeASLob!6MSht&a2TngQdD<~T?+b- zuPG=s6F*?HeyHiOpL_r3>b`e%q6A62#2ubB0eqM=5Setot!IvD{hfs5fs);!@NmRO zV-uXHOt%s6(#1z>5}cS!w~_F0Bt~ZvoVZN4XGEO%Ot(?J3?y;@2>_vEuWw!kKk{f2 z0rJNkKoP*;a0lBt&&${+<+bXdKh?GF;^)VhF!dGZVjRV_;3De#m;&Y1c)*#>@g{HI zxU|8zNbfS6p}AuWMlstU>ai&rqAT0a57M}Ran~z6oqw}UsT95-vdX3)^|}~vghHM~ zLw0Cuuof!RQ+L0($G_1Qrxnr(OQ=cG0FB_^$KB<`&RXBaKsq=C*Sel(*!teic{~Md z{E%r0dJc*@JW)?3(^X?od5+_1UL&;X>~&$^&RA)Zv@he5-UdvL%%+C@rpZeE8;`Y1 zl2pVup(2{=Gh?d}P4zh2gI};m4m+TtL8nmeB#@MPlfeY*>oa6&amWO{&{`>^niLpB z#Z=ouh21}`d@Z;}2R+>*0AhZ(M4edQoqkRVSOlr&H1 zmN|qDrbpp#I}Di;$rP^xPupz7mx$J$$iSPyuqduh+k|>h7T9#RPtW_R2BwNxiAW9? 
z(EV{H-LFMJ=HS}KJvW}W*P!H+nfa>p@;JE<#vs#XX`nSBLNPd72gGdgeUkLCpL=|c zvB$H~p!C`kx;UG%uSTFt_;2>XFLk ztYr3=mhTVBs93J10@XEpnc_>+L!p0|t4PU`7M~Am>7;Srcj5Nb3q9`cm=X9mzG=77 z1=f^?UDtCF2vKKBxDWKUV;wD%dY;@d+eE=0+iqhzzE$HRBU}A^%V`R{dSeWQ;$pP$ z%f)?jPZOov!f69HQJ=lP-;SJioERF^YcM^8iODIZBK-hPuYSE4qakwO)*d*lY_v-a z!*>+YtL^e^!jhgA(etE`BeG z0}wF;Zr<`=Lv;uE>QYMWS)=ezd8Y>BD3Prvo2QnT!eEt=lyv#3kwWB>&)l#GEUgjE zrc_c2-O}480lFD|XCMdeK20dB^C;@s&#moDjj#xrw!q_T)q*$A zop_kEi~y`KPLvt5pks^a!&*-W{qY2>V5Q+ZC)3%?1m4YZ(od6Sp`TrX9l!6Tm=j#{ zYw(kNct$bFlfiKe8Avck3Px2~g#OHiiM-o0XjX%V?5caeh1)+yd?mINy(bdPq9(Ye zX}a9s?Bd=;%UP?I?}6D+BjA_treDB?CXp4OEyKw5;E8T53t{!I)k1A1fsic>vmdIK zikJ43&F!*g2hS?Hp|G(Im-q4NYc>9H=T)yVy8t1>OLB%jTrH}3Qgm@a5}{Y^bl*RR zg0InR$2tEN`Ri5wYcUU7c)m62jA`S2UXegH+qAsns$qpIoBE6z$97V)aNDppN%-FS z@DZ#WLTSB8A~c{o_f4miLra?|S$PC?$M47-FHoU0fOX|Jdl6BCkkN@5R1f@;!jfd^ zH?C-*arPzn+!Ayalqk%o2rJAsgBtke8NQ*cjpW1-h2xy1KG(v{r{Rk+`sDAbPmL=u z@V0e1UACR52Y0AMhEhT$Fg@1ZuxrVV0V^UQIKuOv(b;Tz3+9R>0cQi0!-W2g_Xb$I zYk6rl>r@J&>5r;uh8IhJa&Wdt&?;jj5>kk9l}b?5&Ofj)sl-BLyBDqv`ITO5S5+jq zpfNKYe2jzBn19&Dlqql#)<$1xJ*$u4Uw)V9K}bIRub}LcB{oXpP3e0r=g?skR8a&; z0vi)6RO$^2iw|7bWkvNO42dvzU|c%QH7cBM^{-xywrC*fuE{WeT~*Fm#V`LIWuK3) zK;P^x(dMUbx#a2Y_Pf;Y?=JV*A3BETp)C4MQ8f5ak{x}olx_LDruJ0osm1UN>kAM3 zV}8Y|sHOC$4@N@!MVDjWfQGL3g0X@Q6mv)85Bw@ONzR!mTP1ol=Oj%b1g8`T#3Yi$ zxgXBDy%&btF$3mTjPYYq^Z_psf@>6WALA}!_uC2o_m=4H9g|-x3qUJRb8eabcJ#ib?H>b$IuKP&B7WP} z0x<;?py8qX7pJ*P@AMYe3t<1Xn8_BHdgd`|I&F5IjB%|Tb7F4ox^#)rIzXOj6@SUyh|W9aY7tbL5mVH)PC<->hhbRgZ_wy;!#rim^Y?> z+v5#23_~QQ!yzhJq*ZW!@sXB5Db`2>Jp2pAugAPs-rJ&NB*w5yEm&S`EuA4<_x10jFl?b|{{K^7+?-@aV zY0L4K@{*7+X)eg{-SUm*L{aDTWy{z$ffm8!@g?XjjYjv+(JQ96M1ey%idF@llV388 zb!FuL?Ki9WoZ+bIlr8~77M@O>xFmNVFY1+)n1x<>V_SM{Ry&2AGY^70KV`B)Xem>R zlx0*XWl|`~Wr@gA$$u=6O3)WkVhuVxQ~(|b_wv`OMbcbmg?;J{dUkF0TvpjCB2LBL z4eX{m6nh)BZuy4Cq?*ZWCT4j+6lSbfJz|o~ox;)Ev|SePRY&UR)JbE_g>4Sr%qJa~ zqvctw3-YT$>7y>;Jh9S7bkHq_k*)j%cl5ec0^0anca1eRn$@T&3XCW7n{=^HDi)h* zc8j6@fcc1srbVq7qhP>yIh-;&V^xPHvwB~N@8=ZflxI3D)s|p6xL-2&Xxc1w@b*7y 
z(*Z4Jtj{!E*Q)PNq}QJtxlL-?wu;Wt-Ze48^B{9VSzg-c28TK<3&qv&lrakrga1;7 zT6MWS>E+L0ia}Si*FMK=KUwTDfJxA_r*PK#n>6t=SWolTQONyV5IoJ^aK=$j-!l5` z5h!L0cDr6Rn&?<}$6;Hhm2jPre*!F#iQoF^S-mr)Clsb85ah5Jk~DF{c{D0qrmErW%nTP>;y^~u82 z;p?Bc(2+@P`YnBw(*KEn*~35;)d$9mM@JygT+JC!v2Qz6i%!4^kgY}Yz3U@waq%^( z8@Ia8N%WQlb^j=1mWI~N=o_hj=uGG~&0)qYSQt3t0;T2C)SA=xRfEu|e}O^_?q{y! zqK|+KvI0@HfP-vUv&39`pafjDd!PL+*b^AsB;dzxh7%7;&a;0duMTWTCMo?-GMQ|1 zX2>Ew;|sQl$)UqPf$?xF&^lZPv;KVL%?ju}uB5&GcG(fPc(`p>H#<1(?dNtc+fBGGd{aAnitq75E!XBG;FL;MX-fWb+bK$ynJ%J(zq5XdAnrdrB|9o`hR)>1*c$8_>r zh%y$7q>D*sM=taQ$7OB>C}X5QIPfdk%nFM9lO$m7Izea35k0B=v^V`dtxwh#{HAM= z;iL21sUQS{eV%%U>-IgM2VFM~Qc3`SRGIp^C3|wExZfs;1)L6ERE^uF#EaQodY*G6NyUX`-Ac1 zEIa~jtzzCsV1cYAF2d`=OEY${Ldp2MQ)j*+76oV4_b3+C-;+OnNwF$o5wHVt$fQ*Y z29}nu%sTK#sWgZ}vHw_WEK*|v*=LTd#NTv_9h)sxYP5fi!CVyO@th;YW|`&+Z)w~Hh1esEvk z_DP2=iriI%rV?fEwUl6u?9e@uT(;C(_SzZh+b0M8W}>2K)FD{ny|GCQuhooV=h98A ziVeNlO7v(-MmDhdYHMh)I(o=BN58610~7Ajl-?JCnwH;OtI80GJXQ`!X}wt<*sbS? z#6(IkYJQz@;1u?94EAGt+IP2%M!` zO)^`goMyX2Z#vKjjL8LPsJ6(^D#*W2kky}6zuMB|V5UPi=bJN~SJN12GZL%|0%8OA z{#fY8@*y-`PBTP#S$}=2vXk06BKxhLXoBfCPA1K=1trv6^`;uhXn%QP#ce zCVOc(hONjrXL{RZKt-h}YwBEwTwjKQroz3LE;@sMvUUj_x@IL2k3KckOy%1*^R7l- zVLzGRAP2eV4xcL>uOWf>`~A!>ElCkv+^lag-)#SzA^A4TC^?3Om-~3~Q>p!aTBBlZ zQmIw*{Z$Y&T0j>dq-Z4`V9^Iec zzHf8Bs@%KZndnLJ8FPXo3pmPqijG@If67_Db=vmOmxS2q@0Bse1g-p_oxonIWe*J~Oe8Fa-9Imm&5X*7qDSMQ1 zi09=N*mYg#AcAC4JiVF7BC%W;Ug)9l976h2iN-peWhu})+Epa;`p^2Yx3~DCH_AS~ ziEX#L6ck2jnzIXzrKnuc-R@`hmyB7rpyf0_{DgcQZbwD=|Dx*-BM`c45kw<4U-cCJ zyUcC0*V-#VaAVNaPpLy6$a`WV@XucSTHAPwMT7WViqrhE*wy-vSrAC@wGemI@s!9= zxCCLHyKDwM2VJWgLDw*(3_FH_W;MF)*Kd29&Na z??nJ*)HqL#YDux19x`f-c$`}>-5AW^)XB{IQ_$;<5BXfTXceqcA z67#40%Xq3#vi?D)h>0i!b0P7K@J5;-7f0Pa_&Lu9zT1uXu>DFc~J+xxcjJ+qVX8ov1PdsskO)QKQmAGkVabp6TvBg5g6T z&BYOrhrg9Ix^XPk&@pdC>yVMy4enW2!a?N1tdzcM6@n!aIm0`gOKyIflCv?~L0FI6 z7C!Fj47=MN8$mD%*{8;K9MzUqLt$~s_nDuR4y=vkne{AjSbfJE;2<2r_4y&}e`T{o z+eM8As#uI{!fq-z&GB#bpHgJ5ipTDZMF>_p3A_fakdM#9}R{HaaPsOXL4%kA^Cq=%4mf2 z^^76?id*r+q6j|exMKr&!umb&}e? 
z7EiGN60r|Qmhy*@wJADb7z=oYAhQuj*dBjV2Z-O#@UoYz{_1)}#T9eBpgQ#kH67oh z!e#MX`tRN_Z+2O7%u5$ZEj&7=U|V#l*PUSUocx@}y^}x;81K4yiO-H3UA8yBZuV8M z1AZ2HC28R9@I~u;{`YA=r!(!o>pmCF&%(Z#hD2ndDuv5pw)V z;Yq=kg2UU*i(_^X=UYxoVfo#F;bbrQb9($|AfKd+vlu>}yR{9{CLt8=#pDiCo@5w< zEsbId;g4-PdR{A3Cv#B7KvR&kLh|0+!uB8Dxc8duc*QTAKm6mtLlzs;bH7}{)6@cDKdRFqDW#BCyXXgt9B=CewwLeA@iW9L zBaG28$NuNp11*FffHqIl1_*d7Z6B4aRl?)Uk>T`W>`_KrZgHWD-Yx_+3LbOwee;Bs zk+oYUeg=7wS}$wqD;DdOm6>i^ARaG0(R(9}vX#>nuX}1ioo-5lXE4_flOnt`lHNBb z;7_c-MpKu?RKDg)T0NnI@}$N--CWmZB8z058y7+jZR}}fP1l!i<*o_a&h1CT)L4vf zx&H@eK$yQfcLF;8iYO(seG3sAoxT5F@--dYI~p;jAtYx7Cp~JS!)EEUbUOP+ep*e42~3*stSfLu$G@D;T)Wf+c<%bCvi#7&QZkI>ZoM>yTHBP# zk6TgAH%Do)eP#@d&IZC`)Nn7|=1o^{us5iyyy3b8V}CQ#~AFX66D#G=i-t3Jn|@p%lQ z-YXF?!Ia=6H_Qa4QBCr4N(C#YNs=2UOu+WyiBm=>|IQWc$eWcx`Yp~l>&73`!wu(I#cAL8SE}`MD`%Z3&H_6O}N=zU-Ih2y|#*XXUt}tcXeGmQ3kSm_lND z#-X4ej}eEMQ6;Qjyi?-hiyMXNrSpI)|FynCFAewCX~7(_ES{H(V| zTt#=-jq*(z=48ihR+8S%w)t8eW7yj@Tnt{*F6l3D=}@w|BLqajg9 zJDB#V6A)gH@=ONxi=w*7KC#UQ<&R)(N7pUFP~bH)y}S}&;okaEsomQ+$-wn$`!!{{ zD7YJTKk9QJLvbj9atRX!oy%1@p(xJ`o$RVe4i|;IZ&NN&+7tlhAEhZhqEO@AuwQZq z1tNx+UkELgSe*OK7j2PAnT6;p4mbx_PKlsY6zJ&r9duqG7x)dmGBEH(GK>@LN0_B#96kKof>@)sKhP}N` zN7!FVK}1A|O$^GrJFUV}QP6yx9n_#Gc`OZzUjg?G-)~h7k-I z%!JGeN`kG&kzCS@Aqp|AnjW?y-#4wHMYgfE` z^dWxlZlyAPK}-}am!4|?Mq2~|)*13*F>G;_w)F?`#n&zaWm*^}L zDPqbvOa{W>qnU8$&rPDc2j$}XkjywX8ET6)eH5;#Zu*2|JL=t=B11-5hT4Kz(v&~% zc?gyHQa?g8(}f8z=tclfx( z$1Ogt@o|sG1w3xxaRu*}SaP&rT%iO^a$e!94K)C+p`dcRIthqp<9mJci#~ufT|(Mh z0)AN^$sLu{2P}p_k=Q2z#PDlZRIBx%*dXWR8j1uoESpSTE2#&J?o5^&VG@9?ke?RW zM}nau&xfg}#35!jwo;0h1lT(ROOr=veB0&W8D6>&|M7W3!mtEfK3AaHKcNGeWu6bS zk4eJhL~JF$uMQBq1mq_VNI~{&Egw^s4um^(uH+Jt0S$vAxmV&oKU|WjW&p+1e1WA$sUUoe z&=FG&|MM;gKELDZn7Fn2Y0V)N2w7(6yiykNfmyzgO@aR%YhmoFlAeiUJjjo>DDxJ_ zQ-M%z(Qds2)jF1(GbZsA#Ot%%WfF{__3^Cf^IsI`?^?;Jlwb&TyGo`KpHQH*o^iXz z*$@;H`+c@z6xjZRzGD{Qh^w?!QJXgf^0Qbp_YD|8eqCjo450XfF-zCIs4ho!*<)0n zEUY!OY2RiaR0q}x-IX`>F!-k~#n-KP{(|Q>c>N%iIT8N8BhbBn`kYd`8A!~_O>6Jb 
zf)zpE-`rEPgiq}WZ`D?k{^uQmg>&bc6W;b3>Oj`c_^!`GW^jh|&5LfK0}L(ww5nk< zc-Y$Dux67)v+mpxpDe(?_*Q!TdmYf}kMjKR)&gWB+wZa%>p-*+e@Olh z3(%>npllM;0ZzVM>gR4*z}$z}O2ULT5Gc=@t&!YN75T&~SsNrH!<(3oTfpcly`>?7 z+R(@R;QhG*3pm*%BIH!71))eDjJH_SW#VojHeP}`l=H1VYeCS4H&**OxF487ir9r0 zrMq`^u8*j!rL7CWXr6-?%|KNDn^$464#=*%TI`?k-?`!MkEZ0Zu#SdJuaDlN zn}THPO3un;ePBFTl|#`)HEcGErpPM8|5?Y7@qUgQ^QPbtwa4tiR|C+wd8{sKJsnh; zUu=DxVgOI%PdKr1(qS&wuwG}|3XjN4I{J-~-toRVocij|h`$U9J z6AWNsE{A5F;>@!3B-WdSo5u4R5dYkG-J2k;cJv%UBq zo+A8r9e6y!i}ry1((_TrBH#CXUS+!@Ch0 zS4S}gJW1~!XG)8f);#(OltyWjs1kYk! zqObOtLitr`7m;~G2uF45QB=2H{FF1Rie$@U{XPRr=%ARxqS?L0a8dV!OVTdn4+j6( z4R2Sx-7ojuc;4l10!lK+@845007;YYZ{B^PL2;XQa48Ytd+sG>b#)r3?iP%H`r7E< z{6HYMA-_2E+b=ZyHBrrD4w&{Hvx+hz?;lL$F3VI3wFC*bJ!XRjI$&jV!m0FQH(_nb+xw z1q@ldT1_Bm{gbosITW8`xi0R=b5^!o^ylhROB>`3Ex~F_mbwUYBF5qtCVGillP0EmoupnDsX{`?-y`8QU6 zpC5f`>$1tW0kHJiGp^!RmBsTldex5kZYN^BqU*fkCjUF$qqx&2nui0xlE)zGi!2et zUmvmWMVjxk0BAV&$s^2J1sg3te)=AoPyWu(2(C6|Y+-&2^LZ2C?i=+w0C)=(+T;h7 zFc$rv+@+%d(6vpC`OQuh>?b;Z*-!wK-hC%_p7ihdXbB#_)4`wK<_VW6rg?54b9psc z@T(*uoxd>05qBV@=VnSsD*eeH+<(`uap8PQ_-Qv_Uwh9d)khW+bv)sZU$}wo%r)ylsuau%TK9L}af8ek506$_O9OuWNF|;G0u%3VJ&y1D zcwwchD}c%h?Qgf%!O&pY{hB~`c$`%$<4~@NJVD zh|2dz95;CasSCG9l_9iq$90p$u)emzcJ(Ay=3Lg;#lWOy(03kQPTE&K6 zUKIkjg9qOVklmpAf!a*iW&z-6nM)qp;09dZ6dD3&_+b}$h32wUv>xw{%;{IW@Gjz% zs!+5WOgMLB(hS!_A=mc0K{K~ST~Ak=8$F@9LG}y&iq?m`&@ih&D&j)NNBAq?5IIRI1vLdJY_+Y?B*hFL872+GkhO+|%P~4W?C%<%s7b}?=H9kUMRB=AVsL2&D zvd={8NfEeu?mW9;F~NMV5SUAt%cjIYWAv)e=2NaP=XT?L<$whAsB2|MG`m8b@QyPb@1@{H_cGqR zVKn}pP3pX;0()z*cU5+WE0`wUpJywSgY<^RquHIV5UI6pa41zCnxY4elUvaJa@j+w z&L}|RA-@BMjv@INd&o{hB^a7(oitBF_H&V$a57Scbp&SbfD%{OL%iqnxkwqlUmV%m zbTIv2OO}!ghkB z3e2GRe(HyCDl}^InJA@ya1TL%*TT2h7Id+IfvjhAmBrq;L+!4ZRo}L0!TY{I*9%^6c)jBFj<*ZmZg{)m?T+^gyx-vcibp@<=c=`C(6D2z ziBX#dgd-eGvOu}6pKNpsak@v1jeK-FH%J>C+|M{Anv$)`2kp?p$q`r9jY*y?wvxM8_F5WC4n?@p*gp8n&zjIRg> z&nLWnMOBCH%!6-Nn7Bjk#=hRhb86s}{_%oQtUJsx3`-2+R6*X#wB+z9cPON1xz`jB zq36?8ALed%z`SK96dtN9`clM-vj+P%DF2gBkxO|m^j?Y%I;9%C#$^$s7Ic*rCKyj} 
z*8iX0OL#rw;~O9UcznR)2OeMW_=CqMJbvNv4Ud0#e8l4?9$)eJi^pd?e&g}|kdKNB zrN|X@uU**Q;3fsJO&+!%t6hPwJmBl(OKI4Xa`5d&G@nVARi&@FWMOK(b$=nsFRh#2 znwA^N!^@#vQ74mKfp{_9>76O!MZcK5TOwUSM0~owx>5<=8lBsu732#2H`pFE@*ysM zn(NVib5~$L?{?tk2O_9VX6*A8Mf&zMEb-Y)1ogba?WcNNAXwTZ>Goz7NK8EVw&9Ws zoCxss^ggBnmfxAY*QU9E>m{P{m1RVjGBPcZEOCJdxiFotCaPe`1g>H;@+$E^BwNek?c=qfwdy8xr4KvikEHf(=&^~D26 zG+xSD6G0vw5Ljc~-<|9NbWJar_GjAAde*$j`;-f)?mD@tzfuQS!L;O98Pbo^nB`G9 zU0^?Rp8aE+3vkWGRyq+#kg@TU>ik`_p1SViPY#h_^%b$#Px)P8=v1m`162>M&bCJ6 z(2+e~6P2UZ>H(MLct9IkkCusM^4a#^q0G)4xM*} z&BounTpLMHUi>uR*SX*EWx0;{UKezABYygXxq`+x*=XZhU3fIDaQzbU7tT3*+lFCn zAhsO7K}P5QP)OdV&ZPtPrCS7&P&}2JEDukW)P@(s{BoQqo)w~4G^eMuK=GQ`>yQVo za66{2*GyOo@*V2qKK(>|?|m(wYNIBI2X>^F^SgoPyIMZ3Oigf7nU^R){LEJUNN)Z! z&3|wgkIQ&GK@j7!@XfIW%+23<<>DH3G2nFByK1YG2izWDdnD?v{XgFv!}EDO-^bSj z_<8|fPvGkfJbvQw6_3Aoe8%H99^di!kIxVI{DIFe`22&ZAt!? z-+MkM3TMrR++b~YXMo&gEwHqH)0-&l4xxQ)k1jn#akqKnZEh#`Mcucj+UB#=SChP zbo|KJC$8fG5d=;DbLq;9z9c)T;V=4J9=z|*u|BZ$fO6{3Lt#!b(Ak9e%X$x>4?a9f zVUdRZZKp0u+;<1M*{af_>ypsG&g}i+s5{s$HzKvKl7Ikm#7|Lk_eEb7Xz{RpqlWzD zd%MrZSaDEutB)I^q3cl{MSCjoPyP9OZyNc1!Ee70c8>V@i^~VLxh3$IKB2(0%+H5k zTp7SrCMPSl+xXA^9r3TdeJTw_#Ik)@AWCsoI2e6N2X))kq%V<0yk_KtV zw=P>MjqXnz7rNp|gX!@sRVPsf*l{S`5jUhk!mDfPG#&=rSFjne znsgPIX+W4QyD#y|2f7?8whawYQEjGpU3>`XYn+y~?JN~k$JD0J_4kQc0uiCYCj6m35Nx>;?B^)ZJ6&e&8{h-$j-*A>hA7azDwFwy-GA*p z$L^HpW#a>v84dfdeV~B;J#(WrLmzl*Q!x|ul>(&~{5W!ze8Boi9(`~H73yBhCG#8k zfSRn$1`QJ`aA!cUhP4mOYzuina-u@wrC(S)-3NLP@*XJ)rNa0VvDY_TeZbHwp`iRC z74(<{79p)%g}m*d2+`y1`WU%R6A?e~GX2sYLas${sLSLrsd{9jxD-`{BQcEZ~cZ)dz8;Qa*eM|eML zeps&Gh5UZ!OO8{nS1BOB?gOC#&FxWChz%ovvN%^R=JxMnU1aET&dnGXXF%cXR8PhM zGN?T)kDFS`fVw@gL$i@&sQfWh-+=tQYj^C>D}6FFMAXNvJMrJO7q6JI{CLC%j$6{t zpV&?YMZ)f=_EYHmEo`hOddRS#t8+R(s`@=CfN?nGaU#1lyCm?3M*Zu@NM(MgX87~$ z?QOp1D?F1b)5H~OIC1_vxs9c7!%pxY%YNHNh3%0Khh(@vVfxXYG!MwT1Y z*i9@VS9};(<_K?{glI?;E+SJc|yEEn=_LhCQG~GluG0O$RQjR(7N+4y^;h4O2|4MYe`ST=gM{YXA$y_{rV8RpPBw058 z>NkZauR~wnS>pw7O{5R%e=r5-(PjB#_Fmw+tzp0QGgGh(`!dF#=mn=`Po6lyPKU=Y 
z)TXyy^@6W3EX(#fW9c3CF#mUmtdky+Ys))kG9&Ywo-cWifST))8- zBDsTPez<#qx0AM@ptC7(ZSP3`q~rz9^~Z~H&oxIV9FxHipW>2jBV>ctCEY|KY@NW0045zxs#`8m}`+dlv=aq3%)la})01 zz9+<{#K0H^jaSr~q__iXLv)!cw=tZm8`;`y;Jz4}eY#rQo5kG0Z(iUsZJGwoOPRgr zAG<*ZIxd4`M%DQg+pTV(*z4&rLN*555#;|`ZV;y@eQ?&%7@B?r$}qolh4CET^>S2W z*!Kw8%M;bu8skNMKF07+L@M5Mohx)8`K6~Zh|>42>X>waqa$6lugi@=i}t4X)MFQr zd?NnwwXg~735DSGl`b$i)}_g(WCFbIDlWIZQBB69WZWNT0=;)0oXci+0g0rL2iG%9 zAYkeCx_u4K;3?O~dA!sF_&+F+cAa(xmP)yDk$w|sxN-P~Sfeup@haV^iZq2+k?T3X zW;=uZvn1`7UQ;-H*`upc%NYXq=2Y|`f2y=SRQ!tI3_ry;sMTB20gq1zviuRV`tzN7 zdS9T7^>RBPn#@bp_KPgmb0!}JF6%Y@=N*HTzc`1l?eMi9zBa_yj`-RVUwh(fQ+(}; zuWj+QFTOU$*UtFb8ee0TgPkhY2Bo7<-&D>zLp;k}`6nl|;dQmx zYpoB?pmM2#LYmhCkDF@IitWyj=Kon@BwicX*Qc0G3^+rVhhTjEY8_x{3w@dO%^7Zk z#yvkV9SGa~@xmHj7YOZgJHY-;2iDD>$W{F84C8W^>l&qWA@A)c%STt8f!;16G$p1B zx9txVkMDE_%li!uD~)v_VTQ@ubgMIPUg8K5<(YKFpoSJ+9{rKeVw)egj>o%-?eC*cxZ>SrOoS zbwCGppG{QUu+$lXUxXYBjn{$1Las;m6p^12(r45Ah^QI9oXW)@f-goGvHpHn! z>|U$m3>&_s-B|Tf3v{Le&n%}ngX^_~x0IDyV3^`};3Yc#?1Xl3d$lGgKcFskMe}jp z8MBJ{vL+lDZ=H;6bb|N;siFhBHDNCcPq;|46TCq+pIM^@^iDB(e>vdIl5gCz3;Kh>#Q`D^#uK2%{M{-UpAU zKvmN+-g`Xg_^)qYd^=Pizip_#V2LBNeLteuYpe{ewQAEJR-*H5ZcDwZPyjm7w8Vb3 zBMePEJgUJg57J46v+A=BFn4n`xm8dWIEqHLj?FlL=&`%a<5wg>`c3O(`x6HkBHoq~ zKQ0M{oKo>;>(Kdb?&2rbOThhiOx}_m4lwyJtZOh#9707O-)+lw0N0Ug)^!775Ng@6 zLO%|TN2+|jZ?7l_hJCz{?&q)=+tVXV-k+r$fJ;nfVsnQW1atX(uxEDwSu9y#<55ZA z@fZ+pnXm_JWI5a8B55$6-d5-K%pTYyXI+0~%0TGqc?q|x_Au$7LwFG@2L1{d0cYuvuwEc!}_k8{}*{p_K-<8HIocNJ*9x-7ra)E+pK7`Lx{ zP=yfw$9GdT>_JdL4LEJsf!N|*a4`IS-#&z z0^1WFUGEuou(c#r^z|eO%<~c}qtDpEo;NwOdO!3)URBuS&L=y_9U&^WjOjs~!W-8* zw0<+~3oqnn^r7AFGNs4U9uif1^=7^rz|W+oZ)OYZAtLd!#FKA^kX&*2#;k1+dI9Z>mhThw>#G)Dr3dLS6>S z{0g@R$rewKbFySmwqcF+$+U+-w}$c|@F-RS_A zN!|AJJpe4Or!E@ba{%`a^*!ST0HWa^Ra}Z3puez)Nz?{FEpR=j`2`1{9Xq*cb)pf( zKfd}x{-XmJDJ;qV`qB^-bGaS~n>s?^T*$FGWkZ-g(|9yN*b#cww?_3|H-Mp|=1t{G zQT%=E4Dh?B4_5LkG|y5UA;LXOCn-c9u21XV`?bdrxKda&$H(285a3#Vx<8Nv#Y=f6o-;c`uUN01WC#haDo6ZOS?&z9P1mk$dq9G7J3d}$W^xAG 
zwRh#kqVyo^^(V_(HiV~NO4$R4^x)?+{mI6qeFWvJ*}<(ULnP=xb;$cp zXBdr9%4SCQ&*)f|FUjNrM4jY)$DWcPUAS566srroAtmn{+@c38kEu)Z#!-GzyYM3G zjvgr2Hu9;9x|NYP%A6P3AE4O$%gZ5Xl z(G-V1(4AFWnD;s_&NT+r6$5lM;;&B;|+jv~Z<32tw;PVDP zui*2}Pl?BO%X}Rd*FLbnaP~{CBV5jA(M%9B1hpA`i<~Ep;5r~8bak5n)KxU{rG9k; zr)p`JZ*~SCDSFhMd6g5$7Ogd5?$HOG4Be_mZ~~({kx$YPt`>r@$;D_V=&rhzE~KFk ze(h!V*WGY}IM*H;g#Reeg# z76?&F*;V?wz%^q=__)Oeia*Czj-mT|H0;Pb+hh%uMba*8Jg6SN*6%YsVGa7bHv9}T zAc1VUiU0m`8wh){f!ZUe~Y zQmgefu>se2IkV}=k4vwVOog@Bz}%Y2TFN!U^Mtoo>#sHGzw;WTKfkL!ea}BpH@a7Dspa9{gf@7-{<^+bl3=rm4sVL zCG0@`d`;Z3D@G8On^tbz>e7r@J4h)cSmrR1K|W_> zYj~X<{FF@XKJ*iS{^Q*t_I5kSn{pC6t4M~F*uw3eckRIY6^*OMiww_K&Y#jBLFWhL zC=}+9A^s%t&lh$ubXcz3_%RvuPkPw4ciBO!XT$!aWfahwtv)R`V+U`qS+pCIDZsw* zqXly zjIA%WcBi4?e}0EB{);Q;IFCLjp0tI6=A^6PRTK!@C|mUGwk_~-pA^YEPl0~s<}1Qq zY=O3SP%iTj1xm>sE6%gq0pom6_!l&9No1EL+IC>bu|aK3B?YQ)4Ao~^+Cjj^oC?<* z3be0#d{^BU)pc$O1)n=9Ku31jw%-nJZzjGnzej=O4##zN*>>b9>-p6-MpQ1S47VNnT^Ye&4D@qU2!6TBbc{S5Dict6GaG2YMdae$8#d>rB93?GO1IK{^?KF;wt zfX4|uj^J?yk3)Ez!s8eo=kPd)$4NYnma}+p|9Wi=0{z1m7!8Jywl-9J-!p5N<8`o= z@il_^@1mvIG1hSGe)n~G+AGC!E3u7Oz zBI!w2$2mL>;&Bp>qj;Rf<1ikl@i>mhd3+wg=Lvis!RHx#9>V7-d>+I5A9BeM zNJ8fx2}3 zo*ihcQZklyH(u0R|B`F(G((M{eB(zIS08%_*b`zS1jbdZqx6^=;d!GIKK?m4$Up%bt2n`CD->-hH>i`^P+Jy-zG$1|C zV6GN(fPu?B6D?XO&h|fjGdW-nBP-g!e=?=PTjaNO7wtiCHyi7SG7ae5^QTTk+k;6p z>DuNgD)jOkeIBvP9%_!u$;Nj78wdC}!N(Ck&hT-Fk5hacmm>QdL3K(4I=^By4WiDg zO&_G9xQonDcpgRrRKR?>Flz@LqJ3%RH8i-*ebk*F<=1VKG%nvm==v|!r(YJ>fyCij ztITE^gmq`^JA(3e07?4b_O-~KIR8K{vkCdxpLfqYV_qM%1@lF0=h8hSI$dH5vbNoi z)ENKqGp)aVws3Ac^+y%;4VuS}*GC6q%wg(Hm*(@8#<00J%2S@h0_}Ljz8VewuTdB}j+Q zM3c|bK-T%)WBcuvkbIe>TR%pFW%ZYs&%d#R6i@p3ZnUnr&-*s-v$a~BE4y=b(D!*O zSei=yoGxe#48y?Da~#$nq)dQJ!>Ex7(A<9NQ1}(&Y5K`*0B5S=1|@e z8mNj=TjnRNp*f59NdEyEaN9j=9#OG@sbIIpVOAPARNk|;Znl9#(lx|ARaEeNzh>eM zhb@#u5dBC|R0v}6`S8ff7IM!PB^*?zF6vTHbnVXO`G4~kKCj{P9zHMP^Cmv8;`1&( zFXQtzKCk2Rexfk71qq3u#|3#gf&>LDThurGP=e&&lg&=Jqw zc9a79Yi&tVgXU1iqGUXe@IFpDgVsE04p|02lm~ZEfhemG`!UTNN^|TJRZmkPrT)N~ 
zj`QZwW%NV&={yw#vz2+fUCqHZbY|L$1^LH`k}2t}=8*EE?wx7`4NTjA?(O+t4uT1# z4R>tO@lz#Jw%!)d(CyiGeFqJmXrFN6T4Di*ZdR?yXrV!_f4+ckzc~aZl{VZ)I5sW% zX@x5qPrNuSQh0&}0r!s8nFgCffo1n2hin?);|YaWD5|IaJPWM5TbcL!3X{d!exa6U z4bQ2d_Wh@xviuvD_;<1K?`q@U<;K73jei#$|E@UxU2^=p=JyCdH zzQ?lh*;^!Qb?R9^MDp~P)0xA9CJ>(-JLHMts39phX;RY!l6SDNE|H|dg06zXauhnR zP$6!sOZ3S=lSN&{n0Z%y+F=6yyoS!-&QYOm+128_>n6b4WYPXQoC=&r`jCKZa=cd1`wqr*y>n(Ac4`}&s- z&F@vBgSX8}m?@=#&s%qH2GakPk1tJ10;$0J{H>O{9UZ!IC$1k8r-EeKJ?oW`beKO) z==dRv=I{LdP1;d9o)-(NlzuPZtnQh-A6T2|SMA zaRy(1;Oi57{erJ=@bwSAKEl^e`1%T8f8qDJ@cUr+eKPz$8h)P*zYmAsr^D~#C21+d z4&1kbLLF?<&Q}*&2cJk;Qmvp)Y-^Nsp)T-phKfI{v4nE#FrAPax-ca7X$2+D5*VE6 zH|%Fg;AveQ^wZE1^y|BiUy#&;uCPZbCH)q_=@_OnHA8|?$xka%JS^byo6Z2vSQ02! zt?LwOG6%*L?cmu5B)GWYQOa8bbEsTCnOu{r2Zx!Kxt0HZ?(b&glcE7Vm?KvQDQz}` zfXtJd61()_;Fi}%>sFcp96q_JTgng^mA0g(ZFG>m7x~0x-T)NIRzE zVP;Z5gY1Eha|e#TZbo^mI=bu(k|(c6c`7;6pyY7cr>cV#u(1zsk~&6%WP}@ussHvY z!S^)5_dLP(M8Wq=!S__b_jvk$^s)^@e|3MpQ^mv|a-`k(AI#n7PCws#;y-md&tE&^ z@feTSUBZE*qJt*j%952aUQPv3l2y@FBzNB$B~&(0K}OfANUqETXiGl3_#wGSlu3jf zZvyU@dcx}l|IHW6*DA!StD8XDyED&D5oi!$l`p{BZ49i>^ID@tk-STp*Ja2USd?r= z8og<-u$D6JRhB_`hzT zL1Nj5h_B~OVCb=@#|u=i^cP=ZmY*_#L+6W_W>MWQxI6nL&wZ3H`_+G`eWpSyTHhuX zQ+U3-{kvZm6-ux8TD&$eg_KvDLpRp{o4=3>!x7*A^L*dLJ_3b#r8JmmvTU@fvVfxl zZC%t6ZAhQ7zS&`7_ebxtfAzj_E~UQKDsQtDh;EE^zBwTcW;KInU9Vcf&=%hGwxhBz zy(%n}ca=57&jpoBQRQG@!>yY+r>&vOiP$^wT?VYhUYSI_wFUzBc+Qr78IWJ|ur@Ty z2DsI_=^td}pl3Gaqdto*K*A!ohSY(dm!<(0kK%3vc{VH=!=t~Z^?>=qzG;P;d;kH@yqJAIV&h4s9V61I}4E~k>|V?*jd-rN$iir)_A9t5$) zuxfxryOW**%?=_I+;WUhYb@%DSh<a915(HO3Aty>A0uL8O)J&s1TX#%Gst9~Xmtbx(9o%eCKDmV%lXT{oCgN|jbczK%|JUW)65Qpj_#^A-Iq(N0^d0ND@ zcBd6g=5xdtcN3xUorSrCrxnm2Zs#qEB7*Rorw2935>8eTC|jCUAe$C*%WZ`v5K0LY z7bhjqNk}NT!e>LHP%NCGzDM{(hE4f8q!>W^%R)8FUjm-vY8I-Ti zCMj*sk%7xQE4HQASpweQkPAj2zy0$&`{+Or+iG=fIJaWYuHkYsu+8_iXmbUillwT! 
z)>{7YTcw5HmofEjeI@bmPi=M7Y%>C) z$sS!McYe#R29bkXJ{ZD%4a29KmYM_C{)x=oQ{@CR3>S6%;-K6Q zx~w6D9V1xtTLT8@{oWKVxM!hXvnx%=ya1$q?0lk`)&D9ojH?HSN<@RR3jBiP~KE zwV`ddh+W)4WDoXRd7S&Tq3l}PC&dP&FTI(ziCAsme?Yp{F>MaNC^-bi79FtF@8kUV z8QCW@I@UT>7t&9Ww@6D`Kr20mP-&P_luZ<4qE`P20bBd zH89NX#dRgXGwHi zazDp$#G7J*BbNY@KyAMz3!6iX(#`FzL$EkD`$1chq#EKuuad5YyfTA`es(v-)nw2q zDAkW|H3Qi-&zf5V$ac{22FO(=M2W(N6$44Skp;$z1L<%R^!fHySzr6;OaCR2%iQmROzdg7Ujt zL7>JLI^xl*a%3&3sE$N&0r!C089Jo={?6D_ls~`L zX*mYaA?6Y9`BPJFzbO=?Lj2mS44!mTpzD8s6LN(DuF@w(dJ}&4yu1IN;*08| z`XZ)qH&cjsl)F0&@j`-s#Y~mGDRiY=VpfhNLquo-|C~6=*Gdu(j_)M{=CbeZX2f6e zNiG^TVPpsCFaUXjc%}p=b(|--Fm} zSq))sO;!d~3+cx!VIudb0oWXk9WtPrf+&@qaBkE9x(<=I*f}G6N;5i5G7W(BWNl~P zHpH(CDlAMw41noU%b<6qDR4bHQ?+!30oWRi3)$Q>g;6sR>5U==klz|DePJBwb45*3 zQ-wYhv+mg?vWgBw^q^w&Yj(RHk{|81FX$t|8rPyOH>C@Wp~J7;e;&_QxuWD zmZ$|0T-V;Q-k`%IZ}%CM&Ds!SJTA0L2H`m3OS}yoS$v_`HbEoA|tn&%5}%jK^;T=}`-Os5l_!q>QxU zqAnM7Qz_ys_6y~l|I3H)`3s-l@c9p)AMyDUpI`BIz}pFLN4%Z!et`EAydMR>=dn3@ zOcVTc*X~qopu^m$E)Kib8qhV}rMdPT(nW7^baaRY#39_*y-Ejc2jX9k)geSP=2rVL zI`pa#wfXJUQ5|6DEZ2teMmcTx*{B+9ZX|THq4RrLy3cHQt_GreXQC6E=-?`wL&ytI z19p^$4*o>r-w9%)l&OMAee&XX=&Vpj(1FY0^Fq?-Yut*PLNmJ&?*cia2DK>AfoZLjxMfz)#r!u1+oReDcW%43Drek!={?^CXcIu zn)b;PJ}wpzJZ#-yP@@LtANNei=vaV`S!}JyS#?y0Tzi+Qh4_wBt$4^A4cNI?uOUc!VQ7;@BEulen|WpG|xnY13Xz7@h&Jo zC6LF6Cd#n)VvDz}qbclpbH?T|RcUct0v>T(3wgd3UH?}%xdpL9vWTBl8|0i^MUwzS(G6<5QU0pl zn6|-?C;{T(3H+M3Q2rQbi)?F`fZk2Ruc_^5Jh_VTflf&{v;5k-t7T}s7m9jq>!e_m zPwCE{R+JBz)=t_#mxc}`bD--l@<}gY&}CuO9LK4j&k+Cl`)$9B>x2%jr=}pH*-hU& zAP2SwLLT(hn?mGB9YcSGJP0coIu|}f_lp`7IeS|Uy3S2pXOf~rQ%tP$(KvaC@?Qy5 zO*(K(q_&HwDuD9790k(|I&djTFG)PF2&J)ZhCHTp5aI7jIMAsC@5=2HBV$oMxj?$m zv{!l27i`Zv&E&BW;i#d_hDk4kGd6NRy7mxZX4uo?Fye!}$UmNZA;R93*WPWkq(iy+ zM5f70B8)l}1j^~t0q;M^rJ|7^{^^Oh;2RME^z<O%`(}KgxyAZT{njqJRAmpEvP& z6`yyDe@Zb9@M%HW(mlIYBRz6ikm>%#nn1Ssp-kOl3i*Qu6~~uq!qn@p*;{3iU(^Yt zdh2LFzmCm_Ef>1}vlJsQUmXIsxfy<6i}HS04=tiw4RSXn@H72qA)eK$L8c$cLV6Kn8QTeH ze$M_O&%BAy=%ig(z3Jb$!N(Oo?(lJm*So9LL?-7hRj}c57F#xf&d=-O*xZNaC$>S2 
z=Or4y+n~Z4;m>8nYi=R^rSxs*ecz%6y@^D>?HqK(JEs&+DXN3t(ZRFil?d$+eIPn5UUEqZOcfT9Y2l0Fn z&nNMG6VFHSd=<}U@q8E0hw*$F&!_Qx8_&n_d>zl{@q8a&58&$sd_94$H-c-LzD}&r z2U(#FYNP0Upd?Jp$m&CLE4Nps3c8=*t-KadeVEp&slKrR%}=s`#FwrIhR0%uo?k+K zOo-uqp{EBVF6WbvQ9W4A&*;n>C&3W%!@H~glQz7CW>(V}JtjTij0C5bw4U9!?p< z=xu@2p-)<{u7=PNIc5wo?M^eKaxKW%Iy22JYyx7Xc}1=}v;ceYHQQIi1Y{|lo6lWF zc=%TR*LO1$7~LBko3K?23akCsj=LeOjgO9%l+uEs(`ldd4G=zw4c852YJ&b9?txH0 zw7&0BjMi99sJrNYn0UklikqEg9=$;Pu+yU5qSgesF4BgDZfQU;@*`GMcjNIBkFR+A z#p5#`zyGPLaCH~HF2mPt___{X_u=b8ydUEI6z|7)KmR8VaB+fl*%_VlH?_e0+61X|GvdLhzI|+~1rKU!t-kEFfK?`?+}CKDkk2Q0**D(; z7S<|N6k_v@*%EA*{b1gst_9XLyz31VETOfLHvD8r8=NZ!&$3BaLf6Z-E~kE-MP0D- z6_5-Fy6{cvv&*WrmJrWu)37x~7apZF?0?E>`Om%~Qdtb)?>9si?!9nqVGmI-Ie}k! z-tvFGAtHMuI=0135Bg<4yWD87gr@W2w?yvfK{QwSGwKOT5Umow_4&CTq;1kJRQ0v| zXFn3ZUx}X=e%|5Tw^@!X%dV$teKMZ)1!4mt0L)xtp$3ci9qT zR~C@;lC@y{kFVKbLr7-SiYMD?1IL`FM@PIRT=1_I9~IVt>%j%XX8X|f$Hi~0x7C3} zdz+DrVM~}?&FB=D*Im?I7g~>9fi8GPc0Y<8w}gvnq?PBkkwB-e*6JJ5A6}nG#XA=K z->pKud)`@l@mSZbE{>~W`VhMOp{CA#tN-!;(d8Oq3g7jhR5pYCS%Vc6Z;`8#M=}9B zJl>1sg=@VBd&2jYSpkGtea_gV2cz9JwT+%uFr+_R7jsn)C>7+-p2Ppn3qNoCy7249 z>&4E5sC|F44g}fORIgFB0!A8l&AC-N5c?v!?31V!h+gR8P)BvWu$bWGFl#GVSo@st zo?Wv?(0pO0?R!TkJEf)o*(JVy&`JGv|Ny^mw}KKl>r1zk0^kyZCw-UoYe9 z>3`~NTs@Aj*N1KL$}+Hx4d&$huj?WPRq}ItpKB;p;GborbUD z@O2)(4#d}q_&O3_XX5Kne4QFGlhU5ETnxC@XJxp)L*rS+I&Uoz1)Xwpm(hdm}FpB8Fdk)gwN?pRF-xju%>aoWeLvf#l`6?u1!4=AUJ_5jqZ`f}jg#2ga z9!;vZAh5R=F-06Q2TW?iyZ++>5VLyEuB$i9Azr)h_LCq%V8|%ldDM$!ieW}=qX^X9 zkdtkFfzBs|7~2|(!sJ@P%Ne)K0iH9z_su({yCIhmMd%~sp(EUGTvP3SE z1IBJ-KYcX*=c&$k3k3)k`|NUVnZ-Z*PbfrV(cj$fxMuUL6%X;%f~R8~rTk>RvHpYL4``r-#;EpatXUgJ+qxp!K8iHqC2+t8S_z+c9G(-rYmv zh}VMN2)9PlM@au1KbY5S)B^i!Bwh9;Cjan$WWqRv=k0&$Vm9r_4bmDw%axOD-bx0> z)``r@g8%B`cTdoT=hA1Rsz2uMH|A1G^8IZXi#BdBK|MQ}IZ)=-ixt zO=odDQvvNrxdt65Rb|W#9U#L<4B_OHecEt)UCgbgr^rA+oJ94KHasfyKWy-v3?h8m zky1uF5Z&tOA#i~VqH+Eq8(0yiQNBeExC+ z1TAT?Lvb6?@uyBR1yQ;HuevlvH`AcF;%gAg2OSU%5V6zUfn=e`!5>$&fp`4Ov(GMQ zewBGeQNcQpwM^;GuT{vGq$(@|-fBZ{zxprn?^KxNV07MhL9r&e_S+I9cR391_U%G7 
z-QLoM5Bg|5X%-?*#aa+Ep#JNrC=Cc!rQE5{kgt^!ECaf!5Z|5Yf6NVy*JNRCpHBti z`7REjuUe4LoRwh`hh$X|=?nX{LH174RZ}t*vR($U1+(fvUGBtnSwVC^=2SUD4jtgl zvQO-JOo8}w>fI4S+F*NE{A2SM3J|?`$3F*aLquIw&d%S@t_MtH2DE5_Y}eOpjx!Xn zo#L^HmC=T-4LKD&S`^?)h>nfgr3J*l-;0f)vGpZaXaZ!|C(4GR@gB8xC0S^~(3v8p z%tz?B*F@%ZVNDQqq@S;oLHcOT^nZO_10W@KDC74tSy@eA>$No?A|~WPS_N9qUw!{QK*`}&BRoXNlrj9!O`lSJ`Jzuj8%#nTM z8a%~NjT#}ncIV(GWPkOUw!@X0zpn>cSoCPGMm0G~at?g9KLH-lgqCS?d1-3}O zltn|_?}-QVEiK?3yLy57|A7@K#C8z~Or+nj$>mYX&9|Ji_?+Xy^K%Qh^*c6ce`E7^ zjN+f~#(xV9|E)Cqx76^@E#jYR#6S0le=hQ$=O%H_RpOt!#6Opbe{K{1TqpjyPsQpe zPk}dNIIy;lQ(XcbPg&P_n1=!sW$ERn4#-!I-R4PZAp_;Mco>6@eD9Zh>^2WFJbj}6 z>v13j;FzyPBr_RU(R^eCDNq-n%&SNSXsL=WyS;(}sVfXCcrO`&t&w)&c6+pbdBodK z7{bbTFHLScQ$S~V-%H^DL$G~y#_cScZ}n%z0@X_fV4YfPwR1IE|H_)A6ZQH)pDa!Ahl6$CD^_WP0kFw%L&Yqi743@}pS0uHH?! zPl74eVSlCFRQR;!R$iR~2{`XJmAceZ;lyyJ{{>S$V5ur%x}}MH?Feb*yNCMVQWstJ zl0*Yp*1ne!1_mHjB3CY3NCUY&yyG5HMnIwbP}cta{AApYyaS2=8Y(s;27@$+lanN0 zx(P5kkVoIlWDG<@P5bSK$q%1Bh^tR9bw&l(b5hT8l>)FW5xE;prOE)G~PmkDCFx|w`s5`v){+c z#ux^bZ8n^4|M&S${PUmq=R@(&kK&&%#lNqBf1d&Wz61V!2z)%@u^-PP@I1ps;sv~)aW%x{)-R1B7JGAZT=s*2A7NQxeC7? 
z1R=Z8U+-`15&i1ZIc253h?5JrSNCoAE?(FFysyUl7kluSh{sGkrs6RdkIBS0JT@=C zsRKb%`rwydDhxd{sA&G84z6oh#)Q96q1sdK$MgntocXg0AM%5tjYU1@?bIP3t$VE( zT4zxghYdO&b3M8&^fJO+LIKI>fI4)wj>gOOBMk3iHdORchdSA{I~^uzpl5tn?YHbI zdM5X`ys-9sWY?8^C=P@YCMFxy7uPO&RUlQX6OI3w#>EVXuRT@>}$nK=_m*N<#+Ls0xO1+G@#3$RsOYrF&xOgAIN`110L_==h*!L#o@E!=-)DNJMq;n zBvj`X8|4^U-tz2O$&eLFoUde{yjz2cM6v0*Ll{ zdPEo?Y$p=6XZ+O{e>3`RH-*1;!~dRy|Gf$SdldfnD*W$R;34;;c(po+9*G^onoSn( zjXiIYG&ybpMv|rx81t3O(Xc=AfC*&z7xfhNt3uY}q^lJv zC@vL4O4A6c&}Cf^I9_A|U4k=hpHhfWw|!=M?J*M=;ux-rvLixPJxOW#>qx2!d@xFN+$sF;@14kH;}`G=yF&UTy`C4C4zRIq zd2O=z8~wsw#s9@8{JsYM_j&y9_xRuU@xT8Qol0vsuS>%H!-Hps4NRf9>?_-DRL6Hn zo;+bKYYL2WT@@u>;t;%ZX4+K;#g%r>$)=BDAZhhO`8o@_Ux@$mzHg#X@iwou{P**O zoZWPx5>YVDDAk|hLF279ZWU)0f%y%?uT#d5T}}=9hVlw6u9ZO@fAZ8v06c<+Ul065 zc9_~M$xq-1*HiY1zqCwYQ8$q4_(|Lp;`_gvnMw0PdxVJHTTgWVN%gC8YXo3I`s9hn zrAW46R2xn4L&y%dMz=fY{%^(0x^4@?{PJt>vakKS&x79w!tWFP^SlFruowN={&|ih zbf%J2vkq==6R|s6_uoCAg*~3%c4PZ%H~jt&et!tRe-tmfR?Ygz`o*yq?$t^i7izhM z&JPe-rY5!?=DN5CGMAhEv%iFHnLzma4a@kaX-XXeEQ{AJ>^(*FRONIwnElT;EdSY; z!jFr)FMeLdr9Cnm)YgFXO2NybKheDVcWb_uyygTuS@&${)$bS?y7@rI2qjF7zu9$$#e_v2i6Z&U7k4ye}ZOQTep zAso1WLlVihahbut<%Q#-+=7?MNXGgELe+^}K-}H1U$X?wcWumfui-U|YbQhz`44Vh z2MpN_YAuQX?#m&S>_dN9fAsiw?8RdXdvLx5!;Uxq^Bnc61$*Q6p$xw5E4<|7X9)hvbiAyR9|P;|N2@>47Uu$*sg5tI z(fzj3u=EWASXJ1}v~~tM|I3%so6!Q;qw_^f?(&GgczZ7A6cEIezwF+1d_Ce{BX4qu zG(jv;G=?gIcsaq45O?&a5LVWk*Lt`M)!@$~uK5QDVuxOC4t=(d3XIz+EUWhjVY|bL zewYg4gM~q_z6%LrZ(ebns#!(_AJvwXd(R1CwDKaRw{Izsuj>pjX?G*|z z=3T>zx`eRcgSA$PX{gTQ$mx>`6m-%Rva*jAAewBORl@hAc{@Tv3 zzG_W?Hx(v*@~-#25yuoSkaSgQs36O0zpb8E94m1jeqA<%j`L4cZkQIsavwBF2Jj#r zu2eLgl`4kC5XXx=eUZLW6|cU_6~jvPZjtkkAU>){XrzA^!!mAGt;xEB&d1pwxkM83 zqd&ic<-gg>DcdHF$%YX9)V`7dn<20d*s{Z!%?rBREQ@5eIWg?O@~o9Zi03cpI`?`{ zc<^sN=)#?TZcGGQ(Dft{t)E2zdkwe97h4hkb3c=M=k!|aDdLmyZHSv2bPW>&xUrvC zNxG$%5Z}|=eRf2G6MMF0X4?Ha8joe(eqz(lrnp0~4B;#2cv@(d+w!@l$x8pj?ukfN zdl8>m^2F%)XIhrBIkK(VQ|YiRZ;e(We)&8Q9j~^`I7s?olpWH*Hm!i}=dmHo;WZn? 
z8s55jP~UjbH#lCZ{~}JIdNFQg-1-IrNErfo_JOQb?{`o#8Mz+$3ESU8eJh0)$5WZ-vaWO(xMHHdpE>fF&Jr znNV1QIo|X1Fn31BmnXeWbXblxQ@S5HJ5pe1FOU4q9o$&LZSk;M=y-&5)h+(-Yp@98 z$Yoh_2t)mMD`_RHU5tgUvned3?d!2B&jz-K%*dZJf?oMd@nWS)(#yG2(DnXZ!<9Vz zm@C4Gi!uM=2nv~G!ry+h5XEtrHZ6f^og6${@x%;_zMmRz5#z;Vf2#lL&9eBPezaX0 z(at69yqL$$7H`f8^Tl(2W)fc&lqoDs!#r_D^Sbh3c1 zbb)}kX2Oee!!A5;=-MWPS=QPo(k%abocuJptdh+Fhc`qQk{EtGzisRrn>CT1t;+_8>Jr%f+Y%2ai2-o?$bQ%CYzYjO-t*Sw z3jiI%2fD3K#IV9RigV`b01%v$QS3b>ia9R5Hy9!GUpccs?7tB!f$BAQcXg)>D13>aX_2uafaqJwD+1CXCZ*-v62AzNJ zw=f>9d3p{wM6ux2M!JM~e{fu;YfxJ$ie+i|dU*Wshk5bD`hqoLSTkGjTGKcFV4LGT zzm+74>HNrwy!pr<<~AJPGRY!>U4B{KaI?i9Tpt*k{<^HQ!v?QzO+7af2nN=) z82(-w*356DtN1DqYHtc(+joVA8E;X&)7>2iDxWUbt}dlvx~4i!baZ~rna8A75)BLG z3SN7^KM-<1*^RqQQ?bce!6V++13_^5qqz7@DyGIXeciY|5I8TK8@zRhin)2bZ3%c3 z2)w*ivHU?)EFay!y(bXHmk=~EnW>no`I9eJPXi&Dw7I*rnt~;)S!(Ip76@jvPbx%kLZDE?Pyw0;5##)SnE5z5Gcc?#IcPJG-*1 zdu|{QB|L9kqmnV+>?_r}#es|ZlYCdMUwRRZui91A9#6)~@+Z9xpz-fZdfxhel8nj8 zUdlQAI1rv_Z|-K{pe76pW-L6BhYC5DwVZ*}GFI*iW$?L9f0B zqFOhlff_`?hCUj9uJ{=UC*7u+*4?FG=UzVK4do4jQNhvCj!6oZUwZc@hcdF4Y95i} zI|ch*+xy57f}oq%qDH%&vS{y?AAUcUJfmQ{Up>rkrw74-jX?!R&5^!&OPE#~1wmtj z@HO+pRLrts$%PZrL6Gs|PD?q`w=?;t9glDjX!>T(WL=|Tg;mFgSBM3HEb@<&lT-}v zFND$Z25-$k@Ru7M<+*E&ebc%v*?xW*`fIwI;Gdg;bjO4+Gyn0cA zU>%e>LwZHSN^5!_dF&2^cI02ZH)$As>-|{4eaL@Skxu&Vq+#wiF3hAK4TRk-uF@nU z8g?R%eXI=GL*>Tagi~8-Sk^?`s&X?RJS2P9!i%gQpal|J>n5IeN zcLQMx$rmeWn6qQ$v(HHW#zi`riS93sQPo z@B7&P=Da`%R7?^%bHNxZrW~VZl>~yt!!Mqs55|~tz>Rk=QUjrQe1QAGM`MghVx8CS zut4B=2YnnOCRnt{jRT|rdVD7!~BT30C-8wBT7k`V!r!caGre< z0PT)v$kKwQ*h2g_zhw=R`e1_elWQIKG5|LD*V(IQm|(7IGGTSg0-INjknGk|6@-C5eEnawvs>3Lhgm&i%yCj^8=rh2pU=Le@f>+G#qjIjfIuS9%XhsOV~e$V|}8uoSz6@iYv-e!|PSE+arBnd0M49JHDvf<0uS)ousTW|9$*i#|D}A6loT_@d5Us%e5bG zp!1VhziYqqh6f&P{+~VpM*2csI(o?)uIrv5ulNF3)Y&@@R}Xqa`iiR9Z~_@KTirG( z<=_o1==$|zfEky{3Q0(LgLqIe8`msg@vKvpYG1uT_~qr=iFv@TynY}r_Sg${uN@s# zBa<=yKw*;@%nPoC7#MEUBV#Ks=~j6ad%?@kcUq44kg-?fd5NQYz2NqNW;wGIGPdyj zy~bmYNtAtLtVS)RZHcTGtk3XaXd#(D`!YrNjVCA@(qgu$k};miA-$_NJ>ixCEk=fq 
zjCFR;B@5p2gm4A*4IK{vt6MT<`TCV7Xz0^oyc^N_b(*zqeDH)F_fM7To&=1avu$$c zdr$b#vy*PL1+el}K{D2#Jt2QtRjd&|V4mxWbH&F!VOW7y*;;6Xb*JuhvTXN+mEJt- zm&v-&j0!67a@l8k zdxC7Q?h}rKBy1smTu02`J`o~e?S-;MU6G!!?b_wBv@{Y%VpY7Z~be?sbCv-9K zypJj~v7XSinpSBtri+xc@C2=|9roA^cu^@4t@J(obAD@p1?o8Ce2E}Fd4sPC2mPbheB z{=%)@npnu$U)afcPmo_;mwcET9lyFo;GmKhJlnr<)5%L3Sh9i7hf;Gdh+dN^UwT3V zWB3c3P&~bWG0=!-7nka1(Q}smrQ?F$M$|KyPxve3*;XsI?R7n$L{uc*mh`ngU)TP z9c&lWG3&eLMn~Md;rQjk;|v3JOxX7E-47eRq1k`$mCVa(SlIVnQQI@U;mYhTzN2bt z7|-GLoUVtwA+c@?C9*>mtGhR{l_l330=`{MHYTfLQ7B&TW8NV6a89Xy0}<1 z=&v0(|Ma3FRw?^@i6p`=k?+|mQmTrWrNz92*=Zl}tk`DSJ)(fY_yc+E+dd#0vFuDo zi2|lwDJ!(+vk%;QtXbTWu86fJq~|6qX8^Y8a0l}ZC2UUmXLhOx19+^sUhc(|u$d!z zg0IyXkTi03=l)40Oz}fjq<|6wPAzBU@as{;^sjq0hU+un$;yHAE&2)==Prklesc!o z#$WyT^@=>kp>T};!k7WJo680`wdFC!HokAS9T=c--FyDzYB_8wesVy10|Rax2-ITp zmc>YWCkK+Yqx*BL7w9XH#cZ8sdx8%!K!258ia$vfD?MjL14w;+nk;PaZ zEoU{FXF%xJ5~v!N!Q7{n%4zJrKwEy;#CRmu2zG_Uf7eTScMy}2TT5p<&}w+Q=! zEbqZD>X&7)mILw!$JhEo{~kvvN{bvOSUvXo&N5#REqTTzcvv2zEH%2;@{-mkLuD+35liS<=ciWp_zWYN_R47l3#PRuGy3Cmwn zlCWfw0S)K%pZEM!!sg|Faz~-@Y2SpL8u(N&MKPJM_2Rw|PpCiGeM1G~5I&%tqvi|A z+gVFiX%jJe^S9|sX1)-uCMDB!pNKUMygGZ$-WT3-2HoF{WY@eUDm(mrVP@X(#&RxI zEZ52Xq;s$@Xss*O6Z28U&hgrszfSgr`8j^u*;-Xhe96)d=0aas>0)XY>#d6MoZgyh zQsN8GtDi1k@&v_C&6xk4a$n#KQ~8#@Al>om z4K$yWKrQM`87y~Jsl5G`FT6+o_2rB-_6_qgJbK#~piDCJVXW_v9u&%&9EOeSF8pxV?THU^X=c8RWbh5$)aTxKe&HM_eqVZ8upXJ zNH@sR4+=yLqU^q?VTHz(d~3q}fZsXdG*_%T#`5~rSXGiAv^qWxtD9EGSRF+*q>rKb zqkJJ9pn=)P^X>eS=Lgba%u5wYG_W@^G9M{t{a_5!eNs))#I(5jCob2b@q$+LSGj0m zg045-?Px}P@O2{=CZdH!50vXiKk$R=RUe&wGPJPK!*_2Q_xQmT*P}~VHfUj%JUgV@ z`u(8JMoPwMn>NPJi)w|>eh|c4mz=al2TS4Z4|iwshr$wArL$EB3$l6J;w9h@mD3)x zJ88O@eDakD4@rM8dhlz5&T102`uLIy%2NIS``+wPDkWk4rFw#2#r;_*dPIaOg z^%LrOWQ@uBZA*N+A5^~oC{B1v#!9HGB!ZECUtV})(WXhkTGs3+GimgL{f}A0zNAyI zp!2J-u_ixoY^a|IyF$T~=dvQR&ilckha0s=UQw_~i38i4P`%;lP-mYxLcwCbDamSw z`T=L6waqF)Dn`UGwgkl2!&mh5lvq(QPUjmTiss0FVz*c1BvP@0sn4Vehmy-iiy77Gj#cmFZ6k5&Xf|VSb6fK(8fy$9|PO` zcgavO4untPxxSG9cKx0mOjL}ErKGuO55hy{qL7?R6zrYi&+G>~5Wd|$l;dkp!JO@H 
zg!n}Hf=(~=)pAoXE$($*Wqv6BxAVMeA0T4~>~6>pP<`R!W#MZ+_sQ5B^L;;jt$pF? zj*GnJS!Ap|anh^Q1mQ>4e4ZiV=Z&ATBAK;)p+s4I!;@4pMlj#E>i|DG|NAd>x07Tn zH1zZd69r$0>mA@e;7-QIQT`AiqWP(G6;*PPG1ipy?pS?aFq!+M{{10f(^jGyA9a1f zxScgj9r5*=*MjTciui()AM-=^A;2)ReLqH)`@-#WZ!E+($yojkFT=853}EtZ^Ph+$ zW4x7?=g+=nz2@0(QATPw?bp27LPL_hVlns*hNVt~E6? zz~hJR$@EZEPjrSoxYf#lkLUI#EU`iL)-fxa_1cvyBy&q0X z{P{hB<8Lj!aExoLtn1i%VR(32>~&kPBSh@Ai*^3M2bYhWXP>rmgs$a0kuTbK;jI&E zti?LV#j%B}=8~VzIzYU}QO~v&08JzK4^+*1Oi*ZpYKKf?eQP*Y9hH%O%3`}e3<*2sE5;`xNP zO5W;_d11@3Xu!jB zx>co^2aNX&Zk4T32i8ox9Z97g5V~f}axYmO;M6auJ?sH*xW+6i&Z_}_KKO4cSWcYC z-M`!sCST;W-ri~qg=>92gljuOPGAE6x+BKW8u9T$_YVhn!@of-a;pivx?Og^FwPNN z20cA;%S}LGuOEj`kt2+zmFlm#V**?u>p9c!q45JMwi%u?1;YnJ_4~}7AShDAP9)wG zicvZg40D1OJ)4nl*G!>xMB%!8q!Y+8B9}Rn=+G#B)ZMFiz@!y=sf0G{n&3gPd?eX8dA2eQ3Ym(v)tPRm+s@%r#Q}?L*xe0f0-xFd}VqgsN zGVfO(vGIV(&Lr(!6l177eDJMLfd}MP`X5dVH-;VR%~vd2J)rO-hs2u8$bZTUXV-u6 z0G-a!c;OeuFk`Vov*Dcwa5;28@>+)UF!k&(0Y-d-Ayj*lhm#PGFBWl**nunTqvk;hB)zwub$7 zPw@ONH;vp~=?!npr4Jf>w15Pen$py9FG!GN4*fl70Vmek4RSvB0;_?&+D_bL*bsC* z!JOS2lx`Iy>m4CO-7#w17I|-Ab>Myl|q+Snpi=dTg66a+&@X=i|bCB6=axOzp{jCoKp)n_<}dde!b0kOUUWjQyJ9d3nAg;^EE!0-ZhbZJnwy>nrqR1 z$_N<-G`5b_E6{-Tdcd)uIx_GaE0ijBph3J#UQn<%8CtT)#m9YV;CBD9MTTnGd4@W z{@=N$OYp2Y%$k{aKA_Qn>%(uMH}U3>@AR!tKa>W+Gb85aoaT^;bNEw%H0V*af4eNk z3|=*96qB83z`nBW=dnmLSmsnQydC3}dG}>9)UZ7kMlX9r#4$pdob1{}Gq@SZ`fjy4 z4MdLDb_8je0a;O}@!TRBw6*WOO8QL#S?>q&=a$l7cQ4*AH&nzlCi21+nqRNd!B&ggS+9Yh|3m-VzevXq#5aJRs z_nXTXE*3Wr^-r6ETd0WnzN8B*h2D>bSih9Ww=f#MQOK)Cby> z4>%2PHHBQB)`_GbAE-|@j#Q~Kh4k=kK)M`DM}+DrI=XlWuuK zBA(x-$P}h)9|txhV|%TQDwE1Hg?2&1`>XxE;r81zPd50Ng5{%|Yr1xOL)ty@%|k4v zU{xP^LcPu#bQWbCemZFa8BJD1w|Z|#5sWomf6W9&4Q^kV8S@63vh+dbEEAA^`hDLe z4IkijBr9$hH-W+9mreGDVf#unjy!h51P=ei{wo{v!(z(;g?ba<{j@B{;*<{v z#F)TT+Oxt6b05gGCXiY68#T%LKuc9rnJK3UI9=9NO2Pg}T57C-dlOtH%{c)n=kNS$hc$FCo*E4RbzpVYhd zK@VU!aMuda?LN>{FYT&ULj(<14o$u|AAs}rmsr0T!KE?mzbHPCzu9b1^t=(UVtMM) ze4r-$1y8o55oql{t@?Ve4+s^;t`O@r1Y5|o{9<=FdKqUoN`~sMjueTmZy4M 
z*8l=Dw)e7;@%$a+xa;Bupd!Z`ZokF{0*8-&E9B6Je%tPQv9G1~@f1E-o2CP%QObQwus>aKufg&8No~-0U-n?p4=>PR4)oj5q6H7qR;RtF_5zlM zI$>>f|4z# zHcE47!M62ZLx17(NkQIQ=3|;BAkgAA!|_yx{$`zXL7H$#cUEF!z9%e69(!l&ssRZ; z*Au?P;{JONh}n}hV1trWl6;6KEc(&BPH?d%uzQL$JH~hdAue`>I36FfCA2AeEsnSF z{QOrnL7XL6MvKW4X0vEbA%`?!@a*Hjd$&E{X!nK_{SP(4!<<9YuEYaallaUBwY9)$ z<-nq_Di7F1`y%0dUklm}P~$#CdcbITPa)ro7EFbOHgP+9K+`jpE-?>nu(5m9zefc3 zf6CHz>5&#lp3MJdIqVL6Jsp8^m$e{6$j*y+-W@0`i*h1swP2i7v2irj9cp*Pu9(@T z1)8G*XX``Vq4E+rE}E(ZUAH|c!IADTDEQ9iw1Osd-MqPmOm&B}p`qxL>Y5M^9GZKr z-9cv1n0O~#6P%T2CGsrs{OZTEa-QJ*2DKk57jOr!_qA)d(lsH+@p{7NIX4`O*RD~$ zsR;_#*1p{E&<&bmpY~gdXu0&?sx+(3RSv*y&K7C4kd3huq;2Ic0q>rO*{Bc2 z%LW$tYAqdZ(8Vbe}aLWuz=)!tmA zF6IhF4_{x%eXajj?QF){+Kjcg8EbPh*6wDk?LGHI;Oq@07ig%VH5sfj_^SpqV~uFW z8q$n4rWtEcGuEhPtYOVqVhMXPMq4v zvxf*`;h&UU3mjoEzhJSLJpi-Y=^C^9j^NR*zH`hFpu}luyibQCT-NJxAnyd2wp8vL zz2pdLAszhp5x~}MvIYM2j_{znQ&Us!@7lGDwQU(|-!j%zWvpe$SnH6n79wM4lu5dNjgj*EbjTIW<7s~`P6R#2=sHm>1l5QDn)Fu zpSP>g@84v?eO&+SWB#{Dw)4*f7~i>JxRaCd&Q8WVJsI!(WV{oUtE=pYGE?3yhArE?u zQDu7f&0tG($wkr%1u&Ttyd2_Z1}ZPYUd)k|ASu^@*<^+U6c^4bnghxpcc$5g{U-@B zUP;js;1e<_*V#c}NKh@r17Dv%Q>O8u2h0=E_>qtQL8 zP?lU`U>-q&^3S)kH`CN0OLtp=ykj z;dR9nI%}M#_i?BKXJ0n?NTexz=Mj}YR;&UJhQE|%HkyKt#dv!E8Wr%rC;mwl+qIbK zuM+T42F{H~<(;Sg#RfEG*!*#%IY!VFhO=w$)U+srLxjo;5t=C^QJd9&397)V@e%WF z@uonz$?eD*p$uvJhU7}_nnLx`4CcGtN-()3Ht-)Z_Lm0NJ5@&$70saY+z%F!rBX0=Xh=>E=PgAW z$5l22OTwxb*)6xf;`X$v1C{F~zzyRPW^;gs#4i%})`Ch%vcM}Pb5Kd-cHHz-3}^`| zFLH#jK0b@`QC=?!mFo6yRbSxwK6|Y^bwd~m1)1MH>^FnqSaMLSng}fYM&k12GKVDM z#gLmULJ+ZLy;{B)*4IBaSg6dmy=*tu*U`qo@BTs%%-{C2xB|Bq#^>Z~3W9&M`^}33 zf4^hK_zoK5J8F#Yura>l#`q2#<2!PU@6a*6W5?(Z2>t5OxA!UnXOeN`1|F<`;<48@ zvnj)iiNqb*!lpo2XYak-K?&Gd+TtJAnatmhF;P*d1Z1~l0VRCSsBo??S5;S{`=q2H zwl$~EDZs32a^8`hCJ+Hzc3Pg12Z8Qvvfm*S=q$nV8kB{Z)<((b2PQyR7JJP>QVzy6 z?ied_nL>nS-qYB4dC0fnL~+cff6><&*Y|q&#wS^K;dE45G0NDPgYozbHNWfMWMYGGP!2xmRckTxUAZNf)dI zC&8!e>`_?%i`1j7u)b_93E=yU^U5NtA<@=mG06LxxMPzFUhmu)`wvtx2oW-RoQv~B zMtZQzO~;>sf8PD>Xx+lCBnHXn;)jEStwAN2C4uZG2ZO=Cl(syx`RAGK!n>6`^ZuRj 
z9wUZ(ju`JjN=g$C%rz8){^F7b(rs&4SZ_KxSwPgx22_eW&-v^Y1F@G?E6=aD0kKD| zTMSpNg{q%Fj_@9~fzCzJi=3W_!w2(N)1}!qKq>vfvg(iokPT0Xx?HmXuBn0KX;~>c zy~9TC8Yfmu!E;HMwFM_|`!eZ8gd|BAHhIcEw#5brd-X1a-;jjF3-QA%?eP54TXrt% zmVy>c&t*Nleq`GE_s&vKsGxXv1KWT7+3(27I`Ct>rMaigMgrJxrhS%e`1@WbF3yY6 zZ*R!L``nHk6HXgCKh^wyDe*qQ{C#l7ex;Huo!{e4XY8$4NyFwTucw!)tbwqx_D;tG zDM*veINba4@A>9mA0rSJ{Uf`D-|~}LUYL)}kbzJ?)0_5ohBzGZtIC3E`;Y8&{>YB? z|M70ZU%&llcsJnx@7;lgcMIU6-SOyg0aVZP!dxud8meDrKJ6`Ch3bn+41SsaKi(xU zAFy7BSW(2ibj_AaR*>}dJTa?*6%D`Md$nmE*TP9!Itv0%S}gAz?jNT@8hy`>s>PP> z{D9XdSUTGR8wdKH;(oKv4zJ&v`815S63MzqA2bTW_(|r|T~53xWnaVo7yEJlSEt8( zhIx?6T9;FNw{ZI!y?1L>dC~B)<)__WVcq0RI~D7;3bn8Vaipx%`MrbG=%8pbbD_4XsvcHA*RAS=u>@WnmL0WXQ&gC;~6cODMJgX(`N^ z{agsH@~=)dw*^8~=uaXW-rpa25SA2N%(do1{ns7#v^L}MJG5F#B3Mz1m-Io0evA_y zAJuaI-6+=Ju;)yl4fbQLG6!xiLg>bLy;nHKDuTmTzpx_KC{8Gy#q>%9`>d%VAaFj= zvwaHVGdm$v7+l!Y-UGJxevysH}2Pd{Z7o-$cft7G4{My zyyoEq5xo}wWct<;2!MX9&sF`jzn{&t3!uK!6L7Ty|ytC1USkKo1awjrYZk^8UU`QJv}toK`vCaS)CEuP2jBh$aeSE3BL^%tk_W9&0BUCYgf%4a$?-^|Uo zJ3p#=$%S-+db%>DtpL0P0~2{yqPnlyiQ{+`jY`9Q_`_AQ(2)%dr*V-q|=_*1LT zji%S02Y*$lYkLO-1+I>~DcrPc6Jhlfn zExGHzMYE|-aqT>3*DF{!(p^6#>T`&H>{9qwqM_e@Cg?TkH;d5_+-x~GW_+76jE ztf2tkr`cOOrnHf=)=@K7T?)8)^exs@)jTfxtpRi&SwfwBFl6#4Hcv{@YIbk7}TF?w+xM%@mMa z(IJ0`QymqI@%CIgLV=Rui2SYXs_5!?jn#>r6e#b~Ql4I?LgyVLzc33kvCrH-&-aCU zV;1Vhj)t;NTc}w=m49;Hx=;V!fwpjG^n5M&{Qlixz3LY9_ODq)xks)Rz{=!u>iIho z)E2GkXt`hkTeP3Hbf~c)8b9Ek)Vc(g6W6qW&_%7K}^OVgJrf@9p zZc3ulyYutmzS|_FJoO_#=41&brl)wE;80-WUSlB zSl5xU?jvJeNXEL68q2O0^_(yVxA5txipwOpJerlL^%LiCtQnk7(=q=mQ_pB(diyWx zT_fTg|C#ExA7xYva1wjUZj5t$`)hN%3V1BQ>#lX%o@g5__ zI+~1iHW}-1GS=y2Tpz3OQNo{lG=wgks56`~{cCS0wZ??ooB#KopM}1u?W2UMt7cIC z<@xNBWh8pv2=`D2wrwP!@ugY4uQr3EyK@)g)yyE{jBA@(z8L_=9gC)6GpL^2wx`?2 z4C)5Xyj1;W25#1`KeAew!KmK*?oN_9h&5Fm75BvRQF8><8qC2?V@%i+@OpNz(huhz zqZ`L~dY7C1#b-u2Fv^Khj*N0<)B{F6VZ5J;@%}2t`>mXwXC*GexncgR&RS!f!?(xZ zw_9G1*HN`!f7HPYat{vruE4o@<`gO8<}SRyjjmA=awJ%Gw`yffh8ZyO8;c-|Kw!%H zrw&H9Cq>5E8zL@#PnmRB1yO>*GC{?@kS}>W%Vn1u%-^B0`H$R|{)yA)>lA7IdDcS1 
zpX~VdLq3{5AVy5ByA&b^oVB`2mw0`^;*ED>$ES5rEFGh>-`WSNjVRWO2G>CW;_5_%_5 z51{=MB2&fU@buW~w1!4Z-{>_vujS%!_!d`}Lyixmn64->43qeaUVYN-E5bRxz}2J? z;M5}qs-tRCCkuUna)YbKiy%tp`S0kZzs7C}L6n8>$Kz?fFmW=nV3>amh`B|~X-4}3 zEx@hzj)E}IY;!#`^l`A4lptgshS9o>zMxWOW&A{qA70Jv*h+Kv{ZFPwQUq>J zy#CB$nqDbC^z2>wbLA}NPsGw+v{XUhSR^QyvjVrv*bSL(5eAA}OdSW_f6`@Ll@w=D zXvy5(JK5y}gry1%A1iVDQs-5@mwn)~RRqDKM;P?*{$uz00M*)}WO1$_?CZU`W_Y^~ zoa|Mbu5S_ox09bP{5t9bdAt}L*|#)4QQ8+mp1i0avaE%SLl82q;tOQu6Rq_h)&l>YP?Efl zFXTLXQQ`VyEfB0`C1@U)zuk{=t@cX5=v%cZIP438(@9*2-4Y7UYhD333SiAeQ>YW^K zXqozQeVdO2>?xKlP-*mr>cDSqXOSd)GSBsB81@Fj1`lzw8`8i!D{xj~%o|iDh4|YA zrGQ_)ru4-vZ=l4P^bIYN{A>N}aJNet$9&*&?AW^_x@wSA|2RGd)^AgnkK?OVVTGFApj3trP`2MWq-?4JtV^6%ZQOwM`|cs&%> zk7T94B`U#<=uENF%|5`j_Q<4IxdO0$AJ)&rH)AAUQLvpSRo>s!JS4ug}SWfg*4C6t*|$ zlW@4lBL`_x1B+H-d#b+E-@Tf19c11;&l-Lm>latO9lxMFgk$^pR)OuuTxML#cO3v; zUppDE*Q{S~=7>BjFZ^_271;+ymnDt-G?NFy*s>gbS?oV_;?9U%m4_FTrc2`GeL%K; zUE1wb1)yEb{}xBY`-y#HLEun?q_k&+z6w6@yZ(^#hTV!#Gom&nAc@Dn+9&wB4ezh$ z;G12{m|q%8I`}>-LJ>70abm_Bh;MAF*gKUV<=L?t>EFE}g+roV&|3wh`aC5*4Dj>LRfVC%(56mBAG%K^_j^(@oN)io0e);4C(j9-U6g?B zA&-;&i?$kg*lHcQneGD_T|?3N+tvP(ehf;s7-!xVBK#t}MvKz!| zZ5(Bx$som2a;q=Y$x*BeR!c*P2S@E~!2YZ(@W-~3c)b&sZ}XdAf7N$fq40|oz)FSs zTU2bn(X1OPu1bQ2%)lb5x-YQClom~qBq42LSxy4b$vl>tlaP(rjOS(VopWJ7cO{Nau;RGz^066 zg*@E8(2{P_mnkd<4LToq4%qpEn|Lvk71mG2{mlQIkU&_>Hh*5GXZf@3SmF+Zv>&Sz zZ;TShdp#*7_q}occ0JMYir_ynzR{oaa?StgPdg|pdiv=aX!s_tSHR^C@K)d)WD3Ak zwH2|K1$kI=6XHBxEIrBvhA07CkNc#eff6$Q;bFAcM5YY2lHDu*T@cI zY}J*v$Z09u3}5>5;xt~5_NB=PUVoZHnCs*hVr+)2@RbsbS;uB<<9-vLbH(To-0^&@@;jwgEd@f|p2}TR zyub6Q=MKxU!Go#8OvwuFFq}12<+*zWBx!0Lsg%Iu2U;d;xL1O2-1gpVcD$e9)tqxg zJ_rj5ZOUEg4n@W1*$&9@10(;-j$Ss2jP(ZObeS}e#s#DPBIZ8;`{v)b>LPTxA;i_9 zIPjYnAYqG=_ZTazD$3Fs@dDZtvGQDxWsqW~#hulH+nGLd2W2jW1eFxarZZl!C@0Kv z&8ul**=c{aqmf>ar03wr8~=saaza*^McWGq>KiI2r@s+ZQdhIP@MC<@d2S$mn&@=x zJnQl=o{&`Dd0pxA7*VLhlcL;)@ijI%d*V3J>+4nDH$|RweajT)4Hu8b?d-|l2%%qz z0tUGQTQ}hL-})Iw90X`E?Y8j9^#r7?LhLG>A-?jxp6~$AhoYMGHS>QFA16Y{g+5Oh 
zd2iCTGHVGmO<#RZm|x%Y=O|?r3uLkf%hVC^ewhR|Uw*_2r#=l|FADR5MlVUTQK=Om zT^o6V^N<%r_#E(kt#x4Y~r=8vwjdu1RuBrTe3H+YQs zadp_xfW!@FZ6fBTpLjua%|iv5u9Y}9YmMk1#Pb#JsfugkgRK3}3dK0_StxqDt|*fq z@_sw53Q@xHaSLX9^-NWrhUq9^T;)Pf1uO~?6e~fL6d|~7p zBVQT$&L|h@*wCh+GA|f5Z%@z9{hQC2gyZ;A{?Gl;f4;+9xLDM4Q_f?yFnm9|Kx7XQo~+p-k(^5Xhp)1K_)00_ zzL;{{0PODw+DVb9;AYj}CAZZ88Z-Aka$Zfv`^qlRnK1;XNWz=yF$!d?xVZNRhavEX z2PlU(P$2Thig#DFjiBC6Lh)E61;qI58+;EM0!}GduNqK*u#x5I<1I$uXghR=kAnh9 zR>|W^%Z$Lhq9f;avpt3JEP=Be!z%s`kSZmKz zxo**d?IQ(?t41;Argbd7t^?i6@_sk!Qh;(={Lb4TU3eB*kiXV~0&C9y2sXFY1>uUZK$bHt@Y5GyarJXe zDm)b9s!C^6=`K3&&h#qMO!G8uxY2fr%k7TnIDZ&(F$7w!;TsH3@1pK+m{ z??V0E|9giZW36Atn$nE56d7wRGS*^btkuX^%aO6xBV#Q{##)h#wIms9P5xDjlA%^5 zV=YU@8l#LgNHYq~R0#^G0_9#`cxDJ0T7=z??megq9RJF1+ z64O^gdj0gBDty8C&`vUZHyTQOE}#b8ft+w^FUAtnt)=p6VE$SCH!8+>f8&p!U^NJ7 z=^Lk(lObaAb~evpHQ1a#bk?i!?^?)z)kHoo`e5$t}xVKVyvmmSaX@NCNpErX2zP%j5VJbYeF;DjArB` zBcB=N@Rytz*4kyP#miV@im?V2V~r}t8di)o zt{7`zG1kaptf9qNV~eo{*IzZY34{}bKQ)JjFAtmf%9%jXT^?hFcd|gyuq_BmH3JpB z7d%IPtNu^TA+9q^lxMU|K3x?HtV0m9~) zOQK=8|F7xj;VBgu-mg?1ehg!?LP>8^Rp@LxWp?F@3DB;8G>HsTg(SuI-8(c)>1*rk zSklm*XbQ9tK6CXx6$pW!uT~VB0z~#|h4(8%^|(p1-Y9NYQj+6%t^^Yt^6`_|BpCKR z(YpC3rqAr>E1n?|6j7R_GW+DAPJX3I`1~D{q|`dm>U98N^@cIlW`Nv#wX_NA=zE8$ z2N>5jLw@BE0zzx6>xjkS^1NEau9OlV*+szo^K1~w#{`p=ycO4C}<}E zx+v!w`*t0K$WI@Cc9aAJ);FV{56T0f{Zs3mtr-7)=y%gV0gC4DE@vVElsjBvVN-+% zx4VM5=S-o9=Hwv5r3kV4#S+)jOhHU%&pOR`MHn`Wb1OGC1%l4a>Gs8nknx#aUaeqePWs*FX!1lmPYweRNmfuvqu(8~|TFvz{? 
znLvskT)DCL>iHwaaHV_sK5bt=7#^>CAL(oiiSlztrltKL>Geo+o~Dew@}j|>6o)-+!p0yi<#)n5 zfd*d7Po13hHHO%jH8XdP(xBo=*rvFv#vs{mrXJHxgURzzWu?!Iq3OcAfU2)FQ2Oe; zHtvowEdAbAD7V@VvMThhEjwcZSE}E=+^OIPla+nrZ$nLBbo-j^TRi<>Q-%Zchj%7m zBj+br8s-Nfm!ir_1hHMc7V1O0{6KBdm%ep%rXVieR6sA{a&GEF7MH~hfu^N9J!Ia6TNpM+gkR8Fn*hsEzY zHRoo`fOW`BowUv$o45EUBUN*{P7c{>3#FYh1MvzTG`h$ihIg~w$aXi;bH~@+P#`G#eOhBqk?7`DmL*c{pyFJTw^UYN zYK6`D;e#}|+0S;`4eK*ay*E0mfCkm)xu0ESF^0%{??o-o?LKV5Kn(oU>8f*H43qReUp=Zvh6HHT;vCX&|;}y&B2| zU~7BS?|Y2~u!K3Z^ESYq#+j*aQ)Lf;SGp@s7h?K~ zn}2YZB*I_xVwX^`5cm4uZTfP-)gCEq>)yN}O&>isdVg`Jlr`cXUq-y?s*e6|+2GaV?S~vC02JN~e+r+n83z;|T*!4!( zpicQBeSNYPlDO_yam>I5?ZEA;S81Ua(u)%%-dH2xb-v;2sfBcfJ2JV)nLZlp~C_QdA$UCsyATd}M<*=@sc8$kRo? z#N-F!U)mtIRf%iT@%n*gwzKx@ZBbQatUJ9LEXJu^}ROP zAyp{_zilJhXhC*NA_{(psD;-4%HP9vF zZEj_+>`-F#*9?PVb%g3t_Z?kgN0;@*HBMKq9kxYwRsO0zp6WCz zkUCTwEXv7G! ze0IU{+XwH)(~A^<-SmC;F%g_69eyyo@wGf8tUKwf^xXnvBP_WuGbuoR`jQ5p^AAcw|7VWKpL%aQD4?HJ!B% zSUH^fmR>do)eGy=nmN`1m#L)j_DpjK8Gn?!VN?#*gbTBrO*Dt1dpyP}IDX-J@y)`H zXby1yLQKMz`I?kU{2QjY_6QB`*H%G3#{*Ku2tYXa*JzJ*Wvkitc0~>l`0U<$_(A8HUlr2M~k*+ zsY2aK`FP4}Ga$IEpL<7Dg^9PSwM?w$FuZ}!oFh~f?iW+^uAeZ6DV6flPn%W2&Ht=| z`XzIq{EFJ@?5ql22~!oHlq?`C@y?-oeN{*cPwM>bYXM0CPy18IIKRO1A|zR0|LQHH z{zV1St5VN2T(^KG|EK*!!m5x^ys9H+H5n9+Z|SXArv?TGl=#OFlcB`Q<>RvgHDJ_N ztimU8Job;?^V*pQOi+yJdoMYz#P8f|4OPW`<0p9GAHBEvqxYK{cZd0IH-1V=Zk2}F53T9mG42sw3?tukN&N$}3bZI>aqN!u2*`j|jY2D9ewXNI7#XV5nq znLtc0Iqn<52B8E zLrPSLaQW$tfld!|P?RZra9`078aBSny-&6Pr|4@}+}9X_Eo~lwb(4 zMz#gl2P{Ccg_FHNO!8e%2Hu}T(Fd#bVX9;cEb%7$)Qgcbf652O?kvMxo7r?kWc$cXqSbQ`lesI*q`QT#F zLrypk*7KJ6@mmLy_^uY!tha<~D+?cJ-O>TsRac7w4qJl65&KIU59>h4{;7(g8l0CK z%y{P)=|B(0ifxwg{dV%3ej^=tWxJn^`ytNL*Isyjnn?%tBnq?eW4$R~!*kY}pbeXt z9)1`~vIdD+xhd_PTJRL(iacvL)hH_LT&qRbonh-2JOqLk@LE>~|K4d0`S&-R`2Ab` zFFj<`Q${^z)N@8VV6+oPJ7TmmMmuD*Q${;xv~xy3!00C!{Yc$xg1VHW8C=#EO0sKK z0q@m8qp3KL8*^Gew|=E6jEB2MRf^*?M`*@-e;lSKBQ86Am;_l|`*xf3s)CrPL7GcG z2`C@rraqoi1Af7i&IYk0h~Rx^lbWColwJ?+OQIyuj(FOCSwRB^y&1 
z7Y#7@?bz3M#1y{7^O@IMX#%tEtr*tbrVvCn8$3O$2|^g}NWppSqyr@|OA98FiE=B6 zI9|Tj;Am8>4TO#x0~}1IQ29IUh9s{J2w8QUgf0$G<|R*vrnJRFow&wxAbar8NhwG!jaS?a};qKF1dS>-SdudFEw^Ki2r;j6dG^6N*1xTrFJbPGq#?)PTq0 z-Hs^bP~2DAN;3M{s-?n&aU0L##C|6W#Qs|3wN|tvQanOg^YExSy?wwn>B6(GW=LH^ z&LFqY5nbH3s`AQBGqhK=pfm2SBT`EsvQ}O&Lsr#xwOk{P$V zX(zO=SjKRhDjD6s?Q6bD(+SBZ-alV! zXo-G&xZYc2?1akit(fEpvqBzn%?d+?PN?i+Q{+-UYgClWboHRD6M7@QIC0yY72>L~ ztJUOmLK{A9+@=1}5^*XM^3KdUBElkp)|ZKvXt-&1*7>a?s*0Ric23C>Wr(E_Z{BxA z0^5$j;;&@ncSzyM`%8|fWW02<)fqBMz4$@Zy#Vj$5C6hTVqE9>$H>n&5!arjXHwA0 zn2{_WE+?cE_#!TJF$E=_9`G3E!TVjWsml4;4y9zawo0yXLi`nRUt5pZp&w^?TOQ0g zqFWa(`SaV^q3pA~WP?#hRJ}tuSg+L2JH&b2opx1Df(@`)u%tO2+RPRopvfpJAc69=VvPFd!I=xI7da&ZW?MjpQy;lDSNQ>C>4qC%Geb*PDRdZ zX?~O(VaUzW||pP6u3o@Y^Xy;l4B~)-kYiD7YqM6E(#Tu zjJVf0_)^i@rtUYd)>DyHu-ToPrc@;OWLH(g9x9r7B1#HZqN0sECO0}2QPGOi_5&}3 zs3<72NL}Fu6?F;+rg-yFk?$G1pG*T(6#X4bT#SmY{a7ln<_o4HpYNlp8WlaOz3u#w z%>k86v;0`UgNlZxLjx|abU>VknZ2GiQPFa-=1C(l2h<#~Q>BmJ0X;s@x?)ZQ^W|>O zl3hQjC_$V1^XL)>#5KdBdxhkHcEr}rUEp*;+mA5k%cVIWw&x5-? 
zi&MOu<(QAUTG3A)AO}=e=gaQ!p8A(OOr4^0*Bx_50S&XWxe`O`;}&k}S;{zr?{>!zv5#k{cg(M>m`J(k&c{U;TT_We3_GS&?>Is7Qy zjn|J0^*$V?=7xTpv5WsTOGQxzfhk?=Zs_LvGhv=<9nihYPk(n!g)tM)F|6+IT1FB2@tMYmF;UCpFR?d6nD!Ovn6W z+|Q5Br#suu_@Hk}dMs-;IG~IUquuk^Ppyw>9$!;0efK-p2c0SAJ>2Z%fZ#FT`UAE; z=%i+p9v964H5@n|{b0r$^|JHJ^W*+oZ!>$jRe7VKxVpp#J`O0M`4rE^E^pLvGN#Wj z+5z!=q?HbG`k=gntt&3-dD<75O9sf@ws$~B0t&|;2Kds~Kg4e7!M4~JX}3odyo|y6 zqH97cmGPz1Q+#gcI=}6{Nb6v0x={Gv>ouOuVe8x$h>o>sZ4fPUq~qTm-NEzNTJn6I ze?TA_;O3XNKJ18$-Vwj|$pj)f4ZRMh97iNayE;D1g2%tuRTWt2h!X0~J`EoaKuCxn8(wchvnYyt5 z4075xObbMJH1t@uT*v1_Q_{(`{(xuP>2FPw9X=iUNd z=x_cmWJ)$M7&Y{Cizz5Mqj(jo3&VcFX!e7M=oqUr5-r*vr!5fbUsBh9x!(chx~^?!f-z5h2Zj0qjVswH8V$xRQFozasg_ivDI2cYs&L&;CB&WPrHbzC|)013@0 ztN5Bbqk*RSN~s}#M3zZweCObd%3lwD;ymJy;AdvsOt3Qwcvj53)xaMm-AE~3=j4pG z_AlAI^`##guX~haEJFhjX?-VCo`)0Kecv^pJud)Jj<#AJFn2;*IP>rE+6JJ6_{94* zR!%7J*^=dAOabU}qTpf@J}=*8Np{}s@kbGyLowOTPKaX-m&uhA{;0gOyQta32}$yE z`E#xJM`^tLL6hs8(6LNObst-Q`ss+6c5W+YO7=$v z*j}!`b41xZkDZp(_#@WEEInh79g$V!V!NJ6e>Awp!9lLu5uIkbyL*R102=JCAo;gB zq8%EZkh%fy2gfHuO^zsSP2@S9odGCq`OpnAw$BaQ7JoWc9)L2m`3UR8c8 z0F`j_pDWpg*N<7Obm2a>*UaGNQ5{Fb>bBzrQv>E#d-oe9q9ZCQ{&`9HB*sMpMMTH{ z)?*J2OZs8sh(wdOk?(2;ATFuqt!gAkgp_Zd-6|Y_dQQib|hmyJ*S(5xI`_;IfT z8YGq7in#2DiulJLq}+8tLtK$c)%*NVMDpYZizWwTw@lY>i>Dvz2_uy(zUhEO?klya zsrVt_AGe#XazF#gw&~Y6{Lrvl_Taix4v6`{{=sK&Xec4qaD(qr2c(z%)1|$GhWNeI zSBf8ZK%F;Tg9oqDP}0Dw80mZmlo)PMr3nY*8eTeNSYC+zeMd-5gDMSO zju`P+U*LdryyWlQWTBx-Wj))XEWE$7FTVV)^F?IRXx8gI2Q>CUy6(EWFP$E*>T9kI z1Yi1mDi>YsW={Jcp@IRicPBADQ+YgBNIvxbujDp1Opkff`;!dUu(h7^Mv|@F!Tkqu z|0{|*+&_LT zywG=HxrW9t2b4b~ZN%f~MIS$C7f4Pv@IsVHcM>n zq)@ZI^xy#xl(XSX*zy*wK#7qP``u8xSo2m%TfAS+2lrVwxgm|U&68IGFn>4AG^O&n zA+KfeQQOyJ`CNQT?7Z!YXf55rmvH>i@b=t0u8pop0mmD%4wyb~iBtSSt|(-xy|UQK z0kJ;q+53I~=bKjIi!Zq2_58gboZst$vhGI|RN#0^cJ8RpK06mw^y0eUZ3zcdG%m_4 zE{O5B(3iP@$LF?fR(Rx$@>5&WUs_{+ym|e2G6|pa+XTslW>_A^(YdxZ&UAVzeP)`d z_?$#j?FyQH*dF>hzvNapp{$cJuGd2xP}BCE5fVO5Xv(Lvb>B7zG(1Qa`z`N8A5S*7 zG5mz{tdL?}PBAi;pN75y0sGm7`M&?$U}7Bah*X9}UXOb?pd3eUJ09$(7Urk4^_Z!1 
z;{MCxr`QuQ|ES)Fr|YmC2&C<8!SR`w4o}8@ylxAQFK0s?&}=$$zMG~4oj;WPSgliU zsc4ope(Eg+%eRg53+)ijQ*^jL(=74+K6NaAqfe#tCt=ANE8Sld1UlSX9;jh^+PJg2 zu#bYsfoxZA;P|jpBsADCBlC)a@+&^n zTz^PK{Tp{qT%DmH12^@fWml+3;UQV9nUjhlAHQ7YQABz|gc9~Gsj z@lblialcEqd*6joQLL%NNIK4sy!N%G_d4OcMJYOW`y2)7v~f5sGQ|C#yKf13O~HAW zo5MLgt_1bm^?OP|{g-{&k1wO5@5>#v-@m0Geyu7Q!Y4fLh|z)YaSCd9`IYrU4~0%Y zdSGF^vW|i#*c`tR&g1`|VYtDxl!8)Hm?u@hhcZ^msgSub?&{q$=wXKf-?tatV5Xpq zl&NKzpKX!KPSV)s8G9@@)vSkEwkX9*yshhrJ&G-fix#lK_%g3cO^rS3zm&SK`>hRH zcrHWP>FQXRZ-c6LjfflHu}7T~SgU_pqNb3jp1X|gQTeM(^(iyVSF7r~9FF#A*l@4)GG7X+ zd%xS}fHmGn&#tP$Z4^|EWN!GZ!22}XYps-q^|D*%fzWz;)O0O3PX7c2b=p+l-TEB2 z^Q0{y7GZj1`ir}GD9EkYF8)X@#!ipQ!*sBGDcN49S}EwVKp<->4fFNuQUP78H#xfR zE@f=M@;k`1gB|OA&spi41Sh<%-H%eg)i}Q-O8CjDVZFimf22GW4X-HMyyT}nf>`I; zL_I1}*%B^doQvt%B1q1#qawFmNy>SF_9&x}_izc$|548U^Ghx5QJqI;YwS*}&vUB0 zn~2dHv`SrEf7wc8C?{n}<{#P<83xO%FFay1&dBtdxBlYKPE% zg(r!d9O(1W&j;<3m+gPr(c^<^xx*5zrC47D)AoKWaiH^eld<(s(sk^QT{b_@T7~WV zoLziZC-#Si`!)!~P>_O&^-$?3_P9-t_ zRMc?8H_Z(DwXrW7OtzL$kxd!z;S;isC`F5>Q=bF-w=NDRVR=WCf8*E4yx&`Bhd#2{ zv6t9yNj6WORCGkuTVLq<+dI;6PQtYUM|~V;#w2prn&Ws?DvRe69wE5z+#1yr&WUL>kh~ zlkfIB(*5;9yes=kw267ABN7rzTXOg!UiX80&0ZY$a)qCfIej1dZHar_6fI2mlZvLP zVMkZG(T8v+^xdNRZh5mEJ%5R4*Rf0Lw?$KhMISkGoDl2GgEnbTTO?*>og;k( zuXj2wdb^MndxvsvRht$U$)BzxeZ&yufK}vJz7kStg=QK#fBxvOPo;3tVyZU zLo1a3KueFab*&$I5qU4y$j!QCA~aCZ;x?iwVxyF-BB?i^f# zJHf-j-5t*3cmGwd-o5YMf8E-et=ZbD{@s!poZ{SvUVq_CvL*aX8uxgZ=JE5>i6w*QV0yBoPXM)a z&Ghmq)ZpDyWz;c;45y&S;q4)%8Gp@<@(-QcMLcS+SFdw_+>@vAn zUfdli=X}s8{T^?Rfje+C=ps@fZJx!O^1QEhlKhnQirqJ>d;3Zs;_3r7G*DQ$ovM~M z8-}nz&lbtosS3tw$nY2q=)FoZyg9JESWpgENsXByi(`o|tM#wfN+m&yhIvn*N-LiM zIY#aU(6t(7<|##1^Wx5}R7gHp;+N1F%;I7D_^CIUf5h*QV^dWwwfa`_kci+F|GEDX zMFK{?KqqbSEeSH!T6h-X?mbi48MvceE;KoHT*Imdc4?{Ew$ujxWOyT zjgsN#@feO#bKKn4yIjv)d)r8-z-7zj2-0-PT-GeEwWzlTTa0;rcV|vUy*k9Z?VD}m zlabO6+r-n^BKh?TXK*UD*>~)Ix?(n-^Fusr47+wTYFecEmr%_ zo-legd-Q^*huN#!)0*XP*TJc+5B?aKd5+DTgVI67Tpm{tH;<{02>mI%-~Nj~3hdM^+bXGf%8KV$dnFZ4PG0(>jQ5m36|v-XVvblAjD@*;>8 
zC=Htw8W<^7Rrx$?09P(sB`)L>X#hz>v&vX&B0|?9bI~7>o1`2dFJIb;9?yb-)}_e= zbK2VcE7XQyf;t`(e^0v#i^@ zLB%(C2mT_Ms?ND+B*p_!ywO)U#-`wh6Z>!RA zr>S!6c{7u=q`;>_!!;o2hz6kjM7x5XmM}%@?zrhA-mS~`r{l0(?37oZZ*3$8%=%;fO!a70#^gp7!UBi3 z3JtLP*C{ITbY1VnndF7Kw&m*yUXr{xBL|fcmaTNF0PC{;Phd$zhAnFn;{*$dd|*x) z)0<)0!Xm-{Ar9SRT42_m2oA;Ij^kH&wr^g5Av05 zD(m4V+naAYk#?Xm+0Mya*NcdAo|m6}Zxd>2+&nUSv|^aaSF?F;w*Dt3t@GZjSat2l z3!~CJr7^?;$kpWz3UkW1fz=$%F*&uPd+~2qZ=;-2!8WBm8*p)^Z&WOAf4Jof*Bqa2 zhJ>Z57!oc#fLHtxBe*`QrneQ!4gB_FOEe6}a&cHh7zN_;t>5dPwgW-A1m6^{%POoU zNW_0(T|FZIG)lF5?0RmhuHg+wto>pmo2<_(($;3Z`^e1j)k$DCg~eI4R+7)A(n!VR zuU{tDZzhTf<&PfuMZ91i%PF8y@sA-c`jisIg@Yozi9~VDsXrkIbM>&L~AFz$x^Qkadio3S2E*>*%AC9 zYu%9{evS+jQSN%XWxSB&9iN>=5MCHbw(3x5i3))ZOS?&s*!a7^*p{vXcp;ov_kk{6 zU_NIX*gfhKtM7MeVvFHxXJmVV1<#NGuJh@tt$JagbyUW^Gvxcc$CZ}hw%?t90tkZK(g&91y&0^|Jw*Cb)K``@Cqp|sl?l!x#M*PM z63sia7tUWR+VmH&#T8!#Hib(H>KhMqkSN~=&}RbZ_HqdQd-!o%<>yJkwxpBSN|XDZ za4>d(BgggNiWVF78TW|q2ef=WnNp`>=XFQ9*Dd2R^?qKaZWFc}e_cGjlKPTOZ80lqA+g@%Y6v}1=R&;8f>irda5TU0+Rge=xo1uFv;LrVjAaHvm{#OhQUeNTh=Yjve zbvz@yYEAI))s5j&X;FViZp5&vkDXmJukKQ?V5DVd;Mh&cYMFG>rh4!_HPCo=QsAFE zIFt$iVuJA{9F`quj~UIPt~?~7i>k)1a$NB9h90^LNb z{Lo-9AZuyE3~oa1?vPA3(C@Xa?X%Ax2>;izm@> zvx#>f5#TqB+A>Fu{LBPG_BYZ&q6O1u_^Q#426fM)A_ z7Jok``P>VeThj9_Cexmqk6prm-xQ|ndt984pH=FPmAPP0Zj`G~t!59eS?1Pt+edSy z4;)__c#qb3335dSh${0Ga9$%G?C7vj+7+RoWbTR+GqKEXrYz#cL{M<@fX%F*YMDt2 z#TI6RDS9R|0==J~ezI6y{t`bnFQ(j_CUO;UCoub7?z2Y5gf?k7d6dt&(iAW#!H*A0 zenW+ke+vpaVJ0eJn z_qfnHCk}86XieDS1dip_GN%8r3dBD-aTsZ>J%5d`-~Fg25c3ETnz0-j zgUeb!!S4ZVfY6d5sBu*t3(tU|Lyi(Ki1p&}$=%t{b4XfchCN7FcueE8!9W=x)B^%j zEkt-g_d)~pC9QTxqt~U`NRTGzBlAOCkA9ac-JsK4(Q?iW+cPZ%sm0W1@!Cz8 zxtmagzikcTab(681RaG=pYlg4qn3SALqxfquR|T+gjt%izVOnC)g>5Fw*z%_@rWcv zl!h|m{YWm+it<~{19sw-7Rd0H_TQ^m!fxbSc_w|oh8B$ctam8=EL{6Z-XFy+q_?+4 zrEFpof=f!ct%d;?8fp4M@0=1Rb}KlcXlbO03Nd~w!va?j2i}Y~P$=&>$PM$0Qp3(0 zlV4z}nbA|pr3+a~vb49EEm6W3rjyQJ{i26dDav-b#0h$R-3C!FP=%#sA(Joo=Z;!O z=V4EK9s}ubb38;WPPic6GS5znH2D^^=-jnq-mrdA;4;q 
zmG4jT_Ej7Y9+LRS3c<1T!K~vLBGl+-kv{yEMXPae{b3z>_KRXNr}=IH0`72-lZJwP z4C5Hhq#c-7r`}`>{$o@PS^`CpMn<=szadSgyY<~SePkptVi=31K~E0~X~MvWH7!i{ zha=$r(C@L?oVg^mr`Im%gO8N=8w zqLFmOml+#pVY-r<2$l{=&}@?-ik4>`t0A}*s=VWzxC7g?aBUGe9N70 zQrJu;Mr>l9>)3`Bpv3jin4HNgvgR5k$B0@`DDQtTMPP`K*wo&hNq#;~~D9 zqw?y|d=wh2uw=RbbUk+5 zo@s0;`Ahg)Nlk4_nQEEF5KS%!~0lEhQ=5)8MF#v@pwzaGiH(_bRH{ zVc)I8%GK89HSu>H2SNx*{5v-xw-Fa` z?@fvAAKjo5tVCioVKz#gP-ZyI1}OA&?=v2O!5<8jXDma8l#s6rD7qDc!J~NcximX> zjb&-4Ako7BC6m9ll;{H4=(gn934dWz zB=!r;ZP4Z7jyVWXM^e^*6&x;w);1`gf{EY~nZ$C&;I9QoCqGIX#e7ZoFgxp5TiINw zX!RW1hXa#B`p_h@Vu&PNTS_~bMOPFxArKJ)$5kkGU49uyC8h3J+^0e#_h|)ah})*{ zsDz8-R_tTymp?Us)il* z<63-{mD^#Wq4w&zIstc$>T(%@d}?kVTSjm)8DtJ3>^l?79)`tGaG2 zxqZCtYCpF`QQcj>p4u9I1DHB!$RAs?!}HSKJIk6q3Fr`NyUUW-Lkhd58A26cb9j?D zrc2T-?G^VCNT7dwVU;&&LR#DcdsrXF(3L$slOO33pjBABlCYwJOr_lr!v|*v2TM*^ zIYei5HWk#`cVyF|_!dH9{xHh4=}78CKVNux7m3@Ey0aODBT3FUIj+!b(6$K$nES|Q zlKoiuNNPy>w6@%6U{ApDS`M>z$9Fm%?CS?r>cFL2w^%ZacqBC0MMrHqFwi)#!a|M8 zS?+a1R?hr`X8V!brOZqxaOo39M z@p5E5rAlGdW5jrQnhQY%Y)g0bABAV&37pLT#SY!Aq>~e$8s((DbA==mK$-9|>a0c` z2d{f2<@d@V?IbzeFul);q5Jb*HcI^y`Mm9v8%+%Sd8GGEW341_;H5RZ$vS+R;e)U4 z1X{r?uCA($cvwvAPfvFbNOO;GGQa=c1^Rs7$n}LND8R(?KhMUf>aH(28qs9}zs;Yq z^Gm{S(ekpzAJkH1jC=tir_GAT78)$->|NPQY}0o;+um`n=eem&agCv4%5uX|5yy~U z!5acjt_1(I>kccE=c)qxCjlff9&dJr*2p3kfi~UqrC^r)x{p}e>8d`1pJ=Ha1*XoV z#+Gu3FYOo4+g?#aX3H2s>@aGDXoScG<6M;55ZQrwLqztEQRF2;KkwPc|K6N6YaR1_ zjZgagEeB1sifL^jcpNsN`5M%b^aW6HKMvewIU3NPY1IlK8dfgudFYdt6GCq&@A4h) z#Zz1eJ3Kl#?&nn-7FA1Nvtu(q5t#gi^h>MF5c`fWbn+5p2}{~s*vzk?S3Qg8LgJk#CDR{XUs%!}2 zl%vw6ap95ZljUw9qcoD_@N?2Rr1>Sn z1==A|gANGF2tW_1W?fRzEGF_ZN(kTwYhUaa0>HA96JImx^mN1iUUe;O(7hA_^{#v` z^h+vNb2hSxscr9d+58S!T2H`vM=l03ka>qOI|0xt;Qed80*_G9@sWlK0c#qh<1;TE zbbs5u`6dmCdTF3zrbsD1`!>i@Iaw+S!sl~1{&CQ=A?Pz9_c0q7n7#FFnH-NQ)2w zOJ%S*x65V=jZf-YisN1};MCOO&q)EPWj(8A>x_b)y}H{V0X^?pTX3=z1X~v#>G&n2 zEACepl~;Z%uM~CN7RIggJ$M&^I%d-YXpie2&-fOX`;Nfz!*u*N?qVa$+ZS-)K|i>D zjoq&G(~XCo!r<36RI-%lTy%TB!78wq#gi@4vp@5VM3}$pVk@ip 
zlbaUak&%m1Q^wlDO}E;li1Rpd|DE_XYy>S%&C6o1VC!Te(a!l(fq{zM8WjzA*+4kP z^Nn>AX3$Fu9b_OLs`$bs{Cz9n#yMpTT8PZ4%Yr2EQ> zMsFDYGG>6H?-(&sM>ct#wEx+tNnl#>tLN!GYmSx3)tpKY0LpqlzlESTP8F(mXK{!){({0sPjA)tSK)=51O3HRxeTa} z`5UP+JL5NN3vzp4Ebe%G>u=LtHWE_(67B@jLabL^a^OPV1ADecLlJ=jt!OeRnum*FNzS~BpH*jz+M$G&K^5Icv zd*Rc%rNJ8N!iLCZE~x>1>DKh4yslAEq-Lp`GVR&4)Is4dH>SwLj9;p@c5XWTE7Pkn z@XZZ??Lw9MFX?{xd(-8lG0y|#MlsJLba2vq)LhlPHVZ+{6QMeL19PI6JZMhmLtQ>a z80Z=z7b2R!E^AmLN*bSG`fI7k$E-y35_9wFC_`Y2KKDbP+yKHL(d!5YZz z75OQV(|}uai5$-~fa5n*^dZyVX=0Z*|B~ezop3-N=$n#epzUd9y!Qj%2hsW12qm!n z{Q$L{XNd!i0SKr<%LBhGto(_=zQo6Xk7E4OMoB|};Q{pXylNN7KRi_8q+!ed(XbkM zg$?s1e8QY^{ldKQ;@PqKd$aXv&O>Iazde>Oxfj_-fs^^MSb3dQBhG-p1Hp}Zcr&@4 zZNcc4VR%79e>^j^^B~y2>5@E&P%b%@PB|XBh1%c;XqCOE$Zbhv!|AcU_}%AFFVTBN zZf}6u(6a35BTLa#v0{rEBCo?*zR&r$D+*p;)p%Ff`RM*Mvdxa6gcPG0-1+DmU{YIG zr{JmaoyL$EN%{qmoMRuJ^(eFVw+~}EwRdOVhpxvSw;h+hd}m?dc8@ZFTWV~13axp} zu_i;!b^ZJLP_vTt)I|iA-+SfRB6}Ut)4|j|F^|0YHh)tnp|>Iy!RQ(li!$u^8NX^} z0uiafN}w)wn%p^#H;|~I4R4_JhZ(PG7S+z-Sb}{s+u-!c!@UVb>A4uPJ!Y+p?WT)F zdGIJ53se6OZ=U;USCbzvD!@{09F-chi;ji@TQVnHEC8AULGs!Mm&oyy>IsD(Jv1((D#2h3YBqVQ@s89BqA>8hT|g-s@!Ri z3w2p6*GiX$=#~z1?m%kb*#^<qMhb zWl6h8L*8Wmw*6Rzv*2eQu&X_3QPm&=b>yADQ@I{uWjVa%gp7=QRl3UUeZ%HXDZx-b zdgFjZd`5(2^WX5T&68(?!w$JmOO9A55G*nQr>VP=ebJSyGAco(QB-pg&z=mPqo8Mt zk9f#n1&7iKG&T7z1poAHw;&wO)!+@nQi-^(eQ#jb-FGwWlLXbXeRC~%a&147u8;`K zke*w_;TT=9eRLMrBYxcDDR)}$dwseTmk-h)?ny?Z?yfZrv`u_}J+k1+8QB3){Drrss6wfgOpH2qLQ_hq{cwtTR>Yb6-;zCei1{u` z&qma$Lc4{uF$>6;en~U!ZtO$oj@So!X99+QSQ8?~+w>0#rEfqKX;1SlB;k5qF`1~0ZPZ0?n%p>~-2!?RRlfc`5#P$?$QR~U z%9QxMFRThZtV5U#qtE*JnL=uON~J75xBQdfc5yL8DF3}9y*^H_hJ+7u%mYS))j?dT zL>t)t;Hc(+AYsO2wFaCNfS3Mbw8IToLw!r%h>kp&?-3@?I8>148P*{m$Fj6D(11zPSNv_x341@ zA}Fqr_O5wLjLRVdv-4`}6f2u60c_RCi;xkvJ{?ujtU^)#R4r7@hw)B8>)AU}mUTBY z-ufI?s6V;>C3%9(cmkYm9?m8BZawUR%UCrq@Vhlo1xm%kC|J}hv3P7 zIX%kyA6PEN9>Ch+t1}iYL-fEsNX^;77p8QXw#ATs_PXHeAZ9C zD6QMP+P(WzVQeH|Mcq^`=#l0xz~y(BzPC^<{k#LTJ&>f)ww8OjS#D>RH+=k799oUq zaWBC>jT|Oz2+5U2v3svuuh7o<{Wb;GM 
zbcXJR+q4xNV(3-fY2*Y}jo!@}OheVTpcpQjqlj5N3pu`{09xLA-d-?x z>V!WD&R~B7giW_djF+|tZv}MRJ;ex@SEXYwe%^;|6*Y5-s1+`-(wV(=HY~5Sd3FAR zWY(ucFBPjH%BMVIiBk-x??=CcQC%sP@}THZpGJauD3N`V0#oo2i+OwFZ?H|_(JJi2 z0?((#|K~jFSlqI#Y=9vIzD?(sl_TcTcbjIku43*Y(MN&-CBY)7hNw>z4zs3py@a}! z4QbVXbq}iy+AF?jz=iwYved>IBH@pZ+3CQ!6^!m#zjkjn2UU%b9|=3~)cS5{(LsDN z4QO9 zGPz9`&txsvSo4)HJEEQ42OCR{ux>e6ynSY}tR)Y$UEE)G*394ssJT&y{$Qdz2QJ&0 z7m^puR3ohIQA3*f3HR<~6!MC|@ z)+0M5c-Cx9-Cy4c++OjM@wB10_NHHA3HPu=+n>XOo!s%g{Rw5`vyu+peSYdE!N{Aj z^2QQN%DG#Q+c0!bHgUx81IaycxGO(%%M{h{r;|i*`x%s>NTA(AtTlr2ADb>=%Xhx= z*}IyNtwFIeGc7EO_x1zkKQCIsKfrhTDOc_EAXkmygPNFW3$TyF_@#dORXYpF)#Qa{ z3-o?J>~wiAKUN-;8n`ze^TcWP@ozYuWB(-P{tXPFEDr^Z1polx0hmzDUkO<<$K#*? zfKwO%fEYjoaIi39V`H^9GBLNeb}(W!GIMlyaC5P?aQ*CR<)xvH0D$6jgVg({xqBf2 zAfWFd0D%7xXRmH6o1%g)bkknl@LhIDm0XpaK=T{T+Nb1z6m08T07VLBOdO7*R!QK?}}G#2DSr_Hi|Y<*|iw-(|HzFY8m?oZ|LaXdg#7(8A~ zKd)m0Zl95OER3|WT{cv6oE{W@haS})P``rTMS$Ax*${s<+6ULQqJWSP&Aq*aqK6!0 z86IuPmT-ic$n#GMFT%90>Qw)?Q_O)Pgnr9uVc6a1m z`SNP&V0#lw1^R`3PVtlLdQh_SlYn-57h($=DTte}XF?0F_RY~kh z9qnoga|h3OnRuwPl@;v<8~0KWWo50#KV`y`I|3d6G+CSEEp$=(E7nPMW%LPK8B zs~X-5MpDRAyU1KH2Ra57Pt)(FL~uTFx`+pEJwLdi+j?IMz(sl|`XnX3siG+=+8&%0dpj3xSkcH=BqWLJn-O>y^*S)vf~4Cp=$O^H4T^*VZe7E>Pdi z_ySGzKiMK#&Jp#w-mm-Rq&;2KYehGcGO79NF*JcAgnrpFw#(vgqb%_o7L!hssF1=P zT$V^a6o7i}uNF>Kkf<$)ZSiL0?AR`DREHC%k$~qzl~(?w1_8NiYeb7r8(%ikA29eW zNgt#$TGzT5JA^y19!^zy@=4~Bz?S)&;JJz}paq>#pwZ&cR^g*>)Op`DWbQbN4Y>k3 z9x=_Ox9ip{EQ0p$8+Y^wE$3PS-P-@B09=*kDEf`V8o7jm;p!!{9xeh{EC()X zRLwAve!+s2>c^Yj=TZ3QL|FXHZ)rrBXLar=Dw)4Ie$FfzNgFl1q_4Pk;i5+QRs}TP zFDMP~uel~(EtG@(WmS{A1Cw&9{b{q?zl-ie+rH}Gbd%*j-OBS&aT$PA{qGIsbvu{z zjBlAjN?4j7ByoIF4#YWl_KoxlcqxCslgNcDz38wHEWDs7%R@ktK>f?i(?86-3ite% z3qAl)sSf~91Moh~9{~8UTtG?iKfV9i=vAe}Bo(ASTe;cWX{f^hpgz0*e}?}rD8zr{ z0RDjrhR{`k`{@7E{xj6<#{>M&P?}PzYH~`7pWVFN{sW5n-$VVgcqt18qkgPgfPWhC zf6|JF;NL-+Ia&S(#DBAl{l5eE4}{OslWW*40Fc!I^M4^e&;Vz_$^RC@-1OW37J{iX zDB0&jWb%K};w;$V^xtXWCcK4#W``z~N${}BQAaSTBM0DW&CC&2#!pW%_g literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/point.shp 
b/pkg/shp/test_files/point.shp new file mode 100644 index 0000000000000000000000000000000000000000..310419cbd1cb9b9905971e2910a76d3acaed158a GIT binary patch literal 184 zcmZQzQ0HR64q{#~GcYh>$cZ>WgjCRI428%7AY-@?W})b1LY0G=52k@GK-LE{QpEuP D!|Vi+ literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/pointm.shp b/pkg/shp/test_files/pointm.shp new file mode 100644 index 0000000000000000000000000000000000000000..7f6216e562a2ce48be62f90108e052d9a49cfa5e GIT binary patch literal 208 zcmZQzQ0HR64l-UaGcbr^$cZ>WgjCRI428%7j6e<_!Yp*Xksz~}P}RWffzh5IH9!|3 On*-#50o1$$K@I?rM+a>H literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/pointz.shp b/pkg/shp/test_files/pointz.shp new file mode 100644 index 0000000000000000000000000000000000000000..9e7ec168f4e54e2af0171c732eb38f0599ae49a5 GIT binary patch literal 232 zcmZQzQ0HR64oY4yGca&t$cZ>WgjCRIPbfbUN<&mZNk$+ogfI(TZ=}P2FnIqTsE!F$ aEzCX`?TM@w=t5+3fjlsPntvb&t`-18DIhlh literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/polygon.shp b/pkg/shp/test_files/polygon.shp new file mode 100644 index 0000000000000000000000000000000000000000..624a9db63f242e59050428eded06e0ff9902b515 GIT binary patch literal 236 zcmZQzQ0HR64$59IGcd4XmjjB5I6$OeG){#e2}U4xAjT|^Lfq;=M!^8gUR*Rx9fAe` DSi%H^ literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/polygonm.shp b/pkg/shp/test_files/polygonm.shp new file mode 100644 index 0000000000000000000000000000000000000000..f3a22d12c469ff17dea09c977015c81c2e6cdf86 GIT binary patch literal 292 zcmZQzQ0HR64ko=|W?+!SE(a79aezp{Xq*Z`5{y6`LyTD`5ka{%&z07k>yYKg@jm>L*u;sDml0HYCl0s1xx*#H0l literal 0 HcmV?d00001 diff --git a/pkg/shp/test_files/polylinem.shp b/pkg/shp/test_files/polylinem.shp new file mode 100644 index 0000000000000000000000000000000000000000..19a685ca6df252bc365f94b448dab789f566c39c GIT binary patch literal 388 zcmZQzQ0HR64i3FwW?&G Date: Thu, 3 Jun 2021 12:22:29 +0900 Subject: [PATCH 022/253] chore: load .env instead of .env.local --- internal/app/config.go | 11 +++++------ 1 file changed, 5 insertions(+), 6 
deletions(-) diff --git a/internal/app/config.go b/internal/app/config.go index 21484efde..e2a741312 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -47,12 +47,11 @@ type GCSConfig struct { } func ReadConfig(debug bool) (*Config, error) { - if debug { - // .env.local file is only available in debug environment - if err := godotenv.Load(".env.local"); err != nil && !os.IsNotExist(err) { - return nil, err - } - log.Infof("config: .env.local loaded") + // load .env + if err := godotenv.Load(".env"); err != nil && !os.IsNotExist(err) { + return nil, err + } else if err == nil { + log.Infof("config: .env loaded") } var c Config From 5db065be5009e30bde7ce300a116e29204905f91 Mon Sep 17 00:00:00 2001 From: HideBa <49897538+HideBa@users.noreply.github.com> Date: Thu, 3 Jun 2021 14:11:04 +0900 Subject: [PATCH 023/253] feat: Basic auth for projects (#6) --- .../adapter/graphql/controller_project.go | 3 + internal/adapter/graphql/convert_project.go | 3 + internal/adapter/graphql/models_gen.go | 6 + internal/app/app.go | 11 +- internal/graphql/generated.go | 174 ++++++++++++++++++ .../infrastructure/mongo/mongodoc/project.go | 9 + internal/usecase/interactor/project.go | 12 ++ internal/usecase/interfaces/project.go | 3 + pkg/project/builder.go | 15 ++ pkg/project/builder_test.go | 11 ++ pkg/project/project.go | 27 +++ pkg/project/project_test.go | 64 +++++++ schema.graphql | 6 + 13 files changed, 340 insertions(+), 4 deletions(-) diff --git a/internal/adapter/graphql/controller_project.go b/internal/adapter/graphql/controller_project.go index 5b9711caa..1c2f66df6 100644 --- a/internal/adapter/graphql/controller_project.go +++ b/internal/adapter/graphql/controller_project.go @@ -63,6 +63,9 @@ func (c *ProjectController) Update(ctx context.Context, ginput *UpdateProjectInp Alias: ginput.Alias, ImageURL: ginput.ImageURL, Archived: ginput.Archived, + IsBasicAuthActive: ginput.IsBasicAuthActive, + BasicAuthUsername: ginput.BasicAuthUsername, + BasicAuthPassword: 
ginput.BasicAuthPassword, PublicTitle: ginput.PublicTitle, PublicDescription: ginput.PublicDescription, PublicImage: fromFile(ginput.PublicImage), diff --git a/internal/adapter/graphql/convert_project.go b/internal/adapter/graphql/convert_project.go index cbb69f339..138fe8629 100644 --- a/internal/adapter/graphql/convert_project.go +++ b/internal/adapter/graphql/convert_project.go @@ -44,6 +44,9 @@ func toProject(p *project.Project) *Project { ID: p.ID().ID(), CreatedAt: p.CreatedAt(), IsArchived: p.IsArchived(), + IsBasicAuthActive: p.IsBasicAuthActive(), + BasicAuthUsername: p.BasicAuthUsername(), + BasicAuthPassword: p.BasicAuthPassword(), Alias: p.Alias(), Name: p.Name(), Description: p.Description(), diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index 24ecaa99d..b8a3223a1 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -581,6 +581,9 @@ type PluginExtension struct { type Project struct { ID id.ID `json:"id"` IsArchived bool `json:"isArchived"` + IsBasicAuthActive bool `json:"isBasicAuthActive"` + BasicAuthUsername string `json:"basicAuthUsername"` + BasicAuthPassword string `json:"basicAuthPassword"` CreatedAt time.Time `json:"createdAt"` UpdatedAt time.Time `json:"updatedAt"` PublishedAt *time.Time `json:"publishedAt"` @@ -1010,6 +1013,9 @@ type UpdateProjectInput struct { Name *string `json:"name"` Description *string `json:"description"` Archived *bool `json:"archived"` + IsBasicAuthActive *bool `json:"isBasicAuthActive"` + BasicAuthUsername *string `json:"basicAuthUsername"` + BasicAuthPassword *string `json:"basicAuthPassword"` Alias *string `json:"alias"` ImageURL *url.URL `json:"imageUrl"` PublicTitle *string `json:"publicTitle"` diff --git a/internal/app/app.go b/internal/app/app.go index f3cb0d272..160e97f59 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -132,10 +132,13 @@ func apiPublished(cfg *ServerConfig) echo.HandlerFunc { } return 
c.JSON(http.StatusOK, map[string]interface{}{ - "title": title, - "description": description, - "image": prj.PublicImage(), - "noindex": prj.PublicNoIndex(), + "title": title, + "description": description, + "image": prj.PublicImage(), + "noindex": prj.PublicNoIndex(), + "isBasicAuthActive": prj.IsBasicAuthActive(), + "basicAuthUsername": prj.BasicAuthUsername(), + "basicAuthPassword": prj.BasicAuthPassword(), }) } } diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index 6749845fa..690af8a7c 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -524,11 +524,14 @@ type ComplexityRoot struct { Project struct { Alias func(childComplexity int) int + BasicAuthPassword func(childComplexity int) int + BasicAuthUsername func(childComplexity int) int CreatedAt func(childComplexity int) int Description func(childComplexity int) int ID func(childComplexity int) int ImageURL func(childComplexity int) int IsArchived func(childComplexity int) int + IsBasicAuthActive func(childComplexity int) int Name func(childComplexity int) int PublicDescription func(childComplexity int) int PublicImage func(childComplexity int) int @@ -3513,6 +3516,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Project.Alias(childComplexity), true + case "Project.basicAuthPassword": + if e.complexity.Project.BasicAuthPassword == nil { + break + } + + return e.complexity.Project.BasicAuthPassword(childComplexity), true + + case "Project.basicAuthUsername": + if e.complexity.Project.BasicAuthUsername == nil { + break + } + + return e.complexity.Project.BasicAuthUsername(childComplexity), true + case "Project.createdAt": if e.complexity.Project.CreatedAt == nil { break @@ -3548,6 +3565,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Project.IsArchived(childComplexity), true + case "Project.isBasicAuthActive": + if 
e.complexity.Project.IsBasicAuthActive == nil { + break + } + + return e.complexity.Project.IsBasicAuthActive(childComplexity), true + case "Project.name": if e.complexity.Project.Name == nil { break @@ -5438,6 +5462,9 @@ enum Role { type Project implements Node { id: ID! isArchived: Boolean! + isBasicAuthActive: Boolean! + basicAuthUsername: String! + basicAuthPassword: String! createdAt: DateTime! updatedAt: DateTime! publishedAt: DateTime @@ -5990,6 +6017,9 @@ input UpdateProjectInput { name: String description: String archived: Boolean + isBasicAuthActive: Boolean + basicAuthUsername: String + basicAuthPassword: String alias: String imageUrl: URL publicTitle: String @@ -18621,6 +18651,111 @@ func (ec *executionContext) _Project_isArchived(ctx context.Context, field graph return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsBasicAuthActive, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.BasicAuthUsername, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Project", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.BasicAuthPassword, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -29027,6 +29162,30 @@ func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context if err != nil { return it, 
err } + case "isBasicAuthActive": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("isBasicAuthActive")) + it.IsBasicAuthActive, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "basicAuthUsername": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("basicAuthUsername")) + it.BasicAuthUsername, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "basicAuthPassword": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("basicAuthPassword")) + it.BasicAuthPassword, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } case "alias": var err error @@ -32534,6 +32693,21 @@ func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "isBasicAuthActive": + out.Values[i] = ec._Project_isBasicAuthActive(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "basicAuthUsername": + out.Values[i] = ec._Project_basicAuthUsername(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "basicAuthPassword": + out.Values[i] = ec._Project_basicAuthPassword(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "createdAt": out.Values[i] = ec._Project_createdAt(ctx, field, obj) if out.Values[i] == graphql.Null { diff --git a/internal/infrastructure/mongo/mongodoc/project.go b/internal/infrastructure/mongo/mongodoc/project.go index 6e31f7d37..d2a53ff36 100644 --- a/internal/infrastructure/mongo/mongodoc/project.go +++ b/internal/infrastructure/mongo/mongodoc/project.go @@ -14,6 +14,9 @@ import ( type ProjectDocument struct { ID string Archived bool + IsBasicAuthActive bool + BasicAuthUsername string + BasicAuthPassword string UpdatedAt time.Time PublishedAt time.Time Name 
string @@ -67,6 +70,9 @@ func NewProject(project *project.Project) (*ProjectDocument, string) { return &ProjectDocument{ ID: pid, Archived: project.IsArchived(), + IsBasicAuthActive: project.IsBasicAuthActive(), + BasicAuthUsername: project.BasicAuthUsername(), + BasicAuthPassword: project.BasicAuthPassword(), UpdatedAt: project.UpdatedAt(), PublishedAt: project.PublishedAt(), Name: project.Name(), @@ -103,6 +109,9 @@ func (d *ProjectDocument) Model() (*project.Project, error) { return project.New(). ID(pid). IsArchived(d.Archived). + IsBasicAuthActive(d.IsBasicAuthActive). + BasicAuthUsername(d.BasicAuthUsername). + BasicAuthPassword(d.BasicAuthPassword). UpdatedAt(d.UpdatedAt). PublishedAt(d.PublishedAt). Name(d.Name). diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 8d0c914ff..9f4c692f9 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -169,6 +169,18 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o prj.SetArchived(*p.Archived) } + if p.IsBasicAuthActive != nil { + prj.SetIsBasicAuthActive(*p.IsBasicAuthActive) + } + + if p.BasicAuthUsername != nil { + prj.SetBasicAuthUsername(*p.BasicAuthUsername) + } + + if p.BasicAuthPassword != nil { + prj.SetBasicAuthPassword(*p.BasicAuthPassword) + } + if p.PublicTitle != nil { prj.UpdatePublicTitle(*p.PublicTitle) } diff --git a/internal/usecase/interfaces/project.go b/internal/usecase/interfaces/project.go index 24ca6960a..7caa37b95 100644 --- a/internal/usecase/interfaces/project.go +++ b/internal/usecase/interfaces/project.go @@ -28,6 +28,9 @@ type UpdateProjectParam struct { Description *string Alias *string Archived *bool + IsBasicAuthActive *bool + BasicAuthUsername *string + BasicAuthPassword *string ImageURL *url.URL PublicTitle *string PublicDescription *string diff --git a/pkg/project/builder.go b/pkg/project/builder.go index 76a6b9962..1891ed2e4 100644 --- 
a/pkg/project/builder.go +++ b/pkg/project/builder.go @@ -52,6 +52,21 @@ func (b *Builder) IsArchived(isArchived bool) *Builder { return b } +func (b *Builder) IsBasicAuthActive(isBasicAuthActive bool) *Builder { + b.p.isBasicAuthActive = isBasicAuthActive + return b +} + +func (b *Builder) BasicAuthUsername(basicAuthUsername string) *Builder { + b.p.basicAuthUsername = basicAuthUsername + return b +} + +func (b *Builder) BasicAuthPassword(basicAuthPassword string) *Builder { + b.p.basicAuthPassword = basicAuthPassword + return b +} + func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { b.p.updatedAt = updatedAt return b diff --git a/pkg/project/builder_test.go b/pkg/project/builder_test.go index 0a6d5d822..09bbdf473 100644 --- a/pkg/project/builder_test.go +++ b/pkg/project/builder_test.go @@ -53,6 +53,17 @@ func TestBuilder_IsArchived(t *testing.T) { assert.True(t, res.IsArchived()) } +func TestBuilder_BasicAuthUsername(t *testing.T) { + var tb = New().NewID() + res := tb.BasicAuthUsername("username").MustBuild() + assert.Equal(t, "username", res.BasicAuthUsername()) +} +func TestBuilder_BasicAuthPassword(t *testing.T) { + var tb = New().NewID() + res := tb.BasicAuthPassword("password").MustBuild() + assert.Equal(t, "password", res.BasicAuthPassword()) +} + func TestBuilder_ImageURL(t *testing.T) { testCases := []struct { name string diff --git a/pkg/project/project.go b/pkg/project/project.go index bebfa06c5..c15f9f55a 100644 --- a/pkg/project/project.go +++ b/pkg/project/project.go @@ -19,6 +19,9 @@ var ( type Project struct { id id.ProjectID isArchived bool + isBasicAuthActive bool + basicAuthUsername string + basicAuthPassword string updatedAt time.Time publishedAt time.Time name string @@ -42,6 +45,18 @@ func (p *Project) IsArchived() bool { return p.isArchived } +func (p *Project) IsBasicAuthActive() bool { + return p.isBasicAuthActive +} + +func (p *Project) BasicAuthUsername() string { + return p.basicAuthUsername +} + +func (p *Project) 
BasicAuthPassword() string { + return p.basicAuthPassword +} + func (p *Project) UpdatedAt() time.Time { return p.updatedAt } @@ -106,6 +121,18 @@ func (p *Project) SetArchived(isArchived bool) { p.isArchived = isArchived } +func (p *Project) SetIsBasicAuthActive(isBasicAuthActive bool) { + p.isBasicAuthActive = isBasicAuthActive +} + +func (p *Project) SetBasicAuthUsername(basicAuthUsername string) { + p.basicAuthUsername = basicAuthUsername +} + +func (p *Project) SetBasicAuthPassword(basicAuthPassword string) { + p.basicAuthPassword = basicAuthPassword +} + func (p *Project) SetUpdatedAt(updatedAt time.Time) { p.updatedAt = updatedAt } diff --git a/pkg/project/project_test.go b/pkg/project/project_test.go index adc6dc9e9..15255f7fa 100644 --- a/pkg/project/project_test.go +++ b/pkg/project/project_test.go @@ -268,3 +268,67 @@ func TestProject_PublicName(t *testing.T) { }) } } + +func TestProject_IsBasicAuthActive(t *testing.T) { + tests := []struct { + name string + p *Project + expected bool + }{ + { + name: "basic auth is inactive", + p: &Project{ + isBasicAuthActive: false, + }, + expected: false, + }, + { + name: "basic auth is active", + p: &Project{ + isBasicAuthActive: true, + }, + expected: true, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := tc.p.IsBasicAuthActive() + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestProject_BasicAuthUsername(t *testing.T) { + t.Run("return basic auth username", func(t *testing.T) { + p := &Project{basicAuthUsername: "test1"} + res := p.BasicAuthUsername() + assert.Equal(t, "test1", res) + }) +} + +func TestProject_BasicAuthPassword(t *testing.T) { + t.Run("return basic auth password", func(t *testing.T) { + p := &Project{basicAuthPassword: "password"} + res := p.BasicAuthPassword() + assert.Equal(t, "password", res) + }) +} + +func TestProject_SetIsBasicAuthActive(t *testing.T) { + p := &Project{} + p.SetIsBasicAuthActive(true) + assert.Equal(t, true, 
p.isBasicAuthActive) +} + +func TestProject_SetBasicAuthUsername(t *testing.T) { + p := &Project{} + p.SetBasicAuthUsername("username") + assert.Equal(t, "username", p.basicAuthUsername) +} + +func TestProject_SetBasicAuthPassword(t *testing.T) { + p := &Project{} + p.SetBasicAuthPassword("password") + assert.Equal(t, "password", p.basicAuthPassword) +} diff --git a/schema.graphql b/schema.graphql index ec1f67b1c..f925cdcde 100644 --- a/schema.graphql +++ b/schema.graphql @@ -180,6 +180,9 @@ enum Role { type Project implements Node { id: ID! isArchived: Boolean! + isBasicAuthActive: Boolean! + basicAuthUsername: String! + basicAuthPassword: String! createdAt: DateTime! updatedAt: DateTime! publishedAt: DateTime @@ -732,6 +735,9 @@ input UpdateProjectInput { name: String description: String archived: Boolean + isBasicAuthActive: Boolean + basicAuthUsername: String + basicAuthPassword: String alias: String imageUrl: URL publicTitle: String From 9629dd5a075c63602929c2c69abf1d04a3f448c9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 3 Jun 2021 19:47:41 +0900 Subject: [PATCH 024/253] chore: add godoc workflow --- .github/workflows/godoc.yml | 55 +++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 .github/workflows/godoc.yml diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml new file mode 100644 index 000000000..68c39d9f6 --- /dev/null +++ b/.github/workflows/godoc.yml @@ -0,0 +1,55 @@ +name: godoc +on: + workflow_run: + workflows: + - main + types: + - completed +env: + MOD: github.com/reearth/reearth-backend + REPO: github.com/reearth/reearth-backend + ADDR: localhost:6060 +jobs: + godoc: + name: godoc + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + env: + BASE_PATH: ${{ github.event.workflow_run.head_branch }} + steps: + - name: set up + uses: actions/setup-go@v2 + with: + go-version: 1.16 + id: go + - name: checkout + uses: actions/checkout@v2 + - name: install 
godoc + run: go install golang.org/x/tools/cmd/godoc + - name: generate docs + run: | + godoc -http=$ADDR & + sleep 1 + wget -r -np -N -E -p -k http://${ADDR}/pkg/${MOD}/ + - name: replace urls + run: | + find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i -e "s/http:\/\/${ADDR}\/src\/${MOD}/https:\/\/${REPO}\/blob\/main/" + find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i -e "s/http:\/\/${ADDR}/\/${BASE_PATH}/" + - name: deploy + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./${{ env.ADDR }} + destination_dir: ${{ env.BASE_PATH }} + slack-notification: + if: always() + name: Slack Notification + needs: godoc + runs-on: ubuntu-latest + steps: + - name: Slack Notification + uses: Gamesight/slack-workflow-status@master + if: always() + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} From cc45b539b9e481a20628e34e155fce0e9c7985c6 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 3 Jun 2021 19:56:07 +0900 Subject: [PATCH 025/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 2 +- cmd/godoc/main.go | 41 ------------------------------------- 2 files changed, 1 insertion(+), 42 deletions(-) delete mode 100644 cmd/godoc/main.go diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index 68c39d9f6..22d659329 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -8,7 +8,7 @@ on: env: MOD: github.com/reearth/reearth-backend REPO: github.com/reearth/reearth-backend - ADDR: localhost:6060 + ADDR: 127.0.0.1:6060 jobs: godoc: name: godoc diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go deleted file mode 100644 index 8fac7cf2f..000000000 --- a/cmd/godoc/main.go +++ /dev/null @@ -1,41 +0,0 @@ -package main - -import ( - "crypto/subtle" - "net/url" - "os" - "os/exec" - - "github.com/labstack/echo/v4" - "github.com/labstack/echo/v4/middleware" -) - -const godocPort = "8080" - -func main() { - e := 
echo.New() - err := exec.Command("godoc", "-http=:"+godocPort).Start() - if err != nil { - e.Logger.Fatal(err) - } - e.Use(middleware.Logger()) - e.Use(middleware.Recover()) - e.Use(middleware.BasicAuth(func(username, password string, c echo.Context) (bool, error) { - if subtle.ConstantTimeCompare([]byte(username), []byte(os.Getenv("BASIC_AUTH_USERNAME"))) == 1 && - subtle.ConstantTimeCompare([]byte(password), []byte(os.Getenv("BASIC_AUTH_PASSWORD"))) == 1 { - return true, nil - } - return false, nil - })) - t, err := url.Parse("http://localhost:8080/") - if err != nil { - e.Logger.Fatal(err) - } - tl := []*middleware.ProxyTarget{ - { - URL: t, - }, - } - e.Group("", middleware.Proxy(middleware.NewRoundRobinBalancer(tl))) - e.Logger.Fatal(e.Start(":" + os.Getenv("PORT"))) -} From 0db163d1c71fd162ad92392db095a70ebcea36e9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 3 Jun 2021 20:05:10 +0900 Subject: [PATCH 026/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index 22d659329..478fb172f 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -29,7 +29,7 @@ jobs: - name: generate docs run: | godoc -http=$ADDR & - sleep 1 + sleep 5 wget -r -np -N -E -p -k http://${ADDR}/pkg/${MOD}/ - name: replace urls run: | From 9b78fc26437782a873add7e89e5b39683a89444b Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 4 Jun 2021 12:09:47 +0900 Subject: [PATCH 027/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index 478fb172f..ff0fc48ae 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -29,8 +29,8 @@ jobs: - name: generate docs run: | godoc -http=$ADDR & - sleep 5 - wget -r -np -N -E -p -k http://${ADDR}/pkg/${MOD}/ + sleep 10 + wget -c 10 
--retry-connrefused -r -np -N -E -p -k http://${ADDR}/pkg/${MOD}/ - name: replace urls run: | find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i -e "s/http:\/\/${ADDR}\/src\/${MOD}/https:\/\/${REPO}\/blob\/main/" From f1e5a7679b5c2d3d533b8527d794294e69ca732e Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 4 Jun 2021 12:22:39 +0900 Subject: [PATCH 028/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index ff0fc48ae..d84caad91 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -8,7 +8,7 @@ on: env: MOD: github.com/reearth/reearth-backend REPO: github.com/reearth/reearth-backend - ADDR: 127.0.0.1:6060 + GODOC_ADDR: '127.0.0.1:6060' jobs: godoc: name: godoc @@ -28,9 +28,9 @@ jobs: run: go install golang.org/x/tools/cmd/godoc - name: generate docs run: | - godoc -http=$ADDR & + godoc -http="$ADDR" & sleep 10 - wget -c 10 --retry-connrefused -r -np -N -E -p -k http://${ADDR}/pkg/${MOD}/ + wget -r -np -N -E -p -k "http://${ADDR}/pkg/${MOD}/" - name: replace urls run: | find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i -e "s/http:\/\/${ADDR}\/src\/${MOD}/https:\/\/${REPO}\/blob\/main/" From f7866c024f9ae61c4e5aa7cfd518e8d310b98b6d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 4 Jun 2021 12:43:03 +0900 Subject: [PATCH 029/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index d84caad91..4bcf3c2f7 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -8,7 +8,7 @@ on: env: MOD: github.com/reearth/reearth-backend REPO: github.com/reearth/reearth-backend - GODOC_ADDR: '127.0.0.1:6060' + ADDR: '127.0.0.1:6060' jobs: godoc: name: godoc From b442490b35e812a8281662fc26acd483f5eb887d Mon Sep 17 00:00:00 2001 From: HideBa 
<49897538+HideBa@users.noreply.github.com> Date: Fri, 4 Jun 2021 16:25:31 +0900 Subject: [PATCH 030/253] feat: google analytics for scene (#10) Co-authored-by: rot1024 --- pkg/builtin/manifest.yml | 13 +++++++++++++ pkg/builtin/manifest_gen.go | 2 +- pkg/builtin/manifest_ja.yml | 10 ++++++++++ pkg/builtin/manifest_ja_gen.go | 2 +- pkg/plugin/manifest/schema_gen.go | 2 +- 5 files changed, 26 insertions(+), 3 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index c1a9cf33c..bb2dfa7e4 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -139,6 +139,19 @@ extensions: description: "Set saturation of the fog. Min: -1 Max: 1" min: -1 max: 1 + - id: googleAnalytics + title: Google Analytics + description: Set your Google Analytics tracking ID and analyze how your published project is being viewed. + fields: + - id: enableGA + type: bool + title: Enable + defaultValue: false + description: Enable Google Analytics + - id: trackingId + type: string + title: Tracking ID + description: Paste your Google Analytics tracking ID here. This will be embedded in your published project. - id: infobox title: Infobox visualizer: cesium diff --git a/pkg/builtin/manifest_gen.go b/pkg/builtin/manifest_gen.go index 9a4ddbcb9..9d09b8527 100644 --- a/pkg/builtin/manifest_gen.go +++ b/pkg/builtin/manifest_gen.go @@ -2,4 +2,4 @@ package builtin -const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline 
primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo 
that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo 
overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image 
size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table 
block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera 
flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","title":"Image width","type":"number"},{"id":"overlayImageH","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` +const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"},{"description":"Set your Google Analytics tracking ID and analyze how your published project is being viewed.","fields":[{"defaultValue":false,"description":"Enable Google Analytics","id":"enableGA","title":"Enable","type":"bool"},{"description":"Paste your Google Analytics tracking ID here. This will be embedded in your published project.","id":"trackingId","title":"Tracking ID","type":"string"}],"id":"googleAnalytics","title":"Google Analytics"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline 
primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo 
that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo 
overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image 
size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table 
block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera 
flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","title":"Image width","type":"number"},{"id":"overlayImageH","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 5588e3903..84313ac8b 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -76,6 +76,16 @@ extensions: title: ่‰ฒ็›ธ surturation_shift: title: ๅฝฉๅบฆ + googleAnalytics: + title: Google Analytics + description: Google Analyticsใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒšใƒผใ‚ธใŒใฉใฎใ‚ˆใ†ใซ้–ฒ่ฆงใ•ใ‚Œใฆใ„ใ‚‹ใ‹ใ‚’ๅˆ†ๆžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ + fields: + enableGA: + title: ๆœ‰ๅŠน + description: Google Analyticsใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚ + trackingCode: + title: ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐID + description: ใ“ใ“ใซใ‚ฐใƒผใ‚ฐใƒซใ‚ขใƒŠใƒชใƒ†ใ‚ฃใ‚ฏใ‚นใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐIDใ‚’่ฒผใ‚Šไป˜ใ‘ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ“ใฎใ‚ณใƒผใƒ‰ใŒๅŸ‹ใ‚่พผใพใ‚Œใพใ™ใ€‚ infobox: title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น description: ้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ diff --git 
a/pkg/builtin/manifest_ja_gen.go b/pkg/builtin/manifest_ja_gen.go index 1ba2a559b..85079918e 100644 --- a/pkg/builtin/manifest_ja_gen.go +++ b/pkg/builtin/manifest_ja_gen.go @@ -2,4 +2,4 @@ package builtin -const pluginManifestJSON_ja string = `{"description":"ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ","extensions":{"cesium":{"description":"ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚","propertySchema":{"atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚","fields":{"brightness_shift":{"title":"ๆ˜Žๅบฆ"},"enable_lighting":{"description":"ๅคช้™ฝๅ…‰ใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใ‚‹ใ“ใจใงใ€ๆ˜ผๅคœใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅคช้™ฝๅ…‰"},"enable_sun":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใซๅญ˜ๅœจใ™ใ‚‹ๅคช้™ฝใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅคช้™ฝ"},"fog":{"description":"้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"้œง"},"fog_density":{"description":"้œงใฎๆฟƒๅบฆใ‚’0ไปฅไธŠใ‹ใ‚‰่จญๅฎšใ—ใพใ™ใ€‚","title":"ๆฟƒๅบฆ"},"ground_atmosphere":{"description":"ๅœฐ่กจใฎๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅœฐ่กจใฎๅคงๆฐ—"},"hue_shift":{"title":"่‰ฒ็›ธ"},"sky_atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ไธŠ็ฉบใฎๅคงๆฐ—"},"surturation_shift":{"title":"ๅฝฉๅบฆ"}},"title":"ๅคงๆฐ—"},"default":{"fields":{"bgcolor":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใŒ้ž่กจ็คบใฎๅ ดๅˆใฎใ€่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"่ƒŒๆ™ฏ่‰ฒ"},"camera":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒๆœ€ๅˆใซ่กจ็คบใ™ใ‚‹ใ‚ซใƒกใƒฉใฎไฝ็ฝฎใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉๅˆๆœŸไฝ็ฝฎ"},"ion":{"description":"่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚","title":"Cesium Icon 
APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ"},"skybox":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅฎ‡ๅฎ™ใฎ่กจ็คบ"},"terrain":{"description":"ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅœฐๅฝข"}},"title":"ใ‚ทใƒผใƒณ"},"tiles":{"description":"ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","fields":{"tile_maxLevel":{"title":"ๆœ€ๅคงใƒฌใƒ™ใƒซ"},"tile_minLevel":{"title":"ๆœ€ๅฐใƒฌใƒ™ใƒซ"},"tile_title":{"title":"ๅๅ‰"},"tile_type":{"choices":{"black_marble":"Black Marble","default":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ","default_label":"ใƒฉใƒ™ใƒซไป˜ใๅœฐๅ›ณ","default_road":"้“่ทฏๅœฐๅ›ณ","esri_world_topo":"ESRI Topography","japan_gsi_standard":"ๅœฐ็†้™ขๅœฐๅ›ณ ๆจ™ๆบ–ๅœฐๅ›ณ","open_street_map":"Open Street Map","stamen_toner":"Stamen Toner","stamen_watercolor":"Stamen Watercolor","url":"URL"},"title":"็จฎ้กž"},"tile_url":{"title":"URL"}},"title":"ใ‚ฟใ‚คใƒซ"}},"title":"Cesium"},"dlblock":{"description":"่กจใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"่กจใƒ–ใƒญใƒƒใ‚ฏ"},"items":{"fields":{"item_datanum":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—)"},"item_datastr":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ–‡ๅญ—)"},"item_datatype":{"choices":{"number":"ๆ•ฐๅญ—","string":"ๆ–‡ๅญ—"},"title":"็จฎ้กž"},"item_title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ใ‚ขใ‚คใƒ†ใƒ "}},"title":"่กจ"},"ellipsoid":{"description":"ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใ‚’ๅœฐๅ›ณไธŠใซใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใง่ฟฝๅŠ 
ใงใใพใ™ใ€‚ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใซใ‚ˆใฃใฆ็ซ‹ไฝ“็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"position":{"title":"ไฝ็ฝฎ"},"radius":{"title":"ๅŠๅพ„"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"},"imageblock":{"description":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"image":{"title":"็”ปๅƒ"},"imagePositionX":{"choices":{"center":"ไธญๅคฎ","left":"ๅทฆ","right":"ๅณ"},"title":"ๆฐดๅนณไฝ็ฝฎ"},"imagePositionY":{"choices":{"bottom":"ไธ‹","center":"ไธญๅคฎ","top":"ไธŠ"},"title":"ๅž‚็›ดไฝ็ฝฎ"},"imageSize":{"choices":{"contain":"ๅซใ‚€","cover":"ใ‚ซใƒใƒผ"},"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"็”ปๅƒ"},"infobox":{"description":"้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"bgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"size":{"choices":{"large":"ๅคง","small":"ๅฐ"},"title":"ใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"},"locationblock":{"description":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"marker":{"description":"ใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใงใ€ๅœฐๅ›ณไธŠใซใƒžใƒผใ‚ซใƒผใ‚’่ฟฝๅŠ 
ใ—ใพใ™ใ€‚ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใใ‚Œใ‚‰ใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"extrude":{"title":"ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"label":{"title":"ใƒฉใƒ™ใƒซ"},"labelText":{"title":"ใƒฉใƒ™ใƒซๆ–‡ๅญ—"},"labelTypography":{"title":"ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ"},"location":{"title":"ไฝ็ฝฎ"},"pointColor":{"title":"ใƒใ‚คใƒณใƒˆ่‰ฒ"},"pointSize":{"title":"ใƒใ‚คใƒณใƒˆใ‚ตใ‚คใ‚บ"},"style":{"choices":{"image":"ใ‚ขใ‚คใ‚ณใƒณ","point":"ใƒใ‚คใƒณใƒˆ"},"title":"่กจ็คบๆ–นๆณ•"}},"title":"ใƒžใƒผใ‚ซใƒผ"}},"title":"ใƒžใƒผใ‚ซใƒผ"},"menu":{"description":"ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚\\nใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚\\nใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ 
ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™\\nใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚","propertySchema":{"buttons":{"fields":{"buttonBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"buttonCamera":{"title":"ใ‚ซใƒกใƒฉ"},"buttonColor":{"title":"ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ"},"buttonIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"buttonInvisible":{"title":"้ž่กจ็คบ"},"buttonLink":{"title":"ใƒชใƒณใ‚ฏ"},"buttonPosition":{"choices":{"bottomleft":"ไธ‹ๅทฆ","bottomright":"ไธ‹ๅณ","topleft":"ไธŠๅทฆ","topright":"ไธŠๅณ"},"title":"่กจ็คบไฝ็ฝฎ"},"buttonStyle":{"choices":{"icon":"ใ‚ขใ‚คใ‚ณใƒณใฎใฟ","text":"ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ","texticon":"ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ"},"title":"่กจ็คบๆ–นๆณ•"},"buttonTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"buttonType":{"choices":{"camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ","menu":"ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒœใ‚ฟใƒณ"},"menu":{"fields":{"menuCamera":{"title":"ใ‚ซใƒกใƒฉ"},"menuIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"menuLink":{"title":"ใƒชใƒณใ‚ฏ"},"menuTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"menuType":{"choices":{"border":"ๅŒบๅˆ‡ใ‚Š็ทš","camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"},"photooverlay":{"description":"ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ใ•ใ‚ŒใŸใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"camera":{"description":"ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎ"},"photoOverlayImage":{"title":"ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"},"polygon":{"description":"Polygon primitive","propertySchema":{"default":{"fields":{"fill":{"title":"ๅก—ใ‚Š"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"polygon":{"title":"ใƒใƒชใ‚ดใƒณ"},"stroke":{"title":"็ทš"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"ใƒใƒชใ‚ดใƒณ"}},"title":"ใƒใƒชใ‚ดใƒณ"},"polyline":{"description":"Polyline primitive","propertySchema":{"default":{"fields":{"coordinates":{"title":"้ ‚็‚น"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"็›ด็ทš"}},"title":"็›ด็ทš"},"rect":{"description":"Rectangle primitive","propertySchema":{"default":{"fields":{"extrudedHeight":{"title":"้ซ˜ใ•"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"rect":{"title":"้•ทๆ–นๅฝข"},"style":{"choices":{"color":"่‰ฒ","image":"็”ปๅƒ"},"title":"ใ‚นใ‚ฟใ‚คใƒซ"}},"title":"้•ทๆ–นๅฝข"}},"title":"้•ทๆ–นๅฝข"},"resource":{"description":"ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟ๏ผˆๅฝขๅผไฝ•๏ผŸ๏ผŸ๏ผŸ๏ผ‰ใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚","propertySchema":{"default":{"fields":{"url":{"choices":{"auto":"่‡ชๅ‹•","czml":"CZML","geojson":"GeoJSON / TopoJSON","kml":"KML"},"title":"ใƒ•ใ‚กใ‚คใƒซ URL"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"},"splashscreen":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"camera":{"fields":{"cameraDelay":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"cameraDuration":{"title":"ใ‚ซใƒกใƒฉ้–‹ๅง‹ๆ™‚้–“"},"cameraPosition":{"title":"ใ‚ซใƒกใƒฉไฝ็ฝฎ"}},"title":"ใ‚ซใƒกใƒฉใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ"},"overlay":{"fields":{"overlayBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"overlayDelay":{"title":"้–‹ๅง‹ๆ™‚้–“"},"overlayDuration":{"title":"่กจ็คบๆ™‚้–“"},"overlayEnabled":{"title":"ๆœ‰ๅŠน"},"overlayImage":{"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค็”ปๅƒ"},"overlayImageH":{"title":"็”ปๅƒ้ซ˜ใ•"},"overlayImageW":{"title":"็”ปๅƒๅน…"},"overlayTransitionDuration":{"title":"ใƒ•ใ‚งใƒผใƒ‰ๆ™‚้–“"}},"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ"},"storytelling":{"description":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚","propertySchema":{"default":{"fields":{"autoStart":{"title":"่‡ชๅ‹•ๅ†็”Ÿ"},"camera":{"title":"ใ‚ซใƒกใƒฉ"},"duration":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"range":{"title":"็”ป่ง’"}},"title":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"},"stories":{"fields":{"layer":{"title":"ใƒฌใ‚คใƒคใƒผ"},"layerCamera":{"title":"ใ‚ซใƒกใƒฉ"},"layerDuration":{"title":"็งปๅ‹•ๆ™‚้–“"},"layerRange":{"title":"ใ‚ซใƒกใƒฉ็”ป่ง’"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผ"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ"},"textblock":{"description":"Text 
block","propertySchema":{"default":{"fields":{"markdown":{"title":"ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ"},"text":{"title":"ใ‚ณใƒณใƒ†ใƒณใƒ„"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆ"},"videoblock":{"description":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"url":{"title":"ๅ‹•็”ป URL"}},"title":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ๅ‹•็”ป"}},"title":"Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ"}` +const pluginManifestJSON_ja string = `{"description":"ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ","extensions":{"cesium":{"description":"ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚","propertySchema":{"atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚","fields":{"brightness_shift":{"title":"ๆ˜Žๅบฆ"},"enable_lighting":{"description":"ๅคช้™ฝๅ…‰ใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใ‚‹ใ“ใจใงใ€ๆ˜ผๅคœใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅคช้™ฝๅ…‰"},"enable_sun":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใซๅญ˜ๅœจใ™ใ‚‹ๅคช้™ฝใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅคช้™ฝ"},"fog":{"description":"้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"้œง"},"fog_density":{"description":"้œงใฎๆฟƒๅบฆใ‚’0ไปฅไธŠใ‹ใ‚‰่จญๅฎšใ—ใพใ™ใ€‚","title":"ๆฟƒๅบฆ"},"ground_atmosphere":{"description":"ๅœฐ่กจใฎๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅœฐ่กจใฎๅคงๆฐ—"},"hue_shift":{"title":"่‰ฒ็›ธ"},"sky_atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ไธŠ็ฉบใฎๅคงๆฐ—"},"surturation_shift":{"title":"ๅฝฉๅบฆ"}},"title":"ๅคงๆฐ—"},"default":{"fields":{"bgcolor":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใŒ้ž่กจ็คบใฎๅ 
ดๅˆใฎใ€่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"่ƒŒๆ™ฏ่‰ฒ"},"camera":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒๆœ€ๅˆใซ่กจ็คบใ™ใ‚‹ใ‚ซใƒกใƒฉใฎไฝ็ฝฎใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉๅˆๆœŸไฝ็ฝฎ"},"ion":{"description":"่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚","title":"Cesium Icon APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ"},"skybox":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅฎ‡ๅฎ™ใฎ่กจ็คบ"},"terrain":{"description":"ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅœฐๅฝข"}},"title":"ใ‚ทใƒผใƒณ"},"googleAnalytics":{"description":"Google Analyticsใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒšใƒผใ‚ธใŒใฉใฎใ‚ˆใ†ใซ้–ฒ่ฆงใ•ใ‚Œใฆใ„ใ‚‹ใ‹ใ‚’ๅˆ†ๆžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚","fields":{"enableGA":{"description":"Google Analyticsใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚","title":"ๆœ‰ๅŠน"},"trackingCode":{"description":"ใ“ใ“ใซใ‚ฐใƒผใ‚ฐใƒซใ‚ขใƒŠใƒชใƒ†ใ‚ฃใ‚ฏใ‚นใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐIDใ‚’่ฒผใ‚Šไป˜ใ‘ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ“ใฎใ‚ณใƒผใƒ‰ใŒๅŸ‹ใ‚่พผใพใ‚Œใพใ™ใ€‚","title":"ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐID"}},"title":"Google Analytics"},"tiles":{"description":"ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","fields":{"tile_maxLevel":{"title":"ๆœ€ๅคงใƒฌใƒ™ใƒซ"},"tile_minLevel":{"title":"ๆœ€ๅฐใƒฌใƒ™ใƒซ"},"tile_title":{"title":"ๅๅ‰"},"tile_type":{"choices":{"black_marble":"Black Marble","default":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ","default_label":"ใƒฉใƒ™ใƒซไป˜ใๅœฐๅ›ณ","default_road":"้“่ทฏๅœฐๅ›ณ","esri_world_topo":"ESRI Topography","japan_gsi_standard":"ๅœฐ็†้™ขๅœฐๅ›ณ ๆจ™ๆบ–ๅœฐๅ›ณ","open_street_map":"Open Street Map","stamen_toner":"Stamen Toner","stamen_watercolor":"Stamen 
Watercolor","url":"URL"},"title":"็จฎ้กž"},"tile_url":{"title":"URL"}},"title":"ใ‚ฟใ‚คใƒซ"}},"title":"Cesium"},"dlblock":{"description":"่กจใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"่กจใƒ–ใƒญใƒƒใ‚ฏ"},"items":{"fields":{"item_datanum":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—)"},"item_datastr":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ–‡ๅญ—)"},"item_datatype":{"choices":{"number":"ๆ•ฐๅญ—","string":"ๆ–‡ๅญ—"},"title":"็จฎ้กž"},"item_title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ใ‚ขใ‚คใƒ†ใƒ "}},"title":"่กจ"},"ellipsoid":{"description":"ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใ‚’ๅœฐๅ›ณไธŠใซใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใง่ฟฝๅŠ ใงใใพใ™ใ€‚ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใซใ‚ˆใฃใฆ็ซ‹ไฝ“็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"position":{"title":"ไฝ็ฝฎ"},"radius":{"title":"ๅŠๅพ„"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"},"imageblock":{"description":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"image":{"title":"็”ปๅƒ"},"imagePositionX":{"choices":{"center":"ไธญๅคฎ","left":"ๅทฆ","right":"ๅณ"},"title":"ๆฐดๅนณไฝ็ฝฎ"},"imagePositionY":{"choices":{"bottom":"ไธ‹","center":"ไธญๅคฎ","top":"ไธŠ"},"title":"ๅž‚็›ดไฝ็ฝฎ"},"imageSize":{"choices":{"contain":"ๅซใ‚€","cover":"ใ‚ซใƒใƒผ"},"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"็”ปๅƒ"},"infobox":{"description":"้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"bgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"size":{"choices":{"large":"ๅคง","small":"ๅฐ"},"title":"ใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"}},"t
itle":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"},"locationblock":{"description":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"marker":{"description":"ใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใงใ€ๅœฐๅ›ณไธŠใซใƒžใƒผใ‚ซใƒผใ‚’่ฟฝๅŠ ใ—ใพใ™ใ€‚ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใใ‚Œใ‚‰ใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"extrude":{"title":"ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"label":{"title":"ใƒฉใƒ™ใƒซ"},"labelText":{"title":"ใƒฉใƒ™ใƒซๆ–‡ๅญ—"},"labelTypography":{"title":"ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ"},"location":{"title":"ไฝ็ฝฎ"},"pointColor":{"title":"ใƒใ‚คใƒณใƒˆ่‰ฒ"},"pointSize":{"title":"ใƒใ‚คใƒณใƒˆใ‚ตใ‚คใ‚บ"},"style":{"choices":{"image":"ใ‚ขใ‚คใ‚ณใƒณ","point":"ใƒใ‚คใƒณใƒˆ"},"title":"่กจ็คบๆ–นๆณ•"}},"title":"ใƒžใƒผใ‚ซใƒผ"}},"title":"ใƒžใƒผใ‚ซใƒผ"},"menu":{"description":"ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚\\nใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚\\nใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ 
ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™\\nใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚","propertySchema":{"buttons":{"fields":{"buttonBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"buttonCamera":{"title":"ใ‚ซใƒกใƒฉ"},"buttonColor":{"title":"ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ"},"buttonIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"buttonInvisible":{"title":"้ž่กจ็คบ"},"buttonLink":{"title":"ใƒชใƒณใ‚ฏ"},"buttonPosition":{"choices":{"bottomleft":"ไธ‹ๅทฆ","bottomright":"ไธ‹ๅณ","topleft":"ไธŠๅทฆ","topright":"ไธŠๅณ"},"title":"่กจ็คบไฝ็ฝฎ"},"buttonStyle":{"choices":{"icon":"ใ‚ขใ‚คใ‚ณใƒณใฎใฟ","text":"ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ","texticon":"ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ"},"title":"่กจ็คบๆ–นๆณ•"},"buttonTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"buttonType":{"choices":{"camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ","menu":"ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒœใ‚ฟใƒณ"},"menu":{"fields":{"menuCamera":{"title":"ใ‚ซใƒกใƒฉ"},"menuIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"menuLink":{"title":"ใƒชใƒณใ‚ฏ"},"menuTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"menuType":{"choices":{"border":"ๅŒบๅˆ‡ใ‚Š็ทš","camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"},"photooverlay":{"description":"ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ใ•ใ‚ŒใŸใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"camera":{"description":"ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎ"},"photoOverlayImage":{"title":"ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"},"polygon":{"description":"Polygon primitive","propertySchema":{"default":{"fields":{"fill":{"title":"ๅก—ใ‚Š"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"polygon":{"title":"ใƒใƒชใ‚ดใƒณ"},"stroke":{"title":"็ทš"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"ใƒใƒชใ‚ดใƒณ"}},"title":"ใƒใƒชใ‚ดใƒณ"},"polyline":{"description":"Polyline primitive","propertySchema":{"default":{"fields":{"coordinates":{"title":"้ ‚็‚น"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"็›ด็ทš"}},"title":"็›ด็ทš"},"rect":{"description":"Rectangle primitive","propertySchema":{"default":{"fields":{"extrudedHeight":{"title":"้ซ˜ใ•"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"rect":{"title":"้•ทๆ–นๅฝข"},"style":{"choices":{"color":"่‰ฒ","image":"็”ปๅƒ"},"title":"ใ‚นใ‚ฟใ‚คใƒซ"}},"title":"้•ทๆ–นๅฝข"}},"title":"้•ทๆ–นๅฝข"},"resource":{"description":"ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟ๏ผˆๅฝขๅผไฝ•๏ผŸ๏ผŸ๏ผŸ๏ผ‰ใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚","propertySchema":{"default":{"fields":{"url":{"choices":{"auto":"่‡ชๅ‹•","czml":"CZML","geojson":"GeoJSON / TopoJSON","kml":"KML"},"title":"ใƒ•ใ‚กใ‚คใƒซ URL"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"},"splashscreen":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"camera":{"fields":{"cameraDelay":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"cameraDuration":{"title":"ใ‚ซใƒกใƒฉ้–‹ๅง‹ๆ™‚้–“"},"cameraPosition":{"title":"ใ‚ซใƒกใƒฉไฝ็ฝฎ"}},"title":"ใ‚ซใƒกใƒฉใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ"},"overlay":{"fields":{"overlayBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"overlayDelay":{"title":"้–‹ๅง‹ๆ™‚้–“"},"overlayDuration":{"title":"่กจ็คบๆ™‚้–“"},"overlayEnabled":{"title":"ๆœ‰ๅŠน"},"overlayImage":{"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค็”ปๅƒ"},"overlayImageH":{"title":"็”ปๅƒ้ซ˜ใ•"},"overlayImageW":{"title":"็”ปๅƒๅน…"},"overlayTransitionDuration":{"title":"ใƒ•ใ‚งใƒผใƒ‰ๆ™‚้–“"}},"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ"},"storytelling":{"description":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚","propertySchema":{"default":{"fields":{"autoStart":{"title":"่‡ชๅ‹•ๅ†็”Ÿ"},"camera":{"title":"ใ‚ซใƒกใƒฉ"},"duration":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"range":{"title":"็”ป่ง’"}},"title":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"},"stories":{"fields":{"layer":{"title":"ใƒฌใ‚คใƒคใƒผ"},"layerCamera":{"title":"ใ‚ซใƒกใƒฉ"},"layerDuration":{"title":"็งปๅ‹•ๆ™‚้–“"},"layerRange":{"title":"ใ‚ซใƒกใƒฉ็”ป่ง’"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผ"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ"},"textblock":{"description":"Text 
block","propertySchema":{"default":{"fields":{"markdown":{"title":"ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ"},"text":{"title":"ใ‚ณใƒณใƒ†ใƒณใƒ„"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆ"},"videoblock":{"description":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"url":{"title":"ๅ‹•็”ป URL"}},"title":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ๅ‹•็”ป"}},"title":"Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ"}` diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index 815a1567e..cc5c899ef 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/qf/bs41lxts47s6y_d62bj5zgym0000gn/T/go-build906758225/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build3669425617/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` From 06cd4464c9b20aad30faa856bfbd36af4fed1218 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Tue, 8 Jun 2021 00:33:31 +0300 Subject: [PATCH 031/253] fix: Invisible layer issue in published project (#7) Co-authored-by: Ya Ka Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> --- pkg/layer/merging/merger.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/layer/merging/merger.go b/pkg/layer/merging/merger.go index 0c59a42d3..c29919719 100644 --- a/pkg/layer/merging/merger.go +++ b/pkg/layer/merging/merger.go @@ -38,7 +38,7 @@ func (m *Merger) MergeLayer(ctx context.Context, l layer.Layer, parent *layer.Gr children := make([]MergedLayer, 0, len(layers)) for _, c := range layers { - if c == nil 
{ + if c == nil || !(*c).IsVisible() { continue } ml, err := m.MergeLayer(ctx, *c, lg) From c31bdb61ff7e74df645372cbd9c4ec6a0a196bd6 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Tue, 8 Jun 2021 09:11:51 +0300 Subject: [PATCH 032/253] test: pkg/id (#4) * testing pkg/id/id.go * testing pkg/id/plugin.go * testing pkg/id/plugin.go * testing pkg/id/plugin_extension.go * testing pkg/id/property_schema.go * testing pkg/id/property_schema_field.go * testing id.tmpl * reduce some duplicated logic * - update gen.go file - uncomment some test cases * fix PR comments * test: pkg/shp (#5) * - comment out unused code * - add test data * - reader test * - sequential reader test * - shp type test * - shp test * - writer test * - zip reader test * - add test data * fix PR comments Co-authored-by: YK * test: pkg/shp (#5) * - comment out unused code * - add test data * - reader test * - sequential reader test * - shp type test * - shp test * - writer test * - zip reader test * - add test data * fix PR comments Co-authored-by: YK Co-authored-by: rot1024 --- pkg/id/asset_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/dataset_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/dataset_schema_field_gen.go | 38 +- pkg/id/dataset_schema_field_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/dataset_schema_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/gen.go | 17 + pkg/id/id_test.go | 373 +++++++++ pkg/id/id_test.tmpl | 1006 +++++++++++++++++++++++ pkg/id/infobox_field_gen.go | 38 +- pkg/id/infobox_field_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/layer_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/plugin_extension_test.go | 63 ++ pkg/id/plugin_test.go | 627 +++++++++++++- pkg/id/project_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/property_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/property_item_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/property_schema_field_test.go | 63 ++ pkg/id/property_schema_test.go | 423 
++++++++++ pkg/id/scene_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/team_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/user_gen_test.go | 1004 ++++++++++++++++++++++ pkg/id/widget_gen_test.go | 1004 ++++++++++++++++++++++ 22 files changed, 15689 insertions(+), 11 deletions(-) create mode 100644 pkg/id/asset_gen_test.go create mode 100644 pkg/id/dataset_gen_test.go create mode 100644 pkg/id/dataset_schema_field_gen_test.go create mode 100644 pkg/id/dataset_schema_gen_test.go create mode 100644 pkg/id/id_test.go create mode 100644 pkg/id/id_test.tmpl create mode 100644 pkg/id/infobox_field_gen_test.go create mode 100644 pkg/id/layer_gen_test.go create mode 100644 pkg/id/plugin_extension_test.go create mode 100644 pkg/id/project_gen_test.go create mode 100644 pkg/id/property_gen_test.go create mode 100644 pkg/id/property_item_gen_test.go create mode 100644 pkg/id/property_schema_field_test.go create mode 100644 pkg/id/scene_gen_test.go create mode 100644 pkg/id/team_gen_test.go create mode 100644 pkg/id/user_gen_test.go create mode 100644 pkg/id/widget_gen_test.go diff --git a/pkg/id/asset_gen_test.go b/pkg/id/asset_gen_test.go new file mode 100644 index 000000000..99392f3ae --- /dev/null +++ b/pkg/id/asset_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewAssetID(t *testing.T) { + id := NewAssetID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestAssetIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result AssetID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result AssetID + err error + }{ + AssetID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result AssetID + err error + }{ + AssetID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result AssetID + err error + }{ + AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := AssetIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustAssetID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected AssetID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustAssetID(tc.input) + assert.Equal(tt, tc.expected, 
result) + }) + } +} + +func TestAssetIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *AssetID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := AssetIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestAssetIDFromRefID(t *testing.T) { + id := New() + + subId := AssetIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestAssetID_ID(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestAssetID_String(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestAssetID_GoString(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.AssetID("+id.String()+")") +} + +func TestAssetID_RefString(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestAssetID_Ref(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestAssetID_CopyRef(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestAssetID_IDRef(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + 
assert.Equal(t, id, *subId.IDRef()) +} + +func TestAssetID_StringRef(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestAssetID_MarhsalJSON(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestAssetID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &AssetID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestAssetID_MarshalText(t *testing.T) { + id := New() + subId := AssetIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestAssetID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &AssetID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestAssetID_IsNil(t *testing.T) { + subId := AssetID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *AssetIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestAssetIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []AssetID + expected []string + }{ + { + name: "Empty slice", + input: make([]AssetID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + 
"01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, AssetIDToKeys(tc.input)) + }) + } + +} + +func TestAssetIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []AssetID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []AssetID + err error + }{ + res: make([]AssetID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []AssetID + err error + }{ + res: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []AssetID + err error + }{ + res: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []AssetID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := AssetIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := AssetIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestAssetIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []AssetID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]AssetID, 0), + }, + { + name: "1 element", + 
input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AssetIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestAssetIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []AssetID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]AssetID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []AssetID{MustAssetID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []AssetID{ + MustAssetID(id1.String()), + MustAssetID(id2.String()), + MustAssetID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AssetIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestAssetIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []AssetID + expected []ID + }{ + { + name: "Empty slice", + input: make([]AssetID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AssetIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestAssetIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustAssetID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustAssetID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustAssetID(id3.String()) + + testCases := []struct { + name string + input []*AssetID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*AssetID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*AssetID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*AssetID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AssetIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewAssetIDSet(t *testing.T) { + AssetIdSet := NewAssetIDSet() + + assert.NotNil(t, AssetIdSet) + assert.Empty(t, AssetIdSet.m) + assert.Empty(t, AssetIdSet.s) +} + +func TestAssetIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []AssetID + expected *AssetIDSet + }{ + { + name: "Empty slice", + input: make([]AssetID, 0), + expected: &AssetIDSet{ + m: map[AssetID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewAssetIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestAssetIDSet_AddRef(t *testing.T) { + t.Parallel() + + AssetId := MustAssetID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *AssetID + expected *AssetIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &AssetIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &AssetId, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewAssetIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestAssetIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + AssetIDSet + AssetID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + AssetIDSet + AssetID + }{AssetIDSet: AssetIDSet{}, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + AssetIDSet + AssetID + }{AssetIDSet: AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + AssetIDSet + AssetID + }{AssetIDSet: AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.AssetIDSet.Has(tc.input.AssetID)) + }) + } +} + +func 
TestAssetIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input AssetIDSet + expected AssetIDSet + }{ + { + name: "Empty Set", + input: AssetIDSet{}, + expected: AssetIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: AssetIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestAssetIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *AssetIDSet + expected []AssetID + }{ + { + name: "Empty slice", + input: &AssetIDSet{ + m: map[AssetID]struct{}{}, + s: nil, + }, + expected: make([]AssetID, 0), + }, + { + name: "1 element", + input: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &AssetIDSet{ + m: map[AssetID]struct{}{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func 
TestAssetIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *AssetIDSet + expected *AssetIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewAssetIDSet(), + }, + { + name: "Empty set", + input: NewAssetIDSet(), + expected: NewAssetIDSet(), + }, + { + name: "1 element", + input: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &AssetIDSet{ + m: map[AssetID]struct{}{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestAssetIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *AssetIDSet + b *AssetIDSet + } + expected *AssetIDSet + }{ + { + name: "Empty Set", + input: struct { + a *AssetIDSet + b *AssetIDSet + }{ + a: &AssetIDSet{}, + b: 
&AssetIDSet{}, + }, + expected: &AssetIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *AssetIDSet + b *AssetIDSet + }{ + a: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &AssetIDSet{}, + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *AssetIDSet + b *AssetIDSet + }{ + a: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []AssetID{ + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/dataset_gen_test.go b/pkg/id/dataset_gen_test.go new file mode 100644 index 000000000..e5a29f7f6 --- /dev/null +++ b/pkg/id/dataset_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewDatasetID(t *testing.T) { + id := NewDatasetID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestDatasetIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result DatasetID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result DatasetID + err error + }{ + DatasetID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result DatasetID + err error + }{ + DatasetID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result DatasetID + err error + }{ + DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := DatasetIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustDatasetID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected DatasetID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustDatasetID(tc.input) + 
assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestDatasetIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *DatasetID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := DatasetIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestDatasetIDFromRefID(t *testing.T) { + id := New() + + subId := DatasetIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestDatasetID_ID(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestDatasetID_String(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestDatasetID_GoString(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.DatasetID("+id.String()+")") +} + +func TestDatasetID_RefString(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestDatasetID_Ref(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestDatasetID_CopyRef(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestDatasetID_IDRef(t 
*testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestDatasetID_StringRef(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestDatasetID_MarhsalJSON(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestDatasetID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &DatasetID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestDatasetID_MarshalText(t *testing.T) { + id := New() + subId := DatasetIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestDatasetID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &DatasetID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestDatasetID_IsNil(t *testing.T) { + subId := DatasetID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *DatasetIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestDatasetIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []DatasetID + expected []string + }{ + { + name: "Empty slice", + input: make([]DatasetID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + 
expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, DatasetIDToKeys(tc.input)) + }) + } + +} + +func TestDatasetIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []DatasetID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []DatasetID + err error + }{ + res: make([]DatasetID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []DatasetID + err error + }{ + res: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []DatasetID + err error + }{ + res: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []DatasetID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := DatasetIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := DatasetIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestDatasetIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []DatasetID + 
}{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]DatasetID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []DatasetID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]DatasetID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []DatasetID{MustDatasetID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []DatasetID{ + MustDatasetID(id1.String()), + MustDatasetID(id2.String()), + MustDatasetID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []DatasetID + expected []ID + }{ + { + name: "Empty slice", + input: make([]DatasetID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + 
expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustDatasetID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustDatasetID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustDatasetID(id3.String()) + + testCases := []struct { + name string + input []*DatasetID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*DatasetID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*DatasetID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*DatasetID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewDatasetIDSet(t *testing.T) { + DatasetIdSet := NewDatasetIDSet() + + assert.NotNil(t, DatasetIdSet) + assert.Empty(t, DatasetIdSet.m) + assert.Empty(t, DatasetIdSet.s) +} + +func TestDatasetIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []DatasetID + expected *DatasetIDSet + }{ + { + name: "Empty slice", + input: make([]DatasetID, 0), + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{}, + s: nil, + }, + }, + { + 
name: "1 element", + input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewDatasetIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestDatasetIDSet_AddRef(t *testing.T) { + t.Parallel() + + DatasetId := MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *DatasetID + expected *DatasetIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &DatasetIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &DatasetId, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewDatasetIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestDatasetIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + DatasetIDSet + DatasetID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + DatasetIDSet + DatasetID + }{DatasetIDSet: DatasetIDSet{}, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + DatasetIDSet + DatasetID + }{DatasetIDSet: DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + DatasetIDSet + DatasetID + }{DatasetIDSet: DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, 
tc.expected, tc.input.DatasetIDSet.Has(tc.input.DatasetID)) + }) + } +} + +func TestDatasetIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input DatasetIDSet + expected DatasetIDSet + }{ + { + name: "Empty Set", + input: DatasetIDSet{}, + expected: DatasetIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: DatasetIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestDatasetIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *DatasetIDSet + expected []DatasetID + }{ + { + name: "Empty slice", + input: &DatasetIDSet{ + m: map[DatasetID]struct{}{}, + s: nil, + }, + expected: make([]DatasetID, 0), + }, + { + name: "1 element", + input: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &DatasetIDSet{ + m: map[DatasetID]struct{}{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := 
range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestDatasetIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *DatasetIDSet + expected *DatasetIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewDatasetIDSet(), + }, + { + name: "Empty set", + input: NewDatasetIDSet(), + expected: NewDatasetIDSet(), + }, + { + name: "1 element", + input: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &DatasetIDSet{ + m: map[DatasetID]struct{}{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestDatasetIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases 
:= []struct { + name string + input struct { + a *DatasetIDSet + b *DatasetIDSet + } + expected *DatasetIDSet + }{ + { + name: "Empty Set", + input: struct { + a *DatasetIDSet + b *DatasetIDSet + }{ + a: &DatasetIDSet{}, + b: &DatasetIDSet{}, + }, + expected: &DatasetIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *DatasetIDSet + b *DatasetIDSet + }{ + a: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &DatasetIDSet{}, + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *DatasetIDSet + b *DatasetIDSet + }{ + a: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []DatasetID{ + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/dataset_schema_field_gen.go b/pkg/id/dataset_schema_field_gen.go index b11285aec..e73a28ca1 100644 --- a/pkg/id/dataset_schema_field_gen.go +++ b/pkg/id/dataset_schema_field_gen.go @@ -1,7 +1,9 @@ -// Code generated by idgen, DO NOT EDIT. +// Code generated by gen, DO NOT EDIT. 
package id +import "encoding/json" + // DatasetSchemaFieldID is an ID for DatasetSchemaField. type DatasetSchemaFieldID ID @@ -59,6 +61,11 @@ func (d DatasetSchemaFieldID) String() string { return ID(d).String() } +// GoString implements fmt.GoStringer interface. +func (d DatasetSchemaFieldID) GoString() string { + return "id.DatasetSchemaFieldID(" + d.String() + ")" +} + // RefString returns a reference of string representation. func (d DatasetSchemaFieldID) RefString() *string { id := ID(d).String() @@ -98,6 +105,35 @@ func (d *DatasetSchemaFieldID) StringRef() *string { return &id } +// MarhsalJSON implements json.Marhsaler interface +func (d *DatasetSchemaFieldID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *DatasetSchemaFieldID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = DatasetSchemaFieldIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *DatasetSchemaFieldID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *DatasetSchemaFieldID) UnmarshalText(text []byte) (err error) { + *d, err = DatasetSchemaFieldIDFrom(string(text)) + return +} + // Ref returns true if a ID is nil or zero-value func (d DatasetSchemaFieldID) IsNil() bool { return ID(d).IsNil() diff --git a/pkg/id/dataset_schema_field_gen_test.go b/pkg/id/dataset_schema_field_gen_test.go new file mode 100644 index 000000000..b68460794 --- /dev/null +++ b/pkg/id/dataset_schema_field_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewDatasetSchemaFieldID(t *testing.T) { + id := NewDatasetSchemaFieldID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestDatasetSchemaFieldIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result DatasetSchemaFieldID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result DatasetSchemaFieldID + err error + }{ + DatasetSchemaFieldID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result DatasetSchemaFieldID + err error + }{ + DatasetSchemaFieldID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result DatasetSchemaFieldID + err error + }{ + DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := DatasetSchemaFieldIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustDatasetSchemaFieldID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected DatasetSchemaFieldID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt 
*testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustDatasetSchemaFieldID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestDatasetSchemaFieldIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *DatasetSchemaFieldID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := DatasetSchemaFieldIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestDatasetSchemaFieldIDFromRefID(t *testing.T) { + id := New() + + subId := DatasetSchemaFieldIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestDatasetSchemaFieldID_ID(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestDatasetSchemaFieldID_String(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestDatasetSchemaFieldID_GoString(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.DatasetSchemaFieldID("+id.String()+")") +} + +func TestDatasetSchemaFieldID_RefString(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestDatasetSchemaFieldID_Ref(t *testing.T) { + id := New() + subId := 
DatasetSchemaFieldIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestDatasetSchemaFieldID_CopyRef(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestDatasetSchemaFieldID_IDRef(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestDatasetSchemaFieldID_StringRef(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestDatasetSchemaFieldID_MarhsalJSON(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestDatasetSchemaFieldID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &DatasetSchemaFieldID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestDatasetSchemaFieldID_MarshalText(t *testing.T) { + id := New() + subId := DatasetSchemaFieldIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestDatasetSchemaFieldID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &DatasetSchemaFieldID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestDatasetSchemaFieldID_IsNil(t *testing.T) { + subId := DatasetSchemaFieldID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *DatasetSchemaFieldIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestDatasetSchemaFieldIDToKeys(t *testing.T) 
{ + t.Parallel() + testCases := []struct { + name string + input []DatasetSchemaFieldID + expected []string + }{ + { + name: "Empty slice", + input: make([]DatasetSchemaFieldID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, DatasetSchemaFieldIDToKeys(tc.input)) + }) + } + +} + +func TestDatasetSchemaFieldIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []DatasetSchemaFieldID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []DatasetSchemaFieldID + err error + }{ + res: make([]DatasetSchemaFieldID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []DatasetSchemaFieldID + err error + }{ + res: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []DatasetSchemaFieldID + err error + }{ + res: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + 
MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []DatasetSchemaFieldID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := DatasetSchemaFieldIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := DatasetSchemaFieldIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestDatasetSchemaFieldIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []DatasetSchemaFieldID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]DatasetSchemaFieldID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaFieldIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetSchemaFieldIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases 
:= []struct { + name string + input []*ID + expected []DatasetSchemaFieldID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]DatasetSchemaFieldID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID(id1.String()), + MustDatasetSchemaFieldID(id2.String()), + MustDatasetSchemaFieldID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaFieldIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetSchemaFieldIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []DatasetSchemaFieldID + expected []ID + }{ + { + name: "Empty slice", + input: make([]DatasetSchemaFieldID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaFieldIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetSchemaFieldIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustDatasetSchemaFieldID(id1.String()) + id2 := 
MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustDatasetSchemaFieldID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustDatasetSchemaFieldID(id3.String()) + + testCases := []struct { + name string + input []*DatasetSchemaFieldID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*DatasetSchemaFieldID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*DatasetSchemaFieldID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*DatasetSchemaFieldID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaFieldIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewDatasetSchemaFieldIDSet(t *testing.T) { + DatasetSchemaFieldIdSet := NewDatasetSchemaFieldIDSet() + + assert.NotNil(t, DatasetSchemaFieldIdSet) + assert.Empty(t, DatasetSchemaFieldIdSet.m) + assert.Empty(t, DatasetSchemaFieldIdSet.s) +} + +func TestDatasetSchemaFieldIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []DatasetSchemaFieldID + expected *DatasetSchemaFieldIDSet + }{ + { + name: "Empty slice", + input: make([]DatasetSchemaFieldID, 0), + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + 
MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewDatasetSchemaFieldIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestDatasetSchemaFieldIDSet_AddRef(t *testing.T) { + t.Parallel() + + DatasetSchemaFieldId := MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *DatasetSchemaFieldID + expected *DatasetSchemaFieldIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &DatasetSchemaFieldIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &DatasetSchemaFieldId, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewDatasetSchemaFieldIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestDatasetSchemaFieldIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + DatasetSchemaFieldIDSet + DatasetSchemaFieldID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + DatasetSchemaFieldIDSet + DatasetSchemaFieldID + }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{}, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + DatasetSchemaFieldIDSet + DatasetSchemaFieldID + }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + DatasetSchemaFieldIDSet + DatasetSchemaFieldID + 
}{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.DatasetSchemaFieldIDSet.Has(tc.input.DatasetSchemaFieldID)) + }) + } +} + +func TestDatasetSchemaFieldIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input DatasetSchemaFieldIDSet + expected DatasetSchemaFieldIDSet + }{ + { + name: "Empty Set", + input: DatasetSchemaFieldIDSet{}, + expected: DatasetSchemaFieldIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: DatasetSchemaFieldIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestDatasetSchemaFieldIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *DatasetSchemaFieldIDSet + expected []DatasetSchemaFieldID + }{ + { + name: "Empty slice", + input: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{}, + s: nil, + }, + expected: make([]DatasetSchemaFieldID, 0), + }, + { + name: "1 element", + input: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestDatasetSchemaFieldIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *DatasetSchemaFieldIDSet + expected *DatasetSchemaFieldIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewDatasetSchemaFieldIDSet(), + }, + { + name: "Empty set", + input: NewDatasetSchemaFieldIDSet(), + expected: NewDatasetSchemaFieldIDSet(), + }, + { + name: "1 element", + input: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: 
"multiple elements", + input: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *DatasetSchemaFieldIDSet + b *DatasetSchemaFieldIDSet + } + expected *DatasetSchemaFieldIDSet + }{ + { + name: "Empty Set", + input: struct { + a *DatasetSchemaFieldIDSet + b *DatasetSchemaFieldIDSet + }{ + a: &DatasetSchemaFieldIDSet{}, + b: &DatasetSchemaFieldIDSet{}, + }, + expected: &DatasetSchemaFieldIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *DatasetSchemaFieldIDSet + b *DatasetSchemaFieldIDSet + }{ + a: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &DatasetSchemaFieldIDSet{}, + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *DatasetSchemaFieldIDSet + b *DatasetSchemaFieldIDSet + }{ + a: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []DatasetSchemaFieldID{ + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/dataset_schema_gen_test.go b/pkg/id/dataset_schema_gen_test.go new file mode 100644 index 000000000..9320f2b14 --- /dev/null +++ b/pkg/id/dataset_schema_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewDatasetSchemaID(t *testing.T) { + id := NewDatasetSchemaID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestDatasetSchemaIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result DatasetSchemaID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result DatasetSchemaID + err error + }{ + DatasetSchemaID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result DatasetSchemaID + err error + }{ + DatasetSchemaID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result DatasetSchemaID + err error + }{ + DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := DatasetSchemaIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustDatasetSchemaID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected DatasetSchemaID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + 
assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustDatasetSchemaID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestDatasetSchemaIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *DatasetSchemaID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := DatasetSchemaIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestDatasetSchemaIDFromRefID(t *testing.T) { + id := New() + + subId := DatasetSchemaIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestDatasetSchemaID_ID(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestDatasetSchemaID_String(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestDatasetSchemaID_GoString(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.DatasetSchemaID("+id.String()+")") +} + +func TestDatasetSchemaID_RefString(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestDatasetSchemaID_Ref(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestDatasetSchemaID_CopyRef(t 
*testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestDatasetSchemaID_IDRef(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestDatasetSchemaID_StringRef(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestDatasetSchemaID_MarhsalJSON(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestDatasetSchemaID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &DatasetSchemaID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestDatasetSchemaID_MarshalText(t *testing.T) { + id := New() + subId := DatasetSchemaIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestDatasetSchemaID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &DatasetSchemaID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestDatasetSchemaID_IsNil(t *testing.T) { + subId := DatasetSchemaID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *DatasetSchemaIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestDatasetSchemaIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []DatasetSchemaID + expected []string + }{ + { + name: "Empty slice", + input: make([]DatasetSchemaID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", 
+ input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, DatasetSchemaIDToKeys(tc.input)) + }) + } + +} + +func TestDatasetSchemaIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []DatasetSchemaID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []DatasetSchemaID + err error + }{ + res: make([]DatasetSchemaID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []DatasetSchemaID + err error + }{ + res: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []DatasetSchemaID + err error + }{ + res: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []DatasetSchemaID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := 
tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := DatasetSchemaIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := DatasetSchemaIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestDatasetSchemaIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []DatasetSchemaID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]DatasetSchemaID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetSchemaIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []DatasetSchemaID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]DatasetSchemaID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []DatasetSchemaID{MustDatasetSchemaID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []DatasetSchemaID{ + MustDatasetSchemaID(id1.String()), + 
MustDatasetSchemaID(id2.String()), + MustDatasetSchemaID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetSchemaIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []DatasetSchemaID + expected []ID + }{ + { + name: "Empty slice", + input: make([]DatasetSchemaID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestDatasetSchemaIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustDatasetSchemaID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustDatasetSchemaID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustDatasetSchemaID(id3.String()) + + testCases := []struct { + name string + input []*DatasetSchemaID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*DatasetSchemaID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*DatasetSchemaID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*DatasetSchemaID{&subId1, &subId2, &subId3}, + 
expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := DatasetSchemaIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewDatasetSchemaIDSet(t *testing.T) { + DatasetSchemaIdSet := NewDatasetSchemaIDSet() + + assert.NotNil(t, DatasetSchemaIdSet) + assert.Empty(t, DatasetSchemaIdSet.m) + assert.Empty(t, DatasetSchemaIdSet.s) +} + +func TestDatasetSchemaIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []DatasetSchemaID + expected *DatasetSchemaIDSet + }{ + { + name: "Empty slice", + input: make([]DatasetSchemaID, 0), + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewDatasetSchemaIDSet() + set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestDatasetSchemaIDSet_AddRef(t *testing.T) { + t.Parallel() + + DatasetSchemaId := MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *DatasetSchemaID + expected *DatasetSchemaIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &DatasetSchemaIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &DatasetSchemaId, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewDatasetSchemaIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestDatasetSchemaIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + DatasetSchemaIDSet + DatasetSchemaID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + DatasetSchemaIDSet + DatasetSchemaID + }{DatasetSchemaIDSet: DatasetSchemaIDSet{}, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + DatasetSchemaIDSet + DatasetSchemaID + }{DatasetSchemaIDSet: DatasetSchemaIDSet{ + 
m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + DatasetSchemaIDSet + DatasetSchemaID + }{DatasetSchemaIDSet: DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.DatasetSchemaIDSet.Has(tc.input.DatasetSchemaID)) + }) + } +} + +func TestDatasetSchemaIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input DatasetSchemaIDSet + expected DatasetSchemaIDSet + }{ + { + name: "Empty Set", + input: DatasetSchemaIDSet{}, + expected: DatasetSchemaIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: DatasetSchemaIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestDatasetSchemaIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *DatasetSchemaIDSet + expected []DatasetSchemaID + }{ + { + name: "Empty slice", + input: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{}, + s: nil, + }, + expected: 
make([]DatasetSchemaID, 0), + }, + { + name: "1 element", + input: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestDatasetSchemaIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *DatasetSchemaIDSet + expected *DatasetSchemaIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewDatasetSchemaIDSet(), + }, + { + name: "Empty set", + input: NewDatasetSchemaIDSet(), + expected: NewDatasetSchemaIDSet(), + }, + { + name: "1 element", + input: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestDatasetSchemaIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *DatasetSchemaIDSet + b *DatasetSchemaIDSet + } + expected *DatasetSchemaIDSet + }{ + { + name: "Empty Set", + input: struct { + a *DatasetSchemaIDSet + b *DatasetSchemaIDSet + }{ + a: &DatasetSchemaIDSet{}, + b: &DatasetSchemaIDSet{}, + }, + expected: &DatasetSchemaIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *DatasetSchemaIDSet + b *DatasetSchemaIDSet + }{ + a: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: 
&DatasetSchemaIDSet{}, + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *DatasetSchemaIDSet + b *DatasetSchemaIDSet + }{ + a: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []DatasetSchemaID{ + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/gen.go b/pkg/id/gen.go index a14d534d6..54d0703b7 100644 --- a/pkg/id/gen.go +++ b/pkg/id/gen.go @@ -9,5 +9,22 @@ //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=widget_gen.go --name=Widget //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=team_gen.go --name=Team //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=user_gen.go --name=User +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_field_gen.go --name=DatasetSchemaField +//go:generate go run 
github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=infobox_field_gen.go --name=InfoboxField + +// Testing +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=asset_gen_test.go --name=Asset +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_gen_test.go --name=Dataset +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_gen_test.go --name=DatasetSchema +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=layer_gen_test.go --name=Layer +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=project_gen_test.go --name=Project +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=property_gen_test.go --name=Property +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=property_item_gen_test.go --name=PropertyItem +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=scene_gen_test.go --name=Scene +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=widget_gen_test.go --name=Widget +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=team_gen_test.go --name=Team +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=user_gen_test.go --name=User +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_field_gen_test.go --name=DatasetSchemaField +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=infobox_field_gen_test.go --name=InfoboxField package id diff --git 
a/pkg/id/id_test.go b/pkg/id/id_test.go new file mode 100644 index 000000000..2becc4dc1 --- /dev/null +++ b/pkg/id/id_test.go @@ -0,0 +1,373 @@ +package id + +import ( + "errors" + "strings" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestID_New(t *testing.T) { + id := New() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestID_NewAllID(t *testing.T) { + testCases := []struct { + name string + input int + expected int + }{ + { + name: "success: Zero ID", + input: 0, + expected: 0, + }, + { + name: "success: One ID", + input: 1, + expected: 1, + }, + { + name: "success: Multiple IDs", + input: 5, + expected: 5, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := NewAllID(tc.input) + assert.Equal(tt, tc.expected, len(result)) + for _, id := range result { + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) + } + }) + } +} + +func TestID_NewIDWith(t *testing.T) { + testCases := []struct { + name string + input string + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + }, + { + name: "Fail:Not valid string", + input: "", + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := NewIDWith(tc.input) + exResult, exErr := FromID(tc.input) + assert.Equal(tt, exResult, result) + assert.Equal(tt, exErr, err) + }) + } +} + +func TestID_FromID(t *testing.T) { + testCases := []struct { + name string + input string + expected struct { + result ID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result ID + err error + }{ + ID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct 
{ + result ID + err error + }{ + ID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result ID + err error + }{ + ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := FromID(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestID_FromIDRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *ID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := FromIDRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestID_MustBeID(t *testing.T) { + testCases := []struct { + name string + input string + shouldPanic bool + expected ID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustBeID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestID_Copy(t *testing.T) { + id := New() + + id2 := 
id.Copy() + + assert.Equal(t, id.id, id2.id) + + assert.NotSame(t, id.id, id2.id) +} + +func TestID_Timestamp(t *testing.T) { + id := New() + + assert.Equal(t, ulid.Time(id.id.Time()), id.Timestamp()) +} + +func TestID_String(t *testing.T) { + id := MustBeID("01f2r7kg1fvvffp0gmexgy5hxy") + + assert.Equal(t, id.String(), "01f2r7kg1fvvffp0gmexgy5hxy") +} + +func TestID_GoString(t *testing.T) { + id := MustBeID("01f2r7kg1fvvffp0gmexgy5hxy") + + assert.Equal(t, id.GoString(), "id.ID(01f2r7kg1fvvffp0gmexgy5hxy)") +} + +func TestID_IsNil(t *testing.T) { + id := ID{} + + assert.True(t, id.IsNil()) + + id = New() + + assert.False(t, id.IsNil()) + +} + +func TestID_Compare(t *testing.T) { + id1 := New() + id2 := New() + + assert.Less(t, id1.Compare(id2), 0) + assert.Greater(t, id2.Compare(id1), 0) + assert.Equal(t, id1.Compare(id1), 0) + assert.Equal(t, id2.Compare(id2), 0) +} + +func TestID_Equal(t *testing.T) { + id1 := New() + id2 := id1.Copy() + + assert.True(t, id1.Equal(id2)) + assert.False(t, id1.Equal(New())) +} + +func TestID_IsEmpty(t *testing.T) { + id := ID{} + + assert.True(t, id.IsEmpty()) + + id = New() + + assert.False(t, id.IsEmpty()) +} + +func TestID_generateID(t *testing.T) { + id := generateID() + + assert.NotNil(t, id) +} + +func TestID_generateAllID(t *testing.T) { + testCases := []struct { + name string + input int + expected int + }{ + { + name: "success: Zero ID", + input: 0, + expected: 0, + }, + { + name: "success: One ID", + input: 1, + expected: 1, + }, + { + name: "success: Multiple IDs", + input: 5, + expected: 5, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := generateAllID(tc.input) + assert.Equal(tt, tc.expected, len(result)) + for _, id := range result { + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) + } + }) + } +} + +func TestID_parseID(t *testing.T) { + _, err := parseID("") + + assert.True(t, 
errors.As(ErrInvalidID, &err)) + + id, err := parseID("01f2r7kg1fvvffp0gmexgy5hxy") + + assert.Nil(t, err) + assert.EqualValues(t, strings.ToLower(id.String()), "01f2r7kg1fvvffp0gmexgy5hxy") +} + +func TestID_includeUpperCase(t *testing.T) { + testCases := []struct { + name string + input string + expected bool + }{ + { + name: "All small letters", + input: "abcd", + expected: false, + }, + { + name: "Contains Upper case", + input: "Abcd", + expected: true, + }, + { + name: "Contains Upper case", + input: "abcD", + expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := includeUpperCase(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} diff --git a/pkg/id/id_test.tmpl b/pkg/id/id_test.tmpl new file mode 100644 index 000000000..98b41c969 --- /dev/null +++ b/pkg/id/id_test.tmpl @@ -0,0 +1,1006 @@ +{{ $name := index .Flags.name 0 }} + +package {{.PackageName}} + +import ( + "encoding" + "errors" + "strings" + "testing" + + "github.com/blang/semver" + "github.com/stretchr/testify/assert" +) + + +func TestNew{{$name}}ID(t *testing.T) { + id := New{{$name}}ID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func Test{{$name}}IDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result {{$name}}ID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result {{$name}}ID + err error + }{ + {{$name}}ID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result {{$name}}ID + err error + }{ + {{$name}}ID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result {{$name}}ID + err error + }{ + {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := 
range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := {{$name}}IDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMust{{$name}}ID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected {{$name}}ID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := Must{{$name}}ID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func Test{{$name}}IDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *{{$name}}ID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &{{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := {{$name}}IDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func Test{{$name}}IDFromRefID(t *testing.T) { + id := New() + + subId := {{$name}}IDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func Test{{$name}}ID_ID(t *testing.T) { + id := New() + subId := 
{{$name}}IDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func Test{{$name}}ID_String(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func Test{{$name}}ID_GoString(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.{{$name}}ID(" + id.String() + ")") +} + +func Test{{$name}}ID_RefString(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func Test{{$name}}ID_Ref(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func Test{{$name}}ID_CopyRef(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func Test{{$name}}ID_IDRef(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func Test{{$name}}ID_StringRef(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func Test{{$name}}ID_MarhsalJSON(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func Test{{$name}}ID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &{{$name}}ID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func Test{{$name}}ID_MarshalText(t *testing.T) { + id := New() + subId := {{$name}}IDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, 
[]byte(id.String()), res) +} + +func Test{{$name}}ID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &{{$name}}ID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func Test{{$name}}ID_IsNil(t *testing.T) { + subId := {{$name}}ID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *{{$name}}IDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func Test{{$name}}IDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []{{$name}}ID + expected []string + }{ + { + name: "Empty slice", + input: make([]{{$name}}ID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, {{$name}}IDToKeys(tc.input)) + }) + } + +} + +func Test{{$name}}IDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []{{$name}}ID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []{{$name}}ID + err error + }{ + res: make([]{{$name}}ID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []{{$name}}ID + err error + }{ + res: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + 
input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []{{$name}}ID + err error + }{ + res: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []{{$name}}ID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := {{$name}}IDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := {{$name}}IDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func Test{{$name}}IDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []{{$name}}ID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]{{$name}}ID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := {{$name}}IDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func Test{{$name}}IDsFromIDRef(t *testing.T) { + 
t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []{{$name}}ID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]{{$name}}ID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []{{$name}}ID{Must{{$name}}ID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []{{$name}}ID{ + Must{{$name}}ID(id1.String()), + Must{{$name}}ID(id2.String()), + Must{{$name}}ID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := {{$name}}IDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func Test{{$name}}IDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []{{$name}}ID + expected []ID + }{ + { + name: "Empty slice", + input: make([]{{$name}}ID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := {{$name}}IDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func Test{{$name}}IDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := Must{{$name}}ID(id1.String()) + id2 := 
MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := Must{{$name}}ID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := Must{{$name}}ID(id3.String()) + + testCases := []struct { + name string + input []*{{$name}}ID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*{{$name}}ID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*{{$name}}ID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*{{$name}}ID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := {{$name}}IDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNew{{$name}}IDSet(t *testing.T) { + {{$name}}IdSet := New{{$name}}IDSet() + + assert.NotNil(t, {{$name}}IdSet) + assert.Empty(t, {{$name}}IdSet.m) + assert.Empty(t, {{$name}}IdSet.s) +} + +func Test{{$name}}IDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []{{$name}}ID + expected *{{$name}}IDSet + }{ + { + name: "Empty slice", + input: make([]{{$name}}ID, 0), + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): 
struct{}{}, + }, + s: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := New{{$name}}IDSet() + set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func Test{{$name}}IDSet_AddRef(t *testing.T) { + t.Parallel() + + {{$name}}Id := Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *{{$name}}ID + expected *{{$name}}IDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &{{$name}}IDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &{{$name}}Id, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := New{{$name}}IDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func Test{{$name}}IDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + {{$name}}IDSet + {{$name}}ID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + {{$name}}IDSet + {{$name}}ID + }{ 
{{$name}}IDSet: {{$name}}IDSet{}, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + {{$name}}IDSet + {{$name}}ID + }{ {{$name}}IDSet: {{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + {{$name}}IDSet + {{$name}}ID + }{ {{$name}}IDSet: {{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.{{$name}}IDSet.Has(tc.input.{{$name}}ID)) + }) + } +} + +func Test{{$name}}IDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input {{$name}}IDSet + expected {{$name}}IDSet + }{ + { + name: "Empty Set", + input: {{$name}}IDSet{}, + expected: {{$name}}IDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: {{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: {{$name}}IDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func Test{{$name}}IDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *{{$name}}IDSet + expected []{{$name}}ID + }{ + { + name: "Empty 
slice", + input: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{}, + s: nil, + }, + expected: make([]{{$name}}ID, 0), + }, + { + name: "1 element", + input: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func Test{{$name}}IDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *{{$name}}IDSet + expected *{{$name}}IDSet + }{ + { + name: "nil set", + input: nil, + expected: New{{$name}}IDSet(), + }, + { + name: "Empty set", + input: New{{$name}}IDSet(), + expected: New{{$name}}IDSet(), + }, + { + name: "1 element", + input: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + 
input: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func Test{{$name}}IDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *{{$name}}IDSet + b *{{$name}}IDSet + } + expected *{{$name}}IDSet + }{ + { + name: "Empty Set", + input: struct { + a *{{$name}}IDSet + b *{{$name}}IDSet + }{ + a: &{{$name}}IDSet{}, + b: &{{$name}}IDSet{}, + }, + expected: &{{$name}}IDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *{{$name}}IDSet + b *{{$name}}IDSet + }{ + a: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &{{$name}}IDSet{}, + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a 
*{{$name}}IDSet + b *{{$name}}IDSet + }{ + a: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []{{$name}}ID{ + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/infobox_field_gen.go b/pkg/id/infobox_field_gen.go index 408b04af0..189646e2e 100644 --- a/pkg/id/infobox_field_gen.go +++ b/pkg/id/infobox_field_gen.go @@ -1,7 +1,9 @@ -// Code generated by idgen, DO NOT EDIT. +// Code generated by gen, DO NOT EDIT. package id +import "encoding/json" + // InfoboxFieldID is an ID for InfoboxField. type InfoboxFieldID ID @@ -59,6 +61,11 @@ func (d InfoboxFieldID) String() string { return ID(d).String() } +// GoString implements fmt.GoStringer interface. +func (d InfoboxFieldID) GoString() string { + return "id.InfoboxFieldID(" + d.String() + ")" +} + // RefString returns a reference of string representation. 
func (d InfoboxFieldID) RefString() *string { id := ID(d).String() @@ -98,6 +105,35 @@ func (d *InfoboxFieldID) StringRef() *string { return &id } +// MarhsalJSON implements json.Marhsaler interface +func (d *InfoboxFieldID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *InfoboxFieldID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = InfoboxFieldIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *InfoboxFieldID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *InfoboxFieldID) UnmarshalText(text []byte) (err error) { + *d, err = InfoboxFieldIDFrom(string(text)) + return +} + // Ref returns true if a ID is nil or zero-value func (d InfoboxFieldID) IsNil() bool { return ID(d).IsNil() diff --git a/pkg/id/infobox_field_gen_test.go b/pkg/id/infobox_field_gen_test.go new file mode 100644 index 000000000..89b02356b --- /dev/null +++ b/pkg/id/infobox_field_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewInfoboxFieldID(t *testing.T) { + id := NewInfoboxFieldID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestInfoboxFieldIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result InfoboxFieldID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result InfoboxFieldID + err error + }{ + InfoboxFieldID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result InfoboxFieldID + err error + }{ + InfoboxFieldID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result InfoboxFieldID + err error + }{ + InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := InfoboxFieldIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustInfoboxFieldID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected InfoboxFieldID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { 
MustBeID(tc.input) }) + return + } + result := MustInfoboxFieldID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestInfoboxFieldIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *InfoboxFieldID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := InfoboxFieldIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestInfoboxFieldIDFromRefID(t *testing.T) { + id := New() + + subId := InfoboxFieldIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestInfoboxFieldID_ID(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestInfoboxFieldID_String(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestInfoboxFieldID_GoString(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.InfoboxFieldID("+id.String()+")") +} + +func TestInfoboxFieldID_RefString(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestInfoboxFieldID_Ref(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestInfoboxFieldID_CopyRef(t *testing.T) { + id := New() + subId := 
InfoboxFieldIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestInfoboxFieldID_IDRef(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestInfoboxFieldID_StringRef(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestInfoboxFieldID_MarhsalJSON(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestInfoboxFieldID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &InfoboxFieldID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestInfoboxFieldID_MarshalText(t *testing.T) { + id := New() + subId := InfoboxFieldIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestInfoboxFieldID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &InfoboxFieldID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestInfoboxFieldID_IsNil(t *testing.T) { + subId := InfoboxFieldID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *InfoboxFieldIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestInfoboxFieldIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []InfoboxFieldID + expected []string + }{ + { + name: "Empty slice", + input: make([]InfoboxFieldID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: 
[]InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, InfoboxFieldIDToKeys(tc.input)) + }) + } + +} + +func TestInfoboxFieldIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []InfoboxFieldID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []InfoboxFieldID + err error + }{ + res: make([]InfoboxFieldID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []InfoboxFieldID + err error + }{ + res: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []InfoboxFieldID + err error + }{ + res: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []InfoboxFieldID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt 
*testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := InfoboxFieldIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := InfoboxFieldIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestInfoboxFieldIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []InfoboxFieldID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]InfoboxFieldID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := InfoboxFieldIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestInfoboxFieldIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []InfoboxFieldID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]InfoboxFieldID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []InfoboxFieldID{MustInfoboxFieldID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []InfoboxFieldID{ + MustInfoboxFieldID(id1.String()), + MustInfoboxFieldID(id2.String()), + MustInfoboxFieldID(id3.String()), + }, + 
}, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := InfoboxFieldIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestInfoboxFieldIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []InfoboxFieldID + expected []ID + }{ + { + name: "Empty slice", + input: make([]InfoboxFieldID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := InfoboxFieldIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestInfoboxFieldIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustInfoboxFieldID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustInfoboxFieldID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustInfoboxFieldID(id3.String()) + + testCases := []struct { + name string + input []*InfoboxFieldID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*InfoboxFieldID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*InfoboxFieldID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*InfoboxFieldID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + 
t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := InfoboxFieldIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewInfoboxFieldIDSet(t *testing.T) { + InfoboxFieldIdSet := NewInfoboxFieldIDSet() + + assert.NotNil(t, InfoboxFieldIdSet) + assert.Empty(t, InfoboxFieldIdSet.m) + assert.Empty(t, InfoboxFieldIdSet.s) +} + +func TestInfoboxFieldIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []InfoboxFieldID + expected *InfoboxFieldIDSet + }{ + { + name: "Empty slice", + input: make([]InfoboxFieldID, 0), + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{ + 
MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewInfoboxFieldIDSet() + set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestInfoboxFieldIDSet_AddRef(t *testing.T) { + t.Parallel() + + InfoboxFieldId := MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *InfoboxFieldID + expected *InfoboxFieldIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &InfoboxFieldIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &InfoboxFieldId, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewInfoboxFieldIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestInfoboxFieldIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + InfoboxFieldIDSet + InfoboxFieldID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + InfoboxFieldIDSet + InfoboxFieldID + }{InfoboxFieldIDSet: InfoboxFieldIDSet{}, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + InfoboxFieldIDSet + InfoboxFieldID + }{InfoboxFieldIDSet: InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + InfoboxFieldIDSet + InfoboxFieldID + }{InfoboxFieldIDSet: InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.InfoboxFieldIDSet.Has(tc.input.InfoboxFieldID)) + }) + } +} + +func TestInfoboxFieldIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input InfoboxFieldIDSet + expected InfoboxFieldIDSet + }{ + { + name: "Empty Set", + input: InfoboxFieldIDSet{}, + expected: InfoboxFieldIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: InfoboxFieldIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestInfoboxFieldIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *InfoboxFieldIDSet + expected []InfoboxFieldID + }{ + { + name: "Empty slice", + input: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{}, + s: nil, + }, + expected: make([]InfoboxFieldID, 0), + }, + { + name: "1 element", + input: &InfoboxFieldIDSet{ + m: 
map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestInfoboxFieldIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *InfoboxFieldIDSet + expected *InfoboxFieldIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewInfoboxFieldIDSet(), + }, + { + name: "Empty set", + input: NewInfoboxFieldIDSet(), + expected: NewInfoboxFieldIDSet(), + }, + { + name: "1 element", + input: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{ + 
MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestInfoboxFieldIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *InfoboxFieldIDSet + b *InfoboxFieldIDSet + } + expected *InfoboxFieldIDSet + }{ + { + name: "Empty Set", + input: struct { + a *InfoboxFieldIDSet + b *InfoboxFieldIDSet + }{ + a: &InfoboxFieldIDSet{}, + b: &InfoboxFieldIDSet{}, + }, + expected: &InfoboxFieldIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *InfoboxFieldIDSet + b *InfoboxFieldIDSet + }{ + a: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &InfoboxFieldIDSet{}, + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, 
+ }, + { + name: "2 non Empty Set", + input: struct { + a *InfoboxFieldIDSet + b *InfoboxFieldIDSet + }{ + a: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []InfoboxFieldID{ + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/layer_gen_test.go b/pkg/id/layer_gen_test.go new file mode 100644 index 000000000..a22ad38d0 --- /dev/null +++ b/pkg/id/layer_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewLayerID(t *testing.T) { + id := NewLayerID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestLayerIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result LayerID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result LayerID + err error + }{ + LayerID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result LayerID + err error + }{ + LayerID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result LayerID + err error + }{ + LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := LayerIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustLayerID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected LayerID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustLayerID(tc.input) + assert.Equal(tt, tc.expected, 
result) + }) + } +} + +func TestLayerIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *LayerID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := LayerIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestLayerIDFromRefID(t *testing.T) { + id := New() + + subId := LayerIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestLayerID_ID(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestLayerID_String(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestLayerID_GoString(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.LayerID("+id.String()+")") +} + +func TestLayerID_RefString(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestLayerID_Ref(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestLayerID_CopyRef(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestLayerID_IDRef(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + 
assert.Equal(t, id, *subId.IDRef()) +} + +func TestLayerID_StringRef(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestLayerID_MarhsalJSON(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestLayerID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &LayerID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestLayerID_MarshalText(t *testing.T) { + id := New() + subId := LayerIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestLayerID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &LayerID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestLayerID_IsNil(t *testing.T) { + subId := LayerID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *LayerIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestLayerIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []LayerID + expected []string + }{ + { + name: "Empty slice", + input: make([]LayerID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + 
"01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, LayerIDToKeys(tc.input)) + }) + } + +} + +func TestLayerIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []LayerID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []LayerID + err error + }{ + res: make([]LayerID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []LayerID + err error + }{ + res: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []LayerID + err error + }{ + res: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []LayerID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := LayerIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := LayerIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestLayerIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []LayerID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]LayerID, 0), + }, + { + name: "1 element", + 
input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := LayerIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestLayerIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []LayerID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]LayerID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []LayerID{MustLayerID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []LayerID{ + MustLayerID(id1.String()), + MustLayerID(id2.String()), + MustLayerID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := LayerIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestLayerIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []LayerID + expected []ID + }{ + { + name: "Empty slice", + input: make([]LayerID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := LayerIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestLayerIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustLayerID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustLayerID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustLayerID(id3.String()) + + testCases := []struct { + name string + input []*LayerID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*LayerID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*LayerID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*LayerID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := LayerIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewLayerIDSet(t *testing.T) { + LayerIdSet := NewLayerIDSet() + + assert.NotNil(t, LayerIdSet) + assert.Empty(t, LayerIdSet.m) + assert.Empty(t, LayerIdSet.s) +} + +func TestLayerIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []LayerID + expected *LayerIDSet + }{ + { + name: "Empty slice", + input: make([]LayerID, 0), + expected: &LayerIDSet{ + m: map[LayerID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewLayerIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestLayerIDSet_AddRef(t *testing.T) { + t.Parallel() + + LayerId := MustLayerID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *LayerID + expected *LayerIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &LayerIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &LayerId, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewLayerIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestLayerIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + LayerIDSet + LayerID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + LayerIDSet + LayerID + }{LayerIDSet: LayerIDSet{}, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + LayerIDSet + LayerID + }{LayerIDSet: LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + LayerIDSet + LayerID + }{LayerIDSet: LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.LayerIDSet.Has(tc.input.LayerID)) + }) + } +} + +func 
TestLayerIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input LayerIDSet + expected LayerIDSet + }{ + { + name: "Empty Set", + input: LayerIDSet{}, + expected: LayerIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: LayerIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestLayerIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *LayerIDSet + expected []LayerID + }{ + { + name: "Empty slice", + input: &LayerIDSet{ + m: map[LayerID]struct{}{}, + s: nil, + }, + expected: make([]LayerID, 0), + }, + { + name: "1 element", + input: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &LayerIDSet{ + m: map[LayerID]struct{}{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func 
TestLayerIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *LayerIDSet + expected *LayerIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewLayerIDSet(), + }, + { + name: "Empty set", + input: NewLayerIDSet(), + expected: NewLayerIDSet(), + }, + { + name: "1 element", + input: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &LayerIDSet{ + m: map[LayerID]struct{}{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestLayerIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *LayerIDSet + b *LayerIDSet + } + expected *LayerIDSet + }{ + { + name: "Empty Set", + input: struct { + a *LayerIDSet + b *LayerIDSet + }{ + a: &LayerIDSet{}, + b: 
&LayerIDSet{}, + }, + expected: &LayerIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *LayerIDSet + b *LayerIDSet + }{ + a: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &LayerIDSet{}, + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *LayerIDSet + b *LayerIDSet + }{ + a: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []LayerID{ + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/plugin_extension_test.go b/pkg/id/plugin_extension_test.go new file mode 100644 index 000000000..02f95aac0 --- /dev/null +++ b/pkg/id/plugin_extension_test.go @@ -0,0 +1,63 @@ +package id + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPluginExtensionIDFromRef(t *testing.T) { + t.Parallel() + input1 := "testStringId" + expected1 := PluginExtensionID(input1) + testCases := []struct { + name string + input *string + expected *PluginExtensionID + }{ + { + name: "success:string input", + input: &input1, + expected: &expected1, + }, + { + name: "fail:nil 
pointer", + input: nil, + expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := PluginExtensionIDFromRef(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestPluginExtensionID_Ref(t *testing.T) { + pluginExtensionID := PluginExtensionID("test") + + assert.Equal(t, &pluginExtensionID, pluginExtensionID.Ref()) +} + +func TestPluginExtensionID_CopyRef(t *testing.T) { + pluginExtensionID := PluginExtensionID("test") + + assert.Equal(t, pluginExtensionID, *pluginExtensionID.CopyRef()) + + assert.False(t, pluginExtensionID.Ref() == pluginExtensionID.CopyRef()) +} + +func TestPluginExtensionID_String(t *testing.T) { + pluginExtensionID := PluginExtensionID("test") + + assert.Equal(t, "test", pluginExtensionID.String()) +} + +func TestPluginExtensionID_StringRef(t *testing.T) { + pluginExtensionID := PluginExtensionID("test") + + assert.Equal(t, "test", *pluginExtensionID.StringRef()) +} diff --git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go index 8a3fca172..2691aefc0 100644 --- a/pkg/id/plugin_test.go +++ b/pkg/id/plugin_test.go @@ -2,9 +2,11 @@ package id import ( "encoding" + "errors" "strings" "testing" + "github.com/blang/semver" "github.com/stretchr/testify/assert" ) @@ -12,13 +14,620 @@ var _ encoding.TextMarshaler = (*PluginID)(nil) var _ encoding.TextUnmarshaler = (*PluginID)(nil) func TestPluginIDValidator(t *testing.T) { - assert.True(t, validatePluginName("1cc.1_c-d"), "1cc.1_c-d") - assert.True(t, validatePluginName(strings.Repeat("a", 100)), "100 chars") - assert.False(t, validatePluginName(""), "empty") - assert.False(t, validatePluginName(" "), "space") - assert.False(t, validatePluginName("@bbb/aa-a_a"), "@bbb/aa-a_a") - assert.False(t, validatePluginName("bbb a"), "bbb a") - assert.False(t, validatePluginName("cccd="), "cccd=") - assert.False(t, validatePluginName("reearth"), "reearth") - assert.False(t, validatePluginName(strings.Repeat("a", 101)), 
"over 100 chars") + t.Parallel() + testCases := []struct { + name string + input string + expected bool + }{ + { + name: "accepted name", + input: "1cc.1_c-d", + expected: true, + }, + { + name: "les then 100", + input: strings.Repeat("a", 100), + expected: true, + }, + { + name: "empty", + input: "", + expected: false, + }, + { + name: "spaces", + input: " ", + expected: false, + }, + { + name: "contains not accepted characters", + input: "@bbb/aa-a_a", + expected: false, + }, + { + name: "contain space", + input: "bbb a", + expected: false, + }, + { + name: "contain =", + input: "cccd=", + expected: false, + }, + { + name: "contains reearth reserved key word", + input: "reearth", + expected: false, + }, + { + name: "more than 100 char", + input: strings.Repeat("a", 101), + expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, validatePluginName(tc.input)) + }) + } +} + +func TestPluginIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + err error + result PluginID + } + }{ + { + name: "success:accepted name", + input: "1cc.1_c-d#1.0.0", + expected: struct { + err error + result PluginID + }{ + err: nil, + result: PluginID{ + name: "1cc.1_c-d", + version: "1.0.0", + sys: false, + }, + }, + }, + { + name: "success:official plugin id", + input: officialPluginIDStr, + expected: struct { + err error + result PluginID + }{ + err: nil, + result: PluginID{ + name: officialPluginIDStr, + version: "", + sys: true, + }, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d/?s#1.0.0", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + { + name: "fail:not valid name", + input: 
"1cc.1_c-d/?s#1", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, _ := PluginIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + }) + } +} + +func TestMustPluginID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + err error + result PluginID + } + }{ + { + name: "success:accepted name", + input: "1cc.1_c-d#1.0.0", + expected: struct { + err error + result PluginID + }{ + err: nil, + result: PluginID{ + name: "1cc.1_c-d", + version: "1.0.0", + sys: false, + }, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d/?s#1.0.0", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d/?s#1", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + if tc.expected.err != nil { + assert.Panics(tt, func() { + _ = MustPluginID(tc.input) + }) + } else { + result := MustPluginID(tc.input) + assert.Equal(tt, tc.expected.result, result) + } + }) + } +} + +func TestPluginIDFromRef(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + err error + result PluginID + } + }{ + { + name: "success:accepted name", + input: "1cc.1_c-d#1.0.0", + expected: struct { + err error + result PluginID + }{ + err: nil, + result: PluginID{ + name: "1cc.1_c-d", + version: "1.0.0", + sys: false, + }, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d", + 
expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d/?s#1.0.0", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + { + name: "fail:not valid name", + input: "1cc.1_c-d/?s#1", + expected: struct { + err error + result PluginID + }{ + err: ErrInvalidID, + result: PluginID{}, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + if tc.expected.err != nil { + result := PluginIDFromRef(&tc.input) + assert.Nil(tt, result) + } else { + result := PluginIDFromRef(&tc.input) + assert.Equal(tt, tc.expected.result, *result) + } + }) + } +} + +func TestPluginID_Name(t *testing.T) { + plugin := MustPluginID("MyPlugin#1.0.0") + + assert.Equal(t, "MyPlugin", plugin.Name()) +} + +func TestPluginID_Version(t *testing.T) { + plugin := MustPluginID("MyPlugin#1.0.0") + + assert.Equal(t, semver.MustParse("1.0.0"), plugin.Version()) +} + +func TestPluginID_System(t *testing.T) { + plugin := MustPluginID("MyPlugin#1.0.0") + + assert.False(t, plugin.System()) + + plugin = MustPluginID(officialPluginIDStr) + + assert.True(t, plugin.System()) +} + +func TestPluginID_Validate(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input PluginID + expected bool + }{ + { + name: "success:accepted name", + input: PluginID{ + name: "1cc.1_c-d", + version: "1.0.0", + sys: false, + }, + expected: true, + }, + { + name: "success:accepted name", + input: PluginID{ + name: "1cc.1/?_c-d", + version: "1.0.0", + sys: false, + }, + expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + assert.Equal(tt, tc.expected, tc.input.Validate()) + }) + } +} + +func TestPluginID_String(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input PluginID + expected string + }{ + { + name: 
"success:accepted name", + input: PluginID{ + name: "ppl", + version: "1.0.0", + sys: false, + }, + expected: "ppl#1.0.0", + }, + { + name: "success:accepted name", + input: PluginID{ + name: "plg", + version: "2.1.0-beta", + sys: false, + }, + expected: "plg#2.1.0-beta", + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + assert.Equal(tt, tc.expected, tc.input.String()) + }) + } +} + +func TestPluginID_Ref(t *testing.T) { + pluginID := MustPluginID("Test#1.0.0") + + ref := pluginID.Ref() + + assert.Equal(t, *ref, pluginID) +} + +func TestPluginID_CopyRef(t *testing.T) { + pluginID := MustPluginID("Test#1.0.0") + + ref := pluginID.Ref() + + ref2 := ref.CopyRef() + + assert.Equal(t, *ref, pluginID) + assert.Equal(t, *ref2, pluginID) + assert.Equal(t, *ref, *ref2) +} + +func TestPluginID_StringRef(t *testing.T) { + pluginID := MustPluginID("Test#1.0.0") + + ref := pluginID.Ref() + + strRef := ref.StringRef() + + assert.Equal(t, pluginID.String(), *strRef) +} + +func TestPluginID_Equal(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input struct { + pluginID1 PluginID + pluginID2 PluginID + } + expected bool + }{ + { + name: "Equal", + input: struct { + pluginID1 PluginID + pluginID2 PluginID + }{ + pluginID1: MustPluginID("Test#1.0.0"), + pluginID2: MustPluginID("Test#1.0.0"), + }, + expected: true, + }, + { + name: "Equal", + input: struct { + pluginID1 PluginID + pluginID2 PluginID + }{ + pluginID1: MustPluginID("Test#1.0.0"), + pluginID2: MustPluginID("Test#1.0.1"), + }, + expected: false, + }, + { + name: "Equal", + input: struct { + pluginID1 PluginID + pluginID2 PluginID + }{ + pluginID1: MustPluginID("Test0#1.0.0"), + pluginID2: MustPluginID("Test1#1.0.0"), + }, + expected: false, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.pluginID1.Equal(tc.input.pluginID2)) + assert.Equal(tt, tc.expected, 
tc.input.pluginID2.Equal(tc.input.pluginID1)) + }) + } + +} + +func TestPluginID_MarshalText(t *testing.T) { + pluginIdRef := MustPluginID("Test#1.0.0").Ref() + + res, err := pluginIdRef.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte("Test#1.0.0"), res) +} + +func TestPluginID_UnmarshalText(t *testing.T) { + text := []byte("Test#1.0.0") + + pluginId := &PluginID{} + + err := pluginId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "Test#1.0.0", pluginId.String()) + +} + +func TestPluginIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []PluginID + expected []string + }{ + { + name: "Empty slice", + input: make([]PluginID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []PluginID{MustPluginID("Test#1.0.0")}, + expected: []string{"Test#1.0.0"}, + }, + { + name: "multiple elements", + input: []PluginID{ + MustPluginID("Test#1.0.0"), + MustPluginID("Test#1.0.1"), + MustPluginID("Test#1.0.2"), + }, + expected: []string{ + "Test#1.0.0", + "Test#1.0.1", + "Test#1.0.2", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, PluginIDToKeys(tc.input)) + }) + } + +} + +func TestPluginIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []PluginID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []PluginID + err error + }{ + res: make([]PluginID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"Test#1.0.0"}, + expected: struct { + res []PluginID + err error + }{ + res: []PluginID{MustPluginID("Test#1.0.0")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "Test#1.0.0", + "Test#1.0.1", + "Test#1.0.2", + }, + expected: struct { + res []PluginID + err error + }{ + res: []PluginID{ + MustPluginID("Test#1.0.0"), + 
MustPluginID("Test#1.0.1"), + MustPluginID("Test#1.0.2"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "Test#1.0.0", + "Test#1.0.1", + "Test#1.0.2", + }, + expected: struct { + res []PluginID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := PluginIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := PluginIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } } diff --git a/pkg/id/project_gen_test.go b/pkg/id/project_gen_test.go new file mode 100644 index 000000000..2a7a07fb6 --- /dev/null +++ b/pkg/id/project_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewProjectID(t *testing.T) { + id := NewProjectID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestProjectIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result ProjectID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result ProjectID + err error + }{ + ProjectID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result ProjectID + err error + }{ + ProjectID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result ProjectID + err error + }{ + ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := 
ProjectIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustProjectID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected ProjectID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustProjectID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestProjectIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *ProjectID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := ProjectIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestProjectIDFromRefID(t *testing.T) { + id := New() + + subId := ProjectIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestProjectID_ID(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestProjectID_String(t *testing.T) { + id := New() 
+ subId := ProjectIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestProjectID_GoString(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.ProjectID("+id.String()+")") +} + +func TestProjectID_RefString(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestProjectID_Ref(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestProjectID_CopyRef(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestProjectID_IDRef(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestProjectID_StringRef(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestProjectID_MarhsalJSON(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestProjectID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &ProjectID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestProjectID_MarshalText(t *testing.T) { + id := New() + subId := ProjectIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestProjectID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &ProjectID{} + + err := subId.UnmarshalText(text) 
+ + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestProjectID_IsNil(t *testing.T) { + subId := ProjectID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *ProjectIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestProjectIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ProjectID + expected []string + }{ + { + name: "Empty slice", + input: make([]ProjectID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, ProjectIDToKeys(tc.input)) + }) + } + +} + +func TestProjectIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []ProjectID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []ProjectID + err error + }{ + res: make([]ProjectID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []ProjectID + err error + }{ + res: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []ProjectID + err error + }{ + res: []ProjectID{ + 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []ProjectID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := ProjectIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := ProjectIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestProjectIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []ProjectID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]ProjectID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ProjectIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestProjectIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []ProjectID + }{ + 
{ + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]ProjectID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []ProjectID{MustProjectID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []ProjectID{ + MustProjectID(id1.String()), + MustProjectID(id2.String()), + MustProjectID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ProjectIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestProjectIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []ProjectID + expected []ID + }{ + { + name: "Empty slice", + input: make([]ProjectID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ProjectIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestProjectIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustProjectID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustProjectID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustProjectID(id3.String()) + + testCases := []struct { + name string + input []*ProjectID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*ProjectID, 0), + expected: 
make([]*ID, 0), + }, + { + name: "1 element", + input: []*ProjectID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*ProjectID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ProjectIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewProjectIDSet(t *testing.T) { + ProjectIdSet := NewProjectIDSet() + + assert.NotNil(t, ProjectIdSet) + assert.Empty(t, ProjectIdSet.m) + assert.Empty(t, ProjectIdSet.s) +} + +func TestProjectIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []ProjectID + expected *ProjectIDSet + }{ + { + name: "Empty slice", + input: make([]ProjectID, 0), + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewProjectIDSet() + set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestProjectIDSet_AddRef(t *testing.T) { + t.Parallel() + + ProjectId := MustProjectID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *ProjectID + expected *ProjectIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &ProjectIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &ProjectId, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewProjectIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestProjectIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + ProjectIDSet + ProjectID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + ProjectIDSet + ProjectID + }{ProjectIDSet: ProjectIDSet{}, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + ProjectIDSet + ProjectID + }{ProjectIDSet: ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + 
expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + ProjectIDSet + ProjectID + }{ProjectIDSet: ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.ProjectIDSet.Has(tc.input.ProjectID)) + }) + } +} + +func TestProjectIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input ProjectIDSet + expected ProjectIDSet + }{ + { + name: "Empty Set", + input: ProjectIDSet{}, + expected: ProjectIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: ProjectIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestProjectIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *ProjectIDSet + expected []ProjectID + }{ + { + name: "Empty slice", + input: &ProjectIDSet{ + m: map[ProjectID]struct{}{}, + s: nil, + }, + expected: make([]ProjectID, 0), + }, + { + name: "1 element", + input: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &ProjectIDSet{ + m: map[ProjectID]struct{}{ + 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestProjectIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *ProjectIDSet + expected *ProjectIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewProjectIDSet(), + }, + { + name: "Empty set", + input: NewProjectIDSet(), + expected: NewProjectIDSet(), + }, + { + name: "1 element", + input: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &ProjectIDSet{ + m: map[ProjectID]struct{}{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): 
struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestProjectIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *ProjectIDSet + b *ProjectIDSet + } + expected *ProjectIDSet + }{ + { + name: "Empty Set", + input: struct { + a *ProjectIDSet + b *ProjectIDSet + }{ + a: &ProjectIDSet{}, + b: &ProjectIDSet{}, + }, + expected: &ProjectIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *ProjectIDSet + b *ProjectIDSet + }{ + a: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &ProjectIDSet{}, + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *ProjectIDSet + b *ProjectIDSet + }{ + a: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []ProjectID{ + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/property_gen_test.go b/pkg/id/property_gen_test.go new file mode 100644 index 000000000..a3fa5bf00 --- /dev/null +++ b/pkg/id/property_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewPropertyID(t *testing.T) { + id := NewPropertyID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestPropertyIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result PropertyID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result PropertyID + err error + }{ + PropertyID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result PropertyID + err error + }{ + PropertyID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result PropertyID + err error + }{ + PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := PropertyIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustPropertyID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected PropertyID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, 
+ }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustPropertyID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestPropertyIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *PropertyID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := PropertyIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestPropertyIDFromRefID(t *testing.T) { + id := New() + + subId := PropertyIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestPropertyID_ID(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestPropertyID_String(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestPropertyID_GoString(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.PropertyID("+id.String()+")") +} + +func TestPropertyID_RefString(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + refString := 
subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestPropertyID_Ref(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestPropertyID_CopyRef(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.False(t, subId == subIdCopyRef) +} + +func TestPropertyID_IDRef(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestPropertyID_StringRef(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestPropertyID_MarhsalJSON(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestPropertyID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &PropertyID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestPropertyID_MarshalText(t *testing.T) { + id := New() + subId := PropertyIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestPropertyID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &PropertyID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestPropertyID_IsNil(t *testing.T) { + subId := PropertyID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *PropertyIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestPropertyIDToKeys(t *testing.T) { + t.Parallel() + 
testCases := []struct { + name string + input []PropertyID + expected []string + }{ + { + name: "Empty slice", + input: make([]PropertyID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, PropertyIDToKeys(tc.input)) + }) + } + +} + +func TestPropertyIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []PropertyID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []PropertyID + err error + }{ + res: make([]PropertyID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []PropertyID + err error + }{ + res: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []PropertyID + err error + }{ + res: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []PropertyID + 
err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := PropertyIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := PropertyIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestPropertyIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []PropertyID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]PropertyID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestPropertyIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []PropertyID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]PropertyID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []PropertyID{MustPropertyID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []PropertyID{ + MustPropertyID(id1.String()), + 
MustPropertyID(id2.String()), + MustPropertyID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestPropertyIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []PropertyID + expected []ID + }{ + { + name: "Empty slice", + input: make([]PropertyID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestPropertyIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustPropertyID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustPropertyID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustPropertyID(id3.String()) + + testCases := []struct { + name string + input []*PropertyID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*PropertyID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*PropertyID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*PropertyID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, 
func(tt *testing.T) { + tt.Parallel() + + res := PropertyIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewPropertyIDSet(t *testing.T) { + PropertyIdSet := NewPropertyIDSet() + + assert.NotNil(t, PropertyIdSet) + assert.Empty(t, PropertyIdSet.m) + assert.Empty(t, PropertyIdSet.s) +} + +func TestPropertyIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []PropertyID + expected *PropertyIDSet + }{ + { + name: "Empty slice", + input: make([]PropertyID, 0), + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewPropertyIDSet() + set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestPropertyIDSet_AddRef(t *testing.T) { + t.Parallel() + + PropertyId := MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *PropertyID + expected *PropertyIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &PropertyIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &PropertyId, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewPropertyIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestPropertyIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + PropertyIDSet + PropertyID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + PropertyIDSet + PropertyID + }{PropertyIDSet: PropertyIDSet{}, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + PropertyIDSet + PropertyID + }{PropertyIDSet: PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + PropertyIDSet + PropertyID + }{PropertyIDSet: PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.PropertyIDSet.Has(tc.input.PropertyID)) + }) + } +} + +func TestPropertyIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input PropertyIDSet + expected PropertyIDSet + }{ + { + name: "Empty Set", + input: PropertyIDSet{}, + expected: PropertyIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: PropertyIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestPropertyIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *PropertyIDSet + expected []PropertyID + }{ + { + name: "Empty slice", + input: &PropertyIDSet{ + m: map[PropertyID]struct{}{}, + s: nil, + }, + expected: make([]PropertyID, 0), + }, + { + name: "1 element", + input: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &PropertyIDSet{ + m: map[PropertyID]struct{}{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyID{ + 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestPropertyIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *PropertyIDSet + expected *PropertyIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewPropertyIDSet(), + }, + { + name: "Empty set", + input: NewPropertyIDSet(), + expected: NewPropertyIDSet(), + }, + { + name: "1 element", + input: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &PropertyIDSet{ + m: map[PropertyID]struct{}{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestPropertyIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *PropertyIDSet + b *PropertyIDSet + } + expected *PropertyIDSet + }{ + { + name: "Empty Set", + input: struct { + a *PropertyIDSet + b *PropertyIDSet + }{ + a: &PropertyIDSet{}, + b: &PropertyIDSet{}, + }, + expected: &PropertyIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *PropertyIDSet + b *PropertyIDSet + }{ + a: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &PropertyIDSet{}, + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *PropertyIDSet + b *PropertyIDSet + }{ + a: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []PropertyID{ + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := 
tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/property_item_gen_test.go b/pkg/id/property_item_gen_test.go new file mode 100644 index 000000000..96e0617e2 --- /dev/null +++ b/pkg/id/property_item_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewPropertyItemID(t *testing.T) { + id := NewPropertyItemID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestPropertyItemIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result PropertyItemID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result PropertyItemID + err error + }{ + PropertyItemID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result PropertyItemID + err error + }{ + PropertyItemID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result PropertyItemID + err error + }{ + PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := PropertyItemIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustPropertyItemID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected PropertyItemID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid 
string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustPropertyItemID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestPropertyItemIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *PropertyItemID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := PropertyItemIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestPropertyItemIDFromRefID(t *testing.T) { + id := New() + + subId := PropertyItemIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestPropertyItemID_ID(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestPropertyItemID_String(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestPropertyItemID_GoString(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.PropertyItemID("+id.String()+")") +} + +func TestPropertyItemID_RefString(t *testing.T) { + id := New() + subId := 
PropertyItemIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestPropertyItemID_Ref(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestPropertyItemID_CopyRef(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestPropertyItemID_IDRef(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestPropertyItemID_StringRef(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestPropertyItemID_MarhsalJSON(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestPropertyItemID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &PropertyItemID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestPropertyItemID_MarshalText(t *testing.T) { + id := New() + subId := PropertyItemIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestPropertyItemID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &PropertyItemID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestPropertyItemID_IsNil(t *testing.T) { + subId := PropertyItemID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = 
*PropertyItemIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestPropertyItemIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []PropertyItemID + expected []string + }{ + { + name: "Empty slice", + input: make([]PropertyItemID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, PropertyItemIDToKeys(tc.input)) + }) + } + +} + +func TestPropertyItemIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []PropertyItemID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []PropertyItemID + err error + }{ + res: make([]PropertyItemID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []PropertyItemID + err error + }{ + res: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []PropertyItemID + err error + }{ + res: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, 
+ err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []PropertyItemID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := PropertyItemIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := PropertyItemIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestPropertyItemIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []PropertyItemID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]PropertyItemID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyItemIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestPropertyItemIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []PropertyItemID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: 
make([]PropertyItemID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []PropertyItemID{MustPropertyItemID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []PropertyItemID{ + MustPropertyItemID(id1.String()), + MustPropertyItemID(id2.String()), + MustPropertyItemID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyItemIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestPropertyItemIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []PropertyItemID + expected []ID + }{ + { + name: "Empty slice", + input: make([]PropertyItemID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyItemIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestPropertyItemIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustPropertyItemID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustPropertyItemID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustPropertyItemID(id3.String()) + + testCases := []struct { + name string + input []*PropertyItemID + expected []*ID + }{ + { + name: "Empty 
slice", + input: make([]*PropertyItemID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*PropertyItemID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*PropertyItemID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := PropertyItemIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewPropertyItemIDSet(t *testing.T) { + PropertyItemIdSet := NewPropertyItemIDSet() + + assert.NotNil(t, PropertyItemIdSet) + assert.Empty(t, PropertyItemIdSet.m) + assert.Empty(t, PropertyItemIdSet.s) +} + +func TestPropertyItemIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []PropertyItemID + expected *PropertyItemIDSet + }{ + { + name: "Empty slice", + input: make([]PropertyItemID, 0), + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + 
MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewPropertyItemIDSet() + set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestPropertyItemIDSet_AddRef(t *testing.T) { + t.Parallel() + + PropertyItemId := MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *PropertyItemID + expected *PropertyItemIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &PropertyItemIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &PropertyItemId, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewPropertyItemIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestPropertyItemIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + PropertyItemIDSet + PropertyItemID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + PropertyItemIDSet + PropertyItemID + }{PropertyItemIDSet: PropertyItemIDSet{}, PropertyItemID: 
MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + PropertyItemIDSet + PropertyItemID + }{PropertyItemIDSet: PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + PropertyItemIDSet + PropertyItemID + }{PropertyItemIDSet: PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.PropertyItemIDSet.Has(tc.input.PropertyItemID)) + }) + } +} + +func TestPropertyItemIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input PropertyItemIDSet + expected PropertyItemIDSet + }{ + { + name: "Empty Set", + input: PropertyItemIDSet{}, + expected: PropertyItemIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: PropertyItemIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestPropertyItemIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input 
*PropertyItemIDSet + expected []PropertyItemID + }{ + { + name: "Empty slice", + input: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{}, + s: nil, + }, + expected: make([]PropertyItemID, 0), + }, + { + name: "1 element", + input: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestPropertyItemIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *PropertyItemIDSet + expected *PropertyItemIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewPropertyItemIDSet(), + }, + { + name: "Empty set", + input: NewPropertyItemIDSet(), + expected: NewPropertyItemIDSet(), + }, + { + name: "1 element", + input: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &PropertyItemIDSet{ + m: 
map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestPropertyItemIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *PropertyItemIDSet + b *PropertyItemIDSet + } + expected *PropertyItemIDSet + }{ + { + name: "Empty Set", + input: struct { + a *PropertyItemIDSet + b *PropertyItemIDSet + }{ + a: &PropertyItemIDSet{}, + b: &PropertyItemIDSet{}, + }, + expected: &PropertyItemIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *PropertyItemIDSet + b *PropertyItemIDSet + }{ + a: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: 
[]PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &PropertyItemIDSet{}, + }, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *PropertyItemIDSet + b *PropertyItemIDSet + }{ + a: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []PropertyItemID{ + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/property_schema_field_test.go b/pkg/id/property_schema_field_test.go new file mode 100644 index 000000000..0f9850777 --- /dev/null +++ b/pkg/id/property_schema_field_test.go @@ -0,0 +1,63 @@ +package id + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPropertySchemaFieldIDFrom(t *testing.T) { + t.Parallel() + input1 := "testStringId" + expected1 := PropertySchemaFieldID(input1) + testCases := []struct { + name string + input *string + expected *PropertySchemaFieldID + }{ + { + name: "success:string input", + input: &input1, + expected: &expected1, + }, + { + name: "fail:nil pointer", + input: nil, + 
expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := PropertySchemaFieldIDFrom(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestPropertySchemaFieldID_Ref(t *testing.T) { + propertySchemaFieldID := PropertySchemaFieldID("test") + + assert.Equal(t, &propertySchemaFieldID, propertySchemaFieldID.Ref()) +} + +func TestPropertySchemaFieldID_CopyRef(t *testing.T) { + propertySchemaFieldID := PropertySchemaFieldID("test") + + assert.Equal(t, propertySchemaFieldID, *propertySchemaFieldID.CopyRef()) + + assert.False(t, propertySchemaFieldID.Ref() == propertySchemaFieldID.CopyRef()) +} + +func TestPropertySchemaFieldID_String(t *testing.T) { + propertySchemaFieldID := PropertySchemaFieldID("test") + + assert.Equal(t, "test", propertySchemaFieldID.String()) +} + +func TestPropertySchemaFieldID_StringRef(t *testing.T) { + propertySchemaFieldID := PropertySchemaFieldID("test") + + assert.Equal(t, "test", *propertySchemaFieldID.StringRef()) +} diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index b8dc2eb56..f40f881dc 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -2,7 +2,430 @@ package id import ( "encoding" + "errors" + "testing" + + "github.com/stretchr/testify/assert" ) var _ encoding.TextMarshaler = (*PropertySchemaID)(nil) var _ encoding.TextUnmarshaler = (*PropertySchemaID)(nil) + +func TestPropertySchemaIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result PropertySchemaID + err error + } + }{ + { + name: "success:valid name", + input: "test/Test_Test-01", + expected: struct { + result PropertySchemaID + err error + }{ + result: PropertySchemaID{ + plugin: "test", + id: "Test_Test-01", + }, + err: nil, + }, + }, + { + name: "fail:invalid name", + input: "Test", + expected: struct { + result PropertySchemaID + err error + }{result: 
PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail:invalid name", + input: "Test/+dsad", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail:invalid name", + input: "Test/dsa d", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := PropertySchemaIDFrom(tc.input) + if tc.expected.err != nil { + assert.Equal(tt, tc.expected.result, result) + assert.True(tt, errors.As(tc.expected.err, &err)) + } else { + assert.Equal(tt, tc.expected.result, result) + assert.Nil(tt, err) + } + + }) + } +} + +func TestPropertySchemaIDFromExtension(t *testing.T) { + pluginID := MustPluginID("test#2.0.0") + pluginExtensionID := PluginExtensionID("test2") + + propertySchemaID, err := PropertySchemaIDFromExtension(pluginID, pluginExtensionID) + + assert.NotNil(t, propertySchemaID) + assert.Equal(t, PropertySchemaID{ + plugin: "test#2.0.0", + id: "test2", + }, propertySchemaID) + assert.Nil(t, err) +} + +func TestMustPropertySchemaID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result PropertySchemaID + err error + } + }{ + { + name: "success:valid name", + input: "test/Test_Test-01", + expected: struct { + result PropertySchemaID + err error + }{ + result: PropertySchemaID{ + plugin: "test", + id: "Test_Test-01", + }, + err: nil, + }, + }, + { + name: "fail:invalid name", + input: "Test", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail:invalid name", + input: "Test/+dsad", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail:invalid name", + input: "Test/dsa d", + expected: struct { + result 
PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + assert.Panics(tt, func() { + _ = MustPropertySchemaID(tc.input) + }) + } else { + result := MustPropertySchemaID(tc.input) + assert.Equal(tt, tc.expected.result, result) + } + + }) + } +} + +func TestMustPropertySchemaIDFromExtension(t *testing.T) { + pluginID := MustPluginID("test#2.0.0") + pluginExtensionID := PluginExtensionID("test2") + + propertySchemaID := MustPropertySchemaIDFromExtension(pluginID, pluginExtensionID) + + assert.NotNil(t, propertySchemaID) + assert.Equal(t, PropertySchemaID{ + plugin: "test#2.0.0", + id: "test2", + }, propertySchemaID) +} + +func TestPropertySchemaIDFromRef(t *testing.T) { + t.Parallel() + input1 := "test/Test_Test-01" + input2 := "Test" + input3 := "Test/+dsad" + input4 := "Test/dsa d" + testCases := []struct { + name string + input *string + expected *PropertySchemaID + }{ + { + name: "success:valid name", + input: &input1, + expected: &PropertySchemaID{ + plugin: "test", + id: "Test_Test-01", + }, + }, + { + name: "fail:invalid name", + input: &input2, + expected: nil, + }, + { + name: "fail:invalid name", + input: &input3, + expected: nil, + }, + { + name: "fail:invalid name", + input: &input4, + expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + result := PropertySchemaIDFromRef(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestPropertySchemaID_ID(t *testing.T) { + propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + + assert.Equal(t, propertySchemaID.ID(), "test") +} + +func TestPropertySchemaID_Plugin(t *testing.T) { + propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + + assert.Equal(t, propertySchemaID.Plugin(), "Test#2.0.0") +} + +func TestPropertySchemaID_System(t *testing.T) { + 
propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + + assert.False(t, propertySchemaID.System()) + + extinctionName := schemaSystemIDPrefix + propertySchemaID = MustPropertySchemaIDFromExtension(MustPluginID("test#2.0.0"), *PluginExtensionIDFromRef(&extinctionName)) + + assert.True(t, propertySchemaID.System()) + + propertySchemaID = MustPropertySchemaID("Test#2.0.0/" + schemaSystemIDPrefix) + + assert.True(t, propertySchemaID.System()) +} + +func TestPropertySchemaID_String(t *testing.T) { + propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + + assert.Equal(t, propertySchemaID.String(), "Test#2.0.0/test") +} + +func TestPropertySchemaID_Ref(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + + assert.Equal(t, &propertySchemaID, propertySchemaID.Ref()) +} + +func TestPropertySchemaID_CopyRef(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + + assert.Equal(t, propertySchemaID, *propertySchemaID.CopyRef()) + + assert.False(t, propertySchemaID.Ref() == propertySchemaID.CopyRef()) +} + +func TestPropertySchemaID_IsNil(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + + assert.False(t, propertySchemaID.IsNil()) + + propertySchemaID = PropertySchemaID{} + + assert.True(t, propertySchemaID.IsNil()) +} + +func TestPropertySchemaID_StringRef(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + + ref := &propertySchemaID + + assert.Equal(t, *ref.StringRef(), ref.String()) +} + +func TestPropertySchemaID_MarshalText(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + + res, err := propertySchemaID.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte("test#2.0.0/test"), res) +} + +func TestPropertySchemaID_UnmarshalText(t *testing.T) { + text := []byte("test#2.0.0/test") + + propertySchemaID := &PropertySchemaID{} + + err := propertySchemaID.UnmarshalText(text) + + assert.Nil(t, err) + 
assert.Equal(t, "test#2.0.0/test", propertySchemaID.String()) +} + +func TestPropertySchemaIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []PropertySchemaID + expected []string + }{ + { + name: "Empty slice", + input: make([]PropertySchemaID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []PropertySchemaID{MustPropertySchemaID("test#2.0.0/test")}, + expected: []string{"test#2.0.0/test"}, + }, + { + name: "multiple elements", + input: []PropertySchemaID{ + MustPropertySchemaID("Test#1.0.0/test"), + MustPropertySchemaID("Test#1.0.1/test"), + MustPropertySchemaID("Test#1.0.2/test"), + }, + expected: []string{ + "Test#1.0.0/test", + "Test#1.0.1/test", + "Test#1.0.2/test", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, PropertySchemaIDToKeys(tc.input)) + }) + } + +} + +func TestPropertySchemaIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []PropertySchemaID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []PropertySchemaID + err error + }{ + res: make([]PropertySchemaID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"Test#1.0.0/test"}, + expected: struct { + res []PropertySchemaID + err error + }{ + res: []PropertySchemaID{MustPropertySchemaID("Test#1.0.0/test")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "Test#1.0.0/test", + "Test#1.0.1/test", + "Test#1.0.2/test", + }, + expected: struct { + res []PropertySchemaID + err error + }{ + res: []PropertySchemaID{ + MustPropertySchemaID("Test#1.0.0/test"), + MustPropertySchemaID("Test#1.0.1/test"), + MustPropertySchemaID("Test#1.0.2/test"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "Test#1.0.0/test", + "Test#1.0.1/test", + 
"Test#1.0.2/test", + }, + expected: struct { + res []PropertySchemaID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := PropertySchemaIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := PropertySchemaIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} diff --git a/pkg/id/scene_gen_test.go b/pkg/id/scene_gen_test.go new file mode 100644 index 000000000..d8b3928c7 --- /dev/null +++ b/pkg/id/scene_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewSceneID(t *testing.T) { + id := NewSceneID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestSceneIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result SceneID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result SceneID + err error + }{ + SceneID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result SceneID + err error + }{ + SceneID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result SceneID + err error + }{ + SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := SceneIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustSceneID(t 
*testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected SceneID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustSceneID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestSceneIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *SceneID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := SceneIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestSceneIDFromRefID(t *testing.T) { + id := New() + + subId := SceneIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestSceneID_ID(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestSceneID_String(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestSceneID_GoString(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + assert.Equal(t, 
subId.GoString(), "id.SceneID("+id.String()+")") +} + +func TestSceneID_RefString(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestSceneID_Ref(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestSceneID_CopyRef(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestSceneID_IDRef(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestSceneID_StringRef(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestSceneID_MarhsalJSON(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestSceneID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &SceneID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestSceneID_MarshalText(t *testing.T) { + id := New() + subId := SceneIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestSceneID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &SceneID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestSceneID_IsNil(t *testing.T) { + subId := SceneID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *SceneIDFromRefID(&id) + 
+ assert.False(t, subId.IsNil()) +} + +func TestSceneIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []SceneID + expected []string + }{ + { + name: "Empty slice", + input: make([]SceneID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, SceneIDToKeys(tc.input)) + }) + } + +} + +func TestSceneIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []SceneID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []SceneID + err error + }{ + res: make([]SceneID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []SceneID + err error + }{ + res: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []SceneID + err error + }{ + res: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + 
expected: struct { + res []SceneID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := SceneIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := SceneIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestSceneIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []SceneID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]SceneID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := SceneIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestSceneIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []SceneID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]SceneID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []SceneID{MustSceneID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []SceneID{ + MustSceneID(id1.String()), + MustSceneID(id2.String()), + 
MustSceneID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := SceneIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestSceneIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []SceneID + expected []ID + }{ + { + name: "Empty slice", + input: make([]SceneID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := SceneIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestSceneIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustSceneID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustSceneID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustSceneID(id3.String()) + + testCases := []struct { + name string + input []*SceneID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*SceneID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*SceneID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*SceneID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := SceneIDsToIDRef(tc.input) + 
assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewSceneIDSet(t *testing.T) { + SceneIdSet := NewSceneIDSet() + + assert.NotNil(t, SceneIdSet) + assert.Empty(t, SceneIdSet.m) + assert.Empty(t, SceneIdSet.s) +} + +func TestSceneIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []SceneID + expected *SceneIDSet + }{ + { + name: "Empty slice", + input: make([]SceneID, 0), + expected: &SceneIDSet{ + m: map[SceneID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewSceneIDSet() + 
set.Add(tc.input...) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestSceneIDSet_AddRef(t *testing.T) { + t.Parallel() + + SceneId := MustSceneID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *SceneID + expected *SceneIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &SceneIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &SceneId, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewSceneIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestSceneIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + SceneIDSet + SceneID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + SceneIDSet + SceneID + }{SceneIDSet: SceneIDSet{}, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + SceneIDSet + SceneID + }{SceneIDSet: SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + SceneIDSet + SceneID + }{SceneIDSet: SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.SceneIDSet.Has(tc.input.SceneID)) + }) + } +} + 
+func TestSceneIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input SceneIDSet + expected SceneIDSet + }{ + { + name: "Empty Set", + input: SceneIDSet{}, + expected: SceneIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: SceneIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestSceneIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *SceneIDSet + expected []SceneID + }{ + { + name: "Empty slice", + input: &SceneIDSet{ + m: map[SceneID]struct{}{}, + s: nil, + }, + expected: make([]SceneID, 0), + }, + { + name: "1 element", + input: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &SceneIDSet{ + m: map[SceneID]struct{}{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + 
+func TestSceneIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *SceneIDSet + expected *SceneIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewSceneIDSet(), + }, + { + name: "Empty set", + input: NewSceneIDSet(), + expected: NewSceneIDSet(), + }, + { + name: "1 element", + input: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &SceneIDSet{ + m: map[SceneID]struct{}{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestSceneIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *SceneIDSet + b *SceneIDSet + } + expected *SceneIDSet + }{ + { + name: "Empty Set", + input: struct { + a *SceneIDSet + b *SceneIDSet + }{ + a: &SceneIDSet{}, + b: 
&SceneIDSet{}, + }, + expected: &SceneIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *SceneIDSet + b *SceneIDSet + }{ + a: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &SceneIDSet{}, + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *SceneIDSet + b *SceneIDSet + }{ + a: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []SceneID{ + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/team_gen_test.go b/pkg/id/team_gen_test.go new file mode 100644 index 000000000..80ca77e71 --- /dev/null +++ b/pkg/id/team_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewTeamID(t *testing.T) { + id := NewTeamID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestTeamIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result TeamID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result TeamID + err error + }{ + TeamID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result TeamID + err error + }{ + TeamID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result TeamID + err error + }{ + TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := TeamIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustTeamID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected TeamID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustTeamID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + 
} +} + +func TestTeamIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *TeamID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := TeamIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestTeamIDFromRefID(t *testing.T) { + id := New() + + subId := TeamIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestTeamID_ID(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestTeamID_String(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestTeamID_GoString(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.TeamID("+id.String()+")") +} + +func TestTeamID_RefString(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestTeamID_Ref(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestTeamID_CopyRef(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestTeamID_IDRef(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + 
+func TestTeamID_StringRef(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestTeamID_MarhsalJSON(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestTeamID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &TeamID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestTeamID_MarshalText(t *testing.T) { + id := New() + subId := TeamIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestTeamID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &TeamID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestTeamID_IsNil(t *testing.T) { + subId := TeamID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *TeamIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestTeamIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []TeamID + expected []string + }{ + { + name: "Empty slice", + input: make([]TeamID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + 
t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, TeamIDToKeys(tc.input)) + }) + } + +} + +func TestTeamIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []TeamID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []TeamID + err error + }{ + res: make([]TeamID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []TeamID + err error + }{ + res: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []TeamID + err error + }{ + res: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []TeamID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := TeamIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := TeamIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestTeamIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []TeamID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]TeamID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, 
+ }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TeamIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestTeamIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []TeamID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]TeamID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []TeamID{MustTeamID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []TeamID{ + MustTeamID(id1.String()), + MustTeamID(id2.String()), + MustTeamID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TeamIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestTeamIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []TeamID + expected []ID + }{ + { + name: "Empty slice", + input: make([]TeamID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + 
MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TeamIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestTeamIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustTeamID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustTeamID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustTeamID(id3.String()) + + testCases := []struct { + name string + input []*TeamID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*TeamID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*TeamID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*TeamID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TeamIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewTeamIDSet(t *testing.T) { + TeamIdSet := NewTeamIDSet() + + assert.NotNil(t, TeamIdSet) + assert.Empty(t, TeamIdSet.m) + assert.Empty(t, TeamIdSet.s) +} + +func TestTeamIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []TeamID + expected *TeamIDSet + }{ + { + name: "Empty slice", + input: make([]TeamID, 0), + expected: &TeamIDSet{ + m: map[TeamID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewTeamIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestTeamIDSet_AddRef(t *testing.T) { + t.Parallel() + + TeamId := MustTeamID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *TeamID + expected *TeamIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &TeamIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &TeamId, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewTeamIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestTeamIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + TeamIDSet + TeamID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + TeamIDSet + TeamID + }{TeamIDSet: TeamIDSet{}, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + TeamIDSet + TeamID + }{TeamIDSet: TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + TeamIDSet + TeamID + }{TeamIDSet: TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.TeamIDSet.Has(tc.input.TeamID)) + }) + } +} + +func TestTeamIDSet_Clear(t *testing.T) { + t.Parallel() + + 
testCases := []struct { + name string + input TeamIDSet + expected TeamIDSet + }{ + { + name: "Empty Set", + input: TeamIDSet{}, + expected: TeamIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: TeamIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestTeamIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *TeamIDSet + expected []TeamID + }{ + { + name: "Empty slice", + input: &TeamIDSet{ + m: map[TeamID]struct{}{}, + s: nil, + }, + expected: make([]TeamID, 0), + }, + { + name: "1 element", + input: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &TeamIDSet{ + m: map[TeamID]struct{}{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestTeamIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name 
string + input *TeamIDSet + expected *TeamIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewTeamIDSet(), + }, + { + name: "Empty set", + input: NewTeamIDSet(), + expected: NewTeamIDSet(), + }, + { + name: "1 element", + input: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &TeamIDSet{ + m: map[TeamID]struct{}{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestTeamIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *TeamIDSet + b *TeamIDSet + } + expected *TeamIDSet + }{ + { + name: "Empty Set", + input: struct { + a *TeamIDSet + b *TeamIDSet + }{ + a: &TeamIDSet{}, + b: &TeamIDSet{}, + }, + expected: &TeamIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *TeamIDSet + b *TeamIDSet + }{ + a: 
&TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &TeamIDSet{}, + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *TeamIDSet + b *TeamIDSet + }{ + a: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []TeamID{ + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/user_gen_test.go b/pkg/id/user_gen_test.go new file mode 100644 index 000000000..39394b659 --- /dev/null +++ b/pkg/id/user_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewUserID(t *testing.T) { + id := NewUserID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestUserIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result UserID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result UserID + err error + }{ + UserID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result UserID + err error + }{ + UserID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result UserID + err error + }{ + UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := UserIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustUserID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected UserID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustUserID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + 
} +} + +func TestUserIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *UserID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := UserIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestUserIDFromRefID(t *testing.T) { + id := New() + + subId := UserIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestUserID_ID(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestUserID_String(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestUserID_GoString(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.UserID("+id.String()+")") +} + +func TestUserID_RefString(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestUserID_Ref(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestUserID_CopyRef(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.False(t, subId == subIdCopyRef) +} + +func TestUserID_IDRef(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + 
+func TestUserID_StringRef(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestUserID_MarhsalJSON(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestUserID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &UserID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestUserID_MarshalText(t *testing.T) { + id := New() + subId := UserIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestUserID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &UserID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestUserID_IsNil(t *testing.T) { + subId := UserID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *UserIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestUserIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []UserID + expected []string + }{ + { + name: "Empty slice", + input: make([]UserID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + 
t.Run(tc.name, func(tt *testing.T) {
			tt.Parallel()
			assert.Equal(tt, tc.expected, UserIDToKeys(tc.input))
		})
	}
}

// TestUserIDsFrom verifies UserIDsFrom converts a slice of ULID strings into
// UserIDs, and that an invalid element makes the whole conversion fail with
// ErrInvalidID.
func TestUserIDsFrom(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name     string
		input    []string
		expected struct {
			res []UserID
			err error
		}
	}{
		{
			name:  "Empty slice",
			input: make([]string, 0),
			expected: struct {
				res []UserID
				err error
			}{
				res: make([]UserID, 0),
				err: nil,
			},
		},
		{
			name:  "1 element",
			input: []string{"01f3zhcaq35403zdjnd6dcm0t2"},
			expected: struct {
				res []UserID
				err error
			}{
				res: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")},
				err: nil,
			},
		},
		{
			name: "multiple elements",
			input: []string{
				"01f3zhcaq35403zdjnd6dcm0t1",
				"01f3zhcaq35403zdjnd6dcm0t2",
				"01f3zhcaq35403zdjnd6dcm0t3",
			},
			expected: struct {
				res []UserID
				err error
			}{
				res: []UserID{
					MustUserID("01f3zhcaq35403zdjnd6dcm0t1"),
					MustUserID("01f3zhcaq35403zdjnd6dcm0t2"),
					MustUserID("01f3zhcaq35403zdjnd6dcm0t3"),
				},
				err: nil,
			},
		},
		{
			// The old case duplicated the name above and fed only *valid*
			// IDs while expecting an error; give it an actually invalid
			// element so the error path is really exercised.
			name: "error: invalid element",
			input: []string{
				"01f3zhcaq35403zdjnd6dcm0t1",
				"invalid-id",
				"01f3zhcaq35403zdjnd6dcm0t3",
			},
			expected: struct {
				res []UserID
				err error
			}{
				res: nil,
				err: ErrInvalidID,
			},
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.name, func(tt *testing.T) {
			tt.Parallel()

			res, err := UserIDsFrom(tc.input)
			if tc.expected.err != nil {
				// Compare the returned error against the expected sentinel.
				// The old errors.As(ErrInvalidID, &err) had its arguments
				// reversed and was vacuously true for any error.
				assert.True(tt, errors.Is(err, tc.expected.err))
				assert.Nil(tt, res)
			} else {
				assert.Nil(tt, err)
				assert.Equal(tt, tc.expected.res, res)
			}
		})
	}
}

// TestUserIDsFromID verifies the lossless conversion from generic IDs to
// UserIDs.
func TestUserIDsFromID(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name     string
		input    []ID
		expected []UserID
	}{
		{
			name:     "Empty slice",
			input:    make([]ID, 0),
			expected: make([]UserID, 0),
		},
		{
			name:     "1 element",
			input:    []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")},
			expected: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")},
+ }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := UserIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestUserIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []UserID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]UserID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []UserID{MustUserID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []UserID{ + MustUserID(id1.String()), + MustUserID(id2.String()), + MustUserID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := UserIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestUserIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []UserID + expected []ID + }{ + { + name: "Empty slice", + input: make([]UserID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + 
MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := UserIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestUserIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustUserID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustUserID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustUserID(id3.String()) + + testCases := []struct { + name string + input []*UserID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*UserID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*UserID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*UserID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := UserIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewUserIDSet(t *testing.T) { + UserIdSet := NewUserIDSet() + + assert.NotNil(t, UserIdSet) + assert.Empty(t, UserIdSet.m) + assert.Empty(t, UserIdSet.s) +} + +func TestUserIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []UserID + expected *UserIDSet + }{ + { + name: "Empty slice", + input: make([]UserID, 0), + expected: &UserIDSet{ + m: map[UserID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + 
MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewUserIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestUserIDSet_AddRef(t *testing.T) { + t.Parallel() + + UserId := MustUserID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *UserID + expected *UserIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &UserIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &UserId, + expected: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewUserIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestUserIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + UserIDSet + UserID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + UserIDSet + UserID + }{UserIDSet: UserIDSet{}, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + UserIDSet + UserID + }{UserIDSet: UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + UserIDSet + UserID + }{UserIDSet: UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.UserIDSet.Has(tc.input.UserID)) + }) + } +} + +func TestUserIDSet_Clear(t *testing.T) { + t.Parallel() + + 
testCases := []struct { + name string + input UserIDSet + expected UserIDSet + }{ + { + name: "Empty Set", + input: UserIDSet{}, + expected: UserIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: UserIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestUserIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *UserIDSet + expected []UserID + }{ + { + name: "Empty slice", + input: &UserIDSet{ + m: map[UserID]struct{}{}, + s: nil, + }, + expected: make([]UserID, 0), + }, + { + name: "1 element", + input: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &UserIDSet{ + m: map[UserID]struct{}{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestUserIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name 
string + input *UserIDSet + expected *UserIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewUserIDSet(), + }, + { + name: "Empty set", + input: NewUserIDSet(), + expected: NewUserIDSet(), + }, + { + name: "1 element", + input: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &UserIDSet{ + m: map[UserID]struct{}{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestUserIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *UserIDSet + b *UserIDSet + } + expected *UserIDSet + }{ + { + name: "Empty Set", + input: struct { + a *UserIDSet + b *UserIDSet + }{ + a: &UserIDSet{}, + b: &UserIDSet{}, + }, + expected: &UserIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *UserIDSet + b *UserIDSet + }{ + a: 
&UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &UserIDSet{}, + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *UserIDSet + b *UserIDSet + }{ + a: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []UserID{ + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/widget_gen_test.go b/pkg/id/widget_gen_test.go new file mode 100644 index 000000000..0a7c65f6e --- /dev/null +++ b/pkg/id/widget_gen_test.go @@ -0,0 +1,1004 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetID(t *testing.T) { + id := NewWidgetID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestWidgetIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result WidgetID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result WidgetID + err error + }{ + WidgetID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result WidgetID + err error + }{ + WidgetID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result WidgetID + err error + }{ + WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := WidgetIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustWidgetID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected WidgetID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustWidgetID(tc.input) + 
assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestWidgetIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *WidgetID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := WidgetIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestWidgetIDFromRefID(t *testing.T) { + id := New() + + subId := WidgetIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestWidgetID_ID(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestWidgetID_String(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestWidgetID_GoString(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.WidgetID("+id.String()+")") +} + +func TestWidgetID_RefString(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestWidgetID_Ref(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestWidgetID_CopyRef(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestWidgetID_IDRef(t *testing.T) { + id := 
New() + subId := WidgetIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestWidgetID_StringRef(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestWidgetID_MarhsalJSON(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestWidgetID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &WidgetID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestWidgetID_MarshalText(t *testing.T) { + id := New() + subId := WidgetIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestWidgetID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &WidgetID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestWidgetID_IsNil(t *testing.T) { + subId := WidgetID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *WidgetIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestWidgetIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []WidgetID + expected []string + }{ + { + name: "Empty slice", + input: make([]WidgetID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + 
"01f3zhcaq35403zdjnd6dcm0t1",
				"01f3zhcaq35403zdjnd6dcm0t2",
				"01f3zhcaq35403zdjnd6dcm0t3",
			},
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.name, func(tt *testing.T) {
			tt.Parallel()
			assert.Equal(tt, tc.expected, WidgetIDToKeys(tc.input))
		})
	}
}

// TestWidgetIDsFrom verifies WidgetIDsFrom converts a slice of ULID strings
// into WidgetIDs, and that an invalid element makes the whole conversion
// fail with ErrInvalidID.
func TestWidgetIDsFrom(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name     string
		input    []string
		expected struct {
			res []WidgetID
			err error
		}
	}{
		{
			name:  "Empty slice",
			input: make([]string, 0),
			expected: struct {
				res []WidgetID
				err error
			}{
				res: make([]WidgetID, 0),
				err: nil,
			},
		},
		{
			name:  "1 element",
			input: []string{"01f3zhcaq35403zdjnd6dcm0t2"},
			expected: struct {
				res []WidgetID
				err error
			}{
				res: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")},
				err: nil,
			},
		},
		{
			name: "multiple elements",
			input: []string{
				"01f3zhcaq35403zdjnd6dcm0t1",
				"01f3zhcaq35403zdjnd6dcm0t2",
				"01f3zhcaq35403zdjnd6dcm0t3",
			},
			expected: struct {
				res []WidgetID
				err error
			}{
				res: []WidgetID{
					MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"),
					MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"),
					MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"),
				},
				err: nil,
			},
		},
		{
			// The old case duplicated the name above and fed only *valid*
			// IDs while expecting an error; give it an actually invalid
			// element so the error path is really exercised.
			name: "error: invalid element",
			input: []string{
				"01f3zhcaq35403zdjnd6dcm0t1",
				"invalid-id",
				"01f3zhcaq35403zdjnd6dcm0t3",
			},
			expected: struct {
				res []WidgetID
				err error
			}{
				res: nil,
				err: ErrInvalidID,
			},
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.name, func(tt *testing.T) {
			tt.Parallel()

			res, err := WidgetIDsFrom(tc.input)
			if tc.expected.err != nil {
				// Compare the returned error against the expected sentinel.
				// The old errors.As(ErrInvalidID, &err) had its arguments
				// reversed and was vacuously true for any error.
				assert.True(tt, errors.Is(err, tc.expected.err))
				assert.Nil(tt, res)
			} else {
				assert.Nil(tt, err)
				assert.Equal(tt, tc.expected.res, res)
			}
		})
	}
}

// TestWidgetIDsFromID verifies the lossless conversion from generic IDs to
// WidgetIDs.
func TestWidgetIDsFromID(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name     string
		input    []ID
		expected []WidgetID
	}{
		{
			name:  "Empty slice",
			input:
make([]ID, 0), + expected: make([]WidgetID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := WidgetIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestWidgetIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []WidgetID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]WidgetID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []WidgetID{MustWidgetID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []WidgetID{ + MustWidgetID(id1.String()), + MustWidgetID(id2.String()), + MustWidgetID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := WidgetIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestWidgetIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []WidgetID + expected []ID + }{ + { + name: "Empty slice", + input: make([]WidgetID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, 
+ { + name: "multiple elements", + input: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := WidgetIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestWidgetIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustWidgetID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustWidgetID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustWidgetID(id3.String()) + + testCases := []struct { + name string + input []*WidgetID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*WidgetID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*WidgetID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*WidgetID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := WidgetIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewWidgetIDSet(t *testing.T) { + WidgetIdSet := NewWidgetIDSet() + + assert.NotNil(t, WidgetIdSet) + assert.Empty(t, WidgetIdSet.m) + assert.Empty(t, WidgetIdSet.s) +} + +func TestWidgetIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []WidgetID + expected *WidgetIDSet + }{ + { + name: "Empty slice", + input: make([]WidgetID, 0), + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + 
expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewWidgetIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestWidgetIDSet_AddRef(t *testing.T) { + t.Parallel() + + WidgetId := MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *WidgetID + expected *WidgetIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &WidgetIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &WidgetId, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewWidgetIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestWidgetIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + WidgetIDSet + WidgetID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + WidgetIDSet + WidgetID + }{WidgetIDSet: WidgetIDSet{}, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + WidgetIDSet + WidgetID + }{WidgetIDSet: WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + WidgetIDSet + WidgetID + }{WidgetIDSet: WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, 
tc.input.WidgetIDSet.Has(tc.input.WidgetID)) + }) + } +} + +func TestWidgetIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input WidgetIDSet + expected WidgetIDSet + }{ + { + name: "Empty Set", + input: WidgetIDSet{}, + expected: WidgetIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: WidgetIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestWidgetIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *WidgetIDSet + expected []WidgetID + }{ + { + name: "Empty slice", + input: &WidgetIDSet{ + m: map[WidgetID]struct{}{}, + s: nil, + }, + expected: make([]WidgetID, 0), + }, + { + name: "1 element", + input: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &WidgetIDSet{ + m: map[WidgetID]struct{}{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, 
func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestWidgetIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *WidgetIDSet + expected *WidgetIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewWidgetIDSet(), + }, + { + name: "Empty set", + input: NewWidgetIDSet(), + expected: NewWidgetIDSet(), + }, + { + name: "1 element", + input: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &WidgetIDSet{ + m: map[WidgetID]struct{}{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + }, + s: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestWidgetIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *WidgetIDSet + b *WidgetIDSet + 
} + expected *WidgetIDSet + }{ + { + name: "Empty Set", + input: struct { + a *WidgetIDSet + b *WidgetIDSet + }{ + a: &WidgetIDSet{}, + b: &WidgetIDSet{}, + }, + expected: &WidgetIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *WidgetIDSet + b *WidgetIDSet + }{ + a: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &WidgetIDSet{}, + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *WidgetIDSet + b *WidgetIDSet + }{ + a: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + }, + s: []WidgetID{ + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} From 5bc0894dd865dd9115a63568a00f844bcb4b251f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 8 Jun 2021 17:58:33 +0900 Subject: [PATCH 033/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index 4bcf3c2f7..17986cfc2 100644 --- a/.github/workflows/godoc.yml +++ 
b/.github/workflows/godoc.yml @@ -8,7 +8,7 @@ on: env: MOD: github.com/reearth/reearth-backend REPO: github.com/reearth/reearth-backend - ADDR: '127.0.0.1:6060' + ADDR: 'localhost:6060' jobs: godoc: name: godoc @@ -25,22 +25,28 @@ jobs: - name: checkout uses: actions/checkout@v2 - name: install godoc - run: go install golang.org/x/tools/cmd/godoc + run: go install golang.org/x/tools/cmd/godoc@latest - name: generate docs + id: godoc + continue-on-error: true run: | godoc -http="$ADDR" & sleep 10 wget -r -np -N -E -p -k "http://${ADDR}/pkg/${MOD}/" - name: replace urls run: | - find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i -e "s/http:\/\/${ADDR}\/src\/${MOD}/https:\/\/${REPO}\/blob\/main/" - find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i -e "s/http:\/\/${ADDR}/\/${BASE_PATH}/" + [ `find . -name "*.html" -type f | wc -l` -eq 0 ] && exit 1 + find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i \ + -e "s@http://${ADDR}/src/${MOD}@https://${REPO}/blob/main@" \ + -e "s@\"http://${ADDR}/pkg/\"@\"/${DIR}/pkg/${REPO}/\"@" \ + -e 's@

Date: Tue, 8 Jun 2021 18:15:28 +0900 Subject: [PATCH 034/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 15 +++++++++++---- go.mod | 1 - 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index 17986cfc2..d70bfe693 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -8,14 +8,14 @@ on: env: MOD: github.com/reearth/reearth-backend REPO: github.com/reearth/reearth-backend - ADDR: 'localhost:6060' + ADDR: 'localhost:8080' jobs: godoc: name: godoc runs-on: ubuntu-latest if: ${{ github.event.workflow_run.conclusion == 'success' }} env: - BASE_PATH: ${{ github.event.workflow_run.head_branch }} + DIR: ${{ github.event.workflow_run.head_branch }} steps: - name: set up uses: actions/setup-go@v2 @@ -24,14 +24,21 @@ jobs: id: go - name: checkout uses: actions/checkout@v2 + - name: cache + uses: actions/cache@v2 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- + - run: go mod download - name: install godoc run: go install golang.org/x/tools/cmd/godoc@latest - name: generate docs - id: godoc continue-on-error: true run: | godoc -http="$ADDR" & - sleep 10 + sleep 15 wget -r -np -N -E -p -k "http://${ADDR}/pkg/${MOD}/" - name: replace urls run: | diff --git a/go.mod b/go.mod index 0557de0b6..23049d5b8 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,5 @@ module github.com/reearth/reearth-backend -// +heroku install golang.org/x/tools/cmd/godoc require ( cloud.google.com/go v0.80.0 cloud.google.com/go/storage v1.14.0 From 9f8e11dd8a697084afca8d7d9fb00692aa8c22fb Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 8 Jun 2021 18:37:43 +0900 Subject: [PATCH 035/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index d70bfe693..1adacce06 100644 --- 
a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -8,6 +8,7 @@ on: env: MOD: github.com/reearth/reearth-backend REPO: github.com/reearth/reearth-backend + REPO_NAME: reearth-backend ADDR: 'localhost:8080' jobs: godoc: @@ -38,14 +39,14 @@ jobs: continue-on-error: true run: | godoc -http="$ADDR" & - sleep 15 + sleep 10 wget -r -np -N -E -p -k "http://${ADDR}/pkg/${MOD}/" - name: replace urls run: | [ `find . -name "*.html" -type f | wc -l` -eq 0 ] && exit 1 find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i \ -e "s@http://${ADDR}/src/${MOD}@https://${REPO}/blob/main@" \ - -e "s@\"http://${ADDR}/pkg/\"@\"/${DIR}/pkg/${REPO}/\"@" \ + -e "s@\"http://${ADDR}/pkg/\"@\"/${REPO_NAME}/${DIR}/pkg/${REPO}/\"@" \ -e 's@ Date: Tue, 8 Jun 2021 18:38:25 +0900 Subject: [PATCH 036/253] chore: fix godoc workflow --- .github/workflows/godoc.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index 1adacce06..dfe92f983 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -48,7 +48,7 @@ jobs: -e "s@http://${ADDR}/src/${MOD}@https://${REPO}/blob/main@" \ -e "s@\"http://${ADDR}/pkg/\"@\"/${REPO_NAME}/${DIR}/pkg/${REPO}/\"@" \ -e 's@ Date: Tue, 15 Jun 2021 10:30:23 +0300 Subject: [PATCH 037/253] fix updateme payload (#14) --- internal/adapter/graphql/convert_user.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/adapter/graphql/convert_user.go b/internal/adapter/graphql/convert_user.go index 687635002..02c80c8b1 100644 --- a/internal/adapter/graphql/convert_user.go +++ b/internal/adapter/graphql/convert_user.go @@ -23,6 +23,7 @@ func toUser(user *user.User) *User { Name: user.Name(), Email: user.Email(), Lang: user.Lang(), + Theme: Theme(user.Theme()), MyTeamID: user.Team().ID(), Auths: authsgql, } From 5b7a5fda97c831d0361db2362f90e7cc843a231a Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Fri, 18 Jun 2021 
14:09:25 +0300 Subject: [PATCH 038/253] feat: create installable plugins (#1) * clone install-plugin branch to oss * test: pkg/shp (#5) * - comment out unused code * - add test data * - reader test * - sequential reader test * - shp type test * - shp test * - writer test * - zip reader test * - add test data * fix PR comments Co-authored-by: YK * test: pkg/shp (#5) * - comment out unused code * - add test data * - reader test * - sequential reader test * - shp type test * - shp test * - writer test * - zip reader test * - add test data * fix PR comments * resolve comments Co-authored-by: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Co-authored-by: YK --- go.mod | 1 + go.sum | 2 + internal/adapter/graphql/controller_plugin.go | 18 ++ internal/adapter/graphql/convert_plugin.go | 12 + internal/adapter/graphql/models_gen.go | 6 + internal/app/repo.go | 5 + internal/graphql/generated.go | 281 ++++++++++++++++++ internal/graphql/resolver_query.go | 7 + internal/infrastructure/github/fetcher.go | 26 ++ .../infrastructure/github/fetcher_test.go | 66 ++++ .../infrastructure/github/plugin_registry.go | 34 +++ .../github/plugin_registry_test.go | 49 +++ internal/usecase/gateway/container.go | 1 + internal/usecase/gateway/plugin_registry.go | 11 + internal/usecase/interactor/plugin.go | 15 + internal/usecase/interfaces/plugin.go | 1 + pkg/plugin/metadata.go | 9 + schema.graphql | 7 + 18 files changed, 551 insertions(+) create mode 100644 internal/infrastructure/github/fetcher.go create mode 100644 internal/infrastructure/github/fetcher_test.go create mode 100644 internal/infrastructure/github/plugin_registry.go create mode 100644 internal/infrastructure/github/plugin_registry_test.go create mode 100644 internal/usecase/gateway/plugin_registry.go create mode 100644 pkg/plugin/metadata.go diff --git a/go.mod b/go.mod index 23049d5b8..57c096f82 100644 --- a/go.mod +++ b/go.mod @@ -16,6 +16,7 @@ require ( github.com/hashicorp/golang-lru v0.5.4 // indirect 
github.com/iancoleman/strcase v0.1.3 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d + github.com/jarcoal/httpmock v1.0.8 github.com/joho/godotenv v1.3.0 github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 diff --git a/go.sum b/go.sum index 4784ad74b..249bd0339 100644 --- a/go.sum +++ b/go.sum @@ -245,6 +245,8 @@ github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d h1:sQbbvtUo github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:xVHEhsiSJJnT0jlcQpQUg+GyoLf0i0xciM1kqWTGT58= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jarcoal/httpmock v1.0.8 h1:8kI16SoO6LQKgPE7PvQuV+YuD/inwHd7fOOe2zMbo4k= +github.com/jarcoal/httpmock v1.0.8/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= diff --git a/internal/adapter/graphql/controller_plugin.go b/internal/adapter/graphql/controller_plugin.go index 0578d8db2..3e7de83a3 100644 --- a/internal/adapter/graphql/controller_plugin.go +++ b/internal/adapter/graphql/controller_plugin.go @@ -36,3 +36,21 @@ func (c *PluginController) Upload(ctx context.Context, ginput *UploadPluginInput Plugin: toPlugin(res), }, nil } + +func (c *PluginController) FetchPluginMetadata(ctx context.Context, operator *usecase.Operator) ([]*PluginMetadata, error) { + res, err := c.usecase().FetchPluginMetadata(ctx, operator) + if err != nil { + return nil, err + } + + pluginMetaList := make([]*PluginMetadata, 0, len(res)) + for _, md := range res { + pm, err := toPluginMetadata(md) + if err != nil { + return nil, err + } + pluginMetaList = 
append(pluginMetaList, pm) + } + + return pluginMetaList, nil +} diff --git a/internal/adapter/graphql/convert_plugin.go b/internal/adapter/graphql/convert_plugin.go index f74c6f79d..2eda39ed1 100644 --- a/internal/adapter/graphql/convert_plugin.go +++ b/internal/adapter/graphql/convert_plugin.go @@ -56,3 +56,15 @@ func toPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { } return PluginExtensionType("") } + +func toPluginMetadata(t *plugin.Metadata) (*PluginMetadata, error) { + if t == nil { + return nil, nil + } + + return &PluginMetadata{ + Name: t.Name, + Description: t.Description, + CreatedAt: t.CreatedAt, + }, nil +} diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index b8a3223a1..21536f068 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -578,6 +578,12 @@ type PluginExtension struct { TranslatedDescription string `json:"translatedDescription"` } +type PluginMetadata struct { + Name string `json:"name"` + Description string `json:"description"` + CreatedAt time.Time `json:"createdAt"` +} + type Project struct { ID id.ID `json:"id"` IsArchived bool `json:"isArchived"` diff --git a/internal/app/repo.go b/internal/app/repo.go index 1a2d2eeb8..ea1b4897a 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -5,6 +5,8 @@ import ( "fmt" "time" + "github.com/reearth/reearth-backend/internal/infrastructure/github" + "github.com/reearth/reearth-backend/internal/infrastructure/adapter" "github.com/reearth/reearth-backend/internal/infrastructure/auth0" "github.com/reearth/reearth-backend/internal/infrastructure/fs" @@ -72,6 +74,9 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. 
// Auth0 gateways.Authenticator = auth0.New(conf.Auth0.Domain, conf.Auth0.ClientID, conf.Auth0.ClientSecret) + // github + gateways.PluginRegistry = github.NewPluginRegistry() + // release lock of all scenes if err := repos.SceneLock.ReleaseAllLock(context.Background()); err != nil { log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index 690af8a7c..01264ec54 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -522,6 +522,12 @@ type ComplexityRoot struct { Visualizer func(childComplexity int) int } + PluginMetadata struct { + CreatedAt func(childComplexity int) int + Description func(childComplexity int) int + Name func(childComplexity int) int + } + Project struct { Alias func(childComplexity int) int BasicAuthPassword func(childComplexity int) int @@ -698,6 +704,7 @@ type ComplexityRoot struct { DatasetSchemas func(childComplexity int, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int Datasets func(childComplexity int, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int DynamicDatasetSchemas func(childComplexity int, sceneID id.ID) int + InstallablePlugins func(childComplexity int) int Layer func(childComplexity int, id id.ID) int Me func(childComplexity int) int Node func(childComplexity int, id id.ID, typeArg graphql1.NodeType) int @@ -1096,6 +1103,7 @@ type QueryResolver interface { DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*graphql1.DatasetSchema, error) SearchUser(ctx context.Context, nameOrEmail string) (*graphql1.SearchedUser, error) CheckProjectAlias(ctx context.Context, alias string) (*graphql1.CheckProjectAliasPayload, error) + InstallablePlugins(ctx context.Context) ([]*graphql1.PluginMetadata, error) } type SceneResolver interface { Project(ctx context.Context, obj *graphql1.Scene) (*graphql1.Project, error) @@ -3509,6 +3517,27 @@ 
func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PluginExtension.Visualizer(childComplexity), true + case "PluginMetadata.createdAt": + if e.complexity.PluginMetadata.CreatedAt == nil { + break + } + + return e.complexity.PluginMetadata.CreatedAt(childComplexity), true + + case "PluginMetadata.description": + if e.complexity.PluginMetadata.Description == nil { + break + } + + return e.complexity.PluginMetadata.Description(childComplexity), true + + case "PluginMetadata.name": + if e.complexity.PluginMetadata.Name == nil { + break + } + + return e.complexity.PluginMetadata.Name(childComplexity), true + case "Project.alias": if e.complexity.Project.Alias == nil { break @@ -4432,6 +4461,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Query.DynamicDatasetSchemas(childComplexity, args["sceneId"].(id.ID)), true + case "Query.installablePlugins": + if e.complexity.Query.InstallablePlugins == nil { + break + } + + return e.complexity.Query.InstallablePlugins(childComplexity), true + case "Query.layer": if e.complexity.Query.Layer == nil { break @@ -5512,6 +5548,12 @@ type Plugin { propertySchema: PropertySchema @goField(forceResolver: true) } +type PluginMetadata{ + name: String! + description: String! + createdAt: DateTime! +} + enum PluginExtensionType { PRIMITIVE WIDGET @@ -6591,6 +6633,7 @@ type Query { dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! searchUser(nameOrEmail: String!): SearchedUser checkProjectAlias(alias: String!): CheckProjectAliasPayload! + installablePlugins: [PluginMetadata!]! 
} # Mutation @@ -18581,6 +18624,111 @@ func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.C return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginMetadata", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginMetadata_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginMetadata", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res 
+ return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginMetadata", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.CreatedAt, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(time.Time) + fc.Result = res + return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) +} + func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -23315,6 +23463,41 @@ func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field return ec.marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx, field.Selections, res) } +func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Query", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + 
ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().InstallablePlugins(rctx) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*graphql1.PluginMetadata) + fc.Result = res + return ec.marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadataแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -32672,6 +32855,43 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select return out } +var pluginMetadataImplementors = []string{"PluginMetadata"} + +func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PluginMetadata) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, pluginMetadataImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("PluginMetadata") + case "name": + out.Values[i] = ec._PluginMetadata_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "description": + out.Values[i] = ec._PluginMetadata_description(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "createdAt": + out.Values[i] = ec._PluginMetadata_createdAt(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var projectImplementors = []string{"Project", "Node"} func (ec *executionContext) _Project(ctx context.Context, sel 
ast.SelectionSet, obj *graphql1.Project) graphql.Marshaler { @@ -33957,6 +34177,20 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr } return res }) + case "installablePlugins": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_installablePlugins(ctx, field) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) case "__type": out.Values[i] = ec._Query___type(ctx, field) case "__schema": @@ -36710,6 +36944,53 @@ func (ec *executionContext) marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearth return res } +func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadataแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PluginMetadata) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadata(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + return ret +} + +func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *graphql1.PluginMetadata) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + 
return ec._PluginMetadata(ctx, sel, v) +} + func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Project) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup diff --git a/internal/graphql/resolver_query.go b/internal/graphql/resolver_query.go index 8a4d07f33..c293a0a27 100644 --- a/internal/graphql/resolver_query.go +++ b/internal/graphql/resolver_query.go @@ -317,3 +317,10 @@ func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*g return r.config.Controllers.ProjectController.CheckAlias(ctx, alias) } + +func (r *queryResolver) InstallablePlugins(ctx context.Context) ([]*graphql1.PluginMetadata, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.PluginController.FetchPluginMetadata(ctx, getOperator(ctx)) +} diff --git a/internal/infrastructure/github/fetcher.go b/internal/infrastructure/github/fetcher.go new file mode 100644 index 000000000..01e9b9158 --- /dev/null +++ b/internal/infrastructure/github/fetcher.go @@ -0,0 +1,26 @@ +package github + +import ( + "context" + "fmt" + "io" + "net/http" +) + +func fetchURL(ctx context.Context, url string) (io.ReadCloser, error) { + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + return nil, err + } + + res, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("StatusCode=%d", res.StatusCode) + } + + return res.Body, nil +} diff --git a/internal/infrastructure/github/fetcher_test.go b/internal/infrastructure/github/fetcher_test.go new file mode 100644 index 000000000..fc3d9073a --- /dev/null +++ b/internal/infrastructure/github/fetcher_test.go @@ -0,0 +1,66 @@ +package github + +import ( + "context" + "errors" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" 
+) + +func TestFetchURL(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + _, err := rw.Write([]byte(`OK`)) + assert.NoError(t, err) + })) + server2 := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + rw.WriteHeader(http.StatusBadRequest) + })) + testCases := []struct { + Name, URL string + Ctx context.Context + ExpectedErr error + }{ + { + Name: "Fail: nil context", + Ctx: nil, + URL: server.URL, + ExpectedErr: errors.New("nil Context"), + }, + { + Name: "Fail: nil unsupported protocol scheme ", + Ctx: context.Background(), + URL: "", + ExpectedErr: errors.New("unsupported protocol scheme"), + }, + { + Name: "Fail: bad request ", + Ctx: context.Background(), + URL: server2.URL, + ExpectedErr: errors.New("StatusCode=400"), + }, + { + Name: "Success", + Ctx: context.Background(), + URL: server.URL, + }, + } + defer func() { + server.Close() + server2.Close() + }() + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + body, err := fetchURL(tc.Ctx, tc.URL) + if err != nil { + assert.True(tt, errors.As(tc.ExpectedErr, &err)) + } else { + assert.NotNil(tt, body) + } + }) + } + +} diff --git a/internal/infrastructure/github/plugin_registry.go b/internal/infrastructure/github/plugin_registry.go new file mode 100644 index 000000000..84a3cbe06 --- /dev/null +++ b/internal/infrastructure/github/plugin_registry.go @@ -0,0 +1,34 @@ +package github + +import ( + "context" + "encoding/json" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type pluginRegistry struct{} + +func NewPluginRegistry() gateway.PluginRegistry { + return &pluginRegistry{} +} + +const source = `https://raw.githubusercontent.com/reearth/plugins/main/plugins.json` + +func (d *pluginRegistry) FetchMetadata(ctx context.Context) ([]*plugin.Metadata, error) { + + response, err := fetchURL(ctx, source) + if 
err != nil { + return nil, err + } + + defer func() { err = response.Close() }() + + var result []*plugin.Metadata + err = json.NewDecoder(response).Decode(&result) + if err != nil { + return nil, err + } + return result, nil +} diff --git a/internal/infrastructure/github/plugin_registry_test.go b/internal/infrastructure/github/plugin_registry_test.go new file mode 100644 index 000000000..075795fec --- /dev/null +++ b/internal/infrastructure/github/plugin_registry_test.go @@ -0,0 +1,49 @@ +package github + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/stretchr/testify/assert" +) + +func TestNewPluginRegistry(t *testing.T) { + d := NewPluginRegistry() + assert.NotNil(t, d) +} + +func TestPluginRegistry_FetchMetadata(t *testing.T) { + httpmock.Activate() + defer httpmock.DeactivateAndReset() + httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", + httpmock.NewStringResponder(200, `[{"name": "reearth","description": "Official Plugin","createdAt": "2021-03-16T04:19:57.592Z"}]`)) + d := NewPluginRegistry() + res, err := d.FetchMetadata(context.Background()) + tm, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + + assert.Equal(t, res, []*plugin.Metadata{ + { + Name: "reearth", + Description: "Official Plugin", + CreatedAt: tm, + }, + }) + assert.NoError(t, err) + + // fail: bad request + httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", + httpmock.NewStringResponder(400, `mock bad request`)) + _, err = d.FetchMetadata(context.Background()) + assert.True(t, errors.As(errors.New("StatusCode=400"), &err)) + + // fail: unable to marshal + httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", + httpmock.NewStringResponder(200, `{"hoge": "test"}`)) + _, err = d.FetchMetadata(context.Background()) + 
assert.True(t, errors.As(errors.New("cannot unmarshal object into Go value of type []*plugin.Metadata"), &err)) + +} diff --git a/internal/usecase/gateway/container.go b/internal/usecase/gateway/container.go index 205988fa0..5f730d7e2 100644 --- a/internal/usecase/gateway/container.go +++ b/internal/usecase/gateway/container.go @@ -5,5 +5,6 @@ type Container struct { Mailer Mailer PluginRepository PluginRepository DataSource DataSource + PluginRegistry PluginRegistry File File } diff --git a/internal/usecase/gateway/plugin_registry.go b/internal/usecase/gateway/plugin_registry.go new file mode 100644 index 000000000..7c1ef7cf0 --- /dev/null +++ b/internal/usecase/gateway/plugin_registry.go @@ -0,0 +1,11 @@ +package gateway + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/plugin" +) + +type PluginRegistry interface { + FetchMetadata(ctx context.Context) ([]*plugin.Metadata, error) +} diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index 3992764ff..c440ef4d3 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -20,6 +20,7 @@ type Plugin struct { file gateway.File pluginRepository gateway.PluginRepository transaction repo.Transaction + pluginRegistry gateway.PluginRegistry } func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { @@ -29,6 +30,7 @@ func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { transaction: r.Transaction, pluginRepository: gr.PluginRepository, file: gr.File, + pluginRegistry: gr.PluginRegistry, } } @@ -59,3 +61,16 @@ func (i *Plugin) Upload(ctx context.Context, r io.Reader, operator *usecase.Oper tx.Commit() return nil, errors.New("not implemented") } + +func (i *Plugin) FetchPluginMetadata(ctx context.Context, operator *usecase.Operator) ([]*plugin.Metadata, error) { + if err := i.OnlyOperator(operator); err != nil { + return nil, err + } + + res, err := i.pluginRegistry.FetchMetadata(ctx) + if 
err != nil { + return nil, err + } + + return res, nil +} diff --git a/internal/usecase/interfaces/plugin.go b/internal/usecase/interfaces/plugin.go index 5bba2b81e..adf6618ae 100644 --- a/internal/usecase/interfaces/plugin.go +++ b/internal/usecase/interfaces/plugin.go @@ -17,4 +17,5 @@ var ( type Plugin interface { Fetch(context.Context, []id.PluginID, *usecase.Operator) ([]*plugin.Plugin, error) Upload(context.Context, io.Reader, *usecase.Operator) (*plugin.Plugin, error) + FetchPluginMetadata(context.Context, *usecase.Operator) ([]*plugin.Metadata, error) } diff --git a/pkg/plugin/metadata.go b/pkg/plugin/metadata.go new file mode 100644 index 000000000..72a274a4c --- /dev/null +++ b/pkg/plugin/metadata.go @@ -0,0 +1,9 @@ +package plugin + +import "time" + +type Metadata struct { + Name string `json:"name"` + Description string `json:"description"` + CreatedAt time.Time `json:"createdAt"` +} diff --git a/schema.graphql b/schema.graphql index f925cdcde..1b72a5b09 100644 --- a/schema.graphql +++ b/schema.graphql @@ -230,6 +230,12 @@ type Plugin { propertySchema: PropertySchema @goField(forceResolver: true) } +type PluginMetadata{ + name: String! + description: String! + createdAt: DateTime! +} + enum PluginExtensionType { PRIMITIVE WIDGET @@ -1309,6 +1315,7 @@ type Query { dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! searchUser(nameOrEmail: String!): SearchedUser checkProjectAlias(alias: String!): CheckProjectAliasPayload! + installablePlugins: [PluginMetadata!]! 
} # Mutation From 888fe014a7986af73a57e74de257c7e670a2c4f6 Mon Sep 17 00:00:00 2001 From: HideBa <49897538+HideBa@users.noreply.github.com> Date: Wed, 23 Jun 2021 14:23:22 +0900 Subject: [PATCH 039/253] feat: add thumbnail, author fields on plugin metadata query (#15) --- internal/adapter/graphql/convert_plugin.go | 8 +- internal/adapter/graphql/models_gen.go | 8 +- internal/graphql/generated.go | 1880 +++++++++-------- .../github/plugin_registry_test.go | 10 +- pkg/plugin/metadata.go | 8 +- schema.graphql | 1778 ++++++++-------- 6 files changed, 1962 insertions(+), 1730 deletions(-) diff --git a/internal/adapter/graphql/convert_plugin.go b/internal/adapter/graphql/convert_plugin.go index 2eda39ed1..3e48bf75d 100644 --- a/internal/adapter/graphql/convert_plugin.go +++ b/internal/adapter/graphql/convert_plugin.go @@ -63,8 +63,10 @@ func toPluginMetadata(t *plugin.Metadata) (*PluginMetadata, error) { } return &PluginMetadata{ - Name: t.Name, - Description: t.Description, - CreatedAt: t.CreatedAt, + Name: t.Name, + Description: t.Description, + ThumbnailURL: t.ThumbnailUrl, + Author: t.Author, + CreatedAt: t.CreatedAt, }, nil } diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index 21536f068..1fc6e0b47 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -579,9 +579,11 @@ type PluginExtension struct { } type PluginMetadata struct { - Name string `json:"name"` - Description string `json:"description"` - CreatedAt time.Time `json:"createdAt"` + Name string `json:"name"` + Description string `json:"description"` + Author string `json:"author"` + ThumbnailURL string `json:"thumbnailUrl"` + CreatedAt time.Time `json:"createdAt"` } type Project struct { diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index 01264ec54..400126b9f 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -523,9 +523,11 @@ type ComplexityRoot struct { } 
PluginMetadata struct { - CreatedAt func(childComplexity int) int - Description func(childComplexity int) int - Name func(childComplexity int) int + Author func(childComplexity int) int + CreatedAt func(childComplexity int) int + Description func(childComplexity int) int + Name func(childComplexity int) int + ThumbnailURL func(childComplexity int) int } Project struct { @@ -3517,6 +3519,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PluginExtension.Visualizer(childComplexity), true + case "PluginMetadata.author": + if e.complexity.PluginMetadata.Author == nil { + break + } + + return e.complexity.PluginMetadata.Author(childComplexity), true + case "PluginMetadata.createdAt": if e.complexity.PluginMetadata.CreatedAt == nil { break @@ -3538,6 +3547,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PluginMetadata.Name(childComplexity), true + case "PluginMetadata.thumbnailUrl": + if e.complexity.PluginMetadata.ThumbnailURL == nil { + break + } + + return e.complexity.PluginMetadata.ThumbnailURL(childComplexity), true + case "Project.alias": if e.complexity.Project.Alias == nil { break @@ -5321,15 +5337,15 @@ var sources = []*ast.Source{ scalar Upload scalar Any -directive @goModel(model: String, models: [String!]) on OBJECT - | INPUT_OBJECT - | SCALAR - | ENUM - | INTERFACE - | UNION +directive @goModel( + model: String + models: [String!] +) on OBJECT | INPUT_OBJECT | SCALAR | ENUM | INTERFACE | UNION -directive @goField(forceResolver: Boolean, name: String) on INPUT_FIELD_DEFINITION - | FIELD_DEFINITION +directive @goField( + forceResolver: Boolean + name: String +) on INPUT_FIELD_DEFINITION | FIELD_DEFINITION # Basic types @@ -5345,1377 +5361,1441 @@ scalar PropertySchemaFieldID scalar TranslatedString type LatLng { - lat: Float! - lng: Float! + lat: Float! + lng: Float! } type LatLngHeight { - lat: Float! - lng: Float! - height: Float! + lat: Float! 
+ lng: Float! + height: Float! } type Camera { - lat: Float! - lng: Float! - altitude: Float! - heading: Float! - pitch: Float! - roll: Float! - fov: Float! + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! } type Typography { - fontFamily: String - fontWeight: String - fontSize: Int - color: String - textAlign: TextAlign - bold: Boolean - italic: Boolean - underline: Boolean + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean } type Rect { - west: Float! - south: Float! - east: Float! - north: Float! + west: Float! + south: Float! + east: Float! + north: Float! } enum TextAlign { - LEFT - CENTER - RIGHT - JUSTIFY - JUSTIFY_ALL + LEFT + CENTER + RIGHT + JUSTIFY + JUSTIFY_ALL } enum ValueType { - BOOL - NUMBER - STRING - REF - URL - LATLNG - LATLNGHEIGHT - CAMERA - TYPOGRAPHY - COORDINATES - POLYGON - RECT + BOOL + NUMBER + STRING + REF + URL + LATLNG + LATLNGHEIGHT + CAMERA + TYPOGRAPHY + COORDINATES + POLYGON + RECT } enum ListOperation { - ADD, - MOVE, - REMOVE + ADD + MOVE + REMOVE } enum Theme { - DEFAULT - LIGHT - DARK + DEFAULT + LIGHT + DARK } # Meta Type interface Node { - id: ID! + id: ID! } type PageInfo { - startCursor: Cursor - endCursor: Cursor - hasNextPage: Boolean! - hasPreviousPage: Boolean! + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! } # Asset type Asset implements Node { - id: ID! - createdAt: DateTime! - teamId: ID! - name: String! - size: FileSize! - url: String! - contentType: String! - team: Team @goField(forceResolver: true) + id: ID! + createdAt: DateTime! + teamId: ID! + name: String! + size: FileSize! + url: String! + contentType: String! + team: Team @goField(forceResolver: true) } # User type User implements Node { - id: ID! - name: String! - email: String! - lang: Lang! - theme: Theme! - myTeamId: ID! - auths: [String!]! 
- teams: [Team!]! @goField(forceResolver: true) - myTeam: Team! @goField(forceResolver: true) + id: ID! + name: String! + email: String! + lang: Lang! + theme: Theme! + myTeamId: ID! + auths: [String!]! + teams: [Team!]! @goField(forceResolver: true) + myTeam: Team! @goField(forceResolver: true) } type SearchedUser { - userId: ID! - userName: String! - userEmail: String! + userId: ID! + userName: String! + userEmail: String! } type CheckProjectAliasPayload { - alias: String! - available: Boolean! + alias: String! + available: Boolean! } type Team implements Node { - id: ID! - name: String! - members: [TeamMember!]! - personal: Boolean! - assets(first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! @goField(forceResolver: true) - projects(includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! @goField(forceResolver: true) + id: ID! + name: String! + members: [TeamMember!]! + personal: Boolean! + assets( + first: Int + last: Int + after: Cursor + before: Cursor + ): AssetConnection! @goField(forceResolver: true) + projects( + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! @goField(forceResolver: true) } type TeamMember { - userId: ID! - role: Role! - user: User @goField(forceResolver: true) + userId: ID! + role: Role! + user: User @goField(forceResolver: true) } enum Role { - # a role who can read project - READER - # a role who can read and write project - WRITER - # a eole who can have full controll of project - OWNER + # a role who can read project + READER + # a role who can read and write project + WRITER + # a eole who can have full controll of project + OWNER } # Project type Project implements Node { - id: ID! - isArchived: Boolean! - isBasicAuthActive: Boolean! - basicAuthUsername: String! - basicAuthPassword: String! - createdAt: DateTime! - updatedAt: DateTime! - publishedAt: DateTime - name: String! - description: String! 
- alias: String! - publicTitle: String! - publicDescription: String! - publicImage: String! - publicNoIndex: Boolean! - imageUrl: URL - teamId: ID! - visualizer: Visualizer! - publishmentStatus: PublishmentStatus! - team: Team @goField(forceResolver: true) - scene: Scene @goField(forceResolver: true) + id: ID! + isArchived: Boolean! + isBasicAuthActive: Boolean! + basicAuthUsername: String! + basicAuthPassword: String! + createdAt: DateTime! + updatedAt: DateTime! + publishedAt: DateTime + name: String! + description: String! + alias: String! + publicTitle: String! + publicDescription: String! + publicImage: String! + publicNoIndex: Boolean! + imageUrl: URL + teamId: ID! + visualizer: Visualizer! + publishmentStatus: PublishmentStatus! + team: Team @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) } enum Visualizer { - CESIUM + CESIUM } enum PublishmentStatus { - PUBLIC - LIMITED - PRIVATE + PUBLIC + LIMITED + PRIVATE } # Plugin type Plugin { - id: PluginID! - name: String! - version: String! - description: String! - author: String! - repositoryUrl: String! - propertySchemaId: PropertySchemaID - extensions: [PluginExtension!]! - scenePlugin(sceneId: ID!): ScenePlugin - allTranslatedDescription: TranslatedString - allTranslatedName: TranslatedString - translatedName(lang: String): String! - translatedDescription(lang: String): String! - propertySchema: PropertySchema @goField(forceResolver: true) -} - -type PluginMetadata{ - name: String! - description: String! - createdAt: DateTime! + id: PluginID! + name: String! + version: String! + description: String! + author: String! + repositoryUrl: String! + propertySchemaId: PropertySchemaID + extensions: [PluginExtension!]! + scenePlugin(sceneId: ID!): ScenePlugin + allTranslatedDescription: TranslatedString + allTranslatedName: TranslatedString + translatedName(lang: String): String! + translatedDescription(lang: String): String! 
+ propertySchema: PropertySchema @goField(forceResolver: true) +} + +type PluginMetadata { + name: String! + description: String! + author: String! + thumbnailUrl: String! + createdAt: DateTime! } enum PluginExtensionType { - PRIMITIVE - WIDGET - BLOCK - VISUALIZER - INFOBOX + PRIMITIVE + WIDGET + BLOCK + VISUALIZER + INFOBOX } type PluginExtension { - extensionId: PluginExtensionID! - pluginId: PluginID! - type: PluginExtensionType! - name: String! - description: String! - icon: String! - visualizer: Visualizer! - propertySchemaId: PropertySchemaID! - allTranslatedName: TranslatedString - allTranslatedDescription: TranslatedString - plugin: Plugin @goField(forceResolver: true) - sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) - propertySchema: PropertySchema @goField(forceResolver: true) - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + extensionId: PluginExtensionID! + pluginId: PluginID! + type: PluginExtensionType! + name: String! + description: String! + icon: String! + visualizer: Visualizer! + propertySchemaId: PropertySchemaID! + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + plugin: Plugin @goField(forceResolver: true) + sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) } # Scene type Scene implements Node { - id: ID! - projectId: ID! - teamId: ID! - propertyId: ID! - createdAt: DateTime! - updatedAt: DateTime! - rootLayerId: ID! - widgets: [SceneWidget!]! - plugins: [ScenePlugin!]! - dynamicDatasetSchemas: [DatasetSchema!]! 
- project: Project @goField(forceResolver: true) - team: Team @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - rootLayer: LayerGroup @goField(forceResolver: true) - lockMode: SceneLockMode! @goField(forceResolver: true) - datasetSchemas(first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! @goField(forceResolver: true) + id: ID! + projectId: ID! + teamId: ID! + propertyId: ID! + createdAt: DateTime! + updatedAt: DateTime! + rootLayerId: ID! + widgets: [SceneWidget!]! + plugins: [ScenePlugin!]! + dynamicDatasetSchemas: [DatasetSchema!]! + project: Project @goField(forceResolver: true) + team: Team @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + rootLayer: LayerGroup @goField(forceResolver: true) + lockMode: SceneLockMode! @goField(forceResolver: true) + datasetSchemas( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! @goField(forceResolver: true) } enum SceneLockMode { - FREE - PENDING - DATASET_SYNCING - PLUGIN_UPGRADING - PUBLISHING + FREE + PENDING + DATASET_SYNCING + PLUGIN_UPGRADING + PUBLISHING } type SceneWidget { - id: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - propertyId: ID! - enabled: Boolean! - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - property: Property @goField(forceResolver: true) + id: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + propertyId: ID! + enabled: Boolean! + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + property: Property @goField(forceResolver: true) } type ScenePlugin { - pluginId: PluginID! - propertyId: ID - plugin: Plugin @goField(forceResolver: true) - property: Property @goField(forceResolver: true) + pluginId: PluginID! 
+ propertyId: ID + plugin: Plugin @goField(forceResolver: true) + property: Property @goField(forceResolver: true) } # Property type PropertySchema { - id: PropertySchemaID! - groups: [PropertySchemaGroup!]! - linkableFields: PropertyLinkableFields! + id: PropertySchemaID! + groups: [PropertySchemaGroup!]! + linkableFields: PropertyLinkableFields! } type PropertyLinkableFields { - schemaId: PropertySchemaID! - latlng: PropertySchemaFieldID - url: PropertySchemaFieldID - latlngField: PropertySchemaField @goField(forceResolver: true) - urlField: PropertySchemaField @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) + schemaId: PropertySchemaID! + latlng: PropertySchemaFieldID + url: PropertySchemaFieldID + latlngField: PropertySchemaField @goField(forceResolver: true) + urlField: PropertySchemaField @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) } type PropertySchemaGroup { - schemaGroupId: PropertySchemaFieldID! - schemaId: PropertySchemaID! - fields: [PropertySchemaField!]! - isList: Boolean! - isAvailableIf: PropertyCondition - title: String - allTranslatedTitle: TranslatedString - # For compatibility: "name" field will be removed in the futrue - name: PropertySchemaFieldID - representativeFieldId: PropertySchemaFieldID - representativeField: PropertySchemaField - schema: PropertySchema @goField(forceResolver: true) - translatedTitle(lang: String): String! @goField(forceResolver: true) + schemaGroupId: PropertySchemaFieldID! + schemaId: PropertySchemaID! + fields: [PropertySchemaField!]! + isList: Boolean! + isAvailableIf: PropertyCondition + title: String + allTranslatedTitle: TranslatedString + # For compatibility: "name" field will be removed in the futrue + name: PropertySchemaFieldID + representativeFieldId: PropertySchemaFieldID + representativeField: PropertySchemaField + schema: PropertySchema @goField(forceResolver: true) + translatedTitle(lang: String): String! 
@goField(forceResolver: true) } type PropertySchemaField { - fieldId: PropertySchemaFieldID! - type: ValueType! - title: String! - # For compatibility: "name" field will be removed in the futrue - name: String! - description: String! - prefix: String - suffix: String - defaultValue: Any - ui: PropertySchemaFieldUI - min: Float - max: Float - choices: [PropertySchemaFieldChoice!] - isAvailableIf: PropertyCondition - allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedName" field will be removed in the futrue - allTranslatedName: TranslatedString - allTranslatedDescription: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedName" field will be removed in the futrue - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + fieldId: PropertySchemaFieldID! + type: ValueType! + title: String! + # For compatibility: "name" field will be removed in the futrue + name: String! + description: String! + prefix: String + suffix: String + defaultValue: Any + ui: PropertySchemaFieldUI + min: Float + max: Float + choices: [PropertySchemaFieldChoice!] + isAvailableIf: PropertyCondition + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedName" field will be removed in the futrue + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + translatedTitle(lang: String): String! @goField(forceResolver: true) + # For compatibility: "translatedName" field will be removed in the futrue + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! 
@goField(forceResolver: true) } enum PropertySchemaFieldUI { - LAYER - MULTILINE - SELECTION - COLOR - RANGE - IMAGE - VIDEO - FILE - CAMERA_POSE + LAYER + MULTILINE + SELECTION + COLOR + RANGE + IMAGE + VIDEO + FILE + CAMERA_POSE } type PropertySchemaFieldChoice { - key: String! - title: String! - # For compatibility: "label" field will be removed in the futrue - label: String! - icon: String - allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedLabel" field will be removed in the futrue - allTranslatedLabel: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedLabel" field will be removed in the futrue - translatedLabel(lang: String): String! @goField(forceResolver: true) + key: String! + title: String! + # For compatibility: "label" field will be removed in the futrue + label: String! + icon: String + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedLabel" field will be removed in the futrue + allTranslatedLabel: TranslatedString + translatedTitle(lang: String): String! @goField(forceResolver: true) + # For compatibility: "translatedLabel" field will be removed in the futrue + translatedLabel(lang: String): String! @goField(forceResolver: true) } type PropertyCondition { - fieldId: PropertySchemaFieldID! - type: ValueType! - value: Any + fieldId: PropertySchemaFieldID! + type: ValueType! + value: Any } type Property implements Node { - id: ID! - schemaId: PropertySchemaID! - items: [PropertyItem!]! - schema: PropertySchema @goField(forceResolver: true) - layer: Layer @goField(forceResolver: true) - merged: MergedProperty @goField(forceResolver: true) + id: ID! + schemaId: PropertySchemaID! + items: [PropertyItem!]! 
+ schema: PropertySchema @goField(forceResolver: true) + layer: Layer @goField(forceResolver: true) + merged: MergedProperty @goField(forceResolver: true) } union PropertyItem = PropertyGroup | PropertyGroupList type PropertyGroup { - id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! - fields: [PropertyField!]! - schema: PropertySchema @goField(forceResolver: true) - schemaGroup: PropertySchemaGroup @goField(forceResolver: true) + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + fields: [PropertyField!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) } type PropertyGroupList { - id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! - groups: [PropertyGroup!]! - schema: PropertySchema @goField(forceResolver: true) - schemaGroup: PropertySchemaGroup @goField(forceResolver: true) + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + groups: [PropertyGroup!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) } type PropertyField { - id: PropertySchemaFieldID! - parentId: ID! - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! - links: [PropertyFieldLink!] - type: ValueType! - value: Any - parent: Property @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) - field: PropertySchemaField @goField(forceResolver: true) - actualValue: Any @goField(forceResolver: true) + id: PropertySchemaFieldID! + parentId: ID! + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! + links: [PropertyFieldLink!] + type: ValueType! 
+ value: Any + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) } type PropertyFieldLink { - datasetId: ID - datasetSchemaId: ID! - datasetSchemaFieldId: ID! - dataset: Dataset @goField(forceResolver: true) - datasetField: DatasetField @goField(forceResolver: true) - datasetSchema: DatasetSchema @goField(forceResolver: true) - datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) + datasetId: ID + datasetSchemaId: ID! + datasetSchemaFieldId: ID! + dataset: Dataset @goField(forceResolver: true) + datasetField: DatasetField @goField(forceResolver: true) + datasetSchema: DatasetSchema @goField(forceResolver: true) + datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) } type MergedProperty { - originalId: ID - parentId: ID - # note: schemaId will not always be set - schemaId: PropertySchemaID - linkedDatasetId: ID - original: Property @goField(forceResolver: true) - parent: Property @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - groups: [MergedPropertyGroup!]! @goField(forceResolver: true) + originalId: ID + parentId: ID + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + original: Property @goField(forceResolver: true) + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + groups: [MergedPropertyGroup!]! @goField(forceResolver: true) } type MergedPropertyGroup { - originalPropertyId: ID - parentPropertyId: ID - originalId: ID - parentId: ID - schemaGroupId: PropertySchemaFieldID! - # note: schemaId will not always be set - schemaId: PropertySchemaID - linkedDatasetId: ID - fields: [MergedPropertyField!]! - groups: [MergedPropertyGroup!]! 
- originalProperty: Property @goField(forceResolver: true) - parentProperty: Property @goField(forceResolver: true) - original: PropertyGroup @goField(forceResolver: true) - parent: PropertyGroup @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) + originalPropertyId: ID + parentPropertyId: ID + originalId: ID + parentId: ID + schemaGroupId: PropertySchemaFieldID! + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + fields: [MergedPropertyField!]! + groups: [MergedPropertyGroup!]! + originalProperty: Property @goField(forceResolver: true) + parentProperty: Property @goField(forceResolver: true) + original: PropertyGroup @goField(forceResolver: true) + parent: PropertyGroup @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) } type MergedPropertyField { - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! - value: Any - type: ValueType! - links: [PropertyFieldLink!] - overridden: Boolean! - schema: PropertySchema @goField(forceResolver: true) - field: PropertySchemaField @goField(forceResolver: true) - actualValue: Any @goField(forceResolver: true) + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! + links: [PropertyFieldLink!] + overridden: Boolean! + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) } # Dataset type DatasetSchema implements Node { - id: ID! - source: String! - name: String! - sceneId: ID! - fields: [DatasetSchemaField!]! - representativeFieldId: ID - dynamic: Boolean - datasets(first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! 
@goField(forceResolver: true) - scene: Scene @goField(forceResolver: true) - representativeField: DatasetSchemaField @goField(forceResolver: true) + id: ID! + source: String! + name: String! + sceneId: ID! + fields: [DatasetSchemaField!]! + representativeFieldId: ID + dynamic: Boolean + datasets( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + representativeField: DatasetSchemaField @goField(forceResolver: true) } type DatasetSchemaField implements Node { - id: ID! - source: String! - name: String! - type: ValueType! - schemaId: ID! - refId: ID - schema: DatasetSchema @goField(forceResolver: true) - ref: DatasetSchema @goField(forceResolver: true) + id: ID! + source: String! + name: String! + type: ValueType! + schemaId: ID! + refId: ID + schema: DatasetSchema @goField(forceResolver: true) + ref: DatasetSchema @goField(forceResolver: true) } type Dataset implements Node { - id: ID! - source: String! - schemaId: ID! - fields: [DatasetField!]! - schema: DatasetSchema @goField(forceResolver: true) - name: String @goField(forceResolver: true) + id: ID! + source: String! + schemaId: ID! + fields: [DatasetField!]! + schema: DatasetSchema @goField(forceResolver: true) + name: String @goField(forceResolver: true) } type DatasetField { - fieldId: ID! - schemaId: ID! - source: String! - type: ValueType! - value: Any - schema: DatasetSchema @goField(forceResolver: true) - field: DatasetSchemaField @goField(forceResolver: true) - valueRef: Dataset @goField(forceResolver: true) + fieldId: ID! + schemaId: ID! + source: String! + type: ValueType! + value: Any + schema: DatasetSchema @goField(forceResolver: true) + field: DatasetSchemaField @goField(forceResolver: true) + valueRef: Dataset @goField(forceResolver: true) } # Layer interface Layer { - id: ID! - name: String! - isVisible: Boolean! 
- propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID - infobox: Infobox - # parentId will not be always set - parentId: ID - parent: LayerGroup - property: Property - plugin: Plugin - extension: PluginExtension + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + parent: LayerGroup + property: Property + plugin: Plugin + extension: PluginExtension } union Layers = LayerItem | LayerGroup enum LayerEncodingFormat { - KML - CZML - GEOJSON - SHAPE - REEARTH + KML + CZML + GEOJSON + SHAPE + REEARTH } type LayerItem implements Layer { - id: ID! - name: String! - isVisible: Boolean! - propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID - infobox: Infobox - # parentId will not be always set - parentId: ID - linkedDatasetId: ID - parent: LayerGroup @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - merged: MergedLayer @goField(forceResolver: true) + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetId: ID + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedLayer @goField(forceResolver: true) } type LayerGroup implements Layer { - id: ID! - name: String! - isVisible: Boolean! 
- propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID - infobox: Infobox - # parentId will not be always set - parentId: ID - linkedDatasetSchemaId: ID - root: Boolean! - layerIds: [ID!]! - parent: LayerGroup @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) - layers: [Layer]! @goField(forceResolver: true) + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetSchemaId: ID + root: Boolean! + layerIds: [ID!]! + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + layers: [Layer]! @goField(forceResolver: true) } type Infobox { - layerId: ID! - propertyId: ID! - fields: [InfoboxField!]! - linkedDatasetId: ID - layer: Layer! @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - merged: MergedInfobox @goField(forceResolver: true) + layerId: ID! + propertyId: ID! + fields: [InfoboxField!]! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfobox @goField(forceResolver: true) } type InfoboxField { - id: ID! - layerId: ID! - propertyId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - linkedDatasetId: ID - layer: Layer! @goField(forceResolver: true) - infobox: Infobox! 
@goField(forceResolver: true) - property: Property @goField(forceResolver: true) - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - merged: MergedInfoboxField @goField(forceResolver: true) + id: ID! + layerId: ID! + propertyId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + infobox: Infobox! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfoboxField @goField(forceResolver: true) } type MergedLayer { - originalId: ID! - parentId: ID - property: MergedProperty - infobox: MergedInfobox - original: LayerItem @goField(forceResolver: true) - parent: LayerGroup @goField(forceResolver: true) + originalId: ID! + parentId: ID + property: MergedProperty + infobox: MergedInfobox + original: LayerItem @goField(forceResolver: true) + parent: LayerGroup @goField(forceResolver: true) } type MergedInfobox { - property: MergedProperty - fields: [MergedInfoboxField!]! + property: MergedProperty + fields: [MergedInfoboxField!]! } type MergedInfoboxField { - originalId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - property: MergedProperty - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) + originalId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + property: MergedProperty + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) } - # InputType input CreateAssetInput { - teamId: ID! - file: Upload! + teamId: ID! + file: Upload! } input RemoveAssetInput { - assetId: ID! + assetId: ID! 
} input SignupInput { - lang: Lang - theme: Theme - userId: ID - teamId: ID - secret: String + lang: Lang + theme: Theme + userId: ID + teamId: ID + secret: String } input UpdateMeInput { - name: String - email: String - lang: Lang - theme: Theme - password: String - passwordConfirmation: String + name: String + email: String + lang: Lang + theme: Theme + password: String + passwordConfirmation: String } input RemoveMyAuthInput { - auth: String! + auth: String! } input DeleteMeInput { - userId: ID! + userId: ID! } input CreateTeamInput { - name: String! + name: String! } input UpdateTeamInput { - teamId: ID! - name: String! + teamId: ID! + name: String! } input AddMemberToTeamInput { - teamId: ID! - userId: ID! - role: Role! + teamId: ID! + userId: ID! + role: Role! } input RemoveMemberFromTeamInput { - teamId: ID! - userId: ID! + teamId: ID! + userId: ID! } input UpdateMemberOfTeamInput { - teamId: ID! - userId: ID! - role: Role! + teamId: ID! + userId: ID! + role: Role! } input DeleteTeamInput { - teamId: ID! + teamId: ID! } input CreateProjectInput { - teamId: ID! - visualizer: Visualizer! - name: String - description: String - imageUrl: URL - alias: String - archived: Boolean + teamId: ID! + visualizer: Visualizer! + name: String + description: String + imageUrl: URL + alias: String + archived: Boolean } input UpdateProjectInput { - projectId: ID! - name: String - description: String - archived: Boolean - isBasicAuthActive: Boolean - basicAuthUsername: String - basicAuthPassword: String - alias: String - imageUrl: URL - publicTitle: String - publicDescription: String - publicImage: Upload - publicNoIndex: Boolean - deleteImageUrl: Boolean - deletePublicImage: Boolean + projectId: ID! 
+ name: String + description: String + archived: Boolean + isBasicAuthActive: Boolean + basicAuthUsername: String + basicAuthPassword: String + alias: String + imageUrl: URL + publicTitle: String + publicDescription: String + publicImage: Upload + publicNoIndex: Boolean + deleteImageUrl: Boolean + deletePublicImage: Boolean } input UploadPluginInput { - file: Upload! + file: Upload! } input CreateSceneInput { - projectId: ID! + projectId: ID! } input PublishProjectInput { - projectId: ID! - alias: String - status: PublishmentStatus! + projectId: ID! + alias: String + status: PublishmentStatus! } input DeleteProjectInput { - projectId: ID! + projectId: ID! } input AddWidgetInput { - sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! } input UpdateWidgetInput { - sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - enabled: Boolean + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + enabled: Boolean } input RemoveWidgetInput { - sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! } input InstallPluginInput { - sceneId: ID! - pluginId: PluginID! + sceneId: ID! + pluginId: PluginID! } input UninstallPluginInput { - sceneId: ID! - pluginId: PluginID! + sceneId: ID! + pluginId: PluginID! } input UpgradePluginInput { - sceneId: ID! - pluginId: PluginID! - toPluginId: PluginID! + sceneId: ID! + pluginId: PluginID! + toPluginId: PluginID! } input SyncDatasetInput { - sceneId: ID! - url: String! + sceneId: ID! + url: String! } input UpdatePropertyValueInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - value: Any - type: ValueType! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! 
} input UpdatePropertyValueLatLngInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! } input UpdatePropertyValueLatLngHeightInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - height: Float! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + height: Float! } input UpdatePropertyValueCameraInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - altitude: Float! - heading: Float! - pitch: Float! - roll: Float! - fov: Float! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! } input UpdatePropertyValueTypographyInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - fontFamily: String - fontWeight: String - fontSize: Int - color: String - textAlign: TextAlign - bold: Boolean - italic: Boolean - underline: Boolean + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean } input RemovePropertyFieldInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! } input UploadFileToPropertyInput { - propertyId: ID! 
- schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - file: Upload! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + file: Upload! } input LinkDatasetToPropertyValueInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - datasetSchemaIds: [ID!]! - datasetSchemaFieldIds: [ID!]! - datasetIds: [ID!] + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + datasetSchemaIds: [ID!]! + datasetSchemaFieldIds: [ID!]! + datasetIds: [ID!] } input UnlinkPropertyValueInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! } input AddPropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - index: Int - nameFieldValue: Any - nameFieldType: ValueType + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + index: Int + nameFieldValue: Any + nameFieldType: ValueType } input MovePropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - itemId: ID! - index: Int! + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! + index: Int! } input RemovePropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - itemId: ID! + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! } input UpdatePropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - operations: [UpdatePropertyItemOperationInput!]! + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + operations: [UpdatePropertyItemOperationInput!]! } input UpdatePropertyItemOperationInput { - operation: ListOperation! - itemId: ID - index: Int - nameFieldValue: Any - nameFieldType: ValueType + operation: ListOperation! 
+ itemId: ID + index: Int + nameFieldValue: Any + nameFieldType: ValueType } input AddLayerItemInput { - parentLayerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - index: Int - name: String - lat: Float - lng: Float + parentLayerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int + name: String + lat: Float + lng: Float } input AddLayerGroupInput { - parentLayerId: ID! - pluginId: PluginID - extensionId: PluginExtensionID - index: Int - linkedDatasetSchemaID: ID - name: String + parentLayerId: ID! + pluginId: PluginID + extensionId: PluginExtensionID + index: Int + linkedDatasetSchemaID: ID + name: String } input RemoveLayerInput { - layerId: ID! + layerId: ID! } input UpdateLayerInput { - layerId: ID! - name: String - visible: Boolean + layerId: ID! + name: String + visible: Boolean } input MoveLayerInput { - layerId: ID! - destLayerId: ID - index: Int + layerId: ID! + destLayerId: ID + index: Int } input CreateInfoboxInput { - layerId: ID! + layerId: ID! } input RemoveInfoboxInput { - layerId: ID! + layerId: ID! } input AddInfoboxFieldInput { - layerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - index: Int + layerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int } input MoveInfoboxFieldInput { - layerId: ID! - infoboxFieldId: ID! - index: Int! + layerId: ID! + infoboxFieldId: ID! + index: Int! } input RemoveInfoboxFieldInput { - layerId: ID! - infoboxFieldId: ID! + layerId: ID! + infoboxFieldId: ID! } input UpdateDatasetSchemaInput { - schemaId: ID! - name: String! + schemaId: ID! + name: String! } input AddDynamicDatasetSchemaInput { - sceneId: ID! + sceneId: ID! } input AddDynamicDatasetInput { - datasetSchemaId: ID! - author: String! - content: String! - lat: Float - lng: Float - target: String + datasetSchemaId: ID! + author: String! + content: String! + lat: Float + lng: Float + target: String } input RemoveDatasetSchemaInput { - schemaId: ID! 
- force: Boolean + schemaId: ID! + force: Boolean } input ImportLayerInput { - layerId: ID! - file: Upload! - format: LayerEncodingFormat! + layerId: ID! + file: Upload! + format: LayerEncodingFormat! } input ImportDatasetInput { - file: Upload! - sceneId: ID! - datasetSchemaId: ID + file: Upload! + sceneId: ID! + datasetSchemaId: ID } input AddDatasetSchemaInput { - sceneId: ID! - name: String! - representativefield: ID + sceneId: ID! + name: String! + representativefield: ID } # Payload type CreateAssetPayload { - asset: Asset! + asset: Asset! } type RemoveAssetPayload { - assetId: ID! + assetId: ID! } type SignupPayload { - user: User! - team: Team! + user: User! + team: Team! } type UpdateMePayload { - user: User! + user: User! } type DeleteMePayload { - userId: ID! + userId: ID! } type CreateTeamPayload { - team: Team! + team: Team! } type UpdateTeamPayload { - team: Team! + team: Team! } type AddMemberToTeamPayload { - team: Team! + team: Team! } type RemoveMemberFromTeamPayload { - team: Team! + team: Team! } type UpdateMemberOfTeamPayload { - team: Team! + team: Team! } type DeleteTeamPayload { - teamId: ID! + teamId: ID! } type ProjectPayload { - project: Project! + project: Project! } type DeleteProjectPayload { - projectId: ID! + projectId: ID! } type UploadPluginPayload { - plugin: Plugin! + plugin: Plugin! } type CreateScenePayload { - scene: Scene! + scene: Scene! } type AddWidgetPayload { - scene: Scene! - sceneWidget: SceneWidget! + scene: Scene! + sceneWidget: SceneWidget! } type UpdateWidgetPayload { - scene: Scene! - sceneWidget: SceneWidget! + scene: Scene! + sceneWidget: SceneWidget! } type RemoveWidgetPayload { - scene: Scene! - pluginId: PluginID! - extensionId: PluginExtensionID! + scene: Scene! + pluginId: PluginID! + extensionId: PluginExtensionID! } type InstallPluginPayload { - scene: Scene! - scenePlugin: ScenePlugin! + scene: Scene! + scenePlugin: ScenePlugin! } type UninstallPluginPayload { - scene: Scene! - scenePlugin: ScenePlugin! 
+ scene: Scene! + scenePlugin: ScenePlugin! } type UpgradePluginPayload { - scene: Scene! - scenePlugin: ScenePlugin! + scene: Scene! + scenePlugin: ScenePlugin! } type SyncDatasetPayload { - sceneId: ID! - url: String! - datasetSchema: [DatasetSchema!]! - dataset: [Dataset!]! + sceneId: ID! + url: String! + datasetSchema: [DatasetSchema!]! + dataset: [Dataset!]! } type PropertyFieldPayload { - property: Property! - propertyField: PropertyField + property: Property! + propertyField: PropertyField } type PropertyItemPayload { - property: Property! - propertyItem: PropertyItem + property: Property! + propertyItem: PropertyItem } type AddLayerItemPayload { - layer: LayerItem! - parentLayer: LayerGroup! - index: Int + layer: LayerItem! + parentLayer: LayerGroup! + index: Int } type AddLayerGroupPayload { - layer: LayerGroup! - parentLayer: LayerGroup! - index: Int + layer: LayerGroup! + parentLayer: LayerGroup! + index: Int } type RemoveLayerPayload { - layerId: ID! - parentLayer: LayerGroup! + layerId: ID! + parentLayer: LayerGroup! } type UpdateLayerPayload { - layer: Layer! + layer: Layer! } type MoveLayerPayload { - layerId: ID! - fromParentLayer: LayerGroup! - toParentLayer: LayerGroup! - index: Int! + layerId: ID! + fromParentLayer: LayerGroup! + toParentLayer: LayerGroup! + index: Int! } type CreateInfoboxPayload { - layer: Layer! + layer: Layer! } type RemoveInfoboxPayload { - layer: Layer! + layer: Layer! } type AddInfoboxFieldPayload { - infoboxField: InfoboxField! - layer: Layer! + infoboxField: InfoboxField! + layer: Layer! } type MoveInfoboxFieldPayload { - infoboxFieldId: ID! - layer: Layer! - index: Int! + infoboxFieldId: ID! + layer: Layer! + index: Int! } type RemoveInfoboxFieldPayload { - infoboxFieldId: ID! - layer: Layer! + infoboxFieldId: ID! + layer: Layer! } type UpdateDatasetSchemaPayload { - datasetSchema: DatasetSchema + datasetSchema: DatasetSchema } type RemoveDatasetSchemaPayload { - schemaId: ID! + schemaId: ID! 
} type AddDynamicDatasetSchemaPayload { - datasetSchema: DatasetSchema + datasetSchema: DatasetSchema } type AddDynamicDatasetPayload { - datasetSchema: DatasetSchema - dataset: Dataset + datasetSchema: DatasetSchema + dataset: Dataset } type ImportLayerPayload { - layers: [Layer!]! - parentLayer: LayerGroup! + layers: [Layer!]! + parentLayer: LayerGroup! } type ImportDatasetPayload { - datasetSchema: DatasetSchema! + datasetSchema: DatasetSchema! } type AddDatasetSchemaPayload { - datasetSchema: DatasetSchema + datasetSchema: DatasetSchema } # Connection enum NodeType { - USER - TEAM - PROJECT - PLUGIN - SCENE - PROPERTY_SCHEMA - PROPERTY - DATASET_SCHEMA - DATASET - LAYER_GROUP - LAYER_ITEM + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM } type AssetConnection { - edges: [AssetEdge!]! - nodes: [Asset]! - pageInfo: PageInfo! - totalCount: Int! + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! } type AssetEdge { - cursor: Cursor! - node: Asset + cursor: Cursor! + node: Asset } type ProjectConnection { - edges: [ProjectEdge!]! - nodes: [Project]! - pageInfo: PageInfo! - totalCount: Int! + edges: [ProjectEdge!]! + nodes: [Project]! + pageInfo: PageInfo! + totalCount: Int! } type ProjectEdge { - cursor: Cursor! - node: Project + cursor: Cursor! + node: Project } type DatasetSchemaConnection { - edges: [DatasetSchemaEdge!]! - nodes: [DatasetSchema]! - pageInfo: PageInfo! - totalCount: Int! + edges: [DatasetSchemaEdge!]! + nodes: [DatasetSchema]! + pageInfo: PageInfo! + totalCount: Int! } type DatasetSchemaEdge { - cursor: Cursor! - node: DatasetSchema + cursor: Cursor! + node: DatasetSchema } type DatasetConnection { - edges: [DatasetEdge!]! - nodes: [Dataset]! - pageInfo: PageInfo! - totalCount: Int! + edges: [DatasetEdge!]! + nodes: [Dataset]! + pageInfo: PageInfo! + totalCount: Int! } type DatasetEdge { - cursor: Cursor! - node: Dataset + cursor: Cursor! 
+ node: Dataset } - # Query type Query { - me: User - node(id: ID!, type: NodeType!): Node - nodes(id: [ID!]!, type: NodeType!): [Node]! - propertySchema(id: PropertySchemaID!): PropertySchema - propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! - plugin(id: PluginID!): Plugin - plugins(id: [PluginID!]!): [Plugin!]! - layer(id: ID!): Layer - scene(projectId: ID!): Scene - assets(teamId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! - projects(teamId: ID!, includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! - datasetSchemas(sceneId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! - datasets(datasetSchemaId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! - sceneLock(sceneId: ID!): SceneLockMode - dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! - searchUser(nameOrEmail: String!): SearchedUser - checkProjectAlias(alias: String!): CheckProjectAliasPayload! - installablePlugins: [PluginMetadata!]! + me: User + node(id: ID!, type: NodeType!): Node + nodes(id: [ID!]!, type: NodeType!): [Node]! + propertySchema(id: PropertySchemaID!): PropertySchema + propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! + plugin(id: PluginID!): Plugin + plugins(id: [PluginID!]!): [Plugin!]! + layer(id: ID!): Layer + scene(projectId: ID!): Scene + assets( + teamId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): AssetConnection! + projects( + teamId: ID! + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! + datasetSchemas( + sceneId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! + datasets( + datasetSchemaId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! + sceneLock(sceneId: ID!): SceneLockMode + dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! 
+ searchUser(nameOrEmail: String!): SearchedUser + checkProjectAlias(alias: String!): CheckProjectAliasPayload! + installablePlugins: [PluginMetadata!]! } # Mutation type Mutation { - # Asset - createAsset(input: CreateAssetInput!): CreateAssetPayload - removeAsset(input: RemoveAssetInput!): RemoveAssetPayload - - # User - signup(input: SignupInput!): SignupPayload - updateMe(input: UpdateMeInput!): UpdateMePayload - removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload - deleteMe(input: DeleteMeInput!): DeleteMePayload - - # Team - createTeam(input: CreateTeamInput!): CreateTeamPayload - deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload - updateTeam(input: UpdateTeamInput!): UpdateTeamPayload - addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload - removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload - updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload - - # Project - createProject(input: CreateProjectInput!): ProjectPayload - updateProject(input: UpdateProjectInput!): ProjectPayload - publishProject(input: PublishProjectInput!): ProjectPayload - deleteProject(input: DeleteProjectInput!): DeleteProjectPayload - - # Plugin - uploadPlugin(input: UploadPluginInput!): UploadPluginPayload - - # Scene - createScene(input: CreateSceneInput!): CreateScenePayload - addWidget(input: AddWidgetInput!): AddWidgetPayload - updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload - removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload - installPlugin(input: InstallPluginInput!): InstallPluginPayload - uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload - upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload - - # Dataset - updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload - syncDataset(input: SyncDatasetInput!): SyncDatasetPayload - addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload - 
addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload - removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload - importDataset(input: ImportDatasetInput!): ImportDatasetPayload - addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload - - # Property - updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload - updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload - updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload - updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload - updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload - removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload - uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload - linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload - unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload - addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload - movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload - removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload - updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload - - # Layer - addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload - addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload - removeLayer(input: RemoveLayerInput!): RemoveLayerPayload - updateLayer(input: UpdateLayerInput!): UpdateLayerPayload - moveLayer(input: MoveLayerInput!): MoveLayerPayload - createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload - removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload - addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload - moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload - 
removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload - importLayer(input:ImportLayerInput!): ImportLayerPayload + # Asset + createAsset(input: CreateAssetInput!): CreateAssetPayload + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload + + # User + signup(input: SignupInput!): SignupPayload + updateMe(input: UpdateMeInput!): UpdateMePayload + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload + deleteMe(input: DeleteMeInput!): DeleteMePayload + + # Team + createTeam(input: CreateTeamInput!): CreateTeamPayload + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload + addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload + removeMemberFromTeam( + input: RemoveMemberFromTeamInput! + ): RemoveMemberFromTeamPayload + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload + + # Project + createProject(input: CreateProjectInput!): ProjectPayload + updateProject(input: UpdateProjectInput!): ProjectPayload + publishProject(input: PublishProjectInput!): ProjectPayload + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload + + # Plugin + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload + + # Scene + createScene(input: CreateSceneInput!): CreateScenePayload + addWidget(input: AddWidgetInput!): AddWidgetPayload + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload + installPlugin(input: InstallPluginInput!): InstallPluginPayload + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload + + # Dataset + updateDatasetSchema( + input: UpdateDatasetSchemaInput! + ): UpdateDatasetSchemaPayload + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload + addDynamicDatasetSchema( + input: AddDynamicDatasetSchemaInput! 
+ ): AddDynamicDatasetSchemaPayload + addDynamicDataset(input: AddDynamicDatasetInput!): AddDynamicDatasetPayload + removeDatasetSchema( + input: RemoveDatasetSchemaInput! + ): RemoveDatasetSchemaPayload + importDataset(input: ImportDatasetInput!): ImportDatasetPayload + addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload + + # Property + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload + updatePropertyValueLatLng( + input: UpdatePropertyValueLatLngInput! + ): PropertyFieldPayload + updatePropertyValueLatLngHeight( + input: UpdatePropertyValueLatLngHeightInput! + ): PropertyFieldPayload + updatePropertyValueCamera( + input: UpdatePropertyValueCameraInput! + ): PropertyFieldPayload + updatePropertyValueTypography( + input: UpdatePropertyValueTypographyInput! + ): PropertyFieldPayload + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload + linkDatasetToPropertyValue( + input: LinkDatasetToPropertyValueInput! 
+ ): PropertyFieldPayload + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload + + # Layer + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload + moveLayer(input: MoveLayerInput!): MoveLayerPayload + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload + importLayer(input: ImportLayerInput!): ImportLayerPayload } schema { - query: Query - mutation: Mutation + query: Query + mutation: Mutation } `, BuiltIn: false}, } @@ -18694,6 +18774,76 @@ func (ec *executionContext) _PluginMetadata_description(ctx context.Context, fie return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginMetadata", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack 
in children + return obj.Author, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginMetadata", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ThumbnailURL, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -32876,6 +33026,16 @@ func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.Selecti if out.Values[i] == graphql.Null { invalids++ } + case "author": + out.Values[i] = ec._PluginMetadata_author(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "thumbnailUrl": + out.Values[i] = ec._PluginMetadata_thumbnailUrl(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } case "createdAt": out.Values[i] = ec._PluginMetadata_createdAt(ctx, 
field, obj) if out.Values[i] == graphql.Null { diff --git a/internal/infrastructure/github/plugin_registry_test.go b/internal/infrastructure/github/plugin_registry_test.go index 075795fec..7da6ef6be 100644 --- a/internal/infrastructure/github/plugin_registry_test.go +++ b/internal/infrastructure/github/plugin_registry_test.go @@ -20,16 +20,18 @@ func TestPluginRegistry_FetchMetadata(t *testing.T) { httpmock.Activate() defer httpmock.DeactivateAndReset() httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", - httpmock.NewStringResponder(200, `[{"name": "reearth","description": "Official Plugin","createdAt": "2021-03-16T04:19:57.592Z"}]`)) + httpmock.NewStringResponder(200, `[{"name": "reearth","description": "Official Plugin", "author": "reearth", "thumbnailUrl": "", "createdAt": "2021-03-16T04:19:57.592Z"}]`)) d := NewPluginRegistry() res, err := d.FetchMetadata(context.Background()) tm, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") assert.Equal(t, res, []*plugin.Metadata{ { - Name: "reearth", - Description: "Official Plugin", - CreatedAt: tm, + Name: "reearth", + Description: "Official Plugin", + Author: "reearth", + ThumbnailUrl: "", + CreatedAt: tm, }, }) assert.NoError(t, err) diff --git a/pkg/plugin/metadata.go b/pkg/plugin/metadata.go index 72a274a4c..f4ae35388 100644 --- a/pkg/plugin/metadata.go +++ b/pkg/plugin/metadata.go @@ -3,7 +3,9 @@ package plugin import "time" type Metadata struct { - Name string `json:"name"` - Description string `json:"description"` - CreatedAt time.Time `json:"createdAt"` + Name string `json:"name"` + Description string `json:"description"` + ThumbnailUrl string `json:"thumbnailUrl"` + Author string `json:"author"` + CreatedAt time.Time `json:"createdAt"` } diff --git a/schema.graphql b/schema.graphql index 1b72a5b09..d2ee5cd3e 100644 --- a/schema.graphql +++ b/schema.graphql @@ -3,15 +3,15 @@ scalar Upload scalar Any -directive @goModel(model: String, models: 
[String!]) on OBJECT - | INPUT_OBJECT - | SCALAR - | ENUM - | INTERFACE - | UNION +directive @goModel( + model: String + models: [String!] +) on OBJECT | INPUT_OBJECT | SCALAR | ENUM | INTERFACE | UNION -directive @goField(forceResolver: Boolean, name: String) on INPUT_FIELD_DEFINITION - | FIELD_DEFINITION +directive @goField( + forceResolver: Boolean + name: String +) on INPUT_FIELD_DEFINITION | FIELD_DEFINITION # Basic types @@ -27,1375 +27,1439 @@ scalar PropertySchemaFieldID scalar TranslatedString type LatLng { - lat: Float! - lng: Float! + lat: Float! + lng: Float! } type LatLngHeight { - lat: Float! - lng: Float! - height: Float! + lat: Float! + lng: Float! + height: Float! } type Camera { - lat: Float! - lng: Float! - altitude: Float! - heading: Float! - pitch: Float! - roll: Float! - fov: Float! + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! } type Typography { - fontFamily: String - fontWeight: String - fontSize: Int - color: String - textAlign: TextAlign - bold: Boolean - italic: Boolean - underline: Boolean + fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean } type Rect { - west: Float! - south: Float! - east: Float! - north: Float! + west: Float! + south: Float! + east: Float! + north: Float! } enum TextAlign { - LEFT - CENTER - RIGHT - JUSTIFY - JUSTIFY_ALL + LEFT + CENTER + RIGHT + JUSTIFY + JUSTIFY_ALL } enum ValueType { - BOOL - NUMBER - STRING - REF - URL - LATLNG - LATLNGHEIGHT - CAMERA - TYPOGRAPHY - COORDINATES - POLYGON - RECT + BOOL + NUMBER + STRING + REF + URL + LATLNG + LATLNGHEIGHT + CAMERA + TYPOGRAPHY + COORDINATES + POLYGON + RECT } enum ListOperation { - ADD, - MOVE, - REMOVE + ADD + MOVE + REMOVE } enum Theme { - DEFAULT - LIGHT - DARK + DEFAULT + LIGHT + DARK } # Meta Type interface Node { - id: ID! + id: ID! 
} type PageInfo { - startCursor: Cursor - endCursor: Cursor - hasNextPage: Boolean! - hasPreviousPage: Boolean! + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! } # Asset type Asset implements Node { - id: ID! - createdAt: DateTime! - teamId: ID! - name: String! - size: FileSize! - url: String! - contentType: String! - team: Team @goField(forceResolver: true) + id: ID! + createdAt: DateTime! + teamId: ID! + name: String! + size: FileSize! + url: String! + contentType: String! + team: Team @goField(forceResolver: true) } # User type User implements Node { - id: ID! - name: String! - email: String! - lang: Lang! - theme: Theme! - myTeamId: ID! - auths: [String!]! - teams: [Team!]! @goField(forceResolver: true) - myTeam: Team! @goField(forceResolver: true) + id: ID! + name: String! + email: String! + lang: Lang! + theme: Theme! + myTeamId: ID! + auths: [String!]! + teams: [Team!]! @goField(forceResolver: true) + myTeam: Team! @goField(forceResolver: true) } type SearchedUser { - userId: ID! - userName: String! - userEmail: String! + userId: ID! + userName: String! + userEmail: String! } type CheckProjectAliasPayload { - alias: String! - available: Boolean! + alias: String! + available: Boolean! } type Team implements Node { - id: ID! - name: String! - members: [TeamMember!]! - personal: Boolean! - assets(first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! @goField(forceResolver: true) - projects(includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! @goField(forceResolver: true) + id: ID! + name: String! + members: [TeamMember!]! + personal: Boolean! + assets( + first: Int + last: Int + after: Cursor + before: Cursor + ): AssetConnection! @goField(forceResolver: true) + projects( + includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! @goField(forceResolver: true) } type TeamMember { - userId: ID! 
- role: Role! - user: User @goField(forceResolver: true) + userId: ID! + role: Role! + user: User @goField(forceResolver: true) } enum Role { - # a role who can read project - READER - # a role who can read and write project - WRITER - # a eole who can have full controll of project - OWNER + # a role who can read project + READER + # a role who can read and write project + WRITER + # a eole who can have full controll of project + OWNER } # Project type Project implements Node { - id: ID! - isArchived: Boolean! - isBasicAuthActive: Boolean! - basicAuthUsername: String! - basicAuthPassword: String! - createdAt: DateTime! - updatedAt: DateTime! - publishedAt: DateTime - name: String! - description: String! - alias: String! - publicTitle: String! - publicDescription: String! - publicImage: String! - publicNoIndex: Boolean! - imageUrl: URL - teamId: ID! - visualizer: Visualizer! - publishmentStatus: PublishmentStatus! - team: Team @goField(forceResolver: true) - scene: Scene @goField(forceResolver: true) + id: ID! + isArchived: Boolean! + isBasicAuthActive: Boolean! + basicAuthUsername: String! + basicAuthPassword: String! + createdAt: DateTime! + updatedAt: DateTime! + publishedAt: DateTime + name: String! + description: String! + alias: String! + publicTitle: String! + publicDescription: String! + publicImage: String! + publicNoIndex: Boolean! + imageUrl: URL + teamId: ID! + visualizer: Visualizer! + publishmentStatus: PublishmentStatus! + team: Team @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) } enum Visualizer { - CESIUM + CESIUM } enum PublishmentStatus { - PUBLIC - LIMITED - PRIVATE + PUBLIC + LIMITED + PRIVATE } # Plugin type Plugin { - id: PluginID! - name: String! - version: String! - description: String! - author: String! - repositoryUrl: String! - propertySchemaId: PropertySchemaID - extensions: [PluginExtension!]! 
- scenePlugin(sceneId: ID!): ScenePlugin - allTranslatedDescription: TranslatedString - allTranslatedName: TranslatedString - translatedName(lang: String): String! - translatedDescription(lang: String): String! - propertySchema: PropertySchema @goField(forceResolver: true) -} - -type PluginMetadata{ - name: String! - description: String! - createdAt: DateTime! + id: PluginID! + name: String! + version: String! + description: String! + author: String! + repositoryUrl: String! + propertySchemaId: PropertySchemaID + extensions: [PluginExtension!]! + scenePlugin(sceneId: ID!): ScenePlugin + allTranslatedDescription: TranslatedString + allTranslatedName: TranslatedString + translatedName(lang: String): String! + translatedDescription(lang: String): String! + propertySchema: PropertySchema @goField(forceResolver: true) +} + +type PluginMetadata { + name: String! + description: String! + author: String! + thumbnailUrl: String! + createdAt: DateTime! } enum PluginExtensionType { - PRIMITIVE - WIDGET - BLOCK - VISUALIZER - INFOBOX + PRIMITIVE + WIDGET + BLOCK + VISUALIZER + INFOBOX } type PluginExtension { - extensionId: PluginExtensionID! - pluginId: PluginID! - type: PluginExtensionType! - name: String! - description: String! - icon: String! - visualizer: Visualizer! - propertySchemaId: PropertySchemaID! - allTranslatedName: TranslatedString - allTranslatedDescription: TranslatedString - plugin: Plugin @goField(forceResolver: true) - sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) - propertySchema: PropertySchema @goField(forceResolver: true) - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + extensionId: PluginExtensionID! + pluginId: PluginID! + type: PluginExtensionType! + name: String! + description: String! + icon: String! + visualizer: Visualizer! + propertySchemaId: PropertySchemaID! 
+ allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + plugin: Plugin @goField(forceResolver: true) + sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) + propertySchema: PropertySchema @goField(forceResolver: true) + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) } # Scene type Scene implements Node { - id: ID! - projectId: ID! - teamId: ID! - propertyId: ID! - createdAt: DateTime! - updatedAt: DateTime! - rootLayerId: ID! - widgets: [SceneWidget!]! - plugins: [ScenePlugin!]! - dynamicDatasetSchemas: [DatasetSchema!]! - project: Project @goField(forceResolver: true) - team: Team @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - rootLayer: LayerGroup @goField(forceResolver: true) - lockMode: SceneLockMode! @goField(forceResolver: true) - datasetSchemas(first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! @goField(forceResolver: true) + id: ID! + projectId: ID! + teamId: ID! + propertyId: ID! + createdAt: DateTime! + updatedAt: DateTime! + rootLayerId: ID! + widgets: [SceneWidget!]! + plugins: [ScenePlugin!]! + dynamicDatasetSchemas: [DatasetSchema!]! + project: Project @goField(forceResolver: true) + team: Team @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + rootLayer: LayerGroup @goField(forceResolver: true) + lockMode: SceneLockMode! @goField(forceResolver: true) + datasetSchemas( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! @goField(forceResolver: true) } enum SceneLockMode { - FREE - PENDING - DATASET_SYNCING - PLUGIN_UPGRADING - PUBLISHING + FREE + PENDING + DATASET_SYNCING + PLUGIN_UPGRADING + PUBLISHING } type SceneWidget { - id: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - propertyId: ID! - enabled: Boolean! 
- plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - property: Property @goField(forceResolver: true) + id: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + propertyId: ID! + enabled: Boolean! + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + property: Property @goField(forceResolver: true) } type ScenePlugin { - pluginId: PluginID! - propertyId: ID - plugin: Plugin @goField(forceResolver: true) - property: Property @goField(forceResolver: true) + pluginId: PluginID! + propertyId: ID + plugin: Plugin @goField(forceResolver: true) + property: Property @goField(forceResolver: true) } # Property type PropertySchema { - id: PropertySchemaID! - groups: [PropertySchemaGroup!]! - linkableFields: PropertyLinkableFields! + id: PropertySchemaID! + groups: [PropertySchemaGroup!]! + linkableFields: PropertyLinkableFields! } type PropertyLinkableFields { - schemaId: PropertySchemaID! - latlng: PropertySchemaFieldID - url: PropertySchemaFieldID - latlngField: PropertySchemaField @goField(forceResolver: true) - urlField: PropertySchemaField @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) + schemaId: PropertySchemaID! + latlng: PropertySchemaFieldID + url: PropertySchemaFieldID + latlngField: PropertySchemaField @goField(forceResolver: true) + urlField: PropertySchemaField @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) } type PropertySchemaGroup { - schemaGroupId: PropertySchemaFieldID! - schemaId: PropertySchemaID! - fields: [PropertySchemaField!]! - isList: Boolean! 
- isAvailableIf: PropertyCondition - title: String - allTranslatedTitle: TranslatedString - # For compatibility: "name" field will be removed in the futrue - name: PropertySchemaFieldID - representativeFieldId: PropertySchemaFieldID - representativeField: PropertySchemaField - schema: PropertySchema @goField(forceResolver: true) - translatedTitle(lang: String): String! @goField(forceResolver: true) + schemaGroupId: PropertySchemaFieldID! + schemaId: PropertySchemaID! + fields: [PropertySchemaField!]! + isList: Boolean! + isAvailableIf: PropertyCondition + title: String + allTranslatedTitle: TranslatedString + # For compatibility: "name" field will be removed in the futrue + name: PropertySchemaFieldID + representativeFieldId: PropertySchemaFieldID + representativeField: PropertySchemaField + schema: PropertySchema @goField(forceResolver: true) + translatedTitle(lang: String): String! @goField(forceResolver: true) } type PropertySchemaField { - fieldId: PropertySchemaFieldID! - type: ValueType! - title: String! - # For compatibility: "name" field will be removed in the futrue - name: String! - description: String! - prefix: String - suffix: String - defaultValue: Any - ui: PropertySchemaFieldUI - min: Float - max: Float - choices: [PropertySchemaFieldChoice!] - isAvailableIf: PropertyCondition - allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedName" field will be removed in the futrue - allTranslatedName: TranslatedString - allTranslatedDescription: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedName" field will be removed in the futrue - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + fieldId: PropertySchemaFieldID! + type: ValueType! + title: String! + # For compatibility: "name" field will be removed in the futrue + name: String! + description: String! 
+ prefix: String + suffix: String + defaultValue: Any + ui: PropertySchemaFieldUI + min: Float + max: Float + choices: [PropertySchemaFieldChoice!] + isAvailableIf: PropertyCondition + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedName" field will be removed in the futrue + allTranslatedName: TranslatedString + allTranslatedDescription: TranslatedString + translatedTitle(lang: String): String! @goField(forceResolver: true) + # For compatibility: "translatedName" field will be removed in the futrue + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) } enum PropertySchemaFieldUI { - LAYER - MULTILINE - SELECTION - COLOR - RANGE - IMAGE - VIDEO - FILE - CAMERA_POSE + LAYER + MULTILINE + SELECTION + COLOR + RANGE + IMAGE + VIDEO + FILE + CAMERA_POSE } type PropertySchemaFieldChoice { - key: String! - title: String! - # For compatibility: "label" field will be removed in the futrue - label: String! - icon: String - allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedLabel" field will be removed in the futrue - allTranslatedLabel: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedLabel" field will be removed in the futrue - translatedLabel(lang: String): String! @goField(forceResolver: true) + key: String! + title: String! + # For compatibility: "label" field will be removed in the futrue + label: String! + icon: String + allTranslatedTitle: TranslatedString + # For compatibility: "allTranslatedLabel" field will be removed in the futrue + allTranslatedLabel: TranslatedString + translatedTitle(lang: String): String! @goField(forceResolver: true) + # For compatibility: "translatedLabel" field will be removed in the futrue + translatedLabel(lang: String): String! @goField(forceResolver: true) } type PropertyCondition { - fieldId: PropertySchemaFieldID! 
- type: ValueType! - value: Any + fieldId: PropertySchemaFieldID! + type: ValueType! + value: Any } type Property implements Node { - id: ID! - schemaId: PropertySchemaID! - items: [PropertyItem!]! - schema: PropertySchema @goField(forceResolver: true) - layer: Layer @goField(forceResolver: true) - merged: MergedProperty @goField(forceResolver: true) + id: ID! + schemaId: PropertySchemaID! + items: [PropertyItem!]! + schema: PropertySchema @goField(forceResolver: true) + layer: Layer @goField(forceResolver: true) + merged: MergedProperty @goField(forceResolver: true) } union PropertyItem = PropertyGroup | PropertyGroupList type PropertyGroup { - id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! - fields: [PropertyField!]! - schema: PropertySchema @goField(forceResolver: true) - schemaGroup: PropertySchemaGroup @goField(forceResolver: true) + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + fields: [PropertyField!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) } type PropertyGroupList { - id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! - groups: [PropertyGroup!]! - schema: PropertySchema @goField(forceResolver: true) - schemaGroup: PropertySchemaGroup @goField(forceResolver: true) + id: ID! + schemaId: PropertySchemaID! + schemaGroupId: PropertySchemaFieldID! + groups: [PropertyGroup!]! + schema: PropertySchema @goField(forceResolver: true) + schemaGroup: PropertySchemaGroup @goField(forceResolver: true) } type PropertyField { - id: PropertySchemaFieldID! - parentId: ID! - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! - links: [PropertyFieldLink!] - type: ValueType! 
- value: Any - parent: Property @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) - field: PropertySchemaField @goField(forceResolver: true) - actualValue: Any @goField(forceResolver: true) + id: PropertySchemaFieldID! + parentId: ID! + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! + links: [PropertyFieldLink!] + type: ValueType! + value: Any + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) } type PropertyFieldLink { - datasetId: ID - datasetSchemaId: ID! - datasetSchemaFieldId: ID! - dataset: Dataset @goField(forceResolver: true) - datasetField: DatasetField @goField(forceResolver: true) - datasetSchema: DatasetSchema @goField(forceResolver: true) - datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) + datasetId: ID + datasetSchemaId: ID! + datasetSchemaFieldId: ID! + dataset: Dataset @goField(forceResolver: true) + datasetField: DatasetField @goField(forceResolver: true) + datasetSchema: DatasetSchema @goField(forceResolver: true) + datasetSchemaField: DatasetSchemaField @goField(forceResolver: true) } type MergedProperty { - originalId: ID - parentId: ID - # note: schemaId will not always be set - schemaId: PropertySchemaID - linkedDatasetId: ID - original: Property @goField(forceResolver: true) - parent: Property @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - groups: [MergedPropertyGroup!]! 
@goField(forceResolver: true) + originalId: ID + parentId: ID + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + original: Property @goField(forceResolver: true) + parent: Property @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + groups: [MergedPropertyGroup!]! @goField(forceResolver: true) } type MergedPropertyGroup { - originalPropertyId: ID - parentPropertyId: ID - originalId: ID - parentId: ID - schemaGroupId: PropertySchemaFieldID! - # note: schemaId will not always be set - schemaId: PropertySchemaID - linkedDatasetId: ID - fields: [MergedPropertyField!]! - groups: [MergedPropertyGroup!]! - originalProperty: Property @goField(forceResolver: true) - parentProperty: Property @goField(forceResolver: true) - original: PropertyGroup @goField(forceResolver: true) - parent: PropertyGroup @goField(forceResolver: true) - schema: PropertySchema @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) + originalPropertyId: ID + parentPropertyId: ID + originalId: ID + parentId: ID + schemaGroupId: PropertySchemaFieldID! + # note: schemaId will not always be set + schemaId: PropertySchemaID + linkedDatasetId: ID + fields: [MergedPropertyField!]! + groups: [MergedPropertyGroup!]! + originalProperty: Property @goField(forceResolver: true) + parentProperty: Property @goField(forceResolver: true) + original: PropertyGroup @goField(forceResolver: true) + parent: PropertyGroup @goField(forceResolver: true) + schema: PropertySchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) } type MergedPropertyField { - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! - value: Any - type: ValueType! - links: [PropertyFieldLink!] - overridden: Boolean! 
- schema: PropertySchema @goField(forceResolver: true) - field: PropertySchemaField @goField(forceResolver: true) - actualValue: Any @goField(forceResolver: true) + schemaId: PropertySchemaID! + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! + links: [PropertyFieldLink!] + overridden: Boolean! + schema: PropertySchema @goField(forceResolver: true) + field: PropertySchemaField @goField(forceResolver: true) + actualValue: Any @goField(forceResolver: true) } # Dataset type DatasetSchema implements Node { - id: ID! - source: String! - name: String! - sceneId: ID! - fields: [DatasetSchemaField!]! - representativeFieldId: ID - dynamic: Boolean - datasets(first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! @goField(forceResolver: true) - scene: Scene @goField(forceResolver: true) - representativeField: DatasetSchemaField @goField(forceResolver: true) + id: ID! + source: String! + name: String! + sceneId: ID! + fields: [DatasetSchemaField!]! + representativeFieldId: ID + dynamic: Boolean + datasets( + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + representativeField: DatasetSchemaField @goField(forceResolver: true) } type DatasetSchemaField implements Node { - id: ID! - source: String! - name: String! - type: ValueType! - schemaId: ID! - refId: ID - schema: DatasetSchema @goField(forceResolver: true) - ref: DatasetSchema @goField(forceResolver: true) + id: ID! + source: String! + name: String! + type: ValueType! + schemaId: ID! + refId: ID + schema: DatasetSchema @goField(forceResolver: true) + ref: DatasetSchema @goField(forceResolver: true) } type Dataset implements Node { - id: ID! - source: String! - schemaId: ID! - fields: [DatasetField!]! - schema: DatasetSchema @goField(forceResolver: true) - name: String @goField(forceResolver: true) + id: ID! + source: String! + schemaId: ID! + fields: [DatasetField!]! 
+ schema: DatasetSchema @goField(forceResolver: true) + name: String @goField(forceResolver: true) } type DatasetField { - fieldId: ID! - schemaId: ID! - source: String! - type: ValueType! - value: Any - schema: DatasetSchema @goField(forceResolver: true) - field: DatasetSchemaField @goField(forceResolver: true) - valueRef: Dataset @goField(forceResolver: true) + fieldId: ID! + schemaId: ID! + source: String! + type: ValueType! + value: Any + schema: DatasetSchema @goField(forceResolver: true) + field: DatasetSchemaField @goField(forceResolver: true) + valueRef: Dataset @goField(forceResolver: true) } # Layer interface Layer { - id: ID! - name: String! - isVisible: Boolean! - propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID - infobox: Infobox - # parentId will not be always set - parentId: ID - parent: LayerGroup - property: Property - plugin: Plugin - extension: PluginExtension + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + parent: LayerGroup + property: Property + plugin: Plugin + extension: PluginExtension } union Layers = LayerItem | LayerGroup enum LayerEncodingFormat { - KML - CZML - GEOJSON - SHAPE - REEARTH + KML + CZML + GEOJSON + SHAPE + REEARTH } type LayerItem implements Layer { - id: ID! - name: String! - isVisible: Boolean! - propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID - infobox: Infobox - # parentId will not be always set - parentId: ID - linkedDatasetId: ID - parent: LayerGroup @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - merged: MergedLayer @goField(forceResolver: true) + id: ID! + name: String! + isVisible: Boolean! 
+ propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetId: ID + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedLayer @goField(forceResolver: true) } type LayerGroup implements Layer { - id: ID! - name: String! - isVisible: Boolean! - propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID - infobox: Infobox - # parentId will not be always set - parentId: ID - linkedDatasetSchemaId: ID - root: Boolean! - layerIds: [ID!]! - parent: LayerGroup @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) - layers: [Layer]! @goField(forceResolver: true) + id: ID! + name: String! + isVisible: Boolean! + propertyId: ID + pluginId: PluginID + extensionId: PluginExtensionID + infobox: Infobox + # parentId will not be always set + parentId: ID + linkedDatasetSchemaId: ID + root: Boolean! + layerIds: [ID!]! + parent: LayerGroup @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + layers: [Layer]! @goField(forceResolver: true) } type Infobox { - layerId: ID! - propertyId: ID! - fields: [InfoboxField!]! - linkedDatasetId: ID - layer: Layer! @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - merged: MergedInfobox @goField(forceResolver: true) + layerId: ID! 
+ propertyId: ID! + fields: [InfoboxField!]! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfobox @goField(forceResolver: true) } type InfoboxField { - id: ID! - layerId: ID! - propertyId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - linkedDatasetId: ID - layer: Layer! @goField(forceResolver: true) - infobox: Infobox! @goField(forceResolver: true) - property: Property @goField(forceResolver: true) - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) - linkedDataset: Dataset @goField(forceResolver: true) - merged: MergedInfoboxField @goField(forceResolver: true) + id: ID! + layerId: ID! + propertyId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + linkedDatasetId: ID + layer: Layer! @goField(forceResolver: true) + infobox: Infobox! @goField(forceResolver: true) + property: Property @goField(forceResolver: true) + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + merged: MergedInfoboxField @goField(forceResolver: true) } type MergedLayer { - originalId: ID! - parentId: ID - property: MergedProperty - infobox: MergedInfobox - original: LayerItem @goField(forceResolver: true) - parent: LayerGroup @goField(forceResolver: true) + originalId: ID! + parentId: ID + property: MergedProperty + infobox: MergedInfobox + original: LayerItem @goField(forceResolver: true) + parent: LayerGroup @goField(forceResolver: true) } type MergedInfobox { - property: MergedProperty - fields: [MergedInfoboxField!]! + property: MergedProperty + fields: [MergedInfoboxField!]! } type MergedInfoboxField { - originalId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! 
- property: MergedProperty - plugin: Plugin @goField(forceResolver: true) - extension: PluginExtension @goField(forceResolver: true) + originalId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + property: MergedProperty + plugin: Plugin @goField(forceResolver: true) + extension: PluginExtension @goField(forceResolver: true) } - # InputType input CreateAssetInput { - teamId: ID! - file: Upload! + teamId: ID! + file: Upload! } input RemoveAssetInput { - assetId: ID! + assetId: ID! } input SignupInput { - lang: Lang - theme: Theme - userId: ID - teamId: ID - secret: String + lang: Lang + theme: Theme + userId: ID + teamId: ID + secret: String } input UpdateMeInput { - name: String - email: String - lang: Lang - theme: Theme - password: String - passwordConfirmation: String + name: String + email: String + lang: Lang + theme: Theme + password: String + passwordConfirmation: String } input RemoveMyAuthInput { - auth: String! + auth: String! } input DeleteMeInput { - userId: ID! + userId: ID! } input CreateTeamInput { - name: String! + name: String! } input UpdateTeamInput { - teamId: ID! - name: String! + teamId: ID! + name: String! } input AddMemberToTeamInput { - teamId: ID! - userId: ID! - role: Role! + teamId: ID! + userId: ID! + role: Role! } input RemoveMemberFromTeamInput { - teamId: ID! - userId: ID! + teamId: ID! + userId: ID! } input UpdateMemberOfTeamInput { - teamId: ID! - userId: ID! - role: Role! + teamId: ID! + userId: ID! + role: Role! } input DeleteTeamInput { - teamId: ID! + teamId: ID! } input CreateProjectInput { - teamId: ID! - visualizer: Visualizer! - name: String - description: String - imageUrl: URL - alias: String - archived: Boolean + teamId: ID! + visualizer: Visualizer! + name: String + description: String + imageUrl: URL + alias: String + archived: Boolean } input UpdateProjectInput { - projectId: ID! 
- name: String - description: String - archived: Boolean - isBasicAuthActive: Boolean - basicAuthUsername: String - basicAuthPassword: String - alias: String - imageUrl: URL - publicTitle: String - publicDescription: String - publicImage: Upload - publicNoIndex: Boolean - deleteImageUrl: Boolean - deletePublicImage: Boolean + projectId: ID! + name: String + description: String + archived: Boolean + isBasicAuthActive: Boolean + basicAuthUsername: String + basicAuthPassword: String + alias: String + imageUrl: URL + publicTitle: String + publicDescription: String + publicImage: Upload + publicNoIndex: Boolean + deleteImageUrl: Boolean + deletePublicImage: Boolean } input UploadPluginInput { - file: Upload! + file: Upload! } input CreateSceneInput { - projectId: ID! + projectId: ID! } input PublishProjectInput { - projectId: ID! - alias: String - status: PublishmentStatus! + projectId: ID! + alias: String + status: PublishmentStatus! } input DeleteProjectInput { - projectId: ID! + projectId: ID! } input AddWidgetInput { - sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! } input UpdateWidgetInput { - sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - enabled: Boolean + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + enabled: Boolean } input RemoveWidgetInput { - sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + sceneId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! } input InstallPluginInput { - sceneId: ID! - pluginId: PluginID! + sceneId: ID! + pluginId: PluginID! } input UninstallPluginInput { - sceneId: ID! - pluginId: PluginID! + sceneId: ID! + pluginId: PluginID! } input UpgradePluginInput { - sceneId: ID! - pluginId: PluginID! - toPluginId: PluginID! + sceneId: ID! + pluginId: PluginID! + toPluginId: PluginID! } input SyncDatasetInput { - sceneId: ID! - url: String! + sceneId: ID! + url: String! 
} input UpdatePropertyValueInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - value: Any - type: ValueType! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + value: Any + type: ValueType! } input UpdatePropertyValueLatLngInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! } input UpdatePropertyValueLatLngHeightInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - height: Float! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + height: Float! } input UpdatePropertyValueCameraInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - altitude: Float! - heading: Float! - pitch: Float! - roll: Float! - fov: Float! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + lat: Float! + lng: Float! + altitude: Float! + heading: Float! + pitch: Float! + roll: Float! + fov: Float! } input UpdatePropertyValueTypographyInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - fontFamily: String - fontWeight: String - fontSize: Int - color: String - textAlign: TextAlign - bold: Boolean - italic: Boolean - underline: Boolean + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! 
+ fontFamily: String + fontWeight: String + fontSize: Int + color: String + textAlign: TextAlign + bold: Boolean + italic: Boolean + underline: Boolean } input RemovePropertyFieldInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! } input UploadFileToPropertyInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - file: Upload! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + file: Upload! } input LinkDatasetToPropertyValueInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - datasetSchemaIds: [ID!]! - datasetSchemaFieldIds: [ID!]! - datasetIds: [ID!] + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! + datasetSchemaIds: [ID!]! + datasetSchemaFieldIds: [ID!]! + datasetIds: [ID!] } input UnlinkPropertyValueInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! + propertyId: ID! + schemaItemId: PropertySchemaFieldID + itemId: ID + fieldId: PropertySchemaFieldID! } input AddPropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - index: Int - nameFieldValue: Any - nameFieldType: ValueType + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + index: Int + nameFieldValue: Any + nameFieldType: ValueType } input MovePropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - itemId: ID! - index: Int! + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! + index: Int! } input RemovePropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - itemId: ID! + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + itemId: ID! 
} input UpdatePropertyItemInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID! - operations: [UpdatePropertyItemOperationInput!]! + propertyId: ID! + schemaItemId: PropertySchemaFieldID! + operations: [UpdatePropertyItemOperationInput!]! } input UpdatePropertyItemOperationInput { - operation: ListOperation! - itemId: ID - index: Int - nameFieldValue: Any - nameFieldType: ValueType + operation: ListOperation! + itemId: ID + index: Int + nameFieldValue: Any + nameFieldType: ValueType } input AddLayerItemInput { - parentLayerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - index: Int - name: String - lat: Float - lng: Float + parentLayerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int + name: String + lat: Float + lng: Float } input AddLayerGroupInput { - parentLayerId: ID! - pluginId: PluginID - extensionId: PluginExtensionID - index: Int - linkedDatasetSchemaID: ID - name: String + parentLayerId: ID! + pluginId: PluginID + extensionId: PluginExtensionID + index: Int + linkedDatasetSchemaID: ID + name: String } input RemoveLayerInput { - layerId: ID! + layerId: ID! } input UpdateLayerInput { - layerId: ID! - name: String - visible: Boolean + layerId: ID! + name: String + visible: Boolean } input MoveLayerInput { - layerId: ID! - destLayerId: ID - index: Int + layerId: ID! + destLayerId: ID + index: Int } input CreateInfoboxInput { - layerId: ID! + layerId: ID! } input RemoveInfoboxInput { - layerId: ID! + layerId: ID! } input AddInfoboxFieldInput { - layerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! - index: Int + layerId: ID! + pluginId: PluginID! + extensionId: PluginExtensionID! + index: Int } input MoveInfoboxFieldInput { - layerId: ID! - infoboxFieldId: ID! - index: Int! + layerId: ID! + infoboxFieldId: ID! + index: Int! } input RemoveInfoboxFieldInput { - layerId: ID! - infoboxFieldId: ID! + layerId: ID! + infoboxFieldId: ID! } input UpdateDatasetSchemaInput { - schemaId: ID! 
- name: String! + schemaId: ID! + name: String! } input AddDynamicDatasetSchemaInput { - sceneId: ID! + sceneId: ID! } input AddDynamicDatasetInput { - datasetSchemaId: ID! - author: String! - content: String! - lat: Float - lng: Float - target: String + datasetSchemaId: ID! + author: String! + content: String! + lat: Float + lng: Float + target: String } input RemoveDatasetSchemaInput { - schemaId: ID! - force: Boolean + schemaId: ID! + force: Boolean } input ImportLayerInput { - layerId: ID! - file: Upload! - format: LayerEncodingFormat! + layerId: ID! + file: Upload! + format: LayerEncodingFormat! } input ImportDatasetInput { - file: Upload! - sceneId: ID! - datasetSchemaId: ID + file: Upload! + sceneId: ID! + datasetSchemaId: ID } input AddDatasetSchemaInput { - sceneId: ID! - name: String! - representativefield: ID + sceneId: ID! + name: String! + representativefield: ID } # Payload type CreateAssetPayload { - asset: Asset! + asset: Asset! } type RemoveAssetPayload { - assetId: ID! + assetId: ID! } type SignupPayload { - user: User! - team: Team! + user: User! + team: Team! } type UpdateMePayload { - user: User! + user: User! } type DeleteMePayload { - userId: ID! + userId: ID! } type CreateTeamPayload { - team: Team! + team: Team! } type UpdateTeamPayload { - team: Team! + team: Team! } type AddMemberToTeamPayload { - team: Team! + team: Team! } type RemoveMemberFromTeamPayload { - team: Team! + team: Team! } type UpdateMemberOfTeamPayload { - team: Team! + team: Team! } type DeleteTeamPayload { - teamId: ID! + teamId: ID! } type ProjectPayload { - project: Project! + project: Project! } type DeleteProjectPayload { - projectId: ID! + projectId: ID! } type UploadPluginPayload { - plugin: Plugin! + plugin: Plugin! } type CreateScenePayload { - scene: Scene! + scene: Scene! } type AddWidgetPayload { - scene: Scene! - sceneWidget: SceneWidget! + scene: Scene! + sceneWidget: SceneWidget! } type UpdateWidgetPayload { - scene: Scene! - sceneWidget: SceneWidget! 
+ scene: Scene! + sceneWidget: SceneWidget! } type RemoveWidgetPayload { - scene: Scene! - pluginId: PluginID! - extensionId: PluginExtensionID! + scene: Scene! + pluginId: PluginID! + extensionId: PluginExtensionID! } type InstallPluginPayload { - scene: Scene! - scenePlugin: ScenePlugin! + scene: Scene! + scenePlugin: ScenePlugin! } type UninstallPluginPayload { - scene: Scene! - scenePlugin: ScenePlugin! + scene: Scene! + scenePlugin: ScenePlugin! } type UpgradePluginPayload { - scene: Scene! - scenePlugin: ScenePlugin! + scene: Scene! + scenePlugin: ScenePlugin! } type SyncDatasetPayload { - sceneId: ID! - url: String! - datasetSchema: [DatasetSchema!]! - dataset: [Dataset!]! + sceneId: ID! + url: String! + datasetSchema: [DatasetSchema!]! + dataset: [Dataset!]! } type PropertyFieldPayload { - property: Property! - propertyField: PropertyField + property: Property! + propertyField: PropertyField } type PropertyItemPayload { - property: Property! - propertyItem: PropertyItem + property: Property! + propertyItem: PropertyItem } type AddLayerItemPayload { - layer: LayerItem! - parentLayer: LayerGroup! - index: Int + layer: LayerItem! + parentLayer: LayerGroup! + index: Int } type AddLayerGroupPayload { - layer: LayerGroup! - parentLayer: LayerGroup! - index: Int + layer: LayerGroup! + parentLayer: LayerGroup! + index: Int } type RemoveLayerPayload { - layerId: ID! - parentLayer: LayerGroup! + layerId: ID! + parentLayer: LayerGroup! } type UpdateLayerPayload { - layer: Layer! + layer: Layer! } type MoveLayerPayload { - layerId: ID! - fromParentLayer: LayerGroup! - toParentLayer: LayerGroup! - index: Int! + layerId: ID! + fromParentLayer: LayerGroup! + toParentLayer: LayerGroup! + index: Int! } type CreateInfoboxPayload { - layer: Layer! + layer: Layer! } type RemoveInfoboxPayload { - layer: Layer! + layer: Layer! } type AddInfoboxFieldPayload { - infoboxField: InfoboxField! - layer: Layer! + infoboxField: InfoboxField! + layer: Layer! 
} type MoveInfoboxFieldPayload { - infoboxFieldId: ID! - layer: Layer! - index: Int! + infoboxFieldId: ID! + layer: Layer! + index: Int! } type RemoveInfoboxFieldPayload { - infoboxFieldId: ID! - layer: Layer! + infoboxFieldId: ID! + layer: Layer! } type UpdateDatasetSchemaPayload { - datasetSchema: DatasetSchema + datasetSchema: DatasetSchema } type RemoveDatasetSchemaPayload { - schemaId: ID! + schemaId: ID! } type AddDynamicDatasetSchemaPayload { - datasetSchema: DatasetSchema + datasetSchema: DatasetSchema } type AddDynamicDatasetPayload { - datasetSchema: DatasetSchema - dataset: Dataset + datasetSchema: DatasetSchema + dataset: Dataset } type ImportLayerPayload { - layers: [Layer!]! - parentLayer: LayerGroup! + layers: [Layer!]! + parentLayer: LayerGroup! } type ImportDatasetPayload { - datasetSchema: DatasetSchema! + datasetSchema: DatasetSchema! } type AddDatasetSchemaPayload { - datasetSchema: DatasetSchema + datasetSchema: DatasetSchema } # Connection enum NodeType { - USER - TEAM - PROJECT - PLUGIN - SCENE - PROPERTY_SCHEMA - PROPERTY - DATASET_SCHEMA - DATASET - LAYER_GROUP - LAYER_ITEM + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM } type AssetConnection { - edges: [AssetEdge!]! - nodes: [Asset]! - pageInfo: PageInfo! - totalCount: Int! + edges: [AssetEdge!]! + nodes: [Asset]! + pageInfo: PageInfo! + totalCount: Int! } type AssetEdge { - cursor: Cursor! - node: Asset + cursor: Cursor! + node: Asset } type ProjectConnection { - edges: [ProjectEdge!]! - nodes: [Project]! - pageInfo: PageInfo! - totalCount: Int! + edges: [ProjectEdge!]! + nodes: [Project]! + pageInfo: PageInfo! + totalCount: Int! } type ProjectEdge { - cursor: Cursor! - node: Project + cursor: Cursor! + node: Project } type DatasetSchemaConnection { - edges: [DatasetSchemaEdge!]! - nodes: [DatasetSchema]! - pageInfo: PageInfo! - totalCount: Int! + edges: [DatasetSchemaEdge!]! + nodes: [DatasetSchema]! 
+ pageInfo: PageInfo! + totalCount: Int! } type DatasetSchemaEdge { - cursor: Cursor! - node: DatasetSchema + cursor: Cursor! + node: DatasetSchema } type DatasetConnection { - edges: [DatasetEdge!]! - nodes: [Dataset]! - pageInfo: PageInfo! - totalCount: Int! + edges: [DatasetEdge!]! + nodes: [Dataset]! + pageInfo: PageInfo! + totalCount: Int! } type DatasetEdge { - cursor: Cursor! - node: Dataset + cursor: Cursor! + node: Dataset } - # Query type Query { - me: User - node(id: ID!, type: NodeType!): Node - nodes(id: [ID!]!, type: NodeType!): [Node]! - propertySchema(id: PropertySchemaID!): PropertySchema - propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! - plugin(id: PluginID!): Plugin - plugins(id: [PluginID!]!): [Plugin!]! - layer(id: ID!): Layer - scene(projectId: ID!): Scene - assets(teamId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): AssetConnection! - projects(teamId: ID!, includeArchived: Boolean, first: Int, last: Int, after: Cursor, before: Cursor): ProjectConnection! - datasetSchemas(sceneId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetSchemaConnection! - datasets(datasetSchemaId: ID!, first: Int, last: Int, after: Cursor, before: Cursor): DatasetConnection! - sceneLock(sceneId: ID!): SceneLockMode - dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! - searchUser(nameOrEmail: String!): SearchedUser - checkProjectAlias(alias: String!): CheckProjectAliasPayload! - installablePlugins: [PluginMetadata!]! + me: User + node(id: ID!, type: NodeType!): Node + nodes(id: [ID!]!, type: NodeType!): [Node]! + propertySchema(id: PropertySchemaID!): PropertySchema + propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! + plugin(id: PluginID!): Plugin + plugins(id: [PluginID!]!): [Plugin!]! + layer(id: ID!): Layer + scene(projectId: ID!): Scene + assets( + teamId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): AssetConnection! + projects( + teamId: ID! 
+ includeArchived: Boolean + first: Int + last: Int + after: Cursor + before: Cursor + ): ProjectConnection! + datasetSchemas( + sceneId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetSchemaConnection! + datasets( + datasetSchemaId: ID! + first: Int + last: Int + after: Cursor + before: Cursor + ): DatasetConnection! + sceneLock(sceneId: ID!): SceneLockMode + dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! + searchUser(nameOrEmail: String!): SearchedUser + checkProjectAlias(alias: String!): CheckProjectAliasPayload! + installablePlugins: [PluginMetadata!]! } # Mutation type Mutation { - # Asset - createAsset(input: CreateAssetInput!): CreateAssetPayload - removeAsset(input: RemoveAssetInput!): RemoveAssetPayload - - # User - signup(input: SignupInput!): SignupPayload - updateMe(input: UpdateMeInput!): UpdateMePayload - removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload - deleteMe(input: DeleteMeInput!): DeleteMePayload - - # Team - createTeam(input: CreateTeamInput!): CreateTeamPayload - deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload - updateTeam(input: UpdateTeamInput!): UpdateTeamPayload - addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload - removeMemberFromTeam(input: RemoveMemberFromTeamInput!): RemoveMemberFromTeamPayload - updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload - - # Project - createProject(input: CreateProjectInput!): ProjectPayload - updateProject(input: UpdateProjectInput!): ProjectPayload - publishProject(input: PublishProjectInput!): ProjectPayload - deleteProject(input: DeleteProjectInput!): DeleteProjectPayload - - # Plugin - uploadPlugin(input: UploadPluginInput!): UploadPluginPayload - - # Scene - createScene(input: CreateSceneInput!): CreateScenePayload - addWidget(input: AddWidgetInput!): AddWidgetPayload - updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload - removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload - 
installPlugin(input: InstallPluginInput!): InstallPluginPayload - uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload - upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload - - # Dataset - updateDatasetSchema(input:UpdateDatasetSchemaInput!): UpdateDatasetSchemaPayload - syncDataset(input: SyncDatasetInput!): SyncDatasetPayload - addDynamicDatasetSchema(input:AddDynamicDatasetSchemaInput!): AddDynamicDatasetSchemaPayload - addDynamicDataset(input:AddDynamicDatasetInput!): AddDynamicDatasetPayload - removeDatasetSchema(input: RemoveDatasetSchemaInput!): RemoveDatasetSchemaPayload - importDataset(input: ImportDatasetInput!): ImportDatasetPayload - addDatasetSchema(input:AddDatasetSchemaInput!): AddDatasetSchemaPayload - - # Property - updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload - updatePropertyValueLatLng(input: UpdatePropertyValueLatLngInput!): PropertyFieldPayload - updatePropertyValueLatLngHeight(input: UpdatePropertyValueLatLngHeightInput!): PropertyFieldPayload - updatePropertyValueCamera(input: UpdatePropertyValueCameraInput!): PropertyFieldPayload - updatePropertyValueTypography(input: UpdatePropertyValueTypographyInput!): PropertyFieldPayload - removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload - uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload - linkDatasetToPropertyValue(input: LinkDatasetToPropertyValueInput!): PropertyFieldPayload - unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload - addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload - movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload - removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload - updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload - - # Layer - addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload - addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload - 
removeLayer(input: RemoveLayerInput!): RemoveLayerPayload - updateLayer(input: UpdateLayerInput!): UpdateLayerPayload - moveLayer(input: MoveLayerInput!): MoveLayerPayload - createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload - removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload - addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload - moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload - removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload - importLayer(input:ImportLayerInput!): ImportLayerPayload + # Asset + createAsset(input: CreateAssetInput!): CreateAssetPayload + removeAsset(input: RemoveAssetInput!): RemoveAssetPayload + + # User + signup(input: SignupInput!): SignupPayload + updateMe(input: UpdateMeInput!): UpdateMePayload + removeMyAuth(input: RemoveMyAuthInput!): UpdateMePayload + deleteMe(input: DeleteMeInput!): DeleteMePayload + + # Team + createTeam(input: CreateTeamInput!): CreateTeamPayload + deleteTeam(input: DeleteTeamInput!): DeleteTeamPayload + updateTeam(input: UpdateTeamInput!): UpdateTeamPayload + addMemberToTeam(input: AddMemberToTeamInput!): AddMemberToTeamPayload + removeMemberFromTeam( + input: RemoveMemberFromTeamInput! 
+ ): RemoveMemberFromTeamPayload + updateMemberOfTeam(input: UpdateMemberOfTeamInput!): UpdateMemberOfTeamPayload + + # Project + createProject(input: CreateProjectInput!): ProjectPayload + updateProject(input: UpdateProjectInput!): ProjectPayload + publishProject(input: PublishProjectInput!): ProjectPayload + deleteProject(input: DeleteProjectInput!): DeleteProjectPayload + + # Plugin + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload + + # Scene + createScene(input: CreateSceneInput!): CreateScenePayload + addWidget(input: AddWidgetInput!): AddWidgetPayload + updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload + installPlugin(input: InstallPluginInput!): InstallPluginPayload + uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload + + # Dataset + updateDatasetSchema( + input: UpdateDatasetSchemaInput! + ): UpdateDatasetSchemaPayload + syncDataset(input: SyncDatasetInput!): SyncDatasetPayload + addDynamicDatasetSchema( + input: AddDynamicDatasetSchemaInput! + ): AddDynamicDatasetSchemaPayload + addDynamicDataset(input: AddDynamicDatasetInput!): AddDynamicDatasetPayload + removeDatasetSchema( + input: RemoveDatasetSchemaInput! + ): RemoveDatasetSchemaPayload + importDataset(input: ImportDatasetInput!): ImportDatasetPayload + addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload + + # Property + updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload + updatePropertyValueLatLng( + input: UpdatePropertyValueLatLngInput! + ): PropertyFieldPayload + updatePropertyValueLatLngHeight( + input: UpdatePropertyValueLatLngHeightInput! + ): PropertyFieldPayload + updatePropertyValueCamera( + input: UpdatePropertyValueCameraInput! + ): PropertyFieldPayload + updatePropertyValueTypography( + input: UpdatePropertyValueTypographyInput! 
+ ): PropertyFieldPayload + removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload + uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload + linkDatasetToPropertyValue( + input: LinkDatasetToPropertyValueInput! + ): PropertyFieldPayload + unlinkPropertyValue(input: UnlinkPropertyValueInput!): PropertyFieldPayload + addPropertyItem(input: AddPropertyItemInput!): PropertyItemPayload + movePropertyItem(input: MovePropertyItemInput!): PropertyItemPayload + removePropertyItem(input: RemovePropertyItemInput!): PropertyItemPayload + updatePropertyItems(input: UpdatePropertyItemInput!): PropertyItemPayload + + # Layer + addLayerItem(input: AddLayerItemInput!): AddLayerItemPayload + addLayerGroup(input: AddLayerGroupInput!): AddLayerGroupPayload + removeLayer(input: RemoveLayerInput!): RemoveLayerPayload + updateLayer(input: UpdateLayerInput!): UpdateLayerPayload + moveLayer(input: MoveLayerInput!): MoveLayerPayload + createInfobox(input: CreateInfoboxInput!): CreateInfoboxPayload + removeInfobox(input: RemoveInfoboxInput!): RemoveInfoboxPayload + addInfoboxField(input: AddInfoboxFieldInput!): AddInfoboxFieldPayload + moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload + removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload + importLayer(input: ImportLayerInput!): ImportLayerPayload } schema { - query: Query - mutation: Mutation + query: Query + mutation: Mutation } From f726b6ea5c8fd8886ab64a616fe4794ac2ff73ad Mon Sep 17 00:00:00 2001 From: HideBa <49897538+HideBa@users.noreply.github.com> Date: Wed, 23 Jun 2021 19:42:50 +0900 Subject: [PATCH 040/253] Create pull_request_template.md --- .github/pull_request_template.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .github/pull_request_template.md diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..bfcf0a18e --- /dev/null +++ 
b/.github/pull_request_template.md @@ -0,0 +1,12 @@ +# Overview + +## What I've done + + +## What I haven't done + +## How I tested + +## Which point I want you to review particularly + +## Memo From aebac3fb1ae8d33f843ce12ba081765979c20dcc Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 25 Jun 2021 13:48:10 +0900 Subject: [PATCH 041/253] feat: published page api (#11) * feat: published page api * fix * fix basic auth * add test --- internal/adapter/http/published.go | 29 +++ .../http/{user_controller.go => user.go} | 0 internal/app/app.go | 159 +++++++--------- internal/app/config.go | 6 + internal/app/graphql.go | 2 +- internal/app/main.go | 61 ++++++- internal/app/{userAPIs.go => private.go} | 3 +- internal/app/public.go | 171 +++++++++++------- internal/app/server.go | 89 --------- internal/usecase/interactor/project.go | 4 - internal/usecase/interactor/published.go | 139 ++++++++++++++ internal/usecase/interactor/published_test.go | 35 ++++ internal/usecase/interfaces/published.go | 37 ++++ pkg/cache/cache.go | 56 ++++++ pkg/cache/cache_test.go | 81 +++++++++ 15 files changed, 614 insertions(+), 258 deletions(-) create mode 100644 internal/adapter/http/published.go rename internal/adapter/http/{user_controller.go => user.go} (100%) rename internal/app/{userAPIs.go => private.go} (99%) delete mode 100644 internal/app/server.go create mode 100644 internal/usecase/interactor/published.go create mode 100644 internal/usecase/interactor/published_test.go create mode 100644 internal/usecase/interfaces/published.go create mode 100644 pkg/cache/cache.go create mode 100644 pkg/cache/cache_test.go diff --git a/internal/adapter/http/published.go b/internal/adapter/http/published.go new file mode 100644 index 000000000..82e6f23d1 --- /dev/null +++ b/internal/adapter/http/published.go @@ -0,0 +1,29 @@ +package http + +import ( + "context" + "io" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +type PublishedController struct { + 
usecase interfaces.Published +} + +func NewPublishedController(usecase interfaces.Published) *PublishedController { + return &PublishedController{usecase: usecase} +} + +func (c *PublishedController) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + return c.usecase.Metadata(ctx, name) +} + +func (c *PublishedController) Data(ctx context.Context, name string) (io.Reader, error) { + return c.usecase.Data(ctx, name) +} + +func (c *PublishedController) Index(ctx context.Context, name string, url *url.URL) (string, error) { + return c.usecase.Index(ctx, name, url) +} diff --git a/internal/adapter/http/user_controller.go b/internal/adapter/http/user.go similarity index 100% rename from internal/adapter/http/user_controller.go rename to internal/adapter/http/user.go diff --git a/internal/app/app.go b/internal/app/app.go index 160e97f59..a5b2890c9 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -9,49 +9,47 @@ import ( "github.com/labstack/echo/v4/middleware" "github.com/reearth/reearth-backend/internal/adapter/graphql" err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/log" + echotracer "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo" ) -func initAppEcho(cfg *ServerConfig) *echo.Echo { - e := newEcho(cfg) +func initEcho(cfg *ServerConfig) *echo.Echo { + if cfg.Config == nil { + log.Fatalln("ServerConfig.Config is nil") + } - controllers := graphql.NewContainer(cfg.Repos, cfg.Gateways, graphql.ContainerConfig{ - SignupSecret: cfg.Config.SignupSecret, - }) + e := echo.New() + e.Debug = cfg.Debug + e.HideBanner = true + e.HidePort = true + + logger := GetEchoLogger() + e.Logger = logger + e.Use(logger.Hook()) + e.Use(middleware.Recover(), echotracer.Middleware("reearth-backend")) + + origins := allowedOrigins(cfg) + if len(origins) > 0 { + e.Use( + middleware.CORSWithConfig(middleware.CORSConfig{ + AllowOrigins: origins, + }), + ) + } + + if e.Debug { + // 
enable pprof + e.GET("/debug/pprof/*", echo.WrapHandler(http.DefaultServeMux)) + } e.HTTPErrorHandler = func(err error, c echo.Context) { if c.Response().Committed { return } - code := http.StatusBadRequest - msg := err.Error() - - if err2, ok := err.(*echo.HTTPError); ok { - code = err2.Code - if msg2, ok := err2.Message.(string); ok { - msg = msg2 - } else if msg2, ok := err2.Message.(error); ok { - msg = msg2.Error() - } else { - msg = "error" - } - if err2.Internal != nil { - c.Logger().Errorf("echo internal err: %+v", err2) - } - } else if errors.Is(err, err1.ErrNotFound) { - code = http.StatusNotFound - msg = "not found" - } else { - var ierr *err1.ErrInternal - if errors.As(err, &ierr) { - if err2 := ierr.Unwrap(); err2 != nil { - c.Logger().Errorf("internal err: %+v", err2) - } - code = http.StatusInternalServerError - msg = "internal server error" - } - } - + code, msg := errorMessage(err, func(f string, args ...interface{}) { + c.Echo().Logger.Errorf(f, args...) + }) if err := c.JSON(code, map[string]string{ "error": msg, }); err != nil { @@ -59,19 +57,6 @@ func initAppEcho(cfg *ServerConfig) *echo.Echo { } } - origins := allowedOrigins(cfg) - if len(origins) > 0 { - e.Use( - middleware.CORSWithConfig(middleware.CORSConfig{ - AllowOrigins: origins, - }), - ) - } - - e.GET("/api/ping", func(c echo.Context) error { - return c.JSON(http.StatusOK, "pong") - }) - if cfg.Debug || cfg.Config.Dev { // GraphQL Playground without auth e.GET("/graphql", echo.WrapHandler( @@ -79,17 +64,18 @@ func initAppEcho(cfg *ServerConfig) *echo.Echo { )) } - e.GET("/api/published/:name", apiPublished(cfg)) - e.GET("/api/published_data/:name", apiPublishedData(cfg)) api := e.Group("/api") - - privateApi := api.Group("") + publicAPI(e, api, cfg.Config, cfg.Repos, cfg.Gateways) jwks := &JwksSyncOnce{} + privateApi := api.Group("") authRequired(privateApi, jwks, cfg) + graphqlAPI(e, privateApi, cfg, graphql.NewContainer(cfg.Repos, cfg.Gateways, graphql.ContainerConfig{ + 
SignupSecret: cfg.Config.SignupSecret, + })) + privateAPI(e, privateApi, cfg.Repos) - publicRoute(e, api, cfg.Config, cfg.Repos, cfg.Gateways) - graphqlRoute(e, privateApi, cfg, controllers) - userRoute(e, privateApi, cfg.Repos) + published := e.Group("/p") + publishedRoute(e, published, cfg.Config, cfg.Repos, cfg.Gateways) serveFiles(e, cfg.Gateways.File) web(e, cfg.Config.Web, cfg.Config.Auth0) @@ -114,48 +100,35 @@ func allowedOrigins(cfg *ServerConfig) []string { return origins } -func apiPublished(cfg *ServerConfig) echo.HandlerFunc { - return func(c echo.Context) error { - name := c.Param("name") - prj, err := cfg.Repos.Project.FindByPublicName(c.Request().Context(), name) - if err != nil || prj == nil { - return echo.ErrNotFound - } +func errorMessage(err error, log func(string, ...interface{})) (int, string) { + code := http.StatusBadRequest + msg := err.Error() - title := prj.PublicTitle() - description := prj.PublicDescription() - if title == "" { - title = prj.Name() + if err2, ok := err.(*echo.HTTPError); ok { + code = err2.Code + if msg2, ok := err2.Message.(string); ok { + msg = msg2 + } else if msg2, ok := err2.Message.(error); ok { + msg = msg2.Error() + } else { + msg = "error" } - if description == "" { - description = prj.Description() + if err2.Internal != nil { + log("echo internal err: %+v", err2) } - - return c.JSON(http.StatusOK, map[string]interface{}{ - "title": title, - "description": description, - "image": prj.PublicImage(), - "noindex": prj.PublicNoIndex(), - "isBasicAuthActive": prj.IsBasicAuthActive(), - "basicAuthUsername": prj.BasicAuthUsername(), - "basicAuthPassword": prj.BasicAuthPassword(), - }) - } -} - -func apiPublishedData(cfg *ServerConfig) echo.HandlerFunc { - return func(c echo.Context) error { - name := c.Param("name") - prj, err := cfg.Repos.Project.FindByPublicName(c.Request().Context(), name) - if err != nil || prj == nil { - return echo.ErrNotFound - } - - r, err := 
cfg.Gateways.File.ReadBuiltSceneFile(c.Request().Context(), prj.PublicName()) - if err != nil { - return err + } else if errors.Is(err, err1.ErrNotFound) { + code = http.StatusNotFound + msg = "not found" + } else { + var ierr *err1.ErrInternal + if errors.As(err, &ierr) { + if err2 := ierr.Unwrap(); err2 != nil { + log("internal err: %+v", err2) + } + code = http.StatusInternalServerError + msg = "internal server error" } - - return c.Stream(http.StatusOK, echo.MIMEApplicationJSON, r) } + + return code, msg } diff --git a/internal/app/config.go b/internal/app/config.go index e2a741312..a5e62cefd 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -2,6 +2,7 @@ package app import ( "fmt" + "net/url" "os" "strings" @@ -18,6 +19,7 @@ type Config struct { DB string `default:"mongodb://localhost"` Auth0 Auth0Config GraphQL GraphQLConfig + Published PublishedConfig GCPProject string `envconfig:"GOOGLE_CLOUD_PROJECT"` Profiler string Tracer string @@ -41,6 +43,10 @@ type GraphQLConfig struct { ComplexityLimit int `default:"4000"` } +type PublishedConfig struct { + IndexURL *url.URL +} + type GCSConfig struct { BucketName string PublicationCacheControl string diff --git a/internal/app/graphql.go b/internal/app/graphql.go index 070c75345..bcb4ea91e 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -68,7 +68,7 @@ func tracerMiddleware(enabled bool) echo.MiddlewareFunc { } } -func graphqlRoute( +func graphqlAPI( ec *echo.Echo, r *echo.Group, conf *ServerConfig, diff --git a/internal/app/main.go b/internal/app/main.go index b76c94880..0b43eee34 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -2,7 +2,12 @@ package app import ( "context" + "os" + "os/signal" + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/log" ) @@ -34,11 +39,61 @@ func Start(debug bool, version string) { // Init 
repositories repos, gateways := initReposAndGateways(ctx, conf, debug) - server := NewServer(&ServerConfig{ + // Start web server + NewServer(&ServerConfig{ Config: conf, Debug: debug, Repos: repos, Gateways: gateways, - }) - server.Run() + }).Run() +} + +type WebServer struct { + address string + appServer *echo.Echo +} + +type ServerConfig struct { + Config *Config + Debug bool + Repos *repo.Container + Gateways *gateway.Container +} + +func NewServer(cfg *ServerConfig) *WebServer { + port := cfg.Config.Port + if port == "" { + port = "8080" + } + + address := "0.0.0.0:" + port + if cfg.Debug { + address = "localhost:" + port + } + + w := &WebServer{ + address: address, + } + + w.appServer = initEcho(cfg) + return w +} + +func (w *WebServer) Run() { + defer log.Infoln("Server shutdown") + + debugLog := "" + if w.appServer.Debug { + debugLog += " with debug mode" + } + log.Infof("Server started%s\n", debugLog) + + go func() { + err := w.appServer.Start(w.address) + log.Fatalln(err.Error()) + }() + + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt) + <-quit } diff --git a/internal/app/userAPIs.go b/internal/app/private.go similarity index 99% rename from internal/app/userAPIs.go rename to internal/app/private.go index 9f2b11163..8fafd4a31 100644 --- a/internal/app/userAPIs.go +++ b/internal/app/private.go @@ -52,7 +52,8 @@ func getEncoder(w io.Writer, ext string) (encoding.Encoder, string) { } return nil, "" } -func userRoute( + +func privateAPI( ec *echo.Echo, r *echo.Group, repos *repo.Container, diff --git a/internal/app/public.go b/internal/app/public.go index 66d454954..3f020c9b0 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -1,49 +1,23 @@ package app import ( - "errors" + "context" + "crypto/subtle" "fmt" "net/http" + "net/url" + "os" "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" http1 "github.com/reearth/reearth-backend/internal/adapter/http" 
"github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interactor" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" - "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" - "github.com/reearth/reearth-backend/pkg/id" ) -type inputJSON struct { - DatasetSchemaID string `json:"datasetSchemaId"` - Author string `json:"author"` - Content string `json:"content"` - Target *string `json:"target"` - Lat *float64 `json:"lat"` - Lng *float64 `json:"lng"` -} - -func toResponseValue(v *dataset.Value) interface{} { - if v == nil { - return nil - } - switch v2 := v.Value().(type) { - case float64: - return v2 - case string: - return v2 - case dataset.LatLng: - return map[string]float64{ - "lat": v2.Lat, - "lng": v2.Lng, - } - } - return nil -} - -func publicRoute( +func publicAPI( ec *echo.Echo, r *echo.Group, conf *Config, @@ -51,59 +25,122 @@ func publicRoute( gateways *gateway.Container, ) { controller := http1.NewUserController(interactor.NewUser(repos, gateways, conf.SignupSecret)) + publishedController := http1.NewPublishedController(interactor.NewPublished(repos.Project, gateways.File, "")) - // TODO: move to adapter and usecase layer - r.POST("/comments", func(c echo.Context) error { - var inp inputJSON + r.GET("/ping", func(c echo.Context) error { + return c.JSON(http.StatusOK, "pong") + }) + + r.POST("/signup", func(c echo.Context) error { + var inp http1.CreateUserInput if err := c.Bind(&inp); err != nil { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: err} + return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} } - dssid, err := id.DatasetSchemaIDFrom(inp.DatasetSchemaID) + output, err := controller.CreateUser(c.Request().Context(), inp) if err != nil { - return &echo.HTTPError{Code: http.StatusNotFound, 
Message: err1.ErrNotFound} + return err } - if inp.Author == "" { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: errors.New("require author value")} + + return c.JSON(http.StatusOK, output) + }) + + r.GET("/published/:name", func(c echo.Context) error { + name := c.Param("string") + if name == "" { + return echo.ErrNotFound } - if inp.Content == "" { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: errors.New("require content value")} + + res, err := publishedController.Metadata(c.Request().Context(), name) + if err != nil { + return err } - interactor := interactor.NewDataset(repos, gateways) - dss, ds, err := interactor.AddDynamicDataset(c.Request().Context(), interfaces.AddDynamicDatasetParam{ - SchemaId: dssid, - Author: inp.Author, - Content: inp.Content, - Lat: inp.Lat, - Lng: inp.Lng, - Target: inp.Target, - }) + return c.JSON(http.StatusOK, res) + }) + + r.GET("/published_data/:name", func(c echo.Context) error { + name := c.Param("string") + if name == "" { + return echo.ErrNotFound + } + + r, err := publishedController.Data(c.Request().Context(), name) if err != nil { - if errors.Is(err1.ErrNotFound, err) { - return &echo.HTTPError{Code: http.StatusNotFound, Message: err} - } - return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} + return err } - response := make(map[string]interface{}) - response["id"] = ds.ID().String() - for _, f := range dss.Fields() { - response[f.Name()] = toResponseValue(ds.Field(f.ID()).Value()) + + return c.Stream(http.StatusOK, "application/json", r) + }) +} + +func publishedRoute( + ec *echo.Echo, + r *echo.Group, + conf *Config, + repos *repo.Container, + gateways *gateway.Container, +) { + var i interfaces.Published + if conf.Published.IndexURL == nil || conf.Published.IndexURL.String() == "" { + html, err := os.ReadFile("web/published.html") + if err == nil { + i = interactor.NewPublished(repos.Project, gateways.File, string(html)) + } else { + i = 
interactor.NewPublished(repos.Project, gateways.File, "") } - return c.JSON(http.StatusOK, response) + } else { + i = interactor.NewPublishedWithURL(repos.Project, gateways.File, conf.Published.IndexURL) + } + contr := http1.NewPublishedController(i) + + key := struct{}{} + auth := middleware.BasicAuthWithConfig(middleware.BasicAuthConfig{ + Validator: func(user string, password string, c echo.Context) (bool, error) { + md, ok := c.Request().Context().Value(key).(interfaces.ProjectPublishedMetadata) + if !ok { + return true, echo.ErrNotFound + } + return !md.IsBasicAuthActive || subtle.ConstantTimeCompare([]byte(user), []byte(md.BasicAuthUsername)) == 1 && subtle.ConstantTimeCompare([]byte(password), []byte(md.BasicAuthPassword)) == 1, nil + }, + Skipper: func(c echo.Context) bool { + name := c.Param("name") + if name == "" { + return true + } + + md, err := contr.Metadata(c.Request().Context(), name) + if err != nil { + return true + } + + c.SetRequest(c.Request().WithContext(context.WithValue(c.Request().Context(), key, md))) + return !md.IsBasicAuthActive + }, }) - r.POST("/signup", func(c echo.Context) error { - var inp http1.CreateUserInput - if err := c.Bind(&inp); err != nil { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} + r.GET("/:name/data.json", func(c echo.Context) error { + r, err := contr.Data(c.Request().Context(), c.Param("name")) + if err != nil { + return err } - output, err := controller.CreateUser(c.Request().Context(), inp) + return c.Stream(http.StatusOK, "application/json", r) + }, auth) + + r.GET("/:name/", func(c echo.Context) error { + index, err := contr.Index(c.Request().Context(), c.Param("name"), &url.URL{ + Scheme: "http", + Host: c.Request().Host, + Path: c.Request().URL.Path, + }) if err != nil { return err } + if index == "" { + return echo.ErrNotFound + } - return c.JSON(http.StatusOK, output) - }) + return c.HTML(http.StatusOK, index) + }, auth) } diff --git 
a/internal/app/server.go b/internal/app/server.go deleted file mode 100644 index b52b7ffc2..000000000 --- a/internal/app/server.go +++ /dev/null @@ -1,89 +0,0 @@ -package app - -import ( - "net/http" - _ "net/http/pprof" - "os" - "os/signal" - - "github.com/labstack/echo/v4" - "github.com/labstack/echo/v4/middleware" - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/internal/usecase/repo" - "github.com/reearth/reearth-backend/pkg/log" - echotracer "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo" -) - -type WebServer struct { - address string - appServer *echo.Echo -} - -type ServerConfig struct { - Config *Config - Debug bool - Repos *repo.Container - Gateways *gateway.Container -} - -func NewServer(cfg *ServerConfig) *WebServer { - port := cfg.Config.Port - if port == "" { - port = "8080" - } - - address := "0.0.0.0:" + port - if cfg.Debug { - address = "localhost:" + port - } - - w := &WebServer{ - address: address, - } - - w.appServer = initAppEcho(cfg) - return w -} - -func (w *WebServer) Run() { - defer log.Infoln("Server shutdown") - - debugLog := "" - if w.appServer.Debug { - debugLog += " with debug mode" - } - log.Infof("Server started%s\n", debugLog) - - go func() { - err := w.appServer.Start(w.address) - log.Fatalln(err.Error()) - }() - - quit := make(chan os.Signal, 1) - signal.Notify(quit, os.Interrupt) - <-quit -} - -func newEcho(cfg *ServerConfig) *echo.Echo { - if cfg.Config == nil { - log.Fatalln("ServerConfig.Config is nil") - } - - e := echo.New() - e.Debug = cfg.Debug - e.HideBanner = true - e.HidePort = true - - logger := GetEchoLogger() - e.Logger = logger - e.Use(logger.Hook()) - - e.Use(middleware.Recover(), echotracer.Middleware("reearth-backend")) - - if e.Debug { - // enable pprof - e.GET("/debug/pprof/*", echo.WrapHandler(http.DefaultServeMux)) - } - - return e -} diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 
9f4c692f9..6b4c4a58c 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -68,7 +68,6 @@ func (i *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagin } func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -118,7 +117,6 @@ func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, o } func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -351,7 +349,6 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP } func (i *Project) createAsset(ctx context.Context, f *file.File, t id.TeamID) (_ *asset.Asset, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -388,7 +385,6 @@ func (i *Project) createAsset(ctx context.Context, f *file.File, t id.TeamID) (_ } func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator *usecase.Operator) (err error) { - tx, err := i.transaction.Begin() if err != nil { return diff --git a/internal/usecase/interactor/published.go b/internal/usecase/interactor/published.go new file mode 100644 index 000000000..6a14d1432 --- /dev/null +++ b/internal/usecase/interactor/published.go @@ -0,0 +1,139 @@ +package interactor + +import ( + "bytes" + "context" + "errors" + "html" + "html/template" + "io" + "net/http" + "net/url" + "regexp" + "strings" + "time" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/cache" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/log" +) + +type 
Published struct { + project repo.Project + file gateway.File + indexHTML *cache.Cache + indexHTMLStr string +} + +func NewPublished(project repo.Project, file gateway.File, indexHTML string) interfaces.Published { + return &Published{ + project: project, + file: file, + indexHTMLStr: indexHTML, + } +} + +func NewPublishedWithURL(project repo.Project, file gateway.File, indexHTMLURL *url.URL) interfaces.Published { + return &Published{ + project: project, + file: file, + indexHTML: cache.New(func(c context.Context, i interface{}) (interface{}, error) { + req, err := http.NewRequestWithContext(c, http.MethodGet, indexHTMLURL.String(), nil) + if err != nil { + return nil, err + } + res, err := http.DefaultClient.Do(req) + if err != nil { + log.Errorf("published index: conn err: %s", err) + return nil, errors.New("failed to fetch HTML") + } + if res.StatusCode >= 300 { + log.Errorf("published index: status err: %d", res.StatusCode) + return nil, errors.New("failed to fetch HTML") + } + defer func() { + _ = res.Body.Close() + }() + str, err := io.ReadAll(res.Body) + if err != nil { + log.Errorf("published index: read err: %s", err) + return "", errors.New("failed to fetch HTML") + } + return string(str), nil + }, time.Hour), + } +} + +func (i *Published) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + prj, err := i.project.FindByPublicName(ctx, name) + if err != nil || prj == nil { + return interfaces.ProjectPublishedMetadata{}, err1.ErrNotFound + } + + return interfaces.ProjectPublishedMetadataFrom(prj), nil +} + +func (i *Published) Data(ctx context.Context, name string) (io.Reader, error) { + r, err := i.file.ReadBuiltSceneFile(ctx, name) + if err != nil { + return nil, err + } + + return r, nil +} + +func (i *Published) Index(ctx context.Context, name string, u *url.URL) (string, error) { + prj, err := i.project.FindByPublicName(ctx, name) + if err != nil || prj == nil { + return "", err1.ErrNotFound + } + + md := 
interfaces.ProjectPublishedMetadataFrom(prj) + + html := i.indexHTMLStr + if i.indexHTML != nil { + htmli, err := i.indexHTML.Get(ctx) + if err != nil { + return "", err + } + html = htmli.(string) + } + return renderIndex(html, u.String(), md), nil +} + +const headers = `{{if .title}} + {{end}}{{if .description}} + + {{end}}{{if .image}} + + + {{end}} + + {{if .noindex}} + {{end}} +` + +var ( + headersTemplate = template.Must(template.New("headers").Parse(headers)) + titleRegexp = regexp.MustCompile(".+?") +) + +// renderIndex returns index HTML with OGP and some meta tags for the project. +func renderIndex(index, url string, d interfaces.ProjectPublishedMetadata) string { + if d.Title != "" { + index = titleRegexp.ReplaceAllLiteralString(index, ""+html.EscapeString(d.Title)+"") + } + var b bytes.Buffer + _ = headersTemplate.Execute(&b, + map[string]interface{}{ + "title": d.Title, + "description": d.Description, + "image": d.Image, + "noindex": d.Noindex, + "url": url, + }) + return strings.Replace(index, "", b.String()+"", -1) +} diff --git a/internal/usecase/interactor/published_test.go b/internal/usecase/interactor/published_test.go new file mode 100644 index 000000000..b6c31aae9 --- /dev/null +++ b/internal/usecase/interactor/published_test.go @@ -0,0 +1,35 @@ +package interactor + +import ( + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/stretchr/testify/assert" +) + +func TestRenderIndex(t *testing.T) { + assert.Equal(t, ` + xxx> + + + + + + + + + + +`, renderIndex( + ` + Foobar +`, + "https://xxss.com", + interfaces.ProjectPublishedMetadata{ + Title: "xxx>", + Description: "desc", + Image: "hogehoge", + Noindex: true, + }, + )) +} diff --git a/internal/usecase/interfaces/published.go b/internal/usecase/interfaces/published.go new file mode 100644 index 000000000..0034aede5 --- /dev/null +++ b/internal/usecase/interfaces/published.go @@ -0,0 +1,37 @@ +package interfaces + +import ( + "context" + "io" + 
"net/url" + + "github.com/reearth/reearth-backend/pkg/project" +) + +type ProjectPublishedMetadata struct { + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Image string `json:"image,omitempty"` + Noindex bool `json:"noindex,omitempty"` + IsBasicAuthActive bool `json:"isBasicAuthActive,omitempty"` + BasicAuthUsername string `json:"basicAuthUsername,omitempty"` + BasicAuthPassword string `json:"basicAuthPassword,omitempty"` +} + +func ProjectPublishedMetadataFrom(prj *project.Project) ProjectPublishedMetadata { + return ProjectPublishedMetadata{ + Title: prj.PublicTitle(), + Description: prj.PublicDescription(), + Image: prj.PublicImage(), + Noindex: prj.PublicNoIndex(), + IsBasicAuthActive: prj.IsBasicAuthActive(), + BasicAuthUsername: prj.BasicAuthUsername(), + BasicAuthPassword: prj.BasicAuthPassword(), + } +} + +type Published interface { + Metadata(context.Context, string) (ProjectPublishedMetadata, error) + Data(context.Context, string) (io.Reader, error) + Index(context.Context, string, *url.URL) (string, error) +} diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go new file mode 100644 index 000000000..b7f8e310d --- /dev/null +++ b/pkg/cache/cache.go @@ -0,0 +1,56 @@ +package cache + +import ( + "context" + "sync" + "time" +) + +// Cache holds data can be accessed synchronously. The data will be automatically updated when it expires. 
+type Cache struct { + updater func(context.Context, interface{}) (interface{}, error) + expiresIn time.Duration + updatedAt time.Time + lock sync.Mutex + data interface{} + now func() time.Time +} + +func New(updater func(context.Context, interface{}) (interface{}, error), expiresIn time.Duration) *Cache { + return &Cache{updater: updater, expiresIn: expiresIn} +} + +func (c *Cache) Get(ctx context.Context) (interface{}, error) { + if c == nil { + return nil, nil + } + + c.lock.Lock() + defer c.lock.Unlock() + + if c.updatedAt.IsZero() || c.updatedAt.Add(c.expiresIn).Before(c.currentTime()) { + if err := c.update(ctx); err != nil { + return c.data, err + } + } + return c.data, nil +} + +func (c *Cache) update(ctx context.Context) error { + var err error + data, err := c.updater(ctx, c.data) + if err != nil { + return err + } + + c.data = data + c.updatedAt = c.currentTime() + return nil +} + +func (c *Cache) currentTime() time.Time { + if c.now == nil { + return time.Now() + } + return c.now() +} diff --git a/pkg/cache/cache_test.go b/pkg/cache/cache_test.go new file mode 100644 index 000000000..daa6cc2da --- /dev/null +++ b/pkg/cache/cache_test.go @@ -0,0 +1,81 @@ +package cache + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestCache_Get(t *testing.T) { + ctx := context.Background() + data := &struct{}{} + err := errors.New("err!") + var cache *Cache + called := 0 + + res, e := cache.Get(ctx) // nil cache + assert.NoError(t, e) + assert.Nil(t, res) + + cache = New(func(c context.Context, i interface{}) (interface{}, error) { + assert.Same(t, ctx, c) + if called == 0 { + assert.Nil(t, i) + } else { + assert.Same(t, cache.data, i) + } + called++ + if called == 3 { + return data, err + } + return data, nil + }, time.Duration(0)) // duration 0 means data will be updated every time + + res, e = cache.Get(ctx) // first + assert.NoError(t, e) + assert.Same(t, data, res) + assert.Equal(t, 1, called) + + res, e 
= cache.Get(ctx) // second + assert.NoError(t, e) + assert.Same(t, data, res) + assert.Equal(t, 2, called) + + res, e = cache.Get(ctx) // third + assert.Same(t, err, e) + assert.Same(t, data, res) + assert.Equal(t, 3, called) +} + +func TestCache_Get2(t *testing.T) { + ctx := context.Background() + data := &struct{}{} + now := time.Date(2022, 6, 4, 0, 0, 0, 0, time.UTC) + called := 0 + + cache := New(func(_ context.Context, _ interface{}) (interface{}, error) { + called++ + return data, nil + }, time.Second) + cache.now = func() time.Time { return now } + + assert.Equal(t, 0, called) + _, _ = cache.Get(ctx) + assert.Equal(t, 1, called) + _, _ = cache.Get(ctx) + assert.Equal(t, 1, called) + now = now.Add(time.Millisecond) + _, _ = cache.Get(ctx) + assert.Equal(t, 1, called) + now = now.Add(time.Second) + _, _ = cache.Get(ctx) + assert.Equal(t, 2, called) + _, _ = cache.Get(ctx) + assert.Equal(t, 2, called) + now = now.Add(time.Second * 2) + _, _ = cache.Get(ctx) + assert.Equal(t, 3, called) +} From 25da0d1732881a97a0b142c69410db934e6bc92d Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 7 Jul 2021 08:02:23 +0300 Subject: [PATCH 042/253] fix: dataset link merge bug #378 (#18) * - return the overridden value * - add test case * - fix bug --- internal/graphql/resolver_property.go | 8 ++-- internal/graphql/resolver_property_test.go | 50 ++++++++++++++++++++++ 2 files changed, 54 insertions(+), 4 deletions(-) create mode 100644 internal/graphql/resolver_property_test.go diff --git a/internal/graphql/resolver_property.go b/internal/graphql/resolver_property.go index 72f6ca1cf..7b98b0022 100644 --- a/internal/graphql/resolver_property.go +++ b/internal/graphql/resolver_property.go @@ -142,7 +142,7 @@ func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *graphql1.P defer exit() datasetLoader := dataloader.DataLoadersFromContext(ctx).Dataset - return actualValue(datasetLoader, obj.Value, obj.Links) + return 
actualValue(datasetLoader, obj.Value, obj.Links, false) } func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { @@ -422,7 +422,7 @@ func (r *mergedPropertyFieldResolver) ActualValue(ctx context.Context, obj *grap defer exit() datasetLoader := dataloader.DataLoadersFromContext(ctx).Dataset - return actualValue(datasetLoader, obj.Value, obj.Links) + return actualValue(datasetLoader, obj.Value, obj.Links, obj.Overridden) } type propertyGroupListResolver struct{ *Resolver } @@ -465,8 +465,8 @@ func (*propertyGroupResolver) SchemaGroup(ctx context.Context, obj *graphql1.Pro return s.Group(obj.SchemaGroupID), nil } -func actualValue(datasetLoader dataloader.DatasetDataLoader, value interface{}, links []*graphql1.PropertyFieldLink) (interface{}, error) { - if len(links) == 0 { +func actualValue(datasetLoader dataloader.DatasetDataLoader, value interface{}, links []*graphql1.PropertyFieldLink, overridden bool) (interface{}, error) { + if len(links) == 0 || overridden { return &value, nil } // ๅ…ˆ้ ญใฎใƒชใƒณใ‚ฏใซใ—ใ‹DatasetใŒๅ‰ฒใ‚Šๅฝ“ใฆใ‚‰ใ‚Œใฆใ„ใชใ„โ†’ๅ…ˆ้ ญใ‹ใ‚‰้ †ใ€…ใซ่พฟใฃใฆใ„ใ diff --git a/internal/graphql/resolver_property_test.go b/internal/graphql/resolver_property_test.go new file mode 100644 index 000000000..8bf5ba9be --- /dev/null +++ b/internal/graphql/resolver_property_test.go @@ -0,0 +1,50 @@ +package graphql + +import ( + "testing" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/stretchr/testify/assert" +) + +func Test_actualValue(t *testing.T) { + value := 300 + + type args struct { + datasetLoader dataloader.DatasetDataLoader + value interface{} + links []*graphql1.PropertyFieldLink + overridden bool + } + var tests = []struct { + name string + args args + want interface{} + wantErr bool + }{ + { + "Overridden value", + args{ + datasetLoader: nil, 
+ value: value, + links: nil, + overridden: true, + }, + 300, + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := actualValue(tt.args.datasetLoader, tt.args.value, tt.args.links, tt.args.overridden) + if (err != nil) != tt.wantErr { + t.Errorf("actualValue() error = %v, wantErr %v", err, tt.wantErr) + return + } + temp := got.(*interface{}) + t2 := (*temp).(int) + assert.Equal(t, tt.want, t2) + }) + } +} From dcb4b0842ee19fdf35a0859e2d28932d42d8eefc Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 7 Jul 2021 08:09:57 +0300 Subject: [PATCH 043/253] fix: ogp image for published page (#17) * - use project image as fallback image if the public image does not exists * - use project data as fallback if the public data does not exists * - fix routing --- internal/app/public.go | 4 ++-- internal/usecase/interfaces/published.go | 19 ++++++++++++++++--- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/internal/app/public.go b/internal/app/public.go index 3f020c9b0..61840dbba 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -46,7 +46,7 @@ func publicAPI( }) r.GET("/published/:name", func(c echo.Context) error { - name := c.Param("string") + name := c.Param("name") if name == "" { return echo.ErrNotFound } @@ -60,7 +60,7 @@ func publicAPI( }) r.GET("/published_data/:name", func(c echo.Context) error { - name := c.Param("string") + name := c.Param("name") if name == "" { return echo.ErrNotFound } diff --git a/internal/usecase/interfaces/published.go b/internal/usecase/interfaces/published.go index 0034aede5..d0476702e 100644 --- a/internal/usecase/interfaces/published.go +++ b/internal/usecase/interfaces/published.go @@ -19,10 +19,23 @@ type ProjectPublishedMetadata struct { } func ProjectPublishedMetadataFrom(prj *project.Project) ProjectPublishedMetadata { + title := prj.PublicTitle() + description := prj.PublicDescription() + image := 
prj.PublicImage() + if title == "" { + title = prj.Name() + } + if description == "" { + description = prj.Description() + } + if image == "" { + image = prj.ImageURL().String() + } + return ProjectPublishedMetadata{ - Title: prj.PublicTitle(), - Description: prj.PublicDescription(), - Image: prj.PublicImage(), + Title: title, + Description: description, + Image: image, Noindex: prj.PublicNoIndex(), IsBasicAuthActive: prj.IsBasicAuthActive(), BasicAuthUsername: prj.BasicAuthUsername(), From 2ef7efdd917cd95aa43bec3c0d2b24a0bbeccd22 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Thu, 8 Jul 2021 19:03:08 +0300 Subject: [PATCH 044/253] feat: import dataset from google sheets (#16) * - prepare graphql mutation and types - prepare the controller * implement load google sheet method * regenerate graphql * restructure the feature in layers * - fix some names - set imported dataset name as sheet name * - add test case * - Use readCloser instead of using files * - fix method name * - remove use cases dependency * - remove token --- go.mod | 1 + go.sum | 5 + .../adapter/graphql/controller_dataset.go | 15 ++ internal/adapter/graphql/models_gen.go | 8 ++ internal/app/repo.go | 11 +- internal/graphql/generated.go | 136 ++++++++++++++++++ internal/graphql/resolver_mutation.go | 7 + internal/infrastructure/google/fetch.go | 26 ++++ internal/infrastructure/google/fetch_test.go | 90 ++++++++++++ internal/infrastructure/google/google.go | 18 +++ internal/usecase/gateway/container.go | 1 + internal/usecase/gateway/google.go | 9 ++ internal/usecase/interactor/dataset.go | 54 +++++-- internal/usecase/interfaces/dataset.go | 9 ++ schema.graphql | 9 ++ 15 files changed, 383 insertions(+), 16 deletions(-) create mode 100644 internal/infrastructure/google/fetch.go create mode 100644 internal/infrastructure/google/fetch_test.go create mode 100644 internal/infrastructure/google/google.go create mode 100644 internal/usecase/gateway/google.go diff 
--git a/go.mod b/go.mod index 57c096f82..553d52426 100644 --- a/go.mod +++ b/go.mod @@ -50,6 +50,7 @@ require ( golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/tools v0.1.0 gopkg.in/go-playground/colors.v1 v1.2.0 + gopkg.in/h2non/gock.v1 v1.1.0 // indirect gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect ) diff --git a/go.sum b/go.sum index 249bd0339..b4dd9e75f 100644 --- a/go.sum +++ b/go.sum @@ -232,6 +232,8 @@ github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB7 github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= +github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= @@ -310,6 +312,7 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= 
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= @@ -809,6 +812,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/go-playground/colors.v1 v1.2.0 h1:SPweMUve+ywPrfwao+UvfD5Ah78aOLUkT5RlJiZn52c= gopkg.in/go-playground/colors.v1 v1.2.0/go.mod h1:AvbqcMpNXVl5gBrM20jBm3VjjKBbH/kI5UnqjU7lxFI= +gopkg.in/h2non/gock.v1 v1.1.0 h1:Yy6sSXyTP9wYc6+H7U0NuB1LQ6H2HYmDp2sxFQ8vTEY= +gopkg.in/h2non/gock.v1 v1.1.0/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/internal/adapter/graphql/controller_dataset.go b/internal/adapter/graphql/controller_dataset.go index df729babe..dcf974d33 100644 --- a/internal/adapter/graphql/controller_dataset.go +++ b/internal/adapter/graphql/controller_dataset.go @@ -78,6 +78,21 @@ func (c *DatasetController) ImportDataset(ctx context.Context, i *ImportDatasetI return &ImportDatasetPayload{DatasetSchema: toDatasetSchema(res)}, nil } +func (c *DatasetController) ImportDatasetFromGoogleSheet(ctx context.Context, i *ImportDatasetFromGoogleSheetInput, o *usecase.Operator) (*ImportDatasetPayload, error) { + res, err := c.usecase().ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ + Token: i.AccessToken, + FileID: i.FileID, + SheetName: i.SheetName, + SceneId: id.SceneID(i.SceneID), + SchemaId: id.DatasetSchemaIDFromRefID(i.DatasetSchemaID), + }, o) + if err != nil { + return nil, err + } + + return &ImportDatasetPayload{DatasetSchema: toDatasetSchema(res)}, nil +} + func (c *DatasetController) GraphFetchSchema(ctx 
context.Context, i id.ID, depth int, operator *usecase.Operator) ([]*DatasetSchema, []error) { res, err := c.usecase().GraphFetchSchema(ctx, id.DatasetSchemaID(i), depth, operator) if err != nil { diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index 1fc6e0b47..f3319715b 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -316,6 +316,14 @@ type DeleteTeamPayload struct { TeamID id.ID `json:"teamId"` } +type ImportDatasetFromGoogleSheetInput struct { + AccessToken string `json:"accessToken"` + FileID string `json:"fileId"` + SheetName string `json:"sheetName"` + SceneID id.ID `json:"sceneId"` + DatasetSchemaID *id.ID `json:"datasetSchemaId"` +} + type ImportDatasetInput struct { File graphql.Upload `json:"file"` SceneID id.ID `json:"sceneId"` diff --git a/internal/app/repo.go b/internal/app/repo.go index ea1b4897a..7050f2759 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -6,6 +6,11 @@ import ( "time" "github.com/reearth/reearth-backend/internal/infrastructure/github" + "github.com/reearth/reearth-backend/internal/infrastructure/google" + + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + mongotrace "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver" "github.com/reearth/reearth-backend/internal/infrastructure/adapter" "github.com/reearth/reearth-backend/internal/infrastructure/auth0" @@ -15,9 +20,6 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/log" - "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" - mongotrace "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver" ) func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo.Container, *gateway.Container) { @@ -77,6 +79,9 @@ func initReposAndGateways(ctx 
context.Context, conf *Config, debug bool) (*repo. // github gateways.PluginRegistry = github.NewPluginRegistry() + // google + gateways.Google = google.NewGoogle() + // release lock of all scenes if err := repos.SceneLock.ReleaseAllLock(context.Background()); err != nil { log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index 400126b9f..dbba8a9d7 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -441,6 +441,7 @@ type ComplexityRoot struct { DeleteProject func(childComplexity int, input graphql1.DeleteProjectInput) int DeleteTeam func(childComplexity int, input graphql1.DeleteTeamInput) int ImportDataset func(childComplexity int, input graphql1.ImportDatasetInput) int + ImportDatasetFromGoogleSheet func(childComplexity int, input graphql1.ImportDatasetFromGoogleSheetInput) int ImportLayer func(childComplexity int, input graphql1.ImportLayerInput) int InstallPlugin func(childComplexity int, input graphql1.InstallPluginInput) int LinkDatasetToPropertyValue func(childComplexity int, input graphql1.LinkDatasetToPropertyValueInput) int @@ -1004,6 +1005,7 @@ type MutationResolver interface { AddDynamicDataset(ctx context.Context, input graphql1.AddDynamicDatasetInput) (*graphql1.AddDynamicDatasetPayload, error) RemoveDatasetSchema(ctx context.Context, input graphql1.RemoveDatasetSchemaInput) (*graphql1.RemoveDatasetSchemaPayload, error) ImportDataset(ctx context.Context, input graphql1.ImportDatasetInput) (*graphql1.ImportDatasetPayload, error) + ImportDatasetFromGoogleSheet(ctx context.Context, input graphql1.ImportDatasetFromGoogleSheetInput) (*graphql1.ImportDatasetPayload, error) AddDatasetSchema(ctx context.Context, input graphql1.AddDatasetSchemaInput) (*graphql1.AddDatasetSchemaPayload, error) UpdatePropertyValue(ctx context.Context, input graphql1.UpdatePropertyValueInput) (*graphql1.PropertyFieldPayload, error) UpdatePropertyValueLatLng(ctx 
context.Context, input graphql1.UpdatePropertyValueLatLngInput) (*graphql1.PropertyFieldPayload, error) @@ -2814,6 +2816,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.ImportDataset(childComplexity, args["input"].(graphql1.ImportDatasetInput)), true + case "Mutation.importDatasetFromGoogleSheet": + if e.complexity.Mutation.ImportDatasetFromGoogleSheet == nil { + break + } + + args, err := ec.field_Mutation_importDatasetFromGoogleSheet_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.ImportDatasetFromGoogleSheet(childComplexity, args["input"].(graphql1.ImportDatasetFromGoogleSheetInput)), true + case "Mutation.importLayer": if e.complexity.Mutation.ImportLayer == nil { break @@ -6388,6 +6402,14 @@ input ImportDatasetInput { datasetSchemaId: ID } +input ImportDatasetFromGoogleSheetInput { + accessToken: String! + fileId: String! + sheetName: String! + sceneId: ID! + datasetSchemaId: ID +} + input AddDatasetSchemaInput { sceneId: ID! name: String! @@ -6752,6 +6774,7 @@ type Mutation { input: RemoveDatasetSchemaInput! 
): RemoveDatasetSchemaPayload importDataset(input: ImportDatasetInput!): ImportDatasetPayload + importDatasetFromGoogleSheet(input: ImportDatasetFromGoogleSheetInput!): ImportDatasetPayload addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload # Property @@ -7102,6 +7125,21 @@ func (ec *executionContext) field_Mutation_deleteTeam_args(ctx context.Context, return args, nil } +func (ec *executionContext) field_Mutation_importDatasetFromGoogleSheet_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 graphql1.ImportDatasetFromGoogleSheetInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetFromGoogleSheetInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_importDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -16568,6 +16606,45 @@ func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field g return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) } +func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + 
args, err := ec.field_Mutation_importDatasetFromGoogleSheet_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().ImportDatasetFromGoogleSheet(rctx, args["input"].(graphql1.ImportDatasetFromGoogleSheetInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.ImportDatasetPayload) + fc.Result = res + return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) +} + func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -28545,6 +28622,58 @@ func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, o return it, nil } +func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx context.Context, obj interface{}) (graphql1.ImportDatasetFromGoogleSheetInput, error) { + var it graphql1.ImportDatasetFromGoogleSheetInput + var asMap = obj.(map[string]interface{}) + + for k, v := range asMap { + switch k { + case "accessToken": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("accessToken")) + it.AccessToken, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "fileId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fileId")) + it.FileID, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "sheetName": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sheetName")) + it.SheetName, err = ec.unmarshalNString2string(ctx, 
v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "datasetSchemaId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) + it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context, obj interface{}) (graphql1.ImportDatasetInput, error) { var it graphql1.ImportDatasetInput var asMap = obj.(map[string]interface{}) @@ -32695,6 +32824,8 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_removeDatasetSchema(ctx, field) case "importDataset": out.Values[i] = ec._Mutation_importDataset(ctx, field) + case "importDatasetFromGoogleSheet": + out.Values[i] = ec._Mutation_importDatasetFromGoogleSheet(ctx, field) case "addDatasetSchema": out.Values[i] = ec._Mutation_addDatasetSchema(ctx, field) case "updatePropertyValue": @@ -36471,6 +36602,11 @@ func (ec *executionContext) marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘bac return res } +func (ec *executionContext) unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetFromGoogleSheetInput(ctx context.Context, v interface{}) (graphql1.ImportDatasetFromGoogleSheetInput, error) { + res, err := ec.unmarshalInputImportDatasetFromGoogleSheetInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetInput(ctx context.Context, v interface{}) 
(graphql1.ImportDatasetInput, error) { res, err := ec.unmarshalInputImportDatasetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) diff --git a/internal/graphql/resolver_mutation.go b/internal/graphql/resolver_mutation.go index b10d36587..788969ca2 100644 --- a/internal/graphql/resolver_mutation.go +++ b/internal/graphql/resolver_mutation.go @@ -425,3 +425,10 @@ func (r *mutationResolver) ImportDataset(ctx context.Context, input graphql1.Imp return r.config.Controllers.DatasetController.ImportDataset(ctx, &input, getOperator(ctx)) } + +func (r *mutationResolver) ImportDatasetFromGoogleSheet(ctx context.Context, input graphql1.ImportDatasetFromGoogleSheetInput) (*graphql1.ImportDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + return r.config.Controllers.DatasetController.ImportDatasetFromGoogleSheet(ctx, &input, getOperator(ctx)) +} diff --git a/internal/infrastructure/google/fetch.go b/internal/infrastructure/google/fetch.go new file mode 100644 index 000000000..9ea6af9b5 --- /dev/null +++ b/internal/infrastructure/google/fetch.go @@ -0,0 +1,26 @@ +package google + +import ( + "fmt" + "io" + "net/http" +) + +func fetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error) { + url := fmt.Sprintf("https://docs.google.com/spreadsheets/d/%s/gviz/tq?tqx=out:csv&sheet=%s", fileId, sheetName) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Authorization", "Bearer "+token) + res, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + if res.StatusCode != http.StatusOK { + return nil, fmt.Errorf("StatusCode=%d", res.StatusCode) + } + + return &res.Body, nil +} diff --git a/internal/infrastructure/google/fetch_test.go b/internal/infrastructure/google/fetch_test.go new file mode 100644 index 000000000..b7e8d2c44 --- /dev/null +++ b/internal/infrastructure/google/fetch_test.go @@ -0,0 +1,90 @@ +package google + +import ( + "net/http" + "testing" + + 
"github.com/reearth/reearth-backend/pkg/file" + "github.com/stretchr/testify/assert" + "gopkg.in/h2non/gock.v1" +) + +func Test_fetchCSV(t *testing.T) { + t.Cleanup(func() { + gock.EnableNetworking() + gock.OffAll() + }) + + gock.DisableNetworking() + + type args struct { + token string + fileId string + sheetName string + } + tests := []struct { + name string + setup func() + args args + want *file.File + wantErr bool + }{ + { + name: "Invalid Token", + setup: func() { + gock.New("https://docs.google.com"). + Get("/spreadsheets/d/(.*)/gviz/tq"). + PathParam("d", "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrx0Yok"). + MatchParams(map[string]string{ + "tqx": "out:csv", + "sheet": "Dataset1", + }). + Reply(http.StatusUnauthorized) + }, + args: args{ + token: "xxxx", + fileId: "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrxxxxx", + sheetName: "Dataset1", + }, + wantErr: true, + }, + { + name: "Working scenario", + setup: func() { + gock.New("https://docs.google.com"). + Get("/spreadsheets/d/(.*)/gviz/tq"). + PathParam("d", "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrxxxxx"). + MatchParams(map[string]string{ + "tqx": "out:csv", + "sheet": "Dataset1", + }). + Reply(http.StatusOK). 
+ BodyString("lat,lng,hieght\n30,35,300\n30.1,35,400") + }, + args: args{ + token: "xxxx", + fileId: "1bXBDUrOgYWdHzScMiLNHRUsmNC9SUV4VFOvpqrxxxxx", + sheetName: "Dataset1", + }, + wantErr: false, + }, + } + for _, tt := range tests { + tt := tt + + t.Run(tt.name, func(t *testing.T) { + tt.setup() + + got, err := fetchCSV(tt.args.token, tt.args.fileId, tt.args.sheetName) + if (err != nil) != tt.wantErr { + t.Errorf("fetchCSV() error = %v, wantErr %v", err, tt.wantErr) + return + } + if tt.wantErr { + assert.Nil(t, got) + return + } + assert.NotNil(t, got) + }) + } +} diff --git a/internal/infrastructure/google/google.go b/internal/infrastructure/google/google.go new file mode 100644 index 000000000..7810b2784 --- /dev/null +++ b/internal/infrastructure/google/google.go @@ -0,0 +1,18 @@ +package google + +import ( + "io" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +type google struct { +} + +func NewGoogle() gateway.Google { + return &google{} +} + +func (g google) FetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error) { + return fetchCSV(token, fileId, sheetName) +} diff --git a/internal/usecase/gateway/container.go b/internal/usecase/gateway/container.go index 5f730d7e2..149e422b5 100644 --- a/internal/usecase/gateway/container.go +++ b/internal/usecase/gateway/container.go @@ -7,4 +7,5 @@ type Container struct { DataSource DataSource PluginRegistry PluginRegistry File File + Google Google } diff --git a/internal/usecase/gateway/google.go b/internal/usecase/gateway/google.go new file mode 100644 index 000000000..26655fbab --- /dev/null +++ b/internal/usecase/gateway/google.go @@ -0,0 +1,9 @@ +package gateway + +import ( + "io" +) + +type Google interface { + FetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error) +} diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index a1a0791b4..7590ae9cb 100644 --- a/internal/usecase/interactor/dataset.go +++ 
b/internal/usecase/interactor/dataset.go @@ -3,10 +3,12 @@ package interactor import ( "context" "errors" + "io" "strings" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" @@ -34,6 +36,7 @@ type Dataset struct { transaction repo.Transaction datasource gateway.DataSource file gateway.File + google gateway.Google } func NewDataset(r *repo.Container, gr *gateway.Container) interfaces.Dataset { @@ -48,6 +51,7 @@ func NewDataset(r *repo.Container, gr *gateway.Container) interfaces.Dataset { transaction: r.Transaction, datasource: gr.DataSource, file: gr.File, + google: gr.Google, } } @@ -185,6 +189,34 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase return nil, interfaces.ErrFileNotIncluded } + separator := ',' + if strings.HasSuffix(inp.File.Name, ".tsv") { + separator = '\t' + } + + return i.importDataset(ctx, inp.File.Content, inp.File.Name, separator, inp.SceneId, inp.SchemaId) +} + +func (i *Dataset) ImportDatasetFromGoogleSheet(ctx context.Context, inp interfaces.ImportDatasetFromGoogleSheetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { + if err := i.CanWriteScene(ctx, inp.SceneId, operator); err != nil { + return nil, err + } + + csvFile, err := i.google.FetchCSV(inp.Token, inp.FileID, inp.SheetName) + if err != nil { + return nil, err + } + defer func() { + err = (*csvFile).Close() + if err != nil { + log.Fatal(err) + } + }() + + return i.importDataset(ctx, *csvFile, inp.SheetName, ',', inp.SceneId, inp.SchemaId) +} + +func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name string, separator rune, sceneId id.SceneID, schemaId *id.DatasetSchemaID) (_ *dataset.Schema, err error) { tx, err := i.transaction.Begin() if err != nil { return @@ -195,20 +227,16 
@@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase } }() - seperator := ',' - if strings.HasSuffix(inp.File.Name, ".tsv") { - seperator = '\t' - } - scenes := []id.SceneID{inp.SceneId} - csv := dataset.NewCSVParser(inp.File.Content, inp.File.Name, seperator) + scenes := []id.SceneID{sceneId} + csv := dataset.NewCSVParser(content, name, separator) err = csv.Init() if err != nil { return nil, err } // replacment mode - if inp.SchemaId != nil { - dss, err := i.datasetSchemaRepo.FindByID(ctx, *inp.SchemaId, scenes) + if schemaId != nil { + dss, err := i.datasetSchemaRepo.FindByID(ctx, *schemaId, scenes) if err != nil { return nil, err } @@ -216,7 +244,7 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase if err != nil { return nil, err } - toreplace, err := i.datasetRepo.FindBySchemaAll(ctx, *inp.SchemaId) + toreplace, err := i.datasetRepo.FindBySchemaAll(ctx, *schemaId) if err != nil { return nil, err } @@ -225,7 +253,7 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase return nil, err } } else { - err = csv.GuessSchema(inp.SceneId) + err = csv.GuessSchema(sceneId) if err != nil { return nil, err } @@ -245,8 +273,8 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase return nil, err } - if inp.SchemaId != nil { - layergroups, err := i.layerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, inp.SceneId, *inp.SchemaId) + if schemaId != nil { + layergroups, err := i.layerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, sceneId, *schemaId) if err != nil { return nil, err } @@ -279,7 +307,7 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase name = rf.Value().Value().(string) } layerItem, layerProperty, err := initializer.LayerItem{ - SceneID: inp.SceneId, + SceneID: sceneId, ParentLayerID: lg.ID(), Plugin: builtin.Plugin(), ExtensionID: &extensionForLinkedLayers, diff --git a/internal/usecase/interfaces/dataset.go 
b/internal/usecase/interfaces/dataset.go index 8a87593de..9f6eee029 100644 --- a/internal/usecase/interfaces/dataset.go +++ b/internal/usecase/interfaces/dataset.go @@ -35,6 +35,14 @@ type ImportDatasetParam struct { SchemaId *id.DatasetSchemaID } +type ImportDatasetFromGoogleSheetParam struct { + Token string + FileID string + SheetName string + SceneId id.SceneID + SchemaId *id.DatasetSchemaID +} + type RemoveDatasetSchemaParam struct { SchemaId id.DatasetSchemaID Force *bool @@ -56,6 +64,7 @@ type Dataset interface { GraphFetch(context.Context, id.DatasetID, int, *usecase.Operator) (dataset.List, error) FetchSchema(context.Context, []id.DatasetSchemaID, *usecase.Operator) (dataset.SchemaList, error) ImportDataset(context.Context, ImportDatasetParam, *usecase.Operator) (*dataset.Schema, error) + ImportDatasetFromGoogleSheet(context.Context, ImportDatasetFromGoogleSheetParam, *usecase.Operator) (*dataset.Schema, error) GraphFetchSchema(context.Context, id.DatasetSchemaID, int, *usecase.Operator) (dataset.SchemaList, error) AddDynamicDatasetSchema(context.Context, AddDynamicDatasetSchemaParam) (*dataset.Schema, error) AddDynamicDataset(context.Context, AddDynamicDatasetParam) (*dataset.Schema, *dataset.Dataset, error) diff --git a/schema.graphql b/schema.graphql index d2ee5cd3e..9d5d82fad 100644 --- a/schema.graphql +++ b/schema.graphql @@ -1054,6 +1054,14 @@ input ImportDatasetInput { datasetSchemaId: ID } +input ImportDatasetFromGoogleSheetInput { + accessToken: String! + fileId: String! + sheetName: String! + sceneId: ID! + datasetSchemaId: ID +} + input AddDatasetSchemaInput { sceneId: ID! name: String! @@ -1418,6 +1426,7 @@ type Mutation { input: RemoveDatasetSchemaInput! 
): RemoveDatasetSchemaPayload importDataset(input: ImportDatasetInput!): ImportDatasetPayload + importDatasetFromGoogleSheet(input: ImportDatasetFromGoogleSheetInput!): ImportDatasetPayload addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload # Property From 5213f3c590f01e7b92a66159e3df20cfca5b3a2b Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 9 Jul 2021 14:42:19 +0900 Subject: [PATCH 045/253] feat: add scenePlugin resolver to layers (#20) --- internal/adapter/graphql/controller_layer.go | 2 +- internal/adapter/graphql/convert_layer.go | 25 +- internal/adapter/graphql/models_gen.go | 18 + internal/graphql/generated.go | 1007 +++++++++++++++++- internal/graphql/resolver_layer.go | 133 +++ schema.graphql | 20 + 6 files changed, 1157 insertions(+), 48 deletions(-) diff --git a/internal/adapter/graphql/controller_layer.go b/internal/adapter/graphql/controller_layer.go index 4bcfc2321..6faadf564 100644 --- a/internal/adapter/graphql/controller_layer.go +++ b/internal/adapter/graphql/controller_layer.go @@ -147,7 +147,7 @@ func (c *LayerController) AddInfoboxField(ctx context.Context, ginput *AddInfobo } return &AddInfoboxFieldPayload{ - InfoboxField: toInfoboxField(infoboxField, nil), + InfoboxField: toInfoboxField(infoboxField, layer.Scene(), nil), Layer: toLayer(layer, nil), }, nil } diff --git a/internal/adapter/graphql/convert_layer.go b/internal/adapter/graphql/convert_layer.go index 1c76d2360..3700d2905 100644 --- a/internal/adapter/graphql/convert_layer.go +++ b/internal/adapter/graphql/convert_layer.go @@ -13,12 +13,13 @@ func toLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { return &LayerItem{ ID: l.ID().ID(), + SceneID: l.Scene().ID(), Name: l.Name(), IsVisible: l.IsVisible(), PropertyID: l.Property().IDRef(), PluginID: l.Plugin(), ExtensionID: l.Extension(), - Infobox: toInfobox(l.Infobox(), l.ID(), l.LinkedDataset()), + Infobox: toInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), LinkedDatasetID: 
l.LinkedDataset().IDRef(), ParentID: parent.IDRef(), } @@ -37,12 +38,13 @@ func toLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { return &LayerGroup{ ID: l.ID().ID(), + SceneID: l.Scene().ID(), Name: l.Name(), IsVisible: l.IsVisible(), PropertyID: l.Property().IDRef(), PluginID: l.Plugin(), ExtensionID: l.Extension(), - Infobox: toInfobox(l.Infobox(), l.ID(), nil), + Infobox: toInfobox(l.Infobox(), l.ID(), l.Scene(), nil), LinkedDatasetSchemaID: l.LinkedDatasetSchema().IDRef(), LayerIds: layers, Root: l.IsRoot(), @@ -79,12 +81,13 @@ func toLayers(layers layer.List, parent *id.LayerID) []Layer { return result } -func toInfoboxField(ibf *layer.InfoboxField, parentDatasetID *id.DatasetID) *InfoboxField { +func toInfoboxField(ibf *layer.InfoboxField, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *InfoboxField { if ibf == nil { return nil } return &InfoboxField{ ID: ibf.ID().ID(), + SceneID: parentSceneID.ID(), PluginID: ibf.Plugin(), ExtensionID: ibf.Extension(), PropertyID: ibf.Property().ID(), @@ -92,17 +95,18 @@ func toInfoboxField(ibf *layer.InfoboxField, parentDatasetID *id.DatasetID) *Inf } } -func toInfobox(ib *layer.Infobox, parent id.LayerID, parentDatasetID *id.DatasetID) *Infobox { +func toInfobox(ib *layer.Infobox, parent id.LayerID, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *Infobox { if ib == nil { return nil } ibFields := ib.Fields() fields := make([]*InfoboxField, 0, len(ibFields)) for _, ibf := range ibFields { - fields = append(fields, toInfoboxField(ibf, parentDatasetID)) + fields = append(fields, toInfoboxField(ibf, parentSceneID, parentDatasetID)) } return &Infobox{ + SceneID: parentSceneID.ID(), PropertyID: ib.Property().ID(), Fields: fields, LayerID: parent.ID(), @@ -116,35 +120,38 @@ func toMergedLayer(layer *layer.Merged) *MergedLayer { } return &MergedLayer{ + SceneID: layer.Scene.ID(), OriginalID: layer.Original.ID(), ParentID: layer.Parent.IDRef(), - Infobox: toMergedInfobox(layer.Infobox), + Infobox: 
toMergedInfobox(layer.Infobox, layer.Scene), Property: toMergedPropertyFromMetadata(layer.Property), } } -func toMergedInfobox(ib *layer.MergedInfobox) *MergedInfobox { +func toMergedInfobox(ib *layer.MergedInfobox, sceneID id.SceneID) *MergedInfobox { if ib == nil { return nil } fields := make([]*MergedInfoboxField, 0, len(ib.Fields)) for _, f := range ib.Fields { - fields = append(fields, toMergedInfoboxField(f)) + fields = append(fields, toMergedInfoboxField(f, sceneID)) } return &MergedInfobox{ + SceneID: sceneID.ID(), Fields: fields, Property: toMergedPropertyFromMetadata(ib.Property), } } -func toMergedInfoboxField(ibf *layer.MergedInfoboxField) *MergedInfoboxField { +func toMergedInfoboxField(ibf *layer.MergedInfoboxField, sceneID id.SceneID) *MergedInfoboxField { if ibf == nil { return nil } return &MergedInfoboxField{ + SceneID: sceneID.ID(), OriginalID: ibf.ID.ID(), PluginID: ibf.Plugin, ExtensionID: ibf.Extension, diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index f3319715b..d57bd4f1e 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -346,6 +346,7 @@ type ImportLayerPayload struct { } type Infobox struct { + SceneID id.ID `json:"sceneId"` LayerID id.ID `json:"layerId"` PropertyID id.ID `json:"propertyId"` Fields []*InfoboxField `json:"fields"` @@ -354,10 +355,12 @@ type Infobox struct { Property *Property `json:"property"` LinkedDataset *Dataset `json:"linkedDataset"` Merged *MergedInfobox `json:"merged"` + Scene *Scene `json:"scene"` } type InfoboxField struct { ID id.ID `json:"id"` + SceneID id.ID `json:"sceneId"` LayerID id.ID `json:"layerId"` PropertyID id.ID `json:"propertyId"` PluginID id.PluginID `json:"pluginId"` @@ -370,6 +373,8 @@ type InfoboxField struct { Extension *PluginExtension `json:"extension"` LinkedDataset *Dataset `json:"linkedDataset"` Merged *MergedInfoboxField `json:"merged"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin 
`json:"scenePlugin"` } type InstallPluginInput struct { @@ -395,6 +400,7 @@ type LatLngHeight struct { type LayerGroup struct { ID id.ID `json:"id"` + SceneID id.ID `json:"sceneId"` Name string `json:"name"` IsVisible bool `json:"isVisible"` PropertyID *id.ID `json:"propertyId"` @@ -411,6 +417,8 @@ type LayerGroup struct { Extension *PluginExtension `json:"extension"` LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` Layers []Layer `json:"layers"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } func (LayerGroup) IsLayers() {} @@ -418,6 +426,7 @@ func (LayerGroup) IsLayer() {} type LayerItem struct { ID id.ID `json:"id"` + SceneID id.ID `json:"sceneId"` Name string `json:"name"` IsVisible bool `json:"isVisible"` PropertyID *id.ID `json:"propertyId"` @@ -432,6 +441,8 @@ type LayerItem struct { Extension *PluginExtension `json:"extension"` LinkedDataset *Dataset `json:"linkedDataset"` Merged *MergedLayer `json:"merged"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } func (LayerItem) IsLayers() {} @@ -448,26 +459,33 @@ type LinkDatasetToPropertyValueInput struct { } type MergedInfobox struct { + SceneID id.ID `json:"sceneID"` Property *MergedProperty `json:"property"` Fields []*MergedInfoboxField `json:"fields"` + Scene *Scene `json:"scene"` } type MergedInfoboxField struct { OriginalID id.ID `json:"originalId"` + SceneID id.ID `json:"sceneID"` PluginID id.PluginID `json:"pluginId"` ExtensionID id.PluginExtensionID `json:"extensionId"` Property *MergedProperty `json:"property"` Plugin *Plugin `json:"plugin"` Extension *PluginExtension `json:"extension"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } type MergedLayer struct { OriginalID id.ID `json:"originalId"` ParentID *id.ID `json:"parentId"` + SceneID id.ID `json:"sceneID"` Property *MergedProperty `json:"property"` Infobox *MergedInfobox `json:"infobox"` Original *LayerItem `json:"original"` Parent 
*LayerGroup `json:"parent"` + Scene *Scene `json:"scene"` } type MergedProperty struct { diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index dbba8a9d7..25458d3b9 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -50,6 +50,7 @@ type ResolverRoot interface { InfoboxField() InfoboxFieldResolver LayerGroup() LayerGroupResolver LayerItem() LayerItemResolver + MergedInfobox() MergedInfoboxResolver MergedInfoboxField() MergedInfoboxFieldResolver MergedLayer() MergedLayerResolver MergedProperty() MergedPropertyResolver @@ -272,6 +273,8 @@ type ComplexityRoot struct { Merged func(childComplexity int) int Property func(childComplexity int) int PropertyID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int } InfoboxField struct { @@ -288,6 +291,9 @@ type ComplexityRoot struct { PluginID func(childComplexity int) int Property func(childComplexity int) int PropertyID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int } InstallPluginPayload struct { @@ -324,6 +330,9 @@ type ComplexityRoot struct { Property func(childComplexity int) int PropertyID func(childComplexity int) int Root func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int } LayerItem struct { @@ -342,11 +351,16 @@ type ComplexityRoot struct { PluginID func(childComplexity int) int Property func(childComplexity int) int PropertyID func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int } MergedInfobox struct { Fields func(childComplexity int) int Property func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int } MergedInfoboxField struct { @@ -356,6 +370,9 @@ 
type ComplexityRoot struct { Plugin func(childComplexity int) int PluginID func(childComplexity int) int Property func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int) int } MergedLayer struct { @@ -365,6 +382,8 @@ type ComplexityRoot struct { Parent func(childComplexity int) int ParentID func(childComplexity int) int Property func(childComplexity int) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int } MergedProperty struct { @@ -920,6 +939,7 @@ type InfoboxResolver interface { Property(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Property, error) LinkedDataset(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Dataset, error) Merged(ctx context.Context, obj *graphql1.Infobox) (*graphql1.MergedInfobox, error) + Scene(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Scene, error) } type InfoboxFieldResolver interface { Layer(ctx context.Context, obj *graphql1.InfoboxField) (graphql1.Layer, error) @@ -929,6 +949,8 @@ type InfoboxFieldResolver interface { Extension(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.PluginExtension, error) LinkedDataset(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Dataset, error) Merged(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.MergedInfoboxField, error) + Scene(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Scene, error) + ScenePlugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.ScenePlugin, error) } type LayerGroupResolver interface { Parent(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.LayerGroup, error) @@ -937,6 +959,8 @@ type LayerGroupResolver interface { Extension(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.PluginExtension, error) LinkedDatasetSchema(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.DatasetSchema, error) Layers(ctx context.Context, obj *graphql1.LayerGroup) 
([]graphql1.Layer, error) + Scene(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Scene, error) + ScenePlugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.ScenePlugin, error) } type LayerItemResolver interface { Parent(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) @@ -945,14 +969,22 @@ type LayerItemResolver interface { Extension(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.PluginExtension, error) LinkedDataset(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Dataset, error) Merged(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.MergedLayer, error) + Scene(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Scene, error) + ScenePlugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.ScenePlugin, error) +} +type MergedInfoboxResolver interface { + Scene(ctx context.Context, obj *graphql1.MergedInfobox) (*graphql1.Scene, error) } type MergedInfoboxFieldResolver interface { Plugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Plugin, error) Extension(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.PluginExtension, error) + Scene(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Scene, error) + ScenePlugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.ScenePlugin, error) } type MergedLayerResolver interface { Original(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerItem, error) Parent(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerGroup, error) + Scene(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.Scene, error) } type MergedPropertyResolver interface { Original(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) @@ -1858,6 +1890,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Infobox.PropertyID(childComplexity), true + case "Infobox.scene": + if e.complexity.Infobox.Scene 
== nil { + break + } + + return e.complexity.Infobox.Scene(childComplexity), true + + case "Infobox.sceneId": + if e.complexity.Infobox.SceneID == nil { + break + } + + return e.complexity.Infobox.SceneID(childComplexity), true + case "InfoboxField.extension": if e.complexity.InfoboxField.Extension == nil { break @@ -1949,6 +1995,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.InfoboxField.PropertyID(childComplexity), true + case "InfoboxField.scene": + if e.complexity.InfoboxField.Scene == nil { + break + } + + return e.complexity.InfoboxField.Scene(childComplexity), true + + case "InfoboxField.sceneId": + if e.complexity.InfoboxField.SceneID == nil { + break + } + + return e.complexity.InfoboxField.SceneID(childComplexity), true + + case "InfoboxField.scenePlugin": + if e.complexity.InfoboxField.ScenePlugin == nil { + break + } + + return e.complexity.InfoboxField.ScenePlugin(childComplexity), true + case "InstallPluginPayload.scene": if e.complexity.InstallPluginPayload.Scene == nil { break @@ -2117,6 +2184,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerGroup.Root(childComplexity), true + case "LayerGroup.scene": + if e.complexity.LayerGroup.Scene == nil { + break + } + + return e.complexity.LayerGroup.Scene(childComplexity), true + + case "LayerGroup.sceneId": + if e.complexity.LayerGroup.SceneID == nil { + break + } + + return e.complexity.LayerGroup.SceneID(childComplexity), true + + case "LayerGroup.scenePlugin": + if e.complexity.LayerGroup.ScenePlugin == nil { + break + } + + return e.complexity.LayerGroup.ScenePlugin(childComplexity), true + case "LayerItem.extension": if e.complexity.LayerItem.Extension == nil { break @@ -2222,6 +2310,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerItem.PropertyID(childComplexity), true + case "LayerItem.scene": + if 
e.complexity.LayerItem.Scene == nil { + break + } + + return e.complexity.LayerItem.Scene(childComplexity), true + + case "LayerItem.sceneId": + if e.complexity.LayerItem.SceneID == nil { + break + } + + return e.complexity.LayerItem.SceneID(childComplexity), true + + case "LayerItem.scenePlugin": + if e.complexity.LayerItem.ScenePlugin == nil { + break + } + + return e.complexity.LayerItem.ScenePlugin(childComplexity), true + case "MergedInfobox.fields": if e.complexity.MergedInfobox.Fields == nil { break @@ -2236,6 +2345,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.MergedInfobox.Property(childComplexity), true + case "MergedInfobox.scene": + if e.complexity.MergedInfobox.Scene == nil { + break + } + + return e.complexity.MergedInfobox.Scene(childComplexity), true + + case "MergedInfobox.sceneID": + if e.complexity.MergedInfobox.SceneID == nil { + break + } + + return e.complexity.MergedInfobox.SceneID(childComplexity), true + case "MergedInfoboxField.extension": if e.complexity.MergedInfoboxField.Extension == nil { break @@ -2278,6 +2401,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.MergedInfoboxField.Property(childComplexity), true + case "MergedInfoboxField.scene": + if e.complexity.MergedInfoboxField.Scene == nil { + break + } + + return e.complexity.MergedInfoboxField.Scene(childComplexity), true + + case "MergedInfoboxField.sceneID": + if e.complexity.MergedInfoboxField.SceneID == nil { + break + } + + return e.complexity.MergedInfoboxField.SceneID(childComplexity), true + + case "MergedInfoboxField.scenePlugin": + if e.complexity.MergedInfoboxField.ScenePlugin == nil { + break + } + + return e.complexity.MergedInfoboxField.ScenePlugin(childComplexity), true + case "MergedLayer.infobox": if e.complexity.MergedLayer.Infobox == nil { break @@ -2320,6 +2464,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity 
in return e.complexity.MergedLayer.Property(childComplexity), true + case "MergedLayer.scene": + if e.complexity.MergedLayer.Scene == nil { + break + } + + return e.complexity.MergedLayer.Scene(childComplexity), true + + case "MergedLayer.sceneID": + if e.complexity.MergedLayer.SceneID == nil { + break + } + + return e.complexity.MergedLayer.SceneID(childComplexity), true + case "MergedProperty.groups": if e.complexity.MergedProperty.Groups == nil { break @@ -5917,6 +6075,7 @@ type DatasetField { interface Layer { id: ID! + sceneId: ID! name: String! isVisible: Boolean! propertyId: ID @@ -5929,6 +6088,7 @@ interface Layer { property: Property plugin: Plugin extension: PluginExtension + scenePlugin: ScenePlugin } union Layers = LayerItem | LayerGroup @@ -5943,6 +6103,7 @@ enum LayerEncodingFormat { type LayerItem implements Layer { id: ID! + sceneId: ID! name: String! isVisible: Boolean! propertyId: ID @@ -5958,10 +6119,13 @@ type LayerItem implements Layer { extension: PluginExtension @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) merged: MergedLayer @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } type LayerGroup implements Layer { id: ID! + sceneId: ID! name: String! isVisible: Boolean! propertyId: ID @@ -5979,9 +6143,12 @@ type LayerGroup implements Layer { extension: PluginExtension @goField(forceResolver: true) linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) layers: [Layer]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } type Infobox { + sceneId: ID! layerId: ID! propertyId: ID! fields: [InfoboxField!]! 
@@ -5990,10 +6157,12 @@ type Infobox { property: Property @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) merged: MergedInfobox @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) } type InfoboxField { id: ID! + sceneId: ID! layerId: ID! propertyId: ID! pluginId: PluginID! @@ -6006,29 +6175,38 @@ type InfoboxField { extension: PluginExtension @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) merged: MergedInfoboxField @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } type MergedLayer { originalId: ID! parentId: ID + sceneID: ID! property: MergedProperty infobox: MergedInfobox original: LayerItem @goField(forceResolver: true) parent: LayerGroup @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) } type MergedInfobox { + sceneID: ID! property: MergedProperty fields: [MergedInfoboxField!]! + scene: Scene @goField(forceResolver: true) } type MergedInfoboxField { originalId: ID! + sceneID: ID! pluginId: PluginID! extensionId: PluginExtensionID! 
property: MergedProperty plugin: Plugin @goField(forceResolver: true) extension: PluginExtension @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } # InputType @@ -11647,6 +11825,41 @@ func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -11915,6 +12128,38 @@ func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.C return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx, field.Selections, res) } +func (ec *executionContext) _Infobox_scene(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Infobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Infobox().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -11950,6 +12195,41 @@ func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -12352,6 +12632,70 @@ func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field grap return ec.marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx, field.Selections, res) } +func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.InfoboxField().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.InstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -12632,7 +12976,7 @@ func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.Co return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec 
*executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12650,7 +12994,7 @@ func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql. ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Name, nil + return obj.SceneID, nil }) if err != nil { ec.Error(ctx, err) @@ -12662,12 +13006,47 @@ func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql. } return graphql.Null } - res := resTmp.(string) + res := resTmp.(id.ID) fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13159,6 +13538,70 @@ func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphq return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: 
nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -13194,6 +13637,41 @@ func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, 
field.Selections, res) +} + func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -13648,6 +14126,105 @@ func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql return ec.marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedLayer(ctx, field.Selections, res) } +func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerItem().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return ec.resolvers.LayerItem().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -13715,6 +14292,38 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxFieldแš„(ctx, field.Selections, res) } +func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfobox().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -13745,12 +14354,117 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack 
in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13768,24 +14482,21 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.PluginID, nil + return obj.Property, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(*graphql1.MergedProperty) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13796,31 +14507,28 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, Object: "MergedInfoboxField", Field: field, Args: nil, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.ExtensionID, nil + return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(id.PluginExtensionID) + res := resTmp.(*graphql1.Plugin) fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13831,14 +14539,14 @@ func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, fi Object: "MergedInfoboxField", Field: field, Args: nil, - 
IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Property, nil + return ec.resolvers.MergedInfoboxField().Extension(rctx, obj) }) if err != nil { ec.Error(ctx, err) @@ -13847,12 +14555,12 @@ func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedProperty) + res := resTmp.(*graphql1.PluginExtension) fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13870,7 +14578,7 @@ func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, fiel ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) + return ec.resolvers.MergedInfoboxField().Scene(rctx, obj) }) if err != nil { ec.Error(ctx, err) @@ -13879,12 +14587,12 @@ func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := 
resTmp.(*graphql1.Plugin) + res := resTmp.(*graphql1.Scene) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13902,7 +14610,7 @@ func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, f ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfoboxField().Extension(rctx, obj) + return ec.resolvers.MergedInfoboxField().ScenePlugin(rctx, obj) }) if err != nil { ec.Error(ctx, err) @@ -13911,9 +14619,9 @@ func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PluginExtension) + res := resTmp.(*graphql1.ScenePlugin) fc.Result = res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) } func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { @@ -13983,6 +14691,41 @@ func (ec 
*executionContext) _MergedLayer_parentId(ctx context.Context, field gra return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -14111,6 +14854,38 @@ func (ec *executionContext) _MergedLayer_parent(ctx context.Context, field graph return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) _MergedLayer_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, + IsMethod: true, + 
IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedLayer().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -31692,6 +32467,11 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("Infobox") + case "sceneId": + out.Values[i] = ec._Infobox_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "layerId": out.Values[i] = ec._Infobox_layerId(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -31756,6 +32536,17 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, res = ec._Infobox_merged(ctx, field, obj) return res }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Infobox_scene(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -31783,6 +32574,11 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "sceneId": + out.Values[i] = ec._InfoboxField_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } case "layerId": out.Values[i] = ec._InfoboxField_layerId(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -31888,6 +32684,28 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection res = ec._InfoboxField_merged(ctx, field, obj) return res }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_scene(ctx, field, obj) + return res + }) + case "scenePlugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._InfoboxField_scenePlugin(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -32016,6 +32834,11 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "sceneId": + out.Values[i] = ec._LayerGroup_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "name": out.Values[i] = ec._LayerGroup_name(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -32117,6 +32940,28 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe } return res }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_scene(ctx, field, obj) + return res + }) + case "scenePlugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_scenePlugin(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } 
@@ -32144,6 +32989,11 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "sceneId": + out.Values[i] = ec._LayerItem_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "name": out.Values[i] = ec._LayerItem_name(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -32232,6 +33082,28 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet res = ec._LayerItem_merged(ctx, field, obj) return res }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_scene(ctx, field, obj) + return res + }) + case "scenePlugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_scenePlugin(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -32254,13 +33126,29 @@ func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.Selectio switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("MergedInfobox") + case "sceneID": + out.Values[i] = ec._MergedInfobox_sceneID(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "property": out.Values[i] = ec._MergedInfobox_property(ctx, field, obj) case "fields": out.Values[i] = ec._MergedInfobox_fields(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + atomic.AddUint32(&invalids, 1) } + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedInfobox_scene(ctx, field, obj) + return res + }) default: 
panic("unknown field " + strconv.Quote(field.Name)) } @@ -32288,6 +33176,11 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "sceneID": + out.Values[i] = ec._MergedInfoboxField_sceneID(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "pluginId": out.Values[i] = ec._MergedInfoboxField_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -32322,6 +33215,28 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel res = ec._MergedInfoboxField_extension(ctx, field, obj) return res }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedInfoboxField_scene(ctx, field, obj) + return res + }) + case "scenePlugin": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedInfoboxField_scenePlugin(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -32351,6 +33266,11 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS } case "parentId": out.Values[i] = ec._MergedLayer_parentId(ctx, field, obj) + case "sceneID": + out.Values[i] = ec._MergedLayer_sceneID(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "property": out.Values[i] = ec._MergedLayer_property(ctx, field, obj) case "infobox": @@ -32377,6 +33297,17 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS res = ec._MergedLayer_parent(ctx, field, obj) return res }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._MergedLayer_scene(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } diff --git a/internal/graphql/resolver_layer.go b/internal/graphql/resolver_layer.go index 36a76ef50..fd7a09616 100644 --- a/internal/graphql/resolver_layer.go +++ b/internal/graphql/resolver_layer.go @@ -28,6 +28,10 @@ func (r *Resolver) MergedLayer() MergedLayerResolver { return &mergedLayerResolver{r} } +func (r *Resolver) MergedInfobox() MergedInfoboxResolver { + return &mergedInfoboxResolver{r} +} + func (r *Resolver) MergedInfoboxField() MergedInfoboxFieldResolver { return &mergedInfoboxFieldResolver{r} } @@ -73,6 +77,38 @@ func (r *infoboxResolver) Merged(ctx context.Context, obj *graphql1.Infobox) (*g return ml.Infobox, nil } +func (r *infoboxResolver) Scene(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *graphql1.Infobox) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil || layer == nil { + return nil, err + } + var pluginID *id.PluginID + if lg, ok := (*layer).(*graphql1.LayerGroup); ok { + pluginID = lg.PluginID + } else if li, ok := (*layer).(*graphql1.LayerItem); ok { + pluginID = li.PluginID + } + if pluginID == nil { + return nil, nil + } + + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(*pluginID), nil +} + type infoboxFieldResolver struct{ *Resolver } func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *graphql1.InfoboxField) (graphql1.Layer, error) { @@ -147,6 +183,24 @@ func (r *infoboxFieldResolver) Merged(ctx context.Context, obj 
*graphql1.Infobox return ml.Infobox.Field(obj.ID), nil } +func (r *infoboxFieldResolver) Scene(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(obj.PluginID), nil +} + type layerGroupResolver struct{ *Resolver } func (r *layerGroupResolver) Parent(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.LayerGroup, error) { @@ -221,6 +275,27 @@ func (r *layerGroupResolver) Layers(ctx context.Context, obj *graphql1.LayerGrou return graphql1.AttachParentLayer(layers, obj.ID), nil } +func (r *layerGroupResolver) Scene(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(*obj.PluginID), nil +} + type layerItemResolver struct{ *Resolver } func (r *layerItemResolver) Parent(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) { @@ -287,6 +362,27 @@ func (r *layerItemResolver) Merged(ctx context.Context, obj *graphql1.LayerItem) return r.config.Controllers.LayerController.FetchMerged(ctx, id.LayerID(obj.ID), id.LayerIDFromRefID(obj.ParentID), getOperator(ctx)) } +func (r *layerItemResolver) Scene(ctx 
context.Context, obj *graphql1.LayerItem) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(*obj.PluginID), nil +} + type mergedLayerResolver struct{ *Resolver } func (r *mergedLayerResolver) Original(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerItem, error) { @@ -306,6 +402,25 @@ func (r *mergedLayerResolver) Parent(ctx context.Context, obj *graphql1.MergedLa return dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) } +func (r *mergedLayerResolver) Scene(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +type mergedInfoboxResolver struct{ *Resolver } + +func (r *mergedInfoboxResolver) Scene(ctx context.Context, obj *graphql1.MergedInfobox) (*graphql1.Scene, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + type mergedInfoboxFieldResolver struct{ *Resolver } func (r *mergedInfoboxFieldResolver) Plugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Plugin, error) { @@ -325,3 +440,21 @@ func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *graphql } return plugin.Extension(obj.ExtensionID), nil } + +func (r *mergedInfoboxFieldResolver) Scene(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Scene, error) { + exit := trace(ctx) + defer 
exit() + + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(obj.PluginID), nil +} diff --git a/schema.graphql b/schema.graphql index 9d5d82fad..c82d934fc 100644 --- a/schema.graphql +++ b/schema.graphql @@ -569,6 +569,7 @@ type DatasetField { interface Layer { id: ID! + sceneId: ID! name: String! isVisible: Boolean! propertyId: ID @@ -581,6 +582,7 @@ interface Layer { property: Property plugin: Plugin extension: PluginExtension + scenePlugin: ScenePlugin } union Layers = LayerItem | LayerGroup @@ -595,6 +597,7 @@ enum LayerEncodingFormat { type LayerItem implements Layer { id: ID! + sceneId: ID! name: String! isVisible: Boolean! propertyId: ID @@ -610,10 +613,13 @@ type LayerItem implements Layer { extension: PluginExtension @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) merged: MergedLayer @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } type LayerGroup implements Layer { id: ID! + sceneId: ID! name: String! isVisible: Boolean! propertyId: ID @@ -631,9 +637,12 @@ type LayerGroup implements Layer { extension: PluginExtension @goField(forceResolver: true) linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) layers: [Layer]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } type Infobox { + sceneId: ID! layerId: ID! propertyId: ID! fields: [InfoboxField!]! 
@@ -642,10 +651,12 @@ type Infobox { property: Property @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) merged: MergedInfobox @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) } type InfoboxField { id: ID! + sceneId: ID! layerId: ID! propertyId: ID! pluginId: PluginID! @@ -658,29 +669,38 @@ type InfoboxField { extension: PluginExtension @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) merged: MergedInfoboxField @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } type MergedLayer { originalId: ID! parentId: ID + sceneID: ID! property: MergedProperty infobox: MergedInfobox original: LayerItem @goField(forceResolver: true) parent: LayerGroup @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) } type MergedInfobox { + sceneID: ID! property: MergedProperty fields: [MergedInfoboxField!]! + scene: Scene @goField(forceResolver: true) } type MergedInfoboxField { originalId: ID! + sceneID: ID! pluginId: PluginID! extensionId: PluginExtensionID! 
property: MergedProperty plugin: Plugin @goField(forceResolver: true) extension: PluginExtension @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + scenePlugin: ScenePlugin @goField(forceResolver: true) } # InputType From c59955937f4792217d3eca69423b57ce35b965bb Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 13 Jul 2021 12:04:42 +0900 Subject: [PATCH 046/253] revert: "fix: ogp image for published page" (#21) --- internal/app/public.go | 4 ++-- internal/usecase/interfaces/published.go | 19 +++---------------- 2 files changed, 5 insertions(+), 18 deletions(-) diff --git a/internal/app/public.go b/internal/app/public.go index 61840dbba..3f020c9b0 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -46,7 +46,7 @@ func publicAPI( }) r.GET("/published/:name", func(c echo.Context) error { - name := c.Param("name") + name := c.Param("string") if name == "" { return echo.ErrNotFound } @@ -60,7 +60,7 @@ func publicAPI( }) r.GET("/published_data/:name", func(c echo.Context) error { - name := c.Param("name") + name := c.Param("string") if name == "" { return echo.ErrNotFound } diff --git a/internal/usecase/interfaces/published.go b/internal/usecase/interfaces/published.go index d0476702e..0034aede5 100644 --- a/internal/usecase/interfaces/published.go +++ b/internal/usecase/interfaces/published.go @@ -19,23 +19,10 @@ type ProjectPublishedMetadata struct { } func ProjectPublishedMetadataFrom(prj *project.Project) ProjectPublishedMetadata { - title := prj.PublicTitle() - description := prj.PublicDescription() - image := prj.PublicImage() - if title == "" { - title = prj.Name() - } - if description == "" { - description = prj.Description() - } - if image == "" { - image = prj.ImageURL().String() - } - return ProjectPublishedMetadata{ - Title: title, - Description: description, - Image: image, + Title: prj.PublicTitle(), + Description: prj.PublicDescription(), + Image: prj.PublicImage(), Noindex: prj.PublicNoIndex(), IsBasicAuthActive: 
prj.IsBasicAuthActive(), BasicAuthUsername: prj.BasicAuthUsername(), From cd07c0dd0e1d3392f0d65597808fb1d8728ac0b9 Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Tue, 13 Jul 2021 17:17:37 +0900 Subject: [PATCH 047/253] raise graphql complexity limit 4000->6000 (#22) Co-authored-by: KaWaite --- internal/app/config.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/app/config.go b/internal/app/config.go index a5e62cefd..a2076864b 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -40,7 +40,7 @@ type Auth0Config struct { } type GraphQLConfig struct { - ComplexityLimit int `default:"4000"` + ComplexityLimit int `default:"6000"` } type PublishedConfig struct { From bb9e4c6af5ab4b2f94a8c1c4f690293f5b3bf20c Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 14 Jul 2021 02:13:39 +0900 Subject: [PATCH 048/253] feat: marker label position --- pkg/builtin/manifest.yml | 27 +++++++++++++++++++++++---- pkg/builtin/manifest_gen.go | 2 +- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index bb2dfa7e4..e6ebda96d 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -117,26 +117,26 @@ extensions: type: number title: Fog density defaultValue: 2.0e-4 - description: "Set a thickness to the fog. Min: 0 Max: 1" + description: 'Set a thickness to the fog. Min: 0 Max: 1' min: 0 max: 1 - id: brightness_shift type: number title: Fog Brightness defaultValue: 0.03 - description: "Set brightness of the fog. Min: -1 Max: 1" + description: 'Set brightness of the fog. Min: -1 Max: 1' min: -1 max: 1 - id: hue_shift type: number title: Fog Hue - description: "Set hue of the fog. Min: -1 Max: 1" + description: 'Set hue of the fog. Min: -1 Max: 1' min: -1 max: 1 - id: surturation_shift type: number title: Fog Saturation - description: "Set saturation of the fog. Min: -1 Max: 1" + description: 'Set saturation of the fog. 
Min: -1 Max: 1' min: -1 max: 1 - id: googleAnalytics @@ -343,6 +343,23 @@ extensions: field: label type: bool value: true + - id: labelPosition + type: string + title: Label position + defaultValue: left + choices: + - key: left + label: Left + - key: right + label: Right + - key: top + label: Top + - key: bottom + label: Bottom + availableIf: + field: label + type: bool + value: true - id: labelTypography type: typography title: Label font @@ -967,9 +984,11 @@ extensions: - id: overlayImageW type: number title: Image width + suffix: px - id: overlayImageH type: number title: Image height + suffix: px - id: overlayBgcolor type: string title: Background color diff --git a/pkg/builtin/manifest_gen.go b/pkg/builtin/manifest_gen.go index 9d09b8527..e34ff00c1 100644 --- a/pkg/builtin/manifest_gen.go +++ b/pkg/builtin/manifest_gen.go @@ -2,4 +2,4 @@ package builtin -const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"},{"description":"Set your Google Analytics tracking ID and analyze how your published project is being viewed.","fields":[{"defaultValue":false,"description":"Enable Google Analytics","id":"enableGA","title":"Enable","type":"bool"},{"description":"Paste your Google Analytics tracking ID here. This will be embedded in your published project.","id":"trackingId","title":"Tracking ID","type":"string"}],"id":"googleAnalytics","title":"Google Analytics"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline 
primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo 
that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo 
overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image 
size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table 
block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera 
flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","title":"Image width","type":"number"},{"id":"overlayImageH","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` +const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"},{"description":"Set your Google Analytics tracking ID and analyze how your published project is being viewed.","fields":[{"defaultValue":false,"description":"Enable Google Analytics","id":"enableGA","title":"Enable","type":"bool"},{"description":"Paste your Google Analytics tracking ID here. This will be embedded in your published project.","id":"trackingId","title":"Tracking ID","type":"string"}],"id":"googleAnalytics","title":"Google Analytics"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"choices":[{"key":"left","label":"Left"},{"key":"right","label":"Right"},{"key":"top","label":"Top"},{"key":"bottom","label":"Bottom"}],"defaultValue":"left","id":"labelPosition","title":"Label position","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label 
font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded 
height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow 
radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use 
markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table 
block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera 
flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. 
display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","suffix":"px","title":"Image width","type":"number"},{"id":"overlayImageH","suffix":"px","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` From a2059e933c18c4ce7950f8855ee7b4622809ef64 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 14 Jul 2021 02:36:23 +0900 Subject: [PATCH 049/253] fix: change default value of marker label position --- pkg/builtin/manifest.yml | 2 +- pkg/builtin/manifest_gen.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index e6ebda96d..c306066ae 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -346,7 +346,7 @@ extensions: - id: labelPosition type: string title: Label position - defaultValue: left + defaultValue: right choices: - key: left label: Left diff --git a/pkg/builtin/manifest_gen.go b/pkg/builtin/manifest_gen.go index e34ff00c1..3b61de220 100644 --- a/pkg/builtin/manifest_gen.go +++ b/pkg/builtin/manifest_gen.go @@ -2,4 +2,4 @@ package builtin -const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"},{"description":"Set your Google Analytics tracking ID and analyze how your published project is being viewed.","fields":[{"defaultValue":false,"description":"Enable Google Analytics","id":"enableGA","title":"Enable","type":"bool"},{"description":"Paste your Google Analytics tracking ID here. This will be embedded in your published project.","id":"trackingId","title":"Tracking ID","type":"string"}],"id":"googleAnalytics","title":"Google Analytics"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"choices":[{"key":"left","label":"Left"},{"key":"right","label":"Right"},{"key":"top","label":"Top"},{"key":"bottom","label":"Bottom"}],"defaultValue":"left","id":"labelPosition","title":"Label position","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label 
font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded 
height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow 
radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use 
markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table 
block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera 
flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. 
display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","suffix":"px","title":"Image width","type":"number"},{"id":"overlayImageH","suffix":"px","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` +const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. 
This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the 
Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"},{"description":"Set your Google Analytics tracking ID and analyze how your published project is being viewed.","fields":[{"defaultValue":false,"description":"Enable Google Analytics","id":"enableGA","title":"Enable","type":"bool"},{"description":"Paste your Google Analytics tracking ID here. This will be embedded in your published project.","id":"trackingId","title":"Tracking ID","type":"string"}],"id":"googleAnalytics","title":"Google Analytics"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. 
Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal 
origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"choices":[{"key":"left","label":"Left"},{"key":"right","label":"Right"},{"key":"top","label":"Top"},{"key":"bottom","label":"Bottom"}],"defaultValue":"right","id":"labelPosition","title":"Label position","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label 
font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded 
height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow 
radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use 
markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table 
block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera 
flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. 
display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","suffix":"px","title":"Image width","type":"number"},{"id":"overlayImageH","suffix":"px","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` From 077558c226186a638ab6ff9375ea144131d442bf Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 14 Jul 2021 04:29:56 +0300 Subject: [PATCH 050/253] fix: import dataset from google sheet bug (#23) Co-authored-by: Ya Ka Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> --- pkg/dataset/csvparser.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/dataset/csvparser.go b/pkg/dataset/csvparser.go index 2edb43381..47348bf45 100644 --- a/pkg/dataset/csvparser.go +++ b/pkg/dataset/csvparser.go @@ -5,6 +5,7 @@ import ( "errors" "io" "strconv" + "strings" "github.com/reearth/reearth-backend/pkg/id" ) @@ -87,7 +88,7 @@ func (p *DatasetCSVParser) GuessSchema(sid id.SceneID) error { if h == "lng" { haslng = true } - if h != "lng" && h != "lat" { + if h != "lng" && h != "lat" && strings.TrimSpace(h) != "" { t := p.getRecord(p.firstline[k]).Type() field, _ := NewSchemaField().NewID().Name(h).Type(t).Build() schemafields = append(schemafields, field) From 
84695748a6d870f1227f10889d99a62be8763d16 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 14 Jul 2021 19:03:23 +0900 Subject: [PATCH 051/253] fix: public api param --- internal/app/public.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/app/public.go b/internal/app/public.go index 3f020c9b0..61840dbba 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -46,7 +46,7 @@ func publicAPI( }) r.GET("/published/:name", func(c echo.Context) error { - name := c.Param("string") + name := c.Param("name") if name == "" { return echo.ErrNotFound } @@ -60,7 +60,7 @@ func publicAPI( }) r.GET("/published_data/:name", func(c echo.Context) error { - name := c.Param("string") + name := c.Param("name") if name == "" { return echo.ErrNotFound } From ba7d1648d27fb4dc7309003b81f68dbf3d5bf945 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Fri, 16 Jul 2021 10:58:19 +0300 Subject: [PATCH 052/253] fix: replace strings.Split() with strings.field() (#25) --- pkg/layer/decoding/kml.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/layer/decoding/kml.go b/pkg/layer/decoding/kml.go index 8ccb4218e..07f701d81 100644 --- a/pkg/layer/decoding/kml.go +++ b/pkg/layer/decoding/kml.go @@ -51,7 +51,7 @@ func coordinatesToLatLngHeight(c string) (*property.LatLng, float64, error) { func coordinatesToLatLngHeightList(c string) ([]property.LatLngHeight, error) { var LatLngHeighList []property.LatLngHeight - coords := strings.Split(c, "\n") + coords := strings.Fields(c) for _, llh := range coords { reg, err := regexp.Compile(`\s+`) if err != nil { From e82b5484c8203d9ab3a71d5ca458bf6be048ad10 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 16 Jul 2021 20:34:36 +0900 Subject: [PATCH 053/253] fix: project public image type --- .../adapter/graphql/controller_project.go | 2 +- internal/adapter/graphql/models_gen.go | 30 +++++----- internal/graphql/generated.go | 19 +----- 
internal/usecase/interactor/project.go | 60 +++---------------- internal/usecase/interfaces/project.go | 3 +- schema.graphql | 2 +- 6 files changed, 27 insertions(+), 89 deletions(-) diff --git a/internal/adapter/graphql/controller_project.go b/internal/adapter/graphql/controller_project.go index 1c2f66df6..90493e1b2 100644 --- a/internal/adapter/graphql/controller_project.go +++ b/internal/adapter/graphql/controller_project.go @@ -68,7 +68,7 @@ func (c *ProjectController) Update(ctx context.Context, ginput *UpdateProjectInp BasicAuthPassword: ginput.BasicAuthPassword, PublicTitle: ginput.PublicTitle, PublicDescription: ginput.PublicDescription, - PublicImage: fromFile(ginput.PublicImage), + PublicImage: ginput.PublicImage, PublicNoIndex: ginput.PublicNoIndex, DeletePublicImage: deletePublicImage, DeleteImageURL: deleteImageURL, diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index d57bd4f1e..22451d1d7 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -1043,21 +1043,21 @@ type UpdateMemberOfTeamPayload struct { } type UpdateProjectInput struct { - ProjectID id.ID `json:"projectId"` - Name *string `json:"name"` - Description *string `json:"description"` - Archived *bool `json:"archived"` - IsBasicAuthActive *bool `json:"isBasicAuthActive"` - BasicAuthUsername *string `json:"basicAuthUsername"` - BasicAuthPassword *string `json:"basicAuthPassword"` - Alias *string `json:"alias"` - ImageURL *url.URL `json:"imageUrl"` - PublicTitle *string `json:"publicTitle"` - PublicDescription *string `json:"publicDescription"` - PublicImage *graphql.Upload `json:"publicImage"` - PublicNoIndex *bool `json:"publicNoIndex"` - DeleteImageURL *bool `json:"deleteImageUrl"` - DeletePublicImage *bool `json:"deletePublicImage"` + ProjectID id.ID `json:"projectId"` + Name *string `json:"name"` + Description *string `json:"description"` + Archived *bool `json:"archived"` + IsBasicAuthActive *bool 
`json:"isBasicAuthActive"` + BasicAuthUsername *string `json:"basicAuthUsername"` + BasicAuthPassword *string `json:"basicAuthPassword"` + Alias *string `json:"alias"` + ImageURL *url.URL `json:"imageUrl"` + PublicTitle *string `json:"publicTitle"` + PublicDescription *string `json:"publicDescription"` + PublicImage *string `json:"publicImage"` + PublicNoIndex *bool `json:"publicNoIndex"` + DeleteImageURL *bool `json:"deleteImageUrl"` + DeletePublicImage *bool `json:"deletePublicImage"` } type UpdatePropertyItemInput struct { diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index 25458d3b9..ba724c447 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -6296,7 +6296,7 @@ input UpdateProjectInput { imageUrl: URL publicTitle: String publicDescription: String - publicImage: Upload + publicImage: String publicNoIndex: Boolean deleteImageUrl: Boolean deletePublicImage: Boolean @@ -30459,7 +30459,7 @@ func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("publicImage")) - it.PublicImage, err = ec.unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + it.PublicImage, err = ec.unmarshalOString2แš–string(ctx, v) if err != nil { return it, err } @@ -40420,21 +40420,6 @@ func (ec *executionContext) marshalOUpgradePluginPayload2แš–githubแš—comแš‹reear return ec._UpgradePluginPayload(ctx, sel, v) } -func (ec *executionContext) unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (*graphql.Upload, error) { - if v == nil { - return nil, nil - } - res, err := graphql.UnmarshalUpload(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, sel ast.SelectionSet, v *graphql.Upload) graphql.Marshaler { - if v == nil { - return 
graphql.Null - } - return graphql.MarshalUpload(*v) -} - func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UploadPluginPayload) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 6b4c4a58c..7d10c1e10 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -10,9 +10,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" - "github.com/reearth/reearth-backend/pkg/asset" err1 "github.com/reearth/reearth-backend/pkg/error" - "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" "github.com/reearth/reearth-backend/pkg/scene" @@ -156,11 +154,10 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o } } - if p.ImageURL != nil && !p.DeleteImageURL { - prj.SetImageURL(p.ImageURL) - } if p.DeleteImageURL { prj.SetImageURL(nil) + } else if p.ImageURL != nil { + prj.SetImageURL(p.ImageURL) } if p.Archived != nil { @@ -187,24 +184,17 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o prj.UpdatePublicDescription(*p.PublicDescription) } - if p.PublicImage != nil && !p.DeletePublicImage { - asset, err := i.createAsset(ctx, p.PublicImage, prj.Team()) - if err != nil { - return nil, err - } - prj.UpdatePublicImage(asset.URL()) + if p.DeletePublicImage { + prj.UpdatePublicImage("") + } else if p.PublicImage != nil { + prj.UpdatePublicImage(*p.PublicImage) } if p.PublicNoIndex != nil { prj.UpdatePublicNoIndex(*p.PublicNoIndex) } - if p.DeletePublicImage { - prj.UpdatePublicImage("") - } - - err = 
i.projectRepo.Save(ctx, prj) - if err != nil { + if err := i.projectRepo.Save(ctx, prj); err != nil { return nil, err } @@ -348,42 +338,6 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP return prj, nil } -func (i *Project) createAsset(ctx context.Context, f *file.File, t id.TeamID) (_ *asset.Asset, err error) { - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - url, err := i.file.UploadAsset(ctx, f) - if err != nil { - return nil, err - } - - asset, err := asset.New(). - NewID(). - Team(t). - Name(f.Name). - Size(f.Size). - URL(url.String()). - Build() - if err != nil { - return nil, err - } - - err = i.assetRepo.Save(ctx, asset) - if err != nil { - return nil, err - } - - tx.Commit() - return asset, nil -} - func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator *usecase.Operator) (err error) { tx, err := i.transaction.Begin() if err != nil { diff --git a/internal/usecase/interfaces/project.go b/internal/usecase/interfaces/project.go index 7caa37b95..91559973d 100644 --- a/internal/usecase/interfaces/project.go +++ b/internal/usecase/interfaces/project.go @@ -6,7 +6,6 @@ import ( "net/url" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" "github.com/reearth/reearth-backend/pkg/visualizer" @@ -34,7 +33,7 @@ type UpdateProjectParam struct { ImageURL *url.URL PublicTitle *string PublicDescription *string - PublicImage *file.File + PublicImage *string PublicNoIndex *bool DeletePublicImage bool DeleteImageURL bool diff --git a/schema.graphql b/schema.graphql index c82d934fc..9f052898f 100644 --- a/schema.graphql +++ b/schema.graphql @@ -790,7 +790,7 @@ input UpdateProjectInput { imageUrl: URL publicTitle: String publicDescription: String - publicImage: 
Upload + publicImage: String publicNoIndex: Boolean deleteImageUrl: Boolean deletePublicImage: Boolean From a9d209126aae9a65aa0a34dd438e1bc9dd83feb9 Mon Sep 17 00:00:00 2001 From: HideBa <49897538+HideBa@users.noreply.github.com> Date: Mon, 26 Jul 2021 12:02:35 +0900 Subject: [PATCH 054/253] docs: refine readme (#28) * refine readme * fix typo --- README.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7241a49f8..049ba89a4 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,14 @@ # reearth-backend +This is the back-end repository of [Re:Earth](https://github.com/reearth/reearth). -(unfinished) +## Bug reporting and requesting features +Any requests for new features are very welcome! Just create [an issue under the Re:Earth repository](https://github.com/reearth/reearth/issues) and the core team will take it into consideration. +When you find a bug, please create a bug issue with the [bug report template](https://github.com/reearth/reearth/issues/new?assignees=&labels=&template=bug_report.md&title=). + + +## Discussion +[Here](https://github.com/reearth/reearth/discussions) we have a dedicated area for discussions on all topics. ## License -[Apache License 2.0](LICENSE) +Distributed under the Apache-2.0 License. See [Apache License 2.0](LICENSE) for more information. 
From 8ad1f80cdcef0c8e52eb949c867de0fc177f5a29 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 26 Jul 2021 14:19:47 +0900 Subject: [PATCH 055/253] fix: published API (#27) --- internal/app/public.go | 77 -------- internal/app/published.go | 102 +++++++++++ internal/app/published_test.go | 219 +++++++++++++++++++++++ internal/usecase/interactor/published.go | 2 +- 4 files changed, 322 insertions(+), 78 deletions(-) create mode 100644 internal/app/published.go create mode 100644 internal/app/published_test.go diff --git a/internal/app/public.go b/internal/app/public.go index 61840dbba..c536605a3 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -1,19 +1,13 @@ package app import ( - "context" - "crypto/subtle" "fmt" "net/http" - "net/url" - "os" "github.com/labstack/echo/v4" - "github.com/labstack/echo/v4/middleware" http1 "github.com/reearth/reearth-backend/internal/adapter/http" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interactor" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" ) @@ -73,74 +67,3 @@ func publicAPI( return c.Stream(http.StatusOK, "application/json", r) }) } - -func publishedRoute( - ec *echo.Echo, - r *echo.Group, - conf *Config, - repos *repo.Container, - gateways *gateway.Container, -) { - var i interfaces.Published - if conf.Published.IndexURL == nil || conf.Published.IndexURL.String() == "" { - html, err := os.ReadFile("web/published.html") - if err == nil { - i = interactor.NewPublished(repos.Project, gateways.File, string(html)) - } else { - i = interactor.NewPublished(repos.Project, gateways.File, "") - } - } else { - i = interactor.NewPublishedWithURL(repos.Project, gateways.File, conf.Published.IndexURL) - } - contr := http1.NewPublishedController(i) - - key := struct{}{} - auth := middleware.BasicAuthWithConfig(middleware.BasicAuthConfig{ - Validator: func(user string, 
password string, c echo.Context) (bool, error) { - md, ok := c.Request().Context().Value(key).(interfaces.ProjectPublishedMetadata) - if !ok { - return true, echo.ErrNotFound - } - return !md.IsBasicAuthActive || subtle.ConstantTimeCompare([]byte(user), []byte(md.BasicAuthUsername)) == 1 && subtle.ConstantTimeCompare([]byte(password), []byte(md.BasicAuthPassword)) == 1, nil - }, - Skipper: func(c echo.Context) bool { - name := c.Param("name") - if name == "" { - return true - } - - md, err := contr.Metadata(c.Request().Context(), name) - if err != nil { - return true - } - - c.SetRequest(c.Request().WithContext(context.WithValue(c.Request().Context(), key, md))) - return !md.IsBasicAuthActive - }, - }) - - r.GET("/:name/data.json", func(c echo.Context) error { - r, err := contr.Data(c.Request().Context(), c.Param("name")) - if err != nil { - return err - } - - return c.Stream(http.StatusOK, "application/json", r) - }, auth) - - r.GET("/:name/", func(c echo.Context) error { - index, err := contr.Index(c.Request().Context(), c.Param("name"), &url.URL{ - Scheme: "http", - Host: c.Request().Host, - Path: c.Request().URL.Path, - }) - if err != nil { - return err - } - if index == "" { - return echo.ErrNotFound - } - - return c.HTML(http.StatusOK, index) - }, auth) -} diff --git a/internal/app/published.go b/internal/app/published.go new file mode 100644 index 000000000..339ae21a9 --- /dev/null +++ b/internal/app/published.go @@ -0,0 +1,102 @@ +package app + +import ( + "context" + "crypto/subtle" + "io" + "io/fs" + "net/http" + "net/url" + "os" + + "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + http1 "github.com/reearth/reearth-backend/internal/adapter/http" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +func publishedRoute( + 
ec *echo.Echo, + r *echo.Group, + conf *Config, + repos *repo.Container, + gateways *gateway.Container, +) { + contr := http1.NewPublishedController(publishedUsecaseFrom(conf.Published.IndexURL, repos.Project, gateways.File, os.DirFS("."))) + auth := PublishedAuthMiddleware(contr.Metadata) + r.GET("/:name/data.json", PublishedData(contr.Data), auth) + r.GET("/:name/", PublishedIndex(contr.Index), auth) +} + +func PublishedData(data func(ctx context.Context, name string) (io.Reader, error)) echo.HandlerFunc { + return func(c echo.Context) error { + r, err := data(c.Request().Context(), c.Param("name")) + if err != nil { + return err + } + + return c.Stream(http.StatusOK, "application/json", r) + } +} + +func PublishedIndex(index func(ctx context.Context, name string, url *url.URL) (string, error)) echo.HandlerFunc { + return func(c echo.Context) error { + index, err := index(c.Request().Context(), c.Param("name"), &url.URL{ + Scheme: "http", + Host: c.Request().Host, + Path: c.Request().URL.Path, + }) + if err != nil { + return err + } + if index == "" { + return echo.ErrNotFound + } + return c.HTML(http.StatusOK, index) + } +} + +func PublishedAuthMiddleware(metadata func(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error)) echo.MiddlewareFunc { + key := struct{}{} + return middleware.BasicAuthWithConfig(middleware.BasicAuthConfig{ + Validator: func(user string, password string, c echo.Context) (bool, error) { + md, ok := c.Request().Context().Value(key).(interfaces.ProjectPublishedMetadata) + if !ok { + return true, echo.ErrNotFound + } + return !md.IsBasicAuthActive || subtle.ConstantTimeCompare([]byte(user), []byte(md.BasicAuthUsername)) == 1 && subtle.ConstantTimeCompare([]byte(password), []byte(md.BasicAuthPassword)) == 1, nil + }, + Skipper: func(c echo.Context) bool { + name := c.Param("name") + if name == "" { + return true + } + + md, err := metadata(c.Request().Context(), name) + if err != nil { + return true + } + + 
c.SetRequest(c.Request().WithContext(context.WithValue(c.Request().Context(), key, md))) + return !md.IsBasicAuthActive + }, + }) +} + +func publishedUsecaseFrom(indexURL *url.URL, p repo.Project, f gateway.File, ff fs.FS) interfaces.Published { + var i interfaces.Published + if indexURL == nil || indexURL.String() == "" { + html, err := fs.ReadFile(ff, "web/published.html") + if err == nil { + i = interactor.NewPublished(p, f, string(html)) + } else { + i = interactor.NewPublished(p, f, "") + } + } else { + i = interactor.NewPublishedWithURL(p, f, indexURL) + } + return i +} diff --git a/internal/app/published_test.go b/internal/app/published_test.go new file mode 100644 index 000000000..111fdb0ff --- /dev/null +++ b/internal/app/published_test.go @@ -0,0 +1,219 @@ +package app + +import ( + "context" + "encoding/base64" + "io" + "net/http" + "net/http/httptest" + "net/url" + "strings" + "testing" + + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/stretchr/testify/assert" +) + +func TestPublishedAuthMiddleware(t *testing.T) { + h := PublishedAuthMiddleware(func(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + if name == "active" { + return interfaces.ProjectPublishedMetadata{ + IsBasicAuthActive: true, + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, nil + } else if name == "inactive" { + return interfaces.ProjectPublishedMetadata{ + IsBasicAuthActive: false, + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, nil + } + return interfaces.ProjectPublishedMetadata{}, err1.ErrNotFound + })(func(c echo.Context) error { + return c.String(http.StatusOK, "test") + }) + + testCases := []struct { + Name string + PublishedName string + BasicAuthUsername string + BasicAuthPassword string + Error error + }{ + { + Name: "empty name", + }, + { + Name: "not found", + PublishedName: "aaa", + }, + { + Name: 
"no auth", + PublishedName: "inactive", + }, + { + Name: "auth", + PublishedName: "active", + Error: echo.ErrUnauthorized, + }, + { + Name: "auth with invalid credentials", + PublishedName: "active", + BasicAuthUsername: "aaa", + BasicAuthPassword: "bbb", + Error: echo.ErrUnauthorized, + }, + { + Name: "auth with valid credentials", + PublishedName: "active", + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + assert := assert.New(tt) + req := httptest.NewRequest(http.MethodGet, "/", nil) + if tc.BasicAuthUsername != "" { + req.Header.Set(echo.HeaderAuthorization, "basic "+base64.StdEncoding.EncodeToString([]byte(tc.BasicAuthUsername+":"+tc.BasicAuthPassword))) + } + res := httptest.NewRecorder() + e := echo.New() + c := e.NewContext(req, res) + c.SetParamNames("name") + c.SetParamValues(tc.PublishedName) + + err := h(c) + if tc.Error == nil { + assert.NoError(err) + assert.Equal(http.StatusOK, res.Code) + assert.Equal("test", res.Body.String()) + } else { + assert.ErrorIs(err, tc.Error) + } + }) + } +} + +func TestPublishedData(t *testing.T) { + h := PublishedData(func(ctx context.Context, name string) (io.Reader, error) { + if name == "prj" { + return strings.NewReader("aaa"), nil + } + return nil, err1.ErrNotFound + }) + + testCases := []struct { + Name string + PublishedName string + Error error + }{ + { + Name: "empty", + Error: err1.ErrNotFound, + }, + { + Name: "not found", + PublishedName: "pr", + Error: err1.ErrNotFound, + }, + { + Name: "ok", + PublishedName: "prj", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert := assert.New(tt) + req := httptest.NewRequest(http.MethodGet, "/", nil) + res := httptest.NewRecorder() + e := echo.New() + c := e.NewContext(req, res) + c.SetParamNames("name") + c.SetParamValues(tc.PublishedName) + + err := h(c) + if tc.Error == nil { + 
assert.NoError(err) + assert.Equal(http.StatusOK, res.Code) + assert.Equal("application/json", res.Header().Get(echo.HeaderContentType)) + assert.Equal("aaa", res.Body.String()) + } else { + assert.ErrorIs(err, tc.Error) + } + }) + } +} + +func TestPublishedIndex(t *testing.T) { + testCases := []struct { + Name string + PublishedName string + Error error + EmptyIndex bool + }{ + { + Name: "empty", + Error: err1.ErrNotFound, + }, + { + Name: "empty index", + Error: echo.ErrNotFound, + EmptyIndex: true, + }, + { + Name: "not found", + PublishedName: "pr", + Error: err1.ErrNotFound, + }, + { + Name: "ok", + PublishedName: "prj", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + assert := assert.New(tt) + req := httptest.NewRequest(http.MethodGet, "/aaa/bbb", nil) + res := httptest.NewRecorder() + e := echo.New() + c := e.NewContext(req, res) + c.SetParamNames("name") + c.SetParamValues(tc.PublishedName) + + err := PublishedIndex(func(ctx context.Context, name string, url *url.URL) (string, error) { + if tc.EmptyIndex { + return "", nil + } + if name == "prj" { + assert.Equal("http://example.com/aaa/bbb", url.String()) + return "index", nil + } + return "", err1.ErrNotFound + })(c) + + if tc.Error == nil { + assert.NoError(err) + assert.Equal(http.StatusOK, res.Code) + assert.Equal("text/html; charset=UTF-8", res.Header().Get(echo.HeaderContentType)) + assert.Equal("index", res.Body.String()) + } else { + assert.ErrorIs(err, tc.Error) + } + }) + } +} diff --git a/internal/usecase/interactor/published.go b/internal/usecase/interactor/published.go index 6a14d1432..a837bd3cc 100644 --- a/internal/usecase/interactor/published.go +++ b/internal/usecase/interactor/published.go @@ -88,7 +88,7 @@ func (i *Published) Data(ctx context.Context, name string) (io.Reader, error) { func (i *Published) Index(ctx context.Context, name string, u *url.URL) (string, error) { prj, err := i.project.FindByPublicName(ctx, name) 
if err != nil || prj == nil { - return "", err1.ErrNotFound + return "", err } md := interfaces.ProjectPublishedMetadataFrom(prj) From 5dd3dba28b3673cf5a34d9695f1bcc9a7175aa86 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 28 Jul 2021 10:07:48 +0300 Subject: [PATCH 056/253] feat: Refine dataset import (#26) * - Declare DatasetSchemaFieldID scalar - Extend AddLayerGroupInput to accept optional markerTitleFieldId - Use selected Field value as layer name * - fix mutation variable name - use representativeFieldID to set layer title --- gqlgen.yml | 2 ++ internal/adapter/graphql/controller_layer.go | 1 + internal/adapter/graphql/models_gen.go | 13 +++++----- internal/adapter/graphql/scalar.go | 13 ++++++++++ internal/graphql/generated.go | 25 ++++++++++++++++++++ internal/usecase/interactor/layer.go | 8 ++++++- internal/usecase/interfaces/layer.go | 1 + schema.graphql | 2 ++ 8 files changed, 58 insertions(+), 7 deletions(-) diff --git a/gqlgen.yml b/gqlgen.yml index 30993f753..f58884a94 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -31,6 +31,8 @@ models: model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaID PropertySchemaFieldID: model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaFieldID + DatasetSchemaFieldID: + model: github.com/reearth/reearth-backend/internal/adapter/graphql.DatasetSchemaFieldID TranslatedString: model: github.com/reearth/reearth-backend/internal/adapter/graphql.Map Lang: diff --git a/internal/adapter/graphql/controller_layer.go b/internal/adapter/graphql/controller_layer.go index 6faadf564..2beaaea00 100644 --- a/internal/adapter/graphql/controller_layer.go +++ b/internal/adapter/graphql/controller_layer.go @@ -56,6 +56,7 @@ func (c *LayerController) AddGroup(ctx context.Context, ginput *AddLayerGroupInp Index: ginput.Index, Name: refToString(ginput.Name), LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(ginput.LinkedDatasetSchemaID), + 
RepresentativeFieldId: ginput.RepresentativeFieldID, }, operator) if err != nil { return nil, err diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index 22451d1d7..07f3610c7 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -76,12 +76,13 @@ type AddInfoboxFieldPayload struct { } type AddLayerGroupInput struct { - ParentLayerID id.ID `json:"parentLayerId"` - PluginID *id.PluginID `json:"pluginId"` - ExtensionID *id.PluginExtensionID `json:"extensionId"` - Index *int `json:"index"` - LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` - Name *string `json:"name"` + ParentLayerID id.ID `json:"parentLayerId"` + PluginID *id.PluginID `json:"pluginId"` + ExtensionID *id.PluginExtensionID `json:"extensionId"` + Index *int `json:"index"` + LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` + Name *string `json:"name"` + RepresentativeFieldID *id.DatasetSchemaFieldID `json:"representativeFieldId"` } type AddLayerGroupPayload struct { diff --git a/internal/adapter/graphql/scalar.go b/internal/adapter/graphql/scalar.go index 928b25304..b6d355e8e 100644 --- a/internal/adapter/graphql/scalar.go +++ b/internal/adapter/graphql/scalar.go @@ -129,6 +129,19 @@ func UnmarshalPropertySchemaFieldID(v interface{}) (id.PropertySchemaFieldID, er return id.PropertySchemaFieldID(""), errors.New("invalid ID") } +func MarshalDatasetSchemaFieldID(t id.DatasetSchemaFieldID) graphql1.Marshaler { + return graphql1.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalDatasetSchemaFieldID(v interface{}) (id.DatasetSchemaFieldID, error) { + if tmpStr, ok := v.(string); ok { + return id.DatasetSchemaFieldIDFrom(tmpStr) + } + return id.NewDatasetSchemaFieldID(), errors.New("invalid ID") +} + func MarshalMap(val map[string]string) graphql1.Marshaler { return graphql1.WriterFunc(func(w io.Writer) { _ = json.NewEncoder(w).Encode(val) diff 
--git a/internal/graphql/generated.go b/internal/graphql/generated.go index ba724c447..f75523500 100644 --- a/internal/graphql/generated.go +++ b/internal/graphql/generated.go @@ -5531,6 +5531,7 @@ scalar PluginExtensionID scalar PropertySchemaID scalar PropertySchemaFieldID scalar TranslatedString +scalar DatasetSchemaFieldID type LatLng { lat: Float! @@ -6501,6 +6502,7 @@ input AddLayerGroupInput { index: Int linkedDatasetSchemaID: ID name: String + representativeFieldId: DatasetSchemaFieldID } input RemoveLayerInput { @@ -28983,6 +28985,14 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context if err != nil { return it, err } + case "representativeFieldId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("representativeFieldId")) + it.RepresentativeFieldID, err = ec.unmarshalODatasetSchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšDatasetSchemaFieldID(ctx, v) + if err != nil { + return it, err + } } } @@ -39644,6 +39654,21 @@ func (ec *executionContext) marshalODatasetSchemaField2แš–githubแš—comแš‹reearth return ec._DatasetSchemaField(ctx, sel, v) } +func (ec *executionContext) unmarshalODatasetSchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšDatasetSchemaFieldID(ctx context.Context, v interface{}) (*id.DatasetSchemaFieldID, error) { + if v == nil { + return nil, nil + } + res, err := graphql1.UnmarshalDatasetSchemaFieldID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalODatasetSchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšDatasetSchemaFieldID(ctx context.Context, sel ast.SelectionSet, v *id.DatasetSchemaFieldID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql1.MarshalDatasetSchemaFieldID(*v) +} + func (ec *executionContext) unmarshalODateTime2แš–timeแšTime(ctx context.Context, v interface{}) (*time.Time, error) { if v == nil { return nil, nil diff --git 
a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index 79d7a0207..6558c069b 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -354,7 +354,13 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, } // create item layers - representativeFieldID := datasetSchema.RepresentativeFieldID() + var representativeFieldID *id.DatasetSchemaFieldID + if inp.RepresentativeFieldId != nil { + representativeFieldID = inp.RepresentativeFieldId + } else { + representativeFieldID = datasetSchema.RepresentativeFieldID() + } + layerItems := make([]*layer.Item, 0, len(ds)) layerItemProperties := make([]*property.Property, 0, len(ds)) index := -1 diff --git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go index 449671114..9be04c60b 100644 --- a/internal/usecase/interfaces/layer.go +++ b/internal/usecase/interfaces/layer.go @@ -28,6 +28,7 @@ type AddLayerGroupInput struct { ExtensionID *id.PluginExtensionID Index *int LinkedDatasetSchemaID *id.DatasetSchemaID + RepresentativeFieldId *id.DatasetSchemaFieldID Name string } diff --git a/schema.graphql b/schema.graphql index 9f052898f..749e82cbf 100644 --- a/schema.graphql +++ b/schema.graphql @@ -25,6 +25,7 @@ scalar PluginExtensionID scalar PropertySchemaID scalar PropertySchemaFieldID scalar TranslatedString +scalar DatasetSchemaFieldID type LatLng { lat: Float! 
@@ -995,6 +996,7 @@ input AddLayerGroupInput { index: Int linkedDatasetSchemaID: ID name: String + representativeFieldId: DatasetSchemaFieldID } input RemoveLayerInput { From f7866e51c4fe6c56a7e181594f054ef6d874a5ab Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 30 Jul 2021 16:43:37 +0900 Subject: [PATCH 057/253] chore: use go:embed (#24) --- go.mod | 4 +- go.sum | 8 +- pkg/builtin/main.go | 14 +- pkg/builtin/manifest_gen.go | 5 - pkg/builtin/manifest_ja_gen.go | 5 - pkg/plugin/manifest/parser.go | 57 +-- pkg/plugin/manifest/parser_test.go | 83 +--- pkg/plugin/manifest/parser_translation.go | 66 +--- .../manifest/parser_translation_test.go | 117 +++--- pkg/plugin/manifest/schema_json_gen.go | 365 ------------------ .../manifest/schema_json_translation_gen.go | 131 ------- tools/cmd/embed/main.go | 178 --------- 12 files changed, 96 insertions(+), 937 deletions(-) delete mode 100644 pkg/builtin/manifest_gen.go delete mode 100644 pkg/builtin/manifest_ja_gen.go delete mode 100644 pkg/plugin/manifest/schema_json_gen.go delete mode 100644 pkg/plugin/manifest/schema_json_translation_gen.go delete mode 100644 tools/cmd/embed/main.go diff --git a/go.mod b/go.mod index 553d52426..08d88a502 100644 --- a/go.mod +++ b/go.mod @@ -38,8 +38,6 @@ require ( github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e github.com/vektah/gqlparser/v2 v2.1.0 - github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect - github.com/xeipuuv/gojsonschema v1.2.0 go.mongodb.org/mongo-driver v1.5.1 go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0 @@ -50,7 +48,7 @@ require ( golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/tools v0.1.0 gopkg.in/go-playground/colors.v1 v1.2.0 - gopkg.in/h2non/gock.v1 v1.1.0 // indirect + gopkg.in/h2non/gock.v1 v1.1.0 
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect ) diff --git a/go.sum b/go.sum index b4dd9e75f..c45f44bb6 100644 --- a/go.sum +++ b/go.sum @@ -312,6 +312,7 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= @@ -421,13 +422,6 @@ github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6 github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= -github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= -github.com/xeipuuv/gojsonreference 
v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= diff --git a/pkg/builtin/main.go b/pkg/builtin/main.go index ebb36c2a1..92aa7c9d4 100644 --- a/pkg/builtin/main.go +++ b/pkg/builtin/main.go @@ -1,8 +1,8 @@ -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/embed -all -n pluginManifestJSON -i manifest.yml -yaml2json - package builtin import ( + _ "embed" + "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/plugin/manifest" @@ -10,8 +10,14 @@ import ( "github.com/reearth/reearth-backend/pkg/visualizer" ) -var pluginTranslationList = map[string]*manifest.TranslationRoot{"ja": manifest.MustParseTransSystemFromStaticJSON(pluginManifestJSON_ja)} -var pluginManifest = manifest.MergeManifestTranslation(manifest.MustParseSystemFromStaticJSON(pluginManifestJSON), pluginTranslationList) +//go:embed manifest.yml +var pluginManifestJSON []byte + +//go:embed manifest_ja.yml +var pluginManifestJSON_ja []byte + +var pluginTranslationList = map[string]*manifest.TranslationRoot{"ja": manifest.MustParseTranslationFromBytes(pluginManifestJSON_ja)} +var pluginManifest = manifest.MergeManifestTranslation(manifest.MustParseSystemFromBytes(pluginManifestJSON), pluginTranslationList) // MUST NOT CHANGE var PropertySchemaIDVisualizerCesium = id.MustPropertySchemaID("reearth/cesium") diff --git a/pkg/builtin/manifest_gen.go b/pkg/builtin/manifest_gen.go deleted file mode 
100644 index 3b61de220..000000000 --- a/pkg/builtin/manifest_gen.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. - -package builtin - -const pluginManifestJSON string = `{"author":"Re:Earth","description":"Official Plugin","extensions":[{"description":"Select here to find scene settings in the right panel. This includes setting map tiles, atmospheric conditions, real lighting, and more.","id":"cesium","schema":{"groups":[{"fields":[{"description":"The starting position of your project.","id":"camera","title":"Initial camera position","type":"camera"},{"description":"Show elevation when close to the surface.","id":"terrain","title":"Terrain","type":"bool"},{"defaultValue":true,"description":"Show the stars.","id":"skybox","title":"Sky","type":"bool"},{"description":"With Sky disabled, choose a background color.","id":"bgcolor","title":"Background color","type":"string","ui":"color"},{"description":"Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project.","id":"ion","title":"Cesium Ion API access token","type":"string"}],"id":"default","title":"Scene"},{"description":"You may change the look of the Earth by obtaining map tile data and setting it here.","fields":[{"choices":[{"key":"default","label":"Default"},{"key":"default_label","label":"Labelled"},{"key":"default_road","label":"Road Map"},{"key":"stamen_watercolor","label":"Stamen Watercolor"},{"key":"stamen_toner","label":"Stamen Toner"},{"key":"open_street_map","label":"Open Street Map"},{"key":"esri_world_topo","label":"ESRI Topography"},{"key":"black_marble","label":"Earth at night"},{"key":"japan_gsi_standard","label":"Japan GSI Standard Map"},{"key":"url","label":"URL"}],"defaultValue":"default","id":"tile_type","title":"Tile Type","type":"string"},{"availableIf":{"field":"tile_type","type":"string","value":"url"},"id":"tile_url","title":"Tile map 
URL","type":"string"},{"id":"tile_minLevel","max":30,"min":0,"title":"Minimum zoom level","type":"number"},{"id":"tile_maxLevel","max":30,"min":0,"title":"Maximum zoom level","type":"number"}],"id":"tiles","list":true,"representativeField":"tile_type","title":"Tiles"},{"description":"Set the look and feel of the Earth.","fields":[{"defaultValue":true,"description":"Display the Sun.","id":"enable_sun","title":"Sun","type":"bool"},{"defaultValue":false,"description":"Display natural lighting from the sun.","id":"enable_lighting","title":"Lighting","type":"bool"},{"defaultValue":true,"description":"Display a lower atmospheric layer.","id":"ground_atmosphere","title":"Ground atmosphere","type":"bool"},{"defaultValue":true,"description":"Display an upper atmospheric layer.","id":"sky_atmosphere","title":"Sky atmosphere","type":"bool"},{"defaultValue":true,"description":"Display customizable fog.","id":"fog","title":"Fog","type":"bool"},{"defaultValue":0.0002,"description":"Set a thickness to the fog. Min: 0 Max: 1","id":"fog_density","max":1,"min":0,"title":"Fog density","type":"number"},{"defaultValue":0.03,"description":"Set brightness of the fog. Min: -1 Max: 1","id":"brightness_shift","max":1,"min":-1,"title":"Fog Brightness","type":"number"},{"description":"Set hue of the fog. Min: -1 Max: 1","id":"hue_shift","max":1,"min":-1,"title":"Fog Hue","type":"number"},{"description":"Set saturation of the fog. Min: -1 Max: 1","id":"surturation_shift","max":1,"min":-1,"title":"Fog Saturation","type":"number"}],"id":"atmosphere","title":"Atmospheric Conditions"},{"description":"Set your Google Analytics tracking ID and analyze how your published project is being viewed.","fields":[{"defaultValue":false,"description":"Enable Google Analytics","id":"enableGA","title":"Enable","type":"bool"},{"description":"Paste your Google Analytics tracking ID here. 
This will be embedded in your published project.","id":"trackingId","title":"Tracking ID","type":"string"}],"id":"googleAnalytics","title":"Google Analytics"}]},"title":"Cesium","type":"visualizer","visualizer":"cesium"},{"description":"Create an information area that appears when a layer is highlighted. Text, pictures, video, etc can be added to an infobox.","id":"infobox","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"choices":[{"key":"small","label":"Small"},{"key":"large","label":"Large"}],"defaultValue":"small","id":"size","title":"Size Type","type":"string"},{"id":"bgcolor","title":"Background Color","type":"string","ui":"color"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Basic"}]},"title":"Infobox","type":"infobox","visualizer":"cesium"},{"description":"A standard map marker.","id":"marker","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"choices":[{"key":"point","label":"Point"},{"key":"image","label":"Icon"}],"defaultValue":"image","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"point"},"id":"pointColor","title":"Point color","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"point"},"defaultValue":10,"id":"pointSize","min":0,"suffix":"px","title":"Point size","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"},{"availableIf":{"field":"style","type":"string","value":"image"},"defaultValue":1,"id":"imageSize","min":0,"title":"Image 
scale","type":"number"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Image crop","type":"string"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"imageShadow","title":"Image shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"label","title":"Label","type":"bool"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelText","title":"Label text","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"choices":[{"key":"left","label":"Left"},{"key":"right","label":"Right"},{"key":"top","label":"Top"},{"key":"bottom","label":"Bottom"}],"defaultValue":"right","id":"labelPosition","title":"Label 
position","type":"string"},{"availableIf":{"field":"label","type":"bool","value":true},"id":"labelTypography","title":"Label font","type":"typography"},{"id":"extrude","title":"Extruded","type":"bool"}],"id":"default","title":"Marker"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"},"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Marker","type":"primitive","visualizer":"cesium"},{"description":"Polyline primitive","id":"polyline","schema":{"groups":[{"fields":[{"id":"coordinates","title":"Coordinates","type":"coordinates"},{"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polyline"}]},"title":"Polyline","type":"primitive","visualizer":"cesium"},{"description":"Polygon primitive","id":"polygon","schema":{"groups":[{"fields":[{"id":"polygon","title":"Polygon","type":"polygon"},{"defaultValue":true,"id":"fill","title":"Fill","type":"bool"},{"availableIf":{"field":"fill","type":"bool","value":true},"id":"fillColor","title":"Fill color","type":"string","ui":"color"},{"id":"stroke","title":"Stroke","type":"bool"},{"availableIf":{"field":"stroke","type":"bool","value":true},"id":"strokeColor","title":"Stroke color","type":"string","ui":"color"},{"availableIf":{"field":"stroke","type":"bool","value":true},"defaultValue":1,"id":"strokeWidth","min":0,"suffix":"px","title":"Stroke width","type":"number"}],"id":"default","title":"Polygon"}]},"title":"Polygon","type":"primitive","visualizer":"cesium"},{"description":"Rectangle primitive","id":"rect","schema":{"groups":[{"fields":[{"id":"rect","title":"Rect","type":"rect"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"id":"extrudedHeight","min":0,"title":"Extruded 
height","type":"number"},{"choices":[{"key":"color","label":"Color"},{"key":"image","label":"Image"}],"defaultValue":"color","id":"style","title":"Style","type":"string"},{"availableIf":{"field":"style","type":"string","value":"color"},"id":"fillColor","title":"Fill","type":"string","ui":"color"},{"availableIf":{"field":"style","type":"string","value":"image"},"id":"image","title":"Image URL","type":"url","ui":"image"}],"id":"default","title":"Rectangle"}]},"title":"Rectangle","type":"primitive","visualizer":"cesium"},{"description":"An Icon marker that allows you to set a photo that will appear after reaching its location.","id":"photooverlay","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"height","title":"Height","type":"number"},{"description":"Set the camera position for the overlay.","id":"camera","title":"Camera","type":"camera"},{"id":"image","title":"Icon","type":"url","ui":"image"},{"defaultValue":1,"id":"imageSize","prefix":"x","title":"Icon size","type":"number"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imageHorizontalOrigin","title":"Image horizontal origin","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"baseline","label":"Baseline"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imageVerticalOrigin","title":"Image vertical origin","type":"string"},{"choices":[{"key":"none","label":"None"},{"key":"circle","label":"Circle"}],"defaultValue":"none","id":"imageCrop","title":"Icon crop","type":"string"},{"id":"imageShadow","title":"Icon shadow","type":"bool"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"id":"imageShadowColor","title":"Shadow color","type":"string","ui":"color"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":3,"id":"imageShadowBlur","suffix":"px","title":"Shadow 
radius","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionX","suffix":"px","title":"Shadow X","type":"number"},{"availableIf":{"field":"imageShadow","type":"bool","value":true},"defaultValue":0,"id":"imageShadowPositionY","suffix":"px","title":"Shadow Y","type":"number"},{"id":"photoOverlayImage","title":"Photo","type":"url","ui":"image"},{"id":"photoOverlayDescription","title":"Photo description","type":"string","ui":"multiline"}],"id":"default","title":"Photo overlay"}],"linkable":{"latlng":{"fieldId":"location","schemaGroupId":"default"}}},"title":"Photo overlay","type":"primitive","visualizer":"cesium"},{"description":"A ball-like marker.","id":"ellipsoid","schema":{"groups":[{"fields":[{"id":"position","title":"Position","type":"latlng"},{"defaultValue":0,"id":"height","min":0,"suffix":"m","title":"Height","type":"number"},{"defaultValue":1000,"id":"radius","min":0,"suffix":"m","title":"Radius","type":"number"},{"id":"fillColor","title":"Fill","type":"string","ui":"color"}],"id":"default","title":"Ellipsoid"}],"linkable":{"latlng":{"fieldId":"position","schemaGroupId":"default"}}},"title":"Sphere","type":"primitive","visualizer":"cesium"},{"description":"Import your own primitives to be used instead of Re:Earth's built in ones.","id":"resource","schema":{"groups":[{"fields":[{"id":"url","title":"File URL","type":"url","ui":"file"},{"choices":[{"key":"auto","label":"Auto"},{"key":"kml","label":"KML"},{"key":"geojson","label":"GeoJSON / TopoJSON"},{"key":"czml","label":"CZML"}],"defaultValue":"auto","id":"type","title":"File format","type":"string"}],"id":"default","title":"File"}]},"title":"File","type":"primitive","visualizer":"cesium"},{"description":"Text block","id":"textblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"text","title":"Content","type":"string","ui":"multiline"},{"id":"markdown","title":"Use 
markdown","type":"bool"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Text block"}]},"title":"Text","type":"block","visualizer":"cesium"},{"description":"Image block","id":"imageblock","schema":{"groups":[{"fields":[{"id":"image","title":"Image","type":"url","ui":"image"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"},{"choices":[{"key":"cover","label":"Cover"},{"key":"contain","label":"Contain"}],"defaultValue":"cover","id":"imageSize","title":"Image size","type":"string"},{"choices":[{"key":"left","label":"Left"},{"key":"center","label":"Center"},{"key":"right","label":"Right"}],"defaultValue":"center","id":"imagePositionX","title":"Image horizontal position","type":"string"},{"choices":[{"key":"top","label":"Top"},{"key":"center","label":"Center"},{"key":"bottom","label":"Bottom"}],"defaultValue":"center","id":"imagePositionY","title":"Image vertical position","type":"string"}],"id":"default","title":"Image block"}],"linkable":{"url":{"fieldId":"image","schemaGroupId":"default"}}},"title":"Image","type":"block","visualizer":"cesium"},{"description":"Video block","id":"videoblock","schema":{"groups":[{"fields":[{"id":"url","title":"Video URL","type":"url","ui":"video"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Video block"}]},"title":"Video","type":"block","visualizer":"cesium"},{"description":"Location block","id":"locationblock","schema":{"groups":[{"fields":[{"id":"location","title":"Location","type":"latlng"},{"id":"title","title":"Title","type":"string"},{"id":"fullSize","title":"Full size","type":"bool"}],"id":"default","title":"Location block"}]},"title":"Location","type":"block","visualizer":"cesium"},{"description":"Table 
block","id":"dlblock","schema":{"groups":[{"fields":[{"id":"title","title":"Title","type":"string"},{"id":"typography","title":"Font","type":"typography"}],"id":"default","title":"Table block"},{"fields":[{"id":"item_title","title":"Title","type":"string"},{"choices":[{"key":"string","label":"String"},{"key":"number","label":"Number"}],"defaultValue":"string","id":"item_datatype","title":"Type","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"string"},"id":"item_datastr","title":"Data","type":"string"},{"availableIf":{"field":"item_datatype","type":"string","value":"number"},"id":"item_datanum","title":"Data","type":"number"}],"id":"items","list":true,"title":"Items"}]},"title":"Table","type":"block","visualizer":"cesium"},{"description":"Menu widgets","id":"menu","schema":{"groups":[{"fields":[{"id":"buttonInvisible","title":"Hide","type":"bool"},{"id":"buttonTitle","title":"Title","type":"string"},{"choices":[{"key":"topleft","label":"Top-Left"},{"key":"topright","label":"Top-Right"},{"key":"bottomleft","label":"Bottom-Left"},{"key":"bottomright","label":"Bottom-Right"}],"defaultValue":"topleft","id":"buttonPosition","title":"Position","type":"string"},{"choices":[{"key":"text","label":"Text only"},{"key":"icon","label":"Icon only"},{"key":"texticon","label":"Text and icon"}],"defaultValue":"text","id":"buttonStyle","title":"Style","type":"string"},{"id":"buttonIcon","title":"Icon","type":"url","ui":"image"},{"id":"buttonColor","title":"Text color","type":"string","ui":"color"},{"id":"buttonBgcolor","title":"Background color","type":"string","ui":"color"},{"choices":[{"key":"link","label":"Link"},{"key":"menu","label":"Menu"},{"key":"camera","label":"Camera 
flight"}],"defaultValue":"link","id":"buttonType","title":"Type","type":"string"},{"availableIf":{"field":"buttonType","type":"string","value":"link"},"id":"buttonLink","title":"Link","type":"url"},{"availableIf":{"field":"buttonType","type":"string","value":"camera"},"id":"buttonCamera","title":"Camera flight","type":"camera"}],"id":"buttons","list":true,"title":"Buttons"},{"fields":[{"id":"menuTitle","title":"Title","type":"string"},{"id":"menuIcon","title":"Icon","type":"url"},{"choices":[{"key":"link","label":"Link"},{"key":"camera","label":"Camera"},{"key":"border","label":"Break"}],"defaultValue":"link","id":"menuType","title":"Type","type":"string"},{"availableIf":{"field":"menuType","type":"string","value":"link"},"id":"menuLink","title":"Link","type":"url"},{"availableIf":{"field":"menuType","type":"string","value":"camera"},"id":"menuCamera","title":"Camera","type":"camera"}],"id":"menu","list":true,"title":"Menu"}]},"title":"Menu","type":"widget","visualizer":"cesium"},{"description":"A unique start screen that will display on load of your archive(ex. 
display the archive's title).","id":"splashscreen","schema":{"groups":[{"fields":[{"id":"overlayEnabled","title":"Enabled","type":"bool"},{"id":"overlayDelay","min":0,"suffix":"s","title":"Delay","type":"number"},{"id":"overlayDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"overlayTransitionDuration","min":0,"suffix":"s","title":"Fade out","type":"number"},{"id":"overlayImage","title":"Image","type":"url","ui":"image"},{"id":"overlayImageW","suffix":"px","title":"Image width","type":"number"},{"id":"overlayImageH","suffix":"px","title":"Image height","type":"number"},{"id":"overlayBgcolor","title":"Background color","type":"string","ui":"color"}],"id":"overlay","title":"Overlay screen"},{"fields":[{"id":"cameraPosition","title":"Camera position","type":"camera"},{"id":"cameraDuration","min":0,"suffix":"s","title":"Duration","type":"number"},{"id":"cameraDelay","min":0,"suffix":"s","title":"Delay","type":"number"}],"id":"camera","list":true,"title":"Camera flight sequence"}]},"title":"Splash screen","type":"widget","visualizer":"cesium"},{"description":"SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily.","id":"storytelling","schema":{"groups":[{"fields":[{"defaultValue":3,"id":"duration","suffix":"s","title":"Duration","type":"number"},{"defaultValue":50000,"id":"range","suffix":"m","title":"Range","type":"number"},{"id":"camera","title":"Camera pose","type":"camera","ui":"camera_pose"},{"id":"autoStart","title":"Auto start","type":"bool"}],"id":"default","title":"Storytelling"},{"fields":[{"id":"layer","title":"Layer","type":"ref","ui":"layer"},{"id":"layerDuration","suffix":"s","title":"Duration","type":"number"},{"id":"layerRange","suffix":"m","title":"Range","type":"number"},{"id":"layerCamera","title":"Camera position","type":"camera"}],"id":"stories","list":true,"representativeField":"layer","title":"Stories"}]},"title":"Storytelling","type":"widget","visualizer":"cesium"}],"id":"reearth","system":true,"title":"Re:Earth Official Plugin"}` diff --git a/pkg/builtin/manifest_ja_gen.go b/pkg/builtin/manifest_ja_gen.go deleted file mode 100644 index 85079918e..000000000 --- a/pkg/builtin/manifest_ja_gen.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. 
- -package builtin - -const pluginManifestJSON_ja string = `{"description":"ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ","extensions":{"cesium":{"description":"ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚","propertySchema":{"atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚","fields":{"brightness_shift":{"title":"ๆ˜Žๅบฆ"},"enable_lighting":{"description":"ๅคช้™ฝๅ…‰ใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใ‚‹ใ“ใจใงใ€ๆ˜ผๅคœใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅคช้™ฝๅ…‰"},"enable_sun":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใซๅญ˜ๅœจใ™ใ‚‹ๅคช้™ฝใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅคช้™ฝ"},"fog":{"description":"้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"้œง"},"fog_density":{"description":"้œงใฎๆฟƒๅบฆใ‚’0ไปฅไธŠใ‹ใ‚‰่จญๅฎšใ—ใพใ™ใ€‚","title":"ๆฟƒๅบฆ"},"ground_atmosphere":{"description":"ๅœฐ่กจใฎๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅœฐ่กจใฎๅคงๆฐ—"},"hue_shift":{"title":"่‰ฒ็›ธ"},"sky_atmosphere":{"description":"ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ไธŠ็ฉบใฎๅคงๆฐ—"},"surturation_shift":{"title":"ๅฝฉๅบฆ"}},"title":"ๅคงๆฐ—"},"default":{"fields":{"bgcolor":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใŒ้ž่กจ็คบใฎๅ ดๅˆใฎใ€่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"่ƒŒๆ™ฏ่‰ฒ"},"camera":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒๆœ€ๅˆใซ่กจ็คบใ™ใ‚‹ใ‚ซใƒกใƒฉใฎไฝ็ฝฎใ‚’่จญๅฎšใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉๅˆๆœŸไฝ็ฝฎ"},"ion":{"description":"่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚","title":"Cesium Icon 
APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ"},"skybox":{"description":"ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚","title":"ๅฎ‡ๅฎ™ใฎ่กจ็คบ"},"terrain":{"description":"ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","title":"ๅœฐๅฝข"}},"title":"ใ‚ทใƒผใƒณ"},"googleAnalytics":{"description":"Google Analyticsใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒšใƒผใ‚ธใŒใฉใฎใ‚ˆใ†ใซ้–ฒ่ฆงใ•ใ‚Œใฆใ„ใ‚‹ใ‹ใ‚’ๅˆ†ๆžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚","fields":{"enableGA":{"description":"Google Analyticsใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚","title":"ๆœ‰ๅŠน"},"trackingCode":{"description":"ใ“ใ“ใซใ‚ฐใƒผใ‚ฐใƒซใ‚ขใƒŠใƒชใƒ†ใ‚ฃใ‚ฏใ‚นใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐIDใ‚’่ฒผใ‚Šไป˜ใ‘ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ“ใฎใ‚ณใƒผใƒ‰ใŒๅŸ‹ใ‚่พผใพใ‚Œใพใ™ใ€‚","title":"ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐID"}},"title":"Google Analytics"},"tiles":{"description":"ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","fields":{"tile_maxLevel":{"title":"ๆœ€ๅคงใƒฌใƒ™ใƒซ"},"tile_minLevel":{"title":"ๆœ€ๅฐใƒฌใƒ™ใƒซ"},"tile_title":{"title":"ๅๅ‰"},"tile_type":{"choices":{"black_marble":"Black Marble","default":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ","default_label":"ใƒฉใƒ™ใƒซไป˜ใๅœฐๅ›ณ","default_road":"้“่ทฏๅœฐๅ›ณ","esri_world_topo":"ESRI Topography","japan_gsi_standard":"ๅœฐ็†้™ขๅœฐๅ›ณ ๆจ™ๆบ–ๅœฐๅ›ณ","open_street_map":"Open Street Map","stamen_toner":"Stamen Toner","stamen_watercolor":"Stamen Watercolor","url":"URL"},"title":"็จฎ้กž"},"tile_url":{"title":"URL"}},"title":"ใ‚ฟใ‚คใƒซ"}},"title":"Cesium"},"dlblock":{"description":"่กจใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"่กจใƒ–ใƒญใƒƒใ‚ฏ"},"items":{"fields":{"item_datanum":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—)"},"item_datastr":{"title":"ใƒ‡ใƒผใ‚ฟ(ๆ–‡ๅญ—)"},"item_datatype":{"choices":{"number":"ๆ•ฐๅญ—","string":"ๆ–‡ๅญ—"},"title":"็จฎ้กž"},"item_title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ใ‚ขใ‚คใƒ†ใƒ 
"}},"title":"่กจ"},"ellipsoid":{"description":"ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใ‚’ๅœฐๅ›ณไธŠใซใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใง่ฟฝๅŠ ใงใใพใ™ใ€‚ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใซใ‚ˆใฃใฆ็ซ‹ไฝ“็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"position":{"title":"ไฝ็ฝฎ"},"radius":{"title":"ๅŠๅพ„"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"}},"title":"็ƒไฝ“ใƒ„ใƒผใƒซ"},"imageblock":{"description":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"image":{"title":"็”ปๅƒ"},"imagePositionX":{"choices":{"center":"ไธญๅคฎ","left":"ๅทฆ","right":"ๅณ"},"title":"ๆฐดๅนณไฝ็ฝฎ"},"imagePositionY":{"choices":{"bottom":"ไธ‹","center":"ไธญๅคฎ","top":"ไธŠ"},"title":"ๅž‚็›ดไฝ็ฝฎ"},"imageSize":{"choices":{"contain":"ๅซใ‚€","cover":"ใ‚ซใƒใƒผ"},"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"็”ปๅƒ"},"infobox":{"description":"้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"bgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"size":{"choices":{"large":"ๅคง","small":"ๅฐ"},"title":"ใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"}},"title":"ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น"},"locationblock":{"description":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ไฝ็ฝฎๆƒ…ๅ ฑ"},"marker":{"description":"ใƒ‰ใƒฉใƒƒใ‚ฐ\u0026ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใงใ€ๅœฐๅ›ณไธŠใซใƒžใƒผใ‚ซใƒผใ‚’่ฟฝๅŠ 
ใ—ใพใ™ใ€‚ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใใ‚Œใ‚‰ใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"extrude":{"title":"ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"็”ปๅƒใ‚ตใ‚คใ‚บ"},"label":{"title":"ใƒฉใƒ™ใƒซ"},"labelText":{"title":"ใƒฉใƒ™ใƒซๆ–‡ๅญ—"},"labelTypography":{"title":"ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ"},"location":{"title":"ไฝ็ฝฎ"},"pointColor":{"title":"ใƒใ‚คใƒณใƒˆ่‰ฒ"},"pointSize":{"title":"ใƒใ‚คใƒณใƒˆใ‚ตใ‚คใ‚บ"},"style":{"choices":{"image":"ใ‚ขใ‚คใ‚ณใƒณ","point":"ใƒใ‚คใƒณใƒˆ"},"title":"่กจ็คบๆ–นๆณ•"}},"title":"ใƒžใƒผใ‚ซใƒผ"}},"title":"ใƒžใƒผใ‚ซใƒผ"},"menu":{"description":"ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚\\nใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚\\nใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ 
ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™\\nใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚","propertySchema":{"buttons":{"fields":{"buttonBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"buttonCamera":{"title":"ใ‚ซใƒกใƒฉ"},"buttonColor":{"title":"ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ"},"buttonIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"buttonInvisible":{"title":"้ž่กจ็คบ"},"buttonLink":{"title":"ใƒชใƒณใ‚ฏ"},"buttonPosition":{"choices":{"bottomleft":"ไธ‹ๅทฆ","bottomright":"ไธ‹ๅณ","topleft":"ไธŠๅทฆ","topright":"ไธŠๅณ"},"title":"่กจ็คบไฝ็ฝฎ"},"buttonStyle":{"choices":{"icon":"ใ‚ขใ‚คใ‚ณใƒณใฎใฟ","text":"ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ","texticon":"ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ"},"title":"่กจ็คบๆ–นๆณ•"},"buttonTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"buttonType":{"choices":{"camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ","menu":"ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒœใ‚ฟใƒณ"},"menu":{"fields":{"menuCamera":{"title":"ใ‚ซใƒกใƒฉ"},"menuIcon":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"menuLink":{"title":"ใƒชใƒณใ‚ฏ"},"menuTitle":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"menuType":{"choices":{"border":"ๅŒบๅˆ‡ใ‚Š็ทš","camera":"ใ‚ซใƒกใƒฉ็งปๅ‹•","link":"ใƒชใƒณใ‚ฏ"},"title":"ใ‚ขใ‚ฏใ‚ทใƒงใƒณ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"}},"title":"ใƒกใƒ‹ใƒฅใƒผ"},"photooverlay":{"description":"ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ใ•ใ‚ŒใŸใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"default":{"fields":{"camera":{"description":"ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚","title":"ใ‚ซใƒกใƒฉ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"ใ‚ขใ‚คใ‚ณใƒณ"},"imageCrop":{"choices":{"circle":"ๅ††ๅฝข","none":"ใชใ—"},"title":"ๅˆ‡ใ‚ŠๆŠœใ"},"imageShadow":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ทใƒฃใƒ‰ใ‚ฆ"},"imageShadowBlur":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„"},"imageShadowColor":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ"},"imageShadowPositionX":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆX"},"imageShadowPositionY":{"title":"ใ‚ทใƒฃใƒ‰ใ‚ฆY"},"imageSize":{"title":"ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ"},"location":{"title":"ไฝ็ฝฎ"},"photoOverlayImage":{"title":"ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค"},"polygon":{"description":"Polygon primitive","propertySchema":{"default":{"fields":{"fill":{"title":"ๅก—ใ‚Š"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"polygon":{"title":"ใƒใƒชใ‚ดใƒณ"},"stroke":{"title":"็ทš"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"ใƒใƒชใ‚ดใƒณ"}},"title":"ใƒใƒชใ‚ดใƒณ"},"polyline":{"description":"Polyline primitive","propertySchema":{"default":{"fields":{"coordinates":{"title":"้ ‚็‚น"},"strokeColor":{"title":"็ทš่‰ฒ"},"strokeWidth":{"title":"็ทšๅน…"}},"title":"็›ด็ทš"}},"title":"็›ด็ทš"},"rect":{"description":"Rectangle primitive","propertySchema":{"default":{"fields":{"extrudedHeight":{"title":"้ซ˜ใ•"},"fillColor":{"title":"ๅก—ใ‚Š่‰ฒ"},"height":{"title":"้ซ˜ๅบฆ"},"image":{"title":"็”ปๅƒURL"},"rect":{"title":"้•ทๆ–นๅฝข"},"style":{"choices":{"color":"่‰ฒ","image":"็”ปๅƒ"},"title":"ใ‚นใ‚ฟใ‚คใƒซ"}},"title":"้•ทๆ–นๅฝข"}},"title":"้•ทๆ–นๅฝข"},"resource":{"description":"ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟ๏ผˆๅฝขๅผไฝ•๏ผŸ๏ผŸ๏ผŸ๏ผ‰ใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ 
ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚","propertySchema":{"default":{"fields":{"url":{"choices":{"auto":"่‡ชๅ‹•","czml":"CZML","geojson":"GeoJSON / TopoJSON","kml":"KML"},"title":"ใƒ•ใ‚กใ‚คใƒซ URL"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"}},"title":"ใƒ•ใ‚กใ‚คใƒซ"},"splashscreen":{"description":"ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚","propertySchema":{"camera":{"fields":{"cameraDelay":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"cameraDuration":{"title":"ใ‚ซใƒกใƒฉ้–‹ๅง‹ๆ™‚้–“"},"cameraPosition":{"title":"ใ‚ซใƒกใƒฉไฝ็ฝฎ"}},"title":"ใ‚ซใƒกใƒฉใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ"},"overlay":{"fields":{"overlayBgcolor":{"title":"่ƒŒๆ™ฏ่‰ฒ"},"overlayDelay":{"title":"้–‹ๅง‹ๆ™‚้–“"},"overlayDuration":{"title":"่กจ็คบๆ™‚้–“"},"overlayEnabled":{"title":"ๆœ‰ๅŠน"},"overlayImage":{"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค็”ปๅƒ"},"overlayImageH":{"title":"็”ปๅƒ้ซ˜ใ•"},"overlayImageW":{"title":"็”ปๅƒๅน…"},"overlayTransitionDuration":{"title":"ใƒ•ใ‚งใƒผใƒ‰ๆ™‚้–“"}},"title":"ใ‚ชใƒผใƒใƒผใƒฌใ‚ค"}},"title":"ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ"},"storytelling":{"description":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚","propertySchema":{"default":{"fields":{"autoStart":{"title":"่‡ชๅ‹•ๅ†็”Ÿ"},"camera":{"title":"ใ‚ซใƒกใƒฉ"},"duration":{"title":"ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“"},"range":{"title":"็”ป่ง’"}},"title":"ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"},"stories":{"fields":{"layer":{"title":"ใƒฌใ‚คใƒคใƒผ"},"layerCamera":{"title":"ใ‚ซใƒกใƒฉ"},"layerDuration":{"title":"็งปๅ‹•ๆ™‚้–“"},"layerRange":{"title":"ใ‚ซใƒกใƒฉ็”ป่ง’"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผ"}},"title":"ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ"},"textblock":{"description":"Text 
block","propertySchema":{"default":{"fields":{"markdown":{"title":"ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ"},"text":{"title":"ใ‚ณใƒณใƒ†ใƒณใƒ„"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"typography":{"title":"ใƒ•ใ‚ฉใƒณใƒˆ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ใƒ†ใ‚ญใ‚นใƒˆ"},"videoblock":{"description":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ","propertySchema":{"default":{"fields":{"fullSize":{"title":"ใƒ•ใƒซใ‚ตใ‚คใ‚บ"},"title":{"title":"ใ‚ฟใ‚คใƒˆใƒซ"},"url":{"title":"ๅ‹•็”ป URL"}},"title":"ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ"}},"title":"ๅ‹•็”ป"}},"title":"Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ"}` diff --git a/pkg/plugin/manifest/parser.go b/pkg/plugin/manifest/parser.go index 246d78385..3de1d74af 100644 --- a/pkg/plugin/manifest/parser.go +++ b/pkg/plugin/manifest/parser.go @@ -1,40 +1,25 @@ package manifest //go:generate go run github.com/idubinskiy/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/embed -o schema_json_gen.go -n SchemaJSON -i ../../../plugin_manifest_schema.json import ( - "encoding/json" "errors" - "fmt" "io" - "github.com/xeipuuv/gojsonschema" + "gopkg.in/yaml.v2" ) var ( ErrInvalidManifest error = errors.New("invalid manifest") ErrFailedToParseManifest error = errors.New("failed to parse plugin manifest") ErrSystemManifest = errors.New("cannot build system manifest") - schemaLoader = gojsonschema.NewStringLoader(SchemaJSON) ) func Parse(source io.Reader) (*Manifest, error) { - // TODO: When using gojsonschema.NewReaderLoader, gojsonschema.Validate returns io.EOF error. 
- doc, err := io.ReadAll(source) - if err != nil { - return nil, ErrFailedToParseManifest - } - - documentLoader := gojsonschema.NewBytesLoader(doc) - if err := validate(documentLoader); err != nil { - return nil, err - } - root := Root{} - // err = json.NewDecoder(reader2).Decode(&root) - if err = json.Unmarshal(doc, &root); err != nil { + if err := yaml.NewDecoder(source).Decode(&root); err != nil { return nil, ErrFailedToParseManifest + // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) } manifest, err := root.manifest() @@ -48,16 +33,11 @@ func Parse(source io.Reader) (*Manifest, error) { return manifest, nil } -func ParseSystemFromStaticJSON(source string) (*Manifest, error) { - src := []byte(source) - documentLoader := gojsonschema.NewBytesLoader(src) - if err := validate(documentLoader); err != nil { - return nil, err - } - +func ParseSystemFromBytes(source []byte) (*Manifest, error) { root := Root{} - if err := json.Unmarshal(src, &root); err != nil { + if err := yaml.Unmarshal(source, &root); err != nil { return nil, ErrFailedToParseManifest + // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) } manifest, err := root.manifest() @@ -68,31 +48,10 @@ func ParseSystemFromStaticJSON(source string) (*Manifest, error) { return manifest, nil } -func MustParseSystemFromStaticJSON(source string) *Manifest { - m, err := ParseSystemFromStaticJSON(source) +func MustParseSystemFromBytes(source []byte) *Manifest { + m, err := ParseSystemFromBytes(source) if err != nil { panic(err) } return m } - -func validate(ld gojsonschema.JSONLoader) error { - // documentLoader, reader2 := gojsonschema.NewReaderLoader(source) - result, err := gojsonschema.Validate(schemaLoader, ld) - if err != nil { - return ErrFailedToParseManifest - } - - if !result.Valid() { - var errstr string - for i, e := range result.Errors() { - if i > 0 { - errstr += ", " - } - errstr += e.String() - } - return fmt.Errorf("invalid manifest: %w", errors.New(errstr)) - } - 
- return nil -} diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 155d9774b..8a9dda86c 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -1,6 +1,7 @@ package manifest import ( + "errors" "io" "strings" "testing" @@ -9,7 +10,6 @@ import ( "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" - "github.com/xeipuuv/gojsonschema" ) func TestParse(t *testing.T) { @@ -51,6 +51,7 @@ func TestParse(t *testing.T) { err: ErrSystemManifest, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { @@ -67,7 +68,7 @@ func TestParse(t *testing.T) { } -func TestParseSystemFromStaticJSON(t *testing.T) { +func TestParseSystemFromBytes(t *testing.T) { testCases := []struct { name, input string expected *Manifest @@ -88,29 +89,29 @@ func TestParseSystemFromStaticJSON(t *testing.T) { err: nil, }, { - name: "fail not valid JSON", - input: "", + name: "fail not valid YAML", + input: "--", expected: nil, err: ErrFailedToParseManifest, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - m, err := ParseSystemFromStaticJSON(tc.input) + m, err := ParseSystemFromBytes([]byte(tc.input)) if err == nil { assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) } else { - assert.Equal(t, tc.err, err) + assert.True(t, errors.Is(tc.err, err)) } }) } - } -func TestMustParseSystemFromStaticJSON(t *testing.T) { +func TestMustParseSystemFromBytes(t *testing.T) { testCases := []struct { name, input string expected *Manifest @@ -132,71 +133,27 @@ func TestMustParseSystemFromStaticJSON(t *testing.T) { }, { name: "fail not valid JSON", - input: "", + input: "--", expected: nil, err: ErrFailedToParseManifest, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - var m 
*Manifest - defer func() { - if r := recover(); r == nil { - assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) - } - }() - m = MustParseSystemFromStaticJSON(tc.input) - - }) - } - -} - -func TestValidate(t *testing.T) { - testCases := []struct { - name, input string - err bool - }{ - { - name: "success create manifest", - input: `{ - "id": "aaa", - "title": "bbb", - "version": "1.1.1" - }`, - - err: false, - }, - { - name: "fail not valid JSON", - input: "", - err: true, - }, - { - name: "fail invalid name type", - input: `{ - "id": "aaa", - "title": 123, - "version": "1.1.1" - }`, - err: true, - }, - } for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - err := validate(gojsonschema.NewBytesLoader([]byte(tc.input))) - if tc.err { - assert.Error(tt, err) - } else { - assert.NoError(tt, err) + + if tc.err != nil { + assert.PanicsWithError(tt, tc.err.Error(), func() { + _ = MustParseSystemFromBytes([]byte(tc.input)) + }) + return } + + m := MustParseSystemFromBytes([]byte(tc.input)) + assert.Equal(tt, tc.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(tt, m.Plugin.Name(), m.Plugin.Name()) }) } - } diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index f4422a398..7c9780f4c 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -2,90 +2,48 @@ package manifest // Generating types with schema typer for translation schema is disabled because some fields are wrongly typed. 
// DISABLED go:generate go run github.com/idubinskiy/schematyper -o schema_translation_gen.go --package manifest --prefix Translation ../../../plugin_manifest_schema_translation.json -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/embed -o schema_json_translation_gen.go -n SchemaTranslationJSON -i ../../../plugin_manifest_schema_translation.json import ( - "encoding/json" "errors" - "fmt" "io" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" - "github.com/xeipuuv/gojsonschema" + "gopkg.in/yaml.v2" ) var ( - ErrInvalidManifestTranslation error = errors.New("invalid manifest translation") - ErrFailedToParseManifestTranslation error = errors.New("failed to parse plugin manifest translation") - schemaTranslationLoader gojsonschema.JSONLoader = gojsonschema.NewStringLoader(SchemaTranslationJSON) + ErrInvalidManifestTranslation error = errors.New("invalid manifest translation") + ErrFailedToParseManifestTranslation error = errors.New("failed to parse plugin manifest translation") ) func ParseTranslation(source io.Reader) (*TranslationRoot, error) { - // TODO: When using gojsonschema.NewReaderLoader, gojsonschema.Validate returns io.EOF error. 
- doc, err := io.ReadAll(source) - if err != nil { - return nil, ErrFailedToParseManifestTranslation - } - - documentLoader := gojsonschema.NewBytesLoader(doc) - if err := validateTranslation(documentLoader); err != nil { - return nil, err - } - root := TranslationRoot{} - // err = json.NewDecoder(reader2).Decode(&root) - if err = json.Unmarshal(doc, &root); err != nil { - return nil, ErrInvalidManifestTranslation + if err := yaml.NewDecoder(source).Decode(&root); err != nil { + return nil, ErrFailedToParseManifestTranslation + // return nil, fmt.Errorf("failed to parse plugin manifest translation: %w", err) } return &root, nil } -func ParseTranslationFromStaticJSON(source string) (*TranslationRoot, error) { - src := []byte(source) - - documentLoader := gojsonschema.NewBytesLoader(src) - if err := validateTranslation(documentLoader); err != nil { - return nil, err - } - +func ParseTranslationFromBytes(source []byte) (*TranslationRoot, error) { tr := TranslationRoot{} - if err := json.Unmarshal(src, &tr); err != nil { - return nil, ErrFailedToParseManifest + if err := yaml.Unmarshal(source, &tr); err != nil { + return nil, ErrFailedToParseManifestTranslation + // return nil, fmt.Errorf("failed to parse plugin manifest translation: %w", err) } return &tr, nil } -func MustParseTransSystemFromStaticJSON(source string) *TranslationRoot { - m, err := ParseTranslationFromStaticJSON(source) +func MustParseTranslationFromBytes(source []byte) *TranslationRoot { + m, err := ParseTranslationFromBytes(source) if err != nil { panic(err) } return m } -func validateTranslation(ld gojsonschema.JSONLoader) error { - // documentLoader, reader2 := gojsonschema.NewReaderLoader(source) - result, err := gojsonschema.Validate(schemaTranslationLoader, ld) - if err != nil { - return ErrFailedToParseManifest - } - - if !result.Valid() { - var errstr string - for i, e := range result.Errors() { - if i > 0 { - errstr += ", " - } - errstr += e.String() - } - return fmt.Errorf("invalid manifest 
translation: %w", errors.New(errstr)) - } - - return nil -} - func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Manifest { for lang, t := range tl { if t == nil { diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index 7747fe81f..9366083ee 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -6,8 +6,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/xeipuuv/gojsonschema" - "github.com/stretchr/testify/assert" ) @@ -86,6 +84,7 @@ func TestParseTranslation(t *testing.T) { err: ErrFailedToParseManifest, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { @@ -125,7 +124,7 @@ func TestParseTranslation(t *testing.T) { } } -func TestParseTranslationFromStaticJSON(t *testing.T) { +func TestParseTranslationFromBytes(t *testing.T) { desc := "test plugin desc" name := "test plugin name" ext_name := "test ext name" @@ -169,17 +168,18 @@ func TestParseTranslationFromStaticJSON(t *testing.T) { err: nil, }, { - name: "fail not valid JSON", - input: "", + name: "fail not valid YAML", + input: "--", expected: nil, - err: ErrFailedToParseManifest, + err: ErrFailedToParseManifestTranslation, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - res, err := ParseTranslationFromStaticJSON(tc.input) + res, err := ParseTranslationFromBytes([]byte(tc.input)) if err == nil { assert.Equal(tt, *tc.expected.Title, *res.Title) assert.Equal(tt, *res.Description, *tc.expected.Description) @@ -207,13 +207,13 @@ func TestParseTranslationFromStaticJSON(t *testing.T) { } } } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.True(tt, errors.Is(tc.err, err)) } }) } } -func TestMustParseTransSystemFromStaticJSON(t *testing.T) { +func TestMustParseTransSystemFromBytes(t *testing.T) { desc := "test plugin desc" name := "test plugin name" 
ext_name := "test ext name" @@ -257,49 +257,51 @@ func TestMustParseTransSystemFromStaticJSON(t *testing.T) { err: nil, }, { - name: "fail not valid JSON", - input: "", + name: "fail not valid YAML", + input: "--", expected: nil, - err: ErrFailedToParseManifest, + err: ErrFailedToParseManifestTranslation, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - tc := tc - var res *TranslationRoot - defer func() { - if r := recover(); r == nil { - assert.Equal(tt, *tc.expected.Title, *res.Title) - assert.Equal(tt, *res.Description, *tc.expected.Description) - assert.Equal(tt, res.Schema, tc.expected.Schema) - if len(res.Extensions) > 0 { - for k, v := range res.Extensions { - assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) - if len(v.PropertySchema) > 0 { - for kk, vv := range v.PropertySchema { - assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) - assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) - if len(vv.Fields) > 0 { - for kkk, vvv := range vv.Fields { - assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) - assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) - if len(vvv.Choices) > 0 { - for kkkk, vvvv := range vvv.Choices { - assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) - } - } + + if tc.err != nil { + assert.PanicsWithError(tt, tc.err.Error(), func() { + _ = MustParseTranslationFromBytes([]byte(tc.input)) + }) + return + } + + res := MustParseTranslationFromBytes([]byte(tc.input)) + assert.Equal(tt, *tc.expected.Title, *res.Title) + assert.Equal(tt, *res.Description, *tc.expected.Description) + assert.Equal(tt, res.Schema, tc.expected.Schema) + if len(res.Extensions) > 0 { + for k, v := range res.Extensions { + assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) + if 
len(v.PropertySchema) > 0 { + for kk, vv := range v.PropertySchema { + assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) + assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) + if len(vv.Fields) > 0 { + for kkk, vvv := range vv.Fields { + assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) + assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) + if len(vvv.Choices) > 0 { + for kkkk, vvvv := range vvv.Choices { + assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) } } } } } } - } - }() - res = MustParseTransSystemFromStaticJSON(tc.input) + } }) } } @@ -325,7 +327,7 @@ func TestMergeManifestTranslation(t *testing.T) { { "id": "test_field", "title": "nnn", - "type": "string", + "type": "string", "description": "kkk" } ] @@ -352,8 +354,8 @@ func TestMergeManifestTranslation(t *testing.T) { }, { name: "nil translition list", - tl: map[string]*TranslationRoot{"xx": MustParseTransSystemFromStaticJSON(translatedManifest)}, - m: MustParseSystemFromStaticJSON(manifest), + tl: map[string]*TranslationRoot{"xx": MustParseTranslationFromBytes([]byte(translatedManifest))}, + m: MustParseSystemFromBytes([]byte(manifest)), Expected: &struct{ PluginName, PluginDesc, ExtName, PsTitle, FieldName, FieldDesc i18n.String }{ PluginName: i18n.String{"en": "aaa", "xx": "test plugin name"}, PluginDesc: i18n.String{"en": "ddd", "xx": "test plugin desc"}, @@ -364,6 +366,7 @@ func TestMergeManifestTranslation(t *testing.T) { }, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { @@ -382,35 +385,3 @@ func TestMergeManifestTranslation(t *testing.T) { }) } } - -func TestValidatTranslation(t *testing.T) { - testCases := []struct { - name, input string - err bool - }{ - { - name: "success create translation", - input: translatedManifest, - - err: false, - }, - { 
- name: "fail not valid JSON", - input: "", - err: true, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - err := validateTranslation(gojsonschema.NewBytesLoader([]byte(tc.input))) - if tc.err { - assert.Error(tt, err) - } else { - assert.NoError(tt, err) - } - }) - } - -} diff --git a/pkg/plugin/manifest/schema_json_gen.go b/pkg/plugin/manifest/schema_json_gen.go deleted file mode 100644 index 3a80fc46d..000000000 --- a/pkg/plugin/manifest/schema_json_gen.go +++ /dev/null @@ -1,365 +0,0 @@ -// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. - -package manifest - -const SchemaJSON string = `{ - "$id": "https://app.reearth.io/schemas/plugin-manifest", - "$schema": "http://json-schema.org/draft-04/schema", - "description": "Re:Earth plugin manifest schema", - "definitions": { - "id": { - "$id": "#id", - "type": "string", - "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" - }, - "id?": { - "$id": "#id?", - "type": [ - "string", - "null" - ], - "pattern": "^[A-Za-z]{1}[\\w-:.]{0,}$" - }, - "valuetype": { - "$id": "#valuetype", - "type": "string", - "enum": [ - "bool", - "number", - "string", - "url", - "latlng", - "latlngheight", - "camera", - "typography", - "coordinates", - "polygon", - "rect", - "ref" - ] - }, - "propertyPointer": { - "$id": "#propertyPointer", - "type": [ - "object", - "null" - ], - "properties": { - "schemaGroupId": { - "type": "string" - }, - "fieldId": { - "type": "string" - } - }, - "required": [ - "schemaGroupId", - "fieldId" - ], - "additionalProperties": false - }, - "propertyLinkableFields": { - "$id": "#propertyLinkableFields", - "type": [ - "object", - "null" - ], - "properties": { - "latlng": { - "$ref": "#/definitions/propertyPointer" - }, - "url": { - "$ref": "#/definitions/propertyPointer" - } - }, - "additionalProperties": false - }, - "propertyCondition": { - "$id": "#propertyCondition", - "type": [ - "object", - "null" - ], - "properties": { - 
"field": { - "type": "string" - }, - "type": { - "$ref": "#/definitions/valuetype" - }, - "value": {} - }, - "required": [ - "field", - "type", - "value" - ], - "additionalProperties": false - }, - "propertySchemaField": { - "$id": "#propertySchemaField", - "type": "object", - "properties": { - "id": { - "$ref": "#/definitions/id" - }, - "title": { - "type": [ - "string", - "null" - ] - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "type": { - "$ref": "#/definitions/valuetype" - }, - "prefix": { - "type": [ - "string", - "null" - ] - }, - "suffix": { - "type": [ - "string", - "null" - ] - }, - "defaultValue": {}, - "ui": { - "type": [ - "string", - "null" - ], - "enum": [ - "layer", - "color", - "multiline", - "selection", - "buttons", - "range", - "image", - "video", - "file", - "camera_pose" - ] - }, - "min": { - "type": [ - "number", - "null" - ] - }, - "max": { - "type": [ - "number", - "null" - ] - }, - "choices": { - "type": "array", - "items": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "label": { - "type": "string" - }, - "icon": { - "type": "string" - } - }, - "required": [ - "key" - ], - "additionalProperties": false - } - }, - "availableIf": { - "$ref": "#/definitions/propertyCondition" - } - }, - "required": [ - "id", - "type", - "title" - ], - "additionalProperties": false - }, - "propertySchemaGroup": { - "$id": "#propertySchemaGroup", - "type": "object", - "properties": { - "id": { - "$ref": "#/definitions/id" - }, - "title": { - "type": "string" - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "list": { - "type": "boolean" - }, - "availableIf": { - "$ref": "#/definitions/propertyCondition" - }, - "representativeField": { - "$ref": "#/definitions/id?" 
- }, - "fields": { - "type": "array", - "items": { - "$ref": "#/definitions/propertySchemaField" - } - } - }, - "required": [ - "id", - "title" - ], - "additionalProperties": false - }, - "propertySchema": { - "$id": "#propertySchema", - "type": [ - "object", - "null" - ], - "properties": { - "version": { - "type": "number" - }, - "linkable": { - "$ref": "#/definitions/propertyLinkableFields" - }, - "groups": { - "type": "array", - "items": { - "$ref": "#/definitions/propertySchemaGroup" - } - } - }, - "additionalProperties": false - }, - "extension": { - "$id": "#extension", - "type": "object", - "properties": { - "id": { - "$ref": "#/definitions/id" - }, - "title": { - "type": "string" - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "icon": { - "type": [ - "string", - "null" - ] - }, - "visualizer": { - "type": "string", - "enum": [ - "cesium" - ] - }, - "type": { - "type": "string", - "enum": [ - "primitive", - "widget", - "block", - "visualizer", - "infobox" - ] - }, - "schema": { - "$ref": "#/definitions/propertySchema" - } - }, - "required": [ - "id", - "title", - "visualizer", - "type" - ], - "additionalProperties": false - }, - "root": { - "$id": "#root", - "type": "object", - "properties": { - "id": { - "$ref": "#/definitions/id" - }, - "title": { - "type": "string" - }, - "system": { - "type": "boolean" - }, - "version": { - "type": "string" - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "repository": { - "type": [ - "string", - "null" - ] - }, - "author": { - "type": [ - "string", - "null" - ] - }, - "main": { - "type": [ - "string", - "null" - ] - }, - "extensions": { - "type": "array", - "items": { - "$ref": "#/definitions/extension" - } - }, - "schema": { - "$ref": "#/definitions/propertySchema" - } - }, - "required": [ - "id", - "title" - ], - "additionalProperties": false - } - }, - "$ref": "#/definitions/root" -} -` diff --git a/pkg/plugin/manifest/schema_json_translation_gen.go 
b/pkg/plugin/manifest/schema_json_translation_gen.go deleted file mode 100644 index 2942c4834..000000000 --- a/pkg/plugin/manifest/schema_json_translation_gen.go +++ /dev/null @@ -1,131 +0,0 @@ -// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. - -package manifest - -const SchemaTranslationJSON string = `{ - "$id": "https://app.reearth.io/schemas/plugin-manifest-translation", - "$schema": "http://json-schema.org/draft-04/schema", - "description": "Re:Earth plugin manifest schema translation", - "definitions": { - "propertySchemaField": { - "$id": "#propertySchemaField", - "type": "object", - "additionalProperties": false, - "properties": { - "title": { - "type": [ - "string", - "null" - ] - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "choices": { - "type": "object", - "additionalProperties": false, - "patternProperties": { - "^[A-Za-z]{1}[\\w-:.]{0,}$": { - "type": "string" - } - } - } - } - }, - "propertySchemaGroup": { - "$id": "#propertySchemaGroup", - "type": "object", - "additionalProperties": false, - "properties": { - "title": { - "type": [ - "string", - "null" - ] - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "fields": { - "type": "object", - "additionalProperties": false, - "patternProperties": { - "^[A-Za-z]{1}[\\w-:.]{0,}$": { - "$ref": "#/definitions/propertySchemaField" - } - } - } - } - }, - "propertySchema": { - "$id": "#propertySchema", - "type": "object", - "additionalProperties": false, - "patternProperties": { - "^[A-Za-z]{1}[\\w-:.]{0,}$": { - "$ref": "#/definitions/propertySchemaGroup" - } - } - }, - "extension": { - "$id": "#extension", - "type": "object", - "additionalProperties": false, - "properties": { - "title": { - "type": [ - "string", - "null" - ] - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "propertySchema": { - "$ref": "#/definitions/propertySchema" - } - } - }, - "root": { - "$id": "#root", - "type": "object", - 
"additionalProperties": false, - "properties": { - "title": { - "type": [ - "string", - "null" - ] - }, - "description": { - "type": [ - "string", - "null" - ] - }, - "extensions": { - "type": "object", - "patternProperties": { - "^[A-Za-z]{1}[\\w-:.]{0,}$": { - "$ref": "#/definitions/extension" - } - } - }, - "schema": { - "$ref": "#/definitions/propertySchema" - } - } - } - }, - "$ref": "#/definitions/root" -} -` diff --git a/tools/cmd/embed/main.go b/tools/cmd/embed/main.go deleted file mode 100644 index a8cfd97a4..000000000 --- a/tools/cmd/embed/main.go +++ /dev/null @@ -1,178 +0,0 @@ -package main - -import ( - "bytes" - "encoding/json" - "flag" - "fmt" - "log" - "os" - "path/filepath" - "strings" - "text/template" - "unsafe" - - "github.com/pkg/errors" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/imports" - "gopkg.in/yaml.v2" -) - -func main() { - log.SetPrefix("idgen: ") - log.SetFlags(0) - - wd, err := os.Getwd() - if err != nil { - log.Fatal(err) - } - - if err := run(wd); err != nil { - log.Fatal(err) - } -} - -func run(wd string) error { - input := flag.String("i", "", "input") - output := flag.String("o", "", "output") - name := flag.String("n", "", "const name") - pkgname := flag.String("p", "", "package name") - yamltojson := flag.Bool("yaml2json", false, "convert YAML to JSON") - all := flag.Bool("all", false, "read all files") - flag.Parse() - - if *input == "" { - return errors.New("input option is required") - } - - if *output == "" && !*all { - return errors.New("output option is required") - } - - if *name == "" { - return errors.New("name option is required") - } - - pkgs, err := packages.Load(&packages.Config{Dir: wd}, ".") - if err != nil { - return errors.Wrap(err, "failed to load package") - } - - if *pkgname == "" { - pkgname = &pkgs[0].Name - } - - if *all { - filename, ext := getFileNameExt(*input) - filenames, err := os.ReadDir(".") - if err != nil { - return err - } - for _, f := range filenames { - if 
strings.HasPrefix(f.Name(), filename) && strings.HasSuffix(f.Name(), ext) { - if err != nil { - return errors.Wrap(err, fmt.Sprintf("failed to read file %s", f.Name())) - } - fstr, _ := getFileNameExt(f.Name()) - err = handleOneFile(f.Name(), fstr+"_gen.go", *pkgname, *name+fstr[len(filename):], yamltojson) - if err != nil { - return errors.Wrap(err, fmt.Sprintf("failed to handle file %s", f.Name())) - } - } - } - } else { - return handleOneFile(*input, *output, *pkgname, *name, yamltojson) - } - return nil -} - -type templateData struct { - PackageName string - Name string - Content string -} - -var templ = template.Must(template.New("generated").Parse(` -// Code generated by github.com/reearth/reearth-backend/tools/cmd/embed, DO NOT EDIT. - -package {{.PackageName}} - -const {{.Name}} string = ` + "`{{.Content}}`" + ``)) - -// https://stackoverflow.com/questions/40737122/convert-yaml-to-json-without-struct -func convert(i interface{}) interface{} { - switch x := i.(type) { - case map[interface{}]interface{}: - m2 := map[string]interface{}{} - for k, v := range x { - m2[k.(string)] = convert(v) - } - return m2 - case []interface{}: - for i, v := range x { - x[i] = convert(v) - } - } - return i -} - -func yaml2json(content []byte) ([]byte, error) { - var y interface{} - if err := yaml.Unmarshal([]byte(content), &y); err != nil { - return nil, errors.Wrap(err, "failed to parse YAML") - } - y = convert(y) - b, err := json.Marshal(&y) - if err != nil { - return nil, errors.Wrap(err, "failed to marhsal JSON") - } - return b, nil -} - -func processAndWriteOneFile(data templateData, output string) error { - buf := &bytes.Buffer{} - - if err := templ.Execute(buf, data); err != nil { - return errors.Wrap(err, "unable to generate code") - } - - src, err := imports.Process("", buf.Bytes(), nil) - if err != nil { - return errors.Wrap(err, "unable to gofmt") - } - - err = os.WriteFile(output, src, 0644) - if err != nil { - return errors.Wrap(err, "unable to write file") - } - 
return nil -} - -func handleOneFile(input, output, pkgname, name string, yamltojson *bool) error { - content, err := os.ReadFile(input) - if err != nil { - return errors.Wrap(err, "failed to read file") - } - - if yamltojson != nil && *yamltojson { - content, err = yaml2json(content) - if err != nil { - return errors.Wrap(err, "failed to read file") - } - } - - contentstr := *(*string)(unsafe.Pointer(&content)) - - data := templateData{ - PackageName: pkgname, - Name: name, - Content: strings.ReplaceAll(contentstr, "`", "` + \"`\" + `"), - } - return processAndWriteOneFile(data, output) -} - -func getFileNameExt(input string) (string, string) { - ext := filepath.Ext(input) - fname := input[0 : len(input)-len(ext)] - return fname, ext -} From 78ac136e2da6822362cb825f5551eb1ede9d169d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Sat, 31 Jul 2021 04:37:51 +0900 Subject: [PATCH 058/253] fix: plugin manifest parser bugs (#32) --- go.mod | 8 +- go.sum | 27 +- pkg/i18n/string.go | 6 + pkg/i18n/string_test.go | 8 + pkg/plugin/builder.go | 6 + pkg/plugin/builder_test.go | 4 +- pkg/plugin/manifest/convert.go | 7 +- pkg/plugin/manifest/convert_test.go | 2 +- pkg/plugin/manifest/parser.go | 2 +- pkg/plugin/manifest/parser_test.go | 157 +++++--- pkg/plugin/manifest/parser_translation.go | 2 +- .../manifest/parser_translation_test.go | 347 +++++------------- pkg/plugin/manifest/testdata/minimum.yml | 2 + pkg/plugin/manifest/testdata/test.yml | 21 ++ pkg/plugin/manifest/testdata/translation.yml | 28 ++ .../manifest/testdata/translation_merge.yml | 34 ++ pkg/property/schema_group_builder.go | 7 +- pkg/property/schema_group_test.go | 2 +- pkg/property/value_type.go | 20 + 19 files changed, 340 insertions(+), 350 deletions(-) create mode 100644 pkg/plugin/manifest/testdata/minimum.yml create mode 100644 pkg/plugin/manifest/testdata/test.yml create mode 100644 pkg/plugin/manifest/testdata/translation.yml create mode 100644 pkg/plugin/manifest/testdata/translation_merge.yml diff --git 
a/go.mod b/go.mod index 08d88a502..475bfe636 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,9 @@ require ( github.com/blang/semver v3.5.1+incompatible github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect + github.com/fatih/color v1.12.0 // indirect github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect + github.com/goccy/go-yaml v1.9.2 github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/iancoleman/strcase v0.1.3 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d @@ -23,6 +25,7 @@ require ( github.com/klauspost/compress v1.10.10 // indirect github.com/labstack/echo/v4 v4.2.1 github.com/labstack/gommon v0.3.0 + github.com/mattn/go-isatty v0.0.13 // indirect github.com/mitchellh/mapstructure v1.4.1 github.com/oklog/ulid v1.3.1 github.com/opentracing/opentracing-go v1.2.0 // indirect @@ -44,13 +47,14 @@ require ( go.opentelemetry.io/otel v0.7.0 go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c // indirect + golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect golang.org/x/text v0.3.5 golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/tools v0.1.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.0 - gopkg.in/yaml.v2 v2.4.0 - gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) go 1.16 diff --git a/go.sum b/go.sum index c45f44bb6..5d523b567 100644 --- a/go.sum +++ b/go.sum @@ -109,6 +109,9 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate 
v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= +github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= @@ -120,6 +123,13 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= +github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod 
h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= @@ -147,6 +157,8 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/goccy/go-yaml v1.9.2 h1:2Njwzw+0+pjU2gb805ZC1B/uBuAs2VcZ3K+ZgHwDs7w= +github.com/goccy/go-yaml v1.9.2/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -287,6 +299,8 @@ github.com/labstack/echo/v4 v4.2.1 h1:LF5Iq7t/jrtUuSutNuiEWtB5eiHfZ5gSe2pcu5exjQ github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= @@ -294,12 +308,14 @@ github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIG github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable 
v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.7 h1:bQGKb3vps/j0E9GfJQ03JyhRuxsvdAanXlT9BTw3mdw= github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.13 h1:qdl+GuBjcsKKDco5BsxPJlId98mSWNKqYA+Co0SC1yA= +github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= @@ -611,8 +627,9 @@ golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210314195730-07df6a141424/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4 h1:EZ2mChiOa8udjfp6rRmswTbtZN/QzUQp4ptM4rnjHvc= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -817,8 +834,8 @@ gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/pkg/i18n/string.go b/pkg/i18n/string.go index a6d0dfb94..f4bc4dde9 100644 --- a/pkg/i18n/string.go +++ b/pkg/i18n/string.go @@ -3,6 +3,9 @@ package i18n type String map[string]string // key should use BCP 47 representation func StringFrom(s string) String { + if s == "" { + return nil + } return String{"en": s} } @@ -19,6 +22,9 @@ func (s String) 
Translated(lang ...string) string { } func (s String) Copy() String { + if s == nil { + return nil + } s2 := make(String, len(s)) for k, v := range s { s2[k] = v diff --git a/pkg/i18n/string_test.go b/pkg/i18n/string_test.go index 263478334..ca154298d 100644 --- a/pkg/i18n/string_test.go +++ b/pkg/i18n/string_test.go @@ -67,6 +67,7 @@ func TestStringTranslated(t *testing.T) { func TestStringFrom(t *testing.T) { assert.Equal(t, String{"en": "foo"}, StringFrom("foo")) + assert.Nil(t, String(nil), StringFrom("")) } func TestStringCopy(t *testing.T) { @@ -82,12 +83,19 @@ func TestStringCopy(t *testing.T) { Name: "empty String", SourceString: String{}, }, + { + Name: "nil", + SourceString: nil, + }, } for _, tc := range testCases { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() assert.True(tt, reflect.DeepEqual(tc.SourceString, tc.SourceString.Copy())) + if tc.SourceString == nil { + assert.Nil(tt, tc.SourceString.Copy()) + } }) } } diff --git a/pkg/plugin/builder.go b/pkg/plugin/builder.go index bc6117113..44e45910d 100644 --- a/pkg/plugin/builder.go +++ b/pkg/plugin/builder.go @@ -52,6 +52,12 @@ func (b *Builder) RepositoryURL(repositoryURL string) *Builder { } func (b *Builder) Extensions(extensions []*Extension) *Builder { + if len(extensions) == 0 { + b.p.extensions = nil + b.p.extensionOrder = nil + return b + } + b.p.extensions = make(map[id.PluginExtensionID]*Extension, len(extensions)) b.p.extensionOrder = make([]id.PluginExtensionID, 0, len(extensions)) for _, e := range extensions { diff --git a/pkg/plugin/builder_test.go b/pkg/plugin/builder_test.go index 36b13cf25..81b17ee76 100644 --- a/pkg/plugin/builder_test.go +++ b/pkg/plugin/builder_test.go @@ -125,12 +125,11 @@ func TestBuilder_Build(t *testing.T) { Schema(tc.schema). Author(tc.author). 
Build() - if err == nil { + if tc.err == nil { assert.Equal(tt, tc.expected, p) } else { assert.True(tt, errors.As(tc.err, &err)) } - }) } } @@ -186,7 +185,6 @@ func TestBuilder_MustBuild(t *testing.T) { Author(tc.author). MustBuild() assert.Equal(tt, tc.expected, p) - }) } } diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index a30cbf50e..14c89e9a5 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -22,10 +22,6 @@ func (i *Root) manifest() (*Manifest, error) { } } - if i.Title == "" { - i.Title = string(i.ID) - } - var pluginSchema *property.Schema if i.Schema != nil { schema, err := i.Schema.schema(pid, "@") @@ -193,8 +189,7 @@ func (i PropertySchemaGroup) schemaGroup(sid id.PropertySchemaID) (*property.Sch title := i.Title var representativeField *id.PropertySchemaFieldID if i.RepresentativeField != nil { - representativeField2 := id.PropertySchemaFieldID(*i.RepresentativeField) - representativeField = &representativeField2 + representativeField = id.PropertySchemaFieldID(*i.RepresentativeField).Ref() } // fields diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 33830295e..2e45745e5 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -88,7 +88,7 @@ func TestManifest(t *testing.T) { { name: "success empty name", root: &Root{ - Title: "", + Title: "reearth", ID: "reearth", System: true, }, diff --git a/pkg/plugin/manifest/parser.go b/pkg/plugin/manifest/parser.go index 3de1d74af..eb3b1802d 100644 --- a/pkg/plugin/manifest/parser.go +++ b/pkg/plugin/manifest/parser.go @@ -6,7 +6,7 @@ import ( "errors" "io" - "gopkg.in/yaml.v2" + "github.com/goccy/go-yaml" ) var ( diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 8a9dda86c..798dd63c4 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -1,52 +1,92 @@ package manifest import ( - "errors" - "io" 
+ _ "embed" "strings" "testing" + "github.com/reearth/reearth-backend/pkg/i18n" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/visualizer" "github.com/stretchr/testify/assert" ) +//go:embed testdata/minimum.yml +var minimum string +var minimumExpected = &Manifest{ + Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), + ExtensionSchema: []*property.Schema{}, + Schema: nil, +} + +//go:embed testdata/test.yml +var normal string +var normalExpected = &Manifest{ + Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).Name(i18n.StringFrom("bbb")).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(id.PluginExtensionID("hoge")). + Visualizer(visualizer.VisualizerCesium). + Type(plugin.ExtensionTypePrimitive). + Schema(id.MustPropertySchemaID("aaa#1.1.1/hoge")). + MustBuild(), + }).MustBuild(), + ExtensionSchema: []*property.Schema{ + property.NewSchema().ID(id.MustPropertySchemaID("aaa#1.1.1/hoge")).Groups([]*property.SchemaGroup{ + property.NewSchemaGroup().ID(id.PropertySchemaFieldID("default")). + Schema(id.MustPropertySchemaID("aaa#1.1.1/hoge")). + RepresentativeField(id.PropertySchemaFieldID("a").Ref()). + Fields([]*property.SchemaField{ + property.NewSchemaField().ID(id.PropertySchemaFieldID("a")). + Type(property.ValueTypeBool). + DefaultValue(property.ValueTypeBool.MustBeValue(true)). + IsAvailableIf(&property.Condition{ + Field: id.PropertySchemaFieldID("b"), + Value: property.ValueTypeNumber.MustBeValue(1), + }). + MustBuild(), + property.NewSchemaField().ID(id.PropertySchemaFieldID("b")). + Type(property.ValueTypeNumber). 
+ MustBuild(), + }).MustBuild(), + }).MustBuild(), + }, + Schema: nil, +} + func TestParse(t *testing.T) { testCases := []struct { name string - input io.Reader + input string expected *Manifest err error }{ { - name: "success create manifest", - input: strings.NewReader(`{ - "id": "aaa", - "title": "bbb", - "version": "1.1.1" - }`), - expected: &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), - ExtensionSchema: []*property.Schema{}, - Schema: nil, - }, - err: nil, + name: "success create simple manifest", + input: minimum, + expected: minimumExpected, + err: nil, + }, + { + name: "success create manifest", + input: normal, + expected: normalExpected, + err: nil, }, { name: "fail not valid JSON", - input: strings.NewReader(""), + input: "", expected: nil, err: ErrFailedToParseManifest, }, { name: "fail system manifest", - input: strings.NewReader(`{ - "system":true, - "id": "reearth", - "title": "bbb", - "version": "1.1.1" - }`), + input: `{ + "system": true, + "id": "reearth", + "title": "bbb", + "version": "1.1.1" + }`, expected: nil, err: ErrSystemManifest, }, @@ -56,13 +96,15 @@ func TestParse(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - m, err := Parse(tc.input) - if err == nil { - assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) - } else { - assert.Equal(t, tc.err, err) + m, err := Parse(strings.NewReader(tc.input)) + if tc.err == nil { + if !assert.NoError(tt, err) { + return + } + assert.Equal(tt, tc.expected, m) + return } + assert.ErrorIs(tt, tc.err, err) }) } @@ -75,18 +117,16 @@ func TestParseSystemFromBytes(t *testing.T) { err error }{ { - name: "success create manifest", - input: `{ - "id": "aaa", - "title": "bbb", - "version": "1.1.1" - }`, - expected: &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), - ExtensionSchema: []*property.Schema{}, - Schema: nil, - }, - err: nil, + name: "success create 
simple manifest", + input: minimum, + expected: minimumExpected, + err: nil, + }, + { + name: "success create manifest", + input: normal, + expected: normalExpected, + err: nil, }, { name: "fail not valid YAML", @@ -101,12 +141,14 @@ func TestParseSystemFromBytes(t *testing.T) { t.Run(tc.name, func(tt *testing.T) { tt.Parallel() m, err := ParseSystemFromBytes([]byte(tc.input)) - if err == nil { - assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(t, m.Plugin.Name(), m.Plugin.Name()) - } else { - assert.True(t, errors.Is(tc.err, err)) + if tc.err == nil { + if !assert.NoError(tt, err) { + return + } + assert.Equal(tt, tc.expected, m) + return } + assert.ErrorIs(tt, tc.err, err) }) } } @@ -118,18 +160,16 @@ func TestMustParseSystemFromBytes(t *testing.T) { err error }{ { - name: "success create manifest", - input: `{ - "id": "aaa", - "name": "bbb", - "version": "1.1.1" - }`, - expected: &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), - ExtensionSchema: []*property.Schema{}, - Schema: nil, - }, - err: nil, + name: "success create simple manifest", + input: minimum, + expected: minimumExpected, + err: nil, + }, + { + name: "success create manifest", + input: normal, + expected: normalExpected, + err: nil, }, { name: "fail not valid JSON", @@ -152,8 +192,7 @@ func TestMustParseSystemFromBytes(t *testing.T) { } m := MustParseSystemFromBytes([]byte(tc.input)) - assert.Equal(tt, tc.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(tt, m.Plugin.Name(), m.Plugin.Name()) + assert.Equal(tt, m, tc.expected) }) } } diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 7c9780f4c..59b77e62f 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -7,9 +7,9 @@ import ( "errors" "io" + "github.com/goccy/go-yaml" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" - "gopkg.in/yaml.v2" ) var 
( diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index 9366083ee..2b1e26979 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -1,47 +1,46 @@ package manifest import ( - "errors" + _ "embed" "strings" "testing" "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) -const translatedManifest = `{ - "description": "test plugin desc", - "title": "test plugin name", - "extensions": { - "test_ext": { - "title": "test ext name", - "propertySchema": { - "test_ps": { - "description": "test ps desc", - "title": "test ps title", - "fields": { - "test_field": { - "title": "test field name", - "description": "test field desc", - "choices": { - "test_key": "test choice value" - } - } - } - } - } - } - } -}` +//go:embed testdata/translation.yml +var translatedManifest string +var expected = &TranslationRoot{ + Description: sr("test plugin desc"), + Extensions: map[string]TranslationExtension{ + "test_ext": { + Title: sr("test ext name"), + PropertySchema: TranslationPropertySchema{ + "test_ps": TranslationPropertySchemaGroup{ + Description: sr("test ps desc"), + Fields: map[string]TranslationPropertySchemaField{ + "test_field": { + Choices: map[string]string{ + "test_key": "test choice value"}, + Description: sr("test field desc"), + Title: sr("test field name"), + }, + }, + Title: sr("test ps title"), + }, + }, + }, + }, + Title: sr("test plugin name"), + Schema: nil, +} + +//go:embed testdata/translation_merge.yml +var mergeManifest string func TestParseTranslation(t *testing.T) { - desc := "test plugin desc" - name := "test plugin name" - ext_name := "test ext name" - ps_title := "test ps title" - ps_desc := "test ps desc" - psf_desc := "test field desc" - psf_name := "test field name" testCases := []struct { name string input string @@ -49,39 +48,16 @@ func 
TestParseTranslation(t *testing.T) { err error }{ { - name: "success create translation", - input: translatedManifest, - expected: &TranslationRoot{ - Description: &desc, - Extensions: map[string]TranslationExtension{ - "test_ext": { - Title: &ext_name, - PropertySchema: TranslationPropertySchema{ - "test_ps": TranslationPropertySchemaGroup{ - Description: &ps_desc, - Fields: map[string]TranslationPropertySchemaField{ - "test_field": { - Choices: map[string]string{ - "test_key": "test choice value"}, - Description: &psf_desc, - Title: &psf_name, - }, - }, - Title: &ps_title, - }, - }, - }, - }, - Title: &name, - Schema: nil, - }, - err: nil, + name: "success create translation", + input: translatedManifest, + expected: expected, + err: nil, }, { name: "fail not valid JSON", input: "", expected: nil, - err: ErrFailedToParseManifest, + err: ErrFailedToParseManifestTranslation, }, } @@ -91,47 +67,16 @@ func TestParseTranslation(t *testing.T) { tt.Parallel() r := strings.NewReader(tc.input) res, err := ParseTranslation(r) - if err == nil { - assert.Equal(tt, *tc.expected.Title, *res.Title) - assert.Equal(tt, *res.Description, *tc.expected.Description) - assert.Equal(tt, res.Schema, tc.expected.Schema) - if len(res.Extensions) > 0 { - for k, v := range res.Extensions { - assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) - if len(v.PropertySchema) > 0 { - for kk, vv := range v.PropertySchema { - assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) - assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) - if len(vv.Fields) > 0 { - for kkk, vvv := range vv.Fields { - assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) - assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) - if len(vvv.Choices) > 0 { - for kkkk, vvvv := range vvv.Choices { - assert.Equal(tt, vvvv, 
tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) - } - } - } - } - } - } - } - } - } else { - assert.True(tt, errors.As(tc.err, &err)) + if tc.err != nil { + assert.ErrorIs(tt, err, tc.err) + return } + assert.Equal(tt, tc.expected, res) }) } } func TestParseTranslationFromBytes(t *testing.T) { - desc := "test plugin desc" - name := "test plugin name" - ext_name := "test ext name" - ps_title := "test ps title" - ps_desc := "test ps desc" - psf_desc := "test field desc" - psf_name := "test field name" testCases := []struct { name string input string @@ -139,33 +84,10 @@ func TestParseTranslationFromBytes(t *testing.T) { err error }{ { - name: "success create translation", - input: translatedManifest, - expected: &TranslationRoot{ - Description: &desc, - Extensions: map[string]TranslationExtension{ - "test_ext": { - Title: &ext_name, - PropertySchema: TranslationPropertySchema{ - "test_ps": TranslationPropertySchemaGroup{ - Description: &ps_desc, - Fields: map[string]TranslationPropertySchemaField{ - "test_field": { - Choices: map[string]string{ - "test_key": "test choice value"}, - Description: &psf_desc, - Title: &psf_name, - }, - }, - Title: &ps_title, - }, - }, - }, - }, - Title: &name, - Schema: nil, - }, - err: nil, + name: "success create translation", + input: translatedManifest, + expected: expected, + err: nil, }, { name: "fail not valid YAML", @@ -180,47 +102,16 @@ func TestParseTranslationFromBytes(t *testing.T) { t.Run(tc.name, func(tt *testing.T) { tt.Parallel() res, err := ParseTranslationFromBytes([]byte(tc.input)) - if err == nil { - assert.Equal(tt, *tc.expected.Title, *res.Title) - assert.Equal(tt, *res.Description, *tc.expected.Description) - assert.Equal(tt, res.Schema, tc.expected.Schema) - if len(res.Extensions) > 0 { - for k, v := range res.Extensions { - assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) - if len(v.PropertySchema) > 0 { - for kk, vv := range v.PropertySchema { - assert.Equal(tt, 
*vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) - assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) - if len(vv.Fields) > 0 { - for kkk, vvv := range vv.Fields { - assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) - assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) - if len(vvv.Choices) > 0 { - for kkkk, vvvv := range vvv.Choices { - assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) - } - } - } - } - } - } - } - } - } else { - assert.True(tt, errors.Is(tc.err, err)) + if tc.err != nil { + assert.ErrorIs(tt, err, tc.err) + return } + assert.Equal(tt, tc.expected, res) }) } } func TestMustParseTransSystemFromBytes(t *testing.T) { - desc := "test plugin desc" - name := "test plugin name" - ext_name := "test ext name" - ps_title := "test ps title" - ps_desc := "test ps desc" - psf_desc := "test field desc" - psf_name := "test field name" testCases := []struct { name string input string @@ -228,33 +119,10 @@ func TestMustParseTransSystemFromBytes(t *testing.T) { err error }{ { - name: "success create translation", - input: translatedManifest, - expected: &TranslationRoot{ - Description: &desc, - Extensions: map[string]TranslationExtension{ - "test_ext": { - Title: &ext_name, - PropertySchema: TranslationPropertySchema{ - "test_ps": TranslationPropertySchemaGroup{ - Description: &ps_desc, - Fields: map[string]TranslationPropertySchemaField{ - "test_field": { - Choices: map[string]string{ - "test_key": "test choice value"}, - Description: &psf_desc, - Title: &psf_name, - }, - }, - Title: &ps_title, - }, - }, - }, - }, - Title: &name, - Schema: nil, - }, - err: nil, + name: "success create translation", + input: translatedManifest, + expected: expected, + err: nil, }, { name: "fail not valid YAML", @@ -277,91 +145,36 @@ func TestMustParseTransSystemFromBytes(t *testing.T) 
{ } res := MustParseTranslationFromBytes([]byte(tc.input)) - assert.Equal(tt, *tc.expected.Title, *res.Title) - assert.Equal(tt, *res.Description, *tc.expected.Description) - assert.Equal(tt, res.Schema, tc.expected.Schema) - if len(res.Extensions) > 0 { - for k, v := range res.Extensions { - assert.Equal(tt, *v.Title, *tc.expected.Extensions[k].Title) - if len(v.PropertySchema) > 0 { - for kk, vv := range v.PropertySchema { - assert.Equal(tt, *vv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Description) - assert.Equal(tt, *vv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Title) - if len(vv.Fields) > 0 { - for kkk, vvv := range vv.Fields { - assert.Equal(tt, *vvv.Description, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Description) - assert.Equal(tt, *vvv.Title, *tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Title) - if len(vvv.Choices) > 0 { - for kkkk, vvvv := range vvv.Choices { - assert.Equal(tt, vvvv, tc.expected.Extensions[k].PropertySchema[kk].Fields[kkk].Choices[kkkk]) - } - } - } - } - } - } - } - } + assert.Equal(tt, tc.expected, res) }) } } func TestMergeManifestTranslation(t *testing.T) { - const manifest = `{ - "id": "xxx", - "title": "aaa", - "version": "1.1.1", - "description": "ddd", - "extensions": [ - { - "id": "test_ext", - "title": "ttt", - "visualizer": "cesium", - "type": "primitive", - "schema": { - "groups": [ - { - "id": "test_ps", - "title": "sss", - "fields": [ - { - "id": "test_field", - "title": "nnn", - "type": "string", - "description": "kkk" - } - ] - } - ] - } - } - ] -}` - testCases := []struct { - name string - tl map[string]*TranslationRoot - m *Manifest - Expected *struct { - PluginName, PluginDesc, ExtName, PsTitle, FieldName, FieldDesc i18n.String + Name string + Translations map[string]*TranslationRoot + Manifest *Manifest + Expected *struct { + PluginName, PluginDesc, ExtName, PsTitle, FieldTitle, FieldDesc i18n.String } }{ { - name: "nil translition list", - tl: nil, - m: nil, - 
Expected: nil, + Name: "nil translition list", + Translations: nil, + Manifest: nil, + Expected: nil, }, { - name: "nil translition list", - tl: map[string]*TranslationRoot{"xx": MustParseTranslationFromBytes([]byte(translatedManifest))}, - m: MustParseSystemFromBytes([]byte(manifest)), - Expected: &struct{ PluginName, PluginDesc, ExtName, PsTitle, FieldName, FieldDesc i18n.String }{ + Name: "nil translition list", + Translations: map[string]*TranslationRoot{"xx": MustParseTranslationFromBytes([]byte(translatedManifest))}, + Manifest: MustParseSystemFromBytes([]byte(mergeManifest)), + Expected: &struct{ PluginName, PluginDesc, ExtName, PsTitle, FieldTitle, FieldDesc i18n.String }{ PluginName: i18n.String{"en": "aaa", "xx": "test plugin name"}, PluginDesc: i18n.String{"en": "ddd", "xx": "test plugin desc"}, ExtName: i18n.String{"en": "ttt", "xx": "test ext name"}, PsTitle: i18n.String{"en": "sss", "xx": "test ps title"}, - FieldName: i18n.String{"en": "nnn", "xx": "test field name"}, + FieldTitle: i18n.String{"en": "nnn", "xx": "test field name"}, FieldDesc: i18n.String{"en": "kkk", "xx": "test field desc"}, }, }, @@ -369,19 +182,23 @@ func TestMergeManifestTranslation(t *testing.T) { for _, tc := range testCases { tc := tc - t.Run(tc.name, func(tt *testing.T) { + t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res := MergeManifestTranslation(tc.m, tc.tl) - if res == nil { - assert.Nil(tt, tc.Expected) - } else { - assert.Equal(tt, tc.Expected.PluginDesc, res.Plugin.Description()) - assert.Equal(tt, tc.Expected.PluginName, res.Plugin.Name()) - //assert.Equal(tt, tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) - //assert.Equal(tt, tc.Expected.FieldName, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Name()) - //assert.Equal(tt, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) - //assert.Equal(tt, tc.Expected.ExtName, res.ExtensionSchema[0]) + res := 
MergeManifestTranslation(tc.Manifest, tc.Translations) + if tc.Expected == nil { + assert.Nil(tt, res) + return } + assert.Equal(tt, tc.Expected.PluginName, res.Plugin.Name()) + assert.Equal(tt, tc.Expected.PluginDesc, res.Plugin.Description()) + assert.Equal(tt, tc.Expected.ExtName, res.Plugin.Extension(id.PluginExtensionID("test_ext")).Name()) + assert.Equal(tt, tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) + assert.Equal(tt, tc.Expected.FieldTitle, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Title()) + assert.Equal(tt, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) }) } } + +func sr(s string) *string { + return &s +} diff --git a/pkg/plugin/manifest/testdata/minimum.yml b/pkg/plugin/manifest/testdata/minimum.yml new file mode 100644 index 000000000..f4db3d5cb --- /dev/null +++ b/pkg/plugin/manifest/testdata/minimum.yml @@ -0,0 +1,2 @@ +id: aaa +version: 1.1.1 diff --git a/pkg/plugin/manifest/testdata/test.yml b/pkg/plugin/manifest/testdata/test.yml new file mode 100644 index 000000000..34b4668c3 --- /dev/null +++ b/pkg/plugin/manifest/testdata/test.yml @@ -0,0 +1,21 @@ +id: aaa +title: bbb +version: 1.1.1 +extensions: + - id: hoge + type: primitive + visualizer: cesium + schema: + groups: + - id: default + representativeField: a + fields: + - id: a + type: bool + defaultValue: true + availableIf: + field: b + type: number + value: 1 + - id: b + type: number diff --git a/pkg/plugin/manifest/testdata/translation.yml b/pkg/plugin/manifest/testdata/translation.yml new file mode 100644 index 000000000..af00d8155 --- /dev/null +++ b/pkg/plugin/manifest/testdata/translation.yml @@ -0,0 +1,28 @@ +{ + 'description': 'test plugin desc', + 'title': 'test plugin name', + 'extensions': + { + 'test_ext': + { + 'title': 'test ext name', + 'propertySchema': + { + 'test_ps': + { + 'description': 'test ps desc', + 'title': 'test ps title', + 'fields': + { + 'test_field': + { + 'title': 
'test field name', + 'description': 'test field desc', + 'choices': { 'test_key': 'test choice value' }, + }, + }, + }, + }, + }, + }, +} diff --git a/pkg/plugin/manifest/testdata/translation_merge.yml b/pkg/plugin/manifest/testdata/translation_merge.yml new file mode 100644 index 000000000..0ac599781 --- /dev/null +++ b/pkg/plugin/manifest/testdata/translation_merge.yml @@ -0,0 +1,34 @@ +{ + 'id': 'xxx', + 'title': 'aaa', + 'version': '1.1.1', + 'description': 'ddd', + 'extensions': + [ + { + 'id': 'test_ext', + 'title': 'ttt', + 'visualizer': 'cesium', + 'type': 'primitive', + 'schema': + { + 'groups': + [ + { + 'id': 'test_ps', + 'title': 'sss', + 'fields': + [ + { + 'id': 'test_field', + 'title': 'nnn', + 'type': 'string', + 'description': 'kkk', + }, + ], + }, + ], + }, + }, + ], +} diff --git a/pkg/property/schema_group_builder.go b/pkg/property/schema_group_builder.go index d26729e00..cef672c15 100644 --- a/pkg/property/schema_group_builder.go +++ b/pkg/property/schema_group_builder.go @@ -73,11 +73,6 @@ func (b *SchemaGroupBuilder) Title(title i18n.String) *SchemaGroupBuilder { } func (b *SchemaGroupBuilder) RepresentativeField(representativeField *id.PropertySchemaFieldID) *SchemaGroupBuilder { - if representativeField == nil { - b.p.representativeField = nil - return b - } - representativeField2 := *representativeField - b.p.representativeField = &representativeField2 + b.p.representativeField = representativeField.CopyRef() return b } diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go index e9ddd01ca..bb37e8596 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -48,7 +48,7 @@ func TestSchemaGroup(t *testing.T) { GID: scid, SID: sid, Fields: []*SchemaField{sf}, - Title: make(i18n.String), + Title: nil, }, }, } diff --git a/pkg/property/value_type.go b/pkg/property/value_type.go index 19f27fe02..1b0ce23fe 100644 --- a/pkg/property/value_type.go +++ b/pkg/property/value_type.go @@ 
-277,10 +277,30 @@ func (t ValueType) ValueFrom(v interface{}) (*Value, bool) { if v3, err := v2.Float64(); err == nil { return &Value{v: v3, t: ValueTypeNumber}, true } + } else if v2, ok := v.(float32); ok { + return &Value{v: v2, t: ValueTypeNumber}, true } else if v2, ok := v.(float64); ok { return &Value{v: v2, t: ValueTypeNumber}, true } else if v2, ok := v.(int); ok { return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(int8); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(int16); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(int32); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(int64); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(uint); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(uint8); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(uint16); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(uint32); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true + } else if v2, ok := v.(uint64); ok { + return &Value{v: float64(v2), t: ValueTypeNumber}, true } case ValueTypeString: if v2, ok := v.(string); ok { From a3f8b6dd74303deaa6974a063f19f27100406f54 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Sat, 31 Jul 2021 04:45:08 +0900 Subject: [PATCH 059/253] refactor: pkg/error (#31) --- internal/app/app.go | 6 +- internal/app/auth.go | 10 +- internal/app/file.go | 4 +- internal/app/graphql.go | 4 +- internal/app/private.go | 6 +- internal/app/published_test.go | 16 +- internal/graphql/resolver_project.go | 4 +- internal/graphql/resolver_property.go | 6 +- internal/infrastructure/adapter/plugin.go | 8 +- .../infrastructure/adapter/property_schema.go | 12 +- internal/infrastructure/fs/archive.go | 12 +- internal/infrastructure/fs/file.go | 20 +-- 
internal/infrastructure/fs/plugin.go | 10 +- .../infrastructure/fs/plugin_repository.go | 4 +- internal/infrastructure/fs/property_schema.go | 16 +- internal/infrastructure/gcs/file.go | 30 ++-- internal/infrastructure/memory/asset.go | 4 +- internal/infrastructure/memory/dataset.go | 4 +- .../infrastructure/memory/dataset_schema.go | 6 +- internal/infrastructure/memory/layer.go | 12 +- internal/infrastructure/memory/plugin.go | 4 +- internal/infrastructure/memory/project.go | 6 +- internal/infrastructure/memory/property.go | 4 +- .../infrastructure/memory/property_schema.go | 4 +- internal/infrastructure/memory/scene.go | 8 +- internal/infrastructure/memory/team.go | 4 +- internal/infrastructure/memory/user.go | 16 +- internal/infrastructure/mongo/asset.go | 4 +- internal/infrastructure/mongo/config.go | 6 +- internal/infrastructure/mongo/dataset.go | 16 +- .../infrastructure/mongo/dataset_schema.go | 6 +- internal/infrastructure/mongo/layer.go | 16 +- .../infrastructure/mongo/migration/client.go | 10 +- .../infrastructure/mongo/mongodoc/client.go | 28 ++-- internal/infrastructure/mongo/plugin.go | 2 +- internal/infrastructure/mongo/project.go | 4 +- internal/infrastructure/mongo/property.go | 6 +- internal/infrastructure/mongo/scene.go | 10 +- internal/infrastructure/mongo/scene_lock.go | 8 +- internal/infrastructure/mongo/team.go | 2 +- internal/infrastructure/mongo/transaction.go | 10 +- internal/infrastructure/npm/archive.go | 4 +- .../infrastructure/npm/plugin_repository.go | 6 +- internal/usecase/interactor/common.go | 4 +- internal/usecase/interactor/dataset.go | 8 +- internal/usecase/interactor/layer.go | 8 +- internal/usecase/interactor/project.go | 6 +- internal/usecase/interactor/published.go | 4 +- internal/usecase/interactor/scene.go | 10 +- internal/usecase/interactor/user.go | 16 +- pkg/error/error.go | 73 --------- pkg/error/error_test.go | 30 ---- pkg/layer/initializer.go | 21 ++- pkg/rerror/error.go | 131 ++++++++++++++++ pkg/rerror/error_test.go 
| 140 ++++++++++++++++++ pkg/scene/sceneops/plugin_installer.go | 20 +-- pkg/scene/sceneops/plugin_migrator.go | 4 +- 57 files changed, 514 insertions(+), 339 deletions(-) delete mode 100644 pkg/error/error.go delete mode 100644 pkg/error/error_test.go create mode 100644 pkg/rerror/error.go create mode 100644 pkg/rerror/error_test.go diff --git a/internal/app/app.go b/internal/app/app.go index a5b2890c9..cdbf1fafb 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -8,8 +8,8 @@ import ( "github.com/labstack/echo/v4" "github.com/labstack/echo/v4/middleware" "github.com/reearth/reearth-backend/internal/adapter/graphql" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" echotracer "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo" ) @@ -116,11 +116,11 @@ func errorMessage(err error, log func(string, ...interface{})) (int, string) { if err2.Internal != nil { log("echo internal err: %+v", err2) } - } else if errors.Is(err, err1.ErrNotFound) { + } else if errors.Is(err, rerror.ErrNotFound) { code = http.StatusNotFound msg = "not found" } else { - var ierr *err1.ErrInternal + var ierr *rerror.ErrInternal if errors.As(err, &ierr) { if err2 := ierr.Unwrap(); err2 != nil { log("internal err: %+v", err2) diff --git a/internal/app/auth.go b/internal/app/auth.go index 8ad615335..3ddf5f68f 100644 --- a/internal/app/auth.go +++ b/internal/app/auth.go @@ -6,8 +6,8 @@ import ( "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/internal/graphql" "github.com/reearth/reearth-backend/internal/usecase" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/user" ) @@ -46,7 +46,7 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { if u == nil && userID != "" { if userID2, err := id.UserIDFrom(userID); err 
== nil { u, err = cfg.Repos.User.FindByID(ctx, userID2) - if err != nil && err != err1.ErrNotFound { + if err != nil && err != rerror.ErrNotFound { return err } } else { @@ -58,7 +58,7 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { var err error // find user u, err = cfg.Repos.User.FindByAuth0Sub(ctx, sub) - if err != nil && err != err1.ErrNotFound { + if err != nil && err != rerror.ErrNotFound { return err } @@ -78,11 +78,11 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { // // } // u, err = cfg.Repos.User.FindByEmail(ctx, data.Email) - // if err != nil && err != err1.ErrNotFound { + // if err != nil && err != rerror.ErrNotFound { // return err // } // if u == nil { - // return err1.ErrUserNotFound + // return rerror.ErrUserNotFound // } // } } diff --git a/internal/app/file.go b/internal/app/file.go index 1d6ba1f03..a496ef287 100644 --- a/internal/app/file.go +++ b/internal/app/file.go @@ -8,8 +8,8 @@ import ( "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/internal/usecase/gateway" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" ) func serveFiles( @@ -51,7 +51,7 @@ func serveFiles( fileHandler(func(ctx echo.Context) (io.Reader, string, error) { pid, err := id.PluginIDFrom(ctx.Param("name") + "#" + ctx.Param("version")) if err != nil { - return nil, "", err1.ErrNotFound + return nil, "", rerror.ErrNotFound } filename := ctx.Param("filename") r, err := repo.ReadPluginFile(ctx.Request().Context(), pid, filename) diff --git a/internal/app/graphql.go b/internal/app/graphql.go index bcb4ea91e..d331f93bc 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -18,7 +18,7 @@ import ( infra_graphql "github.com/reearth/reearth-backend/internal/graphql" "github.com/reearth/reearth-backend/internal/graphql/dataloader" "github.com/reearth/reearth-backend/internal/usecase" - err1 
"github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/rerror" ) const enableDataLoaders = true @@ -105,7 +105,7 @@ func graphqlAPI( // show more detailed error messgage in debug mode func(ctx context.Context, e error) *gqlerror.Error { if conf.Debug { - var ierr *err1.ErrInternal + var ierr *rerror.ErrInternal if errors.As(e, &ierr) { if err2 := ierr.Unwrap(); err2 != nil { // TODO: display stacktrace with xerrors diff --git a/internal/app/private.go b/internal/app/private.go index 8fafd4a31..6b5b376ef 100644 --- a/internal/app/private.go +++ b/internal/app/private.go @@ -11,10 +11,10 @@ import ( "github.com/reearth/reearth-backend/internal/graphql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer/encoding" "github.com/reearth/reearth-backend/pkg/layer/merging" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/user" ) @@ -80,14 +80,14 @@ func privateAPI( } scenes, err := repos.Scene.FindIDsByTeam(ctx, op.ReadableTeams) if err != nil { - if errors.Is(err1.ErrNotFound, err) { + if errors.Is(rerror.ErrNotFound, err) { return &echo.HTTPError{Code: http.StatusNotFound, Message: err} } return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} } layer, err := repos.Layer.FindByID(ctx, lid, scenes) if err != nil { - if errors.Is(err1.ErrNotFound, err) { + if errors.Is(rerror.ErrNotFound, err) { return &echo.HTTPError{Code: http.StatusNotFound, Message: err} } return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} diff --git a/internal/app/published_test.go b/internal/app/published_test.go index 111fdb0ff..1553ec773 100644 --- a/internal/app/published_test.go +++ b/internal/app/published_test.go @@ -12,7 +12,7 @@ import ( "github.com/labstack/echo/v4" 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" - err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/stretchr/testify/assert" ) @@ -31,7 +31,7 @@ func TestPublishedAuthMiddleware(t *testing.T) { BasicAuthPassword: "baar", }, nil } - return interfaces.ProjectPublishedMetadata{}, err1.ErrNotFound + return interfaces.ProjectPublishedMetadata{}, rerror.ErrNotFound })(func(c echo.Context) error { return c.String(http.StatusOK, "test") }) @@ -107,7 +107,7 @@ func TestPublishedData(t *testing.T) { if name == "prj" { return strings.NewReader("aaa"), nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound }) testCases := []struct { @@ -117,12 +117,12 @@ func TestPublishedData(t *testing.T) { }{ { Name: "empty", - Error: err1.ErrNotFound, + Error: rerror.ErrNotFound, }, { Name: "not found", PublishedName: "pr", - Error: err1.ErrNotFound, + Error: rerror.ErrNotFound, }, { Name: "ok", @@ -164,7 +164,7 @@ func TestPublishedIndex(t *testing.T) { }{ { Name: "empty", - Error: err1.ErrNotFound, + Error: rerror.ErrNotFound, }, { Name: "empty index", @@ -174,7 +174,7 @@ func TestPublishedIndex(t *testing.T) { { Name: "not found", PublishedName: "pr", - Error: err1.ErrNotFound, + Error: rerror.ErrNotFound, }, { Name: "ok", @@ -203,7 +203,7 @@ func TestPublishedIndex(t *testing.T) { assert.Equal("http://example.com/aaa/bbb", url.String()) return "index", nil } - return "", err1.ErrNotFound + return "", rerror.ErrNotFound })(c) if tc.Error == nil { diff --git a/internal/graphql/resolver_project.go b/internal/graphql/resolver_project.go index 60f2472e6..8a343aada 100644 --- a/internal/graphql/resolver_project.go +++ b/internal/graphql/resolver_project.go @@ -5,8 +5,8 @@ import ( graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" "github.com/reearth/reearth-backend/internal/graphql/dataloader" - err1 "github.com/reearth/reearth-backend/pkg/error" 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" ) func (r *Resolver) Project() ProjectResolver { @@ -27,7 +27,7 @@ func (r *projectResolver) Scene(ctx context.Context, obj *graphql1.Project) (*gr defer exit() s, err := r.config.Controllers.SceneController.FindByProject(ctx, id.ProjectID(obj.ID), getOperator(ctx)) - if err != nil && err != err1.ErrNotFound { + if err != nil && err != rerror.ErrNotFound { return nil, err } return s, nil diff --git a/internal/graphql/resolver_property.go b/internal/graphql/resolver_property.go index 7b98b0022..0b9429a67 100644 --- a/internal/graphql/resolver_property.go +++ b/internal/graphql/resolver_property.go @@ -6,8 +6,8 @@ import ( graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" "github.com/reearth/reearth-backend/internal/graphql/dataloader" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" ) func (r *Resolver) Property() PropertyResolver { @@ -72,7 +72,7 @@ func (r *propertyResolver) Layer(ctx context.Context, obj *graphql1.Property) (g defer exit() l, err := r.config.Controllers.LayerController.FetchByProperty(ctx, id.PropertyID(obj.ID), getOperator(ctx)) - if err != nil || errors.Is(err, err1.ErrNotFound) { + if err != nil || errors.Is(err, rerror.ErrNotFound) { return nil, nil } return l, err @@ -84,7 +84,7 @@ func (r *propertyResolver) Merged(ctx context.Context, obj *graphql1.Property) ( l, err := r.config.Controllers.LayerController.FetchByProperty(ctx, id.PropertyID(obj.ID), getOperator(ctx)) if err != nil { - if errors.Is(err, err1.ErrNotFound) { + if errors.Is(err, rerror.ErrNotFound) { return nil, nil } return nil, err diff --git a/internal/infrastructure/adapter/plugin.go b/internal/infrastructure/adapter/plugin.go index f38b6cf67..e2992976d 100644 --- a/internal/infrastructure/adapter/plugin.go +++ b/internal/infrastructure/adapter/plugin.go @@ -5,9 
+5,9 @@ import ( "errors" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" ) // TODO: ใ“ใ“ใงๅน…ๅ„ชๅ…ˆๆŽข็ดขใ—ใฆใ„ใใ‚ขใƒซใ‚ดใƒชใ‚บใƒ ใ‚’ๆ›ธใ„ใฆmongoใ‹ใ‚‰ใƒ“ใƒซใƒˆใ‚คใƒณใฎๆคœ็ดขใƒญใ‚ธใƒƒใ‚ฏใ‚’้™คๅŽปใ™ใ‚‹ @@ -27,7 +27,7 @@ func NewPlugin(readers []repo.Plugin, writer repo.Plugin) repo.Plugin { func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { for _, re := range r.readers { if res, err := re.FindByID(ctx, id); err != nil { - if errors.Is(err, err1.ErrNotFound) { + if errors.Is(err, rerror.ErrNotFound) { continue } else { return nil, err @@ -36,14 +36,14 @@ func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plug return res, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { results := make([]*plugin.Plugin, 0, len(ids)) for _, id := range ids { res, err := r.FindByID(ctx, id) - if err != nil && err != err1.ErrNotFound { + if err != nil && err != rerror.ErrNotFound { return nil, err } results = append(results, res) diff --git a/internal/infrastructure/adapter/property_schema.go b/internal/infrastructure/adapter/property_schema.go index c06d800b6..86fe4e3a4 100644 --- a/internal/infrastructure/adapter/property_schema.go +++ b/internal/infrastructure/adapter/property_schema.go @@ -5,9 +5,9 @@ import ( "errors" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" ) // TODO: ใ“ใ“ใงๅน…ๅ„ชๅ…ˆๆŽข็ดขใ—ใฆใ„ใใ‚ขใƒซใ‚ดใƒชใ‚บใƒ 
ใ‚’ๆ›ธใ„ใฆmongoใ‹ใ‚‰ใƒ“ใƒซใƒˆใ‚คใƒณใฎๆคœ็ดขใƒญใ‚ธใƒƒใ‚ฏใ‚’้™คๅŽปใ™ใ‚‹ @@ -27,7 +27,7 @@ func NewPropertySchema(readers []repo.PropertySchema, writer repo.PropertySchema func (r *propertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { for _, re := range r.readers { if res, err := re.FindByID(ctx, id); err != nil { - if errors.Is(err, err1.ErrNotFound) { + if errors.Is(err, rerror.ErrNotFound) { continue } else { return nil, err @@ -36,14 +36,14 @@ func (r *propertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) ( return res, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { results := make(property.SchemaList, 0, len(ids)) for _, id := range ids { res, err := r.FindByID(ctx, id) - if err != nil && err != err1.ErrNotFound { + if err != nil && err != rerror.ErrNotFound { return nil, err } results = append(results, res) @@ -53,14 +53,14 @@ func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaI func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { if r.writer == nil { - return err1.ErrInternalBy(errors.New("writer is not set")) + return rerror.ErrInternalBy(errors.New("writer is not set")) } return r.writer.Save(ctx, p) } func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { if r.writer == nil { - return err1.ErrInternalBy(errors.New("writer is not set")) + return rerror.ErrInternalBy(errors.New("writer is not set")) } return r.writer.SaveAll(ctx, p) } diff --git a/internal/infrastructure/fs/archive.go b/internal/infrastructure/fs/archive.go index 66727f418..64f0f781c 100644 --- a/internal/infrastructure/fs/archive.go +++ b/internal/infrastructure/fs/archive.go @@ -5,8 +5,8 @@ import ( "path" "strings" - err1 "github.com/reearth/reearth-backend/pkg/error" 
"github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/rerror" ) type archive struct { @@ -24,9 +24,9 @@ func NewArchive(p string) (file.Archive, error) { files, size, err := dirwalk(bp, "", 0) if err != nil { if os.IsNotExist(err) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return &archive{ p: bp, @@ -46,12 +46,12 @@ func (a *archive) Next() (f *file.File, derr error) { a.counter++ fi, err := os.Open(path.Join(a.p, next)) if err != nil { - derr = err1.ErrInternalBy(err) + derr = rerror.ErrInternalBy(err) return } stat, err := fi.Stat() if err != nil { - derr = err1.ErrInternalBy(err) + derr = rerror.ErrInternalBy(err) return } @@ -68,7 +68,7 @@ func (a *archive) Next() (f *file.File, derr error) { func (a *archive) Close() error { if a.fi != nil { if err := a.fi.Close(); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } a.fi = nil } diff --git a/internal/infrastructure/fs/file.go b/internal/infrastructure/fs/file.go index ca4d83c5d..c69318623 100644 --- a/internal/infrastructure/fs/file.go +++ b/internal/infrastructure/fs/file.go @@ -9,10 +9,10 @@ import ( "path" "github.com/reearth/reearth-backend/internal/usecase/gateway" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" ) type fileRepo struct { @@ -39,9 +39,9 @@ func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error file, err := os.Open(filename) if err != nil { if os.IsNotExist(err) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return file, nil } @@ -51,9 +51,9 @@ func (f *fileRepo) ReadPluginFile(ctx context.Context, id 
id.PluginID, p string) file, err := os.Open(filename) if err != nil { if os.IsNotExist(err) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return file, nil } @@ -63,9 +63,9 @@ func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Read file, err := os.Open(filename) if err != nil { if os.IsNotExist(err) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return file, nil } @@ -216,9 +216,9 @@ func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) err newfilename, ); err != nil { if errors.Is(err, os.ErrNotExist) { - return err1.ErrNotFound + return rerror.ErrNotFound } - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil @@ -230,7 +230,7 @@ func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { if errors.Is(err, os.ErrNotExist) { return nil } - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } diff --git a/internal/infrastructure/fs/plugin.go b/internal/infrastructure/fs/plugin.go index 1f49c99a3..2286f8c3c 100644 --- a/internal/infrastructure/fs/plugin.go +++ b/internal/infrastructure/fs/plugin.go @@ -7,10 +7,10 @@ import ( "path" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" ) type pluginRepo struct { @@ -30,11 +30,11 @@ func (r *pluginRepo) manifest(ctx context.Context, id id.PluginID) string { func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { filename := r.manifest(ctx, id) if _, err := os.Stat(filename); err != nil { - return nil, 
err1.ErrNotFound + return nil, rerror.ErrNotFound } file, err := os.Open(filename) if err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } defer func() { _ = file.Close() @@ -42,7 +42,7 @@ func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plug m, err := manifest.Parse(file) if err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return m.Plugin, nil @@ -61,5 +61,5 @@ func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugi } func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { - return err1.ErrInternalBy(errors.New("read only")) + return rerror.ErrInternalBy(errors.New("read only")) } diff --git a/internal/infrastructure/fs/plugin_repository.go b/internal/infrastructure/fs/plugin_repository.go index a0ce34f23..1ad928c21 100644 --- a/internal/infrastructure/fs/plugin_repository.go +++ b/internal/infrastructure/fs/plugin_repository.go @@ -6,10 +6,10 @@ import ( "path" "github.com/reearth/reearth-backend/internal/usecase/gateway" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" ) type pluginRepository struct { @@ -42,7 +42,7 @@ func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manif break } if err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } if f.Fullpath == manifestFilePath { m, err := manifest.Parse(f.Content) diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go index 7d6d15c8a..5f891924e 100644 --- a/internal/infrastructure/fs/property_schema.go +++ b/internal/infrastructure/fs/property_schema.go @@ -7,10 +7,10 @@ import ( "path" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 
"github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" ) type propertySchema struct { @@ -30,15 +30,15 @@ func (r *propertySchema) manifest(id id.PluginID) string { func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (*property.Schema, error) { pid, err := id.PluginIDFrom(i.Plugin()) if err != nil { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } filename := r.manifest(pid) if _, err := os.Stat(filename); err != nil { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } file, err2 := os.Open(filename) if err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } defer func() { _ = file.Close() @@ -46,7 +46,7 @@ func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (* m, err := manifest.Parse(file) if err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } if m.Schema != nil && m.Schema.ID() == i { @@ -61,7 +61,7 @@ func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (* } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { @@ -77,9 +77,9 @@ func (r *propertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaI } func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { - return err1.ErrInternalBy(errors.New("read only")) + return rerror.ErrInternalBy(errors.New("read only")) } func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { - return err1.ErrInternalBy(errors.New("read only")) + return rerror.ErrInternalBy(errors.New("read only")) } diff --git a/internal/infrastructure/gcs/file.go 
b/internal/infrastructure/gcs/file.go index fab52a81f..c28a3fb0b 100644 --- a/internal/infrastructure/gcs/file.go +++ b/internal/infrastructure/gcs/file.go @@ -11,11 +11,11 @@ import ( "cloud.google.com/go/storage" "github.com/reearth/reearth-backend/internal/usecase/gateway" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" ) const ( @@ -65,7 +65,7 @@ func (f *fileRepo) bucket(ctx context.Context) (*storage.BucketHandle, error) { func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error) { if name == "" { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } p := path.Join(gcsAssetBasePath, name) @@ -77,16 +77,16 @@ func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error reader, err := bucket.Object(p).NewReader(ctx) if err != nil { if errors.Is(err, storage.ErrObjectNotExist) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return reader, nil } func (f *fileRepo) ReadPluginFile(ctx context.Context, plugin id.PluginID, name string) (io.Reader, error) { if name == "" { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } p := path.Join(gcsPluginBasePath, plugin.Name(), plugin.Version().String(), name) @@ -98,16 +98,16 @@ func (f *fileRepo) ReadPluginFile(ctx context.Context, plugin id.PluginID, name reader, err := bucket.Object(p).NewReader(ctx) if err != nil { if errors.Is(err, storage.ErrObjectNotExist) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return reader, nil } func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Reader, error) { 
if name == "" { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } p := path.Join(gcsMapBasePath, name+".json") @@ -120,9 +120,9 @@ func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Read reader, err := bucket.Object(p).NewReader(ctx) if err != nil { if errors.Is(err, storage.ErrObjectNotExist) { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return reader, nil } @@ -190,7 +190,7 @@ func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { if errors.Is(err, storage.ErrObjectNotExist) { return nil } - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -299,12 +299,12 @@ func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) err destObject := bucket.Object(filename) if _, err := destObject.CopierFrom(object).Run(ctx); err != nil { if errors.Is(err, storage.ErrObjectNotExist) { - return err1.ErrNotFound + return rerror.ErrNotFound } - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } if err := object.Delete(ctx); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -320,7 +320,7 @@ func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { if errors.Is(err, storage.ErrObjectNotExist) { return nil } - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index 2f0fabff7..0c47531e8 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -7,8 +7,8 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/asset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/rerror" ) type Asset struct { @@ -30,7 +30,7 @@ func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, erro if ok { return d, nil } - return &asset.Asset{}, err1.ErrNotFound + return &asset.Asset{}, rerror.ErrNotFound } func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go index f7ada2dea..5f2584cd0 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -7,8 +7,8 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" ) type Dataset struct { @@ -30,7 +30,7 @@ func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID, f []id.SceneID) if ok && isSceneIncludes(p.Scene(), f) { return &p, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { diff --git a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go index 45008e6b4..5e1b4c33d 100644 --- a/internal/infrastructure/memory/dataset_schema.go +++ b/internal/infrastructure/memory/dataset_schema.go @@ -7,8 +7,8 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" ) type DatasetSchema struct { @@ -30,7 +30,7 @@ func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID, f [ if ok { 
return &p, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { @@ -114,7 +114,7 @@ func (r *DatasetSchema) FindDynamicByID(ctx context.Context, id id.DatasetSchema if ok && p.Dynamic() { return &p, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src dataset.Source) (dataset.SchemaList, error) { diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index 1e7ce7012..674313342 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -5,9 +5,9 @@ import ( "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/rerror" ) type Layer struct { @@ -29,7 +29,7 @@ func (r *Layer) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (la if ok && isSceneIncludes(res.Scene(), f) { return res, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { @@ -100,7 +100,7 @@ func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) return li, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { @@ -116,7 +116,7 @@ func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID return lg, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id.SceneID, ds id.DatasetSchemaID) 
(layer.GroupList, error) { @@ -157,7 +157,7 @@ func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID, f []id.Sce } } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { @@ -178,7 +178,7 @@ func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneI } } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List, error) { diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index 74a67eb98..403effda0 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -7,9 +7,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" ) type Plugin struct { @@ -36,7 +36,7 @@ func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, return &p2, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { diff --git a/internal/infrastructure/memory/project.go b/internal/infrastructure/memory/project.go index 54bae3677..cb061fa4d 100644 --- a/internal/infrastructure/memory/project.go +++ b/internal/infrastructure/memory/project.go @@ -7,9 +7,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" ) type 
Project struct { @@ -76,7 +76,7 @@ func (r *Project) FindByID(ctx context.Context, id id.ProjectID, filter []id.Tea if ok && isTeamIncludes(p.Team(), filter) { return p, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Project) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { @@ -91,7 +91,7 @@ func (r *Project) FindByPublicName(ctx context.Context, name string) (*project.P return p, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err error) { diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index 4ece98327..38f25fcf3 100644 --- a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -4,9 +4,9 @@ import ( "context" "sync" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/internal/usecase/repo" ) @@ -30,7 +30,7 @@ func (r *Property) FindByID(ctx context.Context, id id.PropertyID, f []id.SceneI if ok && isSceneIncludes(p.Scene(), f) { return &p, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index 3c2cda5ad..7565cbc55 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -7,9 +7,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" 
"github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" ) type PropertySchema struct { @@ -34,7 +34,7 @@ func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) ( if ok { return &p, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index 616b89202..2dedfd3cb 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -5,8 +5,8 @@ import ( "sync" "time" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/internal/usecase/repo" @@ -31,7 +31,7 @@ func (r *Scene) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*sc if ok && isTeamIncludes(s.Team(), f) { return &s, nil } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) ([]*scene.Scene, error) { @@ -61,7 +61,7 @@ func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamI return &d, nil } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Scene) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) { @@ -83,7 +83,7 @@ func (r *Scene) HasSceneTeam(ctx context.Context, id id.SceneID, teams []id.Team s, ok := r.data[id] if !ok { - return false, err1.ErrNotFound + return false, rerror.ErrNotFound } return s.IsTeamIncluded(teams), nil } diff --git a/internal/infrastructure/memory/team.go b/internal/infrastructure/memory/team.go index 9f0b5addc..9d2f46e66 100644 --- a/internal/infrastructure/memory/team.go +++ 
b/internal/infrastructure/memory/team.go @@ -5,8 +5,8 @@ import ( "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/user" ) @@ -57,7 +57,7 @@ func (r *Team) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { if ok { return &d, nil } - return &user.Team{}, err1.ErrNotFound + return &user.Team{}, rerror.ErrNotFound } func (r *Team) Save(ctx context.Context, t *user.Team) error { diff --git a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go index 34ae4b1dc..76d4383f8 100644 --- a/internal/infrastructure/memory/user.go +++ b/internal/infrastructure/memory/user.go @@ -5,8 +5,8 @@ import ( "sync" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/user" ) @@ -44,7 +44,7 @@ func (r *User) FindByID(ctx context.Context, id id.UserID) (*user.User, error) { if ok { return &d, nil } - return &user.User{}, err1.ErrNotFound + return &user.User{}, rerror.ErrNotFound } func (r *User) Save(ctx context.Context, u *user.User) error { @@ -60,7 +60,7 @@ func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, defer r.lock.Unlock() if auth0sub == "" { - return nil, err1.ErrInvalidParams + return nil, rerror.ErrInvalidParams } for _, u := range r.data { @@ -69,7 +69,7 @@ func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error) { @@ -77,7 +77,7 @@ func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error defer r.lock.Unlock() if 
email == "" { - return nil, err1.ErrInvalidParams + return nil, rerror.ErrInvalidParams } for _, u := range r.data { @@ -86,7 +86,7 @@ func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { @@ -94,7 +94,7 @@ func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user defer r.lock.Unlock() if nameOrEmail == "" { - return nil, err1.ErrInvalidParams + return nil, rerror.ErrInvalidParams } for _, u := range r.data { @@ -103,7 +103,7 @@ func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user } } - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } func (r *User) Remove(ctx context.Context, user id.UserID) error { diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index 12ed59d7a..6cb76d0c6 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -9,9 +9,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/asset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" ) type assetRepo struct { @@ -35,7 +35,7 @@ func (r *assetRepo) paginate(ctx context.Context, filter bson.D, pagination *use var c mongodoc.AssetConsumer pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) if err2 != nil { - return nil, nil, err1.ErrInternalBy(err2) + return nil, nil, rerror.ErrInternalBy(err2) } return c.Rows, pageInfo, nil } diff --git a/internal/infrastructure/mongo/config.go b/internal/infrastructure/mongo/config.go index 4b562a5bd..53a2108a9 100644 --- a/internal/infrastructure/mongo/config.go +++ 
b/internal/infrastructure/mongo/config.go @@ -7,7 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/config" - err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/rerror" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" ) @@ -28,7 +28,7 @@ func (r *configRepo) Load(ctx context.Context) (*config.Config, error) { if errors.Is(err, mongo.ErrNoDocuments) { return cfg, nil } - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return cfg, nil } @@ -40,7 +40,7 @@ func (r *configRepo) Save(ctx context.Context, cfg *config.Config) error { if _, err := r.client.Collection().UpdateOne(ctx, nil, cfg, &options.UpdateOptions{ Upsert: &upsert, }); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index 7921ef995..1b8e1e1a8 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -9,9 +9,9 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/internal/usecase/repo" ) @@ -212,7 +212,7 @@ func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc cursor, err2 := r.client.Collection().Aggregate(ctx, pipeline) if err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } defer func() { _ = cursor.Close(ctx) @@ -220,7 +220,7 @@ func (r 
*datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc doc := mongodoc.DatasetExtendedDocument{} if err2 := bson.Unmarshal(cursor.Current, &doc); err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } docs := make([]*mongodoc.DatasetExtendedDocument, 0, len(fields)) for i := 0; i < len(fields); i++ { @@ -239,11 +239,11 @@ func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc res := make(dataset.List, 0, len(docs)) for i, d := range docs { if i > 0 && i-1 != d.Depth { - return nil, err1.ErrInternalBy(errors.New("invalid order")) + return nil, rerror.ErrInternalBy(errors.New("invalid order")) } ds, err2 := d.DatasetDocument.Model() if err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } res = append(res, ds) } @@ -280,7 +280,7 @@ func (r *datasetRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) err } _, err := r.client.Collection().DeleteMany(ctx, filter) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -289,7 +289,7 @@ func (r *datasetRepo) paginate(ctx context.Context, filter bson.D, pagination *u var c mongodoc.DatasetConsumer pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) if err2 != nil { - return nil, nil, err1.ErrInternalBy(err2) + return nil, nil, rerror.ErrInternalBy(err2) } return c.Rows, pageInfo, nil } @@ -299,7 +299,7 @@ func (r *datasetRepo) find(ctx context.Context, dst dataset.List, filter bson.D) Rows: dst, } if err2 := r.client.Find(ctx, filter, &c); err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } return c.Rows, nil } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index 3fc3625ed..e4887edb7 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -9,9 +9,9 @@ import ( 
"github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" ) type datasetSchemaRepo struct { @@ -120,7 +120,7 @@ func (r *datasetSchemaRepo) RemoveByScene(ctx context.Context, sceneID id.SceneI } _, err := r.client.Collection().DeleteMany(ctx, filter) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -150,7 +150,7 @@ func (r *datasetSchemaRepo) paginate(ctx context.Context, filter bson.D, paginat var c mongodoc.DatasetSchemaConsumer pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) if err2 != nil { - return nil, nil, err1.ErrInternalBy(err2) + return nil, nil, rerror.ErrInternalBy(err2) } return c.Rows, pageInfo, nil } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index 966808beb..28ae04f2a 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -7,10 +7,10 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" ) type layerRepo struct { @@ -163,7 +163,7 @@ func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error } _, err := r.client.Collection().DeleteMany(ctx, filter) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -184,7 +184,7 @@ func (r *layerRepo) findOne(ctx context.Context, filter bson.D) (layer.Layer, er 
return nil, err } if len(c.Rows) == 0 { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } return *c.Rows[0], nil } @@ -195,7 +195,7 @@ func (r *layerRepo) findItemOne(ctx context.Context, filter bson.D) (*layer.Item return nil, err } if len(c.ItemRows) == 0 { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } return c.ItemRows[0], nil } @@ -206,7 +206,7 @@ func (r *layerRepo) findGroupOne(ctx context.Context, filter bson.D) (*layer.Gro return nil, err } if len(c.GroupRows) == 0 { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } return c.GroupRows[0], nil } @@ -215,7 +215,7 @@ func (r *layerRepo) findGroupOne(ctx context.Context, filter bson.D) (*layer.Gro // var c mongodoc.LayerConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, rerror.ErrInternalBy(err2) // } // return c.Rows, pageInfo, nil // } @@ -237,7 +237,7 @@ func (r *layerRepo) findItems(ctx context.Context, dst layer.ItemList, filter bs // var c mongodoc.LayerConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, rerror.ErrInternalBy(err2) // } // return c.ItemRows, pageInfo, nil // } @@ -259,7 +259,7 @@ func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter // var c mongodoc.LayerConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, rerror.ErrInternalBy(err2) // } // return c.GroupRows, pageInfo, nil // } diff --git a/internal/infrastructure/mongo/migration/client.go b/internal/infrastructure/mongo/migration/client.go index e716dce1b..4b210825d 100644 --- a/internal/infrastructure/mongo/migration/client.go +++ b/internal/infrastructure/mongo/migration/client.go @@ -7,8 +7,8 @@ import ( 
"github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/pkg/config" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" @@ -27,7 +27,7 @@ type Client struct { func (c Client) Migrate(ctx context.Context) error { config, err := c.loadConfig(ctx) if err != nil { - var ie *err1.ErrInternal + var ie *rerror.ErrInternal if ok := errors.As(err, &ie); ok { err = ie.Unwrap() } @@ -43,7 +43,7 @@ func (c Client) Migrate(ctx context.Context) error { log.Infof("DB migration: %d\n", m) if err := migrations[m](ctx, c.Client); err != nil { - var ie *err1.ErrInternal + var ie *rerror.ErrInternal if ok := errors.As(err, &ie); ok { err = ie.Unwrap() } @@ -52,7 +52,7 @@ func (c Client) Migrate(ctx context.Context) error { config.Migration = m if err := c.saveConfig(ctx, config); err != nil { - var ie *err1.ErrInternal + var ie *rerror.ErrInternal if ok := errors.As(err, &ie); ok { err = ie.Unwrap() } @@ -94,7 +94,7 @@ func (c *Client) saveConfig(ctx context.Context, cfg *config.Config) error { }, &options.UpdateOptions{ Upsert: &upsert, }); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index 08d384f8c..c959875b7 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -7,7 +7,7 @@ import ( "io" "github.com/reearth/reearth-backend/internal/usecase" - err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/rerror" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" @@ -39,10 +39,10 @@ func (c *Client) 
Collection(col string) *mongo.Collection { func (c *Client) Find(ctx context.Context, col string, filter interface{}, consumer Consumer) error { cursor, err := c.Collection(col).Find(ctx, filter) if errors.Is(err, mongo.ErrNilDocument) || errors.Is(err, mongo.ErrNoDocuments) { - return err1.ErrNotFound + return rerror.ErrNotFound } if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } defer func() { _ = cursor.Close(ctx) @@ -51,18 +51,18 @@ func (c *Client) Find(ctx context.Context, col string, filter interface{}, consu for { c := cursor.Next(ctx) if err := cursor.Err(); err != nil && !errors.Is(err, io.EOF) { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } if !c { if err := consumer.Consume(nil); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } break } if err := consumer.Consume(cursor.Current); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } } return nil @@ -71,10 +71,10 @@ func (c *Client) Find(ctx context.Context, col string, filter interface{}, consu func (c *Client) FindOne(ctx context.Context, col string, filter interface{}, consumer Consumer) error { raw, err := c.Collection(col).FindOne(ctx, filter).DecodeBytes() if errors.Is(err, mongo.ErrNilDocument) || errors.Is(err, mongo.ErrNoDocuments) { - return err1.ErrNotFound + return rerror.ErrNotFound } if err := consumer.Consume(raw); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -82,7 +82,7 @@ func (c *Client) FindOne(ctx context.Context, col string, filter interface{}, co func (c *Client) Count(ctx context.Context, col string, filter interface{}) (int64, error) { count, err := c.Collection(col).CountDocuments(ctx, filter) if err != nil { - return count, err1.ErrInternalBy(err) + return count, rerror.ErrInternalBy(err) } return count, nil } @@ -98,7 +98,7 @@ func (c *Client) RemoveAll(ctx context.Context, col string, ids []string) error 
} _, err := c.Collection(col).DeleteMany(ctx, filter) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -106,7 +106,7 @@ func (c *Client) RemoveAll(ctx context.Context, col string, ids []string) error func (c *Client) RemoveOne(ctx context.Context, col string, id string) error { _, err := c.Collection(col).DeleteOne(ctx, bson.D{{Key: "id", Value: id}}) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -121,7 +121,7 @@ var ( func (c *Client) SaveOne(ctx context.Context, col string, id string, replacement interface{}) error { _, err := c.Collection(col).ReplaceOne(ctx, bson.D{{Key: "id", Value: id}}, replacement, replaceOption) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -131,7 +131,7 @@ func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates return nil } if len(ids) != len(updates) { - return err1.ErrInternalBy(errors.New("invalid save args")) + return rerror.ErrInternalBy(errors.New("invalid save args")) } writeModels := make([]mongo.WriteModel, 0, len(updates)) @@ -146,7 +146,7 @@ func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates _, err := c.Collection(col).BulkWrite(ctx, writeModels) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index 86558212e..99e8cc8d7 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -125,7 +125,7 @@ func (r *pluginRepo) findOne(ctx context.Context, filter bson.D) (*plugin.Plugin // var c mongodoc.PluginConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, rerror.ErrInternalBy(err2) // } // return c.Rows, pageInfo, nil // } 
diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 51aa63230..0f12d99c1 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -8,10 +8,10 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" ) type projectRepo struct { @@ -117,7 +117,7 @@ func (r *projectRepo) paginate(ctx context.Context, filter bson.D, pagination *u var c mongodoc.ProjectConsumer pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) if err2 != nil { - return nil, nil, err1.ErrInternalBy(err2) + return nil, nil, rerror.ErrInternalBy(err2) } return c.Rows, pageInfo, nil } diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index d5d4e4735..a879654bb 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -5,10 +5,10 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" "go.mongodb.org/mongo-driver/bson" ) @@ -108,7 +108,7 @@ func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) er } _, err := r.client.Collection().DeleteMany(ctx, filter) if err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } return nil } @@ -138,7 +138,7 @@ 
func (r *propertyRepo) findOne(ctx context.Context, filter bson.D) (*property.Pr // var c propertyConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, rerror.ErrInternalBy(err2) // } // return c.rows, pageInfo, nil // } diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index 35feb65dd..8a5990815 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -5,9 +5,9 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" @@ -83,7 +83,7 @@ func (r *sceneRepo) HasSceneTeam(ctx context.Context, sceneID id.SceneID, temaID } res, err2 := r.client.Collection().CountDocuments(ctx, filter) if err2 != nil { - return false, err1.ErrInternalBy(err2) + return false, rerror.ErrInternalBy(err2) } return res == 1, nil } @@ -97,13 +97,13 @@ func (r *sceneRepo) HasScenesTeam(ctx context.Context, sceneIDs []id.SceneID, te }) if err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } var res []struct{ ID string } err2 = cursor.All(ctx, res) if err2 != nil { - return nil, err1.ErrInternalBy(err2) + return nil, rerror.ErrInternalBy(err2) } res2 := make([]bool, 0, len(sceneIDs)) @@ -155,7 +155,7 @@ func (r *sceneRepo) findOne(ctx context.Context, filter bson.D) (*scene.Scene, e // var c mongodoc.SceneConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, 
rerror.ErrInternalBy(err2) // } // return c.Rows, pageInfo, nil // } diff --git a/internal/infrastructure/mongo/scene_lock.go b/internal/infrastructure/mongo/scene_lock.go index 1229cf267..c41e015f7 100644 --- a/internal/infrastructure/mongo/scene_lock.go +++ b/internal/infrastructure/mongo/scene_lock.go @@ -6,8 +6,8 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" @@ -28,7 +28,7 @@ func (r *sceneLockRepo) GetLock(ctx context.Context, sceneID id.SceneID) (scene. } var c mongodoc.SceneLockConsumer if err2 := r.client.FindOne(ctx, filter, &c); err2 != nil { - if errors.Is(err2, err1.ErrNotFound) { + if errors.Is(err2, rerror.ErrNotFound) { return scene.LockModeFree, nil } return scene.LockMode(""), err2 @@ -60,7 +60,7 @@ func (r *sceneLockRepo) SaveLock(ctx context.Context, sceneID id.SceneID, lock s }, &options.UpdateOptions{ Upsert: &upsert, }); err2 != nil { - return err1.ErrInternalBy(err2) + return rerror.ErrInternalBy(err2) } return nil } @@ -68,7 +68,7 @@ func (r *sceneLockRepo) SaveLock(ctx context.Context, sceneID id.SceneID, lock s func (r *sceneLockRepo) ReleaseAllLock(ctx context.Context) error { if _, err2 := r.client.Collection().DeleteMany(ctx, bson.D{}); err2 != nil { if err2 != mongo.ErrNilDocument && err2 != mongo.ErrNoDocuments { - return err1.ErrInternalBy(err2) + return rerror.ErrInternalBy(err2) } } return nil diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index b2ae10819..0ba00f9b5 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -108,7 +108,7 @@ func (r *teamRepo) findOne(ctx context.Context, 
filter bson.D) (*user.Team, erro // var c mongodoc.TeamConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) // if err2 != nil { -// return nil, nil, err1.ErrInternalBy(err2) +// return nil, nil, rerror.ErrInternalBy(err2) // } // return c.Rows, pageInfo, nil // } diff --git a/internal/infrastructure/mongo/transaction.go b/internal/infrastructure/mongo/transaction.go index 074394bd8..7f0eb2995 100644 --- a/internal/infrastructure/mongo/transaction.go +++ b/internal/infrastructure/mongo/transaction.go @@ -5,7 +5,7 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" + "github.com/reearth/reearth-backend/pkg/rerror" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" ) @@ -23,11 +23,11 @@ func NewTransaction(client *mongodoc.Client) repo.Transaction { func (t *Transaction) Begin() (repo.Tx, error) { s, err := t.client.Session() if err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } if err := s.StartTransaction(&options.TransactionOptions{}); err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } return &Tx{session: s, commit: false}, nil @@ -52,10 +52,10 @@ func (t *Tx) End(ctx context.Context) error { if t.commit { if err := t.session.CommitTransaction(ctx); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } } else if err := t.session.AbortTransaction(ctx); err != nil { - return err1.ErrInternalBy(err) + return rerror.ErrInternalBy(err) } t.session.EndSession(ctx) diff --git a/internal/infrastructure/npm/archive.go b/internal/infrastructure/npm/archive.go index f8937e0d4..a67b5d3bc 100644 --- a/internal/infrastructure/npm/archive.go +++ b/internal/infrastructure/npm/archive.go @@ -7,8 +7,8 @@ import ( "io" "strings" - err1 
"github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/rerror" ) type archive struct { @@ -41,7 +41,7 @@ func (a *archive) Next() (f *file.File, derr error) { return } if err != nil { - derr = err1.ErrInternalBy(err) + derr = rerror.ErrInternalBy(err) return } if strings.HasPrefix(head.Name, "package/") { diff --git a/internal/infrastructure/npm/plugin_repository.go b/internal/infrastructure/npm/plugin_repository.go index d79e8598e..63266a147 100644 --- a/internal/infrastructure/npm/plugin_repository.go +++ b/internal/infrastructure/npm/plugin_repository.go @@ -8,10 +8,10 @@ import ( "strings" "github.com/reearth/reearth-backend/internal/usecase/gateway" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" ) const ( @@ -55,7 +55,7 @@ func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manif break } if err != nil { - return nil, err1.ErrInternalBy(err) + return nil, rerror.ErrInternalBy(err) } if f.Fullpath == manifestFilePath { manifest, err := manifest.Parse(f.Content) @@ -88,7 +88,7 @@ func (r *pluginRepository) getNpmTarball(ctx context.Context, id id.PluginID) (f res, err := http.DefaultClient.Do(req) if err != nil || res.StatusCode != http.StatusOK { if res.StatusCode == http.StatusNotFound { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } return nil, gateway.ErrFailedToFetchPluiginRepositoryData } diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index 714ff38f0..849c8bbbf 100644 --- a/internal/usecase/interactor/common.go +++ b/internal/usecase/interactor/common.go @@ -9,9 +9,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" - 
err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" ) @@ -218,7 +218,7 @@ func (d ProjectDeleter) Delete(ctx context.Context, prj *project.Project, force // Fetch scene s, err := d.Scene.FindByProject(ctx, prj.ID(), operator.WritableTeams) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return err } diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index 7590ae9cb..8a56ff029 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -9,12 +9,12 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/initializer" @@ -378,7 +378,7 @@ func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, op res = append(res, d) next, done = it.Next(d) if next.ID().IsNil() { - return nil, err1.ErrInternalBy(errors.New("next id is nil")) + return nil, rerror.ErrInternalBy(errors.New("next id is nil")) } if done { break @@ -418,7 +418,7 @@ func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, d res = append(res, d) next, done = it.Next(d) if next.ID().IsNil() { - return nil, err1.ErrInternalBy(errors.New("next id is nil")) + return nil, 
rerror.ErrInternalBy(errors.New("next id is nil")) } if done { break @@ -575,7 +575,7 @@ func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.Remove } if s == nil { - return inp.SchemaId, err1.ErrNotFound + return inp.SchemaId, rerror.ErrNotFound } datasets, err := i.datasetRepo.FindBySchemaAll(ctx, inp.SchemaId) diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index 6558c069b..c2f10a2de 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -8,13 +8,13 @@ import ( "strings" "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/shp" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/decoding" @@ -170,7 +170,7 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID, scenes) if err != nil { - if errors.Is(err, err1.ErrNotFound) { + if errors.Is(err, rerror.ErrNotFound) { return nil, nil, err } return nil, nil, err @@ -486,7 +486,7 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op } parentLayer, err := i.layerRepo.FindParentByID(ctx, lid, scenes) - if err != nil && err != err1.ErrNotFound { + if err != nil && err != rerror.ErrNotFound { return lid, nil, err } if parentLayer != nil { @@ -900,7 +900,7 @@ func (i *Layer) getPlugin(ctx context.Context, p *id.PluginID, e *id.PluginExten plugin, err := i.pluginRepo.FindByID(ctx, *p) if err != nil { - if errors.Is(err, err1.ErrNotFound) { + if 
errors.Is(err, rerror.ErrNotFound) { return nil, nil, interfaces.ErrPluginNotFound } return nil, nil, err diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 7d10c1e10..bd77cb3ed 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -10,9 +10,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/pkg/scene/builder" ) @@ -201,7 +201,7 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o if prj.PublishmentStatus() != project.PublishmentStatusPrivate && p.Alias != nil && *p.Alias != oldAlias { if err := i.file.MoveBuiltScene(ctx, oldAlias, *p.Alias); err != nil { // ignore ErrNotFound - if !errors.Is(err, err1.ErrNotFound) { + if !errors.Is(err, rerror.ErrNotFound) { return nil, err } } @@ -217,7 +217,7 @@ func (i *Project) CheckAlias(ctx context.Context, alias string) (bool, error) { } prj, err := i.projectRepo.FindByPublicName(ctx, alias) - if prj == nil && err == nil || err != nil && errors.Is(err, err1.ErrNotFound) { + if prj == nil && err == nil || err != nil && errors.Is(err, rerror.ErrNotFound) { return true, nil } diff --git a/internal/usecase/interactor/published.go b/internal/usecase/interactor/published.go index a837bd3cc..515b7824b 100644 --- a/internal/usecase/interactor/published.go +++ b/internal/usecase/interactor/published.go @@ -17,8 +17,8 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" 
"github.com/reearth/reearth-backend/pkg/cache" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" ) type Published struct { @@ -70,7 +70,7 @@ func NewPublishedWithURL(project repo.Project, file gateway.File, indexHTMLURL * func (i *Published) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { prj, err := i.project.FindByPublicName(ctx, name) if err != nil || prj == nil { - return interfaces.ProjectPublishedMetadata{}, err1.ErrNotFound + return interfaces.ProjectPublishedMetadata{}, rerror.ErrNotFound } return interfaces.ProjectPublishedMetadataFrom(prj), nil diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 941442bba..0e4fa442c 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -8,11 +8,11 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/pkg/scene/sceneops" "github.com/reearth/reearth-backend/pkg/visualizer" @@ -246,7 +246,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP ws := scene.WidgetSystem() widget := ws.Widget(param.PluginID, param.ExtensionID) if widget == nil { - return nil, nil, err1.ErrNotFound + return nil, nil, rerror.ErrNotFound } if param.Enabled != nil { @@ -295,7 +295,7 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, pid id.PluginID widget := ws.Widget(pid, eid) if 
widget == nil { - return nil, err1.ErrNotFound + return nil, rerror.ErrNotFound } ws.Remove(pid, eid) @@ -349,7 +349,7 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin plugin, err2 := i.pluginRepo.FindByID(ctx, pid) if err2 != nil { - if errors.Is(err2, err1.ErrNotFound) { + if errors.Is(err2, rerror.ErrNotFound) { // // Install Plugin // @@ -594,7 +594,7 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, func (i *Scene) getPlugin(ctx context.Context, p id.PluginID, e id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { plugin, err2 := i.pluginRepo.FindByID(ctx, p) if err2 != nil { - if errors.Is(err2, err1.ErrNotFound) { + if errors.Is(err2, rerror.ErrNotFound) { return nil, nil, interfaces.ErrPluginNotFound } return nil, nil, err2 diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 67abe52c6..491a110ad 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -8,9 +8,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/user" ) @@ -97,7 +97,7 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. // Check if user and team already exists existed, err := i.userRepo.FindByAuth0Sub(ctx, inp.Sub) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return nil, nil, err } if existed != nil { @@ -106,7 +106,7 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
if inp.UserID != nil { existed, err := i.userRepo.FindByID(ctx, *inp.UserID) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return nil, nil, err } if existed != nil { @@ -116,7 +116,7 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. if inp.TeamID != nil { existed, err := i.teamRepo.FindByID(ctx, *inp.TeamID) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return nil, nil, err } if existed != nil { @@ -133,7 +133,7 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. // Check if user and team already exists var team *user.Team existed, err = i.userRepo.FindByEmail(ctx, ui.Email) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return nil, nil, err } if existed != nil { @@ -197,7 +197,7 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato u.UpdateName(*p.Name) team, err = i.teamRepo.FindByID(ctx, u.Team()) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return nil, err } @@ -281,7 +281,7 @@ func (i *User) RemoveMyAuth(ctx context.Context, authProvider string, operator * func (i *User) SearchUser(ctx context.Context, nameOrEmail string, operator *usecase.Operator) (u *user.User, err error) { u, err = i.userRepo.FindByNameOrEmail(ctx, nameOrEmail) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return nil, err } return u, nil @@ -307,7 +307,7 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase }() u, err := i.userRepo.FindByID(ctx, userID) - if err != nil && !errors.Is(err, err1.ErrNotFound) { + if err != nil && !errors.Is(err, rerror.ErrNotFound) { return err } if u == nil { diff --git a/pkg/error/error.go b/pkg/error/error.go 
deleted file mode 100644 index 5f16e8061..000000000 --- a/pkg/error/error.go +++ /dev/null @@ -1,73 +0,0 @@ -package error - -import ( - "fmt" - - "github.com/pkg/errors" -) - -var ( - // ErrNotFound _ - ErrNotFound = errors.New("not found") - // ErrInvalidParams represents the params are invalid, such as empty string. - ErrInvalidParams = errors.New("invalid params") - // ErrNotImplemented _ - ErrNotImplemented = errors.New("not implemented") - // ErrUserNotFound _ - ErrUserNotFound = errors.New("user is not found") -) - -// ErrInternal is an error struct that can hold an internal error but hides users the details. -type ErrInternal struct { - Err error -} - -func ErrInternalBy(err error) error { - return &ErrInternal{ - Err: err, - } -} - -func (e *ErrInternal) Error() string { - if e == nil { - return "" - } - return "internal" -} - -func (e *ErrInternal) Unwrap() error { - if e == nil { - return nil - } - return e.Err -} - -// Error can hold an error together with any label. This is useful for displaying a hierarchical error. 
-type Error struct { - Label string - Err error -} - -func New(label string, err error) *Error { - return &Error{ - Label: label, - Err: err, - } -} - -func (e *Error) Error() string { - if e == nil { - return "" - } - if e2, ok := e.Err.(*Error); ok { - return fmt.Sprintf("%s.%s", e.Label, e2) - } - return fmt.Sprintf("%s: %s", e.Label, e.Err) -} - -func (e *Error) Unwrap() error { - if e == nil { - return nil - } - return e.Err -} diff --git a/pkg/error/error_test.go b/pkg/error/error_test.go deleted file mode 100644 index 61f7f003a..000000000 --- a/pkg/error/error_test.go +++ /dev/null @@ -1,30 +0,0 @@ -package error - -import ( - "errors" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestErrInternal(t *testing.T) { - werr := errors.New("wrapped") - err := ErrInternalBy(werr) - var err2 *ErrInternal - assert.Equal(t, "internal", err.Error()) - assert.True(t, errors.As(err, &err2)) - assert.Same(t, werr, errors.Unwrap(err)) -} - -func TestError(t *testing.T) { - werr := errors.New("wrapped") - err := New("label", werr) - var err2 *Error - assert.Equal(t, "label: wrapped", err.Error()) - assert.True(t, errors.As(err, &err2)) - assert.Same(t, werr, errors.Unwrap(err)) - err3 := New("foo", err) - assert.Equal(t, "foo.label: wrapped", err3.Error()) - err4 := New("bar", err3) - assert.Equal(t, "bar.foo.label: wrapped", err4.Error()) -} diff --git a/pkg/layer/initializer.go b/pkg/layer/initializer.go index 78aefb589..98aa1b714 100644 --- a/pkg/layer/initializer.go +++ b/pkg/layer/initializer.go @@ -5,9 +5,16 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/builtin" - perror "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +var ( + ErrInitializationInfobox = errors.New("infobox") + ErrInitializationInfoboxWith = rerror.With(ErrInitializationInfobox) + ErrInitializationProperty = 
errors.New("property") + ErrInitializationPropertyWith = rerror.With(ErrInitializationProperty) ) type InitializerResult struct { @@ -94,7 +101,7 @@ func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { ib, pm, err2 := i.Infobox.Infobox(sid) if err2 != nil { - err = perror.New("infobox", err2) + err = ErrInitializationInfoboxWith(err2) return } r.Properties = r.Properties.Merge(pm) @@ -107,7 +114,7 @@ func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { pid := i.PropertyID lp, err2 := i.Property.Property(sid) if err2 != nil { - err = perror.New("property", err2) + err = ErrInitializationPropertyWith(err2) return } if lp != nil { @@ -132,7 +139,7 @@ func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { for i, lay2 := range i.Layers { r2, err2 := lay2.Layer(sid) if err2 != nil { - err = perror.New(fmt.Sprint(i), err2) + err = rerror.From(fmt.Sprint(i), err2) return } if rootLayer := r2.RootLayer(); rootLayer != nil { @@ -205,7 +212,7 @@ func (i *InitializerInfobox) Infobox(scene id.SceneID) (*Infobox, property.Map, for i, f := range i.Fields { ibf, ibfp, err := f.InfoboxField(scene) if err != nil { - return nil, nil, perror.New(fmt.Sprint(i), err) + return nil, nil, rerror.From(fmt.Sprint(i), err) } fields = append(fields, ibf) pm = pm.Add(ibfp) @@ -218,7 +225,7 @@ func (i *InitializerInfobox) Infobox(scene id.SceneID) (*Infobox, property.Map, var err error ibp, err = i.Property.PropertyIncludingEmpty(scene, builtin.PropertySchemaIDInfobox) if err != nil { - return nil, nil, perror.New("property", err) + return nil, nil, ErrInitializationPropertyWith(err) } if ibp != nil { ibpid = ibp.IDRef() @@ -274,7 +281,7 @@ func (i *InitializerInfoboxField) InfoboxField(scene id.SceneID) (*InfoboxField, if pid == nil { p, err = i.Property.PropertyIncludingEmpty(scene, psid) if err != nil { - return nil, nil, perror.New("property", err) + return nil, nil, ErrInitializationPropertyWith(err) } if p != nil { 
pid = p.IDRef() diff --git a/pkg/rerror/error.go b/pkg/rerror/error.go new file mode 100644 index 000000000..3632a97b9 --- /dev/null +++ b/pkg/rerror/error.go @@ -0,0 +1,131 @@ +package rerror + +import ( + "fmt" + + "github.com/pkg/errors" +) + +var ( + errInternal = errors.New("internal") + // ErrNotFound indicates something was not found. + ErrNotFound = errors.New("not found") + // ErrInvalidParams represents the params are invalid, such as empty string. + ErrInvalidParams = errors.New("invalid params") + // ErrNotImplemented indicates unimplemented. + ErrNotImplemented = errors.New("not implemented") +) + +// ErrInternal is an error struct that can hold an internal error but hides users the details. +type ErrInternal struct { + err Error +} + +func ErrInternalBy(err error) error { + return &ErrInternal{ + err: Error{ + Label: errInternal, + Err: err, + Hidden: true, + }, + } +} + +func (e *ErrInternal) Error() string { + return e.err.Error() +} + +func (e *ErrInternal) Unwrap() error { + return e.err.Unwrap() +} + +// Error can hold an error together with label. +// This is useful for displaying a hierarchical error message cleanly and searching by label later to retrieve a wrapped error. +// Currently, Go standard error library does not support these use cases. That's why we need our own error type. +type Error struct { + Label error + Err error + Hidden bool +} + +// From creates an Error with string label. +func From(label string, err error) *Error { + return &Error{Label: errors.New(label), Err: err} +} + +// Error implements error interface. +func (e *Error) Error() string { + if e == nil { + return "" + } + if e.Hidden { + return e.Label.Error() + } + if e2, ok := e.Err.(*Error); ok { + return fmt.Sprintf("%s.%s", e.Label, e2) + } + return fmt.Sprintf("%s: %s", e.Label, e.Err) +} + +// Unwrap implements the interface for errors.Unwrap. 
+func (e *Error) Unwrap() error { + if e == nil { + return nil + } + return e.Err +} + +// Get gets Error struct from an error +func Get(err error) *Error { + var target *Error + _ = errors.As(err, &target) + return target +} + +// Is looks up errors whose label is the same as the specific label and return true if it was found +func Is(err error, label error) bool { + if err == nil { + return false + } + e := err + var target *Error + for { + if !errors.As(e, &target) { + break + } + if target.Label == label { + return true + } + e = target.Unwrap() + } + return false +} + +// As looks up errors whose label is the same as the specific label and return a wrapped error. +func As(err error, label error) error { + if err == nil { + return nil + } + e := err + var target *Error + for { + if !errors.As(e, &target) { + break + } + if target.Label == label { + return target.Unwrap() + } + e = target.Unwrap() + } + return nil +} + +// With returns a new constructor to generate an Error with specific label. 
+func With(label error) func(error) *Error { + return func(err error) *Error { + return &Error{ + Label: label, + Err: err, + } + } +} diff --git a/pkg/rerror/error_test.go b/pkg/rerror/error_test.go new file mode 100644 index 000000000..018f2c373 --- /dev/null +++ b/pkg/rerror/error_test.go @@ -0,0 +1,140 @@ +package rerror + +import ( + "errors" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestErrInternal(t *testing.T) { + werr := errors.New("wrapped") + err := ErrInternalBy(werr) + var err2 *ErrInternal + assert.Equal(t, "internal", err.Error()) + assert.True(t, errors.As(err, &err2)) + assert.Same(t, werr, errors.Unwrap(err)) +} + +func TestError(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + var err error = &Error{Label: label, Err: werr} + + var err2 *Error + assert.Equal(t, "label: wrapped", err.Error()) + assert.True(t, errors.As(err, &err2)) + assert.Same(t, werr, errors.Unwrap(err)) + + label2 := errors.New("foo") + err3 := &Error{Label: label2, Err: err} + assert.Equal(t, "foo.label: wrapped", err3.Error()) + + label3 := errors.New("bar") + err4 := &Error{Label: label3, Err: err3} + assert.Equal(t, "bar.foo.label: wrapped", err4.Error()) + + err5 := Error{ + Label: label, + Err: werr, + Hidden: true, + } + assert.Equal(t, "label", err5.Error()) + + var nilerr *Error + assert.Equal(t, "", nilerr.Error()) + assert.Nil(t, nilerr.Unwrap()) +} + +func TestFrom(t *testing.T) { + werr := errors.New("wrapped") + err := From("label", werr) + assert.Equal(t, "label", err.Label.Error()) + assert.Same(t, werr, err.Err) + assert.False(t, err.Hidden) +} + +func TestGet(t *testing.T) { + werr := &Error{Label: errors.New("hoge"), Err: errors.New("wrapped")} + err := fmt.Errorf("wrapped: %w", werr) + assert.Same(t, werr, Get(err)) + assert.Same(t, werr, Get(werr)) +} + +func TestIs(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + err := &Error{ + Label: label, + Err: werr, + } + 
assert.True(t, Is(err, label)) + assert.False(t, Is(err, errors.New("label"))) + assert.False(t, Is(err, errors.New("nested"))) + assert.False(t, Is(err, errors.New("wrapped"))) + + label2 := errors.New("nested") + err = &Error{ + Label: label2, + Err: &Error{ + Label: label, + Err: werr, + }, + } + assert.True(t, Is(err, label)) + assert.True(t, Is(err, label2)) + assert.False(t, Is(err, errors.New("label"))) + assert.False(t, Is(err, errors.New("nested"))) + assert.False(t, Is(err, errors.New("wrapped"))) + assert.False(t, Is(nil, errors.New("label"))) +} + +func TestAs(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + err := &Error{ + Label: label, + Err: werr, + } + assert.Same(t, werr, As(err, label)) + assert.Nil(t, As(err, errors.New("label"))) + assert.Nil(t, As(err, errors.New("nested"))) + assert.Nil(t, As(err, errors.New("wrapped"))) + + label2 := errors.New("nested") + err = &Error{ + Label: label2, + Err: &Error{ + Label: label, + Err: werr, + }, + } + assert.Same(t, werr, As(err, label)) + assert.Same(t, err.Err, As(err, label2)) + assert.Nil(t, As(err, errors.New("label"))) + assert.Nil(t, As(err, errors.New("nested"))) + assert.Nil(t, As(err, errors.New("wrapped"))) + + assert.Nil(t, As(nil, errors.New("label"))) +} + +func TestWith(t *testing.T) { + werr := errors.New("wrapped") + label := errors.New("label") + err := With(label)(werr) + assert.Equal(t, &Error{ + Label: label, + Err: werr, + }, err) + assert.Same(t, label, err.Label) + assert.Same(t, werr, err.Err) + + err = With(label)(nil) + assert.Equal(t, &Error{ + Label: label, + Err: nil, + }, err) + assert.Same(t, label, err.Label) + assert.Nil(t, err.Err) +} diff --git a/pkg/scene/sceneops/plugin_installer.go b/pkg/scene/sceneops/plugin_installer.go index 4678cf307..dfc7609b4 100644 --- a/pkg/scene/sceneops/plugin_installer.go +++ b/pkg/scene/sceneops/plugin_installer.go @@ -31,27 +31,27 @@ func (s PluginInstaller) InstallPluginFromRepository(pluginID 
id.PluginID) error // for _, s := range manifest.ExtensionSchema { // err = i.propertySchemaRepo.Save(&s) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // } // err = i.pluginRepo.Save(plugin) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // // Download and extract plugin files to storage // data, err := i.pluginRepositoryRepo.Data(inp.Name, inp.Version) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // _, err = i.fileRepo.UploadAndExtractPluginFiles(data, plugin) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } @@ -78,7 +78,7 @@ func (s PluginInstaller) InstallPluginFromRepository(pluginID id.PluginID) error // Public(inp.Public). 
// Build() // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } @@ -86,7 +86,7 @@ func (s PluginInstaller) InstallPluginFromRepository(pluginID id.PluginID) error // if manifest.Schema != nil { // err = i.propertySchemaRepo.Save(manifest.Schema) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // } @@ -94,27 +94,27 @@ func (s PluginInstaller) InstallPluginFromRepository(pluginID id.PluginID) error // for _, s := range manifest.ExtensionSchema { // err = i.propertySchemaRepo.Save(&s) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // } // err = i.pluginRepo.Save(plugin) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // // Download and extract plugin files to storage // data, err := i.pluginRepositoryRepo.Data(inp.Name, inp.Version) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } // _, err = i.fileRepo.UploadAndExtractPluginFiles(data, plugin) // if err != nil { - // i.output.Upload(nil, err1.ErrInternalBy(err)) + // i.output.Upload(nil, rerror.ErrInternalBy(err)) // return // } diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 04d174373..a959c1889 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -5,11 +5,11 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/dataset" - err1 "github.com/reearth/reearth-backend/pkg/error" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" + 
"github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" ) @@ -36,7 +36,7 @@ var ( func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, oldPluginID, newPluginID id.PluginID) (MigratePluginsResult, error) { if s == nil { - return MigratePluginsResult{}, err1.ErrInternalBy(errors.New("scene is nil")) + return MigratePluginsResult{}, rerror.ErrInternalBy(errors.New("scene is nil")) } if oldPluginID.Equal(newPluginID) || oldPluginID.Name() != newPluginID.Name() { From da7506e527fbbe2dc1c87249e6b91b85056a2423 Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Tue, 3 Aug 2021 15:05:25 +0900 Subject: [PATCH 060/253] Fix issue where assets not seen (#34) Co-authored-by: KaWaite --- internal/app/config.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/app/config.go b/internal/app/config.go index a2076864b..d6c7e1a51 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -25,7 +25,7 @@ type Config struct { Tracer string TracerSample float64 GCS GCSConfig - AssetBaseURL string + AssetBaseURL string `default:"http://localhost:8080/assets"` Origins []string Web WebConfig SignupSecret string From 8742dbab980497fe18bc88efed29a25941f10722 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 5 Aug 2021 00:08:27 +0900 Subject: [PATCH 061/253] feat: plugin upload and deletion (#33) --- go.mod | 10 +- go.sum | 108 +++- internal/adapter/graphql/container.go | 2 +- internal/adapter/graphql/controller_plugin.go | 20 +- internal/adapter/graphql/controller_scene.go | 2 +- internal/adapter/graphql/convert.go | 6 +- internal/adapter/graphql/convert_plugin.go | 1 + internal/adapter/graphql/models_gen.go | 14 +- internal/app/file.go | 4 +- internal/app/repo.go | 17 +- internal/graphql/generated.go | 286 ++++++++-- internal/graphql/resolver_plugin.go | 20 +- internal/infrastructure/adapter/plugin.go | 15 +- .../infrastructure/adapter/property_schema.go | 14 
+ internal/infrastructure/fs/archive.go | 115 ----- internal/infrastructure/fs/common.go | 50 +- internal/infrastructure/fs/common_test.go | 22 - internal/infrastructure/fs/file.go | 252 ++++----- internal/infrastructure/fs/file_test.go | 263 ++++++++++ internal/infrastructure/fs/plugin.go | 41 +- .../infrastructure/fs/plugin_repository.go | 48 +- internal/infrastructure/fs/property_schema.go | 43 +- internal/infrastructure/gcs/file.go | 354 ++++++------- internal/infrastructure/gcs/file_test.go | 16 +- internal/infrastructure/memory/plugin.go | 21 +- .../infrastructure/memory/property_schema.go | 18 + .../infrastructure/mongo/mongodoc/plugin.go | 4 +- .../infrastructure/mongo/mongodoc/scene.go | 6 +- internal/infrastructure/mongo/plugin.go | 59 ++- .../infrastructure/mongo/property_schema.go | 11 + internal/infrastructure/npm/archive.go | 73 --- .../infrastructure/npm/plugin_repository.go | 97 ---- internal/usecase/gateway/file.go | 12 +- internal/usecase/gateway/plugin_repository.go | 2 +- internal/usecase/interactor/asset.go | 3 +- internal/usecase/interactor/dataset.go | 8 +- internal/usecase/interactor/layer.go | 23 +- internal/usecase/interactor/plugin.go | 32 +- internal/usecase/interactor/plugin_delete.go | 58 +++ internal/usecase/interactor/plugin_upload.go | 183 +++++++ internal/usecase/interactor/property.go | 3 +- internal/usecase/interactor/scene.go | 158 +++--- internal/usecase/interfaces/plugin.go | 11 +- internal/usecase/repo/plugin.go | 9 +- internal/usecase/repo/property_schema.go | 2 + pkg/builtin/main.go | 2 +- pkg/builtin/main_test.go | 4 +- pkg/file/file.go | 161 +++++- pkg/file/file_test.go | 196 +++++++ pkg/file/targz.go | 45 ++ pkg/file/testdata/test.tar.gz | Bin 0 -> 260 bytes pkg/file/testdata/test.zip | Bin 0 -> 667 bytes pkg/file/zip.go | 87 ++++ pkg/file/zip_test.go | 32 ++ pkg/id/asset_gen.go | 10 + pkg/id/asset_gen_test.go | 63 ++- pkg/id/dataset_gen.go | 10 + pkg/id/dataset_gen_test.go | 63 ++- pkg/id/dataset_schema_field_gen.go | 
10 + pkg/id/dataset_schema_field_gen_test.go | 63 ++- pkg/id/dataset_schema_gen.go | 10 + pkg/id/dataset_schema_gen_test.go | 63 ++- pkg/id/id.tmpl | 10 + pkg/id/id_test.tmpl | 63 ++- pkg/id/infobox_field_gen.go | 10 + pkg/id/infobox_field_gen_test.go | 63 ++- pkg/id/layer_gen.go | 10 + pkg/id/layer_gen_test.go | 63 ++- pkg/id/plugin.go | 86 +++- pkg/id/plugin_test.go | 487 ++++++++++-------- pkg/id/project_gen.go | 10 + pkg/id/project_gen_test.go | 63 ++- pkg/id/property_gen.go | 10 + pkg/id/property_gen_test.go | 65 +-- pkg/id/property_item_gen.go | 10 + pkg/id/property_item_gen_test.go | 63 ++- pkg/id/property_schema.go | 23 +- pkg/id/property_schema_test.go | 118 +++-- pkg/id/scene_gen.go | 10 + pkg/id/scene_gen_test.go | 63 ++- pkg/id/team_gen.go | 10 + pkg/id/team_gen_test.go | 63 ++- pkg/id/user_gen.go | 10 + pkg/id/user_gen_test.go | 65 +-- pkg/id/widget_gen.go | 10 + pkg/id/widget_gen_test.go | 63 ++- pkg/layer/group_test.go | 8 +- pkg/layer/infobox.go | 16 + pkg/layer/infobox_test.go | 16 + .../{initializer => layerops}/initializer.go | 2 +- .../initializer_test.go | 6 +- pkg/layer/layerops/processor.go | 48 ++ pkg/layer/layerops/processor_test.go | 39 ++ pkg/layer/list.go | 47 ++ pkg/layer/list_test.go | 19 + pkg/layer/loader.go | 27 + pkg/layer/loader_test.go | 80 +++ pkg/layer/merged_test.go | 2 +- pkg/layer/merging/merger_test.go | 4 +- pkg/plugin/builder_test.go | 24 +- pkg/plugin/extension_builder_test.go | 12 +- pkg/plugin/extension_test.go | 4 +- pkg/plugin/loader.go | 2 +- pkg/plugin/manifest/convert.go | 13 +- pkg/plugin/manifest/convert_test.go | 4 +- pkg/plugin/manifest/manifest.go | 7 + pkg/plugin/manifest/parser.go | 13 +- pkg/plugin/manifest/parser_test.go | 21 +- .../manifest/parser_translation_test.go | 2 +- pkg/plugin/plugin.go | 17 +- pkg/plugin/plugin_test.go | 37 +- pkg/plugin/pluginpack/package.go | 57 ++ pkg/plugin/pluginpack/package_test.go | 54 ++ pkg/plugin/pluginpack/testdata/test.zip | Bin 0 -> 791 bytes 
pkg/plugin/repourl/repos.go | 76 +++ pkg/plugin/repourl/repourl.go | 136 +++++ pkg/plugin/repourl/repourl_test.go | 303 +++++++++++ pkg/property/builder_test.go | 18 +- pkg/property/group_builder_test.go | 6 +- pkg/property/group_list_builder_test.go | 8 +- pkg/property/group_list_test.go | 84 +-- pkg/property/group_test.go | 36 +- pkg/property/item_test.go | 10 +- pkg/property/list_test.go | 6 +- pkg/property/loader_test.go | 4 +- pkg/property/merged_test.go | 4 +- pkg/property/property_test.go | 14 +- pkg/property/schema_builder_test.go | 26 +- pkg/property/schema_group_builder_test.go | 2 +- pkg/property/schema_group_test.go | 6 +- pkg/property/schema_test.go | 6 +- pkg/property/sealed_test.go | 4 +- pkg/scene/builder/builder_test.go | 8 +- pkg/scene/builder/scene_test.go | 2 +- pkg/scene/builder_test.go | 6 +- pkg/scene/plugin_system.go | 3 +- pkg/scene/plugin_system_test.go | 36 +- pkg/scene/plugin_test.go | 2 +- pkg/scene/scene_test.go | 2 +- pkg/scene/sceneops/dataset_migrator.go | 6 +- pkg/scene/sceneops/plugin_migrator.go | 2 +- pkg/scene/widget.go | 10 +- pkg/scene/widget_system.go | 20 +- pkg/scene/widget_system_test.go | 98 +++- pkg/scene/widget_test.go | 82 +-- schema.graphql | 12 +- 146 files changed, 4347 insertions(+), 2055 deletions(-) delete mode 100644 internal/infrastructure/fs/archive.go delete mode 100644 internal/infrastructure/fs/common_test.go create mode 100644 internal/infrastructure/fs/file_test.go delete mode 100644 internal/infrastructure/npm/archive.go delete mode 100644 internal/infrastructure/npm/plugin_repository.go create mode 100644 internal/usecase/interactor/plugin_delete.go create mode 100644 internal/usecase/interactor/plugin_upload.go create mode 100644 pkg/file/file_test.go create mode 100644 pkg/file/targz.go create mode 100644 pkg/file/testdata/test.tar.gz create mode 100644 pkg/file/testdata/test.zip create mode 100644 pkg/file/zip.go create mode 100644 pkg/file/zip_test.go rename pkg/layer/{initializer => 
layerops}/initializer.go (98%) rename pkg/layer/{initializer => layerops}/initializer_test.go (94%) create mode 100644 pkg/layer/layerops/processor.go create mode 100644 pkg/layer/layerops/processor_test.go create mode 100644 pkg/layer/list_test.go create mode 100644 pkg/layer/loader_test.go create mode 100644 pkg/plugin/pluginpack/package.go create mode 100644 pkg/plugin/pluginpack/package_test.go create mode 100644 pkg/plugin/pluginpack/testdata/test.zip create mode 100644 pkg/plugin/repourl/repos.go create mode 100644 pkg/plugin/repourl/repourl.go create mode 100644 pkg/plugin/repourl/repourl_test.go diff --git a/go.mod b/go.mod index 475bfe636..4dc138a7c 100644 --- a/go.mod +++ b/go.mod @@ -1,7 +1,7 @@ module github.com/reearth/reearth-backend require ( - cloud.google.com/go v0.80.0 + cloud.google.com/go v0.87.0 cloud.google.com/go/storage v1.14.0 github.com/99designs/gqlgen v0.13.0 github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd @@ -22,6 +22,7 @@ require ( github.com/joho/godotenv v1.3.0 github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 + github.com/kennygrant/sanitize v1.2.4 github.com/klauspost/compress v1.10.10 // indirect github.com/labstack/echo/v4 v4.2.1 github.com/labstack/gommon v0.3.0 @@ -33,6 +34,7 @@ require ( github.com/pkg/errors v0.9.1 github.com/sirupsen/logrus v1.8.1 github.com/smartystreets/assertions v1.1.1 // indirect + github.com/spf13/afero v1.6.0 github.com/stretchr/objx v0.2.0 // indirect github.com/stretchr/testify v1.7.0 github.com/tidwall/pretty v1.0.1 // indirect @@ -47,10 +49,10 @@ require ( go.opentelemetry.io/otel v0.7.0 go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c // indirect - golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect - golang.org/x/text v0.3.5 + golang.org/x/text v0.3.6 golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect - golang.org/x/tools v0.1.0 + golang.org/x/tools v0.1.5 + 
google.golang.org/api v0.51.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.0 gopkg.in/yaml.v2 v2.4.0 // indirect diff --git a/go.sum b/go.sum index 5d523b567..ba640f2cb 100644 --- a/go.sum +++ b/go.sum @@ -18,8 +18,11 @@ cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmW cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.80.0 h1:kAdyAMrj9CjqOSGiluseVjIgAyQ3uxADYtUYR6MwYeY= -cloud.google.com/go v0.80.0/go.mod h1:fqpb6QRi1CFGAMXDoE72G+b+Ybv7dMB/T1tbExDHktI= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0 h1:8ZtzmY4a2JIO2sljMbpqkDYxA8aJQveYr3AMa+X40oc= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -68,6 +71,7 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4 h1:Hs82Z41s6SdL1C github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/arbovm/levenshtein 
v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 h1:LY/kRH+fCqA090FsM2VfZ+oocD99ogm3HrT1r0WDnCk= @@ -90,6 +94,7 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= @@ -108,12 +113,15 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -173,8 +181,9 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0 h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -190,10 +199,12 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD 
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1 h1:jAbXjIeW2ZSW2AwFxlGTDoc2CjI2XujLkV3ArsZFCvc= github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -206,14 +217,16 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod 
h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1 h1:d8MncMlErDFTwQGBK1xhv026j9kqhvw1Qv9IbWT1VLQ= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -225,8 +238,10 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5 h1:zIaiqGYDQwa4HVx5wGRTXbx38Pqxjemn4BP98wpzpXo= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9 h1:2tft2559dNwKl2znYB58oVTql0grRB+Ml3LWIBbc4WM= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -244,6 +259,7 @@ github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB7 github.com/gorilla/websocket v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= @@ -281,6 +297,8 @@ github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaR github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= +github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= @@ -288,6 +306,7 @@ github.com/klauspost/compress v1.10.10 
h1:a/y8CglcM7gLGYmlbP/stPE5sR3hbhFRUjCBfd github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= @@ -348,6 +367,7 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= @@ -362,6 +382,7 @@ github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8b github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod 
h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -387,6 +408,8 @@ github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfIt github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -444,6 +467,7 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.3.4/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= go.mongodb.org/mongo-driver v1.5.1 h1:9nOVLGDfOaZ9R0tBumx/BcuqkbFpyTCU2r/Po7A2azI= go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= @@ -464,6 +488,7 @@ go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0/g go.opentelemetry.io/otel v0.5.0/go.mod h1:jzBIgIzK43Iu1BpDAXwqOd6UPsSAk+ewVZ5ofSXw4Ek= go.opentelemetry.io/otel v0.7.0 
h1:u43jukpwqR8EsyeJOMgrsUgZwVI1e1eVw7yuzRkD1l0= go.opentelemetry.io/otel v0.7.0/go.mod h1:aZMyHG5TqDOXEgH2tyLiXSUKly1jT3yqE9PmrzIeCdo= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -472,6 +497,7 @@ golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaE golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191105034135-c7e5f84aec59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -503,8 +529,9 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -514,8 +541,9 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -551,8 +579,10 @@ golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net 
v0.0.0-20210316092652-d523dce5a7f4 h1:b0LrWgu8+q7z4J+0Y3Umo5q1dL7NXBkKBWkaVkAq17E= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420 h1:a8jGStKg0XqKDlKqjLrXn0ioF5MH36pT7Z0BRTqLhbk= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -563,8 +593,10 @@ golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84 h1:duBc5zuJsmJXYOVVE/6PxejI+N3AaCqKjtsoLn1Je5Q= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 h1:3B43BWw0xEBsLZ/NO1VALz6fppU3481pik+2Ksv45z8= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -625,9 +657,14 @@ golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210314195730-07df6a141424/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= @@ -638,8 +675,9 @@ golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3 golang.org/x/text v0.3.2/go.mod 
h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -701,8 +739,13 @@ golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -728,8 +771,12 @@ google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.42.0 h1:uqATLkpxiBrhrvFoebXUjvyzE9nQf+pVyy0Z0IHE+fc= -google.golang.org/api v0.42.0/go.mod h1:+Oj4s6ch2SEGtPjGqfUfZonBH0GjQH89gTeKKAEGZKI= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0 h1:SQaA2Cx57B+iPw2MBgyjEkoeMkRK2IenSGoia0U3lCk= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -763,6 +810,7 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= @@ -779,9 +827,16 @@ google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210312152112-fc591d9ea70f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210323160006-e668133fea6a h1:XVaQ1+BDKvrRcgppHhtAaniHCKyV5xJAvymwsPHHFaE= -google.golang.org/genproto v0.0.0-20210323160006-e668133fea6a/go.mod h1:f2Bd7+2PlaVKmvKQ52aspJZXIDaRQBVdOOBfJ5i8OEs= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod 
h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea h1:8ZyCcgugUqamxp/vZSEJw9CMy7VZlSWYJLLJPi/dSDA= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -795,11 +850,18 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= 
+google.golang.org/grpc v1.39.0 h1:Klz8I9kdtkIN6EpHHUOMLCYhTn/2WAe5a0s1hcBkdTI= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -811,8 +873,9 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -827,6 +890,7 @@ gopkg.in/h2non/gock.v1 v1.1.0 h1:Yy6sSXyTP9wYc6+H7U0NuB1LQ6H2HYmDp2sxFQ8vTEY= gopkg.in/h2non/gock.v1 v1.1.0/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 
v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/internal/adapter/graphql/container.go b/internal/adapter/graphql/container.go index 2f04f1d86..2b51d11a3 100644 --- a/internal/adapter/graphql/container.go +++ b/internal/adapter/graphql/container.go @@ -70,7 +70,7 @@ func NewContainer(r *repo.Container, g *gateway.Container, conf ContainerConfig) SceneController: NewSceneController( SceneControllerConfig{ SceneInput: func() interfaces.Scene { - return interactor.NewScene(r) + return interactor.NewScene(r, g) }, }, ), diff --git a/internal/adapter/graphql/controller_plugin.go b/internal/adapter/graphql/controller_plugin.go index 3e7de83a3..9be47d8d1 100644 --- a/internal/adapter/graphql/controller_plugin.go +++ b/internal/adapter/graphql/controller_plugin.go @@ -2,9 +2,13 @@ package graphql import ( "context" + "errors" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" ) type PluginControllerConfig struct { @@ -27,13 +31,25 @@ func (c *PluginController) usecase() interfaces.Plugin { } func (c *PluginController) Upload(ctx context.Context, ginput *UploadPluginInput, operator *usecase.Operator) (*UploadPluginPayload, error) { - res, err := c.usecase().Upload(ctx, ginput.File.File, operator) + var p *plugin.Plugin + var s *scene.Scene + var err error + + if ginput.File != nil { + p, s, err = c.usecase().Upload(ctx, ginput.File.File, id.SceneID(ginput.SceneID), operator) + } else if ginput.URL != nil { + p, s, err = c.usecase().UploadFromRemote(ctx, ginput.URL, id.SceneID(ginput.SceneID), operator) + } 
else { + return nil, errors.New("either file or url is required") + } if err != nil { return nil, err } return &UploadPluginPayload{ - Plugin: toPlugin(res), + Plugin: toPlugin(p), + Scene: toScene(s), + ScenePlugin: toScenePlugin(s.PluginSystem().Plugin(p.ID())), }, nil } diff --git a/internal/adapter/graphql/controller_scene.go b/internal/adapter/graphql/controller_scene.go index 695b7727c..bdcfd6e80 100644 --- a/internal/adapter/graphql/controller_scene.go +++ b/internal/adapter/graphql/controller_scene.go @@ -109,7 +109,7 @@ func (c *SceneController) UninstallPlugin(ctx context.Context, ginput *Uninstall return nil, err } - return &UninstallPluginPayload{Scene: toScene(scene)}, nil + return &UninstallPluginPayload{PluginID: ginput.PluginID, Scene: toScene(scene)}, nil } func (c *SceneController) UpgradePlugin(ctx context.Context, ginput *UpgradePluginInput, operator *usecase.Operator) (*UpgradePluginPayload, error) { diff --git a/internal/adapter/graphql/convert.go b/internal/adapter/graphql/convert.go index f147bba78..a38bb6219 100644 --- a/internal/adapter/graphql/convert.go +++ b/internal/adapter/graphql/convert.go @@ -1,6 +1,8 @@ package graphql import ( + "io" + "github.com/99designs/gqlgen/graphql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" @@ -68,8 +70,8 @@ func fromFile(f *graphql.Upload) *file.File { return nil } return &file.File{ - Content: f.File, - Name: f.Filename, + Content: io.NopCloser(f.File), + Path: f.Filename, Size: f.Size, ContentType: f.ContentType, } diff --git a/internal/adapter/graphql/convert_plugin.go b/internal/adapter/graphql/convert_plugin.go index 3e48bf75d..6abfdead9 100644 --- a/internal/adapter/graphql/convert_plugin.go +++ b/internal/adapter/graphql/convert_plugin.go @@ -29,6 +29,7 @@ func toPlugin(p *plugin.Plugin) *Plugin { return &Plugin{ ID: pid, + SceneID: pid.Scene().IDRef(), Name: p.Name().String(), Description: p.Description().String(), 
AllTranslatedDescription: p.Description(), diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/graphql/models_gen.go index 07f3610c7..8d00ac70a 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/graphql/models_gen.go @@ -572,6 +572,7 @@ type PageInfo struct { type Plugin struct { ID id.PluginID `json:"id"` + SceneID *id.ID `json:"sceneId"` Name string `json:"name"` Version string `json:"version"` Description string `json:"description"` @@ -579,6 +580,7 @@ type Plugin struct { RepositoryURL string `json:"repositoryUrl"` PropertySchemaID *id.PropertySchemaID `json:"propertySchemaId"` Extensions []*PluginExtension `json:"extensions"` + Scene *Scene `json:"scene"` ScenePlugin *ScenePlugin `json:"scenePlugin"` AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` AllTranslatedName map[string]string `json:"allTranslatedName"` @@ -990,8 +992,8 @@ type UninstallPluginInput struct { } type UninstallPluginPayload struct { - Scene *Scene `json:"scene"` - ScenePlugin *ScenePlugin `json:"scenePlugin"` + PluginID id.PluginID `json:"pluginId"` + Scene *Scene `json:"scene"` } type UnlinkPropertyValueInput struct { @@ -1173,11 +1175,15 @@ type UploadFileToPropertyInput struct { } type UploadPluginInput struct { - File graphql.Upload `json:"file"` + SceneID id.ID `json:"sceneId"` + File *graphql.Upload `json:"file"` + URL *url.URL `json:"url"` } type UploadPluginPayload struct { - Plugin *Plugin `json:"plugin"` + Plugin *Plugin `json:"plugin"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } type User struct { diff --git a/internal/app/file.go b/internal/app/file.go index a496ef287..7bfb7b095 100644 --- a/internal/app/file.go +++ b/internal/app/file.go @@ -47,9 +47,9 @@ func serveFiles( ) ec.GET( - "/plugins/:name/:version/:filename", + "/plugins/:plugin/:filename", fileHandler(func(ctx echo.Context) (io.Reader, string, error) { - pid, err := id.PluginIDFrom(ctx.Param("name") + "#" + 
ctx.Param("version")) + pid, err := id.PluginIDFrom(ctx.Param("plugin")) if err != nil { return nil, "", rerror.ErrNotFound } diff --git a/internal/app/repo.go b/internal/app/repo.go index 7050f2759..44a98f65c 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -7,12 +7,12 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/github" "github.com/reearth/reearth-backend/internal/infrastructure/google" + "github.com/spf13/afero" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" mongotrace "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver" - "github.com/reearth/reearth-backend/internal/infrastructure/adapter" "github.com/reearth/reearth-backend/internal/infrastructure/auth0" "github.com/reearth/reearth-backend/internal/infrastructure/fs" "github.com/reearth/reearth-backend/internal/infrastructure/gcs" @@ -41,23 +41,12 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. log.Fatalln(fmt.Sprintf("Failed to init mongo: %+v", err)) } - // Plugin and PropertySchema - if debug { - repos.Plugin = adapter.NewPlugin([]repo.Plugin{ - fs.NewPlugin("data"), - repos.Plugin, - }, repos.Plugin) - repos.PropertySchema = adapter.NewPropertySchema([]repo.PropertySchema{ - fs.NewPropertySchema("data"), - repos.PropertySchema, - }, repos.PropertySchema) - } - // File + datafs := afero.NewBasePathFs(afero.NewOsFs(), "data") var fileRepo gateway.File if conf.GCS.BucketName == "" { log.Infoln("file: local storage is used") - fileRepo, err = fs.NewFile("data", conf.AssetBaseURL) + fileRepo, err = fs.NewFile(datafs, conf.AssetBaseURL) } else { log.Infof("file: GCS storage is used: %s\n", conf.GCS.BucketName) fileRepo, err = gcs.NewFile(conf.GCS.BucketName, conf.AssetBaseURL, conf.GCS.PublicationCacheControl) diff --git a/internal/graphql/generated.go b/internal/graphql/generated.go index f75523500..2816225fa 100644 --- a/internal/graphql/generated.go +++ 
b/internal/graphql/generated.go @@ -518,7 +518,9 @@ type ComplexityRoot struct { PropertySchema func(childComplexity int) int PropertySchemaID func(childComplexity int) int RepositoryURL func(childComplexity int) int - ScenePlugin func(childComplexity int, sceneID id.ID) int + Scene func(childComplexity int) int + SceneID func(childComplexity int) int + ScenePlugin func(childComplexity int, sceneID *id.ID) int TranslatedDescription func(childComplexity int, lang *string) int TranslatedName func(childComplexity int, lang *string) int Version func(childComplexity int) int @@ -862,8 +864,8 @@ type ComplexityRoot struct { } UninstallPluginPayload struct { - Scene func(childComplexity int) int - ScenePlugin func(childComplexity int) int + PluginID func(childComplexity int) int + Scene func(childComplexity int) int } UpdateDatasetSchemaPayload struct { @@ -897,7 +899,9 @@ type ComplexityRoot struct { } UploadPluginPayload struct { - Plugin func(childComplexity int) int + Plugin func(childComplexity int) int + Scene func(childComplexity int) int + ScenePlugin func(childComplexity int) int } User struct { @@ -1065,6 +1069,8 @@ type MutationResolver interface { ImportLayer(ctx context.Context, input graphql1.ImportLayerInput) (*graphql1.ImportLayerPayload, error) } type PluginResolver interface { + Scene(ctx context.Context, obj *graphql1.Plugin) (*graphql1.Scene, error) + PropertySchema(ctx context.Context, obj *graphql1.Plugin) (*graphql1.PropertySchema, error) } type PluginExtensionResolver interface { @@ -3528,6 +3534,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Plugin.RepositoryURL(childComplexity), true + case "Plugin.scene": + if e.complexity.Plugin.Scene == nil { + break + } + + return e.complexity.Plugin.Scene(childComplexity), true + + case "Plugin.sceneId": + if e.complexity.Plugin.SceneID == nil { + break + } + + return e.complexity.Plugin.SceneID(childComplexity), true + case "Plugin.scenePlugin": if 
e.complexity.Plugin.ScenePlugin == nil { break @@ -3538,7 +3558,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Plugin.ScenePlugin(childComplexity, args["sceneId"].(id.ID)), true + return e.complexity.Plugin.ScenePlugin(childComplexity, args["sceneId"].(*id.ID)), true case "Plugin.translatedDescription": if e.complexity.Plugin.TranslatedDescription == nil { @@ -5293,19 +5313,19 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Typography.Underline(childComplexity), true - case "UninstallPluginPayload.scene": - if e.complexity.UninstallPluginPayload.Scene == nil { + case "UninstallPluginPayload.pluginId": + if e.complexity.UninstallPluginPayload.PluginID == nil { break } - return e.complexity.UninstallPluginPayload.Scene(childComplexity), true + return e.complexity.UninstallPluginPayload.PluginID(childComplexity), true - case "UninstallPluginPayload.scenePlugin": - if e.complexity.UninstallPluginPayload.ScenePlugin == nil { + case "UninstallPluginPayload.scene": + if e.complexity.UninstallPluginPayload.Scene == nil { break } - return e.complexity.UninstallPluginPayload.ScenePlugin(childComplexity), true + return e.complexity.UninstallPluginPayload.Scene(childComplexity), true case "UpdateDatasetSchemaPayload.datasetSchema": if e.complexity.UpdateDatasetSchemaPayload.DatasetSchema == nil { @@ -5377,6 +5397,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.UploadPluginPayload.Plugin(childComplexity), true + case "UploadPluginPayload.scene": + if e.complexity.UploadPluginPayload.Scene == nil { + break + } + + return e.complexity.UploadPluginPayload.Scene(childComplexity), true + + case "UploadPluginPayload.scenePlugin": + if e.complexity.UploadPluginPayload.ScenePlugin == nil { + break + } + + return e.complexity.UploadPluginPayload.ScenePlugin(childComplexity), true + case 
"User.auths": if e.complexity.User.Auths == nil { break @@ -5733,6 +5767,7 @@ enum PublishmentStatus { type Plugin { id: PluginID! + sceneId: ID name: String! version: String! description: String! @@ -5740,7 +5775,8 @@ type Plugin { repositoryUrl: String! propertySchemaId: PropertySchemaID extensions: [PluginExtension!]! - scenePlugin(sceneId: ID!): ScenePlugin + scene: Scene @goField(forceResolver: true) + scenePlugin(sceneId: ID): ScenePlugin allTranslatedDescription: TranslatedString allTranslatedName: TranslatedString translatedName(lang: String): String! @@ -6304,7 +6340,9 @@ input UpdateProjectInput { } input UploadPluginInput { - file: Upload! + sceneId: ID! + file: Upload + url: URL } input CreateSceneInput { @@ -6652,6 +6690,8 @@ type DeleteProjectPayload { type UploadPluginPayload { plugin: Plugin! + scene: Scene! + scenePlugin: ScenePlugin! } type CreateScenePayload { @@ -6680,8 +6720,8 @@ type InstallPluginPayload { } type UninstallPluginPayload { + pluginId: PluginID! scene: Scene! - scenePlugin: ScenePlugin! 
} type UpgradePluginPayload { @@ -7938,10 +7978,10 @@ func (ec *executionContext) field_PluginExtension_translatedName_args(ctx contex func (ec *executionContext) field_Plugin_scenePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 *id.ID if tmp, ok := rawArgs["sceneId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) if err != nil { return nil, err } @@ -18566,6 +18606,38 @@ func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.Collec return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } +func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); 
r != nil { @@ -18808,6 +18880,38 @@ func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphq return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionแš„(ctx, field.Selections, res) } +func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Plugin", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Plugin().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -26952,7 +27056,7 @@ func (ec *executionContext) _Typography_underline(ctx context.Context, field gra return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -26970,7 +27074,7 @@ func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, f ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Scene, nil + return obj.PluginID, nil }) if err != nil { ec.Error(ctx, err) @@ -26982,12 +27086,12 @@ func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, f } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(id.PluginID) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _UninstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27005,7 +27109,7 @@ func (ec *executionContext) _UninstallPluginPayload_scenePlugin(ctx context.Cont ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.ScenePlugin, nil + return obj.Scene, nil }) if err != nil { ec.Error(ctx, err) @@ -27017,9 +27121,9 @@ func (ec *executionContext) _UninstallPluginPayload_scenePlugin(ctx context.Cont } return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*graphql1.Scene) fc.Result = res - return 
ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) } func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { @@ -27369,6 +27473,76 @@ func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, fie return ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) _UploadPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UploadPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UploadPluginPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ScenePlugin, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*graphql1.ScenePlugin) + fc.Result = res + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) +} + func (ec *executionContext) _User_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -31153,11 +31327,27 @@ func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, for k, v := range asMap { switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } case "file": var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("file")) - it.File, err = ec.unmarshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + it.File, err = ec.unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx, v) + if err != nil { + return it, err + } + case "url": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("url")) + it.URL, err = ec.unmarshalOURL2แš–netแš‹urlแšURL(ctx, v) if err != nil { return it, err } @@ -33880,6 +34070,8 @@ func (ec *executionContext) _Plugin(ctx 
context.Context, sel ast.SelectionSet, o if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "sceneId": + out.Values[i] = ec._Plugin_sceneId(ctx, field, obj) case "name": out.Values[i] = ec._Plugin_name(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -33912,6 +34104,17 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_scene(ctx, field, obj) + return res + }) case "scenePlugin": out.Values[i] = ec._Plugin_scenePlugin(ctx, field, obj) case "allTranslatedDescription": @@ -36237,13 +36440,13 @@ func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("UninstallPluginPayload") - case "scene": - out.Values[i] = ec._UninstallPluginPayload_scene(ctx, field, obj) + case "pluginId": + out.Values[i] = ec._UninstallPluginPayload_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } - case "scenePlugin": - out.Values[i] = ec._UninstallPluginPayload_scenePlugin(ctx, field, obj) + case "scene": + out.Values[i] = ec._UninstallPluginPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } @@ -36470,6 +36673,16 @@ func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.Se if out.Values[i] == graphql.Null { invalids++ } + case "scene": + out.Values[i] = ec._UploadPluginPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "scenePlugin": + out.Values[i] = ec._UploadPluginPayload_scenePlugin(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -40445,6 +40658,21 @@ func (ec 
*executionContext) marshalOUpgradePluginPayload2แš–githubแš—comแš‹reear return ec._UpgradePluginPayload(ctx, sel, v) } +func (ec *executionContext) unmarshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, v interface{}) (*graphql.Upload, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalUpload(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแšUpload(ctx context.Context, sel ast.SelectionSet, v *graphql.Upload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return graphql.MarshalUpload(*v) +} + func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UploadPluginPayload) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/graphql/resolver_plugin.go b/internal/graphql/resolver_plugin.go index 50d5a0249..2a5aac651 100644 --- a/internal/graphql/resolver_plugin.go +++ b/internal/graphql/resolver_plugin.go @@ -28,11 +28,27 @@ func (r *pluginResolver) PropertySchema(ctx context.Context, obj *graphql1.Plugi return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.PropertySchemaID) } -func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *graphql1.Plugin, sceneID id.ID) (*graphql1.ScenePlugin, error) { +func (r *pluginResolver) Scene(ctx context.Context, obj *graphql1.Plugin) (*graphql1.Scene, error) { exit := trace(ctx) defer exit() - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(sceneID)) + if obj.SceneID == nil { + return nil, nil + } + return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*obj.SceneID)) +} + +func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *graphql1.Plugin, sceneID *id.ID) (*graphql1.ScenePlugin, error) { + exit := trace(ctx) + 
defer exit() + + if sceneID == nil && obj.SceneID != nil { + sceneID = obj.SceneID + } + if sceneID == nil { + return nil, nil + } + s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*sceneID)) return s.Plugin(obj.ID), err } diff --git a/internal/infrastructure/adapter/plugin.go b/internal/infrastructure/adapter/plugin.go index e2992976d..c956d03e7 100644 --- a/internal/infrastructure/adapter/plugin.go +++ b/internal/infrastructure/adapter/plugin.go @@ -24,9 +24,9 @@ func NewPlugin(readers []repo.Plugin, writer repo.Plugin) repo.Plugin { } } -func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { +func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { for _, re := range r.readers { - if res, err := re.FindByID(ctx, id); err != nil { + if res, err := re.FindByID(ctx, id, sids); err != nil { if errors.Is(err, rerror.ErrNotFound) { continue } else { @@ -39,10 +39,10 @@ func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plug return nil, rerror.ErrNotFound } -func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { results := make([]*plugin.Plugin, 0, len(ids)) for _, id := range ids { - res, err := r.FindByID(ctx, id) + res, err := r.FindByID(ctx, id, sids) if err != nil && err != rerror.ErrNotFound { return nil, err } @@ -57,3 +57,10 @@ func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { } return r.writer.Save(ctx, p) } + +func (r *pluginRepo) Remove(ctx context.Context, p id.PluginID) error { + if r.writer == nil { + return errors.New("cannot write") + } + return r.writer.Remove(ctx, p) +} diff --git a/internal/infrastructure/adapter/property_schema.go b/internal/infrastructure/adapter/property_schema.go index 86fe4e3a4..af3bbeaec 100644 --- 
a/internal/infrastructure/adapter/property_schema.go +++ b/internal/infrastructure/adapter/property_schema.go @@ -64,3 +64,17 @@ func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) err } return r.writer.SaveAll(ctx, p) } + +func (r *propertySchema) Remove(ctx context.Context, p id.PropertySchemaID) error { + if r.writer == nil { + return rerror.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.Remove(ctx, p) +} + +func (r *propertySchema) RemoveAll(ctx context.Context, p []id.PropertySchemaID) error { + if r.writer == nil { + return rerror.ErrInternalBy(errors.New("writer is not set")) + } + return r.writer.RemoveAll(ctx, p) +} diff --git a/internal/infrastructure/fs/archive.go b/internal/infrastructure/fs/archive.go deleted file mode 100644 index 64f0f781c..000000000 --- a/internal/infrastructure/fs/archive.go +++ /dev/null @@ -1,115 +0,0 @@ -package fs - -import ( - "os" - "path" - "strings" - - "github.com/reearth/reearth-backend/pkg/file" - "github.com/reearth/reearth-backend/pkg/rerror" -) - -type archive struct { - p string - files []string - counter int - name string - size int64 - fi *os.File -} - -// NewArchive _ -func NewArchive(p string) (file.Archive, error) { - bp := strings.TrimSuffix(p, "/") - files, size, err := dirwalk(bp, "", 0) - if err != nil { - if os.IsNotExist(err) { - return nil, rerror.ErrNotFound - } - return nil, rerror.ErrInternalBy(err) - } - return &archive{ - p: bp, - files: files, - counter: 0, - name: path.Base(p), - size: size, - }, nil -} - -// Next _ -func (a *archive) Next() (f *file.File, derr error) { - if len(a.files) <= a.counter { - return nil, file.EOF - } - next := a.files[a.counter] - a.counter++ - fi, err := os.Open(path.Join(a.p, next)) - if err != nil { - derr = rerror.ErrInternalBy(err) - return - } - stat, err := fi.Stat() - if err != nil { - derr = rerror.ErrInternalBy(err) - return - } - - f = &file.File{ - Content: fi, - Name: stat.Name(), - Fullpath: 
strings.TrimPrefix(next, a.p+"/"), - Size: stat.Size(), - } - return -} - -// Close _ -func (a *archive) Close() error { - if a.fi != nil { - if err := a.fi.Close(); err != nil { - return rerror.ErrInternalBy(err) - } - a.fi = nil - } - return nil -} - -// Name _ -func (a *archive) Name() string { - return a.name -} - -// Size _ -func (a *archive) Size() int64 { - return a.size -} - -func dirwalk(dir string, base string, size int64) ([]string, int64, error) { - files, err := os.ReadDir(dir) - if err != nil { - return []string{}, 0, err - } - - var paths []string - for _, file := range files { - if file.IsDir() { - fname := file.Name() - dfiles, dsize, err := dirwalk(path.Join(dir, fname), path.Join(base, fname), size) - if err != nil { - return []string{}, 0, err - } - paths = append(paths, dfiles...) - size += dsize - continue - } - paths = append(paths, path.Join(base, file.Name())) - fileInfo, err := file.Info() - if err != nil { - return []string{}, 0, err - } - size += fileInfo.Size() - } - - return paths, size, nil -} diff --git a/internal/infrastructure/fs/common.go b/internal/infrastructure/fs/common.go index cecff5fb6..c4bfccff1 100644 --- a/internal/infrastructure/fs/common.go +++ b/internal/infrastructure/fs/common.go @@ -1,50 +1,34 @@ package fs import ( - "net/url" - "path" - "strings" + "path/filepath" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" ) const ( - manifestFilePath = "reearth.json" assetDir = "assets" pluginDir = "plugins" publishedDir = "published" + manifestFilePath = "reearth.yml" ) -func getPluginFilePath(base string, pluginID id.PluginID, filename string) string { - return path.Join(base, pluginDir, pluginID.Name(), pluginID.Version().String(), filename) -} - -func getAssetFilePath(base string, filename string) string { - return path.Join(base, assetDir, filename) -} - -func 
getPublishedDataFilePath(base, name string) string { - return path.Join(base, publishedDir, name+".json") -} - -func getAssetFileURL(base *url.URL, filename string) *url.URL { - if base == nil { - return nil +func readManifest(fs afero.Fs, pid id.PluginID) (*manifest.Manifest, error) { + f, err := fs.Open(filepath.Join(pluginDir, pid.String(), manifestFilePath)) + if err != nil { + return nil, rerror.ErrInternalBy(err) } + defer func() { + _ = f.Close() + }() - b := *base - b.Path = path.Join(b.Path, filename) - return &b -} - -func getAssetFilePathFromURL(base string, u *url.URL) string { - if u == nil { - return "" + m, err := manifest.Parse(f, nil) + if err != nil { + return nil, err } - p := strings.Split(u.Path, "/") - if len(p) == 0 { - return "" - } - f := p[len(p)-1] - return getAssetFilePath(base, f) + + return m, nil } diff --git a/internal/infrastructure/fs/common_test.go b/internal/infrastructure/fs/common_test.go deleted file mode 100644 index 3cc762b73..000000000 --- a/internal/infrastructure/fs/common_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package fs - -import ( - "net/url" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetAssetFileURL(t *testing.T) { - e, err := url.Parse("http://hoge.com/assets/xxx.yyy") - assert.NoError(t, err) - b, err := url.Parse("http://hoge.com/assets") - assert.NoError(t, err) - assert.Equal(t, e, getAssetFileURL(b, "xxx.yyy")) -} - -func TestGetAssetFilePathFromURL(t *testing.T) { - u, err := url.Parse("http://hoge.com/assets/xxx.yyy") - assert.NoError(t, err) - assert.Equal(t, "a/assets/xxx.yyy", getAssetFilePathFromURL("a", u)) -} diff --git a/internal/infrastructure/fs/file.go b/internal/infrastructure/fs/file.go index c69318623..a0d4d643b 100644 --- a/internal/infrastructure/fs/file.go +++ b/internal/infrastructure/fs/file.go @@ -7,20 +7,22 @@ import ( "net/url" "os" "path" + "path/filepath" + "github.com/kennygrant/sanitize" "github.com/reearth/reearth-backend/internal/usecase/gateway" 
"github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" ) type fileRepo struct { - basePath string - urlBase *url.URL + fs afero.Fs + urlBase *url.URL } -func NewFile(basePath, urlBase string) (gateway.File, error) { +func NewFile(fs afero.Fs, urlBase string) (gateway.File, error) { var b *url.URL var err error b, err = url.Parse(urlBase) @@ -29,193 +31,128 @@ func NewFile(basePath, urlBase string) (gateway.File, error) { } return &fileRepo{ - basePath: basePath, - urlBase: b, + fs: fs, + urlBase: b, }, nil } -func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error) { - filename := getAssetFilePath(f.basePath, name) - file, err := os.Open(filename) - if err != nil { - if os.IsNotExist(err) { - return nil, rerror.ErrNotFound - } - return nil, rerror.ErrInternalBy(err) - } - return file, nil -} +// asset -func (f *fileRepo) ReadPluginFile(ctx context.Context, id id.PluginID, p string) (io.Reader, error) { - filename := getPluginFilePath(f.basePath, id, p) - file, err := os.Open(filename) - if err != nil { - if os.IsNotExist(err) { - return nil, rerror.ErrNotFound - } - return nil, rerror.ErrInternalBy(err) - } - return file, nil +func (f *fileRepo) ReadAsset(ctx context.Context, filename string) (io.ReadCloser, error) { + return f.read(ctx, filepath.Join(assetDir, sanitize.Path(filename))) } -func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Reader, error) { - filename := getPublishedDataFilePath(f.basePath, name) - file, err := os.Open(filename) - if err != nil { - if os.IsNotExist(err) { - return nil, rerror.ErrNotFound - } - return nil, rerror.ErrInternalBy(err) +func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { + filename := sanitize.Path(id.New().String() + path.Ext(file.Path)) + if err := f.upload(ctx, 
filepath.Join(assetDir, filename), file.Content); err != nil { + return nil, err } - return file, nil + return getAssetFileURL(f.urlBase, filename), nil } -func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { - if f == nil || f.urlBase == nil { - return nil, errors.New("cannot upload asset because of url lack") +func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { + if u == nil { + return nil } - if file == nil { - return nil, gateway.ErrInvalidFile + p := sanitize.Path(u.Path) + if p == "" || f.urlBase == nil || u.Scheme != f.urlBase.Scheme || u.Host != f.urlBase.Host || path.Dir(p) != f.urlBase.Path { + return gateway.ErrInvalidFile } + return f.delete(ctx, filepath.Join(assetDir, path.Base(p))) +} - base := path.Join(f.basePath, "assets") - err := os.MkdirAll(base, 0755) - if err != nil { - return nil, gateway.ErrFailedToUploadFile - // return nil, repo.ErrFailedToUploadFile.CausedBy(err) - } +// plugin - // calc checksum - // hasher := sha256.New() - // tr := io.TeeReader(file.Content, hasher) - // checksum := hex.EncodeToString(hasher.Sum(nil)) +func (f *fileRepo) ReadPluginFile(ctx context.Context, pid id.PluginID, filename string) (io.ReadCloser, error) { + return f.read(ctx, filepath.Join(pluginDir, pid.String(), sanitize.Path(filename))) +} - id := id.New().String() - filename := id + path.Ext(file.Name) - name := getAssetFilePath(f.basePath, filename) +func (f *fileRepo) UploadPluginFile(ctx context.Context, pid id.PluginID, file *file.File) error { + return f.upload(ctx, filepath.Join(pluginDir, pid.String(), sanitize.Path(file.Path)), file.Content) +} - dest, err2 := os.Create(name) - if err2 != nil { - return nil, gateway.ErrFailedToUploadFile - // return nil, repo.ErrFailedToUploadFile.CausedBy(err2) - } - defer func() { - _ = dest.Close() - }() - if _, err := io.Copy(dest, file.Content); err != nil { - return nil, gateway.ErrFailedToUploadFile - // return nil, 
repo.ErrFailedToUploadFile.CausedBy(err) - } +func (f *fileRepo) RemovePlugin(ctx context.Context, pid id.PluginID) error { + return f.delete(ctx, filepath.Join(pluginDir, pid.String())) +} - return getAssetFileURL(f.urlBase, filename), nil +// built scene + +func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.ReadCloser, error) { + return f.read(ctx, filepath.Join(publishedDir, sanitize.Path(name+".json"))) } -func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { - if u == nil { - return gateway.ErrInvalidFile - } +func (f *fileRepo) UploadBuiltScene(ctx context.Context, reader io.Reader, name string) error { + return f.upload(ctx, filepath.Join(publishedDir, sanitize.Path(name+".json")), reader) +} - p := getAssetFilePathFromURL(f.basePath, u) - if p != "" { - if err := os.Remove(p); err != nil { - if os.IsNotExist(err) { - return nil - } - return gateway.ErrFailedToRemoveFile - } - } +func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { + return f.move( + ctx, + filepath.Join(publishedDir, sanitize.Path(oldName+".json")), + filepath.Join(publishedDir, sanitize.Path(name+".json")), + ) +} - return nil +func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { + return f.delete(ctx, filepath.Join(publishedDir, sanitize.Path(name+".json"))) } -func (f *fileRepo) UploadAndExtractPluginFiles(ctx context.Context, archive file.Archive, plugin *plugin.Plugin) (*url.URL, error) { - defer func() { - _ = archive.Close() - }() - base := getPluginFilePath(f.basePath, plugin.ID(), "") - url, _ := url.Parse(base) - - for { - err := func() error { - f, err := archive.Next() - if errors.Is(err, file.EOF) { - return err - } - name := path.Join(base, f.Fullpath) - fbase := path.Dir(name) - err2 := os.MkdirAll(fbase, 0755) - if err2 != nil { - return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err2) - } - dest, err2 := os.Create(name) - if err2 != nil { - 
return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err2) - } - defer func() { - _ = dest.Close() - }() - if _, err := io.Copy(dest, f.Content); err != nil { - return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err) - } - return nil - }() +// helpers - if errors.Is(err, file.EOF) { - break - } - if err != nil { - return nil, err - } +func (f *fileRepo) read(ctx context.Context, filename string) (io.ReadCloser, error) { + if filename == "" { + return nil, rerror.ErrNotFound } - return url, nil + file, err := f.fs.Open(filename) + if err != nil { + if os.IsNotExist(err) { + return nil, rerror.ErrNotFound + } + return nil, rerror.ErrInternalBy(err) + } + return file, nil } -func (f *fileRepo) UploadBuiltScene(ctx context.Context, reader io.Reader, name string) error { - filename := getPublishedDataFilePath(f.basePath, name) - err := os.MkdirAll(path.Dir(filename), 0755) - if err != nil { +func (f *fileRepo) upload(ctx context.Context, filename string, content io.Reader) error { + if filename == "" { return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err) } - dest, err2 := os.Create(filename) - if err2 != nil { - return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err2) + if fnd := path.Dir(filename); fnd != "" { + if err := f.fs.MkdirAll(fnd, 0755); err != nil { + return rerror.ErrInternalBy(err) + } + } + + dest, err := f.fs.Create(filename) + if err != nil { + return rerror.ErrInternalBy(err) } defer func() { _ = dest.Close() }() - if _, err := io.Copy(dest, reader); err != nil { + + if _, err := io.Copy(dest, content); err != nil { return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err) } return nil } -func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { - if oldName == name { - return nil +func (f *fileRepo) move(ctx context.Context, from, dest string) error { + if 
from == "" || dest == "" || from == dest { + return gateway.ErrInvalidFile } - filename := getPublishedDataFilePath(f.basePath, oldName) - newfilename := getPublishedDataFilePath(f.basePath, name) - err := os.MkdirAll(path.Dir(newfilename), 0755) - if err != nil { - return gateway.ErrFailedToUploadFile - // return repo.ErrFailedToUploadFile.CausedBy(err) + if destd := path.Dir(dest); destd != "" { + if err := f.fs.MkdirAll(destd, 0755); err != nil { + return rerror.ErrInternalBy(err) + } } - if err := os.Rename( - filename, - newfilename, - ); err != nil { - if errors.Is(err, os.ErrNotExist) { + if err := f.fs.Rename(from, dest); err != nil { + if os.IsNotExist(err) { return rerror.ErrNotFound } return rerror.ErrInternalBy(err) @@ -224,13 +161,26 @@ func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) err return nil } -func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { - filename := getPublishedDataFilePath(f.basePath, name) - if err := os.Remove(filename); err != nil { - if errors.Is(err, os.ErrNotExist) { +func (f *fileRepo) delete(ctx context.Context, filename string) error { + if filename == "" { + return gateway.ErrFailedToUploadFile + } + + if err := f.fs.RemoveAll(filename); err != nil { + if os.IsNotExist(err) { return nil } return rerror.ErrInternalBy(err) } return nil } + +func getAssetFileURL(base *url.URL, filename string) *url.URL { + if base == nil { + return nil + } + + b := *base + b.Path = path.Join(b.Path, filename) + return &b +} diff --git a/internal/infrastructure/fs/file_test.go b/internal/infrastructure/fs/file_test.go new file mode 100644 index 000000000..592e75120 --- /dev/null +++ b/internal/infrastructure/fs/file_test.go @@ -0,0 +1,263 @@ +package fs + +import ( + "context" + "io" + "net/url" + "os" + "path" + "path/filepath" + "strings" + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/file" + 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestNewFile(t *testing.T) { + f, err := NewFile(mockFs(), "") + assert.NoError(t, err) + assert.NotNil(t, f) +} + +func TestFile_ReadAsset(t *testing.T) { + f, _ := NewFile(mockFs(), "") + + r, err := f.ReadAsset(context.Background(), "xxx.txt") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "hello", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadAsset(context.Background(), "aaa.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadAsset(context.Background(), "../published/s.json") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_UploadAsset(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "https://example.com/assets") + + u, err := f.UploadAsset(context.Background(), &file.File{ + Path: "aaa.txt", + Content: io.NopCloser(strings.NewReader("aaa")), + }) + assert.NoError(t, err) + assert.Equal(t, "https", u.Scheme) + assert.Equal(t, "example.com", u.Host) + assert.True(t, strings.HasPrefix(u.Path, "/assets/")) + assert.Equal(t, ".txt", path.Ext(u.Path)) + + uf, _ := fs.Open(filepath.Join("assets", path.Base(u.Path))) + c, _ := io.ReadAll(uf) + assert.Equal(t, "aaa", string(c)) +} + +func TestFile_RemoveAsset(t *testing.T) { + cases := []struct { + Name string + URL string + Deleted bool + Err error + }{ + { + Name: "deleted", + URL: "https://example.com/assets/xxx.txt", + Deleted: true, + }, + { + Name: "not deleted 1", + URL: "https://example.com/assets/aaa.txt", + Err: nil, + }, + { + Name: "not deleted 2", + URL: "https://example.com/plugins/xxx.txt", + Err: gateway.ErrInvalidFile, + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + fs := mockFs() + f, _ := NewFile(fs, "https://example.com/assets") + + 
u, _ := url.Parse(tc.URL) + err := f.RemoveAsset(context.Background(), u) + + if tc.Err == nil { + assert.NoError(tt, err) + } else { + assert.Same(tt, tc.Err, err) + } + + _, err = fs.Stat(filepath.Join("assets", "xxx.txt")) + if tc.Deleted { + assert.ErrorIs(tt, err, os.ErrNotExist) + } else { + assert.NoError(tt, err) + } + }) + } +} + +func TestFile_ReadPluginFile(t *testing.T) { + f, _ := NewFile(mockFs(), "") + + r, err := f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.0"), "foo.js") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "bar", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.0"), "aaa.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.1"), "foo.js") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.1"), "../../assets/xxx.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_UploadPluginFile(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.UploadPluginFile(context.Background(), id.MustPluginID("aaa~1.0.1"), &file.File{ + Path: "aaa.js", + Content: io.NopCloser(strings.NewReader("aaa")), + }) + assert.NoError(t, err) + + uf, _ := fs.Open(filepath.Join("plugins", "aaa~1.0.1", "aaa.js")) + c, _ := io.ReadAll(uf) + assert.Equal(t, "aaa", string(c)) +} + +func TestFile_RemovePluginFile(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.RemovePlugin(context.Background(), id.MustPluginID("aaa~1.0.1")) + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("plugins", "aaa~1.0.0")) + assert.NoError(t, err) + + err = f.RemovePlugin(context.Background(), id.MustPluginID("aaa~1.0.0")) + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("plugins", 
"aaa~1.0.0")) + assert.ErrorIs(t, err, os.ErrNotExist) +} + +func TestFile_ReadBuiltSceneFile(t *testing.T) { + f, _ := NewFile(mockFs(), "") + + r, err := f.ReadBuiltSceneFile(context.Background(), "s") + assert.NoError(t, err) + c, err := io.ReadAll(r) + assert.NoError(t, err) + assert.Equal(t, "{}", string(c)) + assert.NoError(t, r.Close()) + + r, err = f.ReadBuiltSceneFile(context.Background(), "a") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) + + r, err = f.ReadBuiltSceneFile(context.Background(), "../assets/xxx.txt") + assert.ErrorIs(t, err, rerror.ErrNotFound) + assert.Nil(t, r) +} + +func TestFile_UploadBuiltScene(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.UploadBuiltScene(context.Background(), io.NopCloser(strings.NewReader("{\"aaa\":1}")), "a") + assert.NoError(t, err) + + uf, _ := fs.Open(filepath.Join("published", "a.json")) + c, _ := io.ReadAll(uf) + assert.Equal(t, "{\"aaa\":1}", string(c)) +} + +func TestFile_MoveBuiltScene(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + uf, _ := fs.Open(filepath.Join("published", "s.json")) + c, _ := io.ReadAll(uf) + assert.Equal(t, "{}", string(c)) + + uf, err := fs.Open(filepath.Join("published", "a.json")) + assert.ErrorIs(t, err, os.ErrNotExist) + assert.Nil(t, uf) + + err = f.MoveBuiltScene(context.Background(), "s", "a") + assert.NoError(t, err) + + uf, err = fs.Open(filepath.Join("published", "s.json")) + assert.ErrorIs(t, err, os.ErrNotExist) + assert.Nil(t, uf) + + uf, _ = fs.Open(filepath.Join("published", "a.json")) + c, _ = io.ReadAll(uf) + assert.Equal(t, "{}", string(c)) +} + +func TestFile_RemoveBuiltScene(t *testing.T) { + fs := mockFs() + f, _ := NewFile(fs, "") + + err := f.RemoveBuiltScene(context.Background(), "a") + assert.NoError(t, err) + + _, err = fs.Stat(filepath.Join("published", "s.json")) + assert.NoError(t, err) + + err = f.RemoveBuiltScene(context.Background(), "s") + assert.NoError(t, err) + + _, err = 
fs.Stat(filepath.Join("published", "s.json")) + assert.ErrorIs(t, err, os.ErrNotExist) +} + +func TestGetAssetFileURL(t *testing.T) { + e, err := url.Parse("http://hoge.com/assets/xxx.yyy") + assert.NoError(t, err) + b, err := url.Parse("http://hoge.com/assets") + assert.NoError(t, err) + assert.Equal(t, e, getAssetFileURL(b, "xxx.yyy")) +} + +func mockFs() afero.Fs { + fs := afero.NewMemMapFs() + f, _ := fs.Create("assets/xxx.txt") + _, _ = f.WriteString("hello") + _ = f.Close() + f, _ = fs.Create("plugins/aaa~1.0.0/foo.js") + _, _ = f.WriteString("bar") + _ = f.Close() + f, _ = fs.Create("published/s.json") + _, _ = f.WriteString("{}") + _ = f.Close() + return fs +} diff --git a/internal/infrastructure/fs/plugin.go b/internal/infrastructure/fs/plugin.go index 2286f8c3c..a201b5146 100644 --- a/internal/infrastructure/fs/plugin.go +++ b/internal/infrastructure/fs/plugin.go @@ -3,55 +3,42 @@ package fs import ( "context" "errors" - "os" - "path" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" - "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" ) type pluginRepo struct { - basePath string + fs afero.Fs } -func NewPlugin(basePath string) repo.Plugin { +func NewPlugin(fs afero.Fs) repo.Plugin { return &pluginRepo{ - basePath: basePath, + fs: fs, } } -func (r *pluginRepo) manifest(ctx context.Context, id id.PluginID) string { - return path.Join(getPluginFilePath(r.basePath, id, manifestFilePath)) -} - -func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { - filename := r.manifest(ctx, id) - if _, err := os.Stat(filename); err != nil { - return nil, rerror.ErrNotFound - } - file, err := os.Open(filename) +func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { + m, err := readManifest(r.fs, pid) 
if err != nil { - return nil, rerror.ErrInternalBy(err) + return nil, err } - defer func() { - _ = file.Close() - }() - m, err := manifest.Parse(file) - if err != nil { - return nil, rerror.ErrInternalBy(err) + sid := m.Plugin.ID().Scene() + if sid != nil && !sid.Contains(sids) { + return nil, nil } return m.Plugin, nil } -func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { results := make([]*plugin.Plugin, 0, len(ids)) for _, id := range ids { - res, err := r.FindByID(ctx, id) + res, err := r.FindByID(ctx, id, sids) if err != nil { return nil, err } @@ -63,3 +50,7 @@ func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugi func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { return rerror.ErrInternalBy(errors.New("read only")) } + +func (r *pluginRepo) Remove(ctx context.Context, pid id.PluginID) error { + return rerror.ErrInternalBy(errors.New("read only")) +} diff --git a/internal/infrastructure/fs/plugin_repository.go b/internal/infrastructure/fs/plugin_repository.go index 1ad928c21..aae3a63e0 100644 --- a/internal/infrastructure/fs/plugin_repository.go +++ b/internal/infrastructure/fs/plugin_repository.go @@ -2,61 +2,29 @@ package fs import ( "context" - "errors" - "path" + "path/filepath" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin/manifest" - "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" ) type pluginRepository struct { - basePath string + fs afero.Fs } -func NewPluginRepository(basePath string) gateway.PluginRepository { +func NewPluginRepository(fs afero.Fs) gateway.PluginRepository { return &pluginRepository{ - basePath: basePath, + fs: fs, } } -func (r 
*pluginRepository) Data(ctx context.Context, id id.PluginID) (file.Archive, error) { - return r.getArchive(id) +func (r *pluginRepository) Data(ctx context.Context, id id.PluginID) (file.Iterator, error) { + return file.NewFsIterator(afero.NewBasePathFs(r.fs, filepath.Join(pluginDir, id.String()))) } func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manifest.Manifest, error) { - archive, err := r.getArchive(id) - if err != nil { - return nil, err - } - - defer func() { - _ = archive.Close() - }() - - for { - f, err := archive.Next() - if errors.Is(err, file.EOF) { - break - } - if err != nil { - return nil, rerror.ErrInternalBy(err) - } - if f.Fullpath == manifestFilePath { - m, err := manifest.Parse(f.Content) - if err != nil { - return nil, err - } - return m, nil - } - } - return nil, manifest.ErrFailedToParseManifest -} - -func (r *pluginRepository) getArchive(id id.PluginID) (file.Archive, error) { - return NewArchive( - path.Join(r.basePath, id.Name()+"_"+id.Version().String()), - ) + return readManifest(r.fs, id) } diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go index 5f891924e..064fbdf6c 100644 --- a/internal/infrastructure/fs/property_schema.go +++ b/internal/infrastructure/fs/property_schema.go @@ -3,55 +3,34 @@ package fs import ( "context" "errors" - "os" - "path" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/spf13/afero" ) type propertySchema struct { - basePath string + fs afero.Fs } -func NewPropertySchema(basePath string) repo.PropertySchema { +func NewPropertySchema(fs afero.Fs) repo.PropertySchema { return &propertySchema{ - basePath: basePath, + fs: fs, } } -func (r *propertySchema) manifest(id id.PluginID) string { - return 
path.Join(getPluginFilePath(r.basePath, id, manifestFilePath)) -} - func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (*property.Schema, error) { - pid, err := id.PluginIDFrom(i.Plugin()) - if err != nil { - return nil, rerror.ErrNotFound - } - filename := r.manifest(pid) - if _, err := os.Stat(filename); err != nil { - return nil, rerror.ErrNotFound - } - file, err2 := os.Open(filename) - if err2 != nil { - return nil, rerror.ErrInternalBy(err2) - } - defer func() { - _ = file.Close() - }() - - m, err := manifest.Parse(file) + m, err := readManifest(r.fs, i.Plugin()) if err != nil { - return nil, rerror.ErrInternalBy(err) + return nil, err } if m.Schema != nil && m.Schema.ID() == i { return m.Schema, nil } + for _, ps := range m.ExtensionSchema { if ps == nil { continue @@ -83,3 +62,11 @@ func (r *propertySchema) Save(ctx context.Context, p *property.Schema) error { func (r *propertySchema) SaveAll(ctx context.Context, p property.SchemaList) error { return rerror.ErrInternalBy(errors.New("read only")) } + +func (r *propertySchema) Remove(ctx context.Context, pid id.PropertySchemaID) error { + return rerror.ErrInternalBy(errors.New("read only")) +} + +func (r *propertySchema) RemoveAll(ctx context.Context, pid []id.PropertySchemaID) error { + return rerror.ErrInternalBy(errors.New("read only")) +} diff --git a/internal/infrastructure/gcs/file.go b/internal/infrastructure/gcs/file.go index c28a3fb0b..239d65118 100644 --- a/internal/infrastructure/gcs/file.go +++ b/internal/infrastructure/gcs/file.go @@ -10,12 +10,13 @@ import ( "strings" "cloud.google.com/go/storage" + "github.com/kennygrant/sanitize" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" - "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/rerror" + "google.golang.org/api/iterator" ) const ( @@ 
-54,189 +55,252 @@ func NewFile(bucketName, base string, cacheControl string) (gateway.File, error) }, nil } -func (f *fileRepo) bucket(ctx context.Context) (*storage.BucketHandle, error) { - client, err := storage.NewClient(ctx) - if err != nil { - return nil, err +func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.ReadCloser, error) { + sn := sanitize.Path(name) + if sn == "" { + return nil, rerror.ErrNotFound } - bucket := client.Bucket(f.bucketName) - return bucket, err + return f.read(ctx, path.Join(gcsAssetBasePath, sn)) } -func (f *fileRepo) ReadAsset(ctx context.Context, name string) (io.Reader, error) { - if name == "" { - return nil, rerror.ErrNotFound +func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { + if file == nil { + return nil, gateway.ErrInvalidFile + } + if file.Size >= fileSizeLimit { + return nil, gateway.ErrFileTooLarge } - p := path.Join(gcsAssetBasePath, name) - bucket, err := f.bucket(ctx) - if err != nil { + sn := sanitize.Path(id.New().String() + path.Ext(file.Path)) + if sn == "" { + return nil, gateway.ErrInvalidFile + } + + filename := path.Join(gcsAssetBasePath, sn) + u := getGCSObjectURL(f.base, filename) + if u == nil { + return nil, gateway.ErrInvalidFile + } + + if err := f.upload(ctx, filename, file.Content); err != nil { return nil, err } - log.Infof("gcs: read asset from gs://%s/%s", f.bucketName, p) - reader, err := bucket.Object(p).NewReader(ctx) - if err != nil { - if errors.Is(err, storage.ErrObjectNotExist) { - return nil, rerror.ErrNotFound - } - return nil, rerror.ErrInternalBy(err) + return u, nil +} + +func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { + sn := getGCSObjectNameFromURL(f.base, u) + if sn == "" { + return gateway.ErrInvalidFile } - return reader, nil + return f.delete(ctx, sn) +} + +// plugin + +func (f *fileRepo) ReadPluginFile(ctx context.Context, pid id.PluginID, filename string) (io.ReadCloser, error) { + sn := 
sanitize.Path(filename) + if sn == "" { + return nil, rerror.ErrNotFound + } + return f.read(ctx, path.Join(gcsPluginBasePath, pid.String(), sn)) +} + +func (f *fileRepo) UploadPluginFile(ctx context.Context, pid id.PluginID, file *file.File) error { + sn := sanitize.Path(file.Path) + if sn == "" { + return gateway.ErrInvalidFile + } + return f.upload(ctx, path.Join(gcsPluginBasePath, pid.String(), sanitize.Path(file.Path)), file.Content) } -func (f *fileRepo) ReadPluginFile(ctx context.Context, plugin id.PluginID, name string) (io.Reader, error) { +func (f *fileRepo) RemovePlugin(ctx context.Context, pid id.PluginID) error { + return f.deleteAll(ctx, path.Join(gcsPluginBasePath, pid.String())) +} + +// built scene + +func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.ReadCloser, error) { if name == "" { return nil, rerror.ErrNotFound } + return f.read(ctx, path.Join(gcsMapBasePath, sanitize.Path(name)+".json")) +} - p := path.Join(gcsPluginBasePath, plugin.Name(), plugin.Version().String(), name) - bucket, err := f.bucket(ctx) - if err != nil { - return nil, err +func (f *fileRepo) UploadBuiltScene(ctx context.Context, content io.Reader, name string) error { + sn := sanitize.Path(name + ".json") + if sn == "" { + return gateway.ErrInvalidFile + } + return f.upload(ctx, path.Join(gcsMapBasePath, sn), content) +} + +func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { + from := sanitize.Path(oldName + ".json") + dest := sanitize.Path(name + ".json") + if from == "" || dest == "" { + return gateway.ErrInvalidFile + } + return f.move(ctx, path.Join(gcsMapBasePath, from), path.Join(gcsMapBasePath, dest)) +} + +func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { + sn := sanitize.Path(name + ".json") + if sn == "" { + return gateway.ErrInvalidFile } - log.Infof("gcs: read plugin from gs://%s/%s", f.bucketName, p) - reader, err := bucket.Object(p).NewReader(ctx) + return f.delete(ctx, 
path.Join(gcsMapBasePath, sn)) +} + +// helpers + +func (f *fileRepo) bucket(ctx context.Context) (*storage.BucketHandle, error) { + client, err := storage.NewClient(ctx) if err != nil { - if errors.Is(err, storage.ErrObjectNotExist) { - return nil, rerror.ErrNotFound - } - return nil, rerror.ErrInternalBy(err) + return nil, err } - return reader, nil + bucket := client.Bucket(f.bucketName) + return bucket, nil } -func (f *fileRepo) ReadBuiltSceneFile(ctx context.Context, name string) (io.Reader, error) { - if name == "" { +func (f *fileRepo) read(ctx context.Context, filename string) (io.ReadCloser, error) { + if filename == "" { return nil, rerror.ErrNotFound } - p := path.Join(gcsMapBasePath, name+".json") bucket, err := f.bucket(ctx) if err != nil { - return nil, err + log.Errorf("gcs: read bucket err: %+v\n", err) + return nil, rerror.ErrInternalBy(err) } - log.Infof("gcs: read scene from gs://%s/%s", f.bucketName, p) - reader, err := bucket.Object(p).NewReader(ctx) + reader, err := bucket.Object(filename).NewReader(ctx) if err != nil { if errors.Is(err, storage.ErrObjectNotExist) { return nil, rerror.ErrNotFound } + log.Errorf("gcs: read err: %+v\n", err) return nil, rerror.ErrInternalBy(err) } + return reader, nil } -func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { - if file == nil { - return nil, gateway.ErrInvalidFile - } - if file.Size >= fileSizeLimit { - return nil, gateway.ErrFileTooLarge +func (f *fileRepo) upload(ctx context.Context, filename string, content io.Reader) error { + if filename == "" { + return gateway.ErrInvalidFile } bucket, err := f.bucket(ctx) if err != nil { - return nil, err - } - - // calc checksum - // hasher := sha256.New() - // tr := io.TeeReader(file.Content, hasher) - // checksum := hex.EncodeToString(hasher.Sum(nil)) - - id := id.New().String() - filename := id + path.Ext(file.Name) - name := path.Join(gcsAssetBasePath, filename) - objectURL := getGCSObjectURL(f.base, name) - if 
objectURL == nil { - return nil, gateway.ErrInvalidFile + log.Errorf("gcs: upload bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) } - object := bucket.Object(name) - _, err = object.Attrs(ctx) - if !errors.Is(err, storage.ErrObjectNotExist) { - log.Errorf("gcs: err=%+v\n", err) - return nil, gateway.ErrFailedToUploadFile + object := bucket.Object(filename) + if err := object.Delete(ctx); err != nil && !errors.Is(err, storage.ErrObjectNotExist) { + log.Errorf("gcs: upload delete err: %+v\n", err) + return gateway.ErrFailedToUploadFile } writer := object.NewWriter(ctx) - if _, err := io.Copy(writer, file.Content); err != nil { - log.Errorf("gcs: err=%+v\n", err) - return nil, gateway.ErrFailedToUploadFile + writer.ObjectAttrs.CacheControl = f.cacheControl + + if _, err := io.Copy(writer, content); err != nil { + log.Errorf("gcs: upload err: %+v\n", err) + return gateway.ErrFailedToUploadFile } + if err := writer.Close(); err != nil { - log.Errorf("gcs: err=%+v\n", err) - return nil, gateway.ErrFailedToUploadFile + log.Errorf("gcs: upload close err: %+v\n", err) + return gateway.ErrFailedToUploadFile } - return objectURL, nil + return nil } -func (f *fileRepo) RemoveAsset(ctx context.Context, u *url.URL) error { - if u == nil { +func (f *fileRepo) move(ctx context.Context, from, dest string) error { + if from == "" || dest == "" || from == dest { return gateway.ErrInvalidFile } - name := getGCSObjectNameFromURL(f.base, u) - if name == "" { + + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: move bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + object := bucket.Object(from) + destObject := bucket.Object(dest) + if _, err := destObject.CopierFrom(object).Run(ctx); err != nil { + if errors.Is(err, storage.ErrObjectNotExist) { + return rerror.ErrNotFound + } + log.Errorf("gcs: move copy err: %+v\n", err) + return rerror.ErrInternalBy(err) + } + + if err := object.Delete(ctx); err != nil { + log.Errorf("gcs: move delete err: 
%+v\n", err) + return rerror.ErrInternalBy(err) + } + + return nil +} + +func (f *fileRepo) delete(ctx context.Context, filename string) error { + if filename == "" { return gateway.ErrInvalidFile } + bucket, err := f.bucket(ctx) if err != nil { - return err + log.Errorf("gcs: delete bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) } - object := bucket.Object(name) + + object := bucket.Object(filename) if err := object.Delete(ctx); err != nil { if errors.Is(err, storage.ErrObjectNotExist) { return nil } + + log.Errorf("gcs: delete err: %+v\n", err) return rerror.ErrInternalBy(err) } return nil } -func (f *fileRepo) UploadAndExtractPluginFiles(ctx context.Context, archive file.Archive, plugin *plugin.Plugin) (*url.URL, error) { - defer func() { - _ = archive.Close() - }() +func (f *fileRepo) deleteAll(ctx context.Context, path string) error { + if path == "" { + return gateway.ErrInvalidFile + } - basePath := path.Join(gcsPluginBasePath, plugin.ID().Name(), plugin.Version().String()) - objectURL := getGCSObjectURL(f.base, basePath) - if objectURL == nil { - return nil, gateway.ErrInvalidFile + bucket, err := f.bucket(ctx) + if err != nil { + log.Errorf("gcs: deleteAll bucket err: %+v\n", err) + return rerror.ErrInternalBy(err) } + it := bucket.Objects(ctx, &storage.Query{ + Prefix: path, + }) + for { - ff, err := archive.Next() - if errors.Is(err, file.EOF) { + attrs, err := it.Next() + if err == iterator.Done { break } - bucket, err := f.bucket(ctx) if err != nil { - return nil, err - } - name := path.Join(basePath, ff.Fullpath) - object := bucket.Object(name) - _, err2 := object.Attrs(ctx) - if errors.Is(err2, storage.ErrBucketNotExist) { - return nil, gateway.ErrFailedToUploadFile - } else if !errors.Is(err2, storage.ErrObjectNotExist) { - // does not overwrite - continue + log.Errorf("gcs: deleteAll next err: %+v\n", err) + return rerror.ErrInternalBy(err) } - - writer := object.NewWriter(ctx) - if _, err := io.Copy(writer, ff.Content); err != nil { - 
log.Errorf("gcs: err=%+v\n", err) - return nil, gateway.ErrFailedToUploadFile - } - if err := writer.Close(); err != nil { - log.Errorf("gcs: err=%+v\n", err) - return nil, gateway.ErrFailedToUploadFile + if err := bucket.Object(attrs.Name).Delete(ctx); err != nil { + log.Errorf("gcs: deleteAll err: %+v\n", err) + return rerror.ErrInternalBy(err) } } - - return objectURL, nil + return nil } func getGCSObjectURL(base *url.URL, objectName string) *url.URL { @@ -252,75 +316,13 @@ func getGCSObjectNameFromURL(base, u *url.URL) string { if u == nil { return "" } - bp := "" - if base != nil { - bp = base.Path - } - return strings.TrimPrefix(strings.TrimPrefix(u.Path, bp), "/") -} - -func (f *fileRepo) UploadBuiltScene(ctx context.Context, reader io.Reader, name string) error { - filename := path.Join(gcsMapBasePath, name+".json") - bucket, err := f.bucket(ctx) - if err != nil { - return err - } - object := bucket.Object(filename) - - if err := object.Delete(ctx); err != nil && !errors.Is(err, storage.ErrObjectNotExist) { - log.Errorf("gcs: err=%+v\n", err) - return gateway.ErrFailedToUploadFile - } - - writer := object.NewWriter(ctx) - writer.ObjectAttrs.CacheControl = f.cacheControl - - if _, err := io.Copy(writer, reader); err != nil { - log.Errorf("gcs: err=%+v\n", err) - return gateway.ErrFailedToUploadFile - } - - if err := writer.Close(); err != nil { - log.Errorf("gcs: err=%+v\n", err) - return gateway.ErrFailedToUploadFile - } - - return nil -} - -func (f *fileRepo) MoveBuiltScene(ctx context.Context, oldName, name string) error { - oldFilename := path.Join(gcsMapBasePath, oldName+".json") - filename := path.Join(gcsMapBasePath, name+".json") - bucket, err := f.bucket(ctx) - if err != nil { - return err - } - object := bucket.Object(oldFilename) - destObject := bucket.Object(filename) - if _, err := destObject.CopierFrom(object).Run(ctx); err != nil { - if errors.Is(err, storage.ErrObjectNotExist) { - return rerror.ErrNotFound - } - return 
rerror.ErrInternalBy(err) + if base == nil { + base = &url.URL{} } - if err := object.Delete(ctx); err != nil { - return rerror.ErrInternalBy(err) + p := sanitize.Path(strings.TrimPrefix(u.Path, "/")) + if p == "" || u.Host != base.Host || u.Scheme != base.Scheme || !strings.HasPrefix(p, gcsAssetBasePath+"/") { + return "" } - return nil -} -func (f *fileRepo) RemoveBuiltScene(ctx context.Context, name string) error { - filename := path.Join(gcsMapBasePath, name+".json") - bucket, err := f.bucket(ctx) - if err != nil { - return err - } - object := bucket.Object(filename) - if err := object.Delete(ctx); err != nil { - if errors.Is(err, storage.ErrObjectNotExist) { - return nil - } - return rerror.ErrInternalBy(err) - } - return nil + return p } diff --git a/internal/infrastructure/gcs/file_test.go b/internal/infrastructure/gcs/file_test.go index b5687d54e..e2e48e944 100644 --- a/internal/infrastructure/gcs/file_test.go +++ b/internal/infrastructure/gcs/file_test.go @@ -8,17 +8,17 @@ import ( ) func TestGetGCSObjectURL(t *testing.T) { - e, err := url.Parse("https://hoge.com/assets/xxx.yyy") - assert.NoError(t, err) - b, err := url.Parse("https://hoge.com/assets") - assert.NoError(t, err) + e, _ := url.Parse("https://hoge.com/assets/xxx.yyy") + b, _ := url.Parse("https://hoge.com/assets") assert.Equal(t, e, getGCSObjectURL(b, "xxx.yyy")) } func TestGetGCSObjectNameFromURL(t *testing.T) { - u, err := url.Parse("https://hoge.com/assets/xxx.yyy") - assert.NoError(t, err) - b, err := url.Parse("https://hoge.com") - assert.NoError(t, err) + u, _ := url.Parse("https://hoge.com/assets/xxx.yyy") + b, _ := url.Parse("https://hoge.com") + b2, _ := url.Parse("https://hoge2.com") assert.Equal(t, "assets/xxx.yyy", getGCSObjectNameFromURL(b, u)) + assert.Equal(t, "", getGCSObjectNameFromURL(b2, u)) + assert.Equal(t, "", getGCSObjectNameFromURL(nil, u)) + assert.Equal(t, "", getGCSObjectNameFromURL(b, nil)) } diff --git a/internal/infrastructure/memory/plugin.go 
b/internal/infrastructure/memory/plugin.go index 403effda0..d17d7e4a0 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -23,7 +23,7 @@ func NewPlugin() repo.Plugin { } } -func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { +func (r *Plugin) FindByID(ctx context.Context, id id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { r.lock.Lock() defer r.lock.Unlock() @@ -31,7 +31,7 @@ func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, return p, nil } for _, p := range r.data { - if p.ID().Equal(id) { + if p.ID().Equal(id) && (p.ID().Scene() == nil || p.ID().Scene().Contains(sids)) { p2 := *p return &p2, nil } @@ -39,7 +39,7 @@ func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, return nil, rerror.ErrNotFound } -func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { +func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { r.lock.Lock() defer r.lock.Unlock() @@ -50,7 +50,7 @@ func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Pl continue } for _, p := range r.data { - if p.ID().Equal(id) { + if p.ID().Equal(id) && (p.ID().Scene() == nil || p.ID().Scene().Contains(sids)) { p2 := *p result = append(result, &p2) } else { @@ -77,3 +77,16 @@ func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error { r.data = append(r.data, &p2) return nil } + +func (r *Plugin) Remove(ctx context.Context, id id.PluginID) error { + r.lock.Lock() + defer r.lock.Unlock() + + for i := 0; i < len(r.data); i++ { + if r.data[i].ID().Equal(id) { + r.data = append(r.data[:i], r.data[i+1:]...) 
+ i-- + } + } + return nil +} diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index 7565cbc55..475e975db 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -78,3 +78,21 @@ func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) err } return nil } + +func (r *PropertySchema) Remove(ctx context.Context, id id.PropertySchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + + delete(r.data, id) + return nil +} + +func (r *PropertySchema) RemoveAll(ctx context.Context, ids []id.PropertySchemaID) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, id := range ids { + delete(r.data, id) + } + return nil +} diff --git a/internal/infrastructure/mongo/mongodoc/plugin.go b/internal/infrastructure/mongo/mongodoc/plugin.go index 6af6a0cd3..2a252a028 100644 --- a/internal/infrastructure/mongo/mongodoc/plugin.go +++ b/internal/infrastructure/mongo/mongodoc/plugin.go @@ -22,10 +22,9 @@ type PluginDocument struct { Author string Description map[string]string RepositoryURL string - Deprecated bool - Public bool Extensions []PluginExtensionDocument Schema *string + Scene *string `bson:",omitempty"` } type PluginConsumer struct { @@ -73,6 +72,7 @@ func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { RepositoryURL: plugin.RepositoryURL(), Extensions: extensionsDoc, Schema: plugin.Schema().StringRef(), + Scene: plugin.ID().Scene().StringRef(), }, pid } diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index 5c8f9e638..cf42427b2 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -143,6 +143,10 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { ps := make([]*scene.Plugin, 0, len(d.Plugins)) for _, w := range d.Widgets { + wid, err := id.WidgetIDFrom(w.ID) + if err != nil { + 
return nil, err + } pid, err := id.PluginIDFrom(w.Plugin) if err != nil { return nil, err @@ -152,7 +156,7 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { return nil, err } sw, err := scene.NewWidget( - id.WidgetIDFromRef(&w.ID), + wid, pid, id.PluginExtensionID(w.Extension), prid, diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index 99e8cc8d7..c9f7e79d4 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -31,17 +31,36 @@ func (r *pluginRepo) init() { } } -func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { - if p := builtin.GetPlugin(id); p != nil { +func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { + // TODO: separate built-in plugins to another repository + if p := builtin.GetPlugin(pid); p != nil { return p, nil } - filter := bson.D{ - {Key: "id", Value: id.String()}, + + pids := pid.String() + filter := bson.M{ + "$or": []bson.M{ + { + "id": pids, + "scene": nil, + }, + { + "id": pids, + "scene": "", + }, + { + "id": pids, + "scene": bson.M{ + "$in": id.SceneIDToKeys(sids), + }, + }, + }, } return r.findOne(ctx, filter) } -func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { + // TODO: separate built-in plugins to another repository // exclude built-in b := map[string]*plugin.Plugin{} ids2 := make([]id.PluginID, 0, len(ids)) @@ -57,10 +76,24 @@ func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugi var err error if len(ids2) > 0 { - filter := bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.PluginIDToKeys(ids2)}, - }}, + keys := id.PluginIDToKeys(ids2) + filter := bson.M{ + "$or": []bson.M{ + { + "id": bson.M{"$in": keys}, + "scene": nil, + }, + { + 
"id": bson.M{"$in": keys}, + "scene": "", + }, + { + "id": bson.M{"$in": keys}, + "scene": bson.M{ + "$in": id.SceneIDToKeys(sids), + }, + }, + }, } dst := make([]*plugin.Plugin, 0, len(ids2)) res, err = r.find(ctx, dst, filter) @@ -100,7 +133,11 @@ func (r *pluginRepo) Save(ctx context.Context, plugin *plugin.Plugin) error { return r.client.SaveOne(ctx, id, doc) } -func (r *pluginRepo) find(ctx context.Context, dst []*plugin.Plugin, filter bson.D) ([]*plugin.Plugin, error) { +func (r *pluginRepo) Remove(ctx context.Context, id id.PluginID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *pluginRepo) find(ctx context.Context, dst []*plugin.Plugin, filter interface{}) ([]*plugin.Plugin, error) { c := mongodoc.PluginConsumer{ Rows: dst, } @@ -110,7 +147,7 @@ func (r *pluginRepo) find(ctx context.Context, dst []*plugin.Plugin, filter bson return c.Rows, nil } -func (r *pluginRepo) findOne(ctx context.Context, filter bson.D) (*plugin.Plugin, error) { +func (r *pluginRepo) findOne(ctx context.Context, filter interface{}) (*plugin.Plugin, error) { dst := make([]*plugin.Plugin, 0, 1) c := mongodoc.PluginConsumer{ Rows: dst, diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index 9b079181c..9cfcc1383 100644 --- a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -112,6 +112,17 @@ func (r *propertySchemaRepo) SaveAll(ctx context.Context, m property.SchemaList) return r.client.SaveAll(ctx, ids, docs) } +func (r *propertySchemaRepo) Remove(ctx context.Context, id id.PropertySchemaID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *propertySchemaRepo) RemoveAll(ctx context.Context, ids []id.PropertySchemaID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.PropertySchemaIDToKeys(ids)) +} + func (r *propertySchemaRepo) find(ctx context.Context, dst property.SchemaList, filter bson.D) 
(property.SchemaList, error) { c := mongodoc.PropertySchemaConsumer{ Rows: dst, diff --git a/internal/infrastructure/npm/archive.go b/internal/infrastructure/npm/archive.go deleted file mode 100644 index a67b5d3bc..000000000 --- a/internal/infrastructure/npm/archive.go +++ /dev/null @@ -1,73 +0,0 @@ -package npm - -import ( - "archive/tar" - "compress/gzip" - "errors" - "io" - "strings" - - "github.com/reearth/reearth-backend/pkg/file" - "github.com/reearth/reearth-backend/pkg/rerror" -) - -type archive struct { - gzipReader *gzip.Reader - tarReader *tar.Reader - name string - size int64 -} - -// NewArchive _ -func NewArchive(reader io.Reader, name string, size int64) file.Archive { - gzipReader, _ := gzip.NewReader(reader) - tarReader := tar.NewReader(gzipReader) - return &archive{ - gzipReader: gzipReader, - tarReader: tarReader, - name: name, - size: size, - } -} - -// Next _ -func (a *archive) Next() (f *file.File, derr error) { - var head *tar.Header - var err error - for { - head, err = a.tarReader.Next() - if errors.Is(err, io.EOF) { - derr = file.EOF - return - } - if err != nil { - derr = rerror.ErrInternalBy(err) - return - } - if strings.HasPrefix(head.Name, "package/") { - break - } - } - f = &file.File{ - Content: a.tarReader, - Name: head.FileInfo().Name(), - Fullpath: strings.TrimPrefix(head.Name, "package/"), - Size: head.Size, - } - return -} - -// Close _ -func (a *archive) Close() error { - return a.gzipReader.Close() -} - -// Name _ -func (a *archive) Name() string { - return a.name -} - -// Size _ -func (a *archive) Size() int64 { - return a.size -} diff --git a/internal/infrastructure/npm/plugin_repository.go b/internal/infrastructure/npm/plugin_repository.go deleted file mode 100644 index 63266a147..000000000 --- a/internal/infrastructure/npm/plugin_repository.go +++ /dev/null @@ -1,97 +0,0 @@ -package npm - -import ( - "context" - "errors" - "fmt" - "net/http" - "strings" - - "github.com/reearth/reearth-backend/internal/usecase/gateway" - 
"github.com/reearth/reearth-backend/pkg/file" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin/manifest" - "github.com/reearth/reearth-backend/pkg/rerror" -) - -const ( - manifestFilePath = "reearth.json" -) - -type pluginRepository struct { - registryURL string -} - -func NewPluginRepository() gateway.PluginRepository { - return &pluginRepository{ - registryURL: "https://registry.npmjs.org/", - } -} - -func NewPluginRepositoryOf(url string) gateway.PluginRepository { - return &pluginRepository{ - registryURL: url, - } -} - -func (r *pluginRepository) Data(ctx context.Context, id id.PluginID) (file.Archive, error) { - return r.getNpmTarball(ctx, id) -} - -// Manifest _ -func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manifest.Manifest, error) { - archive, err := r.getNpmTarball(ctx, id) - if err != nil { - return nil, err - } - - defer func() { - _ = archive.Close() - }() - - for { - f, err := archive.Next() - if errors.Is(err, file.EOF) { - break - } - if err != nil { - return nil, rerror.ErrInternalBy(err) - } - if f.Fullpath == manifestFilePath { - manifest, err := manifest.Parse(f.Content) - if err != nil { - return nil, err - } - return manifest, nil - } - } - return nil, manifest.ErrFailedToParseManifest -} - -func (r *pluginRepository) getNpmTarball(ctx context.Context, id id.PluginID) (file.Archive, error) { - rawPkgName := id.Name() - pkgVersion := id.Version().String() - scopelessPkgName := id.Name() - if rawPkgName[0] == '@' { - splitted := strings.Split(rawPkgName, "/") - if len(splitted) > 1 { - scopelessPkgName = splitted[1] - } - } - tarballURL := fmt.Sprintf("%s%s/-/%s-%s.tgz", r.registryURL, rawPkgName, scopelessPkgName, pkgVersion) - - req, err := http.NewRequestWithContext(ctx, "GET", tarballURL, nil) - if err != nil { - return nil, gateway.ErrFailedToFetchPluiginRepositoryData - } - - res, err := http.DefaultClient.Do(req) - if err != nil || res.StatusCode != http.StatusOK 
{ - if res.StatusCode == http.StatusNotFound { - return nil, rerror.ErrNotFound - } - return nil, gateway.ErrFailedToFetchPluiginRepositoryData - } - - return NewArchive(res.Body, fmt.Sprintf("%s-%s.tgz", rawPkgName, pkgVersion), res.ContentLength), nil -} diff --git a/internal/usecase/gateway/file.go b/internal/usecase/gateway/file.go index e333e4589..2eb86d4fd 100644 --- a/internal/usecase/gateway/file.go +++ b/internal/usecase/gateway/file.go @@ -8,7 +8,6 @@ import ( "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin" ) var ( @@ -19,13 +18,14 @@ var ( ) type File interface { - ReadAsset(context.Context, string) (io.Reader, error) - ReadPluginFile(context.Context, id.PluginID, string) (io.Reader, error) - ReadBuiltSceneFile(context.Context, string) (io.Reader, error) + ReadAsset(context.Context, string) (io.ReadCloser, error) UploadAsset(context.Context, *file.File) (*url.URL, error) - UploadAndExtractPluginFiles(context.Context, file.Archive, *plugin.Plugin) (*url.URL, error) + RemoveAsset(context.Context, *url.URL) error + ReadPluginFile(context.Context, id.PluginID, string) (io.ReadCloser, error) + UploadPluginFile(context.Context, id.PluginID, *file.File) error + RemovePlugin(context.Context, id.PluginID) error UploadBuiltScene(context.Context, io.Reader, string) error + ReadBuiltSceneFile(context.Context, string) (io.ReadCloser, error) MoveBuiltScene(context.Context, string, string) error - RemoveAsset(context.Context, *url.URL) error RemoveBuiltScene(context.Context, string) error } diff --git a/internal/usecase/gateway/plugin_repository.go b/internal/usecase/gateway/plugin_repository.go index 9212e61a5..afa1fd0eb 100644 --- a/internal/usecase/gateway/plugin_repository.go +++ b/internal/usecase/gateway/plugin_repository.go @@ -15,5 +15,5 @@ var ( type PluginRepository interface { Manifest(context.Context, id.PluginID) (*manifest.Manifest, error) - Data(context.Context, 
id.PluginID) (file.Archive, error) + Data(context.Context, id.PluginID) (file.Iterator, error) } diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index b4d0aa5e7..746dc9879 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -3,6 +3,7 @@ package interactor import ( "context" "net/url" + "path" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/gateway" @@ -56,7 +57,7 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope result, err = asset.New(). NewID(). Team(inp.TeamID). - Name(inp.File.Name). + Name(path.Base(inp.File.Path)). Size(inp.File.Size). URL(url.String()). Build() diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index 8a56ff029..eaace244b 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -8,6 +8,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/layer/layerops" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/rerror" @@ -17,7 +18,6 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" - "github.com/reearth/reearth-backend/pkg/layer/initializer" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/pkg/scene/sceneops" @@ -190,11 +190,11 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase } separator := ',' - if strings.HasSuffix(inp.File.Name, ".tsv") { + if strings.HasSuffix(inp.File.Path, ".tsv") { separator = '\t' } - return i.importDataset(ctx, inp.File.Content, inp.File.Name, separator, inp.SceneId, inp.SchemaId) + 
return i.importDataset(ctx, inp.File.Content, inp.File.Path, separator, inp.SceneId, inp.SchemaId) } func (i *Dataset) ImportDatasetFromGoogleSheet(ctx context.Context, inp interfaces.ImportDatasetFromGoogleSheetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { @@ -306,7 +306,7 @@ func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name str if rf := ds.FieldRef(representativeFieldID); rf != nil && rf.Type() == dataset.ValueTypeString { name = rf.Value().Value().(string) } - layerItem, layerProperty, err := initializer.LayerItem{ + layerItem, layerProperty, err := layerops.LayerItem{ SceneID: sceneId, ParentLayerID: lg.ID(), Plugin: builtin.Plugin(), diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index c2f10a2de..6e6325f68 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -18,7 +18,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/decoding" - "github.com/reearth/reearth-backend/pkg/layer/initializer" + "github.com/reearth/reearth-backend/pkg/layer/layerops" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" ) @@ -152,7 +152,6 @@ func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operat } func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, operator *usecase.Operator) (_ *layer.Item, _ *layer.Group, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -185,7 +184,7 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o return nil, nil, interfaces.ErrCannotAddLayerToLinkedLayerGroup } - plugin, extension, err := i.getPlugin(ctx, inp.PluginID, inp.ExtensionID) + plugin, extension, err := i.getPlugin(ctx, parentLayer.Scene(), inp.PluginID, inp.ExtensionID) if err != nil { return nil, nil, err } @@ -195,7 
+194,7 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o return nil, nil, err } - layerItem, property, err := initializer.LayerItem{ + layerItem, property, err := layerops.LayerItem{ SceneID: parentLayer.Scene(), ParentLayerID: parentLayer.ID(), Plugin: plugin, @@ -238,7 +237,6 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o } func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, operator *usecase.Operator) (_ *layer.Group, _ *layer.Group, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -273,7 +271,7 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, var extensionSchemaID id.PropertySchemaID var propertySchema *property.Schema - plug, extension, err := i.getPlugin(ctx, inp.PluginID, inp.ExtensionID) + plug, extension, err := i.getPlugin(ctx, parentLayer.Scene(), inp.PluginID, inp.ExtensionID) if err != nil { return nil, nil, err } @@ -372,7 +370,7 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, name = rf.Value().Value().(string) } - layerItem, property, err := initializer.LayerItem{ + layerItem, property, err := layerops.LayerItem{ SceneID: parentLayer.Scene(), ParentLayerID: layerGroup.ID(), Plugin: plug, @@ -733,7 +731,6 @@ func (i *Layer) RemoveInfobox(ctx context.Context, layerID id.LayerID, operator } func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFieldParam, operator *usecase.Operator) (_ *layer.InfoboxField, _ layer.Layer, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -764,7 +761,7 @@ func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFi return nil, nil, interfaces.ErrInfoboxNotFound } - _, extension, err := i.getPlugin(ctx, &inp.PluginID, &inp.ExtensionID) + _, extension, err := i.getPlugin(ctx, l.Scene(), &inp.PluginID, &inp.ExtensionID) if err != nil { return nil, nil, err } @@ 
-893,12 +890,12 @@ func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInf return inp.InfoboxFieldID, layer, err } -func (i *Layer) getPlugin(ctx context.Context, p *id.PluginID, e *id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { +func (i *Layer) getPlugin(ctx context.Context, sid id.SceneID, p *id.PluginID, e *id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { if p == nil { return nil, nil, nil } - plugin, err := i.pluginRepo.FindByID(ctx, *p) + plugin, err := i.pluginRepo.FindByID(ctx, *p, []id.SceneID{sid}) if err != nil { if errors.Is(err, rerror.ErrNotFound) { return nil, nil, interfaces.ErrPluginNotFound @@ -960,13 +957,13 @@ func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam return nil, nil, errors.New("file is too big") } var reader decoding.ShapeReader - if inp.File.ContentType == "application/octet-stream" && strings.HasSuffix(inp.File.Name, ".shp") { + if inp.File.ContentType == "application/octet-stream" && strings.HasSuffix(inp.File.Path, ".shp") { reader, err = shp.ReadFrom(inp.File.Content) if err != nil { return nil, nil, err } decoder = decoding.NewShapeDecoder(reader, parent.Scene()) - } else if inp.File.ContentType == "application/zip" && strings.HasSuffix(inp.File.Name, ".zip") { + } else if inp.File.ContentType == "application/zip" && strings.HasSuffix(inp.File.Path, ".zip") { reader, err = shp.ReadZipFrom(inp.File.Content) if err != nil { return nil, nil, err diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index c440ef4d3..3ea24883b 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -2,8 +2,6 @@ package interactor import ( "context" - "errors" - "io" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/gateway" @@ -15,8 +13,10 @@ import ( type Plugin struct { common + commonScene pluginRepo repo.Plugin 
propertySchemaRepo repo.PropertySchema + propertyRepo repo.Property file gateway.File pluginRepository gateway.PluginRepository transaction repo.Transaction @@ -25,8 +25,12 @@ type Plugin struct { func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { return &Plugin{ + commonScene: commonScene{ + sceneRepo: r.Scene, + }, pluginRepo: r.Plugin, propertySchemaRepo: r.PropertySchema, + propertyRepo: r.Property, transaction: r.Transaction, pluginRepository: gr.PluginRepository, file: gr.File, @@ -35,31 +39,13 @@ func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { } func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) { - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - res, err := i.pluginRepo.FindByIDs(ctx, ids) - return res, err -} - -func (i *Plugin) Upload(ctx context.Context, r io.Reader, operator *usecase.Operator) (_ *plugin.Plugin, err error) { - - tx, err := i.transaction.Begin() + scenes, err := i.OnlyReadableScenes(ctx, operator) if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - if err := i.OnlyOperator(operator); err != nil { return nil, err } - tx.Commit() - return nil, errors.New("not implemented") + res, err := i.pluginRepo.FindByIDs(ctx, ids, scenes) + return res, err } func (i *Plugin) FetchPluginMetadata(ctx context.Context, operator *usecase.Operator) ([]*plugin.Metadata, error) { diff --git a/internal/usecase/interactor/plugin_delete.go b/internal/usecase/interactor/plugin_delete.go new file mode 100644 index 000000000..4d2558e00 --- /dev/null +++ b/internal/usecase/interactor/plugin_delete.go @@ -0,0 +1,58 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (i 
*Plugin) Delete(ctx context.Context, pid id.PluginID, operator *usecase.Operator) (err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + p, err := i.pluginRepo.FindByID(ctx, pid, nil) + if err != nil { + return err + } + + sid := p.ID().Scene() + if sid == nil || p.ID().System() { + return interfaces.ErrCannotDeletePublicPlugin + } + + s, err := i.sceneRepo.FindByID(ctx, *sid, operator.WritableTeams) + if err != nil { + return err + } + if s == nil { + return interfaces.ErrOperationDenied + } + + if s.PluginSystem().HasPlugin(p.ID()) { + return interfaces.ErrCannotDeleteUsedPlugin + } + + if err := i.pluginRepo.Remove(ctx, p.ID()); err != nil { + return err + } + if ps := p.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { + return err + } + } + if err := i.file.RemovePlugin(ctx, p.ID()); err != nil { + return err + } + + tx.Commit() + return nil +} diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go new file mode 100644 index 000000000..7560b3ab3 --- /dev/null +++ b/internal/usecase/interactor/plugin_upload.go @@ -0,0 +1,183 @@ +package interactor + +import ( + "context" + "errors" + "io" + "net/http" + "net/url" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" + "github.com/reearth/reearth-backend/pkg/plugin/repourl" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" +) + +var pluginPackageSizeLimit int64 = 10 * 1024 * 1024 // 10MB + +func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operator 
*usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.CanWriteScene(ctx, sid, operator); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + if err != nil { + return nil, nil, err + } + + p, err := pluginpack.PackageFromZip(r, &sid, pluginPackageSizeLimit) + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + for { + f, err := p.Files.Next() + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + if f == nil { + break + } + if err := i.file.UploadPluginFile(ctx, p.Manifest.Plugin.ID(), f); err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + } + + if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { + return nil, nil, err + } + } + if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { + return nil, nil, err + } + + if err := i.installPlugin(ctx, p, s); err != nil { + return nil, nil, err + } + + tx.Commit() + return p.Manifest.Plugin, s, nil +} + +func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { + ru, err := repourl.New(u) + if err != nil { + return nil, nil, err + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.CanWriteScene(ctx, sid, operator); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + if err != nil { + return nil, nil, err + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, ru.ArchiveURL().String(), nil) + if err != nil { + return nil, nil, 
interfaces.ErrInvalidPluginPackage + } + + res, err := http.DefaultClient.Do(req) + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + defer func() { + _ = res.Body.Close() + }() + + if res.StatusCode != 200 { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + p, err := pluginpack.PackageFromZip(res.Body, &sid, pluginPackageSizeLimit) + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + + if p, err := i.pluginRepo.FindByID(ctx, p.Manifest.Plugin.ID(), []id.SceneID{sid}); err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } else if p != nil { + return nil, nil, interfaces.ErrPluginAlreadyInstalled + } + + for { + f, err := p.Files.Next() + if err != nil { + return nil, nil, interfaces.ErrInvalidPluginPackage + } + if f == nil { + break + } + if err := i.file.UploadPluginFile(ctx, p.Manifest.Plugin.ID(), f); err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + } + + if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { + return nil, nil, err + } + } + if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { + return nil, nil, err + } + + if err := i.installPlugin(ctx, p, s); err != nil { + return nil, nil, err + } + + tx.Commit() + return p.Manifest.Plugin, s, nil +} + +// installPlugin installs the plugin to the scene +func (i *Plugin) installPlugin(ctx context.Context, p *pluginpack.Package, s *scene.Scene) (err error) { + var ppid *id.PropertyID + var pp *property.Property + if psid := p.Manifest.Plugin.Schema(); psid != nil { + pp, err = property.New().NewID().Schema(*psid).Build() + if err != nil { + return err + } + } + s.PluginSystem().Add(scene.NewPlugin(p.Manifest.Plugin.ID(), ppid)) + + if pp != nil { + if err := i.propertyRepo.Save(ctx, pp); err != nil { + return err + } + } + if err := i.sceneRepo.Save(ctx, s); err != nil { + return err + } + return nil +} diff --git 
a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index 1017bb70e..a381ba261 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -3,6 +3,7 @@ package interactor import ( "context" "errors" + "path" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/gateway" @@ -225,7 +226,7 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara asset, err := asset.New(). NewID(). Team(propertyScene.Team()). - Name(inp.File.Name). + Name(path.Base(inp.File.Path)). Size(inp.File.Size). URL(url.String()). Build() diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 0e4fa442c..682413cf8 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -5,11 +5,13 @@ import ( "errors" "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/layer/layerops" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/rerror" @@ -30,9 +32,10 @@ type Scene struct { layerRepo repo.Layer datasetRepo repo.Dataset transaction repo.Transaction + file gateway.File } -func NewScene(r *repo.Container) interfaces.Scene { +func NewScene(r *repo.Container, g *gateway.Container) interfaces.Scene { return &Scene{ commonScene: commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, @@ -45,6 +48,7 @@ func NewScene(r *repo.Container) interfaces.Scene { layerRepo: r.Layer, datasetRepo: r.Dataset, 
transaction: r.Transaction, + file: g.File, } } @@ -151,8 +155,7 @@ func (s *Scene) FetchLock(ctx context.Context, ids []id.SceneID, operator *useca return s.sceneLockRepo.GetAllLock(ctx, ids) } -func (i *Scene) AddWidget(ctx context.Context, id id.SceneID, pid id.PluginID, eid id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, widget *scene.Widget, err error) { - +func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, eid id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, widget *scene.Widget, err error) { tx, err := i.transaction.Begin() if err != nil { return @@ -168,11 +171,11 @@ func (i *Scene) AddWidget(ctx context.Context, id id.SceneID, pid id.PluginID, e } // check scene lock - if err := i.CheckSceneLock(ctx, id); err != nil { + if err := i.CheckSceneLock(ctx, sid); err != nil { return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, id, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) if err != nil { return nil, nil, err } @@ -180,7 +183,7 @@ func (i *Scene) AddWidget(ctx context.Context, id id.SceneID, pid id.PluginID, e return nil, nil, err } - _, extension, err := i.getPlugin(ctx, pid, eid) + _, extension, err := i.getPlugin(ctx, sid, pid, eid) if err != nil { return nil, nil, err } @@ -188,12 +191,12 @@ func (i *Scene) AddWidget(ctx context.Context, id id.SceneID, pid id.PluginID, e return nil, nil, interfaces.ErrExtensionTypeMustBeWidget } - property, err := property.New().NewID().Schema(extension.Schema()).Scene(id).Build() + property, err := property.New().NewID().Schema(extension.Schema()).Scene(sid).Build() if err != nil { return nil, nil, err } - widget, err = scene.NewWidget(nil, pid, eid, property.ID(), true) + widget, err = scene.NewWidget(id.NewWidgetID(), pid, eid, property.ID(), true) if err != nil { return nil, nil, err } @@ -315,7 +318,6 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, pid id.PluginID } func (i 
*Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, _ id.PluginID, _ *id.PropertyID, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -347,15 +349,15 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin return nil, pid, nil, interfaces.ErrPluginAlreadyInstalled } - plugin, err2 := i.pluginRepo.FindByID(ctx, pid) - if err2 != nil { + plugin, err := i.pluginRepo.FindByID(ctx, pid, []id.SceneID{sid}) + if err != nil { if errors.Is(err2, rerror.ErrNotFound) { - // - // Install Plugin - // return nil, pid, nil, interfaces.ErrPluginNotFound } - return nil, pid, nil, err2 + return nil, pid, nil, err + } + if psid := plugin.ID().Scene(); psid != nil && *psid != sid { + return nil, pid, nil, interfaces.ErrPluginNotFound } var p *property.Property @@ -374,14 +376,12 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin s.PluginSystem().Add(scene.NewPlugin(pid, propertyID)) if p != nil { - err2 = i.propertyRepo.Save(ctx, p) - if err2 != nil { + if err := i.propertyRepo.Save(ctx, p); err != nil { return nil, pid, nil, err2 } } - err2 = i.sceneRepo.Save(ctx, s) - if err2 != nil { + if err := i.sceneRepo.Save(ctx, s); err != nil { return nil, pid, nil, err2 } @@ -390,6 +390,9 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin } func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + if pid.System() { + return nil, rerror.ErrNotFound + } tx, err := i.transaction.Begin() if err != nil { @@ -413,6 +416,11 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug return nil, err } + pl, err := i.pluginRepo.FindByID(ctx, pid, []id.SceneID{sid}) + if err != nil { + return nil, err + } + // check scene lock if err := i.CheckSceneLock(ctx, sid); err != nil { return nil, err @@ -432,96 
+440,53 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug ps.Remove(pid) // remove widgets - for _, w := range scene.WidgetSystem().Widgets() { - if w.Plugin().Equal(pid) { - scene.WidgetSystem().Remove(pid, w.Extension()) - removedProperties = append(removedProperties, w.Property()) - } - } + removedProperties = append(removedProperties, scene.WidgetSystem().RemoveAllByPlugin(pid)...) - // remove layers and infobox fields - modifiedLayers := layer.List{} - removedLayers := []id.LayerID{} - layers, err := i.layerRepo.FindByScene(ctx, sid) + // remove layers + res, err := layerops.Processor{ + LayerLoader: repo.LayerLoaderFrom(i.layerRepo, []id.SceneID{sid}), + RootLayerID: scene.RootLayer(), + }.UninstallPlugin(ctx, pid) if err != nil { return nil, err } - for _, l := range layers { - if l == nil { - continue - } - ll := *l - if p := ll.Plugin(); p != nil && pid.Equal(*p) { - removedLayers = append(removedLayers, ll.ID()) - if pp := ll.Property(); pp != nil { - removedProperties = append(removedProperties, *pp) - } - if ib := ll.Infobox(); ib != nil { - removedProperties = append(removedProperties, ib.Property()) - for _, f := range ib.Fields() { - removedProperties = append(removedProperties, f.Property()) - } - } - } else if ib := ll.Infobox(); ib != nil { - removedProperties = append(removedProperties, ib.Property()) - for _, f := range ib.Fields() { - removedProperties = append(removedProperties, f.Property()) - } - var ll2 layer.Layer = ll - modifiedLayers = append(modifiedLayers, &ll2) - } - } - for _, lg := range layers.ToLayerGroupList() { - modified := false - cancel := false - for _, lid := range removedLayers { - if lg.ID() == lid { - cancel = true - break - } - if lg.Layers().HasLayer(lid) { - lg.Layers().RemoveLayer(lid) - modified = true - } - } - if cancel { - continue - } - if modified { - already := false - for _, l := range modifiedLayers { - if l != nil && (*l).ID() == lg.ID() { - already = true - break - } - } - 
if already { - continue - } - var lg2 layer.Layer = lg - modifiedLayers = append(modifiedLayers, &lg2) + + removedProperties = append(removedProperties, res.RemovedProperties...) + + // save + if len(res.ModifiedLayers) > 0 { + if err := i.layerRepo.SaveAll(ctx, res.ModifiedLayers); err != nil { + return nil, err } } - if len(modifiedLayers) > 0 { - err = i.layerRepo.SaveAll(ctx, modifiedLayers) - if err != nil { + if res.RemovedLayers.LayerCount() > 0 { + if err := i.layerRepo.RemoveAll(ctx, res.RemovedLayers.Layers()); err != nil { return nil, err } } - if len(removedLayers) > 0 { - err = i.layerRepo.RemoveAll(ctx, removedLayers) - if err != nil { + + if len(removedProperties) > 0 { + if err := i.propertyRepo.RemoveAll(ctx, removedProperties); err != nil { return nil, err } } - err = i.sceneRepo.Save(ctx, scene) - if err != nil { + + if err := i.sceneRepo.Save(ctx, scene); err != nil { return nil, err } - if len(removedProperties) > 0 { - err = i.propertyRepo.RemoveAll(ctx, removedProperties) - if err != nil { + + // if the plugin is private, uninstall it + if psid := pid.Scene(); psid != nil && *psid == sid { + if err := i.pluginRepo.Remove(ctx, pl.ID()); err != nil { + return nil, err + } + if ps := pl.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { + return nil, err + } + } + if err := i.file.RemovePlugin(ctx, pl.ID()); err != nil { return nil, err } } @@ -531,7 +496,6 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug } func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, newPluginID id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -591,8 +555,8 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, return result.Scene, err } -func (i *Scene) getPlugin(ctx context.Context, p id.PluginID, e id.PluginExtensionID) (*plugin.Plugin, 
*plugin.Extension, error) { - plugin, err2 := i.pluginRepo.FindByID(ctx, p) +func (i *Scene) getPlugin(ctx context.Context, sid id.SceneID, p id.PluginID, e id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { + plugin, err2 := i.pluginRepo.FindByID(ctx, p, []id.SceneID{sid}) if err2 != nil { if errors.Is(err2, rerror.ErrNotFound) { return nil, nil, interfaces.ErrPluginNotFound diff --git a/internal/usecase/interfaces/plugin.go b/internal/usecase/interfaces/plugin.go index adf6618ae..ca0fe9054 100644 --- a/internal/usecase/interfaces/plugin.go +++ b/internal/usecase/interfaces/plugin.go @@ -4,18 +4,25 @@ import ( "context" "errors" "io" + "net/url" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" ) var ( - ErrPluginAlreadyRegistered error = errors.New("plugin already registered") + ErrPluginAlreadyRegistered = errors.New("plugin already registered") + ErrInvalidPluginPackage = errors.New("invalid plugin package") + ErrCannotDeletePublicPlugin = errors.New("cannot delete public plugin") + ErrCannotDeleteUsedPlugin = errors.New("cannot delete plugin used by at least one scene") ) type Plugin interface { Fetch(context.Context, []id.PluginID, *usecase.Operator) ([]*plugin.Plugin, error) - Upload(context.Context, io.Reader, *usecase.Operator) (*plugin.Plugin, error) + Upload(context.Context, io.Reader, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) + UploadFromRemote(context.Context, *url.URL, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) + Delete(context.Context, id.PluginID, *usecase.Operator) error FetchPluginMetadata(context.Context, *usecase.Operator) ([]*plugin.Metadata, error) } diff --git a/internal/usecase/repo/plugin.go b/internal/usecase/repo/plugin.go index 052564c8f..c8a472a71 100644 --- a/internal/usecase/repo/plugin.go +++ 
b/internal/usecase/repo/plugin.go @@ -8,13 +8,14 @@ import ( ) type Plugin interface { - FindByID(context.Context, id.PluginID) (*plugin.Plugin, error) - FindByIDs(context.Context, []id.PluginID) ([]*plugin.Plugin, error) + FindByID(context.Context, id.PluginID, []id.SceneID) (*plugin.Plugin, error) + FindByIDs(context.Context, []id.PluginID, []id.SceneID) ([]*plugin.Plugin, error) Save(context.Context, *plugin.Plugin) error + Remove(context.Context, id.PluginID) error } func PluginLoaderFrom(r Plugin) plugin.Loader { - return func(ctx context.Context, ids ...id.PluginID) ([]*plugin.Plugin, error) { - return r.FindByIDs(ctx, ids) + return func(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { + return r.FindByIDs(ctx, ids, sids) } } diff --git a/internal/usecase/repo/property_schema.go b/internal/usecase/repo/property_schema.go index 878cc6b12..952eddda4 100644 --- a/internal/usecase/repo/property_schema.go +++ b/internal/usecase/repo/property_schema.go @@ -12,6 +12,8 @@ type PropertySchema interface { FindByIDs(context.Context, []id.PropertySchemaID) (property.SchemaList, error) Save(context.Context, *property.Schema) error SaveAll(context.Context, property.SchemaList) error + Remove(context.Context, id.PropertySchemaID) error + RemoveAll(context.Context, []id.PropertySchemaID) error } func PropertySchemaLoaderFrom(r PropertySchema) property.SchemaLoader { diff --git a/pkg/builtin/main.go b/pkg/builtin/main.go index 92aa7c9d4..513a23528 100644 --- a/pkg/builtin/main.go +++ b/pkg/builtin/main.go @@ -17,7 +17,7 @@ var pluginManifestJSON []byte var pluginManifestJSON_ja []byte var pluginTranslationList = map[string]*manifest.TranslationRoot{"ja": manifest.MustParseTranslationFromBytes(pluginManifestJSON_ja)} -var pluginManifest = manifest.MergeManifestTranslation(manifest.MustParseSystemFromBytes(pluginManifestJSON), pluginTranslationList) +var pluginManifest = 
manifest.MergeManifestTranslation(manifest.MustParseSystemFromBytes(pluginManifestJSON, nil), pluginTranslationList) // MUST NOT CHANGE var PropertySchemaIDVisualizerCesium = id.MustPropertySchemaID("reearth/cesium") diff --git a/pkg/builtin/main_test.go b/pkg/builtin/main_test.go index 816a5e0d7..0f8e06c9a 100644 --- a/pkg/builtin/main_test.go +++ b/pkg/builtin/main_test.go @@ -55,7 +55,7 @@ func TestGetPlugin(t *testing.T) { }, { name: "foo plugin", - pluginID: id.MustPluginID("foo#1.1.1"), + pluginID: id.MustPluginID("foo~1.1.1"), expectedNil: true, }, } @@ -86,7 +86,7 @@ func TestGetPropertySchema(t *testing.T) { }, { name: "unknown propertySchemaId", - psId: id.MustPropertySchemaID("xxx#1.1.1/aa"), + psId: id.MustPropertySchemaID("xxx~1.1.1/aa"), expectedNil: true, }, } diff --git a/pkg/file/file.go b/pkg/file/file.go index d087bf15f..890f9d5c4 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -1,28 +1,159 @@ +// Package file provides convenient helpers for files and abstractions of files package file import ( - "errors" "io" -) + "io/fs" + "strings" -var ( - // EOF _ - EOF error = errors.New("eof") + "github.com/spf13/afero" ) -// File _ +// File abstracts an abstract file type File struct { - Content io.Reader - Name string - Fullpath string - Size int64 + Content io.ReadCloser + Path string + Size int64 + // If the content type is not explicitly specified, ContenType will be an empty string. ContentType string } -// Archive is a file like tarball. -type Archive interface { - Name() string - Size() int64 +// Iterator is an iterator of files +type Iterator interface { + // Next returns the next File. 
If there is no next File, returns nil file and nil error Next() (*File, error) - Close() error +} + +// For debugging +type SimpleIterator struct { + c int + files []File +} + +func NewSimpleIterator(files []File) *SimpleIterator { + files2 := make([]File, len(files)) + copy(files2, files) + return &SimpleIterator{ + files: files2, + } +} + +func (s *SimpleIterator) Next() (*File, error) { + if len(s.files) <= s.c { + return nil, nil + } + n := s.files[s.c] + s.c++ + return &n, nil +} + +type PrefixIterator struct { + a Iterator + prefix string +} + +func NewPrefixIterator(a Iterator, prefix string) *PrefixIterator { + return &PrefixIterator{ + a: a, + prefix: prefix, + } +} + +func (s *PrefixIterator) Next() (*File, error) { + for { + n, err := s.a.Next() + if err != nil { + return nil, err + } + if n == nil { + return nil, nil + } + if s.prefix == "" { + return n, nil + } + if strings.HasPrefix(n.Path, s.prefix+"/") { + n2 := *n + n2.Path = strings.TrimPrefix(n2.Path, s.prefix+"/") + return &n2, nil + } + } +} + +type FilteredIterator struct { + a Iterator + skipper func(p string) bool +} + +func NewFilteredIterator(a Iterator, skipper func(p string) bool) *FilteredIterator { + return &FilteredIterator{ + a: a, + skipper: skipper, + } +} + +func (s *FilteredIterator) Next() (*File, error) { + for { + n, err := s.a.Next() + if err != nil { + return nil, err + } + if n == nil { + return nil, nil + } + if !s.skipper(n.Path) { + return n, nil + } + } +} + +type FsIterator struct { + fs afero.Fs + files []string + c int +} + +func NewFsIterator(afs afero.Fs) (*FsIterator, error) { + var files []string + var size int64 + + if err := afero.Walk(afs, "", func(path string, info fs.FileInfo, err error) error { + if info.IsDir() { + return nil + } + files = append(files, path) + size += info.Size() + return nil + }); err != nil { + return nil, err + } + + return &FsIterator{ + fs: afs, + files: files, + c: 0, + }, nil +} + +func (a *FsIterator) Next() (*File, error) { + if 
len(a.files) <= a.c { + return nil, nil + } + + next := a.files[a.c] + a.c++ + fi, err := a.fs.Open(next) + if err != nil { + return nil, err + } + + stat, err := fi.Stat() + if err != nil { + return nil, err + } + + return &File{ + Content: fi, + Path: next, + Size: stat.Size(), + }, nil } diff --git a/pkg/file/file_test.go b/pkg/file/file_test.go new file mode 100644 index 000000000..5d71c9ab9 --- /dev/null +++ b/pkg/file/file_test.go @@ -0,0 +1,196 @@ +package file + +import ( + "io" + "os" + "testing" + + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestReaders(t *testing.T) { + zf, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = zf.Close() + }() + zr, err := ZipReaderFrom(zf, 1024) + assert.NoError(t, err) + + tf, err := os.Open("testdata/test.tar.gz") + assert.NoError(t, err) + defer func() { + _ = tf.Close() + }() + tr, err := TarReaderFromTarGz(tf) + assert.NoError(t, err) + + files := map[string]string{ + "reearth.json": "{\n \"reearth\": \"Re:Earth\"\n}\n", + "index.js": "console.log(\"hello world\");\n", + "test/foo.bar": "test\n", + } + + testCases := []struct { + Name string + Archive Iterator + Files []string + }{ + { + Name: "zip", + Archive: zr, + Files: []string{"test/foo.bar", "index.js", "reearth.json"}, + }, + { + Name: "tar", + Archive: tr, + Files: []string{"test/foo.bar", "index.js", "reearth.json"}, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + // tt.Parallel() cannot be used + assert := assert.New(tt) + + for i, f := range tc.Files { + n, err := tc.Archive.Next() + assert.NoError(err) + assert.Equal(f, n.Path, "file %d in %s", i, tc.Name) + assert.Equal(int64(len(files[f])), n.Size, "file %d in %s", i, tc.Name) + assert.Equal("", n.ContentType, "file %d in %s", i, tc.Name) + + fc, err := io.ReadAll(n.Content) + assert.NoError(err) + assert.Equal(files[f], string(fc)) + + assert.NoError(n.Content.Close()) + } + + n, err := 
tc.Archive.Next() + assert.Nil(err) + assert.Nil(n) + + n, err = tc.Archive.Next() + assert.Nil(err) + assert.Nil(n) + }) + } +} + +func TestSimpleIterator(t *testing.T) { + a := NewSimpleIterator([]File{{Path: "a"}, {Path: "b"}, {Path: "c"}}) + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "a"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "b"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "c"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) +} + +func TestPrefixIterator(t *testing.T) { + ba := NewSimpleIterator([]File{ + {Path: "a"}, {Path: "b"}, {Path: "c/d"}, {Path: "e"}, {Path: "f/g/h"}, {Path: "c/i/j"}, + }) + a := NewPrefixIterator(ba, "c") + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "d"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "i/j"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) + + ba2 := NewSimpleIterator([]File{ + {Path: "a"}, {Path: "b"}, + }) + a2 := NewPrefixIterator(ba2, "") + + n2, err := a2.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "a"}, n2) + + n2, err = a2.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "b"}, n2) + + n2, err = a2.Next() + assert.NoError(t, err) + assert.Nil(t, n2) +} + +func TestFilteredIterator(t *testing.T) { + var paths []string + ba := NewSimpleIterator([]File{ + {Path: "0"}, {Path: "1"}, {Path: "2"}, + }) + a := NewFilteredIterator(ba, func(p string) bool { + paths = append(paths, p) + return p == "1" + }) + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "0"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, &File{Path: "2"}, n) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) + assert.Equal(t, []string{"0", "1", "2"}, paths) +} + +func 
TestFsIterator(t *testing.T) { + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("a/b", 0755) + f, _ := fs.Create("b") + _, _ = f.WriteString("hello") + _ = f.Close() + _, _ = fs.Create("a/b/c") + + a, err := NewFsIterator(fs) + assert.NoError(t, err) + + n, err := a.Next() + assert.NoError(t, err) + assert.Equal(t, "a/b/c", n.Path) + nd, err := io.ReadAll(n.Content) + assert.NoError(t, err) + assert.Equal(t, []byte{}, nd) + assert.NoError(t, n.Content.Close()) + + n, err = a.Next() + assert.NoError(t, err) + assert.Equal(t, "b", n.Path) + nd, err = io.ReadAll(n.Content) + assert.NoError(t, err) + assert.Equal(t, "hello", string(nd)) + assert.NoError(t, n.Content.Close()) + + n, err = a.Next() + assert.NoError(t, err) + assert.Nil(t, n) +} diff --git a/pkg/file/targz.go b/pkg/file/targz.go new file mode 100644 index 000000000..e4be21db6 --- /dev/null +++ b/pkg/file/targz.go @@ -0,0 +1,45 @@ +package file + +import ( + "archive/tar" + "compress/gzip" + "errors" + "io" +) + +type TarReader struct { + tr *tar.Reader +} + +func NewTarReader(tr *tar.Reader) *TarReader { + return &TarReader{tr: tr} +} + +func TarReaderFromTarGz(r io.Reader) (*TarReader, error) { + gzipReader, err := gzip.NewReader(r) + if err != nil { + return nil, err + } + return &TarReader{tr: tar.NewReader(gzipReader)}, nil +} + +func (r *TarReader) Next() (*File, error) { + if r == nil || r.tr == nil { + return nil, nil + } + + h, err := r.tr.Next() + if errors.Is(err, io.EOF) { + return nil, nil + } + if err != nil { + return nil, err + } + + fi := h.FileInfo() + if fi.IsDir() { + return r.Next() + } + + return &File{Content: io.NopCloser(r.tr), Path: h.Name, Size: fi.Size()}, nil +} diff --git a/pkg/file/testdata/test.tar.gz b/pkg/file/testdata/test.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..c2f71e47209de06d73bb8530c9acbf90358b77c8 GIT binary patch literal 260 zcmV+f0sHPXe zcj^= zmocqkmJ&JXtm@0Y+}8Ie=9%|#tiOb`_rGUu?(-=WBAP=b{~hM^?2xI{lRgp+~!$!w;?r$Ag` 
zK@R3%=m(m9<$c~sDX8fnhZd!#CKi=s0L{=J|TZ>pX@66f>35*ab8Z5!67_KtYYweYgS>*|Zlx(@=vJXd;$i4e(}V11e=; O0YY1#*E~SpV*mgn2$9PG literal 0 HcmV?d00001 diff --git a/pkg/file/zip.go b/pkg/file/zip.go new file mode 100644 index 000000000..8a6a71b6c --- /dev/null +++ b/pkg/file/zip.go @@ -0,0 +1,87 @@ +package file + +import ( + "archive/zip" + "bytes" + "io" + "strings" +) + +type ZipReader struct { + zr *zip.Reader + i int +} + +func NewZipReader(zr *zip.Reader) *ZipReader { + return &ZipReader{zr: zr} +} + +func ZipReaderFrom(r io.Reader, n int64) (*ZipReader, error) { + b, err := io.ReadAll(io.LimitReader(r, n)) + if err != nil { + return nil, err + } + + zr, err := zip.NewReader(bytes.NewReader(b), int64(len(b))) + if err != nil { + return nil, err + } + + return NewZipReader(zr), nil +} + +func (r *ZipReader) Next() (*File, error) { + if r == nil || r.zr == nil { + return nil, nil + } + + if len(r.zr.File) <= r.i { + return nil, nil + } + + f := r.zr.File[r.i] + r.i++ + + fi := f.FileInfo() + if fi.IsDir() { + return r.Next() + } + + c, err := f.Open() + if err != nil { + return nil, err + } + + return &File{Content: c, Path: f.Name, Size: fi.Size()}, nil +} + +func MockZipReader(files []string) *zip.Reader { + b := new(bytes.Buffer) + w := zip.NewWriter(b) + for _, f := range files { + _, _ = w.Create(f) + } + _ = w.Close() + b2 := b.Bytes() + zr, _ := zip.NewReader(bytes.NewReader(b2), int64(len(b2))) + return zr +} + +func ZipBasePath(zr *zip.Reader) (b string) { + for _, f := range zr.File { + fp := strings.Split(f.Name, "/") + if len(fp) <= 1 { + // a file is existing in the root + return "" + } + // extract root directory name + if len(fp) == 2 && fp[1] == "" { + if b != "" { + // there are multiple directories on the root + return "" + } + b = fp[0] + } + } + return +} diff --git a/pkg/file/zip_test.go b/pkg/file/zip_test.go new file mode 100644 index 000000000..2c4e7fb77 --- /dev/null +++ b/pkg/file/zip_test.go @@ -0,0 +1,32 @@ +package file + +import ( + "io" + 
"testing" + + "github.com/stretchr/testify/assert" +) + +func TestMockZipReader(t *testing.T) { + z := MockZipReader([]string{"a", "b", "c/", "c/d"}) + assert.Equal(t, "a", z.File[0].Name) + assert.Equal(t, "b", z.File[1].Name) + assert.Equal(t, "c/", z.File[2].Name) + assert.Equal(t, "c/d", z.File[3].Name) + + for _, f := range []string{"a", "b", "c/d"} { + zf, err := z.Open(f) + assert.NoError(t, err) + b, err := io.ReadAll(zf) + assert.NoError(t, err) + assert.Equal(t, []byte{}, b) + assert.NoError(t, zf.Close()) + } +} + +func TestZipBasePath(t *testing.T) { + assert.Equal(t, "aaa", ZipBasePath(MockZipReader([]string{"aaa/", "aaa/a"}))) + assert.Equal(t, "", ZipBasePath(MockZipReader([]string{"aaa/", "aaa/a", "b"}))) + assert.Equal(t, "", ZipBasePath(MockZipReader([]string{"aaa"}))) + assert.Equal(t, "", ZipBasePath(MockZipReader([]string{"aaa/", "aaa/a", "b/", "b/c"}))) +} diff --git a/pkg/id/asset_gen.go b/pkg/id/asset_gen.go index 624d889e6..fa061e5c1 100644 --- a/pkg/id/asset_gen.go +++ b/pkg/id/asset_gen.go @@ -78,6 +78,16 @@ func (d AssetID) Ref() *AssetID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d AssetID) Contains(ids []AssetID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *AssetID) CopyRef() *AssetID { if d == nil { diff --git a/pkg/id/asset_gen_test.go b/pkg/id/asset_gen_test.go index 99392f3ae..14d5985b7 100644 --- a/pkg/id/asset_gen_test.go +++ b/pkg/id/asset_gen_test.go @@ -203,6 +203,13 @@ func TestAssetID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestAssetID_Contains(t *testing.T) { + id := NewAssetID() + id2 := NewAssetID() + assert.True(t, id.Contains([]AssetID{id, id2})) + assert.False(t, id.Contains([]AssetID{id2})) +} + func TestAssetID_CopyRef(t *testing.T) { id := New() subId := AssetIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestAssetIDSet_Add(t *testing.T) { name: "1 element", input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestAssetIDSet_Add(t *testing.T) { }, expected: &AssetIDSet{ m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []AssetID{ MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestAssetIDSet_Add(t *testing.T) { }, expected: &AssetIDSet{ m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []AssetID{ MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestAssetIDSet_AddRef(t *testing.T) { name: "1 element", input: &AssetId, expected: &AssetIDSet{ - m: 
map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestAssetIDSet_Has(t *testing.T) { AssetIDSet AssetID }{AssetIDSet: AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestAssetIDSet_Has(t *testing.T) { AssetIDSet AssetID }{AssetIDSet: AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestAssetIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: AssetIDSet{ @@ -820,7 +827,7 @@ func TestAssetIDSet_All(t *testing.T) { { name: "1 element", input: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestAssetIDSet_All(t *testing.T) { name: "multiple elements", input: &AssetIDSet{ m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - 
MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []AssetID{ MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestAssetIDSet_Clone(t *testing.T) { { name: "1 element", input: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestAssetIDSet_Clone(t *testing.T) { name: "multiple elements", input: &AssetIDSet{ m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []AssetID{ MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestAssetIDSet_Clone(t *testing.T) { }, expected: &AssetIDSet{ m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []AssetID{ MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestAssetIDSet_Merge(t *testing.T) { b *AssetIDSet }{ a: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): 
struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &AssetIDSet{}, }, expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestAssetIDSet_Merge(t *testing.T) { b *AssetIDSet }{ a: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &AssetIDSet{ m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []AssetID{ MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/dataset_gen.go b/pkg/id/dataset_gen.go index b6fe12c24..1e76621b5 100644 --- a/pkg/id/dataset_gen.go +++ b/pkg/id/dataset_gen.go @@ -78,6 +78,16 @@ func (d DatasetID) Ref() *DatasetID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d DatasetID) Contains(ids []DatasetID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *DatasetID) CopyRef() *DatasetID { if d == nil { diff --git a/pkg/id/dataset_gen_test.go b/pkg/id/dataset_gen_test.go index e5a29f7f6..48c1c8251 100644 --- a/pkg/id/dataset_gen_test.go +++ b/pkg/id/dataset_gen_test.go @@ -203,6 +203,13 @@ func TestDatasetID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestDatasetID_Contains(t *testing.T) { + id := NewDatasetID() + id2 := NewDatasetID() + assert.True(t, id.Contains([]DatasetID{id, id2})) + assert.False(t, id.Contains([]DatasetID{id2})) +} + func TestDatasetID_CopyRef(t *testing.T) { id := New() subId := DatasetIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestDatasetIDSet_Add(t *testing.T) { name: "1 element", input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestDatasetIDSet_Add(t *testing.T) { }, expected: &DatasetIDSet{ m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetID{ MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestDatasetIDSet_Add(t *testing.T) { }, expected: &DatasetIDSet{ m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetID{ MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestDatasetIDSet_AddRef(t *testing.T) { name: "1 
element", input: &DatasetId, expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestDatasetIDSet_Has(t *testing.T) { DatasetIDSet DatasetID }{DatasetIDSet: DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestDatasetIDSet_Has(t *testing.T) { DatasetIDSet DatasetID }{DatasetIDSet: DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestDatasetIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: DatasetIDSet{ @@ -820,7 +827,7 @@ func TestDatasetIDSet_All(t *testing.T) { { name: "1 element", input: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestDatasetIDSet_All(t *testing.T) { name: "multiple elements", input: 
&DatasetIDSet{ m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetID{ MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestDatasetIDSet_Clone(t *testing.T) { { name: "1 element", input: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestDatasetIDSet_Clone(t *testing.T) { name: "multiple elements", input: &DatasetIDSet{ m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetID{ MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestDatasetIDSet_Clone(t *testing.T) { }, expected: &DatasetIDSet{ m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + 
MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetID{ MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestDatasetIDSet_Merge(t *testing.T) { b *DatasetIDSet }{ a: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &DatasetIDSet{}, }, expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestDatasetIDSet_Merge(t *testing.T) { b *DatasetIDSet }{ a: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &DatasetIDSet{ m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []DatasetID{ MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/dataset_schema_field_gen.go b/pkg/id/dataset_schema_field_gen.go index e73a28ca1..b26c073e0 100644 --- a/pkg/id/dataset_schema_field_gen.go +++ b/pkg/id/dataset_schema_field_gen.go @@ -78,6 +78,16 @@ func (d DatasetSchemaFieldID) Ref() *DatasetSchemaFieldID { return &d2 } +// Contains returns whether the id is contained in the slice. 
+func (d DatasetSchemaFieldID) Contains(ids []DatasetSchemaFieldID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. func (d *DatasetSchemaFieldID) CopyRef() *DatasetSchemaFieldID { if d == nil { diff --git a/pkg/id/dataset_schema_field_gen_test.go b/pkg/id/dataset_schema_field_gen_test.go index b68460794..bd1926f1f 100644 --- a/pkg/id/dataset_schema_field_gen_test.go +++ b/pkg/id/dataset_schema_field_gen_test.go @@ -203,6 +203,13 @@ func TestDatasetSchemaFieldID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestDatasetSchemaFieldID_Contains(t *testing.T) { + id := NewDatasetSchemaFieldID() + id2 := NewDatasetSchemaFieldID() + assert.True(t, id.Contains([]DatasetSchemaFieldID{id, id2})) + assert.False(t, id.Contains([]DatasetSchemaFieldID{id2})) +} + func TestDatasetSchemaFieldID_CopyRef(t *testing.T) { id := New() subId := DatasetSchemaFieldIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestDatasetSchemaFieldIDSet_Add(t *testing.T) { name: "1 element", input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestDatasetSchemaFieldIDSet_Add(t *testing.T) { }, expected: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + 
MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaFieldID{ MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestDatasetSchemaFieldIDSet_Add(t *testing.T) { }, expected: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaFieldID{ MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestDatasetSchemaFieldIDSet_AddRef(t *testing.T) { name: "1 element", input: &DatasetSchemaFieldId, expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestDatasetSchemaFieldIDSet_Has(t *testing.T) { DatasetSchemaFieldIDSet DatasetSchemaFieldID }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestDatasetSchemaFieldIDSet_Has(t *testing.T) { DatasetSchemaFieldIDSet DatasetSchemaFieldID }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: 
map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestDatasetSchemaFieldIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: DatasetSchemaFieldIDSet{ @@ -820,7 +827,7 @@ func TestDatasetSchemaFieldIDSet_All(t *testing.T) { { name: "1 element", input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestDatasetSchemaFieldIDSet_All(t *testing.T) { name: "multiple elements", input: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaFieldID{ MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestDatasetSchemaFieldIDSet_Clone(t *testing.T) { { name: 
"1 element", input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestDatasetSchemaFieldIDSet_Clone(t *testing.T) { name: "multiple elements", input: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaFieldID{ MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestDatasetSchemaFieldIDSet_Clone(t *testing.T) { }, expected: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaFieldID{ MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func 
TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { b *DatasetSchemaFieldIDSet }{ a: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &DatasetSchemaFieldIDSet{}, }, expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { b *DatasetSchemaFieldIDSet }{ a: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []DatasetSchemaFieldID{ MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/dataset_schema_gen.go 
b/pkg/id/dataset_schema_gen.go index a9d0b132a..2c47eeb79 100644 --- a/pkg/id/dataset_schema_gen.go +++ b/pkg/id/dataset_schema_gen.go @@ -78,6 +78,16 @@ func (d DatasetSchemaID) Ref() *DatasetSchemaID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d DatasetSchemaID) Contains(ids []DatasetSchemaID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. func (d *DatasetSchemaID) CopyRef() *DatasetSchemaID { if d == nil { diff --git a/pkg/id/dataset_schema_gen_test.go b/pkg/id/dataset_schema_gen_test.go index 9320f2b14..c3b8910b2 100644 --- a/pkg/id/dataset_schema_gen_test.go +++ b/pkg/id/dataset_schema_gen_test.go @@ -203,6 +203,13 @@ func TestDatasetSchemaID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestDatasetSchemaID_Contains(t *testing.T) { + id := NewDatasetSchemaID() + id2 := NewDatasetSchemaID() + assert.True(t, id.Contains([]DatasetSchemaID{id, id2})) + assert.False(t, id.Contains([]DatasetSchemaID{id2})) +} + func TestDatasetSchemaID_CopyRef(t *testing.T) { id := New() subId := DatasetSchemaIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestDatasetSchemaIDSet_Add(t *testing.T) { name: "1 element", input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestDatasetSchemaIDSet_Add(t *testing.T) { }, expected: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + 
MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaID{ MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestDatasetSchemaIDSet_Add(t *testing.T) { }, expected: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaID{ MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestDatasetSchemaIDSet_AddRef(t *testing.T) { name: "1 element", input: &DatasetSchemaId, expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestDatasetSchemaIDSet_Has(t *testing.T) { DatasetSchemaIDSet DatasetSchemaID }{DatasetSchemaIDSet: DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestDatasetSchemaIDSet_Has(t *testing.T) { DatasetSchemaIDSet DatasetSchemaID }{DatasetSchemaIDSet: DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: 
[]DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestDatasetSchemaIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: DatasetSchemaIDSet{ @@ -820,7 +827,7 @@ func TestDatasetSchemaIDSet_All(t *testing.T) { { name: "1 element", input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestDatasetSchemaIDSet_All(t *testing.T) { name: "multiple elements", input: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaID{ MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestDatasetSchemaIDSet_Clone(t *testing.T) { { name: "1 element", input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: 
[]DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestDatasetSchemaIDSet_Clone(t *testing.T) { name: "multiple elements", input: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaID{ MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestDatasetSchemaIDSet_Clone(t *testing.T) { }, expected: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []DatasetSchemaID{ MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestDatasetSchemaIDSet_Merge(t *testing.T) { b *DatasetSchemaIDSet }{ a: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &DatasetSchemaIDSet{}, }, expected: &DatasetSchemaIDSet{ - m: 
map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestDatasetSchemaIDSet_Merge(t *testing.T) { b *DatasetSchemaIDSet }{ a: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []DatasetSchemaID{ MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/id.tmpl b/pkg/id/id.tmpl index 402f4d405..aad5207dd 100644 --- a/pkg/id/id.tmpl +++ b/pkg/id/id.tmpl @@ -78,6 +78,16 @@ func (d {{$name}}ID) Ref() *{{$name}}ID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d {{$name}}ID) Contains(ids []{{$name}}ID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *{{$name}}ID) CopyRef() *{{$name}}ID { if d == nil { diff --git a/pkg/id/id_test.tmpl b/pkg/id/id_test.tmpl index 98b41c969..c8f57ec65 100644 --- a/pkg/id/id_test.tmpl +++ b/pkg/id/id_test.tmpl @@ -205,6 +205,13 @@ func Test{{$name}}ID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func Test{{$name}}ID_Contains(t *testing.T) { + id := New{{$name}}ID() + id2 := New{{$name}}ID() + assert.True(t, id.Contains([]{{$name}}ID{id, id2})) + assert.False(t, id.Contains([]{{$name}}ID{id2})) +} + func Test{{$name}}ID_CopyRef(t *testing.T) { id := New() subId := {{$name}}IDFromRefID(&id) @@ -614,7 +621,7 @@ func Test{{$name}}IDSet_Add(t *testing.T) { name: "1 element", input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -627,9 +634,9 @@ func Test{{$name}}IDSet_Add(t *testing.T) { }, expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -647,8 +654,8 @@ func Test{{$name}}IDSet_Add(t *testing.T) { }, expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -692,7 +699,7 @@ func 
Test{{$name}}IDSet_AddRef(t *testing.T) { name: "1 element", input: &{{$name}}Id, expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -735,7 +742,7 @@ func Test{{$name}}IDSet_Has(t *testing.T) { {{$name}}IDSet {{$name}}ID }{ {{$name}}IDSet: {{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -746,7 +753,7 @@ func Test{{$name}}IDSet_Has(t *testing.T) { {{$name}}IDSet {{$name}}ID }{ {{$name}}IDSet: {{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -781,7 +788,7 @@ func Test{{$name}}IDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: {{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: {{$name}}IDSet{ @@ -822,7 +829,7 @@ func Test{{$name}}IDSet_All(t *testing.T) { { name: "1 element", input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: 
[]{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -831,9 +838,9 @@ func Test{{$name}}IDSet_All(t *testing.T) { name: "multiple elements", input: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -880,11 +887,11 @@ func Test{{$name}}IDSet_Clone(t *testing.T) { { name: "1 element", input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -892,9 +899,9 @@ func Test{{$name}}IDSet_Clone(t *testing.T) { name: "multiple elements", input: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -904,9 +911,9 @@ func Test{{$name}}IDSet_Clone(t *testing.T) { }, expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - 
Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -957,13 +964,13 @@ func Test{{$name}}IDSet_Merge(t *testing.T) { b *{{$name}}IDSet }{ a: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &{{$name}}IDSet{}, }, expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -974,18 +981,18 @@ func Test{{$name}}IDSet_Merge(t *testing.T) { b *{{$name}}IDSet }{ a: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/infobox_field_gen.go 
b/pkg/id/infobox_field_gen.go index 189646e2e..40758876a 100644 --- a/pkg/id/infobox_field_gen.go +++ b/pkg/id/infobox_field_gen.go @@ -78,6 +78,16 @@ func (d InfoboxFieldID) Ref() *InfoboxFieldID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d InfoboxFieldID) Contains(ids []InfoboxFieldID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. func (d *InfoboxFieldID) CopyRef() *InfoboxFieldID { if d == nil { diff --git a/pkg/id/infobox_field_gen_test.go b/pkg/id/infobox_field_gen_test.go index 89b02356b..8c1fd0748 100644 --- a/pkg/id/infobox_field_gen_test.go +++ b/pkg/id/infobox_field_gen_test.go @@ -203,6 +203,13 @@ func TestInfoboxFieldID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestInfoboxFieldID_Contains(t *testing.T) { + id := NewInfoboxFieldID() + id2 := NewInfoboxFieldID() + assert.True(t, id.Contains([]InfoboxFieldID{id, id2})) + assert.False(t, id.Contains([]InfoboxFieldID{id2})) +} + func TestInfoboxFieldID_CopyRef(t *testing.T) { id := New() subId := InfoboxFieldIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestInfoboxFieldIDSet_Add(t *testing.T) { name: "1 element", input: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestInfoboxFieldIDSet_Add(t *testing.T) { }, expected: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + 
MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []InfoboxFieldID{ MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestInfoboxFieldIDSet_Add(t *testing.T) { }, expected: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []InfoboxFieldID{ MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestInfoboxFieldIDSet_AddRef(t *testing.T) { name: "1 element", input: &InfoboxFieldId, expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestInfoboxFieldIDSet_Has(t *testing.T) { InfoboxFieldIDSet InfoboxFieldID }{InfoboxFieldIDSet: InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestInfoboxFieldIDSet_Has(t *testing.T) { InfoboxFieldIDSet InfoboxFieldID }{InfoboxFieldIDSet: InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ 
-779,7 +786,7 @@ func TestInfoboxFieldIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: InfoboxFieldIDSet{ @@ -820,7 +827,7 @@ func TestInfoboxFieldIDSet_All(t *testing.T) { { name: "1 element", input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestInfoboxFieldIDSet_All(t *testing.T) { name: "multiple elements", input: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []InfoboxFieldID{ MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestInfoboxFieldIDSet_Clone(t *testing.T) { { name: "1 element", input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: 
map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestInfoboxFieldIDSet_Clone(t *testing.T) { name: "multiple elements", input: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []InfoboxFieldID{ MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestInfoboxFieldIDSet_Clone(t *testing.T) { }, expected: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []InfoboxFieldID{ MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestInfoboxFieldIDSet_Merge(t *testing.T) { b *InfoboxFieldIDSet }{ a: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &InfoboxFieldIDSet{}, }, expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: 
[]InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestInfoboxFieldIDSet_Merge(t *testing.T) { b *InfoboxFieldIDSet }{ a: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []InfoboxFieldID{ MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/layer_gen.go b/pkg/id/layer_gen.go index 0d10e9eb9..d49abdcff 100644 --- a/pkg/id/layer_gen.go +++ b/pkg/id/layer_gen.go @@ -78,6 +78,16 @@ func (d LayerID) Ref() *LayerID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d LayerID) Contains(ids []LayerID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *LayerID) CopyRef() *LayerID { if d == nil { diff --git a/pkg/id/layer_gen_test.go b/pkg/id/layer_gen_test.go index a22ad38d0..bfc9218e3 100644 --- a/pkg/id/layer_gen_test.go +++ b/pkg/id/layer_gen_test.go @@ -203,6 +203,13 @@ func TestLayerID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestLayerID_Contains(t *testing.T) { + id := NewLayerID() + id2 := NewLayerID() + assert.True(t, id.Contains([]LayerID{id, id2})) + assert.False(t, id.Contains([]LayerID{id2})) +} + func TestLayerID_CopyRef(t *testing.T) { id := New() subId := LayerIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestLayerIDSet_Add(t *testing.T) { name: "1 element", input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestLayerIDSet_Add(t *testing.T) { }, expected: &LayerIDSet{ m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []LayerID{ MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestLayerIDSet_Add(t *testing.T) { }, expected: &LayerIDSet{ m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []LayerID{ MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestLayerIDSet_AddRef(t *testing.T) { name: "1 element", input: &LayerId, expected: &LayerIDSet{ - m: 
map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestLayerIDSet_Has(t *testing.T) { LayerIDSet LayerID }{LayerIDSet: LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestLayerIDSet_Has(t *testing.T) { LayerIDSet LayerID }{LayerIDSet: LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestLayerIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: LayerIDSet{ @@ -820,7 +827,7 @@ func TestLayerIDSet_All(t *testing.T) { { name: "1 element", input: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestLayerIDSet_All(t *testing.T) { name: "multiple elements", input: &LayerIDSet{ m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - 
MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []LayerID{ MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestLayerIDSet_Clone(t *testing.T) { { name: "1 element", input: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestLayerIDSet_Clone(t *testing.T) { name: "multiple elements", input: &LayerIDSet{ m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []LayerID{ MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestLayerIDSet_Clone(t *testing.T) { }, expected: &LayerIDSet{ m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []LayerID{ MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestLayerIDSet_Merge(t *testing.T) { b *LayerIDSet }{ a: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): 
struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &LayerIDSet{}, }, expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestLayerIDSet_Merge(t *testing.T) { b *LayerIDSet }{ a: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &LayerIDSet{ m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []LayerID{ MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go index 5755f0cf3..c6a6ac8d1 100644 --- a/pkg/id/plugin.go +++ b/pkg/id/plugin.go @@ -7,20 +7,24 @@ import ( "github.com/blang/semver" ) -// MUST NOT CHANGE -const officialPluginIDStr = "reearth" - -// OfficialPluginID _ -var OfficialPluginID = PluginID{name: officialPluginIDStr, sys: true} - // PluginID is an ID for Plugin. 
type PluginID struct { name string version string sys bool + scene *SceneID } -var pluginNameRe = regexp.MustCompile("^[a-zA-Z0-9._-]+$") +// MUST NOT CHANGE +const ( + officialPluginIDStr = "reearth" + sepPluginID = "~" +) + +var ( + OfficialPluginID = PluginID{name: officialPluginIDStr, sys: true} + pluginNameRe = regexp.MustCompile("^[a-zA-Z0-9_-]+$") +) func validatePluginName(s string) bool { if len(s) == 0 || len(s) > 100 || s == "reearth" || strings.Contains(s, "/") { @@ -29,6 +33,27 @@ func validatePluginName(s string) bool { return pluginNameRe.MatchString(s) } +func NewPluginID(name string, version string, scene *SceneID) (PluginID, error) { + if name == officialPluginIDStr { + // official plugin + return PluginID{name: name, sys: true}, nil + } + + if !validatePluginName(name) { + return PluginID{}, ErrInvalidID + } + + if _, err := semver.Parse(version); err != nil { + return PluginID{}, ErrInvalidID + } + + return PluginID{ + name: name, + version: version, + scene: scene.CopyRef(), + }, nil +} + // PluginIDFrom generates a new id.PluginID from a string. 
func PluginIDFrom(id string) (PluginID, error) { if id == officialPluginIDStr { @@ -36,15 +61,27 @@ func PluginIDFrom(id string) (PluginID, error) { return PluginID{name: id, sys: true}, nil } - ids := strings.Split(id, "#") - if len(ids) != 2 || !validatePluginName(ids[0]) { - return PluginID{}, ErrInvalidID - } - v, err2 := semver.Parse(ids[1]) - if err2 != nil { + var name, version string + var sceneID *SceneID + + ids := strings.SplitN(id, sepPluginID, 3) + switch len(ids) { + case 2: + name = ids[0] + version = ids[1] + case 3: + sceneID2, err := SceneIDFrom(ids[0]) + if err != nil { + return PluginID{}, ErrInvalidID + } + sceneID = &sceneID2 + name = ids[1] + version = ids[2] + default: return PluginID{}, ErrInvalidID } - return PluginID{name: ids[0], version: v.String()}, nil + + return NewPluginID(name, version, sceneID) } // MustPluginID generates a new id.PluginID from a string, but panics if the string cannot be parsed. @@ -90,6 +127,11 @@ func (d PluginID) System() bool { return d.sys } +// Scene returns a scene ID of the plugin. It indicates this plugin is private and available for only the specific scene. +func (d PluginID) Scene() *SceneID { + return d.scene.CopyRef() +} + // Validate returns true if id is valid. func (d PluginID) Validate() bool { if d.sys { @@ -99,11 +141,15 @@ } // String returns a string representation. -func (d PluginID) String() string { +func (d PluginID) String() (s string) { if d.sys { return d.name } - return d.name + "#" + d.version + if d.scene != nil { + s = d.scene.String() + sepPluginID + } + s += d.name + sepPluginID + d.version + return } // Ref returns a reference. @@ -132,6 +178,14 @@ func (d *PluginID) StringRef() *string { // Equal returns if two IDs are equivalent.
func (d PluginID) Equal(d2 PluginID) bool { + if d.sys { + return d2.sys + } + if d.scene != nil { + if d2.scene == nil || *d.scene != *d2.scene { + return false + } + } return d.name == d2.name && d.version == d2.version } diff --git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go index 2691aefc0..49a8b937c 100644 --- a/pkg/id/plugin_test.go +++ b/pkg/id/plugin_test.go @@ -22,7 +22,7 @@ func TestPluginIDValidator(t *testing.T) { }{ { name: "accepted name", - input: "1cc.1_c-d", + input: "1cc1_c-d", expected: true, }, { @@ -75,86 +75,155 @@ func TestPluginIDValidator(t *testing.T) { } } +func TestNewPluginID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + pluginName string + version string + scene *SceneID + expected PluginID + expectedError bool + }{ + { + name: "success:accepted name", + pluginName: "1ccc1_c-d", + version: "1.0.0", + scene: nil, + expected: PluginID{ + name: "1ccc1_c-d", + version: "1.0.0", + sys: false, + scene: nil, + }, + }, + { + name: "success:with scene id", + pluginName: "aaaaa", + version: "0.1.0", + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), + expected: PluginID{ + name: "aaaaa", + version: "0.1.0", + sys: false, + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), + }, + }, + { + name: "success:official plugin id", + pluginName: officialPluginIDStr, + expected: PluginID{ + name: officialPluginIDStr, + version: "", + sys: true, + scene: nil, + }, + }, + { + name: "fail:invalid name1", + pluginName: "1cc1_c-d", + version: "", + scene: nil, + expectedError: true, + }, + { + name: "fail:invalid name2", + pluginName: "1cc1_c-d/?s", + version: "1.0.0", + scene: nil, + expectedError: true, + }, + { + name: "fail:invalid name3", + pluginName: "1cc1_c-d/?s", + version: "_1", + scene: nil, + expectedError: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := NewPluginID(tc.pluginName, tc.version, tc.scene) + if tc.expectedError 
{ + assert.Error(tt, err) + } else { + assert.Equal(tt, tc.expected, result) + } + }) + } +} + func TestPluginIDFrom(t *testing.T) { t.Parallel() testCases := []struct { - name string - input string - expected struct { - err error - result PluginID - } + name string + input string + expected PluginID + expectedError bool }{ { name: "success:accepted name", - input: "1cc.1_c-d#1.0.0", - expected: struct { - err error - result PluginID - }{ - err: nil, - result: PluginID{ - name: "1cc.1_c-d", - version: "1.0.0", - sys: false, - }, + input: "1cc1_c-d~1.0.0", + expected: PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, + scene: nil, + }, + }, + { + name: "success:with scene id", + input: "01fbpdqax0ttrftj3gb5gm4rw7~aaaaa~0.1.0", + expected: PluginID{ + name: "aaaaa", + version: "0.1.0", + sys: false, + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), }, }, { name: "success:official plugin id", input: officialPluginIDStr, - expected: struct { - err error - result PluginID - }{ - err: nil, - result: PluginID{ - name: officialPluginIDStr, - version: "", - sys: true, - }, + expected: PluginID{ + name: officialPluginIDStr, + version: "", + sys: true, + scene: nil, }, }, { - name: "fail:not valid name", - input: "1cc.1_c-d", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name1", + input: "1cc1_c-d", + expectedError: true, }, { - name: "fail:not valid name", - input: "1cc.1_c-d/?s#1.0.0", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name2", + input: "1cc1_c-d/?s~1.0.0", + expectedError: true, }, { - name: "fail:not valid name", - input: "1cc.1_c-d/?s#1", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name3", + input: "1cc1_c-d/?s~1", + expectedError: true, + }, + { + name: "fail:invalid scene id", + input: 
"xxxx~ssss~1.0.0", + expectedError: true, }, } for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - result, _ := PluginIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) + result, err := PluginIDFrom(tc.input) + if tc.expectedError { + assert.Error(tt, err) + } else { + assert.Equal(tt, tc.expected, result) + } }) } } @@ -162,73 +231,52 @@ func TestPluginIDFrom(t *testing.T) { func TestMustPluginID(t *testing.T) { t.Parallel() testCases := []struct { - name string - input string - expected struct { - err error - result PluginID - } + name string + input string + expected PluginID + expectedError bool }{ { name: "success:accepted name", - input: "1cc.1_c-d#1.0.0", - expected: struct { - err error - result PluginID - }{ - err: nil, - result: PluginID{ - name: "1cc.1_c-d", - version: "1.0.0", - sys: false, - }, + input: "1cc1_c-d~1.0.0", + expected: PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, }, }, { - name: "fail:not valid name", - input: "1cc.1_c-d", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name", + input: "1cc.1_c-d", + expectedError: true, }, { - name: "fail:not valid name", - input: "1cc.1_c-d/?s#1.0.0", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name2", + input: "1cc.1_c-d/?s~1.0.0", + expectedError: true, }, { - name: "fail:not valid name", - input: "1cc.1_c-d/?s#1", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name3", + input: "1cc.1_c-d/?s~1", + expectedError: true, + }, + { + name: "fail:invalid scene id", + input: "xxxx~ssss~1.0.0", + expectedError: true, }, } for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - if tc.expected.err != nil { + if tc.expectedError { assert.Panics(tt, func() { _ = 
MustPluginID(tc.input) }) } else { result := MustPluginID(tc.input) - assert.Equal(tt, tc.expected.result, result) + assert.Equal(tt, tc.expected, result) } }) } @@ -239,89 +287,72 @@ func TestPluginIDFromRef(t *testing.T) { testCases := []struct { name string input string - expected struct { - err error - result PluginID - } + expected *PluginID }{ { name: "success:accepted name", - input: "1cc.1_c-d#1.0.0", - expected: struct { - err error - result PluginID - }{ - err: nil, - result: PluginID{ - name: "1cc.1_c-d", - version: "1.0.0", - sys: false, - }, + input: "1cc1_c-d~1.0.0", + expected: &PluginID{ + name: "1cc1_c-d", + version: "1.0.0", + sys: false, }, }, { - name: "fail:not valid name", - input: "1cc.1_c-d", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name1", + input: "1cc1_c-d", }, { - name: "fail:not valid name", - input: "1cc.1_c-d/?s#1.0.0", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name2", + input: "1cc1_c-d/?s~1.0.0", }, { - name: "fail:not valid name", - input: "1cc.1_c-d/?s#1", - expected: struct { - err error - result PluginID - }{ - err: ErrInvalidID, - result: PluginID{}, - }, + name: "fail:invalid name3", + input: "1cc1_c-d/?s~1", + }, + { + name: "fail:invalid scene id", + input: "xxxx~ssss~1.0.0", }, } for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - if tc.expected.err != nil { + if tc.expected == nil { result := PluginIDFromRef(&tc.input) assert.Nil(tt, result) } else { result := PluginIDFromRef(&tc.input) - assert.Equal(tt, tc.expected.result, *result) + assert.Equal(tt, *tc.expected, *result) } }) } } func TestPluginID_Name(t *testing.T) { - plugin := MustPluginID("MyPlugin#1.0.0") + plugin := MustPluginID("MyPlugin~1.0.0") assert.Equal(t, "MyPlugin", plugin.Name()) } func TestPluginID_Version(t *testing.T) { - plugin := 
MustPluginID("MyPlugin#1.0.0") + plugin := MustPluginID("MyPlugin~1.0.0") assert.Equal(t, semver.MustParse("1.0.0"), plugin.Version()) } +func TestPluginID_Scene(t *testing.T) { + scene := MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7") + sid := PluginID{ + scene: &scene, + }.Scene() + assert.Equal(t, scene, *sid) + assert.NotSame(t, scene, *sid) +} + func TestPluginID_System(t *testing.T) { - plugin := MustPluginID("MyPlugin#1.0.0") + plugin := MustPluginID("MyPlugin~1.0.0") assert.False(t, plugin.System()) @@ -340,7 +371,7 @@ func TestPluginID_Validate(t *testing.T) { { name: "success:accepted name", input: PluginID{ - name: "1cc.1_c-d", + name: "1cc1_c-d", version: "1.0.0", sys: false, }, @@ -349,7 +380,7 @@ func TestPluginID_Validate(t *testing.T) { { name: "success:accepted name", input: PluginID{ - name: "1cc.1/?_c-d", + name: "1cc1/?_c-d", version: "1.0.0", sys: false, }, @@ -372,22 +403,44 @@ func TestPluginID_String(t *testing.T) { expected string }{ { - name: "success:accepted name", + name: "accepted name1", input: PluginID{ name: "ppl", version: "1.0.0", + scene: nil, sys: false, }, - expected: "ppl#1.0.0", + expected: "ppl~1.0.0", }, { - name: "success:accepted name", + name: "accepted name2", + input: PluginID{ + name: "plg", + version: "2.1.0-beta", + scene: nil, + sys: false, + }, + expected: "plg~2.1.0-beta", + }, + { + name: "with scene id", input: PluginID{ name: "plg", version: "2.1.0-beta", + scene: MustSceneID("01fbpdqax0ttrftj3gb5gm4rw7").Ref(), sys: false, }, - expected: "plg#2.1.0-beta", + expected: "01fbpdqax0ttrftj3gb5gm4rw7~plg~2.1.0-beta", + }, + { + name: "system", + input: PluginID{ + name: "reearth", + version: "", + scene: nil, + sys: true, + }, + expected: "reearth", }, } for _, tc := range testCases { @@ -399,7 +452,7 @@ func TestPluginID_String(t *testing.T) { } func TestPluginID_Ref(t *testing.T) { - pluginID := MustPluginID("Test#1.0.0") + pluginID := MustPluginID("Test~1.0.0") ref := pluginID.Ref() @@ -407,7 +460,7 @@ func 
TestPluginID_Ref(t *testing.T) { } func TestPluginID_CopyRef(t *testing.T) { - pluginID := MustPluginID("Test#1.0.0") + pluginID := MustPluginID("Test~1.0.0") ref := pluginID.Ref() @@ -419,7 +472,7 @@ func TestPluginID_CopyRef(t *testing.T) { } func TestPluginID_StringRef(t *testing.T) { - pluginID := MustPluginID("Test#1.0.0") + pluginID := MustPluginID("Test~1.0.0") ref := pluginID.Ref() @@ -431,44 +484,51 @@ func TestPluginID_StringRef(t *testing.T) { func TestPluginID_Equal(t *testing.T) { t.Parallel() testCases := []struct { - name string - input struct { - pluginID1 PluginID - pluginID2 PluginID - } + name string + input1 PluginID + input2 PluginID expected bool }{ { - name: "Equal", - input: struct { - pluginID1 PluginID - pluginID2 PluginID - }{ - pluginID1: MustPluginID("Test#1.0.0"), - pluginID2: MustPluginID("Test#1.0.0"), - }, + name: "system", + input1: MustPluginID("reearth"), + input2: MustPluginID("reearth"), expected: true, }, { - name: "Equal", - input: struct { - pluginID1 PluginID - pluginID2 PluginID - }{ - pluginID1: MustPluginID("Test#1.0.0"), - pluginID2: MustPluginID("Test#1.0.1"), - }, + name: "system and normal", + input1: MustPluginID("reearth"), + input2: MustPluginID("Test~1.0.0"), expected: false, }, { - name: "Equal", - input: struct { - pluginID1 PluginID - pluginID2 PluginID - }{ - pluginID1: MustPluginID("Test0#1.0.0"), - pluginID2: MustPluginID("Test1#1.0.0"), - }, + name: "same", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.0"), + expected: true, + }, + { + name: "diff version", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.1"), + expected: false, + }, + { + name: "diff name", + input1: MustPluginID("Test0~1.0.0"), + input2: MustPluginID("Test1~1.0.0"), + expected: false, + }, + { + name: "same scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + expected: true, + }, + { + name: "diff scene", 
+ input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy7~Test~1.0.0"), expected: false, }, } @@ -476,31 +536,31 @@ func TestPluginID_Equal(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.pluginID1.Equal(tc.input.pluginID2)) - assert.Equal(tt, tc.expected, tc.input.pluginID2.Equal(tc.input.pluginID1)) + assert.Equal(tt, tc.expected, tc.input1.Equal(tc.input2)) + assert.Equal(tt, tc.expected, tc.input2.Equal(tc.input1)) }) } } func TestPluginID_MarshalText(t *testing.T) { - pluginIdRef := MustPluginID("Test#1.0.0").Ref() + pluginIdRef := MustPluginID("Test~1.0.0").Ref() res, err := pluginIdRef.MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte("Test#1.0.0"), res) + assert.Equal(t, []byte("Test~1.0.0"), res) } func TestPluginID_UnmarshalText(t *testing.T) { - text := []byte("Test#1.0.0") + text := []byte("Test~1.0.0") pluginId := &PluginID{} err := pluginId.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "Test#1.0.0", pluginId.String()) + assert.Equal(t, "Test~1.0.0", pluginId.String()) } @@ -518,20 +578,20 @@ func TestPluginIDToKeys(t *testing.T) { }, { name: "1 element", - input: []PluginID{MustPluginID("Test#1.0.0")}, - expected: []string{"Test#1.0.0"}, + input: []PluginID{MustPluginID("Test~1.0.0")}, + expected: []string{"Test~1.0.0"}, }, { name: "multiple elements", input: []PluginID{ - MustPluginID("Test#1.0.0"), - MustPluginID("Test#1.0.1"), - MustPluginID("Test#1.0.2"), + MustPluginID("Test~1.0.0"), + MustPluginID("Test~1.0.1"), + MustPluginID("Test~1.0.2"), }, expected: []string{ - "Test#1.0.0", - "Test#1.0.1", - "Test#1.0.2", + "Test~1.0.0", + "Test~1.0.1", + "Test~1.0.2", }, }, } @@ -569,30 +629,30 @@ func TestPluginIDsFrom(t *testing.T) { }, { name: "1 element", - input: []string{"Test#1.0.0"}, + input: []string{"Test~1.0.0"}, expected: struct { res []PluginID err error }{ - res: 
[]PluginID{MustPluginID("Test#1.0.0")}, + res: []PluginID{MustPluginID("Test~1.0.0")}, err: nil, }, }, { name: "multiple elements", input: []string{ - "Test#1.0.0", - "Test#1.0.1", - "Test#1.0.2", + "Test~1.0.0", + "Test~1.0.1", + "Test~1.0.2", }, expected: struct { res []PluginID err error }{ res: []PluginID{ - MustPluginID("Test#1.0.0"), - MustPluginID("Test#1.0.1"), - MustPluginID("Test#1.0.2"), + MustPluginID("Test~1.0.0"), + MustPluginID("Test~1.0.1"), + MustPluginID("Test~1.0.2"), }, err: nil, }, @@ -600,9 +660,9 @@ func TestPluginIDsFrom(t *testing.T) { { name: "multiple elements", input: []string{ - "Test#1.0.0", - "Test#1.0.1", - "Test#1.0.2", + "Test~1.0.0", + "Test~1.0.1", + "Test~1.0.2", }, expected: struct { res []PluginID @@ -627,7 +687,6 @@ func TestPluginIDsFrom(t *testing.T) { assert.Equal(tt, tc.expected.res, res) assert.Nil(tt, err) } - }) } } diff --git a/pkg/id/project_gen.go b/pkg/id/project_gen.go index 41c10b7ff..883d8c1a8 100644 --- a/pkg/id/project_gen.go +++ b/pkg/id/project_gen.go @@ -78,6 +78,16 @@ func (d ProjectID) Ref() *ProjectID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d ProjectID) Contains(ids []ProjectID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *ProjectID) CopyRef() *ProjectID { if d == nil { diff --git a/pkg/id/project_gen_test.go b/pkg/id/project_gen_test.go index 2a7a07fb6..fa8eed3f2 100644 --- a/pkg/id/project_gen_test.go +++ b/pkg/id/project_gen_test.go @@ -203,6 +203,13 @@ func TestProjectID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestProjectID_Contains(t *testing.T) { + id := NewProjectID() + id2 := NewProjectID() + assert.True(t, id.Contains([]ProjectID{id, id2})) + assert.False(t, id.Contains([]ProjectID{id2})) +} + func TestProjectID_CopyRef(t *testing.T) { id := New() subId := ProjectIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestProjectIDSet_Add(t *testing.T) { name: "1 element", input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestProjectIDSet_Add(t *testing.T) { }, expected: &ProjectIDSet{ m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []ProjectID{ MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestProjectIDSet_Add(t *testing.T) { }, expected: &ProjectIDSet{ m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []ProjectID{ MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestProjectIDSet_AddRef(t *testing.T) { name: "1 
element", input: &ProjectId, expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestProjectIDSet_Has(t *testing.T) { ProjectIDSet ProjectID }{ProjectIDSet: ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestProjectIDSet_Has(t *testing.T) { ProjectIDSet ProjectID }{ProjectIDSet: ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestProjectIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: ProjectIDSet{ @@ -820,7 +827,7 @@ func TestProjectIDSet_All(t *testing.T) { { name: "1 element", input: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestProjectIDSet_All(t *testing.T) { name: "multiple elements", input: 
&ProjectIDSet{ m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []ProjectID{ MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestProjectIDSet_Clone(t *testing.T) { { name: "1 element", input: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestProjectIDSet_Clone(t *testing.T) { name: "multiple elements", input: &ProjectIDSet{ m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []ProjectID{ MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestProjectIDSet_Clone(t *testing.T) { }, expected: &ProjectIDSet{ m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []ProjectID{ MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestProjectIDSet_Merge(t *testing.T) { b *ProjectIDSet }{ a: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &ProjectIDSet{}, }, expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestProjectIDSet_Merge(t *testing.T) { b *ProjectIDSet }{ a: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &ProjectIDSet{ m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []ProjectID{ MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/property_gen.go b/pkg/id/property_gen.go index 7c8ea8759..f0cfdbf23 100644 --- a/pkg/id/property_gen.go +++ b/pkg/id/property_gen.go @@ -78,6 +78,16 @@ func (d PropertyID) Ref() *PropertyID { return &d2 } +// Contains returns whether the id is contained in the slice. 
+func (d PropertyID) Contains(ids []PropertyID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. func (d *PropertyID) CopyRef() *PropertyID { if d == nil { diff --git a/pkg/id/property_gen_test.go b/pkg/id/property_gen_test.go index a3fa5bf00..1c9ea59c8 100644 --- a/pkg/id/property_gen_test.go +++ b/pkg/id/property_gen_test.go @@ -203,6 +203,13 @@ func TestPropertyID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestPropertyID_Contains(t *testing.T) { + id := NewPropertyID() + id2 := NewPropertyID() + assert.True(t, id.Contains([]PropertyID{id, id2})) + assert.False(t, id.Contains([]PropertyID{id2})) +} + func TestPropertyID_CopyRef(t *testing.T) { id := New() subId := PropertyIDFromRefID(&id) @@ -210,7 +217,7 @@ func TestPropertyID_CopyRef(t *testing.T) { subIdCopyRef := subId.CopyRef() assert.Equal(t, *subId, *subIdCopyRef) - assert.False(t, subId == subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) } func TestPropertyID_IDRef(t *testing.T) { @@ -612,7 +619,7 @@ func TestPropertyIDSet_Add(t *testing.T) { name: "1 element", input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestPropertyIDSet_Add(t *testing.T) { }, expected: &PropertyIDSet{ m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyID{ 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestPropertyIDSet_Add(t *testing.T) { }, expected: &PropertyIDSet{ m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyID{ MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestPropertyIDSet_AddRef(t *testing.T) { name: "1 element", input: &PropertyId, expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestPropertyIDSet_Has(t *testing.T) { PropertyIDSet PropertyID }{PropertyIDSet: PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestPropertyIDSet_Has(t *testing.T) { PropertyIDSet PropertyID }{PropertyIDSet: PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestPropertyIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: 
[]PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: PropertyIDSet{ @@ -820,7 +827,7 @@ func TestPropertyIDSet_All(t *testing.T) { { name: "1 element", input: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestPropertyIDSet_All(t *testing.T) { name: "multiple elements", input: &PropertyIDSet{ m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyID{ MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestPropertyIDSet_Clone(t *testing.T) { { name: "1 element", input: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestPropertyIDSet_Clone(t *testing.T) { name: "multiple elements", input: &PropertyIDSet{ m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyID{ MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestPropertyIDSet_Clone(t *testing.T) { }, expected: &PropertyIDSet{ m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyID{ MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestPropertyIDSet_Merge(t *testing.T) { b *PropertyIDSet }{ a: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &PropertyIDSet{}, }, expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestPropertyIDSet_Merge(t *testing.T) { b *PropertyIDSet }{ a: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &PropertyIDSet{ m: 
map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []PropertyID{ MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/property_item_gen.go b/pkg/id/property_item_gen.go index 8344f3a05..122e9b1a0 100644 --- a/pkg/id/property_item_gen.go +++ b/pkg/id/property_item_gen.go @@ -78,6 +78,16 @@ func (d PropertyItemID) Ref() *PropertyItemID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d PropertyItemID) Contains(ids []PropertyItemID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. func (d *PropertyItemID) CopyRef() *PropertyItemID { if d == nil { diff --git a/pkg/id/property_item_gen_test.go b/pkg/id/property_item_gen_test.go index 96e0617e2..680c31693 100644 --- a/pkg/id/property_item_gen_test.go +++ b/pkg/id/property_item_gen_test.go @@ -203,6 +203,13 @@ func TestPropertyItemID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestPropertyItemID_Contains(t *testing.T) { + id := NewPropertyItemID() + id2 := NewPropertyItemID() + assert.True(t, id.Contains([]PropertyItemID{id, id2})) + assert.False(t, id.Contains([]PropertyItemID{id2})) +} + func TestPropertyItemID_CopyRef(t *testing.T) { id := New() subId := PropertyItemIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestPropertyItemIDSet_Add(t *testing.T) { name: "1 element", input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func 
TestPropertyItemIDSet_Add(t *testing.T) { }, expected: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyItemID{ MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestPropertyItemIDSet_Add(t *testing.T) { }, expected: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyItemID{ MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestPropertyItemIDSet_AddRef(t *testing.T) { name: "1 element", input: &PropertyItemId, expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestPropertyItemIDSet_Has(t *testing.T) { PropertyItemIDSet PropertyItemID }{PropertyItemIDSet: PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestPropertyItemIDSet_Has(t *testing.T) { PropertyItemIDSet PropertyItemID }{PropertyItemIDSet: 
PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestPropertyItemIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: PropertyItemIDSet{ @@ -820,7 +827,7 @@ func TestPropertyItemIDSet_All(t *testing.T) { { name: "1 element", input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestPropertyItemIDSet_All(t *testing.T) { name: "multiple elements", input: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyItemID{ MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestPropertyItemIDSet_Clone(t *testing.T) { { name: "1 element", input: &PropertyItemIDSet{ - m: 
map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestPropertyItemIDSet_Clone(t *testing.T) { name: "multiple elements", input: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyItemID{ MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestPropertyItemIDSet_Clone(t *testing.T) { }, expected: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []PropertyItemID{ MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestPropertyItemIDSet_Merge(t *testing.T) { b *PropertyItemIDSet }{ a: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): 
{}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &PropertyItemIDSet{}, }, expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestPropertyItemIDSet_Merge(t *testing.T) { b *PropertyItemIDSet }{ a: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []PropertyItemID{ MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go index 6f9b4228c..5a4fd9d5e 100644 --- a/pkg/id/property_schema.go +++ b/pkg/id/property_schema.go @@ -7,26 +7,30 @@ import ( const schemaSystemIDPrefix = "reearth" -var schemaNameRe = regexp.MustCompile("^[a-zA-Z0-9._-]+$") +var schemaNameRe = regexp.MustCompile("^[a-zA-Z0-9_-]+$") // PropertySchemaID is an ID for PropertySchema. type PropertySchemaID struct { - plugin string + plugin PluginID id string } // PropertySchemaIDFrom generates a new PropertySchemaID from a string. 
func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { - ids := strings.Split(id, "/") + ids := strings.SplitN(id, "/", 2) if len(ids) < 2 || !schemaNameRe.MatchString(ids[len(ids)-1]) { return PropertySchemaID{}, ErrInvalidID } - return PropertySchemaID{plugin: strings.Join(ids[:len(ids)-1], "/"), id: ids[len(ids)-1]}, nil + pid, err := PluginIDFrom(ids[0]) + if err != nil { + return PropertySchemaID{}, ErrInvalidID + } + return PropertySchemaID{plugin: pid, id: ids[1]}, nil } // PropertySchemaIDFromExtension generates a new PropertySchemaID from a plugin ID and an extension ID. func PropertySchemaIDFromExtension(p PluginID, e PluginExtensionID) (PropertySchemaID, error) { - return PropertySchemaID{plugin: p.String(), id: e.String()}, nil + return PropertySchemaID{plugin: p, id: e.String()}, nil } // MustPropertySchemaID generates a new PropertySchemaID from a string, but panics if the string cannot be parsed. @@ -65,7 +69,7 @@ func (d PropertySchemaID) ID() string { } // Plugin returns a fragment of plugin ID. -func (d PropertySchemaID) Plugin() string { +func (d PropertySchemaID) Plugin() PluginID { return d.plugin } @@ -76,10 +80,7 @@ func (d PropertySchemaID) System() bool { // String returns a string representation. func (d PropertySchemaID) String() string { - if d.plugin == "" { - return d.id - } - return d.plugin + "/" + d.id + return d.plugin.String() + "/" + d.id } // Ref returns a reference. @@ -99,7 +100,7 @@ func (d *PropertySchemaID) CopyRef() *PropertySchemaID { // IsNil checks if ID is empty or not. func (d PropertySchemaID) IsNil() bool { - return d.plugin == "" && d.id == "" + return d.plugin == PluginID{} && d.id == "" } // StringRef returns a reference of a string representation. 
diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index f40f881dc..e234b5dd2 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -23,13 +23,13 @@ func TestPropertySchemaIDFrom(t *testing.T) { }{ { name: "success:valid name", - input: "test/Test_Test-01", + input: "test~1.0.0/Test_Test-01", expected: struct { result PropertySchemaID err error }{ result: PropertySchemaID{ - plugin: "test", + plugin: MustPluginID("test~1.0.0"), id: "Test_Test-01", }, err: nil, @@ -78,14 +78,13 @@ func TestPropertySchemaIDFrom(t *testing.T) { } func TestPropertySchemaIDFromExtension(t *testing.T) { - pluginID := MustPluginID("test#2.0.0") + pluginID := MustPluginID("test~2.0.0") pluginExtensionID := PluginExtensionID("test2") - propertySchemaID, err := PropertySchemaIDFromExtension(pluginID, pluginExtensionID) assert.NotNil(t, propertySchemaID) assert.Equal(t, PropertySchemaID{ - plugin: "test#2.0.0", + plugin: MustPluginID("test~2.0.0"), id: "test2", }, propertySchemaID) assert.Nil(t, err) @@ -103,13 +102,13 @@ func TestMustPropertySchemaID(t *testing.T) { }{ { name: "success:valid name", - input: "test/Test_Test-01", + input: "test~1.0.0/Test_Test-01", expected: struct { result PropertySchemaID err error }{ result: PropertySchemaID{ - plugin: "test", + plugin: MustPluginID("test~1.0.0"), id: "Test_Test-01", }, err: nil, @@ -159,50 +158,50 @@ func TestMustPropertySchemaID(t *testing.T) { } func TestMustPropertySchemaIDFromExtension(t *testing.T) { - pluginID := MustPluginID("test#2.0.0") + pluginID := MustPluginID("test~2.0.0") pluginExtensionID := PluginExtensionID("test2") propertySchemaID := MustPropertySchemaIDFromExtension(pluginID, pluginExtensionID) assert.NotNil(t, propertySchemaID) assert.Equal(t, PropertySchemaID{ - plugin: "test#2.0.0", + plugin: MustPluginID("test~2.0.0"), id: "test2", }, propertySchemaID) } func TestPropertySchemaIDFromRef(t *testing.T) { - t.Parallel() - input1 := "test/Test_Test-01" - input2 := 
"Test" - input3 := "Test/+dsad" - input4 := "Test/dsa d" testCases := []struct { name string - input *string + input string expected *PropertySchemaID }{ { name: "success:valid name", - input: &input1, + input: "test~1.0.0/Test_Test-01", expected: &PropertySchemaID{ - plugin: "test", + plugin: MustPluginID("test~1.0.0"), id: "Test_Test-01", }, }, { - name: "fail:invalid name", - input: &input2, + name: "fail:invalid name 1", + input: "Test~1.0.0", expected: nil, }, { - name: "fail:invalid name", - input: &input3, + name: "fail:invalid name 2", + input: "Test~1.0.0/+dsad", expected: nil, }, { - name: "fail:invalid name", - input: &input4, + name: "fail:invalid name 3", + input: "Test~1.0.0/dsa d", + expected: nil, + }, + { + name: "fail:invalid name 4", + input: "Test/dsa", expected: nil, }, } @@ -211,53 +210,52 @@ func TestPropertySchemaIDFromRef(t *testing.T) { t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - result := PropertySchemaIDFromRef(tc.input) + result := PropertySchemaIDFromRef(&tc.input) assert.Equal(tt, tc.expected, result) }) } } func TestPropertySchemaID_ID(t *testing.T) { - propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") assert.Equal(t, propertySchemaID.ID(), "test") } func TestPropertySchemaID_Plugin(t *testing.T) { - propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") - - assert.Equal(t, propertySchemaID.Plugin(), "Test#2.0.0") + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") + assert.Equal(t, MustPluginID("Test~2.0.0"), propertySchemaID.Plugin()) } func TestPropertySchemaID_System(t *testing.T) { - propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") assert.False(t, propertySchemaID.System()) extinctionName := schemaSystemIDPrefix - propertySchemaID = MustPropertySchemaIDFromExtension(MustPluginID("test#2.0.0"), *PluginExtensionIDFromRef(&extinctionName)) + propertySchemaID = 
MustPropertySchemaIDFromExtension(MustPluginID("test~2.0.0"), *PluginExtensionIDFromRef(&extinctionName)) assert.True(t, propertySchemaID.System()) - propertySchemaID = MustPropertySchemaID("Test#2.0.0/" + schemaSystemIDPrefix) + propertySchemaID = MustPropertySchemaID("Test~2.0.0/" + schemaSystemIDPrefix) assert.True(t, propertySchemaID.System()) } func TestPropertySchemaID_String(t *testing.T) { - propertySchemaID := MustPropertySchemaID("Test#2.0.0/test") + propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") - assert.Equal(t, propertySchemaID.String(), "Test#2.0.0/test") + assert.Equal(t, propertySchemaID.String(), "Test~2.0.0/test") } func TestPropertySchemaID_Ref(t *testing.T) { - propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") assert.Equal(t, &propertySchemaID, propertySchemaID.Ref()) } func TestPropertySchemaID_CopyRef(t *testing.T) { - propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") assert.Equal(t, propertySchemaID, *propertySchemaID.CopyRef()) @@ -265,7 +263,7 @@ func TestPropertySchemaID_CopyRef(t *testing.T) { } func TestPropertySchemaID_IsNil(t *testing.T) { - propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") assert.False(t, propertySchemaID.IsNil()) @@ -275,7 +273,7 @@ func TestPropertySchemaID_IsNil(t *testing.T) { } func TestPropertySchemaID_StringRef(t *testing.T) { - propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") ref := &propertySchemaID @@ -283,23 +281,23 @@ func TestPropertySchemaID_StringRef(t *testing.T) { } func TestPropertySchemaID_MarshalText(t *testing.T) { - propertySchemaID, _ := PropertySchemaIDFrom("test#2.0.0/test") + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") res, err := 
propertySchemaID.MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte("test#2.0.0/test"), res) + assert.Equal(t, []byte("test~2.0.0/test"), res) } func TestPropertySchemaID_UnmarshalText(t *testing.T) { - text := []byte("test#2.0.0/test") + text := []byte("test~2.0.0/test") propertySchemaID := &PropertySchemaID{} err := propertySchemaID.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "test#2.0.0/test", propertySchemaID.String()) + assert.Equal(t, "test~2.0.0/test", propertySchemaID.String()) } func TestPropertySchemaIDToKeys(t *testing.T) { @@ -316,20 +314,20 @@ func TestPropertySchemaIDToKeys(t *testing.T) { }, { name: "1 element", - input: []PropertySchemaID{MustPropertySchemaID("test#2.0.0/test")}, - expected: []string{"test#2.0.0/test"}, + input: []PropertySchemaID{MustPropertySchemaID("test~2.0.0/test")}, + expected: []string{"test~2.0.0/test"}, }, { name: "multiple elements", input: []PropertySchemaID{ - MustPropertySchemaID("Test#1.0.0/test"), - MustPropertySchemaID("Test#1.0.1/test"), - MustPropertySchemaID("Test#1.0.2/test"), + MustPropertySchemaID("Test~1.0.0/test"), + MustPropertySchemaID("Test~1.0.1/test"), + MustPropertySchemaID("Test~1.0.2/test"), }, expected: []string{ - "Test#1.0.0/test", - "Test#1.0.1/test", - "Test#1.0.2/test", + "Test~1.0.0/test", + "Test~1.0.1/test", + "Test~1.0.2/test", }, }, } @@ -367,30 +365,30 @@ func TestPropertySchemaIDsFrom(t *testing.T) { }, { name: "1 element", - input: []string{"Test#1.0.0/test"}, + input: []string{"Test~1.0.0/test"}, expected: struct { res []PropertySchemaID err error }{ - res: []PropertySchemaID{MustPropertySchemaID("Test#1.0.0/test")}, + res: []PropertySchemaID{MustPropertySchemaID("Test~1.0.0/test")}, err: nil, }, }, { name: "multiple elements", input: []string{ - "Test#1.0.0/test", - "Test#1.0.1/test", - "Test#1.0.2/test", + "Test~1.0.0/test", + "Test~1.0.1/test", + "Test~1.0.2/test", }, expected: struct { res []PropertySchemaID err error }{ res: []PropertySchemaID{ - 
MustPropertySchemaID("Test#1.0.0/test"), - MustPropertySchemaID("Test#1.0.1/test"), - MustPropertySchemaID("Test#1.0.2/test"), + MustPropertySchemaID("Test~1.0.0/test"), + MustPropertySchemaID("Test~1.0.1/test"), + MustPropertySchemaID("Test~1.0.2/test"), }, err: nil, }, @@ -398,9 +396,9 @@ func TestPropertySchemaIDsFrom(t *testing.T) { { name: "multiple elements", input: []string{ - "Test#1.0.0/test", - "Test#1.0.1/test", - "Test#1.0.2/test", + "Test~1.0.0/test", + "Test~1.0.1/test", + "Test~1.0.2/test", }, expected: struct { res []PropertySchemaID diff --git a/pkg/id/scene_gen.go b/pkg/id/scene_gen.go index 2bbd669dd..bc8d2c37b 100644 --- a/pkg/id/scene_gen.go +++ b/pkg/id/scene_gen.go @@ -78,6 +78,16 @@ func (d SceneID) Ref() *SceneID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d SceneID) Contains(ids []SceneID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *SceneID) CopyRef() *SceneID { if d == nil { diff --git a/pkg/id/scene_gen_test.go b/pkg/id/scene_gen_test.go index d8b3928c7..cec679206 100644 --- a/pkg/id/scene_gen_test.go +++ b/pkg/id/scene_gen_test.go @@ -203,6 +203,13 @@ func TestSceneID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestSceneID_Contains(t *testing.T) { + id := NewSceneID() + id2 := NewSceneID() + assert.True(t, id.Contains([]SceneID{id, id2})) + assert.False(t, id.Contains([]SceneID{id2})) +} + func TestSceneID_CopyRef(t *testing.T) { id := New() subId := SceneIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestSceneIDSet_Add(t *testing.T) { name: "1 element", input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestSceneIDSet_Add(t *testing.T) { }, expected: &SceneIDSet{ m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []SceneID{ MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestSceneIDSet_Add(t *testing.T) { }, expected: &SceneIDSet{ m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []SceneID{ MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestSceneIDSet_AddRef(t *testing.T) { name: "1 element", input: &SceneId, expected: &SceneIDSet{ - m: 
map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestSceneIDSet_Has(t *testing.T) { SceneIDSet SceneID }{SceneIDSet: SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestSceneIDSet_Has(t *testing.T) { SceneIDSet SceneID }{SceneIDSet: SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestSceneIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: SceneIDSet{ @@ -820,7 +827,7 @@ func TestSceneIDSet_All(t *testing.T) { { name: "1 element", input: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestSceneIDSet_All(t *testing.T) { name: "multiple elements", input: &SceneIDSet{ m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - 
MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []SceneID{ MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestSceneIDSet_Clone(t *testing.T) { { name: "1 element", input: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestSceneIDSet_Clone(t *testing.T) { name: "multiple elements", input: &SceneIDSet{ m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []SceneID{ MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestSceneIDSet_Clone(t *testing.T) { }, expected: &SceneIDSet{ m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []SceneID{ MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestSceneIDSet_Merge(t *testing.T) { b *SceneIDSet }{ a: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): 
struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &SceneIDSet{}, }, expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestSceneIDSet_Merge(t *testing.T) { b *SceneIDSet }{ a: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &SceneIDSet{ m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []SceneID{ MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/team_gen.go b/pkg/id/team_gen.go index f814bda17..fa5fbc25d 100644 --- a/pkg/id/team_gen.go +++ b/pkg/id/team_gen.go @@ -78,6 +78,16 @@ func (d TeamID) Ref() *TeamID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d TeamID) Contains(ids []TeamID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *TeamID) CopyRef() *TeamID { if d == nil { diff --git a/pkg/id/team_gen_test.go b/pkg/id/team_gen_test.go index 80ca77e71..952e26a38 100644 --- a/pkg/id/team_gen_test.go +++ b/pkg/id/team_gen_test.go @@ -203,6 +203,13 @@ func TestTeamID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestTeamID_Contains(t *testing.T) { + id := NewTeamID() + id2 := NewTeamID() + assert.True(t, id.Contains([]TeamID{id, id2})) + assert.False(t, id.Contains([]TeamID{id2})) +} + func TestTeamID_CopyRef(t *testing.T) { id := New() subId := TeamIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestTeamIDSet_Add(t *testing.T) { name: "1 element", input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestTeamIDSet_Add(t *testing.T) { }, expected: &TeamIDSet{ m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []TeamID{ MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestTeamIDSet_Add(t *testing.T) { }, expected: &TeamIDSet{ m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []TeamID{ MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestTeamIDSet_AddRef(t *testing.T) { name: "1 element", input: &TeamId, expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: 
map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestTeamIDSet_Has(t *testing.T) { TeamIDSet TeamID }{TeamIDSet: TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestTeamIDSet_Has(t *testing.T) { TeamIDSet TeamID }{TeamIDSet: TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestTeamIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: TeamIDSet{ @@ -820,7 +827,7 @@ func TestTeamIDSet_All(t *testing.T) { { name: "1 element", input: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestTeamIDSet_All(t *testing.T) { name: "multiple elements", input: &TeamIDSet{ m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + 
MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []TeamID{ MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestTeamIDSet_Clone(t *testing.T) { { name: "1 element", input: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestTeamIDSet_Clone(t *testing.T) { name: "multiple elements", input: &TeamIDSet{ m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []TeamID{ MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestTeamIDSet_Clone(t *testing.T) { }, expected: &TeamIDSet{ m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []TeamID{ MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestTeamIDSet_Merge(t *testing.T) { b *TeamIDSet }{ a: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &TeamIDSet{}, }, expected: &TeamIDSet{ - m: 
map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestTeamIDSet_Merge(t *testing.T) { b *TeamIDSet }{ a: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &TeamIDSet{ m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []TeamID{ MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/user_gen.go b/pkg/id/user_gen.go index df4b8977f..830d5f530 100644 --- a/pkg/id/user_gen.go +++ b/pkg/id/user_gen.go @@ -78,6 +78,16 @@ func (d UserID) Ref() *UserID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d UserID) Contains(ids []UserID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *UserID) CopyRef() *UserID { if d == nil { diff --git a/pkg/id/user_gen_test.go b/pkg/id/user_gen_test.go index 39394b659..3d074a4d2 100644 --- a/pkg/id/user_gen_test.go +++ b/pkg/id/user_gen_test.go @@ -203,6 +203,13 @@ func TestUserID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestUserID_Contains(t *testing.T) { + id := NewUserID() + id2 := NewUserID() + assert.True(t, id.Contains([]UserID{id, id2})) + assert.False(t, id.Contains([]UserID{id2})) +} + func TestUserID_CopyRef(t *testing.T) { id := New() subId := UserIDFromRefID(&id) @@ -210,7 +217,7 @@ func TestUserID_CopyRef(t *testing.T) { subIdCopyRef := subId.CopyRef() assert.Equal(t, *subId, *subIdCopyRef) - assert.False(t, subId == subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) } func TestUserID_IDRef(t *testing.T) { @@ -612,7 +619,7 @@ func TestUserIDSet_Add(t *testing.T) { name: "1 element", input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestUserIDSet_Add(t *testing.T) { }, expected: &UserIDSet{ m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []UserID{ MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestUserIDSet_Add(t *testing.T) { }, expected: &UserIDSet{ m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: 
[]UserID{ MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestUserIDSet_AddRef(t *testing.T) { name: "1 element", input: &UserId, expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestUserIDSet_Has(t *testing.T) { UserIDSet UserID }{UserIDSet: UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestUserIDSet_Has(t *testing.T) { UserIDSet UserID }{UserIDSet: UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestUserIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: UserIDSet{ @@ -820,7 +827,7 @@ func TestUserIDSet_All(t *testing.T) { { name: "1 element", input: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestUserIDSet_All(t *testing.T) { name: "multiple elements", input: &UserIDSet{ m: map[UserID]struct{}{ - 
MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []UserID{ MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestUserIDSet_Clone(t *testing.T) { { name: "1 element", input: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestUserIDSet_Clone(t *testing.T) { name: "multiple elements", input: &UserIDSet{ m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []UserID{ MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestUserIDSet_Clone(t *testing.T) { }, expected: &UserIDSet{ m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []UserID{ MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestUserIDSet_Merge(t *testing.T) { b *UserIDSet }{ a: &UserIDSet{ - m: 
map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &UserIDSet{}, }, expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestUserIDSet_Merge(t *testing.T) { b *UserIDSet }{ a: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &UserIDSet{ m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []UserID{ MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/id/widget_gen.go b/pkg/id/widget_gen.go index 73eb8712b..52028e49c 100644 --- a/pkg/id/widget_gen.go +++ b/pkg/id/widget_gen.go @@ -78,6 +78,16 @@ func (d WidgetID) Ref() *WidgetID { return &d2 } +// Contains returns whether the id is contained in the slice. +func (d WidgetID) Contains(ids []WidgetID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + // CopyRef returns a copy of a reference. 
func (d *WidgetID) CopyRef() *WidgetID { if d == nil { diff --git a/pkg/id/widget_gen_test.go b/pkg/id/widget_gen_test.go index 0a7c65f6e..32215f0d5 100644 --- a/pkg/id/widget_gen_test.go +++ b/pkg/id/widget_gen_test.go @@ -203,6 +203,13 @@ func TestWidgetID_Ref(t *testing.T) { assert.Equal(t, *subId, *subIdRef) } +func TestWidgetID_Contains(t *testing.T) { + id := NewWidgetID() + id2 := NewWidgetID() + assert.True(t, id.Contains([]WidgetID{id, id2})) + assert.False(t, id.Contains([]WidgetID{id2})) +} + func TestWidgetID_CopyRef(t *testing.T) { id := New() subId := WidgetIDFromRefID(&id) @@ -612,7 +619,7 @@ func TestWidgetIDSet_Add(t *testing.T) { name: "1 element", input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -625,9 +632,9 @@ func TestWidgetIDSet_Add(t *testing.T) { }, expected: &WidgetIDSet{ m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []WidgetID{ MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -645,8 +652,8 @@ func TestWidgetIDSet_Add(t *testing.T) { }, expected: &WidgetIDSet{ m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []WidgetID{ MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -690,7 +697,7 @@ func TestWidgetIDSet_AddRef(t *testing.T) { name: "1 element", input: &WidgetId, expected: 
&WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -733,7 +740,7 @@ func TestWidgetIDSet_Has(t *testing.T) { WidgetIDSet WidgetID }{WidgetIDSet: WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: true, @@ -744,7 +751,7 @@ func TestWidgetIDSet_Has(t *testing.T) { WidgetIDSet WidgetID }{WidgetIDSet: WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, expected: false, @@ -779,7 +786,7 @@ func TestWidgetIDSet_Clear(t *testing.T) { { name: "Set Contains the element", input: WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: WidgetIDSet{ @@ -820,7 +827,7 @@ func TestWidgetIDSet_All(t *testing.T) { { name: "1 element", input: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -829,9 +836,9 @@ func TestWidgetIDSet_All(t *testing.T) { name: "multiple elements", input: &WidgetIDSet{ m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - 
MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []WidgetID{ MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -878,11 +885,11 @@ func TestWidgetIDSet_Clone(t *testing.T) { { name: "1 element", input: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -890,9 +897,9 @@ func TestWidgetIDSet_Clone(t *testing.T) { name: "multiple elements", input: &WidgetIDSet{ m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []WidgetID{ MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -902,9 +909,9 @@ func TestWidgetIDSet_Clone(t *testing.T) { }, expected: &WidgetIDSet{ m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, }, s: []WidgetID{ MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -955,13 +962,13 @@ func TestWidgetIDSet_Merge(t *testing.T) { b 
*WidgetIDSet }{ a: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &WidgetIDSet{}, }, expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, }, @@ -972,18 +979,18 @@ func TestWidgetIDSet_Merge(t *testing.T) { b *WidgetIDSet }{ a: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, b: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}}, + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, }, }, expected: &WidgetIDSet{ m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): struct{}{}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): struct{}{}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, }, s: []WidgetID{ MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), diff --git a/pkg/layer/group_test.go b/pkg/layer/group_test.go index 94a7ebd56..aa231412c 100644 --- a/pkg/layer/group_test.go +++ b/pkg/layer/group_test.go @@ -16,7 +16,7 @@ var group = Group{ id: id.MustLayerID(id.New().String()), name: "xxx", visible: false, - plugin: id.MustPluginID("aaa#1.1.1").Ref(), + plugin: id.MustPluginID("aaa~1.1.1").Ref(), extension: id.PluginExtensionID("foo").Ref(), property: nil, infobox: nil, @@ -41,7 +41,7 @@ func TestGroup_Name(t *testing.T) { func TestGroup_Plugin(t *testing.T) { assert.NotNil(t, group.Plugin()) - assert.True(t, 
id.MustPluginID("aaa#1.1.1").Equal(*group.Plugin())) + assert.True(t, id.MustPluginID("aaa~1.1.1").Equal(*group.Plugin())) } func TestGroup_IDRef(t *testing.T) { @@ -90,9 +90,9 @@ func TestGroup_SetInfobox(t *testing.T) { } func TestGroup_SetPlugin(t *testing.T) { - group.SetPlugin(id.MustPluginID("ccc#1.1.1").Ref()) + group.SetPlugin(id.MustPluginID("ccc~1.1.1").Ref()) assert.NotNil(t, group.Plugin()) - assert.True(t, id.MustPluginID("ccc#1.1.1").Equal(*group.Plugin())) + assert.True(t, id.MustPluginID("ccc~1.1.1").Equal(*group.Plugin())) } func TestGroup_SetVisible(t *testing.T) { diff --git a/pkg/layer/infobox.go b/pkg/layer/infobox.go index 28907e8d9..610c7829f 100644 --- a/pkg/layer/infobox.go +++ b/pkg/layer/infobox.go @@ -125,6 +125,22 @@ func (i *Infobox) Remove(field id.InfoboxFieldID) { } } +func (i *Infobox) RemoveAllByPlugin(pid id.PluginID) []id.PropertyID { + if i == nil { + return nil + } + + var properties []id.PropertyID + for j := 0; j < len(i.fields); j++ { + if i.fields[j].plugin.Equal(pid) { + properties = append(properties, i.fields[j].Property()) + i.fields = append(i.fields[:j], i.fields[j+1:]...) 
+ j-- + } + } + return properties +} + func (i *Infobox) RemoveAt(index int) { l := len(i.fields) if index < 0 || l <= index { diff --git a/pkg/layer/infobox_test.go b/pkg/layer/infobox_test.go index 462c99f5a..d8a016aa2 100644 --- a/pkg/layer/infobox_test.go +++ b/pkg/layer/infobox_test.go @@ -50,3 +50,19 @@ func TestInfobox(t *testing.T) { assert.Equal(t, f3, infobox.FieldAt(1)) assert.Equal(t, f4, infobox.FieldAt(2)) } + +func TestInfobox_RemoveAllByPlugin(t *testing.T) { + pid1 := id.MustPluginID("xxx~1.1.1") + pid2 := id.MustPluginID("xxy~1.1.1") + f1 := &InfoboxField{id: id.NewInfoboxFieldID(), plugin: pid1, extension: "a", property: id.NewPropertyID()} + f2 := &InfoboxField{id: id.NewInfoboxFieldID(), plugin: pid2, extension: "b", property: id.NewPropertyID()} + f3 := &InfoboxField{id: id.NewInfoboxFieldID(), plugin: pid1, extension: "c", property: id.NewPropertyID()} + infobox := NewInfobox([]*InfoboxField{f1, f2, f3}, id.NewPropertyID()) + + assert.Equal(t, []id.PropertyID(nil), (*Infobox)(nil).RemoveAllByPlugin(pid1)) + assert.Equal(t, []*InfoboxField{f1, f2, f3}, infobox.fields) + assert.Equal(t, []id.PropertyID{f1.Property(), f3.Property()}, infobox.RemoveAllByPlugin(pid1)) + assert.Equal(t, []*InfoboxField{f2}, infobox.fields) + assert.Equal(t, []id.PropertyID(nil), infobox.RemoveAllByPlugin(pid1)) + assert.Equal(t, []*InfoboxField{f2}, infobox.fields) +} diff --git a/pkg/layer/initializer/initializer.go b/pkg/layer/layerops/initializer.go similarity index 98% rename from pkg/layer/initializer/initializer.go rename to pkg/layer/layerops/initializer.go index d510bc825..49b2bfa13 100644 --- a/pkg/layer/initializer/initializer.go +++ b/pkg/layer/layerops/initializer.go @@ -1,4 +1,4 @@ -package initializer +package layerops import ( "errors" diff --git a/pkg/layer/initializer/initializer_test.go b/pkg/layer/layerops/initializer_test.go similarity index 94% rename from pkg/layer/initializer/initializer_test.go rename to 
pkg/layer/layerops/initializer_test.go index 30aee20a4..d016a31ce 100644 --- a/pkg/layer/initializer/initializer_test.go +++ b/pkg/layer/layerops/initializer_test.go @@ -1,4 +1,4 @@ -package initializer +package layerops import ( "errors" @@ -12,7 +12,7 @@ import ( func TestInitialize(t *testing.T) { lid := id.MustLayerID(id.New().String()) - ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + ps := id.MustPropertySchemaID("xxx~1.1.1/aa") eid := id.PluginExtensionID("foo") eid2 := id.PluginExtensionID("foo2") e := plugin.NewExtension(). @@ -29,7 +29,7 @@ func TestInitialize(t *testing.T) { es := append(make([]*plugin.Extension, 0), e) es = append(es, e2) p := plugin.New(). - ID(id.MustPluginID("xxx#1.1.1")). + ID(id.MustPluginID("xxx~1.1.1")). Schema(&ps). Extensions(es). MustBuild() diff --git a/pkg/layer/layerops/processor.go b/pkg/layer/layerops/processor.go new file mode 100644 index 000000000..aa02737af --- /dev/null +++ b/pkg/layer/layerops/processor.go @@ -0,0 +1,48 @@ +package layerops + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" +) + +type Processor struct { + RootLayerID id.LayerID + LayerLoader layer.Loader +} + +type UninstallPluginResult struct { + ModifiedLayers layer.List + RemovedLayers *layer.IDList + RemovedProperties []id.PropertyID +} + +func (p Processor) UninstallPlugin(ctx context.Context, pluginID id.PluginID) (res UninstallPluginResult, err error) { + err = p.LayerLoader.Walk(ctx, func(l layer.Layer, parents layer.GroupList) error { + parent := parents.Last() + parentRemoved := parent != nil && res.RemovedLayers.HasLayer(parent.ID()) + + if !parentRemoved { + if pid := l.Plugin(); pid == nil || !pid.Equal(pluginID) { + // delete infobox fields + removedProperties := l.Infobox().RemoveAllByPlugin(pluginID) + if len(removedProperties) > 0 { + res.RemovedProperties = append(res.RemovedProperties, removedProperties...) 
+ res.ModifiedLayers = append(res.ModifiedLayers, &l) + } + return nil + } + + parent.Layers().RemoveLayer(l.ID()) + res.ModifiedLayers = append(res.ModifiedLayers, parent.LayerRef()) + } + + res.RemovedLayers = res.RemovedLayers.AppendLayers(l.ID()) + res.RemovedProperties = append(res.RemovedProperties, l.Properties()...) + res.ModifiedLayers = res.ModifiedLayers.Remove(l.ID()) + return nil + }, []id.LayerID{p.RootLayerID}) + + return +} diff --git a/pkg/layer/layerops/processor_test.go b/pkg/layer/layerops/processor_test.go new file mode 100644 index 000000000..819936433 --- /dev/null +++ b/pkg/layer/layerops/processor_test.go @@ -0,0 +1,39 @@ +package layerops + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/stretchr/testify/assert" +) + +func TestProcessor_UninstallPlugin(t *testing.T) { + sid := id.NewSceneID() + pid := id.MustPluginID("hoge~1.0.0") + pid2 := id.MustPluginID("hoge~1.0.1") + ibf1 := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(id.NewPropertyID()).MustBuild() + ibf2 := layer.NewInfoboxField().NewID().Plugin(pid2).Extension("a").Property(id.NewPropertyID()).MustBuild() + ib := layer.NewInfobox([]*layer.InfoboxField{ibf1, ibf2}, id.NewPropertyID()) + l1 := layer.NewItem().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Plugin(&pid).MustBuild() + l2 := layer.NewItem().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Plugin(&pid2).MustBuild() + l3 := layer.NewItem().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Plugin(&pid2).Infobox(ib).MustBuild() + l4 := layer.NewGroup().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Layers(layer.NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() + l5 := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + + res, err := Processor{ + LayerLoader: layer.LoaderFrom([]layer.Layer{l1, l2, l3, l4, l5}), + RootLayerID: 
l5.ID(), + }.UninstallPlugin(context.TODO(), pid) + + assert.NoError(t, err) + assert.Equal(t, UninstallPluginResult{ + ModifiedLayers: layer.List{l3.LayerRef(), l4.LayerRef()}, + RemovedLayers: layer.NewIDList([]id.LayerID{l1.ID()}), + RemovedProperties: []id.PropertyID{ibf1.Property(), *l1.Property()}, + }, res) + + assert.Equal(t, layer.NewIDList([]id.LayerID{l2.ID()}), l4.Layers()) + assert.Equal(t, []*layer.InfoboxField{ibf2}, ib.Fields()) +} diff --git a/pkg/layer/list.go b/pkg/layer/list.go index f44ab3b31..d6fb1b31f 100644 --- a/pkg/layer/list.go +++ b/pkg/layer/list.go @@ -8,6 +8,13 @@ import ( type List []*Layer +func (ll List) Last() *Layer { + if len(ll) == 0 { + return nil + } + return ll[len(ll)-1] +} + func (ll List) Pick(il *IDList) List { if il == nil { return nil @@ -104,6 +111,32 @@ func (ll List) Map() Map { return m } +func (ll List) Remove(lids ...id.LayerID) List { + if ll == nil { + return nil + } + + res := make(List, 0, len(ll)) + + for _, l := range ll { + if l == nil { + continue + } + hit := false + for _, lid := range lids { + if (*l).ID() == lid { + hit = true + break + } + } + if !hit { + res = append(res, l) + } + } + + return res +} + type ItemList []*Item func (ll ItemList) FindByDataset(ds id.DatasetID) *Item { @@ -125,6 +158,13 @@ func (ll ItemList) ToLayerList() List { return res } +func (ll ItemList) Last() *Item { + if len(ll) == 0 { + return nil + } + return ll[len(ll)-1] +} + type GroupList []*Group func (ll GroupList) ToLayerList() List { @@ -136,6 +176,13 @@ func (ll GroupList) ToLayerList() List { return res } +func (ll GroupList) Last() *Group { + if len(ll) == 0 { + return nil + } + return ll[len(ll)-1] +} + type Map map[id.LayerID]*Layer func MapFrom(l Layer) Map { diff --git a/pkg/layer/list_test.go b/pkg/layer/list_test.go new file mode 100644 index 000000000..398325dc1 --- /dev/null +++ b/pkg/layer/list_test.go @@ -0,0 +1,19 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/stretchr/testify/assert" +) + +func TestList_Remove(t *testing.T) { + sid := id.NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + assert.Equal(t, List{l2.LayerRef()}, List{l1.LayerRef(), l2.LayerRef()}.Remove(l1.ID(), l3.ID())) + assert.Equal(t, List{l1.LayerRef(), l2.LayerRef()}, List{l1.LayerRef(), l2.LayerRef()}.Remove()) + assert.Equal(t, List(nil), List(nil).Remove(l1.ID())) + assert.Equal(t, List{}, List{}.Remove(l1.ID())) +} diff --git a/pkg/layer/loader.go b/pkg/layer/loader.go index afa5b2c04..0cce4aa7b 100644 --- a/pkg/layer/loader.go +++ b/pkg/layer/loader.go @@ -2,6 +2,7 @@ package layer import ( "context" + "errors" "github.com/reearth/reearth-backend/pkg/id" ) @@ -9,6 +10,8 @@ import ( type Loader func(context.Context, ...id.LayerID) (List, error) type LoaderByScene func(context.Context, id.SceneID) (List, error) +var WalkerSkipChildren = errors.New("LAYER_WALKER_SKIP_CHILDREN") + func LoaderFrom(data []Layer) Loader { return func(ctx context.Context, ids ...id.LayerID) (List, error) { res := make([]*Layer, 0, len(ids)) @@ -42,3 +45,27 @@ func LoaderFromMap(data map[id.LayerID]Layer) Loader { return res, nil } } + +func (l Loader) Walk(ctx context.Context, walker func(Layer, GroupList) error, init []id.LayerID) error { + var walk func(ids []id.LayerID, parents GroupList) error + walk = func(ids []id.LayerID, parents GroupList) error { + loaded, err := l(ctx, ids...) 
+ if err != nil { + return err + } + for _, l := range loaded.Deref() { + if err := walker(l, parents); err == WalkerSkipChildren { + continue + } else if err != nil { + return err + } + if lg := ToLayerGroup(l); lg != nil && lg.Layers().LayerCount() > 0 { + if err := walk(lg.Layers().Layers(), append(parents, lg)); err != nil { + return err + } + } + } + return nil + } + return walk(init, nil) +} diff --git a/pkg/layer/loader_test.go b/pkg/layer/loader_test.go new file mode 100644 index 000000000..e51b95596 --- /dev/null +++ b/pkg/layer/loader_test.go @@ -0,0 +1,80 @@ +package layer + +import ( + "context" + "errors" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestLoader_Walk(t *testing.T) { + sid := id.NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) + + layers := []Layer{} + parents := []GroupList{} + err := w.Walk(context.TODO(), func(l Layer, p GroupList) error { + layers = append(layers, l) + parents = append(parents, p) + return nil + }, []id.LayerID{l5.ID()}) + + assert.NoError(t, err) + assert.Equal(t, []Layer{l5, l3, l4, l1, l2}, layers) + assert.Equal(t, []GroupList{nil, {l5}, {l5}, {l5, l4}, {l5, l4}}, parents) +} + +func TestLoader_Walk2(t *testing.T) { + sid := id.NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + w := 
LoaderFrom([]Layer{l1, l2, l3, l4, l5}) + + layers := []Layer{} + parents := []GroupList{} + err := w.Walk(context.TODO(), func(l Layer, p GroupList) error { + layers = append(layers, l) + parents = append(parents, p) + return WalkerSkipChildren + }, []id.LayerID{l5.ID()}) + + assert.NoError(t, err) + assert.Equal(t, []Layer{l5}, layers) + assert.Equal(t, []GroupList{nil}, parents) +} + +func TestLoader_Walk3(t *testing.T) { + sid := id.NewSceneID() + l1 := NewItem().NewID().Scene(sid).MustBuild() + l2 := NewItem().NewID().Scene(sid).MustBuild() + l3 := NewItem().NewID().Scene(sid).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) + + err := errors.New("Error") + layers := []Layer{} + parents := []GroupList{} + err2 := w.Walk(context.TODO(), func(l Layer, p GroupList) error { + layers = append(layers, l) + parents = append(parents, p) + if l == l4 { + return err + } + return nil + }, []id.LayerID{l5.ID()}) + + assert.Same(t, err, err2) + assert.Equal(t, []Layer{l5, l3, l4}, layers) + assert.Equal(t, []GroupList{nil, {l5}, {l5}}, parents) +} diff --git a/pkg/layer/merged_test.go b/pkg/layer/merged_test.go index 2c8b18f34..8676b29e4 100644 --- a/pkg/layer/merged_test.go +++ b/pkg/layer/merged_test.go @@ -11,7 +11,7 @@ import ( func TestMerge(t *testing.T) { scene := id.NewSceneID() dataset1 := id.NewDatasetID() - p := id.MustPluginID("xxx#1.1.1") + p := id.MustPluginID("xxx~1.1.1") e := id.PluginExtensionID("foo") itemProperty := id.NewPropertyID() diff --git a/pkg/layer/merging/merger_test.go b/pkg/layer/merging/merger_test.go index 58606d0d9..edbf1a03c 100644 --- a/pkg/layer/merging/merger_test.go +++ b/pkg/layer/merging/merger_test.go @@ -14,8 +14,8 @@ func TestMergeLayer(t *testing.T) { // ids scene := id.NewSceneID() dataset1 := id.NewDatasetID() - ps := 
id.MustPropertySchemaID("xxx#1.1.1/aa") - p := id.MustPluginID("xxx#1.1.1") + ps := id.MustPropertySchemaID("xxx~1.1.1/aa") + p := id.MustPluginID("xxx~1.1.1") e := id.PluginExtensionID("foo") itemProperty := id.NewPropertyID() groupProperty := id.NewPropertyID() diff --git a/pkg/plugin/builder_test.go b/pkg/plugin/builder_test.go index 81b17ee76..e4b5e94b8 100644 --- a/pkg/plugin/builder_test.go +++ b/pkg/plugin/builder_test.go @@ -11,8 +11,8 @@ import ( func TestBuilder_ID(t *testing.T) { var b = New() - res := b.ID(id.MustPluginID("aaa#1.1.1")).MustBuild() - assert.Equal(t, id.MustPluginID("aaa#1.1.1"), res.ID()) + res := b.ID(id.MustPluginID("aaa~1.1.1")).MustBuild() + assert.Equal(t, id.MustPluginID("aaa~1.1.1"), res.ID()) } func TestBuilder_Name(t *testing.T) { @@ -45,8 +45,8 @@ func TestBuilder_Schema(t *testing.T) { }, { name: "build schema", - sid: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), - expected: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + sid: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + expected: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, } for _, tc := range testCases { @@ -86,7 +86,7 @@ func TestBuilder_Build(t *testing.T) { err error // skip for now as error is always nil }{ { - id: id.MustPluginID("hoge#0.1.0"), + id: id.MustPluginID("hoge~0.1.0"), name: "success build new plugin", author: "aaa", repositoryURL: "uuu", @@ -96,9 +96,9 @@ func TestBuilder_Build(t *testing.T) { NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild(), }, - schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), expected: &Plugin{ - id: id.MustPluginID("hoge#0.1.0"), + id: id.MustPluginID("hoge~0.1.0"), name: i18n.StringFrom("nnn"), author: "aaa", description: i18n.StringFrom("ddd"), @@ -108,7 +108,7 @@ func TestBuilder_Build(t *testing.T) { id.PluginExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), }, extensionOrder: 
[]id.PluginExtensionID{id.PluginExtensionID("xxx"), id.PluginExtensionID("yyy")}, - schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, }, } @@ -144,7 +144,7 @@ func TestBuilder_MustBuild(t *testing.T) { expected *Plugin }{ { - id: id.MustPluginID("hoge#0.1.0"), + id: id.MustPluginID("hoge~0.1.0"), name: "success build new plugin", author: "aaa", repositoryURL: "uuu", @@ -154,9 +154,9 @@ func TestBuilder_MustBuild(t *testing.T) { NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild(), }, - schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), expected: &Plugin{ - id: id.MustPluginID("hoge#0.1.0"), + id: id.MustPluginID("hoge~0.1.0"), name: i18n.StringFrom("nnn"), author: "aaa", description: i18n.StringFrom("ddd"), @@ -166,7 +166,7 @@ func TestBuilder_MustBuild(t *testing.T) { id.PluginExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), }, extensionOrder: []id.PluginExtensionID{id.PluginExtensionID("xxx"), id.PluginExtensionID("yyy")}, - schema: id.MustPropertySchemaID("hoge#0.1.0/fff").Ref(), + schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, }, } diff --git a/pkg/plugin/extension_builder_test.go b/pkg/plugin/extension_builder_test.go index 86d0d43fb..98eef0d8b 100644 --- a/pkg/plugin/extension_builder_test.go +++ b/pkg/plugin/extension_builder_test.go @@ -42,8 +42,8 @@ func TestExtensionBuilder_Icon(t *testing.T) { func TestExtensionBuilder_Schema(t *testing.T) { var b = NewExtension() - res := b.ID("xxx").Schema(id.MustPropertySchemaID("hoge#0.1.0/fff")).MustBuild() - assert.Equal(t, id.MustPropertySchemaID("hoge#0.1.0/fff"), res.Schema()) + res := b.ID("xxx").Schema(id.MustPropertySchemaID("hoge~0.1.0/fff")).MustBuild() + assert.Equal(t, id.MustPropertySchemaID("hoge~0.1.0/fff"), res.Schema()) } func TestExtensionBuilder_Visualizer(t *testing.T) { @@ -73,7 +73,7 @@ func TestExtensionBuilder_Build(t 
*testing.T) { system: false, ename: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), - schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", expected: &Extension{ id: "xxx", @@ -81,7 +81,7 @@ func TestExtensionBuilder_Build(t *testing.T) { name: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), icon: "ttt", - schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", }, err: nil, @@ -145,7 +145,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { system: false, ename: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), - schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", expected: &Extension{ id: "xxx", @@ -153,7 +153,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { name: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), icon: "ttt", - schema: id.MustPropertySchemaID("foo#1.1.1/hhh"), + schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", }, }, diff --git a/pkg/plugin/extension_test.go b/pkg/plugin/extension_test.go index 9e0410fe1..6758f3041 100644 --- a/pkg/plugin/extension_test.go +++ b/pkg/plugin/extension_test.go @@ -24,7 +24,7 @@ func TestExtension(t *testing.T) { Name: i18n.StringFrom("aaa"), Description: i18n.StringFrom("ddd"), Icon: "test", - Schema: id.MustPropertySchemaID("hoge#0.1.0/fff"), + Schema: id.MustPropertySchemaID("hoge~0.1.0/fff"), Visualizer: "vvv", } @@ -32,7 +32,7 @@ func TestExtension(t *testing.T) { ID("xxx"). Name(i18n.StringFrom("aaa")). Description(i18n.StringFrom("ddd")). - Schema(id.MustPropertySchemaID("hoge#0.1.0/fff")). + Schema(id.MustPropertySchemaID("hoge~0.1.0/fff")). Icon("test"). Visualizer("vvv"). Type(ExtensionTypePrimitive). 
diff --git a/pkg/plugin/loader.go b/pkg/plugin/loader.go index d93ba8e42..add41da5b 100644 --- a/pkg/plugin/loader.go +++ b/pkg/plugin/loader.go @@ -6,4 +6,4 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -type Loader func(context.Context, ...id.PluginID) ([]*Plugin, error) +type Loader func(context.Context, []id.PluginID, []id.SceneID) ([]*Plugin, error) diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index 14c89e9a5..532f10f4c 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -10,13 +10,13 @@ import ( "github.com/reearth/reearth-backend/pkg/visualizer" ) -func (i *Root) manifest() (*Manifest, error) { +func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { var pid id.PluginID var err error if i.System && string(i.ID) == id.OfficialPluginID.Name() { pid = id.OfficialPluginID } else { - pid, err = id.PluginIDFrom(string(i.ID) + "#" + i.Version) + pid, err = id.NewPluginID(string(i.ID), i.Version, sid) if err != nil { return nil, ErrInvalidManifest } @@ -31,8 +31,13 @@ func (i *Root) manifest() (*Manifest, error) { pluginSchema = schema } - extensions := make([]*plugin.Extension, 0, len(i.Extensions)) - extensionSchemas := make([]*property.Schema, 0, len(i.Extensions)) + var extensions []*plugin.Extension + var extensionSchemas []*property.Schema + if l := len(i.Extensions); l > 0 { + extensions = make([]*plugin.Extension, 0, l) + extensionSchemas = make([]*property.Schema, 0, l) + } + for _, e := range i.Extensions { extension, extensionSchema, err2 := e.extension(pid, i.System) if err2 != nil { diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 2e45745e5..3c391c876 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -142,7 +142,7 @@ func TestManifest(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - m, err := tc.root.manifest() + m, err := tc.root.manifest(nil) if err == nil 
{ assert.Equal(tt, tc.expected.Plugin.ID(), m.Plugin.ID()) assert.Equal(tt, tc.expected.Plugin.Name(), m.Plugin.Name()) @@ -446,7 +446,7 @@ func TestSchema(t *testing.T) { Linkable: nil, Version: 0, }, - pid: id.MustPluginID("aaa#1.1.1"), + pid: id.MustPluginID("aaa~1.1.1"), expected: nil, err: id.ErrInvalidID, }, diff --git a/pkg/plugin/manifest/manifest.go b/pkg/plugin/manifest/manifest.go index 29b4db008..32f5ab719 100644 --- a/pkg/plugin/manifest/manifest.go +++ b/pkg/plugin/manifest/manifest.go @@ -10,3 +10,10 @@ type Manifest struct { ExtensionSchema []*property.Schema Schema *property.Schema } + +func (m Manifest) PropertySchemas() []*property.Schema { + if m.Schema == nil { + return append([]*property.Schema{}, m.ExtensionSchema...) + } + return append(m.ExtensionSchema, m.Schema) +} diff --git a/pkg/plugin/manifest/parser.go b/pkg/plugin/manifest/parser.go index eb3b1802d..8f059bb76 100644 --- a/pkg/plugin/manifest/parser.go +++ b/pkg/plugin/manifest/parser.go @@ -7,6 +7,7 @@ import ( "io" "github.com/goccy/go-yaml" + "github.com/reearth/reearth-backend/pkg/id" ) var ( @@ -15,14 +16,14 @@ var ( ErrSystemManifest = errors.New("cannot build system manifest") ) -func Parse(source io.Reader) (*Manifest, error) { +func Parse(source io.Reader, scene *id.SceneID) (*Manifest, error) { root := Root{} if err := yaml.NewDecoder(source).Decode(&root); err != nil { return nil, ErrFailedToParseManifest // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) } - manifest, err := root.manifest() + manifest, err := root.manifest(scene) if err != nil { return nil, err } @@ -33,14 +34,14 @@ func Parse(source io.Reader) (*Manifest, error) { return manifest, nil } -func ParseSystemFromBytes(source []byte) (*Manifest, error) { +func ParseSystemFromBytes(source []byte, scene *id.SceneID) (*Manifest, error) { root := Root{} if err := yaml.Unmarshal(source, &root); err != nil { return nil, ErrFailedToParseManifest // return nil, fmt.Errorf("failed to parse plugin 
manifest: %w", err) } - manifest, err := root.manifest() + manifest, err := root.manifest(scene) if err != nil { return nil, err } @@ -48,8 +49,8 @@ func ParseSystemFromBytes(source []byte) (*Manifest, error) { return manifest, nil } -func MustParseSystemFromBytes(source []byte) *Manifest { - m, err := ParseSystemFromBytes(source) +func MustParseSystemFromBytes(source []byte, scene *id.SceneID) *Manifest { + m, err := ParseSystemFromBytes(source, scene) if err != nil { panic(err) } diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 798dd63c4..6102ef310 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -16,25 +16,23 @@ import ( //go:embed testdata/minimum.yml var minimum string var minimumExpected = &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).MustBuild(), - ExtensionSchema: []*property.Schema{}, - Schema: nil, + Plugin: plugin.New().ID(id.MustPluginID("aaa~1.1.1")).MustBuild(), } //go:embed testdata/test.yml var normal string var normalExpected = &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa#1.1.1")).Name(i18n.StringFrom("bbb")).Extensions([]*plugin.Extension{ + Plugin: plugin.New().ID(id.MustPluginID("aaa~1.1.1")).Name(i18n.StringFrom("bbb")).Extensions([]*plugin.Extension{ plugin.NewExtension().ID(id.PluginExtensionID("hoge")). Visualizer(visualizer.VisualizerCesium). Type(plugin.ExtensionTypePrimitive). - Schema(id.MustPropertySchemaID("aaa#1.1.1/hoge")). + Schema(id.MustPropertySchemaID("aaa~1.1.1/hoge")). MustBuild(), }).MustBuild(), ExtensionSchema: []*property.Schema{ - property.NewSchema().ID(id.MustPropertySchemaID("aaa#1.1.1/hoge")).Groups([]*property.SchemaGroup{ + property.NewSchema().ID(id.MustPropertySchemaID("aaa~1.1.1/hoge")).Groups([]*property.SchemaGroup{ property.NewSchemaGroup().ID(id.PropertySchemaFieldID("default")). - Schema(id.MustPropertySchemaID("aaa#1.1.1/hoge")). + Schema(id.MustPropertySchemaID("aaa~1.1.1/hoge")). 
RepresentativeField(id.PropertySchemaFieldID("a").Ref()). Fields([]*property.SchemaField{ property.NewSchemaField().ID(id.PropertySchemaFieldID("a")). @@ -51,7 +49,6 @@ var normalExpected = &Manifest{ }).MustBuild(), }).MustBuild(), }, - Schema: nil, } func TestParse(t *testing.T) { @@ -96,7 +93,7 @@ func TestParse(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - m, err := Parse(strings.NewReader(tc.input)) + m, err := Parse(strings.NewReader(tc.input), nil) if tc.err == nil { if !assert.NoError(tt, err) { return @@ -140,7 +137,7 @@ func TestParseSystemFromBytes(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - m, err := ParseSystemFromBytes([]byte(tc.input)) + m, err := ParseSystemFromBytes([]byte(tc.input), nil) if tc.err == nil { if !assert.NoError(tt, err) { return @@ -186,12 +183,12 @@ func TestMustParseSystemFromBytes(t *testing.T) { if tc.err != nil { assert.PanicsWithError(tt, tc.err.Error(), func() { - _ = MustParseSystemFromBytes([]byte(tc.input)) + _ = MustParseSystemFromBytes([]byte(tc.input), nil) }) return } - m := MustParseSystemFromBytes([]byte(tc.input)) + m := MustParseSystemFromBytes([]byte(tc.input), nil) assert.Equal(tt, m, tc.expected) }) } diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index 2b1e26979..b1eb966d7 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -168,7 +168,7 @@ func TestMergeManifestTranslation(t *testing.T) { { Name: "nil translition list", Translations: map[string]*TranslationRoot{"xx": MustParseTranslationFromBytes([]byte(translatedManifest))}, - Manifest: MustParseSystemFromBytes([]byte(mergeManifest)), + Manifest: MustParseSystemFromBytes([]byte(mergeManifest), nil), Expected: &struct{ PluginName, PluginDesc, ExtName, PsTitle, FieldTitle, FieldDesc i18n.String }{ PluginName: i18n.String{"en": "aaa", "xx": "test plugin name"}, PluginDesc: 
i18n.String{"en": "ddd", "xx": "test plugin desc"}, diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 9bb3fc28d..12d529f0e 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -74,10 +74,23 @@ func (p *Plugin) Schema() *id.PropertySchemaID { return p.schema } -// Rename _ +func (p *Plugin) PropertySchemas() []id.PropertySchemaID { + if p == nil { + return nil + } + + ps := make([]id.PropertySchemaID, 0, len(p.extensions)+1) + if p.schema != nil { + ps = append(ps, *p.schema) + } + for _, e := range p.extensionOrder { + ps = append(ps, p.extensions[e].Schema()) + } + return ps +} + func (p *Plugin) Rename(name i18n.String) { p.name = name.Copy() - } // SetDescription _ diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index 8285e989f..f9e2fc976 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -37,6 +37,41 @@ func TestPlugin_Extension(t *testing.T) { } } +func TestPlugin_PropertySchemas(t *testing.T) { + ps1 := id.MustPropertySchemaID("hoge~0.1.0/a") + ps2 := id.MustPropertySchemaID("hoge~0.1.0/b") + ps3 := id.MustPropertySchemaID("hoge~0.1.0/c") + + testCases := []struct { + name string + plugin *Plugin + expected []id.PropertySchemaID + }{ + { + name: "normal", + plugin: New().Schema(&ps1).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), + expected: []id.PropertySchemaID{ps1, ps2, ps3}, + }, + { + name: "no plugin property schema", + plugin: New().Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), + expected: []id.PropertySchemaID{ps2, ps3}, + }, + { + name: "nil", + plugin: nil, + expected: []id.PropertySchemaID(nil), + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.plugin.PropertySchemas()) + }) + } +} + func TestPlugin_Rename(t 
*testing.T) { p := New().Name(i18n.StringFrom("x")).MustBuild() p.Rename(i18n.StringFrom("z")) @@ -55,5 +90,5 @@ func TestPlugin_Author(t *testing.T) { } func TestPlugin_ID(t *testing.T) { - assert.Equal(t, New().ID(id.MustPluginID("xxx#1.1.1")).MustBuild().ID(), id.MustPluginID("xxx#1.1.1")) + assert.Equal(t, New().ID(id.MustPluginID("xxx~1.1.1")).MustBuild().ID(), id.MustPluginID("xxx~1.1.1")) } diff --git a/pkg/plugin/pluginpack/package.go b/pkg/plugin/pluginpack/package.go new file mode 100644 index 000000000..2d5c385e3 --- /dev/null +++ b/pkg/plugin/pluginpack/package.go @@ -0,0 +1,57 @@ +package pluginpack + +import ( + "archive/zip" + "bytes" + "io" + "path" + "path/filepath" + + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" +) + +const manfiestFilePath = "reearth.yml" + +type Package struct { + Manifest *manifest.Manifest + Files file.Iterator +} + +func PackageFromZip(r io.Reader, scene *id.SceneID, sizeLimit int64) (*Package, error) { + b, err := io.ReadAll(io.LimitReader(r, sizeLimit)) + if err != nil { + return nil, err + } + + zr, err := zip.NewReader(bytes.NewReader(b), int64(len(b))) + if err != nil { + return nil, err + } + + basePath := file.ZipBasePath(zr) + f, err := zr.Open(path.Join(basePath, manfiestFilePath)) + if err != nil { + return nil, err + } + defer func() { + _ = f.Close() + }() + + m, err := manifest.Parse(f, scene) + if err != nil { + return nil, err + } + + return &Package{ + Manifest: m, + Files: iterator(file.NewZipReader(zr), basePath), + }, nil +} + +func iterator(a file.Iterator, prefix string) file.Iterator { + return file.NewFilteredIterator(file.NewPrefixIterator(a, prefix), func(p string) bool { + return p == manfiestFilePath || filepath.Ext(p) != ".js" + }) +} diff --git a/pkg/plugin/pluginpack/package_test.go b/pkg/plugin/pluginpack/package_test.go new file mode 100644 index 000000000..09ff0ebad --- /dev/null +++ 
b/pkg/plugin/pluginpack/package_test.go @@ -0,0 +1,54 @@ +package pluginpack + +import ( + "archive/zip" + "os" + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/stretchr/testify/assert" +) + +func TestPackageFromZip(t *testing.T) { + f, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = f.Close() + }() + + expected := plugin.New(). + ID(id.MustPluginID("testplugin~1.0.1")). + Name(i18n.StringFrom("testplugin")). + MustBuild() + + p, err := PackageFromZip(f, nil, 1000) + assert.NoError(t, err) + assert.Equal(t, &manifest.Manifest{ + Plugin: expected, + }, p.Manifest) + + var files []string + for { + n, err := p.Files.Next() + assert.NoError(t, err) + if n == nil { + break + } + files = append(files, n.Path) + } + assert.Equal(t, []string{"index.js"}, files) +} + +func TestPackageFromZip2(t *testing.T) { + f, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = f.Close() + }() + + _, err = PackageFromZip(f, nil, 100) + assert.ErrorIs(t, err, zip.ErrFormat) +} diff --git a/pkg/plugin/pluginpack/testdata/test.zip b/pkg/plugin/pluginpack/testdata/test.zip new file mode 100644 index 0000000000000000000000000000000000000000..cf317e25a09c1842ec67cd377cc6a19641c8d7dd GIT binary patch literal 791 zcmWIWW@Zs#00BwO|3P2|lwf60U?@o~F3}GS;bmYCieX6v;doiD#L^0G21b^zj0_B5 ztpQ*oI2brkj9>*C!vixSEk9o`DX|D{QUcHvEGB7t>gqjYVhHeN=Qw7%)0!P<1jrcd z7RUf)IDi&p=B1=o=w%fXGTi&zdF^xOJ+GcV>3==ol*ieVC&M+)>!0!rytL&R#LhW8 zna|4sO$C{Zu=6WYxOf0%xPf*SrKTnpm1O8u=H`H{eHY7;2*MLFLud7~pn!l=K7pHp z0$T+d;U!DCC1KaOT|RfpFM9}!t<5q3yF{*%-RVKfUT0Y?Iu9$ zKvo8LGct)VDO~XhRNfP_)6ol168kkwiu!%&{<&kwX;}Uofzw uaS4z~Z0sSM3W^cr-~q)53@mAsU}PXAegeE%*+80EfG`HAbtw=tFaQ8&$D-i? 
literal 0 HcmV?d00001 diff --git a/pkg/plugin/repourl/repos.go b/pkg/plugin/repourl/repos.go new file mode 100644 index 000000000..c13af9413 --- /dev/null +++ b/pkg/plugin/repourl/repos.go @@ -0,0 +1,76 @@ +package repourl + +import ( + "fmt" + "net/url" + "strings" +) + +var repos = map[string]func(s string) *URL{ + "github.com": github, +} + +var reposArchive = map[string]func(u *URL) *url.URL{ + "github.com": githubArchive, +} + +func github(p string) *URL { + s := strings.SplitN(p, "/", 3) + if len(s) < 2 { + return nil + } + + ref := "" + if len(s) == 3 { + s2 := strings.Split(s[2], "/") + if len(s2) == 1 { + ref = "heads/" + s2[0] + } + // tree/* + if len(s2) >= 2 && s2[0] == "tree" { + // unknown whether it is a branch name or a tag name + ref = "heads/" + s2[1] + } + // releases/tag/* + if len(s2) >= 3 && s2[0] == "release" && s2[1] == "tag" { + ref = "tags/" + s2[2] + } + // archive/*.zip + if len(s2) == 2 && s2[0] == "archive" { + ref = fileNameWithoutExtension(s2[1]) + } + // archive/refs/*/*.zip + if len(s2) == 4 && s2[0] == "archive" && s2[1] == "refs" { + ref = s2[2] + "/" + fileNameWithoutExtension(s2[3]) + } + } + + return &URL{ + Host: "github.com", + Owner: s[0], + Repo: strings.TrimSuffix(s[1], ".git"), + Ref: ref, + } +} + +func githubArchive(u *URL) *url.URL { + r := u.Ref + if r == "" { + r = "refs/heads/main" + } else if c := u.Commit(); c == "" { + r = "refs/" + r + } + + return &url.URL{ + Scheme: "https", + Host: "github.com", + Path: fmt.Sprintf("%s/%s/archive/%s.zip", u.Owner, u.Repo, r), + } +} + +func fileNameWithoutExtension(fileName string) string { + if pos := strings.LastIndexByte(fileName, '.'); pos != -1 { + return fileName[:pos] + } + return fileName +} diff --git a/pkg/plugin/repourl/repourl.go b/pkg/plugin/repourl/repourl.go new file mode 100644 index 000000000..31c35b36b --- /dev/null +++ b/pkg/plugin/repourl/repourl.go @@ -0,0 +1,136 @@ +package repourl + +import ( + "errors" + "net/url" + "strings" +) + +// URL is a URL of 
specific Git repository on well-known hosting services. +type URL struct { + Host string + Owner string + Repo string + // Ref represents ref of Git. There are 3 patterns: commit hash, "heads/BRANCH", and "tags/TAG". + Ref string +} + +var ( + ErrInvalidURL = errors.New("invalid repository url") + ErrUnsupportedHost = errors.New("unsupported host") +) + +func New(u *url.URL) (*URL, error) { + if u == nil { + return nil, nil + } + + h := u.Host + p := strings.TrimPrefix(u.Path, "/") + if u.Scheme == "" { + // github.com/aaa/bbb + s := strings.SplitN(u.Path, "/", 2) + if len(p) < 2 { + return nil, ErrInvalidURL + } + h = s[0] + p = s[1] + } else if u.Scheme != "http" && u.Scheme != "https" { + return nil, ErrInvalidURL + } + + var r *URL + if f := repos[h]; f != nil { + r = f(p) + } else { + return nil, ErrUnsupportedHost + } + + if r == nil { + return nil, ErrInvalidURL + } + return r, nil +} + +func Must(u *url.URL) *URL { + u2, err := New(u) + if err != nil { + panic(err) + } + return u2 +} + +func Parse(s string) (*URL, error) { + u, err := url.Parse(s) + if err != nil { + return nil, ErrInvalidURL + } + return New(u) +} + +func MustParse(s string) *URL { + u, err := Parse(s) + if err != nil { + panic(err) + } + return u +} + +func (u *URL) String() string { + if u == nil || u.Host == "" || u.Owner == "" || u.Repo == "" { + return "" + } + sb := strings.Builder{} + sb.WriteString(u.Host) + sb.WriteRune('/') + sb.WriteString(u.Owner) + sb.WriteRune('/') + sb.WriteString(u.Repo) + if u.Ref != "" { + sb.WriteRune('/') + sb.WriteString(u.Ref) + } + return sb.String() +} + +func (u *URL) Head() string { + if u == nil || u.Ref == "" { + return "" + } + h := strings.TrimPrefix(u.Ref, "heads/") + if len(h) == len(u.Ref) { + return "" // ref is not a head + } + return h +} + +func (u *URL) Tag() string { + if u == nil || u.Ref == "" { + return "" + } + h := strings.TrimPrefix(u.Ref, "tags/") + if len(h) == len(u.Ref) { + return "" // ref is not a tag + } + return h +} + 
+func (u *URL) Commit() string { + if u == nil || u.Ref == "" || strings.Contains(u.Ref, "/") { + return "" + } + return u.Ref +} + +func (u *URL) ArchiveURL() *url.URL { + if u == nil { + return nil + } + + f := reposArchive[u.Host] + if f == nil { + return nil + } + + return f(u) +} diff --git a/pkg/plugin/repourl/repourl_test.go b/pkg/plugin/repourl/repourl_test.go new file mode 100644 index 000000000..1ebb2ab87 --- /dev/null +++ b/pkg/plugin/repourl/repourl_test.go @@ -0,0 +1,303 @@ +package repourl + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +var cases = []struct { + Name string + Input string + Expected *URL + Err error +}{ + { + Name: "github.com/aaaa/bbbb", + Input: "https://github.com/aaaa/bbbb", + Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + }, + }, + { + Name: "github.com/aaaa/bbbb.git", + Input: "https://github.com/aaaa/bbbb.git", + Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + }, + }, + { + Name: "github.com/aaaa/bbbb/tree/cccc", + Input: "https://github.com/aaaa/bbbb/tree/cccc", + Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + Ref: "heads/cccc", + }, + }, + { + Name: "github.com/aaaa/bbbb/tree/cccc/dddd", + Input: "https://github.com/aaaa/bbbb/tree/cccc/dddd", + Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + Ref: "heads/cccc", + }, + }, + { + Name: "github.com/aaaa/bbbb/archive/cccc.zip", + Input: "https://github.com/aaaa/bbbb/archive/cccc.zip", + Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + Ref: "cccc", + }, + }, + { + Name: "github.com/aaaa/bbbb/archive/refs/heads/cccc.zip", + Input: "https://github.com/aaaa/bbbb/archive/refs/heads/cccc.zip", + Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + Ref: "heads/cccc", + }, + }, + { + Name: "github.com/aaaa/bbbb/archive/refs/tags/cccc.zip", + Input: "https://github.com/aaaa/bbbb/archive/refs/tags/cccc.zip", + 
Expected: &URL{ + Host: "github.com", + Owner: "aaaa", + Repo: "bbbb", + Ref: "tags/cccc", + }, + }, + { + Name: "nil", + Input: "", + Err: ErrInvalidURL, + }, + { + Name: "cannot parsed URL", + Input: "", + Err: ErrInvalidURL, + }, + { + Name: "invalid URL", + Input: "https://github.com/bbb", + Err: ErrInvalidURL, + }, + { + Name: "unsupported host", + Input: "https://aaaa.com/xxx", + Err: ErrUnsupportedHost, + }, +} + +func TestNew(t *testing.T) { + // nil + u, err := New(nil) + assert.NoError(t, err) + assert.Nil(t, u) + + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + ur, _ := url.Parse(tc.Input) + u, err := New(ur) + if tc.Err != nil { + assert.ErrorIs(tt, err, tc.Err) + } else { + assert.NoError(tt, err) + assert.Equal(tt, tc.Expected, u) + } + }) + } +} + +func TestMust(t *testing.T) { + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + ur, _ := url.Parse(tc.Input) + if tc.Err != nil { + assert.PanicsWithError(tt, tc.Err.Error(), func() { + _ = Must(ur) + }) + } else { + assert.Equal(tt, tc.Expected, Must(ur)) + } + }) + } +} + +func TestParse(t *testing.T) { + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + u, err := Parse(tc.Input) + if tc.Err != nil { + assert.ErrorIs(tt, err, tc.Err) + } else { + assert.NoError(tt, err) + assert.Equal(tt, tc.Expected, u) + } + }) + } +} + +func TestMustParse(t *testing.T) { + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + if tc.Err != nil { + assert.PanicsWithError(tt, tc.Err.Error(), func() { + _ = MustParse(tc.Input) + }) + } else { + assert.Equal(tt, tc.Expected, MustParse(tc.Input)) + } + }) + } +} + +func TestURL_String(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).String()) + assert.Equal(t, "", (&URL{}).String()) + assert.Equal(t, "", (&URL{Host: "github.com"}).String()) + assert.Equal(t, "", (&URL{Host: "github.com", Owner: 
"aaa"}).String()) + assert.Equal(t, "", (&URL{Host: "github.com", Repo: "bbb"}).String()) + assert.Equal(t, "github.com/aaa/bbb", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + }).String()) + assert.Equal(t, "github.com/aaa/bbb/ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).String()) +} + +func TestURL_Head(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).Head()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "", + }).Head()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).Head()) + assert.Equal(t, "ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "heads/ccc", + }).Head()) +} + +func TestURL_Tag(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).Tag()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "", + }).Tag()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).Tag()) + assert.Equal(t, "ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "tags/ccc", + }).Tag()) +} + +func TestURL_Commit(t *testing.T) { + assert.Equal(t, "", (*URL)(nil).Commit()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "", + }).Commit()) + assert.Equal(t, "ccc", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "ccc", + }).Commit()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "heads/ccc", + }).Commit()) + assert.Equal(t, "", (&URL{ + Host: "github.com", + Owner: "aaa", + Repo: "bbb", + Ref: "tags/ccc", + }).Commit()) +} + +func TestURL_ArchiveURL(t *testing.T) { + cases := []struct { + Name string + Input *URL + Expected string + }{ + { + Name: "github.com/aaaa/bbbb", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: ""}, + Expected: 
"https://github.com/aaaa/bbbb/archive/refs/heads/main.zip", + }, + { + Name: "github.com/aaaa/ccc", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: "ccc"}, + Expected: "https://github.com/aaaa/bbbb/archive/ccc.zip", + }, + { + Name: "github.com/aaaa/bbbb/heads/cccc", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: "heads/ccc"}, + Expected: "https://github.com/aaaa/bbbb/archive/refs/heads/ccc.zip", + }, + { + Name: "github.com/aaaa/bbbb/tags/ccc", + Input: &URL{Host: "github.com", Owner: "aaaa", Repo: "bbbb", Ref: "tags/ccc"}, + Expected: "https://github.com/aaaa/bbbb/archive/refs/tags/ccc.zip", + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.Expected, tc.Input.ArchiveURL().String()) + }) + } +} diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index 2df04a556..241ccb0b3 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -15,29 +15,29 @@ func TestBuilder_New(t *testing.T) { func TestBuilder_ID(t *testing.T) { pid := id.NewPropertyID() - p := New().ID(pid).Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + p := New().ID(pid).Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() assert.Equal(t, pid, p.ID()) } func TestBuilder_NewID(t *testing.T) { - p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() assert.False(t, p.ID().IsNil()) } func TestBuilder_Schema(t *testing.T) { - p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() - assert.Equal(t, id.MustPropertySchemaID("xxx#1.1.1/aa"), p.Schema()) + p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() + assert.Equal(t, 
id.MustPropertySchemaID("xxx~1.1.1/aa"), p.Schema()) } func TestBuilder_Scene(t *testing.T) { sid := id.NewSceneID() - p := New().NewID().Scene(sid).Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")).MustBuild() + p := New().NewID().Scene(sid).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() assert.Equal(t, sid, p.Scene()) } func TestBuilder_Items(t *testing.T) { iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xxx#1.1.1/aa") + propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") propertySchemaField1ID := id.PropertySchemaFieldID("a") propertySchemaGroup1ID := id.PropertySchemaFieldID("A") @@ -87,7 +87,7 @@ func TestBuilder_Items(t *testing.T) { tt.Parallel() res := New().NewID(). Scene(id.NewSceneID()). - Schema(id.MustPropertySchemaID("xxx#1.1.1/aa")). + Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")). Items(tc.Input). MustBuild() assert.Equal(tt, tc.Expected, res.Items()) @@ -98,7 +98,7 @@ func TestBuilder_Items(t *testing.T) { func TestBuilder_Build(t *testing.T) { pid := id.NewPropertyID() sid := id.NewSceneID() - scid := id.MustPropertySchemaID("xxx#1.1.1/aa") + scid := id.MustPropertySchemaID("xxx~1.1.1/aa") iid := id.NewPropertyItemID() propertySchemaField1ID := id.PropertySchemaFieldID("a") propertySchemaGroup1ID := id.PropertySchemaFieldID("A") @@ -176,7 +176,7 @@ func TestBuilder_Build(t *testing.T) { Scene: sid, Schema: scid, Items: []Item{ - NewGroup().ID(iid).Schema(id.MustPropertySchemaID("zzz#1.1.1/aa"), propertySchemaGroup1ID). + NewGroup().ID(iid).Schema(id.MustPropertySchemaID("zzz~1.1.1/aa"), propertySchemaGroup1ID). Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). 
diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index fb7ae6615..298b23981 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -10,7 +10,7 @@ import ( func TestGroupBuilder_Build(t *testing.T) { iid := id.NewPropertyItemID() - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFromUnsafe("vvv") f := NewField(sf).Value(v).MustBuild() @@ -72,7 +72,7 @@ func TestGroupBuilder_Build(t *testing.T) { func TestGroupBuilder_MustBuild(t *testing.T) { iid := id.NewPropertyItemID() - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFromUnsafe("vvv") f := NewField(sf).Value(v).MustBuild() @@ -146,7 +146,7 @@ func TestGroupBuilder_NewID(t *testing.T) { func TestGroupBuilder_InitGroupFrom(t *testing.T) { var sg *SchemaGroup assert.Nil(t, InitGroupFrom(sg)) - sg = NewSchemaGroup().ID("a").Schema(id.MustPropertySchemaID("xx/aa")).MustBuild() + sg = NewSchemaGroup().ID("a").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).MustBuild() g := InitGroupFrom(sg) assert.Equal(t, sg.ID(), g.SchemaGroup()) assert.Equal(t, sg.Schema(), g.Schema()) diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go index 95adc1407..e5a2f2175 100644 --- a/pkg/property/group_list_builder_test.go +++ b/pkg/property/group_list_builder_test.go @@ -10,7 +10,7 @@ import ( func TestGroupListBuilder_Build(t *testing.T) { pid := id.NewPropertyItemID() - scid := id.MustPropertySchemaID("xx/aa") + scid := id.MustPropertySchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { Name string @@ -73,7 +73,7 @@ func TestGroupListBuilder_NewID(t *testing.T) { func TestGroupListBuilder_MustBuild(t *testing.T) { pid := 
id.NewPropertyItemID() - scid := id.MustPropertySchemaID("xx/aa") + scid := id.MustPropertySchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { Name string @@ -148,9 +148,9 @@ func TestInitGroupListFrom(t *testing.T) { }, { Name: "success", - SchemaGroup: NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).MustBuild(), + SchemaGroup: NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).MustBuild(), ExpectedSG: "aa", - ExpectedSchema: id.MustPropertySchemaID("xx/aa"), + ExpectedSchema: id.MustPropertySchemaID("xx~1.0.0/aa"), }, } diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index 6579e20df..c09a88f9c 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -26,9 +26,9 @@ func TestGroupList_SchemaRef(t *testing.T) { }, { Name: "success", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("aa/xx"), id.PropertySchemaFieldID("xx")).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), id.PropertySchemaFieldID("xx")).MustBuild(), ExpectedSG: id.PropertySchemaFieldID("xx").Ref(), - ExpectedSchema: id.MustPropertySchemaID("aa/xx").Ref(), + ExpectedSchema: id.MustPropertySchemaID("xx~1.0.0/aa").Ref(), }, } for _, tc := range testCases { @@ -60,12 +60,12 @@ func TestGroupList_HasLinkedField(t *testing.T) { }, { Name: "has linked field", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: true, }, { Name: "no linked field", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), Expected: false, }, } @@ -98,12 +98,12 @@ func TestGroupList_CollectDatasets(t 
*testing.T) { }, { Name: "one dataset", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: []id.DatasetID{dsid}, }, { Name: "empty list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), Expected: []id.DatasetID{}, }, } @@ -135,12 +135,12 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { }, { Name: "one field list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: []*Field{f}, }, { Name: "empty list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), Expected: []*Field{}, }, } @@ -171,12 +171,12 @@ func TestGroupList_IsEmpty(t *testing.T) { }, { Name: "is empty", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").MustBuild(), Expected: true, }, { Name: "is not empty", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: false, }, } @@ -207,7 +207,7 @@ func TestGroupList_Prune(t *testing.T) { }, { Name: "pruned list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), 
"xx").Groups(groups).MustBuild(), Expected: pruned, }, } @@ -236,13 +236,13 @@ func TestGroupList_GetGroup(t *testing.T) { { Name: "found", Input: pid, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), Expected: g, }, { Name: "not found", Input: id.NewPropertyItemID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), Expected: nil, }, } @@ -280,7 +280,7 @@ func TestGroupList_GroupAt(t *testing.T) { { Name: "found", Index: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: g3, }, } @@ -310,13 +310,13 @@ func TestGroupList_Has(t *testing.T) { { Name: "found", Input: g2.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: true, }, { Name: "not found", Input: g3.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g4}).MustBuild(), Expected: false, }, } @@ -344,7 +344,7 @@ func TestGroupList_Count(t *testing.T) { }, { Name: "not found", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), 
"xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: 4, }, } @@ -379,7 +379,7 @@ func TestGroupList_Add(t *testing.T) { Name: "index < 0", Index: -1, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -392,7 +392,7 @@ func TestGroupList_Add(t *testing.T) { Name: "len(g) > index > 0 ", Index: 2, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -435,7 +435,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "index < 0", Index: -1, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -448,7 +448,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "len(g) > index > 0 ", Index: 2, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -461,7 +461,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "move group", Index: 2, Gr: g1, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ 
-504,7 +504,7 @@ func TestGroupList_Move(t *testing.T) { Name: "success", Id: g1.ID(), ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: struct { Id id.PropertyItemID Index int @@ -540,21 +540,21 @@ func TestGroupList_MoveAt(t *testing.T) { Name: "from = to", FromIndex: 2, ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "from < 0", FromIndex: -1, ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "success move", FromIndex: 0, ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g2, g3, g1, g4}, }, } @@ -586,19 +586,19 @@ func TestGroupList_RemoveAt(t *testing.T) { { Name: "success", Index: 1, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g3, g4}, }, { Name: "index < 0", Index: -1, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, 
g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "index > length", Index: 5, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, } @@ -629,13 +629,13 @@ func TestGroupList_Remove(t *testing.T) { { Name: "success", Input: g1.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: true, }, { Name: "not found", Input: g4.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "xx").Groups([]*Group{g1, g2, g3}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3}).MustBuild(), Expected: false, }, } @@ -652,7 +652,7 @@ func TestGroupList_Remove(t *testing.T) { func TestGroupList_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sf.ID()).MustBuild() testCases := []struct { Name string @@ -666,8 +666,8 @@ func TestGroupList_GetOrCreateField(t *testing.T) { }{ { Name: "success", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").Groups([]*Group{g}).MustBuild(), - Schema: 
NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), Expected: struct { Ok bool @@ -679,20 +679,20 @@ func TestGroupList_GetOrCreateField(t *testing.T) { }, { Name: "can't get a group", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, { Name: "FieldByItem not ok: sg!=nil", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(sg.IDRef(), g.IDRef(), sf.ID().Ref()), }, { Name: "psg == nil", GL: NewGroupList().NewID().Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, } @@ -711,7 +711,7 @@ func TestGroupList_GetOrCreateField(t *testing.T) { func TestGroupList_CreateAndAddListItem(t *testing.T) { getIntRef := func(i int) *int { return &i } sf := 
NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sf.ID()).MustBuild() testCases := []struct { Name string @@ -723,8 +723,8 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { { Name: "success", Index: getIntRef(0), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Expected: g, }, } diff --git a/pkg/property/group_test.go b/pkg/property/group_test.go index 8a041f333..df89c2096 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -21,7 +21,7 @@ func TestGroup_SchemaGroup(t *testing.T) { assert.Nil(t, g.SchemaGroupRef()) assert.Equal(t, id.PropertySchemaFieldID(""), g.SchemaGroup()) pfid := id.PropertySchemaFieldID("aa") - g = NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), pfid).MustBuild() + g = NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), pfid).MustBuild() assert.Equal(t, pfid, g.SchemaGroup()) assert.Equal(t, pfid.Ref(), g.SchemaGroupRef()) } @@ -245,7 +245,7 @@ func TestGroup_Prune(t *testing.T) { func TestGroup_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() f := NewField(sf).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() 
testCases := []struct { Name string Group *Group @@ -261,17 +261,17 @@ func TestGroup_GetOrCreateField(t *testing.T) { }, { Name: "nil ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), }, { Name: "group schema doesn't equal to ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xxx/aaa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), }, { Name: "create field", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), FID: "aa", Expected: struct { Field *Field @@ -283,8 +283,8 @@ func TestGroup_GetOrCreateField(t *testing.T) { }, { Name: "get field", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").Fields([]*Field{f}).MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").Fields([]*Field{f}).MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), FID: "aa", Expected: struct { Field *Field @@ -415,8 +415,8 @@ func TestGroup_Field(t *testing.T) { func TestGroup_UpdateNameFieldValue(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() //f := NewField(sf).MustBuild() - sg := 
NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("bb").Schema(id.MustPropertySchemaID("xx/bb")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("bb").Schema(id.MustPropertySchemaID("xx~1.0.0/bb")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string Group *Group @@ -431,25 +431,25 @@ func TestGroup_UpdateNameFieldValue(t *testing.T) { }, { Name: "nil ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), }, { Name: "group schema doesn't equal to ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xxx/aaa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), }, { Name: "update value", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Value: ValueTypeString.ValueFromUnsafe("abc"), FID: "aa", Expected: NewField(sf).Value(ValueTypeString.ValueFromUnsafe("abc")).MustBuild(), }, { Name: "invalid property field", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx/aa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), + Group: 
NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), Value: ValueTypeString.ValueFromUnsafe("abc"), FID: "aa", Expected: nil, diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index 230550f25..c6d22c3e5 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -9,10 +9,10 @@ import ( func TestInitItemFrom(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() - sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xx/aa") + propertySchemaID := id.MustPropertySchemaID("xx~1.0.0/aa") propertySchemaField1ID := id.PropertySchemaFieldID("aa") testCases := []struct { Name string @@ -51,7 +51,7 @@ func TestInitItemFrom(t *testing.T) { func TestToGroup(t *testing.T) { iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xxx#1.1.1/aa") + propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") propertySchemaField1ID := id.PropertySchemaFieldID("a") propertySchemaGroup1ID := id.PropertySchemaFieldID("A") il := []Item{ @@ -73,7 +73,7 @@ func TestToGroup(t *testing.T) { func TestToGroupList(t *testing.T) { iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xxx#1.1.1/aa") + propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") propertySchemaGroup1ID := id.PropertySchemaFieldID("A") il := []Item{ 
NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID).MustBuild(), diff --git a/pkg/property/list_test.go b/pkg/property/list_test.go index 1868622ee..8b27eadb9 100644 --- a/pkg/property/list_test.go +++ b/pkg/property/list_test.go @@ -9,9 +9,9 @@ import ( var ( sf = NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg = NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() - p = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() - p2 = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() + sg = NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + p = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() + p2 = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() ) func TestMap_Add(t *testing.T) { diff --git a/pkg/property/loader_test.go b/pkg/property/loader_test.go index f2d1f56a9..8e6630de5 100644 --- a/pkg/property/loader_test.go +++ b/pkg/property/loader_test.go @@ -10,7 +10,7 @@ import ( func TestLoaderFrom(t *testing.T) { scene := id.NewSceneID() - ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + ps := id.MustPropertySchemaID("xxx~1.1.1/aa") pid1 := id.NewPropertyID() pid2 := id.NewPropertyID() p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() @@ -28,7 +28,7 @@ func TestLoaderFrom(t *testing.T) { func TestLoaderFromMap(t *testing.T) { scene := id.NewSceneID() - ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + ps := id.MustPropertySchemaID("xxx~1.1.1/aa") pid1 := id.NewPropertyID() pid2 := id.NewPropertyID() pid3 := id.NewPropertyID() diff --git a/pkg/property/merged_test.go b/pkg/property/merged_test.go index dcb02f695..a7fc5e974 
100644 --- a/pkg/property/merged_test.go +++ b/pkg/property/merged_test.go @@ -15,8 +15,8 @@ func TestMerge(t *testing.T) { d2 := id.NewDatasetID() opid := id.NewPropertyID() ppid := id.NewPropertyID() - psid := id.MustPropertySchemaID("hoge#0.1.0/fff") - psid2 := id.MustPropertySchemaID("hoge#0.1.0/aaa") + psid := id.MustPropertySchemaID("hoge~0.1.0/fff") + psid2 := id.MustPropertySchemaID("hoge~0.1.0/aaa") psgid1 := id.PropertySchemaFieldID("group1") psgid2 := id.PropertySchemaFieldID("group2") psgid3 := id.PropertySchemaFieldID("group3") diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index 6a5cb0bbf..b85ec1b8c 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -12,8 +12,8 @@ import ( func TestPropertyMigrateSchema(t *testing.T) { sceneID := id.NewSceneID() - oldSchema, _ := id.PropertySchemaIDFrom("hoge/test") - newSchema, _ := id.PropertySchemaIDFrom("hoge/test2") + oldSchema, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") + newSchema, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test2") schemaField1ID := id.PropertySchemaFieldID("a") schemaField2ID := id.PropertySchemaFieldID("b") schemaField3ID := id.PropertySchemaFieldID("c") @@ -116,7 +116,7 @@ func TestPropertyMigrateSchema(t *testing.T) { func TestGetOrCreateItem(t *testing.T) { sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge/test") + sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sf1id := id.PropertySchemaFieldID("a") sf2id := id.PropertySchemaFieldID("b") sg1id := id.PropertySchemaFieldID("c") @@ -166,7 +166,7 @@ func TestGetOrCreateItem(t *testing.T) { func TestGetOrCreateField(t *testing.T) { sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge/test") + sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sf1id := id.PropertySchemaFieldID("a") sf2id := id.PropertySchemaFieldID("b") sg1id := id.PropertySchemaFieldID("c") @@ -218,7 +218,7 @@ func TestGetOrCreateField(t *testing.T) { func TestAddListItem(t 
*testing.T) { sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge/test") + sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sfid := id.PropertySchemaFieldID("a") sgid := id.PropertySchemaFieldID("b") sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() @@ -240,7 +240,7 @@ func TestAddListItem(t *testing.T) { func TestMoveListItem(t *testing.T) { sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge/test") + sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sgid := id.PropertySchemaFieldID("b") g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() @@ -255,7 +255,7 @@ func TestMoveListItem(t *testing.T) { func TestRemoveListItem(t *testing.T) { sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge/test") + sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sgid := id.PropertySchemaFieldID("b") g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() diff --git a/pkg/property/schema_builder_test.go b/pkg/property/schema_builder_test.go index 845e5f624..9d7dfbbef 100644 --- a/pkg/property/schema_builder_test.go +++ b/pkg/property/schema_builder_test.go @@ -11,8 +11,8 @@ import ( func TestSchemaBuilder_Build(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string Id id.PropertySchemaID @@ -33,19 +33,19 @@ func TestSchemaBuilder_Build(t *testing.T) { }, { Name: "fail: 
invalid linkable field", - Id: id.MustPropertySchemaID("xx/aa"), + Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Linkable: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, Err: ErrInvalidPropertyLinkableField, }, { Name: "fail: duplicated field", - Id: id.MustPropertySchemaID("xx/aa"), + Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg, sg2}, - Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, id.MustPropertySchemaID("xx/aa"), []id.PropertySchemaFieldID{"aa"}), + Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, id.MustPropertySchemaID("xx~1.0.0/aa"), []id.PropertySchemaFieldID{"aa"}), }, { Name: "success", - Id: id.MustPropertySchemaID("xx/aa"), + Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg}, Version: 1, Expected: struct { @@ -53,7 +53,7 @@ func TestSchemaBuilder_Build(t *testing.T) { Version int Groups []*SchemaGroup Linkable LinkableFields - }{Id: id.MustPropertySchemaID("xx/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + }{Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, }, } @@ -81,8 +81,8 @@ func TestSchemaBuilder_Build(t *testing.T) { func TestSchemaBuilder_MustBuild(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string Fails bool @@ -103,19 +103,19 @@ func TestSchemaBuilder_MustBuild(t *testing.T) { }, { Name: "fail: invalid linkable field", - Id: id.MustPropertySchemaID("xx/aa"), + Id: 
id.MustPropertySchemaID("xx~1.0.0/aa"), Linkable: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, Fails: true, }, { Name: "fail: duplicated field", - Id: id.MustPropertySchemaID("xx/aa"), + Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg, sg2}, Fails: true, }, { Name: "success", - Id: id.MustPropertySchemaID("xx/aa"), + Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg}, Version: 1, Expected: struct { @@ -123,7 +123,7 @@ func TestSchemaBuilder_MustBuild(t *testing.T) { Version int Groups []*SchemaGroup Linkable LinkableFields - }{Id: id.MustPropertySchemaID("xx/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + }{Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, }, } diff --git a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go index acd92277a..73da78d60 100644 --- a/pkg/property/schema_group_builder_test.go +++ b/pkg/property/schema_group_builder_test.go @@ -10,7 +10,7 @@ import ( ) func TestSchemaGroupBuilder_Build(t *testing.T) { - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") gid := id.PropertySchemaFieldID("xx") sf := NewSchemaField().ID("ff").Type(ValueTypeString).MustBuild() testCases := []struct { diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go index bb37e8596..05cb88507 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -10,7 +10,7 @@ import ( func TestSchemaGroup(t *testing.T) { scid := id.PropertySchemaFieldID("aa") - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() testCases := []struct { @@ -72,7 +72,7 @@ func TestSchemaGroup(t *testing.T) { func TestSchemaGroup_Field(t *testing.T) { scid := id.PropertySchemaFieldID("aa") - sid := id.MustPropertySchemaID("xx/aa") + sid := 
id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() testCases := []struct { @@ -112,7 +112,7 @@ func TestSchemaGroup_Field(t *testing.T) { } func TestSchemaGroup_SetTitle(t *testing.T) { - sg := NewSchemaGroup().ID(id.PropertySchemaFieldID("aa")).Schema(id.MustPropertySchemaID("xx/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID(id.PropertySchemaFieldID("aa")).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() sg.SetTitle(i18n.StringFrom("ttt")) assert.Equal(t, i18n.StringFrom("ttt"), sg.Title()) } diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go index efc677612..bedc0d100 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -16,7 +16,7 @@ func TestSchema_Nil(t *testing.T) { } func TestSchema_Field(t *testing.T) { - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() @@ -56,7 +56,7 @@ func TestSchema_Field(t *testing.T) { } func TestSchema_Group(t *testing.T) { - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() @@ -98,7 +98,7 @@ func TestSchema_Group(t *testing.T) { } func TestSchema_DetectDuplicatedFields(t *testing.T) { - sid := id.MustPropertySchemaID("xx/aa") + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() diff --git a/pkg/property/sealed_test.go b/pkg/property/sealed_test.go index aefc4f525..7d364be5e 100644 --- a/pkg/property/sealed_test.go +++ b/pkg/property/sealed_test.go @@ -16,7 +16,7 
@@ var ( d = id.NewDatasetID() opid = id.NewPropertyID() ppid = id.NewPropertyID() - psid = id.MustPropertySchemaID("hoge#0.1.0/fff") + psid = id.MustPropertySchemaID("hoge~0.1.0/fff") psiid1 = id.PropertySchemaFieldID("x") psiid2 = id.PropertySchemaFieldID("y") i1id = id.NewPropertyItemID() @@ -168,7 +168,7 @@ func TestSeal(t *testing.T) { func TestSealProperty(t *testing.T) { pid := id.NewPropertyID() - ps := id.MustPropertySchemaID("xxx#1.1.1/aa") + ps := id.MustPropertySchemaID("xxx~1.1.1/aa") testCases := []struct { Name string Input *Property diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index 38e326981..7ebeb0b42 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -17,8 +17,8 @@ func TestSceneBuilder(t *testing.T) { // ids sceneID := id.NewSceneID() scenePropertyID := id.NewPropertyID() - propertySchemaID := id.MustPropertySchemaID("hoge#0.1.0/foobar") - pluginID := id.MustPluginID("hoge#0.1.0") + propertySchemaID := id.MustPropertySchemaID("hoge~0.1.0/foobar") + pluginID := id.MustPluginID("hoge~0.1.0") pluginExtension1ID := id.PluginExtensionID("ext") pluginExtension2ID := id.PluginExtensionID("ext2") propertySchemaField1ID := id.PropertySchemaFieldID("a") @@ -380,8 +380,8 @@ func TestSceneBuilder(t *testing.T) { sceneWidgetID1 := id.NewWidgetID() sceneWidgetID2 := id.NewWidgetID() - sceneWidget1 := scene.MustNewWidget(&sceneWidgetID1, pluginID, pluginExtension1ID, scenePropertyID, false) - sceneWidget2 := scene.MustNewWidget(&sceneWidgetID2, pluginID, pluginExtension2ID, scenePropertyID, true) + sceneWidget1 := scene.MustNewWidget(sceneWidgetID1, pluginID, pluginExtension1ID, scenePropertyID, false) + sceneWidget2 := scene.MustNewWidget(sceneWidgetID2, pluginID, pluginExtension2ID, scenePropertyID, true) scenePlugin1 := scene.NewPlugin(pluginID, &scenePropertyID) assert.Equal(t, sceneWidgetID1, sceneWidget1.ID()) diff --git a/pkg/scene/builder/scene_test.go 
b/pkg/scene/builder/scene_test.go index 8c0e741d1..7bf950cff 100644 --- a/pkg/scene/builder/scene_test.go +++ b/pkg/scene/builder/scene_test.go @@ -11,7 +11,7 @@ import ( func TestScene_FindProperty(t *testing.T) { p1 := id.NewPropertyID() sid := id.NewSceneID() - scid := id.MustPropertySchemaID("xx/aa") + scid := id.MustPropertySchemaID("xx~1.0.0/aa") pl := []*property.Property{ property.New().NewID().Scene(sid).Schema(scid).MustBuild(), property.New().ID(p1).Scene(sid).Schema(scid).MustBuild(), diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go index 596ac9e6b..c7d9c2c2d 100644 --- a/pkg/scene/builder_test.go +++ b/pkg/scene/builder_test.go @@ -50,7 +50,7 @@ func TestBuilder_Project(t *testing.T) { func TestBuilder_WidgetSystem(t *testing.T) { nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetIDFromRefID(&nid), id.OfficialPluginID, "xxx", id.NewPropertyID(), true), + MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", id.NewPropertyID(), true), }) b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).WidgetSystem(ws).MustBuild() assert.Equal(t, ws, b.WidgetSystem()) @@ -64,7 +64,7 @@ func TestBuilder_Build(t *testing.T) { lid := id.NewLayerID() nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetIDFromRefID(&nid), id.OfficialPluginID, "xxx", ppid, true), + MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true), }) ps := NewPluginSystem([]*Plugin{ NewPlugin(id.OfficialPluginID, ppid.Ref()), @@ -197,7 +197,7 @@ func TestBuilder_MustBuild(t *testing.T) { lid := id.NewLayerID() nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetIDFromRefID(&nid), id.OfficialPluginID, "xxx", ppid, true), + MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true), }) ps := NewPluginSystem([]*Plugin{ NewPlugin(id.OfficialPluginID, ppid.Ref()), diff --git a/pkg/scene/plugin_system.go b/pkg/scene/plugin_system.go index cf36ac598..e0c756cfb 100644 --- 
a/pkg/scene/plugin_system.go +++ b/pkg/scene/plugin_system.go @@ -43,8 +43,7 @@ func (p *PluginSystem) Plugins() []*Plugin { func (p *PluginSystem) Property(id id.PluginID) *id.PropertyID { for _, p := range p.plugins { if p.plugin.Equal(id) { - p2 := *p.property - return &p2 + return p.property.CopyRef() } } return nil diff --git a/pkg/scene/plugin_system_test.go b/pkg/scene/plugin_system_test.go index acf0a0491..e8acd9204 100644 --- a/pkg/scene/plugin_system_test.go +++ b/pkg/scene/plugin_system_test.go @@ -8,7 +8,7 @@ import ( ) func TestNewPluginSystem(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -65,7 +65,7 @@ func TestNewPluginSystem(t *testing.T) { } func TestPluginSystem_Property(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -79,10 +79,16 @@ func TestPluginSystem_Property(t *testing.T) { PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), Expected: pr, }, + { + Name: "property is nil", + Input: pid, + PS: NewPluginSystem([]*Plugin{NewPlugin(pid, nil)}), + Expected: nil, + }, { Name: "property is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), Expected: nil, }, } @@ -97,7 +103,7 @@ func TestPluginSystem_Property(t *testing.T) { } func TestPluginSystem_Plugin(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -114,7 +120,7 @@ func TestPluginSystem_Plugin(t *testing.T) { { Name: "plugin is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), Expected: nil, }, } @@ 
-144,8 +150,8 @@ func TestPluginSystem_Properties(t *testing.T) { { Name: "get properties", PS: NewPluginSystem([]*Plugin{ - NewPlugin(id.MustPluginID("zzz#1.1.1"), pr), - NewPlugin(id.MustPluginID("xxx#1.1.1"), pr2), + NewPlugin(id.MustPluginID("zzz~1.1.1"), pr), + NewPlugin(id.MustPluginID("xxx~1.1.1"), pr2), }), Expected: []id.PropertyID{*pr, *pr2}, }, @@ -161,7 +167,7 @@ func TestPluginSystem_Properties(t *testing.T) { } func TestPluginSystem_Has(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -178,7 +184,7 @@ func TestPluginSystem_Has(t *testing.T) { { Name: "property is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), Expected: false, }, } @@ -193,7 +199,7 @@ func TestPluginSystem_Has(t *testing.T) { } func TestPluginSystem_HasPlugin(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -210,7 +216,7 @@ func TestPluginSystem_HasPlugin(t *testing.T) { { Name: "property is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz#1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), Expected: false, }, } @@ -225,7 +231,7 @@ func TestPluginSystem_HasPlugin(t *testing.T) { } func TestPluginSystem_Add(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -268,7 +274,7 @@ func TestPluginSystem_Add(t *testing.T) { } func TestPluginSystem_Remove(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string @@ -299,8 +305,8 @@ func 
TestPluginSystem_Remove(t *testing.T) { } func TestPluginSystem_Upgrade(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") - nid := id.MustPluginID("zzz#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") + nid := id.MustPluginID("zzz~1.1.1") pr := id.NewPropertyID().Ref() testCases := []struct { Name string diff --git a/pkg/scene/plugin_test.go b/pkg/scene/plugin_test.go index 93fbb58c3..fa35ca273 100644 --- a/pkg/scene/plugin_test.go +++ b/pkg/scene/plugin_test.go @@ -8,7 +8,7 @@ import ( ) func TestPlugin(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID().Ref() res := NewPlugin(pid, pr) p := Plugin{ diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index 1dd71405a..b7354a333 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -65,7 +65,7 @@ func TestScene_Properties(t *testing.T) { pid2 := id.NewPropertyID() s := New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).Property(pid1).WidgetSystem( NewWidgetSystem([]*Widget{ - MustNewWidget(id.NewWidgetID().Ref(), id.MustPluginID("xxx#1.1.1"), "eee", pid2, true), + MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", pid2, true), })).MustBuild() assert.Equal(t, []id.PropertyID{pid1, pid2}, s.Properties()) diff --git a/pkg/scene/sceneops/dataset_migrator.go b/pkg/scene/sceneops/dataset_migrator.go index 08453c61a..c84303b91 100644 --- a/pkg/scene/sceneops/dataset_migrator.go +++ b/pkg/scene/sceneops/dataset_migrator.go @@ -7,7 +7,7 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" - "github.com/reearth/reearth-backend/pkg/layer/initializer" + "github.com/reearth/reearth-backend/pkg/layer/layerops" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" ) @@ -216,7 +216,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, old // 
ใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ๅ–ๅพ— var plug *plugin.Plugin if pid := lg.Plugin(); pid != nil { - plug2, err := srv.Plugin(ctx, *pid) + plug2, err := srv.Plugin(ctx, []id.PluginID{*pid}, []id.SceneID{sid}) if err != nil || len(plug2) < 1 { return MigrateDatasetResult{}, err } @@ -232,7 +232,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, old name = rf.Value().Value().(string) } - layerItem, property, err := initializer.LayerItem{ + layerItem, property, err := layerops.LayerItem{ SceneID: sid, ParentLayerID: lg.ID(), LinkedDatasetID: &did, diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index a959c1889..33340c80b 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -43,7 +43,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol return MigratePluginsResult{}, ErrInvalidPlugins } - plugins, err := s.Plugin(ctx, oldPluginID, newPluginID) + plugins, err := s.Plugin(ctx, []id.PluginID{oldPluginID, newPluginID}, []id.SceneID{sc.ID()}) if err != nil || len(plugins) < 2 { return MigratePluginsResult{}, ErrInvalidPlugins } diff --git a/pkg/scene/widget.go b/pkg/scene/widget.go index 8d3f3902c..43055e6ce 100644 --- a/pkg/scene/widget.go +++ b/pkg/scene/widget.go @@ -12,17 +12,13 @@ type Widget struct { enabled bool } -func NewWidget(wid *id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) (*Widget, error) { +func NewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) (*Widget, error) { if !plugin.Validate() || string(extension) == "" || id.ID(property).IsNil() { return nil, id.ErrInvalidID } - if wid == nil || (*wid).IsNil() { - wid = id.NewWidgetID().Ref() - } - return &Widget{ - id: *wid, + id: wid, plugin: plugin, extension: extension, property: property, @@ -30,7 +26,7 @@ func NewWidget(wid *id.WidgetID, plugin 
id.PluginID, extension id.PluginExtensio }, nil } -func MustNewWidget(wid *id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) *Widget { +func MustNewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) *Widget { w, err := NewWidget(wid, plugin, extension, property, enabled) if err != nil { panic(err) diff --git a/pkg/scene/widget_system.go b/pkg/scene/widget_system.go index 00418ae03..f0ad465fb 100644 --- a/pkg/scene/widget_system.go +++ b/pkg/scene/widget_system.go @@ -81,12 +81,26 @@ func (w *WidgetSystem) Remove(p id.PluginID, e id.PluginExtensionID) { if w == nil { return } - for i, ww := range w.widgets { - if ww.plugin.Equal(p) && ww.extension == e { + for i := 0; i < len(w.widgets); i++ { + if w.widgets[i].plugin.Equal(p) && w.widgets[i].extension == e { + w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) + i-- + } + } +} + +func (w *WidgetSystem) RemoveAllByPlugin(p id.PluginID) (res []id.PropertyID) { + if w == nil { + return nil + } + for i := 0; i < len(w.widgets); i++ { + if w.widgets[i].plugin.Equal(p) { + res = append(res, w.widgets[i].Property()) w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) 
- return + i-- } } + return res } func (w *WidgetSystem) Replace(oldp, newp id.PluginID) { diff --git a/pkg/scene/widget_system_test.go b/pkg/scene/widget_system_test.go index 3ca3659a9..665faeb80 100644 --- a/pkg/scene/widget_system_test.go +++ b/pkg/scene/widget_system_test.go @@ -8,9 +8,9 @@ import ( ) func TestNewWidgetSystem(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() - wid := id.NewWidgetID().Ref() + wid := id.NewWidgetID() testCases := []struct { Name string Input []*Widget @@ -30,7 +30,7 @@ func TestNewWidgetSystem(t *testing.T) { Name: "widget list with matched values", Input: []*Widget{ { - id: *wid, + id: wid, plugin: pid, extension: "eee", property: pr, @@ -45,14 +45,14 @@ func TestNewWidgetSystem(t *testing.T) { Name: "widget list with matched values", Input: []*Widget{ { - id: *wid, + id: wid, plugin: pid, extension: "eee", property: pr, enabled: true, }, { - id: *wid, + id: wid, plugin: pid, extension: "eee", property: pr, @@ -75,9 +75,9 @@ func TestNewWidgetSystem(t *testing.T) { } func TestWidgetSystem_Add(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() - wid := id.NewWidgetID().Ref() + wid := id.NewWidgetID() testCases := []struct { Name string Input *Widget @@ -86,7 +86,7 @@ func TestWidgetSystem_Add(t *testing.T) { { Name: "add new widget", Input: &Widget{ - id: *wid, + id: wid, plugin: pid, extension: "eee", property: pr, @@ -104,7 +104,7 @@ func TestWidgetSystem_Add(t *testing.T) { { Name: "add to nil widgetSystem", Input: &Widget{ - id: *wid, + id: wid, plugin: pid, extension: "eee", property: pr, @@ -116,7 +116,7 @@ func TestWidgetSystem_Add(t *testing.T) { { Name: "add existing widget", Input: &Widget{ - id: *wid, + id: wid, plugin: pid, extension: "eee", property: pr, @@ -137,9 +137,14 @@ func TestWidgetSystem_Add(t *testing.T) { } func TestWidgetSystem_Remove(t *testing.T) { - pid := 
id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") + pid2 := id.MustPluginID("xxx~1.1.2") pr := id.NewPropertyID() - wid := id.NewWidgetID().Ref() + w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", pr, true) + w2 := MustNewWidget(id.NewWidgetID(), pid, "e1", pr, true) + w3 := MustNewWidget(id.NewWidgetID(), pid, "e2", pr, true) + w4 := MustNewWidget(id.NewWidgetID(), pid2, "e1", pr, true) + testCases := []struct { Name string PID id.PluginID @@ -149,9 +154,9 @@ func TestWidgetSystem_Remove(t *testing.T) { { Name: "remove a widget", PID: pid, - EID: "eee", - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), - Expected: NewWidgetSystem([]*Widget{}), + EID: "e1", + WS: NewWidgetSystem([]*Widget{w1, w2, w3, w4}), + Expected: NewWidgetSystem([]*Widget{w3, w4}), }, { Name: "remove from nil widgetSystem", @@ -169,11 +174,48 @@ func TestWidgetSystem_Remove(t *testing.T) { } } +func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { + pid := id.MustPluginID("xxx~1.1.1") + pid2 := id.MustPluginID("xxx~1.1.2") + w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true) + w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true) + w3 := MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true) + + testCases := []struct { + Name string + PID id.PluginID + WS, Expected *WidgetSystem + ExpectedResult []id.PropertyID + }{ + { + Name: "remove widgets", + PID: pid, + WS: NewWidgetSystem([]*Widget{w1, w2, w3}), + Expected: NewWidgetSystem([]*Widget{w3}), + ExpectedResult: []id.PropertyID{w1.Property(), w2.Property()}, + }, + { + Name: "remove from nil widgetSystem", + WS: nil, + Expected: nil, + ExpectedResult: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.ExpectedResult, tc.WS.RemoveAllByPlugin(tc.PID)) + assert.Equal(tt, tc.Expected, tc.WS) + }) + } +} + func TestWidgetSystem_Replace(t *testing.T) { - pid := 
id.MustPluginID("xxx#1.1.1") - pid2 := id.MustPluginID("zzz#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") + pid2 := id.MustPluginID("zzz~1.1.1") pr := id.NewPropertyID() - wid := id.NewWidgetID().Ref() + wid := id.NewWidgetID() testCases := []struct { Name string PID, NewID id.PluginID @@ -209,11 +251,11 @@ func TestWidgetSystem_Replace(t *testing.T) { } func TestWidgetSystem_Properties(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() pr2 := id.NewPropertyID() - wid := id.NewWidgetID().Ref() - wid2 := id.NewWidgetID().Ref() + wid := id.NewWidgetID() + wid2 := id.NewWidgetID() testCases := []struct { Name string WS *WidgetSystem @@ -244,11 +286,11 @@ func TestWidgetSystem_Properties(t *testing.T) { } func TestWidgetSystem_Widgets(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() pr2 := id.NewPropertyID() - wid := id.NewWidgetID().Ref() - wid2 := id.NewWidgetID().Ref() + wid := id.NewWidgetID() + wid2 := id.NewWidgetID() testCases := []struct { Name string WS *WidgetSystem @@ -282,9 +324,9 @@ func TestWidgetSystem_Widgets(t *testing.T) { } func TestWidgetSystem_Widget(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() - wid := id.NewWidgetID().Ref() + wid := id.NewWidgetID() testCases := []struct { Name string PID id.PluginID @@ -323,9 +365,9 @@ func TestWidgetSystem_Widget(t *testing.T) { } func TestWidgetSystem_Has(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() - wid := id.NewWidgetID().Ref() + wid := id.NewWidgetID() testCases := []struct { Name string PID id.PluginID diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go index f0719836f..07551a1a2 100644 --- a/pkg/scene/widget_test.go +++ b/pkg/scene/widget_test.go @@ -1,7 +1,6 @@ package scene import ( - "errors" "testing" 
"github.com/reearth/reearth-backend/pkg/id" @@ -9,12 +8,12 @@ import ( ) func TestNewWidget(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() wid := id.NewWidgetID() testCases := []struct { Name string - Id *id.WidgetID + ID id.WidgetID Plugin id.PluginID Extension id.PluginExtensionID Property id.PropertyID @@ -30,29 +29,7 @@ func TestNewWidget(t *testing.T) { }{ { Name: "success new widget", - Id: wid.Ref(), - Plugin: pid, - Extension: "eee", - Property: pr, - Enabled: true, - Expected: struct { - Id id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID - Enabled bool - }{ - Id: wid, - Plugin: pid, - Extension: "eee", - Property: pr, - Enabled: true, - }, - err: nil, - }, - { - Name: "success nil id", - Id: nil, + ID: wid, Plugin: pid, Extension: "eee", Property: pr, @@ -74,7 +51,7 @@ func TestNewWidget(t *testing.T) { }, { Name: "fail empty extension", - Id: wid.Ref(), + ID: wid, Plugin: pid, Extension: "", Property: pr, @@ -87,30 +64,26 @@ func TestNewWidget(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res, err := NewWidget(tc.Id, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) + res, err := NewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) if err == nil { - if tc.Id == nil { - assert.NotNil(tt, res.ID()) - } else { - assert.Equal(tt, tc.Expected.Id, res.ID()) - } + assert.Equal(tt, tc.Expected.Id, res.ID()) assert.Equal(tt, tc.Expected.Property, res.Property()) assert.Equal(tt, tc.Expected.Extension, res.Extension()) assert.Equal(tt, tc.Expected.Enabled, res.Enabled()) assert.Equal(tt, tc.Expected.Plugin, res.Plugin()) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.ErrorIs(tt, err, tc.err) } }) } } func TestMustNewWidget(t *testing.T) { - pid := id.MustPluginID("xxx#1.1.1") + pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() wid := id.NewWidgetID() testCases := []struct { Name 
string - Id *id.WidgetID + ID id.WidgetID Plugin id.PluginID Extension id.PluginExtensionID Property id.PropertyID @@ -126,29 +99,7 @@ func TestMustNewWidget(t *testing.T) { }{ { Name: "success new widget", - Id: wid.Ref(), - Plugin: pid, - Extension: "eee", - Property: pr, - Enabled: true, - Expected: struct { - Id id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID - Enabled bool - }{ - Id: wid, - Plugin: pid, - Extension: "eee", - Property: pr, - Enabled: true, - }, - err: nil, - }, - { - Name: "success nil id", - Id: nil, + ID: wid, Plugin: pid, Extension: "eee", Property: pr, @@ -170,7 +121,7 @@ func TestMustNewWidget(t *testing.T) { }, { Name: "fail empty extension", - Id: wid.Ref(), + ID: wid, Plugin: pid, Extension: "", Property: pr, @@ -186,25 +137,20 @@ func TestMustNewWidget(t *testing.T) { var res *Widget defer func() { if r := recover(); r == nil { - if tc.Id == nil { - assert.NotNil(tt, res.ID()) - } else { - assert.Equal(tt, tc.Expected.Id, res.ID()) - } + assert.Equal(tt, tc.Expected.Id, res.ID()) assert.Equal(tt, tc.Expected.Property, res.Property()) assert.Equal(tt, tc.Expected.Extension, res.Extension()) assert.Equal(tt, tc.Expected.Enabled, res.Enabled()) assert.Equal(tt, tc.Expected.Plugin, res.Plugin()) } }() - res = MustNewWidget(tc.Id, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) - + res = MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) }) } } func TestWidget_SetEnabled(t *testing.T) { - res := MustNewWidget(id.NewWidgetID().Ref(), id.MustPluginID("xxx#1.1.1"), "eee", id.NewPropertyID(), false) + res := MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", id.NewPropertyID(), false) res.SetEnabled(true) assert.True(t, res.Enabled()) } diff --git a/schema.graphql b/schema.graphql index 749e82cbf..c3660a6ec 100644 --- a/schema.graphql +++ b/schema.graphql @@ -227,6 +227,7 @@ enum PublishmentStatus { type Plugin { id: PluginID! + sceneId: ID name: String! 
version: String! description: String! @@ -234,7 +235,8 @@ type Plugin { repositoryUrl: String! propertySchemaId: PropertySchemaID extensions: [PluginExtension!]! - scenePlugin(sceneId: ID!): ScenePlugin + scene: Scene @goField(forceResolver: true) + scenePlugin(sceneId: ID): ScenePlugin allTranslatedDescription: TranslatedString allTranslatedName: TranslatedString translatedName(lang: String): String! @@ -798,7 +800,9 @@ input UpdateProjectInput { } input UploadPluginInput { - file: Upload! + sceneId: ID! + file: Upload + url: URL } input CreateSceneInput { @@ -1146,6 +1150,8 @@ type DeleteProjectPayload { type UploadPluginPayload { plugin: Plugin! + scene: Scene! + scenePlugin: ScenePlugin! } type CreateScenePayload { @@ -1174,8 +1180,8 @@ type InstallPluginPayload { } type UninstallPluginPayload { + pluginId: PluginID! scene: Scene! - scenePlugin: ScenePlugin! } type UpgradePluginPayload { From 0b8c007da472c7c1ec6a13a584ecab5e52212f22 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Fri, 6 Aug 2021 10:07:55 +0300 Subject: [PATCH 062/253] fix: dataset layers are not exported correctly (#36) * fix: layer encoding * refactor: enhance code --- pkg/layer/encoding/czml.go | 69 +++++++++++++++++++------------- pkg/layer/encoding/geojson.go | 57 ++++++++++++++++---------- pkg/layer/encoding/kml.go | 75 ++++++++++++++++++++--------------- pkg/layer/encoding/shp.go | 22 ++++++---- 4 files changed, 135 insertions(+), 88 deletions(-) diff --git a/pkg/layer/encoding/czml.go b/pkg/layer/encoding/czml.go index c8ec42048..4e386761d 100644 --- a/pkg/layer/encoding/czml.go +++ b/pkg/layer/encoding/czml.go @@ -53,16 +53,29 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat case "marker": latlng := property.LatLng{} var height float64 - if li.Property.Field("location") != nil { - latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if f := li.Property.Field("location"); f != nil { + 
latlng, ok = f.PropertyValue.ValueLatLng() if !ok { - return nil, errors.New("invalid value type") + dsll := f.DatasetValue.ValueLatLng() + if dsll != nil { + latlng = property.LatLng{ + Lat: dsll.Lat, + Lng: dsll.Lng, + } + } else { + return nil, errors.New("invalid value type") + } } - if li.Property.Field("height") != nil { - height, ok = li.Property.Field("height").PropertyValue.ValueNumber() + if f := li.Property.Field("height"); f != nil { + height, ok = f.PropertyValue.ValueNumber() if !ok { - return nil, errors.New("invalid value type") + dsHeight := f.DatasetValue.ValueNumber() + if dsHeight != nil { + height = *dsHeight + } else { + return nil, errors.New("invalid value type") + } } position := czml.Position{ CartographicDegrees: []float64{latlng.Lng, latlng.Lat, height}, @@ -75,14 +88,14 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat feature.Position = &position } } - if li.Property.Field("pointColor") != nil { - pointColor, ok = li.Property.Field("pointColor").PropertyValue.ValueString() + if f := li.Property.Field("pointColor"); f != nil { + pointColor, ok = f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } } - if li.Property.Field("pointSize") != nil { - pointSize, ok = li.Property.Field("pointSize").PropertyValue.ValueNumber() + if f := li.Property.Field("pointSize"); f != nil { + pointSize, ok = f.PropertyValue.ValueNumber() if !ok { return nil, errors.New("invalid value type") } @@ -100,8 +113,8 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat var fill, stroke bool var fillColor, strokeColor *czml.Color var strokeWidth float64 - if li.Property.Field("polygon") != nil { - polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if f := li.Property.Field("polygon"); f != nil { + polygon, ok = f.PropertyValue.ValuePolygon() if !ok { return nil, errors.New("invalid value type") } @@ -111,20 +124,20 @@ func (e *CZMLEncoder) 
encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat } } } - if li.Property.Field("fill") != nil { - fill, ok = li.Property.Field("fill").PropertyValue.ValueBool() + if f := li.Property.Field("fill"); f != nil { + fill, ok = f.PropertyValue.ValueBool() if !ok { return nil, errors.New("invalid value type") } } - if li.Property.Field("stroke") != nil { - stroke, ok = li.Property.Field("stroke").PropertyValue.ValueBool() + if f := li.Property.Field("stroke"); f != nil { + stroke, ok = f.PropertyValue.ValueBool() if !ok { return nil, errors.New("invalid value type") } } - if li.Property.Field("fillColor") != nil { - fillStr, ok := li.Property.Field("fillColor").PropertyValue.ValueString() + if f := li.Property.Field("fillColor"); f != nil { + fillStr, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } @@ -133,8 +146,8 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat return nil, err } } - if li.Property.Field("strokeColor") != nil { - strokeStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if f := li.Property.Field("strokeColor"); f != nil { + strokeStr, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } @@ -143,8 +156,8 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat return nil, err } } - if li.Property.Field("strokeWidth") != nil { - strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if f := li.Property.Field("strokeWidth"); f != nil { + strokeWidth, ok = f.PropertyValue.ValueNumber() if !ok { return nil, errors.New("invalid value type") } @@ -163,8 +176,8 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat position := czml.Position{} var strokeColor *czml.Color var strokeWidth float64 - if li.Property.Field("coordinates") != nil { - polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if f := 
li.Property.Field("coordinates"); f != nil { + polyline, ok = f.PropertyValue.ValueCoordinates() if !ok { return nil, errors.New("invalid value type") } @@ -173,8 +186,8 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat } } - if li.Property.Field("strokeColor") != nil { - strokeStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if f := li.Property.Field("strokeColor"); f != nil { + strokeStr, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } @@ -183,8 +196,8 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat return nil, err } } - if li.Property.Field("strokeWidth") != nil { - strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if f := li.Property.Field("strokeWidth"); f != nil { + strokeWidth, ok = f.PropertyValue.ValueNumber() if !ok { return nil, errors.New("invalid value type") } diff --git a/pkg/layer/encoding/geojson.go b/pkg/layer/encoding/geojson.go index c7c36d908..eff43c531 100644 --- a/pkg/layer/encoding/geojson.go +++ b/pkg/layer/encoding/geojson.go @@ -51,15 +51,28 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso case "marker": latlng := property.LatLng{} var height float64 - if li.Property.Field("location") != nil { - latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if f := li.Property.Field("location"); f != nil { + latlng, ok = f.PropertyValue.ValueLatLng() if !ok { - return nil, errors.New("invalid value type") + dsll := f.DatasetValue.ValueLatLng() + if dsll != nil { + latlng = property.LatLng{ + Lat: dsll.Lat, + Lng: dsll.Lng, + } + } else { + return nil, errors.New("invalid value type") + } } - if li.Property.Field("height") != nil { - height, ok = li.Property.Field("height").PropertyValue.ValueNumber() + if f := li.Property.Field("height"); f != nil { + height, ok = f.PropertyValue.ValueNumber() if !ok { - return nil, 
errors.New("invalid value type") + dsHeight := f.DatasetValue.ValueNumber() + if dsHeight != nil { + height = *dsHeight + } else { + return nil, errors.New("invalid value type") + } } geo = geojson.NewPointGeometry([]float64{latlng.Lng, latlng.Lat, height}) } else { @@ -68,8 +81,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso res = geojson.NewFeature(geo) res.SetProperty("name", li.Name) } - if li.Property.Field("pointColor") != nil { - pointColor, ok := li.Property.Field("pointColor").PropertyValue.ValueString() + if f := li.Property.Field("pointColor"); f != nil { + pointColor, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } @@ -79,8 +92,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso } case "polygon": var polygon property.Polygon - if li.Property.Field("polygon") != nil { - polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if f := li.Property.Field("polygon"); f != nil { + polygon, ok = f.PropertyValue.ValuePolygon() if !ok { return nil, errors.New("invalid value type") } @@ -90,8 +103,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso res = geojson.NewFeature(geo) res.SetProperty("name", li.Name) } - if li.Property.Field("fillColor") != nil { - fillColor, ok := li.Property.Field("fillColor").PropertyValue.ValueString() + if f := li.Property.Field("fillColor"); f != nil { + fillColor, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } @@ -99,8 +112,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso res.SetProperty("fill", fillColor) } } - if li.Property.Field("strokeColor") != nil { - strokeColor, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if f := li.Property.Field("strokeColor"); f != nil { + strokeColor, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid 
value type") } @@ -108,8 +121,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso res.SetProperty("stroke", strokeColor) } } - if li.Property.Field("strokeWidth") != nil { - strokeWidth, ok := li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if f := li.Property.Field("strokeWidth"); f != nil { + strokeWidth, ok := f.PropertyValue.ValueNumber() if !ok { return nil, errors.New("invalid value type") } @@ -119,8 +132,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso } case "polyline": var polyline property.Coordinates - if li.Property.Field("coordinates") != nil { - polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if f := li.Property.Field("coordinates"); f != nil { + polyline, ok = f.PropertyValue.ValueCoordinates() if !ok { return nil, errors.New("invalid value type") } @@ -129,8 +142,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso res = geojson.NewFeature(geo) res.SetProperty("name", li.Name) } - if li.Property.Field("strokeColor") != nil { - strokeColor, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if f := li.Property.Field("strokeColor"); f != nil { + strokeColor, ok := f.PropertyValue.ValueString() if !ok { return nil, errors.New("invalid value type") } @@ -138,8 +151,8 @@ func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojso res.SetProperty("stroke", strokeColor) } } - if li.Property.Field("strokeWidth") != nil { - strokeWidth, ok := li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if f := li.Property.Field("strokeWidth"); f != nil { + strokeWidth, ok := f.PropertyValue.ValueNumber() if !ok { return nil, errors.New("invalid value type") } diff --git a/pkg/layer/encoding/kml.go b/pkg/layer/encoding/kml.go index 803af9166..463dd4e91 100644 --- a/pkg/layer/encoding/kml.go +++ b/pkg/layer/encoding/kml.go @@ -56,9 +56,9 @@ func (e *KMLEncoder) 
encodePointStyle(li *merging.SealedLayerItem) (*kml.SharedE var ok bool var imageSize float64 var pointColor color.Color - if li.Property.Field("image") != nil { - if li.Property.Field("image").PropertyValue != nil { - image, ok = li.Property.Field("image").PropertyValue.ValueURL() + if f := li.Property.Field("image"); f != nil { + if f.PropertyValue != nil { + image, ok = f.PropertyValue.ValueURL() if !ok { return nil, "", errors.New("invalid value type") } @@ -67,14 +67,14 @@ func (e *KMLEncoder) encodePointStyle(li *merging.SealedLayerItem) (*kml.SharedE } } } - if li.Property.Field("imageSize") != nil { - imageSize, ok = li.Property.Field("imageSize").PropertyValue.ValueNumber() + if f := li.Property.Field("imageSize"); f != nil { + imageSize, ok = f.PropertyValue.ValueNumber() if !ok { return nil, "", errors.New("invalid value type") } } - if li.Property.Field("pointColor") != nil { - colorStr, ok := li.Property.Field("pointColor").PropertyValue.ValueString() + if f := li.Property.Field("pointColor"); f != nil { + colorStr, ok := f.PropertyValue.ValueString() if !ok { return nil, "", errors.New("invalid value type") } @@ -111,21 +111,21 @@ func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.Share var strokeWidth float64 var err error var ok bool - if li.Property.Field("fill") != nil { - fill, ok = li.Property.Field("fill").PropertyValue.ValueBool() + if f := li.Property.Field("fill"); f != nil { + fill, ok = f.PropertyValue.ValueBool() if !ok { return nil, "", errors.New("invalid value type") } } - if li.Property.Field("stroke") != nil { - stroke, ok = li.Property.Field("stroke").PropertyValue.ValueBool() + if f := li.Property.Field("stroke"); f != nil { + stroke, ok = f.PropertyValue.ValueBool() if !ok { return nil, "", errors.New("invalid value type") } } - if li.Property.Field("fillColor") != nil { - colorStr, ok := li.Property.Field("fillColor").PropertyValue.ValueString() + if f := li.Property.Field("fillColor"); f != nil { + 
colorStr, ok := f.PropertyValue.ValueString() if !ok { return nil, "", errors.New("invalid value type") } @@ -134,8 +134,8 @@ func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.Share return nil, "", err } } - if li.Property.Field("strokeColor") != nil { - colorStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if f := li.Property.Field("strokeColor"); f != nil { + colorStr, ok := f.PropertyValue.ValueString() if !ok { return nil, "", errors.New("invalid value type") } @@ -144,8 +144,8 @@ func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.Share return nil, "", err } } - if li.Property.Field("strokeWidth") != nil { - strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if f := li.Property.Field("strokeWidth"); f != nil { + strokeWidth, ok = f.PropertyValue.ValueNumber() if !ok { return nil, "", errors.New("invalid value type") } @@ -192,8 +192,8 @@ func (e *KMLEncoder) encodePolylineStyle(li *merging.SealedLayerItem) (*kml.Shar var err error var ok bool - if li.Property.Field("strokeColor") != nil { - colorStr, ok := li.Property.Field("strokeColor").PropertyValue.ValueString() + if f := li.Property.Field("strokeColor"); f != nil { + colorStr, ok := f.PropertyValue.ValueString() if !ok { return nil, "", errors.New("invalid value type") } @@ -202,8 +202,8 @@ func (e *KMLEncoder) encodePolylineStyle(li *merging.SealedLayerItem) (*kml.Shar return nil, "", err } } - if li.Property.Field("strokeWidth") != nil { - strokeWidth, ok = li.Property.Field("strokeWidth").PropertyValue.ValueNumber() + if f := li.Property.Field("strokeWidth"); f != nil { + strokeWidth, ok = f.PropertyValue.ValueNumber() if !ok { return nil, "", errors.New("invalid value type") } @@ -254,16 +254,29 @@ func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundE layerTag = kml.Point() latlng := property.LatLng{} var height float64 - if li.Property.Field("location") != nil { - latlng, 
ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if f := li.Property.Field("location"); f != nil { + latlng, ok = f.PropertyValue.ValueLatLng() if !ok { - return nil, errors.New("invalid value type") + dsll := f.DatasetValue.ValueLatLng() + if dsll != nil { + latlng = property.LatLng{ + Lat: dsll.Lat, + Lng: dsll.Lng, + } + } else { + return nil, errors.New("invalid value type") + } } } - if li.Property.Field("height") != nil { - height, ok = li.Property.Field("height").PropertyValue.ValueNumber() + if f := li.Property.Field("height"); f != nil { + height, ok = f.PropertyValue.ValueNumber() if !ok { - return nil, errors.New("invalid value type") + dsHeight := f.DatasetValue.ValueNumber() + if dsHeight != nil { + height = *dsHeight + } else { + return nil, errors.New("invalid value type") + } } } layerTag = layerTag.Add( @@ -276,8 +289,8 @@ func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundE case "polygon": layerTag = kml.Polygon() polygon := property.Polygon{} - if li.Property.Field("polygon") != nil { - polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if f := li.Property.Field("polygon"); f != nil { + polygon, ok = f.PropertyValue.ValuePolygon() if !ok { return nil, errors.New("invalid value type") } @@ -309,8 +322,8 @@ func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundE case "polyline": layerTag = kml.LineString() polyline := property.Coordinates{} - if li.Property.Field("coordinates") != nil { - polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if f := li.Property.Field("coordinates"); f != nil { + polyline, ok = f.PropertyValue.ValueCoordinates() if !ok { return nil, errors.New("invalid value type") } diff --git a/pkg/layer/encoding/shp.go b/pkg/layer/encoding/shp.go index ce866c2ac..2f75a7fa0 100644 --- a/pkg/layer/encoding/shp.go +++ b/pkg/layer/encoding/shp.go @@ -86,10 +86,18 @@ func (e *SHPEncoder) encodeLayer(li 
*merging.SealedLayerItem) (shp.Shape, shp.Sh case "marker": shapeType = shp.POINT latlng := property.LatLng{} - if li.Property.Field("location") != nil { - latlng, ok = li.Property.Field("location").PropertyValue.ValueLatLng() + if f := li.Property.Field("location"); f != nil { + latlng, ok = f.PropertyValue.ValueLatLng() if !ok { - return nil, 0, errors.New("invalid value type") + dsll := f.DatasetValue.ValueLatLng() + if dsll != nil { + latlng = property.LatLng{ + Lat: dsll.Lat, + Lng: dsll.Lng, + } + } else { + return nil, 0, errors.New("invalid value type") + } } sh = &shp.Point{ X: latlng.Lng, @@ -100,8 +108,8 @@ func (e *SHPEncoder) encodeLayer(li *merging.SealedLayerItem) (shp.Shape, shp.Sh case "polygon": shapeType = shp.POLYGON polygon := property.Polygon{} - if li.Property.Field("polygon") != nil { - polygon, ok = li.Property.Field("polygon").PropertyValue.ValuePolygon() + if f := li.Property.Field("polygon"); f != nil { + polygon, ok = f.PropertyValue.ValuePolygon() if !ok { return nil, 0, errors.New("invalid value type") } @@ -114,8 +122,8 @@ func (e *SHPEncoder) encodeLayer(li *merging.SealedLayerItem) (shp.Shape, shp.Sh case "polyline": shapeType = shp.POLYLINE polyline := property.Coordinates{} - if li.Property.Field("coordinates") != nil { - polyline, ok = li.Property.Field("coordinates").PropertyValue.ValueCoordinates() + if f := li.Property.Field("coordinates"); f != nil { + polyline, ok = f.PropertyValue.ValueCoordinates() if !ok { return nil, 0, errors.New("invalid value type") } From d8c8cdacda3f1bf26fad257f3f265f2c0a175c2f Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Fri, 6 Aug 2021 13:52:49 +0300 Subject: [PATCH 063/253] fix: hide parent infobox fields when child infobox is not nil (#37) --- pkg/layer/merged.go | 2 +- pkg/layer/merged_test.go | 23 +---------------------- pkg/layer/merging/merger_test.go | 23 ++--------------------- 3 files changed, 4 insertions(+), 44 deletions(-) diff --git 
a/pkg/layer/merged.go b/pkg/layer/merged.go index 566109c7a..59f900c8b 100644 --- a/pkg/layer/merged.go +++ b/pkg/layer/merged.go @@ -60,7 +60,7 @@ func MergeInfobox(o *Infobox, p *Infobox, linked *id.DatasetID) *MergedInfobox { } var ibf []*InfoboxField - if o != nil && o.Count() > 0 { + if o != nil { ibf = o.Fields() } else if p != nil { ibf = p.Fields() diff --git a/pkg/layer/merged_test.go b/pkg/layer/merged_test.go index 8676b29e4..d98b9c3ae 100644 --- a/pkg/layer/merged_test.go +++ b/pkg/layer/merged_test.go @@ -292,28 +292,7 @@ func TestMerge(t *testing.T) { Parent: &ib2pr, LinkedDataset: &dataset1, }, - Fields: []*MergedInfoboxField{ - { - ID: f2.ID(), - Plugin: p, - Extension: e, - Property: &property.MergedMetadata{ - Original: &f2pr, - Parent: nil, - LinkedDataset: &dataset1, - }, - }, - { - ID: f3.ID(), - Plugin: p, - Extension: e, - Property: &property.MergedMetadata{ - Original: &f3pr, - Parent: nil, - LinkedDataset: &dataset1, - }, - }, - }, + Fields: []*MergedInfoboxField{}, }, } diff --git a/pkg/layer/merging/merger_test.go b/pkg/layer/merging/merger_test.go index edbf1a03c..cc9263dc9 100644 --- a/pkg/layer/merging/merger_test.go +++ b/pkg/layer/merging/merger_test.go @@ -55,23 +55,13 @@ func TestMergeLayer(t *testing.T) { MustBuild(), }) - // assert - expectedInfoboxField := layer.MergedInfoboxField{ - ID: l1if1, - Plugin: p, - Extension: e, - Property: &property.MergedMetadata{ - Original: &fpr, - LinkedDataset: &dataset1, - }, - } expectedInfobox := layer.MergedInfobox{ Property: &property.MergedMetadata{ Original: &ib1pr, Parent: &ib2pr, LinkedDataset: &dataset1, }, - Fields: []*layer.MergedInfoboxField{&expectedInfoboxField}, + Fields: []*layer.MergedInfoboxField{}, } expectedInfoboxField2 := layer.MergedInfoboxField{ ID: l1if1, @@ -141,16 +131,7 @@ func TestMergeLayer(t *testing.T) { Schema: ps, LinkedDataset: &dataset1, }, - Fields: []*MergedInfoboxField{ - { - MergedInfoboxField: expectedInfoboxField, - Property: &property.Merged{ - 
Original: &fpr, - Schema: ps, - LinkedDataset: &dataset1, - }, - }, - }, + Fields: []*MergedInfoboxField{}, }, Property: &property.Merged{ Original: &itemProperty, From be00da95c3fad6c9e5884be61fcb71f9f065aeba Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 9 Aug 2021 22:02:34 +0900 Subject: [PATCH 064/253] fix: mongo.PropertySchema.FindByIDs, propertySchemaID.Equal --- .../infrastructure/memory/property_schema.go | 31 ++++++++++------ .../infrastructure/mongo/property_schema.go | 27 +++----------- pkg/id/plugin.go | 2 +- pkg/id/property_schema.go | 5 +++ pkg/id/property_schema_test.go | 36 ++++++------------- 5 files changed, 42 insertions(+), 59 deletions(-) diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index 475e975db..9d2aa51de 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -14,12 +14,16 @@ import ( type PropertySchema struct { lock sync.Mutex - data map[id.PropertySchemaID]property.Schema + data map[string]*property.Schema } func NewPropertySchema() repo.PropertySchema { - return &PropertySchema{ - data: map[id.PropertySchemaID]property.Schema{}, + return &PropertySchema{} +} + +func (r *PropertySchema) initMap() { + if r.data != nil { + r.data = map[string]*property.Schema{} } } @@ -30,9 +34,11 @@ func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) ( if ps := builtin.GetPropertySchema(id); ps != nil { return ps, nil } - p, ok := r.data[id] + + r.initMap() + p, ok := r.data[id.String()] if ok { - return &p, nil + return p, nil } return nil, rerror.ErrNotFound } @@ -41,14 +47,15 @@ func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaI r.lock.Lock() defer r.lock.Unlock() + r.initMap() result := property.SchemaList{} for _, id := range ids { if ps := builtin.GetPropertySchema(id); ps != nil { result = append(result, ps) continue } - if d, ok := r.data[id]; ok { - 
result = append(result, &d) + if d, ok := r.data[id.String()]; ok { + result = append(result, d) } else { result = append(result, nil) } @@ -60,10 +67,11 @@ func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { r.lock.Lock() defer r.lock.Unlock() + r.initMap() if p.ID().System() { return errors.New("cannnot save system property schema") } - r.data[p.ID()] = *p + r.data[p.ID().String()] = p return nil } @@ -71,6 +79,7 @@ func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) err r.lock.Lock() defer r.lock.Unlock() + r.initMap() for _, ps := range p { if err := r.Save(ctx, ps); err != nil { return err @@ -83,7 +92,8 @@ func (r *PropertySchema) Remove(ctx context.Context, id id.PropertySchemaID) err r.lock.Lock() defer r.lock.Unlock() - delete(r.data, id) + r.initMap() + delete(r.data, id.String()) return nil } @@ -91,8 +101,9 @@ func (r *PropertySchema) RemoveAll(ctx context.Context, ids []id.PropertySchemaI r.lock.Lock() defer r.lock.Unlock() + r.initMap() for _, id := range ids { - delete(r.data, id) + delete(r.data, id.String()) } return nil } diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index 9cfcc1383..2595bdd15 100644 --- a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -56,9 +56,9 @@ func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySch if len(ids2) > 0 { filter := bson.D{{Key: "id", Value: bson.D{{ - Key: "$in", Value: id.PropertySchemaIDToKeys(ids), + Key: "$in", Value: id.PropertySchemaIDToKeys(ids2), }}}} - dst := make(property.SchemaList, 0, len(ids)) + dst := make(property.SchemaList, 0, len(ids2)) res, err = r.find(ctx, dst, filter) if err != nil { return nil, err @@ -74,7 +74,7 @@ func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySch } found := false for _, p := range res { - if p != nil && p.ID() == id { + if p != nil && 
p.ID().Equal(id) { results = append(results, p) found = true break @@ -85,7 +85,7 @@ func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySch } } - return filterPropertySchemas(ids, results), nil + return results, nil } func (r *propertySchemaRepo) Save(ctx context.Context, m *property.Schema) error { @@ -143,22 +143,3 @@ func (r *propertySchemaRepo) findOne(ctx context.Context, filter bson.D) (*prope } return c.Rows[0], nil } - -func filterPropertySchemas(ids []id.PropertySchemaID, rows property.SchemaList) property.SchemaList { - res := make(property.SchemaList, 0, len(ids)) - for _, id := range ids { - var r2 *property.Schema - if ps := builtin.GetPropertySchema(id); ps != nil { - r2 = ps - } else { - for _, r := range rows { - if r.ID() == id { - r2 = r - break - } - } - } - res = append(res, r2) - } - return res -} diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go index c6a6ac8d1..d13ed808c 100644 --- a/pkg/id/plugin.go +++ b/pkg/id/plugin.go @@ -176,7 +176,7 @@ func (d *PluginID) StringRef() *string { return &id } -// Equal returns if two IDs are quivarent. +// Equal returns true if two IDs are equal. func (d PluginID) Equal(d2 PluginID) bool { if d.sys { return d2.sys diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go index 5a4fd9d5e..6530825b6 100644 --- a/pkg/id/property_schema.go +++ b/pkg/id/property_schema.go @@ -103,6 +103,11 @@ func (d PropertySchemaID) IsNil() bool { return d.plugin == PluginID{} && d.id == "" } +// Equal returns true if two IDs are equal. +func (d PropertySchemaID) Equal(d2 PropertySchemaID) bool { + return d.plugin.Equal(d2.plugin) && d.id == d2.id +} + // StringRef returns a reference of a string representation. 
func (d *PropertySchemaID) StringRef() *string { if d == nil { diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index e234b5dd2..7adcb8562 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -60,6 +60,7 @@ func TestPropertySchemaIDFrom(t *testing.T) { }{result: PropertySchemaID{}, err: ErrInvalidID}, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { @@ -72,7 +73,6 @@ func TestPropertySchemaIDFrom(t *testing.T) { assert.Equal(tt, tc.expected.result, result) assert.Nil(tt, err) } - }) } } @@ -152,7 +152,6 @@ func TestMustPropertySchemaID(t *testing.T) { result := MustPropertySchemaID(tc.input) assert.Equal(tt, tc.expected.result, result) } - }) } } @@ -160,7 +159,6 @@ func TestMustPropertySchemaID(t *testing.T) { func TestMustPropertySchemaIDFromExtension(t *testing.T) { pluginID := MustPluginID("test~2.0.0") pluginExtensionID := PluginExtensionID("test2") - propertySchemaID := MustPropertySchemaIDFromExtension(pluginID, pluginExtensionID) assert.NotNil(t, propertySchemaID) @@ -205,6 +203,7 @@ func TestPropertySchemaIDFromRef(t *testing.T) { expected: nil, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { @@ -218,7 +217,6 @@ func TestPropertySchemaIDFromRef(t *testing.T) { func TestPropertySchemaID_ID(t *testing.T) { propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") - assert.Equal(t, propertySchemaID.ID(), "test") } @@ -229,73 +227,62 @@ func TestPropertySchemaID_Plugin(t *testing.T) { func TestPropertySchemaID_System(t *testing.T) { propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") - assert.False(t, propertySchemaID.System()) - extinctionName := schemaSystemIDPrefix propertySchemaID = MustPropertySchemaIDFromExtension(MustPluginID("test~2.0.0"), *PluginExtensionIDFromRef(&extinctionName)) - assert.True(t, propertySchemaID.System()) - propertySchemaID = MustPropertySchemaID("Test~2.0.0/" + schemaSystemIDPrefix) - 
assert.True(t, propertySchemaID.System()) } func TestPropertySchemaID_String(t *testing.T) { propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") - assert.Equal(t, propertySchemaID.String(), "Test~2.0.0/test") } func TestPropertySchemaID_Ref(t *testing.T) { propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") - assert.Equal(t, &propertySchemaID, propertySchemaID.Ref()) } func TestPropertySchemaID_CopyRef(t *testing.T) { propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") - assert.Equal(t, propertySchemaID, *propertySchemaID.CopyRef()) - - assert.False(t, propertySchemaID.Ref() == propertySchemaID.CopyRef()) + assert.NotSame(t, propertySchemaID.Ref(), propertySchemaID.CopyRef()) } func TestPropertySchemaID_IsNil(t *testing.T) { propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") - assert.False(t, propertySchemaID.IsNil()) - propertySchemaID = PropertySchemaID{} - assert.True(t, propertySchemaID.IsNil()) } -func TestPropertySchemaID_StringRef(t *testing.T) { +func TestPropertySchemaID_Equal(t *testing.T) { propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") + propertySchemaID2, _ := PropertySchemaIDFrom("test~2.0.0/test") + propertySchemaID3, _ := PropertySchemaIDFrom("test~2.0.1/test") + assert.True(t, propertySchemaID.Equal(propertySchemaID2)) + assert.False(t, propertySchemaID.Equal(propertySchemaID3)) +} +func TestPropertySchemaID_StringRef(t *testing.T) { + propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") ref := &propertySchemaID - assert.Equal(t, *ref.StringRef(), ref.String()) } func TestPropertySchemaID_MarshalText(t *testing.T) { propertySchemaID, _ := PropertySchemaIDFrom("test~2.0.0/test") - res, err := propertySchemaID.MarshalText() - assert.Nil(t, err) assert.Equal(t, []byte("test~2.0.0/test"), res) } func TestPropertySchemaID_UnmarshalText(t *testing.T) { text := []byte("test~2.0.0/test") - propertySchemaID := &PropertySchemaID{} - err := propertySchemaID.UnmarshalText(text) - assert.Nil(t, 
err) assert.Equal(t, "test~2.0.0/test", propertySchemaID.String()) } @@ -423,7 +410,6 @@ func TestPropertySchemaIDsFrom(t *testing.T) { assert.Equal(tt, tc.expected.res, res) assert.Nil(tt, err) } - }) } } From a4770ecfe858b6bd8a87e6716d7a03bc5d817a1a Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 9 Aug 2021 22:31:34 +0900 Subject: [PATCH 065/253] fix: gql propertySchemaGroup.translatedTitle resolver --- internal/graphql/resolver_property.go | 112 ----------------- internal/graphql/resolver_property_schema.go | 125 +++++++++++++++++++ 2 files changed, 125 insertions(+), 112 deletions(-) create mode 100644 internal/graphql/resolver_property_schema.go diff --git a/internal/graphql/resolver_property.go b/internal/graphql/resolver_property.go index 0b9429a67..467645b2f 100644 --- a/internal/graphql/resolver_property.go +++ b/internal/graphql/resolver_property.go @@ -18,22 +18,10 @@ func (r *Resolver) PropertyField() PropertyFieldResolver { return &propertyFieldResolver{r} } -func (r *Resolver) PropertySchemaField() PropertySchemaFieldResolver { - return &propertySchemaFieldResolver{r} -} - -func (r *Resolver) PropertySchemaFieldChoice() PropertySchemaFieldChoiceResolver { - return &propertySchemaFieldChoiceResolver{r} -} - func (r *Resolver) PropertyFieldLink() PropertyFieldLinkResolver { return &propertyFieldLinkResolver{r} } -func (r *Resolver) PropertyLinkableFields() PropertyLinkableFieldsResolver { - return &propertyLinkableFieldsResolver{r} -} - func (r *Resolver) MergedProperty() MergedPropertyResolver { return &mergedPropertyResolver{r} } @@ -54,10 +42,6 @@ func (r *Resolver) PropertyGroup() PropertyGroupResolver { return &propertyGroupResolver{r} } -func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { - return &propertySchemaGroupResolver{r} -} - type propertyResolver struct{ *Resolver } func (r *propertyResolver) Schema(ctx context.Context, obj *graphql1.Property) (*graphql1.PropertySchema, error) { @@ -109,8 +93,6 @@ func (r 
*propertyResolver) Merged(ctx context.Context, obj *graphql1.Property) ( } type propertyFieldResolver struct{ *Resolver } -type propertySchemaFieldResolver struct{ *Resolver } -type propertySchemaFieldChoiceResolver struct{ *Resolver } func (r *propertyFieldResolver) Parent(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.Property, error) { exit := trace(ctx) @@ -145,37 +127,6 @@ func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *graphql1.P return actualValue(datasetLoader, obj.Value, obj.Links, false) } -func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - - if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Name, nil -} - -// deprecated -func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - - if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Name, nil -} - -func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - - if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Description, nil -} - type propertyFieldLinkResolver struct{ *Resolver } func (r *propertyFieldLinkResolver) Dataset(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.Dataset, error) { @@ -217,37 +168,6 @@ func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj return ds.Field(obj.DatasetSchemaFieldID), err } -type propertyLinkableFieldsResolver struct{ *Resolver } - -func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchema, error) { - exit := 
trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) -} - -func (r *propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { - exit := trace(ctx) - defer exit() - - if obj.Latlng == nil { - return nil, nil - } - ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) - return ps.Field(*obj.Latlng), err -} - -func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { - exit := trace(ctx) - defer exit() - - if obj.URL == nil { - return nil, nil - } - ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) - return ps.Field(*obj.URL), err -} - type mergedPropertyResolver struct{ *Resolver } func (r *mergedPropertyResolver) Original(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) { @@ -508,35 +428,3 @@ func actualValue(datasetLoader dataloader.DatasetDataLoader, value interface{}, } return nil, nil } - -type propertySchemaGroupResolver struct{ *Resolver } - -func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *graphql1.PropertySchemaGroup) (*graphql1.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) -} - -func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaGroup, lang *string) (string, error) { - if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { - return s, nil - } - t := obj.Title - return *t, nil -} - -func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { - if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Label, nil -} - -// deprecated 
-func (r *propertySchemaFieldChoiceResolver) TranslatedLabel(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { - if s, ok := obj.AllTranslatedLabel[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Label, nil -} diff --git a/internal/graphql/resolver_property_schema.go b/internal/graphql/resolver_property_schema.go new file mode 100644 index 000000000..b6b1bfa1d --- /dev/null +++ b/internal/graphql/resolver_property_schema.go @@ -0,0 +1,125 @@ +package graphql + +import ( + "context" + + graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/graphql/dataloader" +) + +func (r *Resolver) PropertySchemaField() PropertySchemaFieldResolver { + return &propertySchemaFieldResolver{r} +} + +func (r *Resolver) PropertySchemaFieldChoice() PropertySchemaFieldChoiceResolver { + return &propertySchemaFieldChoiceResolver{r} +} + +func (r *Resolver) PropertyLinkableFields() PropertyLinkableFieldsResolver { + return &propertyLinkableFieldsResolver{r} +} + +func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { + return &propertySchemaGroupResolver{r} +} + +type propertySchemaFieldResolver struct{ *Resolver } + +func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + +// deprecated +func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + +func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { + exit := 
trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Description, nil +} + +type propertyLinkableFieldsResolver struct{ *Resolver } + +func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { + exit := trace(ctx) + defer exit() + + if obj.Latlng == nil { + return nil, nil + } + ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return ps.Field(*obj.Latlng), err +} + +func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { + exit := trace(ctx) + defer exit() + + if obj.URL == nil { + return nil, nil + } + ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return ps.Field(*obj.URL), err +} + +type propertySchemaGroupResolver struct{ *Resolver } + +func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *graphql1.PropertySchemaGroup) (*graphql1.PropertySchema, error) { + exit := trace(ctx) + defer exit() + + return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) +} + +func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaGroup, lang *string) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + + if obj.Title == nil { + return "", nil + } + return *obj.Title, nil +} + +type propertySchemaFieldChoiceResolver struct{ *Resolver } + +func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj 
*graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { + if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Label, nil +} + +// deprecated +func (r *propertySchemaFieldChoiceResolver) TranslatedLabel(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { + if s, ok := obj.AllTranslatedLabel[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Label, nil +} From 8a64591f437f896c4bfb8feda02800a9d50174da Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 9 Aug 2021 23:56:17 +0900 Subject: [PATCH 066/253] fix: use PropertySchemaID.Equal --- internal/infrastructure/fs/property_schema.go | 2 +- pkg/property/property.go | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go index 064fbdf6c..2ae587a7d 100644 --- a/internal/infrastructure/fs/property_schema.go +++ b/internal/infrastructure/fs/property_schema.go @@ -35,7 +35,7 @@ func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (* if ps == nil { continue } - if ps.ID() == i { + if ps.ID().Equal(i) { return ps, nil } } diff --git a/pkg/property/property.go b/pkg/property/property.go index 55be1412f..389ed44f5 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -269,7 +269,7 @@ func (p *Property) UnlinkAllByDataset(s id.DatasetSchemaID, ds id.DatasetID) { } func (p *Property) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, *GroupList, *Group, bool) { - if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { return nil, nil, nil, false } @@ -289,7 +289,7 @@ func (p *Property) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, *GroupLis } func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) { - if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() 
{ + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { return nil, nil } @@ -320,7 +320,7 @@ func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) } func (p *Property) GetOrCreateGroup(ps *Schema, ptr *Pointer) (*Group, *GroupList) { - if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { return nil, nil } @@ -339,7 +339,7 @@ func (p *Property) GetOrCreateGroup(ps *Schema, ptr *Pointer) (*Group, *GroupLis } func (p *Property) GetOrCreateGroupList(ps *Schema, ptr *Pointer) *GroupList { - if p == nil || ps == nil || ptr == nil || ps.ID() != p.Schema() { + if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { return nil } From 41c3771c64548dd7b65d18b875ab540c6bd6ddb8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 10 Aug 2021 00:12:44 +0900 Subject: [PATCH 067/253] chore: add internal error log --- pkg/rerror/error.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/rerror/error.go b/pkg/rerror/error.go index 3632a97b9..892be4bec 100644 --- a/pkg/rerror/error.go +++ b/pkg/rerror/error.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/pkg/errors" + "github.com/reearth/reearth-backend/pkg/log" ) var ( @@ -22,6 +23,7 @@ type ErrInternal struct { } func ErrInternalBy(err error) error { + log.Errorf("internal error: %s", err.Error()) return &ErrInternal{ err: Error{ Label: errInternal, From 1c3cf1587ce8cf35065a9bfb9668bf2075678d21 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 10 Aug 2021 00:29:27 +0900 Subject: [PATCH 068/253] fix: use PropertySchemaID.Equal --- pkg/layer/infobox.go | 2 +- pkg/layer/infobox_field.go | 2 +- pkg/layer/layer.go | 2 +- pkg/property/builder.go | 2 +- pkg/property/group.go | 6 +++--- pkg/property/group_list.go | 4 ++-- pkg/property/merged.go | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/pkg/layer/infobox.go b/pkg/layer/infobox.go index 610c7829f..001c9e84c 100644 --- 
a/pkg/layer/infobox.go +++ b/pkg/layer/infobox.go @@ -165,7 +165,7 @@ func (i *Infobox) ValidateProperties(pm property.Map) error { if lp == nil { return errors.New("property does not exist") } - if lp.Schema() != builtin.PropertySchemaIDInfobox { + if !lp.Schema().Equal(builtin.PropertySchemaIDInfobox) { return errors.New("property has a invalid schema") } diff --git a/pkg/layer/infobox_field.go b/pkg/layer/infobox_field.go index 48a2e1b6c..70c036fce 100644 --- a/pkg/layer/infobox_field.go +++ b/pkg/layer/infobox_field.go @@ -48,7 +48,7 @@ func (i *InfoboxField) ValidateProperty(pm property.Map) error { if lp == nil { return errors.New("property does not exist") } - if lp.Schema() != id.MustPropertySchemaIDFromExtension(i.plugin, i.extension) { + if !lp.Schema().Equal(id.MustPropertySchemaIDFromExtension(i.plugin, i.extension)) { return errors.New("property has a invalid schema") } diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go index 921dfd997..a22586686 100644 --- a/pkg/layer/layer.go +++ b/pkg/layer/layer.go @@ -211,7 +211,7 @@ func (l *layerBase) ValidateProperties(pm property.Map) error { return errors.New("layer property does not exist") } - if lp.Schema() != psid { + if !lp.Schema().Equal(psid) { return errors.New("layer property has a invalid schema") } } else if l.plugin != nil || l.extension != nil { diff --git a/pkg/property/builder.go b/pkg/property/builder.go index 22e768601..c39156803 100644 --- a/pkg/property/builder.go +++ b/pkg/property/builder.go @@ -33,7 +33,7 @@ func (b *Builder) Build() (*Property, error) { return nil, ErrInvalidPropertySchemaID } for _, i := range b.p.items { - if i.Schema() != b.p.schema { + if !i.Schema().Equal(b.p.schema) { return nil, ErrInvalidItem } } diff --git a/pkg/property/group.go b/pkg/property/group.go index b07976461..c5f252148 100644 --- a/pkg/property/group.go +++ b/pkg/property/group.go @@ -163,7 +163,7 @@ func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset // 
GetOrCreateField _ func (g *Group) GetOrCreateField(ps *Schema, fid id.PropertySchemaFieldID) (*Field, bool) { - if g == nil || ps == nil || g.Schema() != ps.ID() { + if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { return nil, false } psg := ps.Group(g.SchemaGroup()) @@ -249,7 +249,7 @@ func (g *Group) MigrateDataset(q DatasetMigrationParam) { } func (g *Group) UpdateNameFieldValue(ps *Schema, value *Value) error { - if g == nil || ps == nil || g.Schema() != ps.ID() { + if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { return nil } if psg := ps.GroupByPointer(NewPointer(&g.itemBase.SchemaGroup, nil, nil)); psg != nil { @@ -269,7 +269,7 @@ func (p *Group) ValidateSchema(ps *SchemaGroup) error { if ps == nil { return errors.New("invalid schema") } - if p.Schema() != ps.Schema() { + if !p.Schema().Equal(ps.Schema()) { return errors.New("invalid schema id") } if p.SchemaGroup() != ps.ID() { diff --git a/pkg/property/group_list.go b/pkg/property/group_list.go index c8b7e4fe8..631765aeb 100644 --- a/pkg/property/group_list.go +++ b/pkg/property/group_list.go @@ -339,7 +339,7 @@ func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { // CreateAndAddListItem _ func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { - if g == nil || ps == nil || g.Schema() != ps.ID() { + if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { return nil } psg := ps.Group(g.SchemaGroup()) @@ -377,7 +377,7 @@ func (p *GroupList) ValidateSchema(ps *SchemaGroup) error { if ps == nil { return errors.New("invalid schema") } - if p.Schema() != ps.Schema() { + if !p.Schema().Equal(ps.Schema()) { return errors.New("invalid schema id") } if p.SchemaGroup() != ps.ID() { diff --git a/pkg/property/merged.go b/pkg/property/merged.go index 955756750..cff3660ac 100644 --- a/pkg/property/merged.go +++ b/pkg/property/merged.go @@ -109,7 +109,7 @@ func (f *MergedField) DatasetValue(ctx context.Context, d dataset.GraphLoader) ( // Merge merges two 
properties func Merge(o *Property, p *Property, linked *id.DatasetID) *Merged { - if o == nil && p == nil || o != nil && p != nil && o.Schema() != p.Schema() { + if o == nil && p == nil || o != nil && p != nil && !o.Schema().Equal(p.Schema()) { return nil } From 108711a96f1735bead785b1213c5078c63d5dee1 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 10 Aug 2021 01:40:23 +0900 Subject: [PATCH 069/253] feat: new primitives, new properties on primitives --- pkg/builtin/manifest.yml | 400 ++++++++++++++++++++++++++++++++++-- pkg/builtin/manifest_ja.yml | 241 +++++++++++++++++++--- 2 files changed, 601 insertions(+), 40 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index c306066ae..3a8d03f2c 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -22,6 +22,22 @@ extensions: type: bool title: Terrain description: Show elevation when close to the surface. + - id: terrain + type: bool + title: Terrain + description: Show elevation when close to the surface. + # NOT SUPPORTED YET + # - id: terrainExaggeration + # type: number + # title: Terrain exaggeration + # defaultValue: 1 + # - id: terrainExaggerationRelativeHeight + # type: number + # title: Terrain exaggeration relative height + - id: depthTestAgainstTerrain + type: bool + title: Hide objects under terrain + description: Hides objects under the terrain. Depending on the loading status of the terrain, objects may be shown or hidden. - id: skybox type: bool title: Sky @@ -108,6 +124,10 @@ extensions: title: Sky atmosphere defaultValue: true description: Display an upper atmospheric layer. + - id: shadows + type: bool + title: Shadow + description: Display shadows on the Earth. Shadows for each layers should be also enabled to see them. - id: fog type: bool title: Fog @@ -139,6 +159,14 @@ extensions: description: 'Set saturation of the fog. 
Min: -1 Max: 1' min: -1 max: 1 + - id: timeline + title: Timeline + fields: + - id: animation + type: bool + title: Animation + defaultValue: false + description: Enables anmation. If enabled, each 3D models can animate. - id: googleAnalytics title: Google Analytics description: Set your Google Analytics tracking ID and analyze how your published project is being viewed. @@ -200,6 +228,18 @@ extensions: defaultValue: 0 min: 0 suffix: m + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground - id: style type: string title: Style @@ -227,6 +267,21 @@ extensions: field: style type: string value: point + - id: pointOutlineColor + type: string + title: Point outline color + ui: color + availableIf: + field: style + type: string + value: point + - id: pointOutlineWidth + type: number + title: Point outline width + availableIf: + field: style + type: string + value: point - id: image type: url title: Image URL @@ -276,6 +331,14 @@ extensions: field: style type: string value: image + - id: imageColor + type: string + title: Image color + ui: color + availableIf: + field: style + type: string + value: image - id: imageCrop type: string title: Image crop @@ -303,36 +366,36 @@ extensions: title: Shadow color ui: color availableIf: - field: imageShadow - type: bool - value: true + field: style + type: string + value: image - id: imageShadowBlur type: number title: Shadow radius defaultValue: 3 suffix: px availableIf: - field: imageShadow - type: bool - value: true + field: style + type: string + value: image - id: imageShadowPositionX type: number title: Shadow X suffix: px defaultValue: 0 availableIf: - field: imageShadow - type: bool - value: true + field: style + type: string + value: image - id: 
imageShadowPositionY type: number title: Shadow Y suffix: px defaultValue: 0 availableIf: - field: imageShadow - type: bool - value: true + field: style + type: string + value: image - id: label type: bool title: Label @@ -348,14 +411,22 @@ extensions: title: Label position defaultValue: right choices: - - key: left - label: Left - key: right label: Right + - key: left + label: Left - key: top label: Top - key: bottom label: Bottom + - key: righttop + label: Right top + - key: rightbottom + label: Right bottom + - key: lefttop + label: Left top + - key: leftbottom + label: Left bottom availableIf: field: label type: bool @@ -367,6 +438,13 @@ extensions: field: label type: bool value: true + - id: labelBackground + type: bool + title: Label background + availableIf: + field: label + type: bool + value: true - id: extrude type: bool title: Extruded @@ -510,6 +588,18 @@ extensions: - id: height type: number title: Height + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground - id: camera type: camera title: Camera @@ -608,11 +698,92 @@ extensions: latlng: schemaGroupId: default fieldId: location + # - id: rect + # visualizer: cesium + # type: primitive + # title: Rectangle + # description: A rectangle + # schema: + # groups: + # - id: default + # title: Rectangle + # fields: + # - id: rect + # type: rect + # title: Rectangle + # - id: height + # type: number + # title: Height + # defaultValue: 0 + # min: 0 + # suffix: m + # - id: heightReference + # type: string + # title: Height standard + # description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. 
+ # defaultValue: none + # choices: + # - key: none + # label: Absolute + # - key: clamp + # label: Clamp to ground + # - key: relative + # label: Relative to ground + # - id: extrudedHeight + # type: bool + # title: Extruded + # - id: style + # type: string + # title: Style + # defaultValue: color + # choices: + # - key: color + # label: Color + # - key: image + # label: Image + # - id: fillColor + # type: string + # title: Fill + # ui: color + # availableIf: + # field: style + # type: string + # value: color + # - id: image + # type: url + # title: Image + # ui: image + # availableIf: + # field: style + # type: string + # value: image + # - id: outlineColor + # type: string + # title: Fill + # ui: color + # - id: outlineWidth + # type: number + # title: Outline width + # suffix: px + # - id: shadows + # type: string + # title: Shadows + # description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. + # defaultValue: disabled + # choices: + # - key: disabled + # label: Disabled + # - key: enabled + # label: Enabled + # - key: cast_only + # label: Cast only + # - key: receive_only + # label: Receive only - id: ellipsoid visualizer: cesium type: primitive title: Sphere - description: A ball-like marker. + description: A 3D ellipsoid schema: groups: - id: default @@ -627,6 +798,18 @@ extensions: defaultValue: 0 min: 0 suffix: m + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground - id: radius type: number title: Radius @@ -637,10 +820,197 @@ extensions: type: string title: Fill ui: color + - id: shadows + type: string + title: Shadows + description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. 
+ defaultValue: disabled + choices: + - key: disabled + label: Disabled + - key: enabled + label: Enabled + - key: cast_only + label: Cast only + - key: receive_only + label: Receive only linkable: latlng: schemaGroupId: default fieldId: position + - id: model + visualizer: cesium + type: primitive + title: 3D Model + description: A 3D model + schema: + groups: + - id: default + title: 3D Model + fields: + - id: model + type: url + title: URL + description: only glTF format is supported + - id: location + type: latlng + title: Location + - id: height + type: number + title: Height + defaultValue: 0 + min: 0 + suffix: m + - id: heightReference + type: string + title: Height standard + description: The position relative to the terrain. By default, it is the absolute altitude from the WGS84 ellipsoid. + defaultValue: none + choices: + - key: none + label: Absolute + - key: clamp + label: Clamp to ground + - key: relative + label: Relative to ground + - id: heading + type: number + title: Heading + suffix: ยฐ + defaultValue: 0 + - id: pitch + type: number + title: Pitch + suffix: ยฐ + defaultValue: 0 + - id: roll + type: number + title: Roll + suffix: ยฐ + defaultValue: 0 + - id: scale + type: number + title: Scale + prefix: x + defaultValue: 1 + - id: maximumScale + type: number + title: Max scale + prefix: x + - id: minimumPixelSize + type: number + title: Min size + suffix: px + - id: animation + type: bool + title: Animation + description: Enables aniamtion. The scene animation should also be enabled. + defaultValue: true + - id: appearance + title: Appearance + fields: + - id: shadows + type: string + title: Shadows + description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. 
+ defaultValue: disabled + choices: + - key: disabled + label: Disabled + - key: enabled + label: Enabled + - key: cast_only + label: Cast only + - key: receive_only + label: Receive only + - id: colorBlend + type: string + title: Color blend + defaultValue: none + choices: + - key: none + label: None + - key: highlight + label: Highlight + - key: replace + label: Replace + - key: mix + label: Mix + - id: color + type: string + title: Color + ui: color + - id: colorBlendAmount + type: number + title: Color blend amount + description: O + min: 0 + max: 1 + defaultValue: 1 + availableIf: + field: colorBlend + type: string + value: mix + - id: lightColor + type: string + title: Light color + ui: color + - id: silhouette + type: bool + title: Silhouette + - id: silhouetteColor + type: string + title: Silhouette color + ui: color + availableIf: + field: silhouette + type: bool + value: true + - id: silhouetteSize + type: number + title: Silhouette size + defaultValue: 1 + suffix: px + availableIf: + field: silhouette + type: bool + value: true + linkable: + latlng: + schemaGroupId: default + fieldId: location + - id: tileset + visualizer: cesium + type: primitive + title: 3D Tiles + description: 3D tiles in "3D Tiles" format + schema: + groups: + - id: default + title: Model + fields: + - id: tileset + type: url + title: Tileset URL + description: A path to tileset.json in 3D tiles + - id: styleUrl + type: url + title: Styling URL + description: Optional. A path to a JSON file in 3D Tiles styles + - id: shadows + type: string + title: Shadows + description: Enables shadows. Also shadows in the scene should be enabled. Rendering may become overloaded. 
+ defaultValue: disabled + choices: + - key: disabled + label: Disabled + - key: enabled + label: Enabled + - key: cast_only + label: Cast only + - key: receive_only + label: Receive only - id: resource visualizer: cesium type: primitive diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 84313ac8b..4296faeeb 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -14,6 +14,9 @@ extensions: terrain: title: ๅœฐๅฝข description: ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + depthTestAgainstTerrain: + title: ๅœฐๅฝขใฎไธ‹ใ‚’้ž่กจ็คบ + description: ๅœฐๅฝขใฎไธ‹ใซใ‚ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’้ž่กจ็คบใซใ—ใพใ™ใ€‚ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใฎ่ชญใฟ่พผใฟ็Šถๆณใซใ‚ˆใฃใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒ่กจ็คบใ•ใ‚ŒใŸใ‚Š้š ใ‚ŒใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ skybox: title: ๅฎ‡ๅฎ™ใฎ่กจ็คบ description: ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ @@ -64,6 +67,9 @@ extensions: sky_atmosphere: title: ไธŠ็ฉบใฎๅคงๆฐ— description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ + shadows: + title: ๅฝฑ + description: ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒ่ฝใจใ™ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ไฝตใ›ใฆๅ„ใƒฌใ‚คใƒคใƒผใฎๅฝฑใฎ่จญๅฎšใ‚’ใใ‚Œใžใ‚Œๆœ‰ๅŠนใซใ™ใ‚‹ๅฟ…่ฆใŒใ‚ใ‚Šใพใ™ใ€‚ fog: title: ้œง description: ้œงใฎON/OFFใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ @@ -76,6 +82,12 @@ extensions: title: ่‰ฒ็›ธ surturation_shift: title: ๅฝฉๅบฆ + timeline: + title: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณ + fields: + animation: + title: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ + description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚3Dใƒขใƒ‡ใƒซใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใŒๅ†็”Ÿใ•ใ‚Œใ‚‹ใ‚ˆใ†ใซใชใ‚Šใพใ™ใ€‚ googleAnalytics: title: Google Analytics description: Google Analyticsใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒšใƒผใ‚ธใŒใฉใฎใ‚ˆใ†ใซ้–ฒ่ฆงใ•ใ‚Œใฆใ„ใ‚‹ใ‹ใ‚’ๅˆ†ๆžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ @@ -106,7 +118,7 @@ extensions: title: ใƒ•ใ‚ฉใƒณใƒˆ marker: title: ใƒžใƒผใ‚ซใƒผ - description: 
ใƒ‰ใƒฉใƒƒใ‚ฐ&ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใงใ€ๅœฐๅ›ณไธŠใซใƒžใƒผใ‚ซใƒผใ‚’่ฟฝๅŠ ใ—ใพใ™ใ€‚ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใใ‚Œใ‚‰ใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + description: ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: default: title: ใƒžใƒผใ‚ซใƒผ @@ -115,45 +127,83 @@ extensions: title: ไฝ็ฝฎ height: title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ style: title: ่กจ็คบๆ–นๆณ• choices: - point: ใƒใ‚คใƒณใƒˆ + point: ็‚น image: ใ‚ขใ‚คใ‚ณใƒณ pointColor: - title: ใƒใ‚คใƒณใƒˆ่‰ฒ + title: ็‚นใฎ่‰ฒ pointSize: - title: ใƒใ‚คใƒณใƒˆใ‚ตใ‚คใ‚บ + title: ็‚นใฎๅคงใใ• + pointOutlineColor: + title: ็‚นใฎ็ทšใฎ่‰ฒ + pointOutlineWidth: + title: ็‚นใฎ็ทšใฎๅน… image: title: ็”ปๅƒURL imageSize: title: ็”ปๅƒใ‚ตใ‚คใ‚บ + imageColor: + title: ็”ปๅƒใฎ่‰ฒ imageCrop: title: ๅˆ‡ใ‚ŠๆŠœใ choices: none: ใชใ— circle: ๅ††ๅฝข + imageHorizontalOrigin: + title: ็”ปๅƒใฎไธญๅฟƒๆจช + choices: + left: ๅทฆ + center: ไธญๅคฎ + right: ๅณ + imageVerticalOrigin: + title: ็”ปๅƒใฎไธญๅฟƒ็ธฆ + choices: + top: ไธŠ + center: ไธญๅคฎ + baseline: ใƒ™ใƒผใ‚นใƒฉใ‚คใƒณ + bottom: ไธ‹ imageShadow: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆ + title: ็”ปๅƒใฎๅฝฑ imageShadowColor: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ + title: ็”ปๅƒใฎๅฝฑ่‰ฒ imageShadowBlur: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„ + title: ็”ปๅƒใฎๅฝฑๅŠๅพ„ imageShadowPositionX: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆX + title: ็”ปๅƒใฎๅฝฑX imageShadowPositionY: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆY + title: ็”ปๅƒใฎๅฝฑY label: title: ใƒฉใƒ™ใƒซ labelText: - title: ใƒฉใƒ™ใƒซๆ–‡ๅญ— + 
title: ใƒฉใƒ™ใƒซใฎๆ–‡ๅญ— + labelPosition: + title: ใƒฉใƒ™ใƒซใฎไฝ็ฝฎ + choices: + right: ๅณ + left: ๅทฆ + top: ไธŠ + bottom: ไธ‹ + righttop: ๅณไธŠ + rightbottom: ๅณไธ‹ + lefttop: ๅทฆไธŠ + leftbottom: ๅทฆไธ‹ labelTypography: title: ใƒฉใƒ™ใƒซใƒ•ใ‚ฉใƒณใƒˆ + labelBackground: + title: ใƒฉใƒ™ใƒซใฎ่ƒŒๆ™ฏ extrude: title: ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™ polyline: title: ็›ด็ทš - description: Polyline primitive propertySchema: default: title: ็›ด็ทš @@ -166,7 +216,6 @@ extensions: title: ็ทšๅน… polygon: title: ใƒใƒชใ‚ดใƒณ - description: Polygon primitive propertySchema: default: title: ใƒใƒชใ‚ดใƒณ @@ -185,7 +234,6 @@ extensions: title: ็ทšๅน… rect: title: ้•ทๆ–นๅฝข - description: Rectangle primitive propertySchema: default: title: ้•ทๆ–นๅฝข @@ -194,6 +242,13 @@ extensions: title: ้•ทๆ–นๅฝข height: title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ extrudedHeight: title: ้ซ˜ใ• style: @@ -205,9 +260,21 @@ extensions: title: ๅก—ใ‚Š่‰ฒ image: title: ็”ปๅƒURL + outlineColor: + title: ็ทšใฎ่‰ฒ + outlineWidth: + title: ็ทšใฎๅน… + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ photooverlay: title: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค - description: ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ใ•ใ‚ŒใŸใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + description: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง็”ป้ขไธŠใซ่ขซใ›ใฆ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ 
propertySchema: default: title: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค @@ -216,6 +283,13 @@ extensions: title: ไฝ็ฝฎ height: title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ camera: title: ใ‚ซใƒกใƒฉ description: ใ‚ฏใƒชใƒƒใ‚ฏใ•ใ‚ŒใŸใจใใซ็งปๅ‹•ใ™ใ‚‹ใ‚ซใƒกใƒฉใฎ่จญๅฎšใ‚’ใ—ใพใ™ใ€‚ @@ -223,41 +297,158 @@ extensions: title: ใ‚ขใ‚คใ‚ณใƒณ imageSize: title: ใ‚ขใ‚คใ‚ณใƒณใ‚ตใ‚คใ‚บ + imageHorizontalOrigin: + title: ใ‚ขใ‚คใ‚ณใƒณใฎไธญๅฟƒๆจช + choices: + left: ๅทฆ + center: ไธญๅคฎ + right: ๅณ + imageVerticalOrigin: + title: ใ‚ขใ‚คใ‚ณใƒณใฎไธญๅฟƒ็ธฆ + choices: + top: ไธŠ + center: ไธญๅคฎ + baseline: ใƒ™ใƒผใ‚นใƒฉใ‚คใƒณ + bottom: ไธ‹ imageCrop: title: ๅˆ‡ใ‚ŠๆŠœใ choices: none: ใชใ— circle: ๅ††ๅฝข imageShadow: - title: ใ‚ขใ‚คใ‚ณใƒณใ‚ทใƒฃใƒ‰ใ‚ฆ + title: ใ‚ขใ‚คใ‚ณใƒณใฎๅฝฑ imageShadowColor: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆ่‰ฒ + title: ๅฝฑ่‰ฒ imageShadowBlur: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆๅŠๅพ„ + title: ๅฝฑๅŠๅพ„ imageShadowPositionX: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆX + title: ๅฝฑX imageShadowPositionY: - title: ใ‚ทใƒฃใƒ‰ใ‚ฆY + title: ๅฝฑY photoOverlayImage: title: ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒ + photoOverlayDescription: + title: ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒใฎ่ชฌๆ˜Ž ellipsoid: - title: ็ƒไฝ“ใƒ„ใƒผใƒซ - description: ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใ‚’ๅœฐๅ›ณไธŠใซใƒ‰ใƒฉใƒƒใ‚ฐ&ใƒ‰ใƒญใƒƒใƒ—ใ™ใ‚‹ใ“ใจใง่ฟฝๅŠ ใงใใพใ™ใ€‚ๆฅ•ๅ††ๅฝขใƒ„ใƒผใƒซใซใ‚ˆใฃใฆ็ซ‹ไฝ“็š„ใชใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚ + title: ็ƒไฝ“ + description: ็ซ‹ไฝ“็š„ใช็ƒไฝ“ใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚ propertySchema: default: - title: ็ƒไฝ“ใƒ„ใƒผใƒซ + title: ็ƒไฝ“ fields: position: title: ไฝ็ฝฎ height: title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: 
ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ radius: title: ๅŠๅพ„ fillColor: title: ๅก—ใ‚Š่‰ฒ + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ + model: + title: 3Dใƒขใƒ‡ใƒซ + description: glTFๅฝขๅผใฎ3Dใƒขใƒ‡ใƒซใ‚’่ชญใฟ่พผใ‚“ใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใง่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ + propertySchema: + default: + title: 3Dใƒขใƒ‡ใƒซ + fields: + model: + title: URL + description: glTFๅฝขๅผใฎใฟๅฏพๅฟœใ—ใฆใ„ใพใ™ใ€‚ + location: + title: ไฝ็ฝฎ + height: + title: ้ซ˜ๅบฆ + heightReference: + title: ้ซ˜ๅบฆใฎๅŸบๆบ– + description: ๅœฐๅฝขใ‚’ๅŸบๆบ–ใจใ—ใŸ็›ธๅฏพ็š„ใช้ซ˜ๅบฆใซใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใงใฏWGS84ๆฅ•ๅ††ไฝ“ใ‹ใ‚‰ใฎ็ตถๅฏพ็š„ใช้ซ˜ๅบฆใซใชใ‚Šใพใ™ใ€‚ + choices: + none: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + clamp: ๅœฐ่กจใซๅ›บๅฎš + relative: ๅœฐ่กจใ‹ใ‚‰ใฎ้ซ˜ๅบฆ + heading: + title: ใƒ˜ใƒƒใƒ‡ใ‚ฃใƒณใ‚ฐ + pitch: + title: ใƒ”ใƒƒใƒ + roll: + title: ใƒญใƒผใƒซ + scale: + title: ๅคงใใ• + maximumScale: + title: ๆœ€ๅคงใฎๅคงใใ• + minimumPixelSize: + title: ๆœ€ๅฐใฎๅคงใใ• + animation: + title: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ + description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ + appearance: + title: ๅค–่ฆณ + fields: + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: 
ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ + colorBlend: + title: ่‰ฒใฎใƒ–ใƒฌใƒณใƒ‰ + choices: + none: ็„กๅŠน + highlight: ใƒใ‚คใƒฉใ‚คใƒˆ + replace: ็ฝฎใๆ›ใˆ + mix: ๆททๅˆ + color: + title: ่‰ฒ + colorBlendAmount: + title: ่‰ฒใฎๆททๅˆ้‡ + lightColor: + title: ็…งๆ˜Ž่‰ฒ + silhouette: + title: ใ‚ทใƒซใ‚จใƒƒใƒˆ + silhouetteColor: + title: ใ‚ทใƒซใ‚จใƒƒใƒˆ่‰ฒ + silhouetteSize: + title: ใ‚ทใƒซใ‚จใƒƒใƒˆใ‚ตใ‚คใ‚บ + tileset: + title: 3Dใ‚ฟใ‚คใƒซ + description: 3D Tilesๅฝขๅผใฎ3Dใ‚ฟใ‚คใƒซใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + propertySchema: + default: + title: 3Dใ‚ฟใ‚คใƒซ + fields: + tileset: + title: ใ‚ฟใ‚คใƒซใ‚ปใƒƒใƒˆURL + description: 3Dใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟๅ†…ใฎ tileset.json ใฎURL + styleUrl: + title: ใ‚นใ‚ฟใ‚คใƒซURL + description: 3D Tiles styles ใŒ่จ˜่ฟฐใ•ใ‚ŒใŸJSONใฎURLใ€‚ใ‚นใ‚ฟใ‚คใƒซใ‚’้ฉ็”จใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚่จญๅฎšใฏไปปๆ„ใงใ™ใ€‚ + shadows: + title: ๅฝฑ + description: ๅฝฑใ‚’่กจ็คบใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎๅฝฑใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ใƒฌใƒณใƒ€ใƒชใƒณใ‚ฐใŒ้‡ใใชใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + disabled: ็„กๅŠน + enabled: ๆœ‰ๅŠน + cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ + receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ resource: title: ใƒ•ใ‚กใ‚คใƒซ - description: ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟ๏ผˆๅฝขๅผไฝ•๏ผŸ๏ผŸ๏ผŸ๏ผ‰ใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ + description: ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ propertySchema: default: title: ใƒ•ใ‚กใ‚คใƒซ @@ -451,9 +642,9 @@ extensions: cameraPosition: title: ใ‚ซใƒกใƒฉไฝ็ฝฎ cameraDuration: - title: ใ‚ซใƒกใƒฉ้–‹ๅง‹ๆ™‚้–“ - cameraDelay: title: ใ‚ซใƒกใƒฉ็งปๅ‹•ๆ™‚้–“ + cameraDelay: + title: ใ‚ซใƒกใƒฉๅพ…ๆฉŸๆ™‚้–“ storytelling: title: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ description: 
ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚ From 080ab974fd6ad0e59652433852def3619e62114f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 13 Aug 2021 17:44:09 +0900 Subject: [PATCH 070/253] fix: tweak field names of model primitive --- pkg/builtin/manifest.yml | 1 + pkg/builtin/manifest_ja.yml | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 3a8d03f2c..2314122ae 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -891,6 +891,7 @@ extensions: - id: scale type: number title: Scale + description: Displays the 3D model equally multiplied by the set value. prefix: x defaultValue: 1 - id: maximumScale diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 4296faeeb..5e92f3c28 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -387,11 +387,12 @@ extensions: roll: title: ใƒญใƒผใƒซ scale: - title: ๅคงใใ• + title: ใ‚นใ‚ฑใƒผใƒซ + description: 3Dใƒขใƒ‡ใƒซใ‚’่จญๅฎšๅ€คๅˆ†็ญ‰ๅ€ใ—ใฆ่กจ็คบใ—ใพใ™ใ€‚ maximumScale: - title: ๆœ€ๅคงใฎๅคงใใ• + title: ๆœ€ๅคงใ‚นใ‚ฑใƒผใƒซ minimumPixelSize: - title: ๆœ€ๅฐใฎๅคงใใ• + title: ๆœ€ๅฐ่กจ็คบใ‚ตใ‚คใ‚บ animation: title: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚ๅฟ…ใšใ‚ทใƒผใƒณใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใฎ่จญๅฎšใ‚‚ๆœ‰ๅŠนใซใ—ใฆใใ ใ•ใ„ใ€‚ From 0f800fe832cad7f9cdab34c1d21800ddfd5c34bf Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Tue, 24 Aug 2021 09:59:20 +0300 Subject: [PATCH 071/253] make fetch url support unicode chars (#38) --- internal/infrastructure/google/fetch.go | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git 
a/internal/infrastructure/google/fetch.go b/internal/infrastructure/google/fetch.go index 9ea6af9b5..c0309836e 100644 --- a/internal/infrastructure/google/fetch.go +++ b/internal/infrastructure/google/fetch.go @@ -4,11 +4,27 @@ import ( "fmt" "io" "net/http" + "net/url" ) +func sheetURL(fileId string, sheetName string) string { + gurl := url.URL{ + Scheme: "https", + Host: "docs.google.com", + Path: fmt.Sprintf("spreadsheets/d/%s/gviz/tq", fileId), + } + + queryValues := gurl.Query() + queryValues.Set("tqx", "out:csv") + queryValues.Set("sheet", sheetName) + gurl.RawQuery = queryValues.Encode() + + return gurl.String() +} + func fetchCSV(token string, fileId string, sheetName string) (*io.ReadCloser, error) { - url := fmt.Sprintf("https://docs.google.com/spreadsheets/d/%s/gviz/tq?tqx=out:csv&sheet=%s", fileId, sheetName) - req, err := http.NewRequest("GET", url, nil) + u := sheetURL(fileId, sheetName) + req, err := http.NewRequest("GET", u, nil) if err != nil { return nil, err } From 2e4f52a0700367a3e05e5feec187ed6f315e716b Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 25 Aug 2021 10:47:33 +0300 Subject: [PATCH 072/253] feat: set scene theme (#35) * add theme schema to the system extension * add Japanese translation section * fix keys naming * set colors default values * re order theme widget * simplify the ids * add Jp translations * restructure the menu items --- pkg/builtin/manifest.yml | 48 +++++++++++++++++++++++++++++++++++++ pkg/builtin/manifest_ja.yml | 21 ++++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 2314122ae..8c01643e4 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -100,6 +100,54 @@ extensions: title: Maximum zoom level min: 0 max: 30 + - id: theme + title: Publish Theme + description: Set your theme. 
+ fields: + - id: themeType + type: string + title: Theme + defaultValue: dark + description: Select the theme. + choices: + - key: dark + label: Re:Earth Dark + - key: light + label: Re:Earth Light + - key: forest + label: Forest + - key: custom + label: Custom theme + - id: themeTextColor + type: string + ui: color + title: Text color + description: Select a color. + defaultValue: '#434343' + availableIf: + field: themeType + type: string + value: custom + - id: themeSelectColor + type: string + ui: color + title: Select color + description: Select a color. + defaultValue: '#C52C63' + availableIf: + field: themeType + type: string + value: custom + - id: themeBackgroundColor + type: string + ui: color + title: Background color + description: Select a color. + defaultValue: '#DFE5F0' + availableIf: + field: themeType + type: string + value: custom - id: atmosphere title: Atmospheric Conditions description: Set the look and feel of the Earth. diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 5e92f3c28..2b5b07037 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -98,6 +98,27 @@ extensions: trackingCode: title: ใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐID description: ใ“ใ“ใซใ‚ฐใƒผใ‚ฐใƒซใ‚ขใƒŠใƒชใƒ†ใ‚ฃใ‚ฏใ‚นใฎใƒˆใƒฉใƒƒใ‚ญใƒณใ‚ฐIDใ‚’่ฒผใ‚Šไป˜ใ‘ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใซใ“ใฎใ‚ณใƒผใƒ‰ใŒๅŸ‹ใ‚่พผใพใ‚Œใพใ™ใ€‚ + theme: + title: ๅ…ฌ้–‹็”จใƒ†ใƒผใƒž + description: ๅ…ฌ้–‹็”จใฎใƒ†ใƒผใƒžใ‚’่จญๅฎšใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + fields: + themeType: + title: ใƒ†ใƒผใƒž + description: ใƒ†ใƒผใƒžใฎ็จฎ้กžใ‚’้ธๆŠžใ—ใพใ™ใ€‚ใ‚ˆใ‚Š็ดฐใ‹ใใƒ†ใƒผใƒžใ‚’ๆŒ‡ๅฎšใ—ใŸใ„ๅ ดๅˆใฏใ‚ซใ‚นใ‚ฟใƒ ใ‚’้ธๆŠžใ—ใพใ™ใ€‚ + choices: + dark: Re:Earth ใƒ€ใƒผใ‚ฏ + light: Re:Earth ใƒฉใ‚คใƒˆ + forest: ๆฃฎ + custom: ใ‚ซใ‚นใ‚ฟใƒ  + themeTextColor: + title: ๆ–‡ๅญ—่‰ฒ + description: ๆ–‡ๅญ—่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + themeSelectColor: + title: ้ธๆŠž่‰ฒ + description: ้ธๆŠž่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + themeBackgroundColor: + title: ่ƒŒๆ™ฏ่‰ฒ + 
description: ่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ infobox: title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น description: ้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ From cc63cdb8c5571eb59e55100bede5d6810535db4d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 27 Aug 2021 12:40:47 +0900 Subject: [PATCH 073/253] docs: add badges to readme [skip ci] --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 049ba89a4..e4cce25ad 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ # reearth-backend + +[![main](https://github.com/reearth/reearth-backend/actions/workflows/main.yml/badge.svg)](https://github.com/reearth/reearth-backend/actions/workflows/main.yml) [![codecov](https://codecov.io/gh/reearth/reearth-backend/branch/main/graph/badge.svg?token=4UV79645UP)](https://codecov.io/gh/reearth/reearth-backend) [![Go Report Card](https://goreportcard.com/badge/github.com/reearth/reearth-backend)](https://goreportcard.com/report/github.com/reearth/reearth-backend) + This is the back-end repository of [Re:Earth](https://github.com/reearth/reearth). 
## Bug reporting and requesting features From 2a1d4fd077486892d12159aea00a3b175cf3853a Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 31 Aug 2021 18:39:50 +0900 Subject: [PATCH 074/253] refactor: graphql adapter (#40) --- gqlgen.yml | 26 +- internal/{graphql => adapter/gql}/context.go | 3 +- .../{graphql => adapter/gql}/generated.go | 4881 +++++++---------- .../gql/gqldataloader/assetloader_gen.go} | 91 +- .../gql/gqldataloader}/dataloader.go | 27 +- .../gql/gqldataloader}/datasetloader_gen.go | 42 +- .../gqldataloader}/datasetschemaloader_gen.go | 42 +- .../gqldataloader}/layergrouploader_gen.go | 42 +- .../gql/gqldataloader}/layeritemloader_gen.go | 42 +- .../gql/gqldataloader}/layerloader_gen.go | 42 +- .../gql/gqldataloader}/pluginloader_gen.go | 42 +- .../gql/gqldataloader}/projectloader_gen.go | 42 +- .../gql/gqldataloader}/propertyloader_gen.go | 42 +- .../propertyschemaloader_gen.go | 42 +- .../gql/gqldataloader}/sceneloader_gen.go | 42 +- .../gql/gqldataloader}/teamloader_gen.go | 42 +- .../gql/gqldataloader}/userloader_gen.go | 42 +- .../{graphql => gql/gqlmodel}/convert.go | 32 +- .../gqlmodel}/convert_asset.go | 4 +- .../gqlmodel}/convert_dataset.go | 30 +- .../gqlmodel}/convert_layer.go | 44 +- .../gqlmodel}/convert_plugin.go | 12 +- .../gqlmodel}/convert_project.go | 10 +- .../gqlmodel}/convert_property.go | 106 +- .../gqlmodel}/convert_scene.go | 14 +- .../{graphql => gql/gqlmodel}/convert_team.go | 6 +- .../{graphql => gql/gqlmodel}/convert_user.go | 11 +- .../{graphql => gql/gqlmodel}/models.go | 4 +- .../{graphql => gql/gqlmodel}/models_gen.go | 54 +- .../{graphql => gql/gqlmodel}/scalar.go | 44 +- internal/adapter/gql/loader.go | 110 + internal/adapter/gql/loader_asset.go | 100 + internal/adapter/gql/loader_dataset.go | 225 + internal/adapter/gql/loader_layer.go | 229 + internal/adapter/gql/loader_plugin.go | 94 + internal/adapter/gql/loader_project.go | 111 + internal/adapter/gql/loader_property.go | 142 + internal/adapter/gql/loader_scene.go 
| 110 + internal/adapter/gql/loader_team.go | 87 + internal/adapter/gql/loader_user.go | 85 + internal/adapter/gql/resolver.go | 28 + internal/adapter/gql/resolver_asset.go | 21 + .../gql}/resolver_dataset.go | 25 +- .../gql}/resolver_dataset_schema.go | 23 +- internal/adapter/gql/resolver_layer.go | 459 ++ internal/adapter/gql/resolver_mutation.go | 7 + .../adapter/gql/resolver_mutation_asset.go | 36 + .../adapter/gql/resolver_mutation_dataset.go | 148 + .../adapter/gql/resolver_mutation_layer.go | 216 + .../adapter/gql/resolver_mutation_project.go | 96 + .../adapter/gql/resolver_mutation_property.go | 207 + .../adapter/gql/resolver_mutation_scene.go | 179 + .../adapter/gql/resolver_mutation_team.go | 79 + .../adapter/gql/resolver_mutation_user.go | 75 + .../gql}/resolver_plugin.go | 37 +- internal/adapter/gql/resolver_project.go | 33 + .../gql}/resolver_property.go | 139 +- .../gql}/resolver_property_schema.go | 33 +- .../gql}/resolver_property_test.go | 9 +- .../gql}/resolver_query.go | 147 +- internal/adapter/gql/resolver_scene.go | 130 + internal/adapter/gql/resolver_team.go | 42 + internal/adapter/gql/resolver_user.go | 28 + internal/{graphql => adapter/gql}/tracer.go | 2 +- internal/adapter/graphql/container.go | 92 - internal/adapter/graphql/controller_asset.go | 75 - .../adapter/graphql/controller_dataset.go | 223 - internal/adapter/graphql/controller_layer.go | 202 - internal/adapter/graphql/controller_plugin.go | 72 - .../adapter/graphql/controller_project.go | 112 - .../adapter/graphql/controller_property.go | 200 - internal/adapter/graphql/controller_scene.go | 130 - internal/adapter/graphql/controller_team.go | 82 - internal/adapter/graphql/controller_user.go | 103 - internal/adapter/graphql/loader_dataset.go | 50 - internal/adapter/graphql/loader_layer.go | 91 - internal/adapter/graphql/loader_plugin.go | 22 - internal/adapter/graphql/loader_project.go | 47 - internal/adapter/graphql/loader_property.go | 46 - internal/adapter/graphql/loader_scene.go 
| 56 - internal/adapter/graphql/loader_team.go | 1 - internal/adapter/graphql/loader_user.go | 33 - internal/app/app.go | 10 +- internal/app/auth.go | 8 +- internal/app/graphql.go | 46 +- internal/app/private.go | 6 +- internal/graphql/dataloader/context.go | 73 - internal/graphql/dataloader/loader.tmpl | 55 - .../dataloader/loader_dataset_schema_gen.go | 54 - internal/graphql/dataloader/loader_gen.go | 348 -- .../dataloader/loader_layer_group_gen.go | 54 - .../dataloader/loader_layer_item_gen.go | 54 - .../dataloader/loader_property_schema_gen.go | 54 - internal/graphql/resolver.go | 35 - internal/graphql/resolver_asset.go | 22 - internal/graphql/resolver_layer.go | 460 -- internal/graphql/resolver_mutation.go | 434 -- internal/graphql/resolver_project.go | 34 - internal/graphql/resolver_scene.go | 131 - internal/graphql/resolver_team.go | 43 - internal/graphql/resolver_user.go | 29 - internal/infrastructure/memory/asset.go | 19 +- internal/infrastructure/mongo/asset.go | 80 +- internal/usecase/interactor/asset.go | 26 +- internal/usecase/interactor/common.go | 18 + internal/usecase/interfaces/asset.go | 3 +- internal/usecase/interfaces/common.go | 13 + internal/usecase/repo/asset.go | 3 +- schema.graphql | 65 +- 109 files changed, 6016 insertions(+), 7193 deletions(-) rename internal/{graphql => adapter/gql}/context.go (97%) rename internal/{graphql => adapter/gql}/generated.go (85%) rename internal/{graphql/dataloader/scenelockloader_gen.go => adapter/gql/gqldataloader/assetloader_gen.go} (59%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/dataloader.go (61%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/datasetloader_gen.go (80%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/datasetschemaloader_gen.go (82%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/layergrouploader_gen.go (79%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/layeritemloader_gen.go (80%) rename 
internal/{graphql/dataloader => adapter/gql/gqldataloader}/layerloader_gen.go (81%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/pluginloader_gen.go (80%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/projectloader_gen.go (80%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/propertyloader_gen.go (81%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/propertyschemaloader_gen.go (82%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/sceneloader_gen.go (81%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/teamloader_gen.go (81%) rename internal/{graphql/dataloader => adapter/gql/gqldataloader}/userloader_gen.go (81%) rename internal/adapter/{graphql => gql/gqlmodel}/convert.go (69%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_asset.go (84%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_dataset.go (73%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_layer.go (72%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_plugin.go (85%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_project.go (85%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_property.go (82%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_scene.go (81%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_team.go (90%) rename internal/adapter/{graphql => gql/gqlmodel}/convert_user.go (81%) rename internal/adapter/{graphql => gql/gqlmodel}/models.go (98%) rename internal/adapter/{graphql => gql/gqlmodel}/models_gen.go (95%) rename internal/adapter/{graphql => gql/gqlmodel}/scalar.go (74%) create mode 100644 internal/adapter/gql/loader.go create mode 100644 internal/adapter/gql/loader_asset.go create mode 100644 internal/adapter/gql/loader_dataset.go create mode 100644 internal/adapter/gql/loader_layer.go create mode 100644 internal/adapter/gql/loader_plugin.go create mode 100644 
internal/adapter/gql/loader_project.go create mode 100644 internal/adapter/gql/loader_property.go create mode 100644 internal/adapter/gql/loader_scene.go create mode 100644 internal/adapter/gql/loader_team.go create mode 100644 internal/adapter/gql/loader_user.go create mode 100644 internal/adapter/gql/resolver.go create mode 100644 internal/adapter/gql/resolver_asset.go rename internal/{graphql => adapter/gql}/resolver_dataset.go (51%) rename internal/{graphql => adapter/gql}/resolver_dataset_schema.go (56%) create mode 100644 internal/adapter/gql/resolver_layer.go create mode 100644 internal/adapter/gql/resolver_mutation.go create mode 100644 internal/adapter/gql/resolver_mutation_asset.go create mode 100644 internal/adapter/gql/resolver_mutation_dataset.go create mode 100644 internal/adapter/gql/resolver_mutation_layer.go create mode 100644 internal/adapter/gql/resolver_mutation_project.go create mode 100644 internal/adapter/gql/resolver_mutation_property.go create mode 100644 internal/adapter/gql/resolver_mutation_scene.go create mode 100644 internal/adapter/gql/resolver_mutation_team.go create mode 100644 internal/adapter/gql/resolver_mutation_user.go rename internal/{graphql => adapter/gql}/resolver_plugin.go (60%) create mode 100644 internal/adapter/gql/resolver_project.go rename internal/{graphql => adapter/gql}/resolver_property.go (58%) rename internal/{graphql => adapter/gql}/resolver_property_schema.go (70%) rename internal/{graphql => adapter/gql}/resolver_property_test.go (77%) rename internal/{graphql => adapter/gql}/resolver_query.go (62%) create mode 100644 internal/adapter/gql/resolver_scene.go create mode 100644 internal/adapter/gql/resolver_team.go create mode 100644 internal/adapter/gql/resolver_user.go rename internal/{graphql => adapter/gql}/tracer.go (99%) delete mode 100644 internal/adapter/graphql/container.go delete mode 100644 internal/adapter/graphql/controller_asset.go delete mode 100644 internal/adapter/graphql/controller_dataset.go 
delete mode 100644 internal/adapter/graphql/controller_layer.go delete mode 100644 internal/adapter/graphql/controller_plugin.go delete mode 100644 internal/adapter/graphql/controller_project.go delete mode 100644 internal/adapter/graphql/controller_property.go delete mode 100644 internal/adapter/graphql/controller_scene.go delete mode 100644 internal/adapter/graphql/controller_team.go delete mode 100644 internal/adapter/graphql/controller_user.go delete mode 100644 internal/adapter/graphql/loader_dataset.go delete mode 100644 internal/adapter/graphql/loader_layer.go delete mode 100644 internal/adapter/graphql/loader_plugin.go delete mode 100644 internal/adapter/graphql/loader_project.go delete mode 100644 internal/adapter/graphql/loader_property.go delete mode 100644 internal/adapter/graphql/loader_scene.go delete mode 100644 internal/adapter/graphql/loader_team.go delete mode 100644 internal/adapter/graphql/loader_user.go delete mode 100644 internal/graphql/dataloader/context.go delete mode 100644 internal/graphql/dataloader/loader.tmpl delete mode 100644 internal/graphql/dataloader/loader_dataset_schema_gen.go delete mode 100644 internal/graphql/dataloader/loader_gen.go delete mode 100644 internal/graphql/dataloader/loader_layer_group_gen.go delete mode 100644 internal/graphql/dataloader/loader_layer_item_gen.go delete mode 100644 internal/graphql/dataloader/loader_property_schema_gen.go delete mode 100644 internal/graphql/resolver.go delete mode 100644 internal/graphql/resolver_asset.go delete mode 100644 internal/graphql/resolver_layer.go delete mode 100644 internal/graphql/resolver_mutation.go delete mode 100644 internal/graphql/resolver_project.go delete mode 100644 internal/graphql/resolver_scene.go delete mode 100644 internal/graphql/resolver_team.go delete mode 100644 internal/graphql/resolver_user.go diff --git a/gqlgen.yml b/gqlgen.yml index f58884a94..f13110be9 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -6,11 +6,11 @@ schema: - schema.graphql exec: - 
filename: internal/graphql/generated.go + filename: internal/adapter/gql/generated.go model: - filename: internal/adapter/graphql/models_gen.go + filename: internal/adapter/gql/gqlmodel/models_gen.go resolver: - filename: internal/graphql/resolver.go + filename: internal/adapter/gql/resolver.go type: Resolver models: DateTime: @@ -18,22 +18,22 @@ models: FileSize: model: github.com/99designs/gqlgen/graphql.Int64 ID: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.ID + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID Cursor: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.Cursor + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Cursor URL: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.URL + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.URL PluginID: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.PluginID + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PluginID PluginExtensionID: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.PluginExtensionID + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PluginExtensionID PropertySchemaID: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaID + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaID PropertySchemaFieldID: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchemaFieldID + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaFieldID DatasetSchemaFieldID: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.DatasetSchemaFieldID + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.DatasetSchemaFieldID TranslatedString: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.Map + model: 
github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Map Lang: - model: github.com/reearth/reearth-backend/internal/adapter/graphql.Lang + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Lang diff --git a/internal/graphql/context.go b/internal/adapter/gql/context.go similarity index 97% rename from internal/graphql/context.go rename to internal/adapter/gql/context.go index 768647a0e..df46435bd 100644 --- a/internal/graphql/context.go +++ b/internal/adapter/gql/context.go @@ -1,4 +1,4 @@ -package graphql +package gql import ( "context" @@ -7,7 +7,6 @@ import ( "github.com/reearth/reearth-backend/pkg/user" ) -// ContextKey _ type ContextKey string const ( diff --git a/internal/graphql/generated.go b/internal/adapter/gql/generated.go similarity index 85% rename from internal/graphql/generated.go rename to internal/adapter/gql/generated.go index 2816225fa..87334227e 100644 --- a/internal/graphql/generated.go +++ b/internal/adapter/gql/generated.go @@ -1,6 +1,6 @@ // Code generated by github.com/99designs/gqlgen, DO NOT EDIT. 
-package graphql +package gql import ( "bytes" @@ -15,7 +15,7 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/99designs/gqlgen/graphql/introspection" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" gqlparser "github.com/vektah/gqlparser/v2" @@ -442,62 +442,58 @@ type ComplexityRoot struct { } Mutation struct { - AddDatasetSchema func(childComplexity int, input graphql1.AddDatasetSchemaInput) int - AddDynamicDataset func(childComplexity int, input graphql1.AddDynamicDatasetInput) int - AddDynamicDatasetSchema func(childComplexity int, input graphql1.AddDynamicDatasetSchemaInput) int - AddInfoboxField func(childComplexity int, input graphql1.AddInfoboxFieldInput) int - AddLayerGroup func(childComplexity int, input graphql1.AddLayerGroupInput) int - AddLayerItem func(childComplexity int, input graphql1.AddLayerItemInput) int - AddMemberToTeam func(childComplexity int, input graphql1.AddMemberToTeamInput) int - AddPropertyItem func(childComplexity int, input graphql1.AddPropertyItemInput) int - AddWidget func(childComplexity int, input graphql1.AddWidgetInput) int - CreateAsset func(childComplexity int, input graphql1.CreateAssetInput) int - CreateInfobox func(childComplexity int, input graphql1.CreateInfoboxInput) int - CreateProject func(childComplexity int, input graphql1.CreateProjectInput) int - CreateScene func(childComplexity int, input graphql1.CreateSceneInput) int - CreateTeam func(childComplexity int, input graphql1.CreateTeamInput) int - DeleteMe func(childComplexity int, input graphql1.DeleteMeInput) int - DeleteProject func(childComplexity int, input graphql1.DeleteProjectInput) int - DeleteTeam func(childComplexity int, input graphql1.DeleteTeamInput) int - ImportDataset func(childComplexity int, input graphql1.ImportDatasetInput) int - 
ImportDatasetFromGoogleSheet func(childComplexity int, input graphql1.ImportDatasetFromGoogleSheetInput) int - ImportLayer func(childComplexity int, input graphql1.ImportLayerInput) int - InstallPlugin func(childComplexity int, input graphql1.InstallPluginInput) int - LinkDatasetToPropertyValue func(childComplexity int, input graphql1.LinkDatasetToPropertyValueInput) int - MoveInfoboxField func(childComplexity int, input graphql1.MoveInfoboxFieldInput) int - MoveLayer func(childComplexity int, input graphql1.MoveLayerInput) int - MovePropertyItem func(childComplexity int, input graphql1.MovePropertyItemInput) int - PublishProject func(childComplexity int, input graphql1.PublishProjectInput) int - RemoveAsset func(childComplexity int, input graphql1.RemoveAssetInput) int - RemoveDatasetSchema func(childComplexity int, input graphql1.RemoveDatasetSchemaInput) int - RemoveInfobox func(childComplexity int, input graphql1.RemoveInfoboxInput) int - RemoveInfoboxField func(childComplexity int, input graphql1.RemoveInfoboxFieldInput) int - RemoveLayer func(childComplexity int, input graphql1.RemoveLayerInput) int - RemoveMemberFromTeam func(childComplexity int, input graphql1.RemoveMemberFromTeamInput) int - RemoveMyAuth func(childComplexity int, input graphql1.RemoveMyAuthInput) int - RemovePropertyField func(childComplexity int, input graphql1.RemovePropertyFieldInput) int - RemovePropertyItem func(childComplexity int, input graphql1.RemovePropertyItemInput) int - RemoveWidget func(childComplexity int, input graphql1.RemoveWidgetInput) int - Signup func(childComplexity int, input graphql1.SignupInput) int - SyncDataset func(childComplexity int, input graphql1.SyncDatasetInput) int - UninstallPlugin func(childComplexity int, input graphql1.UninstallPluginInput) int - UnlinkPropertyValue func(childComplexity int, input graphql1.UnlinkPropertyValueInput) int - UpdateDatasetSchema func(childComplexity int, input graphql1.UpdateDatasetSchemaInput) int - UpdateLayer 
func(childComplexity int, input graphql1.UpdateLayerInput) int - UpdateMe func(childComplexity int, input graphql1.UpdateMeInput) int - UpdateMemberOfTeam func(childComplexity int, input graphql1.UpdateMemberOfTeamInput) int - UpdateProject func(childComplexity int, input graphql1.UpdateProjectInput) int - UpdatePropertyItems func(childComplexity int, input graphql1.UpdatePropertyItemInput) int - UpdatePropertyValue func(childComplexity int, input graphql1.UpdatePropertyValueInput) int - UpdatePropertyValueCamera func(childComplexity int, input graphql1.UpdatePropertyValueCameraInput) int - UpdatePropertyValueLatLng func(childComplexity int, input graphql1.UpdatePropertyValueLatLngInput) int - UpdatePropertyValueLatLngHeight func(childComplexity int, input graphql1.UpdatePropertyValueLatLngHeightInput) int - UpdatePropertyValueTypography func(childComplexity int, input graphql1.UpdatePropertyValueTypographyInput) int - UpdateTeam func(childComplexity int, input graphql1.UpdateTeamInput) int - UpdateWidget func(childComplexity int, input graphql1.UpdateWidgetInput) int - UpgradePlugin func(childComplexity int, input graphql1.UpgradePluginInput) int - UploadFileToProperty func(childComplexity int, input graphql1.UploadFileToPropertyInput) int - UploadPlugin func(childComplexity int, input graphql1.UploadPluginInput) int + AddDatasetSchema func(childComplexity int, input gqlmodel.AddDatasetSchemaInput) int + AddDynamicDataset func(childComplexity int, input gqlmodel.AddDynamicDatasetInput) int + AddDynamicDatasetSchema func(childComplexity int, input gqlmodel.AddDynamicDatasetSchemaInput) int + AddInfoboxField func(childComplexity int, input gqlmodel.AddInfoboxFieldInput) int + AddLayerGroup func(childComplexity int, input gqlmodel.AddLayerGroupInput) int + AddLayerItem func(childComplexity int, input gqlmodel.AddLayerItemInput) int + AddMemberToTeam func(childComplexity int, input gqlmodel.AddMemberToTeamInput) int + AddPropertyItem func(childComplexity int, input 
gqlmodel.AddPropertyItemInput) int + AddWidget func(childComplexity int, input gqlmodel.AddWidgetInput) int + CreateAsset func(childComplexity int, input gqlmodel.CreateAssetInput) int + CreateInfobox func(childComplexity int, input gqlmodel.CreateInfoboxInput) int + CreateProject func(childComplexity int, input gqlmodel.CreateProjectInput) int + CreateScene func(childComplexity int, input gqlmodel.CreateSceneInput) int + CreateTeam func(childComplexity int, input gqlmodel.CreateTeamInput) int + DeleteMe func(childComplexity int, input gqlmodel.DeleteMeInput) int + DeleteProject func(childComplexity int, input gqlmodel.DeleteProjectInput) int + DeleteTeam func(childComplexity int, input gqlmodel.DeleteTeamInput) int + ImportDataset func(childComplexity int, input gqlmodel.ImportDatasetInput) int + ImportDatasetFromGoogleSheet func(childComplexity int, input gqlmodel.ImportDatasetFromGoogleSheetInput) int + ImportLayer func(childComplexity int, input gqlmodel.ImportLayerInput) int + InstallPlugin func(childComplexity int, input gqlmodel.InstallPluginInput) int + LinkDatasetToPropertyValue func(childComplexity int, input gqlmodel.LinkDatasetToPropertyValueInput) int + MoveInfoboxField func(childComplexity int, input gqlmodel.MoveInfoboxFieldInput) int + MoveLayer func(childComplexity int, input gqlmodel.MoveLayerInput) int + MovePropertyItem func(childComplexity int, input gqlmodel.MovePropertyItemInput) int + PublishProject func(childComplexity int, input gqlmodel.PublishProjectInput) int + RemoveAsset func(childComplexity int, input gqlmodel.RemoveAssetInput) int + RemoveDatasetSchema func(childComplexity int, input gqlmodel.RemoveDatasetSchemaInput) int + RemoveInfobox func(childComplexity int, input gqlmodel.RemoveInfoboxInput) int + RemoveInfoboxField func(childComplexity int, input gqlmodel.RemoveInfoboxFieldInput) int + RemoveLayer func(childComplexity int, input gqlmodel.RemoveLayerInput) int + RemoveMemberFromTeam func(childComplexity int, input 
gqlmodel.RemoveMemberFromTeamInput) int + RemoveMyAuth func(childComplexity int, input gqlmodel.RemoveMyAuthInput) int + RemovePropertyField func(childComplexity int, input gqlmodel.RemovePropertyFieldInput) int + RemovePropertyItem func(childComplexity int, input gqlmodel.RemovePropertyItemInput) int + RemoveWidget func(childComplexity int, input gqlmodel.RemoveWidgetInput) int + Signup func(childComplexity int, input gqlmodel.SignupInput) int + SyncDataset func(childComplexity int, input gqlmodel.SyncDatasetInput) int + UninstallPlugin func(childComplexity int, input gqlmodel.UninstallPluginInput) int + UnlinkPropertyValue func(childComplexity int, input gqlmodel.UnlinkPropertyValueInput) int + UpdateDatasetSchema func(childComplexity int, input gqlmodel.UpdateDatasetSchemaInput) int + UpdateLayer func(childComplexity int, input gqlmodel.UpdateLayerInput) int + UpdateMe func(childComplexity int, input gqlmodel.UpdateMeInput) int + UpdateMemberOfTeam func(childComplexity int, input gqlmodel.UpdateMemberOfTeamInput) int + UpdateProject func(childComplexity int, input gqlmodel.UpdateProjectInput) int + UpdatePropertyItems func(childComplexity int, input gqlmodel.UpdatePropertyItemInput) int + UpdatePropertyValue func(childComplexity int, input gqlmodel.UpdatePropertyValueInput) int + UpdateTeam func(childComplexity int, input gqlmodel.UpdateTeamInput) int + UpdateWidget func(childComplexity int, input gqlmodel.UpdateWidgetInput) int + UpgradePlugin func(childComplexity int, input gqlmodel.UpgradePluginInput) int + UploadFileToProperty func(childComplexity int, input gqlmodel.UploadFileToPropertyInput) int + UploadPlugin func(childComplexity int, input gqlmodel.UploadPluginInput) int } PageInfo struct { @@ -731,8 +727,8 @@ type ComplexityRoot struct { InstallablePlugins func(childComplexity int) int Layer func(childComplexity int, id id.ID) int Me func(childComplexity int) int - Node func(childComplexity int, id id.ID, typeArg graphql1.NodeType) int - Nodes 
func(childComplexity int, id []*id.ID, typeArg graphql1.NodeType) int + Node func(childComplexity int, id id.ID, typeArg gqlmodel.NodeType) int + Nodes func(childComplexity int, id []*id.ID, typeArg gqlmodel.NodeType) int Plugin func(childComplexity int, id id.PluginID) int Plugins func(childComplexity int, id []*id.PluginID) int Projects func(childComplexity int, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int @@ -918,262 +914,258 @@ type ComplexityRoot struct { } type AssetResolver interface { - Team(ctx context.Context, obj *graphql1.Asset) (*graphql1.Team, error) + Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) } type DatasetResolver interface { - Schema(ctx context.Context, obj *graphql1.Dataset) (*graphql1.DatasetSchema, error) - Name(ctx context.Context, obj *graphql1.Dataset) (*string, error) + Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) + Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) } type DatasetFieldResolver interface { - Schema(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchema, error) - Field(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchemaField, error) - ValueRef(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.Dataset, error) + Schema(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchema, error) + Field(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchemaField, error) + ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) } type DatasetSchemaResolver interface { - Datasets(ctx context.Context, obj *graphql1.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) - Scene(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.Scene, error) - RepresentativeField(ctx context.Context, obj 
*graphql1.DatasetSchema) (*graphql1.DatasetSchemaField, error) + Datasets(ctx context.Context, obj *gqlmodel.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) + Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) + RepresentativeField(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) } type DatasetSchemaFieldResolver interface { - Schema(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) - Ref(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) + Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) + Ref(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) } type InfoboxResolver interface { - Layer(ctx context.Context, obj *graphql1.Infobox) (graphql1.Layer, error) - Property(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Property, error) - LinkedDataset(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Dataset, error) - Merged(ctx context.Context, obj *graphql1.Infobox) (*graphql1.MergedInfobox, error) - Scene(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Scene, error) + Layer(ctx context.Context, obj *gqlmodel.Infobox) (gqlmodel.Layer, error) + Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Dataset, error) + Merged(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.MergedInfobox, error) + Scene(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Scene, error) } type InfoboxFieldResolver interface { - Layer(ctx context.Context, obj *graphql1.InfoboxField) (graphql1.Layer, error) - Infobox(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Infobox, error) - Property(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Property, error) 
- Plugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Plugin, error) - Extension(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.PluginExtension, error) - LinkedDataset(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Dataset, error) - Merged(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.MergedInfoboxField, error) - Scene(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Scene, error) - ScenePlugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.ScenePlugin, error) + Layer(ctx context.Context, obj *gqlmodel.InfoboxField) (gqlmodel.Layer, error) + Infobox(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Infobox, error) + Property(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.PluginExtension, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Dataset, error) + Merged(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.MergedInfoboxField, error) + Scene(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.ScenePlugin, error) } type LayerGroupResolver interface { - Parent(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.LayerGroup, error) - Property(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Property, error) - Plugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Plugin, error) - Extension(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.PluginExtension, error) - LinkedDatasetSchema(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.DatasetSchema, error) - Layers(ctx context.Context, obj *graphql1.LayerGroup) ([]graphql1.Layer, error) - Scene(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Scene, error) - 
ScenePlugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.ScenePlugin, error) + Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) + Property(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.PluginExtension, error) + LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.DatasetSchema, error) + Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) + Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) } type LayerItemResolver interface { - Parent(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) - Property(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Property, error) - Plugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Plugin, error) - Extension(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.PluginExtension, error) - LinkedDataset(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Dataset, error) - Merged(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.MergedLayer, error) - Scene(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Scene, error) - ScenePlugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.ScenePlugin, error) + Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) + Property(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.PluginExtension, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Dataset, error) + Merged(ctx context.Context, obj 
*gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) + Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) } type MergedInfoboxResolver interface { - Scene(ctx context.Context, obj *graphql1.MergedInfobox) (*graphql1.Scene, error) + Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) } type MergedInfoboxFieldResolver interface { - Plugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Plugin, error) - Extension(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.PluginExtension, error) - Scene(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Scene, error) - ScenePlugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.ScenePlugin, error) + Plugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Plugin, error) + Extension(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.PluginExtension, error) + Scene(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Scene, error) + ScenePlugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.ScenePlugin, error) } type MergedLayerResolver interface { - Original(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerItem, error) - Parent(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerGroup, error) - Scene(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.Scene, error) + Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) + Parent(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerGroup, error) + Scene(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.Scene, error) } type MergedPropertyResolver interface { - Original(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) - Parent(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, 
error) - Schema(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.PropertySchema, error) - LinkedDataset(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Dataset, error) - Groups(ctx context.Context, obj *graphql1.MergedProperty) ([]*graphql1.MergedPropertyGroup, error) + Original(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) + Parent(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) + Schema(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.PropertySchema, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Dataset, error) + Groups(ctx context.Context, obj *gqlmodel.MergedProperty) ([]*gqlmodel.MergedPropertyGroup, error) } type MergedPropertyFieldResolver interface { - Schema(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchema, error) - Field(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchemaField, error) - ActualValue(ctx context.Context, obj *graphql1.MergedPropertyField) (interface{}, error) + Schema(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchema, error) + Field(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchemaField, error) + ActualValue(ctx context.Context, obj *gqlmodel.MergedPropertyField) (interface{}, error) } type MergedPropertyGroupResolver interface { - OriginalProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) - ParentProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) - Original(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) - Parent(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) - Schema(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertySchema, error) - LinkedDataset(ctx context.Context, obj 
*graphql1.MergedPropertyGroup) (*graphql1.Dataset, error) + OriginalProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) + ParentProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) + Original(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) + Parent(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) + Schema(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertySchema, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Dataset, error) } type MutationResolver interface { - CreateAsset(ctx context.Context, input graphql1.CreateAssetInput) (*graphql1.CreateAssetPayload, error) - RemoveAsset(ctx context.Context, input graphql1.RemoveAssetInput) (*graphql1.RemoveAssetPayload, error) - Signup(ctx context.Context, input graphql1.SignupInput) (*graphql1.SignupPayload, error) - UpdateMe(ctx context.Context, input graphql1.UpdateMeInput) (*graphql1.UpdateMePayload, error) - RemoveMyAuth(ctx context.Context, input graphql1.RemoveMyAuthInput) (*graphql1.UpdateMePayload, error) - DeleteMe(ctx context.Context, input graphql1.DeleteMeInput) (*graphql1.DeleteMePayload, error) - CreateTeam(ctx context.Context, input graphql1.CreateTeamInput) (*graphql1.CreateTeamPayload, error) - DeleteTeam(ctx context.Context, input graphql1.DeleteTeamInput) (*graphql1.DeleteTeamPayload, error) - UpdateTeam(ctx context.Context, input graphql1.UpdateTeamInput) (*graphql1.UpdateTeamPayload, error) - AddMemberToTeam(ctx context.Context, input graphql1.AddMemberToTeamInput) (*graphql1.AddMemberToTeamPayload, error) - RemoveMemberFromTeam(ctx context.Context, input graphql1.RemoveMemberFromTeamInput) (*graphql1.RemoveMemberFromTeamPayload, error) - UpdateMemberOfTeam(ctx context.Context, input graphql1.UpdateMemberOfTeamInput) (*graphql1.UpdateMemberOfTeamPayload, error) - 
CreateProject(ctx context.Context, input graphql1.CreateProjectInput) (*graphql1.ProjectPayload, error) - UpdateProject(ctx context.Context, input graphql1.UpdateProjectInput) (*graphql1.ProjectPayload, error) - PublishProject(ctx context.Context, input graphql1.PublishProjectInput) (*graphql1.ProjectPayload, error) - DeleteProject(ctx context.Context, input graphql1.DeleteProjectInput) (*graphql1.DeleteProjectPayload, error) - UploadPlugin(ctx context.Context, input graphql1.UploadPluginInput) (*graphql1.UploadPluginPayload, error) - CreateScene(ctx context.Context, input graphql1.CreateSceneInput) (*graphql1.CreateScenePayload, error) - AddWidget(ctx context.Context, input graphql1.AddWidgetInput) (*graphql1.AddWidgetPayload, error) - UpdateWidget(ctx context.Context, input graphql1.UpdateWidgetInput) (*graphql1.UpdateWidgetPayload, error) - RemoveWidget(ctx context.Context, input graphql1.RemoveWidgetInput) (*graphql1.RemoveWidgetPayload, error) - InstallPlugin(ctx context.Context, input graphql1.InstallPluginInput) (*graphql1.InstallPluginPayload, error) - UninstallPlugin(ctx context.Context, input graphql1.UninstallPluginInput) (*graphql1.UninstallPluginPayload, error) - UpgradePlugin(ctx context.Context, input graphql1.UpgradePluginInput) (*graphql1.UpgradePluginPayload, error) - UpdateDatasetSchema(ctx context.Context, input graphql1.UpdateDatasetSchemaInput) (*graphql1.UpdateDatasetSchemaPayload, error) - SyncDataset(ctx context.Context, input graphql1.SyncDatasetInput) (*graphql1.SyncDatasetPayload, error) - AddDynamicDatasetSchema(ctx context.Context, input graphql1.AddDynamicDatasetSchemaInput) (*graphql1.AddDynamicDatasetSchemaPayload, error) - AddDynamicDataset(ctx context.Context, input graphql1.AddDynamicDatasetInput) (*graphql1.AddDynamicDatasetPayload, error) - RemoveDatasetSchema(ctx context.Context, input graphql1.RemoveDatasetSchemaInput) (*graphql1.RemoveDatasetSchemaPayload, error) - ImportDataset(ctx context.Context, input 
graphql1.ImportDatasetInput) (*graphql1.ImportDatasetPayload, error) - ImportDatasetFromGoogleSheet(ctx context.Context, input graphql1.ImportDatasetFromGoogleSheetInput) (*graphql1.ImportDatasetPayload, error) - AddDatasetSchema(ctx context.Context, input graphql1.AddDatasetSchemaInput) (*graphql1.AddDatasetSchemaPayload, error) - UpdatePropertyValue(ctx context.Context, input graphql1.UpdatePropertyValueInput) (*graphql1.PropertyFieldPayload, error) - UpdatePropertyValueLatLng(ctx context.Context, input graphql1.UpdatePropertyValueLatLngInput) (*graphql1.PropertyFieldPayload, error) - UpdatePropertyValueLatLngHeight(ctx context.Context, input graphql1.UpdatePropertyValueLatLngHeightInput) (*graphql1.PropertyFieldPayload, error) - UpdatePropertyValueCamera(ctx context.Context, input graphql1.UpdatePropertyValueCameraInput) (*graphql1.PropertyFieldPayload, error) - UpdatePropertyValueTypography(ctx context.Context, input graphql1.UpdatePropertyValueTypographyInput) (*graphql1.PropertyFieldPayload, error) - RemovePropertyField(ctx context.Context, input graphql1.RemovePropertyFieldInput) (*graphql1.PropertyFieldPayload, error) - UploadFileToProperty(ctx context.Context, input graphql1.UploadFileToPropertyInput) (*graphql1.PropertyFieldPayload, error) - LinkDatasetToPropertyValue(ctx context.Context, input graphql1.LinkDatasetToPropertyValueInput) (*graphql1.PropertyFieldPayload, error) - UnlinkPropertyValue(ctx context.Context, input graphql1.UnlinkPropertyValueInput) (*graphql1.PropertyFieldPayload, error) - AddPropertyItem(ctx context.Context, input graphql1.AddPropertyItemInput) (*graphql1.PropertyItemPayload, error) - MovePropertyItem(ctx context.Context, input graphql1.MovePropertyItemInput) (*graphql1.PropertyItemPayload, error) - RemovePropertyItem(ctx context.Context, input graphql1.RemovePropertyItemInput) (*graphql1.PropertyItemPayload, error) - UpdatePropertyItems(ctx context.Context, input graphql1.UpdatePropertyItemInput) (*graphql1.PropertyItemPayload, 
error) - AddLayerItem(ctx context.Context, input graphql1.AddLayerItemInput) (*graphql1.AddLayerItemPayload, error) - AddLayerGroup(ctx context.Context, input graphql1.AddLayerGroupInput) (*graphql1.AddLayerGroupPayload, error) - RemoveLayer(ctx context.Context, input graphql1.RemoveLayerInput) (*graphql1.RemoveLayerPayload, error) - UpdateLayer(ctx context.Context, input graphql1.UpdateLayerInput) (*graphql1.UpdateLayerPayload, error) - MoveLayer(ctx context.Context, input graphql1.MoveLayerInput) (*graphql1.MoveLayerPayload, error) - CreateInfobox(ctx context.Context, input graphql1.CreateInfoboxInput) (*graphql1.CreateInfoboxPayload, error) - RemoveInfobox(ctx context.Context, input graphql1.RemoveInfoboxInput) (*graphql1.RemoveInfoboxPayload, error) - AddInfoboxField(ctx context.Context, input graphql1.AddInfoboxFieldInput) (*graphql1.AddInfoboxFieldPayload, error) - MoveInfoboxField(ctx context.Context, input graphql1.MoveInfoboxFieldInput) (*graphql1.MoveInfoboxFieldPayload, error) - RemoveInfoboxField(ctx context.Context, input graphql1.RemoveInfoboxFieldInput) (*graphql1.RemoveInfoboxFieldPayload, error) - ImportLayer(ctx context.Context, input graphql1.ImportLayerInput) (*graphql1.ImportLayerPayload, error) + CreateAsset(ctx context.Context, input gqlmodel.CreateAssetInput) (*gqlmodel.CreateAssetPayload, error) + RemoveAsset(ctx context.Context, input gqlmodel.RemoveAssetInput) (*gqlmodel.RemoveAssetPayload, error) + Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) + UpdateMe(ctx context.Context, input gqlmodel.UpdateMeInput) (*gqlmodel.UpdateMePayload, error) + RemoveMyAuth(ctx context.Context, input gqlmodel.RemoveMyAuthInput) (*gqlmodel.UpdateMePayload, error) + DeleteMe(ctx context.Context, input gqlmodel.DeleteMeInput) (*gqlmodel.DeleteMePayload, error) + CreateTeam(ctx context.Context, input gqlmodel.CreateTeamInput) (*gqlmodel.CreateTeamPayload, error) + DeleteTeam(ctx context.Context, input 
gqlmodel.DeleteTeamInput) (*gqlmodel.DeleteTeamPayload, error) + UpdateTeam(ctx context.Context, input gqlmodel.UpdateTeamInput) (*gqlmodel.UpdateTeamPayload, error) + AddMemberToTeam(ctx context.Context, input gqlmodel.AddMemberToTeamInput) (*gqlmodel.AddMemberToTeamPayload, error) + RemoveMemberFromTeam(ctx context.Context, input gqlmodel.RemoveMemberFromTeamInput) (*gqlmodel.RemoveMemberFromTeamPayload, error) + UpdateMemberOfTeam(ctx context.Context, input gqlmodel.UpdateMemberOfTeamInput) (*gqlmodel.UpdateMemberOfTeamPayload, error) + CreateProject(ctx context.Context, input gqlmodel.CreateProjectInput) (*gqlmodel.ProjectPayload, error) + UpdateProject(ctx context.Context, input gqlmodel.UpdateProjectInput) (*gqlmodel.ProjectPayload, error) + PublishProject(ctx context.Context, input gqlmodel.PublishProjectInput) (*gqlmodel.ProjectPayload, error) + DeleteProject(ctx context.Context, input gqlmodel.DeleteProjectInput) (*gqlmodel.DeleteProjectPayload, error) + CreateScene(ctx context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) + AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) + UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) + RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) + InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) + UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) + UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) + UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) + UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) + SyncDataset(ctx 
context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) + AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) + AddDynamicDataset(ctx context.Context, input gqlmodel.AddDynamicDatasetInput) (*gqlmodel.AddDynamicDatasetPayload, error) + RemoveDatasetSchema(ctx context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, error) + ImportDataset(ctx context.Context, input gqlmodel.ImportDatasetInput) (*gqlmodel.ImportDatasetPayload, error) + ImportDatasetFromGoogleSheet(ctx context.Context, input gqlmodel.ImportDatasetFromGoogleSheetInput) (*gqlmodel.ImportDatasetPayload, error) + AddDatasetSchema(ctx context.Context, input gqlmodel.AddDatasetSchemaInput) (*gqlmodel.AddDatasetSchemaPayload, error) + UpdatePropertyValue(ctx context.Context, input gqlmodel.UpdatePropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) + RemovePropertyField(ctx context.Context, input gqlmodel.RemovePropertyFieldInput) (*gqlmodel.PropertyFieldPayload, error) + UploadFileToProperty(ctx context.Context, input gqlmodel.UploadFileToPropertyInput) (*gqlmodel.PropertyFieldPayload, error) + LinkDatasetToPropertyValue(ctx context.Context, input gqlmodel.LinkDatasetToPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) + UnlinkPropertyValue(ctx context.Context, input gqlmodel.UnlinkPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) + AddPropertyItem(ctx context.Context, input gqlmodel.AddPropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + MovePropertyItem(ctx context.Context, input gqlmodel.MovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + RemovePropertyItem(ctx context.Context, input gqlmodel.RemovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + UpdatePropertyItems(ctx context.Context, input gqlmodel.UpdatePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) + AddLayerItem(ctx 
context.Context, input gqlmodel.AddLayerItemInput) (*gqlmodel.AddLayerItemPayload, error) + AddLayerGroup(ctx context.Context, input gqlmodel.AddLayerGroupInput) (*gqlmodel.AddLayerGroupPayload, error) + RemoveLayer(ctx context.Context, input gqlmodel.RemoveLayerInput) (*gqlmodel.RemoveLayerPayload, error) + UpdateLayer(ctx context.Context, input gqlmodel.UpdateLayerInput) (*gqlmodel.UpdateLayerPayload, error) + MoveLayer(ctx context.Context, input gqlmodel.MoveLayerInput) (*gqlmodel.MoveLayerPayload, error) + CreateInfobox(ctx context.Context, input gqlmodel.CreateInfoboxInput) (*gqlmodel.CreateInfoboxPayload, error) + RemoveInfobox(ctx context.Context, input gqlmodel.RemoveInfoboxInput) (*gqlmodel.RemoveInfoboxPayload, error) + AddInfoboxField(ctx context.Context, input gqlmodel.AddInfoboxFieldInput) (*gqlmodel.AddInfoboxFieldPayload, error) + MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) + RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) + ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) } type PluginResolver interface { - Scene(ctx context.Context, obj *graphql1.Plugin) (*graphql1.Scene, error) + Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) - PropertySchema(ctx context.Context, obj *graphql1.Plugin) (*graphql1.PropertySchema, error) + PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, error) } type PluginExtensionResolver interface { - Plugin(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.Plugin, error) - SceneWidget(ctx context.Context, obj *graphql1.PluginExtension, sceneID id.ID) (*graphql1.SceneWidget, error) - PropertySchema(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.PropertySchema, error) - TranslatedName(ctx context.Context, obj *graphql1.PluginExtension, 
lang *string) (string, error) - TranslatedDescription(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) + Plugin(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.Plugin, error) + SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID id.ID) (*gqlmodel.SceneWidget, error) + PropertySchema(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.PropertySchema, error) + TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) } type ProjectResolver interface { - Team(ctx context.Context, obj *graphql1.Project) (*graphql1.Team, error) - Scene(ctx context.Context, obj *graphql1.Project) (*graphql1.Scene, error) + Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) + Scene(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Scene, error) } type PropertyResolver interface { - Schema(ctx context.Context, obj *graphql1.Property) (*graphql1.PropertySchema, error) - Layer(ctx context.Context, obj *graphql1.Property) (graphql1.Layer, error) - Merged(ctx context.Context, obj *graphql1.Property) (*graphql1.MergedProperty, error) + Schema(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.PropertySchema, error) + Layer(ctx context.Context, obj *gqlmodel.Property) (gqlmodel.Layer, error) + Merged(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.MergedProperty, error) } type PropertyFieldResolver interface { - Parent(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.Property, error) - Schema(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchema, error) - Field(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchemaField, error) - ActualValue(ctx context.Context, obj *graphql1.PropertyField) (interface{}, error) + Parent(ctx context.Context, obj *gqlmodel.PropertyField) 
(*gqlmodel.Property, error) + Schema(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchema, error) + Field(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchemaField, error) + ActualValue(ctx context.Context, obj *gqlmodel.PropertyField) (interface{}, error) } type PropertyFieldLinkResolver interface { - Dataset(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.Dataset, error) - DatasetField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetField, error) - DatasetSchema(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchema, error) - DatasetSchemaField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchemaField, error) + Dataset(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.Dataset, error) + DatasetField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetField, error) + DatasetSchema(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchema, error) + DatasetSchemaField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchemaField, error) } type PropertyGroupResolver interface { - Schema(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchema, error) - SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchemaGroup, error) + Schema(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchema, error) + SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchemaGroup, error) } type PropertyGroupListResolver interface { - Schema(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchema, error) - SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchemaGroup, error) + Schema(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchema, error) + SchemaGroup(ctx context.Context, obj 
*gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchemaGroup, error) } type PropertyLinkableFieldsResolver interface { - LatlngField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) - URLField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) - Schema(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchema, error) + LatlngField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) + URLField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) + Schema(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchema, error) } type PropertySchemaFieldResolver interface { - TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) - TranslatedName(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) - TranslatedDescription(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) + TranslatedName(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) } type PropertySchemaFieldChoiceResolver interface { - TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) - TranslatedLabel(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) + TranslatedLabel(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) } type PropertySchemaGroupResolver interface { - Schema(ctx 
context.Context, obj *graphql1.PropertySchemaGroup) (*graphql1.PropertySchema, error) - TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaGroup, lang *string) (string, error) + Schema(ctx context.Context, obj *gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *string) (string, error) } type QueryResolver interface { - Me(ctx context.Context) (*graphql1.User, error) - Node(ctx context.Context, id id.ID, typeArg graphql1.NodeType) (graphql1.Node, error) - Nodes(ctx context.Context, id []*id.ID, typeArg graphql1.NodeType) ([]graphql1.Node, error) - PropertySchema(ctx context.Context, id id.PropertySchemaID) (*graphql1.PropertySchema, error) - PropertySchemas(ctx context.Context, id []*id.PropertySchemaID) ([]*graphql1.PropertySchema, error) - Plugin(ctx context.Context, id id.PluginID) (*graphql1.Plugin, error) - Plugins(ctx context.Context, id []*id.PluginID) ([]*graphql1.Plugin, error) - Layer(ctx context.Context, id id.ID) (graphql1.Layer, error) - Scene(ctx context.Context, projectID id.ID) (*graphql1.Scene, error) - Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) - Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) - DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) - Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) - SceneLock(ctx context.Context, sceneID id.ID) (*graphql1.SceneLockMode, error) - DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*graphql1.DatasetSchema, error) - SearchUser(ctx 
context.Context, nameOrEmail string) (*graphql1.SearchedUser, error) - CheckProjectAlias(ctx context.Context, alias string) (*graphql1.CheckProjectAliasPayload, error) - InstallablePlugins(ctx context.Context) ([]*graphql1.PluginMetadata, error) + Me(ctx context.Context) (*gqlmodel.User, error) + Node(ctx context.Context, id id.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) + Nodes(ctx context.Context, id []*id.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) + PropertySchema(ctx context.Context, id id.PropertySchemaID) (*gqlmodel.PropertySchema, error) + PropertySchemas(ctx context.Context, id []*id.PropertySchemaID) ([]*gqlmodel.PropertySchema, error) + Plugin(ctx context.Context, id id.PluginID) (*gqlmodel.Plugin, error) + Plugins(ctx context.Context, id []*id.PluginID) ([]*gqlmodel.Plugin, error) + Layer(ctx context.Context, id id.ID) (gqlmodel.Layer, error) + Scene(ctx context.Context, projectID id.ID) (*gqlmodel.Scene, error) + Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) + Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) + DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) + Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) + SceneLock(ctx context.Context, sceneID id.ID) (*gqlmodel.SceneLockMode, error) + DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*gqlmodel.DatasetSchema, error) + SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) + CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.CheckProjectAliasPayload, error) + InstallablePlugins(ctx 
context.Context) ([]*gqlmodel.PluginMetadata, error) } type SceneResolver interface { - Project(ctx context.Context, obj *graphql1.Scene) (*graphql1.Project, error) - Team(ctx context.Context, obj *graphql1.Scene) (*graphql1.Team, error) - Property(ctx context.Context, obj *graphql1.Scene) (*graphql1.Property, error) - RootLayer(ctx context.Context, obj *graphql1.Scene) (*graphql1.LayerGroup, error) - LockMode(ctx context.Context, obj *graphql1.Scene) (graphql1.SceneLockMode, error) - DatasetSchemas(ctx context.Context, obj *graphql1.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) + Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) + Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) + Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) + RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, error) + LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlmodel.SceneLockMode, error) + DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) } type ScenePluginResolver interface { - Plugin(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Plugin, error) - Property(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) + Property(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Property, error) } type SceneWidgetResolver interface { - Plugin(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Plugin, error) - Extension(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.PluginExtension, error) - Property(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Property, error) + Plugin(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Plugin, error) 
+ Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) + Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) } type TeamResolver interface { - Assets(ctx context.Context, obj *graphql1.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) - Projects(ctx context.Context, obj *graphql1.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) + Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) + Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) } type TeamMemberResolver interface { - User(ctx context.Context, obj *graphql1.TeamMember) (*graphql1.User, error) + User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) } type UserResolver interface { - Teams(ctx context.Context, obj *graphql1.User) ([]*graphql1.Team, error) - MyTeam(ctx context.Context, obj *graphql1.User) (*graphql1.Team, error) + Teams(ctx context.Context, obj *gqlmodel.User) ([]*gqlmodel.Team, error) + MyTeam(ctx context.Context, obj *gqlmodel.User) (*gqlmodel.Team, error) } type executableSchema struct { @@ -2774,7 +2766,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddDatasetSchema(childComplexity, args["input"].(graphql1.AddDatasetSchemaInput)), true + return e.complexity.Mutation.AddDatasetSchema(childComplexity, args["input"].(gqlmodel.AddDatasetSchemaInput)), true case "Mutation.addDynamicDataset": if e.complexity.Mutation.AddDynamicDataset == nil { @@ -2786,7 +2778,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in 
return 0, false } - return e.complexity.Mutation.AddDynamicDataset(childComplexity, args["input"].(graphql1.AddDynamicDatasetInput)), true + return e.complexity.Mutation.AddDynamicDataset(childComplexity, args["input"].(gqlmodel.AddDynamicDatasetInput)), true case "Mutation.addDynamicDatasetSchema": if e.complexity.Mutation.AddDynamicDatasetSchema == nil { @@ -2798,7 +2790,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddDynamicDatasetSchema(childComplexity, args["input"].(graphql1.AddDynamicDatasetSchemaInput)), true + return e.complexity.Mutation.AddDynamicDatasetSchema(childComplexity, args["input"].(gqlmodel.AddDynamicDatasetSchemaInput)), true case "Mutation.addInfoboxField": if e.complexity.Mutation.AddInfoboxField == nil { @@ -2810,7 +2802,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddInfoboxField(childComplexity, args["input"].(graphql1.AddInfoboxFieldInput)), true + return e.complexity.Mutation.AddInfoboxField(childComplexity, args["input"].(gqlmodel.AddInfoboxFieldInput)), true case "Mutation.addLayerGroup": if e.complexity.Mutation.AddLayerGroup == nil { @@ -2822,7 +2814,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddLayerGroup(childComplexity, args["input"].(graphql1.AddLayerGroupInput)), true + return e.complexity.Mutation.AddLayerGroup(childComplexity, args["input"].(gqlmodel.AddLayerGroupInput)), true case "Mutation.addLayerItem": if e.complexity.Mutation.AddLayerItem == nil { @@ -2834,7 +2826,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddLayerItem(childComplexity, args["input"].(graphql1.AddLayerItemInput)), true + return e.complexity.Mutation.AddLayerItem(childComplexity, 
args["input"].(gqlmodel.AddLayerItemInput)), true case "Mutation.addMemberToTeam": if e.complexity.Mutation.AddMemberToTeam == nil { @@ -2846,7 +2838,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddMemberToTeam(childComplexity, args["input"].(graphql1.AddMemberToTeamInput)), true + return e.complexity.Mutation.AddMemberToTeam(childComplexity, args["input"].(gqlmodel.AddMemberToTeamInput)), true case "Mutation.addPropertyItem": if e.complexity.Mutation.AddPropertyItem == nil { @@ -2858,7 +2850,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddPropertyItem(childComplexity, args["input"].(graphql1.AddPropertyItemInput)), true + return e.complexity.Mutation.AddPropertyItem(childComplexity, args["input"].(gqlmodel.AddPropertyItemInput)), true case "Mutation.addWidget": if e.complexity.Mutation.AddWidget == nil { @@ -2870,7 +2862,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.AddWidget(childComplexity, args["input"].(graphql1.AddWidgetInput)), true + return e.complexity.Mutation.AddWidget(childComplexity, args["input"].(gqlmodel.AddWidgetInput)), true case "Mutation.createAsset": if e.complexity.Mutation.CreateAsset == nil { @@ -2882,7 +2874,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.CreateAsset(childComplexity, args["input"].(graphql1.CreateAssetInput)), true + return e.complexity.Mutation.CreateAsset(childComplexity, args["input"].(gqlmodel.CreateAssetInput)), true case "Mutation.createInfobox": if e.complexity.Mutation.CreateInfobox == nil { @@ -2894,7 +2886,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return 
e.complexity.Mutation.CreateInfobox(childComplexity, args["input"].(graphql1.CreateInfoboxInput)), true + return e.complexity.Mutation.CreateInfobox(childComplexity, args["input"].(gqlmodel.CreateInfoboxInput)), true case "Mutation.createProject": if e.complexity.Mutation.CreateProject == nil { @@ -2906,7 +2898,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.CreateProject(childComplexity, args["input"].(graphql1.CreateProjectInput)), true + return e.complexity.Mutation.CreateProject(childComplexity, args["input"].(gqlmodel.CreateProjectInput)), true case "Mutation.createScene": if e.complexity.Mutation.CreateScene == nil { @@ -2918,7 +2910,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.CreateScene(childComplexity, args["input"].(graphql1.CreateSceneInput)), true + return e.complexity.Mutation.CreateScene(childComplexity, args["input"].(gqlmodel.CreateSceneInput)), true case "Mutation.createTeam": if e.complexity.Mutation.CreateTeam == nil { @@ -2930,7 +2922,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.CreateTeam(childComplexity, args["input"].(graphql1.CreateTeamInput)), true + return e.complexity.Mutation.CreateTeam(childComplexity, args["input"].(gqlmodel.CreateTeamInput)), true case "Mutation.deleteMe": if e.complexity.Mutation.DeleteMe == nil { @@ -2942,7 +2934,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.DeleteMe(childComplexity, args["input"].(graphql1.DeleteMeInput)), true + return e.complexity.Mutation.DeleteMe(childComplexity, args["input"].(gqlmodel.DeleteMeInput)), true case "Mutation.deleteProject": if e.complexity.Mutation.DeleteProject == nil { @@ -2954,7 +2946,7 @@ func (e *executableSchema) 
Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.DeleteProject(childComplexity, args["input"].(graphql1.DeleteProjectInput)), true + return e.complexity.Mutation.DeleteProject(childComplexity, args["input"].(gqlmodel.DeleteProjectInput)), true case "Mutation.deleteTeam": if e.complexity.Mutation.DeleteTeam == nil { @@ -2966,7 +2958,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.DeleteTeam(childComplexity, args["input"].(graphql1.DeleteTeamInput)), true + return e.complexity.Mutation.DeleteTeam(childComplexity, args["input"].(gqlmodel.DeleteTeamInput)), true case "Mutation.importDataset": if e.complexity.Mutation.ImportDataset == nil { @@ -2978,7 +2970,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.ImportDataset(childComplexity, args["input"].(graphql1.ImportDatasetInput)), true + return e.complexity.Mutation.ImportDataset(childComplexity, args["input"].(gqlmodel.ImportDatasetInput)), true case "Mutation.importDatasetFromGoogleSheet": if e.complexity.Mutation.ImportDatasetFromGoogleSheet == nil { @@ -2990,7 +2982,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.ImportDatasetFromGoogleSheet(childComplexity, args["input"].(graphql1.ImportDatasetFromGoogleSheetInput)), true + return e.complexity.Mutation.ImportDatasetFromGoogleSheet(childComplexity, args["input"].(gqlmodel.ImportDatasetFromGoogleSheetInput)), true case "Mutation.importLayer": if e.complexity.Mutation.ImportLayer == nil { @@ -3002,7 +2994,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.ImportLayer(childComplexity, args["input"].(graphql1.ImportLayerInput)), true + return 
e.complexity.Mutation.ImportLayer(childComplexity, args["input"].(gqlmodel.ImportLayerInput)), true case "Mutation.installPlugin": if e.complexity.Mutation.InstallPlugin == nil { @@ -3014,7 +3006,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.InstallPlugin(childComplexity, args["input"].(graphql1.InstallPluginInput)), true + return e.complexity.Mutation.InstallPlugin(childComplexity, args["input"].(gqlmodel.InstallPluginInput)), true case "Mutation.linkDatasetToPropertyValue": if e.complexity.Mutation.LinkDatasetToPropertyValue == nil { @@ -3026,7 +3018,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.LinkDatasetToPropertyValue(childComplexity, args["input"].(graphql1.LinkDatasetToPropertyValueInput)), true + return e.complexity.Mutation.LinkDatasetToPropertyValue(childComplexity, args["input"].(gqlmodel.LinkDatasetToPropertyValueInput)), true case "Mutation.moveInfoboxField": if e.complexity.Mutation.MoveInfoboxField == nil { @@ -3038,7 +3030,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.MoveInfoboxField(childComplexity, args["input"].(graphql1.MoveInfoboxFieldInput)), true + return e.complexity.Mutation.MoveInfoboxField(childComplexity, args["input"].(gqlmodel.MoveInfoboxFieldInput)), true case "Mutation.moveLayer": if e.complexity.Mutation.MoveLayer == nil { @@ -3050,7 +3042,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.MoveLayer(childComplexity, args["input"].(graphql1.MoveLayerInput)), true + return e.complexity.Mutation.MoveLayer(childComplexity, args["input"].(gqlmodel.MoveLayerInput)), true case "Mutation.movePropertyItem": if e.complexity.Mutation.MovePropertyItem == nil { @@ -3062,7 +3054,7 @@ func (e 
*executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.MovePropertyItem(childComplexity, args["input"].(graphql1.MovePropertyItemInput)), true + return e.complexity.Mutation.MovePropertyItem(childComplexity, args["input"].(gqlmodel.MovePropertyItemInput)), true case "Mutation.publishProject": if e.complexity.Mutation.PublishProject == nil { @@ -3074,7 +3066,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.PublishProject(childComplexity, args["input"].(graphql1.PublishProjectInput)), true + return e.complexity.Mutation.PublishProject(childComplexity, args["input"].(gqlmodel.PublishProjectInput)), true case "Mutation.removeAsset": if e.complexity.Mutation.RemoveAsset == nil { @@ -3086,7 +3078,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveAsset(childComplexity, args["input"].(graphql1.RemoveAssetInput)), true + return e.complexity.Mutation.RemoveAsset(childComplexity, args["input"].(gqlmodel.RemoveAssetInput)), true case "Mutation.removeDatasetSchema": if e.complexity.Mutation.RemoveDatasetSchema == nil { @@ -3098,7 +3090,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveDatasetSchema(childComplexity, args["input"].(graphql1.RemoveDatasetSchemaInput)), true + return e.complexity.Mutation.RemoveDatasetSchema(childComplexity, args["input"].(gqlmodel.RemoveDatasetSchemaInput)), true case "Mutation.removeInfobox": if e.complexity.Mutation.RemoveInfobox == nil { @@ -3110,7 +3102,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveInfobox(childComplexity, args["input"].(graphql1.RemoveInfoboxInput)), true + return 
e.complexity.Mutation.RemoveInfobox(childComplexity, args["input"].(gqlmodel.RemoveInfoboxInput)), true case "Mutation.removeInfoboxField": if e.complexity.Mutation.RemoveInfoboxField == nil { @@ -3122,7 +3114,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveInfoboxField(childComplexity, args["input"].(graphql1.RemoveInfoboxFieldInput)), true + return e.complexity.Mutation.RemoveInfoboxField(childComplexity, args["input"].(gqlmodel.RemoveInfoboxFieldInput)), true case "Mutation.removeLayer": if e.complexity.Mutation.RemoveLayer == nil { @@ -3134,7 +3126,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveLayer(childComplexity, args["input"].(graphql1.RemoveLayerInput)), true + return e.complexity.Mutation.RemoveLayer(childComplexity, args["input"].(gqlmodel.RemoveLayerInput)), true case "Mutation.removeMemberFromTeam": if e.complexity.Mutation.RemoveMemberFromTeam == nil { @@ -3146,7 +3138,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveMemberFromTeam(childComplexity, args["input"].(graphql1.RemoveMemberFromTeamInput)), true + return e.complexity.Mutation.RemoveMemberFromTeam(childComplexity, args["input"].(gqlmodel.RemoveMemberFromTeamInput)), true case "Mutation.removeMyAuth": if e.complexity.Mutation.RemoveMyAuth == nil { @@ -3158,7 +3150,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveMyAuth(childComplexity, args["input"].(graphql1.RemoveMyAuthInput)), true + return e.complexity.Mutation.RemoveMyAuth(childComplexity, args["input"].(gqlmodel.RemoveMyAuthInput)), true case "Mutation.removePropertyField": if e.complexity.Mutation.RemovePropertyField == nil { @@ -3170,7 +3162,7 @@ func (e 
*executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemovePropertyField(childComplexity, args["input"].(graphql1.RemovePropertyFieldInput)), true + return e.complexity.Mutation.RemovePropertyField(childComplexity, args["input"].(gqlmodel.RemovePropertyFieldInput)), true case "Mutation.removePropertyItem": if e.complexity.Mutation.RemovePropertyItem == nil { @@ -3182,7 +3174,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemovePropertyItem(childComplexity, args["input"].(graphql1.RemovePropertyItemInput)), true + return e.complexity.Mutation.RemovePropertyItem(childComplexity, args["input"].(gqlmodel.RemovePropertyItemInput)), true case "Mutation.removeWidget": if e.complexity.Mutation.RemoveWidget == nil { @@ -3194,7 +3186,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.RemoveWidget(childComplexity, args["input"].(graphql1.RemoveWidgetInput)), true + return e.complexity.Mutation.RemoveWidget(childComplexity, args["input"].(gqlmodel.RemoveWidgetInput)), true case "Mutation.signup": if e.complexity.Mutation.Signup == nil { @@ -3206,7 +3198,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.Signup(childComplexity, args["input"].(graphql1.SignupInput)), true + return e.complexity.Mutation.Signup(childComplexity, args["input"].(gqlmodel.SignupInput)), true case "Mutation.syncDataset": if e.complexity.Mutation.SyncDataset == nil { @@ -3218,7 +3210,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.SyncDataset(childComplexity, args["input"].(graphql1.SyncDatasetInput)), true + return e.complexity.Mutation.SyncDataset(childComplexity, 
args["input"].(gqlmodel.SyncDatasetInput)), true case "Mutation.uninstallPlugin": if e.complexity.Mutation.UninstallPlugin == nil { @@ -3230,7 +3222,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UninstallPlugin(childComplexity, args["input"].(graphql1.UninstallPluginInput)), true + return e.complexity.Mutation.UninstallPlugin(childComplexity, args["input"].(gqlmodel.UninstallPluginInput)), true case "Mutation.unlinkPropertyValue": if e.complexity.Mutation.UnlinkPropertyValue == nil { @@ -3242,7 +3234,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UnlinkPropertyValue(childComplexity, args["input"].(graphql1.UnlinkPropertyValueInput)), true + return e.complexity.Mutation.UnlinkPropertyValue(childComplexity, args["input"].(gqlmodel.UnlinkPropertyValueInput)), true case "Mutation.updateDatasetSchema": if e.complexity.Mutation.UpdateDatasetSchema == nil { @@ -3254,7 +3246,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdateDatasetSchema(childComplexity, args["input"].(graphql1.UpdateDatasetSchemaInput)), true + return e.complexity.Mutation.UpdateDatasetSchema(childComplexity, args["input"].(gqlmodel.UpdateDatasetSchemaInput)), true case "Mutation.updateLayer": if e.complexity.Mutation.UpdateLayer == nil { @@ -3266,7 +3258,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdateLayer(childComplexity, args["input"].(graphql1.UpdateLayerInput)), true + return e.complexity.Mutation.UpdateLayer(childComplexity, args["input"].(gqlmodel.UpdateLayerInput)), true case "Mutation.updateMe": if e.complexity.Mutation.UpdateMe == nil { @@ -3278,7 +3270,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in 
return 0, false } - return e.complexity.Mutation.UpdateMe(childComplexity, args["input"].(graphql1.UpdateMeInput)), true + return e.complexity.Mutation.UpdateMe(childComplexity, args["input"].(gqlmodel.UpdateMeInput)), true case "Mutation.updateMemberOfTeam": if e.complexity.Mutation.UpdateMemberOfTeam == nil { @@ -3290,7 +3282,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdateMemberOfTeam(childComplexity, args["input"].(graphql1.UpdateMemberOfTeamInput)), true + return e.complexity.Mutation.UpdateMemberOfTeam(childComplexity, args["input"].(gqlmodel.UpdateMemberOfTeamInput)), true case "Mutation.updateProject": if e.complexity.Mutation.UpdateProject == nil { @@ -3302,7 +3294,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdateProject(childComplexity, args["input"].(graphql1.UpdateProjectInput)), true + return e.complexity.Mutation.UpdateProject(childComplexity, args["input"].(gqlmodel.UpdateProjectInput)), true case "Mutation.updatePropertyItems": if e.complexity.Mutation.UpdatePropertyItems == nil { @@ -3314,7 +3306,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdatePropertyItems(childComplexity, args["input"].(graphql1.UpdatePropertyItemInput)), true + return e.complexity.Mutation.UpdatePropertyItems(childComplexity, args["input"].(gqlmodel.UpdatePropertyItemInput)), true case "Mutation.updatePropertyValue": if e.complexity.Mutation.UpdatePropertyValue == nil { @@ -3326,55 +3318,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdatePropertyValue(childComplexity, args["input"].(graphql1.UpdatePropertyValueInput)), true - - case "Mutation.updatePropertyValueCamera": - if 
e.complexity.Mutation.UpdatePropertyValueCamera == nil { - break - } - - args, err := ec.field_Mutation_updatePropertyValueCamera_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.Mutation.UpdatePropertyValueCamera(childComplexity, args["input"].(graphql1.UpdatePropertyValueCameraInput)), true - - case "Mutation.updatePropertyValueLatLng": - if e.complexity.Mutation.UpdatePropertyValueLatLng == nil { - break - } - - args, err := ec.field_Mutation_updatePropertyValueLatLng_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.Mutation.UpdatePropertyValueLatLng(childComplexity, args["input"].(graphql1.UpdatePropertyValueLatLngInput)), true - - case "Mutation.updatePropertyValueLatLngHeight": - if e.complexity.Mutation.UpdatePropertyValueLatLngHeight == nil { - break - } - - args, err := ec.field_Mutation_updatePropertyValueLatLngHeight_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.Mutation.UpdatePropertyValueLatLngHeight(childComplexity, args["input"].(graphql1.UpdatePropertyValueLatLngHeightInput)), true - - case "Mutation.updatePropertyValueTypography": - if e.complexity.Mutation.UpdatePropertyValueTypography == nil { - break - } - - args, err := ec.field_Mutation_updatePropertyValueTypography_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.Mutation.UpdatePropertyValueTypography(childComplexity, args["input"].(graphql1.UpdatePropertyValueTypographyInput)), true + return e.complexity.Mutation.UpdatePropertyValue(childComplexity, args["input"].(gqlmodel.UpdatePropertyValueInput)), true case "Mutation.updateTeam": if e.complexity.Mutation.UpdateTeam == nil { @@ -3386,7 +3330,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdateTeam(childComplexity, args["input"].(graphql1.UpdateTeamInput)), true + return 
e.complexity.Mutation.UpdateTeam(childComplexity, args["input"].(gqlmodel.UpdateTeamInput)), true case "Mutation.updateWidget": if e.complexity.Mutation.UpdateWidget == nil { @@ -3398,7 +3342,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpdateWidget(childComplexity, args["input"].(graphql1.UpdateWidgetInput)), true + return e.complexity.Mutation.UpdateWidget(childComplexity, args["input"].(gqlmodel.UpdateWidgetInput)), true case "Mutation.upgradePlugin": if e.complexity.Mutation.UpgradePlugin == nil { @@ -3410,7 +3354,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UpgradePlugin(childComplexity, args["input"].(graphql1.UpgradePluginInput)), true + return e.complexity.Mutation.UpgradePlugin(childComplexity, args["input"].(gqlmodel.UpgradePluginInput)), true case "Mutation.uploadFileToProperty": if e.complexity.Mutation.UploadFileToProperty == nil { @@ -3422,7 +3366,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UploadFileToProperty(childComplexity, args["input"].(graphql1.UploadFileToPropertyInput)), true + return e.complexity.Mutation.UploadFileToProperty(childComplexity, args["input"].(gqlmodel.UploadFileToPropertyInput)), true case "Mutation.uploadPlugin": if e.complexity.Mutation.UploadPlugin == nil { @@ -3434,7 +3378,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Mutation.UploadPlugin(childComplexity, args["input"].(graphql1.UploadPluginInput)), true + return e.complexity.Mutation.UploadPlugin(childComplexity, args["input"].(gqlmodel.UploadPluginInput)), true case "PageInfo.endCursor": if e.complexity.PageInfo.EndCursor == nil { @@ -4705,7 +4649,7 @@ func (e *executableSchema) Complexity(typeName, field string, 
childComplexity in return 0, false } - return e.complexity.Query.Node(childComplexity, args["id"].(id.ID), args["type"].(graphql1.NodeType)), true + return e.complexity.Query.Node(childComplexity, args["id"].(id.ID), args["type"].(gqlmodel.NodeType)), true case "Query.nodes": if e.complexity.Query.Nodes == nil { @@ -4717,7 +4661,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Nodes(childComplexity, args["id"].([]*id.ID), args["type"].(graphql1.NodeType)), true + return e.complexity.Query.Nodes(childComplexity, args["id"].([]*id.ID), args["type"].(gqlmodel.NodeType)), true case "Query.plugin": if e.complexity.Query.Plugin == nil { @@ -6408,54 +6352,6 @@ input UpdatePropertyValueInput { type: ValueType! } -input UpdatePropertyValueLatLngInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! -} - -input UpdatePropertyValueLatLngHeightInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - height: Float! -} - -input UpdatePropertyValueCameraInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - altitude: Float! - heading: Float! - pitch: Float! - roll: Float! - fov: Float! -} - -input UpdatePropertyValueTypographyInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - fontFamily: String - fontWeight: String - fontSize: Int - color: String - textAlign: TextAlign - bold: Boolean - italic: Boolean - underline: Boolean -} - input RemovePropertyFieldInput { propertyId: ID! 
schemaItemId: PropertySchemaFieldID @@ -6831,6 +6727,7 @@ type AddDatasetSchemaPayload { # Connection enum NodeType { + ASSET USER TEAM PROJECT @@ -6969,9 +6866,6 @@ type Mutation { publishProject(input: PublishProjectInput!): ProjectPayload deleteProject(input: DeleteProjectInput!): DeleteProjectPayload - # Plugin - uploadPlugin(input: UploadPluginInput!): UploadPluginPayload - # Scene createScene(input: CreateSceneInput!): CreateScenePayload addWidget(input: AddWidgetInput!): AddWidgetPayload @@ -6979,6 +6873,7 @@ type Mutation { removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload installPlugin(input: InstallPluginInput!): InstallPluginPayload uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload # Dataset @@ -6999,18 +6894,6 @@ type Mutation { # Property updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload - updatePropertyValueLatLng( - input: UpdatePropertyValueLatLngInput! - ): PropertyFieldPayload - updatePropertyValueLatLngHeight( - input: UpdatePropertyValueLatLngHeightInput! - ): PropertyFieldPayload - updatePropertyValueCamera( - input: UpdatePropertyValueCameraInput! - ): PropertyFieldPayload - updatePropertyValueTypography( - input: UpdatePropertyValueTypographyInput! 
- ): PropertyFieldPayload removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload linkDatasetToPropertyValue( @@ -7093,10 +6976,10 @@ func (ec *executionContext) field_DatasetSchema_datasets_args(ctx context.Contex func (ec *executionContext) field_Mutation_addDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddDatasetSchemaInput + var arg0 gqlmodel.AddDatasetSchemaInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaInput(ctx, tmp) + arg0, err = ec.unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaInput(ctx, tmp) if err != nil { return nil, err } @@ -7108,10 +6991,10 @@ func (ec *executionContext) field_Mutation_addDatasetSchema_args(ctx context.Con func (ec *executionContext) field_Mutation_addDynamicDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddDynamicDatasetSchemaInput + var arg0 gqlmodel.AddDynamicDatasetSchemaInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaInput(ctx, tmp) + arg0, err = ec.unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaInput(ctx, tmp) if err != nil { return nil, err } @@ -7123,10 +7006,10 @@ func (ec 
*executionContext) field_Mutation_addDynamicDatasetSchema_args(ctx cont func (ec *executionContext) field_Mutation_addDynamicDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddDynamicDatasetInput + var arg0 gqlmodel.AddDynamicDatasetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetInput(ctx, tmp) + arg0, err = ec.unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetInput(ctx, tmp) if err != nil { return nil, err } @@ -7138,10 +7021,10 @@ func (ec *executionContext) field_Mutation_addDynamicDataset_args(ctx context.Co func (ec *executionContext) field_Mutation_addInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddInfoboxFieldInput + var arg0 gqlmodel.AddInfoboxFieldInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldInput(ctx, tmp) + arg0, err = ec.unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldInput(ctx, tmp) if err != nil { return nil, err } @@ -7153,10 +7036,10 @@ func (ec *executionContext) field_Mutation_addInfoboxField_args(ctx context.Cont func (ec *executionContext) field_Mutation_addLayerGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 
graphql1.AddLayerGroupInput + var arg0 gqlmodel.AddLayerGroupInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupInput(ctx, tmp) + arg0, err = ec.unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupInput(ctx, tmp) if err != nil { return nil, err } @@ -7168,10 +7051,10 @@ func (ec *executionContext) field_Mutation_addLayerGroup_args(ctx context.Contex func (ec *executionContext) field_Mutation_addLayerItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddLayerItemInput + var arg0 gqlmodel.AddLayerItemInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemInput(ctx, tmp) + arg0, err = ec.unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemInput(ctx, tmp) if err != nil { return nil, err } @@ -7183,10 +7066,10 @@ func (ec *executionContext) field_Mutation_addLayerItem_args(ctx context.Context func (ec *executionContext) field_Mutation_addMemberToTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddMemberToTeamInput + var arg0 gqlmodel.AddMemberToTeamInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamInput(ctx, tmp) + arg0, 
err = ec.unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamInput(ctx, tmp) if err != nil { return nil, err } @@ -7198,10 +7081,10 @@ func (ec *executionContext) field_Mutation_addMemberToTeam_args(ctx context.Cont func (ec *executionContext) field_Mutation_addPropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddPropertyItemInput + var arg0 gqlmodel.AddPropertyItemInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddPropertyItemInput(ctx, tmp) + arg0, err = ec.unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddPropertyItemInput(ctx, tmp) if err != nil { return nil, err } @@ -7213,10 +7096,10 @@ func (ec *executionContext) field_Mutation_addPropertyItem_args(ctx context.Cont func (ec *executionContext) field_Mutation_addWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.AddWidgetInput + var arg0 gqlmodel.AddWidgetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetInput(ctx, tmp) + arg0, err = ec.unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetInput(ctx, tmp) if err != nil { return nil, err } @@ -7228,10 +7111,10 @@ func (ec *executionContext) field_Mutation_addWidget_args(ctx context.Context, r func (ec *executionContext) 
field_Mutation_createAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.CreateAssetInput + var arg0 gqlmodel.CreateAssetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetInput(ctx, tmp) + arg0, err = ec.unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetInput(ctx, tmp) if err != nil { return nil, err } @@ -7243,10 +7126,10 @@ func (ec *executionContext) field_Mutation_createAsset_args(ctx context.Context, func (ec *executionContext) field_Mutation_createInfobox_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.CreateInfoboxInput + var arg0 gqlmodel.CreateInfoboxInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxInput(ctx, tmp) + arg0, err = ec.unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxInput(ctx, tmp) if err != nil { return nil, err } @@ -7258,10 +7141,10 @@ func (ec *executionContext) field_Mutation_createInfobox_args(ctx context.Contex func (ec *executionContext) field_Mutation_createProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.CreateProjectInput + var arg0 gqlmodel.CreateProjectInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) 
- arg0, err = ec.unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateProjectInput(ctx, tmp) + arg0, err = ec.unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateProjectInput(ctx, tmp) if err != nil { return nil, err } @@ -7273,10 +7156,10 @@ func (ec *executionContext) field_Mutation_createProject_args(ctx context.Contex func (ec *executionContext) field_Mutation_createScene_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.CreateSceneInput + var arg0 gqlmodel.CreateSceneInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateSceneInput(ctx, tmp) + arg0, err = ec.unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateSceneInput(ctx, tmp) if err != nil { return nil, err } @@ -7288,10 +7171,10 @@ func (ec *executionContext) field_Mutation_createScene_args(ctx context.Context, func (ec *executionContext) field_Mutation_createTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.CreateTeamInput + var arg0 gqlmodel.CreateTeamInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamInput(ctx, tmp) + arg0, err = ec.unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamInput(ctx, tmp) if err != nil { return nil, err } @@ -7303,10 +7186,10 @@ func 
(ec *executionContext) field_Mutation_createTeam_args(ctx context.Context, func (ec *executionContext) field_Mutation_deleteMe_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.DeleteMeInput + var arg0 gqlmodel.DeleteMeInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMeInput(ctx, tmp) + arg0, err = ec.unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMeInput(ctx, tmp) if err != nil { return nil, err } @@ -7318,10 +7201,10 @@ func (ec *executionContext) field_Mutation_deleteMe_args(ctx context.Context, ra func (ec *executionContext) field_Mutation_deleteProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.DeleteProjectInput + var arg0 gqlmodel.DeleteProjectInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectInput(ctx, tmp) + arg0, err = ec.unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectInput(ctx, tmp) if err != nil { return nil, err } @@ -7333,10 +7216,10 @@ func (ec *executionContext) field_Mutation_deleteProject_args(ctx context.Contex func (ec *executionContext) field_Mutation_deleteTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.DeleteTeamInput + var arg0 gqlmodel.DeleteTeamInput if tmp, ok := rawArgs["input"]; ok 
{ ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamInput(ctx, tmp) + arg0, err = ec.unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamInput(ctx, tmp) if err != nil { return nil, err } @@ -7348,10 +7231,10 @@ func (ec *executionContext) field_Mutation_deleteTeam_args(ctx context.Context, func (ec *executionContext) field_Mutation_importDatasetFromGoogleSheet_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.ImportDatasetFromGoogleSheetInput + var arg0 gqlmodel.ImportDatasetFromGoogleSheetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetFromGoogleSheetInput(ctx, tmp) + arg0, err = ec.unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetFromGoogleSheetInput(ctx, tmp) if err != nil { return nil, err } @@ -7363,10 +7246,10 @@ func (ec *executionContext) field_Mutation_importDatasetFromGoogleSheet_args(ctx func (ec *executionContext) field_Mutation_importDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.ImportDatasetInput + var arg0 gqlmodel.ImportDatasetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetInput(ctx, tmp) + arg0, err = 
ec.unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetInput(ctx, tmp) if err != nil { return nil, err } @@ -7378,10 +7261,10 @@ func (ec *executionContext) field_Mutation_importDataset_args(ctx context.Contex func (ec *executionContext) field_Mutation_importLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.ImportLayerInput + var arg0 gqlmodel.ImportLayerInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerInput(ctx, tmp) + arg0, err = ec.unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerInput(ctx, tmp) if err != nil { return nil, err } @@ -7393,10 +7276,10 @@ func (ec *executionContext) field_Mutation_importLayer_args(ctx context.Context, func (ec *executionContext) field_Mutation_installPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.InstallPluginInput + var arg0 gqlmodel.InstallPluginInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginInput(ctx, tmp) + arg0, err = ec.unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginInput(ctx, tmp) if err != nil { return nil, err } @@ -7408,10 +7291,10 @@ func (ec *executionContext) field_Mutation_installPlugin_args(ctx context.Contex func (ec *executionContext) 
field_Mutation_linkDatasetToPropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.LinkDatasetToPropertyValueInput + var arg0 gqlmodel.LinkDatasetToPropertyValueInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLinkDatasetToPropertyValueInput(ctx, tmp) + arg0, err = ec.unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLinkDatasetToPropertyValueInput(ctx, tmp) if err != nil { return nil, err } @@ -7423,10 +7306,10 @@ func (ec *executionContext) field_Mutation_linkDatasetToPropertyValue_args(ctx c func (ec *executionContext) field_Mutation_moveInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.MoveInfoboxFieldInput + var arg0 gqlmodel.MoveInfoboxFieldInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldInput(ctx, tmp) + arg0, err = ec.unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldInput(ctx, tmp) if err != nil { return nil, err } @@ -7438,10 +7321,10 @@ func (ec *executionContext) field_Mutation_moveInfoboxField_args(ctx context.Con func (ec *executionContext) field_Mutation_moveLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.MoveLayerInput + var arg0 
gqlmodel.MoveLayerInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerInput(ctx, tmp) + arg0, err = ec.unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerInput(ctx, tmp) if err != nil { return nil, err } @@ -7453,10 +7336,10 @@ func (ec *executionContext) field_Mutation_moveLayer_args(ctx context.Context, r func (ec *executionContext) field_Mutation_movePropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.MovePropertyItemInput + var arg0 gqlmodel.MovePropertyItemInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMovePropertyItemInput(ctx, tmp) + arg0, err = ec.unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMovePropertyItemInput(ctx, tmp) if err != nil { return nil, err } @@ -7468,10 +7351,10 @@ func (ec *executionContext) field_Mutation_movePropertyItem_args(ctx context.Con func (ec *executionContext) field_Mutation_publishProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.PublishProjectInput + var arg0 gqlmodel.PublishProjectInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishProjectInput(ctx, tmp) + arg0, err = 
ec.unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishProjectInput(ctx, tmp) if err != nil { return nil, err } @@ -7483,10 +7366,10 @@ func (ec *executionContext) field_Mutation_publishProject_args(ctx context.Conte func (ec *executionContext) field_Mutation_removeAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveAssetInput + var arg0 gqlmodel.RemoveAssetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetInput(ctx, tmp) if err != nil { return nil, err } @@ -7498,10 +7381,10 @@ func (ec *executionContext) field_Mutation_removeAsset_args(ctx context.Context, func (ec *executionContext) field_Mutation_removeDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveDatasetSchemaInput + var arg0 gqlmodel.RemoveDatasetSchemaInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaInput(ctx, tmp) if err != nil { return nil, err } @@ -7513,10 +7396,10 @@ func (ec *executionContext) field_Mutation_removeDatasetSchema_args(ctx context. 
func (ec *executionContext) field_Mutation_removeInfoboxField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveInfoboxFieldInput + var arg0 gqlmodel.RemoveInfoboxFieldInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldInput(ctx, tmp) if err != nil { return nil, err } @@ -7528,10 +7411,10 @@ func (ec *executionContext) field_Mutation_removeInfoboxField_args(ctx context.C func (ec *executionContext) field_Mutation_removeInfobox_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveInfoboxInput + var arg0 gqlmodel.RemoveInfoboxInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxInput(ctx, tmp) if err != nil { return nil, err } @@ -7543,10 +7426,10 @@ func (ec *executionContext) field_Mutation_removeInfobox_args(ctx context.Contex func (ec *executionContext) field_Mutation_removeLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveLayerInput + var arg0 gqlmodel.RemoveLayerInput if tmp, ok := rawArgs["input"]; ok { 
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerInput(ctx, tmp) if err != nil { return nil, err } @@ -7558,10 +7441,10 @@ func (ec *executionContext) field_Mutation_removeLayer_args(ctx context.Context, func (ec *executionContext) field_Mutation_removeMemberFromTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveMemberFromTeamInput + var arg0 gqlmodel.RemoveMemberFromTeamInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamInput(ctx, tmp) if err != nil { return nil, err } @@ -7573,10 +7456,10 @@ func (ec *executionContext) field_Mutation_removeMemberFromTeam_args(ctx context func (ec *executionContext) field_Mutation_removeMyAuth_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveMyAuthInput + var arg0 gqlmodel.RemoveMyAuthInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMyAuthInput(ctx, tmp) + arg0, err = 
ec.unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMyAuthInput(ctx, tmp) if err != nil { return nil, err } @@ -7588,10 +7471,10 @@ func (ec *executionContext) field_Mutation_removeMyAuth_args(ctx context.Context func (ec *executionContext) field_Mutation_removePropertyField_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemovePropertyFieldInput + var arg0 gqlmodel.RemovePropertyFieldInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyFieldInput(ctx, tmp) + arg0, err = ec.unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyFieldInput(ctx, tmp) if err != nil { return nil, err } @@ -7603,10 +7486,10 @@ func (ec *executionContext) field_Mutation_removePropertyField_args(ctx context. 
func (ec *executionContext) field_Mutation_removePropertyItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemovePropertyItemInput + var arg0 gqlmodel.RemovePropertyItemInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyItemInput(ctx, tmp) + arg0, err = ec.unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyItemInput(ctx, tmp) if err != nil { return nil, err } @@ -7618,10 +7501,10 @@ func (ec *executionContext) field_Mutation_removePropertyItem_args(ctx context.C func (ec *executionContext) field_Mutation_removeWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.RemoveWidgetInput + var arg0 gqlmodel.RemoveWidgetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetInput(ctx, tmp) + arg0, err = ec.unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetInput(ctx, tmp) if err != nil { return nil, err } @@ -7633,10 +7516,10 @@ func (ec *executionContext) field_Mutation_removeWidget_args(ctx context.Context func (ec *executionContext) field_Mutation_signup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.SignupInput + var arg0 gqlmodel.SignupInput if tmp, ok := rawArgs["input"]; ok { ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupInput(ctx, tmp) + arg0, err = ec.unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupInput(ctx, tmp) if err != nil { return nil, err } @@ -7648,10 +7531,10 @@ func (ec *executionContext) field_Mutation_signup_args(ctx context.Context, rawA func (ec *executionContext) field_Mutation_syncDataset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.SyncDatasetInput + var arg0 gqlmodel.SyncDatasetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetInput(ctx, tmp) + arg0, err = ec.unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetInput(ctx, tmp) if err != nil { return nil, err } @@ -7663,10 +7546,10 @@ func (ec *executionContext) field_Mutation_syncDataset_args(ctx context.Context, func (ec *executionContext) field_Mutation_uninstallPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UninstallPluginInput + var arg0 gqlmodel.UninstallPluginInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginInput(ctx, tmp) + arg0, err = ec.unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginInput(ctx, 
tmp) if err != nil { return nil, err } @@ -7678,10 +7561,10 @@ func (ec *executionContext) field_Mutation_uninstallPlugin_args(ctx context.Cont func (ec *executionContext) field_Mutation_unlinkPropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UnlinkPropertyValueInput + var arg0 gqlmodel.UnlinkPropertyValueInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUnlinkPropertyValueInput(ctx, tmp) + arg0, err = ec.unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUnlinkPropertyValueInput(ctx, tmp) if err != nil { return nil, err } @@ -7693,10 +7576,10 @@ func (ec *executionContext) field_Mutation_unlinkPropertyValue_args(ctx context. func (ec *executionContext) field_Mutation_updateDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateDatasetSchemaInput + var arg0 gqlmodel.UpdateDatasetSchemaInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaInput(ctx, tmp) if err != nil { return nil, err } @@ -7708,10 +7591,10 @@ func (ec *executionContext) field_Mutation_updateDatasetSchema_args(ctx context. 
func (ec *executionContext) field_Mutation_updateLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateLayerInput + var arg0 gqlmodel.UpdateLayerInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerInput(ctx, tmp) if err != nil { return nil, err } @@ -7723,10 +7606,10 @@ func (ec *executionContext) field_Mutation_updateLayer_args(ctx context.Context, func (ec *executionContext) field_Mutation_updateMe_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateMeInput + var arg0 gqlmodel.UpdateMeInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMeInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMeInput(ctx, tmp) if err != nil { return nil, err } @@ -7738,10 +7621,10 @@ func (ec *executionContext) field_Mutation_updateMe_args(ctx context.Context, ra func (ec *executionContext) field_Mutation_updateMemberOfTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateMemberOfTeamInput + var arg0 gqlmodel.UpdateMemberOfTeamInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamInput(ctx, tmp) if err != nil { return nil, err } @@ -7753,10 +7636,10 @@ func (ec *executionContext) field_Mutation_updateMemberOfTeam_args(ctx context.C func (ec *executionContext) field_Mutation_updateProject_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateProjectInput + var arg0 gqlmodel.UpdateProjectInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateProjectInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateProjectInput(ctx, tmp) if err != nil { return nil, err } @@ -7768,70 +7651,10 @@ func (ec *executionContext) field_Mutation_updateProject_args(ctx context.Contex func (ec *executionContext) field_Mutation_updatePropertyItems_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdatePropertyItemInput - if tmp, ok := rawArgs["input"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemInput(ctx, tmp) - if err != nil { - return nil, err - } - } - args["input"] = arg0 - return args, nil -} - -func (ec *executionContext) 
field_Mutation_updatePropertyValueCamera_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 graphql1.UpdatePropertyValueCameraInput - if tmp, ok := rawArgs["input"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdatePropertyValueCameraInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueCameraInput(ctx, tmp) - if err != nil { - return nil, err - } - } - args["input"] = arg0 - return args, nil -} - -func (ec *executionContext) field_Mutation_updatePropertyValueLatLngHeight_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 graphql1.UpdatePropertyValueLatLngHeightInput - if tmp, ok := rawArgs["input"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdatePropertyValueLatLngHeightInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngHeightInput(ctx, tmp) - if err != nil { - return nil, err - } - } - args["input"] = arg0 - return args, nil -} - -func (ec *executionContext) field_Mutation_updatePropertyValueLatLng_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 graphql1.UpdatePropertyValueLatLngInput + var arg0 gqlmodel.UpdatePropertyItemInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdatePropertyValueLatLngInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngInput(ctx, tmp) - if err != nil { - return nil, err - } - } - args["input"] = arg0 - return args, nil -} - -func (ec *executionContext) 
field_Mutation_updatePropertyValueTypography_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 graphql1.UpdatePropertyValueTypographyInput - if tmp, ok := rawArgs["input"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdatePropertyValueTypographyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueTypographyInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemInput(ctx, tmp) if err != nil { return nil, err } @@ -7843,10 +7666,10 @@ func (ec *executionContext) field_Mutation_updatePropertyValueTypography_args(ct func (ec *executionContext) field_Mutation_updatePropertyValue_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdatePropertyValueInput + var arg0 gqlmodel.UpdatePropertyValueInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyValueInput(ctx, tmp) if err != nil { return nil, err } @@ -7858,10 +7681,10 @@ func (ec *executionContext) field_Mutation_updatePropertyValue_args(ctx context. 
func (ec *executionContext) field_Mutation_updateTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateTeamInput + var arg0 gqlmodel.UpdateTeamInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamInput(ctx, tmp) if err != nil { return nil, err } @@ -7873,10 +7696,10 @@ func (ec *executionContext) field_Mutation_updateTeam_args(ctx context.Context, func (ec *executionContext) field_Mutation_updateWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpdateWidgetInput + var arg0 gqlmodel.UpdateWidgetInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetInput(ctx, tmp) + arg0, err = ec.unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetInput(ctx, tmp) if err != nil { return nil, err } @@ -7888,10 +7711,10 @@ func (ec *executionContext) field_Mutation_updateWidget_args(ctx context.Context func (ec *executionContext) field_Mutation_upgradePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UpgradePluginInput + var arg0 gqlmodel.UpgradePluginInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginInput(ctx, tmp) + arg0, err = ec.unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginInput(ctx, tmp) if err != nil { return nil, err } @@ -7903,10 +7726,10 @@ func (ec *executionContext) field_Mutation_upgradePlugin_args(ctx context.Contex func (ec *executionContext) field_Mutation_uploadFileToProperty_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UploadFileToPropertyInput + var arg0 gqlmodel.UploadFileToPropertyInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadFileToPropertyInput(ctx, tmp) + arg0, err = ec.unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadFileToPropertyInput(ctx, tmp) if err != nil { return nil, err } @@ -7918,10 +7741,10 @@ func (ec *executionContext) field_Mutation_uploadFileToProperty_args(ctx context func (ec *executionContext) field_Mutation_uploadPlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 graphql1.UploadPluginInput + var arg0 gqlmodel.UploadPluginInput if tmp, ok := rawArgs["input"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) - arg0, err = ec.unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginInput(ctx, tmp) + arg0, err = 
ec.unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginInput(ctx, tmp) if err != nil { return nil, err } @@ -8335,10 +8158,10 @@ func (ec *executionContext) field_Query_node_args(ctx context.Context, rawArgs m } } args["id"] = arg0 - var arg1 graphql1.NodeType + var arg1 gqlmodel.NodeType if tmp, ok := rawArgs["type"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) - arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx, tmp) + arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx, tmp) if err != nil { return nil, err } @@ -8359,10 +8182,10 @@ func (ec *executionContext) field_Query_nodes_args(ctx context.Context, rawArgs } } args["id"] = arg0 - var arg1 graphql1.NodeType + var arg1 gqlmodel.NodeType if tmp, ok := rawArgs["type"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) - arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx, tmp) + arg1, err = ec.unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx, tmp) if err != nil { return nil, err } @@ -8709,7 +8532,7 @@ func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArg // region **************************** field.gotpl ***************************** -func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDatasetSchemaPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ 
-8736,12 +8559,12 @@ func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.C if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDynamicDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8768,12 +8591,12 @@ func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context. 
if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDynamicDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8800,12 +8623,12 @@ func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8832,12 +8655,12 @@ func (ec *executionContext) 
_AddDynamicDatasetSchemaPayload_datasetSchema(ctx co if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8867,12 +8690,12 @@ func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Con } return graphql.Null } - res := resTmp.(*graphql1.InfoboxField) + res := resTmp.(*gqlmodel.InfoboxField) fc.Result = res - return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxField(ctx, field.Selections, res) + return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, field.Selections, res) } -func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8902,12 +8725,12 @@ func (ec *executionContext) 
_AddInfoboxFieldPayload_layer(ctx context.Context, f } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerGroupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8937,12 +8760,12 @@ func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, fie } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerGroupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -8972,12 +8795,12 @@ func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Contex } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := 
resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerGroupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9009,7 +8832,7 @@ func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, fie return ec.marshalOInt2แš–int(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerItemPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9039,12 +8862,12 @@ func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(*graphql1.LayerItem) + res := resTmp.(*gqlmodel.LayerItem) fc.Result = res - return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx, field.Selections, res) + return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.AddLayerItemPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9074,12 +8897,12 @@ func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddLayerItemPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9111,7 +8934,7 @@ func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, fiel return ec.marshalOInt2แš–int(ctx, field.Selections, res) } -func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddMemberToTeamPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddMemberToTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9141,12 +8964,12 @@ func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, fi } return graphql.Null } - res := 
resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9176,12 +8999,12 @@ func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field g } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *graphql1.AddWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9211,12 +9034,12 @@ func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, f } return graphql.Null } - res := resTmp.(*graphql1.SceneWidget) + res := resTmp.(*gqlmodel.SceneWidget) fc.Result = res - return 
ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, field.Selections, res) + return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9251,7 +9074,7 @@ func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.Collect return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9286,7 +9109,7 @@ func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql. 
return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9321,7 +9144,7 @@ func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9356,7 +9179,7 @@ func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.Colle return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9391,7 +9214,7 @@ func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.Colle return ec.marshalNFileSize2int64(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r 
!= nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9426,7 +9249,7 @@ func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.Collec return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9461,7 +9284,7 @@ func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphq return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.Asset) (ret graphql.Marshaler) { +func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9488,12 +9311,12 @@ func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.Colle if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _AssetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -9523,12 +9346,12 @@ func (ec *executionContext) _AssetConnection_edges(ctx context.Context, field gr } return graphql.Null } - res := resTmp.([]*graphql1.AssetEdge) + res := resTmp.([]*gqlmodel.AssetEdge) fc.Result = res - return ec.marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdgeแš„(ctx, field.Selections, res) + return ec.marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9558,12 +9381,12 @@ func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field gr } return graphql.Null } - res := resTmp.([]*graphql1.Asset) + res := resTmp.([]*gqlmodel.Asset) fc.Result = res - return ec.marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, field.Selections, res) + return ec.marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9593,12 +9416,12 @@ func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field } return 
graphql.Null } - res := resTmp.(*graphql1.PageInfo) + res := resTmp.(*gqlmodel.PageInfo) fc.Result = res - return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9633,7 +9456,7 @@ func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, fie return ec.marshalNInt2int(ctx, field.Selections, res) } -func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9668,7 +9491,7 @@ func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.AssetEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9695,12 +9518,12 @@ func (ec 
*executionContext) _AssetEdge_node(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Asset) + res := resTmp.(*gqlmodel.Asset) fc.Result = res - return ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, field.Selections, res) + return ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9735,7 +9558,7 @@ func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9770,7 +9593,7 @@ func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_altitude(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_altitude(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9805,7 +9628,7 @@ func (ec *executionContext) _Camera_altitude(ctx context.Context, field 
graphql. return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9840,7 +9663,7 @@ func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.C return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9875,7 +9698,7 @@ func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.Col return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9910,7 +9733,7 @@ func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.Coll return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.CollectedField, obj *graphql1.Camera) (ret graphql.Marshaler) { +func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -9945,7 +9768,7 @@ func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _CheckProjectAliasPayload_alias(ctx context.Context, field graphql.CollectedField, obj *graphql1.CheckProjectAliasPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _CheckProjectAliasPayload_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CheckProjectAliasPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9980,7 +9803,7 @@ func (ec *executionContext) _CheckProjectAliasPayload_alias(ctx context.Context, return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _CheckProjectAliasPayload_available(ctx context.Context, field graphql.CollectedField, obj *graphql1.CheckProjectAliasPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _CheckProjectAliasPayload_available(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CheckProjectAliasPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10015,7 +9838,7 @@ func (ec *executionContext) _CheckProjectAliasPayload_available(ctx context.Cont return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateAssetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateAssetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10045,12 +9868,12 @@ func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field } return graphql.Null } - res := resTmp.(*graphql1.Asset) + res := 
resTmp.(*gqlmodel.Asset) fc.Result = res - return ec.marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, field.Selections, res) + return ec.marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } -func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateInfoboxPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateInfoboxPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10080,12 +9903,12 @@ func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, fie } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateScenePayload) (ret graphql.Marshaler) { +func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateScenePayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10115,12 +9938,12 @@ func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, 
res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.CreateTeamPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10150,12 +9973,12 @@ func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field g } return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { +func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10190,7 +10013,7 @@ func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.Colle return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { +func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) 
@@ -10225,7 +10048,7 @@ func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { +func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10260,7 +10083,7 @@ func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { +func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10290,12 +10113,12 @@ func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.([]*graphql1.DatasetField) + res := resTmp.([]*gqlmodel.DatasetField) fc.Result = res - return ec.marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetFieldแš„(ctx, field.Selections, res) + return ec.marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { +func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { defer func() { if r := 
recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10322,12 +10145,12 @@ func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Dataset) (ret graphql.Marshaler) { +func (ec *executionContext) _Dataset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10359,7 +10182,7 @@ func (ec *executionContext) _Dataset_name(ctx context.Context, field graphql.Col return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10389,12 +10212,12 @@ func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field } return graphql.Null } - res := resTmp.([]*graphql1.DatasetEdge) + res := resTmp.([]*gqlmodel.DatasetEdge) fc.Result = res - return ec.marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdgeแš„(ctx, field.Selections, res) + return 
ec.marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10424,12 +10247,12 @@ func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field } return graphql.Null } - res := resTmp.([]*graphql1.Dataset) + res := resTmp.([]*gqlmodel.Dataset) fc.Result = res - return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10459,12 +10282,12 @@ func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, fie } return graphql.Null } - res := resTmp.(*graphql1.PageInfo) + res := resTmp.(*gqlmodel.PageInfo) fc.Result = res - return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, 
field.Selections, res) } -func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10499,7 +10322,7 @@ func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, f return ec.marshalNInt2int(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10534,7 +10357,7 @@ func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graph return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10561,12 +10384,12 @@ func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return 
ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10601,7 +10424,7 @@ func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10636,7 +10459,7 @@ func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10671,7 +10494,7 @@ func (ec *executionContext) _DatasetField_source(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_type(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10701,12 +10524,12 @@ func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.(graphql1.ValueType) + res := resTmp.(gqlmodel.ValueType) fc.Result = res - return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10738,7 +10561,7 @@ func (ec *executionContext) _DatasetField_value(ctx context.Context, field graph return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10765,12 +10588,12 @@ func (ec *executionContext) _DatasetField_schema(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := 
resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_field(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10797,12 +10620,12 @@ func (ec *executionContext) _DatasetField_field(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchemaField) + res := resTmp.(*gqlmodel.DatasetSchemaField) fc.Result = res - return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, field.Selections, res) + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10829,12 +10652,12 @@ func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return 
ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10869,7 +10692,7 @@ func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10904,7 +10727,7 @@ func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10939,7 +10762,7 @@ func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field graph return ec.marshalNString2string(ctx, field.Selections, res) 
} -func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -10974,7 +10797,7 @@ func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11004,12 +10827,12 @@ func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field gra } return graphql.Null } - res := resTmp.([]*graphql1.DatasetSchemaField) + res := resTmp.([]*gqlmodel.DatasetSchemaField) fc.Result = res - return ec.marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaFieldแš„(ctx, field.Selections, res) + return ec.marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) @@ -11041,7 +10864,7 @@ func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Con return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11073,7 +10896,7 @@ func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field gr return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11110,12 +10933,12 @@ func (ec *executionContext) _DatasetSchema_datasets(ctx context.Context, field g } return graphql.Null } - res := resTmp.(*graphql1.DatasetConnection) + res := resTmp.(*gqlmodel.DatasetConnection) fc.Result = res - return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx, field.Selections, res) + return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_scene(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11142,12 +10965,12 @@ func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchema) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11174,12 +10997,12 @@ func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchemaField) + res := resTmp.(*gqlmodel.DatasetSchemaField) fc.Result = res - return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, field.Selections, res) + return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11209,12 +11032,12 @@ func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, } return graphql.Null } - res := resTmp.([]*graphql1.DatasetSchemaEdge) + res := resTmp.([]*gqlmodel.DatasetSchemaEdge) fc.Result = res - return ec.marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdgeแš„(ctx, field.Selections, res) + return ec.marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11244,12 +11067,12 @@ func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, } return graphql.Null } - res := resTmp.([]*graphql1.DatasetSchema) + res := resTmp.([]*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11279,12 +11102,12 @@ func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx context.Contex } return graphql.Null } - res := resTmp.(*graphql1.PageInfo) + res := resTmp.(*gqlmodel.PageInfo) fc.Result = res - return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11319,7 +11142,7 @@ func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Cont return ec.marshalNInt2int(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11354,7 +11177,7 @@ func (ec *executionContext) _DatasetSchemaEdge_cursor(ctx context.Context, field return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) 
_DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11381,12 +11204,12 @@ func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11421,7 +11244,7 @@ func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11456,7 +11279,7 @@ 
func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11491,7 +11314,7 @@ func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11521,12 +11344,12 @@ func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field } return graphql.Null } - res := resTmp.(graphql1.ValueType) + res := resTmp.(gqlmodel.ValueType) fc.Result = res - return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if 
r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11561,7 +11384,7 @@ func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11593,7 +11416,7 @@ func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11620,12 +11443,12 @@ func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.DatasetSchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11652,12 +11475,12 @@ func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DeleteMePayload) (ret graphql.Marshaler) { +func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteMePayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11692,7 +11515,7 @@ func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DeleteProjectPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteProjectPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11727,7 +11550,7 @@ func (ec *executionContext) _DeleteProjectPayload_projectId(ctx 
context.Context, return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.DeleteTeamPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11762,7 +11585,7 @@ func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.ImportDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11792,12 +11615,12 @@ func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Cont } return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, field graphql.CollectedField, obj *graphql1.ImportLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11827,12 +11650,12 @@ func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, fiel } return graphql.Null } - res := resTmp.([]graphql1.Layer) + res := resTmp.([]gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerแš„(ctx, field.Selections, res) + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) } -func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.ImportLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11862,12 +11685,12 @@ func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) 
@@ -11902,7 +11725,7 @@ func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql. return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11937,7 +11760,7 @@ func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql. return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11972,7 +11795,7 @@ func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graph return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12002,12 +11825,12 @@ func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.([]*graphql1.InfoboxField) + res := resTmp.([]*gqlmodel.InfoboxField) fc.Result = res - 
return ec.marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxFieldแš„(ctx, field.Selections, res) + return ec.marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12039,7 +11862,7 @@ func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12069,12 +11892,12 @@ func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.Co } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_property(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12101,12 +11924,12 @@ func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12133,12 +11956,12 @@ func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ 
-12165,12 +11988,12 @@ func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedInfobox) + res := resTmp.(*gqlmodel.MergedInfobox) fc.Result = res - return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx, field.Selections, res) + return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.Infobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12197,12 +12020,12 @@ func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.Co if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12237,7 +12060,7 @@ func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12272,7 +12095,7 @@ func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12307,7 +12130,7 @@ func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12342,7 +12165,7 @@ func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_pluginId(ctx 
context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12377,7 +12200,7 @@ func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field gr return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12412,7 +12235,7 @@ func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12444,7 +12267,7 @@ func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, f return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec 
*executionContext) _InfoboxField_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12474,12 +12297,12 @@ func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12509,12 +12332,12 @@ func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(*graphql1.Infobox) + res := resTmp.(*gqlmodel.Infobox) fc.Result = res - return ec.marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx, field.Selections, res) + return ec.marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -12541,12 +12364,12 @@ func (ec *executionContext) _InfoboxField_property(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12573,12 +12396,12 @@ func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12605,12 +12428,12 @@ func (ec *executionContext) _InfoboxField_extension(ctx context.Context, field g if resTmp == nil { return 
graphql.Null } - res := resTmp.(*graphql1.PluginExtension) + res := resTmp.(*gqlmodel.PluginExtension) fc.Result = res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12637,12 +12460,12 @@ func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12669,12 +12492,12 @@ func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedInfoboxField) + res := 
resTmp.(*gqlmodel.MergedInfoboxField) fc.Result = res - return ec.marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx, field.Selections, res) + return ec.marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12701,12 +12524,12 @@ func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.InfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12733,12 +12556,12 @@ func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return 
ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.InstallPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12768,12 +12591,12 @@ func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, fie } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.InstallPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12803,12 +12626,12 @@ func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Contex } return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return 
ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLng) (ret graphql.Marshaler) { +func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12843,7 +12666,7 @@ func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLng) (ret graphql.Marshaler) { +func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12878,7 +12701,7 @@ func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLngHeight) (ret graphql.Marshaler) { +func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12913,7 +12736,7 @@ func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.LatLngHeight) (ret graphql.Marshaler) { +func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12948,7 +12771,7 @@ func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field graphql return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field graphql.CollectedField, obj *graphql1.LatLngHeight) (ret graphql.Marshaler) { +func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -12983,7 +12806,7 @@ func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field grap return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13018,7 +12841,7 @@ func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.Co return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, 
r)) @@ -13053,7 +12876,7 @@ func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graph return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13088,7 +12911,7 @@ func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql. return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13123,7 +12946,7 @@ func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field gra return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13155,7 +12978,7 @@ func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field gr return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13187,7 +13010,7 @@ func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field grap return ec.marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13219,7 +13042,7 @@ func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field g return ec.marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13246,12 +13069,12 @@ func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Infobox) + res := resTmp.(*gqlmodel.Infobox) fc.Result = res - return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx, field.Selections, res) + return 
ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13283,7 +13106,7 @@ func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field grap return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13315,7 +13138,7 @@ func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Contex return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13350,7 +13173,7 @@ func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql. 
return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13385,7 +13208,7 @@ func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field grap return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13412,12 +13235,12 @@ func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphq if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13444,12 
+13267,12 @@ func (ec *executionContext) _LayerGroup_property(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13476,12 +13299,12 @@ func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphq if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13508,12 +13331,12 @@ func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := 
resTmp.(*graphql1.PluginExtension) + res := resTmp.(*gqlmodel.PluginExtension) fc.Result = res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13540,12 +13363,12 @@ func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13575,12 +13398,12 @@ func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.([]graphql1.Layer) + res := resTmp.([]gqlmodel.Layer) fc.Result = res - 
return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13607,12 +13430,12 @@ func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13639,12 +13462,12 @@ func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return 
ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13679,7 +13502,7 @@ func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13714,7 +13537,7 @@ func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13749,7 +13572,7 @@ func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret 
graphql.Marshaler) { +func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13784,7 +13607,7 @@ func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field grap return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13816,7 +13639,7 @@ func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field gra return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13848,7 +13671,7 @@ func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graph return ec.marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -13880,7 +13703,7 @@ func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field gr return ec.marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13907,12 +13730,12 @@ func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphq if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Infobox) + res := resTmp.(*gqlmodel.Infobox) fc.Result = res - return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx, field.Selections, res) + return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13944,7 +13767,7 @@ func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graph return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_linkedDatasetId(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -13976,7 +13799,7 @@ func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, fiel return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14003,12 +13826,12 @@ func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14035,12 +13858,12 @@ func (ec *executionContext) _LayerItem_property(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return 
ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14067,12 +13890,12 @@ func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14099,12 +13922,12 @@ func (ec *executionContext) _LayerItem_extension(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PluginExtension) + res := resTmp.(*gqlmodel.PluginExtension) fc.Result = res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) + return 
ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14131,12 +13954,12 @@ func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14163,12 +13986,12 @@ func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedLayer) + res := resTmp.(*gqlmodel.MergedLayer) fc.Result = res - return ec.marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedLayer(ctx, field.Selections, res) + return ec.marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedLayer(ctx, 
field.Selections, res) } -func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14195,12 +14018,12 @@ func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql. if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14227,12 +14050,12 @@ func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret 
graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14267,7 +14090,7 @@ func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14294,12 +14117,12 @@ func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedProperty) + res := resTmp.(*gqlmodel.MergedProperty) fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14329,12 +14152,12 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra } return graphql.Null } - res := 
resTmp.([]*graphql1.MergedInfoboxField) + res := resTmp.([]*gqlmodel.MergedInfoboxField) fc.Result = res - return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxFieldแš„(ctx, field.Selections, res) + return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14361,12 +14184,12 @@ func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14401,7 +14224,7 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) 
} -func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14436,7 +14259,7 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14471,7 +14294,7 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14506,7 +14329,7 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) 
_MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14533,12 +14356,12 @@ func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedProperty) + res := resTmp.(*gqlmodel.MergedProperty) fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14565,12 +14388,12 @@ func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14597,12 +14420,12 @@ func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PluginExtension) + res := resTmp.(*gqlmodel.PluginExtension) fc.Result = res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14629,12 +14452,12 @@ func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedInfoboxField) (ret 
graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14661,12 +14484,12 @@ func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14701,7 +14524,7 @@ func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14733,7 +14556,7 @@ func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field gra return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14768,7 +14591,7 @@ func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field grap return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14795,12 +14618,12 @@ func (ec *executionContext) _MergedLayer_property(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedProperty) + res := resTmp.(*gqlmodel.MergedProperty) fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r 
:= recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14827,12 +14650,12 @@ func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedInfobox) + res := resTmp.(*gqlmodel.MergedInfobox) fc.Result = res - return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx, field.Selections, res) + return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_original(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14859,12 +14682,12 @@ func (ec *executionContext) _MergedLayer_original(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.LayerItem) + res := resTmp.(*gqlmodel.LayerItem) fc.Result = res - return ec.marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx, field.Selections, res) + return ec.marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14891,12 +14714,12 @@ func (ec *executionContext) 
_MergedLayer_parent(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedLayer) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedLayer_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14923,12 +14746,12 @@ func (ec *executionContext) _MergedLayer_scene(ctx context.Context, field graphq if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14960,7 +14783,7 @@ func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, fiel return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, 
field.Selections, res) } -func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14992,7 +14815,7 @@ func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15024,7 +14847,7 @@ func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15056,7 +14879,7 @@ func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_original(ctx context.Context, 
field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15083,12 +14906,12 @@ func (ec *executionContext) _MergedProperty_original(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15115,12 +14938,12 @@ func (ec *executionContext) _MergedProperty_parent(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) 
_MergedProperty_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15147,12 +14970,12 @@ func (ec *executionContext) _MergedProperty_schema(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15179,12 +15002,12 @@ func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedProperty) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.MergedProperty) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15214,12 +15037,12 @@ func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field gr } return graphql.Null } - res := resTmp.([]*graphql1.MergedPropertyGroup) + res := resTmp.([]*gqlmodel.MergedPropertyGroup) fc.Result = res - return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroupแš„(ctx, field.Selections, res) + return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15254,7 +15077,7 @@ func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, f return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15289,7 +15112,7 @@ func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, fi return 
ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15321,7 +15144,7 @@ func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, fiel return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15351,12 +15174,12 @@ func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field } return graphql.Null } - res := resTmp.(graphql1.ValueType) + res := resTmp.(gqlmodel.ValueType) fc.Result = res - return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer 
func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15383,12 +15206,12 @@ func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.([]*graphql1.PropertyFieldLink) + res := resTmp.([]*gqlmodel.PropertyFieldLink) fc.Result = res - return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLinkแš„(ctx, field.Selections, res) + return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15423,7 +15246,7 @@ func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15450,12 +15273,12 @@ func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return 
ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15482,12 +15305,12 @@ func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaField) + res := resTmp.(*gqlmodel.PropertySchemaField) fc.Result = res - return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15519,7 +15342,7 @@ func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx 
context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15551,7 +15374,7 @@ func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context. return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15583,7 +15406,7 @@ func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Co return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15615,7 +15438,7 @@ func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15647,7 +15470,7 @@ func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, f return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15682,7 +15505,7 @@ func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Conte return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15714,7 +15537,7 @@ func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, f return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15746,7 +15569,7 @@ func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Con return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15776,12 +15599,12 @@ func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, fie } return graphql.Null } - res := resTmp.([]*graphql1.MergedPropertyField) + res := resTmp.([]*gqlmodel.MergedPropertyField) fc.Result = res - return ec.marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyFieldแš„(ctx, field.Selections, res) + return ec.marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15811,12 +15634,12 @@ func 
(ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, fie } return graphql.Null } - res := resTmp.([]*graphql1.MergedPropertyGroup) + res := resTmp.([]*gqlmodel.MergedPropertyGroup) fc.Result = res - return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroupแš„(ctx, field.Selections, res) + return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15843,12 +15666,12 @@ func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Co if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15875,12 
+15698,12 @@ func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Cont if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15907,12 +15730,12 @@ func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyGroup) + res := resTmp.(*gqlmodel.PropertyGroup) fc.Result = res - return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx, field.Selections, res) + return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15939,12 +15762,12 @@ func (ec *executionContext) 
_MergedPropertyGroup_parent(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyGroup) + res := resTmp.(*gqlmodel.PropertyGroup) fc.Result = res - return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx, field.Selections, res) + return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15971,12 +15794,12 @@ func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.MergedPropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16003,12 +15826,12 @@ func (ec *executionContext) 
_MergedPropertyGroup_linkedDataset(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16043,7 +15866,7 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16073,12 +15896,12 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16113,7 +15936,7 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, return ec.marshalNInt2int(ctx, field.Selections, res) } -func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) @@ -16148,7 +15971,7 @@ func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16178,12 +16001,12 @@ func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Contex } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16213,12 +16036,12 @@ func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + 
return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField, obj *graphql1.MoveLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16278,7 +16101,7 @@ func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateAsset(rctx, args["input"].(graphql1.CreateAssetInput)) + return ec.resolvers.Mutation().CreateAsset(rctx, args["input"].(gqlmodel.CreateAssetInput)) }) if err != nil { ec.Error(ctx, err) @@ -16287,9 +16110,9 @@ func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.CreateAssetPayload) + res := resTmp.(*gqlmodel.CreateAssetPayload) fc.Result = res - return ec.marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx, field.Selections, res) + return ec.marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16317,7 +16140,7 @@ func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = 
rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveAsset(rctx, args["input"].(graphql1.RemoveAssetInput)) + return ec.resolvers.Mutation().RemoveAsset(rctx, args["input"].(gqlmodel.RemoveAssetInput)) }) if err != nil { ec.Error(ctx, err) @@ -16326,9 +16149,9 @@ func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.RemoveAssetPayload) + res := resTmp.(*gqlmodel.RemoveAssetPayload) fc.Result = res - return ec.marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx, field.Selections, res) + return ec.marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16356,7 +16179,7 @@ func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql. fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().Signup(rctx, args["input"].(graphql1.SignupInput)) + return ec.resolvers.Mutation().Signup(rctx, args["input"].(gqlmodel.SignupInput)) }) if err != nil { ec.Error(ctx, err) @@ -16365,9 +16188,9 @@ func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.SignupPayload) + res := resTmp.(*gqlmodel.SignupPayload) fc.Result = res - return ec.marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx, field.Selections, res) + return ec.marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16395,7 +16218,7 @@ func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphq fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateMe(rctx, args["input"].(graphql1.UpdateMeInput)) + return ec.resolvers.Mutation().UpdateMe(rctx, args["input"].(gqlmodel.UpdateMeInput)) }) if err != nil { ec.Error(ctx, err) @@ -16404,9 +16227,9 @@ func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphq if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateMePayload) + res := resTmp.(*gqlmodel.UpdateMePayload) fc.Result = res - return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) + return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16434,7 +16257,7 @@ func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx 
// use context from middleware stack in children - return ec.resolvers.Mutation().RemoveMyAuth(rctx, args["input"].(graphql1.RemoveMyAuthInput)) + return ec.resolvers.Mutation().RemoveMyAuth(rctx, args["input"].(gqlmodel.RemoveMyAuthInput)) }) if err != nil { ec.Error(ctx, err) @@ -16443,9 +16266,9 @@ func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateMePayload) + res := resTmp.(*gqlmodel.UpdateMePayload) fc.Result = res - return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx, field.Selections, res) + return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16473,7 +16296,7 @@ func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphq fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DeleteMe(rctx, args["input"].(graphql1.DeleteMeInput)) + return ec.resolvers.Mutation().DeleteMe(rctx, args["input"].(gqlmodel.DeleteMeInput)) }) if err != nil { ec.Error(ctx, err) @@ -16482,9 +16305,9 @@ func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphq if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DeleteMePayload) + res := resTmp.(*gqlmodel.DeleteMePayload) fc.Result = res - return ec.marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx, field.Selections, res) + return 
ec.marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMePayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16512,7 +16335,7 @@ func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field grap fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateTeam(rctx, args["input"].(graphql1.CreateTeamInput)) + return ec.resolvers.Mutation().CreateTeam(rctx, args["input"].(gqlmodel.CreateTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -16521,9 +16344,9 @@ func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.CreateTeamPayload) + res := resTmp.(*gqlmodel.CreateTeamPayload) fc.Result = res - return ec.marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx, field.Selections, res) + return ec.marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16551,7 +16374,7 @@ func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field grap fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DeleteTeam(rctx, args["input"].(graphql1.DeleteTeamInput)) + return ec.resolvers.Mutation().DeleteTeam(rctx, args["input"].(gqlmodel.DeleteTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -16560,9 
+16383,9 @@ func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DeleteTeamPayload) + res := resTmp.(*gqlmodel.DeleteTeamPayload) fc.Result = res - return ec.marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx, field.Selections, res) + return ec.marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16590,7 +16413,7 @@ func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field grap fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateTeam(rctx, args["input"].(graphql1.UpdateTeamInput)) + return ec.resolvers.Mutation().UpdateTeam(rctx, args["input"].(gqlmodel.UpdateTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -16599,9 +16422,9 @@ func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateTeamPayload) + res := resTmp.(*gqlmodel.UpdateTeamPayload) fc.Result = res - return ec.marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx, field.Selections, res) + return ec.marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16629,7 +16452,7 @@ func (ec *executionContext) _Mutation_addMemberToTeam(ctx 
context.Context, field fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddMemberToTeam(rctx, args["input"].(graphql1.AddMemberToTeamInput)) + return ec.resolvers.Mutation().AddMemberToTeam(rctx, args["input"].(gqlmodel.AddMemberToTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -16638,9 +16461,9 @@ func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddMemberToTeamPayload) + res := resTmp.(*gqlmodel.AddMemberToTeamPayload) fc.Result = res - return ec.marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx, field.Selections, res) + return ec.marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16668,7 +16491,7 @@ func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveMemberFromTeam(rctx, args["input"].(graphql1.RemoveMemberFromTeamInput)) + return ec.resolvers.Mutation().RemoveMemberFromTeam(rctx, args["input"].(gqlmodel.RemoveMemberFromTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -16677,9 +16500,9 @@ func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.RemoveMemberFromTeamPayload) + res := resTmp.(*gqlmodel.RemoveMemberFromTeamPayload) fc.Result = res - 
return ec.marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) + return ec.marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16707,7 +16530,7 @@ func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, fi fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateMemberOfTeam(rctx, args["input"].(graphql1.UpdateMemberOfTeamInput)) + return ec.resolvers.Mutation().UpdateMemberOfTeam(rctx, args["input"].(gqlmodel.UpdateMemberOfTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -16716,9 +16539,9 @@ func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateMemberOfTeamPayload) + res := resTmp.(*gqlmodel.UpdateMemberOfTeamPayload) fc.Result = res - return ec.marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) + return ec.marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16746,7 +16569,7 @@ func (ec *executionContext) _Mutation_createProject(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateProject(rctx, args["input"].(graphql1.CreateProjectInput)) + return ec.resolvers.Mutation().CreateProject(rctx, args["input"].(gqlmodel.CreateProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -16755,9 +16578,9 @@ func (ec *executionContext) _Mutation_createProject(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ProjectPayload) + res := resTmp.(*gqlmodel.ProjectPayload) fc.Result = res - return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16785,7 +16608,7 @@ func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateProject(rctx, args["input"].(graphql1.UpdateProjectInput)) + return ec.resolvers.Mutation().UpdateProject(rctx, args["input"].(gqlmodel.UpdateProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -16794,9 +16617,9 @@ func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ProjectPayload) + res := resTmp.(*gqlmodel.ProjectPayload) fc.Result = res - return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) + return 
ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16824,7 +16647,7 @@ func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().PublishProject(rctx, args["input"].(graphql1.PublishProjectInput)) + return ec.resolvers.Mutation().PublishProject(rctx, args["input"].(gqlmodel.PublishProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -16833,9 +16656,9 @@ func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ProjectPayload) + res := resTmp.(*gqlmodel.ProjectPayload) fc.Result = res - return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx, field.Selections, res) + return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -16863,7 +16686,7 @@ func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DeleteProject(rctx, args["input"].(graphql1.DeleteProjectInput)) + return ec.resolvers.Mutation().DeleteProject(rctx, args["input"].(gqlmodel.DeleteProjectInput)) }) if err != nil { ec.Error(ctx, 
err) @@ -16872,12 +16695,12 @@ func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DeleteProjectPayload) + res := resTmp.(*gqlmodel.DeleteProjectPayload) fc.Result = res - return ec.marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx, field.Selections, res) + return ec.marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16894,7 +16717,7 @@ func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field gr ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_uploadPlugin_args(ctx, rawArgs) + args, err := ec.field_Mutation_createScene_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -16902,7 +16725,7 @@ func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UploadPlugin(rctx, args["input"].(graphql1.UploadPluginInput)) + return ec.resolvers.Mutation().CreateScene(rctx, args["input"].(gqlmodel.CreateSceneInput)) }) if err != nil { ec.Error(ctx, err) @@ -16911,12 +16734,12 @@ func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := 
resTmp.(*graphql1.UploadPluginPayload) + res := resTmp.(*gqlmodel.CreateScenePayload) fc.Result = res - return ec.marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx, field.Selections, res) + return ec.marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateScenePayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16933,7 +16756,7 @@ func (ec *executionContext) _Mutation_createScene(ctx context.Context, field gra ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createScene_args(ctx, rawArgs) + args, err := ec.field_Mutation_addWidget_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -16941,7 +16764,7 @@ func (ec *executionContext) _Mutation_createScene(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateScene(rctx, args["input"].(graphql1.CreateSceneInput)) + return ec.resolvers.Mutation().AddWidget(rctx, args["input"].(gqlmodel.AddWidgetInput)) }) if err != nil { ec.Error(ctx, err) @@ -16950,12 +16773,12 @@ func (ec *executionContext) _Mutation_createScene(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.CreateScenePayload) + res := resTmp.(*gqlmodel.AddWidgetPayload) fc.Result = res - return 
ec.marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx, field.Selections, res) + return ec.marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16972,7 +16795,7 @@ func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graph ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addWidget_args(ctx, rawArgs) + args, err := ec.field_Mutation_updateWidget_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -16980,7 +16803,7 @@ func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graph fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddWidget(rctx, args["input"].(graphql1.AddWidgetInput)) + return ec.resolvers.Mutation().UpdateWidget(rctx, args["input"].(gqlmodel.UpdateWidgetInput)) }) if err != nil { ec.Error(ctx, err) @@ -16989,12 +16812,12 @@ func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddWidgetPayload) + res := resTmp.(*gqlmodel.UpdateWidgetPayload) fc.Result = res - return ec.marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx, field.Selections, res) + return 
ec.marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -17011,7 +16834,7 @@ func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field gr ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateWidget_args(ctx, rawArgs) + args, err := ec.field_Mutation_removeWidget_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -17019,7 +16842,7 @@ func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateWidget(rctx, args["input"].(graphql1.UpdateWidgetInput)) + return ec.resolvers.Mutation().RemoveWidget(rctx, args["input"].(gqlmodel.RemoveWidgetInput)) }) if err != nil { ec.Error(ctx, err) @@ -17028,12 +16851,12 @@ func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateWidgetPayload) + res := resTmp.(*gqlmodel.RemoveWidgetPayload) fc.Result = res - return ec.marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx, field.Selections, res) + return ec.marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetPayload(ctx, field.Selections, res) } -func (ec 
*executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -17050,7 +16873,7 @@ func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field gr ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeWidget_args(ctx, rawArgs) + args, err := ec.field_Mutation_installPlugin_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -17058,7 +16881,7 @@ func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveWidget(rctx, args["input"].(graphql1.RemoveWidgetInput)) + return ec.resolvers.Mutation().InstallPlugin(rctx, args["input"].(gqlmodel.InstallPluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -17067,12 +16890,12 @@ func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.RemoveWidgetPayload) + res := resTmp.(*gqlmodel.InstallPluginPayload) fc.Result = res - return ec.marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx, field.Selections, res) + return ec.marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) 
_Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -17089,7 +16912,7 @@ func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field g ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_installPlugin_args(ctx, rawArgs) + args, err := ec.field_Mutation_uninstallPlugin_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -17097,7 +16920,7 @@ func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().InstallPlugin(rctx, args["input"].(graphql1.InstallPluginInput)) + return ec.resolvers.Mutation().UninstallPlugin(rctx, args["input"].(gqlmodel.UninstallPluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -17106,12 +16929,12 @@ func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.InstallPluginPayload) + res := resTmp.(*gqlmodel.UninstallPluginPayload) fc.Result = res - return ec.marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx, field.Selections, res) + return ec.marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) @@ -17128,7 +16951,7 @@ func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field ctx = graphql.WithFieldContext(ctx, fc) rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_uninstallPlugin_args(ctx, rawArgs) + args, err := ec.field_Mutation_uploadPlugin_args(ctx, rawArgs) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -17136,7 +16959,7 @@ func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UninstallPlugin(rctx, args["input"].(graphql1.UninstallPluginInput)) + return ec.resolvers.Mutation().UploadPlugin(rctx, args["input"].(gqlmodel.UploadPluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -17145,9 +16968,9 @@ func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UninstallPluginPayload) + res := resTmp.(*gqlmodel.UploadPluginPayload) fc.Result = res - return ec.marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx, field.Selections, res) + return ec.marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17175,7 +16998,7 @@ func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpgradePlugin(rctx, 
args["input"].(graphql1.UpgradePluginInput)) + return ec.resolvers.Mutation().UpgradePlugin(rctx, args["input"].(gqlmodel.UpgradePluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -17184,9 +17007,9 @@ func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpgradePluginPayload) + res := resTmp.(*gqlmodel.UpgradePluginPayload) fc.Result = res - return ec.marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx, field.Selections, res) + return ec.marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17214,7 +17037,7 @@ func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateDatasetSchema(rctx, args["input"].(graphql1.UpdateDatasetSchemaInput)) + return ec.resolvers.Mutation().UpdateDatasetSchema(rctx, args["input"].(gqlmodel.UpdateDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -17223,9 +17046,9 @@ func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateDatasetSchemaPayload) + res := resTmp.(*gqlmodel.UpdateDatasetSchemaPayload) fc.Result = res - return ec.marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) + return 
ec.marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17253,7 +17076,7 @@ func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().SyncDataset(rctx, args["input"].(graphql1.SyncDatasetInput)) + return ec.resolvers.Mutation().SyncDataset(rctx, args["input"].(gqlmodel.SyncDatasetInput)) }) if err != nil { ec.Error(ctx, err) @@ -17262,9 +17085,9 @@ func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.SyncDatasetPayload) + res := resTmp.(*gqlmodel.SyncDatasetPayload) fc.Result = res - return ec.marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx, field.Selections, res) + return ec.marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17292,7 +17115,7 @@ func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Contex fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddDynamicDatasetSchema(rctx, args["input"].(graphql1.AddDynamicDatasetSchemaInput)) + return ec.resolvers.Mutation().AddDynamicDatasetSchema(rctx, 
args["input"].(gqlmodel.AddDynamicDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -17301,9 +17124,9 @@ func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddDynamicDatasetSchemaPayload) + res := resTmp.(*gqlmodel.AddDynamicDatasetSchemaPayload) fc.Result = res - return ec.marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17331,7 +17154,7 @@ func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, fie fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddDynamicDataset(rctx, args["input"].(graphql1.AddDynamicDatasetInput)) + return ec.resolvers.Mutation().AddDynamicDataset(rctx, args["input"].(gqlmodel.AddDynamicDatasetInput)) }) if err != nil { ec.Error(ctx, err) @@ -17340,9 +17163,9 @@ func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddDynamicDatasetPayload) + res := resTmp.(*gqlmodel.AddDynamicDatasetPayload) fc.Result = res - return ec.marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx, field.Selections, res) + return 
ec.marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17370,7 +17193,7 @@ func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveDatasetSchema(rctx, args["input"].(graphql1.RemoveDatasetSchemaInput)) + return ec.resolvers.Mutation().RemoveDatasetSchema(rctx, args["input"].(gqlmodel.RemoveDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -17379,9 +17202,9 @@ func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.RemoveDatasetSchemaPayload) + res := resTmp.(*gqlmodel.RemoveDatasetSchemaPayload) fc.Result = res - return ec.marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17409,7 +17232,7 @@ func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().ImportDataset(rctx, args["input"].(graphql1.ImportDatasetInput)) + return 
ec.resolvers.Mutation().ImportDataset(rctx, args["input"].(gqlmodel.ImportDatasetInput)) }) if err != nil { ec.Error(ctx, err) @@ -17418,9 +17241,9 @@ func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ImportDatasetPayload) + res := resTmp.(*gqlmodel.ImportDatasetPayload) fc.Result = res - return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) + return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17448,7 +17271,7 @@ func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.C fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().ImportDatasetFromGoogleSheet(rctx, args["input"].(graphql1.ImportDatasetFromGoogleSheetInput)) + return ec.resolvers.Mutation().ImportDatasetFromGoogleSheet(rctx, args["input"].(gqlmodel.ImportDatasetFromGoogleSheetInput)) }) if err != nil { ec.Error(ctx, err) @@ -17457,9 +17280,9 @@ func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.C if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ImportDatasetPayload) + res := resTmp.(*gqlmodel.ImportDatasetPayload) fc.Result = res - return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx, field.Selections, res) + return 
ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17487,7 +17310,7 @@ func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, fiel fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddDatasetSchema(rctx, args["input"].(graphql1.AddDatasetSchemaInput)) + return ec.resolvers.Mutation().AddDatasetSchema(rctx, args["input"].(gqlmodel.AddDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -17496,9 +17319,9 @@ func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddDatasetSchemaPayload) + res := resTmp.(*gqlmodel.AddDatasetSchemaPayload) fc.Result = res - return ec.marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx, field.Selections, res) + return ec.marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17526,163 +17349,7 @@ func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyValue(rctx, args["input"].(graphql1.UpdatePropertyValueInput)) - }) - if err != nil { - 
ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*graphql1.PropertyFieldPayload) - fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) -} - -func (ec *executionContext) _Mutation_updatePropertyValueLatLng(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "Mutation", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updatePropertyValueLatLng_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyValueLatLng(rctx, args["input"].(graphql1.UpdatePropertyValueLatLngInput)) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*graphql1.PropertyFieldPayload) - fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) -} - -func (ec *executionContext) _Mutation_updatePropertyValueLatLngHeight(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "Mutation", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = 
graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updatePropertyValueLatLngHeight_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyValueLatLngHeight(rctx, args["input"].(graphql1.UpdatePropertyValueLatLngHeightInput)) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*graphql1.PropertyFieldPayload) - fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) -} - -func (ec *executionContext) _Mutation_updatePropertyValueCamera(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "Mutation", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updatePropertyValueCamera_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyValueCamera(rctx, args["input"].(graphql1.UpdatePropertyValueCameraInput)) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*graphql1.PropertyFieldPayload) - fc.Result = res - return 
ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) -} - -func (ec *executionContext) _Mutation_updatePropertyValueTypography(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "Mutation", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updatePropertyValueTypography_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyValueTypography(rctx, args["input"].(graphql1.UpdatePropertyValueTypographyInput)) + return ec.resolvers.Mutation().UpdatePropertyValue(rctx, args["input"].(gqlmodel.UpdatePropertyValueInput)) }) if err != nil { ec.Error(ctx, err) @@ -17691,9 +17358,9 @@ func (ec *executionContext) _Mutation_updatePropertyValueTypography(ctx context. 
if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyFieldPayload) + res := resTmp.(*gqlmodel.PropertyFieldPayload) fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17721,7 +17388,7 @@ func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemovePropertyField(rctx, args["input"].(graphql1.RemovePropertyFieldInput)) + return ec.resolvers.Mutation().RemovePropertyField(rctx, args["input"].(gqlmodel.RemovePropertyFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -17730,9 +17397,9 @@ func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyFieldPayload) + res := resTmp.(*gqlmodel.PropertyFieldPayload) fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17760,7 +17427,7 @@ func (ec *executionContext) _Mutation_uploadFileToProperty(ctx 
context.Context, fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UploadFileToProperty(rctx, args["input"].(graphql1.UploadFileToPropertyInput)) + return ec.resolvers.Mutation().UploadFileToProperty(rctx, args["input"].(gqlmodel.UploadFileToPropertyInput)) }) if err != nil { ec.Error(ctx, err) @@ -17769,9 +17436,9 @@ func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyFieldPayload) + res := resTmp.(*gqlmodel.PropertyFieldPayload) fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17799,7 +17466,7 @@ func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Con fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().LinkDatasetToPropertyValue(rctx, args["input"].(graphql1.LinkDatasetToPropertyValueInput)) + return ec.resolvers.Mutation().LinkDatasetToPropertyValue(rctx, args["input"].(gqlmodel.LinkDatasetToPropertyValueInput)) }) if err != nil { ec.Error(ctx, err) @@ -17808,9 +17475,9 @@ func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Con if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyFieldPayload) + res := resTmp.(*gqlmodel.PropertyFieldPayload) 
fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17838,7 +17505,7 @@ func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UnlinkPropertyValue(rctx, args["input"].(graphql1.UnlinkPropertyValueInput)) + return ec.resolvers.Mutation().UnlinkPropertyValue(rctx, args["input"].(gqlmodel.UnlinkPropertyValueInput)) }) if err != nil { ec.Error(ctx, err) @@ -17847,9 +17514,9 @@ func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyFieldPayload) + res := resTmp.(*gqlmodel.PropertyFieldPayload) fc.Result = res - return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx, field.Selections, res) + return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17877,7 +17544,7 @@ func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use 
context from middleware stack in children - return ec.resolvers.Mutation().AddPropertyItem(rctx, args["input"].(graphql1.AddPropertyItemInput)) + return ec.resolvers.Mutation().AddPropertyItem(rctx, args["input"].(gqlmodel.AddPropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -17886,9 +17553,9 @@ func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyItemPayload) + res := resTmp.(*gqlmodel.PropertyItemPayload) fc.Result = res - return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17916,7 +17583,7 @@ func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, fiel fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().MovePropertyItem(rctx, args["input"].(graphql1.MovePropertyItemInput)) + return ec.resolvers.Mutation().MovePropertyItem(rctx, args["input"].(gqlmodel.MovePropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -17925,9 +17592,9 @@ func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyItemPayload) + res := resTmp.(*gqlmodel.PropertyItemPayload) fc.Result = res - return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return 
ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17955,7 +17622,7 @@ func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, fi fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemovePropertyItem(rctx, args["input"].(graphql1.RemovePropertyItemInput)) + return ec.resolvers.Mutation().RemovePropertyItem(rctx, args["input"].(gqlmodel.RemovePropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -17964,9 +17631,9 @@ func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyItemPayload) + res := resTmp.(*gqlmodel.PropertyItemPayload) fc.Result = res - return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -17994,7 +17661,7 @@ func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyItems(rctx, args["input"].(graphql1.UpdatePropertyItemInput)) + return 
ec.resolvers.Mutation().UpdatePropertyItems(rctx, args["input"].(gqlmodel.UpdatePropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -18003,9 +17670,9 @@ func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyItemPayload) + res := resTmp.(*gqlmodel.PropertyItemPayload) fc.Result = res - return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx, field.Selections, res) + return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18033,7 +17700,7 @@ func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddLayerItem(rctx, args["input"].(graphql1.AddLayerItemInput)) + return ec.resolvers.Mutation().AddLayerItem(rctx, args["input"].(gqlmodel.AddLayerItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -18042,9 +17709,9 @@ func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddLayerItemPayload) + res := resTmp.(*gqlmodel.AddLayerItemPayload) fc.Result = res - return ec.marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx, field.Selections, res) + return ec.marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemPayload(ctx, field.Selections, res) } func (ec *executionContext) 
_Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18072,7 +17739,7 @@ func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddLayerGroup(rctx, args["input"].(graphql1.AddLayerGroupInput)) + return ec.resolvers.Mutation().AddLayerGroup(rctx, args["input"].(gqlmodel.AddLayerGroupInput)) }) if err != nil { ec.Error(ctx, err) @@ -18081,9 +17748,9 @@ func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddLayerGroupPayload) + res := resTmp.(*gqlmodel.AddLayerGroupPayload) fc.Result = res - return ec.marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx, field.Selections, res) + return ec.marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18111,7 +17778,7 @@ func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveLayer(rctx, args["input"].(graphql1.RemoveLayerInput)) + return ec.resolvers.Mutation().RemoveLayer(rctx, args["input"].(gqlmodel.RemoveLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -18120,9 +17787,9 @@ func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := 
resTmp.(*graphql1.RemoveLayerPayload) + res := resTmp.(*gqlmodel.RemoveLayerPayload) fc.Result = res - return ec.marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx, field.Selections, res) + return ec.marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18150,7 +17817,7 @@ func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateLayer(rctx, args["input"].(graphql1.UpdateLayerInput)) + return ec.resolvers.Mutation().UpdateLayer(rctx, args["input"].(gqlmodel.UpdateLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -18159,9 +17826,9 @@ func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.UpdateLayerPayload) + res := resTmp.(*gqlmodel.UpdateLayerPayload) fc.Result = res - return ec.marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx, field.Selections, res) + return ec.marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18189,7 +17856,7 @@ func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graph fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().MoveLayer(rctx, args["input"].(graphql1.MoveLayerInput)) + return ec.resolvers.Mutation().MoveLayer(rctx, args["input"].(gqlmodel.MoveLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -18198,9 +17865,9 @@ func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MoveLayerPayload) + res := resTmp.(*gqlmodel.MoveLayerPayload) fc.Result = res - return ec.marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx, field.Selections, res) + return ec.marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18228,7 +17895,7 @@ func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateInfobox(rctx, args["input"].(graphql1.CreateInfoboxInput)) + return ec.resolvers.Mutation().CreateInfobox(rctx, args["input"].(gqlmodel.CreateInfoboxInput)) }) if err != nil { ec.Error(ctx, err) @@ -18237,9 +17904,9 @@ func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.CreateInfoboxPayload) + res := resTmp.(*gqlmodel.CreateInfoboxPayload) fc.Result = res - return ec.marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx, field.Selections, res) + return 
ec.marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18267,7 +17934,7 @@ func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field g fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveInfobox(rctx, args["input"].(graphql1.RemoveInfoboxInput)) + return ec.resolvers.Mutation().RemoveInfobox(rctx, args["input"].(gqlmodel.RemoveInfoboxInput)) }) if err != nil { ec.Error(ctx, err) @@ -18276,9 +17943,9 @@ func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.RemoveInfoboxPayload) + res := resTmp.(*gqlmodel.RemoveInfoboxPayload) fc.Result = res - return ec.marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx, field.Selections, res) + return ec.marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18306,7 +17973,7 @@ func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddInfoboxField(rctx, args["input"].(graphql1.AddInfoboxFieldInput)) + return ec.resolvers.Mutation().AddInfoboxField(rctx, 
args["input"].(gqlmodel.AddInfoboxFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -18315,9 +17982,9 @@ func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.AddInfoboxFieldPayload) + res := resTmp.(*gqlmodel.AddInfoboxFieldPayload) fc.Result = res - return ec.marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx, field.Selections, res) + return ec.marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18345,7 +18012,7 @@ func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, fiel fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().MoveInfoboxField(rctx, args["input"].(graphql1.MoveInfoboxFieldInput)) + return ec.resolvers.Mutation().MoveInfoboxField(rctx, args["input"].(gqlmodel.MoveInfoboxFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -18354,9 +18021,9 @@ func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MoveInfoboxFieldPayload) + res := resTmp.(*gqlmodel.MoveInfoboxFieldPayload) fc.Result = res - return ec.marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx, field.Selections, res) + return ec.marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldPayload(ctx, field.Selections, res) } func (ec 
*executionContext) _Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18384,7 +18051,7 @@ func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, fi fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveInfoboxField(rctx, args["input"].(graphql1.RemoveInfoboxFieldInput)) + return ec.resolvers.Mutation().RemoveInfoboxField(rctx, args["input"].(gqlmodel.RemoveInfoboxFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -18393,9 +18060,9 @@ func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.RemoveInfoboxFieldPayload) + res := resTmp.(*gqlmodel.RemoveInfoboxFieldPayload) fc.Result = res - return ec.marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) + return ec.marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) } func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -18423,7 +18090,7 @@ func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().ImportLayer(rctx, args["input"].(graphql1.ImportLayerInput)) + return ec.resolvers.Mutation().ImportLayer(rctx, args["input"].(gqlmodel.ImportLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -18432,12 +18099,12 @@ func (ec *executionContext) _Mutation_importLayer(ctx 
context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ImportLayerPayload) + res := resTmp.(*gqlmodel.ImportLayerPayload) fc.Result = res - return ec.marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx, field.Selections, res) + return ec.marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { +func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18469,7 +18136,7 @@ func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field gra return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { +func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18501,7 +18168,7 @@ func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graph return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { +func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18536,7 +18203,7 @@ func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field gra return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *graphql1.PageInfo) (ret graphql.Marshaler) { +func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18571,7 +18238,7 @@ func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18606,7 +18273,7 @@ func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.Collec return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18638,7 +18305,7 @@ func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.C return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18673,7 +18340,7 @@ func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.Coll return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18708,7 +18375,7 @@ func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18743,7 +18410,7 @@ func (ec *executionContext) _Plugin_description(ctx context.Context, field graph return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { 
defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18778,7 +18445,7 @@ func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.Co return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18813,7 +18480,7 @@ func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18845,7 +18512,7 @@ func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18875,12 +18542,12 @@ func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.([]*graphql1.PluginExtension) + res := 
resTmp.([]*gqlmodel.PluginExtension) fc.Result = res - return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionแš„(ctx, field.Selections, res) + return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18907,12 +18574,12 @@ func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.Col if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18946,12 +18613,12 @@ func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return 
ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18983,7 +18650,7 @@ func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19015,7 +18682,7 @@ func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19057,7 +18724,7 @@ func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field gr return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec 
*executionContext) _Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19099,7 +18766,7 @@ func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, f return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_propertySchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19126,12 +18793,12 @@ func (ec *executionContext) _Plugin_propertySchema(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19166,7 +18833,7 @@ func (ec *executionContext) 
_PluginExtension_extensionId(ctx context.Context, fi return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19201,7 +18868,7 @@ func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19231,12 +18898,12 @@ func (ec *executionContext) _PluginExtension_type(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(graphql1.PluginExtensionType) + res := resTmp.(gqlmodel.PluginExtensionType) fc.Result = res - return ec.marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionType(ctx, field.Selections, res) + return ec.marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) 
_PluginExtension_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19271,7 +18938,7 @@ func (ec *executionContext) _PluginExtension_name(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19306,7 +18973,7 @@ func (ec *executionContext) _PluginExtension_description(ctx context.Context, fi return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19341,7 +19008,7 @@ func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19371,12 +19038,12 @@ func (ec *executionContext) 
_PluginExtension_visualizer(ctx context.Context, fie } return graphql.Null } - res := resTmp.(graphql1.Visualizer) + res := resTmp.(gqlmodel.Visualizer) fc.Result = res - return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx, field.Selections, res) + return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19411,7 +19078,7 @@ func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Contex return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19443,7 +19110,7 @@ func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Conte return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19475,7 +19142,7 @@ func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx contex return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19502,12 +19169,12 @@ func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19541,12 +19208,12 @@ func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.SceneWidget) + res := resTmp.(*gqlmodel.SceneWidget) fc.Result = res - return 
ec.marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, field.Selections, res) + return ec.marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19573,12 +19240,12 @@ func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19620,7 +19287,7 @@ func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.PluginExtension) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19662,7 +19329,7 @@ func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19697,7 +19364,7 @@ func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginMetadata_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19732,7 +19399,7 @@ func (ec *executionContext) _PluginMetadata_description(ctx context.Context, fie return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -19767,7 +19434,7 @@ func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field gr return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19802,7 +19469,7 @@ func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, fi return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.PluginMetadata) (ret graphql.Marshaler) { +func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19837,7 +19504,7 @@ func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19872,7 +19539,7 @@ func (ec *executionContext) _Project_id(ctx context.Context, field graphql.Colle return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Project_isArchived(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_isArchived(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19907,7 +19574,7 @@ func (ec *executionContext) _Project_isArchived(ctx context.Context, field graph return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19942,7 +19609,7 @@ func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, fiel return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -19977,7 +19644,7 @@ func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20012,7 +19679,7 @@ 
func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20047,7 +19714,7 @@ func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphq return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20082,7 +19749,7 @@ func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphq return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publishedAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_publishedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20114,7 +19781,7 @@ func (ec *executionContext) _Project_publishedAt(ctx context.Context, field grap return ec.marshalODateTime2แš–timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_name(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20149,7 +19816,7 @@ func (ec *executionContext) _Project_name(ctx context.Context, field graphql.Col return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20184,7 +19851,7 @@ func (ec *executionContext) _Project_description(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20219,7 +19886,7 @@ func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.Co return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_publicTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20254,7 +19921,7 @@ func (ec *executionContext) _Project_publicTitle(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) 
_Project_publicDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_publicDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20289,7 +19956,7 @@ func (ec *executionContext) _Project_publicDescription(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicImage(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_publicImage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20324,7 +19991,7 @@ func (ec *executionContext) _Project_publicImage(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20359,7 +20026,7 @@ func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field gr return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ 
-20391,7 +20058,7 @@ func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql return ec.marshalOURL2แš–netแš‹urlแšURL(ctx, field.Selections, res) } -func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20426,7 +20093,7 @@ func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.C return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20456,12 +20123,12 @@ func (ec *executionContext) _Project_visualizer(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(graphql1.Visualizer) + res := resTmp.(gqlmodel.Visualizer) fc.Result = res - return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx, field.Selections, res) + return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := 
recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20491,12 +20158,12 @@ func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(graphql1.PublishmentStatus) + res := resTmp.(gqlmodel.PublishmentStatus) fc.Result = res - return ec.marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx, field.Selections, res) + return ec.marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx, field.Selections, res) } -func (ec *executionContext) _Project_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20523,12 +20190,12 @@ func (ec *executionContext) _Project_team(ctx context.Context, field graphql.Col if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.Project) (ret graphql.Marshaler) { +func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20555,12 +20222,12 @@ func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.Co if resTmp == nil { return 
graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20590,12 +20257,12 @@ func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field } return graphql.Null } - res := resTmp.([]*graphql1.ProjectEdge) + res := resTmp.([]*gqlmodel.ProjectEdge) fc.Result = res - return ec.marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdgeแš„(ctx, field.Selections, res) + return ec.marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20625,12 +20292,12 @@ func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field } return graphql.Null } - res := resTmp.([]*graphql1.Project) + res := resTmp.([]*gqlmodel.Project) fc.Result = res - return 
ec.marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) + return ec.marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20660,12 +20327,12 @@ func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, fie } return graphql.Null } - res := resTmp.(*graphql1.PageInfo) + res := resTmp.(*gqlmodel.PageInfo) fc.Result = res - return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx, field.Selections, res) + return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectConnection) (ret graphql.Marshaler) { +func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20700,7 +20367,7 @@ func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, f return ec.marshalNInt2int(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectEdge) (ret graphql.Marshaler) { +func (ec *executionContext) 
_ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20735,7 +20402,7 @@ func (ec *executionContext) _ProjectEdge_cursor(ctx context.Context, field graph return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectEdge) (ret graphql.Marshaler) { +func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20762,12 +20429,12 @@ func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Project) + res := resTmp.(*gqlmodel.Project) fc.Result = res - return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) + return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field graphql.CollectedField, obj *graphql1.ProjectPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20797,12 +20464,12 @@ func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field g } return graphql.Null } - res := resTmp.(*graphql1.Project) + res := resTmp.(*gqlmodel.Project) fc.Result = res - return 
ec.marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) + return ec.marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } -func (ec *executionContext) _Property_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { +func (ec *executionContext) _Property_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20837,7 +20504,7 @@ func (ec *executionContext) _Property_id(ctx context.Context, field graphql.Coll return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { +func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20872,7 +20539,7 @@ func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphq return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _Property_items(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { +func (ec *executionContext) _Property_items(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20902,12 +20569,12 @@ func (ec *executionContext) _Property_items(ctx context.Context, field graphql.C } return graphql.Null } - res := 
resTmp.([]graphql1.PropertyItem) + res := resTmp.([]gqlmodel.PropertyItem) fc.Result = res - return ec.marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemแš„(ctx, field.Selections, res) + return ec.marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Property_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { +func (ec *executionContext) _Property_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20934,12 +20601,12 @@ func (ec *executionContext) _Property_schema(ctx context.Context, field graphql. if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { +func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20966,12 +20633,12 @@ func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return 
ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _Property_merged(ctx context.Context, field graphql.CollectedField, obj *graphql1.Property) (ret graphql.Marshaler) { +func (ec *executionContext) _Property_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -20998,12 +20665,12 @@ func (ec *executionContext) _Property_merged(ctx context.Context, field graphql. if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.MergedProperty) + res := resTmp.(*gqlmodel.MergedProperty) fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx, field.Selections, res) + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyCondition) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21038,7 +20705,7 @@ func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, fiel return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.PropertyCondition) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21068,12 +20735,12 @@ func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field g } return graphql.Null } - res := resTmp.(graphql1.ValueType) + res := resTmp.(gqlmodel.ValueType) fc.Result = res - return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyCondition) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21105,7 +20772,7 @@ func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21140,7 +20807,7 @@ func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql return 
ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21175,7 +20842,7 @@ func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21210,7 +20877,7 @@ func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field g return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21245,7 +20912,7 @@ func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field gr return 
ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21272,12 +20939,12 @@ func (ec *executionContext) _PropertyField_links(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.([]*graphql1.PropertyFieldLink) + res := resTmp.([]*gqlmodel.PropertyFieldLink) fc.Result = res - return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLinkแš„(ctx, field.Selections, res) + return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21307,12 +20974,12 @@ func (ec *executionContext) _PropertyField_type(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(graphql1.ValueType) + res := resTmp.(gqlmodel.ValueType) fc.Result = res - return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) + return 
ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_value(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21344,7 +21011,7 @@ func (ec *executionContext) _PropertyField_value(ctx context.Context, field grap return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_parent(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21371,12 +21038,12 @@ func (ec *executionContext) _PropertyField_parent(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) @@ -21403,12 +21070,12 @@ func (ec *executionContext) _PropertyField_schema(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_field(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21435,12 +21102,12 @@ func (ec *executionContext) _PropertyField_field(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaField) + res := resTmp.(*gqlmodel.PropertySchemaField) fc.Result = res - return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, 
r)) @@ -21472,7 +21139,7 @@ func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, fiel return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21504,7 +21171,7 @@ func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, fi return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21539,7 +21206,7 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Conte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21574,7 +21241,7 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21601,12 +21268,12 @@ func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Dataset) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21633,12 +21300,12 @@ func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetField) + res := resTmp.(*gqlmodel.DatasetField) fc.Result = res - return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx, field.Selections, res) + return 
ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21665,12 +21332,12 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldLink) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21697,12 +21364,12 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaField(ctx context.Co if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchemaField) + res := resTmp.(*gqlmodel.DatasetSchemaField) fc.Result = res - return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, field.Selections, res) 
+ return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21732,12 +21399,12 @@ func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, } return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21764,12 +21431,12 @@ func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Cont if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyField) + res := resTmp.(*gqlmodel.PropertyField) fc.Result = res - return ec.marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx, field.Selections, res) + return 
ec.marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21804,7 +21471,7 @@ func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21839,7 +21506,7 @@ func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field g return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21874,7 +21541,7 @@ func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, fi return 
ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21904,12 +21571,12 @@ func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field gra } return graphql.Null } - res := resTmp.([]*graphql1.PropertyField) + res := resTmp.([]*gqlmodel.PropertyField) fc.Result = res - return ec.marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldแš„(ctx, field.Selections, res) + return ec.marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21936,12 +21603,12 @@ func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return 
ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -21968,12 +21635,12 @@ func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaGroup) + res := resTmp.(*gqlmodel.PropertySchemaGroup) fc.Result = res - return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx, field.Selections, res) + return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22008,7 +21675,7 @@ func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { +func (ec *executionContext) 
_PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22043,7 +21710,7 @@ func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, fie return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22078,7 +21745,7 @@ func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22108,12 +21775,12 @@ func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field } return graphql.Null } - res := resTmp.([]*graphql1.PropertyGroup) + res := resTmp.([]*gqlmodel.PropertyGroup) fc.Result = res - return ec.marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroupแš„(ctx, field.Selections, res) + return 
ec.marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22140,12 +21807,12 @@ func (ec *executionContext) _PropertyGroupList_schema(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyGroupList) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22172,12 +21839,12 @@ func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaGroup) + res := resTmp.(*gqlmodel.PropertySchemaGroup) fc.Result = res - return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx, field.Selections, res) + return 
ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyItemPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22207,12 +21874,12 @@ func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, f } return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyItemPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22239,12 +21906,12 @@ func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.(graphql1.PropertyItem) + res := resTmp.(gqlmodel.PropertyItem) fc.Result = res - return ec.marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx, field.Selections, res) + return 
ec.marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22279,7 +21946,7 @@ func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22311,7 +21978,7 @@ func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22343,7 +22010,7 @@ func (ec *executionContext) 
_PropertyLinkableFields_url(ctx context.Context, fie return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22370,12 +22037,12 @@ func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Cont if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaField) + res := resTmp.(*gqlmodel.PropertySchemaField) fc.Result = res - return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22402,12 +22069,12 @@ func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaField) + res := resTmp.(*gqlmodel.PropertySchemaField) fc.Result = res - return 
ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertyLinkableFields) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22434,12 +22101,12 @@ func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchema) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22474,7 +22141,7 @@ func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphq return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) 
_PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchema) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22504,12 +22171,12 @@ func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field gr } return graphql.Null } - res := resTmp.([]*graphql1.PropertySchemaGroup) + res := resTmp.([]*gqlmodel.PropertySchemaGroup) fc.Result = res - return ec.marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroupแš„(ctx, field.Selections, res) + return ec.marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchema) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22539,12 +22206,12 @@ func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, } return graphql.Null } - res := resTmp.(*graphql1.PropertyLinkableFields) + res := resTmp.(*gqlmodel.PropertyLinkableFields) fc.Result = res - return ec.marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyLinkableFields(ctx, field.Selections, res) + return ec.marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx, field.Selections, res) } -func (ec 
*executionContext) _PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22579,7 +22246,7 @@ func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, fi return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22609,12 +22276,12 @@ func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field } return graphql.Null } - res := resTmp.(graphql1.ValueType) + res := resTmp.(gqlmodel.ValueType) fc.Result = res - return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, field.Selections, res) + return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -22649,7 +22316,7 @@ func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22684,7 +22351,7 @@ func (ec *executionContext) _PropertySchemaField_name(ctx context.Context, field return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22719,7 +22386,7 @@ func (ec *executionContext) _PropertySchemaField_description(ctx context.Context return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22751,7 +22418,7 @@ func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, fie return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) 
_PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22783,7 +22450,7 @@ func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, fie return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22815,7 +22482,7 @@ func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Contex return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22842,12 +22509,12 @@ func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaFieldUI) + res := resTmp.(*gqlmodel.PropertySchemaFieldUI) fc.Result = res - return ec.marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldUI(ctx, field.Selections, res) + 
return ec.marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22879,7 +22546,7 @@ func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22911,7 +22578,7 @@ func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22938,12 +22605,12 @@ func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.([]*graphql1.PropertySchemaFieldChoice) + res := 
resTmp.([]*gqlmodel.PropertySchemaFieldChoice) fc.Result = res - return ec.marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoiceแš„(ctx, field.Selections, res) + return ec.marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoiceแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -22970,12 +22637,12 @@ func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyCondition) + res := resTmp.(*gqlmodel.PropertyCondition) fc.Result = res - return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyCondition(ctx, field.Selections, res) + return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23007,7 +22674,7 @@ func (ec *executionContext) 
_PropertySchemaField_allTranslatedTitle(ctx context. return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23039,7 +22706,7 @@ func (ec *executionContext) _PropertySchemaField_allTranslatedName(ctx context.C return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23071,7 +22738,7 @@ func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx co return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23113,7 +22780,7 @@ func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Con return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) 
_PropertySchemaField_translatedName(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23155,7 +22822,7 @@ func (ec *executionContext) _PropertySchemaField_translatedName(ctx context.Cont return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaField) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23197,7 +22864,7 @@ func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx conte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23232,7 +22899,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) 
_PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23267,7 +22934,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_label(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23302,7 +22969,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_label(ctx context.Context return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23334,7 +23001,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { 
defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23366,7 +23033,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx co return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedLabel(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedLabel(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23398,7 +23065,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedLabel(ctx co return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23440,7 +23107,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx conte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_translatedLabel(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaFieldChoice_translatedLabel(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23482,7 +23149,7 
@@ func (ec *executionContext) _PropertySchemaFieldChoice_translatedLabel(ctx conte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23517,7 +23184,7 @@ func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Conte return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23552,7 +23219,7 @@ func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, f return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23582,12 +23249,12 @@ func (ec *executionContext) _PropertySchemaGroup_fields(ctx 
context.Context, fie } return graphql.Null } - res := resTmp.([]*graphql1.PropertySchemaField) + res := resTmp.([]*gqlmodel.PropertySchemaField) fc.Result = res - return ec.marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldแš„(ctx, field.Selections, res) + return ec.marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23622,7 +23289,7 @@ func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, fie return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23649,12 +23316,12 @@ func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Conte if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertyCondition) + res := resTmp.(*gqlmodel.PropertyCondition) fc.Result = res - return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyCondition(ctx, field.Selections, res) + return 
ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23686,7 +23353,7 @@ func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, fiel return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23718,7 +23385,7 @@ func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context. 
return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23750,7 +23417,7 @@ func (ec *executionContext) _PropertySchemaGroup_name(ctx context.Context, field return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23782,7 +23449,7 @@ func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx conte return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23809,12 +23476,12 @@ func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context if 
resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchemaField) + res := resTmp.(*gqlmodel.PropertySchemaField) fc.Result = res - return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, field.Selections, res) + return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23841,12 +23508,12 @@ func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *graphql1.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -23915,9 +23582,9 @@ func (ec *executionContext) _Query_me(ctx 
context.Context, field graphql.Collect if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.User) + res := resTmp.(*gqlmodel.User) fc.Result = res - return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } func (ec *executionContext) _Query_node(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -23945,7 +23612,7 @@ func (ec *executionContext) _Query_node(ctx context.Context, field graphql.Colle fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Node(rctx, args["id"].(id.ID), args["type"].(graphql1.NodeType)) + return ec.resolvers.Query().Node(rctx, args["id"].(id.ID), args["type"].(gqlmodel.NodeType)) }) if err != nil { ec.Error(ctx, err) @@ -23954,9 +23621,9 @@ func (ec *executionContext) _Query_node(ctx context.Context, field graphql.Colle if resTmp == nil { return graphql.Null } - res := resTmp.(graphql1.Node) + res := resTmp.(gqlmodel.Node) fc.Result = res - return ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx, field.Selections, res) + return ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, field.Selections, res) } func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -23984,7 +23651,7 @@ func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.Coll fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Nodes(rctx, 
args["id"].([]*id.ID), args["type"].(graphql1.NodeType)) + return ec.resolvers.Query().Nodes(rctx, args["id"].([]*id.ID), args["type"].(gqlmodel.NodeType)) }) if err != nil { ec.Error(ctx, err) @@ -23996,9 +23663,9 @@ func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.Coll } return graphql.Null } - res := resTmp.([]graphql1.Node) + res := resTmp.([]gqlmodel.Node) fc.Result = res - return ec.marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx, field.Selections, res) + return ec.marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, field.Selections, res) } func (ec *executionContext) _Query_propertySchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24035,9 +23702,9 @@ func (ec *executionContext) _Query_propertySchema(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PropertySchema) + res := resTmp.(*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, field.Selections, res) + return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24077,9 +23744,9 @@ func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field gr } return graphql.Null } - res := resTmp.([]*graphql1.PropertySchema) + res := resTmp.([]*gqlmodel.PropertySchema) fc.Result = res - return ec.marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaแš„(ctx, field.Selections, res) + return 
ec.marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx, field.Selections, res) } func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24116,9 +23783,9 @@ func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.Col if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24158,9 +23825,9 @@ func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.Co } return graphql.Null } - res := resTmp.([]*graphql1.Plugin) + res := resTmp.([]*gqlmodel.Plugin) fc.Result = res - return ec.marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginแš„(ctx, field.Selections, res) + return ec.marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx, field.Selections, res) } func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24197,9 +23864,9 @@ func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.Coll if resTmp == nil { return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return 
ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24236,9 +23903,9 @@ func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.Coll if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24278,9 +23945,9 @@ func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.(*graphql1.AssetConnection) + res := resTmp.(*gqlmodel.AssetConnection) fc.Result = res - return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx, field.Selections, res) + return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx, field.Selections, res) } func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24320,9 +23987,9 @@ func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.(*graphql1.ProjectConnection) + res := resTmp.(*gqlmodel.ProjectConnection) fc.Result = res - return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx, field.Selections, res) + return 
ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx, field.Selections, res) } func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24362,9 +24029,9 @@ func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(*graphql1.DatasetSchemaConnection) + res := resTmp.(*gqlmodel.DatasetSchemaConnection) fc.Result = res - return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx, field.Selections, res) + return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) } func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24404,9 +24071,9 @@ func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.(*graphql1.DatasetConnection) + res := resTmp.(*gqlmodel.DatasetConnection) fc.Result = res - return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx, field.Selections, res) + return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) } func (ec *executionContext) _Query_sceneLock(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24443,9 +24110,9 @@ func (ec *executionContext) _Query_sceneLock(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.SceneLockMode) + res := resTmp.(*gqlmodel.SceneLockMode) fc.Result = res - return ec.marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx, field.Selections, res) + return ec.marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx, field.Selections, res) } func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24485,9 +24152,9 @@ func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, fi } return graphql.Null } - res := resTmp.([]*graphql1.DatasetSchema) + res := resTmp.([]*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx, field.Selections, res) + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) } func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24524,9 +24191,9 @@ func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.SearchedUser) + res := resTmp.(*gqlmodel.SearchedUser) fc.Result = res - return ec.marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSearchedUser(ctx, field.Selections, res) + return ec.marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSearchedUser(ctx, field.Selections, res) } func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24566,9 +24233,9 @@ func 
(ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field } return graphql.Null } - res := resTmp.(*graphql1.CheckProjectAliasPayload) + res := resTmp.(*gqlmodel.CheckProjectAliasPayload) fc.Result = res - return ec.marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx, field.Selections, res) + return ec.marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCheckProjectAliasPayload(ctx, field.Selections, res) } func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24601,9 +24268,9 @@ func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field } return graphql.Null } - res := resTmp.([]*graphql1.PluginMetadata) + res := resTmp.([]*gqlmodel.PluginMetadata) fc.Result = res - return ec.marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadataแš„(ctx, field.Selections, res) + return ec.marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx, field.Selections, res) } func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -24677,7 +24344,7 @@ func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.C return ec.marshalO__Schema2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšSchema(ctx, field.Selections, res) } -func (ec *executionContext) _Rect_west(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { +func (ec *executionContext) _Rect_west(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ 
-24712,7 +24379,7 @@ func (ec *executionContext) _Rect_west(ctx context.Context, field graphql.Collec return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { +func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24747,7 +24414,7 @@ func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { +func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24782,7 +24449,7 @@ func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.Collec return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.CollectedField, obj *graphql1.Rect) (ret graphql.Marshaler) { +func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24817,7 +24484,7 @@ func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveAssetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.RemoveAssetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24852,7 +24519,7 @@ func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24887,7 +24554,7 @@ func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Con return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24922,7 +24589,7 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx contex return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24952,12 +24619,12 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveInfoboxPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24987,12 +24654,12 @@ func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, fie } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ 
-25027,7 +24694,7 @@ func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25057,12 +24724,12 @@ func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, } return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveMemberFromTeamPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveMemberFromTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25092,12 +24759,12 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Contex } return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return 
ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25127,12 +24794,12 @@ func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveWidgetPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveWidgetPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25167,7 +24834,7 @@ func (ec *executionContext) _RemoveWidgetPayload_pluginId(ctx context.Context, f return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.RemoveWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25202,7 +24869,7 @@ func (ec *executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25237,7 +24904,7 @@ func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.Collect return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25272,7 +24939,7 @@ func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25307,7 +24974,7 @@ func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25342,7 +25009,7 @@ func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25377,7 +25044,7 @@ func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql. 
return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25412,7 +25079,7 @@ func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql. return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25447,7 +25114,7 @@ func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25477,12 +25144,12 @@ func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.Co } return graphql.Null } - res := resTmp.([]*graphql1.SceneWidget) + res := resTmp.([]*gqlmodel.SceneWidget) fc.Result = res - return ec.marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidgetแš„(ctx, field.Selections, res) + return 
ec.marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidgetแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25512,12 +25179,12 @@ func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.Co } return graphql.Null } - res := resTmp.([]*graphql1.ScenePlugin) + res := resTmp.([]*gqlmodel.ScenePlugin) fc.Result = res - return ec.marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePluginแš„(ctx, field.Selections, res) + return ec.marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25547,12 +25214,12 @@ func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, fi } return graphql.Null } - res := resTmp.([]*graphql1.DatasetSchema) + res := resTmp.([]*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx, field.Selections, res) + return 
ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25579,12 +25246,12 @@ func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.Co if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Project) + res := resTmp.(*gqlmodel.Project) fc.Result = res - return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, field.Selections, res) + return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25611,12 +25278,12 @@ func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.Colle if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_property(ctx context.Context, field 
graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25643,12 +25310,12 @@ func (ec *executionContext) _Scene_property(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25675,12 +25342,12 @@ func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.LayerGroup) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx, field.Selections, res) + return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_lockMode(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_lockMode(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25710,12 +25377,12 @@ func (ec *executionContext) _Scene_lockMode(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.(graphql1.SceneLockMode) + res := resTmp.(gqlmodel.SceneLockMode) fc.Result = res - return ec.marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx, field.Selections, res) + return ec.marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *graphql1.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25752,12 +25419,12 @@ func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(*graphql1.DatasetSchemaConnection) + res := resTmp.(*gqlmodel.DatasetSchemaConnection) fc.Result = res - return 
ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx, field.Selections, res) + return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) } -func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { +func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25792,7 +25459,7 @@ func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field gra return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { +func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25824,7 +25491,7 @@ func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field g return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { +func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25851,12 +25518,12 @@ func (ec 
*executionContext) _ScenePlugin_plugin(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _ScenePlugin_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.ScenePlugin) (ret graphql.Marshaler) { +func (ec *executionContext) _ScenePlugin_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25883,12 +25550,12 @@ func (ec *executionContext) _ScenePlugin_property(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25923,7 +25590,7 @@ func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.C return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, 
field.Selections, res) } -func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25958,7 +25625,7 @@ func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field gra return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -25993,7 +25660,7 @@ func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26028,7 +25695,7 @@ func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj 
*graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26063,7 +25730,7 @@ func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field grap return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26090,12 +25757,12 @@ func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26122,12 +25789,12 @@ func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.PluginExtension) + res := resTmp.(*gqlmodel.PluginExtension) fc.Result = 
res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, field.Selections, res) + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_property(ctx context.Context, field graphql.CollectedField, obj *graphql1.SceneWidget) (ret graphql.Marshaler) { +func (ec *executionContext) _SceneWidget_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26154,12 +25821,12 @@ func (ec *executionContext) _SceneWidget_property(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.Property) + res := resTmp.(*gqlmodel.Property) fc.Result = res - return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx, field.Selections, res) + return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _SearchedUser_userId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SearchedUser) (ret graphql.Marshaler) { +func (ec *executionContext) _SearchedUser_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SearchedUser) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26194,7 +25861,7 @@ func (ec *executionContext) _SearchedUser_userId(ctx context.Context, field grap return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SearchedUser_userName(ctx context.Context, field graphql.CollectedField, obj *graphql1.SearchedUser) (ret 
graphql.Marshaler) { +func (ec *executionContext) _SearchedUser_userName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SearchedUser) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26229,7 +25896,7 @@ func (ec *executionContext) _SearchedUser_userName(ctx context.Context, field gr return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _SearchedUser_userEmail(ctx context.Context, field graphql.CollectedField, obj *graphql1.SearchedUser) (ret graphql.Marshaler) { +func (ec *executionContext) _SearchedUser_userEmail(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SearchedUser) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26264,7 +25931,7 @@ func (ec *executionContext) _SearchedUser_userEmail(ctx context.Context, field g return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graphql.CollectedField, obj *graphql1.SignupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26294,12 +25961,12 @@ func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(*graphql1.User) + res := resTmp.(*gqlmodel.User) fc.Result = res - return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) + return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } -func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.SignupPayload) 
(ret graphql.Marshaler) { +func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26329,12 +25996,12 @@ func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26369,7 +26036,7 @@ func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26404,7 +26071,7 @@ func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field g return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) 
_SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26434,12 +26101,12 @@ func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Contex } return graphql.Null } - res := resTmp.([]*graphql1.DatasetSchema) + res := resTmp.([]*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx, field.Selections, res) + return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) } -func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *graphql1.SyncDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26469,12 +26136,12 @@ func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, fie } return graphql.Null } - res := resTmp.([]*graphql1.Dataset) + res := resTmp.([]*gqlmodel.Dataset) fc.Result = res - return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetแš„(ctx, field.Selections, res) + return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, 
obj *graphql1.Team) (ret graphql.Marshaler) { +func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26509,7 +26176,7 @@ func (ec *executionContext) _Team_id(ctx context.Context, field graphql.Collecte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { +func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26544,7 +26211,7 @@ func (ec *executionContext) _Team_name(ctx context.Context, field graphql.Collec return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Team_members(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { +func (ec *executionContext) _Team_members(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26574,12 +26241,12 @@ func (ec *executionContext) _Team_members(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.([]*graphql1.TeamMember) + res := resTmp.([]*gqlmodel.TeamMember) fc.Result = res - return ec.marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMemberแš„(ctx, field.Selections, res) + return ec.marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMemberแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.CollectedField, 
obj *graphql1.Team) (ret graphql.Marshaler) { +func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26614,7 +26281,7 @@ func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.Co return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { +func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26651,12 +26318,12 @@ func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.Coll } return graphql.Null } - res := resTmp.(*graphql1.AssetConnection) + res := resTmp.(*gqlmodel.AssetConnection) fc.Result = res - return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx, field.Selections, res) + return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.CollectedField, obj *graphql1.Team) (ret graphql.Marshaler) { +func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26693,12 +26360,12 @@ func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.Co } return graphql.Null } - res := resTmp.(*graphql1.ProjectConnection) + res := resTmp.(*gqlmodel.ProjectConnection) fc.Result = res - return 
ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx, field.Selections, res) + return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx, field.Selections, res) } -func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphql.CollectedField, obj *graphql1.TeamMember) (ret graphql.Marshaler) { +func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26733,7 +26400,7 @@ func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *graphql1.TeamMember) (ret graphql.Marshaler) { +func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26763,12 +26430,12 @@ func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql. 
} return graphql.Null } - res := resTmp.(graphql1.Role) + res := resTmp.(gqlmodel.Role) fc.Result = res - return ec.marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx, field.Selections, res) + return ec.marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, field.Selections, res) } -func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql.CollectedField, obj *graphql1.TeamMember) (ret graphql.Marshaler) { +func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26795,12 +26462,12 @@ func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql. if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.User) + res := resTmp.(*gqlmodel.User) fc.Result = res - return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26832,7 +26499,7 @@ func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field gr return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec 
*executionContext) _Typography_fontWeight(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26864,7 +26531,7 @@ func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field gr return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_fontSize(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_fontSize(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26896,7 +26563,7 @@ func (ec *executionContext) _Typography_fontSize(ctx context.Context, field grap return ec.marshalOInt2แš–int(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_color(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_color(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26928,7 +26595,7 @@ func (ec *executionContext) _Typography_color(ctx context.Context, field graphql return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_textAlign(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_textAlign(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26955,12 +26622,12 @@ func (ec *executionContext) _Typography_textAlign(ctx context.Context, field gra if resTmp == 
nil { return graphql.Null } - res := resTmp.(*graphql1.TextAlign) + res := resTmp.(*gqlmodel.TextAlign) fc.Result = res - return ec.marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx, field.Selections, res) + return ec.marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -26992,7 +26659,7 @@ func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql. return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_italic(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_italic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27024,7 +26691,7 @@ func (ec *executionContext) _Typography_italic(ctx context.Context, field graphq return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_underline(ctx context.Context, field graphql.CollectedField, obj *graphql1.Typography) (ret graphql.Marshaler) { +func (ec *executionContext) _Typography_underline(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27056,7 +26723,7 @@ func (ec *executionContext) _Typography_underline(ctx 
context.Context, field gra return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27091,7 +26758,7 @@ func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) } -func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UninstallPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27121,12 +26788,12 @@ func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, f } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27153,12 +26820,12 @@ func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx contex if resTmp == nil { return graphql.Null } - res := resTmp.(*graphql1.DatasetSchema) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateLayerPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateLayerPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27188,12 +26855,12 @@ func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field } return graphql.Null } - res := resTmp.(graphql1.Layer) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateMePayload_user(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateMePayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateMePayload_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMePayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -27223,12 +26890,12 @@ func (ec *executionContext) _UpdateMePayload_user(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(*graphql1.User) + res := resTmp.(*gqlmodel.User) fc.Result = res - return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx, field.Selections, res) + return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateMemberOfTeamPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMemberOfTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27258,12 +26925,12 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, } return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateTeamPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27293,12 +26960,12 @@ func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field g } return graphql.Null } - res := 
resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27328,12 +26995,12 @@ func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpdateWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27363,12 +27030,12 @@ func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context } return graphql.Null } - res := resTmp.(*graphql1.SceneWidget) + res := resTmp.(*gqlmodel.SceneWidget) fc.Result = res - return 
ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, field.Selections, res) + return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) } -func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpgradePluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpgradePluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27398,12 +27065,12 @@ func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, fie } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UpgradePluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpgradePluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27433,12 +27100,12 @@ func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Contex } return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return 
ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UploadPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27468,12 +27135,12 @@ func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, fie } return graphql.Null } - res := resTmp.(*graphql1.Plugin) + res := resTmp.(*gqlmodel.Plugin) fc.Result = res - return ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, field.Selections, res) + return ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _UploadPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *graphql1.UploadPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UploadPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27503,12 +27170,12 @@ func (ec *executionContext) _UploadPluginPayload_scene(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(*graphql1.Scene) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx, field.Selections, res) + 
return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *graphql1.UploadPluginPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27538,12 +27205,12 @@ func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context } return graphql.Null } - res := resTmp.(*graphql1.ScenePlugin) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, field.Selections, res) + return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _User_id(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27578,7 +27245,7 @@ func (ec *executionContext) _User_id(ctx context.Context, field graphql.Collecte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) @@ -27613,7 +27280,7 @@ func (ec *executionContext) _User_name(ctx context.Context, field graphql.Collec return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _User_email(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27648,7 +27315,7 @@ func (ec *executionContext) _User_email(ctx context.Context, field graphql.Colle return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _User_lang(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_lang(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27683,7 +27350,7 @@ func (ec *executionContext) _User_lang(ctx context.Context, field graphql.Collec return ec.marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, field.Selections, res) } -func (ec *executionContext) _User_theme(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_theme(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27713,12 +27380,12 @@ func (ec *executionContext) _User_theme(ctx context.Context, field graphql.Colle } return graphql.Null } - res := resTmp.(graphql1.Theme) + res := resTmp.(gqlmodel.Theme) fc.Result = res - return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx, field.Selections, res) + return 
ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, field.Selections, res) } -func (ec *executionContext) _User_myTeamId(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27753,7 +27420,7 @@ func (ec *executionContext) _User_myTeamId(ctx context.Context, field graphql.Co return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _User_auths(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27788,7 +27455,7 @@ func (ec *executionContext) _User_auths(ctx context.Context, field graphql.Colle return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) } -func (ec *executionContext) _User_teams(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_teams(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27818,12 +27485,12 @@ func (ec *executionContext) _User_teams(ctx context.Context, field graphql.Colle } return graphql.Null } - res := resTmp.([]*graphql1.Team) + res := resTmp.([]*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamแš„(ctx, field.Selections, res) + return 
ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx, field.Selections, res) } -func (ec *executionContext) _User_myTeam(ctx context.Context, field graphql.CollectedField, obj *graphql1.User) (ret graphql.Marshaler) { +func (ec *executionContext) _User_myTeam(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27853,9 +27520,9 @@ func (ec *executionContext) _User_myTeam(ctx context.Context, field graphql.Coll } return graphql.Null } - res := resTmp.(*graphql1.Team) + res := resTmp.(*gqlmodel.Team) fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, field.Selections, res) + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { @@ -28945,8 +28612,8 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co // region **************************** input.gotpl ***************************** -func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Context, obj interface{}) (graphql1.AddDatasetSchemaInput, error) { - var it graphql1.AddDatasetSchemaInput +func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDatasetSchemaInput, error) { + var it gqlmodel.AddDatasetSchemaInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -28981,8 +28648,8 @@ func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Cont return it, nil } -func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Context, obj interface{}) 
(graphql1.AddDynamicDatasetInput, error) { - var it graphql1.AddDynamicDatasetInput +func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.AddDynamicDatasetInput, error) { + var it gqlmodel.AddDynamicDatasetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29041,8 +28708,8 @@ func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Con return it, nil } -func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx context.Context, obj interface{}) (graphql1.AddDynamicDatasetSchemaInput, error) { - var it graphql1.AddDynamicDatasetSchemaInput +func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDynamicDatasetSchemaInput, error) { + var it gqlmodel.AddDynamicDatasetSchemaInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29061,8 +28728,8 @@ func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx conte return it, nil } -func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Context, obj interface{}) (graphql1.AddInfoboxFieldInput, error) { - var it graphql1.AddInfoboxFieldInput +func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.AddInfoboxFieldInput, error) { + var it gqlmodel.AddInfoboxFieldInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29105,8 +28772,8 @@ func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Conte return it, nil } -func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context, obj interface{}) (graphql1.AddLayerGroupInput, error) { - var it graphql1.AddLayerGroupInput +func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context, obj interface{}) (gqlmodel.AddLayerGroupInput, error) { + var it gqlmodel.AddLayerGroupInput var asMap = 
obj.(map[string]interface{}) for k, v := range asMap { @@ -29173,8 +28840,8 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, obj interface{}) (graphql1.AddLayerItemInput, error) { - var it graphql1.AddLayerItemInput +func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, obj interface{}) (gqlmodel.AddLayerItemInput, error) { + var it gqlmodel.AddLayerItemInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29241,8 +28908,8 @@ func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Context, obj interface{}) (graphql1.AddMemberToTeamInput, error) { - var it graphql1.AddMemberToTeamInput +func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Context, obj interface{}) (gqlmodel.AddMemberToTeamInput, error) { + var it gqlmodel.AddMemberToTeamInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29267,7 +28934,7 @@ func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("role")) - it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx, v) + it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, v) if err != nil { return it, err } @@ -29277,8 +28944,8 @@ func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Conte return it, nil } -func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Context, obj interface{}) (graphql1.AddPropertyItemInput, error) { - var it graphql1.AddPropertyItemInput +func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx 
context.Context, obj interface{}) (gqlmodel.AddPropertyItemInput, error) { + var it gqlmodel.AddPropertyItemInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29319,7 +28986,7 @@ func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldType")) - it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, v) + it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, v) if err != nil { return it, err } @@ -29329,8 +28996,8 @@ func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Conte return it, nil } -func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, obj interface{}) (graphql1.AddWidgetInput, error) { - var it graphql1.AddWidgetInput +func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.AddWidgetInput, error) { + var it gqlmodel.AddWidgetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29365,8 +29032,8 @@ func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, ob return it, nil } -func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, obj interface{}) (graphql1.CreateAssetInput, error) { - var it graphql1.CreateAssetInput +func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, obj interface{}) (gqlmodel.CreateAssetInput, error) { + var it gqlmodel.CreateAssetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29393,8 +29060,8 @@ func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context, obj interface{}) 
(graphql1.CreateInfoboxInput, error) { - var it graphql1.CreateInfoboxInput +func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context, obj interface{}) (gqlmodel.CreateInfoboxInput, error) { + var it gqlmodel.CreateInfoboxInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29413,8 +29080,8 @@ func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context, obj interface{}) (graphql1.CreateProjectInput, error) { - var it graphql1.CreateProjectInput +func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context, obj interface{}) (gqlmodel.CreateProjectInput, error) { + var it gqlmodel.CreateProjectInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29431,7 +29098,7 @@ func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("visualizer")) - it.Visualizer, err = ec.unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx, v) + it.Visualizer, err = ec.unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, v) if err != nil { return it, err } @@ -29481,8 +29148,8 @@ func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, obj interface{}) (graphql1.CreateSceneInput, error) { - var it graphql1.CreateSceneInput +func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, obj interface{}) (gqlmodel.CreateSceneInput, error) { + var it gqlmodel.CreateSceneInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29501,8 +29168,8 @@ func (ec *executionContext) unmarshalInputCreateSceneInput(ctx 
context.Context, return it, nil } -func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (graphql1.CreateTeamInput, error) { - var it graphql1.CreateTeamInput +func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTeamInput, error) { + var it gqlmodel.CreateTeamInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29521,8 +29188,8 @@ func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, o return it, nil } -func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (graphql1.DeleteMeInput, error) { - var it graphql1.DeleteMeInput +func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteMeInput, error) { + var it gqlmodel.DeleteMeInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29541,8 +29208,8 @@ func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj return it, nil } -func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (graphql1.DeleteProjectInput, error) { - var it graphql1.DeleteProjectInput +func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteProjectInput, error) { + var it gqlmodel.DeleteProjectInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29561,8 +29228,8 @@ func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (graphql1.DeleteTeamInput, error) { - var it graphql1.DeleteTeamInput +func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteTeamInput, error) { + var it gqlmodel.DeleteTeamInput var asMap = obj.(map[string]interface{}) for k, v := 
range asMap { @@ -29581,8 +29248,8 @@ func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, o return it, nil } -func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx context.Context, obj interface{}) (graphql1.ImportDatasetFromGoogleSheetInput, error) { - var it graphql1.ImportDatasetFromGoogleSheetInput +func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx context.Context, obj interface{}) (gqlmodel.ImportDatasetFromGoogleSheetInput, error) { + var it gqlmodel.ImportDatasetFromGoogleSheetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29633,8 +29300,8 @@ func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx return it, nil } -func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context, obj interface{}) (graphql1.ImportDatasetInput, error) { - var it graphql1.ImportDatasetInput +func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.ImportDatasetInput, error) { + var it gqlmodel.ImportDatasetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29669,8 +29336,8 @@ func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, obj interface{}) (graphql1.ImportLayerInput, error) { - var it graphql1.ImportLayerInput +func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, obj interface{}) (gqlmodel.ImportLayerInput, error) { + var it gqlmodel.ImportLayerInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29695,7 +29362,7 @@ func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("format")) - it.Format, err = 
ec.unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerEncodingFormat(ctx, v) + it.Format, err = ec.unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerEncodingFormat(ctx, v) if err != nil { return it, err } @@ -29705,8 +29372,8 @@ func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context, obj interface{}) (graphql1.InstallPluginInput, error) { - var it graphql1.InstallPluginInput +func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context, obj interface{}) (gqlmodel.InstallPluginInput, error) { + var it gqlmodel.InstallPluginInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29733,8 +29400,8 @@ func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx context.Context, obj interface{}) (graphql1.LinkDatasetToPropertyValueInput, error) { - var it graphql1.LinkDatasetToPropertyValueInput +func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.LinkDatasetToPropertyValueInput, error) { + var it gqlmodel.LinkDatasetToPropertyValueInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29801,8 +29468,8 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co return it, nil } -func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Context, obj interface{}) (graphql1.MoveInfoboxFieldInput, error) { - var it graphql1.MoveInfoboxFieldInput +func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { + var it gqlmodel.MoveInfoboxFieldInput var asMap = 
obj.(map[string]interface{}) for k, v := range asMap { @@ -29837,8 +29504,8 @@ func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Cont return it, nil } -func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, obj interface{}) (graphql1.MoveLayerInput, error) { - var it graphql1.MoveLayerInput +func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, obj interface{}) (gqlmodel.MoveLayerInput, error) { + var it gqlmodel.MoveLayerInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29873,8 +29540,8 @@ func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, ob return it, nil } -func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Context, obj interface{}) (graphql1.MovePropertyItemInput, error) { - var it graphql1.MovePropertyItemInput +func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.MovePropertyItemInput, error) { + var it gqlmodel.MovePropertyItemInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29917,8 +29584,8 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont return it, nil } -func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Context, obj interface{}) (graphql1.PublishProjectInput, error) { - var it graphql1.PublishProjectInput +func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Context, obj interface{}) (gqlmodel.PublishProjectInput, error) { + var it gqlmodel.PublishProjectInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29943,7 +29610,7 @@ func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Contex var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("status")) - it.Status, err = 
ec.unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx, v) + it.Status, err = ec.unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx, v) if err != nil { return it, err } @@ -29953,8 +29620,8 @@ func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Contex return it, nil } -func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, obj interface{}) (graphql1.RemoveAssetInput, error) { - var it graphql1.RemoveAssetInput +func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveAssetInput, error) { + var it gqlmodel.RemoveAssetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -29973,8 +29640,8 @@ func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.Context, obj interface{}) (graphql1.RemoveDatasetSchemaInput, error) { - var it graphql1.RemoveDatasetSchemaInput +func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { + var it gqlmodel.RemoveDatasetSchemaInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30001,8 +29668,8 @@ func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.C return it, nil } -func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Context, obj interface{}) (graphql1.RemoveInfoboxFieldInput, error) { - var it graphql1.RemoveInfoboxFieldInput +func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveInfoboxFieldInput, error) { + var it gqlmodel.RemoveInfoboxFieldInput var asMap = obj.(map[string]interface{}) for k, v := range asMap 
{ @@ -30029,8 +29696,8 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Co return it, nil } -func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context, obj interface{}) (graphql1.RemoveInfoboxInput, error) { - var it graphql1.RemoveInfoboxInput +func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveInfoboxInput, error) { + var it gqlmodel.RemoveInfoboxInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30049,8 +29716,8 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, obj interface{}) (graphql1.RemoveLayerInput, error) { - var it graphql1.RemoveLayerInput +func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveLayerInput, error) { + var it gqlmodel.RemoveLayerInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30069,8 +29736,8 @@ func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context.Context, obj interface{}) (graphql1.RemoveMemberFromTeamInput, error) { - var it graphql1.RemoveMemberFromTeamInput +func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveMemberFromTeamInput, error) { + var it gqlmodel.RemoveMemberFromTeamInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30097,8 +29764,8 @@ func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context. 
return it, nil } -func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, obj interface{}) (graphql1.RemoveMyAuthInput, error) { - var it graphql1.RemoveMyAuthInput +func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveMyAuthInput, error) { + var it gqlmodel.RemoveMyAuthInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30117,8 +29784,8 @@ func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.Context, obj interface{}) (graphql1.RemovePropertyFieldInput, error) { - var it graphql1.RemovePropertyFieldInput +func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.Context, obj interface{}) (gqlmodel.RemovePropertyFieldInput, error) { + var it gqlmodel.RemovePropertyFieldInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30161,8 +29828,8 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C return it, nil } -func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Context, obj interface{}) (graphql1.RemovePropertyItemInput, error) { - var it graphql1.RemovePropertyItemInput +func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.RemovePropertyItemInput, error) { + var it gqlmodel.RemovePropertyItemInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30197,8 +29864,8 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co return it, nil } -func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, obj interface{}) (graphql1.RemoveWidgetInput, error) { - var it graphql1.RemoveWidgetInput +func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, obj interface{}) 
(gqlmodel.RemoveWidgetInput, error) { + var it gqlmodel.RemoveWidgetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30233,8 +29900,8 @@ func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj interface{}) (graphql1.SignupInput, error) { - var it graphql1.SignupInput +func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj interface{}) (gqlmodel.SignupInput, error) { + var it gqlmodel.SignupInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30251,7 +29918,7 @@ func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj i var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("theme")) - it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx, v) + it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, v) if err != nil { return it, err } @@ -30285,8 +29952,8 @@ func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj i return it, nil } -func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, obj interface{}) (graphql1.SyncDatasetInput, error) { - var it graphql1.SyncDatasetInput +func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.SyncDatasetInput, error) { + var it gqlmodel.SyncDatasetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30313,8 +29980,8 @@ func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Context, obj interface{}) (graphql1.UninstallPluginInput, error) { - var it graphql1.UninstallPluginInput +func (ec *executionContext) 
unmarshalInputUninstallPluginInput(ctx context.Context, obj interface{}) (gqlmodel.UninstallPluginInput, error) { + var it gqlmodel.UninstallPluginInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30341,8 +30008,8 @@ func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Conte return it, nil } -func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.Context, obj interface{}) (graphql1.UnlinkPropertyValueInput, error) { - var it graphql1.UnlinkPropertyValueInput +func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.UnlinkPropertyValueInput, error) { + var it gqlmodel.UnlinkPropertyValueInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30385,8 +30052,8 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C return it, nil } -func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.Context, obj interface{}) (graphql1.UpdateDatasetSchemaInput, error) { - var it graphql1.UpdateDatasetSchemaInput +func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { + var it gqlmodel.UpdateDatasetSchemaInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30413,8 +30080,8 @@ func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.C return it, nil } -func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, obj interface{}) (graphql1.UpdateLayerInput, error) { - var it graphql1.UpdateLayerInput +func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateLayerInput, error) { + var it gqlmodel.UpdateLayerInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30449,8 +30116,8 @@ func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx 
context.Context, return it, nil } -func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj interface{}) (graphql1.UpdateMeInput, error) { - var it graphql1.UpdateMeInput +func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateMeInput, error) { + var it gqlmodel.UpdateMeInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30483,7 +30150,7 @@ func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("theme")) - it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx, v) + it.Theme, err = ec.unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, v) if err != nil { return it, err } @@ -30509,8 +30176,8 @@ func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj return it, nil } -func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Context, obj interface{}) (graphql1.UpdateMemberOfTeamInput, error) { - var it graphql1.UpdateMemberOfTeamInput +func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateMemberOfTeamInput, error) { + var it gqlmodel.UpdateMemberOfTeamInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30535,7 +30202,7 @@ func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("role")) - it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx, v) + it.Role, err = ec.unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, v) if err != nil { return it, err } @@ -30545,8 +30212,8 @@ func (ec 
*executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Co return it, nil } -func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context, obj interface{}) (graphql1.UpdateProjectInput, error) { - var it graphql1.UpdateProjectInput +func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateProjectInput, error) { + var it gqlmodel.UpdateProjectInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30677,8 +30344,8 @@ func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyItemInput, error) { - var it graphql1.UpdatePropertyItemInput +func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyItemInput, error) { + var it gqlmodel.UpdatePropertyItemInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30703,7 +30370,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("operations")) - it.Operations, err = ec.unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInputแš„(ctx, v) + it.Operations, err = ec.unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInputแš„(ctx, v) if err != nil { return it, err } @@ -30713,8 +30380,8 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Co return it, nil } -func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyItemOperationInput, error) { - var 
it graphql1.UpdatePropertyItemOperationInput +func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyItemOperationInput, error) { + var it gqlmodel.UpdatePropertyItemOperationInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30723,7 +30390,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx c var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("operation")) - it.Operation, err = ec.unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšListOperation(ctx, v) + it.Operation, err = ec.unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšListOperation(ctx, v) if err != nil { return it, err } @@ -30755,7 +30422,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx c var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("nameFieldType")) - it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, v) + it.NameFieldType, err = ec.unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, v) if err != nil { return it, err } @@ -30765,108 +30432,8 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx c return it, nil } -func (ec *executionContext) unmarshalInputUpdatePropertyValueCameraInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueCameraInput, error) { - var it graphql1.UpdatePropertyValueCameraInput - var asMap = obj.(map[string]interface{}) - - for k, v := range asMap { - switch k { - case "propertyId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "schemaItemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "itemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "fieldId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "lat": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) - it.Lat, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "lng": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) - it.Lng, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "altitude": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("altitude")) - it.Altitude, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "heading": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("heading")) - it.Heading, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "pitch": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pitch")) - it.Pitch, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "roll": - var err 
error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("roll")) - it.Roll, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "fov": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fov")) - it.Fov, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - } - } - - return it, nil -} - -func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueInput, error) { - var it graphql1.UpdatePropertyValueInput +func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyValueInput, error) { + var it gqlmodel.UpdatePropertyValueInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -30915,135 +30482,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("type")) - it.Type, err = ec.unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx, v) - if err != nil { - return it, err - } - } - } - - return it, nil -} - -func (ec *executionContext) unmarshalInputUpdatePropertyValueLatLngHeightInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueLatLngHeightInput, error) { - var it graphql1.UpdatePropertyValueLatLngHeightInput - var asMap = obj.(map[string]interface{}) - - for k, v := range asMap { - switch k { - case "propertyId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "schemaItemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = 
ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "itemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "fieldId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "lat": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) - it.Lat, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "lng": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) - it.Lng, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "height": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("height")) - it.Height, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - } - } - - return it, nil -} - -func (ec *executionContext) unmarshalInputUpdatePropertyValueLatLngInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueLatLngInput, error) { - var it graphql1.UpdatePropertyValueLatLngInput - var asMap = obj.(map[string]interface{}) - - for k, v := range asMap { - switch k { - case "propertyId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "schemaItemId": - var err error - - ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "itemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "fieldId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "lat": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lat")) - it.Lat, err = ec.unmarshalNFloat2float64(ctx, v) - if err != nil { - return it, err - } - case "lng": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lng")) - it.Lng, err = ec.unmarshalNFloat2float64(ctx, v) + it.Type, err = ec.unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, v) if err != nil { return it, err } @@ -31053,116 +30492,8 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueLatLngInput(ctx con return it, nil } -func (ec *executionContext) unmarshalInputUpdatePropertyValueTypographyInput(ctx context.Context, obj interface{}) (graphql1.UpdatePropertyValueTypographyInput, error) { - var it graphql1.UpdatePropertyValueTypographyInput - var asMap = obj.(map[string]interface{}) - - for k, v := range asMap { - switch k { - case "propertyId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case 
"schemaItemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "itemId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } - case "fieldId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) - if err != nil { - return it, err - } - case "fontFamily": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fontFamily")) - it.FontFamily, err = ec.unmarshalOString2แš–string(ctx, v) - if err != nil { - return it, err - } - case "fontWeight": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fontWeight")) - it.FontWeight, err = ec.unmarshalOString2แš–string(ctx, v) - if err != nil { - return it, err - } - case "fontSize": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fontSize")) - it.FontSize, err = ec.unmarshalOInt2แš–int(ctx, v) - if err != nil { - return it, err - } - case "color": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("color")) - it.Color, err = ec.unmarshalOString2แš–string(ctx, v) - if err != nil { - return it, err - } - case "textAlign": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("textAlign")) - it.TextAlign, err = ec.unmarshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx, v) - if err != nil { - return it, err - } - case "bold": - 
var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("bold")) - it.Bold, err = ec.unmarshalOBoolean2แš–bool(ctx, v) - if err != nil { - return it, err - } - case "italic": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("italic")) - it.Italic, err = ec.unmarshalOBoolean2แš–bool(ctx, v) - if err != nil { - return it, err - } - case "underline": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("underline")) - it.Underline, err = ec.unmarshalOBoolean2แš–bool(ctx, v) - if err != nil { - return it, err - } - } - } - - return it, nil -} - -func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, obj interface{}) (graphql1.UpdateTeamInput, error) { - var it graphql1.UpdateTeamInput +func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateTeamInput, error) { + var it gqlmodel.UpdateTeamInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -31189,8 +30520,8 @@ func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, o return it, nil } -func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, obj interface{}) (graphql1.UpdateWidgetInput, error) { - var it graphql1.UpdateWidgetInput +func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateWidgetInput, error) { + var it gqlmodel.UpdateWidgetInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -31233,8 +30564,8 @@ func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context, obj interface{}) (graphql1.UpgradePluginInput, error) { - var it graphql1.UpgradePluginInput +func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context, obj interface{}) (gqlmodel.UpgradePluginInput, 
error) { + var it gqlmodel.UpgradePluginInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -31269,8 +30600,8 @@ func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context.Context, obj interface{}) (graphql1.UploadFileToPropertyInput, error) { - var it graphql1.UploadFileToPropertyInput +func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context.Context, obj interface{}) (gqlmodel.UploadFileToPropertyInput, error) { + var it gqlmodel.UploadFileToPropertyInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -31321,8 +30652,8 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. return it, nil } -func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, obj interface{}) (graphql1.UploadPluginInput, error) { - var it graphql1.UploadPluginInput +func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, obj interface{}) (gqlmodel.UploadPluginInput, error) { + var it gqlmodel.UploadPluginInput var asMap = obj.(map[string]interface{}) for k, v := range asMap { @@ -31361,20 +30692,20 @@ func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, // region ************************** interface.gotpl *************************** -func (ec *executionContext) _Layer(ctx context.Context, sel ast.SelectionSet, obj graphql1.Layer) graphql.Marshaler { +func (ec *executionContext) _Layer(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Layer) graphql.Marshaler { switch obj := (obj).(type) { case nil: return graphql.Null - case graphql1.LayerItem: + case gqlmodel.LayerItem: return ec._LayerItem(ctx, sel, &obj) - case *graphql1.LayerItem: + case *gqlmodel.LayerItem: if obj == nil { return graphql.Null } return ec._LayerItem(ctx, sel, obj) - case graphql1.LayerGroup: + case gqlmodel.LayerGroup: return 
ec._LayerGroup(ctx, sel, &obj) - case *graphql1.LayerGroup: + case *gqlmodel.LayerGroup: if obj == nil { return graphql.Null } @@ -31384,20 +30715,20 @@ func (ec *executionContext) _Layer(ctx context.Context, sel ast.SelectionSet, ob } } -func (ec *executionContext) _Layers(ctx context.Context, sel ast.SelectionSet, obj graphql1.Layers) graphql.Marshaler { +func (ec *executionContext) _Layers(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Layers) graphql.Marshaler { switch obj := (obj).(type) { case nil: return graphql.Null - case graphql1.LayerItem: + case gqlmodel.LayerItem: return ec._LayerItem(ctx, sel, &obj) - case *graphql1.LayerItem: + case *gqlmodel.LayerItem: if obj == nil { return graphql.Null } return ec._LayerItem(ctx, sel, obj) - case graphql1.LayerGroup: + case gqlmodel.LayerGroup: return ec._LayerGroup(ctx, sel, &obj) - case *graphql1.LayerGroup: + case *gqlmodel.LayerGroup: if obj == nil { return graphql.Null } @@ -31407,69 +30738,69 @@ func (ec *executionContext) _Layers(ctx context.Context, sel ast.SelectionSet, o } } -func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj graphql1.Node) graphql.Marshaler { +func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Node) graphql.Marshaler { switch obj := (obj).(type) { case nil: return graphql.Null - case graphql1.Asset: + case gqlmodel.Asset: return ec._Asset(ctx, sel, &obj) - case *graphql1.Asset: + case *gqlmodel.Asset: if obj == nil { return graphql.Null } return ec._Asset(ctx, sel, obj) - case graphql1.User: + case gqlmodel.User: return ec._User(ctx, sel, &obj) - case *graphql1.User: + case *gqlmodel.User: if obj == nil { return graphql.Null } return ec._User(ctx, sel, obj) - case graphql1.Team: + case gqlmodel.Team: return ec._Team(ctx, sel, &obj) - case *graphql1.Team: + case *gqlmodel.Team: if obj == nil { return graphql.Null } return ec._Team(ctx, sel, obj) - case graphql1.Project: + case gqlmodel.Project: return 
ec._Project(ctx, sel, &obj) - case *graphql1.Project: + case *gqlmodel.Project: if obj == nil { return graphql.Null } return ec._Project(ctx, sel, obj) - case graphql1.Scene: + case gqlmodel.Scene: return ec._Scene(ctx, sel, &obj) - case *graphql1.Scene: + case *gqlmodel.Scene: if obj == nil { return graphql.Null } return ec._Scene(ctx, sel, obj) - case graphql1.Property: + case gqlmodel.Property: return ec._Property(ctx, sel, &obj) - case *graphql1.Property: + case *gqlmodel.Property: if obj == nil { return graphql.Null } return ec._Property(ctx, sel, obj) - case graphql1.DatasetSchema: + case gqlmodel.DatasetSchema: return ec._DatasetSchema(ctx, sel, &obj) - case *graphql1.DatasetSchema: + case *gqlmodel.DatasetSchema: if obj == nil { return graphql.Null } return ec._DatasetSchema(ctx, sel, obj) - case graphql1.DatasetSchemaField: + case gqlmodel.DatasetSchemaField: return ec._DatasetSchemaField(ctx, sel, &obj) - case *graphql1.DatasetSchemaField: + case *gqlmodel.DatasetSchemaField: if obj == nil { return graphql.Null } return ec._DatasetSchemaField(ctx, sel, obj) - case graphql1.Dataset: + case gqlmodel.Dataset: return ec._Dataset(ctx, sel, &obj) - case *graphql1.Dataset: + case *gqlmodel.Dataset: if obj == nil { return graphql.Null } @@ -31479,20 +30810,20 @@ func (ec *executionContext) _Node(ctx context.Context, sel ast.SelectionSet, obj } } -func (ec *executionContext) _PropertyItem(ctx context.Context, sel ast.SelectionSet, obj graphql1.PropertyItem) graphql.Marshaler { +func (ec *executionContext) _PropertyItem(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.PropertyItem) graphql.Marshaler { switch obj := (obj).(type) { case nil: return graphql.Null - case graphql1.PropertyGroup: + case gqlmodel.PropertyGroup: return ec._PropertyGroup(ctx, sel, &obj) - case *graphql1.PropertyGroup: + case *gqlmodel.PropertyGroup: if obj == nil { return graphql.Null } return ec._PropertyGroup(ctx, sel, obj) - case graphql1.PropertyGroupList: + case 
gqlmodel.PropertyGroupList: return ec._PropertyGroupList(ctx, sel, &obj) - case *graphql1.PropertyGroupList: + case *gqlmodel.PropertyGroupList: if obj == nil { return graphql.Null } @@ -31508,7 +30839,7 @@ func (ec *executionContext) _PropertyItem(ctx context.Context, sel ast.Selection var addDatasetSchemaPayloadImplementors = []string{"AddDatasetSchemaPayload"} -func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addDatasetSchemaPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31532,7 +30863,7 @@ func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel as var addDynamicDatasetPayloadImplementors = []string{"AddDynamicDatasetPayload"} -func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddDynamicDatasetPayload) graphql.Marshaler { +func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDynamicDatasetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31558,7 +30889,7 @@ func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel a var addDynamicDatasetSchemaPayloadImplementors = []string{"AddDynamicDatasetSchemaPayload"} -func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDynamicDatasetSchemaPayload) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetSchemaPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31582,7 +30913,7 @@ func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, var addInfoboxFieldPayloadImplementors = []string{"AddInfoboxFieldPayload"} -func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddInfoboxFieldPayload) graphql.Marshaler { +func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddInfoboxFieldPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addInfoboxFieldPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31614,7 +30945,7 @@ func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast var addLayerGroupPayloadImplementors = []string{"AddLayerGroupPayload"} -func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddLayerGroupPayload) graphql.Marshaler { +func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddLayerGroupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addLayerGroupPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31648,7 +30979,7 @@ func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.S var addLayerItemPayloadImplementors = []string{"AddLayerItemPayload"} -func (ec *executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddLayerItemPayload) graphql.Marshaler { +func (ec *executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddLayerItemPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addLayerItemPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31682,7 +31013,7 @@ func (ec *executionContext) 
_AddLayerItemPayload(ctx context.Context, sel ast.Se var addMemberToTeamPayloadImplementors = []string{"AddMemberToTeamPayload"} -func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddMemberToTeamPayload) graphql.Marshaler { +func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddMemberToTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addMemberToTeamPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31709,7 +31040,7 @@ func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast var addWidgetPayloadImplementors = []string{"AddWidgetPayload"} -func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AddWidgetPayload) graphql.Marshaler { +func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddWidgetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addWidgetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31741,7 +31072,7 @@ func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.Selec var assetImplementors = []string{"Asset", "Node"} -func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Asset) graphql.Marshaler { +func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Asset) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, assetImplementors) out := graphql.NewFieldSet(fields) @@ -31809,7 +31140,7 @@ func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, ob var assetConnectionImplementors = []string{"AssetConnection"} -func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AssetConnection) graphql.Marshaler { +func (ec *executionContext) 
_AssetConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AssetConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, assetConnectionImplementors) out := graphql.NewFieldSet(fields) @@ -31851,7 +31182,7 @@ func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.Select var assetEdgeImplementors = []string{"AssetEdge"} -func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.AssetEdge) graphql.Marshaler { +func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AssetEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, assetEdgeImplementors) out := graphql.NewFieldSet(fields) @@ -31880,7 +31211,7 @@ func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet var cameraImplementors = []string{"Camera"} -func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Camera) graphql.Marshaler { +func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Camera) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, cameraImplementors) out := graphql.NewFieldSet(fields) @@ -31937,7 +31268,7 @@ func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, o var checkProjectAliasPayloadImplementors = []string{"CheckProjectAliasPayload"} -func (ec *executionContext) _CheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CheckProjectAliasPayload) graphql.Marshaler { +func (ec *executionContext) _CheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CheckProjectAliasPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, checkProjectAliasPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31969,7 +31300,7 @@ func (ec *executionContext) _CheckProjectAliasPayload(ctx context.Context, 
sel a var createAssetPayloadImplementors = []string{"CreateAssetPayload"} -func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateAssetPayload) graphql.Marshaler { +func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateAssetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createAssetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -31996,7 +31327,7 @@ func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.Sel var createInfoboxPayloadImplementors = []string{"CreateInfoboxPayload"} -func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateInfoboxPayload) graphql.Marshaler { +func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateInfoboxPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createInfoboxPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32023,7 +31354,7 @@ func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.S var createScenePayloadImplementors = []string{"CreateScenePayload"} -func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateScenePayload) graphql.Marshaler { +func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateScenePayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createScenePayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32050,7 +31381,7 @@ func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.Sel var createTeamPayloadImplementors = []string{"CreateTeamPayload"} -func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.CreateTeamPayload) graphql.Marshaler { 
+func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createTeamPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32077,7 +31408,7 @@ func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.Sele var datasetImplementors = []string{"Dataset", "Node"} -func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Dataset) graphql.Marshaler { +func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Dataset) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetImplementors) out := graphql.NewFieldSet(fields) @@ -32141,7 +31472,7 @@ func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, var datasetConnectionImplementors = []string{"DatasetConnection"} -func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetConnection) graphql.Marshaler { +func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetConnectionImplementors) out := graphql.NewFieldSet(fields) @@ -32183,7 +31514,7 @@ func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.Sele var datasetEdgeImplementors = []string{"DatasetEdge"} -func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetEdge) graphql.Marshaler { +func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetEdgeImplementors) out := graphql.NewFieldSet(fields) @@ -32212,7 +31543,7 @@ func (ec *executionContext) _DatasetEdge(ctx context.Context, 
sel ast.SelectionS var datasetFieldImplementors = []string{"DatasetField"} -func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetField) graphql.Marshaler { +func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetFieldImplementors) out := graphql.NewFieldSet(fields) @@ -32289,7 +31620,7 @@ func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.Selection var datasetSchemaImplementors = []string{"DatasetSchema", "Node"} -func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchema) graphql.Marshaler { +func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchema) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaImplementors) out := graphql.NewFieldSet(fields) @@ -32376,7 +31707,7 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.Selectio var datasetSchemaConnectionImplementors = []string{"DatasetSchemaConnection"} -func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchemaConnection) graphql.Marshaler { +func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaConnectionImplementors) out := graphql.NewFieldSet(fields) @@ -32418,7 +31749,7 @@ func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel as var datasetSchemaEdgeImplementors = []string{"DatasetSchemaEdge"} -func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchemaEdge) graphql.Marshaler { +func (ec *executionContext) 
_DatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaEdgeImplementors) out := graphql.NewFieldSet(fields) @@ -32447,7 +31778,7 @@ func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, sel ast.Sele var datasetSchemaFieldImplementors = []string{"DatasetSchemaField", "Node"} -func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DatasetSchemaField) graphql.Marshaler { +func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaFieldImplementors) out := graphql.NewFieldSet(fields) @@ -32518,7 +31849,7 @@ func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.Sel var deleteMePayloadImplementors = []string{"DeleteMePayload"} -func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DeleteMePayload) graphql.Marshaler { +func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteMePayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, deleteMePayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32545,7 +31876,7 @@ func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.Select var deleteProjectPayloadImplementors = []string{"DeleteProjectPayload"} -func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DeleteProjectPayload) graphql.Marshaler { +func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteProjectPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, deleteProjectPayloadImplementors) out := 
graphql.NewFieldSet(fields) @@ -32572,7 +31903,7 @@ func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.S var deleteTeamPayloadImplementors = []string{"DeleteTeamPayload"} -func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.DeleteTeamPayload) graphql.Marshaler { +func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, deleteTeamPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32599,7 +31930,7 @@ func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.Sele var importDatasetPayloadImplementors = []string{"ImportDatasetPayload"} -func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ImportDatasetPayload) graphql.Marshaler { +func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ImportDatasetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, importDatasetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32626,7 +31957,7 @@ func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.S var importLayerPayloadImplementors = []string{"ImportLayerPayload"} -func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ImportLayerPayload) graphql.Marshaler { +func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ImportLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, importLayerPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32658,7 +31989,7 @@ func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.Sel var infoboxImplementors = []string{"Infobox"} -func (ec *executionContext) _Infobox(ctx 
context.Context, sel ast.SelectionSet, obj *graphql1.Infobox) graphql.Marshaler { +func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Infobox) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, infoboxImplementors) out := graphql.NewFieldSet(fields) @@ -32760,7 +32091,7 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, var infoboxFieldImplementors = []string{"InfoboxField"} -func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.InfoboxField) graphql.Marshaler { +func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.InfoboxField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, infoboxFieldImplementors) out := graphql.NewFieldSet(fields) @@ -32919,7 +32250,7 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection var installPluginPayloadImplementors = []string{"InstallPluginPayload"} -func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.InstallPluginPayload) graphql.Marshaler { +func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.InstallPluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, installPluginPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -32951,7 +32282,7 @@ func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.S var latLngImplementors = []string{"LatLng"} -func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LatLng) graphql.Marshaler { +func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LatLng) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, latLngImplementors) out := graphql.NewFieldSet(fields) @@ -32983,7 +32314,7 
@@ func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, o var latLngHeightImplementors = []string{"LatLngHeight"} -func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LatLngHeight) graphql.Marshaler { +func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LatLngHeight) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, latLngHeightImplementors) out := graphql.NewFieldSet(fields) @@ -33020,7 +32351,7 @@ func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.Selection var layerGroupImplementors = []string{"LayerGroup", "Layers", "Layer"} -func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LayerGroup) graphql.Marshaler { +func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerGroupImplementors) out := graphql.NewFieldSet(fields) @@ -33175,7 +32506,7 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe var layerItemImplementors = []string{"LayerItem", "Layers", "Layer"} -func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet, obj *graphql1.LayerItem) graphql.Marshaler { +func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerItem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerItemImplementors) out := graphql.NewFieldSet(fields) @@ -33317,7 +32648,7 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet var mergedInfoboxImplementors = []string{"MergedInfobox"} -func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedInfobox) graphql.Marshaler { +func (ec *executionContext) _MergedInfobox(ctx context.Context, sel 
ast.SelectionSet, obj *gqlmodel.MergedInfobox) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxImplementors) out := graphql.NewFieldSet(fields) @@ -33362,7 +32693,7 @@ func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.Selectio var mergedInfoboxFieldImplementors = []string{"MergedInfoboxField"} -func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedInfoboxField) graphql.Marshaler { +func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedInfoboxField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxFieldImplementors) out := graphql.NewFieldSet(fields) @@ -33450,7 +32781,7 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel var mergedLayerImplementors = []string{"MergedLayer"} -func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedLayer) graphql.Marshaler { +func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedLayer) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedLayerImplementors) out := graphql.NewFieldSet(fields) @@ -33521,7 +32852,7 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS var mergedPropertyImplementors = []string{"MergedProperty"} -func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedProperty) graphql.Marshaler { +func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedProperty) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyImplementors) out := graphql.NewFieldSet(fields) @@ -33609,7 +32940,7 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti var 
mergedPropertyFieldImplementors = []string{"MergedPropertyField"} -func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedPropertyField) graphql.Marshaler { +func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedPropertyField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyFieldImplementors) out := graphql.NewFieldSet(fields) @@ -33688,7 +33019,7 @@ func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.Se var mergedPropertyGroupImplementors = []string{"MergedPropertyGroup"} -func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MergedPropertyGroup) graphql.Marshaler { +func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedPropertyGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyGroupImplementors) out := graphql.NewFieldSet(fields) @@ -33803,7 +33134,7 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se var moveInfoboxFieldPayloadImplementors = []string{"MoveInfoboxFieldPayload"} -func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { +func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MoveInfoboxFieldPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, moveInfoboxFieldPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -33840,7 +33171,7 @@ func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel as var moveLayerPayloadImplementors = []string{"MoveLayerPayload"} -func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj 
*graphql1.MoveLayerPayload) graphql.Marshaler { +func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MoveLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, moveLayerPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -33927,8 +33258,6 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_publishProject(ctx, field) case "deleteProject": out.Values[i] = ec._Mutation_deleteProject(ctx, field) - case "uploadPlugin": - out.Values[i] = ec._Mutation_uploadPlugin(ctx, field) case "createScene": out.Values[i] = ec._Mutation_createScene(ctx, field) case "addWidget": @@ -33941,6 +33270,8 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_installPlugin(ctx, field) case "uninstallPlugin": out.Values[i] = ec._Mutation_uninstallPlugin(ctx, field) + case "uploadPlugin": + out.Values[i] = ec._Mutation_uploadPlugin(ctx, field) case "upgradePlugin": out.Values[i] = ec._Mutation_upgradePlugin(ctx, field) case "updateDatasetSchema": @@ -33961,14 +33292,6 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_addDatasetSchema(ctx, field) case "updatePropertyValue": out.Values[i] = ec._Mutation_updatePropertyValue(ctx, field) - case "updatePropertyValueLatLng": - out.Values[i] = ec._Mutation_updatePropertyValueLatLng(ctx, field) - case "updatePropertyValueLatLngHeight": - out.Values[i] = ec._Mutation_updatePropertyValueLatLngHeight(ctx, field) - case "updatePropertyValueCamera": - out.Values[i] = ec._Mutation_updatePropertyValueCamera(ctx, field) - case "updatePropertyValueTypography": - out.Values[i] = ec._Mutation_updatePropertyValueTypography(ctx, field) case "removePropertyField": out.Values[i] = ec._Mutation_removePropertyField(ctx, field) case "uploadFileToProperty": @@ -34020,7 +33343,7 @@ func (ec 
*executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) var pageInfoImplementors = []string{"PageInfo"} -func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PageInfo) graphql.Marshaler { +func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PageInfo) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pageInfoImplementors) out := graphql.NewFieldSet(fields) @@ -34056,7 +33379,7 @@ func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, var pluginImplementors = []string{"Plugin"} -func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Plugin) graphql.Marshaler { +func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Plugin) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pluginImplementors) out := graphql.NewFieldSet(fields) @@ -34155,7 +33478,7 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o var pluginExtensionImplementors = []string{"PluginExtension"} -func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PluginExtension) graphql.Marshaler { +func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PluginExtension) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pluginExtensionImplementors) out := graphql.NewFieldSet(fields) @@ -34282,7 +33605,7 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select var pluginMetadataImplementors = []string{"PluginMetadata"} -func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PluginMetadata) graphql.Marshaler { +func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PluginMetadata) 
graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pluginMetadataImplementors) out := graphql.NewFieldSet(fields) @@ -34329,7 +33652,7 @@ func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.Selecti var projectImplementors = []string{"Project", "Node"} -func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Project) graphql.Marshaler { +func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Project) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectImplementors) out := graphql.NewFieldSet(fields) @@ -34462,7 +33785,7 @@ func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, var projectConnectionImplementors = []string{"ProjectConnection"} -func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ProjectConnection) graphql.Marshaler { +func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectConnectionImplementors) out := graphql.NewFieldSet(fields) @@ -34504,7 +33827,7 @@ func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.Sele var projectEdgeImplementors = []string{"ProjectEdge"} -func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ProjectEdge) graphql.Marshaler { +func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectEdgeImplementors) out := graphql.NewFieldSet(fields) @@ -34533,7 +33856,7 @@ func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionS var projectPayloadImplementors = []string{"ProjectPayload"} -func (ec *executionContext) _ProjectPayload(ctx 
context.Context, sel ast.SelectionSet, obj *graphql1.ProjectPayload) graphql.Marshaler { +func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -34560,7 +33883,7 @@ func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.Selecti var propertyImplementors = []string{"Property", "Node"} -func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Property) graphql.Marshaler { +func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Property) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyImplementors) out := graphql.NewFieldSet(fields) @@ -34630,7 +33953,7 @@ func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, var propertyConditionImplementors = []string{"PropertyCondition"} -func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyCondition) graphql.Marshaler { +func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyCondition) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyConditionImplementors) out := graphql.NewFieldSet(fields) @@ -34664,7 +33987,7 @@ func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.Sele var propertyFieldImplementors = []string{"PropertyField"} -func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyField) graphql.Marshaler { +func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldImplementors) out := 
graphql.NewFieldSet(fields) @@ -34759,7 +34082,7 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio var propertyFieldLinkImplementors = []string{"PropertyFieldLink"} -func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyFieldLink) graphql.Marshaler { +func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyFieldLink) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldLinkImplementors) out := graphql.NewFieldSet(fields) @@ -34837,7 +34160,7 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele var propertyFieldPayloadImplementors = []string{"PropertyFieldPayload"} -func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyFieldPayload) graphql.Marshaler { +func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyFieldPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -34866,7 +34189,7 @@ func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.S var propertyGroupImplementors = []string{"PropertyGroup", "PropertyItem"} -func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyGroup) graphql.Marshaler { +func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupImplementors) out := graphql.NewFieldSet(fields) @@ -34930,7 +34253,7 @@ func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.Selectio var propertyGroupListImplementors = []string{"PropertyGroupList", "PropertyItem"} -func (ec 
*executionContext) _PropertyGroupList(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyGroupList) graphql.Marshaler { +func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyGroupList) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupListImplementors) out := graphql.NewFieldSet(fields) @@ -34994,7 +34317,7 @@ func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.Sele var propertyItemPayloadImplementors = []string{"PropertyItemPayload"} -func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyItemPayload) graphql.Marshaler { +func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyItemPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyItemPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35023,7 +34346,7 @@ func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.Se var propertyLinkableFieldsImplementors = []string{"PropertyLinkableFields"} -func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertyLinkableFields) graphql.Marshaler { +func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyLinkableFields) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyLinkableFieldsImplementors) out := graphql.NewFieldSet(fields) @@ -35087,7 +34410,7 @@ func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast var propertySchemaImplementors = []string{"PropertySchema"} -func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchema) graphql.Marshaler { +func (ec *executionContext) _PropertySchema(ctx context.Context, sel 
ast.SelectionSet, obj *gqlmodel.PropertySchema) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaImplementors) out := graphql.NewFieldSet(fields) @@ -35124,7 +34447,7 @@ func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.Selecti var propertySchemaFieldImplementors = []string{"PropertySchemaField"} -func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchemaField) graphql.Marshaler { +func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldImplementors) out := graphql.NewFieldSet(fields) @@ -35235,7 +34558,7 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se var propertySchemaFieldChoiceImplementors = []string{"PropertySchemaFieldChoice"} -func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchemaFieldChoice) graphql.Marshaler { +func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldChoiceImplementors) out := graphql.NewFieldSet(fields) @@ -35306,7 +34629,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel var propertySchemaGroupImplementors = []string{"PropertySchemaGroup"} -func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, obj *graphql1.PropertySchemaGroup) graphql.Marshaler { +func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaGroupImplementors) out := 
graphql.NewFieldSet(fields) @@ -35643,7 +34966,7 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr var rectImplementors = []string{"Rect"} -func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Rect) graphql.Marshaler { +func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Rect) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, rectImplementors) out := graphql.NewFieldSet(fields) @@ -35685,7 +35008,7 @@ func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj var removeAssetPayloadImplementors = []string{"RemoveAssetPayload"} -func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveAssetPayload) graphql.Marshaler { +func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveAssetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeAssetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35712,7 +35035,7 @@ func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.Sel var removeDatasetSchemaPayloadImplementors = []string{"RemoveDatasetSchemaPayload"} -func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeDatasetSchemaPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35739,7 +35062,7 @@ func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel var removeInfoboxFieldPayloadImplementors = []string{"RemoveInfoboxFieldPayload"} -func (ec *executionContext) 
_RemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { +func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveInfoboxFieldPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxFieldPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35771,7 +35094,7 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel var removeInfoboxPayloadImplementors = []string{"RemoveInfoboxPayload"} -func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveInfoboxPayload) graphql.Marshaler { +func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveInfoboxPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35798,7 +35121,7 @@ func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.S var removeLayerPayloadImplementors = []string{"RemoveLayerPayload"} -func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveLayerPayload) graphql.Marshaler { +func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeLayerPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35830,7 +35153,7 @@ func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.Sel var removeMemberFromTeamPayloadImplementors = []string{"RemoveMemberFromTeamPayload"} -func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { +func (ec *executionContext) 
_RemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveMemberFromTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeMemberFromTeamPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35857,7 +35180,7 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, se var removeWidgetPayloadImplementors = []string{"RemoveWidgetPayload"} -func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.RemoveWidgetPayload) graphql.Marshaler { +func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeWidgetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -35894,7 +35217,7 @@ func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.Se var sceneImplementors = []string{"Scene", "Node"} -func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Scene) graphql.Marshaler { +func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Scene) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, sceneImplementors) out := graphql.NewFieldSet(fields) @@ -36038,7 +35361,7 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob var scenePluginImplementors = []string{"ScenePlugin"} -func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionSet, obj *graphql1.ScenePlugin) graphql.Marshaler { +func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ScenePlugin) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, scenePluginImplementors) out := graphql.NewFieldSet(fields) @@ -36089,7 +35412,7 @@ func (ec *executionContext) _ScenePlugin(ctx context.Context, 
sel ast.SelectionS var sceneWidgetImplementors = []string{"SceneWidget"} -func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SceneWidget) graphql.Marshaler { +func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SceneWidget) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, sceneWidgetImplementors) out := graphql.NewFieldSet(fields) @@ -36169,7 +35492,7 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS var searchedUserImplementors = []string{"SearchedUser"} -func (ec *executionContext) _SearchedUser(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SearchedUser) graphql.Marshaler { +func (ec *executionContext) _SearchedUser(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SearchedUser) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, searchedUserImplementors) out := graphql.NewFieldSet(fields) @@ -36206,7 +35529,7 @@ func (ec *executionContext) _SearchedUser(ctx context.Context, sel ast.Selection var signupPayloadImplementors = []string{"SignupPayload"} -func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SignupPayload) graphql.Marshaler { +func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SignupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, signupPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36238,7 +35561,7 @@ func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.Selectio var syncDatasetPayloadImplementors = []string{"SyncDatasetPayload"} -func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.SyncDatasetPayload) graphql.Marshaler { +func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.SyncDatasetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, syncDatasetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36280,7 +35603,7 @@ func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.Sel var teamImplementors = []string{"Team", "Node"} -func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Team) graphql.Marshaler { +func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Team) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, teamImplementors) out := graphql.NewFieldSet(fields) @@ -36350,7 +35673,7 @@ func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj var teamMemberImplementors = []string{"TeamMember"} -func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSet, obj *graphql1.TeamMember) graphql.Marshaler { +func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TeamMember) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, teamMemberImplementors) out := graphql.NewFieldSet(fields) @@ -36393,7 +35716,7 @@ func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSe var typographyImplementors = []string{"Typography"} -func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSet, obj *graphql1.Typography) graphql.Marshaler { +func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Typography) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, typographyImplementors) out := graphql.NewFieldSet(fields) @@ -36431,7 +35754,7 @@ func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSe var uninstallPluginPayloadImplementors = []string{"UninstallPluginPayload"} -func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, 
sel ast.SelectionSet, obj *graphql1.UninstallPluginPayload) graphql.Marshaler { +func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UninstallPluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, uninstallPluginPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36463,7 +35786,7 @@ func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast var updateDatasetSchemaPayloadImplementors = []string{"UpdateDatasetSchemaPayload"} -func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateDatasetSchemaPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36487,7 +35810,7 @@ func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel var updateLayerPayloadImplementors = []string{"UpdateLayerPayload"} -func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateLayerPayload) graphql.Marshaler { +func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateLayerPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36514,7 +35837,7 @@ func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.Sel var updateMePayloadImplementors = []string{"UpdateMePayload"} -func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateMePayload) graphql.Marshaler { +func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.UpdateMePayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateMePayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36541,7 +35864,7 @@ func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.Select var updateMemberOfTeamPayloadImplementors = []string{"UpdateMemberOfTeamPayload"} -func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { +func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateMemberOfTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateMemberOfTeamPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36568,7 +35891,7 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel var updateTeamPayloadImplementors = []string{"UpdateTeamPayload"} -func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateTeamPayload) graphql.Marshaler { +func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateTeamPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36595,7 +35918,7 @@ func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.Sele var updateWidgetPayloadImplementors = []string{"UpdateWidgetPayload"} -func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpdateWidgetPayload) graphql.Marshaler { +func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36627,7 
+35950,7 @@ func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.Se var upgradePluginPayloadImplementors = []string{"UpgradePluginPayload"} -func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UpgradePluginPayload) graphql.Marshaler { +func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpgradePluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, upgradePluginPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36659,7 +35982,7 @@ func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.S var uploadPluginPayloadImplementors = []string{"UploadPluginPayload"} -func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *graphql1.UploadPluginPayload) graphql.Marshaler { +func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UploadPluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, uploadPluginPayloadImplementors) out := graphql.NewFieldSet(fields) @@ -36696,7 +36019,7 @@ func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.Se var userImplementors = []string{"User", "Node"} -func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj *graphql1.User) graphql.Marshaler { +func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.User) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, userImplementors) out := graphql.NewFieldSet(fields) @@ -37024,52 +36347,52 @@ func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, o // region ***************************** type.gotpl ***************************** -func (ec *executionContext) 
unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.AddDatasetSchemaInput, error) { +func (ec *executionContext) unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.AddDatasetSchemaInput, error) { res, err := ec.unmarshalInputAddDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetInput(ctx context.Context, v interface{}) (graphql1.AddDynamicDatasetInput, error) { +func (ec *executionContext) unmarshalNAddDynamicDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetInput(ctx context.Context, v interface{}) (gqlmodel.AddDynamicDatasetInput, error) { res, err := ec.unmarshalInputAddDynamicDatasetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.AddDynamicDatasetSchemaInput, error) { +func (ec *executionContext) unmarshalNAddDynamicDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.AddDynamicDatasetSchemaInput, error) { res, err := ec.unmarshalInputAddDynamicDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldInput(ctx context.Context, v interface{}) 
(graphql1.AddInfoboxFieldInput, error) { +func (ec *executionContext) unmarshalNAddInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.AddInfoboxFieldInput, error) { res, err := ec.unmarshalInputAddInfoboxFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupInput(ctx context.Context, v interface{}) (graphql1.AddLayerGroupInput, error) { +func (ec *executionContext) unmarshalNAddLayerGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupInput(ctx context.Context, v interface{}) (gqlmodel.AddLayerGroupInput, error) { res, err := ec.unmarshalInputAddLayerGroupInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemInput(ctx context.Context, v interface{}) (graphql1.AddLayerItemInput, error) { +func (ec *executionContext) unmarshalNAddLayerItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemInput(ctx context.Context, v interface{}) (gqlmodel.AddLayerItemInput, error) { res, err := ec.unmarshalInputAddLayerItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamInput(ctx context.Context, v interface{}) (graphql1.AddMemberToTeamInput, error) { +func (ec *executionContext) unmarshalNAddMemberToTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamInput(ctx context.Context, v interface{}) (gqlmodel.AddMemberToTeamInput, error) { res, err := 
ec.unmarshalInputAddMemberToTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddPropertyItemInput(ctx context.Context, v interface{}) (graphql1.AddPropertyItemInput, error) { +func (ec *executionContext) unmarshalNAddPropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddPropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.AddPropertyItemInput, error) { res, err := ec.unmarshalInputAddPropertyItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetInput(ctx context.Context, v interface{}) (graphql1.AddWidgetInput, error) { +func (ec *executionContext) unmarshalNAddWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetInput(ctx context.Context, v interface{}) (gqlmodel.AddWidgetInput, error) { res, err := ec.unmarshalInputAddWidgetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Asset) graphql.Marshaler { +func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Asset) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37093,7 +36416,7 @@ func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx, sel, v[i]) + ret[i] = 
ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37106,7 +36429,7 @@ func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearth return ret } -func (ec *executionContext) marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Asset) graphql.Marshaler { +func (ec *executionContext) marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Asset) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37116,11 +36439,11 @@ func (ec *executionContext) marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ec._Asset(ctx, sel, v) } -func (ec *executionContext) marshalNAssetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.AssetConnection) graphql.Marshaler { +func (ec *executionContext) marshalNAssetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.AssetConnection) graphql.Marshaler { return ec._AssetConnection(ctx, sel, &v) } -func (ec *executionContext) marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.AssetConnection) graphql.Marshaler { +func (ec *executionContext) marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37130,7 +36453,7 @@ func (ec *executionContext) marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹ return ec._AssetConnection(ctx, sel, v) } -func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.AssetEdge) graphql.Marshaler { +func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.AssetEdge) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37154,7 +36477,7 @@ func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdge(ctx, sel, v[i]) + ret[i] = ec.marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdge(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37167,7 +36490,7 @@ func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹ree return ret } -func (ec *executionContext) marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAssetEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.AssetEdge) graphql.Marshaler { +func (ec *executionContext) marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37192,11 +36515,11 @@ func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.Se return res } 
-func (ec *executionContext) marshalNCheckProjectAliasPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v graphql1.CheckProjectAliasPayload) graphql.Marshaler { +func (ec *executionContext) marshalNCheckProjectAliasPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v gqlmodel.CheckProjectAliasPayload) graphql.Marshaler { return ec._CheckProjectAliasPayload(ctx, sel, &v) } -func (ec *executionContext) marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CheckProjectAliasPayload) graphql.Marshaler { +func (ec *executionContext) marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CheckProjectAliasPayload) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37206,38 +36529,38 @@ func (ec *executionContext) marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹r return ec._CheckProjectAliasPayload(ctx, sel, v) } -func (ec *executionContext) unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetInput(ctx context.Context, v interface{}) (graphql1.CreateAssetInput, error) { +func (ec *executionContext) unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetInput(ctx context.Context, v interface{}) (gqlmodel.CreateAssetInput, error) { res, err := ec.unmarshalInputCreateAssetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxInput(ctx context.Context, v interface{}) (graphql1.CreateInfoboxInput, error) { +func (ec *executionContext) unmarshalNCreateInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxInput(ctx context.Context, v interface{}) (gqlmodel.CreateInfoboxInput, error) { res, err := ec.unmarshalInputCreateInfoboxInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateProjectInput(ctx context.Context, v interface{}) (graphql1.CreateProjectInput, error) { +func (ec *executionContext) unmarshalNCreateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateProjectInput(ctx context.Context, v interface{}) (gqlmodel.CreateProjectInput, error) { res, err := ec.unmarshalInputCreateProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateSceneInput(ctx context.Context, v interface{}) (graphql1.CreateSceneInput, error) { +func (ec *executionContext) unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateSceneInput(ctx context.Context, v interface{}) (gqlmodel.CreateSceneInput, error) { res, err := ec.unmarshalInputCreateSceneInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamInput(ctx context.Context, v interface{}) (graphql1.CreateTeamInput, error) { +func (ec *executionContext) 
unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamInput(ctx context.Context, v interface{}) (gqlmodel.CreateTeamInput, error) { res, err := ec.unmarshalInputCreateTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) unmarshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, v interface{}) (usecase.Cursor, error) { - res, err := graphql1.UnmarshalCursor(v) + res, err := gqlmodel.UnmarshalCursor(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx context.Context, sel ast.SelectionSet, v usecase.Cursor) graphql.Marshaler { - res := graphql1.MarshalCursor(v) + res := gqlmodel.MarshalCursor(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37246,7 +36569,7 @@ func (ec *executionContext) marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘ba return res } -func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Dataset) graphql.Marshaler { +func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Dataset) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37270,7 +36593,7 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, sel, v[i]) + ret[i] = 
ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37283,7 +36606,7 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear return ret } -func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Dataset) graphql.Marshaler { +func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Dataset) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37307,7 +36630,7 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx, sel, v[i]) + ret[i] = ec.marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37320,7 +36643,7 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear return ret } -func (ec *executionContext) marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Dataset) graphql.Marshaler { +func (ec *executionContext) marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Dataset) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37330,11 +36653,11 @@ func (ec *executionContext) 
marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearth return ec._Dataset(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.DatasetConnection) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.DatasetConnection) graphql.Marshaler { return ec._DatasetConnection(ctx, sel, &v) } -func (ec *executionContext) marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetConnection) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37344,7 +36667,7 @@ func (ec *executionContext) marshalNDatasetConnection2แš–githubแš—comแš‹reearth return ec._DatasetConnection(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetEdge) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetEdge) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37368,7 +36691,7 @@ func (ec *executionContext) 
marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹r if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdge(ctx, sel, v[i]) + ret[i] = ec.marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdge(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37381,7 +36704,7 @@ func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹r return ret } -func (ec *executionContext) marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetEdge) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37391,7 +36714,7 @@ func (ec *executionContext) marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reea return ec._DatasetEdge(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetField) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37415,7 +36738,7 @@ func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹ if !isLen1 { defer wg.Done() } - ret[i] = 
ec.marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx, sel, v[i]) + ret[i] = ec.marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37428,7 +36751,7 @@ func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹ return ret } -func (ec *executionContext) marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetField) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37438,7 +36761,7 @@ func (ec *executionContext) marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹ree return ec._DatasetField(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchema) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchema) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37462,7 +36785,7 @@ func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, sel, v[i]) + ret[i] = 
ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37475,7 +36798,7 @@ func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchema) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchema) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37499,7 +36822,7 @@ func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx, sel, v[i]) + ret[i] = ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37512,7 +36835,7 @@ func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchema) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchema) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") 
@@ -37522,11 +36845,11 @@ func (ec *executionContext) marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹re return ec._DatasetSchema(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetSchemaConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.DatasetSchemaConnection) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchemaConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.DatasetSchemaConnection) graphql.Marshaler { return ec._DatasetSchemaConnection(ctx, sel, &v) } -func (ec *executionContext) marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaConnection) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37536,7 +36859,7 @@ func (ec *executionContext) marshalNDatasetSchemaConnection2แš–githubแš—comแš‹re return ec._DatasetSchemaConnection(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchemaEdge) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdgeแš„(ctx context.Context, sel ast.SelectionSet, v 
[]*gqlmodel.DatasetSchemaEdge) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37560,7 +36883,7 @@ func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reear if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdge(ctx, sel, v[i]) + ret[i] = ec.marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdge(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37573,7 +36896,7 @@ func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reear return ret } -func (ec *executionContext) marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaEdge) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37583,7 +36906,7 @@ func (ec *executionContext) marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearth return ec._DatasetSchemaEdge(ctx, sel, v) } -func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.DatasetSchemaField) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.DatasetSchemaField) graphql.Marshaler { ret 
:= make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37607,7 +36930,7 @@ func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reea if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx, sel, v[i]) + ret[i] = ec.marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37620,7 +36943,7 @@ func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reea return ret } -func (ec *executionContext) marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaField) graphql.Marshaler { +func (ec *executionContext) marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37645,17 +36968,17 @@ func (ec *executionContext) marshalNDateTime2timeแšTime(ctx context.Context, se return res } -func (ec *executionContext) unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMeInput(ctx context.Context, v interface{}) (graphql1.DeleteMeInput, error) { +func (ec *executionContext) unmarshalNDeleteMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMeInput(ctx context.Context, v interface{}) (gqlmodel.DeleteMeInput, error) { res, err := ec.unmarshalInputDeleteMeInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectInput(ctx context.Context, v interface{}) (graphql1.DeleteProjectInput, error) { +func (ec *executionContext) unmarshalNDeleteProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectInput(ctx context.Context, v interface{}) (gqlmodel.DeleteProjectInput, error) { res, err := ec.unmarshalInputDeleteProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamInput(ctx context.Context, v interface{}) (graphql1.DeleteTeamInput, error) { +func (ec *executionContext) unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamInput(ctx context.Context, v interface{}) (gqlmodel.DeleteTeamInput, error) { res, err := ec.unmarshalInputDeleteTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } @@ -37691,12 +37014,12 @@ func (ec *executionContext) marshalNFloat2float64(ctx context.Context, sel ast.S } func (ec *executionContext) unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (id.ID, error) { - res, err := graphql1.UnmarshalID(v) + res, err := gqlmodel.UnmarshalID(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v id.ID) graphql.Marshaler { - res := graphql1.MarshalID(v) + res := gqlmodel.MarshalID(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37736,7 +37059,7 @@ func (ec *executionContext) marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘ } func (ec *executionContext) 
unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (*id.ID, error) { - res, err := graphql1.UnmarshalID(v) + res, err := gqlmodel.UnmarshalID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -37747,7 +37070,7 @@ func (ec *executionContext) marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘bac } return graphql.Null } - res := graphql1.MarshalID(*v) + res := gqlmodel.MarshalID(*v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37756,26 +37079,26 @@ func (ec *executionContext) marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘bac return res } -func (ec *executionContext) unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetFromGoogleSheetInput(ctx context.Context, v interface{}) (graphql1.ImportDatasetFromGoogleSheetInput, error) { +func (ec *executionContext) unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetFromGoogleSheetInput(ctx context.Context, v interface{}) (gqlmodel.ImportDatasetFromGoogleSheetInput, error) { res, err := ec.unmarshalInputImportDatasetFromGoogleSheetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetInput(ctx context.Context, v interface{}) (graphql1.ImportDatasetInput, error) { +func (ec *executionContext) unmarshalNImportDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetInput(ctx context.Context, v interface{}) (gqlmodel.ImportDatasetInput, error) { res, err := ec.unmarshalInputImportDatasetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerInput(ctx context.Context, v interface{}) (graphql1.ImportLayerInput, error) { +func (ec *executionContext) unmarshalNImportLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerInput(ctx context.Context, v interface{}) (gqlmodel.ImportLayerInput, error) { res, err := ec.unmarshalInputImportLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v graphql1.Infobox) graphql.Marshaler { +func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Infobox) graphql.Marshaler { return ec._Infobox(ctx, sel, &v) } -func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { +func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Infobox) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37785,7 +37108,7 @@ func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearth return ec._Infobox(ctx, sel, v) } -func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.InfoboxField) graphql.Marshaler { +func (ec *executionContext) 
marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.InfoboxField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37809,7 +37132,7 @@ func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹ if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxField(ctx, sel, v[i]) + ret[i] = ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37822,7 +37145,7 @@ func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹ return ret } -func (ec *executionContext) marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfoboxField(ctx context.Context, sel ast.SelectionSet, v *graphql1.InfoboxField) graphql.Marshaler { +func (ec *executionContext) marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.InfoboxField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37832,7 +37155,7 @@ func (ec *executionContext) marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹ree return ec._InfoboxField(ctx, sel, v) } -func (ec *executionContext) unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginInput(ctx context.Context, v interface{}) (graphql1.InstallPluginInput, error) { +func (ec *executionContext) unmarshalNInstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginInput(ctx context.Context, v 
interface{}) (gqlmodel.InstallPluginInput, error) { res, err := ec.unmarshalInputInstallPluginInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } @@ -37853,12 +37176,12 @@ func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.Selecti } func (ec *executionContext) unmarshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, v interface{}) (language.Tag, error) { - res, err := graphql1.UnmarshalLang(v) + res, err := gqlmodel.UnmarshalLang(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, sel ast.SelectionSet, v language.Tag) graphql.Marshaler { - res := graphql1.MarshalLang(v) + res := gqlmodel.MarshalLang(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37867,7 +37190,7 @@ func (ec *executionContext) marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTa return res } -func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v graphql1.Layer) graphql.Marshaler { +func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Layer) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37877,7 +37200,7 @@ func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘bac return ec._Layer(ctx, sel, v) } -func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel ast.SelectionSet, v []graphql1.Layer) graphql.Marshaler { +func (ec *executionContext) 
marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Layer) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37901,7 +37224,7 @@ func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘ if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, sel, v[i]) + ret[i] = ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37914,7 +37237,7 @@ func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘ return ret } -func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerแš„(ctx context.Context, sel ast.SelectionSet, v []graphql1.Layer) graphql.Marshaler { +func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Layer) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -37938,7 +37261,7 @@ func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘ if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx, sel, v[i]) + ret[i] = ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, sel, v[i]) } if isLen1 { f(i) @@ -37951,17 +37274,17 @@ func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘ return ret } -func (ec *executionContext) 
unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerEncodingFormat(ctx context.Context, v interface{}) (graphql1.LayerEncodingFormat, error) { - var res graphql1.LayerEncodingFormat +func (ec *executionContext) unmarshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerEncodingFormat(ctx context.Context, v interface{}) (gqlmodel.LayerEncodingFormat, error) { + var res gqlmodel.LayerEncodingFormat err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerEncodingFormat(ctx context.Context, sel ast.SelectionSet, v graphql1.LayerEncodingFormat) graphql.Marshaler { +func (ec *executionContext) marshalNLayerEncodingFormat2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerEncodingFormat(ctx context.Context, sel ast.SelectionSet, v gqlmodel.LayerEncodingFormat) graphql.Marshaler { return v } -func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerGroup) graphql.Marshaler { +func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37971,7 +37294,7 @@ func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reear return ec._LayerGroup(ctx, sel, v) } -func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.LayerItem) graphql.Marshaler { +func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -37981,22 +37304,22 @@ func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reeart return ec._LayerItem(ctx, sel, v) } -func (ec *executionContext) unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLinkDatasetToPropertyValueInput(ctx context.Context, v interface{}) (graphql1.LinkDatasetToPropertyValueInput, error) { +func (ec *executionContext) unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLinkDatasetToPropertyValueInput(ctx context.Context, v interface{}) (gqlmodel.LinkDatasetToPropertyValueInput, error) { res, err := ec.unmarshalInputLinkDatasetToPropertyValueInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšListOperation(ctx context.Context, v interface{}) (graphql1.ListOperation, error) { - var res graphql1.ListOperation +func (ec *executionContext) unmarshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšListOperation(ctx context.Context, v interface{}) (gqlmodel.ListOperation, error) { + var res gqlmodel.ListOperation err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšListOperation(ctx context.Context, sel ast.SelectionSet, v graphql1.ListOperation) graphql.Marshaler { +func (ec 
*executionContext) marshalNListOperation2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšListOperation(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ListOperation) graphql.Marshaler { return v } -func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.MergedInfoboxField) graphql.Marshaler { +func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedInfoboxField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38020,7 +37343,7 @@ func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reea if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx, sel, v[i]) + ret[i] = ec.marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38033,7 +37356,7 @@ func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reea return ret } -func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedInfoboxField) graphql.Marshaler { +func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38043,7 +37366,7 @@ func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearth return ec._MergedInfoboxField(ctx, sel, v) } -func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.MergedPropertyField) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38067,7 +37390,7 @@ func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyField(ctx, sel, v[i]) + ret[i] = ec.marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38080,7 +37403,7 @@ func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹ree return ret } -func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedPropertyField) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38090,7 +37413,7 @@ func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reeart return ec._MergedPropertyField(ctx, sel, v) } -func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.MergedPropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyGroup) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38114,7 +37437,7 @@ func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroup(ctx, sel, v[i]) + ret[i] = ec.marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38127,7 +37450,7 @@ func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹ree return ret } -func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedPropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38137,22 +37460,22 @@ func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reeart return ec._MergedPropertyGroup(ctx, sel, v) } -func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.MoveInfoboxFieldInput, error) { +func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { res, err := ec.unmarshalInputMoveInfoboxFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerInput(ctx context.Context, v interface{}) (graphql1.MoveLayerInput, error) { +func (ec *executionContext) unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerInput(ctx context.Context, v interface{}) (gqlmodel.MoveLayerInput, error) { res, err := ec.unmarshalInputMoveLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMovePropertyItemInput(ctx context.Context, v interface{}) (graphql1.MovePropertyItemInput, error) { +func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMovePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.MovePropertyItemInput, error) { res, err := ec.unmarshalInputMovePropertyItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx context.Context, sel ast.SelectionSet, v []graphql1.Node) graphql.Marshaler { +func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Node) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38176,7 +37499,7 @@ func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘b if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx, sel, v[i]) + ret[i] = ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38189,17 +37512,17 @@ func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘b return ret } -func (ec *executionContext) unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx context.Context, v interface{}) (graphql1.NodeType, error) { - var res graphql1.NodeType +func (ec *executionContext) unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, v interface{}) (gqlmodel.NodeType, error) { + var res gqlmodel.NodeType err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNodeType(ctx context.Context, sel ast.SelectionSet, v graphql1.NodeType) graphql.Marshaler { +func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.NodeType) graphql.Marshaler { return v } -func (ec 
*executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *graphql1.PageInfo) graphql.Marshaler { +func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PageInfo) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38209,7 +37532,7 @@ func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearth return ec._PageInfo(ctx, sel, v) } -func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Plugin) graphql.Marshaler { +func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Plugin) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38233,7 +37556,7 @@ func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reeart if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx, sel, v[i]) + ret[i] = ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38246,7 +37569,7 @@ func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reeart return ret } -func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.Plugin) graphql.Marshaler { +func (ec *executionContext) 
marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38256,7 +37579,7 @@ func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearth return ec._Plugin(ctx, sel, v) } -func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PluginExtension) graphql.Marshaler { +func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginExtension) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38280,7 +37603,7 @@ func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx, sel, v[i]) + ret[i] = ec.marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38293,7 +37616,7 @@ func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *graphql1.PluginExtension) graphql.Marshaler { +func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx 
context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38304,12 +37627,12 @@ func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹ } func (ec *executionContext) unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, v interface{}) (id.PluginExtensionID, error) { - res, err := graphql1.UnmarshalPluginExtensionID(v) + res, err := gqlmodel.UnmarshalPluginExtensionID(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, sel ast.SelectionSet, v id.PluginExtensionID) graphql.Marshaler { - res := graphql1.MarshalPluginExtensionID(v) + res := gqlmodel.MarshalPluginExtensionID(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38318,23 +37641,23 @@ func (ec *executionContext) marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹r return res } -func (ec *executionContext) unmarshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionType(ctx context.Context, v interface{}) (graphql1.PluginExtensionType, error) { - var res graphql1.PluginExtensionType +func (ec *executionContext) unmarshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, v interface{}) (gqlmodel.PluginExtensionType, error) { + var res gqlmodel.PluginExtensionType err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtensionType(ctx context.Context, 
sel ast.SelectionSet, v graphql1.PluginExtensionType) graphql.Marshaler { +func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PluginExtensionType) graphql.Marshaler { return v } func (ec *executionContext) unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (id.PluginID, error) { - res, err := graphql1.UnmarshalPluginID(v) + res, err := gqlmodel.UnmarshalPluginID(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v id.PluginID) graphql.Marshaler { - res := graphql1.MarshalPluginID(v) + res := gqlmodel.MarshalPluginID(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38374,7 +37697,7 @@ func (ec *executionContext) marshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reea } func (ec *executionContext) unmarshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (*id.PluginID, error) { - res, err := graphql1.UnmarshalPluginID(v) + res, err := gqlmodel.UnmarshalPluginID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -38385,7 +37708,7 @@ func (ec *executionContext) marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearth } return graphql.Null } - res := graphql1.MarshalPluginID(*v) + res := gqlmodel.MarshalPluginID(*v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38394,7 +37717,7 @@ func (ec *executionContext) marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearth return res } -func (ec *executionContext) 
marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadataแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PluginMetadata) graphql.Marshaler { +func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginMetadata) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38418,7 +37741,7 @@ func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadata(ctx, sel, v[i]) + ret[i] = ec.marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38431,7 +37754,7 @@ func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *graphql1.PluginMetadata) graphql.Marshaler { +func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginMetadata) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38441,7 +37764,7 @@ func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹r return ec._PluginMetadata(ctx, sel, v) } -func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx 
context.Context, sel ast.SelectionSet, v []*graphql1.Project) graphql.Marshaler { +func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Project) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38465,7 +37788,7 @@ func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reear if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx, sel, v[i]) + ret[i] = ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38478,7 +37801,7 @@ func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reear return ret } -func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx context.Context, sel ast.SelectionSet, v *graphql1.Project) graphql.Marshaler { +func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38488,11 +37811,11 @@ func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearth return ec._Project(ctx, sel, v) } -func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v graphql1.ProjectConnection) graphql.Marshaler { +func (ec *executionContext) 
marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectConnection) graphql.Marshaler { return ec._ProjectConnection(ctx, sel, &v) } -func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectConnection) graphql.Marshaler { +func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38502,7 +37825,7 @@ func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearth return ec._ProjectConnection(ctx, sel, v) } -func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.ProjectEdge) graphql.Marshaler { +func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ProjectEdge) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38526,7 +37849,7 @@ func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹r if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdge(ctx, sel, v[i]) + ret[i] = ec.marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx, 
sel, v[i]) } if isLen1 { f(i) @@ -38539,7 +37862,7 @@ func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹r return ret } -func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectEdge) graphql.Marshaler { +func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38549,7 +37872,7 @@ func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reea return ec._ProjectEdge(ctx, sel, v) } -func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.Property) graphql.Marshaler { +func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38559,7 +37882,7 @@ func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearth return ec._Property(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyField) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx context.Context, sel 
ast.SelectionSet, v []*gqlmodel.PropertyField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38583,7 +37906,7 @@ func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38596,7 +37919,7 @@ func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyField) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38606,7 +37929,7 @@ func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹re return ec._PropertyField(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyFieldLink) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldLink) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38616,7 +37939,7 @@ func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearth return ec._PropertyFieldLink(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyGroup) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38640,7 +37963,7 @@ func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38653,7 +37976,7 @@ func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38663,7 +37986,7 @@ func (ec *executionContext) 
marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹re return ec._PropertyGroup(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v graphql1.PropertyItem) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38673,7 +37996,7 @@ func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reeart return ec._PropertyItem(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemแš„(ctx context.Context, sel ast.SelectionSet, v []graphql1.PropertyItem) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.PropertyItem) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38697,7 +38020,7 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38710,7 +38033,7 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree return ret } -func (ec *executionContext) 
marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyLinkableFields) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyLinkableFields) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38720,7 +38043,7 @@ func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹ree return ec._PropertyLinkableFields(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchema) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchema) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38744,7 +38067,7 @@ func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38757,7 +38080,7 @@ func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) 
marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchema) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38767,7 +38090,7 @@ func (ec *executionContext) marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹r return ec._PropertySchema(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchemaField) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38791,7 +38114,7 @@ func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38804,7 +38127,7 @@ func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹ree return ret } -func (ec *executionContext) 
marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaField) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38814,7 +38137,7 @@ func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reeart return ec._PropertySchemaField(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaFieldChoice) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38825,12 +38148,12 @@ func (ec *executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹ } func (ec *executionContext) unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, v interface{}) (id.PropertySchemaFieldID, error) { - res, err := graphql1.UnmarshalPropertySchemaFieldID(v) + res, err := gqlmodel.UnmarshalPropertySchemaFieldID(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) 
marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaFieldID) graphql.Marshaler { - res := graphql1.MarshalPropertySchemaFieldID(v) + res := gqlmodel.MarshalPropertySchemaFieldID(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38839,7 +38162,7 @@ func (ec *executionContext) marshalNPropertySchemaFieldID2githubแš—comแš‹reearth return res } -func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchemaGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaGroup) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -38863,7 +38186,7 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, sel, v[i]) } if isLen1 { f(i) @@ -38876,7 +38199,7 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹ree return ret } -func (ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaGroup) graphql.Marshaler { +func 
(ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38887,12 +38210,12 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reeart } func (ec *executionContext) unmarshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (id.PropertySchemaID, error) { - res, err := graphql1.UnmarshalPropertySchemaID(v) + res, err := gqlmodel.UnmarshalPropertySchemaID(v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaID) graphql.Marshaler { - res := graphql1.MarshalPropertySchemaID(v) + res := gqlmodel.MarshalPropertySchemaID(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38932,7 +38255,7 @@ func (ec *executionContext) marshalNPropertySchemaID2แš•แš–githubแš—comแš‹reeart } func (ec *executionContext) unmarshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (*id.PropertySchemaID, error) { - res, err := graphql1.UnmarshalPropertySchemaID(v) + res, err := gqlmodel.UnmarshalPropertySchemaID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -38943,7 +38266,7 @@ func (ec *executionContext) marshalNPropertySchemaID2แš–githubแš—comแš‹reearth } return graphql.Null } - res := graphql1.MarshalPropertySchemaID(*v) + res := gqlmodel.MarshalPropertySchemaID(*v) if res == graphql.Null { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -38952,82 +38275,82 @@ func (ec *executionContext) marshalNPropertySchemaID2แš–githubแš—comแš‹reearth return res } -func (ec *executionContext) unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishProjectInput(ctx context.Context, v interface{}) (graphql1.PublishProjectInput, error) { +func (ec *executionContext) unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishProjectInput(ctx context.Context, v interface{}) (gqlmodel.PublishProjectInput, error) { res, err := ec.unmarshalInputPublishProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx context.Context, v interface{}) (graphql1.PublishmentStatus, error) { - var res graphql1.PublishmentStatus +func (ec *executionContext) unmarshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx context.Context, v interface{}) (gqlmodel.PublishmentStatus, error) { + var res gqlmodel.PublishmentStatus err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPublishmentStatus(ctx context.Context, sel ast.SelectionSet, v graphql1.PublishmentStatus) graphql.Marshaler { +func (ec *executionContext) marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PublishmentStatus) graphql.Marshaler { return v } -func (ec *executionContext) 
unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetInput(ctx context.Context, v interface{}) (graphql1.RemoveAssetInput, error) { +func (ec *executionContext) unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetInput(ctx context.Context, v interface{}) (gqlmodel.RemoveAssetInput, error) { res, err := ec.unmarshalInputRemoveAssetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.RemoveDatasetSchemaInput, error) { +func (ec *executionContext) unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { res, err := ec.unmarshalInputRemoveDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldInput(ctx context.Context, v interface{}) (graphql1.RemoveInfoboxFieldInput, error) { +func (ec *executionContext) unmarshalNRemoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.RemoveInfoboxFieldInput, error) { res, err := ec.unmarshalInputRemoveInfoboxFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxInput(ctx context.Context, v interface{}) (graphql1.RemoveInfoboxInput, error) { +func (ec *executionContext) 
unmarshalNRemoveInfoboxInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxInput(ctx context.Context, v interface{}) (gqlmodel.RemoveInfoboxInput, error) { res, err := ec.unmarshalInputRemoveInfoboxInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerInput(ctx context.Context, v interface{}) (graphql1.RemoveLayerInput, error) { +func (ec *executionContext) unmarshalNRemoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerInput(ctx context.Context, v interface{}) (gqlmodel.RemoveLayerInput, error) { res, err := ec.unmarshalInputRemoveLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamInput(ctx context.Context, v interface{}) (graphql1.RemoveMemberFromTeamInput, error) { +func (ec *executionContext) unmarshalNRemoveMemberFromTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamInput(ctx context.Context, v interface{}) (gqlmodel.RemoveMemberFromTeamInput, error) { res, err := ec.unmarshalInputRemoveMemberFromTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMyAuthInput(ctx context.Context, v interface{}) (graphql1.RemoveMyAuthInput, error) { +func (ec *executionContext) unmarshalNRemoveMyAuthInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMyAuthInput(ctx context.Context, v interface{}) (gqlmodel.RemoveMyAuthInput, error) { res, err := ec.unmarshalInputRemoveMyAuthInput(ctx, v) return res, 
graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyFieldInput(ctx context.Context, v interface{}) (graphql1.RemovePropertyFieldInput, error) { +func (ec *executionContext) unmarshalNRemovePropertyFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyFieldInput(ctx context.Context, v interface{}) (gqlmodel.RemovePropertyFieldInput, error) { res, err := ec.unmarshalInputRemovePropertyFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemovePropertyItemInput(ctx context.Context, v interface{}) (graphql1.RemovePropertyItemInput, error) { +func (ec *executionContext) unmarshalNRemovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemovePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.RemovePropertyItemInput, error) { res, err := ec.unmarshalInputRemovePropertyItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetInput(ctx context.Context, v interface{}) (graphql1.RemoveWidgetInput, error) { +func (ec *executionContext) unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetInput(ctx context.Context, v interface{}) (gqlmodel.RemoveWidgetInput, error) { res, err := ec.unmarshalInputRemoveWidgetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx context.Context, v interface{}) (graphql1.Role, error) { - var res 
graphql1.Role +func (ec *executionContext) unmarshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx context.Context, v interface{}) (gqlmodel.Role, error) { + var res gqlmodel.Role err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRole(ctx context.Context, sel ast.SelectionSet, v graphql1.Role) graphql.Marshaler { +func (ec *executionContext) marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Role) graphql.Marshaler { return v } -func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx context.Context, sel ast.SelectionSet, v *graphql1.Scene) graphql.Marshaler { +func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Scene) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -39037,17 +38360,17 @@ func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ec._Scene(ctx, sel, v) } -func (ec *executionContext) unmarshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, v interface{}) (graphql1.SceneLockMode, error) { - var res graphql1.SceneLockMode +func (ec *executionContext) unmarshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, v interface{}) (gqlmodel.SceneLockMode, error) { + var res gqlmodel.SceneLockMode err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec 
*executionContext) marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v graphql1.SceneLockMode) graphql.Marshaler { +func (ec *executionContext) marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v gqlmodel.SceneLockMode) graphql.Marshaler { return v } -func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePluginแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.ScenePlugin) graphql.Marshaler { +func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ScenePlugin) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -39071,7 +38394,7 @@ func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹r if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx, sel, v[i]) + ret[i] = ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, sel, v[i]) } if isLen1 { f(i) @@ -39084,7 +38407,7 @@ func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹r return ret } -func (ec *executionContext) marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.ScenePlugin) graphql.Marshaler { +func (ec *executionContext) marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx context.Context, sel 
ast.SelectionSet, v *gqlmodel.ScenePlugin) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -39094,7 +38417,7 @@ func (ec *executionContext) marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reea return ec._ScenePlugin(ctx, sel, v) } -func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidgetแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.SceneWidget) graphql.Marshaler { +func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidgetแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.SceneWidget) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -39118,7 +38441,7 @@ func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹r if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx, sel, v[i]) + ret[i] = ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, sel, v[i]) } if isLen1 { f(i) @@ -39131,7 +38454,7 @@ func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹r return ret } -func (ec *executionContext) marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *graphql1.SceneWidget) graphql.Marshaler { +func (ec *executionContext) marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneWidget) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must 
not be null") @@ -39141,7 +38464,7 @@ func (ec *executionContext) marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reea return ec._SceneWidget(ctx, sel, v) } -func (ec *executionContext) unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupInput(ctx context.Context, v interface{}) (graphql1.SignupInput, error) { +func (ec *executionContext) unmarshalNSignupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupInput(ctx context.Context, v interface{}) (gqlmodel.SignupInput, error) { res, err := ec.unmarshalInputSignupInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } @@ -39191,16 +38514,16 @@ func (ec *executionContext) marshalNString2แš•stringแš„(ctx context.Context, sel return ret } -func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetInput(ctx context.Context, v interface{}) (graphql1.SyncDatasetInput, error) { +func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetInput(ctx context.Context, v interface{}) (gqlmodel.SyncDatasetInput, error) { res, err := ec.unmarshalInputSyncDatasetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNTeam2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v graphql1.Team) graphql.Marshaler { +func (ec *executionContext) marshalNTeam2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Team) graphql.Marshaler { return ec._Team(ctx, sel, &v) } -func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.Team) graphql.Marshaler { 
+func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Team) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -39224,7 +38547,7 @@ func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx, sel, v[i]) + ret[i] = ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, sel, v[i]) } if isLen1 { f(i) @@ -39237,7 +38560,7 @@ func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearth return ret } -func (ec *executionContext) marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v *graphql1.Team) graphql.Marshaler { +func (ec *executionContext) marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Team) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -39247,7 +38570,7 @@ func (ec *executionContext) marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘b return ec._Team(ctx, sel, v) } -func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMemberแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.TeamMember) graphql.Marshaler { +func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMemberแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.TeamMember) graphql.Marshaler { ret := 
make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -39271,7 +38594,7 @@ func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹re if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMember(ctx, sel, v[i]) + ret[i] = ec.marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMember(ctx, sel, v[i]) } if isLen1 { f(i) @@ -39284,7 +38607,7 @@ func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹re return ret } -func (ec *executionContext) marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeamMember(ctx context.Context, sel ast.SelectionSet, v *graphql1.TeamMember) graphql.Marshaler { +func (ec *executionContext) marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMember(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TeamMember) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -39294,57 +38617,57 @@ func (ec *executionContext) marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reear return ec._TeamMember(ctx, sel, v) } -func (ec *executionContext) unmarshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, v interface{}) (graphql1.Theme, error) { - var res graphql1.Theme +func (ec *executionContext) unmarshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, v interface{}) (gqlmodel.Theme, error) { + var res gqlmodel.Theme err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, sel 
ast.SelectionSet, v graphql1.Theme) graphql.Marshaler { +func (ec *executionContext) marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Theme) graphql.Marshaler { return v } -func (ec *executionContext) unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginInput(ctx context.Context, v interface{}) (graphql1.UninstallPluginInput, error) { +func (ec *executionContext) unmarshalNUninstallPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginInput(ctx context.Context, v interface{}) (gqlmodel.UninstallPluginInput, error) { res, err := ec.unmarshalInputUninstallPluginInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUnlinkPropertyValueInput(ctx context.Context, v interface{}) (graphql1.UnlinkPropertyValueInput, error) { +func (ec *executionContext) unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUnlinkPropertyValueInput(ctx context.Context, v interface{}) (gqlmodel.UnlinkPropertyValueInput, error) { res, err := ec.unmarshalInputUnlinkPropertyValueInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaInput(ctx context.Context, v interface{}) (graphql1.UpdateDatasetSchemaInput, error) { +func (ec *executionContext) unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { res, err := 
ec.unmarshalInputUpdateDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerInput(ctx context.Context, v interface{}) (graphql1.UpdateLayerInput, error) { +func (ec *executionContext) unmarshalNUpdateLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerInput(ctx context.Context, v interface{}) (gqlmodel.UpdateLayerInput, error) { res, err := ec.unmarshalInputUpdateLayerInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMeInput(ctx context.Context, v interface{}) (graphql1.UpdateMeInput, error) { +func (ec *executionContext) unmarshalNUpdateMeInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMeInput(ctx context.Context, v interface{}) (gqlmodel.UpdateMeInput, error) { res, err := ec.unmarshalInputUpdateMeInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamInput(ctx context.Context, v interface{}) (graphql1.UpdateMemberOfTeamInput, error) { +func (ec *executionContext) unmarshalNUpdateMemberOfTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamInput(ctx context.Context, v interface{}) (gqlmodel.UpdateMemberOfTeamInput, error) { res, err := ec.unmarshalInputUpdateMemberOfTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateProjectInput(ctx context.Context, v interface{}) (graphql1.UpdateProjectInput, error) { 
+func (ec *executionContext) unmarshalNUpdateProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateProjectInput(ctx context.Context, v interface{}) (gqlmodel.UpdateProjectInput, error) { res, err := ec.unmarshalInputUpdateProjectInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyItemInput, error) { +func (ec *executionContext) unmarshalNUpdatePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.UpdatePropertyItemInput, error) { res, err := ec.unmarshalInputUpdatePropertyItemInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInputแš„(ctx context.Context, v interface{}) ([]*graphql1.UpdatePropertyItemOperationInput, error) { +func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInputแš„(ctx context.Context, v interface{}) ([]*gqlmodel.UpdatePropertyItemOperationInput, error) { var vSlice []interface{} if v != nil { if tmp1, ok := v.([]interface{}); ok { @@ -39354,10 +38677,10 @@ func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–git } } var err error - res := make([]*graphql1.UpdatePropertyItemOperationInput, len(vSlice)) + res := make([]*gqlmodel.UpdatePropertyItemOperationInput, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = 
ec.unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInput(ctx, vSlice[i]) + res[i], err = ec.unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInput(ctx, vSlice[i]) if err != nil { return nil, err } @@ -39365,47 +38688,27 @@ func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–git return res, nil } -func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyItemOperationInput(ctx context.Context, v interface{}) (*graphql1.UpdatePropertyItemOperationInput, error) { +func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInput(ctx context.Context, v interface{}) (*gqlmodel.UpdatePropertyItemOperationInput, error) { res, err := ec.unmarshalInputUpdatePropertyItemOperationInput(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdatePropertyValueCameraInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueCameraInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueCameraInput, error) { - res, err := ec.unmarshalInputUpdatePropertyValueCameraInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueInput, error) { +func (ec *executionContext) 
unmarshalNUpdatePropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyValueInput(ctx context.Context, v interface{}) (gqlmodel.UpdatePropertyValueInput, error) { res, err := ec.unmarshalInputUpdatePropertyValueInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdatePropertyValueLatLngHeightInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngHeightInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueLatLngHeightInput, error) { - res, err := ec.unmarshalInputUpdatePropertyValueLatLngHeightInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) unmarshalNUpdatePropertyValueLatLngInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueLatLngInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueLatLngInput, error) { - res, err := ec.unmarshalInputUpdatePropertyValueLatLngInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) unmarshalNUpdatePropertyValueTypographyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdatePropertyValueTypographyInput(ctx context.Context, v interface{}) (graphql1.UpdatePropertyValueTypographyInput, error) { - res, err := ec.unmarshalInputUpdatePropertyValueTypographyInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamInput(ctx context.Context, v interface{}) (graphql1.UpdateTeamInput, error) { +func (ec *executionContext) unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamInput(ctx context.Context, v interface{}) (gqlmodel.UpdateTeamInput, error) { res, err := 
ec.unmarshalInputUpdateTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetInput(ctx context.Context, v interface{}) (graphql1.UpdateWidgetInput, error) { +func (ec *executionContext) unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetInput(ctx context.Context, v interface{}) (gqlmodel.UpdateWidgetInput, error) { res, err := ec.unmarshalInputUpdateWidgetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginInput(ctx context.Context, v interface{}) (graphql1.UpgradePluginInput, error) { +func (ec *executionContext) unmarshalNUpgradePluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginInput(ctx context.Context, v interface{}) (gqlmodel.UpgradePluginInput, error) { res, err := ec.unmarshalInputUpgradePluginInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } @@ -39425,17 +38728,17 @@ func (ec *executionContext) marshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹g return res } -func (ec *executionContext) unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadFileToPropertyInput(ctx context.Context, v interface{}) (graphql1.UploadFileToPropertyInput, error) { +func (ec *executionContext) unmarshalNUploadFileToPropertyInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadFileToPropertyInput(ctx context.Context, v interface{}) (gqlmodel.UploadFileToPropertyInput, error) { res, err := ec.unmarshalInputUploadFileToPropertyInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginInput(ctx context.Context, v interface{}) (graphql1.UploadPluginInput, error) { +func (ec *executionContext) unmarshalNUploadPluginInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginInput(ctx context.Context, v interface{}) (gqlmodel.UploadPluginInput, error) { res, err := ec.unmarshalInputUploadPluginInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.User) graphql.Marshaler { +func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.User) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -39445,23 +38748,23 @@ func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘b return ec._User(ctx, sel, v) } -func (ec *executionContext) unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, v interface{}) (graphql1.ValueType, error) { - var res graphql1.ValueType +func (ec *executionContext) unmarshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, v interface{}) (gqlmodel.ValueType, error) { + var res gqlmodel.ValueType err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, sel ast.SelectionSet, v graphql1.ValueType) graphql.Marshaler { +func (ec *executionContext) 
marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ValueType) graphql.Marshaler { return v } -func (ec *executionContext) unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx context.Context, v interface{}) (graphql1.Visualizer, error) { - var res graphql1.Visualizer +func (ec *executionContext) unmarshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, v interface{}) (gqlmodel.Visualizer, error) { + var res gqlmodel.Visualizer err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšVisualizer(ctx context.Context, sel ast.SelectionSet, v graphql1.Visualizer) graphql.Marshaler { +func (ec *executionContext) marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Visualizer) graphql.Marshaler { return v } @@ -39694,56 +38997,56 @@ func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel a return res } -func (ec *executionContext) marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddDatasetSchemaPayload(ctx, sel, v) } -func (ec *executionContext) 
marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDynamicDatasetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddDynamicDatasetPayload(ctx, sel, v) } -func (ec *executionContext) marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddDynamicDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDynamicDatasetSchemaPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddDynamicDatasetSchemaPayload(ctx, sel, v) } -func (ec *executionContext) marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddInfoboxFieldPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddInfoboxFieldPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddInfoboxFieldPayload(ctx, sel, v) } -func (ec *executionContext) 
marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerGroupPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddLayerGroupPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddLayerGroupPayload(ctx, sel, v) } -func (ec *executionContext) marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddLayerItemPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddLayerItemPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddLayerItemPayload(ctx, sel, v) } -func (ec *executionContext) marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.AddMemberToTeamPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddMemberToTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._AddMemberToTeamPayload(ctx, sel, v) } -func (ec *executionContext) marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.AddWidgetPayload) graphql.Marshaler { +func (ec *executionContext) marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddWidgetPayload) graphql.Marshaler { if v == nil { return graphql.Null } @@ -39765,7 +39068,7 @@ func (ec *executionContext) marshalOAny2interface(ctx context.Context, sel ast.S return graphql.MarshalAny(v) } -func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšAsset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Asset) graphql.Marshaler { +func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Asset) graphql.Marshaler { if v == nil { return graphql.Null } @@ -39796,28 +39099,28 @@ func (ec *executionContext) marshalOBoolean2แš–bool(ctx context.Context, sel ast return graphql.MarshalBoolean(*v) } -func (ec *executionContext) marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateAssetPayload) graphql.Marshaler { +func (ec *executionContext) marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateAssetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._CreateAssetPayload(ctx, sel, v) } -func (ec *executionContext) marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateInfoboxPayload) graphql.Marshaler { +func (ec *executionContext) 
marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateInfoboxPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._CreateInfoboxPayload(ctx, sel, v) } -func (ec *executionContext) marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateScenePayload) graphql.Marshaler { +func (ec *executionContext) marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateScenePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateScenePayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._CreateScenePayload(ctx, sel, v) } -func (ec *executionContext) marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.CreateTeamPayload) graphql.Marshaler { +func (ec *executionContext) marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null } @@ -39828,7 +39131,7 @@ func (ec *executionContext) unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearth if v == nil { return nil, nil } - res, err := graphql1.UnmarshalCursor(v) + res, err := gqlmodel.UnmarshalCursor(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -39836,31 +39139,31 @@ func (ec *executionContext) marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearth if v == nil { return graphql.Null } - return graphql1.MarshalCursor(*v) + return gqlmodel.MarshalCursor(*v) } -func (ec *executionContext) 
marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDataset(ctx context.Context, sel ast.SelectionSet, v *graphql1.Dataset) graphql.Marshaler { +func (ec *executionContext) marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Dataset) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Dataset(ctx, sel, v) } -func (ec *executionContext) marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetField) graphql.Marshaler { +func (ec *executionContext) marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetField) graphql.Marshaler { if v == nil { return graphql.Null } return ec._DatasetField(ctx, sel, v) } -func (ec *executionContext) marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchema) graphql.Marshaler { +func (ec *executionContext) marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchema) graphql.Marshaler { if v == nil { return graphql.Null } return ec._DatasetSchema(ctx, sel, v) } -func (ec *executionContext) marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.DatasetSchemaField) graphql.Marshaler { +func (ec *executionContext) marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx 
context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaField) graphql.Marshaler { if v == nil { return graphql.Null } @@ -39871,7 +39174,7 @@ func (ec *executionContext) unmarshalODatasetSchemaFieldID2แš–githubแš—comแš‹ree if v == nil { return nil, nil } - res, err := graphql1.UnmarshalDatasetSchemaFieldID(v) + res, err := gqlmodel.UnmarshalDatasetSchemaFieldID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -39879,7 +39182,7 @@ func (ec *executionContext) marshalODatasetSchemaFieldID2แš–githubแš—comแš‹reear if v == nil { return graphql.Null } - return graphql1.MarshalDatasetSchemaFieldID(*v) + return gqlmodel.MarshalDatasetSchemaFieldID(*v) } func (ec *executionContext) unmarshalODateTime2แš–timeแšTime(ctx context.Context, v interface{}) (*time.Time, error) { @@ -39897,21 +39200,21 @@ func (ec *executionContext) marshalODateTime2แš–timeแšTime(ctx context.Context, return graphql.MarshalTime(*v) } -func (ec *executionContext) marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteMePayload) graphql.Marshaler { +func (ec *executionContext) marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteMePayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._DeleteMePayload(ctx, sel, v) } -func (ec *executionContext) marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteProjectPayload) graphql.Marshaler { +func (ec *executionContext) marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteProjectPayload) 
graphql.Marshaler { if v == nil { return graphql.Null } return ec._DeleteProjectPayload(ctx, sel, v) } -func (ec *executionContext) marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.DeleteTeamPayload) graphql.Marshaler { +func (ec *executionContext) marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null } @@ -39973,7 +39276,7 @@ func (ec *executionContext) unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘b if v == nil { return nil, nil } - res, err := graphql1.UnmarshalID(v) + res, err := gqlmodel.UnmarshalID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -39981,31 +39284,31 @@ func (ec *executionContext) marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘bac if v == nil { return graphql.Null } - return graphql1.MarshalID(*v) + return gqlmodel.MarshalID(*v) } -func (ec *executionContext) marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportDatasetPayload) graphql.Marshaler { +func (ec *executionContext) marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ImportDatasetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._ImportDatasetPayload(ctx, sel, v) } -func (ec *executionContext) marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ImportLayerPayload) graphql.Marshaler { +func (ec *executionContext) 
marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ImportLayerPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._ImportLayerPayload(ctx, sel, v) } -func (ec *executionContext) marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.Infobox) graphql.Marshaler { +func (ec *executionContext) marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Infobox) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Infobox(ctx, sel, v) } -func (ec *executionContext) marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.InstallPluginPayload) graphql.Marshaler { +func (ec *executionContext) marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.InstallPluginPayload) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40031,7 +39334,7 @@ func (ec *executionContext) unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹language if v == nil { return nil, nil } - res, err := graphql1.UnmarshalLang(v) + res, err := gqlmodel.UnmarshalLang(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -40039,87 +39342,87 @@ func (ec *executionContext) marshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹language if v == nil { return graphql.Null } - return graphql1.MarshalLang(*v) + return gqlmodel.MarshalLang(*v) } -func (ec *executionContext) marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayer(ctx context.Context, sel 
ast.SelectionSet, v graphql1.Layer) graphql.Marshaler { +func (ec *executionContext) marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Layer) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Layer(ctx, sel, v) } -func (ec *executionContext) marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerGroup) graphql.Marshaler { +func (ec *executionContext) marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerGroup) graphql.Marshaler { if v == nil { return graphql.Null } return ec._LayerGroup(ctx, sel, v) } -func (ec *executionContext) marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *graphql1.LayerItem) graphql.Marshaler { +func (ec *executionContext) marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerItem) graphql.Marshaler { if v == nil { return graphql.Null } return ec._LayerItem(ctx, sel, v) } -func (ec *executionContext) marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfobox(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedInfobox) graphql.Marshaler { +func (ec *executionContext) marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfobox) graphql.Marshaler { if v == nil { return graphql.Null } return ec._MergedInfobox(ctx, sel, v) } -func (ec *executionContext) 
marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedInfoboxField) graphql.Marshaler { +func (ec *executionContext) marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { if v == nil { return graphql.Null } return ec._MergedInfoboxField(ctx, sel, v) } -func (ec *executionContext) marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedLayer(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedLayer) graphql.Marshaler { +func (ec *executionContext) marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedLayer(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedLayer) graphql.Marshaler { if v == nil { return graphql.Null } return ec._MergedLayer(ctx, sel, v) } -func (ec *executionContext) marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMergedProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.MergedProperty) graphql.Marshaler { +func (ec *executionContext) marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedProperty) graphql.Marshaler { if v == nil { return graphql.Null } return ec._MergedProperty(ctx, sel, v) } -func (ec *executionContext) marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveInfoboxFieldPayload) graphql.Marshaler { +func (ec *executionContext) 
marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MoveInfoboxFieldPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._MoveInfoboxFieldPayload(ctx, sel, v) } -func (ec *executionContext) marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.MoveLayerPayload) graphql.Marshaler { +func (ec *executionContext) marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MoveLayerPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._MoveLayerPayload(ctx, sel, v) } -func (ec *executionContext) marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšNode(ctx context.Context, sel ast.SelectionSet, v graphql1.Node) graphql.Marshaler { +func (ec *executionContext) marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Node) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Node(ctx, sel, v) } -func (ec *executionContext) marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.Plugin) graphql.Marshaler { +func (ec *executionContext) marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Plugin(ctx, sel, v) } -func (ec *executionContext) 
marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *graphql1.PluginExtension) graphql.Marshaler { +func (ec *executionContext) marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40130,7 +39433,7 @@ func (ec *executionContext) unmarshalOPluginExtensionID2แš–githubแš—comแš‹reeart if v == nil { return nil, nil } - res, err := graphql1.UnmarshalPluginExtensionID(v) + res, err := gqlmodel.UnmarshalPluginExtensionID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -40138,14 +39441,14 @@ func (ec *executionContext) marshalOPluginExtensionID2แš–githubแš—comแš‹reearth if v == nil { return graphql.Null } - return graphql1.MarshalPluginExtensionID(*v) + return gqlmodel.MarshalPluginExtensionID(*v) } func (ec *executionContext) unmarshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (*id.PluginID, error) { if v == nil { return nil, nil } - res, err := graphql1.UnmarshalPluginID(v) + res, err := gqlmodel.UnmarshalPluginID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -40153,45 +39456,45 @@ func (ec *executionContext) marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearth if v == nil { return graphql.Null } - return graphql1.MarshalPluginID(*v) + return gqlmodel.MarshalPluginID(*v) } -func (ec *executionContext) marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProject(ctx context.Context, sel ast.SelectionSet, v *graphql1.Project) graphql.Marshaler { +func (ec *executionContext) marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) 
graphql.Marshaler { if v == nil { return graphql.Null } return ec._Project(ctx, sel, v) } -func (ec *executionContext) marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.ProjectPayload) graphql.Marshaler { +func (ec *executionContext) marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._ProjectPayload(ctx, sel, v) } -func (ec *executionContext) marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšProperty(ctx context.Context, sel ast.SelectionSet, v *graphql1.Property) graphql.Marshaler { +func (ec *executionContext) marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Property(ctx, sel, v) } -func (ec *executionContext) marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyCondition(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyCondition) graphql.Marshaler { +func (ec *executionContext) marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyCondition) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertyCondition(ctx, sel, v) } -func (ec *executionContext) marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyField) graphql.Marshaler { +func (ec 
*executionContext) marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertyField(ctx, sel, v) } -func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLinkแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertyFieldLink) graphql.Marshaler { +func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyFieldLink) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40218,7 +39521,7 @@ func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reear if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldLink(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx, sel, v[i]) } if isLen1 { f(i) @@ -40231,49 +39534,49 @@ func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reear return ret } -func (ec *executionContext) marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyFieldPayload) graphql.Marshaler { +func (ec *executionContext) marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldPayload) graphql.Marshaler { if v == nil { return graphql.Null } 
return ec._PropertyFieldPayload(ctx, sel, v) } -func (ec *executionContext) marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertyGroup(ctx, sel, v) } -func (ec *executionContext) marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v graphql1.PropertyItem) graphql.Marshaler { +func (ec *executionContext) marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertyItem(ctx, sel, v) } -func (ec *executionContext) marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertyItemPayload) graphql.Marshaler { +func (ec *executionContext) marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyItemPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertyItemPayload(ctx, sel, v) } -func (ec *executionContext) marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchema) graphql.Marshaler { +func (ec *executionContext) 
marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertySchema(ctx, sel, v) } -func (ec *executionContext) marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaField) graphql.Marshaler { +func (ec *executionContext) marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { if v == nil { return graphql.Null } return ec._PropertySchemaField(ctx, sel, v) } -func (ec *executionContext) marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoiceแš„(ctx context.Context, sel ast.SelectionSet, v []*graphql1.PropertySchemaFieldChoice) graphql.Marshaler { +func (ec *executionContext) marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoiceแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40300,7 +39603,7 @@ func (ec *executionContext) marshalOPropertySchemaFieldChoice2แš•แš–githubแš—com if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldChoice(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx, sel, v[i]) } if isLen1 { f(i) @@ -40317,7 
+39620,7 @@ func (ec *executionContext) unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹re if v == nil { return nil, nil } - res, err := graphql1.UnmarshalPropertySchemaFieldID(v) + res, err := gqlmodel.UnmarshalPropertySchemaFieldID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -40325,26 +39628,26 @@ func (ec *executionContext) marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reea if v == nil { return graphql.Null } - return graphql1.MarshalPropertySchemaFieldID(*v) + return gqlmodel.MarshalPropertySchemaFieldID(*v) } -func (ec *executionContext) unmarshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldUI(ctx context.Context, v interface{}) (*graphql1.PropertySchemaFieldUI, error) { +func (ec *executionContext) unmarshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx context.Context, v interface{}) (*gqlmodel.PropertySchemaFieldUI, error) { if v == nil { return nil, nil } - var res = new(graphql1.PropertySchemaFieldUI) + var res = new(gqlmodel.PropertySchemaFieldUI) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaFieldUI(ctx context.Context, sel ast.SelectionSet, v *graphql1.PropertySchemaFieldUI) graphql.Marshaler { +func (ec *executionContext) marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldUI) graphql.Marshaler { if v == nil { return graphql.Null } return v } -func (ec *executionContext) marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v 
*graphql1.PropertySchemaGroup) graphql.Marshaler { +func (ec *executionContext) marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40355,7 +39658,7 @@ func (ec *executionContext) unmarshalOPropertySchemaID2แš–githubแš—comแš‹reearth if v == nil { return nil, nil } - res, err := graphql1.UnmarshalPropertySchemaID(v) + res, err := gqlmodel.UnmarshalPropertySchemaID(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -40363,103 +39666,103 @@ func (ec *executionContext) marshalOPropertySchemaID2แš–githubแš—comแš‹reearth if v == nil { return graphql.Null } - return graphql1.MarshalPropertySchemaID(*v) + return gqlmodel.MarshalPropertySchemaID(*v) } -func (ec *executionContext) marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveAssetPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveAssetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveAssetPayload(ctx, sel, v) } -func (ec *executionContext) marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveDatasetSchemaPayload) 
graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveDatasetSchemaPayload(ctx, sel, v) } -func (ec *executionContext) marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxFieldPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveInfoboxFieldPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveInfoboxFieldPayload(ctx, sel, v) } -func (ec *executionContext) marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveInfoboxPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveInfoboxPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveInfoboxPayload(ctx, sel, v) } -func (ec *executionContext) marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveLayerPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveLayerPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveLayerPayload(ctx, sel, v) } -func (ec *executionContext) 
marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveMemberFromTeamPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveMemberFromTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveMemberFromTeamPayload(ctx, sel, v) } -func (ec *executionContext) marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.RemoveWidgetPayload) graphql.Marshaler { +func (ec *executionContext) marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._RemoveWidgetPayload(ctx, sel, v) } -func (ec *executionContext) marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScene(ctx context.Context, sel ast.SelectionSet, v *graphql1.Scene) graphql.Marshaler { +func (ec *executionContext) marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Scene) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Scene(ctx, sel, v) } -func (ec *executionContext) unmarshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, v interface{}) (*graphql1.SceneLockMode, error) { +func (ec *executionContext) 
unmarshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, v interface{}) (*gqlmodel.SceneLockMode, error) { if v == nil { return nil, nil } - var res = new(graphql1.SceneLockMode) + var res = new(gqlmodel.SceneLockMode) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v *graphql1.SceneLockMode) graphql.Marshaler { +func (ec *executionContext) marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneLockMode) graphql.Marshaler { if v == nil { return graphql.Null } return v } -func (ec *executionContext) marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *graphql1.ScenePlugin) graphql.Marshaler { +func (ec *executionContext) marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ScenePlugin) graphql.Marshaler { if v == nil { return graphql.Null } return ec._ScenePlugin(ctx, sel, v) } -func (ec *executionContext) marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *graphql1.SceneWidget) graphql.Marshaler { +func (ec *executionContext) marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneWidget) graphql.Marshaler { if v == nil { return graphql.Null } return ec._SceneWidget(ctx, sel, v) } -func (ec 
*executionContext) marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSearchedUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.SearchedUser) graphql.Marshaler { +func (ec *executionContext) marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSearchedUser(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SearchedUser) graphql.Marshaler { if v == nil { return graphql.Null } return ec._SearchedUser(ctx, sel, v) } -func (ec *executionContext) marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SignupPayload) graphql.Marshaler { +func (ec *executionContext) marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SignupPayload) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40526,46 +39829,46 @@ func (ec *executionContext) marshalOString2แš–string(ctx context.Context, sel as return graphql.MarshalString(*v) } -func (ec *executionContext) marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.SyncDatasetPayload) graphql.Marshaler { +func (ec *executionContext) marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SyncDatasetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._SyncDatasetPayload(ctx, sel, v) } -func (ec *executionContext) marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTeam(ctx context.Context, sel ast.SelectionSet, v *graphql1.Team) graphql.Marshaler { +func (ec 
*executionContext) marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Team) graphql.Marshaler { if v == nil { return graphql.Null } return ec._Team(ctx, sel, v) } -func (ec *executionContext) unmarshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx context.Context, v interface{}) (*graphql1.TextAlign, error) { +func (ec *executionContext) unmarshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx context.Context, v interface{}) (*gqlmodel.TextAlign, error) { if v == nil { return nil, nil } - var res = new(graphql1.TextAlign) + var res = new(gqlmodel.TextAlign) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTextAlign(ctx context.Context, sel ast.SelectionSet, v *graphql1.TextAlign) graphql.Marshaler { +func (ec *executionContext) marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TextAlign) graphql.Marshaler { if v == nil { return graphql.Null } return v } -func (ec *executionContext) unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, v interface{}) (*graphql1.Theme, error) { +func (ec *executionContext) unmarshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, v interface{}) (*gqlmodel.Theme, error) { if v == nil { return nil, nil } - var res = new(graphql1.Theme) + var res = new(gqlmodel.Theme) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) 
marshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšTheme(ctx context.Context, sel ast.SelectionSet, v *graphql1.Theme) graphql.Marshaler { +func (ec *executionContext) marshalOTheme2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Theme) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40576,7 +39879,7 @@ func (ec *executionContext) unmarshalOTranslatedString2map(ctx context.Context, if v == nil { return nil, nil } - res, err := graphql1.UnmarshalMap(v) + res, err := gqlmodel.UnmarshalMap(v) return res, graphql.ErrorOnPath(ctx, err) } @@ -40584,14 +39887,14 @@ func (ec *executionContext) marshalOTranslatedString2map(ctx context.Context, se if v == nil { return graphql.Null } - return graphql1.MarshalMap(v) + return gqlmodel.MarshalMap(v) } func (ec *executionContext) unmarshalOURL2แš–netแš‹urlแšURL(ctx context.Context, v interface{}) (*url.URL, error) { if v == nil { return nil, nil } - res, err := graphql1.UnmarshalURL(v) + res, err := gqlmodel.UnmarshalURL(v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -40599,59 +39902,59 @@ func (ec *executionContext) marshalOURL2แš–netแš‹urlแšURL(ctx context.Context, if v == nil { return graphql.Null } - return graphql1.MarshalURL(*v) + return gqlmodel.MarshalURL(*v) } -func (ec *executionContext) marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UninstallPluginPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UninstallPluginPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UninstallPluginPayload(ctx, sel, v) } 
-func (ec *executionContext) marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateDatasetSchemaPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UpdateDatasetSchemaPayload(ctx, sel, v) } -func (ec *executionContext) marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateLayerPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateLayerPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UpdateLayerPayload(ctx, sel, v) } -func (ec *executionContext) marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateMePayload) graphql.Marshaler { +func (ec *executionContext) marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateMePayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UpdateMePayload(ctx, sel, v) } -func (ec *executionContext) marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateMemberOfTeamPayload(ctx context.Context, sel 
ast.SelectionSet, v *graphql1.UpdateMemberOfTeamPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateMemberOfTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UpdateMemberOfTeamPayload(ctx, sel, v) } -func (ec *executionContext) marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateTeamPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UpdateTeamPayload(ctx, sel, v) } -func (ec *executionContext) marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpdateWidgetPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UpdateWidgetPayload(ctx, sel, v) } -func (ec *executionContext) marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UpgradePluginPayload) graphql.Marshaler { +func (ec *executionContext) 
marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpgradePluginPayload) graphql.Marshaler { if v == nil { return graphql.Null } @@ -40673,30 +39976,30 @@ func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgen return graphql.MarshalUpload(*v) } -func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *graphql1.UploadPluginPayload) graphql.Marshaler { +func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UploadPluginPayload) graphql.Marshaler { if v == nil { return graphql.Null } return ec._UploadPluginPayload(ctx, sel, v) } -func (ec *executionContext) marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšUser(ctx context.Context, sel ast.SelectionSet, v *graphql1.User) graphql.Marshaler { +func (ec *executionContext) marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.User) graphql.Marshaler { if v == nil { return graphql.Null } return ec._User(ctx, sel, v) } -func (ec *executionContext) unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, v interface{}) (*graphql1.ValueType, error) { +func (ec *executionContext) unmarshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, v interface{}) (*gqlmodel.ValueType, error) { if v == nil { return nil, nil } - var res = new(graphql1.ValueType) + var res = 
new(gqlmodel.ValueType) err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹graphqlแšValueType(ctx context.Context, sel ast.SelectionSet, v *graphql1.ValueType) graphql.Marshaler { +func (ec *executionContext) marshalOValueType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ValueType) graphql.Marshaler { if v == nil { return graphql.Null } diff --git a/internal/graphql/dataloader/scenelockloader_gen.go b/internal/adapter/gql/gqldataloader/assetloader_gen.go similarity index 59% rename from internal/graphql/dataloader/scenelockloader_gen.go rename to internal/adapter/gql/gqldataloader/assetloader_gen.go index 810765b8e..805dc9542 100644 --- a/internal/graphql/dataloader/scenelockloader_gen.go +++ b/internal/adapter/gql/gqldataloader/assetloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) -// SceneLockLoaderConfig captures the config to create a new SceneLockLoader -type SceneLockLoaderConfig struct { +// AssetLoaderConfig captures the config to create a new AssetLoader +type AssetLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.SceneID) ([]graphql.SceneLockMode, []error) + Fetch func(keys []id.AssetID) ([]*gqlmodel.Asset, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -22,19 +22,19 @@ type SceneLockLoaderConfig struct { MaxBatch int } -// NewSceneLockLoader creates a new SceneLockLoader given a fetch, wait, and maxBatch -func NewSceneLockLoader(config SceneLockLoaderConfig) *SceneLockLoader { - return &SceneLockLoader{ +// NewAssetLoader creates a new AssetLoader given a fetch, wait, and maxBatch +func NewAssetLoader(config AssetLoaderConfig) *AssetLoader { + return &AssetLoader{ fetch: config.Fetch, wait: config.Wait, maxBatch: config.MaxBatch, } } -// SceneLockLoader batches and caches requests -type SceneLockLoader struct { +// AssetLoader batches and caches requests +type AssetLoader struct { // this method provides the data for the loader - fetch func(keys []id.SceneID) ([]graphql.SceneLockMode, []error) + fetch func(keys []id.AssetID) ([]*gqlmodel.Asset, []error) // how long to done before sending a batch wait time.Duration @@ -45,51 +45,51 @@ type SceneLockLoader struct { // INTERNAL // lazily created cache - cache map[id.SceneID]graphql.SceneLockMode + cache map[id.AssetID]*gqlmodel.Asset // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners - batch *sceneLockLoaderBatch + batch *assetLoaderBatch // mutex to prevent races mu sync.Mutex } -type sceneLockLoaderBatch struct { - keys []id.SceneID - data []graphql.SceneLockMode +type assetLoaderBatch struct { + keys []id.AssetID + data []*gqlmodel.Asset error []error closing bool done chan struct{} } -// Load a SceneLockMode by key, batching and caching will be applied automatically -func (l *SceneLockLoader) Load(key id.SceneID) (graphql.SceneLockMode, error) { +// Load a Asset by key, batching and caching will be applied automatically +func (l *AssetLoader) Load(key id.AssetID) (*gqlmodel.Asset, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a SceneLockMode. +// LoadThunk returns a function that when called will block waiting for a Asset. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *SceneLockLoader) LoadThunk(key id.SceneID) func() (graphql.SceneLockMode, error) { +func (l *AssetLoader) LoadThunk(key id.AssetID) func() (*gqlmodel.Asset, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (graphql.SceneLockMode, error) { + return func() (*gqlmodel.Asset, error) { return it, nil } } if l.batch == nil { - l.batch = &sceneLockLoaderBatch{done: make(chan struct{})} + l.batch = &assetLoaderBatch{done: make(chan struct{})} } batch := l.batch pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (graphql.SceneLockMode, error) { + return func() (*gqlmodel.Asset, error) { <-batch.done - var data graphql.SceneLockMode + var data *gqlmodel.Asset if pos < len(batch.data) { data = batch.data[pos] } @@ -114,69 +114,72 @@ func (l *SceneLockLoader) LoadThunk(key id.SceneID) func() (graphql.SceneLockMod // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *SceneLockLoader) LoadAll(keys []id.SceneID) ([]graphql.SceneLockMode, []error) { - results := make([]func() (graphql.SceneLockMode, error), len(keys)) +func (l *AssetLoader) LoadAll(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { + results := make([]func() (*gqlmodel.Asset, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - sceneLockModes := make([]graphql.SceneLockMode, len(keys)) + assets := make([]*gqlmodel.Asset, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - sceneLockModes[i], errors[i] = thunk() + assets[i], errors[i] = thunk() } - return sceneLockModes, errors + return assets, errors } -// LoadAllThunk returns a function that when called will block waiting for a SceneLockModes. +// LoadAllThunk returns a function that when called will block waiting for a Assets. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneLockLoader) LoadAllThunk(keys []id.SceneID) func() ([]graphql.SceneLockMode, []error) { - results := make([]func() (graphql.SceneLockMode, error), len(keys)) +func (l *AssetLoader) LoadAllThunk(keys []id.AssetID) func() ([]*gqlmodel.Asset, []error) { + results := make([]func() (*gqlmodel.Asset, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]graphql.SceneLockMode, []error) { - sceneLockModes := make([]graphql.SceneLockMode, len(keys)) + return func() ([]*gqlmodel.Asset, []error) { + assets := make([]*gqlmodel.Asset, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - sceneLockModes[i], errors[i] = thunk() + assets[i], errors[i] = thunk() } - return sceneLockModes, errors + return assets, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *SceneLockLoader) Prime(key id.SceneID, value graphql.SceneLockMode) bool { +func (l *AssetLoader) Prime(key id.AssetID, value *gqlmodel.Asset) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { - l.unsafeSet(key, value) + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) } l.mu.Unlock() return !found } // Clear the value at key from the cache, if it exists -func (l *SceneLockLoader) Clear(key id.SceneID) { +func (l *AssetLoader) Clear(key id.AssetID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *SceneLockLoader) unsafeSet(key id.SceneID, value graphql.SceneLockMode) { +func (l *AssetLoader) unsafeSet(key id.AssetID, value *gqlmodel.Asset) { if l.cache == nil { - l.cache = map[id.SceneID]graphql.SceneLockMode{} + l.cache = map[id.AssetID]*gqlmodel.Asset{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *sceneLockLoaderBatch) keyIndex(l *SceneLockLoader, key id.SceneID) int { +func (b *assetLoaderBatch) keyIndex(l *AssetLoader, key id.AssetID) int { for i, existingKey := range b.keys { if key == existingKey { return i @@ -200,7 +203,7 @@ func (b *sceneLockLoaderBatch) keyIndex(l *SceneLockLoader, key id.SceneID) int return pos } -func (b *sceneLockLoaderBatch) startTimer(l *SceneLockLoader) { +func (b *assetLoaderBatch) startTimer(l *AssetLoader) { time.Sleep(l.wait) l.mu.Lock() @@ -216,7 +219,7 @@ func (b *sceneLockLoaderBatch) startTimer(l *SceneLockLoader) { b.end(l) } -func (b *sceneLockLoaderBatch) end(l *SceneLockLoader) { +func (b *assetLoaderBatch) end(l *AssetLoader) { b.data, b.error = l.fetch(b.keys) close(b.done) } diff --git a/internal/graphql/dataloader/dataloader.go b/internal/adapter/gql/gqldataloader/dataloader.go similarity index 61% rename from internal/graphql/dataloader/dataloader.go rename to internal/adapter/gql/gqldataloader/dataloader.go index c4ddfdc8c..8e570de61 100644 --- a/internal/graphql/dataloader/dataloader.go +++ b/internal/adapter/gql/gqldataloader/dataloader.go @@ -1,14 +1,15 @@ -package dataloader +package gqldataloader -//go:generate go run github.com/vektah/dataloaden DatasetLoader github.com/reearth/reearth-backend/pkg/id.DatasetID 
*github.com/reearth/reearth-backend/internal/adapter/graphql.Dataset -//go:generate go run github.com/vektah/dataloaden DatasetSchemaLoader github.com/reearth/reearth-backend/pkg/id.DatasetSchemaID *github.com/reearth/reearth-backend/internal/adapter/graphql.DatasetSchema -//go:generate go run github.com/vektah/dataloaden LayerLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/graphql.Layer -//go:generate go run github.com/vektah/dataloaden LayerGroupLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/graphql.LayerGroup -//go:generate go run github.com/vektah/dataloaden LayerItemLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/graphql.LayerItem -//go:generate go run github.com/vektah/dataloaden PluginLoader github.com/reearth/reearth-backend/pkg/id.PluginID *github.com/reearth/reearth-backend/internal/adapter/graphql.Plugin -//go:generate go run github.com/vektah/dataloaden ProjectLoader github.com/reearth/reearth-backend/pkg/id.ProjectID *github.com/reearth/reearth-backend/internal/adapter/graphql.Project -//go:generate go run github.com/vektah/dataloaden PropertyLoader github.com/reearth/reearth-backend/pkg/id.PropertyID *github.com/reearth/reearth-backend/internal/adapter/graphql.Property -//go:generate go run github.com/vektah/dataloaden PropertySchemaLoader github.com/reearth/reearth-backend/pkg/id.PropertySchemaID *github.com/reearth/reearth-backend/internal/adapter/graphql.PropertySchema -//go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/pkg/id.SceneID *github.com/reearth/reearth-backend/internal/adapter/graphql.Scene -//go:generate go run github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/pkg/id.TeamID *github.com/reearth/reearth-backend/internal/adapter/graphql.Team -//go:generate go run 
github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/pkg/id.UserID *github.com/reearth/reearth-backend/internal/adapter/graphql.User +//go:generate go run github.com/vektah/dataloaden AssetLoader github.com/reearth/reearth-backend/pkg/id.AssetID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Asset +//go:generate go run github.com/vektah/dataloaden DatasetLoader github.com/reearth/reearth-backend/pkg/id.DatasetID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Dataset +//go:generate go run github.com/vektah/dataloaden DatasetSchemaLoader github.com/reearth/reearth-backend/pkg/id.DatasetSchemaID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.DatasetSchema +//go:generate go run github.com/vektah/dataloaden LayerLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Layer +//go:generate go run github.com/vektah/dataloaden LayerGroupLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerGroup +//go:generate go run github.com/vektah/dataloaden LayerItemLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerItem +//go:generate go run github.com/vektah/dataloaden PluginLoader github.com/reearth/reearth-backend/pkg/id.PluginID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Plugin +//go:generate go run github.com/vektah/dataloaden ProjectLoader github.com/reearth/reearth-backend/pkg/id.ProjectID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Project +//go:generate go run github.com/vektah/dataloaden PropertyLoader github.com/reearth/reearth-backend/pkg/id.PropertyID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Property +//go:generate go run github.com/vektah/dataloaden PropertySchemaLoader 
github.com/reearth/reearth-backend/pkg/id.PropertySchemaID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchema +//go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/pkg/id.SceneID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Scene +//go:generate go run github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/pkg/id.TeamID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Team +//go:generate go run github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/pkg/id.UserID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.User diff --git a/internal/graphql/dataloader/datasetloader_gen.go b/internal/adapter/gql/gqldataloader/datasetloader_gen.go similarity index 80% rename from internal/graphql/dataloader/datasetloader_gen.go rename to internal/adapter/gql/gqldataloader/datasetloader_gen.go index 45559750d..9b1cb1120 100644 --- a/internal/graphql/dataloader/datasetloader_gen.go +++ b/internal/adapter/gql/gqldataloader/datasetloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // DatasetLoaderConfig captures the config to create a new DatasetLoader type DatasetLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.DatasetID) ([]*graphql.Dataset, []error) + Fetch func(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewDatasetLoader(config DatasetLoaderConfig) *DatasetLoader { // DatasetLoader batches and caches requests type DatasetLoader struct { // this method provides the data for the loader - fetch func(keys []id.DatasetID) ([]*graphql.Dataset, []error) + fetch func(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type DatasetLoader struct { // INTERNAL // lazily created cache - cache map[id.DatasetID]*graphql.Dataset + cache map[id.DatasetID]*gqlmodel.Dataset // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type DatasetLoader struct { type datasetLoaderBatch struct { keys []id.DatasetID - data []*graphql.Dataset + data []*gqlmodel.Dataset error []error closing bool done chan struct{} } // Load a Dataset by key, batching and caching will be applied automatically -func (l *DatasetLoader) Load(key id.DatasetID) (*graphql.Dataset, error) { +func (l *DatasetLoader) Load(key id.DatasetID) (*gqlmodel.Dataset, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Dataset. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*graphql.Dataset, error) { +func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*gqlmodel.Dataset, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Dataset, error) { + return func() (*gqlmodel.Dataset, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*graphql.Dataset, er pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Dataset, error) { + return func() (*gqlmodel.Dataset, error) { <-batch.done - var data *graphql.Dataset + var data *gqlmodel.Dataset if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*graphql.Dataset, er // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *DatasetLoader) LoadAll(keys []id.DatasetID) ([]*graphql.Dataset, []error) { - results := make([]func() (*graphql.Dataset, error), len(keys)) +func (l *DatasetLoader) LoadAll(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) { + results := make([]func() (*gqlmodel.Dataset, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - datasets := make([]*graphql.Dataset, len(keys)) + datasets := make([]*gqlmodel.Dataset, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { datasets[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *DatasetLoader) LoadAll(keys []id.DatasetID) ([]*graphql.Dataset, []erro // LoadAllThunk returns a function that when called will block waiting for a Datasets. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *DatasetLoader) LoadAllThunk(keys []id.DatasetID) func() ([]*graphql.Dataset, []error) { - results := make([]func() (*graphql.Dataset, error), len(keys)) +func (l *DatasetLoader) LoadAllThunk(keys []id.DatasetID) func() ([]*gqlmodel.Dataset, []error) { + results := make([]func() (*gqlmodel.Dataset, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Dataset, []error) { - datasets := make([]*graphql.Dataset, len(keys)) + return func() ([]*gqlmodel.Dataset, []error) { + datasets := make([]*gqlmodel.Dataset, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { datasets[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *DatasetLoader) LoadAllThunk(keys []id.DatasetID) func() ([]*graphql.Dat // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *DatasetLoader) Prime(key id.DatasetID, value *graphql.Dataset) bool { +func (l *DatasetLoader) Prime(key id.DatasetID, value *gqlmodel.Dataset) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *DatasetLoader) Clear(key id.DatasetID) { l.mu.Unlock() } -func (l *DatasetLoader) unsafeSet(key id.DatasetID, value *graphql.Dataset) { +func (l *DatasetLoader) unsafeSet(key id.DatasetID, value *gqlmodel.Dataset) { if l.cache == nil { - l.cache = map[id.DatasetID]*graphql.Dataset{} + l.cache = map[id.DatasetID]*gqlmodel.Dataset{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/datasetschemaloader_gen.go b/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go similarity index 82% rename from internal/graphql/dataloader/datasetschemaloader_gen.go rename to internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go index cfe201b04..e8ca12d9a 100644 --- a/internal/graphql/dataloader/datasetschemaloader_gen.go +++ b/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // DatasetSchemaLoaderConfig captures the config to create a new DatasetSchemaLoader type DatasetSchemaLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) + Fetch func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewDatasetSchemaLoader(config DatasetSchemaLoaderConfig) *DatasetSchemaLoad // DatasetSchemaLoader batches and caches requests type DatasetSchemaLoader struct { // this method provides the data for the loader - fetch func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) + fetch func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type DatasetSchemaLoader struct { // INTERNAL // lazily created cache - cache map[id.DatasetSchemaID]*graphql.DatasetSchema + cache map[id.DatasetSchemaID]*gqlmodel.DatasetSchema // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type DatasetSchemaLoader struct { type datasetSchemaLoaderBatch struct { keys []id.DatasetSchemaID - data []*graphql.DatasetSchema + data []*gqlmodel.DatasetSchema error []error closing bool done chan struct{} } // Load a DatasetSchema by key, batching and caching will be applied automatically -func (l *DatasetSchemaLoader) Load(key id.DatasetSchemaID) (*graphql.DatasetSchema, error) { +func (l *DatasetSchemaLoader) Load(key id.DatasetSchemaID) (*gqlmodel.DatasetSchema, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a DatasetSchema. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*graphql.DatasetSchema, error) { +func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*gqlmodel.DatasetSchema, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.DatasetSchema, error) { + return func() (*gqlmodel.DatasetSchema, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*graphql pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.DatasetSchema, error) { + return func() (*gqlmodel.DatasetSchema, error) { <-batch.done - var data *graphql.DatasetSchema + var data *gqlmodel.DatasetSchema if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*graphql // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *DatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { - results := make([]func() (*graphql.DatasetSchema, error), len(keys)) +func (l *DatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + results := make([]func() (*gqlmodel.DatasetSchema, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - datasetSchemas := make([]*graphql.DatasetSchema, len(keys)) + datasetSchemas := make([]*gqlmodel.DatasetSchema, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { datasetSchemas[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *DatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*graphql.Dat // LoadAllThunk returns a function that when called will block waiting for a DatasetSchemas. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *DatasetSchemaLoader) LoadAllThunk(keys []id.DatasetSchemaID) func() ([]*graphql.DatasetSchema, []error) { - results := make([]func() (*graphql.DatasetSchema, error), len(keys)) +func (l *DatasetSchemaLoader) LoadAllThunk(keys []id.DatasetSchemaID) func() ([]*gqlmodel.DatasetSchema, []error) { + results := make([]func() (*gqlmodel.DatasetSchema, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.DatasetSchema, []error) { - datasetSchemas := make([]*graphql.DatasetSchema, len(keys)) + return func() ([]*gqlmodel.DatasetSchema, []error) { + datasetSchemas := make([]*gqlmodel.DatasetSchema, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { datasetSchemas[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *DatasetSchemaLoader) LoadAllThunk(keys []id.DatasetSchemaID) func() ([] // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *DatasetSchemaLoader) Prime(key id.DatasetSchemaID, value *graphql.DatasetSchema) bool { +func (l *DatasetSchemaLoader) Prime(key id.DatasetSchemaID, value *gqlmodel.DatasetSchema) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *DatasetSchemaLoader) Clear(key id.DatasetSchemaID) { l.mu.Unlock() } -func (l *DatasetSchemaLoader) unsafeSet(key id.DatasetSchemaID, value *graphql.DatasetSchema) { +func (l *DatasetSchemaLoader) unsafeSet(key id.DatasetSchemaID, value *gqlmodel.DatasetSchema) { if l.cache == nil { - l.cache = map[id.DatasetSchemaID]*graphql.DatasetSchema{} + l.cache = map[id.DatasetSchemaID]*gqlmodel.DatasetSchema{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/layergrouploader_gen.go b/internal/adapter/gql/gqldataloader/layergrouploader_gen.go similarity index 79% rename from internal/graphql/dataloader/layergrouploader_gen.go rename to internal/adapter/gql/gqldataloader/layergrouploader_gen.go index 1c21aed1d..5c93dad38 100644 --- a/internal/graphql/dataloader/layergrouploader_gen.go +++ b/internal/adapter/gql/gqldataloader/layergrouploader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // LayerGroupLoaderConfig captures the config to create a new LayerGroupLoader type LayerGroupLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) + Fetch func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewLayerGroupLoader(config LayerGroupLoaderConfig) *LayerGroupLoader { // LayerGroupLoader batches and caches requests type LayerGroupLoader struct { // this method provides the data for the loader - fetch func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) + fetch func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type LayerGroupLoader struct { // INTERNAL // lazily created cache - cache map[id.LayerID]*graphql.LayerGroup + cache map[id.LayerID]*gqlmodel.LayerGroup // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type LayerGroupLoader struct { type layerGroupLoaderBatch struct { keys []id.LayerID - data []*graphql.LayerGroup + data []*gqlmodel.LayerGroup error []error closing bool done chan struct{} } // Load a LayerGroup by key, batching and caching will be applied automatically -func (l *LayerGroupLoader) Load(key id.LayerID) (*graphql.LayerGroup, error) { +func (l *LayerGroupLoader) Load(key id.LayerID) (*gqlmodel.LayerGroup, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a LayerGroup. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerGroup, error) { +func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.LayerGroup, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.LayerGroup, error) { + return func() (*gqlmodel.LayerGroup, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerGroup pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.LayerGroup, error) { + return func() (*gqlmodel.LayerGroup, error) { <-batch.done - var data *graphql.LayerGroup + var data *gqlmodel.LayerGroup if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerGroup // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *LayerGroupLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { - results := make([]func() (*graphql.LayerGroup, error), len(keys)) +func (l *LayerGroupLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + results := make([]func() (*gqlmodel.LayerGroup, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - layerGroups := make([]*graphql.LayerGroup, len(keys)) + layerGroups := make([]*gqlmodel.LayerGroup, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { layerGroups[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *LayerGroupLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerGroup, [] // LoadAllThunk returns a function that when called will block waiting for a LayerGroups. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerGroupLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.LayerGroup, []error) { - results := make([]func() (*graphql.LayerGroup, error), len(keys)) +func (l *LayerGroupLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.LayerGroup, []error) { + results := make([]func() (*gqlmodel.LayerGroup, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.LayerGroup, []error) { - layerGroups := make([]*graphql.LayerGroup, len(keys)) + return func() ([]*gqlmodel.LayerGroup, []error) { + layerGroups := make([]*gqlmodel.LayerGroup, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { layerGroups[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *LayerGroupLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.La // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *LayerGroupLoader) Prime(key id.LayerID, value *graphql.LayerGroup) bool { +func (l *LayerGroupLoader) Prime(key id.LayerID, value *gqlmodel.LayerGroup) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *LayerGroupLoader) Clear(key id.LayerID) { l.mu.Unlock() } -func (l *LayerGroupLoader) unsafeSet(key id.LayerID, value *graphql.LayerGroup) { +func (l *LayerGroupLoader) unsafeSet(key id.LayerID, value *gqlmodel.LayerGroup) { if l.cache == nil { - l.cache = map[id.LayerID]*graphql.LayerGroup{} + l.cache = map[id.LayerID]*gqlmodel.LayerGroup{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/layeritemloader_gen.go b/internal/adapter/gql/gqldataloader/layeritemloader_gen.go similarity index 80% rename from internal/graphql/dataloader/layeritemloader_gen.go rename to internal/adapter/gql/gqldataloader/layeritemloader_gen.go index 3150226c6..9593fd1ed 100644 --- a/internal/graphql/dataloader/layeritemloader_gen.go +++ b/internal/adapter/gql/gqldataloader/layeritemloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // LayerItemLoaderConfig captures the config to create a new LayerItemLoader type LayerItemLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.LayerID) ([]*graphql.LayerItem, []error) + Fetch func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewLayerItemLoader(config LayerItemLoaderConfig) *LayerItemLoader { // LayerItemLoader batches and caches requests type LayerItemLoader struct { // this method provides the data for the loader - fetch func(keys []id.LayerID) ([]*graphql.LayerItem, []error) + fetch func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type LayerItemLoader struct { // INTERNAL // lazily created cache - cache map[id.LayerID]*graphql.LayerItem + cache map[id.LayerID]*gqlmodel.LayerItem // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type LayerItemLoader struct { type layerItemLoaderBatch struct { keys []id.LayerID - data []*graphql.LayerItem + data []*gqlmodel.LayerItem error []error closing bool done chan struct{} } // Load a LayerItem by key, batching and caching will be applied automatically -func (l *LayerItemLoader) Load(key id.LayerID) (*graphql.LayerItem, error) { +func (l *LayerItemLoader) Load(key id.LayerID) (*gqlmodel.LayerItem, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a LayerItem. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerItem, error) { +func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.LayerItem, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.LayerItem, error) { + return func() (*gqlmodel.LayerItem, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerItem, pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.LayerItem, error) { + return func() (*gqlmodel.LayerItem, error) { <-batch.done - var data *graphql.LayerItem + var data *gqlmodel.LayerItem if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*graphql.LayerItem, // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *LayerItemLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerItem, []error) { - results := make([]func() (*graphql.LayerItem, error), len(keys)) +func (l *LayerItemLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + results := make([]func() (*gqlmodel.LayerItem, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - layerItems := make([]*graphql.LayerItem, len(keys)) + layerItems := make([]*gqlmodel.LayerItem, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { layerItems[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *LayerItemLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerItem, []er // LoadAllThunk returns a function that when called will block waiting for a LayerItems. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerItemLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.LayerItem, []error) { - results := make([]func() (*graphql.LayerItem, error), len(keys)) +func (l *LayerItemLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.LayerItem, []error) { + results := make([]func() (*gqlmodel.LayerItem, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.LayerItem, []error) { - layerItems := make([]*graphql.LayerItem, len(keys)) + return func() ([]*gqlmodel.LayerItem, []error) { + layerItems := make([]*gqlmodel.LayerItem, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { layerItems[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *LayerItemLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.Lay // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *LayerItemLoader) Prime(key id.LayerID, value *graphql.LayerItem) bool { +func (l *LayerItemLoader) Prime(key id.LayerID, value *gqlmodel.LayerItem) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *LayerItemLoader) Clear(key id.LayerID) { l.mu.Unlock() } -func (l *LayerItemLoader) unsafeSet(key id.LayerID, value *graphql.LayerItem) { +func (l *LayerItemLoader) unsafeSet(key id.LayerID, value *gqlmodel.LayerItem) { if l.cache == nil { - l.cache = map[id.LayerID]*graphql.LayerItem{} + l.cache = map[id.LayerID]*gqlmodel.LayerItem{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/layerloader_gen.go b/internal/adapter/gql/gqldataloader/layerloader_gen.go similarity index 81% rename from internal/graphql/dataloader/layerloader_gen.go rename to internal/adapter/gql/gqldataloader/layerloader_gen.go index b83418930..1dbe12af4 100644 --- a/internal/graphql/dataloader/layerloader_gen.go +++ b/internal/adapter/gql/gqldataloader/layerloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // LayerLoaderConfig captures the config to create a new LayerLoader type LayerLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.LayerID) ([]*graphql.Layer, []error) + Fetch func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewLayerLoader(config LayerLoaderConfig) *LayerLoader { // LayerLoader batches and caches requests type LayerLoader struct { // this method provides the data for the loader - fetch func(keys []id.LayerID) ([]*graphql.Layer, []error) + fetch func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type LayerLoader struct { // INTERNAL // lazily created cache - cache map[id.LayerID]*graphql.Layer + cache map[id.LayerID]*gqlmodel.Layer // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type LayerLoader struct { type layerLoaderBatch struct { keys []id.LayerID - data []*graphql.Layer + data []*gqlmodel.Layer error []error closing bool done chan struct{} } // Load a Layer by key, batching and caching will be applied automatically -func (l *LayerLoader) Load(key id.LayerID) (*graphql.Layer, error) { +func (l *LayerLoader) Load(key id.LayerID) (*gqlmodel.Layer, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Layer. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*graphql.Layer, error) { +func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.Layer, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Layer, error) { + return func() (*gqlmodel.Layer, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*graphql.Layer, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Layer, error) { + return func() (*gqlmodel.Layer, error) { <-batch.done - var data *graphql.Layer + var data *gqlmodel.Layer if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*graphql.Layer, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *LayerLoader) LoadAll(keys []id.LayerID) ([]*graphql.Layer, []error) { - results := make([]func() (*graphql.Layer, error), len(keys)) +func (l *LayerLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { + results := make([]func() (*gqlmodel.Layer, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - layers := make([]*graphql.Layer, len(keys)) + layers := make([]*gqlmodel.Layer, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { layers[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *LayerLoader) LoadAll(keys []id.LayerID) ([]*graphql.Layer, []error) { // LoadAllThunk returns a function that when called will block waiting for a Layers. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *LayerLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.Layer, []error) { - results := make([]func() (*graphql.Layer, error), len(keys)) +func (l *LayerLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.Layer, []error) { + results := make([]func() (*gqlmodel.Layer, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Layer, []error) { - layers := make([]*graphql.Layer, len(keys)) + return func() ([]*gqlmodel.Layer, []error) { + layers := make([]*gqlmodel.Layer, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { layers[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *LayerLoader) LoadAllThunk(keys []id.LayerID) func() ([]*graphql.Layer, // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *LayerLoader) Prime(key id.LayerID, value *graphql.Layer) bool { +func (l *LayerLoader) Prime(key id.LayerID, value *gqlmodel.Layer) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *LayerLoader) Clear(key id.LayerID) { l.mu.Unlock() } -func (l *LayerLoader) unsafeSet(key id.LayerID, value *graphql.Layer) { +func (l *LayerLoader) unsafeSet(key id.LayerID, value *gqlmodel.Layer) { if l.cache == nil { - l.cache = map[id.LayerID]*graphql.Layer{} + l.cache = map[id.LayerID]*gqlmodel.Layer{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/pluginloader_gen.go b/internal/adapter/gql/gqldataloader/pluginloader_gen.go similarity index 80% rename from internal/graphql/dataloader/pluginloader_gen.go rename to internal/adapter/gql/gqldataloader/pluginloader_gen.go index 36d763982..b4a868a3a 100644 --- a/internal/graphql/dataloader/pluginloader_gen.go +++ b/internal/adapter/gql/gqldataloader/pluginloader_gen.go @@ -1,19 +1,19 @@ // Code 
generated by github.com/vektah/dataloaden, DO NOT EDIT. -package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // PluginLoaderConfig captures the config to create a new PluginLoader type PluginLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.PluginID) ([]*graphql.Plugin, []error) + Fetch func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewPluginLoader(config PluginLoaderConfig) *PluginLoader { // PluginLoader batches and caches requests type PluginLoader struct { // this method provides the data for the loader - fetch func(keys []id.PluginID) ([]*graphql.Plugin, []error) + fetch func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type PluginLoader struct { // INTERNAL // lazily created cache - cache map[id.PluginID]*graphql.Plugin + cache map[id.PluginID]*gqlmodel.Plugin // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type PluginLoader struct { type pluginLoaderBatch struct { keys []id.PluginID - data []*graphql.Plugin + data []*gqlmodel.Plugin error []error closing bool done chan struct{} } // Load a Plugin by key, batching and caching will be applied automatically -func (l *PluginLoader) Load(key id.PluginID) (*graphql.Plugin, error) { +func (l *PluginLoader) Load(key id.PluginID) (*gqlmodel.Plugin, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Plugin. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*graphql.Plugin, error) { +func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*gqlmodel.Plugin, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Plugin, error) { + return func() (*gqlmodel.Plugin, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*graphql.Plugin, error pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Plugin, error) { + return func() (*gqlmodel.Plugin, error) { <-batch.done - var data *graphql.Plugin + var data *gqlmodel.Plugin if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*graphql.Plugin, error // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *PluginLoader) LoadAll(keys []id.PluginID) ([]*graphql.Plugin, []error) { - results := make([]func() (*graphql.Plugin, error), len(keys)) +func (l *PluginLoader) LoadAll(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { + results := make([]func() (*gqlmodel.Plugin, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - plugins := make([]*graphql.Plugin, len(keys)) + plugins := make([]*gqlmodel.Plugin, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { plugins[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *PluginLoader) LoadAll(keys []id.PluginID) ([]*graphql.Plugin, []error) // LoadAllThunk returns a function that when called will block waiting for a Plugins. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *PluginLoader) LoadAllThunk(keys []id.PluginID) func() ([]*graphql.Plugin, []error) { - results := make([]func() (*graphql.Plugin, error), len(keys)) +func (l *PluginLoader) LoadAllThunk(keys []id.PluginID) func() ([]*gqlmodel.Plugin, []error) { + results := make([]func() (*gqlmodel.Plugin, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Plugin, []error) { - plugins := make([]*graphql.Plugin, len(keys)) + return func() ([]*gqlmodel.Plugin, []error) { + plugins := make([]*gqlmodel.Plugin, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { plugins[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *PluginLoader) LoadAllThunk(keys []id.PluginID) func() ([]*graphql.Plugi // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *PluginLoader) Prime(key id.PluginID, value *graphql.Plugin) bool { +func (l *PluginLoader) Prime(key id.PluginID, value *gqlmodel.Plugin) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *PluginLoader) Clear(key id.PluginID) { l.mu.Unlock() } -func (l *PluginLoader) unsafeSet(key id.PluginID, value *graphql.Plugin) { +func (l *PluginLoader) unsafeSet(key id.PluginID, value *gqlmodel.Plugin) { if l.cache == nil { - l.cache = map[id.PluginID]*graphql.Plugin{} + l.cache = map[id.PluginID]*gqlmodel.Plugin{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/projectloader_gen.go b/internal/adapter/gql/gqldataloader/projectloader_gen.go similarity index 80% rename from internal/graphql/dataloader/projectloader_gen.go rename to internal/adapter/gql/gqldataloader/projectloader_gen.go index 34daabef8..686d83223 100644 --- a/internal/graphql/dataloader/projectloader_gen.go +++ 
b/internal/adapter/gql/gqldataloader/projectloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. -package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // ProjectLoaderConfig captures the config to create a new ProjectLoader type ProjectLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.ProjectID) ([]*graphql.Project, []error) + Fetch func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewProjectLoader(config ProjectLoaderConfig) *ProjectLoader { // ProjectLoader batches and caches requests type ProjectLoader struct { // this method provides the data for the loader - fetch func(keys []id.ProjectID) ([]*graphql.Project, []error) + fetch func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type ProjectLoader struct { // INTERNAL // lazily created cache - cache map[id.ProjectID]*graphql.Project + cache map[id.ProjectID]*gqlmodel.Project // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type ProjectLoader struct { type projectLoaderBatch struct { keys []id.ProjectID - data []*graphql.Project + data []*gqlmodel.Project error []error closing bool done chan struct{} } // Load a Project by key, batching and caching will be applied automatically -func (l *ProjectLoader) Load(key id.ProjectID) (*graphql.Project, error) { +func (l *ProjectLoader) Load(key id.ProjectID) (*gqlmodel.Project, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Project. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*graphql.Project, error) { +func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*gqlmodel.Project, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Project, error) { + return func() (*gqlmodel.Project, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*graphql.Project, er pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Project, error) { + return func() (*gqlmodel.Project, error) { <-batch.done - var data *graphql.Project + var data *gqlmodel.Project if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*graphql.Project, er // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *ProjectLoader) LoadAll(keys []id.ProjectID) ([]*graphql.Project, []error) { - results := make([]func() (*graphql.Project, error), len(keys)) +func (l *ProjectLoader) LoadAll(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { + results := make([]func() (*gqlmodel.Project, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - projects := make([]*graphql.Project, len(keys)) + projects := make([]*gqlmodel.Project, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { projects[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *ProjectLoader) LoadAll(keys []id.ProjectID) ([]*graphql.Project, []erro // LoadAllThunk returns a function that when called will block waiting for a Projects. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ProjectLoader) LoadAllThunk(keys []id.ProjectID) func() ([]*graphql.Project, []error) { - results := make([]func() (*graphql.Project, error), len(keys)) +func (l *ProjectLoader) LoadAllThunk(keys []id.ProjectID) func() ([]*gqlmodel.Project, []error) { + results := make([]func() (*gqlmodel.Project, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Project, []error) { - projects := make([]*graphql.Project, len(keys)) + return func() ([]*gqlmodel.Project, []error) { + projects := make([]*gqlmodel.Project, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { projects[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *ProjectLoader) LoadAllThunk(keys []id.ProjectID) func() ([]*graphql.Pro // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. 
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *ProjectLoader) Prime(key id.ProjectID, value *graphql.Project) bool { +func (l *ProjectLoader) Prime(key id.ProjectID, value *gqlmodel.Project) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *ProjectLoader) Clear(key id.ProjectID) { l.mu.Unlock() } -func (l *ProjectLoader) unsafeSet(key id.ProjectID, value *graphql.Project) { +func (l *ProjectLoader) unsafeSet(key id.ProjectID, value *gqlmodel.Project) { if l.cache == nil { - l.cache = map[id.ProjectID]*graphql.Project{} + l.cache = map[id.ProjectID]*gqlmodel.Project{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/propertyloader_gen.go b/internal/adapter/gql/gqldataloader/propertyloader_gen.go similarity index 81% rename from internal/graphql/dataloader/propertyloader_gen.go rename to internal/adapter/gql/gqldataloader/propertyloader_gen.go index b20777acf..bcd8b64ec 100644 --- a/internal/graphql/dataloader/propertyloader_gen.go +++ b/internal/adapter/gql/gqldataloader/propertyloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // PropertyLoaderConfig captures the config to create a new PropertyLoader type PropertyLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.PropertyID) ([]*graphql.Property, []error) + Fetch func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewPropertyLoader(config PropertyLoaderConfig) *PropertyLoader { // PropertyLoader batches and caches requests type PropertyLoader struct { // this method provides the data for the loader - fetch func(keys []id.PropertyID) ([]*graphql.Property, []error) + fetch func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type PropertyLoader struct { // INTERNAL // lazily created cache - cache map[id.PropertyID]*graphql.Property + cache map[id.PropertyID]*gqlmodel.Property // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type PropertyLoader struct { type propertyLoaderBatch struct { keys []id.PropertyID - data []*graphql.Property + data []*gqlmodel.Property error []error closing bool done chan struct{} } // Load a Property by key, batching and caching will be applied automatically -func (l *PropertyLoader) Load(key id.PropertyID) (*graphql.Property, error) { +func (l *PropertyLoader) Load(key id.PropertyID) (*gqlmodel.Property, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Property. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*graphql.Property, error) { +func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*gqlmodel.Property, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Property, error) { + return func() (*gqlmodel.Property, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*graphql.Property, pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Property, error) { + return func() (*gqlmodel.Property, error) { <-batch.done - var data *graphql.Property + var data *gqlmodel.Property if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*graphql.Property, // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *PropertyLoader) LoadAll(keys []id.PropertyID) ([]*graphql.Property, []error) { - results := make([]func() (*graphql.Property, error), len(keys)) +func (l *PropertyLoader) LoadAll(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { + results := make([]func() (*gqlmodel.Property, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - propertys := make([]*graphql.Property, len(keys)) + propertys := make([]*gqlmodel.Property, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { propertys[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *PropertyLoader) LoadAll(keys []id.PropertyID) ([]*graphql.Property, []e // LoadAllThunk returns a function that when called will block waiting for a Propertys. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PropertyLoader) LoadAllThunk(keys []id.PropertyID) func() ([]*graphql.Property, []error) { - results := make([]func() (*graphql.Property, error), len(keys)) +func (l *PropertyLoader) LoadAllThunk(keys []id.PropertyID) func() ([]*gqlmodel.Property, []error) { + results := make([]func() (*gqlmodel.Property, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Property, []error) { - propertys := make([]*graphql.Property, len(keys)) + return func() ([]*gqlmodel.Property, []error) { + propertys := make([]*gqlmodel.Property, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { propertys[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *PropertyLoader) LoadAllThunk(keys []id.PropertyID) func() ([]*graphql.P // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *PropertyLoader) Prime(key id.PropertyID, value *graphql.Property) bool { +func (l *PropertyLoader) Prime(key id.PropertyID, value *gqlmodel.Property) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *PropertyLoader) Clear(key id.PropertyID) { l.mu.Unlock() } -func (l *PropertyLoader) unsafeSet(key id.PropertyID, value *graphql.Property) { +func (l *PropertyLoader) unsafeSet(key id.PropertyID, value *gqlmodel.Property) { if l.cache == nil { - l.cache = map[id.PropertyID]*graphql.Property{} + l.cache = map[id.PropertyID]*gqlmodel.Property{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/propertyschemaloader_gen.go b/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go similarity index 82% rename from internal/graphql/dataloader/propertyschemaloader_gen.go rename to internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go index 5473685b4..f4c4379b7 100644 --- a/internal/graphql/dataloader/propertyschemaloader_gen.go +++ b/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // PropertySchemaLoaderConfig captures the config to create a new PropertySchemaLoader type PropertySchemaLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) + Fetch func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewPropertySchemaLoader(config PropertySchemaLoaderConfig) *PropertySchemaL // PropertySchemaLoader batches and caches requests type PropertySchemaLoader struct { // this method provides the data for the loader - fetch func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) + fetch func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type PropertySchemaLoader struct { // INTERNAL // lazily created cache - cache map[id.PropertySchemaID]*graphql.PropertySchema + cache map[id.PropertySchemaID]*gqlmodel.PropertySchema // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type PropertySchemaLoader struct { type propertySchemaLoaderBatch struct { keys []id.PropertySchemaID - data []*graphql.PropertySchema + data []*gqlmodel.PropertySchema error []error closing bool done chan struct{} } // Load a PropertySchema by key, batching and caching will be applied automatically -func (l *PropertySchemaLoader) Load(key id.PropertySchemaID) (*graphql.PropertySchema, error) { +func (l *PropertySchemaLoader) Load(key id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a PropertySchema. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*graphql.PropertySchema, error) { +func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*gqlmodel.PropertySchema, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.PropertySchema, error) { + return func() (*gqlmodel.PropertySchema, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*graph pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.PropertySchema, error) { + return func() (*gqlmodel.PropertySchema, error) { <-batch.done - var data *graphql.PropertySchema + var data *gqlmodel.PropertySchema if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*graph // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *PropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { - results := make([]func() (*graphql.PropertySchema, error), len(keys)) +func (l *PropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + results := make([]func() (*gqlmodel.PropertySchema, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - propertySchemas := make([]*graphql.PropertySchema, len(keys)) + propertySchemas := make([]*gqlmodel.PropertySchema, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { propertySchemas[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *PropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*graphql.P // LoadAllThunk returns a function that when called will block waiting for a PropertySchemas. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *PropertySchemaLoader) LoadAllThunk(keys []id.PropertySchemaID) func() ([]*graphql.PropertySchema, []error) { - results := make([]func() (*graphql.PropertySchema, error), len(keys)) +func (l *PropertySchemaLoader) LoadAllThunk(keys []id.PropertySchemaID) func() ([]*gqlmodel.PropertySchema, []error) { + results := make([]func() (*gqlmodel.PropertySchema, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.PropertySchema, []error) { - propertySchemas := make([]*graphql.PropertySchema, len(keys)) + return func() ([]*gqlmodel.PropertySchema, []error) { + propertySchemas := make([]*gqlmodel.PropertySchema, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { propertySchemas[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *PropertySchemaLoader) LoadAllThunk(keys []id.PropertySchemaID) func() ( // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *PropertySchemaLoader) Prime(key id.PropertySchemaID, value *graphql.PropertySchema) bool { +func (l *PropertySchemaLoader) Prime(key id.PropertySchemaID, value *gqlmodel.PropertySchema) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *PropertySchemaLoader) Clear(key id.PropertySchemaID) { l.mu.Unlock() } -func (l *PropertySchemaLoader) unsafeSet(key id.PropertySchemaID, value *graphql.PropertySchema) { +func (l *PropertySchemaLoader) unsafeSet(key id.PropertySchemaID, value *gqlmodel.PropertySchema) { if l.cache == nil { - l.cache = map[id.PropertySchemaID]*graphql.PropertySchema{} + l.cache = map[id.PropertySchemaID]*gqlmodel.PropertySchema{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/sceneloader_gen.go b/internal/adapter/gql/gqldataloader/sceneloader_gen.go similarity index 81% rename from internal/graphql/dataloader/sceneloader_gen.go rename to internal/adapter/gql/gqldataloader/sceneloader_gen.go index 1996f82cf..29edcd912 100644 --- a/internal/graphql/dataloader/sceneloader_gen.go +++ b/internal/adapter/gql/gqldataloader/sceneloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
-package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // SceneLoaderConfig captures the config to create a new SceneLoader type SceneLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.SceneID) ([]*graphql.Scene, []error) + Fetch func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewSceneLoader(config SceneLoaderConfig) *SceneLoader { // SceneLoader batches and caches requests type SceneLoader struct { // this method provides the data for the loader - fetch func(keys []id.SceneID) ([]*graphql.Scene, []error) + fetch func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type SceneLoader struct { // INTERNAL // lazily created cache - cache map[id.SceneID]*graphql.Scene + cache map[id.SceneID]*gqlmodel.Scene // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type SceneLoader struct { type sceneLoaderBatch struct { keys []id.SceneID - data []*graphql.Scene + data []*gqlmodel.Scene error []error closing bool done chan struct{} } // Load a Scene by key, batching and caching will be applied automatically -func (l *SceneLoader) Load(key id.SceneID) (*graphql.Scene, error) { +func (l *SceneLoader) Load(key id.SceneID) (*gqlmodel.Scene, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Scene. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*graphql.Scene, error) { +func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*gqlmodel.Scene, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Scene, error) { + return func() (*gqlmodel.Scene, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*graphql.Scene, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Scene, error) { + return func() (*gqlmodel.Scene, error) { <-batch.done - var data *graphql.Scene + var data *gqlmodel.Scene if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*graphql.Scene, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *SceneLoader) LoadAll(keys []id.SceneID) ([]*graphql.Scene, []error) { - results := make([]func() (*graphql.Scene, error), len(keys)) +func (l *SceneLoader) LoadAll(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { + results := make([]func() (*gqlmodel.Scene, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - scenes := make([]*graphql.Scene, len(keys)) + scenes := make([]*gqlmodel.Scene, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { scenes[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *SceneLoader) LoadAll(keys []id.SceneID) ([]*graphql.Scene, []error) { // LoadAllThunk returns a function that when called will block waiting for a Scenes. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *SceneLoader) LoadAllThunk(keys []id.SceneID) func() ([]*graphql.Scene, []error) { - results := make([]func() (*graphql.Scene, error), len(keys)) +func (l *SceneLoader) LoadAllThunk(keys []id.SceneID) func() ([]*gqlmodel.Scene, []error) { + results := make([]func() (*gqlmodel.Scene, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Scene, []error) { - scenes := make([]*graphql.Scene, len(keys)) + return func() ([]*gqlmodel.Scene, []error) { + scenes := make([]*gqlmodel.Scene, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { scenes[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *SceneLoader) LoadAllThunk(keys []id.SceneID) func() ([]*graphql.Scene, // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *SceneLoader) Prime(key id.SceneID, value *graphql.Scene) bool { +func (l *SceneLoader) Prime(key id.SceneID, value *gqlmodel.Scene) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *SceneLoader) Clear(key id.SceneID) { l.mu.Unlock() } -func (l *SceneLoader) unsafeSet(key id.SceneID, value *graphql.Scene) { +func (l *SceneLoader) unsafeSet(key id.SceneID, value *gqlmodel.Scene) { if l.cache == nil { - l.cache = map[id.SceneID]*graphql.Scene{} + l.cache = map[id.SceneID]*gqlmodel.Scene{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/teamloader_gen.go b/internal/adapter/gql/gqldataloader/teamloader_gen.go similarity index 81% rename from internal/graphql/dataloader/teamloader_gen.go rename to internal/adapter/gql/gqldataloader/teamloader_gen.go index a9cfd2a67..ac7e4dd3a 100644 --- a/internal/graphql/dataloader/teamloader_gen.go +++ b/internal/adapter/gql/gqldataloader/teamloader_gen.go @@ -1,19 +1,19 @@ // Code generated by 
github.com/vektah/dataloaden, DO NOT EDIT. -package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // TeamLoaderConfig captures the config to create a new TeamLoader type TeamLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.TeamID) ([]*graphql.Team, []error) + Fetch func(keys []id.TeamID) ([]*gqlmodel.Team, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewTeamLoader(config TeamLoaderConfig) *TeamLoader { // TeamLoader batches and caches requests type TeamLoader struct { // this method provides the data for the loader - fetch func(keys []id.TeamID) ([]*graphql.Team, []error) + fetch func(keys []id.TeamID) ([]*gqlmodel.Team, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type TeamLoader struct { // INTERNAL // lazily created cache - cache map[id.TeamID]*graphql.Team + cache map[id.TeamID]*gqlmodel.Team // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type TeamLoader struct { type teamLoaderBatch struct { keys []id.TeamID - data []*graphql.Team + data []*gqlmodel.Team error []error closing bool done chan struct{} } // Load a Team by key, batching and caching will be applied automatically -func (l *TeamLoader) Load(key id.TeamID) (*graphql.Team, error) { +func (l *TeamLoader) Load(key id.TeamID) (*gqlmodel.Team, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Team. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*graphql.Team, error) { +func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*gqlmodel.Team, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.Team, error) { + return func() (*gqlmodel.Team, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*graphql.Team, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.Team, error) { + return func() (*gqlmodel.Team, error) { <-batch.done - var data *graphql.Team + var data *gqlmodel.Team if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*graphql.Team, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *TeamLoader) LoadAll(keys []id.TeamID) ([]*graphql.Team, []error) { - results := make([]func() (*graphql.Team, error), len(keys)) +func (l *TeamLoader) LoadAll(keys []id.TeamID) ([]*gqlmodel.Team, []error) { + results := make([]func() (*gqlmodel.Team, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - teams := make([]*graphql.Team, len(keys)) + teams := make([]*gqlmodel.Team, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { teams[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *TeamLoader) LoadAll(keys []id.TeamID) ([]*graphql.Team, []error) { // LoadAllThunk returns a function that when called will block waiting for a Teams. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *TeamLoader) LoadAllThunk(keys []id.TeamID) func() ([]*graphql.Team, []error) { - results := make([]func() (*graphql.Team, error), len(keys)) +func (l *TeamLoader) LoadAllThunk(keys []id.TeamID) func() ([]*gqlmodel.Team, []error) { + results := make([]func() (*gqlmodel.Team, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.Team, []error) { - teams := make([]*graphql.Team, len(keys)) + return func() ([]*gqlmodel.Team, []error) { + teams := make([]*gqlmodel.Team, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { teams[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *TeamLoader) LoadAllThunk(keys []id.TeamID) func() ([]*graphql.Team, []e // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *TeamLoader) Prime(key id.TeamID, value *graphql.Team) bool { +func (l *TeamLoader) Prime(key id.TeamID, value *gqlmodel.Team) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *TeamLoader) Clear(key id.TeamID) { l.mu.Unlock() } -func (l *TeamLoader) unsafeSet(key id.TeamID, value *graphql.Team) { +func (l *TeamLoader) unsafeSet(key id.TeamID, value *gqlmodel.Team) { if l.cache == nil { - l.cache = map[id.TeamID]*graphql.Team{} + l.cache = map[id.TeamID]*gqlmodel.Team{} } l.cache[key] = value } diff --git a/internal/graphql/dataloader/userloader_gen.go b/internal/adapter/gql/gqldataloader/userloader_gen.go similarity index 81% rename from internal/graphql/dataloader/userloader_gen.go rename to internal/adapter/gql/gqldataloader/userloader_gen.go index a6d1ebff6..84959213e 100644 --- a/internal/graphql/dataloader/userloader_gen.go +++ b/internal/adapter/gql/gqldataloader/userloader_gen.go @@ -1,19 +1,19 @@ // Code generated by github.com/vektah/dataloaden, DO NOT 
EDIT. -package dataloader +package gqldataloader import ( "sync" "time" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) // UserLoaderConfig captures the config to create a new UserLoader type UserLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.UserID) ([]*graphql.User, []error) + Fetch func(keys []id.UserID) ([]*gqlmodel.User, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +34,7 @@ func NewUserLoader(config UserLoaderConfig) *UserLoader { // UserLoader batches and caches requests type UserLoader struct { // this method provides the data for the loader - fetch func(keys []id.UserID) ([]*graphql.User, []error) + fetch func(keys []id.UserID) ([]*gqlmodel.User, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +45,7 @@ type UserLoader struct { // INTERNAL // lazily created cache - cache map[id.UserID]*graphql.User + cache map[id.UserID]*gqlmodel.User // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -57,25 +57,25 @@ type UserLoader struct { type userLoaderBatch struct { keys []id.UserID - data []*graphql.User + data []*gqlmodel.User error []error closing bool done chan struct{} } // Load a User by key, batching and caching will be applied automatically -func (l *UserLoader) Load(key id.UserID) (*graphql.User, error) { +func (l *UserLoader) Load(key id.UserID) (*gqlmodel.User, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a User. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *UserLoader) LoadThunk(key id.UserID) func() (*graphql.User, error) { +func (l *UserLoader) LoadThunk(key id.UserID) func() (*gqlmodel.User, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*graphql.User, error) { + return func() (*gqlmodel.User, error) { return it, nil } } @@ -86,10 +86,10 @@ func (l *UserLoader) LoadThunk(key id.UserID) func() (*graphql.User, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*graphql.User, error) { + return func() (*gqlmodel.User, error) { <-batch.done - var data *graphql.User + var data *gqlmodel.User if pos < len(batch.data) { data = batch.data[pos] } @@ -114,14 +114,14 @@ func (l *UserLoader) LoadThunk(key id.UserID) func() (*graphql.User, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *UserLoader) LoadAll(keys []id.UserID) ([]*graphql.User, []error) { - results := make([]func() (*graphql.User, error), len(keys)) +func (l *UserLoader) LoadAll(keys []id.UserID) ([]*gqlmodel.User, []error) { + results := make([]func() (*gqlmodel.User, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - users := make([]*graphql.User, len(keys)) + users := make([]*gqlmodel.User, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { users[i], errors[i] = thunk() @@ -132,13 +132,13 @@ func (l *UserLoader) LoadAll(keys []id.UserID) ([]*graphql.User, []error) { // LoadAllThunk returns a function that when called will block waiting for a Users. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *UserLoader) LoadAllThunk(keys []id.UserID) func() ([]*graphql.User, []error) { - results := make([]func() (*graphql.User, error), len(keys)) +func (l *UserLoader) LoadAllThunk(keys []id.UserID) func() ([]*gqlmodel.User, []error) { + results := make([]func() (*gqlmodel.User, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*graphql.User, []error) { - users := make([]*graphql.User, len(keys)) + return func() ([]*gqlmodel.User, []error) { + users := make([]*gqlmodel.User, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { users[i], errors[i] = thunk() @@ -150,7 +150,7 @@ func (l *UserLoader) LoadAllThunk(keys []id.UserID) func() ([]*graphql.User, []e // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *UserLoader) Prime(key id.UserID, value *graphql.User) bool { +func (l *UserLoader) Prime(key id.UserID, value *gqlmodel.User) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -170,9 +170,9 @@ func (l *UserLoader) Clear(key id.UserID) { l.mu.Unlock() } -func (l *UserLoader) unsafeSet(key id.UserID, value *graphql.User) { +func (l *UserLoader) unsafeSet(key id.UserID, value *gqlmodel.User) { if l.cache == nil { - l.cache = map[id.UserID]*graphql.User{} + l.cache = map[id.UserID]*gqlmodel.User{} } l.cache[key] = value } diff --git a/internal/adapter/graphql/convert.go b/internal/adapter/gql/gqlmodel/convert.go similarity index 69% rename from internal/adapter/graphql/convert.go rename to internal/adapter/gql/gqlmodel/convert.go index a38bb6219..874bf5be4 100644 --- a/internal/adapter/graphql/convert.go +++ b/internal/adapter/gql/gqlmodel/convert.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "io" @@ -10,28 +10,14 @@ import ( 
"github.com/reearth/reearth-backend/pkg/visualizer" ) -// func refToBool(s *bool) bool { -// if s == nil { -// return false -// } -// return *s -// } - -// func refToInt(s *int) int { -// if s == nil { -// return 0 -// } -// return *s -// } - -func refToIndex(s *int) int { - if s == nil { +func RefToIndex(i *int) int { + if i == nil { return -1 } - return *s + return *i } -func refToString(s *string) string { +func RefToString(s *string) string { if s == nil { return "" } @@ -45,7 +31,7 @@ func stringToRef(s string) *string { return &s } -func toPageInfo(p *usecase.PageInfo) *PageInfo { +func ToPageInfo(p *usecase.PageInfo) *PageInfo { if p == nil { return &PageInfo{} } @@ -57,7 +43,7 @@ func toPageInfo(p *usecase.PageInfo) *PageInfo { } } -func toVisualizer(v visualizer.Visualizer) Visualizer { +func ToVisualizer(v visualizer.Visualizer) Visualizer { switch v { case visualizer.VisualizerCesium: return VisualizerCesium @@ -65,7 +51,7 @@ func toVisualizer(v visualizer.Visualizer) Visualizer { return Visualizer("") } -func fromFile(f *graphql.Upload) *file.File { +func FromFile(f *graphql.Upload) *file.File { if f == nil { return nil } @@ -77,7 +63,7 @@ func fromFile(f *graphql.Upload) *file.File { } } -func fromListOperation(op ListOperation) interfaces.ListOperation { +func FromListOperation(op ListOperation) interfaces.ListOperation { if op == ListOperationAdd { return interfaces.ListOperationAdd } else if op == ListOperationMove { diff --git a/internal/adapter/graphql/convert_asset.go b/internal/adapter/gql/gqlmodel/convert_asset.go similarity index 84% rename from internal/adapter/graphql/convert_asset.go rename to internal/adapter/gql/gqlmodel/convert_asset.go index b2ec93df9..0f60ad769 100644 --- a/internal/adapter/graphql/convert_asset.go +++ b/internal/adapter/gql/gqlmodel/convert_asset.go @@ -1,10 +1,10 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/asset" ) -func toAsset(a *asset.Asset) *Asset { +func ToAsset(a 
*asset.Asset) *Asset { if a == nil { return nil } diff --git a/internal/adapter/graphql/convert_dataset.go b/internal/adapter/gql/gqlmodel/convert_dataset.go similarity index 73% rename from internal/adapter/graphql/convert_dataset.go rename to internal/adapter/gql/gqlmodel/convert_dataset.go index b2cbc2edf..1239964fc 100644 --- a/internal/adapter/graphql/convert_dataset.go +++ b/internal/adapter/gql/gqlmodel/convert_dataset.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "net/url" @@ -7,7 +7,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -func toDatasetValue(v *dataset.Value) *interface{} { +func ToDatasetValue(v *dataset.Value) *interface{} { var res interface{} if v == nil { return nil @@ -38,7 +38,7 @@ func toDatasetValue(v *dataset.Value) *interface{} { return &res } -func toDatasetValueType(t dataset.ValueType) ValueType { +func ToDatasetValueType(t dataset.ValueType) ValueType { switch t { case dataset.ValueTypeBool: return ValueTypeBool @@ -58,11 +58,11 @@ func toDatasetValueType(t dataset.ValueType) ValueType { return "" } -func toDatasetSource(ds dataset.Source) string { +func ToDatasetSource(ds dataset.Source) string { return ds.String() } -func toDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { +func ToDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { if f == nil || parent == nil { return nil } @@ -70,13 +70,13 @@ func toDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { return &DatasetField{ SchemaID: parent.Schema().ID(), FieldID: f.Field().ID(), - Type: toDatasetValueType(f.Type()), - Value: toDatasetValue(f.Value()), - Source: toDatasetSource(f.Source()), + Type: ToDatasetValueType(f.Type()), + Value: ToDatasetValue(f.Value()), + Source: ToDatasetSource(f.Source()), } } -func toDataset(ds *dataset.Dataset) *Dataset { +func ToDataset(ds *dataset.Dataset) *Dataset { if ds == nil { return nil } @@ -84,18 +84,18 @@ func toDataset(ds *dataset.Dataset) *Dataset 
{ dsFields := ds.Fields() fields := make([]*DatasetField, 0, len(dsFields)) for _, f := range dsFields { - fields = append(fields, toDatasetField(f, ds)) + fields = append(fields, ToDatasetField(f, ds)) } return &Dataset{ ID: ds.ID().ID(), SchemaID: ds.Schema().ID(), - Source: toDatasetSource(ds.Source()), + Source: ToDatasetSource(ds.Source()), Fields: fields, } } -func toDatasetSchema(ds *dataset.Schema) *DatasetSchema { +func ToDatasetSchema(ds *dataset.Schema) *DatasetSchema { if ds == nil { return nil } @@ -106,16 +106,16 @@ func toDatasetSchema(ds *dataset.Schema) *DatasetSchema { fields = append(fields, &DatasetSchemaField{ ID: f.ID().ID(), Name: f.Name(), - Type: toDatasetValueType(f.Type()), + Type: ToDatasetValueType(f.Type()), SchemaID: ds.ID().ID(), - Source: toDatasetSource(f.Source()), + Source: ToDatasetSource(f.Source()), RefID: f.Ref().IDRef(), }) } return &DatasetSchema{ ID: ds.ID().ID(), - Source: toDatasetSource(ds.Source()), + Source: ToDatasetSource(ds.Source()), Name: ds.Name(), SceneID: ds.Scene().ID(), RepresentativeFieldID: ds.RepresentativeField().IDRef().IDRef(), diff --git a/internal/adapter/graphql/convert_layer.go b/internal/adapter/gql/gqlmodel/convert_layer.go similarity index 72% rename from internal/adapter/graphql/convert_layer.go rename to internal/adapter/gql/gqlmodel/convert_layer.go index 3700d2905..0250ee348 100644 --- a/internal/adapter/graphql/convert_layer.go +++ b/internal/adapter/gql/gqlmodel/convert_layer.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/id" @@ -6,7 +6,7 @@ import ( "github.com/reearth/reearth-backend/pkg/layer/decoding" ) -func toLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { +func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { if l == nil { return nil } @@ -19,13 +19,13 @@ func toLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { PropertyID: l.Property().IDRef(), PluginID: l.Plugin(), ExtensionID: l.Extension(), - 
Infobox: toInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), + Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), LinkedDatasetID: l.LinkedDataset().IDRef(), ParentID: parent.IDRef(), } } -func toLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { +func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { if l == nil { return nil } @@ -44,7 +44,7 @@ func toLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { PropertyID: l.Property().IDRef(), PluginID: l.Plugin(), ExtensionID: l.Extension(), - Infobox: toInfobox(l.Infobox(), l.ID(), l.Scene(), nil), + Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), nil), LinkedDatasetSchemaID: l.LinkedDatasetSchema().IDRef(), LayerIds: layers, Root: l.IsRoot(), @@ -52,20 +52,20 @@ func toLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { } } -func toLayer(l layer.Layer, parent *id.LayerID) Layer { +func ToLayer(l layer.Layer, parent *id.LayerID) Layer { if l == nil { return nil } switch la := l.(type) { case *layer.Item: - return toLayerItem(la, parent) + return ToLayerItem(la, parent) case *layer.Group: - return toLayerGroup(la, parent) + return ToLayerGroup(la, parent) } return nil } -func toLayers(layers layer.List, parent *id.LayerID) []Layer { +func ToLayers(layers layer.List, parent *id.LayerID) []Layer { if len(layers) == 0 { return nil } @@ -75,13 +75,13 @@ func toLayers(layers layer.List, parent *id.LayerID) []Layer { if l == nil { continue } - result = append(result, toLayer(*l, parent)) + result = append(result, ToLayer(*l, parent)) } return result } -func toInfoboxField(ibf *layer.InfoboxField, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *InfoboxField { +func ToInfoboxField(ibf *layer.InfoboxField, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *InfoboxField { if ibf == nil { return nil } @@ -95,14 +95,14 @@ func toInfoboxField(ibf *layer.InfoboxField, parentSceneID id.SceneID, parentDat } } -func toInfobox(ib *layer.Infobox, 
parent id.LayerID, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *Infobox { +func ToInfobox(ib *layer.Infobox, parent id.LayerID, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *Infobox { if ib == nil { return nil } ibFields := ib.Fields() fields := make([]*InfoboxField, 0, len(ibFields)) for _, ibf := range ibFields { - fields = append(fields, toInfoboxField(ibf, parentSceneID, parentDatasetID)) + fields = append(fields, ToInfoboxField(ibf, parentSceneID, parentDatasetID)) } return &Infobox{ @@ -114,7 +114,7 @@ func toInfobox(ib *layer.Infobox, parent id.LayerID, parentSceneID id.SceneID, p } } -func toMergedLayer(layer *layer.Merged) *MergedLayer { +func ToMergedLayer(layer *layer.Merged) *MergedLayer { if layer == nil { return nil } @@ -123,29 +123,29 @@ func toMergedLayer(layer *layer.Merged) *MergedLayer { SceneID: layer.Scene.ID(), OriginalID: layer.Original.ID(), ParentID: layer.Parent.IDRef(), - Infobox: toMergedInfobox(layer.Infobox, layer.Scene), - Property: toMergedPropertyFromMetadata(layer.Property), + Infobox: ToMergedInfobox(layer.Infobox, layer.Scene), + Property: ToMergedPropertyFromMetadata(layer.Property), } } -func toMergedInfobox(ib *layer.MergedInfobox, sceneID id.SceneID) *MergedInfobox { +func ToMergedInfobox(ib *layer.MergedInfobox, sceneID id.SceneID) *MergedInfobox { if ib == nil { return nil } fields := make([]*MergedInfoboxField, 0, len(ib.Fields)) for _, f := range ib.Fields { - fields = append(fields, toMergedInfoboxField(f, sceneID)) + fields = append(fields, ToMergedInfoboxField(f, sceneID)) } return &MergedInfobox{ SceneID: sceneID.ID(), Fields: fields, - Property: toMergedPropertyFromMetadata(ib.Property), + Property: ToMergedPropertyFromMetadata(ib.Property), } } -func toMergedInfoboxField(ibf *layer.MergedInfoboxField, sceneID id.SceneID) *MergedInfoboxField { +func ToMergedInfoboxField(ibf *layer.MergedInfoboxField, sceneID id.SceneID) *MergedInfoboxField { if ibf == nil { return nil } @@ -155,10 +155,10 @@ 
func toMergedInfoboxField(ibf *layer.MergedInfoboxField, sceneID id.SceneID) *Me OriginalID: ibf.ID.ID(), PluginID: ibf.Plugin, ExtensionID: ibf.Extension, - Property: toMergedPropertyFromMetadata(ibf.Property), + Property: ToMergedPropertyFromMetadata(ibf.Property), } } -func fromLayerEncodingFormat(v LayerEncodingFormat) decoding.LayerEncodingFormat { +func FromLayerEncodingFormat(v LayerEncodingFormat) decoding.LayerEncodingFormat { switch v { case LayerEncodingFormatKml: return decoding.LayerEncodingFormatKML diff --git a/internal/adapter/graphql/convert_plugin.go b/internal/adapter/gql/gqlmodel/convert_plugin.go similarity index 85% rename from internal/adapter/graphql/convert_plugin.go rename to internal/adapter/gql/gqlmodel/convert_plugin.go index 6abfdead9..cef4045d1 100644 --- a/internal/adapter/graphql/convert_plugin.go +++ b/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -1,10 +1,10 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/plugin" ) -func toPlugin(p *plugin.Plugin) *Plugin { +func ToPlugin(p *plugin.Plugin) *Plugin { if p == nil { return nil } @@ -16,8 +16,8 @@ func toPlugin(p *plugin.Plugin) *Plugin { extensions = append(extensions, &PluginExtension{ ExtensionID: pe.ID(), PluginID: pid, - Type: toPluginExtensionType(pe.Type()), - Visualizer: toVisualizer(pe.Visualizer()), + Type: ToPluginExtensionType(pe.Type()), + Visualizer: ToVisualizer(pe.Visualizer()), Name: pe.Name().String(), Description: pe.Description().String(), Icon: pe.Icon(), @@ -42,7 +42,7 @@ func toPlugin(p *plugin.Plugin) *Plugin { } } -func toPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { +func ToPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { switch t { case plugin.ExtensionTypePrimitive: return PluginExtensionTypePrimitive @@ -58,7 +58,7 @@ func toPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { return PluginExtensionType("") } -func toPluginMetadata(t *plugin.Metadata) 
(*PluginMetadata, error) { +func ToPluginMetadata(t *plugin.Metadata) (*PluginMetadata, error) { if t == nil { return nil, nil } diff --git a/internal/adapter/graphql/convert_project.go b/internal/adapter/gql/gqlmodel/convert_project.go similarity index 85% rename from internal/adapter/graphql/convert_project.go rename to internal/adapter/gql/gqlmodel/convert_project.go index 138fe8629..863c95ff9 100644 --- a/internal/adapter/graphql/convert_project.go +++ b/internal/adapter/gql/gqlmodel/convert_project.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "time" @@ -6,7 +6,7 @@ import ( "github.com/reearth/reearth-backend/pkg/project" ) -func fromPublishmentStatus(v PublishmentStatus) project.PublishmentStatus { +func FromPublishmentStatus(v PublishmentStatus) project.PublishmentStatus { switch v { case PublishmentStatusPublic: return project.PublishmentStatusPublic @@ -18,7 +18,7 @@ func fromPublishmentStatus(v PublishmentStatus) project.PublishmentStatus { return project.PublishmentStatus("") } -func toPublishmentStatus(v project.PublishmentStatus) PublishmentStatus { +func ToPublishmentStatus(v project.PublishmentStatus) PublishmentStatus { switch v { case project.PublishmentStatusPublic: return PublishmentStatusPublic @@ -30,7 +30,7 @@ func toPublishmentStatus(v project.PublishmentStatus) PublishmentStatus { return PublishmentStatus("") } -func toProject(p *project.Project) *Project { +func ToProject(p *project.Project) *Project { if p == nil { return nil } @@ -55,7 +55,7 @@ func toProject(p *project.Project) *Project { UpdatedAt: p.UpdatedAt(), Visualizer: Visualizer(p.Visualizer()), TeamID: p.Team().ID(), - PublishmentStatus: toPublishmentStatus(p.PublishmentStatus()), + PublishmentStatus: ToPublishmentStatus(p.PublishmentStatus()), PublicTitle: p.PublicTitle(), PublicDescription: p.PublicDescription(), PublicImage: p.PublicImage(), diff --git a/internal/adapter/graphql/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go 
similarity index 82% rename from internal/adapter/graphql/convert_property.go rename to internal/adapter/gql/gqlmodel/convert_property.go index 3ca383c94..a69e3127c 100644 --- a/internal/adapter/graphql/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "net/url" @@ -8,7 +8,7 @@ import ( "github.com/reearth/reearth-backend/pkg/property" ) -func toPropertyValue(v *property.Value) *interface{} { +func ToPropertyValue(v *property.Value) *interface{} { var res interface{} if v == nil { return nil @@ -51,7 +51,7 @@ func toPropertyValue(v *property.Value) *interface{} { FontSize: v2.FontSize, FontWeight: v2.FontWeight, Color: v2.Color, - TextAlign: toTextAlign(v2.TextAlign), + TextAlign: ToTextAlign(v2.TextAlign), Bold: v2.Bold, Italic: v2.Italic, Underline: v2.Underline, @@ -84,7 +84,7 @@ func toPropertyValue(v *property.Value) *interface{} { return &res } -func toTextAlign(t *property.TextAlign) *TextAlign { +func ToTextAlign(t *property.TextAlign) *TextAlign { if t == nil { return nil } @@ -106,7 +106,7 @@ func toTextAlign(t *property.TextAlign) *TextAlign { return &t3 } -func toPropertyValueType(t property.ValueType) ValueType { +func ToPropertyValueType(t property.ValueType) ValueType { switch t { case property.ValueTypeBool: return ValueTypeBool @@ -136,7 +136,7 @@ func toPropertyValueType(t property.ValueType) ValueType { return "" } -func fromPropertyValueType(t ValueType) property.ValueType { +func FromPropertyValueType(t ValueType) property.ValueType { switch t { case ValueTypeBool: return property.ValueTypeBool @@ -166,7 +166,7 @@ func fromPropertyValueType(t ValueType) property.ValueType { return "" } -func fromPropertyValueAndType(v interface{}, t ValueType) (*property.Value, bool) { +func FromPropertyValueAndType(v interface{}, t ValueType) (*property.Value, bool) { switch v2 := v.(type) { case LatLng: v = property.LatLng{ @@ -241,7 +241,7 @@ func fromPropertyValueAndType(v 
interface{}, t ValueType) (*property.Value, bool South: v2.South, } } - return fromPropertyValueType(t).ValueFrom(v) + return FromPropertyValueType(t).ValueFrom(v) } func fromTextAlign(t *TextAlign) *property.TextAlign { @@ -266,7 +266,7 @@ func fromTextAlign(t *TextAlign) *property.TextAlign { return &t2 } -func toPropertyField(f *property.Field, parent *property.Property, gl *property.GroupList, g *property.Group) *PropertyField { +func ToPropertyField(f *property.Field, parent *property.Property, gl *property.GroupList, g *property.Group) *PropertyField { if f == nil { return nil } @@ -275,7 +275,7 @@ func toPropertyField(f *property.Field, parent *property.Property, gl *property. if flinks := f.Links(); flinks != nil { links = make([]*PropertyFieldLink, 0, flinks.Len()) for _, l := range flinks.Links() { - links = append(links, toPropertyFieldLink(l)) + links = append(links, ToPropertyFieldLink(l)) } } @@ -285,25 +285,25 @@ func toPropertyField(f *property.Field, parent *property.Property, gl *property. 
ParentID: parent.ID().ID(), SchemaID: parent.Schema(), FieldID: f.Field(), - Value: toPropertyValue(f.Value()), - Type: toPropertyValueType(f.Type()), + Value: ToPropertyValue(f.Value()), + Type: ToPropertyValueType(f.Type()), Links: links, } } -func toPropertyFieldLinks(flinks *property.Links) []*PropertyFieldLink { +func ToPropertyFieldLinks(flinks *property.Links) []*PropertyFieldLink { if flinks == nil { return nil } var links []*PropertyFieldLink links = make([]*PropertyFieldLink, 0, flinks.Len()) for _, l := range flinks.Links() { - links = append(links, toPropertyFieldLink(l)) + links = append(links, ToPropertyFieldLink(l)) } return links } -func fromPropertyFieldLink(datasetSchema, ds, fields []*id.ID) *property.Links { +func FromPropertyFieldLink(datasetSchema, ds, fields []*id.ID) *property.Links { if len(datasetSchema) != len(fields) || (ds != nil && len(ds) != len(fields) && len(ds) > 1) { return nil } @@ -330,7 +330,7 @@ func fromPropertyFieldLink(datasetSchema, ds, fields []*id.ID) *property.Links { return property.NewLinks(links) } -func toPropertyFieldLink(link *property.Link) *PropertyFieldLink { +func ToPropertyFieldLink(link *property.Link) *PropertyFieldLink { return &PropertyFieldLink{ DatasetID: link.Dataset().IDRef(), DatasetSchemaID: link.DatasetSchema().ID(), @@ -338,7 +338,7 @@ func toPropertyFieldLink(link *property.Link) *PropertyFieldLink { } } -func toProperty(property *property.Property) *Property { +func ToProperty(property *property.Property) *Property { if property == nil { return nil } @@ -346,7 +346,7 @@ func toProperty(property *property.Property) *Property { pitems := property.Items() items := make([]PropertyItem, 0, len(pitems)) for _, i := range pitems { - items = append(items, toPropertyItem(i, property, nil)) + items = append(items, ToPropertyItem(i, property, nil)) } return &Property{ @@ -356,7 +356,7 @@ func toProperty(property *property.Property) *Property { } } -func toPropertySchema(propertySchema *property.Schema) 
*PropertySchema { +func ToPropertySchema(propertySchema *property.Schema) *PropertySchema { if propertySchema == nil { return nil } @@ -364,17 +364,17 @@ func toPropertySchema(propertySchema *property.Schema) *PropertySchema { pgroups := propertySchema.Groups() groups := make([]*PropertySchemaGroup, 0, len(pgroups)) for _, g := range pgroups { - groups = append(groups, toPropertySchemaGroup(g)) + groups = append(groups, ToPropertySchemaGroup(g)) } return &PropertySchema{ ID: propertySchema.ID(), Groups: groups, - LinkableFields: toPropertyLinkableFields(propertySchema.ID(), propertySchema.LinkableFields()), + LinkableFields: ToPropertyLinkableFields(propertySchema.ID(), propertySchema.LinkableFields()), } } -func toPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields) *PropertyLinkableFields { +func ToPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields) *PropertyLinkableFields { return &PropertyLinkableFields{ SchemaID: sid, Latlng: l.LatLng.FieldRef(), @@ -382,7 +382,7 @@ func toPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields } } -func toPropertySchemaField(f *property.SchemaField) *PropertySchemaField { +func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField { if f == nil { return nil } @@ -404,25 +404,25 @@ func toPropertySchemaField(f *property.SchemaField) *PropertySchemaField { return &PropertySchemaField{ FieldID: f.ID(), - Type: toPropertyValueType(f.Type()), + Type: ToPropertyValueType(f.Type()), Title: f.Title().String(), Name: f.Title().String(), // deprecated Description: f.Description().String(), Prefix: stringToRef(f.Prefix()), Suffix: stringToRef(f.Suffix()), - DefaultValue: toPropertyValue(f.DefaultValue()), - UI: toPropertySchemaFieldUI(f.UI()), + DefaultValue: ToPropertyValue(f.DefaultValue()), + UI: ToPropertySchemaFieldUI(f.UI()), Min: f.Min(), Max: f.Max(), Choices: choices, - IsAvailableIf: toPropertyConditon(f.IsAvailableIf()), + IsAvailableIf: 
ToPropertyConditon(f.IsAvailableIf()), AllTranslatedTitle: f.Title(), AllTranslatedName: f.Title(), // deprecated AllTranslatedDescription: f.Description(), } } -func toPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI { +func ToPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI { if ui == nil { return nil } @@ -453,7 +453,7 @@ func toPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI return nil } -func toMergedPropertyFromMetadata(m *property.MergedMetadata) *MergedProperty { +func ToMergedPropertyFromMetadata(m *property.MergedMetadata) *MergedProperty { if m == nil { return nil } @@ -465,13 +465,13 @@ func toMergedPropertyFromMetadata(m *property.MergedMetadata) *MergedProperty { } } -func toMergedProperty(m *property.Merged) *MergedProperty { +func ToMergedProperty(m *property.Merged) *MergedProperty { if m == nil { return nil } groups := make([]*MergedPropertyGroup, 0, len(m.Groups)) for _, f := range m.Groups { - groups = append(groups, toMergedPropertyGroup(f, m)) + groups = append(groups, ToMergedPropertyGroup(f, m)) } return &MergedProperty{ OriginalID: m.Original.IDRef(), @@ -482,17 +482,17 @@ func toMergedProperty(m *property.Merged) *MergedProperty { } } -func toMergedPropertyGroup(f *property.MergedGroup, p *property.Merged) *MergedPropertyGroup { +func ToMergedPropertyGroup(f *property.MergedGroup, p *property.Merged) *MergedPropertyGroup { if f == nil { return nil } fields := make([]*MergedPropertyField, 0, len(f.Fields)) for _, f2 := range f.Fields { - fields = append(fields, toMergedPropertyField(f2, p.Schema)) + fields = append(fields, ToMergedPropertyField(f2, p.Schema)) } groups := make([]*MergedPropertyGroup, 0, len(f.Groups)) for _, f2 := range f.Groups { - groups = append(groups, toMergedPropertyGroup(f2, p)) + groups = append(groups, ToMergedPropertyGroup(f2, p)) } return &MergedPropertyGroup{ OriginalPropertyID: p.Original.IDRef(), @@ -507,21 +507,21 @@ func 
toMergedPropertyGroup(f *property.MergedGroup, p *property.Merged) *MergedP } } -func toMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *MergedPropertyField { +func ToMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *MergedPropertyField { if f == nil { return nil } return &MergedPropertyField{ FieldID: f.ID, SchemaID: s, - Links: toPropertyFieldLinks(f.Links), - Value: toPropertyValue(f.Value), - Type: toPropertyValueType(f.Type), + Links: ToPropertyFieldLinks(f.Links), + Value: ToPropertyValue(f.Value), + Type: ToPropertyValueType(f.Type), Overridden: f.Overridden, } } -func toPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { +func ToPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { if g == nil { return nil } @@ -530,7 +530,7 @@ func toPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { var representativeField *PropertySchemaField representativeFieldID := g.RepresentativeFieldID() for _, f := range gfields { - f2 := toPropertySchemaField(f) + f2 := ToPropertySchemaField(f) fields = append(fields, f2) if representativeFieldID != nil && f.ID() == *representativeFieldID { representativeField = f2 @@ -546,11 +546,11 @@ func toPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { RepresentativeFieldID: representativeFieldID, RepresentativeField: representativeField, AllTranslatedTitle: g.Title(), - IsAvailableIf: toPropertyConditon(g.IsAvailableIf()), + IsAvailableIf: ToPropertyConditon(g.IsAvailableIf()), } } -func toPropertyGroup(g *property.Group, p *property.Property, gl *property.GroupList) *PropertyGroup { +func ToPropertyGroup(g *property.Group, p *property.Property, gl *property.GroupList) *PropertyGroup { if g == nil { return nil } @@ -558,7 +558,7 @@ func toPropertyGroup(g *property.Group, p *property.Property, gl *property.Group gfields := g.Fields() fields := make([]*PropertyField, 0, len(gfields)) for _, f := range gfields { - fields = append(fields, 
toPropertyField(f, p, gl, g)) + fields = append(fields, ToPropertyField(f, p, gl, g)) } return &PropertyGroup{ @@ -569,7 +569,7 @@ func toPropertyGroup(g *property.Group, p *property.Property, gl *property.Group } } -func toPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyGroupList { +func ToPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyGroupList { if g == nil { return nil } @@ -577,7 +577,7 @@ func toPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyG ggroups := g.Groups() groups := make([]*PropertyGroup, 0, len(ggroups)) for _, f := range ggroups { - groups = append(groups, toPropertyGroup(f, p, g)) + groups = append(groups, ToPropertyGroup(f, p, g)) } return &PropertyGroupList{ @@ -588,37 +588,37 @@ func toPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyG } } -func toPropertyItem(i property.Item, p *property.Property, pgl *property.GroupList) PropertyItem { +func ToPropertyItem(i property.Item, p *property.Property, pgl *property.GroupList) PropertyItem { if i == nil { return nil } if g := property.ToGroup(i); g != nil { - return toPropertyGroup(g, p, pgl) + return ToPropertyGroup(g, p, pgl) } else if gl := property.ToGroupList(i); gl != nil { - return toPropertyGroupList(gl, p) + return ToPropertyGroupList(gl, p) } return nil } -func toPropertyConditon(c *property.Condition) *PropertyCondition { +func ToPropertyConditon(c *property.Condition) *PropertyCondition { if c == nil { return nil } return &PropertyCondition{ FieldID: c.Field, - Value: toPropertyValue(c.Value), - Type: toPropertyValueType(c.Value.Type()), + Value: ToPropertyValue(c.Value), + Type: ToPropertyValueType(c.Value.Type()), } } -func fromPointer(schemaItem *id.PropertySchemaFieldID, item *id.ID, field *id.PropertySchemaFieldID) *property.Pointer { +func FromPointer(schemaItem *id.PropertySchemaFieldID, item *id.ID, field *id.PropertySchemaFieldID) *property.Pointer { i := 
id.PropertyItemIDFromRefID(item) return property.NewPointer(schemaItem, i, field) } -func toPropertyLatLng(lat, lng *float64) *property.LatLng { +func ToPropertyLatLng(lat, lng *float64) *property.LatLng { var latlng *property.LatLng if lat != nil && lng != nil { latlng2 := property.LatLng{Lat: *lat, Lng: *lng} diff --git a/internal/adapter/graphql/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go similarity index 81% rename from internal/adapter/graphql/convert_scene.go rename to internal/adapter/gql/gqlmodel/convert_scene.go index 877693d4a..c99ae550a 100644 --- a/internal/adapter/graphql/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -1,10 +1,10 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/scene" ) -func toSceneWidget(w *scene.Widget) *SceneWidget { +func ToSceneWidget(w *scene.Widget) *SceneWidget { if w == nil { return nil } @@ -18,7 +18,7 @@ func toSceneWidget(w *scene.Widget) *SceneWidget { } } -func toScenePlugin(sp *scene.Plugin) *ScenePlugin { +func ToScenePlugin(sp *scene.Plugin) *ScenePlugin { if sp == nil { return nil } @@ -29,7 +29,7 @@ func toScenePlugin(sp *scene.Plugin) *ScenePlugin { } } -func toScene(scene *scene.Scene) *Scene { +func ToScene(scene *scene.Scene) *Scene { if scene == nil { return nil } @@ -37,13 +37,13 @@ func toScene(scene *scene.Scene) *Scene { sceneWidgets := scene.WidgetSystem().Widgets() widgets := make([]*SceneWidget, 0, len(sceneWidgets)) for _, w := range sceneWidgets { - widgets = append(widgets, toSceneWidget(w)) + widgets = append(widgets, ToSceneWidget(w)) } scenePlugins := scene.PluginSystem().Plugins() plugins := make([]*ScenePlugin, 0, len(scenePlugins)) for _, sp := range scenePlugins { - plugins = append(plugins, toScenePlugin(sp)) + plugins = append(plugins, ToScenePlugin(sp)) } return &Scene{ @@ -59,7 +59,7 @@ func toScene(scene *scene.Scene) *Scene { } } -func toSceneLockMode(lm scene.LockMode) SceneLockMode { +func 
ToSceneLockMode(lm scene.LockMode) SceneLockMode { switch lm { case scene.LockModeFree: return SceneLockModeFree diff --git a/internal/adapter/graphql/convert_team.go b/internal/adapter/gql/gqlmodel/convert_team.go similarity index 90% rename from internal/adapter/graphql/convert_team.go rename to internal/adapter/gql/gqlmodel/convert_team.go index a1eb97d30..034784acc 100644 --- a/internal/adapter/graphql/convert_team.go +++ b/internal/adapter/gql/gqlmodel/convert_team.go @@ -1,10 +1,10 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/user" ) -func toTeam(t *user.Team) *Team { +func ToTeam(t *user.Team) *Team { if t == nil { return nil } @@ -26,7 +26,7 @@ func toTeam(t *user.Team) *Team { } } -func fromRole(r Role) user.Role { +func FromRole(r Role) user.Role { switch r { case RoleReader: return user.RoleReader diff --git a/internal/adapter/graphql/convert_user.go b/internal/adapter/gql/gqlmodel/convert_user.go similarity index 81% rename from internal/adapter/graphql/convert_user.go rename to internal/adapter/gql/gqlmodel/convert_user.go index 02c80c8b1..d7ff342e0 100644 --- a/internal/adapter/graphql/convert_user.go +++ b/internal/adapter/gql/gqlmodel/convert_user.go @@ -1,15 +1,10 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/user" ) -// ToUser _ func ToUser(user *user.User) *User { - return toUser(user) -} - -func toUser(user *user.User) *User { if user == nil { return nil } @@ -29,7 +24,7 @@ func toUser(user *user.User) *User { } } -func toSearchedUser(u *user.User) *SearchedUser { +func ToSearchedUser(u *user.User) *SearchedUser { if u == nil { return nil } @@ -40,7 +35,7 @@ func toSearchedUser(u *user.User) *SearchedUser { } } -func toTheme(t *Theme) *user.Theme { +func ToTheme(t *Theme) *user.Theme { th := user.ThemeDefault if t == nil { diff --git a/internal/adapter/graphql/models.go b/internal/adapter/gql/gqlmodel/models.go similarity index 98% rename from 
internal/adapter/graphql/models.go rename to internal/adapter/gql/gqlmodel/models.go index 0b2a450de..eac665f7a 100644 --- a/internal/adapter/graphql/models.go +++ b/internal/adapter/gql/gqlmodel/models.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "github.com/reearth/reearth-backend/internal/usecase" @@ -137,7 +137,7 @@ func AttachParentLayer(layers []*Layer, parent id.ID) []Layer { } func NewEmptyPageInfo() *PageInfo { - return toPageInfo(usecase.NewPageInfo(0, nil, nil, false, false)) + return ToPageInfo(usecase.NewPageInfo(0, nil, nil, false, false)) } func (d *PropertyGroup) Field(id id.PropertySchemaFieldID) *PropertyField { diff --git a/internal/adapter/graphql/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go similarity index 95% rename from internal/adapter/graphql/models_gen.go rename to internal/adapter/gql/gqlmodel/models_gen.go index 8d00ac70a..fa12a356b 100644 --- a/internal/adapter/graphql/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -1,6 +1,6 @@ // Code generated by github.com/99designs/gqlgen, DO NOT EDIT. 
-package graphql +package gqlmodel import ( "fmt" @@ -1077,20 +1077,6 @@ type UpdatePropertyItemOperationInput struct { NameFieldType *ValueType `json:"nameFieldType"` } -type UpdatePropertyValueCameraInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Lat float64 `json:"lat"` - Lng float64 `json:"lng"` - Altitude float64 `json:"altitude"` - Heading float64 `json:"heading"` - Pitch float64 `json:"pitch"` - Roll float64 `json:"roll"` - Fov float64 `json:"fov"` -} - type UpdatePropertyValueInput struct { PropertyID id.ID `json:"propertyId"` SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` @@ -1100,40 +1086,6 @@ type UpdatePropertyValueInput struct { Type ValueType `json:"type"` } -type UpdatePropertyValueLatLngHeightInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Lat float64 `json:"lat"` - Lng float64 `json:"lng"` - Height float64 `json:"height"` -} - -type UpdatePropertyValueLatLngInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Lat float64 `json:"lat"` - Lng float64 `json:"lng"` -} - -type UpdatePropertyValueTypographyInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - FontFamily *string `json:"fontFamily"` - FontWeight *string `json:"fontWeight"` - FontSize *int `json:"fontSize"` - Color *string `json:"color"` - TextAlign *TextAlign `json:"textAlign"` - Bold *bool `json:"bold"` - Italic *bool `json:"italic"` - Underline *bool `json:"underline"` -} - 
type UpdateTeamInput struct { TeamID id.ID `json:"teamId"` Name string `json:"name"` @@ -1293,6 +1245,7 @@ func (e ListOperation) MarshalGQL(w io.Writer) { type NodeType string const ( + NodeTypeAsset NodeType = "ASSET" NodeTypeUser NodeType = "USER" NodeTypeTeam NodeType = "TEAM" NodeTypeProject NodeType = "PROJECT" @@ -1307,6 +1260,7 @@ const ( ) var AllNodeType = []NodeType{ + NodeTypeAsset, NodeTypeUser, NodeTypeTeam, NodeTypeProject, @@ -1322,7 +1276,7 @@ var AllNodeType = []NodeType{ func (e NodeType) IsValid() bool { switch e { - case NodeTypeUser, NodeTypeTeam, NodeTypeProject, NodeTypePlugin, NodeTypeScene, NodeTypePropertySchema, NodeTypeProperty, NodeTypeDatasetSchema, NodeTypeDataset, NodeTypeLayerGroup, NodeTypeLayerItem: + case NodeTypeAsset, NodeTypeUser, NodeTypeTeam, NodeTypeProject, NodeTypePlugin, NodeTypeScene, NodeTypePropertySchema, NodeTypeProperty, NodeTypeDatasetSchema, NodeTypeDataset, NodeTypeLayerGroup, NodeTypeLayerItem: return true } return false diff --git a/internal/adapter/graphql/scalar.go b/internal/adapter/gql/gqlmodel/scalar.go similarity index 74% rename from internal/adapter/graphql/scalar.go rename to internal/adapter/gql/gqlmodel/scalar.go index b6d355e8e..dfe15b19e 100644 --- a/internal/adapter/graphql/scalar.go +++ b/internal/adapter/gql/gqlmodel/scalar.go @@ -1,4 +1,4 @@ -package graphql +package gqlmodel import ( "encoding/json" @@ -8,14 +8,14 @@ import ( "net/url" "strconv" - graphql1 "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" "golang.org/x/text/language" ) -func MarshalURL(t url.URL) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalURL(t url.URL) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -31,8 +31,8 @@ func UnmarshalURL(v interface{}) (url.URL, error) { return url.URL{}, 
errors.New("invalid URL") } -func MarshalLang(t language.Tag) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalLang(t language.Tag) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -51,8 +51,8 @@ func UnmarshalLang(v interface{}) (language.Tag, error) { return language.Tag{}, errors.New("invalid lang") } -func MarshalID(t id.ID) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalID(t id.ID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -64,8 +64,8 @@ func UnmarshalID(v interface{}) (id.ID, error) { return id.ID{}, errors.New("invalid ID") } -func MarshalCursor(t usecase.Cursor) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalCursor(t usecase.Cursor) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(string(t))) }) } @@ -77,8 +77,8 @@ func UnmarshalCursor(v interface{}) (usecase.Cursor, error) { return usecase.Cursor(""), errors.New("invalid cursor") } -func MarshalPluginID(t id.PluginID) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalPluginID(t id.PluginID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -90,8 +90,8 @@ func UnmarshalPluginID(v interface{}) (id.PluginID, error) { return id.PluginID{}, errors.New("invalid ID") } -func MarshalPluginExtensionID(t id.PluginExtensionID) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalPluginExtensionID(t id.PluginExtensionID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -103,8 +103,8 @@ func UnmarshalPluginExtensionID(v interface{}) (id.PluginExtensionID, error) { return 
id.PluginExtensionID(""), errors.New("invalid ID") } -func MarshalPropertySchemaID(t id.PropertySchemaID) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalPropertySchemaID(t id.PropertySchemaID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -116,8 +116,8 @@ func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { return id.PropertySchemaID{}, errors.New("invalid ID") } -func MarshalPropertySchemaFieldID(t id.PropertySchemaFieldID) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalPropertySchemaFieldID(t id.PropertySchemaFieldID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -129,8 +129,8 @@ func UnmarshalPropertySchemaFieldID(v interface{}) (id.PropertySchemaFieldID, er return id.PropertySchemaFieldID(""), errors.New("invalid ID") } -func MarshalDatasetSchemaFieldID(t id.DatasetSchemaFieldID) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalDatasetSchemaFieldID(t id.DatasetSchemaFieldID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) }) } @@ -142,8 +142,8 @@ func UnmarshalDatasetSchemaFieldID(v interface{}) (id.DatasetSchemaFieldID, erro return id.NewDatasetSchemaFieldID(), errors.New("invalid ID") } -func MarshalMap(val map[string]string) graphql1.Marshaler { - return graphql1.WriterFunc(func(w io.Writer) { +func MarshalMap(val map[string]string) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { _ = json.NewEncoder(w).Encode(val) }) } diff --git a/internal/adapter/gql/loader.go b/internal/adapter/gql/loader.go new file mode 100644 index 000000000..b2951a7fc --- /dev/null +++ b/internal/adapter/gql/loader.go @@ -0,0 +1,110 @@ +package gql + +import ( + "context" + "time" + + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +const ( + dataLoaderWait = 1 * time.Millisecond + dataLoaderMaxBatch = 100 +) + +type Loaders struct { + usecases interfaces.Container + Asset *AssetLoader + Dataset *DatasetLoader + Layer *LayerLoader + Plugin *PluginLoader + Project *ProjectLoader + Property *PropertyLoader + Scene *SceneLoader + Team *TeamLoader + User *UserLoader +} + +type DataLoaders struct { + Asset AssetDataLoader + Dataset DatasetDataLoader + DatasetSchema DatasetSchemaDataLoader + LayerItem LayerItemDataLoader + LayerGroup LayerGroupDataLoader + Layer LayerDataLoader + Plugin PluginDataLoader + Project ProjectDataLoader + Property PropertyDataLoader + PropertySchema PropertySchemaDataLoader + Scene SceneDataLoader + Team TeamDataLoader + User UserDataLoader +} + +func NewLoaders(usecases interfaces.Container) Loaders { + return Loaders{ + usecases: usecases, + Asset: NewAssetLoader(usecases.Asset), + Dataset: NewDatasetLoader(usecases.Dataset), + Layer: NewLayerLoader(usecases.Layer), + Plugin: NewPluginLoader(usecases.Plugin), + Project: NewProjectLoader(usecases.Project), + Property: NewPropertyLoader(usecases.Property), + Scene: NewSceneLoader(usecases.Scene), + Team: NewTeamLoader(usecases.Team), + User: NewUserLoader(usecases.User), + } +} + +func (l Loaders) DataLoadersWith(ctx context.Context, enabled bool) DataLoaders { + if enabled { + return l.DataLoaders(ctx) + } + return l.OrdinaryDataLoaders(ctx) +} + +func (l Loaders) DataLoaders(ctx context.Context) DataLoaders { + return DataLoaders{ + Asset: l.Asset.DataLoader(ctx), + Dataset: l.Dataset.DataLoader(ctx), + DatasetSchema: l.Dataset.SchemaDataLoader(ctx), + LayerItem: l.Layer.ItemDataLoader(ctx), + LayerGroup: l.Layer.GroupDataLoader(ctx), + Layer: l.Layer.DataLoader(ctx), + Plugin: l.Plugin.DataLoader(ctx), + Project: l.Project.DataLoader(ctx), + Property: l.Property.DataLoader(ctx), + PropertySchema: l.Property.SchemaDataLoader(ctx), + Scene: 
l.Scene.DataLoader(ctx), + Team: l.Team.DataLoader(ctx), + User: l.User.DataLoader(ctx), + } +} + +func (l Loaders) OrdinaryDataLoaders(ctx context.Context) DataLoaders { + return DataLoaders{ + Asset: l.Asset.OrdinaryDataLoader(ctx), + Dataset: l.Dataset.OrdinaryDataLoader(ctx), + DatasetSchema: l.Dataset.SchemaOrdinaryDataLoader(ctx), + LayerItem: l.Layer.ItemOrdinaryDataLoader(ctx), + LayerGroup: l.Layer.GroupOrdinaryDataLoader(ctx), + Layer: l.Layer.OrdinaryDataLoader(ctx), + Plugin: l.Plugin.OrdinaryDataLoader(ctx), + Project: l.Project.OrdinaryDataLoader(ctx), + Property: l.Property.OrdinaryDataLoader(ctx), + PropertySchema: l.Property.SchemaOrdinaryDataLoader(ctx), + Scene: l.Scene.OrdinaryDataLoader(ctx), + Team: l.Team.OrdinaryDataLoader(ctx), + User: l.User.OrdinaryDataLoader(ctx), + } +} + +type dataLoadersKey struct{} + +func DataLoadersFromContext(ctx context.Context) DataLoaders { + return ctx.Value(dataLoadersKey{}).(DataLoaders) +} + +func DataLoadersKey() interface{} { + return dataLoadersKey{} +} diff --git a/internal/adapter/gql/loader_asset.go b/internal/adapter/gql/loader_asset.go new file mode 100644 index 000000000..936af5194 --- /dev/null +++ b/internal/adapter/gql/loader_asset.go @@ -0,0 +1,100 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AssetLoader struct { + usecase interfaces.Asset +} + +func NewAssetLoader(usecase interfaces.Asset) *AssetLoader { + return &AssetLoader{usecase: usecase} +} + +func (c *AssetLoader) Fetch(ctx context.Context, ids []id.AssetID) ([]*gqlmodel.Asset, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + assets := 
make([]*gqlmodel.Asset, 0, len(res)) + for _, a := range res { + assets = append(assets, gqlmodel.ToAsset(a)) + } + + return assets, nil +} + +func (c *AssetLoader) FindByTeam(ctx context.Context, teamID id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.AssetConnection, error) { + p := usecase.NewPagination(first, last, before, after) + assets, pi, err := c.usecase.FindByTeam(ctx, id.TeamID(teamID), p, getOperator(ctx)) + if err != nil { + return nil, err + } + + edges := make([]*gqlmodel.AssetEdge, 0, len(assets)) + nodes := make([]*gqlmodel.Asset, 0, len(assets)) + for _, a := range assets { + asset := gqlmodel.ToAsset(a) + edges = append(edges, &gqlmodel.AssetEdge{ + Node: asset, + Cursor: usecase.Cursor(asset.ID.String()), + }) + nodes = append(nodes, asset) + } + + return &gqlmodel.AssetConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +// data loader + +type AssetDataLoader interface { + Load(id.AssetID) (*gqlmodel.Asset, error) + LoadAll([]id.AssetID) ([]*gqlmodel.Asset, []error) +} + +func (c *AssetLoader) DataLoader(ctx context.Context) AssetDataLoader { + return gqldataloader.NewAssetLoader(gqldataloader.AssetLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *AssetLoader) OrdinaryDataLoader(ctx context.Context) AssetDataLoader { + return &ordinaryAssetLoader{ctx: ctx, c: c} +} + +type ordinaryAssetLoader struct { + ctx context.Context + c *AssetLoader +} + +func (l *ordinaryAssetLoader) Load(key id.AssetID) (*gqlmodel.Asset, error) { + res, errs := l.c.Fetch(l.ctx, []id.AssetID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryAssetLoader) LoadAll(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { + return l.c.Fetch(l.ctx, 
keys) +} diff --git a/internal/adapter/gql/loader_dataset.go b/internal/adapter/gql/loader_dataset.go new file mode 100644 index 000000000..0c7f0c2e4 --- /dev/null +++ b/internal/adapter/gql/loader_dataset.go @@ -0,0 +1,225 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type DatasetLoader struct { + usecase interfaces.Dataset +} + +func NewDatasetLoader(usecase interfaces.Dataset) *DatasetLoader { + return &DatasetLoader{usecase: usecase} +} + +func (c *DatasetLoader) Fetch(ctx context.Context, ids []id.DatasetID) ([]*gqlmodel.Dataset, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + datasets := make([]*gqlmodel.Dataset, 0, len(res)) + for _, d := range res { + datasets = append(datasets, gqlmodel.ToDataset(d)) + } + + return datasets, nil +} + +func (c *DatasetLoader) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + res, err := c.usecase.FetchSchema(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*gqlmodel.DatasetSchema, 0, len(res)) + for _, d := range res { + schemas = append(schemas, gqlmodel.ToDatasetSchema(d)) + } + + return schemas, nil +} + +func (c *DatasetLoader) GraphFetch(ctx context.Context, i id.DatasetID, depth int) ([]*gqlmodel.Dataset, []error) { + res, err := c.usecase.GraphFetch(ctx, i, depth, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + datasets := make([]*gqlmodel.Dataset, 0, len(res)) + for _, d := range res { + datasets = append(datasets, gqlmodel.ToDataset(d)) + } + + return datasets, nil +} + +func (c *DatasetLoader) 
GraphFetchSchema(ctx context.Context, i id.ID, depth int) ([]*gqlmodel.DatasetSchema, []error) { + res, err := c.usecase.GraphFetchSchema(ctx, id.DatasetSchemaID(i), depth, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*gqlmodel.DatasetSchema, 0, len(res)) + for _, d := range res { + schemas = append(schemas, gqlmodel.ToDatasetSchema(d)) + } + + return schemas, nil +} + +func (c *DatasetLoader) FindSchemaByScene(ctx context.Context, i id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { + res, pi, err := c.usecase.FindSchemaByScene(ctx, id.SceneID(i), usecase.NewPagination(first, last, before, after), getOperator(ctx)) + if err != nil { + return nil, err + } + + edges := make([]*gqlmodel.DatasetSchemaEdge, 0, len(res)) + nodes := make([]*gqlmodel.DatasetSchema, 0, len(res)) + for _, dataset := range res { + ds := gqlmodel.ToDatasetSchema(dataset) + edges = append(edges, &gqlmodel.DatasetSchemaEdge{ + Node: ds, + Cursor: usecase.Cursor(ds.ID.String()), + }) + nodes = append(nodes, ds) + } + + return &gqlmodel.DatasetSchemaConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +func (c *DatasetLoader) FindDynamicSchemasByScene(ctx context.Context, sid id.ID) ([]*gqlmodel.DatasetSchema, error) { + res, err := c.usecase.FindDynamicSchemaByScene(ctx, id.SceneID(sid)) + if err != nil { + return nil, err + } + + dss := []*gqlmodel.DatasetSchema{} + for _, dataset := range res { + dss = append(dss, gqlmodel.ToDatasetSchema(dataset)) + } + + return dss, nil +} + +func (c *DatasetLoader) FindBySchema(ctx context.Context, dsid id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { + p := usecase.NewPagination(first, last, before, after) + res, pi, err2 := c.usecase.FindBySchema(ctx, id.DatasetSchemaID(dsid), p, getOperator(ctx)) + if 
err2 != nil { + return nil, err2 + } + + edges := make([]*gqlmodel.DatasetEdge, 0, len(res)) + nodes := make([]*gqlmodel.Dataset, 0, len(res)) + for _, dataset := range res { + ds := gqlmodel.ToDataset(dataset) + edges = append(edges, &gqlmodel.DatasetEdge{ + Node: ds, + Cursor: usecase.Cursor(ds.ID.String()), + }) + nodes = append(nodes, ds) + } + + conn := &gqlmodel.DatasetConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + } + + return conn, nil +} + +// data loader + +type DatasetDataLoader interface { + Load(id.DatasetID) (*gqlmodel.Dataset, error) + LoadAll([]id.DatasetID) ([]*gqlmodel.Dataset, []error) +} + +func (c *DatasetLoader) DataLoader(ctx context.Context) DatasetDataLoader { + return gqldataloader.NewDatasetLoader(gqldataloader.DatasetLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *DatasetLoader) OrdinaryDataLoader(ctx context.Context) DatasetDataLoader { + return &ordinaryDatasetLoader{ctx: ctx, c: c} +} + +type ordinaryDatasetLoader struct { + ctx context.Context + c *DatasetLoader +} + +func (l *ordinaryDatasetLoader) Load(key id.DatasetID) (*gqlmodel.Dataset, error) { + res, errs := l.c.Fetch(l.ctx, []id.DatasetID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryDatasetLoader) LoadAll(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) { + return l.c.Fetch(l.ctx, keys) +} + +type DatasetSchemaDataLoader interface { + Load(id.DatasetSchemaID) (*gqlmodel.DatasetSchema, error) + LoadAll([]id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) +} + +func (c *DatasetLoader) SchemaDataLoader(ctx context.Context) DatasetSchemaDataLoader { + return gqldataloader.NewDatasetSchemaLoader(gqldataloader.DatasetSchemaLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: 
dataLoaderMaxBatch, + Fetch: func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + return c.FetchSchema(ctx, keys) + }, + }) +} + +func (c *DatasetLoader) SchemaOrdinaryDataLoader(ctx context.Context) DatasetSchemaDataLoader { + return &ordinaryDatasetSchemaLoader{ + fetch: func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + return c.FetchSchema(ctx, keys) + }, + } +} + +type ordinaryDatasetSchemaLoader struct { + fetch func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) +} + +func (l *ordinaryDatasetSchemaLoader) Load(key id.DatasetSchemaID) (*gqlmodel.DatasetSchema, error) { + res, errs := l.fetch([]id.DatasetSchemaID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryDatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_layer.go b/internal/adapter/gql/loader_layer.go new file mode 100644 index 000000000..1beec499b --- /dev/null +++ b/internal/adapter/gql/loader_layer.go @@ -0,0 +1,229 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type LayerLoader struct { + usecase interfaces.Layer +} + +func NewLayerLoader(usecase interfaces.Layer) *LayerLoader { + return &LayerLoader{usecase: usecase} +} + +func (c *LayerLoader) Fetch(ctx context.Context, ids []id.LayerID) ([]*gqlmodel.Layer, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + layers := make([]*gqlmodel.Layer, 0, len(res)) + for _, l := range res { + if l == nil { + layers = append(layers, nil) + } else { + layer := gqlmodel.ToLayer(*l, 
nil) + layers = append(layers, &layer) + } + } + + return layers, nil +} + +func (c *LayerLoader) FetchGroup(ctx context.Context, ids []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + res, err := c.usecase.FetchGroup(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + layerGroups := make([]*gqlmodel.LayerGroup, 0, len(res)) + for _, l := range res { + layerGroups = append(layerGroups, gqlmodel.ToLayerGroup(l, nil)) + } + + return layerGroups, nil +} + +func (c *LayerLoader) FetchItem(ctx context.Context, ids []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + res, err := c.usecase.FetchItem(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + layerItems := make([]*gqlmodel.LayerItem, 0, len(res)) + for _, l := range res { + layerItems = append(layerItems, gqlmodel.ToLayerItem(l, nil)) + } + + return layerItems, nil +} + +func (c *LayerLoader) FetchParent(ctx context.Context, lid id.LayerID) (*gqlmodel.LayerGroup, error) { + res, err := c.usecase.FetchParent(ctx, id.LayerID(lid), getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToLayerGroup(res, nil), nil +} + +func (c *LayerLoader) FetchByProperty(ctx context.Context, pid id.PropertyID) (gqlmodel.Layer, error) { + res, err := c.usecase.FetchByProperty(ctx, pid, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToLayer(res, nil), nil +} + +func (c *LayerLoader) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID) (*gqlmodel.MergedLayer, error) { + res, err2 := c.usecase.FetchMerged(ctx, org, parent, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return gqlmodel.ToMergedLayer(res), nil +} + +func (c *LayerLoader) FetchParentAndMerged(ctx context.Context, org id.LayerID) (*gqlmodel.MergedLayer, error) { + res, err2 := c.usecase.FetchParentAndMerged(ctx, org, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return gqlmodel.ToMergedLayer(res), nil +} + +// 
data loader + +type LayerDataLoader interface { + Load(id.LayerID) (*gqlmodel.Layer, error) + LoadAll([]id.LayerID) ([]*gqlmodel.Layer, []error) +} + +func (c *LayerLoader) DataLoader(ctx context.Context) LayerDataLoader { + return gqldataloader.NewLayerLoader(gqldataloader.LayerLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *LayerLoader) OrdinaryDataLoader(ctx context.Context) LayerDataLoader { + return &ordinaryLayerLoader{ + fetch: func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryLayerLoader struct { + fetch func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) +} + +func (l *ordinaryLayerLoader) Load(key id.LayerID) (*gqlmodel.Layer, error) { + res, errs := l.fetch([]id.LayerID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { + return l.fetch(keys) +} + +type LayerItemDataLoader interface { + Load(id.LayerID) (*gqlmodel.LayerItem, error) + LoadAll([]id.LayerID) ([]*gqlmodel.LayerItem, []error) +} + +func (c *LayerLoader) ItemDataLoader(ctx context.Context) LayerItemDataLoader { + return gqldataloader.NewLayerItemLoader(gqldataloader.LayerItemLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + return c.FetchItem(ctx, keys) + }, + }) +} + +func (c *LayerLoader) ItemOrdinaryDataLoader(ctx context.Context) LayerItemDataLoader { + return &ordinaryLayerItemLoader{ + fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + return c.FetchItem(ctx, keys) + }, + } +} + +type ordinaryLayerItemLoader struct { + fetch func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) +} + +func (l *ordinaryLayerItemLoader) Load(key 
id.LayerID) (*gqlmodel.LayerItem, error) { + res, errs := l.fetch([]id.LayerID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerItemLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + return l.fetch(keys) +} + +type LayerGroupDataLoader interface { + Load(id.LayerID) (*gqlmodel.LayerGroup, error) + LoadAll([]id.LayerID) ([]*gqlmodel.LayerGroup, []error) +} + +func (c *LayerLoader) GroupDataLoader(ctx context.Context) LayerGroupDataLoader { + return gqldataloader.NewLayerGroupLoader(gqldataloader.LayerGroupLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + }) +} + +func (c *LayerLoader) GroupOrdinaryDataLoader(ctx context.Context) LayerGroupDataLoader { + return &ordinaryLayerGroupLoader{ + fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + } +} + +type ordinaryLayerGroupLoader struct { + fetch func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) +} + +func (l *ordinaryLayerGroupLoader) Load(key id.LayerID) (*gqlmodel.LayerGroup, error) { + res, errs := l.fetch([]id.LayerID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryLayerGroupLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_plugin.go b/internal/adapter/gql/loader_plugin.go new file mode 100644 index 000000000..cc52dd29d --- /dev/null +++ b/internal/adapter/gql/loader_plugin.go @@ -0,0 +1,94 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type PluginLoader struct { + usecase interfaces.Plugin +} + +func NewPluginLoader(usecase interfaces.Plugin) *PluginLoader { + return &PluginLoader{usecase: usecase} +} + +func (c *PluginLoader) Fetch(ctx context.Context, ids []id.PluginID) ([]*gqlmodel.Plugin, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + plugins := make([]*gqlmodel.Plugin, 0, len(res)) + for _, pl := range res { + plugins = append(plugins, gqlmodel.ToPlugin(pl)) + } + + return plugins, nil +} + +func (c *PluginLoader) FetchPluginMetadata(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) { + res, err := c.usecase.FetchPluginMetadata(ctx, getOperator(ctx)) + if err != nil { + return nil, err + } + + pluginMetaList := make([]*gqlmodel.PluginMetadata, 0, len(res)) + for _, md := range res { + pm, err := gqlmodel.ToPluginMetadata(md) + if err != nil { + return nil, err + } + pluginMetaList = append(pluginMetaList, pm) + } + + return pluginMetaList, nil +} + +// data loader + +type PluginDataLoader interface { + Load(id.PluginID) (*gqlmodel.Plugin, error) + LoadAll([]id.PluginID) ([]*gqlmodel.Plugin, []error) +} + +func (c *PluginLoader) DataLoader(ctx context.Context) PluginDataLoader { + return gqldataloader.NewPluginLoader(gqldataloader.PluginLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *PluginLoader) OrdinaryDataLoader(ctx context.Context) PluginDataLoader { + return &ordinaryPluginLoader{ + fetch: func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryPluginLoader struct { + fetch func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) +} + +func (l *ordinaryPluginLoader) Load(key id.PluginID) 
(*gqlmodel.Plugin, error) { + res, errs := l.fetch([]id.PluginID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPluginLoader) LoadAll(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_project.go b/internal/adapter/gql/loader_project.go new file mode 100644 index 000000000..b4055fb44 --- /dev/null +++ b/internal/adapter/gql/loader_project.go @@ -0,0 +1,111 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type ProjectLoader struct { + usecase interfaces.Project +} + +func NewProjectLoader(usecase interfaces.Project) *ProjectLoader { + return &ProjectLoader{usecase: usecase} +} + +func (c *ProjectLoader) Fetch(ctx context.Context, ids []id.ProjectID) ([]*gqlmodel.Project, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + projects := make([]*gqlmodel.Project, 0, len(res)) + for _, project := range res { + projects = append(projects, gqlmodel.ToProject(project)) + } + + return projects, nil +} + +func (c *ProjectLoader) FindByTeam(ctx context.Context, teamID id.TeamID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + res, pi, err := c.usecase.FindByTeam(ctx, teamID, usecase.NewPagination(first, last, before, after), getOperator(ctx)) + if err != nil { + return nil, err + } + + edges := make([]*gqlmodel.ProjectEdge, 0, len(res)) + nodes := make([]*gqlmodel.Project, 0, len(res)) + for _, p := range res { + prj := gqlmodel.ToProject(p) + edges = append(edges, 
&gqlmodel.ProjectEdge{ + Node: prj, + Cursor: usecase.Cursor(prj.ID.String()), + }) + nodes = append(nodes, prj) + } + + return &gqlmodel.ProjectConnection{ + Edges: edges, + Nodes: nodes, + PageInfo: gqlmodel.ToPageInfo(pi), + TotalCount: pi.TotalCount(), + }, nil +} + +func (c *ProjectLoader) CheckAlias(ctx context.Context, alias string) (*gqlmodel.CheckProjectAliasPayload, error) { + ok, err := c.usecase.CheckAlias(ctx, alias) + if err != nil { + return nil, err + } + + return &gqlmodel.CheckProjectAliasPayload{Alias: alias, Available: ok}, nil +} + +// data loaders + +type ProjectDataLoader interface { + Load(id.ProjectID) (*gqlmodel.Project, error) + LoadAll([]id.ProjectID) ([]*gqlmodel.Project, []error) +} + +func (c *ProjectLoader) DataLoader(ctx context.Context) ProjectDataLoader { + return gqldataloader.NewProjectLoader(gqldataloader.ProjectLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *ProjectLoader) OrdinaryDataLoader(ctx context.Context) ProjectDataLoader { + return &ordinaryProjectLoader{ + fetch: func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryProjectLoader struct { + fetch func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) +} + +func (l *ordinaryProjectLoader) Load(key id.ProjectID) (*gqlmodel.Project, error) { + res, errs := l.fetch([]id.ProjectID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryProjectLoader) LoadAll(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_property.go b/internal/adapter/gql/loader_property.go new file mode 100644 index 000000000..b562a9acf --- /dev/null +++ b/internal/adapter/gql/loader_property.go @@ -0,0 +1,142 @@ +package gql + +import ( + 
"context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type PropertyLoader struct { + usecase interfaces.Property +} + +func NewPropertyLoader(usecase interfaces.Property) *PropertyLoader { + return &PropertyLoader{usecase: usecase} +} + +func (c *PropertyLoader) Fetch(ctx context.Context, ids []id.PropertyID) ([]*gqlmodel.Property, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + properties := make([]*gqlmodel.Property, 0, len(res)) + for _, property := range res { + properties = append(properties, gqlmodel.ToProperty(property)) + } + + return properties, nil +} + +func (c *PropertyLoader) FetchSchema(ctx context.Context, ids []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + res, err := c.usecase.FetchSchema(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + schemas := make([]*gqlmodel.PropertySchema, 0, len(res)) + for _, propertySchema := range res { + schemas = append(schemas, gqlmodel.ToPropertySchema(propertySchema)) + } + + return schemas, nil +} + +func (c *PropertyLoader) FetchMerged(ctx context.Context, org, parent, linked *id.ID) (*gqlmodel.MergedProperty, error) { + res, err := c.usecase.FetchMerged(ctx, id.PropertyIDFromRefID(org), id.PropertyIDFromRefID(parent), id.DatasetIDFromRefID(linked), getOperator(ctx)) + + if err != nil { + return nil, err + } + + return gqlmodel.ToMergedProperty(res), nil +} + +// data loader + +type PropertyDataLoader interface { + Load(id.PropertyID) (*gqlmodel.Property, error) + LoadAll([]id.PropertyID) ([]*gqlmodel.Property, []error) +} + +func (c *PropertyLoader) DataLoader(ctx context.Context) PropertyDataLoader { + return gqldataloader.NewPropertyLoader(gqldataloader.PropertyLoaderConfig{ 
+ Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *PropertyLoader) OrdinaryDataLoader(ctx context.Context) PropertyDataLoader { + return &ordinaryPropertyLoader{ + fetch: func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryPropertyLoader struct { + fetch func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) +} + +func (l *ordinaryPropertyLoader) Load(key id.PropertyID) (*gqlmodel.Property, error) { + res, errs := l.fetch([]id.PropertyID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPropertyLoader) LoadAll(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { + return l.fetch(keys) +} + +type PropertySchemaDataLoader interface { + Load(id.PropertySchemaID) (*gqlmodel.PropertySchema, error) + LoadAll([]id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) +} + +func (c *PropertyLoader) SchemaDataLoader(ctx context.Context) PropertySchemaDataLoader { + return gqldataloader.NewPropertySchemaLoader(gqldataloader.PropertySchemaLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + return c.FetchSchema(ctx, keys) + }, + }) +} + +func (c *PropertyLoader) SchemaOrdinaryDataLoader(ctx context.Context) PropertySchemaDataLoader { + return &ordinaryPropertySchemaLoader{ + fetch: func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + return c.FetchSchema(ctx, keys) + }, + } +} + +type ordinaryPropertySchemaLoader struct { + fetch func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) +} + +func (l *ordinaryPropertySchemaLoader) Load(key id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { + res, errs := l.fetch([]id.PropertySchemaID{key}) + if 
len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryPropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_scene.go b/internal/adapter/gql/loader_scene.go new file mode 100644 index 000000000..4391c26a9 --- /dev/null +++ b/internal/adapter/gql/loader_scene.go @@ -0,0 +1,110 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type SceneLoader struct { + usecase interfaces.Scene +} + +func NewSceneLoader(usecase interfaces.Scene) *SceneLoader { + return &SceneLoader{usecase: usecase} +} + +func (c *SceneLoader) Fetch(ctx context.Context, ids []id.SceneID) ([]*gqlmodel.Scene, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + scenes := make([]*gqlmodel.Scene, 0, len(res)) + for _, scene := range res { + scenes = append(scenes, gqlmodel.ToScene(scene)) + } + return scenes, nil +} + +func (c *SceneLoader) FindByProject(ctx context.Context, projectID id.ProjectID) (*gqlmodel.Scene, error) { + res, err := c.usecase.FindByProject(ctx, projectID, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToScene(res), nil +} + +func (c *SceneLoader) FetchLock(ctx context.Context, sid id.SceneID) (*gqlmodel.SceneLockMode, error) { + res, err := c.usecase.FetchLock(ctx, []id.SceneID{sid}, getOperator(ctx)) + if err != nil { + return nil, err + } + if len(res) == 0 { + return nil, nil + } + sl := gqlmodel.ToSceneLockMode(res[0]) + return &sl, nil +} + +func (c *SceneLoader) FetchLockAll(ctx context.Context, sid []id.SceneID) ([]gqlmodel.SceneLockMode, []error)
{ + res, err := c.usecase.FetchLock(ctx, sid, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + res2 := make([]gqlmodel.SceneLockMode, 0, len(res)) + for _, r := range res { + res2 = append(res2, gqlmodel.ToSceneLockMode(r)) + } + + return res2, nil +} + +// data loader + +type SceneDataLoader interface { + Load(id.SceneID) (*gqlmodel.Scene, error) + LoadAll([]id.SceneID) ([]*gqlmodel.Scene, []error) +} + +func (c *SceneLoader) DataLoader(ctx context.Context) SceneDataLoader { + return gqldataloader.NewSceneLoader(gqldataloader.SceneLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *SceneLoader) OrdinaryDataLoader(ctx context.Context) SceneDataLoader { + return &ordinarySceneLoader{ + fetch: func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinarySceneLoader struct { + fetch func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) +} + +func (l *ordinarySceneLoader) Load(key id.SceneID) (*gqlmodel.Scene, error) { + res, errs := l.fetch([]id.SceneID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinarySceneLoader) LoadAll(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_team.go b/internal/adapter/gql/loader_team.go new file mode 100644 index 000000000..0e041df98 --- /dev/null +++ b/internal/adapter/gql/loader_team.go @@ -0,0 +1,87 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type TeamLoader struct { + usecase interfaces.Team +} + +func NewTeamLoader(usecase 
interfaces.Team) *TeamLoader { + return &TeamLoader{usecase: usecase} +} + +func (c *TeamLoader) Fetch(ctx context.Context, ids []id.TeamID) ([]*gqlmodel.Team, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + teams := make([]*gqlmodel.Team, 0, len(res)) + for _, t := range res { + teams = append(teams, gqlmodel.ToTeam(t)) + } + return teams, nil +} + +func (c *TeamLoader) FindByUser(ctx context.Context, uid id.UserID) ([]*gqlmodel.Team, error) { + res, err := c.usecase.FindByUser(ctx, uid, getOperator(ctx)) + if err != nil { + return nil, err + } + teams := make([]*gqlmodel.Team, 0, len(res)) + for _, t := range res { + teams = append(teams, gqlmodel.ToTeam(t)) + } + return teams, nil +} + +// data loader + +type TeamDataLoader interface { + Load(id.TeamID) (*gqlmodel.Team, error) + LoadAll([]id.TeamID) ([]*gqlmodel.Team, []error) +} + +func (c *TeamLoader) DataLoader(ctx context.Context) TeamDataLoader { + return gqldataloader.NewTeamLoader(gqldataloader.TeamLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.TeamID) ([]*gqlmodel.Team, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *TeamLoader) OrdinaryDataLoader(ctx context.Context) TeamDataLoader { + return &ordinaryTeamLoader{ + fetch: func(keys []id.TeamID) ([]*gqlmodel.Team, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryTeamLoader struct { + fetch func(keys []id.TeamID) ([]*gqlmodel.Team, []error) +} + +func (l *ordinaryTeamLoader) Load(key id.TeamID) (*gqlmodel.Team, error) { + res, errs := l.fetch([]id.TeamID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryTeamLoader) LoadAll(keys []id.TeamID) ([]*gqlmodel.Team, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/loader_user.go b/internal/adapter/gql/loader_user.go new file mode 100644 index 
000000000..a18bf4be3 --- /dev/null +++ b/internal/adapter/gql/loader_user.go @@ -0,0 +1,85 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type UserLoader struct { + usecase interfaces.User +} + +func NewUserLoader(usecase interfaces.User) *UserLoader { + return &UserLoader{usecase: usecase} +} + +func (c *UserLoader) Fetch(ctx context.Context, ids []id.UserID) ([]*gqlmodel.User, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + users := make([]*gqlmodel.User, 0, len(res)) + for _, u := range res { + users = append(users, gqlmodel.ToUser(u)) + } + + return users, nil +} + +func (c *UserLoader) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) { + res, err := c.usecase.SearchUser(ctx, nameOrEmail, getOperator(ctx)) + if err != nil { + return nil, err + } + + return gqlmodel.ToSearchedUser(res), nil +} + +// data loader + +type UserDataLoader interface { + Load(id.UserID) (*gqlmodel.User, error) + LoadAll([]id.UserID) ([]*gqlmodel.User, []error) +} + +func (c *UserLoader) DataLoader(ctx context.Context) UserDataLoader { + return gqldataloader.NewUserLoader(gqldataloader.UserLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.UserID) ([]*gqlmodel.User, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *UserLoader) OrdinaryDataLoader(ctx context.Context) UserDataLoader { + return &ordinaryUserLoader{ + fetch: func(keys []id.UserID) ([]*gqlmodel.User, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryUserLoader struct { + fetch func(keys []id.UserID) ([]*gqlmodel.User, []error) +} + +func (l *ordinaryUserLoader) Load(key id.UserID) 
(*gqlmodel.User, error) { + res, errs := l.fetch([]id.UserID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (l *ordinaryUserLoader) LoadAll(keys []id.UserID) ([]*gqlmodel.User, []error) { + return l.fetch(keys) +} diff --git a/internal/adapter/gql/resolver.go b/internal/adapter/gql/resolver.go new file mode 100644 index 000000000..e708250ab --- /dev/null +++ b/internal/adapter/gql/resolver.go @@ -0,0 +1,28 @@ +//go:generate go run github.com/99designs/gqlgen + +package gql + +import ( + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +// THIS CODE IS A STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES. + +var ErrNotImplemented = errors.New("not implemented yet") +var ErrUnauthorized = errors.New("unauthorized") + +type Resolver struct { + usecases interfaces.Container + loaders Loaders + debug bool +} + +func NewResolver(loaders Loaders, debug bool) ResolverRoot { + return &Resolver{ + usecases: loaders.usecases, + loaders: loaders, + debug: debug, + } +} diff --git a/internal/adapter/gql/resolver_asset.go b/internal/adapter/gql/resolver_asset.go new file mode 100644 index 000000000..b10cd1c4c --- /dev/null +++ b/internal/adapter/gql/resolver_asset.go @@ -0,0 +1,21 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Asset() AssetResolver { + return &assetResolver{r} +} + +type assetResolver struct{ *Resolver } + +func (r *assetResolver) Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) +} diff --git a/internal/graphql/resolver_dataset.go b/internal/adapter/gql/resolver_dataset.go similarity index 51% rename from internal/graphql/resolver_dataset.go rename to
internal/adapter/gql/resolver_dataset.go index b78a1369c..511a71d67 100644 --- a/internal/graphql/resolver_dataset.go +++ b/internal/adapter/gql/resolver_dataset.go @@ -1,10 +1,9 @@ -package graphql +package gql import ( "context" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) @@ -18,18 +17,18 @@ func (r *Resolver) DatasetField() DatasetFieldResolver { type datasetResolver struct{ *Resolver } -func (r *datasetResolver) Schema(ctx context.Context, obj *graphql1.Dataset) (*graphql1.DatasetSchema, error) { +func (r *datasetResolver) Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) } -func (r *datasetResolver) Name(ctx context.Context, obj *graphql1.Dataset) (*string, error) { +func (r *datasetResolver) Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) { exit := trace(ctx) defer exit() - ds, err := dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + ds, err := DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) if err != nil || ds == nil || ds.RepresentativeFieldID == nil { return nil, err } @@ -46,22 +45,22 @@ func (r *datasetResolver) Name(ctx context.Context, obj *graphql1.Dataset) (*str type datasetFieldResolver struct{ *Resolver } -func (r *datasetFieldResolver) Field(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchemaField, error) { +func (r *datasetFieldResolver) Field(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchemaField, error) { exit := trace(ctx) defer exit() - ds, 
err := dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + ds, err := DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) return ds.Field(obj.FieldID), err } -func (r *datasetFieldResolver) Schema(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.DatasetSchema, error) { +func (r *datasetFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) } -func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *graphql1.DatasetField) (*graphql1.Dataset, error) { +func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) { exit := trace(ctx) defer exit() @@ -72,5 +71,5 @@ func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *graphql1.Datas if !ok { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(idstr)) + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(idstr)) } diff --git a/internal/graphql/resolver_dataset_schema.go b/internal/adapter/gql/resolver_dataset_schema.go similarity index 56% rename from internal/graphql/resolver_dataset_schema.go rename to internal/adapter/gql/resolver_dataset_schema.go index 054dca1da..4b4e77920 100644 --- a/internal/graphql/resolver_dataset_schema.go +++ b/internal/adapter/gql/resolver_dataset_schema.go @@ -1,10 +1,9 @@ -package graphql +package gql import ( "context" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" 
"github.com/reearth/reearth-backend/pkg/id" ) @@ -19,14 +18,14 @@ func (r *Resolver) DatasetSchemaField() DatasetSchemaFieldResolver { type datasetSchemaResolver struct{ *Resolver } -func (r *datasetSchemaResolver) Scene(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.Scene, error) { +func (r *datasetSchemaResolver) Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) } -func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *graphql1.DatasetSchema) (*graphql1.DatasetSchemaField, error) { +func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) { exit := trace(ctx) defer exit() @@ -42,28 +41,28 @@ func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *gr return nil, nil } -func (r *datasetSchemaResolver) Datasets(ctx context.Context, obj *graphql1.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) { +func (r *datasetSchemaResolver) Datasets(ctx context.Context, obj *gqlmodel.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.DatasetController.FindBySchema(ctx, obj.ID, first, last, before, after, getOperator(ctx)) + return r.loaders.Dataset.FindBySchema(ctx, obj.ID, first, last, before, after) } type datasetSchemaFieldResolver struct{ *Resolver } -func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) { +func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) 
{ exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) } -func (r *datasetSchemaFieldResolver) Ref(ctx context.Context, obj *graphql1.DatasetSchemaField) (*graphql1.DatasetSchema, error) { +func (r *datasetSchemaFieldResolver) Ref(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { exit := trace(ctx) defer exit() if obj.RefID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.RefID)) + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.RefID)) } diff --git a/internal/adapter/gql/resolver_layer.go b/internal/adapter/gql/resolver_layer.go new file mode 100644 index 000000000..149dc8acd --- /dev/null +++ b/internal/adapter/gql/resolver_layer.go @@ -0,0 +1,459 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) LayerItem() LayerItemResolver { + return &layerItemResolver{r} +} + +func (r *Resolver) LayerGroup() LayerGroupResolver { + return &layerGroupResolver{r} +} + +func (r *Resolver) Infobox() InfoboxResolver { + return &infoboxResolver{r} +} + +func (r *Resolver) InfoboxField() InfoboxFieldResolver { + return &infoboxFieldResolver{r} +} + +func (r *Resolver) MergedLayer() MergedLayerResolver { + return &mergedLayerResolver{r} +} + +func (r *Resolver) MergedInfobox() MergedInfoboxResolver { + return &mergedInfoboxResolver{r} +} + +func (r *Resolver) MergedInfoboxField() MergedInfoboxFieldResolver { + return &mergedInfoboxFieldResolver{r} +} + +type infoboxResolver struct{ *Resolver } + +func (r *infoboxResolver) Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + return 
DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} + +func (r *infoboxResolver) Layer(ctx context.Context, obj *gqlmodel.Infobox) (gqlmodel.Layer, error) { + exit := trace(ctx) + defer exit() + + layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil || layer == nil { + return nil, err + } + return *layer, nil +} + +func (r *infoboxResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (r *infoboxResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.MergedInfobox, error) { + exit := trace(ctx) + defer exit() + + ml, err := r.loaders.Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) + if err != nil || ml == nil { + return nil, err + } + return ml.Infobox, nil +} + +func (r *infoboxResolver) Scene(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil || layer == nil { + return nil, err + } + var pluginID *id.PluginID + if lg, ok := (*layer).(*gqlmodel.LayerGroup); ok { + pluginID = lg.PluginID + } else if li, ok := (*layer).(*gqlmodel.LayerItem); ok { + pluginID = li.PluginID + } + if pluginID == nil { + return nil, nil + } + + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(*pluginID), nil +} + +type infoboxFieldResolver struct{ *Resolver } + +func (r *infoboxFieldResolver) Layer(ctx 
context.Context, obj *gqlmodel.InfoboxField) (gqlmodel.Layer, error) { + exit := trace(ctx) + defer exit() + + layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil { + return nil, err + } + return *layer, nil +} + +func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Infobox, error) { + exit := trace(ctx) + defer exit() + + layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + if err != nil || layer == nil { + return nil, err + } + layer2 := (*layer).(*gqlmodel.LayerItem) + if layer2 == nil { + return nil, nil + } + return layer2.Infobox, nil +} + +func (r *infoboxFieldResolver) Property(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} + +func (r *infoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Plugin, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *infoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + plugin, err := DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(obj.ExtensionID), nil +} + +func (r *infoboxFieldResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.MergedInfoboxField, error) { + exit := trace(ctx) + defer exit() + + ml, err := r.loaders.Layer.FetchParentAndMerged(ctx, 
id.LayerID(obj.LayerID)) + if err != nil || ml == nil || ml.Infobox == nil { + return nil, err + } + return ml.Infobox.Field(obj.ID), nil +} + +func (r *infoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(obj.PluginID), nil +} + +type layerGroupResolver struct{ *Resolver } + +func (r *layerGroupResolver) Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID != nil { + return DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + } + return r.loaders.Layer.FetchParent(ctx, id.LayerID(obj.ID)) +} + +func (r *layerGroupResolver) Property(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.PropertyID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) +} + +func (r *layerGroupResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Plugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) +} + +func (r *layerGroupResolver) Extension(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil || obj.ExtensionID == nil { + return nil, nil + } + plugin, err := DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + if err != nil { + return nil, err + } + return 
plugin.Extension(*obj.ExtensionID), nil +} + +func (r *layerGroupResolver) ParentLayer(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + return r.loaders.Layer.FetchParent(ctx, id.LayerID(obj.ID)) +} + +func (r *layerGroupResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetSchemaID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) +} + +func (r *layerGroupResolver) Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) { + exit := trace(ctx) + defer exit() + + layers, err := DataLoadersFromContext(ctx).Layer.LoadAll(id.LayerIDsFromIDRef(obj.LayerIds)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return gqlmodel.AttachParentLayer(layers, obj.ID), nil +} + +func (r *layerGroupResolver) Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(*obj.PluginID), nil +} + +type layerItemResolver struct{ *Resolver } + +func (r *layerItemResolver) Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID != nil { + return DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + } + return r.loaders.Layer.FetchParent(ctx, id.LayerID(obj.ID)) +} + +func (r *layerItemResolver) Property(ctx 
context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.PropertyID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) +} + +func (r *layerItemResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Plugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) +} + +func (r *layerItemResolver) Extension(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil || obj.ExtensionID == nil { + return nil, nil + } + plugin, err := DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(*obj.ExtensionID), nil +} + +func (r *layerItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (r *layerItemResolver) Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return r.loaders.Layer.FetchParentAndMerged(ctx, id.LayerID(obj.ID)) + } + return r.loaders.Layer.FetchMerged(ctx, id.LayerID(obj.ID), id.LayerIDFromRefID(obj.ParentID)) +} + +func (r *layerItemResolver) Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + if obj.PluginID == nil { + return nil, 
nil + } + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(*obj.PluginID), nil +} + +type mergedLayerResolver struct{ *Resolver } + +func (r *mergedLayerResolver) Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).LayerItem.Load(id.LayerID(obj.OriginalID)) +} + +func (r *mergedLayerResolver) Parent(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) +} + +func (r *mergedLayerResolver) Scene(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +type mergedInfoboxResolver struct{ *Resolver } + +func (r *mergedInfoboxResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +type mergedInfoboxFieldResolver struct{ *Resolver } + +func (r *mergedInfoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Plugin, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + plugin, err := DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + return plugin.Extension(obj.ExtensionID), nil +} + +func (r *mergedInfoboxFieldResolver) Scene(ctx 
context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.ScenePlugin, error) { + exit := trace(ctx) + defer exit() + + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + if err != nil { + return nil, err + } + return s.Plugin(obj.PluginID), nil +} diff --git a/internal/adapter/gql/resolver_mutation.go b/internal/adapter/gql/resolver_mutation.go new file mode 100644 index 000000000..eb6a29a89 --- /dev/null +++ b/internal/adapter/gql/resolver_mutation.go @@ -0,0 +1,7 @@ +package gql + +func (r *Resolver) Mutation() MutationResolver { + return &mutationResolver{r} +} + +type mutationResolver struct{ *Resolver } diff --git a/internal/adapter/gql/resolver_mutation_asset.go b/internal/adapter/gql/resolver_mutation_asset.go new file mode 100644 index 000000000..a22e5dc87 --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_asset.go @@ -0,0 +1,36 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) CreateAsset(ctx context.Context, input gqlmodel.CreateAssetInput) (*gqlmodel.CreateAssetPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Asset.Create(ctx, interfaces.CreateAssetParam{ + TeamID: id.TeamID(input.TeamID), + File: gqlmodel.FromFile(&input.File), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateAssetPayload{Asset: gqlmodel.ToAsset(res)}, nil +} + +func (r *mutationResolver) RemoveAsset(ctx context.Context, input gqlmodel.RemoveAssetInput) (*gqlmodel.RemoveAssetPayload, error) { + exit := trace(ctx) + defer exit() + + 
res, err2 := r.usecases.Asset.Remove(ctx, id.AssetID(input.AssetID), getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return &gqlmodel.RemoveAssetPayload{AssetID: res.ID()}, nil +} diff --git a/internal/adapter/gql/resolver_mutation_dataset.go b/internal/adapter/gql/resolver_mutation_dataset.go new file mode 100644 index 000000000..51a2d652d --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_dataset.go @@ -0,0 +1,148 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Dataset.UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ + SchemaId: id.DatasetSchemaID(input.SchemaID), + Name: input.Name, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Dataset.AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ + SceneId: id.SceneID(input.SceneID), + }) + if err != nil { + return nil, err + } + + return &gqlmodel.AddDynamicDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input gqlmodel.AddDynamicDatasetInput) (*gqlmodel.AddDynamicDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + dss, ds, err := r.usecases.Dataset.AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ + SchemaId: 
id.DatasetSchemaID(input.DatasetSchemaID), + Author: input.Author, + Content: input.Content, + Lat: input.Lat, + Lng: input.Lng, + Target: input.Target, + }) + if err != nil { + return nil, err + } + + return &gqlmodel.AddDynamicDatasetPayload{DatasetSchema: gqlmodel.ToDatasetSchema(dss), Dataset: gqlmodel.ToDataset(ds)}, nil +} + +func (r *mutationResolver) SyncDataset(ctx context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + dss, ds, err := r.usecases.Dataset.Sync(ctx, id.SceneID(input.SceneID), input.URL, getOperator(ctx)) + if err != nil { + return nil, err + } + + schemas := make([]*gqlmodel.DatasetSchema, 0, len(dss)) + datasets := make([]*gqlmodel.Dataset, 0, len(ds)) + for _, d := range dss { + schemas = append(schemas, gqlmodel.ToDatasetSchema(d)) + } + for _, d := range ds { + datasets = append(datasets, gqlmodel.ToDataset(d)) + } + + return &gqlmodel.SyncDatasetPayload{ + SceneID: input.SceneID, + URL: input.URL, + DatasetSchema: schemas, + Dataset: datasets, + }, nil +} + +func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Dataset.RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ + SchemaId: id.DatasetSchemaID(input.SchemaID), + Force: input.Force, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveDatasetSchemaPayload{SchemaID: res.ID()}, nil +} + +func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input gqlmodel.AddDatasetSchemaInput) (*gqlmodel.AddDatasetSchemaPayload, error) { + exit := trace(ctx) + defer exit() + + res, err2 := r.usecases.Dataset.AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ + SceneId: id.SceneID(input.SceneID), + Name: input.Name, + RepresentativeField: id.DatasetSchemaFieldIDFromRefID(input.Representativefield), + 
}, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return &gqlmodel.AddDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) ImportDataset(ctx context.Context, input gqlmodel.ImportDatasetInput) (*gqlmodel.ImportDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Dataset.ImportDataset(ctx, interfaces.ImportDatasetParam{ + SceneId: id.SceneID(input.SceneID), + SchemaId: id.DatasetSchemaIDFromRefID(input.DatasetSchemaID), + File: gqlmodel.FromFile(&input.File), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ImportDatasetPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} + +func (r *mutationResolver) ImportDatasetFromGoogleSheet(ctx context.Context, input gqlmodel.ImportDatasetFromGoogleSheetInput) (*gqlmodel.ImportDatasetPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Dataset.ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ + Token: input.AccessToken, + FileID: input.FileID, + SheetName: input.SheetName, + SceneId: id.SceneID(input.SceneID), + SchemaId: id.DatasetSchemaIDFromRefID(input.DatasetSchemaID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ImportDatasetPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil +} diff --git a/internal/adapter/gql/resolver_mutation_layer.go b/internal/adapter/gql/resolver_mutation_layer.go new file mode 100644 index 000000000..3b1123295 --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_layer.go @@ -0,0 +1,216 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddLayerItemInput) (*gqlmodel.AddLayerItemPayload, error) { 
+ exit := trace(ctx) + defer exit() + + layer, parent, err := r.usecases.Layer.AddItem(ctx, interfaces.AddLayerItemInput{ + ParentLayerID: id.LayerID(input.ParentLayerID), + PluginID: &input.PluginID, + ExtensionID: &input.ExtensionID, + Index: input.Index, + Name: gqlmodel.RefToString(input.Name), + LatLng: gqlmodel.ToPropertyLatLng(input.Lat, input.Lng), + // LinkedDatasetID: input.LinkedDatasetID, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddLayerItemPayload{ + Layer: gqlmodel.ToLayerItem(layer, parent.IDRef()), + ParentLayer: gqlmodel.ToLayerGroup(parent, nil), + Index: input.Index, + }, nil +} + +func (r *mutationResolver) AddLayerGroup(ctx context.Context, input gqlmodel.AddLayerGroupInput) (*gqlmodel.AddLayerGroupPayload, error) { + exit := trace(ctx) + defer exit() + + layer, parent, err := r.usecases.Layer.AddGroup(ctx, interfaces.AddLayerGroupInput{ + ParentLayerID: id.LayerID(input.ParentLayerID), + PluginID: input.PluginID, + ExtensionID: input.ExtensionID, + Index: input.Index, + Name: gqlmodel.RefToString(input.Name), + LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(input.LinkedDatasetSchemaID), + RepresentativeFieldId: input.RepresentativeFieldID, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddLayerGroupPayload{ + Layer: gqlmodel.ToLayerGroup(layer, parent.IDRef()), + ParentLayer: gqlmodel.ToLayerGroup(parent, nil), + Index: input.Index, + }, nil +} + +func (r *mutationResolver) RemoveLayer(ctx context.Context, input gqlmodel.RemoveLayerInput) (*gqlmodel.RemoveLayerPayload, error) { + exit := trace(ctx) + defer exit() + + id, layer, err := r.usecases.Layer.Remove(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveLayerPayload{ + LayerID: id.ID(), + ParentLayer: gqlmodel.ToLayerGroup(layer, nil), + }, nil +} + +func (r *mutationResolver) UpdateLayer(ctx context.Context, input gqlmodel.UpdateLayerInput) 
(*gqlmodel.UpdateLayerPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.Update(ctx, interfaces.UpdateLayerInput{ + LayerID: id.LayerID(input.LayerID), + Name: input.Name, + Visible: input.Visible, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLayerInput) (*gqlmodel.MoveLayerPayload, error) { + exit := trace(ctx) + defer exit() + + targetLayerID, layerGroupFrom, layerGroupTo, index, err := r.usecases.Layer.Move(ctx, interfaces.MoveLayerInput{ + LayerID: id.LayerID(input.LayerID), + DestLayerID: id.LayerIDFromRefID(input.DestLayerID), + Index: gqlmodel.RefToIndex(input.Index), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.MoveLayerPayload{ + LayerID: targetLayerID.ID(), + FromParentLayer: gqlmodel.ToLayerGroup(layerGroupFrom, nil), + ToParentLayer: gqlmodel.ToLayerGroup(layerGroupTo, nil), + Index: index, + }, nil +} + +func (r *mutationResolver) CreateInfobox(ctx context.Context, input gqlmodel.CreateInfoboxInput) (*gqlmodel.CreateInfoboxPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.CreateInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateInfoboxPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) RemoveInfobox(ctx context.Context, input gqlmodel.RemoveInfoboxInput) (*gqlmodel.RemoveInfoboxPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.RemoveInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveInfoboxPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) AddInfoboxField(ctx context.Context, input 
gqlmodel.AddInfoboxFieldInput) (*gqlmodel.AddInfoboxFieldPayload, error) { + exit := trace(ctx) + defer exit() + + infoboxField, layer, err := r.usecases.Layer.AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ + LayerID: id.LayerID(input.LayerID), + PluginID: input.PluginID, + ExtensionID: input.ExtensionID, + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddInfoboxFieldPayload{ + InfoboxField: gqlmodel.ToInfoboxField(infoboxField, layer.Scene(), nil), + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) { + exit := trace(ctx) + defer exit() + + infoboxField, layer, index, err := r.usecases.Layer.MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ + LayerID: id.LayerID(input.LayerID), + InfoboxFieldID: id.InfoboxFieldID(input.InfoboxFieldID), + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.MoveInfoboxFieldPayload{ + InfoboxFieldID: infoboxField.ID(), + Layer: gqlmodel.ToLayer(layer, nil), + Index: index, + }, nil +} + +func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) { + exit := trace(ctx) + defer exit() + + infoboxField, layer, err := r.usecases.Layer.RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ + LayerID: id.LayerID(input.LayerID), + InfoboxFieldID: id.InfoboxFieldID(input.InfoboxFieldID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveInfoboxFieldPayload{ + InfoboxFieldID: infoboxField.ID(), + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) { + exit := trace(ctx) + defer exit() + + l, l2, err := 
r.usecases.Layer.ImportLayer(ctx, interfaces.ImportLayerParam{ + LayerID: id.LayerID(input.LayerID), + File: gqlmodel.FromFile(&input.File), + Format: gqlmodel.FromLayerEncodingFormat(input.Format), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ImportLayerPayload{ + Layers: gqlmodel.ToLayers(l, l2.IDRef()), + ParentLayer: gqlmodel.ToLayerGroup(l2, nil), + }, err +} diff --git a/internal/adapter/gql/resolver_mutation_project.go b/internal/adapter/gql/resolver_mutation_project.go new file mode 100644 index 000000000..1cc522553 --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_project.go @@ -0,0 +1,96 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/visualizer" +) + +func (r *mutationResolver) CreateProject(ctx context.Context, input gqlmodel.CreateProjectInput) (*gqlmodel.ProjectPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Project.Create(ctx, interfaces.CreateProjectParam{ + TeamID: id.TeamID(input.TeamID), + Visualizer: visualizer.Visualizer(input.Visualizer), + Name: input.Name, + Description: input.Description, + ImageURL: input.ImageURL, + Alias: input.Alias, + Archived: input.Archived, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectPayload{Project: gqlmodel.ToProject(res)}, nil +} + +func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.UpdateProjectInput) (*gqlmodel.ProjectPayload, error) { + exit := trace(ctx) + defer exit() + + deletePublicImage := false + if input.DeletePublicImage != nil { + deletePublicImage = *input.DeletePublicImage + } + + deleteImageURL := false + if input.DeleteImageURL != nil { + deleteImageURL = *input.DeleteImageURL + } + + res, err := r.usecases.Project.Update(ctx, 
interfaces.UpdateProjectParam{ + ID: id.ProjectID(input.ProjectID), + Name: input.Name, + Description: input.Description, + Alias: input.Alias, + ImageURL: input.ImageURL, + Archived: input.Archived, + IsBasicAuthActive: input.IsBasicAuthActive, + BasicAuthUsername: input.BasicAuthUsername, + BasicAuthPassword: input.BasicAuthPassword, + PublicTitle: input.PublicTitle, + PublicDescription: input.PublicDescription, + PublicImage: input.PublicImage, + PublicNoIndex: input.PublicNoIndex, + DeletePublicImage: deletePublicImage, + DeleteImageURL: deleteImageURL, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectPayload{Project: gqlmodel.ToProject(res)}, nil +} + +func (r *mutationResolver) PublishProject(ctx context.Context, input gqlmodel.PublishProjectInput) (*gqlmodel.ProjectPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Project.Publish(ctx, interfaces.PublishProjectParam{ + ID: id.ProjectID(input.ProjectID), + Alias: input.Alias, + Status: gqlmodel.FromPublishmentStatus(input.Status), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.ProjectPayload{Project: gqlmodel.ToProject(res)}, nil +} + +func (r *mutationResolver) DeleteProject(ctx context.Context, input gqlmodel.DeleteProjectInput) (*gqlmodel.DeleteProjectPayload, error) { + exit := trace(ctx) + defer exit() + + err := r.usecases.Project.Delete(ctx, id.ProjectID(input.ProjectID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.DeleteProjectPayload{ProjectID: input.ProjectID}, nil +} diff --git a/internal/adapter/gql/resolver_mutation_property.go b/internal/adapter/gql/resolver_mutation_property.go new file mode 100644 index 000000000..b6cf3c3ac --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_property.go @@ -0,0 +1,207 @@ +package gql + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" +) + +func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmodel.UpdatePropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + v, ok := gqlmodel.FromPropertyValueAndType(input.Value, input.Type) + if !ok { + return nil, errors.New("invalid value") + } + + pp, pgl, pg, pf, err := r.usecases.Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Value: v, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(pp), + PropertyField: gqlmodel.ToPropertyField(pf, pp, pgl, pg), + }, nil +} + +func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmodel.RemovePropertyFieldInput) (*gqlmodel.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + p, err := r.usecases.Property.RemoveField(ctx, interfaces.RemovePropertyFieldParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + }, nil +} + +func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmodel.UploadFileToPropertyInput) (*gqlmodel.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + p, pgl, pg, pf, err := r.usecases.Property.UploadFile(ctx, interfaces.UploadFileParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + File: gqlmodel.FromFile(&input.File), + }, getOperator(ctx)) + if err != nil { 
+ return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + PropertyField: gqlmodel.ToPropertyField(pf, p, pgl, pg), + }, nil +} + +func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input gqlmodel.LinkDatasetToPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + p, pgl, pg, pf, err := r.usecases.Property.LinkValue(ctx, interfaces.LinkPropertyValueParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Links: gqlmodel.FromPropertyFieldLink( + input.DatasetSchemaIds, + input.DatasetIds, + input.DatasetSchemaFieldIds, + ), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + PropertyField: gqlmodel.ToPropertyField(pf, p, pgl, pg), + }, nil +} + +func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmodel.UnlinkPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + exit := trace(ctx) + defer exit() + + p, pgl, pg, pf, err := r.usecases.Property.UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyFieldPayload{ + Property: gqlmodel.ToProperty(p), + PropertyField: gqlmodel.ToPropertyField(pf, p, pgl, pg), + }, nil +} + +func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.AddPropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + var v *property.Value + if input.NameFieldType != nil { + v, _ = gqlmodel.FromPropertyValueAndType(input.NameFieldValue, *input.NameFieldType) + } + + p, pgl, pi, err := r.usecases.Property.AddItem(ctx, interfaces.AddPropertyItemParam{ + 
PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(&input.SchemaItemID, nil, nil), + Index: input.Index, + NameFieldValue: v, + }, getOperator(ctx)) + + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + PropertyItem: gqlmodel.ToPropertyItem(pi, p, pgl), + }, nil +} + +func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel.MovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + p, pgl, pi, err := r.usecases.Property.MoveItem(ctx, interfaces.MovePropertyItemParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(&input.SchemaItemID, &input.ItemID, nil), + Index: input.Index, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + PropertyItem: gqlmodel.ToPropertyItem(pi, p, pgl), + }, nil +} + +func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmodel.RemovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + p, err := r.usecases.Property.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(&input.SchemaItemID, &input.ItemID, nil), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + }, nil +} + +func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmodel.UpdatePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + exit := trace(ctx) + defer exit() + + op := make([]interfaces.UpdatePropertyItemsOperationParam, 0, len(input.Operations)) + for _, o := range input.Operations { + var v *property.Value + if o.NameFieldType != nil { + v, _ = gqlmodel.FromPropertyValueAndType(o.NameFieldValue, *o.NameFieldType) + } + + op = 
append(op, interfaces.UpdatePropertyItemsOperationParam{ + Operation: gqlmodel.FromListOperation(o.Operation), + ItemID: id.PropertyItemIDFromRefID(o.ItemID), + Index: o.Index, + NameFieldValue: v, + }) + } + + p, err2 := r.usecases.Property.UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ + PropertyID: id.PropertyID(input.PropertyID), + Pointer: gqlmodel.FromPointer(&input.SchemaItemID, nil, nil), + Operations: op, + }, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + return &gqlmodel.PropertyItemPayload{ + Property: gqlmodel.ToProperty(p), + }, nil +} diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go new file mode 100644 index 000000000..03ee76d2f --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -0,0 +1,179 @@ +package gql + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" +) + +func (r *mutationResolver) CreateScene(ctx context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Scene.Create( + ctx, + id.ProjectID(input.ProjectID), + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateScenePayload{ + Scene: gqlmodel.ToScene(res), + }, nil +} + +func (r *mutationResolver) AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) { + exit := trace(ctx) + defer exit() + + scene, widget, err := r.usecases.Scene.AddWidget( + ctx, + id.SceneID(input.SceneID), + input.PluginID, + id.PluginExtensionID(input.ExtensionID), + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.AddWidgetPayload{ + Scene: 
gqlmodel.ToScene(scene), + SceneWidget: gqlmodel.ToSceneWidget(widget), + }, nil +} + +func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) { + exit := trace(ctx) + defer exit() + + scene, widget, err := r.usecases.Scene.UpdateWidget(ctx, interfaces.UpdateWidgetParam{ + SceneID: id.SceneID(input.SceneID), + PluginID: input.PluginID, + ExtensionID: id.PluginExtensionID(input.ExtensionID), + Enabled: input.Enabled, + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateWidgetPayload{ + Scene: gqlmodel.ToScene(scene), + SceneWidget: gqlmodel.ToSceneWidget(widget), + }, nil +} + +func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) { + exit := trace(ctx) + defer exit() + + scene, err := r.usecases.Scene.RemoveWidget(ctx, + id.SceneID(input.SceneID), + id.PluginID(input.PluginID), + id.PluginExtensionID(input.ExtensionID), + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveWidgetPayload{ + Scene: gqlmodel.ToScene(scene), + PluginID: input.PluginID, + ExtensionID: input.ExtensionID, + }, nil +} + +func (r *mutationResolver) InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) { + exit := trace(ctx) + defer exit() + + scene, pl, pr, err := r.usecases.Scene.InstallPlugin(ctx, + id.SceneID(input.SceneID), + input.PluginID, + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.InstallPluginPayload{ + Scene: gqlmodel.ToScene(scene), ScenePlugin: &gqlmodel.ScenePlugin{ + PluginID: pl, + PropertyID: pr.IDRef(), + }, + }, nil +} + +func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) { + exit := trace(ctx) + defer exit() + + operator := getOperator(ctx) + var p *plugin.Plugin + 
var s *scene.Scene + var err error + + if input.File != nil { + p, s, err = r.usecases.Plugin.Upload(ctx, input.File.File, id.SceneID(input.SceneID), operator) + } else if input.URL != nil { + p, s, err = r.usecases.Plugin.UploadFromRemote(ctx, input.URL, id.SceneID(input.SceneID), operator) + } else { + return nil, errors.New("either file or url is required") + } + if err != nil { + return nil, err + } + + return &gqlmodel.UploadPluginPayload{ + Plugin: gqlmodel.ToPlugin(p), + Scene: gqlmodel.ToScene(s), + ScenePlugin: gqlmodel.ToScenePlugin(s.PluginSystem().Plugin(p.ID())), + }, nil +} + +func (r *mutationResolver) UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) { + exit := trace(ctx) + defer exit() + + scene, err := r.usecases.Scene.UninstallPlugin(ctx, + id.SceneID(input.SceneID), + id.PluginID(input.PluginID), + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.UninstallPluginPayload{ + PluginID: input.PluginID, + Scene: gqlmodel.ToScene(scene), + }, nil +} + +func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) { + exit := trace(ctx) + defer exit() + + s, err := r.usecases.Scene.UpgradePlugin(ctx, + id.SceneID(input.SceneID), + input.PluginID, + input.ToPluginID, + getOperator(ctx), + ) + if err != nil { + return nil, err + } + + return &gqlmodel.UpgradePluginPayload{ + Scene: gqlmodel.ToScene(s), + ScenePlugin: gqlmodel.ToScenePlugin(s.PluginSystem().Plugin(input.ToPluginID)), + }, nil +} diff --git a/internal/adapter/gql/resolver_mutation_team.go b/internal/adapter/gql/resolver_mutation_team.go new file mode 100644 index 000000000..f40d87c1a --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_team.go @@ -0,0 +1,79 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" 
+) + +func (r *mutationResolver) CreateTeam(ctx context.Context, input gqlmodel.CreateTeamInput) (*gqlmodel.CreateTeamPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Team.Create(ctx, input.Name, getUser(ctx).ID()) + if err != nil { + return nil, err + } + + return &gqlmodel.CreateTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) DeleteTeam(ctx context.Context, input gqlmodel.DeleteTeamInput) (*gqlmodel.DeleteTeamPayload, error) { + exit := trace(ctx) + defer exit() + + if err := r.usecases.Team.Remove(ctx, id.TeamID(input.TeamID), getOperator(ctx)); err != nil { + return nil, err + } + + return &gqlmodel.DeleteTeamPayload{TeamID: input.TeamID}, nil +} + +func (r *mutationResolver) UpdateTeam(ctx context.Context, input gqlmodel.UpdateTeamInput) (*gqlmodel.UpdateTeamPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Team.Update(ctx, id.TeamID(input.TeamID), input.Name, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input gqlmodel.AddMemberToTeamInput) (*gqlmodel.AddMemberToTeamPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Team.AddMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddMemberToTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + +func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input gqlmodel.RemoveMemberFromTeamInput) (*gqlmodel.RemoveMemberFromTeamPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Team.RemoveMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveMemberFromTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} + 
+func (r *mutationResolver) UpdateMemberOfTeam(ctx context.Context, input gqlmodel.UpdateMemberOfTeamInput) (*gqlmodel.UpdateMemberOfTeamPayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.Team.UpdateMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateMemberOfTeamPayload{Team: gqlmodel.ToTeam(res)}, nil +} diff --git a/internal/adapter/gql/resolver_mutation_user.go b/internal/adapter/gql/resolver_mutation_user.go new file mode 100644 index 000000000..7f1930a18 --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_user.go @@ -0,0 +1,75 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) { + exit := trace(ctx) + defer exit() + + secret := "" + if input.Secret != nil { + secret = *input.Secret + } + + u, team, err := r.usecases.User.Signup(ctx, interfaces.SignupParam{ + Sub: getSub(ctx), + Lang: input.Lang, + Theme: gqlmodel.ToTheme(input.Theme), + UserID: id.UserIDFromRefID(input.UserID), + TeamID: id.TeamIDFromRefID(input.TeamID), + Secret: secret, + }) + if err != nil { + return nil, err + } + + return &gqlmodel.SignupPayload{User: gqlmodel.ToUser(u), Team: gqlmodel.ToTeam(team)}, nil +} + +func (r *mutationResolver) UpdateMe(ctx context.Context, input gqlmodel.UpdateMeInput) (*gqlmodel.UpdateMePayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.User.UpdateMe(ctx, interfaces.UpdateMeParam{ + Name: input.Name, + Email: input.Email, + Lang: input.Lang, + Theme: gqlmodel.ToTheme(input.Theme), + Password: input.Password, + PasswordConfirmation: input.PasswordConfirmation, + }, getOperator(ctx)) 
+ if err != nil { + return nil, err + } + + return &gqlmodel.UpdateMePayload{User: gqlmodel.ToUser(res)}, nil +} + +func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input gqlmodel.RemoveMyAuthInput) (*gqlmodel.UpdateMePayload, error) { + exit := trace(ctx) + defer exit() + + res, err := r.usecases.User.RemoveMyAuth(ctx, input.Auth, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateMePayload{User: gqlmodel.ToUser(res)}, nil +} + +func (r *mutationResolver) DeleteMe(ctx context.Context, input gqlmodel.DeleteMeInput) (*gqlmodel.DeleteMePayload, error) { + exit := trace(ctx) + defer exit() + + if err := r.usecases.User.DeleteMe(ctx, id.UserID(input.UserID), getOperator(ctx)); err != nil { + return nil, err + } + + return &gqlmodel.DeleteMePayload{UserID: input.UserID}, nil +} diff --git a/internal/graphql/resolver_plugin.go b/internal/adapter/gql/resolver_plugin.go similarity index 60% rename from internal/graphql/resolver_plugin.go rename to internal/adapter/gql/resolver_plugin.go index 2a5aac651..12bfd40fa 100644 --- a/internal/graphql/resolver_plugin.go +++ b/internal/adapter/gql/resolver_plugin.go @@ -1,10 +1,9 @@ -package graphql +package gql import ( "context" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" ) @@ -18,27 +17,27 @@ func (r *Resolver) PluginExtension() PluginExtensionResolver { type pluginResolver struct{ *Resolver } -func (r *pluginResolver) PropertySchema(ctx context.Context, obj *graphql1.Plugin) (*graphql1.PropertySchema, error) { +func (r *pluginResolver) PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() if obj.PropertySchemaID == nil { return nil, nil } - return 
dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.PropertySchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(*obj.PropertySchemaID) } -func (r *pluginResolver) Scene(ctx context.Context, obj *graphql1.Plugin) (*graphql1.Scene, error) { +func (r *pluginResolver) Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) { exit := trace(ctx) defer exit() if obj.SceneID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*obj.SceneID)) + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*obj.SceneID)) } -func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *graphql1.Plugin, sceneID *id.ID) (*graphql1.ScenePlugin, error) { +func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Plugin, sceneID *id.ID) (*gqlmodel.ScenePlugin, error) { exit := trace(ctx) defer exit() @@ -48,18 +47,18 @@ func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *graphql1.Plugin, if sceneID == nil { return nil, nil } - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*sceneID)) + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*sceneID)) return s.Plugin(obj.ID), err } -func (r *pluginResolver) TranslatedName(ctx context.Context, obj *graphql1.Plugin, lang *string) (string, error) { +func (r *pluginResolver) TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) { if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { return s, nil } return obj.Name, nil } -func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *graphql1.Plugin, lang *string) (string, error) { +func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) { if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { return s, nil } @@ -68,36 +67,36 @@ func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *graphql type 
pluginExtensionResolver struct{ *Resolver } -func (r *pluginExtensionResolver) Plugin(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.Plugin, error) { +func (r *pluginExtensionResolver) Plugin(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.Plugin, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) } -func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *graphql1.PluginExtension) (*graphql1.PropertySchema, error) { +func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.PropertySchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.PropertySchemaID) } -func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) { +func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { return s, nil } return obj.Name, nil } -func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *graphql1.PluginExtension, sceneID id.ID) (*graphql1.SceneWidget, error) { +func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID id.ID) (*gqlmodel.SceneWidget, error) { exit := trace(ctx) defer exit() - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(sceneID)) + s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(sceneID)) return s.Widget(obj.PluginID, obj.ExtensionID), err } -func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *graphql1.PluginExtension, lang *string) (string, error) { +func (r 
*pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { exit := trace(ctx) defer exit() diff --git a/internal/adapter/gql/resolver_project.go b/internal/adapter/gql/resolver_project.go new file mode 100644 index 000000000..c3a0b9610 --- /dev/null +++ b/internal/adapter/gql/resolver_project.go @@ -0,0 +1,33 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +func (r *Resolver) Project() ProjectResolver { + return &projectResolver{r} +} + +type projectResolver struct{ *Resolver } + +func (r *projectResolver) Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) +} + +func (r *projectResolver) Scene(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + s, err := r.loaders.Scene.FindByProject(ctx, id.ProjectID(obj.ID)) + if err != nil && err != rerror.ErrNotFound { + return nil, err + } + return s, nil +} diff --git a/internal/graphql/resolver_property.go b/internal/adapter/gql/resolver_property.go similarity index 58% rename from internal/graphql/resolver_property.go rename to internal/adapter/gql/resolver_property.go index 467645b2f..f0885bfd1 100644 --- a/internal/graphql/resolver_property.go +++ b/internal/adapter/gql/resolver_property.go @@ -1,11 +1,10 @@ -package graphql +package gql import ( "context" "errors" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" ) @@ -44,36 +43,36 @@ func (r *Resolver) 
PropertyGroup() PropertyGroupResolver { type propertyResolver struct{ *Resolver } -func (r *propertyResolver) Schema(ctx context.Context, obj *graphql1.Property) (*graphql1.PropertySchema, error) { +func (r *propertyResolver) Schema(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (r *propertyResolver) Layer(ctx context.Context, obj *graphql1.Property) (graphql1.Layer, error) { +func (r *propertyResolver) Layer(ctx context.Context, obj *gqlmodel.Property) (gqlmodel.Layer, error) { exit := trace(ctx) defer exit() - l, err := r.config.Controllers.LayerController.FetchByProperty(ctx, id.PropertyID(obj.ID), getOperator(ctx)) + l, err := r.loaders.Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) if err != nil || errors.Is(err, rerror.ErrNotFound) { return nil, nil } return l, err } -func (r *propertyResolver) Merged(ctx context.Context, obj *graphql1.Property) (*graphql1.MergedProperty, error) { +func (r *propertyResolver) Merged(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.MergedProperty, error) { exit := trace(ctx) defer exit() - l, err := r.config.Controllers.LayerController.FetchByProperty(ctx, id.PropertyID(obj.ID), getOperator(ctx)) + l, err := r.loaders.Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) if err != nil { if errors.Is(err, rerror.ErrNotFound) { return nil, nil } return nil, err } - li, ok := l.(*graphql1.LayerItem) + li, ok := l.(*gqlmodel.LayerItem) if !ok { return nil, nil } @@ -94,140 +93,140 @@ func (r *propertyResolver) Merged(ctx context.Context, obj *graphql1.Property) ( type propertyFieldResolver struct{ *Resolver } -func (r *propertyFieldResolver) Parent(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.Property, error) { +func (r *propertyFieldResolver) Parent(ctx context.Context, obj 
*gqlmodel.PropertyField) (*gqlmodel.Property, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.ParentID)) + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.ParentID)) } -func (r *propertyFieldResolver) Schema(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchema, error) { +func (r *propertyFieldResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (r *propertyFieldResolver) Field(ctx context.Context, obj *graphql1.PropertyField) (*graphql1.PropertySchemaField, error) { +func (r *propertyFieldResolver) Field(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchemaField, error) { exit := trace(ctx) defer exit() - schema, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + schema, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) if err != nil { return nil, err } return schema.Field(obj.FieldID), nil } -func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *graphql1.PropertyField) (interface{}, error) { +func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *gqlmodel.PropertyField) (interface{}, error) { exit := trace(ctx) defer exit() - datasetLoader := dataloader.DataLoadersFromContext(ctx).Dataset + datasetLoader := DataLoadersFromContext(ctx).Dataset return actualValue(datasetLoader, obj.Value, obj.Links, false) } type propertyFieldLinkResolver struct{ *Resolver } -func (r *propertyFieldLinkResolver) Dataset(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.Dataset, error) { +func (r *propertyFieldLinkResolver) Dataset(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.Dataset, error) { exit 
:= trace(ctx) defer exit() if obj.DatasetID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) } -func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetField, error) { +func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetField, error) { exit := trace(ctx) defer exit() if obj.DatasetID == nil { return nil, nil } - d, err := dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + d, err := DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) if err != nil { return nil, err } return d.Field(obj.DatasetSchemaFieldID), nil } -func (r *propertyFieldLinkResolver) DatasetSchema(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchema, error) { +func (r *propertyFieldLinkResolver) DatasetSchema(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) } -func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj *graphql1.PropertyFieldLink) (*graphql1.DatasetSchemaField, error) { +func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchemaField, error) { exit := trace(ctx) defer exit() - ds, err := dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + ds, err := DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) return ds.Field(obj.DatasetSchemaFieldID), err } type mergedPropertyResolver 
struct{ *Resolver } -func (r *mergedPropertyResolver) Original(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) { +func (r *mergedPropertyResolver) Original(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { exit := trace(ctx) defer exit() if obj.OriginalID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) } -func (r *mergedPropertyResolver) Parent(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Property, error) { +func (r *mergedPropertyResolver) Parent(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { exit := trace(ctx) defer exit() if obj.ParentID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) } -func (r *mergedPropertyResolver) Schema(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.PropertySchema, error) { +func (r *mergedPropertyResolver) Schema(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() if obj.SchemaID == nil { if propertyID := obj.PropertyID(); propertyID != nil { - property, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) + property, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) if err != nil { return nil, err } if property == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) } return nil, nil } - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) } -func (r 
*mergedPropertyResolver) LinkedDataset(ctx context.Context, obj *graphql1.MergedProperty) (*graphql1.Dataset, error) { +func (r *mergedPropertyResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Dataset, error) { exit := trace(ctx) defer exit() if obj.LinkedDatasetID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } -func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *graphql1.MergedProperty) ([]*graphql1.MergedPropertyGroup, error) { +func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *gqlmodel.MergedProperty) ([]*gqlmodel.MergedPropertyGroup, error) { exit := trace(ctx) defer exit() if obj.Groups != nil { return obj.Groups, nil } - m, err := r.config.Controllers.PropertyController.FetchMerged(ctx, obj.OriginalID, obj.ParentID, obj.LinkedDatasetID, getOperator(ctx)) + m, err := r.loaders.Property.FetchMerged(ctx, obj.OriginalID, obj.ParentID, obj.LinkedDatasetID) if err != nil || m == nil { return nil, err } @@ -236,129 +235,129 @@ func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *graphql1.Merge type mergedPropertyGroupResolver struct{ *Resolver } -func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) { +func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) { exit := trace(ctx) defer exit() if obj.OriginalID == nil || obj.OriginalPropertyID == nil { return nil, nil } - p, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + p, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) if err != nil { return nil, err } - if i, ok := p.Item(*obj.OriginalID).(*graphql1.PropertyGroup); ok { + if i, ok := 
p.Item(*obj.OriginalID).(*gqlmodel.PropertyGroup); ok { return i, nil } return nil, nil } -func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.PropertyGroup, error) { +func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) { exit := trace(ctx) defer exit() if obj.ParentID == nil || obj.ParentPropertyID == nil { return nil, nil } - p, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + p, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) if err != nil { return nil, err } - if i, ok := p.Item(*obj.ParentID).(*graphql1.PropertyGroup); ok { + if i, ok := p.Item(*obj.ParentID).(*gqlmodel.PropertyGroup); ok { return i, nil } return nil, nil } -func (r *mergedPropertyGroupResolver) OriginalProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) { +func (r *mergedPropertyGroupResolver) OriginalProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { exit := trace(ctx) defer exit() if obj.OriginalID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) } -func (r *mergedPropertyGroupResolver) ParentProperty(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Property, error) { +func (r *mergedPropertyGroupResolver) ParentProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { exit := trace(ctx) defer exit() if obj.ParentID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) } -func (r *mergedPropertyGroupResolver) Schema(ctx context.Context, obj 
*graphql1.MergedPropertyGroup) (*graphql1.PropertySchema, error) { +func (r *mergedPropertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() if obj.SchemaID == nil { if propertyID := obj.PropertyID(); propertyID != nil { - property, err := dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) + property, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) if err != nil { return nil, err } if property == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) } return nil, nil } - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) } -func (r *mergedPropertyGroupResolver) LinkedDataset(ctx context.Context, obj *graphql1.MergedPropertyGroup) (*graphql1.Dataset, error) { +func (r *mergedPropertyGroupResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Dataset, error) { exit := trace(ctx) defer exit() if obj.LinkedDatasetID == nil { return nil, nil } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } type mergedPropertyFieldResolver struct{ *Resolver } -func (r *mergedPropertyFieldResolver) Schema(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchema, error) { +func (r *mergedPropertyFieldResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (r 
*mergedPropertyFieldResolver) Field(ctx context.Context, obj *graphql1.MergedPropertyField) (*graphql1.PropertySchemaField, error) { +func (r *mergedPropertyFieldResolver) Field(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchemaField, error) { exit := trace(ctx) defer exit() - s, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + s, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) return s.Field(obj.FieldID), err } -func (r *mergedPropertyFieldResolver) ActualValue(ctx context.Context, obj *graphql1.MergedPropertyField) (interface{}, error) { +func (r *mergedPropertyFieldResolver) ActualValue(ctx context.Context, obj *gqlmodel.MergedPropertyField) (interface{}, error) { exit := trace(ctx) defer exit() - datasetLoader := dataloader.DataLoadersFromContext(ctx).Dataset + datasetLoader := DataLoadersFromContext(ctx).Dataset return actualValue(datasetLoader, obj.Value, obj.Links, obj.Overridden) } type propertyGroupListResolver struct{ *Resolver } -func (*propertyGroupListResolver) Schema(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchema, error) { +func (*propertyGroupListResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (*propertyGroupListResolver) SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroupList) (*graphql1.PropertySchemaGroup, error) { +func (*propertyGroupListResolver) SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchemaGroup, error) { exit := trace(ctx) defer exit() - s, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + s, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) if err != nil { return nil, err } 
@@ -367,25 +366,25 @@ func (*propertyGroupListResolver) SchemaGroup(ctx context.Context, obj *graphql1 type propertyGroupResolver struct{ *Resolver } -func (*propertyGroupResolver) Schema(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchema, error) { +func (*propertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (*propertyGroupResolver) SchemaGroup(ctx context.Context, obj *graphql1.PropertyGroup) (*graphql1.PropertySchemaGroup, error) { +func (*propertyGroupResolver) SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchemaGroup, error) { exit := trace(ctx) defer exit() - s, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + s, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) if err != nil { return nil, err } return s.Group(obj.SchemaGroupID), nil } -func actualValue(datasetLoader dataloader.DatasetDataLoader, value interface{}, links []*graphql1.PropertyFieldLink, overridden bool) (interface{}, error) { +func actualValue(datasetLoader DatasetDataLoader, value interface{}, links []*gqlmodel.PropertyFieldLink, overridden bool) (interface{}, error) { if len(links) == 0 || overridden { return &value, nil } @@ -401,7 +400,7 @@ func actualValue(datasetLoader dataloader.DatasetDataLoader, value interface{}, if field != nil { if i == len(links)-1 { return &value, nil - } else if field.Type != graphql1.ValueTypeRef { + } else if field.Type != gqlmodel.ValueTypeRef { return nil, nil } if field.Value != nil { diff --git a/internal/graphql/resolver_property_schema.go b/internal/adapter/gql/resolver_property_schema.go similarity index 70% rename from internal/graphql/resolver_property_schema.go rename to 
internal/adapter/gql/resolver_property_schema.go index b6b1bfa1d..b670f50e7 100644 --- a/internal/graphql/resolver_property_schema.go +++ b/internal/adapter/gql/resolver_property_schema.go @@ -1,10 +1,9 @@ -package graphql +package gql import ( "context" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" ) func (r *Resolver) PropertySchemaField() PropertySchemaFieldResolver { @@ -25,7 +24,7 @@ func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { type propertySchemaFieldResolver struct{ *Resolver } -func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { +func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { exit := trace(ctx) defer exit() @@ -36,7 +35,7 @@ func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj * } // deprecated -func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { +func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { exit := trace(ctx) defer exit() @@ -46,7 +45,7 @@ func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *g return obj.Name, nil } -func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *graphql1.PropertySchemaField, lang *string) (string, error) { +func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { exit := trace(ctx) defer exit() @@ -58,45 +57,45 @@ func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, type 
propertyLinkableFieldsResolver struct{ *Resolver } -func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchema, error) { +func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (r *propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { +func (r *propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) { exit := trace(ctx) defer exit() if obj.Latlng == nil { return nil, nil } - ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + ps, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) return ps.Field(*obj.Latlng), err } -func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *graphql1.PropertyLinkableFields) (*graphql1.PropertySchemaField, error) { +func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) { exit := trace(ctx) defer exit() if obj.URL == nil { return nil, nil } - ps, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + ps, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) return ps.Field(*obj.URL), err } type propertySchemaGroupResolver struct{ *Resolver } -func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *graphql1.PropertySchemaGroup) (*graphql1.PropertySchema, error) { +func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) { 
exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) } -func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaGroup, lang *string) (string, error) { +func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *string) (string, error) { if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } @@ -109,7 +108,7 @@ func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj * type propertySchemaFieldChoiceResolver struct{ *Resolver } -func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { +func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) { if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } @@ -117,7 +116,7 @@ func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, } // deprecated -func (r *propertySchemaFieldChoiceResolver) TranslatedLabel(ctx context.Context, obj *graphql1.PropertySchemaFieldChoice, lang *string) (string, error) { +func (r *propertySchemaFieldChoiceResolver) TranslatedLabel(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) { if s, ok := obj.AllTranslatedLabel[getLang(ctx, lang)]; ok { return s, nil } diff --git a/internal/graphql/resolver_property_test.go b/internal/adapter/gql/resolver_property_test.go similarity index 77% rename from internal/graphql/resolver_property_test.go rename to internal/adapter/gql/resolver_property_test.go index 8bf5ba9be..73a8b5219 100644 --- a/internal/graphql/resolver_property_test.go +++ b/internal/adapter/gql/resolver_property_test.go @@ -1,10 +1,9 @@ 
-package graphql +package gql import ( "testing" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/stretchr/testify/assert" ) @@ -12,9 +11,9 @@ func Test_actualValue(t *testing.T) { value := 300 type args struct { - datasetLoader dataloader.DatasetDataLoader + datasetLoader DatasetDataLoader value interface{} - links []*graphql1.PropertyFieldLink + links []*gqlmodel.PropertyFieldLink overridden bool } var tests = []struct { diff --git a/internal/graphql/resolver_query.go b/internal/adapter/gql/resolver_query.go similarity index 62% rename from internal/graphql/resolver_query.go rename to internal/adapter/gql/resolver_query.go index c293a0a27..4ce654c4c 100644 --- a/internal/graphql/resolver_query.go +++ b/internal/adapter/gql/resolver_query.go @@ -1,10 +1,9 @@ -package graphql +package gql import ( "context" - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" ) @@ -15,14 +14,14 @@ func (r *Resolver) Query() QueryResolver { type queryResolver struct{ *Resolver } -func (r *queryResolver) Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) { +func (r *queryResolver) Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.AssetController.FindByTeam(ctx, teamID, first, last, before, after, getOperator(ctx)) + return r.loaders.Asset.FindByTeam(ctx, teamID, first, last, before, after) } -func (r 
*queryResolver) Me(ctx context.Context) (*graphql1.User, error) { +func (r *queryResolver) Me(ctx context.Context) (*gqlmodel.User, error) { exit := trace(ctx) defer exit() @@ -30,64 +29,70 @@ func (r *queryResolver) Me(ctx context.Context) (*graphql1.User, error) { if u == nil { return nil, nil } - return graphql1.ToUser(u), nil + return gqlmodel.ToUser(u), nil } -func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg graphql1.NodeType) (graphql1.Node, error) { +func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) { exit := trace(ctx) defer exit() - dataloaders := dataloader.DataLoadersFromContext(ctx) + dataloaders := DataLoadersFromContext(ctx) switch typeArg { - case graphql1.NodeTypeDataset: + case gqlmodel.NodeTypeAsset: + result, err := dataloaders.Asset.Load(id.AssetID(i)) + if result == nil { + return nil, nil + } + return result, err + case gqlmodel.NodeTypeDataset: result, err := dataloaders.Dataset.Load(id.DatasetID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeDatasetSchema: + case gqlmodel.NodeTypeDatasetSchema: result, err := dataloaders.DatasetSchema.Load(id.DatasetSchemaID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeLayerItem: + case gqlmodel.NodeTypeLayerItem: result, err := dataloaders.LayerItem.Load(id.LayerID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeLayerGroup: + case gqlmodel.NodeTypeLayerGroup: result, err := dataloaders.LayerGroup.Load(id.LayerID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeProject: + case gqlmodel.NodeTypeProject: result, err := dataloaders.Project.Load(id.ProjectID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeProperty: + case gqlmodel.NodeTypeProperty: result, err := dataloaders.Property.Load(id.PropertyID(i)) if result == nil { return nil, nil } return result, 
err - case graphql1.NodeTypeScene: + case gqlmodel.NodeTypeScene: result, err := dataloaders.Scene.Load(id.SceneID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeTeam: + case gqlmodel.NodeTypeTeam: result, err := dataloaders.Team.Load(id.TeamID(i)) if result == nil { return nil, nil } return result, err - case graphql1.NodeTypeUser: + case gqlmodel.NodeTypeUser: result, err := dataloaders.User.Load(id.UserID(i)) if result == nil { return nil, nil @@ -97,98 +102,108 @@ func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg graphql1.Node return nil, nil } -func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg graphql1.NodeType) ([]graphql1.Node, error) { +func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) { exit := trace(ctx) defer exit() - dataloaders := dataloader.DataLoadersFromContext(ctx) + dataloaders := DataLoadersFromContext(ctx) switch typeArg { - case graphql1.NodeTypeDataset: + case gqlmodel.NodeTypeAsset: + data, err := dataloaders.Asset.LoadAll(id.AssetIDsFromIDRef(ids)) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + nodes := make([]gqlmodel.Node, len(data)) + for i := range data { + nodes[i] = data[i] + } + return nodes, nil + case gqlmodel.NodeTypeDataset: data, err := dataloaders.Dataset.LoadAll(id.DatasetIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = data[i] } return nodes, nil - case graphql1.NodeTypeDatasetSchema: + case gqlmodel.NodeTypeDatasetSchema: data, err := dataloaders.DatasetSchema.LoadAll(id.DatasetSchemaIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = data[i] } return nodes, nil - case 
graphql1.NodeTypeLayerItem: + case gqlmodel.NodeTypeLayerItem: data, err := dataloaders.LayerItem.LoadAll(id.LayerIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = *data[i] } return nodes, nil - case graphql1.NodeTypeLayerGroup: + case gqlmodel.NodeTypeLayerGroup: data, err := dataloaders.LayerGroup.LoadAll(id.LayerIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = *data[i] } return nodes, nil - case graphql1.NodeTypeProject: + case gqlmodel.NodeTypeProject: data, err := dataloaders.Project.LoadAll(id.ProjectIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = data[i] } return nodes, nil - case graphql1.NodeTypeProperty: + case gqlmodel.NodeTypeProperty: data, err := dataloaders.Property.LoadAll(id.PropertyIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = data[i] } return nodes, nil - case graphql1.NodeTypeScene: + case gqlmodel.NodeTypeScene: data, err := dataloaders.Scene.LoadAll(id.SceneIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = data[i] } return nodes, nil - case graphql1.NodeTypeTeam: + case gqlmodel.NodeTypeTeam: data, err := dataloaders.Team.LoadAll(id.TeamIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { 
nodes[i] = data[i] } return nodes, nil - case graphql1.NodeTypeUser: + case gqlmodel.NodeTypeUser: data, err := dataloaders.User.LoadAll(id.UserIDsFromIDRef(ids)) if len(err) > 0 && err[0] != nil { return nil, err[0] } - nodes := make([]graphql1.Node, len(data)) + nodes := make([]gqlmodel.Node, len(data)) for i := range data { nodes[i] = data[i] } @@ -198,14 +213,14 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg graphql } } -func (r *queryResolver) PropertySchema(ctx context.Context, i id.PropertySchemaID) (*graphql1.PropertySchema, error) { +func (r *queryResolver) PropertySchema(ctx context.Context, i id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).PropertySchema.Load(i) + return DataLoadersFromContext(ctx).PropertySchema.Load(i) } -func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertySchemaID) ([]*graphql1.PropertySchema, error) { +func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertySchemaID) ([]*gqlmodel.PropertySchema, error) { exit := trace(ctx) defer exit() @@ -216,7 +231,7 @@ func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertyS } } - data, err := dataloader.DataLoadersFromContext(ctx).PropertySchema.LoadAll(ids2) + data, err := DataLoadersFromContext(ctx).PropertySchema.LoadAll(ids2) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -224,14 +239,14 @@ func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertyS return data, nil } -func (r *queryResolver) Plugin(ctx context.Context, id id.PluginID) (*graphql1.Plugin, error) { +func (r *queryResolver) Plugin(ctx context.Context, id id.PluginID) (*gqlmodel.Plugin, error) { exit := trace(ctx) defer exit() - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(id) + return DataLoadersFromContext(ctx).Plugin.Load(id) } -func (r *queryResolver) Plugins(ctx context.Context, ids 
[]*id.PluginID) ([]*graphql1.Plugin, error) { +func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gqlmodel.Plugin, error) { exit := trace(ctx) defer exit() @@ -242,7 +257,7 @@ func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gra } } - data, err := dataloader.DataLoadersFromContext(ctx).Plugin.LoadAll(ids2) + data, err := DataLoadersFromContext(ctx).Plugin.LoadAll(ids2) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -250,11 +265,11 @@ func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gra return data, nil } -func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (graphql1.Layer, error) { +func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (gqlmodel.Layer, error) { exit := trace(ctx) defer exit() - dataloaders := dataloader.DataLoadersFromContext(ctx) + dataloaders := DataLoadersFromContext(ctx) result, err := dataloaders.Layer.Load(id.LayerID(layerID)) if result == nil || *result == nil { return nil, nil @@ -262,65 +277,65 @@ func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (graphql1.Laye return *result, err } -func (r *queryResolver) Scene(ctx context.Context, projectID id.ID) (*graphql1.Scene, error) { +func (r *queryResolver) Scene(ctx context.Context, projectID id.ID) (*gqlmodel.Scene, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.SceneController.FindByProject(ctx, id.ProjectID(projectID), getOperator(ctx)) + return r.loaders.Scene.FindByProject(ctx, id.ProjectID(projectID)) } -func (r *queryResolver) Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) { +func (r *queryResolver) Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { exit := trace(ctx) defer exit() - 
return r.config.Controllers.ProjectController.FindByTeam(ctx, id.TeamID(teamID), first, last, before, after, getOperator(ctx)) + return r.loaders.Project.FindByTeam(ctx, id.TeamID(teamID), first, last, before, after) } -func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) { +func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.DatasetController.FindSchemaByScene(ctx, sceneID, first, last, before, after, getOperator(ctx)) + return r.loaders.Dataset.FindSchemaByScene(ctx, sceneID, first, last, before, after) } -func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*graphql1.DatasetSchema, error) { +func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*gqlmodel.DatasetSchema, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.DatasetController.FindDynamicSchemasByScene(ctx, sceneID) + return r.loaders.Dataset.FindDynamicSchemasByScene(ctx, sceneID) } -func (r *queryResolver) Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetConnection, error) { +func (r *queryResolver) Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.DatasetController.FindBySchema(ctx, datasetSchemaID, first, last, before, after, getOperator(ctx)) + return r.loaders.Dataset.FindBySchema(ctx, datasetSchemaID, first, last, before, after) } -func (r *queryResolver) SceneLock(ctx context.Context, sceneID id.ID) 
(*graphql1.SceneLockMode, error) { +func (r *queryResolver) SceneLock(ctx context.Context, sceneID id.ID) (*gqlmodel.SceneLockMode, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.SceneController.FetchLock(ctx, id.SceneID(sceneID), getOperator(ctx)) + return r.loaders.Scene.FetchLock(ctx, id.SceneID(sceneID)) } -func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*graphql1.SearchedUser, error) { +func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.UserController.SearchUser(ctx, nameOrEmail, getOperator(ctx)) + return r.loaders.User.SearchUser(ctx, nameOrEmail) } -func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*graphql1.CheckProjectAliasPayload, error) { +func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.CheckProjectAliasPayload, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.ProjectController.CheckAlias(ctx, alias) + return r.loaders.Project.CheckAlias(ctx, alias) } -func (r *queryResolver) InstallablePlugins(ctx context.Context) ([]*graphql1.PluginMetadata, error) { +func (r *queryResolver) InstallablePlugins(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) { exit := trace(ctx) defer exit() - return r.config.Controllers.PluginController.FetchPluginMetadata(ctx, getOperator(ctx)) + return r.loaders.Plugin.FetchPluginMetadata(ctx) } diff --git a/internal/adapter/gql/resolver_scene.go b/internal/adapter/gql/resolver_scene.go new file mode 100644 index 000000000..1071a3c5b --- /dev/null +++ b/internal/adapter/gql/resolver_scene.go @@ -0,0 +1,130 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Scene() SceneResolver { + 
return &sceneResolver{r} +} + +func (r *Resolver) ScenePlugin() ScenePluginResolver { + return &scenePluginResolver{r} +} + +func (r *Resolver) SceneWidget() SceneWidgetResolver { + return &sceneWidgetResolver{r} +} + +type sceneResolver struct{ *Resolver } + +func (r *sceneResolver) Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Project.Load(id.ProjectID(obj.ProjectID)) +} + +func (r *sceneResolver) Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) +} + +func (r *sceneResolver) Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} + +func (r *sceneResolver) RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, error) { + exit := trace(ctx) + defer exit() + + layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.RootLayerID)) + if err != nil { + return nil, err + } + if layer == nil { + return nil, nil + } + layerGroup, ok := (*layer).(*gqlmodel.LayerGroup) + if !ok { + return nil, nil + } + return layerGroup, nil +} + +func (r *sceneResolver) DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { + exit := trace(ctx) + defer exit() + + return r.loaders.Dataset.FindSchemaByScene(ctx, obj.ID, first, last, before, after) +} + +func (r *sceneResolver) LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlmodel.SceneLockMode, error) { + exit := trace(ctx) + defer exit() + + sl, err := r.loaders.Scene.FetchLock(ctx, id.SceneID(obj.ID)) + if err != nil { + return gqlmodel.SceneLockModeFree, err + } + return *sl, nil +} + +type 
scenePluginResolver struct{ *Resolver } + +func (r *scenePluginResolver) Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} +func (r *scenePluginResolver) Property(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + if obj.PropertyID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) +} + +type sceneWidgetResolver struct{ *Resolver } + +func (r *sceneWidgetResolver) Plugin(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Plugin, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) +} + +func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) { + exit := trace(ctx) + defer exit() + + plugin, err := DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + if err != nil { + return nil, err + } + for _, e := range plugin.Extensions { + if e.ExtensionID == obj.ExtensionID { + return e, nil + } + } + return nil, nil +} + +func (r *sceneWidgetResolver) Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} diff --git a/internal/adapter/gql/resolver_team.go b/internal/adapter/gql/resolver_team.go new file mode 100644 index 000000000..3501bb73d --- /dev/null +++ b/internal/adapter/gql/resolver_team.go @@ -0,0 +1,42 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) Team() TeamResolver { + return &teamResolver{r} +} + +func (r *Resolver) 
TeamMember() TeamMemberResolver { + return &teamMemberResolver{r} +} + +type teamResolver struct{ *Resolver } + +func (r *teamResolver) Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { + exit := trace(ctx) + defer exit() + + return r.loaders.Asset.FindByTeam(ctx, obj.ID, first, last, before, after) +} + +func (r *teamResolver) Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + exit := trace(ctx) + defer exit() + + return r.loaders.Project.FindByTeam(ctx, id.TeamID(obj.ID), first, last, before, after) +} + +type teamMemberResolver struct{ *Resolver } + +func (r *teamMemberResolver) User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).User.Load(id.UserID(obj.UserID)) +} diff --git a/internal/adapter/gql/resolver_user.go b/internal/adapter/gql/resolver_user.go new file mode 100644 index 000000000..3caba5b5b --- /dev/null +++ b/internal/adapter/gql/resolver_user.go @@ -0,0 +1,28 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *Resolver) User() UserResolver { + return &userResolver{r} +} + +type userResolver struct{ *Resolver } + +func (r *userResolver) MyTeam(ctx context.Context, obj *gqlmodel.User) (*gqlmodel.Team, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.MyTeamID)) +} + +func (r *userResolver) Teams(ctx context.Context, obj *gqlmodel.User) ([]*gqlmodel.Team, error) { + exit := trace(ctx) + defer exit() + + return r.loaders.Team.FindByUser(ctx, id.UserID(obj.ID)) +} diff --git a/internal/graphql/tracer.go b/internal/adapter/gql/tracer.go similarity 
index 99% rename from internal/graphql/tracer.go rename to internal/adapter/gql/tracer.go index e9854a262..2fa9c8efd 100644 --- a/internal/graphql/tracer.go +++ b/internal/adapter/gql/tracer.go @@ -1,4 +1,4 @@ -package graphql +package gql import ( "context" diff --git a/internal/adapter/graphql/container.go b/internal/adapter/graphql/container.go deleted file mode 100644 index 2b51d11a3..000000000 --- a/internal/adapter/graphql/container.go +++ /dev/null @@ -1,92 +0,0 @@ -package graphql - -import ( - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/internal/usecase/interactor" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/internal/usecase/repo" -) - -type Container struct { - AssetController *AssetController - DatasetController *DatasetController - LayerController *LayerController - PluginController *PluginController - ProjectController *ProjectController - PropertyController *PropertyController - SceneController *SceneController - TeamController *TeamController - UserController *UserController -} - -type ContainerConfig struct { - SignupSecret string -} - -func NewContainer(r *repo.Container, g *gateway.Container, conf ContainerConfig) *Container { - return &Container{ - AssetController: NewAssetController( - AssetControlerConfig{ - AssetInput: func() interfaces.Asset { - return interactor.NewAsset(r, g) - }, - }, - ), - DatasetController: NewDatasetController( - DatasetControllerConfig{ - DatasetInput: func() interfaces.Dataset { - return interactor.NewDataset(r, g) - }, - }, - ), - LayerController: NewLayerController( - LayerControllerConfig{ - LayerInput: func() interfaces.Layer { - return interactor.NewLayer(r) - }, - }, - ), - PluginController: NewPluginController( - PluginControllerConfig{ - PluginInput: func() interfaces.Plugin { - return interactor.NewPlugin(r, g) - }, - }, - ), - ProjectController: NewProjectController( - 
ProjectControllerConfig{ - ProjectInput: func() interfaces.Project { - return interactor.NewProject(r, g) - }, - }, - ), - PropertyController: NewPropertyController( - PropertyControllerConfig{ - PropertyInput: func() interfaces.Property { - return interactor.NewProperty(r, g) - }, - }, - ), - SceneController: NewSceneController( - SceneControllerConfig{ - SceneInput: func() interfaces.Scene { - return interactor.NewScene(r, g) - }, - }, - ), - TeamController: NewTeamController( - TeamControllerConfig{ - TeamInput: func() interfaces.Team { - return interactor.NewTeam(r) - }, - }, - ), - UserController: NewUserController( - UserControllerConfig{ - UserInput: func() interfaces.User { - return interactor.NewUser(r, g, conf.SignupSecret) - }, - }, - ), - } -} diff --git a/internal/adapter/graphql/controller_asset.go b/internal/adapter/graphql/controller_asset.go deleted file mode 100644 index 50c90a552..000000000 --- a/internal/adapter/graphql/controller_asset.go +++ /dev/null @@ -1,75 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" -) - -type AssetControlerConfig struct { - AssetInput func() interfaces.Asset -} - -type AssetController struct { - config AssetControlerConfig -} - -func NewAssetController(config AssetControlerConfig) *AssetController { - return &AssetController{config: config} -} - -func (c *AssetController) usecase() interfaces.Asset { - if c == nil { - return nil - } - return c.config.AssetInput() -} - -func (c *AssetController) Create(ctx context.Context, i *CreateAssetInput, o *usecase.Operator) (*CreateAssetPayload, error) { - res, err := c.usecase().Create(ctx, interfaces.CreateAssetParam{ - TeamID: id.TeamID(i.TeamID), - File: fromFile(&i.File), - }, o) - if err != nil { - return nil, err - } - - return &CreateAssetPayload{Asset: toAsset(res)}, nil -} - -func (c 
*AssetController) Remove(ctx context.Context, i *RemoveAssetInput, o *usecase.Operator) (*RemoveAssetPayload, error) { - res, err2 := c.usecase().Remove(ctx, id.AssetID(i.AssetID), o) - if err2 != nil { - return nil, err2 - } - - return &RemoveAssetPayload{AssetID: res.ID()}, nil -} - -func (c *AssetController) FindByTeam(ctx context.Context, teamID id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*AssetConnection, error) { - p := usecase.NewPagination(first, last, before, after) - assets, pi, err := c.usecase().FindByTeam(ctx, id.TeamID(teamID), p, operator) - if err != nil { - return nil, err - } - - edges := make([]*AssetEdge, 0, len(assets)) - nodes := make([]*Asset, 0, len(assets)) - for _, a := range assets { - asset := toAsset(a) - edges = append(edges, &AssetEdge{ - Node: asset, - Cursor: usecase.Cursor(asset.ID.String()), - }) - nodes = append(nodes, asset) - } - - return &AssetConnection{ - Edges: edges, - Nodes: nodes, - PageInfo: toPageInfo(pi), - TotalCount: pi.TotalCount(), - }, nil -} diff --git a/internal/adapter/graphql/controller_dataset.go b/internal/adapter/graphql/controller_dataset.go deleted file mode 100644 index dcf974d33..000000000 --- a/internal/adapter/graphql/controller_dataset.go +++ /dev/null @@ -1,223 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" -) - -type DatasetControllerConfig struct { - DatasetInput func() interfaces.Dataset -} - -type DatasetController struct { - config DatasetControllerConfig -} - -func NewDatasetController(config DatasetControllerConfig) *DatasetController { - return &DatasetController{config: config} -} - -func (c *DatasetController) usecase() interfaces.Dataset { - if c == nil { - return nil - } - return c.config.DatasetInput() -} - -func (c *DatasetController) 
UpdateDatasetSchema(ctx context.Context, i *UpdateDatasetSchemaInput, operator *usecase.Operator) (*UpdateDatasetSchemaPayload, error) { - res, err := c.usecase().UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ - SchemaId: id.DatasetSchemaID(i.SchemaID), - Name: i.Name, - }, operator) - if err != nil { - return nil, err - } - return &UpdateDatasetSchemaPayload{DatasetSchema: toDatasetSchema(res)}, nil -} - -func (c *DatasetController) AddDynamicDatasetSchema(ctx context.Context, i *AddDynamicDatasetSchemaInput) (*AddDynamicDatasetSchemaPayload, error) { - res, err := c.usecase().AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ - SceneId: id.SceneID(i.SceneID), - }) - if err != nil { - return nil, err - } - - return &AddDynamicDatasetSchemaPayload{DatasetSchema: toDatasetSchema(res)}, nil -} - -func (c *DatasetController) AddDynamicDataset(ctx context.Context, i *AddDynamicDatasetInput) (*AddDynamicDatasetPayload, error) { - dss, ds, err := c.usecase().AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ - SchemaId: id.DatasetSchemaID(i.DatasetSchemaID), - Author: i.Author, - Content: i.Content, - Lat: i.Lat, - Lng: i.Lng, - Target: i.Target, - }) - if err != nil { - return nil, err - } - - return &AddDynamicDatasetPayload{DatasetSchema: toDatasetSchema(dss), Dataset: toDataset(ds)}, nil -} - -func (c *DatasetController) ImportDataset(ctx context.Context, i *ImportDatasetInput, o *usecase.Operator) (*ImportDatasetPayload, error) { - res, err := c.usecase().ImportDataset(ctx, interfaces.ImportDatasetParam{ - SceneId: id.SceneID(i.SceneID), - SchemaId: id.DatasetSchemaIDFromRefID(i.DatasetSchemaID), - File: fromFile(&i.File), - }, o) - if err != nil { - return nil, err - } - - return &ImportDatasetPayload{DatasetSchema: toDatasetSchema(res)}, nil -} - -func (c *DatasetController) ImportDatasetFromGoogleSheet(ctx context.Context, i *ImportDatasetFromGoogleSheetInput, o *usecase.Operator) (*ImportDatasetPayload, error) { - res, 
err := c.usecase().ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ - Token: i.AccessToken, - FileID: i.FileID, - SheetName: i.SheetName, - SceneId: id.SceneID(i.SceneID), - SchemaId: id.DatasetSchemaIDFromRefID(i.DatasetSchemaID), - }, o) - if err != nil { - return nil, err - } - - return &ImportDatasetPayload{DatasetSchema: toDatasetSchema(res)}, nil -} - -func (c *DatasetController) GraphFetchSchema(ctx context.Context, i id.ID, depth int, operator *usecase.Operator) ([]*DatasetSchema, []error) { - res, err := c.usecase().GraphFetchSchema(ctx, id.DatasetSchemaID(i), depth, operator) - if err != nil { - return nil, []error{err} - } - - schemas := make([]*DatasetSchema, 0, len(res)) - for _, d := range res { - schemas = append(schemas, toDatasetSchema(d)) - } - - return schemas, nil -} - -func (c *DatasetController) FindSchemaByScene(ctx context.Context, i id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*DatasetSchemaConnection, error) { - res, pi, err := c.usecase().FindSchemaByScene(ctx, id.SceneID(i), usecase.NewPagination(first, last, before, after), operator) - if err != nil { - return nil, err - } - - edges := make([]*DatasetSchemaEdge, 0, len(res)) - nodes := make([]*DatasetSchema, 0, len(res)) - for _, dataset := range res { - ds := toDatasetSchema(dataset) - edges = append(edges, &DatasetSchemaEdge{ - Node: ds, - Cursor: usecase.Cursor(ds.ID.String()), - }) - nodes = append(nodes, ds) - } - - return &DatasetSchemaConnection{ - Edges: edges, - Nodes: nodes, - PageInfo: toPageInfo(pi), - TotalCount: pi.TotalCount(), - }, nil -} - -func (c *DatasetController) FindDynamicSchemasByScene(ctx context.Context, sid id.ID) ([]*DatasetSchema, error) { - res, err := c.usecase().FindDynamicSchemaByScene(ctx, id.SceneID(sid)) - if err != nil { - return nil, err - } - - dss := []*DatasetSchema{} - for _, dataset := range res { - dss = append(dss, toDatasetSchema(dataset)) - } - - 
return dss, nil -} - -func (c *DatasetController) FindBySchema(ctx context.Context, dsid id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*DatasetConnection, error) { - p := usecase.NewPagination(first, last, before, after) - res, pi, err2 := c.usecase().FindBySchema(ctx, id.DatasetSchemaID(dsid), p, operator) - if err2 != nil { - return nil, err2 - } - - edges := make([]*DatasetEdge, 0, len(res)) - nodes := make([]*Dataset, 0, len(res)) - for _, dataset := range res { - ds := toDataset(dataset) - edges = append(edges, &DatasetEdge{ - Node: ds, - Cursor: usecase.Cursor(ds.ID.String()), - }) - nodes = append(nodes, ds) - } - - conn := &DatasetConnection{ - Edges: edges, - Nodes: nodes, - PageInfo: toPageInfo(pi), - TotalCount: pi.TotalCount(), - } - - return conn, nil -} - -func (c *DatasetController) Sync(ctx context.Context, input *SyncDatasetInput, operator *usecase.Operator) (*SyncDatasetPayload, error) { - dss, ds, err := c.usecase().Sync(ctx, id.SceneID(input.SceneID), input.URL, operator) - if err != nil { - return nil, err - } - - schemas := make([]*DatasetSchema, 0, len(dss)) - datasets := make([]*Dataset, 0, len(ds)) - for _, d := range dss { - schemas = append(schemas, toDatasetSchema(d)) - } - for _, d := range ds { - datasets = append(datasets, toDataset(d)) - } - - return &SyncDatasetPayload{ - SceneID: input.SceneID, - URL: input.URL, - DatasetSchema: schemas, - Dataset: datasets, - }, nil -} - -func (c *DatasetController) RemoveDatasetSchema(ctx context.Context, i *RemoveDatasetSchemaInput, o *usecase.Operator) (*RemoveDatasetSchemaPayload, error) { - res, err := c.usecase().RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ - SchemaId: id.DatasetSchemaID(i.SchemaID), - Force: i.Force, - }, o) - if err != nil { - return nil, err - } - - return &RemoveDatasetSchemaPayload{SchemaID: res.ID()}, nil -} - -func (c *DatasetController) AddDatasetSchema(ctx context.Context, i 
*AddDatasetSchemaInput, o *usecase.Operator) (*AddDatasetSchemaPayload, error) { - res, err2 := c.usecase().AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ - SceneId: id.SceneID(i.SceneID), - Name: i.Name, - RepresentativeField: id.DatasetSchemaFieldIDFromRefID(i.Representativefield), - }, o) - if err2 != nil { - return nil, err2 - } - - return &AddDatasetSchemaPayload{DatasetSchema: toDatasetSchema(res)}, nil -} diff --git a/internal/adapter/graphql/controller_layer.go b/internal/adapter/graphql/controller_layer.go deleted file mode 100644 index 2beaaea00..000000000 --- a/internal/adapter/graphql/controller_layer.go +++ /dev/null @@ -1,202 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" -) - -type LayerControllerConfig struct { - LayerInput func() interfaces.Layer -} - -type LayerController struct { - config LayerControllerConfig -} - -func NewLayerController(config LayerControllerConfig) *LayerController { - return &LayerController{config: config} -} - -func (c *LayerController) usecase() interfaces.Layer { - if c == nil { - return nil - } - return c.config.LayerInput() -} - -func (c *LayerController) AddItem(ctx context.Context, ginput *AddLayerItemInput, operator *usecase.Operator) (*AddLayerItemPayload, error) { - layer, parent, err := c.usecase().AddItem(ctx, interfaces.AddLayerItemInput{ - ParentLayerID: id.LayerID(ginput.ParentLayerID), - PluginID: &ginput.PluginID, - ExtensionID: &ginput.ExtensionID, - Index: ginput.Index, - Name: refToString(ginput.Name), - LatLng: toPropertyLatLng(ginput.Lat, ginput.Lng), - // LinkedDatasetID: ginput.LinkedDatasetID, - }, operator) - if err != nil { - return nil, err - } - - return &AddLayerItemPayload{ - Layer: toLayerItem(layer, parent.IDRef()), - ParentLayer: toLayerGroup(parent, nil), - Index: ginput.Index, - }, nil -} - -func (c 
*LayerController) AddGroup(ctx context.Context, ginput *AddLayerGroupInput, operator *usecase.Operator) (*AddLayerGroupPayload, error) { - layer, parent, err := c.usecase().AddGroup(ctx, interfaces.AddLayerGroupInput{ - ParentLayerID: id.LayerID(ginput.ParentLayerID), - PluginID: ginput.PluginID, - ExtensionID: ginput.ExtensionID, - Index: ginput.Index, - Name: refToString(ginput.Name), - LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(ginput.LinkedDatasetSchemaID), - RepresentativeFieldId: ginput.RepresentativeFieldID, - }, operator) - if err != nil { - return nil, err - } - - return &AddLayerGroupPayload{ - Layer: toLayerGroup(layer, parent.IDRef()), - ParentLayer: toLayerGroup(parent, nil), - Index: ginput.Index, - }, nil -} - -func (c *LayerController) Remove(ctx context.Context, ginput *RemoveLayerInput, operator *usecase.Operator) (*RemoveLayerPayload, error) { - id, layer, err := c.usecase().Remove(ctx, id.LayerID(ginput.LayerID), operator) - if err != nil { - return nil, err - } - - return &RemoveLayerPayload{ - LayerID: id.ID(), - ParentLayer: toLayerGroup(layer, nil), - }, nil -} - -func (c *LayerController) Update(ctx context.Context, ginput *UpdateLayerInput, operator *usecase.Operator) (*UpdateLayerPayload, error) { - layer, err := c.usecase().Update(ctx, interfaces.UpdateLayerInput{ - LayerID: id.LayerID(ginput.LayerID), - Name: ginput.Name, - Visible: ginput.Visible, - }, operator) - if err != nil { - return nil, err - } - - return &UpdateLayerPayload{ - Layer: toLayer(layer, nil), - }, nil -} - -func (c *LayerController) Move(ctx context.Context, ginput *MoveLayerInput, operator *usecase.Operator) (*MoveLayerPayload, error) { - targetLayerID, layerGroupFrom, layerGroupTo, index, err := c.usecase().Move(ctx, interfaces.MoveLayerInput{ - LayerID: id.LayerID(ginput.LayerID), - DestLayerID: id.LayerIDFromRefID(ginput.DestLayerID), - Index: refToIndex(ginput.Index), - }, operator) - if err != nil { - return nil, err - } - - return &MoveLayerPayload{ - 
LayerID: targetLayerID.ID(), - FromParentLayer: toLayerGroup(layerGroupFrom, nil), - ToParentLayer: toLayerGroup(layerGroupTo, nil), - Index: index, - }, nil -} - -func (c *LayerController) CreateInfobox(ctx context.Context, ginput *CreateInfoboxInput, operator *usecase.Operator) (*CreateInfoboxPayload, error) { - layer, err := c.usecase().CreateInfobox(ctx, id.LayerID(ginput.LayerID), operator) - if err != nil { - return nil, err - } - - return &CreateInfoboxPayload{ - Layer: toLayer(layer, nil), - }, nil -} - -func (c *LayerController) RemoveInfobox(ctx context.Context, ginput *RemoveInfoboxInput, operator *usecase.Operator) (*RemoveInfoboxPayload, error) { - layer, err := c.usecase().RemoveInfobox(ctx, id.LayerID(ginput.LayerID), operator) - if err != nil { - return nil, err - } - - return &RemoveInfoboxPayload{ - Layer: toLayer(layer, nil), - }, nil -} - -func (c *LayerController) AddInfoboxField(ctx context.Context, ginput *AddInfoboxFieldInput, operator *usecase.Operator) (*AddInfoboxFieldPayload, error) { - infoboxField, layer, err := c.usecase().AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ - LayerID: id.LayerID(ginput.LayerID), - PluginID: ginput.PluginID, - ExtensionID: ginput.ExtensionID, - Index: ginput.Index, - }, operator) - if err != nil { - return nil, err - } - - return &AddInfoboxFieldPayload{ - InfoboxField: toInfoboxField(infoboxField, layer.Scene(), nil), - Layer: toLayer(layer, nil), - }, nil -} - -func (c *LayerController) MoveInfoboxField(ctx context.Context, ginput *MoveInfoboxFieldInput, operator *usecase.Operator) (*MoveInfoboxFieldPayload, error) { - infoboxField, layer, index, err := c.usecase().MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ - LayerID: id.LayerID(ginput.LayerID), - InfoboxFieldID: id.InfoboxFieldID(ginput.InfoboxFieldID), - Index: ginput.Index, - }, operator) - if err != nil { - return nil, err - } - - return &MoveInfoboxFieldPayload{ - InfoboxFieldID: infoboxField.ID(), - Layer: toLayer(layer, nil), - 
Index: index, - }, nil -} - -func (c *LayerController) RemoveInfoboxField(ctx context.Context, ginput *RemoveInfoboxFieldInput, operator *usecase.Operator) (*RemoveInfoboxFieldPayload, error) { - infoboxField, layer, err := c.usecase().RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ - LayerID: id.LayerID(ginput.LayerID), - InfoboxFieldID: id.InfoboxFieldID(ginput.InfoboxFieldID), - }, operator) - if err != nil { - return nil, err - } - - return &RemoveInfoboxFieldPayload{ - InfoboxFieldID: infoboxField.ID(), - Layer: toLayer(layer, nil), - }, nil -} - -func (c *LayerController) ImportLayer(ctx context.Context, ginput *ImportLayerInput, operator *usecase.Operator) (*ImportLayerPayload, error) { - l, l2, err := c.usecase().ImportLayer(ctx, interfaces.ImportLayerParam{ - LayerID: id.LayerID(ginput.LayerID), - File: fromFile(&ginput.File), - Format: fromLayerEncodingFormat(ginput.Format), - }, operator) - if err != nil { - return nil, err - } - - return &ImportLayerPayload{ - Layers: toLayers(l, l2.IDRef()), - ParentLayer: toLayerGroup(l2, nil), - }, err -} diff --git a/internal/adapter/graphql/controller_plugin.go b/internal/adapter/graphql/controller_plugin.go deleted file mode 100644 index 9be47d8d1..000000000 --- a/internal/adapter/graphql/controller_plugin.go +++ /dev/null @@ -1,72 +0,0 @@ -package graphql - -import ( - "context" - "errors" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin" - "github.com/reearth/reearth-backend/pkg/scene" -) - -type PluginControllerConfig struct { - PluginInput func() interfaces.Plugin -} - -type PluginController struct { - config PluginControllerConfig -} - -func NewPluginController(config PluginControllerConfig) *PluginController { - return &PluginController{config: config} -} - -func (c *PluginController) usecase() interfaces.Plugin { - if c == nil 
{ - return nil - } - return c.config.PluginInput() -} - -func (c *PluginController) Upload(ctx context.Context, ginput *UploadPluginInput, operator *usecase.Operator) (*UploadPluginPayload, error) { - var p *plugin.Plugin - var s *scene.Scene - var err error - - if ginput.File != nil { - p, s, err = c.usecase().Upload(ctx, ginput.File.File, id.SceneID(ginput.SceneID), operator) - } else if ginput.URL != nil { - p, s, err = c.usecase().UploadFromRemote(ctx, ginput.URL, id.SceneID(ginput.SceneID), operator) - } else { - return nil, errors.New("either file or url is required") - } - if err != nil { - return nil, err - } - - return &UploadPluginPayload{ - Plugin: toPlugin(p), - Scene: toScene(s), - ScenePlugin: toScenePlugin(s.PluginSystem().Plugin(p.ID())), - }, nil -} - -func (c *PluginController) FetchPluginMetadata(ctx context.Context, operator *usecase.Operator) ([]*PluginMetadata, error) { - res, err := c.usecase().FetchPluginMetadata(ctx, operator) - if err != nil { - return nil, err - } - - pluginMetaList := make([]*PluginMetadata, 0, len(res)) - for _, md := range res { - pm, err := toPluginMetadata(md) - if err != nil { - return nil, err - } - pluginMetaList = append(pluginMetaList, pm) - } - - return pluginMetaList, nil -} diff --git a/internal/adapter/graphql/controller_project.go b/internal/adapter/graphql/controller_project.go deleted file mode 100644 index 90493e1b2..000000000 --- a/internal/adapter/graphql/controller_project.go +++ /dev/null @@ -1,112 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/visualizer" -) - -type ProjectControllerConfig struct { - ProjectInput func() interfaces.Project -} - -type ProjectController struct { - config ProjectControllerConfig -} - -func NewProjectController(config ProjectControllerConfig) 
*ProjectController { - return &ProjectController{config: config} -} - -func (c *ProjectController) usecase() interfaces.Project { - if c == nil { - return nil - } - return c.config.ProjectInput() -} - -func (c *ProjectController) Create(ctx context.Context, i *CreateProjectInput, operator *usecase.Operator) (*ProjectPayload, error) { - res, err := c.usecase().Create(ctx, interfaces.CreateProjectParam{ - TeamID: id.TeamID(i.TeamID), - Visualizer: visualizer.Visualizer(i.Visualizer), - Name: i.Name, - Description: i.Description, - ImageURL: i.ImageURL, - Alias: i.Alias, - Archived: i.Archived, - }, operator) - if err != nil { - return nil, err - } - - return &ProjectPayload{Project: toProject(res)}, nil -} - -func (c *ProjectController) Update(ctx context.Context, ginput *UpdateProjectInput, operator *usecase.Operator) (*ProjectPayload, error) { - deletePublicImage := false - if ginput.DeletePublicImage != nil { - deletePublicImage = *ginput.DeletePublicImage - } - - deleteImageURL := false - if ginput.DeleteImageURL != nil { - deleteImageURL = *ginput.DeleteImageURL - } - - res, err := c.usecase().Update(ctx, interfaces.UpdateProjectParam{ - ID: id.ProjectID(ginput.ProjectID), - Name: ginput.Name, - Description: ginput.Description, - Alias: ginput.Alias, - ImageURL: ginput.ImageURL, - Archived: ginput.Archived, - IsBasicAuthActive: ginput.IsBasicAuthActive, - BasicAuthUsername: ginput.BasicAuthUsername, - BasicAuthPassword: ginput.BasicAuthPassword, - PublicTitle: ginput.PublicTitle, - PublicDescription: ginput.PublicDescription, - PublicImage: ginput.PublicImage, - PublicNoIndex: ginput.PublicNoIndex, - DeletePublicImage: deletePublicImage, - DeleteImageURL: deleteImageURL, - }, operator) - if err != nil { - return nil, err - } - - return &ProjectPayload{Project: toProject(res)}, nil -} - -func (c *ProjectController) CheckAlias(ctx context.Context, alias string) (*CheckProjectAliasPayload, error) { - ok, err := c.usecase().CheckAlias(ctx, alias) - if err != nil { - 
return nil, err - } - - return &CheckProjectAliasPayload{Alias: alias, Available: ok}, nil -} - -func (c *ProjectController) Publish(ctx context.Context, ginput *PublishProjectInput, operator *usecase.Operator) (*ProjectPayload, error) { - res, err := c.usecase().Publish(ctx, interfaces.PublishProjectParam{ - ID: id.ProjectID(ginput.ProjectID), - Alias: ginput.Alias, - Status: fromPublishmentStatus(ginput.Status), - }, operator) - if err != nil { - return nil, err - } - - return &ProjectPayload{Project: toProject(res)}, nil -} - -func (c *ProjectController) Delete(ctx context.Context, ginput *DeleteProjectInput, operator *usecase.Operator) (*DeleteProjectPayload, error) { - err := c.usecase().Delete(ctx, id.ProjectID(ginput.ProjectID), operator) - if err != nil { - return nil, err - } - - return &DeleteProjectPayload{ProjectID: ginput.ProjectID}, nil -} diff --git a/internal/adapter/graphql/controller_property.go b/internal/adapter/graphql/controller_property.go deleted file mode 100644 index ccfe2a2fe..000000000 --- a/internal/adapter/graphql/controller_property.go +++ /dev/null @@ -1,200 +0,0 @@ -package graphql - -import ( - "context" - "errors" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/property" -) - -type PropertyControllerConfig struct { - PropertyInput func() interfaces.Property -} - -type PropertyController struct { - config PropertyControllerConfig -} - -func NewPropertyController(config PropertyControllerConfig) *PropertyController { - return &PropertyController{config: config} -} - -func (c *PropertyController) usecase() interfaces.Property { - if c == nil { - return nil - } - return c.config.PropertyInput() -} - -func (c *PropertyController) UpdateValue(ctx context.Context, p id.ID, si *id.PropertySchemaFieldID, ii *id.ID, f id.PropertySchemaFieldID, val interface{}, t ValueType, 
operator *usecase.Operator) (*PropertyFieldPayload, error) { - v, ok := fromPropertyValueAndType(val, t) - if !ok { - return nil, errors.New("invalid value") - } - - pp, pgl, pg, pf, err := c.usecase().UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ - PropertyID: id.PropertyID(p), - Pointer: fromPointer(si, ii, &f), - Value: v, - }, operator) - if err != nil { - return nil, err - } - - return &PropertyFieldPayload{ - Property: toProperty(pp), - PropertyField: toPropertyField(pf, pp, pgl, pg), - }, nil -} - -func (c *PropertyController) RemoveField(ctx context.Context, ginput *RemovePropertyFieldInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { - p, err := c.usecase().RemoveField(ctx, interfaces.RemovePropertyFieldParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), - }, operator) - if err != nil { - return nil, err - } - - return &PropertyFieldPayload{ - Property: toProperty(p), - }, nil -} - -func (c *PropertyController) UploadFile(ctx context.Context, ginput *UploadFileToPropertyInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { - p, pgl, pg, pf, err := c.usecase().UploadFile(ctx, interfaces.UploadFileParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), - File: fromFile(&ginput.File), - }, operator) - if err != nil { - return nil, err - } - - return &PropertyFieldPayload{ - Property: toProperty(p), - PropertyField: toPropertyField(pf, p, pgl, pg), - }, nil -} - -func (c *PropertyController) LinkValue(ctx context.Context, ginput *LinkDatasetToPropertyValueInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { - p, pgl, pg, pf, err := c.usecase().LinkValue(ctx, interfaces.LinkPropertyValueParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), - Links: fromPropertyFieldLink( - 
ginput.DatasetSchemaIds, - ginput.DatasetIds, - ginput.DatasetSchemaFieldIds, - ), - }, operator) - if err != nil { - return nil, err - } - - return &PropertyFieldPayload{ - Property: toProperty(p), - PropertyField: toPropertyField(pf, p, pgl, pg), - }, nil -} - -func (c *PropertyController) UnlinkValue(ctx context.Context, ginput *UnlinkPropertyValueInput, operator *usecase.Operator) (*PropertyFieldPayload, error) { - p, pgl, pg, pf, err := c.usecase().UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(ginput.SchemaItemID, ginput.ItemID, &ginput.FieldID), - }, operator) - if err != nil { - return nil, err - } - - return &PropertyFieldPayload{ - Property: toProperty(p), - PropertyField: toPropertyField(pf, p, pgl, pg), - }, nil -} - -func (c *PropertyController) AddItem(ctx context.Context, ginput *AddPropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { - var v *property.Value - if ginput.NameFieldType != nil { - v, _ = fromPropertyValueAndType(ginput.NameFieldValue, *ginput.NameFieldType) - } - - p, pgl, pi, err := c.usecase().AddItem(ctx, interfaces.AddPropertyItemParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(&ginput.SchemaItemID, nil, nil), - Index: ginput.Index, - NameFieldValue: v, - }, operator) - - if err != nil { - return nil, err - } - - return &PropertyItemPayload{ - Property: toProperty(p), - PropertyItem: toPropertyItem(pi, p, pgl), - }, nil -} - -func (c *PropertyController) MoveItem(ctx context.Context, ginput *MovePropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { - p, pgl, pi, err := c.usecase().MoveItem(ctx, interfaces.MovePropertyItemParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(&ginput.SchemaItemID, &ginput.ItemID, nil), - Index: ginput.Index, - }, operator) - if err != nil { - return nil, err - } - - return &PropertyItemPayload{ - Property: toProperty(p), - 
PropertyItem: toPropertyItem(pi, p, pgl), - }, nil -} - -func (c *PropertyController) RemoveItem(ctx context.Context, ginput *RemovePropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { - p, err := c.usecase().RemoveItem(ctx, interfaces.RemovePropertyItemParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(&ginput.SchemaItemID, &ginput.ItemID, nil), - }, operator) - if err != nil { - return nil, err - } - - return &PropertyItemPayload{ - Property: toProperty(p), - }, nil -} - -func (c *PropertyController) UpdateItems(ctx context.Context, ginput *UpdatePropertyItemInput, operator *usecase.Operator) (*PropertyItemPayload, error) { - op := make([]interfaces.UpdatePropertyItemsOperationParam, 0, len(ginput.Operations)) - for _, o := range ginput.Operations { - var v *property.Value - if o.NameFieldType != nil { - v, _ = fromPropertyValueAndType(o.NameFieldValue, *o.NameFieldType) - } - - op = append(op, interfaces.UpdatePropertyItemsOperationParam{ - Operation: fromListOperation(o.Operation), - ItemID: id.PropertyItemIDFromRefID(o.ItemID), - Index: o.Index, - NameFieldValue: v, - }) - } - - p, err2 := c.usecase().UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ - PropertyID: id.PropertyID(ginput.PropertyID), - Pointer: fromPointer(&ginput.SchemaItemID, nil, nil), - Operations: op, - }, operator) - - if err2 != nil { - return nil, err2 - } - - return &PropertyItemPayload{ - Property: toProperty(p), - }, nil -} diff --git a/internal/adapter/graphql/controller_scene.go b/internal/adapter/graphql/controller_scene.go deleted file mode 100644 index bdcfd6e80..000000000 --- a/internal/adapter/graphql/controller_scene.go +++ /dev/null @@ -1,130 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" -) - -type SceneControllerConfig struct { - SceneInput func() 
interfaces.Scene -} - -type SceneController struct { - config SceneControllerConfig -} - -func NewSceneController(config SceneControllerConfig) *SceneController { - return &SceneController{config: config} -} - -func (c *SceneController) usecase() interfaces.Scene { - if c == nil { - return nil - } - return c.config.SceneInput() -} - -func (c *SceneController) Create(ctx context.Context, ginput *CreateSceneInput, operator *usecase.Operator) (*CreateScenePayload, error) { - res, err := c.usecase().Create( - ctx, - id.ProjectID(ginput.ProjectID), - operator, - ) - if err != nil { - return nil, err - } - - return &CreateScenePayload{Scene: toScene(res)}, nil -} - -func (c *SceneController) AddWidget(ctx context.Context, ginput *AddWidgetInput, operator *usecase.Operator) (*AddWidgetPayload, error) { - scene, widget, err := c.usecase().AddWidget( - ctx, - id.SceneID(ginput.SceneID), - ginput.PluginID, - id.PluginExtensionID(ginput.ExtensionID), - operator, - ) - if err != nil { - return nil, err - } - - return &AddWidgetPayload{Scene: toScene(scene), SceneWidget: toSceneWidget(widget)}, nil -} - -func (c *SceneController) UpdateWidget(ctx context.Context, ginput *UpdateWidgetInput, operator *usecase.Operator) (*UpdateWidgetPayload, error) { - scene, widget, err := c.usecase().UpdateWidget(ctx, interfaces.UpdateWidgetParam{ - SceneID: id.SceneID(ginput.SceneID), - PluginID: ginput.PluginID, - ExtensionID: id.PluginExtensionID(ginput.ExtensionID), - Enabled: ginput.Enabled, - }, operator) - if err != nil { - return nil, err - } - - return &UpdateWidgetPayload{Scene: toScene(scene), SceneWidget: toSceneWidget(widget)}, nil -} - -func (c *SceneController) RemoveWidget(ctx context.Context, ginput *RemoveWidgetInput, operator *usecase.Operator) (*RemoveWidgetPayload, error) { - scene, err := c.usecase().RemoveWidget(ctx, - id.SceneID(ginput.SceneID), - id.PluginID(ginput.PluginID), - id.PluginExtensionID(ginput.ExtensionID), - operator, - ) - if err != nil { - return nil, err 
- } - - return &RemoveWidgetPayload{Scene: toScene(scene), PluginID: ginput.PluginID, ExtensionID: ginput.ExtensionID}, nil -} - -func (c *SceneController) InstallPlugin(ctx context.Context, ginput *InstallPluginInput, operator *usecase.Operator) (*InstallPluginPayload, error) { - scene, pl, pr, err := c.usecase().InstallPlugin(ctx, - id.SceneID(ginput.SceneID), - ginput.PluginID, - operator, - ) - if err != nil { - return nil, err - } - - return &InstallPluginPayload{Scene: toScene(scene), ScenePlugin: &ScenePlugin{ - PluginID: pl, - PropertyID: pr.IDRef(), - }}, nil -} - -func (c *SceneController) UninstallPlugin(ctx context.Context, ginput *UninstallPluginInput, operator *usecase.Operator) (*UninstallPluginPayload, error) { - scene, err := c.usecase().UninstallPlugin(ctx, - id.SceneID(ginput.SceneID), - id.PluginID(ginput.PluginID), - operator, - ) - if err != nil { - return nil, err - } - - return &UninstallPluginPayload{PluginID: ginput.PluginID, Scene: toScene(scene)}, nil -} - -func (c *SceneController) UpgradePlugin(ctx context.Context, ginput *UpgradePluginInput, operator *usecase.Operator) (*UpgradePluginPayload, error) { - s, err := c.usecase().UpgradePlugin(ctx, - id.SceneID(ginput.SceneID), - ginput.PluginID, - ginput.ToPluginID, - operator, - ) - if err != nil { - return nil, err - } - - return &UpgradePluginPayload{ - Scene: toScene(s), - ScenePlugin: toScenePlugin(s.PluginSystem().Plugin(ginput.ToPluginID)), - }, nil -} diff --git a/internal/adapter/graphql/controller_team.go b/internal/adapter/graphql/controller_team.go deleted file mode 100644 index f511052a5..000000000 --- a/internal/adapter/graphql/controller_team.go +++ /dev/null @@ -1,82 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/user" -) - -type TeamControllerConfig struct { - 
TeamInput func() interfaces.Team -} - -type TeamController struct { - config TeamControllerConfig -} - -func NewTeamController(config TeamControllerConfig) *TeamController { - return &TeamController{config: config} -} - -func (c *TeamController) usecase() interfaces.Team { - if c == nil { - return nil - } - return c.config.TeamInput() -} - -func (c *TeamController) Create(ctx context.Context, i *CreateTeamInput, u *user.User) (*CreateTeamPayload, error) { - res, err := c.usecase().Create(ctx, i.Name, u.ID()) - if err != nil { - return nil, err - } - - return &CreateTeamPayload{Team: toTeam(res)}, nil -} - -func (c *TeamController) Update(ctx context.Context, i *UpdateTeamInput, o *usecase.Operator) (*UpdateTeamPayload, error) { - res, err := c.usecase().Update(ctx, id.TeamID(i.TeamID), i.Name, o) - if err != nil { - return nil, err - } - - return &UpdateTeamPayload{Team: toTeam(res)}, nil -} - -func (c *TeamController) AddMember(ctx context.Context, i *AddMemberToTeamInput, o *usecase.Operator) (*AddMemberToTeamPayload, error) { - res, err := c.usecase().AddMember(ctx, id.TeamID(i.TeamID), id.UserID(i.UserID), fromRole(i.Role), o) - if err != nil { - return nil, err - } - - return &AddMemberToTeamPayload{Team: toTeam(res)}, nil -} - -func (c *TeamController) RemoveMember(ctx context.Context, i *RemoveMemberFromTeamInput, o *usecase.Operator) (*RemoveMemberFromTeamPayload, error) { - res, err := c.usecase().RemoveMember(ctx, id.TeamID(i.TeamID), id.UserID(i.UserID), o) - if err != nil { - return nil, err - } - - return &RemoveMemberFromTeamPayload{Team: toTeam(res)}, nil -} - -func (c *TeamController) UpdateMember(ctx context.Context, i *UpdateMemberOfTeamInput, o *usecase.Operator) (*UpdateMemberOfTeamPayload, error) { - res, err := c.usecase().UpdateMember(ctx, id.TeamID(i.TeamID), id.UserID(i.UserID), fromRole(i.Role), o) - if err != nil { - return nil, err - } - - return &UpdateMemberOfTeamPayload{Team: toTeam(res)}, nil -} - -func (c *TeamController) Remove(ctx 
context.Context, team id.ID, o *usecase.Operator) (*DeleteTeamPayload, error) { - if err := c.usecase().Remove(ctx, id.TeamID(team), o); err != nil { - return nil, err - } - - return &DeleteTeamPayload{TeamID: team}, nil -} diff --git a/internal/adapter/graphql/controller_user.go b/internal/adapter/graphql/controller_user.go deleted file mode 100644 index 3522bbba6..000000000 --- a/internal/adapter/graphql/controller_user.go +++ /dev/null @@ -1,103 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" -) - -type UserControllerConfig struct { - UserInput func() interfaces.User -} - -type UserController struct { - config UserControllerConfig -} - -func NewUserController(config UserControllerConfig) *UserController { - return &UserController{config: config} -} - -func (c *UserController) usecase() interfaces.User { - if c == nil { - return nil - } - return c.config.UserInput() -} - -func (c *UserController) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Operator) ([]*User, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - users := make([]*User, 0, len(res)) - for _, u := range res { - users = append(users, ToUser(u)) - } - - return users, nil -} - -func (c *UserController) Signup(ctx context.Context, ginput *SignupInput, sub string) (*SignupPayload, error) { - secret := "" - if ginput.Secret != nil { - secret = *ginput.Secret - } - u, team, err := c.usecase().Signup(ctx, interfaces.SignupParam{ - Sub: sub, - Lang: ginput.Lang, - Theme: toTheme(ginput.Theme), - UserID: id.UserIDFromRefID(ginput.UserID), - TeamID: id.TeamIDFromRefID(ginput.TeamID), - Secret: secret, - }) - if err != nil { - return nil, err - } - return &SignupPayload{User: ToUser(u), Team: toTeam(team)}, nil -} - -func (c *UserController) UpdateMe(ctx 
context.Context, ginput *UpdateMeInput, operator *usecase.Operator) (*UpdateMePayload, error) { - res, err := c.usecase().UpdateMe(ctx, interfaces.UpdateMeParam{ - Name: ginput.Name, - Email: ginput.Email, - Lang: ginput.Lang, - Theme: toTheme(ginput.Theme), - Password: ginput.Password, - PasswordConfirmation: ginput.PasswordConfirmation, - }, operator) - if err != nil { - return nil, err - } - - return &UpdateMePayload{User: ToUser(res)}, nil -} - -func (c *UserController) RemoveMyAuth(ctx context.Context, ginput *RemoveMyAuthInput, operator *usecase.Operator) (*UpdateMePayload, error) { - res, err := c.usecase().RemoveMyAuth(ctx, ginput.Auth, operator) - if err != nil { - return nil, err - } - - return &UpdateMePayload{User: ToUser(res)}, nil -} - -func (c *UserController) SearchUser(ctx context.Context, nameOrEmail string, operator *usecase.Operator) (*SearchedUser, error) { - res, err := c.usecase().SearchUser(ctx, nameOrEmail, operator) - if err != nil { - return nil, err - } - - return toSearchedUser(res), nil -} - -func (c *UserController) DeleteMe(ctx context.Context, user id.ID, operator *usecase.Operator) (*DeleteMePayload, error) { - if err := c.usecase().DeleteMe(ctx, id.UserID(user), operator); err != nil { - return nil, err - } - - return &DeleteMePayload{UserID: user}, nil -} diff --git a/internal/adapter/graphql/loader_dataset.go b/internal/adapter/graphql/loader_dataset.go deleted file mode 100644 index 11794914d..000000000 --- a/internal/adapter/graphql/loader_dataset.go +++ /dev/null @@ -1,50 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (c *DatasetController) Fetch(ctx context.Context, ids []id.DatasetID, operator *usecase.Operator) ([]*Dataset, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - datasets := make([]*Dataset, 0, len(res)) - for _, d := range res { - 
datasets = append(datasets, toDataset(d)) - } - - return datasets, nil -} - -func (c *DatasetController) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID, operator *usecase.Operator) ([]*DatasetSchema, []error) { - res, err := c.usecase().FetchSchema(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - schemas := make([]*DatasetSchema, 0, len(res)) - for _, d := range res { - schemas = append(schemas, toDatasetSchema(d)) - } - - return schemas, nil -} - -func (c *DatasetController) GraphFetch(ctx context.Context, i id.DatasetID, depth int, operator *usecase.Operator) ([]*Dataset, []error) { - res, err := c.usecase().GraphFetch(ctx, i, depth, operator) - if err != nil { - return nil, []error{err} - } - - datasets := make([]*Dataset, 0, len(res)) - for _, d := range res { - datasets = append(datasets, toDataset(d)) - } - - return datasets, nil -} diff --git a/internal/adapter/graphql/loader_layer.go b/internal/adapter/graphql/loader_layer.go deleted file mode 100644 index 417df7756..000000000 --- a/internal/adapter/graphql/loader_layer.go +++ /dev/null @@ -1,91 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (c *LayerController) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*Layer, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - layers := make([]*Layer, 0, len(res)) - for _, l := range res { - if l == nil { - layers = append(layers, nil) - } else { - layer := toLayer(*l, nil) - layers = append(layers, &layer) - } - } - - return layers, nil -} - -func (c *LayerController) FetchGroup(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*LayerGroup, []error) { - res, err := c.usecase().FetchGroup(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - layerGroups := make([]*LayerGroup, 0, len(res)) 
- for _, l := range res { - layerGroups = append(layerGroups, toLayerGroup(l, nil)) - } - - return layerGroups, nil -} - -func (c *LayerController) FetchItem(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*LayerItem, []error) { - res, err := c.usecase().FetchItem(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - layerItems := make([]*LayerItem, 0, len(res)) - for _, l := range res { - layerItems = append(layerItems, toLayerItem(l, nil)) - } - - return layerItems, nil -} - -func (c *LayerController) FetchParent(ctx context.Context, lid id.LayerID, operator *usecase.Operator) (*LayerGroup, error) { - res, err := c.usecase().FetchParent(ctx, id.LayerID(lid), operator) - if err != nil { - return nil, err - } - - return toLayerGroup(res, nil), nil -} - -func (c *LayerController) FetchByProperty(ctx context.Context, pid id.PropertyID, operator *usecase.Operator) (Layer, error) { - res, err := c.usecase().FetchByProperty(ctx, pid, operator) - if err != nil { - return nil, err - } - - return toLayer(res, nil), nil -} - -func (c *LayerController) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID, operator *usecase.Operator) (*MergedLayer, error) { - res, err2 := c.usecase().FetchMerged(ctx, org, parent, operator) - if err2 != nil { - return nil, err2 - } - - return toMergedLayer(res), nil -} - -func (c *LayerController) FetchParentAndMerged(ctx context.Context, org id.LayerID, operator *usecase.Operator) (*MergedLayer, error) { - res, err2 := c.usecase().FetchParentAndMerged(ctx, org, operator) - if err2 != nil { - return nil, err2 - } - - return toMergedLayer(res), nil -} diff --git a/internal/adapter/graphql/loader_plugin.go b/internal/adapter/graphql/loader_plugin.go deleted file mode 100644 index dacc31500..000000000 --- a/internal/adapter/graphql/loader_plugin.go +++ /dev/null @@ -1,22 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - 
"github.com/reearth/reearth-backend/pkg/id" -) - -func (c *PluginController) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*Plugin, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - plugins := make([]*Plugin, 0, len(res)) - for _, pl := range res { - plugins = append(plugins, toPlugin(pl)) - } - - return plugins, nil -} diff --git a/internal/adapter/graphql/loader_project.go b/internal/adapter/graphql/loader_project.go deleted file mode 100644 index 224f35212..000000000 --- a/internal/adapter/graphql/loader_project.go +++ /dev/null @@ -1,47 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (c *ProjectController) Fetch(ctx context.Context, ids []id.ProjectID, operator *usecase.Operator) ([]*Project, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - projects := make([]*Project, 0, len(res)) - for _, project := range res { - projects = append(projects, toProject(project)) - } - - return projects, nil -} - -func (c *ProjectController) FindByTeam(ctx context.Context, teamID id.TeamID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor, operator *usecase.Operator) (*ProjectConnection, error) { - res, pi, err := c.usecase().FindByTeam(ctx, teamID, usecase.NewPagination(first, last, before, after), operator) - if err != nil { - return nil, err - } - - edges := make([]*ProjectEdge, 0, len(res)) - nodes := make([]*Project, 0, len(res)) - for _, p := range res { - prj := toProject(p) - edges = append(edges, &ProjectEdge{ - Node: prj, - Cursor: usecase.Cursor(prj.ID.String()), - }) - nodes = append(nodes, prj) - } - - return &ProjectConnection{ - Edges: edges, - Nodes: nodes, - PageInfo: toPageInfo(pi), - TotalCount: pi.TotalCount(), - }, nil -} diff --git 
a/internal/adapter/graphql/loader_property.go b/internal/adapter/graphql/loader_property.go deleted file mode 100644 index 08f75e7dc..000000000 --- a/internal/adapter/graphql/loader_property.go +++ /dev/null @@ -1,46 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (c *PropertyController) Fetch(ctx context.Context, ids []id.PropertyID, operator *usecase.Operator) ([]*Property, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - properties := make([]*Property, 0, len(res)) - for _, property := range res { - properties = append(properties, toProperty(property)) - } - - return properties, nil -} - -func (c *PropertyController) FetchSchema(ctx context.Context, ids []id.PropertySchemaID, operator *usecase.Operator) ([]*PropertySchema, []error) { - res, err := c.usecase().FetchSchema(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - schemas := make([]*PropertySchema, 0, len(res)) - for _, propertySchema := range res { - schemas = append(schemas, toPropertySchema(propertySchema)) - } - - return schemas, nil -} - -func (c *PropertyController) FetchMerged(ctx context.Context, org, parent, linked *id.ID, operator *usecase.Operator) (*MergedProperty, error) { - res, err := c.usecase().FetchMerged(ctx, id.PropertyIDFromRefID(org), id.PropertyIDFromRefID(parent), id.DatasetIDFromRefID(linked), operator) - - if err != nil { - return nil, err - } - - return toMergedProperty(res), nil -} diff --git a/internal/adapter/graphql/loader_scene.go b/internal/adapter/graphql/loader_scene.go deleted file mode 100644 index 29808c57a..000000000 --- a/internal/adapter/graphql/loader_scene.go +++ /dev/null @@ -1,56 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (c 
*SceneController) Fetch(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]*Scene, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - scenes := make([]*Scene, 0, len(res)) - for _, scene := range res { - scenes = append(scenes, toScene(scene)) - } - return scenes, nil -} - -func (c *SceneController) FindByProject(ctx context.Context, projectID id.ProjectID, operator *usecase.Operator) (*Scene, error) { - res, err := c.usecase().FindByProject(ctx, projectID, operator) - if err != nil { - return nil, err - } - - return toScene(res), nil -} - -func (c *SceneController) FetchLock(ctx context.Context, sid id.SceneID, operator *usecase.Operator) (*SceneLockMode, error) { - res, err := c.usecase().FetchLock(ctx, []id.SceneID{sid}, operator) - if err != nil { - return nil, err - } - if len(res) > 0 { - return nil, nil - } - sl := toSceneLockMode(res[0]) - return &sl, nil -} - -func (c *SceneController) FetchLockAll(ctx context.Context, sid []id.SceneID, operator *usecase.Operator) ([]SceneLockMode, []error) { - res, err := c.usecase().FetchLock(ctx, sid, operator) - if err != nil { - return nil, []error{err} - } - - res2 := make([]SceneLockMode, 0, len(res)) - for _, r := range res { - res2 = append(res2, toSceneLockMode(r)) - } - - return res2, nil -} diff --git a/internal/adapter/graphql/loader_team.go b/internal/adapter/graphql/loader_team.go deleted file mode 100644 index 2337ca770..000000000 --- a/internal/adapter/graphql/loader_team.go +++ /dev/null @@ -1 +0,0 @@ -package graphql diff --git a/internal/adapter/graphql/loader_user.go b/internal/adapter/graphql/loader_user.go deleted file mode 100644 index 0da555abd..000000000 --- a/internal/adapter/graphql/loader_user.go +++ /dev/null @@ -1,33 +0,0 @@ -package graphql - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (c *TeamController) Fetch(ctx 
context.Context, ids []id.TeamID, operator *usecase.Operator) ([]*Team, []error) { - res, err := c.usecase().Fetch(ctx, ids, operator) - if err != nil { - return nil, []error{err} - } - - teams := make([]*Team, 0, len(res)) - for _, t := range res { - teams = append(teams, toTeam(t)) - } - return teams, nil -} - -func (c *TeamController) FindByUser(ctx context.Context, uid id.UserID, operator *usecase.Operator) ([]*Team, error) { - res, err := c.usecase().FindByUser(ctx, uid, operator) - if err != nil { - return nil, err - } - teams := make([]*Team, 0, len(res)) - for _, t := range res { - teams = append(teams, toTeam(t)) - } - return teams, nil -} diff --git a/internal/app/app.go b/internal/app/app.go index cdbf1fafb..0bf9b1ea6 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -7,7 +7,7 @@ import ( "github.com/99designs/gqlgen/graphql/playground" "github.com/labstack/echo/v4" "github.com/labstack/echo/v4/middleware" - "github.com/reearth/reearth-backend/internal/adapter/graphql" + "github.com/reearth/reearth-backend/internal/usecase/interactor" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/rerror" echotracer "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo" @@ -64,14 +64,16 @@ func initEcho(cfg *ServerConfig) *echo.Echo { )) } + usecases := interactor.NewContainer(cfg.Repos, cfg.Gateways, interactor.ContainerConfig{ + SignupSecret: cfg.Config.SignupSecret, + }) + api := e.Group("/api") publicAPI(e, api, cfg.Config, cfg.Repos, cfg.Gateways) jwks := &JwksSyncOnce{} privateApi := api.Group("") authRequired(privateApi, jwks, cfg) - graphqlAPI(e, privateApi, cfg, graphql.NewContainer(cfg.Repos, cfg.Gateways, graphql.ContainerConfig{ - SignupSecret: cfg.Config.SignupSecret, - })) + graphqlAPI(e, privateApi, cfg, usecases) privateAPI(e, privateApi, cfg.Repos) published := e.Group("/p") diff --git a/internal/app/auth.go b/internal/app/auth.go index 3ddf5f68f..4743aea1e 100644 --- 
a/internal/app/auth.go +++ b/internal/app/auth.go @@ -4,7 +4,7 @@ import ( "context" "github.com/labstack/echo/v4" - "github.com/reearth/reearth-backend/internal/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" @@ -29,7 +29,7 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } // attach sub - ctx = context.WithValue(ctx, graphql.ContextSub, sub) + ctx = context.WithValue(ctx, gql.ContextSub, sub) // debug mode if cfg.Debug { @@ -99,10 +99,10 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { if err != nil { return err } - ctx = context.WithValue(ctx, graphql.ContextOperator, op) + ctx = context.WithValue(ctx, gql.ContextOperator, op) // attach user - ctx = context.WithValue(ctx, graphql.ContextUser, u) + ctx = context.WithValue(ctx, gql.ContextUser, u) c.SetRequest(req.WithContext(ctx)) return next(c) diff --git a/internal/app/graphql.go b/internal/app/graphql.go index d331f93bc..9f7926d90 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -6,7 +6,7 @@ import ( "github.com/99designs/gqlgen-contrib/gqlopencensus" "github.com/99designs/gqlgen-contrib/gqlopentracing" - graphql1 "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql" "github.com/99designs/gqlgen/graphql/handler" "github.com/99designs/gqlgen/graphql/handler/extension" "github.com/99designs/gqlgen/graphql/handler/lru" @@ -14,38 +14,20 @@ import ( "github.com/labstack/echo/v4" "github.com/vektah/gqlparser/v2/gqlerror" - "github.com/reearth/reearth-backend/internal/adapter/graphql" - infra_graphql "github.com/reearth/reearth-backend/internal/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/adapter/gql" + 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/rerror" ) const enableDataLoaders = true -func getOperator(ctx context.Context) *usecase.Operator { - if v := ctx.Value(infra_graphql.ContextOperator); v != nil { - if v2, ok := v.(*usecase.Operator); ok { - return v2 - } - } - return nil -} - -func dataLoaderMiddleware(container *graphql.Container) echo.MiddlewareFunc { +func dataLoaderMiddleware(container gql.Loaders) echo.MiddlewareFunc { return func(next echo.HandlerFunc) echo.HandlerFunc { return func(echoCtx echo.Context) error { req := echoCtx.Request() ctx := req.Context() - var dl *dataloader.DataLoaders - if enableDataLoaders { - dl = dataloader.NewDataLoaders(ctx, container, getOperator(ctx)) - } else { - dl = dataloader.NewOrdinaryDataLoaders(ctx, container, getOperator(ctx)) - } - - ctx = context.WithValue(ctx, dataloader.DataLoadersKey(), dl) + ctx = context.WithValue(ctx, gql.DataLoadersKey(), container.DataLoadersWith(ctx, enableDataLoaders)) echoCtx.SetRequest(req.WithContext(ctx)) return next(echoCtx) } @@ -60,8 +42,8 @@ func tracerMiddleware(enabled bool) echo.MiddlewareFunc { } req := echoCtx.Request() ctx := req.Context() - t := &infra_graphql.Tracer{} - echoCtx.SetRequest(req.WithContext(infra_graphql.AttachTracer(ctx, t))) + t := &gql.Tracer{} + echoCtx.SetRequest(req.WithContext(gql.AttachTracer(ctx, t))) defer t.Print() return next(echoCtx) } @@ -72,9 +54,10 @@ func graphqlAPI( ec *echo.Echo, r *echo.Group, conf *ServerConfig, - controllers *graphql.Container, + usecases interfaces.Container, ) { playgroundEnabled := conf.Debug || conf.Config.Dev + controllers := gql.NewLoaders(usecases) if playgroundEnabled { r.GET("/graphql", echo.WrapHandler( @@ -82,11 +65,8 @@ func graphqlAPI( )) } - schema := infra_graphql.NewExecutableSchema(infra_graphql.Config{ - Resolvers: infra_graphql.NewResolver(infra_graphql.ResolverConfig{ - Controllers: controllers, - Debug: conf.Debug, - }), + schema := 
gql.NewExecutableSchema(gql.Config{ + Resolvers: gql.NewResolver(controllers, conf.Debug), }) srv := handler.NewDefaultServer(schema) @@ -112,9 +92,9 @@ func graphqlAPI( ec.Logger.Errorf("%+v", err2) } } - return gqlerror.ErrorPathf(graphql1.GetFieldContext(ctx).Path(), e.Error()) + return gqlerror.ErrorPathf(graphql.GetFieldContext(ctx).Path(), e.Error()) } - return graphql1.DefaultErrorPresenter(ctx, e) + return graphql.DefaultErrorPresenter(ctx, e) }, ) diff --git a/internal/app/private.go b/internal/app/private.go index 6b5b376ef..4ee108cd1 100644 --- a/internal/app/private.go +++ b/internal/app/private.go @@ -8,7 +8,7 @@ import ( "strings" "github.com/labstack/echo/v4" - "github.com/reearth/reearth-backend/internal/graphql" + "github.com/reearth/reearth-backend/internal/adapter/gql" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" @@ -60,11 +60,11 @@ func privateAPI( ) { r.GET("/layers/:param", func(c echo.Context) error { ctx := c.Request().Context() - user := c.Request().Context().Value(graphql.ContextUser).(*user.User) + user := c.Request().Context().Value(gql.ContextUser).(*user.User) if user == nil { return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrUnauthorized} } - op := c.Request().Context().Value(graphql.ContextOperator).(*usecase.Operator) + op := c.Request().Context().Value(gql.ContextOperator).(*usecase.Operator) if op == nil { return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} } diff --git a/internal/graphql/dataloader/context.go b/internal/graphql/dataloader/context.go deleted file mode 100644 index b0e56543f..000000000 --- a/internal/graphql/dataloader/context.go +++ /dev/null @@ -1,73 +0,0 @@ -package dataloader - -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_gen.go -m=Dataset -m=Layer -m=Plugin -m=Project -m=Property -m=Scene -m=Team 
-m=User -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_layer_item_gen.go -controller=Layer -method=FetchItem -id=LayerID -m=LayerItem -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_layer_group_gen.go -controller=Layer -method=FetchGroup -id=LayerID -m=LayerGroup -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_dataset_schema_gen.go -controller=Dataset -method=FetchSchema -m=DatasetSchema -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen -template=loader.tmpl -output=loader_property_schema_gen.go -controller=Property -method=FetchSchema -m=PropertySchema - -import ( - "context" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" -) - -type dataLoadersKey struct{} - -type DataLoaders struct { - Dataset DatasetDataLoader - DatasetSchema DatasetSchemaDataLoader - LayerItem LayerItemDataLoader - LayerGroup LayerGroupDataLoader - Layer LayerDataLoader - Plugin PluginDataLoader - Project ProjectDataLoader - Property PropertyDataLoader - PropertySchema PropertySchemaDataLoader - Scene SceneDataLoader - Team TeamDataLoader - User UserDataLoader -} - -func DataLoadersFromContext(ctx context.Context) *DataLoaders { - return ctx.Value(dataLoadersKey{}).(*DataLoaders) -} - -func DataLoadersKey() interface{} { - return dataLoadersKey{} -} - -func NewDataLoaders(ctx context.Context, c *graphql.Container, o *usecase.Operator) *DataLoaders { - return &DataLoaders{ - Dataset: newDataset(ctx, c.DatasetController, o), - DatasetSchema: newDatasetSchema(ctx, c.DatasetController, o), - LayerItem: newLayerItem(ctx, c.LayerController, o), - LayerGroup: newLayerGroup(ctx, c.LayerController, o), - Layer: newLayer(ctx, c.LayerController, o), - Plugin: newPlugin(ctx, c.PluginController, o), - Project: newProject(ctx, 
c.ProjectController, o), - Property: newProperty(ctx, c.PropertyController, o), - PropertySchema: newPropertySchema(ctx, c.PropertyController, o), - Scene: newScene(ctx, c.SceneController, o), - Team: newTeam(ctx, c.TeamController, o), - User: newUser(ctx, c.UserController, o), - } -} - -func NewOrdinaryDataLoaders(ctx context.Context, c *graphql.Container, o *usecase.Operator) *DataLoaders { - return &DataLoaders{ - Dataset: newOrdinaryDataset(ctx, c.DatasetController, o), - DatasetSchema: newOrdinaryDatasetSchema(ctx, c.DatasetController, o), - LayerItem: newOrdinaryLayerItem(ctx, c.LayerController, o), - LayerGroup: newOrdinaryLayerGroup(ctx, c.LayerController, o), - Layer: newOrdinaryLayer(ctx, c.LayerController, o), - Plugin: newOrdinaryPlugin(ctx, c.PluginController, o), - Project: newOrdinaryProject(ctx, c.ProjectController, o), - Property: newOrdinaryProperty(ctx, c.PropertyController, o), - PropertySchema: newOrdinaryPropertySchema(ctx, c.PropertyController, o), - Scene: newOrdinaryScene(ctx, c.SceneController, o), - Team: newOrdinaryTeam(ctx, c.TeamController, o), - User: newOrdinaryUser(ctx, c.UserController, o), - } -} diff --git a/internal/graphql/dataloader/loader.tmpl b/internal/graphql/dataloader/loader.tmpl deleted file mode 100644 index 5268e4c11..000000000 --- a/internal/graphql/dataloader/loader.tmpl +++ /dev/null @@ -1,55 +0,0 @@ -package {{.PackageName}} -{{$controller := ""}}{{if .Flags.controller}}{{$controller = index .Flags.controller 0}}{{end}} -{{$id := ""}}{{if .Flags.id}}{{$id = index .Flags.id 0}}{{end}} -{{$method := "Fetch"}}{{if .Flags.method}}{{$method = index .Flags.method 0}}{{end}} -import ( - "context" - "time" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) -{{range .Flags.m}} -type {{camel .}}DataLoader interface { - Load(id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) (*graphql.{{camel .}}, error) - 
LoadAll([]id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) -} - -func new{{camel .}}(ctx context.Context, c *graphql.{{if $controller}}{{$controller}}{{else}}{{camel .}}{{end}}Controller, o *usecase.Operator) *{{camel .}}Loader { - return New{{camel .}}Loader({{camel .}}LoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) { - return c.{{$method}}(ctx, keys, o) - }, - }) -} - -func newOrdinary{{camel .}}(ctx context.Context, c *graphql.{{if $controller}}{{$controller}}{{else}}{{camel .}}{{end}}Controller, o *usecase.Operator) {{camel .}}DataLoader { - return &ordinary{{camel .}}Loader{ - fetch: func(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) { - return c.{{$method}}(ctx, keys, o) - }, - } -} - -type ordinary{{camel .}}Loader struct { - fetch func(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) -} - -func (l *ordinary{{camel .}}Loader) Load(key id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) (*graphql.{{camel .}}, error) { - res, errs := l.fetch([]id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinary{{camel .}}Loader) LoadAll(keys []id.{{if $id}}{{$id}}{{else}}{{camel .}}ID{{end}}) ([]*graphql.{{camel .}}, []error) { - return l.fetch(keys) -} -{{end}} diff --git a/internal/graphql/dataloader/loader_dataset_schema_gen.go b/internal/graphql/dataloader/loader_dataset_schema_gen.go deleted file mode 100644 index 2d90e275f..000000000 --- a/internal/graphql/dataloader/loader_dataset_schema_gen.go +++ /dev/null @@ -1,54 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package dataloader - -import ( - "context" - "time" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -type DatasetSchemaDataLoader interface { - Load(id.DatasetSchemaID) (*graphql.DatasetSchema, error) - LoadAll([]id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) -} - -func newDatasetSchema(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) *DatasetSchemaLoader { - return NewDatasetSchemaLoader(DatasetSchemaLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { - return c.FetchSchema(ctx, keys, o) - }, - }) -} - -func newOrdinaryDatasetSchema(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) DatasetSchemaDataLoader { - return &ordinaryDatasetSchemaLoader{ - fetch: func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { - return c.FetchSchema(ctx, keys, o) - }, - } -} - -type ordinaryDatasetSchemaLoader struct { - fetch func(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) -} - -func (l *ordinaryDatasetSchemaLoader) Load(key id.DatasetSchemaID) (*graphql.DatasetSchema, error) { - res, errs := l.fetch([]id.DatasetSchemaID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryDatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*graphql.DatasetSchema, []error) { - return l.fetch(keys) -} diff --git a/internal/graphql/dataloader/loader_gen.go b/internal/graphql/dataloader/loader_gen.go deleted file mode 100644 index ad97fd004..000000000 --- a/internal/graphql/dataloader/loader_gen.go +++ /dev/null @@ -1,348 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package dataloader - -import ( - "context" - "time" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -type DatasetDataLoader interface { - Load(id.DatasetID) (*graphql.Dataset, error) - LoadAll([]id.DatasetID) ([]*graphql.Dataset, []error) -} - -func newDataset(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) *DatasetLoader { - return NewDatasetLoader(DatasetLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.DatasetID) ([]*graphql.Dataset, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryDataset(ctx context.Context, c *graphql.DatasetController, o *usecase.Operator) DatasetDataLoader { - return &ordinaryDatasetLoader{ - fetch: func(keys []id.DatasetID) ([]*graphql.Dataset, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryDatasetLoader struct { - fetch func(keys []id.DatasetID) ([]*graphql.Dataset, []error) -} - -func (l *ordinaryDatasetLoader) Load(key id.DatasetID) (*graphql.Dataset, error) { - res, errs := l.fetch([]id.DatasetID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryDatasetLoader) LoadAll(keys []id.DatasetID) ([]*graphql.Dataset, []error) { - return l.fetch(keys) -} - -type LayerDataLoader interface { - Load(id.LayerID) (*graphql.Layer, error) - LoadAll([]id.LayerID) ([]*graphql.Layer, []error) -} - -func newLayer(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) *LayerLoader { - return NewLayerLoader(LayerLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.LayerID) ([]*graphql.Layer, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryLayer(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) LayerDataLoader { - return &ordinaryLayerLoader{ - 
fetch: func(keys []id.LayerID) ([]*graphql.Layer, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryLayerLoader struct { - fetch func(keys []id.LayerID) ([]*graphql.Layer, []error) -} - -func (l *ordinaryLayerLoader) Load(key id.LayerID) (*graphql.Layer, error) { - res, errs := l.fetch([]id.LayerID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryLayerLoader) LoadAll(keys []id.LayerID) ([]*graphql.Layer, []error) { - return l.fetch(keys) -} - -type PluginDataLoader interface { - Load(id.PluginID) (*graphql.Plugin, error) - LoadAll([]id.PluginID) ([]*graphql.Plugin, []error) -} - -func newPlugin(ctx context.Context, c *graphql.PluginController, o *usecase.Operator) *PluginLoader { - return NewPluginLoader(PluginLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.PluginID) ([]*graphql.Plugin, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryPlugin(ctx context.Context, c *graphql.PluginController, o *usecase.Operator) PluginDataLoader { - return &ordinaryPluginLoader{ - fetch: func(keys []id.PluginID) ([]*graphql.Plugin, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryPluginLoader struct { - fetch func(keys []id.PluginID) ([]*graphql.Plugin, []error) -} - -func (l *ordinaryPluginLoader) Load(key id.PluginID) (*graphql.Plugin, error) { - res, errs := l.fetch([]id.PluginID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryPluginLoader) LoadAll(keys []id.PluginID) ([]*graphql.Plugin, []error) { - return l.fetch(keys) -} - -type ProjectDataLoader interface { - Load(id.ProjectID) (*graphql.Project, error) - LoadAll([]id.ProjectID) ([]*graphql.Project, []error) -} - -func newProject(ctx context.Context, c *graphql.ProjectController, o *usecase.Operator) *ProjectLoader { - return 
NewProjectLoader(ProjectLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.ProjectID) ([]*graphql.Project, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryProject(ctx context.Context, c *graphql.ProjectController, o *usecase.Operator) ProjectDataLoader { - return &ordinaryProjectLoader{ - fetch: func(keys []id.ProjectID) ([]*graphql.Project, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryProjectLoader struct { - fetch func(keys []id.ProjectID) ([]*graphql.Project, []error) -} - -func (l *ordinaryProjectLoader) Load(key id.ProjectID) (*graphql.Project, error) { - res, errs := l.fetch([]id.ProjectID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryProjectLoader) LoadAll(keys []id.ProjectID) ([]*graphql.Project, []error) { - return l.fetch(keys) -} - -type PropertyDataLoader interface { - Load(id.PropertyID) (*graphql.Property, error) - LoadAll([]id.PropertyID) ([]*graphql.Property, []error) -} - -func newProperty(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) *PropertyLoader { - return NewPropertyLoader(PropertyLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.PropertyID) ([]*graphql.Property, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryProperty(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) PropertyDataLoader { - return &ordinaryPropertyLoader{ - fetch: func(keys []id.PropertyID) ([]*graphql.Property, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryPropertyLoader struct { - fetch func(keys []id.PropertyID) ([]*graphql.Property, []error) -} - -func (l *ordinaryPropertyLoader) Load(key id.PropertyID) (*graphql.Property, error) { - res, errs := l.fetch([]id.PropertyID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - 
} - return nil, nil -} - -func (l *ordinaryPropertyLoader) LoadAll(keys []id.PropertyID) ([]*graphql.Property, []error) { - return l.fetch(keys) -} - -type SceneDataLoader interface { - Load(id.SceneID) (*graphql.Scene, error) - LoadAll([]id.SceneID) ([]*graphql.Scene, []error) -} - -func newScene(ctx context.Context, c *graphql.SceneController, o *usecase.Operator) *SceneLoader { - return NewSceneLoader(SceneLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.SceneID) ([]*graphql.Scene, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryScene(ctx context.Context, c *graphql.SceneController, o *usecase.Operator) SceneDataLoader { - return &ordinarySceneLoader{ - fetch: func(keys []id.SceneID) ([]*graphql.Scene, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinarySceneLoader struct { - fetch func(keys []id.SceneID) ([]*graphql.Scene, []error) -} - -func (l *ordinarySceneLoader) Load(key id.SceneID) (*graphql.Scene, error) { - res, errs := l.fetch([]id.SceneID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinarySceneLoader) LoadAll(keys []id.SceneID) ([]*graphql.Scene, []error) { - return l.fetch(keys) -} - -type TeamDataLoader interface { - Load(id.TeamID) (*graphql.Team, error) - LoadAll([]id.TeamID) ([]*graphql.Team, []error) -} - -func newTeam(ctx context.Context, c *graphql.TeamController, o *usecase.Operator) *TeamLoader { - return NewTeamLoader(TeamLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.TeamID) ([]*graphql.Team, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryTeam(ctx context.Context, c *graphql.TeamController, o *usecase.Operator) TeamDataLoader { - return &ordinaryTeamLoader{ - fetch: func(keys []id.TeamID) ([]*graphql.Team, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryTeamLoader struct { - fetch 
func(keys []id.TeamID) ([]*graphql.Team, []error) -} - -func (l *ordinaryTeamLoader) Load(key id.TeamID) (*graphql.Team, error) { - res, errs := l.fetch([]id.TeamID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryTeamLoader) LoadAll(keys []id.TeamID) ([]*graphql.Team, []error) { - return l.fetch(keys) -} - -type UserDataLoader interface { - Load(id.UserID) (*graphql.User, error) - LoadAll([]id.UserID) ([]*graphql.User, []error) -} - -func newUser(ctx context.Context, c *graphql.UserController, o *usecase.Operator) *UserLoader { - return NewUserLoader(UserLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.UserID) ([]*graphql.User, []error) { - return c.Fetch(ctx, keys, o) - }, - }) -} - -func newOrdinaryUser(ctx context.Context, c *graphql.UserController, o *usecase.Operator) UserDataLoader { - return &ordinaryUserLoader{ - fetch: func(keys []id.UserID) ([]*graphql.User, []error) { - return c.Fetch(ctx, keys, o) - }, - } -} - -type ordinaryUserLoader struct { - fetch func(keys []id.UserID) ([]*graphql.User, []error) -} - -func (l *ordinaryUserLoader) Load(key id.UserID) (*graphql.User, error) { - res, errs := l.fetch([]id.UserID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryUserLoader) LoadAll(keys []id.UserID) ([]*graphql.User, []error) { - return l.fetch(keys) -} diff --git a/internal/graphql/dataloader/loader_layer_group_gen.go b/internal/graphql/dataloader/loader_layer_group_gen.go deleted file mode 100644 index 9724ccc1f..000000000 --- a/internal/graphql/dataloader/loader_layer_group_gen.go +++ /dev/null @@ -1,54 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package dataloader - -import ( - "context" - "time" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -type LayerGroupDataLoader interface { - Load(id.LayerID) (*graphql.LayerGroup, error) - LoadAll([]id.LayerID) ([]*graphql.LayerGroup, []error) -} - -func newLayerGroup(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) *LayerGroupLoader { - return NewLayerGroupLoader(LayerGroupLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { - return c.FetchGroup(ctx, keys, o) - }, - }) -} - -func newOrdinaryLayerGroup(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) LayerGroupDataLoader { - return &ordinaryLayerGroupLoader{ - fetch: func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { - return c.FetchGroup(ctx, keys, o) - }, - } -} - -type ordinaryLayerGroupLoader struct { - fetch func(keys []id.LayerID) ([]*graphql.LayerGroup, []error) -} - -func (l *ordinaryLayerGroupLoader) Load(key id.LayerID) (*graphql.LayerGroup, error) { - res, errs := l.fetch([]id.LayerID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryLayerGroupLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerGroup, []error) { - return l.fetch(keys) -} diff --git a/internal/graphql/dataloader/loader_layer_item_gen.go b/internal/graphql/dataloader/loader_layer_item_gen.go deleted file mode 100644 index 50e593848..000000000 --- a/internal/graphql/dataloader/loader_layer_item_gen.go +++ /dev/null @@ -1,54 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package dataloader - -import ( - "context" - "time" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -type LayerItemDataLoader interface { - Load(id.LayerID) (*graphql.LayerItem, error) - LoadAll([]id.LayerID) ([]*graphql.LayerItem, []error) -} - -func newLayerItem(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) *LayerItemLoader { - return NewLayerItemLoader(LayerItemLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.LayerID) ([]*graphql.LayerItem, []error) { - return c.FetchItem(ctx, keys, o) - }, - }) -} - -func newOrdinaryLayerItem(ctx context.Context, c *graphql.LayerController, o *usecase.Operator) LayerItemDataLoader { - return &ordinaryLayerItemLoader{ - fetch: func(keys []id.LayerID) ([]*graphql.LayerItem, []error) { - return c.FetchItem(ctx, keys, o) - }, - } -} - -type ordinaryLayerItemLoader struct { - fetch func(keys []id.LayerID) ([]*graphql.LayerItem, []error) -} - -func (l *ordinaryLayerItemLoader) Load(key id.LayerID) (*graphql.LayerItem, error) { - res, errs := l.fetch([]id.LayerID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryLayerItemLoader) LoadAll(keys []id.LayerID) ([]*graphql.LayerItem, []error) { - return l.fetch(keys) -} diff --git a/internal/graphql/dataloader/loader_property_schema_gen.go b/internal/graphql/dataloader/loader_property_schema_gen.go deleted file mode 100644 index c7ec6113a..000000000 --- a/internal/graphql/dataloader/loader_property_schema_gen.go +++ /dev/null @@ -1,54 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package dataloader - -import ( - "context" - "time" - - "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -type PropertySchemaDataLoader interface { - Load(id.PropertySchemaID) (*graphql.PropertySchema, error) - LoadAll([]id.PropertySchemaID) ([]*graphql.PropertySchema, []error) -} - -func newPropertySchema(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) *PropertySchemaLoader { - return NewPropertySchemaLoader(PropertySchemaLoaderConfig{ - Wait: 1 * time.Millisecond, - MaxBatch: 100, - Fetch: func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { - return c.FetchSchema(ctx, keys, o) - }, - }) -} - -func newOrdinaryPropertySchema(ctx context.Context, c *graphql.PropertyController, o *usecase.Operator) PropertySchemaDataLoader { - return &ordinaryPropertySchemaLoader{ - fetch: func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { - return c.FetchSchema(ctx, keys, o) - }, - } -} - -type ordinaryPropertySchemaLoader struct { - fetch func(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) -} - -func (l *ordinaryPropertySchemaLoader) Load(key id.PropertySchemaID) (*graphql.PropertySchema, error) { - res, errs := l.fetch([]id.PropertySchemaID{key}) - if len(errs) > 0 { - return nil, errs[0] - } - if len(res) > 0 { - return res[0], nil - } - return nil, nil -} - -func (l *ordinaryPropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*graphql.PropertySchema, []error) { - return l.fetch(keys) -} diff --git a/internal/graphql/resolver.go b/internal/graphql/resolver.go deleted file mode 100644 index 383c41969..000000000 --- a/internal/graphql/resolver.go +++ /dev/null @@ -1,35 +0,0 @@ -//go:generate go run github.com/99designs/gqlgen - -package graphql - -import ( - "errors" - - graphql "github.com/reearth/reearth-backend/internal/adapter/graphql" -) - -// THIS CODE IS A 
STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES. - -// Resolver _ -type Resolver struct { - config ResolverConfig -} - -// ErrNotImplemented _ -var ErrNotImplemented = errors.New("not impleneted yet") - -// ErrUnauthorized _ -var ErrUnauthorized = errors.New("unauthorized") - -// ResolverConfig _ -type ResolverConfig struct { - Controllers *graphql.Container - Debug bool -} - -// NewResolver _ -func NewResolver(config ResolverConfig) ResolverRoot { - return &Resolver{ - config: config, - } -} diff --git a/internal/graphql/resolver_asset.go b/internal/graphql/resolver_asset.go deleted file mode 100644 index d3084aec0..000000000 --- a/internal/graphql/resolver_asset.go +++ /dev/null @@ -1,22 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (r *Resolver) Asset() AssetResolver { - return &assetResolver{r} -} - -type assetResolver struct{ *Resolver } - -func (r *assetResolver) Team(ctx context.Context, obj *graphql1.Asset) (*graphql1.Team, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) -} diff --git a/internal/graphql/resolver_layer.go b/internal/graphql/resolver_layer.go deleted file mode 100644 index fd7a09616..000000000 --- a/internal/graphql/resolver_layer.go +++ /dev/null @@ -1,460 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (r *Resolver) LayerItem() LayerItemResolver { - return &layerItemResolver{r} -} - -func (r *Resolver) LayerGroup() LayerGroupResolver { - return &layerGroupResolver{r} -} - -func (r *Resolver) Infobox() InfoboxResolver { - return &infoboxResolver{r} -} - 
-func (r *Resolver) InfoboxField() InfoboxFieldResolver { - return &infoboxFieldResolver{r} -} - -func (r *Resolver) MergedLayer() MergedLayerResolver { - return &mergedLayerResolver{r} -} - -func (r *Resolver) MergedInfobox() MergedInfoboxResolver { - return &mergedInfoboxResolver{r} -} - -func (r *Resolver) MergedInfoboxField() MergedInfoboxFieldResolver { - return &mergedInfoboxFieldResolver{r} -} - -type infoboxResolver struct{ *Resolver } - -func (r *infoboxResolver) Property(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) -} - -func (r *infoboxResolver) Layer(ctx context.Context, obj *graphql1.Infobox) (graphql1.Layer, error) { - exit := trace(ctx) - defer exit() - - layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) - if err != nil || layer == nil { - return nil, err - } - return *layer, nil -} - -func (r *infoboxResolver) LinkedDataset(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Dataset, error) { - exit := trace(ctx) - defer exit() - - if obj.LinkedDatasetID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) -} - -func (r *infoboxResolver) Merged(ctx context.Context, obj *graphql1.Infobox) (*graphql1.MergedInfobox, error) { - exit := trace(ctx) - defer exit() - - ml, err := r.config.Controllers.LayerController.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID), getOperator(ctx)) - if err != nil || ml == nil { - return nil, err - } - return ml.Infobox, nil -} - -func (r *infoboxResolver) Scene(ctx context.Context, obj *graphql1.Infobox) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *graphql1.Infobox) 
(*graphql1.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) - if err != nil || layer == nil { - return nil, err - } - var pluginID *id.PluginID - if lg, ok := (*layer).(*graphql1.LayerGroup); ok { - pluginID = lg.PluginID - } else if li, ok := (*layer).(*graphql1.LayerItem); ok { - pluginID = li.PluginID - } - if pluginID == nil { - return nil, nil - } - - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) - if err != nil { - return nil, err - } - return s.Plugin(*pluginID), nil -} - -type infoboxFieldResolver struct{ *Resolver } - -func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *graphql1.InfoboxField) (graphql1.Layer, error) { - exit := trace(ctx) - defer exit() - - layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) - if err != nil { - return nil, err - } - return *layer, nil -} - -func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Infobox, error) { - exit := trace(ctx) - defer exit() - - layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) - if err != nil || layer == nil { - return nil, err - } - layer2 := (*layer).(*graphql1.LayerItem) - if layer2 == nil { - return nil, nil - } - return layer2.Infobox, nil -} - -func (r *infoboxFieldResolver) Property(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) -} - -func (r *infoboxFieldResolver) Plugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Plugin, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) -} - -func (r *infoboxFieldResolver) Extension(ctx context.Context, obj *graphql1.InfoboxField) 
(*graphql1.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) - if err != nil { - return nil, err - } - return plugin.Extension(obj.ExtensionID), nil -} - -func (r *infoboxFieldResolver) LinkedDataset(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Dataset, error) { - exit := trace(ctx) - defer exit() - - if obj.LinkedDatasetID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) -} - -func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.MergedInfoboxField, error) { - exit := trace(ctx) - defer exit() - - ml, err := r.config.Controllers.LayerController.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID), getOperator(ctx)) - if err != nil || ml == nil || ml.Infobox == nil { - return nil, err - } - return ml.Infobox.Field(obj.ID), nil -} - -func (r *infoboxFieldResolver) Scene(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *graphql1.InfoboxField) (*graphql1.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) - if err != nil { - return nil, err - } - return s.Plugin(obj.PluginID), nil -} - -type layerGroupResolver struct{ *Resolver } - -func (r *layerGroupResolver) Parent(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.LayerGroup, error) { - exit := trace(ctx) - defer exit() - - if obj.ParentID != nil { - return dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) - } - return r.config.Controllers.LayerController.FetchParent(ctx, id.LayerID(obj.ID), getOperator(ctx)) -} - -func (r 
*layerGroupResolver) Property(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - if obj.PropertyID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) -} - -func (r *layerGroupResolver) Plugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Plugin, error) { - exit := trace(ctx) - defer exit() - - if obj.PluginID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) -} - -func (r *layerGroupResolver) Extension(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - if obj.PluginID == nil || obj.ExtensionID == nil { - return nil, nil - } - plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) - if err != nil { - return nil, err - } - return plugin.Extension(*obj.ExtensionID), nil -} - -func (r *layerGroupResolver) ParentLayer(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.FetchParent(ctx, id.LayerID(obj.ID), getOperator(ctx)) -} - -func (r *layerGroupResolver) LinkedDatasetSchema(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - - if obj.LinkedDatasetSchemaID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) -} - -func (r *layerGroupResolver) Layers(ctx context.Context, obj *graphql1.LayerGroup) ([]graphql1.Layer, error) { - exit := trace(ctx) - defer exit() - - layers, err := dataloader.DataLoadersFromContext(ctx).Layer.LoadAll(id.LayerIDsFromIDRef(obj.LayerIds)) - if len(err) > 0 && err[0] != nil { - return nil, err[0] - } - return graphql1.AttachParentLayer(layers, obj.ID), nil -} - -func (r 
*layerGroupResolver) Scene(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *graphql1.LayerGroup) (*graphql1.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - if obj.PluginID == nil { - return nil, nil - } - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) - if err != nil { - return nil, err - } - return s.Plugin(*obj.PluginID), nil -} - -type layerItemResolver struct{ *Resolver } - -func (r *layerItemResolver) Parent(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.LayerGroup, error) { - exit := trace(ctx) - defer exit() - - if obj.ParentID != nil { - return dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) - } - return r.config.Controllers.LayerController.FetchParent(ctx, id.LayerID(obj.ID), getOperator(ctx)) -} - -func (r *layerItemResolver) Property(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - if obj.PropertyID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) -} - -func (r *layerItemResolver) Plugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Plugin, error) { - exit := trace(ctx) - defer exit() - - if obj.PluginID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) -} - -func (r *layerItemResolver) Extension(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - if obj.PluginID == nil || obj.ExtensionID == nil { - return nil, nil - } - plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) - if err != nil { - return nil, err - } - return 
plugin.Extension(*obj.ExtensionID), nil -} - -func (r *layerItemResolver) LinkedDataset(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Dataset, error) { - exit := trace(ctx) - defer exit() - - if obj.LinkedDatasetID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) -} - -func (r *layerItemResolver) Merged(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.MergedLayer, error) { - exit := trace(ctx) - defer exit() - - if obj.ParentID == nil { - return r.config.Controllers.LayerController.FetchParentAndMerged(ctx, id.LayerID(obj.ID), getOperator(ctx)) - } - return r.config.Controllers.LayerController.FetchMerged(ctx, id.LayerID(obj.ID), id.LayerIDFromRefID(obj.ParentID), getOperator(ctx)) -} - -func (r *layerItemResolver) Scene(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *graphql1.LayerItem) (*graphql1.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - if obj.PluginID == nil { - return nil, nil - } - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) - if err != nil { - return nil, err - } - return s.Plugin(*obj.PluginID), nil -} - -type mergedLayerResolver struct{ *Resolver } - -func (r *mergedLayerResolver) Original(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerItem, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).LayerItem.Load(id.LayerID(obj.OriginalID)) -} - -func (r *mergedLayerResolver) Parent(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.LayerGroup, error) { - exit := trace(ctx) - defer exit() - - if obj.ParentID == nil { - return nil, nil - } - return 
dataloader.DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) -} - -func (r *mergedLayerResolver) Scene(ctx context.Context, obj *graphql1.MergedLayer) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - if obj.ParentID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -type mergedInfoboxResolver struct{ *Resolver } - -func (r *mergedInfoboxResolver) Scene(ctx context.Context, obj *graphql1.MergedInfobox) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -type mergedInfoboxFieldResolver struct{ *Resolver } - -func (r *mergedInfoboxFieldResolver) Plugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Plugin, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) -} - -func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) - if err != nil { - return nil, err - } - return plugin.Extension(obj.ExtensionID), nil -} - -func (r *mergedInfoboxFieldResolver) Scene(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) -} - -func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *graphql1.MergedInfoboxField) (*graphql1.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - s, err := dataloader.DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) - if err != nil { - return nil, err - } - return s.Plugin(obj.PluginID), nil -} diff --git a/internal/graphql/resolver_mutation.go 
b/internal/graphql/resolver_mutation.go deleted file mode 100644 index 788969ca2..000000000 --- a/internal/graphql/resolver_mutation.go +++ /dev/null @@ -1,434 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" -) - -func (r *Resolver) Mutation() MutationResolver { - return &mutationResolver{r} -} - -type mutationResolver struct{ *Resolver } - -func (r *mutationResolver) CreateAsset(ctx context.Context, input graphql1.CreateAssetInput) (*graphql1.CreateAssetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.AssetController.Create(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveAsset(ctx context.Context, input graphql1.RemoveAssetInput) (*graphql1.RemoveAssetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.AssetController.Remove(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input graphql1.UpdateDatasetSchemaInput) (*graphql1.UpdateDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.UpdateDatasetSchema(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddDynamicDatasetSchema(ctx context.Context, input graphql1.AddDynamicDatasetSchemaInput) (*graphql1.AddDynamicDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.AddDynamicDatasetSchema(ctx, &input) -} - -func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input graphql1.AddDynamicDatasetInput) (*graphql1.AddDynamicDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.AddDynamicDataset(ctx, &input) -} - -func (r *mutationResolver) Signup(ctx context.Context, input graphql1.SignupInput) (*graphql1.SignupPayload, error) { - exit := trace(ctx) - defer exit() - - return 
r.config.Controllers.UserController.Signup(ctx, &input, getSub(ctx)) -} - -func (r *mutationResolver) UpdateMe(ctx context.Context, input graphql1.UpdateMeInput) (*graphql1.UpdateMePayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.UserController.UpdateMe(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input graphql1.RemoveMyAuthInput) (*graphql1.UpdateMePayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.UserController.RemoveMyAuth(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) DeleteMe(ctx context.Context, input graphql1.DeleteMeInput) (*graphql1.DeleteMePayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.UserController.DeleteMe(ctx, input.UserID, getOperator(ctx)) -} - -func (r *mutationResolver) CreateTeam(ctx context.Context, input graphql1.CreateTeamInput) (*graphql1.CreateTeamPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.Create(ctx, &input, getUser(ctx)) -} - -func (r *mutationResolver) DeleteTeam(ctx context.Context, input graphql1.DeleteTeamInput) (*graphql1.DeleteTeamPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.Remove(ctx, input.TeamID, getOperator(ctx)) -} - -func (r *mutationResolver) UpdateTeam(ctx context.Context, input graphql1.UpdateTeamInput) (*graphql1.UpdateTeamPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.Update(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input graphql1.AddMemberToTeamInput) (*graphql1.AddMemberToTeamPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.AddMember(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input 
graphql1.RemoveMemberFromTeamInput) (*graphql1.RemoveMemberFromTeamPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.RemoveMember(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpdateMemberOfTeam(ctx context.Context, input graphql1.UpdateMemberOfTeamInput) (*graphql1.UpdateMemberOfTeamPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.UpdateMember(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) CreateProject(ctx context.Context, input graphql1.CreateProjectInput) (*graphql1.ProjectPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.ProjectController.Create(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpdateProject(ctx context.Context, input graphql1.UpdateProjectInput) (*graphql1.ProjectPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.ProjectController.Update(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) PublishProject(ctx context.Context, input graphql1.PublishProjectInput) (*graphql1.ProjectPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.ProjectController.Publish(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) DeleteProject(ctx context.Context, input graphql1.DeleteProjectInput) (*graphql1.DeleteProjectPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.ProjectController.Delete(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UploadPlugin(ctx context.Context, input graphql1.UploadPluginInput) (*graphql1.UploadPluginPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PluginController.Upload(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) CreateScene(ctx context.Context, input graphql1.CreateSceneInput) (*graphql1.CreateScenePayload, error) { - exit := trace(ctx) - defer exit() 
- - return r.config.Controllers.SceneController.Create(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddWidget(ctx context.Context, input graphql1.AddWidgetInput) (*graphql1.AddWidgetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.SceneController.AddWidget(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpdateWidget(ctx context.Context, input graphql1.UpdateWidgetInput) (*graphql1.UpdateWidgetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.SceneController.UpdateWidget(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveWidget(ctx context.Context, input graphql1.RemoveWidgetInput) (*graphql1.RemoveWidgetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.SceneController.RemoveWidget(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) InstallPlugin(ctx context.Context, input graphql1.InstallPluginInput) (*graphql1.InstallPluginPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.SceneController.InstallPlugin(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UninstallPlugin(ctx context.Context, input graphql1.UninstallPluginInput) (*graphql1.UninstallPluginPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.SceneController.UninstallPlugin(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpgradePlugin(ctx context.Context, input graphql1.UpgradePluginInput) (*graphql1.UpgradePluginPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.SceneController.UpgradePlugin(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) SyncDataset(ctx context.Context, input graphql1.SyncDatasetInput) (*graphql1.SyncDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.Sync(ctx, &input, getOperator(ctx)) -} - -func (r 
*mutationResolver) UpdatePropertyValue(ctx context.Context, input graphql1.UpdatePropertyValueInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UpdateValue(ctx, - input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, input.Value, input.Type, getOperator(ctx)) -} - -func (r *mutationResolver) UpdatePropertyValueLatLng(ctx context.Context, input graphql1.UpdatePropertyValueLatLngInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UpdateValue(ctx, - input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.LatLng{ - Lat: input.Lat, - Lng: input.Lng, - }, graphql1.ValueTypeLatlng, getOperator(ctx)) -} - -func (r *mutationResolver) UpdatePropertyValueLatLngHeight(ctx context.Context, input graphql1.UpdatePropertyValueLatLngHeightInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UpdateValue(ctx, - input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.LatLngHeight{ - Lat: input.Lat, - Lng: input.Lng, - Height: input.Height, - }, graphql1.ValueTypeLatlngheight, getOperator(ctx)) -} - -func (r *mutationResolver) UpdatePropertyValueCamera(ctx context.Context, input graphql1.UpdatePropertyValueCameraInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UpdateValue(ctx, - input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.Camera{ - Lat: input.Lat, - Lng: input.Lng, - Altitude: input.Altitude, - Heading: input.Heading, - Pitch: input.Pitch, - Roll: input.Roll, - Fov: input.Fov, - }, graphql1.ValueTypeLatlngheight, getOperator(ctx)) -} - -func (r *mutationResolver) UpdatePropertyValueTypography(ctx context.Context, input graphql1.UpdatePropertyValueTypographyInput) 
(*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UpdateValue(ctx, - input.PropertyID, input.SchemaItemID, input.ItemID, input.FieldID, graphql1.Typography{ - FontFamily: input.FontFamily, - FontSize: input.FontSize, - FontWeight: input.FontWeight, - Color: input.Color, - TextAlign: input.TextAlign, - Bold: input.Bold, - Italic: input.Italic, - Underline: input.Underline, - }, graphql1.ValueTypeLatlngheight, getOperator(ctx)) -} - -func (r *mutationResolver) RemovePropertyField(ctx context.Context, input graphql1.RemovePropertyFieldInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.RemoveField(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input graphql1.UploadFileToPropertyInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UploadFile(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input graphql1.LinkDatasetToPropertyValueInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.LinkValue(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input graphql1.UnlinkPropertyValueInput) (*graphql1.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UnlinkValue(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddPropertyItem(ctx context.Context, input graphql1.AddPropertyItemInput) (*graphql1.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.AddItem(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) MovePropertyItem(ctx 
context.Context, input graphql1.MovePropertyItemInput) (*graphql1.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.MoveItem(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input graphql1.RemovePropertyItemInput) (*graphql1.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.RemoveItem(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input graphql1.UpdatePropertyItemInput) (*graphql1.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.PropertyController.UpdateItems(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddLayerItem(ctx context.Context, input graphql1.AddLayerItemInput) (*graphql1.AddLayerItemPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.AddItem(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddLayerGroup(ctx context.Context, input graphql1.AddLayerGroupInput) (*graphql1.AddLayerGroupPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.AddGroup(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveLayer(ctx context.Context, input graphql1.RemoveLayerInput) (*graphql1.RemoveLayerPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.Remove(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) UpdateLayer(ctx context.Context, input graphql1.UpdateLayerInput) (*graphql1.UpdateLayerPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.Update(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) MoveLayer(ctx context.Context, input graphql1.MoveLayerInput) (*graphql1.MoveLayerPayload, error) { - exit := 
trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.Move(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) CreateInfobox(ctx context.Context, input graphql1.CreateInfoboxInput) (*graphql1.CreateInfoboxPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.CreateInfobox(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveInfobox(ctx context.Context, input graphql1.RemoveInfoboxInput) (*graphql1.RemoveInfoboxPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.RemoveInfobox(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddInfoboxField(ctx context.Context, input graphql1.AddInfoboxFieldInput) (*graphql1.AddInfoboxFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.AddInfoboxField(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input graphql1.MoveInfoboxFieldInput) (*graphql1.MoveInfoboxFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.MoveInfoboxField(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input graphql1.RemoveInfoboxFieldInput) (*graphql1.RemoveInfoboxFieldPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.RemoveInfoboxField(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input graphql1.RemoveDatasetSchemaInput) (*graphql1.RemoveDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.RemoveDatasetSchema(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input graphql1.AddDatasetSchemaInput) (*graphql1.AddDatasetSchemaPayload, error) { - exit := trace(ctx) - 
defer exit() - - return r.config.Controllers.DatasetController.AddDatasetSchema(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) ImportLayer(ctx context.Context, input graphql1.ImportLayerInput) (*graphql1.ImportLayerPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.LayerController.ImportLayer(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) ImportDataset(ctx context.Context, input graphql1.ImportDatasetInput) (*graphql1.ImportDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.ImportDataset(ctx, &input, getOperator(ctx)) -} - -func (r *mutationResolver) ImportDatasetFromGoogleSheet(ctx context.Context, input graphql1.ImportDatasetFromGoogleSheetInput) (*graphql1.ImportDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.ImportDatasetFromGoogleSheet(ctx, &input, getOperator(ctx)) -} diff --git a/internal/graphql/resolver_project.go b/internal/graphql/resolver_project.go deleted file mode 100644 index 8a343aada..000000000 --- a/internal/graphql/resolver_project.go +++ /dev/null @@ -1,34 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/rerror" -) - -func (r *Resolver) Project() ProjectResolver { - return &projectResolver{r} -} - -type projectResolver struct{ *Resolver } - -func (r *projectResolver) Team(ctx context.Context, obj *graphql1.Project) (*graphql1.Team, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) -} - -func (r *projectResolver) Scene(ctx context.Context, obj *graphql1.Project) (*graphql1.Scene, error) { - exit := trace(ctx) - defer exit() - - s, err := 
r.config.Controllers.SceneController.FindByProject(ctx, id.ProjectID(obj.ID), getOperator(ctx)) - if err != nil && err != rerror.ErrNotFound { - return nil, err - } - return s, nil -} diff --git a/internal/graphql/resolver_scene.go b/internal/graphql/resolver_scene.go deleted file mode 100644 index daabef883..000000000 --- a/internal/graphql/resolver_scene.go +++ /dev/null @@ -1,131 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (r *Resolver) Scene() SceneResolver { - return &sceneResolver{r} -} - -func (r *Resolver) ScenePlugin() ScenePluginResolver { - return &scenePluginResolver{r} -} - -func (r *Resolver) SceneWidget() SceneWidgetResolver { - return &sceneWidgetResolver{r} -} - -type sceneResolver struct{ *Resolver } - -func (r *sceneResolver) Project(ctx context.Context, obj *graphql1.Scene) (*graphql1.Project, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Project.Load(id.ProjectID(obj.ProjectID)) -} - -func (r *sceneResolver) Team(ctx context.Context, obj *graphql1.Scene) (*graphql1.Team, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) -} - -func (r *sceneResolver) Property(ctx context.Context, obj *graphql1.Scene) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) -} - -func (r *sceneResolver) RootLayer(ctx context.Context, obj *graphql1.Scene) (*graphql1.LayerGroup, error) { - exit := trace(ctx) - defer exit() - - layer, err := dataloader.DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.RootLayerID)) - if err != nil { - return nil, err - } - if layer == nil { - 
return nil, nil - } - layerGroup, ok := (*layer).(*graphql1.LayerGroup) - if !ok { - return nil, nil - } - return layerGroup, nil -} - -func (r *sceneResolver) DatasetSchemas(ctx context.Context, obj *graphql1.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.DatasetSchemaConnection, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.DatasetController.FindSchemaByScene(ctx, obj.ID, first, last, before, after, getOperator(ctx)) -} - -func (r *sceneResolver) LockMode(ctx context.Context, obj *graphql1.Scene) (graphql1.SceneLockMode, error) { - exit := trace(ctx) - defer exit() - - sl, err := r.config.Controllers.SceneController.FetchLock(ctx, id.SceneID(obj.ID), getOperator(ctx)) - if err != nil { - return graphql1.SceneLockModeFree, err - } - return *sl, nil -} - -type scenePluginResolver struct{ *Resolver } - -func (r *scenePluginResolver) Plugin(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Plugin, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) -} -func (r *scenePluginResolver) Property(ctx context.Context, obj *graphql1.ScenePlugin) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - if obj.PropertyID == nil { - return nil, nil - } - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) -} - -type sceneWidgetResolver struct{ *Resolver } - -func (r *sceneWidgetResolver) Plugin(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Plugin, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) -} - -func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - plugin, err := dataloader.DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) - if err != nil { - return nil, err - } - for _, e := 
range plugin.Extensions { - if e.ExtensionID == obj.ExtensionID { - return e, nil - } - } - return nil, nil -} - -func (r *sceneWidgetResolver) Property(ctx context.Context, obj *graphql1.SceneWidget) (*graphql1.Property, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) -} diff --git a/internal/graphql/resolver_team.go b/internal/graphql/resolver_team.go deleted file mode 100644 index e53d0e476..000000000 --- a/internal/graphql/resolver_team.go +++ /dev/null @@ -1,43 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (r *Resolver) Team() TeamResolver { - return &teamResolver{r} -} - -func (r *Resolver) TeamMember() TeamMemberResolver { - return &teamMemberResolver{r} -} - -type teamResolver struct{ *Resolver } - -func (r *teamResolver) Assets(ctx context.Context, obj *graphql1.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.AssetConnection, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.AssetController.FindByTeam(ctx, obj.ID, first, last, before, after, getOperator(ctx)) -} - -func (r *teamResolver) Projects(ctx context.Context, obj *graphql1.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*graphql1.ProjectConnection, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.ProjectController.FindByTeam(ctx, id.TeamID(obj.ID), first, last, before, after, getOperator(ctx)) -} - -type teamMemberResolver struct{ *Resolver } - -func (r *teamMemberResolver) User(ctx context.Context, obj *graphql1.TeamMember) (*graphql1.User, error) { - exit := trace(ctx) - defer exit() - - return 
dataloader.DataLoadersFromContext(ctx).User.Load(id.UserID(obj.UserID)) -} diff --git a/internal/graphql/resolver_user.go b/internal/graphql/resolver_user.go deleted file mode 100644 index 9be4aaa5c..000000000 --- a/internal/graphql/resolver_user.go +++ /dev/null @@ -1,29 +0,0 @@ -package graphql - -import ( - "context" - - graphql1 "github.com/reearth/reearth-backend/internal/adapter/graphql" - "github.com/reearth/reearth-backend/internal/graphql/dataloader" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (r *Resolver) User() UserResolver { - return &userResolver{r} -} - -type userResolver struct{ *Resolver } - -func (r *userResolver) MyTeam(ctx context.Context, obj *graphql1.User) (*graphql1.Team, error) { - exit := trace(ctx) - defer exit() - - return dataloader.DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.MyTeamID)) -} - -func (r *userResolver) Teams(ctx context.Context, obj *graphql1.User) ([]*graphql1.Team, error) { - exit := trace(ctx) - defer exit() - - return r.config.Controllers.TeamController.FindByUser(ctx, id.UserID(obj.ID), getOperator(ctx)) -} diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index 0c47531e8..9bde832cd 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -22,7 +22,7 @@ func NewAsset() repo.Asset { } } -func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { +func (r *Asset) FindByID(ctx context.Context, id id.AssetID, teams []id.TeamID) (*asset.Asset, error) { r.lock.Lock() defer r.lock.Unlock() @@ -33,6 +33,23 @@ func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, erro return &asset.Asset{}, rerror.ErrNotFound } +func (r *Asset) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id.TeamID) ([]*asset.Asset, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := []*asset.Asset{} + for _, id := range ids { + if d, ok := r.data[id]; ok { + if 
isTeamIncludes(d.Team(), teams) { + result = append(result, d) + continue + } + } + result = append(result, nil) + } + return result, nil +} + func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index 6cb76d0c6..2f5e93fa2 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -24,6 +24,41 @@ func NewAsset(client *mongodoc.Client) repo.Asset { return r } +func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID, teams []id.TeamID) (*asset.Asset, error) { + filter := assetFilter(bson.M{ + "id": id.String(), + }, teams) + return r.findOne(ctx, filter) +} + +func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id.TeamID) ([]*asset.Asset, error) { + filter := assetFilter(bson.M{ + "id": bson.M{"$in": id.AssetIDToKeys(ids)}, + }, teams) + dst := make([]*asset.Asset, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterAssets(ids, res), nil +} + +func (r *assetRepo) Save(ctx context.Context, asset *asset.Asset) error { + doc, id := mongodoc.NewAsset(asset) + return r.client.SaveOne(ctx, id, doc) +} + +func (r *assetRepo) Remove(ctx context.Context, id id.AssetID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + filter := bson.D{ + {Key: "team", Value: id.String()}, + } + return r.paginate(ctx, filter, pagination) +} + func (r *assetRepo) init() { i := r.client.CreateIndex(context.Background(), nil) if len(i) > 0 { @@ -40,7 +75,17 @@ func (r *assetRepo) paginate(ctx context.Context, filter bson.D, pagination *use return c.Rows, pageInfo, nil } -func (r *assetRepo) findOne(ctx context.Context, filter bson.D) (*asset.Asset, error) { +func (r *assetRepo) 
find(ctx context.Context, dst []*asset.Asset, filter interface{}) ([]*asset.Asset, error) { + c := mongodoc.AssetConsumer{ + Rows: dst, + } + if err2 := r.client.Find(ctx, filter, &c); err2 != nil { + return nil, rerror.ErrInternalBy(err2) + } + return c.Rows, nil +} + +func (r *assetRepo) findOne(ctx context.Context, filter interface{}) (*asset.Asset, error) { dst := make([]*asset.Asset, 0, 1) c := mongodoc.AssetConsumer{ Rows: dst, @@ -51,25 +96,22 @@ func (r *assetRepo) findOne(ctx context.Context, filter bson.D) (*asset.Asset, e return c.Rows[0], nil } -func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { - filter := bson.D{ - {Key: "id", Value: id.String()}, +func filterAssets(ids []id.AssetID, rows []*asset.Asset) []*asset.Asset { + res := make([]*asset.Asset, 0, len(ids)) + for _, id := range ids { + var r2 *asset.Asset + for _, r := range rows { + if r.ID() == id { + r2 = r + break + } + } + res = append(res, r2) } - return r.findOne(ctx, filter) -} - -func (r *assetRepo) Save(ctx context.Context, asset *asset.Asset) error { - doc, id := mongodoc.NewAsset(asset) - return r.client.SaveOne(ctx, id, doc) -} - -func (r *assetRepo) Remove(ctx context.Context, id id.AssetID) error { - return r.client.RemoveOne(ctx, id.String()) + return res } -func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { - filter := bson.D{ - {Key: "team", Value: id.String()}, - } - return r.paginate(ctx, filter, pagination) +func assetFilter(filter bson.M, teams []id.TeamID) bson.M { + filter["team"] = bson.M{"$in": id.TeamIDToKeys(teams)} + return filter } diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index 746dc9879..9c5e9e729 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -30,6 +30,18 @@ func NewAsset(r *repo.Container, gr *gateway.Container) interfaces.Asset { } } +func 
(i *Asset) Fetch(ctx context.Context, assets []id.AssetID, operator *usecase.Operator) ([]*asset.Asset, error) { + return i.assetRepo.FindByIDs(ctx, assets, operator.ReadableTeams) +} + +func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { + if err := i.CanReadTeam(tid, operator); err != nil { + return nil, nil, err + } + + return i.assetRepo.FindByTeam(ctx, tid, p) +} + func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { if err := i.CanWriteTeam(inp.TeamID, operator); err != nil { return nil, err @@ -74,15 +86,11 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope } func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { - asset, err := i.assetRepo.FindByID(ctx, aid) + asset, err := i.assetRepo.FindByID(ctx, aid, operator.WritableTeams) if err != nil { return aid, err } - if err := i.CanWriteTeam(asset.Team(), operator); err != nil { - return aid, err - } - tx, err := i.transaction.Begin() if err != nil { return @@ -115,11 +123,3 @@ func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Op tx.Commit() return aid, nil } - -func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { - if err := i.CanReadTeam(tid, operator); err != nil { - return nil, nil, err - } - - return i.assetRepo.FindByTeam(ctx, tid, p) -} diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index 849c8bbbf..a0b5f0192 100644 --- a/internal/usecase/interactor/common.go +++ b/internal/usecase/interactor/common.go @@ -15,6 +15,24 @@ import ( "github.com/reearth/reearth-backend/pkg/scene" ) +type ContainerConfig struct { + SignupSecret string +} + +func 
NewContainer(r *repo.Container, g *gateway.Container, config ContainerConfig) interfaces.Container { + return interfaces.Container{ + Asset: NewAsset(r, g), + Dataset: NewDataset(r, g), + Layer: NewLayer(r), + Plugin: NewPlugin(r, g), + Project: NewProject(r, g), + Property: NewProperty(r, g), + Scene: NewScene(r, g), + Team: NewTeam(r), + User: NewUser(r, g, config.SignupSecret), + } +} + type common struct{} func (common) OnlyOperator(op *usecase.Operator) error { diff --git a/internal/usecase/interfaces/asset.go b/internal/usecase/interfaces/asset.go index d4a078939..57a43e22f 100644 --- a/internal/usecase/interfaces/asset.go +++ b/internal/usecase/interfaces/asset.go @@ -20,7 +20,8 @@ var ( ) type Asset interface { + Fetch(context.Context, []id.AssetID, *usecase.Operator) ([]*asset.Asset, error) + FindByTeam(context.Context, id.TeamID, *usecase.Pagination, *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) Create(context.Context, CreateAssetParam, *usecase.Operator) (*asset.Asset, error) Remove(context.Context, id.AssetID, *usecase.Operator) (id.AssetID, error) - FindByTeam(context.Context, id.TeamID, *usecase.Pagination, *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) } diff --git a/internal/usecase/interfaces/common.go b/internal/usecase/interfaces/common.go index 6069a60f8..8c90ad1b6 100644 --- a/internal/usecase/interfaces/common.go +++ b/internal/usecase/interfaces/common.go @@ -15,3 +15,16 @@ var ( ErrOperationDenied error = errors.New("operation denied") ErrFileNotIncluded error = errors.New("file not included") ) + +type Container struct { + Asset Asset + Dataset Dataset + Layer Layer + Plugin Plugin + Project Project + Property Property + Published Published + Scene Scene + Team Team + User User +} diff --git a/internal/usecase/repo/asset.go b/internal/usecase/repo/asset.go index 37f22216b..7648fd5c7 100644 --- a/internal/usecase/repo/asset.go +++ b/internal/usecase/repo/asset.go @@ -12,5 +12,6 @@ type Asset interface { 
Save(context.Context, *asset.Asset) error Remove(context.Context, id.AssetID) error FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) - FindByID(context.Context, id.AssetID) (*asset.Asset, error) + FindByID(context.Context, id.AssetID, []id.TeamID) (*asset.Asset, error) + FindByIDs(context.Context, []id.AssetID, []id.TeamID) ([]*asset.Asset, error) } diff --git a/schema.graphql b/schema.graphql index c3660a6ec..02d9b34cc 100644 --- a/schema.graphql +++ b/schema.graphql @@ -868,54 +868,6 @@ input UpdatePropertyValueInput { type: ValueType! } -input UpdatePropertyValueLatLngInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! -} - -input UpdatePropertyValueLatLngHeightInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - height: Float! -} - -input UpdatePropertyValueCameraInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - lat: Float! - lng: Float! - altitude: Float! - heading: Float! - pitch: Float! - roll: Float! - fov: Float! -} - -input UpdatePropertyValueTypographyInput { - propertyId: ID! - schemaItemId: PropertySchemaFieldID - itemId: ID - fieldId: PropertySchemaFieldID! - fontFamily: String - fontWeight: String - fontSize: Int - color: String - textAlign: TextAlign - bold: Boolean - italic: Boolean - underline: Boolean -} - input RemovePropertyFieldInput { propertyId: ID! 
schemaItemId: PropertySchemaFieldID @@ -1291,6 +1243,7 @@ type AddDatasetSchemaPayload { # Connection enum NodeType { + ASSET USER TEAM PROJECT @@ -1429,9 +1382,6 @@ type Mutation { publishProject(input: PublishProjectInput!): ProjectPayload deleteProject(input: DeleteProjectInput!): DeleteProjectPayload - # Plugin - uploadPlugin(input: UploadPluginInput!): UploadPluginPayload - # Scene createScene(input: CreateSceneInput!): CreateScenePayload addWidget(input: AddWidgetInput!): AddWidgetPayload @@ -1439,6 +1389,7 @@ type Mutation { removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload installPlugin(input: InstallPluginInput!): InstallPluginPayload uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload + uploadPlugin(input: UploadPluginInput!): UploadPluginPayload upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload # Dataset @@ -1459,18 +1410,6 @@ type Mutation { # Property updatePropertyValue(input: UpdatePropertyValueInput!): PropertyFieldPayload - updatePropertyValueLatLng( - input: UpdatePropertyValueLatLngInput! - ): PropertyFieldPayload - updatePropertyValueLatLngHeight( - input: UpdatePropertyValueLatLngHeightInput! - ): PropertyFieldPayload - updatePropertyValueCamera( - input: UpdatePropertyValueCameraInput! - ): PropertyFieldPayload - updatePropertyValueTypography( - input: UpdatePropertyValueTypographyInput! 
- ): PropertyFieldPayload removePropertyField(input: RemovePropertyFieldInput!): PropertyFieldPayload uploadFileToProperty(input: UploadFileToPropertyInput!): PropertyFieldPayload linkDatasetToPropertyValue( From 02b17f11df510cc166aff0e6725c861952e9ebaf Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Tue, 31 Aug 2021 14:38:28 +0300 Subject: [PATCH 075/253] fix: layer importing bug (#41) * fix property parsing when styleitem is nil * return "unsupported type for unsupported geojson types" * feat: support multipolygon * refactor: enhance multipolygon decoding * test: test code for multipolygon --- pkg/layer/decoding/common.go | 650 +++++++++++++++-------------- pkg/layer/decoding/geojson.go | 26 +- pkg/layer/decoding/geojson_test.go | 117 +++++- 3 files changed, 456 insertions(+), 337 deletions(-) diff --git a/pkg/layer/decoding/common.go b/pkg/layer/decoding/common.go index 67a0ebd53..c7a6df037 100644 --- a/pkg/layer/decoding/common.go +++ b/pkg/layer/decoding/common.go @@ -127,88 +127,89 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } f.UpdateUnsafe(v2) } - - switch extension { - case "kml": - s, ok := styleItem.(kml.Style) - if !ok && styleItem != nil { - return nil, ErrFieldType - } - if s.IconStyle.Icon != nil && len(s.IconStyle.Icon.Href) > 0 { - imageValue, ok := property.ValueTypeURL.ValueFrom(s.IconStyle.Icon.Href) - if !ok { + if styleItem != nil { + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { return nil, ErrFieldType } - imageField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "image"), - ) - imageField.UpdateUnsafe(imageValue) - } - if s.IconStyle.Scale != 0 { - scaleValue, ok := property.ValueTypeNumber.ValueFrom(s.IconStyle.Scale) - if !ok { - return nil, ErrFieldType + if s.IconStyle.Icon != nil && len(s.IconStyle.Icon.Href) > 0 { + imageValue, ok := 
property.ValueTypeURL.ValueFrom(s.IconStyle.Icon.Href) + if !ok { + return nil, ErrFieldType + } + imageField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "image"), + ) + imageField.UpdateUnsafe(imageValue) } - scaleField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "imageSize"), - ) - scaleField.UpdateUnsafe(scaleValue) - } - if len(s.IconStyle.Color) > 0 { - colorValue, ok := property.ValueTypeString.ValueFrom(s.IconStyle.Color) - if !ok { - return nil, ErrFieldType + if s.IconStyle.Scale != 0 { + scaleValue, ok := property.ValueTypeNumber.ValueFrom(s.IconStyle.Scale) + if !ok { + return nil, ErrFieldType + } + scaleField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "imageSize"), + ) + scaleField.UpdateUnsafe(scaleValue) } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "pointColor"), - ) - colorField.UpdateUnsafe(colorValue) - } - case "geojson": - s, ok := styleItem.(string) - if !ok { - return nil, ErrFieldType - } - if len(s) > 0 { - colorValue, ok := property.ValueTypeString.ValueFrom(s) - if !ok { - return nil, ErrFieldType + if len(s.IconStyle.Color) > 0 { + colorValue, ok := property.ValueTypeString.ValueFrom(s.IconStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "pointColor"), - ) - colorField.UpdateUnsafe(colorValue) - } - case "czml": - s, ok := styleItem.(*czml.Point) - if !ok { - return nil, ErrFieldType - } - if len(s.Color) > 0 { - colorValue, ok := property.ValueTypeString.ValueFrom(s.Color) + case "geojson": + s, ok := styleItem.(string) if !ok { return nil, ErrFieldType } - colorField, _, _, _ := p.GetOrCreateField( - ps, - 
property.PointFieldBySchemaGroup(item, "pointColor"), - ) - colorField.UpdateUnsafe(colorValue) - } - if s.PixelSize != 0 { - sizeValue, ok := property.ValueTypeNumber.ValueFrom(s.PixelSize) + if len(s) > 0 { + colorValue, ok := property.ValueTypeString.ValueFrom(s) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + case "czml": + s, ok := styleItem.(*czml.Point) if !ok { return nil, ErrFieldType } - sizeField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "pointSize"), - ) - sizeField.UpdateUnsafe(sizeValue) + if len(s.Color) > 0 { + colorValue, ok := property.ValueTypeString.ValueFrom(s.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointColor"), + ) + colorField.UpdateUnsafe(colorValue) + } + if s.PixelSize != 0 { + sizeValue, ok := property.ValueTypeNumber.ValueFrom(s.PixelSize) + if !ok { + return nil, ErrFieldType + } + sizeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "pointSize"), + ) + sizeField.UpdateUnsafe(sizeValue) + } } } case "Polygon": @@ -217,312 +218,313 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, ErrFieldType } f.UpdateUnsafe(v2) - - switch extension { - case "kml": - s, ok := styleItem.(kml.Style) - if !ok && styleItem != nil { - return nil, ErrFieldType - } - if s.PolyStyle.Stroke { - stroke, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Stroke) - if !ok { + if styleItem != nil { + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { return nil, ErrFieldType } - strokeField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "stroke"), - ) - strokeField.UpdateUnsafe(stroke) - } - if s.LineStyle.Width != 0 { - 
width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) - if !ok { - return nil, ErrFieldType + if s.PolyStyle.Stroke { + stroke, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Stroke) + if !ok { + return nil, ErrFieldType + } + strokeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "stroke"), + ) + strokeField.UpdateUnsafe(stroke) } - widthField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeWidth"), - ) - widthField.UpdateUnsafe(width) - } - if len(s.LineStyle.Color) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) - if !ok { - return nil, ErrFieldType + if s.LineStyle.Width != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeColor"), - ) - colorField.UpdateUnsafe(color) - } - if s.PolyStyle.Fill { - fill, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Fill) - if !ok { - return nil, ErrFieldType + if len(s.LineStyle.Color) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) } - fillField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "fill"), - ) - fillField.UpdateUnsafe(fill) - } - if len(s.PolyStyle.Color) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.PolyStyle.Color) - if !ok { - return nil, ErrFieldType + if s.PolyStyle.Fill { + fill, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Fill) + if !ok { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + 
property.PointFieldBySchemaGroup(item, "fill"), + ) + fillField.UpdateUnsafe(fill) } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "fillColor"), - ) - colorField.UpdateUnsafe(color) - } - - case "czml": - s, ok := styleItem.(*czml.Polygon) - if !ok && styleItem != nil { - return nil, ErrFieldType - } - if s.Stroke { - stroke, ok := property.ValueTypeBool.ValueFrom(s.Stroke) - if !ok { - return nil, ErrFieldType + if len(s.PolyStyle.Color) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.PolyStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + colorField.UpdateUnsafe(color) } - strokeField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "stroke"), - ) - strokeField.UpdateUnsafe(stroke) - } - if s.StrokeWidth != 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) - if !ok { + + case "czml": + s, ok := styleItem.(*czml.Polygon) + if !ok && styleItem != nil { return nil, ErrFieldType } - widthField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeWidth"), - ) - widthField.UpdateUnsafe(width) - } - if s.StrokeColor != nil { - var colorValue string - var err error - if len(s.StrokeColor.RGBA) > 0 { - colorValue, err = rgbaToHex(s.StrokeColor.RGBA) - if err != nil { - return nil, err + if s.Stroke { + stroke, ok := property.ValueTypeBool.ValueFrom(s.Stroke) + if !ok { + return nil, ErrFieldType } + strokeField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "stroke"), + ) + strokeField.UpdateUnsafe(stroke) } - if len(s.StrokeColor.RGBAF) > 0 { - colorValue, err = rgbafToHex(s.StrokeColor.RGBAF) - if err != nil { - return nil, err + if s.StrokeWidth != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if !ok { + return nil, ErrFieldType } + widthField, _, _, _ := 
p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) } - color, ok := property.ValueTypeString.ValueFrom(colorValue) - if !ok { - return nil, ErrFieldType - } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeColor"), - ) - colorField.UpdateUnsafe(color) - } - if s.Fill { - fill, ok := property.ValueTypeBool.ValueFrom(s.Fill) - if !ok { - return nil, ErrFieldType + if s.StrokeColor != nil { + var colorValue string + var err error + if len(s.StrokeColor.RGBA) > 0 { + colorValue, err = rgbaToHex(s.StrokeColor.RGBA) + if err != nil { + return nil, err + } + } + if len(s.StrokeColor.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.StrokeColor.RGBAF) + if err != nil { + return nil, err + } + } + color, ok := property.ValueTypeString.ValueFrom(colorValue) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) } - fillField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "fill"), - ) - fillField.UpdateUnsafe(fill) - } - if s.Material.SolidColor.Color != nil { - var colorValue string - var err error - if len(s.Material.SolidColor.Color.RGBA) > 0 { - colorValue, err = rgbaToHex(s.Material.SolidColor.Color.RGBA) - if err != nil { - return nil, err + if s.Fill { + fill, ok := property.ValueTypeBool.ValueFrom(s.Fill) + if !ok { + return nil, ErrFieldType } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fill"), + ) + fillField.UpdateUnsafe(fill) } - if len(s.Material.SolidColor.Color.RGBAF) > 0 { - colorValue, err = rgbafToHex(s.Material.SolidColor.Color.RGBAF) - if err != nil { - return nil, err + if s.Material.SolidColor.Color != nil { + var colorValue string + var err error + if len(s.Material.SolidColor.Color.RGBA) > 0 { + colorValue, err = 
rgbaToHex(s.Material.SolidColor.Color.RGBA) + if err != nil { + return nil, err + } } + if len(s.Material.SolidColor.Color.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.Material.SolidColor.Color.RGBAF) + if err != nil { + return nil, err + } + } + color, ok := property.ValueTypeString.ValueFrom(colorValue) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + colorField.UpdateUnsafe(color) } - color, ok := property.ValueTypeString.ValueFrom(colorValue) - if !ok { + case "geojson": + s, ok := styleItem.(GeoStyle) + if !ok && styleItem != nil { return nil, ErrFieldType } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "fillColor"), - ) - colorField.UpdateUnsafe(color) - } - case "geojson": - s, ok := styleItem.(GeoStyle) - if !ok && styleItem != nil { - return nil, ErrFieldType - } - if s.StrokeWidth > 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) - if !ok { - return nil, ErrFieldType + if s.StrokeWidth > 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) } - widthField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeWidth"), - ) - widthField.UpdateUnsafe(width) - } - if len(s.FillColor) > 0 { - fill, ok := property.ValueTypeString.ValueFrom(s.FillColor) - if !ok { - return nil, ErrFieldType + if len(s.FillColor) > 0 { + fill, ok := property.ValueTypeString.ValueFrom(s.FillColor) + if !ok { + return nil, ErrFieldType + } + fillField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "fillColor"), + ) + fillField.UpdateUnsafe(fill) } - fillField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "fillColor"), - 
) - fillField.UpdateUnsafe(fill) - } - if len(s.StrokeColor) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) - if !ok { - return nil, ErrFieldType + if len(s.StrokeColor) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeColor"), - ) - colorField.UpdateUnsafe(color) } } - case "Polyline": v2, ok := property.ValueTypeCoordinates.ValueFrom(v) if !ok { return nil, ErrFieldType } f.UpdateUnsafe(v2) - - switch extension { - case "kml": - s, ok := styleItem.(kml.Style) - if !ok && styleItem != nil { - return nil, ErrFieldType - } - - if len(s.LineStyle.Color) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) - if !ok { + if styleItem != nil { + switch extension { + case "kml": + s, ok := styleItem.(kml.Style) + if !ok && styleItem != nil { return nil, ErrFieldType } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeColor"), - ) - colorField.UpdateUnsafe(color) - } - if s.LineStyle.Width != 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) - if !ok { - return nil, ErrFieldType + if len(s.LineStyle.Color) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) } - widthField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeWidth"), - ) - widthField.UpdateUnsafe(width) - } - case "czml": - s, ok := styleItem.(*czml.Polyline) - if !ok && styleItem != nil { - return nil, ErrFieldType - } - if s.Width != 0 { - width, 
ok := property.ValueTypeNumber.ValueFrom(s.Width) - if !ok { + if s.LineStyle.Width != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + case "czml": + s, ok := styleItem.(*czml.Polyline) + if !ok && styleItem != nil { return nil, ErrFieldType } - widthField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeWidth"), - ) - widthField.UpdateUnsafe(width) - } - - if s.Material.PolylineOutline.Color != nil { - var colorValue string - var err error - if len(s.Material.PolylineOutline.Color.RGBA) > 0 { - colorValue, err = rgbaToHex(s.Material.PolylineOutline.Color.RGBA) - if err != nil { - return nil, err + if s.Width != 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.Width) + if !ok { + return nil, ErrFieldType } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) } - if len(s.Material.PolylineOutline.Color.RGBAF) > 0 { - colorValue, err = rgbafToHex(s.Material.PolylineOutline.Color.RGBAF) - if err != nil { - return nil, err + if s.Material.PolylineOutline.Color != nil { + var colorValue string + var err error + + if len(s.Material.PolylineOutline.Color.RGBA) > 0 { + colorValue, err = rgbaToHex(s.Material.PolylineOutline.Color.RGBA) + if err != nil { + return nil, err + } } - } - color, ok := property.ValueTypeString.ValueFrom(colorValue) - if !ok { - return nil, ErrFieldType - } + if len(s.Material.PolylineOutline.Color.RGBAF) > 0 { + colorValue, err = rgbafToHex(s.Material.PolylineOutline.Color.RGBAF) + if err != nil { + return nil, err + } + } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeColor"), - ) - colorField.UpdateUnsafe(color) - } - case "geojson": - s, ok := 
styleItem.(GeoStyle) - if !ok && styleItem != nil { - return nil, ErrFieldType - } + color, ok := property.ValueTypeString.ValueFrom(colorValue) + if !ok { + return nil, ErrFieldType + } - if s.StrokeWidth > 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) - if !ok { + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) + } + case "geojson": + s, ok := styleItem.(GeoStyle) + if !ok && styleItem != nil { return nil, ErrFieldType } - widthField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeWidth"), - ) - widthField.UpdateUnsafe(width) - } - if len(s.StrokeColor) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) - if !ok { - return nil, ErrFieldType + if s.StrokeWidth > 0 { + width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if !ok { + return nil, ErrFieldType + } + widthField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeWidth"), + ) + widthField.UpdateUnsafe(width) + } + + if len(s.StrokeColor) > 0 { + color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) + if !ok { + return nil, ErrFieldType + } + colorField, _, _, _ := p.GetOrCreateField( + ps, + property.PointFieldBySchemaGroup(item, "strokeColor"), + ) + colorField.UpdateUnsafe(color) } - colorField, _, _, _ := p.GetOrCreateField( - ps, - property.PointFieldBySchemaGroup(item, "strokeColor"), - ) - colorField.UpdateUnsafe(color) } } } diff --git a/pkg/layer/decoding/geojson.go b/pkg/layer/decoding/geojson.go index b7f9e34ed..82d563a3e 100644 --- a/pkg/layer/decoding/geojson.go +++ b/pkg/layer/decoding/geojson.go @@ -2,6 +2,7 @@ package decoding import ( "errors" + "fmt" "io" geojson "github.com/paulmach/go.geojson" @@ -30,6 +31,25 @@ func NewGeoJSONDecoder(r io.Reader, s id.SceneID) *GeoJSONDecoder { } } +func disassembleMultipolygon(fc []*geojson.Feature) []*geojson.Feature { + 
var res []*geojson.Feature + for _, f := range fc { + if f.Geometry.Type == geojson.GeometryMultiPolygon { + for _, p := range f.Geometry.MultiPolygon { + nf := geojson.NewPolygonFeature(p) + for k, v := range f.Properties { + nf.SetProperty(k, v) + } + res = append(res, nf) + } + } else { + res = append(res, f) + } + } + + return res +} + func (d *GeoJSONDecoder) Decode() (Result, error) { lg, err := layer.NewGroup().NewID().Scene(d.sceneId).Name("GeoJSON").Build() if err != nil { @@ -45,10 +65,10 @@ func (d *GeoJSONDecoder) Decode() (Result, error) { if err != nil { return Result{}, errors.New("unable to parse file content") } - + fl := disassembleMultipolygon(fc.Features) // if feature collection > append it to features list, else try to decode a single feature (layer) if len(fc.Features) > 0 { - d.features = fc.Features + d.features = fl } else { f, err := geojson.UnmarshalFeature(con) if err != nil { @@ -203,6 +223,8 @@ func (d *GeoJSONDecoder) decodeLayer() (*layer.Item, *property.Property, error) } layerName = "Polygon" + default: + return nil, nil, fmt.Errorf("unsupported type %s", feat.Geometry.Type) } if feat.Properties["name"] != nil { diff --git a/pkg/layer/decoding/geojson_test.go b/pkg/layer/decoding/geojson_test.go index 62992ccfd..fe76eb311 100644 --- a/pkg/layer/decoding/geojson_test.go +++ b/pkg/layer/decoding/geojson_test.go @@ -19,12 +19,15 @@ const geojsonmock = `{ "name": "EPSG:3857" } }, - "features": [ + "features": [ { "type": "Feature", "geometry": { "type": "Point", - "coordinates": [102.0, 0.5] + "coordinates": [ + 102.0, + 0.5 + ] }, "properties": { "marker-color": "red" @@ -35,11 +38,22 @@ const geojsonmock = `{ "geometry": { "type": "LineString", "coordinates": [ - [102.0, 0.0], [103.0, 1.0], [104.0, 0.0] + [ + 102.0, + 0.0 + ], + [ + 103.0, + 1.0 + ], + [ + 104.0, + 0.0 + ] ] }, "properties": { - "stroke": "#b55e5e", + "stroke": "#b55e5e", "stroke-width": 1.6, "prop0": "value0", "prop1": 0.0 @@ -51,19 +65,82 @@ const geojsonmock = `{ 
"type": "Polygon", "coordinates": [ [ - [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], - [100.0, 1.0], [100.0, 0.0] + [ + 100.0, + 0.0 + ], + [ + 101.0, + 0.0 + ], + [ + 101.0, + 1.0 + ], + [ + 100.0, + 1.0 + ], + [ + 100.0, + 0.0 + ] + ] + ] + }, + "properties": { + "stroke": "#ffffff", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "#7c3b3b", + "fill-opacity": 0.5, + "prop0": "value0", + "prop1": { + "this": "that" + } + } + }, + { + "type": "Feature", + "geometry": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + 100.0, + 0.0 + ], + [ + 101.0, + 0.0 + ], + [ + 101.0, + 1.0 + ], + [ + 100.0, + 1.0 + ], + [ + 100.0, + 0.0 + ] + ] ] ] }, "properties": { - "stroke": "#ffffff", + "stroke": "#ffffff", "stroke-width": 2, "stroke-opacity": 1, "fill": "#7c3b3b", "fill-opacity": 0.5, "prop0": "value0", - "prop1": { "this": "that" } + "prop1": { + "this": "that" + } } } ] @@ -75,13 +152,13 @@ func TestGeoJSONDecoder_Decode(t *testing.T) { p := NewGeoJSONDecoder(r, s) result, err := p.Decode() assert.NoError(t, err) - assert.Equal(t, 4, len(result.Layers)) - assert.Equal(t, 3, len(result.Properties)) + assert.Equal(t, 5, len(result.Layers)) + assert.Equal(t, 4, len(result.Properties)) // Root layer rootLayer := result.RootLayers().ToLayerGroupList()[0] assert.NotNil(t, rootLayer) - assert.Equal(t, 3, rootLayer.Layers().LayerCount()) + assert.Equal(t, 4, rootLayer.Layers().LayerCount()) // marker prop := result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(0)).Property()] @@ -126,4 +203,22 @@ func TestGeoJSONDecoder_Decode(t *testing.T) { strokeWidth2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) assert.Equal(t, plist, f2.Value().Value()) assert.Equal(t, 2.0, strokeWidth2.Value().Value()) + + // MultiPolygon + prop = result.Properties[*result.Layers.Layer(rootLayer.Layers().LayerAt(2)).Property()] + items4 := prop.Items() + assert.NotEqual(t, 0, len(items4)) + field4 := propertyFields["Polygon"] + f4, _, _ := 
prop.Field(property.PointFieldBySchemaGroup(item, field4)) + plist3 := property.Polygon{property.Coordinates{property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 0, Height: 0}, property.LatLngHeight{Lng: 101, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 1, Height: 0}, property.LatLngHeight{Lng: 100, Lat: 0, Height: 0}}} + assert.Equal(t, f4.Value().Value(), plist3) + fillColor2, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "fillColor")) + assert.Equal(t, plist3, f3.Value().Value()) + assert.Equal(t, "#7c3b3b", fillColor2.Value().Value()) + strokeColor3, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeColor")) + assert.Equal(t, plist3, f3.Value().Value()) + assert.Equal(t, "#ffffff", strokeColor3.Value().Value()) + strokeWidth3, _, _ := prop.Field(property.PointFieldBySchemaGroup(item, "strokeWidth")) + assert.Equal(t, plist3, f3.Value().Value()) + assert.Equal(t, 2.0, strokeWidth3.Value().Value()) } From 90c32795e31294e05a120ee100ab81f7290bf623 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Thu, 2 Sep 2021 07:15:23 +0300 Subject: [PATCH 076/253] fix: skip nil geometries (#42) * fix: skip nil geomatries * test: add test case for nil geometry --- pkg/layer/decoding/geojson.go | 7 +++++-- pkg/layer/decoding/geojson_test.go | 11 +++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/pkg/layer/decoding/geojson.go b/pkg/layer/decoding/geojson.go index 82d563a3e..b21ef8bad 100644 --- a/pkg/layer/decoding/geojson.go +++ b/pkg/layer/decoding/geojson.go @@ -31,9 +31,12 @@ func NewGeoJSONDecoder(r io.Reader, s id.SceneID) *GeoJSONDecoder { } } -func disassembleMultipolygon(fc []*geojson.Feature) []*geojson.Feature { +func validateFeatures(fc []*geojson.Feature) []*geojson.Feature { var res []*geojson.Feature for _, f := range fc { + if f.Geometry == nil { + continue + } if f.Geometry.Type == geojson.GeometryMultiPolygon { for _, p := 
range f.Geometry.MultiPolygon { nf := geojson.NewPolygonFeature(p) @@ -65,7 +68,7 @@ func (d *GeoJSONDecoder) Decode() (Result, error) { if err != nil { return Result{}, errors.New("unable to parse file content") } - fl := disassembleMultipolygon(fc.Features) + fl := validateFeatures(fc.Features) // if feature collection > append it to features list, else try to decode a single feature (layer) if len(fc.Features) > 0 { d.features = fl diff --git a/pkg/layer/decoding/geojson_test.go b/pkg/layer/decoding/geojson_test.go index fe76eb311..d1fd13c69 100644 --- a/pkg/layer/decoding/geojson_test.go +++ b/pkg/layer/decoding/geojson_test.go @@ -100,6 +100,17 @@ const geojsonmock = `{ } } }, +{ + "type": "Feature", + "geometry": null, + "properties": { + "N03_001": "ๆ„›็Ÿฅ็œŒ", + "N03_002": null, + "N03_003": null, + "N03_004": "่ฑŠๆฉ‹ๅธ‚", + "N03_007": "23201" + } + }, { "type": "Feature", "geometry": { From 3651e212e24597e8e87cb4e1234eb552c9cf648f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 3 Sep 2021 12:53:25 +0900 Subject: [PATCH 077/253] chore: support multiple platform docker image --- .github/workflows/nightly.yml | 1 + Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 55c9b5167..f9358eb7b 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -60,6 +60,7 @@ jobs: uses: docker/build-push-action@v2 with: context: . 
+ platforms: linux/amd64,linux/arm64 push: true build-args: VERSION=nightly tags: reearth/reearth-backend:nightly diff --git a/Dockerfile b/Dockerfile index db1df0115..ee7a1cab9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ COPY cmd/ /reearth/cmd/ COPY pkg/ /reearth/pkg/ COPY internal/ /reearth/internal/ -RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -buildid=" -trimpath ./cmd/reearth && upx reearth +RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -buildid=" -trimpath ./cmd/reearth && upx reearth FROM scratch From 3b5f93df59dfac8c2ae3f6898b96c7f058e4ba58 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 3 Sep 2021 13:48:25 +0900 Subject: [PATCH 078/253] chore: stop using upx as it doesn't work on arm64 --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index ee7a1cab9..8bd8696cf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM golang:1.16-alpine AS build ARG TAG=release ARG VERSION -RUN apk add --update --no-cache git ca-certificates build-base upx +RUN apk add --update --no-cache git ca-certificates build-base COPY go.mod go.sum main.go /reearth/ WORKDIR /reearth @@ -12,7 +12,7 @@ COPY cmd/ /reearth/cmd/ COPY pkg/ /reearth/pkg/ COPY internal/ /reearth/internal/ -RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -buildid=" -trimpath ./cmd/reearth && upx reearth +RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -buildid=" -trimpath ./cmd/reearth FROM scratch From d3360b3c2849604d3b36ea7546c5909fb4e58d3b Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 7 Sep 2021 14:44:40 +0900 Subject: [PATCH 079/253] refactor: reorganize graphql schema (#43) --- internal/adapter/gql/generated.go | 862 ++++-------------- .../adapter/gql/gqlmodel/convert_property.go | 5 - internal/adapter/gql/gqlmodel/models_gen.go | 34 +- 
internal/adapter/gql/loader_project.go | 4 +- .../adapter/gql/resolver_mutation_scene.go | 15 +- .../adapter/gql/resolver_property_schema.go | 23 +- internal/adapter/gql/resolver_query.go | 2 +- internal/usecase/interactor/scene.go | 10 +- internal/usecase/interfaces/scene.go | 9 +- pkg/scene/sceneops/plugin_migrator.go | 12 +- pkg/scene/widget_system.go | 32 +- pkg/scene/widget_system_test.go | 81 +- schema.graphql | 90 +- 13 files changed, 353 insertions(+), 826 deletions(-) diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 87334227e..03b98ee04 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -154,11 +154,6 @@ type ComplexityRoot struct { Roll func(childComplexity int) int } - CheckProjectAliasPayload struct { - Alias func(childComplexity int) int - Available func(childComplexity int) int - } - CreateAssetPayload struct { Asset func(childComplexity int) int } @@ -572,6 +567,11 @@ type ComplexityRoot struct { Visualizer func(childComplexity int) int } + ProjectAliasAvailability struct { + Alias func(childComplexity int) int + Available func(childComplexity int) int + } + ProjectConnection struct { Edges func(childComplexity int) int Nodes func(childComplexity int) int @@ -672,7 +672,6 @@ type ComplexityRoot struct { PropertySchemaField struct { AllTranslatedDescription func(childComplexity int) int - AllTranslatedName func(childComplexity int) int AllTranslatedTitle func(childComplexity int) int Choices func(childComplexity int) int DefaultValue func(childComplexity int) int @@ -681,25 +680,20 @@ type ComplexityRoot struct { IsAvailableIf func(childComplexity int) int Max func(childComplexity int) int Min func(childComplexity int) int - Name func(childComplexity int) int Prefix func(childComplexity int) int Suffix func(childComplexity int) int Title func(childComplexity int) int TranslatedDescription func(childComplexity int, lang *string) int - TranslatedName func(childComplexity int, 
lang *string) int TranslatedTitle func(childComplexity int, lang *string) int Type func(childComplexity int) int UI func(childComplexity int) int } PropertySchemaFieldChoice struct { - AllTranslatedLabel func(childComplexity int) int AllTranslatedTitle func(childComplexity int) int Icon func(childComplexity int) int Key func(childComplexity int) int - Label func(childComplexity int) int Title func(childComplexity int) int - TranslatedLabel func(childComplexity int, lang *string) int TranslatedTitle func(childComplexity int, lang *string) int } @@ -708,7 +702,6 @@ type ComplexityRoot struct { Fields func(childComplexity int) int IsAvailableIf func(childComplexity int) int IsList func(childComplexity int) int - Name func(childComplexity int) int RepresentativeField func(childComplexity int) int RepresentativeFieldID func(childComplexity int) int Schema func(childComplexity int) int @@ -773,9 +766,8 @@ type ComplexityRoot struct { } RemoveWidgetPayload struct { - ExtensionID func(childComplexity int) int - PluginID func(childComplexity int) int - Scene func(childComplexity int) int + Scene func(childComplexity int) int + WidgetID func(childComplexity int) int } Scene struct { @@ -1108,12 +1100,10 @@ type PropertyLinkableFieldsResolver interface { } type PropertySchemaFieldResolver interface { TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) - TranslatedName(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) } type PropertySchemaFieldChoiceResolver interface { TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) - TranslatedLabel(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) } type PropertySchemaGroupResolver interface { Schema(ctx context.Context, obj 
*gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) @@ -1136,7 +1126,7 @@ type QueryResolver interface { SceneLock(ctx context.Context, sceneID id.ID) (*gqlmodel.SceneLockMode, error) DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*gqlmodel.DatasetSchema, error) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) - CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.CheckProjectAliasPayload, error) + CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) InstallablePlugins(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) } type SceneResolver interface { @@ -1435,20 +1425,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Camera.Roll(childComplexity), true - case "CheckProjectAliasPayload.alias": - if e.complexity.CheckProjectAliasPayload.Alias == nil { - break - } - - return e.complexity.CheckProjectAliasPayload.Alias(childComplexity), true - - case "CheckProjectAliasPayload.available": - if e.complexity.CheckProjectAliasPayload.Available == nil { - break - } - - return e.complexity.CheckProjectAliasPayload.Available(childComplexity), true - case "CreateAssetPayload.asset": if e.complexity.CreateAssetPayload.Asset == nil { break @@ -3837,6 +3813,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Project.Visualizer(childComplexity), true + case "ProjectAliasAvailability.alias": + if e.complexity.ProjectAliasAvailability.Alias == nil { + break + } + + return e.complexity.ProjectAliasAvailability.Alias(childComplexity), true + + case "ProjectAliasAvailability.available": + if e.complexity.ProjectAliasAvailability.Available == nil { + break + } + + return e.complexity.ProjectAliasAvailability.Available(childComplexity), true + case "ProjectConnection.edges": if e.complexity.ProjectConnection.Edges == nil { break @@ -4257,13 +4247,6 @@ func 
(e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaField.AllTranslatedDescription(childComplexity), true - case "PropertySchemaField.allTranslatedName": - if e.complexity.PropertySchemaField.AllTranslatedName == nil { - break - } - - return e.complexity.PropertySchemaField.AllTranslatedName(childComplexity), true - case "PropertySchemaField.allTranslatedTitle": if e.complexity.PropertySchemaField.AllTranslatedTitle == nil { break @@ -4320,13 +4303,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaField.Min(childComplexity), true - case "PropertySchemaField.name": - if e.complexity.PropertySchemaField.Name == nil { - break - } - - return e.complexity.PropertySchemaField.Name(childComplexity), true - case "PropertySchemaField.prefix": if e.complexity.PropertySchemaField.Prefix == nil { break @@ -4360,18 +4336,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaField.TranslatedDescription(childComplexity, args["lang"].(*string)), true - case "PropertySchemaField.translatedName": - if e.complexity.PropertySchemaField.TranslatedName == nil { - break - } - - args, err := ec.field_PropertySchemaField_translatedName_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.PropertySchemaField.TranslatedName(childComplexity, args["lang"].(*string)), true - case "PropertySchemaField.translatedTitle": if e.complexity.PropertySchemaField.TranslatedTitle == nil { break @@ -4398,13 +4362,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaField.UI(childComplexity), true - case "PropertySchemaFieldChoice.allTranslatedLabel": - if e.complexity.PropertySchemaFieldChoice.AllTranslatedLabel == nil { - break - } - - return 
e.complexity.PropertySchemaFieldChoice.AllTranslatedLabel(childComplexity), true - case "PropertySchemaFieldChoice.allTranslatedTitle": if e.complexity.PropertySchemaFieldChoice.AllTranslatedTitle == nil { break @@ -4426,13 +4383,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaFieldChoice.Key(childComplexity), true - case "PropertySchemaFieldChoice.label": - if e.complexity.PropertySchemaFieldChoice.Label == nil { - break - } - - return e.complexity.PropertySchemaFieldChoice.Label(childComplexity), true - case "PropertySchemaFieldChoice.title": if e.complexity.PropertySchemaFieldChoice.Title == nil { break @@ -4440,18 +4390,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaFieldChoice.Title(childComplexity), true - case "PropertySchemaFieldChoice.translatedLabel": - if e.complexity.PropertySchemaFieldChoice.TranslatedLabel == nil { - break - } - - args, err := ec.field_PropertySchemaFieldChoice_translatedLabel_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.PropertySchemaFieldChoice.TranslatedLabel(childComplexity, args["lang"].(*string)), true - case "PropertySchemaFieldChoice.translatedTitle": if e.complexity.PropertySchemaFieldChoice.TranslatedTitle == nil { break @@ -4492,13 +4430,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PropertySchemaGroup.IsList(childComplexity), true - case "PropertySchemaGroup.name": - if e.complexity.PropertySchemaGroup.Name == nil { - break - } - - return e.complexity.PropertySchemaGroup.Name(childComplexity), true - case "PropertySchemaGroup.representativeField": if e.complexity.PropertySchemaGroup.RepresentativeField == nil { break @@ -4843,26 +4774,19 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 
e.complexity.RemoveMemberFromTeamPayload.Team(childComplexity), true - case "RemoveWidgetPayload.extensionId": - if e.complexity.RemoveWidgetPayload.ExtensionID == nil { - break - } - - return e.complexity.RemoveWidgetPayload.ExtensionID(childComplexity), true - - case "RemoveWidgetPayload.pluginId": - if e.complexity.RemoveWidgetPayload.PluginID == nil { + case "RemoveWidgetPayload.scene": + if e.complexity.RemoveWidgetPayload.Scene == nil { break } - return e.complexity.RemoveWidgetPayload.PluginID(childComplexity), true + return e.complexity.RemoveWidgetPayload.Scene(childComplexity), true - case "RemoveWidgetPayload.scene": - if e.complexity.RemoveWidgetPayload.Scene == nil { + case "RemoveWidgetPayload.widgetId": + if e.complexity.RemoveWidgetPayload.WidgetID == nil { break } - return e.complexity.RemoveWidgetPayload.Scene(childComplexity), true + return e.complexity.RemoveWidgetPayload.WidgetID(childComplexity), true case "Scene.createdAt": if e.complexity.Scene.CreatedAt == nil { @@ -5497,9 +5421,38 @@ directive @goField( name: String ) on INPUT_FIELD_DEFINITION | FIELD_DEFINITION -# Basic types +# Meta Type scalar Cursor + +interface Node { + id: ID! +} + +enum NodeType { + ASSET + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM +} + +type PageInfo { + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} + +# Basic types + scalar DateTime scalar URL scalar Lang @@ -5508,8 +5461,8 @@ scalar PluginID scalar PluginExtensionID scalar PropertySchemaID scalar PropertySchemaFieldID -scalar TranslatedString scalar DatasetSchemaFieldID +scalar TranslatedString type LatLng { lat: Float! @@ -5585,19 +5538,6 @@ enum Theme { DARK } -# Meta Type - -interface Node { - id: ID! -} - -type PageInfo { - startCursor: Cursor - endCursor: Cursor - hasNextPage: Boolean! - hasPreviousPage: Boolean! 
-} - # Asset type Asset implements Node { @@ -5631,7 +5571,7 @@ type SearchedUser { userEmail: String! } -type CheckProjectAliasPayload { +type ProjectAliasAvailability { alias: String! available: Boolean! } @@ -5839,8 +5779,6 @@ type PropertySchemaGroup { isAvailableIf: PropertyCondition title: String allTranslatedTitle: TranslatedString - # For compatibility: "name" field will be removed in the futrue - name: PropertySchemaFieldID representativeFieldId: PropertySchemaFieldID representativeField: PropertySchemaField schema: PropertySchema @goField(forceResolver: true) @@ -5851,8 +5789,6 @@ type PropertySchemaField { fieldId: PropertySchemaFieldID! type: ValueType! title: String! - # For compatibility: "name" field will be removed in the futrue - name: String! description: String! prefix: String suffix: String @@ -5863,12 +5799,8 @@ type PropertySchemaField { choices: [PropertySchemaFieldChoice!] isAvailableIf: PropertyCondition allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedName" field will be removed in the futrue - allTranslatedName: TranslatedString allTranslatedDescription: TranslatedString translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedName" field will be removed in the futrue - translatedName(lang: String): String! @goField(forceResolver: true) translatedDescription(lang: String): String! @goField(forceResolver: true) } @@ -5887,15 +5819,9 @@ enum PropertySchemaFieldUI { type PropertySchemaFieldChoice { key: String! title: String! - # For compatibility: "label" field will be removed in the futrue - label: String! icon: String allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedLabel" field will be removed in the futrue - allTranslatedLabel: TranslatedString translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedLabel" field will be removed in the futrue - translatedLabel(lang: String): String! 
@goField(forceResolver: true) } type PropertyCondition { @@ -6191,6 +6117,7 @@ type MergedInfoboxField { } # InputType + input CreateAssetInput { teamId: ID! file: Upload! @@ -6311,15 +6238,13 @@ input AddWidgetInput { input UpdateWidgetInput { sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + widgetId: ID! enabled: Boolean } input RemoveWidgetInput { sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + widgetId: ID! } input InstallPluginInput { @@ -6531,6 +6456,7 @@ input AddDatasetSchemaInput { } # Payload + type CreateAssetPayload { asset: Asset! } @@ -6606,8 +6532,7 @@ type UpdateWidgetPayload { type RemoveWidgetPayload { scene: Scene! - pluginId: PluginID! - extensionId: PluginExtensionID! + widgetId: ID! } type InstallPluginPayload { @@ -6726,21 +6651,6 @@ type AddDatasetSchemaPayload { # Connection -enum NodeType { - ASSET - USER - TEAM - PROJECT - PLUGIN - SCENE - PROPERTY_SCHEMA - PROPERTY - DATASET_SCHEMA - DATASET - LAYER_GROUP - LAYER_ITEM -} - type AssetConnection { edges: [AssetEdge!]! nodes: [Asset]! @@ -6833,7 +6743,7 @@ type Query { sceneLock(sceneId: ID!): SceneLockMode dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! searchUser(nameOrEmail: String!): SearchedUser - checkProjectAlias(alias: String!): CheckProjectAliasPayload! + checkProjectAlias(alias: String!): ProjectAliasAvailability! installablePlugins: [PluginMetadata!]! 
} @@ -7843,21 +7753,6 @@ func (ec *executionContext) field_Plugin_translatedName_args(ctx context.Context return args, nil } -func (ec *executionContext) field_PropertySchemaFieldChoice_translatedLabel_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 *string - if tmp, ok := rawArgs["lang"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) - if err != nil { - return nil, err - } - } - args["lang"] = arg0 - return args, nil -} - func (ec *executionContext) field_PropertySchemaFieldChoice_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -7888,21 +7783,6 @@ func (ec *executionContext) field_PropertySchemaField_translatedDescription_args return args, nil } -func (ec *executionContext) field_PropertySchemaField_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 *string - if tmp, ok := rawArgs["lang"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) - if err != nil { - return nil, err - } - } - args["lang"] = arg0 - return args, nil -} - func (ec *executionContext) field_PropertySchemaField_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -9768,76 +9648,6 @@ func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _CheckProjectAliasPayload_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CheckProjectAliasPayload) (ret graphql.Marshaler) { - 
defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "CheckProjectAliasPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Alias, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - -func (ec *executionContext) _CheckProjectAliasPayload_available(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CheckProjectAliasPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "CheckProjectAliasPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Available, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(bool) - fc.Result = res - return ec.marshalNBoolean2bool(ctx, field.Selections, res) -} - func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateAssetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -20227,6 +20037,76 @@ func 
(ec *executionContext) _Project_scene(ctx context.Context, field graphql.Co return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) _ProjectAliasAvailability_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectAliasAvailability", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Alias, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _ProjectAliasAvailability_available(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "ProjectAliasAvailability", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Available, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -22316,41 +22196,6 @@ func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Name, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -22674,38 +22519,6 @@ func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context. 
return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.AllTranslatedName, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(map[string]string) - fc.Result = res - return ec.marshalOTranslatedString2map(ctx, field.Selections, res) -} - func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -22780,48 +22593,6 @@ func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Con return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PropertySchemaField_translatedName_args(ctx, rawArgs) - if err != nil { - 
ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaField().TranslatedName(rctx, obj, args["lang"].(*string)) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -22934,41 +22705,6 @@ func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaFieldChoice", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Label, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - func (ec 
*executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -23033,38 +22769,6 @@ func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx co return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedLabel(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaFieldChoice", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.AllTranslatedLabel, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(map[string]string) - fc.Result = res - return ec.marshalOTranslatedString2map(ctx, field.Selections, res) -} - func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -23107,48 +22811,6 @@ func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx conte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_translatedLabel(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) 
- ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaFieldChoice", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PropertySchemaFieldChoice_translatedLabel_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaFieldChoice().TranslatedLabel(rctx, obj, args["lang"].(*string)) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -23385,38 +23047,6 @@ func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context. 
return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Name, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*id.PropertySchemaFieldID) - fc.Result = res - return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) -} - func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -24233,9 +23863,9 @@ func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field } return graphql.Null } - res := resTmp.(*gqlmodel.CheckProjectAliasPayload) + res := resTmp.(*gqlmodel.ProjectAliasAvailability) fc.Result = res - return ec.marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCheckProjectAliasPayload(ctx, field.Selections, res) + return ec.marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx, field.Selections, res) } func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field graphql.CollectedField) 
(ret graphql.Marshaler) { @@ -24799,42 +24429,7 @@ func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, fiel return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveWidgetPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "RemoveWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.PluginID, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(id.PluginID) - fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) -} - -func (ec *executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _RemoveWidgetPayload_widgetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -24852,7 +24447,7 @@ func (ec *executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from 
middleware stack in children - return obj.ExtensionID, nil + return obj.WidgetID, nil }) if err != nil { ec.Error(ctx, err) @@ -24864,9 +24459,9 @@ func (ec *executionContext) _RemoveWidgetPayload_extensionId(ctx context.Context } return graphql.Null } - res := resTmp.(id.PluginExtensionID) + res := resTmp.(id.ID) fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -29878,19 +29473,11 @@ func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, if err != nil { return it, err } - case "pluginId": + case "widgetId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) - if err != nil { - return it, err - } - case "extensionId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) - it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("widgetId")) + it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) if err != nil { return it, err } @@ -30534,19 +30121,11 @@ func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, if err != nil { return it, err } - case "pluginId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) - if err != nil { 
- return it, err - } - case "extensionId": + case "widgetId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) - it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("widgetId")) + it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) if err != nil { return it, err } @@ -31266,38 +30845,6 @@ func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, o return out } -var checkProjectAliasPayloadImplementors = []string{"CheckProjectAliasPayload"} - -func (ec *executionContext) _CheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CheckProjectAliasPayload) graphql.Marshaler { - fields := graphql.CollectFields(ec.OperationContext, sel, checkProjectAliasPayloadImplementors) - - out := graphql.NewFieldSet(fields) - var invalids uint32 - for i, field := range fields { - switch field.Name { - case "__typename": - out.Values[i] = graphql.MarshalString("CheckProjectAliasPayload") - case "alias": - out.Values[i] = ec._CheckProjectAliasPayload_alias(ctx, field, obj) - if out.Values[i] == graphql.Null { - invalids++ - } - case "available": - out.Values[i] = ec._CheckProjectAliasPayload_available(ctx, field, obj) - if out.Values[i] == graphql.Null { - invalids++ - } - default: - panic("unknown field " + strconv.Quote(field.Name)) - } - } - out.Dispatch() - if invalids > 0 { - return graphql.Null - } - return out -} - var createAssetPayloadImplementors = []string{"CreateAssetPayload"} func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateAssetPayload) graphql.Marshaler { @@ -33783,6 +33330,38 @@ func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, return out } +var projectAliasAvailabilityImplementors = 
[]string{"ProjectAliasAvailability"} + +func (ec *executionContext) _ProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, projectAliasAvailabilityImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("ProjectAliasAvailability") + case "alias": + out.Values[i] = ec._ProjectAliasAvailability_alias(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "available": + out.Values[i] = ec._ProjectAliasAvailability_available(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var projectConnectionImplementors = []string{"ProjectConnection"} func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectConnection) graphql.Marshaler { @@ -34471,11 +34050,6 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } - case "name": - out.Values[i] = ec._PropertySchemaField_name(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } case "description": out.Values[i] = ec._PropertySchemaField_description(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -34499,8 +34073,6 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se out.Values[i] = ec._PropertySchemaField_isAvailableIf(ctx, field, obj) case "allTranslatedTitle": out.Values[i] = ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) - case "allTranslatedName": - out.Values[i] = ec._PropertySchemaField_allTranslatedName(ctx, field, obj) case 
"allTranslatedDescription": out.Values[i] = ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) case "translatedTitle": @@ -34517,20 +34089,6 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se } return res }) - case "translatedName": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._PropertySchemaField_translatedName(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res - }) case "translatedDescription": field := field out.Concurrently(i, func() (res graphql.Marshaler) { @@ -34577,17 +34135,10 @@ func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } - case "label": - out.Values[i] = ec._PropertySchemaFieldChoice_label(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } case "icon": out.Values[i] = ec._PropertySchemaFieldChoice_icon(ctx, field, obj) case "allTranslatedTitle": out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) - case "allTranslatedLabel": - out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedLabel(ctx, field, obj) case "translatedTitle": field := field out.Concurrently(i, func() (res graphql.Marshaler) { @@ -34602,20 +34153,6 @@ func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel } return res }) - case "translatedLabel": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._PropertySchemaFieldChoice_translatedLabel(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res - }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -34664,8 +34201,6 @@ func (ec 
*executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.Se out.Values[i] = ec._PropertySchemaGroup_title(ctx, field, obj) case "allTranslatedTitle": out.Values[i] = ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) - case "name": - out.Values[i] = ec._PropertySchemaGroup_name(ctx, field, obj) case "representativeFieldId": out.Values[i] = ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) case "representativeField": @@ -35194,13 +34729,8 @@ func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.Se if out.Values[i] == graphql.Null { invalids++ } - case "pluginId": - out.Values[i] = ec._RemoveWidgetPayload_pluginId(ctx, field, obj) - if out.Values[i] == graphql.Null { - invalids++ - } - case "extensionId": - out.Values[i] = ec._RemoveWidgetPayload_extensionId(ctx, field, obj) + case "widgetId": + out.Values[i] = ec._RemoveWidgetPayload_widgetId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } @@ -36515,20 +36045,6 @@ func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.Se return res } -func (ec *executionContext) marshalNCheckProjectAliasPayload2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v gqlmodel.CheckProjectAliasPayload) graphql.Marshaler { - return ec._CheckProjectAliasPayload(ctx, sel, &v) -} - -func (ec *executionContext) marshalNCheckProjectAliasPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCheckProjectAliasPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CheckProjectAliasPayload) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._CheckProjectAliasPayload(ctx, sel, v) -} - func (ec *executionContext) 
unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetInput(ctx context.Context, v interface{}) (gqlmodel.CreateAssetInput, error) { res, err := ec.unmarshalInputCreateAssetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -37811,6 +37327,20 @@ func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearth return ec._Project(ctx, sel, v) } +func (ec *executionContext) marshalNProjectAliasAvailability2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + return ec._ProjectAliasAvailability(ctx, sel, &v) +} + +func (ec *executionContext) marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._ProjectAliasAvailability(ctx, sel, v) +} + func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectConnection) graphql.Marshaler { return ec._ProjectConnection(ctx, sel, &v) } diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index a69e3127c..bc7491c87 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -394,9 +394,7 @@ func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField { choices = append(choices, &PropertySchemaFieldChoice{ Key: k.Key, Title: k.Title.String(), - Label: 
k.Title.String(), // deprecated AllTranslatedTitle: k.Title, - AllTranslatedLabel: k.Title, // deprecated Icon: stringToRef(k.Icon), }) } @@ -406,7 +404,6 @@ func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField { FieldID: f.ID(), Type: ToPropertyValueType(f.Type()), Title: f.Title().String(), - Name: f.Title().String(), // deprecated Description: f.Description().String(), Prefix: stringToRef(f.Prefix()), Suffix: stringToRef(f.Suffix()), @@ -417,7 +414,6 @@ func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField { Choices: choices, IsAvailableIf: ToPropertyConditon(f.IsAvailableIf()), AllTranslatedTitle: f.Title(), - AllTranslatedName: f.Title(), // deprecated AllTranslatedDescription: f.Description(), } } @@ -542,7 +538,6 @@ func ToPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { IsList: g.IsList(), Title: g.Title().StringRef(), Fields: fields, - Name: representativeFieldID, // deprecated RepresentativeFieldID: representativeFieldID, RepresentativeField: representativeField, AllTranslatedTitle: g.Title(), diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index fa12a356b..3bd17ea62 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -171,11 +171,6 @@ type Camera struct { Fov float64 `json:"fov"` } -type CheckProjectAliasPayload struct { - Alias string `json:"alias"` - Available bool `json:"available"` -} - type CreateAssetInput struct { TeamID id.ID `json:"teamId"` File graphql.Upload `json:"file"` @@ -641,6 +636,11 @@ type Project struct { func (Project) IsNode() {} +type ProjectAliasAvailability struct { + Alias string `json:"alias"` + Available bool `json:"available"` +} + type ProjectConnection struct { Edges []*ProjectEdge `json:"edges"` Nodes []*Project `json:"nodes"` @@ -749,7 +749,6 @@ type PropertySchemaField struct { FieldID id.PropertySchemaFieldID `json:"fieldId"` Type ValueType 
`json:"type"` Title string `json:"title"` - Name string `json:"name"` Description string `json:"description"` Prefix *string `json:"prefix"` Suffix *string `json:"suffix"` @@ -760,22 +759,17 @@ type PropertySchemaField struct { Choices []*PropertySchemaFieldChoice `json:"choices"` IsAvailableIf *PropertyCondition `json:"isAvailableIf"` AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` - AllTranslatedName map[string]string `json:"allTranslatedName"` AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` TranslatedTitle string `json:"translatedTitle"` - TranslatedName string `json:"translatedName"` TranslatedDescription string `json:"translatedDescription"` } type PropertySchemaFieldChoice struct { Key string `json:"key"` Title string `json:"title"` - Label string `json:"label"` Icon *string `json:"icon"` AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` - AllTranslatedLabel map[string]string `json:"allTranslatedLabel"` TranslatedTitle string `json:"translatedTitle"` - TranslatedLabel string `json:"translatedLabel"` } type PropertySchemaGroup struct { @@ -786,7 +780,6 @@ type PropertySchemaGroup struct { IsAvailableIf *PropertyCondition `json:"isAvailableIf"` Title *string `json:"title"` AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` - Name *id.PropertySchemaFieldID `json:"name"` RepresentativeFieldID *id.PropertySchemaFieldID `json:"representativeFieldId"` RepresentativeField *PropertySchemaField `json:"representativeField"` Schema *PropertySchema `json:"schema"` @@ -877,15 +870,13 @@ type RemovePropertyItemInput struct { } type RemoveWidgetInput struct { - SceneID id.ID `json:"sceneId"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` + SceneID id.ID `json:"sceneId"` + WidgetID id.ID `json:"widgetId"` } type RemoveWidgetPayload struct { - Scene *Scene `json:"scene"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID 
`json:"extensionId"` + Scene *Scene `json:"scene"` + WidgetID id.ID `json:"widgetId"` } type Scene struct { @@ -1096,10 +1087,9 @@ type UpdateTeamPayload struct { } type UpdateWidgetInput struct { - SceneID id.ID `json:"sceneId"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` - Enabled *bool `json:"enabled"` + SceneID id.ID `json:"sceneId"` + WidgetID id.ID `json:"widgetId"` + Enabled *bool `json:"enabled"` } type UpdateWidgetPayload struct { diff --git a/internal/adapter/gql/loader_project.go b/internal/adapter/gql/loader_project.go index b4055fb44..d97282af0 100644 --- a/internal/adapter/gql/loader_project.go +++ b/internal/adapter/gql/loader_project.go @@ -57,13 +57,13 @@ func (c *ProjectLoader) FindByTeam(ctx context.Context, teamID id.TeamID, first }, nil } -func (c *ProjectLoader) CheckAlias(ctx context.Context, alias string) (*gqlmodel.CheckProjectAliasPayload, error) { +func (c *ProjectLoader) CheckAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) { ok, err := c.usecase.CheckAlias(ctx, alias) if err != nil { return nil, err } - return &gqlmodel.CheckProjectAliasPayload{Alias: alias, Available: ok}, nil + return &gqlmodel.ProjectAliasAvailability{Alias: alias, Available: ok}, nil } // data loaders diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index 03ee76d2f..d15dd7309 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -55,10 +55,9 @@ func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.Upda defer exit() scene, widget, err := r.usecases.Scene.UpdateWidget(ctx, interfaces.UpdateWidgetParam{ - SceneID: id.SceneID(input.SceneID), - PluginID: input.PluginID, - ExtensionID: id.PluginExtensionID(input.ExtensionID), - Enabled: input.Enabled, + SceneID: id.SceneID(input.SceneID), + WidgetID: id.WidgetID(input.WidgetID), + Enabled: 
input.Enabled, }, getOperator(ctx)) if err != nil { return nil, err @@ -76,8 +75,7 @@ func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.Remo scene, err := r.usecases.Scene.RemoveWidget(ctx, id.SceneID(input.SceneID), - id.PluginID(input.PluginID), - id.PluginExtensionID(input.ExtensionID), + id.WidgetID(input.WidgetID), getOperator(ctx), ) if err != nil { @@ -85,9 +83,8 @@ func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.Remo } return &gqlmodel.RemoveWidgetPayload{ - Scene: gqlmodel.ToScene(scene), - PluginID: input.PluginID, - ExtensionID: input.ExtensionID, + Scene: gqlmodel.ToScene(scene), + WidgetID: input.WidgetID, }, nil } diff --git a/internal/adapter/gql/resolver_property_schema.go b/internal/adapter/gql/resolver_property_schema.go index b670f50e7..5efe5196c 100644 --- a/internal/adapter/gql/resolver_property_schema.go +++ b/internal/adapter/gql/resolver_property_schema.go @@ -31,18 +31,7 @@ func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj * if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } - return obj.Name, nil -} - -// deprecated -func (r *propertySchemaFieldResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - - if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Name, nil + return obj.Title, nil } func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { @@ -112,13 +101,5 @@ func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } - return obj.Label, nil -} - -// deprecated -func (r *propertySchemaFieldChoiceResolver) TranslatedLabel(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, 
error) { - if s, ok := obj.AllTranslatedLabel[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Label, nil + return obj.Title, nil } diff --git a/internal/adapter/gql/resolver_query.go b/internal/adapter/gql/resolver_query.go index 4ce654c4c..5d8775c7a 100644 --- a/internal/adapter/gql/resolver_query.go +++ b/internal/adapter/gql/resolver_query.go @@ -326,7 +326,7 @@ func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gq return r.loaders.User.SearchUser(ctx, nameOrEmail) } -func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.CheckProjectAliasPayload, error) { +func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) { exit := trace(ctx) defer exit() diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 682413cf8..2b7990304 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -68,7 +68,6 @@ func (i *Scene) FindByProject(ctx context.Context, id id.ProjectID, operator *us } func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase.Operator) (_ *scene.Scene, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -218,7 +217,6 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, } func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetParam, operator *usecase.Operator) (_ *scene.Scene, _ *scene.Widget, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -247,7 +245,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP } ws := scene.WidgetSystem() - widget := ws.Widget(param.PluginID, param.ExtensionID) + widget := ws.Widget(param.WidgetID) if widget == nil { return nil, nil, rerror.ErrNotFound } @@ -265,7 +263,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return scene, 
widget, nil } -func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, pid id.PluginID, eid id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, err error) { +func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID, operator *usecase.Operator) (_ *scene.Scene, err error) { tx, err := i.transaction.Begin() if err != nil { @@ -296,12 +294,12 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, pid id.PluginID ws := scene.WidgetSystem() - widget := ws.Widget(pid, eid) + widget := ws.Widget(wid) if widget == nil { return nil, rerror.ErrNotFound } - ws.Remove(pid, eid) + ws.Remove(wid) err2 = i.propertyRepo.Remove(ctx, widget.Property()) if err2 != nil { diff --git a/internal/usecase/interfaces/scene.go b/internal/usecase/interfaces/scene.go index 04307e292..0d7669929 100644 --- a/internal/usecase/interfaces/scene.go +++ b/internal/usecase/interfaces/scene.go @@ -10,10 +10,9 @@ import ( ) type UpdateWidgetParam struct { - SceneID id.SceneID - PluginID id.PluginID - ExtensionID id.PluginExtensionID - Enabled *bool + SceneID id.SceneID + WidgetID id.WidgetID + Enabled *bool } var ( @@ -29,7 +28,7 @@ type Scene interface { Create(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error) AddWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, *scene.Widget, error) UpdateWidget(context.Context, UpdateWidgetParam, *usecase.Operator) (*scene.Scene, *scene.Widget, error) - RemoveWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, error) + RemoveWidget(context.Context, id.SceneID, id.WidgetID, *usecase.Operator) (*scene.Scene, error) InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, id.PluginID, *id.PropertyID, error) UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error) UpgradePlugin(context.Context, id.SceneID, 
id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error) diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 33340c80b..2971b996c 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -43,6 +43,10 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol return MigratePluginsResult{}, ErrInvalidPlugins } + if !sc.PluginSystem().Has(oldPluginID) { + return MigratePluginsResult{}, ErrPluginNotInstalled + } + plugins, err := s.Plugin(ctx, []id.PluginID{oldPluginID, newPluginID}, []id.SceneID{sc.ID()}) if err != nil || len(plugins) < 2 { return MigratePluginsResult{}, ErrInvalidPlugins @@ -51,10 +55,6 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol oldPlugin := plugins[0] newPlugin := plugins[1] - if !sc.PluginSystem().Has(oldPluginID) { - return MigratePluginsResult{}, ErrPluginNotInstalled - } - // ๅ…จใƒฌใ‚คใƒคใƒผใฎๅ–ๅพ— layers, err := s.Layer(ctx, sc.ID()) if err != nil { @@ -109,11 +109,11 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol } // ใ‚ทใƒผใƒณใฎใ‚ฆใ‚ฃใ‚ธใ‚งใƒƒใƒˆ - sc.WidgetSystem().Replace(oldPluginID, newPluginID) + sc.WidgetSystem().ReplacePlugin(oldPluginID, newPluginID) for _, w := range sc.WidgetSystem().Widgets() { if w.Plugin().Equal(newPluginID) { if newPlugin.Extension(w.Extension()) == nil { - sc.WidgetSystem().Remove(oldPluginID, w.Extension()) + sc.WidgetSystem().RemoveAllByExtension(oldPluginID, w.Extension()) } else { propertyIDs = append(propertyIDs, w.Property()) } diff --git a/pkg/scene/widget_system.go b/pkg/scene/widget_system.go index f0ad465fb..87a9aa246 100644 --- a/pkg/scene/widget_system.go +++ b/pkg/scene/widget_system.go @@ -45,24 +45,24 @@ func (w *WidgetSystem) Widgets() []*Widget { return append([]*Widget{}, w.widgets...) 
} -func (w *WidgetSystem) Widget(p id.PluginID, e id.PluginExtensionID) *Widget { +func (w *WidgetSystem) Widget(wid id.WidgetID) *Widget { if w == nil { return nil } for _, ww := range w.widgets { - if ww.plugin.Equal(p) && ww.extension == e { + if ww.ID() == wid { return ww } } return nil } -func (w *WidgetSystem) Has(p id.PluginID, e id.PluginExtensionID) bool { +func (w *WidgetSystem) Has(wid id.WidgetID) bool { if w == nil { return false } for _, w2 := range w.widgets { - if w2.plugin.Equal(p) && w2.extension == e { + if w2.ID() == wid { return true } } @@ -70,21 +70,21 @@ func (w *WidgetSystem) Has(p id.PluginID, e id.PluginExtensionID) bool { } func (w *WidgetSystem) Add(sw *Widget) { - if w == nil || sw == nil || w.Has(sw.plugin, sw.extension) { + if w == nil || sw == nil || w.Has(sw.ID()) { return } sw2 := *sw w.widgets = append(w.widgets, &sw2) } -func (w *WidgetSystem) Remove(p id.PluginID, e id.PluginExtensionID) { +func (w *WidgetSystem) Remove(wid id.WidgetID) { if w == nil { return } for i := 0; i < len(w.widgets); i++ { - if w.widgets[i].plugin.Equal(p) && w.widgets[i].extension == e { + if w.widgets[i].ID() == wid { w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) - i-- + return } } } @@ -103,7 +103,21 @@ func (w *WidgetSystem) RemoveAllByPlugin(p id.PluginID) (res []id.PropertyID) { return res } -func (w *WidgetSystem) Replace(oldp, newp id.PluginID) { +func (w *WidgetSystem) RemoveAllByExtension(p id.PluginID, e id.PluginExtensionID) (res []id.PropertyID) { + if w == nil { + return nil + } + for i := 0; i < len(w.widgets); i++ { + if w.widgets[i].Plugin().Equal(p) && w.widgets[i].Extension() == e { + res = append(res, w.widgets[i].Property()) + w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) 
+ i-- + } + } + return res +} + +func (w *WidgetSystem) ReplacePlugin(oldp, newp id.PluginID) { if w == nil || w.widgets == nil { return } diff --git a/pkg/scene/widget_system_test.go b/pkg/scene/widget_system_test.go index 665faeb80..9f765e9cf 100644 --- a/pkg/scene/widget_system_test.go +++ b/pkg/scene/widget_system_test.go @@ -147,19 +147,18 @@ func TestWidgetSystem_Remove(t *testing.T) { testCases := []struct { Name string - PID id.PluginID - EID id.PluginExtensionID + ID id.WidgetID WS, Expected *WidgetSystem }{ { Name: "remove a widget", - PID: pid, - EID: "e1", + ID: w1.ID(), WS: NewWidgetSystem([]*Widget{w1, w2, w3, w4}), - Expected: NewWidgetSystem([]*Widget{w3, w4}), + Expected: NewWidgetSystem([]*Widget{w2, w3, w4}), }, { Name: "remove from nil widgetSystem", + ID: w1.ID(), WS: nil, Expected: nil, }, @@ -168,7 +167,7 @@ func TestWidgetSystem_Remove(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - tc.WS.Remove(tc.PID, tc.EID) + tc.WS.Remove(tc.ID) assert.Equal(tt, tc.Expected, tc.WS) }) } @@ -211,7 +210,49 @@ func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { } } -func TestWidgetSystem_Replace(t *testing.T) { +func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { + pid := id.MustPluginID("xxx~1.1.1") + pid2 := id.MustPluginID("xxx~1.1.2") + w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true) + w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true) + w3 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true) + w4 := MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true) + + testCases := []struct { + Name string + PID id.PluginID + EID id.PluginExtensionID + WS, Expected *WidgetSystem + ExpectedResult []id.PropertyID + }{ + { + Name: "remove widgets", + PID: pid, + EID: id.PluginExtensionID("e1"), + WS: NewWidgetSystem([]*Widget{w1, w2, w3, w4}), + Expected: NewWidgetSystem([]*Widget{w2, w4}), + ExpectedResult: []id.PropertyID{w1.Property(), 
w3.Property()}, + }, + { + Name: "remove widgets from nil widget system", + PID: pid, + EID: id.PluginExtensionID("e1"), + WS: nil, + Expected: nil, + ExpectedResult: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.ExpectedResult, tc.WS.RemoveAllByExtension(tc.PID, tc.EID)) + assert.Equal(tt, tc.Expected, tc.WS) + }) + } +} + +func TestWidgetSystem_ReplacePlugin(t *testing.T) { pid := id.MustPluginID("xxx~1.1.1") pid2 := id.MustPluginID("zzz~1.1.1") pr := id.NewPropertyID() @@ -244,7 +285,7 @@ func TestWidgetSystem_Replace(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - tc.WS.Replace(tc.PID, tc.NewID) + tc.WS.ReplacePlugin(tc.PID, tc.NewID) assert.Equal(tt, tc.Expected, tc.WS) }) } @@ -329,27 +370,25 @@ func TestWidgetSystem_Widget(t *testing.T) { wid := id.NewWidgetID() testCases := []struct { Name string - PID id.PluginID - EID id.PluginExtensionID + ID id.WidgetID WS *WidgetSystem Expected *Widget }{ { Name: "get a widget", - PID: pid, - EID: "eee", + ID: wid, WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), Expected: MustNewWidget(wid, pid, "eee", pr, true), }, { Name: "dont has the widget", - PID: pid, - EID: "eee", + ID: wid, WS: NewWidgetSystem([]*Widget{}), Expected: nil, }, { Name: "get widget from nil widgetSystem", + ID: wid, WS: nil, Expected: nil, }, @@ -358,7 +397,7 @@ func TestWidgetSystem_Widget(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res := tc.WS.Widget(tc.PID, tc.EID) + res := tc.WS.Widget(tc.ID) assert.Equal(tt, tc.Expected, res) }) } @@ -370,27 +409,25 @@ func TestWidgetSystem_Has(t *testing.T) { wid := id.NewWidgetID() testCases := []struct { Name string - PID id.PluginID - EID id.PluginExtensionID + ID id.WidgetID WS *WidgetSystem Expected bool }{ { Name: "has a widget", - PID: pid, - EID: "eee", + ID: wid, WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", 
pr, true)}), Expected: true, }, { Name: "dont has a widget", - PID: pid, - EID: "eee", + ID: wid, WS: NewWidgetSystem([]*Widget{}), Expected: false, }, { Name: "has from nil widgetSystem", + ID: wid, WS: nil, Expected: false, }, @@ -399,7 +436,7 @@ func TestWidgetSystem_Has(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res := tc.WS.Has(tc.PID, tc.EID) + res := tc.WS.Has(tc.ID) assert.Equal(tt, tc.Expected, res) }) } diff --git a/schema.graphql b/schema.graphql index 02d9b34cc..b7c03f253 100644 --- a/schema.graphql +++ b/schema.graphql @@ -13,9 +13,38 @@ directive @goField( name: String ) on INPUT_FIELD_DEFINITION | FIELD_DEFINITION -# Basic types +# Meta Type scalar Cursor + +interface Node { + id: ID! +} + +enum NodeType { + ASSET + USER + TEAM + PROJECT + PLUGIN + SCENE + PROPERTY_SCHEMA + PROPERTY + DATASET_SCHEMA + DATASET + LAYER_GROUP + LAYER_ITEM +} + +type PageInfo { + startCursor: Cursor + endCursor: Cursor + hasNextPage: Boolean! + hasPreviousPage: Boolean! +} + +# Basic types + scalar DateTime scalar URL scalar Lang @@ -24,8 +53,8 @@ scalar PluginID scalar PluginExtensionID scalar PropertySchemaID scalar PropertySchemaFieldID -scalar TranslatedString scalar DatasetSchemaFieldID +scalar TranslatedString type LatLng { lat: Float! @@ -101,19 +130,6 @@ enum Theme { DARK } -# Meta Type - -interface Node { - id: ID! -} - -type PageInfo { - startCursor: Cursor - endCursor: Cursor - hasNextPage: Boolean! - hasPreviousPage: Boolean! -} - # Asset type Asset implements Node { @@ -147,7 +163,7 @@ type SearchedUser { userEmail: String! } -type CheckProjectAliasPayload { +type ProjectAliasAvailability { alias: String! available: Boolean! 
} @@ -355,8 +371,6 @@ type PropertySchemaGroup { isAvailableIf: PropertyCondition title: String allTranslatedTitle: TranslatedString - # For compatibility: "name" field will be removed in the futrue - name: PropertySchemaFieldID representativeFieldId: PropertySchemaFieldID representativeField: PropertySchemaField schema: PropertySchema @goField(forceResolver: true) @@ -367,8 +381,6 @@ type PropertySchemaField { fieldId: PropertySchemaFieldID! type: ValueType! title: String! - # For compatibility: "name" field will be removed in the futrue - name: String! description: String! prefix: String suffix: String @@ -379,12 +391,8 @@ type PropertySchemaField { choices: [PropertySchemaFieldChoice!] isAvailableIf: PropertyCondition allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedName" field will be removed in the futrue - allTranslatedName: TranslatedString allTranslatedDescription: TranslatedString translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedName" field will be removed in the futrue - translatedName(lang: String): String! @goField(forceResolver: true) translatedDescription(lang: String): String! @goField(forceResolver: true) } @@ -403,15 +411,9 @@ enum PropertySchemaFieldUI { type PropertySchemaFieldChoice { key: String! title: String! - # For compatibility: "label" field will be removed in the futrue - label: String! icon: String allTranslatedTitle: TranslatedString - # For compatibility: "allTranslatedLabel" field will be removed in the futrue - allTranslatedLabel: TranslatedString translatedTitle(lang: String): String! @goField(forceResolver: true) - # For compatibility: "translatedLabel" field will be removed in the futrue - translatedLabel(lang: String): String! @goField(forceResolver: true) } type PropertyCondition { @@ -707,6 +709,7 @@ type MergedInfoboxField { } # InputType + input CreateAssetInput { teamId: ID! file: Upload! 
@@ -827,15 +830,13 @@ input AddWidgetInput { input UpdateWidgetInput { sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + widgetId: ID! enabled: Boolean } input RemoveWidgetInput { sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + widgetId: ID! } input InstallPluginInput { @@ -1047,6 +1048,7 @@ input AddDatasetSchemaInput { } # Payload + type CreateAssetPayload { asset: Asset! } @@ -1122,8 +1124,7 @@ type UpdateWidgetPayload { type RemoveWidgetPayload { scene: Scene! - pluginId: PluginID! - extensionId: PluginExtensionID! + widgetId: ID! } type InstallPluginPayload { @@ -1242,21 +1243,6 @@ type AddDatasetSchemaPayload { # Connection -enum NodeType { - ASSET - USER - TEAM - PROJECT - PLUGIN - SCENE - PROPERTY_SCHEMA - PROPERTY - DATASET_SCHEMA - DATASET - LAYER_GROUP - LAYER_ITEM -} - type AssetConnection { edges: [AssetEdge!]! nodes: [Asset]! @@ -1349,7 +1335,7 @@ type Query { sceneLock(sceneId: ID!): SceneLockMode dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! searchUser(nameOrEmail: String!): SearchedUser - checkProjectAlias(alias: String!): CheckProjectAliasPayload! + checkProjectAlias(alias: String!): ProjectAliasAvailability! installablePlugins: [PluginMetadata!]! 
} From 33f4c7c44cf802d33880a86a56aff33be15e9921 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Thu, 16 Sep 2021 12:36:12 +0300 Subject: [PATCH 080/253] chore: update golang version and modules (#51) * sys: update golang version and modules * sys: update golang version for docker and git workflow * sys: go mod tidy --- .github/workflows/godoc.yml | 2 +- .github/workflows/main.yml | 2 +- .github/workflows/nightly.yml | 2 +- .github/workflows/pr.yml | 2 +- Dockerfile | 2 +- go.mod | 56 +++++++++++++++++++++++++++++++---- go.sum | 42 ++++++++++++++++---------- 7 files changed, 82 insertions(+), 26 deletions(-) diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml index dfe92f983..f3f7ab839 100644 --- a/.github/workflows/godoc.yml +++ b/.github/workflows/godoc.yml @@ -21,7 +21,7 @@ jobs: - name: set up uses: actions/setup-go@v2 with: - go-version: 1.16 + go-version: 1.17 id: go - name: checkout uses: actions/checkout@v2 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c38e87b46..90f94b665 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -11,7 +11,7 @@ jobs: - name: set up uses: actions/setup-go@v2 with: - go-version: 1.16 + go-version: 1.17 id: go - name: checkout uses: actions/checkout@v2 diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index f9358eb7b..b244a95f5 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -14,7 +14,7 @@ jobs: - name: set up uses: actions/setup-go@v2 with: - go-version: 1.16 + go-version: 1.17 id: go - name: checkout uses: actions/checkout@v2 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 1abbcd204..5731a8713 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -8,7 +8,7 @@ jobs: - name: set up uses: actions/setup-go@v2 with: - go-version: 1.16 + go-version: 1.17 id: go - name: checkout uses: actions/checkout@v2 diff --git 
a/Dockerfile b/Dockerfile index 8bd8696cf..da3965c6a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.16-alpine AS build +FROM golang:1.17-alpine AS build ARG TAG=release ARG VERSION diff --git a/go.mod b/go.mod index 4dc138a7c..37144a4b9 100644 --- a/go.mod +++ b/go.mod @@ -3,13 +3,15 @@ module github.com/reearth/reearth-backend require ( cloud.google.com/go v0.87.0 cloud.google.com/go/storage v1.14.0 - github.com/99designs/gqlgen v0.13.0 + github.com/99designs/gqlgen v0.14.0 github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0 github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect - github.com/agnivade/levenshtein v1.1.0 // indirect + github.com/agnivade/levenshtein v1.1.1 // indirect + github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 // indirect github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 github.com/blang/semver v3.5.1+incompatible + github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/fatih/color v1.12.0 // indirect @@ -27,7 +29,7 @@ require ( github.com/labstack/echo/v4 v4.2.1 github.com/labstack/gommon v0.3.0 github.com/mattn/go-isatty v0.0.13 // indirect - github.com/mitchellh/mapstructure v1.4.1 + github.com/mitchellh/mapstructure v1.4.2 github.com/oklog/ulid v1.3.1 github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/paulmach/go.geojson v1.4.0 @@ -41,14 +43,17 @@ require ( github.com/twpayne/go-kml v1.5.2 github.com/uber/jaeger-client-go v2.25.0+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible - github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e - github.com/vektah/gqlparser/v2 v2.1.0 + github.com/urfave/cli/v2 v2.3.0 // indirect + github.com/vektah/dataloaden v0.3.0 + github.com/vektah/gqlparser/v2 v2.2.0 
go.mongodb.org/mongo-driver v1.5.1 go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0 go.opentelemetry.io/otel v0.7.0 go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c // indirect + golang.org/x/mod v0.5.0 // indirect + golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 // indirect golang.org/x/text v0.3.6 golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/tools v0.1.5 @@ -59,4 +64,43 @@ require ( gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) -go 1.16 +require ( + github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect + github.com/aws/aws-sdk-go v1.34.28 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-stack/stack v1.8.0 // indirect + github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/snappy v0.0.3 // indirect + github.com/google/go-cmp v0.5.6 // indirect + github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9 // indirect + github.com/googleapis/gax-go/v2 v2.0.5 // indirect + github.com/gorilla/websocket v1.4.2 // indirect + github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jstemmer/go-junit-report v0.9.1 // indirect + github.com/mattn/go-colorable v0.1.8 // indirect + github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasttemplate v1.2.1 // indirect + github.com/xdg-go/pbkdf2 v1.0.0 // indirect + github.com/xdg-go/scram v1.0.2 // indirect + github.com/xdg-go/stringprep v1.0.2 // indirect + github.com/youmark/pkcs8 
v0.0.0-20181117223130-1be2e3e5546d // indirect + go.opencensus.io v0.23.0 // indirect + golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect + golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420 // indirect + golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea // indirect + google.golang.org/grpc v1.39.0 // indirect + google.golang.org/protobuf v1.27.1 // indirect + gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect + gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect +) + +go 1.17 diff --git a/go.sum b/go.sum index ba640f2cb..a9fbd91d3 100644 --- a/go.sum +++ b/go.sum @@ -44,8 +44,8 @@ cloud.google.com/go/storage v1.14.0 h1:6RRlFMv1omScs6iq2hfE3IvgE+l6RfJPampq8UZc5 cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/99designs/gqlgen v0.11.3/go.mod h1:RgX5GRRdDWNkh4pBrdzNpNPFVsdoUFY2+adM6nb1N+4= -github.com/99designs/gqlgen v0.13.0 h1:haLTcUp3Vwp80xMVEg5KRNwzfUrgFdRmtBY8fuB8scA= -github.com/99designs/gqlgen v0.13.0/go.mod h1:NV130r6f4tpRWuAI+zsrSdooO/eWUv+Gyyoi3rEfXIk= +github.com/99designs/gqlgen v0.14.0 h1:Wg8aNYQUjMR/4v+W3xD+7SizOy6lSvVeQ06AobNQAXI= +github.com/99designs/gqlgen v0.14.0/go.mod h1:S7z4boV+Nx4VvzMUpVrY/YuHjFX4n7rDyuTqvAkuoRE= github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd h1:jtzFT7TsrvMTGwBn8DvwMFDowJ2INPqtP7HpL1R9TIY= github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd/go.mod h1:ud8RnaGvSBJFGEIfo0gMid33OUXXb68bNJlWUUZARGY= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -61,14 +61,16 @@ 
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF0 github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs= -github.com/agnivade/levenshtein v1.1.0 h1:n6qGwyHG61v3ABce1rPVZklEYRT8NFpCMrpZdBUbYGM= github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= +github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8= +github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4 h1:Hs82Z41s6SdL1CELW+XaDYmOH4hkBN4/N9og/AsOv7E= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 h1:8ypNbf5sd3Sm3cKJ9waOGoQv6dKAFiFty9L6NP1AqJ4= +github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= @@ -96,8 
+98,9 @@ github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnht github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -169,6 +172,7 @@ github.com/goccy/go-yaml v1.9.2 h1:2Njwzw+0+pjU2gb805ZC1B/uBuAs2VcZ3K+ZgHwDs7w= github.com/goccy/go-yaml v1.9.2/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -300,6 +304,7 @@ github.com/kelseyhightower/envconfig v1.4.0/go.mod 
h1:cccZRl6mQpaq41TPp5QxidR+Sa github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.10.10 h1:a/y8CglcM7gLGYmlbP/stPE5sR3hbhFRUjCBfd/0B3I= @@ -338,8 +343,8 @@ github.com/mattn/go-isatty v0.0.13/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= +github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -387,13 +392,13 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= 
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= -github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= -github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= @@ -435,8 +440,9 @@ github.com/uber/jaeger-client-go v2.25.0+incompatible h1:IxcNZ7WRY1Y3G4poYlx24sz github.com/uber/jaeger-client-go v2.25.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= -github.com/urfave/cli/v2 v2.1.1 h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k= github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= +github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= 
+github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= @@ -445,13 +451,14 @@ github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPU github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e h1:+w0Zm/9gaWpEAyDlU1eKOuk5twTjAjuevXqcJJw8hrg= github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= +github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= +github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqfU= github.com/vektah/gqlparser v1.3.1/go.mod h1:bkVf0FX+Stjg/MHnm8mEyubuaArhNEqfQhF+OTiAL74= github.com/vektah/gqlparser/v2 v2.0.1/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= -github.com/vektah/gqlparser/v2 v2.1.0 h1:uiKJ+T5HMGGQM2kRKQ8Pxw8+Zq9qhhZhz/lieYvCMns= -github.com/vektah/gqlparser/v2 v2.1.0/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= +github.com/vektah/gqlparser/v2 v2.2.0 h1:bAc3slekAAJW6sZTi07aGq0OrfaCjj4jxARAaC7g2EM= +github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2 
h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= @@ -542,8 +549,9 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -665,8 +673,9 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 h1:7ZDGnxgHAMw7thfC5bEos0RDAccZKxioiWBhfIe+tvw= +golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term 
v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -730,6 +739,7 @@ golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= @@ -738,6 +748,7 @@ golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= @@ -911,4 +922,5 @@ rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8 rsc.io/quote/v3 
v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k= From 94611f96d32b81b1849b7d58b064ba82f9f3281d Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Fri, 24 Sep 2021 18:31:31 +0900 Subject: [PATCH 081/253] feat: widget align system (#19) * wip: align system domain models; structs, getters for each level, add function and remove function + testing for all so far * wip: add WidgetLayout field to plugins; update plugin_manifest, generate types, update convert.go, update extension.go as well as its builder and tests * Add graphql types for widgetlayout, update graphql plugin conversion * Add floating parameter to WidgetLayout to be able to filter widgets that aren't related to the align system. 
fixed conversion logic for empty WidgetLayout fixed conversion logic for empty DefaultLocation * Change align_system name to widget_align_system * Move WidgetLayout to scene package Update plugin.WidgetLayout use and add WidgetLayout where necessary * convert if/else chains to switches add Move method * Make widgetLayout a pointer * Fix Remove method, add Reorder method, tests for both move and reorder, fix up tests * wip: add widget align system to usecases/graphql * Remove unneeded widgetlayout in AddWidgetInput Pull out widgetlayout and conditionally add widget to widgetalignsystem in AddWidget Add SetExtended method to Widget Add extended to UpdateWidgetParam Combine Move and Reorder methods + tests * Change defaultLocation to currentLocation Add WidgetIds and Alignment methods to alignSystem Update graphql Etc * Add SetCurrentLocation method Make widgetIds a slice of pointers to the ids Update add test to include two additions Update logic in scene interactor to be more deliberate Update scene controller to avoid assignment if nil * Conversions for mongo and structs Conversions for graphql and structs Updated graphql schemas + gens * Add comments to functions that didn't have them for what they do * Add alignment to graphql and mongodoc simplify conditionals/avoid lint warning * Re-rename currentLocation to defaultLocation Add align to widget update Add widgetLayout to reearth widgets Fix/improve conditionals * Remove unneeded conditional check * Add align sys and widgetlayout to published JSON Add extended bool to sceneWidget (graphql) Move up updating align to top of if statements Add Equal method to widgetId * Migration for adding widgetLayout to scene Migration for adding alignSystem to scene * Fix migration to get correct widgetlayout from manifest * Remove unused type in scene encoder Remove commented out code in scene builder * Add conditional in case widgetlayout is nil * Fix tests * Rename scene Location to WidgetLocation Rename graphql Location to 
WidgetLocation Rename mongodoc Location to WidgetLocation Fix where mongodoc PluginExtensionDoc type used a type from pkg Create a function to handle recreating widget align system in mongodoc and scene builder Update tests to include widget layout * Fix mongodoc plugin indentation Rename toStruct to stringsToWidgetIDs for clarity Remove error from ToModelAlignSystem since errors will never be created Remove comments in pkg/scene/builder.go * Remove location from RemoveWidgetInput and related Add find function to find location of a widget * Simplify WidgetLayoutInput only has location and index Make mandatory id params not pointers Fix bug with converting to mongo document where all sections were saving as the same Simplify method names in widget align system Update methods to work without needing a location * Update naming for clarity Cleanup code style for clarity Add WidgetLayout values to better test code * Replace plugId and extensionId with widgetId WidgetIds -> WidgetIDs * Add consts for align values Include switch case for assign align value * Update WZone, etc to WidgetZoneType style Regenerate graphql * Fix align value in a test (should only be start, center or end now) * Cleanup UpdateWidget controller Fix code readability Rename to WidgetLocationDocument Add new methods to WidgetArea Reorganize align sys domain code * Cleanup schema.graphql Make widgetIDs not pointers * Update align sys consts to WidgetXX Rename types in mongodc to XXDocument Remove old comments Cleanup code Use consts instead of strings * widgetlayouts floating field is necessary widgetLayouts extended extendable are not use consts instead of hardcoded values Location is necessary for adding widgets * test for manifest pkgs layout method * add tests in plugin/manifest and scene * refactor * Separate Widget zone, section and area logic. Fix Find method to be up to standards. Add Id and floating to widgetJSON. 
Fix issue with conversion funcs where they were converting to all the same Section. * Fix migrations for pointer values * Fix widget align sys remove test Add tests for WidgetArea, Section and Zone * Add tests to scene builder, align system and widget zone * Remove unwanted assignment to align on update * Add return if findwidgetlocation is returns nil. Fix test with race condition error. * Fix where updating extended would also add widget again. * Fix extending conditional * Change extendable from bool to object with vert and hor * Fixes from PR review * FindWidgetLocation method now returns int, -1 instead of nil. Add New___ methods for Zone, Section and Area. Update tests. * Fix SetExtended method * Add tests for align system and widget area. * Avoid race condition in test * Add tests to widget section * Add test to widget section * Add Extendable to published scene * Update schema.graphql Co-authored-by: rot1024 * Clean up tests. Fix Add and AddAll methods so they don't directly manipulate from WidgetAlignSystem. * Make convert functions public * Merge with main * Type widget area vars with named types * Type widget section vars * Type widget zone constants * type WidgetLocation with WidgetZoneType * Replace zone being typed as string with WidgetZoneType * Type WidgetLocations section and area, Fix nil pointer errors. 
* type alignments with WidgetAlignType * Clean up widget area and zone tests * refactor * pair programming * pair pro * pair pro * pair pro * pair pro Co-authored-by: KaWaite * pair pro Co-authored-by: KaWaite Co-authored-by: HideBa * pair pro (usecase) * add setters Co-authored-by: KaWaite Co-authored-by: HideBa * typo * rename and add nil check * reimpl mongo * rename file * update gql * fix * align CENTER -> CENTERED * Revert Widget zone, section, area to optional in graphql * Make Menu widget legacy, add Button widget to manifest * Update manifest.yml Co-authored-by: KaWaite Co-authored-by: rot1024 Co-authored-by: KaWaite Co-authored-by: HideBa --- internal/adapter/gql/generated.go | 2389 ++++++++++++++++- .../adapter/gql/gqlmodel/convert_plugin.go | 63 + .../adapter/gql/gqlmodel/convert_scene.go | 20 +- .../gql/gqlmodel/convert_scene_align.go | 124 + internal/adapter/gql/gqlmodel/models_gen.go | 238 +- .../adapter/gql/resolver_mutation_scene.go | 21 + .../210730175108_add_scene_align_system.go | 74 + .../mongo/migration/migrations.go | 7 +- .../infrastructure/mongo/mongodoc/plugin.go | 94 +- .../infrastructure/mongo/mongodoc/scene.go | 38 +- .../mongo/mongodoc/scene_align.go | 146 + internal/usecase/interactor/scene.go | 105 +- internal/usecase/interfaces/scene.go | 22 +- pkg/builtin/manifest.yml | 132 +- pkg/builtin/manifest_ja.yml | 60 +- pkg/plugin/extension.go | 96 +- pkg/plugin/extension_builder.go | 6 + pkg/plugin/extension_builder_test.go | 50 +- pkg/plugin/extension_test.go | 34 +- pkg/plugin/manifest/convert.go | 32 + pkg/plugin/manifest/convert_test.go | 54 + pkg/plugin/manifest/parser_test.go | 1 + pkg/plugin/manifest/schema_gen.go | 35 +- pkg/plugin/plugin.go | 5 +- pkg/plugin/plugin_test.go | 6 + pkg/scene/builder.go | 20 +- pkg/scene/builder/builder_test.go | 16 +- pkg/scene/builder/encoder.go | 22 + pkg/scene/builder/scene.go | 100 +- pkg/scene/builder/scene_test.go | 81 + pkg/scene/builder_test.go | 324 ++- pkg/scene/scene.go | 24 +- 
pkg/scene/scene_test.go | 20 +- pkg/scene/widget.go | 28 +- pkg/scene/widget_align_system.go | 122 + pkg/scene/widget_align_system_test.go | 334 +++ pkg/scene/widget_area.go | 126 + pkg/scene/widget_area_test.go | 327 +++ pkg/scene/widget_section.go | 89 + pkg/scene/widget_section_test.go | 242 ++ pkg/scene/widget_system_test.go | 181 +- pkg/scene/widget_test.go | 109 +- pkg/scene/widget_zone.go | 86 + pkg/scene/widget_zone_test.go | 250 ++ plugin_manifest_schema.json | 74 +- schema.graphql | 87 + 46 files changed, 5907 insertions(+), 607 deletions(-) create mode 100644 internal/adapter/gql/gqlmodel/convert_scene_align.go create mode 100644 internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go create mode 100644 internal/infrastructure/mongo/mongodoc/scene_align.go create mode 100644 pkg/scene/widget_align_system.go create mode 100644 pkg/scene/widget_align_system_test.go create mode 100644 pkg/scene/widget_area.go create mode 100644 pkg/scene/widget_area_test.go create mode 100644 pkg/scene/widget_section.go create mode 100644 pkg/scene/widget_section_test.go create mode 100644 pkg/scene/widget_zone.go create mode 100644 pkg/scene/widget_zone_test.go diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 03b98ee04..b806edc0a 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -486,6 +486,7 @@ type ComplexityRoot struct { UpdatePropertyValue func(childComplexity int, input gqlmodel.UpdatePropertyValueInput) int UpdateTeam func(childComplexity int, input gqlmodel.UpdateTeamInput) int UpdateWidget func(childComplexity int, input gqlmodel.UpdateWidgetInput) int + UpdateWidgetAlignSystem func(childComplexity int, input gqlmodel.UpdateWidgetAlignSystemInput) int UpgradePlugin func(childComplexity int, input gqlmodel.UpgradePluginInput) int UploadFileToProperty func(childComplexity int, input gqlmodel.UploadFileToPropertyInput) int UploadPlugin func(childComplexity int, input 
gqlmodel.UploadPluginInput) int @@ -533,6 +534,7 @@ type ComplexityRoot struct { TranslatedName func(childComplexity int, lang *string) int Type func(childComplexity int) int Visualizer func(childComplexity int) int + WidgetLayout func(childComplexity int) int } PluginMetadata struct { @@ -786,6 +788,7 @@ type ComplexityRoot struct { Team func(childComplexity int) int TeamID func(childComplexity int) int UpdatedAt func(childComplexity int) int + WidgetAlignSystem func(childComplexity int) int Widgets func(childComplexity int) int } @@ -798,6 +801,7 @@ type ComplexityRoot struct { SceneWidget struct { Enabled func(childComplexity int) int + Extended func(childComplexity int) int Extension func(childComplexity int) int ExtensionID func(childComplexity int) int ID func(childComplexity int) int @@ -876,6 +880,10 @@ type ComplexityRoot struct { Team func(childComplexity int) int } + UpdateWidgetAlignSystemPayload struct { + Scene func(childComplexity int) int + } + UpdateWidgetPayload struct { Scene func(childComplexity int) int SceneWidget func(childComplexity int) int @@ -903,6 +911,46 @@ type ComplexityRoot struct { Teams func(childComplexity int) int Theme func(childComplexity int) int } + + WidgetAlignSystem struct { + Inner func(childComplexity int) int + Outer func(childComplexity int) int + } + + WidgetArea struct { + Align func(childComplexity int) int + WidgetIds func(childComplexity int) int + } + + WidgetExtendable struct { + Horizontally func(childComplexity int) int + Vertically func(childComplexity int) int + } + + WidgetLayout struct { + DefaultLocation func(childComplexity int) int + Extendable func(childComplexity int) int + Extended func(childComplexity int) int + Floating func(childComplexity int) int + } + + WidgetLocation struct { + Area func(childComplexity int) int + Section func(childComplexity int) int + Zone func(childComplexity int) int + } + + WidgetSection struct { + Bottom func(childComplexity int) int + Middle func(childComplexity int) 
int + Top func(childComplexity int) int + } + + WidgetZone struct { + Center func(childComplexity int) int + Left func(childComplexity int) int + Right func(childComplexity int) int + } } type AssetResolver interface { @@ -1018,6 +1066,7 @@ type MutationResolver interface { CreateScene(ctx context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) + UpdateWidgetAlignSystem(ctx context.Context, input gqlmodel.UpdateWidgetAlignSystemInput) (*gqlmodel.UpdateWidgetAlignSystemPayload, error) RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) @@ -3320,6 +3369,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.UpdateWidget(childComplexity, args["input"].(gqlmodel.UpdateWidgetInput)), true + case "Mutation.updateWidgetAlignSystem": + if e.complexity.Mutation.UpdateWidgetAlignSystem == nil { + break + } + + args, err := ec.field_Mutation_updateWidgetAlignSystem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateWidgetAlignSystem(childComplexity, args["input"].(gqlmodel.UpdateWidgetAlignSystemInput)), true + case "Mutation.upgradePlugin": if e.complexity.Mutation.UpgradePlugin == nil { break @@ -3631,6 +3692,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PluginExtension.Visualizer(childComplexity), true + case "PluginExtension.widgetLayout": + if e.complexity.PluginExtension.WidgetLayout 
== nil { + break + } + + return e.complexity.PluginExtension.WidgetLayout(childComplexity), true + case "PluginMetadata.author": if e.complexity.PluginMetadata.Author == nil { break @@ -4898,6 +4966,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Scene.UpdatedAt(childComplexity), true + case "Scene.widgetAlignSystem": + if e.complexity.Scene.WidgetAlignSystem == nil { + break + } + + return e.complexity.Scene.WidgetAlignSystem(childComplexity), true + case "Scene.widgets": if e.complexity.Scene.Widgets == nil { break @@ -4940,6 +5015,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.SceneWidget.Enabled(childComplexity), true + case "SceneWidget.extended": + if e.complexity.SceneWidget.Extended == nil { + break + } + + return e.complexity.SceneWidget.Extended(childComplexity), true + case "SceneWidget.extension": if e.complexity.SceneWidget.Extension == nil { break @@ -5230,6 +5312,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.UpdateTeamPayload.Team(childComplexity), true + case "UpdateWidgetAlignSystemPayload.scene": + if e.complexity.UpdateWidgetAlignSystemPayload.Scene == nil { + break + } + + return e.complexity.UpdateWidgetAlignSystemPayload.Scene(childComplexity), true + case "UpdateWidgetPayload.scene": if e.complexity.UpdateWidgetPayload.Scene == nil { break @@ -5342,6 +5431,139 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.User.Theme(childComplexity), true + case "WidgetAlignSystem.inner": + if e.complexity.WidgetAlignSystem.Inner == nil { + break + } + + return e.complexity.WidgetAlignSystem.Inner(childComplexity), true + + case "WidgetAlignSystem.outer": + if e.complexity.WidgetAlignSystem.Outer == nil { + break + } + + return e.complexity.WidgetAlignSystem.Outer(childComplexity), true + + case "WidgetArea.align": + if 
e.complexity.WidgetArea.Align == nil { + break + } + + return e.complexity.WidgetArea.Align(childComplexity), true + + case "WidgetArea.widgetIds": + if e.complexity.WidgetArea.WidgetIds == nil { + break + } + + return e.complexity.WidgetArea.WidgetIds(childComplexity), true + + case "WidgetExtendable.horizontally": + if e.complexity.WidgetExtendable.Horizontally == nil { + break + } + + return e.complexity.WidgetExtendable.Horizontally(childComplexity), true + + case "WidgetExtendable.vertically": + if e.complexity.WidgetExtendable.Vertically == nil { + break + } + + return e.complexity.WidgetExtendable.Vertically(childComplexity), true + + case "WidgetLayout.defaultLocation": + if e.complexity.WidgetLayout.DefaultLocation == nil { + break + } + + return e.complexity.WidgetLayout.DefaultLocation(childComplexity), true + + case "WidgetLayout.extendable": + if e.complexity.WidgetLayout.Extendable == nil { + break + } + + return e.complexity.WidgetLayout.Extendable(childComplexity), true + + case "WidgetLayout.extended": + if e.complexity.WidgetLayout.Extended == nil { + break + } + + return e.complexity.WidgetLayout.Extended(childComplexity), true + + case "WidgetLayout.floating": + if e.complexity.WidgetLayout.Floating == nil { + break + } + + return e.complexity.WidgetLayout.Floating(childComplexity), true + + case "WidgetLocation.area": + if e.complexity.WidgetLocation.Area == nil { + break + } + + return e.complexity.WidgetLocation.Area(childComplexity), true + + case "WidgetLocation.section": + if e.complexity.WidgetLocation.Section == nil { + break + } + + return e.complexity.WidgetLocation.Section(childComplexity), true + + case "WidgetLocation.zone": + if e.complexity.WidgetLocation.Zone == nil { + break + } + + return e.complexity.WidgetLocation.Zone(childComplexity), true + + case "WidgetSection.bottom": + if e.complexity.WidgetSection.Bottom == nil { + break + } + + return e.complexity.WidgetSection.Bottom(childComplexity), true + + case 
"WidgetSection.middle": + if e.complexity.WidgetSection.Middle == nil { + break + } + + return e.complexity.WidgetSection.Middle(childComplexity), true + + case "WidgetSection.top": + if e.complexity.WidgetSection.Top == nil { + break + } + + return e.complexity.WidgetSection.Top(childComplexity), true + + case "WidgetZone.center": + if e.complexity.WidgetZone.Center == nil { + break + } + + return e.complexity.WidgetZone.Center(childComplexity), true + + case "WidgetZone.left": + if e.complexity.WidgetZone.Left == nil { + break + } + + return e.complexity.WidgetZone.Left(childComplexity), true + + case "WidgetZone.right": + if e.complexity.WidgetZone.Right == nil { + break + } + + return e.complexity.WidgetZone.Right(childComplexity), true + } return 0, false } @@ -5676,6 +5898,47 @@ type PluginMetadata { createdAt: DateTime! } +enum WidgetAreaAlign { + START + CENTERED + END +} + +enum WidgetZoneType { + INNER + OUTER +} + +enum WidgetSectionType { + LEFT + CENTER + RIGHT +} + +enum WidgetAreaType { + TOP + MIDDLE + BOTTOM +} + +type WidgetLocation { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + +type WidgetExtendable { + vertically: Boolean! + horizontally: Boolean! +} + +type WidgetLayout { + extendable: WidgetExtendable! + extended: Boolean! + floating: Boolean! + defaultLocation: WidgetLocation +} + enum PluginExtensionType { PRIMITIVE WIDGET @@ -5684,6 +5947,7 @@ enum PluginExtensionType { INFOBOX } + type PluginExtension { extensionId: PluginExtensionID! pluginId: PluginID! @@ -5691,6 +5955,7 @@ type PluginExtension { name: String! description: String! icon: String! + widgetLayout: WidgetLayout visualizer: Visualizer! propertySchemaId: PropertySchemaID! allTranslatedName: TranslatedString @@ -5714,6 +5979,7 @@ type Scene implements Node { rootLayerId: ID! widgets: [SceneWidget!]! plugins: [ScenePlugin!]! + widgetAlignSystem: WidgetAlignSystem dynamicDatasetSchemas: [DatasetSchema!]! 
project: Project @goField(forceResolver: true) team: Team @goField(forceResolver: true) @@ -5742,6 +6008,7 @@ type SceneWidget { extensionId: PluginExtensionID! propertyId: ID! enabled: Boolean! + extended: Boolean! plugin: Plugin @goField(forceResolver: true) extension: PluginExtension @goField(forceResolver: true) property: Property @goField(forceResolver: true) @@ -5754,6 +6021,28 @@ type ScenePlugin { property: Property @goField(forceResolver: true) } +type WidgetAlignSystem { + inner: WidgetZone + outer: WidgetZone +} + +type WidgetZone { + left: WidgetSection + center: WidgetSection + right: WidgetSection +} + +type WidgetSection { + top: WidgetArea + middle: WidgetArea + bottom: WidgetArea +} + +type WidgetArea { + widgetIds: [ID!]! + align: WidgetAreaAlign! +} + # Property type PropertySchema { @@ -6230,6 +6519,12 @@ input DeleteProjectInput { projectId: ID! } +input WidgetLocationInput { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + input AddWidgetInput { sceneId: ID! pluginId: PluginID! @@ -6240,6 +6535,15 @@ input UpdateWidgetInput { sceneId: ID! widgetId: ID! enabled: Boolean + location: WidgetLocationInput + extended: Boolean + index: Int +} + +input UpdateWidgetAlignSystemInput { + sceneId: ID! + location: WidgetLocationInput! + align: WidgetAreaAlign } input RemoveWidgetInput { @@ -6530,6 +6834,10 @@ type UpdateWidgetPayload { sceneWidget: SceneWidget! } +type UpdateWidgetAlignSystemPayload { + scene: Scene! +} + type RemoveWidgetPayload { scene: Scene! widgetId: ID! 
@@ -6780,6 +7088,7 @@ type Mutation { createScene(input: CreateSceneInput!): CreateScenePayload addWidget(input: AddWidgetInput!): AddWidgetPayload updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + updateWidgetAlignSystem(input: UpdateWidgetAlignSystemInput!): UpdateWidgetAlignSystemPayload removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload installPlugin(input: InstallPluginInput!): InstallPluginPayload uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload @@ -7603,6 +7912,21 @@ func (ec *executionContext) field_Mutation_updateTeam_args(ctx context.Context, return args, nil } +func (ec *executionContext) field_Mutation_updateWidgetAlignSystem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateWidgetAlignSystemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateWidgetAlignSystemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_updateWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -16627,6 +16951,45 @@ func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field gr return ec.marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx, field.Selections, res) } +func (ec *executionContext) _Mutation_updateWidgetAlignSystem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc 
:= &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateWidgetAlignSystem_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateWidgetAlignSystem(rctx, args["input"].(gqlmodel.UpdateWidgetAlignSystemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateWidgetAlignSystemPayload) + fc.Result = res + return ec.marshalOUpdateWidgetAlignSystemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemPayload(ctx, field.Selections, res) +} + func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -18818,6 +19181,38 @@ func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) _PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + 
return obj.WidgetLayout, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetLayout) + fc.Result = res + return ec.marshalOWidgetLayout2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLayout(ctx, field.Selections, res) +} + func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -24779,6 +25174,38 @@ func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.Co return ec.marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx, field.Selections, res) } +func (ec *executionContext) _Scene_widgetAlignSystem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.WidgetAlignSystem, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetAlignSystem) + fc.Result = res + return ec.marshalOWidgetAlignSystem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAlignSystem(ctx, field.Selections, res) +} + func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() 
{ if r := recover(); r != nil { @@ -25325,6 +25752,41 @@ func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field grap return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) _SceneWidget_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Extended, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -26560,6 +27022,41 @@ func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field g return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) _UpdateWidgetAlignSystemPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetAlignSystemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateWidgetAlignSystemPayload", + Field: field, + Args: nil, + IsMethod: 
false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -27120,7 +27617,7 @@ func (ec *executionContext) _User_myTeam(ctx context.Context, field graphql.Coll return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } -func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetAlignSystem_inner(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27128,7 +27625,7 @@ func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql } }() fc := &graphql.FieldContext{ - Object: "__Directive", + Object: "WidgetAlignSystem", Field: field, Args: nil, IsMethod: false, @@ -27138,7 +27635,71 @@ func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = 
rctx // use context from middleware stack in children - return obj.Name, nil + return obj.Inner, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetZone) + fc.Result = res + return ec.marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx, field.Selections, res) +} + +func (ec *executionContext) _WidgetAlignSystem_outer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "WidgetAlignSystem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Outer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetZone) + fc.Result = res + return ec.marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx, field.Selections, res) +} + +func (ec *executionContext) _WidgetArea_widgetIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "WidgetArea", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in 
children + return obj.WidgetIds, nil }) if err != nil { ec.Error(ctx, err) @@ -27150,12 +27711,12 @@ func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(string) + res := resTmp.([]*id.ID) fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) + return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetArea_align(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27163,7 +27724,7 @@ func (ec *executionContext) ___Directive_description(ctx context.Context, field } }() fc := &graphql.FieldContext{ - Object: "__Directive", + Object: "WidgetArea", Field: field, Args: nil, IsMethod: false, @@ -27173,21 +27734,24 @@ func (ec *executionContext) ___Directive_description(ctx context.Context, field ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Align, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(string) + res := resTmp.(gqlmodel.WidgetAreaAlign) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx, field.Selections, res) } -func (ec *executionContext) ___Directive_locations(ctx context.Context, field 
graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetExtendable_vertically(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27195,7 +27759,7 @@ func (ec *executionContext) ___Directive_locations(ctx context.Context, field gr } }() fc := &graphql.FieldContext{ - Object: "__Directive", + Object: "WidgetExtendable", Field: field, Args: nil, IsMethod: false, @@ -27205,7 +27769,7 @@ func (ec *executionContext) ___Directive_locations(ctx context.Context, field gr ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Locations, nil + return obj.Vertically, nil }) if err != nil { ec.Error(ctx, err) @@ -27217,12 +27781,12 @@ func (ec *executionContext) ___Directive_locations(ctx context.Context, field gr } return graphql.Null } - res := resTmp.([]string) + res := resTmp.(bool) fc.Result = res - return ec.marshalN__DirectiveLocation2แš•stringแš„(ctx, field.Selections, res) + return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetExtendable_horizontally(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27230,7 +27794,7 @@ func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql } }() fc := &graphql.FieldContext{ - Object: "__Directive", + Object: "WidgetExtendable", Field: field, Args: nil, IsMethod: false, @@ -27240,7 +27804,7 @@ func (ec *executionContext) 
___Directive_args(ctx context.Context, field graphql ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Args, nil + return obj.Horizontally, nil }) if err != nil { ec.Error(ctx, err) @@ -27252,12 +27816,12 @@ func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.([]introspection.InputValue) + res := resTmp.(bool) fc.Result = res - return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) + return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLayout_extendable(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27265,7 +27829,7 @@ func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql } }() fc := &graphql.FieldContext{ - Object: "__EnumValue", + Object: "WidgetLayout", Field: field, Args: nil, IsMethod: false, @@ -27275,7 +27839,7 @@ func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Name, nil + return obj.Extendable, nil }) if err != nil { ec.Error(ctx, err) @@ -27287,12 +27851,12 @@ func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(string) + res := resTmp.(*gqlmodel.WidgetExtendable) fc.Result = res - return 
ec.marshalNString2string(ctx, field.Selections, res) + return ec.marshalNWidgetExtendable2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetExtendable(ctx, field.Selections, res) } -func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLayout_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27300,7 +27864,7 @@ func (ec *executionContext) ___EnumValue_description(ctx context.Context, field } }() fc := &graphql.FieldContext{ - Object: "__EnumValue", + Object: "WidgetLayout", Field: field, Args: nil, IsMethod: false, @@ -27310,21 +27874,24 @@ func (ec *executionContext) ___EnumValue_description(ctx context.Context, field ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Extended, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(string) + res := resTmp.(bool) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLayout_floating(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27332,17 +27899,17 
@@ func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field } }() fc := &graphql.FieldContext{ - Object: "__EnumValue", + Object: "WidgetLayout", Field: field, Args: nil, - IsMethod: true, + IsMethod: false, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.IsDeprecated(), nil + return obj.Floating, nil }) if err != nil { ec.Error(ctx, err) @@ -27359,7 +27926,7 @@ func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLayout_defaultLocation(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27367,17 +27934,17 @@ func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, } }() fc := &graphql.FieldContext{ - Object: "__EnumValue", + Object: "WidgetLayout", Field: field, Args: nil, - IsMethod: true, + IsMethod: false, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.DeprecationReason(), nil + return obj.DefaultLocation, nil }) if err != nil { ec.Error(ctx, err) @@ -27386,12 +27953,12 @@ func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*string) + res := resTmp.(*gqlmodel.WidgetLocation) fc.Result = res - return ec.marshalOString2แš–string(ctx, field.Selections, res) + 
return ec.marshalOWidgetLocation2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocation(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLocation_zone(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27399,7 +27966,7 @@ func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.Col } }() fc := &graphql.FieldContext{ - Object: "__Field", + Object: "WidgetLocation", Field: field, Args: nil, IsMethod: false, @@ -27409,7 +27976,7 @@ func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.Col ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Name, nil + return obj.Zone, nil }) if err != nil { ec.Error(ctx, err) @@ -27421,12 +27988,12 @@ func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.(string) + res := resTmp.(gqlmodel.WidgetZoneType) fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) + return ec.marshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLocation_section(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ 
-27434,7 +28001,7 @@ func (ec *executionContext) ___Field_description(ctx context.Context, field grap } }() fc := &graphql.FieldContext{ - Object: "__Field", + Object: "WidgetLocation", Field: field, Args: nil, IsMethod: false, @@ -27444,21 +28011,24 @@ func (ec *executionContext) ___Field_description(ctx context.Context, field grap ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Section, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(string) + res := resTmp.(gqlmodel.WidgetSectionType) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetLocation_area(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27466,7 +28036,7 @@ func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.Col } }() fc := &graphql.FieldContext{ - Object: "__Field", + Object: "WidgetLocation", Field: field, Args: nil, IsMethod: false, @@ -27476,7 +28046,7 @@ func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.Col ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return 
obj.Args, nil + return obj.Area, nil }) if err != nil { ec.Error(ctx, err) @@ -27488,12 +28058,12 @@ func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.([]introspection.InputValue) + res := resTmp.(gqlmodel.WidgetAreaType) fc.Result = res - return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) + return ec.marshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetSection_top(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27501,7 +28071,7 @@ func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.Col } }() fc := &graphql.FieldContext{ - Object: "__Field", + Object: "WidgetSection", Field: field, Args: nil, IsMethod: false, @@ -27511,24 +28081,53 @@ func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.Col ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Type, nil + return obj.Top, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetArea) + fc.Result = res + return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_WidgetSection_middle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null } + }() + fc := &graphql.FieldContext{ + Object: "WidgetSection", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Middle, nil + }) + if err != nil { + ec.Error(ctx, err) return graphql.Null } - res := resTmp.(*introspection.Type) + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetArea) fc.Result = res - return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) + return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetSection_bottom(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27536,34 +28135,63 @@ func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field gra } }() fc := &graphql.FieldContext{ - Object: "__Field", + Object: "WidgetSection", Field: field, Args: nil, - IsMethod: true, + IsMethod: false, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.IsDeprecated(), nil + return obj.Bottom, nil 
}) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetArea) + fc.Result = res + return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) +} + +func (ec *executionContext) _WidgetZone_left(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null } + }() + fc := &graphql.FieldContext{ + Object: "WidgetZone", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Left, nil + }) + if err != nil { + ec.Error(ctx, err) return graphql.Null } - res := resTmp.(bool) + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetSection) fc.Result = res - return ec.marshalNBoolean2bool(ctx, field.Selections, res) + return ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetZone_center(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27571,17 +28199,17 @@ func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, fiel } }() fc := &graphql.FieldContext{ - Object: "__Field", + Object: 
"WidgetZone", Field: field, Args: nil, - IsMethod: true, + IsMethod: false, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.DeprecationReason(), nil + return obj.Center, nil }) if err != nil { ec.Error(ctx, err) @@ -27590,12 +28218,12 @@ func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*string) + res := resTmp.(*gqlmodel.WidgetSection) fc.Result = res - return ec.marshalOString2แš–string(ctx, field.Selections, res) + return ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) } -func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { +func (ec *executionContext) _WidgetZone_right(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27603,7 +28231,39 @@ func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphq } }() fc := &graphql.FieldContext{ - Object: "__InputValue", + Object: "WidgetZone", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Right, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.WidgetSection) + fc.Result = res + return 
ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", Field: field, Args: nil, IsMethod: false, @@ -27630,7 +28290,7 @@ func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphq return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { +func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -27638,7 +28298,517 @@ func (ec *executionContext) ___InputValue_description(ctx context.Context, field } }() fc := &graphql.FieldContext{ - Object: "__InputValue", + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if 
r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Locations, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return ec.marshalN__DirectiveLocation2แš•stringแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Directive", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsRepeatable, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: 
"__EnumValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == 
nil { + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalOString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := 
resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Field", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + +func (ec 
*executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__InputValue", Field: field, Args: nil, IsMethod: false, @@ -28209,7 +29379,10 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDatasetSchemaInput, error) { var it gqlmodel.AddDatasetSchemaInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28245,7 +29418,10 @@ func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Cont func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Context, obj interface{}) 
(gqlmodel.AddDynamicDatasetInput, error) { var it gqlmodel.AddDynamicDatasetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28305,7 +29481,10 @@ func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Con func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDynamicDatasetSchemaInput, error) { var it gqlmodel.AddDynamicDatasetSchemaInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28325,7 +29504,10 @@ func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx conte func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.AddInfoboxFieldInput, error) { var it gqlmodel.AddInfoboxFieldInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28369,7 +29551,10 @@ func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Conte func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context, obj interface{}) (gqlmodel.AddLayerGroupInput, error) { var it gqlmodel.AddLayerGroupInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28437,7 +29622,10 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, obj interface{}) (gqlmodel.AddLayerItemInput, error) { var it gqlmodel.AddLayerItemInput - var asMap = 
obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28505,7 +29693,10 @@ func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Context, obj interface{}) (gqlmodel.AddMemberToTeamInput, error) { var it gqlmodel.AddMemberToTeamInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28541,7 +29732,10 @@ func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Conte func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.AddPropertyItemInput, error) { var it gqlmodel.AddPropertyItemInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28593,7 +29787,10 @@ func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Conte func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.AddWidgetInput, error) { var it gqlmodel.AddWidgetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28629,7 +29826,10 @@ func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, ob func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, obj interface{}) (gqlmodel.CreateAssetInput, error) { var it gqlmodel.CreateAssetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v 
:= range asMap { switch k { @@ -28657,7 +29857,10 @@ func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context, obj interface{}) (gqlmodel.CreateInfoboxInput, error) { var it gqlmodel.CreateInfoboxInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28677,7 +29880,10 @@ func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context, obj interface{}) (gqlmodel.CreateProjectInput, error) { var it gqlmodel.CreateProjectInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28745,7 +29951,10 @@ func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, obj interface{}) (gqlmodel.CreateSceneInput, error) { var it gqlmodel.CreateSceneInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28765,7 +29974,10 @@ func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTeamInput, error) { var it gqlmodel.CreateTeamInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28785,7 +29997,10 @@ func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, o func (ec 
*executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteMeInput, error) { var it gqlmodel.DeleteMeInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28805,7 +30020,10 @@ func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteProjectInput, error) { var it gqlmodel.DeleteProjectInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28825,7 +30043,10 @@ func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteTeamInput, error) { var it gqlmodel.DeleteTeamInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28845,7 +30066,10 @@ func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, o func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx context.Context, obj interface{}) (gqlmodel.ImportDatasetFromGoogleSheetInput, error) { var it gqlmodel.ImportDatasetFromGoogleSheetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28897,7 +30121,10 @@ func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.ImportDatasetInput, 
error) { var it gqlmodel.ImportDatasetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28933,7 +30160,10 @@ func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, obj interface{}) (gqlmodel.ImportLayerInput, error) { var it gqlmodel.ImportLayerInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28969,7 +30199,10 @@ func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context, obj interface{}) (gqlmodel.InstallPluginInput, error) { var it gqlmodel.InstallPluginInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -28997,7 +30230,10 @@ func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.LinkDatasetToPropertyValueInput, error) { var it gqlmodel.LinkDatasetToPropertyValueInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29065,7 +30301,10 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { var it gqlmodel.MoveInfoboxFieldInput - var asMap = obj.(map[string]interface{}) + asMap := 
map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29101,7 +30340,10 @@ func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Cont func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, obj interface{}) (gqlmodel.MoveLayerInput, error) { var it gqlmodel.MoveLayerInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29137,7 +30379,10 @@ func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, ob func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.MovePropertyItemInput, error) { var it gqlmodel.MovePropertyItemInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29181,7 +30426,10 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Context, obj interface{}) (gqlmodel.PublishProjectInput, error) { var it gqlmodel.PublishProjectInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29217,7 +30465,10 @@ func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Contex func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveAssetInput, error) { var it gqlmodel.RemoveAssetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29237,7 
+30488,10 @@ func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { var it gqlmodel.RemoveDatasetSchemaInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29265,7 +30519,10 @@ func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.C func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveInfoboxFieldInput, error) { var it gqlmodel.RemoveInfoboxFieldInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29293,7 +30550,10 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Co func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveInfoboxInput, error) { var it gqlmodel.RemoveInfoboxInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29313,7 +30573,10 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveLayerInput, error) { var it gqlmodel.RemoveLayerInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29333,7 +30596,10 @@ func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, func (ec 
*executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveMemberFromTeamInput, error) { var it gqlmodel.RemoveMemberFromTeamInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29361,7 +30627,10 @@ func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context. func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveMyAuthInput, error) { var it gqlmodel.RemoveMyAuthInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29381,7 +30650,10 @@ func (ec *executionContext) unmarshalInputRemoveMyAuthInput(ctx context.Context, func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.Context, obj interface{}) (gqlmodel.RemovePropertyFieldInput, error) { var it gqlmodel.RemovePropertyFieldInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29425,7 +30697,10 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.RemovePropertyItemInput, error) { var it gqlmodel.RemovePropertyItemInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29461,7 +30736,10 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, obj interface{}) 
(gqlmodel.RemoveWidgetInput, error) { var it gqlmodel.RemoveWidgetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29489,7 +30767,10 @@ func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj interface{}) (gqlmodel.SignupInput, error) { var it gqlmodel.SignupInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29541,7 +30822,10 @@ func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj i func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, obj interface{}) (gqlmodel.SyncDatasetInput, error) { var it gqlmodel.SyncDatasetInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29569,7 +30853,10 @@ func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Context, obj interface{}) (gqlmodel.UninstallPluginInput, error) { var it gqlmodel.UninstallPluginInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29597,7 +30884,10 @@ func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Conte func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.UnlinkPropertyValueInput, error) { var it gqlmodel.UnlinkPropertyValueInput - var asMap = obj.(map[string]interface{}) + asMap := 
map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29641,7 +30931,10 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { var it gqlmodel.UpdateDatasetSchemaInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29669,7 +30962,10 @@ func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.C func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateLayerInput, error) { var it gqlmodel.UpdateLayerInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29705,7 +31001,10 @@ func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateMeInput, error) { var it gqlmodel.UpdateMeInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29765,7 +31064,10 @@ func (ec *executionContext) unmarshalInputUpdateMeInput(ctx context.Context, obj func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateMemberOfTeamInput, error) { var it gqlmodel.UpdateMemberOfTeamInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch 
k { @@ -29801,7 +31103,10 @@ func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Co func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateProjectInput, error) { var it gqlmodel.UpdateProjectInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29933,7 +31238,10 @@ func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyItemInput, error) { var it gqlmodel.UpdatePropertyItemInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -29969,7 +31277,10 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Co func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyItemOperationInput, error) { var it gqlmodel.UpdatePropertyItemOperationInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -30021,7 +31332,10 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx c func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.Context, obj interface{}) (gqlmodel.UpdatePropertyValueInput, error) { var it gqlmodel.UpdatePropertyValueInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -30081,7 +31395,10 @@ func (ec *executionContext) 
unmarshalInputUpdatePropertyValueInput(ctx context.C func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateTeamInput, error) { var it gqlmodel.UpdateTeamInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -30107,9 +31424,51 @@ func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, o return it, nil } +func (ec *executionContext) unmarshalInputUpdateWidgetAlignSystemInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateWidgetAlignSystemInput, error) { + var it gqlmodel.UpdateWidgetAlignSystemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "location": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("location")) + it.Location, err = ec.unmarshalNWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx, v) + if err != nil { + return it, err + } + case "align": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("align")) + it.Align, err = ec.unmarshalOWidgetAreaAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateWidgetInput, error) { var it gqlmodel.UpdateWidgetInput - var asMap = 
obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -30137,6 +31496,30 @@ func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, if err != nil { return it, err } + case "location": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("location")) + it.Location, err = ec.unmarshalOWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx, v) + if err != nil { + return it, err + } + case "extended": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extended")) + it.Extended, err = ec.unmarshalOBoolean2แš–bool(ctx, v) + if err != nil { + return it, err + } + case "index": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("index")) + it.Index, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } } } @@ -30145,7 +31528,10 @@ func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context, obj interface{}) (gqlmodel.UpgradePluginInput, error) { var it gqlmodel.UpgradePluginInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -30181,7 +31567,10 @@ func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context.Context, obj interface{}) (gqlmodel.UploadFileToPropertyInput, error) { var it gqlmodel.UploadFileToPropertyInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ 
-30233,7 +31622,10 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, obj interface{}) (gqlmodel.UploadPluginInput, error) { var it gqlmodel.UploadPluginInput - var asMap = obj.(map[string]interface{}) + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } for k, v := range asMap { switch k { @@ -30267,6 +31659,45 @@ func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, return it, nil } +func (ec *executionContext) unmarshalInputWidgetLocationInput(ctx context.Context, obj interface{}) (gqlmodel.WidgetLocationInput, error) { + var it gqlmodel.WidgetLocationInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "zone": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("zone")) + it.Zone, err = ec.unmarshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx, v) + if err != nil { + return it, err + } + case "section": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("section")) + it.Section, err = ec.unmarshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx, v) + if err != nil { + return it, err + } + case "area": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("area")) + it.Area, err = ec.unmarshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + // endregion **************************** input.gotpl ***************************** // region ************************** interface.gotpl 
*************************** @@ -32811,6 +34242,8 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_addWidget(ctx, field) case "updateWidget": out.Values[i] = ec._Mutation_updateWidget(ctx, field) + case "updateWidgetAlignSystem": + out.Values[i] = ec._Mutation_updateWidgetAlignSystem(ctx, field) case "removeWidget": out.Values[i] = ec._Mutation_removeWidget(ctx, field) case "installPlugin": @@ -33064,6 +34497,8 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "widgetLayout": + out.Values[i] = ec._PluginExtension_widgetLayout(ctx, field, obj) case "visualizer": out.Values[i] = ec._PluginExtension_visualizer(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -34801,6 +36236,8 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "widgetAlignSystem": + out.Values[i] = ec._Scene_widgetAlignSystem(ctx, field, obj) case "dynamicDatasetSchemas": out.Values[i] = ec._Scene_dynamicDatasetSchemas(ctx, field, obj) if out.Values[i] == graphql.Null { @@ -34976,6 +36413,11 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "extended": + out.Values[i] = ec._SceneWidget_extended(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "plugin": field := field out.Concurrently(i, func() (res graphql.Marshaler) { @@ -35446,6 +36888,33 @@ func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.Sele return out } +var updateWidgetAlignSystemPayloadImplementors = []string{"UpdateWidgetAlignSystemPayload"} + +func (ec *executionContext) _UpdateWidgetAlignSystemPayload(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.UpdateWidgetAlignSystemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetAlignSystemPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateWidgetAlignSystemPayload") + case "scene": + out.Values[i] = ec._UpdateWidgetAlignSystemPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var updateWidgetPayloadImplementors = []string{"UpdateWidgetPayload"} func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { @@ -35632,6 +37101,228 @@ func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj return out } +var widgetAlignSystemImplementors = []string{"WidgetAlignSystem"} + +func (ec *executionContext) _WidgetAlignSystem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetAlignSystem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetAlignSystemImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetAlignSystem") + case "inner": + out.Values[i] = ec._WidgetAlignSystem_inner(ctx, field, obj) + case "outer": + out.Values[i] = ec._WidgetAlignSystem_outer(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetAreaImplementors = []string{"WidgetArea"} + +func (ec *executionContext) _WidgetArea(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetArea) 
graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetAreaImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetArea") + case "widgetIds": + out.Values[i] = ec._WidgetArea_widgetIds(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "align": + out.Values[i] = ec._WidgetArea_align(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetExtendableImplementors = []string{"WidgetExtendable"} + +func (ec *executionContext) _WidgetExtendable(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetExtendable) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetExtendableImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetExtendable") + case "vertically": + out.Values[i] = ec._WidgetExtendable_vertically(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "horizontally": + out.Values[i] = ec._WidgetExtendable_horizontally(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetLayoutImplementors = []string{"WidgetLayout"} + +func (ec *executionContext) _WidgetLayout(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetLayout) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetLayoutImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, 
field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetLayout") + case "extendable": + out.Values[i] = ec._WidgetLayout_extendable(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "extended": + out.Values[i] = ec._WidgetLayout_extended(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "floating": + out.Values[i] = ec._WidgetLayout_floating(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "defaultLocation": + out.Values[i] = ec._WidgetLayout_defaultLocation(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetLocationImplementors = []string{"WidgetLocation"} + +func (ec *executionContext) _WidgetLocation(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetLocation) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetLocationImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetLocation") + case "zone": + out.Values[i] = ec._WidgetLocation_zone(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "section": + out.Values[i] = ec._WidgetLocation_section(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "area": + out.Values[i] = ec._WidgetLocation_area(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetSectionImplementors = []string{"WidgetSection"} + +func (ec *executionContext) _WidgetSection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetSection) graphql.Marshaler 
{ + fields := graphql.CollectFields(ec.OperationContext, sel, widgetSectionImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetSection") + case "top": + out.Values[i] = ec._WidgetSection_top(ctx, field, obj) + case "middle": + out.Values[i] = ec._WidgetSection_middle(ctx, field, obj) + case "bottom": + out.Values[i] = ec._WidgetSection_bottom(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var widgetZoneImplementors = []string{"WidgetZone"} + +func (ec *executionContext) _WidgetZone(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetZone) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, widgetZoneImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("WidgetZone") + case "left": + out.Values[i] = ec._WidgetZone_left(ctx, field, obj) + case "center": + out.Values[i] = ec._WidgetZone_center(ctx, field, obj) + case "right": + out.Values[i] = ec._WidgetZone_right(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var __DirectiveImplementors = []string{"__Directive"} func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { @@ -35660,6 +37351,11 @@ func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionS if out.Values[i] == graphql.Null { invalids++ } + case "isRepeatable": + out.Values[i] = ec.___Directive_isRepeatable(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } default: panic("unknown 
field " + strconv.Quote(field.Name)) } @@ -35956,6 +37652,7 @@ func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearth } wg.Wait() + return ret } @@ -36017,6 +37714,13 @@ func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹ree } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36119,6 +37823,7 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear } wg.Wait() + return ret } @@ -36156,6 +37861,13 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36217,6 +37929,13 @@ func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹r } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36264,6 +37983,13 @@ func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹ } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36311,6 +38037,7 @@ func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth } wg.Wait() + return ret } @@ -36348,6 +38075,13 @@ func (ec *executionContext) marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36409,6 +38143,13 @@ func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reear } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36456,6 +38197,13 @@ func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reea } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36571,6 +38319,12 @@ func (ec *executionContext) 
marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘ ret[i] = ec.marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36658,6 +38412,13 @@ func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹ } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36750,6 +38511,7 @@ func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘ } wg.Wait() + return ret } @@ -36787,6 +38549,13 @@ func (ec *executionContext) marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘ } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36869,6 +38638,13 @@ func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reea } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36916,6 +38692,13 @@ func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹ree } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -36963,6 +38746,13 @@ func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹ree } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37025,6 +38815,7 @@ func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘b } wg.Wait() + return ret } @@ -37082,6 +38873,13 @@ func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reeart } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37129,6 +38927,13 @@ func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return 
graphql.Null + } + } + return ret } @@ -37209,6 +39014,12 @@ func (ec *executionContext) marshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reea ret[i] = ec.marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37267,6 +39078,13 @@ func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37314,6 +39132,7 @@ func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reear } wg.Wait() + return ret } @@ -37389,6 +39208,13 @@ func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹r } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37446,6 +39272,13 @@ func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37503,6 +39336,13 @@ func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37560,6 +39400,13 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37607,6 +39454,13 @@ func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37654,6 +39508,13 @@ func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹ree } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ 
-37726,6 +39587,13 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹ree } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37781,6 +39649,12 @@ func (ec *executionContext) marshalNPropertySchemaID2แš•แš–githubแš—comแš‹reeart ret[i] = ec.marshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37934,6 +39808,13 @@ func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹r } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -37981,6 +39862,13 @@ func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹r } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38041,6 +39929,12 @@ func (ec *executionContext) marshalNString2แš•stringแš„(ctx context.Context, sel ret[i] = ec.marshalNString2string(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38087,6 +39981,13 @@ func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearth } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38134,6 +40035,13 @@ func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹re } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38233,6 +40141,11 @@ func (ec *executionContext) unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹r return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNUpdateWidgetAlignSystemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemInput(ctx 
context.Context, v interface{}) (gqlmodel.UpdateWidgetAlignSystemInput, error) { + res, err := ec.unmarshalInputUpdateWidgetAlignSystemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNUpdateWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetInput(ctx context.Context, v interface{}) (gqlmodel.UpdateWidgetInput, error) { res, err := ec.unmarshalInputUpdateWidgetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -38298,6 +40211,61 @@ func (ec *executionContext) marshalNVisualizer2githubแš—comแš‹reearthแš‹reearth return v } +func (ec *executionContext) unmarshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, v interface{}) (gqlmodel.WidgetAreaAlign, error) { + var res gqlmodel.WidgetAreaAlign + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetAreaAlign) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx context.Context, v interface{}) (gqlmodel.WidgetAreaType, error) { + var res gqlmodel.WidgetAreaType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetAreaType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNWidgetExtendable2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetExtendable(ctx 
context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetExtendable) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._WidgetExtendable(ctx, sel, v) +} + +func (ec *executionContext) unmarshalNWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx context.Context, v interface{}) (*gqlmodel.WidgetLocationInput, error) { + res, err := ec.unmarshalInputWidgetLocationInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx context.Context, v interface{}) (gqlmodel.WidgetSectionType, error) { + var res gqlmodel.WidgetSectionType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetSectionType) graphql.Marshaler { + return v +} + +func (ec *executionContext) unmarshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx context.Context, v interface{}) (gqlmodel.WidgetZoneType, error) { + var res gqlmodel.WidgetZoneType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.WidgetZoneType) graphql.Marshaler { + return v +} + func (ec *executionContext) marshalN__Directive2githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirective(ctx context.Context, sel 
ast.SelectionSet, v introspection.Directive) graphql.Marshaler { return ec.___Directive(ctx, sel, &v) } @@ -38336,6 +40304,13 @@ func (ec *executionContext) marshalN__Directive2แš•githubแš—comแš‹99designsแš‹gq } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38409,6 +40384,13 @@ func (ec *executionContext) marshalN__DirectiveLocation2แš•stringแš„(ctx context } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38458,6 +40440,13 @@ func (ec *executionContext) marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹g } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38499,6 +40488,13 @@ func (ec *executionContext) marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgen } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -38799,6 +40795,12 @@ func (ec *executionContext) marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘ ret[i] = ec.marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39061,6 +41063,13 @@ func (ec *executionContext) marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reear } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39143,6 +41152,13 @@ func (ec *executionContext) marshalOPropertySchemaFieldChoice2แš•แš–githubแš—com } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39341,6 +41357,12 @@ func (ec *executionContext) marshalOString2แš•stringแš„(ctx context.Context, sel ret[i] = ec.marshalNString2string(ctx, sel, v[i]) } + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39477,6 +41499,13 @@ func (ec 
*executionContext) marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearth return ec._UpdateTeamPayload(ctx, sel, v) } +func (ec *executionContext) marshalOUpdateWidgetAlignSystemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateWidgetAlignSystemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateWidgetAlignSystemPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -39536,6 +41565,72 @@ func (ec *executionContext) marshalOValueType2แš–githubแš—comแš‹reearthแš‹reeart return v } +func (ec *executionContext) marshalOWidgetAlignSystem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAlignSystem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetAlignSystem) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetAlignSystem(ctx, sel, v) +} + +func (ec *executionContext) marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetArea) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetArea(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOWidgetAreaAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, v interface{}) (*gqlmodel.WidgetAreaAlign, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.WidgetAreaAlign) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
marshalOWidgetAreaAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetAreaAlign) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + +func (ec *executionContext) marshalOWidgetLayout2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLayout(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetLayout) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetLayout(ctx, sel, v) +} + +func (ec *executionContext) marshalOWidgetLocation2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocation(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetLocation) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetLocation(ctx, sel, v) +} + +func (ec *executionContext) unmarshalOWidgetLocationInput2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocationInput(ctx context.Context, v interface{}) (*gqlmodel.WidgetLocationInput, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputWidgetLocationInput(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetSection) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetSection(ctx, sel, v) +} + +func (ec *executionContext) marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetZone) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._WidgetZone(ctx, sel, v) +} + func (ec *executionContext) 
marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValueแš„(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler { if v == nil { return graphql.Null @@ -39573,6 +41668,13 @@ func (ec *executionContext) marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gq } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39613,6 +41715,13 @@ func (ec *executionContext) marshalO__Field2แš•githubแš—comแš‹99designsแš‹gqlgen } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39653,6 +41762,13 @@ func (ec *executionContext) marshalO__InputValue2แš•githubแš—comแš‹99designsแš‹g } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } @@ -39700,6 +41816,13 @@ func (ec *executionContext) marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgen } wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + return ret } diff --git a/internal/adapter/gql/gqlmodel/convert_plugin.go b/internal/adapter/gql/gqlmodel/convert_plugin.go index cef4045d1..3e8526cb6 100644 --- a/internal/adapter/gql/gqlmodel/convert_plugin.go +++ b/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -21,6 +21,7 @@ func ToPlugin(p *plugin.Plugin) *Plugin { Name: pe.Name().String(), Description: pe.Description().String(), Icon: pe.Icon(), + WidgetLayout: ToPluginWidgetLayout(pe.WidgetLayout()), PropertySchemaID: pe.Schema(), AllTranslatedDescription: pe.Description(), AllTranslatedName: pe.Name(), @@ -71,3 +72,65 @@ func ToPluginMetadata(t *plugin.Metadata) (*PluginMetadata, error) { CreatedAt: t.CreatedAt, }, nil } + +func ToPluginWidgetLayout(wl *plugin.WidgetLayout) *WidgetLayout { + if wl == nil { + return nil + } + + return &WidgetLayout{ + Extendable: &WidgetExtendable{ + Horizontally: wl.HorizontallyExtendable(), + Vertically: 
wl.VerticallyExtendable(), + }, + Extended: wl.Extended(), + Floating: wl.Floating(), + DefaultLocation: ToPluginWidgetLocation(wl.DefaultLocation()), + } +} + +func ToPluginWidgetLocation(l *plugin.WidgetLocation) *WidgetLocation { + if l == nil { + return nil + } + + return &WidgetLocation{ + Zone: ToPluginWidgetZoneType(l.Zone), + Section: ToPluginWidgetSectionType(l.Section), + Area: ToPluginWidgetAreaType(l.Area), + } +} + +func ToPluginWidgetZoneType(t plugin.WidgetZoneType) WidgetZoneType { + switch t { + case plugin.WidgetZoneInner: + return WidgetZoneTypeInner + case plugin.WidgetZoneOuter: + return WidgetZoneTypeOuter + } + return "" +} + +func ToPluginWidgetSectionType(t plugin.WidgetSectionType) WidgetSectionType { + switch t { + case plugin.WidgetSectionLeft: + return WidgetSectionTypeLeft + case plugin.WidgetSectionCenter: + return WidgetSectionTypeCenter + case plugin.WidgetSectionRight: + return WidgetSectionTypeRight + } + return "" +} + +func ToPluginWidgetAreaType(t plugin.WidgetAreaType) WidgetAreaType { + switch t { + case plugin.WidgetAreaTop: + return WidgetAreaTypeTop + case plugin.WidgetAreaMiddle: + return WidgetAreaTypeMiddle + case plugin.WidgetAreaBottom: + return WidgetAreaTypeBottom + } + return "" +} diff --git a/internal/adapter/gql/gqlmodel/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go index c99ae550a..5c5e7a2f3 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -15,6 +15,7 @@ func ToSceneWidget(w *scene.Widget) *SceneWidget { ExtensionID: w.Extension(), PropertyID: w.Property().ID(), Enabled: w.Enabled(), + Extended: w.Extended(), } } @@ -47,15 +48,16 @@ func ToScene(scene *scene.Scene) *Scene { } return &Scene{ - ID: scene.ID().ID(), - ProjectID: scene.Project().ID(), - PropertyID: scene.Property().ID(), - TeamID: scene.Team().ID(), - RootLayerID: scene.RootLayer().ID(), - CreatedAt: scene.CreatedAt(), - UpdatedAt: scene.UpdatedAt(), - Widgets: 
widgets, - Plugins: plugins, + ID: scene.ID().ID(), + ProjectID: scene.Project().ID(), + PropertyID: scene.Property().ID(), + TeamID: scene.Team().ID(), + RootLayerID: scene.RootLayer().ID(), + CreatedAt: scene.CreatedAt(), + UpdatedAt: scene.UpdatedAt(), + Widgets: widgets, + WidgetAlignSystem: ToWidgetAlignSystem(scene.WidgetAlignSystem()), + Plugins: plugins, } } diff --git a/internal/adapter/gql/gqlmodel/convert_scene_align.go b/internal/adapter/gql/gqlmodel/convert_scene_align.go new file mode 100644 index 000000000..c453ec7aa --- /dev/null +++ b/internal/adapter/gql/gqlmodel/convert_scene_align.go @@ -0,0 +1,124 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +func ToWidgetAlignSystem(sas *scene.WidgetAlignSystem) *WidgetAlignSystem { + widgetAlignDoc := WidgetAlignSystem{ + Inner: ToWidgetZone(sas.Zone(scene.WidgetZoneInner)), + Outer: ToWidgetZone(sas.Zone(scene.WidgetZoneOuter)), + } + return &widgetAlignDoc +} + +func ToWidgetZone(z *scene.WidgetZone) *WidgetZone { + if z == nil { + return nil + } + return &WidgetZone{ + Left: ToWidgetSection(z.Section(scene.WidgetSectionLeft)), + Center: ToWidgetSection(z.Section(scene.WidgetSectionCenter)), + Right: ToWidgetSection(z.Section(scene.WidgetSectionRight)), + } +} + +func ToWidgetSection(s *scene.WidgetSection) *WidgetSection { + if s == nil { + return nil + } + return &WidgetSection{ + Top: ToWidgetArea(s.Area(scene.WidgetAreaTop)), + Middle: ToWidgetArea(s.Area(scene.WidgetAreaMiddle)), + Bottom: ToWidgetArea(s.Area(scene.WidgetAreaBottom)), + } +} + +func ToWidgetArea(a *scene.WidgetArea) *WidgetArea { + if a == nil { + return nil + } + wids := a.WidgetIDs() + ids := make([]*id.ID, 0, len(wids)) + for _, wid := range wids { + ids = append(ids, wid.IDRef()) + } + return &WidgetArea{ + WidgetIds: ids, + Align: ToWidgetAlignType(a.Alignment()), + } +} + +func ToWidgetAlignType(s scene.WidgetAlignType) WidgetAreaAlign { + 
switch s { + case scene.WidgetAlignStart: + return WidgetAreaAlignStart + case scene.WidgetAlignCentered: + return WidgetAreaAlignCentered + case scene.WidgetAlignEnd: + return WidgetAreaAlignEnd + } + return "" +} + +func FromSceneWidgetLocation(l *WidgetLocationInput) *scene.WidgetLocation { + if l == nil { + return nil + } + return &scene.WidgetLocation{ + Zone: FromSceneWidgetZoneType(l.Zone), + Section: FromSceneWidgetSectionType(l.Section), + Area: FromSceneWidgetAreaType(l.Area), + } +} + +func FromSceneWidgetZoneType(t WidgetZoneType) scene.WidgetZoneType { + switch t { + case WidgetZoneTypeInner: + return scene.WidgetZoneInner + case WidgetZoneTypeOuter: + return scene.WidgetZoneOuter + } + return "" +} + +func FromSceneWidgetSectionType(t WidgetSectionType) scene.WidgetSectionType { + switch t { + case WidgetSectionTypeLeft: + return scene.WidgetSectionLeft + case WidgetSectionTypeCenter: + return scene.WidgetSectionCenter + case WidgetSectionTypeRight: + return scene.WidgetSectionRight + } + return "" +} + +func FromSceneWidgetAreaType(t WidgetAreaType) scene.WidgetAreaType { + switch t { + case WidgetAreaTypeTop: + return scene.WidgetAreaTop + case WidgetAreaTypeMiddle: + return scene.WidgetAreaMiddle + case WidgetAreaTypeBottom: + return scene.WidgetAreaBottom + } + return "" +} + +func FromWidgetAlignType(a *WidgetAreaAlign) *scene.WidgetAlignType { + if a == nil { + return nil + } + var r scene.WidgetAlignType + switch *a { + case WidgetAreaAlignStart: + r = scene.WidgetAlignStart + case WidgetAreaAlignCentered: + r = scene.WidgetAlignCentered + case WidgetAreaAlignEnd: + r = scene.WidgetAlignEnd + } + return &r +} diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 3bd17ea62..3ea0fc5f2 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -591,6 +591,7 @@ type PluginExtension struct { Name string `json:"name"` Description string 
`json:"description"` Icon string `json:"icon"` + WidgetLayout *WidgetLayout `json:"widgetLayout"` Visualizer Visualizer `json:"visualizer"` PropertySchemaID id.PropertySchemaID `json:"propertySchemaId"` AllTranslatedName map[string]string `json:"allTranslatedName"` @@ -889,6 +890,7 @@ type Scene struct { RootLayerID id.ID `json:"rootLayerId"` Widgets []*SceneWidget `json:"widgets"` Plugins []*ScenePlugin `json:"plugins"` + WidgetAlignSystem *WidgetAlignSystem `json:"widgetAlignSystem"` DynamicDatasetSchemas []*DatasetSchema `json:"dynamicDatasetSchemas"` Project *Project `json:"project"` Team *Team `json:"team"` @@ -913,6 +915,7 @@ type SceneWidget struct { ExtensionID id.PluginExtensionID `json:"extensionId"` PropertyID id.ID `json:"propertyId"` Enabled bool `json:"enabled"` + Extended bool `json:"extended"` Plugin *Plugin `json:"plugin"` Extension *PluginExtension `json:"extension"` Property *Property `json:"property"` @@ -1086,10 +1089,23 @@ type UpdateTeamPayload struct { Team *Team `json:"team"` } +type UpdateWidgetAlignSystemInput struct { + SceneID id.ID `json:"sceneId"` + Location *WidgetLocationInput `json:"location"` + Align *WidgetAreaAlign `json:"align"` +} + +type UpdateWidgetAlignSystemPayload struct { + Scene *Scene `json:"scene"` +} + type UpdateWidgetInput struct { - SceneID id.ID `json:"sceneId"` - WidgetID id.ID `json:"widgetId"` - Enabled *bool `json:"enabled"` + SceneID id.ID `json:"sceneId"` + WidgetID id.ID `json:"widgetId"` + Enabled *bool `json:"enabled"` + Location *WidgetLocationInput `json:"location"` + Extended *bool `json:"extended"` + Index *int `json:"index"` } type UpdateWidgetPayload struct { @@ -1142,6 +1158,52 @@ type User struct { func (User) IsNode() {} +type WidgetAlignSystem struct { + Inner *WidgetZone `json:"inner"` + Outer *WidgetZone `json:"outer"` +} + +type WidgetArea struct { + WidgetIds []*id.ID `json:"widgetIds"` + Align WidgetAreaAlign `json:"align"` +} + +type WidgetExtendable struct { + Vertically bool 
`json:"vertically"` + Horizontally bool `json:"horizontally"` +} + +type WidgetLayout struct { + Extendable *WidgetExtendable `json:"extendable"` + Extended bool `json:"extended"` + Floating bool `json:"floating"` + DefaultLocation *WidgetLocation `json:"defaultLocation"` +} + +type WidgetLocation struct { + Zone WidgetZoneType `json:"zone"` + Section WidgetSectionType `json:"section"` + Area WidgetAreaType `json:"area"` +} + +type WidgetLocationInput struct { + Zone WidgetZoneType `json:"zone"` + Section WidgetSectionType `json:"section"` + Area WidgetAreaType `json:"area"` +} + +type WidgetSection struct { + Top *WidgetArea `json:"top"` + Middle *WidgetArea `json:"middle"` + Bottom *WidgetArea `json:"bottom"` +} + +type WidgetZone struct { + Left *WidgetSection `json:"left"` + Center *WidgetSection `json:"center"` + Right *WidgetSection `json:"right"` +} + type LayerEncodingFormat string const ( @@ -1717,3 +1779,173 @@ func (e *Visualizer) UnmarshalGQL(v interface{}) error { func (e Visualizer) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } + +type WidgetAreaAlign string + +const ( + WidgetAreaAlignStart WidgetAreaAlign = "START" + WidgetAreaAlignCentered WidgetAreaAlign = "CENTERED" + WidgetAreaAlignEnd WidgetAreaAlign = "END" +) + +var AllWidgetAreaAlign = []WidgetAreaAlign{ + WidgetAreaAlignStart, + WidgetAreaAlignCentered, + WidgetAreaAlignEnd, +} + +func (e WidgetAreaAlign) IsValid() bool { + switch e { + case WidgetAreaAlignStart, WidgetAreaAlignCentered, WidgetAreaAlignEnd: + return true + } + return false +} + +func (e WidgetAreaAlign) String() string { + return string(e) +} + +func (e *WidgetAreaAlign) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetAreaAlign(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetAreaAlign", str) + } + return nil +} + +func (e WidgetAreaAlign) MarshalGQL(w io.Writer) { + fmt.Fprint(w, 
strconv.Quote(e.String())) +} + +type WidgetAreaType string + +const ( + WidgetAreaTypeTop WidgetAreaType = "TOP" + WidgetAreaTypeMiddle WidgetAreaType = "MIDDLE" + WidgetAreaTypeBottom WidgetAreaType = "BOTTOM" +) + +var AllWidgetAreaType = []WidgetAreaType{ + WidgetAreaTypeTop, + WidgetAreaTypeMiddle, + WidgetAreaTypeBottom, +} + +func (e WidgetAreaType) IsValid() bool { + switch e { + case WidgetAreaTypeTop, WidgetAreaTypeMiddle, WidgetAreaTypeBottom: + return true + } + return false +} + +func (e WidgetAreaType) String() string { + return string(e) +} + +func (e *WidgetAreaType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetAreaType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetAreaType", str) + } + return nil +} + +func (e WidgetAreaType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type WidgetSectionType string + +const ( + WidgetSectionTypeLeft WidgetSectionType = "LEFT" + WidgetSectionTypeCenter WidgetSectionType = "CENTER" + WidgetSectionTypeRight WidgetSectionType = "RIGHT" +) + +var AllWidgetSectionType = []WidgetSectionType{ + WidgetSectionTypeLeft, + WidgetSectionTypeCenter, + WidgetSectionTypeRight, +} + +func (e WidgetSectionType) IsValid() bool { + switch e { + case WidgetSectionTypeLeft, WidgetSectionTypeCenter, WidgetSectionTypeRight: + return true + } + return false +} + +func (e WidgetSectionType) String() string { + return string(e) +} + +func (e *WidgetSectionType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetSectionType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetSectionType", str) + } + return nil +} + +func (e WidgetSectionType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + +type WidgetZoneType string + +const ( + WidgetZoneTypeInner WidgetZoneType = "INNER" 
+ WidgetZoneTypeOuter WidgetZoneType = "OUTER" +) + +var AllWidgetZoneType = []WidgetZoneType{ + WidgetZoneTypeInner, + WidgetZoneTypeOuter, +} + +func (e WidgetZoneType) IsValid() bool { + switch e { + case WidgetZoneTypeInner, WidgetZoneTypeOuter: + return true + } + return false +} + +func (e WidgetZoneType) String() string { + return string(e) +} + +func (e *WidgetZoneType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = WidgetZoneType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid WidgetZoneType", str) + } + return nil +} + +func (e WidgetZoneType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index d15dd7309..f79c3c01b 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -58,6 +58,9 @@ func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.Upda SceneID: id.SceneID(input.SceneID), WidgetID: id.WidgetID(input.WidgetID), Enabled: input.Enabled, + Extended: input.Extended, + Location: gqlmodel.FromSceneWidgetLocation(input.Location), + Index: input.Index, }, getOperator(ctx)) if err != nil { return nil, err @@ -88,6 +91,24 @@ func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.Remo }, nil } +func (r *mutationResolver) UpdateWidgetAlignSystem(ctx context.Context, input gqlmodel.UpdateWidgetAlignSystemInput) (*gqlmodel.UpdateWidgetAlignSystemPayload, error) { + exit := trace(ctx) + defer exit() + + scene, err := r.usecases.Scene.UpdateWidgetAlignSystem(ctx, interfaces.UpdateWidgetAlignSystemParam{ + SceneID: id.SceneID(input.SceneID), + Location: *gqlmodel.FromSceneWidgetLocation(input.Location), + Align: gqlmodel.FromWidgetAlignType(input.Align), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return 
&gqlmodel.UpdateWidgetAlignSystemPayload{ + Scene: gqlmodel.ToScene(scene), + }, nil +} + func (r *mutationResolver) InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) { exit := trace(ctx) defer exit() diff --git a/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go b/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go new file mode 100644 index 000000000..3c659c779 --- /dev/null +++ b/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go @@ -0,0 +1,74 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/builtin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/scene" + "go.mongodb.org/mongo-driver/bson" +) + +func AddSceneAlignSystem(ctx context.Context, c DBClient) error { + col := c.WithCollection("scene") + + return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ + Size: 50, + Callback: func(rows []bson.Raw) error { + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: AddSceneAlignSystem: hit scenes: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.SceneDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + swas := scene.NewWidgetAlignSystem() + + for _, w := range doc.Widgets { + wid, err := id.WidgetIDFrom(w.ID) + if err != nil { + continue + } + + pid, err := id.PluginIDFrom(w.Plugin) + if err != nil { + continue + } + + l := builtin.GetPlugin(pid).Extension(id.PluginExtensionID(w.Extension)).WidgetLayout() + if l == nil || l.Floating() { + continue + } + + dl := l.DefaultLocation() + if dl == nil { + dl = &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneInner, + Section: 
plugin.WidgetSectionLeft, + Area: plugin.WidgetAreaTop, + } + } + + swas.Area(scene.WidgetLocation{ + Zone: scene.WidgetZoneType(dl.Zone), + Section: scene.WidgetSectionType(dl.Section), + Area: scene.WidgetAreaType(dl.Area), + }).Add(wid, -1) + } + + doc.AlignSystem = mongodoc.NewWidgetAlignSystem(swas) + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/internal/infrastructure/mongo/migration/migrations.go b/internal/infrastructure/mongo/migration/migrations.go index 7fafb0fd0..b59e6461f 100644 --- a/internal/infrastructure/mongo/migration/migrations.go +++ b/internal/infrastructure/mongo/migration/migrations.go @@ -3,7 +3,8 @@ package migration var migrations = map[int64]MigrationFunc{ - 201217132559: AddSceneWidgetId, - 201217193948: AddSceneDefaultTile, - 210310145844: RemovePreviewToken, + 201217132559: AddSceneWidgetId, + 201217193948: AddSceneDefaultTile, + 210310145844: RemovePreviewToken, + 210730175108: AddSceneAlignSystem, } diff --git a/internal/infrastructure/mongo/mongodoc/plugin.go b/internal/infrastructure/mongo/mongodoc/plugin.go index 2a252a028..50d833878 100644 --- a/internal/infrastructure/mongo/mongodoc/plugin.go +++ b/internal/infrastructure/mongo/mongodoc/plugin.go @@ -6,16 +6,6 @@ import ( "go.mongodb.org/mongo-driver/bson" ) -type PluginExtensionDocument struct { - ID string - Type string - Name map[string]string - Description map[string]string - Icon string - Schema string - Visualizer string -} - type PluginDocument struct { ID string Name map[string]string @@ -27,6 +17,35 @@ type PluginDocument struct { Scene *string `bson:",omitempty"` } +type PluginExtensionDocument struct { + ID string + Type string + Name map[string]string + Description map[string]string + Icon string + Schema string + Visualizer string + WidgetLayout *WidgetLayoutDocument +} + +type WidgetLayoutDocument struct { + Extendable *WidgetExtendableDocument + Extended bool + Floating bool + 
DefaultLocation *WidgetLocationDocument +} + +type WidgetExtendableDocument struct { + Vertically bool + Horizontally bool +} + +type WidgetLocationDocument struct { + Zone string + Section string + Area string +} + type PluginConsumer struct { Rows []*plugin.Plugin } @@ -53,13 +72,14 @@ func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { extensionsDoc := make([]PluginExtensionDocument, 0, len(extensions)) for _, e := range extensions { extensionsDoc = append(extensionsDoc, PluginExtensionDocument{ - ID: string(e.ID()), - Type: string(e.Type()), - Name: e.Name(), - Description: e.Description(), - Icon: e.Icon(), - Schema: e.Schema().String(), - Visualizer: string(e.Visualizer()), + ID: string(e.ID()), + Type: string(e.Type()), + Name: e.Name(), + Description: e.Description(), + Icon: e.Icon(), + Schema: e.Schema().String(), + Visualizer: string(e.Visualizer()), + WidgetLayout: NewWidgetLayout(e.WidgetLayout()), }) } @@ -94,6 +114,7 @@ func (d *PluginDocument) Model() (*plugin.Plugin, error) { Name(d.Name). Description(d.Description). Icon(e.Icon). + WidgetLayout(e.WidgetLayout.Model()). Schema(psid). Build() if err != nil { @@ -112,3 +133,42 @@ func (d *PluginDocument) Model() (*plugin.Plugin, error) { Schema(id.PropertySchemaIDFromRef(d.Schema)). 
Build() } + +func NewWidgetLayout(l *plugin.WidgetLayout) *WidgetLayoutDocument { + return &WidgetLayoutDocument{ + Extendable: &WidgetExtendableDocument{ + Vertically: l.VerticallyExtendable(), + Horizontally: l.HorizontallyExtendable(), + }, + Extended: l.Extended(), + Floating: l.Floating(), + DefaultLocation: &WidgetLocationDocument{ + Zone: string(l.DefaultLocation().Zone), + Section: string(l.DefaultLocation().Section), + Area: string(l.DefaultLocation().Area), + }, + } +} + +func (d *WidgetLayoutDocument) Model() *plugin.WidgetLayout { + if d == nil { + return nil + } + + var loc *plugin.WidgetLocation + if d.DefaultLocation != nil { + loc = &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(d.DefaultLocation.Zone), + Section: plugin.WidgetSectionType(d.DefaultLocation.Section), + Area: plugin.WidgetAreaType(d.DefaultLocation.Area), + } + } + + return plugin.NewWidgetLayout( + d.Extendable.Horizontally, + d.Extendable.Vertically, + d.Extended, + d.Floating, + loc, + ).Ref() +} diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index cf42427b2..6cef08eb4 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -16,6 +16,7 @@ type SceneWidgetDocument struct { Extension string Property string Enabled bool + Extended bool } type ScenePluginDocument struct { @@ -24,14 +25,15 @@ type ScenePluginDocument struct { } type SceneDocument struct { - ID string - Project string - Team string - RootLayer string - Widgets []SceneWidgetDocument - Plugins []ScenePluginDocument - UpdateAt time.Time - Property string + ID string + Project string + Team string + RootLayer string + Widgets []SceneWidgetDocument + AlignSystem *WidgetAlignSystemDocument + Plugins []ScenePluginDocument + UpdateAt time.Time + Property string } type SceneConsumer struct { @@ -94,6 +96,7 @@ func NewScene(scene *scene.Scene) (*SceneDocument, string) { Extension: 
string(w.Extension()), Property: w.Property().String(), Enabled: w.Enabled(), + Extended: w.Extended(), }) } @@ -106,14 +109,15 @@ func NewScene(scene *scene.Scene) (*SceneDocument, string) { id := scene.ID().String() return &SceneDocument{ - ID: id, - Project: scene.Project().String(), - Team: scene.Team().String(), - RootLayer: scene.RootLayer().String(), - Widgets: widgetsDoc, - Plugins: pluginsDoc, - UpdateAt: scene.UpdatedAt(), - Property: scene.Property().String(), + ID: id, + Project: scene.Project().String(), + Team: scene.Team().String(), + RootLayer: scene.RootLayer().String(), + Widgets: widgetsDoc, + Plugins: pluginsDoc, + AlignSystem: NewWidgetAlignSystem(scene.WidgetAlignSystem()), + UpdateAt: scene.UpdatedAt(), + Property: scene.Property().String(), }, id } @@ -161,6 +165,7 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { id.PluginExtensionID(w.Extension), prid, w.Enabled, + w.Extended, ) if err != nil { return nil, err @@ -182,6 +187,7 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { Team(tid). RootLayer(lid). WidgetSystem(scene.NewWidgetSystem(ws)). + WidgetAlignSystem(d.AlignSystem.Model()). PluginSystem(scene.NewPluginSystem(ps)). UpdatedAt(d.UpdateAt). Property(prid). 
diff --git a/internal/infrastructure/mongo/mongodoc/scene_align.go b/internal/infrastructure/mongo/mongodoc/scene_align.go new file mode 100644 index 000000000..ab746b98c --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/scene_align.go @@ -0,0 +1,146 @@ +package mongodoc + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" +) + +type WidgetAlignSystemDocument struct { + Inner *WidgetZoneDocument + Outer *WidgetZoneDocument +} +type WidgetZoneDocument struct { + Left *WidgetSectionDocument + Center *WidgetSectionDocument + Right *WidgetSectionDocument +} + +type WidgetSectionDocument struct { + Top *WidgetAreaDocument + Middle *WidgetAreaDocument + Bottom *WidgetAreaDocument +} + +type WidgetAreaDocument struct { + WidgetIDs []string + Align string +} + +func NewWidgetAlignSystem(was *scene.WidgetAlignSystem) *WidgetAlignSystemDocument { + if was == nil { + return nil + } + + d := &WidgetAlignSystemDocument{ + Inner: NewWidgetZone(was.Zone(scene.WidgetZoneInner)), + Outer: NewWidgetZone(was.Zone(scene.WidgetZoneOuter)), + } + + if d.Inner == nil && d.Outer == nil { + return nil + } + return d +} + +func NewWidgetZone(z *scene.WidgetZone) *WidgetZoneDocument { + if z == nil { + return nil + } + + d := &WidgetZoneDocument{ + Left: NewWidgetSection(z.Section(scene.WidgetSectionLeft)), + Center: NewWidgetSection(z.Section(scene.WidgetSectionCenter)), + Right: NewWidgetSection(z.Section(scene.WidgetSectionRight)), + } + + if d.Left == nil && d.Center == nil && d.Right == nil { + return nil + } + return d +} + +func NewWidgetSection(s *scene.WidgetSection) *WidgetSectionDocument { + if s == nil { + return nil + } + + d := &WidgetSectionDocument{ + Top: NewWidgetArea(s.Area(scene.WidgetAreaTop)), + Middle: NewWidgetArea(s.Area(scene.WidgetAreaMiddle)), + Bottom: NewWidgetArea(s.Area(scene.WidgetAreaBottom)), + } + + if d.Top == nil && d.Middle == nil && d.Bottom == nil { + return nil + } + return d +} + +func 
NewWidgetArea(a *scene.WidgetArea) *WidgetAreaDocument { + if a == nil { + return nil + } + + return &WidgetAreaDocument{ + WidgetIDs: id.WidgetIDToKeys(a.WidgetIDs()), + Align: string(a.Alignment()), + } +} + +func (d *WidgetAlignSystemDocument) Model() *scene.WidgetAlignSystem { + if d == nil { + return nil + } + + was := scene.NewWidgetAlignSystem() + was.SetZone(scene.WidgetZoneInner, d.Inner.Model()) + was.SetZone(scene.WidgetZoneOuter, d.Outer.Model()) + return was +} + +func (d *WidgetZoneDocument) Model() *scene.WidgetZone { + if d == nil { + return nil + } + + wz := scene.NewWidgetZone() + wz.SetSection(scene.WidgetSectionLeft, d.Left.Model()) + wz.SetSection(scene.WidgetSectionCenter, d.Center.Model()) + wz.SetSection(scene.WidgetSectionRight, d.Right.Model()) + return wz +} + +func (d *WidgetSectionDocument) Model() *scene.WidgetSection { + if d == nil { + return nil + } + + ws := scene.NewWidgetSection() + ws.SetArea(scene.WidgetAreaTop, d.Top.Model()) + ws.SetArea(scene.WidgetAreaMiddle, d.Middle.Model()) + ws.SetArea(scene.WidgetAreaBottom, d.Bottom.Model()) + return ws +} + +func (a *WidgetAreaDocument) Model() *scene.WidgetArea { + if a == nil { + return nil + } + + return scene.NewWidgetArea(stringsToWidgetIDs(a.WidgetIDs), scene.WidgetAlignType(a.Align)) +} + +func stringsToWidgetIDs(wids []string) []id.WidgetID { + if wids == nil { + return nil + } + var docids []id.WidgetID + for _, wid := range wids { + nid, err := id.WidgetIDFrom(wid) + if err != nil { + continue + } + docids = append(docids, nid) + } + return docids +} diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 2b7990304..9643b214d 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -195,13 +195,37 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, return nil, nil, err } - widget, err = scene.NewWidget(id.NewWidgetID(), pid, eid, property.ID(), true) + extended := 
false + floating := false + var location *plugin.WidgetLocation + if widgetLayout := extension.WidgetLayout(); widgetLayout != nil { + extended = widgetLayout.Extended() + floating = widgetLayout.Floating() + location = widgetLayout.DefaultLocation() + } + + widget, err = scene.NewWidget( + id.NewWidgetID(), + pid, + eid, + property.ID(), + true, + extended, + ) if err != nil { return nil, nil, err } s.WidgetSystem().Add(widget) + if !floating && location != nil { + s.WidgetAlignSystem().Area(scene.WidgetLocation{ + Zone: scene.WidgetZoneType(location.Zone), + Section: scene.WidgetSectionType(location.Section), + Area: scene.WidgetAreaType(location.Area), + }).Add(widget.ID(), -1) + } + err = i.propertyRepo.Save(ctx, property) if err != nil { return nil, nil, err @@ -244,16 +268,43 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return nil, nil, err } - ws := scene.WidgetSystem() - widget := ws.Widget(param.WidgetID) + widget := scene.WidgetSystem().Widget(param.WidgetID) if widget == nil { return nil, nil, rerror.ErrNotFound } + _, location := scene.WidgetAlignSystem().Find(param.WidgetID) + + _, extension, err := i.getPlugin(ctx, scene.ID(), widget.Plugin(), widget.Extension()) + if err != nil { + return nil, nil, err + } + if extension.Type() != plugin.ExtensionTypeWidget { + return nil, nil, interfaces.ErrExtensionTypeMustBeWidget + } if param.Enabled != nil { widget.SetEnabled(*param.Enabled) } + if param.Location != nil || param.Index != nil { + if param.Location != nil { + location = *param.Location + } + index := -1 + if param.Index != nil { + index = *param.Index + } + scene.WidgetAlignSystem().Move(widget.ID(), location, index) + } + + if param.Extended != nil { + if layout := extension.WidgetLayout(); layout != nil { + if layout.HorizontallyExtendable() && location.Horizontal() || layout.VerticallyExtendable() && location.Vertical() { + widget.SetExtended(*param.Extended) + } + } + } + err2 = i.sceneRepo.Save(ctx, 
scene) if err2 != nil { return nil, nil, err2 @@ -263,8 +314,53 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return scene, widget, nil } -func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID, operator *usecase.Operator) (_ *scene.Scene, err error) { +func (i *Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.UpdateWidgetAlignSystemParam, operator *usecase.Operator) (_ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, interfaces.ErrOperationDenied + } + + // check scene lock + if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { + return nil, err + } + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.WritableTeams) + if err2 != nil { + return nil, err2 + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + area := scene.WidgetAlignSystem().Area(param.Location) + + if area == nil { + return nil, errors.New("invalid location") + } + + if param.Align != nil { + area.SetAlignment(*param.Align) + } + + if err = i.sceneRepo.Save(ctx, scene); err != nil { + return nil, err + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID, operator *usecase.Operator) (_ *scene.Scene, err error) { tx, err := i.transaction.Begin() if err != nil { return @@ -300,6 +396,7 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID } ws.Remove(wid) + scene.WidgetAlignSystem().Remove(wid) err2 = i.propertyRepo.Remove(ctx, widget.Property()) if err2 != nil { diff --git a/internal/usecase/interfaces/scene.go b/internal/usecase/interfaces/scene.go index 0d7669929..34c23e4d9 100644 --- a/internal/usecase/interfaces/scene.go +++ 
b/internal/usecase/interfaces/scene.go @@ -9,12 +9,6 @@ import ( "github.com/reearth/reearth-backend/pkg/scene" ) -type UpdateWidgetParam struct { - SceneID id.SceneID - WidgetID id.WidgetID - Enabled *bool -} - var ( ErrPluginAlreadyInstalled error = errors.New("plugin already installed") ErrPluginNotInstalled error = errors.New("plugin not installed") @@ -28,8 +22,24 @@ type Scene interface { Create(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error) AddWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, *scene.Widget, error) UpdateWidget(context.Context, UpdateWidgetParam, *usecase.Operator) (*scene.Scene, *scene.Widget, error) + UpdateWidgetAlignSystem(context.Context, UpdateWidgetAlignSystemParam, *usecase.Operator) (*scene.Scene, error) RemoveWidget(context.Context, id.SceneID, id.WidgetID, *usecase.Operator) (*scene.Scene, error) InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, id.PluginID, *id.PropertyID, error) UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error) UpgradePlugin(context.Context, id.SceneID, id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error) } + +type UpdateWidgetParam struct { + SceneID id.SceneID + WidgetID id.WidgetID + Enabled *bool + Extended *bool + Location *scene.WidgetLocation + Index *int +} + +type UpdateWidgetAlignSystemParam struct { + SceneID id.SceneID + Location scene.WidgetLocation + Align *scene.WidgetAlignType +} diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 8c01643e4..1b5dd9868 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -123,7 +123,7 @@ extensions: ui: color title: Text color description: Select a color. - defaultValue: '#434343' + defaultValue: "#434343" availableIf: field: themeType type: string @@ -133,7 +133,7 @@ extensions: ui: color title: Select color description: Select a color. 
- defaultValue: '#C52C63' + defaultValue: "#C52C63" availableIf: field: themeType type: string @@ -143,7 +143,7 @@ extensions: ui: color title: Background color description: Select a color. - defaultValue: '#DFE5F0' + defaultValue: "#DFE5F0" availableIf: field: themeType type: string @@ -185,26 +185,26 @@ extensions: type: number title: Fog density defaultValue: 2.0e-4 - description: 'Set a thickness to the fog. Min: 0 Max: 1' + description: "Set a thickness to the fog. Min: 0 Max: 1" min: 0 max: 1 - id: brightness_shift type: number title: Fog Brightness defaultValue: 0.03 - description: 'Set brightness of the fog. Min: -1 Max: 1' + description: "Set brightness of the fog. Min: -1 Max: 1" min: -1 max: 1 - id: hue_shift type: number title: Fog Hue - description: 'Set hue of the fog. Min: -1 Max: 1' + description: "Set hue of the fog. Min: -1 Max: 1" min: -1 max: 1 - id: surturation_shift type: number title: Fog Saturation - description: 'Set saturation of the fog. Min: -1 Max: 1' + description: "Set saturation of the fog. 
Min: -1 Max: 1" min: -1 max: 1 - id: timeline @@ -1258,8 +1258,10 @@ extensions: - id: menu visualizer: cesium type: widget - title: Menu + title: Menu (legacy) description: Menu widgets + widgetLayout: + floating: true schema: groups: - id: buttons @@ -1368,11 +1370,118 @@ extensions: field: menuType type: string value: camera + - id: button + visualizer: cesium + type: widget + title: Button + description: Button widget + widgetLayout: + defaultLocation: + zone: outer + section: left + area: top + schema: + groups: + - id: default + title: Button + fields: + - id: buttonTitle + type: string + title: Title + - id: buttonStyle + type: string + title: Style + defaultValue: text + choices: + - key: text + label: Text only + - key: icon + label: Icon only + - key: texticon + label: Text and icon + - id: buttonIcon + type: url + title: Icon + ui: image + - id: buttonColor + type: string + title: Text color + ui: color + - id: buttonBgcolor + type: string + title: Background color + ui: color + - id: buttonType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: menu + label: Menu + - key: camera + label: Camera flight + - id: buttonLink + type: url + title: Link + availableIf: + field: buttonType + type: string + value: link + - id: buttonCamera + type: camera + title: Camera flight + availableIf: + field: buttonType + type: string + value: camera + - id: menu + title: Menu + list: true + availableIf: + field: buttonType + type: string + value: menu + fields: + - id: menuTitle + type: string + title: Title + - id: menuIcon + type: url + title: Icon + - id: menuType + type: string + title: Type + defaultValue: link + choices: + - key: link + label: Link + - key: camera + label: Camera + - key: border + label: Break + - id: menuLink + type: url + title: Link + availableIf: + field: menuType + type: string + value: link + - id: menuCamera + type: camera + title: Camera + availableIf: + field: menuType + type: string + value: 
camera - id: splashscreen visualizer: cesium type: widget title: Splash screen description: A unique start screen that will display on load of your archive(ex. display the archive's title). + widgetLayout: + floating: true schema: groups: - id: overlay @@ -1434,6 +1543,13 @@ extensions: type: widget title: Storytelling description: SA feature that enables you to create a story. Connect points in a meaningful way so that your information can be consumed and understood easily. + widgetLayout: + extendable: + horizontally: true + defaultLocation: + zone: outer + section: left + area: bottom schema: groups: - id: default diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 2b5b07037..6a41c47ae 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -579,8 +579,12 @@ extensions: item_datanum: title: ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—) menu: - title: ใƒกใƒ‹ใƒฅใƒผ - description: ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚\nใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚\nใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™\nใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚ + title: ใƒกใƒ‹ใƒฅใƒผ (ๅปƒๆญขไบˆๅฎš) + description: | + ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚ + ใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚ + ใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™ใ€‚ + ใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚ propertySchema: buttons: title: ใƒœใ‚ฟใƒณ @@ -635,6 +639,58 @@ extensions: title: ใƒชใƒณใ‚ฏ menuCamera: title: ใ‚ซใƒกใƒฉ + button: + title: ใƒœใ‚ฟใƒณ + description: | + 
ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚ + ใ€€ใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚ + ใ€€ใƒปใƒกใƒ‹ใƒฅใƒผ๏ผš่ฟฝๅŠ ใ—ใŸใƒกใƒ‹ใƒฅใƒผใ‚’้–‹ใใพใ™ใ€‚ + ใ€€ใƒปใ‚ซใƒกใƒฉใ‚ขใ‚ฏใ‚ทใƒงใƒณ๏ผšใ‚ฏใƒชใƒƒใ‚ฏๆ™‚ใซใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ—ใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + fields: + buttonTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + buttonStyle: + title: ่กจ็คบๆ–นๆณ• + choices: + text: ใƒ†ใ‚ญใ‚นใƒˆใฎใฟ + icon: ใ‚ขใ‚คใ‚ณใƒณใฎใฟ + texticon: ใƒ†ใ‚ญใ‚นใƒˆ๏ผ‹ใ‚ขใ‚คใ‚ณใƒณ + buttonIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + buttonColor: + title: ใƒ†ใ‚ญใ‚นใƒˆ่‰ฒ + buttonBgcolor: + title: ่ƒŒๆ™ฏ่‰ฒ + buttonType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + menu: ใƒกใƒ‹ใƒฅใƒผ้–‹้–‰ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + buttonLink: + title: ใƒชใƒณใ‚ฏ + buttonCamera: + title: ใ‚ซใƒกใƒฉ + menu: + title: ใƒกใƒ‹ใƒฅใƒผ + fields: + menuTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ + menuIcon: + title: ใ‚ขใ‚คใ‚ณใƒณ + menuType: + title: ใ‚ขใ‚ฏใ‚ทใƒงใƒณ + choices: + link: ใƒชใƒณใ‚ฏ + camera: ใ‚ซใƒกใƒฉ็งปๅ‹• + border: ๅŒบๅˆ‡ใ‚Š็ทš + menuLink: + title: ใƒชใƒณใ‚ฏ + menuCamera: + title: ใ‚ซใƒกใƒฉ splashscreen: title: ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ description: ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go index f7f276bde..3066db0eb 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ -8,7 +8,6 @@ import ( "github.com/reearth/reearth-backend/pkg/visualizer" ) -// ExtensionType _ type ExtensionType string var ( @@ -26,7 +25,6 @@ var ( ExtensionTypeInfobox ExtensionType = "infobox" ) -// Extension _ type Extension struct { id id.PluginExtensionID extensionType ExtensionType @@ -35,50 +33,126 @@ type 
Extension struct { icon string schema id.PropertySchemaID visualizer visualizer.Visualizer + widgetLayout *WidgetLayout } -// ID _ func (w *Extension) ID() id.PluginExtensionID { return w.id } -// Type _ func (w *Extension) Type() ExtensionType { return w.extensionType } -// Name _ func (w *Extension) Name() i18n.String { return w.name.Copy() } -// Description _ func (w *Extension) Description() i18n.String { return w.description.Copy() } -// Icon _ func (w *Extension) Icon() string { return w.icon } -// Schema _ func (w *Extension) Schema() id.PropertySchemaID { return w.schema } -// Visualizer _ func (w *Extension) Visualizer() visualizer.Visualizer { return w.visualizer } -// Rename _ +func (w *Extension) WidgetLayout() *WidgetLayout { + if w == nil { + return nil + } + return w.widgetLayout +} + func (w *Extension) Rename(name i18n.String) { w.name = name.Copy() } -// SetDescription _ func (w *Extension) SetDescription(des i18n.String) { w.description = des.Copy() } + +type WidgetLayout struct { + horizontallyExtendable bool + verticallyExtendable bool + extended bool + floating bool + defaultLocation *WidgetLocation +} + +func NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, floating bool, defaultLocation *WidgetLocation) WidgetLayout { + return WidgetLayout{ + horizontallyExtendable: horizontallyExtendable, + verticallyExtendable: verticallyExtendable, + extended: extended, + floating: floating, + defaultLocation: defaultLocation.CopyRef(), + } +} + +func (l WidgetLayout) Ref() *WidgetLayout { + return &l +} + +func (l WidgetLayout) HorizontallyExtendable() bool { + return l.horizontallyExtendable +} + +func (l WidgetLayout) VerticallyExtendable() bool { + return l.verticallyExtendable +} + +func (l WidgetLayout) Extended() bool { + return l.extended +} + +func (l WidgetLayout) Floating() bool { + return l.floating +} + +func (l WidgetLayout) DefaultLocation() *WidgetLocation { + if l.defaultLocation == nil { + return nil + } + return 
l.defaultLocation.CopyRef() +} + +type WidgetLocation struct { + Zone WidgetZoneType + Section WidgetSectionType + Area WidgetAreaType +} + +func (l *WidgetLocation) CopyRef() *WidgetLocation { + if l == nil { + return nil + } + return &WidgetLocation{ + Zone: l.Zone, + Section: l.Section, + Area: l.Area, + } +} + +type WidgetZoneType string +type WidgetSectionType string +type WidgetAreaType string + +const ( + WidgetZoneInner WidgetZoneType = "inner" + WidgetZoneOuter WidgetZoneType = "outer" + WidgetSectionLeft WidgetSectionType = "left" + WidgetSectionCenter WidgetSectionType = "center" + WidgetSectionRight WidgetSectionType = "right" + WidgetAreaTop WidgetAreaType = "top" + WidgetAreaMiddle WidgetAreaType = "middle" + WidgetAreaBottom WidgetAreaType = "bottom" +) diff --git a/pkg/plugin/extension_builder.go b/pkg/plugin/extension_builder.go index daab6ace2..524430974 100644 --- a/pkg/plugin/extension_builder.go +++ b/pkg/plugin/extension_builder.go @@ -83,6 +83,12 @@ func (b *ExtensionBuilder) Visualizer(visualizer visualizer.Visualizer) *Extensi return b } +// WidgetLayout _ +func (b *ExtensionBuilder) WidgetLayout(widgetLayout *WidgetLayout) *ExtensionBuilder { + b.p.widgetLayout = widgetLayout + return b +} + // System _ func (b *ExtensionBuilder) System(s bool) *ExtensionBuilder { b.s = s diff --git a/pkg/plugin/extension_builder_test.go b/pkg/plugin/extension_builder_test.go index 98eef0d8b..90a9b6960 100644 --- a/pkg/plugin/extension_builder_test.go +++ b/pkg/plugin/extension_builder_test.go @@ -1,7 +1,6 @@ package plugin import ( - "errors" "testing" "github.com/reearth/reearth-backend/pkg/i18n" @@ -52,6 +51,15 @@ func TestExtensionBuilder_Visualizer(t *testing.T) { assert.Equal(t, visualizer.Visualizer("ccc"), res.Visualizer()) } +func TestExtensionBuilder_WidgetLayout(t *testing.T) { + var b = NewExtension() + wl := NewWidgetLayout( + false, true, false, false, nil, + ) + res := b.ID("xxx").WidgetLayout(&wl).MustBuild() + assert.Same(t, &wl, 
res.WidgetLayout()) +} + func TestExtensionBuilder_Build(t *testing.T) { testCases := []struct { name, icon string @@ -62,6 +70,7 @@ func TestExtensionBuilder_Build(t *testing.T) { description i18n.String schema id.PropertySchemaID visualizer visualizer.Visualizer + widgetLayout *WidgetLayout expected *Extension err error }{ @@ -75,6 +84,13 @@ func TestExtensionBuilder_Build(t *testing.T) { description: i18n.StringFrom("ddd"), schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ).Ref(), expected: &Extension{ id: "xxx", extensionType: "ppp", @@ -83,24 +99,32 @@ func TestExtensionBuilder_Build(t *testing.T) { icon: "ttt", schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ).Ref(), }, err: nil, }, { name: "fail not system type visualizer", extensionType: ExtensionTypeVisualizer, - err: errors.New("cannot build system extension"), + err: id.ErrInvalidID, }, { name: "fail not system type infobox", extensionType: ExtensionTypeInfobox, - err: errors.New("cannot build system extension"), + err: id.ErrInvalidID, }, { name: "fail nil id", err: id.ErrInvalidID, }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { @@ -114,13 +138,13 @@ func TestExtensionBuilder_Build(t *testing.T) { Description(tc.description). Name(tc.ename). Icon(tc.icon). + WidgetLayout(tc.widgetLayout). 
Build() - if err == nil { + if tc.err == nil { assert.Equal(tt, tc.expected, e) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(tt, tc.err, err) } - }) } } @@ -135,6 +159,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { description i18n.String schema id.PropertySchemaID visualizer visualizer.Visualizer + widgetLayout *WidgetLayout expected *Extension }{ { @@ -147,6 +172,12 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { description: i18n.StringFrom("ddd"), schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }).Ref(), expected: &Extension{ id: "xxx", extensionType: "ppp", @@ -155,6 +186,12 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { icon: "ttt", schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }).Ref(), }, }, { @@ -189,6 +226,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { Description(tc.description). Name(tc.ename). Icon(tc.icon). + WidgetLayout(tc.widgetLayout). 
MustBuild() }) } diff --git a/pkg/plugin/extension_test.go b/pkg/plugin/extension_test.go index 6758f3041..47713d7cb 100644 --- a/pkg/plugin/extension_test.go +++ b/pkg/plugin/extension_test.go @@ -11,21 +11,23 @@ import ( func TestExtension(t *testing.T) { expected := struct { - Id id.PluginExtensionID - Type ExtensionType - Name i18n.String - Description i18n.String - Icon string - Schema id.PropertySchemaID - Visualizer visualizer.Visualizer + ID id.PluginExtensionID + Type ExtensionType + Name i18n.String + Description i18n.String + Icon string + Schema id.PropertySchemaID + Visualizer visualizer.Visualizer + WidgetLayout *WidgetLayout }{ - Id: "xxx", - Type: ExtensionTypePrimitive, - Name: i18n.StringFrom("aaa"), - Description: i18n.StringFrom("ddd"), - Icon: "test", - Schema: id.MustPropertySchemaID("hoge~0.1.0/fff"), - Visualizer: "vvv", + ID: "xxx", + Type: ExtensionTypePrimitive, + Name: i18n.StringFrom("aaa"), + Description: i18n.StringFrom("ddd"), + Icon: "test", + Schema: id.MustPropertySchemaID("hoge~0.1.0/fff"), + Visualizer: "vvv", + WidgetLayout: NewWidgetLayout(false, false, true, false, nil).Ref(), } actual := NewExtension(). @@ -34,6 +36,7 @@ func TestExtension(t *testing.T) { Description(i18n.StringFrom("ddd")). Schema(id.MustPropertySchemaID("hoge~0.1.0/fff")). Icon("test"). + WidgetLayout(NewWidgetLayout(false, false, true, false, nil).Ref()). Visualizer("vvv"). Type(ExtensionTypePrimitive). 
MustBuild() @@ -43,8 +46,9 @@ func TestExtension(t *testing.T) { assert.Equal(t, expected.Description, actual.Description()) assert.Equal(t, expected.Name, actual.Name()) assert.Equal(t, expected.Icon, actual.Icon()) + assert.Equal(t, expected.WidgetLayout, actual.WidgetLayout()) assert.Equal(t, expected.Schema, actual.Schema()) - assert.Equal(t, expected.Id, actual.ID()) + assert.Equal(t, expected.ID, actual.ID()) } func TestExtension_Rename(t *testing.T) { diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index 532f10f4c..6379d377a 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -123,6 +123,7 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, Description(i18n.StringFrom(desc)). Visualizer(viz). Type(typ). + WidgetLayout(i.WidgetLayout.layout()). Icon(icon). Schema(schema.ID()). System(sys). @@ -134,6 +135,37 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, return ext, schema, nil } +func (l *WidgetLayout) layout() *plugin.WidgetLayout { + if l == nil { + return nil + } + + horizontallyExtendable := false + verticallyExtendable := false + extended := false + + if l.Extendable != nil && l.Extendable.Horizontally != nil && *l.Extendable.Horizontally { + horizontallyExtendable = true + } + if l.Extendable != nil && l.Extendable.Vertically != nil && *l.Extendable.Vertically { + verticallyExtendable = true + } + if l.Extended != nil && *l.Extended { + extended = false + } + + var dl *plugin.WidgetLocation + if l.DefaultLocation != nil { + dl = &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(l.DefaultLocation.Zone), + Section: plugin.WidgetSectionType(l.DefaultLocation.Section), + Area: plugin.WidgetAreaType(l.DefaultLocation.Area), + } + } + + return plugin.NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, l.Floating, dl).Ref() +} + func (i *PropertySchema) schema(pluginID id.PluginID, idstr string) 
(*property.Schema, error) { psid, err := id.PropertySchemaIDFrom(pluginID.String() + "/" + idstr) if err != nil { diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 3c391c876..04772af89 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -753,3 +753,57 @@ func TestSchemaField(t *testing.T) { }) } } + +func TestLayout(t *testing.T) { + tr := true + + testCases := []struct { + name string + widgetLayout WidgetLayout + expected *plugin.WidgetLayout + }{ + { + name: "convert manifest widget layout to scene widget layout", + widgetLayout: WidgetLayout{ + Extendable: &Extendable{ + Horizontally: &tr, + Vertically: nil, + }, + Extended: nil, + Floating: true, + DefaultLocation: &Location{ + Zone: "outer", + Section: "left", + Area: "top", + }, + }, + expected: plugin.NewWidgetLayout(true, false, false, true, &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneOuter, + Section: plugin.WidgetSectionLeft, + Area: plugin.WidgetAreaTop, + }).Ref(), + }, + { + name: "nil default location", + widgetLayout: WidgetLayout{ + Extendable: &Extendable{ + Horizontally: nil, + Vertically: &tr, + }, + Extended: nil, + Floating: false, + DefaultLocation: nil, + }, + expected: plugin.NewWidgetLayout(false, true, false, false, nil).Ref(), + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + res := tc.widgetLayout.layout() + assert.Equal(tt, tc.expected, res) + }) + } +} diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 6102ef310..c9e1dee2e 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -26,6 +26,7 @@ var normalExpected = &Manifest{ plugin.NewExtension().ID(id.PluginExtensionID("hoge")). Visualizer(visualizer.VisualizerCesium). Type(plugin.ExtensionTypePrimitive). + WidgetLayout(nil). Schema(id.MustPropertySchemaID("aaa~1.1.1/hoge")). 
MustBuild(), }).MustBuild(), diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index cc5c899ef..8ec66eb70 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build3669425617/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/var/folders/_n/99kwktfn5ml3fmw3fbn575hh0000gn/T/go-build181274042/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` @@ -8,20 +8,32 @@ type Choice struct { Label string `json:"label,omitempty"` } +type Extendable struct { + Horizontally *bool `json:"horizontally,omitempty"` + Vertically *bool `json:"vertically,omitempty"` +} + type Extension struct { - Description *string `json:"description,omitempty"` - ID ID `json:"id"` - Icon *string `json:"icon,omitempty"` - Schema *PropertySchema `json:"schema,omitempty"` - Title string `json:"title"` - Type string `json:"type"` - Visualizer string `json:"visualizer"` + Description *string `json:"description,omitempty"` + ID ID `json:"id"` + Icon *string `json:"icon,omitempty"` + Schema *PropertySchema `json:"schema,omitempty"` + Title string `json:"title"` + Type string `json:"type"` + Visualizer string `json:"visualizer"` + WidgetLayout *WidgetLayout `json:"widgetLayout,omitempty"` } type ID string type Id string +type Location struct { + Area string `json:"area,omitempty"` + Section string `json:"section,omitempty"` + Zone string `json:"zone,omitempty"` +} + type PropertyCondition struct { Field string `json:"field"` Type Valuetype `json:"type"` @@ -83,3 +95,10 @@ type Root struct { } type Valuetype string + +type WidgetLayout struct { + DefaultLocation *Location `json:"defaultLocation,omitempty"` + Extendable *Extendable 
`json:"extendable,omitempty"` + Extended *bool `json:"extended,omitempty"` + Floating bool `json:"floating,omitempty"` +} diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 12d529f0e..4757f6050 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -60,8 +60,11 @@ func (p *Plugin) Extensions() []*Extension { return list } -// Extension _ func (p *Plugin) Extension(id id.PluginExtensionID) *Extension { + if p == nil { + return nil + } + e, ok := p.extensions[id] if ok { return e diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index f9e2fc976..f31ae2a8e 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -27,6 +27,12 @@ func TestPlugin_Extension(t *testing.T) { plugin: New().Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), expected: nil, }, + { + name: "nil", + key: "zzz", + plugin: nil, + expected: nil, + }, } for _, tc := range testCases { tc := tc diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go index d9ebd0eea..f30d9d31f 100644 --- a/pkg/scene/builder.go +++ b/pkg/scene/builder.go @@ -6,12 +6,10 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -// Builder _ type Builder struct { scene *Scene } -// New _ func New() *Builder { return &Builder{scene: &Scene{}} } @@ -30,6 +28,9 @@ func (b *Builder) Build() (*Scene, error) { if b.scene.widgetSystem == nil { b.scene.widgetSystem = NewWidgetSystem(nil) } + if b.scene.widgetAlignSystem == nil { + b.scene.widgetAlignSystem = NewWidgetAlignSystem() + } if b.scene.pluginSystem == nil { b.scene.pluginSystem = NewPluginSystem(nil) } @@ -39,7 +40,6 @@ func (b *Builder) Build() (*Scene, error) { return b.scene, nil } -// MustBuild _ func (b *Builder) MustBuild() *Scene { r, err := b.Build() if err != nil { @@ -48,57 +48,53 @@ func (b *Builder) MustBuild() *Scene { return r } -// ID _ func (b *Builder) ID(id id.SceneID) *Builder { b.scene.id = id return b } -// NewID _ func (b *Builder) 
NewID() *Builder { b.scene.id = id.SceneID(id.New()) return b } -// Project _ func (b *Builder) Project(prj id.ProjectID) *Builder { b.scene.project = prj return b } -// Team _ func (b *Builder) Team(team id.TeamID) *Builder { b.scene.team = team return b } -// UpdatedAt _ func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { b.scene.updatedAt = updatedAt return b } -// WidgetSystem _ func (b *Builder) WidgetSystem(widgetSystem *WidgetSystem) *Builder { widgetSystem2 := *widgetSystem b.scene.widgetSystem = &widgetSystem2 return b } -// RootLayer _ +func (b *Builder) WidgetAlignSystem(widgetAlignSystem *WidgetAlignSystem) *Builder { + b.scene.widgetAlignSystem = widgetAlignSystem + return b +} + func (b *Builder) RootLayer(rootLayer id.LayerID) *Builder { b.scene.rootLayer = rootLayer return b } -// PluginSystem _ func (b *Builder) PluginSystem(pluginSystem *PluginSystem) *Builder { pluginSystem2 := *pluginSystem b.scene.pluginSystem = &pluginSystem2 return b } -// Property _ func (b *Builder) Property(p id.PropertyID) *Builder { b.scene.property = p return b diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index 7ebeb0b42..544a297b4 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -380,8 +380,20 @@ func TestSceneBuilder(t *testing.T) { sceneWidgetID1 := id.NewWidgetID() sceneWidgetID2 := id.NewWidgetID() - sceneWidget1 := scene.MustNewWidget(sceneWidgetID1, pluginID, pluginExtension1ID, scenePropertyID, false) - sceneWidget2 := scene.MustNewWidget(sceneWidgetID2, pluginID, pluginExtension2ID, scenePropertyID, true) + sceneWidget1 := scene.MustNewWidget( + sceneWidgetID1, + pluginID, + pluginExtension1ID, + scenePropertyID, + false, + true) + sceneWidget2 := scene.MustNewWidget( + sceneWidgetID2, + pluginID, + pluginExtension2ID, + scenePropertyID, + true, + false) scenePlugin1 := scene.NewPlugin(pluginID, &scenePropertyID) assert.Equal(t, sceneWidgetID1, sceneWidget1.ID()) diff --git 
a/pkg/scene/builder/encoder.go b/pkg/scene/builder/encoder.go index 2dd030c4d..bf3134374 100644 --- a/pkg/scene/builder/encoder.go +++ b/pkg/scene/builder/encoder.go @@ -110,3 +110,25 @@ type infoboxFieldJSON struct { } type propertyJSON = map[string]interface{} + +type widgetAlignSystemJSON struct { + Inner *widgetZoneJSON `json:"inner"` + Outer *widgetZoneJSON `json:"outer"` +} + +type widgetZoneJSON struct { + Left *widgetSectionJSON `json:"left"` + Center *widgetSectionJSON `json:"center"` + Right *widgetSectionJSON `json:"right"` +} + +type widgetSectionJSON struct { + Top *widgetAreaJSON `json:"top"` + Middle *widgetAreaJSON `json:"middle"` + Bottom *widgetAreaJSON `json:"bottom"` +} + +type widgetAreaJSON struct { + WidgetIDs []string `json:"widgetIds"` + Align string `json:"align"` +} diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go index fb7cd9833..5060f5d97 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -10,30 +10,34 @@ import ( ) type sceneJSON struct { - SchemaVersion int `json:"schemaVersion"` - ID string `json:"id"` - PublishedAt time.Time `json:"publishedAt"` - Property propertyJSON `json:"property"` - Plugins map[string]propertyJSON `json:"plugins"` - Layers []*layerJSON `json:"layers"` - Widgets []*widgetJSON `json:"widgets"` + SchemaVersion int `json:"schemaVersion"` + ID string `json:"id"` + PublishedAt time.Time `json:"publishedAt"` + Property propertyJSON `json:"property"` + Plugins map[string]propertyJSON `json:"plugins"` + Layers []*layerJSON `json:"layers"` + Widgets []*widgetJSON `json:"widgets"` + WidgetAlignSystem *widgetAlignSystemJSON `json:"widgetAlignSystem"` } type widgetJSON struct { + ID string `json:"id"` PluginID string `json:"pluginId"` ExtensionID string `json:"extensionId"` Property propertyJSON `json:"property"` + Extended bool `json:"extended"` } func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Time, l []*layerJSON, p []*property.Property) 
*sceneJSON { return &sceneJSON{ - SchemaVersion: version, - ID: s.ID().String(), - PublishedAt: publishedAt, - Property: b.property(ctx, findProperty(p, s.Property())), - Plugins: b.plugins(ctx, s, p), - Widgets: b.widgets(ctx, s, p), - Layers: l, + SchemaVersion: version, + ID: s.ID().String(), + PublishedAt: publishedAt, + Property: b.property(ctx, findProperty(p, s.Property())), + Plugins: b.plugins(ctx, s, p), + Widgets: b.widgets(ctx, s, p), + Layers: l, + WidgetAlignSystem: buildWidgetAlignSystem(s.WidgetAlignSystem()), } } @@ -58,10 +62,13 @@ func (b *Builder) widgets(ctx context.Context, s *scene.Scene, p []*property.Pro if !w.Enabled() { continue } + res = append(res, &widgetJSON{ + ID: w.ID().String(), PluginID: w.Plugin().String(), ExtensionID: string(w.Extension()), Property: b.property(ctx, findProperty(p, w.Property())), + Extended: w.Extended(), }) } return res @@ -79,3 +86,68 @@ func findProperty(pp []*property.Property, i id.PropertyID) *property.Property { } return nil } + +func toString(wids []id.WidgetID) []string { + if wids == nil { + return nil + } + docids := make([]string, 0, len(wids)) + for _, wid := range wids { + docids = append(docids, wid.String()) + } + return docids +} + +func buildWidgetAlignSystem(s *scene.WidgetAlignSystem) *widgetAlignSystemJSON { + if s == nil { + return nil + } + was := widgetAlignSystemJSON{ + Inner: buildWidgetZone(s.Zone(scene.WidgetZoneInner)), + Outer: buildWidgetZone(s.Zone(scene.WidgetZoneOuter)), + } + if was.Inner == nil && was.Outer == nil { + return nil + } + return &was +} + +func buildWidgetZone(z *scene.WidgetZone) *widgetZoneJSON { + if z == nil { + return nil + } + zj := widgetZoneJSON{ + Left: buildWidgetSection(z.Section(scene.WidgetSectionLeft)), + Center: buildWidgetSection(z.Section(scene.WidgetSectionCenter)), + Right: buildWidgetSection(z.Section(scene.WidgetSectionRight)), + } + if zj.Left == nil && zj.Center == nil && zj.Right == nil { + return nil + } + return &zj +} + +func 
buildWidgetSection(s *scene.WidgetSection) *widgetSectionJSON { + if s == nil { + return nil + } + sj := widgetSectionJSON{ + Middle: buildWidgetArea(s.Area(scene.WidgetAreaMiddle)), + Top: buildWidgetArea(s.Area(scene.WidgetAreaTop)), + Bottom: buildWidgetArea(s.Area(scene.WidgetAreaBottom)), + } + if sj.Top == nil && sj.Middle == nil && sj.Bottom == nil { + return nil + } + return &sj +} + +func buildWidgetArea(a *scene.WidgetArea) *widgetAreaJSON { + if a == nil || len(a.WidgetIDs()) == 0 { + return nil + } + return &widgetAreaJSON{ + WidgetIDs: toString(a.WidgetIDs()), + Align: string(a.Alignment()), + } +} diff --git a/pkg/scene/builder/scene_test.go b/pkg/scene/builder/scene_test.go index 7bf950cff..438206b8e 100644 --- a/pkg/scene/builder/scene_test.go +++ b/pkg/scene/builder/scene_test.go @@ -5,6 +5,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" "github.com/stretchr/testify/assert" ) @@ -44,3 +45,83 @@ func TestScene_FindProperty(t *testing.T) { }) } } + +func TestScene_ToString(t *testing.T) { + wid := id.NewWidgetID() + widS := wid.String() + wid2 := id.NewWidgetID() + wid2S := wid2.String() + wid3 := id.NewWidgetID() + wid3S := wid3.String() + wids := []id.WidgetID{wid, wid2, wid3} + widsString := []string{widS, wid2S, wid3S} + + testCases := []struct { + Name string + Input []id.WidgetID + Expected []string + }{ + { + Name: "Convert a slice of id.WidgetID to a slice of strings", + Input: wids, + Expected: widsString, + }, + { + Name: "Return nil when no WidgetIDs are inputted", + Input: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := toString(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestBuildWidgetAlignSystem(t *testing.T) { + wid := id.NewWidgetID() + was := scene.NewWidgetAlignSystem() + was.Area(scene.WidgetLocation{ + Zone: 
scene.WidgetZoneInner, + Section: scene.WidgetSectionLeft, + Area: scene.WidgetAreaTop, + }).Add(wid, -1) + + testCases := []struct { + Name string + Input *scene.WidgetAlignSystem + Expected *widgetAlignSystemJSON + }{ + { + Name: "works", + Input: was, + Expected: &widgetAlignSystemJSON{ + Inner: &widgetZoneJSON{ + Left: &widgetSectionJSON{ + Top: &widgetAreaJSON{ + WidgetIDs: []string{wid.String()}, + Align: "start", + }, + }, + }, + }, + }, + { + Name: "nil", + Input: nil, + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := buildWidgetAlignSystem(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go index c7d9c2c2d..8dadac455 100644 --- a/pkg/scene/builder_test.go +++ b/pkg/scene/builder_test.go @@ -50,11 +50,16 @@ func TestBuilder_Project(t *testing.T) { func TestBuilder_WidgetSystem(t *testing.T) { nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", id.NewPropertyID(), true), + MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", id.NewPropertyID(), true, false), }) b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).WidgetSystem(ws).MustBuild() assert.Equal(t, ws, b.WidgetSystem()) } +func TestBuilder_WidgetAlignSystem(t *testing.T) { + was := NewWidgetAlignSystem() + b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).WidgetAlignSystem(was).MustBuild() + assert.Equal(t, was, b.WidgetAlignSystem()) +} func TestBuilder_Build(t *testing.T) { tid := id.NewTeamID() @@ -64,97 +69,106 @@ func TestBuilder_Build(t *testing.T) { lid := id.NewLayerID() nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true), + MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true, false), }) + was := NewWidgetAlignSystem() ps := NewPluginSystem([]*Plugin{ 
NewPlugin(id.OfficialPluginID, ppid.Ref()), }) testCases := []struct { - Name string - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID - WidgetSystem *WidgetSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property id.PropertyID - Expected struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID - WidgetSystem *WidgetSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property id.PropertyID + Name string + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + WidgetAlignSystem *WidgetAlignSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + Expected struct { + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + WidgetAlignSystem *WidgetAlignSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID } err error }{ { - Name: "fail nil scene id", - Id: id.SceneID{}, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: id.ErrInvalidID, + Name: "fail nil scene id", + Id: id.SceneID{}, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, }, { - Name: "fail nil team id", - Id: sid, - Project: pid, - Team: id.TeamID{}, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: id.ErrInvalidID, + Name: "fail nil team id", + Id: sid, + Project: pid, + Team: id.TeamID{}, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, }, { - Name: "fail nil root 
layer id", - Id: sid, - Project: pid, - Team: tid, - RootLayer: id.LayerID{}, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: id.ErrInvalidID, + Name: "fail nil root layer id", + Id: sid, + Project: pid, + Team: tid, + RootLayer: id.LayerID{}, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, }, { - Name: "success build new scene", - Id: sid, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + Name: "success build new scene", + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, Expected: struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID - WidgetSystem *WidgetSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property id.PropertyID + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + WidgetAlignSystem *WidgetAlignSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID }{ - Id: sid, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, }, err: nil, }, @@ -166,6 +180,7 @@ func TestBuilder_Build(t *testing.T) { res, err := New(). ID(tc.Id). WidgetSystem(tc.WidgetSystem). + WidgetAlignSystem(tc.WidgetAlignSystem). Project(tc.Project). PluginSystem(tc.PluginSystem). Property(tc.Property). 
@@ -197,97 +212,106 @@ func TestBuilder_MustBuild(t *testing.T) { lid := id.NewLayerID() nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true), + MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true, false), }) + was := NewWidgetAlignSystem() ps := NewPluginSystem([]*Plugin{ NewPlugin(id.OfficialPluginID, ppid.Ref()), }) testCases := []struct { - Name string - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID - WidgetSystem *WidgetSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property id.PropertyID - Expected struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID - WidgetSystem *WidgetSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property id.PropertyID + Name string + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + WidgetAlignSystem *WidgetAlignSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID + Expected struct { + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + WidgetAlignSystem *WidgetAlignSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID } err error }{ { - Name: "fail nil scene id", - Id: id.SceneID{}, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: id.ErrInvalidID, + Name: "fail nil scene id", + Id: id.SceneID{}, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, }, { - Name: "fail nil team id", - Id: sid, - Project: pid, - Team: id.TeamID{}, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 
1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: id.ErrInvalidID, + Name: "fail nil team id", + Id: sid, + Project: pid, + Team: id.TeamID{}, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, }, { - Name: "fail nil root layer id", - Id: sid, - Project: pid, - Team: tid, - RootLayer: id.LayerID{}, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: id.ErrInvalidID, + Name: "fail nil root layer id", + Id: sid, + Project: pid, + Team: tid, + RootLayer: id.LayerID{}, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + err: id.ErrInvalidID, }, { - Name: "success build new scene", - Id: sid, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + Name: "success build new scene", + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, + PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, Expected: struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID - WidgetSystem *WidgetSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property id.PropertyID + Id id.SceneID + Project id.ProjectID + Team id.TeamID + RootLayer id.LayerID + WidgetSystem *WidgetSystem + WidgetAlignSystem *WidgetAlignSystem + PluginSystem *PluginSystem + UpdatedAt time.Time + Property id.PropertyID }{ - Id: sid, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + Id: sid, + Project: pid, + Team: tid, + RootLayer: lid, + WidgetSystem: ws, + WidgetAlignSystem: was, 
+ PluginSystem: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, }, err: nil, }, @@ -306,6 +330,7 @@ func TestBuilder_MustBuild(t *testing.T) { assert.Equal(tt, tc.Expected.Property, res.Property()) assert.Equal(tt, tc.Expected.PluginSystem, res.PluginSystem()) assert.Equal(tt, tc.Expected.WidgetSystem, res.WidgetSystem()) + assert.Equal(tt, tc.Expected.WidgetAlignSystem, res.WidgetAlignSystem()) assert.Equal(tt, tc.Expected.Project, res.Project()) } }() @@ -313,6 +338,7 @@ func TestBuilder_MustBuild(t *testing.T) { res = New(). ID(tc.Id). WidgetSystem(tc.WidgetSystem). + WidgetAlignSystem(tc.WidgetAlignSystem). Project(tc.Project). PluginSystem(tc.PluginSystem). Property(tc.Property). diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index bad56a386..68554962e 100644 --- a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -10,14 +10,15 @@ import ( var ErrSceneIsLocked error = errors.New("scene is locked") type Scene struct { - id id.SceneID - project id.ProjectID - team id.TeamID - rootLayer id.LayerID - widgetSystem *WidgetSystem - pluginSystem *PluginSystem - updatedAt time.Time - property id.PropertyID + id id.SceneID + project id.ProjectID + team id.TeamID + rootLayer id.LayerID + widgetSystem *WidgetSystem + widgetAlignSystem *WidgetAlignSystem + pluginSystem *PluginSystem + updatedAt time.Time + property id.PropertyID } func (s *Scene) ID() id.SceneID { @@ -69,6 +70,13 @@ func (s *Scene) WidgetSystem() *WidgetSystem { return s.widgetSystem } +func (s *Scene) WidgetAlignSystem() *WidgetAlignSystem { + if s == nil { + return nil + } + return s.widgetAlignSystem +} + func (s *Scene) PluginSystem() *PluginSystem { if s == nil { return nil diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index b7354a333..4c11e326e 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -63,13 +63,22 @@ func TestScene_SetUpdatedAt(t *testing.T) { func TestScene_Properties(t *testing.T) { pid1 := id.NewPropertyID() pid2 := 
id.NewPropertyID() - s := New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).Property(pid1).WidgetSystem( - NewWidgetSystem([]*Widget{ - MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", pid2, true), - })).MustBuild() + s := New(). + NewID(). + Team(id.NewTeamID()). + RootLayer(id.NewLayerID()). + Property(pid1). + WidgetSystem( + NewWidgetSystem( + []*Widget{ + MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), + }, + ), + ). + WidgetAlignSystem(NewWidgetAlignSystem()). + MustBuild() assert.Equal(t, []id.PropertyID{pid1, pid2}, s.Properties()) - } func TestSceneNil(t *testing.T) { @@ -77,6 +86,7 @@ func TestSceneNil(t *testing.T) { assert.Nil(t, s.Properties()) assert.True(t, s.ID().IsNil()) assert.Nil(t, s.WidgetSystem()) + assert.Nil(t, s.WidgetAlignSystem()) assert.True(t, s.Project().IsNil()) assert.True(t, s.Team().IsNil()) assert.True(t, s.RootLayer().IsNil()) diff --git a/pkg/scene/widget.go b/pkg/scene/widget.go index 43055e6ce..5e7943ad4 100644 --- a/pkg/scene/widget.go +++ b/pkg/scene/widget.go @@ -10,9 +10,10 @@ type Widget struct { extension id.PluginExtensionID property id.PropertyID enabled bool + extended bool } -func NewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) (*Widget, error) { +func NewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled, extended bool) (*Widget, error) { if !plugin.Validate() || string(extension) == "" || id.ID(property).IsNil() { return nil, id.ErrInvalidID } @@ -23,11 +24,12 @@ func NewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtension extension: extension, property: property, enabled: enabled, + extended: extended, }, nil } -func MustNewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool) *Widget { - w, err := NewWidget(wid, plugin, extension, 
property, enabled) +func MustNewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool, extended bool) *Widget { + w, err := NewWidget(wid, plugin, extension, property, enabled, extended) if err != nil { panic(err) } @@ -51,9 +53,29 @@ func (w *Widget) Property() id.PropertyID { } func (w *Widget) Enabled() bool { + if w == nil { + return false + } return w.enabled } +func (w *Widget) Extended() bool { + if w == nil { + return false + } + return w.extended +} + func (w *Widget) SetEnabled(enabled bool) { + if w == nil { + return + } w.enabled = enabled } + +func (w *Widget) SetExtended(extended bool) { + if w == nil { + return + } + w.extended = extended +} diff --git a/pkg/scene/widget_align_system.go b/pkg/scene/widget_align_system.go new file mode 100644 index 000000000..239ce8baa --- /dev/null +++ b/pkg/scene/widget_align_system.go @@ -0,0 +1,122 @@ +package scene + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type WidgetLocation struct { + Zone WidgetZoneType + Section WidgetSectionType + Area WidgetAreaType +} + +func (l WidgetLocation) Horizontal() bool { + return l.Section == WidgetSectionCenter +} + +func (l WidgetLocation) Vertical() bool { + return l.Area == WidgetAreaMiddle +} + +// WidgetAlignSystem is the layout structure of any enabled widgets that will be displayed over the scene. +type WidgetAlignSystem struct { + inner *WidgetZone + outer *WidgetZone +} + +type WidgetZoneType string + +const ( + WidgetZoneInner WidgetZoneType = "inner" + WidgetZoneOuter WidgetZoneType = "outer" +) + +// NewWidgetAlignSystem returns a new widget align system. +func NewWidgetAlignSystem() *WidgetAlignSystem { + return &WidgetAlignSystem{} +} + +// Zone will return a specific zone in the align system. 
+func (was *WidgetAlignSystem) Zone(zone WidgetZoneType) *WidgetZone { + if was == nil { + return nil + } + switch zone { + case WidgetZoneInner: + if was.inner == nil { + was.inner = NewWidgetZone() + } + return was.inner + case WidgetZoneOuter: + if was.outer == nil { + was.outer = NewWidgetZone() + } + return was.outer + } + return nil +} + +// Remove a widget from the align system. +func (was *WidgetAlignSystem) Remove(wid id.WidgetID) { + if was == nil { + return + } + + was.inner.Remove(wid) + was.outer.Remove(wid) +} + +func (was *WidgetAlignSystem) Area(loc WidgetLocation) *WidgetArea { + return was.Zone(loc.Zone).Section(loc.Section).Area(loc.Area) +} + +func (was *WidgetAlignSystem) Find(wid id.WidgetID) (int, WidgetLocation) { + if was == nil { + return -1, WidgetLocation{} + } + + if i, section, area := was.inner.Find(wid); i >= 0 { + return i, WidgetLocation{ + Zone: WidgetZoneInner, + Section: section, + Area: area, + } + } + if i, section, area := was.outer.Find(wid); i >= 0 { + return i, WidgetLocation{ + Zone: WidgetZoneOuter, + Section: section, + Area: area, + } + } + + return -1, WidgetLocation{} +} + +func (was *WidgetAlignSystem) Move(wid id.WidgetID, location WidgetLocation, index int) { + if was == nil { + return + } + + if i, loc := was.Find(wid); i < 0 { + return + } else if loc != location { + was.Area(loc).Remove(wid) + was.Area(location).Add(wid, index) + } else { + was.Area(location).Move(i, index) + } +} + +func (w *WidgetAlignSystem) SetZone(t WidgetZoneType, z *WidgetZone) { + if w == nil { + return + } + + switch t { + case WidgetZoneInner: + w.inner = z + case WidgetZoneOuter: + w.outer = z + } +} diff --git a/pkg/scene/widget_align_system_test.go b/pkg/scene/widget_align_system_test.go new file mode 100644 index 000000000..813957322 --- /dev/null +++ b/pkg/scene/widget_align_system_test.go @@ -0,0 +1,334 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/stretchr/testify/assert" +) + +func TestNewWidgetAlignSystem(t *testing.T) { + assert.Equal(t, &WidgetAlignSystem{}, NewWidgetAlignSystem()) +} + +func TestWidgetAlignSystem_Zone(t *testing.T) { + was := NewWidgetAlignSystem() + assert.Same(t, was.inner, was.Zone(WidgetZoneInner)) + assert.NotNil(t, was.inner) + assert.Same(t, was.outer, was.Zone(WidgetZoneOuter)) + assert.NotNil(t, was.outer) +} + +func TestWidgetAlignSystem_Area(t *testing.T) { + was := NewWidgetAlignSystem() + assert.Same(t, was.inner.right.middle, was.Area(WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionRight, + Area: WidgetAreaMiddle, + })) +} + +func TestWidgetAlignSystem_Find(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + wid3 := id.NewWidgetID() + wid4 := id.NewWidgetID() + wid5 := id.NewWidgetID() + + testCases := []struct { + Name string + Input id.WidgetID + Expected1 int + Expected2 WidgetLocation + Nil bool + }{ + { + Name: "inner", + Input: wid2, + Expected1: 1, + Expected2: WidgetLocation{Zone: WidgetZoneInner, Section: WidgetSectionLeft, Area: WidgetAreaTop}, + }, + { + Name: "outer", + Input: wid4, + Expected1: 0, + Expected2: WidgetLocation{Zone: WidgetZoneOuter, Section: WidgetSectionLeft, Area: WidgetAreaTop}, + }, + { + Name: "invalid id", + Input: id.NewWidgetID(), + Expected1: -1, + Expected2: WidgetLocation{}, + }, + { + Name: "Return nil if no widget section", + Input: wid1, + Nil: true, + Expected1: -1, + Expected2: WidgetLocation{}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + index, location := (*WidgetAlignSystem)(nil).Find(tc.Input) + assert.Equal(tt, tc.Expected1, index) + assert.Equal(tt, tc.Expected2, location) + return + } + + was := NewWidgetAlignSystem() + was.Zone(WidgetZoneInner).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid1, wid2, wid3}) + 
was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid4, wid5}) + + index, location := was.Find(tc.Input) + assert.Equal(tt, tc.Expected1, index) + assert.Equal(tt, tc.Expected2, location) + }) + } +} + +func TestWidgetAlignSystem_Remove(t *testing.T) { + wid := id.NewWidgetID() + + testCases := []struct { + Name string + Zone WidgetZoneType + Input id.WidgetID + Expected []id.WidgetID + Nil bool + }{ + { + Name: "inner: remove a widget from widget section", + Zone: WidgetZoneInner, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "inner: couldn't find widgetId", + Zone: WidgetZoneInner, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "outer: remove a widget from widget section", + Zone: WidgetZoneOuter, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "outer: couldn't find widgetId", + Zone: WidgetZoneOuter, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "nil", + Zone: WidgetZoneInner, + Input: wid, + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + (*WidgetZone)(nil).Remove(tc.Input) + return + } + + ws := NewWidgetAlignSystem() + ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).Add(wid, -1) + ws.Remove(tc.Input) + assert.Equal(tt, tc.Expected, ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).WidgetIDs()) + }) + } +} + +func TestWidgetAlignSystem_Move(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + wid3 := id.NewWidgetID() + wid4 := id.NewWidgetID() + wid5 := id.NewWidgetID() + + testCases := []struct { + Name string + Input1 id.WidgetID + Input2 WidgetLocation + Input3 int + Source WidgetLocation + ExpectedSource []id.WidgetID + ExpectedDest []id.WidgetID + Nil bool + }{ + { + Name: "move a widget in the same area with positive index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, 
+ Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + Input3: 1, + Source: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ExpectedSource: []id.WidgetID{wid2, wid1, wid3}, + ExpectedDest: []id.WidgetID{wid2, wid1, wid3}, + }, + { + Name: "move a widget in the same area with negative index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + Input3: -1, + Source: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ExpectedSource: []id.WidgetID{wid2, wid3, wid1}, + ExpectedDest: []id.WidgetID{wid2, wid3, wid1}, + }, + { + Name: "move a widget to a different area with positive index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaBottom, + }, + Input3: 1, + Source: WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionRight, + Area: WidgetAreaTop, + }, + ExpectedSource: []id.WidgetID{wid2, wid3}, + ExpectedDest: []id.WidgetID{wid4, wid1, wid5}, + }, + { + Name: "move a widget to a different area with negative index", + Input1: wid1, + Input2: WidgetLocation{ + Zone: WidgetZoneInner, + Section: WidgetSectionLeft, + Area: WidgetAreaBottom, + }, + Input3: -1, + Source: WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionCenter, + Area: WidgetAreaMiddle, + }, + ExpectedSource: []id.WidgetID{wid2, wid3}, + ExpectedDest: []id.WidgetID{wid4, wid5, wid1}, + }, + { + Name: "nil", + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + (*WidgetAlignSystem)(nil).Move(tc.Input1, tc.Input2, tc.Input3) + return + } + + ws := NewWidgetAlignSystem() + ws.Area(tc.Source).AddAll([]id.WidgetID{wid1, wid2, wid3}) + if tc.Source != tc.Input2 { + ws.Area(tc.Input2).AddAll([]id.WidgetID{wid4, wid5}) + } + + ws.Move(tc.Input1, 
tc.Input2, tc.Input3) + + assert.Equal(tt, tc.ExpectedSource, ws.Area(tc.Source).WidgetIDs()) + assert.Equal(tt, tc.ExpectedDest, ws.Area(tc.Input2).WidgetIDs()) + }) + } +} + +func TestWidgetAlignSystem_SetZone(t *testing.T) { + type args struct { + t WidgetZoneType + z *WidgetZone + } + tests := []struct { + name string + args args + nil bool + }{ + { + name: "inner", + args: args{ + t: WidgetZoneInner, + z: &WidgetZone{}, + }, + }, + { + name: "outer", + args: args{ + t: WidgetZoneOuter, + z: &WidgetZone{}, + }, + }, + { + name: "nil area", + args: args{ + t: WidgetZoneInner, + z: nil, + }, + }, + { + name: "nil", + args: args{ + t: WidgetZoneInner, + z: &WidgetZone{}, + }, + nil: true, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var w *WidgetAlignSystem + if !tt.nil { + w = &WidgetAlignSystem{} + } + + w.SetZone(tt.args.t, tt.args.z) + + if !tt.nil { + var z2 *WidgetZone + switch tt.args.t { + case WidgetZoneInner: + z2 = w.inner + case WidgetZoneOuter: + z2 = w.outer + } + assert.Same(t, tt.args.z, z2) + } + }) + } +} diff --git a/pkg/scene/widget_area.go b/pkg/scene/widget_area.go new file mode 100644 index 000000000..7ded7c370 --- /dev/null +++ b/pkg/scene/widget_area.go @@ -0,0 +1,126 @@ +package scene + +import "github.com/reearth/reearth-backend/pkg/id" + +// WidgetArea has the widgets and alignment information found in each part area of a section. +type WidgetArea struct { + widgetIds []id.WidgetID + align WidgetAlignType +} + +type WidgetAlignType string + +const ( + WidgetAlignStart WidgetAlignType = "start" + WidgetAlignCentered WidgetAlignType = "centered" + WidgetAlignEnd WidgetAlignType = "end" +) + +func NewWidgetArea(widgetIds []id.WidgetID, align WidgetAlignType) *WidgetArea { + wa := &WidgetArea{} + wa.AddAll(widgetIds) + wa.SetAlignment(align) + return wa +} + +// WidgetIds will return a slice of widget ids from a specific area. 
+func (a *WidgetArea) WidgetIDs() []id.WidgetID { + if a == nil { + return nil + } + + return append([]id.WidgetID{}, a.widgetIds...) +} + +// Alignment will return the alignment of a specific area. +func (a *WidgetArea) Alignment() WidgetAlignType { + if a == nil { + return "" + } + + return a.align +} + +func (a *WidgetArea) Find(wid id.WidgetID) int { + if a == nil { + return -1 + } + + for i, w := range a.widgetIds { + if w == wid { + return i + } + } + return -1 +} + +func (a *WidgetArea) Add(wid id.WidgetID, index int) { + if a == nil || wid.Contains(a.widgetIds) { + return + } + + a.widgetIds = insertWidgetID(a.widgetIds, wid, index) +} + +func (a *WidgetArea) AddAll(wids []id.WidgetID) { + if a == nil { + return + } + + widgetIds := make([]id.WidgetID, 0, len(wids)) + for _, w := range wids { + if w.Contains(a.widgetIds) || w.Contains(widgetIds) { + continue + } + widgetIds = append(widgetIds, w) + } + + a.widgetIds = widgetIds +} + +func (a *WidgetArea) SetAlignment(at WidgetAlignType) { + if a == nil { + return + } + + if at == WidgetAlignStart || at == WidgetAlignCentered || at == WidgetAlignEnd { + a.align = at + } else { + a.align = WidgetAlignStart + } +} + +func (a *WidgetArea) Remove(wid id.WidgetID) { + if a == nil { + return + } + + for i, w := range a.widgetIds { + if w == wid { + a.widgetIds = removeWidgetID(a.widgetIds, i) + return + } + } +} + +func (a *WidgetArea) Move(from, to int) { + if a == nil { + return + } + + wid := a.widgetIds[from] + a.widgetIds = insertWidgetID(removeWidgetID(a.widgetIds, from), wid, to) +} + +// insertWidgetID is used in moveInt to add the widgetID to a new position(index). +func insertWidgetID(array []id.WidgetID, value id.WidgetID, index int) []id.WidgetID { + if index < 0 { + return append(array, value) + } + return append(array[:index], append([]id.WidgetID{value}, array[index:]...)...) +} + +// removeWidgetID is used in moveInt to remove the widgetID from original position(index). 
+func removeWidgetID(array []id.WidgetID, index int) []id.WidgetID { + return append(array[:index], array[index+1:]...) +} diff --git a/pkg/scene/widget_area_test.go b/pkg/scene/widget_area_test.go new file mode 100644 index 000000000..2c16d20ae --- /dev/null +++ b/pkg/scene/widget_area_test.go @@ -0,0 +1,327 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestWidgetArea(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + + testCases := []struct { + Name string + Input1 []id.WidgetID + Input2 WidgetAlignType + Expected *WidgetArea + }{ + { + Name: "New widget area with proper widget ids and widget align type", + Input1: []id.WidgetID{wid1, wid2}, + Input2: WidgetAlignEnd, + Expected: &WidgetArea{widgetIds: []id.WidgetID{wid1, wid2}, align: WidgetAlignEnd}, + }, + { + Name: "New widget area with duplicated widget ids", + Input1: []id.WidgetID{wid1, wid1}, + Input2: WidgetAlignEnd, + Expected: &WidgetArea{widgetIds: []id.WidgetID{wid1}, align: WidgetAlignEnd}, + }, + { + Name: "New widget area with wrong widget align type", + Input1: []id.WidgetID{wid1, wid2}, + Input2: "wrong", + Expected: &WidgetArea{widgetIds: []id.WidgetID{wid1, wid2}, align: WidgetAlignStart}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + wa := NewWidgetArea(tc.Input1, tc.Input2) + assert.Equal(t, tc.Expected, wa) + }) + } +} + +func TestWidgetArea_WidgetIDs(t *testing.T) { + wid := id.NewWidgetID() + wa := NewWidgetArea([]id.WidgetID{wid}, WidgetAlignStart) + assert.Equal(t, wa.widgetIds, wa.WidgetIDs()) + assert.Nil(t, (*WidgetArea)(nil).WidgetIDs()) +} + +func TestWidgetArea_Alignment(t *testing.T) { + wa := NewWidgetArea(nil, WidgetAlignEnd) + assert.Equal(t, WidgetAlignEnd, wa.Alignment()) + assert.Equal(t, WidgetAlignType(""), (*WidgetArea)(nil).Alignment()) +} + +func TestWidgetArea_Find(t *testing.T) { + wid := 
id.NewWidgetID() + wid2 := id.NewWidgetID() + + testCases := []struct { + Name string + Input id.WidgetID + Expected int + Nil bool + }{ + { + Name: "Return index if contains widget id", + Input: wid, + Expected: 0, + }, + { + Name: "Return -1 if doesn't contain widget id", + Input: wid2, + Expected: -1, + }, + { + Name: "Return nil if WidgetArea is nil", + Nil: true, + Expected: -1, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea([]id.WidgetID{wid}, WidgetAlignStart) + } + assert.Equal(tt, tc.Expected, wa.Find(tc.Input)) + }) + } +} + +func TestWidgetArea_Add(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + wid3 := id.NewWidgetID() + + testCases := []struct { + Name string + Nil bool + Input id.WidgetID + Input2 int + Expected []id.WidgetID + }{ + { + Name: "add a widget id", + Input: wid3, + Input2: -1, + Expected: []id.WidgetID{wid1, wid2, wid3}, + }, + { + Name: "add a widget id but already exists", + Input: wid1, + Input2: -1, + Expected: []id.WidgetID{wid1, wid2}, + }, + { + Name: "insert a widget id", + Input: wid3, + Input2: 1, + Expected: []id.WidgetID{wid1, wid3, wid2}, + }, + { + Name: "nil widget area", + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + (*WidgetArea)(nil).Add(wid1, -1) + return + } + + wa := NewWidgetArea([]id.WidgetID{wid1, wid2}, WidgetAlignStart) + wa.Add(tc.Input, tc.Input2) + assert.Equal(tt, tc.Expected, wa.WidgetIDs()) + }) + } +} + +func TestWidgetArea_AddAll(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + + testCases := []struct { + Name string + Nil bool + Input []id.WidgetID + Expected []id.WidgetID + }{ + { + Name: "add widget ids", + Input: []id.WidgetID{wid1, wid2}, + Expected: []id.WidgetID{wid1, wid2}, + }, + { + Name: "add widget ids but duplicated", + Input: 
[]id.WidgetID{wid1, wid1, wid2}, + Expected: []id.WidgetID{wid1, wid2}, + }, + { + Name: "nil widget area", + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + (*WidgetArea)(nil).AddAll(nil) + return + } + + wa := NewWidgetArea(nil, WidgetAlignStart) + wa.AddAll(tc.Input) + assert.Equal(tt, tc.Expected, wa.WidgetIDs()) + }) + } +} + +func TestWidgetArea_SetAlignment(t *testing.T) { + testCases := []struct { + Name string + Nil bool + Input WidgetAlignType + Expected WidgetAlignType + }{ + { + Name: "set alignment", + Input: WidgetAlignEnd, + Expected: WidgetAlignEnd, + }, + { + Name: "set alignment with wrong alignment", + Input: "wrong", + Expected: WidgetAlignStart, + }, + { + Name: "set alignment when widget area is nil", + Nil: true, + Input: WidgetAlignStart, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea(nil, WidgetAlignStart) + } + wa.SetAlignment(tc.Input) + if !tc.Nil { + assert.Equal(t, tc.Expected, wa.align) + } + }) + } +} + +func TestWidgetArea_Remove(t *testing.T) { + wid := id.NewWidgetID() + testCases := []struct { + Name string + Input id.WidgetID + Expected []id.WidgetID + Nil bool + }{ + { + Name: "Remove a widget from widget area", + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "Remove a widget from widget area that doesn't exist", + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "Return nil if no widget area", + Input: wid, + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea([]id.WidgetID{wid}, "") + } + wa.Remove(tc.Input) + if !tc.Nil { + assert.Equal(tt, tc.Expected, wa.widgetIds) + } + }) + } +} + +func TestWidgetArea_Move(t *testing.T) { + wid := id.NewWidgetID() + 
wid2 := id.NewWidgetID() + wid3 := id.NewWidgetID() + + testCases := []struct { + Name string + Input1, Input2 int + Expected []id.WidgetID + Nil bool + }{ + { + Name: "Move widget Id", + Input1: 1, + Input2: 2, + Expected: []id.WidgetID{wid, wid3, wid2}, + }, + { + Name: "Move widget Id", + Input1: 2, + Input2: 0, + Expected: []id.WidgetID{wid3, wid, wid2}, + }, + { + Name: "Nil", + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + var wa *WidgetArea + if !tc.Nil { + wa = NewWidgetArea([]id.WidgetID{wid, wid2, wid3}, "") + } + wa.Move(tc.Input1, tc.Input2) + if !tc.Nil { + assert.Equal(tt, tc.Expected, wa.widgetIds) + } + }) + } +} diff --git a/pkg/scene/widget_section.go b/pkg/scene/widget_section.go new file mode 100644 index 000000000..2fbcdf489 --- /dev/null +++ b/pkg/scene/widget_section.go @@ -0,0 +1,89 @@ +package scene + +import "github.com/reearth/reearth-backend/pkg/id" + +// WidgetSection is the structure of each section of the align system. 
+type WidgetSection struct { + top *WidgetArea + middle *WidgetArea + bottom *WidgetArea +} + +type WidgetAreaType string + +var ( + WidgetAreaTop WidgetAreaType = "top" + WidgetAreaMiddle WidgetAreaType = "middle" + WidgetAreaBottom WidgetAreaType = "bottom" +) + +func NewWidgetSection() *WidgetSection { + return &WidgetSection{} +} + +func (s *WidgetSection) Area(t WidgetAreaType) *WidgetArea { + if s == nil { + return nil + } + + switch t { + case WidgetAreaTop: + if s.top == nil { + s.top = NewWidgetArea(nil, WidgetAlignStart) + } + return s.top + case WidgetAreaMiddle: + if s.middle == nil { + s.middle = NewWidgetArea(nil, WidgetAlignStart) + } + return s.middle + case WidgetAreaBottom: + if s.bottom == nil { + s.bottom = NewWidgetArea(nil, WidgetAlignStart) + } + return s.bottom + } + return nil +} + +func (s *WidgetSection) Find(wid id.WidgetID) (int, WidgetAreaType) { + if s == nil { + return -1, "" + } + + if i := s.top.Find(wid); i >= 0 { + return i, WidgetAreaTop + } + if i := s.middle.Find(wid); i >= 0 { + return i, WidgetAreaMiddle + } + if i := s.bottom.Find(wid); i >= 0 { + return i, WidgetAreaBottom + } + return -1, "" +} + +func (s *WidgetSection) Remove(wid id.WidgetID) { + if s == nil { + return + } + + s.top.Remove(wid) + s.middle.Remove(wid) + s.bottom.Remove(wid) +} + +func (s *WidgetSection) SetArea(t WidgetAreaType, a *WidgetArea) { + if s == nil { + return + } + + switch t { + case WidgetAreaTop: + s.top = a + case WidgetAreaMiddle: + s.middle = a + case WidgetAreaBottom: + s.bottom = a + } +} diff --git a/pkg/scene/widget_section_test.go b/pkg/scene/widget_section_test.go new file mode 100644 index 000000000..9346a63da --- /dev/null +++ b/pkg/scene/widget_section_test.go @@ -0,0 +1,242 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetSection(t *testing.T) { + assert.Equal(t, &WidgetSection{}, NewWidgetSection()) +} + +func 
TestWidgetSection_Area(t *testing.T) { + ws := NewWidgetSection() + assert.Same(t, ws.top, ws.Area(WidgetAreaTop)) + assert.NotNil(t, ws.top) + assert.Same(t, ws.middle, ws.Area(WidgetAreaMiddle)) + assert.NotNil(t, ws.middle) + assert.Same(t, ws.bottom, ws.Area(WidgetAreaBottom)) + assert.NotNil(t, ws.bottom) +} + +func TestWidgetSection_Find(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + wid3 := id.NewWidgetID() + wid4 := id.NewWidgetID() + wid5 := id.NewWidgetID() + wid6 := id.NewWidgetID() + wid7 := id.NewWidgetID() + + testCases := []struct { + Name string + Input id.WidgetID + Expected1 int + Expected2 WidgetAreaType + Nil bool + }{ + { + Name: "top", + Input: wid2, + Expected1: 1, + Expected2: WidgetAreaTop, + }, + { + Name: "middle", + Input: wid4, + Expected1: 0, + Expected2: WidgetAreaMiddle, + }, + { + Name: "bottom", + Input: wid7, + Expected1: 1, + Expected2: WidgetAreaBottom, + }, + { + Name: "invalid id", + Input: id.NewWidgetID(), + Expected1: -1, + Expected2: "", + }, + { + Name: "Return nil if no widget section", + Input: wid1, + Nil: true, + Expected1: -1, + Expected2: "", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + index, area := (*WidgetSection)(nil).Find(tc.Input) + assert.Equal(tt, tc.Expected1, index) + assert.Equal(tt, tc.Expected2, area) + return + } + + ws := NewWidgetSection() + ws.Area(WidgetAreaTop).AddAll([]id.WidgetID{wid1, wid2, wid3}) + ws.Area(WidgetAreaMiddle).AddAll([]id.WidgetID{wid4, wid5}) + ws.Area(WidgetAreaBottom).AddAll([]id.WidgetID{wid6, wid7}) + + index, area := ws.Find(tc.Input) + assert.Equal(tt, tc.Expected1, index) + assert.Equal(tt, tc.Expected2, area) + }) + } +} + +func TestWidgetSection_Remove(t *testing.T) { + wid := id.NewWidgetID() + + testCases := []struct { + Name string + Area WidgetAreaType + Input id.WidgetID + Expected []id.WidgetID + Nil bool + }{ + { + Name: "top: remove a widget from widget 
section", + Area: WidgetAreaTop, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "top: couldn't find widgetId", + Area: WidgetAreaTop, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "middle: remove a widget from widget section", + Area: WidgetAreaMiddle, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "middle: couldn't find widgetId", + Area: WidgetAreaMiddle, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "bottom: remove a widget from widget section", + Area: WidgetAreaBottom, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "bottom: couldn't find widgetId", + Area: WidgetAreaBottom, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "nil", + Area: WidgetAreaTop, + Input: wid, + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + (*WidgetSection)(nil).Remove(tc.Input) + return + } + + ws := NewWidgetSection() + ws.Area(tc.Area).Add(wid, -1) + ws.Remove(tc.Input) + assert.Equal(tt, tc.Expected, ws.Area(tc.Area).WidgetIDs()) + }) + } +} + +func TestWidgetSection_SetArea(t *testing.T) { + type args struct { + t WidgetAreaType + a *WidgetArea + } + tests := []struct { + name string + args args + nil bool + }{ + { + name: "top", + args: args{ + t: WidgetAreaTop, + a: &WidgetArea{}, + }, + }, + { + name: "middle", + args: args{ + t: WidgetAreaMiddle, + a: &WidgetArea{}, + }, + }, + { + name: "bottom", + args: args{ + t: WidgetAreaBottom, + a: &WidgetArea{}, + }, + }, + { + name: "nil area", + args: args{ + t: WidgetAreaTop, + a: nil, + }, + }, + { + name: "nil", + args: args{ + t: WidgetAreaTop, + a: &WidgetArea{}, + }, + nil: true, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var s *WidgetSection + if !tt.nil { + s = &WidgetSection{} + } + + s.SetArea(tt.args.t, tt.args.a) + + if !tt.nil { + var a2 
*WidgetArea + switch tt.args.t { + case WidgetAreaTop: + a2 = s.top + case WidgetAreaMiddle: + a2 = s.middle + case WidgetAreaBottom: + a2 = s.bottom + } + assert.Same(t, tt.args.a, a2) + } + }) + } +} diff --git a/pkg/scene/widget_system_test.go b/pkg/scene/widget_system_test.go index 9f765e9cf..d28772cb5 100644 --- a/pkg/scene/widget_system_test.go +++ b/pkg/scene/widget_system_test.go @@ -14,62 +14,44 @@ func TestNewWidgetSystem(t *testing.T) { testCases := []struct { Name string Input []*Widget - Expected *WidgetSystem + Expected []*Widget }{ { Name: "nil widget list", Input: nil, - Expected: &WidgetSystem{widgets: []*Widget{}}, + Expected: []*Widget{}, }, { Name: "widget list with nil", Input: []*Widget{nil}, - Expected: &WidgetSystem{widgets: []*Widget{}}, + Expected: []*Widget{}, }, { - Name: "widget list with matched values", + Name: "widget list", Input: []*Widget{ - { - id: wid, - plugin: pid, - extension: "eee", - property: pr, - enabled: true, - }, + MustNewWidget(wid, pid, "see", pr, true, false), + }, + Expected: []*Widget{ + MustNewWidget(wid, pid, "see", pr, true, false), }, - Expected: &WidgetSystem{widgets: []*Widget{ - MustNewWidget(wid, pid, "eee", pr, true), - }}, }, { - Name: "widget list with matched values", + Name: "widget list with duplicatd values", Input: []*Widget{ - { - id: wid, - plugin: pid, - extension: "eee", - property: pr, - enabled: true, - }, - { - id: wid, - plugin: pid, - extension: "eee", - property: pr, - enabled: true, - }, + MustNewWidget(wid, pid, "see", pr, true, false), + MustNewWidget(wid, pid, "see", pr, true, false), + }, + Expected: []*Widget{ + MustNewWidget(wid, pid, "see", pr, true, false), }, - Expected: &WidgetSystem{widgets: []*Widget{ - MustNewWidget(wid, pid, "eee", pr, true), - }}, }, } + for _, tc := range testCases { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res := NewWidgetSystem(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(tt, tc.Expected, 
NewWidgetSystem(tc.Input).Widgets()) }) } } @@ -79,96 +61,85 @@ func TestWidgetSystem_Add(t *testing.T) { pr := id.NewPropertyID() wid := id.NewWidgetID() testCases := []struct { - Name string - Input *Widget - WS, Expected *WidgetSystem + Name string + Widgets []*Widget + Input *Widget + Expected []*Widget + Nil bool }{ { - Name: "add new widget", - Input: &Widget{ - id: wid, - plugin: pid, - extension: "eee", - property: pr, - enabled: true, - }, - WS: NewWidgetSystem([]*Widget{}), - Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Name: "add new widget", + Input: MustNewWidget(wid, pid, "see", pr, true, false), + Expected: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, }, { Name: "add nil widget", Input: nil, - WS: NewWidgetSystem([]*Widget{}), - Expected: NewWidgetSystem([]*Widget{}), + Expected: []*Widget{}, }, { - Name: "add to nil widgetSystem", - Input: &Widget{ - id: wid, - plugin: pid, - extension: "eee", - property: pr, - enabled: true, - }, - WS: nil, + Name: "add to nil widgetSystem", + Input: MustNewWidget(wid, pid, "see", pr, true, false), Expected: nil, + Nil: true, }, { - Name: "add existing widget", - Input: &Widget{ - id: wid, - plugin: pid, - extension: "eee", - property: pr, - enabled: true, - }, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), - Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + Name: "add existing widget", + Widgets: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, + Input: MustNewWidget(wid, pid, "see", pr, true, false), + Expected: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, }, } for _, tc := range testCases { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - tc.WS.Add(tc.Input) - assert.Equal(tt, tc.Expected, tc.WS) + var ws *WidgetSystem + if !tc.Nil { + ws = NewWidgetSystem(tc.Widgets) + } + ws.Add(tc.Input) + assert.Equal(tt, tc.Expected, ws.Widgets()) }) } } func 
TestWidgetSystem_Remove(t *testing.T) { + wid := id.NewWidgetID() + wid2 := id.NewWidgetID() pid := id.MustPluginID("xxx~1.1.1") pid2 := id.MustPluginID("xxx~1.1.2") pr := id.NewPropertyID() - w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", pr, true) - w2 := MustNewWidget(id.NewWidgetID(), pid, "e1", pr, true) - w3 := MustNewWidget(id.NewWidgetID(), pid, "e2", pr, true) - w4 := MustNewWidget(id.NewWidgetID(), pid2, "e1", pr, true) testCases := []struct { - Name string - ID id.WidgetID - WS, Expected *WidgetSystem + Name string + Input id.WidgetID + Nil bool }{ { - Name: "remove a widget", - ID: w1.ID(), - WS: NewWidgetSystem([]*Widget{w1, w2, w3, w4}), - Expected: NewWidgetSystem([]*Widget{w2, w3, w4}), + Name: "remove a widget", + Input: wid, }, { - Name: "remove from nil widgetSystem", - ID: w1.ID(), - WS: nil, - Expected: nil, + Name: "remove from nil widgetSystem", + Input: wid, + Nil: true, }, } for _, tc := range testCases { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - tc.WS.Remove(tc.ID) - assert.Equal(tt, tc.Expected, tc.WS) + var ws *WidgetSystem + if !tc.Nil { + ws = NewWidgetSystem([]*Widget{ + MustNewWidget(wid, pid2, "e1", pr, true, false), + MustNewWidget(wid2, pid, "e1", pr, true, false), + }) + assert.True(tt, ws.Has(tc.Input)) + } + ws.Remove(tc.Input) + assert.False(tt, ws.Has(tc.Input)) }) } } @@ -176,9 +147,9 @@ func TestWidgetSystem_Remove(t *testing.T) { func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { pid := id.MustPluginID("xxx~1.1.1") pid2 := id.MustPluginID("xxx~1.1.2") - w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true) - w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true) - w3 := MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true) + w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true, false) + w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true, false) + w3 := MustNewWidget(id.NewWidgetID(), pid2, "e1", 
id.NewPropertyID(), true, false) testCases := []struct { Name string @@ -213,10 +184,10 @@ func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { pid := id.MustPluginID("xxx~1.1.1") pid2 := id.MustPluginID("xxx~1.1.2") - w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true) - w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true) - w3 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true) - w4 := MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true) + w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true, false) + w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true, false) + w3 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true, false) + w4 := MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true, false) testCases := []struct { Name string @@ -266,8 +237,8 @@ func TestWidgetSystem_ReplacePlugin(t *testing.T) { Name: "replace a widget", PID: pid, NewID: pid2, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), - Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true)}), + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true, false)}), }, { Name: "replace with nil widget", @@ -305,8 +276,8 @@ func TestWidgetSystem_Properties(t *testing.T) { { Name: "get properties", WS: NewWidgetSystem([]*Widget{ - MustNewWidget(wid, pid, "eee", pr, true), - MustNewWidget(wid2, pid, "eee", pr2, true), + MustNewWidget(wid, pid, "eee", pr, true, false), + MustNewWidget(wid2, pid, "eee", pr2, true, false), }), Expected: []id.PropertyID{pr, pr2}, }, @@ -340,12 +311,12 @@ func TestWidgetSystem_Widgets(t *testing.T) { { Name: "get widgets", WS: NewWidgetSystem([]*Widget{ - MustNewWidget(wid, pid, "eee", pr, true), - 
MustNewWidget(wid2, pid, "eee", pr2, true), + MustNewWidget(wid, pid, "eee", pr, true, false), + MustNewWidget(wid2, pid, "eee", pr2, true, false), }), Expected: []*Widget{ - MustNewWidget(wid, pid, "eee", pr, true), - MustNewWidget(wid2, pid, "eee", pr2, true), + MustNewWidget(wid, pid, "eee", pr, true, false), + MustNewWidget(wid2, pid, "eee", pr2, true, false), }, }, { @@ -377,8 +348,8 @@ func TestWidgetSystem_Widget(t *testing.T) { { Name: "get a widget", ID: wid, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), - Expected: MustNewWidget(wid, pid, "eee", pr, true), + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + Expected: MustNewWidget(wid, pid, "eee", pr, true, false), }, { Name: "dont has the widget", @@ -416,7 +387,7 @@ func TestWidgetSystem_Has(t *testing.T) { { Name: "has a widget", ID: wid, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true)}), + WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), Expected: true, }, { diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go index 07551a1a2..aa34d4c03 100644 --- a/pkg/scene/widget_test.go +++ b/pkg/scene/widget_test.go @@ -18,14 +18,8 @@ func TestNewWidget(t *testing.T) { Extension id.PluginExtensionID Property id.PropertyID Enabled bool - Expected struct { - Id id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID - Enabled bool - } - err error + Extended bool + Err error }{ { Name: "success new widget", @@ -34,20 +28,8 @@ func TestNewWidget(t *testing.T) { Extension: "eee", Property: pr, Enabled: true, - Expected: struct { - Id id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID - Enabled bool - }{ - Id: wid, - Plugin: pid, - Extension: "eee", - Property: pr, - Enabled: true, - }, - err: nil, + Extended: true, + Err: nil, }, { Name: "fail empty extension", @@ -56,7 +38,8 @@ func TestNewWidget(t *testing.T) { 
Extension: "", Property: pr, Enabled: true, - err: id.ErrInvalidID, + Extended: false, + Err: id.ErrInvalidID, }, } @@ -64,19 +47,21 @@ func TestNewWidget(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res, err := NewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) - if err == nil { - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.Property, res.Property()) - assert.Equal(tt, tc.Expected.Extension, res.Extension()) - assert.Equal(tt, tc.Expected.Enabled, res.Enabled()) - assert.Equal(tt, tc.Expected.Plugin, res.Plugin()) + res, err := NewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + if tc.Err == nil { + assert.Equal(tt, tc.ID, res.ID()) + assert.Equal(tt, tc.Property, res.Property()) + assert.Equal(tt, tc.Extension, res.Extension()) + assert.Equal(tt, tc.Enabled, res.Enabled()) + assert.Equal(tt, tc.Extended, res.Extended()) + assert.Equal(tt, tc.Plugin, res.Plugin()) } else { - assert.ErrorIs(tt, err, tc.err) + assert.ErrorIs(tt, err, tc.Err) } }) } } + func TestMustNewWidget(t *testing.T) { pid := id.MustPluginID("xxx~1.1.1") pr := id.NewPropertyID() @@ -88,14 +73,8 @@ func TestMustNewWidget(t *testing.T) { Extension id.PluginExtensionID Property id.PropertyID Enabled bool - Expected struct { - Id id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID - Enabled bool - } - err error + Extended bool + Err error }{ { Name: "success new widget", @@ -104,20 +83,8 @@ func TestMustNewWidget(t *testing.T) { Extension: "eee", Property: pr, Enabled: true, - Expected: struct { - Id id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID - Enabled bool - }{ - Id: wid, - Plugin: pid, - Extension: "eee", - Property: pr, - Enabled: true, - }, - err: nil, + Extended: true, + Err: nil, }, { Name: "fail empty extension", @@ -126,7 +93,8 @@ func TestMustNewWidget(t *testing.T) { Extension: "", Property: pr, 
Enabled: true, - err: id.ErrInvalidID, + Extended: false, + Err: id.ErrInvalidID, }, } @@ -134,23 +102,32 @@ func TestMustNewWidget(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - var res *Widget - defer func() { - if r := recover(); r == nil { - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.Property, res.Property()) - assert.Equal(tt, tc.Expected.Extension, res.Extension()) - assert.Equal(tt, tc.Expected.Enabled, res.Enabled()) - assert.Equal(tt, tc.Expected.Plugin, res.Plugin()) - } - }() - res = MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled) + + if tc.Err != nil { + assert.PanicsWithError(tt, tc.Err.Error(), func() { + MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + }) + return + } + + res := MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + assert.Equal(tt, tc.ID, res.ID()) + assert.Equal(tt, tc.Property, res.Property()) + assert.Equal(tt, tc.Extension, res.Extension()) + assert.Equal(tt, tc.Enabled, res.Enabled()) + assert.Equal(tt, tc.Plugin, res.Plugin()) }) } } func TestWidget_SetEnabled(t *testing.T) { - res := MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", id.NewPropertyID(), false) + res := MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", id.NewPropertyID(), false, false) res.SetEnabled(true) assert.True(t, res.Enabled()) } + +func TestWidget_SetExtended(t *testing.T) { + res := MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", id.NewPropertyID(), false, false) + res.SetExtended(true) + assert.True(t, res.Extended()) +} diff --git a/pkg/scene/widget_zone.go b/pkg/scene/widget_zone.go new file mode 100644 index 000000000..cbce3ded5 --- /dev/null +++ b/pkg/scene/widget_zone.go @@ -0,0 +1,86 @@ +package scene + +import "github.com/reearth/reearth-backend/pkg/id" + +// WidgetZone is the structure of each layer (inner and outer) of the align 
system. +type WidgetZone struct { + left *WidgetSection + center *WidgetSection + right *WidgetSection +} + +type WidgetSectionType string + +const ( + WidgetSectionLeft WidgetSectionType = "left" + WidgetSectionCenter WidgetSectionType = "center" + WidgetSectionRight WidgetSectionType = "right" +) + +func NewWidgetZone() *WidgetZone { + return &WidgetZone{} +} + +func (wz *WidgetZone) Section(s WidgetSectionType) *WidgetSection { + switch s { + case WidgetSectionLeft: + if wz.left == nil { + wz.left = NewWidgetSection() + } + return wz.left + case WidgetSectionCenter: + if wz.center == nil { + wz.center = NewWidgetSection() + } + return wz.center + case WidgetSectionRight: + if wz.right == nil { + wz.right = NewWidgetSection() + } + return wz.right + } + return nil +} + +func (z *WidgetZone) Remove(wid id.WidgetID) { + if z == nil { + return + } + + z.left.Remove(wid) + z.center.Remove(wid) + z.right.Remove(wid) +} + +func (z *WidgetZone) Find(wid id.WidgetID) (int, WidgetSectionType, WidgetAreaType) { + if z == nil { + return -1, "", "" + } + + if i, wa := z.left.Find(wid); i >= 0 { + return i, WidgetSectionLeft, wa + } + if i, wa := z.center.Find(wid); i >= 0 { + return i, WidgetSectionCenter, wa + } + if i, wa := z.right.Find(wid); i >= 0 { + return i, WidgetSectionRight, wa + } + + return -1, "", "" +} + +func (z *WidgetZone) SetSection(t WidgetSectionType, s *WidgetSection) { + if z == nil { + return + } + + switch t { + case WidgetSectionLeft: + z.left = s + case WidgetSectionCenter: + z.center = s + case WidgetSectionRight: + z.right = s + } +} diff --git a/pkg/scene/widget_zone_test.go b/pkg/scene/widget_zone_test.go new file mode 100644 index 000000000..40e21d5b4 --- /dev/null +++ b/pkg/scene/widget_zone_test.go @@ -0,0 +1,250 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestNewWidgetZone(t *testing.T) { + assert.Equal(t, &WidgetZone{}, NewWidgetZone()) +} + 
+func TestWidgetZone_Section(t *testing.T) { + wz := NewWidgetZone() + assert.Same(t, wz.left, wz.Section(WidgetSectionLeft)) + assert.NotNil(t, wz.left) + assert.Same(t, wz.center, wz.Section(WidgetSectionCenter)) + assert.NotNil(t, wz.center) + assert.Same(t, wz.right, wz.Section(WidgetSectionRight)) + assert.NotNil(t, wz.right) +} + +func TestWidgetZone_Find(t *testing.T) { + wid1 := id.NewWidgetID() + wid2 := id.NewWidgetID() + wid3 := id.NewWidgetID() + wid4 := id.NewWidgetID() + wid5 := id.NewWidgetID() + wid6 := id.NewWidgetID() + wid7 := id.NewWidgetID() + + testCases := []struct { + Name string + Input id.WidgetID + Expected1 int + Expected2 WidgetSectionType + Expected3 WidgetAreaType + Nil bool + }{ + { + Name: "left", + Input: wid2, + Expected1: 1, + Expected2: WidgetSectionLeft, + Expected3: WidgetAreaTop, + }, + { + Name: "center", + Input: wid4, + Expected1: 0, + Expected2: WidgetSectionCenter, + Expected3: WidgetAreaTop, + }, + { + Name: "right", + Input: wid7, + Expected1: 1, + Expected2: WidgetSectionRight, + Expected3: WidgetAreaTop, + }, + { + Name: "invalid id", + Input: id.NewWidgetID(), + Expected1: -1, + Expected2: "", + Expected3: "", + }, + { + Name: "Return nil if no widget section", + Input: wid1, + Nil: true, + Expected1: -1, + Expected2: "", + Expected3: "", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + index, section, area := (*WidgetZone)(nil).Find(tc.Input) + assert.Equal(tt, tc.Expected1, index) + assert.Equal(tt, tc.Expected2, section) + assert.Equal(tt, tc.Expected3, area) + return + } + + ez := NewWidgetZone() + ez.Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid1, wid2, wid3}) + ez.Section(WidgetSectionCenter).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid4, wid5}) + ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid6, wid7}) + + index, section, area := ez.Find(tc.Input) + assert.Equal(tt, 
tc.Expected1, index) + assert.Equal(tt, tc.Expected2, section) + assert.Equal(tt, tc.Expected3, area) + }) + } +} + +func TestWidgetZone_Remove(t *testing.T) { + wid := id.NewWidgetID() + + testCases := []struct { + Name string + Section WidgetSectionType + Input id.WidgetID + Expected []id.WidgetID + Nil bool + }{ + { + Name: "left: remove a widget from widget section", + Section: WidgetSectionLeft, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "left: couldn't find widgetId", + Section: WidgetSectionLeft, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "center: remove a widget from widget section", + Section: WidgetSectionCenter, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "center: couldn't find widgetId", + Section: WidgetSectionCenter, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "right: remove a widget from widget section", + Section: WidgetSectionRight, + Input: wid, + Expected: []id.WidgetID{}, + }, + { + Name: "right: couldn't find widgetId", + Section: WidgetSectionRight, + Input: id.NewWidgetID(), + Expected: []id.WidgetID{wid}, + }, + { + Name: "nil", + Section: WidgetSectionLeft, + Input: wid, + Nil: true, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + + if tc.Nil { + (*WidgetZone)(nil).Remove(tc.Input) + return + } + + ws := NewWidgetZone() + ws.Section(tc.Section).Area(WidgetAreaTop).Add(wid, -1) + ws.Remove(tc.Input) + assert.Equal(tt, tc.Expected, ws.Section(tc.Section).Area(WidgetAreaTop).WidgetIDs()) + }) + } +} + +func TestWidgetZone_SetSection(t *testing.T) { + type args struct { + t WidgetSectionType + s *WidgetSection + } + tests := []struct { + name string + args args + nil bool + }{ + { + name: "left", + args: args{ + t: WidgetSectionLeft, + s: &WidgetSection{}, + }, + }, + { + name: "center", + args: args{ + t: WidgetSectionCenter, + s: &WidgetSection{}, + }, + }, + { + name: "right", + 
args: args{ + t: WidgetSectionRight, + s: &WidgetSection{}, + }, + }, + { + name: "nil area", + args: args{ + t: WidgetSectionLeft, + s: nil, + }, + }, + { + name: "nil", + args: args{ + t: WidgetSectionLeft, + s: &WidgetSection{}, + }, + nil: true, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var z *WidgetZone + if !tt.nil { + z = &WidgetZone{} + } + + z.SetSection(tt.args.t, tt.args.s) + + if !tt.nil { + var s2 *WidgetSection + switch tt.args.t { + case WidgetSectionLeft: + s2 = z.left + case WidgetSectionCenter: + s2 = z.center + case WidgetSectionRight: + s2 = z.right + } + assert.Same(t, tt.args.s, s2) + } + }) + } +} diff --git a/plugin_manifest_schema.json b/plugin_manifest_schema.json index 59c787a59..dc8379f73 100644 --- a/plugin_manifest_schema.json +++ b/plugin_manifest_schema.json @@ -249,6 +249,38 @@ }, "additionalProperties": false }, + "location": { + "$id": "#location", + "type": [ + "object", + "null" + ], + "properties": { + "zone": { + "type": "string", + "enum": [ + "inner", + "outer" + ] + }, + "section": { + "type": "string", + "enum": [ + "left", + "center", + "right" + ] + }, + "area": { + "type": "string", + "enum": [ + "top", + "middle", + "bottom" + ] + } + } + }, "extension": { "$id": "#extension", "type": "object", @@ -287,6 +319,46 @@ "infobox" ] }, + "widgetLayout": { + "type": [ + "object", + "null" + ], + "properties": { + "extendable": { + "type": [ + "object", + "null" + ], + "properties": { + "vertically": { + "type": [ + "boolean", + "null" + ] + }, + "horizontally": { + "type": [ + "boolean", + "null" + ] + } + } + }, + "extended": { + "type": [ + "boolean", + "null" + ] + }, + "floating": { + "type": "boolean" + }, + "defaultLocation": { + "$ref": "#/definitions/location" + } + } + }, "schema": { "$ref": "#/definitions/propertySchema" } @@ -357,4 +429,4 @@ } }, "$ref": "#/definitions/root" -} +} \ No newline at end of file diff --git a/schema.graphql b/schema.graphql 
index b7c03f253..e972c803a 100644 --- a/schema.graphql +++ b/schema.graphql @@ -268,6 +268,47 @@ type PluginMetadata { createdAt: DateTime! } +enum WidgetAreaAlign { + START + CENTERED + END +} + +enum WidgetZoneType { + INNER + OUTER +} + +enum WidgetSectionType { + LEFT + CENTER + RIGHT +} + +enum WidgetAreaType { + TOP + MIDDLE + BOTTOM +} + +type WidgetLocation { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + +type WidgetExtendable { + vertically: Boolean! + horizontally: Boolean! +} + +type WidgetLayout { + extendable: WidgetExtendable! + extended: Boolean! + floating: Boolean! + defaultLocation: WidgetLocation +} + enum PluginExtensionType { PRIMITIVE WIDGET @@ -276,6 +317,7 @@ enum PluginExtensionType { INFOBOX } + type PluginExtension { extensionId: PluginExtensionID! pluginId: PluginID! @@ -283,6 +325,7 @@ type PluginExtension { name: String! description: String! icon: String! + widgetLayout: WidgetLayout visualizer: Visualizer! propertySchemaId: PropertySchemaID! allTranslatedName: TranslatedString @@ -306,6 +349,7 @@ type Scene implements Node { rootLayerId: ID! widgets: [SceneWidget!]! plugins: [ScenePlugin!]! + widgetAlignSystem: WidgetAlignSystem dynamicDatasetSchemas: [DatasetSchema!]! project: Project @goField(forceResolver: true) team: Team @goField(forceResolver: true) @@ -334,6 +378,7 @@ type SceneWidget { extensionId: PluginExtensionID! propertyId: ID! enabled: Boolean! + extended: Boolean! 
plugin: Plugin @goField(forceResolver: true) extension: PluginExtension @goField(forceResolver: true) property: Property @goField(forceResolver: true) @@ -346,6 +391,28 @@ type ScenePlugin { property: Property @goField(forceResolver: true) } +type WidgetAlignSystem { + inner: WidgetZone + outer: WidgetZone +} + +type WidgetZone { + left: WidgetSection + center: WidgetSection + right: WidgetSection +} + +type WidgetSection { + top: WidgetArea + middle: WidgetArea + bottom: WidgetArea +} + +type WidgetArea { + widgetIds: [ID!]! + align: WidgetAreaAlign! +} + # Property type PropertySchema { @@ -822,6 +889,12 @@ input DeleteProjectInput { projectId: ID! } +input WidgetLocationInput { + zone: WidgetZoneType! + section: WidgetSectionType! + area: WidgetAreaType! +} + input AddWidgetInput { sceneId: ID! pluginId: PluginID! @@ -832,6 +905,15 @@ input UpdateWidgetInput { sceneId: ID! widgetId: ID! enabled: Boolean + location: WidgetLocationInput + extended: Boolean + index: Int +} + +input UpdateWidgetAlignSystemInput { + sceneId: ID! + location: WidgetLocationInput! + align: WidgetAreaAlign } input RemoveWidgetInput { @@ -1122,6 +1204,10 @@ type UpdateWidgetPayload { sceneWidget: SceneWidget! } +type UpdateWidgetAlignSystemPayload { + scene: Scene! +} + type RemoveWidgetPayload { scene: Scene! widgetId: ID! 
@@ -1372,6 +1458,7 @@ type Mutation { createScene(input: CreateSceneInput!): CreateScenePayload addWidget(input: AddWidgetInput!): AddWidgetPayload updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload + updateWidgetAlignSystem(input: UpdateWidgetAlignSystemInput!): UpdateWidgetAlignSystemPayload removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload installPlugin(input: InstallPluginInput!): InstallPluginPayload uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload From a7daf79f014ca8b31d48554d0f12c4e8cac0e8a7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Sun, 26 Sep 2021 22:18:11 +0900 Subject: [PATCH 082/253] fix: validate widget extended when moved --- internal/usecase/interactor/scene.go | 16 ++++++++++++---- pkg/plugin/extension.go | 12 ++++++++++++ 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 9643b214d..38d763fef 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -298,10 +298,18 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP } if param.Extended != nil { - if layout := extension.WidgetLayout(); layout != nil { - if layout.HorizontallyExtendable() && location.Horizontal() || layout.VerticallyExtendable() && location.Vertical() { - widget.SetExtended(*param.Extended) - } + widget.SetExtended(*param.Extended) + } + + // check extendable + if layout := extension.WidgetLayout(); layout != nil { + extendable := layout.Extendable(plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(location.Zone), + Section: plugin.WidgetSectionType(location.Section), + Area: plugin.WidgetAreaType(location.Area), + }) + if extendable && widget.Extended() { + widget.SetExtended(*param.Extended) } } diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go index 3066db0eb..4ef63e7c4 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ -88,6 +88,10 @@ type 
WidgetLayout struct { defaultLocation *WidgetLocation } +func (l WidgetLayout) Extendable(loc WidgetLocation) bool { + return l.HorizontallyExtendable() && loc.Horizontal() || l.VerticallyExtendable() && loc.Vertical() +} + func NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, floating bool, defaultLocation *WidgetLocation) WidgetLayout { return WidgetLayout{ horizontallyExtendable: horizontallyExtendable, @@ -131,6 +135,14 @@ type WidgetLocation struct { Area WidgetAreaType } +func (l WidgetLocation) Horizontal() bool { + return l.Section == WidgetSectionCenter +} + +func (l WidgetLocation) Vertical() bool { + return l.Area == WidgetAreaMiddle +} + func (l *WidgetLocation) CopyRef() *WidgetLocation { if l == nil { return nil From 98db7e05076327850c7e9a7fb04a218d49a0fe3f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Sun, 26 Sep 2021 23:16:48 +0900 Subject: [PATCH 083/253] fix: widget extended validation --- internal/usecase/interactor/scene.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 38d763fef..859f96ab5 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -308,8 +308,8 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP Section: plugin.WidgetSectionType(location.Section), Area: plugin.WidgetAreaType(location.Area), }) - if extendable && widget.Extended() { - widget.SetExtended(*param.Extended) + if e := widget.Extended(); !extendable && e { + widget.SetExtended(false) } } From d236bed8ee832a639c0602fffa8142495115646c Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 27 Sep 2021 02:27:53 +0900 Subject: [PATCH 084/253] fix: nil error in mongodoc plugin --- internal/infrastructure/mongo/mongodoc/plugin.go | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/internal/infrastructure/mongo/mongodoc/plugin.go 
b/internal/infrastructure/mongo/mongodoc/plugin.go index 50d833878..ce628b383 100644 --- a/internal/infrastructure/mongo/mongodoc/plugin.go +++ b/internal/infrastructure/mongo/mongodoc/plugin.go @@ -68,6 +68,10 @@ func (c *PluginConsumer) Consume(raw bson.Raw) error { } func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { + if plugin == nil { + return nil, "" + } + extensions := plugin.Extensions() extensionsDoc := make([]PluginExtensionDocument, 0, len(extensions)) for _, e := range extensions { @@ -97,6 +101,10 @@ func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { } func (d *PluginDocument) Model() (*plugin.Plugin, error) { + if d == nil { + return nil, nil + } + pid, err := id.PluginIDFrom(d.ID) if err != nil { return nil, err @@ -135,6 +143,10 @@ func (d *PluginDocument) Model() (*plugin.Plugin, error) { } func NewWidgetLayout(l *plugin.WidgetLayout) *WidgetLayoutDocument { + if l == nil { + return nil + } + return &WidgetLayoutDocument{ Extendable: &WidgetExtendableDocument{ Vertically: l.VerticallyExtendable(), From eb1db482d52e9499807cf4534d90e74d8a14e548 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 27 Sep 2021 04:02:01 +0900 Subject: [PATCH 085/253] fix: add widget to default location --- internal/usecase/interactor/scene.go | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 859f96ab5..77eb8b844 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -218,12 +218,22 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, s.WidgetSystem().Add(widget) - if !floating && location != nil { - s.WidgetAlignSystem().Area(scene.WidgetLocation{ - Zone: scene.WidgetZoneType(location.Zone), - Section: scene.WidgetSectionType(location.Section), - Area: scene.WidgetAreaType(location.Area), - }).Add(widget.ID(), -1) + if !floating { + var loc 
scene.WidgetLocation + if location != nil { + loc = scene.WidgetLocation{ + Zone: scene.WidgetZoneType(location.Zone), + Section: scene.WidgetSectionType(location.Section), + Area: scene.WidgetAreaType(location.Area), + } + } else { + loc = scene.WidgetLocation{ + Zone: scene.WidgetZoneInner, + Section: scene.WidgetSectionLeft, + Area: scene.WidgetAreaTop, + } + } + s.WidgetAlignSystem().Area(loc).Add(widget.ID(), -1) } err = i.propertyRepo.Save(ctx, property) From 92d1373db703863cf1e98e5064fd02a0fd3c2555 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 28 Sep 2021 18:19:57 +0900 Subject: [PATCH 086/253] fix: invalid extension data from GraphQL, plugin manifest schema improvement, more friendly error from manifest parser (#56) --- internal/adapter/gql/generated.go | 28 ++-- internal/adapter/gql/gqlmodel/convert.go | 8 ++ .../adapter/gql/gqlmodel/convert_plugin.go | 2 +- internal/adapter/gql/gqlmodel/models_gen.go | 2 +- .../infrastructure/mongo/mongodoc/plugin.go | 51 ++++--- internal/usecase/interactor/plugin_upload.go | 14 +- pkg/builtin/manifest.yml | 55 +++----- pkg/builtin/manifest_ja.yml | 42 +++--- pkg/plugin/extension_builder.go | 13 -- pkg/plugin/manifest/convert.go | 53 ++++--- pkg/plugin/manifest/convert_test.go | 130 +++++++++--------- pkg/plugin/manifest/parser_translation.go | 8 +- .../manifest/parser_translation_test.go | 4 +- pkg/plugin/manifest/schema_gen.go | 8 +- pkg/plugin/manifest/schema_translation.go | 4 +- pkg/plugin/manifest/testdata/test.yml | 2 +- pkg/plugin/manifest/testdata/translation.yml | 28 ++-- .../manifest/testdata/translation_merge.yml | 36 ++--- pkg/plugin/pluginpack/package.go | 9 +- pkg/plugin/pluginpack/testdata/test.zip | Bin 791 -> 789 bytes pkg/plugin/pluginpack/testdata/test/index.js | 1 + .../pluginpack/testdata/test/reearth.yml | 1 + .../pluginpack/testdata/test/test/foo.bar | 1 + pkg/rerror/error.go | 23 +++- pkg/rerror/error_test.go | 22 ++- plugin_manifest_schema.json | 16 ++- plugin_manifest_schema_translation.json 
| 4 +- schema.graphql | 2 +- 28 files changed, 307 insertions(+), 260 deletions(-) create mode 100644 pkg/plugin/pluginpack/testdata/test/index.js create mode 100644 pkg/plugin/pluginpack/testdata/test/reearth.yml create mode 100644 pkg/plugin/pluginpack/testdata/test/test/foo.bar diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index b806edc0a..39cddd042 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -5956,7 +5956,7 @@ type PluginExtension { description: String! icon: String! widgetLayout: WidgetLayout - visualizer: Visualizer! + visualizer: Visualizer propertySchemaId: PropertySchemaID! allTranslatedName: TranslatedString allTranslatedDescription: TranslatedString @@ -19238,14 +19238,11 @@ func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, fie return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(gqlmodel.Visualizer) + res := resTmp.(*gqlmodel.Visualizer) fc.Result = res - return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) + return ec.marshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) } func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { @@ -34501,9 +34498,6 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select out.Values[i] = ec._PluginExtension_widgetLayout(ctx, field, obj) case "visualizer": out.Values[i] = ec._PluginExtension_visualizer(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } case "propertySchemaId": out.Values[i] = ec._PluginExtension_propertySchemaId(ctx, field, obj) 
if out.Values[i] == graphql.Null { @@ -41565,6 +41559,22 @@ func (ec *executionContext) marshalOValueType2แš–githubแš—comแš‹reearthแš‹reeart return v } +func (ec *executionContext) unmarshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, v interface{}) (*gqlmodel.Visualizer, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.Visualizer) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Visualizer) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + func (ec *executionContext) marshalOWidgetAlignSystem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAlignSystem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetAlignSystem) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/adapter/gql/gqlmodel/convert.go b/internal/adapter/gql/gqlmodel/convert.go index 874bf5be4..b5c7cfa51 100644 --- a/internal/adapter/gql/gqlmodel/convert.go +++ b/internal/adapter/gql/gqlmodel/convert.go @@ -51,6 +51,14 @@ func ToVisualizer(v visualizer.Visualizer) Visualizer { return Visualizer("") } +func ToVisualizerRef(v visualizer.Visualizer) *Visualizer { + if v == "" { + return nil + } + v2 := ToVisualizer(v) + return &v2 +} + func FromFile(f *graphql.Upload) *file.File { if f == nil { return nil diff --git a/internal/adapter/gql/gqlmodel/convert_plugin.go b/internal/adapter/gql/gqlmodel/convert_plugin.go index 3e8526cb6..1fcca0e43 100644 --- a/internal/adapter/gql/gqlmodel/convert_plugin.go +++ b/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -17,7 +17,7 @@ func ToPlugin(p *plugin.Plugin) *Plugin { ExtensionID: pe.ID(), PluginID: pid, Type: 
ToPluginExtensionType(pe.Type()), - Visualizer: ToVisualizer(pe.Visualizer()), + Visualizer: ToVisualizerRef(pe.Visualizer()), Name: pe.Name().String(), Description: pe.Description().String(), Icon: pe.Icon(), diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 3ea0fc5f2..ea2d825d0 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -592,7 +592,7 @@ type PluginExtension struct { Description string `json:"description"` Icon string `json:"icon"` WidgetLayout *WidgetLayout `json:"widgetLayout"` - Visualizer Visualizer `json:"visualizer"` + Visualizer *Visualizer `json:"visualizer"` PropertySchemaID id.PropertySchemaID `json:"propertySchemaId"` AllTranslatedName map[string]string `json:"allTranslatedName"` AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` diff --git a/internal/infrastructure/mongo/mongodoc/plugin.go b/internal/infrastructure/mongo/mongodoc/plugin.go index ce628b383..0ca12189f 100644 --- a/internal/infrastructure/mongo/mongodoc/plugin.go +++ b/internal/infrastructure/mongo/mongodoc/plugin.go @@ -24,7 +24,7 @@ type PluginExtensionDocument struct { Description map[string]string Icon string Schema string - Visualizer string + Visualizer string `bson:",omitempty"` WidgetLayout *WidgetLayoutDocument } @@ -119,8 +119,8 @@ func (d *PluginDocument) Model() (*plugin.Plugin, error) { extension, err := plugin.NewExtension(). ID(id.PluginExtensionID(e.ID)). Type(plugin.ExtensionType(e.Type)). - Name(d.Name). - Description(d.Description). + Name(e.Name). + Description(e.Description). Icon(e.Icon). WidgetLayout(e.WidgetLayout.Model()). Schema(psid). 
@@ -152,13 +152,9 @@ func NewWidgetLayout(l *plugin.WidgetLayout) *WidgetLayoutDocument { Vertically: l.VerticallyExtendable(), Horizontally: l.HorizontallyExtendable(), }, - Extended: l.Extended(), - Floating: l.Floating(), - DefaultLocation: &WidgetLocationDocument{ - Zone: string(l.DefaultLocation().Zone), - Section: string(l.DefaultLocation().Section), - Area: string(l.DefaultLocation().Area), - }, + Extended: l.Extended(), + Floating: l.Floating(), + DefaultLocation: NewWidgetLocation(l.DefaultLocation()), } } @@ -167,20 +163,35 @@ func (d *WidgetLayoutDocument) Model() *plugin.WidgetLayout { return nil } - var loc *plugin.WidgetLocation - if d.DefaultLocation != nil { - loc = &plugin.WidgetLocation{ - Zone: plugin.WidgetZoneType(d.DefaultLocation.Zone), - Section: plugin.WidgetSectionType(d.DefaultLocation.Section), - Area: plugin.WidgetAreaType(d.DefaultLocation.Area), - } - } - return plugin.NewWidgetLayout( d.Extendable.Horizontally, d.Extendable.Vertically, d.Extended, d.Floating, - loc, + d.DefaultLocation.Model(), ).Ref() } + +func NewWidgetLocation(l *plugin.WidgetLocation) *WidgetLocationDocument { + if l == nil { + return nil + } + + return &WidgetLocationDocument{ + Zone: string(l.Zone), + Section: string(l.Section), + Area: string(l.Area), + } +} + +func (d *WidgetLocationDocument) Model() *plugin.WidgetLocation { + if d == nil { + return nil + } + + return &plugin.WidgetLocation{ + Zone: plugin.WidgetZoneType(d.Zone), + Section: plugin.WidgetSectionType(d.Section), + Area: plugin.WidgetAreaType(d.Area), + } +} diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index 7560b3ab3..b934c751c 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -42,13 +42,17 @@ func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operat p, err := pluginpack.PackageFromZip(r, &sid, pluginPackageSizeLimit) if err != nil { - return nil, nil, 
interfaces.ErrInvalidPluginPackage + return nil, nil, &rerror.Error{ + Label: interfaces.ErrInvalidPluginPackage, + Err: err, + Separate: true, + } } for { f, err := p.Files.Next() if err != nil { - return nil, nil, interfaces.ErrInvalidPluginPackage + return nil, nil, rerror.ErrInternalBy(err) } if f == nil { break @@ -60,15 +64,15 @@ func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operat if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { - return nil, nil, err + return nil, nil, rerror.ErrInternalBy(err) } } if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { - return nil, nil, err + return nil, nil, rerror.ErrInternalBy(err) } if err := i.installPlugin(ctx, p, s); err != nil { - return nil, nil, err + return nil, nil, rerror.ErrInternalBy(err) } tx.Commit() diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 1b5dd9868..6d2829c84 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -1,11 +1,11 @@ id: reearth system: true -title: Re:Earth Official Plugin +name: Re:Earth Official Plugin description: Official Plugin author: Re:Earth extensions: - id: cesium - title: Cesium + name: Cesium description: Select here to find scene settings in the right panel. This includes setting map tiles, atmospheric conditions, real lighting, and more. visualizer: cesium type: visualizer @@ -229,8 +229,7 @@ extensions: title: Tracking ID description: Paste your Google Analytics tracking ID here. This will be embedded in your published project. - id: infobox - title: Infobox - visualizer: cesium + name: Infobox type: infobox description: Create an information area that appears when a layer is highlighted. Text, pictures, video, etc can be added to an infobox. schema: @@ -260,7 +259,7 @@ extensions: - id: marker visualizer: cesium type: primitive - title: Marker + name: Marker description: A standard map marker. 
schema: groups: @@ -506,7 +505,7 @@ extensions: - id: polyline visualizer: cesium type: primitive - title: Polyline + name: Polyline description: Polyline primitive schema: groups: @@ -529,7 +528,7 @@ extensions: - id: polygon visualizer: cesium type: primitive - title: Polygon + name: Polygon description: Polygon primitive schema: groups: @@ -575,7 +574,7 @@ extensions: - id: rect visualizer: cesium type: primitive - title: Rectangle + name: Rectangle description: Rectangle primitive schema: groups: @@ -623,7 +622,7 @@ extensions: - id: photooverlay visualizer: cesium type: primitive - title: Photo overlay + name: Photo overlay description: An Icon marker that allows you to set a photo that will appear after reaching its location. schema: groups: @@ -749,7 +748,7 @@ extensions: # - id: rect # visualizer: cesium # type: primitive - # title: Rectangle + # name: Rectangle # description: A rectangle # schema: # groups: @@ -830,7 +829,7 @@ extensions: - id: ellipsoid visualizer: cesium type: primitive - title: Sphere + name: Sphere description: A 3D ellipsoid schema: groups: @@ -889,7 +888,7 @@ extensions: - id: model visualizer: cesium type: primitive - title: 3D Model + name: 3D Model description: A 3D model schema: groups: @@ -1031,7 +1030,7 @@ extensions: - id: tileset visualizer: cesium type: primitive - title: 3D Tiles + name: 3D Tiles description: 3D tiles in "3D Tiles" format schema: groups: @@ -1063,7 +1062,7 @@ extensions: - id: resource visualizer: cesium type: primitive - title: File + name: File description: Import your own primitives to be used instead of Re:Earth's built in ones. 
schema: groups: @@ -1088,9 +1087,8 @@ extensions: - key: czml label: CZML - id: textblock - visualizer: cesium type: block - title: Text + name: Text description: Text block schema: groups: @@ -1111,9 +1109,8 @@ extensions: type: typography title: Font - id: imageblock - visualizer: cesium type: block - title: Image + name: Image description: Image block schema: groups: @@ -1166,9 +1163,8 @@ extensions: schemaGroupId: default fieldId: image - id: videoblock - visualizer: cesium type: block - title: Video + name: Video description: Video block schema: groups: @@ -1186,9 +1182,8 @@ extensions: type: bool title: Full size - id: locationblock - visualizer: cesium type: block - title: Location + name: Location description: Location block schema: groups: @@ -1205,9 +1200,8 @@ extensions: type: bool title: Full size - id: dlblock - visualizer: cesium type: block - title: Table + name: Table description: Table block schema: groups: @@ -1251,14 +1245,12 @@ extensions: type: string value: number # - id: navigator - # visualizer: cesium # type: widget # title: Navigator # description: Navigator widget - id: menu - visualizer: cesium type: widget - title: Menu (legacy) + name: Menu (legacy) description: Menu widgets widgetLayout: floating: true @@ -1371,9 +1363,8 @@ extensions: type: string value: camera - id: button - visualizer: cesium type: widget - title: Button + name: Button description: Button widget widgetLayout: defaultLocation: @@ -1476,9 +1467,8 @@ extensions: type: string value: camera - id: splashscreen - visualizer: cesium type: widget - title: Splash screen + name: Splash screen description: A unique start screen that will display on load of your archive(ex. display the archive's title). widgetLayout: floating: true @@ -1539,9 +1529,8 @@ extensions: suffix: s min: 0 - id: storytelling - visualizer: cesium type: widget - title: Storytelling + name: Storytelling description: SA feature that enables you to create a story. 
Connect points in a meaningful way so that your information can be consumed and understood easily. widgetLayout: extendable: diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 6a41c47ae..56239d0ae 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -1,8 +1,8 @@ -title: Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ +name: Re:Earthๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ description: ๅ…ฌๅผใƒ—ใƒฉใ‚ฐใ‚คใƒณ extensions: cesium: - title: Cesium + name: Cesium description: ๅณใƒ‘ใƒใƒซใงใ‚ทใƒผใƒณๅ…จไฝ“ใฎ่จญๅฎšใ‚’ๅค‰ๆ›ดใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ใ‚ฟใ‚คใƒซใ€ๅคงๆฐ—ใ€ใƒฉใ‚คใƒ†ใ‚ฃใƒณใ‚ฐใชใฉใฎ่จญๅฎšใŒๅซใพใ‚Œใฆใ„ใพใ™ใ€‚ propertySchema: default: @@ -120,7 +120,7 @@ extensions: title: ่ƒŒๆ™ฏ่‰ฒ description: ่ƒŒๆ™ฏ่‰ฒใ‚’่จญๅฎšใ—ใพใ™ใ€‚ infobox: - title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น + name: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น description: ้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: default: @@ -138,7 +138,7 @@ extensions: typography: title: ใƒ•ใ‚ฉใƒณใƒˆ marker: - title: ใƒžใƒผใ‚ซใƒผ + name: ใƒžใƒผใ‚ซใƒผ description: ใƒžใƒผใ‚ซใƒผใซใฏใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ‚’็ดใฅใ‘ใ‚‹ใ“ใจใŒใงใใ€้–ฒ่ฆง่€…ใฏใƒžใƒผใ‚ซใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ™ใ‚‹ใ“ใจใงใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่ฆ‹ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: default: @@ -224,7 +224,7 @@ extensions: extrude: title: ๅœฐ้ขใ‹ใ‚‰็ทšใ‚’ไผธใฐใ™ polyline: - title: ็›ด็ทš + name: ็›ด็ทš propertySchema: default: title: ็›ด็ทš @@ -236,7 +236,7 @@ extensions: strokeWidth: title: ็ทšๅน… polygon: - title: ใƒใƒชใ‚ดใƒณ + name: ใƒใƒชใ‚ดใƒณ propertySchema: default: title: ใƒใƒชใ‚ดใƒณ @@ -254,7 +254,7 @@ extensions: strokeWidth: title: ็ทšๅน… rect: - title: ้•ทๆ–นๅฝข + name: ้•ทๆ–นๅฝข propertySchema: default: title: ้•ทๆ–นๅฝข @@ -294,7 +294,7 @@ extensions: cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ photooverlay: - title: 
ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค + name: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚ค description: ใƒ•ใ‚ฉใƒˆใ‚ชใƒผใƒใƒผใƒฌใ‚คใ‚’้ธๆŠžใ™ใ‚‹ใจใ€่จญๅฎšใ—ใŸ็”ปๅƒใ‚’ใƒขใƒผใƒ€ใƒซๅฝขๅผใง็”ป้ขไธŠใซ่ขซใ›ใฆ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: default: @@ -351,7 +351,7 @@ extensions: photoOverlayDescription: title: ใ‚ชใƒผใƒใƒฌใ‚ค็”ปๅƒใฎ่ชฌๆ˜Ž ellipsoid: - title: ็ƒไฝ“ + name: ็ƒไฝ“ description: ็ซ‹ไฝ“็š„ใช็ƒไฝ“ใ‚’ๅœฐๅ›ณไธŠใซ่กจ็คบใงใใพใ™ใ€‚ propertySchema: default: @@ -381,7 +381,7 @@ extensions: cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ model: - title: 3Dใƒขใƒ‡ใƒซ + name: 3Dใƒขใƒ‡ใƒซ description: glTFๅฝขๅผใฎ3Dใƒขใƒ‡ใƒซใ‚’่ชญใฟ่พผใ‚“ใง่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใง่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ propertySchema: default: @@ -448,7 +448,7 @@ extensions: silhouetteSize: title: ใ‚ทใƒซใ‚จใƒƒใƒˆใ‚ตใ‚คใ‚บ tileset: - title: 3Dใ‚ฟใ‚คใƒซ + name: 3Dใ‚ฟใ‚คใƒซ description: 3D Tilesๅฝขๅผใฎ3Dใ‚ฟใ‚คใƒซใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: default: @@ -469,7 +469,7 @@ extensions: cast_only: ๅฝฑใ‚’่ฝใจใ™ใ ใ‘ receive_only: ๅฝฑใ‚’่ฝใจใ•ใ‚Œใ‚‹ใ ใ‘ resource: - title: ใƒ•ใ‚กใ‚คใƒซ + name: ใƒ•ใ‚กใ‚คใƒซ description: ๅค–้ƒจใ‹ใ‚‰ใƒ‡ใƒผใ‚ฟใ‚’ใ‚คใƒณใƒใƒผใƒˆใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ๅœฐๅ›ณไธŠใซ่ฟฝๅŠ ๅพŒใ€URLใ‚’ๆŒ‡ๅฎšใ™ใ‚‹ใ“ใจใงๅค–้ƒจใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใพใ‚Œใพใ™ใ€‚ propertySchema: default: @@ -483,7 +483,7 @@ extensions: geojson: GeoJSON / TopoJSON czml: CZML textblock: - title: ใƒ†ใ‚ญใ‚นใƒˆ + name: ใƒ†ใ‚ญใ‚นใƒˆ description: Text block propertySchema: default: @@ -498,7 +498,7 @@ extensions: typography: title: ใƒ•ใ‚ฉใƒณใƒˆ imageblock: - title: ็”ปๅƒ + name: ็”ปๅƒ description: ็”ปๅƒใƒ–ใƒญใƒƒใ‚ฏ propertySchema: default: @@ -528,7 +528,7 @@ extensions: center: ไธญๅคฎ bottom: ไธ‹ videoblock: - title: ๅ‹•็”ป + name: ๅ‹•็”ป description: ๅ‹•็”ปใƒ–ใƒญใƒƒใ‚ฏ propertySchema: default: @@ -541,7 +541,7 @@ extensions: fullSize: title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ 
locationblock: - title: ไฝ็ฝฎๆƒ…ๅ ฑ + name: ไฝ็ฝฎๆƒ…ๅ ฑ description: ไฝ็ฝฎๆƒ…ๅ ฑใƒ–ใƒญใƒƒใ‚ฏ propertySchema: default: @@ -554,7 +554,7 @@ extensions: fullSize: title: ใƒ•ใƒซใ‚ตใ‚คใ‚บ dlblock: - title: ่กจ + name: ่กจ description: ่กจใƒ–ใƒญใƒƒใ‚ฏ propertySchema: default: @@ -579,7 +579,7 @@ extensions: item_datanum: title: ใƒ‡ใƒผใ‚ฟ(ๆ•ฐๅญ—) menu: - title: ใƒกใƒ‹ใƒฅใƒผ (ๅปƒๆญขไบˆๅฎš) + name: ใƒกใƒ‹ใƒฅใƒผ (ๅปƒๆญขไบˆๅฎš) description: | ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚ ใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚ @@ -640,7 +640,7 @@ extensions: menuCamera: title: ใ‚ซใƒกใƒฉ button: - title: ใƒœใ‚ฟใƒณ + name: ใƒœใ‚ฟใƒณ description: | ใ‚ทใƒผใƒณใซใƒœใ‚ฟใƒณใ‚’่จญ็ฝฎใ—ใ€ใƒกใƒ‹ใƒฅใƒผใ‚’่กจ็คบใ—ใพใ™ใ€‚่ฟฝๅŠ ใ—ใŸใƒœใ‚ฟใƒณใซ่จญๅฎšใ•ใ‚ŒใŸใ‚ขใ‚ฏใ‚ทใƒงใƒณใ‚ฟใ‚คใƒ—ใซใ‚ˆใฃใฆๅ‹•ไฝœใŒๅค‰ใ‚ใ‚Šใพใ™ใ€‚ ใ€€ใƒปใƒชใƒณใ‚ฏ๏ผšใƒœใ‚ฟใƒณ่‡ชไฝ“ใŒๅค–้ƒจใ‚ตใ‚คใƒˆใธใฎใƒชใƒณใ‚ฏใซใชใ‚Šใพใ™ใ€‚ @@ -692,7 +692,7 @@ extensions: menuCamera: title: ใ‚ซใƒกใƒฉ splashscreen: - title: ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ + name: ใ‚นใƒ—ใƒฉใƒƒใ‚ทใƒฅใ‚นใ‚ฏใƒชใƒผใƒณ description: ใƒšใƒผใ‚ธใƒญใƒผใƒ‰ๅพŒใ€ๆœ€ๅˆใซ่กจ็คบใ•ใ‚Œใ‚‹ๆผ”ๅ‡บใ‚’่จญๅฎšใงใใพใ™ใ€‚ไพ‹ใˆใฐใ€ใƒ—ใƒญใ‚ธใ‚งใ‚ฏใƒˆใฎใ‚ฟใ‚คใƒˆใƒซใ‚’้–ฒ่ฆง่€…ใซ่ฆ‹ใ›ใŸใ‚Šใ€ใ‚ซใƒกใƒฉใ‚’็งปๅ‹•ใ•ใ›ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: overlay: @@ -724,7 +724,7 @@ extensions: cameraDelay: title: ใ‚ซใƒกใƒฉๅพ…ๆฉŸๆ™‚้–“ storytelling: - title: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ + name: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐ description: ใ‚นใƒˆใƒผใƒชใƒผใƒ†ใƒชใƒณใ‚ฐๆฉŸ่ƒฝใ‚’ไฝฟใˆใฐใ€ใƒ‡ใƒผใ‚ฟ้–“ใฎ็น‹ใŒใ‚Šใ‚„ๆ™‚็ณปๅˆ—ใ‚’ใ‚‚ใจใซใ€้ †็•ชใซ่ณ‡ๆ–™ใ‚’้–ฒ่ฆงใ—ใฆใ‚‚ใ‚‰ใ†ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ไฝฟ็”จใ™ใ‚‹ใซใฏใ€ๅณใƒ‘ใƒใƒซใ‹ใ‚‰ๅœฐ็ƒไธŠใฎใƒฌใ‚คใƒคใƒผใซ้ †็•ชใ‚’ไป˜ไธŽใ—ใพใ™ใ€‚ propertySchema: default: diff --git a/pkg/plugin/extension_builder.go 
b/pkg/plugin/extension_builder.go index 524430974..d7b6ba460 100644 --- a/pkg/plugin/extension_builder.go +++ b/pkg/plugin/extension_builder.go @@ -8,18 +8,15 @@ import ( "github.com/reearth/reearth-backend/pkg/visualizer" ) -// ExtensionBuilder _ type ExtensionBuilder struct { p *Extension s bool } -// NewExtension _ func NewExtension() *ExtensionBuilder { return &ExtensionBuilder{p: &Extension{}} } -// Build _ func (b *ExtensionBuilder) Build() (*Extension, error) { if string(b.p.id) == "" { return nil, id.ErrInvalidID @@ -32,7 +29,6 @@ func (b *ExtensionBuilder) Build() (*Extension, error) { return b.p, nil } -// MustBuild _ func (b *ExtensionBuilder) MustBuild() *Extension { p, err := b.Build() if err != nil { @@ -41,55 +37,46 @@ func (b *ExtensionBuilder) MustBuild() *Extension { return p } -// ID _ func (b *ExtensionBuilder) ID(id id.PluginExtensionID) *ExtensionBuilder { b.p.id = id return b } -// Name _ func (b *ExtensionBuilder) Name(name i18n.String) *ExtensionBuilder { b.p.name = name.Copy() return b } -// Type _ func (b *ExtensionBuilder) Type(extensionType ExtensionType) *ExtensionBuilder { b.p.extensionType = extensionType return b } -// Description _ func (b *ExtensionBuilder) Description(description i18n.String) *ExtensionBuilder { b.p.description = description.Copy() return b } -// Icon _ func (b *ExtensionBuilder) Icon(icon string) *ExtensionBuilder { b.p.icon = icon return b } -// Schema _ func (b *ExtensionBuilder) Schema(schema id.PropertySchemaID) *ExtensionBuilder { b.p.schema = schema return b } -// Visualizer _ func (b *ExtensionBuilder) Visualizer(visualizer visualizer.Visualizer) *ExtensionBuilder { b.p.visualizer = visualizer return b } -// WidgetLayout _ func (b *ExtensionBuilder) WidgetLayout(widgetLayout *WidgetLayout) *ExtensionBuilder { b.p.widgetLayout = widgetLayout return b } -// System _ func (b *ExtensionBuilder) System(s bool) *ExtensionBuilder { b.s = s return b diff --git a/pkg/plugin/manifest/convert.go 
b/pkg/plugin/manifest/convert.go index 6379d377a..306e50657 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -1,15 +1,19 @@ package manifest import ( + "errors" "fmt" "github.com/reearth/reearth-backend/pkg/i18n" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/visualizer" ) +var errInvalidManifestWith = rerror.With(ErrInvalidManifest) + func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { var pid id.PluginID var err error @@ -18,7 +22,7 @@ func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { } else { pid, err = id.NewPluginID(string(i.ID), i.Version, sid) if err != nil { - return nil, ErrInvalidManifest + return nil, errInvalidManifestWith(fmt.Errorf("invalid plugin id: %s %s %s", i.ID, i.Version, sid)) } } @@ -26,7 +30,7 @@ func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { if i.Schema != nil { schema, err := i.Schema.schema(pid, "@") if err != nil { - return nil, err + return nil, errInvalidManifestWith(rerror.From("plugin property schema", err)) } pluginSchema = schema } @@ -41,7 +45,7 @@ func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { for _, e := range i.Extensions { extension, extensionSchema, err2 := e.extension(pid, i.System) if err2 != nil { - return nil, err2 + return nil, errInvalidManifestWith(rerror.From(fmt.Sprintf("ext (%s)", e.ID), err2)) } extensions = append(extensions, extension) extensionSchemas = append(extensionSchemas, extensionSchema) @@ -60,7 +64,7 @@ func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { p, err := plugin.New(). ID(pid). - Name(i18n.StringFrom(i.Title)). + Name(i18n.StringFrom(i.Name)). Author(author). Description(i18n.StringFrom(desc)). RepositoryURL(repository). 
@@ -68,7 +72,7 @@ func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { Extensions(extensions). Build() if err != nil { - return nil, err + return nil, errInvalidManifestWith(rerror.From("build", err)) } return &Manifest{ @@ -82,15 +86,21 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, eid := string(i.ID) schema, err := i.Schema.schema(pluginID, eid) if err != nil { - return nil, nil, err + return nil, nil, rerror.From("property schema", err) } var viz visualizer.Visualizer - switch i.Visualizer { - case "cesium": - viz = visualizer.VisualizerCesium - default: - return nil, nil, ErrInvalidManifest + if i.Visualizer != nil { + switch *i.Visualizer { + case "cesium": + viz = visualizer.VisualizerCesium + case "": + return nil, nil, errors.New("visualizer missing") + default: + return nil, nil, fmt.Errorf("invalid visualizer: %s", *i.Visualizer) + } + } else if i.Type == "visualizer" { + return nil, nil, errors.New("visualizer missing") } var typ plugin.ExtensionType @@ -105,8 +115,10 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, typ = plugin.ExtensionTypeVisualizer case "infobox": typ = plugin.ExtensionTypeInfobox + case "": + return nil, nil, errors.New("type missing") default: - return nil, nil, ErrInvalidManifest + return nil, nil, fmt.Errorf("invalid type: %s", i.Type) } var desc, icon string @@ -119,7 +131,7 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, ext, err := plugin.NewExtension(). ID(id.PluginExtensionID(eid)). - Name(i18n.StringFrom(i.Title)). + Name(i18n.StringFrom(i.Name)). Description(i18n.StringFrom(desc)). Visualizer(viz). Type(typ). 
@@ -130,7 +142,7 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, Build() if err != nil { - return nil, nil, err + return nil, nil, rerror.From("build", err) } return ext, schema, nil } @@ -169,7 +181,7 @@ func (l *WidgetLayout) layout() *plugin.WidgetLayout { func (i *PropertySchema) schema(pluginID id.PluginID, idstr string) (*property.Schema, error) { psid, err := id.PropertySchemaIDFrom(pluginID.String() + "/" + idstr) if err != nil { - return nil, err + return nil, fmt.Errorf("invalid id: %s", pluginID.String()+"/"+idstr) } if i == nil { @@ -183,7 +195,7 @@ func (i *PropertySchema) schema(pluginID id.PluginID, idstr string) (*property.S for _, d := range i.Groups { item, err := d.schemaGroup(psid) if err != nil { - return nil, err + return nil, rerror.From(fmt.Sprintf("item (%s)", d.ID), err) } items = append(items, item) } @@ -196,7 +208,7 @@ func (i *PropertySchema) schema(pluginID id.PluginID, idstr string) (*property.S LinkableFields(i.Linkable.linkable()). Build() if err != nil { - return nil, err + return nil, rerror.From("build", err) } return schema, nil } @@ -234,7 +246,7 @@ func (i PropertySchemaGroup) schemaGroup(sid id.PropertySchemaID) (*property.Sch for _, d := range i.Fields { field, err := d.schemaField() if err != nil { - return nil, err + return nil, rerror.From(fmt.Sprintf("field (%s)", d.ID), err) } fields = append(fields, field) } @@ -263,7 +275,7 @@ func (o *PropertyCondition) condition() *property.Condition { func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { t, ok := property.ValueTypeFrom(string(i.Type)) if !ok { - return nil, fmt.Errorf("schema field: invalid value type") + return nil, fmt.Errorf("invalid value type: %s", i.Type) } var title, desc, prefix, suffix string @@ -305,6 +317,9 @@ func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { UIRef(property.SchemaFieldUIFromRef(i.UI)). IsAvailableIf(i.AvailableIf.condition()). 
Build() + if err != nil { + return nil, rerror.From("build", err) + } return f, err } diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 04772af89..a10471c53 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -1,14 +1,13 @@ package manifest import ( - "errors" - "fmt" "testing" "github.com/reearth/reearth-backend/pkg/i18n" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/stretchr/testify/assert" ) @@ -50,6 +49,8 @@ func TestChoice(t *testing.T) { } func TestManifest(t *testing.T) { + es := "" + cesium := "cesium" a := "aaa" d := "ddd" r := "rrr" @@ -57,22 +58,22 @@ func TestManifest(t *testing.T) { name string root *Root expected *Manifest - err error + err string }{ { name: "success official plugin", root: &Root{ Author: &a, - Title: "aaa", + Name: "aaa", ID: "reearth", Description: &d, Extensions: []Extension{{ Description: nil, ID: "cesium", - Title: "", + Name: "", Schema: nil, Type: "visualizer", - Visualizer: "cesium", + Visualizer: &cesium, }}, Repository: &r, System: true, @@ -83,12 +84,11 @@ func TestManifest(t *testing.T) { ExtensionSchema: nil, Schema: nil, }, - err: nil, }, { name: "success empty name", root: &Root{ - Title: "reearth", + Name: "reearth", ID: "reearth", System: true, }, @@ -97,22 +97,21 @@ func TestManifest(t *testing.T) { ExtensionSchema: nil, Schema: nil, }, - err: nil, }, { name: "fail invalid manifest - extension", root: &Root{ Author: &a, - Title: "aaa", + Name: "aaa", ID: "reearth", Description: &d, Extensions: []Extension{{ Description: nil, ID: "cesium", - Title: "", + Name: "", Schema: nil, Type: "visualizer", - Visualizer: "", + Visualizer: &es, }}, Repository: &r, System: true, @@ -123,19 +122,19 @@ func TestManifest(t *testing.T) { ExtensionSchema: nil, Schema: nil, }, - err: 
ErrInvalidManifest, + err: "invalid manifest: ext (cesium): visualizer missing", }, { name: "fail invalid manifest - id", root: &Root{ - Title: "", + Name: "", ID: "", System: false, }, expected: &Manifest{ Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), }, - err: ErrInvalidManifest, + err: "invalid manifest: invalid plugin id: ", }, } for _, tc := range testCases { @@ -143,19 +142,21 @@ func TestManifest(t *testing.T) { t.Run(tc.name, func(tt *testing.T) { tt.Parallel() m, err := tc.root.manifest(nil) - if err == nil { + if tc.err == "" { assert.Equal(tt, tc.expected.Plugin.ID(), m.Plugin.ID()) assert.Equal(tt, tc.expected.Plugin.Name(), m.Plugin.Name()) assert.Equal(tt, len(tc.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) //assert.Equal(tt,tc.expected.Schema..) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(tt, tc.err, err.Error()) } }) } } func TestExtension(t *testing.T) { + es := "" + cesium := "cesium" d := "ddd" i := "xx:/aa.bb" testCases := []struct { @@ -165,144 +166,138 @@ func TestExtension(t *testing.T) { pid id.PluginID expectedPE *plugin.Extension expectedPS *property.Schema - err error + err string }{ { - name: "success official extension", + name: "visualizer", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Icon: &i, Schema: nil, Type: "visualizer", - Visualizer: "cesium", + Visualizer: &cesium, }, sys: true, pid: id.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeVisualizer).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), - err: nil, }, { - name: "success official extension", + name: "primitive", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, Type: "primitive", - Visualizer: "cesium", + 
Visualizer: &cesium, }, sys: true, pid: id.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypePrimitive).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), - err: nil, }, { - name: "success official extension", + name: "widget", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, Type: "widget", - Visualizer: "cesium", }, sys: true, pid: id.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), - err: nil, }, { - name: "success official extension", + name: "block", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, Type: "block", - Visualizer: "cesium", }, sys: true, pid: id.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeBlock).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeBlock).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), - err: nil, }, { - name: "success official extension", + name: "infobox", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, Type: "infobox", - 
Visualizer: "cesium", + Visualizer: &cesium, }, sys: true, pid: id.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeInfobox).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), - err: nil, }, { - name: "success official extension", + name: "empty visualizer", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, Type: "visualizer", - Visualizer: "", + Visualizer: &es, }, sys: true, pid: id.OfficialPluginID, expectedPE: nil, expectedPS: nil, - err: ErrInvalidManifest, + err: "visualizer missing", }, { - name: "success official extension", + name: "nil visualizer", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, - Type: "", - Visualizer: "cesium", + Type: "visualizer", + Visualizer: nil, }, sys: true, pid: id.OfficialPluginID, expectedPE: nil, expectedPS: nil, - err: ErrInvalidManifest, + err: "visualizer missing", }, { - name: "success official extension", + name: "empty type", ext: Extension{ Description: &d, ID: "cesium", - Title: "Cesium", + Name: "Cesium", Schema: nil, - Type: "visualizer", - Visualizer: "cesium", + Type: "", + Visualizer: &cesium, }, - sys: false, + sys: true, pid: id.OfficialPluginID, expectedPE: nil, expectedPS: nil, - err: ErrInvalidManifest, + err: "type missing", }, } + for _, tc := range testCases { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() pe, ps, err := tc.ext.extension(tc.pid, tc.sys) - if err == nil { + if tc.err == "" { assert.Equal(tt, tc.expectedPE.ID(), pe.ID()) assert.Equal(tt, tc.expectedPE.Visualizer(), pe.Visualizer()) assert.Equal(tt, tc.expectedPE.Type(), pe.Type()) @@ -310,7 +305,7 @@ func TestExtension(t *testing.T) { assert.Equal(tt, tc.expectedPS.ID(), ps.ID()) assert.Equal(tt, tc.expectedPS.ID(), ps.ID()) } else { - 
assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(tt, tc.err, err.Error()) } }) } @@ -436,7 +431,7 @@ func TestSchema(t *testing.T) { ps *PropertySchema pid id.PluginID expected *property.Schema - err error + err string }{ { name: "fail invalid id", @@ -448,7 +443,7 @@ func TestSchema(t *testing.T) { }, pid: id.MustPluginID("aaa~1.1.1"), expected: nil, - err: id.ErrInvalidID, + err: "invalid id: aaa~1.1.1/@", }, { name: "success nil PropertySchema", @@ -494,7 +489,7 @@ func TestSchema(t *testing.T) { t.Run(tc.name, func(tt *testing.T) { tt.Parallel() res, err := tc.ps.schema(tc.pid, tc.psid) - if err == nil { + if tc.err == "" { assert.Equal(tt, len(tc.expected.Groups()), len(res.Groups())) assert.Equal(tt, tc.expected.LinkableFields(), res.LinkableFields()) assert.Equal(tt, tc.expected.Version(), res.Version()) @@ -503,7 +498,7 @@ func TestSchema(t *testing.T) { assert.NotNil(tt, exg) } } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(tt, tc.err, err.Error()) } }) } @@ -517,7 +512,7 @@ func TestSchemaGroup(t *testing.T) { psg PropertySchemaGroup sid id.PropertySchemaID expected *property.SchemaGroup - err error + err string }{ { name: "success reearth/cesium", @@ -544,7 +539,6 @@ func TestSchemaGroup(t *testing.T) { }, sid: id.MustPropertySchemaID("reearth/cesium"), expected: property.NewSchemaGroup().ID("default").Title(i18n.StringFrom("marker")).Title(i18n.StringFrom(str)).Schema(id.MustPropertySchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild(), - err: nil, }, { name: "fail invalid schema field", @@ -571,7 +565,7 @@ func TestSchemaGroup(t *testing.T) { }, sid: id.MustPropertySchemaID("reearth/cesium"), expected: nil, - err: fmt.Errorf("schema field: invalid value type"), + err: "field (location): invalid value type: xx", }, } for _, tc := range testCases { @@ -579,7 +573,7 @@ func TestSchemaGroup(t *testing.T) { t.Run(tc.name, func(tt 
*testing.T) { tt.Parallel() res, err := tc.psg.schemaGroup(tc.sid) - if err == nil { + if tc.err == "" { assert.Equal(tt, tc.expected.Title().String(), res.Title().String()) assert.Equal(tt, tc.expected.Title(), res.Title()) assert.Equal(tt, tc.expected.Schema(), res.Schema()) @@ -589,7 +583,7 @@ func TestSchemaGroup(t *testing.T) { assert.NotNil(tt, tc.expected.Field(exf.ID())) } } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(tt, tc.err, err.Error()) } }) } @@ -741,14 +735,14 @@ func TestSchemaField(t *testing.T) { t.Run(tc.name, func(tt *testing.T) { tt.Parallel() res, err := tc.psg.schemaField() - if err == nil { + if tc.err == nil { assert.Equal(tt, tc.expected.Title(), res.Title()) assert.Equal(tt, tc.expected.Description(), res.Description()) assert.Equal(tt, tc.expected.Suffix(), res.Suffix()) assert.Equal(tt, tc.expected.Prefix(), res.Prefix()) assert.Equal(tt, tc.expected.Choices(), res.Choices()) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(tt, tc.err, rerror.Get(err).Err) } }) } diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 59b77e62f..57a644a35 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -50,9 +50,9 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani continue } - if t.Title != nil { + if t.Name != nil { name := m.Plugin.Name() - name[lang] = *t.Title + name[lang] = *t.Name m.Plugin.Rename(name) } @@ -68,9 +68,9 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani continue } - if te.Title != nil { + if te.Name != nil { name := ext.Name() - name[lang] = *te.Title + name[lang] = *te.Name ext.Rename(name) } diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index b1eb966d7..1dd0b156f 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ 
b/pkg/plugin/manifest/parser_translation_test.go @@ -16,7 +16,7 @@ var expected = &TranslationRoot{ Description: sr("test plugin desc"), Extensions: map[string]TranslationExtension{ "test_ext": { - Title: sr("test ext name"), + Name: sr("test ext name"), PropertySchema: TranslationPropertySchema{ "test_ps": TranslationPropertySchemaGroup{ Description: sr("test ps desc"), @@ -33,7 +33,7 @@ var expected = &TranslationRoot{ }, }, }, - Title: sr("test plugin name"), + Name: sr("test plugin name"), Schema: nil, } diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index 8ec66eb70..0258fee9a 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/_n/99kwktfn5ml3fmw3fbn575hh0000gn/T/go-build181274042/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/var/folders/3v/hjc_nxpn6f70hr_7l2l6mrfh0000gn/T/go-build3885740526/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` @@ -17,10 +17,10 @@ type Extension struct { Description *string `json:"description,omitempty"` ID ID `json:"id"` Icon *string `json:"icon,omitempty"` + Name string `json:"name"` Schema *PropertySchema `json:"schema,omitempty"` - Title string `json:"title"` Type string `json:"type"` - Visualizer string `json:"visualizer"` + Visualizer *string `json:"visualizer,omitempty"` WidgetLayout *WidgetLayout `json:"widgetLayout,omitempty"` } @@ -90,7 +90,7 @@ type Root struct { Repository *string `json:"repository,omitempty"` Schema *PropertySchema `json:"schema,omitempty"` System bool `json:"system,omitempty"` - Title string `json:"title"` + Name string `json:"name"` Version string `json:"version,omitempty"` } diff --git a/pkg/plugin/manifest/schema_translation.go 
b/pkg/plugin/manifest/schema_translation.go index c8604bc89..7d512c19f 100644 --- a/pkg/plugin/manifest/schema_translation.go +++ b/pkg/plugin/manifest/schema_translation.go @@ -2,7 +2,7 @@ package manifest type TranslationExtension struct { Description *string `json:"description,omitempty"` - Title *string `json:"title,omitempty"` + Name *string `json:"name,omitempty"` PropertySchema TranslationPropertySchema `json:"propertySchema,omitempty"` } @@ -23,6 +23,6 @@ type TranslationPropertySchemaGroup struct { type TranslationRoot struct { Description *string `json:"description,omitempty"` Extensions map[string]TranslationExtension `json:"extensions,omitempty"` - Title *string `json:"title,omitempty"` + Name *string `json:"name,omitempty"` Schema TranslationPropertySchema `json:"schema,omitempty"` } diff --git a/pkg/plugin/manifest/testdata/test.yml b/pkg/plugin/manifest/testdata/test.yml index 34b4668c3..4b918b0ed 100644 --- a/pkg/plugin/manifest/testdata/test.yml +++ b/pkg/plugin/manifest/testdata/test.yml @@ -1,5 +1,5 @@ id: aaa -title: bbb +name: bbb version: 1.1.1 extensions: - id: hoge diff --git a/pkg/plugin/manifest/testdata/translation.yml b/pkg/plugin/manifest/testdata/translation.yml index af00d8155..553a4a067 100644 --- a/pkg/plugin/manifest/testdata/translation.yml +++ b/pkg/plugin/manifest/testdata/translation.yml @@ -1,24 +1,24 @@ { - 'description': 'test plugin desc', - 'title': 'test plugin name', - 'extensions': + "description": "test plugin desc", + "name": "test plugin name", + "extensions": { - 'test_ext': + "test_ext": { - 'title': 'test ext name', - 'propertySchema': + "name": "test ext name", + "propertySchema": { - 'test_ps': + "test_ps": { - 'description': 'test ps desc', - 'title': 'test ps title', - 'fields': + "description": "test ps desc", + "title": "test ps title", + "fields": { - 'test_field': + "test_field": { - 'title': 'test field name', - 'description': 'test field desc', - 'choices': { 'test_key': 'test choice value' }, + "title": 
"test field name", + "description": "test field desc", + "choices": { "test_key": "test choice value" }, }, }, }, diff --git a/pkg/plugin/manifest/testdata/translation_merge.yml b/pkg/plugin/manifest/testdata/translation_merge.yml index 0ac599781..6f23f4a08 100644 --- a/pkg/plugin/manifest/testdata/translation_merge.yml +++ b/pkg/plugin/manifest/testdata/translation_merge.yml @@ -1,29 +1,29 @@ { - 'id': 'xxx', - 'title': 'aaa', - 'version': '1.1.1', - 'description': 'ddd', - 'extensions': + "id": "xxx", + "name": "aaa", + "version": "1.1.1", + "description": "ddd", + "extensions": [ { - 'id': 'test_ext', - 'title': 'ttt', - 'visualizer': 'cesium', - 'type': 'primitive', - 'schema': + "id": "test_ext", + "name": "ttt", + "visualizer": "cesium", + "type": "primitive", + "schema": { - 'groups': + "groups": [ { - 'id': 'test_ps', - 'title': 'sss', - 'fields': + "id": "test_ps", + "title": "sss", + "fields": [ { - 'id': 'test_field', - 'title': 'nnn', - 'type': 'string', - 'description': 'kkk', + "id": "test_field", + "title": "nnn", + "type": "string", + "description": "kkk", }, ], }, diff --git a/pkg/plugin/pluginpack/package.go b/pkg/plugin/pluginpack/package.go index 2d5c385e3..18f66245e 100644 --- a/pkg/plugin/pluginpack/package.go +++ b/pkg/plugin/pluginpack/package.go @@ -10,6 +10,7 @@ import ( "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin/manifest" + "github.com/reearth/reearth-backend/pkg/rerror" ) const manfiestFilePath = "reearth.yml" @@ -22,18 +23,18 @@ type Package struct { func PackageFromZip(r io.Reader, scene *id.SceneID, sizeLimit int64) (*Package, error) { b, err := io.ReadAll(io.LimitReader(r, sizeLimit)) if err != nil { - return nil, err + return nil, rerror.From("zip read error", err) } zr, err := zip.NewReader(bytes.NewReader(b), int64(len(b))) if err != nil { - return nil, err + return nil, rerror.From("zip open error", err) } basePath := 
file.ZipBasePath(zr) f, err := zr.Open(path.Join(basePath, manfiestFilePath)) if err != nil { - return nil, err + return nil, rerror.From("manifest open error", err) } defer func() { _ = f.Close() @@ -41,7 +42,7 @@ func PackageFromZip(r io.Reader, scene *id.SceneID, sizeLimit int64) (*Package, m, err := manifest.Parse(f, scene) if err != nil { - return nil, err + return nil, rerror.From("invalid manifest", err) } return &Package{ diff --git a/pkg/plugin/pluginpack/testdata/test.zip b/pkg/plugin/pluginpack/testdata/test.zip index cf317e25a09c1842ec67cd377cc6a19641c8d7dd..b13b0aba34f9750cd98e656a318482707130cddb 100644 GIT binary patch delta 322 zcmbQvHkEDS5`A{2Jwb_#dx9q3^58vDXA{f-1&)*T8O38Ifs!y>TEWf0$nuqufq_K? zsAzRqz$u@=O+f)#p1OLPCp3M}p9#Jc7@*~?t99d(8K>?%(43;#O0O_LC)Wo8a486+S9Iy#6Pl0G;2TWeX nsLS|&@&!gwW}w@EhFdW^F)~a{5QDjC@<%2Gw#C3eU|;|Myf<4o delta 342 zcmbQrHl1zae*@-tS+2=OjLz%{vRsKEb`7KEWEVz}iDHs4p^0W5^wp2_s{n_)zB|KkwzVIk6Qk|)4xiaI_&a9mb0p9E!TP1DV zO@R6tJtq4z=}LkeP?B0)q7Sw&D262wgh8%;$tW{GN xYRt?_Nv+V!Du(NY`536z7UVyeWibDNYy*KMjS@hA;PLI`_e=_GOMz+_7yyfOUTXjV diff --git a/pkg/plugin/pluginpack/testdata/test/index.js b/pkg/plugin/pluginpack/testdata/test/index.js new file mode 100644 index 000000000..6b2b3db0f --- /dev/null +++ b/pkg/plugin/pluginpack/testdata/test/index.js @@ -0,0 +1 @@ +console.log("hello world"); diff --git a/pkg/plugin/pluginpack/testdata/test/reearth.yml b/pkg/plugin/pluginpack/testdata/test/reearth.yml new file mode 100644 index 000000000..65acfab82 --- /dev/null +++ b/pkg/plugin/pluginpack/testdata/test/reearth.yml @@ -0,0 +1 @@ +{ "id": "testplugin", "version": "1.0.1", "name": "testplugin" } diff --git a/pkg/plugin/pluginpack/testdata/test/test/foo.bar b/pkg/plugin/pluginpack/testdata/test/test/foo.bar new file mode 100644 index 000000000..9daeafb98 --- /dev/null +++ b/pkg/plugin/pluginpack/testdata/test/test/foo.bar @@ -0,0 +1 @@ +test diff --git a/pkg/rerror/error.go b/pkg/rerror/error.go index 892be4bec..b38526e09 100644 --- 
a/pkg/rerror/error.go +++ b/pkg/rerror/error.go @@ -45,9 +45,10 @@ func (e *ErrInternal) Unwrap() error { // This is useful for displaying a hierarchical error message cleanly and searching by label later to retrieve a wrapped error. // Currently, Go standard error library does not support these use cases. That's why we need our own error type. type Error struct { - Label error - Err error - Hidden bool + Label error + Err error + Hidden bool + Separate bool } // From creates an Error with string label. @@ -55,6 +56,11 @@ func From(label string, err error) *Error { return &Error{Label: errors.New(label), Err: err} } +// From creates an Error with string label, but separated from wrapped error message when the error is printed. +func FromSep(label string, err error) *Error { + return &Error{Label: errors.New(label), Err: err, Separate: true} +} + // Error implements error interface. func (e *Error) Error() string { if e == nil { @@ -63,8 +69,10 @@ func (e *Error) Error() string { if e.Hidden { return e.Label.Error() } - if e2, ok := e.Err.(*Error); ok { - return fmt.Sprintf("%s.%s", e.Label, e2) + if !e.Separate { + if e2, ok := e.Err.(*Error); ok { + return fmt.Sprintf("%s.%s", e.Label, e2) + } } return fmt.Sprintf("%s: %s", e.Label, e.Err) } @@ -126,8 +134,9 @@ func As(err error, label error) error { func With(label error) func(error) *Error { return func(err error) *Error { return &Error{ - Label: label, - Err: err, + Label: label, + Err: err, + Separate: true, } } } diff --git a/pkg/rerror/error_test.go b/pkg/rerror/error_test.go index 018f2c373..d254338d2 100644 --- a/pkg/rerror/error_test.go +++ b/pkg/rerror/error_test.go @@ -45,6 +45,9 @@ func TestError(t *testing.T) { var nilerr *Error assert.Equal(t, "", nilerr.Error()) assert.Nil(t, nilerr.Unwrap()) + + err6 := &Error{Label: errors.New("d"), Err: &Error{Label: errors.New("e"), Err: &Error{Label: errors.New("f"), Err: errors.New("g")}}, Separate: true} + assert.Equal(t, "d: e.f: g", err6.Error()) } func 
TestFrom(t *testing.T) { @@ -55,6 +58,15 @@ func TestFrom(t *testing.T) { assert.False(t, err.Hidden) } +func TestFromSep(t *testing.T) { + werr := &Error{Label: errors.New("wrapped"), Err: errors.New("wrapped2")} + err := FromSep("label", werr) + assert.Equal(t, "label", err.Label.Error()) + assert.Same(t, werr, err.Err) + assert.False(t, err.Hidden) + assert.True(t, err.Separate) +} + func TestGet(t *testing.T) { werr := &Error{Label: errors.New("hoge"), Err: errors.New("wrapped")} err := fmt.Errorf("wrapped: %w", werr) @@ -124,16 +136,18 @@ func TestWith(t *testing.T) { label := errors.New("label") err := With(label)(werr) assert.Equal(t, &Error{ - Label: label, - Err: werr, + Label: label, + Err: werr, + Separate: true, }, err) assert.Same(t, label, err.Label) assert.Same(t, werr, err.Err) err = With(label)(nil) assert.Equal(t, &Error{ - Label: label, - Err: nil, + Label: label, + Err: nil, + Separate: true, }, err) assert.Same(t, label, err.Label) assert.Nil(t, err.Err) diff --git a/plugin_manifest_schema.json b/plugin_manifest_schema.json index dc8379f73..1cf2a3f95 100644 --- a/plugin_manifest_schema.json +++ b/plugin_manifest_schema.json @@ -288,7 +288,7 @@ "id": { "$ref": "#/definitions/id" }, - "title": { + "name": { "type": "string" }, "description": { @@ -304,7 +304,10 @@ ] }, "visualizer": { - "type": "string", + "type": [ + "string", + "null" + ], "enum": [ "cesium" ] @@ -365,8 +368,7 @@ }, "required": [ "id", - "title", - "visualizer", + "name", "type" ], "additionalProperties": false @@ -378,7 +380,7 @@ "id": { "$ref": "#/definitions/id" }, - "title": { + "name": { "type": "string" }, "system": { @@ -423,10 +425,10 @@ }, "required": [ "id", - "title" + "name" ], "additionalProperties": false } }, "$ref": "#/definitions/root" -} \ No newline at end of file +} diff --git a/plugin_manifest_schema_translation.json b/plugin_manifest_schema_translation.json index de2a81652..2a6f20ebb 100644 --- a/plugin_manifest_schema_translation.json +++ 
b/plugin_manifest_schema_translation.json @@ -74,7 +74,7 @@ "type": "object", "additionalProperties": false, "properties": { - "title": { + "name": { "type": [ "string", "null" @@ -96,7 +96,7 @@ "type": "object", "additionalProperties": false, "properties": { - "title": { + "name": { "type": [ "string", "null" diff --git a/schema.graphql b/schema.graphql index e972c803a..e5edc7a38 100644 --- a/schema.graphql +++ b/schema.graphql @@ -326,7 +326,7 @@ type PluginExtension { description: String! icon: String! widgetLayout: WidgetLayout - visualizer: Visualizer! + visualizer: Visualizer propertySchemaId: PropertySchemaID! allTranslatedName: TranslatedString allTranslatedDescription: TranslatedString From 0a658a8fef42a1bd2f97c7a926bcd0e6a9059aa9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 29 Sep 2021 13:02:37 +0900 Subject: [PATCH 087/253] fix: translated fields in plugin gql --- internal/adapter/gql/generated.go | 117 ++++++++++++-------- internal/adapter/gql/gqlmodel/models_gen.go | 2 +- internal/adapter/gql/resolver_plugin.go | 17 +-- schema.graphql | 6 +- 4 files changed, 82 insertions(+), 60 deletions(-) diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 39cddd042..9e7988274 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -1103,7 +1103,8 @@ type MutationResolver interface { } type PluginResolver interface { Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) - + TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, error) } type PluginExtensionResolver interface { @@ -5881,12 +5882,12 @@ type Plugin { repositoryUrl: String! propertySchemaId: PropertySchemaID extensions: [PluginExtension!]! 
- scene: Scene @goField(forceResolver: true) scenePlugin(sceneId: ID): ScenePlugin allTranslatedDescription: TranslatedString allTranslatedName: TranslatedString - translatedName(lang: String): String! - translatedDescription(lang: String): String! + scene: Scene @goField(forceResolver: true) + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) propertySchema: PropertySchema @goField(forceResolver: true) } @@ -18720,7 +18721,7 @@ func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphq return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18731,14 +18732,21 @@ func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.Col Object: "Plugin", Field: field, Args: nil, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Plugin_scenePlugin_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Plugin().Scene(rctx, obj) + return obj.ScenePlugin, nil }) if err != nil { ec.Error(ctx, err) @@ -18747,12 +18755,12 @@ func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.Col if resTmp == 
nil { return graphql.Null } - res := resTmp.(*gqlmodel.Scene) + res := resTmp.(*gqlmodel.ScenePlugin) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18768,16 +18776,9 @@ func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graph } ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Plugin_scenePlugin_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.ScenePlugin, nil + return obj.AllTranslatedDescription, nil }) if err != nil { ec.Error(ctx, err) @@ -18786,12 +18787,12 @@ func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*gqlmodel.ScenePlugin) + res := resTmp.(map[string]string) fc.Result = res - return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) + return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18809,7 +18810,7 @@ func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.AllTranslatedDescription, nil + return obj.AllTranslatedName, nil }) if err != nil { ec.Error(ctx, err) @@ -18823,7 +18824,7 @@ func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { +func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -18834,14 +18835,14 @@ func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field Object: "Plugin", Field: field, Args: nil, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.AllTranslatedName, nil + return ec.resolvers.Plugin().Scene(rctx, obj) }) if err != nil { ec.Error(ctx, err) @@ -18850,9 +18851,9 @@ func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(map[string]string) + res := resTmp.(*gqlmodel.Scene) 
fc.Result = res - return ec.marshalOTranslatedString2map(ctx, field.Selections, res) + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { @@ -18866,8 +18867,8 @@ func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field gr Object: "Plugin", Field: field, Args: nil, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, } ctx = graphql.WithFieldContext(ctx, fc) @@ -18880,7 +18881,7 @@ func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.TranslatedName, nil + return ec.resolvers.Plugin().TranslatedName(rctx, obj, args["lang"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -18908,8 +18909,8 @@ func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, f Object: "Plugin", Field: field, Args: nil, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, } ctx = graphql.WithFieldContext(ctx, fc) @@ -18922,7 +18923,7 @@ func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.TranslatedDescription, nil + return ec.resolvers.Plugin().TranslatedDescription(rctx, obj, args["lang"].(*string)) }) if err != nil { ec.Error(ctx, err) @@ -34404,6 +34405,12 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "scenePlugin": + out.Values[i] = 
ec._Plugin_scenePlugin(ctx, field, obj) + case "allTranslatedDescription": + out.Values[i] = ec._Plugin_allTranslatedDescription(ctx, field, obj) + case "allTranslatedName": + out.Values[i] = ec._Plugin_allTranslatedName(ctx, field, obj) case "scene": field := field out.Concurrently(i, func() (res graphql.Marshaler) { @@ -34415,22 +34422,34 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o res = ec._Plugin_scene(ctx, field, obj) return res }) - case "scenePlugin": - out.Values[i] = ec._Plugin_scenePlugin(ctx, field, obj) - case "allTranslatedDescription": - out.Values[i] = ec._Plugin_allTranslatedDescription(ctx, field, obj) - case "allTranslatedName": - out.Values[i] = ec._Plugin_allTranslatedName(ctx, field, obj) case "translatedName": - out.Values[i] = ec._Plugin_translatedName(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_translatedName(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) case "translatedDescription": - out.Values[i] = ec._Plugin_translatedDescription(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Plugin_translatedDescription(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) case "propertySchema": field := field out.Concurrently(i, func() (res graphql.Marshaler) { diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index ea2d825d0..13495a7cb 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ 
b/internal/adapter/gql/gqlmodel/models_gen.go @@ -575,10 +575,10 @@ type Plugin struct { RepositoryURL string `json:"repositoryUrl"` PropertySchemaID *id.PropertySchemaID `json:"propertySchemaId"` Extensions []*PluginExtension `json:"extensions"` - Scene *Scene `json:"scene"` ScenePlugin *ScenePlugin `json:"scenePlugin"` AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` AllTranslatedName map[string]string `json:"allTranslatedName"` + Scene *Scene `json:"scene"` TranslatedName string `json:"translatedName"` TranslatedDescription string `json:"translatedDescription"` PropertySchema *PropertySchema `json:"propertySchema"` diff --git a/internal/adapter/gql/resolver_plugin.go b/internal/adapter/gql/resolver_plugin.go index 12bfd40fa..2648729e2 100644 --- a/internal/adapter/gql/resolver_plugin.go +++ b/internal/adapter/gql/resolver_plugin.go @@ -81,13 +81,6 @@ func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *gqlmo return DataLoadersFromContext(ctx).PropertySchema.Load(obj.PropertySchemaID) } -func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { - if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { - return s, nil - } - return obj.Name, nil -} - func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID id.ID) (*gqlmodel.SceneWidget, error) { exit := trace(ctx) defer exit() @@ -96,6 +89,16 @@ func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel return s.Widget(obj.PluginID, obj.ExtensionID), err } +func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { + exit := trace(ctx) + defer exit() + + if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { + return s, nil + } + return obj.Name, nil +} + func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj 
*gqlmodel.PluginExtension, lang *string) (string, error) { exit := trace(ctx) defer exit() diff --git a/schema.graphql b/schema.graphql index e5edc7a38..881070410 100644 --- a/schema.graphql +++ b/schema.graphql @@ -251,12 +251,12 @@ type Plugin { repositoryUrl: String! propertySchemaId: PropertySchemaID extensions: [PluginExtension!]! - scene: Scene @goField(forceResolver: true) scenePlugin(sceneId: ID): ScenePlugin allTranslatedDescription: TranslatedString allTranslatedName: TranslatedString - translatedName(lang: String): String! - translatedDescription(lang: String): String! + scene: Scene @goField(forceResolver: true) + translatedName(lang: String): String! @goField(forceResolver: true) + translatedDescription(lang: String): String! @goField(forceResolver: true) propertySchema: PropertySchema @goField(forceResolver: true) } From 579b7a5f42aece96ca36641d62f7c3307fdf18b1 Mon Sep 17 00:00:00 2001 From: KaWaite Date: Fri, 1 Oct 2021 11:35:42 +0900 Subject: [PATCH 088/253] fix: fallback widgetLocation --- internal/usecase/interactor/scene.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 77eb8b844..6f32051f5 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -228,7 +228,7 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, } } else { loc = scene.WidgetLocation{ - Zone: scene.WidgetZoneInner, + Zone: scene.WidgetZoneOuter, Section: scene.WidgetSectionLeft, Area: scene.WidgetAreaTop, } From f04710adc1046d06e8d1cc2912921ee62e62ae85 Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Mon, 11 Oct 2021 12:59:39 +0900 Subject: [PATCH 089/253] Add singleOnly to extension (#60) Co-authored-by: KaWaite --- internal/adapter/gql/generated.go | 43 +++++++++++++++++++ internal/adapter/gql/gqlmodel/convert.go | 4 ++ .../adapter/gql/gqlmodel/convert_plugin.go | 1 + 
internal/adapter/gql/gqlmodel/models_gen.go | 1 + .../infrastructure/mongo/mongodoc/plugin.go | 3 ++ pkg/builtin/manifest.yml | 3 ++ pkg/plugin/extension.go | 5 +++ pkg/plugin/extension_builder.go | 5 +++ pkg/plugin/extension_builder_test.go | 10 +++++ pkg/plugin/extension_test.go | 4 ++ pkg/plugin/manifest/convert.go | 5 +++ pkg/plugin/manifest/convert_test.go | 4 +- pkg/plugin/manifest/schema_gen.go | 5 ++- plugin_manifest_schema.json | 8 +++- schema.graphql | 1 + 15 files changed, 98 insertions(+), 4 deletions(-) diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 9e7988274..3d368029c 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -530,6 +530,7 @@ type ComplexityRoot struct { PropertySchema func(childComplexity int) int PropertySchemaID func(childComplexity int) int SceneWidget func(childComplexity int, sceneID id.ID) int + SingleOnly func(childComplexity int) int TranslatedDescription func(childComplexity int, lang *string) int TranslatedName func(childComplexity int, lang *string) int Type func(childComplexity int) int @@ -3655,6 +3656,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PluginExtension.SceneWidget(childComplexity, args["sceneId"].(id.ID)), true + case "PluginExtension.singleOnly": + if e.complexity.PluginExtension.SingleOnly == nil { + break + } + + return e.complexity.PluginExtension.SingleOnly(childComplexity), true + case "PluginExtension.translatedDescription": if e.complexity.PluginExtension.TranslatedDescription == nil { break @@ -5956,6 +5964,7 @@ type PluginExtension { name: String! description: String! icon: String! + singleOnly: Boolean widgetLayout: WidgetLayout visualizer: Visualizer propertySchemaId: PropertySchemaID! 
@@ -19182,6 +19191,38 @@ func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) _PluginExtension_singleOnly(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SingleOnly, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) +} + func (ec *executionContext) _PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -34513,6 +34554,8 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "singleOnly": + out.Values[i] = ec._PluginExtension_singleOnly(ctx, field, obj) case "widgetLayout": out.Values[i] = ec._PluginExtension_widgetLayout(ctx, field, obj) case "visualizer": diff --git a/internal/adapter/gql/gqlmodel/convert.go b/internal/adapter/gql/gqlmodel/convert.go index b5c7cfa51..f25dbf027 100644 --- a/internal/adapter/gql/gqlmodel/convert.go +++ b/internal/adapter/gql/gqlmodel/convert.go @@ -31,6 +31,10 @@ func stringToRef(s string) *string { return &s } +func BoolToRef(b bool) *bool { + return &b +} + func ToPageInfo(p *usecase.PageInfo) 
*PageInfo { if p == nil { return &PageInfo{} diff --git a/internal/adapter/gql/gqlmodel/convert_plugin.go b/internal/adapter/gql/gqlmodel/convert_plugin.go index 1fcca0e43..883a7e229 100644 --- a/internal/adapter/gql/gqlmodel/convert_plugin.go +++ b/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -21,6 +21,7 @@ func ToPlugin(p *plugin.Plugin) *Plugin { Name: pe.Name().String(), Description: pe.Description().String(), Icon: pe.Icon(), + SingleOnly: BoolToRef(pe.SingleOnly()), WidgetLayout: ToPluginWidgetLayout(pe.WidgetLayout()), PropertySchemaID: pe.Schema(), AllTranslatedDescription: pe.Description(), diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 13495a7cb..3a5a85a24 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -591,6 +591,7 @@ type PluginExtension struct { Name string `json:"name"` Description string `json:"description"` Icon string `json:"icon"` + SingleOnly *bool `json:"singleOnly"` WidgetLayout *WidgetLayout `json:"widgetLayout"` Visualizer *Visualizer `json:"visualizer"` PropertySchemaID id.PropertySchemaID `json:"propertySchemaId"` diff --git a/internal/infrastructure/mongo/mongodoc/plugin.go b/internal/infrastructure/mongo/mongodoc/plugin.go index 0ca12189f..99a60c87e 100644 --- a/internal/infrastructure/mongo/mongodoc/plugin.go +++ b/internal/infrastructure/mongo/mongodoc/plugin.go @@ -25,6 +25,7 @@ type PluginExtensionDocument struct { Icon string Schema string Visualizer string `bson:",omitempty"` + SingleOnly bool WidgetLayout *WidgetLayoutDocument } @@ -83,6 +84,7 @@ func NewPlugin(plugin *plugin.Plugin) (*PluginDocument, string) { Icon: e.Icon(), Schema: e.Schema().String(), Visualizer: string(e.Visualizer()), + SingleOnly: e.SingleOnly(), WidgetLayout: NewWidgetLayout(e.WidgetLayout()), }) } @@ -122,6 +124,7 @@ func (d *PluginDocument) Model() (*plugin.Plugin, error) { Name(e.Name). Description(e.Description). 
Icon(e.Icon). + SingleOnly(e.SingleOnly). WidgetLayout(e.WidgetLayout.Model()). Schema(psid). Build() diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 6d2829c84..1e7272603 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -1252,6 +1252,7 @@ extensions: type: widget name: Menu (legacy) description: Menu widgets + singleOnly: true widgetLayout: floating: true schema: @@ -1470,6 +1471,7 @@ extensions: type: widget name: Splash screen description: A unique start screen that will display on load of your archive(ex. display the archive's title). + singleOnly: true widgetLayout: floating: true schema: @@ -1532,6 +1534,7 @@ extensions: type: widget name: Storytelling description: SA feature that enables you to create a story. Connect points in a meaningful way so that your information can be consumed and understood easily. + singleOnly: true widgetLayout: extendable: horizontally: true diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go index 4ef63e7c4..7404b48cc 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ -33,6 +33,7 @@ type Extension struct { icon string schema id.PropertySchemaID visualizer visualizer.Visualizer + singleOnly bool widgetLayout *WidgetLayout } @@ -64,6 +65,10 @@ func (w *Extension) Visualizer() visualizer.Visualizer { return w.visualizer } +func (w *Extension) SingleOnly() bool { + return w.singleOnly +} + func (w *Extension) WidgetLayout() *WidgetLayout { if w == nil { return nil diff --git a/pkg/plugin/extension_builder.go b/pkg/plugin/extension_builder.go index d7b6ba460..65f3e7d23 100644 --- a/pkg/plugin/extension_builder.go +++ b/pkg/plugin/extension_builder.go @@ -72,6 +72,11 @@ func (b *ExtensionBuilder) Visualizer(visualizer visualizer.Visualizer) *Extensi return b } +func (b *ExtensionBuilder) SingleOnly(singleOnly bool) *ExtensionBuilder { + b.p.singleOnly = singleOnly + return b +} + func (b *ExtensionBuilder) WidgetLayout(widgetLayout *WidgetLayout) 
*ExtensionBuilder { b.p.widgetLayout = widgetLayout return b diff --git a/pkg/plugin/extension_builder_test.go b/pkg/plugin/extension_builder_test.go index 90a9b6960..8da82fe20 100644 --- a/pkg/plugin/extension_builder_test.go +++ b/pkg/plugin/extension_builder_test.go @@ -39,6 +39,12 @@ func TestExtensionBuilder_Icon(t *testing.T) { assert.Equal(t, "ccc", res.Icon()) } +func TestExtensionBuilder_SingleOnly(t *testing.T) { + var b = NewExtension() + res := b.ID("xxx").SingleOnly(true).MustBuild() + assert.Equal(t, true, res.SingleOnly()) +} + func TestExtensionBuilder_Schema(t *testing.T) { var b = NewExtension() res := b.ID("xxx").Schema(id.MustPropertySchemaID("hoge~0.1.0/fff")).MustBuild() @@ -160,6 +166,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { schema id.PropertySchemaID visualizer visualizer.Visualizer widgetLayout *WidgetLayout + singleOnly bool expected *Extension }{ { @@ -172,6 +179,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { description: i18n.StringFrom("ddd"), schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", + singleOnly: true, widgetLayout: NewWidgetLayout( false, false, true, false, &WidgetLocation{ Zone: WidgetZoneOuter, @@ -186,6 +194,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { icon: "ttt", schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", + singleOnly: true, widgetLayout: NewWidgetLayout( false, false, true, false, &WidgetLocation{ Zone: WidgetZoneOuter, @@ -226,6 +235,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { Description(tc.description). Name(tc.ename). Icon(tc.icon). + SingleOnly(tc.singleOnly). WidgetLayout(tc.widgetLayout). 
MustBuild() }) diff --git a/pkg/plugin/extension_test.go b/pkg/plugin/extension_test.go index 47713d7cb..ff34bd8a4 100644 --- a/pkg/plugin/extension_test.go +++ b/pkg/plugin/extension_test.go @@ -18,6 +18,7 @@ func TestExtension(t *testing.T) { Icon string Schema id.PropertySchemaID Visualizer visualizer.Visualizer + SingleOnly bool WidgetLayout *WidgetLayout }{ ID: "xxx", @@ -27,6 +28,7 @@ func TestExtension(t *testing.T) { Icon: "test", Schema: id.MustPropertySchemaID("hoge~0.1.0/fff"), Visualizer: "vvv", + SingleOnly: true, WidgetLayout: NewWidgetLayout(false, false, true, false, nil).Ref(), } @@ -38,6 +40,7 @@ func TestExtension(t *testing.T) { Icon("test"). WidgetLayout(NewWidgetLayout(false, false, true, false, nil).Ref()). Visualizer("vvv"). + SingleOnly(true). Type(ExtensionTypePrimitive). MustBuild() @@ -46,6 +49,7 @@ func TestExtension(t *testing.T) { assert.Equal(t, expected.Description, actual.Description()) assert.Equal(t, expected.Name, actual.Name()) assert.Equal(t, expected.Icon, actual.Icon()) + assert.Equal(t, expected.SingleOnly, actual.SingleOnly()) assert.Equal(t, expected.WidgetLayout, actual.WidgetLayout()) assert.Equal(t, expected.Schema, actual.Schema()) assert.Equal(t, expected.ID, actual.ID()) diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index 306e50657..c7303829a 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -122,12 +122,16 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, } var desc, icon string + var singleOnly bool if i.Description != nil { desc = *i.Description } if i.Icon != nil { icon = *i.Icon } + if i.SingleOnly != nil { + singleOnly = *i.SingleOnly + } ext, err := plugin.NewExtension(). ID(id.PluginExtensionID(eid)). @@ -135,6 +139,7 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, Description(i18n.StringFrom(desc)). Visualizer(viz). Type(typ). + SingleOnly(singleOnly). 
WidgetLayout(i.WidgetLayout.layout()). Icon(icon). Schema(schema.ID()). diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index a10471c53..f963938fe 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -159,6 +159,7 @@ func TestExtension(t *testing.T) { cesium := "cesium" d := "ddd" i := "xx:/aa.bb" + tr := true testCases := []struct { name string ext Extension @@ -207,10 +208,11 @@ func TestExtension(t *testing.T) { Name: "Cesium", Schema: nil, Type: "widget", + SingleOnly: &tr, }, sys: true, pid: id.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).SingleOnly(true).MustBuild(), expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), }, { diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index 0258fee9a..1e72becb3 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/3v/hjc_nxpn6f70hr_7l2l6mrfh0000gn/T/go-build3885740526/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/var/folders/_n/99kwktfn5ml3fmw3fbn575hh0000gn/T/go-build3305837952/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` @@ -19,6 +19,7 @@ type Extension struct { Icon *string `json:"icon,omitempty"` Name string `json:"name"` Schema *PropertySchema `json:"schema,omitempty"` + SingleOnly *bool `json:"singleOnly,omitempty"` Type string 
`json:"type"` Visualizer *string `json:"visualizer,omitempty"` WidgetLayout *WidgetLayout `json:"widgetLayout,omitempty"` @@ -87,10 +88,10 @@ type Root struct { Extensions []Extension `json:"extensions,omitempty"` ID ID `json:"id"` Main *string `json:"main,omitempty"` + Name string `json:"name"` Repository *string `json:"repository,omitempty"` Schema *PropertySchema `json:"schema,omitempty"` System bool `json:"system,omitempty"` - Name string `json:"name"` Version string `json:"version,omitempty"` } diff --git a/plugin_manifest_schema.json b/plugin_manifest_schema.json index 1cf2a3f95..41294f965 100644 --- a/plugin_manifest_schema.json +++ b/plugin_manifest_schema.json @@ -322,6 +322,12 @@ "infobox" ] }, + "singleOnly": { + "type": [ + "boolean", + "null" + ] + }, "widgetLayout": { "type": [ "object", @@ -431,4 +437,4 @@ } }, "$ref": "#/definitions/root" -} +} \ No newline at end of file diff --git a/schema.graphql b/schema.graphql index 881070410..31ca85d22 100644 --- a/schema.graphql +++ b/schema.graphql @@ -325,6 +325,7 @@ type PluginExtension { name: String! description: String! icon: String! + singleOnly: Boolean widgetLayout: WidgetLayout visualizer: Visualizer propertySchemaId: PropertySchemaID! 
From 65ae322de0d0d515753d73da1e50aad5f1cff248 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Wed, 20 Oct 2021 04:32:33 +0300 Subject: [PATCH 090/253] chore: updating modules (#62) --- go.mod | 10 +++++----- go.sum | 20 ++++++++++++-------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/go.mod b/go.mod index 37144a4b9..e0d5e2fa1 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0 github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect - github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 // indirect + github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 github.com/blang/semver v3.5.1+incompatible github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect @@ -52,11 +52,11 @@ require ( go.opentelemetry.io/otel v0.7.0 go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c // indirect - golang.org/x/mod v0.5.0 // indirect - golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 // indirect + golang.org/x/mod v0.5.1 // indirect + golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac // indirect golang.org/x/text v0.3.6 golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect - golang.org/x/tools v0.1.5 + golang.org/x/tools v0.1.7 google.golang.org/api v0.51.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.0 @@ -91,7 +91,7 @@ require ( github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect - golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420 // indirect + golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d // indirect golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect 
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect diff --git a/go.sum b/go.sum index a9fbd91d3..2e430c7d7 100644 --- a/go.sum +++ b/go.sum @@ -69,8 +69,8 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafo github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922 h1:8ypNbf5sd3Sm3cKJ9waOGoQv6dKAFiFty9L6NP1AqJ4= -github.com/alecthomas/units v0.0.0-20210912230133-d1bdfacee922/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= +github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a h1:E/8AP5dFtMhl5KPJz66Kt9G0n+7Sn41Fy1wv9/jHOrc= +github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= @@ -475,6 +475,7 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.3.4/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= 
go.mongodb.org/mongo-driver v1.5.1 h1:9nOVLGDfOaZ9R0tBumx/BcuqkbFpyTCU2r/Po7A2azI= go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= @@ -550,8 +551,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -589,8 +590,9 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420 h1:a8jGStKg0XqKDlKqjLrXn0ioF5MH36pT7Z0BRTqLhbk= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 
v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -674,8 +676,9 @@ golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210915083310-ed5796bab164 h1:7ZDGnxgHAMw7thfC5bEos0RDAccZKxioiWBhfIe+tvw= -golang.org/x/sys v0.0.0-20210915083310-ed5796bab164/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -755,8 +758,9 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5 
h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From 163fcf83cefb5cfb1f12ceca33850ef267a69921 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Mon, 25 Oct 2021 07:19:16 +0300 Subject: [PATCH 091/253] feat: tag system (#67) * feat: tag system domain models (#39) * feat: tag system domain models * refactor: * add tag interface * tag -> group and tag->item conversation * testing: generate test cases for the tagID * resolve notes * fix unit tests errors * add NewId test code fix NewId func * add more test cases refactor some parts * feat: tag system data-layer (mongo) (#44) * feat: tag system data-layer (mongo) * remove len > 0 check * goimport * Update pkg/tag/group_builder.go Co-authored-by: rot1024 * Update pkg/tag/item_builder.go Co-authored-by: rot1024 * rename itemFrom and groupFrom funcs Co-authored-by: rot1024 * feat: create tag group and tag item (#45) * tag item and group schema * feat: creat tags (GQL schema) * tag items and tag groups resolvers * datalayer (dummy memory) and usecases * receive list by reference * check if nil for list * resolve notes * generate new models * feat: memory infrastructure (#46) * refactor: implement memory infrastructure * test: implement memory infrastructure test cases * test: fix FindByScene test case * feat: attach/detach tag from layer (#50) * tag item and group schema * feat: creat tags (GQL schema) * tag items and tag 
groups resolvers * datalayer (dummy memory) and usecases * receive list by reference * check if nil for list * feat: introduce tags to layers * feat: attach/detach tags from layers * fix imports * refactor: resolve notes * test: test units for tags * refactor: resolve notes * feat: attach/detach tag item from group (#52) * refactor: transform group tags list to reference * feat: attach/detach tags * refactor: use params as use-case input * test: mongodoc testing (#61) * test: mongodoc testing * resolve notes * feat: remove tag (#58) * feat: remove a tag (init) * feat: remove tag * feat: remove tag usecase and infra * resolve notes * feat: tag system queries (#54) * feat: tag system queries * resolve notes * feat: update tag (#49) * tag item and group schema * feat: creat tags (GQL schema) * tag items and tag groups resolvers * datalayer (dummy memory) and usecases * receive list by reference * check if nil for list * feat: rename tag group * refactor: rename -> update * resolve notes * resolve notes * change find by id func return type * Merge branch 'tag-system' of https://github.com/reearth/reearth-backend into tag/update-group-label # Conflicts: # internal/adapter/gql/generated.go # internal/adapter/gql/gqlmodel/convert_tag.go * fix testing * resolve notes * resolve notes * resolve notes Co-authored-by: HideBa Co-authored-by: rot1024 --- internal/adapter/gql/generated.go | 3335 +++++++++++++++-- .../adapter/gql/gqldataloader/dataloader.go | 3 + .../gql/gqldataloader/taggrouploader_gen.go | 225 ++ .../gql/gqldataloader/tagitemloader_gen.go | 225 ++ .../gql/gqldataloader/tagloader_gen.go | 225 ++ .../adapter/gql/gqlmodel/convert_layer.go | 14 + internal/adapter/gql/gqlmodel/convert_tag.go | 52 + internal/adapter/gql/gqlmodel/models_gen.go | 115 + internal/adapter/gql/loader.go | 11 + internal/adapter/gql/loader_tag.go | 264 ++ internal/adapter/gql/resolver_layer.go | 84 + internal/adapter/gql/resolver_mutation_tag.go | 107 + internal/adapter/gql/resolver_scene.go 
| 16 + internal/adapter/gql/resolver_tag.go | 45 + internal/infrastructure/memory/container.go | 1 + internal/infrastructure/memory/layer.go | 15 + internal/infrastructure/memory/layer_test.go | 29 + internal/infrastructure/memory/tag.go | 220 ++ internal/infrastructure/memory/tag_test.go | 348 ++ internal/infrastructure/mongo/container.go | 1 + internal/infrastructure/mongo/layer.go | 11 + .../infrastructure/mongo/mongodoc/layer.go | 24 +- internal/infrastructure/mongo/mongodoc/tag.go | 169 + .../infrastructure/mongo/mongodoc/tag_test.go | 491 +++ internal/infrastructure/mongo/tag.go | 289 ++ internal/usecase/interactor/common.go | 1 + internal/usecase/interactor/layer.go | 66 + internal/usecase/interactor/tag.go | 362 ++ internal/usecase/interfaces/common.go | 1 + internal/usecase/interfaces/layer.go | 2 + internal/usecase/interfaces/tag.go | 59 + internal/usecase/repo/container.go | 1 + internal/usecase/repo/layer.go | 1 + internal/usecase/repo/tag.go | 26 + pkg/id/gen.go | 2 + pkg/id/tag_gen.go | 297 ++ pkg/id/tag_gen_test.go | 1011 +++++ pkg/layer/builder.go | 6 + pkg/layer/group.go | 21 + pkg/layer/group_builder.go | 6 + pkg/layer/group_builder_test.go | 15 + pkg/layer/group_test.go | 16 + pkg/layer/item.go | 21 + pkg/layer/item_builder.go | 6 + pkg/layer/item_builder_test.go | 15 + pkg/layer/item_test.go | 36 + pkg/layer/layer.go | 10 + pkg/plugin/manifest/schema_gen.go | 2 +- pkg/tag/group.go | 10 + pkg/tag/group_builder.go | 63 + pkg/tag/group_test.go | 112 + pkg/tag/item.go | 22 + pkg/tag/item_builder.go | 67 + pkg/tag/item_test.go | 117 + pkg/tag/list.go | 56 + pkg/tag/list_test.go | 74 + pkg/tag/tag.go | 55 + pkg/tag/tag_test.go | 33 + schema.graphql | 131 +- 59 files changed, 8741 insertions(+), 301 deletions(-) create mode 100644 internal/adapter/gql/gqldataloader/taggrouploader_gen.go create mode 100644 internal/adapter/gql/gqldataloader/tagitemloader_gen.go create mode 100644 internal/adapter/gql/gqldataloader/tagloader_gen.go create mode 100644 
internal/adapter/gql/gqlmodel/convert_tag.go create mode 100644 internal/adapter/gql/loader_tag.go create mode 100644 internal/adapter/gql/resolver_mutation_tag.go create mode 100644 internal/adapter/gql/resolver_tag.go create mode 100644 internal/infrastructure/memory/layer_test.go create mode 100644 internal/infrastructure/memory/tag.go create mode 100644 internal/infrastructure/memory/tag_test.go create mode 100644 internal/infrastructure/mongo/mongodoc/tag.go create mode 100644 internal/infrastructure/mongo/mongodoc/tag_test.go create mode 100644 internal/infrastructure/mongo/tag.go create mode 100644 internal/usecase/interactor/tag.go create mode 100644 internal/usecase/interfaces/tag.go create mode 100644 internal/usecase/repo/tag.go create mode 100644 pkg/id/tag_gen.go create mode 100644 pkg/id/tag_gen_test.go create mode 100644 pkg/layer/group_builder_test.go create mode 100644 pkg/layer/item_builder_test.go create mode 100644 pkg/tag/group.go create mode 100644 pkg/tag/group_builder.go create mode 100644 pkg/tag/group_test.go create mode 100644 pkg/tag/item.go create mode 100644 pkg/tag/item_builder.go create mode 100644 pkg/tag/item_test.go create mode 100644 pkg/tag/list.go create mode 100644 pkg/tag/list_test.go create mode 100644 pkg/tag/tag.go create mode 100644 pkg/tag/tag_test.go diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 3d368029c..aec43fda8 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -73,6 +73,7 @@ type ResolverRoot interface { Scene() SceneResolver ScenePlugin() ScenePluginResolver SceneWidget() SceneWidgetResolver + TagItem() TagItemResolver Team() TeamResolver TeamMember() TeamMemberResolver User() UserResolver @@ -144,6 +145,14 @@ type ComplexityRoot struct { Node func(childComplexity int) int } + AttachTagItemToGroupPayload struct { + Tag func(childComplexity int) int + } + + AttachTagToLayerPayload struct { + Layer func(childComplexity int) int + } + 
Camera struct { Altitude func(childComplexity int) int Fov func(childComplexity int) int @@ -166,6 +175,14 @@ type ComplexityRoot struct { Scene func(childComplexity int) int } + CreateTagGroupPayload struct { + Tag func(childComplexity int) int + } + + CreateTagItemPayload struct { + Tag func(childComplexity int) int + } + CreateTeamPayload struct { Team func(childComplexity int) int } @@ -250,6 +267,14 @@ type ComplexityRoot struct { TeamID func(childComplexity int) int } + DetachTagFromLayerPayload struct { + Layer func(childComplexity int) int + } + + DetachTagItemFromGroupPayload struct { + Tag func(childComplexity int) int + } + ImportDatasetPayload struct { DatasetSchema func(childComplexity int) int } @@ -328,6 +353,8 @@ type ComplexityRoot struct { Scene func(childComplexity int) int SceneID func(childComplexity int) int ScenePlugin func(childComplexity int) int + TagIds func(childComplexity int) int + Tags func(childComplexity int) int } LayerItem struct { @@ -349,6 +376,8 @@ type ComplexityRoot struct { Scene func(childComplexity int) int SceneID func(childComplexity int) int ScenePlugin func(childComplexity int) int + TagIds func(childComplexity int) int + Tags func(childComplexity int) int } MergedInfobox struct { @@ -446,14 +475,20 @@ type ComplexityRoot struct { AddMemberToTeam func(childComplexity int, input gqlmodel.AddMemberToTeamInput) int AddPropertyItem func(childComplexity int, input gqlmodel.AddPropertyItemInput) int AddWidget func(childComplexity int, input gqlmodel.AddWidgetInput) int + AttachTagItemToGroup func(childComplexity int, input gqlmodel.AttachTagItemToGroupInput) int + AttachTagToLayer func(childComplexity int, input gqlmodel.AttachTagToLayerInput) int CreateAsset func(childComplexity int, input gqlmodel.CreateAssetInput) int CreateInfobox func(childComplexity int, input gqlmodel.CreateInfoboxInput) int CreateProject func(childComplexity int, input gqlmodel.CreateProjectInput) int CreateScene func(childComplexity int, input 
gqlmodel.CreateSceneInput) int + CreateTagGroup func(childComplexity int, input gqlmodel.CreateTagGroupInput) int + CreateTagItem func(childComplexity int, input gqlmodel.CreateTagItemInput) int CreateTeam func(childComplexity int, input gqlmodel.CreateTeamInput) int DeleteMe func(childComplexity int, input gqlmodel.DeleteMeInput) int DeleteProject func(childComplexity int, input gqlmodel.DeleteProjectInput) int DeleteTeam func(childComplexity int, input gqlmodel.DeleteTeamInput) int + DetachTagFromLayer func(childComplexity int, input gqlmodel.DetachTagFromLayerInput) int + DetachTagItemFromGroup func(childComplexity int, input gqlmodel.DetachTagItemFromGroupInput) int ImportDataset func(childComplexity int, input gqlmodel.ImportDatasetInput) int ImportDatasetFromGoogleSheet func(childComplexity int, input gqlmodel.ImportDatasetFromGoogleSheetInput) int ImportLayer func(childComplexity int, input gqlmodel.ImportLayerInput) int @@ -472,6 +507,7 @@ type ComplexityRoot struct { RemoveMyAuth func(childComplexity int, input gqlmodel.RemoveMyAuthInput) int RemovePropertyField func(childComplexity int, input gqlmodel.RemovePropertyFieldInput) int RemovePropertyItem func(childComplexity int, input gqlmodel.RemovePropertyItemInput) int + RemoveTag func(childComplexity int, input gqlmodel.RemoveTagInput) int RemoveWidget func(childComplexity int, input gqlmodel.RemoveWidgetInput) int Signup func(childComplexity int, input gqlmodel.SignupInput) int SyncDataset func(childComplexity int, input gqlmodel.SyncDatasetInput) int @@ -484,6 +520,7 @@ type ComplexityRoot struct { UpdateProject func(childComplexity int, input gqlmodel.UpdateProjectInput) int UpdatePropertyItems func(childComplexity int, input gqlmodel.UpdatePropertyItemInput) int UpdatePropertyValue func(childComplexity int, input gqlmodel.UpdatePropertyValueInput) int + UpdateTag func(childComplexity int, input gqlmodel.UpdateTagInput) int UpdateTeam func(childComplexity int, input gqlmodel.UpdateTeamInput) int 
UpdateWidget func(childComplexity int, input gqlmodel.UpdateWidgetInput) int UpdateWidgetAlignSystem func(childComplexity int, input gqlmodel.UpdateWidgetAlignSystemInput) int @@ -768,6 +805,10 @@ type ComplexityRoot struct { Team func(childComplexity int) int } + RemoveTagPayload struct { + TagID func(childComplexity int) int + } + RemoveWidgetPayload struct { Scene func(childComplexity int) int WidgetID func(childComplexity int) int @@ -786,6 +827,8 @@ type ComplexityRoot struct { PropertyID func(childComplexity int) int RootLayer func(childComplexity int) int RootLayerID func(childComplexity int) int + TagIds func(childComplexity int) int + Tags func(childComplexity int) int Team func(childComplexity int) int TeamID func(childComplexity int) int UpdatedAt func(childComplexity int) int @@ -830,6 +873,25 @@ type ComplexityRoot struct { URL func(childComplexity int) int } + TagGroup struct { + ID func(childComplexity int) int + Label func(childComplexity int) int + SceneID func(childComplexity int) int + Tags func(childComplexity int) int + } + + TagItem struct { + ID func(childComplexity int) int + Label func(childComplexity int) int + LinkedDataset func(childComplexity int) int + LinkedDatasetField func(childComplexity int) int + LinkedDatasetFieldID func(childComplexity int) int + LinkedDatasetID func(childComplexity int) int + LinkedDatasetSchema func(childComplexity int) int + LinkedDatasetSchemaID func(childComplexity int) int + SceneID func(childComplexity int) int + } + Team struct { Assets func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int ID func(childComplexity int) int @@ -877,6 +939,10 @@ type ComplexityRoot struct { Team func(childComplexity int) int } + UpdateTagPayload struct { + Tag func(childComplexity int) int + } + UpdateTeamPayload struct { Team func(childComplexity int) int } @@ -1002,6 +1068,8 @@ type LayerGroupResolver interface { Layers(ctx context.Context, obj *gqlmodel.LayerGroup) 
([]gqlmodel.Layer, error) Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) + + Tags(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Tag, error) } type LayerItemResolver interface { Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) @@ -1012,6 +1080,8 @@ type LayerItemResolver interface { Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) + + Tags(ctx context.Context, obj *gqlmodel.LayerItem) ([]gqlmodel.Tag, error) } type MergedInfoboxResolver interface { Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) @@ -1101,6 +1171,14 @@ type MutationResolver interface { MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) + AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) + DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) + CreateTagItem(ctx context.Context, input gqlmodel.CreateTagItemInput) (*gqlmodel.CreateTagItemPayload, error) + CreateTagGroup(ctx context.Context, input gqlmodel.CreateTagGroupInput) (*gqlmodel.CreateTagGroupPayload, error) + AttachTagItemToGroup(ctx context.Context, input gqlmodel.AttachTagItemToGroupInput) (*gqlmodel.AttachTagItemToGroupPayload, error) + DetachTagItemFromGroup(ctx context.Context, input 
gqlmodel.DetachTagItemFromGroupInput) (*gqlmodel.DetachTagItemFromGroupPayload, error) + UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) + RemoveTag(ctx context.Context, input gqlmodel.RemoveTagInput) (*gqlmodel.RemoveTagPayload, error) } type PluginResolver interface { Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) @@ -1187,6 +1265,8 @@ type SceneResolver interface { RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, error) LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlmodel.SceneLockMode, error) DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) + + Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) } type ScenePluginResolver interface { Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) @@ -1197,6 +1277,11 @@ type SceneWidgetResolver interface { Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) } +type TagItemResolver interface { + LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetSchema, error) + LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) + LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) +} type TeamResolver interface { Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) @@ -1427,6 +1512,20 @@ func (e *executableSchema) Complexity(typeName, field string, 
childComplexity in return e.complexity.AssetEdge.Node(childComplexity), true + case "AttachTagItemToGroupPayload.tag": + if e.complexity.AttachTagItemToGroupPayload.Tag == nil { + break + } + + return e.complexity.AttachTagItemToGroupPayload.Tag(childComplexity), true + + case "AttachTagToLayerPayload.layer": + if e.complexity.AttachTagToLayerPayload.Layer == nil { + break + } + + return e.complexity.AttachTagToLayerPayload.Layer(childComplexity), true + case "Camera.altitude": if e.complexity.Camera.Altitude == nil { break @@ -1497,6 +1596,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.CreateScenePayload.Scene(childComplexity), true + case "CreateTagGroupPayload.tag": + if e.complexity.CreateTagGroupPayload.Tag == nil { + break + } + + return e.complexity.CreateTagGroupPayload.Tag(childComplexity), true + + case "CreateTagItemPayload.tag": + if e.complexity.CreateTagItemPayload.Tag == nil { + break + } + + return e.complexity.CreateTagItemPayload.Tag(childComplexity), true + case "CreateTeamPayload.team": if e.complexity.CreateTeamPayload.Team == nil { break @@ -1838,6 +1951,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.DeleteTeamPayload.TeamID(childComplexity), true + case "DetachTagFromLayerPayload.layer": + if e.complexity.DetachTagFromLayerPayload.Layer == nil { + break + } + + return e.complexity.DetachTagFromLayerPayload.Layer(childComplexity), true + + case "DetachTagItemFromGroupPayload.tag": + if e.complexity.DetachTagItemFromGroupPayload.Tag == nil { + break + } + + return e.complexity.DetachTagItemFromGroupPayload.Tag(childComplexity), true + case "ImportDatasetPayload.datasetSchema": if e.complexity.ImportDatasetPayload.DatasetSchema == nil { break @@ -2230,6 +2357,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerGroup.ScenePlugin(childComplexity), true + case 
"LayerGroup.tagIds": + if e.complexity.LayerGroup.TagIds == nil { + break + } + + return e.complexity.LayerGroup.TagIds(childComplexity), true + + case "LayerGroup.tags": + if e.complexity.LayerGroup.Tags == nil { + break + } + + return e.complexity.LayerGroup.Tags(childComplexity), true + case "LayerItem.extension": if e.complexity.LayerItem.Extension == nil { break @@ -2356,6 +2497,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerItem.ScenePlugin(childComplexity), true + case "LayerItem.tagIds": + if e.complexity.LayerItem.TagIds == nil { + break + } + + return e.complexity.LayerItem.TagIds(childComplexity), true + + case "LayerItem.tags": + if e.complexity.LayerItem.Tags == nil { + break + } + + return e.complexity.LayerItem.Tags(childComplexity), true + case "MergedInfobox.fields": if e.complexity.MergedInfobox.Fields == nil { break @@ -2891,6 +3046,30 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.AddWidget(childComplexity, args["input"].(gqlmodel.AddWidgetInput)), true + case "Mutation.attachTagItemToGroup": + if e.complexity.Mutation.AttachTagItemToGroup == nil { + break + } + + args, err := ec.field_Mutation_attachTagItemToGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AttachTagItemToGroup(childComplexity, args["input"].(gqlmodel.AttachTagItemToGroupInput)), true + + case "Mutation.attachTagToLayer": + if e.complexity.Mutation.AttachTagToLayer == nil { + break + } + + args, err := ec.field_Mutation_attachTagToLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AttachTagToLayer(childComplexity, args["input"].(gqlmodel.AttachTagToLayerInput)), true + case "Mutation.createAsset": if e.complexity.Mutation.CreateAsset == nil { break @@ -2939,6 +3118,30 @@ func (e *executableSchema) Complexity(typeName, field string, 
childComplexity in return e.complexity.Mutation.CreateScene(childComplexity, args["input"].(gqlmodel.CreateSceneInput)), true + case "Mutation.createTagGroup": + if e.complexity.Mutation.CreateTagGroup == nil { + break + } + + args, err := ec.field_Mutation_createTagGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateTagGroup(childComplexity, args["input"].(gqlmodel.CreateTagGroupInput)), true + + case "Mutation.createTagItem": + if e.complexity.Mutation.CreateTagItem == nil { + break + } + + args, err := ec.field_Mutation_createTagItem_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.CreateTagItem(childComplexity, args["input"].(gqlmodel.CreateTagItemInput)), true + case "Mutation.createTeam": if e.complexity.Mutation.CreateTeam == nil { break @@ -2987,6 +3190,30 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.DeleteTeam(childComplexity, args["input"].(gqlmodel.DeleteTeamInput)), true + case "Mutation.detachTagFromLayer": + if e.complexity.Mutation.DetachTagFromLayer == nil { + break + } + + args, err := ec.field_Mutation_detachTagFromLayer_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DetachTagFromLayer(childComplexity, args["input"].(gqlmodel.DetachTagFromLayerInput)), true + + case "Mutation.detachTagItemFromGroup": + if e.complexity.Mutation.DetachTagItemFromGroup == nil { + break + } + + args, err := ec.field_Mutation_detachTagItemFromGroup_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.DetachTagItemFromGroup(childComplexity, args["input"].(gqlmodel.DetachTagItemFromGroupInput)), true + case "Mutation.importDataset": if e.complexity.Mutation.ImportDataset == nil { break @@ -3203,6 +3430,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity 
in return e.complexity.Mutation.RemovePropertyItem(childComplexity, args["input"].(gqlmodel.RemovePropertyItemInput)), true + case "Mutation.removeTag": + if e.complexity.Mutation.RemoveTag == nil { + break + } + + args, err := ec.field_Mutation_removeTag_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveTag(childComplexity, args["input"].(gqlmodel.RemoveTagInput)), true + case "Mutation.removeWidget": if e.complexity.Mutation.RemoveWidget == nil { break @@ -3347,6 +3586,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.UpdatePropertyValue(childComplexity, args["input"].(gqlmodel.UpdatePropertyValueInput)), true + case "Mutation.updateTag": + if e.complexity.Mutation.UpdateTag == nil { + break + } + + args, err := ec.field_Mutation_updateTag_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateTag(childComplexity, args["input"].(gqlmodel.UpdateTagInput)), true + case "Mutation.updateTeam": if e.complexity.Mutation.UpdateTeam == nil { break @@ -4851,6 +5102,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.RemoveMemberFromTeamPayload.Team(childComplexity), true + case "RemoveTagPayload.tagId": + if e.complexity.RemoveTagPayload.TagID == nil { + break + } + + return e.complexity.RemoveTagPayload.TagID(childComplexity), true + case "RemoveWidgetPayload.scene": if e.complexity.RemoveWidgetPayload.Scene == nil { break @@ -4954,6 +5212,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Scene.RootLayerID(childComplexity), true + case "Scene.tagIds": + if e.complexity.Scene.TagIds == nil { + break + } + + return e.complexity.Scene.TagIds(childComplexity), true + + case "Scene.tags": + if e.complexity.Scene.Tags == nil { + break + } + + return e.complexity.Scene.Tags(childComplexity), 
true + case "Scene.team": if e.complexity.Scene.Team == nil { break @@ -5143,6 +5415,97 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.SyncDatasetPayload.URL(childComplexity), true + case "TagGroup.id": + if e.complexity.TagGroup.ID == nil { + break + } + + return e.complexity.TagGroup.ID(childComplexity), true + + case "TagGroup.label": + if e.complexity.TagGroup.Label == nil { + break + } + + return e.complexity.TagGroup.Label(childComplexity), true + + case "TagGroup.sceneId": + if e.complexity.TagGroup.SceneID == nil { + break + } + + return e.complexity.TagGroup.SceneID(childComplexity), true + + case "TagGroup.tags": + if e.complexity.TagGroup.Tags == nil { + break + } + + return e.complexity.TagGroup.Tags(childComplexity), true + + case "TagItem.id": + if e.complexity.TagItem.ID == nil { + break + } + + return e.complexity.TagItem.ID(childComplexity), true + + case "TagItem.label": + if e.complexity.TagItem.Label == nil { + break + } + + return e.complexity.TagItem.Label(childComplexity), true + + case "TagItem.linkedDataset": + if e.complexity.TagItem.LinkedDataset == nil { + break + } + + return e.complexity.TagItem.LinkedDataset(childComplexity), true + + case "TagItem.linkedDatasetField": + if e.complexity.TagItem.LinkedDatasetField == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetField(childComplexity), true + + case "TagItem.linkedDatasetFieldID": + if e.complexity.TagItem.LinkedDatasetFieldID == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetFieldID(childComplexity), true + + case "TagItem.linkedDatasetID": + if e.complexity.TagItem.LinkedDatasetID == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetID(childComplexity), true + + case "TagItem.linkedDatasetSchema": + if e.complexity.TagItem.LinkedDatasetSchema == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetSchema(childComplexity), true + + case "TagItem.linkedDatasetSchemaID": + if 
e.complexity.TagItem.LinkedDatasetSchemaID == nil { + break + } + + return e.complexity.TagItem.LinkedDatasetSchemaID(childComplexity), true + + case "TagItem.sceneId": + if e.complexity.TagItem.SceneID == nil { + break + } + + return e.complexity.TagItem.SceneID(childComplexity), true + case "Team.assets": if e.complexity.Team.Assets == nil { break @@ -5314,6 +5677,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.UpdateMemberOfTeamPayload.Team(childComplexity), true + case "UpdateTagPayload.tag": + if e.complexity.UpdateTagPayload.Tag == nil { + break + } + + return e.complexity.UpdateTagPayload.Tag(childComplexity), true + case "UpdateTeamPayload.team": if e.complexity.UpdateTeamPayload.Team == nil { break @@ -6002,6 +6372,8 @@ type Scene implements Node { after: Cursor before: Cursor ): DatasetSchemaConnection! @goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } enum SceneLockMode { @@ -6295,6 +6667,8 @@ interface Layer { plugin: Plugin extension: PluginExtension scenePlugin: ScenePlugin + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } union Layers = LayerItem | LayerGroup @@ -6327,6 +6701,8 @@ type LayerItem implements Layer { merged: MergedLayer @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } type LayerGroup implements Layer { @@ -6351,6 +6727,8 @@ type LayerGroup implements Layer { layers: [Layer]! @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } type Infobox { @@ -6415,6 +6793,33 @@ type MergedInfoboxField { scenePlugin: ScenePlugin @goField(forceResolver: true) } +interface Tag { + id: ID! + sceneId: ID! + label: String! 
+} + +type TagItem implements Tag { + id: ID! + sceneId: ID! + label: String! + linkedDatasetID: ID + linkedDatasetSchemaID: ID + linkedDatasetFieldID: ID + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + linkedDatasetField: DatasetField @goField(forceResolver: true) +} + +type TagGroup implements Tag { + id: ID! + sceneId: ID! + label: String! + tags: [ID!] +} + +union Tags = TagItem | TagGroup + # InputType input CreateAssetInput { @@ -6756,11 +7161,11 @@ input ImportDatasetInput { } input ImportDatasetFromGoogleSheetInput { - accessToken: String! - fileId: String! - sheetName: String! - sceneId: ID! - datasetSchemaId: ID + accessToken: String! + fileId: String! + sheetName: String! + sceneId: ID! + datasetSchemaId: ID } input AddDatasetSchemaInput { @@ -6769,6 +7174,50 @@ input AddDatasetSchemaInput { representativefield: ID } +input CreateTagItemInput { + sceneId: ID! + label: String! + linkedDatasetSchemaID: ID + linkedDatasetID: ID + linkedDatasetField: ID +} + +input CreateTagGroupInput { + sceneId: ID! + label: String! + tags: [ID!] +} + +input UpdateTagInput { + tagId: ID! + sceneId: ID! + label: String +} + +input AttachTagItemToGroupInput { + itemID: ID! + groupID: ID! +} + +input DetachTagItemFromGroupInput { + itemID: ID! + groupID: ID! +} + +input AttachTagToLayerInput { + tagID: ID! + layerID: ID! +} + +input DetachTagFromLayerInput { + tagID: ID! + layerID: ID! +} + +input RemoveTagInput { + tagID: ID! +} + # Payload type CreateAssetPayload { @@ -6967,6 +7416,38 @@ type AddDatasetSchemaPayload { datasetSchema: DatasetSchema } +type CreateTagItemPayload { + tag: TagItem! +} + +type CreateTagGroupPayload { + tag: TagGroup! +} + +type AttachTagItemToGroupPayload { + tag: TagGroup! +} + +type DetachTagItemFromGroupPayload { + tag: TagGroup! +} + +type UpdateTagPayload { + tag: Tag! +} + +type AttachTagToLayerPayload{ + layer: Layer! 
+} + +type DetachTagFromLayerPayload{ + layer: Layer! +} + +type RemoveTagPayload{ + tagId: ID! +} + # Connection type AssetConnection { @@ -7146,6 +7627,16 @@ type Mutation { moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload importLayer(input: ImportLayerInput!): ImportLayerPayload + attachTagToLayer(input: AttachTagToLayerInput!): AttachTagToLayerPayload + detachTagFromLayer(input: DetachTagFromLayerInput!): DetachTagFromLayerPayload + + # Tag + createTagItem(input: CreateTagItemInput!): CreateTagItemPayload + createTagGroup(input: CreateTagGroupInput!): CreateTagGroupPayload + attachTagItemToGroup(input: AttachTagItemToGroupInput!): AttachTagItemToGroupPayload + detachTagItemFromGroup(input: DetachTagItemFromGroupInput!): DetachTagItemFromGroupPayload + updateTag(input: UpdateTagInput!): UpdateTagPayload + removeTag(input: RemoveTagInput!): RemoveTagPayload } schema { @@ -7337,6 +7828,36 @@ func (ec *executionContext) field_Mutation_addWidget_args(ctx context.Context, r return args, nil } +func (ec *executionContext) field_Mutation_attachTagItemToGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AttachTagItemToGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAttachTagItemToGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_attachTagToLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AttachTagToLayerInput + if tmp, ok := 
rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAttachTagToLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_createAsset_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -7397,6 +7918,36 @@ func (ec *executionContext) field_Mutation_createScene_args(ctx context.Context, return args, nil } +func (ec *executionContext) field_Mutation_createTagGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateTagGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateTagGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_createTagItem_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.CreateTagItemInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNCreateTagItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_createTeam_args(ctx context.Context, rawArgs map[string]interface{}) 
(map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -7457,6 +8008,36 @@ func (ec *executionContext) field_Mutation_deleteTeam_args(ctx context.Context, return args, nil } +func (ec *executionContext) field_Mutation_detachTagFromLayer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.DetachTagFromLayerInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDetachTagFromLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + +func (ec *executionContext) field_Mutation_detachTagItemFromGroup_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.DetachTagItemFromGroupInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNDetachTagItemFromGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_importDatasetFromGoogleSheet_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -7727,6 +8308,21 @@ func (ec *executionContext) field_Mutation_removePropertyItem_args(ctx context.C return args, nil } +func (ec *executionContext) field_Mutation_removeTag_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var 
arg0 gqlmodel.RemoveTagInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNRemoveTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_removeWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -7907,6 +8503,21 @@ func (ec *executionContext) field_Mutation_updatePropertyValue_args(ctx context. return args, nil } +func (ec *executionContext) field_Mutation_updateTag_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateTagInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_updateTeam_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -9737,6 +10348,76 @@ func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.C return ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } +func (ec *executionContext) _AttachTagItemToGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AttachTagItemToGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + 
ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AttachTagItemToGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _AttachTagToLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AttachTagToLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AttachTagToLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { 
defer func() { if r := recover(); r != nil { @@ -10087,6 +10768,76 @@ func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) _CreateTagGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateTagGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _CreateTagItemPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateTagItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + 
}) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagItem) + fc.Result = res + return ec.marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx, field.Selections, res) +} + func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -11729,6 +12480,76 @@ func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _DetachTagFromLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DetachTagFromLayerPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DetachTagFromLayerPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) +} + +func (ec *executionContext) _DetachTagItemFromGroupPayload_tag(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.DetachTagItemFromGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "DetachTagItemFromGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -13611,6 +14432,76 @@ func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field g return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) _LayerGroup_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { + ctx = rctx // use context from middleware stack in children + return obj.TagIds, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*id.ID) + fc.Result = res + return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerGroup().Tags(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Tag) + fc.Result = res + return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -14199,42 +15090,7 @@ func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field gr return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) 
_MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedInfobox", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.SceneID, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(id.ID) - fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14242,39 +15098,7 @@ func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field g } }() fc := &graphql.FieldContext{ - Object: "MergedInfobox", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Property, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.MergedProperty) - fc.Result = res - return 
ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedInfobox", + Object: "LayerItem", Field: field, Args: nil, IsMethod: false, @@ -14284,7 +15108,7 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Fields, nil + return obj.TagIds, nil }) if err != nil { ec.Error(ctx, err) @@ -14296,12 +15120,12 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra } return graphql.Null } - res := resTmp.([]*gqlmodel.MergedInfoboxField) + res := resTmp.([]*id.ID) fc.Result = res - return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) + return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14309,7 +15133,7 @@ func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field grap } }() fc := &graphql.FieldContext{ - Object: 
"MergedInfobox", + Object: "LayerItem", Field: field, Args: nil, IsMethod: true, @@ -14319,39 +15143,7 @@ func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field grap ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfobox().Scene(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.Scene) - fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.OriginalID, nil + return ec.resolvers.LayerItem().Tags(rctx, obj) }) if err != nil { ec.Error(ctx, err) @@ -14363,12 +15155,12 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.([]gqlmodel.Tag) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) } -func (ec *executionContext) 
_MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14376,7 +15168,7 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "MergedInfobox", Field: field, Args: nil, IsMethod: false, @@ -14403,7 +15195,7 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14411,7 +15203,7 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "MergedInfobox", Field: field, Args: nil, IsMethod: false, @@ -14421,24 +15213,21 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.PluginID, nil + return obj.Property, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return 
graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(*gqlmodel.MergedProperty) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14446,7 +15235,7 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "MergedInfobox", Field: field, Args: nil, IsMethod: false, @@ -14456,7 +15245,7 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.ExtensionID, nil + return obj.Fields, nil }) if err != nil { ec.Error(ctx, err) @@ -14468,108 +15257,12 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.PluginExtensionID) - fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, 
ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Property, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.MergedProperty) - fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.Plugin) - fc.Result = res - return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, 
ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfoboxField().Extension(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.PluginExtension) + res := resTmp.([]*gqlmodel.MergedInfoboxField) fc.Result = res - return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) + return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14577,7 +15270,7 @@ func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "MergedInfobox", Field: field, Args: nil, IsMethod: true, @@ -14587,7 +15280,7 @@ func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return 
ec.resolvers.MergedInfoboxField().Scene(rctx, obj) + return ec.resolvers.MergedInfobox().Scene(rctx, obj) }) if err != nil { ec.Error(ctx, err) @@ -14601,7 +15294,7 @@ func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -14612,38 +15305,306 @@ func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, Object: "MergedInfoboxField", Field: field, Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfoboxField().ScenePlugin(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.ScenePlugin) - fc.Result = res - return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) -} - -func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "MergedLayer", - Field: field, - Args: nil, + IsMethod: 
false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + 
IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginID) + fc.Result = res + return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.PluginExtensionID) + fc.Result = res + return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Plugin) + fc.Result = res + return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Extension(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.PluginExtension) + fc.Result = res + return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret 
= graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfoboxField().ScenePlugin(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.ScenePlugin) + fc.Result = res + return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + Args: nil, IsMethod: false, IsResolver: false, } @@ -18287,6 +19248,318 @@ func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field gra return ec.marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerPayload(ctx, field.Selections, res) } +func (ec *executionContext) _Mutation_attachTagToLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_attachTagToLayer_args(ctx, rawArgs) + if err != nil { + 
ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AttachTagToLayer(rctx, args["input"].(gqlmodel.AttachTagToLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AttachTagToLayerPayload) + fc.Result = res + return ec.marshalOAttachTagToLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_detachTagFromLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_detachTagFromLayer_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DetachTagFromLayer(rctx, args["input"].(gqlmodel.DetachTagFromLayerInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DetachTagFromLayerPayload) + fc.Result = res + return ec.marshalODetachTagFromLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_Mutation_createTagItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createTagItem_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().CreateTagItem(rctx, args["input"].(gqlmodel.CreateTagItemInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateTagItemPayload) + fc.Result = res + return ec.marshalOCreateTagItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_createTagGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_createTagGroup_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
ec.resolvers.Mutation().CreateTagGroup(rctx, args["input"].(gqlmodel.CreateTagGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateTagGroupPayload) + fc.Result = res + return ec.marshalOCreateTagGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_attachTagItemToGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_attachTagItemToGroup_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AttachTagItemToGroup(rctx, args["input"].(gqlmodel.AttachTagItemToGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AttachTagItemToGroupPayload) + fc.Result = res + return ec.marshalOAttachTagItemToGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_detachTagItemFromGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_detachTagItemFromGroup_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().DetachTagItemFromGroup(rctx, args["input"].(gqlmodel.DetachTagItemFromGroupInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.DetachTagItemFromGroupPayload) + fc.Result = res + return ec.marshalODetachTagItemFromGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateTag_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateTag(rctx, args["input"].(gqlmodel.UpdateTagInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*gqlmodel.UpdateTagPayload) + fc.Result = res + return ec.marshalOUpdateTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeTag_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveTag(rctx, args["input"].(gqlmodel.RemoveTagInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveTagPayload) + fc.Result = res + return ec.marshalORemoveTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagPayload(ctx, field.Selections, res) +} + func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -24828,6 +26101,41 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Contex return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) _RemoveTagPayload_tagId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveTagPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -25485,6 +26793,76 @@ func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field gra return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) } +func (ec *executionContext) _Scene_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagIds, nil + }) + if 
err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*id.ID) + fc.Result = res + return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Scene_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Scene().Tags(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Tag) + fc.Result = res + return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -26237,6 +27615,440 @@ func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, fie return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetแš„(ctx, field.Selections, res) } +func (ec *executionContext) _TagGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + 
if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) 
+ ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tags, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]*id.ID) + fc.Result = res + return ec.marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Label, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.LinkedDatasetSchemaID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.LinkedDatasetFieldID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().LinkedDatasetSchema(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null 
+ } + res := resTmp.(*gqlmodel.DatasetSchema) + fc.Result = res + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().LinkedDataset(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Dataset) + fc.Result = res + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_linkedDatasetField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().LinkedDatasetField(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := 
resTmp.(*gqlmodel.DatasetField) + fc.Result = res + return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) +} + func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -27026,6 +28838,41 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) _UpdateTagPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTagPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateTagPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tag, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.Tag) + fc.Result = res + return ec.marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) +} + func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -29863,6 +31710,68 @@ func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, ob return it, nil } 
+func (ec *executionContext) unmarshalInputAttachTagItemToGroupInput(ctx context.Context, obj interface{}) (gqlmodel.AttachTagItemToGroupInput, error) { + var it gqlmodel.AttachTagItemToGroupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "itemID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemID")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "groupID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupID")) + it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputAttachTagToLayerInput(ctx context.Context, obj interface{}) (gqlmodel.AttachTagToLayerInput, error) { + var it gqlmodel.AttachTagToLayerInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "layerID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerID")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, obj interface{}) (gqlmodel.CreateAssetInput, error) { var it gqlmodel.CreateAssetInput asMap := map[string]interface{}{} @@ -30011,8 +31920,171 @@ func (ec 
*executionContext) unmarshalInputCreateSceneInput(ctx context.Context, return it, nil } -func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTeamInput, error) { - var it gqlmodel.CreateTeamInput +func (ec *executionContext) unmarshalInputCreateTagGroupInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTagGroupInput, error) { + var it gqlmodel.CreateTagGroupInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "label": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + it.Label, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "tags": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tags")) + it.Tags, err = ec.unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTagItemInput, error) { + var it gqlmodel.CreateTagItemInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "label": + var err error + + ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("label")) + it.Label, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetSchemaID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetSchemaID")) + it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetID")) + it.LinkedDatasetID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "linkedDatasetField": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetField")) + it.LinkedDatasetField, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.CreateTeamInput, error) { + var it gqlmodel.CreateTeamInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteMeInput, error) { + var it gqlmodel.DeleteMeInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "userId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) + 
it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteProjectInput, error) { + var it gqlmodel.DeleteProjectInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "projectId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + +func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteTeamInput, error) { + var it gqlmodel.DeleteTeamInput asMap := map[string]interface{}{} for k, v := range obj.(map[string]interface{}) { asMap[k] = v @@ -30020,11 +32092,11 @@ func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, o for k, v := range asMap { switch k { - case "name": + case "teamId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) - it.Name, err = ec.unmarshalNString2string(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) if err != nil { return it, err } @@ -30034,8 +32106,8 @@ func (ec *executionContext) unmarshalInputCreateTeamInput(ctx context.Context, o return it, nil } -func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteMeInput, error) { - var it gqlmodel.DeleteMeInput +func (ec *executionContext) unmarshalInputDetachTagFromLayerInput(ctx context.Context, obj interface{}) (gqlmodel.DetachTagFromLayerInput, 
error) { + var it gqlmodel.DetachTagFromLayerInput asMap := map[string]interface{}{} for k, v := range obj.(map[string]interface{}) { asMap[k] = v @@ -30043,34 +32115,19 @@ func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj for k, v := range asMap { switch k { - case "userId": + case "tagID": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) - it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) if err != nil { return it, err } - } - } - - return it, nil -} - -func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteProjectInput, error) { - var it gqlmodel.DeleteProjectInput - asMap := map[string]interface{}{} - for k, v := range obj.(map[string]interface{}) { - asMap[k] = v - } - - for k, v := range asMap { - switch k { - case "projectId": + case "layerID": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) - it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerID")) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) if err != nil { return it, err } @@ -30080,8 +32137,8 @@ func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context return it, nil } -func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, obj interface{}) (gqlmodel.DeleteTeamInput, error) { - var it gqlmodel.DeleteTeamInput +func (ec *executionContext) unmarshalInputDetachTagItemFromGroupInput(ctx context.Context, obj interface{}) (gqlmodel.DetachTagItemFromGroupInput, error) { + var it 
gqlmodel.DetachTagItemFromGroupInput asMap := map[string]interface{}{} for k, v := range obj.(map[string]interface{}) { asMap[k] = v @@ -30089,11 +32146,19 @@ func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, o for k, v := range asMap { switch k { - case "teamId": + case "itemID": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemID")) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "groupID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupID")) + it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) if err != nil { return it, err } @@ -30773,6 +32838,29 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co return it, nil } +func (ec *executionContext) unmarshalInputRemoveTagInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveTagInput, error) { + var it gqlmodel.RemoveTagInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagID": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveWidgetInput, error) { var it gqlmodel.RemoveWidgetInput asMap := map[string]interface{}{} @@ -31432,6 +33520,45 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C return it, 
nil } +func (ec *executionContext) unmarshalInputUpdateTagInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateTagInput, error) { + var it gqlmodel.UpdateTagInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "tagId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagId")) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "label": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("label")) + it.Label, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateTeamInput, error) { var it gqlmodel.UpdateTeamInput asMap := map[string]interface{}{} @@ -31882,6 +34009,52 @@ func (ec *executionContext) _PropertyItem(ctx context.Context, sel ast.Selection } } +func (ec *executionContext) _Tag(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Tag) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.TagItem: + return ec._TagItem(ctx, sel, &obj) + case *gqlmodel.TagItem: + if obj == nil { + return graphql.Null + } + return ec._TagItem(ctx, sel, obj) + case gqlmodel.TagGroup: + return ec._TagGroup(ctx, sel, &obj) + case *gqlmodel.TagGroup: + if obj == nil { + return graphql.Null + } + return ec._TagGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + +func (ec *executionContext) _Tags(ctx 
context.Context, sel ast.SelectionSet, obj gqlmodel.Tags) graphql.Marshaler { + switch obj := (obj).(type) { + case nil: + return graphql.Null + case gqlmodel.TagItem: + return ec._TagItem(ctx, sel, &obj) + case *gqlmodel.TagItem: + if obj == nil { + return graphql.Null + } + return ec._TagItem(ctx, sel, obj) + case gqlmodel.TagGroup: + return ec._TagGroup(ctx, sel, &obj) + case *gqlmodel.TagGroup: + if obj == nil { + return graphql.Null + } + return ec._TagGroup(ctx, sel, obj) + default: + panic(fmt.Errorf("unexpected type %T", obj)) + } +} + // endregion ************************** interface.gotpl *************************** // region **************************** object.gotpl **************************** @@ -32258,6 +34431,60 @@ func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet return out } +var attachTagItemToGroupPayloadImplementors = []string{"AttachTagItemToGroupPayload"} + +func (ec *executionContext) _AttachTagItemToGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AttachTagItemToGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, attachTagItemToGroupPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AttachTagItemToGroupPayload") + case "tag": + out.Values[i] = ec._AttachTagItemToGroupPayload_tag(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var attachTagToLayerPayloadImplementors = []string{"AttachTagToLayerPayload"} + +func (ec *executionContext) _AttachTagToLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AttachTagToLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, 
attachTagToLayerPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AttachTagToLayerPayload") + case "layer": + out.Values[i] = ec._AttachTagToLayerPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var cameraImplementors = []string{"Camera"} func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Camera) graphql.Marshaler { @@ -32396,6 +34623,60 @@ func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.Sel return out } +var createTagGroupPayloadImplementors = []string{"CreateTagGroupPayload"} + +func (ec *executionContext) _CreateTagGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTagGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createTagGroupPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateTagGroupPayload") + case "tag": + out.Values[i] = ec._CreateTagGroupPayload_tag(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var createTagItemPayloadImplementors = []string{"CreateTagItemPayload"} + +func (ec *executionContext) _CreateTagItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTagItemPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, createTagItemPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var 
invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("CreateTagItemPayload") + case "tag": + out.Values[i] = ec._CreateTagItemPayload_tag(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var createTeamPayloadImplementors = []string{"CreateTeamPayload"} func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTeamPayload) graphql.Marshaler { @@ -32945,6 +35226,60 @@ func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.Sele return out } +var detachTagFromLayerPayloadImplementors = []string{"DetachTagFromLayerPayload"} + +func (ec *executionContext) _DetachTagFromLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DetachTagFromLayerPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, detachTagFromLayerPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DetachTagFromLayerPayload") + case "layer": + out.Values[i] = ec._DetachTagFromLayerPayload_layer(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var detachTagItemFromGroupPayloadImplementors = []string{"DetachTagItemFromGroupPayload"} + +func (ec *executionContext) _DetachTagItemFromGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DetachTagItemFromGroupPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, detachTagItemFromGroupPayloadImplementors) + + out := 
graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("DetachTagItemFromGroupPayload") + case "tag": + out.Values[i] = ec._DetachTagItemFromGroupPayload_tag(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var importDatasetPayloadImplementors = []string{"ImportDatasetPayload"} func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ImportDatasetPayload) graphql.Marshaler { @@ -33510,6 +35845,25 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe res = ec._LayerGroup_scenePlugin(ctx, field, obj) return res }) + case "tagIds": + out.Values[i] = ec._LayerGroup_tagIds(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tags": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerGroup_tags(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -33652,6 +36006,25 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet res = ec._LayerItem_scenePlugin(ctx, field, obj) return res }) + case "tagIds": + out.Values[i] = ec._LayerItem_tagIds(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tags": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerItem_tags(ctx, field, obj) + if res == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -34349,6 +36722,22 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_removeInfoboxField(ctx, field) case "importLayer": out.Values[i] = ec._Mutation_importLayer(ctx, field) + case "attachTagToLayer": + out.Values[i] = ec._Mutation_attachTagToLayer(ctx, field) + case "detachTagFromLayer": + out.Values[i] = ec._Mutation_detachTagFromLayer(ctx, field) + case "createTagItem": + out.Values[i] = ec._Mutation_createTagItem(ctx, field) + case "createTagGroup": + out.Values[i] = ec._Mutation_createTagGroup(ctx, field) + case "attachTagItemToGroup": + out.Values[i] = ec._Mutation_attachTagItemToGroup(ctx, field) + case "detachTagItemFromGroup": + out.Values[i] = ec._Mutation_detachTagItemFromGroup(ctx, field) + case "updateTag": + out.Values[i] = ec._Mutation_updateTag(ctx, field) + case "removeTag": + out.Values[i] = ec._Mutation_removeTag(ctx, field) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36204,6 +38593,33 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, se return out } +var removeTagPayloadImplementors = []string{"RemoveTagPayload"} + +func (ec *executionContext) _RemoveTagPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveTagPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeTagPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("RemoveTagPayload") + case "tagId": + out.Values[i] = ec._RemoveTagPayload_tagId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} 
+ var removeWidgetPayloadImplementors = []string{"RemoveWidgetPayload"} func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { @@ -36371,6 +38787,25 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob } return res }) + case "tagIds": + out.Values[i] = ec._Scene_tagIds(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tags": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Scene_tags(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36629,6 +39064,121 @@ func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.Sel return out } +var tagGroupImplementors = []string{"TagGroup", "Tag", "Tags"} + +func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, tagGroupImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TagGroup") + case "id": + out.Values[i] = ec._TagGroup_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "sceneId": + out.Values[i] = ec._TagGroup_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "label": + out.Values[i] = ec._TagGroup_label(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "tags": + out.Values[i] = ec._TagGroup_tags(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return 
graphql.Null + } + return out +} + +var tagItemImplementors = []string{"TagItem", "Tag", "Tags"} + +func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagItem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, tagItemImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("TagItem") + case "id": + out.Values[i] = ec._TagItem_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "sceneId": + out.Values[i] = ec._TagItem_sceneId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "label": + out.Values[i] = ec._TagItem_label(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "linkedDatasetID": + out.Values[i] = ec._TagItem_linkedDatasetID(ctx, field, obj) + case "linkedDatasetSchemaID": + out.Values[i] = ec._TagItem_linkedDatasetSchemaID(ctx, field, obj) + case "linkedDatasetFieldID": + out.Values[i] = ec._TagItem_linkedDatasetFieldID(ctx, field, obj) + case "linkedDatasetSchema": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_linkedDatasetSchema(ctx, field, obj) + return res + }) + case "linkedDataset": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_linkedDataset(ctx, field, obj) + return res + }) + case "linkedDatasetField": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_linkedDatasetField(ctx, field, obj) + 
return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var teamImplementors = []string{"Team", "Node"} func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Team) graphql.Marshaler { @@ -36917,6 +39467,33 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel return out } +var updateTagPayloadImplementors = []string{"UpdateTagPayload"} + +func (ec *executionContext) _UpdateTagPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTagPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateTagPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateTagPayload") + case "tag": + out.Values[i] = ec._UpdateTagPayload_tag(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var updateTeamPayloadImplementors = []string{"UpdateTeamPayload"} func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTeamPayload) graphql.Marshaler { @@ -37790,6 +40367,16 @@ func (ec *executionContext) marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reeart return ec._AssetEdge(ctx, sel, v) } +func (ec *executionContext) unmarshalNAttachTagItemToGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupInput(ctx context.Context, v interface{}) (gqlmodel.AttachTagItemToGroupInput, error) { + res, err := ec.unmarshalInputAttachTagItemToGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) 
unmarshalNAttachTagToLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerInput(ctx context.Context, v interface{}) (gqlmodel.AttachTagToLayerInput, error) { + res, err := ec.unmarshalInputAttachTagToLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v interface{}) (bool, error) { res, err := graphql.UnmarshalBoolean(v) return res, graphql.ErrorOnPath(ctx, err) @@ -37825,6 +40412,16 @@ func (ec *executionContext) unmarshalNCreateSceneInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNCreateTagGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupInput(ctx context.Context, v interface{}) (gqlmodel.CreateTagGroupInput, error) { + res, err := ec.unmarshalInputCreateTagGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNCreateTagItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemInput(ctx context.Context, v interface{}) (gqlmodel.CreateTagItemInput, error) { + res, err := ec.unmarshalInputCreateTagItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNCreateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamInput(ctx context.Context, v interface{}) (gqlmodel.CreateTeamInput, error) { res, err := ec.unmarshalInputCreateTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -38303,6 +40900,16 @@ func (ec *executionContext) unmarshalNDeleteTeamInput2githubแš—comแš‹reearthแš‹r return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNDetachTagFromLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerInput(ctx 
context.Context, v interface{}) (gqlmodel.DetachTagFromLayerInput, error) { + res, err := ec.unmarshalInputDetachTagFromLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNDetachTagItemFromGroupInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupInput(ctx context.Context, v interface{}) (gqlmodel.DetachTagItemFromGroupInput, error) { + res, err := ec.unmarshalInputDetachTagItemFromGroupInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNFileSize2int64(ctx context.Context, v interface{}) (int64, error) { res, err := graphql.UnmarshalInt64(v) return res, graphql.ErrorOnPath(ctx, err) @@ -39795,6 +42402,11 @@ func (ec *executionContext) unmarshalNRemovePropertyItemInput2githubแš—comแš‹ree return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNRemoveTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagInput(ctx context.Context, v interface{}) (gqlmodel.RemoveTagInput, error) { + res, err := ec.unmarshalInputRemoveTagInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNRemoveWidgetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetInput(ctx context.Context, v interface{}) (gqlmodel.RemoveWidgetInput, error) { res, err := ec.unmarshalInputRemoveWidgetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -39999,6 +42611,80 @@ func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Tag) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + 
ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Tag(ctx, sel, v) +} + +func (ec *executionContext) marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Tag) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagGroup) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._TagGroup(ctx, sel, v) +} + +func (ec *executionContext) marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._TagItem(ctx, sel, v) +} + func (ec *executionContext) marshalNTeam2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx 
context.Context, sel ast.SelectionSet, v gqlmodel.Team) graphql.Marshaler { return ec._Team(ctx, sel, &v) } @@ -40192,6 +42878,11 @@ func (ec *executionContext) unmarshalNUpdatePropertyValueInput2githubแš—comแš‹re return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNUpdateTagInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagInput(ctx context.Context, v interface{}) (gqlmodel.UpdateTagInput, error) { + res, err := ec.unmarshalInputUpdateTagInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNUpdateTeamInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamInput(ctx context.Context, v interface{}) (gqlmodel.UpdateTeamInput, error) { res, err := ec.unmarshalInputUpdateTeamInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -40657,6 +43348,20 @@ func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ec._Asset(ctx, sel, v) } +func (ec *executionContext) marshalOAttachTagItemToGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AttachTagItemToGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AttachTagItemToGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOAttachTagToLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AttachTagToLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AttachTagToLayerPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interface{}) (bool, error) { res, err := graphql.UnmarshalBoolean(v) return res, graphql.ErrorOnPath(ctx, err) @@ -40702,6 
+43407,20 @@ func (ec *executionContext) marshalOCreateScenePayload2แš–githubแš—comแš‹reearth return ec._CreateScenePayload(ctx, sel, v) } +func (ec *executionContext) marshalOCreateTagGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTagGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateTagGroupPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalOCreateTagItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTagItemPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._CreateTagItemPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -40803,6 +43522,20 @@ func (ec *executionContext) marshalODeleteTeamPayload2แš–githubแš—comแš‹reearth return ec._DeleteTeamPayload(ctx, sel, v) } +func (ec *executionContext) marshalODetachTagFromLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DetachTagFromLayerPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._DetachTagFromLayerPayload(ctx, sel, v) +} + +func (ec *executionContext) marshalODetachTagItemFromGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DetachTagItemFromGroupPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + 
return ec._DetachTagItemFromGroupPayload(ctx, sel, v) +} + func (ec *executionContext) unmarshalOFloat2แš–float64(ctx context.Context, v interface{}) (*float64, error) { if v == nil { return nil, nil @@ -41313,6 +44046,13 @@ func (ec *executionContext) marshalORemoveMemberFromTeamPayload2แš–githubแš—com return ec._RemoveMemberFromTeamPayload(ctx, sel, v) } +func (ec *executionContext) marshalORemoveTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveTagPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._RemoveTagPayload(ctx, sel, v) +} + func (ec *executionContext) marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -41548,6 +44288,13 @@ func (ec *executionContext) marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹ return ec._UpdateMemberOfTeamPayload(ctx, sel, v) } +func (ec *executionContext) marshalOUpdateTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateTagPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateTagPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateTeamPayload) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/adapter/gql/gqldataloader/dataloader.go b/internal/adapter/gql/gqldataloader/dataloader.go index 8e570de61..6c15aced6 100644 --- a/internal/adapter/gql/gqldataloader/dataloader.go +++ 
b/internal/adapter/gql/gqldataloader/dataloader.go @@ -13,3 +13,6 @@ package gqldataloader //go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/pkg/id.SceneID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Scene //go:generate go run github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/pkg/id.TeamID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Team //go:generate go run github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/pkg/id.UserID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.User +//go:generate go run github.com/vektah/dataloaden TagLoader github.com/reearth/reearth-backend/pkg/id.TagID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Tag +//go:generate go run github.com/vektah/dataloaden TagItemLoader github.com/reearth/reearth-backend/pkg/id.TagID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagItem +//go:generate go run github.com/vektah/dataloaden TagGroupLoader github.com/reearth/reearth-backend/pkg/id.TagID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagGroup diff --git a/internal/adapter/gql/gqldataloader/taggrouploader_gen.go b/internal/adapter/gql/gqldataloader/taggrouploader_gen.go new file mode 100644 index 000000000..5a1ca6d5a --- /dev/null +++ b/internal/adapter/gql/gqldataloader/taggrouploader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +// TagGroupLoaderConfig captures the config to create a new TagGroupLoader +type TagGroupLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTagGroupLoader creates a new TagGroupLoader given a fetch, wait, and maxBatch +func NewTagGroupLoader(config TagGroupLoaderConfig) *TagGroupLoader { + return &TagGroupLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TagGroupLoader batches and caches requests +type TagGroupLoader struct { + // this method provides the data for the loader + fetch func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.TagID]*gqlmodel.TagGroup + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *tagGroupLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type tagGroupLoaderBatch struct { + keys []id.TagID + data []*gqlmodel.TagGroup + error []error + closing bool + done chan struct{} +} + +// Load a TagGroup by key, batching and caching will be applied automatically +func (l *TagGroupLoader) Load(key id.TagID) (*gqlmodel.TagGroup, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a TagGroup. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagGroupLoader) LoadThunk(key id.TagID) func() (*gqlmodel.TagGroup, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.TagGroup, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &tagGroupLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.TagGroup, error) { + <-batch.done + + var data *gqlmodel.TagGroup + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TagGroupLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { + results := make([]func() (*gqlmodel.TagGroup, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + tagGroups := make([]*gqlmodel.TagGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagGroups[i], errors[i] = thunk() + } + return tagGroups, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a TagGroups. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *TagGroupLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.TagGroup, []error) { + results := make([]func() (*gqlmodel.TagGroup, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.TagGroup, []error) { + tagGroups := make([]*gqlmodel.TagGroup, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagGroups[i], errors[i] = thunk() + } + return tagGroups, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TagGroupLoader) Prime(key id.TagID, value *gqlmodel.TagGroup) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TagGroupLoader) Clear(key id.TagID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TagGroupLoader) unsafeSet(key id.TagID, value *gqlmodel.TagGroup) { + if l.cache == nil { + l.cache = map[id.TagID]*gqlmodel.TagGroup{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *tagGroupLoaderBatch) keyIndex(l *TagGroupLoader, key id.TagID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *tagGroupLoaderBatch) startTimer(l *TagGroupLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *tagGroupLoaderBatch) end(l *TagGroupLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/adapter/gql/gqldataloader/tagitemloader_gen.go b/internal/adapter/gql/gqldataloader/tagitemloader_gen.go new file mode 100644 index 000000000..7931dec24 --- /dev/null +++ b/internal/adapter/gql/gqldataloader/tagitemloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. 
+ +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +// TagItemLoaderConfig captures the config to create a new TagItemLoader +type TagItemLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTagItemLoader creates a new TagItemLoader given a fetch, wait, and maxBatch +func NewTagItemLoader(config TagItemLoaderConfig) *TagItemLoader { + return &TagItemLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TagItemLoader batches and caches requests +type TagItemLoader struct { + // this method provides the data for the loader + fetch func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.TagID]*gqlmodel.TagItem + + // the current batch. keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *tagItemLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type tagItemLoaderBatch struct { + keys []id.TagID + data []*gqlmodel.TagItem + error []error + closing bool + done chan struct{} +} + +// Load a TagItem by key, batching and caching will be applied automatically +func (l *TagItemLoader) Load(key id.TagID) (*gqlmodel.TagItem, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a TagItem. 
+// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagItemLoader) LoadThunk(key id.TagID) func() (*gqlmodel.TagItem, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.TagItem, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &tagItemLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.TagItem, error) { + <-batch.done + + var data *gqlmodel.TagItem + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TagItemLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { + results := make([]func() (*gqlmodel.TagItem, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + tagItems := make([]*gqlmodel.TagItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagItems[i], errors[i] = thunk() + } + return tagItems, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a TagItems. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. 
+func (l *TagItemLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.TagItem, []error) { + results := make([]func() (*gqlmodel.TagItem, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.TagItem, []error) { + tagItems := make([]*gqlmodel.TagItem, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tagItems[i], errors[i] = thunk() + } + return tagItems, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TagItemLoader) Prime(key id.TagID, value *gqlmodel.TagItem) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. + cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TagItemLoader) Clear(key id.TagID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TagItemLoader) unsafeSet(key id.TagID, value *gqlmodel.TagItem) { + if l.cache == nil { + l.cache = map[id.TagID]*gqlmodel.TagItem{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *tagItemLoaderBatch) keyIndex(l *TagItemLoader, key id.TagID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *tagItemLoaderBatch) startTimer(l *TagItemLoader) { + 
time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *tagItemLoaderBatch) end(l *TagItemLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/adapter/gql/gqldataloader/tagloader_gen.go b/internal/adapter/gql/gqldataloader/tagloader_gen.go new file mode 100644 index 000000000..3d4cee6c2 --- /dev/null +++ b/internal/adapter/gql/gqldataloader/tagloader_gen.go @@ -0,0 +1,225 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package gqldataloader + +import ( + "sync" + "time" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +// TagLoaderConfig captures the config to create a new TagLoader +type TagLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []id.TagID) ([]*gqlmodel.Tag, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewTagLoader creates a new TagLoader given a fetch, wait, and maxBatch +func NewTagLoader(config TagLoaderConfig) *TagLoader { + return &TagLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// TagLoader batches and caches requests +type TagLoader struct { + // this method provides the data for the loader + fetch func(keys []id.TagID) ([]*gqlmodel.Tag, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[id.TagID]*gqlmodel.Tag + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *tagLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type tagLoaderBatch struct { + keys []id.TagID + data []*gqlmodel.Tag + error []error + closing bool + done chan struct{} +} + +// Load a Tag by key, batching and caching will be applied automatically +func (l *TagLoader) Load(key id.TagID) (*gqlmodel.Tag, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a Tag. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagLoader) LoadThunk(key id.TagID) func() (*gqlmodel.Tag, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (*gqlmodel.Tag, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &tagLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (*gqlmodel.Tag, error) { + <-batch.done + + var data *gqlmodel.Tag + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *TagLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.Tag, []error) { + results := make([]func() (*gqlmodel.Tag, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + tags := make([]*gqlmodel.Tag, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tags[i], errors[i] = thunk() + } + return tags, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a Tags. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *TagLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.Tag, []error) { + results := make([]func() (*gqlmodel.Tag, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]*gqlmodel.Tag, []error) { + tags := make([]*gqlmodel.Tag, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + tags[i], errors[i] = thunk() + } + return tags, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) +func (l *TagLoader) Prime(key id.TagID, value *gqlmodel.Tag) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + // make a copy when writing to the cache, its easy to pass a pointer in from a loop var + // and end up with the whole cache pointing to the same value. 
+ cpy := *value + l.unsafeSet(key, &cpy) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *TagLoader) Clear(key id.TagID) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *TagLoader) unsafeSet(key id.TagID, value *gqlmodel.Tag) { + if l.cache == nil { + l.cache = map[id.TagID]*gqlmodel.Tag{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *tagLoaderBatch) keyIndex(l *TagLoader, key id.TagID) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *tagLoaderBatch) startTimer(l *TagLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *tagLoaderBatch) end(l *TagLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/adapter/gql/gqlmodel/convert_layer.go b/internal/adapter/gql/gqlmodel/convert_layer.go index 0250ee348..9bc9be1ac 100644 --- a/internal/adapter/gql/gqlmodel/convert_layer.go +++ b/internal/adapter/gql/gqlmodel/convert_layer.go @@ -11,6 +11,12 @@ func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { return nil } + tags := l.Tags().Tags() + tagIDs := make([]*id.ID, 0, len(tags)) + for _, tid := range tags { + tagIDs = append(tagIDs, tid.IDRef()) + } + return &LayerItem{ ID: l.ID().ID(), SceneID: l.Scene().ID(), @@ -22,6 +28,7 @@ func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), LinkedDatasetID: 
l.LinkedDataset().IDRef(), ParentID: parent.IDRef(), + TagIds: tagIDs, } } @@ -36,6 +43,12 @@ func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { layers = append(layers, lay.IDRef()) } + tags := l.Tags().Tags() + tagIDs := make([]*id.ID, 0, len(tags)) + for _, tid := range tags { + tagIDs = append(tagIDs, tid.IDRef()) + } + return &LayerGroup{ ID: l.ID().ID(), SceneID: l.Scene().ID(), @@ -49,6 +62,7 @@ func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { LayerIds: layers, Root: l.IsRoot(), ParentID: parent.IDRef(), + TagIds: tagIDs, } } diff --git a/internal/adapter/gql/gqlmodel/convert_tag.go b/internal/adapter/gql/gqlmodel/convert_tag.go new file mode 100644 index 000000000..77ea5103c --- /dev/null +++ b/internal/adapter/gql/gqlmodel/convert_tag.go @@ -0,0 +1,52 @@ +package gqlmodel + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" +) + +func ToTagItem(ti *tag.Item) *TagItem { + if ti == nil { + return nil + } + return &TagItem{ + ID: ti.ID().ID(), + SceneID: ti.Scene().ID(), + Label: ti.Label(), + LinkedDatasetID: ti.LinkedDatasetID().IDRef(), + LinkedDatasetSchemaID: ti.LinkedDatasetSchemaID().IDRef(), + LinkedDatasetFieldID: ti.LinkedDatasetFieldID().IDRef(), + } +} + +func ToTagGroup(tg *tag.Group) *TagGroup { + if tg == nil { + return nil + } + tags := tg.Tags().Tags() + ids := make([]*id.ID, 0, len(tags)) + for _, tid := range tags { + if !tid.IsNil() { + ids = append(ids, tid.IDRef()) + } + } + return &TagGroup{ + ID: tg.ID().ID(), + SceneID: tg.Scene().ID(), + Label: tg.Label(), + Tags: ids, + } +} + +func ToTag(t tag.Tag) Tag { + if t == nil { + return nil + } + switch ty := t.(type) { + case *tag.Item: + return ToTagItem(ty) + case *tag.Group: + return ToTagGroup(ty) + } + return nil +} diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 3a5a85a24..72d63a810 100644 --- 
a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -31,6 +31,14 @@ type PropertyItem interface { IsPropertyItem() } +type Tag interface { + IsTag() +} + +type Tags interface { + IsTags() +} + type AddDatasetSchemaInput struct { SceneID id.ID `json:"sceneId"` Name string `json:"name"` @@ -161,6 +169,24 @@ type AssetEdge struct { Node *Asset `json:"node"` } +type AttachTagItemToGroupInput struct { + ItemID id.ID `json:"itemID"` + GroupID id.ID `json:"groupID"` +} + +type AttachTagItemToGroupPayload struct { + Tag *TagGroup `json:"tag"` +} + +type AttachTagToLayerInput struct { + TagID id.ID `json:"tagID"` + LayerID id.ID `json:"layerID"` +} + +type AttachTagToLayerPayload struct { + Layer Layer `json:"layer"` +} + type Camera struct { Lat float64 `json:"lat"` Lng float64 `json:"lng"` @@ -206,6 +232,28 @@ type CreateScenePayload struct { Scene *Scene `json:"scene"` } +type CreateTagGroupInput struct { + SceneID id.ID `json:"sceneId"` + Label string `json:"label"` + Tags []*id.ID `json:"tags"` +} + +type CreateTagGroupPayload struct { + Tag *TagGroup `json:"tag"` +} + +type CreateTagItemInput struct { + SceneID id.ID `json:"sceneId"` + Label string `json:"label"` + LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` + LinkedDatasetID *id.ID `json:"linkedDatasetID"` + LinkedDatasetField *id.ID `json:"linkedDatasetField"` +} + +type CreateTagItemPayload struct { + Tag *TagItem `json:"tag"` +} + type CreateTeamInput struct { Name string `json:"name"` } @@ -312,6 +360,24 @@ type DeleteTeamPayload struct { TeamID id.ID `json:"teamId"` } +type DetachTagFromLayerInput struct { + TagID id.ID `json:"tagID"` + LayerID id.ID `json:"layerID"` +} + +type DetachTagFromLayerPayload struct { + Layer Layer `json:"layer"` +} + +type DetachTagItemFromGroupInput struct { + ItemID id.ID `json:"itemID"` + GroupID id.ID `json:"groupID"` +} + +type DetachTagItemFromGroupPayload struct { + Tag *TagGroup `json:"tag"` +} + type 
ImportDatasetFromGoogleSheetInput struct { AccessToken string `json:"accessToken"` FileID string `json:"fileId"` @@ -415,6 +481,8 @@ type LayerGroup struct { Layers []Layer `json:"layers"` Scene *Scene `json:"scene"` ScenePlugin *ScenePlugin `json:"scenePlugin"` + TagIds []*id.ID `json:"tagIds"` + Tags []Tag `json:"tags"` } func (LayerGroup) IsLayers() {} @@ -439,6 +507,8 @@ type LayerItem struct { Merged *MergedLayer `json:"merged"` Scene *Scene `json:"scene"` ScenePlugin *ScenePlugin `json:"scenePlugin"` + TagIds []*id.ID `json:"tagIds"` + Tags []Tag `json:"tags"` } func (LayerItem) IsLayers() {} @@ -871,6 +941,14 @@ type RemovePropertyItemInput struct { ItemID id.ID `json:"itemId"` } +type RemoveTagInput struct { + TagID id.ID `json:"tagID"` +} + +type RemoveTagPayload struct { + TagID id.ID `json:"tagId"` +} + type RemoveWidgetInput struct { SceneID id.ID `json:"sceneId"` WidgetID id.ID `json:"widgetId"` @@ -899,6 +977,8 @@ type Scene struct { RootLayer *LayerGroup `json:"rootLayer"` LockMode SceneLockMode `json:"lockMode"` DatasetSchemas *DatasetSchemaConnection `json:"datasetSchemas"` + TagIds []*id.ID `json:"tagIds"` + Tags []Tag `json:"tags"` } func (Scene) IsNode() {} @@ -953,6 +1033,31 @@ type SyncDatasetPayload struct { Dataset []*Dataset `json:"dataset"` } +type TagGroup struct { + ID id.ID `json:"id"` + SceneID id.ID `json:"sceneId"` + Label string `json:"label"` + Tags []*id.ID `json:"tags"` +} + +func (TagGroup) IsTag() {} +func (TagGroup) IsTags() {} + +type TagItem struct { + ID id.ID `json:"id"` + SceneID id.ID `json:"sceneId"` + Label string `json:"label"` + LinkedDatasetID *id.ID `json:"linkedDatasetID"` + LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` + LinkedDatasetFieldID *id.ID `json:"linkedDatasetFieldID"` + LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` + LinkedDataset *Dataset `json:"linkedDataset"` + LinkedDatasetField *DatasetField `json:"linkedDatasetField"` +} + +func (TagItem) IsTag() {} +func (TagItem) 
IsTags() {} + type Team struct { ID id.ID `json:"id"` Name string `json:"name"` @@ -1081,6 +1186,16 @@ type UpdatePropertyValueInput struct { Type ValueType `json:"type"` } +type UpdateTagInput struct { + TagID id.ID `json:"tagId"` + SceneID id.ID `json:"sceneId"` + Label *string `json:"label"` +} + +type UpdateTagPayload struct { + Tag Tag `json:"tag"` +} + type UpdateTeamInput struct { TeamID id.ID `json:"teamId"` Name string `json:"name"` diff --git a/internal/adapter/gql/loader.go b/internal/adapter/gql/loader.go index b2951a7fc..c2df08e3c 100644 --- a/internal/adapter/gql/loader.go +++ b/internal/adapter/gql/loader.go @@ -23,6 +23,7 @@ type Loaders struct { Scene *SceneLoader Team *TeamLoader User *UserLoader + Tag *TagLoader } type DataLoaders struct { @@ -39,6 +40,9 @@ type DataLoaders struct { Scene SceneDataLoader Team TeamDataLoader User UserDataLoader + Tag TagDataLoader + TagItem TagItemDataLoader + TagGroup TagGroupDataLoader } func NewLoaders(usecases interfaces.Container) Loaders { @@ -53,6 +57,7 @@ func NewLoaders(usecases interfaces.Container) Loaders { Scene: NewSceneLoader(usecases.Scene), Team: NewTeamLoader(usecases.Team), User: NewUserLoader(usecases.User), + Tag: NewTagLoader(usecases.Tag), } } @@ -78,6 +83,9 @@ func (l Loaders) DataLoaders(ctx context.Context) DataLoaders { Scene: l.Scene.DataLoader(ctx), Team: l.Team.DataLoader(ctx), User: l.User.DataLoader(ctx), + Tag: l.Tag.DataLoader(ctx), + TagItem: l.Tag.ItemDataLoader(ctx), + TagGroup: l.Tag.GroupDataLoader(ctx), } } @@ -96,6 +104,9 @@ func (l Loaders) OrdinaryDataLoaders(ctx context.Context) DataLoaders { Scene: l.Scene.OrdinaryDataLoader(ctx), Team: l.Team.OrdinaryDataLoader(ctx), User: l.User.OrdinaryDataLoader(ctx), + Tag: l.Tag.OrdinaryDataLoader(ctx), + TagItem: l.Tag.ItemDataLoader(ctx), + TagGroup: l.Tag.GroupDataLoader(ctx), } } diff --git a/internal/adapter/gql/loader_tag.go b/internal/adapter/gql/loader_tag.go new file mode 100644 index 000000000..ab00b7ec9 --- /dev/null 
+++ b/internal/adapter/gql/loader_tag.go @@ -0,0 +1,264 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +type TagLoader struct { + usecase interfaces.Tag +} + +func NewTagLoader(usecase interfaces.Tag) *TagLoader { + return &TagLoader{usecase: usecase} +} + +func (c *TagLoader) Fetch(ctx context.Context, ids []id.TagID) ([]*gqlmodel.Tag, []error) { + res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + tags := make([]*gqlmodel.Tag, 0, len(res)) + for _, t := range res { + if t != nil { + tag := gqlmodel.ToTag(*t) + tags = append(tags, &tag) + } + } + + return tags, nil +} + +func (c *TagLoader) FetchGroup(ctx context.Context, ids []id.TagID) ([]*gqlmodel.TagGroup, []error) { + res, err := c.usecase.FetchGroup(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + tagGroups := make([]*gqlmodel.TagGroup, 0, len(res)) + for _, t := range res { + tg := gqlmodel.ToTagGroup(t) + if tg != nil { + tagGroups = append(tagGroups, tg) + } + } + + return tagGroups, nil +} + +func (c *TagLoader) FetchItem(ctx context.Context, ids []id.TagID) ([]*gqlmodel.TagItem, []error) { + res, err := c.usecase.FetchItem(ctx, ids, getOperator(ctx)) + if err != nil { + return nil, []error{err} + } + + tagItems := make([]*gqlmodel.TagItem, 0, len(res)) + for _, t := range res { + ti := gqlmodel.ToTagItem(t) + if ti != nil { + tagItems = append(tagItems, ti) + } + } + + return tagItems, nil +} + +func (c *TagLoader) FetchGroupByLayer(ctx context.Context, lid id.LayerID) ([]*gqlmodel.TagGroup, error) { + res, err := c.usecase.FetchGroupsByLayer(ctx, lid, getOperator(ctx)) + if err != nil { + return nil, err + } + tagGroups := make([]*gqlmodel.TagGroup, 0, len(res)) 
+ for _, t := range res { + tg := gqlmodel.ToTagGroup(t) + if tg != nil { + tagGroups = append(tagGroups, tg) + } + } + + return tagGroups, nil +} + +func (c *TagLoader) FetchItemByLayer(ctx context.Context, lid id.LayerID) ([]*gqlmodel.TagItem, error) { + res, err := c.usecase.FetchItemsByLayer(ctx, lid, getOperator(ctx)) + if err != nil { + return nil, err + } + + tagItems := make([]*gqlmodel.TagItem, 0, len(res)) + for _, t := range res { + ti := gqlmodel.ToTagItem(t) + if ti != nil { + tagItems = append(tagItems, ti) + } + } + + return tagItems, nil +} + +func (c *TagLoader) FetchGroupByScene(ctx context.Context, sid id.SceneID) ([]*gqlmodel.TagGroup, error) { + res, err := c.usecase.FetchGroupsByScene(ctx, sid, getOperator(ctx)) + if err != nil { + return nil, err + } + + tagGroups := make([]*gqlmodel.TagGroup, 0, len(res)) + for _, t := range res { + tg := gqlmodel.ToTagGroup(t) + if tg != nil { + tagGroups = append(tagGroups, tg) + } + } + + return tagGroups, nil +} + +func (c *TagLoader) FetchItemByScene(ctx context.Context, sid id.SceneID) ([]*gqlmodel.TagItem, error) { + res, err := c.usecase.FetchItemsByScene(ctx, sid, getOperator(ctx)) + if err != nil { + return nil, err + } + + tagItems := make([]*gqlmodel.TagItem, 0, len(res)) + for _, t := range res { + ti := gqlmodel.ToTagItem(t) + if ti != nil { + tagItems = append(tagItems, ti) + } + } + + return tagItems, nil +} + +// data loaders + +type TagDataLoader interface { + Load(id.TagID) (*gqlmodel.Tag, error) + LoadAll([]id.TagID) ([]*gqlmodel.Tag, []error) +} + +func (c *TagLoader) DataLoader(ctx context.Context) TagDataLoader { + return gqldataloader.NewTagLoader(gqldataloader.TagLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.TagID) ([]*gqlmodel.Tag, []error) { + return c.Fetch(ctx, keys) + }, + }) +} + +func (c *TagLoader) OrdinaryDataLoader(ctx context.Context) TagDataLoader { + return &ordinaryTagLoader{ + fetch: func(keys []id.TagID) 
([]*gqlmodel.Tag, []error) { + return c.Fetch(ctx, keys) + }, + } +} + +type ordinaryTagLoader struct { + fetch func(keys []id.TagID) ([]*gqlmodel.Tag, []error) +} + +func (t *ordinaryTagLoader) Load(key id.TagID) (*gqlmodel.Tag, error) { + res, errs := t.fetch([]id.TagID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (t *ordinaryTagLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.Tag, []error) { + return t.fetch(keys) +} + +type TagItemDataLoader interface { + Load(id.TagID) (*gqlmodel.TagItem, error) + LoadAll([]id.TagID) ([]*gqlmodel.TagItem, []error) +} + +func (c *TagLoader) ItemDataLoader(ctx context.Context) TagItemDataLoader { + return gqldataloader.NewTagItemLoader(gqldataloader.TagItemLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: dataLoaderMaxBatch, + Fetch: func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { + return c.FetchItem(ctx, keys) + }, + }) +} + +func (c *TagLoader) ItemOrdinaryDataLoader(ctx context.Context) TagItemDataLoader { + return &ordinaryTagItemLoader{ + fetch: func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { + return c.FetchItem(ctx, keys) + }, + } +} + +type ordinaryTagItemLoader struct { + fetch func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) +} + +func (t *ordinaryTagItemLoader) Load(key id.TagID) (*gqlmodel.TagItem, error) { + res, errs := t.fetch([]id.TagID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (t *ordinaryTagItemLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { + return t.fetch(keys) +} + +type TagGroupDataLoader interface { + Load(id.TagID) (*gqlmodel.TagGroup, error) + LoadAll([]id.TagID) ([]*gqlmodel.TagGroup, []error) +} + +func (c *TagLoader) GroupDataLoader(ctx context.Context) TagGroupDataLoader { + return gqldataloader.NewTagGroupLoader(gqldataloader.TagGroupLoaderConfig{ + Wait: dataLoaderWait, + MaxBatch: 
dataLoaderMaxBatch, + Fetch: func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + }) +} + +func (c *TagLoader) GroupOrdinaryDataLoader(ctx context.Context) TagGroupDataLoader { + return &ordinaryTagGroupLoader{ + fetch: func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { + return c.FetchGroup(ctx, keys) + }, + } +} + +type ordinaryTagGroupLoader struct { + fetch func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) +} + +func (t *ordinaryTagGroupLoader) Load(key id.TagID) (*gqlmodel.TagGroup, error) { + res, errs := t.fetch([]id.TagID{key}) + if len(errs) > 0 { + return nil, errs[0] + } + if len(res) > 0 { + return res[0], nil + } + return nil, nil +} + +func (t *ordinaryTagGroupLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { + return t.fetch(keys) +} diff --git a/internal/adapter/gql/resolver_layer.go b/internal/adapter/gql/resolver_layer.go index 149dc8acd..6c76eda6a 100644 --- a/internal/adapter/gql/resolver_layer.go +++ b/internal/adapter/gql/resolver_layer.go @@ -202,6 +202,35 @@ func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.In type layerGroupResolver struct{ *Resolver } +func (r *layerGroupResolver) Tags(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Tag, error) { + exit := trace(ctx) + defer exit() + + ids := make([]id.TagID, 0, len(obj.TagIds)) + for _, tid := range obj.TagIds { + if tid != nil { + ids = append(ids, id.TagID(*tid)) + } + } + + tags, err := DataLoadersFromContext(ctx).Tag.LoadAll(ids) + if len(err) > 0 { + for _, err1 := range err { + if err1 != nil { + return nil, err1 + } + } + } + + res := make([]gqlmodel.Tag, 0, len(tags)) + for _, t := range tags { + if t != nil { + res = append(res, *t) + } + } + return res, nil +} + func (r *layerGroupResolver) Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) { exit := trace(ctx) defer exit() @@ -382,6 +411,35 @@ func (r *layerItemResolver) ScenePlugin(ctx 
context.Context, obj *gqlmodel.Layer return s.Plugin(*obj.PluginID), nil } +func (r *layerItemResolver) Tags(ctx context.Context, obj *gqlmodel.LayerItem) ([]gqlmodel.Tag, error) { + exit := trace(ctx) + defer exit() + + ids := make([]id.TagID, 0, len(obj.TagIds)) + for _, tid := range obj.TagIds { + if tid != nil { + ids = append(ids, id.TagID(*tid)) + } + } + + tags, err := DataLoadersFromContext(ctx).Tag.LoadAll(ids) + if len(err) > 0 { + for _, err1 := range err { + if err1 != nil { + return nil, err1 + } + } + } + + res := make([]gqlmodel.Tag, 0, len(tags)) + for _, t := range tags { + if t != nil { + res = append(res, *t) + } + } + return res, nil +} + type mergedLayerResolver struct{ *Resolver } func (r *mergedLayerResolver) Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) { @@ -457,3 +515,29 @@ func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmo } return s.Plugin(obj.PluginID), nil } + +func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.AttachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.AttachTagToLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.DetachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.DetachTagFromLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} diff --git a/internal/adapter/gql/resolver_mutation_tag.go b/internal/adapter/gql/resolver_mutation_tag.go new file mode 100644 index 
000000000..95cc8bd50 --- /dev/null +++ b/internal/adapter/gql/resolver_mutation_tag.go @@ -0,0 +1,107 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" +) + +func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.CreateTagItemInput) (*gqlmodel.CreateTagItemPayload, error) { + exit := trace(ctx) + defer exit() + + tag, err := r.usecases.Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ + Label: input.Label, + SceneID: id.SceneID(input.SceneID), + LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(input.LinkedDatasetSchemaID), + LinkedDatasetID: id.DatasetIDFromRefID(input.LinkedDatasetID), + LinkedDatasetField: id.DatasetSchemaFieldIDFromRefID(input.LinkedDatasetField), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.CreateTagItemPayload{ + Tag: gqlmodel.ToTagItem(tag), + }, nil +} + +func (r *mutationResolver) CreateTagGroup(ctx context.Context, input gqlmodel.CreateTagGroupInput) (*gqlmodel.CreateTagGroupPayload, error) { + exit := trace(ctx) + defer exit() + + tag, err := r.usecases.Tag.CreateGroup(ctx, interfaces.CreateTagGroupParam{ + Label: input.Label, + SceneID: id.SceneID(input.SceneID), + Tags: id.TagIDsFromIDRef(input.Tags), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.CreateTagGroupPayload{ + Tag: gqlmodel.ToTagGroup(tag), + }, nil +} + +func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) { + exit := trace(ctx) + defer exit() + + tag, err := r.usecases.Tag.UpdateTag(ctx, interfaces.UpdateTagParam{ + Label: input.Label, + SceneID: id.SceneID(input.SceneID), + TagID: id.TagID(input.TagID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.UpdateTagPayload{ + Tag: gqlmodel.ToTag(*tag), + }, 
nil +} + +func (r *mutationResolver) AttachTagItemToGroup(ctx context.Context, input gqlmodel.AttachTagItemToGroupInput) (*gqlmodel.AttachTagItemToGroupPayload, error) { + exit := trace(ctx) + defer exit() + + tag, err := r.usecases.Tag.AttachItemToGroup(ctx, interfaces.AttachItemToGroupParam{ + ItemID: id.TagID(input.ItemID), + GroupID: id.TagID(input.GroupID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.AttachTagItemToGroupPayload{ + Tag: gqlmodel.ToTagGroup(tag), + }, nil +} + +func (r *mutationResolver) DetachTagItemFromGroup(ctx context.Context, input gqlmodel.DetachTagItemFromGroupInput) (*gqlmodel.DetachTagItemFromGroupPayload, error) { + exit := trace(ctx) + defer exit() + + tag, err := r.usecases.Tag.DetachItemFromGroup(ctx, interfaces.DetachItemToGroupParam{ + ItemID: id.TagID(input.ItemID), + GroupID: id.TagID(input.GroupID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.DetachTagItemFromGroupPayload{ + Tag: gqlmodel.ToTagGroup(tag), + }, nil +} + +func (r *mutationResolver) RemoveTag(ctx context.Context, input gqlmodel.RemoveTagInput) (*gqlmodel.RemoveTagPayload, error) { + exit := trace(ctx) + defer exit() + + tagID, err := r.usecases.Tag.Remove(ctx, id.TagID(input.TagID), getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.RemoveTagPayload{ + TagID: tagID.ID(), + }, nil +} diff --git a/internal/adapter/gql/resolver_scene.go b/internal/adapter/gql/resolver_scene.go index 1071a3c5b..01b53e2e4 100644 --- a/internal/adapter/gql/resolver_scene.go +++ b/internal/adapter/gql/resolver_scene.go @@ -79,6 +79,22 @@ func (r *sceneResolver) LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlm return *sl, nil } +func (r *sceneResolver) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) { + exit := trace(ctx) + defer exit() + + tags, err := r.usecases.Tag.FetchByScene(ctx, id.SceneID(obj.ID), getOperator(ctx)) + if err != nil { + return nil, err 
+ } + + res := make([]gqlmodel.Tag, 0, len(tags)) + for _, t := range tags { + res = append(res, gqlmodel.ToTag(*t)) + } + return res, nil +} + type scenePluginResolver struct{ *Resolver } func (r *scenePluginResolver) Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) { diff --git a/internal/adapter/gql/resolver_tag.go b/internal/adapter/gql/resolver_tag.go new file mode 100644 index 000000000..b3bd77e4e --- /dev/null +++ b/internal/adapter/gql/resolver_tag.go @@ -0,0 +1,48 @@ +package gql + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "github.com/reearth/reearth-backend/pkg/id" +) + +type tagItemResolver struct{ *Resolver } + +func (r *Resolver) TagItem() TagItemResolver { + return &tagItemResolver{r} +} + +func (t tagItemResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetSchema, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetSchemaID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) +} + +func (t tagItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) +} + +func (t tagItemResolver) LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) { + exit := trace(ctx) + defer exit() + + if obj.LinkedDatasetID == nil || obj.LinkedDatasetFieldID == nil { + return nil, nil + } + ds, err := DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + if err != nil || ds == nil { + return nil, err + } + return ds.Field(*obj.LinkedDatasetFieldID), nil +} diff --git a/internal/infrastructure/memory/container.go b/internal/infrastructure/memory/container.go index 933116833..2c9b9d45f 100644 --- a/internal/infrastructure/memory/container.go +++ 
b/internal/infrastructure/memory/container.go @@ -19,6 +19,7 @@ func InitRepos(c *repo.Container) *repo.Container { c.PropertySchema = NewPropertySchema() c.Property = NewProperty() c.Scene = NewScene() + c.Tag = NewTag() c.Team = NewTeam() c.User = NewUser() c.SceneLock = NewSceneLock() diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index 674313342..c410e9271 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -255,3 +255,20 @@ func (r *Layer) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { } return nil } + +func (r *Layer) FindByTag(ctx context.Context, tagID id.TagID, s []id.SceneID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + var res layer.List + // NOTE(review): the scene filter s is ignored here, unlike the mongo implementation - confirm whether the memory repo should also filter by scene + for _, l := range r.data { + l := l + for _, t := range l.Tags().Tags() { + if t == tagID { + res = append(res, &l) + } + } + } + + return res, nil +} diff --git a/internal/infrastructure/memory/layer_test.go b/internal/infrastructure/memory/layer_test.go new file mode 100644 index 000000000..f1e2936ce --- /dev/null +++ b/internal/infrastructure/memory/layer_test.go @@ -0,0 +1,29 @@ +package memory + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + +func TestLayer_FindByTag(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + lg := layer.New().NewID().Tags(tl).Scene(sid).Group().MustBuild() + repo := Layer{ + data: map[id.LayerID]layer.Layer{ + lg.ID(): lg, + }, + } + out, err := repo.FindByTag(ctx, t1.ID(), sl) + assert.NoError(t, err) + l := layer.Layer(lg) + assert.Equal(t, layer.List{&l}, out) +} diff --git a/internal/infrastructure/memory/tag.go 
b/internal/infrastructure/memory/tag.go new file mode 100644 index 000000000..3b21b5afd --- /dev/null +++ b/internal/infrastructure/memory/tag.go @@ -0,0 +1,220 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/pkg/rerror" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Tag struct { + lock sync.Mutex + data map[id.TagID]tag.Tag +} + +func NewTag() repo.Tag { + return &Tag{ + data: map[id.TagID]tag.Tag{}, + } +} + +func (t *Tag) FindByID(ctx context.Context, tagID id.TagID, ids []id.SceneID) (tag.Tag, error) { + t.lock.Lock() + defer t.lock.Unlock() + + res, ok := t.data[tagID] + if ok && isSceneIncludes(res.Scene(), ids) { + return res, nil + } + return nil, rerror.ErrNotFound +} + +func (t *Tag) FindByIDs(ctx context.Context, tids []id.TagID, ids []id.SceneID) ([]*tag.Tag, error) { + t.lock.Lock() + defer t.lock.Unlock() + + var res []*tag.Tag + for _, id := range tids { + if d, ok := t.data[id]; ok { + if isSceneIncludes(d.Scene(), ids) { + res = append(res, &d) + continue + } + } + res = append(res, nil) + } + return res, nil +} + +func (t *Tag) FindItemByID(ctx context.Context, tagID id.TagID, ids []id.SceneID) (*tag.Item, error) { + t.lock.Lock() + defer t.lock.Unlock() + + if d, ok := t.data[tagID]; ok { + if res := tag.ItemFrom(d); res != nil { + if isSceneIncludes(res.Scene(), ids) { + return res, nil + } + } + } + return nil, rerror.ErrNotFound +} + +func (t *Tag) FindItemByIDs(ctx context.Context, tagIDs []id.TagID, ids []id.SceneID) ([]*tag.Item, error) { + t.lock.Lock() + defer t.lock.Unlock() + + var res []*tag.Item + for _, id := range tagIDs { + if d, ok := t.data[id]; ok { + if ti := tag.ItemFrom(d); ti != nil { + if isSceneIncludes(ti.Scene(), ids) { + res = append(res, ti) + } + } + } + } + return res, nil +} + +func (t *Tag) FindGroupByID(ctx context.Context, tagID id.TagID, ids 
[]id.SceneID) (*tag.Group, error) { + t.lock.Lock() + defer t.lock.Unlock() + + if d, ok := t.data[tagID]; ok { + if res := tag.GroupFrom(d); res != nil { + if isSceneIncludes(res.Scene(), ids) { + return res, nil + } + } + } + return nil, rerror.ErrNotFound +} + +func (t *Tag) FindGroupByIDs(ctx context.Context, tagIDs []id.TagID, ids []id.SceneID) ([]*tag.Group, error) { + t.lock.Lock() + defer t.lock.Unlock() + + var res []*tag.Group + for _, id := range tagIDs { + if d, ok := t.data[id]; ok { + if tg := tag.GroupFrom(d); tg != nil { + if isSceneIncludes(tg.Scene(), ids) { + res = append(res, tg) + } + } + } + } + return res, nil +} + +func (t *Tag) FindByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { + t.lock.Lock() + defer t.lock.Unlock() + + var res []*tag.Tag + for _, tag := range t.data { + tag := tag + if tag.Scene() == sceneID { + res = append(res, &tag) + } + } + return res, nil +} + +func (t *Tag) Save(ctx context.Context, tag tag.Tag) error { + t.lock.Lock() + defer t.lock.Unlock() + + t.data[tag.ID()] = tag + return nil +} + +func (t *Tag) SaveAll(ctx context.Context, tags []*tag.Tag) error { + t.lock.Lock() + defer t.lock.Unlock() + + for _, tagRef := range tags { + tag := *tagRef + t.data[tag.ID()] = tag + } + return nil +} + +func (t *Tag) Remove(ctx context.Context, tagID id.TagID) error { + t.lock.Lock() + defer t.lock.Unlock() + + delete(t.data, tagID) + return nil +} + +func (t *Tag) RemoveAll(ctx context.Context, ids []id.TagID) error { + t.lock.Lock() + defer t.lock.Unlock() + + for _, tagID := range ids { + delete(t.data, tagID) + } + return nil +} + +func (t *Tag) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + t.lock.Lock() + defer t.lock.Unlock() + + for tid, v := range t.data { + if v.Scene() == sceneID { + delete(t.data, tid) + } + } + return nil +} + +func (t *Tag) FindGroupByItem(ctx context.Context, tagID id.TagID, s []id.SceneID) (*tag.Group, error) { + t.lock.Lock() + defer t.lock.Unlock() + + 
for _, tg := range t.data { + if res := tag.GroupFrom(tg); res != nil { + tags := res.Tags() + for _, item := range tags.Tags() { + if item == tagID { + return res, nil + } + } + } + } + + return nil, rerror.ErrNotFound +} + +func (t *Tag) FindGroupByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Group, error) { + t.lock.Lock() + defer t.lock.Unlock() + + var res []*tag.Group + for _, tt := range t.data { + if group := tag.ToTagGroup(tt); tt.Scene() == sceneID && group != nil { + res = append(res, group) + } + } + return res, nil +} + +func (t *Tag) FindItemByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Item, error) { + t.lock.Lock() + defer t.lock.Unlock() + + var res []*tag.Item + for _, tt := range t.data { + if item := tag.ToTagItem(tt); tt.Scene() == sceneID && item != nil { + res = append(res, item) + } + } + return res, nil +} diff --git a/internal/infrastructure/memory/tag_test.go b/internal/infrastructure/memory/tag_test.go new file mode 100644 index 000000000..b4bfeac2f --- /dev/null +++ b/internal/infrastructure/memory/tag_test.go @@ -0,0 +1,348 @@ +package memory + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/pkg/rerror" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" + + "github.com/stretchr/testify/assert" +) + +func TestNewTag(t *testing.T) { + repo := NewTag() + assert.NotNil(t, repo) + +} + +func TestTag_FindByID(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tti := tag.Tag(t1) + repo := Tag{ + data: map[id.TagID]tag.Tag{t1.ID(): tti}, + } + out, err := repo.FindByID(ctx, t1.ID(), sl) + assert.NoError(t, err) + assert.Equal(t, tti, out) + + _, err = repo.FindByID(ctx, id.TagID{}, sl) + assert.Same(t, rerror.ErrNotFound, err) +} + +func TestTag_FindByIDs(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := 
id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t3) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + t3.ID(): tti2, + }, + } + out, err := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + assert.NoError(t, err) + assert.Equal(t, []*tag.Tag{&tti, &ttg}, out) +} + +func TestTag_FindByScene(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t3) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + t3.ID(): tti2, + }, + } + out, err := repo.FindByScene(ctx, sid2) + assert.NoError(t, err) + assert.Equal(t, []*tag.Tag{&tti2}, out) +} + +func TestTag_FindItemByScene(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid2).Label("group").Tags(tl).Build() + t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t3) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + t3.ID(): tti2, + }, + } + out, err := repo.FindItemByScene(ctx, sid2) + assert.NoError(t, err) + assert.Equal(t, 1, len(out)) + assert.Same(t, t3, out[0]) + + out, err = 
repo.FindItemByScene(ctx, id.SceneID{}) + assert.NoError(t, err) + assert.Equal(t, 0, len(out)) +} + +func TestTag_FindGroupByScene(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid2).Label("group").Tags(tl).Build() + t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t3) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + t3.ID(): tti2, + }, + } + out, err := repo.FindGroupByScene(ctx, sid2) + assert.NoError(t, err) + assert.Equal(t, 1, len(out)) + assert.Same(t, t2, out[0]) + + out, err = repo.FindGroupByScene(ctx, id.SceneID{}) + assert.NoError(t, err) + assert.Equal(t, 0, len(out)) +} + +func TestTag_FindGroupByID(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + out, err := repo.FindGroupByID(ctx, t2.ID(), sl) + assert.NoError(t, err) + assert.Equal(t, t2, out) + + _, err = repo.FindGroupByID(ctx, id.TagID{}, []id.SceneID{}) + assert.Same(t, rerror.ErrNotFound, err) +} + +func TestTag_FindItemByID(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): 
ttg, + }, + } + out, err := repo.FindItemByID(ctx, t1.ID(), sl) + assert.NoError(t, err) + assert.Equal(t, t1, out) + + _, err = repo.FindItemByID(ctx, id.TagID{}, sl) + assert.Same(t, rerror.ErrNotFound, err) +} + +func TestTag_FindGroupByIDs(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Build() + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group2").Build() + ttg := tag.Tag(t1) + ttg2 := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): ttg, + t2.ID(): ttg2, + }, + } + out, err := repo.FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + assert.NoError(t, err) + assert.Equal(t, []*tag.Group{t1, t2}, out) + + out, err = repo.FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, []id.SceneID{}) + assert.NoError(t, err) + assert.Equal(t, 0, len(out)) +} + +func TestTag_FindItemByIDs(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + t2, _ := tag.NewItem().NewID().Scene(sid).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): tti2, + }, + } + out, err := repo.FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + assert.NoError(t, err) + assert.Equal(t, []*tag.Item{t1, t2}, out) + + out, err = repo.FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, []id.SceneID{}) + assert.NoError(t, err) + assert.Equal(t, 0, len(out)) +} + +func TestTag_Save(t *testing.T) { + ctx := context.Background() + repo := NewTag() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tti := tag.Tag(t1) + + err := repo.Save(ctx, tti) + assert.NoError(t, err) + out, _ := repo.FindByID(ctx, t1.ID(), sl) + assert.Equal(t, tti, out) +} + +func TestTag_SaveAll(t *testing.T) { + ctx := context.Background() + 
repo := NewTag() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + t2, _ := tag.NewItem().NewID().Scene(sid).Label("item2").Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + + err := repo.SaveAll(ctx, []*tag.Tag{&tti, &tti2}) + assert.NoError(t, err) + out, _ := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + assert.Equal(t, []*tag.Tag{&tti, &tti2}, out) +} + +func TestTag_Remove(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + err := repo.Remove(ctx, t1.ID()) + assert.NoError(t, err) + out, _ := repo.FindByScene(ctx, sid) + assert.Equal(t, []*tag.Tag{&ttg}, out) +} + +func TestTag_RemoveAll(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + ttg := tag.Tag(t3) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): tti2, + t3.ID(): ttg, + }, + } + err := repo.RemoveAll(ctx, []id.TagID{t1.ID(), t3.ID()}) + assert.NoError(t, err) + out, _ := repo.FindByScene(ctx, sid) + assert.Equal(t, []*tag.Tag{&tti2}, out) +} + +func TestTag_RemoveByScene(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sid2 := id.NewSceneID() + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewItem().NewID().Scene(sid2).Label("item").Build() + 
t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + tti2 := tag.Tag(t2) + ttg := tag.Tag(t3) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): tti2, + t3.ID(): ttg, + }, + } + err := repo.RemoveByScene(ctx, sid) + assert.NoError(t, err) + out, _ := repo.FindByScene(ctx, sid2) + assert.Equal(t, []*tag.Tag{&tti2}, out) +} + +func TestTag_FindGroupByItem(t *testing.T) { + ctx := context.Background() + sid := id.NewSceneID() + sl := []id.SceneID{sid} + t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() + tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() + tti := tag.Tag(t1) + ttg := tag.Tag(t2) + repo := Tag{ + data: map[id.TagID]tag.Tag{ + t1.ID(): tti, + t2.ID(): ttg, + }, + } + out, err := repo.FindGroupByItem(ctx, t1.ID(), sl) + assert.NoError(t, err) + assert.Equal(t, t2, out) +} diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index a112483d3..ead33b18a 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -25,6 +25,7 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas c.PropertySchema = NewPropertySchema(client) c.Property = NewProperty(client) c.Scene = NewScene(client) + c.Tag = NewTag(client) c.Team = NewTeam(client) c.User = NewUser(client) c.SceneLock = NewSceneLock(client) diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index 28ae04f2a..3ef8a8f03 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -168,6 +168,17 @@ func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error return nil } +func (r *layerRepo) FindByTag(ctx context.Context, tagID id.TagID, f []id.SceneID) (layer.List, error) { + ids := []id.TagID{tagID} + filter := r.sceneFilter(bson.D{ + 
{Key: "tags", Value: bson.D{ + {Key: "$in", Value: id.TagIDToKeys(ids)}, + }}, + }, f) + + return r.find(ctx, nil, filter) +} + func (r *layerRepo) find(ctx context.Context, dst layer.List, filter bson.D) (layer.List, error) { c := mongodoc.LayerConsumer{ Rows: dst, diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go index bcc4cfdc5..1c3b0ac88 100644 --- a/internal/infrastructure/mongo/mongodoc/layer.go +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -3,6 +3,8 @@ package mongodoc import ( "errors" + "github.com/reearth/reearth-backend/pkg/tag" + "go.mongodb.org/mongo-driver/bson" "github.com/reearth/reearth-backend/pkg/id" @@ -42,6 +44,7 @@ type LayerDocument struct { Infobox *LayerInfoboxDocument Item *LayerItemDocument Group *LayerGroupDocument + Tags []string } type LayerConsumer struct { @@ -111,7 +114,11 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { Fields: fields, } } - + var tagIDs []string + tags := l.Tags() + for _, tid := range tags.Tags() { + tagIDs = append(tagIDs, tid.String()) + } id := l.ID().String() return &LayerDocument{ ID: id, @@ -124,6 +131,7 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { Plugin: l.Plugin().StringRef(), Extension: l.Extension().StringRef(), Property: l.Property().StringRef(), + Tags: tagIDs, }, id } @@ -173,6 +181,12 @@ func (d *LayerDocument) ModelItem() (*layer.Item, error) { return nil, err } + tids, err := id.TagIDsFrom(d.Tags) + if err != nil { + return nil, err + } + tagList := tag.NewListFromTags(tids) + return layer.NewItem(). ID(lid). Name(d.Name). @@ -182,6 +196,7 @@ func (d *LayerDocument) ModelItem() (*layer.Item, error) { Property(id.PropertyIDFromRef(d.Property)). Infobox(ib). Scene(sid). + Tags(tagList). // item LinkedDataset(id.DatasetIDFromRef(d.Item.LinkedDataset)). 
Build() @@ -210,6 +225,12 @@ func (d *LayerDocument) ModelGroup() (*layer.Group, error) { ids = append(ids, lid) } + tids, err := id.TagIDsFrom(d.Tags) + if err != nil { + return nil, err + } + tagList := tag.NewListFromTags(tids) + return layer.NewGroup(). ID(lid). Name(d.Name). @@ -219,6 +240,7 @@ func (d *LayerDocument) ModelGroup() (*layer.Group, error) { Property(id.PropertyIDFromRef(d.Property)). Infobox(ib). Scene(sid). + Tags(tagList). // group Root(d.Group != nil && d.Group.Root). Layers(layer.NewIDList(ids)). diff --git a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go new file mode 100644 index 000000000..68ff9cfd2 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -0,0 +1,169 @@ +package mongodoc + +import ( + "errors" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type TagItemDocument struct { + LinkedDatasetFieldID *string + LinkedDatasetID *string + LinkedDatasetSchemaID *string +} + +type TagGroupDocument struct { + Tags []string +} + +type TagDocument struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument +} + +type TagConsumer struct { + Rows []*tag.Tag + GroupRows []*tag.Group + ItemRows []*tag.Item +} + +func (c *TagConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc TagDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + ti, tg, err := doc.Model() + if err != nil { + return err + } + if ti != nil { + var t tag.Tag = ti + c.Rows = append(c.Rows, &t) + c.ItemRows = append(c.ItemRows, ti) + } + if tg != nil { + var t tag.Tag = tg + c.Rows = append(c.Rows, &t) + c.GroupRows = append(c.GroupRows, tg) + } + return nil +} + +func NewTag(t tag.Tag) (*TagDocument, string) { + var group *TagGroupDocument + var item *TagItemDocument + if tg := tag.GroupFrom(t); tg != nil { + tags := tg.Tags() + 
ids := tags.Tags() + + group = &TagGroupDocument{ + Tags: id.TagIDToKeys(ids), + } + } + + if ti := tag.ItemFrom(t); ti != nil { + item = &TagItemDocument{ + LinkedDatasetFieldID: ti.LinkedDatasetFieldID().StringRef(), + LinkedDatasetID: ti.LinkedDatasetID().StringRef(), + LinkedDatasetSchemaID: ti.LinkedDatasetSchemaID().StringRef(), + } + } + + tid := t.ID().String() + return &TagDocument{ + ID: tid, + Label: t.Label(), + Scene: t.Scene().String(), + Item: item, + Group: group, + }, tid +} + +func NewTags(tags []*tag.Tag) ([]interface{}, []string) { + res := make([]interface{}, 0, len(tags)) + ids := make([]string, 0, len(tags)) + for _, d := range tags { + if d == nil { + continue + } + r, tid := NewTag(*d) + res = append(res, r) + ids = append(ids, tid) + } + return res, ids +} + +func (d *TagDocument) Model() (*tag.Item, *tag.Group, error) { + if d.Item != nil { + ti, err := d.ModelItem() + if err != nil { + return nil, nil, err + } + return ti, nil, nil + } + if d.Group != nil { + tg, err := d.ModelGroup() + if err != nil { + return nil, nil, err + } + return nil, tg, nil + } + return nil, nil, errors.New("invalid tag") +} + +func (d *TagDocument) ModelItem() (*tag.Item, error) { + tid, err := id.TagIDFrom(d.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + + return tag.NewItem(). + ID(tid). + Label(d.Label). + Scene(sid). + LinkedDatasetSchemaID(id.DatasetSchemaIDFromRef(d.Item.LinkedDatasetSchemaID)). + LinkedDatasetID(id.DatasetIDFromRef(d.Item.LinkedDatasetID)). + LinkedDatasetFieldID(id.DatasetSchemaFieldIDFromRef(d.Item.LinkedDatasetFieldID)). 
+ Build() +} + +func (d *TagDocument) ModelGroup() (*tag.Group, error) { + tid, err := id.TagIDFrom(d.ID) + if err != nil { + return nil, err + } + sid, err := id.SceneIDFrom(d.Scene) + if err != nil { + return nil, err + } + + ids := make([]id.TagID, 0, len(d.Group.Tags)) + for _, lgid := range d.Group.Tags { + tagId, err := id.TagIDFrom(lgid) + if err != nil { + return nil, err + } + ids = append(ids, tagId) + } + + return tag.NewGroup(). + ID(tid). + Label(d.Label). + Scene(sid). + Tags(tag.NewListFromTags(ids)). + Build() +} diff --git a/internal/infrastructure/mongo/mongodoc/tag_test.go b/internal/infrastructure/mongo/mongodoc/tag_test.go new file mode 100644 index 000000000..ed9d30920 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -0,0 +1,491 @@ +package mongodoc + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" +) + +func TestNewTag(t *testing.T) { + sid := id.NewSceneID() + dssid := id.NewDatasetSchemaID() + dsid := id.NewDatasetID() + dssfid := id.NewDatasetSchemaFieldID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + LinkedDatasetFieldID(dssfid.Ref()). + LinkedDatasetID(dsid.Ref()). + LinkedDatasetSchemaID(dssid.Ref()). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Scene(sid). 
+ Build() + type args struct { + t tag.Tag + } + tests := []struct { + name string + args args + want *TagDocument + want1 string + }{ + { + name: "New tag group", + args: args{ + t: tg, + }, + want: &TagDocument{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.ID().String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + want1: tg.ID().String(), + }, + { + name: "New tag item", + args: args{ + t: ti, + }, + want: &TagDocument{ + ID: ti.ID().String(), + Label: "Item", + Scene: sid.ID().String(), + Item: &TagItemDocument{ + LinkedDatasetFieldID: dssfid.RefString(), + LinkedDatasetID: dsid.RefString(), + LinkedDatasetSchemaID: dssid.RefString(), + }, + Group: nil, + }, + want1: ti.ID().String(), + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got, got1 := NewTag(tc.args.t) + assert.Equal(tt, tc.want1, got1) + assert.Equal(tt, tc.want, got) + }) + } +} + +func TestNewTags(t *testing.T) { + sid := id.NewSceneID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Scene(sid). 
+ Build() + tgi := tag.Tag(tg) + type args struct { + tags []*tag.Tag + } + tests := []struct { + name string + args args + want []interface{} + want1 []string + }{ + { + name: "new tags", + args: args{ + tags: []*tag.Tag{ + &tgi, + }, + }, + want: []interface{}{ + &TagDocument{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.ID().String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + }, + want1: []string{tgi.ID().String()}, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got, got1 := NewTags(tc.args.tags) + assert.Equal(tt, tc.want, got) + assert.Equal(tt, tc.want1, got1) + }) + } +} + +func TestFuncConsumer_Consume(t *testing.T) { + sid := id.NewSceneID() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Scene(sid). + Build() + ti, _ := tag.NewItem(). + NewID(). + Label("group"). + Scene(sid). + Build() + doc, _ := NewTag(tg) + doc1, _ := NewTag(ti) + r, _ := bson.Marshal(doc) + r1, _ := bson.Marshal(doc1) + type fields struct { + Rows []*tag.Tag + GroupRows []*tag.Group + ItemRows []*tag.Item + } + type args struct { + raw bson.Raw + } + tests := []struct { + name string + fields fields + args args + wantErr bool + }{ + { + name: "nil row", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: nil, + }, + wantErr: false, + }, + { + name: "consume tag group", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: r, + }, + wantErr: false, + }, + { + name: "consume tag item", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: r1, + }, + wantErr: false, + }, + { + name: "fail: unmarshal error", + fields: fields{ + Rows: nil, + GroupRows: nil, + ItemRows: nil, + }, + args: args{ + raw: []byte{}, + }, + wantErr: true, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + c := &TagConsumer{ + 
Rows:      tc.fields.Rows,
+				GroupRows: tc.fields.GroupRows,
+				ItemRows:  tc.fields.ItemRows,
+			}
+
+			err := c.Consume(tc.args.raw)
+			// Check both polarities: an error must occur iff wantErr is set.
+			assert.Equal(tt, tc.wantErr, err != nil)
+		})
+	}
+}
+
+func TestTagDocument_Model(t *testing.T) {
+	sid := id.NewSceneID()
+	dssid := id.NewDatasetSchemaID()
+	dsid := id.NewDatasetID()
+	dssfid := id.NewDatasetSchemaFieldID()
+	ti, _ := tag.NewItem().
+		NewID().
+		Label("Item").
+		Scene(sid).
+		LinkedDatasetFieldID(dssfid.Ref()).
+		LinkedDatasetID(dsid.Ref()).
+		LinkedDatasetSchemaID(dssid.Ref()).
+		Build()
+	tg, _ := tag.NewGroup().
+		NewID().
+		Label("group").
+		Tags(tag.NewListFromTags([]id.TagID{ti.ID()})).
+		Scene(sid).
+		Build()
+	type fields struct {
+		ID    string
+		Label string
+		Scene string
+		Item  *TagItemDocument
+		Group *TagGroupDocument
+	}
+	tests := []struct {
+		name    string
+		fields  fields
+		want    *tag.Item
+		want1   *tag.Group
+		wantErr bool
+	}{
+		{
+			name: "item model",
+			fields: fields{
+				ID:    ti.ID().String(),
+				Label: "Item",
+				Scene: sid.ID().String(),
+				Item: &TagItemDocument{
+					LinkedDatasetFieldID:  dssfid.RefString(),
+					LinkedDatasetID:       dsid.RefString(),
+					LinkedDatasetSchemaID: dssid.RefString(),
+				},
+				Group: nil,
+			},
+			want:    ti,
+			want1:   nil,
+			wantErr: false,
+		},
+		{
+			name: "group model",
+			fields: fields{
+				ID:    tg.ID().String(),
+				Label: "group",
+				Scene: sid.ID().String(),
+				Item:  nil,
+				Group: &TagGroupDocument{Tags: []string{ti.ID().String()}},
+			},
+			want:    nil,
+			want1:   tg,
+			wantErr: false,
+		},
+		{
+			name:    "fail: invalid tag",
+			fields:  fields{},
+			want:    nil,
+			want1:   nil,
+			wantErr: true,
+		},
+	}
+	for _, tc := range tests {
+		tc := tc
+		t.Run(tc.name, func(tt *testing.T) {
+			tt.Parallel()
+			d := &TagDocument{
+				ID:    tc.fields.ID,
+				Label: tc.fields.Label,
+				Scene: tc.fields.Scene,
+				Item:  tc.fields.Item,
+				Group: tc.fields.Group,
+			}
+			got, got1, err := d.Model()
+			if tc.wantErr {
+				assert.Error(tt, err)
+			} else {
+				assert.Equal(tt, tc.want, got)
+				assert.Equal(tt, tc.want1, got1)
+			}
+		})
+	}
+}
+
+func TestTagDocument_ModelGroup(t *testing.T) { + sid := id.NewSceneID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + Build() + tg, _ := tag.NewGroup(). + NewID(). + Label("group"). + Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Scene(sid). + Build() + type fields struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument + } + tests := []struct { + name string + fields fields + want *tag.Group + wantErr bool + }{ + { + name: "invalid id", + fields: fields{ + ID: "xxx", + }, + want: nil, + wantErr: true, + }, + { + name: "invalid id", + fields: fields{ + ID: id.NewTagID().String(), + Scene: "xxx", + }, + want: nil, + wantErr: true, + }, + { + name: "invalid item id", + fields: fields{ + ID: id.NewTagID().String(), + Scene: id.NewSceneID().String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{"xxx"}}, + }, + want: nil, + wantErr: true, + }, + { + name: "pass", + fields: fields{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.ID().String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + want: tg, + wantErr: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + d := &TagDocument{ + ID: tc.fields.ID, + Label: tc.fields.Label, + Scene: tc.fields.Scene, + Item: tc.fields.Item, + Group: tc.fields.Group, + } + got, err := d.ModelGroup() + if tc.wantErr { + assert.Error(tt, err) + } + assert.Equal(tt, tc.want, got) + }) + } +} + +func TestTagDocument_ModelItem(t *testing.T) { + sid := id.NewSceneID() + dssid := id.NewDatasetSchemaID() + dsid := id.NewDatasetID() + dssfid := id.NewDatasetSchemaFieldID() + ti, _ := tag.NewItem(). + NewID(). + Label("Item"). + Scene(sid). + LinkedDatasetFieldID(dssfid.Ref()). + LinkedDatasetID(dsid.Ref()). + LinkedDatasetSchemaID(dssid.Ref()). 
+ Build() + type fields struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument + } + tests := []struct { + name string + fields fields + want *tag.Item + wantErr bool + }{ + { + name: "invalid id", + fields: fields{ + ID: "xxx", + }, + want: nil, + wantErr: true, + }, + { + name: "invalid id", + fields: fields{ + ID: id.NewTagID().String(), + Scene: "xxx", + }, + want: nil, + wantErr: true, + }, + { + name: "pass", + fields: fields{ + ID: ti.ID().String(), + Label: ti.Label(), + Scene: ti.Scene().String(), + Item: &TagItemDocument{ + LinkedDatasetFieldID: dssfid.RefString(), + LinkedDatasetID: dsid.RefString(), + LinkedDatasetSchemaID: dssid.RefString(), + }, + Group: nil, + }, + want: ti, + wantErr: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + d := &TagDocument{ + ID: tc.fields.ID, + Label: tc.fields.Label, + Scene: tc.fields.Scene, + Item: tc.fields.Item, + Group: tc.fields.Group, + } + got, err := d.ModelItem() + if tc.wantErr { + assert.Error(tt, err) + } + assert.Equal(tt, tc.want, got) + }) + } +} diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go new file mode 100644 index 000000000..58175649f --- /dev/null +++ b/internal/infrastructure/mongo/tag.go @@ -0,0 +1,289 @@ +package mongo + +import ( + "context" + + "go.mongodb.org/mongo-driver/bson" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type tagRepo struct { + client *mongodoc.ClientCollection +} + +func NewTag(client *mongodoc.Client) repo.Tag { + r := &tagRepo{client: client.WithCollection("tag")} + r.init() + return r +} + +func (r *tagRepo) init() { + i := 
r.client.CreateIndex(context.Background(), nil) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "tag", i) + } +} + +func (r *tagRepo) FindByID(ctx context.Context, id id.TagID, f []id.SceneID) (tag.Tag, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findOne(ctx, filter) +} + +func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Tag, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.TagIDToKeys(ids)}, + }}, + }, f) + dst := make([]*tag.Tag, 0, len(ids)) + res, err := r.find(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterTags(ids, res), nil +} + +func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID, f []id.SceneID) (*tag.Item, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findItemOne(ctx, filter) +} + +func (r *tagRepo) FindItemByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Item, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.TagIDToKeys(ids)}, + }}, + }, f) + dst := make([]*tag.Item, 0, len(ids)) + res, err := r.findItems(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterTagItems(ids, res), nil +} + +func (r *tagRepo) FindGroupByID(ctx context.Context, id id.TagID, f []id.SceneID) (*tag.Group, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: id.String()}, + }, f) + return r.findGroupOne(ctx, filter) +} + +func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Group, error) { + filter := r.sceneFilter(bson.D{ + {Key: "id", Value: bson.D{ + {Key: "$in", Value: id.TagIDToKeys(ids)}, + }}, + }, f) + dst := make([]*tag.Group, 0, len(ids)) + res, err := r.findGroups(ctx, dst, filter) + if err != nil { + return nil, err + } + return filterTagGroups(ids, res), nil +} + +func (r *tagRepo) 
FindByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + filter := bson.D{ + {Key: "scene", Value: id.String()}, + } + return r.find(ctx, nil, filter) +} + +func (r *tagRepo) FindGroupByScene(ctx context.Context, id id.SceneID) ([]*tag.Group, error) { + filter := bson.D{ + {Key: "scene", Value: id.String()}, + } + return r.findGroups(ctx, nil, filter) +} + +func (r *tagRepo) FindItemByScene(ctx context.Context, id id.SceneID) ([]*tag.Item, error) { + filter := bson.D{ + {Key: "scene", Value: id.String()}, + } + return r.findItems(ctx, nil, filter) +} + +func (r *tagRepo) FindGroupByItem(ctx context.Context, tagID id.TagID, f []id.SceneID) (*tag.Group, error) { + ids := []id.TagID{tagID} + filter := r.sceneFilter(bson.D{ + {Key: "group.tags", Value: bson.D{ + {Key: "$in", Value: id.TagIDToKeys(ids)}, + }}, + }, f) + + return r.findGroupOne(ctx, filter) +} + +func (r *tagRepo) Save(ctx context.Context, tag tag.Tag) error { + doc, tid := mongodoc.NewTag(tag) + return r.client.SaveOne(ctx, tid, doc) +} + +func (r *tagRepo) SaveAll(ctx context.Context, tags []*tag.Tag) error { + if tags == nil { + return nil + } + docs, ids := mongodoc.NewTags(tags) + return r.client.SaveAll(ctx, ids, docs) +} + +func (r *tagRepo) Remove(ctx context.Context, id id.TagID) error { + return r.client.RemoveOne(ctx, id.String()) +} + +func (r *tagRepo) RemoveAll(ctx context.Context, ids []id.TagID) error { + if len(ids) == 0 { + return nil + } + return r.client.RemoveAll(ctx, id.TagIDToKeys(ids)) +} + +func (r *tagRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + filter := bson.D{ + {Key: "scene", Value: sceneID.String()}, + } + _, err := r.client.Collection().DeleteMany(ctx, filter) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +func (r *tagRepo) find(ctx context.Context, dst []*tag.Tag, filter bson.D) ([]*tag.Tag, error) { + c := mongodoc.TagConsumer{ + Rows: dst, + } + if err := r.client.Find(ctx, filter, &c); err != nil { + 
return nil, err + } + return c.Rows, nil +} + +func (r *tagRepo) findOne(ctx context.Context, filter bson.D) (tag.Tag, error) { + c := mongodoc.TagConsumer{} + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + if len(c.Rows) == 0 { + return nil, rerror.ErrNotFound + } + return *c.Rows[0], nil +} + +func (r *tagRepo) findItemOne(ctx context.Context, filter bson.D) (*tag.Item, error) { + c := mongodoc.TagConsumer{} + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + if len(c.ItemRows) == 0 { + return nil, rerror.ErrNotFound + } + return c.ItemRows[0], nil +} + +func (r *tagRepo) findGroupOne(ctx context.Context, filter bson.D) (*tag.Group, error) { + c := mongodoc.TagConsumer{} + if err := r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + if len(c.GroupRows) == 0 { + return nil, rerror.ErrNotFound + } + return c.GroupRows[0], nil +} + +func (r *tagRepo) findItems(ctx context.Context, dst []*tag.Item, filter bson.D) ([]*tag.Item, error) { + c := mongodoc.TagConsumer{ + ItemRows: dst, + } + if c.ItemRows != nil { + c.Rows = make([]*tag.Tag, 0, len(c.ItemRows)) + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.ItemRows, nil +} + +func (r *tagRepo) findGroups(ctx context.Context, dst []*tag.Group, filter bson.D) ([]*tag.Group, error) { + c := mongodoc.TagConsumer{ + GroupRows: dst, + } + if c.GroupRows != nil { + c.Rows = make([]*tag.Tag, 0, len(c.GroupRows)) + } + if err := r.client.Find(ctx, filter, &c); err != nil { + return nil, err + } + return c.GroupRows, nil +} + +func filterTags(ids []id.TagID, rows []*tag.Tag) []*tag.Tag { + res := make([]*tag.Tag, 0, len(ids)) + for _, tid := range ids { + var r2 *tag.Tag + for _, r := range rows { + if r == nil { + continue + } + if r3 := *r; r3 != nil && r3.ID() == tid { + r2 = &r3 + break + } + } + res = append(res, r2) + } + return res +} + +func filterTagItems(ids []id.TagID, rows []*tag.Item) 
[]*tag.Item { + res := make([]*tag.Item, 0, len(ids)) + for _, tid := range ids { + var r2 *tag.Item + for _, r := range rows { + if r != nil && r.ID() == tid { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func filterTagGroups(ids []id.TagID, rows []*tag.Group) []*tag.Group { + res := make([]*tag.Group, 0, len(ids)) + for _, tid := range ids { + var r2 *tag.Group + for _, r := range rows { + if r != nil && r.ID() == tid { + r2 = r + break + } + } + res = append(res, r2) + } + return res +} + +func (*tagRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { + if scenes == nil { + return filter + } + filter = append(filter, bson.E{ + Key: "scene", + Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + }) + return filter +} diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index a0b5f0192..6c5a383d2 100644 --- a/internal/usecase/interactor/common.go +++ b/internal/usecase/interactor/common.go @@ -28,6 +28,7 @@ func NewContainer(r *repo.Container, g *gateway.Container, config ContainerConfi Project: NewProject(r, g), Property: NewProperty(r, g), Scene: NewScene(r, g), + Tag: NewTag(r), Team: NewTeam(r), User: NewUser(r, g, config.SignupSecret), } diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index 6e6325f68..e6e62187d 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -1006,3 +1006,69 @@ func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam tx.Commit() return rootLayers, parent, nil } + +func (i *Layer) AttachTag(ctx context.Context, layerID id.LayerID, tagID id.TagID, operator *usecase.Operator) (layer.Layer, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, 
err + } + + layer, err := i.layerRepo.FindByID(ctx, layerID, scenes) + if err != nil { + return nil, err + } + + if err := layer.AttachTag(tagID); err != nil { + return nil, err + } + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return nil, err + } + + tx.Commit() + return layer, nil +} + +func (i *Layer) DetachTag(ctx context.Context, layerID id.LayerID, tagID id.TagID, operator *usecase.Operator) (layer.Layer, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + + layer, err := i.layerRepo.FindByID(ctx, layerID, scenes) + if err != nil { + return nil, err + } + + if err := layer.DetachTag(tagID); err != nil { + return nil, err + } + err = i.layerRepo.Save(ctx, layer) + if err != nil { + return nil, err + } + + tx.Commit() + return layer, nil +} diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go new file mode 100644 index 000000000..9cb15da5f --- /dev/null +++ b/internal/usecase/interactor/tag.go @@ -0,0 +1,362 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Tag struct { + commonScene + tagRepo repo.Tag + layerRepo repo.Layer + sceneRepo repo.Scene + transaction repo.Transaction +} + +func NewTag(r *repo.Container) interfaces.Tag { + return &Tag{ + commonScene: commonScene{sceneRepo: r.Scene}, + tagRepo: r.Tag, + layerRepo: r.Layer, + sceneRepo: r.Scene, + transaction: r.Transaction, + } +} + +func (i *Tag) CreateItem(ctx context.Context, inp 
interfaces.CreateTagItemParam, operator *usecase.Operator) (*tag.Item, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.CanWriteScene(ctx, inp.SceneID, operator); err != nil { + return nil, interfaces.ErrOperationDenied + } + + builder := tag.NewItem(). + NewID(). + Label(inp.Label). + Scene(inp.SceneID) + if inp.LinkedDatasetSchemaID != nil && inp.LinkedDatasetID != nil && inp.LinkedDatasetField != nil { + builder = builder. + LinkedDatasetFieldID(inp.LinkedDatasetField). + LinkedDatasetID(inp.LinkedDatasetID). + LinkedDatasetSchemaID(inp.LinkedDatasetSchemaID) + } + item, err := builder.Build() + if err != nil { + return nil, err + } + + err = i.tagRepo.Save(ctx, item) + if err != nil { + return nil, err + } + tx.Commit() + return item, nil +} + +func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupParam, operator *usecase.Operator) (*tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.CanWriteScene(ctx, inp.SceneID, operator); err != nil { + return nil, interfaces.ErrOperationDenied + } + + list := tag.NewListFromTags(inp.Tags) + group, err := tag.NewGroup(). + NewID(). + Label(inp.Label). + Scene(inp.SceneID). + Tags(list). 
+ Build() + + if err != nil { + return nil, err + } + + err = i.tagRepo.Save(ctx, group) + if err != nil { + return nil, err + } + tx.Commit() + return group, nil +} + +func (i *Tag) Fetch(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Tag, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.tagRepo.FindByIDs(ctx, ids, scenes) +} + +func (i *Tag) FetchByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Tag, error) { + err := i.CanReadScene(ctx, sid, operator) + if err != nil { + return nil, err + } + + return i.tagRepo.FindByScene(ctx, sid) +} + +func (i *Tag) FetchItem(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Item, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.tagRepo.FindItemByIDs(ctx, ids, scenes) +} + +func (i *Tag) FetchGroup(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Group, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + return i.tagRepo.FindGroupByIDs(ctx, ids, scenes) +} + +func (i *Tag) FetchGroupsByLayer(ctx context.Context, lid id.LayerID, operator *usecase.Operator) ([]*tag.Group, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + layer, err := i.layerRepo.FindByID(ctx, lid, scenes) + if err != nil { + return nil, err + } + + return i.tagRepo.FindGroupByIDs(ctx, layer.Tags().Tags(), scenes) +} + +func (i *Tag) FetchGroupsByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Group, error) { + err := i.CanReadScene(ctx, sid, operator) + if err != nil { + return nil, err + } + + return i.tagRepo.FindGroupByScene(ctx, sid) +} + +func (i *Tag) FetchItemsByLayer(ctx context.Context, lid id.LayerID, operator *usecase.Operator) ([]*tag.Item, error) { + scenes, err := 
i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + + layer, err := i.layerRepo.FindByID(ctx, lid, scenes) + if err != nil { + return nil, err + } + return i.tagRepo.FindItemByIDs(ctx, layer.Tags().Tags(), scenes) +} + +func (i *Tag) FetchItemsByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Item, error) { + err := i.CanReadScene(ctx, sid, operator) + if err != nil { + return nil, err + } + + return i.tagRepo.FindItemByScene(ctx, sid) +} + +func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemToGroupParam, operator *usecase.Operator) (*tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + // make sure item exist + _, err = i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes) + if err != nil { + return nil, err + } + + tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID, scenes) + if err != nil { + return nil, err + } + if !tg.Tags().Has(inp.ItemID) { + tg.Tags().Add(inp.ItemID) + } else { + return nil, errors.New("tag item is already attached to the group") + } + err = i.tagRepo.Save(ctx, tg) + if err != nil { + return nil, err + } + tx.Commit() + return tg, nil +} + +func (i *Tag) DetachItemFromGroup(ctx context.Context, inp interfaces.DetachItemToGroupParam, operator *usecase.Operator) (*tag.Group, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scenes, err := i.OnlyWritableScenes(ctx, operator) + if err != nil { + return nil, err + } + // make sure item exist + _, err = i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes) + if err != nil { + return nil, err + } + + tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID, scenes) + if 
err != nil {
+		return nil, err
+	}
+	if tg.Tags().Has(inp.ItemID) {
+		tg.Tags().Remove(inp.ItemID)
+	} else {
+		return nil, errors.New("tag item is not attached to the group")
+	}
+
+	err = i.tagRepo.Save(ctx, tg)
+	if err != nil {
+		return nil, err
+	}
+
+	tx.Commit()
+	return tg, nil
+}
+
+func (i *Tag) UpdateTag(ctx context.Context, inp interfaces.UpdateTagParam, operator *usecase.Operator) (*tag.Tag, error) {
+	tx, err := i.transaction.Begin()
+	if err != nil {
+		return nil, err
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	if err := i.CanWriteScene(ctx, inp.SceneID, operator); err != nil {
+		return nil, interfaces.ErrOperationDenied
+	}
+
+	tg, err := i.tagRepo.FindByID(ctx, inp.TagID, []id.SceneID{inp.SceneID})
+	if err != nil {
+		return nil, err
+	}
+
+	if inp.Label != nil {
+		tg.Rename(*inp.Label)
+	}
+
+	err = i.tagRepo.Save(ctx, tg)
+	if err != nil {
+		return nil, err
+	}
+	tx.Commit()
+	return &tg, nil
+}
+
+func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Operator) (*id.TagID, error) {
+	tx, err := i.transaction.Begin()
+
+	if err != nil {
+		return nil, err
+	}
+	defer func() {
+		if err2 := tx.End(ctx); err == nil && err2 != nil {
+			err = err2
+		}
+	}()
+
+	scenes, err := i.OnlyWritableScenes(ctx, operator)
+	if err != nil {
+		return nil, err
+	}
+
+	t, err := i.tagRepo.FindByID(ctx, tagID, scenes)
+	if err != nil {
+		return nil, err
+	}
+
+	if group := tag.ToTagGroup(t); group != nil {
+		tags := group.Tags()
+		if len(tags.Tags()) != 0 {
+			return nil, interfaces.ErrNonemptyTagGroupCannotDelete
+		}
+	}
+
+	if item := tag.ToTagItem(t); item != nil {
+		g, err := i.tagRepo.FindGroupByItem(ctx, item.ID(), scenes)
+		if err != nil && !errors.Is(err, rerror.ErrNotFound) {
+			return nil, err
+		}
+		if g != nil {
+			g.Tags().Remove(item.ID())
+
+			err = i.tagRepo.Save(ctx, g)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+
+	ls, err := i.layerRepo.FindByTag(ctx, tagID, scenes)
+	if 
err != nil && !errors.Is(err, rerror.ErrNotFound) {
+		return nil, err
+	}
+
+	if len(ls) > 0 {
+		for _, l := range ls.Deref() {
+			err = l.DetachTag(tagID)
+			if err != nil {
+				return nil, err
+			}
+		}
+		if err := i.layerRepo.SaveAll(ctx, ls); err != nil {
+			return nil, err
+		}
+	}
+
+	if err := i.tagRepo.Remove(ctx, tagID); err != nil {
+		return nil, err
+	}
+	return &tagID, nil
+}
diff --git a/internal/usecase/interfaces/common.go b/internal/usecase/interfaces/common.go
index 8c90ad1b6..3fa486959 100644
--- a/internal/usecase/interfaces/common.go
+++ b/internal/usecase/interfaces/common.go
@@ -25,6 +25,7 @@ type Container struct {
 	Property  Property
 	Published Published
 	Scene     Scene
+	Tag       Tag
 	Team      Team
 	User      User
 }
diff --git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go
index 9be04c60b..887f4563d 100644
--- a/internal/usecase/interfaces/layer.go
+++ b/internal/usecase/interfaces/layer.go
@@ -102,4 +102,6 @@ type Layer interface {
 	MoveInfoboxField(context.Context, MoveInfoboxFieldParam, *usecase.Operator) (id.InfoboxFieldID, layer.Layer, int, error)
 	RemoveInfoboxField(context.Context, RemoveInfoboxFieldParam, *usecase.Operator) (id.InfoboxFieldID, layer.Layer, error)
 	ImportLayer(context.Context, ImportLayerParam, *usecase.Operator) (layer.List, *layer.Group, error)
+	AttachTag(context.Context, id.LayerID, id.TagID, *usecase.Operator) (layer.Layer, error)
+	DetachTag(context.Context, id.LayerID, id.TagID, *usecase.Operator) (layer.Layer, error)
 }
diff --git a/internal/usecase/interfaces/tag.go b/internal/usecase/interfaces/tag.go
new file mode 100644
index 000000000..af703d93f
--- /dev/null
+++ b/internal/usecase/interfaces/tag.go
@@ -0,0 +1,59 @@
+package interfaces
+
+import (
+	"context"
+	"errors"
+
+	"github.com/reearth/reearth-backend/internal/usecase"
+	"github.com/reearth/reearth-backend/pkg/id"
+	"github.com/reearth/reearth-backend/pkg/tag"
+)
+
+var (
+	ErrNonemptyTagGroupCannotDelete = errors.New("can't delete 
non-empty tag group") +) + +type CreateTagItemParam struct { + Label string + SceneID id.SceneID + LinkedDatasetSchemaID *id.DatasetSchemaID + LinkedDatasetID *id.DatasetID + LinkedDatasetField *id.DatasetSchemaFieldID +} + +type CreateTagGroupParam struct { + Label string + SceneID id.SceneID + Tags []id.TagID +} + +type AttachItemToGroupParam struct { + ItemID, GroupID id.TagID +} + +type DetachItemToGroupParam struct { + ItemID, GroupID id.TagID +} + +type UpdateTagParam struct { + Label *string + SceneID id.SceneID + TagID id.TagID +} + +type Tag interface { + Fetch(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Tag, error) + FetchByScene(context.Context, id.SceneID, *usecase.Operator) ([]*tag.Tag, error) + FetchItem(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Item, error) + FetchGroup(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Group, error) + FetchGroupsByLayer(context.Context, id.LayerID, *usecase.Operator) ([]*tag.Group, error) + FetchGroupsByScene(context.Context, id.SceneID, *usecase.Operator) ([]*tag.Group, error) + FetchItemsByLayer(context.Context, id.LayerID, *usecase.Operator) ([]*tag.Item, error) + FetchItemsByScene(context.Context, id.SceneID, *usecase.Operator) ([]*tag.Item, error) + CreateItem(context.Context, CreateTagItemParam, *usecase.Operator) (*tag.Item, error) + CreateGroup(context.Context, CreateTagGroupParam, *usecase.Operator) (*tag.Group, error) + AttachItemToGroup(context.Context, AttachItemToGroupParam, *usecase.Operator) (*tag.Group, error) + DetachItemFromGroup(context.Context, DetachItemToGroupParam, *usecase.Operator) (*tag.Group, error) + UpdateTag(context.Context, UpdateTagParam, *usecase.Operator) (*tag.Tag, error) + Remove(context.Context, id.TagID, *usecase.Operator) (*id.TagID, error) +} diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index f329feb87..35c679e43 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ 
-11,6 +11,7 @@ type Container struct { PropertySchema PropertySchema Property Property Scene Scene + Tag Tag Team Team User User SceneLock SceneLock diff --git a/internal/usecase/repo/layer.go b/internal/usecase/repo/layer.go index b4d03d741..75b134bf3 100644 --- a/internal/usecase/repo/layer.go +++ b/internal/usecase/repo/layer.go @@ -19,6 +19,7 @@ type Layer interface { FindParentByID(context.Context, id.LayerID, []id.SceneID) (*layer.Group, error) FindByProperty(context.Context, id.PropertyID, []id.SceneID) (layer.Layer, error) FindByScene(context.Context, id.SceneID) (layer.List, error) + FindByTag(context.Context, id.TagID, []id.SceneID) (layer.List, error) Save(context.Context, layer.Layer) error SaveAll(context.Context, layer.List) error Remove(context.Context, id.LayerID) error diff --git a/internal/usecase/repo/tag.go b/internal/usecase/repo/tag.go new file mode 100644 index 000000000..2554989a2 --- /dev/null +++ b/internal/usecase/repo/tag.go @@ -0,0 +1,26 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" +) + +type Tag interface { + FindByID(context.Context, id.TagID, []id.SceneID) (tag.Tag, error) + FindByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Tag, error) + FindItemByID(context.Context, id.TagID, []id.SceneID) (*tag.Item, error) + FindItemByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Item, error) + FindGroupByID(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) + FindGroupByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Group, error) + FindByScene(context.Context, id.SceneID) ([]*tag.Tag, error) + FindGroupByItem(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) + FindGroupByScene(context.Context, id.SceneID) ([]*tag.Group, error) + FindItemByScene(context.Context, id.SceneID) ([]*tag.Item, error) + Save(context.Context, tag.Tag) error + SaveAll(context.Context, []*tag.Tag) error + Remove(context.Context, 
id.TagID) error + RemoveAll(context.Context, []id.TagID) error + RemoveByScene(context.Context, id.SceneID) error +} diff --git a/pkg/id/gen.go b/pkg/id/gen.go index 54d0703b7..9f2b63284 100644 --- a/pkg/id/gen.go +++ b/pkg/id/gen.go @@ -11,9 +11,11 @@ //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=user_gen.go --name=User //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_field_gen.go --name=DatasetSchemaField //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=infobox_field_gen.go --name=InfoboxField +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=tag_gen.go --name=Tag // Testing //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=asset_gen_test.go --name=Asset +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=tag_gen_test.go --name=Tag //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_gen_test.go --name=Dataset //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_gen_test.go --name=DatasetSchema //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=layer_gen_test.go --name=Layer diff --git a/pkg/id/tag_gen.go b/pkg/id/tag_gen.go new file mode 100644 index 000000000..1e2df781c --- /dev/null +++ b/pkg/id/tag_gen.go @@ -0,0 +1,297 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// TagID is an ID for Tag. +type TagID ID + +// NewTagID generates a new TagId. +func NewTagID() TagID { + return TagID(New()) +} + +// TagIDFrom generates a new TagID from a string. 
+func TagIDFrom(i string) (nid TagID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = TagID(did) + return +} + +// MustTagID generates a new TagID from a string, but panics if the string cannot be parsed. +func MustTagID(i string) TagID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return TagID(did) +} + +// TagIDFromRef generates a new TagID from a string ref. +func TagIDFromRef(i *string) *TagID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := TagID(*did) + return &nid +} + +// TagIDFromRefID generates a new TagID from a ref of a generic ID. +func TagIDFromRefID(i *ID) *TagID { + if i == nil { + return nil + } + nid := TagID(*i) + return &nid +} + +// ID returns a domain ID. +func (d TagID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d TagID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d TagID) GoString() string { + return "id.TagID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d TagID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d TagID) Ref() *TagID { + d2 := d + return &d2 +} + +// Contains returns whether the id is contained in the slice. +func (d TagID) Contains(ids []TagID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + +// CopyRef returns a copy of a reference. +func (d *TagID) CopyRef() *TagID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *TagID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *TagID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marshaler interface +func (d *TagID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *TagID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = TagIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *TagID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *TagID) UnmarshalText(text []byte) (err error) { + *d, err = TagIDFrom(string(text)) + return +} + +// IsNil returns true if an ID is nil or zero-value +func (d TagID) IsNil() bool { + return ID(d).IsNil() +} + +// TagIDToKeys converts IDs into a string slice. +func TagIDToKeys(ids []TagID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// TagIDsFrom converts a string slice into a ID slice. +func TagIDsFrom(ids []string) ([]TagID, error) { + dids := make([]TagID, 0, len(ids)) + for _, i := range ids { + did, err := TagIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// TagIDsFromID converts a generic ID slice into a ID slice. +func TagIDsFromID(ids []ID) []TagID { + dids := make([]TagID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, TagID(i)) + } + return dids +} + +// TagIDsFromIDRef converts a ref of a generic ID slice into a ID slice. +func TagIDsFromIDRef(ids []*ID) []TagID { + dids := make([]TagID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, TagID(*i)) + } + } + return dids +} + +// TagIDsToID converts a ID slice into a generic ID slice. 
+func TagIDsToID(ids []TagID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// TagIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func TagIDsToIDRef(ids []*TagID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// TagIDSet represents a set of TagIDs +type TagIDSet struct { + m map[TagID]struct{} + s []TagID +} + +// NewTagIDSet creates a new TagIDSet +func NewTagIDSet() *TagIDSet { + return &TagIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *TagIDSet) Add(p ...TagID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[TagID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []TagID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *TagIDSet) AddRef(p *TagID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *TagIDSet) Has(p TagID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *TagIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *TagIDSet) All() []TagID { + if s == nil { + return nil + } + return append([]TagID{}, s.s...) +} + +// Clone returns a cloned set +func (s *TagIDSet) Clone() *TagIDSet { + if s == nil { + return NewTagIDSet() + } + s2 := NewTagIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *TagIDSet) Merge(s2 *TagIDSet) *TagIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) 
+ return s3 +} diff --git a/pkg/id/tag_gen_test.go b/pkg/id/tag_gen_test.go new file mode 100644 index 000000000..0620cf8bb --- /dev/null +++ b/pkg/id/tag_gen_test.go @@ -0,0 +1,1011 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewTagID(t *testing.T) { + id := NewTagID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestTagIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result TagID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result TagID + err error + }{ + TagID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result TagID + err error + }{ + TagID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result TagID + err error + }{ + TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := TagIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustTagID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected TagID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + 
t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustTagID(tc.input) }) + return + } + result := MustTagID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestTagIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *TagID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := TagIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestTagIDFromRefID(t *testing.T) { + id := New() + + subId := TagIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestTagID_ID(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestTagID_String(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestTagID_GoString(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.TagID("+id.String()+")") +} + +func TestTagID_RefString(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestTagID_Ref(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestTagID_Contains(t *testing.T) { + id := NewTagID() + id2 := NewTagID() + assert.True(t, id.Contains([]TagID{id, id2})) + 
assert.False(t, id.Contains([]TagID{id2})) +} + +func TestTagID_CopyRef(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestTagID_IDRef(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestTagID_StringRef(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestTagID_MarhsalJSON(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestTagID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &TagID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestTagID_MarshalText(t *testing.T) { + id := New() + subId := TagIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestTagID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &TagID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestTagID_IsNil(t *testing.T) { + subId := TagID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *TagIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestTagIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []TagID + expected []string + }{ + { + name: "Empty slice", + input: make([]TagID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, 
+ { + name: "multiple elements", + input: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, TagIDToKeys(tc.input)) + }) + } + +} + +func TestTagIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []TagID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []TagID + err error + }{ + res: make([]TagID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []TagID + err error + }{ + res: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []TagID + err error + }{ + res: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []TagID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := TagIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := TagIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func 
TestTagIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []TagID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]TagID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TagIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestTagIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []TagID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]TagID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []TagID{MustTagID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []TagID{ + MustTagID(id1.String()), + MustTagID(id2.String()), + MustTagID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TagIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestTagIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []TagID + expected []ID + }{ + { + name: "Empty slice", + input: make([]TagID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: 
[]TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TagIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestTagIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustTagID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustTagID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustTagID(id3.String()) + + testCases := []struct { + name string + input []*TagID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*TagID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*TagID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*TagID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := TagIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewTagIDSet(t *testing.T) { + TagIdSet := NewTagIDSet() + + assert.NotNil(t, TagIdSet) + assert.Empty(t, TagIdSet.m) + assert.Empty(t, TagIdSet.s) +} + +func TestTagIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []TagID + expected *TagIDSet + }{ + { + name: "Empty slice", + input: make([]TagID, 0), + expected: &TagIDSet{ + m: map[TagID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: 
[]TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewTagIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestTagIDSet_AddRef(t *testing.T) { + t.Parallel() + + TagId := MustTagID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *TagID + expected *TagIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &TagIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &TagId, + expected: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewTagIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestTagIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + TagIDSet + TagID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + TagIDSet + TagID + }{TagIDSet: TagIDSet{}, TagID: MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + TagIDSet + TagID + }{TagIDSet: TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, TagID: MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + TagIDSet + TagID + }{TagIDSet: TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, TagID: MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.TagIDSet.Has(tc.input.TagID)) + }) + } +} + +func TestTagIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input TagIDSet + expected 
TagIDSet + }{ + { + name: "Empty Set", + input: TagIDSet{}, + expected: TagIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: TagIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestTagIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *TagIDSet + expected []TagID + }{ + { + name: "Empty slice", + input: &TagIDSet{ + m: map[TagID]struct{}{}, + s: nil, + }, + expected: make([]TagID, 0), + }, + { + name: "1 element", + input: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &TagIDSet{ + m: map[TagID]struct{}{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestTagIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *TagIDSet + expected *TagIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewTagIDSet(), + }, + { + name: "Empty set", + 
input: NewTagIDSet(), + expected: NewTagIDSet(), + }, + { + name: "1 element", + input: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &TagIDSet{ + m: map[TagID]struct{}{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestTagIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *TagIDSet + b *TagIDSet + } + expected *TagIDSet + }{ + { + name: "Empty Set", + input: struct { + a *TagIDSet + b *TagIDSet + }{ + a: &TagIDSet{}, + b: &TagIDSet{}, + }, + expected: &TagIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *TagIDSet + b *TagIDSet + }{ + a: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &TagIDSet{}, + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, 
+ s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *TagIDSet + b *TagIDSet + }{ + a: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + }, + s: []TagID{ + MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), + MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/layer/builder.go b/pkg/layer/builder.go index b726cb936..e28e27956 100644 --- a/pkg/layer/builder.go +++ b/pkg/layer/builder.go @@ -2,6 +2,7 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" ) type Builder struct { @@ -71,3 +72,8 @@ func (b *Builder) Infobox(infobox *Infobox) *Builder { b.base.infobox = infobox return b } + +func (b *Builder) Tags(tags *tag.List) *Builder { + b.base.tags = tags + return b +} diff --git a/pkg/layer/group.go b/pkg/layer/group.go index ad37499dd..e07284943 100644 --- a/pkg/layer/group.go +++ b/pkg/layer/group.go @@ -3,6 +3,7 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/tag" ) type Group struct { @@ -180,3 +181,23 @@ func (l *Group) ValidateProperties(pm property.Map) error { } return l.layerBase.ValidateProperties(pm) } + +func (l *Group) Tags() *tag.List { + return l.layerBase.tags +} + +func (l *Group) AttachTag(t id.TagID) error { + if l.layerBase.tags.Has(t) { + 
return ErrDuplicatedTag + } + l.layerBase.tags.Add(t) + return nil +} + +func (l *Group) DetachTag(t id.TagID) error { + if !l.layerBase.tags.Has(t) { + return ErrTagNotFound + } + l.layerBase.tags.Remove(t) + return nil +} diff --git a/pkg/layer/group_builder.go b/pkg/layer/group_builder.go index 71fb7a281..5c5e67ddf 100644 --- a/pkg/layer/group_builder.go +++ b/pkg/layer/group_builder.go @@ -2,6 +2,7 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" ) func GroupFromLayer(l Layer) *Group { @@ -110,3 +111,8 @@ func (b *GroupBuilder) LinkedDatasetSchema(linkedDatasetSchema *id.DatasetSchema b.l.linkedDatasetSchema = linkedDatasetSchema.CopyRef() return b } + +func (b *GroupBuilder) Tags(tags *tag.List) *GroupBuilder { + b.l.tags = tags + return b +} diff --git a/pkg/layer/group_builder_test.go b/pkg/layer/group_builder_test.go new file mode 100644 index 000000000..14194147e --- /dev/null +++ b/pkg/layer/group_builder_test.go @@ -0,0 +1,15 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + +func TestGroupBuilder_Tags(t *testing.T) { + l := tag.NewListFromTags([]id.TagID{id.NewTagID()}) + b := NewGroup().NewID().Tags(l).MustBuild() + assert.Same(t, l, b.Tags()) +} diff --git a/pkg/layer/group_test.go b/pkg/layer/group_test.go index aa231412c..d4d3bea48 100644 --- a/pkg/layer/group_test.go +++ b/pkg/layer/group_test.go @@ -3,6 +3,8 @@ package layer import ( "testing" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -11,6 +13,7 @@ var _ Layer = &Group{} var l1 = id.MustLayerID(id.New().String()) var l2 = id.MustLayerID(id.New().String()) +var tags = []id.TagID{id.NewTagID()} var group = Group{ layerBase: layerBase{ id: id.MustLayerID(id.New().String()), @@ -20,6 +23,7 @@ var group = Group{ 
extension: id.PluginExtensionID("foo").Ref(), property: nil, infobox: nil, + tags: tag.NewListFromTags(tags), scene: id.SceneID{}, }, layers: &IDList{ @@ -135,3 +139,15 @@ func TestGroup_MoveLayerFrom(t *testing.T) { group.MoveLayerFrom(l1, 1, &group) assert.Equal(t, l1, group.Layers().Layers()[1]) } + +func TestGroup_Tags(t *testing.T) { + tt := id.NewTagID() + err := group.AttachTag(tt) + assert.NoError(t, err) + tl := tags + tl = append(tl, tt) + assert.Equal(t, tl, group.Tags().Tags()) + err = group.DetachTag(tt) + assert.NoError(t, err) + assert.Equal(t, tags, group.Tags().Tags()) +} diff --git a/pkg/layer/item.go b/pkg/layer/item.go index 6cec341ca..ca92a8c57 100644 --- a/pkg/layer/item.go +++ b/pkg/layer/item.go @@ -3,6 +3,7 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/tag" ) type Item struct { @@ -149,3 +150,23 @@ func (l *Item) ValidateProperties(pm property.Map) error { } return l.layerBase.ValidateProperties(pm) } + +func (l *Item) Tags() *tag.List { + return l.layerBase.tags +} + +func (l *Item) AttachTag(t id.TagID) error { + if l.layerBase.tags.Has(t) { + return ErrDuplicatedTag + } + l.layerBase.tags.Add(t) + return nil +} + +func (l *Item) DetachTag(t id.TagID) error { + if !l.layerBase.tags.Has(t) { + return ErrTagNotFound + } + l.layerBase.tags.Remove(t) + return nil +} diff --git a/pkg/layer/item_builder.go b/pkg/layer/item_builder.go index 84d999efc..68a66b21d 100644 --- a/pkg/layer/item_builder.go +++ b/pkg/layer/item_builder.go @@ -2,6 +2,7 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" ) func ItemFromLayer(l Layer) *Item { @@ -100,3 +101,8 @@ func (b *ItemBuilder) LinkedDataset(linkedDataset *id.DatasetID) *ItemBuilder { b.l.linkedDataset = linkedDataset.CopyRef() return b } + +func (b *ItemBuilder) Tags(tags *tag.List) *ItemBuilder { + b.l.tags = tags + 
return b +} diff --git a/pkg/layer/item_builder_test.go b/pkg/layer/item_builder_test.go new file mode 100644 index 000000000..2c76c6a2f --- /dev/null +++ b/pkg/layer/item_builder_test.go @@ -0,0 +1,15 @@ +package layer + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + +func TestItemBuilder_Tags(t *testing.T) { + l := tag.NewListFromTags([]id.TagID{id.NewTagID()}) + b := NewItem().NewID().Tags(l).MustBuild() + assert.Same(t, l, b.Tags()) +} diff --git a/pkg/layer/item_test.go b/pkg/layer/item_test.go index a803e2dec..d65d9203b 100644 --- a/pkg/layer/item_test.go +++ b/pkg/layer/item_test.go @@ -1,3 +1,39 @@ package layer +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/tag" + "github.com/stretchr/testify/assert" +) + var _ Layer = &Item{} + +var tags2 = []id.TagID{id.NewTagID()} +var item = Item{ + layerBase: layerBase{ + id: id.MustLayerID(id.New().String()), + name: "xxx", + visible: false, + plugin: id.MustPluginID("aaa~1.1.1").Ref(), + extension: id.PluginExtensionID("foo").Ref(), + property: nil, + infobox: nil, + tags: tag.NewListFromTags(tags2), + scene: id.SceneID{}, + }, + linkedDataset: nil, +} + +func TestItem_Tags(t *testing.T) { + tt := id.NewTagID() + err := item.AttachTag(tt) + assert.NoError(t, err) + tl := tags2 + tl = append(tl, tt) + assert.Equal(t, tl, item.Tags().Tags()) + err = item.DetachTag(tt) + assert.NoError(t, err) + assert.Equal(t, tags2, item.Tags().Tags()) +} diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go index a22586686..56013b295 100644 --- a/pkg/layer/layer.go +++ b/pkg/layer/layer.go @@ -6,6 +6,12 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/tag" +) + +var ( + ErrDuplicatedTag = errors.New("duplicated tag") + ErrTagNotFound = errors.New("tag not 
found") ) type Layer interface { @@ -19,12 +25,15 @@ type Layer interface { HasInfobox() bool Infobox() *Infobox Scene() id.SceneID + Tags() *tag.List Rename(string) SetVisible(bool) SetInfobox(*Infobox) SetPlugin(*id.PluginID) Properties() []id.PropertyID ValidateProperties(property.Map) error + AttachTag(t id.TagID) error + DetachTag(t id.TagID) error } func ToLayerGroup(l Layer) *Group { @@ -72,6 +81,7 @@ type layerBase struct { property *id.PropertyID infobox *Infobox scene id.SceneID + tags *tag.List } func (l *layerBase) ID() id.LayerID { diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index 1e72becb3..15c8cd1c4 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/_n/99kwktfn5ml3fmw3fbn575hh0000gn/T/go-build3305837952/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/tmp/go-build698725398/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` diff --git a/pkg/tag/group.go b/pkg/tag/group.go new file mode 100644 index 000000000..7dcc6c8a1 --- /dev/null +++ b/pkg/tag/group.go @@ -0,0 +1,10 @@ +package tag + +type Group struct { + tag + tags *List +} + +func (g *Group) Tags() *List { + return g.tags +} diff --git a/pkg/tag/group_builder.go b/pkg/tag/group_builder.go new file mode 100644 index 000000000..76bce1200 --- /dev/null +++ b/pkg/tag/group_builder.go @@ -0,0 +1,63 @@ +package tag + +import ( + "github.com/reearth/reearth-backend/pkg/id" +) + +type GroupBuilder struct { + g *Group +} + +func NewGroup() *GroupBuilder { + return &GroupBuilder{g: &Group{}} +} + +func GroupFrom(t Tag) *Group { + li, ok := t.(*Group) + if !ok { + return nil + } + return li +} + +func (b *GroupBuilder) Build() (*Group, error) { + if id.ID(b.g.id).IsNil() { + 
return nil, id.ErrInvalidID + } + if id.ID(b.g.sceneId).IsNil() { + return nil, ErrInvalidSceneID + } + if b.g.label == "" { + return nil, ErrEmptyLabel + } + + return b.g, nil +} + +func (b *GroupBuilder) ID(tid id.TagID) *GroupBuilder { + b.g.id = tid + return b +} + +func (b *GroupBuilder) NewID() *GroupBuilder { + b.g.id = id.NewTagID() + return b +} + +func (b *GroupBuilder) Label(l string) *GroupBuilder { + b.g.label = l + return b +} + +func (b *GroupBuilder) Scene(sid id.SceneID) *GroupBuilder { + b.g.sceneId = sid + return b +} + +func (b *GroupBuilder) Tags(tl *List) *GroupBuilder { + if tl != nil { + b.g.tags = tl + } + + return b +} diff --git a/pkg/tag/group_test.go b/pkg/tag/group_test.go new file mode 100644 index 000000000..160246ada --- /dev/null +++ b/pkg/tag/group_test.go @@ -0,0 +1,112 @@ +package tag + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var _ Tag = &Group{} + +func TestGroupBuilder_NewID(t *testing.T) { + b := NewGroup().NewID() + assert.NotEqual(t, id.TagID{}, b.g.id) +} + +func TestGroupBuilder_Build(t *testing.T) { + tid := id.NewTagID() + sid := id.NewSceneID() + tags := []id.TagID{ + id.NewTagID(), + id.NewTagID(), + } + testCases := []struct { + Name, Label string + Id id.TagID + Scene id.SceneID + Tags *List + Expected struct { + Group Group + Error error + } + }{ + { + Name: "fail: nil tag ID", + Label: "xxx", + Scene: id.NewSceneID(), + Expected: struct { + Group Group + Error error + }{ + Error: id.ErrInvalidID, + }, + }, + { + Name: "fail: empty label", + Id: id.NewTagID(), + Scene: id.NewSceneID(), + Expected: struct { + Group Group + Error error + }{ + Error: ErrEmptyLabel, + }, + }, + { + Name: "fail: nil scene ID", + Label: "xxx", + Id: id.NewTagID(), + Expected: struct { + Group Group + Error error + }{ + Error: ErrInvalidSceneID, + }, + }, + { + Name: "success", + Id: tid, + Label: "xxx", + Scene: sid, + Tags: &List{ + tags: tags, + }, + Expected: 
struct { + Group Group + Error error + }{ + Group: Group{ + tag: tag{ + id: tid, + label: "xxx", + sceneId: sid, + }, + tags: &List{ + tags: tags, + }, + }, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewGroup(). + ID(tc.Id). + Scene(tc.Scene). + Label(tc.Label). + Tags(tc.Tags). + Build() + if tc.Expected.Error == nil { + assert.Equal(tt, tc.Expected.Group.ID(), res.ID()) + assert.Equal(tt, tc.Expected.Group.Scene(), res.Scene()) + assert.Equal(tt, tc.Expected.Group.Label(), res.Label()) + assert.Equal(tt, tc.Expected.Group.Tags(), res.Tags()) + } else { + assert.Equal(tt, tc.Expected.Error, err) + } + }) + } +} diff --git a/pkg/tag/item.go b/pkg/tag/item.go new file mode 100644 index 000000000..ee817289a --- /dev/null +++ b/pkg/tag/item.go @@ -0,0 +1,22 @@ +package tag + +import "github.com/reearth/reearth-backend/pkg/id" + +type Item struct { + tag + linkedDatasetFieldID *id.DatasetSchemaFieldID + linkedDatasetID *id.DatasetID + linkedDatasetSchemaID *id.DatasetSchemaID +} + +func (i *Item) LinkedDatasetFieldID() *id.DatasetSchemaFieldID { + return i.linkedDatasetFieldID.CopyRef() +} + +func (i *Item) LinkedDatasetID() *id.DatasetID { + return i.linkedDatasetID.CopyRef() +} + +func (i *Item) LinkedDatasetSchemaID() *id.DatasetSchemaID { + return i.linkedDatasetSchemaID.CopyRef() +} diff --git a/pkg/tag/item_builder.go b/pkg/tag/item_builder.go new file mode 100644 index 000000000..6fb0dbdee --- /dev/null +++ b/pkg/tag/item_builder.go @@ -0,0 +1,67 @@ +package tag + +import "github.com/reearth/reearth-backend/pkg/id" + +type ItemBuilder struct { + i *Item +} + +func NewItem() *ItemBuilder { + return &ItemBuilder{i: &Item{}} +} + +func ItemFrom(t Tag) *Item { + li, ok := t.(*Item) + if !ok { + return nil + } + return li +} + +func (b *ItemBuilder) Build() (*Item, error) { + if id.ID(b.i.id).IsNil() { + return nil, id.ErrInvalidID + } + if id.ID(b.i.sceneId).IsNil() { + return nil, 
ErrInvalidSceneID + } + if b.i.label == "" { + return nil, ErrEmptyLabel + } + return b.i, nil +} + +func (b *ItemBuilder) ID(tid id.TagID) *ItemBuilder { + b.i.id = tid + return b +} + +func (b *ItemBuilder) NewID() *ItemBuilder { + b.i.id = id.NewTagID() + return b +} + +func (b *ItemBuilder) Label(l string) *ItemBuilder { + b.i.label = l + return b +} + +func (b *ItemBuilder) Scene(sid id.SceneID) *ItemBuilder { + b.i.sceneId = sid + return b +} + +func (b *ItemBuilder) LinkedDatasetFieldID(dfid *id.DatasetSchemaFieldID) *ItemBuilder { + b.i.linkedDatasetFieldID = dfid + return b +} + +func (b *ItemBuilder) LinkedDatasetID(did *id.DatasetID) *ItemBuilder { + b.i.linkedDatasetID = did + return b +} + +func (b *ItemBuilder) LinkedDatasetSchemaID(dsid *id.DatasetSchemaID) *ItemBuilder { + b.i.linkedDatasetSchemaID = dsid + return b +} diff --git a/pkg/tag/item_test.go b/pkg/tag/item_test.go new file mode 100644 index 000000000..a2ab775c1 --- /dev/null +++ b/pkg/tag/item_test.go @@ -0,0 +1,117 @@ +package tag + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var _ Tag = &Item{} + +func TestItemBuilder_NewID(t *testing.T) { + b := NewItem().NewID() + assert.NotEqual(t, id.TagID{}, b.i.id) +} + +func TestItemBuilder_Build(t *testing.T) { + tid := id.NewTagID() + sid := id.NewSceneID() + dfid := id.NewDatasetSchemaFieldID() + did := id.NewDatasetID() + dsid := id.NewDatasetSchemaID() + testCases := []struct { + Name, Label string + Id id.TagID + Scene id.SceneID + LinkedDatasetFieldID *id.DatasetSchemaFieldID + LinkedDatasetID *id.DatasetID + LinkedDatasetSchemaID *id.DatasetSchemaID + Expected struct { + Item Item + Error error + } + }{ + { + Name: "fail: nil tag ID", + Label: "xxx", + Scene: id.NewSceneID(), + Expected: struct { + Item Item + Error error + }{ + Error: id.ErrInvalidID, + }, + }, + { + Name: "fail: empty label", + Id: id.NewTagID(), + Scene: id.NewSceneID(), + Expected: struct { + Item 
Item + Error error + }{ + Error: ErrEmptyLabel, + }, + }, + { + Name: "fail: nil scene ID", + Label: "xxx", + Id: id.NewTagID(), + Expected: struct { + Item Item + Error error + }{ + Error: ErrInvalidSceneID, + }, + }, + { + Name: "success", + Label: "xxx", + Id: tid, + Scene: sid, + LinkedDatasetFieldID: dfid.Ref(), + LinkedDatasetID: did.Ref(), + LinkedDatasetSchemaID: dsid.Ref(), + Expected: struct { + Item Item + Error error + }{ + Item: Item{ + tag: tag{ + id: tid, + label: "xxx", + sceneId: sid, + }, + linkedDatasetFieldID: dfid.Ref(), + linkedDatasetID: did.Ref(), + linkedDatasetSchemaID: dsid.Ref(), + }, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res, err := NewItem(). + ID(tc.Id). + Scene(tc.Scene). + Label(tc.Label). + LinkedDatasetSchemaID(tc.LinkedDatasetSchemaID). + LinkedDatasetID(tc.LinkedDatasetID). + LinkedDatasetFieldID(tc.LinkedDatasetFieldID). + Build() + if tc.Expected.Error == nil { + assert.Equal(tt, tc.Expected.Item.ID(), res.ID()) + assert.Equal(tt, tc.Expected.Item.Scene(), res.Scene()) + assert.Equal(tt, tc.Expected.Item.Label(), res.Label()) + assert.Equal(tt, tc.Expected.Item.LinkedDatasetFieldID(), res.LinkedDatasetFieldID()) + assert.Equal(tt, tc.Expected.Item.LinkedDatasetSchemaID(), res.LinkedDatasetSchemaID()) + assert.Equal(tt, tc.Expected.Item.LinkedDatasetID(), res.LinkedDatasetID()) + } else { + assert.Equal(tt, tc.Expected.Error, err) + } + }) + } +} diff --git a/pkg/tag/list.go b/pkg/tag/list.go new file mode 100644 index 000000000..b0d0779b9 --- /dev/null +++ b/pkg/tag/list.go @@ -0,0 +1,56 @@ +package tag + +import "github.com/reearth/reearth-backend/pkg/id" + +type List struct { + tags []id.TagID +} + +func NewList() *List { + return &List{tags: []id.TagID{}} +} + +func NewListFromTags(tags []id.TagID) *List { + return &List{tags: tags} +} + +func (tl *List) Tags() []id.TagID { + if tl == nil || tl.tags == nil { + return nil + } + return 
append([]id.TagID{}, tl.tags...) +} + +func (tl *List) Has(tid id.TagID) bool { + if tl == nil || tl.tags == nil { + return false + } + for _, tag := range tl.tags { + if tag == tid { + return true + } + } + return false +} + +func (tl *List) Add(tags ...id.TagID) { + if tl == nil || tl.tags == nil { + return + } + tl.tags = append(tl.tags, tags...) +} + +func (tl *List) Remove(tags ...id.TagID) { + if tl == nil || tl.tags == nil { + return + } + for i := 0; i < len(tl.tags); i++ { + for _, tid := range tags { + if tl.tags[i] == tid { + tl.tags = append(tl.tags[:i], tl.tags[i+1:]...) + i-- + break + } + } + } +} diff --git a/pkg/tag/list_test.go b/pkg/tag/list_test.go new file mode 100644 index 000000000..c8e9f55dd --- /dev/null +++ b/pkg/tag/list_test.go @@ -0,0 +1,74 @@ +package tag + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestList_Add(t *testing.T) { + tid := id.NewTagID() + var tl *List + tl.Add(tid) + assert.Nil(t, tl.Tags()) + tl = NewList() + tl.Add(tid) + expected := []id.TagID{tid} + assert.Equal(t, expected, tl.Tags()) +} + +func TestList_Remove(t *testing.T) { + tid := id.NewTagID() + tid2 := id.NewTagID() + tags := []id.TagID{ + tid, + tid2, + } + var tl *List + tl.Remove(tid2) + assert.Nil(t, tl.Tags()) + tl = NewListFromTags(tags) + tl.Remove(tid2) + expected := []id.TagID{tid} + assert.Equal(t, expected, tl.Tags()) +} + +func TestList_Has(t *testing.T) { + tid1 := id.NewTagID() + tid2 := id.NewTagID() + tags := []id.TagID{ + tid1, + } + testCases := []struct { + Name string + Tags []id.TagID + TID id.TagID + Expected bool + }{ + { + Name: "false: nil tag list", + Expected: false, + }, + { + Name: "false: tag not found", + Tags: tags, + TID: tid2, + Expected: false, + }, + { + Name: "true: tag found", + Tags: tags, + TID: tid1, + Expected: true, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := 
NewListFromTags(tc.Tags).Has(tc.TID) + assert.Equal(t, tc.Expected, res) + }) + } +} diff --git a/pkg/tag/tag.go b/pkg/tag/tag.go new file mode 100644 index 000000000..797fbd9f5 --- /dev/null +++ b/pkg/tag/tag.go @@ -0,0 +1,55 @@ +package tag + +import ( + "errors" + + "github.com/reearth/reearth-backend/pkg/id" +) + +var ( + ErrEmptyLabel = errors.New("tag label can't be empty") + ErrInvalidSceneID = errors.New("invalid scene ID") +) + +type tag struct { + id id.TagID + label string + sceneId id.SceneID +} + +type Tag interface { + ID() id.TagID + Scene() id.SceneID + Label() string + Rename(string) +} + +func (t *tag) ID() id.TagID { + return t.id +} + +func (t *tag) Scene() id.SceneID { + return t.sceneId +} + +func (t *tag) Label() string { + return t.label +} + +func (t *tag) Rename(s string) { + t.label = s +} + +func ToTagGroup(t Tag) *Group { + if tg, ok := t.(*Group); ok { + return tg + } + return nil +} + +func ToTagItem(t Tag) *Item { + if ti, ok := t.(*Item); ok { + return ti + } + return nil +} diff --git a/pkg/tag/tag_test.go b/pkg/tag/tag_test.go new file mode 100644 index 000000000..d99abc142 --- /dev/null +++ b/pkg/tag/tag_test.go @@ -0,0 +1,33 @@ +package tag + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestToTagGroup(t *testing.T) { + tag := Item{} + group := ToTagGroup(&tag) + assert.Nil(t, group) + tag2 := Group{} + group2 := ToTagGroup(&tag2) + assert.NotNil(t, group2) +} + +func TestToTagItem(t *testing.T) { + tag := Group{} + item := ToTagItem(&tag) + assert.Nil(t, item) + tag2 := Item{} + item2 := ToTagItem(&tag2) + assert.NotNil(t, item2) +} + +func TestTag_Rename(t *testing.T) { + tt := tag{ + label: "xxx", + } + tt.Rename("changed") + assert.Equal(t, "changed", tt.Label()) +} diff --git a/schema.graphql b/schema.graphql index 31ca85d22..cc2b5a773 100644 --- a/schema.graphql +++ b/schema.graphql @@ -363,6 +363,8 @@ type Scene implements Node { after: Cursor before: Cursor ): DatasetSchemaConnection! 
@goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } enum SceneLockMode { @@ -656,6 +658,8 @@ interface Layer { plugin: Plugin extension: PluginExtension scenePlugin: ScenePlugin + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } union Layers = LayerItem | LayerGroup @@ -688,6 +692,8 @@ type LayerItem implements Layer { merged: MergedLayer @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } type LayerGroup implements Layer { @@ -712,6 +718,8 @@ type LayerGroup implements Layer { layers: [Layer]! @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) + tagIds: [ID!]! + tags: [Tag!]! @goField(forceResolver: true) } type Infobox { @@ -776,6 +784,33 @@ type MergedInfoboxField { scenePlugin: ScenePlugin @goField(forceResolver: true) } +interface Tag { + id: ID! + sceneId: ID! + label: String! +} + +type TagItem implements Tag { + id: ID! + sceneId: ID! + label: String! + linkedDatasetID: ID + linkedDatasetSchemaID: ID + linkedDatasetFieldID: ID + linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) + linkedDataset: Dataset @goField(forceResolver: true) + linkedDatasetField: DatasetField @goField(forceResolver: true) +} + +type TagGroup implements Tag { + id: ID! + sceneId: ID! + label: String! + tags: [ID!] +} + +union Tags = TagItem | TagGroup + # InputType input CreateAssetInput { @@ -1117,11 +1152,11 @@ input ImportDatasetInput { } input ImportDatasetFromGoogleSheetInput { - accessToken: String! - fileId: String! - sheetName: String! - sceneId: ID! - datasetSchemaId: ID + accessToken: String! + fileId: String! + sheetName: String! + sceneId: ID! 
+ datasetSchemaId: ID } input AddDatasetSchemaInput { @@ -1130,6 +1165,50 @@ input AddDatasetSchemaInput { representativefield: ID } +input CreateTagItemInput { + sceneId: ID! + label: String! + linkedDatasetSchemaID: ID + linkedDatasetID: ID + linkedDatasetField: ID +} + +input CreateTagGroupInput { + sceneId: ID! + label: String! + tags: [ID!] +} + +input UpdateTagInput { + tagId: ID! + sceneId: ID! + label: String +} + +input AttachTagItemToGroupInput { + itemID: ID! + groupID: ID! +} + +input DetachTagItemFromGroupInput { + itemID: ID! + groupID: ID! +} + +input AttachTagToLayerInput { + tagID: ID! + layerID: ID! +} + +input DetachTagFromLayerInput { + tagID: ID! + layerID: ID! +} + +input RemoveTagInput { + tagID: ID! +} + # Payload type CreateAssetPayload { @@ -1328,6 +1407,38 @@ type AddDatasetSchemaPayload { datasetSchema: DatasetSchema } +type CreateTagItemPayload { + tag: TagItem! +} + +type CreateTagGroupPayload { + tag: TagGroup! +} + +type AttachTagItemToGroupPayload { + tag: TagGroup! +} + +type DetachTagItemFromGroupPayload { + tag: TagGroup! +} + +type UpdateTagPayload { + tag: Tag! +} + +type AttachTagToLayerPayload{ + layer: Layer! +} + +type DetachTagFromLayerPayload{ + layer: Layer! +} + +type RemoveTagPayload{ + tagId: ID! 
+} + # Connection type AssetConnection { @@ -1507,6 +1618,16 @@ type Mutation { moveInfoboxField(input: MoveInfoboxFieldInput!): MoveInfoboxFieldPayload removeInfoboxField(input: RemoveInfoboxFieldInput!): RemoveInfoboxFieldPayload importLayer(input: ImportLayerInput!): ImportLayerPayload + attachTagToLayer(input: AttachTagToLayerInput!): AttachTagToLayerPayload + detachTagFromLayer(input: DetachTagFromLayerInput!): DetachTagFromLayerPayload + + # Tag + createTagItem(input: CreateTagItemInput!): CreateTagItemPayload + createTagGroup(input: CreateTagGroupInput!): CreateTagGroupPayload + attachTagItemToGroup(input: AttachTagItemToGroupInput!): AttachTagItemToGroupPayload + detachTagItemFromGroup(input: DetachTagItemFromGroupInput!): DetachTagItemFromGroupPayload + updateTag(input: UpdateTagInput!): UpdateTagPayload + removeTag(input: RemoveTagInput!): RemoveTagPayload } schema { From fbcdef3eaa3e1752c3e6615108a0d36adfd6e145 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 20:12:09 +0900 Subject: [PATCH 092/253] chore: add github workflows to release --- .github/cliff.toml | 58 +++++++++++ .github/workflows/deploy_test.yml | 4 +- .github/workflows/godoc.yml | 69 ------------- .github/workflows/main.yml | 164 +++++++++++++++++++++++++++++- .github/workflows/nightly.yml | 84 --------------- .github/workflows/release.yml | 73 +++++++++++++ .gitignore | 1 + .goreleaser.yml | 30 ++++++ Dockerfile | 3 +- internal/app/main.go | 2 +- 10 files changed, 330 insertions(+), 158 deletions(-) create mode 100644 .github/cliff.toml delete mode 100644 .github/workflows/godoc.yml delete mode 100644 .github/workflows/nightly.yml create mode 100644 .github/workflows/release.yml create mode 100644 .goreleaser.yml diff --git a/.github/cliff.toml b/.github/cliff.toml new file mode 100644 index 000000000..60380d99e --- /dev/null +++ b/.github/cliff.toml @@ -0,0 +1,58 @@ +# configuration file for git-cliff (0.1.0) + +[changelog] +# changelog header +header = """ +# Changelog +All 
notable changes to this project will be documented in this file.\n +""" +# template for the changelog body +# https://tera.netlify.app/docs/#introduction +body = """ +{% if version %}\ + ## {{ version | replace(from="v", to="") }} - {{ timestamp | date(format="%Y-%m-%d") }} +{% else %}\ + ## Unreleased +{% endif %}\ +{% for group, commits in commits | group_by(attribute="group") %} + ### {{ group }} + {% for commit in commits %} + - {{ commit.message | upper_first }} `{{ commit.id | split(pat="") | slice(end=7) | join(sep="") }}`\ + {% endfor %} +{% endfor %}\n +""" +# remove the leading and trailing whitespaces from the template +trim = true +# changelog footer +footer = """ + +""" + +[git] +# allow only conventional commits +# https://www.conventionalcommits.org +conventional_commits = true +# regex for parsing and grouping commits +commit_parsers = [ + { message = "^feat", group = "๐Ÿš€ Features"}, + { message = "^fix", group = "๐Ÿ”ง Bug Fixes"}, + { message = "^docs", group = "๐Ÿ“– Documentation"}, + { message = "^doc", group = "๐Ÿ“– Documentation"}, + { message = "^perf", group = "โšก๏ธ Performance"}, + { message = "^refactor", group = "โœจ Refactor"}, + { message = "^style", group = "๐ŸŽจ Styling"}, + { message = "^test", group = "๐Ÿงช Testing"}, + { body = ".*security", group = "๐Ÿ”’ Security"}, + { message = "^chore", group = "Miscellaneous Tasks"}, + { message = "^build", group = "Miscellaneous Tasks"}, + { message = "^ci", group = "Miscellaneous Tasks"}, + { message = "^deps", group = "Miscellaneous Tasks"}, + { message = "^revert", skip = true}, + { message = "^v[0-9]+", skip = true}, +] +# filter out the commits that are not matched by commit parsers +filter_commits = false +# glob pattern for matching git tags +tag_pattern = "v[0-9]*" +# regex for skipping tags +skip_tags = "v0.1.0-beta.1" diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index b44ca3c36..74ff3ddc8 100644 --- a/.github/workflows/deploy_test.yml +++ 
b/.github/workflows/deploy_test.yml @@ -2,7 +2,7 @@ name: deploy_test on: workflow_run: workflows: - - nightly + - main types: - completed env: @@ -13,7 +13,7 @@ jobs: deploy_test: name: deploy_test runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} + if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' steps: - uses: google-github-actions/setup-gcloud@master with: diff --git a/.github/workflows/godoc.yml b/.github/workflows/godoc.yml deleted file mode 100644 index f3f7ab839..000000000 --- a/.github/workflows/godoc.yml +++ /dev/null @@ -1,69 +0,0 @@ -name: godoc -on: - workflow_run: - workflows: - - main - types: - - completed -env: - MOD: github.com/reearth/reearth-backend - REPO: github.com/reearth/reearth-backend - REPO_NAME: reearth-backend - ADDR: 'localhost:8080' -jobs: - godoc: - name: godoc - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} - env: - DIR: ${{ github.event.workflow_run.head_branch }} - steps: - - name: set up - uses: actions/setup-go@v2 - with: - go-version: 1.17 - id: go - - name: checkout - uses: actions/checkout@v2 - - name: cache - uses: actions/cache@v2 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - run: go mod download - - name: install godoc - run: go install golang.org/x/tools/cmd/godoc@latest - - name: generate docs - continue-on-error: true - run: | - godoc -http="$ADDR" & - sleep 10 - wget -r -np -N -E -p -k "http://${ADDR}/pkg/${MOD}/" - - name: replace urls - run: | - [ `find . 
-name "*.html" -type f | wc -l` -eq 0 ] && exit 1 - find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i \ - -e "s@http://${ADDR}/src/${MOD}@https://${REPO}/blob/main@" \ - -e "s@\"http://${ADDR}/pkg/\"@\"/${REPO_NAME}/${DIR}/pkg/${REPO}/\"@" \ - -e 's@ /dev/null) + if [[ $PREV_TAG == v*.*.* ]]; then + echo "::set-output name=latest::--latest" + fi + - name: Generate changelog + uses: orhun/git-cliff-action@v1 + env: + OUTPUT: CHANGELOG.md + with: + config: .github/cliff.toml + args: --verbose --tag ${{ steps.tag.outputs.new_tag }} + - name: Generate latest changelog + uses: orhun/git-cliff-action@v1 + id: changelog + env: + OUTPUT: CHANGELOG_latest.md + with: + config: .github/cliff.toml + args: --verbose --strip all --tag ${{ steps.tag.outputs.new_tag }} ${{ steps.cliff_pre.outputs.latest }} + - name: Format changelogs + env: + URL: ${{ github.event.repository.html_url }} + run: | + URL=${URL//\//\\\/} + sed -i -E 's///g; s/\(#([0-9]+)\)/([#\1]('"$URL"'\/pull\/\1))/g; s/`([a-zA-Z0-9]+)`/[`\1`]('"$URL"'\/commit\/\1)/g' CHANGELOG*.md + sed -i '/^## .*$/d; 1d; 2d' CHANGELOG_latest.md + - name: Upload latest changelog + uses: actions/upload-artifact@v2 + with: + name: changelog-${{ steps.tag.outputs.new_tag }} + path: CHANGELOG_latest.md + - name: Commit & push + env: + TAG: ${{ steps.tag.outputs.new_tag }} + run: | + rm CHANGELOG_latest.md + git add CHANGELOG.md + git commit -am "$TAG" + git tag $TAG + git push + git push --tags diff --git a/.gitignore b/.gitignore index 27eed16d7..c7310eea7 100644 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ .idea # reearth +/dist /reearth /reearth-backend __debug_bin diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 000000000..3c1454481 --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,30 @@ +project_name: reearth-backend +before: + hooks: + - go mod tidy +builds: + - main: ./cmd/reearth + flags: + - -tags=release + - -trimpath + ldflags: + - -s -w + - -X main.version={{.Version}} + - -buildid= + 
env: + - CGO_ENABLED=0 +archives: + - name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + replacements: + darwin: darwin + linux: linux + windows: windows + 386: i386 + amd64: x86_64 + format_overrides: + - goos: windows + format: zip +changelog: + skip: true +release: + disable: true diff --git a/Dockerfile b/Dockerfile index da3965c6a..f2edbd9cb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,6 @@ FROM golang:1.17-alpine AS build ARG TAG=release +ARG REV ARG VERSION RUN apk add --update --no-cache git ca-certificates build-base @@ -12,7 +13,7 @@ COPY cmd/ /reearth/cmd/ COPY pkg/ /reearth/pkg/ COPY internal/ /reearth/internal/ -RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -buildid=" -trimpath ./cmd/reearth +RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -w -buildid=" -trimpath ./cmd/reearth FROM scratch diff --git a/internal/app/main.go b/internal/app/main.go index 0b43eee34..8f33b955c 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -86,7 +86,7 @@ func (w *WebServer) Run() { if w.appServer.Debug { debugLog += " with debug mode" } - log.Infof("Server started%s\n", debugLog) + log.Infof("server started%s at %s\n", debugLog, w.address) go func() { err := w.appServer.Start(w.address) From fc93f2decd29920c0f558f762f7d1cd6359045c9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 11:38:08 +0000 Subject: [PATCH 093/253] v0.1.0 --- CHANGELOG.md | 107 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..e9798d209 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,107 @@ +# Changelog +All notable changes to this project will be documented in this file. 
+ +## 0.1.0 - 2021-11-01 + +### ๐Ÿš€ Features + +- Support Auth0 audience ([#3](https://github.com/reearth/reearth-backend/pull/3)) [`c3758e`](https://github.com/reearth/reearth-backend/commit/c3758e) +- Basic auth for projects ([#6](https://github.com/reearth/reearth-backend/pull/6)) [`5db065`](https://github.com/reearth/reearth-backend/commit/5db065) +- Google analytics for scene ([#10](https://github.com/reearth/reearth-backend/pull/10)) [`b44249`](https://github.com/reearth/reearth-backend/commit/b44249) +- Create installable plugins ([#1](https://github.com/reearth/reearth-backend/pull/1)) [`5b7a5f`](https://github.com/reearth/reearth-backend/commit/5b7a5f) +- Add thumbnail, author fields on plugin metadata query ([#15](https://github.com/reearth/reearth-backend/pull/15)) [`888fe0`](https://github.com/reearth/reearth-backend/commit/888fe0) +- Published page api ([#11](https://github.com/reearth/reearth-backend/pull/11)) [`aebac3`](https://github.com/reearth/reearth-backend/commit/aebac3) +- Import dataset from google sheets ([#16](https://github.com/reearth/reearth-backend/pull/16)) [`2ef7ef`](https://github.com/reearth/reearth-backend/commit/2ef7ef) +- Add scenePlugin resolver to layers ([#20](https://github.com/reearth/reearth-backend/pull/20)) [`5213f3`](https://github.com/reearth/reearth-backend/commit/5213f3) +- Marker label position [`bb9e4c`](https://github.com/reearth/reearth-backend/commit/bb9e4c) +- Refine dataset import ([#26](https://github.com/reearth/reearth-backend/pull/26)) [`5dd3db`](https://github.com/reearth/reearth-backend/commit/5dd3db) +- Plugin upload and deletion ([#33](https://github.com/reearth/reearth-backend/pull/33)) [`8742db`](https://github.com/reearth/reearth-backend/commit/8742db) +- New primitives, new properties on primitives [`108711`](https://github.com/reearth/reearth-backend/commit/108711) +- Set scene theme ([#35](https://github.com/reearth/reearth-backend/pull/35)) 
[`2e4f52`](https://github.com/reearth/reearth-backend/commit/2e4f52) +- Widget align system ([#19](https://github.com/reearth/reearth-backend/pull/19)) [`94611f`](https://github.com/reearth/reearth-backend/commit/94611f) +- Tag system ([#67](https://github.com/reearth/reearth-backend/pull/67)) [`163fcf`](https://github.com/reearth/reearth-backend/commit/163fcf) + +### ๐Ÿ”ง Bug Fixes + +- Add mutex for each memory repo ([#2](https://github.com/reearth/reearth-backend/pull/2)) [`f4c3b0`](https://github.com/reearth/reearth-backend/commit/f4c3b0) +- Auth0 audience in reearth_config.json [`72e3ed`](https://github.com/reearth/reearth-backend/commit/72e3ed) +- Auth0 domain and multiple auds [`835a02`](https://github.com/reearth/reearth-backend/commit/835a02) +- Signing up and deleting user [`f17b9d`](https://github.com/reearth/reearth-backend/commit/f17b9d) +- Deleting user [`e9b8c9`](https://github.com/reearth/reearth-backend/commit/e9b8c9) +- Sign up and update user [`e5ab87`](https://github.com/reearth/reearth-backend/commit/e5ab87) +- Make gql mutation payloads optional [`9b1c4a`](https://github.com/reearth/reearth-backend/commit/9b1c4a) +- Auth0 [`6a27c6`](https://github.com/reearth/reearth-backend/commit/6a27c6) +- Errors are be overwriten by tx [`2d08c5`](https://github.com/reearth/reearth-backend/commit/2d08c5) +- Deleting user [`f531bd`](https://github.com/reearth/reearth-backend/commit/f531bd) +- Always enable dev mode in debug [`0815d3`](https://github.com/reearth/reearth-backend/commit/0815d3) +- User deletion [`a5eeae`](https://github.com/reearth/reearth-backend/commit/a5eeae) +- Invisible layer issue in published project ([#7](https://github.com/reearth/reearth-backend/pull/7)) [`06cd44`](https://github.com/reearth/reearth-backend/commit/06cd44) +- Dataset link merge bug #378 ([#18](https://github.com/reearth/reearth-backend/pull/18)) [`25da0d`](https://github.com/reearth/reearth-backend/commit/25da0d) +- Ogp image for published page 
([#17](https://github.com/reearth/reearth-backend/pull/17)) [`dcb4b0`](https://github.com/reearth/reearth-backend/commit/dcb4b0) +- Change default value of marker label position [`a2059e`](https://github.com/reearth/reearth-backend/commit/a2059e) +- Import dataset from google sheet bug ([#23](https://github.com/reearth/reearth-backend/pull/23)) [`077558`](https://github.com/reearth/reearth-backend/commit/077558) +- Public api param [`846957`](https://github.com/reearth/reearth-backend/commit/846957) +- Replace strings.Split() with strings.field() ([#25](https://github.com/reearth/reearth-backend/pull/25)) [`ba7d16`](https://github.com/reearth/reearth-backend/commit/ba7d16) +- Project public image type [`e82b54`](https://github.com/reearth/reearth-backend/commit/e82b54) +- Published API ([#27](https://github.com/reearth/reearth-backend/pull/27)) [`8ad1f8`](https://github.com/reearth/reearth-backend/commit/8ad1f8) +- Plugin manifest parser bugs ([#32](https://github.com/reearth/reearth-backend/pull/32)) [`78ac13`](https://github.com/reearth/reearth-backend/commit/78ac13) +- Dataset layers are not exported correctly ([#36](https://github.com/reearth/reearth-backend/pull/36)) [`0b8c00`](https://github.com/reearth/reearth-backend/commit/0b8c00) +- Hide parent infobox fields when child infobox is not nil ([#37](https://github.com/reearth/reearth-backend/pull/37)) [`d8c8cd`](https://github.com/reearth/reearth-backend/commit/d8c8cd) +- Mongo.PropertySchema.FindByIDs, propertySchemaID.Equal [`be00da`](https://github.com/reearth/reearth-backend/commit/be00da) +- Gql propertySchemaGroup.translatedTitle resolver [`a4770e`](https://github.com/reearth/reearth-backend/commit/a4770e) +- Use PropertySchemaID.Equal [`8a6459`](https://github.com/reearth/reearth-backend/commit/8a6459) +- Use PropertySchemaID.Equal [`1c3cf1`](https://github.com/reearth/reearth-backend/commit/1c3cf1) +- Tweak field names of model primitive 
[`080ab9`](https://github.com/reearth/reearth-backend/commit/080ab9) +- Layer importing bug ([#41](https://github.com/reearth/reearth-backend/pull/41)) [`02b17f`](https://github.com/reearth/reearth-backend/commit/02b17f) +- Skip nil geometries ([#42](https://github.com/reearth/reearth-backend/pull/42)) [`90c327`](https://github.com/reearth/reearth-backend/commit/90c327) +- Validate widget extended when moved [`a7daf7`](https://github.com/reearth/reearth-backend/commit/a7daf7) +- Widget extended validation [`98db7e`](https://github.com/reearth/reearth-backend/commit/98db7e) +- Nil error in mongodoc plugin [`d236be`](https://github.com/reearth/reearth-backend/commit/d236be) +- Add widget to default location [`eb1db4`](https://github.com/reearth/reearth-backend/commit/eb1db4) +- Invalid extension data from GraphQL, plugin manifest schema improvement, more friendly error from manifest parser ([#56](https://github.com/reearth/reearth-backend/pull/56)) [`92d137`](https://github.com/reearth/reearth-backend/commit/92d137) +- Translated fields in plugin gql [`0a658a`](https://github.com/reearth/reearth-backend/commit/0a658a) +- Fallback widgetLocation [`579b7a`](https://github.com/reearth/reearth-backend/commit/579b7a) + +### ๐Ÿ“– Documentation + +- Refine readme ([#28](https://github.com/reearth/reearth-backend/pull/28)) [`a9d209`](https://github.com/reearth/reearth-backend/commit/a9d209) +- Add badges to readme [skip ci] [`cc63cd`](https://github.com/reearth/reearth-backend/commit/cc63cd) + +### โœจ Refactor + +- Remove unused code [`37b2c2`](https://github.com/reearth/reearth-backend/commit/37b2c2) +- Pkg/error ([#31](https://github.com/reearth/reearth-backend/pull/31)) [`a3f8b6`](https://github.com/reearth/reearth-backend/commit/a3f8b6) +- Graphql adapter ([#40](https://github.com/reearth/reearth-backend/pull/40)) [`2a1d4f`](https://github.com/reearth/reearth-backend/commit/2a1d4f) +- Reorganize graphql schema ([#43](https://github.com/reearth/reearth-backend/pull/43)) 
[`d3360b`](https://github.com/reearth/reearth-backend/commit/d3360b) + +### ๐Ÿงช Testing + +- Pkg/shp ([#5](https://github.com/reearth/reearth-backend/pull/5)) [`72ed8e`](https://github.com/reearth/reearth-backend/commit/72ed8e) +- Pkg/id ([#4](https://github.com/reearth/reearth-backend/pull/4)) [`c31bdb`](https://github.com/reearth/reearth-backend/commit/c31bdb) + +### Miscellaneous Tasks + +- Enable nightly release workflow [`16c037`](https://github.com/reearth/reearth-backend/commit/16c037) +- Set up workflows [`819639`](https://github.com/reearth/reearth-backend/commit/819639) +- Fix workflows [`c022a4`](https://github.com/reearth/reearth-backend/commit/c022a4) +- Print config [`0125aa`](https://github.com/reearth/reearth-backend/commit/0125aa) +- Load .env instead of .env.local [`487a73`](https://github.com/reearth/reearth-backend/commit/487a73) +- Add godoc workflow [`9629dd`](https://github.com/reearth/reearth-backend/commit/9629dd) +- Fix godoc workflow [`cc45b5`](https://github.com/reearth/reearth-backend/commit/cc45b5) +- Fix godoc workflow [`0db163`](https://github.com/reearth/reearth-backend/commit/0db163) +- Fix godoc workflow [`9b78fc`](https://github.com/reearth/reearth-backend/commit/9b78fc) +- Fix godoc workflow [`f1e5a7`](https://github.com/reearth/reearth-backend/commit/f1e5a7) +- Fix godoc workflow [`f7866c`](https://github.com/reearth/reearth-backend/commit/f7866c) +- Fix godoc workflow [`5bc089`](https://github.com/reearth/reearth-backend/commit/5bc089) +- Fix godoc workflow [`5f808b`](https://github.com/reearth/reearth-backend/commit/5f808b) +- Fix godoc workflow [`9f8e11`](https://github.com/reearth/reearth-backend/commit/9f8e11) +- Fix godoc workflow [`150550`](https://github.com/reearth/reearth-backend/commit/150550) +- Use go:embed ([#24](https://github.com/reearth/reearth-backend/pull/24)) [`f7866e`](https://github.com/reearth/reearth-backend/commit/f7866e) +- Add internal error log 
[`41c377`](https://github.com/reearth/reearth-backend/commit/41c377) +- Support multiple platform docker image [`3651e2`](https://github.com/reearth/reearth-backend/commit/3651e2) +- Stop using upx as it doesn't work on arm64 [`3b5f93`](https://github.com/reearth/reearth-backend/commit/3b5f93) +- Update golang version and modules ([#51](https://github.com/reearth/reearth-backend/pull/51)) [`33f4c7`](https://github.com/reearth/reearth-backend/commit/33f4c7) +- Updating modules ([#62](https://github.com/reearth/reearth-backend/pull/62)) [`65ae32`](https://github.com/reearth/reearth-backend/commit/65ae32) +- Add github workflows to release [`fbcdef`](https://github.com/reearth/reearth-backend/commit/fbcdef) + + From cfc79a0cf34d22e9757291c6e7adb6492a09515c Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 21:01:24 +0900 Subject: [PATCH 094/253] chore: fix release workflow, fix build comment [skip ci] --- .github/workflows/main.yml | 4 ++++ cmd/reearth/debug.go | 2 +- cmd/reearth/release.go | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 10bc86f0d..42f039aed 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -39,6 +39,8 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt + - name: Fetch tags + run: git fetch --prune --unshallow --tags - name: Get info id: info # The tag name should be retrieved lazily, as tagging may be delayed. 
@@ -120,6 +122,8 @@ jobs: args: release --rm-dist --snapshot env: GORELEASER_CURRENT_TAG: 0.0.0 + - name: Rename artifacts + run: rename 's/^dist\/reearth-backend_0.0.0-SNAPSHOT-.+?_(.+)/dist\/reearth-backend_nightly_$1/s' dist/reearth-backend_*.* - name: Create GitHub release for nightly if: "!needs.main.outputs.new_tag" uses: ncipollo/release-action@v1 diff --git a/cmd/reearth/debug.go b/cmd/reearth/debug.go index 163bd1394..57897e39e 100644 --- a/cmd/reearth/debug.go +++ b/cmd/reearth/debug.go @@ -1,4 +1,4 @@ -// +build !release +//go:build !release package main diff --git a/cmd/reearth/release.go b/cmd/reearth/release.go index 103b98e7c..9ac0438f8 100644 --- a/cmd/reearth/release.go +++ b/cmd/reearth/release.go @@ -1,4 +1,4 @@ -// +build release +//go:build release package main From 1d29f5c7007c361d355ef0a97d06011122d3dbb9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 12:03:25 +0000 Subject: [PATCH 095/253] v0.1.0 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e9798d209..36be55d6a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -103,5 +103,6 @@ All notable changes to this project will be documented in this file. 
- Update golang version and modules ([#51](https://github.com/reearth/reearth-backend/pull/51)) [`33f4c7`](https://github.com/reearth/reearth-backend/commit/33f4c7) - Updating modules ([#62](https://github.com/reearth/reearth-backend/pull/62)) [`65ae32`](https://github.com/reearth/reearth-backend/commit/65ae32) - Add github workflows to release [`fbcdef`](https://github.com/reearth/reearth-backend/commit/fbcdef) +- Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) From 96f0b365e47d9315f3a736f2a82c80838a953a42 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 21:26:51 +0900 Subject: [PATCH 096/253] chore: fix renaming file names in release workflow --- .github/workflows/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 42f039aed..1fac21bba 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -123,7 +123,8 @@ jobs: env: GORELEASER_CURRENT_TAG: 0.0.0 - name: Rename artifacts - run: rename 's/^dist\/reearth-backend_0.0.0-SNAPSHOT-.+?_(.+)/dist\/reearth-backend_nightly_$1/s' dist/reearth-backend_*.* + if: "!needs.main.outputs.new_tag" + run: for f in dist/reearth-backend_*.*; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_nightly/'); done - name: Create GitHub release for nightly if: "!needs.main.outputs.new_tag" uses: ncipollo/release-action@v1 From 330852579545f566c1672d166a0dc7ba081985b9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 12:36:16 +0000 Subject: [PATCH 097/253] v0.1.0 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36be55d6a..f059d74d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -104,5 +104,6 @@ All notable changes to this project will be documented in this file. 
- Updating modules ([#62](https://github.com/reearth/reearth-backend/pull/62)) [`65ae32`](https://github.com/reearth/reearth-backend/commit/65ae32) - Add github workflows to release [`fbcdef`](https://github.com/reearth/reearth-backend/commit/fbcdef) - Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) +- Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) From d5466be2faa47024f7bf7c27e0b9608d0d6c4d4b Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 22:21:25 +0900 Subject: [PATCH 098/253] chore: fix and refactor release workflow [skip ci] --- .github/workflows/main.yml | 45 +++++++++++++++++++++++++------------- 1 file changed, 30 insertions(+), 15 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1fac21bba..c0c6f79b4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -106,6 +106,8 @@ jobs: runs-on: ubuntu-latest needs: - main + env: + NAME: reearth-backend steps: - name: Checkout uses: actions/checkout@v2 @@ -124,24 +126,18 @@ jobs: GORELEASER_CURRENT_TAG: 0.0.0 - name: Rename artifacts if: "!needs.main.outputs.new_tag" - run: for f in dist/reearth-backend_*.*; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_nightly/'); done + run: for f in dist/${NAME}_*.*; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_nightly/'); done - name: Create GitHub release for nightly if: "!needs.main.outputs.new_tag" uses: ncipollo/release-action@v1 with: - artifacts: dist/reearth-backend_*.* + artifacts: dist/${{ env.NAME }}_*.* commit: ${{ github.sha }} name: Nightly tag: nightly body: ${{ github.sha }} prerelease: true allowUpdates: true - - name: Download latest changelog - if: needs.main.outputs.new_tag - uses: dawidd6/action-download-artifact@v2 - with: - workflow: release.yml - name: changelog-${{ needs.main.outputs.new_tag }} - name: Run GoReleaser if: 
needs.main.outputs.new_tag uses: goreleaser/goreleaser-action@v2 @@ -149,11 +145,17 @@ jobs: args: release --rm-dist env: GORELEASER_CURRENT_TAG: ${{ needs.main.outputs.new_tag }} + - name: Download latest changelog + if: needs.main.outputs.new_tag + uses: dawidd6/action-download-artifact@v2 + with: + workflow: release.yml + name: changelog-${{ needs.main.outputs.new_tag }} - name: Create GitHub release if: needs.main.outputs.new_tag uses: ncipollo/release-action@v1 with: - artifacts: dist/reearth-backend_*.* + artifacts: dist/${{ env.NAME }}_*.* commit: ${{ github.sha }} name: ${{ needs.main.outputs.new_tag }} tag: ${{ needs.main.outputs.new_tag }} @@ -163,6 +165,8 @@ jobs: runs-on: ubuntu-latest needs: - main + env: + IMAGE_NAME: reearth/reearth-backend steps: - name: Checkout uses: actions/checkout@v2 @@ -184,9 +188,22 @@ jobs: platforms: linux/amd64,linux/arm64 push: true build-args: VERSION=0.0.0-SNAPSHOT-${{ needs.main.outputs.sha_short }} - tags: reearth/reearth-backend:nightly - cache-from: type=registry,ref=reearth/reearth-backend:nightly + tags: ${{ env.IMAGE_NAME }}:nightly + cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:nightly cache-to: type=inline + - name: Get Docker tags + id: tags + if: needs.main.outputs.new_tag + env: + TAG: ${{ needs.main.outputs.new_tag_short }} + run: | + TAGS=$IMAGE_NAME:$TAG + if [[ ! 
$TAG =~ '-' ]]; then + TAGS+=,${IMAGE_NAME}:${TAG%.*} + TAGS+=,${IMAGE_NAME}:${TAG%%.*} + TAGS+=,${IMAGE_NAME}:latest + fi + echo "::set-output name=tags::$TAGS" - name: Build and push release if: needs.main.outputs.new_tag uses: docker/build-push-action@v2 @@ -195,10 +212,8 @@ jobs: platforms: linux/amd64,linux/arm64 push: true build-args: VERSION=${{ needs.main.outputs.new_tag_short }} - tags: | - reearth/reearth-backend:${{ needs.main.outputs.new_tag_short }} - reearth/reearth-backend:latest - cache-from: type=registry,ref=reearth/reearth-backend:latest + tags: ${{ steps.tags.outputs.tags }} + cache-from: type=registry,ref=${IMAGE_NAME}:latest cache-to: type=inline slack-notification: if: always() From 632116f9a552ba05eb281fb176fee772c32210aa Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 1 Nov 2021 13:22:17 +0000 Subject: [PATCH 099/253] v0.1.0 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f059d74d5..fb2993434 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -105,5 +105,6 @@ All notable changes to this project will be documented in this file. 
- Add github workflows to release [`fbcdef`](https://github.com/reearth/reearth-backend/commit/fbcdef) - Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) - Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) +- Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) From 25c5810616447deaff7cf10193393581db3c6871 Mon Sep 17 00:00:00 2001 From: Katsuya Miyachi Date: Wed, 3 Nov 2021 05:33:39 +0900 Subject: [PATCH 100/253] feat: support opentelemetry (#68) --- go.mod | 60 +++++---- go.sum | 271 ++++++++++++++++++++++------------------ internal/app/app.go | 12 +- internal/app/graphql.go | 9 +- internal/app/main.go | 2 +- internal/app/repo.go | 9 +- internal/app/tracer.go | 23 ++-- 7 files changed, 208 insertions(+), 178 deletions(-) diff --git a/go.mod b/go.mod index e0d5e2fa1..b73e64fa8 100644 --- a/go.mod +++ b/go.mod @@ -1,11 +1,10 @@ module github.com/reearth/reearth-backend require ( - cloud.google.com/go v0.87.0 + cloud.google.com/go/profiler v0.1.1 cloud.google.com/go/storage v1.14.0 github.com/99designs/gqlgen v0.14.0 - github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect @@ -18,22 +17,23 @@ require ( github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect github.com/goccy/go-yaml v1.9.2 github.com/hashicorp/golang-lru v0.5.4 // indirect - github.com/iancoleman/strcase v0.1.3 + github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d 
github.com/jarcoal/httpmock v1.0.8 github.com/joho/godotenv v1.3.0 github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 - github.com/klauspost/compress v1.10.10 // indirect - github.com/labstack/echo/v4 v4.2.1 + github.com/klauspost/compress v1.13.6 // indirect + github.com/labstack/echo/v4 v4.6.1 github.com/labstack/gommon v0.3.0 - github.com/mattn/go-isatty v0.0.13 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect github.com/mitchellh/mapstructure v1.4.2 github.com/oklog/ulid v1.3.1 github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 + github.com/ravilushqa/otelgqlgen v0.2.0 github.com/sirupsen/logrus v1.8.1 github.com/smartystreets/assertions v1.1.1 // indirect github.com/spf13/afero v1.6.0 @@ -46,18 +46,19 @@ require ( github.com/urfave/cli/v2 v2.3.0 // indirect github.com/vektah/dataloaden v0.3.0 github.com/vektah/gqlparser/v2 v2.2.0 - go.mongodb.org/mongo-driver v1.5.1 - go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb - go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0 - go.opentelemetry.io/otel v0.7.0 + go.mongodb.org/mongo-driver v1.7.3 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.26.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.26.0 + go.opentelemetry.io/otel v1.1.0 + go.opentelemetry.io/otel/sdk v1.1.0 go.uber.org/atomic v1.7.0 // indirect - golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c // indirect + golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect golang.org/x/mod v0.5.1 // indirect - golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac // indirect - golang.org/x/text v0.3.6 + golang.org/x/sys v0.0.0-20211102061401-a2f17f7b995c // indirect + golang.org/x/text v0.3.7 golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba 
// indirect golang.org/x/tools v0.1.7 - google.golang.org/api v0.51.0 + google.golang.org/api v0.60.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.0 gopkg.in/yaml.v2 v2.4.0 // indirect @@ -65,20 +66,26 @@ require ( ) require ( + cloud.google.com/go v0.97.0 // indirect + cloud.google.com/go/trace v1.0.0 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect - github.com/aws/aws-sdk-go v1.34.28 // indirect + github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 // indirect + github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/envoyproxy/go-control-plane v0.10.0 // indirect + github.com/envoyproxy/protoc-gen-validate v0.6.2 // indirect github.com/go-stack/stack v1.8.0 // indirect - github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect + github.com/golang-jwt/jwt v3.2.2+incompatible // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/golang/snappy v0.0.3 // indirect github.com/google/go-cmp v0.5.6 // indirect - github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9 // indirect - github.com/googleapis/gax-go/v2 v2.0.5 // indirect + github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0 // indirect + github.com/googleapis/gax-go/v2 v2.1.1 // indirect github.com/gorilla/websocket v1.4.2 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect - github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/jstemmer/go-junit-report v0.9.1 // indirect github.com/mattn/go-colorable v0.1.8 // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -90,14 +97,15 @@ require ( 
github.com/xdg-go/stringprep v1.0.2 // indirect github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect - golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect - golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d // indirect - golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 // indirect + go.opentelemetry.io/contrib v0.23.0 // indirect + go.opentelemetry.io/otel/trace v1.1.0 // indirect + golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 // indirect + golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea // indirect - google.golang.org/grpc v1.39.0 // indirect + google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145 // indirect + google.golang.org/grpc v1.41.0 // indirect google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect diff --git a/go.sum b/go.sum index 2e430c7d7..8aa277609 100644 --- a/go.sum +++ b/go.sum @@ -21,8 +21,13 @@ cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0 h1:8ZtzmY4a2JIO2sljMbpqkDYxA8aJQveYr3AMa+X40oc= cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvEY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod 
h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0 h1:3DXvAyifywvq64LfkKaMOmkWPS1CikIQdMe2lY9vxU8= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -31,6 +36,8 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/profiler v0.1.1 h1:seMHZtcgOwZXAOKDZuW2sN3u1yKjYG19dUkElb4mbcQ= +cloud.google.com/go/profiler v0.1.1/go.mod h1:zG22vSCuJKJMvIlLpX3FhNjOsifaoLdPAYc4yLw5Iw4= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -42,33 +49,27 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0 h1:6RRlFMv1omScs6iq2hfE3IvgE+l6RfJPampq8UZc5TU= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= +cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod 
h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.11.3/go.mod h1:RgX5GRRdDWNkh4pBrdzNpNPFVsdoUFY2+adM6nb1N+4= github.com/99designs/gqlgen v0.14.0 h1:Wg8aNYQUjMR/4v+W3xD+7SizOy6lSvVeQ06AobNQAXI= github.com/99designs/gqlgen v0.14.0/go.mod h1:S7z4boV+Nx4VvzMUpVrY/YuHjFX4n7rDyuTqvAkuoRE= -github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd h1:jtzFT7TsrvMTGwBn8DvwMFDowJ2INPqtP7HpL1R9TIY= -github.com/99designs/gqlgen-contrib v0.1.1-0.20200601100547-7a955d321bbd/go.mod h1:ud8RnaGvSBJFGEIfo0gMid33OUXXb68bNJlWUUZARGY= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DataDog/sketches-go v0.0.0-20190923095040-43f19ad77ff7 h1:qELHH0AWCvf98Yf+CNIJx9vOZOfHFDDzgDRYsnNk/vs= -github.com/DataDog/sketches-go v0.0.0-20190923095040-43f19ad77ff7/go.mod h1:Q5DbzQ+3AkgGwymQO7aZFNP7ns2lZKGtvRBzRXfdi60= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0 h1:d/0HrwVskjLkJIz70Gn9ADURRNaNdTGOkQ1TiuCOefU= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v0.2.0/go.mod h1:Ps0PAOihxzMbs4J2PWLffeKwJo3Bka6LHMLP6r/K0l8= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 h1:38fNtfhHY6bs22b/D6+hDzO6JR0rDzpGPD36dY2uPL4= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0/go.mod h1:jE23wM1jvwSKgdGcoOkj5j9n1VWtncW36pL2bK1JU+0= github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/sprig v2.22.0+incompatible/go.mod 
h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= -github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs= github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8= github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a h1:E/8AP5dFtMhl5KPJz66Kt9G0n+7Sn41Fy1wv9/jHOrc= github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= @@ -78,17 +79,16 @@ github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 h1:LY/kRH+fCqA090FsM2VfZ+oocD99ogm3HrT1r0WDnCk= github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63/go.mod h1:mF0ip7kTEFtnhBJbd/gJe62US3jykNN+dcZoZakJCCA= 
-github.com/aws/aws-sdk-go v1.34.28 h1:sscPpn/Ns3i0F4HPEWAVcwdIRaZZCuL7llJ2/60yPIk= -github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= -github.com/benbjohnson/clock v1.0.0/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= -github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0 h1:t/LhUZLVitR1Ow2YOnduCsavhwFUklBMoGVYUCqmCqk= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -96,7 +96,14 @@ github.com/client9/misspell v0.3.4/go.mod 
h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 h1:hzAQntlaYRkVSFEfj9OTWlVV1H155FMD8BTKktLv0QI= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 h1:zH8ljVhhq7yC0MIeUL/IviMtY8hx2mK8cN9wEYb8ggw= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= @@ -107,7 +114,6 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= 
-github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b h1:8xx0j7yceTAgVxonE+qOOepmwWS/Ic3OLQapY9HJajc= github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= @@ -118,21 +124,23 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.0 h1:WVt4HEPbdRbRD/PKKPbPnIVavO6gk/h673jWyIJ016k= +github.com/envoyproxy/go-control-plane v0.10.0/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.6.2 h1:JiO+kJTpmYGjEodY7O1Zk8oZcNz1+f30UtwtXoFUPzE= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= +github.com/felixge/httpsnoop v1.0.2/go.mod 
h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= @@ -141,7 +149,6 @@ github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD87 github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack 
v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= @@ -170,14 +177,15 @@ github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/V github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= github.com/goccy/go-yaml v1.9.2 h1:2Njwzw+0+pjU2gb805ZC1B/uBuAs2VcZ3K+ZgHwDs7w= github.com/goccy/go-yaml v1.9.2/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= -github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod 
h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -224,7 +232,6 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -244,14 +251,22 @@ github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9 h1:2tft2559dNwKl2znYB58oVTql0grRB+Ml3LWIBbc4WM= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0 h1:zHs+jv3LO743/zFGcByu2KmpbliCU2AhjcGgrdTwSG4= +github.com/google/pprof 
v0.0.0-20211008130755-947d60d73cc0/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3 h1:eHv/jVY/JNop1xg2J9cBb4EzyMpWZoNCP1BslSAIkOI= +github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3/go.mod h1:h/KNeRx7oYU4SpA4SoY7W2/NxDKEEVuwA6j9A27L4OI= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -260,7 +275,6 @@ github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/websocket 
v1.2.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= @@ -271,87 +285,69 @@ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= -github.com/iancoleman/strcase v0.1.3 h1:dJBk1m2/qjL1twPLf68JND55vvivMupZ4wIzE8CTdBw= -github.com/iancoleman/strcase v0.1.3/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE= +github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d h1:sQbbvtUoen3Tfl9G/079tXeqniwPH6TgM/lU4y7lQN8= github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:xVHEhsiSJJnT0jlcQpQUg+GyoLf0i0xciM1kqWTGT58= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jarcoal/httpmock v1.0.8 h1:8kI16SoO6LQKgPE7PvQuV+YuD/inwHd7fOOe2zMbo4k= github.com/jarcoal/httpmock v1.0.8/go.mod 
h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonas-p/go-shp v0.1.1 h1:LY81nN67DBCz6VNFn2kS64CjmnDo9IP8rmSkTvhO9jE= github.com/jonas-p/go-shp v0.1.1/go.mod h1:MRIhyxDQ6VVp0oYeD7yPGr5RSTNScUFKCDsI5DR7PtI= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= github.com/kennygrant/sanitize v1.2.4 
h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= -github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.10.10 h1:a/y8CglcM7gLGYmlbP/stPE5sR3hbhFRUjCBfd/0B3I= -github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= +github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.1.16/go.mod h1:awO+5TzAjvL8XpibdsfXxPgHr+orhtXZJZIQCVjogKI= -github.com/labstack/echo/v4 v4.2.1 h1:LF5Iq7t/jrtUuSutNuiEWtB5eiHfZ5gSe2pcu5exjQw= -github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg= +github.com/labstack/echo/v4 v4.6.1 
h1:OMVsrnNFzYlGSdaiYGHbgWQnr+JM7NG+B9suCPie14M= +github.com/labstack/echo/v4 v4.6.1/go.mod h1:RnjgMWNDB9g/HucVWhQYNQP9PvbYf6adqftqryo7s9k= github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.13 h1:qdl+GuBjcsKKDco5BsxPJlId98mSWNKqYA+Co0SC1yA= -github.com/mattn/go-isatty v0.0.13/go.mod 
h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= @@ -360,13 +356,10 @@ github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod 
h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/opentracing/opentracing-go v1.1.1-0.20190913142402-a7454ce5950e/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/paulmach/go.geojson v1.4.0 h1:5x5moCkCtDo5x8af62P9IOAYGQcYHtxz2QJ3x1DoCgY= github.com/paulmach/go.geojson v1.4.0/go.mod h1:YaKx1hKpWF+T2oj2lFJPsW/t1Q5e1jQI61eoQSTwpIs= -github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -375,18 +368,9 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.6.0/go.mod h1:ZLOG9ck3JLRdB5MgO8f+lLTe83AXG6ro35rLTxvnIl4= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= 
-github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/ravilushqa/otelgqlgen v0.2.0 h1:sLjnXsft8pD+qRhti0pbiH541ZdZOXurlX4h/HW67Yc= +github.com/ravilushqa/otelgqlgen v0.2.0/go.mod h1:XYDoucLfl1iAFyIL8UCgvumZjCx9J11m5jrlrzdrTNw= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -395,13 +379,11 @@ github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/httpfs 
v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -413,6 +395,8 @@ github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfIt github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= @@ -448,15 +432,11 @@ github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKn github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= -github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= 
github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= -github.com/vektah/gqlparser v1.3.1 h1:8b0IcD3qZKWJQHSzynbDlrtP3IxVydZ2DZepCGofqfU= -github.com/vektah/gqlparser v1.3.1/go.mod h1:bkVf0FX+Stjg/MHnm8mEyubuaArhNEqfQhF+OTiAL74= -github.com/vektah/gqlparser/v2 v2.0.1/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= github.com/vektah/gqlparser/v2 v2.2.0 h1:bAc3slekAAJW6sZTi07aGq0OrfaCjj4jxARAaC7g2EM= github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= @@ -465,9 +445,6 @@ github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= github.com/xdg-go/stringprep v1.0.2 h1:6iq84/ryjjeRmMJwxutI51F2GIPlP5BfTvXHeYjyhBc= github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= -github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= -github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= -github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ 
-476,9 +453,8 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.mongodb.org/mongo-driver v1.3.4/go.mod h1:MSWZXKOynuguX+JSvwP8i+58jYCXxbia8HS3gZBapIE= -go.mongodb.org/mongo-driver v1.5.1 h1:9nOVLGDfOaZ9R0tBumx/BcuqkbFpyTCU2r/Po7A2azI= -go.mongodb.org/mongo-driver v1.5.1/go.mod h1:gRXCHX4Jo7J0IJ1oDQyUxF7jfy19UfxniMS4xxMmUqw= +go.mongodb.org/mongo-driver v1.7.3 h1:G4l/eYY9VrQAK/AUgkV0koQKzQnyddnWxrd/Etf0jIs= +go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -487,15 +463,33 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/contrib v0.7.0 h1:6IuKhaeEk+uxX5icJCdsgqlDVbsbDEPFD6NcHCDp9QI= -go.opentelemetry.io/contrib v0.7.0/go.mod h1:g4BXWOrb66AyXbXlSgfGWR7TQzXQX4Oq2NidBrSwZPM= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb h1:Jr4s7kqO0pFyoJmTFm0zC10wr2N7sJ3dibMghcQ3YlE= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo v0.0.0-20200707171851-ae0d272a2deb/go.mod h1:r41UB9RpbyhLlJLZ05Avf4ez4znJOWyubPuaO4TAVC8= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0 h1:ytNCwz7GNLx3dr8kKNfcRjkN5yAoq/zmoQCnxnCrA6Y= 
-go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver v0.7.0/go.mod h1:aypkg2vu/qouzRc9dh/qx+0UKOv3oaowY9WxxOfS20Q= -go.opentelemetry.io/otel v0.5.0/go.mod h1:jzBIgIzK43Iu1BpDAXwqOd6UPsSAk+ewVZ5ofSXw4Ek= -go.opentelemetry.io/otel v0.7.0 h1:u43jukpwqR8EsyeJOMgrsUgZwVI1e1eVw7yuzRkD1l0= -go.opentelemetry.io/otel v0.7.0/go.mod h1:aZMyHG5TqDOXEgH2tyLiXSUKly1jT3yqE9PmrzIeCdo= +go.opentelemetry.io/contrib v0.23.0 h1:MgRuo0JZZX8J9WLRjyd7OpTSbaLOdQXXJa6SnZvlWLM= +go.opentelemetry.io/contrib v0.23.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.26.0 h1:cWoHMMzYycoFNhg00Fs3l+Bq+wrOylS+uAbzdPmKHo8= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.26.0/go.mod h1:aQWprSlKd3f5Dk5p+Au3XDGzjQyTfDb/YrWdcPtKrPw= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.26.0 h1:grpCdac/FGdtOezswo8ez5T3VweY23LkGZ0miiqF3os= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.26.0/go.mod h1:cEqIVE/mx9DacUC4me7sMchnVmj3LiUBxSZUSTdqzx4= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0 h1:qW6j1kJU24yo2xIu16Py4m4AXn1dd+s2uKllGnTFAm0= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0/go.mod h1:7W3JSDYTtH3qKKHrS1fMiwLtK7iZFLPq1+7htfspX/E= +go.opentelemetry.io/contrib/propagators/b3 v1.1.0 h1:PVDPc1TaspN2ksc2Ig8RLVAnmV1haEwmWiAQPhYrDp8= +go.opentelemetry.io/contrib/propagators/b3 v1.1.0/go.mod h1:FmkfYfWE0QCQHVpiio3tgDUijxfz32JO0duXldug9Ow= +go.opentelemetry.io/otel v1.0.0-RC3/go.mod h1:Ka5j3ua8tZs4Rkq4Ex3hwgBgOchyPVq5S6P2lz//nKQ= +go.opentelemetry.io/otel v1.0.0/go.mod h1:AjRVh9A5/5DE7S+mZtTR6t8vpKKryam+0lREnfmS4cg= +go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= +go.opentelemetry.io/otel v1.1.0 h1:8p0uMLcyyIx0KHNTgO8o3CW8A1aA+dJZJW6PvnMz0Wc= +go.opentelemetry.io/otel v1.1.0/go.mod 
h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= +go.opentelemetry.io/otel/internal/metric v0.23.0 h1:mPfzm9Iqhw7G2nDBmUAjFTfPqLZPbOW2k7QI57ITbaI= +go.opentelemetry.io/otel/internal/metric v0.23.0/go.mod h1:z+RPiDJe30YnCrOhFGivwBS+DU1JU/PiLKkk4re2DNY= +go.opentelemetry.io/otel/metric v0.23.0 h1:mYCcDxi60P4T27/0jchIDFa1WHEfQeU3zH9UEMpnj2c= +go.opentelemetry.io/otel/metric v0.23.0/go.mod h1:G/Nn9InyNnIv7J6YVkQfpc0JCfKBNJaERBGw08nqmVQ= +go.opentelemetry.io/otel/sdk v1.0.1/go.mod h1:HrdXne+BiwsOHYYkBE5ysIcv2bvdZstxzmCQhxTcZkI= +go.opentelemetry.io/otel/sdk v1.1.0 h1:j/1PngUJIDOddkCILQYTevrTIbWd494djgGkSsMit+U= +go.opentelemetry.io/otel/sdk v1.1.0/go.mod h1:3aQvM6uLm6C4wJpHtT8Od3vNzeZ34Pqc6bps8MywWzo= +go.opentelemetry.io/otel/trace v1.0.0-RC3/go.mod h1:VUt2TUYd8S2/ZRX09ZDFZQwn2RqfMB5MzO17jBojGxo= +go.opentelemetry.io/otel/trace v1.0.0/go.mod h1:PXTWqayeFUlJV1YDNhsJYB184+IvAH814St6o6ajzIs= +go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= +go.opentelemetry.io/otel/trace v1.1.0 h1:N25T9qCL0+7IpOT8RrRy0WYlL7y6U0WiUJzXcVdXY/o= +go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -503,18 +497,14 @@ golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto 
v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191105034135-c7e5f84aec59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c h1:9HhBz5L/UjnK9XLtiZhYAdue5BVKep3PMmS2LuPDt8k= -golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -538,7 +528,6 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= @@ -551,11 +540,11 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -563,7 +552,6 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -578,6 +566,7 @@ golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= @@ -591,8 +580,12 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net 
v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= +golang.org/x/net v0.0.0-20210716203947-853a461950ff/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210913180222-943fd674d43e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 h1:VrJZAjbekhoRn7n5FBujY31gboH+iB3pdLxn3gE9FjU= +golang.org/x/net v0.0.0-20211101193420-4a448f8816b3/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -605,8 +598,12 @@ golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914 h1:3B43BWw0xEBsLZ/NO1VALz6fppU3481pik+2Ksv45z8= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5 h1:v79phzBz03tsVCUTbvTBmmC3CUXF5mKYt7DA4ZVldpM= +golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -622,7 +619,6 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cO golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -641,7 +637,6 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -651,13 +646,11 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -671,15 +664,24 @@ golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210910150752-751e447fb3d0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term 
v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211102061401-a2f17f7b995c h1:QOfDMdrf/UwlVR0UBq2Mpr58UzNtvgJRXA4BgPfFACs= +golang.org/x/sys v0.0.0-20211102061401-a2f17f7b995c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -688,8 +690,9 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -697,7 +700,6 @@ golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= 
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -726,7 +728,6 @@ golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200114235610-7ae403b6b589/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -743,6 +744,7 @@ golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200701151220-7cb253f4c4f8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools 
v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= @@ -790,8 +792,14 @@ google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0 h1:SQaA2Cx57B+iPw2MBgyjEkoeMkRK2IenSGoia0U3lCk= google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E= +google.golang.org/api v0.60.0 h1:eq/zs5WPH4J9undYM9IP1O7dSr7Yh8Y0GtSCpzGzIUk= +google.golang.org/api v0.60.0/go.mod h1:d7rl65NZAkEQ90JFzqBjcRq1TVeG5ZoGV3sSpEnnVb4= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -808,7 +816,6 @@ google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRn google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191009194640-548a555dbc03/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= @@ -819,7 +826,6 @@ google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4 google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200303153909-beee998c1893/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= @@ -828,6 +834,7 @@ google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod 
h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200605102947-12044bf5ea91/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -850,8 +857,23 @@ google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxH google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea h1:8ZyCcgugUqamxp/vZSEJw9CMy7VZlSWYJLLJPi/dSDA= google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210721163202-f1cecdd8b78a/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod 
h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211021150943-2b146023228c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145 h1:vum3nDKdleYb+aePXKFEDT2+ghuH00EgYp9B7Q7EZZE= +google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -874,8 +896,11 @@ google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0 h1:Klz8I9kdtkIN6EpHHUOMLCYhTn/2WAe5a0s1hcBkdTI= google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc 
v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.41.0 h1:f+PlOh7QV4iIJkPrx5NQ7qaNGFQ3OTse67yaDHfju4E= +google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -903,12 +928,9 @@ gopkg.in/go-playground/colors.v1 v1.2.0 h1:SPweMUve+ywPrfwao+UvfD5Ah78aOLUkT5RlJ gopkg.in/go-playground/colors.v1 v1.2.0/go.mod h1:AvbqcMpNXVl5gBrM20jBm3VjjKBbH/kI5UnqjU7lxFI= gopkg.in/h2non/gock.v1 v1.1.0 h1:Yy6sSXyTP9wYc6+H7U0NuB1LQ6H2HYmDp2sxFQ8vTEY= gopkg.in/h2non/gock.v1 v1.1.0/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= @@ -925,6 +947,5 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= 
-sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k= diff --git a/internal/app/app.go b/internal/app/app.go index 0bf9b1ea6..b6926e708 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -3,6 +3,7 @@ package app import ( "errors" "net/http" + "net/http/pprof" "github.com/99designs/gqlgen/graphql/playground" "github.com/labstack/echo/v4" @@ -10,7 +11,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interactor" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/rerror" - echotracer "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo" + "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho" ) func initEcho(cfg *ServerConfig) *echo.Echo { @@ -26,8 +27,8 @@ func initEcho(cfg *ServerConfig) *echo.Echo { logger := GetEchoLogger() e.Logger = logger e.Use(logger.Hook()) - e.Use(middleware.Recover(), echotracer.Middleware("reearth-backend")) + e.Use(middleware.Recover(), otelecho.Middleware("reearth-backend")) origins := allowedOrigins(cfg) if len(origins) > 0 { e.Use( @@ -39,7 +40,12 @@ func initEcho(cfg *ServerConfig) *echo.Echo { if e.Debug { // enable pprof - e.GET("/debug/pprof/*", echo.WrapHandler(http.DefaultServeMux)) + pprofGroup := e.Group("/debug/pprof") + pprofGroup.Any("/cmdline", echo.WrapHandler(http.HandlerFunc(pprof.Cmdline))) + pprofGroup.Any("/profile", echo.WrapHandler(http.HandlerFunc(pprof.Profile))) + pprofGroup.Any("/symbol", echo.WrapHandler(http.HandlerFunc(pprof.Symbol))) + pprofGroup.Any("/trace", echo.WrapHandler(http.HandlerFunc(pprof.Trace))) + pprofGroup.Any("/*", echo.WrapHandler(http.HandlerFunc(pprof.Index))) } 
e.HTTPErrorHandler = func(err error, c echo.Context) { diff --git a/internal/app/graphql.go b/internal/app/graphql.go index 9f7926d90..ced6b24ec 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -4,19 +4,17 @@ import ( "context" "errors" - "github.com/99designs/gqlgen-contrib/gqlopencensus" - "github.com/99designs/gqlgen-contrib/gqlopentracing" "github.com/99designs/gqlgen/graphql" "github.com/99designs/gqlgen/graphql/handler" "github.com/99designs/gqlgen/graphql/handler/extension" "github.com/99designs/gqlgen/graphql/handler/lru" "github.com/99designs/gqlgen/graphql/playground" "github.com/labstack/echo/v4" - "github.com/vektah/gqlparser/v2/gqlerror" - + "github.com/ravilushqa/otelgqlgen" "github.com/reearth/reearth-backend/internal/adapter/gql" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/vektah/gqlparser/v2/gqlerror" ) const enableDataLoaders = true @@ -70,8 +68,7 @@ func graphqlAPI( }) srv := handler.NewDefaultServer(schema) - srv.Use(gqlopentracing.Tracer{}) - srv.Use(gqlopencensus.Tracer{}) + srv.Use(otelgqlgen.Middleware()) if conf.Config.GraphQL.ComplexityLimit > 0 { srv.Use(extension.FixedComplexityLimit(conf.Config.GraphQL.ComplexityLimit)) } diff --git a/internal/app/main.go b/internal/app/main.go index 8f33b955c..0f4cf6cdc 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -27,7 +27,7 @@ func Start(debug bool, version string) { initProfiler(conf.Profiler, version) // Init tracer - closer := initTracer(conf) + closer := initTracer(ctx, conf) defer func() { if closer != nil { if err := closer.Close(); err != nil { diff --git a/internal/app/repo.go b/internal/app/repo.go index 44a98f65c..27d12a694 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -9,10 +9,6 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/google" "github.com/spf13/afero" - "go.mongodb.org/mongo-driver/mongo" - 
"go.mongodb.org/mongo-driver/mongo/options" - mongotrace "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver" - "github.com/reearth/reearth-backend/internal/infrastructure/auth0" "github.com/reearth/reearth-backend/internal/infrastructure/fs" "github.com/reearth/reearth-backend/internal/infrastructure/gcs" @@ -20,6 +16,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/log" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo" ) func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo.Container, *gateway.Container) { @@ -32,7 +31,7 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. options.Client(). ApplyURI(conf.DB). SetConnectTimeout(time.Second*10). - SetMonitor(mongotrace.NewMonitor("reearth-backend")), + SetMonitor(otelmongo.NewMonitor()), ) if err != nil { log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) diff --git a/internal/app/tracer.go b/internal/app/tracer.go index ac18bd8f1..d7e5f9276 100644 --- a/internal/app/tracer.go +++ b/internal/app/tracer.go @@ -1,6 +1,7 @@ package app import ( + "context" "io" texporter "github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace" @@ -9,33 +10,31 @@ import ( jaegercfg "github.com/uber/jaeger-client-go/config" jaegerlog "github.com/uber/jaeger-client-go/log" "github.com/uber/jaeger-lib/metrics" - "go.opentelemetry.io/otel/api/global" + "go.opentelemetry.io/otel" sdktrace "go.opentelemetry.io/otel/sdk/trace" ) -func initTracer(conf *Config) io.Closer { +func initTracer(ctx context.Context, conf *Config) io.Closer { if conf.Tracer == "gcp" { - initGCPTracer(conf) + initGCPTracer(ctx, conf) } else if conf.Tracer == "jaeger" { return initJaegerTracer(conf) } return nil } 
-func initGCPTracer(conf *Config) { - exporter, err := texporter.NewExporter(texporter.WithProjectID(conf.GCPProject)) +func initGCPTracer(ctx context.Context, conf *Config) { + exporter, err := texporter.New(texporter.WithProjectID(conf.GCPProject)) if err != nil { log.Fatalln(err) } - tp, err := sdktrace.NewProvider(sdktrace.WithConfig(sdktrace.Config{ - DefaultSampler: sdktrace.ProbabilitySampler(conf.TracerSample), - }), sdktrace.WithSyncer(exporter)) - if err != nil { - log.Fatalln(err) - } + tp := sdktrace.NewTracerProvider(sdktrace.WithSyncer(exporter), sdktrace.WithSampler(sdktrace.TraceIDRatioBased(conf.TracerSample))) + defer func() { + _ = tp.ForceFlush(ctx) + }() - global.SetTraceProvider(tp) + otel.SetTracerProvider(tp) log.Infof("tracer: initialized cloud trace with sample fraction: %g", conf.TracerSample) } From 9ece9e89a15ed901c319e94766204a5b6c86d11e Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 5 Nov 2021 11:37:21 +0900 Subject: [PATCH 101/253] refactor: add PropertySchemaGroupID to pkg/id (#70) --- gqlgen.yml | 2 + internal/adapter/gql/generated.go | 127 +++++++++++------- .../adapter/gql/gqlmodel/convert_property.go | 2 +- internal/adapter/gql/gqlmodel/models.go | 2 +- internal/adapter/gql/gqlmodel/models_gen.go | 70 +++++----- internal/adapter/gql/gqlmodel/scalar.go | 13 ++ .../adapter/gql/resolver_mutation_property.go | 18 +-- .../201217193948_add_scene_default_tile.go | 4 +- .../infrastructure/mongo/mongodoc/property.go | 2 +- .../mongo/mongodoc/property_schema.go | 4 +- internal/usecase/interactor/scene.go | 2 +- pkg/id/property_schema_field.go | 6 - pkg/id/property_schema_group.go | 36 +++++ pkg/id/property_schema_group_test.go | 63 +++++++++ pkg/layer/decoding/common.go | 2 +- pkg/layer/decoding/reearth.go | 4 +- pkg/layer/decoding/reearth_test.go | 4 +- pkg/layer/encoding/czml_test.go | 26 ++-- pkg/layer/encoding/geojson_test.go | 20 +-- pkg/layer/encoding/kml_test.go | 26 ++-- pkg/layer/encoding/shp_test.go | 6 +- 
pkg/plugin/manifest/convert.go | 4 +- pkg/plugin/manifest/convert_test.go | 6 +- pkg/plugin/manifest/parser_test.go | 2 +- pkg/plugin/manifest/parser_translation.go | 2 +- pkg/property/builder_test.go | 4 +- pkg/property/group.go | 21 +-- pkg/property/group_builder.go | 11 +- pkg/property/group_builder_test.go | 12 +- pkg/property/group_list.go | 6 +- pkg/property/group_list_builder.go | 11 +- pkg/property/group_list_builder_test.go | 14 +- pkg/property/group_list_test.go | 10 +- pkg/property/group_test.go | 4 +- pkg/property/initializer.go | 4 +- pkg/property/initializer_test.go | 16 +-- pkg/property/item.go | 10 +- pkg/property/item_builder.go | 2 +- pkg/property/item_test.go | 6 +- pkg/property/merged.go | 4 +- pkg/property/merged_test.go | 8 +- pkg/property/migrator.go | 15 +++ pkg/property/pointer.go | 101 ++++++-------- pkg/property/pointer_test.go | 2 +- pkg/property/property.go | 8 +- pkg/property/property_test.go | 22 +-- pkg/property/schema.go | 17 +-- pkg/property/schema_builder.go | 14 +- pkg/property/schema_group.go | 8 +- pkg/property/schema_group_builder.go | 2 +- pkg/property/schema_group_builder_test.go | 60 ++++----- pkg/property/schema_group_test.go | 14 +- pkg/property/schema_test.go | 13 +- pkg/property/sealed.go | 4 +- pkg/property/sealed_test.go | 4 +- pkg/scene/builder/builder_test.go | 4 +- pkg/scene/builder/encoder_test.go | 2 +- schema.graphql | 27 ++-- 58 files changed, 494 insertions(+), 419 deletions(-) create mode 100644 pkg/id/property_schema_group.go create mode 100644 pkg/id/property_schema_group_test.go create mode 100644 pkg/property/migrator.go diff --git a/gqlgen.yml b/gqlgen.yml index f13110be9..3f1c7a163 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -29,6 +29,8 @@ models: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PluginExtensionID PropertySchemaID: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaID + PropertySchemaGroupID: + model: 
github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaGroupID PropertySchemaFieldID: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaFieldID DatasetSchemaFieldID: diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index aec43fda8..5247cea3a 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -6061,6 +6061,7 @@ scalar FileSize scalar PluginID scalar PluginExtensionID scalar PropertySchemaID +scalar PropertySchemaGroupID scalar PropertySchemaFieldID scalar DatasetSchemaFieldID scalar TranslatedString @@ -6443,7 +6444,7 @@ type PropertyLinkableFields { } type PropertySchemaGroup { - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! schemaId: PropertySchemaID! fields: [PropertySchemaField!]! isList: Boolean! @@ -6515,7 +6516,7 @@ union PropertyItem = PropertyGroup | PropertyGroupList type PropertyGroup { id: ID! schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! fields: [PropertyField!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -6524,7 +6525,7 @@ type PropertyGroup { type PropertyGroupList { id: ID! schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! groups: [PropertyGroup!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -6572,7 +6573,7 @@ type MergedPropertyGroup { parentPropertyId: ID originalId: ID parentId: ID - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! # note: schemaId will not always be set schemaId: PropertySchemaID linkedDatasetId: ID @@ -6989,7 +6990,7 @@ input SyncDatasetInput { input UpdatePropertyValueInput { propertyId: ID! 
- schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! value: Any @@ -6998,14 +6999,14 @@ input UpdatePropertyValueInput { input RemovePropertyFieldInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! } input UploadFileToPropertyInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! file: Upload! @@ -7013,7 +7014,7 @@ input UploadFileToPropertyInput { input LinkDatasetToPropertyValueInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! datasetSchemaIds: [ID!]! @@ -7023,14 +7024,14 @@ input LinkDatasetToPropertyValueInput { input UnlinkPropertyValueInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! } input AddPropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! index: Int nameFieldValue: Any nameFieldType: ValueType @@ -7038,20 +7039,20 @@ input AddPropertyItemInput { input MovePropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! itemId: ID! index: Int! } input RemovePropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! itemId: ID! } input UpdatePropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! operations: [UpdatePropertyItemOperationInput!]! 
} @@ -16605,9 +16606,9 @@ func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Conte } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(id.PropertySchemaGroupID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -23123,9 +23124,9 @@ func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(id.PropertySchemaGroupID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { @@ -23327,9 +23328,9 @@ func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(id.PropertySchemaGroupID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { @@ -24548,9 +24549,9 @@ func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Conte } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(id.PropertySchemaGroupID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) } func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { @@ -31633,11 +31634,11 @@ func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Conte if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -32349,11 +32350,11 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + 
it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -32498,11 +32499,11 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -32769,11 +32770,11 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -32816,11 +32817,11 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -33026,11 +33027,11 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -33380,11 +33381,11 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Co if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -33474,11 +33475,11 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = 
ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -33748,11 +33749,11 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. if err != nil { return it, err } - case "schemaItemId": + case "schemaGroupId": var err error - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaItemId")) - it.SchemaItemID, err = ec.unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) + it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) if err != nil { return it, err } @@ -42270,6 +42271,21 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reeart return ec._PropertySchemaGroup(ctx, sel, v) } +func (ec *executionContext) unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, v interface{}) (id.PropertySchemaGroupID, error) { + res, err := gqlmodel.UnmarshalPropertySchemaGroupID(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaGroupID) graphql.Marshaler { + res := gqlmodel.MarshalPropertySchemaGroupID(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + } + return res +} + func (ec 
*executionContext) unmarshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (id.PropertySchemaID, error) { res, err := gqlmodel.UnmarshalPropertySchemaID(v) return res, graphql.ErrorOnPath(ctx, err) @@ -43989,6 +44005,21 @@ func (ec *executionContext) marshalOPropertySchemaGroup2แš–githubแš—comแš‹reeart return ec._PropertySchemaGroup(ctx, sel, v) } +func (ec *executionContext) unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, v interface{}) (*id.PropertySchemaGroupID, error) { + if v == nil { + return nil, nil + } + res, err := gqlmodel.UnmarshalPropertySchemaGroupID(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaGroupID) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return gqlmodel.MarshalPropertySchemaGroupID(*v) +} + func (ec *executionContext) unmarshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (*id.PropertySchemaID, error) { if v == nil { return nil, nil diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index bc7491c87..746e4c82c 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -608,7 +608,7 @@ func ToPropertyConditon(c *property.Condition) *PropertyCondition { } } -func FromPointer(schemaItem *id.PropertySchemaFieldID, item *id.ID, field *id.PropertySchemaFieldID) *property.Pointer { +func FromPointer(schemaItem *id.PropertySchemaGroupID, item *id.ID, field *id.PropertySchemaFieldID) *property.Pointer { i := id.PropertyItemIDFromRefID(item) return 
property.NewPointer(schemaItem, i, field) } diff --git a/internal/adapter/gql/gqlmodel/models.go b/internal/adapter/gql/gqlmodel/models.go index eac665f7a..0e94c0068 100644 --- a/internal/adapter/gql/gqlmodel/models.go +++ b/internal/adapter/gql/gqlmodel/models.go @@ -152,7 +152,7 @@ func (d *PropertyGroup) Field(id id.PropertySchemaFieldID) *PropertyField { return nil } -func (d *PropertySchema) Group(id id.PropertySchemaFieldID) *PropertySchemaGroup { +func (d *PropertySchema) Group(id id.PropertySchemaGroupID) *PropertySchemaGroup { if d == nil || id == "" { return nil } diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 72d63a810..af39ce5ab 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -127,7 +127,7 @@ type AddMemberToTeamPayload struct { type AddPropertyItemInput struct { PropertyID id.ID `json:"propertyId"` - SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` Index *int `json:"index"` NameFieldValue interface{} `json:"nameFieldValue"` NameFieldType *ValueType `json:"nameFieldType"` @@ -516,7 +516,7 @@ func (LayerItem) IsLayer() {} type LinkDatasetToPropertyValueInput struct { PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` + SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` ItemID *id.ID `json:"itemId"` FieldID id.PropertySchemaFieldID `json:"fieldId"` DatasetSchemaIds []*id.ID `json:"datasetSchemaIds"` @@ -583,7 +583,7 @@ type MergedPropertyGroup struct { ParentPropertyID *id.ID `json:"parentPropertyId"` OriginalID *id.ID `json:"originalId"` ParentID *id.ID `json:"parentId"` - SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` SchemaID *id.PropertySchemaID `json:"schemaId"` LinkedDatasetID *id.ID `json:"linkedDatasetId"` Fields 
[]*MergedPropertyField `json:"fields"` @@ -622,10 +622,10 @@ type MoveLayerPayload struct { } type MovePropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID id.ID `json:"itemId"` - Index int `json:"index"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` + ItemID id.ID `json:"itemId"` + Index int `json:"index"` } type PageInfo struct { @@ -778,7 +778,7 @@ type PropertyFieldPayload struct { type PropertyGroup struct { ID id.ID `json:"id"` SchemaID id.PropertySchemaID `json:"schemaId"` - SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` Fields []*PropertyField `json:"fields"` Schema *PropertySchema `json:"schema"` SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` @@ -789,7 +789,7 @@ func (PropertyGroup) IsPropertyItem() {} type PropertyGroupList struct { ID id.ID `json:"id"` SchemaID id.PropertySchemaID `json:"schemaId"` - SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` Groups []*PropertyGroup `json:"groups"` Schema *PropertySchema `json:"schema"` SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` @@ -845,7 +845,7 @@ type PropertySchemaFieldChoice struct { } type PropertySchemaGroup struct { - SchemaGroupID id.PropertySchemaFieldID `json:"schemaGroupId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` SchemaID id.PropertySchemaID `json:"schemaId"` Fields []*PropertySchemaField `json:"fields"` IsList bool `json:"isList"` @@ -929,16 +929,16 @@ type RemoveMyAuthInput struct { } type RemovePropertyFieldInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID 
*id.PropertySchemaGroupID `json:"schemaGroupId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` } type RemovePropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID id.ID `json:"itemId"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` + ItemID id.ID `json:"itemId"` } type RemoveTagInput struct { @@ -1097,10 +1097,10 @@ type UninstallPluginPayload struct { } type UnlinkPropertyValueInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` } type UpdateDatasetSchemaInput struct { @@ -1164,9 +1164,9 @@ type UpdateProjectInput struct { } type UpdatePropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID id.PropertySchemaFieldID `json:"schemaItemId"` - Operations []*UpdatePropertyItemOperationInput `json:"operations"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` + Operations []*UpdatePropertyItemOperationInput `json:"operations"` } type UpdatePropertyItemOperationInput struct { @@ -1178,12 +1178,12 @@ type UpdatePropertyItemOperationInput struct { } type UpdatePropertyValueInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Value interface{} `json:"value"` - Type ValueType `json:"type"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` + ItemID *id.ID `json:"itemId"` + FieldID 
id.PropertySchemaFieldID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` } type UpdateTagInput struct { @@ -1241,11 +1241,11 @@ type UpgradePluginPayload struct { } type UploadFileToPropertyInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaItemID *id.PropertySchemaFieldID `json:"schemaItemId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - File graphql.Upload `json:"file"` + PropertyID id.ID `json:"propertyId"` + SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` + ItemID *id.ID `json:"itemId"` + FieldID id.PropertySchemaFieldID `json:"fieldId"` + File graphql.Upload `json:"file"` } type UploadPluginInput struct { diff --git a/internal/adapter/gql/gqlmodel/scalar.go b/internal/adapter/gql/gqlmodel/scalar.go index dfe15b19e..143666b3b 100644 --- a/internal/adapter/gql/gqlmodel/scalar.go +++ b/internal/adapter/gql/gqlmodel/scalar.go @@ -116,6 +116,19 @@ func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { return id.PropertySchemaID{}, errors.New("invalid ID") } +func MarshalPropertySchemaGroupID(t id.PropertySchemaGroupID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaGroupID(v interface{}) (id.PropertySchemaGroupID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaGroupID(tmpStr), nil + } + return id.PropertySchemaGroupID(""), errors.New("invalid ID") +} + func MarshalPropertySchemaFieldID(t id.PropertySchemaFieldID) graphql.Marshaler { return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(t.String())) diff --git a/internal/adapter/gql/resolver_mutation_property.go b/internal/adapter/gql/resolver_mutation_property.go index b6cf3c3ac..1e0565e70 100644 --- a/internal/adapter/gql/resolver_mutation_property.go +++ b/internal/adapter/gql/resolver_mutation_property.go @@ -21,7 +21,7 @@ 
func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmod pp, pgl, pg, pf, err := r.usecases.Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), Value: v, }, getOperator(ctx)) if err != nil { @@ -40,7 +40,7 @@ func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmod p, err := r.usecases.Property.RemoveField(ctx, interfaces.RemovePropertyFieldParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), }, getOperator(ctx)) if err != nil { return nil, err @@ -57,7 +57,7 @@ func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmo p, pgl, pg, pf, err := r.usecases.Property.UploadFile(ctx, interfaces.UploadFileParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), File: gqlmodel.FromFile(&input.File), }, getOperator(ctx)) if err != nil { @@ -76,7 +76,7 @@ func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input p, pgl, pg, pf, err := r.usecases.Property.LinkValue(ctx, interfaces.LinkPropertyValueParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), Links: gqlmodel.FromPropertyFieldLink( input.DatasetSchemaIds, input.DatasetIds, @@ -99,7 +99,7 @@ func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmod p, pgl, pg, pf, err := 
r.usecases.Property.UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaItemID, input.ItemID, &input.FieldID), + Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), }, getOperator(ctx)) if err != nil { return nil, err @@ -122,7 +122,7 @@ func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.A p, pgl, pi, err := r.usecases.Property.AddItem(ctx, interfaces.AddPropertyItemParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaItemID, nil, nil), + Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, nil, nil), Index: input.Index, NameFieldValue: v, }, getOperator(ctx)) @@ -143,7 +143,7 @@ func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel. p, pgl, pi, err := r.usecases.Property.MoveItem(ctx, interfaces.MovePropertyItemParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaItemID, &input.ItemID, nil), + Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, &input.ItemID, nil), Index: input.Index, }, getOperator(ctx)) if err != nil { @@ -162,7 +162,7 @@ func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmode p, err := r.usecases.Property.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaItemID, &input.ItemID, nil), + Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, &input.ItemID, nil), }, getOperator(ctx)) if err != nil { return nil, err @@ -194,7 +194,7 @@ func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmod p, err2 := r.usecases.Property.UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaItemID, nil, nil), + Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, nil, nil), Operations: 
op, }, getOperator(ctx)) if err2 != nil { diff --git a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go index de2b0cd7b..bf1f160f1 100644 --- a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go +++ b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go @@ -52,11 +52,11 @@ func AddSceneDefaultTile(ctx context.Context, c DBClient) error { log.Infof("migration: AddSceneDefaultTile: hit properties: %d\n", len(properties)) for _, p := range properties { - g := p.GetOrCreateGroupList(scenePropertySchema, property.PointItemBySchema(id.PropertySchemaFieldID("tiles"))) + g := p.GetOrCreateGroupList(scenePropertySchema, property.PointItemBySchema(id.PropertySchemaGroupID("tiles"))) if g == nil || g.Count() > 0 { continue } - f := property.NewGroup().NewID().Schema(p.Schema(), id.PropertySchemaFieldID("tiles")).MustBuild() + f := property.NewGroup().NewID().Schema(p.Schema(), id.PropertySchemaGroupID("tiles")).MustBuild() g.Add(f, -1) } diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index 16eb1c3b4..f55f17c54 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -247,7 +247,7 @@ func toModelPropertyItem(f *PropertyItemDocument) (property.Item, error) { if err != nil { return nil, err } - gid := id.PropertySchemaFieldID(f.SchemaGroup) + gid := id.PropertySchemaGroupID(f.SchemaGroup) if f.Type == typePropertyItemGroup { fields := make([]*property.Field, 0, len(f.Fields)) diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go index 1a491bddd..7fca03ad5 100644 --- a/internal/infrastructure/mongo/mongodoc/property_schema.go +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -263,7 +263,7 @@ 
func toModelPropertySchemaGroup(d *PropertySchemaGroupDocument, sid id.PropertyS } return property.NewSchemaGroup(). - ID(id.PropertySchemaFieldID(d.ID)). + ID(id.PropertySchemaGroupID(d.ID)). Schema(sid). IsList(d.List). Title(d.Title). @@ -294,7 +294,7 @@ func toModelPropertyPointer(p *PropertyPointerDocument) *property.Pointer { return nil } return property.NewPointer( - id.PropertySchemaFieldIDFrom(p.SchemaGroupID), + id.PropertySchemaGroupIDFrom(p.SchemaGroupID), id.PropertyItemIDFromRef(p.ItemID), id.PropertySchemaFieldIDFrom(p.FieldID), ) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 6f32051f5..ae71a1628 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -109,7 +109,7 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. } // add default tile - tiles := id.PropertySchemaFieldID("tiles") + tiles := id.PropertySchemaGroupID("tiles") g := p.GetOrCreateGroupList(schema, property.PointItemBySchema(tiles)) g.Add(property.NewGroup().NewID().Schema(schema.ID(), tiles).MustBuild(), -1) diff --git a/pkg/id/property_schema_field.go b/pkg/id/property_schema_field.go index 64d208ec1..aaa986e98 100644 --- a/pkg/id/property_schema_field.go +++ b/pkg/id/property_schema_field.go @@ -1,9 +1,7 @@ package id -// PropertySchemaFieldID _ type PropertySchemaFieldID string -// PropertySchemaFieldIDFrom _ func PropertySchemaFieldIDFrom(str *string) *PropertySchemaFieldID { if str == nil { return nil @@ -12,13 +10,11 @@ func PropertySchemaFieldIDFrom(str *string) *PropertySchemaFieldID { return &id } -// Ref _ func (id PropertySchemaFieldID) Ref() *PropertySchemaFieldID { id2 := id return &id2 } -// CopyRef _ func (id *PropertySchemaFieldID) CopyRef() *PropertySchemaFieldID { if id == nil { return nil @@ -27,12 +23,10 @@ func (id *PropertySchemaFieldID) CopyRef() *PropertySchemaFieldID { return &id2 } -// String _ func (id PropertySchemaFieldID) String() string 
{ return string(id) } -// StringRef _ func (id *PropertySchemaFieldID) StringRef() *string { if id == nil { return nil diff --git a/pkg/id/property_schema_group.go b/pkg/id/property_schema_group.go new file mode 100644 index 000000000..d0556e05d --- /dev/null +++ b/pkg/id/property_schema_group.go @@ -0,0 +1,36 @@ +package id + +type PropertySchemaGroupID string + +func PropertySchemaGroupIDFrom(str *string) *PropertySchemaGroupID { + if str == nil { + return nil + } + id := PropertySchemaGroupID(*str) + return &id +} + +func (id PropertySchemaGroupID) Ref() *PropertySchemaGroupID { + id2 := id + return &id2 +} + +func (id *PropertySchemaGroupID) CopyRef() *PropertySchemaGroupID { + if id == nil { + return nil + } + id2 := *id + return &id2 +} + +func (id PropertySchemaGroupID) String() string { + return string(id) +} + +func (id *PropertySchemaGroupID) StringRef() *string { + if id == nil { + return nil + } + str := string(*id) + return &str +} diff --git a/pkg/id/property_schema_group_test.go b/pkg/id/property_schema_group_test.go new file mode 100644 index 000000000..394a825d6 --- /dev/null +++ b/pkg/id/property_schema_group_test.go @@ -0,0 +1,63 @@ +package id + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPropertySchemaGroupIDFrom(t *testing.T) { + t.Parallel() + input1 := "testStringId" + expected1 := PropertySchemaGroupID(input1) + testCases := []struct { + name string + input *string + expected *PropertySchemaGroupID + }{ + { + name: "success:string input", + input: &input1, + expected: &expected1, + }, + { + name: "fail:nil pointer", + input: nil, + expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := PropertySchemaGroupIDFrom(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestPropertySchemaGroupID_Ref(t *testing.T) { + PropertySchemaGroupID := PropertySchemaGroupID("test") + + assert.Equal(t, &PropertySchemaGroupID, 
PropertySchemaGroupID.Ref()) +} + +func TestPropertySchemaGroupID_CopyRef(t *testing.T) { + PropertySchemaGroupID := PropertySchemaGroupID("test") + + assert.Equal(t, PropertySchemaGroupID, *PropertySchemaGroupID.CopyRef()) + + assert.False(t, PropertySchemaGroupID.Ref() == PropertySchemaGroupID.CopyRef()) +} + +func TestPropertySchemaGroupID_String(t *testing.T) { + PropertySchemaGroupID := PropertySchemaGroupID("test") + + assert.Equal(t, "test", PropertySchemaGroupID.String()) +} + +func TestPropertySchemaGroupID_StringRef(t *testing.T) { + PropertySchemaGroupID := PropertySchemaGroupID("test") + + assert.Equal(t, "test", *PropertySchemaGroupID.StringRef()) +} diff --git a/pkg/layer/decoding/common.go b/pkg/layer/decoding/common.go index c7a6df037..15822ea21 100644 --- a/pkg/layer/decoding/common.go +++ b/pkg/layer/decoding/common.go @@ -28,7 +28,7 @@ var ( "Polygon": id.MustPropertySchemaID("reearth/polygon"), "Polyline": id.MustPropertySchemaID("reearth/polyline"), } - propertyItems = id.PropertySchemaFieldID("default") + propertyItems = id.PropertySchemaGroupID("default") propertyFields = map[string]id.PropertySchemaFieldID{ "Point": id.PropertySchemaFieldID("location"), "Polygon": id.PropertySchemaFieldID("polygon"), diff --git a/pkg/layer/decoding/reearth.go b/pkg/layer/decoding/reearth.go index 3b7fefaed..96e8fcbdf 100644 --- a/pkg/layer/decoding/reearth.go +++ b/pkg/layer/decoding/reearth.go @@ -182,7 +182,7 @@ func (f *ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { } } -type ReearthProperty map[id.PropertySchemaFieldID]ReearthPropertyItem +type ReearthProperty map[id.PropertySchemaGroupID]ReearthPropertyItem func (p ReearthProperty) property(schema *id.PropertySchemaID) *property.Initializer { if schema == nil || p == nil { @@ -206,7 +206,7 @@ type ReearthPropertyItem struct { Fields ReearthPropertyGroup `json:"fields"` } -func (p *ReearthPropertyItem) propertyItem(key id.PropertySchemaFieldID) *property.InitializerItem { +func (p 
*ReearthPropertyItem) propertyItem(key id.PropertySchemaGroupID) *property.InitializerItem { if p == nil { return nil } diff --git a/pkg/layer/decoding/reearth_test.go b/pkg/layer/decoding/reearth_test.go index e34ccb101..8ddf35cc6 100644 --- a/pkg/layer/decoding/reearth_test.go +++ b/pkg/layer/decoding/reearth_test.go @@ -134,7 +134,7 @@ func TestReearthDecoder_Decode(t *testing.T) { Items: []*property.InitializerItem{ { ID: prop.Items()[0].ID().Ref(), - SchemaItem: id.PropertySchemaFieldID("default"), + SchemaItem: id.PropertySchemaGroupID("default"), Fields: []*property.InitializerField{ { Field: id.PropertySchemaFieldID("latlng"), @@ -180,7 +180,7 @@ func TestReearthDecoder_Decode(t *testing.T) { Items: []*property.InitializerItem{ { ID: prop.Items()[0].ID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), Groups: []*property.InitializerGroup{ { ID: property.ToGroupList(prop.Items()[0]).GroupAt(0).IDRef(), diff --git a/pkg/layer/encoding/czml_test.go b/pkg/layer/encoding/czml_test.go index feb141e22..0167ce5bd 100644 --- a/pkg/layer/encoding/czml_test.go +++ b/pkg/layer/encoding/czml_test.go @@ -39,7 +39,7 @@ func TestEncodeCZMLPoint(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -59,7 +59,7 @@ func TestEncodeCZMLPoint(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -77,7 +77,7 @@ func TestEncodeCZMLPoint(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, @@ -95,7 +95,7 @@ func 
TestEncodeCZMLPoint(t *testing.T) { item4 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl4, Groups: nil, @@ -202,7 +202,7 @@ func TestEncodeCZMLPolygon(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -221,7 +221,7 @@ func TestEncodeCZMLPolygon(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -239,7 +239,7 @@ func TestEncodeCZMLPolygon(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, @@ -257,7 +257,7 @@ func TestEncodeCZMLPolygon(t *testing.T) { item4 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl4, Groups: nil, @@ -275,7 +275,7 @@ func TestEncodeCZMLPolygon(t *testing.T) { item5 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl5, Groups: nil, @@ -293,7 +293,7 @@ func TestEncodeCZMLPolygon(t *testing.T) { item6 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl6, Groups: nil, @@ -404,7 +404,7 @@ func TestEncodeCZMLPolyline(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, 
- SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -424,7 +424,7 @@ func TestEncodeCZMLPolyline(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -442,7 +442,7 @@ func TestEncodeCZMLPolyline(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, diff --git a/pkg/layer/encoding/geojson_test.go b/pkg/layer/encoding/geojson_test.go index e2c1100f7..e105a85cf 100644 --- a/pkg/layer/encoding/geojson_test.go +++ b/pkg/layer/encoding/geojson_test.go @@ -37,7 +37,7 @@ func TestPointEncodeGeoJSON(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -57,7 +57,7 @@ func TestPointEncodeGeoJSON(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -75,7 +75,7 @@ func TestPointEncodeGeoJSON(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, @@ -172,7 +172,7 @@ func TestPolygonEncodeGeoJSON(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -191,7 
+191,7 @@ func TestPolygonEncodeGeoJSON(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -209,7 +209,7 @@ func TestPolygonEncodeGeoJSON(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, @@ -227,7 +227,7 @@ func TestPolygonEncodeGeoJSON(t *testing.T) { item4 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl4, Groups: nil, @@ -326,7 +326,7 @@ func TestPolylineEncodeGeoJSON(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -346,7 +346,7 @@ func TestPolylineEncodeGeoJSON(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -364,7 +364,7 @@ func TestPolylineEncodeGeoJSON(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, diff --git a/pkg/layer/encoding/kml_test.go b/pkg/layer/encoding/kml_test.go index 5cc46b0c6..3ab474578 100644 --- a/pkg/layer/encoding/kml_test.go +++ b/pkg/layer/encoding/kml_test.go @@ -37,7 +37,7 @@ func TestEncodeKMLMarker(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + 
SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -56,7 +56,7 @@ func TestEncodeKMLMarker(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -74,7 +74,7 @@ func TestEncodeKMLMarker(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, @@ -92,7 +92,7 @@ func TestEncodeKMLMarker(t *testing.T) { item4 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl4, Groups: nil, @@ -199,7 +199,7 @@ func TestEncodeKMLPolygon(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -218,7 +218,7 @@ func TestEncodeKMLPolygon(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -236,7 +236,7 @@ func TestEncodeKMLPolygon(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, @@ -254,7 +254,7 @@ func TestEncodeKMLPolygon(t *testing.T) { item4 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl4, Groups: nil, @@ -272,7 
+272,7 @@ func TestEncodeKMLPolygon(t *testing.T) { item5 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl5, Groups: nil, @@ -290,7 +290,7 @@ func TestEncodeKMLPolygon(t *testing.T) { item6 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl6, Groups: nil, @@ -398,7 +398,7 @@ func TestEncodeKMLPolyline(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -418,7 +418,7 @@ func TestEncodeKMLPolyline(t *testing.T) { item2 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl2, Groups: nil, @@ -436,7 +436,7 @@ func TestEncodeKMLPolyline(t *testing.T) { item3 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl3, Groups: nil, diff --git a/pkg/layer/encoding/shp_test.go b/pkg/layer/encoding/shp_test.go index 87fb1c610..981177d80 100644 --- a/pkg/layer/encoding/shp_test.go +++ b/pkg/layer/encoding/shp_test.go @@ -37,7 +37,7 @@ func TestEncodeSHPMarker(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -137,7 +137,7 @@ func TestEncodeSHPPolygon(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: 
id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -232,7 +232,7 @@ func TestEncodeSHPPolyline(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index c7303829a..a933204d3 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -233,7 +233,7 @@ func (p *PropertyPointer) pointer() *property.Pointer { return nil } return property.NewPointer( - id.PropertySchemaFieldIDFrom(&p.SchemaGroupID), + id.PropertySchemaGroupIDFrom(&p.SchemaGroupID), nil, id.PropertySchemaFieldIDFrom(&p.FieldID), ) @@ -257,7 +257,7 @@ func (i PropertySchemaGroup) schemaGroup(sid id.PropertySchemaID) (*property.Sch } return property.NewSchemaGroup(). - ID(id.PropertySchemaFieldID(i.ID)). + ID(id.PropertySchemaGroupID(i.ID)). Schema(sid). IsList(i.List). Fields(fields). 
diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index f963938fe..f6060c9c6 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -340,7 +340,7 @@ func TestPointer(t *testing.T) { FieldID: "xxx", SchemaGroupID: "aaa", }, - expected: property.NewPointer(id.PropertySchemaFieldIDFrom(&sg), nil, id.PropertySchemaFieldIDFrom(&f)), + expected: property.NewPointer(id.PropertySchemaGroupIDFrom(&sg), nil, id.PropertySchemaFieldIDFrom(&f)), }, } for _, tc := range testCases { @@ -412,8 +412,8 @@ func TestLinkable(t *testing.T) { }, }, expected: property.LinkableFields{ - LatLng: property.NewPointer(id.PropertySchemaFieldIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&l)), - URL: property.NewPointer(id.PropertySchemaFieldIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&u)), + LatLng: property.NewPointer(id.PropertySchemaGroupIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&l)), + URL: property.NewPointer(id.PropertySchemaGroupIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&u)), }, }, } diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index c9e1dee2e..e73971282 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -32,7 +32,7 @@ var normalExpected = &Manifest{ }).MustBuild(), ExtensionSchema: []*property.Schema{ property.NewSchema().ID(id.MustPropertySchemaID("aaa~1.1.1/hoge")).Groups([]*property.SchemaGroup{ - property.NewSchemaGroup().ID(id.PropertySchemaFieldID("default")). + property.NewSchemaGroup().ID(id.PropertySchemaGroupID("default")). Schema(id.MustPropertySchemaID("aaa~1.1.1/hoge")). RepresentativeField(id.PropertySchemaFieldID("a").Ref()). 
Fields([]*property.SchemaField{ diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 57a644a35..54dd00fad 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -92,7 +92,7 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani } for key, tsg := range te.PropertySchema { - psg := ps.Group(id.PropertySchemaFieldID(key)) + psg := ps.Group(id.PropertySchemaGroupID(key)) if psg == nil { continue } diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index 241ccb0b3..307331090 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -39,7 +39,7 @@ func TestBuilder_Items(t *testing.T) { iid := id.NewPropertyItemID() propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + propertySchemaGroup1ID := id.PropertySchemaGroupID("A") testCases := []struct { Name string @@ -101,7 +101,7 @@ func TestBuilder_Build(t *testing.T) { scid := id.MustPropertySchemaID("xxx~1.1.1/aa") iid := id.NewPropertyItemID() propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + propertySchemaGroup1ID := id.PropertySchemaGroupID("A") testCases := []struct { Name string diff --git a/pkg/property/group.go b/pkg/property/group.go index c5f252148..bbacd6ceb 100644 --- a/pkg/property/group.go +++ b/pkg/property/group.go @@ -18,7 +18,6 @@ type Group struct { // Group implements Item interface var _ Item = &Group{} -// ID returns id func (g *Group) ID() id.PropertyItemID { if g == nil { return id.PropertyItemID{} @@ -26,7 +25,6 @@ func (g *Group) ID() id.PropertyItemID { return g.itemBase.ID } -// IDRef returns a reference of id func (g *Group) IDRef() *id.PropertyItemID { if g == nil { return nil @@ -34,23 +32,20 @@ func (g *Group) IDRef() 
*id.PropertyItemID { return g.itemBase.ID.Ref() } -// SchemaGroup returns id of schema group -func (g *Group) SchemaGroup() id.PropertySchemaFieldID { +func (g *Group) SchemaGroup() id.PropertySchemaGroupID { if g == nil { - return id.PropertySchemaFieldID("") + return id.PropertySchemaGroupID("") } return g.itemBase.SchemaGroup } -// SchemaGroupRef _ -func (g *Group) SchemaGroupRef() *id.PropertySchemaFieldID { +func (g *Group) SchemaGroupRef() *id.PropertySchemaGroupID { if g == nil { return nil } return g.itemBase.SchemaGroup.Ref() } -// Schema _ func (g *Group) Schema() id.PropertySchemaID { if g == nil { return id.PropertySchemaID{} @@ -66,7 +61,6 @@ func (g *Group) SchemaRef() *id.PropertySchemaID { return g.itemBase.Schema.Ref() } -// HasLinkedField _ func (g *Group) HasLinkedField() bool { if g == nil { return false @@ -79,7 +73,6 @@ func (g *Group) HasLinkedField() bool { return false } -// CollectDatasets _ func (g *Group) CollectDatasets() []id.DatasetID { if g == nil { return nil @@ -93,7 +86,6 @@ func (g *Group) CollectDatasets() []id.DatasetID { return res } -// FieldsByLinkedDataset _ func (g *Group) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { if g == nil { return nil @@ -107,7 +99,6 @@ func (g *Group) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*F return res } -// IsDatasetLinked _ func (g *Group) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { if g == nil { return false @@ -120,7 +111,6 @@ func (g *Group) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { return false } -// IsEmpty _ func (g *Group) IsEmpty() bool { if g != nil { for _, f := range g.fields { @@ -132,7 +122,6 @@ func (g *Group) IsEmpty() bool { return true } -// Prune _ func (g *Group) Prune() { if g == nil { return @@ -161,7 +150,6 @@ func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset g.Prune() } -// GetOrCreateField _ func (g *Group) GetOrCreateField(ps *Schema, fid 
id.PropertySchemaFieldID) (*Field, bool) { if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { return nil, false @@ -192,7 +180,6 @@ func (g *Group) GetOrCreateField(ps *Schema, fid id.PropertySchemaFieldID) (*Fie return field, true } -// RemoveField _ func (g *Group) RemoveField(fid id.PropertySchemaFieldID) { if g == nil { return @@ -205,7 +192,6 @@ func (g *Group) RemoveField(fid id.PropertySchemaFieldID) { } } -// FieldIDs _ func (g *Group) FieldIDs() []id.PropertySchemaFieldID { if g == nil { return nil @@ -238,7 +224,6 @@ func (g *Group) Field(fid id.PropertySchemaFieldID) *Field { return nil } -// MigrateDataset _ func (g *Group) MigrateDataset(q DatasetMigrationParam) { if g == nil { return diff --git a/pkg/property/group_builder.go b/pkg/property/group_builder.go index f7c4a63e9..d80d12aa7 100644 --- a/pkg/property/group_builder.go +++ b/pkg/property/group_builder.go @@ -2,19 +2,16 @@ package property import "github.com/reearth/reearth-backend/pkg/id" -// GroupBuilder _ type GroupBuilder struct { p *Group } -// NewGroup _ func NewGroup() *GroupBuilder { return &GroupBuilder{ p: &Group{}, } } -// InitGroupFrom _ func InitGroupFrom(g *SchemaGroup) *Group { if g == nil { return nil @@ -23,7 +20,6 @@ func InitGroupFrom(g *SchemaGroup) *Group { return g2 } -// Build _ func (b *GroupBuilder) Build() (*Group, error) { if id.ID(b.p.itemBase.ID).IsNil() { return nil, id.ErrInvalidID @@ -31,7 +27,6 @@ func (b *GroupBuilder) Build() (*Group, error) { return b.p, nil } -// MustBuild _ func (b *GroupBuilder) MustBuild() *Group { p, err := b.Build() if err != nil { @@ -45,26 +40,22 @@ func (b *GroupBuilder) base(base itemBase) *GroupBuilder { return b } -// ID _ func (b *GroupBuilder) ID(id id.PropertyItemID) *GroupBuilder { b.p.itemBase.ID = id return b } -// NewID _ func (b *GroupBuilder) NewID() *GroupBuilder { b.p.itemBase.ID = id.NewPropertyItemID() return b } -// Schema _ -func (b *GroupBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaFieldID) 
*GroupBuilder { +func (b *GroupBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGroupID) *GroupBuilder { b.p.itemBase.Schema = s b.p.itemBase.SchemaGroup = g return b } -// Fields _ func (b *GroupBuilder) Fields(fields []*Field) *GroupBuilder { var newFields []*Field ids := map[id.PropertySchemaFieldID]struct{}{} diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index 298b23981..8eb75aaa2 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -18,12 +18,12 @@ func TestGroupBuilder_Build(t *testing.T) { Name string Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Fields []*Field Expected struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Fields []*Field } Err error @@ -41,7 +41,7 @@ func TestGroupBuilder_Build(t *testing.T) { Expected: struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Fields []*Field }{ Id: iid, @@ -81,12 +81,12 @@ func TestGroupBuilder_MustBuild(t *testing.T) { Fail bool Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Fields []*Field Expected struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Fields []*Field } }{ @@ -103,7 +103,7 @@ func TestGroupBuilder_MustBuild(t *testing.T) { Expected: struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Fields []*Field }{ Id: iid, diff --git a/pkg/property/group_list.go b/pkg/property/group_list.go index 631765aeb..e6ddb1cf0 100644 --- a/pkg/property/group_list.go +++ b/pkg/property/group_list.go @@ -35,15 +35,15 @@ func (g *GroupList) IDRef() 
*id.PropertyItemID { } // SchemaGroup returns id of schema group -func (g *GroupList) SchemaGroup() id.PropertySchemaFieldID { +func (g *GroupList) SchemaGroup() id.PropertySchemaGroupID { if g == nil { - return id.PropertySchemaFieldID("") + return id.PropertySchemaGroupID("") } return g.itemBase.SchemaGroup } // SchemaGroupRef _ -func (g *GroupList) SchemaGroupRef() *id.PropertySchemaFieldID { +func (g *GroupList) SchemaGroupRef() *id.PropertySchemaGroupID { if g == nil { return nil } diff --git a/pkg/property/group_list_builder.go b/pkg/property/group_list_builder.go index 0f6d55b2e..bebb6a9b1 100644 --- a/pkg/property/group_list_builder.go +++ b/pkg/property/group_list_builder.go @@ -2,19 +2,16 @@ package property import "github.com/reearth/reearth-backend/pkg/id" -// GroupListBuilder _ type GroupListBuilder struct { p *GroupList } -// NewGroupList _ func NewGroupList() *GroupListBuilder { return &GroupListBuilder{ p: &GroupList{}, } } -// InitGroupListFrom _ func InitGroupListFrom(g *SchemaGroup) *GroupList { if g == nil || !g.IsList() { return nil @@ -23,7 +20,6 @@ func InitGroupListFrom(g *SchemaGroup) *GroupList { return g2 } -// Build _ func (b *GroupListBuilder) Build() (*GroupList, error) { if id.ID(b.p.itemBase.ID).IsNil() { return nil, id.ErrInvalidID @@ -31,7 +27,6 @@ func (b *GroupListBuilder) Build() (*GroupList, error) { return b.p, nil } -// MustBuild _ func (b *GroupListBuilder) MustBuild() *GroupList { p, err := b.Build() if err != nil { @@ -45,26 +40,22 @@ func (b *GroupListBuilder) base(base itemBase) *GroupListBuilder { return b } -// ID _ func (b *GroupListBuilder) ID(id id.PropertyItemID) *GroupListBuilder { b.p.itemBase.ID = id return b } -// NewID _ func (b *GroupListBuilder) NewID() *GroupListBuilder { b.p.itemBase.ID = id.NewPropertyItemID() return b } -// Schema _ -func (b *GroupListBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaFieldID) *GroupListBuilder { +func (b *GroupListBuilder) Schema(s id.PropertySchemaID, g 
id.PropertySchemaGroupID) *GroupListBuilder { b.p.itemBase.Schema = s b.p.itemBase.SchemaGroup = g return b } -// Groups _ func (b *GroupListBuilder) Groups(fields []*Group) *GroupListBuilder { newGroups := []*Group{} ids := map[id.PropertyItemID]struct{}{} diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go index e5a2f2175..8ef9d7d6a 100644 --- a/pkg/property/group_list_builder_test.go +++ b/pkg/property/group_list_builder_test.go @@ -16,12 +16,12 @@ func TestGroupListBuilder_Build(t *testing.T) { Name string Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Groups []*Group Expected struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Groups []*Group } Err error @@ -35,7 +35,7 @@ func TestGroupListBuilder_Build(t *testing.T) { Expected: struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Groups []*Group }{ Id: pid, @@ -80,12 +80,12 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { Fails bool Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Groups []*Group Expected struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Groups []*Group } }{ @@ -98,7 +98,7 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { Expected: struct { Id id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID Groups []*Group }{ Id: pid, @@ -140,7 +140,7 @@ func TestInitGroupListFrom(t *testing.T) { testCases := []struct { Name string SchemaGroup *SchemaGroup - ExpectedSG id.PropertySchemaFieldID + ExpectedSG id.PropertySchemaGroupID ExpectedSchema id.PropertySchemaID }{ { diff --git 
a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index c09a88f9c..a7ee4c37e 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -18,7 +18,7 @@ func TestGroupList_SchemaRef(t *testing.T) { testCases := []struct { Name string GL *GroupList - ExpectedSG *id.PropertySchemaFieldID + ExpectedSG *id.PropertySchemaGroupID ExpectedSchema *id.PropertySchemaID }{ { @@ -26,8 +26,8 @@ func TestGroupList_SchemaRef(t *testing.T) { }, { Name: "success", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), id.PropertySchemaFieldID("xx")).MustBuild(), - ExpectedSG: id.PropertySchemaFieldID("xx").Ref(), + GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), id.PropertySchemaGroupID("xx")).MustBuild(), + ExpectedSG: id.PropertySchemaGroupID("xx").Ref(), ExpectedSchema: id.MustPropertySchemaID("xx~1.0.0/aa").Ref(), }, } @@ -653,7 +653,7 @@ func TestGroupList_Remove(t *testing.T) { func TestGroupList_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sf.ID()).MustBuild() + g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -712,7 +712,7 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { getIntRef := func(i int) *int { return &i } sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sf.ID()).MustBuild() + g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() testCases := []struct { Name string GL *GroupList diff --git a/pkg/property/group_test.go 
b/pkg/property/group_test.go index df89c2096..053507d10 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -19,8 +19,8 @@ func TestGroup_IDRef(t *testing.T) { func TestGroup_SchemaGroup(t *testing.T) { var g *Group assert.Nil(t, g.SchemaGroupRef()) - assert.Equal(t, id.PropertySchemaFieldID(""), g.SchemaGroup()) - pfid := id.PropertySchemaFieldID("aa") + assert.Equal(t, id.PropertySchemaGroupID(""), g.SchemaGroup()) + pfid := id.PropertySchemaGroupID("aa") g = NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), pfid).MustBuild() assert.Equal(t, pfid, g.SchemaGroup()) assert.Equal(t, pfid.Ref(), g.SchemaGroupRef()) diff --git a/pkg/property/initializer.go b/pkg/property/initializer.go index a47144dc2..245cc958e 100644 --- a/pkg/property/initializer.go +++ b/pkg/property/initializer.go @@ -92,7 +92,7 @@ func (p *Initializer) MustBeProperty(scene id.SceneID) *Property { type InitializerItem struct { ID *id.PropertyItemID `json:"id"` - SchemaItem id.PropertySchemaFieldID `json:"schemaItem"` + SchemaItem id.PropertySchemaGroupID `json:"schemaItem"` Groups []*InitializerGroup `json:"groups"` Fields []*InitializerField `json:"fields"` } @@ -206,7 +206,7 @@ func (p *InitializerGroup) Clone() *InitializerGroup { } } -func (p *InitializerGroup) PropertyGroup(parent id.PropertySchemaID, parentItem id.PropertySchemaFieldID) (*Group, error) { +func (p *InitializerGroup) PropertyGroup(parent id.PropertySchemaID, parentItem id.PropertySchemaGroupID) (*Group, error) { if p == nil { return nil, nil } diff --git a/pkg/property/initializer_test.go b/pkg/property/initializer_test.go index 1b0ab59aa..a965137c7 100644 --- a/pkg/property/initializer_test.go +++ b/pkg/property/initializer_test.go @@ -13,7 +13,7 @@ func TestInitializer_Clone(t *testing.T) { Schema: id.MustPropertySchemaID("reearth/marker"), Items: []*InitializerItem{{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: 
id.PropertySchemaGroupID("hoge"), }}, } @@ -32,7 +32,7 @@ func TestInitializer_Property(t *testing.T) { Schema: id.MustPropertySchemaID("reearth/marker"), Items: []*InitializerItem{{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), }}, } @@ -69,7 +69,7 @@ func TestInitializer_PropertyIncludingEmpty(t *testing.T) { Schema: psid2, Items: []*InitializerItem{{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), }}, } @@ -88,7 +88,7 @@ func TestInitializer_PropertyIncludingEmpty(t *testing.T) { func TestInitializerItem_Clone(t *testing.T) { item := &InitializerItem{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), Groups: []*InitializerGroup{{ ID: id.NewPropertyItemID().Ref(), Fields: []*InitializerField{{ @@ -118,7 +118,7 @@ func TestInitializerItem_PropertyItem(t *testing.T) { parent := id.MustPropertySchemaID("reearth/marker") item := &InitializerItem{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), } expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().MustBuild() @@ -137,7 +137,7 @@ func TestInitializerItem_PropertyGroup(t *testing.T) { parent := id.MustPropertySchemaID("reearth/marker") item := &InitializerItem{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), Fields: []*InitializerField{{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, @@ -160,7 +160,7 @@ func TestInitializerItem_PropertyGroupList(t *testing.T) { parent := id.MustPropertySchemaID("reearth/marker") item := &InitializerItem{ ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaFieldID("hoge"), + SchemaItem: id.PropertySchemaGroupID("hoge"), Groups: 
[]*InitializerGroup{{ ID: id.NewPropertyItemID().Ref(), }}, @@ -202,7 +202,7 @@ func TestInitializerGroup_Clone(t *testing.T) { func TestInitializerGroup_PropertyGroup(t *testing.T) { parent := id.MustPropertySchemaID("reearth/marker") - parentItem := id.PropertySchemaFieldID("hoge") + parentItem := id.PropertySchemaGroupID("hoge") item := &InitializerGroup{ ID: id.NewPropertyItemID().Ref(), Fields: []*InitializerField{{ diff --git a/pkg/property/item.go b/pkg/property/item.go index 0c24addba..65807e0b0 100644 --- a/pkg/property/item.go +++ b/pkg/property/item.go @@ -7,12 +7,11 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -// Item _ type Item interface { ID() id.PropertyItemID IDRef() *id.PropertyItemID - SchemaGroup() id.PropertySchemaFieldID - SchemaGroupRef() *id.PropertySchemaFieldID + SchemaGroup() id.PropertySchemaGroupID + SchemaGroupRef() *id.PropertySchemaGroupID Schema() id.PropertySchemaID SchemaRef() *id.PropertySchemaID HasLinkedField() bool @@ -29,22 +28,19 @@ type Item interface { type itemBase struct { ID id.PropertyItemID Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID } -// ToGroup _ func ToGroup(i Item) *Group { g, _ := i.(*Group) return g } -// ToGroupList _ func ToGroupList(i Item) *GroupList { g, _ := i.(*GroupList) return g } -// InitItemFrom _ func InitItemFrom(psg *SchemaGroup) Item { if psg == nil { return nil diff --git a/pkg/property/item_builder.go b/pkg/property/item_builder.go index 09ead6c9c..67c8d21e7 100644 --- a/pkg/property/item_builder.go +++ b/pkg/property/item_builder.go @@ -28,7 +28,7 @@ func (b *ItemBuilder) NewID() *ItemBuilder { return b } -func (b *ItemBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaFieldID) *ItemBuilder { +func (b *ItemBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGroupID) *ItemBuilder { b.base.Schema = s b.base.SchemaGroup = g return b diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index 
c6d22c3e5..77b197f13 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -13,7 +13,7 @@ func TestInitItemFrom(t *testing.T) { sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() iid := id.NewPropertyItemID() propertySchemaID := id.MustPropertySchemaID("xx~1.0.0/aa") - propertySchemaField1ID := id.PropertySchemaFieldID("aa") + propertySchemaField1ID := id.PropertySchemaGroupID("aa") testCases := []struct { Name string SG *SchemaGroup @@ -53,7 +53,7 @@ func TestToGroup(t *testing.T) { iid := id.NewPropertyItemID() propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + propertySchemaGroup1ID := id.PropertySchemaGroupID("A") il := []Item{ NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). Fields([]*Field{ @@ -74,7 +74,7 @@ func TestToGroup(t *testing.T) { func TestToGroupList(t *testing.T) { iid := id.NewPropertyItemID() propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") - propertySchemaGroup1ID := id.PropertySchemaFieldID("A") + propertySchemaGroup1ID := id.PropertySchemaGroupID("A") il := []Item{ NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID).MustBuild(), } diff --git a/pkg/property/merged.go b/pkg/property/merged.go index cff3660ac..9b049c5b2 100644 --- a/pkg/property/merged.go +++ b/pkg/property/merged.go @@ -20,7 +20,7 @@ type Merged struct { type MergedGroup struct { Original *id.PropertyItemID Parent *id.PropertyItemID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID LinkedDataset *id.DatasetID Groups []*MergedGroup Fields []*MergedField @@ -223,7 +223,7 @@ func mergeItem(o, p Item, linked *id.DatasetID) *MergedGroup { } var oid, pid *id.PropertyItemID - var sg id.PropertySchemaFieldID + var sg id.PropertySchemaGroupID if o != nil { oid = o.IDRef() sg = 
o.SchemaGroup() diff --git a/pkg/property/merged_test.go b/pkg/property/merged_test.go index a7fc5e974..4928117f1 100644 --- a/pkg/property/merged_test.go +++ b/pkg/property/merged_test.go @@ -17,10 +17,10 @@ func TestMerge(t *testing.T) { ppid := id.NewPropertyID() psid := id.MustPropertySchemaID("hoge~0.1.0/fff") psid2 := id.MustPropertySchemaID("hoge~0.1.0/aaa") - psgid1 := id.PropertySchemaFieldID("group1") - psgid2 := id.PropertySchemaFieldID("group2") - psgid3 := id.PropertySchemaFieldID("group3") - psgid4 := id.PropertySchemaFieldID("group4") + psgid1 := id.PropertySchemaGroupID("group1") + psgid2 := id.PropertySchemaGroupID("group2") + psgid3 := id.PropertySchemaGroupID("group3") + psgid4 := id.PropertySchemaGroupID("group4") i1id := id.NewPropertyItemID() i2id := id.NewPropertyItemID() i3id := id.NewPropertyItemID() diff --git a/pkg/property/migrator.go b/pkg/property/migrator.go new file mode 100644 index 000000000..b060d47ca --- /dev/null +++ b/pkg/property/migrator.go @@ -0,0 +1,15 @@ +package property + +type Migrator struct { + NewSchema *Schema + Plans []MigrationPlan +} + +type MigrationPlan struct { + From *Pointer + To *Pointer +} + +// func (m Migrator) Migrate(from *Property) *Property { + +// } diff --git a/pkg/property/pointer.go b/pkg/property/pointer.go index c7933a6e6..658f0a82e 100644 --- a/pkg/property/pointer.go +++ b/pkg/property/pointer.go @@ -2,72 +2,64 @@ package property import "github.com/reearth/reearth-backend/pkg/id" -// Pointer _ +// Pointer is a pointer to a field and an item in properties and schemas type Pointer struct { - schemaGroup *id.PropertySchemaFieldID - item *id.PropertyItemID - field *id.PropertySchemaFieldID + schemaItem *id.PropertySchemaGroupID + item *id.PropertyItemID + field *id.PropertySchemaFieldID } -// NewPointer _ -func NewPointer(sg *id.PropertySchemaFieldID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) *Pointer { +// NewPointer creates a new Pointer. 
+func NewPointer(sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) *Pointer { if sg == nil && i == nil && f == nil { return nil } return &Pointer{ - schemaGroup: sg.CopyRef(), - item: i.CopyRef(), - field: f.CopyRef(), + schemaItem: sg.CopyRef(), + item: i.CopyRef(), + field: f.CopyRef(), } } -// PointField _ -func PointField(sg *id.PropertySchemaFieldID, i *id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { +// PointField creates a new Pointer pointing the field in properties. +func PointField(sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { return &Pointer{ - schemaGroup: sg.CopyRef(), - item: i.CopyRef(), - field: &f, + schemaItem: sg.CopyRef(), + item: i.CopyRef(), + field: &f, } } -// PointFieldOnly _ +// PointField creates a new Pointer pointing the field in property schemas. func PointFieldOnly(fid id.PropertySchemaFieldID) *Pointer { return &Pointer{ field: &fid, } } -// PointItemBySchema _ -func PointItemBySchema(sg id.PropertySchemaFieldID) *Pointer { +// PointItemBySchema creates a new Pointer pointing the schema item in property schemas. +func PointItemBySchema(sg id.PropertySchemaGroupID) *Pointer { return &Pointer{ - schemaGroup: &sg, + schemaItem: &sg, } } -// PointItem _ +// PointItem creates a new Pointer pointing to the item in properties. func PointItem(i id.PropertyItemID) *Pointer { return &Pointer{ item: &i, } } -// PointFieldBySchemaGroup _ -func PointFieldBySchemaGroup(sg id.PropertySchemaFieldID, f id.PropertySchemaFieldID) *Pointer { +// PointFieldBySchemaGroup creates a new Pointer pointing to the field of the schema field in properties. 
+func PointFieldBySchemaGroup(sg id.PropertySchemaGroupID, f id.PropertySchemaFieldID) *Pointer { return &Pointer{ - schemaGroup: &sg, - field: &f, + schemaItem: &sg, + field: &f, } } -// PointFieldBySchemaGroupAndItem _ -func PointFieldBySchemaGroupAndItem(sg id.PropertySchemaFieldID, i id.PropertyItemID) *Pointer { - return &Pointer{ - schemaGroup: &sg, - item: &i, - } -} - -// PointFieldByItem _ +// PointFieldByItem creates a new Pointer pointing to the field of the item in properties. func PointFieldByItem(i id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { return &Pointer{ item: &i, @@ -75,49 +67,45 @@ func PointFieldByItem(i id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer } } -// Clone _ func (p *Pointer) Clone() *Pointer { if p == nil { return nil } return &Pointer{ - field: p.field.CopyRef(), - item: p.item.CopyRef(), - schemaGroup: p.schemaGroup.CopyRef(), + field: p.field.CopyRef(), + item: p.item.CopyRef(), + schemaItem: p.schemaItem.CopyRef(), } } -// ItemBySchemaGroupAndItem _ -func (p *Pointer) ItemBySchemaGroupAndItem() (i id.PropertySchemaFieldID, i2 id.PropertyItemID, ok bool) { - if p == nil || p.schemaGroup == nil || p.item == nil { +func (p *Pointer) ItemBySchemaGroupAndItem() (i id.PropertySchemaGroupID, i2 id.PropertyItemID, ok bool) { + if p == nil || p.schemaItem == nil || p.item == nil { ok = false return } - i = *p.schemaGroup + i = *p.schemaItem i2 = *p.item ok = true return } -// ItemBySchemaGroup _ -func (p *Pointer) ItemBySchemaGroup() (i id.PropertySchemaFieldID, ok bool) { - if p == nil || p.schemaGroup == nil { +func (p *Pointer) ItemBySchemaGroup() (i id.PropertySchemaGroupID, ok bool) { + if p == nil || p.schemaItem == nil { ok = false return } - i = *p.schemaGroup + i = *p.schemaItem ok = true return } -// SchemaGroupAndItem _ -func (p *Pointer) SchemaGroupAndItem() (i id.PropertySchemaFieldID, i2 id.PropertyItemID, ok bool) { +func (p *Pointer) SchemaGroupAndItem() (i id.PropertySchemaGroupID, i2 
id.PropertyItemID, ok bool) { ok = false if p == nil { return } - if p.schemaGroup != nil { - i = *p.schemaGroup + if p.schemaItem != nil { + i = *p.schemaItem ok = true } if p.item != nil { @@ -127,7 +115,6 @@ func (p *Pointer) SchemaGroupAndItem() (i id.PropertySchemaFieldID, i2 id.Proper return } -// Item _ func (p *Pointer) Item() (i id.PropertyItemID, ok bool) { if p == nil || p.item == nil { ok = false @@ -146,9 +133,8 @@ func (p *Pointer) ItemRef() *id.PropertyItemID { return &f } -// FieldByItem _ func (p *Pointer) FieldByItem() (i id.PropertyItemID, f id.PropertySchemaFieldID, ok bool) { - if p == nil || p.item == nil || p.schemaGroup != nil || p.field == nil { + if p == nil || p.item == nil || p.schemaItem != nil || p.field == nil { ok = false return } @@ -158,19 +144,17 @@ func (p *Pointer) FieldByItem() (i id.PropertyItemID, f id.PropertySchemaFieldID return } -// FieldBySchemaGroup _ -func (p *Pointer) FieldBySchemaGroup() (sg id.PropertySchemaFieldID, f id.PropertySchemaFieldID, ok bool) { - if p == nil || p.schemaGroup == nil || p.item != nil || p.field == nil { +func (p *Pointer) FieldBySchemaGroup() (sg id.PropertySchemaGroupID, f id.PropertySchemaFieldID, ok bool) { + if p == nil || p.schemaItem == nil || p.item != nil || p.field == nil { ok = false return } - sg = *p.schemaGroup + sg = *p.schemaItem f = *p.field ok = true return } -// Field _ func (p *Pointer) Field() (f id.PropertySchemaFieldID, ok bool) { if p == nil || p.field == nil { ok = false @@ -189,12 +173,11 @@ func (p *Pointer) FieldRef() *id.PropertySchemaFieldID { return &f } -// GetAll _ -func (p *Pointer) GetAll() (sg *id.PropertySchemaFieldID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) { +func (p *Pointer) GetAll() (sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) { if p == nil { return } - sg = p.schemaGroup.CopyRef() + sg = p.schemaItem.CopyRef() i = p.item.CopyRef() f = p.field.CopyRef() return diff --git a/pkg/property/pointer_test.go 
b/pkg/property/pointer_test.go index bce148810..c1225a4e8 100644 --- a/pkg/property/pointer_test.go +++ b/pkg/property/pointer_test.go @@ -9,7 +9,7 @@ import ( func TestPointer(t *testing.T) { iid := id.NewPropertyItemID() - sgid := id.PropertySchemaFieldID("foo") + sgid := id.PropertySchemaGroupID("foo") fid := id.PropertySchemaFieldID("hoge") var p *Pointer diff --git a/pkg/property/property.go b/pkg/property/property.go index 389ed44f5..1fbbb2eb2 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -79,8 +79,8 @@ func (p *Property) Item(id id.PropertyItemID) (Item, *GroupList) { return nil, nil } -// ItemBySchema returns a root item by a schema field ID. -func (p *Property) ItemBySchema(id id.PropertySchemaFieldID) Item { +// ItemBySchema returns a root item by a schema group ID. +func (p *Property) ItemBySchema(id id.PropertySchemaGroupID) Item { if p == nil { return nil } @@ -92,7 +92,7 @@ func (p *Property) ItemBySchema(id id.PropertySchemaFieldID) Item { return nil } -func (p *Property) GroupBySchema(id id.PropertySchemaFieldID) *Group { +func (p *Property) GroupBySchema(id id.PropertySchemaGroupID) *Group { i := p.ItemBySchema(id) if i == nil { return nil @@ -103,7 +103,7 @@ func (p *Property) GroupBySchema(id id.PropertySchemaFieldID) *Group { return nil } -func (p *Property) GroupListBySchema(id id.PropertySchemaFieldID) *GroupList { +func (p *Property) GroupListBySchema(id id.PropertySchemaGroupID) *GroupList { i := p.ItemBySchema(id) if i == nil { return nil diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index b85ec1b8c..56ee60c15 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -22,7 +22,7 @@ func TestPropertyMigrateSchema(t *testing.T) { schemaField6ID := id.PropertySchemaFieldID("f") schemaField7ID := id.PropertySchemaFieldID("g") schemaField8ID := id.PropertySchemaFieldID("h") - schemaField9ID := id.PropertySchemaFieldID("i") + schemaGroupID := 
id.PropertySchemaGroupID("i") datasetID := id.NewDatasetID() datasetSchemaID := id.NewDatasetSchemaID() datasetFieldID := id.NewDatasetSchemaFieldID() @@ -47,7 +47,7 @@ func TestPropertyMigrateSchema(t *testing.T) { schemaField7, } schemaGroups := []*SchemaGroup{ - NewSchemaGroup().ID(schemaField9ID).Schema(oldSchema).Fields(schemaFields).MustBuild(), + NewSchemaGroup().ID(schemaGroupID).Schema(oldSchema).Fields(schemaFields).MustBuild(), } fields := []*Field{ @@ -89,7 +89,7 @@ func TestPropertyMigrateSchema(t *testing.T) { Build(), } items := []Item{ - NewGroup().NewID().Schema(oldSchema, schemaField9ID).Fields(fields).MustBuild(), + NewGroup().NewID().Schema(oldSchema, schemaGroupID).Fields(fields).MustBuild(), } datasetFields := []*dataset.Field{ @@ -102,7 +102,7 @@ func TestPropertyMigrateSchema(t *testing.T) { property.MigrateSchema(context.Background(), schema, dataset.LoaderFrom([]*dataset.Dataset{ds})) - newGroup := ToGroup(property.ItemBySchema(schemaField9ID)) + newGroup := ToGroup(property.ItemBySchema(schemaGroupID)) newFields := newGroup.Fields() assert.Equal(t, schema.ID(), property.Schema()) @@ -119,8 +119,8 @@ func TestGetOrCreateItem(t *testing.T) { sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sf1id := id.PropertySchemaFieldID("a") sf2id := id.PropertySchemaFieldID("b") - sg1id := id.PropertySchemaFieldID("c") - sg2id := id.PropertySchemaFieldID("d") + sg1id := id.PropertySchemaGroupID("c") + sg2id := id.PropertySchemaGroupID("d") sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() @@ -169,8 +169,8 @@ func TestGetOrCreateField(t *testing.T) { sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sf1id := id.PropertySchemaFieldID("a") sf2id := id.PropertySchemaFieldID("b") - sg1id := id.PropertySchemaFieldID("c") - sg2id := id.PropertySchemaFieldID("d") + sg1id := id.PropertySchemaGroupID("c") + sg2id := id.PropertySchemaGroupID("d") sf1 := 
NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() @@ -220,7 +220,7 @@ func TestAddListItem(t *testing.T) { sceneID := id.NewSceneID() sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") sfid := id.PropertySchemaFieldID("a") - sgid := id.PropertySchemaFieldID("b") + sgid := id.PropertySchemaGroupID("b") sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID(sgid).Schema(sid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() ps := NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild() @@ -241,7 +241,7 @@ func TestAddListItem(t *testing.T) { func TestMoveListItem(t *testing.T) { sceneID := id.NewSceneID() sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sgid := id.PropertySchemaFieldID("b") + sgid := id.PropertySchemaGroupID("b") g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() @@ -256,7 +256,7 @@ func TestMoveListItem(t *testing.T) { func TestRemoveListItem(t *testing.T) { sceneID := id.NewSceneID() sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sgid := id.PropertySchemaFieldID("b") + sgid := id.PropertySchemaGroupID("b") g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() diff --git a/pkg/property/schema.go b/pkg/property/schema.go index f3d6d09c4..b14eaa043 100644 --- a/pkg/property/schema.go +++ b/pkg/property/schema.go @@ -10,18 +10,15 @@ type Schema struct { linkable LinkableFields } -// LinkableFields _ type LinkableFields struct { LatLng *Pointer URL *Pointer } -// ID _ func (p *Schema) ID() id.PropertySchemaID { return p.id } -// IDRef _ func (p *Schema) IDRef() *id.PropertySchemaID { if p == nil { return nil @@ -29,12 +26,10 
@@ func (p *Schema) IDRef() *id.PropertySchemaID { return p.id.Ref() } -// Version _ func (p *Schema) Version() int { return p.version } -// Fields _ func (p *Schema) Fields() []*SchemaField { if p == nil { return nil @@ -46,7 +41,6 @@ func (p *Schema) Fields() []*SchemaField { return fields } -// Field _ func (p *Schema) Field(id id.PropertySchemaFieldID) *SchemaField { if p == nil { return nil @@ -59,7 +53,6 @@ func (p *Schema) Field(id id.PropertySchemaFieldID) *SchemaField { return nil } -// FieldByPointer _ func (p *Schema) FieldByPointer(ptr *Pointer) *SchemaField { if p == nil { return nil @@ -71,7 +64,6 @@ func (p *Schema) FieldByPointer(ptr *Pointer) *SchemaField { return g.FieldByPointer(ptr) } -// Groups _ func (p *Schema) Groups() []*SchemaGroup { if p == nil { return nil @@ -79,8 +71,7 @@ func (p *Schema) Groups() []*SchemaGroup { return append([]*SchemaGroup{}, p.groups...) } -// Group _ -func (p *Schema) Group(id id.PropertySchemaFieldID) *SchemaGroup { +func (p *Schema) Group(id id.PropertySchemaGroupID) *SchemaGroup { if p == nil { return nil } @@ -92,7 +83,6 @@ func (p *Schema) Group(id id.PropertySchemaFieldID) *SchemaGroup { return nil } -// GroupByField _ func (p *Schema) GroupByField(id id.PropertySchemaFieldID) *SchemaGroup { if p == nil { return nil @@ -105,7 +95,6 @@ func (p *Schema) GroupByField(id id.PropertySchemaFieldID) *SchemaGroup { return nil } -// GroupByPointer _ func (p *Schema) GroupByPointer(ptr *Pointer) *SchemaGroup { if p == nil { return nil @@ -125,7 +114,6 @@ func (p *Schema) GroupByPointer(ptr *Pointer) *SchemaGroup { return nil } -// DetectDuplicatedFields _ func (s *Schema) DetectDuplicatedFields() []id.PropertySchemaFieldID { duplicated := []id.PropertySchemaFieldID{} ids := map[id.PropertySchemaFieldID]struct{}{} @@ -140,7 +128,6 @@ func (s *Schema) DetectDuplicatedFields() []id.PropertySchemaFieldID { return nil } -// LinkableFields _ func (p *Schema) LinkableFields() LinkableFields { if p == nil { return 
LinkableFields{} @@ -148,7 +135,6 @@ func (p *Schema) LinkableFields() LinkableFields { return p.linkable.Clone() } -// Clone _ func (l LinkableFields) Clone() LinkableFields { return LinkableFields{ LatLng: l.LatLng.Clone(), @@ -156,7 +142,6 @@ func (l LinkableFields) Clone() LinkableFields { } } -// Validate _ func (l LinkableFields) Validate(s *Schema) bool { if s == nil { return false diff --git a/pkg/property/schema_builder.go b/pkg/property/schema_builder.go index deb598630..d62a30fb3 100644 --- a/pkg/property/schema_builder.go +++ b/pkg/property/schema_builder.go @@ -8,12 +8,12 @@ import ( ) var ( - ErrInvalidSceneID error = errors.New("invalid scene id") - ErrInvalidPropertySchemaID error = errors.New("invalid property schema id") - ErrInvalidValue error = errors.New("invalid value") - ErrInvalidPropertyLinkableField error = errors.New("invalid property linkable field") - ErrInvalidVersion error = errors.New("invalid version") - ErrDuplicatedField = errors.New("duplicated field") + ErrInvalidSceneID = errors.New("invalid scene id") + ErrInvalidPropertySchemaID = errors.New("invalid property schema id") + ErrInvalidValue = errors.New("invalid value") + ErrInvalidPropertyLinkableField = errors.New("invalid property linkable field") + ErrInvalidVersion = errors.New("invalid version") + ErrDuplicatedField = errors.New("duplicated field") ) type SchemaBuilder struct { @@ -57,7 +57,7 @@ func (b *SchemaBuilder) Version(version int) *SchemaBuilder { func (b *SchemaBuilder) Groups(groups []*SchemaGroup) *SchemaBuilder { newGroups := []*SchemaGroup{} - ids := map[id.PropertySchemaFieldID]struct{}{} + ids := map[id.PropertySchemaGroupID]struct{}{} for _, f := range groups { if f == nil { continue diff --git a/pkg/property/schema_group.go b/pkg/property/schema_group.go index af9377860..73c0955f2 100644 --- a/pkg/property/schema_group.go +++ b/pkg/property/schema_group.go @@ -7,7 +7,7 @@ import ( // SchemaGroup represents a group of property that has some fields type 
SchemaGroup struct { - id id.PropertySchemaFieldID + id id.PropertySchemaGroupID sid id.PropertySchemaID fields []*SchemaField list bool @@ -17,14 +17,14 @@ type SchemaGroup struct { } // ID returns id -func (s *SchemaGroup) ID() id.PropertySchemaFieldID { +func (s *SchemaGroup) ID() id.PropertySchemaGroupID { if s == nil { - return id.PropertySchemaFieldID("") + return id.PropertySchemaGroupID("") } return s.id } -func (s *SchemaGroup) IDRef() *id.PropertySchemaFieldID { +func (s *SchemaGroup) IDRef() *id.PropertySchemaGroupID { if s == nil { return nil } diff --git a/pkg/property/schema_group_builder.go b/pkg/property/schema_group_builder.go index cef672c15..1b61fe117 100644 --- a/pkg/property/schema_group_builder.go +++ b/pkg/property/schema_group_builder.go @@ -30,7 +30,7 @@ func (b *SchemaGroupBuilder) MustBuild() *SchemaGroup { return p } -func (b *SchemaGroupBuilder) ID(id id.PropertySchemaFieldID) *SchemaGroupBuilder { +func (b *SchemaGroupBuilder) ID(id id.PropertySchemaGroupID) *SchemaGroupBuilder { b.p.id = id return b } diff --git a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go index 73da78d60..31876c493 100644 --- a/pkg/property/schema_group_builder_test.go +++ b/pkg/property/schema_group_builder_test.go @@ -1,7 +1,6 @@ package property import ( - "errors" "testing" "github.com/reearth/reearth-backend/pkg/i18n" @@ -11,25 +10,28 @@ import ( func TestSchemaGroupBuilder_Build(t *testing.T) { sid := id.MustPropertySchemaID("xx~1.0.0/aa") - gid := id.PropertySchemaFieldID("xx") + gid := id.PropertySchemaGroupID("xx") sf := NewSchemaField().ID("ff").Type(ValueTypeString).MustBuild() + + type expected struct { + ID id.PropertySchemaGroupID + Sid id.PropertySchemaID + Fields []*SchemaField + List bool + IsAvailableIf *Condition + Title i18n.String + } + testCases := []struct { Name string - Id id.PropertySchemaFieldID + ID id.PropertySchemaGroupID Sid id.PropertySchemaID Fields []*SchemaField List bool IsAvailableIf 
*Condition Title i18n.String - Expected struct { - Id id.PropertySchemaFieldID - Sid id.PropertySchemaID - Fields []*SchemaField - List bool - IsAvailableIf *Condition - Title i18n.String - } - Err error + Expected expected + Err error }{ { Name: "fail: invalid id", @@ -37,7 +39,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { }, { Name: "success", - Id: gid, + ID: gid, Sid: sid, Fields: []*SchemaField{sf, nil, sf}, List: true, @@ -46,15 +48,8 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { Value: ValueTypeString.ValueFromUnsafe("abc"), }, Title: i18n.StringFrom("tt"), - Expected: struct { - Id id.PropertySchemaFieldID - Sid id.PropertySchemaID - Fields []*SchemaField - List bool - IsAvailableIf *Condition - Title i18n.String - }{ - Id: gid, + Expected: expected{ + ID: gid, Sid: sid, Fields: []*SchemaField{sf}, List: true, @@ -67,7 +62,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { }, { Name: "success: nil name", - Id: gid, + ID: gid, Sid: sid, Fields: []*SchemaField{sf}, List: true, @@ -76,15 +71,8 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { Value: ValueTypeString.ValueFromUnsafe("abc"), }, Title: i18n.StringFrom("tt"), - Expected: struct { - Id id.PropertySchemaFieldID - Sid id.PropertySchemaID - Fields []*SchemaField - List bool - IsAvailableIf *Condition - Title i18n.String - }{ - Id: gid, + Expected: expected{ + ID: gid, Sid: sid, Fields: []*SchemaField{sf}, List: true, @@ -102,22 +90,22 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() res, err := NewSchemaGroup(). - ID(tc.Id). + ID(tc.ID). Schema(tc.Sid). Fields(tc.Fields). IsList(tc.List). Title(tc.Title). IsAvailableIf(tc.IsAvailableIf). 
Build() - if err == nil { + if tc.Err == nil { assert.Equal(tt, tc.Expected.IsAvailableIf, res.IsAvailableIf()) assert.Equal(tt, tc.Expected.Sid, res.Schema()) - assert.Equal(tt, tc.Expected.Id, res.ID()) + assert.Equal(tt, tc.Expected.ID, res.ID()) assert.Equal(tt, tc.Expected.Title, res.Title()) assert.Equal(tt, tc.Expected.List, res.IsList()) assert.Equal(tt, tc.Expected.Fields, res.Fields()) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(tt, tc.Err, err) } }) } diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go index 05cb88507..09b635226 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -9,7 +9,7 @@ import ( ) func TestSchemaGroup(t *testing.T) { - scid := id.PropertySchemaFieldID("aa") + scid := id.PropertySchemaGroupID("aa") sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() @@ -17,9 +17,9 @@ func TestSchemaGroup(t *testing.T) { Name string G *SchemaGroup Expected struct { - GIDRef *id.PropertySchemaFieldID + GIDRef *id.PropertySchemaGroupID SIDRef *id.PropertySchemaID - GID id.PropertySchemaFieldID + GID id.PropertySchemaGroupID SID id.PropertySchemaID Fields []*SchemaField Title i18n.String @@ -34,9 +34,9 @@ func TestSchemaGroup(t *testing.T) { Name: "success", G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), Expected: struct { - GIDRef *id.PropertySchemaFieldID + GIDRef *id.PropertySchemaGroupID SIDRef *id.PropertySchemaID - GID id.PropertySchemaFieldID + GID id.PropertySchemaGroupID SID id.PropertySchemaID Fields []*SchemaField Title i18n.String @@ -71,7 +71,7 @@ func TestSchemaGroup(t *testing.T) { } func TestSchemaGroup_Field(t *testing.T) { - scid := id.PropertySchemaFieldID("aa") + scid := id.PropertySchemaGroupID("aa") sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() @@ -112,7 +112,7 @@ func 
TestSchemaGroup_Field(t *testing.T) { } func TestSchemaGroup_SetTitle(t *testing.T) { - sg := NewSchemaGroup().ID(id.PropertySchemaFieldID("aa")).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID(id.PropertySchemaGroupID("aa")).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() sg.SetTitle(i18n.StringFrom("ttt")) assert.Equal(t, i18n.StringFrom("ttt"), sg.Title()) } diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go index bedc0d100..6c7120433 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -61,11 +61,12 @@ func TestSchema_Group(t *testing.T) { sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { - Name string - S *Schema - PTR *Pointer - Input, InputField id.PropertySchemaFieldID - Expected *SchemaGroup + Name string + S *Schema + PTR *Pointer + Input id.PropertySchemaGroupID + InputField id.PropertySchemaFieldID + Expected *SchemaGroup }{ { Name: "nil schema", @@ -82,7 +83,7 @@ func TestSchema_Group(t *testing.T) { Name: "not found", S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), - Input: id.PropertySchemaFieldID("zz"), + Input: id.PropertySchemaGroupID("zz"), }, } diff --git a/pkg/property/sealed.go b/pkg/property/sealed.go index a876f81d4..fa89cb4bb 100644 --- a/pkg/property/sealed.go +++ b/pkg/property/sealed.go @@ -18,7 +18,7 @@ type Sealed struct { type SealedItem struct { Original *id.PropertyItemID Parent *id.PropertyItemID - SchemaGroup id.PropertySchemaFieldID + SchemaGroup id.PropertySchemaGroupID LinkedDataset *id.DatasetID Fields []*SealedField Groups []*SealedItem @@ -192,7 +192,7 @@ func (s *Sealed) ItemBy(ptr *Pointer) *SealedItem { return nil } -func (s *Sealed) ItemBySchemaGroup(i id.PropertySchemaFieldID) *SealedItem { +func (s *Sealed) ItemBySchemaGroup(i 
id.PropertySchemaGroupID) *SealedItem { if s == nil { return nil } diff --git a/pkg/property/sealed_test.go b/pkg/property/sealed_test.go index 7d364be5e..eb0a2a143 100644 --- a/pkg/property/sealed_test.go +++ b/pkg/property/sealed_test.go @@ -17,8 +17,8 @@ var ( opid = id.NewPropertyID() ppid = id.NewPropertyID() psid = id.MustPropertySchemaID("hoge~0.1.0/fff") - psiid1 = id.PropertySchemaFieldID("x") - psiid2 = id.PropertySchemaFieldID("y") + psiid1 = id.PropertySchemaGroupID("x") + psiid2 = id.PropertySchemaGroupID("y") i1id = id.NewPropertyItemID() i2id = id.NewPropertyItemID() i3id = id.NewPropertyItemID() diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index 544a297b4..a3c92725f 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -24,8 +24,8 @@ func TestSceneBuilder(t *testing.T) { propertySchemaField1ID := id.PropertySchemaFieldID("a") propertySchemaField2ID := id.PropertySchemaFieldID("b") propertySchemaField3ID := id.PropertySchemaFieldID("c") - propertySchemaGroup1ID := id.PropertySchemaFieldID("A") - propertySchemaGroup2ID := id.PropertySchemaFieldID("B") + propertySchemaGroup1ID := id.PropertySchemaGroupID("A") + propertySchemaGroup2ID := id.PropertySchemaGroupID("B") propertyItemID1 := id.NewPropertyItemID() propertyItemID2 := id.NewPropertyItemID() diff --git a/pkg/scene/builder/encoder_test.go b/pkg/scene/builder/encoder_test.go index 71b4deffd..afce3cc57 100644 --- a/pkg/scene/builder/encoder_test.go +++ b/pkg/scene/builder/encoder_test.go @@ -109,7 +109,7 @@ func TestEncoder_Layers(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaFieldID("default"), + SchemaGroup: id.PropertySchemaGroupID("default"), LinkedDataset: nil, Fields: fl1, Groups: nil, diff --git a/schema.graphql b/schema.graphql index cc2b5a773..43a4aae5a 100644 --- a/schema.graphql +++ b/schema.graphql @@ -52,6 +52,7 @@ scalar FileSize scalar PluginID scalar 
PluginExtensionID scalar PropertySchemaID +scalar PropertySchemaGroupID scalar PropertySchemaFieldID scalar DatasetSchemaFieldID scalar TranslatedString @@ -434,7 +435,7 @@ type PropertyLinkableFields { } type PropertySchemaGroup { - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! schemaId: PropertySchemaID! fields: [PropertySchemaField!]! isList: Boolean! @@ -506,7 +507,7 @@ union PropertyItem = PropertyGroup | PropertyGroupList type PropertyGroup { id: ID! schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! fields: [PropertyField!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -515,7 +516,7 @@ type PropertyGroup { type PropertyGroupList { id: ID! schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! groups: [PropertyGroup!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -563,7 +564,7 @@ type MergedPropertyGroup { parentPropertyId: ID originalId: ID parentId: ID - schemaGroupId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! # note: schemaId will not always be set schemaId: PropertySchemaID linkedDatasetId: ID @@ -980,7 +981,7 @@ input SyncDatasetInput { input UpdatePropertyValueInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! value: Any @@ -989,14 +990,14 @@ input UpdatePropertyValueInput { input RemovePropertyFieldInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! } input UploadFileToPropertyInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! file: Upload! 
@@ -1004,7 +1005,7 @@ input UploadFileToPropertyInput { input LinkDatasetToPropertyValueInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! datasetSchemaIds: [ID!]! @@ -1014,14 +1015,14 @@ input LinkDatasetToPropertyValueInput { input UnlinkPropertyValueInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID + schemaGroupId: PropertySchemaGroupID itemId: ID fieldId: PropertySchemaFieldID! } input AddPropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! index: Int nameFieldValue: Any nameFieldType: ValueType @@ -1029,20 +1030,20 @@ input AddPropertyItemInput { input MovePropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! itemId: ID! index: Int! } input RemovePropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! itemId: ID! } input UpdatePropertyItemInput { propertyId: ID! - schemaItemId: PropertySchemaFieldID! + schemaGroupId: PropertySchemaGroupID! operations: [UpdatePropertyItemOperationInput!]! 
} From 4a9dc5ddefdfc37a5d08431d770190eb0a50b9d4 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 8 Nov 2021 11:29:45 +0900 Subject: [PATCH 102/253] chore: fix typo in github actions --- .github/workflows/deploy_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 74ff3ddc8..707be6cfb 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -13,7 +13,7 @@ jobs: deploy_test: name: deploy_test runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion == 'success' && github.event.repostory.full_name == 'reearth/reearth-backend' + if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' steps: - uses: google-github-actions/setup-gcloud@master with: From b5b01bba5af35148d3b86cacfdffa8d7eb647d52 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 8 Nov 2021 12:15:55 +0900 Subject: [PATCH 103/253] chore: clean up unused code --- internal/infrastructure/mongo/mongodoc/project.go | 6 ------ internal/usecase/interfaces/project.go | 15 --------------- 2 files changed, 21 deletions(-) diff --git a/internal/infrastructure/mongo/mongodoc/project.go b/internal/infrastructure/mongo/mongodoc/project.go index d2a53ff36..da36df7fe 100644 --- a/internal/infrastructure/mongo/mongodoc/project.go +++ b/internal/infrastructure/mongo/mongodoc/project.go @@ -27,17 +27,11 @@ type ProjectDocument struct { PublicDescription string PublicImage string PublicNoIndex bool - Domains []*ProjectDomainDocument Team string Visualizer string PublishmentStatus string } -type ProjectDomainDocument struct { - Domain string - Ready bool -} - type ProjectConsumer struct { Rows []*project.Project } diff --git a/internal/usecase/interfaces/project.go b/internal/usecase/interfaces/project.go index 91559973d..d7286bde5 100644 --- a/internal/usecase/interfaces/project.go +++ b/internal/usecase/interfaces/project.go 
@@ -45,21 +45,6 @@ type PublishProjectParam struct { Status project.PublishmentStatus } -type AddProjectDomainParam struct { - ID id.ProjectID - Domain string -} - -type VerifyProjectDomainParam struct { - ID id.ProjectID - Domain string -} - -type RemoveProjectDomainParam struct { - ID id.ProjectID - Domain string -} - var ( ErrProjectAliasIsNotSet error = errors.New("project alias is not set") ) From 10f745349b4131c816b69a643682bb3d1be1aefd Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 8 Nov 2021 12:18:05 +0900 Subject: [PATCH 104/253] fix: add an index to mongo project collection to prevent creating projects whose alias is duplicated --- internal/infrastructure/mongo/project.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 0f12d99c1..1ebc566d2 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -25,7 +25,7 @@ func NewProject(client *mongodoc.Client) repo.Project { } func (r *projectRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"alias"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "project", i) } From 443f2c8fc4f75af1cf8f5160681c07f69a618483 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 8 Nov 2021 15:00:45 +0900 Subject: [PATCH 105/253] fix: check project alias duplication on project update --- internal/usecase/interactor/project.go | 6 ++++++ internal/usecase/interfaces/project.go | 3 ++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index bd77cb3ed..889ad4a37 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -272,6 +272,12 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP newAlias := prevAlias if params.Alias != nil { + if 
prj2, err := i.projectRepo.FindByPublicName(ctx, *params.Alias); err != nil && !errors.Is(rerror.ErrNotFound, err) { + return nil, err + } else if prj2 != nil && prj.ID() != prj2.ID() { + return nil, interfaces.ErrProjectAliasAlreadyUsed + } + if err := prj.UpdateAlias(*params.Alias); err != nil { return nil, err } diff --git a/internal/usecase/interfaces/project.go b/internal/usecase/interfaces/project.go index d7286bde5..b37aa4ae6 100644 --- a/internal/usecase/interfaces/project.go +++ b/internal/usecase/interfaces/project.go @@ -46,7 +46,8 @@ type PublishProjectParam struct { } var ( - ErrProjectAliasIsNotSet error = errors.New("project alias is not set") + ErrProjectAliasIsNotSet error = errors.New("project alias is not set") + ErrProjectAliasAlreadyUsed error = errors.New("project alias is already used by another project") ) type Project interface { From d54309fe8a5584ceb280c4a42b7f9c7d7ed35f1a Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 12 Nov 2021 12:13:46 +0900 Subject: [PATCH 106/253] chore: update codecov.yml to add ignored files --- codecov.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/codecov.yml b/codecov.yml index dc7e349fc..5da8003be 100644 --- a/codecov.yml +++ b/codecov.yml @@ -2,3 +2,11 @@ comment: layout: 'reach, diff, flags, files' behavior: default require_changes: false +ignore: + - **/*_gen.go + - **/*_test.go + - **/doc.go + - **/testdata + - tools + - main.go + - tools.go From 9d3822d38f2729ed060ce8f156f3def076e85bbf Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 12 Nov 2021 15:11:15 +0900 Subject: [PATCH 107/253] chore: ignore generated files in codecov --- codecov.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/codecov.yml b/codecov.yml index 5da8003be..b990fcc7c 100644 --- a/codecov.yml +++ b/codecov.yml @@ -7,6 +7,7 @@ ignore: - **/*_test.go - **/doc.go - **/testdata + - internal/adapter/gql/generated.go - tools - main.go - tools.go From 215947498b989a35c50c0227425cffc8225a3679 Mon Sep 17 00:00:00 2001 
From: rot1024 Date: Tue, 16 Nov 2021 15:47:09 +0900 Subject: [PATCH 108/253] chore: upgrade dependencies --- go.mod | 32 +++++++++---------- go.sum | 76 +++++++++++++++++++++------------------------ internal/app/jwt.go | 9 +++--- 3 files changed, 57 insertions(+), 60 deletions(-) diff --git a/go.mod b/go.mod index b73e64fa8..f514f5620 100644 --- a/go.mod +++ b/go.mod @@ -2,31 +2,31 @@ module github.com/reearth/reearth-backend require ( cloud.google.com/go/profiler v0.1.1 - cloud.google.com/go/storage v1.14.0 + cloud.google.com/go/storage v1.18.2 github.com/99designs/gqlgen v0.14.0 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect - github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 + github.com/auth0/go-jwt-middleware v1.0.1 github.com/blang/semver v3.5.1+incompatible github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect - github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/fatih/color v1.12.0 // indirect + github.com/form3tech-oss/jwt-go v3.2.2+incompatible github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect - github.com/goccy/go-yaml v1.9.2 + github.com/goccy/go-yaml v1.9.4 github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d github.com/jarcoal/httpmock v1.0.8 - github.com/joho/godotenv v1.3.0 + github.com/joho/godotenv v1.4.0 github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 github.com/klauspost/compress v1.13.6 // indirect github.com/labstack/echo/v4 v4.6.1 - github.com/labstack/gommon v0.3.0 + github.com/labstack/gommon v0.3.1 github.com/mattn/go-isatty v0.0.14 // 
indirect github.com/mitchellh/mapstructure v1.4.2 github.com/oklog/ulid v1.3.1 @@ -41,26 +41,26 @@ require ( github.com/stretchr/testify v1.7.0 github.com/tidwall/pretty v1.0.1 // indirect github.com/twpayne/go-kml v1.5.2 - github.com/uber/jaeger-client-go v2.25.0+incompatible + github.com/uber/jaeger-client-go v2.29.1+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible github.com/urfave/cli/v2 v2.3.0 // indirect github.com/vektah/dataloaden v0.3.0 github.com/vektah/gqlparser/v2 v2.2.0 - go.mongodb.org/mongo-driver v1.7.3 - go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.26.0 - go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.26.0 - go.opentelemetry.io/otel v1.1.0 - go.opentelemetry.io/otel/sdk v1.1.0 + go.mongodb.org/mongo-driver v1.7.4 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.27.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0 + go.opentelemetry.io/otel v1.2.0 + go.opentelemetry.io/otel/sdk v1.2.0 go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect golang.org/x/mod v0.5.1 // indirect - golang.org/x/sys v0.0.0-20211102061401-a2f17f7b995c // indirect + golang.org/x/sys v0.0.0-20211103235746-7861aae1554b // indirect golang.org/x/text v0.3.7 golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/tools v0.1.7 google.golang.org/api v0.60.0 gopkg.in/go-playground/colors.v1 v1.2.0 - gopkg.in/h2non/gock.v1 v1.1.0 + gopkg.in/h2non/gock.v1 v1.1.2 gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) @@ -86,7 +86,7 @@ require ( github.com/googleapis/gax-go/v2 v2.1.1 // indirect github.com/gorilla/websocket v1.4.2 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect - github.com/mattn/go-colorable v0.1.8 // indirect + github.com/mattn/go-colorable 
v0.1.11 // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect @@ -98,7 +98,7 @@ require ( github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect go.opentelemetry.io/contrib v0.23.0 // indirect - go.opentelemetry.io/otel/trace v1.1.0 // indirect + go.opentelemetry.io/otel/trace v1.2.0 // indirect golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 // indirect golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect diff --git a/go.sum b/go.sum index 8aa277609..1408c22e9 100644 --- a/go.sum +++ b/go.sum @@ -15,7 +15,6 @@ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOY cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= @@ -47,8 +46,8 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0 h1:6RRlFMv1omScs6iq2hfE3IvgE+l6RfJPampq8UZc5TU= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= 
+cloud.google.com/go/storage v1.18.2 h1:5NQw6tOn3eMm0oE8vTkfjau18kjL79FlMjy/CHTpmoY= +cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= @@ -77,8 +76,8 @@ github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= -github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63 h1:LY/kRH+fCqA090FsM2VfZ+oocD99ogm3HrT1r0WDnCk= -github.com/auth0/go-jwt-middleware v0.0.0-20200507191422-d30d7b9ece63/go.mod h1:mF0ip7kTEFtnhBJbd/gJe62US3jykNN+dcZoZakJCCA= +github.com/auth0/go-jwt-middleware v1.0.1 h1:/fsQ4vRr4zod1wKReUH+0A3ySRjGiT9G34kypO/EKwI= +github.com/auth0/go-jwt-middleware v1.0.1/go.mod h1:YSeUX3z6+TF2H+7padiEqNJ73Zy9vXW72U//IgN0BIM= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -104,7 +103,6 @@ github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 h1:zH8ljVhhq7yC0MIeUL/IviMtY8hx2mK8cN9wEYb8ggw= github.com/cncf/xds/go 
v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -112,8 +110,6 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= -github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b h1:8xx0j7yceTAgVxonE+qOOepmwWS/Ic3OLQapY9HJajc= github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= @@ -135,13 +131,14 @@ github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/form3tech-oss/jwt-go v3.2.2+incompatible h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk= +github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= 
github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= @@ -175,8 +172,8 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= -github.com/goccy/go-yaml v1.9.2 h1:2Njwzw+0+pjU2gb805ZC1B/uBuAs2VcZ3K+ZgHwDs7w= -github.com/goccy/go-yaml v1.9.2/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= +github.com/goccy/go-yaml v1.9.4 h1:S0GCYjwHKVI6IHqio7QWNKNThUl6NLzFd/g8Z65Axw8= +github.com/goccy/go-yaml v1.9.4/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= 
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= @@ -247,7 +244,6 @@ github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -296,8 +292,9 @@ github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jarcoal/httpmock v1.0.8 h1:8kI16SoO6LQKgPE7PvQuV+YuD/inwHd7fOOe2zMbo4k= github.com/jarcoal/httpmock v1.0.8/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= -github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= +github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= +github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/jonas-p/go-shp v0.1.1 h1:LY81nN67DBCz6VNFn2kS64CjmnDo9IP8rmSkTvhO9jE= github.com/jonas-p/go-shp v0.1.1/go.mod h1:MRIhyxDQ6VVp0oYeD7yPGr5RSTNScUFKCDsI5DR7PtI= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod 
h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= @@ -324,8 +321,9 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/labstack/echo/v4 v4.6.1 h1:OMVsrnNFzYlGSdaiYGHbgWQnr+JM7NG+B9suCPie14M= github.com/labstack/echo/v4 v4.6.1/go.mod h1:RnjgMWNDB9g/HucVWhQYNQP9PvbYf6adqftqryo7s9k= -github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/labstack/gommon v0.3.1 h1:OomWaJXm7xR6L1HmEtGyQf26TEn7V6X88mktX9kee9o= +github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= @@ -335,8 +333,9 @@ github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kN github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty 
v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= @@ -420,8 +419,8 @@ github.com/twpayne/go-kml v1.5.2 h1:rFMw2/EwgkVssGS2MT6YfWSPZz6BgcJkLxQ53jnE8rQ= github.com/twpayne/go-kml v1.5.2/go.mod h1:kz8jAiIz6FIdU2Zjce9qGlVtgFYES9vt7BTPBHf5jl4= github.com/twpayne/go-polyline v1.0.0/go.mod h1:ICh24bcLYBX8CknfvNPKqoTbe+eg+MX1NPyJmSBo7pU= github.com/twpayne/go-waypoint v0.0.0-20200706203930-b263a7f6e4e8/go.mod h1:qj5pHncxKhu9gxtZEYWypA/z097sxhFlbTyOyt9gcnU= -github.com/uber/jaeger-client-go v2.25.0+incompatible h1:IxcNZ7WRY1Y3G4poYlx24szfsn/3LvK9QHCq9oQw8+U= -github.com/uber/jaeger-client-go v2.25.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= +github.com/uber/jaeger-client-go v2.29.1+incompatible h1:R9ec3zO3sGpzs0abd43Y+fBZRJ9uiH6lXyR/+u6brW4= +github.com/uber/jaeger-client-go v2.29.1+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= @@ -453,8 +452,8 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -go.mongodb.org/mongo-driver v1.7.3 h1:G4l/eYY9VrQAK/AUgkV0koQKzQnyddnWxrd/Etf0jIs= -go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= +go.mongodb.org/mongo-driver v1.7.4 h1:sllcioag8Mec0LYkftYWq+cKNPIR4Kqq3iv9ZXY0g/E= +go.mongodb.org/mongo-driver v1.7.4/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod 
h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -465,31 +464,31 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib v0.23.0 h1:MgRuo0JZZX8J9WLRjyd7OpTSbaLOdQXXJa6SnZvlWLM= go.opentelemetry.io/contrib v0.23.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.26.0 h1:cWoHMMzYycoFNhg00Fs3l+Bq+wrOylS+uAbzdPmKHo8= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.26.0/go.mod h1:aQWprSlKd3f5Dk5p+Au3XDGzjQyTfDb/YrWdcPtKrPw= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.26.0 h1:grpCdac/FGdtOezswo8ez5T3VweY23LkGZ0miiqF3os= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.26.0/go.mod h1:cEqIVE/mx9DacUC4me7sMchnVmj3LiUBxSZUSTdqzx4= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.27.0 h1:lt97RYNVVZE9YCZG4MgGhzOpOsoktyvR12NJbRnrsso= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.27.0/go.mod h1:95JJ0PE9JgjaFL3yLUhPIe4nu+1tm9IarHTcXhrAJ54= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0 h1:y1BbYi2c/agRbWm1YLKAk3gJFUMExNMDRxTVIoYy5pU= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0/go.mod h1:KdKx74FeuSamMc33LytyiMuxhuT1v5wfIgUF3lcFGdw= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0 h1:qW6j1kJU24yo2xIu16Py4m4AXn1dd+s2uKllGnTFAm0= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0/go.mod h1:7W3JSDYTtH3qKKHrS1fMiwLtK7iZFLPq1+7htfspX/E= -go.opentelemetry.io/contrib/propagators/b3 v1.1.0 h1:PVDPc1TaspN2ksc2Ig8RLVAnmV1haEwmWiAQPhYrDp8= 
-go.opentelemetry.io/contrib/propagators/b3 v1.1.0/go.mod h1:FmkfYfWE0QCQHVpiio3tgDUijxfz32JO0duXldug9Ow= +go.opentelemetry.io/contrib/propagators/b3 v1.2.0 h1:+zQjl3DBSOle9GEhHuhqzDUKtYcVSfbHSNv24hsoOJ0= +go.opentelemetry.io/contrib/propagators/b3 v1.2.0/go.mod h1:kO8hNKCfa1YmQJ0lM7pzfJGvbXEipn/S7afbOfaw2Kc= go.opentelemetry.io/otel v1.0.0-RC3/go.mod h1:Ka5j3ua8tZs4Rkq4Ex3hwgBgOchyPVq5S6P2lz//nKQ= go.opentelemetry.io/otel v1.0.0/go.mod h1:AjRVh9A5/5DE7S+mZtTR6t8vpKKryam+0lREnfmS4cg= go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= -go.opentelemetry.io/otel v1.1.0 h1:8p0uMLcyyIx0KHNTgO8o3CW8A1aA+dJZJW6PvnMz0Wc= -go.opentelemetry.io/otel v1.1.0/go.mod h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= +go.opentelemetry.io/otel v1.2.0 h1:YOQDvxO1FayUcT9MIhJhgMyNO1WqoduiyvQHzGN0kUQ= +go.opentelemetry.io/otel v1.2.0/go.mod h1:aT17Fk0Z1Nor9e0uisf98LrntPGMnk4frBO9+dkf69I= go.opentelemetry.io/otel/internal/metric v0.23.0 h1:mPfzm9Iqhw7G2nDBmUAjFTfPqLZPbOW2k7QI57ITbaI= go.opentelemetry.io/otel/internal/metric v0.23.0/go.mod h1:z+RPiDJe30YnCrOhFGivwBS+DU1JU/PiLKkk4re2DNY= go.opentelemetry.io/otel/metric v0.23.0 h1:mYCcDxi60P4T27/0jchIDFa1WHEfQeU3zH9UEMpnj2c= go.opentelemetry.io/otel/metric v0.23.0/go.mod h1:G/Nn9InyNnIv7J6YVkQfpc0JCfKBNJaERBGw08nqmVQ= go.opentelemetry.io/otel/sdk v1.0.1/go.mod h1:HrdXne+BiwsOHYYkBE5ysIcv2bvdZstxzmCQhxTcZkI= -go.opentelemetry.io/otel/sdk v1.1.0 h1:j/1PngUJIDOddkCILQYTevrTIbWd494djgGkSsMit+U= -go.opentelemetry.io/otel/sdk v1.1.0/go.mod h1:3aQvM6uLm6C4wJpHtT8Od3vNzeZ34Pqc6bps8MywWzo= +go.opentelemetry.io/otel/sdk v1.2.0 h1:wKN260u4DesJYhyjxDa7LRFkuhH7ncEVKU37LWcyNIo= +go.opentelemetry.io/otel/sdk v1.2.0/go.mod h1:jNN8QtpvbsKhgaC6V5lHiejMoKD+V8uadoSafgHPx1U= go.opentelemetry.io/otel/trace v1.0.0-RC3/go.mod h1:VUt2TUYd8S2/ZRX09ZDFZQwn2RqfMB5MzO17jBojGxo= go.opentelemetry.io/otel/trace v1.0.0/go.mod h1:PXTWqayeFUlJV1YDNhsJYB184+IvAH814St6o6ajzIs= go.opentelemetry.io/otel/trace v1.0.1/go.mod 
h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= -go.opentelemetry.io/otel/trace v1.1.0 h1:N25T9qCL0+7IpOT8RrRy0WYlL7y6U0WiUJzXcVdXY/o= -go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= +go.opentelemetry.io/otel/trace v1.2.0 h1:Ys3iqbqZhcf28hHzrm5WAquMkDHNZTUkw7KHbuNjej0= +go.opentelemetry.io/otel/trace v1.2.0/go.mod h1:N5FLswTubnxKxOJHM7XZC074qpeEdLy3CgAVsdMucK0= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -574,7 +573,6 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= @@ -658,7 +656,6 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -678,10 +675,11 @@ golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210910150752-751e447fb3d0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211102061401-a2f17f7b995c h1:QOfDMdrf/UwlVR0UBq2Mpr58UzNtvgJRXA4BgPfFACs= -golang.org/x/sys v0.0.0-20211102061401-a2f17f7b995c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211103235746-7861aae1554b h1:1VkfZQv42XQlA/jchYumAnv1UPo6RgF9rJFkTgZIxO4= +golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -754,7 +752,6 @@ golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod 
h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= @@ -844,9 +841,7 @@ google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -871,6 +866,7 @@ google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEc google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= 
google.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211016002631-37fc39342514/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211021150943-2b146023228c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145 h1:vum3nDKdleYb+aePXKFEDT2+ghuH00EgYp9B7Q7EZZE= google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= @@ -926,8 +922,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/go-playground/colors.v1 v1.2.0 h1:SPweMUve+ywPrfwao+UvfD5Ah78aOLUkT5RlJiZn52c= gopkg.in/go-playground/colors.v1 v1.2.0/go.mod h1:AvbqcMpNXVl5gBrM20jBm3VjjKBbH/kI5UnqjU7lxFI= -gopkg.in/h2non/gock.v1 v1.1.0 h1:Yy6sSXyTP9wYc6+H7U0NuB1LQ6H2HYmDp2sxFQ8vTEY= -gopkg.in/h2non/gock.v1 v1.1.0/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= +gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY= +gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 44c2f018f..1cececd81 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -9,13 +9,14 @@ import ( "sync" jwtmiddleware "github.com/auth0/go-jwt-middleware" - "github.com/dgrijalva/jwt-go" + // TODO: github.com/form3tech-oss/jwt-go is decrepated. 
+ // Alternative is https://github.com/golang-jwt/jwt, but go-jwt-middleware still uses github.comform3tech-oss/jwt-go + // See also https://github.com/auth0/go-jwt-middleware/issues/73 + "github.com/form3tech-oss/jwt-go" "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/pkg/log" ) -// TODO: move the authentication logic to infrastructure - type contextKey string const ( @@ -185,7 +186,7 @@ func urlFromDomain(path string) string { return path } -// WORKAROUND: https://github.com/dgrijalva/jwt-go/pull/308 should be merged +// WORKAROUND: golang-jwt/jwt-go supports multiple audiences, but go-jwt-middleware still uses github.comform3tech-oss/jwt-go func verifyAudience(claims jwt.MapClaims, aud string) bool { if aud == "" { return true From afe6c0ae3220a80a58fced5fe82275130cb48fd3 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 18 Nov 2021 06:22:09 +0000 Subject: [PATCH 109/253] v0.2.0 --- CHANGELOG.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fb2993434..b0dd49b8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,29 @@ # Changelog All notable changes to this project will be documented in this file. 
+## 0.2.0 - 2021-11-16 + +### ๐Ÿš€ Features + +- Support opentelemetry ([#68](https://github.com/reearth/reearth-backend/pull/68)) [`25c581`](https://github.com/reearth/reearth-backend/commit/25c581) + +### ๐Ÿ”ง Bug Fixes + +- Add an index to mongo project collection to prevent creating projects whose alias is duplicated [`10f745`](https://github.com/reearth/reearth-backend/commit/10f745) +- Check project alias duplication on project update [`443f2c`](https://github.com/reearth/reearth-backend/commit/443f2c) + +### โœจ Refactor + +- Add PropertySchemaGroupID to pkg/id ([#70](https://github.com/reearth/reearth-backend/pull/70)) [`9ece9e`](https://github.com/reearth/reearth-backend/commit/9ece9e) + +### Miscellaneous Tasks + +- Fix typo in github actions [`4a9dc5`](https://github.com/reearth/reearth-backend/commit/4a9dc5) +- Clean up unused code [`b5b01b`](https://github.com/reearth/reearth-backend/commit/b5b01b) +- Update codecov.yml to add ignored files [`d54309`](https://github.com/reearth/reearth-backend/commit/d54309) +- Ignore generated files in codecov [`9d3822`](https://github.com/reearth/reearth-backend/commit/9d3822) +- Upgrade dependencies [`215947`](https://github.com/reearth/reearth-backend/commit/215947) + ## 0.1.0 - 2021-11-01 ### ๐Ÿš€ Features From a26ff03ffd80141053c002e66cfca06ad131acc7 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Thu, 18 Nov 2021 15:06:18 +0300 Subject: [PATCH 110/253] wrap with double quotation (#78) --- codecov.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/codecov.yml b/codecov.yml index b990fcc7c..2960b1ec7 100644 --- a/codecov.yml +++ b/codecov.yml @@ -3,10 +3,10 @@ comment: behavior: default require_changes: false ignore: - - **/*_gen.go - - **/*_test.go - - **/doc.go - - **/testdata + - "**/*_gen.go" + - "**/*_test.go" + - "**/doc.go" + - "**/testdata" - internal/adapter/gql/generated.go - tools - main.go From 
a8b7f182437837f2ee15a18146ac2fc8784f16c9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 19 Nov 2021 18:20:19 +0900 Subject: [PATCH 111/253] ci: skip release action at folk repos, hide ci changes from changelog --- .github/cliff.toml | 2 +- .github/workflows/deploy_test.yml | 2 +- .github/workflows/main.yml | 6 ++++-- .github/workflows/pr.yml | 2 +- .github/workflows/release.yml | 1 + 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/cliff.toml b/.github/cliff.toml index 60380d99e..fe239aa14 100644 --- a/.github/cliff.toml +++ b/.github/cliff.toml @@ -45,8 +45,8 @@ commit_parsers = [ { body = ".*security", group = "๐Ÿ”’ Security"}, { message = "^chore", group = "Miscellaneous Tasks"}, { message = "^build", group = "Miscellaneous Tasks"}, - { message = "^ci", group = "Miscellaneous Tasks"}, { message = "^deps", group = "Miscellaneous Tasks"}, + { message = "^ci", skip = true}, { message = "^revert", skip = true}, { message = "^v[0-9]+", skip = true}, ] diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 707be6cfb..c79401d5f 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -36,8 +36,8 @@ jobs: --platform managed \ --quiet slack-notification: - if: always() name: Slack Notification + if: github.event.repository.full_name == 'reearth/reearth-backend' && always() needs: - deploy_test runs-on: ubuntu-latest diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c0c6f79b4..595229234 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -57,7 +57,7 @@ jobs: runs-on: ubuntu-latest needs: - main - if: "needs.main.outputs.branch && !needs.main.outputs.new_tag" + if: github.event.repository.full_name == 'reearth/reearth-backend' env: REPO: github.com/reearth/reearth-backend REPO_NAME: reearth/reearth-backend @@ -106,6 +106,7 @@ jobs: runs-on: ubuntu-latest needs: - main + if: github.event.repository.full_name == 'reearth/reearth-backend' 
env: NAME: reearth-backend steps: @@ -163,6 +164,7 @@ jobs: docker: name: Build and push Docker image runs-on: ubuntu-latest + if: github.event.repository.full_name == 'reearth/reearth-backend' needs: - main env: @@ -216,7 +218,7 @@ jobs: cache-from: type=registry,ref=${IMAGE_NAME}:latest cache-to: type=inline slack-notification: - if: always() + if: github.event.workflow_run.conclusion == 'success' && always() name: Slack Notification needs: - main diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 5731a8713..5a060a7cd 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -31,7 +31,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt slack-notification: - if: always() + if: github.event.repository.full_name == 'reearth/reearth-backend' && always() name: Slack Notification needs: - pr diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2af9ec3ad..dd67724ca 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,6 +9,7 @@ jobs: release: name: Release runs-on: ubuntu-latest + if: github.event.repository.full_name == 'reearth/reearth-backend' steps: - name: Set up git config run: | From 3512c0d3affa02559d4b7faaac29860cc54983d4 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Mon, 22 Nov 2021 08:03:08 +0300 Subject: [PATCH 112/253] feat: clusters for scenes (#75) * feat: cluster domain models (#69) * Co-authored-by: mimoham24 * feat: domain models * add cluster extenstion type * add cluster extension type test case * fix cluster layers * resolve notes * update manifest * fix manifest.yml margin * refactor models * resolve notes * fix manifest file * resolve notes * resolve notes Co-authored-by: HideBa * feat: cluster datalayer (#73) * feat: cluster graphql/mongo (#74) * feat: cluster datalayer * feat: cluster CRUD * resolve notes * resolve notes * add test cases Co-authored-by: HideBa --- 
internal/adapter/gql/generated.go | 1200 +++++++++++++++-- .../adapter/gql/gqlmodel/convert_scene.go | 15 + internal/adapter/gql/gqlmodel/models_gen.go | 40 + .../adapter/gql/resolver_mutation_scene.go | 47 + .../infrastructure/mongo/mongodoc/scene.go | 39 +- internal/usecase/interactor/scene.go | 116 ++ internal/usecase/interfaces/scene.go | 10 + pkg/builtin/manifest.yml | 73 + pkg/builtin/manifest_ja.yml | 34 + pkg/id/cluster_field_gen_test.go | 1011 ++++++++++++++ pkg/id/cluster_gen.go | 297 ++++ pkg/id/gen.go | 2 + pkg/plugin/extension.go | 19 +- pkg/plugin/manifest/convert.go | 2 + pkg/plugin/manifest/convert_test.go | 15 + pkg/plugin/manifest/schema_gen.go | 2 +- pkg/scene/builder.go | 5 + pkg/scene/cluster.go | 55 + pkg/scene/cluster_list.go | 67 + pkg/scene/cluster_list_test.go | 213 +++ pkg/scene/cluster_test.go | 244 ++++ pkg/scene/scene.go | 8 + pkg/scene/scene_test.go | 26 + schema.graphql | 43 + 24 files changed, 3489 insertions(+), 94 deletions(-) create mode 100644 pkg/id/cluster_field_gen_test.go create mode 100644 pkg/id/cluster_gen.go create mode 100644 pkg/scene/cluster.go create mode 100644 pkg/scene/cluster_list.go create mode 100644 pkg/scene/cluster_list_test.go create mode 100644 pkg/scene/cluster_test.go diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 5247cea3a..4522471fc 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -83,6 +83,11 @@ type DirectiveRoot struct { } type ComplexityRoot struct { + AddClusterPayload struct { + Cluster func(childComplexity int) int + Scene func(childComplexity int) int + } + AddDatasetSchemaPayload struct { DatasetSchema func(childComplexity int) int } @@ -163,6 +168,12 @@ type ComplexityRoot struct { Roll func(childComplexity int) int } + Cluster struct { + ID func(childComplexity int) int + Name func(childComplexity int) int + Property func(childComplexity int) int + } + CreateAssetPayload struct { Asset func(childComplexity 
int) int } @@ -466,6 +477,7 @@ type ComplexityRoot struct { } Mutation struct { + AddCluster func(childComplexity int, input gqlmodel.AddClusterInput) int AddDatasetSchema func(childComplexity int, input gqlmodel.AddDatasetSchemaInput) int AddDynamicDataset func(childComplexity int, input gqlmodel.AddDynamicDatasetInput) int AddDynamicDatasetSchema func(childComplexity int, input gqlmodel.AddDynamicDatasetSchemaInput) int @@ -499,6 +511,7 @@ type ComplexityRoot struct { MovePropertyItem func(childComplexity int, input gqlmodel.MovePropertyItemInput) int PublishProject func(childComplexity int, input gqlmodel.PublishProjectInput) int RemoveAsset func(childComplexity int, input gqlmodel.RemoveAssetInput) int + RemoveCluster func(childComplexity int, input gqlmodel.RemoveClusterInput) int RemoveDatasetSchema func(childComplexity int, input gqlmodel.RemoveDatasetSchemaInput) int RemoveInfobox func(childComplexity int, input gqlmodel.RemoveInfoboxInput) int RemoveInfoboxField func(childComplexity int, input gqlmodel.RemoveInfoboxFieldInput) int @@ -513,6 +526,7 @@ type ComplexityRoot struct { SyncDataset func(childComplexity int, input gqlmodel.SyncDatasetInput) int UninstallPlugin func(childComplexity int, input gqlmodel.UninstallPluginInput) int UnlinkPropertyValue func(childComplexity int, input gqlmodel.UnlinkPropertyValueInput) int + UpdateCluster func(childComplexity int, input gqlmodel.UpdateClusterInput) int UpdateDatasetSchema func(childComplexity int, input gqlmodel.UpdateDatasetSchemaInput) int UpdateLayer func(childComplexity int, input gqlmodel.UpdateLayerInput) int UpdateMe func(childComplexity int, input gqlmodel.UpdateMeInput) int @@ -783,6 +797,11 @@ type ComplexityRoot struct { AssetID func(childComplexity int) int } + RemoveClusterPayload struct { + ClusterID func(childComplexity int) int + Scene func(childComplexity int) int + } + RemoveDatasetSchemaPayload struct { SchemaID func(childComplexity int) int } @@ -815,6 +834,7 @@ type ComplexityRoot 
struct { } Scene struct { + Clusters func(childComplexity int) int CreatedAt func(childComplexity int) int DatasetSchemas func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int DynamicDatasetSchemas func(childComplexity int) int @@ -923,6 +943,11 @@ type ComplexityRoot struct { Scene func(childComplexity int) int } + UpdateClusterPayload struct { + Cluster func(childComplexity int) int + Scene func(childComplexity int) int + } + UpdateDatasetSchemaPayload struct { DatasetSchema func(childComplexity int) int } @@ -1143,6 +1168,9 @@ type MutationResolver interface { UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) + AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) + UpdateCluster(ctx context.Context, input gqlmodel.UpdateClusterInput) (*gqlmodel.UpdateClusterPayload, error) + RemoveCluster(ctx context.Context, input gqlmodel.RemoveClusterInput) (*gqlmodel.RemoveClusterPayload, error) UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) SyncDataset(ctx context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) @@ -1309,6 +1337,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in _ = ec switch typeName + "." 
+ field { + case "AddClusterPayload.cluster": + if e.complexity.AddClusterPayload.Cluster == nil { + break + } + + return e.complexity.AddClusterPayload.Cluster(childComplexity), true + + case "AddClusterPayload.scene": + if e.complexity.AddClusterPayload.Scene == nil { + break + } + + return e.complexity.AddClusterPayload.Scene(childComplexity), true + case "AddDatasetSchemaPayload.datasetSchema": if e.complexity.AddDatasetSchemaPayload.DatasetSchema == nil { break @@ -1575,6 +1617,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Camera.Roll(childComplexity), true + case "Cluster.id": + if e.complexity.Cluster.ID == nil { + break + } + + return e.complexity.Cluster.ID(childComplexity), true + + case "Cluster.name": + if e.complexity.Cluster.Name == nil { + break + } + + return e.complexity.Cluster.Name(childComplexity), true + + case "Cluster.property": + if e.complexity.Cluster.Property == nil { + break + } + + return e.complexity.Cluster.Property(childComplexity), true + case "CreateAssetPayload.asset": if e.complexity.CreateAssetPayload.Asset == nil { break @@ -2938,6 +3001,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.MoveLayerPayload.ToParentLayer(childComplexity), true + case "Mutation.addCluster": + if e.complexity.Mutation.AddCluster == nil { + break + } + + args, err := ec.field_Mutation_addCluster_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.AddCluster(childComplexity, args["input"].(gqlmodel.AddClusterInput)), true + case "Mutation.addDatasetSchema": if e.complexity.Mutation.AddDatasetSchema == nil { break @@ -3334,6 +3409,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.RemoveAsset(childComplexity, args["input"].(gqlmodel.RemoveAssetInput)), true + case "Mutation.removeCluster": + if 
e.complexity.Mutation.RemoveCluster == nil { + break + } + + args, err := ec.field_Mutation_removeCluster_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.RemoveCluster(childComplexity, args["input"].(gqlmodel.RemoveClusterInput)), true + case "Mutation.removeDatasetSchema": if e.complexity.Mutation.RemoveDatasetSchema == nil { break @@ -3502,6 +3589,18 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Mutation.UnlinkPropertyValue(childComplexity, args["input"].(gqlmodel.UnlinkPropertyValueInput)), true + case "Mutation.updateCluster": + if e.complexity.Mutation.UpdateCluster == nil { + break + } + + args, err := ec.field_Mutation_updateCluster_args(context.TODO(), rawArgs) + if err != nil { + return 0, false + } + + return e.complexity.Mutation.UpdateCluster(childComplexity, args["input"].(gqlmodel.UpdateClusterInput)), true + case "Mutation.updateDatasetSchema": if e.complexity.Mutation.UpdateDatasetSchema == nil { break @@ -5053,6 +5152,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.RemoveAssetPayload.AssetID(childComplexity), true + case "RemoveClusterPayload.clusterId": + if e.complexity.RemoveClusterPayload.ClusterID == nil { + break + } + + return e.complexity.RemoveClusterPayload.ClusterID(childComplexity), true + + case "RemoveClusterPayload.scene": + if e.complexity.RemoveClusterPayload.Scene == nil { + break + } + + return e.complexity.RemoveClusterPayload.Scene(childComplexity), true + case "RemoveDatasetSchemaPayload.schemaId": if e.complexity.RemoveDatasetSchemaPayload.SchemaID == nil { break @@ -5123,6 +5236,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.RemoveWidgetPayload.WidgetID(childComplexity), true + case "Scene.clusters": + if e.complexity.Scene.Clusters == nil { + break + } + + return 
e.complexity.Scene.Clusters(childComplexity), true + case "Scene.createdAt": if e.complexity.Scene.CreatedAt == nil { break @@ -5649,6 +5769,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.UninstallPluginPayload.Scene(childComplexity), true + case "UpdateClusterPayload.cluster": + if e.complexity.UpdateClusterPayload.Cluster == nil { + break + } + + return e.complexity.UpdateClusterPayload.Cluster(childComplexity), true + + case "UpdateClusterPayload.scene": + if e.complexity.UpdateClusterPayload.Scene == nil { + break + } + + return e.complexity.UpdateClusterPayload.Scene(childComplexity), true + case "UpdateDatasetSchemaPayload.datasetSchema": if e.complexity.UpdateDatasetSchemaPayload.DatasetSchema == nil { break @@ -6375,6 +6509,7 @@ type Scene implements Node { ): DatasetSchemaConnection! @goField(forceResolver: true) tagIds: [ID!]! tags: [Tag!]! @goField(forceResolver: true) + clusters: [Cluster!]! } enum SceneLockMode { @@ -6821,6 +6956,12 @@ type TagGroup implements Tag { union Tags = TagItem | TagGroup +type Cluster { + id: ID! + name: String! + property: ID! +} + # InputType input CreateAssetInput { @@ -7219,6 +7360,24 @@ input RemoveTagInput { tagID: ID! } +input AddClusterInput { + sceneId: ID! + name: String! + propertyId: ID! +} + +input UpdateClusterInput { + clusterId: ID! + sceneId: ID! + name: String + propertyId: ID +} + +input RemoveClusterInput { + clusterId: ID! + sceneId: ID! +} + # Payload type CreateAssetPayload { @@ -7449,6 +7608,21 @@ type RemoveTagPayload{ tagId: ID! } +type AddClusterPayload { + scene: Scene! + cluster: Cluster! +} + +type UpdateClusterPayload { + scene: Scene! + cluster: Cluster! +} + +type RemoveClusterPayload{ + scene: Scene! + clusterId: ID! 
+} + # Connection type AssetConnection { @@ -7586,6 +7760,9 @@ type Mutation { uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload uploadPlugin(input: UploadPluginInput!): UploadPluginPayload upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload + addCluster(input: AddClusterInput!): AddClusterPayload + updateCluster(input: UpdateClusterInput!): UpdateClusterPayload + removeCluster(input: RemoveClusterInput!): RemoveClusterPayload # Dataset updateDatasetSchema( @@ -7694,6 +7871,21 @@ func (ec *executionContext) field_DatasetSchema_datasets_args(ctx context.Contex return args, nil } +func (ec *executionContext) field_Mutation_addCluster_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.AddClusterInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNAddClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_addDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -8189,6 +8381,21 @@ func (ec *executionContext) field_Mutation_removeAsset_args(ctx context.Context, return args, nil } +func (ec *executionContext) field_Mutation_removeCluster_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.RemoveClusterInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = 
ec.unmarshalNRemoveClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_removeDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -8399,6 +8606,21 @@ func (ec *executionContext) field_Mutation_unlinkPropertyValue_args(ctx context. return args, nil } +func (ec *executionContext) field_Mutation_updateCluster_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 gqlmodel.UpdateClusterInput + if tmp, ok := rawArgs["input"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("input")) + arg0, err = ec.unmarshalNUpdateClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterInput(ctx, tmp) + if err != nil { + return nil, err + } + } + args["input"] = arg0 + return args, nil +} + func (ec *executionContext) field_Mutation_updateDatasetSchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} @@ -9358,7 +9580,7 @@ func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArg // region **************************** field.gotpl ***************************** -func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDatasetSchemaPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddClusterPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9366,7 +9588,7 @@ 
func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.C } }() fc := &graphql.FieldContext{ - Object: "AddDatasetSchemaPayload", + Object: "AddClusterPayload", Field: field, Args: nil, IsMethod: false, @@ -9376,21 +9598,24 @@ func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.C ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.DatasetSchema, nil + return obj.Scene, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(*gqlmodel.DatasetSchema) + res := resTmp.(*gqlmodel.Scene) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddClusterPayload_cluster(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddClusterPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9398,7 +9623,7 @@ func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context. } }() fc := &graphql.FieldContext{ - Object: "AddDynamicDatasetPayload", + Object: "AddClusterPayload", Field: field, Args: nil, IsMethod: false, @@ -9408,21 +9633,24 @@ func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context. 
ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.DatasetSchema, nil + return obj.Cluster, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(*gqlmodel.DatasetSchema) + res := resTmp.(*gqlmodel.Cluster) fc.Result = res - return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) + return ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, field.Selections, res) } -func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9430,7 +9658,7 @@ func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Contex } }() fc := &graphql.FieldContext{ - Object: "AddDynamicDatasetPayload", + Object: "AddDatasetSchemaPayload", Field: field, Args: nil, IsMethod: false, @@ -9440,7 +9668,7 @@ func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Contex ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Dataset, nil + return obj.DatasetSchema, nil }) if err != nil { ec.Error(ctx, err) @@ -9449,12 +9677,12 @@ func (ec *executionContext) 
_AddDynamicDatasetPayload_dataset(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.(*gqlmodel.Dataset) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9462,7 +9690,7 @@ func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx co } }() fc := &graphql.FieldContext{ - Object: "AddDynamicDatasetSchemaPayload", + Object: "AddDynamicDatasetPayload", Field: field, Args: nil, IsMethod: false, @@ -9486,7 +9714,7 @@ func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx co return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9494,7 +9722,7 @@ func (ec *executionContext) 
_AddInfoboxFieldPayload_infoboxField(ctx context.Con } }() fc := &graphql.FieldContext{ - Object: "AddInfoboxFieldPayload", + Object: "AddDynamicDatasetPayload", Field: field, Args: nil, IsMethod: false, @@ -9504,24 +9732,21 @@ func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Con ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.InfoboxField, nil + return obj.Dataset, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(*gqlmodel.InfoboxField) + res := resTmp.(*gqlmodel.Dataset) fc.Result = res - return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, field.Selections, res) + return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9529,7 +9754,7 @@ func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, f } }() fc := &graphql.FieldContext{ - Object: "AddInfoboxFieldPayload", + Object: "AddDynamicDatasetSchemaPayload", Field: field, Args: nil, IsMethod: false, @@ -9539,24 +9764,21 @@ func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, f ctx = graphql.WithFieldContext(ctx, fc) 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Layer, nil + return obj.DatasetSchema, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } return graphql.Null } - res := resTmp.(gqlmodel.Layer) + res := resTmp.(*gqlmodel.DatasetSchema) fc.Result = res - return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) + return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9564,7 +9786,7 @@ func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, fie } }() fc := &graphql.FieldContext{ - Object: "AddLayerGroupPayload", + Object: "AddInfoboxFieldPayload", Field: field, Args: nil, IsMethod: false, @@ -9574,7 +9796,7 @@ func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, fie ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Layer, nil + return obj.InfoboxField, nil }) if err != nil { ec.Error(ctx, err) @@ -9586,12 +9808,12 @@ func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, fie } return graphql.Null } - res := 
resTmp.(*gqlmodel.LayerGroup) + res := resTmp.(*gqlmodel.InfoboxField) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9599,7 +9821,7 @@ func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Contex } }() fc := &graphql.FieldContext{ - Object: "AddLayerGroupPayload", + Object: "AddInfoboxFieldPayload", Field: field, Args: nil, IsMethod: false, @@ -9609,7 +9831,7 @@ func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Contex ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.ParentLayer, nil + return obj.Layer, nil }) if err != nil { ec.Error(ctx, err) @@ -9621,12 +9843,12 @@ func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Contex } return graphql.Null } - res := resTmp.(*gqlmodel.LayerGroup) + res := resTmp.(gqlmodel.Layer) fc.Result = res - return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) + return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec 
*executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9641,38 +9863,6 @@ func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, fie IsResolver: false, } - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Index, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*int) - fc.Result = res - return ec.marshalOInt2แš–int(ctx, field.Selections, res) -} - -func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "AddLayerItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children @@ -9688,12 +9878,12 @@ func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(*gqlmodel.LayerItem) + res := resTmp.(*gqlmodel.LayerGroup) fc.Result = res - return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) + return 
ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { +func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -9701,7 +9891,109 @@ func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context } }() fc := &graphql.FieldContext{ - Object: "AddLayerItemPayload", + Object: "AddLayerGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ParentLayer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerGroup) + fc.Result = res + return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Index, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*int) + fc.Result = res + return ec.marshalOInt2แš–int(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Layer, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.LayerItem) + fc.Result = res + return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) +} + +func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "AddLayerItemPayload", Field: field, Args: nil, IsMethod: false, @@ -10664,6 +10956,111 @@ func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, 
res) } +func (ec *executionContext) _Cluster_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Cluster", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Cluster", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) 
(ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Cluster", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateAssetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -18157,6 +18554,123 @@ func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field g return ec.marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginPayload(ctx, field.Selections, res) } +func (ec *executionContext) _Mutation_addCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_addCluster_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().AddCluster(rctx, args["input"].(gqlmodel.AddClusterInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.AddClusterPayload) + fc.Result = res + return ec.marshalOAddClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_updateCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_updateCluster_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateCluster(rctx, args["input"].(gqlmodel.UpdateClusterInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.UpdateClusterPayload) + fc.Result = res + return ec.marshalOUpdateClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) _Mutation_removeCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Mutation", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + rawArgs := field.ArgumentMap(ec.Variables) + args, err := ec.field_Mutation_removeCluster_args(ctx, rawArgs) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + fc.Args = args + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().RemoveCluster(rctx, args["input"].(gqlmodel.RemoveClusterInput)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.RemoveClusterPayload) + fc.Result = res + return ec.marshalORemoveClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterPayload(ctx, field.Selections, res) +} + func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -25857,6 +26371,76 @@ func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _RemoveClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveClusterPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from 
middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _RemoveClusterPayload_clusterId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveClusterPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ClusterID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(id.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -26864,6 +27448,41 @@ func (ec *executionContext) _Scene_tags(ctx context.Context, field graphql.Colle return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) } +func (ec *executionContext) _Scene_clusters(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Scene", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Clusters, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Cluster) + fc.Result = res + return ec.marshalNCluster2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšClusterแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -28702,6 +29321,76 @@ func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, f return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) _UpdateClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateClusterPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateClusterPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx 
// use context from middleware stack in children + return obj.Scene, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _UpdateClusterPayload_cluster(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateClusterPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "UpdateClusterPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Cluster, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Cluster) + fc.Result = res + return ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, field.Selections, res) +} + func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -31264,6 +31953,45 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co // region **************************** input.gotpl ***************************** +func (ec *executionContext) 
unmarshalInputAddClusterInput(ctx context.Context, obj interface{}) (gqlmodel.AddClusterInput, error) { + var it gqlmodel.AddClusterInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalNString2string(ctx, v) + if err != nil { + return it, err + } + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.AddDatasetSchemaInput, error) { var it gqlmodel.AddDatasetSchemaInput asMap := map[string]interface{}{} @@ -32591,6 +33319,37 @@ func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, return it, nil } +func (ec *executionContext) unmarshalInputRemoveClusterInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveClusterInput, error) { + var it gqlmodel.RemoveClusterInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "clusterId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId")) + it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.Context, obj interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { var it gqlmodel.RemoveDatasetSchemaInput asMap := map[string]interface{}{} @@ -33057,6 +33816,53 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C return it, nil } +func (ec *executionContext) unmarshalInputUpdateClusterInput(ctx context.Context, obj interface{}) (gqlmodel.UpdateClusterInput, error) { + var it gqlmodel.UpdateClusterInput + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "clusterId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId")) + it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "sceneId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + case "name": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + it.Name, err = ec.unmarshalOString2แš–string(ctx, v) + if err != nil { + return it, err + } + case "propertyId": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) + it.PropertyID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx 
context.Context, obj interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { var it gqlmodel.UpdateDatasetSchemaInput asMap := map[string]interface{}{} @@ -34060,6 +34866,38 @@ func (ec *executionContext) _Tags(ctx context.Context, sel ast.SelectionSet, obj // region **************************** object.gotpl **************************** +var addClusterPayloadImplementors = []string{"AddClusterPayload"} + +func (ec *executionContext) _AddClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddClusterPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, addClusterPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("AddClusterPayload") + case "scene": + out.Values[i] = ec._AddClusterPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "cluster": + out.Values[i] = ec._AddClusterPayload_cluster(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var addDatasetSchemaPayloadImplementors = []string{"AddDatasetSchemaPayload"} func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { @@ -34543,6 +35381,43 @@ func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, o return out } +var clusterImplementors = []string{"Cluster"} + +func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Cluster) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, clusterImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case 
"__typename": + out.Values[i] = graphql.MarshalString("Cluster") + case "id": + out.Values[i] = ec._Cluster_id(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "name": + out.Values[i] = ec._Cluster_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "property": + out.Values[i] = ec._Cluster_property(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var createAssetPayloadImplementors = []string{"CreateAssetPayload"} func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateAssetPayload) graphql.Marshaler { @@ -36667,6 +37542,12 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out.Values[i] = ec._Mutation_uploadPlugin(ctx, field) case "upgradePlugin": out.Values[i] = ec._Mutation_upgradePlugin(ctx, field) + case "addCluster": + out.Values[i] = ec._Mutation_addCluster(ctx, field) + case "updateCluster": + out.Values[i] = ec._Mutation_updateCluster(ctx, field) + case "removeCluster": + out.Values[i] = ec._Mutation_removeCluster(ctx, field) case "updateDatasetSchema": out.Values[i] = ec._Mutation_updateDatasetSchema(ctx, field) case "syncDataset": @@ -38449,6 +39330,38 @@ func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.Sel return out } +var removeClusterPayloadImplementors = []string{"RemoveClusterPayload"} + +func (ec *executionContext) _RemoveClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveClusterPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, removeClusterPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = 
graphql.MarshalString("RemoveClusterPayload") + case "scene": + out.Values[i] = ec._RemoveClusterPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "clusterId": + out.Values[i] = ec._RemoveClusterPayload_clusterId(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var removeDatasetSchemaPayloadImplementors = []string{"RemoveDatasetSchemaPayload"} func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveDatasetSchemaPayload) graphql.Marshaler { @@ -38807,6 +39720,11 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob } return res }) + case "clusters": + out.Values[i] = ec._Scene_clusters(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -39363,6 +40281,38 @@ func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast return out } +var updateClusterPayloadImplementors = []string{"UpdateClusterPayload"} + +func (ec *executionContext) _UpdateClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateClusterPayload) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, updateClusterPayloadImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("UpdateClusterPayload") + case "scene": + out.Values[i] = ec._UpdateClusterPayload_scene(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + case "cluster": + out.Values[i] = ec._UpdateClusterPayload_cluster(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } + default: + panic("unknown 
field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + var updateDatasetSchemaPayloadImplementors = []string{"UpdateDatasetSchemaPayload"} func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { @@ -40207,6 +41157,11 @@ func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, o // region ***************************** type.gotpl ***************************** +func (ec *executionContext) unmarshalNAddClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterInput(ctx context.Context, v interface{}) (gqlmodel.AddClusterInput, error) { + res, err := ec.unmarshalInputAddClusterInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNAddDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.AddDatasetSchemaInput, error) { res, err := ec.unmarshalInputAddDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -40393,6 +41348,60 @@ func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.Se return res } +func (ec *executionContext) marshalNCluster2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšClusterแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Cluster) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer 
wg.Done() + } + ret[i] = ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Cluster) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._Cluster(ctx, sel, v) +} + func (ec *executionContext) unmarshalNCreateAssetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetInput(ctx context.Context, v interface{}) (gqlmodel.CreateAssetInput, error) { res, err := ec.unmarshalInputCreateAssetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -42378,6 +43387,11 @@ func (ec *executionContext) unmarshalNRemoveAssetInput2githubแš—comแš‹reearthแš‹ return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNRemoveClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterInput(ctx context.Context, v interface{}) (gqlmodel.RemoveClusterInput, error) { + res, err := ec.unmarshalInputRemoveClusterInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNRemoveDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.RemoveDatasetSchemaInput, error) { res, err := ec.unmarshalInputRemoveDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -42833,6 +43847,11 @@ func (ec *executionContext) 
unmarshalNUnlinkPropertyValueInput2githubแš—comแš‹re return res, graphql.ErrorOnPath(ctx, err) } +func (ec *executionContext) unmarshalNUpdateClusterInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterInput(ctx context.Context, v interface{}) (gqlmodel.UpdateClusterInput, error) { + res, err := ec.unmarshalInputUpdateClusterInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) unmarshalNUpdateDatasetSchemaInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaInput(ctx context.Context, v interface{}) (gqlmodel.UpdateDatasetSchemaInput, error) { res, err := ec.unmarshalInputUpdateDatasetSchemaInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -43286,6 +44305,13 @@ func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel a return res } +func (ec *executionContext) marshalOAddClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddClusterPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._AddClusterPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -44042,6 +45068,13 @@ func (ec *executionContext) marshalORemoveAssetPayload2แš–githubแš—comแš‹reearth return ec._RemoveAssetPayload(ctx, sel, v) } +func (ec *executionContext) marshalORemoveClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveClusterPayload) graphql.Marshaler { + if v == nil { + return 
graphql.Null + } + return ec._RemoveClusterPayload(ctx, sel, v) +} + func (ec *executionContext) marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveDatasetSchemaPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -44291,6 +45324,13 @@ func (ec *executionContext) marshalOUninstallPluginPayload2แš–githubแš—comแš‹ree return ec._UninstallPluginPayload(ctx, sel, v) } +func (ec *executionContext) marshalOUpdateClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateClusterPayload) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._UpdateClusterPayload(ctx, sel, v) +} + func (ec *executionContext) marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/adapter/gql/gqlmodel/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go index 5c5e7a2f3..5fae166ad 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -30,6 +30,14 @@ func ToScenePlugin(sp *scene.Plugin) *ScenePlugin { } } +func ToCluster(c *scene.Cluster) *Cluster { + return &Cluster{ + ID: c.ID().ID(), + Name: c.Name(), + Property: c.Property().ID(), + } +} + func ToScene(scene *scene.Scene) *Scene { if scene == nil { return nil @@ -41,6 +49,12 @@ func ToScene(scene *scene.Scene) *Scene { widgets = append(widgets, ToSceneWidget(w)) } + cl := scene.Clusters().Clusters() + clusters := make([]*Cluster, 0, len(cl)) + for _, c := range cl { + clusters = append(clusters, ToCluster(c)) + 
} + scenePlugins := scene.PluginSystem().Plugins() plugins := make([]*ScenePlugin, 0, len(scenePlugins)) for _, sp := range scenePlugins { @@ -55,6 +69,7 @@ func ToScene(scene *scene.Scene) *Scene { RootLayerID: scene.RootLayer().ID(), CreatedAt: scene.CreatedAt(), UpdatedAt: scene.UpdatedAt(), + Clusters: clusters, Widgets: widgets, WidgetAlignSystem: ToWidgetAlignSystem(scene.WidgetAlignSystem()), Plugins: plugins, diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index af39ce5ab..3adf45b2b 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -39,6 +39,17 @@ type Tags interface { IsTags() } +type AddClusterInput struct { + SceneID id.ID `json:"sceneId"` + Name string `json:"name"` + PropertyID id.ID `json:"propertyId"` +} + +type AddClusterPayload struct { + Scene *Scene `json:"scene"` + Cluster *Cluster `json:"cluster"` +} + type AddDatasetSchemaInput struct { SceneID id.ID `json:"sceneId"` Name string `json:"name"` @@ -197,6 +208,12 @@ type Camera struct { Fov float64 `json:"fov"` } +type Cluster struct { + ID id.ID `json:"id"` + Name string `json:"name"` + Property id.ID `json:"property"` +} + type CreateAssetInput struct { TeamID id.ID `json:"teamId"` File graphql.Upload `json:"file"` @@ -879,6 +896,16 @@ type RemoveAssetPayload struct { AssetID id.ID `json:"assetId"` } +type RemoveClusterInput struct { + ClusterID id.ID `json:"clusterId"` + SceneID id.ID `json:"sceneId"` +} + +type RemoveClusterPayload struct { + Scene *Scene `json:"scene"` + ClusterID id.ID `json:"clusterId"` +} + type RemoveDatasetSchemaInput struct { SchemaID id.ID `json:"schemaId"` Force *bool `json:"force"` @@ -979,6 +1006,7 @@ type Scene struct { DatasetSchemas *DatasetSchemaConnection `json:"datasetSchemas"` TagIds []*id.ID `json:"tagIds"` Tags []Tag `json:"tags"` + Clusters []*Cluster `json:"clusters"` } func (Scene) IsNode() {} @@ -1103,6 +1131,18 @@ type 
UnlinkPropertyValueInput struct { FieldID id.PropertySchemaFieldID `json:"fieldId"` } +type UpdateClusterInput struct { + ClusterID id.ID `json:"clusterId"` + SceneID id.ID `json:"sceneId"` + Name *string `json:"name"` + PropertyID *id.ID `json:"propertyId"` +} + +type UpdateClusterPayload struct { + Scene *Scene `json:"scene"` + Cluster *Cluster `json:"cluster"` +} + type UpdateDatasetSchemaInput struct { SchemaID id.ID `json:"schemaId"` Name string `json:"name"` diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index f79c3c01b..306fcb2bb 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -195,3 +195,50 @@ func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.Upg ScenePlugin: gqlmodel.ToScenePlugin(s.PluginSystem().Plugin(input.ToPluginID)), }, nil } + +func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) { + exit := trace(ctx) + defer exit() + s, c, err := r.usecases.Scene.AddCluster(ctx, id.SceneID(input.SceneID), input.Name, id.PropertyID(input.PropertyID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.AddClusterPayload{ + Scene: gqlmodel.ToScene(s), + Cluster: gqlmodel.ToCluster(c), + }, nil +} + +func (r *mutationResolver) UpdateCluster(ctx context.Context, input gqlmodel.UpdateClusterInput) (*gqlmodel.UpdateClusterPayload, error) { + exit := trace(ctx) + defer exit() + s, c, err := r.usecases.Scene.UpdateCluster(ctx, interfaces.UpdateClusterParam{ + ClusterID: id.ClusterID(input.ClusterID), + SceneID: id.SceneID(input.SceneID), + Name: input.Name, + PropertyID: id.PropertyIDFromRefID(input.PropertyID), + }, getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.UpdateClusterPayload{ + Scene: gqlmodel.ToScene(s), + Cluster: gqlmodel.ToCluster(c), + }, nil +} + +func (r 
*mutationResolver) RemoveCluster(ctx context.Context, input gqlmodel.RemoveClusterInput) (*gqlmodel.RemoveClusterPayload, error) { + exit := trace(ctx) + defer exit() + s, err := r.usecases.Scene.RemoveCluster(ctx, id.SceneID(input.SceneID), id.ClusterID(input.ClusterID), getOperator(ctx)) + if err != nil { + return nil, err + } + + return &gqlmodel.RemoveClusterPayload{ + Scene: gqlmodel.ToScene(s), + ClusterID: input.ClusterID, + }, nil +} diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index 6cef08eb4..fbb19a2b6 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -24,6 +24,12 @@ type ScenePluginDocument struct { Property *string } +type SceneClusterDocument struct { + ID string + Name string + Property string +} + type SceneDocument struct { ID string Project string @@ -34,6 +40,7 @@ type SceneDocument struct { Plugins []ScenePluginDocument UpdateAt time.Time Property string + Clusters []SceneClusterDocument } type SceneConsumer struct { @@ -85,10 +92,11 @@ func (c *SceneIDConsumer) Consume(raw bson.Raw) error { func NewScene(scene *scene.Scene) (*SceneDocument, string) { widgets := scene.WidgetSystem().Widgets() plugins := scene.PluginSystem().Plugins() + clusters := scene.Clusters().Clusters() widgetsDoc := make([]SceneWidgetDocument, 0, len(widgets)) pluginsDoc := make([]ScenePluginDocument, 0, len(plugins)) - + clsuterDoc := make([]SceneClusterDocument, 0, len(clusters)) for _, w := range widgets { widgetsDoc = append(widgetsDoc, SceneWidgetDocument{ ID: w.ID().String(), @@ -107,6 +115,14 @@ func NewScene(scene *scene.Scene) (*SceneDocument, string) { }) } + for _, cl := range clusters { + clsuterDoc = append(clsuterDoc, SceneClusterDocument{ + ID: cl.ID().String(), + Name: cl.Name(), + Property: cl.Property().String(), + }) + } + id := scene.ID().String() return &SceneDocument{ ID: id, @@ -118,6 +134,7 @@ func NewScene(scene 
*scene.Scene) (*SceneDocument, string) { AlignSystem: NewWidgetAlignSystem(scene.WidgetAlignSystem()), UpdateAt: scene.UpdatedAt(), Property: scene.Property().String(), + Clusters: clsuterDoc, }, id } @@ -145,6 +162,7 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { ws := make([]*scene.Widget, 0, len(d.Widgets)) ps := make([]*scene.Plugin, 0, len(d.Plugins)) + clusters := make([]*scene.Cluster, 0, len(d.Clusters)) for _, w := range d.Widgets { wid, err := id.WidgetIDFrom(w.ID) @@ -181,11 +199,30 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { ps = append(ps, scene.NewPlugin(pid, id.PropertyIDFromRef(p.Property))) } + for _, c := range d.Clusters { + cid, err := id.ClusterIDFrom(c.ID) + if err != nil { + return nil, err + } + pid, err := id.PropertyIDFrom(c.Property) + if err != nil { + return nil, err + } + cluster, err := scene.NewCluster(cid, c.Name, pid) + if err != nil { + return nil, err + } + clusters = append(clusters, cluster) + } + + cl := scene.NewClusterListFrom(clusters) + return scene.New(). ID(sid). Project(projectID). Team(tid). RootLayer(lid). + Clusters(cl). WidgetSystem(scene.NewWidgetSystem(ws)). WidgetAlignSystem(d.AlignSystem.Model()). PluginSystem(scene.NewPluginSystem(ps)). 
diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index ae71a1628..aefeff75b 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -684,3 +684,119 @@ func (i *Scene) getPlugin(ctx context.Context, sid id.SceneID, p id.PluginID, e return plugin, extension, nil } + +func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, propertyID id.PropertyID, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, nil, interfaces.ErrOperationDenied + } + + if err := i.CheckSceneLock(ctx, sceneID); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.WritableTeams) + if err != nil { + return nil, nil, err + } + + cid := id.NewClusterID() + cluster, err := scene.NewCluster(cid, name, propertyID) + if err != nil { + return nil, nil, err + } + s.Clusters().Add(cluster) + err = i.sceneRepo.Save(ctx, s) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return s, cluster, nil +} + +func (i *Scene) UpdateCluster(ctx context.Context, param interfaces.UpdateClusterParam, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, nil, interfaces.ErrOperationDenied + } + + if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, param.SceneID, operator.WritableTeams) + if err != nil { + return nil, nil, err + } + cluster := s.Clusters().Get(param.ClusterID) + if 
cluster == nil { + return nil, nil, rerror.ErrNotFound + } + if param.Name != nil { + cluster.Rename(*param.Name) + } + if param.PropertyID != nil { + cluster.UpdateProperty(*param.PropertyID) + } + + err = i.sceneRepo.Save(ctx, s) + if err != nil { + return nil, nil, err + } + + tx.Commit() + return s, cluster, nil +} + +func (i *Scene) RemoveCluster(ctx context.Context, sceneID id.SceneID, clusterID id.ClusterID, operator *usecase.Operator) (*scene.Scene, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + if err := i.OnlyOperator(operator); err != nil { + return nil, interfaces.ErrOperationDenied + } + + if err := i.CheckSceneLock(ctx, sceneID); err != nil { + return nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.WritableTeams) + if err != nil { + return nil, err + } + s.Clusters().Remove(clusterID) + + err = i.sceneRepo.Save(ctx, s) + if err != nil { + return nil, err + } + + tx.Commit() + return s, nil +} diff --git a/internal/usecase/interfaces/scene.go b/internal/usecase/interfaces/scene.go index 34c23e4d9..dae978178 100644 --- a/internal/usecase/interfaces/scene.go +++ b/internal/usecase/interfaces/scene.go @@ -27,6 +27,9 @@ type Scene interface { InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, id.PluginID, *id.PropertyID, error) UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error) UpgradePlugin(context.Context, id.SceneID, id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error) + AddCluster(context.Context, id.SceneID, string, id.PropertyID, *usecase.Operator) (*scene.Scene, *scene.Cluster, error) + UpdateCluster(context.Context, UpdateClusterParam, *usecase.Operator) (*scene.Scene, *scene.Cluster, error) + RemoveCluster(context.Context, id.SceneID, id.ClusterID, *usecase.Operator) (*scene.Scene, error) } 
type UpdateWidgetParam struct { @@ -43,3 +46,10 @@ type UpdateWidgetAlignSystemParam struct { Location scene.WidgetLocation Align *scene.WidgetAlignType } + +type UpdateClusterParam struct { + ClusterID id.ClusterID + SceneID id.SceneID + Name *string + PropertyID *id.PropertyID +} diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 1e7272603..a9813f1e5 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -1584,3 +1584,76 @@ extensions: - id: layerCamera title: Camera position type: camera + - id: cluster + name: Cluster + type: cluster + description: Defines how layers are clustered together and displayed on earth. + schema: + groups: + - id: default + title: Cluster + fields: + - id: cluster_name + type: string + title: Name + description: Sets the name of the cluster. + - id: cluster_pixelRange + type: number + description: Sets the minimum range between layers to get clustered together. + title: Pixel range + defaultValue: 15 + min: 1 + max: 200 + suffix: px + - id: cluster_minSize + type: number + title: Minimum cluster size + description: The minimum number of layers that can be clustered. + defaultValue: 3 + min: 2 + max: 20 + - id: cluster_maxSize + title: Max cluster size + description: Sets the size of cluster entity. + defaultValue: 48 + type: number + min: 1 + max: 200 + - id: cluster_shapeType + type: string + title: Shape type + defaultValue: pin + description: Sets the shape of cluster entity. + choices: + - key: pin + label: Pin + - key: label + label: Label + - id: cluster_textColor + type: string + title: Text color + description: Sets the text color of cluster entity. + ui: color + availableIf: + field: cluster_shapeType + type: string + value: label + - id: cluster_backgroundColor + type: string + ui: color + title: Background color + description: Sets the background color of cluster entity. + - id: cluster_image + type: url + title: Image + description: Sets the image of cluster entity. 
+ ui: image + - id: layers + title: Layers + representativeField: layer + list: true + fields: + - id: layer + title: Layer + type: ref + ui: layer \ No newline at end of file diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 56239d0ae..c9d12dcb9 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -749,3 +749,37 @@ extensions: title: ใ‚ซใƒกใƒฉ็”ป่ง’ layerCamera: title: ใ‚ซใƒกใƒฉ + cluster: + name: ใ‚ฏใƒฉใ‚นใ‚ฟ + description: ใƒฌใ‚คใƒคใƒผใ‚’่‡ชๅ‹•็š„ใซใพใจใ‚ใฆ่กจ็คบใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใชใ‚ฏใƒฉใ‚นใ‚ฟใ‚’่จญๅฎšใ—ใพใ™ใ€‚ + propertySchema: + default: + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + fields: + cluster_name: + title: ใ‚ฏใƒฉใ‚นใ‚ฟๅ + description: ใ‚ฏใƒฉใ‚นใ‚ฟใฎๅๅ‰ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + cluster_pixelRange: + title: ๆœ€ๅฐ็ฏ„ๅ›ฒ + description: ็”ป้ขไธŠใฎไฝ•ใƒ”ใ‚ฏใ‚ปใƒซๅˆ†ใฎ็ฏ„ๅ›ฒใซใ‚ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒฉใ‚นใ‚ฟใซใพใจใ‚ใ‚‹ใ‹ใ‚’ๆœ€ๅฐๅ€คใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + cluster_minSize: + title: ๆœ€ๅฐใ‚ตใ‚คใ‚บ + description: ใ‚ฏใƒฉใ‚นใ‚ฟใŒ่กจ็คบใ•ใ‚Œใ‚‹ๆœ€ๅฐใฎใƒฌใ‚คใƒคใƒผๆ•ฐ + cluster_maxSize: + title: ๆœ€ๅคงใ‚ตใ‚คใ‚บ + description: ใ‚ฏใƒฉใ‚นใ‚ฟใซๅฑžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใชๆœ€ๅคงใƒฌใ‚คใƒคใƒผๆ•ฐ + cluster_shapeType: + title: ่กจ็คบๆ–นๆณ• + description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ่กจ็คบๆ–นๆณ•ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + choices: + pin: ใƒ”ใƒณ + label: ใƒฉใƒ™ใƒซ + cluster_textColor: + title: ๆ–‡ๅญ—่‰ฒ + description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎๆ–‡ๅญ—่‰ฒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + cluster_backgroundColor: + title: ่ƒŒๆ™ฏ่‰ฒ + description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ่ƒŒๆ™ฏ่‰ฒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + cluster_image: + title: ็”ปๅƒ + description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ็”ปๅƒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ diff --git a/pkg/id/cluster_field_gen_test.go b/pkg/id/cluster_field_gen_test.go new file mode 100644 index 000000000..ed76808b5 --- /dev/null +++ b/pkg/id/cluster_field_gen_test.go @@ -0,0 +1,1011 @@ +// Code generated by gen, DO NOT EDIT. 
+ +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewClusterID(t *testing.T) { + id := NewClusterID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestClusterIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result ClusterID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result ClusterID + err error + }{ + ClusterID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result ClusterID + err error + }{ + ClusterID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result ClusterID + err error + }{ + ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := ClusterIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} + +func TestMustClusterID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected ClusterID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustBeID(tc.input) }) + return + } + result := MustClusterID(tc.input) + 
assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestClusterIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *ClusterID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := ClusterIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestClusterIDFromRefID(t *testing.T) { + id := New() + + subId := ClusterIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestClusterID_ID(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestClusterID_String(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func TestClusterID_GoString(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.ClusterID("+id.String()+")") +} + +func TestClusterID_RefString(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestClusterID_Ref(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestClusterID_Contains(t *testing.T) { + id := NewClusterID() + id2 := NewClusterID() + assert.True(t, id.Contains([]ClusterID{id, id2})) + assert.False(t, id.Contains([]ClusterID{id2})) +} + +func TestClusterID_CopyRef(t *testing.T) { + id 
:= New() + subId := ClusterIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestClusterID_IDRef(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestClusterID_StringRef(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestClusterID_MarhsalJSON(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestClusterID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &ClusterID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestClusterID_MarshalText(t *testing.T) { + id := New() + subId := ClusterIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) +} + +func TestClusterID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &ClusterID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestClusterID_IsNil(t *testing.T) { + subId := ClusterID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *ClusterIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestClusterIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ClusterID + expected []string + }{ + { + name: "Empty slice", + input: make([]ClusterID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + 
name: "multiple elements", + input: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, ClusterIDToKeys(tc.input)) + }) + } + +} + +func TestClusterIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []ClusterID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []ClusterID + err error + }{ + res: make([]ClusterID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []ClusterID + err error + }{ + res: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []ClusterID + err error + }{ + res: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []ClusterID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := ClusterIDsFrom(tc.input) + assert.True(tt, errors.As(ErrInvalidID, &err)) + } else { + res, err := ClusterIDsFrom(tc.input) + assert.Equal(tt, 
tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestClusterIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []ClusterID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]ClusterID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ClusterIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestClusterIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []ClusterID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]ClusterID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []ClusterID{MustClusterID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []ClusterID{ + MustClusterID(id1.String()), + MustClusterID(id2.String()), + MustClusterID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ClusterIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestClusterIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []ClusterID + 
expected []ID + }{ + { + name: "Empty slice", + input: make([]ClusterID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ClusterIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestClusterIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustClusterID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustClusterID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustClusterID(id3.String()) + + testCases := []struct { + name string + input []*ClusterID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*ClusterID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*ClusterID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*ClusterID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := ClusterIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewClusterIDSet(t *testing.T) { + ClusterIdSet := NewClusterIDSet() + + assert.NotNil(t, ClusterIdSet) + assert.Empty(t, ClusterIdSet.m) + assert.Empty(t, ClusterIdSet.s) +} + +func TestClusterIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name 
string + input []ClusterID + expected *ClusterIDSet + }{ + { + name: "Empty slice", + input: make([]ClusterID, 0), + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewClusterIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestClusterIDSet_AddRef(t *testing.T) { + t.Parallel() + + ClusterId := MustClusterID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *ClusterID + expected *ClusterIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &ClusterIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &ClusterId, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewClusterIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestClusterIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + ClusterIDSet + ClusterID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + ClusterIDSet + ClusterID + }{ClusterIDSet: ClusterIDSet{}, ClusterID: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + ClusterIDSet + ClusterID + }{ClusterIDSet: ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, ClusterID: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + ClusterIDSet + ClusterID + }{ClusterIDSet: ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, ClusterID: MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, 
tc.input.ClusterIDSet.Has(tc.input.ClusterID)) + }) + } +} + +func TestClusterIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input ClusterIDSet + expected ClusterIDSet + }{ + { + name: "Empty Set", + input: ClusterIDSet{}, + expected: ClusterIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: ClusterIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestClusterIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *ClusterIDSet + expected []ClusterID + }{ + { + name: "Empty slice", + input: &ClusterIDSet{ + m: map[ClusterID]struct{}{}, + s: nil, + }, + expected: make([]ClusterID, 0), + }, + { + name: "1 element", + input: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &ClusterIDSet{ + m: map[ClusterID]struct{}{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, 
func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestClusterIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *ClusterIDSet + expected *ClusterIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewClusterIDSet(), + }, + { + name: "Empty set", + input: NewClusterIDSet(), + expected: NewClusterIDSet(), + }, + { + name: "1 element", + input: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &ClusterIDSet{ + m: map[ClusterID]struct{}{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestClusterIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *ClusterIDSet + b *ClusterIDSet + } + expected *ClusterIDSet 
+ }{ + { + name: "Empty Set", + input: struct { + a *ClusterIDSet + b *ClusterIDSet + }{ + a: &ClusterIDSet{}, + b: &ClusterIDSet{}, + }, + expected: &ClusterIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *ClusterIDSet + b *ClusterIDSet + }{ + a: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &ClusterIDSet{}, + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *ClusterIDSet + b *ClusterIDSet + }{ + a: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + }, + s: []ClusterID{ + MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), + MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/cluster_gen.go b/pkg/id/cluster_gen.go new file mode 100644 index 000000000..3720b1bf5 --- /dev/null +++ b/pkg/id/cluster_gen.go @@ -0,0 +1,297 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// ClusterID is an ID for Cluster. +type ClusterID ID + +// NewClusterID generates a new ClusterId. +func NewClusterID() ClusterID { + return ClusterID(New()) +} + +// ClusterIDFrom generates a new ClusterID from a string. 
+func ClusterIDFrom(i string) (nid ClusterID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = ClusterID(did) + return +} + +// MustClusterID generates a new ClusterID from a string, but panics if the string cannot be parsed. +func MustClusterID(i string) ClusterID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return ClusterID(did) +} + +// ClusterIDFromRef generates a new ClusterID from a string ref. +func ClusterIDFromRef(i *string) *ClusterID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := ClusterID(*did) + return &nid +} + +// ClusterIDFromRefID generates a new ClusterID from a ref of a generic ID. +func ClusterIDFromRefID(i *ID) *ClusterID { + if i == nil { + return nil + } + nid := ClusterID(*i) + return &nid +} + +// ID returns a domain ID. +func (d ClusterID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d ClusterID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d ClusterID) GoString() string { + return "id.ClusterID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d ClusterID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d ClusterID) Ref() *ClusterID { + d2 := d + return &d2 +} + +// Contains returns whether the id is contained in the slice. +func (d ClusterID) Contains(ids []ClusterID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + +// CopyRef returns a copy of a reference. +func (d *ClusterID) CopyRef() *ClusterID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *ClusterID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *ClusterID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *ClusterID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *ClusterID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = ClusterIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *ClusterID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *ClusterID) UnmarshalText(text []byte) (err error) { + *d, err = ClusterIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d ClusterID) IsNil() bool { + return ID(d).IsNil() +} + +// ClusterIDToKeys converts IDs into a string slice. +func ClusterIDToKeys(ids []ClusterID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// ClusterIDsFrom converts a string slice into a ID slice. +func ClusterIDsFrom(ids []string) ([]ClusterID, error) { + dids := make([]ClusterID, 0, len(ids)) + for _, i := range ids { + did, err := ClusterIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// ClusterIDsFromID converts a generic ID slice into a ID slice. +func ClusterIDsFromID(ids []ID) []ClusterID { + dids := make([]ClusterID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, ClusterID(i)) + } + return dids +} + +// ClusterIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func ClusterIDsFromIDRef(ids []*ID) []ClusterID { + dids := make([]ClusterID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, ClusterID(*i)) + } + } + return dids +} + +// ClusterIDsToID converts a ID slice into a generic ID slice. +func ClusterIDsToID(ids []ClusterID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// ClusterIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func ClusterIDsToIDRef(ids []*ClusterID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// ClusterIDSet represents a set of ClusterIDs +type ClusterIDSet struct { + m map[ClusterID]struct{} + s []ClusterID +} + +// NewClusterIDSet creates a new ClusterIDSet +func NewClusterIDSet() *ClusterIDSet { + return &ClusterIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *ClusterIDSet) Add(p ...ClusterID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[ClusterID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []ClusterID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *ClusterIDSet) AddRef(p *ClusterID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *ClusterIDSet) Has(p ClusterID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *ClusterIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *ClusterIDSet) All() []ClusterID { + if s == nil { + return nil + } + return append([]ClusterID{}, s.s...) 
+} + +// Clone returns a cloned set +func (s *ClusterIDSet) Clone() *ClusterIDSet { + if s == nil { + return NewClusterIDSet() + } + s2 := NewClusterIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *ClusterIDSet) Merge(s2 *ClusterIDSet) *ClusterIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/gen.go b/pkg/id/gen.go index 9f2b63284..7c4519f8a 100644 --- a/pkg/id/gen.go +++ b/pkg/id/gen.go @@ -12,6 +12,7 @@ //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_field_gen.go --name=DatasetSchemaField //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=infobox_field_gen.go --name=InfoboxField //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=tag_gen.go --name=Tag +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=cluster_gen.go --name=Cluster // Testing //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=asset_gen_test.go --name=Asset @@ -28,5 +29,6 @@ //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=user_gen_test.go --name=User //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_field_gen_test.go --name=DatasetSchemaField //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=infobox_field_gen_test.go --name=InfoboxField +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=cluster_field_gen_test.go --name=Cluster package id diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go index 7404b48cc..4c2795000 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ 
-11,18 +11,13 @@ import ( type ExtensionType string var ( - // ErrPluginExtensionDuplicated _ - ErrPluginExtensionDuplicated error = errors.New("plugin extension duplicated") - // ExtensionTypePrimitive _ - ExtensionTypePrimitive ExtensionType = "primitive" - // ExtensionTypeWidget _ - ExtensionTypeWidget ExtensionType = "widget" - // ExtensionTypeBlock _ - ExtensionTypeBlock ExtensionType = "block" - // ExtensionTypeVisualizer _ - ExtensionTypeVisualizer ExtensionType = "visualizer" - // ExtensionTypeInfobox _ - ExtensionTypeInfobox ExtensionType = "infobox" + ErrPluginExtensionDuplicated error = errors.New("plugin extension duplicated") + ExtensionTypePrimitive ExtensionType = "primitive" + ExtensionTypeWidget ExtensionType = "widget" + ExtensionTypeBlock ExtensionType = "block" + ExtensionTypeVisualizer ExtensionType = "visualizer" + ExtensionTypeInfobox ExtensionType = "infobox" + ExtensionTypeCluster ExtensionType = "cluster" ) type Extension struct { diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index a933204d3..c903e8abd 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -115,6 +115,8 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, typ = plugin.ExtensionTypeVisualizer case "infobox": typ = plugin.ExtensionTypeInfobox + case "cluster": + typ = plugin.ExtensionTypeCluster case "": return nil, nil, errors.New("type missing") default: diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index f6060c9c6..8e87ae9a6 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -244,6 +244,21 @@ func TestExtension(t *testing.T) { expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeInfobox).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), expectedPS: 
property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), }, + { + name: "cluster", + ext: Extension{ + Description: &d, + ID: "cesium", + Name: "Cesium", + Schema: nil, + Type: "cluster", + Visualizer: &cesium, + }, + sys: true, + pid: id.OfficialPluginID, + expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeCluster).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), + expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + }, { name: "empty visualizer", ext: Extension{ diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index 15c8cd1c4..db3c4e3b9 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/tmp/go-build698725398/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build612118365/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go index f30d9d31f..7b15de0ef 100644 --- a/pkg/scene/builder.go +++ b/pkg/scene/builder.go @@ -99,3 +99,8 @@ func (b *Builder) Property(p id.PropertyID) *Builder { b.scene.property = p return b } + +func (b *Builder) Clusters(cl *ClusterList) *Builder { + b.scene.clusters = cl + return b +} diff --git a/pkg/scene/cluster.go b/pkg/scene/cluster.go new file mode 100644 index 000000000..be5faa902 --- /dev/null +++ b/pkg/scene/cluster.go @@ -0,0 +1,55 @@ +package scene + +import "github.com/reearth/reearth-backend/pkg/id" + +type Cluster struct { + id id.ClusterID + name string + property id.PropertyID +} + +func NewCluster(cid id.ClusterID, name string, pid id.PropertyID) (*Cluster, 
error) { + if id.ID(cid).IsNil() { + return nil, id.ErrInvalidID + } + return &Cluster{ + id: cid, + name: name, + property: pid, + }, nil +} + +func (c *Cluster) ID() id.ClusterID { + if c == nil { + return id.ClusterID{} + } + return c.id +} + +func (c *Cluster) Name() string { + if c == nil { + return "" + } + return c.name +} + +func (c *Cluster) Property() id.PropertyID { + if c == nil { + return id.PropertyID{} + } + return c.property +} + +func (c *Cluster) Rename(name string) { + if c == nil { + return + } + c.name = name +} + +func (c *Cluster) UpdateProperty(pid id.PropertyID) { + if c == nil { + return + } + c.property = pid +} diff --git a/pkg/scene/cluster_list.go b/pkg/scene/cluster_list.go new file mode 100644 index 000000000..12d88e171 --- /dev/null +++ b/pkg/scene/cluster_list.go @@ -0,0 +1,67 @@ +package scene + +import "github.com/reearth/reearth-backend/pkg/id" + +type ClusterList struct { + clusters []*Cluster +} + +func NewClusterList() *ClusterList { + return &ClusterList{} +} + +func NewClusterListFrom(clusters []*Cluster) *ClusterList { + return &ClusterList{clusters: append([]*Cluster{}, clusters...)} +} + +func (tl *ClusterList) Clusters() []*Cluster { + if tl == nil { + return nil + } + return append([]*Cluster{}, tl.clusters...) +} + +func (tl *ClusterList) Has(tid id.ClusterID) bool { + if tl == nil { + return false + } + for _, cluster := range tl.clusters { + if cluster.ID() == tid { + return true + } + } + return false +} + +func (tl *ClusterList) Add(clusters ...*Cluster) { + if tl == nil { + return + } + tl.clusters = append(tl.clusters, clusters...) 
+} + +func (tl *ClusterList) Get(cid id.ClusterID) *Cluster { + if tl == nil { + return nil + } + for _, c := range tl.clusters { + if c.ID() == cid { + return c + } + } + return nil +} + +func (tl *ClusterList) Remove(clusters ...id.ClusterID) { + if tl == nil { + return + } + for i := 0; i < len(tl.clusters); i++ { + for _, tid := range clusters { + if tl.clusters[i].id == tid { + tl.clusters = append(tl.clusters[:i], tl.clusters[i+1:]...) + i-- + } + } + } +} diff --git a/pkg/scene/cluster_list_test.go b/pkg/scene/cluster_list_test.go new file mode 100644 index 000000000..be15a4297 --- /dev/null +++ b/pkg/scene/cluster_list_test.go @@ -0,0 +1,213 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestList_Add(t *testing.T) { + c1, _ := NewCluster(id.NewClusterID(), "c1", id.NewPropertyID()) + c2, _ := NewCluster(id.NewClusterID(), "c2", id.NewPropertyID()) + type args struct { + clusters []*Cluster + } + tests := []struct { + name string + list *ClusterList + args args + want *ClusterList + }{ + { + name: "should add a new cluster", + list: &ClusterList{clusters: []*Cluster{c1}}, + args: args{clusters: []*Cluster{c2}}, + want: NewClusterListFrom([]*Cluster{c1, c2}), + }, + { + name: "nil_list: should not add a new cluster", + list: nil, + args: args{clusters: []*Cluster{c1}}, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + tc.list.Add(tc.args.clusters...) 
+ assert.Equal(tt, tc.want, tc.list) + }) + } +} + +func TestList_Clusters(t *testing.T) { + c1, _ := NewCluster(id.NewClusterID(), "ccc", id.NewPropertyID()) + c2, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + tests := []struct { + name string + list *ClusterList + want []*Cluster + }{ + { + name: "should return clusters", + list: NewClusterListFrom([]*Cluster{c1, c2}), + want: []*Cluster{c1, c2}, + }, + { + name: "nil_list: should return nil", + list: nil, + want: nil, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.want, tc.list.Clusters()) + }) + } +} + +func TestList_Has(t *testing.T) { + c1, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + + type args struct { + tid id.ClusterID + } + tests := []struct { + name string + list *ClusterList + args args + want bool + }{ + { + name: "should return true", + list: NewClusterListFrom([]*Cluster{c1}), + args: args{ + tid: c1.ID(), + }, + want: true, + }, + { + name: "not existing: should return false", + list: NewClusterListFrom([]*Cluster{c1}), + args: args{ + tid: id.NewClusterID(), + }, + want: false, + }, + { + name: "nil_list: should return false", + args: args{ + tid: c1.ID(), + }, + want: false, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + assert.Equal(tt, tc.want, tc.list.Has(tc.args.tid)) + }) + } +} + +func TestList_Remove(t *testing.T) { + c1, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + c2, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + c3, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + + type args struct { + cluster id.ClusterID + } + tests := []struct { + name string + list *ClusterList + args args + want *ClusterList + }{ + { + name: "should remove a cluster", + list: NewClusterListFrom([]*Cluster{c1, c2, c3}), + args: args{ + cluster: c3.ID(), + }, + want: NewClusterListFrom([]*Cluster{c1, c2}), + }, + { + 
name: "not existing: should remove nothing", + list: NewClusterListFrom([]*Cluster{c1, c2}), + args: args{ + cluster: c3.ID(), + }, + want: NewClusterListFrom([]*Cluster{c1, c2}), + }, + { + name: "nil_list: return nothing", + args: args{ + cluster: c1.ID(), + }, + want: nil, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.list.Remove(tc.args.cluster) + assert.Equal(tt, tc.want, tc.list) + }) + } +} + +func TestClusterList_Get(t *testing.T) { + cid1 := id.NewClusterID() + cid2 := id.NewClusterID() + cid3 := id.NewClusterID() + c1, _ := NewCluster(cid1, "xxx", id.NewPropertyID()) + c2, _ := NewCluster(cid2, "zzz", id.NewPropertyID()) + c3, _ := NewCluster(cid3, "yyy", id.NewPropertyID()) + type args struct { + cid id.ClusterID + } + tests := []struct { + name string + list *ClusterList + args args + want *Cluster + }{ + { + name: "should get a cluster", + list: NewClusterListFrom([]*Cluster{c1, c2, c3}), + args: args{ + cid: cid1, + }, + want: c1, + }, + { + name: "not existing: should get nil", + list: NewClusterListFrom([]*Cluster{c2, c3}), + args: args{ + cid: cid1, + }, + want: nil, + }, + { + name: "nil_list: should return nil", + list: nil, + args: args{ + cid: cid1, + }, + want: nil, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got := tc.list.Get(tc.args.cid) + assert.Equal(tt, tc.want, got) + }) + } +} diff --git a/pkg/scene/cluster_test.go b/pkg/scene/cluster_test.go new file mode 100644 index 000000000..1df000806 --- /dev/null +++ b/pkg/scene/cluster_test.go @@ -0,0 +1,244 @@ +package scene + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestCluster_ID(t *testing.T) { + cid := id.NewClusterID() + clusterA := &Cluster{ + id: cid, + } + tests := []struct { + name string + cluster *Cluster + want id.ClusterID + }{ + { + name: "should return cluster id", + cluster: 
clusterA, + want: cid, + }, + { + name: "should return empty if cluster is nil", + cluster: nil, + want: id.ClusterID{}, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got := tc.cluster.ID() + assert.Equal(tt, tc.want, got) + }) + } +} +func TestCluster_Name(t *testing.T) { + clusterA := &Cluster{ + name: "clusterA", + } + tests := []struct { + name string + cluster *Cluster + want string + }{ + { + name: "should return cluster name", + cluster: clusterA, + want: "clusterA", + }, + { + name: "should return empty if cluster is nil", + cluster: nil, + want: "", + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got := tc.cluster.Name() + assert.Equal(tt, tc.want, got) + }) + } +} +func TestCluster_Property(t *testing.T) { + propertyId := id.NewPropertyID() + clusterA := &Cluster{ + property: propertyId, + } + tests := []struct { + name string + cluster *Cluster + want id.PropertyID + }{ + { + name: "should return cluster property", + cluster: clusterA, + want: propertyId, + }, + { + name: "should return empty cluster property", + cluster: nil, + want: id.PropertyID{}, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got := tc.cluster.Property() + assert.Equal(tt, tc.want, got) + }) + } +} + +func TestNew(t *testing.T) { + propertyId := id.NewPropertyID() + clusterId := id.NewClusterID() + type args struct { + cid id.ClusterID + name string + pid id.PropertyID + } + tests := []struct { + name string + args args + want *Cluster + wantErr bool + }{ + { + name: "should create a new cluster", + args: args{ + cid: clusterId, + name: "ccc", + pid: propertyId, + }, + want: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId, + }, + wantErr: false, + }, + { + name: "should return invalid id error", + args: args{ + cid: id.ClusterID{}, + name: "xxx", + pid: propertyId, + }, + want: nil, + 
wantErr: true, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got, err := NewCluster(tc.args.cid, tc.args.name, tc.args.pid) + assert.Equal(tt, tc.wantErr, err != nil) + assert.Equal(tt, tc.want, got) + }) + } +} + +func TestCluster_Rename(t *testing.T) { + propertyId := id.NewPropertyID() + clusterId := id.NewClusterID() + + type args struct { + name string + } + tests := []struct { + name string + cluster *Cluster + args args + want *Cluster + }{ + { + name: "should change the name", + cluster: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId, + }, + args: args{ + name: "new name", + }, + want: &Cluster{ + id: clusterId, + name: "new name", + property: propertyId, + }, + }, + { + name: "shouldn't change the name", + args: args{ + name: "xxx", + }, + want: nil, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.cluster.Rename(tc.args.name) + assert.Equal(tt, tc.want, tc.cluster) + }) + } +} + +func TestCluster_UpdateProperty(t *testing.T) { + propertyId := id.NewPropertyID() + propertyId2 := id.NewPropertyID() + clusterId := id.NewClusterID() + + type args struct { + property id.PropertyID + } + tests := []struct { + name string + cluster *Cluster + args args + want *Cluster + }{ + { + name: "should update the property", + cluster: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId, + }, + args: args{ + property: propertyId2, + }, + want: &Cluster{ + id: clusterId, + name: "ccc", + property: propertyId2, + }, + }, + { + name: "shouldn't update the property", + args: args{ + property: propertyId2, + }, + want: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.cluster.UpdateProperty(tc.args.property) + assert.Equal(tt, tc.want, tc.cluster) + }) + } +} diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index 68554962e..182790d94 100644 --- 
a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -19,6 +19,7 @@ type Scene struct { pluginSystem *PluginSystem updatedAt time.Time property id.PropertyID + clusters *ClusterList } func (s *Scene) ID() id.SceneID { @@ -119,3 +120,10 @@ func (s *Scene) Properties() []id.PropertyID { ids = append(ids, s.widgetSystem.Properties()...) return ids } + +func (s *Scene) Clusters() *ClusterList { + if s == nil { + return nil + } + return s.clusters +} diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index 4c11e326e..f72a0ea28 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -94,3 +94,29 @@ func TestSceneNil(t *testing.T) { assert.Nil(t, s.PluginSystem()) assert.True(t, s.Property().IsNil()) } + +func TestScene_Clusters(t *testing.T) { + c1, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + + tests := []struct { + name string + scene *Scene + want *ClusterList + }{ + { + name: "should return a cluster list", + scene: &Scene{ + clusters: NewClusterListFrom([]*Cluster{c1}), + }, + want: NewClusterListFrom([]*Cluster{c1}), + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.want, tc.scene.Clusters()) + }) + } +} diff --git a/schema.graphql b/schema.graphql index 43a4aae5a..567207e64 100644 --- a/schema.graphql +++ b/schema.graphql @@ -366,6 +366,7 @@ type Scene implements Node { ): DatasetSchemaConnection! @goField(forceResolver: true) tagIds: [ID!]! tags: [Tag!]! @goField(forceResolver: true) + clusters: [Cluster!]! } enum SceneLockMode { @@ -812,6 +813,12 @@ type TagGroup implements Tag { union Tags = TagItem | TagGroup +type Cluster { + id: ID! + name: String! + property: ID! +} + # InputType input CreateAssetInput { @@ -1210,6 +1217,24 @@ input RemoveTagInput { tagID: ID! } +input AddClusterInput { + sceneId: ID! + name: String! + propertyId: ID! +} + +input UpdateClusterInput { + clusterId: ID! + sceneId: ID! 
+ name: String + propertyId: ID +} + +input RemoveClusterInput { + clusterId: ID! + sceneId: ID! +} + # Payload type CreateAssetPayload { @@ -1440,6 +1465,21 @@ type RemoveTagPayload{ tagId: ID! } +type AddClusterPayload { + scene: Scene! + cluster: Cluster! +} + +type UpdateClusterPayload { + scene: Scene! + cluster: Cluster! +} + +type RemoveClusterPayload{ + scene: Scene! + clusterId: ID! +} + # Connection type AssetConnection { @@ -1577,6 +1617,9 @@ type Mutation { uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload uploadPlugin(input: UploadPluginInput!): UploadPluginPayload upgradePlugin(input: UpgradePluginInput!): UpgradePluginPayload + addCluster(input: AddClusterInput!): AddClusterPayload + updateCluster(input: UpdateClusterInput!): UpdateClusterPayload + removeCluster(input: RemoveClusterInput!): RemoveClusterPayload # Dataset updateDatasetSchema( From 2b57b1a0b61cb14a1736a9cc75dd14d6146dc868 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 26 Nov 2021 11:10:47 +0900 Subject: [PATCH 113/253] chore: fix plugin manifest JSON schema --- plugin_manifest_schema.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugin_manifest_schema.json b/plugin_manifest_schema.json index 41294f965..a149f9b97 100644 --- a/plugin_manifest_schema.json +++ b/plugin_manifest_schema.json @@ -319,7 +319,8 @@ "widget", "block", "visualizer", - "infobox" + "infobox", + "cluster" ] }, "singleOnly": { @@ -437,4 +438,4 @@ } }, "$ref": "#/definitions/root" -} \ No newline at end of file +} From 8693b4833399c06958bab2c553f19bd4e0341603 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 26 Nov 2021 12:27:42 +0900 Subject: [PATCH 114/253] feat: add fields of scene property for terrain --- pkg/builtin/manifest.yml | 51 +++++++++++++++++++++++++++---------- pkg/builtin/manifest_ja.yml | 12 +++++++++ 2 files changed, 50 insertions(+), 13 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index a9813f1e5..d35950dd6 100644 
--- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -22,22 +22,47 @@ extensions: type: bool title: Terrain description: Show elevation when close to the surface. - - id: terrain - type: bool - title: Terrain - description: Show elevation when close to the surface. - # NOT SUPPORTED YET - # - id: terrainExaggeration - # type: number - # title: Terrain exaggeration - # defaultValue: 1 - # - id: terrainExaggerationRelativeHeight - # type: number - # title: Terrain exaggeration relative height + - id: terrainType + type: string + title: Terrain Type + description: Specify terrain type. + choices: + - key: cesium + label: Cesium + - key: arcgis + label: ArcGIS + availableIf: + field: terrain + type: bool + value: true + - id: terrainExaggeration + type: number + title: Terrain exaggeration + description: A scalar used to exaggerate the terrain. Defaults to 1.0 (no exaggeration). A value of 2.0 scales the terrain by 2x. A value of 0.0 makes the terrain completely flat. + defaultValue: 1 + suffix: x + availableIf: + field: terrain + type: bool + value: true + - id: terrainExaggerationRelativeHeight + type: number + title: Terrain exaggeration relative height + description: The height from which terrain is exaggerated. Defaults to 0.0. Terrain that is above this height will scale upwards and terrain that is below this height will scale downwards. + defaultValue: 0 + suffix: m + availableIf: + field: terrain + type: bool + value: true - id: depthTestAgainstTerrain type: bool title: Hide objects under terrain description: Hides objects under the terrain. Depending on the loading status of the terrain, objects may be shown or hidden. 
+ availableIf: + field: terrain + type: bool + value: true - id: skybox type: bool title: Sky @@ -1656,4 +1681,4 @@ extensions: - id: layer title: Layer type: ref - ui: layer \ No newline at end of file + ui: layer diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index c9d12dcb9..c3425182f 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -14,9 +14,21 @@ extensions: terrain: title: ๅœฐๅฝข description: ๆœ‰ๅŠนใซใ™ใ‚‹ใจใ€ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใŒ่ชญใฟ่พผใฟใ“ใพใ‚Œใ€็ซ‹ไฝ“็š„ใชๅœฐๅฝขใ‚’่กจ็พใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + terrainType: + title: ๅœฐๅฝขใฎ็จฎ้กž + description: ๅœฐๅฝขใฎ็จฎ้กžใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + terrainExaggeration: + title: ๅœฐๅฝขใฎๅผท่ชฟ + description: ๅœฐๅฝขใฎๅผท่ชฟใ‚’่จญๅฎšใ—ใพใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ1.0๏ผˆ่ช‡ๅผตใ—ใชใ„๏ผ‰ใงใ™ใ€‚2.0ใฏใ€ๅœฐๅฝขใ‚’2ๅ€ใซๆ‹กๅคงใ—ใพใ™ใ€‚0.0ใฎๅ€คใฏๅœฐๅฝขใ‚’ๅฎŒๅ…จใซๅนณใ‚‰ใซใ—ใพใ™ใ€‚ + terrainExaggerationRelativeHeight: + title: ๅœฐๅฝขใฎๅผท่ชฟใฎๅŸบๆบ– + description: ๅœฐๅฝขใŒ่ช‡ๅผตใ•ใ‚Œใ‚‹้ซ˜ใ•ใงใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ0.0ใงใ™ใ€‚ใ“ใฎ้ซ˜ใ•ใ‚ˆใ‚ŠไธŠใซใ‚ใ‚‹ๅœฐๅฝขใฏไธŠๆ–นใซใ€ใ“ใฎ้ซ˜ใ•ใ‚ˆใ‚Šไธ‹ใซใ‚ใ‚‹ๅœฐๅฝขใฏไธ‹ๆ–นใซใ‚นใ‚ฑใƒผใƒซใ•ใ‚Œใพใ™ใ€‚ depthTestAgainstTerrain: title: ๅœฐๅฝขใฎไธ‹ใ‚’้ž่กจ็คบ description: ๅœฐๅฝขใฎไธ‹ใซใ‚ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’้ž่กจ็คบใซใ—ใพใ™ใ€‚ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใฎ่ชญใฟ่พผใฟ็Šถๆณใซใ‚ˆใฃใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒ่กจ็คบใ•ใ‚ŒใŸใ‚Š้š ใ‚ŒใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ + choices: + cesium: Cesium + arcgis: ArcGIS skybox: title: ๅฎ‡ๅฎ™ใฎ่กจ็คบ description: ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ From 5e3d253ea63f2aaa356d7af528fbb8556d4b40d8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 26 Nov 2021 12:31:30 +0900 Subject: [PATCH 115/253] fix: terrain fields of scene property --- pkg/builtin/manifest.yml | 5 +++-- pkg/builtin/manifest_ja.yml | 3 --- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index d35950dd6..3a38d8b30 
100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -26,11 +26,12 @@ extensions: type: string title: Terrain Type description: Specify terrain type. + defaultValue: cesium choices: - key: cesium - label: Cesium + label: Cesium World Terrain - key: arcgis - label: ArcGIS + label: ArcGIS Terrain availableIf: field: terrain type: bool diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index c3425182f..4eb66a968 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -26,9 +26,6 @@ extensions: depthTestAgainstTerrain: title: ๅœฐๅฝขใฎไธ‹ใ‚’้ž่กจ็คบ description: ๅœฐๅฝขใฎไธ‹ใซใ‚ใ‚‹ใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใ‚’้ž่กจ็คบใซใ—ใพใ™ใ€‚ๆจ™้ซ˜ใƒ‡ใƒผใ‚ฟใฎ่ชญใฟ่พผใฟ็Šถๆณใซใ‚ˆใฃใฆใ‚ชใƒ–ใ‚ธใ‚งใ‚ฏใƒˆใŒ่กจ็คบใ•ใ‚ŒใŸใ‚Š้š ใ‚ŒใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใ‚ใ‚Šใพใ™ใ€‚ - choices: - cesium: Cesium - arcgis: ArcGIS skybox: title: ๅฎ‡ๅฎ™ใฎ่กจ็คบ description: ๅฎ‡ๅฎ™็ฉบ้–“ใฎ่กจ็คบใ‚’ๅˆ‡ใ‚Šๆ›ฟใˆใพใ™ใ€‚ From 73143b7bb598af04d0ce84177eaa1b91cb1468b5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 26 Nov 2021 18:00:01 +0900 Subject: [PATCH 116/253] refactor: make property.Value and dataset.Value independent in pkg/value (#77) --- internal/adapter/gql/generated.go | 6 +- .../adapter/gql/gqlmodel/convert_dataset.go | 70 +- .../adapter/gql/gqlmodel/convert_property.go | 186 +-- .../adapter/gql/gqlmodel/convert_value.go | 156 ++ internal/adapter/gql/gqlmodel/models.go | 4 +- internal/adapter/gql/gqlmodel/models_gen.go | 2 +- .../adapter/gql/resolver_mutation_property.go | 19 +- internal/infrastructure/memory/dataset.go | 6 +- .../infrastructure/memory/dataset_schema.go | 2 +- .../infrastructure/mongo/dataset_schema.go | 2 +- .../infrastructure/mongo/mongodoc/dataset.go | 14 +- .../mongo/mongodoc/dataset_schema.go | 15 +- .../infrastructure/mongo/mongodoc/property.go | 15 +- .../mongo/mongodoc/property_schema.go | 2 +- .../infrastructure/mongo/mongodoc/util.go | 3 + internal/usecase/interactor/dataset.go | 10 +- 
internal/usecase/interactor/property.go | 2 +- internal/usecase/repo/dataset_schema.go | 2 +- pkg/dataset/builder.go | 20 +- pkg/dataset/csvparser.go | 36 +- pkg/dataset/csvparser_test.go | 14 +- pkg/dataset/dataset.go | 45 +- pkg/dataset/dataset_test.go | 89 + pkg/dataset/field.go | 41 +- pkg/dataset/graph_iterator.go | 11 +- pkg/dataset/graph_iterator_test.go | 3 +- pkg/dataset/list.go | 27 +- pkg/dataset/list_test.go | 6 +- pkg/dataset/schema.go | 19 +- pkg/dataset/schema_builder.go | 31 +- pkg/dataset/schema_field.go | 13 +- pkg/dataset/schema_field_builder.go | 14 +- pkg/dataset/schema_field_diff.go | 6 +- pkg/dataset/source.go | 9 - pkg/dataset/value.go | 385 ++--- pkg/dataset/value_optional.go | 78 + pkg/dataset/value_optional_test.go | 303 ++++ pkg/dataset/value_test.go | 63 - pkg/layer/decoding/common.go | 120 +- pkg/layer/decoding/reearth.go | 11 +- pkg/layer/decoding/reearth_test.go | 6 +- pkg/layer/encoding/common.go | 11 +- pkg/layer/encoding/common_test.go | 19 +- pkg/layer/encoding/czml.go | 242 +-- pkg/layer/encoding/czml_test.go | 731 +++------ pkg/layer/encoding/geojson.go | 149 +- pkg/layer/encoding/geojson_test.go | 607 +++---- pkg/layer/encoding/kml.go | 392 ++--- pkg/layer/encoding/kml_test.go | 758 ++++----- pkg/layer/encoding/shp.go | 148 +- pkg/layer/encoding/shp_test.go | 400 ++--- pkg/layer/layerops/initializer.go | 2 +- pkg/plugin/manifest/convert.go | 10 +- pkg/plugin/manifest/convert_test.go | 2 +- pkg/plugin/manifest/parser_test.go | 4 +- pkg/property/builder_test.go | 18 +- pkg/property/condition_test.go | 10 +- pkg/property/field.go | 53 +- pkg/property/field_builder.go | 45 +- pkg/property/field_builder_test.go | 28 +- pkg/property/field_test.go | 27 +- pkg/property/group_builder_test.go | 8 +- pkg/property/group_list_test.go | 20 +- pkg/property/group_test.go | 46 +- pkg/property/initializer.go | 2 +- pkg/property/initializer_test.go | 19 +- pkg/property/item_test.go | 3 +- pkg/property/link_test.go | 6 +- 
pkg/property/merged_test.go | 58 +- pkg/property/property_test.go | 12 +- pkg/property/schema_field.go | 12 +- pkg/property/schema_field_builder.go | 2 +- pkg/property/schema_field_test.go | 27 +- pkg/property/schema_group_builder_test.go | 8 +- pkg/property/sealed.go | 34 +- pkg/property/sealed_test.go | 442 +++-- pkg/property/value.go | 342 ++-- pkg/property/value_camera.go | 70 + pkg/property/value_camera_test.go | 51 + pkg/property/value_converter.go | 41 - pkg/property/value_converter_test.go | 120 -- pkg/property/value_dataset.go | 67 + pkg/property/value_dataset_test.go | 295 ++++ pkg/property/value_optional.go | 78 + pkg/property/value_optional_test.go | 303 ++++ pkg/property/value_test.go | 354 +--- pkg/property/value_type.go | 603 ------- pkg/property/value_type_test.go | 1426 ----------------- pkg/property/value_typography.go | 133 ++ pkg/property/value_typography_test.go | 204 +++ pkg/scene/builder/builder_test.go | 43 +- pkg/scene/builder/encoder_test.go | 13 +- pkg/value/bool.go | 32 + pkg/value/coordinates.go | 78 + pkg/value/latlng.go | 55 + pkg/value/latlng_test.go | 41 + pkg/value/latlngheight.go | 60 + pkg/value/latlngheight_test.go | 43 + pkg/value/number.go | 118 ++ pkg/value/optional.go | 61 + pkg/value/optional_test.go | 330 ++++ pkg/value/polygon.go | 59 + pkg/value/rect.go | 49 + pkg/value/ref.go | 40 + pkg/value/string.go | 32 + pkg/value/type.go | 53 + pkg/value/type_test.go | 117 ++ pkg/value/url.go | 52 + pkg/value/value.go | 82 + pkg/value/value_test.go | 264 +++ schema.graphql | 2 +- 111 files changed, 5631 insertions(+), 6301 deletions(-) create mode 100644 internal/adapter/gql/gqlmodel/convert_value.go create mode 100644 pkg/dataset/dataset_test.go delete mode 100644 pkg/dataset/source.go create mode 100644 pkg/dataset/value_optional.go create mode 100644 pkg/dataset/value_optional_test.go delete mode 100644 pkg/dataset/value_test.go create mode 100644 pkg/property/value_camera.go create mode 100644 pkg/property/value_camera_test.go 
delete mode 100644 pkg/property/value_converter.go delete mode 100644 pkg/property/value_converter_test.go create mode 100644 pkg/property/value_dataset.go create mode 100644 pkg/property/value_dataset_test.go create mode 100644 pkg/property/value_optional.go create mode 100644 pkg/property/value_optional_test.go delete mode 100644 pkg/property/value_type.go delete mode 100644 pkg/property/value_type_test.go create mode 100644 pkg/property/value_typography.go create mode 100644 pkg/property/value_typography_test.go create mode 100644 pkg/value/bool.go create mode 100644 pkg/value/coordinates.go create mode 100644 pkg/value/latlng.go create mode 100644 pkg/value/latlng_test.go create mode 100644 pkg/value/latlngheight.go create mode 100644 pkg/value/latlngheight_test.go create mode 100644 pkg/value/number.go create mode 100644 pkg/value/optional.go create mode 100644 pkg/value/optional_test.go create mode 100644 pkg/value/polygon.go create mode 100644 pkg/value/rect.go create mode 100644 pkg/value/ref.go create mode 100644 pkg/value/string.go create mode 100644 pkg/value/type.go create mode 100644 pkg/value/type_test.go create mode 100644 pkg/value/url.go create mode 100644 pkg/value/value.go create mode 100644 pkg/value/value_test.go diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 4522471fc..869bd4920 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -6667,7 +6667,7 @@ type PropertyGroupList { } type PropertyField { - id: PropertySchemaFieldID! + id: String! parentId: ID! schemaId: PropertySchemaID! fieldId: PropertySchemaFieldID! 
@@ -22904,9 +22904,9 @@ func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(string) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNString2string(ctx, field.Selections, res) } func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { diff --git a/internal/adapter/gql/gqlmodel/convert_dataset.go b/internal/adapter/gql/gqlmodel/convert_dataset.go index 1239964fc..3c0969ac8 100644 --- a/internal/adapter/gql/gqlmodel/convert_dataset.go +++ b/internal/adapter/gql/gqlmodel/convert_dataset.go @@ -1,65 +1,13 @@ package gqlmodel import ( - "net/url" - "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/value" ) func ToDatasetValue(v *dataset.Value) *interface{} { - var res interface{} - if v == nil { - return nil - } - switch v2 := v.Value().(type) { - case bool: - res = v2 - case float64: - res = v2 - case string: - res = v2 - case id.ID: - res = v2.String() - case *url.URL: - res = v2.String() - case dataset.LatLng: - res = LatLng{ - Lat: v2.Lat, - Lng: v2.Lng, - } - case dataset.LatLngHeight: - res = LatLngHeight{ - Lat: v2.Lat, - Lng: v2.Lng, - Height: v2.Height, - } - } - return &res -} - -func ToDatasetValueType(t dataset.ValueType) ValueType { - switch t { - case dataset.ValueTypeBool: - return ValueTypeBool - case dataset.ValueTypeNumber: - return ValueTypeNumber - case dataset.ValueTypeString: - return ValueTypeString - case dataset.ValueTypeLatLng: - return ValueTypeLatlng - case dataset.ValueTypeLatLngHeight: - return ValueTypeLatlngheight - case dataset.ValueTypeURL: - return ValueTypeURL - case dataset.ValueTypeRef: - return 
ValueTypeRef - } - return "" -} - -func ToDatasetSource(ds dataset.Source) string { - return ds.String() + i := valueInterfaceToGqlValue(v.Value()) + return &i } func ToDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { @@ -70,9 +18,9 @@ func ToDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { return &DatasetField{ SchemaID: parent.Schema().ID(), FieldID: f.Field().ID(), - Type: ToDatasetValueType(f.Type()), + Type: ToValueType(value.Type(f.Type())), Value: ToDatasetValue(f.Value()), - Source: ToDatasetSource(f.Source()), + Source: f.Source(), } } @@ -90,7 +38,7 @@ func ToDataset(ds *dataset.Dataset) *Dataset { return &Dataset{ ID: ds.ID().ID(), SchemaID: ds.Schema().ID(), - Source: ToDatasetSource(ds.Source()), + Source: ds.Source(), Fields: fields, } } @@ -106,16 +54,16 @@ func ToDatasetSchema(ds *dataset.Schema) *DatasetSchema { fields = append(fields, &DatasetSchemaField{ ID: f.ID().ID(), Name: f.Name(), - Type: ToDatasetValueType(f.Type()), + Type: ToValueType(value.Type(f.Type())), SchemaID: ds.ID().ID(), - Source: ToDatasetSource(f.Source()), + Source: f.Source(), RefID: f.Ref().IDRef(), }) } return &DatasetSchema{ ID: ds.ID().ID(), - Source: ToDatasetSource(ds.Source()), + Source: ds.Source(), Name: ds.Name(), SceneID: ds.Scene().ID(), RepresentativeFieldID: ds.RepresentativeField().IDRef().IDRef(), diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index 746e4c82c..ce36a8ad6 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -1,11 +1,11 @@ package gqlmodel import ( - "net/url" "strings" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/value" ) func ToPropertyValue(v *property.Value) *interface{} { @@ -14,27 +14,6 @@ func ToPropertyValue(v *property.Value) *interface{} { return nil } switch v2 := 
v.Value().(type) { - case bool: - res = v2 - case float64: - res = v2 - case string: - res = v2 - case id.ID: - res = v2.String() - case *url.URL: - res = v2.String() - case property.LatLng: - res = LatLng{ - Lat: v2.Lat, - Lng: v2.Lng, - } - case property.LatLngHeight: - res = LatLngHeight{ - Lat: v2.Lat, - Lng: v2.Lng, - Height: v2.Height, - } case property.Camera: res = Camera{ Lat: v2.Lat, @@ -56,30 +35,8 @@ func ToPropertyValue(v *property.Value) *interface{} { Italic: v2.Italic, Underline: v2.Underline, } - case property.Coordinates: - res2 := make([]LatLngHeight, 0, len(v2)) - for _, c := range v2 { - res2 = append(res2, LatLngHeight{ - Lat: c.Lat, - Lng: c.Lng, - Height: c.Height, - }) - } - res = res2 - case property.Polygon: - res2 := make([][]LatLngHeight, 0, len(v2)) - for _, d := range v2 { - coord := make([]LatLngHeight, 0, len(d)) - for _, c := range d { - coord = append(coord, LatLngHeight{ - Lat: c.Lat, - Lng: c.Lng, - Height: c.Height, - }) - } - res2 = append(res2, coord) - } - res = res2 + default: + res = valueInterfaceToGqlValue(v2) } return &res } @@ -106,88 +63,8 @@ func ToTextAlign(t *property.TextAlign) *TextAlign { return &t3 } -func ToPropertyValueType(t property.ValueType) ValueType { - switch t { - case property.ValueTypeBool: - return ValueTypeBool - case property.ValueTypeNumber: - return ValueTypeNumber - case property.ValueTypeString: - return ValueTypeString - case property.ValueTypeLatLng: - return ValueTypeLatlng - case property.ValueTypeLatLngHeight: - return ValueTypeLatlngheight - case property.ValueTypeURL: - return ValueTypeURL - case property.ValueTypeRef: - return ValueTypeRef - case property.ValueTypeCamera: - return ValueTypeCamera - case property.ValueTypeTypography: - return ValueTypeTypography - case property.ValueTypeCoordinates: - return ValueTypeCoordinates - case property.ValueTypePolygon: - return ValueTypePolygon - case property.ValueTypeRect: - return ValueTypeRect - } - return "" -} - -func 
FromPropertyValueType(t ValueType) property.ValueType { - switch t { - case ValueTypeBool: - return property.ValueTypeBool - case ValueTypeNumber: - return property.ValueTypeNumber - case ValueTypeString: - return property.ValueTypeString - case ValueTypeLatlng: - return property.ValueTypeLatLng - case ValueTypeLatlngheight: - return property.ValueTypeLatLngHeight - case ValueTypeURL: - return property.ValueTypeURL - case ValueTypeRef: - return property.ValueTypeRef - case ValueTypeCamera: - return property.ValueTypeCamera - case ValueTypeTypography: - return property.ValueTypeTypography - case ValueTypeCoordinates: - return property.ValueTypeCoordinates - case ValueTypePolygon: - return property.ValueTypePolygon - case ValueTypeRect: - return property.ValueTypeRect - } - return "" -} - -func FromPropertyValueAndType(v interface{}, t ValueType) (*property.Value, bool) { +func FromPropertyValueAndType(v interface{}, t ValueType) *property.Value { switch v2 := v.(type) { - case LatLng: - v = property.LatLng{ - Lat: v2.Lat, - Lng: v2.Lng} - case LatLngHeight: - v = property.LatLngHeight{ - Lat: v2.Lat, - Lng: v2.Lng, - Height: v2.Height} - case *LatLng: - v = property.LatLng{ - Lat: v2.Lat, - Lng: v2.Lng, - } - case *LatLngHeight: - v = property.LatLngHeight{ - Lat: v2.Lat, - Lng: v2.Lng, - Height: v2.Height, - } case *Camera: v = property.Camera{ Lat: v2.Lat, @@ -209,39 +86,10 @@ func FromPropertyValueAndType(v interface{}, t ValueType) (*property.Value, bool Italic: v2.Italic, Underline: v2.Underline, } - case []LatLngHeight: - res := make([]property.LatLngHeight, 0, len(v2)) - for _, c := range v2 { - res = append(res, property.LatLngHeight{ - Lat: c.Lat, - Lng: c.Lng, - Height: c.Height, - }) - } - v = res - case [][]LatLngHeight: - res := make([][]property.LatLngHeight, 0, len(v2)) - for _, d := range v2 { - coord := make([]property.LatLngHeight, 0, len(d)) - for _, c := range d { - coord = append(coord, property.LatLngHeight{ - Lat: c.Lat, - Lng: c.Lng, - 
Height: c.Height, - }) - } - res = append(res, coord) - } - v = res - case *Rect: - v = property.Rect{ - West: v2.West, - East: v2.East, - North: v2.North, - South: v2.South, - } + default: + v = gqlValueToValueInterface(v2) } - return FromPropertyValueType(t).ValueFrom(v) + return property.ValueType(FromValueType(t)).ValueFrom(v) } func fromTextAlign(t *TextAlign) *property.TextAlign { @@ -280,13 +128,12 @@ func ToPropertyField(f *property.Field, parent *property.Property, gl *property. } return &PropertyField{ - // TODO: PropertySchemaFieldID is mismatched - ID: id.PropertySchemaFieldID(propertyFieldID(parent, gl, g, f)), + ID: propertyFieldID(parent, gl, g, f), ParentID: parent.ID().ID(), SchemaID: parent.Schema(), FieldID: f.Field(), Value: ToPropertyValue(f.Value()), - Type: ToPropertyValueType(f.Type()), + Type: ToValueType(value.Type(f.Type())), Links: links, } } @@ -402,7 +249,7 @@ func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField { return &PropertySchemaField{ FieldID: f.ID(), - Type: ToPropertyValueType(f.Type()), + Type: ToValueType(value.Type(f.Type())), Title: f.Title().String(), Description: f.Description().String(), Prefix: stringToRef(f.Prefix()), @@ -512,7 +359,7 @@ func ToMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *Merg SchemaID: s, Links: ToPropertyFieldLinks(f.Links), Value: ToPropertyValue(f.Value), - Type: ToPropertyValueType(f.Type), + Type: ToValueType(value.Type(f.Type)), Overridden: f.Overridden, } } @@ -604,7 +451,7 @@ func ToPropertyConditon(c *property.Condition) *PropertyCondition { return &PropertyCondition{ FieldID: c.Field, Value: ToPropertyValue(c.Value), - Type: ToPropertyValueType(c.Value.Type()), + Type: ToValueType(value.Type(c.Value.Type())), } } @@ -641,3 +488,12 @@ func propertyFieldID(property *property.Property, groupList *property.GroupList, return sb.String() } + +func getPropertySchemaFieldIDFromGQLPropertyFieldID(i string) string { + const sep = "_" + s := 
strings.Split(i, sep) + if len(s) > 0 { + return s[len(s)-1] + } + return "" +} diff --git a/internal/adapter/gql/gqlmodel/convert_value.go b/internal/adapter/gql/gqlmodel/convert_value.go new file mode 100644 index 000000000..3ebe8202a --- /dev/null +++ b/internal/adapter/gql/gqlmodel/convert_value.go @@ -0,0 +1,156 @@ +package gqlmodel + +import ( + "net/url" + "strings" + + "github.com/reearth/reearth-backend/pkg/value" +) + +func valueInterfaceToGqlValue(v interface{}) interface{} { + if v == nil { + return nil + } + switch v2 := v.(type) { + case bool: + return v2 + case float64: + return v2 + case string: + return v2 + case *url.URL: + return v2.String() + case value.LatLng: + return LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case value.LatLngHeight: + return LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case *value.LatLng: + return LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case *value.LatLngHeight: + return LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case []value.LatLngHeight: + res := make([]LatLngHeight, 0, len(v2)) + for _, c := range v2 { + res = append(res, LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + return res + case [][]value.LatLngHeight: + res := make([][]LatLngHeight, 0, len(v2)) + for _, d := range v2 { + coord := make([]LatLngHeight, 0, len(d)) + for _, c := range d { + coord = append(coord, LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + return res + case *value.Rect: + return Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } + } + return nil +} + +func gqlValueToValueInterface(v interface{}) interface{} { + if v == nil { + return nil + } + switch v2 := v.(type) { + case bool: + return v2 + case float64: + return v2 + case string: + return v2 + case *url.URL: + return v2 + case LatLng: + return value.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case LatLngHeight: + return 
value.LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case *LatLng: + return value.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } + case *LatLngHeight: + return value.LatLngHeight{ + Lat: v2.Lat, + Lng: v2.Lng, + Height: v2.Height, + } + case []LatLngHeight: + res := make([]value.LatLngHeight, 0, len(v2)) + for _, c := range v2 { + res = append(res, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + return value.Coordinates(res) + case [][]LatLngHeight: + res := make([]value.Coordinates, 0, len(v2)) + for _, d := range v2 { + coord := make([]value.LatLngHeight, 0, len(d)) + for _, c := range d { + coord = append(coord, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + return value.Polygon(res) + case *Rect: + return value.Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } + } + return nil +} + +func ToValueType(t value.Type) ValueType { + return ValueType(strings.ToUpper(string(t))) +} + +func FromValueType(t ValueType) value.Type { + return value.Type(strings.ToLower(string(t))) +} diff --git a/internal/adapter/gql/gqlmodel/models.go b/internal/adapter/gql/gqlmodel/models.go index 0e94c0068..1886a626e 100644 --- a/internal/adapter/gql/gqlmodel/models.go +++ b/internal/adapter/gql/gqlmodel/models.go @@ -51,7 +51,7 @@ func (d *Property) Field(id id.PropertySchemaFieldID) *PropertyField { for _, g := range d.Items { if gi, ok := g.(*PropertyGroup); ok { for _, f := range gi.Fields { - if f.ID == id { + if s := getPropertySchemaFieldIDFromGQLPropertyFieldID(f.ID); s == string(id) { return f } } @@ -145,7 +145,7 @@ func (d *PropertyGroup) Field(id id.PropertySchemaFieldID) *PropertyField { return nil } for _, f := range d.Fields { - if f.ID == id { + if s := getPropertySchemaFieldIDFromGQLPropertyFieldID(f.ID); s == string(id) { return f } } diff --git a/internal/adapter/gql/gqlmodel/models_gen.go 
b/internal/adapter/gql/gqlmodel/models_gen.go index 3adf45b2b..3ddeb1189 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -764,7 +764,7 @@ type PropertyCondition struct { } type PropertyField struct { - ID id.PropertySchemaFieldID `json:"id"` + ID string `json:"id"` ParentID id.ID `json:"parentId"` SchemaID id.PropertySchemaID `json:"schemaId"` FieldID id.PropertySchemaFieldID `json:"fieldId"` diff --git a/internal/adapter/gql/resolver_mutation_property.go b/internal/adapter/gql/resolver_mutation_property.go index 1e0565e70..88d7bbee6 100644 --- a/internal/adapter/gql/resolver_mutation_property.go +++ b/internal/adapter/gql/resolver_mutation_property.go @@ -14,9 +14,12 @@ func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmod exit := trace(ctx) defer exit() - v, ok := gqlmodel.FromPropertyValueAndType(input.Value, input.Type) - if !ok { - return nil, errors.New("invalid value") + var v *property.Value + if input.Value != nil { + v = gqlmodel.FromPropertyValueAndType(input.Value, input.Type) + if v == nil { + return nil, errors.New("invalid value") + } } pp, pgl, pg, pf, err := r.usecases.Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ @@ -117,7 +120,10 @@ func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.A var v *property.Value if input.NameFieldType != nil { - v, _ = gqlmodel.FromPropertyValueAndType(input.NameFieldValue, *input.NameFieldType) + v = gqlmodel.FromPropertyValueAndType(input.NameFieldValue, *input.NameFieldType) + if v == nil { + return nil, errors.New("invalid name field value") + } } p, pgl, pi, err := r.usecases.Property.AddItem(ctx, interfaces.AddPropertyItemParam{ @@ -181,7 +187,10 @@ func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmod for _, o := range input.Operations { var v *property.Value if o.NameFieldType != nil { - v, _ = gqlmodel.FromPropertyValueAndType(o.NameFieldValue, 
*o.NameFieldType) + v = gqlmodel.FromPropertyValueAndType(o.NameFieldValue, *o.NameFieldType) + if v == nil { + return nil, errors.New("invalid name field value") + } } op = append(op, interfaces.UpdatePropertyItemsOperationParam{ diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go index 5f2584cd0..c3a119c15 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -106,8 +106,10 @@ func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, f []id.SceneID, if f := d.Field(nextField); f != nil { if f.Type() == dataset.ValueTypeRef { if l := f.Value().ValueRef(); l != nil { - next = id.DatasetID(*l) - continue + if did, err := id.DatasetIDFrom(*l); err == nil { + next = did + continue + } } } } diff --git a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go index 5e1b4c33d..372196f64 100644 --- a/internal/infrastructure/memory/dataset_schema.go +++ b/internal/infrastructure/memory/dataset_schema.go @@ -117,7 +117,7 @@ func (r *DatasetSchema) FindDynamicByID(ctx context.Context, id id.DatasetSchema return nil, rerror.ErrNotFound } -func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src dataset.Source) (dataset.SchemaList, error) { +func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src string) (dataset.SchemaList, error) { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index e4887edb7..e15c51e39 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -82,7 +82,7 @@ func (r *datasetSchemaRepo) FindAllDynamicByScene(ctx context.Context, sceneID i return r.find(ctx, nil, filter) } -func (r *datasetSchemaRepo) FindBySceneAndSource(ctx context.Context, sceneID id.SceneID, source dataset.Source) 
(dataset.SchemaList, error) { +func (r *datasetSchemaRepo) FindBySceneAndSource(ctx context.Context, sceneID id.SceneID, source string) (dataset.SchemaList, error) { filter := bson.D{ {Key: "scene", Value: sceneID.String()}, {Key: "source", Value: string(source)}, diff --git a/internal/infrastructure/mongo/mongodoc/dataset.go b/internal/infrastructure/mongo/mongodoc/dataset.go index 194e1ffc5..3cfd9e08f 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset.go +++ b/internal/infrastructure/mongo/mongodoc/dataset.go @@ -128,13 +128,13 @@ func (doc *DatasetDocument) Model() (*dataset.Dataset, error) { f := dataset.NewField( fid, toModelDatasetValue(field.Value, field.Type), - dataset.Source(field.Source), + field.Source, ) fields = append(fields, f) } return dataset.New(). ID(did). - Source(dataset.Source(doc.Source)). + Source(doc.Source). Fields(fields). Schema(ds). Scene(scene). @@ -145,7 +145,7 @@ func NewDataset(dataset *dataset.Dataset) (*DatasetDocument, string) { did := dataset.ID().String() var doc DatasetDocument doc.ID = did - doc.Source = dataset.Source().String() + doc.Source = dataset.Source() doc.Scene = id.ID(dataset.Scene()).String() doc.Schema = id.ID(dataset.Schema()).String() @@ -156,7 +156,7 @@ func NewDataset(dataset *dataset.Dataset) (*DatasetDocument, string) { Field: f.Field().String(), Type: string(f.Type()), Value: f.Value().Interface(), - Source: f.Source().String(), + Source: f.Source(), }) } return &doc, did @@ -183,9 +183,5 @@ func toModelDatasetValue(v interface{}, t string) *dataset.Value { if v2, ok := v.(bson.D); ok { v = v2.Map() } - vt, ok := dataset.ValueTypeFrom(t) - if !ok { - return nil - } - return vt.ValueFrom(v) + return dataset.ValueTypeFrom(t).ValueFrom(v) } diff --git a/internal/infrastructure/mongo/mongodoc/dataset_schema.go b/internal/infrastructure/mongo/mongodoc/dataset_schema.go index 0ac53fdc2..80fb6a975 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset_schema.go +++ 
b/internal/infrastructure/mongo/mongodoc/dataset_schema.go @@ -1,8 +1,6 @@ package mongodoc import ( - "errors" - "go.mongodb.org/mongo-driver/bson" "github.com/reearth/reearth-backend/pkg/dataset" @@ -63,15 +61,12 @@ func (d *DatasetSchemaDocument) Model() (*dataset.Schema, error) { if err != nil { return nil, err } - vt, ok := dataset.ValueType(field.Type).Validate() - if !ok { - return nil, errors.New("invalid value type") - } + vt := dataset.ValueType(field.Type) f, err := dataset.NewSchemaField(). Name(field.Name). ID(fid). Type(vt). - Source(dataset.Source(field.Source)). + Source(field.Source). Build() if err != nil { return nil, err @@ -81,7 +76,7 @@ func (d *DatasetSchemaDocument) Model() (*dataset.Schema, error) { b := dataset.NewSchema(). ID(did). Name(d.Name). - Source(dataset.Source(d.Source)). + Source(d.Source). Scene(scene). Fields(fields) if d.RepresentativeField != nil { @@ -99,7 +94,7 @@ func NewDatasetSchema(dataset *dataset.Schema) (*DatasetSchemaDocument, string) doc := DatasetSchemaDocument{ ID: did, Name: dataset.Name(), - Source: dataset.Source().String(), + Source: dataset.Source(), Scene: id.ID(dataset.Scene()).String(), RepresentativeField: dataset.RepresentativeFieldID().StringRef(), Dynamic: dataset.Dynamic(), @@ -112,7 +107,7 @@ func NewDatasetSchema(dataset *dataset.Schema) (*DatasetSchemaDocument, string) ID: f.ID().String(), Type: string(f.Type()), Name: f.Name(), - Source: f.Source().String(), + Source: f.Source(), }) } diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index f55f17c54..3e962bac2 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -218,11 +218,10 @@ func toModelPropertyField(f *PropertyFieldDocument) *property.Field { flinks = property.NewLinks(links) } - vt, _ := property.ValueTypeFrom(f.Type) + vt := property.ValueType(f.Type) field := property.NewFieldUnsafe(). 
FieldUnsafe(id.PropertySchemaFieldID(f.Field)). - TypeUnsafe(vt). - ValueUnsafe(toModelPropertyValue(f.Value, f.Type)). + ValueUnsafe(property.NewOptionalValue(vt, toModelPropertyValue(f.Value, f.Type))). LinksUnsafe(flinks). Build() @@ -318,13 +317,5 @@ func (doc *PropertyDocument) Model() (*property.Property, error) { } func toModelPropertyValue(v interface{}, t string) *property.Value { - if v == nil { - return nil - } - v = convertDToM(v) - vt, ok := property.ValueTypeFrom(t) - if !ok { - return nil - } - return vt.ValueFromUnsafe(v) + return property.ValueType(t).ValueFrom(convertDToM(v)) } diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go index 7fca03ad5..3003d380b 100644 --- a/internal/infrastructure/mongo/mongodoc/property_schema.go +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -170,7 +170,7 @@ func ToModelPropertySchemaField(f *PropertySchemaFieldDocument) (*property.Schem Description(f.Description). Prefix(f.Prefix). Suffix(f.Suffix). - DefaultValue(vt.ValueFromUnsafe(f.DefaultValue)). + DefaultValue(vt.ValueFrom(f.DefaultValue)). UIRef(property.SchemaFieldUIFromRef(f.UI)). MinRef(f.Min). MaxRef(f.Max). 
diff --git a/internal/infrastructure/mongo/mongodoc/util.go b/internal/infrastructure/mongo/mongodoc/util.go index 0b1051762..e2e144c18 100644 --- a/internal/infrastructure/mongo/mongodoc/util.go +++ b/internal/infrastructure/mongo/mongodoc/util.go @@ -3,6 +3,9 @@ package mongodoc import "go.mongodb.org/mongo-driver/bson" func convertDToM(i interface{}) interface{} { + if i == nil { + return nil + } switch i2 := i.(type) { case bson.D: return i2.Map() diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index eaace244b..563504812 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -129,7 +129,6 @@ func (i *Dataset) AddDynamicDatasetSchema(ctx context.Context, inp interfaces.Ad } func (i *Dataset) AddDynamicDataset(ctx context.Context, inp interfaces.AddDynamicDatasetParam) (_ *dataset.Schema, _ *dataset.Dataset, err error) { - // Begin Db transaction tx, err := i.transaction.Begin() if err != nil { @@ -147,19 +146,18 @@ func (i *Dataset) AddDynamicDataset(ctx context.Context, inp interfaces.AddDynam return nil, nil, err } for _, f := range dss.Fields() { - if f.Name() == "author" { - fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(inp.Author), "")) + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeString.ValueFrom(inp.Author), "")) } if f.Name() == "content" { - fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(inp.Content), "")) + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeString.ValueFrom(inp.Content), "")) } if inp.Target != nil && len(*inp.Target) > 0 && f.Name() == "target" { - fields = append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(inp.Target), "")) + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeString.ValueFrom(inp.Target), "")) } if inp.Lat != nil && inp.Lng != nil && f.Name() == "location" { latlng := dataset.LatLng{Lat: *inp.Lat, Lng: *inp.Lng} - fields = 
append(fields, dataset.NewField(f.ID(), dataset.ValueFrom(latlng), "")) + fields = append(fields, dataset.NewField(f.ID(), dataset.ValueTypeLatLng.ValueFrom(latlng), "")) } } ds, err := dataset. diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index a381ba261..ad4809085 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -239,7 +239,7 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara return nil, nil, nil, nil, err } - v := property.ValueTypeURL.ValueFromUnsafe(url) + v := property.ValueTypeURL.ValueFrom(url) if v == nil { return nil, nil, nil, nil, interfaces.ErrInvalidPropertyValue } diff --git a/internal/usecase/repo/dataset_schema.go b/internal/usecase/repo/dataset_schema.go index be94c38ab..2e42eae95 100644 --- a/internal/usecase/repo/dataset_schema.go +++ b/internal/usecase/repo/dataset_schema.go @@ -13,7 +13,7 @@ type DatasetSchema interface { FindByIDs(context.Context, []id.DatasetSchemaID, []id.SceneID) (dataset.SchemaList, error) FindByScene(context.Context, id.SceneID, *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) FindBySceneAll(context.Context, id.SceneID) (dataset.SchemaList, error) - FindBySceneAndSource(context.Context, id.SceneID, dataset.Source) (dataset.SchemaList, error) + FindBySceneAndSource(context.Context, id.SceneID, string) (dataset.SchemaList, error) FindDynamicByID(context.Context, id.DatasetSchemaID) (*dataset.Schema, error) FindAllDynamicByScene(context.Context, id.SceneID) (dataset.SchemaList, error) Save(context.Context, *dataset.Schema) error diff --git a/pkg/dataset/builder.go b/pkg/dataset/builder.go index 576f85638..33919ae22 100644 --- a/pkg/dataset/builder.go +++ b/pkg/dataset/builder.go @@ -4,17 +4,14 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -// Builder _ type Builder struct { d *Dataset } -// New _ func New() *Builder { return &Builder{d: &Dataset{}} } -// Build 
_ func (b *Builder) Build() (*Dataset, error) { if id.ID(b.d.id).IsNil() { return nil, id.ErrInvalidID @@ -26,7 +23,6 @@ func (b *Builder) Build() (*Dataset, error) { return b.d, nil } -// MustBuild _ func (b *Builder) MustBuild() *Dataset { r, err := b.Build() if err != nil { @@ -35,47 +31,46 @@ func (b *Builder) MustBuild() *Dataset { return r } -// ID _ func (b *Builder) ID(id id.DatasetID) *Builder { b.d.id = id return b } -// NewID _ func (b *Builder) NewID() *Builder { b.d.id = id.DatasetID(id.New()) return b } -// Scene _ func (b *Builder) Scene(scene id.SceneID) *Builder { b.d.scene = scene return b } -// Source _ -func (b *Builder) Source(source Source) *Builder { +func (b *Builder) Source(source string) *Builder { b.d.source = source return b } -// Schema _ func (b *Builder) Schema(schema id.DatasetSchemaID) *Builder { b.d.schema = schema return b } -// Fields _ func (b *Builder) Fields(fields []*Field) *Builder { b.d.fields = map[id.DatasetSchemaFieldID]*Field{} b.d.order = make([]id.DatasetSchemaFieldID, 0, len(fields)) - sources := map[Source]struct{}{} + + sources := map[string]struct{}{} for _, f := range b.d.fields { if source := f.Source(); source != "" { sources[source] = struct{}{} } } + for _, f := range fields { + if f.IsEmpty() { + continue + } source := f.Source() if source == "" { b.d.fields[f.Field()] = f @@ -86,5 +81,6 @@ func (b *Builder) Fields(fields []*Field) *Builder { sources[source] = struct{}{} } } + return b } diff --git a/pkg/dataset/csvparser.go b/pkg/dataset/csvparser.go index 47348bf45..16f77f6ac 100644 --- a/pkg/dataset/csvparser.go +++ b/pkg/dataset/csvparser.go @@ -53,28 +53,6 @@ func (p *DatasetCSVParser) validateLine(line []string) bool { return len(p.headers) == len(line) } -func (p *DatasetCSVParser) getRecord(rec string) *Value { - var v *Value - vint, err := strconv.Atoi(rec) - if err == nil { - v = ValueFrom(vint) - return v - } - - vfloat64, err := strconv.ParseFloat(rec, 64) - if err == nil { - v = 
ValueFrom(vfloat64) - return v - } - vbool, err := strconv.ParseBool(rec) - if err == nil { - v = ValueFrom(vbool) - return v - } - v = ValueFrom(rec) - return v -} - func (p *DatasetCSVParser) GuessSchema(sid id.SceneID) error { if !p.validateLine(p.firstline) { return ErrFailedToParseCSVorTSVFile @@ -89,7 +67,7 @@ func (p *DatasetCSVParser) GuessSchema(sid id.SceneID) error { haslng = true } if h != "lng" && h != "lat" && strings.TrimSpace(h) != "" { - t := p.getRecord(p.firstline[k]).Type() + t := ValueFromStringOrNumber(p.firstline[k]).Type() field, _ := NewSchemaField().NewID().Name(h).Type(t).Build() schemafields = append(schemafields, field) } @@ -102,7 +80,7 @@ func (p *DatasetCSVParser) GuessSchema(sid id.SceneID) error { NewID(). Scene(sid). Name(p.name). - Source(Source("file:///" + p.name)). + Source("file:///" + p.name). Fields(schemafields). Build() if err != nil { @@ -167,7 +145,7 @@ func (p *DatasetCSVParser) getFields(line []string, sfm map[string]id.DatasetSch fields := []*Field{} var lat, lng *float64 for i, record := range line { - value := p.getRecord(record).Value() + value := ValueFromStringOrNumber(record) if p.headers[i] == "lng" { value, err := strconv.ParseFloat(record, 64) if err != nil { @@ -184,12 +162,12 @@ func (p *DatasetCSVParser) getFields(line []string, sfm map[string]id.DatasetSch } if p.headers[i] != "lat" && p.headers[i] != "lng" { - fields = append(fields, NewField(sfm[p.headers[i]], ValueFrom(value), "")) + fields = append(fields, NewField(sfm[p.headers[i]], value, "")) } } if lat != nil && lng != nil { latlng := LatLng{Lat: *lat, Lng: *lng} - fields = append(fields, NewField(sfm["location"], ValueFrom(latlng), "")) + fields = append(fields, NewField(sfm["location"], ValueTypeLatLng.ValueFrom(latlng), "")) } return append([]*Field{}, fields...), nil } @@ -206,8 +184,8 @@ func (p *DatasetCSVParser) CheckCompatible(s *Schema) error { return ErrIncompatibleSchema } t := fieldsmap[h].Type() - v := p.getRecord(p.firstline[i]) - 
if !t.ValidateValue(v) { + v := ValueFromStringOrNumber(p.firstline[i]) + if v.Type() != t { return ErrIncompatibleSchema } } diff --git a/pkg/dataset/csvparser_test.go b/pkg/dataset/csvparser_test.go index d6a37b823..534dc32e5 100644 --- a/pkg/dataset/csvparser_test.go +++ b/pkg/dataset/csvparser_test.go @@ -27,25 +27,20 @@ func TestCSVParser(t *testing.T) { assert.NotEmpty(t, schema) assert.Equal(t, "hoge.csv", schema.Name()) - assert.Equal(t, Source("file:///hoge.csv"), schema.Source()) + assert.Equal(t, "file:///hoge.csv", schema.Source()) assert.Equal(t, 1, len(datasets)) - sfm := make(map[string]string) - for _, sf := range schema.Fields() { - sfm[sf.ID().String()] = sf.Name() - } + dsfm := make(map[string]interface{}) for _, dsf := range datasets[0].Fields() { - dsfm[sfm[dsf.Field().String()]] = dsf.Value().Interface() + dsfm[schema.Field(dsf.Field()).Name()] = dsf.Value().Interface() } - latlng := map[string]interface{}{"lat": 12.0, "lng": 15.0} assert.Equal(t, map[string]interface{}{ "hoge": 1.0, "foo": "foo", "bar": "bar", - "location": latlng, + "location": LatLng{Lat: 12.0, Lng: 15.0}, }, dsfm) - } func TestCSVParserCheckCompatible(t *testing.T) { @@ -62,5 +57,4 @@ func TestCSVParserCheckCompatible(t *testing.T) { assert.NoError(t, err) result := p.CheckCompatible(ds) assert.NoError(t, result) - } diff --git a/pkg/dataset/dataset.go b/pkg/dataset/dataset.go index abb09d844..f06678ed4 100644 --- a/pkg/dataset/dataset.go +++ b/pkg/dataset/dataset.go @@ -2,17 +2,15 @@ package dataset import "github.com/reearth/reearth-backend/pkg/id" -// Dataset _ type Dataset struct { id id.DatasetID - source Source + source string schema id.DatasetSchemaID fields map[id.DatasetSchemaFieldID]*Field order []id.DatasetSchemaFieldID scene id.SceneID } -// ID _ func (d *Dataset) ID() (i id.DatasetID) { if d == nil { return @@ -20,7 +18,6 @@ func (d *Dataset) ID() (i id.DatasetID) { return d.id } -// Scene _ func (d *Dataset) Scene() (i id.SceneID) { if d == nil { return @@ 
-28,15 +25,13 @@ func (d *Dataset) Scene() (i id.SceneID) { return d.scene } -// Source _ -func (d *Dataset) Source() (i Source) { +func (d *Dataset) Source() string { if d == nil { - return + return "" } return d.source } -// Schema _ func (d *Dataset) Schema() (i id.DatasetSchemaID) { if d == nil { return @@ -44,7 +39,6 @@ func (d *Dataset) Schema() (i id.DatasetSchemaID) { return d.schema } -// Fields _ func (d *Dataset) Fields() []*Field { if d == nil || d.order == nil { return nil @@ -56,7 +50,6 @@ func (d *Dataset) Fields() []*Field { return fields } -// Field _ func (d *Dataset) Field(id id.DatasetSchemaFieldID) *Field { if d == nil || d.fields == nil { return nil @@ -64,7 +57,6 @@ func (d *Dataset) Field(id id.DatasetSchemaFieldID) *Field { return d.fields[id] } -// FieldRef _ func (d *Dataset) FieldRef(id *id.DatasetSchemaFieldID) *Field { if d == nil || id == nil { return nil @@ -72,7 +64,6 @@ func (d *Dataset) FieldRef(id *id.DatasetSchemaFieldID) *Field { return d.fields[*id] } -// NameField _ func (d *Dataset) NameField(ds *Schema) *Field { if d == nil { return nil @@ -87,8 +78,7 @@ func (d *Dataset) NameField(ds *Schema) *Field { return d.fields[f.ID()] } -// FieldBySource _ -func (d *Dataset) FieldBySource(source Source) *Field { +func (d *Dataset) FieldBySource(source string) *Field { if d == nil { return nil } @@ -100,7 +90,6 @@ func (d *Dataset) FieldBySource(source Source) *Field { return nil } -// FieldByType _ func (d *Dataset) FieldByType(t ValueType) *Field { if d == nil { return nil @@ -112,3 +101,29 @@ func (d *Dataset) FieldByType(t ValueType) *Field { } return nil } + +// Interface returns a simple and human-readable representation of the dataset +func (d *Dataset) Interface(s *Schema) map[string]interface{} { + if d == nil || s == nil || d.Schema() != s.ID() { + return nil + } + m := map[string]interface{}{} + for _, f := range d.fields { + key := s.Field(f.Field()).Name() + m[key] = f.Value().Interface() + } + return m +} + +// 
InterfaceWithFieldIDs is almost the same as Interface, but keys of the map are IDs of fields. +func (d *Dataset) InterfaceWithFieldIDs() map[string]interface{} { + if d == nil { + return nil + } + m := map[string]interface{}{} + for _, f := range d.fields { + key := f.Field().String() + m[key] = f.Value().Interface() + } + return m +} diff --git a/pkg/dataset/dataset_test.go b/pkg/dataset/dataset_test.go new file mode 100644 index 000000000..9520080ae --- /dev/null +++ b/pkg/dataset/dataset_test.go @@ -0,0 +1,89 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestDataset_Interface(t *testing.T) { + f1 := id.NewDatasetSchemaFieldID() + f2 := id.NewDatasetSchemaFieldID() + sid := id.NewDatasetSchemaID() + + tests := []struct { + name string + schema *Schema + dataset *Dataset + want map[string]interface{} + }{ + { + name: "ok", + schema: NewSchema().ID(sid).Scene(id.NewSceneID()).Fields([]*SchemaField{ + NewSchemaField().ID(f1).Name("foo").Type(ValueTypeNumber).MustBuild(), + NewSchemaField().ID(f2).Name("bar").Type(ValueTypeLatLng).MustBuild(), + }).MustBuild(), + dataset: New().NewID().Scene(id.NewSceneID()).Schema(sid).Fields([]*Field{ + NewField(f1, ValueTypeNumber.ValueFrom(1), ""), + NewField(f2, ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), ""), + }).MustBuild(), + want: map[string]interface{}{ + "foo": float64(1), + "bar": LatLng{Lat: 1, Lng: 2}, + }, + }, + { + name: "empty", + dataset: &Dataset{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.dataset.Interface(tt.schema)) + }) + } +} + +func TestDataset_InterfaceWithFieldIDs(t *testing.T) { + f1 := id.NewDatasetSchemaFieldID() + f2 := id.NewDatasetSchemaFieldID() + + tests := []struct { + name string + dataset *Dataset + want map[string]interface{} + }{ + { + name: "ok", + dataset:
New().NewID().Scene(id.NewSceneID()).Schema(id.NewDatasetSchemaID()).Fields([]*Field{ + NewField(f1, ValueTypeNumber.ValueFrom(1), ""), + NewField(f2, ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), ""), + }).MustBuild(), + want: map[string]interface{}{ + f1.String(): float64(1), + f2.String(): LatLng{Lat: 1, Lng: 2}, + }, + }, + { + name: "empty", + dataset: &Dataset{}, + want: map[string]interface{}{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.dataset.InterfaceWithFieldIDs()) + }) + } +} diff --git a/pkg/dataset/field.go b/pkg/dataset/field.go index e03407cbc..e99dffd35 100644 --- a/pkg/dataset/field.go +++ b/pkg/dataset/field.go @@ -2,25 +2,22 @@ package dataset import "github.com/reearth/reearth-backend/pkg/id" -// Field _ type Field struct { field id.DatasetSchemaFieldID - dtype ValueType value *Value - source Source + source string } -// NewField _ -func NewField(field id.DatasetSchemaFieldID, value *Value, source Source) *Field { +func NewField(field id.DatasetSchemaFieldID, value *Value, source string) *Field { + if value == nil { + return nil + } return &Field{ - dtype: value.Type(), - field: field, - value: value, - source: source, + field: field, + value: value, } } -// Field _ func (d *Field) Field() (i id.DatasetSchemaFieldID) { if d == nil { return @@ -28,7 +25,6 @@ func (d *Field) Field() (i id.DatasetSchemaFieldID) { return d.field } -// FieldRef _ func (d *Field) FieldRef() *id.DatasetSchemaFieldID { if d == nil { return nil @@ -36,26 +32,27 @@ func (d *Field) FieldRef() *id.DatasetSchemaFieldID { return d.field.Ref() } -// Type _ -func (d *Field) Type() (v ValueType) { - if d == nil { - return - } - return d.dtype +func (d *Field) IsEmpty() bool { + return d == nil || d.field.IsNil() || d.value == nil } -// Value _ func (d *Field) Value() *Value { if d == nil { return nil } - return d.value + return d.value.Clone() } -// Source _ -func (d 
*Field) Source() (s Source) { +func (d *Field) Type() ValueType { if d == nil { - return + return ValueTypeUnknown + } + return d.value.Type() +} + +func (d *Field) Source() string { + if d == nil { + return "" } return d.source } diff --git a/pkg/dataset/graph_iterator.go b/pkg/dataset/graph_iterator.go index cb8e6b981..db4432f78 100644 --- a/pkg/dataset/graph_iterator.go +++ b/pkg/dataset/graph_iterator.go @@ -2,7 +2,7 @@ package dataset import "github.com/reearth/reearth-backend/pkg/id" -// GraphIterator ใฏใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ใ‚ฐใƒฉใƒ•ๆŽข็ดขใ™ใ‚‹ใŸใ‚ใฎใ‚คใƒ†ใƒฌใƒผใ‚ฟใงใ™ใ€‚ +// GraphIterator is an iterator for traversing a dataset graph. type GraphIterator struct { m Map ids [][]id.DatasetID @@ -11,7 +11,6 @@ type GraphIterator struct { maxDepth int } -// GraphIteratorFrom _ func GraphIteratorFrom(root id.DatasetID, depth int) *GraphIterator { return &GraphIterator{ ids: [][]id.DatasetID{{root}}, @@ -19,9 +18,8 @@ } -// Next _ func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { - if di == nil || di.maxDepth == 0 || di.ids == nil || len(di.ids) == 0 || d == nil { + if di == nil || di.maxDepth == 0 || len(di.ids) == 0 || d == nil { return id.DatasetID{}, false } if di.currentDepthIndex >= len(di.ids) { @@ -41,7 +39,9 @@ func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { currentIDs := di.ids[di.currentDepthIndex] for _, f := range d.Fields() { if r := f.Value().ValueRef(); r != nil { - nextDepthIDs = append(nextDepthIDs, id.DatasetID(*r)) + if rid, err := id.DatasetIDFrom(*r); err == nil { + nextDepthIDs = append(nextDepthIDs, rid) + } } } di.ids[di.currentDepthIndex+1] = nextDepthIDs @@ -63,7 +63,6 @@ func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { return di.ids[di.currentDepthIndex][di.currentIndex], false } -// Result _ func (di *GraphIterator) Result() Map { if di == nil { return nil diff --git a/pkg/dataset/graph_iterator_test.go
b/pkg/dataset/graph_iterator_test.go index 73435ecc3..254b1a8e9 100644 --- a/pkg/dataset/graph_iterator_test.go +++ b/pkg/dataset/graph_iterator_test.go @@ -50,13 +50,14 @@ func TestDatasetGraphIterator(t *testing.T) { } func testTestDatasetGraphIteratorNext(t *testing.T, it *GraphIterator, ds List) { + t.Helper() for i, d := range ds { next, done := it.Next(d) if i == len(ds)-1 { assert.Equal(t, true, done) } else { + assert.False(t, done, "next done %d", i) assert.Equal(t, ds[i+1].ID(), next, "next %d", i) - assert.Equal(t, false, done, "next done %d", i) } } assert.Equal(t, ds.Map(), it.Result()) diff --git a/pkg/dataset/list.go b/pkg/dataset/list.go index 850e70906..e22e89575 100644 --- a/pkg/dataset/list.go +++ b/pkg/dataset/list.go @@ -4,10 +4,8 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -// List _ type List []*Dataset -// First _ func (l List) First() *Dataset { if l == nil || len(l) == 0 { return nil @@ -15,7 +13,6 @@ func (l List) First() *Dataset { return l[0] } -// Last _ func (l List) Last() *Dataset { if l == nil || len(l) == 0 { return nil @@ -23,7 +20,6 @@ func (l List) Last() *Dataset { return l[len(l)-1] } -// FindDataset _ func (l List) FindDataset(id id.DatasetID) *Dataset { for _, t := range l { if t.ID() == id { @@ -33,7 +29,6 @@ func (l List) FindDataset(id id.DatasetID) *Dataset { return nil } -// ToDatasetIds _ func (l List) ToDatasetIds() []id.DatasetID { if l == nil { return nil @@ -46,8 +41,7 @@ func (l List) ToDatasetIds() []id.DatasetID { return ids } -// FindDatasetBySource _ -func (l List) FindDatasetBySource(s Source) *Dataset { +func (l List) FindDatasetBySource(s string) *Dataset { for _, t := range l { if t.Source() == s { return t @@ -56,7 +50,6 @@ func (l List) FindDatasetBySource(s Source) *Dataset { return nil } -// FilterByDatasetSchema _ func (l List) FilterByDatasetSchema(s id.DatasetSchemaID) List { n := List{} for _, t := range l { @@ -67,15 +60,14 @@ func (l List) FilterByDatasetSchema(s 
id.DatasetSchemaID) List { return n } -// DiffBySource _ func (l List) DiffBySource(l2 List) Diff { // l is old, l2 is new added := []*Dataset{} removed := []*Dataset{} - // others := map[DatasetSource]DatasetDiffTouple{} + // others := map[string]DatasetDiffTouple{} others2 := map[id.DatasetID]*Dataset{} - s1 := map[Source]*Dataset{} + s1 := map[string]*Dataset{} for _, d1 := range l { s1[d1.Source()] = d1 } @@ -106,7 +98,6 @@ func (l List) DiffBySource(l2 List) Diff { } } -// Map _ func (l List) Map() Map { if l == nil { return nil @@ -128,10 +119,8 @@ func (l List) GraphLoader() GraphLoader { return GraphLoaderFromMap(l.Map()) } -// Map _ type Map map[id.DatasetID]*Dataset -// Add _ func (dm Map) Add(dss ...*Dataset) { if dss == nil { return @@ -147,7 +136,6 @@ func (dm Map) Add(dss ...*Dataset) { } } -// Slice _ func (dm Map) Slice() List { if dm == nil { return nil @@ -159,7 +147,6 @@ func (dm Map) Slice() List { return res } -// GraphSearchByFields _ func (dm Map) GraphSearchByFields(root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field) { res := make(List, 0, len(fields)) currentD := dm[root] @@ -177,8 +164,12 @@ func (dm Map) GraphSearchByFields(root id.DatasetID, fields ...id.DatasetSchemaF } if len(fields)-1 == i { return res, field - } else if fid := field.Value().ValueRef(); fid != nil { - currentD = dm[id.DatasetID(*fid)] + } else if fids := field.Value().ValueRef(); fids != nil { + if fid, err := id.DatasetIDFrom(*fids); err == nil { + currentD = dm[id.DatasetID(fid)] + } else { + return res, nil + } } else { return res, nil } diff --git a/pkg/dataset/list_test.go b/pkg/dataset/list_test.go index 9c5fd3723..4dfcccbf3 100644 --- a/pkg/dataset/list_test.go +++ b/pkg/dataset/list_test.go @@ -9,9 +9,9 @@ import ( func TestDatasetListDiff(t *testing.T) { sid := id.SceneID(id.New()) - source1 := Source("hogehoge/1") - source2 := Source("hogehoge/2") - source3 := Source("hogehoge/3") + source1 := "hogehoge/1" + source2 := "hogehoge/2" + source3 
:= "hogehoge/3" d1, _ := New().NewID().Scene(sid).Source(source1).Build() d2, _ := New().NewID().Scene(sid).Source(source2).Build() d3, _ := New().NewID().Scene(sid).Source(source2).Build() diff --git a/pkg/dataset/schema.go b/pkg/dataset/schema.go index b3b9208e1..ae3bf22ad 100644 --- a/pkg/dataset/schema.go +++ b/pkg/dataset/schema.go @@ -2,10 +2,9 @@ package dataset import "github.com/reearth/reearth-backend/pkg/id" -// Schema _ type Schema struct { id id.DatasetSchemaID - source Source + source string name string fields map[id.DatasetSchemaFieldID]*SchemaField order []id.DatasetSchemaFieldID @@ -14,7 +13,6 @@ type Schema struct { dynamic bool } -// ID _ func (d *Schema) ID() (i id.DatasetSchemaID) { if d == nil { return @@ -22,7 +20,6 @@ func (d *Schema) ID() (i id.DatasetSchemaID) { return d.id } -// IDRef _ func (d *Schema) IDRef() *id.DatasetSchemaID { if d == nil { return nil @@ -30,7 +27,6 @@ func (d *Schema) IDRef() *id.DatasetSchemaID { return d.id.Ref() } -// Scene _ func (d *Schema) Scene() (i id.SceneID) { if d == nil { return @@ -38,15 +34,13 @@ func (d *Schema) Scene() (i id.SceneID) { return d.scene } -// Source _ -func (d *Schema) Source() (s Source) { +func (d *Schema) Source() (s string) { if d == nil { return } return d.source } -// Name _ func (d *Schema) Name() string { if d == nil { return "" @@ -54,7 +48,6 @@ func (d *Schema) Name() string { return d.name } -// RepresentativeFieldID _ func (d *Schema) RepresentativeFieldID() *id.DatasetSchemaFieldID { if d == nil { return nil @@ -62,7 +55,6 @@ func (d *Schema) RepresentativeFieldID() *id.DatasetSchemaFieldID { return d.representativeField } -// RepresentativeField _ func (d *Schema) RepresentativeField() *SchemaField { if d == nil || d.representativeField == nil { return nil @@ -70,7 +62,6 @@ func (d *Schema) RepresentativeField() *SchemaField { return d.fields[*d.representativeField] } -// Fields _ func (d *Schema) Fields() []*SchemaField { if d == nil || d.order == nil { return nil @@ 
-82,7 +73,6 @@ func (d *Schema) Fields() []*SchemaField { return fields } -// Field _ func (d *Schema) Field(id id.DatasetSchemaFieldID) *SchemaField { if d == nil { return nil @@ -90,7 +80,6 @@ func (d *Schema) Field(id id.DatasetSchemaFieldID) *SchemaField { return d.fields[id] } -// FieldRef _ func (d *Schema) FieldRef(id *id.DatasetSchemaFieldID) *SchemaField { if d == nil || id == nil { return nil @@ -98,8 +87,7 @@ func (d *Schema) FieldRef(id *id.DatasetSchemaFieldID) *SchemaField { return d.fields[*id] } -// FieldBySource _ -func (d *Schema) FieldBySource(source Source) *SchemaField { +func (d *Schema) FieldBySource(source string) *SchemaField { if d == nil { return nil } @@ -111,7 +99,6 @@ func (d *Schema) FieldBySource(source Source) *SchemaField { return nil } -// FieldByType _ func (d *Schema) FieldByType(t ValueType) *SchemaField { if d == nil { return nil diff --git a/pkg/dataset/schema_builder.go b/pkg/dataset/schema_builder.go index 7999703f1..675b2c80d 100644 --- a/pkg/dataset/schema_builder.go +++ b/pkg/dataset/schema_builder.go @@ -4,17 +4,14 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -// SchemaBuilder _ type SchemaBuilder struct { d *Schema } -// NewSchema _ func NewSchema() *SchemaBuilder { return &SchemaBuilder{d: &Schema{}} } -// Build _ func (b *SchemaBuilder) Build() (*Schema, error) { if id.ID(b.d.id).IsNil() { return nil, id.ErrInvalidID @@ -26,7 +23,6 @@ func (b *SchemaBuilder) Build() (*Schema, error) { return b.d, nil } -// MustBuild _ func (b *SchemaBuilder) MustBuild() *Schema { r, err := b.Build() if err != nil { @@ -35,69 +31,53 @@ func (b *SchemaBuilder) MustBuild() *Schema { return r } -// ID _ func (b *SchemaBuilder) ID(id id.DatasetSchemaID) *SchemaBuilder { b.d.id = id return b } -// NewID _ func (b *SchemaBuilder) NewID() *SchemaBuilder { b.d.id = id.DatasetSchemaID(id.New()) return b } -// Scene _ func (b *SchemaBuilder) Scene(scene id.SceneID) *SchemaBuilder { b.d.scene = scene return b } -// Name _ func (b 
*SchemaBuilder) Name(name string) *SchemaBuilder { b.d.name = name return b } -// Dynamic _ func (b *SchemaBuilder) Dynamic(dynamic bool) *SchemaBuilder { b.d.dynamic = dynamic return b } -// Source _ -func (b *SchemaBuilder) Source(source Source) *SchemaBuilder { +func (b *SchemaBuilder) Source(source string) *SchemaBuilder { b.d.source = source return b } -// RepresentativeField _ func (b *SchemaBuilder) RepresentativeField(representativeField id.DatasetSchemaFieldID) *SchemaBuilder { rf := representativeField b.d.representativeField = &rf return b } -// Fields _ func (b *SchemaBuilder) Fields(fields []*SchemaField) *SchemaBuilder { b.d.fields = map[id.DatasetSchemaFieldID]*SchemaField{} - b.d.order = []id.DatasetSchemaFieldID{} + b.d.order = make([]id.DatasetSchemaFieldID, 0, len(fields)) sources := map[string]struct{}{} - for _, f := range b.d.fields { - if f == nil { - continue - } - source := f.Source().String() - if source != "" { - sources[source] = struct{}{} - } - } + for _, f := range fields { if f == nil { continue } - source := f.Source().String() - if source == "" { + + if source := f.Source(); source == "" { copied := *f b.d.fields[f.ID()] = &copied b.d.order = append(b.d.order, f.ID()) @@ -108,5 +88,6 @@ func (b *SchemaBuilder) Fields(fields []*SchemaField) *SchemaBuilder { sources[source] = struct{}{} } } + return b } diff --git a/pkg/dataset/schema_field.go b/pkg/dataset/schema_field.go index b090395dd..538dce7f2 100644 --- a/pkg/dataset/schema_field.go +++ b/pkg/dataset/schema_field.go @@ -1,19 +1,15 @@ -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/idgen --name DatasetSchemaField --output ../id - package dataset import "github.com/reearth/reearth-backend/pkg/id" -// SchemaField _ type SchemaField struct { id id.DatasetSchemaFieldID name string dataType ValueType - source Source + source string ref *id.DatasetSchemaID } -// ID _ func (d *SchemaField) ID() (i id.DatasetSchemaFieldID) { if d == nil { return @@ -21,7 +17,6 @@ 
func (d *SchemaField) ID() (i id.DatasetSchemaFieldID) { return d.id } -// IDRef _ func (d *SchemaField) IDRef() *id.DatasetSchemaFieldID { if d == nil { return nil @@ -29,7 +24,6 @@ func (d *SchemaField) IDRef() *id.DatasetSchemaFieldID { return d.id.Ref() } -// Name _ func (d *SchemaField) Name() (n string) { if d == nil { return @@ -37,7 +31,6 @@ func (d *SchemaField) Name() (n string) { return d.name } -// Ref _ func (d *SchemaField) Ref() *id.DatasetSchemaID { if d == nil { return nil @@ -45,7 +38,6 @@ func (d *SchemaField) Ref() *id.DatasetSchemaID { return d.ref } -// Type _ func (d *SchemaField) Type() (v ValueType) { if d == nil { return @@ -53,8 +45,7 @@ func (d *SchemaField) Type() (v ValueType) { return d.dataType } -// Source _ -func (d *SchemaField) Source() (s Source) { +func (d *SchemaField) Source() (s string) { if d == nil { return } diff --git a/pkg/dataset/schema_field_builder.go b/pkg/dataset/schema_field_builder.go index 07adf3b9b..b6db538bd 100644 --- a/pkg/dataset/schema_field_builder.go +++ b/pkg/dataset/schema_field_builder.go @@ -6,28 +6,24 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -// SchemaFieldBuilder _ type SchemaFieldBuilder struct { d *SchemaField } -// NewSchemaField _ func NewSchemaField() *SchemaFieldBuilder { return &SchemaFieldBuilder{d: &SchemaField{}} } -// Build _ func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { if id.ID(b.d.id).IsNil() { return nil, id.ErrInvalidID } - if _, ok := b.d.dataType.Validate(); !ok { + if !b.d.dataType.Default() { return nil, errors.New("invalid value type") } return b.d, nil } -// MustBuild _ func (b *SchemaFieldBuilder) MustBuild() *SchemaField { r, err := b.Build() if err != nil { @@ -36,37 +32,31 @@ func (b *SchemaFieldBuilder) MustBuild() *SchemaField { return r } -// ID _ func (b *SchemaFieldBuilder) ID(id id.DatasetSchemaFieldID) *SchemaFieldBuilder { b.d.id = id return b } -// NewID _ func (b *SchemaFieldBuilder) NewID() *SchemaFieldBuilder { b.d.id = 
id.DatasetSchemaFieldID(id.New()) return b } -// Name _ func (b *SchemaFieldBuilder) Name(name string) *SchemaFieldBuilder { b.d.name = name return b } -// Type _ func (b *SchemaFieldBuilder) Type(dataType ValueType) *SchemaFieldBuilder { b.d.dataType = dataType return b } -// Source _ -func (b *SchemaFieldBuilder) Source(source Source) *SchemaFieldBuilder { +func (b *SchemaFieldBuilder) Source(source string) *SchemaFieldBuilder { b.d.source = source return b } -// Ref _ func (b *SchemaFieldBuilder) Ref(ref *id.DatasetSchemaID) *SchemaFieldBuilder { if ref == nil { b.d.ref = nil diff --git a/pkg/dataset/schema_field_diff.go b/pkg/dataset/schema_field_diff.go index c6dab0ac4..a58b206a9 100644 --- a/pkg/dataset/schema_field_diff.go +++ b/pkg/dataset/schema_field_diff.go @@ -2,21 +2,19 @@ package dataset import "github.com/reearth/reearth-backend/pkg/id" -// SchemaFieldDiff _ type SchemaFieldDiff struct { Added []*SchemaField Removed []*SchemaField Replaced map[id.DatasetSchemaFieldID]*SchemaField } -// FieldDiffBySource _ func (d *Schema) FieldDiffBySource(d2 *Schema) SchemaFieldDiff { added := []*SchemaField{} removed := []*SchemaField{} - // others := map[DatasetSource]DatasetDiffTouple{} + // others := map[string]DatasetDiffTouple{} others2 := map[id.DatasetSchemaFieldID]*SchemaField{} - s1 := map[Source]*SchemaField{} + s1 := map[string]*SchemaField{} for _, d1 := range d.fields { s1[d1.Source()] = d1 } diff --git a/pkg/dataset/source.go b/pkg/dataset/source.go deleted file mode 100644 index 068773b58..000000000 --- a/pkg/dataset/source.go +++ /dev/null @@ -1,9 +0,0 @@ -package dataset - -// Source _ -type Source string - -// String implements Stringer -func (d Source) String() string { - return string(d) -} diff --git a/pkg/dataset/value.go b/pkg/dataset/value.go index 100555cfb..4fb4aea06 100644 --- a/pkg/dataset/value.go +++ b/pkg/dataset/value.go @@ -2,392 +2,207 @@ package dataset import ( "net/url" + "strconv" - "github.com/mitchellh/mapstructure" - 
"github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/value" ) -// LatLng _ -type LatLng struct { - Lat float64 `mapstructure:"lat"` - Lng float64 `mapstructure:"lng"` -} +type LatLng = value.LatLng +type LatLngHeight = value.LatLngHeight +type Coordinates = value.Coordinates +type Rect = value.Rect +type Polygon = value.Polygon + +var ( + ValueTypeUnknown = ValueType(value.TypeUnknown) + ValueTypeBool = ValueType(value.TypeBool) + ValueTypeNumber = ValueType(value.TypeNumber) + ValueTypeString = ValueType(value.TypeString) + ValueTypeRef = ValueType(value.TypeRef) + ValueTypeURL = ValueType(value.TypeURL) + ValueTypeLatLng = ValueType(value.TypeLatLng) + ValueTypeLatLngHeight = ValueType(value.TypeLatLngHeight) + ValueTypeCoordinates = ValueType(value.TypeCoordinates) + ValueTypeRect = ValueType(value.TypeRect) + TypePolygon = ValueType(value.TypePolygon) +) -// LatLngFrom _ -func LatLngFrom(m interface{}) (LatLng, bool) { - l := LatLng{} - err := mapstructure.Decode(m, &l) - return l, err == nil -} +type ValueType value.Type -// LatLngHeight _ -type LatLngHeight struct { - Lat float64 `mapstructure:"lat"` - Lng float64 `mapstructure:"lng"` - Height float64 `mapstructure:"height"` +func ValueTypeFrom(t string) ValueType { + return ValueType(value.Type(t)) } -// LatLngHeightFrom _ -func LatLngHeightFrom(m interface{}) (LatLngHeight, bool) { - l := LatLngHeight{} - err := mapstructure.Decode(m, &l) - return l, err == nil +func (t ValueType) Default() bool { + return value.Type(t).Default() } -// ValueType _ -type ValueType string +func (t ValueType) ValueFrom(i interface{}) *Value { + vv := value.Type(t).ValueFrom(i, nil) + if vv == nil { + return nil + } + return &Value{v: *vv} +} -const ( - // ValueTypeBool _ - ValueTypeBool ValueType = "bool" - // ValueTypeNumber _ - ValueTypeNumber ValueType = "number" - // ValueTypeString _ - ValueTypeString ValueType = "string" - // ValueTypeRef _ - ValueTypeRef ValueType = "ref" - // 
ValueTypeURL _ - ValueTypeURL ValueType = "url" - // ValueTypeLatLng _ - ValueTypeLatLng ValueType = "latlng" - // ValueTypeLatLngHeight _ - ValueTypeLatLngHeight ValueType = "latlngheight" -) +type Value struct { + v value.Value +} -// ValueTypeFrom _ -func ValueTypeFrom(t string) (ValueType, bool) { - switch ValueType(t) { - case ValueTypeBool: - return ValueTypeBool, true - case ValueTypeNumber: - return ValueTypeNumber, true - case ValueTypeString: - return ValueTypeString, true - case ValueTypeRef: - return ValueTypeRef, true - case ValueTypeURL: - return ValueTypeURL, true - case ValueTypeLatLng: - return ValueTypeLatLng, true - case ValueTypeLatLngHeight: - return ValueTypeLatLngHeight, true +func (v *Value) Clone() *Value { + if v == nil { + return nil + } + vv := v.v.Clone() + if vv == nil { + return nil } - return ValueType(""), false + return &Value{v: *vv} } -// Validate _ -func (t ValueType) Validate() (ValueType, bool) { - switch t { - case ValueTypeBool: - fallthrough - case ValueTypeNumber: - fallthrough - case ValueTypeString: - fallthrough - case ValueTypeRef: - fallthrough - case ValueTypeURL: - fallthrough - case ValueTypeLatLng: - fallthrough - case ValueTypeLatLngHeight: - return t, true +func (v *Value) Type() ValueType { + if v == nil { + return ValueTypeUnknown } - return t, false + return ValueType(v.v.Type()) } -// Value _ -type Value struct { - v interface{} - t ValueType +func (v *Value) Value() interface{} { + if v == nil { + return nil + } + return v.v.Value() } -// Value _ -func (v *Value) Value() interface{} { +func (v *Value) Interface() interface{} { if v == nil { return nil } - return v.v + return v.v.Interface() } -// ValueBool _ func (v *Value) ValueBool() *bool { if v == nil { return nil } - if v2, ok := v.v.(bool); ok { - return &v2 + vv, ok := v.v.ValueBool() + if ok { + return &vv } return nil } -// ValueNumber _ func (v *Value) ValueNumber() *float64 { if v == nil { return nil } - if v2, ok := v.v.(float64); ok { - return 
&v2 + vv, ok := v.v.ValueNumber() + if ok { + return &vv } return nil } -// ValueString _ func (v *Value) ValueString() *string { if v == nil { return nil } - if v2, ok := v.v.(string); ok { - return &v2 + vv, ok := v.v.ValueString() + if ok { + return &vv } return nil } -// ValueRef _ -func (v *Value) ValueRef() *id.ID { +func (v *Value) ValueRef() *string { if v == nil { return nil } - if v2, ok := v.v.(id.ID); ok { - return &v2 + vv, ok := v.v.ValueRef() + if ok { + return &vv } return nil } -// ValueURL _ func (v *Value) ValueURL() *url.URL { if v == nil { return nil } - if v2, ok := v.v.(*url.URL); ok { - return v2 + vv, ok := v.v.ValueURL() + if ok { + return vv } return nil } -// ValueLatLng _ func (v *Value) ValueLatLng() *LatLng { if v == nil { return nil } - if v2, ok := v.v.(LatLng); ok { - return &v2 + vv, ok := v.v.ValueLatLng() + if ok { + return &vv } return nil } -// ValueLatLngHeight _ func (v *Value) ValueLatLngHeight() *LatLngHeight { if v == nil { return nil } - if v2, ok := v.v.(LatLngHeight); ok { - return &v2 + vv, ok := v.v.ValueLatLngHeight() + if ok { + return &vv } return nil } -// Type _ -func (v *Value) Type() ValueType { - if v == nil { - return ValueType("") - } - return v.t -} - -// ValueFrom _ -func (t ValueType) ValueFrom(v interface{}) *Value { +func (v *Value) ValueCoordinates() *Coordinates { if v == nil { return nil } - switch t { - case ValueTypeBool: - if v2, ok := v.(bool); ok { - return &Value{v: v2, t: ValueTypeBool} - } - case ValueTypeNumber: - if v2, ok := v.(float64); ok { - return &Value{v: v2, t: ValueTypeNumber} - } - if v2, ok := v.(int); ok { - return &Value{v: float64(v2), t: ValueTypeNumber} - } - case ValueTypeString: - if v2, ok := v.(string); ok { - return &Value{v: v2, t: ValueTypeString} - } - case ValueTypeRef: - if v2, ok := v.(id.ID); ok { - return &Value{v: v2, t: ValueTypeRef} - } - if v2, ok := v.(string); ok { - if id, err := id.NewIDWith(v2); err == nil { - return &Value{v: id, t: ValueTypeRef} - } 
- } - case ValueTypeURL: - if v2, ok := v.(*url.URL); ok { - return &Value{v: v2, t: ValueTypeURL} - } - if v2, ok := v.(string); ok { - if u, err := url.Parse(v2); err == nil { - return &Value{v: u, t: ValueTypeURL} - } - } - case ValueTypeLatLng: - if v2, ok := v.(LatLng); ok { - return &Value{v: v2, t: ValueTypeLatLng} - } else if v2, ok := v.(*LatLng); ok { - if v2 == nil { - return nil - } - return &Value{v: *v2, t: ValueTypeLatLng} - } - v2 := LatLng{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil - } - return &Value{v: v2, t: ValueTypeLatLng} - case ValueTypeLatLngHeight: - if v2, ok := v.(LatLngHeight); ok { - return &Value{v: v2, t: ValueTypeLatLngHeight} - } else if v2, ok := v.(*LatLngHeight); ok { - if v2 == nil { - return nil - } - return &Value{v: *v2, t: ValueTypeLatLngHeight} - } - v2 := LatLngHeight{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil - } - return &Value{v: v2, t: ValueTypeLatLng} + vv, ok := v.v.ValueCoordinates() + if ok { + return &vv } return nil } -// ValidateValue _ -func (t ValueType) ValidateValue(v *Value) bool { - if v == nil { - return true - } - vv := v.Value() - if vv == nil { - return true - } - switch t { - case ValueTypeBool: - if _, ok := vv.(bool); ok { - return true - } - case ValueTypeNumber: - if _, ok := vv.(float64); ok { - return true - } - case ValueTypeString: - if _, ok := vv.(string); ok { - return true - } - case ValueTypeRef: - if _, ok := vv.(id.ID); ok { - return true - } - case ValueTypeURL: - if _, ok := vv.(*url.URL); ok { - return true - } - case ValueTypeLatLng: - if _, ok := vv.(LatLng); ok { - return true - } - case ValueTypeLatLngHeight: - if _, ok := vv.(LatLngHeight); ok { - return true - } - } - return false -} - -// Clone _ -func (v *Value) Clone() *Value { +func (v *Value) ValueRect() *Rect { if v == nil { return nil } - var v3 interface{} - switch v2 := v.v.(type) { - case bool: - v3 = v2 - case float64: - v3 = v2 - case string: - v3 = v2 - case 
id.ID: - v3 = v2 - case *url.URL: - v3, _ = url.Parse(v2.String()) - case LatLng: - v3 = LatLng{Lat: v2.Lat, Lng: v2.Lng} - case LatLngHeight: - v3 = LatLngHeight{Lat: v2.Lat, Lng: v2.Lng, Height: v2.Height} + vv, ok := v.v.ValueRect() + if ok { + return &vv } - return &Value{v: v3, t: v.t} + return nil } -// ValueFrom _ -func ValueFrom(v interface{}) *Value { +func (v *Value) ValuePolygon() *Polygon { if v == nil { return nil } - switch v2 := v.(type) { - case bool: - return &Value{v: v2, t: ValueTypeBool} - case int: - return &Value{v: float64(v2), t: ValueTypeNumber} - case float64: - return &Value{v: v2, t: ValueTypeNumber} - case string: - return &Value{v: v2, t: ValueTypeString} - case id.ID: - return &Value{v: v2, t: ValueTypeRef} - case *url.URL: - return &Value{v: v2, t: ValueTypeURL} - case LatLng: - return &Value{v: v2, t: ValueTypeLatLng} - case LatLngHeight: - return &Value{v: v2, t: ValueTypeLatLngHeight} + vv, ok := v.v.ValuePolygon() + if ok { + return &vv } return nil } -// Interface converts the value into generic representation -func (v *Value) Interface() interface{} { - if v == nil { - return nil +func ValueFromStringOrNumber(s string) *Value { + if vint, err := strconv.Atoi(s); err == nil { + return ValueTypeNumber.ValueFrom(vint) } - switch v2 := v.Value().(type) { - case bool: - return v2 - case float64: - return v2 - case string: - return v2 - case id.ID: - return v2.String() - case *url.URL: - return v2.String() - case LatLng: - return encodeValue(&v2) - case LatLngHeight: - return encodeValue(&v2) + + if vfloat64, err := strconv.ParseFloat(s, 64); err == nil { + return ValueTypeNumber.ValueFrom(vfloat64) } - return nil -} -func encodeValue(v interface{}) map[string]interface{} { - var v3 map[string]interface{} - err := mapstructure.Decode(v, &v3) - if err != nil { - return nil + if vbool, err := strconv.ParseBool(s); err == nil { + return ValueTypeBool.ValueFrom(vbool) } - return v3 + + return ValueTypeString.ValueFrom(s) } diff --git 
a/pkg/dataset/value_optional.go b/pkg/dataset/value_optional.go new file mode 100644 index 000000000..61affc8cd --- /dev/null +++ b/pkg/dataset/value_optional.go @@ -0,0 +1,78 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/value" + +type OptionalValue struct { + ov value.OptionalValue +} + +func NewOptionalValue(t ValueType, v *Value) *OptionalValue { + var vv *value.Value + if v != nil { + vv = &v.v + } + ov := value.NewOptionalValue(value.Type(t), vv) + if ov == nil { + return nil + } + return &OptionalValue{ov: *ov} +} + +func OptionalValueFrom(v *Value) *OptionalValue { + if v == nil { + return nil + } + ov := value.OptionalValueFrom(&v.v) + if ov == nil { + return nil + } + return &OptionalValue{ + ov: *ov, + } +} + +func (ov *OptionalValue) Type() ValueType { + if ov == nil { + return ValueTypeUnknown + } + return ValueType(ov.ov.Type()) +} + +func (ov *OptionalValue) Value() *Value { + if ov == nil { + return nil + } + vv := ov.ov.Value() + if vv == nil { + return nil + } + return &Value{v: *vv} +} + +func (ov *OptionalValue) TypeAndValue() (ValueType, *Value) { + return ov.Type(), ov.Value() +} + +func (ov *OptionalValue) SetValue(v *Value) { + if ov == nil { + return + } + if v == nil { + ov.ov.SetValue(nil) + } else { + ov.ov.SetValue(&v.v) + } +} + +func (ov *OptionalValue) Clone() *OptionalValue { + if ov == nil { + return nil + } + nov := ov.ov.Clone() + if nov == nil { + return nil + } + return &OptionalValue{ + ov: *nov, + } +} diff --git a/pkg/dataset/value_optional_test.go b/pkg/dataset/value_optional_test.go new file mode 100644 index 000000000..8767264a7 --- /dev/null +++ b/pkg/dataset/value_optional_test.go @@ -0,0 +1,303 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestNewNilableValue(t *testing.T) { + type args struct { + t ValueType + v *Value + } + tests := []struct { + name string + args args + want *OptionalValue + }{ 
+ { + name: "default type", + args: args{ + t: ValueTypeString, + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", nil))}, + }, + { + name: "nil value", + args: args{ + t: ValueTypeString, + }, + want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + }, + { + name: "invalid value", + args: args{ + t: ValueTypeNumber, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + { + name: "invalid type", + args: args{ + t: ValueTypeUnknown, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewOptionalValue(tt.args.t, tt.args.v)) + }) + } +} + +func TestOptionalValueFrom(t *testing.T) { + type args struct { + v *Value + } + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", nil))}, + }, + { + name: "empty value", + args: args{ + v: &Value{v: value.Value{}}, + }, + want: nil, + }, + { + name: "nil value", + args: args{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, OptionalValueFrom(tt.args.v)) + }) + } +} + +func TestOptionalValue_Type(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want ValueType + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeBool, nil)}, + want: ValueTypeBool, + }, + { + name: "empty", + value: &OptionalValue{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + value: nil, + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, 
tt.value.Type()) + }) + } +} + +func TestOptionalValue_Value(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", nil))}, + want: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.Value() + assert.Equal(t, tt.want, res) + if res != nil { + assert.NotSame(t, tt.want, res) + } + }) + } +} + +func TestOptionalValue_TypeAndValue(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + wantt ValueType + wantv *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", nil))}, + wantt: ValueTypeString, + wantv: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + wantt: ValueTypeUnknown, + wantv: nil, + }, + { + name: "nil", + value: nil, + wantt: ValueTypeUnknown, + wantv: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ty, tv := tt.value.TypeAndValue() + assert.Equal(t, tt.wantt, ty) + assert.Equal(t, tt.wantv, tv) + if tv != nil { + assert.NotSame(t, tt.wantv, tv) + } + }) + } +} + +func TestOptionalValue_SetValue(t *testing.T) { + type args struct { + v *Value + } + tests := []struct { + name string + value *OptionalValue + args args + invalid bool + }{ + { + name: "set", + value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", nil))}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "set to nil", + value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "invalid value", + value: 
&OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil value", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + { + name: "empty", + value: &OptionalValue{}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + var v *Value + if tt.value != nil { + v = tt.value.Value() + } + + tt.value.SetValue(tt.args.v) + + if tt.value != nil { + if tt.invalid { + assert.Equal(t, v, tt.value.Value()) + } else { + assert.Equal(t, tt.args.v, tt.value.Value()) + } + } + }) + } +} + +func TestOptionalValue_Clone(t *testing.T) { + tests := []struct { + name string + target *OptionalValue + }{ + { + name: "ok", + target: &OptionalValue{ + ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", nil)), + }, + }, + { + name: "empty", + target: &OptionalValue{}, + }, + { + name: "nil", + target: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} diff --git a/pkg/dataset/value_test.go b/pkg/dataset/value_test.go deleted file mode 100644 index 6d2ea89e8..000000000 --- a/pkg/dataset/value_test.go +++ /dev/null @@ -1,63 +0,0 @@ -package dataset - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestValueInterface(t *testing.T) { - assert.Equal( - t, - map[string]interface{}{ - "lat": 1.2, - "lng": 1.3, - }, - ValueTypeLatLng.ValueFrom(LatLng{ - Lat: 1.2, - Lng: 1.3, - }).Interface(), - ) - - assert.Equal( - t, - map[string]interface{}{ - "lat": 1.2, - "lng": 1.3, - "height": 1.4, - }, - ValueTypeLatLngHeight.ValueFrom(LatLngHeight{ - Lat: 1.2, - Lng: 1.3, - Height: 1.4, - }).Interface(), - ) -} 
- -func TestValueFromInterface(t *testing.T) { - assert.Equal( - t, - LatLng{ - Lat: 1.2, - Lng: 1.3, - }, - ValueTypeLatLng.ValueFrom(map[string]interface{}{ - "lat": 1.2, - "lng": 1.3, - }).Value(), - ) - - assert.Equal( - t, - LatLngHeight{ - Lat: 1.2, - Lng: 1.3, - Height: 1.4, - }, - ValueTypeLatLngHeight.ValueFrom(map[string]interface{}{ - "lat": 1.2, - "lng": 1.3, - "height": 1.4, - }).Value(), - ) -} diff --git a/pkg/layer/decoding/common.go b/pkg/layer/decoding/common.go index 15822ea21..acae2d58f 100644 --- a/pkg/layer/decoding/common.go +++ b/pkg/layer/decoding/common.go @@ -104,14 +104,14 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter switch t { case "Point": if pf, ok := v.(property.LatLngHeight); ok { - v2, ok := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) - if !ok { + v2 := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) + if v2 == nil { return nil, ErrFieldType } f.UpdateUnsafe(v2) - v3, ok := property.ValueTypeNumber.ValueFrom(pf.Height) - if !ok { + v3 := property.ValueTypeNumber.ValueFrom(pf.Height) + if v3 == nil { return nil, ErrFieldType } f2, _, _, _ := p.GetOrCreateField( @@ -121,8 +121,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter f2.UpdateUnsafe(v3) } else if pf, ok := v.(property.LatLng); ok { - v2, ok := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) - if !ok { + v2 := property.ValueTypeLatLng.ValueFrom(&property.LatLng{Lat: pf.Lat, Lng: pf.Lng}) + if v2 == nil { return nil, ErrFieldType } f.UpdateUnsafe(v2) @@ -135,8 +135,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, ErrFieldType } if s.IconStyle.Icon != nil && len(s.IconStyle.Icon.Href) > 0 { - imageValue, ok := property.ValueTypeURL.ValueFrom(s.IconStyle.Icon.Href) - if !ok { + imageValue := property.ValueTypeURL.ValueFrom(s.IconStyle.Icon.Href) + if imageValue 
== nil { return nil, ErrFieldType } imageField, _, _, _ := p.GetOrCreateField( @@ -146,8 +146,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter imageField.UpdateUnsafe(imageValue) } if s.IconStyle.Scale != 0 { - scaleValue, ok := property.ValueTypeNumber.ValueFrom(s.IconStyle.Scale) - if !ok { + scaleValue := property.ValueTypeNumber.ValueFrom(s.IconStyle.Scale) + if scaleValue == nil { return nil, ErrFieldType } scaleField, _, _, _ := p.GetOrCreateField( @@ -157,8 +157,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter scaleField.UpdateUnsafe(scaleValue) } if len(s.IconStyle.Color) > 0 { - colorValue, ok := property.ValueTypeString.ValueFrom(s.IconStyle.Color) - if !ok { + colorValue := property.ValueTypeString.ValueFrom(s.IconStyle.Color) + if colorValue == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -173,8 +173,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, ErrFieldType } if len(s) > 0 { - colorValue, ok := property.ValueTypeString.ValueFrom(s) - if !ok { + colorValue := property.ValueTypeString.ValueFrom(s) + if colorValue == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -189,8 +189,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, ErrFieldType } if len(s.Color) > 0 { - colorValue, ok := property.ValueTypeString.ValueFrom(s.Color) - if !ok { + colorValue := property.ValueTypeString.ValueFrom(s.Color) + if colorValue == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -200,8 +200,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter colorField.UpdateUnsafe(colorValue) } if s.PixelSize != 0 { - sizeValue, ok := property.ValueTypeNumber.ValueFrom(s.PixelSize) - if !ok { + sizeValue := property.ValueTypeNumber.ValueFrom(s.PixelSize) + if sizeValue == nil { return nil, 
ErrFieldType } sizeField, _, _, _ := p.GetOrCreateField( @@ -213,8 +213,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } } case "Polygon": - v2, ok := property.ValueTypePolygon.ValueFrom(v) - if !ok { + v2 := property.ValueTypePolygon.ValueFrom(v) + if v2 == nil { return nil, ErrFieldType } f.UpdateUnsafe(v2) @@ -226,8 +226,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, ErrFieldType } if s.PolyStyle.Stroke { - stroke, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Stroke) - if !ok { + stroke := property.ValueTypeBool.ValueFrom(s.PolyStyle.Stroke) + if stroke == nil { return nil, ErrFieldType } strokeField, _, _, _ := p.GetOrCreateField( @@ -237,8 +237,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter strokeField.UpdateUnsafe(stroke) } if s.LineStyle.Width != 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) - if !ok { + width := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if width == nil { return nil, ErrFieldType } widthField, _, _, _ := p.GetOrCreateField( @@ -248,8 +248,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter widthField.UpdateUnsafe(width) } if len(s.LineStyle.Color) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) - if !ok { + color := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -259,8 +259,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter colorField.UpdateUnsafe(color) } if s.PolyStyle.Fill { - fill, ok := property.ValueTypeBool.ValueFrom(s.PolyStyle.Fill) - if !ok { + fill := property.ValueTypeBool.ValueFrom(s.PolyStyle.Fill) + if fill == nil { return nil, ErrFieldType } fillField, _, _, _ := p.GetOrCreateField( @@ -270,8 +270,8 @@ func createProperty(t string, v interface{}, sceneID 
id.SceneID, styleItem inter fillField.UpdateUnsafe(fill) } if len(s.PolyStyle.Color) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.PolyStyle.Color) - if !ok { + color := property.ValueTypeString.ValueFrom(s.PolyStyle.Color) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -287,8 +287,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, ErrFieldType } if s.Stroke { - stroke, ok := property.ValueTypeBool.ValueFrom(s.Stroke) - if !ok { + stroke := property.ValueTypeBool.ValueFrom(s.Stroke) + if stroke == nil { return nil, ErrFieldType } strokeField, _, _, _ := p.GetOrCreateField( @@ -298,8 +298,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter strokeField.UpdateUnsafe(stroke) } if s.StrokeWidth != 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) - if !ok { + width := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if width == nil { return nil, ErrFieldType } widthField, _, _, _ := p.GetOrCreateField( @@ -323,8 +323,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, err } } - color, ok := property.ValueTypeString.ValueFrom(colorValue) - if !ok { + color := property.ValueTypeString.ValueFrom(colorValue) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -334,8 +334,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter colorField.UpdateUnsafe(color) } if s.Fill { - fill, ok := property.ValueTypeBool.ValueFrom(s.Fill) - if !ok { + fill := property.ValueTypeBool.ValueFrom(s.Fill) + if fill == nil { return nil, ErrFieldType } fillField, _, _, _ := p.GetOrCreateField( @@ -359,8 +359,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter return nil, err } } - color, ok := property.ValueTypeString.ValueFrom(colorValue) - if !ok { + color := 
property.ValueTypeString.ValueFrom(colorValue) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -376,8 +376,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if s.StrokeWidth > 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) - if !ok { + width := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if width == nil { return nil, ErrFieldType } widthField, _, _, _ := p.GetOrCreateField( @@ -388,8 +388,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if len(s.FillColor) > 0 { - fill, ok := property.ValueTypeString.ValueFrom(s.FillColor) - if !ok { + fill := property.ValueTypeString.ValueFrom(s.FillColor) + if fill == nil { return nil, ErrFieldType } fillField, _, _, _ := p.GetOrCreateField( @@ -400,8 +400,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if len(s.StrokeColor) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) - if !ok { + color := property.ValueTypeString.ValueFrom(s.StrokeColor) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -413,8 +413,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } } case "Polyline": - v2, ok := property.ValueTypeCoordinates.ValueFrom(v) - if !ok { + v2 := property.ValueTypeCoordinates.ValueFrom(v) + if v2 == nil { return nil, ErrFieldType } f.UpdateUnsafe(v2) @@ -427,8 +427,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if len(s.LineStyle.Color) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.LineStyle.Color) - if !ok { + color := property.ValueTypeString.ValueFrom(s.LineStyle.Color) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( @@ -439,8 +439,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if s.LineStyle.Width != 0 { 
- width, ok := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) - if !ok { + width := property.ValueTypeNumber.ValueFrom(s.LineStyle.Width) + if width == nil { return nil, ErrFieldType } widthField, _, _, _ := p.GetOrCreateField( @@ -456,8 +456,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if s.Width != 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.Width) - if !ok { + width := property.ValueTypeNumber.ValueFrom(s.Width) + if width == nil { return nil, ErrFieldType } widthField, _, _, _ := p.GetOrCreateField( @@ -485,8 +485,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } } - color, ok := property.ValueTypeString.ValueFrom(colorValue) - if !ok { + color := property.ValueTypeString.ValueFrom(colorValue) + if color == nil { return nil, ErrFieldType } @@ -503,8 +503,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if s.StrokeWidth > 0 { - width, ok := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) - if !ok { + width := property.ValueTypeNumber.ValueFrom(s.StrokeWidth) + if width == nil { return nil, ErrFieldType } widthField, _, _, _ := p.GetOrCreateField( @@ -515,8 +515,8 @@ func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem inter } if len(s.StrokeColor) > 0 { - color, ok := property.ValueTypeString.ValueFrom(s.StrokeColor) - if !ok { + color := property.ValueTypeString.ValueFrom(s.StrokeColor) + if color == nil { return nil, ErrFieldType } colorField, _, _, _ := p.GetOrCreateField( diff --git a/pkg/layer/decoding/reearth.go b/pkg/layer/decoding/reearth.go index 96e8fcbdf..361b0bcd5 100644 --- a/pkg/layer/decoding/reearth.go +++ b/pkg/layer/decoding/reearth.go @@ -275,13 +275,8 @@ func (f *ReearthPropertyField) propertyField(key id.PropertySchemaFieldID) *prop return nil } - vt, ok := property.ValueTypeFrom(f.Type) - if !ok { - return nil - } - - v, ok := vt.ValueFrom(f.Value) - if !ok { + v := 
property.ValueType(f.Type).ValueFrom(f.Value) + if v == nil { return nil } @@ -299,7 +294,7 @@ func (f *ReearthPropertyField) propertyField(key id.PropertySchemaFieldID) *prop return &property.InitializerField{ Field: key, - Type: vt, + Type: v.Type(), Value: v, Links: links, } diff --git a/pkg/layer/decoding/reearth_test.go b/pkg/layer/decoding/reearth_test.go index 8ddf35cc6..29b781512 100644 --- a/pkg/layer/decoding/reearth_test.go +++ b/pkg/layer/decoding/reearth_test.go @@ -139,7 +139,7 @@ func TestReearthDecoder_Decode(t *testing.T) { { Field: id.PropertySchemaFieldID("latlng"), Type: property.ValueTypeLatLng, - Value: property.ValueTypeLatLng.MustBeValue(property.LatLng{Lat: 1, Lng: 2}), + Value: property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 1, Lng: 2}), }, }, }, @@ -188,7 +188,7 @@ func TestReearthDecoder_Decode(t *testing.T) { { Field: id.PropertySchemaFieldID("foobar"), Type: property.ValueTypeString, - Value: property.ValueTypeString.MustBeValue("bar"), + Value: property.ValueTypeString.ValueFrom("bar"), }, }, }, @@ -198,7 +198,7 @@ func TestReearthDecoder_Decode(t *testing.T) { { Field: id.PropertySchemaFieldID("foobar"), Type: property.ValueTypeString, - Value: property.ValueTypeString.MustBeValue("foo"), + Value: property.ValueTypeString.ValueFrom("foo"), }, }, }, diff --git a/pkg/layer/encoding/common.go b/pkg/layer/encoding/common.go index 632754126..292b6f4c0 100644 --- a/pkg/layer/encoding/common.go +++ b/pkg/layer/encoding/common.go @@ -1,7 +1,6 @@ package encoding import ( - "errors" "image/color" "strconv" "strings" @@ -9,9 +8,11 @@ import ( "gopkg.in/go-playground/colors.v1" ) -var ErrInvalidColor = errors.New("invalid color") +func getColor(str string) *color.RGBA { + if len(str) == 0 { + return nil + } -func getColor(str string) (*color.RGBA, error) { cs := str a := "" @@ -27,7 +28,7 @@ func getColor(str string) (*color.RGBA, error) { b, err := colors.Parse(cs) if err != nil || b == nil { - return nil, ErrInvalidColor + return nil 
} c := b.ToRGBA() @@ -39,5 +40,5 @@ func getColor(str string) (*color.RGBA, error) { alpha = uint8(c.A * 255) } - return &color.RGBA{R: c.R, G: c.G, B: c.B, A: alpha}, nil + return &color.RGBA{R: c.R, G: c.G, B: c.B, A: alpha} } diff --git a/pkg/layer/encoding/common_test.go b/pkg/layer/encoding/common_test.go index 99da072f8..cf356e261 100644 --- a/pkg/layer/encoding/common_test.go +++ b/pkg/layer/encoding/common_test.go @@ -8,19 +8,8 @@ import ( ) func TestGetColor(t *testing.T) { - c, err := getColor("#ffffff") - assert.NoError(t, err) - assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, c) - - c, err = getColor("#fff") - assert.NoError(t, err) - assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, c) - - c, err = getColor("#fffa") - assert.NoError(t, err) - assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 170}, c) - - c, err = getColor("#ff0000aa") - assert.NoError(t, err) - assert.Equal(t, &color.RGBA{R: 255, G: 0, B: 0, A: 170}, c) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, getColor("#ffffff")) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 255}, getColor("#fff")) + assert.Equal(t, &color.RGBA{R: 255, G: 255, B: 255, A: 170}, getColor("#fffa")) + assert.Equal(t, &color.RGBA{R: 255, G: 0, B: 0, A: 170}, getColor("#ff0000aa")) } diff --git a/pkg/layer/encoding/czml.go b/pkg/layer/encoding/czml.go index 4e386761d..5f616c974 100644 --- a/pkg/layer/encoding/czml.go +++ b/pkg/layer/encoding/czml.go @@ -8,7 +8,6 @@ import ( "github.com/reearth/reearth-backend/pkg/czml" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer/merging" - "github.com/reearth/reearth-backend/pkg/property" ) type CZMLEncoder struct { @@ -21,17 +20,12 @@ func NewCZMLEncoder(w io.Writer) *CZMLEncoder { } } -func (e *CZMLEncoder) stringToCZMLColor(s string) (*czml.Color, error) { - c, err := getColor(s) - if err != nil || c == nil { - if err == nil { - err = ErrInvalidColor - } - return nil, err +func (e 
*CZMLEncoder) stringToCZMLColor(s string) *czml.Color { + c := getColor(s) + if c == nil { + return nil } - return &czml.Color{ - RGBA: []int64{int64(c.R), int64(c.G), int64(c.B), int64(c.A)}, - }, nil + return &czml.Color{RGBA: []int64{int64(c.R), int64(c.G), int64(c.B), int64(c.A)}} } func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feature, error) { @@ -39,179 +33,105 @@ func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feat return nil, nil } - var ok bool - var err error - var pointSize float64 - var pointColor string feature := czml.Feature{ - Id: "", - Name: "", - Point: nil, + Id: li.Original.String(), + Name: li.Name, } - feature.Name = li.Name + switch li.ExtensionID.String() { case "marker": - latlng := property.LatLng{} - var height float64 - if f := li.Property.Field("location"); f != nil { - latlng, ok = f.PropertyValue.ValueLatLng() - if !ok { - dsll := f.DatasetValue.ValueLatLng() - if dsll != nil { - latlng = property.LatLng{ - Lat: dsll.Lat, - Lng: dsll.Lng, - } - } else { - return nil, errors.New("invalid value type") - } - } - - if f := li.Property.Field("height"); f != nil { - height, ok = f.PropertyValue.ValueNumber() - if !ok { - dsHeight := f.DatasetValue.ValueNumber() - if dsHeight != nil { - height = *dsHeight - } else { - return nil, errors.New("invalid value type") - } - } - position := czml.Position{ - CartographicDegrees: []float64{latlng.Lng, latlng.Lat, height}, - } - feature.Position = &position - } else { - position := czml.Position{ - CartographicDegrees: []float64{latlng.Lng, latlng.Lat}, - } - feature.Position = &position - } + var position czml.Position + point := czml.Point{} + if f := li.Property.Field("location").Value().ValueLatLng(); f != nil { + position = czml.Position{CartographicDegrees: []float64{(*f).Lng, (*f).Lat}} + } else { + return nil, errors.New("invalid value type") } - if f := li.Property.Field("pointColor"); f != nil { - pointColor, ok = 
f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } + + if f := li.Property.Field("height").Value().ValueNumber(); f != nil { + position.CartographicDegrees = append(position.CartographicDegrees, *f) } - if f := li.Property.Field("pointSize"); f != nil { - pointSize, ok = f.PropertyValue.ValueNumber() - if !ok { - return nil, errors.New("invalid value type") - } + + if f := li.Property.Field("pointColor").Value().ValueString(); f != nil { + point.Color = *f } - if pointSize != 0 || len(pointColor) > 0 { - point := czml.Point{ - Color: pointColor, - PixelSize: pointSize, - } - feature.Point = &point + + if f := li.Property.Field("pointSize").Value().ValueNumber(); f != nil { + point.PixelSize = *f } + + feature.Position = &position + feature.Point = &point case "polygon": - var polygon property.Polygon - position := czml.Position{} - var fill, stroke bool - var fillColor, strokeColor *czml.Color - var strokeWidth float64 - if f := li.Property.Field("polygon"); f != nil { - polygon, ok = f.PropertyValue.ValuePolygon() - if !ok { - return nil, errors.New("invalid value type") - } - for _, c := range polygon { - for _, l := range c { - position.CartographicDegrees = append(position.CartographicDegrees, []float64{l.Lng, l.Lat, l.Height}...) 
- } - } - } - if f := li.Property.Field("fill"); f != nil { - fill, ok = f.PropertyValue.ValueBool() - if !ok { - return nil, errors.New("invalid value type") - } + polygon := czml.Polygon{} + + if f := li.Property.Field("polygon").Value().ValuePolygon(); f != nil && len(*f) > 0 { + // CZML polygon does not support multi inner rings + for _, l := range (*f)[0] { + polygon.Positions.CartographicDegrees = append( + polygon.Positions.CartographicDegrees, + []float64{l.Lng, l.Lat, l.Height}..., + ) + } + } else { + // polygon is required + return nil, errors.New("invalid value type") } - if f := li.Property.Field("stroke"); f != nil { - stroke, ok = f.PropertyValue.ValueBool() - if !ok { - return nil, errors.New("invalid value type") - } + + if f := li.Property.Field("fill").Value().ValueBool(); f != nil { + polygon.Fill = *f } - if f := li.Property.Field("fillColor"); f != nil { - fillStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - fillColor, err = e.stringToCZMLColor(fillStr) - if err != nil { - return nil, err - } + + if f := li.Property.Field("stroke").Value().ValueBool(); f != nil { + polygon.Stroke = *f } - if f := li.Property.Field("strokeColor"); f != nil { - strokeStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - strokeColor, err = e.stringToCZMLColor(strokeStr) - if err != nil { - return nil, err + + if f := li.Property.Field("fillColor").Value().ValueString(); f != nil { + if c := e.stringToCZMLColor(*f); c != nil { + polygon.Material = &czml.Material{SolidColor: &czml.SolidColor{Color: c}} } } - if f := li.Property.Field("strokeWidth"); f != nil { - strokeWidth, ok = f.PropertyValue.ValueNumber() - if !ok { - return nil, errors.New("invalid value type") + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := e.stringToCZMLColor(*f); strokeColor != nil { + polygon.StrokeColor = strokeColor } } - 
polygonCZML := czml.Polygon{ - Positions: position, - Fill: fill, - Material: &czml.Material{SolidColor: &czml.SolidColor{Color: fillColor}}, - Stroke: stroke, - StrokeColor: strokeColor, - StrokeWidth: strokeWidth, + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + polygon.StrokeWidth = *f } - feature.Polygon = &polygonCZML + + feature.Polygon = &polygon case "polyline": - var polyline property.Coordinates - position := czml.Position{} - var strokeColor *czml.Color - var strokeWidth float64 - if f := li.Property.Field("coordinates"); f != nil { - polyline, ok = f.PropertyValue.ValueCoordinates() - if !ok { - return nil, errors.New("invalid value type") - } - for _, l := range polyline { - position.CartographicDegrees = append(position.CartographicDegrees, []float64{l.Lng, l.Lat, l.Height}...) - } - } + polyline := czml.Polyline{Positions: czml.Position{}} - if f := li.Property.Field("strokeColor"); f != nil { - strokeStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - strokeColor, err = e.stringToCZMLColor(strokeStr) - if err != nil { - return nil, err + if f := li.Property.Field("coordinates").Value().ValueCoordinates(); f != nil { + for _, l := range *f { + polyline.Positions.CartographicDegrees = append( + polyline.Positions.CartographicDegrees, + l.Lng, l.Lat, l.Height, + ) } + } else { + return nil, errors.New("invalid value type") } - if f := li.Property.Field("strokeWidth"); f != nil { - strokeWidth, ok = f.PropertyValue.ValueNumber() - if !ok { - return nil, errors.New("invalid value type") + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := e.stringToCZMLColor(*f); strokeColor != nil { + polyline.Material = &czml.Material{ + PolylineOutline: &czml.PolylineOutline{Color: strokeColor}, + } } } - polylineCZML := czml.Polyline{ - Positions: position, - Material: &czml.Material{ - PolylineOutline: &czml.PolylineOutline{Color: 
strokeColor}, - }, - Width: strokeWidth, + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + polyline.Width = *f } - feature.Polyline = &polylineCZML + feature.Polyline = &polyline } + return &feature, nil } @@ -248,6 +168,7 @@ func (e *CZMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup) ([]*czml.Fe func (e *CZMLEncoder) Encode(layer merging.SealedLayer) error { var res []*czml.Feature var err error + if i, ok := layer.(*merging.SealedLayerItem); ok { feature, err := e.encodeSingleLayer(i) if err != nil { @@ -261,9 +182,8 @@ func (e *CZMLEncoder) Encode(layer merging.SealedLayer) error { return err } } - en := json.NewEncoder(e.writer) - err = en.Encode(res) - if err != nil { + + if err := json.NewEncoder(e.writer).Encode(res); err != nil { return err } return nil diff --git a/pkg/layer/encoding/czml_test.go b/pkg/layer/encoding/czml_test.go index 0167ce5bd..805d7ab7a 100644 --- a/pkg/layer/encoding/czml_test.go +++ b/pkg/layer/encoding/czml_test.go @@ -3,7 +3,6 @@ package encoding import ( "bytes" "encoding/json" - "io" "testing" "github.com/reearth/reearth-backend/pkg/czml" @@ -15,503 +14,255 @@ import ( "github.com/stretchr/testify/assert" ) -var _ Encoder = (*GeoJSONEncoder)(nil) +var _ Encoder = (*CZMLEncoder)(nil) -func TestEncodeCZMLPoint(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("marker") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.LatLng{ - Lat: 4.4, - Lng: 53.4, - } - - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("location"), - Type: "latlng", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := 
[]*property.SealedItem{} - il = append(il, &item1) +func TestCZMLEncoder_Encode(t *testing.T) { + lid := id.NewLayerID() + sid := id.NewSceneID() + iid := id.NewPropertyItemID() - v2 := property.ValueTypeString - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("pointColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe("#7fff00ff"), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeNumber - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("height"), - Type: "number", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe(34), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - v4 := property.ValueTypeNumber - f4 := property.SealedField{ - ID: id.PropertySchemaFieldID("pointSize"), - Type: "number", - DatasetValue: nil, - PropertyValue: v4.ValueFromUnsafe(2.4), - } - fl4 := []*property.SealedField{} - fl4 = append(fl4, &f4) - item4 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl4, - Groups: nil, - } - il = append(il, &item4) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, + tests := []struct { + name string + target merging.SealedLayer + want []*czml.Feature + }{ + { + name: "marker", + target: 
&merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Name: "test", + Scene: sid, + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("marker").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: &iid, + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("location"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 4.4, Lng: 53.4}), + ), + }, + { + ID: id.PropertySchemaFieldID("height"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(34), + ), + }, + { + ID: id.PropertySchemaFieldID("pointColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7fff00ff"), + ), + }, + { + ID: id.PropertySchemaFieldID("pointSize"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(2.4), + ), + }, + }, + }, + }, + }, + }, + }, + want: []*czml.Feature{ + { + Id: lid.String(), + Name: "test", + Position: &czml.Position{CartographicDegrees: []float64{53.4, 4.4, 34}}, + Point: &czml.Point{ + Color: "#7fff00ff", + PixelSize: float64(2.4), + }, + }, }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewCZMLEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - - colorStr, _ := f2.PropertyValue.ValueString() - height, _ := f3.PropertyValue.ValueNumber() - size, _ := f4.PropertyValue.ValueNumber() - expected := []*czml.Feature{} - exPos := czml.Position{CartographicDegrees: []float64{v1.Lng, v1.Lat, height}} - exPoint := czml.Point{ - Color: colorStr, - PixelSize: size, - } - 
exValue := czml.Feature{ - Id: "", - Name: "test", - Position: &exPos, - Point: &exPoint, - } - expected = append(expected, &exValue) - reader2, writer2 := io.Pipe() - exEn := json.NewEncoder(writer2) - go func() { - defer func() { - _ = writer2.Close() - }() - err = exEn.Encode(expected) - assert.NoError(t, err) - }() - - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) -} - -func TestEncodeCZMLPolygon(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polygon") - iid := id.MustPropertyItemID(id.New().String()) - vc := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, - } - v1 := property.Polygon{vc} - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("polygon"), - Type: "polygon", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeBool - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("fill"), - Type: "bool", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe(true), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := 
property.ValueTypeString - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("fillColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe("#ff000000"), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - v4 := property.ValueTypeBool - f4 := property.SealedField{ - ID: id.PropertySchemaFieldID("stroke"), - Type: "bool", - DatasetValue: nil, - PropertyValue: v4.ValueFromUnsafe(true), - } - fl4 := []*property.SealedField{} - fl4 = append(fl4, &f4) - item4 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl4, - Groups: nil, - } - il = append(il, &item4) - v5 := property.ValueTypeString - f5 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v5.ValueFromUnsafe("#ff554555"), - } - fl5 := []*property.SealedField{} - fl5 = append(fl5, &f5) - item5 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl5, - Groups: nil, - } - il = append(il, &item5) - v6 := property.ValueTypeNumber - f6 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeWidth"), - Type: "number", - DatasetValue: nil, - PropertyValue: v6.ValueFromUnsafe(3), - } - fl6 := []*property.SealedField{} - fl6 = append(fl6, &f6) - item6 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl6, - Groups: nil, - } - il = append(il, &item6) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - 
Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, + { + name: "polygon", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Name: "test", + Scene: sid, + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: &iid, + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom( + property.Polygon{property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }}, + ), + ), + }, + { + ID: id.PropertySchemaFieldID("fill"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: id.PropertySchemaFieldID("fillColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff000000"), + ), + }, + { + ID: id.PropertySchemaFieldID("stroke"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff554555"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: []*czml.Feature{ + { + Id: lid.String(), + Name: "test", + Polygon: &czml.Polygon{ + Positions: 
czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}}, + Fill: true, + Material: &czml.Material{ + SolidColor: &czml.SolidColor{Color: &czml.Color{RGBA: []int64{255, 0, 0, 0}}}, + }, + Stroke: true, + StrokeColor: &czml.Color{RGBA: []int64{255, 85, 69, 85}}, + StrokeWidth: 3, + }, + }, }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewCZMLEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - - expected := []*czml.Feature{} - exPos := czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}} - exPoint := czml.Polygon{ - Positions: exPos, - Fill: true, - Material: &czml.Material{SolidColor: &czml.SolidColor{Color: &czml.Color{ - RGBA: []int64{255, 0, 0, 0}, - }}}, - Stroke: true, - StrokeColor: &czml.Color{ - RGBA: []int64{255, 85, 69, 85}, }, - StrokeWidth: 3, - } - exValue := czml.Feature{ - Id: "", - Name: "test", - Polygon: &exPoint, - } - expected = append(expected, &exValue) - reader2, writer2 := io.Pipe() - exEn := json.NewEncoder(writer2) - go func() { - defer func() { - _ = writer2.Close() - }() - err = exEn.Encode(expected) - assert.NoError(t, err) - }() - - assert.NoError(t, err) - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) -} - -func TestEncodeCZMLPolyline(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polyline") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, 
property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, + { + name: "polyline", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Name: "test", + Scene: sid, + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: &iid, + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff224222"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: []*czml.Feature{ + { + Id: lid.String(), + Name: "test", + Polyline: &czml.Polyline{ + Positions: czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}}, + Material: &czml.Material{PolylineOutline: &czml.PolylineOutline{ + Color: &czml.Color{RGBA: []int64{255, 34, 66, 34}}, + }}, + Width: 3, + }, + }, + }, }, } - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("coordinates"), - Type: "coordinates", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - 
Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeString - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe("#ff224222"), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeNumber - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeWidth"), - Type: "number", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe(3), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - sp := property.Sealed{ - Original: &pid, - Items: il, + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + expected, _ := json.Marshal(tt.want) + writer := bytes.Buffer{} + assert.NoError(t, NewCZMLEncoder(&writer).Encode(tt.target)) + assert.Equal(t, string(expected)+"\n", writer.String()) + }) } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, - }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewCZMLEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - - expected := []*czml.Feature{} - exPos := czml.Position{CartographicDegrees: []float64{5.34, 3.4, 100, 2.34, 45.4, 100, 654.34, 34.66, 100}} - exPolyline := czml.Polyline{ - Positions: 
exPos, - Material: &czml.Material{PolylineOutline: &czml.PolylineOutline{Color: &czml.Color{ - RGBA: []int64{255, 34, 66, 34}, - }}}, - Width: 3, - } - exValue := czml.Feature{ - Id: "", - Name: "test", - Polyline: &exPolyline, - } - expected = append(expected, &exValue) - reader2, writer2 := io.Pipe() - exEn := json.NewEncoder(writer2) - go func() { - defer func() { - _ = writer2.Close() - }() - err = exEn.Encode(expected) - assert.NoError(t, err) - }() - - assert.NoError(t, err) - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) } diff --git a/pkg/layer/encoding/geojson.go b/pkg/layer/encoding/geojson.go index eff43c531..b65d4e70c 100644 --- a/pkg/layer/encoding/geojson.go +++ b/pkg/layer/encoding/geojson.go @@ -40,126 +40,67 @@ func (e *GeoJSONEncoder) coordsToFloat(c property.Coordinates) [][]float64 { } func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojson.Feature, error) { - if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + if li == nil || li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { return nil, nil } - var ok bool - var geo *geojson.Geometry var res *geojson.Feature + switch li.ExtensionID.String() { case "marker": - latlng := property.LatLng{} - var height float64 - if f := li.Property.Field("location"); f != nil { - latlng, ok = f.PropertyValue.ValueLatLng() - if !ok { - dsll := f.DatasetValue.ValueLatLng() - if dsll != nil { - latlng = property.LatLng{ - Lat: dsll.Lat, - Lng: dsll.Lng, - } - } else { - return nil, errors.New("invalid value type") - } - } - if f := li.Property.Field("height"); f != nil { - height, ok = f.PropertyValue.ValueNumber() - if !ok { - dsHeight := f.DatasetValue.ValueNumber() - if dsHeight != nil { - height = *dsHeight - } else { - return nil, errors.New("invalid value type") - } 
- } - geo = geojson.NewPointGeometry([]float64{latlng.Lng, latlng.Lat, height}) - } else { - geo = geojson.NewPointGeometry([]float64{latlng.Lng, latlng.Lat}) - } - res = geojson.NewFeature(geo) - res.SetProperty("name", li.Name) + var coords []float64 + + if f := li.Property.Field("location").Value().ValueLatLng(); f != nil { + coords = []float64{(*f).Lng, (*f).Lat} + } else { + return nil, errors.New("invalid value type") } - if f := li.Property.Field("pointColor"); f != nil { - pointColor, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - if res != nil { - res.SetProperty("marker-color", pointColor) - } + + if height := li.Property.Field("height").Value().ValueNumber(); height != nil { + coords = append(coords, *height) } - case "polygon": - var polygon property.Polygon - if f := li.Property.Field("polygon"); f != nil { - polygon, ok = f.PropertyValue.ValuePolygon() - if !ok { - return nil, errors.New("invalid value type") - } - fl := e.polygonToFloat(polygon) - geo = geojson.NewPolygonGeometry(fl) - res = geojson.NewFeature(geo) - res.SetProperty("name", li.Name) + res = geojson.NewFeature(geojson.NewPointGeometry(coords)) + + if f := li.Property.Field("pointColor").Value().ValueString(); f != nil { + res.SetProperty("marker-color", *f) } - if f := li.Property.Field("fillColor"); f != nil { - fillColor, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - if res != nil { - res.SetProperty("fill", fillColor) - } + case "polygon": + if f := li.Property.Field("polygon").Value().ValuePolygon(); f != nil { + res = geojson.NewFeature(geojson.NewPolygonGeometry(e.polygonToFloat(*f))) + } else { + return nil, errors.New("invalid value type") } - if f := li.Property.Field("strokeColor"); f != nil { - strokeColor, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - if res != nil { - res.SetProperty("stroke", strokeColor) - } + + 
if f := li.Property.Field("fillColor").Value().ValueString(); f != nil { + res.SetProperty("fill", *f) } - if f := li.Property.Field("strokeWidth"); f != nil { - strokeWidth, ok := f.PropertyValue.ValueNumber() - if !ok { - return nil, errors.New("invalid value type") - } - if res != nil { - res.SetProperty("stroke-width", strokeWidth) - } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + res.SetProperty("stroke", *f) + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + res.SetProperty("stroke-width", *f) } case "polyline": - var polyline property.Coordinates - if f := li.Property.Field("coordinates"); f != nil { - polyline, ok = f.PropertyValue.ValueCoordinates() - if !ok { - return nil, errors.New("invalid value type") - } - fl := e.coordsToFloat(polyline) - geo = geojson.NewLineStringGeometry(fl) - res = geojson.NewFeature(geo) - res.SetProperty("name", li.Name) - } - if f := li.Property.Field("strokeColor"); f != nil { - strokeColor, ok := f.PropertyValue.ValueString() - if !ok { - return nil, errors.New("invalid value type") - } - if res != nil { - res.SetProperty("stroke", strokeColor) - } + if f := li.Property.Field("coordinates").Value().ValueCoordinates(); f != nil { + res = geojson.NewFeature(geojson.NewLineStringGeometry(e.coordsToFloat(*f))) + } else { + return nil, errors.New("invalid value type") } - if f := li.Property.Field("strokeWidth"); f != nil { - strokeWidth, ok := f.PropertyValue.ValueNumber() - if !ok { - return nil, errors.New("invalid value type") - } - if res != nil { - res.SetProperty("stroke-width", strokeWidth) - } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + res.SetProperty("stroke", *f) } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + res.SetProperty("stroke-width", *f) + } + } + + if res != nil { + res.SetProperty("name", li.Name) } return res, nil } diff --git a/pkg/layer/encoding/geojson_test.go 
b/pkg/layer/encoding/geojson_test.go index e105a85cf..04095a28d 100644 --- a/pkg/layer/encoding/geojson_test.go +++ b/pkg/layer/encoding/geojson_test.go @@ -2,7 +2,6 @@ package encoding import ( "bytes" - "io" "testing" geojson "github.com/paulmach/go.geojson" @@ -15,416 +14,212 @@ import ( var _ Encoder = (*GeoJSONEncoder)(nil) -func TestPointEncodeGeoJSON(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("marker") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.LatLng{ - Lat: 4.4, - Lng: 53.4, - } - - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("location"), - Type: "latlng", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - - v2 := property.ValueTypeString - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("pointColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe("#7fff00ff"), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeNumber - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("height"), - Type: "number", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe(34), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, 
&item3) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, +func TestGeoJSONEncoder_Encode(t *testing.T) { + tests := []struct { + name string + target merging.SealedLayer + want func() *geojson.Feature + }{ + { + name: "point", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: id.NewLayerID(), + Scene: id.NewSceneID(), + Name: "test", + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("marker").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("location"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 4.4, Lng: 53.4}), + ), + }, + { + ID: id.PropertySchemaFieldID("pointColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7fff00ff"), + ), + }, + { + ID: id.PropertySchemaFieldID("height"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(34), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *geojson.Feature { + f := geojson.NewFeature(geojson.NewPointGeometry([]float64{53.4, 4.4, 34})) + f.SetProperty("marker-color", "#7fff00ff") + f.SetProperty("name", "test") + return f }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewGeoJSONEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) 
- assert.NoError(t, err) - }() - - colorStr, _ := f2.PropertyValue.ValueString() - height, _ := f3.PropertyValue.ValueNumber() - expected := geojson.NewFeature(geojson.NewPointGeometry([]float64{v1.Lng, v1.Lat, height})) - expected.SetProperty("marker-color", colorStr) - expected.SetProperty("name", l.Name) - reader2, writer2 := io.Pipe() - var data []byte - data, err = expected.MarshalJSON() - go func() { - defer func() { - _ = writer2.Close() - }() - _, err = writer2.Write(data) - assert.NoError(t, err) - }() - - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) -} - -func TestPolygonEncodeGeoJSON(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polygon") - iid := id.MustPropertyItemID(id.New().String()) - vc := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, - } - v1 := property.Polygon{vc} - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("polygon"), - Type: "polygon", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeString - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("fillColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe("#7c3b3b"), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) 
- item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeString - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe("#ff3343"), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - v4 := property.ValueTypeNumber - f4 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeWidth"), - Type: "number", - DatasetValue: nil, - PropertyValue: v4.ValueFromUnsafe(3), - } - fl4 := []*property.SealedField{} - fl4 = append(fl4, &f4) - item4 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl4, - Groups: nil, - } - il = append(il, &item4) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, + { + name: "polygon", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: id.NewLayerID(), + Scene: id.NewSceneID(), + Name: "test", + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("polygon"), 
+ Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom(property.Polygon{property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }}), + ), + }, + { + ID: id.PropertySchemaFieldID("fillColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7c3b3b"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff3343"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *geojson.Feature { + expected := geojson.NewFeature(geojson.NewPolygonGeometry([][][]float64{{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}}})) + expected.SetProperty("name", "test") + expected.SetProperty("fill", "#7c3b3b") + expected.SetProperty("stroke", "#ff3343") + expected.SetProperty("stroke-width", 3) + return expected + }, + }, + { + name: "polyline", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: id.NewLayerID(), + Scene: id.NewSceneID(), + Name: "test", + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("coordinates"), + 
Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff3343"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *geojson.Feature { + expected := geojson.NewFeature(geojson.NewLineStringGeometry([][]float64{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}})) + expected.SetProperty("name", "test") + expected.SetProperty("stroke", "#ff3343") + expected.SetProperty("stroke-width", 3) + return expected }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewGeoJSONEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - - fillStr, _ := f2.PropertyValue.ValueString() - strokeStr, _ := f3.PropertyValue.ValueString() - width, _ := f4.PropertyValue.ValueNumber() - expected := geojson.NewFeature(geojson.NewPolygonGeometry([][][]float64{{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}}})) - expected.SetProperty("name", l.Name) - expected.SetProperty("fill", fillStr) - expected.SetProperty("stroke", strokeStr) - expected.SetProperty("stroke-width", width) - reader2, writer2 := io.Pipe() - var data []byte - data, err = expected.MarshalJSON() - go func() { - defer func() { - _ = writer2.Close() - }() - _, err = writer2.Write(data) - }() - assert.NoError(t, err) - - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := 
new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) -} - -func TestPolylineEncodeGeoJSON(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polyline") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, } - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("coordinates"), - Type: "coordinates", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeString - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe("#ff3343"), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeNumber - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeWidth"), - Type: "number", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe(3), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + expected, _ := tt.want().MarshalJSON() + writer := bytes.Buffer{} + assert.NoError(t, NewGeoJSONEncoder(&writer).Encode(tt.target)) + assert.Equal(t, string(expected), writer.String()) + }) } - il = append(il, &item3) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, - }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewGeoJSONEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - - strokeStr, _ := f2.PropertyValue.ValueString() - width, _ := f3.PropertyValue.ValueNumber() - expected := geojson.NewFeature(geojson.NewLineStringGeometry([][]float64{{5.34, 3.4, 100}, {2.34, 45.4, 100}, {654.34, 34.66, 100}})) - expected.SetProperty("name", l.Name) - expected.SetProperty("stroke", strokeStr) - expected.SetProperty("stroke-width", width) - reader2, writer2 := io.Pipe() - var data []byte - data, err = expected.MarshalJSON() - go func() { - defer func() { - _ = writer2.Close() - }() - _, err = writer2.Write(data) - assert.NoError(t, err) - }() - - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) } diff --git a/pkg/layer/encoding/kml.go b/pkg/layer/encoding/kml.go index 463dd4e91..750c5cb2e 100644 --- a/pkg/layer/encoding/kml.go +++ b/pkg/layer/encoding/kml.go @@ -2,15 +2,10 @@ package encoding import ( "errors" - "image/color" "io" - "math/rand" - "net/url" - "strings" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer/merging" - 
"github.com/reearth/reearth-backend/pkg/property" kml "github.com/twpayne/go-kml" ) @@ -26,22 +21,8 @@ func NewKMLEncoder(w io.Writer) *KMLEncoder { } // generates a composite string of layer name and id to be used as style tag id -func (e *KMLEncoder) generateStyleId(id string, name string) (string, error) { - if len(id) > 0 { - subid := id[len(id)-5:] - trimmedName := "" - if len(name) > 0 { - trimmedName = strings.Join(strings.Fields(name), "") + "_" - - } - b := make([]byte, 8) - if _, err := rand.Read(b); err != nil { - return "", err - } - return trimmedName + subid, nil - } - - return "", nil +func generateKMLStyleId(id string) string { + return id + "_style" } func (e *KMLEncoder) getName(str string) *kml.SimpleElement { @@ -49,132 +30,79 @@ func (e *KMLEncoder) getName(str string) *kml.SimpleElement { } // encodes style features and return style kml element and used id -func (e *KMLEncoder) encodePointStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string, error) { - var image *url.URL - var styleId string - var err error - var ok bool - var imageSize float64 - var pointColor color.Color - if f := li.Property.Field("image"); f != nil { - if f.PropertyValue != nil { - image, ok = f.PropertyValue.ValueURL() - if !ok { - return nil, "", errors.New("invalid value type") - } - if len(image.String()) == 0 { - return nil, "", errors.New("empty URL") - } - } +func (e *KMLEncoder) encodePointStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { + added := false + styleId := generateKMLStyleId(li.Original.String()) + iconStyle := kml.IconStyle() + + if f := li.Property.Field("image").Value().ValueURL(); f != nil { + iconStyle.Add(kml.Icon(kml.Href(f.String()))) + added = true } - if f := li.Property.Field("imageSize"); f != nil { - imageSize, ok = f.PropertyValue.ValueNumber() - if !ok { - return nil, "", errors.New("invalid value type") - } + + if f := li.Property.Field("imageSize").Value().ValueNumber(); f != nil { + 
iconStyle.Add(kml.Scale(*f)) + added = true } - if f := li.Property.Field("pointColor"); f != nil { - colorStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, "", errors.New("invalid value type") - } - pointColor, err = getColor(colorStr) - if err != nil { - return nil, "", err + + if f := li.Property.Field("pointColor").Value().ValueString(); f != nil { + if c := getColor(*f); c != nil { + iconStyle.Add(kml.Color(c)) + added = true } } - styleId, err = e.generateStyleId(li.Original.String(), li.Name) - if err != nil { - return nil, "", err - } - if imageSize != 0 || pointColor != nil || (image != nil && len(image.String()) > 0) { - iconStyle := kml.IconStyle() - if imageSize != 0 { - iconStyle.Add(kml.Scale(imageSize)) - } - if pointColor != nil { - iconStyle.Add(kml.Color(pointColor)) - } - if image != nil { - iconStyle.Add(kml.Icon( - kml.Href(image.String()))) - } - return kml.SharedStyle(styleId, iconStyle), styleId, nil + + if !added { + return nil, "" } - return nil, "", nil + + return kml.SharedStyle(styleId, iconStyle), styleId } -func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string, error) { - var styleId string - var fill, stroke bool - var fillColor, strokeColor color.Color - var strokeWidth float64 - var err error - var ok bool - if f := li.Property.Field("fill"); f != nil { - fill, ok = f.PropertyValue.ValueBool() - if !ok { - return nil, "", errors.New("invalid value type") - } - } - if f := li.Property.Field("stroke"); f != nil { - stroke, ok = f.PropertyValue.ValueBool() - if !ok { - return nil, "", errors.New("invalid value type") - } - } +func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { + styleId := generateKMLStyleId(li.Original.String()) - if f := li.Property.Field("fillColor"); f != nil { - colorStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, "", errors.New("invalid value type") - } - fillColor, err = 
getColor(colorStr) - if err != nil { - return nil, "", err - } - } - if f := li.Property.Field("strokeColor"); f != nil { - colorStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, "", errors.New("invalid value type") - } - strokeColor, err = getColor(colorStr) - if err != nil { - return nil, "", err - } - } - if f := li.Property.Field("strokeWidth"); f != nil { - strokeWidth, ok = f.PropertyValue.ValueNumber() - if !ok { - return nil, "", errors.New("invalid value type") + var polyStyle, lineStyle *kml.CompoundElement + + if f := li.Property.Field("fill").Value().ValueBool(); f != nil { + if polyStyle == nil { + polyStyle = kml.PolyStyle() } + polyStyle.Add(kml.Fill(*f)) } - styleId, err = e.generateStyleId(li.Original.String(), li.Name) - if err != nil { - return nil, "", err - } - polyStyle := kml.PolyStyle() - lineStyle := kml.LineStyle() - if fill || fillColor != nil { - if fill { - polyStyle.Add(kml.Fill(fill)) - } - if fillColor != nil { + + if f := li.Property.Field("fillColor").Value().ValueString(); f != nil { + if fillColor := getColor(*f); fillColor != nil { + if polyStyle == nil { + polyStyle = kml.PolyStyle() + } polyStyle.Add(kml.Color(fillColor)) } } - if stroke || strokeColor != nil || strokeWidth != 0 { - if stroke { - lineStyle.Add(kml.Outline(stroke)) + + if f := li.Property.Field("stroke").Value().ValueBool(); f != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() } - if strokeColor != nil { + lineStyle.Add(kml.Outline(*f)) + } + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := getColor(*f); lineStyle != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } lineStyle.Add(kml.Color(strokeColor)) } - if strokeWidth != 0 { - lineStyle.Add(kml.Width(strokeWidth)) + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() } + lineStyle.Add(kml.Width(*f)) } + style := kml.SharedStyle(styleId) 
if polyStyle != nil { style.Add(polyStyle) @@ -182,53 +110,37 @@ func (e *KMLEncoder) encodePolygonStyle(li *merging.SealedLayerItem) (*kml.Share if lineStyle != nil { style.Add(lineStyle) } - return style, styleId, nil + return style, styleId } -func (e *KMLEncoder) encodePolylineStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string, error) { - var styleId string - var strokeColor color.Color - var strokeWidth float64 - var err error - var ok bool - - if f := li.Property.Field("strokeColor"); f != nil { - colorStr, ok := f.PropertyValue.ValueString() - if !ok { - return nil, "", errors.New("invalid value type") - } - strokeColor, err = getColor(colorStr) - if err != nil { - return nil, "", err - } - } - if f := li.Property.Field("strokeWidth"); f != nil { - strokeWidth, ok = f.PropertyValue.ValueNumber() - if !ok { - return nil, "", errors.New("invalid value type") - } - } - styleId, err = e.generateStyleId(li.Original.String(), li.Name) - if err != nil { - return nil, "", err - } - lineStyle := kml.LineStyle() - if strokeColor != nil || strokeWidth != 0 { - if strokeColor != nil { +func (e *KMLEncoder) encodePolylineStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { + styleId := generateKMLStyleId(li.Original.String()) + style := kml.SharedStyle(styleId) + var lineStyle *kml.CompoundElement + + if f := li.Property.Field("strokeColor").Value().ValueString(); f != nil { + if strokeColor := getColor(*f); strokeColor != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() + } lineStyle.Add(kml.Color(strokeColor)) } - if strokeWidth != 0 { - lineStyle.Add(kml.Width(strokeWidth)) + } + + if f := li.Property.Field("strokeWidth").Value().ValueNumber(); f != nil { + if lineStyle == nil { + lineStyle = kml.LineStyle() } + lineStyle.Add(kml.Width(*f)) } - style := kml.SharedStyle(styleId) + if lineStyle != nil { style.Add(lineStyle) } - return style, styleId, nil + return style, styleId } -func (e *KMLEncoder) encodeStyle(li 
*merging.SealedLayerItem) (*kml.SharedElement, string, error) { +func (e *KMLEncoder) encodeStyle(li *merging.SealedLayerItem) (*kml.SharedElement, string) { switch li.ExtensionID.String() { case "marker": return e.encodePointStyle(li) @@ -237,7 +149,7 @@ func (e *KMLEncoder) encodeStyle(li *merging.SealedLayerItem) (*kml.SharedElemen case "polyline": return e.encodePolylineStyle(li) } - return nil, "", nil + return nil, "" } // encodes non style layer features @@ -247,102 +159,65 @@ func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundE } var layerTag *kml.CompoundElement - var ok bool - name := e.getName(li.Name) + switch li.ExtensionID.String() { case "marker": - layerTag = kml.Point() - latlng := property.LatLng{} - var height float64 - if f := li.Property.Field("location"); f != nil { - latlng, ok = f.PropertyValue.ValueLatLng() - if !ok { - dsll := f.DatasetValue.ValueLatLng() - if dsll != nil { - latlng = property.LatLng{ - Lat: dsll.Lat, - Lng: dsll.Lng, - } - } else { - return nil, errors.New("invalid value type") - } - } + c := kml.Coordinate{} + + if f := li.Property.Field("location").Value().ValueLatLng(); f != nil { + c.Lat = (*f).Lat + c.Lon = (*f).Lng + } else { + return nil, errors.New("invalid value type") } - if f := li.Property.Field("height"); f != nil { - height, ok = f.PropertyValue.ValueNumber() - if !ok { - dsHeight := f.DatasetValue.ValueNumber() - if dsHeight != nil { - height = *dsHeight - } else { - return nil, errors.New("invalid value type") - } - } + + if f := li.Property.Field("height").Value().ValueNumber(); f != nil { + c.Alt = *f } - layerTag = layerTag.Add( - kml.Coordinates( - kml.Coordinate{ - Lon: latlng.Lng, - Lat: latlng.Lat, - Alt: height, - })) + + layerTag = kml.Point().Add(kml.Coordinates(c)) case "polygon": layerTag = kml.Polygon() - polygon := property.Polygon{} - if f := li.Property.Field("polygon"); f != nil { - polygon, ok = f.PropertyValue.ValuePolygon() - if !ok { - return nil, 
errors.New("invalid value type") - } - } - // by default, first polygon coords set is for outer boundaries... the second is for inner - if len(polygon) > 0 { - var coords []kml.Coordinate - for _, c := range polygon[0] { - coords = append(coords, kml.Coordinate{ - Lon: c.Lng, - Lat: c.Lat, - Alt: c.Height, - }) - } - layerTag = layerTag.Add(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) - } - //encode inner boundaries - if len(polygon) == 2 { - var coords []kml.Coordinate - for _, c := range polygon[1] { - coords = append(coords, kml.Coordinate{ - Lon: c.Lng, - Lat: c.Lat, - Alt: c.Height, - }) + // polygon := property.Polygon{} + if f := li.Property.Field("polygon").Value().ValuePolygon(); f != nil && len(*f) > 0 { + // by default, first polygon coords set is for outer boundaries... the second is for inner + for i, r := range *f { + var coords []kml.Coordinate + for _, c := range r { + coords = append(coords, kml.Coordinate{ + Lon: c.Lng, + Lat: c.Lat, + Alt: c.Height, + }) + } + if i == 0 { + layerTag = layerTag.Add(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } else { + layerTag = layerTag.Add(kml.InnerBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } } - layerTag.Add(kml.InnerBoundaryIs(kml.LinearRing(kml.Coordinates(coords...)))) + } else { + return nil, errors.New("invalid value type") } case "polyline": - layerTag = kml.LineString() - polyline := property.Coordinates{} - if f := li.Property.Field("coordinates"); f != nil { - polyline, ok = f.PropertyValue.ValueCoordinates() - if !ok { - return nil, errors.New("invalid value type") - } - } - if len(polyline) > 0 { - var coords []kml.Coordinate - for _, c := range polyline { + if f := li.Property.Field("coordinates").Value().ValueCoordinates(); f != nil && len(*f) > 0 { + coords := make([]kml.Coordinate, 0, len(*f)) + for _, c := range *f { coords = append(coords, kml.Coordinate{ Lon: c.Lng, Lat: c.Lat, Alt: c.Height, }) } - layerTag = 
layerTag.Add(kml.Coordinates(coords...)) + layerTag = kml.LineString().Add(kml.Coordinates(coords...)) + } else { + return nil, errors.New("invalid value type") } } + placemark := kml.Placemark() if len(li.Name) != 0 { - placemark.Add(name) + placemark.Add(e.getName(li.Name)) } placemark = placemark.Add(layerTag) @@ -358,31 +233,22 @@ func (e *KMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup, parent *kml. for _, ch := range li.Children { if g, ok := ch.(*merging.SealedLayerGroup); ok { folder := kml.Folder() - folder, err := e.encodeLayerGroup(g, folder) if err != nil { return nil, err } - - parent.Add(folder) + parent = parent.Add(folder) } else if i, ok := ch.(*merging.SealedLayerItem); ok { placemark, err := e.encodeLayerTag(i) if err != nil { return nil, err - } - if placemark == nil { + } else if placemark == nil { return nil, nil } - - style, styleId, err := e.encodeStyle(i) - if err != nil { - return nil, err - } - if style != nil { + if style, styleId := e.encodeStyle(i); style != nil { e.styles = append(e.styles, style) placemark.Add(kml.StyleURL("#" + styleId)) } - parent = parent.Add(placemark) } } @@ -390,19 +256,16 @@ func (e *KMLEncoder) encodeLayerGroup(li *merging.SealedLayerGroup, parent *kml. 
return parent, nil } -func (e *KMLEncoder) Encode(layer merging.SealedLayer) error { +func (e *KMLEncoder) Encode(layer merging.SealedLayer) (err error) { var res *kml.CompoundElement - var err error if i, ok := layer.(*merging.SealedLayerItem); ok { - style, styleId, err := e.encodeStyle(i) - if err != nil { - return err - } + style, styleId := e.encodeStyle(i) l, err := e.encodeLayerTag(i) if err != nil { return err } + if style != nil { res = kml.KML(style) res = res.Add(l) @@ -412,22 +275,23 @@ func (e *KMLEncoder) Encode(layer merging.SealedLayer) error { } } else if g, ok := layer.(*merging.SealedLayerGroup); ok { doc := kml.Document() - doc, err := e.encodeLayerGroup(g, doc) if err != nil { return err } + if len(e.styles) > 0 { for _, s := range e.styles { doc.Add(s) } } + res = kml.KML(doc) } - err = res.WriteIndent(e.writer, "", " ") - if err != nil { + if err := res.WriteIndent(e.writer, "", " "); err != nil { return err } + return nil } diff --git a/pkg/layer/encoding/kml_test.go b/pkg/layer/encoding/kml_test.go index 3ab474578..686b366f3 100644 --- a/pkg/layer/encoding/kml_test.go +++ b/pkg/layer/encoding/kml_test.go @@ -2,7 +2,6 @@ package encoding import ( "bytes" - "io" "testing" "github.com/reearth/reearth-backend/pkg/id" @@ -15,493 +14,288 @@ import ( var _ Encoder = (*KMLEncoder)(nil) -func TestEncodeKMLMarker(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("marker") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.LatLng{ - Lat: 4.4, - Lng: 53.4, - } +func TestKMLEncoder_Encode(t *testing.T) { + lid := id.MustLayerID("01fmph48ykj1nd82r8e4znh6a6") - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("location"), - Type: "latlng", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - 
Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeNumber - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("imageSize"), - Type: "number", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe(4), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeURL - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("image"), - Type: "url", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe("http://maps.google.com/mapfiles/kml/pal4/icon28.png"), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - v4 := property.ValueTypeString - f4 := property.SealedField{ - ID: id.PropertySchemaFieldID("pointColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v4.ValueFromUnsafe("#7fff00ff"), - } - fl4 := []*property.SealedField{} - fl4 = append(fl4, &f4) - item4 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl4, - Groups: nil, - } - il = append(il, &item4) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, + tests := []struct { + name string + target merging.SealedLayer + want func() *kml.CompoundElement + 
}{ + { + name: "marker", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Scene: id.NewSceneID(), + Name: "test", + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("marker").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("location"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 4.4, Lng: 53.4}), + ), + }, + { + ID: id.PropertySchemaFieldID("height"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(100), + ), + }, + { + ID: id.PropertySchemaFieldID("imageSize"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(4), + ), + }, + { + ID: id.PropertySchemaFieldID("image"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeURL, + nil, + property.ValueTypeURL.ValueFrom("http://maps.google.com/mapfiles/kml/pal4/icon28.png"), + ), + }, + { + ID: id.PropertySchemaFieldID("pointColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#7fff00ff"), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *kml.CompoundElement { + k := kml.KML( + kml.SharedStyle( + "01fmph48ykj1nd82r8e4znh6a6_style", + kml.IconStyle( + kml.Icon(kml.Href("http://maps.google.com/mapfiles/kml/pal4/icon28.png")), + kml.Scale(4), + kml.Color(getColor("#7fff00ff")), + ), + ), + ) + k = k.Add( + kml.Placemark( + kml.Name("test"), + kml.Point(kml.Coordinates(kml.Coordinate{Lon: 53.4, Lat: 4.4, Alt: 100})), + kml.StyleURL("#01fmph48ykj1nd82r8e4znh6a6_style"), + ), + ) + return k }, - Property: 
&sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewKMLEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - - colorStr, _ := f4.PropertyValue.ValueString() - sizeFloat, _ := f2.PropertyValue.ValueNumber() - urlValue, _ := f3.PropertyValue.ValueURL() - b, _ := getColor(colorStr) - stid, err := en.generateStyleId(l.Original.String(), l.Name) - assert.NoError(t, err) - expected := kml.KML(kml.SharedStyle(stid, kml.IconStyle( - kml.Scale(sizeFloat), - kml.Color(b), - kml.Icon( - kml.Href(urlValue.String()))))) - expected = expected.Add(kml.Placemark(kml.Name("test"), - kml.Point(kml.Coordinates(kml.Coordinate{ - Lon: v1.Lng, - Lat: v1.Lat, - })), - kml.StyleURL("#"+stid))) - reader2, writer2 := io.Pipe() - go func() { - defer func() { - _ = writer2.Close() - }() - err = expected.WriteIndent(writer2, "", " ") - assert.NoError(t, err) - }() - - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - - s2 := buf2.String() - assert.Equal(t, s2, s) -} -func TestEncodeKMLPolygon(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polygon") - iid := id.MustPropertyItemID(id.New().String()) - vc := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, - } - v1 := property.Polygon{vc} - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("polygon"), - Type: "polygon", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - 
Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeBool - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("fill"), - Type: "bool", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe(true), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, - } - il = append(il, &item2) - v3 := property.ValueTypeString - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("fillColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe("#ff334353"), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - v4 := property.ValueTypeBool - f4 := property.SealedField{ - ID: id.PropertySchemaFieldID("stroke"), - Type: "bool", - DatasetValue: nil, - PropertyValue: v4.ValueFromUnsafe(true), - } - fl4 := []*property.SealedField{} - fl4 = append(fl4, &f4) - item4 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl4, - Groups: nil, - } - il = append(il, &item4) - v5 := property.ValueTypeString - f5 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v5.ValueFromUnsafe("#ff554555"), - } - fl5 := []*property.SealedField{} - fl5 = append(fl5, &f5) - item5 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl5, - Groups: nil, - } - il = append(il, &item5) - v6 := 
property.ValueTypeNumber - f6 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeWidth"), - Type: "number", - DatasetValue: nil, - PropertyValue: v6.ValueFromUnsafe(3), - } - fl6 := []*property.SealedField{} - fl6 = append(fl6, &f6) - item6 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl6, - Groups: nil, - } - il = append(il, &item6) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, + { + name: "polygon", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Scene: id.NewSceneID(), + Name: "test", + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom(property.Polygon{property.Coordinates{ + property.LatLngHeight{Lat: 3.4, Lng: 5.34, Height: 100}, + property.LatLngHeight{Lat: 45.4, Lng: 2.34, Height: 100}, + property.LatLngHeight{Lat: 34.66, Lng: 654.34, Height: 100}, + }}), + ), + }, + { + ID: id.PropertySchemaFieldID("fill"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: id.PropertySchemaFieldID("fillColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + 
property.ValueTypeString.ValueFrom("#ff334353"), + ), + }, + { + ID: id.PropertySchemaFieldID("stroke"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeBool, + nil, + property.ValueTypeBool.ValueFrom(true), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff554555"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *kml.CompoundElement { + k := kml.KML( + kml.SharedStyle( + "01fmph48ykj1nd82r8e4znh6a6_style", + kml.PolyStyle( + kml.Fill(true), + kml.Color(getColor("#ff334353")), + ), + kml.LineStyle( + kml.Outline(true), + kml.Color(getColor("#ff554555")), + kml.Width(3), + ), + ), + ) + k = k.Add( + kml.Placemark(kml.Name("test"), + kml.Polygon(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates( + kml.Coordinate{Lon: 5.34, Lat: 3.4, Alt: 100}, + kml.Coordinate{Lon: 2.34, Lat: 45.4, Alt: 100}, + kml.Coordinate{Lon: 654.34, Lat: 34.66, Alt: 100}, + )))), + kml.StyleURL("#01fmph48ykj1nd82r8e4znh6a6_style"), + ), + ) + return k + }, + }, + { + name: "polyline", + target: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: lid, + Scene: id.NewSceneID(), + Name: "test", + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + 
property.LatLngHeight{Lat: 3.4, Lng: 5.34, Height: 100}, + property.LatLngHeight{Lat: 45.4, Lng: 2.34, Height: 100}, + property.LatLngHeight{Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeColor"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeString, + nil, + property.ValueTypeString.ValueFrom("#ff224222"), + ), + }, + { + ID: id.PropertySchemaFieldID("strokeWidth"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeNumber, + nil, + property.ValueTypeNumber.ValueFrom(3), + ), + }, + }, + }, + }, + }, + }, + }, + want: func() *kml.CompoundElement { + k := kml.KML( + kml.SharedStyle( + "01fmph48ykj1nd82r8e4znh6a6_style", + kml.LineStyle( + kml.Color(getColor("#ff224222")), + kml.Width(3), + ), + ), + ) + k = k.Add( + kml.Placemark( + kml.Name("test"), + kml.LineString(kml.Coordinates( + kml.Coordinate{Lon: 5.34, Lat: 3.4, Alt: 100}, + kml.Coordinate{Lon: 2.34, Lat: 45.4, Alt: 100}, + kml.Coordinate{Lon: 654.34, Lat: 34.66, Alt: 100}, + )), + kml.StyleURL("#01fmph48ykj1nd82r8e4znh6a6_style"), + ), + ) + return k }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewKMLEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - }() - fillColorStr, _ := f3.PropertyValue.ValueString() - strokeColorStr, _ := f5.PropertyValue.ValueString() - b1, _ := getColor(fillColorStr) - b2, _ := getColor(strokeColorStr) - stid, err := en.generateStyleId(l.Original.String(), l.Name) - assert.NoError(t, err) - expected := kml.KML(kml.SharedStyle(stid, - kml.PolyStyle( - kml.Fill(true), - kml.Color(b1), - ), - kml.LineStyle( - kml.Outline(true), - kml.Color(b2), - kml.Width(3), - ))) - expected = expected.Add(kml.Placemark(kml.Name("test"), - kml.Polygon(kml.OuterBoundaryIs(kml.LinearRing(kml.Coordinates([]kml.Coordinate{ - {Lon: 5.34, Lat: 3.4, Alt: 100}, - {Lon: 2.34, Lat: 45.4, Alt: 100}, - {Lon: 654.34, Lat: 34.66, Alt: 100}, 
- }...)))), - kml.StyleURL("#"+stid))) - reader2, writer2 := io.Pipe() - go func() { - defer func() { - _ = writer2.Close() - }() - err = expected.WriteIndent(writer2, "", " ") - }() - assert.NoError(t, err) - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) - s2 := buf2.String() - assert.Equal(t, s2, s) -} -func TestEncodeKMLPolyline(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polyline") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, } - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("coordinates"), - Type: "coordinates", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - v2 := property.ValueTypeString - f2 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeColor"), - Type: "string", - DatasetValue: nil, - PropertyValue: v2.ValueFromUnsafe("#ff224222"), - } - fl2 := []*property.SealedField{} - fl2 = append(fl2, &f2) - item2 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl2, - Groups: nil, + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + we := bytes.Buffer{} + _ = tt.want().WriteIndent(&we, "", " ") + wa := bytes.Buffer{} + 
assert.NoError(t, NewKMLEncoder(&wa).Encode(tt.target)) + assert.Equal(t, we.String(), wa.String()) + }) } - il = append(il, &item2) - v3 := property.ValueTypeNumber - f3 := property.SealedField{ - ID: id.PropertySchemaFieldID("strokeWidth"), - Type: "number", - DatasetValue: nil, - PropertyValue: v3.ValueFromUnsafe(3), - } - fl3 := []*property.SealedField{} - fl3 = append(fl3, &f3) - item3 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl3, - Groups: nil, - } - il = append(il, &item3) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, - }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewKMLEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - }() - strokeColorStr, _ := f2.PropertyValue.ValueString() - b1, _ := getColor(strokeColorStr) - stid, err := en.generateStyleId(l.Original.String(), l.Name) - assert.NoError(t, err) - expected := kml.KML(kml.SharedStyle(stid, - kml.LineStyle( - kml.Color(b1), - kml.Width(3), - ))) - expected = expected.Add(kml.Placemark(kml.Name("test"), - kml.LineString(kml.Coordinates([]kml.Coordinate{ - {Lon: 5.34, Lat: 3.4, Alt: 100}, - {Lon: 2.34, Lat: 45.4, Alt: 100}, - {Lon: 654.34, Lat: 34.66, Alt: 100}, - }...)), - kml.StyleURL("#"+stid))) - reader2, writer2 := io.Pipe() - go func() { - defer func() { - _ = writer2.Close() - }() - err = expected.WriteIndent(writer2, "", " ") - }() - assert.NoError(t, err) - buf := new(bytes.Buffer) - _, err = buf.ReadFrom(reader) - assert.NoError(t, err) - s := buf.String() - buf2 := new(bytes.Buffer) - _, err = buf2.ReadFrom(reader2) - assert.NoError(t, err) 
- s2 := buf2.String() - assert.Equal(t, s2, s) } diff --git a/pkg/layer/encoding/shp.go b/pkg/layer/encoding/shp.go index 2f75a7fa0..833c07316 100644 --- a/pkg/layer/encoding/shp.go +++ b/pkg/layer/encoding/shp.go @@ -31,6 +31,7 @@ func coordsToPoints(coords property.Coordinates) []shp.Point { } return res } + func polygonToPoints(poly property.Polygon) ([]shp.Point, []int32) { var res []shp.Point parts := []int32{0} @@ -39,25 +40,47 @@ func polygonToPoints(poly property.Polygon) ([]shp.Point, []int32) { res = append(res, partPoints...) if i > 0 { parts = append(parts, int32(len(partPoints)-1)) - } } return res, parts } + func getMaxMinPoints(points []shp.Point) (shp.Point, shp.Point) { var max, min shp.Point - max = points[0] - min = points[0] - for _, p := range points { - if p.X > max.X && p.Y > max.Y { - max = p + for i, p := range points { + if i == 0 || p.X > min.X { + max.X = p.X + } + if i == 0 || p.X < min.X { + min.X = p.X } - if p.X < min.X && p.Y < min.Y { - min = p + if i == 0 || p.Y > max.Y { + max.Y = p.Y + } + if i == 0 || p.Y < min.Y { + min.Y = p.Y } } return max, min } + +func coordinatesToSHP(coordinates property.Coordinates) *shp.PolyLine { + points := coordsToPoints(coordinates) + max, min := getMaxMinPoints(points) + return &shp.PolyLine{ + Box: shp.Box{ + MinX: min.X, + MinY: min.Y, + MaxX: max.X, + MaxY: max.Y, + }, + NumParts: 1, + NumPoints: int32(len(points)), + Parts: []int32{0}, + Points: points, + } +} + func polygonToSHP(poly property.Polygon) *shp.Polygon { points, parts := polygonToPoints(poly) max, min := getMaxMinPoints(points) @@ -75,78 +98,29 @@ func polygonToSHP(poly property.Polygon) *shp.Polygon { } return &res } -func (e *SHPEncoder) encodeLayer(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType, error) { + +func (e *SHPEncoder) encodeLayer(li *merging.SealedLayerItem) (sh shp.Shape, st shp.ShapeType, err error) { if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { return nil, 0, nil } - var shapeType 
shp.ShapeType - var ok bool - var sh shp.Shape switch li.ExtensionID.String() { case "marker": - shapeType = shp.POINT - latlng := property.LatLng{} - if f := li.Property.Field("location"); f != nil { - latlng, ok = f.PropertyValue.ValueLatLng() - if !ok { - dsll := f.DatasetValue.ValueLatLng() - if dsll != nil { - latlng = property.LatLng{ - Lat: dsll.Lat, - Lng: dsll.Lng, - } - } else { - return nil, 0, errors.New("invalid value type") - } - } - sh = &shp.Point{ - X: latlng.Lng, - Y: latlng.Lat, - } - - } + sh, st = e.encodeMarker(li) case "polygon": - shapeType = shp.POLYGON - polygon := property.Polygon{} - if f := li.Property.Field("polygon"); f != nil { - polygon, ok = f.PropertyValue.ValuePolygon() - if !ok { - return nil, 0, errors.New("invalid value type") - } - } - if len(polygon) > 0 { - shpPoly := polygonToSHP(polygon) - sh = shpPoly - } - + sh, st = e.encodePolygon(li) case "polyline": - shapeType = shp.POLYLINE - polyline := property.Coordinates{} - if f := li.Property.Field("coordinates"); f != nil { - polyline, ok = f.PropertyValue.ValueCoordinates() - if !ok { - return nil, 0, errors.New("invalid value type") - } - } - if len(polyline) > 0 { - points := coordsToPoints(polyline) - sh = &shp.PolyLine{ - Box: shp.Box{MinX: 102, MinY: 0, MaxX: 104, MaxY: 0}, - NumParts: 1, - NumPoints: int32(len(points)), - Parts: []int32{0}, - Points: points, - } - } + sh, st = e.encodePolyline(li) + } + if sh == nil || st == 0 { + return nil, 0, errors.New("invalid value type") } - return sh, shapeType, nil + return sh, st, nil } func (e *SHPEncoder) encodeLayerGroup(w *wsc.WriterSeeker, li *merging.SealedLayerGroup, shape *shp.Writer) error { for _, ch := range li.Children { if g, ok := ch.(*merging.SealedLayerGroup); ok { - err := e.encodeLayerGroup(w, g, shape) - if err != nil { + if err := e.encodeLayerGroup(w, g, shape); err != nil { return err } } else if i, ok := ch.(*merging.SealedLayerItem); ok { @@ -154,27 +128,28 @@ func (e *SHPEncoder) encodeLayerGroup(w 
*wsc.WriterSeeker, li *merging.SealedLay if err != nil { return err } + if shape == nil { shape, err = shp.CreateFrom(w, t) + if err != nil { return err } + defer func() { err = shape.Close() - }() - if err != nil { - return err - } } - _, err = shape.Write(l) - if err != nil { + + if _, err := shape.Write(l); err != nil { return err } } } + return nil } + func (e *SHPEncoder) Encode(layer merging.SealedLayer) error { var err error var w wsc.WriterSeeker @@ -210,3 +185,30 @@ func (e *SHPEncoder) Encode(layer merging.SealedLayer) error { } return nil } + +func (*SHPEncoder) encodeMarker(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType) { + f := li.Property.Field("location").Value().ValueLatLng() + if f == nil { + return nil, 0 + } + return &shp.Point{ + X: (*f).Lng, + Y: (*f).Lat, + }, shp.POINT +} + +func (*SHPEncoder) encodePolygon(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType) { + f := li.Property.Field("polygon").Value().ValuePolygon() + if f == nil || len(*f) == 0 { + return nil, 0 + } + return polygonToSHP(*f), shp.POLYGON +} + +func (*SHPEncoder) encodePolyline(li *merging.SealedLayerItem) (shp.Shape, shp.ShapeType) { + f := li.Property.Field("coordinates").Value().ValueCoordinates() + if f == nil || len(*f) == 0 { + return nil, 0 + } + return coordinatesToSHP(*f), shp.POLYLINE +} diff --git a/pkg/layer/encoding/shp_test.go b/pkg/layer/encoding/shp_test.go index 981177d80..b7256f5b7 100644 --- a/pkg/layer/encoding/shp_test.go +++ b/pkg/layer/encoding/shp_test.go @@ -1,7 +1,6 @@ package encoding import ( - "io" "os" "testing" @@ -16,283 +15,140 @@ import ( var _ Encoder = (*SHPEncoder)(nil) -func TestEncodeSHPMarker(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("marker") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.LatLng{ - Lat: 4.4, - Lng: 53.4, - } - f1 := property.SealedField{ - ID: 
id.PropertySchemaFieldID("location"), - Type: "latlng", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, +func TestEncodeSHP(t *testing.T) { + tests := []struct { + name string + layer *merging.SealedLayerItem + want shp.Shape + }{ + { + layer: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: id.NewLayerID(), + Parent: nil, + Scene: id.NewSceneID(), + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polygon").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("polygon"), + Val: property.NewValueAndDatasetValue( + property.ValueTypePolygon, + nil, + property.ValueTypePolygon.ValueFrom(property.Polygon{property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }}), + ), + }, + }, + }, + }, + }, + }, + }, + want: &shp.Polygon{ + Box: shp.Box{ + MinX: 2.34, + MaxX: 654.34, + MinY: 3.4, + MaxY: 45.4, + }, + NumParts: 1, + NumPoints: 3, + Parts: []int32{0}, + Points: []shp.Point{ + {X: 5.34, Y: 3.4}, + {X: 2.34, Y: 45.4}, + {X: 654.34, Y: 34.66}, + }, }, - Property: 
&sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewSHPEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - assert.NoError(t, err) - }() - tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") - assert.NoError(t, err) - defer func() { - err := os.Remove(tmpFile.Name()) - assert.NoError(t, err) - }() - b, err := io.ReadAll(reader) - assert.NoError(t, err) - _, err = tmpFile.Write(b) - assert.NoError(t, err) - err = tmpFile.Close() - assert.NoError(t, err) - shape, err := shp.Open(tmpFile.Name()) - assert.NoError(t, err) - defer func() { - err := shape.Close() - assert.NoError(t, err) - }() - var expected *shp.Point - var ok bool - for shape.Next() { - _, p := shape.Shape() - expected, ok = p.(*shp.Point) - } - assert.Equal(t, true, ok) - assert.Equal(t, expected, &shp.Point{ - X: 53.4, - Y: 4.4, - }) -} -func TestEncodeSHPPolygon(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polygon") - iid := id.MustPropertyItemID(id.New().String()) - vc := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, - } - v1 := property.Polygon{vc} - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("polygon"), - Type: "polygon", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: 
merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, + { + name: "polyline", + layer: &merging.SealedLayerItem{ + SealedLayerCommon: merging.SealedLayerCommon{ + Merged: layer.Merged{ + Original: id.NewLayerID(), + Parent: nil, + Name: "test", + Scene: id.NewSceneID(), + Property: nil, + Infobox: nil, + PluginID: &id.OfficialPluginID, + ExtensionID: id.PluginExtensionID("polyline").Ref(), + }, + Property: &property.Sealed{ + Original: id.NewPropertyID().Ref(), + Items: []*property.SealedItem{ + { + Original: id.NewPropertyItemID().Ref(), + SchemaGroup: id.PropertySchemaGroupID("default"), + Fields: []*property.SealedField{ + { + ID: id.PropertySchemaFieldID("coordinates"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeCoordinates, + nil, + property.ValueTypeCoordinates.ValueFrom(property.Coordinates{ + {Lat: 3.4, Lng: 5.34, Height: 100}, + {Lat: 45.4, Lng: 2.34, Height: 100}, + {Lat: 34.66, Lng: 654.34, Height: 100}, + }), + ), + }, + }, + }, + }, + }, + }, + }, + want: &shp.PolyLine{ + Box: shp.Box{ + MinX: 2.34, + MaxX: 654.34, + MinY: 3.4, + MaxY: 45.4, + }, + NumParts: 1, + NumPoints: 3, + Parts: []int32{0}, + Points: []shp.Point{ + {X: 5.34, Y: 3.4}, + {X: 2.34, Y: 45.4}, + {X: 654.34, Y: 34.66}, + }, }, - Property: &sp, - Infobox: nil, - }} - - reader, writer := io.Pipe() - en := NewSHPEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - }() - tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") - assert.NoError(t, err) - defer func() { - err := os.Remove(tmpFile.Name()) - assert.NoError(t, err) - }() - b, err := io.ReadAll(reader) - assert.NoError(t, err) - _, err = tmpFile.Write(b) - assert.NoError(t, err) - err = tmpFile.Close() - assert.NoError(t, err) - shape, err := shp.Open(tmpFile.Name()) - assert.NoError(t, err) - defer func() { - err := 
shape.Close() - assert.NoError(t, err) - }() - var expected *shp.Polygon - var ok bool - for shape.Next() { - _, p := shape.Shape() - expected, ok = p.(*shp.Polygon) - } - assert.Equal(t, true, ok) - assert.Equal(t, expected, &shp.Polygon{Box: shp.Box{MinX: 5.34, MinY: 3.4, MaxX: 654.34, MaxY: 34.66}, NumParts: 1, NumPoints: 3, Parts: []int32{0}, Points: []shp.Point{{X: 5.34, Y: 3.4}, {X: 2.34, Y: 45.4}, {X: 654.34, Y: 34.66}}}) -} - -func TestEncodeSHPPolyline(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("polyline") - iid := id.MustPropertyItemID(id.New().String()) - v1 := property.Coordinates{ - property.LatLngHeight{ - Lat: 3.4, - Lng: 5.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 45.4, - Lng: 2.34, - Height: 100, - }, property.LatLngHeight{ - Lat: 34.66, - Lng: 654.34, - Height: 100, }, } - f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("coordinates"), - Type: "coordinates", - DatasetValue: nil, - PropertyValue: v1.Value(), - } - fl1 := []*property.SealedField{} - fl1 = append(fl1, &f1) - item1 := property.SealedItem{ - Original: &iid, - Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), - LinkedDataset: nil, - Fields: fl1, - Groups: nil, - } - il := []*property.SealedItem{} - il = append(il, &item1) - sp := property.Sealed{ - Original: &pid, - Items: il, - } - l := merging.SealedLayerItem{ - SealedLayerCommon: merging.SealedLayerCommon{ - Merged: layer.Merged{ - Original: lid, - Parent: nil, - Name: "test", - Scene: sid, - Property: nil, - Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: &ex, - }, - Property: &sp, - Infobox: nil, - }} - reader, writer := io.Pipe() - en := NewSHPEncoder(writer) - var err error - go func() { - defer func() { - _ = writer.Close() - }() - err = en.Encode(&l) - }() - tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") - assert.NoError(t, err) - defer 
func() { - err := os.Remove(tmpFile.Name()) - assert.NoError(t, err) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") + assert.NoError(t, err) + en := NewSHPEncoder(tmpFile) + assert.NoError(t, en.Encode(tt.layer)) + + shape, err := shp.Open(tmpFile.Name()) + assert.True(t, shape.Next()) + + assert.NoError(t, err) + assert.NoError(t, os.Remove(tmpFile.Name())) + assert.NoError(t, shape.Close()) - }() - b, err := io.ReadAll(reader) - assert.NoError(t, err) - _, err = tmpFile.Write(b) - assert.NoError(t, err) - err = tmpFile.Close() - assert.NoError(t, err) - shape, err := shp.Open(tmpFile.Name()) - assert.NoError(t, err) - defer func() { - err := shape.Close() - assert.NoError(t, err) - }() - var expected *shp.PolyLine - var ok bool - for shape.Next() { - _, p := shape.Shape() - expected, ok = p.(*shp.PolyLine) + _, p := shape.Shape() + assert.Equal(t, tt.want, p) + }) } - assert.Equal(t, true, ok) - assert.Equal(t, expected, &shp.PolyLine{Box: shp.Box{MinX: 102, MinY: 0, MaxX: 104, MaxY: 0}, NumParts: 1, NumPoints: 3, Parts: []int32{0}, Points: []shp.Point{{X: 5.34, Y: 3.4}, {X: 2.34, Y: 45.4}, {X: 654.34, Y: 34.66}}}) } diff --git a/pkg/layer/layerops/initializer.go b/pkg/layer/layerops/initializer.go index 49b2bfa13..f69b3e2b8 100644 --- a/pkg/layer/layerops/initializer.go +++ b/pkg/layer/layerops/initializer.go @@ -46,7 +46,7 @@ func (i LayerItem) Initialize() (*layer.Item, *property.Property, error) { return nil, nil, err } - p.UpdateLinkableValue(i.LinkablePropertySchema, property.ValueTypeLatLng.ValueFromUnsafe(i.LatLng)) + p.UpdateLinkableValue(i.LinkablePropertySchema, property.ValueTypeLatLng.ValueFrom(i.LatLng)) builder. Plugin(i.Plugin.ID().Ref()). 
diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index c903e8abd..4e9c54211 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -280,8 +280,8 @@ func (o *PropertyCondition) condition() *property.Condition { } func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { - t, ok := property.ValueTypeFrom(string(i.Type)) - if !ok { + t := property.ValueType(i.Type) + if !t.Valid() { return nil, fmt.Errorf("invalid value type: %s", i.Type) } @@ -342,9 +342,5 @@ func (c *Choice) choice() *property.SchemaFieldChoice { } func toValue(v interface{}, t Valuetype) *property.Value { - vt, ok := property.ValueTypeFrom(string(t)) - if !ok { - return nil - } - return vt.ValueFromUnsafe(v) + return property.ValueType(t).ValueFrom(v) } diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 8e87ae9a6..73d045400 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -14,7 +14,7 @@ import ( func TestToValue(t *testing.T) { v := property.ValueTypeBool var vv *property.Value = nil - assert.Equal(t, toValue(false, "bool"), v.ValueFromUnsafe(false)) + assert.Equal(t, toValue(false, "bool"), v.ValueFrom(false)) assert.Equal(t, toValue("xx", "xxx"), vv) } diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index e73971282..08d9661ed 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -38,10 +38,10 @@ var normalExpected = &Manifest{ Fields([]*property.SchemaField{ property.NewSchemaField().ID(id.PropertySchemaFieldID("a")). Type(property.ValueTypeBool). - DefaultValue(property.ValueTypeBool.MustBeValue(true)). + DefaultValue(property.ValueTypeBool.ValueFrom(true)). IsAvailableIf(&property.Condition{ Field: id.PropertySchemaFieldID("b"), - Value: property.ValueTypeNumber.MustBeValue(1), + Value: property.ValueTypeNumber.ValueFrom(1), }). 
MustBuild(), property.NewSchemaField().ID(id.PropertySchemaFieldID("b")). diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index 307331090..551710308 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -57,16 +57,14 @@ func TestBuilder_Items(t *testing.T) { Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild(), NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild(), }, @@ -74,8 +72,7 @@ func TestBuilder_Items(t *testing.T) { Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild()}, }, @@ -127,8 +124,7 @@ func TestBuilder_Build(t *testing.T) { Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild()}, Expected: struct { @@ -145,8 +141,7 @@ func TestBuilder_Build(t *testing.T) { Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild()}, }, @@ -180,8 +175,7 @@ func TestBuilder_Build(t *testing.T) { Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). 
- TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild()}, Err: ErrInvalidItem, diff --git a/pkg/property/condition_test.go b/pkg/property/condition_test.go index 236d827fa..74a7ea2fb 100644 --- a/pkg/property/condition_test.go +++ b/pkg/property/condition_test.go @@ -20,17 +20,11 @@ func TestCondition_Clone(t *testing.T) { Name: "nil condition", Con: &Condition{ Field: "a", - Value: &Value{ - v: true, - t: ValueTypeBool, - }, + Value: ValueTypeBool.ValueFrom(true), }, Expected: &Condition{ Field: "a", - Value: &Value{ - v: true, - t: ValueTypeBool, - }, + Value: ValueTypeBool.ValueFrom(true), }, }, } diff --git a/pkg/property/field.go b/pkg/property/field.go index 0ecd7fb3c..d38ffa139 100644 --- a/pkg/property/field.go +++ b/pkg/property/field.go @@ -17,17 +17,15 @@ var ( type Field struct { field id.PropertySchemaFieldID - ptype ValueType links *Links - value *Value + v *OptionalValue } func (p *Field) Clone() *Field { return &Field{ field: p.field, - ptype: p.ptype, - value: p.value.Clone(), links: p.links.Clone(), + v: p.v.Clone(), } } @@ -43,14 +41,17 @@ func (p *Field) Links() *Links { } func (p *Field) Type() ValueType { - return p.ptype + if p == nil { + return ValueTypeUnknown + } + return p.v.Type() } func (p *Field) Value() *Value { if p == nil { return nil } - return p.value + return p.v.Value() } func (p *Field) ActualValue(ds *dataset.Dataset) *Value { @@ -60,14 +61,13 @@ func (p *Field) ActualValue(ds *dataset.Dataset) *Value { ldsfid := l.DatasetSchemaField() if ldid != nil || ldsfid != nil || ds.ID() == *ldid { if f := ds.Field(*ldsfid); f != nil { - v1, _ := valueFromDataset(f.Value()) - return v1 + return valueFromDataset(f.Value()) } } } return nil } - return p.value + return p.Value() } func (p *Field) HasLinkedField() bool { @@ -95,26 +95,41 @@ func (p *Field) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { } 
func (p *Field) Update(value *Value, field *SchemaField) error { - if field == nil || p.field != field.ID() || !field.Validate(value) { + if p == nil { + return nil + } + if field == nil || p.field != field.ID() || !field.Validate(p.v) { return ErrInvalidPropertyValue } - p.value = value + p.v.SetValue(value) return nil } func (p *Field) UpdateUnsafe(value *Value) { - p.value = value + if p == nil { + return + } + p.v.SetValue(value) } func (p *Field) Link(links *Links) { + if p == nil { + return + } p.links = links.Clone() } func (p *Field) Unlink() { + if p == nil { + return + } p.links = nil } func (p *Field) UpdateField(field id.PropertySchemaFieldID) { + if p == nil { + return + } p.field = field } @@ -134,7 +149,7 @@ func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset invalid := schemaField == nil // if value is not compatible for type, value will be cleared - if !schemaField.Validate(p.Value()) { + if !schemaField.Validate(p.v) { p.UpdateUnsafe(nil) } @@ -144,7 +159,7 @@ func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset if dsid, dsfid := l.Last().Dataset(), l.Last().DatasetSchemaField(); dsid != nil && dsfid != nil { dss, _ := dl(ctx, *dsid) if dsf := dss[0].Field(*dsfid); dsf != nil { - if schemaField.Type() != valueTypeFromDataset(dsf.Type()) { + if schemaField.Type() != ValueType(dsf.Type()) { p.Unlink() } } @@ -179,14 +194,8 @@ func (p *Field) ValidateSchema(ps *SchemaField) error { if ps == nil { return errors.New("schema not found") } - if p.ptype != ps.Type() { - return errors.New("invalid field type") - } - if p.ptype != p.value.Type() { - return errors.New("invalid field value type") - } - if !p.ptype.ValidateValue(p.value) { - return errors.New("invalid field value") + if p.v == nil { + return errors.New("invalid field value and type") } return nil } diff --git a/pkg/property/field_builder.go b/pkg/property/field_builder.go index 3bd7ab99f..00bbbae54 100644 --- 
a/pkg/property/field_builder.go +++ b/pkg/property/field_builder.go @@ -2,18 +2,15 @@ package property import "github.com/reearth/reearth-backend/pkg/id" -// FieldBuilder _ type FieldBuilder struct { p *Field psf *SchemaField } -// FieldUnsafeBuilder _ type FieldUnsafeBuilder struct { p *Field } -// NewField _ func NewField(p *SchemaField) *FieldBuilder { b := &FieldBuilder{ p: &Field{}, @@ -21,18 +18,16 @@ func NewField(p *SchemaField) *FieldBuilder { return b.schemaField(p) } -// Build _ func (b *FieldBuilder) Build() (*Field, error) { if b.p.field == id.PropertySchemaFieldID("") { return nil, id.ErrInvalidID } - if b.psf != nil && !b.psf.Validate(b.p.value) { + if b.psf != nil && !b.psf.Validate(b.p.v) { return nil, ErrInvalidPropertyValue } return b.p, nil } -// MustBuild _ func (b *FieldBuilder) MustBuild() *Field { p, err := b.Build() if err != nil { @@ -45,69 +40,41 @@ func (b *FieldBuilder) schemaField(p *SchemaField) *FieldBuilder { if p != nil { b.psf = p b.p.field = p.ID() - b.p.ptype = p.Type() - if dv := p.DefaultValue(); dv != nil { - dv2 := *dv - b.p.value = &dv2 - } + b.p.v = NewOptionalValue(p.Type(), p.DefaultValue().Clone()) } return b } -// Value _ -func (b *FieldBuilder) Value(v *Value) *FieldBuilder { - if b.p.field == id.PropertySchemaFieldID("") { - return b - } - v2 := *v - b.p.value = &v2 +func (b *FieldBuilder) Value(v *OptionalValue) *FieldBuilder { + b.p.v = v.Clone() return b } -// Link _ func (b *FieldBuilder) Link(l *Links) *FieldBuilder { b.p.links = l.Clone() return b } -// NewFieldUnsafe _ func NewFieldUnsafe() *FieldUnsafeBuilder { return &FieldUnsafeBuilder{ p: &Field{}, } } -// Build _ func (b *FieldUnsafeBuilder) Build() *Field { return b.p } -// FieldUnsafe _ func (b *FieldUnsafeBuilder) FieldUnsafe(f id.PropertySchemaFieldID) *FieldUnsafeBuilder { b.p.field = f return b } -// TypeUnsafe _ -func (b *FieldUnsafeBuilder) TypeUnsafe(t ValueType) *FieldUnsafeBuilder { - b.p.ptype = t - return b -} - -// ValueUnsafe _ -func (b 
*FieldUnsafeBuilder) ValueUnsafe(v *Value) *FieldUnsafeBuilder { - if v == nil { - b.p.value = nil - return b - } - - v2 := *v - b.p.value = &v2 - b.p.ptype = v.Type() +func (b *FieldUnsafeBuilder) ValueUnsafe(v *OptionalValue) *FieldUnsafeBuilder { + b.p.v = v.Clone() return b } -// LinksUnsafe _ func (b *FieldUnsafeBuilder) LinksUnsafe(l *Links) *FieldUnsafeBuilder { b.p.links = l.Clone() return b diff --git a/pkg/property/field_builder_test.go b/pkg/property/field_builder_test.go index f8c850c3f..0322ddef8 100644 --- a/pkg/property/field_builder_test.go +++ b/pkg/property/field_builder_test.go @@ -10,8 +10,8 @@ import ( func TestFieldBuilder_Value(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - b := NewField(p).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + b := NewField(p).Value(OptionalValueFrom(v)).MustBuild() assert.Equal(t, v, b.Value()) } @@ -49,7 +49,7 @@ func TestFieldBuilder_Build(t *testing.T) { { Name: "fail invalid property type", SF: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), Expected: struct { PType ValueType Links *Links @@ -61,7 +61,7 @@ func TestFieldBuilder_Build(t *testing.T) { Name: "success", SF: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), Expected: struct { PType ValueType Links *Links @@ -69,7 +69,7 @@ func TestFieldBuilder_Build(t *testing.T) { }{ PType: ValueTypeString, Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), }, Err: nil, }, @@ -78,7 +78,7 @@ func TestFieldBuilder_Build(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res, err := NewField(tc.SF).Value(tc.Value).Link(tc.Links).Build() + res, err 
:= NewField(tc.SF).Value(OptionalValueFrom(tc.Value)).Link(tc.Links).Build() if err == nil { assert.Equal(tt, tc.Expected.Links, res.Links()) assert.Equal(tt, tc.Expected.PType, res.Type()) @@ -116,7 +116,7 @@ func TestFieldBuilder_MustBuild(t *testing.T) { { Name: "fail invalid property type", SF: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), Fails: true, Expected: struct { PType ValueType @@ -128,7 +128,7 @@ func TestFieldBuilder_MustBuild(t *testing.T) { Name: "success", SF: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), Expected: struct { PType ValueType Links *Links @@ -136,7 +136,7 @@ func TestFieldBuilder_MustBuild(t *testing.T) { }{ PType: ValueTypeString, Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), }, }, } @@ -151,9 +151,9 @@ func TestFieldBuilder_MustBuild(t *testing.T) { assert.Nil(tt, res) } }() - res = NewField(tc.SF).Value(tc.Value).Link(tc.Links).MustBuild() + res = NewField(tc.SF).Value(OptionalValueFrom(tc.Value)).Link(tc.Links).MustBuild() } else { - res = NewField(tc.SF).Value(tc.Value).Link(tc.Links).MustBuild() + res = NewField(tc.SF).Value(OptionalValueFrom(tc.Value)).Link(tc.Links).MustBuild() assert.Equal(tt, tc.Expected.Links, res.Links()) assert.Equal(tt, tc.Expected.PType, res.Type()) assert.Equal(tt, tc.Expected.Value, res.Value()) @@ -185,7 +185,7 @@ func TestFieldUnsafeBuilder_Build(t *testing.T) { { Name: "success", Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), Type: ValueTypeString, Field: "a", Expected: struct { @@ -197,7 +197,7 @@ func TestFieldUnsafeBuilder_Build(t *testing.T) { PType: ValueTypeString, Field: "a", Links: NewLinks([]*Link{l}), - 
Value: ValueTypeString.ValueFromUnsafe("vvv"), + Value: ValueTypeString.ValueFrom("vvv"), }, }, { @@ -223,7 +223,7 @@ func TestFieldUnsafeBuilder_Build(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res := NewFieldUnsafe().ValueUnsafe(tc.Value).LinksUnsafe(tc.Links).TypeUnsafe(tc.Type).FieldUnsafe(tc.Field).Build() + res := NewFieldUnsafe().ValueUnsafe(NewOptionalValue(tc.Type, tc.Value)).LinksUnsafe(tc.Links).FieldUnsafe(tc.Field).Build() assert.Equal(tt, tc.Expected.Links, res.Links()) assert.Equal(tt, tc.Expected.PType, res.Type()) assert.Equal(tt, tc.Expected.Value, res.Value()) diff --git a/pkg/property/field_test.go b/pkg/property/field_test.go index 35d26480f..aade07fbc 100644 --- a/pkg/property/field_test.go +++ b/pkg/property/field_test.go @@ -24,19 +24,24 @@ func TestField_ActualValue(t *testing.T) { }{ { Name: "nil links", - Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).MustBuild(), - Expected: ValueTypeString.ValueFromUnsafe("vvv"), + Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).MustBuild(), + Expected: ValueTypeString.ValueFrom("vvv"), }, { Name: "nil last link", - Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(&Links{}).MustBuild(), + Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(&Links{}).MustBuild(), Expected: nil, }, { - Name: "dataset value", - Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(ls).MustBuild(), - DS: dataset.New().ID(dsid).Schema(dssid).Fields([]*dataset.Field{dataset.NewField(dssfid, dataset.ValueFrom("xxx"), "")}).MustBuild(), - Expected: ValueTypeString.ValueFromUnsafe("xxx"), + Name: "dataset value", + Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild(), + DS: dataset.New(). + ID(dsid).Schema(dssid). + Fields([]*dataset.Field{ + dataset.NewField(dssfid, dataset.ValueTypeString.ValueFrom("xxx"), "")}, + ). 
+ MustBuild(), + Expected: ValueTypeString.ValueFrom("xxx"), }, } @@ -65,7 +70,7 @@ func TestField_CollectDatasets(t *testing.T) { }{ { Name: "list of one datasets", - Field: NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(ls).MustBuild(), + Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild(), Expected: []id.DatasetID{dsid}, }, { @@ -88,7 +93,7 @@ func TestField_Clone(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) ls := NewLinks([]*Link{l}) - b := NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).Link(ls).MustBuild() + b := NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild() r := b.Clone() assert.Equal(t, b, r) } @@ -109,8 +114,8 @@ func TestField(t *testing.T) { func TestField_Update(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - b := NewField(p).Value(ValueTypeString.ValueFromUnsafe("vvv")).MustBuild() - v := ValueTypeString.ValueFromUnsafe("xxx") + b := NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).MustBuild() + v := ValueTypeString.ValueFrom("xxx") b.UpdateUnsafe(v) assert.Equal(t, v, b.Value()) } diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index 8eb75aaa2..504a37549 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -12,8 +12,8 @@ func TestGroupBuilder_Build(t *testing.T) { iid := id.NewPropertyItemID() sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() testCases := []struct { Name string Id id.PropertyItemID @@ -74,8 +74,8 @@ func 
TestGroupBuilder_MustBuild(t *testing.T) { iid := id.NewPropertyItemID() sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() testCases := []struct { Name string Fail bool diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index a7ee4c37e..39329d43d 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -44,10 +44,10 @@ func TestGroupList_SchemaRef(t *testing.T) { func TestGroupList_HasLinkedField(t *testing.T) { pid := id.NewPropertyItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { @@ -82,10 +82,10 @@ func TestGroupList_HasLinkedField(t *testing.T) { func TestGroupList_CollectDatasets(t *testing.T) { pid := id.NewPropertyItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() 
groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { @@ -119,10 +119,10 @@ func TestGroupList_CollectDatasets(t *testing.T) { func TestGroupList_FieldsByLinkedDataset(t *testing.T) { pid := id.NewPropertyItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { @@ -156,10 +156,10 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { func TestGroupList_IsEmpty(t *testing.T) { pid := id.NewPropertyItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(v).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} testCases := []struct { Name string @@ -191,8 +191,8 @@ func TestGroupList_IsEmpty(t *testing.T) { func TestGroupList_Prune(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := 
NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf).MustBuild() pid := id.NewPropertyItemID() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f, f2}).MustBuild()} diff --git a/pkg/property/group_test.go b/pkg/property/group_test.go index 053507d10..1321a14c5 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -28,11 +28,11 @@ func TestGroup_SchemaGroup(t *testing.T) { func TestGroup_HasLinkedField(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(v).Link(ls).MustBuild() - f2 := NewField(sf).Value(v).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() + f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() testCases := []struct { Name string @@ -66,13 +66,13 @@ func TestGroup_HasLinkedField(t *testing.T) { } func TestGroup_IsDatasetLinked(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() dssid := id.NewDatasetSchemaID() l := NewLink(dsid, dssid, id.NewDatasetSchemaFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(v).Link(ls).MustBuild() - f2 := NewField(sf).Value(v).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() + f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() testCases := []struct { Name string @@ -109,11 +109,11 @@ func TestGroup_IsDatasetLinked(t *testing.T) { func TestGroup_CollectDatasets(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() l := NewLink(dsid, id.NewDatasetSchemaID(), 
id.NewDatasetSchemaFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(v).Link(ls).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() testCases := []struct { Name string @@ -143,12 +143,12 @@ func TestGroup_CollectDatasets(t *testing.T) { func TestGroup_FieldsByLinkedDataset(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") + v := ValueTypeString.ValueFrom("vvv") dsid := id.NewDatasetID() dssid := id.NewDatasetSchemaID() l := NewLink(dsid, dssid, id.NewDatasetSchemaFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(v).Link(ls).MustBuild() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() testCases := []struct { Name string @@ -180,8 +180,8 @@ func TestGroup_FieldsByLinkedDataset(t *testing.T) { func TestGroup_IsEmpty(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf).MustBuild() testCases := []struct { @@ -213,8 +213,8 @@ func TestGroup_IsEmpty(t *testing.T) { func TestGroup_Prune(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf).MustBuild() testCases := []struct { @@ -309,8 +309,8 @@ func TestGroup_GetOrCreateField(t *testing.T) { func TestGroup_RemoveField(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := 
NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf2).MustBuild() testCases := []struct { @@ -343,8 +343,8 @@ func TestGroup_RemoveField(t *testing.T) { func TestGroup_FieldIDs(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf2).MustBuild() testCases := []struct { @@ -375,8 +375,8 @@ func TestGroup_FieldIDs(t *testing.T) { func TestGroup_Field(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() - v := ValueTypeString.ValueFromUnsafe("vvv") - f := NewField(sf).Value(v).MustBuild() + v := ValueTypeString.ValueFrom("vvv") + f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf2).MustBuild() testCases := []struct { @@ -442,15 +442,15 @@ func TestGroup_UpdateNameFieldValue(t *testing.T) { Name: "update value", Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), - Value: ValueTypeString.ValueFromUnsafe("abc"), + Value: ValueTypeString.ValueFrom("abc"), FID: "aa", - Expected: NewField(sf).Value(ValueTypeString.ValueFromUnsafe("abc")).MustBuild(), + Expected: NewField(sf).Value(OptionalValueFrom(ValueTypeString.ValueFrom("abc"))).MustBuild(), }, { Name: "invalid property field", Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), - Value: ValueTypeString.ValueFromUnsafe("abc"), + Value: ValueTypeString.ValueFrom("abc"), FID: "aa", Expected: nil, Err: 
ErrInvalidPropertyField, diff --git a/pkg/property/initializer.go b/pkg/property/initializer.go index 245cc958e..aff489fe6 100644 --- a/pkg/property/initializer.go +++ b/pkg/property/initializer.go @@ -276,7 +276,7 @@ func (p *InitializerField) PropertyField() *Field { plinks = NewLinks(links) } - return NewFieldUnsafe().LinksUnsafe(plinks).FieldUnsafe(p.Field).TypeUnsafe(p.Type).ValueUnsafe(p.Value.Clone()).Build() + return NewFieldUnsafe().LinksUnsafe(plinks).FieldUnsafe(p.Field).ValueUnsafe(NewOptionalValue(p.Type, p.Value.Clone())).Build() } type InitializerLink struct { diff --git a/pkg/property/initializer_test.go b/pkg/property/initializer_test.go index a965137c7..ebd6fe1e2 100644 --- a/pkg/property/initializer_test.go +++ b/pkg/property/initializer_test.go @@ -94,7 +94,7 @@ func TestInitializerItem_Clone(t *testing.T) { Fields: []*InitializerField{{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ Dataset: id.NewDatasetID().Ref(), Schema: id.NewDatasetSchemaID(), @@ -141,12 +141,12 @@ func TestInitializerItem_PropertyGroup(t *testing.T) { Fields: []*InitializerField{{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), }}, } expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().Fields([]*Field{ - NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).TypeUnsafe(item.Fields[0].Type).ValueUnsafe(item.Fields[0].Value).Build(), + NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).ValueUnsafe(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)).Build(), }).MustBuild() assert.Equal(t, expected, item.PropertyGroup(parent)) @@ -183,7 +183,7 @@ func TestInitializerGroup_Clone(t *testing.T) { Fields: []*InitializerField{{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, - Value: 
ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ Dataset: id.NewDatasetID().Ref(), Schema: id.NewDatasetSchemaID(), @@ -208,12 +208,12 @@ func TestInitializerGroup_PropertyGroup(t *testing.T) { Fields: []*InitializerField{{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), }}, } expected := NewItem().ID(*item.ID).Schema(parent, parentItem).Group().Fields([]*Field{ - NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).TypeUnsafe(item.Fields[0].Type).ValueUnsafe(item.Fields[0].Value).Build(), + NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).ValueUnsafe(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)).Build(), }).MustBuild() p, err := item.PropertyGroup(parent, parentItem) @@ -231,7 +231,7 @@ func TestInitializerField_Clone(t *testing.T) { field := &InitializerField{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ Dataset: id.NewDatasetID().Ref(), Schema: id.NewDatasetSchemaID(), @@ -249,7 +249,7 @@ func TestInitializerField_PropertyField(t *testing.T) { field := &InitializerField{ Field: id.PropertySchemaFieldID("name"), Type: ValueTypeString, - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ Dataset: id.NewDatasetID().Ref(), Schema: id.NewDatasetSchemaID(), @@ -259,8 +259,7 @@ func TestInitializerField_PropertyField(t *testing.T) { expected := NewFieldUnsafe(). FieldUnsafe(field.Field). - TypeUnsafe(field.Type). - ValueUnsafe(field.Value). + ValueUnsafe(NewOptionalValue(field.Type, field.Value)). LinksUnsafe(NewLinks([]*Link{NewLink(*field.Links[0].Dataset.CopyRef(), field.Links[0].Schema, field.Links[0].Field)})). 
Build() diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index 77b197f13..8ff6eff35 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -59,8 +59,7 @@ func TestToGroup(t *testing.T) { Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(ValueTypeString). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild(), } diff --git a/pkg/property/link_test.go b/pkg/property/link_test.go index 2e1927404..706b85658 100644 --- a/pkg/property/link_test.go +++ b/pkg/property/link_test.go @@ -137,7 +137,7 @@ func TestLinks_Validate(t *testing.T) { }, DM: dataset.Map{ did1: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ - dataset.NewField(dfid1, dataset.ValueFrom("vvv"), ""), + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("vvv"), ""), }).MustBuild(), }, Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), @@ -416,7 +416,7 @@ func TestLink_Validate(t *testing.T) { { Name: "input schema id != link schema", DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ - dataset.NewField(dfid1, dataset.ValueFrom("vvv"), "")}).MustBuild(), + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("vvv"), "")}).MustBuild(), DSS: dataset.NewSchema().NewID().Fields([]*dataset.SchemaField{ dataset.NewSchemaField(). ID(dfid1). @@ -452,7 +452,7 @@ func TestLink_Validate(t *testing.T) { { Name: "valid", DS: dataset.New().ID(did1).Schema(dsid1).Fields([]*dataset.Field{ - dataset.NewField(dfid1, dataset.ValueFrom("vvv"), "")}).MustBuild(), + dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("vvv"), "")}).MustBuild(), DSS: dataset.NewSchema().ID(dsid1).Fields([]*dataset.SchemaField{ dataset.NewSchemaField(). ID(dfid1). 
diff --git a/pkg/property/merged_test.go b/pkg/property/merged_test.go index 4928117f1..e21e96064 100644 --- a/pkg/property/merged_test.go +++ b/pkg/property/merged_test.go @@ -31,17 +31,17 @@ func TestMerge(t *testing.T) { i8id := id.NewPropertyItemID() fields1 := []*Field{ - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("a")).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("b")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("b")).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("e")).TypeUnsafe(ValueTypeString).LinksUnsafe(NewLinks([]*Link{NewLink(d2, ds, df)})).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).TypeUnsafe(ValueTypeNumber).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("a"))).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("b")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("b"))).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("e")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLink(d2, ds, df)})).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeNumber, nil)).Build(), } fields2 := []*Field{ - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("1")).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("c")).ValueUnsafe(ValueTypeString.ValueFromUnsafe("2")).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("d")).TypeUnsafe(ValueTypeString).LinksUnsafe(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).TypeUnsafe(ValueTypeString).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("1"))).Build(), + 
NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("c")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("2"))).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("d")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})).Build(), + NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).Build(), } groups1 := []*Group{ @@ -90,12 +90,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { @@ -119,13 +119,13 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, Overridden: true, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { @@ -135,7 +135,7 @@ func TestMerge(t *testing.T) { }, { ID: id.PropertySchemaFieldID("c"), - Value: ValueTypeString.ValueFromUnsafe("2"), + Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { @@ -153,12 +153,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { @@ -180,12 +180,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("1"), + Value: 
ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("c"), - Value: ValueTypeString.ValueFromUnsafe("2"), + Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { @@ -223,12 +223,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { @@ -252,12 +252,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { @@ -279,12 +279,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { @@ -322,12 +322,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("1"), + Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("c"), - Value: ValueTypeString.ValueFromUnsafe("2"), + Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { @@ -351,12 +351,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("1"), + Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("c"), 
- Value: ValueTypeString.ValueFromUnsafe("2"), + Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { @@ -378,12 +378,12 @@ func TestMerge(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("1"), + Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("c"), - Value: ValueTypeString.ValueFromUnsafe("2"), + Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index 56ee60c15..153a1cdc6 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -53,19 +53,19 @@ func TestPropertyMigrateSchema(t *testing.T) { fields := []*Field{ // should remain NewFieldUnsafe().FieldUnsafe(schemaField1ID). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("foobar")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("foobar"))). Build(), // should be removed because of max NewFieldUnsafe().FieldUnsafe(schemaField2ID). - ValueUnsafe(ValueTypeNumber.ValueFromUnsafe(101)). + ValueUnsafe(OptionalValueFrom(ValueTypeNumber.ValueFrom(101))). Build(), // should remain NewFieldUnsafe().FieldUnsafe(schemaField3ID). - ValueUnsafe(ValueTypeNumber.ValueFromUnsafe(1)). + ValueUnsafe(OptionalValueFrom(ValueTypeNumber.ValueFrom(1))). Build(), // should be removed because of choices NewFieldUnsafe().FieldUnsafe(schemaField4ID). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("z")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("z"))). Build(), // should remain NewFieldUnsafe().FieldUnsafe(schemaField5ID). @@ -81,11 +81,11 @@ func TestPropertyMigrateSchema(t *testing.T) { Build(), // should be removed because of type NewFieldUnsafe().FieldUnsafe(schemaField7ID). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("hogehoge")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). 
Build(), // should be removed because of not existing field NewFieldUnsafe().FieldUnsafe(schemaField8ID). - ValueUnsafe(ValueTypeString.ValueFromUnsafe("hogehoge")). + ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). Build(), } items := []Item{ diff --git a/pkg/property/schema_field.go b/pkg/property/schema_field.go index 2b470fd7e..402be2b0f 100644 --- a/pkg/property/schema_field.go +++ b/pkg/property/schema_field.go @@ -118,17 +118,11 @@ func (p *SchemaField) IsAvailableIf() *Condition { return p.cond.Clone() } -func (p *SchemaField) Validate(value *Value) bool { - if p == nil { - return false - } - if value == nil { - return true - } - if p.propertyType != value.Type() { +func (p *SchemaField) Validate(value *OptionalValue) bool { + if p == nil || value == nil || p.propertyType != value.Type() { return false } - switch v := value.Value().(type) { + switch v := value.Value().Value().(type) { case float64: if min := p.Min(); min != nil { if v < *min { diff --git a/pkg/property/schema_field_builder.go b/pkg/property/schema_field_builder.go index 4fcf1f9c1..268b15080 100644 --- a/pkg/property/schema_field_builder.go +++ b/pkg/property/schema_field_builder.go @@ -25,7 +25,7 @@ func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { if b.p.min != nil && b.p.max != nil && *b.p.min > *b.p.max { return nil, errors.New("invalid min and max") } - if _, ok := b.p.propertyType.Validate(); !ok { + if ok := b.p.propertyType.Valid(); !ok { return nil, errors.New("invalid value type") } return b.p, nil diff --git a/pkg/property/schema_field_test.go b/pkg/property/schema_field_test.go index dce9058d6..e339c3e6a 100644 --- a/pkg/property/schema_field_test.go +++ b/pkg/property/schema_field_test.go @@ -117,33 +117,46 @@ func TestSchemaField_Validate(t *testing.T) { testCases := []struct { Name string SF *SchemaField - Input *Value + Input *OptionalValue Expected bool }{ { Name: "nil sf", }, + { + Name: "nil optional value", + SF: 
NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: nil, + Expected: false, + }, { Name: "nil value", SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: NewOptionalValue(ValueTypeNumber, nil), Expected: true, }, { Name: "property type != value type", SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), - Input: ValueTypeBool.ValueFromUnsafe(true), + Input: OptionalValueFrom(ValueTypeBool.ValueFrom(true)), + Expected: false, + }, + { + Name: "property type != value type with nil value", + SF: NewSchemaField().ID("A").Type(ValueTypeNumber).MustBuild(), + Input: NewOptionalValue(ValueTypeBool, nil), Expected: false, }, { Name: "validate min", SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Min(10).MustBuild(), - Input: ValueTypeNumber.ValueFromUnsafe(9), + Input: OptionalValueFrom(ValueTypeNumber.ValueFrom(9)), Expected: false, }, { Name: "validate max", SF: NewSchemaField().ID("A").Type(ValueTypeNumber).Max(10).MustBuild(), - Input: ValueTypeNumber.ValueFromUnsafe(11), + Input: OptionalValueFrom(ValueTypeNumber.ValueFrom(11)), Expected: false, }, { @@ -160,7 +173,7 @@ func TestSchemaField_Validate(t *testing.T) { Icon: "", }, }).MustBuild(), - Input: ValueTypeString.ValueFromUnsafe("xxx"), + Input: OptionalValueFrom(ValueTypeString.ValueFrom("xxx")), Expected: true, }, { @@ -177,13 +190,13 @@ func TestSchemaField_Validate(t *testing.T) { Icon: "", }, }).MustBuild(), - Input: ValueTypeString.ValueFromUnsafe("aaa"), + Input: OptionalValueFrom(ValueTypeString.ValueFrom("aaa")), Expected: false, }, { Name: "validate other", SF: NewSchemaField().ID("A").Type(ValueTypeLatLng).MustBuild(), - Input: ValueTypeLatLng.ValueFromUnsafe(LatLng{Lat: 10, Lng: 11}), + Input: OptionalValueFrom(ValueTypeLatLng.ValueFrom(LatLng{Lat: 10, Lng: 11})), Expected: true, }, } diff --git a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go index 31876c493..6b88a95b5 100644 --- 
a/pkg/property/schema_group_builder_test.go +++ b/pkg/property/schema_group_builder_test.go @@ -45,7 +45,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { List: true, IsAvailableIf: &Condition{ Field: "ff", - Value: ValueTypeString.ValueFromUnsafe("abc"), + Value: ValueTypeString.ValueFrom("abc"), }, Title: i18n.StringFrom("tt"), Expected: expected{ @@ -55,7 +55,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { List: true, IsAvailableIf: &Condition{ Field: "ff", - Value: ValueTypeString.ValueFromUnsafe("abc"), + Value: ValueTypeString.ValueFrom("abc"), }, Title: i18n.StringFrom("tt"), }, @@ -68,7 +68,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { List: true, IsAvailableIf: &Condition{ Field: "ff", - Value: ValueTypeString.ValueFromUnsafe("abc"), + Value: ValueTypeString.ValueFrom("abc"), }, Title: i18n.StringFrom("tt"), Expected: expected{ @@ -78,7 +78,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { List: true, IsAvailableIf: &Condition{ Field: "ff", - Value: ValueTypeString.ValueFromUnsafe("abc"), + Value: ValueTypeString.ValueFrom("abc"), }, Title: i18n.StringFrom("tt"), }, diff --git a/pkg/property/sealed.go b/pkg/property/sealed.go index fa89cb4bb..2279ad0cb 100644 --- a/pkg/property/sealed.go +++ b/pkg/property/sealed.go @@ -25,10 +25,15 @@ type SealedItem struct { } type SealedField struct { - ID id.PropertySchemaFieldID - Type ValueType - DatasetValue *dataset.Value - PropertyValue *Value + ID id.PropertySchemaFieldID + Val *ValueAndDatasetValue +} + +func (f *SealedField) Value() *Value { + if f == nil { + return nil + } + return f.Val.Value() } func Seal(ctx context.Context, p *Merged, d dataset.GraphLoader) (*Sealed, error) { @@ -102,12 +107,13 @@ func sealedGroup(ctx context.Context, fields []*MergedField, d dataset.GraphLoad if err != nil { return nil, err } - res = append(res, &SealedField{ - ID: f.ID, - Type: f.Type, - PropertyValue: f.Value.Clone(), - DatasetValue: dv.Clone(), - }) + + if val := 
NewValueAndDatasetValue(f.Type, dv.Clone(), f.Value.Clone()); val != nil { + res = append(res, &SealedField{ + ID: f.ID, + Val: val, + }) + } } return res, nil } @@ -152,13 +158,7 @@ func sealedFieldsInterface(fields []*SealedField) map[string]interface{} { item := map[string]interface{}{} for _, f := range fields { - var v interface{} - if f.DatasetValue != nil { - v = f.DatasetValue.Interface() - } else { - v = f.PropertyValue.Interface() - } - item[f.ID.String()] = v + item[f.ID.String()] = f.Val.Value().Interface() } return item diff --git a/pkg/property/sealed_test.go b/pkg/property/sealed_test.go index eb0a2a143..209856c86 100644 --- a/pkg/property/sealed_test.go +++ b/pkg/property/sealed_test.go @@ -58,12 +58,12 @@ func TestSeal(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, @@ -79,12 +79,12 @@ func TestSeal(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, @@ -115,15 +115,20 @@ func TestSeal(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: 
dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -136,15 +141,20 @@ func TestSeal(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -227,12 +237,12 @@ func TestSealedItemFrom(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: ValueTypeString.ValueFromUnsafe("a"), + Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("b"), + Value: ValueTypeString.ValueFrom("b"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, @@ -257,15 +267,20 @@ func TestSealedItemFrom(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -289,12 +304,12 @@ func TestSealedItemFrom(t *testing.T) { Fields: []*MergedField{ { ID: id.PropertySchemaFieldID("a"), - Value: 
ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Type: ValueTypeString, }, { ID: id.PropertySchemaFieldID("b"), - Value: ValueTypeString.ValueFromUnsafe("aaa"), + Value: ValueTypeString.ValueFrom("aaa"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, @@ -320,15 +335,20 @@ func TestSealedItemFrom(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -380,15 +400,20 @@ func TestSealed_Interface(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -401,15 +426,20 @@ func TestSealed_Interface(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: 
dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -465,15 +495,20 @@ func TestSealedItem_Match(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -525,15 +560,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -546,15 +586,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("b"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + 
ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -573,15 +618,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -608,15 +658,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -629,15 +684,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -656,15 +716,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ 
{ - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -691,15 +756,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -712,15 +782,20 @@ func TestSealed_ItemBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -772,15 +847,20 @@ func TestSealed_FieldBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + 
ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -793,15 +873,20 @@ func TestSealed_FieldBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -809,9 +894,12 @@ func TestSealed_FieldBy(t *testing.T) { }, Input: NewPointer(psiid1.Ref(), i1id.Ref(), id.PropertySchemaFieldID("a").Ref()), Expected: &SealedField{ - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, }, { @@ -834,15 +922,20 @@ func TestSealed_FieldBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("a"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("b"), + ValueTypeString.ValueFrom("bbb"), + ), }, }, }, @@ -855,15 
+948,20 @@ func TestSealed_FieldBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -871,9 +969,12 @@ func TestSealed_FieldBy(t *testing.T) { }, Input: NewPointer(nil, i3id.Ref(), id.PropertySchemaFieldID("a").Ref()), Expected: &SealedField{ - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, }, { @@ -896,15 +997,20 @@ func TestSealed_FieldBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("a"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("b"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("b"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("b"), + ), }, }, }, @@ -917,15 +1023,20 @@ func TestSealed_FieldBy(t *testing.T) { LinkedDataset: &d, Fields: []*SealedField{ { - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, { - ID: "b", - Type: ValueTypeString, - PropertyValue: 
ValueTypeString.ValueFromUnsafe("aaa"), - DatasetValue: dataset.ValueTypeString.ValueFrom("bbb"), + ID: "b", + Val: NewValueAndDatasetValue( + ValueTypeString, + dataset.ValueTypeString.ValueFrom("bbb"), + ValueTypeString.ValueFrom("aaa"), + ), }, }, }, @@ -933,9 +1044,12 @@ func TestSealed_FieldBy(t *testing.T) { }, Input: NewPointer(nil, nil, id.PropertySchemaFieldID("a").Ref()), Expected: &SealedField{ - ID: "a", - Type: ValueTypeString, - PropertyValue: ValueTypeString.ValueFromUnsafe("aaa"), + ID: "a", + Val: NewValueAndDatasetValue( + ValueTypeString, + nil, + ValueTypeString.ValueFrom("aaa"), + ), }, }, } diff --git a/pkg/property/value.go b/pkg/property/value.go index e22c1d3c4..35404a98d 100644 --- a/pkg/property/value.go +++ b/pkg/property/value.go @@ -1,240 +1,216 @@ package property -type ValueInner interface { - Value() *Value +import ( + "net/url" + "strconv" + + "github.com/reearth/reearth-backend/pkg/value" +) + +type LatLng = value.LatLng +type LatLngHeight = value.LatLngHeight +type Coordinates = value.Coordinates +type Rect = value.Rect +type Polygon = value.Polygon + +type ValueType value.Type + +var ( + ValueTypeUnknown = ValueType(value.TypeUnknown) + ValueTypeBool = ValueType(value.TypeBool) + ValueTypeNumber = ValueType(value.TypeNumber) + ValueTypeString = ValueType(value.TypeString) + ValueTypeRef = ValueType(value.TypeRef) + ValueTypeURL = ValueType(value.TypeURL) + ValueTypeLatLng = ValueType(value.TypeLatLng) + ValueTypeLatLngHeight = ValueType(value.TypeLatLngHeight) + ValueTypeCoordinates = ValueType(value.TypeCoordinates) + ValueTypePolygon = ValueType(value.TypePolygon) + ValueTypeRect = ValueType(value.TypeRect) +) + +var types = value.TypePropertyMap{ + value.Type(ValueTypeTypography): &typePropertyTypography{}, + value.Type(ValueTypeCamera): &typePropertyCamera{}, } -// LatLng _ -type LatLng struct { - Lat float64 `json:"lat" mapstructure:"lat"` - Lng float64 `json:"lng" mapstructure:"lng"` +func (vt ValueType) Valid() bool { + 
if _, ok := types[value.Type(vt)]; ok { + return true + } + return value.Type(vt).Default() } -// Clone _ -func (l *LatLng) Clone() *LatLng { - if l == nil { +func (vt ValueType) ValueFrom(i interface{}) *Value { + v := value.Type(vt).ValueFrom(i, types) + if v == nil { return nil } - return &LatLng{ - Lat: l.Lat, - Lng: l.Lng, - } + return &Value{v: *v} +} + +type Value struct { + v value.Value } -// LatLngHeight _ -type LatLngHeight struct { - Lat float64 `json:"lat" mapstructure:"lat"` - Lng float64 `json:"lng" mapstructure:"lng"` - Height float64 `json:"height" mapstructure:"height"` +func (v *Value) IsEmpty() bool { + return v == nil || v.v.IsEmpty() } -// Clone _ -func (l *LatLngHeight) Clone() *LatLngHeight { - if l == nil { +func (v *Value) Clone() *Value { + if v == nil { return nil } - return &LatLngHeight{ - Lat: l.Lat, - Lng: l.Lng, - Height: l.Height, + vv := v.v.Clone() + if vv == nil { + return nil } + return &Value{v: *vv} } -// Camera _ -type Camera struct { - Lat float64 `json:"lat" mapstructure:"lat"` - Lng float64 `json:"lng" mapstructure:"lng"` - Altitude float64 `json:"altitude" mapstructure:"altitude"` - Heading float64 `json:"heading" mapstructure:"heading"` - Pitch float64 `json:"pitch" mapstructure:"pitch"` - Roll float64 `json:"roll" mapstructure:"roll"` - FOV float64 `json:"fov" mapstructure:"fov"` +func (v *Value) Type() ValueType { + if v == nil { + return ValueType(value.TypeUnknown) + } + return ValueType(v.v.Type()) } -// Clone _ -func (c *Camera) Clone() *Camera { - if c == nil { +func (v *Value) Value() interface{} { + if v == nil { return nil } - return &Camera{ - Lat: c.Lat, - Lng: c.Lng, - Altitude: c.Altitude, - Heading: c.Heading, - Pitch: c.Pitch, - Roll: c.Roll, - FOV: c.FOV, - } + return v.v.Value() } -// Typography _ -type Typography struct { - FontFamily *string `json:"fontFamily" mapstructure:"fontFamily"` - FontWeight *string `json:"fontWeight" mapstructure:"fontWeight"` - FontSize *int `json:"fontSize" 
mapstructure:"fontSize"` - Color *string `json:"color" mapstructure:"color"` - TextAlign *TextAlign `json:"textAlign" mapstructure:"textAlign"` - Bold *bool `json:"bold" mapstructure:"bold"` - Italic *bool `json:"italic" mapstructure:"italic"` - Underline *bool `json:"underline" mapstructure:"underline"` +func (v *Value) Interface() interface{} { + if v == nil { + return nil + } + return v.v.Interface() } -// Clone _ -func (t *Typography) Clone() *Typography { - if t == nil { +func (v *Value) ValueBool() *bool { + if v == nil { return nil } - return &Typography{ - FontFamily: t.FontFamily, - FontWeight: t.FontWeight, - FontSize: t.FontSize, - Color: t.Color, - TextAlign: t.TextAlign, - Bold: t.Bold, - Italic: t.Italic, - Underline: t.Underline, - } -} - -// TextAlign _ -type TextAlign string - -const ( - // TextAlignLeft _ - TextAlignLeft TextAlign = "left" - // TextAlignCenter _ - TextAlignCenter TextAlign = "center" - // TextAlignRight _ - TextAlignRight TextAlign = "right" - // TextAlignJustify _ - TextAlignJustify TextAlign = "justify" - // TextAlignJustifyAll _ - TextAlignJustifyAll TextAlign = "justify_all" -) + vv, ok := v.v.ValueBool() + if ok { + return &vv + } + return nil +} -// TextAlignFrom _ -func TextAlignFrom(t string) (TextAlign, bool) { - switch TextAlign(t) { - case TextAlignLeft: - return TextAlignLeft, true - case TextAlignCenter: - return TextAlignCenter, true - case TextAlignRight: - return TextAlignRight, true - case TextAlignJustify: - return TextAlignJustify, true - case TextAlignJustifyAll: - return TextAlignJustifyAll, true - } - return TextAlign(""), false -} - -// TextAlignFromRef _ -func TextAlignFromRef(t *string) *TextAlign { - if t == nil { +func (v *Value) ValueNumber() *float64 { + if v == nil { return nil } - var t2 TextAlign - switch TextAlign(*t) { - case TextAlignLeft: - t2 = TextAlignLeft - case TextAlignCenter: - t2 = TextAlignCenter - case TextAlignRight: - t2 = TextAlignRight - case TextAlignJustify: - t2 = 
TextAlignJustify - case TextAlignJustifyAll: - t2 = TextAlignJustifyAll - default: - return nil + vv, ok := v.v.ValueNumber() + if ok { + return &vv } - return &t2 + return nil } -// String _ -func (t TextAlign) String() string { - return string(t) +func (v *Value) ValueString() *string { + if v == nil { + return nil + } + vv, ok := v.v.ValueString() + if ok { + return &vv + } + return nil } -// StringRef _ -func (t *TextAlign) StringRef() *string { - if t == nil { +func (v *Value) ValueRef() *string { + if v == nil { return nil } - t2 := string(*t) - return &t2 + vv, ok := v.v.ValueRef() + if ok { + return &vv + } + return nil } -// Coordinates _ -type Coordinates []LatLngHeight - -// CoordinatesFrom generates a new Coordinates from slice such as [lon, lat, alt, lon, lat, alt, ...] -func CoordinatesFrom(coords []float64) Coordinates { - if len(coords) == 0 { +func (v *Value) ValueURL() *url.URL { + if v == nil { return nil } - - r := make([]LatLngHeight, 0, len(coords)/3) - l := LatLngHeight{} - for i, c := range coords { - switch i % 3 { - case 0: - l = LatLngHeight{} - l.Lng = c - case 1: - l.Lat = c - case 2: - l.Height = c - r = append(r, l) - } + vv, ok := v.v.ValueURL() + if ok { + return vv } - - return r + return nil } -// Polygon _ -type Polygon []Coordinates - -// Rect _ -type Rect struct { - West float64 `json:"west" mapstructure:"west"` - South float64 `json:"south" mapstructure:"south"` - East float64 `json:"east" mapstructure:"east"` - North float64 `json:"north" mapstructure:"north"` +func (v *Value) ValueLatLng() *LatLng { + if v == nil { + return nil + } + vv, ok := v.v.ValueLatLng() + if ok { + return &vv + } + return nil } -// Value _ -func (l LatLng) Value() *Value { - return ValueTypeLatLng.ValueFromUnsafe(l) +func (v *Value) ValueLatLngHeight() *LatLngHeight { + if v == nil { + return nil + } + vv, ok := v.v.ValueLatLngHeight() + if ok { + return &vv + } + return nil } -// Value _ -func (l LatLngHeight) Value() *Value { - return 
ValueTypeLatLngHeight.ValueFromUnsafe(l) +func (v *Value) ValueCoordinates() *Coordinates { + if v == nil { + return nil + } + vv, ok := v.v.ValueCoordinates() + if ok { + return &vv + } + return nil } -// Value _ -func (c Camera) Value() *Value { - return ValueTypeCamera.ValueFromUnsafe(c) +func (v *Value) ValueRect() *Rect { + if v == nil { + return nil + } + vv, ok := v.v.ValueRect() + if ok { + return &vv + } + return nil } -// Value _ -func (t Typography) Value() *Value { - return ValueTypeTypography.ValueFromUnsafe(t) +func (v *Value) ValuePolygon() *Polygon { + if v == nil { + return nil + } + vv, ok := v.v.ValuePolygon() + if ok { + return &vv + } + return nil } -// Value _ -func (t Coordinates) Value() *Value { - return ValueTypeCoordinates.ValueFromUnsafe(t) -} +func ValueFromStringOrNumber(s string) *Value { + if vint, err := strconv.Atoi(s); err == nil { + return ValueTypeNumber.ValueFrom(vint) + } -// Value _ -func (t Polygon) Value() *Value { - return ValueTypePolygon.ValueFromUnsafe(t) -} + if vfloat64, err := strconv.ParseFloat(s, 64); err == nil { + return ValueTypeNumber.ValueFrom(vfloat64) + } + + if vbool, err := strconv.ParseBool(s); err == nil { + return ValueTypeBool.ValueFrom(vbool) + } -// Value _ -func (t Rect) Value() *Value { - return ValueTypeRect.ValueFromUnsafe(t) + return ValueTypeString.ValueFrom(s) } diff --git a/pkg/property/value_camera.go b/pkg/property/value_camera.go new file mode 100644 index 000000000..378273c7d --- /dev/null +++ b/pkg/property/value_camera.go @@ -0,0 +1,70 @@ +package property + +import ( + "github.com/mitchellh/mapstructure" +) + +var ValueTypeCamera = ValueType("camera") + +type Camera struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` + Altitude float64 `json:"altitude" mapstructure:"altitude"` + Heading float64 `json:"heading" mapstructure:"heading"` + Pitch float64 `json:"pitch" mapstructure:"pitch"` + Roll float64 `json:"roll" mapstructure:"roll"` + 
FOV float64 `json:"fov" mapstructure:"fov"` +} + +func (c *Camera) Clone() *Camera { + if c == nil { + return nil + } + return &Camera{ + Lat: c.Lat, + Lng: c.Lng, + Altitude: c.Altitude, + Heading: c.Heading, + Pitch: c.Pitch, + Roll: c.Roll, + FOV: c.FOV, + } +} + +type typePropertyCamera struct{} + +func (*typePropertyCamera) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Camera); ok { + return v, true + } + + if v, ok := i.(*Camera); ok { + if v != nil { + return *v, true + } + return nil, false + } + + v := Camera{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, true + } + return nil, false +} + +func (*typePropertyCamera) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*typePropertyCamera) Validate(i interface{}) bool { + _, ok := i.(Camera) + return ok +} + +func (v *Value) ValueCamera() (vv Camera, ok bool) { + if v == nil { + return + } + vv, ok = v.Value().(Camera) + return +} diff --git a/pkg/property/value_camera_test.go b/pkg/property/value_camera_test.go new file mode 100644 index 000000000..0eb0f5357 --- /dev/null +++ b/pkg/property/value_camera_test.go @@ -0,0 +1,51 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCamera_Clone(t *testing.T) { + tests := []struct { + Name string + Camera, Expected *Camera + }{ + { + Name: "nil Camera", + }, + { + Name: "cloned", + Camera: &Camera{ + Lat: 1, + Lng: 1, + Altitude: 2, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + Expected: &Camera{ + Lat: 1, + Lng: 1, + Altitude: 2, + Heading: 4, + Pitch: 5, + Roll: 6, + FOV: 7, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Camera.Clone() + assert.Equal(tt, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(tt, tc.Expected, res) + } + }) + } +} diff --git a/pkg/property/value_converter.go b/pkg/property/value_converter.go deleted file mode 100644 index d81937aec..000000000 
--- a/pkg/property/value_converter.go +++ /dev/null @@ -1,41 +0,0 @@ -package property - -import "github.com/reearth/reearth-backend/pkg/dataset" - -func valueFromDataset(v *dataset.Value) (*Value, bool) { - v2 := v.Value() - switch v3 := v2.(type) { - case *dataset.LatLng: - return ValueTypeLatLng.ValueFrom(LatLng{ - Lat: v3.Lat, - Lng: v3.Lng, - }) - case *dataset.LatLngHeight: - return ValueTypeLatLngHeight.ValueFrom(LatLngHeight{ - Lat: v3.Lat, - Lng: v3.Lng, - Height: v3.Height, - }) - } - return valueTypeFromDataset(v.Type()).ValueFrom(v2) -} - -func valueTypeFromDataset(v dataset.ValueType) ValueType { - switch v { - case dataset.ValueTypeBool: - return ValueTypeBool - case dataset.ValueTypeLatLng: - return ValueTypeLatLng - case dataset.ValueTypeLatLngHeight: - return ValueTypeLatLngHeight - case dataset.ValueTypeNumber: - return ValueTypeNumber - case dataset.ValueTypeRef: - return ValueTypeRef - case dataset.ValueTypeString: - return ValueTypeString - case dataset.ValueTypeURL: - return ValueTypeURL - } - return ValueType("") -} diff --git a/pkg/property/value_converter_test.go b/pkg/property/value_converter_test.go deleted file mode 100644 index 87abbdf7c..000000000 --- a/pkg/property/value_converter_test.go +++ /dev/null @@ -1,120 +0,0 @@ -package property - -import ( - "testing" - - "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/stretchr/testify/assert" -) - -func TestValueFromDataset(t *testing.T) { - testCases := []struct { - Name string - Input *dataset.Value - Expected struct { - V *Value - Ok bool - } - }{ - { - Name: "latlng", - Input: dataset.ValueFrom(dataset.LatLng{ - Lat: 10, - Lng: 12, - }), - Expected: struct { - V *Value - Ok bool - }{ - V: ValueTypeLatLng.ValueFromUnsafe(LatLng{ - Lat: 10, - Lng: 12, - }), - Ok: true, - }, - }, - { - Name: "LatLngHeight", - Input: dataset.ValueFrom(dataset.LatLngHeight{ - Lat: 10, - Lng: 12, - Height: 14, - }), - Expected: struct { - V *Value - Ok bool - }{ - V: 
ValueTypeLatLngHeight.ValueFromUnsafe(LatLngHeight{ - Lat: 10, - Lng: 12, - Height: 14, - }), - Ok: true, - }, - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, ok := valueFromDataset(tc.Input) - assert.Equal(tt, tc.Expected.V, res) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValueTypeFromDataset(t *testing.T) { - testCases := []struct { - Name string - Input dataset.ValueType - Expected ValueType - }{ - { - Name: "latlng", - Input: dataset.ValueTypeLatLng, - Expected: ValueTypeLatLng, - }, - { - Name: "latlngheight", - Input: dataset.ValueTypeLatLngHeight, - Expected: ValueTypeLatLngHeight, - }, - { - Name: "string", - Input: dataset.ValueTypeString, - Expected: ValueTypeString, - }, - { - Name: "bool", - Input: dataset.ValueTypeBool, - Expected: ValueTypeBool, - }, - { - Name: "ref", - Input: dataset.ValueTypeRef, - Expected: ValueTypeRef, - }, - { - Name: "url", - Input: dataset.ValueTypeURL, - Expected: ValueTypeURL, - }, - { - Name: "number", - Input: dataset.ValueTypeNumber, - Expected: ValueTypeNumber, - }, - { - Name: "undefined", - Input: dataset.ValueType("xxx"), - Expected: ValueType(""), - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := valueTypeFromDataset(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} diff --git a/pkg/property/value_dataset.go b/pkg/property/value_dataset.go new file mode 100644 index 000000000..3443ad0fb --- /dev/null +++ b/pkg/property/value_dataset.go @@ -0,0 +1,67 @@ +package property + +import ( + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/value" +) + +type ValueAndDatasetValue struct { + t ValueType + d *dataset.Value + p *Value +} + +func NewValueAndDatasetValue(ty ValueType, d *dataset.Value, p *Value) *ValueAndDatasetValue { + if !ty.Valid() { + return nil + } + + if d != nil && ValueType(d.Type()) != ty { + d = nil + } + + if p != nil && 
p.Type() != ty { + p = nil + } + + return &ValueAndDatasetValue{ + t: ty, + d: d, + p: p, + } +} + +func (v *ValueAndDatasetValue) Type() ValueType { + if v == nil { + return ValueTypeUnknown + } + return v.t +} + +func (v *ValueAndDatasetValue) DatasetValue() *dataset.Value { + if v == nil || v.t == ValueTypeUnknown { + return nil + } + return v.d +} + +func (v *ValueAndDatasetValue) PropertyValue() *Value { + if v == nil || v.t == ValueTypeUnknown { + return nil + } + return v.p +} + +func (v *ValueAndDatasetValue) Value() *Value { + if v == nil || v.t == ValueTypeUnknown { + return nil + } + if v.d != nil { + return valueFromDataset(v.d) + } + return v.p +} + +func valueFromDataset(v *dataset.Value) *Value { + return ValueType(value.Type(v.Type())).ValueFrom(v.Value()) +} diff --git a/pkg/property/value_dataset_test.go b/pkg/property/value_dataset_test.go new file mode 100644 index 000000000..ecaefd482 --- /dev/null +++ b/pkg/property/value_dataset_test.go @@ -0,0 +1,295 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/stretchr/testify/assert" +) + +func TestNewValueAndDatasetValue(t *testing.T) { + type args struct { + ty ValueType + d *dataset.Value + p *Value + } + tests := []struct { + name string + args args + want *ValueAndDatasetValue + }{ + { + name: "ok", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.ValueFrom(true), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.ValueFrom(true), + }, + }, + { + name: "invalid type", + args: args{ + ty: ValueType("foobar"), + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.ValueFrom(true), + }, + want: nil, + }, + { + name: "invalid dataset value", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeString.ValueFrom("false"), + p: ValueTypeBool.ValueFrom(true), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, 
+ d: nil, + p: ValueTypeBool.ValueFrom(true), + }, + }, + { + name: "invalid property value", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeString.ValueFrom("true"), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: nil, + }, + }, + { + name: "nil dataset value", + args: args{ + ty: ValueTypeBool, + d: nil, + p: ValueTypeBool.ValueFrom(false), + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: nil, + p: ValueTypeBool.ValueFrom(false), + }, + }, + { + name: "nil property value", + args: args{ + ty: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: nil, + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: dataset.ValueTypeBool.ValueFrom(false), + p: nil, + }, + }, + { + name: "nil value", + args: args{ + ty: ValueTypeBool, + d: nil, + p: nil, + }, + want: &ValueAndDatasetValue{ + t: ValueTypeBool, + d: nil, + p: nil, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, NewValueAndDatasetValue(tt.args.ty, tt.args.d, tt.args.p)) + }) + } +} + +func TestValueAndDatasetValue_Type(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want ValueType + }{ + { + name: "ok", + target: &ValueAndDatasetValue{t: ValueTypeString}, + want: ValueTypeString, + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + target: nil, + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Type()) + }) + } +} + +func TestValueAndDatasetValue_DatasetValuee(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want *dataset.Value + }{ + { + name: "dataset only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + }, + want: 
dataset.ValueTypeString.ValueFrom("foo"), + }, + { + name: "property only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + p: ValueTypeString.ValueFrom("bar"), + }, + want: nil, + }, + { + name: "dataset and property", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + p: ValueTypeString.ValueFrom("bar"), + }, + want: dataset.ValueTypeString.ValueFrom("foo"), + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.DatasetValue()) + }) + } +} + +func TestValueAndDatasetValue_PropertyValue(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want *Value + }{ + { + name: "dataset only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + { + name: "property only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + p: ValueTypeString.ValueFrom("bar"), + }, + want: ValueTypeString.ValueFrom("bar"), + }, + { + name: "dataset and property", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + p: ValueTypeString.ValueFrom("bar"), + }, + want: ValueTypeString.ValueFrom("bar"), + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.PropertyValue()) + }) + } +} + +func TestValueAndDatasetValue_Value(t *testing.T) { + tests := []struct { + name string + target *ValueAndDatasetValue + want *Value + }{ + { + name: "dataset only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + }, + want: ValueTypeString.ValueFrom("foo"), + }, + { + name: 
"property only", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + p: ValueTypeString.ValueFrom("bar"), + }, + want: ValueTypeString.ValueFrom("bar"), + }, + { + name: "dataset and property", + target: &ValueAndDatasetValue{ + t: ValueTypeString, + d: dataset.ValueTypeString.ValueFrom("foo"), + p: ValueTypeString.ValueFrom("bar"), + }, + want: ValueTypeString.ValueFrom("foo"), + }, + { + name: "empty", + target: &ValueAndDatasetValue{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Value()) + }) + } +} diff --git a/pkg/property/value_optional.go b/pkg/property/value_optional.go new file mode 100644 index 000000000..6e01c84b1 --- /dev/null +++ b/pkg/property/value_optional.go @@ -0,0 +1,78 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/value" + +type OptionalValue struct { + ov value.OptionalValue +} + +func NewOptionalValue(t ValueType, v *Value) *OptionalValue { + var vv *value.Value + if v != nil { + vv = &v.v + } + ov := value.NewOptionalValue(value.Type(t), vv) + if ov == nil { + return nil + } + return &OptionalValue{ov: *ov} +} + +func OptionalValueFrom(v *Value) *OptionalValue { + if v == nil { + return nil + } + ov := value.OptionalValueFrom(&v.v) + if ov == nil { + return nil + } + return &OptionalValue{ + ov: *ov, + } +} + +func (ov *OptionalValue) Type() ValueType { + if ov == nil { + return ValueTypeUnknown + } + return ValueType(ov.ov.Type()) +} + +func (ov *OptionalValue) Value() *Value { + if ov == nil { + return nil + } + vv := ov.ov.Value() + if vv == nil { + return nil + } + return &Value{v: *vv} +} + +func (ov *OptionalValue) TypeAndValue() (ValueType, *Value) { + return ov.Type(), ov.Value() +} + +func (ov *OptionalValue) Clone() *OptionalValue { + if ov == nil { + return nil + } + nov := ov.ov.Clone() + if nov == nil { + return nil + } + return &OptionalValue{ + ov: *nov, + } +} + 
+func (ov *OptionalValue) SetValue(v *Value) { + if ov == nil { + return + } + if v == nil { + ov.ov.SetValue(nil) + } else { + ov.ov.SetValue(&v.v) + } +} diff --git a/pkg/property/value_optional_test.go b/pkg/property/value_optional_test.go new file mode 100644 index 000000000..61e35b02e --- /dev/null +++ b/pkg/property/value_optional_test.go @@ -0,0 +1,303 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestNewNilableValue(t *testing.T) { + type args struct { + t ValueType + v *Value + } + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + t: ValueTypeString, + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", types))}, + }, + { + name: "nil value", + args: args{ + t: ValueTypeString, + }, + want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + }, + { + name: "invalid value", + args: args{ + t: ValueTypeNumber, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + { + name: "invalid type", + args: args{ + t: ValueTypeUnknown, + v: ValueTypeString.ValueFrom("foo"), + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewOptionalValue(tt.args.t, tt.args.v)) + }) + } +} + +func TestOptionalValueFrom(t *testing.T) { + type args struct { + v *Value + } + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + v: ValueTypeString.ValueFrom("foo"), + }, + want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", types))}, + }, + { + name: "empty value", + args: args{ + v: &Value{v: value.Value{}}, + }, + want: nil, + }, + { + name: "nil value", + args: args{}, + want: nil, + }, + } + + for _, tt := range 
tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, OptionalValueFrom(tt.args.v)) + }) + } +} + +func TestOptionalValue_Type(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want ValueType + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeBool, nil)}, + want: ValueTypeBool, + }, + { + name: "empty", + value: &OptionalValue{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + value: nil, + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestOptionalValue_Value(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", types))}, + want: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.Value() + assert.Equal(t, tt.want, res) + if res != nil { + assert.NotSame(t, tt.want, res) + } + }) + } +} + +func TestOptionalValue_TypeAndValue(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + wantt ValueType + wantv *Value + }{ + { + name: "ok", + value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", types))}, + wantt: ValueTypeString, + wantv: ValueTypeString.ValueFrom("foobar"), + }, + { + name: "empty", + value: &OptionalValue{}, + wantt: ValueTypeUnknown, + wantv: nil, + }, + { + name: "nil", + value: nil, + wantt: ValueTypeUnknown, + wantv: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ty, tv := tt.value.TypeAndValue() + assert.Equal(t, 
tt.wantt, ty) + assert.Equal(t, tt.wantv, tv) + if tv != nil { + assert.NotSame(t, tt.wantv, tv) + } + }) + } +} + +func TestOptionalValue_SetValue(t *testing.T) { + type args struct { + v *Value + } + tests := []struct { + name string + value *OptionalValue + args args + invalid bool + }{ + { + name: "set", + value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", types))}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "set to nil", + value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + args: args{v: ValueTypeString.ValueFrom("foobar")}, + }, + { + name: "invalid value", + value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil value", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + { + name: "empty", + value: &OptionalValue{}, + args: args{v: ValueTypeNumber.ValueFrom(1)}, + invalid: true, + }, + { + name: "nil", + args: args{v: ValueTypeNumber.ValueFrom(1)}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + var v *Value + if tt.value != nil { + v = tt.value.Value() + } + + tt.value.SetValue(tt.args.v) + + if tt.value != nil { + if tt.invalid { + assert.Equal(t, v, tt.value.Value()) + } else { + assert.Equal(t, tt.args.v, tt.value.Value()) + } + } + }) + } +} + +func TestOptionalValue_Clone(t *testing.T) { + tests := []struct { + name string + target *OptionalValue + }{ + { + name: "ok", + target: &OptionalValue{ + ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", types)), + }, + }, + { + name: "empty", + target: &OptionalValue{}, + }, + { + name: "nil", + target: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} diff --git 
a/pkg/property/value_test.go b/pkg/property/value_test.go index ce09faa2b..a51528a3b 100644 --- a/pkg/property/value_test.go +++ b/pkg/property/value_test.go @@ -3,225 +3,53 @@ package property import ( "testing" + "github.com/reearth/reearth-backend/pkg/dataset" "github.com/stretchr/testify/assert" ) -func getStrRef(i string) *string { - return &i -} -func getBoolRef(i bool) *bool { - return &i -} - -func TestLatLng_Clone(t *testing.T) { - testCases := []struct { - Name string - LL, Expected *LatLng - }{ - { - Name: "nil latlng", - }, - { - Name: "cloned", - LL: &LatLng{ - Lat: 10, - Lng: 11, - }, - Expected: &LatLng{ - Lat: 10, - Lng: 11, - }, - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.LL.Clone() - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestLatLngHeight_Clone(t *testing.T) { - testCases := []struct { - Name string - LL, Expected *LatLngHeight - }{ - { - Name: "nil LatLngHeight", - }, - { - Name: "cloned", - LL: &LatLngHeight{ - Lat: 10, - Lng: 11, - Height: 12, - }, - Expected: &LatLngHeight{ - Lat: 10, - Lng: 11, - Height: 12, - }, - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.LL.Clone() - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestCamera_Clone(t *testing.T) { - testCases := []struct { - Name string - Camera, Expected *Camera - }{ - { - Name: "nil Camera", - }, - { - Name: "cloned", - Camera: &Camera{ - Lat: 1, - Lng: 1, - Altitude: 2, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - Expected: &Camera{ - Lat: 1, - Lng: 1, - Altitude: 2, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Camera.Clone() - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestTypography_Clone(t *testing.T) { - - i := 10 - - testCases := []struct { - Name string - Typography, Expected *Typography - }{ - { - 
Name: "nil typography", - }, - { - Name: "cloned", - Typography: &Typography{ - FontFamily: getStrRef("x"), - FontWeight: getStrRef("b"), - FontSize: &i, - Color: getStrRef("red"), - TextAlign: TextAlignFromRef(getStrRef(TextAlignCenter.String())), - Bold: getBoolRef(true), - Italic: getBoolRef(false), - Underline: getBoolRef(true), - }, - Expected: &Typography{ - FontFamily: getStrRef("x"), - FontWeight: getStrRef("b"), - FontSize: &i, - Color: getStrRef("red"), - TextAlign: TextAlignFromRef(getStrRef("center")), - Bold: getBoolRef(true), - Italic: getBoolRef(false), - Underline: getBoolRef(true), - }, - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Typography.Clone() - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestTextAlignFrom(t *testing.T) { +func TestValueFromDataset(t *testing.T) { testCases := []struct { Name string + Input *dataset.Value Expected struct { - TA TextAlign - Bool bool + V *Value + Ok bool } }{ { - Name: "left", - Expected: struct { - TA TextAlign - Bool bool - }{ - TA: TextAlignLeft, - Bool: true, - }, - }, - { - Name: "right", - Expected: struct { - TA TextAlign - Bool bool - }{ - TA: TextAlignRight, - Bool: true, - }, - }, - { - Name: "center", - Expected: struct { - TA TextAlign - Bool bool - }{ - TA: TextAlignCenter, - Bool: true, - }, - }, - { - Name: "justify", - Expected: struct { - TA TextAlign - Bool bool - }{ - TA: TextAlignJustify, - Bool: true, - }, - }, - { - Name: "justify_all", + Name: "latlng", + Input: dataset.ValueTypeLatLng.ValueFrom(dataset.LatLng{ + Lat: 10, + Lng: 12, + }), Expected: struct { - TA TextAlign - Bool bool + V *Value + Ok bool }{ - TA: TextAlignJustifyAll, - Bool: true, + V: ValueTypeLatLng.ValueFrom(LatLng{ + Lat: 10, + Lng: 12, + }), + Ok: true, }, }, { - Name: "undefined", + Name: "LatLngHeight", + Input: dataset.ValueTypeLatLngHeight.ValueFrom(dataset.LatLngHeight{ + Lat: 10, + Lng: 12, + Height: 14, + }), Expected: struct { - TA 
TextAlign - Bool bool + V *Value + Ok bool }{ - TA: TextAlign(""), - Bool: false, + V: ValueTypeLatLngHeight.ValueFrom(LatLngHeight{ + Lat: 10, + Lng: 12, + Height: 14, + }), + Ok: true, }, }, } @@ -230,125 +58,7 @@ func TestTextAlignFrom(t *testing.T) { tc := tc t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() - res, ok := TextAlignFrom(tc.Name) - assert.Equal(tt, tc.Expected.TA, res) - assert.Equal(tt, tc.Expected.Bool, ok) + assert.Equal(tt, tc.Expected.V, valueFromDataset(tc.Input)) }) } } - -func TestTextAlignFromRef(t *testing.T) { - ja := TextAlignJustifyAll - j := TextAlignJustify - c := TextAlignCenter - l := TextAlignLeft - r := TextAlignRight - testCases := []struct { - Name string - Input *string - Expected *TextAlign - }{ - { - Name: "left", - Input: getStrRef("left"), - Expected: &l, - }, - { - Name: "right", - Input: getStrRef("right"), - Expected: &r, - }, - { - Name: "center", - Input: getStrRef("center"), - Expected: &c, - }, - { - Name: "justify", - Input: getStrRef("justify"), - Expected: &j, - }, - { - Name: "justify_all", - Input: getStrRef("justify_all"), - Expected: &ja, - }, - { - Name: "undefined", - Input: getStrRef("undefined"), - }, - { - Name: "nil input", - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := TextAlignFromRef(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestTextAlign_StringRef(t *testing.T) { - var ta *TextAlign - assert.Nil(t, ta.StringRef()) -} - -func TestValue(t *testing.T) { - ll := LatLng{ - Lat: 1, - Lng: 2, - } - assert.True(t, ValueTypeLatLng.ValidateValue(ll.Value())) - - llh := LatLngHeight{ - Lat: 1, - Lng: 2, - Height: 3, - } - assert.True(t, ValueTypeLatLngHeight.ValidateValue(llh.Value())) - - ca := Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - } - assert.True(t, ValueTypeCamera.ValidateValue(ca.Value())) - - ty := Typography{ - FontFamily: getStrRef("x"), - FontWeight: 
getStrRef("b"), - FontSize: nil, - Color: getStrRef("red"), - TextAlign: TextAlignFromRef(getStrRef(TextAlignCenter.String())), - Bold: getBoolRef(true), - Italic: getBoolRef(false), - Underline: getBoolRef(true), - } - assert.True(t, ValueTypeTypography.ValidateValue(ty.Value())) - - co := Coordinates{ - llh, - } - assert.True(t, ValueTypeCoordinates.ValidateValue(co.Value())) - - po := Polygon{ - co, - } - assert.True(t, ValueTypePolygon.ValidateValue(po.Value())) - - rc := Rect{ - West: 10, - South: 3, - East: 5, - North: 2, - } - assert.True(t, ValueTypeRect.ValidateValue(rc.Value())) -} diff --git a/pkg/property/value_type.go b/pkg/property/value_type.go deleted file mode 100644 index 1b0ce23fe..000000000 --- a/pkg/property/value_type.go +++ /dev/null @@ -1,603 +0,0 @@ -package property - -import ( - "encoding/json" - "fmt" - "net/url" - - "github.com/mitchellh/mapstructure" - "github.com/reearth/reearth-backend/pkg/id" -) - -// ValueType _ -type ValueType string - -const ( - // ValueTypeBool _ - ValueTypeBool ValueType = "bool" - // ValueTypeNumber _ - ValueTypeNumber ValueType = "number" - // ValueTypeString _ - ValueTypeString ValueType = "string" - // ValueTypeRef _ - ValueTypeRef ValueType = "ref" - // ValueTypeURL _ - ValueTypeURL ValueType = "url" - // ValueTypeLatLng _ - ValueTypeLatLng ValueType = "latlng" - // ValueTypeLatLngHeight _ - ValueTypeLatLngHeight ValueType = "latlngheight" - // ValueTypeCamera _ - ValueTypeCamera ValueType = "camera" - // ValueTypeTypography _ - ValueTypeTypography ValueType = "typography" - // ValueTypeCoordinates _ - ValueTypeCoordinates ValueType = "coordinates" - // ValueTypePolygon - ValueTypePolygon ValueType = "polygon" - // ValueTypeRect - ValueTypeRect ValueType = "rect" -) - -// ValueTypeFrom _ -func ValueTypeFrom(t string) (ValueType, bool) { - switch ValueType(t) { - case ValueTypeBool: - return ValueTypeBool, true - case ValueTypeNumber: - return ValueTypeNumber, true - case ValueTypeString: - return 
ValueTypeString, true - case ValueTypeRef: - return ValueTypeRef, true - case ValueTypeURL: - return ValueTypeURL, true - case ValueTypeLatLng: - return ValueTypeLatLng, true - case ValueTypeLatLngHeight: - return ValueTypeLatLngHeight, true - case ValueTypeCamera: - return ValueTypeCamera, true - case ValueTypeTypography: - return ValueTypeTypography, true - case ValueTypeCoordinates: - return ValueTypeCoordinates, true - case ValueTypePolygon: - return ValueTypePolygon, true - case ValueTypeRect: - return ValueTypeRect, true - } - return ValueType(""), false -} - -// Validate _ -func (t ValueType) Validate() (ValueType, bool) { - switch t { - case ValueTypeBool: - fallthrough - case ValueTypeNumber: - fallthrough - case ValueTypeString: - fallthrough - case ValueTypeRef: - fallthrough - case ValueTypeURL: - fallthrough - case ValueTypeLatLng: - fallthrough - case ValueTypeLatLngHeight: - fallthrough - case ValueTypeCamera: - fallthrough - case ValueTypeTypography: - fallthrough - case ValueTypeCoordinates: - fallthrough - case ValueTypePolygon: - fallthrough - case ValueTypeRect: - return t, true - } - return t, false -} - -// Value _ -type Value struct { - v interface{} - t ValueType -} - -// IsEmpty _ -func (v *Value) IsEmpty() bool { - return v == nil || v.v == nil -} - -// Clone _ -func (v *Value) Clone() *Value { - if v == nil { - return nil - } - return v.t.ValueFromUnsafe(v.v) -} - -// Value _ -func (v *Value) Value() interface{} { - if v == nil { - return nil - } - return v.v -} - -// ValueBool _ -func (v *Value) ValueBool() (vv bool, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(bool) - return -} - -// ValueNumber _ -func (v *Value) ValueNumber() (vv float64, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(float64) - return -} - -// ValueString _ -func (v *Value) ValueString() (vv string, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(string) - return -} - -// ValueRef _ -func (v *Value) ValueRef() (vv id.ID, ok bool) { - if v 
== nil { - return - } - vv, ok = v.v.(id.ID) - return -} - -// ValueURL _ -func (v *Value) ValueURL() (vv *url.URL, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(*url.URL) - return -} - -// ValueLatLng _ -func (v *Value) ValueLatLng() (vv LatLng, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(LatLng) - return -} - -// ValueLatLngHeight _ -func (v *Value) ValueLatLngHeight() (vv LatLngHeight, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(LatLngHeight) - return -} - -// ValueCamera _ -func (v *Value) ValueCamera() (vv Camera, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(Camera) - return -} - -// ValueTypography _ -func (v *Value) ValueTypography() (vv Typography, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(Typography) - return -} - -// ValueCoordinates _ -func (v *Value) ValueCoordinates() (vv Coordinates, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(Coordinates) - return -} - -// ValuePolygon _ -func (v *Value) ValuePolygon() (vv Polygon, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(Polygon) - return -} - -// ValueRect _ -func (v *Value) ValueRect() (vv Rect, ok bool) { - if v == nil { - return - } - vv, ok = v.v.(Rect) - return -} - -// Type _ -func (v *Value) Type() ValueType { - if v == nil { - return ValueType("") - } - return v.t -} - -// ValueFromUnsafe _ -func (t ValueType) ValueFromUnsafe(v interface{}) *Value { - v2, _ := t.ValueFrom(v) - return v2 -} - -func (t ValueType) MustBeValue(v interface{}) *Value { - v2, ok := t.ValueFrom(v) - if !ok { - panic("incompatible value for property value") - } - return v2 -} - -// ValueFrom _ -func (t ValueType) ValueFrom(v interface{}) (*Value, bool) { - if t == "" { - return nil, false - } - if v == nil { - return nil, true - } - - switch t { - case ValueTypeBool: - if v2, ok := v.(bool); ok { - return &Value{v: v2, t: ValueTypeBool}, true - } - case ValueTypeNumber: - if v2, ok := v.(json.Number); ok { - if v3, err := v2.Float64(); err == nil { 
- return &Value{v: v3, t: ValueTypeNumber}, true - } - } else if v2, ok := v.(float32); ok { - return &Value{v: v2, t: ValueTypeNumber}, true - } else if v2, ok := v.(float64); ok { - return &Value{v: v2, t: ValueTypeNumber}, true - } else if v2, ok := v.(int); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(int8); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(int16); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(int32); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(int64); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(uint); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(uint8); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(uint16); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(uint32); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } else if v2, ok := v.(uint64); ok { - return &Value{v: float64(v2), t: ValueTypeNumber}, true - } - case ValueTypeString: - if v2, ok := v.(string); ok { - return &Value{v: v2, t: ValueTypeString}, true - } - case ValueTypeRef: - if v2, ok := v.(id.ID); ok { - return &Value{v: v2, t: ValueTypeRef}, true - } else if v2, ok := v.(string); ok { - if id, err := id.NewIDWith(v2); err == nil { - return &Value{v: id, t: ValueTypeRef}, true - } - } - case ValueTypeURL: - if v2, ok := v.(*url.URL); ok { - if v2 == nil { - return nil, false - } - return &Value{v: v2, t: ValueTypeURL}, true - } else if v2, ok := v.(string); ok { - if u, err := url.Parse(v2); err == nil { - return &Value{v: u, t: ValueTypeURL}, true - } - } - case ValueTypeLatLng: - if v2, ok := v.(LatLng); ok { - return &Value{v: v2, t: ValueTypeLatLng}, true - } else if v2, ok := v.(*LatLng); ok { - if v2 == nil { - return nil, false 
- } - return &Value{v: *v2, t: ValueTypeLatLng}, true - } - v2 := LatLng{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil, false - } - return &Value{v: v2, t: ValueTypeLatLng}, true - case ValueTypeLatLngHeight: - if v2, ok := v.(LatLngHeight); ok { - return &Value{v: v2, t: ValueTypeLatLngHeight}, true - } else if v2, ok := v.(*LatLngHeight); ok { - if v2 == nil { - return nil, false - } - return &Value{v: *v2, t: ValueTypeLatLngHeight}, true - } - v2 := LatLngHeight{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil, false - } - return &Value{v: v2, t: ValueTypeLatLngHeight}, true - case ValueTypeCamera: - if v2, ok := v.(Camera); ok { - return &Value{v: v2, t: ValueTypeCamera}, true - } else if v2, ok := v.(*Camera); ok { - if v2 == nil { - return nil, false - } - return &Value{v: *v2, t: ValueTypeCamera}, true - } - v2 := Camera{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil, false - } - return &Value{v: v2, t: ValueTypeCamera}, true - case ValueTypeTypography: - if v2, ok := v.(Typography); ok { - return &Value{v: v2, t: ValueTypeTypography}, true - } else if v2, ok := v.(*Typography); ok { - if v2 == nil { - return nil, false - } - return &Value{v: *v2, t: ValueTypeTypography}, true - } - v2 := Typography{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil, false - } - return &Value{v: v2, t: ValueTypeTypography}, true - case ValueTypeCoordinates: - if v2, ok := v.(Coordinates); ok { - return &Value{v: v2, t: ValueTypeCoordinates}, true - } else if v2, ok := v.(*Coordinates); ok { - if v2 == nil { - return nil, false - } - return &Value{v: *v2, t: ValueTypeCoordinates}, true - } else if v2, ok := v.([]float64); ok { - if v2 == nil { - return nil, false - } - return &Value{v: CoordinatesFrom(v2), t: ValueTypeCoordinates}, true - } - - v2 := []float64{} - if err := mapstructure.Decode(v, &v2); err == nil { - return &Value{v: CoordinatesFrom(v2), t: ValueTypeCoordinates}, true - } - - 
v3 := Coordinates{} - if err := mapstructure.Decode(v, &v3); err != nil { - return nil, false - } - return &Value{v: v3, t: ValueTypeCoordinates}, true - case ValueTypePolygon: - if v2, ok := v.(Polygon); ok { - return &Value{v: v2, t: ValueTypePolygon}, true - } else if v2, ok := v.(*Polygon); ok { - if v2 == nil { - return nil, false - } - return &Value{v: *v2, t: ValueTypePolygon}, true - } - v2 := Polygon{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil, false - } - return &Value{v: v2, t: ValueTypePolygon}, true - case ValueTypeRect: - if v2, ok := v.(Rect); ok { - return &Value{v: v2, t: ValueTypeRect}, true - } else if v2, ok := v.(*Rect); ok { - if v2 == nil { - return nil, false - } - return &Value{v: *v2, t: ValueTypeRect}, true - } - v2 := Rect{} - if err := mapstructure.Decode(v, &v2); err != nil { - return nil, false - } - return &Value{v: v2, t: ValueTypeRect}, true - } - return nil, false -} - -// ValidateValue _ -func (t ValueType) ValidateValue(v *Value) bool { - if v == nil { - return true - } - vv := v.Value() - if vv == nil { - return true - } - switch t { - case ValueTypeBool: - if _, ok := vv.(bool); ok { - return true - } - case ValueTypeNumber: - if _, ok := vv.(float64); ok { - return true - } - case ValueTypeString: - if _, ok := vv.(string); ok { - return true - } - case ValueTypeRef: - if _, ok := vv.(id.ID); ok { - return true - } - case ValueTypeURL: - if _, ok := vv.(*url.URL); ok { - return true - } - case ValueTypeLatLng: - if _, ok := vv.(LatLng); ok { - return true - } - case ValueTypeLatLngHeight: - if _, ok := vv.(LatLngHeight); ok { - return true - } - case ValueTypeCamera: - if _, ok := vv.(Camera); ok { - return true - } - case ValueTypeTypography: - if _, ok := vv.(Typography); ok { - return true - } - case ValueTypeCoordinates: - if _, ok := vv.(Coordinates); ok { - return true - } - case ValueTypePolygon: - if _, ok := vv.(Polygon); ok { - return true - } - case ValueTypeRect: - if _, ok := vv.(Rect); 
ok { - return true - } - } - return false -} - -func (t *ValueType) MarshalJSON() ([]byte, error) { - if t == nil { - return nil, nil - } - return json.Marshal(string(*t)) -} - -func (t *ValueType) UnmarshalJSON(bs []byte) (err error) { - var vtstr string - if err = json.Unmarshal(bs, &vtstr); err != nil { - return - } - var ok bool - *t, ok = ValueTypeFrom(vtstr) - if !ok { - return fmt.Errorf("invalid property value type: %s", vtstr) - } - return -} - -func (t *ValueType) MarshalText() ([]byte, error) { - if t == nil { - return nil, nil - } - return []byte(*t), nil -} - -func (t *ValueType) UnmarshalText(text []byte) (err error) { - var ok bool - *t, ok = ValueTypeFrom(string(text)) - if !ok { - return fmt.Errorf("invalid property value type: %s", text) - } - return -} - -// Interface converts the value into generic representation -func (v *Value) Interface() interface{} { - if v == nil { - return nil - } - switch v2 := v.Value().(type) { - case bool: - return v2 - case float64: - return v2 - case string: - return v2 - case id.ID: - return v2.String() - case *url.URL: - return v2.String() - case LatLng: - var v3 map[string]interface{} - if err := mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - case LatLngHeight: - var v3 map[string]interface{} - if err := mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - case Camera: - var v3 map[string]interface{} - if err := mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - case Typography: - var v3 map[string]interface{} - if err := mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - case Coordinates: - var v3 []map[string]interface{} - if err := mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - case Polygon: - var v3 [][]map[string]interface{} - if err := mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - case Rect: - var v3 map[string]interface{} - if err := 
mapstructure.Decode(&v2, &v3); err != nil { - return nil - } - return v3 - } - return nil -} - -func (v *Value) MarshalJSON() ([]byte, error) { - return json.Marshal(v.Interface()) -} diff --git a/pkg/property/value_type_test.go b/pkg/property/value_type_test.go deleted file mode 100644 index 5f003f680..000000000 --- a/pkg/property/value_type_test.go +++ /dev/null @@ -1,1426 +0,0 @@ -package property - -import ( - "encoding/json" - "net/url" - "strconv" - "testing" - - "github.com/reearth/reearth-backend/pkg/id" - "github.com/stretchr/testify/assert" -) - -func TestValueTypeFrom(t *testing.T) { - testCases := []struct { - Name, Input string - Expected struct { - V ValueType - B bool - } - }{ - { - Name: "bool", - Input: "bool", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeBool, - B: true, - }, - }, - { - Name: "number", - Input: "number", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeNumber, - B: true, - }, - }, - { - Name: "ref", - Input: "ref", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeRef, - B: true, - }, - }, - { - Name: "url", - Input: "url", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeURL, - B: true, - }, - }, - { - Name: "string", - Input: "string", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeString, - B: true, - }, - }, { - Name: "camera", - Input: "camera", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeCamera, - B: true, - }, - }, - { - Name: "bool", - Input: "bool", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeBool, - B: true, - }, - }, - { - Name: "LatLngHeight", - Input: "latlngheight", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeLatLngHeight, - B: true, - }, - }, - { - Name: "latlng", - Input: "latlng", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeLatLng, - B: true, - }, - }, - { - Name: "polygon", - Input: "polygon", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypePolygon, 
- B: true, - }, - }, - { - Name: "rect", - Input: "rect", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeRect, - B: true, - }, - }, - { - Name: "coordinates", - Input: "coordinates", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeCoordinates, - B: true, - }, - }, - { - Name: "typography", - Input: "typography", - Expected: struct { - V ValueType - B bool - }{ - V: ValueTypeTypography, - B: true, - }, - }, - { - Name: "unknown", - Input: "", - Expected: struct { - V ValueType - B bool - }{ - V: ValueType(""), - B: false, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, b := ValueTypeFrom(tc.Input) - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.B, b) - - v2, b2 := v.Validate() - assert.Equal(tt, tc.Expected.V, v2) - assert.Equal(tt, tc.Expected.B, b2) - }) - } -} - -func TestValue_IsEmpty(t *testing.T) { - var v *Value - assert.True(t, v.IsEmpty()) -} - -func TestValue_Clone(t *testing.T) { - var v *Value - assert.Nil(t, v.Clone()) - v, _ = ValueTypeBool.ValueFrom(true) - assert.Equal(t, v, v.Clone()) -} - -func TestValue_Value(t *testing.T) { - var v *Value - assert.Nil(t, v.Value()) - v, _ = ValueTypeBool.ValueFrom(true) - assert.Equal(t, true, v.Value()) -} - -func TestValue_Type(t *testing.T) { - var v *Value - assert.Equal(t, ValueType(""), v.Type()) - v, _ = ValueTypeBool.ValueFrom(true) - assert.Equal(t, ValueTypeBool, v.Type()) -} - -func TestValue_ValueBool(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V, Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeBool.ValueFromUnsafe(true), - Expected: struct { - V, Ok bool - }{ - V: true, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueBool() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - 
}) - } -} - -func TestValue_ValueString(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V string - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeString.ValueFromUnsafe("xxx"), - Expected: struct { - V string - Ok bool - }{V: "xxx", Ok: true}, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueString() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueNumber(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V float64 - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeNumber.ValueFromUnsafe(5.5), - Expected: struct { - V float64 - Ok bool - }{V: 5.5, Ok: true}, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueNumber() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueLatLng(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V LatLng - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeLatLng.ValueFromUnsafe(map[string]interface{}{ - "Lat": 1, - "Lng": 2, - }), - Expected: struct { - V LatLng - Ok bool - }{ - V: LatLng{ - Lat: 1, - Lng: 2, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueLatLng() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueLatLngHeight(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V LatLngHeight - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeLatLngHeight.ValueFromUnsafe(map[string]interface{}{ - "Lat": 1, 
- "Lng": 2, - "Height": 22, - }), - Expected: struct { - V LatLngHeight - Ok bool - }{ - V: LatLngHeight{ - Lat: 1, - Lng: 2, - Height: 22, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueLatLngHeight() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueCamera(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V Camera - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeCamera.ValueFromUnsafe( - map[string]interface{}{ - "Lat": 1, - "Lng": 2, - "Altitude": 3, - "Heading": 4, - "Pitch": 5, - "Roll": 6, - "FOV": 7, - }), - Expected: struct { - V Camera - Ok bool - }{ - V: Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueCamera() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueCoordinates(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V Coordinates - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeCoordinates.ValueFromUnsafe( - []map[string]interface{}{ - { - "lat": 1, - "lng": 2, - "height": 3, - }, - }), - Expected: struct { - V Coordinates - Ok bool - }{ - V: Coordinates{ - LatLngHeight{ - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueCoordinates() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValuePolygon(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V Polygon - 
Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypePolygon.ValueFromUnsafe( - [][]map[string]interface{}{ - { - { - "lat": 1, - "lng": 2, - "height": 3, - }, - }, - }), - Expected: struct { - V Polygon - Ok bool - }{ - V: []Coordinates{ - []LatLngHeight{ - { - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValuePolygon() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueRect(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V Rect - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeRect.ValueFromUnsafe( - map[string]interface{}{ - "West": 2, - "South": 3, - "East": 4, - "North": 5, - }), - Expected: struct { - V Rect - Ok bool - }{ - V: Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueRect() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueRef(t *testing.T) { - uid := id.New() - testCases := []struct { - Name string - V *Value - Expected struct { - V id.ID - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeRef.ValueFromUnsafe(uid), - Expected: struct { - V id.ID - Ok bool - }{V: uid, Ok: true}, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueRef() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueURL(t *testing.T) { - testCases := []struct { - Name string - V *Value - Expected struct { - V *url.URL - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - 
V: ValueTypeURL.ValueFromUnsafe(map[string]interface{}{ - "Scheme": "xx", - "Opaque": "aa.hh", - "Path": "zz/vv.bb", - "ForceQuery": false, - }), - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueURL() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_ValueTypography(t *testing.T) { - ff, fs, ts := "Times New Roman", 10, TextAlignLeft - var c, fw *string - var b, i, u *bool - - testCases := []struct { - Name string - V *Value - Expected struct { - V Typography - Ok bool - } - }{ - { - Name: "nil value", - }, - { - Name: "success", - V: ValueTypeTypography.ValueFromUnsafe(map[string]interface{}{ - "fontFamily": &ff, - "fontSize": &fs, - "textAlign": &ts, - "color": c, - "fontWeight": fw, - "bold": b, - "italic": i, - "underline": u, - }), - Expected: struct { - V Typography - Ok bool - }{ - V: Typography{ - FontFamily: &ff, - FontWeight: fw, - FontSize: &fs, - Color: c, - TextAlign: &ts, - Bold: b, - Italic: i, - Underline: u, - }, - Ok: true, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - v, ok := tc.V.ValueTypography() - assert.Equal(tt, tc.Expected.V, v) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValueType_ValueFrom(t *testing.T) { - var llh *LatLngHeight - var ll *LatLng - var ca *Camera - var rc *Rect - var cords *Coordinates - var p *Polygon - var ty *Typography - iid := id.New() - testCases := []struct { - Name string - Input interface{} - VT ValueType - Expected struct { - V interface{} - Ok bool - } - }{ - { - Name: "valueType is nil", - VT: "", - Expected: struct { - V interface{} - Ok bool - }{ - V: nil, - Ok: false, - }, - }, - { - Name: "input is nil", - VT: ValueTypeBool, - Expected: struct { - V interface{} - Ok bool - }{ - V: nil, - Ok: true, - }, - }, - { - Name: "bool", - Input: true, - VT: ValueTypeBool, - Expected: struct { 
- V interface{} - Ok bool - }{ - V: true, - Ok: true, - }, - }, - { - Name: "string", - Input: "xxx", - VT: ValueTypeString, - Expected: struct { - V interface{} - Ok bool - }{ - V: "xxx", - Ok: true, - }, - }, - { - Name: "number: json number", - Input: json.Number(strconv.FormatFloat(10, 'e', 0, 64)), - VT: ValueTypeNumber, - Expected: struct { - V interface{} - Ok bool - }{ - V: float64(10), - Ok: true, - }, - }, - { - Name: "number: float64", - Input: float64(11), - VT: ValueTypeNumber, - Expected: struct { - V interface{} - Ok bool - }{ - V: float64(11), - Ok: true, - }, - }, - { - Name: "number: int64", - Input: 12, - VT: ValueTypeNumber, - Expected: struct { - V interface{} - Ok bool - }{ - V: float64(12), - Ok: true, - }, - }, - { - Name: "ref: string", - Input: iid.String(), - VT: ValueTypeRef, - Expected: struct { - V interface{} - Ok bool - }{ - V: iid, - Ok: true, - }, - }, - { - Name: "ref: id", - Input: iid, - VT: ValueTypeRef, - Expected: struct { - V interface{} - Ok bool - }{ - V: iid, - Ok: true, - }, - }, - { - Name: "latlng", - Input: LatLng{ - Lat: 10, - Lng: 11, - }, - VT: ValueTypeLatLng, - Expected: struct { - V interface{} - Ok bool - }{ - V: LatLng{ - Lat: 10, - Lng: 11, - }, - Ok: true, - }, - }, - { - Name: "latlng: nil", - Input: ll, - VT: ValueTypeLatLng, - Expected: struct { - V interface{} - Ok bool - }{ - Ok: false, - }, - }, - { - Name: "latlng: ref", - Input: &LatLng{ - Lat: 10, - Lng: 11, - }, - VT: ValueTypeLatLng, - Expected: struct { - V interface{} - Ok bool - }{ - V: LatLng{ - Lat: 10, - Lng: 11, - }, - Ok: true, - }, - }, - { - Name: "latlng: map", - Input: map[string]interface{}{ - "lat": 10, - "lng": 11, - }, - VT: ValueTypeLatLng, - Expected: struct { - V interface{} - Ok bool - }{ - V: LatLng{ - Lat: 10, - Lng: 11, - }, - Ok: true, - }, - }, - { - Name: "latlngheight: map", - Input: map[string]interface{}{ - "lng": 11, - "lat": 12, - "height": 13, - }, - VT: ValueTypeLatLngHeight, - Expected: struct { - V interface{} - 
Ok bool - }{ - V: LatLngHeight{ - Lat: 12, - Lng: 11, - Height: 13, - }, - Ok: true, - }, - }, - { - Name: "latlngheight: nil", - Input: llh, - VT: ValueTypeLatLngHeight, - Expected: struct { - V interface{} - Ok bool - }{ - Ok: false, - }, - }, - { - Name: "latlngheight", - Input: LatLngHeight{ - Lat: 12, - Lng: 11, - Height: 13, - }, - VT: ValueTypeLatLngHeight, - Expected: struct { - V interface{} - Ok bool - }{ - V: LatLngHeight{ - Lat: 12, - Lng: 11, - Height: 13, - }, - Ok: true, - }, - }, - { - Name: "latlngheight: ref", - Input: &LatLngHeight{ - Lat: 12, - Lng: 11, - Height: 13, - }, - VT: ValueTypeLatLngHeight, - Expected: struct { - V interface{} - Ok bool - }{ - V: LatLngHeight{ - Lat: 12, - Lng: 11, - Height: 13, - }, - Ok: true, - }, - }, - { - Name: "camera: map", - Input: map[string]interface{}{ - "Lat": 1, - "Lng": 2, - "Altitude": 3, - "Heading": 4, - "Pitch": 5, - "Roll": 6, - "FOV": 7, - }, - VT: ValueTypeCamera, - Expected: struct { - V interface{} - Ok bool - }{ - V: Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - Ok: true, - }, - }, - { - Name: "camera", - Input: Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - VT: ValueTypeCamera, - Expected: struct { - V interface{} - Ok bool - }{ - V: Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - Ok: true, - }, - }, - { - Name: "camera: ref", - Input: &Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - VT: ValueTypeCamera, - Expected: struct { - V interface{} - Ok bool - }{ - V: Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }, - Ok: true, - }, - }, - { - Name: "camera: nil", - Input: ca, - VT: ValueTypeCamera, - Expected: struct { - V interface{} - Ok bool - }{}, - }, - { - Name: "rect: nil", - Input: rc, - VT: ValueTypeRect, - Expected: struct { - V interface{} - Ok bool 
- }{}, - }, - { - Name: "rect: map", - Input: map[string]interface{}{ - "West": 2, - "South": 3, - "East": 4, - "North": 5, - }, - VT: ValueTypeRect, - Expected: struct { - V interface{} - Ok bool - }{ - V: Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }, - Ok: true, - }, - }, - { - Name: "rect", - Input: Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }, - VT: ValueTypeRect, - Expected: struct { - V interface{} - Ok bool - }{ - V: Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }, - Ok: true, - }, - }, - { - Name: "rect: ref", - Input: &Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }, - VT: ValueTypeRect, - Expected: struct { - V interface{} - Ok bool - }{ - V: Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }, - Ok: true, - }, - }, - { - Name: "cods: map", - Input: []map[string]interface{}{ - { - "lat": 1, - "lng": 2, - "height": 3, - }, - }, - VT: ValueTypeCoordinates, - Expected: struct { - V interface{} - Ok bool - }{ - V: Coordinates{ - { - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - Ok: true, - }, - }, - { - Name: "cods: ref", - Input: &Coordinates{ - { - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - VT: ValueTypeCoordinates, - Expected: struct { - V interface{} - Ok bool - }{ - V: Coordinates{ - { - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - Ok: true, - }, - }, - { - Name: "cods: nil", - Input: cords, - VT: ValueTypeCoordinates, - }, - { - Name: "polygon: nil", - Input: p, - VT: ValueTypePolygon, - }, - { - Name: "polygon: nil", - Input: &Polygon{ - Coordinates{ - { - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - }, - VT: ValueTypePolygon, - Expected: struct { - V interface{} - Ok bool - }{ - V: Polygon{ - Coordinates{ - { - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - }, - Ok: true, - }, - }, - { - Name: "typography: nil", - Input: ty, - VT: ValueTypeTypography, - }, - { - Name: "undefined", - Input: "ttt", - VT: "xxx", - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - 
v, ok := tc.VT.ValueFrom(tc.Input) - assert.Equal(tt, tc.Expected.V, v.Value()) - assert.Equal(tt, tc.Expected.Ok, ok) - }) - } -} - -func TestValue_Interface(t *testing.T) { - ff, fs, ts := "Times New Roman", 10, TextAlignLeft - var c, fw *string - var b, i, u *bool - testCases := []struct { - Name string - V *Value - Expected interface{} - }{ - { - Name: "nil", - }, - { - Name: "undefined", - V: ValueType("uu").ValueFromUnsafe("xx"), - Expected: nil, - }, - { - Name: "bool", - V: ValueTypeBool.ValueFromUnsafe(true), - Expected: true, - }, - { - Name: "latlng", - V: ValueTypeLatLng.ValueFromUnsafe( - LatLng{ - Lat: 1, - Lng: 2, - }), - Expected: map[string]interface{}{ - "lat": 1.0, - "lng": 2.0, - }, - }, - { - Name: "Typography", - V: ValueTypeTypography.ValueFromUnsafe( - Typography{ - FontFamily: &ff, - FontWeight: fw, - FontSize: &fs, - Color: c, - TextAlign: &ts, - Bold: b, - Italic: i, - Underline: u, - }), - Expected: map[string]interface{}{ - "fontFamily": &ff, - "fontSize": &fs, - "textAlign": &ts, - "color": c, - "fontWeight": fw, - "bold": b, - "italic": i, - "underline": u, - }, - }, - { - Name: "camera", - V: ValueTypeCamera.ValueFromUnsafe( - Camera{ - Lat: 1, - Lng: 2, - Altitude: 3, - Heading: 4, - Pitch: 5, - Roll: 6, - FOV: 7, - }), - Expected: map[string]interface{}{ - "lat": 1.0, - "lng": 2.0, - "altitude": 3.0, - "heading": 4.0, - "pitch": 5.0, - "roll": 6.0, - "fov": 7.0, - }, - }, - { - Name: "rect", - V: ValueTypeRect.ValueFromUnsafe( - Rect{ - West: 2, - South: 3, - East: 4, - North: 5, - }), - Expected: map[string]interface{}{ - "west": 2.0, - "south": 3.0, - "east": 4.0, - "north": 5.0, - }, - }, - { - Name: "latlngheight", - V: ValueTypeLatLngHeight.ValueFromUnsafe( - LatLngHeight{ - Lat: 1, - Lng: 2, - Height: 3, - }), - Expected: map[string]interface{}{ - "lat": 1.0, - "lng": 2.0, - "height": 3.0, - }, - }, - { - Name: "coordinates", - V: ValueTypeCoordinates.ValueFromUnsafe( - Coordinates{ - LatLngHeight{ - Lat: 1, - Lng: 2, - 
Height: 3, - }, - }), - Expected: []map[string]interface{}{ - { - "lat": 1.0, - "lng": 2.0, - "height": 3.0, - }, - }, - }, - { - Name: "polygon", - V: ValueTypePolygon.ValueFromUnsafe( - Polygon{ - Coordinates{ - LatLngHeight{ - Lat: 1, - Lng: 2, - Height: 3, - }, - }, - }), - Expected: [][]map[string]interface{}{ - {{ - "lat": 1.0, - "lng": 2.0, - "height": 3.0, - }}, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - i := tc.V.Interface() - assert.Equal(tt, tc.Expected, i) - }) - } -} diff --git a/pkg/property/value_typography.go b/pkg/property/value_typography.go new file mode 100644 index 000000000..48f24987d --- /dev/null +++ b/pkg/property/value_typography.go @@ -0,0 +1,133 @@ +package property + +import ( + "github.com/mitchellh/mapstructure" +) + +var ValueTypeTypography = ValueType("typography") + +type Typography struct { + FontFamily *string `json:"fontFamily" mapstructure:"fontFamily"` + FontWeight *string `json:"fontWeight" mapstructure:"fontWeight"` + FontSize *int `json:"fontSize" mapstructure:"fontSize"` + Color *string `json:"color" mapstructure:"color"` + TextAlign *TextAlign `json:"textAlign" mapstructure:"textAlign"` + Bold *bool `json:"bold" mapstructure:"bold"` + Italic *bool `json:"italic" mapstructure:"italic"` + Underline *bool `json:"underline" mapstructure:"underline"` +} + +func (t *Typography) Clone() *Typography { + if t == nil { + return nil + } + return &Typography{ + FontFamily: t.FontFamily, + FontWeight: t.FontWeight, + FontSize: t.FontSize, + Color: t.Color, + TextAlign: t.TextAlign, + Bold: t.Bold, + Italic: t.Italic, + Underline: t.Underline, + } +} + +type TextAlign string + +const ( + TextAlignLeft TextAlign = "left" + TextAlignCenter TextAlign = "center" + TextAlignRight TextAlign = "right" + TextAlignJustify TextAlign = "justify" + TextAlignJustifyAll TextAlign = "justify_all" +) + +func TextAlignFrom(t string) (TextAlign, bool) { + switch TextAlign(t) { + 
case TextAlignLeft: + return TextAlignLeft, true + case TextAlignCenter: + return TextAlignCenter, true + case TextAlignRight: + return TextAlignRight, true + case TextAlignJustify: + return TextAlignJustify, true + case TextAlignJustifyAll: + return TextAlignJustifyAll, true + } + return TextAlign(""), false +} + +func TextAlignFromRef(t *string) *TextAlign { + if t == nil { + return nil + } + var t2 TextAlign + switch TextAlign(*t) { + case TextAlignLeft: + t2 = TextAlignLeft + case TextAlignCenter: + t2 = TextAlignCenter + case TextAlignRight: + t2 = TextAlignRight + case TextAlignJustify: + t2 = TextAlignJustify + case TextAlignJustifyAll: + t2 = TextAlignJustifyAll + default: + return nil + } + return &t2 +} + +func (t TextAlign) String() string { + return string(t) +} + +func (t *TextAlign) StringRef() *string { + if t == nil { + return nil + } + t2 := string(*t) + return &t2 +} + +type typePropertyTypography struct{} + +func (*typePropertyTypography) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Typography); ok { + return v, true + } + + if v, ok := i.(*Typography); ok { + if v != nil { + return *v, true + } + return nil, false + } + + v := Typography{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, true + } + + return nil, false +} + +func (*typePropertyTypography) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*typePropertyTypography) Validate(i interface{}) bool { + _, ok := i.(Typography) + return ok +} + +func (v *Value) ValueTypography() (vv Typography, ok bool) { + if v == nil { + return + } + vv, ok = v.Value().(Typography) + return +} diff --git a/pkg/property/value_typography_test.go b/pkg/property/value_typography_test.go new file mode 100644 index 000000000..9f9e73127 --- /dev/null +++ b/pkg/property/value_typography_test.go @@ -0,0 +1,204 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func getStrRef(i string) *string { + return &i +} + +func 
getBoolRef(i bool) *bool { + return &i +} + +func TestTypography_Clone(t *testing.T) { + i := 10 + + testes := []struct { + Name string + Typography, Expected *Typography + }{ + { + Name: "nil typography", + }, + { + Name: "cloned", + Typography: &Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: &i, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef(TextAlignCenter.String())), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + }, + Expected: &Typography{ + FontFamily: getStrRef("x"), + FontWeight: getStrRef("b"), + FontSize: &i, + Color: getStrRef("red"), + TextAlign: TextAlignFromRef(getStrRef("center")), + Bold: getBoolRef(true), + Italic: getBoolRef(false), + Underline: getBoolRef(true), + }, + }, + } + + for _, tc := range testes { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.Typography.Clone() + assert.Equal(tt, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(tt, tc.Expected, res) + } + }) + } +} + +func TestTextAlignFrom(t *testing.T) { + testCases := []struct { + Name string + Expected struct { + TA TextAlign + Bool bool + } + }{ + { + Name: "left", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignLeft, + Bool: true, + }, + }, + { + Name: "right", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignRight, + Bool: true, + }, + }, + { + Name: "center", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignCenter, + Bool: true, + }, + }, + { + Name: "justify", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignJustify, + Bool: true, + }, + }, + { + Name: "justify_all", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlignJustifyAll, + Bool: true, + }, + }, + { + Name: "undefined", + Expected: struct { + TA TextAlign + Bool bool + }{ + TA: TextAlign(""), + Bool: false, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, 
func(tt *testing.T) { + tt.Parallel() + res, ok := TextAlignFrom(tc.Name) + assert.Equal(tt, tc.Expected.TA, res) + assert.Equal(tt, tc.Expected.Bool, ok) + }) + } +} + +func TestTextAlignFromRef(t *testing.T) { + ja := TextAlignJustifyAll + j := TextAlignJustify + c := TextAlignCenter + l := TextAlignLeft + r := TextAlignRight + testCases := []struct { + Name string + Input *string + Expected *TextAlign + }{ + { + Name: "left", + Input: getStrRef("left"), + Expected: &l, + }, + { + Name: "right", + Input: getStrRef("right"), + Expected: &r, + }, + { + Name: "center", + Input: getStrRef("center"), + Expected: &c, + }, + { + Name: "justify", + Input: getStrRef("justify"), + Expected: &j, + }, + { + Name: "justify_all", + Input: getStrRef("justify_all"), + Expected: &ja, + }, + { + Name: "undefined", + Input: getStrRef("undefined"), + }, + { + Name: "nil input", + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := TextAlignFromRef(tc.Input) + assert.Equal(tt, tc.Expected, res) + }) + } +} + +func TestTextAlign_StringRef(t *testing.T) { + var ta *TextAlign + assert.Nil(t, ta.StringRef()) +} diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index a3c92725f..dc2407994 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -77,13 +77,11 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). Build(), property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField2ID). - TypeUnsafe(property.ValueTypeNumber). - ValueUnsafe(property.ValueTypeNumber.ValueFromUnsafe(1)). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). Build(), }).MustBuild(), }). 
@@ -106,13 +104,11 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("yyy")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("yyy"))). Build(), property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField2ID). - TypeUnsafe(property.ValueTypeNumber). - ValueUnsafe(property.ValueTypeNumber.ValueFromUnsafe(1)). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). Build(), }).MustBuild(), }). @@ -133,13 +129,11 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). Build(), property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField3ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("test")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("test"))). Build(), }).MustBuild(), }). @@ -168,7 +162,7 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). LinksUnsafe(property.NewLinks([]*property.Link{ property.NewLink(ds2id, dss2id, ds2f1), property.NewLink(ds3id, dss3id, ds3f1), @@ -200,8 +194,7 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField2ID). - TypeUnsafe(property.ValueTypeNumber). - ValueUnsafe(property.ValueTypeNumber.ValueFromUnsafe(1)). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). 
Build(), }).MustBuild(), }). @@ -228,15 +221,14 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). LinksUnsafe(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss3id, ds3f1), })). Build(), property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField3ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("xxx")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). Build(), }).MustBuild(), }). @@ -266,7 +258,7 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). LinksUnsafe(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss1id, ds1f2), })). @@ -291,7 +283,7 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). LinksUnsafe(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss1id, ds1f1), property.NewLinkFieldOnly(dss2id, ds2f1), @@ -300,7 +292,7 @@ func TestSceneBuilder(t *testing.T) { Build(), property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField2ID). - TypeUnsafe(property.ValueTypeString). + ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). LinksUnsafe(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss1id, ds1f1), property.NewLinkFieldOnly(dss2id, ds2f1), @@ -329,16 +321,14 @@ func TestSceneBuilder(t *testing.T) { Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). 
- ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("XYZ")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("XYZ"))). Build(), }).MustBuild(), property.NewGroup().ID(propertyItemID2).Schema(propertySchemaID, propertySchemaGroup2ID). Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("ZYX")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("ZYX"))). Build(), }).MustBuild(), }).MustBuild(), @@ -371,8 +361,7 @@ func TestSceneBuilder(t *testing.T) { property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID).Fields([]*property.Field{ property.NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). - TypeUnsafe(property.ValueTypeString). - ValueUnsafe(property.ValueTypeString.ValueFromUnsafe("hogehoge")). + ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("hogehoge"))). Build(), }).MustBuild(), }). 
diff --git a/pkg/scene/builder/encoder_test.go b/pkg/scene/builder/encoder_test.go index afce3cc57..0bb83d027 100644 --- a/pkg/scene/builder/encoder_test.go +++ b/pkg/scene/builder/encoder_test.go @@ -99,10 +99,12 @@ func TestEncoder_Layers(t *testing.T) { } f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("location"), - Type: "latlng", - DatasetValue: nil, - PropertyValue: v1.Value(), + ID: id.PropertySchemaFieldID("location"), + Val: property.NewValueAndDatasetValue( + property.ValueTypeLatLng, + nil, + property.ValueTypeLatLng.ValueFrom(v1), + ), } fl1 := []*property.SealedField{} fl1 = append(fl1, &f1) @@ -157,7 +159,7 @@ func TestEncoder_Layers(t *testing.T) { PluginID: id.OfficialPluginID.StringRef(), ExtensionID: ex.StringRef(), Name: "test", - Property: map[string]interface{}{"default": map[string]interface{}{"location": map[string]interface{}{"lat": 4.4, "lng": 53.4}}}, + Property: map[string]interface{}{"default": map[string]interface{}{"location": property.LatLng{Lat: 4.4, Lng: 53.4}}}, Infobox: nil, }, }, @@ -177,7 +179,6 @@ func TestEncoder_Layers(t *testing.T) { assert.Equal(tt, tc.Expected.Name, res.Name) assert.Equal(tt, *tc.Expected.PluginID, *res.PluginID) } - }) } } diff --git a/pkg/value/bool.go b/pkg/value/bool.go new file mode 100644 index 000000000..538e3f5f3 --- /dev/null +++ b/pkg/value/bool.go @@ -0,0 +1,32 @@ +package value + +var TypeBool Type = "bool" + +type propertyBool struct{} + +func (*propertyBool) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(bool); ok { + return v, true + } + if v, ok := i.(*bool); ok && v != nil { + return *v, true + } + return nil, false +} + +func (*propertyBool) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyBool) Validate(i interface{}) bool { + _, ok := i.(bool) + return ok +} + +func (v *Value) ValueBool() (vv bool, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(bool) + return +} diff --git a/pkg/value/coordinates.go b/pkg/value/coordinates.go 
new file mode 100644 index 000000000..402889697 --- /dev/null +++ b/pkg/value/coordinates.go @@ -0,0 +1,78 @@ +package value + +import "github.com/mitchellh/mapstructure" + +type Coordinates []LatLngHeight + +// CoordinatesFrom generates a new Coordinates from slice such as [lon, lat, alt, lon, lat, alt, ...] +func CoordinatesFrom(coords []float64) Coordinates { + if len(coords) == 0 { + return nil + } + + r := make([]LatLngHeight, 0, len(coords)/3) + l := LatLngHeight{} + for i, c := range coords { + switch i % 3 { + case 0: + l = LatLngHeight{} + l.Lng = c + case 1: + l.Lat = c + case 2: + l.Height = c + r = append(r, l) + } + } + + return r +} + +var TypeCoordinates Type = "coordinates" + +type propertyCoordinates struct{} + +func (*propertyCoordinates) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Coordinates); ok { + return v, true + } else if v, ok := i.(*Coordinates); ok { + if v != nil { + return *v, true + } + return nil, false + } else if v2, ok := i.([]float64); ok { + if v2 == nil { + return nil, false + } + return CoordinatesFrom(v2), true + } + + v2 := Coordinates{} + if err := mapstructure.Decode(i, &v2); err == nil { + return v2, true + } + + v1 := []float64{} + if err := mapstructure.Decode(i, &v1); err == nil { + return CoordinatesFrom(v1), true + } + + return nil, false +} + +func (*propertyCoordinates) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyCoordinates) Validate(i interface{}) bool { + _, ok := i.(bool) + return ok +} + +func (v *Value) ValueCoordinates() (vv Coordinates, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(Coordinates) + return +} diff --git a/pkg/value/latlng.go b/pkg/value/latlng.go new file mode 100644 index 000000000..7612eb543 --- /dev/null +++ b/pkg/value/latlng.go @@ -0,0 +1,55 @@ +package value + +import "github.com/mitchellh/mapstructure" + +type LatLng struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` +} + +func 
(l *LatLng) Clone() *LatLng { + if l == nil { + return nil + } + return &LatLng{ + Lat: l.Lat, + Lng: l.Lng, + } +} + +var TypeLatLng Type = "latlng" + +type propertyLatLng struct{} + +func (*propertyLatLng) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(LatLng); ok { + return v, true + } else if v, ok := i.(*LatLng); ok { + if v != nil { + return *v, true + } + return nil, false + } + v := LatLng{} + if err := mapstructure.Decode(i, &v); err != nil { + return nil, false + } + return v, true +} + +func (*propertyLatLng) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyLatLng) Validate(i interface{}) bool { + _, ok := i.(LatLng) + return ok +} + +func (v *Value) ValueLatLng() (vv LatLng, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(LatLng) + return +} diff --git a/pkg/value/latlng_test.go b/pkg/value/latlng_test.go new file mode 100644 index 000000000..b774fe0b5 --- /dev/null +++ b/pkg/value/latlng_test.go @@ -0,0 +1,41 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLatLng_Clone(t *testing.T) { + tests := []struct { + Name string + LL, Expected *LatLng + }{ + { + Name: "nil latlng", + }, + { + Name: "cloned", + LL: &LatLng{ + Lat: 10, + Lng: 11, + }, + Expected: &LatLng{ + Lat: 10, + Lng: 11, + }, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.LL.Clone() + assert.Equal(tt, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(tt, tc.Expected, res) + } + }) + } +} diff --git a/pkg/value/latlngheight.go b/pkg/value/latlngheight.go new file mode 100644 index 000000000..173f3875d --- /dev/null +++ b/pkg/value/latlngheight.go @@ -0,0 +1,60 @@ +package value + +import "github.com/mitchellh/mapstructure" + +type LatLngHeight struct { + Lat float64 `json:"lat" mapstructure:"lat"` + Lng float64 `json:"lng" mapstructure:"lng"` + Height float64 `json:"height" mapstructure:"height"` +} + +func (l 
*LatLngHeight) Clone() *LatLngHeight { + if l == nil { + return nil + } + return &LatLngHeight{ + Lat: l.Lat, + Lng: l.Lng, + Height: l.Height, + } +} + +var TypeLatLngHeight Type = "latlngheight" + +type propertyLatLngHeight struct{} + +func (*propertyLatLngHeight) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(LatLngHeight); ok { + return v, true + } + + if v, ok := i.(*LatLngHeight); ok { + if v != nil { + return *v, false + } + return nil, false + } + + v := LatLngHeight{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, true + } + return nil, false +} + +func (*propertyLatLngHeight) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyLatLngHeight) Validate(i interface{}) bool { + _, ok := i.(LatLngHeight) + return ok +} + +func (v *Value) ValueLatLngHeight() (vv LatLngHeight, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(LatLngHeight) + return +} diff --git a/pkg/value/latlngheight_test.go b/pkg/value/latlngheight_test.go new file mode 100644 index 000000000..5578836c8 --- /dev/null +++ b/pkg/value/latlngheight_test.go @@ -0,0 +1,43 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLatLngHeight_Clone(t *testing.T) { + testCases := []struct { + Name string + LL, Expected *LatLngHeight + }{ + { + Name: "nil LatLngHeight", + }, + { + Name: "cloned", + LL: &LatLngHeight{ + Lat: 10, + Lng: 11, + Height: 12, + }, + Expected: &LatLngHeight{ + Lat: 10, + Lng: 11, + Height: 12, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.LL.Clone() + assert.Equal(tt, tc.Expected, res) + if tc.Expected != nil { + assert.NotSame(tt, tc.Expected, res) + } + }) + } +} diff --git a/pkg/value/number.go b/pkg/value/number.go new file mode 100644 index 000000000..275e5325f --- /dev/null +++ b/pkg/value/number.go @@ -0,0 +1,118 @@ +package value + +import "encoding/json" + +var TypeNumber Type = 
"number" + +type propertyNumber struct{} + +func (*propertyNumber) I2V(i interface{}) (interface{}, bool) { + switch v := i.(type) { + case float64: + return v, true + case float32: + return float64(v), true + case int: + return float64(v), true + case int8: + return float64(v), true + case int16: + return float64(v), true + case int32: + return float64(v), true + case int64: + return float64(v), true + case uint: + return float64(v), true + case uint8: + return float64(v), true + case uint16: + return float64(v), true + case uint32: + return float64(v), true + case uint64: + return float64(v), true + case uintptr: + return float64(v), true + case json.Number: + if f, err := v.Float64(); err == nil { + return f, true + } + case *float64: + if v != nil { + return *v, true + } + case *float32: + if v != nil { + return float64(*v), true + } + case *int: + if v != nil { + return float64(*v), true + } + case *int8: + if v != nil { + return float64(*v), true + } + case *int16: + if v != nil { + return float64(*v), true + } + case *int32: + if v != nil { + return float64(*v), true + } + case *int64: + if v != nil { + return float64(*v), true + } + case *uint: + if v != nil { + return float64(*v), true + } + case *uint8: + if v != nil { + return float64(*v), true + } + case *uint16: + if v != nil { + return float64(*v), true + } + case *uint32: + if v != nil { + return float64(*v), true + } + case *uint64: + if v != nil { + return float64(*v), true + } + case *uintptr: + if v != nil { + return float64(*v), true + } + case *json.Number: + if v != nil { + if f, err := v.Float64(); err == nil { + return f, true + } + } + } + return nil, false +} + +func (*propertyNumber) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyNumber) Validate(i interface{}) bool { + _, ok := i.(float64) + return ok +} + +func (v *Value) ValueNumber() (vv float64, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(float64) + return +} diff --git 
a/pkg/value/optional.go b/pkg/value/optional.go new file mode 100644 index 000000000..2b0b230ea --- /dev/null +++ b/pkg/value/optional.go @@ -0,0 +1,61 @@ +package value + +type OptionalValue struct { + t Type + v *Value +} + +func NewOptionalValue(t Type, v *Value) *OptionalValue { + if t == TypeUnknown || (v != nil && v.Type() != t) { + return nil + } + return &OptionalValue{ + t: t, + v: v, + } +} + +func OptionalValueFrom(v *Value) *OptionalValue { + if v.Type() == TypeUnknown { + return nil + } + return &OptionalValue{ + t: v.Type(), + v: v, + } +} + +func (ov *OptionalValue) Type() Type { + if ov == nil { + return TypeUnknown + } + return ov.t +} + +func (ov *OptionalValue) Value() *Value { + if ov == nil || ov.t == TypeUnknown || ov.v == nil { + return nil + } + return ov.v.Clone() +} + +func (ov *OptionalValue) TypeAndValue() (Type, *Value) { + return ov.Type(), ov.Value() +} + +func (ov *OptionalValue) SetValue(v *Value) { + if ov == nil || ov.t == TypeUnknown || (v != nil && ov.t != v.Type()) { + return + } + ov.v = v.Clone() +} + +func (ov *OptionalValue) Clone() *OptionalValue { + if ov == nil { + return nil + } + return &OptionalValue{ + t: ov.t, + v: ov.v.Clone(), + } +} diff --git a/pkg/value/optional_test.go b/pkg/value/optional_test.go new file mode 100644 index 000000000..19e0f6014 --- /dev/null +++ b/pkg/value/optional_test.go @@ -0,0 +1,330 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewOptionalValue(t *testing.T) { + type args struct { + t Type + v *Value + } + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + t: TypeString, + v: TypeString.ValueFrom("foo", nil), + }, + want: &OptionalValue{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + }, + { + name: "custom type", + args: args{ + t: Type("foo"), + v: &Value{t: Type("foo")}, + }, + want: &OptionalValue{t: Type("foo"), v: &Value{t: Type("foo")}}, + }, + { + name: "nil 
value", + args: args{ + t: Type("foo"), + }, + want: &OptionalValue{t: Type("foo"), v: nil}, + }, + { + name: "invalid value", + args: args{ + t: TypeNumber, + v: TypeString.ValueFrom("foo", nil), + }, + want: nil, + }, + { + name: "invalid type", + args: args{ + t: TypeUnknown, + v: TypeString.ValueFrom("foo", nil), + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewOptionalValue(tt.args.t, tt.args.v)) + }) + } +} + +func TestOptionalValueFrom(t *testing.T) { + type args struct { + v *Value + } + tests := []struct { + name string + args args + want *OptionalValue + }{ + { + name: "default type", + args: args{ + v: TypeString.ValueFrom("foo", nil), + }, + want: &OptionalValue{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + }, + { + name: "custom type", + args: args{ + v: &Value{t: Type("foo")}, + }, + want: &OptionalValue{t: Type("foo"), v: &Value{t: Type("foo")}}, + }, + { + name: "invalid value", + args: args{ + v: &Value{v: "string"}, + }, + want: nil, + }, + { + name: "nil value", + args: args{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, OptionalValueFrom(tt.args.v)) + }) + } +} + +func TestOptionalValue_Type(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want Type + }{ + { + name: "ok", + value: &OptionalValue{t: Type("foo")}, + want: Type("foo"), + }, + { + name: "empty", + value: &OptionalValue{}, + want: TypeUnknown, + }, + { + name: "nil", + value: nil, + want: TypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestOptionalValue_Value(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + want *Value + }{ + { + name: "ok", + value: &OptionalValue{t: TypeString, v: 
&Value{t: TypeString, v: "foobar"}}, + want: &Value{t: TypeString, v: "foobar"}, + }, + { + name: "empty", + value: &OptionalValue{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.Value() + assert.Equal(t, tt.want, res) + if res != nil { + assert.NotSame(t, tt.want, res) + } + }) + } +} + +func TestOptionalValue_TypeAndValue(t *testing.T) { + tests := []struct { + name string + value *OptionalValue + wantt Type + wantv *Value + }{ + { + name: "ok", + value: &OptionalValue{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, + wantt: TypeString, + wantv: &Value{t: TypeString, v: "foobar"}, + }, + { + name: "empty", + value: &OptionalValue{}, + wantt: TypeUnknown, + wantv: nil, + }, + { + name: "nil", + value: nil, + wantt: TypeUnknown, + wantv: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ty, tv := tt.value.TypeAndValue() + assert.Equal(t, tt.wantt, ty) + assert.Equal(t, tt.wantv, tv) + if tv != nil { + assert.NotSame(t, tt.wantv, tv) + } + }) + } +} + +func TestOptionalValue_SetValue(t *testing.T) { + type args struct { + v *Value + } + tests := []struct { + name string + value *OptionalValue + args args + invalid bool + }{ + { + name: "set", + value: &OptionalValue{ + t: TypeString, + v: &Value{t: TypeString, v: "foobar"}, + }, + args: args{v: &Value{t: TypeString, v: "bar"}}, + }, + { + name: "set to nil", + value: &OptionalValue{ + t: TypeString, + }, + args: args{v: &Value{t: TypeString, v: "bar"}}, + }, + { + name: "invalid value", + value: &OptionalValue{ + t: TypeNumber, + v: &Value{t: TypeNumber, v: 1}, + }, + args: args{v: &Value{t: TypeString, v: "bar"}}, + invalid: true, + }, + { + name: "nil value", + value: &OptionalValue{ + t: TypeNumber, + v: &Value{t: TypeNumber, v: 1}, + }, + }, + { + name: "empty", + value: &OptionalValue{}, + args: args{v: 
&Value{t: TypeString, v: "bar"}}, + invalid: true, + }, + { + name: "nil", + args: args{v: &Value{t: TypeString, v: "bar"}}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + var v *Value + if tt.value != nil { + v = tt.value.v + } + + tt.value.SetValue(tt.args.v) + + if tt.value != nil { + if tt.invalid { + assert.Same(t, v, tt.value.v) + } else { + assert.Equal(t, tt.args.v, tt.value.v) + if tt.args.v != nil { + assert.NotSame(t, tt.args.v, tt.value.v) + } + } + } + }) + } +} + +func TestOptionalValue_Clone(t *testing.T) { + tests := []struct { + name string + target *OptionalValue + }{ + { + name: "ok", + target: &OptionalValue{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + }, + { + name: "empty", + target: &OptionalValue{}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} diff --git a/pkg/value/polygon.go b/pkg/value/polygon.go new file mode 100644 index 000000000..4dccecfdf --- /dev/null +++ b/pkg/value/polygon.go @@ -0,0 +1,59 @@ +package value + +import "github.com/mitchellh/mapstructure" + +var TypePolygon Type = "polygon" + +type Polygon []Coordinates + +func PolygonFrom(rings [][]float64) Polygon { + p := make([]Coordinates, 0, len(rings)) + for _, ring := range rings { + p = append(p, CoordinatesFrom(ring)) + } + return p +} + +type propertyPolygon struct{} + +func (*propertyPolygon) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Polygon); ok { + return v, true + } + + if v, ok := i.(*Polygon); ok { + if v != nil { + return *v, true + } + return nil, false + } + + v := Polygon{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, true + } + + v2 := [][]float64{} + if err := mapstructure.Decode(i, &v); err == nil { + return PolygonFrom(v2), true + } + + return nil, 
false +} + +func (*propertyPolygon) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyPolygon) Validate(i interface{}) bool { + _, ok := i.(Polygon) + return ok +} + +func (v *Value) ValuePolygon() (vv Polygon, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(Polygon) + return +} diff --git a/pkg/value/rect.go b/pkg/value/rect.go new file mode 100644 index 000000000..90caf01df --- /dev/null +++ b/pkg/value/rect.go @@ -0,0 +1,49 @@ +package value + +import "github.com/mitchellh/mapstructure" + +var TypeRect Type = "rect" + +type Rect struct { + West float64 `json:"west" mapstructure:"west"` + South float64 `json:"south" mapstructure:"south"` + East float64 `json:"east" mapstructure:"east"` + North float64 `json:"north" mapstructure:"north"` +} + +type propertyRect struct{} + +func (*propertyRect) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(Rect); ok { + return v, true + } else if v, ok := i.(*Rect); ok { + if v != nil { + return *v, true + } + return nil, false + } + + v := Rect{} + if err := mapstructure.Decode(i, &v); err == nil { + return v, false + } + + return nil, false +} + +func (*propertyRect) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyRect) Validate(i interface{}) bool { + _, ok := i.(Rect) + return ok +} + +func (v *Value) ValueRect() (vv Rect, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(Rect) + return +} diff --git a/pkg/value/ref.go b/pkg/value/ref.go new file mode 100644 index 000000000..f065ee915 --- /dev/null +++ b/pkg/value/ref.go @@ -0,0 +1,40 @@ +package value + +import "github.com/reearth/reearth-backend/pkg/id" + +var TypeRef Type = "ref" + +type propertyRef struct{} + +func (*propertyRef) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(string); ok { + return v, true + } + if v, ok := i.(*string); ok { + return *v, true + } + if v, ok := i.(id.ID); ok { + return v.String(), true + } + if v, ok := i.(*id.ID); ok && v != nil { + return 
v.String(), true + } + return nil, false +} + +func (*propertyRef) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyRef) Validate(i interface{}) bool { + _, ok := i.(string) + return ok +} + +func (v *Value) ValueRef() (vv string, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(string) + return +} diff --git a/pkg/value/string.go b/pkg/value/string.go new file mode 100644 index 000000000..3979d0cb0 --- /dev/null +++ b/pkg/value/string.go @@ -0,0 +1,32 @@ +package value + +var TypeString Type = "string" + +type propertyString struct{} + +func (*propertyString) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(string); ok { + return v, true + } + if v, ok := i.(*string); ok { + return *v, true + } + return nil, false +} + +func (*propertyString) V2I(v interface{}) (interface{}, bool) { + return v, true +} + +func (*propertyString) Validate(i interface{}) bool { + _, ok := i.(string) + return ok +} + +func (v *Value) ValueString() (vv string, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(string) + return +} diff --git a/pkg/value/type.go b/pkg/value/type.go new file mode 100644 index 000000000..963747c4b --- /dev/null +++ b/pkg/value/type.go @@ -0,0 +1,53 @@ +package value + +type Type string + +type TypeProperty interface { + I2V(interface{}) (interface{}, bool) + V2I(interface{}) (interface{}, bool) + Validate(interface{}) bool +} + +type TypePropertyMap = map[Type]TypeProperty + +var TypeUnknown = Type("") + +var defaultTypes = TypePropertyMap{ + TypeBool: &propertyBool{}, + TypeCoordinates: &propertyCoordinates{}, + TypeLatLng: &propertyLatLng{}, + TypeLatLngHeight: &propertyLatLngHeight{}, + TypeNumber: &propertyNumber{}, + TypePolygon: &propertyPolygon{}, + TypeRect: &propertyRect{}, + TypeRef: &propertyRef{}, + TypeString: &propertyString{}, + TypeURL: &propertyURL{}, +} + +func (t Type) Default() bool { + _, ok := defaultTypes[t] + return ok +} + +func (t Type) ValueFrom(i interface{}, p TypePropertyMap) 
*Value { + if t == TypeUnknown || i == nil { + return nil + } + + if p != nil { + if vt, ok := p[t]; ok && vt != nil { + if v, ok2 := vt.I2V(i); ok2 { + return &Value{p: p, v: v, t: t} + } + } + } + + if vt, ok := defaultTypes[t]; ok && vt != nil { + if v, ok2 := vt.I2V(i); ok2 { + return &Value{p: p, v: v, t: t} + } + } + + return nil +} diff --git a/pkg/value/type_test.go b/pkg/value/type_test.go new file mode 100644 index 000000000..6f0d83ce0 --- /dev/null +++ b/pkg/value/type_test.go @@ -0,0 +1,117 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type tpmock struct { + TypeProperty +} + +func (*tpmock) I2V(i interface{}) (interface{}, bool) { + return i.(string) + "a", true +} + +func (*tpmock) V2I(v interface{}) (interface{}, bool) { + return v.(string) + "bar", true +} + +func TestType_Default(t *testing.T) { + tests := []struct { + name string + tr Type + want bool + }{ + { + name: "default", + tr: TypeString, + want: true, + }, + { + name: "custom", + tr: Type("foo"), + want: false, + }, + { + name: "unknown", + tr: TypeUnknown, + want: false, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.Default()) + }) + } +} + +func TestType_ValueFrom(t *testing.T) { + tpm := TypePropertyMap{ + Type("foo"): &tpmock{}, + } + + type args struct { + i interface{} + p TypePropertyMap + } + + tests := []struct { + name string + tr Type + args args + want *Value + }{ + { + name: "default type", + tr: TypeString, + args: args{ + i: "hoge", + }, + want: &Value{t: TypeString, v: "hoge"}, + }, + { + name: "custom type", + tr: Type("foo"), + args: args{ + i: "hoge", + p: tpm, + }, + want: &Value{p: tpm, t: Type("foo"), v: "hogea"}, + }, + { + name: "nil", + tr: TypeString, + args: args{}, + want: nil, + }, + { + name: "unknown type", + tr: TypeUnknown, + args: args{ + i: "hoge", + }, + want: nil, + }, + { + name: "unknown type + custom type", + tr: 
Type("bar"), + args: args{ + i: "hoge", + p: tpm, + }, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.ValueFrom(tt.args.i, tt.args.p)) + }) + } +} diff --git a/pkg/value/url.go b/pkg/value/url.go new file mode 100644 index 000000000..0745a7937 --- /dev/null +++ b/pkg/value/url.go @@ -0,0 +1,52 @@ +package value + +import "net/url" + +var TypeURL Type = "url" + +type propertyURL struct{} + +func (*propertyURL) I2V(i interface{}) (interface{}, bool) { + if v, ok := i.(url.URL); ok { + return &v, true + } + + if v, ok := i.(*url.URL); ok { + if v == nil { + return nil, false + } + return v, true + } + + if v, ok := i.(string); ok { + if u, err := url.Parse(v); err == nil { + return u, true + } + } + + return nil, false +} + +func (*propertyURL) V2I(v interface{}) (interface{}, bool) { + u, ok := v.(*url.URL) + if !ok { + return nil, false + } + if u == nil { + return "", true + } + return u.String(), true +} + +func (*propertyURL) Validate(i interface{}) bool { + _, ok := i.(*url.URL) + return ok +} + +func (v *Value) ValueURL() (vv *url.URL, ok bool) { + if v == nil { + return + } + vv, ok = v.v.(*url.URL) + return +} diff --git a/pkg/value/value.go b/pkg/value/value.go new file mode 100644 index 000000000..79cea6561 --- /dev/null +++ b/pkg/value/value.go @@ -0,0 +1,82 @@ +package value + +import ( + "encoding/json" +) + +type Value struct { + p TypePropertyMap + v interface{} + t Type +} + +func (v *Value) IsEmpty() bool { + return v == nil || v.t == TypeUnknown || v.v == nil +} + +func (v *Value) Clone() *Value { + if v.IsEmpty() { + return nil + } + return v.t.ValueFrom(v.v, v.p) +} + +func (v *Value) Value() interface{} { + if v == nil { + return nil + } + return v.v +} + +func (v *Value) Type() Type { + if v == nil { + return TypeUnknown + } + return v.t +} + +func (v *Value) TypeProperty() (tp TypeProperty) { + if v.IsEmpty() { + return + } + if v.p != nil { + 
if tp, ok := v.p[v.t]; ok { + return tp + } + } + if tp, ok := defaultTypes[v.t]; ok { + return tp + } + return +} + +// Interface converts the value into generic representation +func (v *Value) Interface() interface{} { + if v == nil || v.t == TypeUnknown { + return nil + } + + if tp := v.TypeProperty(); tp != nil { + if i, ok2 := tp.V2I(v.v); ok2 { + return i + } + } + + return nil +} + +func (v *Value) Validate() bool { + if v == nil || v.t == TypeUnknown { + return false + } + + if tp := v.TypeProperty(); tp != nil { + return tp.Validate(v) + } + + return false +} + +func (v *Value) MarshalJSON() ([]byte, error) { + return json.Marshal(v.Interface()) +} diff --git a/pkg/value/value_test.go b/pkg/value/value_test.go new file mode 100644 index 000000000..b71a78fb6 --- /dev/null +++ b/pkg/value/value_test.go @@ -0,0 +1,264 @@ +package value + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValue_IsEmpty(t *testing.T) { + tests := []struct { + name string + value *Value + want bool + }{ + { + name: "empty", + want: true, + }, + { + name: "nil", + want: true, + }, + { + name: "non-empty", + value: &Value{ + t: Type("hoge"), + v: "foo", + }, + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.IsEmpty()) + }) + } +} + +func TestValue_Clone(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + Type("hoge"): tp, + } + + tests := []struct { + name string + value *Value + want *Value + }{ + { + name: "ok", + value: &Value{ + t: TypeString, + v: "foo", + }, + want: &Value{ + t: TypeString, + v: "foo", + }, + }, + { + name: "custom type property", + value: &Value{ + t: Type("hoge"), + v: "foo", + p: tpm, + }, + want: &Value{ + t: Type("hoge"), + v: "fooa", + p: tpm, + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt 
:= tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Clone()) + }) + } +} + +func TestValue_Value(t *testing.T) { + u, _ := url.Parse("https://reearth.io") + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "ok", + value: &Value{t: TypeURL, v: u}, + want: u, + }, + { + name: "empty", + value: &Value{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.want == nil { + assert.Nil(t, tt.value.Value()) + } else { + assert.Same(t, tt.want, tt.value.Value()) + } + }) + } +} + +func TestValue_Type(t *testing.T) { + tests := []struct { + name string + value *Value + want Type + }{ + { + name: "ok", + value: &Value{t: TypeString}, + want: TypeString, + }, + { + name: "empty", + value: &Value{}, + want: TypeUnknown, + }, + { + name: "nil", + want: TypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestValue_TypeProperty(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + Type("hoge"): tp, + } + + tests := []struct { + name string + value *Value + want TypeProperty + }{ + { + name: "default type", + value: &Value{ + v: "string", + t: TypeString, + }, + want: defaultTypes[TypeString], + }, + { + name: "custom type", + value: &Value{ + v: "string", + t: Type("hoge"), + p: tpm, + }, + want: tp, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.value.TypeProperty() + if tt.want == nil { + assert.Nil(t, res) + } else { + assert.Same(t, tt.want, res) + } + }) + } +} + +func TestValue_Interface(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + "foo": tp, + } + + tests := []struct { + name string + value *Value + 
want interface{} + }{ + { + name: "string", + value: &Value{t: TypeString, v: "hoge"}, + want: "hoge", + }, + { + name: "latlng", + value: &Value{t: TypeLatLng, v: LatLng{Lat: 1, Lng: 2}}, + want: LatLng{Lat: 1, Lng: 2}, + }, + { + name: "custom", + value: &Value{ + p: tpm, + t: Type("foo"), + v: "foo", + }, + want: "foobar", + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.value.Interface()) + }) + } +} diff --git a/schema.graphql b/schema.graphql index 567207e64..746cc817a 100644 --- a/schema.graphql +++ b/schema.graphql @@ -524,7 +524,7 @@ type PropertyGroupList { } type PropertyField { - id: PropertySchemaFieldID! + id: String! parentId: ID! schemaId: PropertySchemaID! fieldId: PropertySchemaFieldID! From 2ddbc8b2dcbd10484f9febc8308aa1b5f82a5082 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 6 Dec 2021 17:03:49 +0900 Subject: [PATCH 117/253] fix: numbers are not decoded from gql to value --- .../gql/gqlmodel/convert_property_test.go | 44 +++++++++++++ .../adapter/gql/gqlmodel/convert_value.go | 63 ++++++++++++++----- .../gql/gqlmodel/convert_value_test.go | 13 ++++ 3 files changed, 106 insertions(+), 14 deletions(-) create mode 100644 internal/adapter/gql/gqlmodel/convert_property_test.go create mode 100644 internal/adapter/gql/gqlmodel/convert_value_test.go diff --git a/internal/adapter/gql/gqlmodel/convert_property_test.go b/internal/adapter/gql/gqlmodel/convert_property_test.go new file mode 100644 index 000000000..400b1e358 --- /dev/null +++ b/internal/adapter/gql/gqlmodel/convert_property_test.go @@ -0,0 +1,44 @@ +package gqlmodel + +import ( + "encoding/json" + "testing" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestFromPropertyValueAndType(t *testing.T) { + type args struct { + v interface{} + t ValueType + } + tests := 
[]struct { + name string + args args + want *property.Value + }{ + { + name: "number", + args: args{ + v: 1.1, + t: ValueTypeNumber, + }, + want: property.ValueTypeNumber.ValueFrom(1.1), + }, + { + name: "json number", + args: args{ + v: json.Number("1.1"), + t: ValueTypeNumber, + }, + want: property.ValueTypeNumber.ValueFrom(1.1), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, FromPropertyValueAndType(tt.args.v, tt.args.t)) + }) + } +} diff --git a/internal/adapter/gql/gqlmodel/convert_value.go b/internal/adapter/gql/gqlmodel/convert_value.go index 3ebe8202a..bf2ed7c54 100644 --- a/internal/adapter/gql/gqlmodel/convert_value.go +++ b/internal/adapter/gql/gqlmodel/convert_value.go @@ -82,31 +82,29 @@ func gqlValueToValueInterface(v interface{}) interface{} { return nil } switch v2 := v.(type) { - case bool: - return v2 - case float64: - return v2 - case string: - return v2 - case *url.URL: - return v2 case LatLng: return value.LatLng{ Lat: v2.Lat, Lng: v2.Lng, } + case *LatLng: + if v2 == nil { + return nil + } + return value.LatLng{ + Lat: v2.Lat, + Lng: v2.Lng, + } case LatLngHeight: return value.LatLngHeight{ Lat: v2.Lat, Lng: v2.Lng, Height: v2.Height, } - case *LatLng: - return value.LatLng{ - Lat: v2.Lat, - Lng: v2.Lng, - } case *LatLngHeight: + if v2 == nil { + return nil + } return value.LatLngHeight{ Lat: v2.Lat, Lng: v2.Lng, @@ -122,6 +120,19 @@ func gqlValueToValueInterface(v interface{}) interface{} { }) } return value.Coordinates(res) + case []*LatLngHeight: + res := make([]value.LatLngHeight, 0, len(v2)) + for _, c := range v2 { + if c == nil { + continue + } + res = append(res, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + return value.Coordinates(res) case [][]LatLngHeight: res := make([]value.Coordinates, 0, len(v2)) for _, d := range v2 { @@ -136,6 +147,30 @@ func gqlValueToValueInterface(v interface{}) interface{} { res = append(res, coord) } return 
value.Polygon(res) + case [][]*LatLngHeight: + res := make([]value.Coordinates, 0, len(v2)) + for _, d := range v2 { + coord := make([]value.LatLngHeight, 0, len(d)) + for _, c := range d { + if c == nil { + continue + } + coord = append(coord, value.LatLngHeight{ + Lat: c.Lat, + Lng: c.Lng, + Height: c.Height, + }) + } + res = append(res, coord) + } + return value.Polygon(res) + case Rect: + return value.Rect{ + West: v2.West, + East: v2.East, + North: v2.North, + South: v2.South, + } case *Rect: return value.Rect{ West: v2.West, @@ -144,7 +179,7 @@ func gqlValueToValueInterface(v interface{}) interface{} { South: v2.South, } } - return nil + return v } func ToValueType(t value.Type) ValueType { diff --git a/internal/adapter/gql/gqlmodel/convert_value_test.go b/internal/adapter/gql/gqlmodel/convert_value_test.go new file mode 100644 index 000000000..fac2f6098 --- /dev/null +++ b/internal/adapter/gql/gqlmodel/convert_value_test.go @@ -0,0 +1,13 @@ +package gqlmodel + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func Test_FromValueType(t *testing.T) { + assert.Equal(t, value.TypeString, FromValueType(ValueTypeString)) + assert.Equal(t, value.TypeNumber, FromValueType(ValueTypeNumber)) +} From 91f9b39b27e3fd52af090bac3bbc6230041c5c47 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 13 Dec 2021 16:10:12 +0900 Subject: [PATCH 118/253] docs: add pkg.go.dev badge to readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e4cce25ad..79ea7382d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # reearth-backend -[![main](https://github.com/reearth/reearth-backend/actions/workflows/main.yml/badge.svg)](https://github.com/reearth/reearth-backend/actions/workflows/main.yml) [![codecov](https://codecov.io/gh/reearth/reearth-backend/branch/main/graph/badge.svg?token=4UV79645UP)](https://codecov.io/gh/reearth/reearth-backend) [![Go Report 
Card](https://goreportcard.com/badge/github.com/reearth/reearth-backend)](https://goreportcard.com/report/github.com/reearth/reearth-backend) +[![main](https://github.com/reearth/reearth-backend/actions/workflows/main.yml/badge.svg)](https://github.com/reearth/reearth-backend/actions/workflows/main.yml) [![codecov](https://codecov.io/gh/reearth/reearth-backend/branch/main/graph/badge.svg?token=4UV79645UP)](https://codecov.io/gh/reearth/reearth-backend) [![Go Report Card](https://goreportcard.com/badge/github.com/reearth/reearth-backend)](https://goreportcard.com/report/github.com/reearth/reearth-backend) [![Go Reference](https://pkg.go.dev/badge/github.com/reearth/reearth-backend.svg)](https://pkg.go.dev/github.com/reearth/reearth-backend) This is the back-end repository of [Re:Earth](https://github.com/reearth/reearth). From 63c5823a173fc3249dcef56d295c1c18358db144 Mon Sep 17 00:00:00 2001 From: Basel Issmail <78056580+issmail-basel@users.noreply.github.com> Date: Wed, 15 Dec 2021 10:49:41 +0200 Subject: [PATCH 119/253] feat: camera limiter (#87) * Edit manifest file * Change default values * Change label names * Fix manifest bug * Edit description * Make ids unique * Make ids unique * Change descriptions * Add modifications Co-authored-by: basel.issmail --- pkg/builtin/manifest.yml | 32 ++++++++++++++++++++++++++++++++ pkg/builtin/manifest_ja.yml | 19 +++++++++++++++++++ 2 files changed, 51 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 3a38d8b30..2a0e238a2 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -78,6 +78,38 @@ extensions: type: string title: Cesium Ion API access token description: Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project. + - id: cameraLimiter + title: Camera Limiter + description: Set the camera limiting box. 
+ fields: + - id: cameraLimitterEnabled + type: bool + title: Enable + defaultValue: false + description: Enable camera limiter. + - id: cameraLimitterShowHelper + type: bool + title: Show Helper + defaultValue: false + description: Display the limiter boundaries. + - id: cameraLimitterTargetArea + type: camera + title: Target max height + description: The base position of the camera movement range. This position is the center point of the limit box in the horizontal and depth directions, and is the maximum height of the movable range. The camera will not be able to zoom out beyond the height specified here. + - id: cameraLimitterTargetWidth + type: number + title: Target width + description: Specifies the width (longitude direction) of the box that represents the limiter boundaries. + min: 5 + defaultValue: 1000000 + suffix: m + - id: cameraLimitterTargetLength + type: number + title: Target length + description: Specifies the depth (latitude direction) of the box that represents the limiter boundaries. + min: 5 + defaultValue: 1000000 + suffix: m - id: tiles title: Tiles description: You may change the look of the Earth by obtaining map tile data and setting it here. 
diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 4eb66a968..b2a606a3e 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -35,6 +35,25 @@ extensions: ion: title: Cesium Icon APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ description: ่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚ + cameraLimiter: + title: ใ‚ซใƒกใƒฉ็ฏ„ๅ›ฒๅˆถ้™ + description: ใ‚ซใƒกใƒฉใฎ็งปๅ‹•ใงใใ‚‹็ฏ„ๅ›ฒใ‚’ๅˆถ้™ใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ + fields: + cameraLimitterEnabled: + title: ๆœ‰ๅŠน + description: ใ‚ซใƒกใƒฉใฎ็ฏ„ๅ›ฒๅˆถ้™ใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚ + cameraLimitterShowHelper: + title: ็ฏ„ๅ›ฒใ‚’่กจ็คบ + description: ใ‚ซใƒกใƒฉใฎ็งปๅ‹•็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใ‚’่กจ็คบใ—ใพใ™ใ€‚ + cameraLimitterTargetArea: + title: ๅŸบๆบ–ไฝ็ฝฎ + description: ็งปๅ‹•็ฏ„ๅ›ฒใฎๅŸบๆบ–ใจใชใ‚‹ไฝ็ฝฎใงใ™ใ€‚ใ“ใฎไฝ็ฝฎใŒๅˆถ้™็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใฎๆจชๆ–นๅ‘ใจๅฅฅ่กŒใๆ–นๅ‘ใฎไธญๅฟƒ็‚นใจใชใ‚Šใ€ใ‹ใค็งปๅ‹•ๅฏ่ƒฝ็ฏ„ๅ›ฒใซใŠใ‘ใ‚‹ๆœ€ๅคงใฎ้ซ˜ใ•ใจใชใ‚Šใพใ™ใ€‚ใ“ใ“ใงๆŒ‡ๅฎšใ—ใŸ้ซ˜ใ•ไปฅไธŠใซใ‚ซใƒกใƒฉใ‚’ใ‚บใƒผใƒ ใ‚ขใ‚ฆใƒˆใ™ใ‚‹ใ“ใจใŒใงใใชใใชใ‚Šใพใ™ใ€‚ + cameraLimitterTargetWidth: + title: ็ตŒๅบฆใฎ็ฏ„ๅ›ฒ + description: ๅˆถ้™็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใฎๅน…๏ผˆ็ตŒๅบฆใฎๆ–นๅ‘๏ผ‰ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + cameraLimitterTargetLength: + title: ็ทฏๅบฆใฎ็ฏ„ๅ›ฒ + description: ๅˆถ้™็ฏ„ๅ›ฒใ‚’่กจใ™ใƒœใƒƒใ‚ฏใ‚นใฎๅฅฅ่กŒใ๏ผˆ็ทฏๅบฆใฎๆ–นๅ‘๏ผ‰ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ tiles: title: ใ‚ฟใ‚คใƒซ description: ๆ‰‹ๆŒใกใฎใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ‚’ไฝฟ็”จใ—ใ€ๅœฐ็ƒไธŠใซ่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ From c4fb9a5ad527fe651ef5c6077ac4211778462d07 Mon Sep 17 00:00:00 2001 From: HideBa <49897538+HideBa@users.noreply.github.com> Date: Thu, 16 Dec 2021 23:08:58 -0500 Subject: [PATCH 120/253] fix: layers have their own tags separate from the scene (#90) Co-authored-by: HideBa Co-authored-by: 
rot1024 --- .vscode/settings.json | 3 - Makefile | 2 +- internal/adapter/gql/generated.go | 1154 ++++++++++++++--- .../adapter/gql/gqlmodel/convert_layer.go | 69 +- internal/adapter/gql/gqlmodel/convert_tag.go | 3 +- internal/adapter/gql/gqlmodel/models_gen.go | 62 +- internal/adapter/gql/loader_layer.go | 18 + internal/adapter/gql/loader_tag.go | 67 - internal/adapter/gql/resolver_layer.go | 86 +- .../adapter/gql/resolver_mutation_layer.go | 26 + internal/adapter/gql/resolver_mutation_tag.go | 22 +- internal/adapter/gql/resolver_tag.go | 55 + internal/infrastructure/memory/layer.go | 6 +- internal/infrastructure/memory/layer_test.go | 7 +- internal/infrastructure/memory/tag.go | 28 +- internal/infrastructure/memory/tag_test.go | 66 +- internal/infrastructure/mongo/layer.go | 38 +- .../infrastructure/mongo/mongodoc/layer.go | 119 +- internal/infrastructure/mongo/mongodoc/tag.go | 13 + .../infrastructure/mongo/mongodoc/tag_test.go | 8 +- internal/infrastructure/mongo/tag.go | 25 +- internal/usecase/interactor/layer.go | 50 +- internal/usecase/interactor/tag.go | 160 +-- internal/usecase/interfaces/layer.go | 3 +- internal/usecase/interfaces/tag.go | 10 +- internal/usecase/repo/tag.go | 4 +- pkg/layer/builder.go | 3 +- pkg/layer/group.go | 22 +- pkg/layer/group_builder.go | 3 +- pkg/layer/group_builder_test.go | 4 +- pkg/layer/group_test.go | 17 +- pkg/layer/item.go | 22 +- pkg/layer/item_builder.go | 3 +- pkg/layer/item_builder_test.go | 4 +- pkg/layer/item_test.go | 36 - pkg/layer/layer.go | 7 +- pkg/layer/tag.go | 226 ++++ pkg/layer/tag_test.go | 1086 ++++++++++++++++ pkg/tag/item.go | 12 + pkg/tag/item_builder.go | 5 + schema.graphql | 66 +- 41 files changed, 2857 insertions(+), 763 deletions(-) create mode 100644 pkg/layer/tag.go create mode 100644 pkg/layer/tag_test.go diff --git a/.vscode/settings.json b/.vscode/settings.json index 87392e61f..21b1151ee 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,9 +1,6 @@ { "editor.formatOnSave": true, 
"go.lintTool": "golangci-lint", - "go.lintFlags": [ - "--fast" - ], "yaml.format.enable": true, "yaml.completion": true, "yaml.validate": true, diff --git a/Makefile b/Makefile index c27e1a738..6fb6cdeb9 100644 --- a/Makefile +++ b/Makefile @@ -17,7 +17,7 @@ gen: go generate ./... gen/gql: - go generate ./internal/graphql + go generate ./internal/adapter/gql gen/builtin: go generate ./pkg/builtin diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 869bd4920..1972bd813 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -50,6 +50,8 @@ type ResolverRoot interface { InfoboxField() InfoboxFieldResolver LayerGroup() LayerGroupResolver LayerItem() LayerItemResolver + LayerTagGroup() LayerTagGroupResolver + LayerTagItem() LayerTagItemResolver MergedInfobox() MergedInfoboxResolver MergedInfoboxField() MergedInfoboxFieldResolver MergedLayer() MergedLayerResolver @@ -73,6 +75,7 @@ type ResolverRoot interface { Scene() SceneResolver ScenePlugin() ScenePluginResolver SceneWidget() SceneWidgetResolver + TagGroup() TagGroupResolver TagItem() TagItemResolver Team() TeamResolver TeamMember() TeamMemberResolver @@ -191,7 +194,8 @@ type ComplexityRoot struct { } CreateTagItemPayload struct { - Tag func(childComplexity int) int + Parent func(childComplexity int) int + Tag func(childComplexity int) int } CreateTeamPayload struct { @@ -364,7 +368,6 @@ type ComplexityRoot struct { Scene func(childComplexity int) int SceneID func(childComplexity int) int ScenePlugin func(childComplexity int) int - TagIds func(childComplexity int) int Tags func(childComplexity int) int } @@ -387,10 +390,20 @@ type ComplexityRoot struct { Scene func(childComplexity int) int SceneID func(childComplexity int) int ScenePlugin func(childComplexity int) int - TagIds func(childComplexity int) int Tags func(childComplexity int) int } + LayerTagGroup struct { + Children func(childComplexity int) int + Tag func(childComplexity int) int + 
TagID func(childComplexity int) int + } + + LayerTagItem struct { + Tag func(childComplexity int) int + TagID func(childComplexity int) int + } + MergedInfobox struct { Fields func(childComplexity int) int Property func(childComplexity int) int @@ -825,7 +838,8 @@ type ComplexityRoot struct { } RemoveTagPayload struct { - TagID func(childComplexity int) int + TagID func(childComplexity int) int + UpdatedLayers func(childComplexity int) int } RemoveWidgetPayload struct { @@ -896,19 +910,25 @@ type ComplexityRoot struct { TagGroup struct { ID func(childComplexity int) int Label func(childComplexity int) int + Layers func(childComplexity int) int + Scene func(childComplexity int) int SceneID func(childComplexity int) int + TagIds func(childComplexity int) int Tags func(childComplexity int) int } TagItem struct { ID func(childComplexity int) int Label func(childComplexity int) int + Layers func(childComplexity int) int LinkedDataset func(childComplexity int) int LinkedDatasetField func(childComplexity int) int LinkedDatasetFieldID func(childComplexity int) int LinkedDatasetID func(childComplexity int) int LinkedDatasetSchema func(childComplexity int) int LinkedDatasetSchemaID func(childComplexity int) int + Parent func(childComplexity int) int + ParentID func(childComplexity int) int SceneID func(childComplexity int) int } @@ -1093,8 +1113,6 @@ type LayerGroupResolver interface { Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) - - Tags(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Tag, error) } type LayerItemResolver interface { Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) @@ -1105,8 +1123,12 @@ type LayerItemResolver interface { Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) Scene(ctx 
context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) - - Tags(ctx context.Context, obj *gqlmodel.LayerItem) ([]gqlmodel.Tag, error) +} +type LayerTagGroupResolver interface { + Tag(ctx context.Context, obj *gqlmodel.LayerTagGroup) (gqlmodel.Tag, error) +} +type LayerTagItemResolver interface { + Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) } type MergedInfoboxResolver interface { Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) @@ -1305,10 +1327,17 @@ type SceneWidgetResolver interface { Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) } +type TagGroupResolver interface { + Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) + Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) + Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) +} type TagItemResolver interface { LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetSchema, error) LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) + Parent(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.TagGroup, error) + Layers(ctx context.Context, obj *gqlmodel.TagItem) ([]gqlmodel.Layer, error) } type TeamResolver interface { Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) @@ -1666,6 +1695,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.CreateTagGroupPayload.Tag(childComplexity), true + case "CreateTagItemPayload.parent": + if 
e.complexity.CreateTagItemPayload.Parent == nil { + break + } + + return e.complexity.CreateTagItemPayload.Parent(childComplexity), true + case "CreateTagItemPayload.tag": if e.complexity.CreateTagItemPayload.Tag == nil { break @@ -2420,13 +2456,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerGroup.ScenePlugin(childComplexity), true - case "LayerGroup.tagIds": - if e.complexity.LayerGroup.TagIds == nil { - break - } - - return e.complexity.LayerGroup.TagIds(childComplexity), true - case "LayerGroup.tags": if e.complexity.LayerGroup.Tags == nil { break @@ -2560,19 +2589,47 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerItem.ScenePlugin(childComplexity), true - case "LayerItem.tagIds": - if e.complexity.LayerItem.TagIds == nil { + case "LayerItem.tags": + if e.complexity.LayerItem.Tags == nil { break } - return e.complexity.LayerItem.TagIds(childComplexity), true + return e.complexity.LayerItem.Tags(childComplexity), true - case "LayerItem.tags": - if e.complexity.LayerItem.Tags == nil { + case "LayerTagGroup.children": + if e.complexity.LayerTagGroup.Children == nil { break } - return e.complexity.LayerItem.Tags(childComplexity), true + return e.complexity.LayerTagGroup.Children(childComplexity), true + + case "LayerTagGroup.tag": + if e.complexity.LayerTagGroup.Tag == nil { + break + } + + return e.complexity.LayerTagGroup.Tag(childComplexity), true + + case "LayerTagGroup.tagId": + if e.complexity.LayerTagGroup.TagID == nil { + break + } + + return e.complexity.LayerTagGroup.TagID(childComplexity), true + + case "LayerTagItem.tag": + if e.complexity.LayerTagItem.Tag == nil { + break + } + + return e.complexity.LayerTagItem.Tag(childComplexity), true + + case "LayerTagItem.tagId": + if e.complexity.LayerTagItem.TagID == nil { + break + } + + return e.complexity.LayerTagItem.TagID(childComplexity), true case "MergedInfobox.fields": if 
e.complexity.MergedInfobox.Fields == nil { @@ -5222,6 +5279,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.RemoveTagPayload.TagID(childComplexity), true + case "RemoveTagPayload.updatedLayers": + if e.complexity.RemoveTagPayload.UpdatedLayers == nil { + break + } + + return e.complexity.RemoveTagPayload.UpdatedLayers(childComplexity), true + case "RemoveWidgetPayload.scene": if e.complexity.RemoveWidgetPayload.Scene == nil { break @@ -5549,6 +5613,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TagGroup.Label(childComplexity), true + case "TagGroup.layers": + if e.complexity.TagGroup.Layers == nil { + break + } + + return e.complexity.TagGroup.Layers(childComplexity), true + + case "TagGroup.scene": + if e.complexity.TagGroup.Scene == nil { + break + } + + return e.complexity.TagGroup.Scene(childComplexity), true + case "TagGroup.sceneId": if e.complexity.TagGroup.SceneID == nil { break @@ -5556,6 +5634,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TagGroup.SceneID(childComplexity), true + case "TagGroup.tagIds": + if e.complexity.TagGroup.TagIds == nil { + break + } + + return e.complexity.TagGroup.TagIds(childComplexity), true + case "TagGroup.tags": if e.complexity.TagGroup.Tags == nil { break @@ -5577,6 +5662,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TagItem.Label(childComplexity), true + case "TagItem.layers": + if e.complexity.TagItem.Layers == nil { + break + } + + return e.complexity.TagItem.Layers(childComplexity), true + case "TagItem.linkedDataset": if e.complexity.TagItem.LinkedDataset == nil { break @@ -5619,6 +5711,20 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.TagItem.LinkedDatasetSchemaID(childComplexity), true + case "TagItem.parent": + if 
e.complexity.TagItem.Parent == nil { + break + } + + return e.complexity.TagItem.Parent(childComplexity), true + + case "TagItem.parentId": + if e.complexity.TagItem.ParentID == nil { + break + } + + return e.complexity.TagItem.ParentID(childComplexity), true + case "TagItem.sceneId": if e.complexity.TagItem.SceneID == nil { break @@ -6461,7 +6567,6 @@ enum PluginExtensionType { INFOBOX } - type PluginExtension { extensionId: PluginExtensionID! pluginId: PluginID! @@ -6803,12 +6908,9 @@ interface Layer { plugin: Plugin extension: PluginExtension scenePlugin: ScenePlugin - tagIds: [ID!]! - tags: [Tag!]! @goField(forceResolver: true) + tags: [LayerTag!]! } -union Layers = LayerItem | LayerGroup - enum LayerEncodingFormat { KML CZML @@ -6829,6 +6931,7 @@ type LayerItem implements Layer { # parentId will not be always set parentId: ID linkedDatasetId: ID + tags: [LayerTag!]! parent: LayerGroup @goField(forceResolver: true) property: Property @goField(forceResolver: true) plugin: Plugin @goField(forceResolver: true) @@ -6837,8 +6940,6 @@ type LayerItem implements Layer { merged: MergedLayer @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) - tagIds: [ID!]! - tags: [Tag!]! @goField(forceResolver: true) } type LayerGroup implements Layer { @@ -6855,6 +6956,7 @@ type LayerGroup implements Layer { linkedDatasetSchemaId: ID root: Boolean! layerIds: [ID!]! + tags: [LayerTag!]! parent: LayerGroup @goField(forceResolver: true) property: Property @goField(forceResolver: true) plugin: Plugin @goField(forceResolver: true) @@ -6863,8 +6965,6 @@ type LayerGroup implements Layer { layers: [Layer]! @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) - tagIds: [ID!]! - tags: [Tag!]! 
@goField(forceResolver: true) } type Infobox { @@ -6899,6 +6999,22 @@ type InfoboxField { scenePlugin: ScenePlugin @goField(forceResolver: true) } +interface LayerTag { + tagId: ID! + tag: Tag +} + +type LayerTagItem implements LayerTag { + tagId: ID! + tag: Tag @goField(forceResolver: true) +} + +type LayerTagGroup implements LayerTag { + tagId: ID! + children: [LayerTagItem!]! + tag: Tag @goField(forceResolver: true) +} + type MergedLayer { originalId: ID! parentId: ID @@ -6933,29 +7049,34 @@ interface Tag { id: ID! sceneId: ID! label: String! + layers: [Layer!]! @goField(forceResolver: true) } type TagItem implements Tag { id: ID! sceneId: ID! label: String! + parentId: ID linkedDatasetID: ID linkedDatasetSchemaID: ID linkedDatasetFieldID: ID linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) linkedDatasetField: DatasetField @goField(forceResolver: true) + parent: TagGroup @goField(forceResolver: true) + layers: [Layer!]! @goField(forceResolver: true) } type TagGroup implements Tag { id: ID! sceneId: ID! label: String! - tags: [ID!] + tagIds: [ID!] + tags: [TagItem!]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + layers: [Layer!]! @goField(forceResolver: true) } -union Tags = TagItem | TagGroup - type Cluster { id: ID! name: String! @@ -7319,6 +7440,7 @@ input AddDatasetSchemaInput { input CreateTagItemInput { sceneId: ID! label: String! + parent: ID linkedDatasetSchemaID: ID linkedDatasetID: ID linkedDatasetField: ID @@ -7578,6 +7700,7 @@ type AddDatasetSchemaPayload { type CreateTagItemPayload { tag: TagItem! + parent: TagGroup } type CreateTagGroupPayload { @@ -7596,16 +7719,17 @@ type UpdateTagPayload { tag: Tag! } -type AttachTagToLayerPayload{ +type AttachTagToLayerPayload { layer: Layer! } -type DetachTagFromLayerPayload{ +type DetachTagFromLayerPayload { layer: Layer! } -type RemoveTagPayload{ +type RemoveTagPayload { tagId: ID! + updatedLayers: [Layer!]! 
} type AddClusterPayload { @@ -7618,7 +7742,7 @@ type UpdateClusterPayload { cluster: Cluster! } -type RemoveClusterPayload{ +type RemoveClusterPayload { scene: Scene! clusterId: ID! } @@ -7754,7 +7878,9 @@ type Mutation { createScene(input: CreateSceneInput!): CreateScenePayload addWidget(input: AddWidgetInput!): AddWidgetPayload updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload - updateWidgetAlignSystem(input: UpdateWidgetAlignSystemInput!): UpdateWidgetAlignSystemPayload + updateWidgetAlignSystem( + input: UpdateWidgetAlignSystemInput! + ): UpdateWidgetAlignSystemPayload removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload installPlugin(input: InstallPluginInput!): InstallPluginPayload uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload @@ -7777,7 +7903,9 @@ type Mutation { input: RemoveDatasetSchemaInput! ): RemoveDatasetSchemaPayload importDataset(input: ImportDatasetInput!): ImportDatasetPayload - importDatasetFromGoogleSheet(input: ImportDatasetFromGoogleSheetInput!): ImportDatasetPayload + importDatasetFromGoogleSheet( + input: ImportDatasetFromGoogleSheetInput! + ): ImportDatasetPayload addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload # Property @@ -7811,8 +7939,12 @@ type Mutation { # Tag createTagItem(input: CreateTagItemInput!): CreateTagItemPayload createTagGroup(input: CreateTagGroupInput!): CreateTagGroupPayload - attachTagItemToGroup(input: AttachTagItemToGroupInput!): AttachTagItemToGroupPayload - detachTagItemFromGroup(input: DetachTagItemFromGroupInput!): DetachTagItemFromGroupPayload + attachTagItemToGroup( + input: AttachTagItemToGroupInput! + ): AttachTagItemToGroupPayload + detachTagItemFromGroup( + input: DetachTagItemFromGroupInput! 
+ ): DetachTagItemFromGroupPayload updateTag(input: UpdateTagInput!): UpdateTagPayload removeTag(input: RemoveTagInput!): RemoveTagPayload } @@ -11236,6 +11368,38 @@ func (ec *executionContext) _CreateTagItemPayload_tag(ctx context.Context, field return ec.marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx, field.Selections, res) } +func (ec *executionContext) _CreateTagItemPayload_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagItemPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "CreateTagItemPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Parent, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTeamPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -14571,6 +14735,41 @@ func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field grap return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) } +func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != 
nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tags, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.LayerTag) + fc.Result = res + return ec.marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -14830,76 +15029,6 @@ func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field g return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.TagIds, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp 
== nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.([]*id.ID) - fc.Result = res - return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) -} - -func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.LayerGroup().Tags(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.([]gqlmodel.Tag) - fc.Result = res - return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) -} - func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -15232,6 +15361,41 @@ func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, fiel return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc 
:= &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Tags, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.LayerTag) + fc.Result = res + return ec.marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -15488,7 +15652,7 @@ func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field gr return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerTagGroup_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15496,7 +15660,7 @@ func (ec *executionContext) _LayerItem_tagIds(ctx context.Context, field graphql } }() fc := &graphql.FieldContext{ - Object: "LayerItem", + Object: "LayerTagGroup", Field: field, Args: nil, IsMethod: false, @@ -15506,7 +15670,7 @@ func (ec *executionContext) _LayerItem_tagIds(ctx context.Context, field graphql ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.TagIds, nil + return obj.TagID, nil }) if err != nil { ec.Error(ctx, err) @@ -15518,12 +15682,12 @@ func (ec *executionContext) _LayerItem_tagIds(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.([]*id.ID) + res := resTmp.(id.ID) fc.Result = res - return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { +func (ec *executionContext) _LayerTagGroup_children(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15531,7 +15695,42 @@ func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.C } }() fc := &graphql.FieldContext{ - Object: "LayerItem", + Object: "LayerTagGroup", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Children, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.LayerTagItem) + fc.Result = res + return ec.marshalNLayerTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItemแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_LayerTagGroup_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerTagGroup", Field: field, Args: nil, IsMethod: true, @@ -15541,7 +15740,39 @@ func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.C ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.LayerItem().Tags(rctx, obj) + return ec.resolvers.LayerTagGroup().Tag(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Tag) + fc.Result = res + return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerTagItem_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerTagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.TagID, nil }) if err != nil { ec.Error(ctx, err) @@ -15553,9 +15784,41 @@ func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.([]gqlmodel.Tag) + res := resTmp.(id.ID) fc.Result = res - return 
ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _LayerTagItem_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "LayerTagItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.LayerTagItem().Tag(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(gqlmodel.Tag) + fc.Result = res + return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) } func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { @@ -26721,6 +26984,41 @@ func (ec *executionContext) _RemoveTagPayload_tagId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "RemoveTagPayload", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } 
+ + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.UpdatedLayers, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -28340,7 +28638,7 @@ func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { +func (ec *executionContext) _TagGroup_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -28358,7 +28656,7 @@ func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.Co ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Tags, nil + return obj.TagIds, nil }) if err != nil { ec.Error(ctx, err) @@ -28372,6 +28670,108 @@ func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.Co return ec.marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) } 
+func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagGroup().Tags(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.TagItem) + fc.Result = res + return ec.marshalNTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItemแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagGroup().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) 
+} + +func (ec *executionContext) _TagGroup_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagGroup().Layers(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _TagItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -28477,6 +28877,38 @@ func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.Co return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) _TagItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return 
obj.ParentID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*id.ID) + fc.Result = res + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) +} + func (ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -28669,6 +29101,73 @@ func (ec *executionContext) _TagItem_linkedDatasetField(ctx context.Context, fie return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) } +func (ec *executionContext) _TagItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().Parent(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.TagGroup) + fc.Result = res + return ec.marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) +} + +func (ec *executionContext) _TagItem_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := 
&graphql.FieldContext{ + Object: "TagItem", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.TagItem().Layers(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]gqlmodel.Layer) + fc.Result = res + return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) +} + func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -32713,6 +33212,14 @@ func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context if err != nil { return it, err } + case "parent": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parent")) + it.Parent, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + if err != nil { + return it, err + } case "linkedDatasetSchemaID": var err error @@ -34698,24 +35205,24 @@ func (ec *executionContext) _Layer(ctx context.Context, sel ast.SelectionSet, ob } } -func (ec *executionContext) _Layers(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Layers) graphql.Marshaler { +func (ec *executionContext) _LayerTag(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.LayerTag) graphql.Marshaler { switch obj := (obj).(type) { case nil: return graphql.Null - case gqlmodel.LayerItem: - return ec._LayerItem(ctx, sel, &obj) - case *gqlmodel.LayerItem: + case gqlmodel.LayerTagItem: + return ec._LayerTagItem(ctx, sel, &obj) + case *gqlmodel.LayerTagItem: if 
obj == nil { return graphql.Null } - return ec._LayerItem(ctx, sel, obj) - case gqlmodel.LayerGroup: - return ec._LayerGroup(ctx, sel, &obj) - case *gqlmodel.LayerGroup: + return ec._LayerTagItem(ctx, sel, obj) + case gqlmodel.LayerTagGroup: + return ec._LayerTagGroup(ctx, sel, &obj) + case *gqlmodel.LayerTagGroup: if obj == nil { return graphql.Null } - return ec._LayerGroup(ctx, sel, obj) + return ec._LayerTagGroup(ctx, sel, obj) default: panic(fmt.Errorf("unexpected type %T", obj)) } @@ -34839,29 +35346,6 @@ func (ec *executionContext) _Tag(ctx context.Context, sel ast.SelectionSet, obj } } -func (ec *executionContext) _Tags(ctx context.Context, sel ast.SelectionSet, obj gqlmodel.Tags) graphql.Marshaler { - switch obj := (obj).(type) { - case nil: - return graphql.Null - case gqlmodel.TagItem: - return ec._TagItem(ctx, sel, &obj) - case *gqlmodel.TagItem: - if obj == nil { - return graphql.Null - } - return ec._TagItem(ctx, sel, obj) - case gqlmodel.TagGroup: - return ec._TagGroup(ctx, sel, &obj) - case *gqlmodel.TagGroup: - if obj == nil { - return graphql.Null - } - return ec._TagGroup(ctx, sel, obj) - default: - panic(fmt.Errorf("unexpected type %T", obj)) - } -} - // endregion ************************** interface.gotpl *************************** // region **************************** object.gotpl **************************** @@ -35542,6 +36026,8 @@ func (ec *executionContext) _CreateTagItemPayload(ctx context.Context, sel ast.S if out.Values[i] == graphql.Null { invalids++ } + case "parent": + out.Values[i] = ec._CreateTagItemPayload_parent(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36577,7 +37063,7 @@ func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.Selection return out } -var layerGroupImplementors = []string{"LayerGroup", "Layers", "Layer"} +var layerGroupImplementors = []string{"LayerGroup", "Layer"} func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.LayerGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerGroupImplementors) @@ -36630,6 +37116,11 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "tags": + out.Values[i] = ec._LayerGroup_tags(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "parent": field := field out.Concurrently(i, func() (res graphql.Marshaler) { @@ -36721,25 +37212,6 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe res = ec._LayerGroup_scenePlugin(ctx, field, obj) return res }) - case "tagIds": - out.Values[i] = ec._LayerGroup_tagIds(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - case "tags": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._LayerGroup_tags(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res - }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36751,7 +37223,7 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe return out } -var layerItemImplementors = []string{"LayerItem", "Layers", "Layer"} +var layerItemImplementors = []string{"LayerItem", "Layer"} func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerItem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerItemImplementors) @@ -36794,6 +37266,11 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet out.Values[i] = ec._LayerItem_parentId(ctx, field, obj) case "linkedDatasetId": out.Values[i] = ec._LayerItem_linkedDatasetId(ctx, field, obj) + case "tags": + out.Values[i] = ec._LayerItem_tags(ctx, field, obj) + if 
out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } case "parent": field := field out.Concurrently(i, func() (res graphql.Marshaler) { @@ -36882,12 +37359,39 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet res = ec._LayerItem_scenePlugin(ctx, field, obj) return res }) - case "tagIds": - out.Values[i] = ec._LayerItem_tagIds(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var layerTagGroupImplementors = []string{"LayerTagGroup", "LayerTag"} + +func (ec *executionContext) _LayerTagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerTagGroup) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, layerTagGroupImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LayerTagGroup") + case "tagId": + out.Values[i] = ec._LayerTagGroup_tagId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } - case "tags": + case "children": + out.Values[i] = ec._LayerTagGroup_children(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tag": field := field out.Concurrently(i, func() (res graphql.Marshaler) { defer func() { @@ -36895,10 +37399,45 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet ec.Error(ctx, ec.Recover(ctx, r)) } }() - res = ec._LayerItem_tags(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } + res = ec._LayerTagGroup_tag(ctx, field, obj) + return res + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var layerTagItemImplementors = []string{"LayerTagItem", "LayerTag"} + +func 
(ec *executionContext) _LayerTagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerTagItem) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, layerTagItemImplementors) + + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("LayerTagItem") + case "tagId": + out.Values[i] = ec._LayerTagItem_tagId(ctx, field, obj) + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "tag": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._LayerTagItem_tag(ctx, field, obj) return res }) default: @@ -39523,6 +40062,11 @@ func (ec *executionContext) _RemoveTagPayload(ctx context.Context, sel ast.Selec if out.Values[i] == graphql.Null { invalids++ } + case "updatedLayers": + out.Values[i] = ec._RemoveTagPayload_updatedLayers(ctx, field, obj) + if out.Values[i] == graphql.Null { + invalids++ + } default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -39983,7 +40527,7 @@ func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.Sel return out } -var tagGroupImplementors = []string{"TagGroup", "Tag", "Tags"} +var tagGroupImplementors = []string{"TagGroup", "Tag"} func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, tagGroupImplementors) @@ -39997,20 +40541,59 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, case "id": out.Values[i] = ec._TagGroup_id(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + atomic.AddUint32(&invalids, 1) } case "sceneId": out.Values[i] = ec._TagGroup_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + 
atomic.AddUint32(&invalids, 1) } case "label": out.Values[i] = ec._TagGroup_label(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + atomic.AddUint32(&invalids, 1) } + case "tagIds": + out.Values[i] = ec._TagGroup_tagIds(ctx, field, obj) case "tags": - out.Values[i] = ec._TagGroup_tags(ctx, field, obj) + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagGroup_tags(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) + case "scene": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagGroup_scene(ctx, field, obj) + return res + }) + case "layers": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagGroup_layers(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -40022,7 +40605,7 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, return out } -var tagItemImplementors = []string{"TagItem", "Tag", "Tags"} +var tagItemImplementors = []string{"TagItem", "Tag"} func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagItem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, tagItemImplementors) @@ -40048,6 +40631,8 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "parentId": + out.Values[i] = ec._TagItem_parentId(ctx, field, obj) case "linkedDatasetID": out.Values[i] = ec._TagItem_linkedDatasetID(ctx, field, obj) 
case "linkedDatasetSchemaID": @@ -40087,6 +40672,31 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, res = ec._TagItem_linkedDatasetField(ctx, field, obj) return res }) + case "parent": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_parent(ctx, field, obj) + return res + }) + case "layers": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._TagItem_layers(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -42262,6 +42872,114 @@ func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reeart return ec._LayerItem(ctx, sel, v) } +func (ec *executionContext) marshalNLayerTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.LayerTag) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._LayerTag(ctx, sel, v) +} + +func (ec *executionContext) marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.LayerTag) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if 
!isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNLayerTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTag(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNLayerTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItemแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.LayerTagItem) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNLayerTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItem(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalNLayerTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerTagItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._LayerTagItem(ctx, sel, v) +} + func (ec *executionContext) unmarshalNLinkDatasetToPropertyValueInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLinkDatasetToPropertyValueInput(ctx context.Context, v interface{}) 
(gqlmodel.LinkDatasetToPropertyValueInput, error) { res, err := ec.unmarshalInputLinkDatasetToPropertyValueInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -43705,6 +44423,50 @@ func (ec *executionContext) marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearth return ec._TagGroup(ctx, sel, v) } +func (ec *executionContext) marshalNTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItemแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.TagItem) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + func (ec *executionContext) marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { @@ -45248,6 +46010,20 @@ func (ec *executionContext) marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearth return ec._SyncDatasetPayload(ctx, sel, v) } +func (ec *executionContext) marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Tag) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Tag(ctx, sel, v) 
+} + +func (ec *executionContext) marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagGroup) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._TagGroup(ctx, sel, v) +} + func (ec *executionContext) marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Team) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/adapter/gql/gqlmodel/convert_layer.go b/internal/adapter/gql/gqlmodel/convert_layer.go index 9bc9be1ac..8863d2bd0 100644 --- a/internal/adapter/gql/gqlmodel/convert_layer.go +++ b/internal/adapter/gql/gqlmodel/convert_layer.go @@ -11,12 +11,6 @@ func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { return nil } - tags := l.Tags().Tags() - tagIDs := make([]*id.ID, 0, len(tags)) - for _, tid := range tags { - tagIDs = append(tagIDs, tid.IDRef()) - } - return &LayerItem{ ID: l.ID().ID(), SceneID: l.Scene().ID(), @@ -28,7 +22,7 @@ func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), LinkedDatasetID: l.LinkedDataset().IDRef(), ParentID: parent.IDRef(), - TagIds: tagIDs, + Tags: ToLayerTagList(l.Tags(), l.Scene()), } } @@ -43,12 +37,6 @@ func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { layers = append(layers, lay.IDRef()) } - tags := l.Tags().Tags() - tagIDs := make([]*id.ID, 0, len(tags)) - for _, tid := range tags { - tagIDs = append(tagIDs, tid.IDRef()) - } - return &LayerGroup{ ID: l.ID().ID(), SceneID: l.Scene().ID(), @@ -62,7 +50,7 @@ func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { LayerIds: layers, Root: l.IsRoot(), ParentID: parent.IDRef(), - TagIds: tagIDs, + Tags: ToLayerTagList(l.Tags(), l.Scene()), } } @@ -188,3 +176,56 @@ func FromLayerEncodingFormat(v 
LayerEncodingFormat) decoding.LayerEncodingFormat return decoding.LayerEncodingFormat("") } + +func ToLayerTagList(t *layer.TagList, sid id.SceneID) []LayerTag { + if t.IsEmpty() { + return nil + } + tags := t.Tags() + gtags := make([]LayerTag, 0, len(tags)) + for _, t := range tags { + if gt := ToLayerTag(t); gt != nil { + gtags = append(gtags, gt) + } + } + return gtags +} + +func ToLayerTag(l layer.Tag) LayerTag { + if l == nil { + return nil + } + if tg := layer.TagGroupFrom(l); tg != nil { + return ToLayerTagGroup(tg) + } + if ti := layer.TagItemFrom(l); ti != nil { + return ToLayerTagItem(ti) + } + return nil +} + +func ToLayerTagItem(t *layer.TagItem) *LayerTagItem { + if t == nil { + return nil + } + return &LayerTagItem{ + TagID: t.ID().ID(), + } +} + +func ToLayerTagGroup(t *layer.TagGroup) *LayerTagGroup { + if t == nil { + return nil + } + children := t.Children() + tags := make([]*LayerTagItem, 0, len(children)) + for _, c := range children { + if t := ToLayerTagItem(c); t != nil { + tags = append(tags, t) + } + } + return &LayerTagGroup{ + TagID: t.ID().ID(), + Children: tags, + } +} diff --git a/internal/adapter/gql/gqlmodel/convert_tag.go b/internal/adapter/gql/gqlmodel/convert_tag.go index 77ea5103c..34de71a7b 100644 --- a/internal/adapter/gql/gqlmodel/convert_tag.go +++ b/internal/adapter/gql/gqlmodel/convert_tag.go @@ -13,6 +13,7 @@ func ToTagItem(ti *tag.Item) *TagItem { ID: ti.ID().ID(), SceneID: ti.Scene().ID(), Label: ti.Label(), + ParentID: ti.Parent().IDRef(), LinkedDatasetID: ti.LinkedDatasetID().IDRef(), LinkedDatasetSchemaID: ti.LinkedDatasetSchemaID().IDRef(), LinkedDatasetFieldID: ti.LinkedDatasetFieldID().IDRef(), @@ -34,7 +35,7 @@ func ToTagGroup(tg *tag.Group) *TagGroup { ID: tg.ID().ID(), SceneID: tg.Scene().ID(), Label: tg.Label(), - Tags: ids, + TagIds: ids, } } diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 3ddeb1189..47e646975 100644 --- 
a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -19,8 +19,8 @@ type Layer interface { IsLayer() } -type Layers interface { - IsLayers() +type LayerTag interface { + IsLayerTag() } type Node interface { @@ -35,10 +35,6 @@ type Tag interface { IsTag() } -type Tags interface { - IsTags() -} - type AddClusterInput struct { SceneID id.ID `json:"sceneId"` Name string `json:"name"` @@ -262,13 +258,15 @@ type CreateTagGroupPayload struct { type CreateTagItemInput struct { SceneID id.ID `json:"sceneId"` Label string `json:"label"` + Parent *id.ID `json:"parent"` LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` LinkedDatasetID *id.ID `json:"linkedDatasetID"` LinkedDatasetField *id.ID `json:"linkedDatasetField"` } type CreateTagItemPayload struct { - Tag *TagItem `json:"tag"` + Tag *TagItem `json:"tag"` + Parent *TagGroup `json:"parent"` } type CreateTeamInput struct { @@ -490,6 +488,7 @@ type LayerGroup struct { LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaId"` Root bool `json:"root"` LayerIds []*id.ID `json:"layerIds"` + Tags []LayerTag `json:"tags"` Parent *LayerGroup `json:"parent"` Property *Property `json:"property"` Plugin *Plugin `json:"plugin"` @@ -498,12 +497,9 @@ type LayerGroup struct { Layers []Layer `json:"layers"` Scene *Scene `json:"scene"` ScenePlugin *ScenePlugin `json:"scenePlugin"` - TagIds []*id.ID `json:"tagIds"` - Tags []Tag `json:"tags"` } -func (LayerGroup) IsLayers() {} -func (LayerGroup) IsLayer() {} +func (LayerGroup) IsLayer() {} type LayerItem struct { ID id.ID `json:"id"` @@ -516,6 +512,7 @@ type LayerItem struct { Infobox *Infobox `json:"infobox"` ParentID *id.ID `json:"parentId"` LinkedDatasetID *id.ID `json:"linkedDatasetId"` + Tags []LayerTag `json:"tags"` Parent *LayerGroup `json:"parent"` Property *Property `json:"property"` Plugin *Plugin `json:"plugin"` @@ -524,12 +521,24 @@ type LayerItem struct { Merged *MergedLayer `json:"merged"` Scene *Scene `json:"scene"` 
ScenePlugin *ScenePlugin `json:"scenePlugin"` - TagIds []*id.ID `json:"tagIds"` - Tags []Tag `json:"tags"` } -func (LayerItem) IsLayers() {} -func (LayerItem) IsLayer() {} +func (LayerItem) IsLayer() {} + +type LayerTagGroup struct { + TagID id.ID `json:"tagId"` + Children []*LayerTagItem `json:"children"` + Tag Tag `json:"tag"` +} + +func (LayerTagGroup) IsLayerTag() {} + +type LayerTagItem struct { + TagID id.ID `json:"tagId"` + Tag Tag `json:"tag"` +} + +func (LayerTagItem) IsLayerTag() {} type LinkDatasetToPropertyValueInput struct { PropertyID id.ID `json:"propertyId"` @@ -973,7 +982,8 @@ type RemoveTagInput struct { } type RemoveTagPayload struct { - TagID id.ID `json:"tagId"` + TagID id.ID `json:"tagId"` + UpdatedLayers []Layer `json:"updatedLayers"` } type RemoveWidgetInput struct { @@ -1062,29 +1072,33 @@ type SyncDatasetPayload struct { } type TagGroup struct { - ID id.ID `json:"id"` - SceneID id.ID `json:"sceneId"` - Label string `json:"label"` - Tags []*id.ID `json:"tags"` + ID id.ID `json:"id"` + SceneID id.ID `json:"sceneId"` + Label string `json:"label"` + TagIds []*id.ID `json:"tagIds"` + Tags []*TagItem `json:"tags"` + Scene *Scene `json:"scene"` + Layers []Layer `json:"layers"` } -func (TagGroup) IsTag() {} -func (TagGroup) IsTags() {} +func (TagGroup) IsTag() {} type TagItem struct { ID id.ID `json:"id"` SceneID id.ID `json:"sceneId"` Label string `json:"label"` + ParentID *id.ID `json:"parentId"` LinkedDatasetID *id.ID `json:"linkedDatasetID"` LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` LinkedDatasetFieldID *id.ID `json:"linkedDatasetFieldID"` LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` LinkedDataset *Dataset `json:"linkedDataset"` LinkedDatasetField *DatasetField `json:"linkedDatasetField"` + Parent *TagGroup `json:"parent"` + Layers []Layer `json:"layers"` } -func (TagItem) IsTag() {} -func (TagItem) IsTags() {} +func (TagItem) IsTag() {} type Team struct { ID id.ID `json:"id"` diff --git 
a/internal/adapter/gql/loader_layer.go b/internal/adapter/gql/loader_layer.go index 1beec499b..ea9822024 100644 --- a/internal/adapter/gql/loader_layer.go +++ b/internal/adapter/gql/loader_layer.go @@ -100,6 +100,24 @@ func (c *LayerLoader) FetchParentAndMerged(ctx context.Context, org id.LayerID) return gqlmodel.ToMergedLayer(res), nil } +func (c *LayerLoader) FetchByTag(ctx context.Context, tag id.TagID) ([]gqlmodel.Layer, error) { + res, err2 := c.usecase.FetchByTag(ctx, tag, getOperator(ctx)) + if err2 != nil { + return nil, err2 + } + + layers := make([]gqlmodel.Layer, 0, len(res)) + for _, l := range res { + if l == nil { + layers = append(layers, nil) + } else { + layers = append(layers, gqlmodel.ToLayer(*l, nil)) + } + } + + return layers, nil +} + // data loader type LayerDataLoader interface { diff --git a/internal/adapter/gql/loader_tag.go b/internal/adapter/gql/loader_tag.go index ab00b7ec9..fcfd823e1 100644 --- a/internal/adapter/gql/loader_tag.go +++ b/internal/adapter/gql/loader_tag.go @@ -68,73 +68,6 @@ func (c *TagLoader) FetchItem(ctx context.Context, ids []id.TagID) ([]*gqlmodel. 
return tagItems, nil } -func (c *TagLoader) FetchGroupByLayer(ctx context.Context, lid id.LayerID) ([]*gqlmodel.TagGroup, error) { - res, err := c.usecase.FetchGroupsByLayer(ctx, lid, getOperator(ctx)) - if err != nil { - return nil, err - } - tagGroups := make([]*gqlmodel.TagGroup, 0, len(res)) - for _, t := range res { - tg := gqlmodel.ToTagGroup(t) - if tg != nil { - tagGroups = append(tagGroups, tg) - } - } - - return tagGroups, nil -} - -func (c *TagLoader) FetchItemByLayer(ctx context.Context, lid id.LayerID) ([]*gqlmodel.TagItem, error) { - res, err := c.usecase.FetchItemsByLayer(ctx, lid, getOperator(ctx)) - if err != nil { - return nil, err - } - - tagItems := make([]*gqlmodel.TagItem, 0, len(res)) - for _, t := range res { - ti := gqlmodel.ToTagItem(t) - if ti != nil { - tagItems = append(tagItems, ti) - } - } - - return tagItems, nil -} - -func (c *TagLoader) FetchGroupByScene(ctx context.Context, sid id.SceneID) ([]*gqlmodel.TagGroup, error) { - res, err := c.usecase.FetchGroupsByScene(ctx, sid, getOperator(ctx)) - if err != nil { - return nil, err - } - - tagGroups := make([]*gqlmodel.TagGroup, 0, len(res)) - for _, t := range res { - tg := gqlmodel.ToTagGroup(t) - if tg != nil { - tagGroups = append(tagGroups, tg) - } - } - - return tagGroups, nil -} - -func (c *TagLoader) FetchItemByScene(ctx context.Context, sid id.SceneID) ([]*gqlmodel.TagItem, error) { - res, err := c.usecase.FetchItemsByScene(ctx, sid, getOperator(ctx)) - if err != nil { - return nil, err - } - - tagItems := make([]*gqlmodel.TagItem, 0, len(res)) - for _, t := range res { - ti := gqlmodel.ToTagItem(t) - if ti != nil { - tagItems = append(tagItems, ti) - } - } - - return tagItems, nil -} - // data loaders type TagDataLoader interface { diff --git a/internal/adapter/gql/resolver_layer.go b/internal/adapter/gql/resolver_layer.go index 6c76eda6a..1e6ca0283 100644 --- a/internal/adapter/gql/resolver_layer.go +++ b/internal/adapter/gql/resolver_layer.go @@ -35,6 +35,14 @@ func (r 
*Resolver) MergedInfoboxField() MergedInfoboxFieldResolver { return &mergedInfoboxFieldResolver{r} } +func (r *Resolver) LayerTagItem() LayerTagItemResolver { + return &layerTagItemResolver{r} +} + +func (r *Resolver) LayerTagGroup() LayerTagGroupResolver { + return &layerTagGroupResolver{r} +} + type infoboxResolver struct{ *Resolver } func (r *infoboxResolver) Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) { @@ -202,35 +210,6 @@ func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.In type layerGroupResolver struct{ *Resolver } -func (r *layerGroupResolver) Tags(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Tag, error) { - exit := trace(ctx) - defer exit() - - ids := make([]id.TagID, 0, len(obj.TagIds)) - for _, tid := range obj.TagIds { - if tid != nil { - ids = append(ids, id.TagID(*tid)) - } - } - - tags, err := DataLoadersFromContext(ctx).Tag.LoadAll(ids) - if len(err) > 0 { - for _, err1 := range err { - if err1 != nil { - return nil, err1 - } - } - } - - res := make([]gqlmodel.Tag, 0, len(tags)) - for _, t := range tags { - if t != nil { - res = append(res, *t) - } - } - return res, nil -} - func (r *layerGroupResolver) Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) { exit := trace(ctx) defer exit() @@ -411,35 +390,6 @@ func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Layer return s.Plugin(*obj.PluginID), nil } -func (r *layerItemResolver) Tags(ctx context.Context, obj *gqlmodel.LayerItem) ([]gqlmodel.Tag, error) { - exit := trace(ctx) - defer exit() - - ids := make([]id.TagID, 0, len(obj.TagIds)) - for _, tid := range obj.TagIds { - if tid != nil { - ids = append(ids, id.TagID(*tid)) - } - } - - tags, err := DataLoadersFromContext(ctx).Tag.LoadAll(ids) - if len(err) > 0 { - for _, err1 := range err { - if err1 != nil { - return nil, err1 - } - } - } - - res := make([]gqlmodel.Tag, 0, len(tags)) - for _, t := range tags { - 
if t != nil { - res = append(res, *t) - } - } - return res, nil -} - type mergedLayerResolver struct{ *Resolver } func (r *mergedLayerResolver) Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) { @@ -516,28 +466,28 @@ func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmo return s.Plugin(obj.PluginID), nil } -func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) { +type layerTagItemResolver struct{ *Resolver } + +func (r *layerTagItemResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) { exit := trace(ctx) defer exit() - layer, err := r.usecases.Layer.AttachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + t, err := DataLoadersFromContext(ctx).Tag.Load(id.TagID(obj.TagID)) if err != nil { return nil, err } - return &gqlmodel.AttachTagToLayerPayload{ - Layer: gqlmodel.ToLayer(layer, nil), - }, nil + return *t, nil } -func (r *mutationResolver) DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) { +type layerTagGroupResolver struct{ *Resolver } + +func (r *layerTagGroupResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagGroup) (gqlmodel.Tag, error) { exit := trace(ctx) defer exit() - layer, err := r.usecases.Layer.DetachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + t, err := DataLoadersFromContext(ctx).Tag.Load(id.TagID(obj.TagID)) if err != nil { return nil, err } - return &gqlmodel.DetachTagFromLayerPayload{ - Layer: gqlmodel.ToLayer(layer, nil), - }, nil + return *t, nil } diff --git a/internal/adapter/gql/resolver_mutation_layer.go b/internal/adapter/gql/resolver_mutation_layer.go index 3b1123295..e745793be 100644 --- a/internal/adapter/gql/resolver_mutation_layer.go +++ b/internal/adapter/gql/resolver_mutation_layer.go @@ -214,3 +214,29 @@ 
func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.Impor ParentLayer: gqlmodel.ToLayerGroup(l2, nil), }, err } + +func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.AttachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.AttachTagToLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} + +func (r *mutationResolver) DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) { + exit := trace(ctx) + defer exit() + + layer, err := r.usecases.Layer.DetachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.DetachTagFromLayerPayload{ + Layer: gqlmodel.ToLayer(layer, nil), + }, nil +} diff --git a/internal/adapter/gql/resolver_mutation_tag.go b/internal/adapter/gql/resolver_mutation_tag.go index 95cc8bd50..ceffc8459 100644 --- a/internal/adapter/gql/resolver_mutation_tag.go +++ b/internal/adapter/gql/resolver_mutation_tag.go @@ -12,9 +12,10 @@ func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.Cre exit := trace(ctx) defer exit() - tag, err := r.usecases.Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ + tag, parent, err := r.usecases.Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ Label: input.Label, SceneID: id.SceneID(input.SceneID), + Parent: id.TagIDFromRefID(input.Parent), LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(input.LinkedDatasetSchemaID), LinkedDatasetID: id.DatasetIDFromRefID(input.LinkedDatasetID), LinkedDatasetField: id.DatasetSchemaFieldIDFromRefID(input.LinkedDatasetField), @@ -22,8 +23,10 @@ func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.Cre if err != nil { 
return nil, err } + return &gqlmodel.CreateTagItemPayload{ - Tag: gqlmodel.ToTagItem(tag), + Tag: gqlmodel.ToTagItem(tag), + Parent: gqlmodel.ToTagGroup(parent), }, nil } @@ -97,11 +100,22 @@ func (r *mutationResolver) RemoveTag(ctx context.Context, input gqlmodel.RemoveT exit := trace(ctx) defer exit() - tagID, err := r.usecases.Tag.Remove(ctx, id.TagID(input.TagID), getOperator(ctx)) + tagID, layers, err := r.usecases.Tag.Remove(ctx, id.TagID(input.TagID), getOperator(ctx)) if err != nil { return nil, err } + + updatedLayers := make([]gqlmodel.Layer, 0, len(layers)) + for _, l := range layers { + if l == nil { + updatedLayers = append(updatedLayers, nil) + } else { + updatedLayers = append(updatedLayers, gqlmodel.ToLayer(*l, nil)) + } + } + return &gqlmodel.RemoveTagPayload{ - TagID: tagID.ID(), + TagID: tagID.ID(), + UpdatedLayers: updatedLayers, }, nil } diff --git a/internal/adapter/gql/resolver_tag.go b/internal/adapter/gql/resolver_tag.go index b3bd77e4e..5a378e543 100644 --- a/internal/adapter/gql/resolver_tag.go +++ b/internal/adapter/gql/resolver_tag.go @@ -43,3 +43,58 @@ func (t tagItemResolver) LinkedDatasetField(ctx context.Context, obj *gqlmodel.T ds, err := DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) return ds.Field(*obj.LinkedDatasetFieldID), err } + +func (t tagItemResolver) Parent(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.TagGroup, error) { + exit := trace(ctx) + defer exit() + + if obj.ParentID == nil { + return nil, nil + } + return DataLoadersFromContext(ctx).TagGroup.Load(id.TagID(*obj.ParentID)) +} + +func (tg tagItemResolver) Layers(ctx context.Context, obj *gqlmodel.TagItem) ([]gqlmodel.Layer, error) { + exit := trace(ctx) + defer exit() + + return tg.loaders.Layer.FetchByTag(ctx, id.TagID(obj.ID)) +} + +type tagGroupResolver struct{ *Resolver } + +func (r *Resolver) TagGroup() TagGroupResolver { + return &tagGroupResolver{r} +} + +func (tg tagGroupResolver) Tags(ctx context.Context, obj 
*gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) { + exit := trace(ctx) + defer exit() + + tagIds := make([]id.TagID, 0, len(obj.TagIds)) + for _, i := range obj.TagIds { + if i == nil { + continue + } + tagIds = append(tagIds, id.TagID(*i)) + } + tagItems, err := DataLoadersFromContext(ctx).TagItem.LoadAll(tagIds) + if len(err) > 0 && err[0] != nil { + return nil, err[0] + } + return tagItems, nil +} + +func (tg tagGroupResolver) Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +} + +func (tg tagGroupResolver) Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) { + exit := trace(ctx) + defer exit() + + return tg.loaders.Layer.FetchByTag(ctx, id.TagID(obj.ID)) +} diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index c410e9271..d59bc308d 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -261,10 +261,8 @@ func (r *Layer) FindByTag(ctx context.Context, tagID id.TagID, s []id.SceneID) ( defer r.lock.Unlock() var res layer.List for _, layer := range r.data { - for _, tag := range layer.Tags().Tags() { - if tag == tagID { - res = append(res, &layer) - } + if layer.Tags().Has(tagID) { + res = append(res, &layer) } } diff --git a/internal/infrastructure/memory/layer_test.go b/internal/infrastructure/memory/layer_test.go index f1e2936ce..1ca83948f 100644 --- a/internal/infrastructure/memory/layer_test.go +++ b/internal/infrastructure/memory/layer_test.go @@ -15,15 +15,16 @@ func TestLayer_FindByTag(t *testing.T) { sid := id.NewSceneID() sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := layer.NewTagList([]layer.Tag{layer.NewTagGroup(t1.ID(), nil)}) lg := layer.New().NewID().Tags(tl).Scene(sid).Group().MustBuild() + repo 
:= Layer{ data: map[id.LayerID]layer.Layer{ lg.ID(): lg, }, } + out, err := repo.FindByTag(ctx, t1.ID(), sl) assert.NoError(t, err) - l := layer.Layer(lg) - assert.Equal(t, layer.List{&l}, out) + assert.Equal(t, layer.List{lg.LayerRef()}, out) } diff --git a/internal/infrastructure/memory/tag.go b/internal/infrastructure/memory/tag.go index 3b21b5afd..2cbba397d 100644 --- a/internal/infrastructure/memory/tag.go +++ b/internal/infrastructure/memory/tag.go @@ -112,7 +112,7 @@ func (t *Tag) FindGroupByIDs(ctx context.Context, tagIDs []id.TagID, ids []id.Sc return res, nil } -func (t *Tag) FindByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { +func (t *Tag) FindRootsByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { t.lock.Lock() defer t.lock.Unlock() @@ -192,29 +192,3 @@ func (t *Tag) FindGroupByItem(ctx context.Context, tagID id.TagID, s []id.SceneI return nil, rerror.ErrNotFound } - -func (t *Tag) FindGroupByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Group, error) { - t.lock.Lock() - defer t.lock.Unlock() - - var res []*tag.Group - for _, tt := range t.data { - if group := tag.ToTagGroup(tt); tt.Scene() == sceneID && group != nil { - res = append(res, group) - } - } - return res, nil -} - -func (t *Tag) FindItemByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Item, error) { - t.lock.Lock() - defer t.lock.Unlock() - - var res []*tag.Item - for _, tt := range t.data { - if item := tag.ToTagItem(tt); tt.Scene() == sceneID && item != nil { - res = append(res, item) - } - } - return res, nil -} diff --git a/internal/infrastructure/memory/tag_test.go b/internal/infrastructure/memory/tag_test.go index b4bfeac2f..c6d220bc3 100644 --- a/internal/infrastructure/memory/tag_test.go +++ b/internal/infrastructure/memory/tag_test.go @@ -59,7 +59,7 @@ func TestTag_FindByIDs(t *testing.T) { assert.Equal(t, []*tag.Tag{&tti, &ttg}, out) } -func TestTag_FindByScene(t *testing.T) { +func TestTag_FindRootsByScene(t *testing.T) 
{ ctx := context.Background() sid := id.NewSceneID() sid2 := id.NewSceneID() @@ -77,67 +77,11 @@ func TestTag_FindByScene(t *testing.T) { t3.ID(): tti2, }, } - out, err := repo.FindByScene(ctx, sid2) + out, err := repo.FindRootsByScene(ctx, sid2) assert.NoError(t, err) assert.Equal(t, []*tag.Tag{&tti2}, out) } -func TestTag_FindItemByScene(t *testing.T) { - ctx := context.Background() - sid := id.NewSceneID() - sid2 := id.NewSceneID() - t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) - t2, _ := tag.NewGroup().NewID().Scene(sid2).Label("group").Tags(tl).Build() - t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() - tti := tag.Tag(t1) - tti2 := tag.Tag(t3) - ttg := tag.Tag(t2) - repo := Tag{ - data: map[id.TagID]tag.Tag{ - t1.ID(): tti, - t2.ID(): ttg, - t3.ID(): tti2, - }, - } - out, err := repo.FindItemByScene(ctx, sid2) - assert.NoError(t, err) - assert.Equal(t, 1, len(out)) - assert.Same(t, t3, out[0]) - - out, err = repo.FindItemByScene(ctx, id.SceneID{}) - assert.NoError(t, err) - assert.Equal(t, 0, len(out)) -} - -func TestTag_FindGroupByScene(t *testing.T) { - ctx := context.Background() - sid := id.NewSceneID() - sid2 := id.NewSceneID() - t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) - t2, _ := tag.NewGroup().NewID().Scene(sid2).Label("group").Tags(tl).Build() - t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() - tti := tag.Tag(t1) - tti2 := tag.Tag(t3) - ttg := tag.Tag(t2) - repo := Tag{ - data: map[id.TagID]tag.Tag{ - t1.ID(): tti, - t2.ID(): ttg, - t3.ID(): tti2, - }, - } - out, err := repo.FindGroupByScene(ctx, sid2) - assert.NoError(t, err) - assert.Equal(t, 1, len(out)) - assert.Same(t, t2, out[0]) - - out, err = repo.FindGroupByScene(ctx, id.SceneID{}) - assert.NoError(t, err) - assert.Equal(t, 0, len(out)) -} - func TestTag_FindGroupByID(t *testing.T) { ctx := context.Background() sid := 
id.NewSceneID() @@ -276,7 +220,7 @@ func TestTag_Remove(t *testing.T) { } err := repo.Remove(ctx, t1.ID()) assert.NoError(t, err) - out, _ := repo.FindByScene(ctx, sid) + out, _ := repo.FindRootsByScene(ctx, sid) assert.Equal(t, []*tag.Tag{&ttg}, out) } @@ -299,7 +243,7 @@ func TestTag_RemoveAll(t *testing.T) { } err := repo.RemoveAll(ctx, []id.TagID{t1.ID(), t3.ID()}) assert.NoError(t, err) - out, _ := repo.FindByScene(ctx, sid) + out, _ := repo.FindRootsByScene(ctx, sid) assert.Equal(t, []*tag.Tag{&tti2}, out) } @@ -323,7 +267,7 @@ func TestTag_RemoveByScene(t *testing.T) { } err := repo.RemoveByScene(ctx, sid) assert.NoError(t, err) - out, _ := repo.FindByScene(ctx, sid2) + out, _ := repo.FindRootsByScene(ctx, sid2) assert.Equal(t, []*tag.Tag{&tti2}, out) } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index 3ef8a8f03..efdf02c9c 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -31,14 +31,14 @@ func (r *layerRepo) init() { } func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (layer.Layer, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "id", Value: id.String()}, }, f) return r.findOne(ctx, filter) } func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "id", Value: bson.D{ {Key: "$in", Value: id.LayerIDToKeys(ids)}, }}, @@ -59,14 +59,14 @@ func (r *layerRepo) FindAllByDatasetSchema(ctx context.Context, dsid id.DatasetS } func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Item, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "id", Value: id.String()}, }, f) return r.findItemOne(ctx, filter) } func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, 
error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "id", Value: bson.D{ {Key: "$in", Value: id.LayerIDToKeys(ids)}, }}, @@ -80,14 +80,14 @@ func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id. } func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "id", Value: id.String()}, }, f) return r.findGroupOne(ctx, filter) } func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "id", Value: bson.D{ {Key: "$in", Value: id.LayerIDToKeys(ids)}, }}, @@ -109,7 +109,7 @@ func (r *layerRepo) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, } func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "$or", Value: []bson.D{ {{Key: "property", Value: id.String()}}, {{Key: "infobox.property", Value: id.String()}}, @@ -120,7 +120,7 @@ func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID, f []id } func (r *layerRepo) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { - filter := r.sceneFilter(bson.D{ + filter := r.sceneFilterD(bson.D{ {Key: "group.layers", Value: id.String()}, }, f) return r.findGroupOne(ctx, filter) @@ -170,16 +170,18 @@ func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error func (r *layerRepo) FindByTag(ctx context.Context, tagID id.TagID, f []id.SceneID) (layer.List, error) { ids := []id.TagID{tagID} - filter := r.sceneFilter(bson.D{ - {Key: "tags", Value: bson.D{ - {Key: "$in", Value: id.TagIDToKeys(ids)}, - }}, + tags := id.TagIDToKeys(ids) + filter := r.sceneFilter(bson.M{ + "$or": []bson.M{ + {"tags.id": 
bson.M{"$in": tags}}, + {"tags.tags.id": bson.M{"$in": tags}}, + }, }, f) return r.find(ctx, nil, filter) } -func (r *layerRepo) find(ctx context.Context, dst layer.List, filter bson.D) (layer.List, error) { +func (r *layerRepo) find(ctx context.Context, dst layer.List, filter interface{}) (layer.List, error) { c := mongodoc.LayerConsumer{ Rows: dst, } @@ -323,7 +325,7 @@ func filterLayerGroups(ids []id.LayerID, rows []*layer.Group) []*layer.Group { return res } -func (*layerRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { +func (*layerRepo) sceneFilterD(filter bson.D, scenes []id.SceneID) bson.D { if scenes == nil { return filter } @@ -333,3 +335,11 @@ func (*layerRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { }) return filter } + +func (*layerRepo) sceneFilter(filter bson.M, scenes []id.SceneID) bson.M { + if scenes == nil { + return filter + } + filter["scene"] = bson.M{"$in": id.SceneIDToKeys(scenes)} + return filter +} diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go index 1c3b0ac88..44b935a98 100644 --- a/internal/infrastructure/mongo/mongodoc/layer.go +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -3,12 +3,9 @@ package mongodoc import ( "errors" - "github.com/reearth/reearth-backend/pkg/tag" - - "go.mongodb.org/mongo-driver/bson" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" + "go.mongodb.org/mongo-driver/bson" ) type LayerInfoboxFieldDocument struct { @@ -23,6 +20,14 @@ type LayerInfoboxDocument struct { Fields []LayerInfoboxFieldDocument } +type LayerTagDocument struct { + ID string + Group bool + Tags []LayerTagDocument +} + +type LayerTagListDocument []LayerTagDocument + type LayerItemDocument struct { LinkedDataset *string } @@ -44,7 +49,7 @@ type LayerDocument struct { Infobox *LayerInfoboxDocument Item *LayerItemDocument Group *LayerGroupDocument - Tags []string + Tags LayerTagListDocument } type 
LayerConsumer struct { @@ -114,11 +119,7 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { Fields: fields, } } - var tagIDs []string - tags := l.Tags() - for _, tid := range tags.Tags() { - tagIDs = append(tagIDs, tid.String()) - } + id := l.ID().String() return &LayerDocument{ ID: id, @@ -131,7 +132,7 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { Plugin: l.Plugin().StringRef(), Extension: l.Extension().StringRef(), Property: l.Property().StringRef(), - Tags: tagIDs, + Tags: NewLayerTagList(l.Tags()), }, id } @@ -181,12 +182,6 @@ func (d *LayerDocument) ModelItem() (*layer.Item, error) { return nil, err } - tids, err := id.TagIDsFrom(d.Tags) - if err != nil { - return nil, err - } - tagList := tag.NewListFromTags(tids) - return layer.NewItem(). ID(lid). Name(d.Name). @@ -196,7 +191,7 @@ func (d *LayerDocument) ModelItem() (*layer.Item, error) { Property(id.PropertyIDFromRef(d.Property)). Infobox(ib). Scene(sid). - Tags(tagList). + Tags(d.Tags.Model()). // item LinkedDataset(id.DatasetIDFromRef(d.Item.LinkedDataset)). Build() @@ -225,12 +220,6 @@ func (d *LayerDocument) ModelGroup() (*layer.Group, error) { ids = append(ids, lid) } - tids, err := id.TagIDsFrom(d.Tags) - if err != nil { - return nil, err - } - tagList := tag.NewListFromTags(tids) - return layer.NewGroup(). ID(lid). Name(d.Name). @@ -240,7 +229,7 @@ func (d *LayerDocument) ModelGroup() (*layer.Group, error) { Property(id.PropertyIDFromRef(d.Property)). Infobox(ib). Scene(sid). - Tags(tagList). + Tags(d.Tags.Model()). // group Root(d.Group != nil && d.Group.Root). Layers(layer.NewIDList(ids)). 
@@ -283,3 +272,83 @@ func ToModelInfobox(ib *LayerInfoboxDocument) (*layer.Infobox, error) { } return layer.NewInfobox(fields, pid), nil } + +func NewLayerTagList(list *layer.TagList) LayerTagListDocument { + if list.IsEmpty() { + return nil + } + + tags := list.Tags() + if len(tags) == 0 { + return nil + } + res := make([]LayerTagDocument, 0, len(tags)) + for _, t := range tags { + if t == nil { + return nil + } + if td := NewLayerTag(t); td != nil { + res = append(res, *td) + } + } + return res +} + +func (d *LayerTagListDocument) Model() *layer.TagList { + if d == nil { + return nil + } + + tags := make([]layer.Tag, 0, len(*d)) + for _, t := range *d { + if ti := t.Model(); ti != nil { + tags = append(tags, ti) + } + } + return layer.NewTagList(tags) +} + +func NewLayerTag(t layer.Tag) *LayerTagDocument { + var group bool + var tags []LayerTagDocument + + if tg := layer.TagGroupFrom(t); tg != nil { + group = true + children := tg.Children() + tags = make([]LayerTagDocument, 0, len(children)) + for _, c := range children { + if ct := NewLayerTag(c); ct != nil { + tags = append(tags, *ct) + } + } + } else if ti := layer.TagItemFrom(t); ti == nil { + return nil + } + return &LayerTagDocument{ + ID: t.ID().String(), + Group: group, + Tags: tags, + } +} + +func (d *LayerTagDocument) Model() layer.Tag { + if d == nil { + return nil + } + + tid := id.TagIDFromRef(&d.ID) + if tid == nil { + return nil + } + + if d.Group { + tags := make([]*layer.TagItem, 0, len(d.Tags)) + for _, t := range d.Tags { + if ti := layer.TagItemFrom(t.Model()); ti != nil { + tags = append(tags, ti) + } + } + return layer.NewTagGroup(*tid, tags) + } + return layer.NewTagItem(*tid) +} diff --git a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go index 68ff9cfd2..d8e936f39 100644 --- a/internal/infrastructure/mongo/mongodoc/tag.go +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -10,6 +10,7 @@ import ( ) type TagItemDocument struct { + Parent 
*string LinkedDatasetFieldID *string LinkedDatasetID *string LinkedDatasetSchemaID *string @@ -73,6 +74,7 @@ func NewTag(t tag.Tag) (*TagDocument, string) { if ti := tag.ItemFrom(t); ti != nil { item = &TagItemDocument{ + Parent: ti.Parent().StringRef(), LinkedDatasetFieldID: ti.LinkedDatasetFieldID().StringRef(), LinkedDatasetID: ti.LinkedDatasetID().StringRef(), LinkedDatasetSchemaID: ti.LinkedDatasetSchemaID().StringRef(), @@ -111,6 +113,7 @@ func (d *TagDocument) Model() (*tag.Item, *tag.Group, error) { } return ti, nil, nil } + if d.Group != nil { tg, err := d.ModelGroup() if err != nil { @@ -118,10 +121,15 @@ func (d *TagDocument) Model() (*tag.Item, *tag.Group, error) { } return nil, tg, nil } + return nil, nil, errors.New("invalid tag") } func (d *TagDocument) ModelItem() (*tag.Item, error) { + if d.Item == nil { + return nil, nil + } + tid, err := id.TagIDFrom(d.ID) if err != nil { return nil, err @@ -135,6 +143,7 @@ func (d *TagDocument) ModelItem() (*tag.Item, error) { ID(tid). Label(d.Label). Scene(sid). + Parent(id.TagIDFromRef(d.Item.Parent)). LinkedDatasetSchemaID(id.DatasetSchemaIDFromRef(d.Item.LinkedDatasetSchemaID)). LinkedDatasetID(id.DatasetIDFromRef(d.Item.LinkedDatasetID)). LinkedDatasetFieldID(id.DatasetSchemaFieldIDFromRef(d.Item.LinkedDatasetFieldID)). 
@@ -142,6 +151,10 @@ func (d *TagDocument) ModelItem() (*tag.Item, error) { } func (d *TagDocument) ModelGroup() (*tag.Group, error) { + if d.Group == nil { + return nil, nil + } + tid, err := id.TagIDFrom(d.ID) if err != nil { return nil, err diff --git a/internal/infrastructure/mongo/mongodoc/tag_test.go b/internal/infrastructure/mongo/mongodoc/tag_test.go index ed9d30920..b4d1869e0 100644 --- a/internal/infrastructure/mongo/mongodoc/tag_test.go +++ b/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -352,7 +352,8 @@ func TestTagDocument_ModelGroup(t *testing.T) { { name: "invalid id", fields: fields{ - ID: "xxx", + ID: "xxx", + Group: &TagGroupDocument{}, }, want: nil, wantErr: true, @@ -362,6 +363,7 @@ func TestTagDocument_ModelGroup(t *testing.T) { fields: fields{ ID: id.NewTagID().String(), Scene: "xxx", + Group: &TagGroupDocument{}, }, want: nil, wantErr: true, @@ -439,7 +441,8 @@ func TestTagDocument_ModelItem(t *testing.T) { { name: "invalid id", fields: fields{ - ID: "xxx", + ID: "xxx", + Item: &TagItemDocument{}, }, want: nil, wantErr: true, @@ -449,6 +452,7 @@ func TestTagDocument_ModelItem(t *testing.T) { fields: fields{ ID: id.NewTagID().String(), Scene: "xxx", + Item: &TagItemDocument{}, }, want: nil, wantErr: true, diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index 58175649f..f07349aa5 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -93,27 +93,14 @@ func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID, f []id.Sce return filterTagGroups(ids, res), nil } -func (r *tagRepo) FindByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { - filter := bson.D{ - {Key: "scene", Value: id.String()}, +func (r *tagRepo) FindRootsByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + filter := bson.M{ + "scene": id.String(), + "item.parent": nil, } return r.find(ctx, nil, filter) } -func (r *tagRepo) FindGroupByScene(ctx 
context.Context, id id.SceneID) ([]*tag.Group, error) { - filter := bson.D{ - {Key: "scene", Value: id.String()}, - } - return r.findGroups(ctx, nil, filter) -} - -func (r *tagRepo) FindItemByScene(ctx context.Context, id id.SceneID) ([]*tag.Item, error) { - filter := bson.D{ - {Key: "scene", Value: id.String()}, - } - return r.findItems(ctx, nil, filter) -} - func (r *tagRepo) FindGroupByItem(ctx context.Context, tagID id.TagID, f []id.SceneID) (*tag.Group, error) { ids := []id.TagID{tagID} filter := r.sceneFilter(bson.D{ @@ -160,7 +147,7 @@ func (r *tagRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { return nil } -func (r *tagRepo) find(ctx context.Context, dst []*tag.Tag, filter bson.D) ([]*tag.Tag, error) { +func (r *tagRepo) find(ctx context.Context, dst []*tag.Tag, filter interface{}) ([]*tag.Tag, error) { c := mongodoc.TagConsumer{ Rows: dst, } @@ -170,7 +157,7 @@ func (r *tagRepo) find(ctx context.Context, dst []*tag.Tag, filter bson.D) ([]*t return c.Rows, nil } -func (r *tagRepo) findOne(ctx context.Context, filter bson.D) (tag.Tag, error) { +func (r *tagRepo) findOne(ctx context.Context, filter interface{}) (tag.Tag, error) { c := mongodoc.TagConsumer{} if err := r.client.FindOne(ctx, filter, &c); err != nil { return nil, err diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index e6e62187d..e7f1b3486 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -10,6 +10,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/shp" + "github.com/reearth/reearth-backend/pkg/tag" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" @@ -29,6 +30,7 @@ type Layer struct { commonScene commonSceneLock layerRepo repo.Layer + tagRepo repo.Tag pluginRepo repo.Plugin propertyRepo repo.Property 
propertySchemaRepo repo.PropertySchema @@ -44,6 +46,7 @@ func NewLayer(r *repo.Container) interfaces.Layer { commonScene: commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, layerRepo: r.Layer, + tagRepo: r.Tag, pluginRepo: r.Plugin, propertyRepo: r.Property, datasetRepo: r.Dataset, @@ -55,7 +58,7 @@ func NewLayer(r *repo.Container) interfaces.Layer { } } -func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Layer, error) { +func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) (layer.List, error) { scenes, err := i.OnlyReadableScenes(ctx, operator) if err != nil { return nil, err @@ -151,6 +154,14 @@ func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operat return layer.Merge(orgl, parent), nil } +func (i *Layer) FetchByTag(ctx context.Context, tag id.TagID, operator *usecase.Operator) (layer.List, error) { + scenes, err := i.OnlyReadableScenes(ctx, operator) + if err != nil { + return nil, err + } + return i.layerRepo.FindByTag(ctx, tag, scenes) +} + func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, operator *usecase.Operator) (_ *layer.Item, _ *layer.Group, err error) { tx, err := i.transaction.Begin() if err != nil { @@ -1023,21 +1034,36 @@ func (i *Layer) AttachTag(ctx context.Context, layerID id.LayerID, tagID id.TagI return nil, err } - layer, err := i.layerRepo.FindByID(ctx, layerID, scenes) + // ensure the tag exists + t, err := i.tagRepo.FindByID(ctx, tagID, scenes) if err != nil { return nil, err } - if err := layer.AttachTag(tagID); err != nil { - return nil, err - } - err = i.layerRepo.Save(ctx, layer) + l, err := i.layerRepo.FindByID(ctx, layerID, scenes) if err != nil { return nil, err } + updated := false + if tg := tag.ToTagGroup(t); tg != nil { + updated = l.Tags().Add(layer.NewTagGroup(tagID, nil)) + } else if ti := tag.ToTagItem(t); ti != nil { + if p := ti.Parent(); p != 
nil { + updated = l.Tags().FindGroup(*ti.Parent()).Add(layer.NewTagItem(ti.ID())) + } else { + updated = l.Tags().Add(layer.NewTagItem(ti.ID())) + } + } + + if updated { + if err := i.layerRepo.Save(ctx, l); err != nil { + return nil, err + } + } + tx.Commit() - return layer, nil + return l, nil } func (i *Layer) DetachTag(ctx context.Context, layerID id.LayerID, tagID id.TagID, operator *usecase.Operator) (layer.Layer, error) { @@ -1061,12 +1087,10 @@ func (i *Layer) DetachTag(ctx context.Context, layerID id.LayerID, tagID id.TagI return nil, err } - if err := layer.DetachTag(tagID); err != nil { - return nil, err - } - err = i.layerRepo.Save(ctx, layer) - if err != nil { - return nil, err + if layer.Tags().Delete(tagID) { + if err := i.layerRepo.Save(ctx, layer); err != nil { + return nil, err + } } tx.Commit() diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go index 9cb15da5f..5beb030bb 100644 --- a/internal/usecase/interactor/tag.go +++ b/internal/usecase/interactor/tag.go @@ -8,6 +8,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/tag" ) @@ -30,10 +31,10 @@ func NewTag(r *repo.Container) interfaces.Tag { } } -func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, operator *usecase.Operator) (*tag.Item, error) { +func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, operator *usecase.Operator) (*tag.Item, *tag.Group, error) { tx, err := i.transaction.Begin() if err != nil { - return nil, err + return nil, nil, err } defer func() { if err2 := tx.End(ctx); err == nil && err2 != nil { @@ -42,13 +43,22 @@ func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, }() if err := 
i.CanWriteScene(ctx, inp.SceneID, operator); err != nil { - return nil, interfaces.ErrOperationDenied + return nil, nil, interfaces.ErrOperationDenied + } + + var parent *tag.Group + if inp.Parent != nil { + parent, err = i.tagRepo.FindGroupByID(ctx, *inp.Parent, []id.SceneID{inp.SceneID}) + if err != nil { + return nil, nil, err + } } builder := tag.NewItem(). NewID(). Label(inp.Label). - Scene(inp.SceneID) + Scene(inp.SceneID). + Parent(inp.Parent) if inp.LinkedDatasetSchemaID != nil && inp.LinkedDatasetID != nil && inp.LinkedDatasetField != nil { builder = builder. LinkedDatasetFieldID(inp.LinkedDatasetField). @@ -57,15 +67,25 @@ func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, } item, err := builder.Build() if err != nil { - return nil, err + return nil, nil, err } - err = i.tagRepo.Save(ctx, item) - if err != nil { - return nil, err + if parent != nil { + parent.Tags().Add(item.ID()) } + + itemt := tag.Tag(item) + tags := []*tag.Tag{&itemt} + if parent != nil { + parentt := tag.Tag(parent) + tags = append(tags, &parentt) + } + if err := i.tagRepo.SaveAll(ctx, tags); err != nil { + return nil, nil, err + } + tx.Commit() - return item, nil + return item, parent, nil } func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupParam, operator *usecase.Operator) (*tag.Group, error) { @@ -118,7 +138,7 @@ func (i *Tag) FetchByScene(ctx context.Context, sid id.SceneID, operator *usecas return nil, err } - return i.tagRepo.FindByScene(ctx, sid) + return i.tagRepo.FindRootsByScene(ctx, sid) } func (i *Tag) FetchItem(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Item, error) { @@ -139,51 +159,6 @@ func (i *Tag) FetchGroup(ctx context.Context, ids []id.TagID, operator *usecase. 
return i.tagRepo.FindGroupByIDs(ctx, ids, scenes) } -func (i *Tag) FetchGroupsByLayer(ctx context.Context, lid id.LayerID, operator *usecase.Operator) ([]*tag.Group, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) - if err != nil { - return nil, err - } - - layer, err := i.layerRepo.FindByID(ctx, lid, scenes) - if err != nil { - return nil, err - } - - return i.tagRepo.FindGroupByIDs(ctx, layer.Tags().Tags(), scenes) -} - -func (i *Tag) FetchGroupsByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Group, error) { - err := i.CanReadScene(ctx, sid, operator) - if err != nil { - return nil, err - } - - return i.tagRepo.FindGroupByScene(ctx, sid) -} - -func (i *Tag) FetchItemsByLayer(ctx context.Context, lid id.LayerID, operator *usecase.Operator) ([]*tag.Item, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) - if err != nil { - return nil, err - } - - layer, err := i.layerRepo.FindByID(ctx, lid, scenes) - if err != nil { - return nil, err - } - return i.tagRepo.FindItemByIDs(ctx, layer.Tags().Tags(), scenes) -} - -func (i *Tag) FetchItemsByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Item, error) { - err := i.CanReadScene(ctx, sid, operator) - if err != nil { - return nil, err - } - - return i.tagRepo.FindItemByScene(ctx, sid) -} - func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemToGroupParam, operator *usecase.Operator) (*tag.Group, error) { tx, err := i.transaction.Begin() if err != nil { @@ -199,25 +174,34 @@ func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemTo if err != nil { return nil, err } + // make sure item exist - _, err = i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes) + ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes) if err != nil { return nil, err } + if ti.Parent() != nil { + return nil, errors.New("tag is already added to the group") + } tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID, 
scenes)
 	if err != nil {
 		return nil, err
 	}
-	if !tg.Tags().Has(inp.ItemID) {
-		tg.Tags().Add(inp.ItemID)
-	} else {
+
+	if tg.Tags().Has(inp.ItemID) {
 		return nil, errors.New("tag item is already attached to the group")
 	}
-	err = i.tagRepo.Save(ctx, tg)
-	if err != nil {
+
+	tg.Tags().Add(inp.ItemID)
+	ti.SetParent(tg.ID().Ref())
+
+	tgt := tag.Tag(tg)
+	tit := tag.Tag(ti)
+	if err := i.tagRepo.SaveAll(ctx, []*tag.Tag{&tgt, &tit}); err != nil {
 		return nil, err
 	}
+
 	tx.Commit()
 	return tg, nil
 }
@@ -237,8 +221,9 @@
 	if err != nil {
 		return nil, err
 	}
+
 	// make sure item exist
-	_, err = i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes)
+	ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes)
 	if err != nil {
 		return nil, err
 	}
@@ -247,14 +232,17 @@
 	if err != nil {
 		return nil, err
 	}
-	if tg.Tags().Has(inp.ItemID) {
-		tg.Tags().Remove(inp.ItemID)
-	} else {
+
+	if !tg.Tags().Has(inp.ItemID) {
 		return nil, errors.New("tag item is not attached to the group")
 	}
-	err = i.tagRepo.Save(ctx, tg)
-	if err != nil {
+	tg.Tags().Remove(inp.ItemID)
+	ti.SetParent(nil)
+
+	tgt := tag.Tag(tg)
+	tit := tag.Tag(ti)
+	if err := i.tagRepo.SaveAll(ctx, []*tag.Tag{&tgt, &tit}); err != nil {
 		return nil, err
 	}
 
@@ -294,11 +282,11 @@
 	return &tg, nil
 }
 
-func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Operator) (*id.TagID, error) {
+func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Operator) (*id.TagID, layer.List, error) {
 	tx, err := i.transaction.Begin()
 	if err != nil {
-		return nil, err
+		return nil, nil, err
 	}
 	defer func() {
 		if err2 := tx.End(ctx); err == nil && err2 != nil {
@@ -308,55 +296,50 @@
 	scenes, err := 
i.OnlyWritableScenes(ctx, operator) if err != nil { - return nil, err + return nil, nil, err } t, err := i.tagRepo.FindByID(ctx, tagID, scenes) if err != nil { - return nil, err + return nil, nil, err } if group := tag.ToTagGroup(t); group != nil { - tags := group.Tags() - if len(tags.Tags()) != 0 { - return nil, interfaces.ErrNonemptyTagGroupCannotDelete + if len(group.Tags().Tags()) != 0 { + return nil, nil, interfaces.ErrNonemptyTagGroupCannotDelete } } if item := tag.ToTagItem(t); item != nil { g, err := i.tagRepo.FindGroupByItem(ctx, item.ID(), scenes) if err != nil && !errors.Is(rerror.ErrNotFound, err) { - return nil, err + return nil, nil, err } if g != nil { g.Tags().Remove(item.ID()) - - err = i.tagRepo.Save(ctx, g) - if err != nil { - return nil, err + if err := i.tagRepo.Save(ctx, g); err != nil { + return nil, nil, err } } } ls, err := i.layerRepo.FindByTag(ctx, tagID, scenes) if err != nil && !errors.Is(rerror.ErrNotFound, err) { - return nil, err + return nil, nil, err } - if ls != nil && len(ls) > 0 { + if len(ls) != 0 { for _, l := range ls.Deref() { - err = l.DetachTag(tagID) - if err != nil { - return nil, err - } + _ = l.Tags().Delete(tagID) } if err := i.layerRepo.SaveAll(ctx, ls); err != nil { - return nil, err + return nil, nil, err } } if err := i.tagRepo.Remove(ctx, tagID); err != nil { - return nil, err + return nil, nil, err } - return &tagID, nil + + return &tagID, ls, nil } diff --git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go index 887f4563d..c4c9f2904 100644 --- a/internal/usecase/interfaces/layer.go +++ b/internal/usecase/interfaces/layer.go @@ -84,13 +84,14 @@ var ( ) type Layer interface { - Fetch(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Layer, error) + Fetch(context.Context, []id.LayerID, *usecase.Operator) (layer.List, error) FetchGroup(context.Context, []id.LayerID, *usecase.Operator) ([]*layer.Group, error) FetchItem(context.Context, []id.LayerID, *usecase.Operator) 
([]*layer.Item, error) FetchParent(context.Context, id.LayerID, *usecase.Operator) (*layer.Group, error) FetchByProperty(context.Context, id.PropertyID, *usecase.Operator) (layer.Layer, error) FetchMerged(context.Context, id.LayerID, *id.LayerID, *usecase.Operator) (*layer.Merged, error) FetchParentAndMerged(context.Context, id.LayerID, *usecase.Operator) (*layer.Merged, error) + FetchByTag(context.Context, id.TagID, *usecase.Operator) (layer.List, error) AddItem(context.Context, AddLayerItemInput, *usecase.Operator) (*layer.Item, *layer.Group, error) AddGroup(context.Context, AddLayerGroupInput, *usecase.Operator) (*layer.Group, *layer.Group, error) Remove(context.Context, id.LayerID, *usecase.Operator) (id.LayerID, *layer.Group, error) diff --git a/internal/usecase/interfaces/tag.go b/internal/usecase/interfaces/tag.go index af703d93f..0aec3dbbe 100644 --- a/internal/usecase/interfaces/tag.go +++ b/internal/usecase/interfaces/tag.go @@ -6,6 +6,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/tag" ) @@ -16,6 +17,7 @@ var ( type CreateTagItemParam struct { Label string SceneID id.SceneID + Parent *id.TagID LinkedDatasetSchemaID *id.DatasetSchemaID LinkedDatasetID *id.DatasetID LinkedDatasetField *id.DatasetSchemaFieldID @@ -46,14 +48,10 @@ type Tag interface { FetchByScene(context.Context, id.SceneID, *usecase.Operator) ([]*tag.Tag, error) FetchItem(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Item, error) FetchGroup(context.Context, []id.TagID, *usecase.Operator) ([]*tag.Group, error) - FetchGroupsByLayer(context.Context, id.LayerID, *usecase.Operator) ([]*tag.Group, error) - FetchGroupsByScene(context.Context, id.SceneID, *usecase.Operator) ([]*tag.Group, error) - FetchItemsByLayer(context.Context, id.LayerID, *usecase.Operator) ([]*tag.Item, error) - FetchItemsByScene(context.Context, id.SceneID, 
*usecase.Operator) ([]*tag.Item, error) - CreateItem(context.Context, CreateTagItemParam, *usecase.Operator) (*tag.Item, error) + CreateItem(context.Context, CreateTagItemParam, *usecase.Operator) (*tag.Item, *tag.Group, error) CreateGroup(context.Context, CreateTagGroupParam, *usecase.Operator) (*tag.Group, error) AttachItemToGroup(context.Context, AttachItemToGroupParam, *usecase.Operator) (*tag.Group, error) DetachItemFromGroup(context.Context, DetachItemToGroupParam, *usecase.Operator) (*tag.Group, error) UpdateTag(context.Context, UpdateTagParam, *usecase.Operator) (*tag.Tag, error) - Remove(context.Context, id.TagID, *usecase.Operator) (*id.TagID, error) + Remove(context.Context, id.TagID, *usecase.Operator) (*id.TagID, layer.List, error) } diff --git a/internal/usecase/repo/tag.go b/internal/usecase/repo/tag.go index 2554989a2..456642f7c 100644 --- a/internal/usecase/repo/tag.go +++ b/internal/usecase/repo/tag.go @@ -14,10 +14,8 @@ type Tag interface { FindItemByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Item, error) FindGroupByID(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) FindGroupByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Group, error) - FindByScene(context.Context, id.SceneID) ([]*tag.Tag, error) + FindRootsByScene(context.Context, id.SceneID) ([]*tag.Tag, error) FindGroupByItem(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) - FindGroupByScene(context.Context, id.SceneID) ([]*tag.Group, error) - FindItemByScene(context.Context, id.SceneID) ([]*tag.Item, error) Save(context.Context, tag.Tag) error SaveAll(context.Context, []*tag.Tag) error Remove(context.Context, id.TagID) error diff --git a/pkg/layer/builder.go b/pkg/layer/builder.go index e28e27956..0eadb525f 100644 --- a/pkg/layer/builder.go +++ b/pkg/layer/builder.go @@ -2,7 +2,6 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/tag" ) type Builder struct { @@ -73,7 +72,7 @@ func (b 
*Builder) Infobox(infobox *Infobox) *Builder { return b } -func (b *Builder) Tags(tags *tag.List) *Builder { +func (b *Builder) Tags(tags *TagList) *Builder { b.base.tags = tags return b } diff --git a/pkg/layer/group.go b/pkg/layer/group.go index e07284943..ca1a6c333 100644 --- a/pkg/layer/group.go +++ b/pkg/layer/group.go @@ -3,7 +3,6 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" - "github.com/reearth/reearth-backend/pkg/tag" ) type Group struct { @@ -182,22 +181,9 @@ func (l *Group) ValidateProperties(pm property.Map) error { return l.layerBase.ValidateProperties(pm) } -func (l *Group) Tags() *tag.List { - return l.layerBase.tags -} - -func (l *Group) AttachTag(t id.TagID) error { - if l.layerBase.tags.Has(t) { - return ErrDuplicatedTag +func (l *Group) Tags() *TagList { + if l.layerBase.tags == nil { + l.layerBase.tags = NewTagList(nil) } - l.layerBase.tags.Add(t) - return nil -} - -func (l *Group) DetachTag(t id.TagID) error { - if !l.layerBase.tags.Has(t) { - return ErrTagNotFound - } - l.layerBase.tags.Remove(t) - return nil + return l.layerBase.tags } diff --git a/pkg/layer/group_builder.go b/pkg/layer/group_builder.go index 5c5e67ddf..a271589c8 100644 --- a/pkg/layer/group_builder.go +++ b/pkg/layer/group_builder.go @@ -2,7 +2,6 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/tag" ) func GroupFromLayer(l Layer) *Group { @@ -112,7 +111,7 @@ func (b *GroupBuilder) LinkedDatasetSchema(linkedDatasetSchema *id.DatasetSchema return b } -func (b *GroupBuilder) Tags(tags *tag.List) *GroupBuilder { +func (b *GroupBuilder) Tags(tags *TagList) *GroupBuilder { b.l.tags = tags return b } diff --git a/pkg/layer/group_builder_test.go b/pkg/layer/group_builder_test.go index 14194147e..cd03a2623 100644 --- a/pkg/layer/group_builder_test.go +++ b/pkg/layer/group_builder_test.go @@ -3,13 +3,11 @@ package layer import ( "testing" - 
"github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/tag" "github.com/stretchr/testify/assert" ) func TestGroupBuilder_Tags(t *testing.T) { - l := tag.NewListFromTags([]id.TagID{id.NewTagID()}) + l := NewTagList(nil) b := NewGroup().NewID().Tags(l).MustBuild() assert.Same(t, l, b.Tags()) } diff --git a/pkg/layer/group_test.go b/pkg/layer/group_test.go index d4d3bea48..555d1f20c 100644 --- a/pkg/layer/group_test.go +++ b/pkg/layer/group_test.go @@ -3,8 +3,6 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/tag" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -13,7 +11,6 @@ var _ Layer = &Group{} var l1 = id.MustLayerID(id.New().String()) var l2 = id.MustLayerID(id.New().String()) -var tags = []id.TagID{id.NewTagID()} var group = Group{ layerBase: layerBase{ id: id.MustLayerID(id.New().String()), @@ -23,7 +20,7 @@ var group = Group{ extension: id.PluginExtensionID("foo").Ref(), property: nil, infobox: nil, - tags: tag.NewListFromTags(tags), + tags: nil, scene: id.SceneID{}, }, layers: &IDList{ @@ -139,15 +136,3 @@ func TestGroup_MoveLayerFrom(t *testing.T) { group.MoveLayerFrom(l1, 1, &group) assert.Equal(t, l1, group.Layers().Layers()[1]) } - -func TestGroup_Tags(t *testing.T) { - tt := id.NewTagID() - err := group.AttachTag(tt) - assert.NoError(t, err) - tl := tags - tl = append(tl, tt) - assert.Equal(t, tl, group.Tags().Tags()) - err = group.DetachTag(tt) - assert.NoError(t, err) - assert.Equal(t, tags, group.Tags().Tags()) -} diff --git a/pkg/layer/item.go b/pkg/layer/item.go index ca92a8c57..abf81999c 100644 --- a/pkg/layer/item.go +++ b/pkg/layer/item.go @@ -3,7 +3,6 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" - "github.com/reearth/reearth-backend/pkg/tag" ) type Item struct { @@ -151,22 +150,9 @@ func (l *Item) ValidateProperties(pm property.Map) error { return 
l.layerBase.ValidateProperties(pm) } -func (l *Item) Tags() *tag.List { - return l.layerBase.tags -} - -func (l *Item) AttachTag(t id.TagID) error { - if l.layerBase.tags.Has(t) { - return ErrDuplicatedTag +func (l *Item) Tags() *TagList { + if l.layerBase.tags == nil { + l.layerBase.tags = NewTagList(nil) } - l.layerBase.tags.Add(t) - return nil -} - -func (l *Item) DetachTag(t id.TagID) error { - if !l.layerBase.tags.Has(t) { - return ErrTagNotFound - } - l.layerBase.tags.Remove(t) - return nil + return l.layerBase.tags } diff --git a/pkg/layer/item_builder.go b/pkg/layer/item_builder.go index 68a66b21d..c358af080 100644 --- a/pkg/layer/item_builder.go +++ b/pkg/layer/item_builder.go @@ -2,7 +2,6 @@ package layer import ( "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/tag" ) func ItemFromLayer(l Layer) *Item { @@ -102,7 +101,7 @@ func (b *ItemBuilder) LinkedDataset(linkedDataset *id.DatasetID) *ItemBuilder { return b } -func (b *ItemBuilder) Tags(tags *tag.List) *ItemBuilder { +func (b *ItemBuilder) Tags(tags *TagList) *ItemBuilder { b.l.tags = tags return b } diff --git a/pkg/layer/item_builder_test.go b/pkg/layer/item_builder_test.go index 2c76c6a2f..4d83b01d2 100644 --- a/pkg/layer/item_builder_test.go +++ b/pkg/layer/item_builder_test.go @@ -3,13 +3,11 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/tag" "github.com/stretchr/testify/assert" ) func TestItemBuilder_Tags(t *testing.T) { - l := tag.NewListFromTags([]id.TagID{id.NewTagID()}) + l := NewTagList(nil) b := NewItem().NewID().Tags(l).MustBuild() assert.Same(t, l, b.Tags()) } diff --git a/pkg/layer/item_test.go b/pkg/layer/item_test.go index d65d9203b..a803e2dec 100644 --- a/pkg/layer/item_test.go +++ b/pkg/layer/item_test.go @@ -1,39 +1,3 @@ package layer -import ( - "testing" - - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/tag" - 
"github.com/stretchr/testify/assert" -) - var _ Layer = &Item{} - -var tags2 = []id.TagID{id.NewTagID()} -var item = Item{ - layerBase: layerBase{ - id: id.MustLayerID(id.New().String()), - name: "xxx", - visible: false, - plugin: id.MustPluginID("aaa~1.1.1").Ref(), - extension: id.PluginExtensionID("foo").Ref(), - property: nil, - infobox: nil, - tags: tag.NewListFromTags(tags2), - scene: id.SceneID{}, - }, - linkedDataset: nil, -} - -func TestItem_Tags(t *testing.T) { - tt := id.NewTagID() - err := item.AttachTag(tt) - assert.NoError(t, err) - tl := tags2 - tl = append(tl, tt) - assert.Equal(t, tl, item.Tags().Tags()) - err = item.DetachTag(tt) - assert.NoError(t, err) - assert.Equal(t, tags2, item.Tags().Tags()) -} diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go index 56013b295..86461c251 100644 --- a/pkg/layer/layer.go +++ b/pkg/layer/layer.go @@ -6,7 +6,6 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" - "github.com/reearth/reearth-backend/pkg/tag" ) var ( @@ -25,15 +24,13 @@ type Layer interface { HasInfobox() bool Infobox() *Infobox Scene() id.SceneID - Tags() *tag.List + Tags() *TagList Rename(string) SetVisible(bool) SetInfobox(*Infobox) SetPlugin(*id.PluginID) Properties() []id.PropertyID ValidateProperties(property.Map) error - AttachTag(t id.TagID) error - DetachTag(t id.TagID) error } func ToLayerGroup(l Layer) *Group { @@ -81,7 +78,7 @@ type layerBase struct { property *id.PropertyID infobox *Infobox scene id.SceneID - tags *tag.List + tags *TagList } func (l *layerBase) ID() id.LayerID { diff --git a/pkg/layer/tag.go b/pkg/layer/tag.go new file mode 100644 index 000000000..d3446473b --- /dev/null +++ b/pkg/layer/tag.go @@ -0,0 +1,226 @@ +package layer + +import "github.com/reearth/reearth-backend/pkg/id" + +type TagID = id.TagID + +var NewTagID = id.NewTagID + +type TagList struct { + tags []Tag +} + +type Tag interface { + ID() TagID + Clone() Tag +} + +type TagItem struct { + id TagID 
+} + +type TagGroup struct { + id TagID + children []*TagItem +} + +func NewTagItem(t TagID) *TagItem { + if t.IsNil() { + return nil + } + return &TagItem{ + id: t, + } +} + +func (t *TagItem) ID() TagID { + if t == nil { + return TagID{} + } + return t.id +} + +func TagItemFrom(t Tag) *TagItem { + t2, _ := t.(*TagItem) + return t2 +} + +func (t *TagItem) Clone() Tag { + return t.CloneItem() +} + +func (t *TagItem) CloneItem() *TagItem { + if t == nil { + return nil + } + return NewTagItem(t.id) +} + +func NewTagGroup(t TagID, children []*TagItem) *TagGroup { + if t.IsNil() { + return nil + } + return &TagGroup{ + id: t, + children: append(children[:0:0], children...), + } +} + +func TagGroupFrom(t Tag) *TagGroup { + t2, _ := t.(*TagGroup) + return t2 +} + +func (t *TagGroup) ID() TagID { + if t == nil { + return TagID{} + } + return t.id +} + +func (t *TagGroup) Children() []*TagItem { + if t == nil { + return nil + } + return append(t.children[:0:0], t.children...) +} + +func (t *TagGroup) Find(ti TagID) *TagItem { + if t == nil { + return nil + } + for _, tag := range t.children { + if tag.ID() == ti { + return tag + } + } + return nil +} + +func (t *TagGroup) Add(ti *TagItem) bool { + if t == nil || ti == nil || t.Find(ti.ID()) != nil { + return false + } + t.children = append(t.children, ti) + return true +} + +func (t *TagGroup) Delete(ti TagID) (res bool) { + if t == nil { + return + } + for i := 0; i < len(t.children); i++ { + c := t.children[i] + if c.ID() == ti { + t.children = append(t.children[:i], t.children[i+1:]...) + i-- + res = true + } + } + return +} + +func (t *TagGroup) Clone() Tag { + return t.CloneGroup() +} + +func (t *TagGroup) CloneGroup() *TagGroup { + if t == nil { + return nil + } + return NewTagGroup(t.id, t.children) +} + +func NewTagList(tags []Tag) *TagList { + return &TagList{tags: append(tags[:0:0], tags...)} +} + +func (t *TagList) Tags() []Tag { + if t == nil { + return nil + } + return append(t.tags[:0:0], t.tags...) 
+} + +func (t *TagList) Add(ti Tag) bool { + if t == nil || ti == nil || t.Has(ti.ID()) || TagItemFrom(ti) == nil && TagGroupFrom(ti) == nil { + return false + } + t.tags = append(t.tags, ti) + return true +} + +func (t *TagList) Delete(ti TagID) (res bool) { + if t == nil { + return + } + for i := 0; i < len(t.tags); i++ { + c := t.tags[i] + if c.ID() == ti { + t.tags = append(t.tags[:i], t.tags[i+1:]...) + i-- + res = true + } else if TagGroupFrom(c).Delete(ti) { + res = true + } + } + return +} + +func (t *TagList) Has(ti TagID) bool { + g, i := t.Find(ti) + return g != nil || i != nil +} + +func (t *TagList) Find(ti TagID) (*TagGroup, *TagItem) { + if t == nil { + return nil, nil + } + for _, t := range t.tags { + g := TagGroupFrom(t) + if t.ID() == ti { + return g, TagItemFrom(t) + } + if i := g.Find(ti); i != nil { + return g, i + } + } + return nil, nil +} + +func (t *TagList) FindItem(ti TagID) *TagItem { + _, i := t.Find(ti) + return i +} + +func (t *TagList) FindGroup(ti TagID) *TagGroup { + g, i := t.Find(ti) + if i != nil { + return nil + } + return g +} + +func (t *TagList) RootItems() []*TagItem { + if t == nil { + return nil + } + items := make([]*TagItem, 0, len(t.tags)) + for _, t := range t.tags { + if i := TagItemFrom(t); i != nil { + items = append(items, i) + } + } + return items +} + +func (t *TagList) IsEmpty() bool { + return t == nil || len(t.tags) == 0 +} + +func (t *TagList) Clone() *TagList { + if t == nil { + return nil + } + return NewTagList(t.tags) +} diff --git a/pkg/layer/tag_test.go b/pkg/layer/tag_test.go new file mode 100644 index 000000000..3768a2f21 --- /dev/null +++ b/pkg/layer/tag_test.go @@ -0,0 +1,1086 @@ +package layer + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var _ Tag = &TagItem{} +var _ Tag = &TagGroup{} + +func TestNewTagItem(t *testing.T) { + tag := NewTagID() + type args struct { + t TagID + } + tests := []struct { + name string + args args + want *TagItem + }{ + { + name: "ok", + args: 
args{t: tag}, + want: &TagItem{id: tag}, + }, + { + name: "nil id", + args: args{t: TagID{}}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewTagItem(tt.args.t)) + }) + } +} + +func TestTagItemFrom(t *testing.T) { + tag := NewTagID() + type args struct { + t Tag + } + tests := []struct { + name string + args args + want *TagItem + }{ + { + name: "item", + args: args{t: &TagItem{id: tag}}, + want: &TagItem{id: tag}, + }, + { + name: "group", + args: args{t: &TagGroup{id: tag}}, + want: nil, + }, + { + name: "nil item", + args: args{t: (*TagItem)(nil)}, + want: nil, + }, + { + name: "nil group", + args: args{t: (*TagGroup)(nil)}, + want: nil, + }, + { + name: "nil", + args: args{t: nil}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, TagItemFrom(tt.args.t)) + }) + } +} + +func TestTagItem_ID(t *testing.T) { + tag := NewTagID() + tests := []struct { + name string + target *TagItem + want TagID + }{ + { + name: "ok", + target: &TagItem{id: tag}, + want: tag, + }, + { + name: "nil", + target: nil, + want: TagID{}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.ID()) + }) + } +} + +func TestTagItem_Clone(t *testing.T) { + tag := NewTagID() + tests := []struct { + name string + target *TagItem + }{ + { + name: "ok", + target: &TagItem{id: tag}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestTagItem_CloneItem(t *testing.T) { + tag := NewTagID() + tests := []struct { + name string + target *TagItem + }{ + { + name: "ok", + target: &TagItem{id: tag}, 
+ }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.CloneItem() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestNewTagGroup(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + t TagID + children []*TagItem + } + tests := []struct { + name string + args args + want *TagGroup + }{ + { + name: "ok", + args: args{ + t: tag1, + children: []*TagItem{ + {id: tag2}, + {id: tag3}, + }, + }, + want: &TagGroup{ + id: tag1, + children: []*TagItem{ + {id: tag2}, + {id: tag3}, + }, + }, + }, + { + name: "nil id", + args: args{t: TagID{}}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, NewTagGroup(tt.args.t, tt.args.children)) + }) + } +} + +func TestTagGroupFrom(t *testing.T) { + tag := NewTagID() + type args struct { + t Tag + } + tests := []struct { + name string + args args + want *TagGroup + }{ + { + name: "group", + args: args{t: &TagGroup{id: tag}}, + want: &TagGroup{id: tag}, + }, + { + name: "item", + args: args{t: &TagItem{id: tag}}, + want: nil, + }, + { + name: "nil item", + args: args{t: (*TagItem)(nil)}, + want: nil, + }, + { + name: "nil group", + args: args{t: (*TagGroup)(nil)}, + want: nil, + }, + { + name: "nil", + args: args{t: nil}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, TagGroupFrom(tt.args.t)) + }) + } +} + +func TestTagGroup_ID(t *testing.T) { + tag := NewTagID() + tests := []struct { + name string + target *TagGroup + want TagID + }{ + { + name: "ok", + target: &TagGroup{id: tag}, + want: tag, + }, + { + name: "nil", + target: nil, + want: TagID{}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t 
*testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.ID()) + }) + } +} + +func TestTagGroup_Children(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + + tests := []struct { + name string + target *TagGroup + want []*TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + want: []*TagItem{{id: tag2}}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Children() + assert.Equal(t, tt.want, res) + if tt.want != nil { + assert.NotSame(t, tt.target.children, res) + } + }) + } +} + +func TestTagGroup_Find(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + tests := []struct { + name string + target *TagGroup + args args + want *TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag2}, + want: &TagItem{id: tag2}, + }, + { + name: "not found", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag1}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Find(tt.args.ti) + assert.Equal(t, tt.want, res) + }) + } +} + +func TestTagGroup_Add(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti *TagItem + } + tests := []struct { + name string + target *TagGroup + args args + want bool + wantChildren []*TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + args: args{ti: &TagItem{id: tag2}}, + want: true, + wantChildren: []*TagItem{{id: tag3}, {id: tag2}}, + }, + { + name: "not added", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, 
+ args: args{ti: &TagItem{id: tag2}}, + want: false, + wantChildren: []*TagItem{{id: tag2}, {id: tag3}}, + }, + { + name: "nil item", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + args: args{ti: nil}, + wantChildren: []*TagItem{{id: tag2}}, + }, + { + name: "nil", + target: nil, + args: args{ti: &TagItem{id: tag2}}, + wantChildren: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Add(tt.args.ti)) + assert.Equal(t, tt.wantChildren, tt.target.Children()) + }) + } +} + +func TestTagGroup_Delete(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + tests := []struct { + name string + target *TagGroup + args args + want bool + wantChildren []*TagItem + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag2}, + want: true, + wantChildren: []*TagItem{{id: tag3}}, + }, + { + name: "not found", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}, {id: tag3}}}, + args: args{ti: tag1}, + want: false, + wantChildren: []*TagItem{{id: tag2}, {id: tag3}}, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + wantChildren: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Delete(tt.args.ti)) + assert.Equal(t, tt.wantChildren, tt.target.Children()) + }) + } +} + +func TestTagGroup_Clone(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + + tests := []struct { + name string + target *TagGroup + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + 
assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestTagGroup_CloneGroup(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + + tests := []struct { + name string + target *TagGroup + }{ + { + name: "ok", + target: &TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.CloneGroup() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + if tt.target.children != nil { + assert.NotSame(t, tt.target.children, res.children) + } + } + }) + } +} + +func TestNewTagList(t *testing.T) { + tag := NewTagID() + + type args struct { + tags []Tag + } + tests := []struct { + name string + args args + want *TagList + }{ + { + name: "ok", + args: args{tags: []Tag{&TagItem{id: tag}}}, + want: &TagList{tags: []Tag{&TagItem{id: tag}}}, + }, + { + name: "nil", + args: args{tags: nil}, + want: &TagList{tags: nil}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := NewTagList(tt.args.tags) + assert.Equal(t, tt.want, res) + assert.NotSame(t, res.tags, tt.args.tags) + }) + } +} + +func TestTagList_Tags(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + tests := []struct { + name string + target *TagList + want []Tag + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{&TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, &TagItem{id: tag3}}, + }, + want: []Tag{&TagGroup{id: tag1, children: []*TagItem{{id: tag2}}}, &TagItem{id: tag3}}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Tags() + assert.Equal(t, tt.want, res) + if tt.want != nil { + assert.NotSame(t, tt.target.tags, res) + } + }) + } +} + +func TestTagList_Add(t *testing.T) { + tag1 := 
NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti Tag + } + tests := []struct { + name string + target *TagList + args args + want bool + wantChildren []Tag + }{ + { + name: "item added", + target: &TagList{ + tags: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: &TagItem{id: tag2}}, + want: true, + wantChildren: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + &TagItem{id: tag2}, + }, + }, + { + name: "group added", + target: &TagList{ + tags: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: &TagGroup{id: tag2}}, + want: true, + wantChildren: []Tag{ + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + &TagGroup{id: tag2}, + }, + }, + { + name: "not added", + target: &TagList{ + tags: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + args: args{ti: &TagGroup{id: tag2}}, + want: false, + wantChildren: []Tag{ + &TagItem{id: tag2}, + &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}, + }, + }, + { + name: "nil tag", + target: &TagList{ + tags: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + args: args{ti: nil}, + want: false, + wantChildren: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + { + name: "nil item tag", + target: &TagList{ + tags: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + args: args{ti: (*TagItem)(nil)}, + want: false, + wantChildren: []Tag{&TagItem{id: tag2}, &TagGroup{id: tag1, children: []*TagItem{{id: tag3}}}}, + }, + { + name: "nil", + args: args{ti: &TagGroup{id: tag2}}, + target: nil, + wantChildren: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Add(tt.args.ti)) + assert.Equal(t, tt.wantChildren, tt.target.Tags()) + }) + } +} + +func 
TestTagList_Delete(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + tag4 := NewTagID() + + type args struct { + ti TagID + } + tests := []struct { + name string + target *TagList + args args + want bool + wantTags []Tag + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + want: true, + wantTags: []Tag{ + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag4}, + want: false, + wantTags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + wantTags: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Delete(tt.args.ti)) + assert.Equal(t, tt.wantTags, tt.target.Tags()) + }) + } +} + +func TestTagList_Find(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + tag4 := NewTagID() + + type args struct { + ti TagID + } + tests := []struct { + name string + target *TagList + args args + wantGroup *TagGroup + wantItem *TagItem + }{ + { + name: "group", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag2}, + wantGroup: &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + wantItem: nil, + }, + { + name: "item", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag3}, + wantGroup: &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + wantItem: &TagItem{id: tag3}, + }, + { + name: "root item", + target: &TagList{ + tags: []Tag{ + &TagItem{id: 
tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + wantGroup: nil, + wantItem: &TagItem{id: tag1}, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag4}, + wantGroup: nil, + wantItem: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + wantGroup: nil, + wantItem: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + resGroup, resItem := tt.target.Find(tt.args.ti) + assert.Equal(t, tt.wantGroup, resGroup) + assert.Equal(t, tt.wantItem, resItem) + }) + } +} + +func TestTagList_FindGroup(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + tests := []struct { + name string + target *TagList + args args + want *TagGroup + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag2}, + want: &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.FindGroup(tt.args.ti)) + }) + } +} + +func TestTagList_FindItem(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + type args struct { + ti TagID + } + tests := []struct { + name string + target *TagList + args args + want *TagItem + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: 
args{ti: tag3}, + want: &TagItem{id: tag3}, + }, + { + name: "root item", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag1}, + want: &TagItem{id: tag1}, + }, + { + name: "not found", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + args: args{ti: tag2}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{ti: tag1}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.FindItem(tt.args.ti)) + }) + } +} + +func TestTagList_RootItems(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + tests := []struct { + name string + target *TagList + want []*TagItem + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + want: []*TagItem{{id: tag1}}, + }, + { + name: "no roots", + target: &TagList{ + tags: []Tag{ + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + want: []*TagItem{}, + }, + { + name: "empty", + target: &TagList{}, + want: []*TagItem{}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.RootItems()) + }) + } +} + +func TestTagList_IsEmpty(t *testing.T) { + tag := NewTagID() + + tests := []struct { + name string + target *TagList + want bool + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{&TagItem{id: tag}}, + }, + want: false, + }, + { + name: "empty", + target: &TagList{}, + want: true, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } 
+} + +func TestTagList_Clone(t *testing.T) { + tag1 := NewTagID() + tag2 := NewTagID() + tag3 := NewTagID() + + tests := []struct { + name string + target *TagList + }{ + { + name: "ok", + target: &TagList{ + tags: []Tag{ + &TagItem{id: tag1}, + &TagGroup{id: tag2, children: []*TagItem{{id: tag3}}}, + }, + }, + }, + { + name: "nil", + target: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.Clone() + assert.Equal(t, tt.target, res) + if tt.target != nil { + assert.NotSame(t, tt.target, res) + } + }) + } +} diff --git a/pkg/tag/item.go b/pkg/tag/item.go index ee817289a..efefa0274 100644 --- a/pkg/tag/item.go +++ b/pkg/tag/item.go @@ -4,11 +4,16 @@ import "github.com/reearth/reearth-backend/pkg/id" type Item struct { tag + parent *id.TagID linkedDatasetFieldID *id.DatasetSchemaFieldID linkedDatasetID *id.DatasetID linkedDatasetSchemaID *id.DatasetSchemaID } +func (i *Item) Parent() *id.TagID { + return i.parent.CopyRef() +} + func (i *Item) LinkedDatasetFieldID() *id.DatasetSchemaFieldID { return i.linkedDatasetFieldID.CopyRef() } @@ -20,3 +25,10 @@ func (i *Item) LinkedDatasetID() *id.DatasetID { func (i *Item) LinkedDatasetSchemaID() *id.DatasetSchemaID { return i.linkedDatasetSchemaID.CopyRef() } + +func (i *Item) SetParent(p *id.TagID) { + if i == nil { + return + } + i.parent = p.CopyRef() +} diff --git a/pkg/tag/item_builder.go b/pkg/tag/item_builder.go index 6fb0dbdee..4d2cb79a1 100644 --- a/pkg/tag/item_builder.go +++ b/pkg/tag/item_builder.go @@ -51,6 +51,11 @@ func (b *ItemBuilder) Scene(sid id.SceneID) *ItemBuilder { return b } +func (b *ItemBuilder) Parent(p *id.TagID) *ItemBuilder { + b.i.parent = p.CopyRef() + return b +} + func (b *ItemBuilder) LinkedDatasetFieldID(dfid *id.DatasetSchemaFieldID) *ItemBuilder { b.i.linkedDatasetFieldID = dfid return b diff --git a/schema.graphql b/schema.graphql index 746cc817a..bd8c88050 100644 --- a/schema.graphql +++ b/schema.graphql @@ 
-318,7 +318,6 @@ enum PluginExtensionType { INFOBOX } - type PluginExtension { extensionId: PluginExtensionID! pluginId: PluginID! @@ -660,12 +659,9 @@ interface Layer { plugin: Plugin extension: PluginExtension scenePlugin: ScenePlugin - tagIds: [ID!]! - tags: [Tag!]! @goField(forceResolver: true) + tags: [LayerTag!]! } -union Layers = LayerItem | LayerGroup - enum LayerEncodingFormat { KML CZML @@ -686,6 +682,7 @@ type LayerItem implements Layer { # parentId will not be always set parentId: ID linkedDatasetId: ID + tags: [LayerTag!]! parent: LayerGroup @goField(forceResolver: true) property: Property @goField(forceResolver: true) plugin: Plugin @goField(forceResolver: true) @@ -694,8 +691,6 @@ type LayerItem implements Layer { merged: MergedLayer @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) - tagIds: [ID!]! - tags: [Tag!]! @goField(forceResolver: true) } type LayerGroup implements Layer { @@ -712,6 +707,7 @@ type LayerGroup implements Layer { linkedDatasetSchemaId: ID root: Boolean! layerIds: [ID!]! + tags: [LayerTag!]! parent: LayerGroup @goField(forceResolver: true) property: Property @goField(forceResolver: true) plugin: Plugin @goField(forceResolver: true) @@ -720,8 +716,6 @@ type LayerGroup implements Layer { layers: [Layer]! @goField(forceResolver: true) scene: Scene @goField(forceResolver: true) scenePlugin: ScenePlugin @goField(forceResolver: true) - tagIds: [ID!]! - tags: [Tag!]! @goField(forceResolver: true) } type Infobox { @@ -756,6 +750,22 @@ type InfoboxField { scenePlugin: ScenePlugin @goField(forceResolver: true) } +interface LayerTag { + tagId: ID! + tag: Tag +} + +type LayerTagItem implements LayerTag { + tagId: ID! + tag: Tag @goField(forceResolver: true) +} + +type LayerTagGroup implements LayerTag { + tagId: ID! + children: [LayerTagItem!]! + tag: Tag @goField(forceResolver: true) +} + type MergedLayer { originalId: ID! 
parentId: ID @@ -790,29 +800,34 @@ interface Tag { id: ID! sceneId: ID! label: String! + layers: [Layer!]! @goField(forceResolver: true) } type TagItem implements Tag { id: ID! sceneId: ID! label: String! + parentId: ID linkedDatasetID: ID linkedDatasetSchemaID: ID linkedDatasetFieldID: ID linkedDatasetSchema: DatasetSchema @goField(forceResolver: true) linkedDataset: Dataset @goField(forceResolver: true) linkedDatasetField: DatasetField @goField(forceResolver: true) + parent: TagGroup @goField(forceResolver: true) + layers: [Layer!]! @goField(forceResolver: true) } type TagGroup implements Tag { id: ID! sceneId: ID! label: String! - tags: [ID!] + tagIds: [ID!] + tags: [TagItem!]! @goField(forceResolver: true) + scene: Scene @goField(forceResolver: true) + layers: [Layer!]! @goField(forceResolver: true) } -union Tags = TagItem | TagGroup - type Cluster { id: ID! name: String! @@ -1176,6 +1191,7 @@ input AddDatasetSchemaInput { input CreateTagItemInput { sceneId: ID! label: String! + parent: ID linkedDatasetSchemaID: ID linkedDatasetID: ID linkedDatasetField: ID @@ -1435,6 +1451,7 @@ type AddDatasetSchemaPayload { type CreateTagItemPayload { tag: TagItem! + parent: TagGroup } type CreateTagGroupPayload { @@ -1453,16 +1470,17 @@ type UpdateTagPayload { tag: Tag! } -type AttachTagToLayerPayload{ +type AttachTagToLayerPayload { layer: Layer! } -type DetachTagFromLayerPayload{ +type DetachTagFromLayerPayload { layer: Layer! } -type RemoveTagPayload{ +type RemoveTagPayload { tagId: ID! + updatedLayers: [Layer!]! } type AddClusterPayload { @@ -1475,7 +1493,7 @@ type UpdateClusterPayload { cluster: Cluster! } -type RemoveClusterPayload{ +type RemoveClusterPayload { scene: Scene! clusterId: ID! 
} @@ -1611,7 +1629,9 @@ type Mutation { createScene(input: CreateSceneInput!): CreateScenePayload addWidget(input: AddWidgetInput!): AddWidgetPayload updateWidget(input: UpdateWidgetInput!): UpdateWidgetPayload - updateWidgetAlignSystem(input: UpdateWidgetAlignSystemInput!): UpdateWidgetAlignSystemPayload + updateWidgetAlignSystem( + input: UpdateWidgetAlignSystemInput! + ): UpdateWidgetAlignSystemPayload removeWidget(input: RemoveWidgetInput!): RemoveWidgetPayload installPlugin(input: InstallPluginInput!): InstallPluginPayload uninstallPlugin(input: UninstallPluginInput!): UninstallPluginPayload @@ -1634,7 +1654,9 @@ type Mutation { input: RemoveDatasetSchemaInput! ): RemoveDatasetSchemaPayload importDataset(input: ImportDatasetInput!): ImportDatasetPayload - importDatasetFromGoogleSheet(input: ImportDatasetFromGoogleSheetInput!): ImportDatasetPayload + importDatasetFromGoogleSheet( + input: ImportDatasetFromGoogleSheetInput! + ): ImportDatasetPayload addDatasetSchema(input: AddDatasetSchemaInput!): AddDatasetSchemaPayload # Property @@ -1668,8 +1690,12 @@ type Mutation { # Tag createTagItem(input: CreateTagItemInput!): CreateTagItemPayload createTagGroup(input: CreateTagGroupInput!): CreateTagGroupPayload - attachTagItemToGroup(input: AttachTagItemToGroupInput!): AttachTagItemToGroupPayload - detachTagItemFromGroup(input: DetachTagItemFromGroupInput!): DetachTagItemFromGroupPayload + attachTagItemToGroup( + input: AttachTagItemToGroupInput! + ): AttachTagItemToGroupPayload + detachTagItemFromGroup( + input: DetachTagItemFromGroupInput! 
+ ): DetachTagItemFromGroupPayload updateTag(input: UpdateTagInput!): UpdateTagPayload removeTag(input: RemoveTagInput!): RemoveTagPayload } From 1b99c6dc76c9a57c50699ae9293f72e6b22264e0 Mon Sep 17 00:00:00 2001 From: Basel Issmail <78056580+issmail-basel@users.noreply.github.com> Date: Fri, 17 Dec 2021 07:45:44 +0200 Subject: [PATCH 121/253] fix: return property with clusters data (#89) Co-authored-by: maherhamoui6 Co-authored-by: yk Co-authored-by: basel.issmail Co-authored-by: rot1024 --- go.mod | 44 +++++----- internal/adapter/gql/generated.go | 87 ++++++++++++++----- .../adapter/gql/gqlmodel/convert_scene.go | 6 +- internal/adapter/gql/gqlmodel/models_gen.go | 12 +-- .../adapter/gql/resolver_mutation_scene.go | 2 +- internal/adapter/gql/resolver_scene.go | 13 +++ internal/usecase/interactor/scene.go | 15 +++- internal/usecase/interfaces/scene.go | 2 +- pkg/builtin/manifest.yml | 56 ++++-------- pkg/builtin/manifest_ja.yml | 37 +++----- pkg/plugin/manifest/parser_translation.go | 21 +++++ pkg/scene/builder/encoder.go | 14 +++ pkg/scene/builder/scene.go | 23 +++-- pkg/scene/cluster_list.go | 11 +++ pkg/scene/scene.go | 1 + schema.graphql | 4 +- 16 files changed, 221 insertions(+), 127 deletions(-) diff --git a/go.mod b/go.mod index f514f5620..11be3950c 100644 --- a/go.mod +++ b/go.mod @@ -5,18 +5,10 @@ require ( cloud.google.com/go/storage v1.18.2 github.com/99designs/gqlgen v0.14.0 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 - github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect - github.com/agnivade/levenshtein v1.1.1 // indirect - github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect github.com/auth0/go-jwt-middleware v1.0.1 github.com/blang/semver v3.5.1+incompatible - github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect - github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect - github.com/fatih/color v1.12.0 // indirect github.com/form3tech-oss/jwt-go v3.2.2+incompatible - 
github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect github.com/goccy/go-yaml v1.9.4 - github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d github.com/jarcoal/httpmock v1.0.8 @@ -24,26 +16,19 @@ require ( github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 - github.com/klauspost/compress v1.13.6 // indirect github.com/labstack/echo/v4 v4.6.1 github.com/labstack/gommon v0.3.1 - github.com/mattn/go-isatty v0.0.14 // indirect github.com/mitchellh/mapstructure v1.4.2 github.com/oklog/ulid v1.3.1 - github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 github.com/ravilushqa/otelgqlgen v0.2.0 github.com/sirupsen/logrus v1.8.1 - github.com/smartystreets/assertions v1.1.1 // indirect github.com/spf13/afero v1.6.0 - github.com/stretchr/objx v0.2.0 // indirect github.com/stretchr/testify v1.7.0 - github.com/tidwall/pretty v1.0.1 // indirect github.com/twpayne/go-kml v1.5.2 github.com/uber/jaeger-client-go v2.29.1+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible - github.com/urfave/cli/v2 v2.3.0 // indirect github.com/vektah/dataloaden v0.3.0 github.com/vektah/gqlparser/v2 v2.2.0 go.mongodb.org/mongo-driver v1.7.4 @@ -51,31 +36,31 @@ require ( go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0 go.opentelemetry.io/otel v1.2.0 go.opentelemetry.io/otel/sdk v1.2.0 - go.uber.org/atomic v1.7.0 // indirect - golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect - golang.org/x/mod v0.5.1 // indirect - golang.org/x/sys v0.0.0-20211103235746-7861aae1554b // indirect golang.org/x/text v0.3.7 - golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/tools v0.1.7 google.golang.org/api v0.60.0 gopkg.in/go-playground/colors.v1 v1.2.0 
gopkg.in/h2non/gock.v1 v1.1.2 - gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) require ( cloud.google.com/go v0.97.0 // indirect cloud.google.com/go/trace v1.0.0 // indirect + github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect + github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect + github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 // indirect github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/envoyproxy/go-control-plane v0.10.0 // indirect github.com/envoyproxy/protoc-gen-validate v0.6.2 // indirect + github.com/fatih/color v1.12.0 // indirect + github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect github.com/go-stack/stack v1.8.0 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect @@ -86,10 +71,18 @@ require ( github.com/googleapis/gax-go/v2 v2.1.1 // indirect github.com/gorilla/websocket v1.4.2 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect + github.com/klauspost/compress v1.13.6 // indirect github.com/mattn/go-colorable v0.1.11 // indirect + github.com/mattn/go-isatty v0.0.14 // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect + github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect + 
github.com/smartystreets/assertions v1.1.1 // indirect + github.com/stretchr/objx v0.2.0 // indirect + github.com/tidwall/pretty v1.0.1 // indirect + github.com/urfave/cli/v2 v2.3.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate v1.2.1 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect @@ -99,9 +92,14 @@ require ( go.opencensus.io v0.23.0 // indirect go.opentelemetry.io/contrib v0.23.0 // indirect go.opentelemetry.io/otel/trace v1.2.0 // indirect + go.uber.org/atomic v1.7.0 // indirect + golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect + golang.org/x/mod v0.5.1 // indirect golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 // indirect golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect + golang.org/x/sys v0.0.0-20211103235746-7861aae1554b // indirect + golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145 // indirect @@ -109,6 +107,8 @@ require ( google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) go 1.17 diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 1972bd813..f7e13ce8f 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -42,6 +42,7 @@ type Config struct { type ResolverRoot interface { Asset() AssetResolver + Cluster() ClusterResolver Dataset() DatasetResolver DatasetField() DatasetFieldResolver DatasetSchema() DatasetSchemaResolver @@ -172,9 +173,10 @@ type ComplexityRoot struct { } Cluster struct { - ID func(childComplexity int) int - 
Name func(childComplexity int) int - Property func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int + Property func(childComplexity int) int + PropertyID func(childComplexity int) int } CreateAssetPayload struct { @@ -1068,6 +1070,9 @@ type ComplexityRoot struct { type AssetResolver interface { Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) } +type ClusterResolver interface { + Property(ctx context.Context, obj *gqlmodel.Cluster) (*gqlmodel.Property, error) +} type DatasetResolver interface { Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) @@ -1667,6 +1672,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Cluster.Property(childComplexity), true + case "Cluster.propertyId": + if e.complexity.Cluster.PropertyID == nil { + break + } + + return e.complexity.Cluster.PropertyID(childComplexity), true + case "CreateAssetPayload.asset": if e.complexity.CreateAssetPayload.Asset == nil { break @@ -7080,7 +7092,8 @@ type TagGroup implements Tag { type Cluster { id: ID! name: String! - property: ID! + propertyId: ID! + property: Property @goField(forceResolver: true) } # InputType @@ -7485,7 +7498,6 @@ input RemoveTagInput { input AddClusterInput { sceneId: ID! name: String! - propertyId: ID! 
} input UpdateClusterInput { @@ -11158,7 +11170,7 @@ func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.Col return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { +func (ec *executionContext) _Cluster_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -11176,7 +11188,7 @@ func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Property, nil + return obj.PropertyID, nil }) if err != nil { ec.Error(ctx, err) @@ -11193,6 +11205,38 @@ func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) } +func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Cluster", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Cluster().Property(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Property) + fc.Result = res + return 
ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) +} + func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateAssetPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -32477,14 +32521,6 @@ func (ec *executionContext) unmarshalInputAddClusterInput(ctx context.Context, o if err != nil { return it, err } - case "propertyId": - var err error - - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) - if err != nil { - return it, err - } } } @@ -35879,18 +35915,29 @@ func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, case "id": out.Values[i] = ec._Cluster_id(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + atomic.AddUint32(&invalids, 1) } case "name": out.Values[i] = ec._Cluster_name(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + atomic.AddUint32(&invalids, 1) } - case "property": - out.Values[i] = ec._Cluster_property(ctx, field, obj) + case "propertyId": + out.Values[i] = ec._Cluster_propertyId(ctx, field, obj) if out.Values[i] == graphql.Null { - invalids++ + atomic.AddUint32(&invalids, 1) } + case "property": + field := field + out.Concurrently(i, func() (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Cluster_property(ctx, field, obj) + return res + }) default: panic("unknown field " + strconv.Quote(field.Name)) } diff --git a/internal/adapter/gql/gqlmodel/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go index 5fae166ad..57855c0f0 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -32,9 +32,9 @@ func ToScenePlugin(sp 
*scene.Plugin) *ScenePlugin { func ToCluster(c *scene.Cluster) *Cluster { return &Cluster{ - ID: c.ID().ID(), - Name: c.Name(), - Property: c.Property().ID(), + ID: c.ID().ID(), + Name: c.Name(), + PropertyID: c.Property().ID(), } } diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 47e646975..34314fdd0 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -36,9 +36,8 @@ type Tag interface { } type AddClusterInput struct { - SceneID id.ID `json:"sceneId"` - Name string `json:"name"` - PropertyID id.ID `json:"propertyId"` + SceneID id.ID `json:"sceneId"` + Name string `json:"name"` } type AddClusterPayload struct { @@ -205,9 +204,10 @@ type Camera struct { } type Cluster struct { - ID id.ID `json:"id"` - Name string `json:"name"` - Property id.ID `json:"property"` + ID id.ID `json:"id"` + Name string `json:"name"` + PropertyID id.ID `json:"propertyId"` + Property *Property `json:"property"` } type CreateAssetInput struct { diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index 306fcb2bb..df92cefba 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -199,7 +199,7 @@ func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.Upg func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) { exit := trace(ctx) defer exit() - s, c, err := r.usecases.Scene.AddCluster(ctx, id.SceneID(input.SceneID), input.Name, id.PropertyID(input.PropertyID), getOperator(ctx)) + s, c, err := r.usecases.Scene.AddCluster(ctx, id.SceneID(input.SceneID), input.Name, getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_scene.go b/internal/adapter/gql/resolver_scene.go index 01b53e2e4..fa9dd710b 100644 --- 
a/internal/adapter/gql/resolver_scene.go +++ b/internal/adapter/gql/resolver_scene.go @@ -20,6 +20,10 @@ func (r *Resolver) SceneWidget() SceneWidgetResolver { return &sceneWidgetResolver{r} } +func (r *Resolver) Cluster() ClusterResolver { + return &clusterResolver{r} +} + type sceneResolver struct{ *Resolver } func (r *sceneResolver) Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) { @@ -144,3 +148,12 @@ func (r *sceneWidgetResolver) Property(ctx context.Context, obj *gqlmodel.SceneW return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) } + +type clusterResolver struct{ *Resolver } + +func (r *clusterResolver) Property(ctx context.Context, obj *gqlmodel.Cluster) (*gqlmodel.Property, error) { + exit := trace(ctx) + defer exit() + + return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) +} diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index aefeff75b..56b6e7db0 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -685,7 +685,7 @@ func (i *Scene) getPlugin(ctx context.Context, sid id.SceneID, p id.PluginID, e return plugin, extension, nil } -func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, propertyID id.PropertyID, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { +func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { tx, err := i.transaction.Begin() if err != nil { return nil, nil, err @@ -709,12 +709,23 @@ func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, return nil, nil, err } + prop, err := property.New().NewID().Schema(id.MustPropertySchemaID("reearth/cluster")).Scene(sceneID).Build() + if err != nil { + return nil, nil, err + } + cid := id.NewClusterID() - cluster, err := scene.NewCluster(cid, name, propertyID) + cluster, err 
:= scene.NewCluster(cid, name, prop.ID()) if err != nil { return nil, nil, err } s.Clusters().Add(cluster) + + err = i.propertyRepo.Save(ctx, prop) + if err != nil { + return nil, nil, err + } + err = i.sceneRepo.Save(ctx, s) if err != nil { return nil, nil, err diff --git a/internal/usecase/interfaces/scene.go b/internal/usecase/interfaces/scene.go index dae978178..9f9b2e310 100644 --- a/internal/usecase/interfaces/scene.go +++ b/internal/usecase/interfaces/scene.go @@ -27,7 +27,7 @@ type Scene interface { InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, id.PluginID, *id.PropertyID, error) UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error) UpgradePlugin(context.Context, id.SceneID, id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error) - AddCluster(context.Context, id.SceneID, string, id.PropertyID, *usecase.Operator) (*scene.Scene, *scene.Cluster, error) + AddCluster(context.Context, id.SceneID, string, *usecase.Operator) (*scene.Scene, *scene.Cluster, error) UpdateCluster(context.Context, UpdateClusterParam, *usecase.Operator) (*scene.Scene, *scene.Cluster, error) RemoveCluster(context.Context, id.SceneID, id.ClusterID, *usecase.Operator) (*scene.Scene, error) } diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 2a0e238a2..6a7c768dc 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -94,7 +94,7 @@ extensions: description: Display the limiter boundaries. - id: cameraLimitterTargetArea type: camera - title: Target max height + title: Target max height description: The base position of the camera movement range. This position is the center point of the limit box in the horizontal and depth directions, and is the maximum height of the movable range. The camera will not be able to zoom out beyond the height specified here. 
- id: cameraLimitterTargetWidth type: number @@ -1651,11 +1651,7 @@ extensions: - id: default title: Cluster fields: - - id: cluster_name - type: string - title: Name - description: Sets the name of the cluster. - - id: cluster_pixelRange + - id: clusterPixelRange type: number description: Sets the minimum range between layers to get clustered together. title: Pixel range @@ -1663,49 +1659,31 @@ extensions: min: 1 max: 200 suffix: px - - id: cluster_minSize + - id: clusterMinSize type: number title: Minimum cluster size description: The minimum number of layers that can be clustered. defaultValue: 3 min: 2 max: 20 - - id: cluster_maxSize - title: Max cluster size - description: Sets the size of cluster entity. - defaultValue: 48 - type: number - min: 1 - max: 200 - - id: cluster_shapeType - type: string - title: Shape type - defaultValue: pin - description: Sets the shape of cluster entity. - choices: - - key: pin - label: Pin - - key: label - label: Label - - id: cluster_textColor - type: string - title: Text color - description: Sets the text color of cluster entity. - ui: color - availableIf: - field: cluster_shapeType - type: string - value: label - - id: cluster_backgroundColor - type: string - ui: color - title: Background color - description: Sets the background color of cluster entity. - - id: cluster_image + - id: clusterImage type: url title: Image description: Sets the image of cluster entity. ui: image + - id: clusterImageWidth + type: number + title: Width + suffix: px + description: Sets the image width. + - id: clusterImageHeight + type: number + title: Height + suffix: px + description: Sets the image height. 
+ - id: clusterLabelTypography + type: typography + title: Font - id: layers title: Layers representativeField: layer diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index b2a606a3e..a6baa0e18 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -782,32 +782,23 @@ extensions: description: ใƒฌใ‚คใƒคใƒผใ‚’่‡ชๅ‹•็š„ใซใพใจใ‚ใฆ่กจ็คบใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใชใ‚ฏใƒฉใ‚นใ‚ฟใ‚’่จญๅฎšใ—ใพใ™ใ€‚ propertySchema: default: - title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + title: ใ‚ฏใƒฉใ‚นใ‚ฟ fields: - cluster_name: - title: ใ‚ฏใƒฉใ‚นใ‚ฟๅ - description: ใ‚ฏใƒฉใ‚นใ‚ฟใฎๅๅ‰ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ - cluster_pixelRange: + clusterPixelRange: title: ๆœ€ๅฐ็ฏ„ๅ›ฒ description: ็”ป้ขไธŠใฎไฝ•ใƒ”ใ‚ฏใ‚ปใƒซๅˆ†ใฎ็ฏ„ๅ›ฒใซใ‚ใ‚‹ใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒฉใ‚นใ‚ฟใซใพใจใ‚ใ‚‹ใ‹ใ‚’ๆœ€ๅฐๅ€คใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ - cluster_minSize: + clusterMinSize: title: ๆœ€ๅฐใ‚ตใ‚คใ‚บ - description: ใ‚ฏใƒฉใ‚นใ‚ฟใŒ่กจ็คบใ•ใ‚Œใ‚‹ๆœ€ๅฐใฎใƒฌใ‚คใƒคใƒผๆ•ฐ - cluster_maxSize: - title: ๆœ€ๅคงใ‚ตใ‚คใ‚บ - description: ใ‚ฏใƒฉใ‚นใ‚ฟใซๅฑžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใชๆœ€ๅคงใƒฌใ‚คใƒคใƒผๆ•ฐ - cluster_shapeType: - title: ่กจ็คบๆ–นๆณ• - description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ่กจ็คบๆ–นๆณ•ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ - choices: - pin: ใƒ”ใƒณ - label: ใƒฉใƒ™ใƒซ - cluster_textColor: - title: ๆ–‡ๅญ—่‰ฒ - description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎๆ–‡ๅญ—่‰ฒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ - cluster_backgroundColor: - title: ่ƒŒๆ™ฏ่‰ฒ - description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ่ƒŒๆ™ฏ่‰ฒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ - cluster_image: + description: ใ‚ฏใƒฉใ‚นใ‚ฟใŒ่กจ็คบใ•ใ‚Œใ‚‹ใฎใซๅฟ…่ฆใชๆœ€ๅฐใฎใƒฌใ‚คใƒคใƒผๆ•ฐ + clusterImage: title: ็”ปๅƒ description: ็”ป้ขไธŠใง่กจ็คบใ•ใ‚Œใ‚‹ใ‚ฏใƒฉใ‚นใ‚ฟใฎ็”ปๅƒใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterImageWidth: + title: ็”ปๅƒๅน… + description: ็”ปๅƒใฎๅน…ใ‚’ใƒ”ใ‚ฏใ‚ปใƒซใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterImageHeight: + title: ็”ปๅƒ้ซ˜ใ• + description: ็”ปๅƒใฎ้ซ˜ใ•ใ‚’ใƒ”ใ‚ฏใ‚ปใƒซใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + clusterLabelTypography: + title: ใƒฉใƒ™ใƒซ + 
description: ใƒฉใƒ™ใƒซใฎใ‚นใ‚ฟใ‚คใƒซใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 54dd00fad..1d35185bd 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -52,12 +52,18 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani if t.Name != nil { name := m.Plugin.Name() + if name == nil { + name = map[string]string{} + } name[lang] = *t.Name m.Plugin.Rename(name) } if t.Description != nil { des := m.Plugin.Description() + if des == nil { + des = map[string]string{} + } des[lang] = *t.Description m.Plugin.SetDescription(des) } @@ -70,12 +76,18 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani if te.Name != nil { name := ext.Name() + if name == nil { + name = map[string]string{} + } name[lang] = *te.Name ext.Rename(name) } if te.Description != nil { des := ext.Description() + if des == nil { + des = map[string]string{} + } des[lang] = *te.Description ext.SetDescription(des) } @@ -99,6 +111,9 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani if tsg.Title != nil { t := psg.Title() + if t == nil { + t = map[string]string{} + } t[lang] = *tsg.Title psg.SetTitle(t) } @@ -118,12 +133,18 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani if tsf.Title != nil { t := psf.Title() + if t == nil { + t = map[string]string{} + } t[lang] = *tsf.Title psf.SetTitle(t) } if tsf.Description != nil { t := psf.Description() + if t == nil { + t = map[string]string{} + } t[lang] = *tsf.Description psf.SetDescription(t) } diff --git a/pkg/scene/builder/encoder.go b/pkg/scene/builder/encoder.go index bf3134374..eb5f2cac1 100644 --- a/pkg/scene/builder/encoder.go +++ b/pkg/scene/builder/encoder.go @@ -132,3 +132,17 @@ type widgetAreaJSON struct { WidgetIDs []string `json:"widgetIds"` Align string `json:"align"` } + +type widgetJSON 
struct { + ID string `json:"id"` + PluginID string `json:"pluginId"` + ExtensionID string `json:"extensionId"` + Property propertyJSON `json:"property"` + Extended bool `json:"extended"` +} + +type clusterJSON struct { + ID string `json:"id"` + Name string `json:"name"` + Property propertyJSON `json:"property"` +} diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go index 5060f5d97..973598bb3 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -18,14 +18,7 @@ type sceneJSON struct { Layers []*layerJSON `json:"layers"` Widgets []*widgetJSON `json:"widgets"` WidgetAlignSystem *widgetAlignSystemJSON `json:"widgetAlignSystem"` -} - -type widgetJSON struct { - ID string `json:"id"` - PluginID string `json:"pluginId"` - ExtensionID string `json:"extensionId"` - Property propertyJSON `json:"property"` - Extended bool `json:"extended"` + Clusters []*clusterJSON `json:"clusters"` } func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Time, l []*layerJSON, p []*property.Property) *sceneJSON { @@ -36,6 +29,7 @@ func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Ti Property: b.property(ctx, findProperty(p, s.Property())), Plugins: b.plugins(ctx, s, p), Widgets: b.widgets(ctx, s, p), + Clusters: b.clusters(ctx, s, p), Layers: l, WidgetAlignSystem: buildWidgetAlignSystem(s.WidgetAlignSystem()), } @@ -74,6 +68,19 @@ func (b *Builder) widgets(ctx context.Context, s *scene.Scene, p []*property.Pro return res } +func (b *Builder) clusters(ctx context.Context, s *scene.Scene, p []*property.Property) []*clusterJSON { + sceneClusters := s.Clusters().Clusters() + res := make([]*clusterJSON, 0, len(sceneClusters)) + for _, c := range sceneClusters { + res = append(res, &clusterJSON{ + ID: c.ID().String(), + Name: c.Name(), + Property: b.property(ctx, findProperty(p, c.Property())), + }) + } + return res +} + func (b *Builder) property(ctx context.Context, p *property.Property) propertyJSON { 
return property.SealProperty(ctx, p).Interface() } diff --git a/pkg/scene/cluster_list.go b/pkg/scene/cluster_list.go index 12d88e171..36a86f010 100644 --- a/pkg/scene/cluster_list.go +++ b/pkg/scene/cluster_list.go @@ -65,3 +65,14 @@ func (tl *ClusterList) Remove(clusters ...id.ClusterID) { } } } + +func (tl *ClusterList) Properties() []id.PropertyID { + if tl == nil { + return nil + } + res := make([]id.PropertyID, 0, len(tl.clusters)) + for _, c := range tl.clusters { + res = append(res, c.property) + } + return res +} diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index 182790d94..708980689 100644 --- a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -118,6 +118,7 @@ func (s *Scene) Properties() []id.PropertyID { ids := []id.PropertyID{s.property} ids = append(ids, s.pluginSystem.Properties()...) ids = append(ids, s.widgetSystem.Properties()...) + ids = append(ids, s.clusters.Properties()...) return ids } diff --git a/schema.graphql b/schema.graphql index bd8c88050..d45c51cb1 100644 --- a/schema.graphql +++ b/schema.graphql @@ -831,7 +831,8 @@ type TagGroup implements Tag { type Cluster { id: ID! name: String! - property: ID! + propertyId: ID! + property: Property @goField(forceResolver: true) } # InputType @@ -1236,7 +1237,6 @@ input RemoveTagInput { input AddClusterInput { sceneId: ID! name: String! - propertyId: ID! 
} input UpdateClusterInput { From 755ef78aea8233c77a1625cb5d783a2c7aa8dc08 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Dec 2021 16:21:55 +0900 Subject: [PATCH 122/253] ci: delete godoc job [ci skip] --- .github/workflows/main.yml | 49 -------------------------------------- 1 file changed, 49 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 595229234..49707dacb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -52,55 +52,6 @@ jobs: echo "::set-output name=new_tag::$TAG" echo "::set-output name=new_tag_short::${TAG#v}" fi - godoc: - name: Build godoc - runs-on: ubuntu-latest - needs: - - main - if: github.event.repository.full_name == 'reearth/reearth-backend' - env: - REPO: github.com/reearth/reearth-backend - REPO_NAME: reearth/reearth-backend - ADDR: "localhost:8080" - DIR: ${{ needs.main.outputs.branch }} - steps: - - name: set up - uses: actions/setup-go@v2 - with: - go-version: 1.17 - id: go - - name: checkout - uses: actions/checkout@v2 - - name: cache - uses: actions/cache@v2 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - run: go mod download - - name: install godoc - run: go install golang.org/x/tools/cmd/godoc@latest - - name: generate docs - continue-on-error: true - run: | - godoc -http="$ADDR" & - sleep 10 - wget -r -np -N -E -p -k "http://${ADDR}/pkg/${REPO}/" - - name: replace urls - run: | - [ `find . 
-name "*.html" -type f | wc -l` -eq 0 ] && exit 1 - find ./${ADDR}/ -name "*.html" -print0 | xargs -0 sed -i \ - -e "s@http://${ADDR}/src/${REPO}@https://${REPO}/blob/main@" \ - -e "s@\"http://${ADDR}/pkg/\"@\"/${REPO_NAME}/${DIR}/pkg/${REPO}/\"@" \ - -e 's@ Date: Fri, 17 Dec 2021 17:09:32 +0900 Subject: [PATCH 123/253] ci: add renovate.json [ci skip] --- .github/renovate.json | 24 ++++++++++++++++++++++++ .github/workflows/renovate.yml | 21 +++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 .github/renovate.json create mode 100644 .github/workflows/renovate.yml diff --git a/.github/renovate.json b/.github/renovate.json new file mode 100644 index 000000000..70a59211d --- /dev/null +++ b/.github/renovate.json @@ -0,0 +1,24 @@ +{ + "extends": [ + "config:base", + ":semanticCommits", + ":semanticCommitScopeDisabled", + ":maintainLockFilesWeekly", + ":enableVulnerabilityAlertsWithLabel(security)", + ":semanticCommitTypeAll(chore)", + "schedule:earlyMondays" + ], + "postUpdateOptions": [ + "gomodTidy", + "gomodUpdateImportPaths" + ], + "packageRules": [ + { + "packagePatterns": [ + "*" + ], + "groupName": "all dependencies", + "groupSlug": "all" + } + ] +} diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml new file mode 100644 index 000000000..37cf8d272 --- /dev/null +++ b/.github/workflows/renovate.yml @@ -0,0 +1,21 @@ +name: renovate +on: + push: + branches: + - renovate/* +jobs: + renovate-go-sum-fix: + runs-on: ubuntu-latest + steps: + - name: checkout + uses: actions/checkout@v2 + with: + fetch-depth: 2 + - name: fix + uses: at-wat/go-sum-fix-action@v0 + with: + git_user: ${{ github.actor }} + git_email: ${{ github.actor }}@users.noreply.github.com + github_token: ${{ secrets.GITHUB_TOKEN }} + commit_style: squash + push: force From 16f20d3cbf4917181b7c79f178443b161ade032f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Dec 2021 17:14:49 +0900 Subject: [PATCH 124/253] ci: fix renovate.json [ci skip] --- 
.github/renovate.json | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index 70a59211d..b27f006a3 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -5,8 +5,7 @@ ":semanticCommitScopeDisabled", ":maintainLockFilesWeekly", ":enableVulnerabilityAlertsWithLabel(security)", - ":semanticCommitTypeAll(chore)", - "schedule:earlyMondays" + ":semanticCommitTypeAll(chore)" ], "postUpdateOptions": [ "gomodTidy", @@ -18,7 +17,10 @@ "*" ], "groupName": "all dependencies", - "groupSlug": "all" + "groupSlug": "all", + "extends": [ + "schedule:earlyMondays" + ] } ] } From ba4b18fb85f84d63dee4f69dbd800f6d1b063c26 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 21 Dec 2021 16:27:35 +0900 Subject: [PATCH 125/253] fix: cast values, rename value.OptionalValue (#93) --- pkg/dataset/value.go | 18 ++ pkg/dataset/value_optional.go | 17 +- pkg/dataset/value_optional_test.go | 69 ++++++-- pkg/property/merged.go | 11 +- pkg/property/value.go | 31 +++- pkg/property/value_dataset.go | 4 +- pkg/property/value_dataset_test.go | 69 +++++--- pkg/property/value_optional.go | 17 +- pkg/property/value_optional_test.go | 69 ++++++-- pkg/property/value_test.go | 262 ++++++++++++++++++++++++++++ pkg/value/bool.go | 22 ++- pkg/value/bool_test.go | 61 +++++++ pkg/value/latlng.go | 13 +- pkg/value/latlngheight.go | 16 +- pkg/value/number.go | 15 +- pkg/value/number_test.go | 99 +++++++++++ pkg/value/optional.go | 35 ++-- pkg/value/optional_test.go | 124 +++++++++---- pkg/value/string.go | 14 +- pkg/value/string_test.go | 57 ++++++ pkg/value/url.go | 6 + pkg/value/value.go | 10 ++ pkg/value/value_test.go | 56 ++++++ 23 files changed, 960 insertions(+), 135 deletions(-) create mode 100644 pkg/value/bool_test.go create mode 100644 pkg/value/number_test.go create mode 100644 pkg/value/string_test.go diff --git a/pkg/dataset/value.go b/pkg/dataset/value.go index 4fb4aea06..eaee64ed8 100644 --- a/pkg/dataset/value.go +++ 
b/pkg/dataset/value.go @@ -45,6 +45,13 @@ func (t ValueType) ValueFrom(i interface{}) *Value { return &Value{v: *vv} } +func (vt ValueType) MustBeValue(i interface{}) *Value { + if v := vt.ValueFrom(i); v != nil { + return v + } + panic("invalid value") +} + type Value struct { v value.Value } @@ -81,6 +88,17 @@ func (v *Value) Interface() interface{} { return v.v.Interface() } +func (v *Value) Cast(vt ValueType) *Value { + if v == nil { + return nil + } + nv := v.v.Cast(value.Type(vt), nil) + if nv == nil { + return nil + } + return &Value{v: *nv} +} + func (v *Value) ValueBool() *bool { if v == nil { return nil diff --git a/pkg/dataset/value_optional.go b/pkg/dataset/value_optional.go index 61affc8cd..ac4225066 100644 --- a/pkg/dataset/value_optional.go +++ b/pkg/dataset/value_optional.go @@ -3,7 +3,7 @@ package dataset import "github.com/reearth/reearth-backend/pkg/value" type OptionalValue struct { - ov value.OptionalValue + ov value.Optional } func NewOptionalValue(t ValueType, v *Value) *OptionalValue { @@ -11,7 +11,7 @@ func NewOptionalValue(t ValueType, v *Value) *OptionalValue { if v != nil { vv = &v.v } - ov := value.NewOptionalValue(value.Type(t), vv) + ov := value.NewOptional(value.Type(t), vv) if ov == nil { return nil } @@ -22,7 +22,7 @@ func OptionalValueFrom(v *Value) *OptionalValue { if v == nil { return nil } - ov := value.OptionalValueFrom(&v.v) + ov := value.OptionalFrom(&v.v) if ov == nil { return nil } @@ -76,3 +76,14 @@ func (ov *OptionalValue) Clone() *OptionalValue { ov: *nov, } } + +func (ov *OptionalValue) Cast(t ValueType) *OptionalValue { + if ov == nil { + return nil + } + vv := ov.ov.Cast(value.Type(t), nil) + if vv == nil { + return nil + } + return &OptionalValue{ov: *vv} +} diff --git a/pkg/dataset/value_optional_test.go b/pkg/dataset/value_optional_test.go index 8767264a7..b96decc0e 100644 --- a/pkg/dataset/value_optional_test.go +++ b/pkg/dataset/value_optional_test.go @@ -23,14 +23,14 @@ func TestNewNilableValue(t *testing.T) { 
t: ValueTypeString, v: ValueTypeString.ValueFrom("foo"), }, - want: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", nil))}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", nil))}, }, { name: "nil value", args: args{ t: ValueTypeString, }, - want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, }, { name: "invalid value", @@ -73,7 +73,7 @@ func TestOptionalValueFrom(t *testing.T) { args: args{ v: ValueTypeString.ValueFrom("foo"), }, - want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", nil))}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", nil))}, }, { name: "empty value", @@ -106,7 +106,7 @@ func TestOptionalValue_Type(t *testing.T) { }{ { name: "ok", - value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeBool, nil)}, + value: &OptionalValue{ov: *value.NewOptional(value.TypeBool, nil)}, want: ValueTypeBool, }, { @@ -138,7 +138,7 @@ func TestOptionalValue_Value(t *testing.T) { }{ { name: "ok", - value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", nil))}, + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", nil))}, want: ValueTypeString.ValueFrom("foobar"), }, { @@ -175,7 +175,7 @@ func TestOptionalValue_TypeAndValue(t *testing.T) { }{ { name: "ok", - value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", nil))}, + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", nil))}, wantt: ValueTypeString, wantv: ValueTypeString.ValueFrom("foobar"), }, @@ -219,17 +219,17 @@ func TestOptionalValue_SetValue(t *testing.T) { }{ { name: "set", - value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", nil))}, + value: &OptionalValue{ov: 
*value.OptionalFrom(value.TypeString.ValueFrom("foo", nil))}, args: args{v: ValueTypeString.ValueFrom("foobar")}, }, { name: "set to nil", - value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, args: args{v: ValueTypeString.ValueFrom("foobar")}, }, { name: "invalid value", - value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, args: args{v: ValueTypeNumber.ValueFrom(1)}, invalid: true, }, @@ -279,7 +279,7 @@ func TestOptionalValue_Clone(t *testing.T) { { name: "ok", target: &OptionalValue{ - ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", nil)), + ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", nil)), }, }, { @@ -301,3 +301,52 @@ func TestOptionalValue_Clone(t *testing.T) { }) } } + +func TestOptionalValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + tests := []struct { + name string + target *OptionalValue + args args + want *OptionalValue + }{ + { + name: "diff type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, nil))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("1.1", nil))}, + }, + { + name: "same type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, nil))}, + args: args{t: ValueTypeNumber}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, nil))}, + }, + { + name: "failed to cast", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}, nil))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "empty", + target: &OptionalValue{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: 
args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} diff --git a/pkg/property/merged.go b/pkg/property/merged.go index 9b049c5b2..6110b4762 100644 --- a/pkg/property/merged.go +++ b/pkg/property/merged.go @@ -113,13 +113,6 @@ func Merge(o *Property, p *Property, linked *id.DatasetID) *Merged { return nil } - // copy id - var linked2 *id.DatasetID - if linked != nil { - linked3 := *linked - linked2 = &linked3 - } - var schema id.PropertySchemaID if p != nil { schema = p.Schema() @@ -131,8 +124,8 @@ func Merge(o *Property, p *Property, linked *id.DatasetID) *Merged { Original: o.IDRef(), Parent: p.IDRef(), Schema: schema, - Groups: mergeItems(o.Items(), p.Items(), linked2), - LinkedDataset: linked2, + Groups: mergeItems(o.Items(), p.Items(), linked.CopyRef()), + LinkedDataset: linked.CopyRef(), } } diff --git a/pkg/property/value.go b/pkg/property/value.go index 35404a98d..a6ea52ce7 100644 --- a/pkg/property/value.go +++ b/pkg/property/value.go @@ -2,7 +2,6 @@ package property import ( "net/url" - "strconv" "github.com/reearth/reearth-backend/pkg/value" ) @@ -49,6 +48,13 @@ func (vt ValueType) ValueFrom(i interface{}) *Value { return &Value{v: *v} } +func (vt ValueType) MustBeValue(i interface{}) *Value { + if v := vt.ValueFrom(i); v != nil { + return v + } + panic("invalid value") +} + type Value struct { v value.Value } @@ -89,6 +95,17 @@ func (v *Value) Interface() interface{} { return v.v.Interface() } +func (v *Value) Cast(vt ValueType) *Value { + if v == nil { + return nil + } + nv := v.v.Cast(value.Type(vt), types) + if nv == nil { + return nil + } + return &Value{v: *nv} +} + func (v *Value) ValueBool() *bool { if v == nil { return nil @@ -200,16 +217,12 @@ func (v *Value) ValuePolygon() *Polygon { } func ValueFromStringOrNumber(s string) *Value { - if vint, err := strconv.Atoi(s); err == nil { - return 
ValueTypeNumber.ValueFrom(vint) - } - - if vfloat64, err := strconv.ParseFloat(s, 64); err == nil { - return ValueTypeNumber.ValueFrom(vfloat64) + if s == "true" || s == "false" || s == "TRUE" || s == "FALSE" || s == "True" || s == "False" { + return ValueTypeBool.ValueFrom(s) } - if vbool, err := strconv.ParseBool(s); err == nil { - return ValueTypeBool.ValueFrom(vbool) + if v := ValueTypeNumber.ValueFrom(s); v != nil { + return v } return ValueTypeString.ValueFrom(s) diff --git a/pkg/property/value_dataset.go b/pkg/property/value_dataset.go index 3443ad0fb..261307257 100644 --- a/pkg/property/value_dataset.go +++ b/pkg/property/value_dataset.go @@ -17,11 +17,11 @@ func NewValueAndDatasetValue(ty ValueType, d *dataset.Value, p *Value) *ValueAnd } if d != nil && ValueType(d.Type()) != ty { - d = nil + d = d.Cast(dataset.ValueType(ty)) } if p != nil && p.Type() != ty { - p = nil + p = p.Cast(ty) } return &ValueAndDatasetValue{ diff --git a/pkg/property/value_dataset_test.go b/pkg/property/value_dataset_test.go index ecaefd482..c3562a08a 100644 --- a/pkg/property/value_dataset_test.go +++ b/pkg/property/value_dataset_test.go @@ -22,73 +22,86 @@ func TestNewValueAndDatasetValue(t *testing.T) { name: "ok", args: args{ ty: ValueTypeBool, - d: dataset.ValueTypeBool.ValueFrom(false), - p: ValueTypeBool.ValueFrom(true), + d: dataset.ValueTypeBool.MustBeValue(false), + p: ValueTypeBool.MustBeValue(true), }, want: &ValueAndDatasetValue{ t: ValueTypeBool, d: dataset.ValueTypeBool.ValueFrom(false), - p: ValueTypeBool.ValueFrom(true), + p: ValueTypeBool.MustBeValue(true), }, }, { - name: "invalid type", + name: "different types 1", args: args{ - ty: ValueType("foobar"), - d: dataset.ValueTypeBool.ValueFrom(false), - p: ValueTypeBool.ValueFrom(true), + ty: ValueTypeURL, + d: dataset.ValueTypeString.MustBeValue("https://reearth.io"), + p: nil, + }, + want: &ValueAndDatasetValue{ + t: ValueTypeURL, + d: dataset.ValueTypeURL.MustBeValue("https://reearth.io"), + p: nil, }, - want: 
nil, }, { - name: "invalid dataset value", + name: "different types 3", args: args{ ty: ValueTypeBool, - d: dataset.ValueTypeString.ValueFrom("false"), - p: ValueTypeBool.ValueFrom(true), + d: dataset.ValueTypeBool.MustBeValue(false), + p: ValueTypeString.MustBeValue("true"), }, want: &ValueAndDatasetValue{ t: ValueTypeBool, - d: nil, - p: ValueTypeBool.ValueFrom(true), + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(true), }, }, { - name: "invalid property value", + name: "different types 2", args: args{ ty: ValueTypeBool, - d: dataset.ValueTypeBool.ValueFrom(false), - p: ValueTypeString.ValueFrom("true"), + d: dataset.ValueTypeString.ValueFrom("false"), + p: ValueTypeBool.MustBeValue(true), }, want: &ValueAndDatasetValue{ t: ValueTypeBool, d: dataset.ValueTypeBool.ValueFrom(false), - p: nil, + p: ValueTypeBool.MustBeValue(true), }, }, + { + name: "invalid type", + args: args{ + ty: ValueType("foobar"), + d: dataset.ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(true), + }, + want: nil, + }, { name: "nil dataset value", args: args{ ty: ValueTypeBool, d: nil, - p: ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(false), }, want: &ValueAndDatasetValue{ t: ValueTypeBool, d: nil, - p: ValueTypeBool.ValueFrom(false), + p: ValueTypeBool.MustBeValue(false), }, }, { name: "nil property value", args: args{ ty: ValueTypeBool, - d: dataset.ValueTypeBool.ValueFrom(false), + d: dataset.ValueTypeBool.MustBeValue(false), p: nil, }, want: &ValueAndDatasetValue{ t: ValueTypeBool, - d: dataset.ValueTypeBool.ValueFrom(false), + d: dataset.ValueTypeBool.MustBeValue(false), p: nil, }, }, @@ -254,26 +267,26 @@ func TestValueAndDatasetValue_Value(t *testing.T) { name: "dataset only", target: &ValueAndDatasetValue{ t: ValueTypeString, - d: dataset.ValueTypeString.ValueFrom("foo"), + d: dataset.ValueTypeString.MustBeValue("foo"), }, - want: ValueTypeString.ValueFrom("foo"), + want: ValueTypeString.MustBeValue("foo"), }, { name: 
"property only", target: &ValueAndDatasetValue{ t: ValueTypeString, - p: ValueTypeString.ValueFrom("bar"), + p: ValueTypeString.MustBeValue("bar"), }, - want: ValueTypeString.ValueFrom("bar"), + want: ValueTypeString.MustBeValue("bar"), }, { name: "dataset and property", target: &ValueAndDatasetValue{ t: ValueTypeString, - d: dataset.ValueTypeString.ValueFrom("foo"), - p: ValueTypeString.ValueFrom("bar"), + d: dataset.ValueTypeString.MustBeValue("foo"), + p: ValueTypeString.MustBeValue("bar"), }, - want: ValueTypeString.ValueFrom("foo"), + want: ValueTypeString.MustBeValue("foo"), }, { name: "empty", diff --git a/pkg/property/value_optional.go b/pkg/property/value_optional.go index 6e01c84b1..6b862ad64 100644 --- a/pkg/property/value_optional.go +++ b/pkg/property/value_optional.go @@ -3,7 +3,7 @@ package property import "github.com/reearth/reearth-backend/pkg/value" type OptionalValue struct { - ov value.OptionalValue + ov value.Optional } func NewOptionalValue(t ValueType, v *Value) *OptionalValue { @@ -11,7 +11,7 @@ func NewOptionalValue(t ValueType, v *Value) *OptionalValue { if v != nil { vv = &v.v } - ov := value.NewOptionalValue(value.Type(t), vv) + ov := value.NewOptional(value.Type(t), vv) if ov == nil { return nil } @@ -22,7 +22,7 @@ func OptionalValueFrom(v *Value) *OptionalValue { if v == nil { return nil } - ov := value.OptionalValueFrom(&v.v) + ov := value.OptionalFrom(&v.v) if ov == nil { return nil } @@ -76,3 +76,14 @@ func (ov *OptionalValue) SetValue(v *Value) { ov.ov.SetValue(&v.v) } } + +func (ov *OptionalValue) Cast(t ValueType) *OptionalValue { + if ov == nil { + return nil + } + vv := ov.ov.Cast(value.Type(t), types) + if vv == nil { + return nil + } + return &OptionalValue{ov: *vv} +} diff --git a/pkg/property/value_optional_test.go b/pkg/property/value_optional_test.go index 61e35b02e..f73f49709 100644 --- a/pkg/property/value_optional_test.go +++ b/pkg/property/value_optional_test.go @@ -23,14 +23,14 @@ func TestNewNilableValue(t 
*testing.T) { t: ValueTypeString, v: ValueTypeString.ValueFrom("foo"), }, - want: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", types))}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", types))}, }, { name: "nil value", args: args{ t: ValueTypeString, }, - want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, }, { name: "invalid value", @@ -73,7 +73,7 @@ func TestOptionalValueFrom(t *testing.T) { args: args{ v: ValueTypeString.ValueFrom("foo"), }, - want: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", types))}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", types))}, }, { name: "empty value", @@ -106,7 +106,7 @@ func TestOptionalValue_Type(t *testing.T) { }{ { name: "ok", - value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeBool, nil)}, + value: &OptionalValue{ov: *value.NewOptional(value.TypeBool, nil)}, want: ValueTypeBool, }, { @@ -138,7 +138,7 @@ func TestOptionalValue_Value(t *testing.T) { }{ { name: "ok", - value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", types))}, + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", types))}, want: ValueTypeString.ValueFrom("foobar"), }, { @@ -175,7 +175,7 @@ func TestOptionalValue_TypeAndValue(t *testing.T) { }{ { name: "ok", - value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foobar", types))}, + value: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("foobar", types))}, wantt: ValueTypeString, wantv: ValueTypeString.ValueFrom("foobar"), }, @@ -219,17 +219,17 @@ func TestOptionalValue_SetValue(t *testing.T) { }{ { name: "set", - value: &OptionalValue{ov: *value.OptionalValueFrom(value.TypeString.ValueFrom("foo", types))}, + value: &OptionalValue{ov: 
*value.OptionalFrom(value.TypeString.ValueFrom("foo", types))}, args: args{v: ValueTypeString.ValueFrom("foobar")}, }, { name: "set to nil", - value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, args: args{v: ValueTypeString.ValueFrom("foobar")}, }, { name: "invalid value", - value: &OptionalValue{ov: *value.NewOptionalValue(value.TypeString, nil)}, + value: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, args: args{v: ValueTypeNumber.ValueFrom(1)}, invalid: true, }, @@ -279,7 +279,7 @@ func TestOptionalValue_Clone(t *testing.T) { { name: "ok", target: &OptionalValue{ - ov: *value.NewOptionalValue(value.TypeString, value.TypeString.ValueFrom("foo", types)), + ov: *value.NewOptional(value.TypeString, value.TypeString.ValueFrom("foo", types)), }, }, { @@ -301,3 +301,52 @@ func TestOptionalValue_Clone(t *testing.T) { }) } } + +func TestOptionalValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + tests := []struct { + name string + target *OptionalValue + args args + want *OptionalValue + }{ + { + name: "diff type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, types))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeString.ValueFrom("1.1", types))}, + }, + { + name: "same type", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, types))}, + args: args{t: ValueTypeNumber}, + want: &OptionalValue{ov: *value.OptionalFrom(value.TypeNumber.ValueFrom(1.1, types))}, + }, + { + name: "failed to cast", + target: &OptionalValue{ov: *value.OptionalFrom(value.TypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}, types))}, + args: args{t: ValueTypeString}, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "empty", + target: &OptionalValue{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: 
nil, + args: args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} diff --git a/pkg/property/value_test.go b/pkg/property/value_test.go index a51528a3b..236033bf9 100644 --- a/pkg/property/value_test.go +++ b/pkg/property/value_test.go @@ -4,9 +4,223 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/value" "github.com/stretchr/testify/assert" ) +func TestValue_IsEmpty(t *testing.T) { + tests := []struct { + name string + value *Value + want bool + }{ + { + name: "empty", + want: true, + }, + { + name: "nil", + want: true, + }, + { + name: "non-empty", + value: ValueTypeString.ValueFrom("foo"), + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.IsEmpty()) + }) + } +} + +func TestValue_Clone(t *testing.T) { + tests := []struct { + name string + value *Value + want *Value + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: &Value{ + v: *value.TypeString.ValueFrom("foo", types), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Clone()) + }) + } +} + +func TestValue_Value(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.want == nil { + assert.Nil(t, tt.value.Value()) + } else { + assert.Equal(t, tt.want, tt.value.Value()) + } + }) + } +} + +func TestValue_Type(t 
*testing.T) { + tests := []struct { + name string + value *Value + want ValueType + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: ValueTypeString, + }, + { + name: "empty", + value: &Value{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestValue_Interface(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "string", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.value.Interface()) + }) + } +} + +func TestValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + tests := []struct { + name string + target *Value + args args + want *Value + }{ + { + name: "diff type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeString}, + want: ValueTypeString.ValueFrom("1.1"), + }, + { + name: "same type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeNumber}, + want: ValueTypeNumber.ValueFrom(1.1), + }, + { + name: "failed to cast", + target: ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "invalid type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeUnknown}, + want: nil, + }, + { + name: "empty", + target: &Value{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} + func TestValueFromDataset(t *testing.T) { testCases := 
[]struct { Name string @@ -62,3 +276,51 @@ func TestValueFromDataset(t *testing.T) { }) } } + +func TestValueFromStringOrNumber(t *testing.T) { + type args struct { + s string + } + tests := []struct { + name string + args args + want *Value + }{ + { + name: "string", + args: args{"aax"}, + want: ValueTypeString.ValueFrom("aax"), + }, + { + name: "number positive int", + args: args{"1023"}, + want: ValueTypeNumber.ValueFrom(1023), + }, + { + name: "number negative int", + args: args{"-1"}, + want: ValueTypeNumber.ValueFrom(-1), + }, + { + name: "number float", + args: args{"1.14"}, + want: ValueTypeNumber.ValueFrom(1.14), + }, + { + name: "bool true", + args: args{"true"}, + want: ValueTypeBool.ValueFrom(true), + }, + { + name: "bool false", + args: args{"false"}, + want: ValueTypeBool.ValueFrom(false), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, ValueFromStringOrNumber(tt.args.s)) + }) + } +} diff --git a/pkg/value/bool.go b/pkg/value/bool.go index 538e3f5f3..70a31278f 100644 --- a/pkg/value/bool.go +++ b/pkg/value/bool.go @@ -1,15 +1,29 @@ package value +import "strconv" + var TypeBool Type = "bool" type propertyBool struct{} func (*propertyBool) I2V(i interface{}) (interface{}, bool) { - if v, ok := i.(bool); ok { + switch v := i.(type) { + case bool: return v, true - } - if v, ok := i.(*bool); ok && v != nil { - return *v, true + case string: + if b, err := strconv.ParseBool(v); err == nil { + return b, true + } + case *bool: + if v != nil { + return *v, true + } + case *string: + if v != nil { + if b, err := strconv.ParseBool(*v); err == nil { + return b, true + } + } } return nil, false } diff --git a/pkg/value/bool_test.go b/pkg/value/bool_test.go new file mode 100644 index 000000000..4b96481fe --- /dev/null +++ b/pkg/value/bool_test.go @@ -0,0 +1,61 @@ +package value + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_propertyBool_I2V(t *testing.T) { + tr := true + fa 
:= false + trs1 := "true" + trs2 := "TRUE" + trs3 := "True" + trs4 := "T" + trs5 := "t" + trs6 := "1" + fas1 := "false" + fas2 := "FALSE" + fas3 := "False" + fas4 := "F" + fas5 := "f" + fas6 := "0" + + tests := []struct { + name string + args []interface{} + want1 interface{} + want2 bool + }{ + { + name: "true", + args: []interface{}{tr, trs1, trs2, trs3, trs4, trs5, trs6, &tr, &trs1, &trs2, &trs3, &trs4, &trs5, &trs6}, + want1: true, + want2: true, + }, + { + name: "false", + args: []interface{}{fa, fas1, fas2, fas3, fas4, fas5, fas6, &fa, &fas1, &fas2, &fas3, &fas4, &fas5, &fas6}, + want1: false, + want2: true, + }, + { + name: "nil", + args: []interface{}{"foo", (*bool)(nil), (*string)(nil), nil}, + want1: nil, + want2: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &propertyBool{} + for i, v := range tt.args { + got1, got2 := p.I2V(v) + assert.Equal(t, tt.want1, got1, "test %d", i) + assert.Equal(t, tt.want2, got2, "test %d", i) + } + }) + } +} diff --git a/pkg/value/latlng.go b/pkg/value/latlng.go index 7612eb543..2bf97ab9d 100644 --- a/pkg/value/latlng.go +++ b/pkg/value/latlng.go @@ -22,14 +22,21 @@ var TypeLatLng Type = "latlng" type propertyLatLng struct{} func (*propertyLatLng) I2V(i interface{}) (interface{}, bool) { - if v, ok := i.(LatLng); ok { + switch v := i.(type) { + case LatLng: return v, true - } else if v, ok := i.(*LatLng); ok { + case LatLngHeight: + return LatLng{Lat: v.Lat, Lng: v.Lng}, true + case *LatLng: if v != nil { return *v, true } - return nil, false + case *LatLngHeight: + if v != nil { + return LatLng{Lat: v.Lat, Lng: v.Lng}, true + } } + v := LatLng{} if err := mapstructure.Decode(i, &v); err != nil { return nil, false diff --git a/pkg/value/latlngheight.go b/pkg/value/latlngheight.go index 173f3875d..f2120899d 100644 --- a/pkg/value/latlngheight.go +++ b/pkg/value/latlngheight.go @@ -24,15 +24,19 @@ var TypeLatLngHeight Type = "latlngheight" type propertyLatLngHeight struct{} func 
(*propertyLatLngHeight) I2V(i interface{}) (interface{}, bool) { - if v, ok := i.(LatLngHeight); ok { + switch v := i.(type) { + case LatLngHeight: return v, true - } - - if v, ok := i.(*LatLngHeight); ok { + case LatLng: + return LatLngHeight{Lat: v.Lat, Lng: v.Lng, Height: 0}, true + case *LatLngHeight: + if v != nil { + return *v, true + } + case *LatLng: if v != nil { - return *v, false + return LatLngHeight{Lat: v.Lat, Lng: v.Lng, Height: 0}, true } - return nil, false } v := LatLngHeight{} diff --git a/pkg/value/number.go b/pkg/value/number.go index 275e5325f..53ce8bfeb 100644 --- a/pkg/value/number.go +++ b/pkg/value/number.go @@ -1,6 +1,9 @@ package value -import "encoding/json" +import ( + "encoding/json" + "strconv" +) var TypeNumber Type = "number" @@ -38,6 +41,10 @@ func (*propertyNumber) I2V(i interface{}) (interface{}, bool) { if f, err := v.Float64(); err == nil { return f, true } + case string: + if vfloat64, err := strconv.ParseFloat(v, 64); err == nil { + return vfloat64, true + } case *float64: if v != nil { return *v, true @@ -96,6 +103,12 @@ func (*propertyNumber) I2V(i interface{}) (interface{}, bool) { return f, true } } + case *string: + if v != nil { + if vfloat64, err := strconv.ParseFloat(*v, 64); err == nil { + return vfloat64, true + } + } } return nil, false } diff --git a/pkg/value/number_test.go b/pkg/value/number_test.go new file mode 100644 index 000000000..7ca44da58 --- /dev/null +++ b/pkg/value/number_test.go @@ -0,0 +1,99 @@ +package value + +import ( + "encoding/json" + "math" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_propertyNumber_I2V(t *testing.T) { + z1 := 0 + z2 := 0.0 + z3 := "0" + z4 := json.Number("0") + z5 := json.Number("-0") + n1 := 1.12 + n2 := "1.12" + n3 := json.Number("1.12") + nn1 := -0.11 + nn2 := "-0.11" + nn3 := json.Number("-0.11") + nan1 := math.NaN() + nan2 := json.Number("NaN") + inf1 := math.Inf(0) + inf2 := json.Number("Infinity") + infn1 := math.Inf(-1) + infn2 := 
json.Number("-Infinity") + + tests := []struct { + name string + args []interface{} + want1 interface{} + want2 bool + }{ + { + name: "zero", + args: []interface{}{z1, z2, z3, z4, z5, &z1, &z2, &z3, &z4, &z5}, + want1: 0.0, + want2: true, + }, + { + name: "float", + args: []interface{}{n1, n2, n3, &n1, &n2, &n3}, + want1: 1.12, + want2: true, + }, + { + name: "negative float", + args: []interface{}{nn1, nn2, nn3, &nn1, &nn2, &nn3}, + want1: -0.11, + want2: true, + }, + { + name: "nan", + args: []interface{}{nan1, nan2}, + want1: math.NaN(), + want2: true, + }, + { + name: "inf", + args: []interface{}{inf1, inf2}, + want1: math.Inf(0), + want2: true, + }, + { + name: "negative inf", + args: []interface{}{infn1, infn2}, + want1: math.Inf(-1), + want2: true, + }, + { + name: "nil", + args: []interface{}{"foo", (*float64)(nil), (*string)(nil), (*int)(nil), (*json.Number)(nil), nil}, + want1: nil, + want2: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &propertyNumber{} + for i, v := range tt.args { + got1, got2 := p.I2V(v) + if f, ok := tt.want1.(float64); ok { + if math.IsNaN(f) { + assert.True(t, math.IsNaN(tt.want1.(float64))) + } else { + assert.Equal(t, tt.want1, got1, "test %d", i) + } + } else { + assert.Equal(t, tt.want1, got1, "test %d", i) + } + + assert.Equal(t, tt.want2, got2, "test %d", i) + } + }) + } +} diff --git a/pkg/value/optional.go b/pkg/value/optional.go index 2b0b230ea..8bf0b084a 100644 --- a/pkg/value/optional.go +++ b/pkg/value/optional.go @@ -1,61 +1,74 @@ package value -type OptionalValue struct { +type Optional struct { t Type v *Value } -func NewOptionalValue(t Type, v *Value) *OptionalValue { +func NewOptional(t Type, v *Value) *Optional { if t == TypeUnknown || (v != nil && v.Type() != t) { return nil } - return &OptionalValue{ + return &Optional{ t: t, v: v, } } -func OptionalValueFrom(v *Value) *OptionalValue { +func OptionalFrom(v *Value) *Optional { if v.Type() == TypeUnknown { return nil } 
- return &OptionalValue{ + return &Optional{ t: v.Type(), v: v, } } -func (ov *OptionalValue) Type() Type { +func (ov *Optional) Type() Type { if ov == nil { return TypeUnknown } return ov.t } -func (ov *OptionalValue) Value() *Value { +func (ov *Optional) Value() *Value { if ov == nil || ov.t == TypeUnknown || ov.v == nil { return nil } return ov.v.Clone() } -func (ov *OptionalValue) TypeAndValue() (Type, *Value) { +func (ov *Optional) TypeAndValue() (Type, *Value) { return ov.Type(), ov.Value() } -func (ov *OptionalValue) SetValue(v *Value) { +func (ov *Optional) SetValue(v *Value) { if ov == nil || ov.t == TypeUnknown || (v != nil && ov.t != v.Type()) { return } ov.v = v.Clone() } -func (ov *OptionalValue) Clone() *OptionalValue { +func (ov *Optional) Clone() *Optional { if ov == nil { return nil } - return &OptionalValue{ + return &Optional{ t: ov.t, v: ov.v.Clone(), } } + +// Cast tries to convert the value to the new type and generates a new Optional. +func (ov *Optional) Cast(t Type, p TypePropertyMap) *Optional { + if ov == nil || ov.t == TypeUnknown { + return nil + } + if ov.v == nil { + return NewOptional(t, nil) + } + + nv := ov.v.Cast(t, p) + return NewOptional(t, nv) +} diff --git a/pkg/value/optional_test.go b/pkg/value/optional_test.go index 19e0f6014..70eec8552 100644 --- a/pkg/value/optional_test.go +++ b/pkg/value/optional_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TestNewOptionalValue(t *testing.T) { +func TestNewOptional(t *testing.T) { type args struct { t Type v *Value @@ -14,7 +14,7 @@ func TestNewOptionalValue(t *testing.T) { tests := []struct { name string args args - want *OptionalValue + want *Optional }{ { name: "default type", @@ -22,7 +22,7 @@ func TestNewOptionalValue(t *testing.T) { t: TypeString, v: TypeString.ValueFrom("foo", nil), }, - want: &OptionalValue{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + want: &Optional{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, }, { name: 
"custom type", @@ -30,14 +30,14 @@ func TestNewOptionalValue(t *testing.T) { t: Type("foo"), v: &Value{t: Type("foo")}, }, - want: &OptionalValue{t: Type("foo"), v: &Value{t: Type("foo")}}, + want: &Optional{t: Type("foo"), v: &Value{t: Type("foo")}}, }, { name: "nil value", args: args{ t: Type("foo"), }, - want: &OptionalValue{t: Type("foo"), v: nil}, + want: &Optional{t: Type("foo"), v: nil}, }, { name: "invalid value", @@ -61,33 +61,33 @@ func TestNewOptionalValue(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tt.want, NewOptionalValue(tt.args.t, tt.args.v)) + assert.Equal(t, tt.want, NewOptional(tt.args.t, tt.args.v)) }) } } -func TestOptionalValueFrom(t *testing.T) { +func TestOptionalFrom(t *testing.T) { type args struct { v *Value } tests := []struct { name string args args - want *OptionalValue + want *Optional }{ { name: "default type", args: args{ v: TypeString.ValueFrom("foo", nil), }, - want: &OptionalValue{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + want: &Optional{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, }, { name: "custom type", args: args{ v: &Value{t: Type("foo")}, }, - want: &OptionalValue{t: Type("foo"), v: &Value{t: Type("foo")}}, + want: &Optional{t: Type("foo"), v: &Value{t: Type("foo")}}, }, { name: "invalid value", @@ -107,25 +107,25 @@ func TestOptionalValueFrom(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tt.want, OptionalValueFrom(tt.args.v)) + assert.Equal(t, tt.want, OptionalFrom(tt.args.v)) }) } } -func TestOptionalValue_Type(t *testing.T) { +func TestOptional_Type(t *testing.T) { tests := []struct { name string - value *OptionalValue + value *Optional want Type }{ { name: "ok", - value: &OptionalValue{t: Type("foo")}, + value: &Optional{t: Type("foo")}, want: Type("foo"), }, { name: "empty", - value: &OptionalValue{}, + value: &Optional{}, want: TypeUnknown, }, { @@ -144,20 +144,20 @@ func TestOptionalValue_Type(t 
*testing.T) { } } -func TestOptionalValue_Value(t *testing.T) { +func TestOptional_Value(t *testing.T) { tests := []struct { name string - value *OptionalValue + value *Optional want *Value }{ { name: "ok", - value: &OptionalValue{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, + value: &Optional{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, want: &Value{t: TypeString, v: "foobar"}, }, { name: "empty", - value: &OptionalValue{}, + value: &Optional{}, want: nil, }, { @@ -180,22 +180,22 @@ func TestOptionalValue_Value(t *testing.T) { } } -func TestOptionalValue_TypeAndValue(t *testing.T) { +func TestOptional_TypeAndValue(t *testing.T) { tests := []struct { name string - value *OptionalValue + value *Optional wantt Type wantv *Value }{ { name: "ok", - value: &OptionalValue{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, + value: &Optional{t: TypeString, v: &Value{t: TypeString, v: "foobar"}}, wantt: TypeString, wantv: &Value{t: TypeString, v: "foobar"}, }, { name: "empty", - value: &OptionalValue{}, + value: &Optional{}, wantt: TypeUnknown, wantv: nil, }, @@ -221,19 +221,19 @@ func TestOptionalValue_TypeAndValue(t *testing.T) { } } -func TestOptionalValue_SetValue(t *testing.T) { +func TestOptional_SetValue(t *testing.T) { type args struct { v *Value } tests := []struct { name string - value *OptionalValue + value *Optional args args invalid bool }{ { name: "set", - value: &OptionalValue{ + value: &Optional{ t: TypeString, v: &Value{t: TypeString, v: "foobar"}, }, @@ -241,14 +241,14 @@ func TestOptionalValue_SetValue(t *testing.T) { }, { name: "set to nil", - value: &OptionalValue{ + value: &Optional{ t: TypeString, }, args: args{v: &Value{t: TypeString, v: "bar"}}, }, { name: "invalid value", - value: &OptionalValue{ + value: &Optional{ t: TypeNumber, v: &Value{t: TypeNumber, v: 1}, }, @@ -257,14 +257,14 @@ func TestOptionalValue_SetValue(t *testing.T) { }, { name: "nil value", - value: &OptionalValue{ + value: &Optional{ t: TypeNumber, v: 
&Value{t: TypeNumber, v: 1}, }, }, { name: "empty", - value: &OptionalValue{}, + value: &Optional{}, args: args{v: &Value{t: TypeString, v: "bar"}}, invalid: true, }, @@ -299,18 +299,18 @@ func TestOptionalValue_SetValue(t *testing.T) { } } -func TestOptionalValue_Clone(t *testing.T) { +func TestOptional_Clone(t *testing.T) { tests := []struct { name string - target *OptionalValue + target *Optional }{ { name: "ok", - target: &OptionalValue{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, + target: &Optional{t: TypeString, v: TypeString.ValueFrom("foo", nil)}, }, { name: "empty", - target: &OptionalValue{}, + target: &Optional{}, }, { name: "nil", @@ -328,3 +328,59 @@ func TestOptionalValue_Clone(t *testing.T) { }) } } + +func TestOptional_Cast(t *testing.T) { + type args struct { + t Type + p TypePropertyMap + } + tests := []struct { + name string + target *Optional + args args + want *Optional + }{ + { + name: "diff type", + target: &Optional{t: TypeNumber, v: TypeNumber.ValueFrom(1.1, nil)}, + args: args{t: TypeString}, + want: &Optional{t: TypeString, v: TypeString.ValueFrom("1.1", nil)}, + }, + { + name: "same type", + target: &Optional{t: TypeNumber, v: TypeNumber.ValueFrom(1.1, nil)}, + args: args{t: TypeNumber}, + want: &Optional{t: TypeNumber, v: TypeNumber.ValueFrom(1.1, nil)}, + }, + { + name: "nil value", + target: &Optional{t: TypeNumber}, + args: args{t: TypeString}, + want: &Optional{t: TypeString}, + }, + { + name: "failed to cast", + target: &Optional{t: TypeLatLng, v: TypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}, nil)}, + args: args{t: TypeString}, + want: &Optional{t: TypeString}, + }, + { + name: "empty", + target: &Optional{}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: TypeString}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t, tt.args.p)) + }) + } +} diff --git a/pkg/value/string.go 
b/pkg/value/string.go index 3979d0cb0..03bcdd943 100644 --- a/pkg/value/string.go +++ b/pkg/value/string.go @@ -1,5 +1,10 @@ package value +import ( + "fmt" + "strconv" +) + var TypeString Type = "string" type propertyString struct{} @@ -7,9 +12,14 @@ type propertyString struct{} func (*propertyString) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(string); ok { return v, true - } - if v, ok := i.(*string); ok { + } else if v, ok := i.(*string); ok && v != nil { return *v, true + } else if v, ok := i.(float64); ok { + return strconv.FormatFloat(v, 'f', -1, 64), true + } else if v, ok := i.(*float64); ok && v != nil { + return strconv.FormatFloat(*v, 'f', -1, 64), true + } else if v, ok := i.(fmt.Stringer); ok && v != nil { + return v.String(), true } return nil, false } diff --git a/pkg/value/string_test.go b/pkg/value/string_test.go new file mode 100644 index 000000000..d39b3ccc6 --- /dev/null +++ b/pkg/value/string_test.go @@ -0,0 +1,57 @@ +package value + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_propertyString_I2V(t *testing.T) { + s := "foobar" + n := 1.12 + u, _ := url.Parse("https://reearth.io") + + tests := []struct { + name string + args []interface{} + want1 interface{} + want2 bool + }{ + { + name: "string", + args: []interface{}{s, &s}, + want1: "foobar", + want2: true, + }, + { + name: "number", + args: []interface{}{n, &n}, + want1: "1.12", + want2: true, + }, + { + name: "url", + args: []interface{}{u}, + want1: "https://reearth.io", + want2: true, + }, + { + name: "nil", + args: []interface{}{(*string)(nil), nil}, + want1: nil, + want2: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + p := &propertyString{} + for i, v := range tt.args { + got1, got2 := p.I2V(v) + assert.Equal(t, tt.want1, got1, "test %d", i) + assert.Equal(t, tt.want2, got2, "test %d", i) + } + }) + } +} diff --git a/pkg/value/url.go b/pkg/value/url.go index 0745a7937..07f2a5c64 100644 
--- a/pkg/value/url.go +++ b/pkg/value/url.go @@ -24,6 +24,12 @@ func (*propertyURL) I2V(i interface{}) (interface{}, bool) { } } + if v, ok := i.(*string); ok && v != nil { + if u, err := url.Parse(*v); err == nil { + return u, true + } + } + return nil, false } diff --git a/pkg/value/value.go b/pkg/value/value.go index 79cea6561..aac797f1e 100644 --- a/pkg/value/value.go +++ b/pkg/value/value.go @@ -80,3 +80,13 @@ func (v *Value) Validate() bool { func (v *Value) MarshalJSON() ([]byte, error) { return json.Marshal(v.Interface()) } + +func (v *Value) Cast(t Type, p TypePropertyMap) *Value { + if v == nil || v.t == TypeUnknown { + return nil + } + if v.t == t { + return v.Clone() + } + return t.ValueFrom(v.v, p) +} diff --git a/pkg/value/value_test.go b/pkg/value/value_test.go index b71a78fb6..34c47320b 100644 --- a/pkg/value/value_test.go +++ b/pkg/value/value_test.go @@ -262,3 +262,59 @@ func TestValue_Interface(t *testing.T) { }) } } + +func TestValue_Cast(t *testing.T) { + type args struct { + t Type + p TypePropertyMap + } + tests := []struct { + name string + target *Value + args args + want *Value + }{ + { + name: "diff type", + target: &Value{t: TypeNumber, v: 1.1}, + args: args{t: TypeString}, + want: &Value{t: TypeString, v: "1.1"}, + }, + { + name: "same type", + target: &Value{t: TypeNumber, v: 1.1}, + args: args{t: TypeNumber}, + want: &Value{t: TypeNumber, v: 1.1}, + }, + { + name: "failed to cast", + target: &Value{t: TypeLatLng, v: LatLng{Lat: 1, Lng: 2}}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "invalid value", + target: &Value{t: TypeNumber}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "empty", + target: &Value{}, + args: args{t: TypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: TypeString}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t, tt.args.p)) + }) + } +} From 
db4ceafabab576a57182c99e54fbc6379e9c28ed Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 23 Dec 2021 16:07:37 +0900 Subject: [PATCH 126/253] fix: synchronize mongo migration (#94) * add lock * remove test * fix log * fix mod * fix * remove IsLocked * refactor rerror * refactor transaction * update repo.Config to use lock --- go.mod | 2 + go.sum | 22 ++- internal/app/app.go | 6 +- internal/app/graphql.go | 9 -- internal/infrastructure/memory/config.go | 29 +++- internal/infrastructure/memory/config_test.go | 14 ++ internal/infrastructure/memory/container.go | 1 + internal/infrastructure/memory/lock.go | 21 +++ internal/infrastructure/mongo/config.go | 55 +++++-- internal/infrastructure/mongo/container.go | 13 +- internal/infrastructure/mongo/lock.go | 140 ++++++++++++++++++ .../infrastructure/mongo/migration/client.go | 85 ++++------- .../infrastructure/mongo/mongodoc/client.go | 49 +++++- .../infrastructure/mongo/mongodoc/config.go | 22 +++ internal/infrastructure/mongo/transaction.go | 45 +----- internal/usecase/repo/config.go | 4 +- internal/usecase/repo/container.go | 1 + internal/usecase/repo/lock.go | 17 +++ pkg/rerror/error.go | 29 ++-- pkg/rerror/error_test.go | 17 ++- 20 files changed, 411 insertions(+), 170 deletions(-) create mode 100644 internal/infrastructure/memory/config_test.go create mode 100644 internal/infrastructure/memory/lock.go create mode 100644 internal/infrastructure/mongo/lock.go create mode 100644 internal/infrastructure/mongo/mongodoc/config.go create mode 100644 internal/usecase/repo/lock.go diff --git a/go.mod b/go.mod index 11be3950c..04ae21e81 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/blang/semver v3.5.1+incompatible github.com/form3tech-oss/jwt-go v3.2.2+incompatible github.com/goccy/go-yaml v1.9.4 + github.com/google/uuid v1.3.0 github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d github.com/jarcoal/httpmock v1.0.8 @@ -25,6 +26,7 @@ require ( 
github.com/ravilushqa/otelgqlgen v0.2.0 github.com/sirupsen/logrus v1.8.1 github.com/spf13/afero v1.6.0 + github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 github.com/stretchr/testify v1.7.0 github.com/twpayne/go-kml v1.5.2 github.com/uber/jaeger-client-go v2.29.1+incompatible diff --git a/go.sum b/go.sum index 1408c22e9..fd80d1f09 100644 --- a/go.sum +++ b/go.sum @@ -78,6 +78,8 @@ github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/auth0/go-jwt-middleware v1.0.1 h1:/fsQ4vRr4zod1wKReUH+0A3ySRjGiT9G34kypO/EKwI= github.com/auth0/go-jwt-middleware v1.0.1/go.mod h1:YSeUX3z6+TF2H+7padiEqNJ73Zy9vXW72U//IgN0BIM= +github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/aws/aws-sdk-go v1.35.5/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -146,8 +148,11 @@ github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD87 github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.1 
h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg= +github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0= github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY= github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg= @@ -212,6 +217,7 @@ github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -254,8 +260,9 @@ github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0 h1:zHs+jv3LO743/zFGcB github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= 
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -292,6 +299,8 @@ github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jarcoal/httpmock v1.0.8 h1:8kI16SoO6LQKgPE7PvQuV+YuD/inwHd7fOOe2zMbo4k= github.com/jarcoal/httpmock v1.0.8/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= @@ -309,6 +318,8 @@ github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8Nz github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.11.1/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -400,6 +411,8 @@ github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= github.com/spf13/afero 
v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 h1:Fod/tm/5c19889+T6j7mXxg/tEJrcLuDJxR/98raj80= +github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2/go.mod h1:h98Zzl76KWv7bG0FHBMA9MAcDhwcIyE7q570tDP7CmY= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= @@ -444,6 +457,9 @@ github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= github.com/xdg-go/stringprep v1.0.2 h1:6iq84/ryjjeRmMJwxutI51F2GIPlP5BfTvXHeYjyhBc= github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= +github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d h1:splanxYIlg+5LfHAM6xpdFEAYOk8iySO56hMFq6uLyA= github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -452,6 +468,7 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= 
github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.7.4 h1:sllcioag8Mec0LYkftYWq+cKNPIR4Kqq3iv9ZXY0g/E= go.mongodb.org/mongo-driver v1.7.4/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= @@ -496,12 +513,14 @@ golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp 
v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -611,6 +630,7 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201008141435-b3e1573b7520/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= diff --git a/internal/app/app.go b/internal/app/app.go index b6926e708..9584738e8 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -128,11 +128,7 @@ func errorMessage(err error, log func(string, ...interface{})) (int, string) { code = http.StatusNotFound msg = "not found" } else { - var ierr *rerror.ErrInternal - if errors.As(err, &ierr) { - if err2 := ierr.Unwrap(); err2 != nil { - log("internal err: %+v", err2) - } + if ierr := rerror.UnwrapErrInternal(err); ierr != nil { code = http.StatusInternalServerError msg = "internal server error" } diff --git a/internal/app/graphql.go b/internal/app/graphql.go index ced6b24ec..811aa3310 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -2,7 +2,6 @@ package app import ( "context" - "errors" "github.com/99designs/gqlgen/graphql" "github.com/99designs/gqlgen/graphql/handler" @@ -13,7 +12,6 @@ import ( "github.com/ravilushqa/otelgqlgen" "github.com/reearth/reearth-backend/internal/adapter/gql" "github.com/reearth/reearth-backend/internal/usecase/interfaces" - 
"github.com/reearth/reearth-backend/pkg/rerror" "github.com/vektah/gqlparser/v2/gqlerror" ) @@ -82,13 +80,6 @@ func graphqlAPI( // show more detailed error messgage in debug mode func(ctx context.Context, e error) *gqlerror.Error { if conf.Debug { - var ierr *rerror.ErrInternal - if errors.As(e, &ierr) { - if err2 := ierr.Unwrap(); err2 != nil { - // TODO: display stacktrace with xerrors - ec.Logger.Errorf("%+v", err2) - } - } return gqlerror.ErrorPathf(graphql.GetFieldContext(ctx).Path(), e.Error()) } return graphql.DefaultErrorPresenter(ctx, e) diff --git a/internal/infrastructure/memory/config.go b/internal/infrastructure/memory/config.go index 8b8cd2bb9..07d18d1e3 100644 --- a/internal/infrastructure/memory/config.go +++ b/internal/infrastructure/memory/config.go @@ -9,25 +9,38 @@ import ( ) type Config struct { - lock sync.Mutex - data *config.Config + lock sync.Mutex + locked bool + data *config.Config } func NewConfig() repo.Config { return &Config{} } -func (r *Config) Load(ctx context.Context) (*config.Config, error) { +func (r *Config) LockAndLoad(ctx context.Context) (*config.Config, error) { r.lock.Lock() - defer r.lock.Unlock() - + r.locked = true return r.data, nil } func (r *Config) Save(ctx context.Context, c *config.Config) error { - r.lock.Lock() - defer r.lock.Unlock() + if c != nil { + r.data = c + } + return nil +} + +func (r *Config) SaveAndUnlock(ctx context.Context, c *config.Config) error { + _ = r.Save(ctx, c) + return r.Unlock(ctx) +} - r.data = c +func (r *Config) Unlock(_ context.Context) error { + if !r.locked { + return nil + } + r.lock.Unlock() + r.locked = false return nil } diff --git a/internal/infrastructure/memory/config_test.go b/internal/infrastructure/memory/config_test.go new file mode 100644 index 000000000..bba230ae4 --- /dev/null +++ b/internal/infrastructure/memory/config_test.go @@ -0,0 +1,14 @@ +package memory + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestConfig(t 
*testing.T) { + ctx := context.Background() + c := NewConfig() + assert.NoError(t, c.Unlock(ctx)) +} diff --git a/internal/infrastructure/memory/container.go b/internal/infrastructure/memory/container.go index 2c9b9d45f..77db26476 100644 --- a/internal/infrastructure/memory/container.go +++ b/internal/infrastructure/memory/container.go @@ -24,5 +24,6 @@ func InitRepos(c *repo.Container) *repo.Container { c.User = NewUser() c.SceneLock = NewSceneLock() c.Transaction = NewTransaction() + c.Lock = NewLock() return c } diff --git a/internal/infrastructure/memory/lock.go b/internal/infrastructure/memory/lock.go new file mode 100644 index 000000000..632586c3a --- /dev/null +++ b/internal/infrastructure/memory/lock.go @@ -0,0 +1,21 @@ +package memory + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase/repo" +) + +type Lock struct{} + +func NewLock() repo.Lock { + return &Lock{} +} + +func (r *Lock) Lock(_ context.Context, _ string) error { + return nil +} + +func (r *Lock) Unlock(_ context.Context, _ string) error { + return nil +} diff --git a/internal/infrastructure/mongo/config.go b/internal/infrastructure/mongo/config.go index 53a2108a9..a78b57a1a 100644 --- a/internal/infrastructure/mongo/config.go +++ b/internal/infrastructure/mongo/config.go @@ -8,39 +8,62 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/config" "github.com/reearth/reearth-backend/pkg/rerror" + "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" ) -var upsert = true +const configLockName = "config" type configRepo struct { client *mongodoc.ClientCollection + lock repo.Lock } -func NewConfig(client *mongodoc.Client) repo.Config { - return &configRepo{client: client.WithCollection("config")} +func NewConfig(client *mongodoc.Client, lock repo.Lock) repo.Config { + return &configRepo{client: client.WithCollection("config"), lock: lock} } -func (r 
*configRepo) Load(ctx context.Context) (*config.Config, error) { - cfg := &config.Config{} - if err := r.client.Collection().FindOne(ctx, nil).Decode(cfg); err != nil { - if errors.Is(err, mongo.ErrNoDocuments) { - return cfg, nil +func (r *configRepo) LockAndLoad(ctx context.Context) (cfg *config.Config, err error) { + if err := r.lock.Lock(ctx, configLockName); err != nil { + return nil, err + } + + cfgd := &mongodoc.ConfigDocument{} + if err := r.client.Collection().FindOne(ctx, bson.M{}).Decode(cfgd); err != nil { + if !errors.Is(err, mongo.ErrNilDocument) && !errors.Is(err, mongo.ErrNoDocuments) { + return nil, rerror.ErrInternalBy(err) } - return nil, rerror.ErrInternalBy(err) } - return cfg, nil + return cfgd.Model(), nil } func (r *configRepo) Save(ctx context.Context, cfg *config.Config) error { - if cfg == nil { - return nil + if cfg != nil { + if _, err := r.client.Collection().UpdateOne( + ctx, + bson.M{}, + bson.M{"$set": mongodoc.NewConfig(*cfg)}, + (&options.UpdateOptions{}).SetUpsert(true), + ); err != nil { + return rerror.ErrInternalBy(err) + } } - if _, err := r.client.Collection().UpdateOne(ctx, nil, cfg, &options.UpdateOptions{ - Upsert: &upsert, - }); err != nil { - return rerror.ErrInternalBy(err) + + return nil +} + +func (r *configRepo) SaveAndUnlock(ctx context.Context, cfg *config.Config) error { + if err := r.Save(ctx, cfg); err != nil { + return err } + return r.Unlock(ctx) +} + +func (r *configRepo) Unlock(ctx context.Context) error { + if err := r.lock.Unlock(ctx, configLockName); err != nil && !errors.Is(err, repo.ErrNotLocked) { + return err + } + return nil } diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index ead33b18a..ec82c46cf 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -13,10 +13,15 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas if databaseName == "" { databaseName = 
"reearth" } - client := mongodoc.NewClient(databaseName, mc) + lock, err := NewLock(mc.Database(databaseName).Collection("locks")) + if err != nil { + return err + } + + client := mongodoc.NewClient(databaseName, mc) c.Asset = NewAsset(client) - c.Config = NewConfig(client) + c.Config = NewConfig(client, lock) c.DatasetSchema = NewDatasetSchema(client) c.Dataset = NewDataset(client) c.Layer = NewLayer(client) @@ -30,8 +35,10 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas c.User = NewUser(client) c.SceneLock = NewSceneLock(client) c.Transaction = NewTransaction(client) + c.Lock = lock - if err := (migration.Client{Client: client}).Migrate(ctx); err != nil { + // migration + if err := (migration.Client{Client: client, Config: c.Config}).Migrate(ctx); err != nil { return err } diff --git a/internal/infrastructure/mongo/lock.go b/internal/infrastructure/mongo/lock.go new file mode 100644 index 000000000..b42cfca05 --- /dev/null +++ b/internal/infrastructure/mongo/lock.go @@ -0,0 +1,140 @@ +package mongo + +import ( + "context" + "errors" + "math/rand" + "sync" + "time" + + "github.com/google/uuid" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + lock "github.com/square/mongo-lock" + "go.mongodb.org/mongo-driver/mongo" +) + +type Lock struct { + l *lock.Client + hostid string + locks sync.Map +} + +func NewLock(c *mongo.Collection) (repo.Lock, error) { + hostid, err := uuidString() + if err != nil { + return nil, err + } + + l := lock.NewClient(c) + if err := l.CreateIndexes(context.Background()); err != nil { + return nil, err + } + + return &Lock{ + l: l, + hostid: hostid, + }, nil +} + +func (r *Lock) Lock(ctx context.Context, name string) error { + if r.getLockID(name) != "" { + return repo.ErrAlreadyLocked + } + + lockID, err := uuidString() + if err != nil { + return err + } + + log.Infof("lock: trying to lock: id=%s, 
name=%s, host=%s", name, lockID, r.hostid) + // wait and retry + const retry = 10 + for i := 0; i < retry; i++ { + if err := r.l.XLock(ctx, name, lockID, r.details()); err != nil { + if errors.Is(err, lock.ErrAlreadyLocked) { + log.Infof("lock: failed to lock (%d/%d): name=%s, id=%s, host=%s", i+1, retry, name, lockID, r.hostid) + if i >= retry { + return repo.ErrFailedToLock + } + + time.Sleep(time.Second * time.Duration(rand.Intn(1)+(i+1))) + continue + } + + log.Infof("lock: failed to lock: name=%s, id=%s, host=%s, err=%s", name, lockID, r.hostid, err) + return repo.ErrFailedToLock + } else { + break + } + } + + r.setLockID(name, lockID) + log.Infof("lock: locked: name=%s, id=%s, host=%s", name, lockID, r.hostid) + return nil +} + +func (r *Lock) Unlock(ctx context.Context, name string) error { + lockID := r.getLockID(name) + if lockID == "" { + return repo.ErrNotLocked + } + + if _, err := r.l.Unlock(ctx, lockID); err != nil { + return rerror.ErrInternalBy(err) + } + + r.deleteLockID(name) + log.Infof("lock: unlocked: name=%s, id=%s, host=%s", name, lockID, r.hostid) + return nil +} + +func uuidString() (string, error) { + u, err := uuid.NewUUID() + if err != nil { + return "", rerror.ErrInternalBy(err) + } + + return u.String(), nil +} + +func (r *Lock) details() lock.LockDetails { + if r == nil { + return lock.LockDetails{} + } + + return lock.LockDetails{ + Host: r.hostid, + TTL: 60 * 60, // 1 hour + } +} + +func (r *Lock) setLockID(key, lockID string) { + if r == nil { + return + } + + r.locks.Store(key, lockID) +} + +func (r *Lock) getLockID(key string) string { + if r == nil { + return "" + } + + l, ok := r.locks.Load(key) + if !ok { + return "" + } + + return l.(string) +} + +func (r *Lock) deleteLockID(key string) { + if r == nil { + return + } + + r.locks.Delete(key) +} diff --git a/internal/infrastructure/mongo/migration/client.go b/internal/infrastructure/mongo/migration/client.go index 4b210825d..c6b344c92 100644 --- 
a/internal/infrastructure/mongo/migration/client.go +++ b/internal/infrastructure/mongo/migration/client.go @@ -2,62 +2,66 @@ package migration import ( "context" - "errors" "fmt" "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" - "github.com/reearth/reearth-backend/pkg/config" + "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/rerror" - "go.mongodb.org/mongo-driver/bson" - "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" ) -var upsert = true - type DBClient = *mongodoc.Client type MigrationFunc = func(context.Context, DBClient) error type Client struct { Client *mongodoc.Client + Config repo.Config } -func (c Client) Migrate(ctx context.Context) error { - config, err := c.loadConfig(ctx) +func (c Client) Migrate(ctx context.Context) (err error) { + config, err := c.Config.LockAndLoad(ctx) if err != nil { - var ie *rerror.ErrInternal - if ok := errors.As(err, &ie); ok { - err = ie.Unwrap() - } - return fmt.Errorf("Failed to load config: %w", err) + return fmt.Errorf("Failed to load config: %w", rerror.UnwrapErrInternal(err)) } + defer func() { + err = c.Config.Unlock(ctx) + }() nextMigrations := config.NextMigrations(migrationKeys()) if len(nextMigrations) == 0 { return nil } + var tx repo.Tx + defer func() { + if tx != nil { + err = tx.End(ctx) + } + }() + for _, m := range nextMigrations { - log.Infof("DB migration: %d\n", m) + tx, err = c.Client.BeginTransaction() + if err != nil { + return err + } + log.Infof("DB migration: %d\n", m) if err := migrations[m](ctx, c.Client); err != nil { - var ie *rerror.ErrInternal - if ok := errors.As(err, &ie); ok { - err = ie.Unwrap() - } - return fmt.Errorf("Failed to exec migration %d: %w", m, err) + return fmt.Errorf("Failed to exec migration %d: %w", m, rerror.UnwrapErrInternal(err)) } config.Migration = m - if err := c.saveConfig(ctx, config); err != nil { - var ie 
*rerror.ErrInternal - if ok := errors.As(err, &ie); ok { - err = ie.Unwrap() - } - return fmt.Errorf("Failed to save config: %w", err) + if err := c.Config.Save(ctx, config); err != nil { + return err + } + + tx.Commit() + if err := tx.End(ctx); err != nil { + tx = nil + return err } + tx = nil } return nil @@ -70,32 +74,3 @@ func migrationKeys() []int64 { } return keys } - -func (c *Client) loadConfig(ctx context.Context) (*config.Config, error) { - cfg := &config.Config{} - - if err := c.Client.Collection("config").FindOne(ctx, bson.D{}).Decode(cfg); err != nil { - if errors.Is(err, mongo.ErrNoDocuments) || errors.Is(err, mongo.ErrNilDocument) { - return cfg, nil - } - return nil, err - } - - return cfg, nil -} - -func (c *Client) saveConfig(ctx context.Context, cfg *config.Config) error { - if cfg == nil { - return nil - } - - if _, err := c.Client.Collection("config").UpdateOne(ctx, bson.D{}, bson.M{ - "$set": cfg, - }, &options.UpdateOptions{ - Upsert: &upsert, - }); err != nil { - return rerror.ErrInternalBy(err) - } - - return nil -} diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index c959875b7..520606c36 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -7,6 +7,7 @@ import ( "io" "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/rerror" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" @@ -203,7 +204,6 @@ func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p }}) } } - // ๆ›ดใซ่ชญใ‚ใ‚‹่ฆ็ด ใŒใ‚ใ‚‹ใฎใ‹็ขบใ‹ใ‚ใ‚‹ใŸใ‚ใซไธ€ใคๅคšใ‚ใซ่ชญใฟๅ‡บใ™ // Read one more element so that we can see whether there's a further one limit++ findOptions.Limit = &limit @@ -229,7 +229,7 @@ func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p hasMore := 
false if len(results) == int(limit) { hasMore = true - // ไฝ™่จˆใซ1ใค่ชญใ‚“ใ ๅˆ†ใ‚’ๅ–ใ‚Š้™คใ + // Remove the extra one reading. results = results[:len(results)-1] } @@ -261,9 +261,6 @@ func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p } // ref: https://facebook.github.io/relay/graphql/connections.htm#sec-undefined.PageInfo.Fields - // firstใŒ่จญๅฎšใ•ใ‚Œใฆใ„ใ‚‹ๅ ดๅˆใงๅ‰ใฎpageใŒใ‚ใ‚‹ใ‹ใฉใ†ใ‹ใฎๅˆคๅฎšใฏๅŠน็Ž‡็š„ใซ่กŒใˆใ‚‹ๅ ดๅˆไปฅๅค–ใฏfalseใ‚’่ฟ”ใ—ใฆใ‚ˆใ„ - // lastใŒ่จญๅฎšใ•ใ‚Œใฆใ„ใ‚‹ๅ ดๅˆใงๆฌกใฎpageใŒใ‚ใ‚‹ใ‹ใฉใ†ใ‹ใฎๅˆคๅฎšใฏๅŠน็Ž‡็š„ใซ่กŒใˆใ‚‹ๅ ดๅˆไปฅๅค–ใฏfalseใ‚’่ฟ”ใ—ใฆใ‚ˆใ„ - // ๆ—ขๅญ˜ใฎๅฎŸ่ฃ…ใงใฏๅŠน็Ž‡็š„ใซๆฑ‚ใ‚ใ‚‹ใ“ใจใŒใงใใชใ„ใฎใง็ตถๅฏพใซfalseใ‚’่ฟ”ใ™ // If first is set, false can be returned unless it can be efficiently determined whether or not a previous page exists. // If last is set, false can be returned unless it can be efficiently determined whether or not a next page exists. // Returning absolutely false because the existing implementation cannot determine it efficiently. 
@@ -328,6 +325,44 @@ func indexes(ctx context.Context, coll *mongo.Collection) map[string]struct{} { return keys } -func (c *Client) Session() (mongo.Session, error) { - return c.client.StartSession() +func (c *Client) BeginTransaction() (repo.Tx, error) { + s, err := c.client.StartSession() + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + + if err := s.StartTransaction(&options.TransactionOptions{}); err != nil { + return nil, rerror.ErrInternalBy(err) + } + + return &Tx{session: s, commit: false}, nil +} + +type Tx struct { + session mongo.Session + commit bool +} + +func (t *Tx) Commit() { + if t == nil { + return + } + t.commit = true +} + +func (t *Tx) End(ctx context.Context) error { + if t == nil { + return nil + } + + if t.commit { + if err := t.session.CommitTransaction(ctx); err != nil { + return rerror.ErrInternalBy(err) + } + } else if err := t.session.AbortTransaction(ctx); err != nil { + return rerror.ErrInternalBy(err) + } + + t.session.EndSession(ctx) + return nil } diff --git a/internal/infrastructure/mongo/mongodoc/config.go b/internal/infrastructure/mongo/mongodoc/config.go new file mode 100644 index 000000000..b54649013 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/config.go @@ -0,0 +1,22 @@ +package mongodoc + +import "github.com/reearth/reearth-backend/pkg/config" + +type ConfigDocument struct { + Migration int64 +} + +func NewConfig(c config.Config) ConfigDocument { + return ConfigDocument{ + Migration: c.Migration, + } +} + +func (c *ConfigDocument) Model() *config.Config { + if c == nil { + return &config.Config{} + } + return &config.Config{ + Migration: c.Migration, + } +} diff --git a/internal/infrastructure/mongo/transaction.go b/internal/infrastructure/mongo/transaction.go index 7f0eb2995..b9ca9935e 100644 --- a/internal/infrastructure/mongo/transaction.go +++ b/internal/infrastructure/mongo/transaction.go @@ -1,13 +1,8 @@ package mongo import ( - "context" - 
"github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - "github.com/reearth/reearth-backend/pkg/rerror" - "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" ) type Transaction struct { @@ -21,43 +16,5 @@ func NewTransaction(client *mongodoc.Client) repo.Transaction { } func (t *Transaction) Begin() (repo.Tx, error) { - s, err := t.client.Session() - if err != nil { - return nil, rerror.ErrInternalBy(err) - } - - if err := s.StartTransaction(&options.TransactionOptions{}); err != nil { - return nil, rerror.ErrInternalBy(err) - } - - return &Tx{session: s, commit: false}, nil -} - -type Tx struct { - session mongo.Session - commit bool -} - -func (t *Tx) Commit() { - if t == nil { - return - } - t.commit = true -} - -func (t *Tx) End(ctx context.Context) error { - if t == nil { - return nil - } - - if t.commit { - if err := t.session.CommitTransaction(ctx); err != nil { - return rerror.ErrInternalBy(err) - } - } else if err := t.session.AbortTransaction(ctx); err != nil { - return rerror.ErrInternalBy(err) - } - - t.session.EndSession(ctx) - return nil + return t.client.BeginTransaction() } diff --git a/internal/usecase/repo/config.go b/internal/usecase/repo/config.go index 5db2abb4c..fed54def3 100644 --- a/internal/usecase/repo/config.go +++ b/internal/usecase/repo/config.go @@ -7,6 +7,8 @@ import ( ) type Config interface { - Load(context.Context) (*config.Config, error) + LockAndLoad(context.Context) (*config.Config, error) Save(context.Context, *config.Config) error + SaveAndUnlock(context.Context, *config.Config) error + Unlock(context.Context) error } diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index 35c679e43..39ea3717d 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -16,4 +16,5 @@ type Container struct { User User SceneLock SceneLock Transaction Transaction + Lock 
Lock } diff --git a/internal/usecase/repo/lock.go b/internal/usecase/repo/lock.go new file mode 100644 index 000000000..645ae1445 --- /dev/null +++ b/internal/usecase/repo/lock.go @@ -0,0 +1,17 @@ +package repo + +import ( + "context" + "errors" +) + +var ( + ErrFailedToLock = errors.New("failed to lock") + ErrAlreadyLocked = errors.New("already locked") + ErrNotLocked = errors.New("not locked") +) + +type Lock interface { + Lock(context.Context, string) error + Unlock(context.Context, string) error +} diff --git a/pkg/rerror/error.go b/pkg/rerror/error.go index b38526e09..cbaea7f75 100644 --- a/pkg/rerror/error.go +++ b/pkg/rerror/error.go @@ -2,6 +2,7 @@ package rerror import ( "fmt" + "runtime/debug" "github.com/pkg/errors" "github.com/reearth/reearth-backend/pkg/log" @@ -17,28 +18,18 @@ var ( ErrNotImplemented = errors.New("not implemented") ) -// ErrInternal is an error struct that can hold an internal error but hides users the details. -type ErrInternal struct { - err Error -} - func ErrInternalBy(err error) error { log.Errorf("internal error: %s", err.Error()) - return &ErrInternal{ - err: Error{ - Label: errInternal, - Err: err, - Hidden: true, - }, + debug.PrintStack() + return &Error{ + Label: errInternal, + Err: err, + Hidden: true, } } -func (e *ErrInternal) Error() string { - return e.err.Error() -} - -func (e *ErrInternal) Unwrap() error { - return e.err.Unwrap() +func UnwrapErrInternal(err error) error { + return As(err, errInternal) } // Error can hold an error together with label. 
@@ -117,9 +108,9 @@ func As(err error, label error) error { return nil } e := err - var target *Error for { - if !errors.As(e, &target) { + target := Get(e) + if target == nil { break } if target.Label == label { diff --git a/pkg/rerror/error_test.go b/pkg/rerror/error_test.go index d254338d2..bfe3b8190 100644 --- a/pkg/rerror/error_test.go +++ b/pkg/rerror/error_test.go @@ -1,19 +1,21 @@ package rerror import ( - "errors" "fmt" "testing" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) func TestErrInternal(t *testing.T) { werr := errors.New("wrapped") err := ErrInternalBy(werr) - var err2 *ErrInternal + var err2 *Error assert.Equal(t, "internal", err.Error()) assert.True(t, errors.As(err, &err2)) + assert.Same(t, errInternal, err2.Label) + assert.True(t, err2.Hidden) assert.Same(t, werr, errors.Unwrap(err)) } @@ -50,6 +52,12 @@ func TestError(t *testing.T) { assert.Equal(t, "d: e.f: g", err6.Error()) } +func TestUnwrapErrInternal(t *testing.T) { + err := errors.New("err") + assert.Same(t, err, UnwrapErrInternal(ErrInternalBy(err))) + assert.Nil(t, UnwrapErrInternal(err)) +} + func TestFrom(t *testing.T) { werr := errors.New("wrapped") err := From("label", werr) @@ -129,6 +137,11 @@ func TestAs(t *testing.T) { assert.Nil(t, As(err, errors.New("wrapped"))) assert.Nil(t, As(nil, errors.New("label"))) + assert.Nil(t, As(errors.New("foo"), errors.New("bar"))) + assert.Nil(t, As(&Error{ + Label: errors.New("bar"), + Err: errors.New("foo"), + }, errors.New("bar"))) } func TestWith(t *testing.T) { From 8b0c250b1deb5349f9aba851737e452291110bb3 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 11 Jan 2022 07:34:09 +0000 Subject: [PATCH 127/253] v0.3.0 --- CHANGELOG.md | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b0dd49b8b..6aea2a5f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,36 @@ # Changelog All notable changes to this project will be documented in this file. 
-## 0.2.0 - 2021-11-16 +## 0.3.0 - 2021-12-23 + +### ๐Ÿš€ Features + +- Clusters for scenes ([#75](https://github.com/reearth/reearth-backend/pull/75)) [`3512c0`](https://github.com/reearth/reearth-backend/commit/3512c0) +- Add fields of scene property for terrain [`8693b4`](https://github.com/reearth/reearth-backend/commit/8693b4) +- Camera limiter ([#87](https://github.com/reearth/reearth-backend/pull/87)) [`63c582`](https://github.com/reearth/reearth-backend/commit/63c582) + +### ๐Ÿ”ง Bug Fixes + +- Terrain fields of scene property [`5e3d25`](https://github.com/reearth/reearth-backend/commit/5e3d25) +- Numbers are not decoded from gql to value [`2ddbc8`](https://github.com/reearth/reearth-backend/commit/2ddbc8) +- Layers have their own tags separate from the scene ([#90](https://github.com/reearth/reearth-backend/pull/90)) [`c4fb9a`](https://github.com/reearth/reearth-backend/commit/c4fb9a) +- Return property with clusters data ([#89](https://github.com/reearth/reearth-backend/pull/89)) [`1b99c6`](https://github.com/reearth/reearth-backend/commit/1b99c6) +- Cast values, rename value.OptionalValue ([#93](https://github.com/reearth/reearth-backend/pull/93)) [`ba4b18`](https://github.com/reearth/reearth-backend/commit/ba4b18) +- Synchronize mongo migration ([#94](https://github.com/reearth/reearth-backend/pull/94)) [`db4cea`](https://github.com/reearth/reearth-backend/commit/db4cea) + +### ๐Ÿ“– Documentation + +- Add pkg.go.dev badge to readme [`91f9b3`](https://github.com/reearth/reearth-backend/commit/91f9b3) + +### โœจ Refactor + +- Make property.Value and dataset.Value independent in pkg/value ([#77](https://github.com/reearth/reearth-backend/pull/77)) [`73143b`](https://github.com/reearth/reearth-backend/commit/73143b) + +### Miscellaneous Tasks + +- Fix plugin manifest JSON schema [`2b57b1`](https://github.com/reearth/reearth-backend/commit/2b57b1) + +## 0.2.0 - 2021-11-18 ### ๐Ÿš€ Features From 5cdb5d5efa50b797defdffe7367f4ec96fd02402 Mon Sep 17 00:00:00 
2001 From: rot1024 Date: Fri, 14 Jan 2022 16:15:48 +0900 Subject: [PATCH 128/253] ci: use golangci-lint-action --- .github/workflows/main.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 49707dacb..f4ac34eb8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,10 +28,10 @@ jobs: key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - - name: install golangci-lint - run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin v1.39.0 - - name: lint - run: $(go env GOPATH)/bin/golangci-lint run --timeout=10m + - name: golangci-lint + uses: golangci/golangci-lint-action@v2 + with: + version: v1.43 - name: test run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic - name: Send coverage report From 1265ac9b395815fcb1256e2558ba3fa6f7bb4209 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Sat, 15 Jan 2022 01:26:17 +0900 Subject: [PATCH 129/253] refactor: pkg/id, use ID aliases, move JSON schemas (#97) * rename IDToKeys, some methods support nil ids, refactor ids * remove unnecessary comments, refactor NewID and MustID * use id aliases * move schemas * remove unused file --- .vscode/settings.json | 8 +- internal/infrastructure/memory/container.go | 1 - internal/infrastructure/memory/scene_lock.go | 7 +- internal/infrastructure/mongo/asset.go | 4 +- internal/infrastructure/mongo/dataset.go | 10 +- .../infrastructure/mongo/dataset_schema.go | 6 +- internal/infrastructure/mongo/layer.go | 14 +- .../infrastructure/mongo/mongodoc/layer.go | 2 +- .../mongo/mongodoc/scene_align.go | 2 +- internal/infrastructure/mongo/mongodoc/tag.go | 2 +- internal/infrastructure/mongo/plugin.go | 6 +- internal/infrastructure/mongo/project.go | 4 +- internal/infrastructure/mongo/property.go | 6 +- .../infrastructure/mongo/property_schema.go | 4 +- 
internal/infrastructure/mongo/scene.go | 12 +- internal/infrastructure/mongo/scene_lock.go | 2 +- internal/infrastructure/mongo/tag.go | 12 +- internal/infrastructure/mongo/team.go | 4 +- internal/infrastructure/mongo/user.go | 2 +- internal/usecase/cursor.go | 1 - internal/usecase/operator.go | 9 - pkg/asset/asset.go | 27 +- pkg/asset/asset_test.go | 15 +- pkg/asset/builder.go | 26 +- pkg/asset/builder_test.go | 51 +- pkg/asset/id.go | 31 ++ pkg/builtin/main.go | 9 +- pkg/builtin/main_test.go | 13 +- pkg/dataset/builder.go | 24 +- pkg/dataset/csvparser.go | 15 +- pkg/dataset/csvparser_test.go | 3 +- pkg/dataset/dataset.go | 22 +- pkg/dataset/dataset_test.go | 17 +- pkg/dataset/diff.go | 5 +- pkg/dataset/field.go | 10 +- pkg/dataset/graph_iterator.go | 20 +- pkg/dataset/graph_iterator_test.go | 33 +- pkg/dataset/graph_loader.go | 8 +- pkg/dataset/id.go | 45 ++ pkg/dataset/list.go | 24 +- pkg/dataset/list_test.go | 21 +- pkg/dataset/loader.go | 10 +- pkg/dataset/schema.go | 26 +- pkg/dataset/schema_builder.go | 24 +- pkg/dataset/schema_field.go | 12 +- pkg/dataset/schema_field_builder.go | 12 +- pkg/dataset/schema_field_diff.go | 6 +- pkg/dataset/schema_graph_iterator.go | 21 +- pkg/dataset/schema_graph_iterator_test.go | 31 +- pkg/dataset/schema_list.go | 11 +- pkg/dataset/schema_list_test.go | 15 +- pkg/id/asset_gen.go | 59 +- pkg/id/asset_gen_test.go | 493 ++++++++--------- pkg/id/cluster_field_gen_test.go | 493 ++++++++--------- pkg/id/cluster_gen.go | 59 +- pkg/id/dataset_gen.go | 59 +- pkg/id/dataset_gen_test.go | 493 ++++++++--------- pkg/id/dataset_schema_field_gen.go | 59 +- pkg/id/dataset_schema_field_gen_test.go | 493 ++++++++--------- pkg/id/dataset_schema_gen.go | 59 +- pkg/id/dataset_schema_gen_test.go | 493 ++++++++--------- pkg/id/id.go | 2 +- pkg/id/id.tmpl | 59 +- pkg/id/id_test.go | 6 +- pkg/id/id_test.tmpl | 515 ++++++++---------- pkg/id/infobox_field_gen.go | 59 +- pkg/id/infobox_field_gen_test.go | 493 ++++++++--------- pkg/id/layer_gen.go | 59 
+- pkg/id/layer_gen_test.go | 493 ++++++++--------- pkg/id/plugin.go | 18 +- pkg/id/plugin_extension.go | 6 - pkg/id/plugin_test.go | 44 +- pkg/id/project_gen.go | 59 +- pkg/id/project_gen_test.go | 493 ++++++++--------- pkg/id/property_gen.go | 59 +- pkg/id/property_gen_test.go | 493 ++++++++--------- pkg/id/property_item_gen.go | 59 +- pkg/id/property_item_gen_test.go | 493 ++++++++--------- pkg/id/property_schema.go | 20 +- pkg/id/property_schema_field.go | 9 +- pkg/id/property_schema_group.go | 9 +- pkg/id/property_schema_test.go | 20 +- pkg/id/scene_gen.go | 59 +- pkg/id/scene_gen_test.go | 493 ++++++++--------- pkg/id/tag_gen.go | 59 +- pkg/id/tag_gen_test.go | 493 ++++++++--------- pkg/id/team_gen.go | 59 +- pkg/id/team_gen_test.go | 493 ++++++++--------- pkg/id/user_gen.go | 59 +- pkg/id/user_gen_test.go | 493 ++++++++--------- pkg/id/widget_gen.go | 59 +- pkg/id/widget_gen_test.go | 493 ++++++++--------- pkg/layer/builder.go | 16 +- pkg/layer/decoding/common.go | 32 +- pkg/layer/decoding/czml.go | 9 +- pkg/layer/decoding/czml_test.go | 4 +- pkg/layer/decoding/decoder.go | 3 +- pkg/layer/decoding/geojson.go | 9 +- pkg/layer/decoding/geojson_test.go | 4 +- pkg/layer/decoding/kml.go | 9 +- pkg/layer/decoding/kml_test.go | 24 +- pkg/layer/decoding/reearth.go | 55 +- pkg/layer/decoding/reearth_test.go | 37 +- pkg/layer/decoding/shp.go | 9 +- pkg/layer/encoding/czml.go | 4 +- pkg/layer/encoding/czml_test.go | 58 +- pkg/layer/encoding/exporter.go | 3 +- pkg/layer/encoding/geojson.go | 4 +- pkg/layer/encoding/geojson_test.go | 65 ++- pkg/layer/encoding/kml.go | 4 +- pkg/layer/encoding/kml_test.go | 67 ++- pkg/layer/encoding/shp.go | 4 +- pkg/layer/encoding/shp_test.go | 33 +- pkg/layer/group.go | 23 +- pkg/layer/group_builder.go | 22 +- pkg/layer/group_test.go | 31 +- pkg/layer/id.go | 75 +++ pkg/layer/id_list.go | 63 +-- pkg/layer/id_list_test.go | 12 +- pkg/layer/infobox.go | 25 +- pkg/layer/infobox_field.go | 21 +- pkg/layer/infobox_field_builder.go | 21 +- 
pkg/layer/infobox_test.go | 29 +- pkg/layer/initializer.go | 45 +- pkg/layer/initializer_test.go | 83 ++- pkg/layer/item.go | 21 +- pkg/layer/item_builder.go | 22 +- pkg/layer/layer.go | 45 +- pkg/layer/layerops/initializer.go | 9 +- pkg/layer/layerops/initializer_test.go | 20 +- pkg/layer/layerops/processor.go | 9 +- pkg/layer/layerops/processor_test.go | 29 +- pkg/layer/list.go | 32 +- pkg/layer/list_test.go | 3 +- pkg/layer/loader.go | 18 +- pkg/layer/loader_test.go | 25 +- pkg/layer/merged.go | 25 +- pkg/layer/merged_test.go | 39 +- pkg/layer/merging/merged.go | 15 +- pkg/layer/merging/merger.go | 3 +- pkg/layer/merging/merger_test.go | 29 +- pkg/layer/tag.go | 6 - pkg/plugin/builder.go | 9 +- pkg/plugin/builder_test.go | 51 +- pkg/plugin/extension.go | 9 +- pkg/plugin/extension_builder.go | 7 +- pkg/plugin/extension_builder_test.go | 29 +- pkg/plugin/extension_test.go | 9 +- pkg/plugin/id.go | 31 ++ pkg/plugin/loader.go | 4 +- pkg/plugin/manifest/convert.go | 35 +- pkg/plugin/manifest/convert_test.go | 67 ++- pkg/plugin/manifest/parser.go | 10 +- pkg/plugin/manifest/parser_test.go | 23 +- pkg/plugin/manifest/parser_translation.go | 10 +- .../manifest/parser_translation_test.go | 4 +- pkg/plugin/manifest/schema_gen.go | 2 +- pkg/plugin/plugin.go | 29 +- pkg/plugin/plugin_test.go | 19 +- pkg/plugin/pluginpack/package.go | 4 +- pkg/plugin/pluginpack/package_test.go | 3 +- pkg/project/builder.go | 11 +- pkg/project/builder_test.go | 33 +- pkg/project/id.go | 31 ++ pkg/project/project.go | 14 +- pkg/project/project_test.go | 3 +- pkg/project/publishment_status.go | 6 +- pkg/property/builder.go | 28 +- pkg/property/builder_test.go | 63 ++- pkg/property/condition.go | 6 +- pkg/property/field.go | 23 +- pkg/property/field_builder.go | 8 +- pkg/property/field_builder_test.go | 19 +- pkg/property/field_test.go | 25 +- pkg/property/group.go | 38 +- pkg/property/group_builder.go | 14 +- pkg/property/group_builder_test.go | 49 +- pkg/property/group_list.go | 58 +- 
pkg/property/group_list_builder.go | 14 +- pkg/property/group_list_builder_test.go | 55 +- pkg/property/group_list_test.go | 223 ++++---- pkg/property/group_test.go | 85 ++- pkg/property/id.go | 71 +++ pkg/property/initializer.go | 52 +- pkg/property/initializer_test.go | 113 ++-- pkg/property/item.go | 25 +- pkg/property/item_builder.go | 8 +- pkg/property/item_test.go | 29 +- pkg/property/link.go | 77 +-- pkg/property/link_test.go | 133 +++-- pkg/property/list.go | 22 +- pkg/property/list_test.go | 7 +- pkg/property/loader.go | 12 +- pkg/property/loader_test.go | 21 +- pkg/property/merged.go | 64 ++- pkg/property/merged_test.go | 142 +++-- pkg/property/pointer.go | 42 +- pkg/property/pointer_test.go | 7 +- pkg/property/property.go | 35 +- pkg/property/property_test.go | 75 ++- pkg/property/schema.go | 21 +- pkg/property/schema_builder.go | 8 +- pkg/property/schema_builder_test.go | 45 +- pkg/property/schema_field.go | 5 +- pkg/property/schema_field_builder.go | 5 +- pkg/property/schema_field_builder_test.go | 9 +- pkg/property/schema_field_ui.go | 1 - pkg/property/schema_group.go | 25 +- pkg/property/schema_group_builder.go | 11 +- pkg/property/schema_group_builder_test.go | 15 +- pkg/property/schema_group_test.go | 33 +- pkg/property/schema_list.go | 4 +- pkg/property/schema_test.go | 25 +- pkg/property/sealed.go | 31 +- pkg/property/sealed_test.go | 71 ++- pkg/scene/builder.go | 23 +- pkg/scene/builder/builder_test.go | 63 ++- pkg/scene/builder/encoder_test.go | 20 +- pkg/scene/builder/scene.go | 5 +- pkg/scene/builder/scene_test.go | 25 +- pkg/scene/builder_test.go | 142 +++-- pkg/scene/cluster.go | 22 +- pkg/scene/cluster_list.go | 12 +- pkg/scene/cluster_list_test.go | 37 +- pkg/scene/cluster_test.go | 35 +- pkg/scene/id.go | 68 +++ pkg/scene/lock.go | 2 - pkg/scene/plugin.go | 18 +- pkg/scene/plugin_system.go | 36 +- pkg/scene/plugin_system_test.go | 83 ++- pkg/scene/plugin_test.go | 5 +- pkg/scene/scene.go | 40 +- pkg/scene/scene_test.go | 33 +- 
pkg/scene/sceneops/dataset_migrator.go | 43 +- pkg/scene/sceneops/dataset_migrator_test.go | 14 +- pkg/scene/sceneops/plugin_installer.go | 4 +- pkg/scene/sceneops/plugin_migrator.go | 25 +- pkg/scene/widget.go | 28 +- pkg/scene/widget_align_system.go | 10 +- pkg/scene/widget_align_system_test.go | 73 ++- pkg/scene/widget_area.go | 26 +- pkg/scene/widget_area_test.go | 93 ++-- pkg/scene/widget_section.go | 6 +- pkg/scene/widget_section_test.go | 49 +- pkg/scene/widget_system.go | 18 +- pkg/scene/widget_system_test.go | 115 ++-- pkg/scene/widget_test.go | 37 +- pkg/scene/widget_zone.go | 6 +- pkg/scene/widget_zone_test.go | 49 +- pkg/tag/group_builder.go | 16 +- pkg/tag/group_test.go | 27 +- pkg/tag/id.go | 41 ++ pkg/tag/item.go | 20 +- pkg/tag/item_builder.go | 22 +- pkg/tag/item_test.go | 33 +- pkg/tag/list.go | 18 +- pkg/tag/list_test.go | 23 +- pkg/tag/tag.go | 14 +- pkg/user/builder.go | 11 +- pkg/user/builder_test.go | 25 +- pkg/user/id.go | 23 + pkg/user/initializer.go | 11 +- pkg/user/initializer_test.go | 15 +- pkg/user/members.go | 34 +- pkg/user/members_test.go | 81 ++- pkg/user/role.go | 4 +- pkg/user/team.go | 6 +- pkg/user/team_builder.go | 25 +- pkg/user/team_builder_test.go | 25 +- pkg/user/team_test.go | 7 +- pkg/user/user.go | 11 +- pkg/user/user_test.go | 15 +- pkg/visualizer/visualizer.go | 2 - .../plugin_manifest.json | 4 +- .../plugin_manifest_translation.json | 4 +- 275 files changed, 7439 insertions(+), 7694 deletions(-) create mode 100644 pkg/asset/id.go create mode 100644 pkg/dataset/id.go create mode 100644 pkg/layer/id.go create mode 100644 pkg/plugin/id.go create mode 100644 pkg/project/id.go create mode 100644 pkg/property/id.go create mode 100644 pkg/scene/id.go create mode 100644 pkg/tag/id.go create mode 100644 pkg/user/id.go rename plugin_manifest_schema.json => schemas/plugin_manifest.json (98%) rename plugin_manifest_schema_translation.json => schemas/plugin_manifest_translation.json (95%) diff --git a/.vscode/settings.json 
b/.vscode/settings.json index 21b1151ee..7f4bc2ddc 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,10 +6,10 @@ "yaml.validate": true, "yaml.hover": true, "yaml.schemas": { - "./plugin_manifest_schema.json": [ + "./schemas/plugin_manifest.json": [ "/pkg/builtin/manifest.yml" ], - "./plugin_manifest_schema_translation.json": [ + "./schemas/plugin_manifest_translation.json": [ "/pkg/builtin/manifest_*.yml" ] }, @@ -18,13 +18,13 @@ "fileMatch": [ "/pkg/builtin/manifest.json" ], - "url": "./plugin_manifest_schema.json" + "url": "./schemas/plugin_manifest.json" }, { "fileMatch": [ "/pkg/builtin/manifest_*.json" ], - "url": "./plugin_manifest_schema_translation.json" + "url": "./schemas/plugin_manifest_translation.json" } ] } diff --git a/internal/infrastructure/memory/container.go b/internal/infrastructure/memory/container.go index 77db26476..888479b8f 100644 --- a/internal/infrastructure/memory/container.go +++ b/internal/infrastructure/memory/container.go @@ -4,7 +4,6 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" ) -// InitRepos _ func InitRepos(c *repo.Container) *repo.Container { if c == nil { c = &repo.Container{} diff --git a/internal/infrastructure/memory/scene_lock.go b/internal/infrastructure/memory/scene_lock.go index ab467e0b7..2ac0ef28e 100644 --- a/internal/infrastructure/memory/scene_lock.go +++ b/internal/infrastructure/memory/scene_lock.go @@ -4,10 +4,9 @@ import ( "context" "sync" + "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/scene" - - "github.com/reearth/reearth-backend/internal/usecase/repo" ) type sceneLock struct { @@ -19,7 +18,7 @@ func NewSceneLock() repo.SceneLock { } func (r *sceneLock) GetLock(ctx context.Context, sceneID id.SceneID) (scene.LockMode, error) { - if id.ID(sceneID).IsNil() { + if sceneID.IsNil() { return "", id.ErrInvalidID } if v, ok := r.lock.Load(sceneID); ok { @@ -33,7 +32,7 @@ func 
(r *sceneLock) GetLock(ctx context.Context, sceneID id.SceneID) (scene.Lock func (r *sceneLock) GetAllLock(ctx context.Context, sceneID []id.SceneID) ([]scene.LockMode, error) { res := make([]scene.LockMode, 0, len(sceneID)) for _, si := range sceneID { - if id.ID(si).IsNil() { + if si.IsNil() { return nil, id.ErrInvalidID } if v, ok := r.lock.Load(si); ok { diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index 2f5e93fa2..cb78cc285 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -33,7 +33,7 @@ func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID, teams []id.Team func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id.TeamID) ([]*asset.Asset, error) { filter := assetFilter(bson.M{ - "id": bson.M{"$in": id.AssetIDToKeys(ids)}, + "id": bson.M{"$in": id.AssetIDsToStrings(ids)}, }, teams) dst := make([]*asset.Asset, 0, len(ids)) res, err := r.find(ctx, dst, filter) @@ -112,6 +112,6 @@ func filterAssets(ids []id.AssetID, rows []*asset.Asset) []*asset.Asset { } func assetFilter(filter bson.M, teams []id.TeamID) bson.M { - filter["team"] = bson.M{"$in": id.TeamIDToKeys(teams)} + filter["team"] = bson.M{"$in": id.TeamIDsToStrings(teams)} return filter } diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index 1b8e1e1a8..f802db5fc 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -41,7 +41,7 @@ func (r *datasetRepo) FindByID(ctx context.Context, id2 id.DatasetID, f []id.Sce func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { filter := r.sceneFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.DatasetIDToKeys(ids)}, + {Key: "$in", Value: id.DatasetIDsToStrings(ids)}, }}, }, f) dst := make([]*dataset.Dataset, 0, len(ids)) @@ -75,14 +75,14 @@ func (r *datasetRepo) 
FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc return dataset.List{d}, nil } - fieldsstr := id.DatasetSchemaFieldIDToKeys(fields) + fieldsstr := id.DatasetSchemaFieldIDsToStrings(fields) firstField := fieldsstr[0] aggfilter := bson.D{} if f != nil { aggfilter = append(aggfilter, bson.E{Key: "$in", Value: []interface{}{ "$$g.scene", - id.SceneIDToKeys(f), + id.SceneIDsToStrings(f), }}) } @@ -271,7 +271,7 @@ func (r *datasetRepo) RemoveAll(ctx context.Context, ids []id.DatasetID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.DatasetIDToKeys(ids)) + return r.client.RemoveAll(ctx, id.DatasetIDsToStrings(ids)) } func (r *datasetRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { @@ -336,7 +336,7 @@ func (*datasetRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { } filter = append(filter, bson.E{ Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, }) return filter } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index e15c51e39..00860f6f8 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -41,7 +41,7 @@ func (r *datasetSchemaRepo) FindByID(ctx context.Context, id2 id.DatasetSchemaID func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { filter := r.sceneFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.DatasetSchemaIDToKeys(ids)}, + {Key: "$in", Value: id.DatasetSchemaIDsToStrings(ids)}, }}, }, f) dst := make([]*dataset.Schema, 0, len(ids)) @@ -111,7 +111,7 @@ func (r *datasetSchemaRepo) RemoveAll(ctx context.Context, ids []id.DatasetSchem if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.DatasetSchemaIDToKeys(ids)) + return r.client.RemoveAll(ctx, 
id.DatasetSchemaIDsToStrings(ids)) } func (r *datasetSchemaRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { @@ -176,7 +176,7 @@ func (*datasetSchemaRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D } filter = append(filter, bson.E{ Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, }) return filter } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index efdf02c9c..33eb70896 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -40,7 +40,7 @@ func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { filter := r.sceneFilterD(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.LayerIDToKeys(ids)}, + {Key: "$in", Value: id.LayerIDsToStrings(ids)}, }}, }, f) dst := make([]*layer.Layer, 0, len(ids)) @@ -68,7 +68,7 @@ func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID, f []id.Scen func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, error) { filter := r.sceneFilterD(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.LayerIDToKeys(ids)}, + {Key: "$in", Value: id.LayerIDsToStrings(ids)}, }}, }, f) dst := make([]*layer.Item, 0, len(ids)) @@ -89,7 +89,7 @@ func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID, f []id.Sce func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { filter := r.sceneFilterD(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.LayerIDToKeys(ids)}, + {Key: "$in", Value: id.LayerIDsToStrings(ids)}, }}, }, f) dst := make([]*layer.Group, 0, len(ids)) @@ -154,7 +154,7 @@ func (r *layerRepo) RemoveAll(ctx context.Context, ids []id.LayerID) error { 
if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.LayerIDToKeys(ids)) + return r.client.RemoveAll(ctx, id.LayerIDsToStrings(ids)) } func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { @@ -170,7 +170,7 @@ func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error func (r *layerRepo) FindByTag(ctx context.Context, tagID id.TagID, f []id.SceneID) (layer.List, error) { ids := []id.TagID{tagID} - tags := id.TagIDToKeys(ids) + tags := id.TagIDsToStrings(ids) filter := r.sceneFilter(bson.M{ "$or": []bson.M{ {"tags.id": bson.M{"$in": tags}}, @@ -331,7 +331,7 @@ func (*layerRepo) sceneFilterD(filter bson.D, scenes []id.SceneID) bson.D { } filter = append(filter, bson.E{ Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, }) return filter } @@ -340,6 +340,6 @@ func (*layerRepo) sceneFilter(filter bson.M, scenes []id.SceneID) bson.M { if scenes == nil { return filter } - filter["scene"] = bson.M{"$in": id.SceneIDToKeys(scenes)} + filter["scene"] = bson.M{"$in": id.SceneIDsToStrings(scenes)} return filter } diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go index 44b935a98..5483b31ea 100644 --- a/internal/infrastructure/mongo/mongodoc/layer.go +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -91,7 +91,7 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { if lg := layer.GroupFromLayer(l); lg != nil { group = &LayerGroupDocument{ - Layers: id.LayerIDToKeys(lg.Layers().Layers()), + Layers: id.LayerIDsToStrings(lg.Layers().Layers()), LinkedDatasetSchema: lg.LinkedDatasetSchema().StringRef(), Root: lg.IsRoot(), } diff --git a/internal/infrastructure/mongo/mongodoc/scene_align.go b/internal/infrastructure/mongo/mongodoc/scene_align.go index ab746b98c..836ef2527 100644 --- a/internal/infrastructure/mongo/mongodoc/scene_align.go +++ 
b/internal/infrastructure/mongo/mongodoc/scene_align.go @@ -82,7 +82,7 @@ func NewWidgetArea(a *scene.WidgetArea) *WidgetAreaDocument { } return &WidgetAreaDocument{ - WidgetIDs: id.WidgetIDToKeys(a.WidgetIDs()), + WidgetIDs: id.WidgetIDsToStrings(a.WidgetIDs()), Align: string(a.Alignment()), } } diff --git a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go index d8e936f39..11550237f 100644 --- a/internal/infrastructure/mongo/mongodoc/tag.go +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -68,7 +68,7 @@ func NewTag(t tag.Tag) (*TagDocument, string) { ids := tags.Tags() group = &TagGroupDocument{ - Tags: id.TagIDToKeys(ids), + Tags: id.TagIDsToStrings(ids), } } diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index c9f7e79d4..ff19986ec 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -51,7 +51,7 @@ func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID, sids []id.Sc { "id": pids, "scene": bson.M{ - "$in": id.SceneIDToKeys(sids), + "$in": id.SceneIDsToStrings(sids), }, }, }, @@ -76,7 +76,7 @@ func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id var err error if len(ids2) > 0 { - keys := id.PluginIDToKeys(ids2) + keys := id.PluginIDsToStrings(ids2) filter := bson.M{ "$or": []bson.M{ { @@ -90,7 +90,7 @@ func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id { "id": bson.M{"$in": keys}, "scene": bson.M{ - "$in": id.SceneIDToKeys(sids), + "$in": id.SceneIDsToStrings(sids), }, }, }, diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 1ebc566d2..3f81036dc 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -34,7 +34,7 @@ func (r *projectRepo) init() { func (r *projectRepo) FindByIDs(ctx context.Context, ids []id.ProjectID, f []id.TeamID) ([]*project.Project, 
error) { filter := r.teamFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.ProjectIDToKeys(ids)}, + {Key: "$in", Value: id.ProjectIDsToStrings(ids)}, }}, }, f) dst := make([]*project.Project, 0, len(ids)) @@ -143,7 +143,7 @@ func (*projectRepo) teamFilter(filter bson.D, teams []id.TeamID) bson.D { } filter = append(filter, bson.E{ Key: "team", - Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teams)}}, + Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teams)}}, }) return filter } diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index a879654bb..ed40e2fa3 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -36,7 +36,7 @@ func (r *propertyRepo) FindByID(ctx context.Context, id2 id.PropertyID, f []id.S func (r *propertyRepo) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { filter := r.sceneFilter(bson.D{{Key: "id", Value: bson.D{{ - Key: "$in", Value: id.PropertyIDToKeys(ids), + Key: "$in", Value: id.PropertyIDsToStrings(ids), }}}}, f) dst := make(property.List, 0, len(ids)) res, err := r.find(ctx, dst, filter) @@ -99,7 +99,7 @@ func (r *propertyRepo) RemoveAll(ctx context.Context, ids []id.PropertyID) error if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.PropertyIDToKeys(ids)) + return r.client.RemoveAll(ctx, id.PropertyIDsToStrings(ids)) } func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { @@ -164,7 +164,7 @@ func (*propertyRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { } filter = append(filter, bson.E{ Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, }) return filter } diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index 2595bdd15..50cb71121 100644 --- 
a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -56,7 +56,7 @@ func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySch if len(ids2) > 0 { filter := bson.D{{Key: "id", Value: bson.D{{ - Key: "$in", Value: id.PropertySchemaIDToKeys(ids2), + Key: "$in", Value: id.PropertySchemaIDsToStrings(ids2), }}}} dst := make(property.SchemaList, 0, len(ids2)) res, err = r.find(ctx, dst, filter) @@ -120,7 +120,7 @@ func (r *propertySchemaRepo) RemoveAll(ctx context.Context, ids []id.PropertySch if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.PropertySchemaIDToKeys(ids)) + return r.client.RemoveAll(ctx, id.PropertySchemaIDsToStrings(ids)) } func (r *propertySchemaRepo) find(ctx context.Context, dst property.SchemaList, filter bson.D) (property.SchemaList, error) { diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index 8a5990815..b27cb323e 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -41,7 +41,7 @@ func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) ([]*scene.Scene, error) { filter := r.teamFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.SceneIDToKeys(ids)}, + {Key: "$in", Value: id.SceneIDsToStrings(ids)}, }}, }, f) dst := make([]*scene.Scene, 0, len(ids)) @@ -62,7 +62,7 @@ func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID, f []id.T func (r *sceneRepo) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) { filter := bson.D{ {Key: "team", Value: bson.D{ - {Key: "$in", Value: id.TeamIDToKeys(teams)}, + {Key: "$in", Value: id.TeamIDsToStrings(teams)}, }}, } c := mongodoc.SceneIDConsumer{ @@ -79,7 +79,7 @@ func (r *sceneRepo) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id. 
func (r *sceneRepo) HasSceneTeam(ctx context.Context, sceneID id.SceneID, temaIDs []id.TeamID) (bool, error) { filter := bson.D{ {Key: "id", Value: sceneID.String()}, - {Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(temaIDs)}}}, + {Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(temaIDs)}}}, } res, err2 := r.client.Collection().CountDocuments(ctx, filter) if err2 != nil { @@ -90,8 +90,8 @@ func (r *sceneRepo) HasSceneTeam(ctx context.Context, sceneID id.SceneID, temaID func (r *sceneRepo) HasScenesTeam(ctx context.Context, sceneIDs []id.SceneID, teamIDs []id.TeamID) ([]bool, error) { cursor, err2 := r.client.Collection().Find(ctx, bson.D{ - {Key: "id", Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(sceneIDs)}}}, - {Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teamIDs)}}}, + {Key: "id", Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(sceneIDs)}}}, + {Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teamIDs)}}}, }, &options.FindOptions{ Projection: bson.D{{Key: "id", Value: 1}, {Key: "_id", Value: 0}}, }) @@ -181,7 +181,7 @@ func (*sceneRepo) teamFilter(filter bson.D, teams []id.TeamID) bson.D { } filter = append(filter, bson.E{ Key: "team", - Value: bson.D{{Key: "$in", Value: id.TeamIDToKeys(teams)}}, + Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teams)}}, }) return filter } diff --git a/internal/infrastructure/mongo/scene_lock.go b/internal/infrastructure/mongo/scene_lock.go index c41e015f7..8d9f66670 100644 --- a/internal/infrastructure/mongo/scene_lock.go +++ b/internal/infrastructure/mongo/scene_lock.go @@ -39,7 +39,7 @@ func (r *sceneLockRepo) GetLock(ctx context.Context, sceneID id.SceneID) (scene. 
func (r *sceneLockRepo) GetAllLock(ctx context.Context, ids []id.SceneID) ([]scene.LockMode, error) { filter := bson.D{ {Key: "scene", Value: bson.D{ - {Key: "$in", Value: id.SceneIDToKeys(ids)}, + {Key: "$in", Value: id.SceneIDsToStrings(ids)}, }}, } c := mongodoc.SceneLockConsumer{ diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index f07349aa5..6717cc6fb 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -40,7 +40,7 @@ func (r *tagRepo) FindByID(ctx context.Context, id id.TagID, f []id.SceneID) (ta func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Tag, error) { filter := r.sceneFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TagIDToKeys(ids)}, + {Key: "$in", Value: id.TagIDsToStrings(ids)}, }}, }, f) dst := make([]*tag.Tag, 0, len(ids)) @@ -61,7 +61,7 @@ func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID, f []id.SceneID) func (r *tagRepo) FindItemByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Item, error) { filter := r.sceneFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TagIDToKeys(ids)}, + {Key: "$in", Value: id.TagIDsToStrings(ids)}, }}, }, f) dst := make([]*tag.Item, 0, len(ids)) @@ -82,7 +82,7 @@ func (r *tagRepo) FindGroupByID(ctx context.Context, id id.TagID, f []id.SceneID func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Group, error) { filter := r.sceneFilter(bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TagIDToKeys(ids)}, + {Key: "$in", Value: id.TagIDsToStrings(ids)}, }}, }, f) dst := make([]*tag.Group, 0, len(ids)) @@ -105,7 +105,7 @@ func (r *tagRepo) FindGroupByItem(ctx context.Context, tagID id.TagID, f []id.Sc ids := []id.TagID{tagID} filter := r.sceneFilter(bson.D{ {Key: "group.tags", Value: bson.D{ - {Key: "$in", Value: id.TagIDToKeys(ids)}, + {Key: "$in", Value: id.TagIDsToStrings(ids)}, }}, 
}, f) @@ -133,7 +133,7 @@ func (r *tagRepo) RemoveAll(ctx context.Context, ids []id.TagID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.TagIDToKeys(ids)) + return r.client.RemoveAll(ctx, id.TagIDsToStrings(ids)) } func (r *tagRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { @@ -270,7 +270,7 @@ func (*tagRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { } filter = append(filter, bson.E{ Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDToKeys(scenes)}}, + Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, }) return filter } diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index 0ba00f9b5..9249fb6d9 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -41,7 +41,7 @@ func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) ([]*user.Team, func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) ([]*user.Team, error) { filter := bson.D{ {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TeamIDToKeys(ids)}, + {Key: "$in", Value: id.TeamIDsToStrings(ids)}, }}, } dst := make([]*user.Team, 0, len(ids)) @@ -80,7 +80,7 @@ func (r *teamRepo) RemoveAll(ctx context.Context, ids []id.TeamID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.TeamIDToKeys(ids)) + return r.client.RemoveAll(ctx, id.TeamIDsToStrings(ids)) } func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) ([]*user.Team, error) { diff --git a/internal/infrastructure/mongo/user.go b/internal/infrastructure/mongo/user.go index 1a8cf7271..d553cb86d 100644 --- a/internal/infrastructure/mongo/user.go +++ b/internal/infrastructure/mongo/user.go @@ -31,7 +31,7 @@ func (r *userRepo) init() { func (r *userRepo) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { filter := bson.D{{Key: "id", Value: bson.D{ - {Key: "$in", Value: id.UserIDToKeys(ids)}, + {Key: 
"$in", Value: id.UserIDsToStrings(ids)}, }}} dst := make([]*user.User, 0, len(ids)) res, err := r.find(ctx, dst, filter) diff --git a/internal/usecase/cursor.go b/internal/usecase/cursor.go index f18fab832..6e3e5973b 100644 --- a/internal/usecase/cursor.go +++ b/internal/usecase/cursor.go @@ -1,4 +1,3 @@ package usecase -// Cursor _ type Cursor string diff --git a/internal/usecase/operator.go b/internal/usecase/operator.go index cc9edd35e..e1c606f1b 100644 --- a/internal/usecase/operator.go +++ b/internal/usecase/operator.go @@ -5,7 +5,6 @@ import ( "github.com/reearth/reearth-backend/pkg/user" ) -// Operator _ type Operator struct { User id.UserID ReadableTeams []id.TeamID @@ -13,7 +12,6 @@ type Operator struct { OwningTeams []id.TeamID } -// OperatorFrom _ func OperatorFrom(u id.UserID, teams []*user.Team) *Operator { rt := []id.TeamID{} wt := []id.TeamID{} @@ -41,7 +39,6 @@ func OperatorFrom(u id.UserID, teams []*user.Team) *Operator { } } -// Teams _ func (o *Operator) Teams(r user.Role) []id.TeamID { if o == nil { return nil @@ -58,7 +55,6 @@ func (o *Operator) Teams(r user.Role) []id.TeamID { return nil } -// IsReadableTeamIncluded _ func (o *Operator) IsReadableTeamIncluded(team id.TeamID) bool { if o == nil { return false @@ -71,7 +67,6 @@ func (o *Operator) IsReadableTeamIncluded(team id.TeamID) bool { return false } -// IsWritableTeamIncluded _ func (o *Operator) IsWritableTeamIncluded(team id.TeamID) bool { if o == nil { return false @@ -84,7 +79,6 @@ func (o *Operator) IsWritableTeamIncluded(team id.TeamID) bool { return false } -// IsOwningTeamIncluded _ func (o *Operator) IsOwningTeamIncluded(team id.TeamID) bool { if o == nil { return false @@ -97,7 +91,6 @@ func (o *Operator) IsOwningTeamIncluded(team id.TeamID) bool { return false } -// IsReadableTeamsIncluded _ func (o *Operator) IsReadableTeamsIncluded(teams []id.TeamID) bool { if o == nil { return false @@ -112,7 +105,6 @@ func (o *Operator) IsReadableTeamsIncluded(teams []id.TeamID) bool { 
return false } -// IsWritableTeamsIncluded _ func (o *Operator) IsWritableTeamsIncluded(teams []id.TeamID) bool { if o == nil { return false @@ -127,7 +119,6 @@ func (o *Operator) IsWritableTeamsIncluded(teams []id.TeamID) bool { return false } -// IsOwningTeamsIncluded _ func (o *Operator) IsOwningTeamsIncluded(teams []id.TeamID) bool { if o == nil { return false diff --git a/pkg/asset/asset.go b/pkg/asset/asset.go index c0b60c755..ad417d0da 100644 --- a/pkg/asset/asset.go +++ b/pkg/asset/asset.go @@ -3,64 +3,51 @@ package asset import ( "errors" "time" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( - // ErrEmptyTeamID _ ErrEmptyTeamID = errors.New("require team id") - // ErrEmptyURL _ - ErrEmptyURL = errors.New("require valid url") - // ErrEmptySize _ - ErrEmptySize = errors.New("file size cannot be zero") + ErrEmptyURL = errors.New("require valid url") + ErrEmptySize = errors.New("file size cannot be zero") ) -// Asset _ type Asset struct { - id id.AssetID + id ID createdAt time.Time - team id.TeamID + team TeamID name string // file name size int64 // file size url string contentType string } -// ID _ -func (a *Asset) ID() id.AssetID { +func (a *Asset) ID() ID { return a.id } -// Team _ -func (a *Asset) Team() id.TeamID { +func (a *Asset) Team() TeamID { return a.team } -// Name _ func (a *Asset) Name() string { return a.name } -// Size _ func (a *Asset) Size() int64 { return a.size } -// URL _ func (a *Asset) URL() string { return a.url } -// ContentType _ func (a *Asset) ContentType() string { return a.contentType } -// CreatedAt _ func (a *Asset) CreatedAt() time.Time { if a == nil { return time.Time{} } - return id.ID(a.id).Timestamp() + return createdAt(a.id) } diff --git a/pkg/asset/asset_test.go b/pkg/asset/asset_test.go index b0b65643a..d685bd523 100644 --- a/pkg/asset/asset_test.go +++ b/pkg/asset/asset_test.go @@ -4,20 +4,19 @@ import ( "testing" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func 
TestAsset(t *testing.T) { - aid := id.NewAssetID() - tid := id.NewTeamID() - d := id.ID(aid).Timestamp() + aid := NewID() + tid := NewTeamID() + d := createdAt(aid) testCases := []struct { Name string Expected struct { - ID id.AssetID + ID ID CreatedAt time.Time - Team id.TeamID + Team TeamID Name string Size int64 Url string @@ -27,9 +26,9 @@ func TestAsset(t *testing.T) { }{ { Expected: struct { - ID id.AssetID + ID ID CreatedAt time.Time - Team id.TeamID + Team TeamID Name string Size int64 Url string diff --git a/pkg/asset/builder.go b/pkg/asset/builder.go index b75ddc6c8..48f88e4ef 100644 --- a/pkg/asset/builder.go +++ b/pkg/asset/builder.go @@ -2,26 +2,21 @@ package asset import ( "time" - - "github.com/reearth/reearth-backend/pkg/id" ) -// Builder _ type Builder struct { a *Asset } -// New _ func New() *Builder { return &Builder{a: &Asset{}} } -// Build _ func (b *Builder) Build() (*Asset, error) { - if id.ID(b.a.id).IsNil() { - return nil, id.ErrInvalidID + if b.a.id.IsNil() { + return nil, ErrInvalidID } - if id.ID(b.a.team).IsNil() { + if b.a.team.IsNil() { return nil, ErrEmptyTeamID } if b.a.url == "" { @@ -36,7 +31,6 @@ func (b *Builder) Build() (*Asset, error) { return b.a, nil } -// MustBuild _ func (b *Builder) MustBuild() *Asset { r, err := b.Build() if err != nil { @@ -45,49 +39,41 @@ func (b *Builder) MustBuild() *Asset { return r } -// ID _ -func (b *Builder) ID(id id.AssetID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.a.id = id return b } -// NewID _ func (b *Builder) NewID() *Builder { - b.a.id = id.AssetID(id.New()) + b.a.id = NewID() return b } -// Team _ -func (b *Builder) Team(team id.TeamID) *Builder { +func (b *Builder) Team(team TeamID) *Builder { b.a.team = team return b } -// Name _ func (b *Builder) Name(name string) *Builder { b.a.name = name return b } -// Size _ func (b *Builder) Size(size int64) *Builder { b.a.size = size return b } -// URL _ func (b *Builder) URL(url string) *Builder { b.a.url = url return b } -// 
ContentType _ func (b *Builder) ContentType(contentType string) *Builder { b.a.contentType = contentType return b } -// CreatedAt - func (b *Builder) CreatedAt(createdAt time.Time) *Builder { b.a.createdAt = createdAt return b diff --git a/pkg/asset/builder_test.go b/pkg/asset/builder_test.go index 0142afb73..ada472821 100644 --- a/pkg/asset/builder_test.go +++ b/pkg/asset/builder_test.go @@ -5,19 +5,18 @@ import ( "testing" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestBuilder_Build(t *testing.T) { - aid := id.NewAssetID() - tid := id.NewTeamID() + aid := NewID() + tid := NewTeamID() d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) testCases := []struct { Name, AssetName string - Id id.AssetID + Id ID CreatedAt time.Time - Team id.TeamID + Team TeamID Size int64 Url string ContentType string @@ -46,9 +45,9 @@ func TestBuilder_Build(t *testing.T) { }, { Name: "failed empty size", - Id: id.NewAssetID(), + Id: NewID(), CreatedAt: d, - Team: id.NewTeamID(), + Team: NewTeamID(), Size: 0, Url: "tt://xxx.zz", ContentType: "bbb", @@ -57,9 +56,9 @@ func TestBuilder_Build(t *testing.T) { }, { Name: "failed empty url", - Id: id.NewAssetID(), + Id: NewID(), CreatedAt: d, - Team: id.NewTeamID(), + Team: NewTeamID(), Size: 10, Url: "", ContentType: "bbb", @@ -68,9 +67,9 @@ func TestBuilder_Build(t *testing.T) { }, { Name: "failed empty team", - Id: id.NewAssetID(), + Id: NewID(), CreatedAt: d, - Team: id.TeamID{}, + Team: TeamID{}, Size: 10, Url: "tt://xxx.zz", ContentType: "bbb", @@ -79,9 +78,9 @@ func TestBuilder_Build(t *testing.T) { }, { Name: "failed invalid Id", - Id: id.AssetID{}, + Id: ID{}, CreatedAt: d, - Team: id.NewTeamID(), + Team: NewTeamID(), Size: 10, Url: "tt://xxx.zz", ContentType: "bbb", @@ -112,14 +111,14 @@ func TestBuilder_Build(t *testing.T) { } func TestBuilder_MustBuild(t *testing.T) { - aid := id.NewAssetID() - tid := id.NewTeamID() + aid := NewID() + tid := NewTeamID() d := time.Date(1900, 1, 1, 
00, 00, 0, 1, time.UTC) testCases := []struct { name, assetName string createdAt time.Time - id id.AssetID - team id.TeamID + id ID + team TeamID size int64 url string contentType string @@ -149,8 +148,8 @@ func TestBuilder_MustBuild(t *testing.T) { { name: "failed empty size", createdAt: d, - id: id.NewAssetID(), - team: id.NewTeamID(), + id: NewID(), + team: NewTeamID(), size: 0, url: "tt://xxx.zz", contentType: "bbb", @@ -160,8 +159,8 @@ func TestBuilder_MustBuild(t *testing.T) { { name: "failed empty url", createdAt: d, - id: id.NewAssetID(), - team: id.NewTeamID(), + id: NewID(), + team: NewTeamID(), size: 10, url: "", contentType: "bbb", @@ -171,8 +170,8 @@ func TestBuilder_MustBuild(t *testing.T) { { name: "failed empty team", createdAt: d, - id: id.NewAssetID(), - team: id.TeamID{}, + id: NewID(), + team: TeamID{}, size: 10, url: "tt://xxx.zz", contentType: "bbb", @@ -182,8 +181,8 @@ func TestBuilder_MustBuild(t *testing.T) { { name: "failed invalid Id", createdAt: d, - id: id.AssetID{}, - team: id.NewTeamID(), + id: ID{}, + team: NewTeamID(), size: 10, url: "tt://xxx.zz", contentType: "bbb", @@ -231,6 +230,6 @@ func TestBuilder_MustBuild(t *testing.T) { } func TestNewID(t *testing.T) { - a := New().NewID().URL("tt://xxx.bb").Team(id.NewTeamID()).Size(10).MustBuild() + a := New().NewID().URL("tt://xxx.bb").Team(NewTeamID()).Size(10).MustBuild() assert.False(t, a.id.IsNil()) } diff --git a/pkg/asset/id.go b/pkg/asset/id.go new file mode 100644 index 000000000..7a2599923 --- /dev/null +++ b/pkg/asset/id.go @@ -0,0 +1,31 @@ +package asset + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.AssetID +type TeamID = id.TeamID + +var NewID = id.NewAssetID +var NewTeamID = id.NewTeamID + +var MustID = id.MustAssetID +var MustTeamID = id.MustTeamID + +var IDFrom = id.AssetIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.AssetIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var IDFromRefID = id.AssetIDFromRefID +var 
TeamIDFromRefID = id.TeamIDFromRefID + +var ErrInvalidID = id.ErrInvalidID + +func createdAt(i ID) time.Time { + return id.ID(i).Timestamp() +} diff --git a/pkg/builtin/main.go b/pkg/builtin/main.go index 513a23528..e64c659c2 100644 --- a/pkg/builtin/main.go +++ b/pkg/builtin/main.go @@ -3,7 +3,6 @@ package builtin import ( _ "embed" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/property" @@ -20,10 +19,10 @@ var pluginTranslationList = map[string]*manifest.TranslationRoot{"ja": manifest. var pluginManifest = manifest.MergeManifestTranslation(manifest.MustParseSystemFromBytes(pluginManifestJSON, nil), pluginTranslationList) // MUST NOT CHANGE -var PropertySchemaIDVisualizerCesium = id.MustPropertySchemaID("reearth/cesium") +var PropertySchemaIDVisualizerCesium = property.MustSchemaID("reearth/cesium") // MUST NOT CHANGE -var PropertySchemaIDInfobox = id.MustPropertySchemaID("reearth/infobox") +var PropertySchemaIDInfobox = property.MustSchemaID("reearth/infobox") func GetPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { for _, p := range pluginManifest.ExtensionSchema { @@ -42,7 +41,7 @@ func MustPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { return ps } -func GetPropertySchema(id id.PropertySchemaID) *property.Schema { +func GetPropertySchema(id property.SchemaID) *property.Schema { for _, p := range pluginManifest.ExtensionSchema { if id == p.ID() { return p @@ -55,7 +54,7 @@ func Plugin() *plugin.Plugin { return pluginManifest.Plugin } -func GetPlugin(id id.PluginID) *plugin.Plugin { +func GetPlugin(id plugin.ID) *plugin.Plugin { if id.Equal(pluginManifest.Plugin.ID()) { return pluginManifest.Plugin } diff --git a/pkg/builtin/main_test.go b/pkg/builtin/main_test.go index 0f8e06c9a..fa444d161 100644 --- a/pkg/builtin/main_test.go +++ b/pkg/builtin/main_test.go @@ -3,7 +3,8 @@ 
package builtin import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/visualizer" "github.com/stretchr/testify/assert" ) @@ -45,17 +46,17 @@ func TestPlugin(t *testing.T) { func TestGetPlugin(t *testing.T) { testCases := []struct { name string - pluginID id.PluginID + pluginID plugin.ID expectedNil bool }{ { name: "Official Plugin", - pluginID: id.OfficialPluginID, + pluginID: plugin.OfficialPluginID, expectedNil: false, }, { name: "foo plugin", - pluginID: id.MustPluginID("foo~1.1.1"), + pluginID: plugin.MustID("foo~1.1.1"), expectedNil: true, }, } @@ -76,7 +77,7 @@ func TestGetPlugin(t *testing.T) { func TestGetPropertySchema(t *testing.T) { testCases := []struct { name string - psId id.PropertySchemaID + psId property.SchemaID expectedNil bool }{ { @@ -86,7 +87,7 @@ func TestGetPropertySchema(t *testing.T) { }, { name: "unknown propertySchemaId", - psId: id.MustPropertySchemaID("xxx~1.1.1/aa"), + psId: property.MustSchemaID("xxx~1.1.1/aa"), expectedNil: true, }, } diff --git a/pkg/dataset/builder.go b/pkg/dataset/builder.go index 33919ae22..7fb866b1e 100644 --- a/pkg/dataset/builder.go +++ b/pkg/dataset/builder.go @@ -1,9 +1,5 @@ package dataset -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type Builder struct { d *Dataset } @@ -13,12 +9,12 @@ func New() *Builder { } func (b *Builder) Build() (*Dataset, error) { - if id.ID(b.d.id).IsNil() { - return nil, id.ErrInvalidID + if b.d.id.IsNil() { + return nil, ErrInvalidID } if b.d.fields == nil || b.d.order == nil { - b.d.fields = map[id.DatasetSchemaFieldID]*Field{} - b.d.order = []id.DatasetSchemaFieldID{} + b.d.fields = map[FieldID]*Field{} + b.d.order = []FieldID{} } return b.d, nil } @@ -31,17 +27,17 @@ func (b *Builder) MustBuild() *Dataset { return r } -func (b *Builder) ID(id id.DatasetID) *Builder { +func (b *Builder) ID(id ID) *Builder { 
b.d.id = id return b } func (b *Builder) NewID() *Builder { - b.d.id = id.DatasetID(id.New()) + b.d.id = NewID() return b } -func (b *Builder) Scene(scene id.SceneID) *Builder { +func (b *Builder) Scene(scene SceneID) *Builder { b.d.scene = scene return b } @@ -51,14 +47,14 @@ func (b *Builder) Source(source string) *Builder { return b } -func (b *Builder) Schema(schema id.DatasetSchemaID) *Builder { +func (b *Builder) Schema(schema SchemaID) *Builder { b.d.schema = schema return b } func (b *Builder) Fields(fields []*Field) *Builder { - b.d.fields = map[id.DatasetSchemaFieldID]*Field{} - b.d.order = make([]id.DatasetSchemaFieldID, 0, len(fields)) + b.d.fields = map[FieldID]*Field{} + b.d.order = make([]FieldID, 0, len(fields)) sources := map[string]struct{}{} for _, f := range b.d.fields { diff --git a/pkg/dataset/csvparser.go b/pkg/dataset/csvparser.go index 16f77f6ac..3d9bbcb7d 100644 --- a/pkg/dataset/csvparser.go +++ b/pkg/dataset/csvparser.go @@ -6,17 +6,12 @@ import ( "io" "strconv" "strings" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( - // ErrFailedToParseCSVorTSVFile _ ErrFailedToParseCSVorTSVFile error = errors.New("failed to parse file content") - // ErrIncompatibleSchema _ - ErrIncompatibleSchema error = errors.New("schema is not compatible with csv") - // ErrDuplicatiedNameFields _ - ErrDuplicatiedNameFields error = errors.New("failed to parse, name-duplicated fields") + ErrIncompatibleSchema error = errors.New("schema is not compatible with csv") + ErrDuplicatiedNameFields error = errors.New("failed to parse, name-duplicated fields") ) type DatasetCSVParser struct { @@ -53,7 +48,7 @@ func (p *DatasetCSVParser) validateLine(line []string) bool { return len(p.headers) == len(line) } -func (p *DatasetCSVParser) GuessSchema(sid id.SceneID) error { +func (p *DatasetCSVParser) GuessSchema(sid SceneID) error { if !p.validateLine(p.firstline) { return ErrFailedToParseCSVorTSVFile } @@ -95,7 +90,7 @@ func (p *DatasetCSVParser) ReadAll() (*Schema, 
[]*Dataset, error) { return nil, nil, errors.New("schema is not generated yet") } var fields []*Field - schemafieldmap := make(map[string]id.DatasetSchemaFieldID) + schemafieldmap := make(map[string]FieldID) for _, f := range p.schema.Fields() { if _, ok := schemafieldmap[f.Name()]; !ok { schemafieldmap[f.Name()] = f.ID() @@ -141,7 +136,7 @@ func (p *DatasetCSVParser) ReadAll() (*Schema, []*Dataset, error) { return p.schema, datasets, nil } -func (p *DatasetCSVParser) getFields(line []string, sfm map[string]id.DatasetSchemaFieldID) ([]*Field, error) { +func (p *DatasetCSVParser) getFields(line []string, sfm map[string]FieldID) ([]*Field, error) { fields := []*Field{} var lat, lng *float64 for i, record := range line { diff --git a/pkg/dataset/csvparser_test.go b/pkg/dataset/csvparser_test.go index 534dc32e5..ea3f8e012 100644 --- a/pkg/dataset/csvparser_test.go +++ b/pkg/dataset/csvparser_test.go @@ -4,7 +4,6 @@ import ( "strings" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -18,7 +17,7 @@ func TestCSVParser(t *testing.T) { p := NewCSVParser(r, "hoge.csv", ',') err := p.Init() assert.NoError(t, err) - sceneID := id.NewSceneID() + sceneID := NewSceneID() err = p.GuessSchema(sceneID) assert.NoError(t, err) diff --git a/pkg/dataset/dataset.go b/pkg/dataset/dataset.go index f06678ed4..271ade5ad 100644 --- a/pkg/dataset/dataset.go +++ b/pkg/dataset/dataset.go @@ -1,24 +1,22 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - type Dataset struct { - id id.DatasetID + id ID source string - schema id.DatasetSchemaID - fields map[id.DatasetSchemaFieldID]*Field - order []id.DatasetSchemaFieldID - scene id.SceneID + schema SchemaID + fields map[FieldID]*Field + order []FieldID + scene SceneID } -func (d *Dataset) ID() (i id.DatasetID) { +func (d *Dataset) ID() (i ID) { if d == nil { return } return d.id } -func (d *Dataset) Scene() (i id.SceneID) { +func (d *Dataset) Scene() (i SceneID) { if d == nil { 
return } @@ -32,7 +30,7 @@ func (d *Dataset) Source() string { return d.source } -func (d *Dataset) Schema() (i id.DatasetSchemaID) { +func (d *Dataset) Schema() (i SchemaID) { if d == nil { return } @@ -50,14 +48,14 @@ func (d *Dataset) Fields() []*Field { return fields } -func (d *Dataset) Field(id id.DatasetSchemaFieldID) *Field { +func (d *Dataset) Field(id FieldID) *Field { if d == nil || d.fields == nil { return nil } return d.fields[id] } -func (d *Dataset) FieldRef(id *id.DatasetSchemaFieldID) *Field { +func (d *Dataset) FieldRef(id *FieldID) *Field { if d == nil || id == nil { return nil } diff --git a/pkg/dataset/dataset_test.go b/pkg/dataset/dataset_test.go index 9520080ae..df3110ca7 100644 --- a/pkg/dataset/dataset_test.go +++ b/pkg/dataset/dataset_test.go @@ -3,14 +3,13 @@ package dataset import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestDataset_Interface(t *testing.T) { - f1 := id.NewDatasetSchemaFieldID() - f2 := id.NewDatasetSchemaFieldID() - sid := id.NewDatasetSchemaID() + f1 := NewFieldID() + f2 := NewFieldID() + sid := NewSchemaID() tests := []struct { name string @@ -20,11 +19,11 @@ func TestDataset_Interface(t *testing.T) { }{ { name: "ok", - schema: NewSchema().ID(sid).Scene(id.NewSceneID()).Fields([]*SchemaField{ + schema: NewSchema().ID(sid).Scene(NewSceneID()).Fields([]*SchemaField{ NewSchemaField().ID(f1).Name("foo").Type(ValueTypeNumber).MustBuild(), NewSchemaField().ID(f2).Name("bar").Type(ValueTypeLatLng).MustBuild(), }).MustBuild(), - dataset: New().NewID().Scene(id.NewSceneID()).Schema(sid).Fields([]*Field{ + dataset: New().NewID().Scene(NewSceneID()).Schema(sid).Fields([]*Field{ NewField(f1, ValueTypeNumber.ValueFrom(1), ""), NewField(f2, ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), ""), }).MustBuild(), @@ -51,8 +50,8 @@ func TestDataset_Interface(t *testing.T) { } func TestDataset_InterfaceWithFieldIDs(t *testing.T) { - f1 := id.NewDatasetSchemaFieldID() - f2 := 
id.NewDatasetSchemaFieldID() + f1 := NewFieldID() + f2 := NewFieldID() tests := []struct { name string @@ -61,7 +60,7 @@ func TestDataset_InterfaceWithFieldIDs(t *testing.T) { }{ { name: "ok", - dataset: New().NewID().Scene(id.NewSceneID()).Schema(id.NewDatasetSchemaID()).Fields([]*Field{ + dataset: New().NewID().Scene(NewSceneID()).Schema(NewSchemaID()).Fields([]*Field{ NewField(f1, ValueTypeNumber.ValueFrom(1), ""), NewField(f2, ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), ""), }).MustBuild(), diff --git a/pkg/dataset/diff.go b/pkg/dataset/diff.go index bbd41a726..c7b5722d2 100644 --- a/pkg/dataset/diff.go +++ b/pkg/dataset/diff.go @@ -1,10 +1,7 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - -// Diff _ type Diff struct { Added List Removed List - Others map[id.DatasetID]*Dataset + Others map[ID]*Dataset } diff --git a/pkg/dataset/field.go b/pkg/dataset/field.go index e99dffd35..bf6355c60 100644 --- a/pkg/dataset/field.go +++ b/pkg/dataset/field.go @@ -1,14 +1,12 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - type Field struct { - field id.DatasetSchemaFieldID + field FieldID value *Value source string } -func NewField(field id.DatasetSchemaFieldID, value *Value, source string) *Field { +func NewField(field FieldID, value *Value, source string) *Field { if value == nil { return nil } @@ -18,14 +16,14 @@ func NewField(field id.DatasetSchemaFieldID, value *Value, source string) *Field } } -func (d *Field) Field() (i id.DatasetSchemaFieldID) { +func (d *Field) Field() (i FieldID) { if d == nil { return } return d.field } -func (d *Field) FieldRef() *id.DatasetSchemaFieldID { +func (d *Field) FieldRef() *FieldID { if d == nil { return nil } diff --git a/pkg/dataset/graph_iterator.go b/pkg/dataset/graph_iterator.go index db4432f78..17e357e18 100644 --- a/pkg/dataset/graph_iterator.go +++ b/pkg/dataset/graph_iterator.go @@ -1,29 +1,27 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - // 
GraphIterator is a iterator for graphically exploring a dataset. type GraphIterator struct { m Map - ids [][]id.DatasetID + ids [][]ID currentIndex int currentDepthIndex int maxDepth int } -func GraphIteratorFrom(root id.DatasetID, depth int) *GraphIterator { +func GraphIteratorFrom(root ID, depth int) *GraphIterator { return &GraphIterator{ - ids: [][]id.DatasetID{{root}}, + ids: [][]ID{{root}}, maxDepth: depth, } } -func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { +func (di *GraphIterator) Next(d *Dataset) (ID, bool) { if di == nil || di.maxDepth == 0 || len(di.ids) == 0 || d == nil { - return id.DatasetID{}, false + return ID{}, false } if di.currentDepthIndex >= len(di.ids) { - return id.DatasetID{}, true + return ID{}, true } if di.m == nil { @@ -33,13 +31,13 @@ func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { // add fields if len(di.ids) <= di.currentDepthIndex+1 { - di.ids = append(di.ids, []id.DatasetID{}) + di.ids = append(di.ids, []ID{}) } nextDepthIDs := di.ids[di.currentDepthIndex+1] currentIDs := di.ids[di.currentDepthIndex] for _, f := range d.Fields() { if r := f.Value().ValueRef(); r != nil { - if rid, err := id.DatasetIDFrom(*r); err == nil { + if rid, err := IDFrom(*r); err == nil { nextDepthIDs = append(nextDepthIDs, rid) } } @@ -53,7 +51,7 @@ func (di *GraphIterator) Next(d *Dataset) (id.DatasetID, bool) { if di.maxDepth <= di.currentDepthIndex || len(nextDepthIDs) == 0 { // done di.currentDepthIndex++ - return id.DatasetID{}, true + return ID{}, true } di.currentDepthIndex++ } else { diff --git a/pkg/dataset/graph_iterator_test.go b/pkg/dataset/graph_iterator_test.go index 254b1a8e9..04837c039 100644 --- a/pkg/dataset/graph_iterator_test.go +++ b/pkg/dataset/graph_iterator_test.go @@ -3,40 +3,39 @@ package dataset import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestDatasetGraphIterator(t *testing.T) { - sid := id.NewSceneID() - dsid := 
id.NewDatasetSchemaID() + sid := NewSceneID() + dsid := NewSchemaID() - d0id := id.NewDatasetID() - d11id := id.NewDatasetID() - d12id := id.NewDatasetID() - d21id := id.NewDatasetID() - d31id := id.NewDatasetID() - d32id := id.NewDatasetID() + d0id := NewID() + d11id := NewID() + d12id := NewID() + d21id := NewID() + d31id := NewID() + d32id := NewID() d0, _ := New().ID(d0id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d11id.ID()), ""), - NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d12id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d11id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d12id.ID()), ""), }).Build() d11, _ := New().ID(d11id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d21id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d21id.ID()), ""), }).Build() d12, _ := New().ID(d12id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(id.NewDatasetSchemaFieldID(), ValueTypeString.ValueFrom("hoge"), ""), + NewField(NewFieldID(), ValueTypeString.ValueFrom("hoge"), ""), }).Build() d21, _ := New().ID(d21id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d31id.ID()), ""), - NewField(id.NewDatasetSchemaFieldID(), ValueTypeRef.ValueFrom(d32id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d31id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d32id.ID()), ""), }).Build() d31, _ := New().ID(d31id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(id.NewDatasetSchemaFieldID(), ValueTypeString.ValueFrom("foo"), ""), + NewField(NewFieldID(), ValueTypeString.ValueFrom("foo"), ""), }).Build() d32, _ := New().ID(d32id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(id.NewDatasetSchemaFieldID(), ValueTypeString.ValueFrom("bar"), ""), + NewField(NewFieldID(), ValueTypeString.ValueFrom("bar"), ""), }).Build() it 
:= GraphIteratorFrom(d0id, 3) diff --git a/pkg/dataset/graph_loader.go b/pkg/dataset/graph_loader.go index a613abf42..623280e9b 100644 --- a/pkg/dataset/graph_loader.go +++ b/pkg/dataset/graph_loader.go @@ -2,21 +2,19 @@ package dataset import ( "context" - - "github.com/reearth/reearth-backend/pkg/id" ) -type GraphLoader func(context.Context, id.DatasetID, ...id.DatasetSchemaFieldID) (List, *Field, error) +type GraphLoader func(context.Context, ID, ...FieldID) (List, *Field, error) func GraphLoaderFromMap(m Map) GraphLoader { - return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field, error) { + return func(ctx context.Context, root ID, fields ...FieldID) (List, *Field, error) { list, field := m.GraphSearchByFields(root, fields...) return list, field, nil } } func GraphLoaderFromMapAndGraph(m Map, g GraphLoader) GraphLoader { - return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field, error) { + return func(ctx context.Context, root ID, fields ...FieldID) (List, *Field, error) { if m != nil { if len(fields) == 0 { return List{m[root]}, nil, nil diff --git a/pkg/dataset/id.go b/pkg/dataset/id.go new file mode 100644 index 000000000..701ffb114 --- /dev/null +++ b/pkg/dataset/id.go @@ -0,0 +1,45 @@ +package dataset + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.DatasetID +type FieldID = id.DatasetSchemaFieldID +type SchemaID = id.DatasetSchemaID +type SceneID = id.SceneID + +var NewID = id.NewDatasetID +var NewSchemaID = id.NewDatasetSchemaID +var NewFieldID = id.NewDatasetSchemaFieldID +var NewSceneID = id.NewSceneID + +var MustID = id.MustDatasetID +var MustSchemaID = id.MustDatasetSchemaID +var MustFieldID = id.MustDatasetSchemaFieldID +var MustSceneID = id.MustSceneID + +var IDFrom = id.DatasetIDFrom +var SchemaIDFrom = id.DatasetSchemaIDFrom +var FieldIDFrom = id.DatasetSchemaFieldIDFrom +var SceneIDFrom = id.SceneIDFrom + +var IDFromRef = 
id.DatasetIDFromRef +var SchemaIDFromRef = id.DatasetSchemaIDFromRef +var FieldIDFromRef = id.DatasetSchemaFieldIDFromRef +var SceneIDFromRef = id.SceneIDFromRef + +var IDFromRefID = id.DatasetIDFromRefID +var SchemaIDFromRefID = id.DatasetSchemaIDFromRefID +var FieldIDFromRefID = id.DatasetSchemaFieldIDFromRefID +var SceneIDFromRefID = id.SceneIDFromRefID + +type IDSet = id.DatasetIDSet +type SchemaIDSet = id.DatasetSchemaIDSet +type FieldIDSet = id.DatasetSchemaFieldIDSet +type SceneIDSet = id.SceneIDSet + +var NewIDSet = id.NewDatasetIDSet +var NewSchemaIDset = id.NewDatasetSchemaIDSet +var NewFieldIDset = id.NewDatasetSchemaFieldIDSet +var NewSceneIDset = id.NewSceneIDSet + +var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/dataset/list.go b/pkg/dataset/list.go index e22e89575..7675c6a38 100644 --- a/pkg/dataset/list.go +++ b/pkg/dataset/list.go @@ -1,9 +1,5 @@ package dataset -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type List []*Dataset func (l List) First() *Dataset { @@ -20,7 +16,7 @@ func (l List) Last() *Dataset { return l[len(l)-1] } -func (l List) FindDataset(id id.DatasetID) *Dataset { +func (l List) FindDataset(id ID) *Dataset { for _, t := range l { if t.ID() == id { return t @@ -29,12 +25,12 @@ func (l List) FindDataset(id id.DatasetID) *Dataset { return nil } -func (l List) ToDatasetIds() []id.DatasetID { +func (l List) ToDatasetIds() []ID { if l == nil { return nil } - ids := []id.DatasetID{} + ids := []ID{} for _, t := range l { ids = append(ids, t.ID()) } @@ -50,7 +46,7 @@ func (l List) FindDatasetBySource(s string) *Dataset { return nil } -func (l List) FilterByDatasetSchema(s id.DatasetSchemaID) List { +func (l List) FilterByDatasetSchema(s SchemaID) List { n := List{} for _, t := range l { if t.Schema() == s { @@ -65,7 +61,7 @@ func (l List) DiffBySource(l2 List) Diff { added := []*Dataset{} removed := []*Dataset{} // others := map[string]DatasetDiffTouple{} - others2 := map[id.DatasetID]*Dataset{} + others2 := 
map[ID]*Dataset{} s1 := map[string]*Dataset{} for _, d1 := range l { @@ -119,14 +115,14 @@ func (l List) GraphLoader() GraphLoader { return GraphLoaderFromMap(l.Map()) } -type Map map[id.DatasetID]*Dataset +type Map map[ID]*Dataset func (dm Map) Add(dss ...*Dataset) { if dss == nil { return } if dm == nil { - dm = map[id.DatasetID]*Dataset{} + dm = map[ID]*Dataset{} } for _, ds := range dss { if ds == nil { @@ -147,7 +143,7 @@ func (dm Map) Slice() List { return res } -func (dm Map) GraphSearchByFields(root id.DatasetID, fields ...id.DatasetSchemaFieldID) (List, *Field) { +func (dm Map) GraphSearchByFields(root ID, fields ...FieldID) (List, *Field) { res := make(List, 0, len(fields)) currentD := dm[root] if currentD == nil { @@ -165,8 +161,8 @@ func (dm Map) GraphSearchByFields(root id.DatasetID, fields ...id.DatasetSchemaF if len(fields)-1 == i { return res, field } else if fids := field.Value().ValueRef(); fids != nil { - if fid, err := id.DatasetIDFrom(*fids); err == nil { - currentD = dm[id.DatasetID(fid)] + if fid, err := IDFrom(*fids); err == nil { + currentD = dm[ID(fid)] } else { return res, nil } diff --git a/pkg/dataset/list_test.go b/pkg/dataset/list_test.go index 4dfcccbf3..f98cd3c22 100644 --- a/pkg/dataset/list_test.go +++ b/pkg/dataset/list_test.go @@ -3,12 +3,11 @@ package dataset import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestDatasetListDiff(t *testing.T) { - sid := id.SceneID(id.New()) + sid := NewSceneID() source1 := "hogehoge/1" source2 := "hogehoge/2" source3 := "hogehoge/3" @@ -24,7 +23,7 @@ func TestDatasetListDiff(t *testing.T) { expected := Diff{ Added: []*Dataset{d4}, Removed: []*Dataset{d1}, - Others: map[id.DatasetID]*Dataset{ + Others: map[ID]*Dataset{ d2.ID(): d3, }, } @@ -36,7 +35,7 @@ func TestDatasetListDiff(t *testing.T) { expected = Diff{ Added: []*Dataset{d4}, Removed: []*Dataset{d1, d2}, - Others: map[id.DatasetID]*Dataset{ + Others: map[ID]*Dataset{ d5.ID(): d3, 
}, } @@ -44,13 +43,13 @@ func TestDatasetListDiff(t *testing.T) { } func TestDatasetMapGraphSearchByFields(t *testing.T) { - did1 := id.NewDatasetID() - did2 := id.NewDatasetID() - did3 := id.NewDatasetID() - fid1 := id.NewDatasetSchemaFieldID() - fid2 := id.NewDatasetSchemaFieldID() - fid3 := id.NewDatasetSchemaFieldID() - sid := id.NewSceneID() + did1 := NewID() + did2 := NewID() + did3 := NewID() + fid1 := NewFieldID() + fid2 := NewFieldID() + fid3 := NewFieldID() + sid := NewSceneID() v1 := ValueTypeRef.ValueFrom(did2.ID()) v2 := ValueTypeRef.ValueFrom(did3.ID()) v3 := ValueTypeString.ValueFrom("value") diff --git a/pkg/dataset/loader.go b/pkg/dataset/loader.go index 5092aa34e..4e20a0458 100644 --- a/pkg/dataset/loader.go +++ b/pkg/dataset/loader.go @@ -2,14 +2,12 @@ package dataset import ( "context" - - "github.com/reearth/reearth-backend/pkg/id" ) -type Loader func(context.Context, ...id.DatasetID) (List, error) +type Loader func(context.Context, ...ID) (List, error) func LoaderFrom(data []*Dataset) Loader { - return func(ctx context.Context, ids ...id.DatasetID) (List, error) { + return func(ctx context.Context, ids ...ID) (List, error) { res := make(List, 0, len(ids)) for _, i := range ids { found := false @@ -28,8 +26,8 @@ func LoaderFrom(data []*Dataset) Loader { } } -func LoaderFromMap(data map[id.DatasetID]*Dataset) Loader { - return func(ctx context.Context, ids ...id.DatasetID) (List, error) { +func LoaderFromMap(data map[ID]*Dataset) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { res := make(List, 0, len(ids)) for _, i := range ids { if d, ok := data[i]; ok { diff --git a/pkg/dataset/schema.go b/pkg/dataset/schema.go index ae3bf22ad..b6d01b1fa 100644 --- a/pkg/dataset/schema.go +++ b/pkg/dataset/schema.go @@ -1,33 +1,31 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - type Schema struct { - id id.DatasetSchemaID + id SchemaID source string name string - fields map[id.DatasetSchemaFieldID]*SchemaField - 
order []id.DatasetSchemaFieldID - representativeField *id.DatasetSchemaFieldID - scene id.SceneID + fields map[FieldID]*SchemaField + order []FieldID + representativeField *FieldID + scene SceneID dynamic bool } -func (d *Schema) ID() (i id.DatasetSchemaID) { +func (d *Schema) ID() (i SchemaID) { if d == nil { return } return d.id } -func (d *Schema) IDRef() *id.DatasetSchemaID { +func (d *Schema) IDRef() *SchemaID { if d == nil { return nil } return d.id.Ref() } -func (d *Schema) Scene() (i id.SceneID) { +func (d *Schema) Scene() (i SceneID) { if d == nil { return } @@ -48,7 +46,7 @@ func (d *Schema) Name() string { return d.name } -func (d *Schema) RepresentativeFieldID() *id.DatasetSchemaFieldID { +func (d *Schema) RepresentativeFieldID() *FieldID { if d == nil { return nil } @@ -73,14 +71,14 @@ func (d *Schema) Fields() []*SchemaField { return fields } -func (d *Schema) Field(id id.DatasetSchemaFieldID) *SchemaField { +func (d *Schema) Field(id FieldID) *SchemaField { if d == nil { return nil } return d.fields[id] } -func (d *Schema) FieldRef(id *id.DatasetSchemaFieldID) *SchemaField { +func (d *Schema) FieldRef(id *FieldID) *SchemaField { if d == nil || id == nil { return nil } @@ -111,12 +109,10 @@ func (d *Schema) FieldByType(t ValueType) *SchemaField { return nil } -// Dynamic _ func (d *Schema) Dynamic() bool { return d.dynamic } -// Rename _ func (u *Schema) Rename(name string) { u.name = name } diff --git a/pkg/dataset/schema_builder.go b/pkg/dataset/schema_builder.go index 675b2c80d..2b4d138cb 100644 --- a/pkg/dataset/schema_builder.go +++ b/pkg/dataset/schema_builder.go @@ -1,9 +1,5 @@ package dataset -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type SchemaBuilder struct { d *Schema } @@ -13,12 +9,12 @@ func NewSchema() *SchemaBuilder { } func (b *SchemaBuilder) Build() (*Schema, error) { - if id.ID(b.d.id).IsNil() { - return nil, id.ErrInvalidID + if b.d.id.IsNil() { + return nil, ErrInvalidID } if b.d.fields == nil || b.d.order == nil 
{ - b.d.fields = map[id.DatasetSchemaFieldID]*SchemaField{} - b.d.order = []id.DatasetSchemaFieldID{} + b.d.fields = map[FieldID]*SchemaField{} + b.d.order = []FieldID{} } return b.d, nil } @@ -31,17 +27,17 @@ func (b *SchemaBuilder) MustBuild() *Schema { return r } -func (b *SchemaBuilder) ID(id id.DatasetSchemaID) *SchemaBuilder { +func (b *SchemaBuilder) ID(id SchemaID) *SchemaBuilder { b.d.id = id return b } func (b *SchemaBuilder) NewID() *SchemaBuilder { - b.d.id = id.DatasetSchemaID(id.New()) + b.d.id = NewSchemaID() return b } -func (b *SchemaBuilder) Scene(scene id.SceneID) *SchemaBuilder { +func (b *SchemaBuilder) Scene(scene SceneID) *SchemaBuilder { b.d.scene = scene return b } @@ -61,15 +57,15 @@ func (b *SchemaBuilder) Source(source string) *SchemaBuilder { return b } -func (b *SchemaBuilder) RepresentativeField(representativeField id.DatasetSchemaFieldID) *SchemaBuilder { +func (b *SchemaBuilder) RepresentativeField(representativeField FieldID) *SchemaBuilder { rf := representativeField b.d.representativeField = &rf return b } func (b *SchemaBuilder) Fields(fields []*SchemaField) *SchemaBuilder { - b.d.fields = map[id.DatasetSchemaFieldID]*SchemaField{} - b.d.order = make([]id.DatasetSchemaFieldID, 0, len(fields)) + b.d.fields = map[FieldID]*SchemaField{} + b.d.order = make([]FieldID, 0, len(fields)) sources := map[string]struct{}{} for _, f := range fields { diff --git a/pkg/dataset/schema_field.go b/pkg/dataset/schema_field.go index 538dce7f2..6c0856af4 100644 --- a/pkg/dataset/schema_field.go +++ b/pkg/dataset/schema_field.go @@ -1,23 +1,21 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - type SchemaField struct { - id id.DatasetSchemaFieldID + id FieldID name string dataType ValueType source string - ref *id.DatasetSchemaID + ref *SchemaID } -func (d *SchemaField) ID() (i id.DatasetSchemaFieldID) { +func (d *SchemaField) ID() (i FieldID) { if d == nil { return } return d.id } -func (d *SchemaField) IDRef() 
*id.DatasetSchemaFieldID { +func (d *SchemaField) IDRef() *FieldID { if d == nil { return nil } @@ -31,7 +29,7 @@ func (d *SchemaField) Name() (n string) { return d.name } -func (d *SchemaField) Ref() *id.DatasetSchemaID { +func (d *SchemaField) Ref() *SchemaID { if d == nil { return nil } diff --git a/pkg/dataset/schema_field_builder.go b/pkg/dataset/schema_field_builder.go index b6db538bd..b51abd766 100644 --- a/pkg/dataset/schema_field_builder.go +++ b/pkg/dataset/schema_field_builder.go @@ -2,8 +2,6 @@ package dataset import ( "errors" - - "github.com/reearth/reearth-backend/pkg/id" ) type SchemaFieldBuilder struct { @@ -15,8 +13,8 @@ func NewSchemaField() *SchemaFieldBuilder { } func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { - if id.ID(b.d.id).IsNil() { - return nil, id.ErrInvalidID + if b.d.id.IsNil() { + return nil, ErrInvalidID } if !b.d.dataType.Default() { return nil, errors.New("invalid value type") @@ -32,13 +30,13 @@ func (b *SchemaFieldBuilder) MustBuild() *SchemaField { return r } -func (b *SchemaFieldBuilder) ID(id id.DatasetSchemaFieldID) *SchemaFieldBuilder { +func (b *SchemaFieldBuilder) ID(id FieldID) *SchemaFieldBuilder { b.d.id = id return b } func (b *SchemaFieldBuilder) NewID() *SchemaFieldBuilder { - b.d.id = id.DatasetSchemaFieldID(id.New()) + b.d.id = NewFieldID() return b } @@ -57,7 +55,7 @@ func (b *SchemaFieldBuilder) Source(source string) *SchemaFieldBuilder { return b } -func (b *SchemaFieldBuilder) Ref(ref *id.DatasetSchemaID) *SchemaFieldBuilder { +func (b *SchemaFieldBuilder) Ref(ref *SchemaID) *SchemaFieldBuilder { if ref == nil { b.d.ref = nil } else { diff --git a/pkg/dataset/schema_field_diff.go b/pkg/dataset/schema_field_diff.go index a58b206a9..1bcace8ae 100644 --- a/pkg/dataset/schema_field_diff.go +++ b/pkg/dataset/schema_field_diff.go @@ -1,18 +1,16 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - type SchemaFieldDiff struct { Added []*SchemaField Removed []*SchemaField - Replaced 
map[id.DatasetSchemaFieldID]*SchemaField + Replaced map[FieldID]*SchemaField } func (d *Schema) FieldDiffBySource(d2 *Schema) SchemaFieldDiff { added := []*SchemaField{} removed := []*SchemaField{} // others := map[string]DatasetDiffTouple{} - others2 := map[id.DatasetSchemaFieldID]*SchemaField{} + others2 := map[FieldID]*SchemaField{} s1 := map[string]*SchemaField{} for _, d1 := range d.fields { diff --git a/pkg/dataset/schema_graph_iterator.go b/pkg/dataset/schema_graph_iterator.go index 8c5e7e159..59b4a7652 100644 --- a/pkg/dataset/schema_graph_iterator.go +++ b/pkg/dataset/schema_graph_iterator.go @@ -1,31 +1,27 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - // SchemaGraphIterator ใฏใ€ใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚’ใ‚ฐใƒฉใƒ•ๆŽข็ดขใ™ใ‚‹ใŸใ‚ใฎใ‚คใƒ†ใƒฌใƒผใ‚ฟใงใ™ใ€‚ type SchemaGraphIterator struct { m SchemaMap - ids [][]id.DatasetSchemaID + ids [][]SchemaID currentIndex int currentDepthIndex int maxDepth int } -// SchemaGraphIteratorFrom _ -func SchemaGraphIteratorFrom(root id.DatasetSchemaID, depth int) *SchemaGraphIterator { +func SchemaGraphIteratorFrom(root SchemaID, depth int) *SchemaGraphIterator { return &SchemaGraphIterator{ - ids: [][]id.DatasetSchemaID{{root}}, + ids: [][]SchemaID{{root}}, maxDepth: depth, } } -// Next _ -func (di *SchemaGraphIterator) Next(d *Schema) (id.DatasetSchemaID, bool) { +func (di *SchemaGraphIterator) Next(d *Schema) (SchemaID, bool) { if di == nil || di.maxDepth == 0 || di.ids == nil || len(di.ids) == 0 || d == nil { - return id.DatasetSchemaID{}, false + return SchemaID{}, false } if di.currentDepthIndex >= len(di.ids) { - return id.DatasetSchemaID{}, true + return SchemaID{}, true } if di.m == nil { @@ -35,7 +31,7 @@ func (di *SchemaGraphIterator) Next(d *Schema) (id.DatasetSchemaID, bool) { // add fields if len(di.ids) <= di.currentDepthIndex+1 { - di.ids = append(di.ids, []id.DatasetSchemaID{}) + di.ids = append(di.ids, []SchemaID{}) } nextDepthIDs := di.ids[di.currentDepthIndex+1] currentIDs := 
di.ids[di.currentDepthIndex] @@ -53,7 +49,7 @@ func (di *SchemaGraphIterator) Next(d *Schema) (id.DatasetSchemaID, bool) { if di.maxDepth <= di.currentDepthIndex || len(nextDepthIDs) == 0 { // done di.currentDepthIndex++ - return id.DatasetSchemaID{}, true + return SchemaID{}, true } di.currentDepthIndex++ } else { @@ -63,7 +59,6 @@ func (di *SchemaGraphIterator) Next(d *Schema) (id.DatasetSchemaID, bool) { return di.ids[di.currentDepthIndex][di.currentIndex], false } -// Result _ func (di *SchemaGraphIterator) Result() SchemaMap { if di == nil { return nil diff --git a/pkg/dataset/schema_graph_iterator_test.go b/pkg/dataset/schema_graph_iterator_test.go index fef53de19..c7f556bca 100644 --- a/pkg/dataset/schema_graph_iterator_test.go +++ b/pkg/dataset/schema_graph_iterator_test.go @@ -3,27 +3,26 @@ package dataset import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestDatasetSchemaGraphIterator(t *testing.T) { - sid := id.NewSceneID() - d0id := id.NewDatasetSchemaID() - d11id := id.NewDatasetSchemaID() - d12id := id.NewDatasetSchemaID() - d21id := id.NewDatasetSchemaID() - d31id := id.NewDatasetSchemaID() - d32id := id.NewDatasetSchemaID() + sid := NewSceneID() + d0id := NewSchemaID() + d11id := NewSchemaID() + d12id := NewSchemaID() + d21id := NewSchemaID() + d31id := NewSchemaID() + d32id := NewSchemaID() - d0f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d11id).Build() - d0f1, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d12id).Build() - d11f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeString).Build() - d12f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d21id).Build() - d21f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d31id).Build() - d21f1, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeRef).Ref(&d32id).Build() - 
d31f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeString).Build() - d32f0, _ := NewSchemaField().ID(id.NewDatasetSchemaFieldID()).Type(ValueTypeString).Build() + d0f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d11id).Build() + d0f1, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d12id).Build() + d11f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeString).Build() + d12f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d21id).Build() + d21f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d31id).Build() + d21f1, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeRef).Ref(&d32id).Build() + d31f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeString).Build() + d32f0, _ := NewSchemaField().ID(NewFieldID()).Type(ValueTypeString).Build() d0, _ := NewSchema().ID(d0id).Scene(sid).Fields([]*SchemaField{ d0f0, d0f1, diff --git a/pkg/dataset/schema_list.go b/pkg/dataset/schema_list.go index 6e55b0262..f9041298d 100644 --- a/pkg/dataset/schema_list.go +++ b/pkg/dataset/schema_list.go @@ -1,11 +1,7 @@ package dataset -import "github.com/reearth/reearth-backend/pkg/id" - -// SchemaList _ type SchemaList []*Schema -// Map _ func (dsl SchemaList) Map() SchemaMap { if dsl == nil { return nil @@ -19,10 +15,8 @@ func (dsl SchemaList) Map() SchemaMap { return m } -// SchemaMap _ -type SchemaMap map[id.DatasetSchemaID]*Schema +type SchemaMap map[SchemaID]*Schema -// Slice _ func (dsm SchemaMap) Slice() SchemaList { if dsm == nil { return nil @@ -36,8 +30,7 @@ func (dsm SchemaMap) Slice() SchemaList { return res } -// GraphSearchByFields _ -func (dsm SchemaMap) GraphSearchByFields(root id.DatasetSchemaID, fields ...id.DatasetSchemaFieldID) (SchemaList, *SchemaField) { +func (dsm SchemaMap) GraphSearchByFields(root SchemaID, fields ...FieldID) (SchemaList, *SchemaField) { res := make(SchemaList, 0, len(fields)) currentDs := dsm[root] if currentDs == nil { diff --git 
a/pkg/dataset/schema_list_test.go b/pkg/dataset/schema_list_test.go index c9f17451f..267c4b32a 100644 --- a/pkg/dataset/schema_list_test.go +++ b/pkg/dataset/schema_list_test.go @@ -3,18 +3,17 @@ package dataset import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestDatasetSchemaMapGraphSearchByFields(t *testing.T) { - did1 := id.NewDatasetSchemaID() - did2 := id.NewDatasetSchemaID() - did3 := id.NewDatasetSchemaID() - fid1 := id.NewDatasetSchemaFieldID() - fid2 := id.NewDatasetSchemaFieldID() - fid3 := id.NewDatasetSchemaFieldID() - sid := id.NewSceneID() + did1 := NewSchemaID() + did2 := NewSchemaID() + did3 := NewSchemaID() + fid1 := NewFieldID() + fid2 := NewFieldID() + fid3 := NewFieldID() + sid := NewSceneID() f1, _ := NewSchemaField().ID(fid1).Type(ValueTypeString).Ref(&did2).Build() f2, _ := NewSchemaField().ID(fid2).Type(ValueTypeString).Ref(&did3).Build() f3, _ := NewSchemaField().ID(fid3).Type(ValueTypeString).Build() diff --git a/pkg/id/asset_gen.go b/pkg/id/asset_gen.go index fa061e5c1..ac082c89e 100644 --- a/pkg/id/asset_gen.go +++ b/pkg/id/asset_gen.go @@ -44,7 +44,7 @@ func AssetIDFromRef(i *string) *AssetID { // AssetIDFromRefID generates a new AssetID from a ref of a generic ID. func AssetIDFromRefID(i *ID) *AssetID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := AssetID(*i) @@ -58,28 +58,40 @@ func (d AssetID) ID() ID { // String returns a string representation. func (d AssetID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d AssetID) GoString() string { - return "id.AssetID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d AssetID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. 
-func (d AssetID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d AssetID) GoString() string { + return "AssetID(" + d.String() + ")" } // Ref returns a reference. func (d AssetID) Ref() *AssetID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d AssetID) Contains(ids []AssetID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d AssetID) Contains(ids []AssetID) bool { // CopyRef returns a copy of a reference. func (d *AssetID) CopyRef() *AssetID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *AssetID) CopyRef() *AssetID { // IDRef returns a reference of a domain id. func (d *AssetID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *AssetID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *AssetID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *AssetID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *AssetID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *AssetID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *AssetID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *AssetID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d AssetID) IsNil() bool { return ID(d).IsNil() } -// AssetIDToKeys converts IDs into a string slice. 
-func AssetIDToKeys(ids []AssetID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *AssetID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// AssetIDsToStrings converts IDs into a string slice. +func AssetIDsToStrings(ids []AssetID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // AssetIDsFrom converts a string slice into a ID slice. @@ -285,9 +305,6 @@ func (s *AssetIDSet) Clone() *AssetIDSet { // Merge returns a merged set func (s *AssetIDSet) Merge(s2 *AssetIDSet) *AssetIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/asset_gen_test.go b/pkg/id/asset_gen_test.go index 14d5985b7..8a8d24a98 100644 --- a/pkg/id/asset_gen_test.go +++ b/pkg/id/asset_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewAssetID(t *testing.T) { id := NewAssetID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestAssetIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestAssetIDFrom(t *testing.T) { result AssetID err error }{ - AssetID{}, - ErrInvalidID, + result: AssetID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestAssetIDFrom(t *testing.T) { result AssetID err error }{ - AssetID{}, - ErrInvalidID, + result: AssetID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestAssetIDFrom(t *testing.T) { result AssetID err error }{ - AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range 
testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := AssetIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := AssetIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustAssetID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustAssetID(t *testing.T) { expected: AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustAssetID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustAssetID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestAssetIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *AssetID @@ -139,159 +135,149 @@ func TestAssetIDFromRef(t *testing.T) { expected: &AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := AssetIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := AssetIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestAssetIDFromRefID(t *testing.T) { id 
:= New() - - subId := AssetIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := AssetIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, AssetIDFromRefID(nil)) + assert.Nil(t, AssetIDFromRefID(&ID{})) } func TestAssetID_ID(t *testing.T) { id := New() - subId := AssetIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := AssetIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestAssetID_String(t *testing.T) { id := New() - subId := AssetIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := AssetIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", AssetID{}.String()) } -func TestAssetID_GoString(t *testing.T) { - id := New() - subId := AssetIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.AssetID("+id.String()+")") +func TestAssetID_RefString(t *testing.T) { + id := NewAssetID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, AssetID{}.RefString()) } -func TestAssetID_RefString(t *testing.T) { +func TestAssetID_GoString(t *testing.T) { id := New() - subId := AssetIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := AssetIDFromRefID(&id) + assert.Equal(t, "AssetID("+id.String()+")", id2.GoString()) + assert.Equal(t, "AssetID()", AssetID{}.GoString()) } func TestAssetID_Ref(t *testing.T) { - id := New() - subId := AssetIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewAssetID() + assert.Equal(t, AssetID(id), *id.Ref()) + assert.Nil(t, (&AssetID{}).Ref()) } func TestAssetID_Contains(t *testing.T) { id := NewAssetID() id2 := NewAssetID() assert.True(t, id.Contains([]AssetID{id, id2})) + assert.False(t, AssetID{}.Contains([]AssetID{id, id2, {}})) assert.False(t, id.Contains([]AssetID{id2})) } func TestAssetID_CopyRef(t *testing.T) { - id := New() - subId := AssetIDFromRefID(&id) - - 
subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewAssetID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*AssetID)(nil).CopyRef()) } func TestAssetID_IDRef(t *testing.T) { id := New() - subId := AssetIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := AssetIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&AssetID{}).IDRef()) + assert.Nil(t, (*AssetID)(nil).IDRef()) } func TestAssetID_StringRef(t *testing.T) { - id := New() - subId := AssetIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewAssetID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&AssetID{}).StringRef()) + assert.Nil(t, (*AssetID)(nil).StringRef()) } func TestAssetID_MarhsalJSON(t *testing.T) { - id := New() - subId := AssetIDFromRefID(&id) + id := NewAssetID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&AssetID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*AssetID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestAssetID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &AssetID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustAssetID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &AssetID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestAssetID_MarshalText(t *testing.T) { id := New() - subId := AssetIDFromRefID(&id) + res, err := AssetIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = 
(&AssetID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*AssetID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestAssetID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &AssetID{} - - err := subId.UnmarshalText(text) - + id2 := &AssetID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestAssetID_IsNil(t *testing.T) { - subId := AssetID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *AssetIDFromRefID(&id) + assert.True(t, AssetID{}.IsNil()) + assert.False(t, NewAssetID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestAssetID_IsNilRef(t *testing.T) { + assert.True(t, AssetID{}.Ref().IsNilRef()) + assert.True(t, (*AssetID)(nil).IsNilRef()) + assert.False(t, NewAssetID().Ref().IsNilRef()) } -func TestAssetIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestAssetIDsToStrings(t *testing.T) { + tests := []struct { name string input []AssetID expected []string @@ -321,19 +307,17 @@ func TestAssetIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, AssetIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, AssetIDsToStrings(tt.input)) }) } - } func TestAssetIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestAssetIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func 
TestAssetIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := AssetIDsFrom(tc.input) if tc.expected.err != nil { - _, err := AssetIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := AssetIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestAssetIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []AssetID @@ -449,25 +431,22 @@ func TestAssetIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := AssetIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestAssetIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []AssetID @@ -493,21 +472,18 @@ func TestAssetIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := AssetIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestAssetIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []AssetID expected []ID @@ -537,28 +513,25 @@ func TestAssetIDsToID(t *testing.T) { }, } - for _, tc := range 
testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := AssetIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestAssetIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustAssetID(id1.String()) + id21 := MustAssetID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustAssetID(id2.String()) + id22 := MustAssetID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustAssetID(id3.String()) + id23 := MustAssetID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*AssetID expected []*ID @@ -570,39 +543,35 @@ func TestAssetIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*AssetID{&subId1}, + input: []*AssetID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*AssetID{&subId1, &subId2, &subId3}, + input: []*AssetID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := AssetIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewAssetIDSet(t *testing.T) { AssetIdSet := NewAssetIDSet() - assert.NotNil(t, AssetIdSet) assert.Empty(t, AssetIdSet.m) assert.Empty(t, AssetIdSet.s) } func TestAssetIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []AssetID expected *AssetIDSet @@ -663,24 +632,19 @@ func TestAssetIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewAssetIDSet() 
set.Add(tc.input...) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestAssetIDSet_AddRef(t *testing.T) { - t.Parallel() - - AssetId := MustAssetID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *AssetID expected *AssetIDSet @@ -695,7 +659,7 @@ func TestAssetIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &AssetId, + input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &AssetIDSet{ m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestAssetIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewAssetIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestAssetIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - AssetIDSet - AssetID - } + tests := []struct { + name string + target *AssetIDSet + input AssetID expected bool }{ { - name: "Empty Set", - input: struct { - AssetIDSet - AssetID - }{AssetIDSet: AssetIDSet{}, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &AssetIDSet{}, + input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - AssetIDSet - AssetID - }{AssetIDSet: AssetIDSet{ + target: &AssetIDSet{ m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - AssetIDSet - AssetID - }{AssetIDSet: 
AssetIDSet{ + target: &AssetIDSet{ m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, AssetID: MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.AssetIDSet.Has(tc.input.AssetID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestAssetIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input AssetIDSet - expected AssetIDSet + input *AssetIDSet + expected *AssetIDSet }{ { - name: "Empty Set", - input: AssetIDSet{}, - expected: AssetIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &AssetIDSet{}, + expected: &AssetIDSet{}, }, { - name: "Set Contains the element", - input: AssetIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &AssetIDSet{ m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: AssetIDSet{ + expected: &AssetIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestAssetIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *AssetIDSet expected []AssetID }{ { - name: "Empty slice", + name: "Empty", input: &AssetIDSet{ m: map[AssetID]struct{}{}, s: nil, }, expected: make([]AssetID, 0), }, + { + name: 
"Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &AssetIDSet{ @@ -854,20 +808,17 @@ func TestAssetIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestAssetIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *AssetIDSet expected *AssetIDSet @@ -922,21 +873,19 @@ func TestAssetIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestAssetIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *AssetIDSet @@ -944,6 +893,23 @@ func TestAssetIDSet_Merge(t *testing.T) { } expected *AssetIDSet }{ + { + name: "Nil Set", + input: struct { + a *AssetIDSet + b *AssetIDSet + }{ + a: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &AssetIDSet{ + m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestAssetIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t 
*testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/cluster_field_gen_test.go b/pkg/id/cluster_field_gen_test.go index ed76808b5..7fc5cd187 100644 --- a/pkg/id/cluster_field_gen_test.go +++ b/pkg/id/cluster_field_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewClusterID(t *testing.T) { id := NewClusterID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestClusterIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestClusterIDFrom(t *testing.T) { result ClusterID err error }{ - ClusterID{}, - ErrInvalidID, + result: ClusterID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestClusterIDFrom(t *testing.T) { result ClusterID err error }{ - ClusterID{}, - ErrInvalidID, + result: ClusterID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestClusterIDFrom(t *testing.T) { result ClusterID err error }{ - ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := ClusterIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := ClusterIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustClusterID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string 
input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustClusterID(t *testing.T) { expected: ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustClusterID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustClusterID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestClusterIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *ClusterID @@ -139,159 +135,149 @@ func TestClusterIDFromRef(t *testing.T) { expected: &ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := ClusterIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := ClusterIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestClusterIDFromRefID(t *testing.T) { id := New() - - subId := ClusterIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := ClusterIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, ClusterIDFromRefID(nil)) + assert.Nil(t, ClusterIDFromRefID(&ID{})) } func TestClusterID_ID(t *testing.T) { id := New() - subId := ClusterIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := ClusterIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestClusterID_String(t *testing.T) { id := New() - subId := ClusterIDFromRefID(&id) - - assert.Equal(t, 
subId.String(), id.String()) + id2 := ClusterIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", ClusterID{}.String()) } -func TestClusterID_GoString(t *testing.T) { - id := New() - subId := ClusterIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.ClusterID("+id.String()+")") +func TestClusterID_RefString(t *testing.T) { + id := NewClusterID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, ClusterID{}.RefString()) } -func TestClusterID_RefString(t *testing.T) { +func TestClusterID_GoString(t *testing.T) { id := New() - subId := ClusterIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := ClusterIDFromRefID(&id) + assert.Equal(t, "ClusterID("+id.String()+")", id2.GoString()) + assert.Equal(t, "ClusterID()", ClusterID{}.GoString()) } func TestClusterID_Ref(t *testing.T) { - id := New() - subId := ClusterIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewClusterID() + assert.Equal(t, ClusterID(id), *id.Ref()) + assert.Nil(t, (&ClusterID{}).Ref()) } func TestClusterID_Contains(t *testing.T) { id := NewClusterID() id2 := NewClusterID() assert.True(t, id.Contains([]ClusterID{id, id2})) + assert.False(t, ClusterID{}.Contains([]ClusterID{id, id2, {}})) assert.False(t, id.Contains([]ClusterID{id2})) } func TestClusterID_CopyRef(t *testing.T) { - id := New() - subId := ClusterIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewClusterID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*ClusterID)(nil).CopyRef()) } func TestClusterID_IDRef(t *testing.T) { id := New() - subId := ClusterIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := ClusterIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&ClusterID{}).IDRef()) 
+ assert.Nil(t, (*ClusterID)(nil).IDRef()) } func TestClusterID_StringRef(t *testing.T) { - id := New() - subId := ClusterIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewClusterID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&ClusterID{}).StringRef()) + assert.Nil(t, (*ClusterID)(nil).StringRef()) } func TestClusterID_MarhsalJSON(t *testing.T) { - id := New() - subId := ClusterIDFromRefID(&id) + id := NewClusterID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&ClusterID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*ClusterID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestClusterID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &ClusterID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustClusterID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &ClusterID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestClusterID_MarshalText(t *testing.T) { id := New() - subId := ClusterIDFromRefID(&id) + res, err := ClusterIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&ClusterID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*ClusterID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestClusterID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &ClusterID{} - - err := subId.UnmarshalText(text) - + id2 := &ClusterID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, 
"01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestClusterID_IsNil(t *testing.T) { - subId := ClusterID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *ClusterIDFromRefID(&id) + assert.True(t, ClusterID{}.IsNil()) + assert.False(t, NewClusterID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestClusterID_IsNilRef(t *testing.T) { + assert.True(t, ClusterID{}.Ref().IsNilRef()) + assert.True(t, (*ClusterID)(nil).IsNilRef()) + assert.False(t, NewClusterID().Ref().IsNilRef()) } -func TestClusterIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestClusterIDsToStrings(t *testing.T) { + tests := []struct { name string input []ClusterID expected []string @@ -321,19 +307,17 @@ func TestClusterIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, ClusterIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, ClusterIDsToStrings(tt.input)) }) } - } func TestClusterIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestClusterIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestClusterIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := ClusterIDsFrom(tc.input) if tc.expected.err != nil { - _, err := ClusterIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, 
err) + assert.Nil(t, res) } else { - res, err := ClusterIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestClusterIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []ClusterID @@ -449,25 +431,22 @@ func TestClusterIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ClusterIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestClusterIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []ClusterID @@ -493,21 +472,18 @@ func TestClusterIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ClusterIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestClusterIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []ClusterID expected []ID @@ -537,28 +513,25 @@ func TestClusterIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ClusterIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestClusterIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - 
subId1 := MustClusterID(id1.String()) + id21 := MustClusterID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustClusterID(id2.String()) + id22 := MustClusterID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustClusterID(id3.String()) + id23 := MustClusterID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*ClusterID expected []*ID @@ -570,39 +543,35 @@ func TestClusterIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*ClusterID{&subId1}, + input: []*ClusterID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*ClusterID{&subId1, &subId2, &subId3}, + input: []*ClusterID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ClusterIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewClusterIDSet(t *testing.T) { ClusterIdSet := NewClusterIDSet() - assert.NotNil(t, ClusterIdSet) assert.Empty(t, ClusterIdSet.m) assert.Empty(t, ClusterIdSet.s) } func TestClusterIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []ClusterID expected *ClusterIDSet @@ -663,24 +632,19 @@ func TestClusterIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewClusterIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestClusterIDSet_AddRef(t *testing.T) { - t.Parallel() - - ClusterId := MustClusterID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *ClusterID expected *ClusterIDSet @@ -695,7 +659,7 @@ func TestClusterIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &ClusterId, + input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &ClusterIDSet{ m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestClusterIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewClusterIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestClusterIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - ClusterIDSet - ClusterID - } + tests := []struct { + name string + target *ClusterIDSet + input ClusterID expected bool }{ { - name: "Empty Set", - input: struct { - ClusterIDSet - ClusterID - }{ClusterIDSet: ClusterIDSet{}, ClusterID: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &ClusterIDSet{}, + input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - ClusterIDSet - ClusterID - }{ClusterIDSet: ClusterIDSet{ + target: &ClusterIDSet{ m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, ClusterID: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - 
input: struct { - ClusterIDSet - ClusterID - }{ClusterIDSet: ClusterIDSet{ + target: &ClusterIDSet{ m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, ClusterID: MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.ClusterIDSet.Has(tc.input.ClusterID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestClusterIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input ClusterIDSet - expected ClusterIDSet + input *ClusterIDSet + expected *ClusterIDSet }{ { - name: "Empty Set", - input: ClusterIDSet{}, - expected: ClusterIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &ClusterIDSet{}, + expected: &ClusterIDSet{}, }, { - name: "Set Contains the element", - input: ClusterIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &ClusterIDSet{ m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: ClusterIDSet{ + expected: &ClusterIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestClusterIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *ClusterIDSet expected []ClusterID }{ { - name: "Empty slice", 
+ name: "Empty", input: &ClusterIDSet{ m: map[ClusterID]struct{}{}, s: nil, }, expected: make([]ClusterID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &ClusterIDSet{ @@ -854,20 +808,17 @@ func TestClusterIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestClusterIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *ClusterIDSet expected *ClusterIDSet @@ -922,21 +873,19 @@ func TestClusterIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestClusterIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *ClusterIDSet @@ -944,6 +893,23 @@ func TestClusterIDSet_Merge(t *testing.T) { } expected *ClusterIDSet }{ + { + name: "Nil Set", + input: struct { + a *ClusterIDSet + b *ClusterIDSet + }{ + a: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &ClusterIDSet{ + m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestClusterIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, 
tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/cluster_gen.go b/pkg/id/cluster_gen.go index 3720b1bf5..038849217 100644 --- a/pkg/id/cluster_gen.go +++ b/pkg/id/cluster_gen.go @@ -44,7 +44,7 @@ func ClusterIDFromRef(i *string) *ClusterID { // ClusterIDFromRefID generates a new ClusterID from a ref of a generic ID. func ClusterIDFromRefID(i *ID) *ClusterID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := ClusterID(*i) @@ -58,28 +58,40 @@ func (d ClusterID) ID() ID { // String returns a string representation. func (d ClusterID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d ClusterID) GoString() string { - return "id.ClusterID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d ClusterID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d ClusterID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d ClusterID) GoString() string { + return "ClusterID(" + d.String() + ")" } // Ref returns a reference. func (d ClusterID) Ref() *ClusterID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d ClusterID) Contains(ids []ClusterID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d ClusterID) Contains(ids []ClusterID) bool { // CopyRef returns a copy of a reference. 
func (d *ClusterID) CopyRef() *ClusterID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *ClusterID) CopyRef() *ClusterID { // IDRef returns a reference of a domain id. func (d *ClusterID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *ClusterID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *ClusterID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *ClusterID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *ClusterID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *ClusterID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *ClusterID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *ClusterID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d ClusterID) IsNil() bool { return ID(d).IsNil() } -// ClusterIDToKeys converts IDs into a string slice. -func ClusterIDToKeys(ids []ClusterID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *ClusterID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// ClusterIDsToStrings converts IDs into a string slice. +func ClusterIDsToStrings(ids []ClusterID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // ClusterIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *ClusterIDSet) Clone() *ClusterIDSet { // Merge returns a merged set func (s *ClusterIDSet) Merge(s2 *ClusterIDSet) *ClusterIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/dataset_gen.go b/pkg/id/dataset_gen.go index 1e76621b5..8dec9b216 100644 --- a/pkg/id/dataset_gen.go +++ b/pkg/id/dataset_gen.go @@ -44,7 +44,7 @@ func DatasetIDFromRef(i *string) *DatasetID { // DatasetIDFromRefID generates a new DatasetID from a ref of a generic ID. func DatasetIDFromRefID(i *ID) *DatasetID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := DatasetID(*i) @@ -58,28 +58,40 @@ func (d DatasetID) ID() ID { // String returns a string representation. func (d DatasetID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d DatasetID) GoString() string { - return "id.DatasetID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d DatasetID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d DatasetID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d DatasetID) GoString() string { + return "DatasetID(" + d.String() + ")" } // Ref returns a reference. func (d DatasetID) Ref() *DatasetID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d DatasetID) Contains(ids []DatasetID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d DatasetID) Contains(ids []DatasetID) bool { // CopyRef returns a copy of a reference. 
func (d *DatasetID) CopyRef() *DatasetID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *DatasetID) CopyRef() *DatasetID { // IDRef returns a reference of a domain id. func (d *DatasetID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *DatasetID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *DatasetID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *DatasetID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *DatasetID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *DatasetID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *DatasetID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *DatasetID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d DatasetID) IsNil() bool { return ID(d).IsNil() } -// DatasetIDToKeys converts IDs into a string slice. -func DatasetIDToKeys(ids []DatasetID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *DatasetID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// DatasetIDsToStrings converts IDs into a string slice. +func DatasetIDsToStrings(ids []DatasetID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // DatasetIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *DatasetIDSet) Clone() *DatasetIDSet { // Merge returns a merged set func (s *DatasetIDSet) Merge(s2 *DatasetIDSet) *DatasetIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/dataset_gen_test.go b/pkg/id/dataset_gen_test.go index 48c1c8251..294b45268 100644 --- a/pkg/id/dataset_gen_test.go +++ b/pkg/id/dataset_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewDatasetID(t *testing.T) { id := NewDatasetID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestDatasetIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestDatasetIDFrom(t *testing.T) { result DatasetID err error }{ - DatasetID{}, - ErrInvalidID, + result: DatasetID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestDatasetIDFrom(t *testing.T) { result DatasetID err error }{ - DatasetID{}, - ErrInvalidID, + result: DatasetID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestDatasetIDFrom(t *testing.T) { result DatasetID err error }{ - DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := DatasetIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := DatasetIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } 
func TestMustDatasetID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustDatasetID(t *testing.T) { expected: DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustDatasetID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustDatasetID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestDatasetIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *DatasetID @@ -139,159 +135,149 @@ func TestDatasetIDFromRef(t *testing.T) { expected: &DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := DatasetIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := DatasetIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestDatasetIDFromRefID(t *testing.T) { id := New() - - subId := DatasetIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := DatasetIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, DatasetIDFromRefID(nil)) + assert.Nil(t, DatasetIDFromRefID(&ID{})) } func TestDatasetID_ID(t *testing.T) { id := New() - subId := DatasetIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := DatasetIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) 
} func TestDatasetID_String(t *testing.T) { id := New() - subId := DatasetIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := DatasetIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", DatasetID{}.String()) } -func TestDatasetID_GoString(t *testing.T) { - id := New() - subId := DatasetIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.DatasetID("+id.String()+")") +func TestDatasetID_RefString(t *testing.T) { + id := NewDatasetID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, DatasetID{}.RefString()) } -func TestDatasetID_RefString(t *testing.T) { +func TestDatasetID_GoString(t *testing.T) { id := New() - subId := DatasetIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := DatasetIDFromRefID(&id) + assert.Equal(t, "DatasetID("+id.String()+")", id2.GoString()) + assert.Equal(t, "DatasetID()", DatasetID{}.GoString()) } func TestDatasetID_Ref(t *testing.T) { - id := New() - subId := DatasetIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewDatasetID() + assert.Equal(t, DatasetID(id), *id.Ref()) + assert.Nil(t, (&DatasetID{}).Ref()) } func TestDatasetID_Contains(t *testing.T) { id := NewDatasetID() id2 := NewDatasetID() assert.True(t, id.Contains([]DatasetID{id, id2})) + assert.False(t, DatasetID{}.Contains([]DatasetID{id, id2, {}})) assert.False(t, id.Contains([]DatasetID{id2})) } func TestDatasetID_CopyRef(t *testing.T) { - id := New() - subId := DatasetIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewDatasetID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*DatasetID)(nil).CopyRef()) } func TestDatasetID_IDRef(t *testing.T) { id := New() - subId := DatasetIDFromRefID(&id) - - assert.Equal(t, id, 
*subId.IDRef()) + id2 := DatasetIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&DatasetID{}).IDRef()) + assert.Nil(t, (*DatasetID)(nil).IDRef()) } func TestDatasetID_StringRef(t *testing.T) { - id := New() - subId := DatasetIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewDatasetID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&DatasetID{}).StringRef()) + assert.Nil(t, (*DatasetID)(nil).StringRef()) } func TestDatasetID_MarhsalJSON(t *testing.T) { - id := New() - subId := DatasetIDFromRefID(&id) + id := NewDatasetID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&DatasetID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*DatasetID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestDatasetID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &DatasetID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustDatasetID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &DatasetID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestDatasetID_MarshalText(t *testing.T) { id := New() - subId := DatasetIDFromRefID(&id) + res, err := DatasetIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&DatasetID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*DatasetID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestDatasetID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &DatasetID{} - - err := 
subId.UnmarshalText(text) - + id2 := &DatasetID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestDatasetID_IsNil(t *testing.T) { - subId := DatasetID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *DatasetIDFromRefID(&id) + assert.True(t, DatasetID{}.IsNil()) + assert.False(t, NewDatasetID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestDatasetID_IsNilRef(t *testing.T) { + assert.True(t, DatasetID{}.Ref().IsNilRef()) + assert.True(t, (*DatasetID)(nil).IsNilRef()) + assert.False(t, NewDatasetID().Ref().IsNilRef()) } -func TestDatasetIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestDatasetIDsToStrings(t *testing.T) { + tests := []struct { name string input []DatasetID expected []string @@ -321,19 +307,17 @@ func TestDatasetIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, DatasetIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, DatasetIDsToStrings(tt.input)) }) } - } func TestDatasetIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestDatasetIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestDatasetIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := DatasetIDsFrom(tc.input) if tc.expected.err != nil { - _, 
err := DatasetIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := DatasetIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestDatasetIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []DatasetID @@ -449,25 +431,22 @@ func TestDatasetIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []DatasetID @@ -493,21 +472,18 @@ func TestDatasetIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []DatasetID expected []ID @@ -537,28 +513,25 @@ func TestDatasetIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, 
res) }) } } func TestDatasetIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustDatasetID(id1.String()) + id21 := MustDatasetID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustDatasetID(id2.String()) + id22 := MustDatasetID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustDatasetID(id3.String()) + id23 := MustDatasetID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*DatasetID expected []*ID @@ -570,39 +543,35 @@ func TestDatasetIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*DatasetID{&subId1}, + input: []*DatasetID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*DatasetID{&subId1, &subId2, &subId3}, + input: []*DatasetID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewDatasetIDSet(t *testing.T) { DatasetIdSet := NewDatasetIDSet() - assert.NotNil(t, DatasetIdSet) assert.Empty(t, DatasetIdSet.m) assert.Empty(t, DatasetIdSet.s) } func TestDatasetIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []DatasetID expected *DatasetIDSet @@ -663,24 +632,19 @@ func TestDatasetIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewDatasetIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestDatasetIDSet_AddRef(t *testing.T) { - t.Parallel() - - DatasetId := MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *DatasetID expected *DatasetIDSet @@ -695,7 +659,7 @@ func TestDatasetIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &DatasetId, + input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &DatasetIDSet{ m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestDatasetIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewDatasetIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestDatasetIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - DatasetIDSet - DatasetID - } + tests := []struct { + name string + target *DatasetIDSet + input DatasetID expected bool }{ { - name: "Empty Set", - input: struct { - DatasetIDSet - DatasetID - }{DatasetIDSet: DatasetIDSet{}, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &DatasetIDSet{}, + input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - DatasetIDSet - DatasetID - }{DatasetIDSet: DatasetIDSet{ + target: &DatasetIDSet{ m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - 
input: struct { - DatasetIDSet - DatasetID - }{DatasetIDSet: DatasetIDSet{ + target: &DatasetIDSet{ m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, DatasetID: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.DatasetIDSet.Has(tc.input.DatasetID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestDatasetIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input DatasetIDSet - expected DatasetIDSet + input *DatasetIDSet + expected *DatasetIDSet }{ { - name: "Empty Set", - input: DatasetIDSet{}, - expected: DatasetIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &DatasetIDSet{}, + expected: &DatasetIDSet{}, }, { - name: "Set Contains the element", - input: DatasetIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &DatasetIDSet{ m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: DatasetIDSet{ + expected: &DatasetIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestDatasetIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *DatasetIDSet expected []DatasetID }{ { - name: "Empty slice", 
+ name: "Empty", input: &DatasetIDSet{ m: map[DatasetID]struct{}{}, s: nil, }, expected: make([]DatasetID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &DatasetIDSet{ @@ -854,20 +808,17 @@ func TestDatasetIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestDatasetIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *DatasetIDSet expected *DatasetIDSet @@ -922,21 +873,19 @@ func TestDatasetIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestDatasetIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *DatasetIDSet @@ -944,6 +893,23 @@ func TestDatasetIDSet_Merge(t *testing.T) { } expected *DatasetIDSet }{ + { + name: "Nil Set", + input: struct { + a *DatasetIDSet + b *DatasetIDSet + }{ + a: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &DatasetIDSet{ + m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestDatasetIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, 
tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/dataset_schema_field_gen.go b/pkg/id/dataset_schema_field_gen.go index b26c073e0..c607fb680 100644 --- a/pkg/id/dataset_schema_field_gen.go +++ b/pkg/id/dataset_schema_field_gen.go @@ -44,7 +44,7 @@ func DatasetSchemaFieldIDFromRef(i *string) *DatasetSchemaFieldID { // DatasetSchemaFieldIDFromRefID generates a new DatasetSchemaFieldID from a ref of a generic ID. func DatasetSchemaFieldIDFromRefID(i *ID) *DatasetSchemaFieldID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := DatasetSchemaFieldID(*i) @@ -58,28 +58,40 @@ func (d DatasetSchemaFieldID) ID() ID { // String returns a string representation. func (d DatasetSchemaFieldID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d DatasetSchemaFieldID) GoString() string { - return "id.DatasetSchemaFieldID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d DatasetSchemaFieldID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d DatasetSchemaFieldID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d DatasetSchemaFieldID) GoString() string { + return "DatasetSchemaFieldID(" + d.String() + ")" } // Ref returns a reference. func (d DatasetSchemaFieldID) Ref() *DatasetSchemaFieldID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. 
func (d DatasetSchemaFieldID) Contains(ids []DatasetSchemaFieldID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d DatasetSchemaFieldID) Contains(ids []DatasetSchemaFieldID) bool { // CopyRef returns a copy of a reference. func (d *DatasetSchemaFieldID) CopyRef() *DatasetSchemaFieldID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *DatasetSchemaFieldID) CopyRef() *DatasetSchemaFieldID { // IDRef returns a reference of a domain id. func (d *DatasetSchemaFieldID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *DatasetSchemaFieldID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *DatasetSchemaFieldID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *DatasetSchemaFieldID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *DatasetSchemaFieldID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *DatasetSchemaFieldID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *DatasetSchemaFieldID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *DatasetSchemaFieldID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d DatasetSchemaFieldID) IsNil() bool { return ID(d).IsNil() } -// DatasetSchemaFieldIDToKeys converts IDs into a string slice. 
-func DatasetSchemaFieldIDToKeys(ids []DatasetSchemaFieldID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *DatasetSchemaFieldID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// DatasetSchemaFieldIDsToStrings converts IDs into a string slice. +func DatasetSchemaFieldIDsToStrings(ids []DatasetSchemaFieldID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // DatasetSchemaFieldIDsFrom converts a string slice into a ID slice. @@ -285,9 +305,6 @@ func (s *DatasetSchemaFieldIDSet) Clone() *DatasetSchemaFieldIDSet { // Merge returns a merged set func (s *DatasetSchemaFieldIDSet) Merge(s2 *DatasetSchemaFieldIDSet) *DatasetSchemaFieldIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/dataset_schema_field_gen_test.go b/pkg/id/dataset_schema_field_gen_test.go index bd1926f1f..9f4f20752 100644 --- a/pkg/id/dataset_schema_field_gen_test.go +++ b/pkg/id/dataset_schema_field_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewDatasetSchemaFieldID(t *testing.T) { id := NewDatasetSchemaFieldID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestDatasetSchemaFieldIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestDatasetSchemaFieldIDFrom(t *testing.T) { result DatasetSchemaFieldID err error }{ - DatasetSchemaFieldID{}, - ErrInvalidID, + result: DatasetSchemaFieldID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestDatasetSchemaFieldIDFrom(t *testing.T) { result DatasetSchemaFieldID err error }{ - 
DatasetSchemaFieldID{}, - ErrInvalidID, + result: DatasetSchemaFieldID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestDatasetSchemaFieldIDFrom(t *testing.T) { result DatasetSchemaFieldID err error }{ - DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := DatasetSchemaFieldIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := DatasetSchemaFieldIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustDatasetSchemaFieldID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustDatasetSchemaFieldID(t *testing.T) { expected: DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustDatasetSchemaFieldID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustDatasetSchemaFieldID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestDatasetSchemaFieldIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *DatasetSchemaFieldID @@ -139,159 +135,149 @@ func TestDatasetSchemaFieldIDFromRef(t *testing.T) { 
expected: &DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := DatasetSchemaFieldIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := DatasetSchemaFieldIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestDatasetSchemaFieldIDFromRefID(t *testing.T) { id := New() - - subId := DatasetSchemaFieldIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := DatasetSchemaFieldIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, DatasetSchemaFieldIDFromRefID(nil)) + assert.Nil(t, DatasetSchemaFieldIDFromRefID(&ID{})) } func TestDatasetSchemaFieldID_ID(t *testing.T) { id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := DatasetSchemaFieldIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestDatasetSchemaFieldID_String(t *testing.T) { id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := DatasetSchemaFieldIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", DatasetSchemaFieldID{}.String()) } -func TestDatasetSchemaFieldID_GoString(t *testing.T) { - id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.DatasetSchemaFieldID("+id.String()+")") +func TestDatasetSchemaFieldID_RefString(t *testing.T) { + id := NewDatasetSchemaFieldID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, DatasetSchemaFieldID{}.RefString()) } -func TestDatasetSchemaFieldID_RefString(t *testing.T) { +func TestDatasetSchemaFieldID_GoString(t *testing.T) { id := New() - subId := 
DatasetSchemaFieldIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := DatasetSchemaFieldIDFromRefID(&id) + assert.Equal(t, "DatasetSchemaFieldID("+id.String()+")", id2.GoString()) + assert.Equal(t, "DatasetSchemaFieldID()", DatasetSchemaFieldID{}.GoString()) } func TestDatasetSchemaFieldID_Ref(t *testing.T) { - id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewDatasetSchemaFieldID() + assert.Equal(t, DatasetSchemaFieldID(id), *id.Ref()) + assert.Nil(t, (&DatasetSchemaFieldID{}).Ref()) } func TestDatasetSchemaFieldID_Contains(t *testing.T) { id := NewDatasetSchemaFieldID() id2 := NewDatasetSchemaFieldID() assert.True(t, id.Contains([]DatasetSchemaFieldID{id, id2})) + assert.False(t, DatasetSchemaFieldID{}.Contains([]DatasetSchemaFieldID{id, id2, {}})) assert.False(t, id.Contains([]DatasetSchemaFieldID{id2})) } func TestDatasetSchemaFieldID_CopyRef(t *testing.T) { - id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewDatasetSchemaFieldID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*DatasetSchemaFieldID)(nil).CopyRef()) } func TestDatasetSchemaFieldID_IDRef(t *testing.T) { id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := DatasetSchemaFieldIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&DatasetSchemaFieldID{}).IDRef()) + assert.Nil(t, (*DatasetSchemaFieldID)(nil).IDRef()) } func TestDatasetSchemaFieldID_StringRef(t *testing.T) { - id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewDatasetSchemaFieldID() + assert.Equal(t, id.String(), *id.StringRef()) + 
assert.Nil(t, (&DatasetSchemaFieldID{}).StringRef()) + assert.Nil(t, (*DatasetSchemaFieldID)(nil).StringRef()) } func TestDatasetSchemaFieldID_MarhsalJSON(t *testing.T) { - id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) + id := NewDatasetSchemaFieldID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&DatasetSchemaFieldID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*DatasetSchemaFieldID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestDatasetSchemaFieldID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &DatasetSchemaFieldID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustDatasetSchemaFieldID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &DatasetSchemaFieldID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestDatasetSchemaFieldID_MarshalText(t *testing.T) { id := New() - subId := DatasetSchemaFieldIDFromRefID(&id) + res, err := DatasetSchemaFieldIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&DatasetSchemaFieldID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*DatasetSchemaFieldID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestDatasetSchemaFieldID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &DatasetSchemaFieldID{} - - err := subId.UnmarshalText(text) - + id2 := &DatasetSchemaFieldID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, 
"01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestDatasetSchemaFieldID_IsNil(t *testing.T) { - subId := DatasetSchemaFieldID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *DatasetSchemaFieldIDFromRefID(&id) + assert.True(t, DatasetSchemaFieldID{}.IsNil()) + assert.False(t, NewDatasetSchemaFieldID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestDatasetSchemaFieldID_IsNilRef(t *testing.T) { + assert.True(t, DatasetSchemaFieldID{}.Ref().IsNilRef()) + assert.True(t, (*DatasetSchemaFieldID)(nil).IsNilRef()) + assert.False(t, NewDatasetSchemaFieldID().Ref().IsNilRef()) } -func TestDatasetSchemaFieldIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestDatasetSchemaFieldIDsToStrings(t *testing.T) { + tests := []struct { name string input []DatasetSchemaFieldID expected []string @@ -321,19 +307,17 @@ func TestDatasetSchemaFieldIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, DatasetSchemaFieldIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, DatasetSchemaFieldIDsToStrings(tt.input)) }) } - } func TestDatasetSchemaFieldIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestDatasetSchemaFieldIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestDatasetSchemaFieldIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := DatasetSchemaFieldIDsFrom(tc.input) if 
tc.expected.err != nil { - _, err := DatasetSchemaFieldIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := DatasetSchemaFieldIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestDatasetSchemaFieldIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []DatasetSchemaFieldID @@ -449,25 +431,22 @@ func TestDatasetSchemaFieldIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaFieldIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetSchemaFieldIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []DatasetSchemaFieldID @@ -493,21 +472,18 @@ func TestDatasetSchemaFieldIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaFieldIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetSchemaFieldIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []DatasetSchemaFieldID expected []ID @@ -537,28 +513,25 @@ func TestDatasetSchemaFieldIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - 
tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaFieldIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetSchemaFieldIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustDatasetSchemaFieldID(id1.String()) + id21 := MustDatasetSchemaFieldID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustDatasetSchemaFieldID(id2.String()) + id22 := MustDatasetSchemaFieldID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustDatasetSchemaFieldID(id3.String()) + id23 := MustDatasetSchemaFieldID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*DatasetSchemaFieldID expected []*ID @@ -570,39 +543,35 @@ func TestDatasetSchemaFieldIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*DatasetSchemaFieldID{&subId1}, + input: []*DatasetSchemaFieldID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*DatasetSchemaFieldID{&subId1, &subId2, &subId3}, + input: []*DatasetSchemaFieldID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaFieldIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewDatasetSchemaFieldIDSet(t *testing.T) { DatasetSchemaFieldIdSet := NewDatasetSchemaFieldIDSet() - assert.NotNil(t, DatasetSchemaFieldIdSet) assert.Empty(t, DatasetSchemaFieldIdSet.m) assert.Empty(t, DatasetSchemaFieldIdSet.s) } func TestDatasetSchemaFieldIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []DatasetSchemaFieldID expected *DatasetSchemaFieldIDSet @@ -663,24 +632,19 @@ func 
TestDatasetSchemaFieldIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewDatasetSchemaFieldIDSet() set.Add(tc.input...) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestDatasetSchemaFieldIDSet_AddRef(t *testing.T) { - t.Parallel() - - DatasetSchemaFieldId := MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *DatasetSchemaFieldID expected *DatasetSchemaFieldIDSet @@ -695,7 +659,7 @@ func TestDatasetSchemaFieldIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &DatasetSchemaFieldId, + input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestDatasetSchemaFieldIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewDatasetSchemaFieldIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestDatasetSchemaFieldIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - DatasetSchemaFieldIDSet - DatasetSchemaFieldID - } + tests := []struct { + name string + target *DatasetSchemaFieldIDSet + input DatasetSchemaFieldID expected bool }{ { - name: "Empty Set", - input: struct { - DatasetSchemaFieldIDSet - DatasetSchemaFieldID - }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{}, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + 
name: "Empty Set", + target: &DatasetSchemaFieldIDSet{}, + input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - DatasetSchemaFieldIDSet - DatasetSchemaFieldID - }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{ + target: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - DatasetSchemaFieldIDSet - DatasetSchemaFieldID - }{DatasetSchemaFieldIDSet: DatasetSchemaFieldIDSet{ + target: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, DatasetSchemaFieldID: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.DatasetSchemaFieldIDSet.Has(tc.input.DatasetSchemaFieldID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestDatasetSchemaFieldIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input DatasetSchemaFieldIDSet - expected DatasetSchemaFieldIDSet + input *DatasetSchemaFieldIDSet + expected *DatasetSchemaFieldIDSet }{ { - name: "Empty Set", - input: DatasetSchemaFieldIDSet{}, - expected: DatasetSchemaFieldIDSet{ - m: nil, - s: nil, - }, + name: "Empty 
set", + input: &DatasetSchemaFieldIDSet{}, + expected: &DatasetSchemaFieldIDSet{}, }, { - name: "Set Contains the element", - input: DatasetSchemaFieldIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: DatasetSchemaFieldIDSet{ + expected: &DatasetSchemaFieldIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestDatasetSchemaFieldIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *DatasetSchemaFieldIDSet expected []DatasetSchemaFieldID }{ { - name: "Empty slice", + name: "Empty", input: &DatasetSchemaFieldIDSet{ m: map[DatasetSchemaFieldID]struct{}{}, s: nil, }, expected: make([]DatasetSchemaFieldID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &DatasetSchemaFieldIDSet{ @@ -854,20 +808,17 @@ func TestDatasetSchemaFieldIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestDatasetSchemaFieldIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *DatasetSchemaFieldIDSet expected *DatasetSchemaFieldIDSet @@ -922,21 +873,19 @@ func TestDatasetSchemaFieldIDSet_Clone(t 
*testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *DatasetSchemaFieldIDSet @@ -944,6 +893,23 @@ func TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { } expected *DatasetSchemaFieldIDSet }{ + { + name: "Nil Set", + input: struct { + a *DatasetSchemaFieldIDSet + b *DatasetSchemaFieldIDSet + }{ + a: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &DatasetSchemaFieldIDSet{ + m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/dataset_schema_gen.go b/pkg/id/dataset_schema_gen.go index 2c47eeb79..66efb0a3e 100644 --- a/pkg/id/dataset_schema_gen.go +++ b/pkg/id/dataset_schema_gen.go @@ -44,7 +44,7 @@ func DatasetSchemaIDFromRef(i *string) *DatasetSchemaID { // DatasetSchemaIDFromRefID generates a new DatasetSchemaID from a ref 
of a generic ID. func DatasetSchemaIDFromRefID(i *ID) *DatasetSchemaID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := DatasetSchemaID(*i) @@ -58,28 +58,40 @@ func (d DatasetSchemaID) ID() ID { // String returns a string representation. func (d DatasetSchemaID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d DatasetSchemaID) GoString() string { - return "id.DatasetSchemaID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d DatasetSchemaID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d DatasetSchemaID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d DatasetSchemaID) GoString() string { + return "DatasetSchemaID(" + d.String() + ")" } // Ref returns a reference. func (d DatasetSchemaID) Ref() *DatasetSchemaID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d DatasetSchemaID) Contains(ids []DatasetSchemaID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d DatasetSchemaID) Contains(ids []DatasetSchemaID) bool { // CopyRef returns a copy of a reference. func (d *DatasetSchemaID) CopyRef() *DatasetSchemaID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *DatasetSchemaID) CopyRef() *DatasetSchemaID { // IDRef returns a reference of a domain id. func (d *DatasetSchemaID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *DatasetSchemaID) IDRef() *ID { // StringRef returns a reference of a string representation. 
func (d *DatasetSchemaID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *DatasetSchemaID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *DatasetSchemaID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *DatasetSchemaID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *DatasetSchemaID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *DatasetSchemaID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d DatasetSchemaID) IsNil() bool { return ID(d).IsNil() } -// DatasetSchemaIDToKeys converts IDs into a string slice. -func DatasetSchemaIDToKeys(ids []DatasetSchemaID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *DatasetSchemaID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// DatasetSchemaIDsToStrings converts IDs into a string slice. +func DatasetSchemaIDsToStrings(ids []DatasetSchemaID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // DatasetSchemaIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *DatasetSchemaIDSet) Clone() *DatasetSchemaIDSet { // Merge returns a merged set func (s *DatasetSchemaIDSet) Merge(s2 *DatasetSchemaIDSet) *DatasetSchemaIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/dataset_schema_gen_test.go b/pkg/id/dataset_schema_gen_test.go index c3b8910b2..257c78f01 100644 --- a/pkg/id/dataset_schema_gen_test.go +++ b/pkg/id/dataset_schema_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewDatasetSchemaID(t *testing.T) { id := NewDatasetSchemaID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestDatasetSchemaIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestDatasetSchemaIDFrom(t *testing.T) { result DatasetSchemaID err error }{ - DatasetSchemaID{}, - ErrInvalidID, + result: DatasetSchemaID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestDatasetSchemaIDFrom(t *testing.T) { result DatasetSchemaID err error }{ - DatasetSchemaID{}, - ErrInvalidID, + result: DatasetSchemaID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestDatasetSchemaIDFrom(t *testing.T) { result DatasetSchemaID err error }{ - DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := DatasetSchemaIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err 
:= DatasetSchemaIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustDatasetSchemaID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustDatasetSchemaID(t *testing.T) { expected: DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustDatasetSchemaID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustDatasetSchemaID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestDatasetSchemaIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *DatasetSchemaID @@ -139,159 +135,149 @@ func TestDatasetSchemaIDFromRef(t *testing.T) { expected: &DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := DatasetSchemaIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := DatasetSchemaIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestDatasetSchemaIDFromRefID(t *testing.T) { id := New() - - subId := DatasetSchemaIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := DatasetSchemaIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, DatasetSchemaIDFromRefID(nil)) + 
assert.Nil(t, DatasetSchemaIDFromRefID(&ID{})) } func TestDatasetSchemaID_ID(t *testing.T) { id := New() - subId := DatasetSchemaIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := DatasetSchemaIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestDatasetSchemaID_String(t *testing.T) { id := New() - subId := DatasetSchemaIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := DatasetSchemaIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", DatasetSchemaID{}.String()) } -func TestDatasetSchemaID_GoString(t *testing.T) { - id := New() - subId := DatasetSchemaIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.DatasetSchemaID("+id.String()+")") +func TestDatasetSchemaID_RefString(t *testing.T) { + id := NewDatasetSchemaID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, DatasetSchemaID{}.RefString()) } -func TestDatasetSchemaID_RefString(t *testing.T) { +func TestDatasetSchemaID_GoString(t *testing.T) { id := New() - subId := DatasetSchemaIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := DatasetSchemaIDFromRefID(&id) + assert.Equal(t, "DatasetSchemaID("+id.String()+")", id2.GoString()) + assert.Equal(t, "DatasetSchemaID()", DatasetSchemaID{}.GoString()) } func TestDatasetSchemaID_Ref(t *testing.T) { - id := New() - subId := DatasetSchemaIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewDatasetSchemaID() + assert.Equal(t, DatasetSchemaID(id), *id.Ref()) + assert.Nil(t, (&DatasetSchemaID{}).Ref()) } func TestDatasetSchemaID_Contains(t *testing.T) { id := NewDatasetSchemaID() id2 := NewDatasetSchemaID() assert.True(t, id.Contains([]DatasetSchemaID{id, id2})) + assert.False(t, DatasetSchemaID{}.Contains([]DatasetSchemaID{id, id2, {}})) assert.False(t, id.Contains([]DatasetSchemaID{id2})) } func TestDatasetSchemaID_CopyRef(t *testing.T) 
{ - id := New() - subId := DatasetSchemaIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewDatasetSchemaID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*DatasetSchemaID)(nil).CopyRef()) } func TestDatasetSchemaID_IDRef(t *testing.T) { id := New() - subId := DatasetSchemaIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := DatasetSchemaIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&DatasetSchemaID{}).IDRef()) + assert.Nil(t, (*DatasetSchemaID)(nil).IDRef()) } func TestDatasetSchemaID_StringRef(t *testing.T) { - id := New() - subId := DatasetSchemaIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewDatasetSchemaID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&DatasetSchemaID{}).StringRef()) + assert.Nil(t, (*DatasetSchemaID)(nil).StringRef()) } func TestDatasetSchemaID_MarhsalJSON(t *testing.T) { - id := New() - subId := DatasetSchemaIDFromRefID(&id) + id := NewDatasetSchemaID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&DatasetSchemaID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*DatasetSchemaID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestDatasetSchemaID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &DatasetSchemaID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustDatasetSchemaID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &DatasetSchemaID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func 
TestDatasetSchemaID_MarshalText(t *testing.T) { id := New() - subId := DatasetSchemaIDFromRefID(&id) + res, err := DatasetSchemaIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&DatasetSchemaID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*DatasetSchemaID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestDatasetSchemaID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &DatasetSchemaID{} - - err := subId.UnmarshalText(text) - + id2 := &DatasetSchemaID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestDatasetSchemaID_IsNil(t *testing.T) { - subId := DatasetSchemaID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *DatasetSchemaIDFromRefID(&id) + assert.True(t, DatasetSchemaID{}.IsNil()) + assert.False(t, NewDatasetSchemaID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestDatasetSchemaID_IsNilRef(t *testing.T) { + assert.True(t, DatasetSchemaID{}.Ref().IsNilRef()) + assert.True(t, (*DatasetSchemaID)(nil).IsNilRef()) + assert.False(t, NewDatasetSchemaID().Ref().IsNilRef()) } -func TestDatasetSchemaIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestDatasetSchemaIDsToStrings(t *testing.T) { + tests := []struct { name string input []DatasetSchemaID expected []string @@ -321,19 +307,17 @@ func TestDatasetSchemaIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, DatasetSchemaIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, DatasetSchemaIDsToStrings(tt.input)) }) } - 
} func TestDatasetSchemaIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestDatasetSchemaIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestDatasetSchemaIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := DatasetSchemaIDsFrom(tc.input) if tc.expected.err != nil { - _, err := DatasetSchemaIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := DatasetSchemaIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestDatasetSchemaIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []DatasetSchemaID @@ -449,25 +431,22 @@ func TestDatasetSchemaIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetSchemaIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []DatasetSchemaID @@ -493,21 +472,18 @@ func TestDatasetSchemaIDsFromIDRef(t *testing.T) { 
}, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetSchemaIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []DatasetSchemaID expected []ID @@ -537,28 +513,25 @@ func TestDatasetSchemaIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := DatasetSchemaIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestDatasetSchemaIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustDatasetSchemaID(id1.String()) + id21 := MustDatasetSchemaID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustDatasetSchemaID(id2.String()) + id22 := MustDatasetSchemaID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustDatasetSchemaID(id3.String()) + id23 := MustDatasetSchemaID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*DatasetSchemaID expected []*ID @@ -570,39 +543,35 @@ func TestDatasetSchemaIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*DatasetSchemaID{&subId1}, + input: []*DatasetSchemaID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*DatasetSchemaID{&subId1, &subId2, &subId3}, + input: []*DatasetSchemaID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := 
DatasetSchemaIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewDatasetSchemaIDSet(t *testing.T) { DatasetSchemaIdSet := NewDatasetSchemaIDSet() - assert.NotNil(t, DatasetSchemaIdSet) assert.Empty(t, DatasetSchemaIdSet.m) assert.Empty(t, DatasetSchemaIdSet.s) } func TestDatasetSchemaIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []DatasetSchemaID expected *DatasetSchemaIDSet @@ -663,24 +632,19 @@ func TestDatasetSchemaIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewDatasetSchemaIDSet() set.Add(tc.input...) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestDatasetSchemaIDSet_AddRef(t *testing.T) { - t.Parallel() - - DatasetSchemaId := MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *DatasetSchemaID expected *DatasetSchemaIDSet @@ -695,7 +659,7 @@ func TestDatasetSchemaIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &DatasetSchemaId, + input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestDatasetSchemaIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewDatasetSchemaIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestDatasetSchemaIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := 
[]struct { - name string - input struct { - DatasetSchemaIDSet - DatasetSchemaID - } + tests := []struct { + name string + target *DatasetSchemaIDSet + input DatasetSchemaID expected bool }{ { - name: "Empty Set", - input: struct { - DatasetSchemaIDSet - DatasetSchemaID - }{DatasetSchemaIDSet: DatasetSchemaIDSet{}, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &DatasetSchemaIDSet{}, + input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - DatasetSchemaIDSet - DatasetSchemaID - }{DatasetSchemaIDSet: DatasetSchemaIDSet{ + target: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - DatasetSchemaIDSet - DatasetSchemaID - }{DatasetSchemaIDSet: DatasetSchemaIDSet{ + target: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, DatasetSchemaID: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.DatasetSchemaIDSet.Has(tc.input.DatasetSchemaID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestDatasetSchemaIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input DatasetSchemaIDSet - 
expected DatasetSchemaIDSet + input *DatasetSchemaIDSet + expected *DatasetSchemaIDSet }{ { - name: "Empty Set", - input: DatasetSchemaIDSet{}, - expected: DatasetSchemaIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &DatasetSchemaIDSet{}, + expected: &DatasetSchemaIDSet{}, }, { - name: "Set Contains the element", - input: DatasetSchemaIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: DatasetSchemaIDSet{ + expected: &DatasetSchemaIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestDatasetSchemaIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *DatasetSchemaIDSet expected []DatasetSchemaID }{ { - name: "Empty slice", + name: "Empty", input: &DatasetSchemaIDSet{ m: map[DatasetSchemaID]struct{}{}, s: nil, }, expected: make([]DatasetSchemaID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &DatasetSchemaIDSet{ @@ -854,20 +808,17 @@ func TestDatasetSchemaIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestDatasetSchemaIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input 
*DatasetSchemaIDSet expected *DatasetSchemaIDSet @@ -922,21 +873,19 @@ func TestDatasetSchemaIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestDatasetSchemaIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *DatasetSchemaIDSet @@ -944,6 +893,23 @@ func TestDatasetSchemaIDSet_Merge(t *testing.T) { } expected *DatasetSchemaIDSet }{ + { + name: "Nil Set", + input: struct { + a *DatasetSchemaIDSet + b *DatasetSchemaIDSet + }{ + a: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &DatasetSchemaIDSet{ + m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestDatasetSchemaIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/id.go b/pkg/id/id.go index 8645ad070..c821a30ed 100644 --- a/pkg/id/id.go +++ b/pkg/id/id.go @@ -53,7 +53,7 @@ func FromID(id string) (ID, error) { } func FromIDRef(id *string) *ID { - if id == nil { + if id == nil || *id == "" { return nil } parsedID, err := parseID(*id) diff 
--git a/pkg/id/id.tmpl b/pkg/id/id.tmpl index aad5207dd..f20661ee9 100644 --- a/pkg/id/id.tmpl +++ b/pkg/id/id.tmpl @@ -44,7 +44,7 @@ func {{$name}}IDFromRef(i *string) *{{$name}}ID { // {{$name}}IDFromRefID generates a new {{$name}}ID from a ref of a generic ID. func {{$name}}IDFromRefID(i *ID) *{{$name}}ID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := {{$name}}ID(*i) @@ -58,28 +58,40 @@ func (d {{$name}}ID) ID() ID { // String returns a string representation. func (d {{$name}}ID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d {{$name}}ID) GoString() string { - return "id.{{$name}}ID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d {{$name}}ID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d {{$name}}ID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d {{$name}}ID) GoString() string { + return "{{$name}}ID(" + d.String() + ")" } // Ref returns a reference. func (d {{$name}}ID) Ref() *{{$name}}ID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d {{$name}}ID) Contains(ids []{{$name}}ID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d {{$name}}ID) Contains(ids []{{$name}}ID) bool { // CopyRef returns a copy of a reference. func (d *{{$name}}ID) CopyRef() *{{$name}}ID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *{{$name}}ID) CopyRef() *{{$name}}ID { // IDRef returns a reference of a domain id. 
func (d *{{$name}}ID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *{{$name}}ID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *{{$name}}ID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *{{$name}}ID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *{{$name}}ID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *{{$name}}ID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *{{$name}}ID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *{{$name}}ID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d {{$name}}ID) IsNil() bool { return ID(d).IsNil() } -// {{$name}}IDToKeys converts IDs into a string slice. -func {{$name}}IDToKeys(ids []{{$name}}ID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *{{$name}}ID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// {{$name}}IDsToStrings converts IDs into a string slice. +func {{$name}}IDsToStrings(ids []{{$name}}ID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // {{$name}}IDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *{{$name}}IDSet) Clone() *{{$name}}IDSet { // Merge returns a merged set func (s *{{$name}}IDSet) Merge(s2 *{{$name}}IDSet) *{{$name}}IDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/id_test.go b/pkg/id/id_test.go index 2becc4dc1..0b90dada5 100644 --- a/pkg/id/id_test.go +++ b/pkg/id/id_test.go @@ -331,13 +331,11 @@ func TestID_generateAllID(t *testing.T) { func TestID_parseID(t *testing.T) { _, err := parseID("") - - assert.True(t, errors.As(ErrInvalidID, &err)) + assert.Error(t, err) id, err := parseID("01f2r7kg1fvvffp0gmexgy5hxy") - assert.Nil(t, err) - assert.EqualValues(t, strings.ToLower(id.String()), "01f2r7kg1fvvffp0gmexgy5hxy") + assert.Equal(t, strings.ToLower(id.String()), "01f2r7kg1fvvffp0gmexgy5hxy") } func TestID_includeUpperCase(t *testing.T) { diff --git a/pkg/id/id_test.tmpl b/pkg/id/id_test.tmpl index c8f57ec65..6778786a9 100644 --- a/pkg/id/id_test.tmpl +++ b/pkg/id/id_test.tmpl @@ -12,19 +12,16 @@ import ( "github.com/stretchr/testify/assert" ) - func TestNew{{$name}}ID(t *testing.T) { id := New{{$name}}ID() - assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) - assert.Nil(t, err) + assert.NotNil(t, id) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) + assert.Nil(t, err) } func Test{{$name}}IDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -39,8 +36,8 @@ func Test{{$name}}IDFrom(t *testing.T) { result {{$name}}ID err error }{ - {{$name}}ID{}, - ErrInvalidID, + result: {{$name}}ID{}, + err: ErrInvalidID, }, }, { @@ -50,8 +47,8 @@ func Test{{$name}}IDFrom(t *testing.T) { result {{$name}}ID err error }{ - {{$name}}ID{}, - ErrInvalidID, + result: {{$name}}ID{}, + err: ErrInvalidID, }, }, { @@ -61,27 +58,26 @@ func Test{{$name}}IDFrom(t *testing.T) { result {{$name}}ID err error }{ - {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - 
nil, + result: {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := {{$name}}IDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := {{$name}}IDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMust{{$name}}ID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -104,23 +100,23 @@ func TestMust{{$name}}ID(t *testing.T) { expected: {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := Must{{$name}}ID(tc.input) - assert.Equal(tt, tc.expected, result) + result := Must{{$name}}ID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func Test{{$name}}IDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *{{$name}}ID @@ -141,159 +137,149 @@ func Test{{$name}}IDFromRef(t *testing.T) { expected: &{{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := {{$name}}IDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, 
func(t *testing.T) { + t.Parallel() + result := {{$name}}IDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func Test{{$name}}IDFromRefID(t *testing.T) { id := New() - - subId := {{$name}}IDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := {{$name}}IDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, {{$name}}IDFromRefID(nil)) + assert.Nil(t, {{$name}}IDFromRefID(&ID{})) } func Test{{$name}}ID_ID(t *testing.T) { id := New() - subId := {{$name}}IDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := {{$name}}IDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func Test{{$name}}ID_String(t *testing.T) { id := New() - subId := {{$name}}IDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := {{$name}}IDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", {{$name}}ID{}.String()) } -func Test{{$name}}ID_GoString(t *testing.T) { - id := New() - subId := {{$name}}IDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.{{$name}}ID(" + id.String() + ")") +func Test{{$name}}ID_RefString(t *testing.T) { + id := New{{$name}}ID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, {{$name}}ID{}.RefString()) } -func Test{{$name}}ID_RefString(t *testing.T) { +func Test{{$name}}ID_GoString(t *testing.T) { id := New() - subId := {{$name}}IDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := {{$name}}IDFromRefID(&id) + assert.Equal(t, "{{$name}}ID(" + id.String() + ")", id2.GoString()) + assert.Equal(t, "{{$name}}ID()", {{$name}}ID{}.GoString()) } func Test{{$name}}ID_Ref(t *testing.T) { - id := New() - subId := {{$name}}IDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := New{{$name}}ID() + assert.Equal(t, {{$name}}ID(id), *id.Ref()) + assert.Nil(t, (&{{$name}}ID{}).Ref()) } func Test{{$name}}ID_Contains(t 
*testing.T) { id := New{{$name}}ID() id2 := New{{$name}}ID() assert.True(t, id.Contains([]{{$name}}ID{id, id2})) + assert.False(t, {{$name}}ID{}.Contains([]{{$name}}ID{id, id2, {}})) assert.False(t, id.Contains([]{{$name}}ID{id2})) } func Test{{$name}}ID_CopyRef(t *testing.T) { - id := New() - subId := {{$name}}IDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := New{{$name}}ID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*{{$name}}ID)(nil).CopyRef()) } func Test{{$name}}ID_IDRef(t *testing.T) { id := New() - subId := {{$name}}IDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := {{$name}}IDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&{{$name}}ID{}).IDRef()) + assert.Nil(t, (*{{$name}}ID)(nil).IDRef()) } func Test{{$name}}ID_StringRef(t *testing.T) { - id := New() - subId := {{$name}}IDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := New{{$name}}ID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&{{$name}}ID{}).StringRef()) + assert.Nil(t, (*{{$name}}ID)(nil).StringRef()) } func Test{{$name}}ID_MarhsalJSON(t *testing.T) { - id := New() - subId := {{$name}}IDFromRefID(&id) + id := New{{$name}}ID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&{{$name}}ID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*{{$name}}ID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func Test{{$name}}ID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &{{$name}}ID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := Must{{$name}}ID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &{{$name}}ID{} 
+ err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func Test{{$name}}ID_MarshalText(t *testing.T) { id := New() - subId := {{$name}}IDFromRefID(&id) + res, err := {{$name}}IDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&{{$name}}ID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*{{$name}}ID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func Test{{$name}}ID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &{{$name}}ID{} - - err := subId.UnmarshalText(text) - + id2 := &{{$name}}ID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func Test{{$name}}ID_IsNil(t *testing.T) { - subId := {{$name}}ID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *{{$name}}IDFromRefID(&id) + assert.True(t, {{$name}}ID{}.IsNil()) + assert.False(t, New{{$name}}ID().IsNil()) +} - assert.False(t, subId.IsNil()) +func Test{{$name}}ID_IsNilRef(t *testing.T) { + assert.True(t, {{$name}}ID{}.Ref().IsNilRef()) + assert.True(t, (*{{$name}}ID)(nil).IsNilRef()) + assert.False(t, New{{$name}}ID().Ref().IsNilRef()) } -func Test{{$name}}IDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func Test{{$name}}IDsToStrings(t *testing.T) { + tests := []struct { name string input []{{$name}}ID expected []string @@ -323,19 +309,17 @@ func Test{{$name}}IDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, {{$name}}IDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + 
t.Parallel() + assert.Equal(t, tt.expected, {{$name}}IDsToStrings(tt.input)) }) } - } func Test{{$name}}IDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -344,7 +328,7 @@ func Test{{$name}}IDsFrom(t *testing.T) { } }{ { - name: "Empty slice", + name: "Empty slice", input: make([]string, 0), expected: struct { res []{{$name}}ID @@ -355,7 +339,7 @@ func Test{{$name}}IDsFrom(t *testing.T) { }, }, { - name: "1 element", + name: "1 element", input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, expected: struct { res []{{$name}}ID @@ -385,10 +369,10 @@ func Test{{$name}}IDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -401,27 +385,25 @@ func Test{{$name}}IDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := {{$name}}IDsFrom(tc.input) if tc.expected.err != nil { - _, err := {{$name}}IDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := {{$name}}IDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func Test{{$name}}IDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []{{$name}}ID @@ -451,25 +433,22 @@ func Test{{$name}}IDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := {{$name}}IDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + 
assert.Equal(t, tc.expected, res) }) } } func Test{{$name}}IDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []{{$name}}ID @@ -495,21 +474,18 @@ func Test{{$name}}IDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := {{$name}}IDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func Test{{$name}}IDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []{{$name}}ID expected []ID @@ -539,28 +515,25 @@ func Test{{$name}}IDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := {{$name}}IDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func Test{{$name}}IDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := Must{{$name}}ID(id1.String()) + id21 := Must{{$name}}ID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := Must{{$name}}ID(id2.String()) + id22 := Must{{$name}}ID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := Must{{$name}}ID(id3.String()) + id23 := Must{{$name}}ID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*{{$name}}ID expected []*ID @@ -572,54 +545,50 @@ func Test{{$name}}IDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*{{$name}}ID{&subId1}, + input: []*{{$name}}ID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: 
[]*{{$name}}ID{&subId1, &subId2, &subId3}, + input: []*{{$name}}ID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := {{$name}}IDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNew{{$name}}IDSet(t *testing.T) { {{$name}}IdSet := New{{$name}}IDSet() - assert.NotNil(t, {{$name}}IdSet) assert.Empty(t, {{$name}}IdSet.m) assert.Empty(t, {{$name}}IdSet.s) } func Test{{$name}}IDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []{{$name}}ID expected *{{$name}}IDSet }{ { - name: "Empty slice", - input: make([]{{$name}}ID, 0), + name: "Empty slice", + input: make([]{{$name}}ID, 0), expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{}, s: nil, }, }, { - name: "1 element", - input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "1 element", + input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -646,7 +615,7 @@ func Test{{$name}}IDSet_Add(t *testing.T) { }, }, { - name: "multiple elements with duplication", + name: "multiple elements with duplication", input: []{{$name}}ID{ Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), @@ -665,31 +634,26 @@ func Test{{$name}}IDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := New{{$name}}IDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func Test{{$name}}IDSet_AddRef(t *testing.T) { - t.Parallel() - - {{$name}}Id := Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *{{$name}}ID expected *{{$name}}IDSet }{ { - name: "Empty slice", - input: nil, + name: "Empty slice", + input: nil, expected: &{{$name}}IDSet{ m: nil, s: nil, @@ -697,7 +661,7 @@ func Test{{$name}}IDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &{{$name}}Id, + input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -705,127 +669,117 @@ func Test{{$name}}IDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := New{{$name}}IDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func Test{{$name}}IDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input struct { - {{$name}}IDSet - {{$name}}ID - } + target *{{$name}}IDSet + input {{$name}}ID expected bool }{ { - name: "Empty Set", - input: struct { - {{$name}}IDSet - {{$name}}ID - }{ {{$name}}IDSet: {{$name}}IDSet{}, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &{{$name}}IDSet{}, + input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { - name: "Set Contains the element", - input: struct { - {{$name}}IDSet - {{$name}}ID - }{ {{$name}}IDSet: {{$name}}IDSet{ + name: "Set Contains the element", + target: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: 
[]{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - {{$name}}IDSet - {{$name}}ID - }{ {{$name}}IDSet: {{$name}}IDSet{ + target: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, {{$name}}ID: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.{{$name}}IDSet.Has(tc.input.{{$name}}ID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func Test{{$name}}IDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input {{$name}}IDSet - expected {{$name}}IDSet + input *{{$name}}IDSet + expected *{{$name}}IDSet }{ { - name: "Empty Set", - input: {{$name}}IDSet{}, - expected: {{$name}}IDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &{{$name}}IDSet{}, + expected: &{{$name}}IDSet{}, }, { - name: "Set Contains the element", - input: {{$name}}IDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: {{$name}}IDSet{ + expected: &{{$name}}IDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, 
tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func Test{{$name}}IDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *{{$name}}IDSet expected []{{$name}}ID }{ { - name: "Empty slice", + name: "Empty", input: &{{$name}}IDSet{ m: map[{{$name}}ID]struct{}{}, s: nil, }, expected: make([]{{$name}}ID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &{{$name}}IDSet{ @@ -856,20 +810,17 @@ func Test{{$name}}IDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func Test{{$name}}IDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *{{$name}}IDSet expected *{{$name}}IDSet @@ -924,21 +875,19 @@ func Test{{$name}}IDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func Test{{$name}}IDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *{{$name}}IDSet @@ -946,6 +895,23 @@ func Test{{$name}}IDSet_Merge(t *testing.T) { } expected *{{$name}}IDSet }{ + { + name: "Nil Set", + input: struct { + a *{{$name}}IDSet + b *{{$name}}IDSet + }{ + a: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: 
[]{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &{{$name}}IDSet{ + m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1002,12 +968,11 @@ func Test{{$name}}IDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/infobox_field_gen.go b/pkg/id/infobox_field_gen.go index 40758876a..fed9630c2 100644 --- a/pkg/id/infobox_field_gen.go +++ b/pkg/id/infobox_field_gen.go @@ -44,7 +44,7 @@ func InfoboxFieldIDFromRef(i *string) *InfoboxFieldID { // InfoboxFieldIDFromRefID generates a new InfoboxFieldID from a ref of a generic ID. func InfoboxFieldIDFromRefID(i *ID) *InfoboxFieldID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := InfoboxFieldID(*i) @@ -58,28 +58,40 @@ func (d InfoboxFieldID) ID() ID { // String returns a string representation. func (d InfoboxFieldID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d InfoboxFieldID) GoString() string { - return "id.InfoboxFieldID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d InfoboxFieldID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d InfoboxFieldID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d InfoboxFieldID) GoString() string { + return "InfoboxFieldID(" + d.String() + ")" } // Ref returns a reference. 
func (d InfoboxFieldID) Ref() *InfoboxFieldID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d InfoboxFieldID) Contains(ids []InfoboxFieldID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d InfoboxFieldID) Contains(ids []InfoboxFieldID) bool { // CopyRef returns a copy of a reference. func (d *InfoboxFieldID) CopyRef() *InfoboxFieldID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *InfoboxFieldID) CopyRef() *InfoboxFieldID { // IDRef returns a reference of a domain id. func (d *InfoboxFieldID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *InfoboxFieldID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *InfoboxFieldID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *InfoboxFieldID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *InfoboxFieldID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *InfoboxFieldID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *InfoboxFieldID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *InfoboxFieldID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d InfoboxFieldID) IsNil() bool { return ID(d).IsNil() } -// InfoboxFieldIDToKeys converts IDs into a string slice. 
-func InfoboxFieldIDToKeys(ids []InfoboxFieldID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *InfoboxFieldID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// InfoboxFieldIDsToStrings converts IDs into a string slice. +func InfoboxFieldIDsToStrings(ids []InfoboxFieldID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // InfoboxFieldIDsFrom converts a string slice into a ID slice. @@ -285,9 +305,6 @@ func (s *InfoboxFieldIDSet) Clone() *InfoboxFieldIDSet { // Merge returns a merged set func (s *InfoboxFieldIDSet) Merge(s2 *InfoboxFieldIDSet) *InfoboxFieldIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/infobox_field_gen_test.go b/pkg/id/infobox_field_gen_test.go index 8c1fd0748..3f2050c9b 100644 --- a/pkg/id/infobox_field_gen_test.go +++ b/pkg/id/infobox_field_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewInfoboxFieldID(t *testing.T) { id := NewInfoboxFieldID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestInfoboxFieldIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestInfoboxFieldIDFrom(t *testing.T) { result InfoboxFieldID err error }{ - InfoboxFieldID{}, - ErrInvalidID, + result: InfoboxFieldID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestInfoboxFieldIDFrom(t *testing.T) { result InfoboxFieldID err error }{ - InfoboxFieldID{}, - ErrInvalidID, + result: InfoboxFieldID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestInfoboxFieldIDFrom(t *testing.T) { 
result InfoboxFieldID err error }{ - InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := InfoboxFieldIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := InfoboxFieldIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustInfoboxFieldID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustInfoboxFieldID(t *testing.T) { expected: InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustInfoboxFieldID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustInfoboxFieldID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestInfoboxFieldIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *InfoboxFieldID @@ -139,159 +135,149 @@ func TestInfoboxFieldIDFromRef(t *testing.T) { expected: &InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := InfoboxFieldIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, 
result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := InfoboxFieldIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestInfoboxFieldIDFromRefID(t *testing.T) { id := New() - - subId := InfoboxFieldIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := InfoboxFieldIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, InfoboxFieldIDFromRefID(nil)) + assert.Nil(t, InfoboxFieldIDFromRefID(&ID{})) } func TestInfoboxFieldID_ID(t *testing.T) { id := New() - subId := InfoboxFieldIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := InfoboxFieldIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestInfoboxFieldID_String(t *testing.T) { id := New() - subId := InfoboxFieldIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := InfoboxFieldIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", InfoboxFieldID{}.String()) } -func TestInfoboxFieldID_GoString(t *testing.T) { - id := New() - subId := InfoboxFieldIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.InfoboxFieldID("+id.String()+")") +func TestInfoboxFieldID_RefString(t *testing.T) { + id := NewInfoboxFieldID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, InfoboxFieldID{}.RefString()) } -func TestInfoboxFieldID_RefString(t *testing.T) { +func TestInfoboxFieldID_GoString(t *testing.T) { id := New() - subId := InfoboxFieldIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := InfoboxFieldIDFromRefID(&id) + assert.Equal(t, "InfoboxFieldID("+id.String()+")", id2.GoString()) + assert.Equal(t, "InfoboxFieldID()", InfoboxFieldID{}.GoString()) } func TestInfoboxFieldID_Ref(t *testing.T) { - id := New() - subId := InfoboxFieldIDFromRefID(&id) - - 
subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewInfoboxFieldID() + assert.Equal(t, InfoboxFieldID(id), *id.Ref()) + assert.Nil(t, (&InfoboxFieldID{}).Ref()) } func TestInfoboxFieldID_Contains(t *testing.T) { id := NewInfoboxFieldID() id2 := NewInfoboxFieldID() assert.True(t, id.Contains([]InfoboxFieldID{id, id2})) + assert.False(t, InfoboxFieldID{}.Contains([]InfoboxFieldID{id, id2, {}})) assert.False(t, id.Contains([]InfoboxFieldID{id2})) } func TestInfoboxFieldID_CopyRef(t *testing.T) { - id := New() - subId := InfoboxFieldIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewInfoboxFieldID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*InfoboxFieldID)(nil).CopyRef()) } func TestInfoboxFieldID_IDRef(t *testing.T) { id := New() - subId := InfoboxFieldIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := InfoboxFieldIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&InfoboxFieldID{}).IDRef()) + assert.Nil(t, (*InfoboxFieldID)(nil).IDRef()) } func TestInfoboxFieldID_StringRef(t *testing.T) { - id := New() - subId := InfoboxFieldIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewInfoboxFieldID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&InfoboxFieldID{}).StringRef()) + assert.Nil(t, (*InfoboxFieldID)(nil).StringRef()) } func TestInfoboxFieldID_MarhsalJSON(t *testing.T) { - id := New() - subId := InfoboxFieldIDFromRefID(&id) + id := NewInfoboxFieldID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&InfoboxFieldID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*InfoboxFieldID)(nil).MarhsalJSON() assert.Nil(t, err) - 
assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestInfoboxFieldID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &InfoboxFieldID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustInfoboxFieldID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &InfoboxFieldID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestInfoboxFieldID_MarshalText(t *testing.T) { id := New() - subId := InfoboxFieldIDFromRefID(&id) + res, err := InfoboxFieldIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&InfoboxFieldID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*InfoboxFieldID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestInfoboxFieldID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &InfoboxFieldID{} - - err := subId.UnmarshalText(text) - + id2 := &InfoboxFieldID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestInfoboxFieldID_IsNil(t *testing.T) { - subId := InfoboxFieldID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *InfoboxFieldIDFromRefID(&id) + assert.True(t, InfoboxFieldID{}.IsNil()) + assert.False(t, NewInfoboxFieldID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestInfoboxFieldID_IsNilRef(t *testing.T) { + assert.True(t, InfoboxFieldID{}.Ref().IsNilRef()) + assert.True(t, (*InfoboxFieldID)(nil).IsNilRef()) + assert.False(t, NewInfoboxFieldID().Ref().IsNilRef()) } -func TestInfoboxFieldIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestInfoboxFieldIDsToStrings(t *testing.T) { + tests := 
[]struct { name string input []InfoboxFieldID expected []string @@ -321,19 +307,17 @@ func TestInfoboxFieldIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, InfoboxFieldIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, InfoboxFieldIDsToStrings(tt.input)) }) } - } func TestInfoboxFieldIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestInfoboxFieldIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestInfoboxFieldIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := InfoboxFieldIDsFrom(tc.input) if tc.expected.err != nil { - _, err := InfoboxFieldIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := InfoboxFieldIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestInfoboxFieldIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []InfoboxFieldID @@ -449,25 +431,22 @@ func TestInfoboxFieldIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := InfoboxFieldIDsFromID(tc.input) - 
assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestInfoboxFieldIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []InfoboxFieldID @@ -493,21 +472,18 @@ func TestInfoboxFieldIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := InfoboxFieldIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestInfoboxFieldIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []InfoboxFieldID expected []ID @@ -537,28 +513,25 @@ func TestInfoboxFieldIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := InfoboxFieldIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestInfoboxFieldIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustInfoboxFieldID(id1.String()) + id21 := MustInfoboxFieldID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustInfoboxFieldID(id2.String()) + id22 := MustInfoboxFieldID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustInfoboxFieldID(id3.String()) + id23 := MustInfoboxFieldID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*InfoboxFieldID expected []*ID @@ -570,39 +543,35 @@ func TestInfoboxFieldIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*InfoboxFieldID{&subId1}, + input: 
[]*InfoboxFieldID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*InfoboxFieldID{&subId1, &subId2, &subId3}, + input: []*InfoboxFieldID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := InfoboxFieldIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewInfoboxFieldIDSet(t *testing.T) { InfoboxFieldIdSet := NewInfoboxFieldIDSet() - assert.NotNil(t, InfoboxFieldIdSet) assert.Empty(t, InfoboxFieldIdSet.m) assert.Empty(t, InfoboxFieldIdSet.s) } func TestInfoboxFieldIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []InfoboxFieldID expected *InfoboxFieldIDSet @@ -663,24 +632,19 @@ func TestInfoboxFieldIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewInfoboxFieldIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestInfoboxFieldIDSet_AddRef(t *testing.T) { - t.Parallel() - - InfoboxFieldId := MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *InfoboxFieldID expected *InfoboxFieldIDSet @@ -695,7 +659,7 @@ func TestInfoboxFieldIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &InfoboxFieldId, + input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestInfoboxFieldIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewInfoboxFieldIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestInfoboxFieldIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - InfoboxFieldIDSet - InfoboxFieldID - } + tests := []struct { + name string + target *InfoboxFieldIDSet + input InfoboxFieldID expected bool }{ { - name: "Empty Set", - input: struct { - InfoboxFieldIDSet - InfoboxFieldID - }{InfoboxFieldIDSet: InfoboxFieldIDSet{}, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &InfoboxFieldIDSet{}, + input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - InfoboxFieldIDSet - InfoboxFieldID - }{InfoboxFieldIDSet: InfoboxFieldIDSet{ + target: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - 
}, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - InfoboxFieldIDSet - InfoboxFieldID - }{InfoboxFieldIDSet: InfoboxFieldIDSet{ + target: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, InfoboxFieldID: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.InfoboxFieldIDSet.Has(tc.input.InfoboxFieldID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestInfoboxFieldIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input InfoboxFieldIDSet - expected InfoboxFieldIDSet + input *InfoboxFieldIDSet + expected *InfoboxFieldIDSet }{ { - name: "Empty Set", - input: InfoboxFieldIDSet{}, - expected: InfoboxFieldIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &InfoboxFieldIDSet{}, + expected: &InfoboxFieldIDSet{}, }, { - name: "Set Contains the element", - input: InfoboxFieldIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: InfoboxFieldIDSet{ + expected: &InfoboxFieldIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set 
- p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestInfoboxFieldIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *InfoboxFieldIDSet expected []InfoboxFieldID }{ { - name: "Empty slice", + name: "Empty", input: &InfoboxFieldIDSet{ m: map[InfoboxFieldID]struct{}{}, s: nil, }, expected: make([]InfoboxFieldID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &InfoboxFieldIDSet{ @@ -854,20 +808,17 @@ func TestInfoboxFieldIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestInfoboxFieldIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *InfoboxFieldIDSet expected *InfoboxFieldIDSet @@ -922,21 +873,19 @@ func TestInfoboxFieldIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestInfoboxFieldIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *InfoboxFieldIDSet @@ -944,6 +893,23 @@ func TestInfoboxFieldIDSet_Merge(t *testing.T) { } expected *InfoboxFieldIDSet }{ + { + name: "Nil Set", + input: struct { + a *InfoboxFieldIDSet + b *InfoboxFieldIDSet + }{ + a: &InfoboxFieldIDSet{ + m: 
map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &InfoboxFieldIDSet{ + m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestInfoboxFieldIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/layer_gen.go b/pkg/id/layer_gen.go index d49abdcff..1c22b32de 100644 --- a/pkg/id/layer_gen.go +++ b/pkg/id/layer_gen.go @@ -44,7 +44,7 @@ func LayerIDFromRef(i *string) *LayerID { // LayerIDFromRefID generates a new LayerID from a ref of a generic ID. func LayerIDFromRefID(i *ID) *LayerID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := LayerID(*i) @@ -58,28 +58,40 @@ func (d LayerID) ID() ID { // String returns a string representation. func (d LayerID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d LayerID) GoString() string { - return "id.LayerID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d LayerID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d LayerID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d LayerID) GoString() string { + return "LayerID(" + d.String() + ")" } // Ref returns a reference. 
func (d LayerID) Ref() *LayerID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d LayerID) Contains(ids []LayerID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d LayerID) Contains(ids []LayerID) bool { // CopyRef returns a copy of a reference. func (d *LayerID) CopyRef() *LayerID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *LayerID) CopyRef() *LayerID { // IDRef returns a reference of a domain id. func (d *LayerID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *LayerID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *LayerID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *LayerID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *LayerID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *LayerID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *LayerID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *LayerID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d LayerID) IsNil() bool { return ID(d).IsNil() } -// LayerIDToKeys converts IDs into a string slice. -func LayerIDToKeys(ids []LayerID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *LayerID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// LayerIDsToStrings converts IDs into a string slice. 
+func LayerIDsToStrings(ids []LayerID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // LayerIDsFrom converts a string slice into a ID slice. @@ -285,9 +305,6 @@ func (s *LayerIDSet) Clone() *LayerIDSet { // Merge returns a merged set func (s *LayerIDSet) Merge(s2 *LayerIDSet) *LayerIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/layer_gen_test.go b/pkg/id/layer_gen_test.go index bfc9218e3..c68fd3fb3 100644 --- a/pkg/id/layer_gen_test.go +++ b/pkg/id/layer_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewLayerID(t *testing.T) { id := NewLayerID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestLayerIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestLayerIDFrom(t *testing.T) { result LayerID err error }{ - LayerID{}, - ErrInvalidID, + result: LayerID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestLayerIDFrom(t *testing.T) { result LayerID err error }{ - LayerID{}, - ErrInvalidID, + result: LayerID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestLayerIDFrom(t *testing.T) { result LayerID err error }{ - LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := LayerIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + 
t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := LayerIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustLayerID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustLayerID(t *testing.T) { expected: LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustLayerID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustLayerID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestLayerIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *LayerID @@ -139,159 +135,149 @@ func TestLayerIDFromRef(t *testing.T) { expected: &LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := LayerIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := LayerIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestLayerIDFromRefID(t *testing.T) { id := New() - - subId := LayerIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := LayerIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, LayerIDFromRefID(nil)) + assert.Nil(t, LayerIDFromRefID(&ID{})) } func TestLayerID_ID(t 
*testing.T) { id := New() - subId := LayerIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := LayerIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestLayerID_String(t *testing.T) { id := New() - subId := LayerIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := LayerIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", LayerID{}.String()) } -func TestLayerID_GoString(t *testing.T) { - id := New() - subId := LayerIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.LayerID("+id.String()+")") +func TestLayerID_RefString(t *testing.T) { + id := NewLayerID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, LayerID{}.RefString()) } -func TestLayerID_RefString(t *testing.T) { +func TestLayerID_GoString(t *testing.T) { id := New() - subId := LayerIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := LayerIDFromRefID(&id) + assert.Equal(t, "LayerID("+id.String()+")", id2.GoString()) + assert.Equal(t, "LayerID()", LayerID{}.GoString()) } func TestLayerID_Ref(t *testing.T) { - id := New() - subId := LayerIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewLayerID() + assert.Equal(t, LayerID(id), *id.Ref()) + assert.Nil(t, (&LayerID{}).Ref()) } func TestLayerID_Contains(t *testing.T) { id := NewLayerID() id2 := NewLayerID() assert.True(t, id.Contains([]LayerID{id, id2})) + assert.False(t, LayerID{}.Contains([]LayerID{id, id2, {}})) assert.False(t, id.Contains([]LayerID{id2})) } func TestLayerID_CopyRef(t *testing.T) { - id := New() - subId := LayerIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewLayerID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*LayerID)(nil).CopyRef()) } func 
TestLayerID_IDRef(t *testing.T) { id := New() - subId := LayerIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := LayerIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&LayerID{}).IDRef()) + assert.Nil(t, (*LayerID)(nil).IDRef()) } func TestLayerID_StringRef(t *testing.T) { - id := New() - subId := LayerIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewLayerID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&LayerID{}).StringRef()) + assert.Nil(t, (*LayerID)(nil).StringRef()) } func TestLayerID_MarhsalJSON(t *testing.T) { - id := New() - subId := LayerIDFromRefID(&id) + id := NewLayerID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&LayerID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*LayerID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestLayerID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &LayerID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustLayerID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &LayerID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestLayerID_MarshalText(t *testing.T) { id := New() - subId := LayerIDFromRefID(&id) + res, err := LayerIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&LayerID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*LayerID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestLayerID_UnmarshalText(t *testing.T) { text := 
[]byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &LayerID{} - - err := subId.UnmarshalText(text) - + id2 := &LayerID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestLayerID_IsNil(t *testing.T) { - subId := LayerID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *LayerIDFromRefID(&id) + assert.True(t, LayerID{}.IsNil()) + assert.False(t, NewLayerID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestLayerID_IsNilRef(t *testing.T) { + assert.True(t, LayerID{}.Ref().IsNilRef()) + assert.True(t, (*LayerID)(nil).IsNilRef()) + assert.False(t, NewLayerID().Ref().IsNilRef()) } -func TestLayerIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestLayerIDsToStrings(t *testing.T) { + tests := []struct { name string input []LayerID expected []string @@ -321,19 +307,17 @@ func TestLayerIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, LayerIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, LayerIDsToStrings(tt.input)) }) } - } func TestLayerIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestLayerIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestLayerIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := LayerIDsFrom(tc.input) 
if tc.expected.err != nil { - _, err := LayerIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := LayerIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestLayerIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []LayerID @@ -449,25 +431,22 @@ func TestLayerIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := LayerIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestLayerIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []LayerID @@ -493,21 +472,18 @@ func TestLayerIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := LayerIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestLayerIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []LayerID expected []ID @@ -537,28 +513,25 @@ func TestLayerIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := LayerIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, 
tc.expected, res) }) } } func TestLayerIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustLayerID(id1.String()) + id21 := MustLayerID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustLayerID(id2.String()) + id22 := MustLayerID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustLayerID(id3.String()) + id23 := MustLayerID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*LayerID expected []*ID @@ -570,39 +543,35 @@ func TestLayerIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*LayerID{&subId1}, + input: []*LayerID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*LayerID{&subId1, &subId2, &subId3}, + input: []*LayerID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := LayerIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewLayerIDSet(t *testing.T) { LayerIdSet := NewLayerIDSet() - assert.NotNil(t, LayerIdSet) assert.Empty(t, LayerIdSet.m) assert.Empty(t, LayerIdSet.s) } func TestLayerIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []LayerID expected *LayerIDSet @@ -663,24 +632,19 @@ func TestLayerIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewLayerIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestLayerIDSet_AddRef(t *testing.T) { - t.Parallel() - - LayerId := MustLayerID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *LayerID expected *LayerIDSet @@ -695,7 +659,7 @@ func TestLayerIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &LayerId, + input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &LayerIDSet{ m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestLayerIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewLayerIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestLayerIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - LayerIDSet - LayerID - } + tests := []struct { + name string + target *LayerIDSet + input LayerID expected bool }{ { - name: "Empty Set", - input: struct { - LayerIDSet - LayerID - }{LayerIDSet: LayerIDSet{}, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &LayerIDSet{}, + input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - LayerIDSet - LayerID - }{LayerIDSet: LayerIDSet{ + target: &LayerIDSet{ m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - LayerIDSet - LayerID - }{LayerIDSet: LayerIDSet{ + target: 
&LayerIDSet{ m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, LayerID: MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.LayerIDSet.Has(tc.input.LayerID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestLayerIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input LayerIDSet - expected LayerIDSet + input *LayerIDSet + expected *LayerIDSet }{ { - name: "Empty Set", - input: LayerIDSet{}, - expected: LayerIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &LayerIDSet{}, + expected: &LayerIDSet{}, }, { - name: "Set Contains the element", - input: LayerIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &LayerIDSet{ m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: LayerIDSet{ + expected: &LayerIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestLayerIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *LayerIDSet expected []LayerID }{ { - name: "Empty slice", + name: "Empty", input: &LayerIDSet{ m: map[LayerID]struct{}{}, s: nil, }, expected: make([]LayerID, 0), }, + { + name: "Nil", + input: nil, + 
expected: nil, + }, { name: "1 element", input: &LayerIDSet{ @@ -854,20 +808,17 @@ func TestLayerIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestLayerIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *LayerIDSet expected *LayerIDSet @@ -922,21 +873,19 @@ func TestLayerIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestLayerIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *LayerIDSet @@ -944,6 +893,23 @@ func TestLayerIDSet_Merge(t *testing.T) { } expected *LayerIDSet }{ + { + name: "Nil Set", + input: struct { + a *LayerIDSet + b *LayerIDSet + }{ + a: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &LayerIDSet{ + m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestLayerIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + 
t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go index d13ed808c..4d951529e 100644 --- a/pkg/id/plugin.go +++ b/pkg/id/plugin.go @@ -105,6 +105,21 @@ func PluginIDFromRef(id *string) *PluginID { return &did } +// Clone duplicates the PluginID +func (d PluginID) Clone() PluginID { + return PluginID{ + name: d.name, + version: d.version, + sys: d.sys, + scene: d.scene.CopyRef(), + } +} + +// IsNil checks if ID is empty or not. +func (d PluginID) IsNil() bool { + return d.name == "" && d.version == "" && d.scene == nil && !d.sys +} + // Name returns a name. func (d PluginID) Name() string { return d.name @@ -158,7 +173,6 @@ func (d PluginID) Ref() *PluginID { return &d2 } -// CopyRef _ func (d *PluginID) CopyRef() *PluginID { if d == nil { return nil @@ -201,7 +215,7 @@ func (d *PluginID) UnmarshalText(text []byte) (err error) { } // PluginIDToKeys converts IDs into a string slice. -func PluginIDToKeys(ids []PluginID) []string { +func PluginIDsToStrings(ids []PluginID) []string { keys := make([]string, 0, len(ids)) for _, id := range ids { keys = append(keys, id.String()) diff --git a/pkg/id/plugin_extension.go b/pkg/id/plugin_extension.go index 4be0d7c25..c401a6c50 100644 --- a/pkg/id/plugin_extension.go +++ b/pkg/id/plugin_extension.go @@ -1,9 +1,7 @@ package id -// PluginExtensionID _ type PluginExtensionID string -// PluginExtensionIDFromRef _ func PluginExtensionIDFromRef(id *string) *PluginExtensionID { if id == nil { return nil @@ -12,12 +10,10 @@ func PluginExtensionIDFromRef(id *string) *PluginExtensionID { return &id2 } -// Ref _ func (id PluginExtensionID) Ref() *PluginExtensionID { return &id } -// CopyRef _ func (id *PluginExtensionID) CopyRef() *PluginExtensionID { if id == nil { return nil @@ -26,12 +22,10 @@ func (id *PluginExtensionID) CopyRef() *PluginExtensionID { return &id2 } -// String _ func (id PluginExtensionID) String() string { return string(id) } -// StringRef 
_ func (id *PluginExtensionID) StringRef() *string { if id == nil { return nil diff --git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go index 49a8b937c..5df599d80 100644 --- a/pkg/id/plugin_test.go +++ b/pkg/id/plugin_test.go @@ -330,6 +330,19 @@ func TestPluginIDFromRef(t *testing.T) { } } +func TestPluginID_Clone(t *testing.T) { + p := PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: NewSceneID().Ref(), + } + c := p.Clone() + + assert.Equal(t, p, c) + assert.NotSame(t, p, c) +} + func TestPluginID_Name(t *testing.T) { plugin := MustPluginID("MyPlugin~1.0.0") @@ -564,7 +577,7 @@ func TestPluginID_UnmarshalText(t *testing.T) { } -func TestPluginIDToKeys(t *testing.T) { +func TestPluginIDsToStrings(t *testing.T) { t.Parallel() testCases := []struct { name string @@ -600,7 +613,7 @@ func TestPluginIDToKeys(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - assert.Equal(tt, tc.expected, PluginIDToKeys(tc.input)) + assert.Equal(tt, tc.expected, PluginIDsToStrings(tc.input)) }) } @@ -690,3 +703,30 @@ func TestPluginIDsFrom(t *testing.T) { }) } } + +func TestPluginID_IsNil(t *testing.T) { + tests := []struct { + name string + target PluginID + want bool + }{ + { + name: "present", + target: PluginID{name: "a"}, + want: false, + }, + { + name: "empty", + target: PluginID{}, + want: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.IsNil()) + }) + } +} diff --git a/pkg/id/project_gen.go b/pkg/id/project_gen.go index 883d8c1a8..aa29ab430 100644 --- a/pkg/id/project_gen.go +++ b/pkg/id/project_gen.go @@ -44,7 +44,7 @@ func ProjectIDFromRef(i *string) *ProjectID { // ProjectIDFromRefID generates a new ProjectID from a ref of a generic ID. 
func ProjectIDFromRefID(i *ID) *ProjectID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := ProjectID(*i) @@ -58,28 +58,40 @@ func (d ProjectID) ID() ID { // String returns a string representation. func (d ProjectID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d ProjectID) GoString() string { - return "id.ProjectID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d ProjectID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d ProjectID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d ProjectID) GoString() string { + return "ProjectID(" + d.String() + ")" } // Ref returns a reference. func (d ProjectID) Ref() *ProjectID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d ProjectID) Contains(ids []ProjectID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d ProjectID) Contains(ids []ProjectID) bool { // CopyRef returns a copy of a reference. func (d *ProjectID) CopyRef() *ProjectID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *ProjectID) CopyRef() *ProjectID { // IDRef returns a reference of a domain id. func (d *ProjectID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *ProjectID) IDRef() *ID { // StringRef returns a reference of a string representation. 
func (d *ProjectID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *ProjectID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *ProjectID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *ProjectID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *ProjectID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *ProjectID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d ProjectID) IsNil() bool { return ID(d).IsNil() } -// ProjectIDToKeys converts IDs into a string slice. -func ProjectIDToKeys(ids []ProjectID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *ProjectID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// ProjectIDsToStrings converts IDs into a string slice. +func ProjectIDsToStrings(ids []ProjectID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // ProjectIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *ProjectIDSet) Clone() *ProjectIDSet { // Merge returns a merged set func (s *ProjectIDSet) Merge(s2 *ProjectIDSet) *ProjectIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/project_gen_test.go b/pkg/id/project_gen_test.go index fa8eed3f2..22869ed17 100644 --- a/pkg/id/project_gen_test.go +++ b/pkg/id/project_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewProjectID(t *testing.T) { id := NewProjectID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestProjectIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestProjectIDFrom(t *testing.T) { result ProjectID err error }{ - ProjectID{}, - ErrInvalidID, + result: ProjectID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestProjectIDFrom(t *testing.T) { result ProjectID err error }{ - ProjectID{}, - ErrInvalidID, + result: ProjectID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestProjectIDFrom(t *testing.T) { result ProjectID err error }{ - ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := ProjectIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := ProjectIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } 
func TestMustProjectID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustProjectID(t *testing.T) { expected: ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustProjectID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustProjectID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestProjectIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *ProjectID @@ -139,159 +135,149 @@ func TestProjectIDFromRef(t *testing.T) { expected: &ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := ProjectIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := ProjectIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestProjectIDFromRefID(t *testing.T) { id := New() - - subId := ProjectIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := ProjectIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, ProjectIDFromRefID(nil)) + assert.Nil(t, ProjectIDFromRefID(&ID{})) } func TestProjectID_ID(t *testing.T) { id := New() - subId := ProjectIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := ProjectIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) 
} func TestProjectID_String(t *testing.T) { id := New() - subId := ProjectIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := ProjectIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", ProjectID{}.String()) } -func TestProjectID_GoString(t *testing.T) { - id := New() - subId := ProjectIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.ProjectID("+id.String()+")") +func TestProjectID_RefString(t *testing.T) { + id := NewProjectID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, ProjectID{}.RefString()) } -func TestProjectID_RefString(t *testing.T) { +func TestProjectID_GoString(t *testing.T) { id := New() - subId := ProjectIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := ProjectIDFromRefID(&id) + assert.Equal(t, "ProjectID("+id.String()+")", id2.GoString()) + assert.Equal(t, "ProjectID()", ProjectID{}.GoString()) } func TestProjectID_Ref(t *testing.T) { - id := New() - subId := ProjectIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewProjectID() + assert.Equal(t, ProjectID(id), *id.Ref()) + assert.Nil(t, (&ProjectID{}).Ref()) } func TestProjectID_Contains(t *testing.T) { id := NewProjectID() id2 := NewProjectID() assert.True(t, id.Contains([]ProjectID{id, id2})) + assert.False(t, ProjectID{}.Contains([]ProjectID{id, id2, {}})) assert.False(t, id.Contains([]ProjectID{id2})) } func TestProjectID_CopyRef(t *testing.T) { - id := New() - subId := ProjectIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewProjectID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*ProjectID)(nil).CopyRef()) } func TestProjectID_IDRef(t *testing.T) { id := New() - subId := ProjectIDFromRefID(&id) - - assert.Equal(t, id, 
*subId.IDRef()) + id2 := ProjectIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&ProjectID{}).IDRef()) + assert.Nil(t, (*ProjectID)(nil).IDRef()) } func TestProjectID_StringRef(t *testing.T) { - id := New() - subId := ProjectIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewProjectID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&ProjectID{}).StringRef()) + assert.Nil(t, (*ProjectID)(nil).StringRef()) } func TestProjectID_MarhsalJSON(t *testing.T) { - id := New() - subId := ProjectIDFromRefID(&id) + id := NewProjectID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&ProjectID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*ProjectID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestProjectID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &ProjectID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustProjectID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &ProjectID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestProjectID_MarshalText(t *testing.T) { id := New() - subId := ProjectIDFromRefID(&id) + res, err := ProjectIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&ProjectID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*ProjectID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestProjectID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &ProjectID{} - - err := 
subId.UnmarshalText(text) - + id2 := &ProjectID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestProjectID_IsNil(t *testing.T) { - subId := ProjectID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *ProjectIDFromRefID(&id) + assert.True(t, ProjectID{}.IsNil()) + assert.False(t, NewProjectID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestProjectID_IsNilRef(t *testing.T) { + assert.True(t, ProjectID{}.Ref().IsNilRef()) + assert.True(t, (*ProjectID)(nil).IsNilRef()) + assert.False(t, NewProjectID().Ref().IsNilRef()) } -func TestProjectIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestProjectIDsToStrings(t *testing.T) { + tests := []struct { name string input []ProjectID expected []string @@ -321,19 +307,17 @@ func TestProjectIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, ProjectIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, ProjectIDsToStrings(tt.input)) }) } - } func TestProjectIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestProjectIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestProjectIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := ProjectIDsFrom(tc.input) if tc.expected.err != nil { - _, 
err := ProjectIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := ProjectIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestProjectIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []ProjectID @@ -449,25 +431,22 @@ func TestProjectIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ProjectIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestProjectIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []ProjectID @@ -493,21 +472,18 @@ func TestProjectIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ProjectIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestProjectIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []ProjectID expected []ID @@ -537,28 +513,25 @@ func TestProjectIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ProjectIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, 
res) }) } } func TestProjectIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustProjectID(id1.String()) + id21 := MustProjectID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustProjectID(id2.String()) + id22 := MustProjectID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustProjectID(id3.String()) + id23 := MustProjectID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*ProjectID expected []*ID @@ -570,39 +543,35 @@ func TestProjectIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*ProjectID{&subId1}, + input: []*ProjectID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*ProjectID{&subId1, &subId2, &subId3}, + input: []*ProjectID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := ProjectIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewProjectIDSet(t *testing.T) { ProjectIdSet := NewProjectIDSet() - assert.NotNil(t, ProjectIdSet) assert.Empty(t, ProjectIdSet.m) assert.Empty(t, ProjectIdSet.s) } func TestProjectIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []ProjectID expected *ProjectIDSet @@ -663,24 +632,19 @@ func TestProjectIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewProjectIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestProjectIDSet_AddRef(t *testing.T) { - t.Parallel() - - ProjectId := MustProjectID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *ProjectID expected *ProjectIDSet @@ -695,7 +659,7 @@ func TestProjectIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &ProjectId, + input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &ProjectIDSet{ m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestProjectIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewProjectIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestProjectIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - ProjectIDSet - ProjectID - } + tests := []struct { + name string + target *ProjectIDSet + input ProjectID expected bool }{ { - name: "Empty Set", - input: struct { - ProjectIDSet - ProjectID - }{ProjectIDSet: ProjectIDSet{}, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &ProjectIDSet{}, + input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - ProjectIDSet - ProjectID - }{ProjectIDSet: ProjectIDSet{ + target: &ProjectIDSet{ m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - 
input: struct { - ProjectIDSet - ProjectID - }{ProjectIDSet: ProjectIDSet{ + target: &ProjectIDSet{ m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, ProjectID: MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.ProjectIDSet.Has(tc.input.ProjectID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestProjectIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input ProjectIDSet - expected ProjectIDSet + input *ProjectIDSet + expected *ProjectIDSet }{ { - name: "Empty Set", - input: ProjectIDSet{}, - expected: ProjectIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &ProjectIDSet{}, + expected: &ProjectIDSet{}, }, { - name: "Set Contains the element", - input: ProjectIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &ProjectIDSet{ m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: ProjectIDSet{ + expected: &ProjectIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestProjectIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *ProjectIDSet expected []ProjectID }{ { - name: "Empty slice", 
+ name: "Empty", input: &ProjectIDSet{ m: map[ProjectID]struct{}{}, s: nil, }, expected: make([]ProjectID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &ProjectIDSet{ @@ -854,20 +808,17 @@ func TestProjectIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestProjectIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *ProjectIDSet expected *ProjectIDSet @@ -922,21 +873,19 @@ func TestProjectIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestProjectIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *ProjectIDSet @@ -944,6 +893,23 @@ func TestProjectIDSet_Merge(t *testing.T) { } expected *ProjectIDSet }{ + { + name: "Nil Set", + input: struct { + a *ProjectIDSet + b *ProjectIDSet + }{ + a: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &ProjectIDSet{ + m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestProjectIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, 
tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/property_gen.go b/pkg/id/property_gen.go index f0cfdbf23..dfd762aae 100644 --- a/pkg/id/property_gen.go +++ b/pkg/id/property_gen.go @@ -44,7 +44,7 @@ func PropertyIDFromRef(i *string) *PropertyID { // PropertyIDFromRefID generates a new PropertyID from a ref of a generic ID. func PropertyIDFromRefID(i *ID) *PropertyID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := PropertyID(*i) @@ -58,28 +58,40 @@ func (d PropertyID) ID() ID { // String returns a string representation. func (d PropertyID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d PropertyID) GoString() string { - return "id.PropertyID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d PropertyID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d PropertyID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d PropertyID) GoString() string { + return "PropertyID(" + d.String() + ")" } // Ref returns a reference. func (d PropertyID) Ref() *PropertyID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d PropertyID) Contains(ids []PropertyID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d PropertyID) Contains(ids []PropertyID) bool { // CopyRef returns a copy of a reference. 
func (d *PropertyID) CopyRef() *PropertyID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *PropertyID) CopyRef() *PropertyID { // IDRef returns a reference of a domain id. func (d *PropertyID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *PropertyID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *PropertyID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *PropertyID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *PropertyID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *PropertyID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *PropertyID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *PropertyID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d PropertyID) IsNil() bool { return ID(d).IsNil() } -// PropertyIDToKeys converts IDs into a string slice. -func PropertyIDToKeys(ids []PropertyID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *PropertyID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// PropertyIDsToStrings converts IDs into a string slice. +func PropertyIDsToStrings(ids []PropertyID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // PropertyIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *PropertyIDSet) Clone() *PropertyIDSet { // Merge returns a merged set func (s *PropertyIDSet) Merge(s2 *PropertyIDSet) *PropertyIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/property_gen_test.go b/pkg/id/property_gen_test.go index 1c9ea59c8..552b62f13 100644 --- a/pkg/id/property_gen_test.go +++ b/pkg/id/property_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewPropertyID(t *testing.T) { id := NewPropertyID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestPropertyIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestPropertyIDFrom(t *testing.T) { result PropertyID err error }{ - PropertyID{}, - ErrInvalidID, + result: PropertyID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestPropertyIDFrom(t *testing.T) { result PropertyID err error }{ - PropertyID{}, - ErrInvalidID, + result: PropertyID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestPropertyIDFrom(t *testing.T) { result PropertyID err error }{ - PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := PropertyIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := PropertyIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, 
tt.expected.err, err) } }) } } func TestMustPropertyID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustPropertyID(t *testing.T) { expected: PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustPropertyID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustPropertyID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestPropertyIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *PropertyID @@ -139,159 +135,149 @@ func TestPropertyIDFromRef(t *testing.T) { expected: &PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := PropertyIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := PropertyIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestPropertyIDFromRefID(t *testing.T) { id := New() - - subId := PropertyIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := PropertyIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, PropertyIDFromRefID(nil)) + assert.Nil(t, PropertyIDFromRefID(&ID{})) } func TestPropertyID_ID(t *testing.T) { id := New() - subId := PropertyIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := 
PropertyIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestPropertyID_String(t *testing.T) { id := New() - subId := PropertyIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := PropertyIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", PropertyID{}.String()) } -func TestPropertyID_GoString(t *testing.T) { - id := New() - subId := PropertyIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.PropertyID("+id.String()+")") +func TestPropertyID_RefString(t *testing.T) { + id := NewPropertyID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, PropertyID{}.RefString()) } -func TestPropertyID_RefString(t *testing.T) { +func TestPropertyID_GoString(t *testing.T) { id := New() - subId := PropertyIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := PropertyIDFromRefID(&id) + assert.Equal(t, "PropertyID("+id.String()+")", id2.GoString()) + assert.Equal(t, "PropertyID()", PropertyID{}.GoString()) } func TestPropertyID_Ref(t *testing.T) { - id := New() - subId := PropertyIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewPropertyID() + assert.Equal(t, PropertyID(id), *id.Ref()) + assert.Nil(t, (&PropertyID{}).Ref()) } func TestPropertyID_Contains(t *testing.T) { id := NewPropertyID() id2 := NewPropertyID() assert.True(t, id.Contains([]PropertyID{id, id2})) + assert.False(t, PropertyID{}.Contains([]PropertyID{id, id2, {}})) assert.False(t, id.Contains([]PropertyID{id2})) } func TestPropertyID_CopyRef(t *testing.T) { - id := New() - subId := PropertyIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewPropertyID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*PropertyID)(nil).CopyRef()) } func TestPropertyID_IDRef(t 
*testing.T) { id := New() - subId := PropertyIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := PropertyIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&PropertyID{}).IDRef()) + assert.Nil(t, (*PropertyID)(nil).IDRef()) } func TestPropertyID_StringRef(t *testing.T) { - id := New() - subId := PropertyIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewPropertyID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&PropertyID{}).StringRef()) + assert.Nil(t, (*PropertyID)(nil).StringRef()) } func TestPropertyID_MarhsalJSON(t *testing.T) { - id := New() - subId := PropertyIDFromRefID(&id) + id := NewPropertyID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&PropertyID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*PropertyID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestPropertyID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &PropertyID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustPropertyID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &PropertyID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestPropertyID_MarshalText(t *testing.T) { id := New() - subId := PropertyIDFromRefID(&id) + res, err := PropertyIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&PropertyID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*PropertyID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func 
TestPropertyID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &PropertyID{} - - err := subId.UnmarshalText(text) - + id2 := &PropertyID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestPropertyID_IsNil(t *testing.T) { - subId := PropertyID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *PropertyIDFromRefID(&id) + assert.True(t, PropertyID{}.IsNil()) + assert.False(t, NewPropertyID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestPropertyID_IsNilRef(t *testing.T) { + assert.True(t, PropertyID{}.Ref().IsNilRef()) + assert.True(t, (*PropertyID)(nil).IsNilRef()) + assert.False(t, NewPropertyID().Ref().IsNilRef()) } -func TestPropertyIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestPropertyIDsToStrings(t *testing.T) { + tests := []struct { name string input []PropertyID expected []string @@ -321,19 +307,17 @@ func TestPropertyIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, PropertyIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, PropertyIDsToStrings(tt.input)) }) } - } func TestPropertyIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestPropertyIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestPropertyIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt 
*testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := PropertyIDsFrom(tc.input) if tc.expected.err != nil { - _, err := PropertyIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := PropertyIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestPropertyIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []PropertyID @@ -449,25 +431,22 @@ func TestPropertyIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestPropertyIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []PropertyID @@ -493,21 +472,18 @@ func TestPropertyIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestPropertyIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []PropertyID expected []ID @@ -537,28 +513,25 @@ func TestPropertyIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - 
tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestPropertyIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustPropertyID(id1.String()) + id21 := MustPropertyID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustPropertyID(id2.String()) + id22 := MustPropertyID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustPropertyID(id3.String()) + id23 := MustPropertyID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*PropertyID expected []*ID @@ -570,39 +543,35 @@ func TestPropertyIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*PropertyID{&subId1}, + input: []*PropertyID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*PropertyID{&subId1, &subId2, &subId3}, + input: []*PropertyID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewPropertyIDSet(t *testing.T) { PropertyIdSet := NewPropertyIDSet() - assert.NotNil(t, PropertyIdSet) assert.Empty(t, PropertyIdSet.m) assert.Empty(t, PropertyIdSet.s) } func TestPropertyIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []PropertyID expected *PropertyIDSet @@ -663,24 +632,19 @@ func TestPropertyIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewPropertyIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestPropertyIDSet_AddRef(t *testing.T) { - t.Parallel() - - PropertyId := MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *PropertyID expected *PropertyIDSet @@ -695,7 +659,7 @@ func TestPropertyIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &PropertyId, + input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &PropertyIDSet{ m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestPropertyIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewPropertyIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestPropertyIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - PropertyIDSet - PropertyID - } + tests := []struct { + name string + target *PropertyIDSet + input PropertyID expected bool }{ { - name: "Empty Set", - input: struct { - PropertyIDSet - PropertyID - }{PropertyIDSet: PropertyIDSet{}, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &PropertyIDSet{}, + input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - PropertyIDSet - PropertyID - }{PropertyIDSet: PropertyIDSet{ + target: &PropertyIDSet{ m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { 
name: "Set does not Contains the element", - input: struct { - PropertyIDSet - PropertyID - }{PropertyIDSet: PropertyIDSet{ + target: &PropertyIDSet{ m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, PropertyID: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.PropertyIDSet.Has(tc.input.PropertyID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestPropertyIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input PropertyIDSet - expected PropertyIDSet + input *PropertyIDSet + expected *PropertyIDSet }{ { - name: "Empty Set", - input: PropertyIDSet{}, - expected: PropertyIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &PropertyIDSet{}, + expected: &PropertyIDSet{}, }, { - name: "Set Contains the element", - input: PropertyIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &PropertyIDSet{ m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: PropertyIDSet{ + expected: &PropertyIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestPropertyIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { 
name string input *PropertyIDSet expected []PropertyID }{ { - name: "Empty slice", + name: "Empty", input: &PropertyIDSet{ m: map[PropertyID]struct{}{}, s: nil, }, expected: make([]PropertyID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &PropertyIDSet{ @@ -854,20 +808,17 @@ func TestPropertyIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestPropertyIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *PropertyIDSet expected *PropertyIDSet @@ -922,21 +873,19 @@ func TestPropertyIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestPropertyIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *PropertyIDSet @@ -944,6 +893,23 @@ func TestPropertyIDSet_Merge(t *testing.T) { } expected *PropertyIDSet }{ + { + name: "Nil Set", + input: struct { + a *PropertyIDSet + b *PropertyIDSet + }{ + a: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &PropertyIDSet{ + m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ 
-1000,12 +966,11 @@ func TestPropertyIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/property_item_gen.go b/pkg/id/property_item_gen.go index 122e9b1a0..c749aaab4 100644 --- a/pkg/id/property_item_gen.go +++ b/pkg/id/property_item_gen.go @@ -44,7 +44,7 @@ func PropertyItemIDFromRef(i *string) *PropertyItemID { // PropertyItemIDFromRefID generates a new PropertyItemID from a ref of a generic ID. func PropertyItemIDFromRefID(i *ID) *PropertyItemID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := PropertyItemID(*i) @@ -58,28 +58,40 @@ func (d PropertyItemID) ID() ID { // String returns a string representation. func (d PropertyItemID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d PropertyItemID) GoString() string { - return "id.PropertyItemID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d PropertyItemID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d PropertyItemID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d PropertyItemID) GoString() string { + return "PropertyItemID(" + d.String() + ")" } // Ref returns a reference. func (d PropertyItemID) Ref() *PropertyItemID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. 
func (d PropertyItemID) Contains(ids []PropertyItemID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d PropertyItemID) Contains(ids []PropertyItemID) bool { // CopyRef returns a copy of a reference. func (d *PropertyItemID) CopyRef() *PropertyItemID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *PropertyItemID) CopyRef() *PropertyItemID { // IDRef returns a reference of a domain id. func (d *PropertyItemID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *PropertyItemID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *PropertyItemID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *PropertyItemID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *PropertyItemID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *PropertyItemID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *PropertyItemID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *PropertyItemID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d PropertyItemID) IsNil() bool { return ID(d).IsNil() } -// PropertyItemIDToKeys converts IDs into a string slice. -func PropertyItemIDToKeys(ids []PropertyItemID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *PropertyItemID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// PropertyItemIDsToStrings converts IDs into a string slice. 
+func PropertyItemIDsToStrings(ids []PropertyItemID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // PropertyItemIDsFrom converts a string slice into a ID slice. @@ -285,9 +305,6 @@ func (s *PropertyItemIDSet) Clone() *PropertyItemIDSet { // Merge returns a merged set func (s *PropertyItemIDSet) Merge(s2 *PropertyItemIDSet) *PropertyItemIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/property_item_gen_test.go b/pkg/id/property_item_gen_test.go index 680c31693..c60703a0c 100644 --- a/pkg/id/property_item_gen_test.go +++ b/pkg/id/property_item_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewPropertyItemID(t *testing.T) { id := NewPropertyItemID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestPropertyItemIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestPropertyItemIDFrom(t *testing.T) { result PropertyItemID err error }{ - PropertyItemID{}, - ErrInvalidID, + result: PropertyItemID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestPropertyItemIDFrom(t *testing.T) { result PropertyItemID err error }{ - PropertyItemID{}, - ErrInvalidID, + result: PropertyItemID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestPropertyItemIDFrom(t *testing.T) { result PropertyItemID err error }{ - PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - 
result, err := PropertyItemIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := PropertyItemIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustPropertyItemID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustPropertyItemID(t *testing.T) { expected: PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustPropertyItemID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustPropertyItemID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestPropertyItemIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *PropertyItemID @@ -139,159 +135,149 @@ func TestPropertyItemIDFromRef(t *testing.T) { expected: &PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := PropertyItemIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := PropertyItemIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func 
TestPropertyItemIDFromRefID(t *testing.T) { id := New() - - subId := PropertyItemIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := PropertyItemIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, PropertyItemIDFromRefID(nil)) + assert.Nil(t, PropertyItemIDFromRefID(&ID{})) } func TestPropertyItemID_ID(t *testing.T) { id := New() - subId := PropertyItemIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := PropertyItemIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestPropertyItemID_String(t *testing.T) { id := New() - subId := PropertyItemIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := PropertyItemIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", PropertyItemID{}.String()) } -func TestPropertyItemID_GoString(t *testing.T) { - id := New() - subId := PropertyItemIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.PropertyItemID("+id.String()+")") +func TestPropertyItemID_RefString(t *testing.T) { + id := NewPropertyItemID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, PropertyItemID{}.RefString()) } -func TestPropertyItemID_RefString(t *testing.T) { +func TestPropertyItemID_GoString(t *testing.T) { id := New() - subId := PropertyItemIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := PropertyItemIDFromRefID(&id) + assert.Equal(t, "PropertyItemID("+id.String()+")", id2.GoString()) + assert.Equal(t, "PropertyItemID()", PropertyItemID{}.GoString()) } func TestPropertyItemID_Ref(t *testing.T) { - id := New() - subId := PropertyItemIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewPropertyItemID() + assert.Equal(t, PropertyItemID(id), *id.Ref()) + assert.Nil(t, (&PropertyItemID{}).Ref()) } func TestPropertyItemID_Contains(t *testing.T) { id := NewPropertyItemID() id2 := 
NewPropertyItemID() assert.True(t, id.Contains([]PropertyItemID{id, id2})) + assert.False(t, PropertyItemID{}.Contains([]PropertyItemID{id, id2, {}})) assert.False(t, id.Contains([]PropertyItemID{id2})) } func TestPropertyItemID_CopyRef(t *testing.T) { - id := New() - subId := PropertyItemIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewPropertyItemID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*PropertyItemID)(nil).CopyRef()) } func TestPropertyItemID_IDRef(t *testing.T) { id := New() - subId := PropertyItemIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := PropertyItemIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&PropertyItemID{}).IDRef()) + assert.Nil(t, (*PropertyItemID)(nil).IDRef()) } func TestPropertyItemID_StringRef(t *testing.T) { - id := New() - subId := PropertyItemIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewPropertyItemID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&PropertyItemID{}).StringRef()) + assert.Nil(t, (*PropertyItemID)(nil).StringRef()) } func TestPropertyItemID_MarhsalJSON(t *testing.T) { - id := New() - subId := PropertyItemIDFromRefID(&id) + id := NewPropertyItemID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&PropertyItemID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*PropertyItemID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestPropertyItemID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &PropertyItemID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := 
MustPropertyItemID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &PropertyItemID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestPropertyItemID_MarshalText(t *testing.T) { id := New() - subId := PropertyItemIDFromRefID(&id) + res, err := PropertyItemIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&PropertyItemID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*PropertyItemID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestPropertyItemID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &PropertyItemID{} - - err := subId.UnmarshalText(text) - + id2 := &PropertyItemID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestPropertyItemID_IsNil(t *testing.T) { - subId := PropertyItemID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *PropertyItemIDFromRefID(&id) + assert.True(t, PropertyItemID{}.IsNil()) + assert.False(t, NewPropertyItemID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestPropertyItemID_IsNilRef(t *testing.T) { + assert.True(t, PropertyItemID{}.Ref().IsNilRef()) + assert.True(t, (*PropertyItemID)(nil).IsNilRef()) + assert.False(t, NewPropertyItemID().Ref().IsNilRef()) } -func TestPropertyItemIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestPropertyItemIDsToStrings(t *testing.T) { + tests := []struct { name string input []PropertyItemID expected []string @@ -321,19 +307,17 @@ func TestPropertyItemIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - 
assert.Equal(tt, tc.expected, PropertyItemIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, PropertyItemIDsToStrings(tt.input)) }) } - } func TestPropertyItemIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestPropertyItemIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestPropertyItemIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := PropertyItemIDsFrom(tc.input) if tc.expected.err != nil { - _, err := PropertyItemIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := PropertyItemIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestPropertyItemIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []PropertyItemID @@ -449,25 +431,22 @@ func TestPropertyItemIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyItemIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestPropertyItemIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") 
id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []PropertyItemID @@ -493,21 +472,18 @@ func TestPropertyItemIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyItemIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestPropertyItemIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []PropertyItemID expected []ID @@ -537,28 +513,25 @@ func TestPropertyItemIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyItemIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestPropertyItemIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustPropertyItemID(id1.String()) + id21 := MustPropertyItemID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustPropertyItemID(id2.String()) + id22 := MustPropertyItemID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustPropertyItemID(id3.String()) + id23 := MustPropertyItemID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*PropertyItemID expected []*ID @@ -570,39 +543,35 @@ func TestPropertyItemIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*PropertyItemID{&subId1}, + input: []*PropertyItemID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*PropertyItemID{&subId1, &subId2, &subId3}, + input: []*PropertyItemID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := 
range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := PropertyItemIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewPropertyItemIDSet(t *testing.T) { PropertyItemIdSet := NewPropertyItemIDSet() - assert.NotNil(t, PropertyItemIdSet) assert.Empty(t, PropertyItemIdSet.m) assert.Empty(t, PropertyItemIdSet.s) } func TestPropertyItemIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []PropertyItemID expected *PropertyItemIDSet @@ -663,24 +632,19 @@ func TestPropertyItemIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewPropertyItemIDSet() set.Add(tc.input...) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestPropertyItemIDSet_AddRef(t *testing.T) { - t.Parallel() - - PropertyItemId := MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *PropertyItemID expected *PropertyItemIDSet @@ -695,7 +659,7 @@ func TestPropertyItemIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &PropertyItemId, + input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestPropertyItemIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewPropertyItemIDSet() set.AddRef(tc.input) - 
assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestPropertyItemIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - PropertyItemIDSet - PropertyItemID - } + tests := []struct { + name string + target *PropertyItemIDSet + input PropertyItemID expected bool }{ { - name: "Empty Set", - input: struct { - PropertyItemIDSet - PropertyItemID - }{PropertyItemIDSet: PropertyItemIDSet{}, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &PropertyItemIDSet{}, + input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - PropertyItemIDSet - PropertyItemID - }{PropertyItemIDSet: PropertyItemIDSet{ + target: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - PropertyItemIDSet - PropertyItemID - }{PropertyItemIDSet: PropertyItemIDSet{ + target: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, PropertyItemID: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.PropertyItemIDSet.Has(tc.input.PropertyItemID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestPropertyItemIDSet_Clear(t 
*testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input PropertyItemIDSet - expected PropertyItemIDSet + input *PropertyItemIDSet + expected *PropertyItemIDSet }{ { - name: "Empty Set", - input: PropertyItemIDSet{}, - expected: PropertyItemIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &PropertyItemIDSet{}, + expected: &PropertyItemIDSet{}, }, { - name: "Set Contains the element", - input: PropertyItemIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: PropertyItemIDSet{ + expected: &PropertyItemIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestPropertyItemIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *PropertyItemIDSet expected []PropertyItemID }{ { - name: "Empty slice", + name: "Empty", input: &PropertyItemIDSet{ m: map[PropertyItemID]struct{}{}, s: nil, }, expected: make([]PropertyItemID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &PropertyItemIDSet{ @@ -854,20 +808,17 @@ func TestPropertyItemIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestPropertyItemIDSet_Clone(t *testing.T) 
{ - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *PropertyItemIDSet expected *PropertyItemIDSet @@ -922,21 +873,19 @@ func TestPropertyItemIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestPropertyItemIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *PropertyItemIDSet @@ -944,6 +893,23 @@ func TestPropertyItemIDSet_Merge(t *testing.T) { } expected *PropertyItemIDSet }{ + { + name: "Nil Set", + input: struct { + a *PropertyItemIDSet + b *PropertyItemIDSet + }{ + a: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &PropertyItemIDSet{ + m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestPropertyItemIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go index 6530825b6..91795e251 100644 --- a/pkg/id/property_schema.go +++ b/pkg/id/property_schema.go @@ -63,6 +63,14 @@ func PropertySchemaIDFromRef(id *string) 
*PropertySchemaID { return &did } +// Clone duplicates the PropertySchemaID +func (d PropertySchemaID) Clone() PropertySchemaID { + return PropertySchemaID{ + plugin: d.plugin.Clone(), + id: d.id, + } +} + // ID returns a fragment of just ID. func (d PropertySchemaID) ID() string { return d.id @@ -80,18 +88,24 @@ func (d PropertySchemaID) System() bool { // String returns a string representation. func (d PropertySchemaID) String() string { + if d.IsNil() { + return "" + } return d.plugin.String() + "/" + d.id } // Ref returns a reference. func (d PropertySchemaID) Ref() *PropertySchemaID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // CopyRef returns a copy of a reference. func (d *PropertySchemaID) CopyRef() *PropertySchemaID { - if d == nil { + if d == nil || d.IsNil() { return nil } d2 := *d @@ -100,7 +114,7 @@ func (d *PropertySchemaID) CopyRef() *PropertySchemaID { // IsNil checks if ID is empty or not. func (d PropertySchemaID) IsNil() bool { - return d.plugin == PluginID{} && d.id == "" + return d.plugin.IsNil() && d.id == "" } // Equal returns true if two IDs are equal. @@ -129,7 +143,7 @@ func (d *PropertySchemaID) UnmarshalText(text []byte) (err error) { } // PropertySchemaIDToKeys converts IDs into a string slice. 
-func PropertySchemaIDToKeys(ids []PropertySchemaID) []string { +func PropertySchemaIDsToStrings(ids []PropertySchemaID) []string { keys := make([]string, 0, len(ids)) for _, id := range ids { keys = append(keys, id.String()) diff --git a/pkg/id/property_schema_field.go b/pkg/id/property_schema_field.go index aaa986e98..b8b9f6d2c 100644 --- a/pkg/id/property_schema_field.go +++ b/pkg/id/property_schema_field.go @@ -3,7 +3,7 @@ package id type PropertySchemaFieldID string func PropertySchemaFieldIDFrom(str *string) *PropertySchemaFieldID { - if str == nil { + if str == nil || *str == "" { return nil } id := PropertySchemaFieldID(*str) @@ -11,12 +11,15 @@ func PropertySchemaFieldIDFrom(str *string) *PropertySchemaFieldID { } func (id PropertySchemaFieldID) Ref() *PropertySchemaFieldID { + if id == "" { + return nil + } id2 := id return &id2 } func (id *PropertySchemaFieldID) CopyRef() *PropertySchemaFieldID { - if id == nil { + if id == nil || *id == "" { return nil } id2 := *id @@ -28,7 +31,7 @@ func (id PropertySchemaFieldID) String() string { } func (id *PropertySchemaFieldID) StringRef() *string { - if id == nil { + if id == nil || *id == "" { return nil } str := string(*id) diff --git a/pkg/id/property_schema_group.go b/pkg/id/property_schema_group.go index d0556e05d..cfba5c321 100644 --- a/pkg/id/property_schema_group.go +++ b/pkg/id/property_schema_group.go @@ -3,7 +3,7 @@ package id type PropertySchemaGroupID string func PropertySchemaGroupIDFrom(str *string) *PropertySchemaGroupID { - if str == nil { + if str == nil || *str == "" { return nil } id := PropertySchemaGroupID(*str) @@ -11,12 +11,15 @@ func PropertySchemaGroupIDFrom(str *string) *PropertySchemaGroupID { } func (id PropertySchemaGroupID) Ref() *PropertySchemaGroupID { + if id == "" { + return nil + } id2 := id return &id2 } func (id *PropertySchemaGroupID) CopyRef() *PropertySchemaGroupID { - if id == nil { + if id == nil || *id == "" { return nil } id2 := *id @@ -28,7 +31,7 @@ func (id 
PropertySchemaGroupID) String() string { } func (id *PropertySchemaGroupID) StringRef() *string { - if id == nil { + if id == nil || *id == "" { return nil } str := string(*id) diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index 7adcb8562..918dc1646 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -215,6 +215,22 @@ func TestPropertySchemaIDFromRef(t *testing.T) { } } +func TestPropertySchemaID_Clone(t *testing.T) { + p := PropertySchemaID{ + id: "xxx", + plugin: PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: NewSceneID().Ref(), + }, + } + c := p.Clone() + + assert.Equal(t, p, c) + assert.NotSame(t, p, c) +} + func TestPropertySchemaID_ID(t *testing.T) { propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") assert.Equal(t, propertySchemaID.ID(), "test") @@ -287,7 +303,7 @@ func TestPropertySchemaID_UnmarshalText(t *testing.T) { assert.Equal(t, "test~2.0.0/test", propertySchemaID.String()) } -func TestPropertySchemaIDToKeys(t *testing.T) { +func TestPropertySchemaIDsToStrings(t *testing.T) { t.Parallel() testCases := []struct { name string @@ -323,7 +339,7 @@ func TestPropertySchemaIDToKeys(t *testing.T) { tc := tc t.Run(tc.name, func(tt *testing.T) { tt.Parallel() - assert.Equal(tt, tc.expected, PropertySchemaIDToKeys(tc.input)) + assert.Equal(tt, tc.expected, PropertySchemaIDsToStrings(tc.input)) }) } diff --git a/pkg/id/scene_gen.go b/pkg/id/scene_gen.go index bc8d2c37b..cd2c6d0bd 100644 --- a/pkg/id/scene_gen.go +++ b/pkg/id/scene_gen.go @@ -44,7 +44,7 @@ func SceneIDFromRef(i *string) *SceneID { // SceneIDFromRefID generates a new SceneID from a ref of a generic ID. func SceneIDFromRefID(i *ID) *SceneID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := SceneID(*i) @@ -58,28 +58,40 @@ func (d SceneID) ID() ID { // String returns a string representation. 
func (d SceneID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d SceneID) GoString() string { - return "id.SceneID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d SceneID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d SceneID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d SceneID) GoString() string { + return "SceneID(" + d.String() + ")" } // Ref returns a reference. func (d SceneID) Ref() *SceneID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d SceneID) Contains(ids []SceneID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d SceneID) Contains(ids []SceneID) bool { // CopyRef returns a copy of a reference. func (d *SceneID) CopyRef() *SceneID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *SceneID) CopyRef() *SceneID { // IDRef returns a reference of a domain id. func (d *SceneID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *SceneID) IDRef() *ID { // StringRef returns a reference of a string representation. 
func (d *SceneID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *SceneID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *SceneID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *SceneID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *SceneID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *SceneID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d SceneID) IsNil() bool { return ID(d).IsNil() } -// SceneIDToKeys converts IDs into a string slice. -func SceneIDToKeys(ids []SceneID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *SceneID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// SceneIDsToStrings converts IDs into a string slice. +func SceneIDsToStrings(ids []SceneID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // SceneIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *SceneIDSet) Clone() *SceneIDSet { // Merge returns a merged set func (s *SceneIDSet) Merge(s2 *SceneIDSet) *SceneIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/scene_gen_test.go b/pkg/id/scene_gen_test.go index cec679206..0bf3cffa7 100644 --- a/pkg/id/scene_gen_test.go +++ b/pkg/id/scene_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewSceneID(t *testing.T) { id := NewSceneID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestSceneIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestSceneIDFrom(t *testing.T) { result SceneID err error }{ - SceneID{}, - ErrInvalidID, + result: SceneID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestSceneIDFrom(t *testing.T) { result SceneID err error }{ - SceneID{}, - ErrInvalidID, + result: SceneID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestSceneIDFrom(t *testing.T) { result SceneID err error }{ - SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := SceneIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := SceneIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustSceneID(t *testing.T) { - t.Parallel() - 
testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustSceneID(t *testing.T) { expected: SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustSceneID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustSceneID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestSceneIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *SceneID @@ -139,159 +135,149 @@ func TestSceneIDFromRef(t *testing.T) { expected: &SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := SceneIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := SceneIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestSceneIDFromRefID(t *testing.T) { id := New() - - subId := SceneIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := SceneIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, SceneIDFromRefID(nil)) + assert.Nil(t, SceneIDFromRefID(&ID{})) } func TestSceneID_ID(t *testing.T) { id := New() - subId := SceneIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := SceneIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestSceneID_String(t *testing.T) { id := New() - subId := SceneIDFromRefID(&id) - - 
assert.Equal(t, subId.String(), id.String()) + id2 := SceneIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", SceneID{}.String()) } -func TestSceneID_GoString(t *testing.T) { - id := New() - subId := SceneIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.SceneID("+id.String()+")") +func TestSceneID_RefString(t *testing.T) { + id := NewSceneID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, SceneID{}.RefString()) } -func TestSceneID_RefString(t *testing.T) { +func TestSceneID_GoString(t *testing.T) { id := New() - subId := SceneIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := SceneIDFromRefID(&id) + assert.Equal(t, "SceneID("+id.String()+")", id2.GoString()) + assert.Equal(t, "SceneID()", SceneID{}.GoString()) } func TestSceneID_Ref(t *testing.T) { - id := New() - subId := SceneIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewSceneID() + assert.Equal(t, SceneID(id), *id.Ref()) + assert.Nil(t, (&SceneID{}).Ref()) } func TestSceneID_Contains(t *testing.T) { id := NewSceneID() id2 := NewSceneID() assert.True(t, id.Contains([]SceneID{id, id2})) + assert.False(t, SceneID{}.Contains([]SceneID{id, id2, {}})) assert.False(t, id.Contains([]SceneID{id2})) } func TestSceneID_CopyRef(t *testing.T) { - id := New() - subId := SceneIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewSceneID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*SceneID)(nil).CopyRef()) } func TestSceneID_IDRef(t *testing.T) { id := New() - subId := SceneIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := SceneIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&SceneID{}).IDRef()) + assert.Nil(t, (*SceneID)(nil).IDRef()) } func 
TestSceneID_StringRef(t *testing.T) { - id := New() - subId := SceneIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewSceneID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&SceneID{}).StringRef()) + assert.Nil(t, (*SceneID)(nil).StringRef()) } func TestSceneID_MarhsalJSON(t *testing.T) { - id := New() - subId := SceneIDFromRefID(&id) + id := NewSceneID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&SceneID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*SceneID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestSceneID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &SceneID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustSceneID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &SceneID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestSceneID_MarshalText(t *testing.T) { id := New() - subId := SceneIDFromRefID(&id) + res, err := SceneIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&SceneID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*SceneID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestSceneID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &SceneID{} - - err := subId.UnmarshalText(text) - + id2 := &SceneID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", 
id2.String()) } func TestSceneID_IsNil(t *testing.T) { - subId := SceneID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *SceneIDFromRefID(&id) + assert.True(t, SceneID{}.IsNil()) + assert.False(t, NewSceneID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestSceneID_IsNilRef(t *testing.T) { + assert.True(t, SceneID{}.Ref().IsNilRef()) + assert.True(t, (*SceneID)(nil).IsNilRef()) + assert.False(t, NewSceneID().Ref().IsNilRef()) } -func TestSceneIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestSceneIDsToStrings(t *testing.T) { + tests := []struct { name string input []SceneID expected []string @@ -321,19 +307,17 @@ func TestSceneIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, SceneIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, SceneIDsToStrings(tt.input)) }) } - } func TestSceneIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestSceneIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestSceneIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := SceneIDsFrom(tc.input) if tc.expected.err != nil { - _, err := SceneIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := SceneIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + 
assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestSceneIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []SceneID @@ -449,25 +431,22 @@ func TestSceneIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := SceneIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestSceneIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []SceneID @@ -493,21 +472,18 @@ func TestSceneIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := SceneIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestSceneIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []SceneID expected []ID @@ -537,28 +513,25 @@ func TestSceneIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := SceneIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestSceneIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustSceneID(id1.String()) + id21 := MustSceneID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustSceneID(id2.String()) + id22 
:= MustSceneID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustSceneID(id3.String()) + id23 := MustSceneID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*SceneID expected []*ID @@ -570,39 +543,35 @@ func TestSceneIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*SceneID{&subId1}, + input: []*SceneID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*SceneID{&subId1, &subId2, &subId3}, + input: []*SceneID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := SceneIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewSceneIDSet(t *testing.T) { SceneIdSet := NewSceneIDSet() - assert.NotNil(t, SceneIdSet) assert.Empty(t, SceneIdSet.m) assert.Empty(t, SceneIdSet.s) } func TestSceneIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []SceneID expected *SceneIDSet @@ -663,24 +632,19 @@ func TestSceneIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewSceneIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestSceneIDSet_AddRef(t *testing.T) { - t.Parallel() - - SceneId := MustSceneID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *SceneID expected *SceneIDSet @@ -695,7 +659,7 @@ func TestSceneIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &SceneId, + input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &SceneIDSet{ m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestSceneIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewSceneIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestSceneIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - SceneIDSet - SceneID - } + tests := []struct { + name string + target *SceneIDSet + input SceneID expected bool }{ { - name: "Empty Set", - input: struct { - SceneIDSet - SceneID - }{SceneIDSet: SceneIDSet{}, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &SceneIDSet{}, + input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - SceneIDSet - SceneID - }{SceneIDSet: SceneIDSet{ + target: &SceneIDSet{ m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - SceneIDSet - SceneID - }{SceneIDSet: SceneIDSet{ + target: 
&SceneIDSet{ m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, SceneID: MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.SceneIDSet.Has(tc.input.SceneID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestSceneIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input SceneIDSet - expected SceneIDSet + input *SceneIDSet + expected *SceneIDSet }{ { - name: "Empty Set", - input: SceneIDSet{}, - expected: SceneIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &SceneIDSet{}, + expected: &SceneIDSet{}, }, { - name: "Set Contains the element", - input: SceneIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &SceneIDSet{ m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: SceneIDSet{ + expected: &SceneIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestSceneIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *SceneIDSet expected []SceneID }{ { - name: "Empty slice", + name: "Empty", input: &SceneIDSet{ m: map[SceneID]struct{}{}, s: nil, }, expected: make([]SceneID, 0), }, + { + name: "Nil", + input: nil, + 
expected: nil, + }, { name: "1 element", input: &SceneIDSet{ @@ -854,20 +808,17 @@ func TestSceneIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestSceneIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *SceneIDSet expected *SceneIDSet @@ -922,21 +873,19 @@ func TestSceneIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestSceneIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *SceneIDSet @@ -944,6 +893,23 @@ func TestSceneIDSet_Merge(t *testing.T) { } expected *SceneIDSet }{ + { + name: "Nil Set", + input: struct { + a *SceneIDSet + b *SceneIDSet + }{ + a: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &SceneIDSet{ + m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestSceneIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + 
t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/tag_gen.go b/pkg/id/tag_gen.go index 1e2df781c..27e34057d 100644 --- a/pkg/id/tag_gen.go +++ b/pkg/id/tag_gen.go @@ -44,7 +44,7 @@ func TagIDFromRef(i *string) *TagID { // TagIDFromRefID generates a new TagID from a ref of a generic ID. func TagIDFromRefID(i *ID) *TagID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := TagID(*i) @@ -58,28 +58,40 @@ func (d TagID) ID() ID { // String returns a string representation. func (d TagID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d TagID) GoString() string { - return "id.TagID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d TagID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d TagID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d TagID) GoString() string { + return "TagID(" + d.String() + ")" } // Ref returns a reference. func (d TagID) Ref() *TagID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d TagID) Contains(ids []TagID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d TagID) Contains(ids []TagID) bool { // CopyRef returns a copy of a reference. func (d *TagID) CopyRef() *TagID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *TagID) CopyRef() *TagID { // IDRef returns a reference of a domain id. 
func (d *TagID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *TagID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *TagID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *TagID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *TagID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *TagID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *TagID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *TagID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d TagID) IsNil() bool { return ID(d).IsNil() } -// TagIDToKeys converts IDs into a string slice. -func TagIDToKeys(ids []TagID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *TagID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// TagIDsToStrings converts IDs into a string slice. +func TagIDsToStrings(ids []TagID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // TagIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *TagIDSet) Clone() *TagIDSet { // Merge returns a merged set func (s *TagIDSet) Merge(s2 *TagIDSet) *TagIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/tag_gen_test.go b/pkg/id/tag_gen_test.go index 0620cf8bb..bef35da44 100644 --- a/pkg/id/tag_gen_test.go +++ b/pkg/id/tag_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewTagID(t *testing.T) { id := NewTagID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestTagIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestTagIDFrom(t *testing.T) { result TagID err error }{ - TagID{}, - ErrInvalidID, + result: TagID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestTagIDFrom(t *testing.T) { result TagID err error }{ - TagID{}, - ErrInvalidID, + result: TagID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestTagIDFrom(t *testing.T) { result TagID err error }{ - TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := TagIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := TagIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustTagID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name 
string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustTagID(t *testing.T) { expected: TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustTagID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustTagID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestTagIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *TagID @@ -139,159 +135,149 @@ func TestTagIDFromRef(t *testing.T) { expected: &TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := TagIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := TagIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestTagIDFromRefID(t *testing.T) { id := New() - - subId := TagIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := TagIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, TagIDFromRefID(nil)) + assert.Nil(t, TagIDFromRefID(&ID{})) } func TestTagID_ID(t *testing.T) { id := New() - subId := TagIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := TagIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestTagID_String(t *testing.T) { id := New() - subId := TagIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := TagIDFromRefID(&id) + assert.Equal(t, 
id.String(), id2.String()) + assert.Equal(t, "", TagID{}.String()) } -func TestTagID_GoString(t *testing.T) { - id := New() - subId := TagIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.TagID("+id.String()+")") +func TestTagID_RefString(t *testing.T) { + id := NewTagID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, TagID{}.RefString()) } -func TestTagID_RefString(t *testing.T) { +func TestTagID_GoString(t *testing.T) { id := New() - subId := TagIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := TagIDFromRefID(&id) + assert.Equal(t, "TagID("+id.String()+")", id2.GoString()) + assert.Equal(t, "TagID()", TagID{}.GoString()) } func TestTagID_Ref(t *testing.T) { - id := New() - subId := TagIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewTagID() + assert.Equal(t, TagID(id), *id.Ref()) + assert.Nil(t, (&TagID{}).Ref()) } func TestTagID_Contains(t *testing.T) { id := NewTagID() id2 := NewTagID() assert.True(t, id.Contains([]TagID{id, id2})) + assert.False(t, TagID{}.Contains([]TagID{id, id2, {}})) assert.False(t, id.Contains([]TagID{id2})) } func TestTagID_CopyRef(t *testing.T) { - id := New() - subId := TagIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewTagID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*TagID)(nil).CopyRef()) } func TestTagID_IDRef(t *testing.T) { id := New() - subId := TagIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := TagIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&TagID{}).IDRef()) + assert.Nil(t, (*TagID)(nil).IDRef()) } func TestTagID_StringRef(t *testing.T) { - id := New() - subId := TagIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewTagID() + assert.Equal(t, 
id.String(), *id.StringRef()) + assert.Nil(t, (&TagID{}).StringRef()) + assert.Nil(t, (*TagID)(nil).StringRef()) } func TestTagID_MarhsalJSON(t *testing.T) { - id := New() - subId := TagIDFromRefID(&id) + id := NewTagID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&TagID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*TagID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestTagID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &TagID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustTagID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &TagID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestTagID_MarshalText(t *testing.T) { id := New() - subId := TagIDFromRefID(&id) + res, err := TagIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&TagID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*TagID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestTagID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &TagID{} - - err := subId.UnmarshalText(text) - + id2 := &TagID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestTagID_IsNil(t *testing.T) { - subId := TagID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *TagIDFromRefID(&id) + assert.True(t, TagID{}.IsNil()) + assert.False(t, NewTagID().IsNil()) 
+} - assert.False(t, subId.IsNil()) +func TestTagID_IsNilRef(t *testing.T) { + assert.True(t, TagID{}.Ref().IsNilRef()) + assert.True(t, (*TagID)(nil).IsNilRef()) + assert.False(t, NewTagID().Ref().IsNilRef()) } -func TestTagIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestTagIDsToStrings(t *testing.T) { + tests := []struct { name string input []TagID expected []string @@ -321,19 +307,17 @@ func TestTagIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, TagIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, TagIDsToStrings(tt.input)) }) } - } func TestTagIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestTagIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestTagIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := TagIDsFrom(tc.input) if tc.expected.err != nil { - _, err := TagIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := TagIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestTagIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []TagID @@ -449,25 +431,22 @@ func TestTagIDsFromID(t *testing.T) { 
}, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TagIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestTagIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []TagID @@ -493,21 +472,18 @@ func TestTagIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TagIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestTagIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []TagID expected []ID @@ -537,28 +513,25 @@ func TestTagIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TagIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestTagIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustTagID(id1.String()) + id21 := MustTagID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustTagID(id2.String()) + id22 := MustTagID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustTagID(id3.String()) + id23 := MustTagID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*TagID expected []*ID @@ -570,39 +543,35 @@ func TestTagIDsToIDRef(t *testing.T) { }, { 
name: "1 element", - input: []*TagID{&subId1}, + input: []*TagID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*TagID{&subId1, &subId2, &subId3}, + input: []*TagID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TagIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewTagIDSet(t *testing.T) { TagIdSet := NewTagIDSet() - assert.NotNil(t, TagIdSet) assert.Empty(t, TagIdSet.m) assert.Empty(t, TagIdSet.s) } func TestTagIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []TagID expected *TagIDSet @@ -663,24 +632,19 @@ func TestTagIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewTagIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestTagIDSet_AddRef(t *testing.T) { - t.Parallel() - - TagId := MustTagID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *TagID expected *TagIDSet @@ -695,7 +659,7 @@ func TestTagIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &TagId, + input: MustTagID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &TagIDSet{ m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestTagIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewTagIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestTagIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - TagIDSet - TagID - } + tests := []struct { + name string + target *TagIDSet + input TagID expected bool }{ { - name: "Empty Set", - input: struct { - TagIDSet - TagID - }{TagIDSet: TagIDSet{}, TagID: MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &TagIDSet{}, + input: MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - TagIDSet - TagID - }{TagIDSet: TagIDSet{ + target: &TagIDSet{ m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, TagID: MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - TagIDSet - TagID - }{TagIDSet: TagIDSet{ + target: &TagIDSet{ m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: 
[]TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, TagID: MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.TagIDSet.Has(tc.input.TagID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestTagIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input TagIDSet - expected TagIDSet + input *TagIDSet + expected *TagIDSet }{ { - name: "Empty Set", - input: TagIDSet{}, - expected: TagIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &TagIDSet{}, + expected: &TagIDSet{}, }, { - name: "Set Contains the element", - input: TagIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &TagIDSet{ m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: TagIDSet{ + expected: &TagIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestTagIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *TagIDSet expected []TagID }{ { - name: "Empty slice", + name: "Empty", input: &TagIDSet{ m: map[TagID]struct{}{}, s: nil, }, expected: make([]TagID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &TagIDSet{ @@ -854,20 +808,17 @@ func TestTagIDSet_All(t *testing.T) { }, } - for _, tc := range 
testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestTagIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *TagIDSet expected *TagIDSet @@ -922,21 +873,19 @@ func TestTagIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestTagIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *TagIDSet @@ -944,6 +893,23 @@ func TestTagIDSet_Merge(t *testing.T) { } expected *TagIDSet }{ + { + name: "Nil Set", + input: struct { + a *TagIDSet + b *TagIDSet + }{ + a: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &TagIDSet{ + m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestTagIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/id/team_gen.go b/pkg/id/team_gen.go index fa5fbc25d..c4d0cf4c8 100644 --- a/pkg/id/team_gen.go +++ 
b/pkg/id/team_gen.go @@ -44,7 +44,7 @@ func TeamIDFromRef(i *string) *TeamID { // TeamIDFromRefID generates a new TeamID from a ref of a generic ID. func TeamIDFromRefID(i *ID) *TeamID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := TeamID(*i) @@ -58,28 +58,40 @@ func (d TeamID) ID() ID { // String returns a string representation. func (d TeamID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d TeamID) GoString() string { - return "id.TeamID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d TeamID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d TeamID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d TeamID) GoString() string { + return "TeamID(" + d.String() + ")" } // Ref returns a reference. func (d TeamID) Ref() *TeamID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d TeamID) Contains(ids []TeamID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d TeamID) Contains(ids []TeamID) bool { // CopyRef returns a copy of a reference. func (d *TeamID) CopyRef() *TeamID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *TeamID) CopyRef() *TeamID { // IDRef returns a reference of a domain id. func (d *TeamID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *TeamID) IDRef() *ID { // StringRef returns a reference of a string representation. 
func (d *TeamID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *TeamID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *TeamID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *TeamID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *TeamID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *TeamID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d TeamID) IsNil() bool { return ID(d).IsNil() } -// TeamIDToKeys converts IDs into a string slice. -func TeamIDToKeys(ids []TeamID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *TeamID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// TeamIDsToStrings converts IDs into a string slice. +func TeamIDsToStrings(ids []TeamID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // TeamIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *TeamIDSet) Clone() *TeamIDSet { // Merge returns a merged set func (s *TeamIDSet) Merge(s2 *TeamIDSet) *TeamIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/team_gen_test.go b/pkg/id/team_gen_test.go index 952e26a38..db29cf9b6 100644 --- a/pkg/id/team_gen_test.go +++ b/pkg/id/team_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewTeamID(t *testing.T) { id := NewTeamID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestTeamIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestTeamIDFrom(t *testing.T) { result TeamID err error }{ - TeamID{}, - ErrInvalidID, + result: TeamID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestTeamIDFrom(t *testing.T) { result TeamID err error }{ - TeamID{}, - ErrInvalidID, + result: TeamID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestTeamIDFrom(t *testing.T) { result TeamID err error }{ - TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := TeamIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := TeamIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustTeamID(t *testing.T) { - t.Parallel() - testCases := []struct { + 
tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustTeamID(t *testing.T) { expected: TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustTeamID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustTeamID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestTeamIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *TeamID @@ -139,159 +135,149 @@ func TestTeamIDFromRef(t *testing.T) { expected: &TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := TeamIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := TeamIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestTeamIDFromRefID(t *testing.T) { id := New() - - subId := TeamIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := TeamIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, TeamIDFromRefID(nil)) + assert.Nil(t, TeamIDFromRefID(&ID{})) } func TestTeamID_ID(t *testing.T) { id := New() - subId := TeamIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := TeamIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestTeamID_String(t *testing.T) { id := New() - subId := TeamIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + 
id2 := TeamIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", TeamID{}.String()) } -func TestTeamID_GoString(t *testing.T) { - id := New() - subId := TeamIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.TeamID("+id.String()+")") +func TestTeamID_RefString(t *testing.T) { + id := NewTeamID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, TeamID{}.RefString()) } -func TestTeamID_RefString(t *testing.T) { +func TestTeamID_GoString(t *testing.T) { id := New() - subId := TeamIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := TeamIDFromRefID(&id) + assert.Equal(t, "TeamID("+id.String()+")", id2.GoString()) + assert.Equal(t, "TeamID()", TeamID{}.GoString()) } func TestTeamID_Ref(t *testing.T) { - id := New() - subId := TeamIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewTeamID() + assert.Equal(t, TeamID(id), *id.Ref()) + assert.Nil(t, (&TeamID{}).Ref()) } func TestTeamID_Contains(t *testing.T) { id := NewTeamID() id2 := NewTeamID() assert.True(t, id.Contains([]TeamID{id, id2})) + assert.False(t, TeamID{}.Contains([]TeamID{id, id2, {}})) assert.False(t, id.Contains([]TeamID{id2})) } func TestTeamID_CopyRef(t *testing.T) { - id := New() - subId := TeamIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewTeamID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*TeamID)(nil).CopyRef()) } func TestTeamID_IDRef(t *testing.T) { id := New() - subId := TeamIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := TeamIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&TeamID{}).IDRef()) + assert.Nil(t, (*TeamID)(nil).IDRef()) } func TestTeamID_StringRef(t *testing.T) { - id := New() - subId := TeamIDFromRefID(&id) - - 
assert.Equal(t, *subId.StringRef(), id.String()) + id := NewTeamID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&TeamID{}).StringRef()) + assert.Nil(t, (*TeamID)(nil).StringRef()) } func TestTeamID_MarhsalJSON(t *testing.T) { - id := New() - subId := TeamIDFromRefID(&id) + id := NewTeamID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&TeamID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*TeamID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestTeamID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &TeamID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustTeamID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &TeamID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestTeamID_MarshalText(t *testing.T) { id := New() - subId := TeamIDFromRefID(&id) + res, err := TeamIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&TeamID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*TeamID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestTeamID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &TeamID{} - - err := subId.UnmarshalText(text) - + id2 := &TeamID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestTeamID_IsNil(t *testing.T) { - subId := TeamID{} - - assert.True(t, subId.IsNil()) - - id := 
New() - subId = *TeamIDFromRefID(&id) + assert.True(t, TeamID{}.IsNil()) + assert.False(t, NewTeamID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestTeamID_IsNilRef(t *testing.T) { + assert.True(t, TeamID{}.Ref().IsNilRef()) + assert.True(t, (*TeamID)(nil).IsNilRef()) + assert.False(t, NewTeamID().Ref().IsNilRef()) } -func TestTeamIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestTeamIDsToStrings(t *testing.T) { + tests := []struct { name string input []TeamID expected []string @@ -321,19 +307,17 @@ func TestTeamIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, TeamIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, TeamIDsToStrings(tt.input)) }) } - } func TestTeamIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestTeamIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestTeamIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := TeamIDsFrom(tc.input) if tc.expected.err != nil { - _, err := TeamIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := TeamIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestTeamIDsFromID(t *testing.T) { t.Parallel() - testCases := 
[]struct { + tests := []struct { name string input []ID expected []TeamID @@ -449,25 +431,22 @@ func TestTeamIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TeamIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestTeamIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []TeamID @@ -493,21 +472,18 @@ func TestTeamIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TeamIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestTeamIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []TeamID expected []ID @@ -537,28 +513,25 @@ func TestTeamIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TeamIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestTeamIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustTeamID(id1.String()) + id21 := MustTeamID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustTeamID(id2.String()) + id22 := MustTeamID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustTeamID(id3.String()) + id23 := MustTeamID(id3.String()) - 
testCases := []struct { + tests := []struct { name string input []*TeamID expected []*ID @@ -570,39 +543,35 @@ func TestTeamIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*TeamID{&subId1}, + input: []*TeamID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*TeamID{&subId1, &subId2, &subId3}, + input: []*TeamID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := TeamIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewTeamIDSet(t *testing.T) { TeamIdSet := NewTeamIDSet() - assert.NotNil(t, TeamIdSet) assert.Empty(t, TeamIdSet.m) assert.Empty(t, TeamIdSet.s) } func TestTeamIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []TeamID expected *TeamIDSet @@ -663,24 +632,19 @@ func TestTeamIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewTeamIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestTeamIDSet_AddRef(t *testing.T) { - t.Parallel() - - TeamId := MustTeamID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *TeamID expected *TeamIDSet @@ -695,7 +659,7 @@ func TestTeamIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &TeamId, + input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &TeamIDSet{ m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestTeamIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewTeamIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestTeamIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - TeamIDSet - TeamID - } + tests := []struct { + name string + target *TeamIDSet + input TeamID expected bool }{ { - name: "Empty Set", - input: struct { - TeamIDSet - TeamID - }{TeamIDSet: TeamIDSet{}, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &TeamIDSet{}, + input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - TeamIDSet - TeamID - }{TeamIDSet: TeamIDSet{ + target: &TeamIDSet{ m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - TeamIDSet - TeamID - }{TeamIDSet: TeamIDSet{ + target: &TeamIDSet{ m: 
map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, TeamID: MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.TeamIDSet.Has(tc.input.TeamID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestTeamIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input TeamIDSet - expected TeamIDSet + input *TeamIDSet + expected *TeamIDSet }{ { - name: "Empty Set", - input: TeamIDSet{}, - expected: TeamIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &TeamIDSet{}, + expected: &TeamIDSet{}, }, { - name: "Set Contains the element", - input: TeamIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &TeamIDSet{ m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: TeamIDSet{ + expected: &TeamIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestTeamIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *TeamIDSet expected []TeamID }{ { - name: "Empty slice", + name: "Empty", input: &TeamIDSet{ m: map[TeamID]struct{}{}, s: nil, }, expected: make([]TeamID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: 
&TeamIDSet{ @@ -854,20 +808,17 @@ func TestTeamIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestTeamIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *TeamIDSet expected *TeamIDSet @@ -922,21 +873,19 @@ func TestTeamIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestTeamIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *TeamIDSet @@ -944,6 +893,23 @@ func TestTeamIDSet_Merge(t *testing.T) { } expected *TeamIDSet }{ + { + name: "Nil Set", + input: struct { + a *TeamIDSet + b *TeamIDSet + }{ + a: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &TeamIDSet{ + m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestTeamIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) 
} } diff --git a/pkg/id/user_gen.go b/pkg/id/user_gen.go index 830d5f530..bb2eb0b3d 100644 --- a/pkg/id/user_gen.go +++ b/pkg/id/user_gen.go @@ -44,7 +44,7 @@ func UserIDFromRef(i *string) *UserID { // UserIDFromRefID generates a new UserID from a ref of a generic ID. func UserIDFromRefID(i *ID) *UserID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := UserID(*i) @@ -58,28 +58,40 @@ func (d UserID) ID() ID { // String returns a string representation. func (d UserID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d UserID) GoString() string { - return "id.UserID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d UserID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d UserID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d UserID) GoString() string { + return "UserID(" + d.String() + ")" } // Ref returns a reference. func (d UserID) Ref() *UserID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d UserID) Contains(ids []UserID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d UserID) Contains(ids []UserID) bool { // CopyRef returns a copy of a reference. func (d *UserID) CopyRef() *UserID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *UserID) CopyRef() *UserID { // IDRef returns a reference of a domain id. func (d *UserID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *UserID) IDRef() *ID { // StringRef returns a reference of a string representation. 
func (d *UserID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *UserID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *UserID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *UserID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *UserID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *UserID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d UserID) IsNil() bool { return ID(d).IsNil() } -// UserIDToKeys converts IDs into a string slice. -func UserIDToKeys(ids []UserID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *UserID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// UserIDsToStrings converts IDs into a string slice. +func UserIDsToStrings(ids []UserID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // UserIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *UserIDSet) Clone() *UserIDSet { // Merge returns a merged set func (s *UserIDSet) Merge(s2 *UserIDSet) *UserIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/user_gen_test.go b/pkg/id/user_gen_test.go index 3d074a4d2..0dae4566d 100644 --- a/pkg/id/user_gen_test.go +++ b/pkg/id/user_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewUserID(t *testing.T) { id := NewUserID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestUserIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestUserIDFrom(t *testing.T) { result UserID err error }{ - UserID{}, - ErrInvalidID, + result: UserID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestUserIDFrom(t *testing.T) { result UserID err error }{ - UserID{}, - ErrInvalidID, + result: UserID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestUserIDFrom(t *testing.T) { result UserID err error }{ - UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := UserIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := UserIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustUserID(t *testing.T) { - t.Parallel() - testCases := []struct { + 
tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustUserID(t *testing.T) { expected: UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustUserID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustUserID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestUserIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *UserID @@ -139,159 +135,149 @@ func TestUserIDFromRef(t *testing.T) { expected: &UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := UserIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := UserIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestUserIDFromRefID(t *testing.T) { id := New() - - subId := UserIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := UserIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, UserIDFromRefID(nil)) + assert.Nil(t, UserIDFromRefID(&ID{})) } func TestUserID_ID(t *testing.T) { id := New() - subId := UserIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := UserIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestUserID_String(t *testing.T) { id := New() - subId := UserIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + 
id2 := UserIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", UserID{}.String()) } -func TestUserID_GoString(t *testing.T) { - id := New() - subId := UserIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.UserID("+id.String()+")") +func TestUserID_RefString(t *testing.T) { + id := NewUserID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, UserID{}.RefString()) } -func TestUserID_RefString(t *testing.T) { +func TestUserID_GoString(t *testing.T) { id := New() - subId := UserIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := UserIDFromRefID(&id) + assert.Equal(t, "UserID("+id.String()+")", id2.GoString()) + assert.Equal(t, "UserID()", UserID{}.GoString()) } func TestUserID_Ref(t *testing.T) { - id := New() - subId := UserIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewUserID() + assert.Equal(t, UserID(id), *id.Ref()) + assert.Nil(t, (&UserID{}).Ref()) } func TestUserID_Contains(t *testing.T) { id := NewUserID() id2 := NewUserID() assert.True(t, id.Contains([]UserID{id, id2})) + assert.False(t, UserID{}.Contains([]UserID{id, id2, {}})) assert.False(t, id.Contains([]UserID{id2})) } func TestUserID_CopyRef(t *testing.T) { - id := New() - subId := UserIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewUserID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*UserID)(nil).CopyRef()) } func TestUserID_IDRef(t *testing.T) { id := New() - subId := UserIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := UserIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + assert.Nil(t, (&UserID{}).IDRef()) + assert.Nil(t, (*UserID)(nil).IDRef()) } func TestUserID_StringRef(t *testing.T) { - id := New() - subId := UserIDFromRefID(&id) - - 
assert.Equal(t, *subId.StringRef(), id.String()) + id := NewUserID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&UserID{}).StringRef()) + assert.Nil(t, (*UserID)(nil).StringRef()) } func TestUserID_MarhsalJSON(t *testing.T) { - id := New() - subId := UserIDFromRefID(&id) + id := NewUserID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&UserID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*UserID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestUserID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &UserID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustUserID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &UserID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestUserID_MarshalText(t *testing.T) { id := New() - subId := UserIDFromRefID(&id) + res, err := UserIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&UserID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*UserID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestUserID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &UserID{} - - err := subId.UnmarshalText(text) - + id2 := &UserID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestUserID_IsNil(t *testing.T) { - subId := UserID{} - - assert.True(t, subId.IsNil()) - - id := 
New() - subId = *UserIDFromRefID(&id) + assert.True(t, UserID{}.IsNil()) + assert.False(t, NewUserID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestUserID_IsNilRef(t *testing.T) { + assert.True(t, UserID{}.Ref().IsNilRef()) + assert.True(t, (*UserID)(nil).IsNilRef()) + assert.False(t, NewUserID().Ref().IsNilRef()) } -func TestUserIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestUserIDsToStrings(t *testing.T) { + tests := []struct { name string input []UserID expected []string @@ -321,19 +307,17 @@ func TestUserIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, UserIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, UserIDsToStrings(tt.input)) }) } - } func TestUserIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestUserIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestUserIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := UserIDsFrom(tc.input) if tc.expected.err != nil { - _, err := UserIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + assert.Nil(t, res) } else { - res, err := UserIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestUserIDsFromID(t *testing.T) { t.Parallel() - testCases := 
[]struct { + tests := []struct { name string input []ID expected []UserID @@ -449,25 +431,22 @@ func TestUserIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := UserIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestUserIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []UserID @@ -493,21 +472,18 @@ func TestUserIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := UserIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestUserIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []UserID expected []ID @@ -537,28 +513,25 @@ func TestUserIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := UserIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestUserIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustUserID(id1.String()) + id21 := MustUserID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustUserID(id2.String()) + id22 := MustUserID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustUserID(id3.String()) + id23 := MustUserID(id3.String()) - 
testCases := []struct { + tests := []struct { name string input []*UserID expected []*ID @@ -570,39 +543,35 @@ func TestUserIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*UserID{&subId1}, + input: []*UserID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*UserID{&subId1, &subId2, &subId3}, + input: []*UserID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := UserIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewUserIDSet(t *testing.T) { UserIdSet := NewUserIDSet() - assert.NotNil(t, UserIdSet) assert.Empty(t, UserIdSet.m) assert.Empty(t, UserIdSet.s) } func TestUserIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []UserID expected *UserIDSet @@ -663,24 +632,19 @@ func TestUserIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewUserIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestUserIDSet_AddRef(t *testing.T) { - t.Parallel() - - UserId := MustUserID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *UserID expected *UserIDSet @@ -695,7 +659,7 @@ func TestUserIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &UserId, + input: MustUserID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &UserIDSet{ m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestUserIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewUserIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestUserIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - UserIDSet - UserID - } + tests := []struct { + name string + target *UserIDSet + input UserID expected bool }{ { - name: "Empty Set", - input: struct { - UserIDSet - UserID - }{UserIDSet: UserIDSet{}, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &UserIDSet{}, + input: MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - UserIDSet - UserID - }{UserIDSet: UserIDSet{ + target: &UserIDSet{ m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - UserIDSet - UserID - }{UserIDSet: UserIDSet{ + target: &UserIDSet{ m: 
map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, UserID: MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.UserIDSet.Has(tc.input.UserID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestUserIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input UserIDSet - expected UserIDSet + input *UserIDSet + expected *UserIDSet }{ { - name: "Empty Set", - input: UserIDSet{}, - expected: UserIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &UserIDSet{}, + expected: &UserIDSet{}, }, { - name: "Set Contains the element", - input: UserIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &UserIDSet{ m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: UserIDSet{ + expected: &UserIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestUserIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *UserIDSet expected []UserID }{ { - name: "Empty slice", + name: "Empty", input: &UserIDSet{ m: map[UserID]struct{}{}, s: nil, }, expected: make([]UserID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: 
&UserIDSet{ @@ -854,20 +808,17 @@ func TestUserIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestUserIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *UserIDSet expected *UserIDSet @@ -922,21 +873,19 @@ func TestUserIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestUserIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *UserIDSet @@ -944,6 +893,23 @@ func TestUserIDSet_Merge(t *testing.T) { } expected *UserIDSet }{ + { + name: "Nil Set", + input: struct { + a *UserIDSet + b *UserIDSet + }{ + a: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &UserIDSet{ + m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestUserIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) 
} } diff --git a/pkg/id/widget_gen.go b/pkg/id/widget_gen.go index 52028e49c..01ed68756 100644 --- a/pkg/id/widget_gen.go +++ b/pkg/id/widget_gen.go @@ -44,7 +44,7 @@ func WidgetIDFromRef(i *string) *WidgetID { // WidgetIDFromRefID generates a new WidgetID from a ref of a generic ID. func WidgetIDFromRefID(i *ID) *WidgetID { - if i == nil { + if i == nil || i.IsNil() { return nil } nid := WidgetID(*i) @@ -58,28 +58,40 @@ func (d WidgetID) ID() ID { // String returns a string representation. func (d WidgetID) String() string { + if d.IsNil() { + return "" + } return ID(d).String() } -// GoString implements fmt.GoStringer interface. -func (d WidgetID) GoString() string { - return "id.WidgetID(" + d.String() + ")" +// StringRef returns a reference of the string representation. +func (d WidgetID) RefString() *string { + if d.IsNil() { + return nil + } + str := d.String() + return &str } -// RefString returns a reference of string representation. -func (d WidgetID) RefString() *string { - id := ID(d).String() - return &id +// GoString implements fmt.GoStringer interface. +func (d WidgetID) GoString() string { + return "WidgetID(" + d.String() + ")" } // Ref returns a reference. func (d WidgetID) Ref() *WidgetID { + if d.IsNil() { + return nil + } d2 := d return &d2 } // Contains returns whether the id is contained in the slice. func (d WidgetID) Contains(ids []WidgetID) bool { + if d.IsNil() { + return false + } for _, i := range ids { if d.ID().Equal(i.ID()) { return true @@ -90,7 +102,7 @@ func (d WidgetID) Contains(ids []WidgetID) bool { // CopyRef returns a copy of a reference. func (d *WidgetID) CopyRef() *WidgetID { - if d == nil { + if d.IsNilRef() { return nil } d2 := *d @@ -99,7 +111,7 @@ func (d *WidgetID) CopyRef() *WidgetID { // IDRef returns a reference of a domain id. 
func (d *WidgetID) IDRef() *ID { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d) @@ -108,7 +120,7 @@ func (d *WidgetID) IDRef() *ID { // StringRef returns a reference of a string representation. func (d *WidgetID) StringRef() *string { - if d == nil { + if d.IsNilRef() { return nil } id := ID(*d).String() @@ -117,6 +129,9 @@ func (d *WidgetID) StringRef() *string { // MarhsalJSON implements json.Marhsaler interface func (d *WidgetID) MarhsalJSON() ([]byte, error) { + if d.IsNilRef() { + return nil, nil + } return json.Marshal(d.String()) } @@ -132,7 +147,7 @@ func (d *WidgetID) UnmarhsalJSON(bs []byte) (err error) { // MarshalText implements encoding.TextMarshaler interface func (d *WidgetID) MarshalText() ([]byte, error) { - if d == nil { + if d.IsNilRef() { return nil, nil } return []byte(d.String()), nil @@ -144,18 +159,23 @@ func (d *WidgetID) UnmarshalText(text []byte) (err error) { return } -// Ref returns true if a ID is nil or zero-value +// IsNil returns true if a ID is zero-value func (d WidgetID) IsNil() bool { return ID(d).IsNil() } -// WidgetIDToKeys converts IDs into a string slice. -func WidgetIDToKeys(ids []WidgetID) []string { - keys := make([]string, 0, len(ids)) +// IsNilRef returns true if a ID is nil or zero-value +func (d *WidgetID) IsNilRef() bool { + return d == nil || ID(*d).IsNil() +} + +// WidgetIDsToStrings converts IDs into a string slice. +func WidgetIDsToStrings(ids []WidgetID) []string { + strs := make([]string, 0, len(ids)) for _, i := range ids { - keys = append(keys, i.String()) + strs = append(strs, i.String()) } - return keys + return strs } // WidgetIDsFrom converts a string slice into a ID slice. 
@@ -285,9 +305,6 @@ func (s *WidgetIDSet) Clone() *WidgetIDSet { // Merge returns a merged set func (s *WidgetIDSet) Merge(s2 *WidgetIDSet) *WidgetIDSet { - if s == nil { - return nil - } s3 := s.Clone() if s2 == nil { return s3 diff --git a/pkg/id/widget_gen_test.go b/pkg/id/widget_gen_test.go index 32215f0d5..9dfa8d42a 100644 --- a/pkg/id/widget_gen_test.go +++ b/pkg/id/widget_gen_test.go @@ -4,7 +4,6 @@ package id import ( "encoding/json" - "errors" "testing" "github.com/oklog/ulid" @@ -14,15 +13,13 @@ import ( func TestNewWidgetID(t *testing.T) { id := NewWidgetID() assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) + u, err := ulid.Parse(id.String()) + assert.NotNil(t, u) assert.Nil(t, err) } func TestWidgetIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -37,8 +34,8 @@ func TestWidgetIDFrom(t *testing.T) { result WidgetID err error }{ - WidgetID{}, - ErrInvalidID, + result: WidgetID{}, + err: ErrInvalidID, }, }, { @@ -48,8 +45,8 @@ func TestWidgetIDFrom(t *testing.T) { result WidgetID err error }{ - WidgetID{}, - ErrInvalidID, + result: WidgetID{}, + err: ErrInvalidID, }, }, { @@ -59,27 +56,26 @@ func TestWidgetIDFrom(t *testing.T) { result WidgetID err error }{ - WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, + result: WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + err: nil, }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := WidgetIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := WidgetIDFrom(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestMustWidgetID(t 
*testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -102,23 +98,23 @@ func TestMustWidgetID(t *testing.T) { expected: WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustWidgetID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustWidgetID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestWidgetIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *WidgetID @@ -139,159 +135,149 @@ func TestWidgetIDFromRef(t *testing.T) { expected: &WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := WidgetIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := WidgetIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestWidgetIDFromRefID(t *testing.T) { id := New() - - subId := WidgetIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) + id2 := WidgetIDFromRefID(&id) + assert.Equal(t, id.id, id2.id) + assert.Nil(t, WidgetIDFromRefID(nil)) + assert.Nil(t, WidgetIDFromRefID(&ID{})) } func TestWidgetID_ID(t *testing.T) { id := New() - subId := WidgetIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) + id2 := WidgetIDFromRefID(&id) + assert.Equal(t, id, id2.ID()) } func TestWidgetID_String(t *testing.T) { 
id := New() - subId := WidgetIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) + id2 := WidgetIDFromRefID(&id) + assert.Equal(t, id.String(), id2.String()) + assert.Equal(t, "", WidgetID{}.String()) } -func TestWidgetID_GoString(t *testing.T) { - id := New() - subId := WidgetIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.WidgetID("+id.String()+")") +func TestWidgetID_RefString(t *testing.T) { + id := NewWidgetID() + assert.Equal(t, id.String(), *id.RefString()) + assert.Nil(t, WidgetID{}.RefString()) } -func TestWidgetID_RefString(t *testing.T) { +func TestWidgetID_GoString(t *testing.T) { id := New() - subId := WidgetIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) + id2 := WidgetIDFromRefID(&id) + assert.Equal(t, "WidgetID("+id.String()+")", id2.GoString()) + assert.Equal(t, "WidgetID()", WidgetID{}.GoString()) } func TestWidgetID_Ref(t *testing.T) { - id := New() - subId := WidgetIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) + id := NewWidgetID() + assert.Equal(t, WidgetID(id), *id.Ref()) + assert.Nil(t, (&WidgetID{}).Ref()) } func TestWidgetID_Contains(t *testing.T) { id := NewWidgetID() id2 := NewWidgetID() assert.True(t, id.Contains([]WidgetID{id, id2})) + assert.False(t, WidgetID{}.Contains([]WidgetID{id, id2, {}})) assert.False(t, id.Contains([]WidgetID{id2})) } func TestWidgetID_CopyRef(t *testing.T) { - id := New() - subId := WidgetIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) + id := NewWidgetID().Ref() + id2 := id.CopyRef() + assert.Equal(t, id, id2) + assert.NotSame(t, id, id2) + assert.Nil(t, (*WidgetID)(nil).CopyRef()) } func TestWidgetID_IDRef(t *testing.T) { id := New() - subId := WidgetIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) + id2 := WidgetIDFromRefID(&id) + assert.Equal(t, &id, id2.IDRef()) + 
assert.Nil(t, (&WidgetID{}).IDRef()) + assert.Nil(t, (*WidgetID)(nil).IDRef()) } func TestWidgetID_StringRef(t *testing.T) { - id := New() - subId := WidgetIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) + id := NewWidgetID() + assert.Equal(t, id.String(), *id.StringRef()) + assert.Nil(t, (&WidgetID{}).StringRef()) + assert.Nil(t, (*WidgetID)(nil).StringRef()) } func TestWidgetID_MarhsalJSON(t *testing.T) { - id := New() - subId := WidgetIDFromRefID(&id) + id := NewWidgetID() + res, err := id.MarhsalJSON() + assert.Nil(t, err) + exp, _ := json.Marshal(id.String()) + assert.Equal(t, exp, res) - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) + res, err = (&WidgetID{}).MarhsalJSON() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*WidgetID)(nil).MarhsalJSON() assert.Nil(t, err) - assert.Equal(t, exp, res) + assert.Nil(t, res) } func TestWidgetID_UnmarhsalJSON(t *testing.T) { jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &WidgetID{} - - err := subId.UnmarhsalJSON([]byte(jsonString)) - + id := MustWidgetID("01f3zhkysvcxsnzepyyqtq21fb") + id2 := &WidgetID{} + err := id2.UnmarhsalJSON([]byte(jsonString)) assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) + assert.Equal(t, id, *id2) } func TestWidgetID_MarshalText(t *testing.T) { id := New() - subId := WidgetIDFromRefID(&id) + res, err := WidgetIDFromRefID(&id).MarshalText() + assert.Nil(t, err) + assert.Equal(t, []byte(id.String()), res) - res, err := subId.MarshalText() + res, err = (&WidgetID{}).MarshalText() + assert.Nil(t, err) + assert.Nil(t, res) + res, err = (*WidgetID)(nil).MarshalText() assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) + assert.Nil(t, res) } func TestWidgetID_UnmarshalText(t *testing.T) { text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &WidgetID{} - - err := subId.UnmarshalText(text) - + id2 := &WidgetID{} + err := id2.UnmarshalText(text) assert.Nil(t, err) - 
assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) } func TestWidgetID_IsNil(t *testing.T) { - subId := WidgetID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *WidgetIDFromRefID(&id) + assert.True(t, WidgetID{}.IsNil()) + assert.False(t, NewWidgetID().IsNil()) +} - assert.False(t, subId.IsNil()) +func TestWidgetID_IsNilRef(t *testing.T) { + assert.True(t, WidgetID{}.Ref().IsNilRef()) + assert.True(t, (*WidgetID)(nil).IsNilRef()) + assert.False(t, NewWidgetID().Ref().IsNilRef()) } -func TestWidgetIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { +func TestWidgetIDsToStrings(t *testing.T) { + tests := []struct { name string input []WidgetID expected []string @@ -321,19 +307,17 @@ func TestWidgetIDToKeys(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, WidgetIDToKeys(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, WidgetIDsToStrings(tt.input)) }) } - } func TestWidgetIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string expected struct { @@ -383,10 +367,10 @@ func TestWidgetIDsFrom(t *testing.T) { }, }, { - name: "multiple elements", + name: "error", input: []string{ "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", + "x", "01f3zhcaq35403zdjnd6dcm0t3", }, expected: struct { @@ -399,27 +383,25 @@ func TestWidgetIDsFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + res, err := WidgetIDsFrom(tc.input) if tc.expected.err != nil { - _, err := WidgetIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + assert.Equal(t, tc.expected.err, err) + 
assert.Nil(t, res) } else { - res, err := WidgetIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Nil(t, err) + assert.Equal(t, tc.expected.res, res) } - }) } } func TestWidgetIDsFromID(t *testing.T) { t.Parallel() - testCases := []struct { + tests := []struct { name string input []ID expected []WidgetID @@ -449,25 +431,22 @@ func TestWidgetIDsFromID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := WidgetIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestWidgetIDsFromIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - testCases := []struct { + tests := []struct { name string input []*ID expected []WidgetID @@ -493,21 +472,18 @@ func TestWidgetIDsFromIDRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := WidgetIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestWidgetIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []WidgetID expected []ID @@ -537,28 +513,25 @@ func TestWidgetIDsToID(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := WidgetIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestWidgetIDsToIDRef(t *testing.T) { - t.Parallel() - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := 
MustWidgetID(id1.String()) + id21 := MustWidgetID(id1.String()) id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustWidgetID(id2.String()) + id22 := MustWidgetID(id2.String()) id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustWidgetID(id3.String()) + id23 := MustWidgetID(id3.String()) - testCases := []struct { + tests := []struct { name string input []*WidgetID expected []*ID @@ -570,39 +543,35 @@ func TestWidgetIDsToIDRef(t *testing.T) { }, { name: "1 element", - input: []*WidgetID{&subId1}, + input: []*WidgetID{&id21}, expected: []*ID{&id1}, }, { name: "multiple elements", - input: []*WidgetID{&subId1, &subId2, &subId3}, + input: []*WidgetID{&id21, &id22, &id23}, expected: []*ID{&id1, &id2, &id3}, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := WidgetIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestNewWidgetIDSet(t *testing.T) { WidgetIdSet := NewWidgetIDSet() - assert.NotNil(t, WidgetIdSet) assert.Empty(t, WidgetIdSet.m) assert.Empty(t, WidgetIdSet.s) } func TestWidgetIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input []WidgetID expected *WidgetIDSet @@ -663,24 +632,19 @@ func TestWidgetIDSet_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewWidgetIDSet() set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestWidgetIDSet_AddRef(t *testing.T) { - t.Parallel() - - WidgetId := MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { + tests := []struct { name string input *WidgetID expected *WidgetIDSet @@ -695,7 +659,7 @@ func TestWidgetIDSet_AddRef(t *testing.T) { }, { name: "1 element", - input: &WidgetId, + input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), expected: &WidgetIDSet{ m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, @@ -703,127 +667,117 @@ func TestWidgetIDSet_AddRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - + t.Run(tc.name, func(t *testing.T) { + t.Parallel() set := NewWidgetIDSet() set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) + assert.Equal(t, tc.expected, set) }) } } func TestWidgetIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - WidgetIDSet - WidgetID - } + tests := []struct { + name string + target *WidgetIDSet + input WidgetID expected bool }{ { - name: "Empty Set", - input: struct { - WidgetIDSet - WidgetID - }{WidgetIDSet: WidgetIDSet{}, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + name: "Empty Set", + target: &WidgetIDSet{}, + input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), expected: false, }, { name: "Set Contains the element", - input: struct { - WidgetIDSet - WidgetID - }{WidgetIDSet: WidgetIDSet{ + target: &WidgetIDSet{ m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), expected: true, }, { name: "Set does not Contains the element", - input: struct { - WidgetIDSet - 
WidgetID - }{WidgetIDSet: WidgetIDSet{ + target: &WidgetIDSet{ m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, WidgetID: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), expected: false, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.WidgetIDSet.Has(tc.input.WidgetID)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.target.Has(tc.input)) }) } } func TestWidgetIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string - input WidgetIDSet - expected WidgetIDSet + input *WidgetIDSet + expected *WidgetIDSet }{ { - name: "Empty Set", - input: WidgetIDSet{}, - expected: WidgetIDSet{ - m: nil, - s: nil, - }, + name: "Empty set", + input: &WidgetIDSet{}, + expected: &WidgetIDSet{}, }, { - name: "Set Contains the element", - input: WidgetIDSet{ + name: "Nil set", + input: nil, + expected: nil, + }, + { + name: "Contains the element", + input: &WidgetIDSet{ m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, }, - expected: WidgetIDSet{ + expected: &WidgetIDSet{ m: nil, s: nil, }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + tc.input.Clear() + assert.Equal(t, tc.expected, tc.input) }) } } func TestWidgetIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *WidgetIDSet expected []WidgetID }{ { - name: "Empty slice", + name: "Empty", input: &WidgetIDSet{ m: map[WidgetID]struct{}{}, 
s: nil, }, expected: make([]WidgetID, 0), }, + { + name: "Nil", + input: nil, + expected: nil, + }, { name: "1 element", input: &WidgetIDSet{ @@ -854,20 +808,17 @@ func TestWidgetIDSet_All(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.All()) }) } } func TestWidgetIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input *WidgetIDSet expected *WidgetIDSet @@ -922,21 +873,19 @@ func TestWidgetIDSet_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) + assert.Equal(t, tc.expected, clone) + assert.NotSame(t, tc.input, clone) }) } } func TestWidgetIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { + tests := []struct { name string input struct { a *WidgetIDSet @@ -944,6 +893,23 @@ func TestWidgetIDSet_Merge(t *testing.T) { } expected *WidgetIDSet }{ + { + name: "Nil Set", + input: struct { + a *WidgetIDSet + b *WidgetIDSet + }{ + a: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: nil, + }, + expected: &WidgetIDSet{ + m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, { name: "Empty Set", input: struct { @@ -1000,12 +966,11 @@ func TestWidgetIDSet_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - 
assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) }) } } diff --git a/pkg/layer/builder.go b/pkg/layer/builder.go index 0eadb525f..651d378bf 100644 --- a/pkg/layer/builder.go +++ b/pkg/layer/builder.go @@ -1,9 +1,5 @@ package layer -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type Builder struct { base layerBase } @@ -20,17 +16,17 @@ func (b *Builder) Item() *ItemBuilder { return NewItem().base(b.base) } -func (b *Builder) ID(id id.LayerID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.base.id = id return b } func (b *Builder) NewID() *Builder { - b.base.id = id.NewLayerID() + b.base.id = NewID() return b } -func (b *Builder) Scene(s id.SceneID) *Builder { +func (b *Builder) Scene(s SceneID) *Builder { b.base.scene = s return b } @@ -52,17 +48,17 @@ func (b *Builder) IsVisibleRef(visible *bool) *Builder { return b } -func (b *Builder) Plugin(plugin *id.PluginID) *Builder { +func (b *Builder) Plugin(plugin *PluginID) *Builder { b.base.plugin = plugin.CopyRef() return b } -func (b *Builder) Extension(extension *id.PluginExtensionID) *Builder { +func (b *Builder) Extension(extension *PluginExtensionID) *Builder { b.base.extension = extension.CopyRef() return b } -func (b *Builder) Property(p *id.PropertyID) *Builder { +func (b *Builder) Property(p *PropertyID) *Builder { b.base.property = p.CopyRef() return b } diff --git a/pkg/layer/decoding/common.go b/pkg/layer/decoding/common.go index acae2d58f..10d275790 100644 --- a/pkg/layer/decoding/common.go +++ b/pkg/layer/decoding/common.go @@ -7,8 +7,8 @@ import ( "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/czml" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/kml" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) @@ -18,21 +18,21 @@ var ( ) var ( 
- extensions = map[string]id.PluginExtensionID{ - "Point": id.PluginExtensionID("marker"), - "Polygon": id.PluginExtensionID("polygon"), - "Polyline": id.PluginExtensionID("polyline"), + extensions = map[string]layer.PluginExtensionID{ + "Point": layer.PluginExtensionID("marker"), + "Polygon": layer.PluginExtensionID("polygon"), + "Polyline": layer.PluginExtensionID("polyline"), } - propertySchemas = map[string]id.PropertySchemaID{ - "Point": id.MustPropertySchemaID("reearth/marker"), - "Polygon": id.MustPropertySchemaID("reearth/polygon"), - "Polyline": id.MustPropertySchemaID("reearth/polyline"), + propertySchemas = map[string]property.SchemaID{ + "Point": property.MustSchemaID("reearth/marker"), + "Polygon": property.MustSchemaID("reearth/polygon"), + "Polyline": property.MustSchemaID("reearth/polyline"), } - propertyItems = id.PropertySchemaGroupID("default") - propertyFields = map[string]id.PropertySchemaFieldID{ - "Point": id.PropertySchemaFieldID("location"), - "Polygon": id.PropertySchemaFieldID("polygon"), - "Polyline": id.PropertySchemaFieldID("coordinates"), + propertyItems = property.SchemaGroupID("default") + propertyFields = map[string]property.FieldID{ + "Point": property.FieldID("location"), + "Polygon": property.FieldID("polygon"), + "Polyline": property.FieldID("coordinates"), } ) @@ -74,7 +74,7 @@ func rgbafToHex(rgbaf []float64) (string, error) { return rgbaToHex(rgba) } -func MustCreateProperty(t string, v interface{}, sceneID id.SceneID, styleItem interface{}, extension string) *property.Property { +func MustCreateProperty(t string, v interface{}, sceneID layer.SceneID, styleItem interface{}, extension string) *property.Property { p, err := createProperty(t, v, sceneID, styleItem, extension) if err != nil { panic(err) @@ -82,7 +82,7 @@ func MustCreateProperty(t string, v interface{}, sceneID id.SceneID, styleItem i return p } -func createProperty(t string, v interface{}, sceneID id.SceneID, styleItem interface{}, extension string) 
(*property.Property, error) { +func createProperty(t string, v interface{}, sceneID layer.SceneID, styleItem interface{}, extension string) (*property.Property, error) { propertySchema := propertySchemas[t] item := propertyItems field := propertyFields[t] diff --git a/pkg/layer/decoding/czml.go b/pkg/layer/decoding/czml.go index b17f266b1..ed6a43c32 100644 --- a/pkg/layer/decoding/czml.go +++ b/pkg/layer/decoding/czml.go @@ -5,18 +5,17 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/czml" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) type CZMLDecoder struct { decoder *json.Decoder - sceneId id.SceneID + sceneId layer.SceneID groupName string } -func NewCZMLDecoder(d *json.Decoder, s id.SceneID) *CZMLDecoder { +func NewCZMLDecoder(d *json.Decoder, s layer.SceneID) *CZMLDecoder { return &CZMLDecoder{ decoder: d, sceneId: s, @@ -76,7 +75,7 @@ func (d *CZMLDecoder) Decode() (Result, error) { func (d *CZMLDecoder) decodeLayer(t string, coords []float64, style interface{}, layerName string) (*layer.Item, *property.Property, error) { var p *property.Property var l *layer.Item - var ex id.PluginExtensionID + var ex layer.PluginExtensionID var err error switch t { case "Point": @@ -164,7 +163,7 @@ func (d *CZMLDecoder) decodeLayer(t string, coords []float64, style interface{}, Scene(d.sceneId). Property(p.IDRef()). Extension(&ex). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). 
Build() if err != nil { return nil, nil, err diff --git a/pkg/layer/decoding/czml_test.go b/pkg/layer/decoding/czml_test.go index ed4a49723..e23b8665d 100644 --- a/pkg/layer/decoding/czml_test.go +++ b/pkg/layer/decoding/czml_test.go @@ -5,7 +5,7 @@ import ( "strings" "testing" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" ) @@ -86,7 +86,7 @@ const ( func TestCZMLDecoder_Decode(t *testing.T) { r := strings.NewReader(czmlmock) d := json.NewDecoder(r) - s := id.NewSceneID() + s := layer.NewSceneID() p := NewCZMLDecoder(d, s) result, err := p.Decode() assert.NoError(t, err) diff --git a/pkg/layer/decoding/decoder.go b/pkg/layer/decoding/decoder.go index 641e3baa4..3467be356 100644 --- a/pkg/layer/decoding/decoder.go +++ b/pkg/layer/decoding/decoder.go @@ -3,7 +3,6 @@ package decoding import ( "fmt" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) @@ -51,7 +50,7 @@ func (r Result) Validate() error { func resultFrom(lg *layer.Group, layers layer.Map, properties property.Map) (r Result, err error) { r = Result{ - Root: layer.NewIDList([]id.LayerID{lg.ID()}), + Root: layer.NewIDList([]layer.ID{lg.ID()}), Layers: layers.Add(lg.LayerRef()), Properties: properties, } diff --git a/pkg/layer/decoding/geojson.go b/pkg/layer/decoding/geojson.go index b21ef8bad..ab4516b7e 100644 --- a/pkg/layer/decoding/geojson.go +++ b/pkg/layer/decoding/geojson.go @@ -6,7 +6,6 @@ import ( "io" geojson "github.com/paulmach/go.geojson" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) @@ -19,11 +18,11 @@ type GeoStyle struct { type GeoJSONDecoder struct { reader io.Reader features []*geojson.Feature - sceneId id.SceneID + sceneId layer.SceneID groupName string } -func 
NewGeoJSONDecoder(r io.Reader, s id.SceneID) *GeoJSONDecoder { +func NewGeoJSONDecoder(r io.Reader, s layer.SceneID) *GeoJSONDecoder { return &GeoJSONDecoder{ reader: r, sceneId: s, @@ -109,7 +108,7 @@ func (d *GeoJSONDecoder) decodeLayer() (*layer.Item, *property.Property, error) var feat *geojson.Feature var p *property.Property var l *layer.Item - var ex id.PluginExtensionID + var ex layer.PluginExtensionID var err error var stroke, fillColor string var strokeWidth float64 @@ -242,7 +241,7 @@ func (d *GeoJSONDecoder) decodeLayer() (*layer.Item, *property.Property, error) Scene(d.sceneId). Property(p.IDRef()). Extension(&ex). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). Build() if err != nil { return nil, nil, err diff --git a/pkg/layer/decoding/geojson_test.go b/pkg/layer/decoding/geojson_test.go index d1fd13c69..4f63e6de1 100644 --- a/pkg/layer/decoding/geojson_test.go +++ b/pkg/layer/decoding/geojson_test.go @@ -4,7 +4,7 @@ import ( "strings" "testing" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" ) @@ -159,7 +159,7 @@ const geojsonmock = `{ func TestGeoJSONDecoder_Decode(t *testing.T) { r := strings.NewReader(geojsonmock) - s := id.NewSceneID() + s := layer.NewSceneID() p := NewGeoJSONDecoder(r, s) result, err := p.Decode() assert.NoError(t, err) diff --git a/pkg/layer/decoding/kml.go b/pkg/layer/decoding/kml.go index 07f701d81..b09a19604 100644 --- a/pkg/layer/decoding/kml.go +++ b/pkg/layer/decoding/kml.go @@ -8,7 +8,6 @@ import ( "strconv" "strings" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/kml" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" @@ -16,11 +15,11 @@ import ( type KMLDecoder struct { decoder *xml.Decoder - sceneId id.SceneID + sceneId layer.SceneID styles map[string]kml.Style } -func 
NewKMLDecoder(d *xml.Decoder, s id.SceneID) *KMLDecoder { +func NewKMLDecoder(d *xml.Decoder, s layer.SceneID) *KMLDecoder { return &KMLDecoder{ decoder: d, sceneId: s, @@ -177,7 +176,7 @@ func (d *KMLDecoder) decodeCollection(c kml.Collection, depth int) (*layer.Group func (d *KMLDecoder) decodePlacemark(p kml.Placemark) (*layer.Item, *property.Property, error) { var layerItem *layer.Item var prop *property.Property - var ex id.PluginExtensionID + var ex layer.PluginExtensionID var styleId string var layerName string @@ -243,7 +242,7 @@ func (d *KMLDecoder) decodePlacemark(p kml.Placemark) (*layer.Item, *property.Pr Scene(d.sceneId). Property(prop.IDRef()). Extension(&ex). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). Build() if err != nil { return nil, nil, err diff --git a/pkg/layer/decoding/kml_test.go b/pkg/layer/decoding/kml_test.go index 62be12afa..781dd0d49 100644 --- a/pkg/layer/decoding/kml_test.go +++ b/pkg/layer/decoding/kml_test.go @@ -11,8 +11,6 @@ import ( "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/kml" "github.com/reearth/reearth-backend/pkg/layer" - - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" ) @@ -83,14 +81,14 @@ const kmlmock = ` ` func TestNewKMLDecoder(t *testing.T) { - d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), id.NewSceneID()) + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), layer.NewSceneID()) assert.NotNil(t, d) } func TestKMLDecoder_Decode(t *testing.T) { r := strings.NewReader(kmlmock) d := xml.NewDecoder(r) - s := id.NewSceneID() + s := layer.NewSceneID() k := NewKMLDecoder(d, s) result, err := k.Decode() @@ -120,7 +118,7 @@ func TestKMLDecoder_Decode(t *testing.T) { fImage, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, "image")) actUrl, _ := url.Parse("http://maps.google.com/mapfiles/kml/pal3/icon19.png") assert.Equal(t, actUrl, 
fImage.Value().Value()) - fh, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, id.PropertySchemaFieldID("height"))) + fh, _, _ := prop.Field(property.PointFieldBySchemaGroup(propertyItems, property.FieldID("height"))) assert.Equal(t, 43.0, fh.Value().Value()) // Polygon test @@ -397,7 +395,7 @@ func TestKMLDecoder_Decode(t *testing.T) { //} // func TestKMLparseKML(t *testing.T) { - s := id.NewSceneID() + s := layer.NewSceneID() testCases := []struct { name, KMLstr string @@ -489,7 +487,7 @@ func TestKMLparseKML(t *testing.T) { } } func TestKMLdecodePlacemark(t *testing.T) { - s := id.NewSceneID() + s := layer.NewSceneID() point := MustCreateProperty("Point", property.LatLngHeight{ Lat: 23, Lng: 40, @@ -554,7 +552,7 @@ func TestKMLdecodePlacemark(t *testing.T) { Scene(s). Property(point.IDRef()). Extension(&pointExt). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). MustBuild(), expectedProperty: point, err: nil, @@ -578,7 +576,7 @@ func TestKMLdecodePlacemark(t *testing.T) { Scene(s). Property(polyline.IDRef()). Extension(&polylineExt). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). MustBuild(), expectedProperty: polyline, err: nil, @@ -609,7 +607,7 @@ func TestKMLdecodePlacemark(t *testing.T) { Scene(s). Property(polygon.IDRef()). Extension(&polygonExt). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). MustBuild(), expectedProperty: polygon, err: nil, @@ -630,7 +628,7 @@ func TestKMLdecodePlacemark(t *testing.T) { Name("Point"). Scene(s). Extension(&pointExt). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). 
MustBuild(), expectedProperty: nil, err: nil, @@ -660,7 +658,7 @@ func TestKMLdecodePlacemark(t *testing.T) { // @todo not finished yet //func TestKMLdecodeCollection(t *testing.T) { // // @todo err and style cases -// s := id.NewSceneID() +// s := layer.NewSceneID() // pointExt := extensions["Point"] // point := MustCreateProperty("Point", property.LatLngHeight{ // Lat: 39, @@ -674,7 +672,7 @@ func TestKMLdecodePlacemark(t *testing.T) { // Scene(s). // Property(point.IDRef()). // Extension(&pointExt). -// Plugin(&id.OfficialPluginID). +// Plugin(&layer.OfficialPluginID). // MustBuild() // var ll layer.Layer = li // testCases := []struct { diff --git a/pkg/layer/decoding/reearth.go b/pkg/layer/decoding/reearth.go index 361b0bcd5..a365b4c2a 100644 --- a/pkg/layer/decoding/reearth.go +++ b/pkg/layer/decoding/reearth.go @@ -5,17 +5,16 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/builtin" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) type ReearthDecoder struct { d *json.Decoder - scene id.SceneID + scene layer.SceneID } -func NewReearthDecoder(d *json.Decoder, scene id.SceneID) *ReearthDecoder { +func NewReearthDecoder(d *json.Decoder, scene layer.SceneID) *ReearthDecoder { return &ReearthDecoder{d: d, scene: scene} } @@ -43,7 +42,7 @@ type ReearthRoot struct { Layers []*ReearthLayer `json:"layers"` } -func (r *ReearthRoot) Result(scene id.SceneID) (result Result, err error) { +func (r *ReearthRoot) Result(scene layer.SceneID) (result Result, err error) { if r == nil { return } @@ -66,15 +65,15 @@ func (r *ReearthRoot) Result(scene id.SceneID) (result Result, err error) { } type ReearthLayer struct { - Plugin *id.PluginID `json:"plugin"` - Extension *id.PluginExtensionID `json:"extension"` - Name string `json:"name"` - Infobox *ReearthInfobox `json:"infobox"` - Property *ReearthProperty `json:"property"` - Layers []ReearthLayer `json:"layers"` - IsVisible 
*bool `json:"isVisible"` - LinkedDatasetSchema *id.DatasetSchemaID `json:"linkedDatasetSchema"` - LinkedDataset *id.DatasetID `json:"linkedDataset"` + Plugin *layer.PluginID `json:"plugin"` + Extension *layer.PluginExtensionID `json:"extension"` + Name string `json:"name"` + Infobox *ReearthInfobox `json:"infobox"` + Property *ReearthProperty `json:"property"` + Layers []ReearthLayer `json:"layers"` + IsVisible *bool `json:"isVisible"` + LinkedDatasetSchema *layer.DatasetSchemaID `json:"linkedDatasetSchema"` + LinkedDataset *layer.DatasetID `json:"linkedDataset"` } func (l *ReearthLayer) layer() *layer.Initializer { @@ -92,9 +91,9 @@ func (l *ReearthLayer) layer() *layer.Initializer { } } - var psid *id.PropertySchemaID + var psid *property.SchemaID if l.Plugin != nil || l.Extension != nil { - psid2, err := id.PropertySchemaIDFromExtension(*l.Plugin, *l.Extension) + psid2, err := layer.PropertySchemaIDFromExtension(*l.Plugin, *l.Extension) if err == nil { // if there is an error, property schema id will be nil. psid = psid2.Ref() @@ -151,9 +150,9 @@ func (i *ReearthInfobox) infobox() *layer.InitializerInfobox { } type ReearthInfoboxField struct { - Plugin id.PluginID `json:"plugin"` - Extension id.PluginExtensionID `json:"extension"` - Property *ReearthProperty `json:"property"` + Plugin layer.PluginID `json:"plugin"` + Extension layer.PluginExtensionID `json:"extension"` + Property *ReearthProperty `json:"property"` } func (f *ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { @@ -161,9 +160,9 @@ func (f *ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { return nil } - var psid *id.PropertySchemaID + var psid *property.SchemaID { - psid2, err := id.PropertySchemaIDFromExtension(f.Plugin, f.Extension) + psid2, err := layer.PropertySchemaIDFromExtension(f.Plugin, f.Extension) if err == nil { // if there is an error, property schema id will be nil. 
psid = psid2.Ref() @@ -182,9 +181,9 @@ func (f *ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { } } -type ReearthProperty map[id.PropertySchemaGroupID]ReearthPropertyItem +type ReearthProperty map[property.SchemaGroupID]ReearthPropertyItem -func (p ReearthProperty) property(schema *id.PropertySchemaID) *property.Initializer { +func (p ReearthProperty) property(schema *property.SchemaID) *property.Initializer { if schema == nil || p == nil { return nil } @@ -206,7 +205,7 @@ type ReearthPropertyItem struct { Fields ReearthPropertyGroup `json:"fields"` } -func (p *ReearthPropertyItem) propertyItem(key id.PropertySchemaGroupID) *property.InitializerItem { +func (p *ReearthPropertyItem) propertyItem(key property.SchemaGroupID) *property.InitializerItem { if p == nil { return nil } @@ -244,7 +243,7 @@ func (p *ReearthPropertyItem) propertyItem(key id.PropertySchemaGroupID) *proper } } -type ReearthPropertyGroup map[id.PropertySchemaFieldID]*ReearthPropertyField +type ReearthPropertyGroup map[property.FieldID]*ReearthPropertyField func (p ReearthPropertyGroup) propertyGroup() *property.InitializerGroup { if p == nil || len(p) == 0 { @@ -270,7 +269,7 @@ type ReearthPropertyField struct { Value interface{} `json:"value"` } -func (f *ReearthPropertyField) propertyField(key id.PropertySchemaFieldID) *property.InitializerField { +func (f *ReearthPropertyField) propertyField(key property.FieldID) *property.InitializerField { if f == nil || f.Type == "" { return nil } @@ -301,7 +300,7 @@ func (f *ReearthPropertyField) propertyField(key id.PropertySchemaFieldID) *prop } type ReearthPropertyLink struct { - Dataset *id.DatasetID `json:"dataset"` - Schema id.DatasetSchemaID `json:"schema"` - Field id.DatasetSchemaFieldID `json:"field"` + Dataset *property.DatasetID `json:"dataset"` + Schema property.DatasetSchemaID `json:"schema"` + Field property.DatasetFieldID `json:"field"` } diff --git a/pkg/layer/decoding/reearth_test.go b/pkg/layer/decoding/reearth_test.go 
index 29b781512..9179c5c3b 100644 --- a/pkg/layer/decoding/reearth_test.go +++ b/pkg/layer/decoding/reearth_test.go @@ -5,7 +5,6 @@ import ( "strings" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" @@ -14,9 +13,9 @@ import ( var _ Decoder = &ReearthDecoder{} func TestReearthDecoder_Decode(t *testing.T) { - sid := id.NewSceneID() - dsid := id.NewDatasetSchemaID() - did := id.NewDatasetID() + sid := layer.NewSceneID() + dsid := layer.NewDatasetSchemaID() + did := layer.NewDatasetID() reearthjson := `{ "reearth": 1, "layers": [ @@ -92,8 +91,8 @@ func TestReearthDecoder_Decode(t *testing.T) { rootLayer := result.Layers.Group(result.Root.LayerAt(0)) assert.Equal(t, (&layer.Initializer{ ID: rootLayer.IDRef(), - Plugin: id.MustPluginID("reearth").Ref(), - Extension: id.PluginExtensionID("marker").Ref(), + Plugin: layer.OfficialPluginID.Ref(), + Extension: layer.PluginExtensionID("marker").Ref(), PropertyID: rootLayer.Property().Ref(), Name: "ABC", Infobox: &layer.InitializerInfobox{ @@ -101,8 +100,8 @@ func TestReearthDecoder_Decode(t *testing.T) { Fields: []*layer.InitializerInfoboxField{ { ID: rootLayer.Infobox().FieldAt(0).ID().Ref(), - Plugin: id.MustPluginID("reearth"), - Extension: id.PluginExtensionID("textblock"), + Plugin: layer.OfficialPluginID, + Extension: layer.PluginExtensionID("textblock"), PropertyID: rootLayer.Infobox().FieldAt(0).Property().Ref(), }, }, @@ -116,8 +115,8 @@ func TestReearthDecoder_Decode(t *testing.T) { secondLayer := result.Layers.Item(rootLayer.Layers().LayerAt(0)) assert.Equal(t, (&layer.Initializer{ ID: secondLayer.IDRef(), - Plugin: id.MustPluginID("reearth").Ref(), - Extension: id.PluginExtensionID("marker").Ref(), + Plugin: layer.OfficialPluginID.Ref(), + Extension: layer.PluginExtensionID("marker").Ref(), PropertyID: secondLayer.Property().Ref(), Name: "abc", IsVisible: &tr, @@ -130,14 
+129,14 @@ func TestReearthDecoder_Decode(t *testing.T) { t, (&property.Initializer{ ID: prop.ID().Ref(), - Schema: id.MustPropertySchemaID("reearth/marker"), + Schema: property.MustSchemaID("reearth/marker"), Items: []*property.InitializerItem{ { ID: prop.Items()[0].ID().Ref(), - SchemaItem: id.PropertySchemaGroupID("default"), + SchemaItem: property.SchemaGroupID("default"), Fields: []*property.InitializerField{ { - Field: id.PropertySchemaFieldID("latlng"), + Field: property.FieldID("latlng"), Type: property.ValueTypeLatLng, Value: property.ValueTypeLatLng.ValueFrom(property.LatLng{Lat: 1, Lng: 2}), }, @@ -154,7 +153,7 @@ func TestReearthDecoder_Decode(t *testing.T) { t, (&property.Initializer{ ID: rootLayer.Infobox().PropertyRef(), - Schema: id.MustPropertySchemaID("reearth/infobox"), + Schema: property.MustSchemaID("reearth/infobox"), }).MustBeProperty(sid), prop, ) @@ -165,7 +164,7 @@ func TestReearthDecoder_Decode(t *testing.T) { t, (&property.Initializer{ ID: rootLayer.Infobox().FieldAt(0).PropertyRef(), - Schema: id.MustPropertySchemaID("reearth/textblock"), + Schema: property.MustSchemaID("reearth/textblock"), }).MustBeProperty(sid), prop, ) @@ -176,17 +175,17 @@ func TestReearthDecoder_Decode(t *testing.T) { t, (&property.Initializer{ ID: prop.ID().Ref(), - Schema: id.MustPropertySchemaID("reearth/marker"), + Schema: property.MustSchemaID("reearth/marker"), Items: []*property.InitializerItem{ { ID: prop.Items()[0].ID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + SchemaItem: property.SchemaGroupID("hoge"), Groups: []*property.InitializerGroup{ { ID: property.ToGroupList(prop.Items()[0]).GroupAt(0).IDRef(), Fields: []*property.InitializerField{ { - Field: id.PropertySchemaFieldID("foobar"), + Field: property.FieldID("foobar"), Type: property.ValueTypeString, Value: property.ValueTypeString.ValueFrom("bar"), }, @@ -196,7 +195,7 @@ func TestReearthDecoder_Decode(t *testing.T) { ID: property.ToGroupList(prop.Items()[0]).GroupAt(1).IDRef(), 
Fields: []*property.InitializerField{ { - Field: id.PropertySchemaFieldID("foobar"), + Field: property.FieldID("foobar"), Type: property.ValueTypeString, Value: property.ValueTypeString.ValueFrom("foo"), }, diff --git a/pkg/layer/decoding/shp.go b/pkg/layer/decoding/shp.go index e808ce823..652fd2e5f 100644 --- a/pkg/layer/decoding/shp.go +++ b/pkg/layer/decoding/shp.go @@ -1,7 +1,6 @@ package decoding import ( - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/shp" @@ -14,10 +13,10 @@ type ShapeReader interface { } type ShapeDecoder struct { reader ShapeReader - sceneId id.SceneID + sceneId layer.SceneID } -func NewShapeDecoder(r ShapeReader, s id.SceneID) *ShapeDecoder { +func NewShapeDecoder(r ShapeReader, s layer.SceneID) *ShapeDecoder { return &ShapeDecoder{ reader: r, sceneId: s, @@ -27,7 +26,7 @@ func NewShapeDecoder(r ShapeReader, s id.SceneID) *ShapeDecoder { func (shd *ShapeDecoder) getLayer(t string, coords interface{}) (*layer.Item, *property.Property, error) { var p *property.Property var l *layer.Item - var ex id.PluginExtensionID + var ex layer.PluginExtensionID var err error p, err = createProperty(t, coords, shd.sceneId, nil, "") if err != nil { @@ -40,7 +39,7 @@ func (shd *ShapeDecoder) getLayer(t string, coords interface{}) (*layer.Item, *p Scene(shd.sceneId). Property(p.IDRef()). Extension(&ex). - Plugin(&id.OfficialPluginID). + Plugin(&layer.OfficialPluginID). 
Build() if err != nil { return nil, nil, err diff --git a/pkg/layer/encoding/czml.go b/pkg/layer/encoding/czml.go index 5f616c974..e7117233a 100644 --- a/pkg/layer/encoding/czml.go +++ b/pkg/layer/encoding/czml.go @@ -6,7 +6,7 @@ import ( "io" "github.com/reearth/reearth-backend/pkg/czml" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" ) @@ -29,7 +29,7 @@ func (e *CZMLEncoder) stringToCZMLColor(s string) *czml.Color { } func (e *CZMLEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*czml.Feature, error) { - if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + if li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { return nil, nil } diff --git a/pkg/layer/encoding/czml_test.go b/pkg/layer/encoding/czml_test.go index 805d7ab7a..17edc3a70 100644 --- a/pkg/layer/encoding/czml_test.go +++ b/pkg/layer/encoding/czml_test.go @@ -6,8 +6,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/czml" - - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" @@ -17,9 +15,9 @@ import ( var _ Encoder = (*CZMLEncoder)(nil) func TestCZMLEncoder_Encode(t *testing.T) { - lid := id.NewLayerID() - sid := id.NewSceneID() - iid := id.NewPropertyItemID() + lid := layer.NewID() + sid := layer.NewSceneID() + iid := property.NewItemID() tests := []struct { name string @@ -34,18 +32,18 @@ func TestCZMLEncoder_Encode(t *testing.T) { Original: lid, Name: "test", Scene: sid, - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("marker").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("marker").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { Original: &iid, - 
SchemaGroup: id.PropertySchemaGroupID("default"), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("location"), + ID: property.FieldID("location"), Val: property.NewValueAndDatasetValue( property.ValueTypeLatLng, nil, @@ -53,7 +51,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("height"), + ID: property.FieldID("height"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -61,7 +59,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("pointColor"), + ID: property.FieldID("pointColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -69,7 +67,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("pointSize"), + ID: property.FieldID("pointSize"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -102,18 +100,18 @@ func TestCZMLEncoder_Encode(t *testing.T) { Original: lid, Name: "test", Scene: sid, - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polygon").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { Original: &iid, - SchemaGroup: id.PropertySchemaGroupID("default"), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("polygon"), + ID: property.FieldID("polygon"), Val: property.NewValueAndDatasetValue( property.ValueTypePolygon, nil, @@ -127,7 +125,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("fill"), + ID: property.FieldID("fill"), Val: property.NewValueAndDatasetValue( property.ValueTypeBool, nil, @@ -135,7 +133,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: 
id.PropertySchemaFieldID("fillColor"), + ID: property.FieldID("fillColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -143,7 +141,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("stroke"), + ID: property.FieldID("stroke"), Val: property.NewValueAndDatasetValue( property.ValueTypeBool, nil, @@ -151,7 +149,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeColor"), + ID: property.FieldID("strokeColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -159,7 +157,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -197,18 +195,18 @@ func TestCZMLEncoder_Encode(t *testing.T) { Original: lid, Name: "test", Scene: sid, - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polyline").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { Original: &iid, - SchemaGroup: id.PropertySchemaGroupID("default"), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("coordinates"), + ID: property.FieldID("coordinates"), Val: property.NewValueAndDatasetValue( property.ValueTypeCoordinates, nil, @@ -220,7 +218,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeColor"), + ID: property.FieldID("strokeColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -228,7 +226,7 @@ func TestCZMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: property.NewValueAndDatasetValue( 
property.ValueTypeNumber, nil, diff --git a/pkg/layer/encoding/exporter.go b/pkg/layer/encoding/exporter.go index b01dceaf6..01773256b 100644 --- a/pkg/layer/encoding/exporter.go +++ b/pkg/layer/encoding/exporter.go @@ -3,7 +3,6 @@ package encoding import ( "context" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" ) @@ -14,7 +13,7 @@ type Exporter struct { Encoder Encoder } -func (e *Exporter) ExportLayerByID(ctx context.Context, l id.LayerID) error { +func (e *Exporter) ExportLayerByID(ctx context.Context, l layer.ID) error { if e == nil { return nil } diff --git a/pkg/layer/encoding/geojson.go b/pkg/layer/encoding/geojson.go index b65d4e70c..6fb4c4e0c 100644 --- a/pkg/layer/encoding/geojson.go +++ b/pkg/layer/encoding/geojson.go @@ -5,7 +5,7 @@ import ( "io" geojson "github.com/paulmach/go.geojson" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" ) @@ -40,7 +40,7 @@ func (e *GeoJSONEncoder) coordsToFloat(c property.Coordinates) [][]float64 { } func (e *GeoJSONEncoder) encodeSingleLayer(li *merging.SealedLayerItem) (*geojson.Feature, error) { - if li == nil || li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + if li == nil || li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { return nil, nil } diff --git a/pkg/layer/encoding/geojson_test.go b/pkg/layer/encoding/geojson_test.go index 04095a28d..c7baffff4 100644 --- a/pkg/layer/encoding/geojson_test.go +++ b/pkg/layer/encoding/geojson_test.go @@ -5,7 +5,6 @@ import ( "testing" geojson "github.com/paulmach/go.geojson" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" @@ -25,21 +24,21 @@ func 
TestGeoJSONEncoder_Encode(t *testing.T) { target: &merging.SealedLayerItem{ SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ - Original: id.NewLayerID(), - Scene: id.NewSceneID(), + Original: layer.NewID(), + Scene: layer.NewSceneID(), Name: "test", - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("marker").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("marker").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("location"), + ID: property.FieldID("location"), Val: property.NewValueAndDatasetValue( property.ValueTypeLatLng, nil, @@ -47,7 +46,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("pointColor"), + ID: property.FieldID("pointColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -55,7 +54,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("height"), + ID: property.FieldID("height"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -80,21 +79,21 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { target: &merging.SealedLayerItem{ SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ - Original: id.NewLayerID(), - Scene: id.NewSceneID(), + Original: layer.NewID(), + Scene: layer.NewSceneID(), Name: "test", - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polygon").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: 
[]*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("polygon"), + ID: property.FieldID("polygon"), Val: property.NewValueAndDatasetValue( property.ValueTypePolygon, nil, @@ -106,7 +105,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("fillColor"), + ID: property.FieldID("fillColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -114,7 +113,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeColor"), + ID: property.FieldID("strokeColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -122,7 +121,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -130,7 +129,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -157,21 +156,21 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { target: &merging.SealedLayerItem{ SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ - Original: id.NewLayerID(), - Scene: id.NewSceneID(), + Original: layer.NewID(), + Scene: layer.NewSceneID(), Name: "test", - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polyline").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: 
id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("coordinates"), + ID: property.FieldID("coordinates"), Val: property.NewValueAndDatasetValue( property.ValueTypeCoordinates, nil, @@ -183,7 +182,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeColor"), + ID: property.FieldID("strokeColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -191,7 +190,7 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, diff --git a/pkg/layer/encoding/kml.go b/pkg/layer/encoding/kml.go index 750c5cb2e..9c00f111f 100644 --- a/pkg/layer/encoding/kml.go +++ b/pkg/layer/encoding/kml.go @@ -4,7 +4,7 @@ import ( "errors" "io" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" kml "github.com/twpayne/go-kml" ) @@ -154,7 +154,7 @@ func (e *KMLEncoder) encodeStyle(li *merging.SealedLayerItem) (*kml.SharedElemen // encodes non style layer features func (e *KMLEncoder) encodeLayerTag(li *merging.SealedLayerItem) (*kml.CompoundElement, error) { - if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + if li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { return nil, nil } diff --git a/pkg/layer/encoding/kml_test.go b/pkg/layer/encoding/kml_test.go index 686b366f3..61cbc87e4 100644 --- a/pkg/layer/encoding/kml_test.go +++ b/pkg/layer/encoding/kml_test.go @@ -4,7 +4,6 @@ import ( "bytes" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" @@ 
-15,7 +14,7 @@ import ( var _ Encoder = (*KMLEncoder)(nil) func TestKMLEncoder_Encode(t *testing.T) { - lid := id.MustLayerID("01fmph48ykj1nd82r8e4znh6a6") + lid := layer.MustID("01fmph48ykj1nd82r8e4znh6a6") tests := []struct { name string @@ -28,20 +27,20 @@ func TestKMLEncoder_Encode(t *testing.T) { SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ Original: lid, - Scene: id.NewSceneID(), + Scene: layer.NewSceneID(), Name: "test", - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("marker").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("marker").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("location"), + ID: property.FieldID("location"), Val: property.NewValueAndDatasetValue( property.ValueTypeLatLng, nil, @@ -49,7 +48,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("height"), + ID: property.FieldID("height"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -57,7 +56,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("imageSize"), + ID: property.FieldID("imageSize"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -65,7 +64,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("image"), + ID: property.FieldID("image"), Val: property.NewValueAndDatasetValue( property.ValueTypeURL, nil, @@ -73,7 +72,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("pointColor"), + ID: property.FieldID("pointColor"), Val: property.NewValueAndDatasetValue( 
property.ValueTypeString, nil, @@ -113,20 +112,20 @@ func TestKMLEncoder_Encode(t *testing.T) { SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ Original: lid, - Scene: id.NewSceneID(), + Scene: layer.NewSceneID(), Name: "test", - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polygon").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("polygon"), + ID: property.FieldID("polygon"), Val: property.NewValueAndDatasetValue( property.ValueTypePolygon, nil, @@ -138,7 +137,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("fill"), + ID: property.FieldID("fill"), Val: property.NewValueAndDatasetValue( property.ValueTypeBool, nil, @@ -146,7 +145,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("fillColor"), + ID: property.FieldID("fillColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -154,7 +153,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("stroke"), + ID: property.FieldID("stroke"), Val: property.NewValueAndDatasetValue( property.ValueTypeBool, nil, @@ -162,7 +161,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeColor"), + ID: property.FieldID("strokeColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -170,7 +169,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: 
property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, @@ -217,20 +216,20 @@ func TestKMLEncoder_Encode(t *testing.T) { SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ Original: lid, - Scene: id.NewSceneID(), + Scene: layer.NewSceneID(), Name: "test", - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polyline").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("coordinates"), + ID: property.FieldID("coordinates"), Val: property.NewValueAndDatasetValue( property.ValueTypeCoordinates, nil, @@ -242,7 +241,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeColor"), + ID: property.FieldID("strokeColor"), Val: property.NewValueAndDatasetValue( property.ValueTypeString, nil, @@ -250,7 +249,7 @@ func TestKMLEncoder_Encode(t *testing.T) { ), }, { - ID: id.PropertySchemaFieldID("strokeWidth"), + ID: property.FieldID("strokeWidth"), Val: property.NewValueAndDatasetValue( property.ValueTypeNumber, nil, diff --git a/pkg/layer/encoding/shp.go b/pkg/layer/encoding/shp.go index 833c07316..842e0f5bb 100644 --- a/pkg/layer/encoding/shp.go +++ b/pkg/layer/encoding/shp.go @@ -4,7 +4,7 @@ import ( "errors" "io" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" shp "github.com/reearth/reearth-backend/pkg/shp" @@ -100,7 +100,7 @@ func polygonToSHP(poly property.Polygon) *shp.Polygon { } func (e *SHPEncoder) 
encodeLayer(li *merging.SealedLayerItem) (sh shp.Shape, st shp.ShapeType, err error) { - if li.PluginID == nil || !id.OfficialPluginID.Equal(*li.PluginID) { + if li.PluginID == nil || !layer.OfficialPluginID.Equal(*li.PluginID) { return nil, 0, nil } switch li.ExtensionID.String() { diff --git a/pkg/layer/encoding/shp_test.go b/pkg/layer/encoding/shp_test.go index b7256f5b7..68d4d265f 100644 --- a/pkg/layer/encoding/shp_test.go +++ b/pkg/layer/encoding/shp_test.go @@ -6,7 +6,6 @@ import ( "github.com/jonas-p/go-shp" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" @@ -25,23 +24,23 @@ func TestEncodeSHP(t *testing.T) { layer: &merging.SealedLayerItem{ SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ - Original: id.NewLayerID(), + Original: layer.NewID(), Parent: nil, - Scene: id.NewSceneID(), + Scene: layer.NewSceneID(), Property: nil, Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polygon").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polygon").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("polygon"), + ID: property.FieldID("polygon"), Val: property.NewValueAndDatasetValue( property.ValueTypePolygon, nil, @@ -80,24 +79,24 @@ func TestEncodeSHP(t *testing.T) { layer: &merging.SealedLayerItem{ SealedLayerCommon: merging.SealedLayerCommon{ Merged: layer.Merged{ - Original: id.NewLayerID(), + Original: layer.NewID(), Parent: nil, Name: "test", - Scene: id.NewSceneID(), + Scene: 
layer.NewSceneID(), Property: nil, Infobox: nil, - PluginID: &id.OfficialPluginID, - ExtensionID: id.PluginExtensionID("polyline").Ref(), + PluginID: &layer.OfficialPluginID, + ExtensionID: layer.PluginExtensionID("polyline").Ref(), }, Property: &property.Sealed{ - Original: id.NewPropertyID().Ref(), + Original: property.NewID().Ref(), Items: []*property.SealedItem{ { - Original: id.NewPropertyItemID().Ref(), - SchemaGroup: id.PropertySchemaGroupID("default"), + Original: property.NewItemID().Ref(), + SchemaGroup: property.SchemaGroupID("default"), Fields: []*property.SealedField{ { - ID: id.PropertySchemaFieldID("coordinates"), + ID: property.FieldID("coordinates"), Val: property.NewValueAndDatasetValue( property.ValueTypeCoordinates, nil, diff --git a/pkg/layer/group.go b/pkg/layer/group.go index ca1a6c333..d6827e596 100644 --- a/pkg/layer/group.go +++ b/pkg/layer/group.go @@ -1,22 +1,21 @@ package layer import ( - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" ) type Group struct { layerBase layers *IDList - linkedDatasetSchema *id.DatasetSchemaID + linkedDatasetSchema *DatasetSchemaID root bool } -func (l *Group) ID() id.LayerID { +func (l *Group) ID() ID { return l.layerBase.ID() } -func (l *Group) IDRef() *id.LayerID { +func (l *Group) IDRef() *ID { if l == nil { return nil } @@ -27,14 +26,14 @@ func (l *Group) Name() string { return l.layerBase.Name() } -func (l *Group) Plugin() *id.PluginID { +func (l *Group) Plugin() *PluginID { if l == nil { return nil } return l.layerBase.Plugin() } -func (l *Group) Extension() *id.PluginExtensionID { +func (l *Group) Extension() *PluginExtensionID { if l == nil { return nil } @@ -45,7 +44,7 @@ func (l *Group) UsesPlugin() bool { return l.layerBase.UsesPlugin() } -func (l *Group) Property() *id.PropertyID { +func (l *Group) Property() *PropertyID { if l == nil { return nil } @@ -87,7 +86,7 @@ func (l *Group) SetVisible(visible bool) { l.layerBase.SetVisible(visible) } -func 
(l *Group) SetPlugin(plugin *id.PluginID) { +func (l *Group) SetPlugin(plugin *PluginID) { if l == nil { return } @@ -101,7 +100,7 @@ func (l *Group) IsLinked() bool { return l.linkedDatasetSchema != nil } -func (l *Group) LinkedDatasetSchema() *id.DatasetSchemaID { +func (l *Group) LinkedDatasetSchema() *DatasetSchemaID { if l == nil || l.linkedDatasetSchema == nil { return nil } @@ -109,7 +108,7 @@ func (l *Group) LinkedDatasetSchema() *id.DatasetSchemaID { return &id } -func (l *Group) Link(ds id.DatasetSchemaID) { +func (l *Group) Link(ds DatasetSchemaID) { if l == nil { return } @@ -134,7 +133,7 @@ func (l *Group) Layers() *IDList { return l.layers } -func (l *Group) MoveLayerFrom(id id.LayerID, index int, fromLayerGroup *Group) { +func (l *Group) MoveLayerFrom(id ID, index int, fromLayerGroup *Group) { if l == nil { return } @@ -167,7 +166,7 @@ func (l *Group) IsRoot() bool { return l.root } -func (l *Group) Properties() []id.PropertyID { +func (l *Group) Properties() []PropertyID { if l == nil { return nil } diff --git a/pkg/layer/group_builder.go b/pkg/layer/group_builder.go index a271589c8..4a1898177 100644 --- a/pkg/layer/group_builder.go +++ b/pkg/layer/group_builder.go @@ -1,9 +1,5 @@ package layer -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - func GroupFromLayer(l Layer) *Group { li, ok := l.(*Group) if !ok { @@ -32,8 +28,8 @@ func NewGroup() *GroupBuilder { } func (b *GroupBuilder) Build() (*Group, error) { - if id.ID(b.l.id).IsNil() { - return nil, id.ErrInvalidID + if b.l.id.IsNil() { + return nil, ErrInvalidID } return b.l, nil } @@ -51,17 +47,17 @@ func (b *GroupBuilder) base(layer layerBase) *GroupBuilder { return b } -func (b *GroupBuilder) ID(id id.LayerID) *GroupBuilder { +func (b *GroupBuilder) ID(id ID) *GroupBuilder { b.l.id = id return b } func (b *GroupBuilder) NewID() *GroupBuilder { - b.l.id = id.NewLayerID() + b.l.id = NewID() return b } -func (b *GroupBuilder) Scene(s id.SceneID) *GroupBuilder { +func (b *GroupBuilder) 
Scene(s SceneID) *GroupBuilder { b.l.scene = s return b } @@ -81,17 +77,17 @@ func (b *GroupBuilder) IsVisible(visible bool) *GroupBuilder { return b } -func (b *GroupBuilder) Plugin(plugin *id.PluginID) *GroupBuilder { +func (b *GroupBuilder) Plugin(plugin *PluginID) *GroupBuilder { b.l.plugin = plugin.CopyRef() return b } -func (b *GroupBuilder) Extension(extension *id.PluginExtensionID) *GroupBuilder { +func (b *GroupBuilder) Extension(extension *PluginExtensionID) *GroupBuilder { b.l.extension = extension.CopyRef() return b } -func (b *GroupBuilder) Property(property *id.PropertyID) *GroupBuilder { +func (b *GroupBuilder) Property(property *PropertyID) *GroupBuilder { b.l.property = property.CopyRef() return b } @@ -106,7 +102,7 @@ func (b *GroupBuilder) Infobox(infobox *Infobox) *GroupBuilder { return b } -func (b *GroupBuilder) LinkedDatasetSchema(linkedDatasetSchema *id.DatasetSchemaID) *GroupBuilder { +func (b *GroupBuilder) LinkedDatasetSchema(linkedDatasetSchema *DatasetSchemaID) *GroupBuilder { b.l.linkedDatasetSchema = linkedDatasetSchema.CopyRef() return b } diff --git a/pkg/layer/group_test.go b/pkg/layer/group_test.go index 555d1f20c..bad67b07e 100644 --- a/pkg/layer/group_test.go +++ b/pkg/layer/group_test.go @@ -3,29 +3,28 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) var _ Layer = &Group{} -var l1 = id.MustLayerID(id.New().String()) -var l2 = id.MustLayerID(id.New().String()) +var l1 = NewID() +var l2 = NewID() var group = Group{ layerBase: layerBase{ - id: id.MustLayerID(id.New().String()), + id: NewID(), name: "xxx", visible: false, - plugin: id.MustPluginID("aaa~1.1.1").Ref(), - extension: id.PluginExtensionID("foo").Ref(), + plugin: MustPluginID("aaa~1.1.1").Ref(), + extension: PluginExtensionID("foo").Ref(), property: nil, infobox: nil, tags: nil, - scene: id.SceneID{}, + scene: SceneID{}, }, layers: &IDList{ - layers: append(make([]id.LayerID, 0), l1, l2), - layerIDs: 
map[id.LayerID]struct{}{l1: {}, l2: {}}, + layers: append(make([]ID, 0), l1, l2), + layerIDs: map[ID]struct{}{l1: {}, l2: {}}, }, linkedDatasetSchema: nil, root: true, @@ -33,7 +32,7 @@ var group = Group{ func TestGroup_ID(t *testing.T) { assert.NotNil(t, group.ID()) - assert.IsType(t, id.MustLayerID(id.New().String()), group.ID()) + assert.IsType(t, NewID(), group.ID()) } func TestGroup_Name(t *testing.T) { @@ -42,12 +41,12 @@ func TestGroup_Name(t *testing.T) { func TestGroup_Plugin(t *testing.T) { assert.NotNil(t, group.Plugin()) - assert.True(t, id.MustPluginID("aaa~1.1.1").Equal(*group.Plugin())) + assert.True(t, MustPluginID("aaa~1.1.1").Equal(*group.Plugin())) } func TestGroup_IDRef(t *testing.T) { assert.NotNil(t, group.IDRef()) - assert.IsType(t, id.MustLayerID(id.New().String()), group.ID()) + assert.IsType(t, NewID(), group.ID()) } func TestGroup_Extension(t *testing.T) { @@ -82,7 +81,7 @@ func TestGroup_Rename(t *testing.T) { func TestGroup_SetInfobox(t *testing.T) { inf := Infobox{ - property: id.MustPropertyID(id.New().String()), + property: NewPropertyID(), fields: nil, ids: nil, } @@ -91,9 +90,9 @@ func TestGroup_SetInfobox(t *testing.T) { } func TestGroup_SetPlugin(t *testing.T) { - group.SetPlugin(id.MustPluginID("ccc~1.1.1").Ref()) + group.SetPlugin(MustPluginID("ccc~1.1.1").Ref()) assert.NotNil(t, group.Plugin()) - assert.True(t, id.MustPluginID("ccc~1.1.1").Equal(*group.Plugin())) + assert.True(t, MustPluginID("ccc~1.1.1").Equal(*group.Plugin())) } func TestGroup_SetVisible(t *testing.T) { @@ -123,7 +122,7 @@ func TestGroup_LinkedDatasetSchema(t *testing.T) { } func TestGroup_Link(t *testing.T) { - group.Link(id.MustDatasetSchemaID(id.New().String())) + group.Link(NewDatasetSchemaID()) assert.NotNil(t, group.LinkedDatasetSchema()) } diff --git a/pkg/layer/id.go b/pkg/layer/id.go new file mode 100644 index 000000000..b34b0271d --- /dev/null +++ b/pkg/layer/id.go @@ -0,0 +1,75 @@ +package layer + +import ( + "sort" + + 
"github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.LayerID +type InfoboxFieldID = id.InfoboxFieldID +type TagID = id.TagID +type SceneID = id.SceneID +type PluginID = id.PluginID +type PluginExtensionID = id.PluginExtensionID +type PropertyID = id.PropertyID +type DatasetID = id.DatasetID +type DatasetSchemaID = id.DatasetSchemaID + +var NewID = id.NewLayerID +var NewInfoboxFieldID = id.NewInfoboxFieldID +var NewTagID = id.NewTagID +var NewSceneID = id.NewSceneID +var NewPropertyID = id.NewPropertyID +var NewDatasetID = id.NewDatasetID +var NewDatasetSchemaID = id.NewDatasetSchemaID + +var MustID = id.MustLayerID +var MustInfoboxFieldID = id.MustInfoboxFieldID +var MustTagID = id.MustTagID +var MustSceneID = id.MustSceneID +var MustPluginID = id.MustPluginID +var MustPropertyID = id.MustPropertyID +var PropertySchemaIDFromExtension = id.PropertySchemaIDFromExtension +var MustPropertySchemaIDFromExtension = id.MustPropertySchemaIDFromExtension + +var IDFrom = id.LayerIDFrom +var InfoboxFieldIDFrom = id.InfoboxFieldIDFrom +var TagIDFrom = id.TagIDFrom +var SceneIDFrom = id.SceneIDFrom +var PropertyIDFrom = id.PropertyIDFrom +var DatasetIDFrom = id.DatasetIDFrom +var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom + +var IDFromRef = id.LayerIDFromRef +var InfoboxFieldIDFromRef = id.InfoboxFieldIDFromRef +var TagIDFromRef = id.TagIDFromRef +var SceneIDFromRef = id.SceneIDFromRef +var PropertyIDFromRef = id.PropertyIDFromRef +var DatasetIDFromRef = id.DatasetIDFromRef +var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef + +var IDFromRefID = id.LayerIDFromRefID +var InfoboxFieldIDFromRefID = id.InfoboxFieldIDFromRefID +var TagIDFromRefID = id.TagIDFromRefID +var SceneIDFromRefID = id.SceneIDFromRefID +var PropertyIDFromRefID = id.PropertyIDFromRefID +var DatasetIDFromRefID = id.DatasetIDFromRefID +var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID + +type IDSet = id.LayerIDSet +type InfoboxFIeldIDSet = id.InfoboxFieldIDSet +type DatasetIDSet = 
id.DatasetIDSet + +var NewIDSet = id.NewLayerIDSet +var NewInfoboxFIeldIDSet = id.NewInfoboxFieldIDSet +var NewDatasetIDSet = id.NewDatasetIDSet + +var OfficialPluginID = id.OfficialPluginID +var ErrInvalidID = id.ErrInvalidID + +func sortIDs(a []ID) { + sort.SliceStable(a, func(i, j int) bool { + return id.ID(a[i]).Compare(id.ID(a[j])) < 0 + }) +} diff --git a/pkg/layer/id_list.go b/pkg/layer/id_list.go index ec7273cba..f2e9a3855 100644 --- a/pkg/layer/id_list.go +++ b/pkg/layer/id_list.go @@ -1,40 +1,34 @@ package layer -import "github.com/reearth/reearth-backend/pkg/id" - -// IDList _ type IDList struct { - layers []id.LayerID + layers []ID // for checking duplication - layerIDs map[id.LayerID]struct{} + layerIDs map[ID]struct{} } -// NewIDList _ -func NewIDList(layers []id.LayerID) *IDList { +func NewIDList(layers []ID) *IDList { ll := IDList{} if len(layers) == 0 { return &ll } - ll.layers = append([]id.LayerID{}, layers...) - ll.layerIDs = make(map[id.LayerID]struct{}, len(layers)) + ll.layers = append([]ID{}, layers...) + ll.layerIDs = make(map[ID]struct{}, len(layers)) for _, l := range layers { ll.layerIDs[l] = struct{}{} } return &ll } -// Layers _ -func (l *IDList) Layers() []id.LayerID { +func (l *IDList) Layers() []ID { if l == nil { return nil } - result := append([]id.LayerID{}, l.layers...) + result := append([]ID{}, l.layers...) 
return result } -// HasLayer _ -func (l *IDList) HasLayer(id id.LayerID) bool { +func (l *IDList) HasLayer(id ID) bool { if l == nil || len(l.layerIDs) == 0 { return false } @@ -42,23 +36,21 @@ func (l *IDList) HasLayer(id id.LayerID) bool { return ok } -// LayerAt _ -func (l *IDList) LayerAt(index int) id.LayerID { +func (l *IDList) LayerAt(index int) ID { if l == nil || index < 0 || len(l.layers) <= index { - return id.LayerID{} + return ID{} } return l.layers[index] } -func (l *IDList) AtRef(index int) *id.LayerID { +func (l *IDList) AtRef(index int) *ID { if l == nil || index < 0 || len(l.layers) <= index { return nil } return &l.layers[index] } -// FindLayerIndex _ -func (l *IDList) FindLayerIndex(id id.LayerID) int { +func (l *IDList) FindLayerIndex(id ID) int { if l == nil { return -1 } @@ -70,7 +62,6 @@ func (l *IDList) FindLayerIndex(id id.LayerID) int { return -1 } -// LayerCount _ func (l *IDList) LayerCount() int { if l == nil { return 0 @@ -78,13 +69,12 @@ func (l *IDList) LayerCount() int { return len(l.layers) } -// AddLayer _ -func (l *IDList) AddLayer(lid id.LayerID, index int) { +func (l *IDList) AddLayer(lid ID, index int) { if l == nil || l.HasLayer(lid) { return } if l.layerIDs == nil { - l.layerIDs = make(map[id.LayerID]struct{}) + l.layerIDs = make(map[ID]struct{}) } l.layerIDs[lid] = struct{}{} @@ -93,12 +83,11 @@ func (l *IDList) AddLayer(lid id.LayerID, index int) { if index < 0 || le <= index { l.layers = append(l.layers, lid) } else { - l.layers = append(l.layers[:index], append([]id.LayerID{lid}, l.layers[index:]...)...) + l.layers = append(l.layers[:index], append([]ID{lid}, l.layers[index:]...)...) 
} } -// AppendLayers _ -func (l *IDList) AppendLayers(lid ...id.LayerID) *IDList { +func (l *IDList) AppendLayers(lid ...ID) *IDList { if l == nil { return NewIDList(lid) } @@ -119,8 +108,7 @@ func (l *IDList) Clone() (l2 *IDList) { return NewIDList(l.layers) } -// AddOrMoveLayer _ -func (l *IDList) AddOrMoveLayer(lid id.LayerID, index int) { +func (l *IDList) AddOrMoveLayer(lid ID, index int) { if l == nil { return } @@ -134,12 +122,11 @@ func (l *IDList) AddOrMoveLayer(lid id.LayerID, index int) { l.MoveLayer(lid, index) return } - l.layers = append(l.layers[:index], append([]id.LayerID{lid}, l.layers[index:]...)...) + l.layers = append(l.layers[:index], append([]ID{lid}, l.layers[index:]...)...) l.layerIDs[lid] = struct{}{} } -// MoveLayer _ -func (l *IDList) MoveLayer(id id.LayerID, toIndex int) { +func (l *IDList) MoveLayer(id ID, toIndex int) { if l == nil { return } @@ -152,7 +139,6 @@ func (l *IDList) MoveLayer(id id.LayerID, toIndex int) { } } -// MoveLayerAt _ func (l *IDList) MoveLayerAt(fromIndex int, toIndex int) { if l == nil || len(l.layers) == 0 { return @@ -171,14 +157,13 @@ func (l *IDList) MoveLayerAt(fromIndex int, toIndex int) { f := l.layers[fromIndex] l.layers = append(l.layers[:fromIndex], l.layers[fromIndex+1:]...) - newSlice := make([]id.LayerID, toIndex+1) + newSlice := make([]ID, toIndex+1) copy(newSlice, l.layers[:toIndex]) newSlice[toIndex] = f l.layers = append(newSlice, l.layers[toIndex:]...) 
} -// RemoveLayer _ -func (l *IDList) RemoveLayer(id id.LayerID) { +func (l *IDList) RemoveLayer(id ID) { if l == nil { return } @@ -191,7 +176,6 @@ func (l *IDList) RemoveLayer(id id.LayerID) { } } -// RemoveLayerAt _ func (l *IDList) RemoveLayerAt(index int) { if l == nil || len(l.layers) == 0 { return @@ -203,9 +187,9 @@ func (l *IDList) RemoveLayerAt(index int) { } layer := l.layers[index] - var layers []id.LayerID + var layers []ID if index == le { - layers = []id.LayerID{} + layers = []ID{} } else { layers = l.layers[index+1:] } @@ -213,7 +197,6 @@ func (l *IDList) RemoveLayerAt(index int) { delete(l.layerIDs, layer) } -// Empty _ func (l *IDList) Empty() { if l == nil { return diff --git a/pkg/layer/id_list_test.go b/pkg/layer/id_list_test.go index 5edcf2bfd..c9345551b 100644 --- a/pkg/layer/id_list_test.go +++ b/pkg/layer/id_list_test.go @@ -3,17 +3,15 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/stretchr/testify/assert" ) func TestLayerIDList(t *testing.T) { - l1 := id.LayerID(id.New()) - l2 := id.LayerID(id.New()) - l3 := id.LayerID(id.New()) - l4 := id.LayerID(id.New()) - rawLayers := []id.LayerID{l1, l3} + l1 := NewID() + l2 := NewID() + l3 := NewID() + l4 := NewID() + rawLayers := []ID{l1, l3} layers := NewIDList(rawLayers) assert.NotNil(t, layers) diff --git a/pkg/layer/infobox.go b/pkg/layer/infobox.go index 001c9e84c..f446e2a5f 100644 --- a/pkg/layer/infobox.go +++ b/pkg/layer/infobox.go @@ -5,22 +5,21 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/builtin" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" ) type Infobox struct { - property id.PropertyID + property PropertyID fields []*InfoboxField // for checking duplication - ids map[id.InfoboxFieldID]struct{} + ids map[InfoboxFieldID]struct{} } -func NewInfobox(fields []*InfoboxField, p id.PropertyID) *Infobox { +func NewInfobox(fields []*InfoboxField, p PropertyID) *Infobox { infobox := 
Infobox{ property: p, fields: make([]*InfoboxField, len(fields)), - ids: make(map[id.InfoboxFieldID]struct{}, len(fields)), + ids: make(map[InfoboxFieldID]struct{}, len(fields)), } for i, f := range fields { if f == nil { @@ -32,11 +31,11 @@ func NewInfobox(fields []*InfoboxField, p id.PropertyID) *Infobox { return &infobox } -func (i *Infobox) Property() id.PropertyID { +func (i *Infobox) Property() PropertyID { return i.property } -func (i *Infobox) PropertyRef() *id.PropertyID { +func (i *Infobox) PropertyRef() *PropertyID { if i == nil { return nil } @@ -51,7 +50,7 @@ func (i *Infobox) Fields() []*InfoboxField { return append([]*InfoboxField{}, i.fields...) } -func (i *Infobox) Field(field id.InfoboxFieldID) *InfoboxField { +func (i *Infobox) Field(field InfoboxFieldID) *InfoboxField { for _, f := range i.fields { if f.ID() == field { return f @@ -67,7 +66,7 @@ func (i *Infobox) FieldAt(index int) *InfoboxField { return i.fields[index] } -func (i *Infobox) Has(id id.InfoboxFieldID) bool { +func (i *Infobox) Has(id InfoboxFieldID) bool { _, ok := i.ids[id] return ok } @@ -90,7 +89,7 @@ func (i *Infobox) Add(field *InfoboxField, index int) { i.ids[id] = struct{}{} } -func (i *Infobox) Move(field id.InfoboxFieldID, toIndex int) { +func (i *Infobox) Move(field InfoboxFieldID, toIndex int) { for fromIndex, f := range i.fields { if f.ID() == field { i.MoveAt(fromIndex, toIndex) @@ -116,7 +115,7 @@ func (i *Infobox) MoveAt(fromIndex int, toIndex int) { i.fields = append(newSlice, i.fields[toIndex:]...) 
} -func (i *Infobox) Remove(field id.InfoboxFieldID) { +func (i *Infobox) Remove(field InfoboxFieldID) { for index, f := range i.fields { if f.ID() == field { i.RemoveAt(index) @@ -125,12 +124,12 @@ func (i *Infobox) Remove(field id.InfoboxFieldID) { } } -func (i *Infobox) RemoveAllByPlugin(pid id.PluginID) []id.PropertyID { +func (i *Infobox) RemoveAllByPlugin(pid PluginID) []PropertyID { if i == nil { return nil } - var properties []id.PropertyID + var properties []PropertyID for j := 0; j < len(i.fields); j++ { if i.fields[j].plugin.Equal(pid) { properties = append(properties, i.fields[j].Property()) diff --git a/pkg/layer/infobox_field.go b/pkg/layer/infobox_field.go index 70c036fce..b7e5379da 100644 --- a/pkg/layer/infobox_field.go +++ b/pkg/layer/infobox_field.go @@ -5,34 +5,33 @@ package layer import ( "errors" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" ) type InfoboxField struct { - id id.InfoboxFieldID - plugin id.PluginID - extension id.PluginExtensionID - property id.PropertyID + id InfoboxFieldID + plugin PluginID + extension PluginExtensionID + property PropertyID } -func (i *InfoboxField) ID() id.InfoboxFieldID { +func (i *InfoboxField) ID() InfoboxFieldID { return i.id } -func (i *InfoboxField) Plugin() id.PluginID { +func (i *InfoboxField) Plugin() PluginID { return i.plugin } -func (i *InfoboxField) Extension() id.PluginExtensionID { +func (i *InfoboxField) Extension() PluginExtensionID { return i.extension } -func (i *InfoboxField) Property() id.PropertyID { +func (i *InfoboxField) Property() PropertyID { return i.property } -func (i *InfoboxField) PropertyRef() *id.PropertyID { +func (i *InfoboxField) PropertyRef() *PropertyID { if i == nil { return nil } @@ -48,7 +47,7 @@ func (i *InfoboxField) ValidateProperty(pm property.Map) error { if lp == nil { return errors.New("property does not exist") } - if !lp.Schema().Equal(id.MustPropertySchemaIDFromExtension(i.plugin, i.extension)) { + if 
!lp.Schema().Equal(MustPropertySchemaIDFromExtension(i.plugin, i.extension)) { return errors.New("property has a invalid schema") } diff --git a/pkg/layer/infobox_field_builder.go b/pkg/layer/infobox_field_builder.go index 4a8af2c1f..47ce14ff0 100644 --- a/pkg/layer/infobox_field_builder.go +++ b/pkg/layer/infobox_field_builder.go @@ -1,10 +1,5 @@ package layer -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - -// InfoboxFieldBuilder _ type InfoboxFieldBuilder struct { i *InfoboxField } @@ -14,10 +9,10 @@ func NewInfoboxField() *InfoboxFieldBuilder { } func (b *InfoboxFieldBuilder) Build() (*InfoboxField, error) { - if id.ID(b.i.id).IsNil() || + if b.i.id.IsNil() || string(b.i.extension) == "" || - id.ID(b.i.property).IsNil() { - return nil, id.ErrInvalidID + b.i.property.IsNil() { + return nil, ErrInvalidID } return b.i, nil } @@ -30,27 +25,27 @@ func (b *InfoboxFieldBuilder) MustBuild() *InfoboxField { return i } -func (b *InfoboxFieldBuilder) ID(id id.InfoboxFieldID) *InfoboxFieldBuilder { +func (b *InfoboxFieldBuilder) ID(id InfoboxFieldID) *InfoboxFieldBuilder { b.i.id = id return b } func (b *InfoboxFieldBuilder) NewID() *InfoboxFieldBuilder { - b.i.id = id.InfoboxFieldID(id.New()) + b.i.id = NewInfoboxFieldID() return b } -func (b *InfoboxFieldBuilder) Plugin(plugin id.PluginID) *InfoboxFieldBuilder { +func (b *InfoboxFieldBuilder) Plugin(plugin PluginID) *InfoboxFieldBuilder { b.i.plugin = plugin return b } -func (b *InfoboxFieldBuilder) Extension(extension id.PluginExtensionID) *InfoboxFieldBuilder { +func (b *InfoboxFieldBuilder) Extension(extension PluginExtensionID) *InfoboxFieldBuilder { b.i.extension = extension return b } -func (b *InfoboxFieldBuilder) Property(p id.PropertyID) *InfoboxFieldBuilder { +func (b *InfoboxFieldBuilder) Property(p PropertyID) *InfoboxFieldBuilder { b.i.property = p return b } diff --git a/pkg/layer/infobox_test.go b/pkg/layer/infobox_test.go index d8a016aa2..1b6d77c75 100644 --- a/pkg/layer/infobox_test.go +++ 
b/pkg/layer/infobox_test.go @@ -3,17 +3,16 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestInfobox(t *testing.T) { - f1 := &InfoboxField{id: id.NewInfoboxFieldID()} - f2 := &InfoboxField{id: id.NewInfoboxFieldID()} - f3 := &InfoboxField{id: id.NewInfoboxFieldID()} - f4 := &InfoboxField{id: id.NewInfoboxFieldID()} + f1 := &InfoboxField{id: NewInfoboxFieldID()} + f2 := &InfoboxField{id: NewInfoboxFieldID()} + f3 := &InfoboxField{id: NewInfoboxFieldID()} + f4 := &InfoboxField{id: NewInfoboxFieldID()} fields := []*InfoboxField{f1, f2, f3} - infobox := NewInfobox(fields, id.NewPropertyID()) + infobox := NewInfobox(fields, NewPropertyID()) assert.NotNil(t, infobox) assert.Equal(t, fields, infobox.Fields()) @@ -52,17 +51,17 @@ func TestInfobox(t *testing.T) { } func TestInfobox_RemoveAllByPlugin(t *testing.T) { - pid1 := id.MustPluginID("xxx~1.1.1") - pid2 := id.MustPluginID("xxy~1.1.1") - f1 := &InfoboxField{id: id.NewInfoboxFieldID(), plugin: pid1, extension: "a", property: id.NewPropertyID()} - f2 := &InfoboxField{id: id.NewInfoboxFieldID(), plugin: pid2, extension: "b", property: id.NewPropertyID()} - f3 := &InfoboxField{id: id.NewInfoboxFieldID(), plugin: pid1, extension: "c", property: id.NewPropertyID()} - infobox := NewInfobox([]*InfoboxField{f1, f2, f3}, id.NewPropertyID()) + pid1 := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxy~1.1.1") + f1 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "a", property: NewPropertyID()} + f2 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "b", property: NewPropertyID()} + f3 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "c", property: NewPropertyID()} + infobox := NewInfobox([]*InfoboxField{f1, f2, f3}, NewPropertyID()) - assert.Equal(t, []id.PropertyID(nil), (*Infobox)(nil).RemoveAllByPlugin(pid1)) + assert.Equal(t, []PropertyID(nil), (*Infobox)(nil).RemoveAllByPlugin(pid1)) 
assert.Equal(t, []*InfoboxField{f1, f2, f3}, infobox.fields) - assert.Equal(t, []id.PropertyID{f1.Property(), f3.Property()}, infobox.RemoveAllByPlugin(pid1)) + assert.Equal(t, []PropertyID{f1.Property(), f3.Property()}, infobox.RemoveAllByPlugin(pid1)) assert.Equal(t, []*InfoboxField{f2}, infobox.fields) - assert.Equal(t, []id.PropertyID(nil), infobox.RemoveAllByPlugin(pid1)) + assert.Equal(t, []PropertyID(nil), infobox.RemoveAllByPlugin(pid1)) assert.Equal(t, []*InfoboxField{f2}, infobox.fields) } diff --git a/pkg/layer/initializer.go b/pkg/layer/initializer.go index 98aa1b714..8d0624b4c 100644 --- a/pkg/layer/initializer.go +++ b/pkg/layer/initializer.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/builtin" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/rerror" ) @@ -18,7 +17,7 @@ var ( ) type InitializerResult struct { - Root id.LayerID + Root ID Layers Map Properties property.Map } @@ -40,18 +39,18 @@ func (r InitializerResult) RootLayerItem() *Item { } type Initializer struct { - ID *id.LayerID `json:"id"` - Plugin *id.PluginID `json:"plugin"` - Extension *id.PluginExtensionID `json:"extension"` + ID *ID `json:"id"` + Plugin *PluginID `json:"plugin"` + Extension *PluginExtensionID `json:"extension"` Name string `json:"name"` Infobox *InitializerInfobox `json:"infobox"` - PropertyID *id.PropertyID `json:"propertyId"` + PropertyID *PropertyID `json:"propertyId"` Property *property.Initializer `json:"property"` Layers []*Initializer `json:"layers"` - LayerIDs []id.LayerID `json:"layerIds"` + LayerIDs []ID `json:"layerIds"` IsVisible *bool `json:"isVisible"` - LinkedDatasetSchema *id.DatasetSchemaID `json:"linkedDatasetSchema"` - LinkedDataset *id.DatasetID `json:"linkedDataset"` + LinkedDatasetSchema *DatasetSchemaID `json:"linkedDatasetSchema"` + LinkedDataset *DatasetID `json:"linkedDataset"` } func (i *Initializer) Clone() *Initializer { @@ -73,9 
+72,9 @@ func (i *Initializer) Clone() *Initializer { } } - var layerIDs []id.LayerID + var layerIDs []ID if len(i.LayerIDs) > 0 { - layerIDs = append([]id.LayerID{}, i.LayerIDs...) + layerIDs = append([]ID{}, i.LayerIDs...) } return &Initializer{ @@ -94,7 +93,7 @@ func (i *Initializer) Clone() *Initializer { } } -func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { +func (i *Initializer) Layer(sid SceneID) (r InitializerResult, err error) { if i == nil { return } @@ -108,7 +107,7 @@ func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { lid := i.ID if i.ID == nil { - lid = id.NewLayerID().Ref() + lid = NewID().Ref() } pid := i.PropertyID @@ -166,7 +165,7 @@ func (i *Initializer) Layer(sid id.SceneID) (r InitializerResult, err error) { return } -func (i *Initializer) MustBeLayer(sid id.SceneID) InitializerResult { +func (i *Initializer) MustBeLayer(sid SceneID) InitializerResult { r, err := i.Layer(sid) if err != nil { panic(err) @@ -175,7 +174,7 @@ func (i *Initializer) MustBeLayer(sid id.SceneID) InitializerResult { } type InitializerInfobox struct { - PropertyID *id.PropertyID `json:"propertyId"` + PropertyID *PropertyID `json:"propertyId"` Property *property.Initializer `json:"property"` Fields []*InitializerInfoboxField `json:"fields"` } @@ -200,7 +199,7 @@ func (i *InitializerInfobox) Clone() *InitializerInfobox { } } -func (i *InitializerInfobox) Infobox(scene id.SceneID) (*Infobox, property.Map, error) { +func (i *InitializerInfobox) Infobox(scene SceneID) (*Infobox, property.Map, error) { if i == nil { return nil, nil, nil } @@ -240,10 +239,10 @@ func (i *InitializerInfobox) Infobox(scene id.SceneID) (*Infobox, property.Map, } type InitializerInfoboxField struct { - ID *id.InfoboxFieldID `json:"id"` - Plugin id.PluginID `json:"plugin"` - Extension id.PluginExtensionID `json:"extension"` - PropertyID *id.PropertyID `json:"propertyId"` + ID *InfoboxFieldID `json:"id"` + Plugin PluginID `json:"plugin"` + 
Extension PluginExtensionID `json:"extension"` + PropertyID *PropertyID `json:"propertyId"` Property *property.Initializer `json:"property"` } @@ -261,19 +260,19 @@ func (i *InitializerInfoboxField) Clone() *InitializerInfoboxField { } } -func (i *InitializerInfoboxField) InfoboxField(scene id.SceneID) (*InfoboxField, *property.Property, error) { +func (i *InitializerInfoboxField) InfoboxField(scene SceneID) (*InfoboxField, *property.Property, error) { if i == nil { return nil, nil, nil } - psid, err := id.PropertySchemaIDFromExtension(i.Plugin, i.Extension) + psid, err := PropertySchemaIDFromExtension(i.Plugin, i.Extension) if err != nil { return nil, nil, err } fid := i.ID if i.ID == nil { - fid = id.NewInfoboxFieldID().Ref() + fid = NewInfoboxFieldID().Ref() } pid := i.PropertyID diff --git a/pkg/layer/initializer_test.go b/pkg/layer/initializer_test.go index 960bf26c7..725ad4f2a 100644 --- a/pkg/layer/initializer_test.go +++ b/pkg/layer/initializer_test.go @@ -3,7 +3,6 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" ) @@ -11,19 +10,19 @@ import ( func TestInitializer_Clone(t *testing.T) { isVisible := false i := &Initializer{ - ID: id.NewLayerID().Ref(), - Plugin: id.MustPluginID("reearth").Ref(), - Extension: id.PluginExtensionID("marker").Ref(), + ID: NewID().Ref(), + Plugin: MustPluginID("reearth").Ref(), + Extension: PluginExtensionID("marker").Ref(), Name: "hoge", Infobox: &InitializerInfobox{}, - PropertyID: id.NewPropertyID().Ref(), + PropertyID: NewPropertyID().Ref(), Property: &property.Initializer{ - ID: id.NewPropertyID().Ref(), + ID: NewPropertyID().Ref(), }, Layers: []*Initializer{{}}, IsVisible: &isVisible, - LinkedDatasetSchema: id.NewDatasetSchemaID().Ref(), - LinkedDataset: id.NewDatasetID().Ref(), + LinkedDatasetSchema: NewDatasetSchemaID().Ref(), + LinkedDataset: NewDatasetID().Ref(), } actual := i.Clone() @@ -44,24 +43,24 
@@ func TestInitializer_Clone(t *testing.T) { } func TestInitializer_Layer(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() isVisible := false i := &Initializer{ - ID: id.NewLayerID().Ref(), - Plugin: id.MustPluginID("reearth").Ref(), - Extension: id.PluginExtensionID("marker").Ref(), + ID: NewID().Ref(), + Plugin: MustPluginID("reearth").Ref(), + Extension: PluginExtensionID("marker").Ref(), Name: "hoge", Infobox: &InitializerInfobox{ - PropertyID: id.NewPropertyID().Ref(), + PropertyID: NewPropertyID().Ref(), }, - PropertyID: id.NewPropertyID().Ref(), + PropertyID: NewPropertyID().Ref(), IsVisible: &isVisible, - LinkedDatasetSchema: id.NewDatasetSchemaID().Ref(), - LinkedDataset: id.NewDatasetID().Ref(), + LinkedDatasetSchema: NewDatasetSchemaID().Ref(), + LinkedDataset: NewDatasetID().Ref(), Layers: []*Initializer{{ - ID: id.NewLayerID().Ref(), + ID: NewID().Ref(), Layers: []*Initializer{{ - ID: id.NewLayerID().Ref(), + ID: NewID().Ref(), }}, }}, } @@ -76,10 +75,10 @@ func TestInitializer_Layer(t *testing.T) { Infobox(NewInfobox(nil, *i.Infobox.PropertyID)). Property(i.PropertyID). Group(). - Layers(NewIDList([]id.LayerID{*i.Layers[0].ID})). + Layers(NewIDList([]ID{*i.Layers[0].ID})). LinkedDatasetSchema(i.LinkedDatasetSchema). 
MustBuild() - expected2 := New().ID(*i.Layers[0].ID).Scene(sid).Group().Layers(NewIDList([]id.LayerID{*i.Layers[0].Layers[0].ID})).MustBuild() + expected2 := New().ID(*i.Layers[0].ID).Scene(sid).Group().Layers(NewIDList([]ID{*i.Layers[0].Layers[0].ID})).MustBuild() expected3 := New().ID(*i.Layers[0].Layers[0].ID).Scene(sid).Item().MustBuild() actual, err := i.Layer(sid) @@ -99,15 +98,15 @@ func TestInitializer_Layer(t *testing.T) { func TestInitializerInfobox_Clone(t *testing.T) { i := &InitializerInfobox{ - PropertyID: id.NewPropertyID().Ref(), + PropertyID: NewPropertyID().Ref(), Property: &property.Initializer{ - ID: id.NewPropertyID().Ref(), + ID: NewPropertyID().Ref(), }, Fields: []*InitializerInfoboxField{{ - ID: id.NewInfoboxFieldID().Ref(), - Plugin: id.MustPluginID("reearth"), - Extension: id.PluginExtensionID("marker"), - PropertyID: id.NewPropertyID().Ref(), + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), }}, } @@ -121,14 +120,14 @@ func TestInitializerInfobox_Clone(t *testing.T) { } func TestInitializerInfobox_Infobox(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() i := &InitializerInfobox{ - PropertyID: id.NewPropertyID().Ref(), + PropertyID: NewPropertyID().Ref(), Fields: []*InitializerInfoboxField{{ - ID: id.NewInfoboxFieldID().Ref(), - Plugin: id.MustPluginID("reearth"), - Extension: id.PluginExtensionID("marker"), - PropertyID: id.NewPropertyID().Ref(), + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), }}, } @@ -148,12 +147,12 @@ func TestInitializerInfobox_Infobox(t *testing.T) { func TestInitializerInfoboxField_Clone(t *testing.T) { i := &InitializerInfoboxField{ - ID: id.NewInfoboxFieldID().Ref(), - Plugin: id.MustPluginID("reearth"), - Extension: id.PluginExtensionID("marker"), - PropertyID: id.NewPropertyID().Ref(), + ID: 
NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), Property: &property.Initializer{ - ID: id.NewPropertyID().Ref(), + ID: NewPropertyID().Ref(), }, } @@ -166,12 +165,12 @@ func TestInitializerInfoboxField_Clone(t *testing.T) { } func TestInitializerInfoboxField_InfoboxField(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() i := &InitializerInfoboxField{ - ID: id.NewInfoboxFieldID().Ref(), - Plugin: id.MustPluginID("reearth"), - Extension: id.PluginExtensionID("marker"), - PropertyID: id.NewPropertyID().Ref(), + ID: NewInfoboxFieldID().Ref(), + Plugin: MustPluginID("reearth"), + Extension: PluginExtensionID("marker"), + PropertyID: NewPropertyID().Ref(), } expected := NewInfoboxField().ID(*i.ID).Plugin(i.Plugin).Extension(i.Extension).Property(*i.PropertyID).MustBuild() diff --git a/pkg/layer/item.go b/pkg/layer/item.go index abf81999c..e134863a5 100644 --- a/pkg/layer/item.go +++ b/pkg/layer/item.go @@ -1,20 +1,19 @@ package layer import ( - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" ) type Item struct { layerBase - linkedDataset *id.DatasetID + linkedDataset *DatasetID } -func (l *Item) ID() id.LayerID { +func (l *Item) ID() ID { return l.layerBase.ID() } -func (l *Item) IDRef() *id.LayerID { +func (l *Item) IDRef() *ID { if l == nil { return nil } @@ -35,14 +34,14 @@ func (l *Item) IsVisible() bool { return l.layerBase.IsVisible() } -func (l *Item) Plugin() *id.PluginID { +func (l *Item) Plugin() *PluginID { if l == nil { return nil } return l.layerBase.Plugin() } -func (l *Item) Extension() *id.PluginExtensionID { +func (l *Item) Extension() *PluginExtensionID { if l == nil { return nil } @@ -56,7 +55,7 @@ func (l *Item) UsesPlugin() bool { return l.layerBase.UsesPlugin() } -func (l *Item) Property() *id.PropertyID { +func (l *Item) Property() *PropertyID { if l == nil { return nil } @@ -91,7 +90,7 @@ func (l 
*Item) SetInfobox(infobox *Infobox) { l.layerBase.SetInfobox(infobox) } -func (l *Item) SetPlugin(plugin *id.PluginID) { +func (l *Item) SetPlugin(plugin *PluginID) { if l == nil { return } @@ -105,7 +104,7 @@ func (l *Item) IsLinked() bool { return l.linkedDataset != nil } -func (l *Item) LinkedDataset() *id.DatasetID { +func (l *Item) LinkedDataset() *DatasetID { if l == nil || l.linkedDataset == nil { return nil } @@ -113,7 +112,7 @@ func (l *Item) LinkedDataset() *id.DatasetID { return &id } -func (l *Item) Link(ds id.DatasetID) { +func (l *Item) Link(ds DatasetID) { if l == nil { return } @@ -136,7 +135,7 @@ func (l *Item) LayerRef() *Layer { return &layer } -func (l *Item) Properties() []id.PropertyID { +func (l *Item) Properties() []PropertyID { if l == nil { return nil } diff --git a/pkg/layer/item_builder.go b/pkg/layer/item_builder.go index c358af080..5375ee317 100644 --- a/pkg/layer/item_builder.go +++ b/pkg/layer/item_builder.go @@ -1,9 +1,5 @@ package layer -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - func ItemFromLayer(l Layer) *Item { li, ok := l.(*Item) if !ok { @@ -32,8 +28,8 @@ func NewItem() *ItemBuilder { } func (b *ItemBuilder) Build() (*Item, error) { - if id.ID(b.l.id).IsNil() { - return nil, id.ErrInvalidID + if b.l.id.IsNil() { + return nil, ErrInvalidID } return b.l, nil } @@ -51,17 +47,17 @@ func (b *ItemBuilder) base(layer layerBase) *ItemBuilder { return b } -func (b *ItemBuilder) ID(id id.LayerID) *ItemBuilder { +func (b *ItemBuilder) ID(id ID) *ItemBuilder { b.l.id = id return b } func (b *ItemBuilder) NewID() *ItemBuilder { - b.l.id = id.NewLayerID() + b.l.id = NewID() return b } -func (b *ItemBuilder) Scene(s id.SceneID) *ItemBuilder { +func (b *ItemBuilder) Scene(s SceneID) *ItemBuilder { b.l.scene = s return b } @@ -76,17 +72,17 @@ func (b *ItemBuilder) IsVisible(visible bool) *ItemBuilder { return b } -func (b *ItemBuilder) Plugin(plugin *id.PluginID) *ItemBuilder { +func (b *ItemBuilder) Plugin(plugin *PluginID) 
*ItemBuilder { b.l.plugin = plugin.CopyRef() return b } -func (b *ItemBuilder) Extension(extension *id.PluginExtensionID) *ItemBuilder { +func (b *ItemBuilder) Extension(extension *PluginExtensionID) *ItemBuilder { b.l.extension = extension.CopyRef() return b } -func (b *ItemBuilder) Property(p *id.PropertyID) *ItemBuilder { +func (b *ItemBuilder) Property(p *PropertyID) *ItemBuilder { b.l.property = p.CopyRef() return b } @@ -96,7 +92,7 @@ func (b *ItemBuilder) Infobox(infobox *Infobox) *ItemBuilder { return b } -func (b *ItemBuilder) LinkedDataset(linkedDataset *id.DatasetID) *ItemBuilder { +func (b *ItemBuilder) LinkedDataset(linkedDataset *DatasetID) *ItemBuilder { b.l.linkedDataset = linkedDataset.CopyRef() return b } diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go index 86461c251..1119e7cf7 100644 --- a/pkg/layer/layer.go +++ b/pkg/layer/layer.go @@ -4,7 +4,6 @@ import ( "errors" "fmt" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" ) @@ -14,22 +13,22 @@ var ( ) type Layer interface { - ID() id.LayerID + ID() ID Name() string IsVisible() bool - Plugin() *id.PluginID - Extension() *id.PluginExtensionID + Plugin() *PluginID + Extension() *PluginExtensionID UsesPlugin() bool - Property() *id.PropertyID + Property() *PropertyID HasInfobox() bool Infobox() *Infobox - Scene() id.SceneID + Scene() SceneID Tags() *TagList Rename(string) SetVisible(bool) SetInfobox(*Infobox) - SetPlugin(*id.PluginID) - Properties() []id.PropertyID + SetPlugin(*PluginID) + Properties() []PropertyID ValidateProperties(property.Map) error } @@ -70,22 +69,22 @@ func ToLayerItemRef(l *Layer) *Item { } type layerBase struct { - id id.LayerID + id ID name string visible bool - plugin *id.PluginID - extension *id.PluginExtensionID - property *id.PropertyID + plugin *PluginID + extension *PluginExtensionID + property *PropertyID infobox *Infobox - scene id.SceneID + scene SceneID tags *TagList } -func (l *layerBase) ID() id.LayerID { +func 
(l *layerBase) ID() ID { return l.id } -func (l *layerBase) IDRef() *id.LayerID { +func (l *layerBase) IDRef() *ID { if l == nil { return nil } @@ -113,21 +112,21 @@ func (l *layerBase) UsesPlugin() bool { return l.plugin != nil && l.extension != nil } -func (l *layerBase) Plugin() *id.PluginID { +func (l *layerBase) Plugin() *PluginID { if l == nil { return nil } return l.plugin.CopyRef() } -func (l *layerBase) Extension() *id.PluginExtensionID { +func (l *layerBase) Extension() *PluginExtensionID { if l == nil { return nil } return l.extension.CopyRef() } -func (l *layerBase) Property() *id.PropertyID { +func (l *layerBase) Property() *PropertyID { if l == nil { return nil } @@ -148,7 +147,7 @@ func (l *layerBase) Infobox() *Infobox { return l.infobox } -func (l *layerBase) Scene() id.SceneID { +func (l *layerBase) Scene() SceneID { return l.scene } @@ -173,18 +172,18 @@ func (l *layerBase) SetInfobox(infobox *Infobox) { l.infobox = infobox } -func (l *layerBase) SetPlugin(plugin *id.PluginID) { +func (l *layerBase) SetPlugin(plugin *PluginID) { if l == nil { return } l.plugin = plugin.CopyRef() } -func (l *layerBase) Properties() []id.PropertyID { +func (l *layerBase) Properties() []PropertyID { if l == nil { return nil } - res := []id.PropertyID{} + res := []PropertyID{} if l.property != nil { res = append(res, *l.property) } @@ -208,7 +207,7 @@ func (l *layerBase) ValidateProperties(pm property.Map) error { return errors.New("layer should have plugin id and extension id") } - psid, err := id.PropertySchemaIDFromExtension(*l.plugin, *l.extension) + psid, err := PropertySchemaIDFromExtension(*l.plugin, *l.extension) if err != nil { return errors.New("layer has invalid plugin id and extension id") } diff --git a/pkg/layer/layerops/initializer.go b/pkg/layer/layerops/initializer.go index f69b3e2b8..c19433802 100644 --- a/pkg/layer/layerops/initializer.go +++ b/pkg/layer/layerops/initializer.go @@ -3,19 +3,18 @@ package layerops import ( "errors" - 
"github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" ) type LayerItem struct { - SceneID id.SceneID - ParentLayerID id.LayerID + SceneID layer.SceneID + ParentLayerID layer.ID Plugin *plugin.Plugin - ExtensionID *id.PluginExtensionID + ExtensionID *layer.PluginExtensionID Index *int - LinkedDatasetID *id.DatasetID + LinkedDatasetID *layer.DatasetID Name string LinkablePropertySchema *property.Schema LatLng *property.LatLng diff --git a/pkg/layer/layerops/initializer_test.go b/pkg/layer/layerops/initializer_test.go index d016a31ce..b38bd827b 100644 --- a/pkg/layer/layerops/initializer_test.go +++ b/pkg/layer/layerops/initializer_test.go @@ -5,16 +5,16 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/stretchr/testify/assert" ) func TestInitialize(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - ps := id.MustPropertySchemaID("xxx~1.1.1/aa") - eid := id.PluginExtensionID("foo") - eid2 := id.PluginExtensionID("foo2") + lid := layer.NewID() + ps := plugin.MustPropertySchemaID("xxx~1.1.1/aa") + eid := plugin.ExtensionID("foo") + eid2 := plugin.ExtensionID("foo2") e := plugin.NewExtension(). ID("foo"). Description(i18n.StringFrom("foo/des")). @@ -29,17 +29,17 @@ func TestInitialize(t *testing.T) { es := append(make([]*plugin.Extension, 0), e) es = append(es, e2) p := plugin.New(). - ID(id.MustPluginID("xxx~1.1.1")). + ID(layer.MustPluginID("xxx~1.1.1")). Schema(&ps). Extensions(es). 
MustBuild() - s := id.NewSceneID() + s := layer.NewSceneID() testCases := []struct { name string - sceneID *id.SceneID - parentLayerID *id.LayerID + sceneID *layer.SceneID + parentLayerID *layer.ID plugin *plugin.Plugin - extID *id.PluginExtensionID + extID *layer.PluginExtensionID err error }{ { diff --git a/pkg/layer/layerops/processor.go b/pkg/layer/layerops/processor.go index aa02737af..ce59fbd7c 100644 --- a/pkg/layer/layerops/processor.go +++ b/pkg/layer/layerops/processor.go @@ -3,22 +3,21 @@ package layerops import ( "context" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" ) type Processor struct { - RootLayerID id.LayerID + RootLayerID layer.ID LayerLoader layer.Loader } type UninstallPluginResult struct { ModifiedLayers layer.List RemovedLayers *layer.IDList - RemovedProperties []id.PropertyID + RemovedProperties []layer.PropertyID } -func (p Processor) UninstallPlugin(ctx context.Context, pluginID id.PluginID) (res UninstallPluginResult, err error) { +func (p Processor) UninstallPlugin(ctx context.Context, pluginID layer.PluginID) (res UninstallPluginResult, err error) { err = p.LayerLoader.Walk(ctx, func(l layer.Layer, parents layer.GroupList) error { parent := parents.Last() parentRemoved := parent != nil && res.RemovedLayers.HasLayer(parent.ID()) @@ -42,7 +41,7 @@ func (p Processor) UninstallPlugin(ctx context.Context, pluginID id.PluginID) (r res.RemovedProperties = append(res.RemovedProperties, l.Properties()...) 
res.ModifiedLayers = res.ModifiedLayers.Remove(l.ID()) return nil - }, []id.LayerID{p.RootLayerID}) + }, []layer.ID{p.RootLayerID}) return } diff --git a/pkg/layer/layerops/processor_test.go b/pkg/layer/layerops/processor_test.go index 819936433..858ee63a5 100644 --- a/pkg/layer/layerops/processor_test.go +++ b/pkg/layer/layerops/processor_test.go @@ -4,23 +4,22 @@ import ( "context" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/stretchr/testify/assert" ) func TestProcessor_UninstallPlugin(t *testing.T) { - sid := id.NewSceneID() - pid := id.MustPluginID("hoge~1.0.0") - pid2 := id.MustPluginID("hoge~1.0.1") - ibf1 := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(id.NewPropertyID()).MustBuild() - ibf2 := layer.NewInfoboxField().NewID().Plugin(pid2).Extension("a").Property(id.NewPropertyID()).MustBuild() - ib := layer.NewInfobox([]*layer.InfoboxField{ibf1, ibf2}, id.NewPropertyID()) - l1 := layer.NewItem().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Plugin(&pid).MustBuild() - l2 := layer.NewItem().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Plugin(&pid2).MustBuild() - l3 := layer.NewItem().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Plugin(&pid2).Infobox(ib).MustBuild() - l4 := layer.NewGroup().NewID().Scene(sid).Property(id.NewPropertyID().Ref()).Layers(layer.NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() - l5 := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + sid := layer.NewSceneID() + pid := layer.MustPluginID("hoge~1.0.0") + pid2 := layer.MustPluginID("hoge~1.0.1") + ibf1 := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(layer.NewPropertyID()).MustBuild() + ibf2 := layer.NewInfoboxField().NewID().Plugin(pid2).Extension("a").Property(layer.NewPropertyID()).MustBuild() + ib := layer.NewInfobox([]*layer.InfoboxField{ibf1, ibf2}, layer.NewPropertyID()) + l1 := 
layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&pid).MustBuild() + l2 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&pid2).MustBuild() + l3 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&pid2).Infobox(ib).MustBuild() + l4 := layer.NewGroup().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Layers(layer.NewIDList([]layer.ID{l1.ID(), l2.ID()})).MustBuild() + l5 := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{l3.ID(), l4.ID()})).MustBuild() res, err := Processor{ LayerLoader: layer.LoaderFrom([]layer.Layer{l1, l2, l3, l4, l5}), @@ -30,10 +29,10 @@ func TestProcessor_UninstallPlugin(t *testing.T) { assert.NoError(t, err) assert.Equal(t, UninstallPluginResult{ ModifiedLayers: layer.List{l3.LayerRef(), l4.LayerRef()}, - RemovedLayers: layer.NewIDList([]id.LayerID{l1.ID()}), - RemovedProperties: []id.PropertyID{ibf1.Property(), *l1.Property()}, + RemovedLayers: layer.NewIDList([]layer.ID{l1.ID()}), + RemovedProperties: []layer.PropertyID{ibf1.Property(), *l1.Property()}, }, res) - assert.Equal(t, layer.NewIDList([]id.LayerID{l2.ID()}), l4.Layers()) + assert.Equal(t, layer.NewIDList([]layer.ID{l2.ID()}), l4.Layers()) assert.Equal(t, []*layer.InfoboxField{ibf2}, ib.Fields()) } diff --git a/pkg/layer/list.go b/pkg/layer/list.go index d6fb1b31f..1ce98849a 100644 --- a/pkg/layer/list.go +++ b/pkg/layer/list.go @@ -1,11 +1,5 @@ package layer -import ( - "sort" - - "github.com/reearth/reearth-backend/pkg/id" -) - type List []*Layer func (ll List) Last() *Layer { @@ -29,7 +23,7 @@ func (ll List) Pick(il *IDList) List { return layers } -func (ll List) Find(lid id.LayerID) *Layer { +func (ll List) Find(lid ID) *Layer { for _, l := range ll { if l == nil { continue @@ -41,7 +35,7 @@ func (ll List) Find(lid id.LayerID) *Layer { return nil } -func (ll List) FindByDataset(ds id.DatasetID) *Item { +func (ll List) FindByDataset(ds DatasetID) *Item { 
for _, l := range ll { if li := ItemFromLayerRef(l); li != nil { dsid := li.LinkedDataset() @@ -111,7 +105,7 @@ func (ll List) Map() Map { return m } -func (ll List) Remove(lids ...id.LayerID) List { +func (ll List) Remove(lids ...ID) List { if ll == nil { return nil } @@ -139,7 +133,7 @@ func (ll List) Remove(lids ...id.LayerID) List { type ItemList []*Item -func (ll ItemList) FindByDataset(ds id.DatasetID) *Item { +func (ll ItemList) FindByDataset(ds DatasetID) *Item { for _, li := range ll { dsid := li.LinkedDataset() if dsid != nil && *dsid == ds { @@ -183,7 +177,7 @@ func (ll GroupList) Last() *Group { return ll[len(ll)-1] } -type Map map[id.LayerID]*Layer +type Map map[ID]*Layer func MapFrom(l Layer) Map { return List{&l}.Map() @@ -191,7 +185,7 @@ func MapFrom(l Layer) Map { func (m Map) Add(layers ...*Layer) Map { if m == nil { - m = map[id.LayerID]*Layer{} + m = map[ID]*Layer{} } for _, l := range layers { if l == nil { @@ -254,35 +248,33 @@ func (m Map) Pick(il *IDList) List { return layers } -func (m Map) Layer(i id.LayerID) Layer { +func (m Map) Layer(i ID) Layer { if l := m[i]; l != nil { return *l } return nil } -func (m Map) Item(i id.LayerID) *Item { +func (m Map) Item(i ID) *Item { if l := ToLayerItem(m.Layer(i)); l != nil { return l } return nil } -func (m Map) Group(i id.LayerID) *Group { +func (m Map) Group(i ID) *Group { if l := ToLayerGroup(m.Layer(i)); l != nil { return l } return nil } -func (m Map) Keys() []id.LayerID { - keys := make([]id.LayerID, 0, len(m)) +func (m Map) Keys() []ID { + keys := make([]ID, 0, len(m)) for k := range m { keys = append(keys, k) } - sort.SliceStable(keys, func(i, j int) bool { - return id.ID(keys[i]).Compare(id.ID(keys[j])) < 0 - }) + sortIDs(keys) return keys } diff --git a/pkg/layer/list_test.go b/pkg/layer/list_test.go index 398325dc1..067f2455f 100644 --- a/pkg/layer/list_test.go +++ b/pkg/layer/list_test.go @@ -3,12 +3,11 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" 
"github.com/stretchr/testify/assert" ) func TestList_Remove(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() l1 := NewItem().NewID().Scene(sid).MustBuild() l2 := NewItem().NewID().Scene(sid).MustBuild() l3 := NewItem().NewID().Scene(sid).MustBuild() diff --git a/pkg/layer/loader.go b/pkg/layer/loader.go index 0cce4aa7b..896c8284d 100644 --- a/pkg/layer/loader.go +++ b/pkg/layer/loader.go @@ -3,17 +3,15 @@ package layer import ( "context" "errors" - - "github.com/reearth/reearth-backend/pkg/id" ) -type Loader func(context.Context, ...id.LayerID) (List, error) -type LoaderByScene func(context.Context, id.SceneID) (List, error) +type Loader func(context.Context, ...ID) (List, error) +type LoaderByScene func(context.Context, SceneID) (List, error) var WalkerSkipChildren = errors.New("LAYER_WALKER_SKIP_CHILDREN") func LoaderFrom(data []Layer) Loader { - return func(ctx context.Context, ids ...id.LayerID) (List, error) { + return func(ctx context.Context, ids ...ID) (List, error) { res := make([]*Layer, 0, len(ids)) for _, i := range ids { found := false @@ -32,8 +30,8 @@ func LoaderFrom(data []Layer) Loader { } } -func LoaderFromMap(data map[id.LayerID]Layer) Loader { - return func(ctx context.Context, ids ...id.LayerID) (List, error) { +func LoaderFromMap(data map[ID]Layer) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { res := make([]*Layer, 0, len(ids)) for _, i := range ids { if d, ok := data[i]; ok { @@ -46,9 +44,9 @@ func LoaderFromMap(data map[id.LayerID]Layer) Loader { } } -func (l Loader) Walk(ctx context.Context, walker func(Layer, GroupList) error, init []id.LayerID) error { - var walk func(ids []id.LayerID, parents GroupList) error - walk = func(ids []id.LayerID, parents GroupList) error { +func (l Loader) Walk(ctx context.Context, walker func(Layer, GroupList) error, init []ID) error { + var walk func(ids []ID, parents GroupList) error + walk = func(ids []ID, parents GroupList) error { loaded, err := l(ctx, ids...) 
if err != nil { return err diff --git a/pkg/layer/loader_test.go b/pkg/layer/loader_test.go index e51b95596..f5a5ad4d4 100644 --- a/pkg/layer/loader_test.go +++ b/pkg/layer/loader_test.go @@ -5,17 +5,16 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestLoader_Walk(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() l1 := NewItem().NewID().Scene(sid).MustBuild() l2 := NewItem().NewID().Scene(sid).MustBuild() l3 := NewItem().NewID().Scene(sid).MustBuild() - l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() - l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l3.ID(), l4.ID()})).MustBuild() w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) layers := []Layer{} @@ -24,7 +23,7 @@ func TestLoader_Walk(t *testing.T) { layers = append(layers, l) parents = append(parents, p) return nil - }, []id.LayerID{l5.ID()}) + }, []ID{l5.ID()}) assert.NoError(t, err) assert.Equal(t, []Layer{l5, l3, l4, l1, l2}, layers) @@ -32,12 +31,12 @@ func TestLoader_Walk(t *testing.T) { } func TestLoader_Walk2(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() l1 := NewItem().NewID().Scene(sid).MustBuild() l2 := NewItem().NewID().Scene(sid).MustBuild() l3 := NewItem().NewID().Scene(sid).MustBuild() - l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() - l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l3.ID(), l4.ID()})).MustBuild() w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) layers := []Layer{} @@ -46,7 +45,7 @@ func 
TestLoader_Walk2(t *testing.T) { layers = append(layers, l) parents = append(parents, p) return WalkerSkipChildren - }, []id.LayerID{l5.ID()}) + }, []ID{l5.ID()}) assert.NoError(t, err) assert.Equal(t, []Layer{l5}, layers) @@ -54,12 +53,12 @@ func TestLoader_Walk2(t *testing.T) { } func TestLoader_Walk3(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() l1 := NewItem().NewID().Scene(sid).MustBuild() l2 := NewItem().NewID().Scene(sid).MustBuild() l3 := NewItem().NewID().Scene(sid).MustBuild() - l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l1.ID(), l2.ID()})).MustBuild() - l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]id.LayerID{l3.ID(), l4.ID()})).MustBuild() + l4 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l1.ID(), l2.ID()})).MustBuild() + l5 := NewGroup().NewID().Scene(sid).Layers(NewIDList([]ID{l3.ID(), l4.ID()})).MustBuild() w := LoaderFrom([]Layer{l1, l2, l3, l4, l5}) err := errors.New("Error") @@ -72,7 +71,7 @@ func TestLoader_Walk3(t *testing.T) { return err } return nil - }, []id.LayerID{l5.ID()}) + }, []ID{l5.ID()}) assert.Same(t, err, err2) assert.Equal(t, []Layer{l5, l3, l4}, layers) diff --git a/pkg/layer/merged.go b/pkg/layer/merged.go index 59f900c8b..dfbeb2e8f 100644 --- a/pkg/layer/merged.go +++ b/pkg/layer/merged.go @@ -1,20 +1,19 @@ package layer import ( - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" ) // Merged represents a merged layer from two layers type Merged struct { - Original id.LayerID - Parent *id.LayerID + Original ID + Parent *ID Name string - Scene id.SceneID + Scene SceneID Property *property.MergedMetadata Infobox *MergedInfobox - PluginID *id.PluginID - ExtensionID *id.PluginExtensionID + PluginID *PluginID + ExtensionID *PluginExtensionID } // MergedInfobox represents a merged info box from two layers @@ -25,9 +24,9 @@ type MergedInfobox struct { // MergedInfoboxField represents a field of MergedInfobox type MergedInfoboxField 
struct { - ID id.InfoboxFieldID - Plugin id.PluginID - Extension id.PluginExtensionID + ID InfoboxFieldID + Plugin PluginID + Extension PluginExtensionID Property *property.MergedMetadata } @@ -54,7 +53,7 @@ func Merge(o Layer, p *Group) *Merged { } // MergeInfobox merges two infoboxes -func MergeInfobox(o *Infobox, p *Infobox, linked *id.DatasetID) *MergedInfobox { +func MergeInfobox(o *Infobox, p *Infobox, linked *DatasetID) *MergedInfobox { if o == nil && p == nil { return nil } @@ -92,12 +91,12 @@ func MergeInfobox(o *Infobox, p *Infobox, linked *id.DatasetID) *MergedInfobox { } // Properties returns all property IDs in Merged -func (m *Merged) Properties() []id.PropertyID { +func (m *Merged) Properties() []PropertyID { if m == nil { return nil } - added := map[id.PropertyID]struct{}{} - result := []id.PropertyID{} + added := map[PropertyID]struct{}{} + result := []PropertyID{} if m.Property != nil { if m.Property.Original != nil { t := *m.Property.Original diff --git a/pkg/layer/merged_test.go b/pkg/layer/merged_test.go index d98b9c3ae..eadd791c0 100644 --- a/pkg/layer/merged_test.go +++ b/pkg/layer/merged_test.go @@ -3,24 +3,23 @@ package layer import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" ) func TestMerge(t *testing.T) { - scene := id.NewSceneID() - dataset1 := id.NewDatasetID() - p := id.MustPluginID("xxx~1.1.1") - e := id.PluginExtensionID("foo") + scene := NewSceneID() + dataset1 := NewDatasetID() + p := MustPluginID("xxx~1.1.1") + e := PluginExtensionID("foo") - itemProperty := id.NewPropertyID() - groupProperty := id.NewPropertyID() - ib1pr := id.NewPropertyID() - ib2pr := id.NewPropertyID() - f1pr := id.NewPropertyID() - f2pr := id.NewPropertyID() - f3pr := id.NewPropertyID() + itemProperty := NewPropertyID() + groupProperty := NewPropertyID() + ib1pr := NewPropertyID() + ib2pr := NewPropertyID() + f1pr := NewPropertyID() + f2pr := NewPropertyID() 
+ f3pr := NewPropertyID() f1 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f1pr).MustBuild() f2 := NewInfoboxField().NewID().Plugin(p).Extension(e).Property(f2pr).MustBuild() @@ -317,13 +316,13 @@ func TestMerge(t *testing.T) { } func TestMergedProperties(t *testing.T) { - itemProperty := id.NewPropertyID() - groupProperty := id.NewPropertyID() - ib1pr := id.NewPropertyID() - ib2pr := id.NewPropertyID() - f1pr := id.NewPropertyID() - f2pr := id.NewPropertyID() - f3pr := id.NewPropertyID() + itemProperty := NewPropertyID() + groupProperty := NewPropertyID() + ib1pr := NewPropertyID() + ib2pr := NewPropertyID() + f1pr := NewPropertyID() + f2pr := NewPropertyID() + f3pr := NewPropertyID() merged := &Merged{ Property: &property.MergedMetadata{ @@ -352,7 +351,7 @@ func TestMergedProperties(t *testing.T) { }, } - assert.Equal(t, []id.PropertyID{ + assert.Equal(t, []PropertyID{ itemProperty, groupProperty, ib1pr, ib2pr, f1pr, f2pr, f3pr, }, merged.Properties()) } diff --git a/pkg/layer/merging/merged.go b/pkg/layer/merging/merged.go index c9ed65dfc..3111b1723 100644 --- a/pkg/layer/merging/merged.go +++ b/pkg/layer/merging/merged.go @@ -1,7 +1,6 @@ package merging import ( - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) @@ -13,7 +12,7 @@ var ( type MergedLayer interface { Common() *MergedLayerCommon - AllDatasets() []id.DatasetID + AllDatasets() []layer.DatasetID } type MergedLayerGroup struct { @@ -56,15 +55,15 @@ func (l *MergedLayerItem) Common() *MergedLayerCommon { return &l.MergedLayerCommon } -func (l *MergedLayerCommon) Datasets() []id.DatasetID { +func (l *MergedLayerCommon) Datasets() []layer.DatasetID { return l.datasetIDSet().All() } -func (l *MergedLayerCommon) datasetIDSet() *id.DatasetIDSet { +func (l *MergedLayerCommon) datasetIDSet() *layer.DatasetIDSet { if l == nil { return nil } - res := id.NewDatasetIDSet() + res := layer.NewDatasetIDSet() 
res.Add(l.Property.Datasets()...) res.Add(l.Infobox.Property.Datasets()...) for _, f := range l.Infobox.Fields { @@ -73,18 +72,18 @@ func (l *MergedLayerCommon) datasetIDSet() *id.DatasetIDSet { return res } -func (l *MergedLayerItem) AllDatasets() []id.DatasetID { +func (l *MergedLayerItem) AllDatasets() []layer.DatasetID { if l == nil { return nil } return l.Datasets() } -func (l *MergedLayerGroup) AllDatasets() []id.DatasetID { +func (l *MergedLayerGroup) AllDatasets() []layer.DatasetID { return l.allDatasetIDSet().All() } -func (l *MergedLayerGroup) allDatasetIDSet() *id.DatasetIDSet { +func (l *MergedLayerGroup) allDatasetIDSet() *layer.DatasetIDSet { if l == nil { return nil } diff --git a/pkg/layer/merging/merger.go b/pkg/layer/merging/merger.go index c29919719..41a9a0bdf 100644 --- a/pkg/layer/merging/merger.go +++ b/pkg/layer/merging/merger.go @@ -3,7 +3,6 @@ package merging import ( "context" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" ) @@ -57,7 +56,7 @@ func (m *Merger) MergeLayer(ctx context.Context, l layer.Layer, parent *layer.Gr return nil, nil } -func (m *Merger) MergeLayerFromID(ctx context.Context, i id.LayerID, parent *layer.Group) (MergedLayer, error) { +func (m *Merger) MergeLayerFromID(ctx context.Context, i layer.ID, parent *layer.Group) (MergedLayer, error) { l, err := m.LayerLoader(ctx, i) if err != nil { return nil, err diff --git a/pkg/layer/merging/merger_test.go b/pkg/layer/merging/merger_test.go index cc9263dc9..52c81b0b7 100644 --- a/pkg/layer/merging/merger_test.go +++ b/pkg/layer/merging/merger_test.go @@ -4,7 +4,6 @@ import ( "context" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/stretchr/testify/assert" @@ -12,19 +11,19 @@ import ( func TestMergeLayer(t *testing.T) { // ids - scene := id.NewSceneID() - dataset1 := 
id.NewDatasetID() - ps := id.MustPropertySchemaID("xxx~1.1.1/aa") - p := id.MustPluginID("xxx~1.1.1") - e := id.PluginExtensionID("foo") - itemProperty := id.NewPropertyID() - groupProperty := id.NewPropertyID() - ib1pr := id.NewPropertyID() - ib2pr := id.NewPropertyID() - fpr := id.NewPropertyID() - l1 := id.NewLayerID() - l2 := id.NewLayerID() - l1if1 := id.NewInfoboxFieldID() + scene := layer.NewSceneID() + dataset1 := layer.NewDatasetID() + ps := property.MustSchemaID("xxx~1.1.1/aa") + p := layer.MustPluginID("xxx~1.1.1") + e := layer.PluginExtensionID("foo") + itemProperty := property.NewID() + groupProperty := property.NewID() + ib1pr := property.NewID() + ib2pr := property.NewID() + fpr := property.NewID() + l1 := layer.NewID() + l2 := layer.NewID() + l1if1 := layer.NewInfoboxFieldID() // property loader ploader := property.LoaderFrom([]*property.Property{ @@ -51,7 +50,7 @@ func TestMergeLayer(t *testing.T) { Infobox(layer.NewInfobox([]*layer.InfoboxField{ layer.NewInfoboxField().ID(l1if1).Plugin(p).Extension(e).Property(fpr).MustBuild(), }, ib2pr)). - Layers(layer.NewIDList([]id.LayerID{l1})). + Layers(layer.NewIDList([]layer.ID{l1})). 
MustBuild(), }) diff --git a/pkg/layer/tag.go b/pkg/layer/tag.go index d3446473b..715886f44 100644 --- a/pkg/layer/tag.go +++ b/pkg/layer/tag.go @@ -1,11 +1,5 @@ package layer -import "github.com/reearth/reearth-backend/pkg/id" - -type TagID = id.TagID - -var NewTagID = id.NewTagID - type TagList struct { tags []Tag } diff --git a/pkg/plugin/builder.go b/pkg/plugin/builder.go index 44e45910d..4bdb9c3f0 100644 --- a/pkg/plugin/builder.go +++ b/pkg/plugin/builder.go @@ -2,7 +2,6 @@ package plugin import ( "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" ) type Builder struct { @@ -26,7 +25,7 @@ func (b *Builder) MustBuild() *Plugin { return p } -func (b *Builder) ID(id id.PluginID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.p.id = id return b } @@ -58,8 +57,8 @@ func (b *Builder) Extensions(extensions []*Extension) *Builder { return b } - b.p.extensions = make(map[id.PluginExtensionID]*Extension, len(extensions)) - b.p.extensionOrder = make([]id.PluginExtensionID, 0, len(extensions)) + b.p.extensions = make(map[ExtensionID]*Extension, len(extensions)) + b.p.extensionOrder = make([]ExtensionID, 0, len(extensions)) for _, e := range extensions { b.p.extensions[e.ID()] = e b.p.extensionOrder = append(b.p.extensionOrder, e.ID()) @@ -67,7 +66,7 @@ func (b *Builder) Extensions(extensions []*Extension) *Builder { return b } -func (b *Builder) Schema(schema *id.PropertySchemaID) *Builder { +func (b *Builder) Schema(schema *PropertySchemaID) *Builder { if schema == nil { b.p.schema = nil } else { diff --git a/pkg/plugin/builder_test.go b/pkg/plugin/builder_test.go index e4b5e94b8..a773130fb 100644 --- a/pkg/plugin/builder_test.go +++ b/pkg/plugin/builder_test.go @@ -5,14 +5,13 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestBuilder_ID(t *testing.T) { var b = New() - res := 
b.ID(id.MustPluginID("aaa~1.1.1")).MustBuild() - assert.Equal(t, id.MustPluginID("aaa~1.1.1"), res.ID()) + res := b.ID(MustID("aaa~1.1.1")).MustBuild() + assert.Equal(t, MustID("aaa~1.1.1"), res.ID()) } func TestBuilder_Name(t *testing.T) { @@ -36,7 +35,7 @@ func TestBuilder_Description(t *testing.T) { func TestBuilder_Schema(t *testing.T) { testCases := []struct { name string - sid, expected *id.PropertySchemaID + sid, expected *PropertySchemaID }{ { name: "nil schema", @@ -45,8 +44,8 @@ func TestBuilder_Schema(t *testing.T) { }, { name: "build schema", - sid: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), - expected: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + sid: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + expected: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, } for _, tc := range testCases { @@ -78,15 +77,15 @@ func TestBuilder_RepositoryURL(t *testing.T) { func TestBuilder_Build(t *testing.T) { testCases := []struct { name, author, repositoryURL string - id id.PluginID + id ID pname, description i18n.String ext []*Extension - schema *id.PropertySchemaID + schema *PropertySchemaID expected *Plugin err error // skip for now as error is always nil }{ { - id: id.MustPluginID("hoge~0.1.0"), + id: MustID("hoge~0.1.0"), name: "success build new plugin", author: "aaa", repositoryURL: "uuu", @@ -96,19 +95,19 @@ func TestBuilder_Build(t *testing.T) { NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild(), }, - schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), expected: &Plugin{ - id: id.MustPluginID("hoge~0.1.0"), + id: MustID("hoge~0.1.0"), name: i18n.StringFrom("nnn"), author: "aaa", description: i18n.StringFrom("ddd"), repositoryURL: "uuu", - extensions: map[id.PluginExtensionID]*Extension{ - id.PluginExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), - id.PluginExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), + extensions: map[ExtensionID]*Extension{ 
+ ExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), + ExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), }, - extensionOrder: []id.PluginExtensionID{id.PluginExtensionID("xxx"), id.PluginExtensionID("yyy")}, - schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + extensionOrder: []ExtensionID{ExtensionID("xxx"), ExtensionID("yyy")}, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, }, } @@ -137,14 +136,14 @@ func TestBuilder_Build(t *testing.T) { func TestBuilder_MustBuild(t *testing.T) { testCases := []struct { name, author, repositoryURL string - id id.PluginID + id ID pname, description i18n.String ext []*Extension - schema *id.PropertySchemaID + schema *PropertySchemaID expected *Plugin }{ { - id: id.MustPluginID("hoge~0.1.0"), + id: MustID("hoge~0.1.0"), name: "success build new plugin", author: "aaa", repositoryURL: "uuu", @@ -154,19 +153,19 @@ func TestBuilder_MustBuild(t *testing.T) { NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild(), }, - schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), expected: &Plugin{ - id: id.MustPluginID("hoge~0.1.0"), + id: MustID("hoge~0.1.0"), name: i18n.StringFrom("nnn"), author: "aaa", description: i18n.StringFrom("ddd"), repositoryURL: "uuu", - extensions: map[id.PluginExtensionID]*Extension{ - id.PluginExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), - id.PluginExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), + extensions: map[ExtensionID]*Extension{ + ExtensionID("xxx"): NewExtension().ID("xxx").MustBuild(), + ExtensionID("yyy"): NewExtension().ID("yyy").MustBuild(), }, - extensionOrder: []id.PluginExtensionID{id.PluginExtensionID("xxx"), id.PluginExtensionID("yyy")}, - schema: id.MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + extensionOrder: []ExtensionID{ExtensionID("xxx"), ExtensionID("yyy")}, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, }, } diff --git 
a/pkg/plugin/extension.go b/pkg/plugin/extension.go index 4c2795000..4578ad30f 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ -4,7 +4,6 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" ) @@ -21,18 +20,18 @@ var ( ) type Extension struct { - id id.PluginExtensionID + id ExtensionID extensionType ExtensionType name i18n.String description i18n.String icon string - schema id.PropertySchemaID + schema PropertySchemaID visualizer visualizer.Visualizer singleOnly bool widgetLayout *WidgetLayout } -func (w *Extension) ID() id.PluginExtensionID { +func (w *Extension) ID() ExtensionID { return w.id } @@ -52,7 +51,7 @@ func (w *Extension) Icon() string { return w.icon } -func (w *Extension) Schema() id.PropertySchemaID { +func (w *Extension) Schema() PropertySchemaID { return w.schema } diff --git a/pkg/plugin/extension_builder.go b/pkg/plugin/extension_builder.go index 65f3e7d23..bd8781ff7 100644 --- a/pkg/plugin/extension_builder.go +++ b/pkg/plugin/extension_builder.go @@ -4,7 +4,6 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" ) @@ -19,7 +18,7 @@ func NewExtension() *ExtensionBuilder { func (b *ExtensionBuilder) Build() (*Extension, error) { if string(b.p.id) == "" { - return nil, id.ErrInvalidID + return nil, ErrInvalidID } if !b.s { if b.p.extensionType == ExtensionTypeVisualizer || b.p.extensionType == ExtensionTypeInfobox { @@ -37,7 +36,7 @@ func (b *ExtensionBuilder) MustBuild() *Extension { return p } -func (b *ExtensionBuilder) ID(id id.PluginExtensionID) *ExtensionBuilder { +func (b *ExtensionBuilder) ID(id ExtensionID) *ExtensionBuilder { b.p.id = id return b } @@ -62,7 +61,7 @@ func (b *ExtensionBuilder) Icon(icon string) *ExtensionBuilder { return b } -func (b *ExtensionBuilder) Schema(schema 
id.PropertySchemaID) *ExtensionBuilder { +func (b *ExtensionBuilder) Schema(schema PropertySchemaID) *ExtensionBuilder { b.p.schema = schema return b } diff --git a/pkg/plugin/extension_builder_test.go b/pkg/plugin/extension_builder_test.go index 8da82fe20..d281ac021 100644 --- a/pkg/plugin/extension_builder_test.go +++ b/pkg/plugin/extension_builder_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" "github.com/stretchr/testify/assert" ) @@ -24,7 +23,7 @@ func TestExtensionBuilder_Description(t *testing.T) { func TestExtensionBuilder_ID(t *testing.T) { var b = NewExtension() res := b.ID("xxx").MustBuild() - assert.Equal(t, id.PluginExtensionID("xxx"), res.ID()) + assert.Equal(t, ExtensionID("xxx"), res.ID()) } func TestExtensionBuilder_Type(t *testing.T) { @@ -47,8 +46,8 @@ func TestExtensionBuilder_SingleOnly(t *testing.T) { func TestExtensionBuilder_Schema(t *testing.T) { var b = NewExtension() - res := b.ID("xxx").Schema(id.MustPropertySchemaID("hoge~0.1.0/fff")).MustBuild() - assert.Equal(t, id.MustPropertySchemaID("hoge~0.1.0/fff"), res.Schema()) + res := b.ID("xxx").Schema(MustPropertySchemaID("hoge~0.1.0/fff")).MustBuild() + assert.Equal(t, MustPropertySchemaID("hoge~0.1.0/fff"), res.Schema()) } func TestExtensionBuilder_Visualizer(t *testing.T) { @@ -69,12 +68,12 @@ func TestExtensionBuilder_WidgetLayout(t *testing.T) { func TestExtensionBuilder_Build(t *testing.T) { testCases := []struct { name, icon string - id id.PluginExtensionID + id ExtensionID extensionType ExtensionType system bool ename i18n.String description i18n.String - schema id.PropertySchemaID + schema PropertySchemaID visualizer visualizer.Visualizer widgetLayout *WidgetLayout expected *Extension @@ -88,7 +87,7 @@ func TestExtensionBuilder_Build(t *testing.T) { system: false, ename: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), - schema: 
id.MustPropertySchemaID("foo~1.1.1/hhh"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", widgetLayout: NewWidgetLayout( false, false, true, false, &WidgetLocation{ @@ -103,7 +102,7 @@ func TestExtensionBuilder_Build(t *testing.T) { name: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), icon: "ttt", - schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", widgetLayout: NewWidgetLayout( false, false, true, false, &WidgetLocation{ @@ -118,16 +117,16 @@ func TestExtensionBuilder_Build(t *testing.T) { { name: "fail not system type visualizer", extensionType: ExtensionTypeVisualizer, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { name: "fail not system type infobox", extensionType: ExtensionTypeInfobox, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { name: "fail nil id", - err: id.ErrInvalidID, + err: ErrInvalidID, }, } @@ -158,12 +157,12 @@ func TestExtensionBuilder_Build(t *testing.T) { func TestExtensionBuilder_MustBuild(t *testing.T) { testCases := []struct { name, icon string - id id.PluginExtensionID + id ExtensionID extensionType ExtensionType system bool ename i18n.String description i18n.String - schema id.PropertySchemaID + schema PropertySchemaID visualizer visualizer.Visualizer widgetLayout *WidgetLayout singleOnly bool @@ -177,7 +176,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { system: false, ename: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), - schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", singleOnly: true, widgetLayout: NewWidgetLayout( @@ -192,7 +191,7 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { name: i18n.StringFrom("nnn"), description: i18n.StringFrom("ddd"), icon: "ttt", - schema: id.MustPropertySchemaID("foo~1.1.1/hhh"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), visualizer: "vvv", singleOnly: true, widgetLayout: NewWidgetLayout( diff 
--git a/pkg/plugin/extension_test.go b/pkg/plugin/extension_test.go index ff34bd8a4..1c446a4bd 100644 --- a/pkg/plugin/extension_test.go +++ b/pkg/plugin/extension_test.go @@ -4,19 +4,18 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" "github.com/stretchr/testify/assert" ) func TestExtension(t *testing.T) { expected := struct { - ID id.PluginExtensionID + ID ExtensionID Type ExtensionType Name i18n.String Description i18n.String Icon string - Schema id.PropertySchemaID + Schema PropertySchemaID Visualizer visualizer.Visualizer SingleOnly bool WidgetLayout *WidgetLayout @@ -26,7 +25,7 @@ func TestExtension(t *testing.T) { Name: i18n.StringFrom("aaa"), Description: i18n.StringFrom("ddd"), Icon: "test", - Schema: id.MustPropertySchemaID("hoge~0.1.0/fff"), + Schema: MustPropertySchemaID("hoge~0.1.0/fff"), Visualizer: "vvv", SingleOnly: true, WidgetLayout: NewWidgetLayout(false, false, true, false, nil).Ref(), @@ -36,7 +35,7 @@ func TestExtension(t *testing.T) { ID("xxx"). Name(i18n.StringFrom("aaa")). Description(i18n.StringFrom("ddd")). - Schema(id.MustPropertySchemaID("hoge~0.1.0/fff")). + Schema(MustPropertySchemaID("hoge~0.1.0/fff")). Icon("test"). WidgetLayout(NewWidgetLayout(false, false, true, false, nil).Ref()). Visualizer("vvv"). 
diff --git a/pkg/plugin/id.go b/pkg/plugin/id.go new file mode 100644 index 000000000..ff0220caf --- /dev/null +++ b/pkg/plugin/id.go @@ -0,0 +1,31 @@ +package plugin + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.PluginID +type ExtensionID = id.PluginExtensionID +type PropertySchemaID = id.PropertySchemaID +type SceneID = id.SceneID + +var NewID = id.NewPluginID +var NewSceneID = id.NewSceneID + +var MustID = id.MustPluginID +var MustSceneID = id.MustSceneID +var MustPropertySchemaID = id.MustPropertySchemaID + +var IDFrom = id.PluginIDFrom +var SceneIDFrom = id.SceneIDFrom +var PropertySchemaIDFrom = id.PropertySchemaIDFrom + +var IDFromRef = id.PluginIDFromRef +var ExtensionIDFromRef = id.PluginExtensionIDFromRef +var SceneIDFromRef = id.SceneIDFromRef +var PropertySchemaIDFromRef = id.PropertySchemaIDFromRef + +var SceneIDFromRefID = id.SceneIDFromRefID + +var PropertySchemaIDFromExtension = id.PropertySchemaIDFromExtension + +var OfficialPluginID = id.OfficialPluginID +var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/plugin/loader.go b/pkg/plugin/loader.go index add41da5b..628b889ba 100644 --- a/pkg/plugin/loader.go +++ b/pkg/plugin/loader.go @@ -2,8 +2,6 @@ package plugin import ( "context" - - "github.com/reearth/reearth-backend/pkg/id" ) -type Loader func(context.Context, []id.PluginID, []id.SceneID) ([]*Plugin, error) +type Loader func(context.Context, []ID, []SceneID) ([]*Plugin, error) diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index 4e9c54211..de85ae1e8 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/rerror" @@ -14,13 +13,13 @@ import ( var errInvalidManifestWith = rerror.With(ErrInvalidManifest) -func (i 
*Root) manifest(sid *id.SceneID) (*Manifest, error) { - var pid id.PluginID +func (i *Root) manifest(sid *plugin.SceneID) (*Manifest, error) { + var pid plugin.ID var err error - if i.System && string(i.ID) == id.OfficialPluginID.Name() { - pid = id.OfficialPluginID + if i.System && string(i.ID) == plugin.OfficialPluginID.Name() { + pid = plugin.OfficialPluginID } else { - pid, err = id.NewPluginID(string(i.ID), i.Version, sid) + pid, err = plugin.NewID(string(i.ID), i.Version, sid) if err != nil { return nil, errInvalidManifestWith(fmt.Errorf("invalid plugin id: %s %s %s", i.ID, i.Version, sid)) } @@ -82,7 +81,7 @@ func (i *Root) manifest(sid *id.SceneID) (*Manifest, error) { }, nil } -func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, *property.Schema, error) { +func (i Extension) extension(pluginID plugin.ID, sys bool) (*plugin.Extension, *property.Schema, error) { eid := string(i.ID) schema, err := i.Schema.schema(pluginID, eid) if err != nil { @@ -136,7 +135,7 @@ func (i Extension) extension(pluginID id.PluginID, sys bool) (*plugin.Extension, } ext, err := plugin.NewExtension(). - ID(id.PluginExtensionID(eid)). + ID(plugin.ExtensionID(eid)). Name(i18n.StringFrom(i.Name)). Description(i18n.StringFrom(desc)). Visualizer(viz). 
@@ -185,8 +184,8 @@ func (l *WidgetLayout) layout() *plugin.WidgetLayout { return plugin.NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, l.Floating, dl).Ref() } -func (i *PropertySchema) schema(pluginID id.PluginID, idstr string) (*property.Schema, error) { - psid, err := id.PropertySchemaIDFrom(pluginID.String() + "/" + idstr) +func (i *PropertySchema) schema(pluginID plugin.ID, idstr string) (*property.Schema, error) { + psid, err := property.SchemaIDFrom(pluginID.String() + "/" + idstr) if err != nil { return nil, fmt.Errorf("invalid id: %s", pluginID.String()+"/"+idstr) } @@ -235,17 +234,17 @@ func (p *PropertyPointer) pointer() *property.Pointer { return nil } return property.NewPointer( - id.PropertySchemaGroupIDFrom(&p.SchemaGroupID), + property.SchemaGroupIDFrom(&p.SchemaGroupID), nil, - id.PropertySchemaFieldIDFrom(&p.FieldID), + property.FieldIDFrom(&p.FieldID), ) } -func (i PropertySchemaGroup) schemaGroup(sid id.PropertySchemaID) (*property.SchemaGroup, error) { +func (i PropertySchemaGroup) schemaGroup(sid property.SchemaID) (*property.SchemaGroup, error) { title := i.Title - var representativeField *id.PropertySchemaFieldID + var representativeField *property.FieldID if i.RepresentativeField != nil { - representativeField = id.PropertySchemaFieldID(*i.RepresentativeField).Ref() + representativeField = property.FieldID(*i.RepresentativeField).Ref() } // fields @@ -259,7 +258,7 @@ func (i PropertySchemaGroup) schemaGroup(sid id.PropertySchemaID) (*property.Sch } return property.NewSchemaGroup(). - ID(id.PropertySchemaGroupID(i.ID)). + ID(property.SchemaGroupID(i.ID)). Schema(sid). IsList(i.List). Fields(fields). 
@@ -274,7 +273,7 @@ func (o *PropertyCondition) condition() *property.Condition { return nil } return &property.Condition{ - Field: id.PropertySchemaFieldID(o.Field), + Field: property.FieldID(o.Field), Value: toValue(o.Value, o.Type), } } @@ -311,7 +310,7 @@ func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { } f, err := property.NewSchemaField(). - ID(id.PropertySchemaFieldID(i.ID)). + ID(property.FieldID(i.ID)). Name(i18n.StringFrom(title)). Description(i18n.StringFrom(desc)). Type(t). diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 73d045400..b036b3227 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/rerror" @@ -80,7 +79,7 @@ func TestManifest(t *testing.T) { Version: "1.1.1", }, expected: &Manifest{ - Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), + Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), ExtensionSchema: nil, Schema: nil, }, @@ -93,7 +92,7 @@ func TestManifest(t *testing.T) { System: true, }, expected: &Manifest{ - Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), + Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), ExtensionSchema: nil, Schema: nil, }, @@ -118,7 +117,7 @@ func TestManifest(t *testing.T) { Version: "1.1.1", }, expected: &Manifest{ - Plugin: 
plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), + Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), ExtensionSchema: nil, Schema: nil, }, @@ -132,7 +131,7 @@ func TestManifest(t *testing.T) { System: false, }, expected: &Manifest{ - Plugin: plugin.New().ID(id.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), + Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), }, err: "invalid manifest: invalid plugin id: ", }, @@ -164,7 +163,7 @@ func TestExtension(t *testing.T) { name string ext Extension sys bool - pid id.PluginID + pid plugin.ID expectedPE *plugin.Extension expectedPS *property.Schema err string @@ -181,9 +180,9 @@ func TestExtension(t *testing.T) { Visualizer: &cesium, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeVisualizer).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), }, { name: "primitive", @@ -196,9 +195,9 @@ func TestExtension(t *testing.T) { Visualizer: &cesium, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypePrimitive).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: 
property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), }, { name: "widget", @@ -211,9 +210,9 @@ func TestExtension(t *testing.T) { SingleOnly: &tr, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).SingleOnly(true).MustBuild(), - expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), }, { name: "block", @@ -225,9 +224,9 @@ func TestExtension(t *testing.T) { Type: "block", }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeBlock).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), }, { name: "infobox", @@ -240,9 +239,9 @@ func TestExtension(t *testing.T) { Visualizer: &cesium, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeInfobox).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), }, { name: "cluster", @@ -255,9 +254,9 @@ func TestExtension(t *testing.T) { Visualizer: &cesium, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, 
expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeCluster).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(id.MustPropertySchemaID("reearth/cesium")).MustBuild(), + expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), }, { name: "empty visualizer", @@ -270,7 +269,7 @@ func TestExtension(t *testing.T) { Visualizer: &es, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: nil, expectedPS: nil, err: "visualizer missing", @@ -286,7 +285,7 @@ func TestExtension(t *testing.T) { Visualizer: nil, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: nil, expectedPS: nil, err: "visualizer missing", @@ -302,7 +301,7 @@ func TestExtension(t *testing.T) { Visualizer: &cesium, }, sys: true, - pid: id.OfficialPluginID, + pid: plugin.OfficialPluginID, expectedPE: nil, expectedPS: nil, err: "type missing", @@ -355,7 +354,7 @@ func TestPointer(t *testing.T) { FieldID: "xxx", SchemaGroupID: "aaa", }, - expected: property.NewPointer(id.PropertySchemaGroupIDFrom(&sg), nil, id.PropertySchemaFieldIDFrom(&f)), + expected: property.NewPointer(property.SchemaGroupIDFrom(&sg), nil, property.FieldIDFrom(&f)), }, } for _, tc := range testCases { @@ -427,8 +426,8 @@ func TestLinkable(t *testing.T) { }, }, expected: property.LinkableFields{ - LatLng: property.NewPointer(id.PropertySchemaGroupIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&l)), - URL: property.NewPointer(id.PropertySchemaGroupIDFrom(&d), nil, id.PropertySchemaFieldIDFrom(&u)), + LatLng: property.NewPointer(property.SchemaGroupIDFrom(&d), nil, property.FieldIDFrom(&l)), + URL: property.NewPointer(property.SchemaGroupIDFrom(&d), nil, property.FieldIDFrom(&u)), }, }, } @@ -446,7 +445,7 @@ func TestSchema(t *testing.T) { testCases := []struct { name, psid string ps *PropertySchema - pid id.PluginID + pid 
plugin.ID expected *property.Schema err string }{ @@ -458,7 +457,7 @@ func TestSchema(t *testing.T) { Linkable: nil, Version: 0, }, - pid: id.MustPluginID("aaa~1.1.1"), + pid: plugin.MustID("aaa~1.1.1"), expected: nil, err: "invalid id: aaa~1.1.1/@", }, @@ -466,8 +465,8 @@ func TestSchema(t *testing.T) { name: "success nil PropertySchema", psid: "marker", ps: nil, - pid: id.OfficialPluginID, - expected: property.NewSchema().ID(id.MustPropertySchemaID("reearth/marker")).MustBuild(), + pid: plugin.OfficialPluginID, + expected: property.NewSchema().ID(property.MustSchemaID("reearth/marker")).MustBuild(), }, { name: "success ", @@ -497,8 +496,8 @@ func TestSchema(t *testing.T) { Linkable: nil, Version: 0, }, - pid: id.OfficialPluginID, - expected: property.NewSchema().ID(id.MustPropertySchemaID("reearth/marker")).Groups([]*property.SchemaGroup{property.NewSchemaGroup().ID("default").Schema(id.MustPropertySchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild()}).MustBuild(), + pid: plugin.OfficialPluginID, + expected: property.NewSchema().ID(property.MustSchemaID("reearth/marker")).Groups([]*property.SchemaGroup{property.NewSchemaGroup().ID("default").Schema(property.MustSchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild()}).MustBuild(), }, } for _, tc := range testCases { @@ -527,7 +526,7 @@ func TestSchemaGroup(t *testing.T) { testCases := []struct { name string psg PropertySchemaGroup - sid id.PropertySchemaID + sid property.SchemaID expected *property.SchemaGroup err string }{ @@ -554,8 +553,8 @@ func TestSchemaGroup(t *testing.T) { List: false, Title: "marker", }, - sid: id.MustPropertySchemaID("reearth/cesium"), - expected: 
property.NewSchemaGroup().ID("default").Title(i18n.StringFrom("marker")).Title(i18n.StringFrom(str)).Schema(id.MustPropertySchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild(), + sid: property.MustSchemaID("reearth/cesium"), + expected: property.NewSchemaGroup().ID("default").Title(i18n.StringFrom("marker")).Title(i18n.StringFrom(str)).Schema(property.MustSchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild(), }, { name: "fail invalid schema field", @@ -580,7 +579,7 @@ func TestSchemaGroup(t *testing.T) { List: false, Title: "marker", }, - sid: id.MustPropertySchemaID("reearth/cesium"), + sid: property.MustSchemaID("reearth/cesium"), expected: nil, err: "field (location): invalid value type: xx", }, diff --git a/pkg/plugin/manifest/parser.go b/pkg/plugin/manifest/parser.go index 8f059bb76..e6729cb8b 100644 --- a/pkg/plugin/manifest/parser.go +++ b/pkg/plugin/manifest/parser.go @@ -1,13 +1,13 @@ package manifest -//go:generate go run github.com/idubinskiy/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json +//go:generate go run github.com/idubinskiy/schematyper -o schema_gen.go --package manifest ../../../schemas/plugin_manifest.json import ( "errors" "io" "github.com/goccy/go-yaml" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" ) var ( @@ -16,7 +16,7 @@ var ( ErrSystemManifest = errors.New("cannot build system manifest") ) -func Parse(source io.Reader, scene *id.SceneID) (*Manifest, error) { +func Parse(source io.Reader, scene *plugin.SceneID) (*Manifest, error) { root := Root{} if err := yaml.NewDecoder(source).Decode(&root); err != nil { return nil, ErrFailedToParseManifest @@ -34,7 +34,7 @@ func Parse(source io.Reader, scene *id.SceneID) (*Manifest, error) { return manifest, nil 
} -func ParseSystemFromBytes(source []byte, scene *id.SceneID) (*Manifest, error) { +func ParseSystemFromBytes(source []byte, scene *plugin.SceneID) (*Manifest, error) { root := Root{} if err := yaml.Unmarshal(source, &root); err != nil { return nil, ErrFailedToParseManifest @@ -49,7 +49,7 @@ func ParseSystemFromBytes(source []byte, scene *id.SceneID) (*Manifest, error) { return manifest, nil } -func MustParseSystemFromBytes(source []byte, scene *id.SceneID) *Manifest { +func MustParseSystemFromBytes(source []byte, scene *plugin.SceneID) *Manifest { m, err := ParseSystemFromBytes(source, scene) if err != nil { panic(err) diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 08d9661ed..d90501f37 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -6,7 +6,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/visualizer" @@ -16,35 +15,35 @@ import ( //go:embed testdata/minimum.yml var minimum string var minimumExpected = &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa~1.1.1")).MustBuild(), + Plugin: plugin.New().ID(plugin.MustID("aaa~1.1.1")).MustBuild(), } //go:embed testdata/test.yml var normal string var normalExpected = &Manifest{ - Plugin: plugin.New().ID(id.MustPluginID("aaa~1.1.1")).Name(i18n.StringFrom("bbb")).Extensions([]*plugin.Extension{ - plugin.NewExtension().ID(id.PluginExtensionID("hoge")). + Plugin: plugin.New().ID(plugin.MustID("aaa~1.1.1")).Name(i18n.StringFrom("bbb")).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(plugin.ExtensionID("hoge")). Visualizer(visualizer.VisualizerCesium). Type(plugin.ExtensionTypePrimitive). WidgetLayout(nil). - Schema(id.MustPropertySchemaID("aaa~1.1.1/hoge")). + Schema(property.MustSchemaID("aaa~1.1.1/hoge")). 
MustBuild(), }).MustBuild(), ExtensionSchema: []*property.Schema{ - property.NewSchema().ID(id.MustPropertySchemaID("aaa~1.1.1/hoge")).Groups([]*property.SchemaGroup{ - property.NewSchemaGroup().ID(id.PropertySchemaGroupID("default")). - Schema(id.MustPropertySchemaID("aaa~1.1.1/hoge")). - RepresentativeField(id.PropertySchemaFieldID("a").Ref()). + property.NewSchema().ID(property.MustSchemaID("aaa~1.1.1/hoge")).Groups([]*property.SchemaGroup{ + property.NewSchemaGroup().ID(property.SchemaGroupID("default")). + Schema(property.MustSchemaID("aaa~1.1.1/hoge")). + RepresentativeField(property.FieldID("a").Ref()). Fields([]*property.SchemaField{ - property.NewSchemaField().ID(id.PropertySchemaFieldID("a")). + property.NewSchemaField().ID(property.FieldID("a")). Type(property.ValueTypeBool). DefaultValue(property.ValueTypeBool.ValueFrom(true)). IsAvailableIf(&property.Condition{ - Field: id.PropertySchemaFieldID("b"), + Field: property.FieldID("b"), Value: property.ValueTypeNumber.ValueFrom(1), }). MustBuild(), - property.NewSchemaField().ID(id.PropertySchemaFieldID("b")). + property.NewSchemaField().ID(property.FieldID("b")). Type(property.ValueTypeNumber). MustBuild(), }).MustBuild(), diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 1d35185bd..0816f4e5f 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -1,14 +1,14 @@ package manifest // Generating types with schema typer for translation schema is disabled because some fields are wrongly typed. 
-// DISABLED go:generate go run github.com/idubinskiy/schematyper -o schema_translation_gen.go --package manifest --prefix Translation ../../../plugin_manifest_schema_translation.json +// DISABLED go:generate go run github.com/idubinskiy/schematyper -o schema_translation_gen.go --package manifest --prefix Translation ../../../schemas/plugin_manifest_translation.json import ( "errors" "io" "github.com/goccy/go-yaml" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" ) @@ -69,7 +69,7 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani } for key, te := range t.Extensions { - ext := m.Plugin.Extension(id.PluginExtensionID(key)) + ext := m.Plugin.Extension(plugin.ExtensionID(key)) if ext == nil { continue } @@ -104,7 +104,7 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani } for key, tsg := range te.PropertySchema { - psg := ps.Group(id.PropertySchemaGroupID(key)) + psg := ps.Group(property.SchemaGroupID(key)) if psg == nil { continue } @@ -126,7 +126,7 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani // } for key, tsf := range tsg.Fields { - psf := psg.Field(id.PropertySchemaFieldID(key)) + psf := psg.Field(property.FieldID(key)) if psf == nil { continue } diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index 1dd0b156f..c01d13138 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -6,7 +6,7 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" "github.com/stretchr/testify/assert" ) @@ -191,7 +191,7 @@ func TestMergeManifestTranslation(t *testing.T) { } assert.Equal(tt, tc.Expected.PluginName, res.Plugin.Name()) assert.Equal(tt, 
tc.Expected.PluginDesc, res.Plugin.Description()) - assert.Equal(tt, tc.Expected.ExtName, res.Plugin.Extension(id.PluginExtensionID("test_ext")).Name()) + assert.Equal(tt, tc.Expected.ExtName, res.Plugin.Extension(plugin.ExtensionID("test_ext")).Name()) assert.Equal(tt, tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) assert.Equal(tt, tc.Expected.FieldTitle, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Title()) assert.Equal(tt, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index db3c4e3b9..0bb414745 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build612118365/b001/exe/schematyper -o schema_gen.go --package manifest ../../../plugin_manifest_schema.json" -- DO NOT EDIT +// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build612118365/b001/exe/schematyper -o schema_gen.go --package manifest ../../../schemas/plugin_manifest.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 4757f6050..e9cbcab55 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -3,52 +3,43 @@ package plugin import ( "github.com/blang/semver" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" ) -// Plugin _ type Plugin struct { - id id.PluginID + id ID name i18n.String author string description i18n.String repositoryURL string - extensions map[id.PluginExtensionID]*Extension - extensionOrder []id.PluginExtensionID - schema *id.PropertySchemaID + extensions map[ExtensionID]*Extension + extensionOrder []ExtensionID + schema *PropertySchemaID } -// ID _ -func (p *Plugin) ID() id.PluginID { +func (p *Plugin) ID() ID { return p.id } -// 
Version _ func (p *Plugin) Version() semver.Version { return p.id.Version() } -// Name _ func (p *Plugin) Name() i18n.String { return p.name.Copy() } -// Author _ func (p *Plugin) Author() string { return p.author } -// Description _ func (p *Plugin) Description() i18n.String { return p.description.Copy() } -// RepositoryURL _ func (p *Plugin) RepositoryURL() string { return p.repositoryURL } -// Extensions _ func (p *Plugin) Extensions() []*Extension { if p.extensionOrder == nil { return []*Extension{} @@ -60,7 +51,7 @@ func (p *Plugin) Extensions() []*Extension { return list } -func (p *Plugin) Extension(id id.PluginExtensionID) *Extension { +func (p *Plugin) Extension(id ExtensionID) *Extension { if p == nil { return nil } @@ -72,17 +63,16 @@ func (p *Plugin) Extension(id id.PluginExtensionID) *Extension { return nil } -// Schema _ -func (p *Plugin) Schema() *id.PropertySchemaID { +func (p *Plugin) Schema() *PropertySchemaID { return p.schema } -func (p *Plugin) PropertySchemas() []id.PropertySchemaID { +func (p *Plugin) PropertySchemas() []PropertySchemaID { if p == nil { return nil } - ps := make([]id.PropertySchemaID, 0, len(p.extensions)+1) + ps := make([]PropertySchemaID, 0, len(p.extensions)+1) if p.schema != nil { ps = append(ps, *p.schema) } @@ -96,7 +86,6 @@ func (p *Plugin) Rename(name i18n.String) { p.name = name.Copy() } -// SetDescription _ func (p *Plugin) SetDescription(des i18n.String) { p.description = des.Copy() } diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index f31ae2a8e..6c7ef1993 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -12,7 +11,7 @@ func TestPlugin_Extension(t *testing.T) { testCases := []struct { name string plugin *Plugin - key id.PluginExtensionID + key ExtensionID expected *Extension }{ { @@ -44,29 +43,29 @@ func 
TestPlugin_Extension(t *testing.T) { } func TestPlugin_PropertySchemas(t *testing.T) { - ps1 := id.MustPropertySchemaID("hoge~0.1.0/a") - ps2 := id.MustPropertySchemaID("hoge~0.1.0/b") - ps3 := id.MustPropertySchemaID("hoge~0.1.0/c") + ps1 := MustPropertySchemaID("hoge~0.1.0/a") + ps2 := MustPropertySchemaID("hoge~0.1.0/b") + ps3 := MustPropertySchemaID("hoge~0.1.0/c") testCases := []struct { name string plugin *Plugin - expected []id.PropertySchemaID + expected []PropertySchemaID }{ { name: "normal", plugin: New().Schema(&ps1).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), - expected: []id.PropertySchemaID{ps1, ps2, ps3}, + expected: []PropertySchemaID{ps1, ps2, ps3}, }, { name: "no plugin property schema", plugin: New().Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), - expected: []id.PropertySchemaID{ps2, ps3}, + expected: []PropertySchemaID{ps2, ps3}, }, { name: "nil", plugin: nil, - expected: []id.PropertySchemaID(nil), + expected: []PropertySchemaID(nil), }, } for _, tc := range testCases { @@ -96,5 +95,5 @@ func TestPlugin_Author(t *testing.T) { } func TestPlugin_ID(t *testing.T) { - assert.Equal(t, New().ID(id.MustPluginID("xxx~1.1.1")).MustBuild().ID(), id.MustPluginID("xxx~1.1.1")) + assert.Equal(t, New().ID(MustID("xxx~1.1.1")).MustBuild().ID(), MustID("xxx~1.1.1")) } diff --git a/pkg/plugin/pluginpack/package.go b/pkg/plugin/pluginpack/package.go index 18f66245e..c7e9344c8 100644 --- a/pkg/plugin/pluginpack/package.go +++ b/pkg/plugin/pluginpack/package.go @@ -8,7 +8,7 @@ import ( "path/filepath" "github.com/reearth/reearth-backend/pkg/file" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/rerror" ) @@ -20,7 +20,7 @@ type Package 
struct { Files file.Iterator } -func PackageFromZip(r io.Reader, scene *id.SceneID, sizeLimit int64) (*Package, error) { +func PackageFromZip(r io.Reader, scene *plugin.SceneID, sizeLimit int64) (*Package, error) { b, err := io.ReadAll(io.LimitReader(r, sizeLimit)) if err != nil { return nil, rerror.From("zip read error", err) diff --git a/pkg/plugin/pluginpack/package_test.go b/pkg/plugin/pluginpack/package_test.go index 09ff0ebad..ecd2a8073 100644 --- a/pkg/plugin/pluginpack/package_test.go +++ b/pkg/plugin/pluginpack/package_test.go @@ -6,7 +6,6 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/stretchr/testify/assert" @@ -20,7 +19,7 @@ func TestPackageFromZip(t *testing.T) { }() expected := plugin.New(). - ID(id.MustPluginID("testplugin~1.0.1")). + ID(plugin.MustID("testplugin~1.0.1")). Name(i18n.StringFrom("testplugin")). 
MustBuild() diff --git a/pkg/project/builder.go b/pkg/project/builder.go index 1891ed2e4..79331c1af 100644 --- a/pkg/project/builder.go +++ b/pkg/project/builder.go @@ -4,7 +4,6 @@ import ( "net/url" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" ) @@ -17,8 +16,8 @@ func New() *Builder { } func (b *Builder) Build() (*Project, error) { - if id.ID(b.p.id).IsNil() { - return nil, id.ErrInvalidID + if b.p.id.IsNil() { + return nil, ErrInvalidID } if b.p.alias != "" && !CheckAliasPattern(b.p.alias) { return nil, ErrInvalidAlias @@ -37,13 +36,13 @@ func (b *Builder) MustBuild() *Project { return r } -func (b *Builder) ID(id id.ProjectID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.p.id = id return b } func (b *Builder) NewID() *Builder { - b.p.id = id.ProjectID(id.New()) + b.p.id = NewID() return b } @@ -122,7 +121,7 @@ func (b *Builder) PublicNoIndex(publicNoIndex bool) *Builder { return b } -func (b *Builder) Team(team id.TeamID) *Builder { +func (b *Builder) Team(team TeamID) *Builder { b.p.team = team return b } diff --git a/pkg/project/builder_test.go b/pkg/project/builder_test.go index 09bbdf473..36c29eb3f 100644 --- a/pkg/project/builder_test.go +++ b/pkg/project/builder_test.go @@ -7,7 +7,6 @@ import ( "testing" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" "github.com/stretchr/testify/assert" ) @@ -19,7 +18,7 @@ func TestNew(t *testing.T) { func TestBuilder_ID(t *testing.T) { var tb = New() - res := tb.ID(id.NewProjectID()).MustBuild() + res := tb.ID(NewID()).MustBuild() assert.NotNil(t, res.ID()) } @@ -104,7 +103,7 @@ func TestBuilder_Visualizer(t *testing.T) { func TestBuilder_Team(t *testing.T) { var tb = New().NewID() - res := tb.Team(id.NewTeamID()).MustBuild() + res := tb.Team(NewTeamID()).MustBuild() assert.NotNil(t, res.Team()) } @@ -156,19 +155,19 @@ func TestBuilder_PublicNoIndex(t *testing.T) { func TestBuilder_Build(t 
*testing.T) { d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) i, _ := url.Parse("ttt://xxx.aa/") - pid := id.NewProjectID() - tid := id.NewTeamID() + pid := NewID() + tid := NewTeamID() testCases := []struct { name, pname, description, alias, publicTitle, publicDescription, publicImage string - id id.ProjectID + id ID isArchived bool updatedAt time.Time publishedAt time.Time imageURL *url.URL publicNoIndex bool - team id.TeamID + team TeamID visualizer visualizer.Visualizer publishmentStatus PublishmentStatus expected *Project @@ -212,7 +211,7 @@ func TestBuilder_Build(t *testing.T) { }, { name: "zero updated at", - id: id.NewProjectID(), + id: NewID(), updatedAt: time.Time{}, expected: nil, err: nil, @@ -221,11 +220,11 @@ func TestBuilder_Build(t *testing.T) { name: "failed invalid id", expected: nil, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { name: "failed invalid alias", - id: id.NewProjectID(), + id: NewID(), alias: "xxx.aaa", expected: nil, err: ErrInvalidAlias, @@ -269,19 +268,19 @@ func TestBuilder_Build(t *testing.T) { func TestBuilder_MustBuild(t *testing.T) { d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) i, _ := url.Parse("ttt://xxx.aa/") - pid := id.NewProjectID() - tid := id.NewTeamID() + pid := NewID() + tid := NewTeamID() testCases := []struct { name, pname, description, alias, publicTitle, publicDescription, publicImage string - id id.ProjectID + id ID isArchived bool updatedAt time.Time publishedAt time.Time imageURL *url.URL publicNoIndex bool - team id.TeamID + team TeamID visualizer visualizer.Visualizer publishmentStatus PublishmentStatus expected *Project @@ -325,7 +324,7 @@ func TestBuilder_MustBuild(t *testing.T) { }, { name: "zero updated at", - id: id.NewProjectID(), + id: NewID(), updatedAt: time.Time{}, expected: nil, err: nil, @@ -334,11 +333,11 @@ func TestBuilder_MustBuild(t *testing.T) { name: "failed invalid id", expected: nil, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { name: "failed invalid alias", - id: 
id.NewProjectID(), + id: NewID(), alias: "xxx.aaa", expected: nil, err: ErrInvalidAlias, diff --git a/pkg/project/id.go b/pkg/project/id.go new file mode 100644 index 000000000..cafbea20f --- /dev/null +++ b/pkg/project/id.go @@ -0,0 +1,31 @@ +package project + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.ProjectID +type TeamID = id.TeamID + +var NewID = id.NewProjectID +var NewTeamID = id.NewTeamID + +var MustID = id.MustProjectID +var MustTeamID = id.MustTeamID + +var IDFrom = id.ProjectIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.ProjectIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var IDFromRefID = id.ProjectIDFromRefID +var TeamIDFromRefID = id.TeamIDFromRefID + +var ErrInvalidID = id.ErrInvalidID + +func createdAt(i ID) time.Time { + return id.ID(i).Timestamp() +} diff --git a/pkg/project/project.go b/pkg/project/project.go index c15f9f55a..72cbd8663 100644 --- a/pkg/project/project.go +++ b/pkg/project/project.go @@ -6,7 +6,6 @@ import ( "regexp" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" ) @@ -15,9 +14,8 @@ var ( aliasRegexp = regexp.MustCompile("^[a-zA-Z0-9_-]{5,32}$") ) -// Project _ type Project struct { - id id.ProjectID + id ID isArchived bool isBasicAuthActive bool basicAuthUsername string @@ -32,12 +30,12 @@ type Project struct { publicDescription string publicImage string publicNoIndex bool - team id.TeamID + team TeamID visualizer visualizer.Visualizer publishmentStatus PublishmentStatus } -func (p *Project) ID() id.ProjectID { +func (p *Project) ID() ID { return p.id } @@ -105,12 +103,12 @@ func (p *Project) PublishmentStatus() PublishmentStatus { return p.publishmentStatus } -func (p *Project) Team() id.TeamID { +func (p *Project) Team() TeamID { return p.team } func (p *Project) CreatedAt() time.Time { - return id.ID(p.id).Timestamp() + return createdAt(p.id) } func (p *Project) Visualizer() visualizer.Visualizer { @@ 
-183,7 +181,7 @@ func (p *Project) UpdatePublicNoIndex(publicNoIndex bool) { p.publicNoIndex = publicNoIndex } -func (p *Project) UpdateTeam(team id.TeamID) { +func (p *Project) UpdateTeam(team TeamID) { p.team = team } diff --git a/pkg/project/project_test.go b/pkg/project/project_test.go index 15255f7fa..c471c7089 100644 --- a/pkg/project/project_test.go +++ b/pkg/project/project_test.go @@ -5,7 +5,6 @@ import ( "testing" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/visualizer" "github.com/stretchr/testify/assert" ) @@ -174,7 +173,7 @@ func TestProject_UpdatePublicTitle(t *testing.T) { func TestProject_UpdateTeam(t *testing.T) { p := &Project{} - p.UpdateTeam(id.NewTeamID()) + p.UpdateTeam(NewTeamID()) assert.NotNil(t, p.Team()) } diff --git a/pkg/project/publishment_status.go b/pkg/project/publishment_status.go index e465e2c34..43d33917e 100644 --- a/pkg/project/publishment_status.go +++ b/pkg/project/publishment_status.go @@ -1,13 +1,11 @@ package project -// PublishmentStatus _ type PublishmentStatus string const ( - // PublishmentStatusPublic _ PublishmentStatusPublic PublishmentStatus = "public" - // PublishmentStatusLimited _ + PublishmentStatusLimited PublishmentStatus = "limited" - // PublishmentStatusPrivate _ + PublishmentStatusPrivate PublishmentStatus = "private" ) diff --git a/pkg/property/builder.go b/pkg/property/builder.go index c39156803..a43122623 100644 --- a/pkg/property/builder.go +++ b/pkg/property/builder.go @@ -2,31 +2,25 @@ package property import ( "errors" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( - // ErrInvalidItem _ ErrInvalidItem = errors.New("invalid item") ) -// Builder _ type Builder struct { p *Property } -// New _ func New() *Builder { return &Builder{p: &Property{}} } -// Build _ func (b *Builder) Build() (*Property, error) { - if id.ID(b.p.id).IsNil() { - return nil, id.ErrInvalidID + if b.p.id.IsNil() { + return nil, ErrInvalidID } - if id.ID(b.p.scene).IsNil() { 
+ if b.p.scene.IsNil() { return nil, ErrInvalidSceneID } if b.p.schema.IsNil() { @@ -40,7 +34,6 @@ func (b *Builder) Build() (*Property, error) { return b.p, nil } -// MustBuild _ func (b *Builder) MustBuild() *Property { p, err := b.Build() if err != nil { @@ -49,31 +42,26 @@ func (b *Builder) MustBuild() *Property { return p } -// ID _ -func (b *Builder) ID(id id.PropertyID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.p.id = id return b } -// NewID _ func (b *Builder) NewID() *Builder { - b.p.id = id.PropertyID(id.New()) + b.p.id = NewID() return b } -// Scene _ -func (b *Builder) Scene(s id.SceneID) *Builder { +func (b *Builder) Scene(s SceneID) *Builder { b.p.scene = s return b } -// Schema _ -func (b *Builder) Schema(schema id.PropertySchemaID) *Builder { +func (b *Builder) Schema(schema SchemaID) *Builder { b.p.schema = schema return b } -// Items _ func (b *Builder) Items(items []Item) *Builder { if len(items) == 0 { b.p.items = nil @@ -81,7 +69,7 @@ func (b *Builder) Items(items []Item) *Builder { } newItems := []Item{} - ids := map[id.PropertyItemID]struct{}{} + ids := map[ItemID]struct{}{} for _, f := range items { if f == nil { continue diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index 551710308..e29662959 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -4,7 +4,6 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -14,32 +13,32 @@ func TestBuilder_New(t *testing.T) { } func TestBuilder_ID(t *testing.T) { - pid := id.NewPropertyID() - p := New().ID(pid).Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() + pid := NewID() + p := New().ID(pid).Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() assert.Equal(t, pid, p.ID()) } func TestBuilder_NewID(t *testing.T) { - p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() + p := 
New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() assert.False(t, p.ID().IsNil()) } func TestBuilder_Schema(t *testing.T) { - p := New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() - assert.Equal(t, id.MustPropertySchemaID("xxx~1.1.1/aa"), p.Schema()) + p := New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() + assert.Equal(t, MustSchemaID("xxx~1.1.1/aa"), p.Schema()) } func TestBuilder_Scene(t *testing.T) { - sid := id.NewSceneID() - p := New().NewID().Scene(sid).Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")).MustBuild() + sid := NewSceneID() + p := New().NewID().Scene(sid).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() assert.Equal(t, sid, p.Scene()) } func TestBuilder_Items(t *testing.T) { - iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") - propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaGroup1ID := id.PropertySchemaGroupID("A") + iid := NewItemID() + propertySchemaID := MustSchemaID("xxx~1.1.1/aa") + propertySchemaField1ID := FieldID("a") + propertySchemaGroup1ID := SchemaGroupID("A") testCases := []struct { Name string @@ -83,8 +82,8 @@ func TestBuilder_Items(t *testing.T) { t.Run(tc.Name, func(tt *testing.T) { tt.Parallel() res := New().NewID(). - Scene(id.NewSceneID()). - Schema(id.MustPropertySchemaID("xxx~1.1.1/aa")). + Scene(NewSceneID()). + Schema(MustSchemaID("xxx~1.1.1/aa")). Items(tc.Input). 
MustBuild() assert.Equal(tt, tc.Expected, res.Items()) @@ -93,24 +92,24 @@ func TestBuilder_Items(t *testing.T) { } func TestBuilder_Build(t *testing.T) { - pid := id.NewPropertyID() - sid := id.NewSceneID() - scid := id.MustPropertySchemaID("xxx~1.1.1/aa") - iid := id.NewPropertyItemID() - propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaGroup1ID := id.PropertySchemaGroupID("A") + pid := NewID() + sid := NewSceneID() + scid := MustSchemaID("xxx~1.1.1/aa") + iid := NewItemID() + propertySchemaField1ID := FieldID("a") + propertySchemaGroup1ID := SchemaGroupID("A") testCases := []struct { Name string - Id id.PropertyID - Scene id.SceneID - Schema id.PropertySchemaID + Id ID + Scene SceneID + Schema SchemaID Items []Item Err error Expected struct { - Id id.PropertyID - Scene id.SceneID - Schema id.PropertySchemaID + Id ID + Scene SceneID + Schema SchemaID Items []Item } }{ @@ -128,9 +127,9 @@ func TestBuilder_Build(t *testing.T) { Build(), }).MustBuild()}, Expected: struct { - Id id.PropertyID - Scene id.SceneID - Schema id.PropertySchemaID + Id ID + Scene SceneID + Schema SchemaID Items []Item }{ Id: pid, @@ -148,9 +147,9 @@ func TestBuilder_Build(t *testing.T) { }, { Name: "fail invalid id", - Id: id.PropertyID{}, + Id: ID{}, Items: nil, - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, { Name: "fail invalid scene", @@ -171,7 +170,7 @@ func TestBuilder_Build(t *testing.T) { Scene: sid, Schema: scid, Items: []Item{ - NewGroup().ID(iid).Schema(id.MustPropertySchemaID("zzz~1.1.1/aa"), propertySchemaGroup1ID). + NewGroup().ID(iid).Schema(MustSchemaID("zzz~1.1.1/aa"), propertySchemaGroup1ID). Fields([]*Field{ NewFieldUnsafe(). FieldUnsafe(propertySchemaField1ID). 
diff --git a/pkg/property/condition.go b/pkg/property/condition.go index 14217f30e..8424da317 100644 --- a/pkg/property/condition.go +++ b/pkg/property/condition.go @@ -1,14 +1,10 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - -// Condition _ type Condition struct { - Field id.PropertySchemaFieldID + Field FieldID Value *Value } -// Clone _ func (c *Condition) Clone() *Condition { if c == nil { return nil diff --git a/pkg/property/field.go b/pkg/property/field.go index d38ffa139..ccf54c108 100644 --- a/pkg/property/field.go +++ b/pkg/property/field.go @@ -5,7 +5,6 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) var ( @@ -16,7 +15,7 @@ var ( ) type Field struct { - field id.PropertySchemaFieldID + field FieldID links *Links v *OptionalValue } @@ -29,7 +28,7 @@ func (p *Field) Clone() *Field { } } -func (p *Field) Field() id.PropertySchemaFieldID { +func (p *Field) Field() FieldID { return p.field } @@ -74,11 +73,11 @@ func (p *Field) HasLinkedField() bool { return p.Links().IsLinked() } -func (p *Field) CollectDatasets() []id.DatasetID { +func (p *Field) CollectDatasets() []DatasetID { if p == nil { return nil } - res := []id.DatasetID{} + res := []DatasetID{} if p.Links().IsLinkedFully() { dsid := p.Links().Last().Dataset() @@ -90,7 +89,7 @@ func (p *Field) CollectDatasets() []id.DatasetID { return res } -func (p *Field) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { +func (p *Field) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { return p.Links().HasDatasetOrSchema(s, i) } @@ -126,7 +125,7 @@ func (p *Field) Unlink() { p.links = nil } -func (p *Field) UpdateField(field id.PropertySchemaFieldID) { +func (p *Field) UpdateField(field FieldID) { if p == nil { return } @@ -201,9 +200,9 @@ func (p *Field) ValidateSchema(ps *SchemaField) error { } type DatasetMigrationParam struct { - OldDatasetSchemaMap map[id.DatasetSchemaID]id.DatasetSchemaID - 
OldDatasetMap map[id.DatasetID]id.DatasetID - DatasetFieldIDMap map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID - NewDatasetSchemaMap map[id.DatasetSchemaID]*dataset.Schema - NewDatasetMap map[id.DatasetID]*dataset.Dataset + OldDatasetSchemaMap map[DatasetSchemaID]DatasetSchemaID + OldDatasetMap map[DatasetID]DatasetID + DatasetFieldIDMap map[DatasetFieldID]DatasetFieldID + NewDatasetSchemaMap map[DatasetSchemaID]*dataset.Schema + NewDatasetMap map[DatasetID]*dataset.Dataset } diff --git a/pkg/property/field_builder.go b/pkg/property/field_builder.go index 00bbbae54..7cfc3df9b 100644 --- a/pkg/property/field_builder.go +++ b/pkg/property/field_builder.go @@ -1,7 +1,5 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - type FieldBuilder struct { p *Field psf *SchemaField @@ -19,8 +17,8 @@ func NewField(p *SchemaField) *FieldBuilder { } func (b *FieldBuilder) Build() (*Field, error) { - if b.p.field == id.PropertySchemaFieldID("") { - return nil, id.ErrInvalidID + if b.p.field == FieldID("") { + return nil, ErrInvalidID } if b.psf != nil && !b.psf.Validate(b.p.v) { return nil, ErrInvalidPropertyValue @@ -65,7 +63,7 @@ func (b *FieldUnsafeBuilder) Build() *Field { return b.p } -func (b *FieldUnsafeBuilder) FieldUnsafe(f id.PropertySchemaFieldID) *FieldUnsafeBuilder { +func (b *FieldUnsafeBuilder) FieldUnsafe(f FieldID) *FieldUnsafeBuilder { b.p.field = f return b } diff --git a/pkg/property/field_builder_test.go b/pkg/property/field_builder_test.go index 0322ddef8..ae37a952b 100644 --- a/pkg/property/field_builder_test.go +++ b/pkg/property/field_builder_test.go @@ -4,7 +4,6 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -17,14 +16,14 @@ func TestFieldBuilder_Value(t *testing.T) { func TestFieldBuilder_Link(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), 
id.NewDatasetSchemaFieldID()) + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) b := NewField(p).Link(ls).MustBuild() assert.Equal(t, ls, b.Links()) } func TestFieldBuilder_Build(t *testing.T) { - l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) testCases := []struct { Name string Links *Links @@ -44,7 +43,7 @@ func TestFieldBuilder_Build(t *testing.T) { Links *Links Value *Value }{}, - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, { Name: "fail invalid property type", @@ -91,7 +90,7 @@ func TestFieldBuilder_Build(t *testing.T) { } func TestFieldBuilder_MustBuild(t *testing.T) { - l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) testCases := []struct { Name string Fails bool @@ -168,16 +167,16 @@ func TestNewFieldUnsafe(t *testing.T) { } func TestFieldUnsafeBuilder_Build(t *testing.T) { - l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) testCases := []struct { Name string Links *Links Value *Value Type ValueType - Field id.PropertySchemaFieldID + Field FieldID Expected struct { PType ValueType - Field id.PropertySchemaFieldID + Field FieldID Links *Links Value *Value } @@ -190,7 +189,7 @@ func TestFieldUnsafeBuilder_Build(t *testing.T) { Field: "a", Expected: struct { PType ValueType - Field id.PropertySchemaFieldID + Field FieldID Links *Links Value *Value }{ @@ -208,7 +207,7 @@ func TestFieldUnsafeBuilder_Build(t *testing.T) { Field: "a", Expected: struct { PType ValueType - Field id.PropertySchemaFieldID + Field FieldID Links *Links Value *Value }{ diff --git a/pkg/property/field_test.go b/pkg/property/field_test.go index aade07fbc..272902500 100644 --- a/pkg/property/field_test.go 
+++ b/pkg/property/field_test.go @@ -4,15 +4,14 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestField_ActualValue(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - dssfid := id.NewDatasetSchemaFieldID() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + dssfid := NewDatasetFieldID() l := NewLink(dsid, dssid, dssfid) ls := NewLinks([]*Link{l}) @@ -57,21 +56,21 @@ func TestField_ActualValue(t *testing.T) { func TestField_CollectDatasets(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - dssfid := id.NewDatasetSchemaFieldID() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + dssfid := NewDatasetFieldID() l := NewLink(dsid, dssid, dssfid) ls := NewLinks([]*Link{l}) testCases := []struct { Name string Field *Field - Expected []id.DatasetID + Expected []DatasetID }{ { Name: "list of one datasets", Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild(), - Expected: []id.DatasetID{dsid}, + Expected: []DatasetID{dsid}, }, { Name: "nil field", @@ -91,7 +90,7 @@ func TestField_CollectDatasets(t *testing.T) { func TestField_Clone(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) b := NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild() r := b.Clone() @@ -99,12 +98,12 @@ func TestField_Clone(t *testing.T) { } func TestField(t *testing.T) { - did := id.NewDatasetID() - dsid := id.NewDatasetSchemaID() + did := NewDatasetID() + dsid := NewDatasetSchemaID() p := 
NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() b := NewField(p).MustBuild() assert.True(t, b.IsEmpty()) - l := NewLink(did, dsid, id.NewDatasetSchemaFieldID()) + l := NewLink(did, dsid, NewDatasetFieldID()) ls := NewLinks([]*Link{l}) b.Link(ls) assert.True(t, b.IsDatasetLinked(dsid, did)) diff --git a/pkg/property/group.go b/pkg/property/group.go index bbacd6ceb..80d1a28bc 100644 --- a/pkg/property/group.go +++ b/pkg/property/group.go @@ -6,7 +6,6 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) // Group represents a group of property @@ -18,43 +17,42 @@ type Group struct { // Group implements Item interface var _ Item = &Group{} -func (g *Group) ID() id.PropertyItemID { +func (g *Group) ID() ItemID { if g == nil { - return id.PropertyItemID{} + return ItemID{} } return g.itemBase.ID } -func (g *Group) IDRef() *id.PropertyItemID { +func (g *Group) IDRef() *ItemID { if g == nil { return nil } return g.itemBase.ID.Ref() } -func (g *Group) SchemaGroup() id.PropertySchemaGroupID { +func (g *Group) SchemaGroup() SchemaGroupID { if g == nil { - return id.PropertySchemaGroupID("") + return SchemaGroupID("") } return g.itemBase.SchemaGroup } -func (g *Group) SchemaGroupRef() *id.PropertySchemaGroupID { +func (g *Group) SchemaGroupRef() *SchemaGroupID { if g == nil { return nil } return g.itemBase.SchemaGroup.Ref() } -func (g *Group) Schema() id.PropertySchemaID { +func (g *Group) Schema() SchemaID { if g == nil { - return id.PropertySchemaID{} + return SchemaID{} } return g.itemBase.Schema } -// SchemaRef _ -func (g *Group) SchemaRef() *id.PropertySchemaID { +func (g *Group) SchemaRef() *SchemaID { if g == nil { return nil } @@ -73,11 +71,11 @@ func (g *Group) HasLinkedField() bool { return false } -func (g *Group) CollectDatasets() []id.DatasetID { +func (g *Group) CollectDatasets() []DatasetID { if g == nil { return nil } - res := []id.DatasetID{} + res := []DatasetID{} for _, f := range 
g.fields { res = append(res, f.CollectDatasets()...) @@ -86,7 +84,7 @@ func (g *Group) CollectDatasets() []id.DatasetID { return res } -func (g *Group) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { +func (g *Group) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { if g == nil { return nil } @@ -99,7 +97,7 @@ func (g *Group) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*F return res } -func (g *Group) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { +func (g *Group) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { if g == nil { return false } @@ -150,7 +148,7 @@ func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset g.Prune() } -func (g *Group) GetOrCreateField(ps *Schema, fid id.PropertySchemaFieldID) (*Field, bool) { +func (g *Group) GetOrCreateField(ps *Schema, fid FieldID) (*Field, bool) { if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { return nil, false } @@ -180,7 +178,7 @@ func (g *Group) GetOrCreateField(ps *Schema, fid id.PropertySchemaFieldID) (*Fie return field, true } -func (g *Group) RemoveField(fid id.PropertySchemaFieldID) { +func (g *Group) RemoveField(fid FieldID) { if g == nil { return } @@ -192,11 +190,11 @@ func (g *Group) RemoveField(fid id.PropertySchemaFieldID) { } } -func (g *Group) FieldIDs() []id.PropertySchemaFieldID { +func (g *Group) FieldIDs() []FieldID { if g == nil { return nil } - fields := make([]id.PropertySchemaFieldID, 0, len(g.fields)) + fields := make([]FieldID, 0, len(g.fields)) for _, f := range g.fields { fields = append(fields, f.Field()) } @@ -212,7 +210,7 @@ func (g *Group) Fields() []*Field { } // Field returns a field whose id is specified -func (g *Group) Field(fid id.PropertySchemaFieldID) *Field { +func (g *Group) Field(fid FieldID) *Field { if g == nil { return nil } diff --git a/pkg/property/group_builder.go b/pkg/property/group_builder.go index d80d12aa7..0c65712ce 100644 --- 
a/pkg/property/group_builder.go +++ b/pkg/property/group_builder.go @@ -1,7 +1,5 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - type GroupBuilder struct { p *Group } @@ -21,8 +19,8 @@ func InitGroupFrom(g *SchemaGroup) *Group { } func (b *GroupBuilder) Build() (*Group, error) { - if id.ID(b.p.itemBase.ID).IsNil() { - return nil, id.ErrInvalidID + if b.p.itemBase.ID.IsNil() { + return nil, ErrInvalidID } return b.p, nil } @@ -40,17 +38,17 @@ func (b *GroupBuilder) base(base itemBase) *GroupBuilder { return b } -func (b *GroupBuilder) ID(id id.PropertyItemID) *GroupBuilder { +func (b *GroupBuilder) ID(id ItemID) *GroupBuilder { b.p.itemBase.ID = id return b } func (b *GroupBuilder) NewID() *GroupBuilder { - b.p.itemBase.ID = id.NewPropertyItemID() + b.p.itemBase.ID = NewItemID() return b } -func (b *GroupBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGroupID) *GroupBuilder { +func (b *GroupBuilder) Schema(s SchemaID, g SchemaGroupID) *GroupBuilder { b.p.itemBase.Schema = s b.p.itemBase.SchemaGroup = g return b @@ -58,7 +56,7 @@ func (b *GroupBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGroupID) func (b *GroupBuilder) Fields(fields []*Field) *GroupBuilder { var newFields []*Field - ids := map[id.PropertySchemaFieldID]struct{}{} + ids := map[FieldID]struct{}{} for _, f := range fields { if f == nil { continue diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index 504a37549..0b01b7365 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -4,33 +4,32 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestGroupBuilder_Build(t *testing.T) { - iid := id.NewPropertyItemID() - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + iid := NewItemID() + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") f := 
NewField(sf).Value(OptionalValueFrom(v)).MustBuild() testCases := []struct { Name string - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Fields []*Field Expected struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Fields []*Field } Err error }{ { Name: "fail invalid id", - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, { Name: "success", @@ -39,9 +38,9 @@ func TestGroupBuilder_Build(t *testing.T) { SchemaGroup: "a", Fields: []*Field{f}, Expected: struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Fields []*Field }{ Id: iid, @@ -71,22 +70,22 @@ func TestGroupBuilder_Build(t *testing.T) { } func TestGroupBuilder_MustBuild(t *testing.T) { - iid := id.NewPropertyItemID() - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + iid := NewItemID() + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() testCases := []struct { Name string Fail bool - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Fields []*Field Expected struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Fields []*Field } }{ @@ -101,9 +100,9 @@ func TestGroupBuilder_MustBuild(t *testing.T) { SchemaGroup: "a", Fields: []*Field{f}, Expected: struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Fields []*Field }{ Id: iid, @@ -146,7 +145,7 @@ func 
TestGroupBuilder_NewID(t *testing.T) { func TestGroupBuilder_InitGroupFrom(t *testing.T) { var sg *SchemaGroup assert.Nil(t, InitGroupFrom(sg)) - sg = NewSchemaGroup().ID("a").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).MustBuild() + sg = NewSchemaGroup().ID("a").Schema(MustSchemaID("xx~1.0.0/aa")).MustBuild() g := InitGroupFrom(sg) assert.Equal(t, sg.ID(), g.SchemaGroup()) assert.Equal(t, sg.Schema(), g.Schema()) diff --git a/pkg/property/group_list.go b/pkg/property/group_list.go index e6ddb1cf0..7eeec815d 100644 --- a/pkg/property/group_list.go +++ b/pkg/property/group_list.go @@ -6,10 +6,8 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) -// GroupList _ type GroupList struct { itemBase groups []*Group @@ -19,15 +17,15 @@ type GroupList struct { var _ Item = &GroupList{} // ID returns id -func (g *GroupList) ID() id.PropertyItemID { +func (g *GroupList) ID() ItemID { if g == nil { - return id.PropertyItemID{} + return ItemID{} } return g.itemBase.ID } // IDRef returns a reference of id -func (g *GroupList) IDRef() *id.PropertyItemID { +func (g *GroupList) IDRef() *ItemID { if g == nil { return nil } @@ -35,38 +33,34 @@ func (g *GroupList) IDRef() *id.PropertyItemID { } // SchemaGroup returns id of schema group -func (g *GroupList) SchemaGroup() id.PropertySchemaGroupID { +func (g *GroupList) SchemaGroup() SchemaGroupID { if g == nil { - return id.PropertySchemaGroupID("") + return SchemaGroupID("") } return g.itemBase.SchemaGroup } -// SchemaGroupRef _ -func (g *GroupList) SchemaGroupRef() *id.PropertySchemaGroupID { +func (g *GroupList) SchemaGroupRef() *SchemaGroupID { if g == nil { return nil } return g.itemBase.SchemaGroup.Ref() } -// Schema _ -func (g *GroupList) Schema() id.PropertySchemaID { +func (g *GroupList) Schema() SchemaID { if g == nil { - return id.PropertySchemaID{} + return SchemaID{} } return g.itemBase.Schema } -// SchemaRef _ -func (g *GroupList) SchemaRef() 
*id.PropertySchemaID { +func (g *GroupList) SchemaRef() *SchemaID { if g == nil { return nil } return g.itemBase.Schema.Ref() } -// HasLinkedField _ func (g *GroupList) HasLinkedField() bool { if g == nil { return false @@ -79,12 +73,11 @@ func (g *GroupList) HasLinkedField() bool { return false } -// CollectDatasets _ -func (g *GroupList) CollectDatasets() []id.DatasetID { +func (g *GroupList) CollectDatasets() []DatasetID { if g == nil { return nil } - res := []id.DatasetID{} + res := []DatasetID{} for _, f := range g.groups { res = append(res, f.CollectDatasets()...) @@ -93,8 +86,7 @@ func (g *GroupList) CollectDatasets() []id.DatasetID { return res } -// FieldsByLinkedDataset _ -func (g *GroupList) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { +func (g *GroupList) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { if g == nil { return nil } @@ -105,8 +97,7 @@ func (g *GroupList) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) return res } -// IsDatasetLinked _ -func (g *GroupList) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { +func (g *GroupList) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { if g == nil { return false } @@ -118,12 +109,10 @@ func (g *GroupList) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { return false } -// IsEmpty _ func (g *GroupList) IsEmpty() bool { return g != nil && (g.groups == nil || len(g.groups) == 0) } -// Prune _ func (g *GroupList) Prune() { if g == nil { return @@ -133,7 +122,6 @@ func (g *GroupList) Prune() { } } -// MigrateSchema _ func (g *GroupList) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { if g == nil || dl == nil { return @@ -157,7 +145,7 @@ func (g *GroupList) Groups() []*Group { } // GetGroup returns a group whose id is specified -func (g *GroupList) GetGroup(gid id.PropertyItemID) *Group { +func (g *GroupList) GetGroup(gid ItemID) *Group { if g == nil { return nil } @@ -177,8 +165,7 @@ func (g 
*GroupList) GroupAt(i int) *Group { return g.groups[i] } -// Has _ -func (g *GroupList) Has(i id.PropertyItemID) bool { +func (g *GroupList) Has(i ItemID) bool { if g == nil { return false } @@ -190,7 +177,6 @@ func (g *GroupList) Has(i id.PropertyItemID) bool { return false } -// Count _ func (g *GroupList) Count() int { if g == nil { return 0 @@ -198,7 +184,6 @@ func (g *GroupList) Count() int { return len(g.groups) } -// Add _ func (g *GroupList) Add(gg *Group, index int) { if g == nil || g.Has(gg.ID()) { return @@ -212,7 +197,6 @@ func (g *GroupList) Add(gg *Group, index int) { } } -// AddOrMove _ func (g *GroupList) AddOrMove(gg *Group, index int) { if g == nil { return @@ -231,8 +215,7 @@ func (g *GroupList) AddOrMove(gg *Group, index int) { g.groups = append(g.groups[:index], append([]*Group{gg}, g.groups[index:]...)...) } -// Move _ -func (g *GroupList) Move(id id.PropertyItemID, toIndex int) { +func (g *GroupList) Move(id ItemID, toIndex int) { if g == nil { return } @@ -245,7 +228,6 @@ func (g *GroupList) Move(id id.PropertyItemID, toIndex int) { } } -// MoveAt _ func (g *GroupList) MoveAt(fromIndex int, toIndex int) { if g == nil { return @@ -270,8 +252,7 @@ func (g *GroupList) MoveAt(fromIndex int, toIndex int) { g.groups = append(newSlice, g.groups[toIndex:]...) } -// Remove _ -func (g *GroupList) Remove(id id.PropertyItemID) bool { +func (g *GroupList) Remove(id ItemID) bool { if g == nil { return false } @@ -286,7 +267,6 @@ func (g *GroupList) Remove(id id.PropertyItemID) bool { return false } -// RemoveAt _ func (g *GroupList) RemoveAt(index int) { if g == nil { return @@ -305,7 +285,6 @@ func (g *GroupList) RemoveAt(index int) { g.groups = append(g.groups[:index], groups...) 
} -// Empty _ func (g *GroupList) Empty() { if g == nil { return @@ -314,7 +293,6 @@ func (g *GroupList) Empty() { g.groups = []*Group{} } -// GetOrCreateField _ func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { if g == nil || ptr == nil || ps == nil || ps.ID() != g.Schema() { return nil, false @@ -337,7 +315,6 @@ func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { return i.GetOrCreateField(ps, fid) } -// CreateAndAddListItem _ func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { return nil @@ -360,7 +337,6 @@ func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { return nil } -// MigrateDataset _ func (g *GroupList) MigrateDataset(q DatasetMigrationParam) { if g == nil { return diff --git a/pkg/property/group_list_builder.go b/pkg/property/group_list_builder.go index bebb6a9b1..1affcd1d8 100644 --- a/pkg/property/group_list_builder.go +++ b/pkg/property/group_list_builder.go @@ -1,7 +1,5 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - type GroupListBuilder struct { p *GroupList } @@ -21,8 +19,8 @@ func InitGroupListFrom(g *SchemaGroup) *GroupList { } func (b *GroupListBuilder) Build() (*GroupList, error) { - if id.ID(b.p.itemBase.ID).IsNil() { - return nil, id.ErrInvalidID + if b.p.itemBase.ID.IsNil() { + return nil, ErrInvalidID } return b.p, nil } @@ -40,17 +38,17 @@ func (b *GroupListBuilder) base(base itemBase) *GroupListBuilder { return b } -func (b *GroupListBuilder) ID(id id.PropertyItemID) *GroupListBuilder { +func (b *GroupListBuilder) ID(id ItemID) *GroupListBuilder { b.p.itemBase.ID = id return b } func (b *GroupListBuilder) NewID() *GroupListBuilder { - b.p.itemBase.ID = id.NewPropertyItemID() + b.p.itemBase.ID = NewItemID() return b } -func (b *GroupListBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGroupID) *GroupListBuilder { +func (b *GroupListBuilder) 
Schema(s SchemaID, g SchemaGroupID) *GroupListBuilder { b.p.itemBase.Schema = s b.p.itemBase.SchemaGroup = g return b @@ -58,7 +56,7 @@ func (b *GroupListBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGrou func (b *GroupListBuilder) Groups(fields []*Group) *GroupListBuilder { newGroups := []*Group{} - ids := map[id.PropertyItemID]struct{}{} + ids := map[ItemID]struct{}{} for _, f := range fields { if f == nil { continue diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go index 8ef9d7d6a..cdf459d84 100644 --- a/pkg/property/group_list_builder_test.go +++ b/pkg/property/group_list_builder_test.go @@ -4,24 +4,23 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestGroupListBuilder_Build(t *testing.T) { - pid := id.NewPropertyItemID() - scid := id.MustPropertySchemaID("xx~1.0.0/aa") + pid := NewItemID() + scid := MustSchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { Name string - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Groups []*Group Expected struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Groups []*Group } Err error @@ -33,9 +32,9 @@ func TestGroupListBuilder_Build(t *testing.T) { SchemaGroup: "aa", Groups: groups, Expected: struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Groups []*Group }{ Id: pid, @@ -46,7 +45,7 @@ func TestGroupListBuilder_Build(t *testing.T) { }, { Name: "fail invalid id", - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, } for _, tc := range testCases { @@ -72,20 +71,20 @@ func TestGroupListBuilder_NewID(t *testing.T) { } func 
TestGroupListBuilder_MustBuild(t *testing.T) { - pid := id.NewPropertyItemID() - scid := id.MustPropertySchemaID("xx~1.0.0/aa") + pid := NewItemID() + scid := MustSchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { Name string Fails bool - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Groups []*Group Expected struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Groups []*Group } }{ @@ -96,9 +95,9 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { SchemaGroup: "aa", Groups: groups, Expected: struct { - Id id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + Id ItemID + Schema SchemaID + SchemaGroup SchemaGroupID Groups []*Group }{ Id: pid, @@ -140,17 +139,17 @@ func TestInitGroupListFrom(t *testing.T) { testCases := []struct { Name string SchemaGroup *SchemaGroup - ExpectedSG id.PropertySchemaGroupID - ExpectedSchema id.PropertySchemaID + ExpectedSG SchemaGroupID + ExpectedSchema SchemaID }{ { Name: "nil schema group", }, { Name: "success", - SchemaGroup: NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).MustBuild(), + SchemaGroup: NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).MustBuild(), ExpectedSG: "aa", - ExpectedSchema: id.MustPropertySchemaID("xx~1.0.0/aa"), + ExpectedSchema: MustSchemaID("xx~1.0.0/aa"), }, } diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index 39329d43d..343ab2cd6 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -3,7 +3,6 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -18,17 +17,17 @@ func TestGroupList_SchemaRef(t *testing.T) { testCases := []struct { Name string GL 
*GroupList - ExpectedSG *id.PropertySchemaGroupID - ExpectedSchema *id.PropertySchemaID + ExpectedSG *SchemaGroupID + ExpectedSchema *SchemaID }{ { Name: "nil group list", }, { Name: "success", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), id.PropertySchemaGroupID("xx")).MustBuild(), - ExpectedSG: id.PropertySchemaGroupID("xx").Ref(), - ExpectedSchema: id.MustPropertySchemaID("xx~1.0.0/aa").Ref(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), SchemaGroupID("xx")).MustBuild(), + ExpectedSG: SchemaGroupID("xx").Ref(), + ExpectedSchema: MustSchemaID("xx~1.0.0/aa").Ref(), }, } for _, tc := range testCases { @@ -42,12 +41,12 @@ func TestGroupList_SchemaRef(t *testing.T) { } func TestGroupList_HasLinkedField(t *testing.T) { - pid := id.NewPropertyItemID() + pid := NewItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { @@ -60,12 +59,12 @@ func TestGroupList_HasLinkedField(t *testing.T) { }, { Name: "has linked field", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: true, }, { Name: "no linked field", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), + GL: 
NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), Expected: false, }, } @@ -80,31 +79,31 @@ func TestGroupList_HasLinkedField(t *testing.T) { } func TestGroupList_CollectDatasets(t *testing.T) { - pid := id.NewPropertyItemID() + pid := NewItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { Name string GL *GroupList - Expected []id.DatasetID + Expected []DatasetID }{ { Name: "nil group list", }, { Name: "one dataset", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), - Expected: []id.DatasetID{dsid}, + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + Expected: []DatasetID{dsid}, }, { Name: "empty list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), - Expected: []id.DatasetID{}, + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), + Expected: []DatasetID{}, }, } for _, tc := range testCases { @@ -117,12 +116,12 @@ func TestGroupList_CollectDatasets(t *testing.T) { } func TestGroupList_FieldsByLinkedDataset(t *testing.T) { - pid := id.NewPropertyItemID() + pid := NewItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - dssid := 
id.NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} testCases := []struct { @@ -135,12 +134,12 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { }, { Name: "one field list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: []*Field{f}, }, { Name: "empty list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), Expected: []*Field{}, }, } @@ -154,12 +153,12 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { } func TestGroupList_IsEmpty(t *testing.T) { - pid := id.NewPropertyItemID() + pid := NewItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, id.NewDatasetSchemaFieldID())}}).MustBuild() + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} testCases := []struct { Name string @@ -171,12 +170,12 @@ func TestGroupList_IsEmpty(t *testing.T) { }, { Name: "is empty", - GL: 
NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").MustBuild(), Expected: true, }, { Name: "is not empty", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: false, }, } @@ -194,7 +193,7 @@ func TestGroupList_Prune(t *testing.T) { v := ValueTypeString.ValueFrom("vvv") f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf).MustBuild() - pid := id.NewPropertyItemID() + pid := NewItemID() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f, f2}).MustBuild()} pruned := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} testCases := []struct { @@ -207,7 +206,7 @@ func TestGroupList_Prune(t *testing.T) { }, { Name: "pruned list", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), Expected: pruned, }, } @@ -222,11 +221,11 @@ func TestGroupList_Prune(t *testing.T) { } func TestGroupList_GetGroup(t *testing.T) { - pid := id.NewPropertyItemID() + pid := NewItemID() g := NewGroup().ID(pid).MustBuild() testCases := []struct { Name string - Input id.PropertyItemID + Input ItemID GL *GroupList Expected *Group }{ @@ -236,13 +235,13 @@ func TestGroupList_GetGroup(t *testing.T) { { Name: "found", Input: pid, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), Expected: g, }, { Name: "not found", - Input: id.NewPropertyItemID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), + Input: 
NewItemID(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), Expected: nil, }, } @@ -256,10 +255,10 @@ func TestGroupList_GetGroup(t *testing.T) { } func TestGroupList_GroupAt(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string Index int @@ -280,7 +279,7 @@ func TestGroupList_GroupAt(t *testing.T) { { Name: "found", Index: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: g3, }, } @@ -294,13 +293,13 @@ func TestGroupList_GroupAt(t *testing.T) { } func TestGroupList_Has(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string - Input id.PropertyItemID + Input ItemID GL *GroupList Expected bool }{ @@ -310,13 +309,13 @@ func TestGroupList_Has(t *testing.T) { { Name: "found", Input: g2.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), 
"xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: true, }, { Name: "not found", Input: g3.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g4}).MustBuild(), Expected: false, }, } @@ -330,10 +329,10 @@ func TestGroupList_Has(t *testing.T) { } func TestGroupList_Count(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -344,7 +343,7 @@ func TestGroupList_Count(t *testing.T) { }, { Name: "not found", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: 4, }, } @@ -358,10 +357,10 @@ func TestGroupList_Count(t *testing.T) { } func TestGroupList_Add(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -379,7 +378,7 @@ func TestGroupList_Add(t *testing.T) { Name: "index < 0", Index: -1, Gr: g2, - GL: 
NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -392,7 +391,7 @@ func TestGroupList_Add(t *testing.T) { Name: "len(g) > index > 0 ", Index: 2, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -414,10 +413,10 @@ func TestGroupList_Add(t *testing.T) { } func TestGroupList_AddOrMove(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -435,7 +434,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "index < 0", Index: -1, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -448,7 +447,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "len(g) > index > 0 ", Index: 2, Gr: g2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ 
-461,7 +460,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "move group", Index: 2, Gr: g1, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -483,17 +482,17 @@ func TestGroupList_AddOrMove(t *testing.T) { } func TestGroupList_Move(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList - Id id.PropertyItemID + Id ItemID ToIndex int Expected struct { - Id id.PropertyItemID + Id ItemID Index int } }{ @@ -504,9 +503,9 @@ func TestGroupList_Move(t *testing.T) { Name: "success", Id: g1.ID(), ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: struct { - Id id.PropertyItemID + Id ItemID Index int }{Id: g1.ID(), Index: 2}, }, @@ -523,10 +522,10 @@ func TestGroupList_Move(t *testing.T) { } func TestGroupList_MoveAt(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := 
NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -540,21 +539,21 @@ func TestGroupList_MoveAt(t *testing.T) { Name: "from = to", FromIndex: 2, ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "from < 0", FromIndex: -1, ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "success move", FromIndex: 0, ToIndex: 2, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g2, g3, g1, g4}, }, } @@ -570,10 +569,10 @@ func TestGroupList_MoveAt(t *testing.T) { } func TestGroupList_RemoveAt(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -586,19 +585,19 @@ func TestGroupList_RemoveAt(t *testing.T) { { Name: "success", Index: 1, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: 
NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g3, g4}, }, { Name: "index < 0", Index: -1, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "index > length", Index: 5, - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, } @@ -613,14 +612,14 @@ func TestGroupList_RemoveAt(t *testing.T) { } } func TestGroupList_Remove(t *testing.T) { - g1 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g2 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g3 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() - g4 := NewGroup().ID(id.NewPropertyItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).MustBuild() + g2 := NewGroup().ID(NewItemID()).MustBuild() + g3 := NewGroup().ID(NewItemID()).MustBuild() + g4 := NewGroup().ID(NewItemID()).MustBuild() testCases := []struct { Name string GL *GroupList - Input id.PropertyItemID + Input ItemID Expected bool }{ { @@ -629,13 +628,13 @@ func TestGroupList_Remove(t *testing.T) { { Name: "success", Input: g1.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: true, }, { Name: "not found", Input: g4.ID(), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3}).MustBuild(), + GL: 
NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3}).MustBuild(), Expected: false, }, } @@ -652,8 +651,8 @@ func TestGroupList_Remove(t *testing.T) { func TestGroupList_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(NewItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -666,8 +665,8 @@ func TestGroupList_GetOrCreateField(t *testing.T) { }{ { Name: "success", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), Expected: struct { Ok bool @@ -679,20 +678,20 @@ func TestGroupList_GetOrCreateField(t *testing.T) { }, { Name: "can't get a group", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, { Name: "FieldByItem not ok: sg!=nil", - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), 
"aa").Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(sg.IDRef(), g.IDRef(), sf.ID().Ref()), }, { Name: "psg == nil", GL: NewGroupList().NewID().Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, } @@ -711,8 +710,8 @@ func TestGroupList_GetOrCreateField(t *testing.T) { func TestGroupList_CreateAndAddListItem(t *testing.T) { getIntRef := func(i int) *int { return &i } sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - g := NewGroup().ID(id.NewPropertyItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(NewItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() testCases := []struct { Name string GL *GroupList @@ -723,8 +722,8 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { { Name: "success", Index: getIntRef(0), - GL: NewGroupList().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - Schema: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Expected: g, }, } diff --git a/pkg/property/group_test.go b/pkg/property/group_test.go index 
1321a14c5..5f038056a 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -4,12 +4,11 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestGroup_IDRef(t *testing.T) { - gid := id.NewPropertyItemID() + gid := NewItemID() var g *Group assert.Nil(t, g.IDRef()) g = NewGroup().ID(gid).MustBuild() @@ -19,9 +18,9 @@ func TestGroup_IDRef(t *testing.T) { func TestGroup_SchemaGroup(t *testing.T) { var g *Group assert.Nil(t, g.SchemaGroupRef()) - assert.Equal(t, id.PropertySchemaGroupID(""), g.SchemaGroup()) - pfid := id.PropertySchemaGroupID("aa") - g = NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), pfid).MustBuild() + assert.Equal(t, SchemaGroupID(""), g.SchemaGroup()) + pfid := SchemaGroupID("aa") + g = NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), pfid).MustBuild() assert.Equal(t, pfid, g.SchemaGroup()) assert.Equal(t, pfid.Ref(), g.SchemaGroupRef()) } @@ -29,7 +28,7 @@ func TestGroup_SchemaGroup(t *testing.T) { func TestGroup_HasLinkedField(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - l := NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() @@ -67,9 +66,9 @@ func TestGroup_HasLinkedField(t *testing.T) { func TestGroup_IsDatasetLinked(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - l := NewLink(dsid, dssid, id.NewDatasetSchemaFieldID()) + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + l := NewLink(dsid, dssid, NewDatasetFieldID()) ls := NewLinks([]*Link{l}) f := 
NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() @@ -77,8 +76,8 @@ func TestGroup_IsDatasetLinked(t *testing.T) { testCases := []struct { Name string Group *Group - DatasetSchema id.DatasetSchemaID - Dataset id.DatasetID + DatasetSchema DatasetSchemaID + Dataset DatasetID Expected bool }{ { @@ -110,15 +109,15 @@ func TestGroup_IsDatasetLinked(t *testing.T) { func TestGroup_CollectDatasets(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - l := NewLink(dsid, id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + dsid := NewDatasetID() + l := NewLink(dsid, NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() testCases := []struct { Name string Group *Group - Expected []id.DatasetID + Expected []DatasetID }{ { Name: "nil group", @@ -128,7 +127,7 @@ func TestGroup_CollectDatasets(t *testing.T) { { Name: "normal case", Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), - Expected: []id.DatasetID{dsid}, + Expected: []DatasetID{dsid}, }, } for _, tc := range testCases { @@ -144,17 +143,17 @@ func TestGroup_CollectDatasets(t *testing.T) { func TestGroup_FieldsByLinkedDataset(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - dsid := id.NewDatasetID() - dssid := id.NewDatasetSchemaID() - l := NewLink(dsid, dssid, id.NewDatasetSchemaFieldID()) + dsid := NewDatasetID() + dssid := NewDatasetSchemaID() + l := NewLink(dsid, dssid, NewDatasetFieldID()) ls := NewLinks([]*Link{l}) f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() testCases := []struct { Name string Group *Group - DatasetSchema id.DatasetSchemaID - DataSet id.DatasetID + DatasetSchema DatasetSchemaID + DataSet DatasetID Expected []*Field }{ { @@ -245,12 +244,12 @@ func 
TestGroup_Prune(t *testing.T) { func TestGroup_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() f := NewField(sf).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string Group *Group PS *Schema - FID id.PropertySchemaFieldID + FID FieldID Expected struct { Field *Field Bool bool @@ -261,17 +260,17 @@ func TestGroup_GetOrCreateField(t *testing.T) { }, { Name: "nil ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), }, { Name: "group schema doesn't equal to ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), }, { Name: "create field", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), FID: "aa", Expected: struct { Field *Field @@ -283,8 +282,8 @@ func TestGroup_GetOrCreateField(t *testing.T) { }, { Name: "get field", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").Fields([]*Field{f}).MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: 
NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").Fields([]*Field{f}).MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), FID: "aa", Expected: struct { Field *Field @@ -316,7 +315,7 @@ func TestGroup_RemoveField(t *testing.T) { testCases := []struct { Name string Group *Group - Input id.PropertySchemaFieldID + Input FieldID Expected []*Field }{ @@ -350,7 +349,7 @@ func TestGroup_FieldIDs(t *testing.T) { testCases := []struct { Name string Group *Group - Expected []id.PropertySchemaFieldID + Expected []FieldID }{ { @@ -359,7 +358,7 @@ func TestGroup_FieldIDs(t *testing.T) { { Name: "normal case", Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), - Expected: []id.PropertySchemaFieldID{"a", "b"}, + Expected: []FieldID{"a", "b"}, }, } for _, tc := range testCases { @@ -382,7 +381,7 @@ func TestGroup_Field(t *testing.T) { testCases := []struct { Name string Group *Group - Input id.PropertySchemaFieldID + Input FieldID Expected *Field }{ @@ -415,14 +414,14 @@ func TestGroup_Field(t *testing.T) { func TestGroup_UpdateNameFieldValue(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() //f := NewField(sf).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("bb").Schema(id.MustPropertySchemaID("xx~1.0.0/bb")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("bb").Schema(MustSchemaID("xx~1.0.0/bb")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string Group *Group PS *Schema Value *Value - FID id.PropertySchemaFieldID + FID FieldID Expected *Field Err error }{ @@ -431,25 +430,25 @@ func TestGroup_UpdateNameFieldValue(t *testing.T) { }, { Name: "nil ps", - Group: 
NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), }, { Name: "group schema doesn't equal to ps", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), }, { Name: "update value", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Value: ValueTypeString.ValueFrom("abc"), FID: "aa", Expected: NewField(sf).Value(OptionalValueFrom(ValueTypeString.ValueFrom("abc"))).MustBuild(), }, { Name: "invalid property field", - Group: NewGroup().NewID().Schema(id.MustPropertySchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - PS: NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), Value: ValueTypeString.ValueFrom("abc"), FID: "aa", Expected: nil, diff --git a/pkg/property/id.go b/pkg/property/id.go new file mode 100644 index 000000000..9fb862f91 --- /dev/null +++ b/pkg/property/id.go @@ -0,0 +1,71 @@ +package property + +import ( + "sort" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.PropertyID +type ItemID = id.PropertyItemID +type FieldID = id.PropertySchemaFieldID +type SchemaID = id.PropertySchemaID +type 
SchemaGroupID = id.PropertySchemaGroupID +type DatasetID = id.DatasetID +type DatasetFieldID = id.DatasetSchemaFieldID +type DatasetSchemaID = id.DatasetSchemaID +type SceneID = id.SceneID + +var NewID = id.NewPropertyID +var NewItemID = id.NewPropertyItemID +var NewDatasetID = id.NewDatasetID +var NewDatasetFieldID = id.NewDatasetSchemaFieldID +var NewDatasetSchemaID = id.NewDatasetSchemaID +var NewSceneID = id.NewSceneID + +var MustID = id.MustPropertyID +var MustItemID = id.MustPropertyItemID +var MustSchemaID = id.MustPropertySchemaID +var MustDatasetID = id.MustDatasetID +var MustDatasetFieldID = id.MustDatasetSchemaFieldID +var MustDatasetSchemaID = id.MustDatasetSchemaID +var MustSceneID = id.MustSceneID + +var IDFrom = id.PropertyIDFrom +var ItemIDFrom = id.PropertyItemIDFrom +var FieldIDFrom = id.PropertySchemaFieldIDFrom +var SchemaIDFrom = id.PropertySchemaIDFrom +var SchemaGroupIDFrom = id.PropertySchemaGroupIDFrom +var DatasetIDFrom = id.DatasetIDFrom +var DatasetFieldIDFrom = id.DatasetSchemaFieldIDFrom +var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom +var SceneIDFrom = id.SceneIDFrom + +var IDFromRef = id.PropertyIDFromRef +var ItemIDFromRef = id.PropertyItemIDFromRef +var SchemaIDFromRef = id.PropertySchemaIDFromRef +var DatasetIDFromRef = id.DatasetIDFromRef +var DatasetFieldIDFromRef = id.DatasetSchemaFieldIDFromRef +var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef +var SceneIDFromRef = id.SceneIDFromRef + +var IDFromRefID = id.PropertyIDFromRefID +var ItemIDFromRefID = id.PropertyItemIDFromRefID +var DatasetIDFromRefID = id.DatasetIDFromRefID +var DatasetFieldIDFromRefID = id.DatasetSchemaFieldIDFromRefID +var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID +var SceneIDFromRefID = id.SceneIDFromRefID + +type IDSet = id.PropertyIDSet +type ItemIDSet = id.PropertyItemIDSet + +var NewIDSet = id.NewPropertyIDSet +var NewItemIDSet = id.NewPropertyItemIDSet + +var ErrInvalidID = id.ErrInvalidID + +func sortIDs(a []ID) { + 
sort.SliceStable(a, func(i, j int) bool { + return id.ID(a[i]).Compare(id.ID(a[j])) < 0 + }) +} diff --git a/pkg/property/initializer.go b/pkg/property/initializer.go index aff489fe6..77a6779da 100644 --- a/pkg/property/initializer.go +++ b/pkg/property/initializer.go @@ -4,16 +4,14 @@ package property import ( "errors" - - "github.com/reearth/reearth-backend/pkg/id" ) var ErrSchemaDoesNotMatch = errors.New("schema of the initializer does not match schema of the argument") type Initializer struct { - ID *id.PropertyID `json:"id"` - Schema id.PropertySchemaID `json:"schema"` - Items []*InitializerItem `json:"items"` + ID *ID `json:"id"` + Schema SchemaID `json:"schema"` + Items []*InitializerItem `json:"items"` } func (p *Initializer) Clone() *Initializer { @@ -36,14 +34,14 @@ func (p *Initializer) Clone() *Initializer { } } -func (p *Initializer) Property(scene id.SceneID) (*Property, error) { +func (p *Initializer) Property(scene SceneID) (*Property, error) { if p == nil { return nil, nil } i := p.ID if i == nil { - i = id.NewPropertyID().Ref() + i = NewID().Ref() } var items []Item @@ -62,7 +60,7 @@ func (p *Initializer) Property(scene id.SceneID) (*Property, error) { } // PropertyIncludingEmpty generates a new property, but even if the initializer is empty, an empty property will be generated. 
-func (p *Initializer) PropertyIncludingEmpty(scene id.SceneID, schema id.PropertySchemaID) (*Property, error) { +func (p *Initializer) PropertyIncludingEmpty(scene SceneID, schema SchemaID) (*Property, error) { if p != nil && p.Schema != schema { return nil, ErrSchemaDoesNotMatch } @@ -82,7 +80,7 @@ func (p *Initializer) PropertyIncludingEmpty(scene id.SceneID, schema id.Propert return pr, nil } -func (p *Initializer) MustBeProperty(scene id.SceneID) *Property { +func (p *Initializer) MustBeProperty(scene SceneID) *Property { r, err := p.Property(scene) if err != nil { panic(err) @@ -91,10 +89,10 @@ func (p *Initializer) MustBeProperty(scene id.SceneID) *Property { } type InitializerItem struct { - ID *id.PropertyItemID `json:"id"` - SchemaItem id.PropertySchemaGroupID `json:"schemaItem"` - Groups []*InitializerGroup `json:"groups"` - Fields []*InitializerField `json:"fields"` + ID *ItemID `json:"id"` + SchemaItem SchemaGroupID `json:"schemaItem"` + Groups []*InitializerGroup `json:"groups"` + Fields []*InitializerField `json:"fields"` } func (p *InitializerItem) Clone() *InitializerItem { @@ -126,14 +124,14 @@ func (p *InitializerItem) Clone() *InitializerItem { } } -func (p *InitializerItem) PropertyItem(parent id.PropertySchemaID) (Item, error) { +func (p *InitializerItem) PropertyItem(parent SchemaID) (Item, error) { if p == nil { return nil, nil } i := p.ID if i == nil { - i = id.NewPropertyItemID().Ref() + i = NewItemID().Ref() } pi := NewItem().ID(*i).Schema(parent, p.SchemaItem) @@ -166,7 +164,7 @@ func (p *InitializerItem) PropertyItem(parent id.PropertySchemaID) (Item, error) return pi.Group().Fields(fields).Build() } -func (p *InitializerItem) PropertyGroupList(parent id.PropertySchemaID) *GroupList { +func (p *InitializerItem) PropertyGroupList(parent SchemaID) *GroupList { i, _ := p.PropertyItem(parent) if g := ToGroupList(i); g != nil { return g @@ -174,7 +172,7 @@ func (p *InitializerItem) PropertyGroupList(parent id.PropertySchemaID) *GroupLi 
return nil } -func (p *InitializerItem) PropertyGroup(parent id.PropertySchemaID) *Group { +func (p *InitializerItem) PropertyGroup(parent SchemaID) *Group { i, _ := p.PropertyItem(parent) if g := ToGroup(i); g != nil { return g @@ -183,7 +181,7 @@ func (p *InitializerItem) PropertyGroup(parent id.PropertySchemaID) *Group { } type InitializerGroup struct { - ID *id.PropertyItemID `json:"id"` + ID *ItemID `json:"id"` Fields []*InitializerField `json:"fields"` } @@ -206,14 +204,14 @@ func (p *InitializerGroup) Clone() *InitializerGroup { } } -func (p *InitializerGroup) PropertyGroup(parent id.PropertySchemaID, parentItem id.PropertySchemaGroupID) (*Group, error) { +func (p *InitializerGroup) PropertyGroup(parent SchemaID, parentItem SchemaGroupID) (*Group, error) { if p == nil { return nil, nil } i := p.ID if i == nil { - i = id.NewPropertyItemID().Ref() + i = NewItemID().Ref() } pi := NewItem().ID(*i).Schema(parent, parentItem) @@ -232,10 +230,10 @@ func (p *InitializerGroup) PropertyGroup(parent id.PropertySchemaID, parentItem } type InitializerField struct { - Field id.PropertySchemaFieldID `json:"field"` - Type ValueType `json:"type"` - Value *Value `json:"value"` - Links []*InitializerLink `json:"links"` + Field FieldID `json:"field"` + Type ValueType `json:"type"` + Value *Value `json:"value"` + Links []*InitializerLink `json:"links"` } func (p *InitializerField) Clone() *InitializerField { @@ -280,9 +278,9 @@ func (p *InitializerField) PropertyField() *Field { } type InitializerLink struct { - Dataset *id.DatasetID `json:"dataset"` - Schema id.DatasetSchemaID `json:"schema"` - Field id.DatasetSchemaFieldID `json:"field"` + Dataset *DatasetID `json:"dataset"` + Schema DatasetSchemaID `json:"schema"` + Field DatasetFieldID `json:"field"` } func (p *InitializerLink) Clone() *InitializerLink { diff --git a/pkg/property/initializer_test.go b/pkg/property/initializer_test.go index ebd6fe1e2..980039bd5 100644 --- a/pkg/property/initializer_test.go +++ 
b/pkg/property/initializer_test.go @@ -3,17 +3,16 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestInitializer_Clone(t *testing.T) { initializer := &Initializer{ - ID: id.NewPropertyID().Ref(), - Schema: id.MustPropertySchemaID("reearth/marker"), + ID: NewID().Ref(), + Schema: MustSchemaID("reearth/marker"), Items: []*InitializerItem{{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), }}, } @@ -26,13 +25,13 @@ func TestInitializer_Clone(t *testing.T) { } func TestInitializer_Property(t *testing.T) { - sid := id.NewSceneID() + sid := NewSceneID() initializer := &Initializer{ - ID: id.NewPropertyID().Ref(), - Schema: id.MustPropertySchemaID("reearth/marker"), + ID: NewID().Ref(), + Schema: MustSchemaID("reearth/marker"), Items: []*InitializerItem{{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), }}, } @@ -52,9 +51,9 @@ func TestInitializer_Property(t *testing.T) { } func TestInitializer_PropertyIncludingEmpty(t *testing.T) { - sid := id.NewSceneID() - psid := id.MustPropertySchemaID("reearth/hoge") - psid2 := id.MustPropertySchemaID("reearth/marker") + sid := NewSceneID() + psid := MustSchemaID("reearth/hoge") + psid2 := MustSchemaID("reearth/marker") // test case 1: should generate an empty property var initializer *Initializer @@ -65,11 +64,11 @@ func TestInitializer_PropertyIncludingEmpty(t *testing.T) { // test case 2: should returns an error when schema does not match initializer = &Initializer{ - ID: id.NewPropertyID().Ref(), + ID: NewID().Ref(), Schema: psid2, Items: []*InitializerItem{{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), }}, } @@ -87,18 +86,18 @@ func 
TestInitializer_PropertyIncludingEmpty(t *testing.T) { func TestInitializerItem_Clone(t *testing.T) { item := &InitializerItem{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), Groups: []*InitializerGroup{{ - ID: id.NewPropertyItemID().Ref(), + ID: NewItemID().Ref(), Fields: []*InitializerField{{ - Field: id.PropertySchemaFieldID("name"), + Field: FieldID("name"), Type: ValueTypeString, Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ - Dataset: id.NewDatasetID().Ref(), - Schema: id.NewDatasetSchemaID(), - Field: id.NewDatasetSchemaFieldID(), + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), }}, }}, }}, @@ -115,10 +114,10 @@ func TestInitializerItem_Clone(t *testing.T) { } func TestInitializerItem_PropertyItem(t *testing.T) { - parent := id.MustPropertySchemaID("reearth/marker") + parent := MustSchemaID("reearth/marker") item := &InitializerItem{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), } expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().MustBuild() @@ -134,12 +133,12 @@ func TestInitializerItem_PropertyItem(t *testing.T) { } func TestInitializerItem_PropertyGroup(t *testing.T) { - parent := id.MustPropertySchemaID("reearth/marker") + parent := MustSchemaID("reearth/marker") item := &InitializerItem{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), Fields: []*InitializerField{{ - Field: id.PropertySchemaFieldID("name"), + Field: FieldID("name"), Type: ValueTypeString, Value: ValueTypeString.ValueFrom("aaa"), }}, @@ -157,12 +156,12 @@ func TestInitializerItem_PropertyGroup(t *testing.T) { } func TestInitializerItem_PropertyGroupList(t *testing.T) { - parent := 
id.MustPropertySchemaID("reearth/marker") + parent := MustSchemaID("reearth/marker") item := &InitializerItem{ - ID: id.NewPropertyItemID().Ref(), - SchemaItem: id.PropertySchemaGroupID("hoge"), + ID: NewItemID().Ref(), + SchemaItem: SchemaGroupID("hoge"), Groups: []*InitializerGroup{{ - ID: id.NewPropertyItemID().Ref(), + ID: NewItemID().Ref(), }}, } @@ -179,15 +178,15 @@ func TestInitializerItem_PropertyGroupList(t *testing.T) { func TestInitializerGroup_Clone(t *testing.T) { item := &InitializerGroup{ - ID: id.NewPropertyItemID().Ref(), + ID: NewItemID().Ref(), Fields: []*InitializerField{{ - Field: id.PropertySchemaFieldID("name"), + Field: FieldID("name"), Type: ValueTypeString, Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ - Dataset: id.NewDatasetID().Ref(), - Schema: id.NewDatasetSchemaID(), - Field: id.NewDatasetSchemaFieldID(), + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), }}, }}, } @@ -201,12 +200,12 @@ func TestInitializerGroup_Clone(t *testing.T) { } func TestInitializerGroup_PropertyGroup(t *testing.T) { - parent := id.MustPropertySchemaID("reearth/marker") - parentItem := id.PropertySchemaGroupID("hoge") + parent := MustSchemaID("reearth/marker") + parentItem := SchemaGroupID("hoge") item := &InitializerGroup{ - ID: id.NewPropertyItemID().Ref(), + ID: NewItemID().Ref(), Fields: []*InitializerField{{ - Field: id.PropertySchemaFieldID("name"), + Field: FieldID("name"), Type: ValueTypeString, Value: ValueTypeString.ValueFrom("aaa"), }}, @@ -229,13 +228,13 @@ func TestInitializerGroup_PropertyGroup(t *testing.T) { func TestInitializerField_Clone(t *testing.T) { field := &InitializerField{ - Field: id.PropertySchemaFieldID("name"), + Field: FieldID("name"), Type: ValueTypeString, Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ - Dataset: id.NewDatasetID().Ref(), - Schema: id.NewDatasetSchemaID(), - Field: id.NewDatasetSchemaFieldID(), + Dataset: 
NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), }}, } cloned := field.Clone() @@ -247,13 +246,13 @@ func TestInitializerField_Clone(t *testing.T) { func TestInitializerField_PropertyField(t *testing.T) { field := &InitializerField{ - Field: id.PropertySchemaFieldID("name"), + Field: FieldID("name"), Type: ValueTypeString, Value: ValueTypeString.ValueFrom("aaa"), Links: []*InitializerLink{{ - Dataset: id.NewDatasetID().Ref(), - Schema: id.NewDatasetSchemaID(), - Field: id.NewDatasetSchemaFieldID(), + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), }}, } @@ -268,9 +267,9 @@ func TestInitializerField_PropertyField(t *testing.T) { func TestInitializerLink_Clone(t *testing.T) { link := &InitializerLink{ - Dataset: id.NewDatasetID().Ref(), - Schema: id.NewDatasetSchemaID(), - Field: id.NewDatasetSchemaFieldID(), + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), } cloned := link.Clone() @@ -280,9 +279,9 @@ func TestInitializerLink_Clone(t *testing.T) { func TestInitializerLink_PropertyLink(t *testing.T) { link := &InitializerLink{ - Dataset: id.NewDatasetID().Ref(), - Schema: id.NewDatasetSchemaID(), - Field: id.NewDatasetSchemaFieldID(), + Dataset: NewDatasetID().Ref(), + Schema: NewDatasetSchemaID(), + Field: NewDatasetFieldID(), } expected := NewLink(*link.Dataset.CopyRef(), link.Schema, link.Field) diff --git a/pkg/property/item.go b/pkg/property/item.go index 65807e0b0..5128be84c 100644 --- a/pkg/property/item.go +++ b/pkg/property/item.go @@ -4,20 +4,19 @@ import ( "context" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) type Item interface { - ID() id.PropertyItemID - IDRef() *id.PropertyItemID - SchemaGroup() id.PropertySchemaGroupID - SchemaGroupRef() *id.PropertySchemaGroupID - Schema() id.PropertySchemaID - SchemaRef() *id.PropertySchemaID + ID() ItemID + IDRef() *ItemID + SchemaGroup() 
SchemaGroupID + SchemaGroupRef() *SchemaGroupID + Schema() SchemaID + SchemaRef() *SchemaID HasLinkedField() bool - CollectDatasets() []id.DatasetID - FieldsByLinkedDataset(id.DatasetSchemaID, id.DatasetID) []*Field - IsDatasetLinked(id.DatasetSchemaID, id.DatasetID) bool + CollectDatasets() []DatasetID + FieldsByLinkedDataset(DatasetSchemaID, DatasetID) []*Field + IsDatasetLinked(DatasetSchemaID, DatasetID) bool IsEmpty() bool Prune() MigrateSchema(context.Context, *Schema, dataset.Loader) @@ -26,9 +25,9 @@ type Item interface { } type itemBase struct { - ID id.PropertyItemID - Schema id.PropertySchemaID - SchemaGroup id.PropertySchemaGroupID + ID ItemID + Schema SchemaID + SchemaGroup SchemaGroupID } func ToGroup(i Item) *Group { diff --git a/pkg/property/item_builder.go b/pkg/property/item_builder.go index 67c8d21e7..fae398b23 100644 --- a/pkg/property/item_builder.go +++ b/pkg/property/item_builder.go @@ -1,7 +1,5 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - type ItemBuilder struct { base itemBase } @@ -18,17 +16,17 @@ func (b *ItemBuilder) GroupList() *GroupListBuilder { return NewGroupList().base(b.base) } -func (b *ItemBuilder) ID(id id.PropertyItemID) *ItemBuilder { +func (b *ItemBuilder) ID(id ItemID) *ItemBuilder { b.base.ID = id return b } func (b *ItemBuilder) NewID() *ItemBuilder { - b.base.ID = id.NewPropertyItemID() + b.base.ID = NewItemID() return b } -func (b *ItemBuilder) Schema(s id.PropertySchemaID, g id.PropertySchemaGroupID) *ItemBuilder { +func (b *ItemBuilder) Schema(s SchemaID, g SchemaGroupID) *ItemBuilder { b.base.Schema = s b.base.SchemaGroup = g return b diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index 8ff6eff35..887f469e2 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -3,17 +3,16 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestInitItemFrom(t *testing.T) { sf := 
NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xx~1.0.0/aa") - propertySchemaField1ID := id.PropertySchemaGroupID("aa") + sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + iid := NewItemID() + propertySchemaID := MustSchemaID("xx~1.0.0/aa") + propertySchemaField1ID := SchemaGroupID("aa") testCases := []struct { Name string SG *SchemaGroup @@ -50,10 +49,10 @@ func TestInitItemFrom(t *testing.T) { } func TestToGroup(t *testing.T) { - iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") - propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaGroup1ID := id.PropertySchemaGroupID("A") + iid := NewItemID() + propertySchemaID := MustSchemaID("xxx~1.1.1/aa") + propertySchemaField1ID := FieldID("a") + propertySchemaGroup1ID := SchemaGroupID("A") il := []Item{ NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). 
Fields([]*Field{ @@ -63,7 +62,7 @@ func TestToGroup(t *testing.T) { Build(), }).MustBuild(), } - p := New().NewID().Scene(id.NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() + p := New().NewID().Scene(NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() g := ToGroup(p.ItemBySchema(propertySchemaGroup1ID)) assert.Equal(t, propertySchemaID, g.Schema()) assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) @@ -71,13 +70,13 @@ func TestToGroup(t *testing.T) { } func TestToGroupList(t *testing.T) { - iid := id.NewPropertyItemID() - propertySchemaID := id.MustPropertySchemaID("xxx~1.1.1/aa") - propertySchemaGroup1ID := id.PropertySchemaGroupID("A") + iid := NewItemID() + propertySchemaID := MustSchemaID("xxx~1.1.1/aa") + propertySchemaGroup1ID := SchemaGroupID("A") il := []Item{ NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID).MustBuild(), } - p := New().NewID().Scene(id.NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() + p := New().NewID().Scene(NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() g := ToGroupList(p.ItemBySchema(propertySchemaGroup1ID)) assert.Equal(t, propertySchemaID, g.Schema()) assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) diff --git a/pkg/property/link.go b/pkg/property/link.go index e1f62a069..4fb8df40e 100644 --- a/pkg/property/link.go +++ b/pkg/property/link.go @@ -4,22 +4,18 @@ import ( "context" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) -// Links _ type Links struct { links []*Link } -// Link _ type Link struct { - dataset *id.DatasetID - schema *id.DatasetSchemaID - field *id.DatasetSchemaFieldID + dataset *DatasetID + schema *DatasetSchemaID + field *DatasetFieldID } -// NewLinks _ func NewLinks(links []*Link) *Links { if links == nil { return nil @@ -34,7 +30,6 @@ func NewLinks(links []*Link) *Links { } } -// Clone _ func (l *Links) Clone() *Links { if l == nil { return nil @@ -44,17 +39,14 @@ func (l *Links) 
Clone() *Links { } } -// IsLinked _ func (l *Links) IsLinked() bool { return l != nil && l.links != nil && len(l.links) > 0 } -// IsLinkedFully _ func (l *Links) IsLinkedFully() bool { return l != nil && l.links != nil && len(l.links) > 0 && len(l.DatasetIDs()) == len(l.links) } -// Len _ func (l *Links) Len() int { if l == nil || l.links == nil { return 0 @@ -62,7 +54,6 @@ func (l *Links) Len() int { return len(l.links) } -// First _ func (l *Links) First() *Link { if l == nil || l.links == nil || len(l.links) == 0 { return nil @@ -70,7 +61,6 @@ func (l *Links) First() *Link { return l.links[0] } -// Last _ func (l *Links) Last() *Link { if l == nil || l.links == nil || len(l.links) == 0 { return nil @@ -78,12 +68,10 @@ func (l *Links) Last() *Link { return l.links[len(l.links)-1] } -// LastValue _ func (l *Links) LastValue(ds *dataset.Dataset) *dataset.Value { return l.Last().Value(ds) } -// Validate _ func (l *Links) Validate(dsm dataset.SchemaMap, dm dataset.Map) bool { if l == nil || l.links == nil { return false @@ -111,11 +99,10 @@ func (l *Links) Validate(dsm dataset.SchemaMap, dm dataset.Map) bool { return true } -// Replace _ func (l *Links) Replace( - dsm map[id.DatasetSchemaID]id.DatasetSchemaID, - dm map[id.DatasetID]id.DatasetID, - fm map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID, + dsm map[DatasetSchemaID]DatasetSchemaID, + dm map[DatasetID]DatasetID, + fm map[DatasetFieldID]DatasetFieldID, ) { if l == nil || l.links == nil { return @@ -168,7 +155,6 @@ func (l *Links) Replace( l.links = links } -// Links _ func (l *Links) Links() []*Link { if l == nil || l.links == nil || len(l.links) == 0 { return nil @@ -181,12 +167,11 @@ func (l *Links) Links() []*Link { return links2 } -// DatasetIDs _ -func (l *Links) DatasetIDs() []id.DatasetID { +func (l *Links) DatasetIDs() []DatasetID { if l == nil { return nil } - datasets := make([]id.DatasetID, 0, len(l.links)) + datasets := make([]DatasetID, 0, len(l.links)) for _, i := range l.links { if i.dataset 
!= nil { datasets = append(datasets, *i.dataset) @@ -197,12 +182,11 @@ func (l *Links) DatasetIDs() []id.DatasetID { return datasets } -// DatasetSchemaIDs _ -func (l *Links) DatasetSchemaIDs() []id.DatasetSchemaID { +func (l *Links) DatasetSchemaIDs() []DatasetSchemaID { if l == nil { return nil } - schemas := make([]id.DatasetSchemaID, 0, len(l.links)) + schemas := make([]DatasetSchemaID, 0, len(l.links)) for _, i := range l.links { if i.schema != nil { schemas = append(schemas, *i.schema) @@ -213,8 +197,7 @@ func (l *Links) DatasetSchemaIDs() []id.DatasetSchemaID { return schemas } -// IsDatasetLinked _ -func (l *Links) IsDatasetLinked(s id.DatasetSchemaID, dsid id.DatasetID) bool { +func (l *Links) IsDatasetLinked(s DatasetSchemaID, dsid DatasetID) bool { if l == nil { return false } @@ -231,12 +214,11 @@ func (l *Links) IsDatasetLinked(s id.DatasetSchemaID, dsid id.DatasetID) bool { return false } -// DatasetSchemaFieldIDs _ -func (l *Links) DatasetSchemaFieldIDs() []id.DatasetSchemaFieldID { +func (l *Links) DatasetSchemaFieldIDs() []DatasetFieldID { if l == nil { return nil } - fields := make([]id.DatasetSchemaFieldID, 0, len(l.links)) + fields := make([]DatasetFieldID, 0, len(l.links)) for _, i := range l.links { if i.field != nil { fields = append(fields, *i.field) @@ -247,8 +229,7 @@ func (l *Links) DatasetSchemaFieldIDs() []id.DatasetSchemaFieldID { return fields } -// HasDataset _ -func (l *Links) HasDataset(did id.DatasetID) bool { +func (l *Links) HasDataset(did DatasetID) bool { if l == nil { return false } @@ -260,8 +241,7 @@ func (l *Links) HasDataset(did id.DatasetID) bool { return false } -// HasDatasetSchema _ -func (l *Links) HasDatasetSchema(dsid id.DatasetSchemaID) bool { +func (l *Links) HasDatasetSchema(dsid DatasetSchemaID) bool { if l == nil { return false } @@ -273,7 +253,7 @@ func (l *Links) HasDatasetSchema(dsid id.DatasetSchemaID) bool { return false } -func (l *Links) HasDatasetOrSchema(dsid id.DatasetSchemaID, did id.DatasetID) bool 
{ +func (l *Links) HasDatasetOrSchema(dsid DatasetSchemaID, did DatasetID) bool { if l == nil { return false } @@ -285,8 +265,7 @@ func (l *Links) HasDatasetOrSchema(dsid id.DatasetSchemaID, did id.DatasetID) bo return false } -// NewLink _ -func NewLink(d id.DatasetID, ds id.DatasetSchemaID, f id.DatasetSchemaFieldID) *Link { +func NewLink(d DatasetID, ds DatasetSchemaID, f DatasetFieldID) *Link { dataset := d schema := ds field := f @@ -297,8 +276,7 @@ func NewLink(d id.DatasetID, ds id.DatasetSchemaID, f id.DatasetSchemaFieldID) * } } -// NewLinkFieldOnly _ -func NewLinkFieldOnly(ds id.DatasetSchemaID, f id.DatasetSchemaFieldID) *Link { +func NewLinkFieldOnly(ds DatasetSchemaID, f DatasetFieldID) *Link { schema := ds field := f return &Link{ @@ -307,8 +285,7 @@ func NewLinkFieldOnly(ds id.DatasetSchemaID, f id.DatasetSchemaFieldID) *Link { } } -// Dataset _ -func (l *Link) Dataset() *id.DatasetID { +func (l *Link) Dataset() *DatasetID { if l == nil || l.dataset == nil { return nil } @@ -316,8 +293,7 @@ func (l *Link) Dataset() *id.DatasetID { return &dataset } -// DatasetSchema _ -func (l *Link) DatasetSchema() *id.DatasetSchemaID { +func (l *Link) DatasetSchema() *DatasetSchemaID { if l == nil || l.schema == nil { return nil } @@ -325,8 +301,7 @@ func (l *Link) DatasetSchema() *id.DatasetSchemaID { return &datasetSchema } -// DatasetSchemaField _ -func (l *Link) DatasetSchemaField() *id.DatasetSchemaFieldID { +func (l *Link) DatasetSchemaField() *DatasetFieldID { if l == nil || l.field == nil { return nil } @@ -334,7 +309,6 @@ func (l *Link) DatasetSchemaField() *id.DatasetSchemaFieldID { return &field } -// Value _ func (l *Link) Value(ds *dataset.Dataset) *dataset.Value { if l == nil || ds == nil || l.dataset == nil || l.field == nil || ds.ID() != *l.dataset { return nil @@ -346,7 +320,6 @@ func (l *Link) Value(ds *dataset.Dataset) *dataset.Value { return f.Value() } -// Validate _ func (l *Link) Validate(dss *dataset.Schema, ds *dataset.Dataset) bool { if l 
== nil || l.field == nil || l.schema == nil || dss == nil { return false @@ -373,12 +346,10 @@ func (l *Link) Validate(dss *dataset.Schema, ds *dataset.Dataset) bool { return true } -// IsEmpty _ func (l *Links) IsEmpty() bool { return l == nil || l.links == nil || len(l.links) == 0 } -// Clone _ func (l *Link) Clone() *Link { if l == nil { return nil @@ -390,8 +361,7 @@ func (l *Link) Clone() *Link { } } -// ApplyDataset _ -func (l *Link) ApplyDataset(ds *id.DatasetID) *Link { +func (l *Link) ApplyDataset(ds *DatasetID) *Link { if l == nil { return nil } @@ -407,8 +377,7 @@ func (l *Link) ApplyDataset(ds *id.DatasetID) *Link { } } -// ApplyDataset _ -func (l *Links) ApplyDataset(ds *id.DatasetID) *Links { +func (l *Links) ApplyDataset(ds *DatasetID) *Links { if l == nil || l.links == nil || len(l.links) == 0 { return nil } diff --git a/pkg/property/link_test.go b/pkg/property/link_test.go index 706b85658..5f2c2c944 100644 --- a/pkg/property/link_test.go +++ b/pkg/property/link_test.go @@ -5,17 +5,16 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestNewLinks(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - dsid2 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - did2 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() - dfid2 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + dsid2 := NewDatasetSchemaID() + did1 := NewDatasetID() + did2 := NewDatasetID() + dfid1 := NewDatasetFieldID() + dfid2 := NewDatasetFieldID() var lin *Links assert.Nil(t, lin) @@ -27,17 +26,17 @@ func TestNewLinks(t *testing.T) { assert.Equal(t, 0, lin.Len()) lin = NewLinks([]*Link{}) - assert.Equal(t, []id.DatasetID{}, lin.DatasetIDs()) - assert.Equal(t, []id.DatasetSchemaID{}, lin.DatasetSchemaIDs()) - assert.Equal(t, []id.DatasetSchemaFieldID{}, lin.DatasetSchemaFieldIDs()) + assert.Equal(t, []DatasetID{}, lin.DatasetIDs()) + assert.Equal(t, 
[]DatasetSchemaID{}, lin.DatasetSchemaIDs()) + assert.Equal(t, []DatasetFieldID{}, lin.DatasetSchemaFieldIDs()) ll := []*Link{ NewLink(did1, dsid1, dfid1), NewLink(did2, dsid2, dfid2), } - dl := []id.DatasetID{did1, did2} - dsl := []id.DatasetSchemaID{dsid1, dsid2} - dsfl := []id.DatasetSchemaFieldID{dfid1, dfid2} + dl := []DatasetID{did1, did2} + dsl := []DatasetSchemaID{dsid1, dsid2} + dsfl := []DatasetFieldID{dfid1, dfid2} lin = NewLinks(ll) assert.NotNil(t, lin) assert.Equal(t, ll, lin.Links()) @@ -50,19 +49,19 @@ func TestNewLinks(t *testing.T) { } func TestLinks_IsDatasetLinked(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - dsid2 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - did2 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + dsid2 := NewDatasetSchemaID() + did1 := NewDatasetID() + did2 := NewDatasetID() + dfid1 := NewDatasetFieldID() ll := []*Link{ NewLink(did1, dsid1, dfid1), } testCases := []struct { Name string - DSS id.DatasetSchemaID - DS id.DatasetID + DSS DatasetSchemaID + DS DatasetID Links *Links Expected bool }{ @@ -100,9 +99,9 @@ func TestLinks_IsDatasetLinked(t *testing.T) { } func TestLinks_Validate(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() testCases := []struct { Name string @@ -155,18 +154,18 @@ func TestLinks_Validate(t *testing.T) { } func TestLinks_Replace(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - dsid2 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - did2 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() - dfid2 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + dsid2 := NewDatasetSchemaID() + did1 := NewDatasetID() + did2 := NewDatasetID() + dfid1 := NewDatasetFieldID() + dfid2 := NewDatasetFieldID() testCases := []struct { Name string - DSM 
map[id.DatasetSchemaID]id.DatasetSchemaID - DM map[id.DatasetID]id.DatasetID - FM map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID + DSM map[DatasetSchemaID]DatasetSchemaID + DM map[DatasetID]DatasetID + FM map[DatasetFieldID]DatasetFieldID Expected, Links *Links }{ { @@ -174,13 +173,13 @@ func TestLinks_Replace(t *testing.T) { }, { Name: "success", - DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{ + DSM: map[DatasetSchemaID]DatasetSchemaID{ dsid1: dsid2, }, - DM: map[id.DatasetID]id.DatasetID{ + DM: map[DatasetID]DatasetID{ did1: did2, }, - FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{ + FM: map[DatasetFieldID]DatasetFieldID{ dfid1: dfid2, }, Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), @@ -188,35 +187,35 @@ func TestLinks_Replace(t *testing.T) { }, { Name: "dataset = nil", - DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{ + DSM: map[DatasetSchemaID]DatasetSchemaID{ dsid1: dsid2, }, - DM: map[id.DatasetID]id.DatasetID{}, - FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{ + DM: map[DatasetID]DatasetID{}, + FM: map[DatasetFieldID]DatasetFieldID{ dfid1: dfid2, }, Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), }, { Name: "datasetschema = nil", - DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{}, - DM: map[id.DatasetID]id.DatasetID{ + DSM: map[DatasetSchemaID]DatasetSchemaID{}, + DM: map[DatasetID]DatasetID{ did1: did2, }, - FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{ + FM: map[DatasetFieldID]DatasetFieldID{ dfid1: dfid2, }, Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), }, { Name: "dataset schema field = nil", - DSM: map[id.DatasetSchemaID]id.DatasetSchemaID{ + DSM: map[DatasetSchemaID]DatasetSchemaID{ dsid1: dsid2, }, - DM: map[id.DatasetID]id.DatasetID{ + DM: map[DatasetID]DatasetID{ did1: did2, }, - FM: map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{}, + FM: map[DatasetFieldID]DatasetFieldID{}, Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), }, } @@ -231,13 +230,13 @@ func TestLinks_Replace(t 
*testing.T) { } func TestLinks_ApplyDataset(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() testCases := []struct { Name string - Input *id.DatasetID + Input *DatasetID Expected, Links *Links }{ { @@ -271,14 +270,14 @@ func TestLinks_ApplyDataset(t *testing.T) { } func TestLink_Dataset(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() testCases := []struct { Name string Link *Link - Expected *id.DatasetID + Expected *DatasetID }{ { Name: "nil link", @@ -304,14 +303,14 @@ func TestLink_Dataset(t *testing.T) { } func TestLink_DatasetSchema(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() testCases := []struct { Name string Link *Link - Expected *id.DatasetSchemaID + Expected *DatasetSchemaID }{ { Name: "nil link", @@ -333,14 +332,14 @@ func TestLink_DatasetSchema(t *testing.T) { } func TestLink_DatasetSchemaField(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() testCases := []struct { Name string Link *Link - Expected *id.DatasetSchemaFieldID + Expected *DatasetFieldID }{ { Name: "nil link", @@ -361,9 +360,9 @@ func TestLink_DatasetSchemaField(t *testing.T) { } func TestLink_Value(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() dsf := []*dataset.Field{ dataset.NewField(dfid1, 
dataset.ValueTypeString.ValueFrom("aaa"), ""), } @@ -398,9 +397,9 @@ func TestLink_Value(t *testing.T) { } } func TestLink_Validate(t *testing.T) { - dsid1 := id.NewDatasetSchemaID() - did1 := id.NewDatasetID() - dfid1 := id.NewDatasetSchemaFieldID() + dsid1 := NewDatasetSchemaID() + did1 := NewDatasetID() + dfid1 := NewDatasetFieldID() testCases := []struct { Name string @@ -476,6 +475,6 @@ func TestLink_Validate(t *testing.T) { func TestLink_Clone(t *testing.T) { var l *Link assert.Nil(t, l.Clone()) - l = NewLink(id.NewDatasetID(), id.NewDatasetSchemaID(), id.NewDatasetSchemaFieldID()) + l = NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) assert.Equal(t, l, l.Clone()) } diff --git a/pkg/property/list.go b/pkg/property/list.go index 9fe0af64f..7441ed756 100644 --- a/pkg/property/list.go +++ b/pkg/property/list.go @@ -1,16 +1,10 @@ package property -import ( - "sort" - - "github.com/reearth/reearth-backend/pkg/id" -) - type List []*Property -func (l List) Schemas() []id.PropertySchemaID { - schemas := make([]id.PropertySchemaID, 0, len(l)) - m := map[id.PropertySchemaID]struct{}{} +func (l List) Schemas() []SchemaID { + schemas := make([]SchemaID, 0, len(l)) + m := map[SchemaID]struct{}{} for _, p := range l { s := p.Schema() if _, ok := m[s]; ok { @@ -27,7 +21,7 @@ func (l List) Map() Map { return m.Add(l...) } -type Map map[id.PropertyID]*Property +type Map map[ID]*Property func MapFrom(properties ...*Property) Map { return Map{}.Add(properties...) @@ -80,14 +74,12 @@ func (m Map) Merge(m2 Map) Map { return m3.Add(m2.List()...) 
} -func (m Map) Keys() []id.PropertyID { - keys := make([]id.PropertyID, 0, len(m)) +func (m Map) Keys() []ID { + keys := make([]ID, 0, len(m)) for k := range m { keys = append(keys, k) } - sort.SliceStable(keys, func(i, j int) bool { - return id.ID(keys[i]).Compare(id.ID(keys[j])) < 0 - }) + sortIDs(keys) return keys } diff --git a/pkg/property/list_test.go b/pkg/property/list_test.go index 8b27eadb9..6ce34a583 100644 --- a/pkg/property/list_test.go +++ b/pkg/property/list_test.go @@ -3,15 +3,14 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) var ( sf = NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg = NewSchemaGroup().ID("aa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - p = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() - p2 = New().NewID().Scene(id.NewSceneID()).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() + sg = NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + p = New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() + p2 = New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() ) func TestMap_Add(t *testing.T) { diff --git a/pkg/property/loader.go b/pkg/property/loader.go index f2bf32801..336161f09 100644 --- a/pkg/property/loader.go +++ b/pkg/property/loader.go @@ -2,16 +2,14 @@ package property import ( "context" - - "github.com/reearth/reearth-backend/pkg/id" ) -type Loader func(context.Context, ...id.PropertyID) (List, error) +type Loader func(context.Context, ...ID) (List, error) -type SchemaLoader func(context.Context, ...id.PropertySchemaID) (SchemaList, error) +type SchemaLoader func(context.Context, ...SchemaID) 
(SchemaList, error) func LoaderFrom(data []*Property) Loader { - return func(ctx context.Context, ids ...id.PropertyID) (List, error) { + return func(ctx context.Context, ids ...ID) (List, error) { res := make([]*Property, 0, len(ids)) for _, i := range ids { found := false @@ -30,8 +28,8 @@ func LoaderFrom(data []*Property) Loader { } } -func LoaderFromMap(data map[id.PropertyID]*Property) Loader { - return func(ctx context.Context, ids ...id.PropertyID) (List, error) { +func LoaderFromMap(data map[ID]*Property) Loader { + return func(ctx context.Context, ids ...ID) (List, error) { res := make([]*Property, 0, len(ids)) for _, i := range ids { if d, ok := data[i]; ok { diff --git a/pkg/property/loader_test.go b/pkg/property/loader_test.go index 8e6630de5..137704362 100644 --- a/pkg/property/loader_test.go +++ b/pkg/property/loader_test.go @@ -4,15 +4,14 @@ import ( "context" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestLoaderFrom(t *testing.T) { - scene := id.NewSceneID() - ps := id.MustPropertySchemaID("xxx~1.1.1/aa") - pid1 := id.NewPropertyID() - pid2 := id.NewPropertyID() + scene := NewSceneID() + ps := MustSchemaID("xxx~1.1.1/aa") + pid1 := NewID() + pid2 := NewID() p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() pl := LoaderFrom([]*Property{ @@ -27,15 +26,15 @@ func TestLoaderFrom(t *testing.T) { } func TestLoaderFromMap(t *testing.T) { - scene := id.NewSceneID() - ps := id.MustPropertySchemaID("xxx~1.1.1/aa") - pid1 := id.NewPropertyID() - pid2 := id.NewPropertyID() - pid3 := id.NewPropertyID() + scene := NewSceneID() + ps := MustSchemaID("xxx~1.1.1/aa") + pid1 := NewID() + pid2 := NewID() + pid3 := NewID() p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() p3 := New().ID(pid3).Scene(scene).Schema(ps).MustBuild() - pl := LoaderFromMap(map[id.PropertyID]*Property{ + pl := 
LoaderFromMap(map[ID]*Property{ pid1: p1, pid2: p2, pid3: p3, diff --git a/pkg/property/merged.go b/pkg/property/merged.go index 6110b4762..25401e446 100644 --- a/pkg/property/merged.go +++ b/pkg/property/merged.go @@ -4,31 +4,30 @@ import ( "context" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) // Merged represents a merged property from two properties type Merged struct { - Original *id.PropertyID - Parent *id.PropertyID - Schema id.PropertySchemaID - LinkedDataset *id.DatasetID + Original *ID + Parent *ID + Schema SchemaID + LinkedDataset *DatasetID Groups []*MergedGroup } // MergedGroup represents a group of Merged type MergedGroup struct { - Original *id.PropertyItemID - Parent *id.PropertyItemID - SchemaGroup id.PropertySchemaGroupID - LinkedDataset *id.DatasetID + Original *ItemID + Parent *ItemID + SchemaGroup SchemaGroupID + LinkedDataset *DatasetID Groups []*MergedGroup Fields []*MergedField } // MergedField represents a field of Merged type MergedField struct { - ID id.PropertySchemaFieldID + ID FieldID Type ValueType Value *Value Links *Links @@ -36,11 +35,11 @@ type MergedField struct { } // Datasets returns associated dataset IDs -func (m *Merged) Datasets() []id.DatasetID { +func (m *Merged) Datasets() []DatasetID { if m == nil { return nil } - ids := []id.DatasetID{} + ids := []DatasetID{} for _, g := range m.Groups { ids = append(ids, g.Datasets()...) 
} @@ -48,11 +47,11 @@ func (m *Merged) Datasets() []id.DatasetID { } // Datasets returns associated dataset IDs -func (m *MergedGroup) Datasets() []id.DatasetID { +func (m *MergedGroup) Datasets() []DatasetID { if m == nil { return nil } - ids := []id.DatasetID{} + ids := []DatasetID{} for _, f := range m.Fields { if f == nil { continue @@ -62,15 +61,14 @@ func (m *MergedGroup) Datasets() []id.DatasetID { return ids } -// MergedMetadata _ type MergedMetadata struct { - Original *id.PropertyID - Parent *id.PropertyID - LinkedDataset *id.DatasetID + Original *ID + Parent *ID + LinkedDataset *DatasetID } // MergedMetadataFrom generates MergedMetadata from single property -func MergedMetadataFrom(p id.PropertyID) MergedMetadata { +func MergedMetadataFrom(p ID) MergedMetadata { p2 := p return MergedMetadata{ Original: &p2, @@ -78,8 +76,8 @@ func MergedMetadataFrom(p id.PropertyID) MergedMetadata { } // Properties returns associated property IDs -func (m MergedMetadata) Properties() []id.PropertyID { - ids := make([]id.PropertyID, 0, 2) +func (m MergedMetadata) Properties() []ID { + ids := make([]ID, 0, 2) if m.Original != nil { ids = append(ids, *m.Original) } @@ -108,12 +106,12 @@ func (f *MergedField) DatasetValue(ctx context.Context, d dataset.GraphLoader) ( } // Merge merges two properties -func Merge(o *Property, p *Property, linked *id.DatasetID) *Merged { +func Merge(o *Property, p *Property, linked *DatasetID) *Merged { if o == nil && p == nil || o != nil && p != nil && !o.Schema().Equal(p.Schema()) { return nil } - var schema id.PropertySchemaID + var schema SchemaID if p != nil { schema = p.Schema() } else if o != nil { @@ -129,12 +127,12 @@ func Merge(o *Property, p *Property, linked *id.DatasetID) *Merged { } } -func mergeItems(i1, i2 []Item, linked *id.DatasetID) []*MergedGroup { +func mergeItems(i1, i2 []Item, linked *DatasetID) []*MergedGroup { if i1 == nil && i2 == nil || len(i1) == 0 && len(i2) == 0 { return nil } - consumed := 
map[id.PropertyItemID]struct{}{} + consumed := map[ItemID]struct{}{} groups := []*MergedGroup{} for _, item := range i1 { @@ -174,7 +172,7 @@ func group(o, p Item) (*Group, *Group) { return ToGroup(o), ToGroup(p) } -func mergeItem(o, p Item, linked *id.DatasetID) *MergedGroup { +func mergeItem(o, p Item, linked *DatasetID) *MergedGroup { if o == nil && p == nil || o != nil && p != nil && o.SchemaGroup() != p.SchemaGroup() { return nil } @@ -215,8 +213,8 @@ func mergeItem(o, p Item, linked *id.DatasetID) *MergedGroup { } } - var oid, pid *id.PropertyItemID - var sg id.PropertySchemaGroupID + var oid, pid *ItemID + var sg SchemaGroupID if o != nil { oid = o.IDRef() sg = o.SchemaGroup() @@ -236,7 +234,7 @@ func mergeItem(o, p Item, linked *id.DatasetID) *MergedGroup { } } -func mergeField(original, parent *Field, linked *id.DatasetID) *MergedField { +func mergeField(original, parent *Field, linked *DatasetID) *MergedField { if original == nil && parent == nil || original != nil && parent != nil && (original.Field() != parent.Field() || original.Type() != parent.Type()) { return nil } @@ -248,7 +246,7 @@ func mergeField(original, parent *Field, linked *id.DatasetID) *MergedField { t = parent.Type() } - var fid id.PropertySchemaFieldID + var fid FieldID if original != nil { fid = original.Field() } else if parent != nil { @@ -286,9 +284,9 @@ func mergeField(original, parent *Field, linked *id.DatasetID) *MergedField { } } -func allFields(args ...[]id.PropertySchemaFieldID) []id.PropertySchemaFieldID { - consumedKeys := map[id.PropertySchemaFieldID]struct{}{} - result := []id.PropertySchemaFieldID{} +func allFields(args ...[]FieldID) []FieldID { + consumedKeys := map[FieldID]struct{}{} + result := []FieldID{} for _, fields := range args { for _, f := range fields { if _, ok := consumedKeys[f]; ok { diff --git a/pkg/property/merged_test.go b/pkg/property/merged_test.go index e21e96064..dbc18a9e6 100644 --- a/pkg/property/merged_test.go +++ b/pkg/property/merged_test.go @@ 
-3,45 +3,43 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/stretchr/testify/assert" ) func TestMerge(t *testing.T) { - ds := id.NewDatasetSchemaID() - df := id.NewDatasetSchemaFieldID() - d := id.NewDatasetID() - d2 := id.NewDatasetID() - opid := id.NewPropertyID() - ppid := id.NewPropertyID() - psid := id.MustPropertySchemaID("hoge~0.1.0/fff") - psid2 := id.MustPropertySchemaID("hoge~0.1.0/aaa") - psgid1 := id.PropertySchemaGroupID("group1") - psgid2 := id.PropertySchemaGroupID("group2") - psgid3 := id.PropertySchemaGroupID("group3") - psgid4 := id.PropertySchemaGroupID("group4") - i1id := id.NewPropertyItemID() - i2id := id.NewPropertyItemID() - i3id := id.NewPropertyItemID() - i4id := id.NewPropertyItemID() - i5id := id.NewPropertyItemID() - i6id := id.NewPropertyItemID() - i7id := id.NewPropertyItemID() - i8id := id.NewPropertyItemID() + ds := NewDatasetSchemaID() + df := NewDatasetFieldID() + d := NewDatasetID() + d2 := NewDatasetID() + opid := NewID() + ppid := NewID() + psid := MustSchemaID("hoge~0.1.0/fff") + psid2 := MustSchemaID("hoge~0.1.0/aaa") + psgid1 := SchemaGroupID("group1") + psgid2 := SchemaGroupID("group2") + psgid3 := SchemaGroupID("group3") + psgid4 := SchemaGroupID("group4") + i1id := NewItemID() + i2id := NewItemID() + i3id := NewItemID() + i4id := NewItemID() + i5id := NewItemID() + i6id := NewItemID() + i7id := NewItemID() + i8id := NewItemID() fields1 := []*Field{ - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("a"))).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("b")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("b"))).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("e")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLink(d2, ds, df)})).Build(), - 
NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeNumber, nil)).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("a"))).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("b")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("b"))).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("e")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLink(d2, ds, df)})).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeNumber, nil)).Build(), } fields2 := []*Field{ - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("1"))).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("c")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("2"))).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("d")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})).Build(), - NewFieldUnsafe().FieldUnsafe(id.PropertySchemaFieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("1"))).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("c")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("2"))).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("d")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})).Build(), + NewFieldUnsafe().FieldUnsafe(FieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).Build(), } groups1 := []*Group{ @@ -64,7 +62,7 @@ func TestMerge(t *testing.T) { NewGroup().ID(i6id).Schema(psid, psgid4).Fields(fields2).MustBuild(), } - sid := id.NewSceneID() + sid := NewSceneID() op := New().ID(opid).Scene(sid).Schema(psid).Items(items1).MustBuild() pp := 
New().NewID().Scene(sid).Schema(psid2).MustBuild() pp2 := New().ID(ppid).Scene(sid).Schema(psid).Items(items2).MustBuild() @@ -89,22 +87,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("e"), + ID: FieldID("e"), Links: NewLinks([]*Link{NewLink(d2, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeNumber, }, }, @@ -118,28 +116,28 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, Overridden: true, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("e"), + ID: FieldID("e"), Links: NewLinks([]*Link{NewLink(d2, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("c"), + ID: FieldID("c"), Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("d"), + ID: FieldID("d"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, @@ -152,22 +150,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("e"), + ID: FieldID("e"), Links: NewLinks([]*Link{NewLink(d2, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeNumber, }, }, 
@@ -179,22 +177,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("c"), + ID: FieldID("c"), Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("d"), + ID: FieldID("d"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeString, }, }, @@ -222,22 +220,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("e"), + ID: FieldID("e"), Links: NewLinks([]*Link{NewLink(d2, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeNumber, }, }, @@ -251,22 +249,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("e"), + ID: FieldID("e"), Links: NewLinks([]*Link{NewLink(d2, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeNumber, }, }, @@ -278,22 +276,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: 
ValueTypeString.ValueFrom("b"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("e"), + ID: FieldID("e"), Links: NewLinks([]*Link{NewLink(d2, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeNumber, }, }, @@ -321,22 +319,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("c"), + ID: FieldID("c"), Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("d"), + ID: FieldID("d"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeString, }, }, @@ -350,22 +348,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("c"), + ID: FieldID("c"), Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("d"), + ID: FieldID("d"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeString, }, }, @@ -377,22 +375,22 @@ func TestMerge(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("1"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("c"), + ID: FieldID("c"), Value: ValueTypeString.ValueFrom("2"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("d"), + ID: FieldID("d"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("f"), + ID: FieldID("f"), Type: ValueTypeString, }, }, diff --git 
a/pkg/property/pointer.go b/pkg/property/pointer.go index 658f0a82e..8164adb90 100644 --- a/pkg/property/pointer.go +++ b/pkg/property/pointer.go @@ -1,16 +1,14 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - // Pointer is a pointer to a field and an item in properties and schemas type Pointer struct { - schemaItem *id.PropertySchemaGroupID - item *id.PropertyItemID - field *id.PropertySchemaFieldID + schemaItem *SchemaGroupID + item *ItemID + field *FieldID } // NewPointer creates a new Pointer. -func NewPointer(sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) *Pointer { +func NewPointer(sg *SchemaGroupID, i *ItemID, f *FieldID) *Pointer { if sg == nil && i == nil && f == nil { return nil } @@ -22,7 +20,7 @@ func NewPointer(sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f *id.Proper } // PointField creates a new Pointer pointing the field in properties. -func PointField(sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { +func PointField(sg *SchemaGroupID, i *ItemID, f FieldID) *Pointer { return &Pointer{ schemaItem: sg.CopyRef(), item: i.CopyRef(), @@ -31,28 +29,28 @@ func PointField(sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f id.Propert } // PointField creates a new Pointer pointing the field in property schemas. -func PointFieldOnly(fid id.PropertySchemaFieldID) *Pointer { +func PointFieldOnly(fid FieldID) *Pointer { return &Pointer{ field: &fid, } } // PointItemBySchema creates a new Pointer pointing the schema item in property schemas. -func PointItemBySchema(sg id.PropertySchemaGroupID) *Pointer { +func PointItemBySchema(sg SchemaGroupID) *Pointer { return &Pointer{ schemaItem: &sg, } } // PointItem creates a new Pointer pointing to the item in properties. 
-func PointItem(i id.PropertyItemID) *Pointer { +func PointItem(i ItemID) *Pointer { return &Pointer{ item: &i, } } // PointFieldBySchemaGroup creates a new Pointer pointing to the field of the schema field in properties. -func PointFieldBySchemaGroup(sg id.PropertySchemaGroupID, f id.PropertySchemaFieldID) *Pointer { +func PointFieldBySchemaGroup(sg SchemaGroupID, f FieldID) *Pointer { return &Pointer{ schemaItem: &sg, field: &f, @@ -60,7 +58,7 @@ func PointFieldBySchemaGroup(sg id.PropertySchemaGroupID, f id.PropertySchemaFie } // PointFieldByItem creates a new Pointer pointing to the field of the item in properties. -func PointFieldByItem(i id.PropertyItemID, f id.PropertySchemaFieldID) *Pointer { +func PointFieldByItem(i ItemID, f FieldID) *Pointer { return &Pointer{ item: &i, field: &f, @@ -78,7 +76,7 @@ func (p *Pointer) Clone() *Pointer { } } -func (p *Pointer) ItemBySchemaGroupAndItem() (i id.PropertySchemaGroupID, i2 id.PropertyItemID, ok bool) { +func (p *Pointer) ItemBySchemaGroupAndItem() (i SchemaGroupID, i2 ItemID, ok bool) { if p == nil || p.schemaItem == nil || p.item == nil { ok = false return @@ -89,7 +87,7 @@ func (p *Pointer) ItemBySchemaGroupAndItem() (i id.PropertySchemaGroupID, i2 id. 
return } -func (p *Pointer) ItemBySchemaGroup() (i id.PropertySchemaGroupID, ok bool) { +func (p *Pointer) ItemBySchemaGroup() (i SchemaGroupID, ok bool) { if p == nil || p.schemaItem == nil { ok = false return @@ -99,7 +97,7 @@ func (p *Pointer) ItemBySchemaGroup() (i id.PropertySchemaGroupID, ok bool) { return } -func (p *Pointer) SchemaGroupAndItem() (i id.PropertySchemaGroupID, i2 id.PropertyItemID, ok bool) { +func (p *Pointer) SchemaGroupAndItem() (i SchemaGroupID, i2 ItemID, ok bool) { ok = false if p == nil { return @@ -115,7 +113,7 @@ func (p *Pointer) SchemaGroupAndItem() (i id.PropertySchemaGroupID, i2 id.Proper return } -func (p *Pointer) Item() (i id.PropertyItemID, ok bool) { +func (p *Pointer) Item() (i ItemID, ok bool) { if p == nil || p.item == nil { ok = false return @@ -125,7 +123,7 @@ func (p *Pointer) Item() (i id.PropertyItemID, ok bool) { return } -func (p *Pointer) ItemRef() *id.PropertyItemID { +func (p *Pointer) ItemRef() *ItemID { if p == nil || p.item == nil { return nil } @@ -133,7 +131,7 @@ func (p *Pointer) ItemRef() *id.PropertyItemID { return &f } -func (p *Pointer) FieldByItem() (i id.PropertyItemID, f id.PropertySchemaFieldID, ok bool) { +func (p *Pointer) FieldByItem() (i ItemID, f FieldID, ok bool) { if p == nil || p.item == nil || p.schemaItem != nil || p.field == nil { ok = false return @@ -144,7 +142,7 @@ func (p *Pointer) FieldByItem() (i id.PropertyItemID, f id.PropertySchemaFieldID return } -func (p *Pointer) FieldBySchemaGroup() (sg id.PropertySchemaGroupID, f id.PropertySchemaFieldID, ok bool) { +func (p *Pointer) FieldBySchemaGroup() (sg SchemaGroupID, f FieldID, ok bool) { if p == nil || p.schemaItem == nil || p.item != nil || p.field == nil { ok = false return @@ -155,7 +153,7 @@ func (p *Pointer) FieldBySchemaGroup() (sg id.PropertySchemaGroupID, f id.Proper return } -func (p *Pointer) Field() (f id.PropertySchemaFieldID, ok bool) { +func (p *Pointer) Field() (f FieldID, ok bool) { if p == nil || p.field == nil { ok 
= false return @@ -165,7 +163,7 @@ func (p *Pointer) Field() (f id.PropertySchemaFieldID, ok bool) { return } -func (p *Pointer) FieldRef() *id.PropertySchemaFieldID { +func (p *Pointer) FieldRef() *FieldID { if p == nil || p.field == nil { return nil } @@ -173,7 +171,7 @@ func (p *Pointer) FieldRef() *id.PropertySchemaFieldID { return &f } -func (p *Pointer) GetAll() (sg *id.PropertySchemaGroupID, i *id.PropertyItemID, f *id.PropertySchemaFieldID) { +func (p *Pointer) GetAll() (sg *SchemaGroupID, i *ItemID, f *FieldID) { if p == nil { return } diff --git a/pkg/property/pointer_test.go b/pkg/property/pointer_test.go index c1225a4e8..f0639c7c2 100644 --- a/pkg/property/pointer_test.go +++ b/pkg/property/pointer_test.go @@ -3,14 +3,13 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestPointer(t *testing.T) { - iid := id.NewPropertyItemID() - sgid := id.PropertySchemaGroupID("foo") - fid := id.PropertySchemaFieldID("hoge") + iid := NewItemID() + sgid := SchemaGroupID("foo") + fid := FieldID("hoge") var p *Pointer var ok bool diff --git a/pkg/property/property.go b/pkg/property/property.go index 1fbbb2eb2..ee6db07a0 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -6,32 +6,31 @@ import ( "fmt" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) type Property struct { - id id.PropertyID - scene id.SceneID - schema id.PropertySchemaID + id ID + scene SceneID + schema SchemaID items []Item } -func (p *Property) ID() id.PropertyID { +func (p *Property) ID() ID { return p.id } -func (p *Property) IDRef() *id.PropertyID { +func (p *Property) IDRef() *ID { if p == nil { return nil } return p.id.Ref() } -func (p *Property) Scene() id.SceneID { +func (p *Property) Scene() SceneID { return p.scene } -func (p *Property) Schema() id.PropertySchemaID { +func (p *Property) Schema() SchemaID { return p.schema } @@ -62,7 +61,7 @@ func (p 
*Property) Items() []Item { return append([]Item{}, p.items...) } -func (p *Property) Item(id id.PropertyItemID) (Item, *GroupList) { +func (p *Property) Item(id ItemID) (Item, *GroupList) { if p == nil { return nil, nil } @@ -80,7 +79,7 @@ func (p *Property) Item(id id.PropertyItemID) (Item, *GroupList) { } // ItemBySchema returns a root item by a schema group ID. -func (p *Property) ItemBySchema(id id.PropertySchemaGroupID) Item { +func (p *Property) ItemBySchema(id SchemaGroupID) Item { if p == nil { return nil } @@ -92,7 +91,7 @@ func (p *Property) ItemBySchema(id id.PropertySchemaGroupID) Item { return nil } -func (p *Property) GroupBySchema(id id.PropertySchemaGroupID) *Group { +func (p *Property) GroupBySchema(id SchemaGroupID) *Group { i := p.ItemBySchema(id) if i == nil { return nil @@ -103,7 +102,7 @@ func (p *Property) GroupBySchema(id id.PropertySchemaGroupID) *Group { return nil } -func (p *Property) GroupListBySchema(id id.PropertySchemaGroupID) *GroupList { +func (p *Property) GroupListBySchema(id SchemaGroupID) *GroupList { i := p.ItemBySchema(id) if i == nil { return nil @@ -161,7 +160,7 @@ func (p *Property) HasLinkedField() bool { return false } -func (p *Property) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) []*Field { +func (p *Property) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { if p == nil { return nil } @@ -172,7 +171,7 @@ func (p *Property) FieldsByLinkedDataset(s id.DatasetSchemaID, i id.DatasetID) [ return res } -func (p *Property) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { +func (p *Property) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { if p == nil { return false } @@ -184,11 +183,11 @@ func (p *Property) IsDatasetLinked(s id.DatasetSchemaID, i id.DatasetID) bool { return false } -func (p *Property) CollectDatasets() []id.DatasetID { +func (p *Property) CollectDatasets() []DatasetID { if p == nil { return nil } - res := []id.DatasetID{} + res := []DatasetID{} for _, f := 
range p.items { res = append(res, f.CollectDatasets()...) @@ -261,7 +260,7 @@ func (p *Property) UpdateValue(ps *Schema, ptr *Pointer, v *Value) (*Field, *Gro return field, gl, g, nil } -func (p *Property) UnlinkAllByDataset(s id.DatasetSchemaID, ds id.DatasetID) { +func (p *Property) UnlinkAllByDataset(s DatasetSchemaID, ds DatasetID) { fields := p.FieldsByLinkedDataset(s, ds) for _, f := range fields { f.Unlink() @@ -419,7 +418,7 @@ func (p *Property) UpdateLinkableValue(s *Schema, v *Value) { } } -func (p *Property) AutoLinkField(s *Schema, v ValueType, d id.DatasetSchemaID, df *id.DatasetSchemaFieldID, ds *id.DatasetID) { +func (p *Property) AutoLinkField(s *Schema, v ValueType, d DatasetSchemaID, df *DatasetFieldID, ds *DatasetID) { if s == nil || p == nil || df == nil { return } diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index 153a1cdc6..5f38c5bb0 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -6,26 +6,25 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestPropertyMigrateSchema(t *testing.T) { - sceneID := id.NewSceneID() - oldSchema, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - newSchema, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test2") - schemaField1ID := id.PropertySchemaFieldID("a") - schemaField2ID := id.PropertySchemaFieldID("b") - schemaField3ID := id.PropertySchemaFieldID("c") - schemaField4ID := id.PropertySchemaFieldID("d") - schemaField5ID := id.PropertySchemaFieldID("e") - schemaField6ID := id.PropertySchemaFieldID("f") - schemaField7ID := id.PropertySchemaFieldID("g") - schemaField8ID := id.PropertySchemaFieldID("h") - schemaGroupID := id.PropertySchemaGroupID("i") - datasetID := id.NewDatasetID() - datasetSchemaID := id.NewDatasetSchemaID() - datasetFieldID := id.NewDatasetSchemaFieldID() + sceneID := NewSceneID() + oldSchema, _ 
:= SchemaIDFrom("hoge~1.0.0/test") + newSchema, _ := SchemaIDFrom("hoge~1.0.0/test2") + schemaField1ID := FieldID("a") + schemaField2ID := FieldID("b") + schemaField3ID := FieldID("c") + schemaField4ID := FieldID("d") + schemaField5ID := FieldID("e") + schemaField6ID := FieldID("f") + schemaField7ID := FieldID("g") + schemaField8ID := FieldID("h") + schemaGroupID := SchemaGroupID("i") + datasetID := NewDatasetID() + datasetSchemaID := NewDatasetSchemaID() + datasetFieldID := NewDatasetFieldID() schemaField1, _ := NewSchemaField().ID(schemaField1ID).Type(ValueTypeString).Build() schemaField2, _ := NewSchemaField().ID(schemaField2ID).Type(ValueTypeNumber).Min(0).Max(100).Build() @@ -115,12 +114,12 @@ func TestPropertyMigrateSchema(t *testing.T) { } func TestGetOrCreateItem(t *testing.T) { - sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sf1id := id.PropertySchemaFieldID("a") - sf2id := id.PropertySchemaFieldID("b") - sg1id := id.PropertySchemaGroupID("c") - sg2id := id.PropertySchemaGroupID("d") + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sf1id := FieldID("a") + sf2id := FieldID("b") + sg1id := SchemaGroupID("c") + sg2id := SchemaGroupID("d") sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() @@ -165,12 +164,12 @@ func TestGetOrCreateItem(t *testing.T) { } func TestGetOrCreateField(t *testing.T) { - sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sf1id := id.PropertySchemaFieldID("a") - sf2id := id.PropertySchemaFieldID("b") - sg1id := id.PropertySchemaGroupID("c") - sg2id := id.PropertySchemaGroupID("d") + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sf1id := FieldID("a") + sf2id := FieldID("b") + sg1id := SchemaGroupID("c") + sg2id := SchemaGroupID("d") sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() sg1 := 
NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() @@ -217,10 +216,10 @@ func TestGetOrCreateField(t *testing.T) { } func TestAddListItem(t *testing.T) { - sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sfid := id.PropertySchemaFieldID("a") - sgid := id.PropertySchemaGroupID("b") + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sfid := FieldID("a") + sgid := SchemaGroupID("b") sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID(sgid).Schema(sid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() ps := NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild() @@ -239,9 +238,9 @@ func TestAddListItem(t *testing.T) { } func TestMoveListItem(t *testing.T) { - sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sgid := id.PropertySchemaGroupID("b") + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sgid := SchemaGroupID("b") g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() @@ -254,9 +253,9 @@ func TestMoveListItem(t *testing.T) { } func TestRemoveListItem(t *testing.T) { - sceneID := id.NewSceneID() - sid, _ := id.PropertySchemaIDFrom("hoge~1.0.0/test") - sgid := id.PropertySchemaGroupID("b") + sceneID := NewSceneID() + sid, _ := SchemaIDFrom("hoge~1.0.0/test") + sgid := SchemaGroupID("b") g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() diff --git a/pkg/property/schema.go b/pkg/property/schema.go index b14eaa043..f6cd4f2b4 100644 --- a/pkg/property/schema.go +++ b/pkg/property/schema.go @@ -1,10 +1,7 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - -// Schema _ type Schema struct 
{ - id id.PropertySchemaID + id SchemaID version int groups []*SchemaGroup linkable LinkableFields @@ -15,11 +12,11 @@ type LinkableFields struct { URL *Pointer } -func (p *Schema) ID() id.PropertySchemaID { +func (p *Schema) ID() SchemaID { return p.id } -func (p *Schema) IDRef() *id.PropertySchemaID { +func (p *Schema) IDRef() *SchemaID { if p == nil { return nil } @@ -41,7 +38,7 @@ func (p *Schema) Fields() []*SchemaField { return fields } -func (p *Schema) Field(id id.PropertySchemaFieldID) *SchemaField { +func (p *Schema) Field(id FieldID) *SchemaField { if p == nil { return nil } @@ -71,7 +68,7 @@ func (p *Schema) Groups() []*SchemaGroup { return append([]*SchemaGroup{}, p.groups...) } -func (p *Schema) Group(id id.PropertySchemaGroupID) *SchemaGroup { +func (p *Schema) Group(id SchemaGroupID) *SchemaGroup { if p == nil { return nil } @@ -83,7 +80,7 @@ func (p *Schema) Group(id id.PropertySchemaGroupID) *SchemaGroup { return nil } -func (p *Schema) GroupByField(id id.PropertySchemaFieldID) *SchemaGroup { +func (p *Schema) GroupByField(id FieldID) *SchemaGroup { if p == nil { return nil } @@ -114,9 +111,9 @@ func (p *Schema) GroupByPointer(ptr *Pointer) *SchemaGroup { return nil } -func (s *Schema) DetectDuplicatedFields() []id.PropertySchemaFieldID { - duplicated := []id.PropertySchemaFieldID{} - ids := map[id.PropertySchemaFieldID]struct{}{} +func (s *Schema) DetectDuplicatedFields() []FieldID { + duplicated := []FieldID{} + ids := map[FieldID]struct{}{} for _, f := range s.Fields() { i := f.ID() if _, ok := ids[i]; ok { diff --git a/pkg/property/schema_builder.go b/pkg/property/schema_builder.go index d62a30fb3..82c01de75 100644 --- a/pkg/property/schema_builder.go +++ b/pkg/property/schema_builder.go @@ -3,8 +3,6 @@ package property import ( "errors" "fmt" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( @@ -26,7 +24,7 @@ func NewSchema() *SchemaBuilder { func (b *SchemaBuilder) Build() (*Schema, error) { if b.p.id.IsNil() { - return nil, 
id.ErrInvalidID + return nil, ErrInvalidID } if d := b.p.DetectDuplicatedFields(); len(d) > 0 { return nil, fmt.Errorf("%s: %s %s", ErrDuplicatedField, b.p.id, d) @@ -45,7 +43,7 @@ func (b *SchemaBuilder) MustBuild() *Schema { return p } -func (b *SchemaBuilder) ID(id id.PropertySchemaID) *SchemaBuilder { +func (b *SchemaBuilder) ID(id SchemaID) *SchemaBuilder { b.p.id = id return b } @@ -57,7 +55,7 @@ func (b *SchemaBuilder) Version(version int) *SchemaBuilder { func (b *SchemaBuilder) Groups(groups []*SchemaGroup) *SchemaBuilder { newGroups := []*SchemaGroup{} - ids := map[id.PropertySchemaGroupID]struct{}{} + ids := map[SchemaGroupID]struct{}{} for _, f := range groups { if f == nil { continue diff --git a/pkg/property/schema_builder_test.go b/pkg/property/schema_builder_test.go index 9d7dfbbef..0e317b705 100644 --- a/pkg/property/schema_builder_test.go +++ b/pkg/property/schema_builder_test.go @@ -5,22 +5,21 @@ import ( "fmt" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestSchemaBuilder_Build(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string - Id id.PropertySchemaID + Id SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields Expected struct { - Id id.PropertySchemaID + Id SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields @@ -29,31 +28,31 @@ func TestSchemaBuilder_Build(t *testing.T) { }{ { Name: "fail: invalid id", - Err: id.ErrInvalidID, + Err: 
ErrInvalidID, }, { Name: "fail: invalid linkable field", - Id: id.MustPropertySchemaID("xx~1.0.0/aa"), - Linkable: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + Id: MustSchemaID("xx~1.0.0/aa"), + Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, Err: ErrInvalidPropertyLinkableField, }, { Name: "fail: duplicated field", - Id: id.MustPropertySchemaID("xx~1.0.0/aa"), + Id: MustSchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg, sg2}, - Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, id.MustPropertySchemaID("xx~1.0.0/aa"), []id.PropertySchemaFieldID{"aa"}), + Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, MustSchemaID("xx~1.0.0/aa"), []FieldID{"aa"}), }, { Name: "success", - Id: id.MustPropertySchemaID("xx~1.0.0/aa"), + Id: MustSchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg}, Version: 1, Expected: struct { - Id id.PropertySchemaID + Id SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields - }{Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + }{Id: MustSchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, }, } @@ -81,17 +80,17 @@ func TestSchemaBuilder_Build(t *testing.T) { func TestSchemaBuilder_MustBuild(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() testCases := []struct { Name string Fails bool - Id id.PropertySchemaID + Id SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields Expected struct { - Id id.PropertySchemaID + Id 
SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields @@ -103,27 +102,27 @@ func TestSchemaBuilder_MustBuild(t *testing.T) { }, { Name: "fail: invalid linkable field", - Id: id.MustPropertySchemaID("xx~1.0.0/aa"), - Linkable: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + Id: MustSchemaID("xx~1.0.0/aa"), + Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, Fails: true, }, { Name: "fail: duplicated field", - Id: id.MustPropertySchemaID("xx~1.0.0/aa"), + Id: MustSchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg, sg2}, Fails: true, }, { Name: "success", - Id: id.MustPropertySchemaID("xx~1.0.0/aa"), + Id: MustSchemaID("xx~1.0.0/aa"), Groups: []*SchemaGroup{sg}, Version: 1, Expected: struct { - Id id.PropertySchemaID + Id SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields - }{Id: id.MustPropertySchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + }{Id: MustSchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, }, } diff --git a/pkg/property/schema_field.go b/pkg/property/schema_field.go index 402be2b0f..825233f3f 100644 --- a/pkg/property/schema_field.go +++ b/pkg/property/schema_field.go @@ -2,11 +2,10 @@ package property import ( "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" ) type SchemaField struct { - id id.PropertySchemaFieldID + id FieldID propertyType ValueType title i18n.String description i18n.String @@ -26,7 +25,7 @@ type SchemaFieldChoice struct { Icon string } -func (p *SchemaField) ID() id.PropertySchemaFieldID { +func (p *SchemaField) ID() FieldID { return p.id } diff --git a/pkg/property/schema_field_builder.go b/pkg/property/schema_field_builder.go index 268b15080..cb1497c67 100644 --- a/pkg/property/schema_field_builder.go +++ b/pkg/property/schema_field_builder.go @@ -4,7 +4,6 @@ import ( "errors" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" 
) type SchemaFieldBuilder struct { @@ -17,7 +16,7 @@ func NewSchemaField() *SchemaFieldBuilder { func (b *SchemaFieldBuilder) Build() (*SchemaField, error) { if b.p.id.String() == "" || b.p.id.String() == "id" { - return nil, id.ErrInvalidID + return nil, ErrInvalidID } if b.p.ui != SchemaFieldUI("") && SchemaFieldUIFrom(string(b.p.ui)) == SchemaFieldUI("") { return nil, errors.New("invalid property schema field ui") @@ -39,7 +38,7 @@ func (b *SchemaFieldBuilder) MustBuild() *SchemaField { return p } -func (b *SchemaFieldBuilder) ID(id id.PropertySchemaFieldID) *SchemaFieldBuilder { +func (b *SchemaFieldBuilder) ID(id FieldID) *SchemaFieldBuilder { b.p.id = id return b } diff --git a/pkg/property/schema_field_builder_test.go b/pkg/property/schema_field_builder_test.go index ef1507466..164a29f39 100644 --- a/pkg/property/schema_field_builder_test.go +++ b/pkg/property/schema_field_builder_test.go @@ -5,14 +5,13 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestSchemaFieldBuilder_Build(t *testing.T) { testCases := []struct { Name string - Id id.PropertySchemaFieldID + Id FieldID PropertyType ValueType Fname i18n.String Description i18n.String @@ -27,7 +26,7 @@ func TestSchemaFieldBuilder_Build(t *testing.T) { Choices []SchemaFieldChoice Cond *Condition Expected struct { - Id id.PropertySchemaFieldID + Id FieldID PropertyType ValueType Fname i18n.String Description i18n.String @@ -44,11 +43,11 @@ func TestSchemaFieldBuilder_Build(t *testing.T) { }{ { Name: "nil field", - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, { Name: "fail min > max", - Id: id.PropertySchemaFieldID("aa"), + Id: FieldID("aa"), Min: 10, Max: 1, Err: errors.New("invalid min and max"), diff --git a/pkg/property/schema_field_ui.go b/pkg/property/schema_field_ui.go index c03f282a9..3b8b40f05 100644 --- a/pkg/property/schema_field_ui.go +++ b/pkg/property/schema_field_ui.go @@ -52,7 +52,6 @@ 
func (p SchemaFieldUI) String() string { return string(p) } -// StringRef _ func (p *SchemaFieldUI) StringRef() *string { if p == nil { return nil diff --git a/pkg/property/schema_group.go b/pkg/property/schema_group.go index 73c0955f2..c1515ecb5 100644 --- a/pkg/property/schema_group.go +++ b/pkg/property/schema_group.go @@ -2,43 +2,42 @@ package property import ( "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" ) // SchemaGroup represents a group of property that has some fields type SchemaGroup struct { - id id.PropertySchemaGroupID - sid id.PropertySchemaID + id SchemaGroupID + sid SchemaID fields []*SchemaField list bool isAvailableIf *Condition title i18n.String - representativeField *id.PropertySchemaFieldID + representativeField *FieldID } // ID returns id -func (s *SchemaGroup) ID() id.PropertySchemaGroupID { +func (s *SchemaGroup) ID() SchemaGroupID { if s == nil { - return id.PropertySchemaGroupID("") + return SchemaGroupID("") } return s.id } -func (s *SchemaGroup) IDRef() *id.PropertySchemaGroupID { +func (s *SchemaGroup) IDRef() *SchemaGroupID { if s == nil { return nil } return s.id.Ref() } -func (s *SchemaGroup) Schema() id.PropertySchemaID { +func (s *SchemaGroup) Schema() SchemaID { if s == nil { - return id.PropertySchemaID{} + return SchemaID{} } return s.sid } -func (s *SchemaGroup) SchemaRef() *id.PropertySchemaID { +func (s *SchemaGroup) SchemaRef() *SchemaID { if s == nil { return nil } @@ -54,7 +53,7 @@ func (s *SchemaGroup) Fields() []*SchemaField { } // Field returns a field whose id is specified -func (s *SchemaGroup) Field(fid id.PropertySchemaFieldID) *SchemaField { +func (s *SchemaGroup) Field(fid FieldID) *SchemaField { if s == nil { return nil } @@ -78,7 +77,7 @@ func (s *SchemaGroup) FieldByPointer(ptr *Pointer) *SchemaField { return s.Field(fid) } -func (s *SchemaGroup) HasField(i id.PropertySchemaFieldID) bool { +func (s *SchemaGroup) HasField(i FieldID) bool { return s.Field(i) != nil } 
@@ -107,7 +106,7 @@ func (s *SchemaGroup) Title() i18n.String { } // RepresentativeFieldID returns the representative field ID of the group -func (s *SchemaGroup) RepresentativeFieldID() *id.PropertySchemaFieldID { +func (s *SchemaGroup) RepresentativeFieldID() *FieldID { if s == nil { return nil } diff --git a/pkg/property/schema_group_builder.go b/pkg/property/schema_group_builder.go index 1b61fe117..7acecdba4 100644 --- a/pkg/property/schema_group_builder.go +++ b/pkg/property/schema_group_builder.go @@ -2,7 +2,6 @@ package property import ( "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" ) type SchemaGroupBuilder struct { @@ -17,7 +16,7 @@ func NewSchemaGroup() *SchemaGroupBuilder { func (b *SchemaGroupBuilder) Build() (*SchemaGroup, error) { if b.p.sid.IsNil() { - return nil, id.ErrInvalidID + return nil, ErrInvalidID } return b.p, nil } @@ -30,19 +29,19 @@ func (b *SchemaGroupBuilder) MustBuild() *SchemaGroup { return p } -func (b *SchemaGroupBuilder) ID(id id.PropertySchemaGroupID) *SchemaGroupBuilder { +func (b *SchemaGroupBuilder) ID(id SchemaGroupID) *SchemaGroupBuilder { b.p.id = id return b } -func (b *SchemaGroupBuilder) Schema(sid id.PropertySchemaID) *SchemaGroupBuilder { +func (b *SchemaGroupBuilder) Schema(sid SchemaID) *SchemaGroupBuilder { b.p.sid = sid return b } func (b *SchemaGroupBuilder) Fields(fields []*SchemaField) *SchemaGroupBuilder { newFields := []*SchemaField{} - ids := map[id.PropertySchemaFieldID]struct{}{} + ids := map[FieldID]struct{}{} for _, f := range fields { if f == nil { continue @@ -72,7 +71,7 @@ func (b *SchemaGroupBuilder) Title(title i18n.String) *SchemaGroupBuilder { return b } -func (b *SchemaGroupBuilder) RepresentativeField(representativeField *id.PropertySchemaFieldID) *SchemaGroupBuilder { +func (b *SchemaGroupBuilder) RepresentativeField(representativeField *FieldID) *SchemaGroupBuilder { b.p.representativeField = representativeField.CopyRef() return b } diff --git 
a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go index 6b88a95b5..cafc7dd5c 100644 --- a/pkg/property/schema_group_builder_test.go +++ b/pkg/property/schema_group_builder_test.go @@ -4,18 +4,17 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestSchemaGroupBuilder_Build(t *testing.T) { - sid := id.MustPropertySchemaID("xx~1.0.0/aa") - gid := id.PropertySchemaGroupID("xx") + sid := MustSchemaID("xx~1.0.0/aa") + gid := SchemaGroupID("xx") sf := NewSchemaField().ID("ff").Type(ValueTypeString).MustBuild() type expected struct { - ID id.PropertySchemaGroupID - Sid id.PropertySchemaID + ID SchemaGroupID + Sid SchemaID Fields []*SchemaField List bool IsAvailableIf *Condition @@ -24,8 +23,8 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { testCases := []struct { Name string - ID id.PropertySchemaGroupID - Sid id.PropertySchemaID + ID SchemaGroupID + Sid SchemaID Fields []*SchemaField List bool IsAvailableIf *Condition @@ -35,7 +34,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { }{ { Name: "fail: invalid id", - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, { Name: "success", diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go index 09b635226..0c1a2eea3 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -4,23 +4,22 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestSchemaGroup(t *testing.T) { - scid := id.PropertySchemaGroupID("aa") - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + scid := SchemaGroupID("aa") + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() testCases := []struct { Name string G *SchemaGroup Expected struct { - GIDRef *id.PropertySchemaGroupID - SIDRef 
*id.PropertySchemaID - GID id.PropertySchemaGroupID - SID id.PropertySchemaID + GIDRef *SchemaGroupID + SIDRef *SchemaID + GID SchemaGroupID + SID SchemaID Fields []*SchemaField Title i18n.String IsAvailableIf *Condition @@ -34,10 +33,10 @@ func TestSchemaGroup(t *testing.T) { Name: "success", G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), Expected: struct { - GIDRef *id.PropertySchemaGroupID - SIDRef *id.PropertySchemaID - GID id.PropertySchemaGroupID - SID id.PropertySchemaID + GIDRef *SchemaGroupID + SIDRef *SchemaID + GID SchemaGroupID + SID SchemaID Fields []*SchemaField Title i18n.String IsAvailableIf *Condition @@ -71,15 +70,15 @@ func TestSchemaGroup(t *testing.T) { } func TestSchemaGroup_Field(t *testing.T) { - scid := id.PropertySchemaGroupID("aa") - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + scid := SchemaGroupID("aa") + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() testCases := []struct { Name string G *SchemaGroup PTR *Pointer - Input id.PropertySchemaFieldID + Input FieldID Expected *SchemaField }{ { @@ -95,8 +94,8 @@ func TestSchemaGroup_Field(t *testing.T) { { Name: "not found", G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), - PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), - Input: id.PropertySchemaFieldID("zz"), + PTR: NewPointer(nil, nil, FieldID("zz").Ref()), + Input: FieldID("zz"), }, } @@ -112,7 +111,7 @@ func TestSchemaGroup_Field(t *testing.T) { } func TestSchemaGroup_SetTitle(t *testing.T) { - sg := NewSchemaGroup().ID(id.PropertySchemaGroupID("aa")).Schema(id.MustPropertySchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID(SchemaGroupID("aa")).Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() sg.SetTitle(i18n.StringFrom("ttt")) assert.Equal(t, i18n.StringFrom("ttt"), sg.Title()) } diff --git a/pkg/property/schema_list.go 
b/pkg/property/schema_list.go index 8666f60c8..7c1bd0621 100644 --- a/pkg/property/schema_list.go +++ b/pkg/property/schema_list.go @@ -1,14 +1,12 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - type SchemaList []*Schema func (l SchemaList) Map() SchemaMap { return SchemaMapFrom(l) } -type SchemaMap map[id.PropertySchemaID]*Schema +type SchemaMap map[SchemaID]*Schema func SchemaMapFrom(l []*Schema) SchemaMap { m := make(SchemaMap, len(l)) diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go index 6c7120433..95b4d2065 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -3,7 +3,6 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -16,7 +15,7 @@ func TestSchema_Nil(t *testing.T) { } func TestSchema_Field(t *testing.T) { - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() @@ -24,7 +23,7 @@ func TestSchema_Field(t *testing.T) { Name string S *Schema PTR *Pointer - Input id.PropertySchemaFieldID + Input FieldID Expected *SchemaField }{ { @@ -40,8 +39,8 @@ func TestSchema_Field(t *testing.T) { { Name: "not found", S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), - Input: id.PropertySchemaFieldID("zz"), + PTR: NewPointer(nil, nil, FieldID("zz").Ref()), + Input: FieldID("zz"), }, } @@ -56,7 +55,7 @@ func TestSchema_Field(t *testing.T) { } func TestSchema_Group(t *testing.T) { - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() @@ -64,8 +63,8 @@ func TestSchema_Group(t *testing.T) { Name 
string S *Schema PTR *Pointer - Input id.PropertySchemaGroupID - InputField id.PropertySchemaFieldID + Input SchemaGroupID + InputField FieldID Expected *SchemaGroup }{ { @@ -82,8 +81,8 @@ func TestSchema_Group(t *testing.T) { { Name: "not found", S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - PTR: NewPointer(nil, nil, id.PropertySchemaFieldID("zz").Ref()), - Input: id.PropertySchemaGroupID("zz"), + PTR: NewPointer(nil, nil, FieldID("zz").Ref()), + Input: SchemaGroupID("zz"), }, } @@ -99,7 +98,7 @@ func TestSchema_Group(t *testing.T) { } func TestSchema_DetectDuplicatedFields(t *testing.T) { - sid := id.MustPropertySchemaID("xx~1.0.0/aa") + sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() @@ -115,13 +114,13 @@ func TestSchema_DetectDuplicatedFields(t *testing.T) { { Name: "invalid: URL", S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - LF: LinkableFields{URL: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + LF: LinkableFields{URL: NewPointer(nil, nil, FieldID("xx").Ref())}, Expected: false, }, { Name: "invalid: Lng", S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - LF: LinkableFields{LatLng: NewPointer(nil, nil, id.PropertySchemaFieldID("xx").Ref())}, + LF: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, Expected: false, }, { diff --git a/pkg/property/sealed.go b/pkg/property/sealed.go index 2279ad0cb..43c3bc52c 100644 --- a/pkg/property/sealed.go +++ b/pkg/property/sealed.go @@ -4,28 +4,27 @@ import ( "context" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" ) type Sealed struct { - Original *id.PropertyID - Parent *id.PropertyID - Schema id.PropertySchemaID - LinkedDataset *id.DatasetID + Original *ID + Parent *ID + Schema SchemaID + LinkedDataset *DatasetID Items []*SealedItem } type SealedItem struct 
{ - Original *id.PropertyItemID - Parent *id.PropertyItemID - SchemaGroup id.PropertySchemaGroupID - LinkedDataset *id.DatasetID + Original *ItemID + Parent *ItemID + SchemaGroup SchemaGroupID + LinkedDataset *DatasetID Fields []*SealedField Groups []*SealedItem } type SealedField struct { - ID id.PropertySchemaFieldID + ID FieldID Val *ValueAndDatasetValue } @@ -164,7 +163,7 @@ func sealedFieldsInterface(fields []*SealedField) map[string]interface{} { return item } -func (s *Sealed) Item(i id.PropertyItemID) *SealedItem { +func (s *Sealed) Item(i ItemID) *SealedItem { if s == nil { return nil } @@ -192,7 +191,7 @@ func (s *Sealed) ItemBy(ptr *Pointer) *SealedItem { return nil } -func (s *Sealed) ItemBySchemaGroup(i id.PropertySchemaGroupID) *SealedItem { +func (s *Sealed) ItemBySchemaGroup(i SchemaGroupID) *SealedItem { if s == nil { return nil } @@ -204,7 +203,7 @@ func (s *Sealed) ItemBySchemaGroup(i id.PropertySchemaGroupID) *SealedItem { return nil } -func (s *Sealed) Field(id id.PropertySchemaFieldID) *SealedField { +func (s *Sealed) Field(id FieldID) *SealedField { if s == nil { return nil } @@ -232,14 +231,14 @@ func (s *Sealed) FieldBy(ptr *Pointer) *SealedField { return nil } -func (s *SealedItem) Match(id id.PropertyItemID) bool { +func (s *SealedItem) Match(id ItemID) bool { if s == nil { return false } return s.Original != nil && *s.Original == id || s.Parent != nil && *s.Parent == id } -func (s *SealedItem) Group(id id.PropertyItemID) *SealedItem { +func (s *SealedItem) Group(id ItemID) *SealedItem { if s == nil { return nil } @@ -251,7 +250,7 @@ func (s *SealedItem) Group(id id.PropertyItemID) *SealedItem { return nil } -func (s *SealedItem) Field(id id.PropertySchemaFieldID) *SealedField { +func (s *SealedItem) Field(id FieldID) *SealedField { if s == nil { return nil } diff --git a/pkg/property/sealed_test.go b/pkg/property/sealed_test.go index 209856c86..b71907d86 100644 --- a/pkg/property/sealed_test.go +++ b/pkg/property/sealed_test.go @@ -5,25 
+5,24 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) var ( - sid = id.NewSceneID() - ds = id.NewDatasetSchemaID() - df = id.NewDatasetSchemaFieldID() - d = id.NewDatasetID() - opid = id.NewPropertyID() - ppid = id.NewPropertyID() - psid = id.MustPropertySchemaID("hoge~0.1.0/fff") - psiid1 = id.PropertySchemaGroupID("x") - psiid2 = id.PropertySchemaGroupID("y") - i1id = id.NewPropertyItemID() - i2id = id.NewPropertyItemID() - i3id = id.NewPropertyItemID() - i4id = id.NewPropertyItemID() - i5id = id.NewPropertyItemID() + sid = NewSceneID() + ds = NewDatasetSchemaID() + df = NewDatasetFieldID() + d = NewDatasetID() + opid = NewID() + ppid = NewID() + psid = MustSchemaID("hoge~0.1.0/fff") + psiid1 = SchemaGroupID("x") + psiid2 = SchemaGroupID("y") + i1id = NewItemID() + i2id = NewItemID() + i3id = NewItemID() + i4id = NewItemID() + i5id = NewItemID() ) func TestSeal(t *testing.T) { @@ -57,12 +56,12 @@ func TestSeal(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, @@ -78,12 +77,12 @@ func TestSeal(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("aaa"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("aaa"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, @@ -92,7 +91,7 @@ func TestSeal(t *testing.T) { }, }, }, - DSGL: dataset.GraphLoaderFromMap(map[id.DatasetID]*dataset.Dataset{ + DSGL: dataset.GraphLoaderFromMap(map[DatasetID]*dataset.Dataset{ d: 
dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), }).MustBuild(), @@ -177,8 +176,8 @@ func TestSeal(t *testing.T) { } func TestSealProperty(t *testing.T) { - pid := id.NewPropertyID() - ps := id.MustPropertySchemaID("xxx~1.1.1/aa") + pid := NewID() + ps := MustSchemaID("xxx~1.1.1/aa") testCases := []struct { Name string Input *Property @@ -189,7 +188,7 @@ func TestSealProperty(t *testing.T) { }, { Name: "seal property", - Input: New().ID(pid).Scene(id.NewSceneID()).Schema(ps).MustBuild(), + Input: New().ID(pid).Scene(NewSceneID()).Schema(ps).MustBuild(), Expected: &Sealed{ Original: pid.Ref(), Parent: nil, @@ -236,12 +235,12 @@ func TestSealedItemFrom(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("a"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("b"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, @@ -250,7 +249,7 @@ func TestSealedItemFrom(t *testing.T) { }, }, }, - DSGL: dataset.GraphLoaderFromMap(map[id.DatasetID]*dataset.Dataset{ + DSGL: dataset.GraphLoaderFromMap(map[DatasetID]*dataset.Dataset{ d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), }).MustBuild(), @@ -303,12 +302,12 @@ func TestSealedItemFrom(t *testing.T) { LinkedDataset: &d, Fields: []*MergedField{ { - ID: id.PropertySchemaFieldID("a"), + ID: FieldID("a"), Value: ValueTypeString.ValueFrom("aaa"), Type: ValueTypeString, }, { - ID: id.PropertySchemaFieldID("b"), + ID: FieldID("b"), Value: ValueTypeString.ValueFrom("aaa"), Links: NewLinks([]*Link{NewLink(d, ds, df)}), Type: ValueTypeString, @@ -317,7 +316,7 @@ func TestSealedItemFrom(t *testing.T) { }, }, }, - DSGL: dataset.GraphLoaderFromMap(map[id.DatasetID]*dataset.Dataset{ + 
DSGL: dataset.GraphLoaderFromMap(map[DatasetID]*dataset.Dataset{ d: dataset.New().Scene(sid).ID(d).Schema(ds).Fields([]*dataset.Field{ dataset.NewField(df, dataset.ValueTypeString.ValueFrom("bbb"), ""), }).MustBuild(), @@ -475,7 +474,7 @@ func TestSealedItem_Match(t *testing.T) { testCases := []struct { Name string SI *SealedItem - Input id.PropertyItemID + Input ItemID Expected bool }{ { @@ -605,7 +604,7 @@ func TestSealed_ItemBy(t *testing.T) { }, }, }, - Input: NewPointer(psiid1.Ref(), i1id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Input: NewPointer(psiid1.Ref(), i1id.Ref(), FieldID("a").Ref()), Expected: &SealedItem{ SchemaGroup: psiid1, Original: &i1id, @@ -703,7 +702,7 @@ func TestSealed_ItemBy(t *testing.T) { }, }, }, - Input: NewPointer(nil, i1id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Input: NewPointer(nil, i1id.Ref(), FieldID("a").Ref()), Expected: &SealedItem{ SchemaGroup: psiid1, Original: &i1id, @@ -801,7 +800,7 @@ func TestSealed_ItemBy(t *testing.T) { }, }, }, - Input: NewPointer(nil, nil, id.PropertySchemaFieldID("a").Ref()), + Input: NewPointer(nil, nil, FieldID("a").Ref()), Expected: nil, }, } @@ -892,7 +891,7 @@ func TestSealed_FieldBy(t *testing.T) { }, }, }, - Input: NewPointer(psiid1.Ref(), i1id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Input: NewPointer(psiid1.Ref(), i1id.Ref(), FieldID("a").Ref()), Expected: &SealedField{ ID: "a", Val: NewValueAndDatasetValue( @@ -967,7 +966,7 @@ func TestSealed_FieldBy(t *testing.T) { }, }, }, - Input: NewPointer(nil, i3id.Ref(), id.PropertySchemaFieldID("a").Ref()), + Input: NewPointer(nil, i3id.Ref(), FieldID("a").Ref()), Expected: &SealedField{ ID: "a", Val: NewValueAndDatasetValue( @@ -1042,7 +1041,7 @@ func TestSealed_FieldBy(t *testing.T) { }, }, }, - Input: NewPointer(nil, nil, id.PropertySchemaFieldID("a").Ref()), + Input: NewPointer(nil, nil, FieldID("a").Ref()), Expected: &SealedField{ ID: "a", Val: NewValueAndDatasetValue( diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go 
index 7b15de0ef..8124303ec 100644 --- a/pkg/scene/builder.go +++ b/pkg/scene/builder.go @@ -2,8 +2,6 @@ package scene import ( "time" - - "github.com/reearth/reearth-backend/pkg/id" ) type Builder struct { @@ -14,16 +12,15 @@ func New() *Builder { return &Builder{scene: &Scene{}} } -// Build _ func (b *Builder) Build() (*Scene, error) { - if b.scene.id.ID().IsNil() { - return nil, id.ErrInvalidID + if b.scene.id.IsNil() { + return nil, ErrInvalidID } if b.scene.team.ID().IsNil() { - return nil, id.ErrInvalidID + return nil, ErrInvalidID } if b.scene.rootLayer.ID().IsNil() { - return nil, id.ErrInvalidID + return nil, ErrInvalidID } if b.scene.widgetSystem == nil { b.scene.widgetSystem = NewWidgetSystem(nil) @@ -48,22 +45,22 @@ func (b *Builder) MustBuild() *Scene { return r } -func (b *Builder) ID(id id.SceneID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.scene.id = id return b } func (b *Builder) NewID() *Builder { - b.scene.id = id.SceneID(id.New()) + b.scene.id = NewID() return b } -func (b *Builder) Project(prj id.ProjectID) *Builder { +func (b *Builder) Project(prj ProjectID) *Builder { b.scene.project = prj return b } -func (b *Builder) Team(team id.TeamID) *Builder { +func (b *Builder) Team(team TeamID) *Builder { b.scene.team = team return b } @@ -84,7 +81,7 @@ func (b *Builder) WidgetAlignSystem(widgetAlignSystem *WidgetAlignSystem) *Build return b } -func (b *Builder) RootLayer(rootLayer id.LayerID) *Builder { +func (b *Builder) RootLayer(rootLayer LayerID) *Builder { b.scene.rootLayer = rootLayer return b } @@ -95,7 +92,7 @@ func (b *Builder) PluginSystem(pluginSystem *PluginSystem) *Builder { return b } -func (b *Builder) Property(p id.PropertyID) *Builder { +func (b *Builder) Property(p PropertyID) *Builder { b.scene.property = p return b } diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index dc2407994..d081abb51 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -6,7 
+6,6 @@ import ( "time" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" @@ -15,31 +14,31 @@ import ( func TestSceneBuilder(t *testing.T) { // ids - sceneID := id.NewSceneID() - scenePropertyID := id.NewPropertyID() - propertySchemaID := id.MustPropertySchemaID("hoge~0.1.0/foobar") - pluginID := id.MustPluginID("hoge~0.1.0") - pluginExtension1ID := id.PluginExtensionID("ext") - pluginExtension2ID := id.PluginExtensionID("ext2") - propertySchemaField1ID := id.PropertySchemaFieldID("a") - propertySchemaField2ID := id.PropertySchemaFieldID("b") - propertySchemaField3ID := id.PropertySchemaFieldID("c") - propertySchemaGroup1ID := id.PropertySchemaGroupID("A") - propertySchemaGroup2ID := id.PropertySchemaGroupID("B") - propertyItemID1 := id.NewPropertyItemID() - propertyItemID2 := id.NewPropertyItemID() + sceneID := scene.NewID() + scenePropertyID := property.NewID() + propertySchemaID := property.MustSchemaID("hoge~0.1.0/foobar") + pluginID := layer.MustPluginID("hoge~0.1.0") + pluginExtension1ID := layer.PluginExtensionID("ext") + pluginExtension2ID := layer.PluginExtensionID("ext2") + propertySchemaField1ID := property.FieldID("a") + propertySchemaField2ID := property.FieldID("b") + propertySchemaField3ID := property.FieldID("c") + propertySchemaGroup1ID := property.SchemaGroupID("A") + propertySchemaGroup2ID := property.SchemaGroupID("B") + propertyItemID1 := property.NewItemID() + propertyItemID2 := property.NewItemID() // datasets - dss1id := id.NewDatasetSchemaID() - dss2id := id.NewDatasetSchemaID() - dss3id := id.NewDatasetSchemaID() - ds1id := id.NewDatasetID() - ds2id := id.NewDatasetID() - ds3id := id.NewDatasetID() - ds1f1 := id.NewDatasetSchemaFieldID() - ds1f2 := id.NewDatasetSchemaFieldID() - ds2f1 := id.NewDatasetSchemaFieldID() - ds3f1 := 
id.NewDatasetSchemaFieldID() + dss1id := dataset.NewSchemaID() + dss2id := dataset.NewSchemaID() + dss3id := dataset.NewSchemaID() + ds1id := dataset.NewID() + ds2id := dataset.NewID() + ds3id := dataset.NewID() + ds1f1 := dataset.NewFieldID() + ds1f2 := dataset.NewFieldID() + ds2f1 := dataset.NewFieldID() + ds3f1 := dataset.NewFieldID() ds1 := dataset.New().ID(ds1id).Fields([]*dataset.Field{ dataset.NewField( ds1f1, @@ -149,7 +148,7 @@ func TestSceneBuilder(t *testing.T) { Extension(&pluginExtension1ID). Property(layer2p.IDRef()). Infobox(layer2ib). - Layers(layer.NewIDList([]id.LayerID{layer21.ID()})). + Layers(layer.NewIDList([]layer.ID{layer21.ID()})). MustBuild() // layer3: full-linked layer item with infobox @@ -245,7 +244,7 @@ func TestSceneBuilder(t *testing.T) { Property(layer4p.IDRef()). Infobox(layer4ib). LinkedDatasetSchema(&dss3id). - Layers(layer.NewIDList([]id.LayerID{layer41.ID()})). + Layers(layer.NewIDList([]layer.ID{layer41.ID()})). MustBuild() // layer5: linked layer group and children with overrided property @@ -309,7 +308,7 @@ func TestSceneBuilder(t *testing.T) { Extension(&pluginExtension1ID). Property(layer5p.IDRef()). LinkedDatasetSchema(&dss1id). - Layers(layer.NewIDList([]id.LayerID{layer51.ID()})). + Layers(layer.NewIDList([]layer.ID{layer51.ID()})). MustBuild() layer6p := property.New(). NewID(). @@ -343,7 +342,7 @@ func TestSceneBuilder(t *testing.T) { MustBuild() // root layer - rootLayer := layer.NewGroup().NewID().Scene(sceneID).Layers(layer.NewIDList([]id.LayerID{ + rootLayer := layer.NewGroup().NewID().Scene(sceneID).Layers(layer.NewIDList([]layer.ID{ layer1.ID(), layer2.ID(), layer3.ID(), @@ -367,8 +366,8 @@ func TestSceneBuilder(t *testing.T) { }). 
MustBuild() - sceneWidgetID1 := id.NewWidgetID() - sceneWidgetID2 := id.NewWidgetID() + sceneWidgetID1 := scene.NewWidgetID() + sceneWidgetID2 := scene.NewWidgetID() sceneWidget1 := scene.MustNewWidget( sceneWidgetID1, pluginID, @@ -390,8 +389,8 @@ func TestSceneBuilder(t *testing.T) { scene := scene.New(). ID(sceneID). - Project(id.NewProjectID()). - Team(id.NewTeamID()). + Project(scene.NewProjectID()). + Team(scene.NewTeamID()). Property(scenep.ID()). WidgetSystem(scene.NewWidgetSystem([]*scene.Widget{ sceneWidget1, sceneWidget2, diff --git a/pkg/scene/builder/encoder_test.go b/pkg/scene/builder/encoder_test.go index 0bb83d027..798b04138 100644 --- a/pkg/scene/builder/encoder_test.go +++ b/pkg/scene/builder/encoder_test.go @@ -3,10 +3,10 @@ package builder import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" "github.com/stretchr/testify/assert" ) @@ -88,18 +88,18 @@ func TestEncoder_Encode(t *testing.T) { } func TestEncoder_Layers(t *testing.T) { - lid := id.MustLayerID(id.New().String()) - sid := id.MustSceneID(id.New().String()) - pid := id.MustPropertyID(id.New().String()) - ex := id.PluginExtensionID("marker") - iid := id.MustPropertyItemID(id.New().String()) + lid := layer.NewID() + sid := scene.NewID() + pid := property.NewID() + ex := layer.PluginExtensionID("marker") + iid := property.NewItemID() v1 := property.LatLng{ Lat: 4.4, Lng: 53.4, } f1 := property.SealedField{ - ID: id.PropertySchemaFieldID("location"), + ID: property.FieldID("location"), Val: property.NewValueAndDatasetValue( property.ValueTypeLatLng, nil, @@ -111,7 +111,7 @@ func TestEncoder_Layers(t *testing.T) { item1 := property.SealedItem{ Original: &iid, Parent: nil, - SchemaGroup: id.PropertySchemaGroupID("default"), + SchemaGroup: property.SchemaGroupID("default"), 
LinkedDataset: nil, Fields: fl1, Groups: nil, @@ -132,7 +132,7 @@ func TestEncoder_Layers(t *testing.T) { Scene: sid, Property: nil, Infobox: nil, - PluginID: &id.OfficialPluginID, + PluginID: &layer.OfficialPluginID, ExtensionID: &ex, }, Property: &sp, @@ -156,7 +156,7 @@ func TestEncoder_Layers(t *testing.T) { SL: sealed, Expected: &layerJSON{ ID: lid.String(), - PluginID: id.OfficialPluginID.StringRef(), + PluginID: layer.OfficialPluginID.StringRef(), ExtensionID: ex.StringRef(), Name: "test", Property: map[string]interface{}{"default": map[string]interface{}{"location": property.LatLng{Lat: 4.4, Lng: 53.4}}}, diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go index 973598bb3..d517afb41 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -4,7 +4,6 @@ import ( "context" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" ) @@ -85,7 +84,7 @@ func (b *Builder) property(ctx context.Context, p *property.Property) propertyJS return property.SealProperty(ctx, p).Interface() } -func findProperty(pp []*property.Property, i id.PropertyID) *property.Property { +func findProperty(pp []*property.Property, i property.ID) *property.Property { for _, p := range pp { if p.ID() == i { return p @@ -94,7 +93,7 @@ func findProperty(pp []*property.Property, i id.PropertyID) *property.Property { return nil } -func toString(wids []id.WidgetID) []string { +func toString(wids []scene.WidgetID) []string { if wids == nil { return nil } diff --git a/pkg/scene/builder/scene_test.go b/pkg/scene/builder/scene_test.go index 438206b8e..f33f0b1b0 100644 --- a/pkg/scene/builder/scene_test.go +++ b/pkg/scene/builder/scene_test.go @@ -3,16 +3,15 @@ package builder import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" "github.com/stretchr/testify/assert" ) 
func TestScene_FindProperty(t *testing.T) { - p1 := id.NewPropertyID() - sid := id.NewSceneID() - scid := id.MustPropertySchemaID("xx~1.0.0/aa") + p1 := property.NewID() + sid := scene.NewID() + scid := property.MustSchemaID("xx~1.0.0/aa") pl := []*property.Property{ property.New().NewID().Scene(sid).Schema(scid).MustBuild(), property.New().ID(p1).Scene(sid).Schema(scid).MustBuild(), @@ -20,7 +19,7 @@ func TestScene_FindProperty(t *testing.T) { testCases := []struct { Name string PL []*property.Property - Input id.PropertyID + Input property.ID Expected *property.Property }{ { @@ -32,7 +31,7 @@ func TestScene_FindProperty(t *testing.T) { { Name: " NotFound", PL: pl, - Input: id.NewPropertyID(), + Input: property.NewID(), Expected: nil, }, } @@ -47,22 +46,22 @@ func TestScene_FindProperty(t *testing.T) { } func TestScene_ToString(t *testing.T) { - wid := id.NewWidgetID() + wid := scene.NewWidgetID() widS := wid.String() - wid2 := id.NewWidgetID() + wid2 := scene.NewWidgetID() wid2S := wid2.String() - wid3 := id.NewWidgetID() + wid3 := scene.NewWidgetID() wid3S := wid3.String() - wids := []id.WidgetID{wid, wid2, wid3} + wids := []scene.WidgetID{wid, wid2, wid3} widsString := []string{widS, wid2S, wid3S} testCases := []struct { Name string - Input []id.WidgetID + Input []scene.WidgetID Expected []string }{ { - Name: "Convert a slice of id.WidgetID to a slice of strings", + Name: "Convert a slice of scene.WidgetID to a slice of strings", Input: wids, Expected: widsString, }, @@ -83,7 +82,7 @@ func TestScene_ToString(t *testing.T) { } func TestBuildWidgetAlignSystem(t *testing.T) { - wid := id.NewWidgetID() + wid := scene.NewWidgetID() was := scene.NewWidgetAlignSystem() was.Area(scene.WidgetLocation{ Zone: scene.WidgetZoneInner, diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go index 8dadac455..81a92427f 100644 --- a/pkg/scene/builder_test.go +++ b/pkg/scene/builder_test.go @@ -5,103 +5,100 @@ import ( "testing" "time" - 
"github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestBuilder_IDs(t *testing.T) { - tid := id.NewTeamID() - lid := id.NewLayerID() + tid := NewTeamID() + lid := NewLayerID() b := New().NewID().RootLayer(lid).Team(tid).MustBuild() assert.NotNil(t, b.ID()) assert.Equal(t, tid, b.Team()) assert.Equal(t, lid, b.RootLayer()) - sid := id.NewSceneID() + sid := NewID() b2 := New().ID(sid).RootLayer(lid).Team(tid).MustBuild() assert.Equal(t, sid, b2.ID()) } func TestBuilder_UpdatedAt(t *testing.T) { ti := time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC) - b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).UpdatedAt(ti).MustBuild() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).UpdatedAt(ti).MustBuild() assert.Equal(t, ti, b.UpdatedAt()) } func TestBuilder_Property(t *testing.T) { - pid := id.NewPropertyID() - b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).Property(pid).MustBuild() + pid := NewPropertyID() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Property(pid).MustBuild() assert.Equal(t, pid, b.Property()) } func TestBuilder_PluginSystem(t *testing.T) { ps := NewPluginSystem([]*Plugin{ - NewPlugin(id.OfficialPluginID, id.NewPropertyID().Ref()), + NewPlugin(OfficialPluginID, NewPropertyID().Ref()), }) - b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).PluginSystem(ps).MustBuild() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).PluginSystem(ps).MustBuild() assert.Equal(t, ps, b.PluginSystem()) } func TestBuilder_Project(t *testing.T) { - pid := id.NewProjectID() - b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).Project(pid).MustBuild() + pid := NewProjectID() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Project(pid).MustBuild() assert.Equal(t, pid, b.Project()) } func TestBuilder_WidgetSystem(t *testing.T) { - nid := id.New() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetID(nid), 
id.OfficialPluginID, "xxx", id.NewPropertyID(), true, false), + MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", NewPropertyID(), true, false), }) - b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).WidgetSystem(ws).MustBuild() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).WidgetSystem(ws).MustBuild() assert.Equal(t, ws, b.WidgetSystem()) } func TestBuilder_WidgetAlignSystem(t *testing.T) { was := NewWidgetAlignSystem() - b := New().NewID().RootLayer(id.NewLayerID()).Team(id.NewTeamID()).WidgetAlignSystem(was).MustBuild() + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).WidgetAlignSystem(was).MustBuild() assert.Equal(t, was, b.WidgetAlignSystem()) } func TestBuilder_Build(t *testing.T) { - tid := id.NewTeamID() - sid := id.NewSceneID() - pid := id.NewProjectID() - ppid := id.NewPropertyID() - lid := id.NewLayerID() - nid := id.New() + tid := NewTeamID() + sid := NewID() + pid := NewProjectID() + ppid := NewPropertyID() + lid := NewLayerID() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true, false), + MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), }) was := NewWidgetAlignSystem() ps := NewPluginSystem([]*Plugin{ - NewPlugin(id.OfficialPluginID, ppid.Ref()), + NewPlugin(OfficialPluginID, ppid.Ref()), }) testCases := []struct { Name string - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID + Id ID + Project ProjectID + Team TeamID + RootLayer LayerID WidgetSystem *WidgetSystem WidgetAlignSystem *WidgetAlignSystem PluginSystem *PluginSystem UpdatedAt time.Time - Property id.PropertyID + Property PropertyID Expected struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID + Id ID + Project ProjectID + Team TeamID + RootLayer LayerID WidgetSystem *WidgetSystem WidgetAlignSystem *WidgetAlignSystem PluginSystem *PluginSystem UpdatedAt time.Time - Property id.PropertyID + Property 
PropertyID } err error }{ { Name: "fail nil scene id", - Id: id.SceneID{}, + Id: ID{}, Project: pid, Team: tid, RootLayer: lid, @@ -110,33 +107,33 @@ func TestBuilder_Build(t *testing.T) { PluginSystem: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { Name: "fail nil team id", Id: sid, Project: pid, - Team: id.TeamID{}, + Team: TeamID{}, RootLayer: lid, WidgetSystem: ws, WidgetAlignSystem: was, PluginSystem: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { Name: "fail nil root layer id", Id: sid, Project: pid, Team: tid, - RootLayer: id.LayerID{}, + RootLayer: LayerID{}, WidgetSystem: ws, WidgetAlignSystem: was, PluginSystem: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { Name: "success build new scene", @@ -150,15 +147,15 @@ func TestBuilder_Build(t *testing.T) { UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, Expected: struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID + Id ID + Project ProjectID + Team TeamID + RootLayer LayerID WidgetSystem *WidgetSystem WidgetAlignSystem *WidgetAlignSystem PluginSystem *PluginSystem UpdatedAt time.Time - Property id.PropertyID + Property PropertyID }{ Id: sid, Project: pid, @@ -205,46 +202,45 @@ func TestBuilder_Build(t *testing.T) { } func TestBuilder_MustBuild(t *testing.T) { - tid := id.NewTeamID() - sid := id.NewSceneID() - pid := id.NewProjectID() - ppid := id.NewPropertyID() - lid := id.NewLayerID() - nid := id.New() + tid := NewTeamID() + sid := NewID() + pid := NewProjectID() + ppid := NewPropertyID() + lid := NewLayerID() ws := NewWidgetSystem([]*Widget{ - MustNewWidget(id.WidgetID(nid), id.OfficialPluginID, "xxx", ppid, true, false), + MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), }) was := 
NewWidgetAlignSystem() ps := NewPluginSystem([]*Plugin{ - NewPlugin(id.OfficialPluginID, ppid.Ref()), + NewPlugin(OfficialPluginID, ppid.Ref()), }) testCases := []struct { Name string - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID + Id ID + Project ProjectID + Team TeamID + RootLayer LayerID WidgetSystem *WidgetSystem WidgetAlignSystem *WidgetAlignSystem PluginSystem *PluginSystem UpdatedAt time.Time - Property id.PropertyID + Property PropertyID Expected struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID + Id ID + Project ProjectID + Team TeamID + RootLayer LayerID WidgetSystem *WidgetSystem WidgetAlignSystem *WidgetAlignSystem PluginSystem *PluginSystem UpdatedAt time.Time - Property id.PropertyID + Property PropertyID } err error }{ { Name: "fail nil scene id", - Id: id.SceneID{}, + Id: ID{}, Project: pid, Team: tid, RootLayer: lid, @@ -253,33 +249,33 @@ func TestBuilder_MustBuild(t *testing.T) { PluginSystem: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { Name: "fail nil team id", Id: sid, Project: pid, - Team: id.TeamID{}, + Team: TeamID{}, RootLayer: lid, WidgetSystem: ws, WidgetAlignSystem: was, PluginSystem: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { Name: "fail nil root layer id", Id: sid, Project: pid, Team: tid, - RootLayer: id.LayerID{}, + RootLayer: LayerID{}, WidgetSystem: ws, WidgetAlignSystem: was, PluginSystem: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, - err: id.ErrInvalidID, + err: ErrInvalidID, }, { Name: "success build new scene", @@ -293,15 +289,15 @@ func TestBuilder_MustBuild(t *testing.T) { UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, Expected: struct { - Id id.SceneID - Project id.ProjectID - Team id.TeamID - RootLayer id.LayerID + Id ID + Project 
ProjectID + Team TeamID + RootLayer LayerID WidgetSystem *WidgetSystem WidgetAlignSystem *WidgetAlignSystem PluginSystem *PluginSystem UpdatedAt time.Time - Property id.PropertyID + Property PropertyID }{ Id: sid, Project: pid, diff --git a/pkg/scene/cluster.go b/pkg/scene/cluster.go index be5faa902..168884981 100644 --- a/pkg/scene/cluster.go +++ b/pkg/scene/cluster.go @@ -1,16 +1,14 @@ package scene -import "github.com/reearth/reearth-backend/pkg/id" - type Cluster struct { - id id.ClusterID + id ClusterID name string - property id.PropertyID + property PropertyID } -func NewCluster(cid id.ClusterID, name string, pid id.PropertyID) (*Cluster, error) { - if id.ID(cid).IsNil() { - return nil, id.ErrInvalidID +func NewCluster(cid ClusterID, name string, pid PropertyID) (*Cluster, error) { + if cid.IsNil() { + return nil, ErrInvalidID } return &Cluster{ id: cid, @@ -19,9 +17,9 @@ func NewCluster(cid id.ClusterID, name string, pid id.PropertyID) (*Cluster, err }, nil } -func (c *Cluster) ID() id.ClusterID { +func (c *Cluster) ID() ClusterID { if c == nil { - return id.ClusterID{} + return ClusterID{} } return c.id } @@ -33,9 +31,9 @@ func (c *Cluster) Name() string { return c.name } -func (c *Cluster) Property() id.PropertyID { +func (c *Cluster) Property() PropertyID { if c == nil { - return id.PropertyID{} + return PropertyID{} } return c.property } @@ -47,7 +45,7 @@ func (c *Cluster) Rename(name string) { c.name = name } -func (c *Cluster) UpdateProperty(pid id.PropertyID) { +func (c *Cluster) UpdateProperty(pid PropertyID) { if c == nil { return } diff --git a/pkg/scene/cluster_list.go b/pkg/scene/cluster_list.go index 36a86f010..2addbc3af 100644 --- a/pkg/scene/cluster_list.go +++ b/pkg/scene/cluster_list.go @@ -1,7 +1,5 @@ package scene -import "github.com/reearth/reearth-backend/pkg/id" - type ClusterList struct { clusters []*Cluster } @@ -21,7 +19,7 @@ func (tl *ClusterList) Clusters() []*Cluster { return append([]*Cluster{}, tl.clusters...) 
} -func (tl *ClusterList) Has(tid id.ClusterID) bool { +func (tl *ClusterList) Has(tid ClusterID) bool { if tl == nil { return false } @@ -40,7 +38,7 @@ func (tl *ClusterList) Add(clusters ...*Cluster) { tl.clusters = append(tl.clusters, clusters...) } -func (tl *ClusterList) Get(cid id.ClusterID) *Cluster { +func (tl *ClusterList) Get(cid ClusterID) *Cluster { if tl == nil { return nil } @@ -52,7 +50,7 @@ func (tl *ClusterList) Get(cid id.ClusterID) *Cluster { return nil } -func (tl *ClusterList) Remove(clusters ...id.ClusterID) { +func (tl *ClusterList) Remove(clusters ...ClusterID) { if tl == nil { return } @@ -66,11 +64,11 @@ func (tl *ClusterList) Remove(clusters ...id.ClusterID) { } } -func (tl *ClusterList) Properties() []id.PropertyID { +func (tl *ClusterList) Properties() []PropertyID { if tl == nil { return nil } - res := make([]id.PropertyID, 0, len(tl.clusters)) + res := make([]PropertyID, 0, len(tl.clusters)) for _, c := range tl.clusters { res = append(res, c.property) } diff --git a/pkg/scene/cluster_list_test.go b/pkg/scene/cluster_list_test.go index be15a4297..1fafc8942 100644 --- a/pkg/scene/cluster_list_test.go +++ b/pkg/scene/cluster_list_test.go @@ -3,13 +3,12 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestList_Add(t *testing.T) { - c1, _ := NewCluster(id.NewClusterID(), "c1", id.NewPropertyID()) - c2, _ := NewCluster(id.NewClusterID(), "c2", id.NewPropertyID()) + c1, _ := NewCluster(NewClusterID(), "c1", NewPropertyID()) + c2, _ := NewCluster(NewClusterID(), "c2", NewPropertyID()) type args struct { clusters []*Cluster } @@ -43,8 +42,8 @@ func TestList_Add(t *testing.T) { } func TestList_Clusters(t *testing.T) { - c1, _ := NewCluster(id.NewClusterID(), "ccc", id.NewPropertyID()) - c2, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + c1, _ := NewCluster(NewClusterID(), "ccc", NewPropertyID()) + c2, _ := NewCluster(NewClusterID(), "xxx", 
NewPropertyID()) tests := []struct { name string list *ClusterList @@ -71,10 +70,10 @@ func TestList_Clusters(t *testing.T) { } func TestList_Has(t *testing.T) { - c1, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + c1, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) type args struct { - tid id.ClusterID + tid ClusterID } tests := []struct { name string @@ -94,7 +93,7 @@ func TestList_Has(t *testing.T) { name: "not existing: should return false", list: NewClusterListFrom([]*Cluster{c1}), args: args{ - tid: id.NewClusterID(), + tid: NewClusterID(), }, want: false, }, @@ -114,12 +113,12 @@ func TestList_Has(t *testing.T) { } func TestList_Remove(t *testing.T) { - c1, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) - c2, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) - c3, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + c1, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + c2, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + c3, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) type args struct { - cluster id.ClusterID + cluster ClusterID } tests := []struct { name string @@ -162,14 +161,14 @@ func TestList_Remove(t *testing.T) { } func TestClusterList_Get(t *testing.T) { - cid1 := id.NewClusterID() - cid2 := id.NewClusterID() - cid3 := id.NewClusterID() - c1, _ := NewCluster(cid1, "xxx", id.NewPropertyID()) - c2, _ := NewCluster(cid2, "zzz", id.NewPropertyID()) - c3, _ := NewCluster(cid3, "yyy", id.NewPropertyID()) + cid1 := NewClusterID() + cid2 := NewClusterID() + cid3 := NewClusterID() + c1, _ := NewCluster(cid1, "xxx", NewPropertyID()) + c2, _ := NewCluster(cid2, "zzz", NewPropertyID()) + c3, _ := NewCluster(cid3, "yyy", NewPropertyID()) type args struct { - cid id.ClusterID + cid ClusterID } tests := []struct { name string diff --git a/pkg/scene/cluster_test.go b/pkg/scene/cluster_test.go index 1df000806..01c4d8257 100644 --- a/pkg/scene/cluster_test.go +++ 
b/pkg/scene/cluster_test.go @@ -3,19 +3,18 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestCluster_ID(t *testing.T) { - cid := id.NewClusterID() + cid := NewClusterID() clusterA := &Cluster{ id: cid, } tests := []struct { name string cluster *Cluster - want id.ClusterID + want ClusterID }{ { name: "should return cluster id", @@ -25,7 +24,7 @@ func TestCluster_ID(t *testing.T) { { name: "should return empty if cluster is nil", cluster: nil, - want: id.ClusterID{}, + want: ClusterID{}, }, } for _, tc := range tests { @@ -67,14 +66,14 @@ func TestCluster_Name(t *testing.T) { } } func TestCluster_Property(t *testing.T) { - propertyId := id.NewPropertyID() + propertyId := NewPropertyID() clusterA := &Cluster{ property: propertyId, } tests := []struct { name string cluster *Cluster - want id.PropertyID + want PropertyID }{ { name: "should return cluster property", @@ -84,7 +83,7 @@ func TestCluster_Property(t *testing.T) { { name: "should return empty cluster property", cluster: nil, - want: id.PropertyID{}, + want: PropertyID{}, }, } for _, tc := range tests { @@ -98,12 +97,12 @@ func TestCluster_Property(t *testing.T) { } func TestNew(t *testing.T) { - propertyId := id.NewPropertyID() - clusterId := id.NewClusterID() + propertyId := NewPropertyID() + clusterId := NewClusterID() type args struct { - cid id.ClusterID + cid ClusterID name string - pid id.PropertyID + pid PropertyID } tests := []struct { name string @@ -128,7 +127,7 @@ func TestNew(t *testing.T) { { name: "should return invalid id error", args: args{ - cid: id.ClusterID{}, + cid: ClusterID{}, name: "xxx", pid: propertyId, }, @@ -148,8 +147,8 @@ func TestNew(t *testing.T) { } func TestCluster_Rename(t *testing.T) { - propertyId := id.NewPropertyID() - clusterId := id.NewClusterID() + propertyId := NewPropertyID() + clusterId := NewClusterID() type args struct { name string @@ -195,12 +194,12 @@ func TestCluster_Rename(t 
*testing.T) { } func TestCluster_UpdateProperty(t *testing.T) { - propertyId := id.NewPropertyID() - propertyId2 := id.NewPropertyID() - clusterId := id.NewClusterID() + propertyId := NewPropertyID() + propertyId2 := NewPropertyID() + clusterId := NewClusterID() type args struct { - property id.PropertyID + property PropertyID } tests := []struct { name string diff --git a/pkg/scene/id.go b/pkg/scene/id.go new file mode 100644 index 000000000..0947fac44 --- /dev/null +++ b/pkg/scene/id.go @@ -0,0 +1,68 @@ +package scene + +import ( + "time" + + "github.com/reearth/reearth-backend/pkg/id" +) + +type ID = id.SceneID +type WidgetID = id.WidgetID +type ClusterID = id.ClusterID +type LayerID = id.LayerID +type PropertyID = id.PropertyID +type PluginID = id.PluginID +type PluginExtensionID = id.PluginExtensionID +type ProjectID = id.ProjectID +type TeamID = id.TeamID + +var NewID = id.NewSceneID +var NewWidgetID = id.NewWidgetID +var NewClusterID = id.NewClusterID +var NewLayerID = id.NewLayerID +var NewPropertyID = id.NewPropertyID +var NewPluginID = id.NewPluginID +var NewProjectID = id.NewProjectID +var NewTeamID = id.NewTeamID + +var MustID = id.MustSceneID +var MustWidgetID = id.MustWidgetID +var MustClusterID = id.MustClusterID +var MustLayerID = id.MustLayerID +var MustPropertyID = id.MustPropertyID +var MustPluginID = id.MustPluginID +var MustProjectID = id.MustProjectID +var MustTeamID = id.MustTeamID + +var IDFrom = id.SceneIDFrom +var WidgetIDFrom = id.WidgetIDFrom +var ClusterIDFrom = id.ClusterIDFrom +var LayerIDFrom = id.LayerIDFrom +var PropertyIDFrom = id.PropertyIDFrom +var PluginIDFrom = id.PluginIDFrom +var ProjectIDFrom = id.ProjectIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.SceneIDFromRef +var WidgetIDFromRef = id.WidgetIDFromRef +var ClusterIDFromRef = id.ClusterIDFromRef +var LayerIDFromRef = id.LayerIDFromRef +var PropertyIDFromRef = id.PropertyIDFromRef +var PluginIDFromRef = id.PluginIDFromRef +var ProjectIDFromRef = 
id.ProjectIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var IDFromRefID = id.SceneIDFromRefID +var WidgetIDFromRefID = id.WidgetIDFromRefID +var ClusterIDFromRefID = id.ClusterIDFromRefID +var LayerIDFromRefID = id.LayerIDFromRefID +var PropertyIDFromRefID = id.PropertyIDFromRefID +var ProjectIDFromRefID = id.ProjectIDFromRefID +var TeamIDFromRefID = id.TeamIDFromRefID + +var OfficialPluginID = id.OfficialPluginID +var ErrInvalidID = id.ErrInvalidID + +func createdAt(i ID) time.Time { + return id.ID(i).Timestamp() +} diff --git a/pkg/scene/lock.go b/pkg/scene/lock.go index 03afcd1a3..b99d18e31 100644 --- a/pkg/scene/lock.go +++ b/pkg/scene/lock.go @@ -16,7 +16,6 @@ const ( LockModePublishing LockMode = "publishing" ) -// IsLocked _ func (l LockMode) IsLocked() bool { switch l { case LockModeFree: @@ -27,7 +26,6 @@ func (l LockMode) IsLocked() bool { return true } -// Validate _ func (l LockMode) Validate() (LockMode, bool) { switch l { case LockModeFree: diff --git a/pkg/scene/plugin.go b/pkg/scene/plugin.go index 63e863d21..3228304b2 100644 --- a/pkg/scene/plugin.go +++ b/pkg/scene/plugin.go @@ -1,17 +1,11 @@ package scene -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - -// Plugin _ type Plugin struct { - plugin id.PluginID - property *id.PropertyID + plugin PluginID + property *PropertyID } -// NewPlugin _ -func NewPlugin(plugin id.PluginID, property *id.PropertyID) *Plugin { +func NewPlugin(plugin PluginID, property *PropertyID) *Plugin { if property != nil { property2 := *property property = &property2 @@ -22,13 +16,11 @@ func NewPlugin(plugin id.PluginID, property *id.PropertyID) *Plugin { } } -// Plugin _ -func (s Plugin) Plugin() id.PluginID { +func (s Plugin) Plugin() PluginID { return s.plugin } -// Property _ -func (s Plugin) Property() *id.PropertyID { +func (s Plugin) Property() *PropertyID { property := s.property if property != nil { property2 := *property diff --git a/pkg/scene/plugin_system.go b/pkg/scene/plugin_system.go index 
e0c756cfb..660881772 100644 --- a/pkg/scene/plugin_system.go +++ b/pkg/scene/plugin_system.go @@ -1,15 +1,9 @@ package scene -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - -// PluginSystem _ type PluginSystem struct { plugins []*Plugin } -// NewPluginSystem _ func NewPluginSystem(p []*Plugin) *PluginSystem { if p == nil { return &PluginSystem{plugins: []*Plugin{}} @@ -34,13 +28,11 @@ func NewPluginSystem(p []*Plugin) *PluginSystem { return &PluginSystem{plugins: p2} } -// Plugins _ func (p *PluginSystem) Plugins() []*Plugin { return append([]*Plugin{}, p.plugins...) } -// Property _ -func (p *PluginSystem) Property(id id.PluginID) *id.PropertyID { +func (p *PluginSystem) Property(id PluginID) *PropertyID { for _, p := range p.plugins { if p.plugin.Equal(id) { return p.property.CopyRef() @@ -49,8 +41,7 @@ func (p *PluginSystem) Property(id id.PluginID) *id.PropertyID { return nil } -// Has _ -func (p *PluginSystem) Has(id id.PluginID) bool { +func (p *PluginSystem) Has(id PluginID) bool { for _, p2 := range p.plugins { if p2.plugin.Equal(id) { return true @@ -59,8 +50,7 @@ func (p *PluginSystem) Has(id id.PluginID) bool { return false } -// HasPlugin _ -func (p *PluginSystem) HasPlugin(id id.PluginID) bool { +func (p *PluginSystem) HasPlugin(id PluginID) bool { name := id.Name() for _, p2 := range p.plugins { if p2.plugin.Name() == name { @@ -70,18 +60,16 @@ func (p *PluginSystem) HasPlugin(id id.PluginID) bool { return false } -// Add _ func (p *PluginSystem) Add(sp *Plugin) { - if sp == nil || p.Has(sp.plugin) || sp.plugin.Equal(id.OfficialPluginID) { + if sp == nil || p.Has(sp.plugin) || sp.plugin.Equal(OfficialPluginID) { return } sp2 := *sp p.plugins = append(p.plugins, &sp2) } -// Remove _ -func (p *PluginSystem) Remove(pid id.PluginID) { - if pid.Equal(id.OfficialPluginID) { +func (p *PluginSystem) Remove(pid PluginID) { + if pid.Equal(OfficialPluginID) { return } for i, p2 := range p.plugins { @@ -92,10 +80,9 @@ func (p *PluginSystem) 
Remove(pid id.PluginID) { } } -// Upgrade _ -func (p *PluginSystem) Upgrade(pid, newID id.PluginID) { +func (p *PluginSystem) Upgrade(pid, newID PluginID) { for i, p2 := range p.plugins { - if p2.plugin.Equal(id.OfficialPluginID) { + if p2.plugin.Equal(OfficialPluginID) { continue } if p2.plugin.Equal(pid) { @@ -105,12 +92,11 @@ func (p *PluginSystem) Upgrade(pid, newID id.PluginID) { } } -// Properties _ -func (p *PluginSystem) Properties() []id.PropertyID { +func (p *PluginSystem) Properties() []PropertyID { if p == nil { return nil } - res := make([]id.PropertyID, 0, len(p.plugins)) + res := make([]PropertyID, 0, len(p.plugins)) for _, pp := range p.plugins { if pp.property != nil { res = append(res, *pp.property) @@ -119,7 +105,7 @@ func (p *PluginSystem) Properties() []id.PropertyID { return res } -func (p *PluginSystem) Plugin(pluginID id.PluginID) *Plugin { +func (p *PluginSystem) Plugin(pluginID PluginID) *Plugin { for _, pp := range p.plugins { if pp.plugin == pluginID { return pp diff --git a/pkg/scene/plugin_system_test.go b/pkg/scene/plugin_system_test.go index e8acd9204..514b7abfb 100644 --- a/pkg/scene/plugin_system_test.go +++ b/pkg/scene/plugin_system_test.go @@ -3,13 +3,12 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestNewPluginSystem(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string Input []*Plugin @@ -65,13 +64,13 @@ func TestNewPluginSystem(t *testing.T) { } func TestPluginSystem_Property(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string - Input id.PluginID + Input PluginID PS *PluginSystem - Expected *id.PropertyID + Expected *PropertyID }{ { Name: "property is found", @@ -88,7 +87,7 @@ func 
TestPluginSystem_Property(t *testing.T) { { Name: "property is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), Expected: nil, }, } @@ -103,11 +102,11 @@ func TestPluginSystem_Property(t *testing.T) { } func TestPluginSystem_Plugin(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string - Input id.PluginID + Input PluginID PS *PluginSystem Expected *Plugin }{ @@ -120,7 +119,7 @@ func TestPluginSystem_Plugin(t *testing.T) { { Name: "plugin is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), Expected: nil, }, } @@ -135,12 +134,12 @@ func TestPluginSystem_Plugin(t *testing.T) { } func TestPluginSystem_Properties(t *testing.T) { - pr := id.NewPropertyID().Ref() - pr2 := id.NewPropertyID().Ref() + pr := NewPropertyID().Ref() + pr2 := NewPropertyID().Ref() testCases := []struct { Name string PS *PluginSystem - Expected []id.PropertyID + Expected []PropertyID }{ { Name: "pluginSystem is nil", @@ -150,10 +149,10 @@ func TestPluginSystem_Properties(t *testing.T) { { Name: "get properties", PS: NewPluginSystem([]*Plugin{ - NewPlugin(id.MustPluginID("zzz~1.1.1"), pr), - NewPlugin(id.MustPluginID("xxx~1.1.1"), pr2), + NewPlugin(MustPluginID("zzz~1.1.1"), pr), + NewPlugin(MustPluginID("xxx~1.1.1"), pr2), }), - Expected: []id.PropertyID{*pr, *pr2}, + Expected: []PropertyID{*pr, *pr2}, }, } for _, tc := range testCases { @@ -167,11 +166,11 @@ func TestPluginSystem_Properties(t *testing.T) { } func TestPluginSystem_Has(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name 
string - Input id.PluginID + Input PluginID PS *PluginSystem Expected bool }{ @@ -184,7 +183,7 @@ func TestPluginSystem_Has(t *testing.T) { { Name: "property is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), Expected: false, }, } @@ -199,11 +198,11 @@ func TestPluginSystem_Has(t *testing.T) { } func TestPluginSystem_HasPlugin(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string - Input id.PluginID + Input PluginID PS *PluginSystem Expected bool }{ @@ -216,7 +215,7 @@ func TestPluginSystem_HasPlugin(t *testing.T) { { Name: "property is not found", Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.MustPluginID("zzz~1.1.1"), pr)}), + PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), Expected: false, }, } @@ -231,8 +230,8 @@ func TestPluginSystem_HasPlugin(t *testing.T) { } func TestPluginSystem_Add(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string Input *Plugin @@ -252,7 +251,7 @@ func TestPluginSystem_Add(t *testing.T) { }, { Name: "add official plugin", - Input: NewPlugin(id.OfficialPluginID, pr), + Input: NewPlugin(OfficialPluginID, pr), PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), }, @@ -274,18 +273,18 @@ func TestPluginSystem_Add(t *testing.T) { } func TestPluginSystem_Remove(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string - Input id.PluginID + Input PluginID PS, Expected *PluginSystem }{ { Name: "remove official plugin", - Input: 
id.OfficialPluginID, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), + Input: OfficialPluginID, + PS: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), }, { Name: "remove a plugin", @@ -305,19 +304,19 @@ func TestPluginSystem_Remove(t *testing.T) { } func TestPluginSystem_Upgrade(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - nid := id.MustPluginID("zzz~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + nid := MustPluginID("zzz~1.1.1") + pr := NewPropertyID().Ref() testCases := []struct { Name string - PID, NewID id.PluginID + PID, NewID PluginID PS, Expected *PluginSystem }{ { Name: "upgrade official plugin", - PID: id.OfficialPluginID, - PS: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(id.OfficialPluginID, pr)}), + PID: OfficialPluginID, + PS: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + Expected: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), }, { Name: "upgrade a plugin", diff --git a/pkg/scene/plugin_test.go b/pkg/scene/plugin_test.go index fa35ca273..bce10b3c0 100644 --- a/pkg/scene/plugin_test.go +++ b/pkg/scene/plugin_test.go @@ -3,13 +3,12 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestPlugin(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID().Ref() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() res := NewPlugin(pid, pr) p := Plugin{ plugin: pid, diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index 708980689..4f371c309 100644 --- a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -3,28 +3,26 @@ package scene import ( "errors" "time" - - "github.com/reearth/reearth-backend/pkg/id" ) var ErrSceneIsLocked error = 
errors.New("scene is locked") type Scene struct { - id id.SceneID - project id.ProjectID - team id.TeamID - rootLayer id.LayerID + id ID + project ProjectID + team TeamID + rootLayer LayerID widgetSystem *WidgetSystem widgetAlignSystem *WidgetAlignSystem pluginSystem *PluginSystem updatedAt time.Time - property id.PropertyID + property PropertyID clusters *ClusterList } -func (s *Scene) ID() id.SceneID { +func (s *Scene) ID() ID { if s == nil { - return id.SceneID{} + return ID{} } return s.id } @@ -33,33 +31,33 @@ func (s *Scene) CreatedAt() time.Time { if s == nil { return time.Time{} } - return id.ID(s.id).Timestamp() + return createdAt(s.id) } -func (s *Scene) Project() id.ProjectID { +func (s *Scene) Project() ProjectID { if s == nil { - return id.ProjectID{} + return ProjectID{} } return s.project } -func (s *Scene) Team() id.TeamID { +func (s *Scene) Team() TeamID { if s == nil { - return id.TeamID{} + return TeamID{} } return s.team } -func (s *Scene) Property() id.PropertyID { +func (s *Scene) Property() PropertyID { if s == nil { - return id.PropertyID{} + return PropertyID{} } return s.property } -func (s *Scene) RootLayer() id.LayerID { +func (s *Scene) RootLayer() LayerID { if s == nil { - return id.LayerID{} + return LayerID{} } return s.rootLayer } @@ -99,7 +97,7 @@ func (s *Scene) SetUpdatedAt(updatedAt time.Time) { s.updatedAt = updatedAt } -func (s *Scene) IsTeamIncluded(teams []id.TeamID) bool { +func (s *Scene) IsTeamIncluded(teams []TeamID) bool { if s == nil || teams == nil { return false } @@ -111,11 +109,11 @@ func (s *Scene) IsTeamIncluded(teams []id.TeamID) bool { return false } -func (s *Scene) Properties() []id.PropertyID { +func (s *Scene) Properties() []PropertyID { if s == nil { return nil } - ids := []id.PropertyID{s.property} + ids := []PropertyID{s.property} ids = append(ids, s.pluginSystem.Properties()...) ids = append(ids, s.widgetSystem.Properties()...) ids = append(ids, s.clusters.Properties()...) 
diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index f72a0ea28..04f2abd28 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -4,40 +4,39 @@ import ( "testing" "time" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestScene_IsTeamIncluded(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() testCases := []struct { Name string - Teams []id.TeamID + Teams []TeamID S *Scene Expected bool }{ { Name: "nil scene", - Teams: []id.TeamID{id.NewTeamID()}, + Teams: []TeamID{NewTeamID()}, S: nil, Expected: false, }, { Name: "nil teams", Teams: nil, - S: New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).MustBuild(), + S: New().NewID().Team(NewTeamID()).RootLayer(NewLayerID()).MustBuild(), Expected: false, }, { Name: "teams exist", - Teams: []id.TeamID{tid}, - S: New().NewID().Team(tid).RootLayer(id.NewLayerID()).MustBuild(), + Teams: []TeamID{tid}, + S: New().NewID().Team(tid).RootLayer(NewLayerID()).MustBuild(), Expected: true, }, { Name: "teams not exist", - Teams: []id.TeamID{tid}, - S: New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).MustBuild(), + Teams: []TeamID{tid}, + S: New().NewID().Team(NewTeamID()).RootLayer(NewLayerID()).MustBuild(), Expected: false, }, } @@ -52,7 +51,7 @@ func TestScene_IsTeamIncluded(t *testing.T) { } func TestScene_SetUpdatedAt(t *testing.T) { - s := New().NewID().Team(id.NewTeamID()).RootLayer(id.NewLayerID()).UpdatedAt(time.Date(1999, 1, 1, 00, 00, 1, 1, time.UTC)).MustBuild() + s := New().NewID().Team(NewTeamID()).RootLayer(NewLayerID()).UpdatedAt(time.Date(1999, 1, 1, 00, 00, 1, 1, time.UTC)).MustBuild() s.SetUpdatedAt(time.Date(2021, 1, 1, 00, 00, 1, 1, time.UTC)) assert.Equal(t, time.Date(2021, 1, 1, 00, 00, 1, 1, time.UTC), s.UpdatedAt()) s = nil @@ -61,24 +60,24 @@ func TestScene_SetUpdatedAt(t *testing.T) { } func TestScene_Properties(t *testing.T) { - pid1 := id.NewPropertyID() - pid2 := id.NewPropertyID() + pid1 := 
NewPropertyID() + pid2 := NewPropertyID() s := New(). NewID(). - Team(id.NewTeamID()). - RootLayer(id.NewLayerID()). + Team(NewTeamID()). + RootLayer(NewLayerID()). Property(pid1). WidgetSystem( NewWidgetSystem( []*Widget{ - MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), + MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), }, ), ). WidgetAlignSystem(NewWidgetAlignSystem()). MustBuild() - assert.Equal(t, []id.PropertyID{pid1, pid2}, s.Properties()) + assert.Equal(t, []PropertyID{pid1, pid2}, s.Properties()) } func TestSceneNil(t *testing.T) { @@ -96,7 +95,7 @@ func TestSceneNil(t *testing.T) { } func TestScene_Clusters(t *testing.T) { - c1, _ := NewCluster(id.NewClusterID(), "xxx", id.NewPropertyID()) + c1, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) tests := []struct { name string diff --git a/pkg/scene/sceneops/dataset_migrator.go b/pkg/scene/sceneops/dataset_migrator.go index c84303b91..c4902292b 100644 --- a/pkg/scene/sceneops/dataset_migrator.go +++ b/pkg/scene/sceneops/dataset_migrator.go @@ -5,7 +5,6 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/layerops" "github.com/reearth/reearth-backend/pkg/plugin" @@ -24,9 +23,9 @@ type DatasetMigrator struct { type MigrateDatasetResult struct { Layers layer.Map Properties property.Map - RemovedLayers *id.LayerIDSet - RemovedDatasetSchemas []id.DatasetSchemaID - RemovedDatasets []id.DatasetID + RemovedLayers *layer.IDSet + RemovedDatasetSchemas []dataset.SchemaID + RemovedDatasets []dataset.ID } func (r MigrateDatasetResult) Merge(r2 MigrateDatasetResult) MigrateDatasetResult { @@ -38,30 +37,30 @@ func (r MigrateDatasetResult) Merge(r2 MigrateDatasetResult) MigrateDatasetResul } // NOTE: 
DatasetSchemaใฎๅ‰Š้™คใซใฏๅฏพๅฟœใ—ใฆใ„ใชใ„๏ผˆ่‡ชๅ‹•็š„ใซๅ‰Š้™คใ•ใ‚Œใชใ„๏ผ‰ -func (srv DatasetMigrator) Migrate(ctx context.Context, sid id.SceneID, newdsl []*dataset.Schema, newdl dataset.List) (MigrateDatasetResult, error) { - scenes := []id.SceneID{sid} +func (srv DatasetMigrator) Migrate(ctx context.Context, sid dataset.SceneID, newdsl []*dataset.Schema, newdl dataset.List) (MigrateDatasetResult, error) { + scenes := []dataset.SceneID{sid} result := MigrateDatasetResult{} // ๅ‰Š้™คๅฏพ่ฑก - noLogerUsedDS := []id.DatasetSchemaID{} - noLogerUsedD := []id.DatasetID{} + noLogerUsedDS := []dataset.SchemaID{} + noLogerUsedD := []dataset.ID{} // ๅคใ„DatasetSchema - oldDatasetSchemaMap := map[id.DatasetSchemaID]*dataset.Schema{} + oldDatasetSchemaMap := map[dataset.SchemaID]*dataset.Schema{} // ๆ–ฐใ—ใ„DatasetSchema - newDatasetSchemaMap := map[id.DatasetSchemaID]*dataset.Schema{} + newDatasetSchemaMap := map[dataset.SchemaID]*dataset.Schema{} // ๆ–ฐใ—ใ„DatasetSchemaใ‹ใ‚‰ๅคใ„DatasetSchemaIDใธใฎๅฏพๅฟœ - datasetSchemaMapNewOld := map[id.DatasetSchemaID]id.DatasetSchemaID{} + datasetSchemaMapNewOld := map[dataset.SchemaID]dataset.SchemaID{} // ๅคใ„DatasetSchemaใ‹ใ‚‰ๆ–ฐใ—ใ„DatasetSchemaIDใธใฎๅฏพๅฟœ - datasetSchemaMapOldNew := map[id.DatasetSchemaID]id.DatasetSchemaID{} + datasetSchemaMapOldNew := map[dataset.SchemaID]dataset.SchemaID{} // ๅคใ„DatasetFieldIDใ‹ใ‚‰ๆ–ฐใ—ใ„DatasetSchemaFieldIDใธใฎๅฏพๅฟœ - datasetSchemaFieldIDMap := map[id.DatasetSchemaFieldID]id.DatasetSchemaFieldID{} + datasetSchemaFieldIDMap := map[dataset.FieldID]dataset.FieldID{} // ๅคใ„Datasetใ‹ใ‚‰ๆ–ฐใ—ใ„Datasetใธใฎๅฏพๅฟœ - newDatasetMap := map[id.DatasetID]*dataset.Dataset{} - datasetMapOldNew := map[id.DatasetID]*dataset.Dataset{} - datasetIDMapOldNew := map[id.DatasetID]id.DatasetID{} + newDatasetMap := map[dataset.ID]*dataset.Dataset{} + datasetMapOldNew := map[dataset.ID]*dataset.Dataset{} + datasetIDMapOldNew := map[dataset.ID]dataset.ID{} // 
ๆ–ฐใ—ใ„DatasetSchemaใ‹ใ‚‰DatasetDiffใธใฎๅฏพๅฟœ - datasetDiffMap := map[id.DatasetSchemaID]dataset.Diff{} + datasetDiffMap := map[dataset.SchemaID]dataset.Diff{} // ใƒžใƒƒใƒ—ใฎไฝœๆˆ for _, newds := range newdsl { @@ -162,8 +161,8 @@ func (srv DatasetMigrator) Migrate(ctx context.Context, sid id.SceneID, newdsl [ return result, nil } -func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, oldds *dataset.Schema, newds *dataset.Schema, diff dataset.Diff) (MigrateDatasetResult, error) { - scenes := []id.SceneID{sid} +func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid dataset.SceneID, oldds *dataset.Schema, newds *dataset.Schema, diff dataset.Diff) (MigrateDatasetResult, error) { + scenes := []dataset.SceneID{sid} // ๅ‰ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚นใ‚ญใƒผใƒžใซ็ดใฅใ„ใŸใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—ใ‚’ๅ–ๅพ— layerGroups, err := srv.LayerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, sid, oldds.ID()) @@ -173,7 +172,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, old addedAndUpdatedLayers := layer.List{} addedProperties := property.List{} - removedLayers := []id.LayerID{} + removedLayers := []layer.ID{} for _, lg := range layerGroups { layers, err := srv.LayerRepo.FindByIDs(ctx, lg.Layers().Layers(), scenes) @@ -216,7 +215,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, old // ใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ๅ–ๅพ— var plug *plugin.Plugin if pid := lg.Plugin(); pid != nil { - plug2, err := srv.Plugin(ctx, []id.PluginID{*pid}, []id.SceneID{sid}) + plug2, err := srv.Plugin(ctx, []plugin.ID{*pid}, []dataset.SceneID{sid}) if err != nil || len(plug2) < 1 { return MigrateDatasetResult{}, err } @@ -266,7 +265,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid id.SceneID, old layerGroups.ToLayerList()..., ) - set := id.NewLayerIDSet() + set := layer.NewIDSet() set.Add(removedLayers...) 
return MigrateDatasetResult{ diff --git a/pkg/scene/sceneops/dataset_migrator_test.go b/pkg/scene/sceneops/dataset_migrator_test.go index a601dd2c7..f7ae7fda4 100644 --- a/pkg/scene/sceneops/dataset_migrator_test.go +++ b/pkg/scene/sceneops/dataset_migrator_test.go @@ -1,19 +1,19 @@ package sceneops //import ( -// "github.com/reearth/reearth-backend/pkg/dataset" -// "github.com/reearth/reearth-backend/pkg/id" // "testing" +// +// "github.com/reearth/reearth-backend/pkg/dataset" //) // //func TestDatasetMigrator_Migrate(t *testing.T) { -// sid := id.NewSceneID() -// dsid:=id.NewDatasetID() -// dssid:=id.NewDatasetSchemaID() -// dssfid:=id.NewDatasetSchemaFieldID() +// sid := dataset.NewSceneID() +// dsid := dataset.NewID() +// dssid := dataset.NewSchemaID() +// dssfid := dataset.NewFieldID() // testCases := []struct { // Name string -// SID id.SceneID +// SID dataset.SceneID // NewDSL []*dataset.Schema // NewDL dataset.List // Expected MigrateDatasetResult diff --git a/pkg/scene/sceneops/plugin_installer.go b/pkg/scene/sceneops/plugin_installer.go index dfc7609b4..936c8d947 100644 --- a/pkg/scene/sceneops/plugin_installer.go +++ b/pkg/scene/sceneops/plugin_installer.go @@ -3,7 +3,7 @@ package sceneops import ( "errors" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" ) type PluginInstaller struct { @@ -12,7 +12,7 @@ type PluginInstaller struct { // PropertySchemaRepo repo.PropertySchema } -func (s PluginInstaller) InstallPluginFromRepository(pluginID id.PluginID) error { +func (s PluginInstaller) InstallPluginFromRepository(pluginID scene.PluginID) error { return errors.New("not implemented") // manifest, err := s.PluginRepositoryRepo.Manifest(pluginID) diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 2971b996c..20d4c1265 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -5,7 +5,6 @@ import ( "context" "errors"
"github.com/reearth/reearth-backend/pkg/dataset" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" @@ -25,8 +24,8 @@ type MigratePluginsResult struct { Scene *scene.Scene Layers layer.List Properties []*property.Property - RemovedLayers []id.LayerID - RemovedProperties []id.PropertyID + RemovedLayers []layer.ID + RemovedProperties []property.ID } var ( @@ -34,7 +33,7 @@ var ( ErrInvalidPlugins error = errors.New("invalid plugins") ) -func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, oldPluginID, newPluginID id.PluginID) (MigratePluginsResult, error) { +func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, oldPluginID, newPluginID plugin.ID) (MigratePluginsResult, error) { if s == nil { return MigratePluginsResult{}, rerror.ErrInternalBy(errors.New("scene is nil")) } @@ -47,7 +46,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol return MigratePluginsResult{}, ErrPluginNotInstalled } - plugins, err := s.Plugin(ctx, []id.PluginID{oldPluginID, newPluginID}, []id.SceneID{sc.ID()}) + plugins, err := s.Plugin(ctx, []plugin.ID{oldPluginID, newPluginID}, []scene.ID{sc.ID()}) if err != nil || len(plugins) < 2 { return MigratePluginsResult{}, ErrInvalidPlugins } @@ -62,13 +61,13 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol } modifiedLayers := layer.List{} - removedLayers := []id.LayerID{} - propertyIDs := []id.PropertyID{} - removedPropertyIDs := []id.PropertyID{} - schemaMap := map[id.PropertySchemaID]*property.Schema{} + removedLayers := []layer.ID{} + propertyIDs := []property.ID{} + removedPropertyIDs := []property.ID{} + schemaMap := map[property.SchemaID]*property.Schema{} // ใƒ—ใƒญใƒ‘ใƒ†ใ‚ฃใ‚นใ‚ญใƒผใƒžใฎๅ–ๅพ—ใจใ€ๅคใ„ใ‚นใ‚ญใƒผใƒžใจๆ–ฐใ—ใ„ใ‚นใ‚ญใƒผใƒžใฎใƒžใƒƒใƒ—ไฝœๆˆ - schemaIDs := 
[]id.PropertySchemaID{} + schemaIDs := []property.SchemaID{} if oldPlugin.Schema() != nil { if pps := newPlugin.Schema(); pps != nil { schemaIDs = append(schemaIDs, *pps) @@ -153,7 +152,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol // ไธๆญฃใชInfoboxFieldใฎๅ‰Š้™ค if ib := ll.Infobox(); ib != nil { - removeFields := []id.InfoboxFieldID{} + removeFields := []layer.InfoboxFieldID{} for _, f := range ib.Fields() { if newPlugin.Extension(f.Extension()) == nil { removeFields = append(removeFields, f.ID()) @@ -234,8 +233,8 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol }, nil } -func collectDatasetIDs(properties []*property.Property) []id.DatasetID { - res := []id.DatasetID{} +func collectDatasetIDs(properties []*property.Property) []property.DatasetID { + res := []property.DatasetID{} for _, p := range properties { res = append(res, p.CollectDatasets()...) } diff --git a/pkg/scene/widget.go b/pkg/scene/widget.go index 5e7943ad4..e52a61b5b 100644 --- a/pkg/scene/widget.go +++ b/pkg/scene/widget.go @@ -1,21 +1,17 @@ package scene -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type Widget struct { - id id.WidgetID - plugin id.PluginID - extension id.PluginExtensionID - property id.PropertyID + id WidgetID + plugin PluginID + extension PluginExtensionID + property PropertyID enabled bool extended bool } -func NewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled, extended bool) (*Widget, error) { - if !plugin.Validate() || string(extension) == "" || id.ID(property).IsNil() { - return nil, id.ErrInvalidID +func NewWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, property PropertyID, enabled, extended bool) (*Widget, error) { + if !plugin.Validate() || string(extension) == "" || property.IsNil() { + return nil, ErrInvalidID } return &Widget{ @@ -28,7 +24,7 @@ func NewWidget(wid id.WidgetID, plugin id.PluginID, 
extension id.PluginExtension }, nil } -func MustNewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExtensionID, property id.PropertyID, enabled bool, extended bool) *Widget { +func MustNewWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, property PropertyID, enabled bool, extended bool) *Widget { w, err := NewWidget(wid, plugin, extension, property, enabled, extended) if err != nil { panic(err) @@ -36,19 +32,19 @@ func MustNewWidget(wid id.WidgetID, plugin id.PluginID, extension id.PluginExten return w } -func (w *Widget) ID() id.WidgetID { +func (w *Widget) ID() WidgetID { return w.id } -func (w *Widget) Plugin() id.PluginID { +func (w *Widget) Plugin() PluginID { return w.plugin } -func (w *Widget) Extension() id.PluginExtensionID { +func (w *Widget) Extension() PluginExtensionID { return w.extension } -func (w *Widget) Property() id.PropertyID { +func (w *Widget) Property() PropertyID { return w.property } diff --git a/pkg/scene/widget_align_system.go b/pkg/scene/widget_align_system.go index 239ce8baa..c87ef78d9 100644 --- a/pkg/scene/widget_align_system.go +++ b/pkg/scene/widget_align_system.go @@ -1,9 +1,5 @@ package scene -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type WidgetLocation struct { Zone WidgetZoneType Section WidgetSectionType @@ -57,7 +53,7 @@ func (was *WidgetAlignSystem) Zone(zone WidgetZoneType) *WidgetZone { } // Remove a widget from the align system. 
-func (was *WidgetAlignSystem) Remove(wid id.WidgetID) { +func (was *WidgetAlignSystem) Remove(wid WidgetID) { if was == nil { return } @@ -70,7 +66,7 @@ func (was *WidgetAlignSystem) Area(loc WidgetLocation) *WidgetArea { return was.Zone(loc.Zone).Section(loc.Section).Area(loc.Area) } -func (was *WidgetAlignSystem) Find(wid id.WidgetID) (int, WidgetLocation) { +func (was *WidgetAlignSystem) Find(wid WidgetID) (int, WidgetLocation) { if was == nil { return -1, WidgetLocation{} } @@ -93,7 +89,7 @@ func (was *WidgetAlignSystem) Find(wid id.WidgetID) (int, WidgetLocation) { return -1, WidgetLocation{} } -func (was *WidgetAlignSystem) Move(wid id.WidgetID, location WidgetLocation, index int) { +func (was *WidgetAlignSystem) Move(wid WidgetID, location WidgetLocation, index int) { if was == nil { return } diff --git a/pkg/scene/widget_align_system_test.go b/pkg/scene/widget_align_system_test.go index 813957322..b0b5ffe0f 100644 --- a/pkg/scene/widget_align_system_test.go +++ b/pkg/scene/widget_align_system_test.go @@ -3,7 +3,6 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -29,15 +28,15 @@ func TestWidgetAlignSystem_Area(t *testing.T) { } func TestWidgetAlignSystem_Find(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() - wid3 := id.NewWidgetID() - wid4 := id.NewWidgetID() - wid5 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() testCases := []struct { Name string - Input id.WidgetID + Input WidgetID Expected1 int Expected2 WidgetLocation Nil bool @@ -56,7 +55,7 @@ func TestWidgetAlignSystem_Find(t *testing.T) { }, { Name: "invalid id", - Input: id.NewWidgetID(), + Input: NewWidgetID(), Expected1: -1, Expected2: WidgetLocation{}, }, @@ -82,8 +81,8 @@ func TestWidgetAlignSystem_Find(t *testing.T) { } was := NewWidgetAlignSystem() - 
was.Zone(WidgetZoneInner).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid1, wid2, wid3}) - was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid4, wid5}) + was.Zone(WidgetZoneInner).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid1, wid2, wid3}) + was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid4, wid5}) index, location := was.Find(tc.Input) assert.Equal(tt, tc.Expected1, index) @@ -93,38 +92,38 @@ func TestWidgetAlignSystem_Find(t *testing.T) { } func TestWidgetAlignSystem_Remove(t *testing.T) { - wid := id.NewWidgetID() + wid := NewWidgetID() testCases := []struct { Name string Zone WidgetZoneType - Input id.WidgetID - Expected []id.WidgetID + Input WidgetID + Expected []WidgetID Nil bool }{ { Name: "inner: remove a widget from widget section", Zone: WidgetZoneInner, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "inner: couldn't find widgetId", Zone: WidgetZoneInner, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "outer: remove a widget from widget section", Zone: WidgetZoneOuter, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "outer: couldn't find widgetId", Zone: WidgetZoneOuter, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "nil", @@ -153,20 +152,20 @@ func TestWidgetAlignSystem_Remove(t *testing.T) { } func TestWidgetAlignSystem_Move(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() - wid3 := id.NewWidgetID() - wid4 := id.NewWidgetID() - wid5 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() testCases := []struct { Name string - Input1 id.WidgetID + Input1 WidgetID Input2 WidgetLocation Input3 int Source 
WidgetLocation - ExpectedSource []id.WidgetID - ExpectedDest []id.WidgetID + ExpectedSource []WidgetID + ExpectedDest []WidgetID Nil bool }{ { @@ -183,8 +182,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionLeft, Area: WidgetAreaTop, }, - ExpectedSource: []id.WidgetID{wid2, wid1, wid3}, - ExpectedDest: []id.WidgetID{wid2, wid1, wid3}, + ExpectedSource: []WidgetID{wid2, wid1, wid3}, + ExpectedDest: []WidgetID{wid2, wid1, wid3}, }, { Name: "move a widget in the same area with negative index", @@ -200,8 +199,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionLeft, Area: WidgetAreaTop, }, - ExpectedSource: []id.WidgetID{wid2, wid3, wid1}, - ExpectedDest: []id.WidgetID{wid2, wid3, wid1}, + ExpectedSource: []WidgetID{wid2, wid3, wid1}, + ExpectedDest: []WidgetID{wid2, wid3, wid1}, }, { Name: "move a widget to a different area with positive index", @@ -217,8 +216,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionRight, Area: WidgetAreaTop, }, - ExpectedSource: []id.WidgetID{wid2, wid3}, - ExpectedDest: []id.WidgetID{wid4, wid1, wid5}, + ExpectedSource: []WidgetID{wid2, wid3}, + ExpectedDest: []WidgetID{wid4, wid1, wid5}, }, { Name: "move a widget to a different area with negative index", @@ -234,8 +233,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionCenter, Area: WidgetAreaMiddle, }, - ExpectedSource: []id.WidgetID{wid2, wid3}, - ExpectedDest: []id.WidgetID{wid4, wid5, wid1}, + ExpectedSource: []WidgetID{wid2, wid3}, + ExpectedDest: []WidgetID{wid4, wid5, wid1}, }, { Name: "nil", @@ -254,9 +253,9 @@ func TestWidgetAlignSystem_Move(t *testing.T) { } ws := NewWidgetAlignSystem() - ws.Area(tc.Source).AddAll([]id.WidgetID{wid1, wid2, wid3}) + ws.Area(tc.Source).AddAll([]WidgetID{wid1, wid2, wid3}) if tc.Source != tc.Input2 { - ws.Area(tc.Input2).AddAll([]id.WidgetID{wid4, wid5}) + ws.Area(tc.Input2).AddAll([]WidgetID{wid4, wid5}) } ws.Move(tc.Input1, tc.Input2, tc.Input3) 
diff --git a/pkg/scene/widget_area.go b/pkg/scene/widget_area.go index 7ded7c370..81d98f4a4 100644 --- a/pkg/scene/widget_area.go +++ b/pkg/scene/widget_area.go @@ -1,10 +1,8 @@ package scene -import "github.com/reearth/reearth-backend/pkg/id" - // WidgetArea has the widgets and alignment information found in each part area of a section. type WidgetArea struct { - widgetIds []id.WidgetID + widgetIds []WidgetID align WidgetAlignType } @@ -16,7 +14,7 @@ const ( WidgetAlignEnd WidgetAlignType = "end" ) -func NewWidgetArea(widgetIds []id.WidgetID, align WidgetAlignType) *WidgetArea { +func NewWidgetArea(widgetIds []WidgetID, align WidgetAlignType) *WidgetArea { wa := &WidgetArea{} wa.AddAll(widgetIds) wa.SetAlignment(align) @@ -24,12 +22,12 @@ func NewWidgetArea(widgetIds []id.WidgetID, align WidgetAlignType) *WidgetArea { } // WidgetIds will return a slice of widget ids from a specific area. -func (a *WidgetArea) WidgetIDs() []id.WidgetID { +func (a *WidgetArea) WidgetIDs() []WidgetID { if a == nil { return nil } - return append([]id.WidgetID{}, a.widgetIds...) + return append([]WidgetID{}, a.widgetIds...) } // Alignment will return the alignment of a specific area. 
@@ -41,7 +39,7 @@ func (a *WidgetArea) Alignment() WidgetAlignType { return a.align } -func (a *WidgetArea) Find(wid id.WidgetID) int { +func (a *WidgetArea) Find(wid WidgetID) int { if a == nil { return -1 } @@ -54,7 +52,7 @@ func (a *WidgetArea) Find(wid id.WidgetID) int { return -1 } -func (a *WidgetArea) Add(wid id.WidgetID, index int) { +func (a *WidgetArea) Add(wid WidgetID, index int) { if a == nil || wid.Contains(a.widgetIds) { return } @@ -62,12 +60,12 @@ func (a *WidgetArea) Add(wid id.WidgetID, index int) { a.widgetIds = insertWidgetID(a.widgetIds, wid, index) } -func (a *WidgetArea) AddAll(wids []id.WidgetID) { +func (a *WidgetArea) AddAll(wids []WidgetID) { if a == nil { return } - widgetIds := make([]id.WidgetID, 0, len(wids)) + widgetIds := make([]WidgetID, 0, len(wids)) for _, w := range wids { if w.Contains(a.widgetIds) || w.Contains(widgetIds) { continue @@ -90,7 +88,7 @@ func (a *WidgetArea) SetAlignment(at WidgetAlignType) { } } -func (a *WidgetArea) Remove(wid id.WidgetID) { +func (a *WidgetArea) Remove(wid WidgetID) { if a == nil { return } @@ -113,14 +111,14 @@ func (a *WidgetArea) Move(from, to int) { } // insertWidgetID is used in moveInt to add the widgetID to a new position(index). -func insertWidgetID(array []id.WidgetID, value id.WidgetID, index int) []id.WidgetID { +func insertWidgetID(array []WidgetID, value WidgetID, index int) []WidgetID { if index < 0 { return append(array, value) } - return append(array[:index], append([]id.WidgetID{value}, array[index:]...)...) + return append(array[:index], append([]WidgetID{value}, array[index:]...)...) } // removeWidgetID is used in moveInt to remove the widgetID from original position(index). -func removeWidgetID(array []id.WidgetID, index int) []id.WidgetID { +func removeWidgetID(array []WidgetID, index int) []WidgetID { return append(array[:index], array[index+1:]...) 
} diff --git a/pkg/scene/widget_area_test.go b/pkg/scene/widget_area_test.go index 2c16d20ae..a69bb2196 100644 --- a/pkg/scene/widget_area_test.go +++ b/pkg/scene/widget_area_test.go @@ -3,37 +3,36 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestWidgetArea(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() testCases := []struct { Name string - Input1 []id.WidgetID + Input1 []WidgetID Input2 WidgetAlignType Expected *WidgetArea }{ { Name: "New widget area with proper widget ids and widget align type", - Input1: []id.WidgetID{wid1, wid2}, + Input1: []WidgetID{wid1, wid2}, Input2: WidgetAlignEnd, - Expected: &WidgetArea{widgetIds: []id.WidgetID{wid1, wid2}, align: WidgetAlignEnd}, + Expected: &WidgetArea{widgetIds: []WidgetID{wid1, wid2}, align: WidgetAlignEnd}, }, { Name: "New widget area with duplicated widget ids", - Input1: []id.WidgetID{wid1, wid1}, + Input1: []WidgetID{wid1, wid1}, Input2: WidgetAlignEnd, - Expected: &WidgetArea{widgetIds: []id.WidgetID{wid1}, align: WidgetAlignEnd}, + Expected: &WidgetArea{widgetIds: []WidgetID{wid1}, align: WidgetAlignEnd}, }, { Name: "New widget area with wrong widget align type", - Input1: []id.WidgetID{wid1, wid2}, + Input1: []WidgetID{wid1, wid2}, Input2: "wrong", - Expected: &WidgetArea{widgetIds: []id.WidgetID{wid1, wid2}, align: WidgetAlignStart}, + Expected: &WidgetArea{widgetIds: []WidgetID{wid1, wid2}, align: WidgetAlignStart}, }, } @@ -48,8 +47,8 @@ func TestWidgetArea(t *testing.T) { } func TestWidgetArea_WidgetIDs(t *testing.T) { - wid := id.NewWidgetID() - wa := NewWidgetArea([]id.WidgetID{wid}, WidgetAlignStart) + wid := NewWidgetID() + wa := NewWidgetArea([]WidgetID{wid}, WidgetAlignStart) assert.Equal(t, wa.widgetIds, wa.WidgetIDs()) assert.Nil(t, (*WidgetArea)(nil).WidgetIDs()) } @@ -61,12 +60,12 @@ func TestWidgetArea_Alignment(t *testing.T) { } func 
TestWidgetArea_Find(t *testing.T) { - wid := id.NewWidgetID() - wid2 := id.NewWidgetID() + wid := NewWidgetID() + wid2 := NewWidgetID() testCases := []struct { Name string - Input id.WidgetID + Input WidgetID Expected int Nil bool }{ @@ -94,7 +93,7 @@ func TestWidgetArea_Find(t *testing.T) { var wa *WidgetArea if !tc.Nil { - wa = NewWidgetArea([]id.WidgetID{wid}, WidgetAlignStart) + wa = NewWidgetArea([]WidgetID{wid}, WidgetAlignStart) } assert.Equal(tt, tc.Expected, wa.Find(tc.Input)) }) @@ -102,34 +101,34 @@ func TestWidgetArea_Find(t *testing.T) { } func TestWidgetArea_Add(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() - wid3 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() testCases := []struct { Name string Nil bool - Input id.WidgetID + Input WidgetID Input2 int - Expected []id.WidgetID + Expected []WidgetID }{ { Name: "add a widget id", Input: wid3, Input2: -1, - Expected: []id.WidgetID{wid1, wid2, wid3}, + Expected: []WidgetID{wid1, wid2, wid3}, }, { Name: "add a widget id but already exists", Input: wid1, Input2: -1, - Expected: []id.WidgetID{wid1, wid2}, + Expected: []WidgetID{wid1, wid2}, }, { Name: "insert a widget id", Input: wid3, Input2: 1, - Expected: []id.WidgetID{wid1, wid3, wid2}, + Expected: []WidgetID{wid1, wid3, wid2}, }, { Name: "nil widget area", @@ -147,7 +146,7 @@ func TestWidgetArea_Add(t *testing.T) { return } - wa := NewWidgetArea([]id.WidgetID{wid1, wid2}, WidgetAlignStart) + wa := NewWidgetArea([]WidgetID{wid1, wid2}, WidgetAlignStart) wa.Add(tc.Input, tc.Input2) assert.Equal(tt, tc.Expected, wa.WidgetIDs()) }) @@ -155,24 +154,24 @@ func TestWidgetArea_Add(t *testing.T) { } func TestWidgetArea_AddAll(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() testCases := []struct { Name string Nil bool - Input []id.WidgetID - Expected []id.WidgetID + Input []WidgetID + Expected []WidgetID }{ { Name: "add widget 
ids", - Input: []id.WidgetID{wid1, wid2}, - Expected: []id.WidgetID{wid1, wid2}, + Input: []WidgetID{wid1, wid2}, + Expected: []WidgetID{wid1, wid2}, }, { Name: "add widget ids but duplicated", - Input: []id.WidgetID{wid1, wid1, wid2}, - Expected: []id.WidgetID{wid1, wid2}, + Input: []WidgetID{wid1, wid1, wid2}, + Expected: []WidgetID{wid1, wid2}, }, { Name: "nil widget area", @@ -239,22 +238,22 @@ func TestWidgetArea_SetAlignment(t *testing.T) { } func TestWidgetArea_Remove(t *testing.T) { - wid := id.NewWidgetID() + wid := NewWidgetID() testCases := []struct { Name string - Input id.WidgetID - Expected []id.WidgetID + Input WidgetID + Expected []WidgetID Nil bool }{ { Name: "Remove a widget from widget area", Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "Remove a widget from widget area that doesn't exist", - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "Return nil if no widget area", @@ -270,7 +269,7 @@ func TestWidgetArea_Remove(t *testing.T) { var wa *WidgetArea if !tc.Nil { - wa = NewWidgetArea([]id.WidgetID{wid}, "") + wa = NewWidgetArea([]WidgetID{wid}, "") } wa.Remove(tc.Input) if !tc.Nil { @@ -281,27 +280,27 @@ func TestWidgetArea_Remove(t *testing.T) { } func TestWidgetArea_Move(t *testing.T) { - wid := id.NewWidgetID() - wid2 := id.NewWidgetID() - wid3 := id.NewWidgetID() + wid := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() testCases := []struct { Name string Input1, Input2 int - Expected []id.WidgetID + Expected []WidgetID Nil bool }{ { Name: "Move widget Id", Input1: 1, Input2: 2, - Expected: []id.WidgetID{wid, wid3, wid2}, + Expected: []WidgetID{wid, wid3, wid2}, }, { Name: "Move widget Id", Input1: 2, Input2: 0, - Expected: []id.WidgetID{wid3, wid, wid2}, + Expected: []WidgetID{wid3, wid, wid2}, }, { Name: "Nil", @@ -316,7 +315,7 @@ func TestWidgetArea_Move(t *testing.T) { var wa *WidgetArea if !tc.Nil { - wa = 
NewWidgetArea([]id.WidgetID{wid, wid2, wid3}, "") + wa = NewWidgetArea([]WidgetID{wid, wid2, wid3}, "") } wa.Move(tc.Input1, tc.Input2) if !tc.Nil { diff --git a/pkg/scene/widget_section.go b/pkg/scene/widget_section.go index 2fbcdf489..5a602e150 100644 --- a/pkg/scene/widget_section.go +++ b/pkg/scene/widget_section.go @@ -1,7 +1,5 @@ package scene -import "github.com/reearth/reearth-backend/pkg/id" - // WidgetSection is the structure of each section of the align system. type WidgetSection struct { top *WidgetArea @@ -46,7 +44,7 @@ func (s *WidgetSection) Area(t WidgetAreaType) *WidgetArea { return nil } -func (s *WidgetSection) Find(wid id.WidgetID) (int, WidgetAreaType) { +func (s *WidgetSection) Find(wid WidgetID) (int, WidgetAreaType) { if s == nil { return -1, "" } @@ -63,7 +61,7 @@ func (s *WidgetSection) Find(wid id.WidgetID) (int, WidgetAreaType) { return -1, "" } -func (s *WidgetSection) Remove(wid id.WidgetID) { +func (s *WidgetSection) Remove(wid WidgetID) { if s == nil { return } diff --git a/pkg/scene/widget_section_test.go b/pkg/scene/widget_section_test.go index 9346a63da..e0a35c5df 100644 --- a/pkg/scene/widget_section_test.go +++ b/pkg/scene/widget_section_test.go @@ -3,7 +3,6 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -22,17 +21,17 @@ func TestWidgetSection_Area(t *testing.T) { } func TestWidgetSection_Find(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() - wid3 := id.NewWidgetID() - wid4 := id.NewWidgetID() - wid5 := id.NewWidgetID() - wid6 := id.NewWidgetID() - wid7 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() + wid6 := NewWidgetID() + wid7 := NewWidgetID() testCases := []struct { Name string - Input id.WidgetID + Input WidgetID Expected1 int Expected2 WidgetAreaType Nil bool @@ -57,7 +56,7 @@ func TestWidgetSection_Find(t *testing.T) { }, { Name: 
"invalid id", - Input: id.NewWidgetID(), + Input: NewWidgetID(), Expected1: -1, Expected2: "", }, @@ -83,9 +82,9 @@ func TestWidgetSection_Find(t *testing.T) { } ws := NewWidgetSection() - ws.Area(WidgetAreaTop).AddAll([]id.WidgetID{wid1, wid2, wid3}) - ws.Area(WidgetAreaMiddle).AddAll([]id.WidgetID{wid4, wid5}) - ws.Area(WidgetAreaBottom).AddAll([]id.WidgetID{wid6, wid7}) + ws.Area(WidgetAreaTop).AddAll([]WidgetID{wid1, wid2, wid3}) + ws.Area(WidgetAreaMiddle).AddAll([]WidgetID{wid4, wid5}) + ws.Area(WidgetAreaBottom).AddAll([]WidgetID{wid6, wid7}) index, area := ws.Find(tc.Input) assert.Equal(tt, tc.Expected1, index) @@ -95,50 +94,50 @@ func TestWidgetSection_Find(t *testing.T) { } func TestWidgetSection_Remove(t *testing.T) { - wid := id.NewWidgetID() + wid := NewWidgetID() testCases := []struct { Name string Area WidgetAreaType - Input id.WidgetID - Expected []id.WidgetID + Input WidgetID + Expected []WidgetID Nil bool }{ { Name: "top: remove a widget from widget section", Area: WidgetAreaTop, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "top: couldn't find widgetId", Area: WidgetAreaTop, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "middle: remove a widget from widget section", Area: WidgetAreaMiddle, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "middle: couldn't find widgetId", Area: WidgetAreaMiddle, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "bottom: remove a widget from widget section", Area: WidgetAreaBottom, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "bottom: couldn't find widgetId", Area: WidgetAreaBottom, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "nil", diff --git a/pkg/scene/widget_system.go 
b/pkg/scene/widget_system.go index 87a9aa246..4ea6fa773 100644 --- a/pkg/scene/widget_system.go +++ b/pkg/scene/widget_system.go @@ -2,8 +2,6 @@ package scene import ( "errors" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( @@ -45,7 +43,7 @@ func (w *WidgetSystem) Widgets() []*Widget { return append([]*Widget{}, w.widgets...) } -func (w *WidgetSystem) Widget(wid id.WidgetID) *Widget { +func (w *WidgetSystem) Widget(wid WidgetID) *Widget { if w == nil { return nil } @@ -57,7 +55,7 @@ func (w *WidgetSystem) Widget(wid id.WidgetID) *Widget { return nil } -func (w *WidgetSystem) Has(wid id.WidgetID) bool { +func (w *WidgetSystem) Has(wid WidgetID) bool { if w == nil { return false } @@ -77,7 +75,7 @@ func (w *WidgetSystem) Add(sw *Widget) { w.widgets = append(w.widgets, &sw2) } -func (w *WidgetSystem) Remove(wid id.WidgetID) { +func (w *WidgetSystem) Remove(wid WidgetID) { if w == nil { return } @@ -89,7 +87,7 @@ func (w *WidgetSystem) Remove(wid id.WidgetID) { } } -func (w *WidgetSystem) RemoveAllByPlugin(p id.PluginID) (res []id.PropertyID) { +func (w *WidgetSystem) RemoveAllByPlugin(p PluginID) (res []PropertyID) { if w == nil { return nil } @@ -103,7 +101,7 @@ func (w *WidgetSystem) RemoveAllByPlugin(p id.PluginID) (res []id.PropertyID) { return res } -func (w *WidgetSystem) RemoveAllByExtension(p id.PluginID, e id.PluginExtensionID) (res []id.PropertyID) { +func (w *WidgetSystem) RemoveAllByExtension(p PluginID, e PluginExtensionID) (res []PropertyID) { if w == nil { return nil } @@ -117,7 +115,7 @@ func (w *WidgetSystem) RemoveAllByExtension(p id.PluginID, e id.PluginExtensionI return res } -func (w *WidgetSystem) ReplacePlugin(oldp, newp id.PluginID) { +func (w *WidgetSystem) ReplacePlugin(oldp, newp PluginID) { if w == nil || w.widgets == nil { return } @@ -128,11 +126,11 @@ func (w *WidgetSystem) ReplacePlugin(oldp, newp id.PluginID) { } } -func (w *WidgetSystem) Properties() []id.PropertyID { +func (w *WidgetSystem) Properties() []PropertyID { if w 
== nil { return nil } - res := make([]id.PropertyID, 0, len(w.widgets)) + res := make([]PropertyID, 0, len(w.widgets)) for _, ww := range w.widgets { res = append(res, ww.property) } diff --git a/pkg/scene/widget_system_test.go b/pkg/scene/widget_system_test.go index d28772cb5..cf385dec4 100644 --- a/pkg/scene/widget_system_test.go +++ b/pkg/scene/widget_system_test.go @@ -3,14 +3,13 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestNewWidgetSystem(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string Input []*Widget @@ -57,9 +56,9 @@ func TestNewWidgetSystem(t *testing.T) { } func TestWidgetSystem_Add(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string Widgets []*Widget @@ -105,15 +104,15 @@ func TestWidgetSystem_Add(t *testing.T) { } func TestWidgetSystem_Remove(t *testing.T) { - wid := id.NewWidgetID() - wid2 := id.NewWidgetID() - pid := id.MustPluginID("xxx~1.1.1") - pid2 := id.MustPluginID("xxx~1.1.2") - pr := id.NewPropertyID() + wid := NewWidgetID() + wid2 := NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxx~1.1.2") + pr := NewPropertyID() testCases := []struct { Name string - Input id.WidgetID + Input WidgetID Nil bool }{ { @@ -145,24 +144,24 @@ func TestWidgetSystem_Remove(t *testing.T) { } func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pid2 := id.MustPluginID("xxx~1.1.2") - w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true, false) - w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true, false) - w3 := 
MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true, false) + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxx~1.1.2") + w1 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) + w2 := MustNewWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) + w3 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) testCases := []struct { Name string - PID id.PluginID + PID PluginID WS, Expected *WidgetSystem - ExpectedResult []id.PropertyID + ExpectedResult []PropertyID }{ { Name: "remove widgets", PID: pid, WS: NewWidgetSystem([]*Widget{w1, w2, w3}), Expected: NewWidgetSystem([]*Widget{w3}), - ExpectedResult: []id.PropertyID{w1.Property(), w2.Property()}, + ExpectedResult: []PropertyID{w1.Property(), w2.Property()}, }, { Name: "remove from nil widgetSystem", @@ -182,32 +181,32 @@ func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { } func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pid2 := id.MustPluginID("xxx~1.1.2") - w1 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true, false) - w2 := MustNewWidget(id.NewWidgetID(), pid, "e2", id.NewPropertyID(), true, false) - w3 := MustNewWidget(id.NewWidgetID(), pid, "e1", id.NewPropertyID(), true, false) - w4 := MustNewWidget(id.NewWidgetID(), pid2, "e1", id.NewPropertyID(), true, false) + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxx~1.1.2") + w1 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) + w2 := MustNewWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) + w3 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) + w4 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) testCases := []struct { Name string - PID id.PluginID - EID id.PluginExtensionID + PID PluginID + EID PluginExtensionID WS, Expected *WidgetSystem - ExpectedResult []id.PropertyID + ExpectedResult []PropertyID }{ { Name: 
"remove widgets", PID: pid, - EID: id.PluginExtensionID("e1"), + EID: PluginExtensionID("e1"), WS: NewWidgetSystem([]*Widget{w1, w2, w3, w4}), Expected: NewWidgetSystem([]*Widget{w2, w4}), - ExpectedResult: []id.PropertyID{w1.Property(), w3.Property()}, + ExpectedResult: []PropertyID{w1.Property(), w3.Property()}, }, { Name: "remove widgets from nil widget system", PID: pid, - EID: id.PluginExtensionID("e1"), + EID: PluginExtensionID("e1"), WS: nil, Expected: nil, ExpectedResult: nil, @@ -224,13 +223,13 @@ func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { } func TestWidgetSystem_ReplacePlugin(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pid2 := id.MustPluginID("zzz~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("zzz~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string - PID, NewID id.PluginID + PID, NewID PluginID WS, Expected *WidgetSystem }{ { @@ -263,15 +262,15 @@ func TestWidgetSystem_ReplacePlugin(t *testing.T) { } func TestWidgetSystem_Properties(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - pr2 := id.NewPropertyID() - wid := id.NewWidgetID() - wid2 := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + pr2 := NewPropertyID() + wid := NewWidgetID() + wid2 := NewWidgetID() testCases := []struct { Name string WS *WidgetSystem - Expected []id.PropertyID + Expected []PropertyID }{ { Name: "get properties", @@ -279,7 +278,7 @@ func TestWidgetSystem_Properties(t *testing.T) { MustNewWidget(wid, pid, "eee", pr, true, false), MustNewWidget(wid2, pid, "eee", pr2, true, false), }), - Expected: []id.PropertyID{pr, pr2}, + Expected: []PropertyID{pr, pr2}, }, { Name: "get properties from nil widgetSystem", @@ -298,11 +297,11 @@ func TestWidgetSystem_Properties(t *testing.T) { } func TestWidgetSystem_Widgets(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := 
id.NewPropertyID() - pr2 := id.NewPropertyID() - wid := id.NewWidgetID() - wid2 := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + pr2 := NewPropertyID() + wid := NewWidgetID() + wid2 := NewWidgetID() testCases := []struct { Name string WS *WidgetSystem @@ -336,12 +335,12 @@ func TestWidgetSystem_Widgets(t *testing.T) { } func TestWidgetSystem_Widget(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string - ID id.WidgetID + ID WidgetID WS *WidgetSystem Expected *Widget }{ @@ -375,12 +374,12 @@ func TestWidgetSystem_Widget(t *testing.T) { } func TestWidgetSystem_Has(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string - ID id.WidgetID + ID WidgetID WS *WidgetSystem Expected bool }{ diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go index aa34d4c03..d376d1914 100644 --- a/pkg/scene/widget_test.go +++ b/pkg/scene/widget_test.go @@ -3,20 +3,19 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestNewWidget(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string - ID id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID + ID WidgetID + Plugin PluginID + Extension PluginExtensionID + Property PropertyID Enabled bool Extended bool Err error @@ -39,7 +38,7 @@ func TestNewWidget(t *testing.T) { Property: pr, Enabled: true, Extended: false, - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, } @@ -63,15 +62,15 @@ func 
TestNewWidget(t *testing.T) { } func TestMustNewWidget(t *testing.T) { - pid := id.MustPluginID("xxx~1.1.1") - pr := id.NewPropertyID() - wid := id.NewWidgetID() + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID() + wid := NewWidgetID() testCases := []struct { Name string - ID id.WidgetID - Plugin id.PluginID - Extension id.PluginExtensionID - Property id.PropertyID + ID WidgetID + Plugin PluginID + Extension PluginExtensionID + Property PropertyID Enabled bool Extended bool Err error @@ -94,7 +93,7 @@ func TestMustNewWidget(t *testing.T) { Property: pr, Enabled: true, Extended: false, - Err: id.ErrInvalidID, + Err: ErrInvalidID, }, } @@ -121,13 +120,13 @@ func TestMustNewWidget(t *testing.T) { } func TestWidget_SetEnabled(t *testing.T) { - res := MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", id.NewPropertyID(), false, false) + res := MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) res.SetEnabled(true) assert.True(t, res.Enabled()) } func TestWidget_SetExtended(t *testing.T) { - res := MustNewWidget(id.NewWidgetID(), id.MustPluginID("xxx~1.1.1"), "eee", id.NewPropertyID(), false, false) + res := MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) res.SetExtended(true) assert.True(t, res.Extended()) } diff --git a/pkg/scene/widget_zone.go b/pkg/scene/widget_zone.go index cbce3ded5..a7936a9e5 100644 --- a/pkg/scene/widget_zone.go +++ b/pkg/scene/widget_zone.go @@ -1,7 +1,5 @@ package scene -import "github.com/reearth/reearth-backend/pkg/id" - // WidgetZone is the structure of each layer (inner and outer) of the align system. 
type WidgetZone struct { left *WidgetSection @@ -42,7 +40,7 @@ func (wz *WidgetZone) Section(s WidgetSectionType) *WidgetSection { return nil } -func (z *WidgetZone) Remove(wid id.WidgetID) { +func (z *WidgetZone) Remove(wid WidgetID) { if z == nil { return } @@ -52,7 +50,7 @@ func (z *WidgetZone) Remove(wid id.WidgetID) { z.right.Remove(wid) } -func (z *WidgetZone) Find(wid id.WidgetID) (int, WidgetSectionType, WidgetAreaType) { +func (z *WidgetZone) Find(wid WidgetID) (int, WidgetSectionType, WidgetAreaType) { if z == nil { return -1, "", "" } diff --git a/pkg/scene/widget_zone_test.go b/pkg/scene/widget_zone_test.go index 40e21d5b4..f28f0d67f 100644 --- a/pkg/scene/widget_zone_test.go +++ b/pkg/scene/widget_zone_test.go @@ -3,7 +3,6 @@ package scene import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -22,17 +21,17 @@ func TestWidgetZone_Section(t *testing.T) { } func TestWidgetZone_Find(t *testing.T) { - wid1 := id.NewWidgetID() - wid2 := id.NewWidgetID() - wid3 := id.NewWidgetID() - wid4 := id.NewWidgetID() - wid5 := id.NewWidgetID() - wid6 := id.NewWidgetID() - wid7 := id.NewWidgetID() + wid1 := NewWidgetID() + wid2 := NewWidgetID() + wid3 := NewWidgetID() + wid4 := NewWidgetID() + wid5 := NewWidgetID() + wid6 := NewWidgetID() + wid7 := NewWidgetID() testCases := []struct { Name string - Input id.WidgetID + Input WidgetID Expected1 int Expected2 WidgetSectionType Expected3 WidgetAreaType @@ -61,7 +60,7 @@ func TestWidgetZone_Find(t *testing.T) { }, { Name: "invalid id", - Input: id.NewWidgetID(), + Input: NewWidgetID(), Expected1: -1, Expected2: "", Expected3: "", @@ -90,9 +89,9 @@ func TestWidgetZone_Find(t *testing.T) { } ez := NewWidgetZone() - ez.Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid1, wid2, wid3}) - ez.Section(WidgetSectionCenter).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid4, wid5}) - ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll([]id.WidgetID{wid6, 
wid7}) + ez.Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid1, wid2, wid3}) + ez.Section(WidgetSectionCenter).Area(WidgetAreaTop).AddAll([]WidgetID{wid4, wid5}) + ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll([]WidgetID{wid6, wid7}) index, section, area := ez.Find(tc.Input) assert.Equal(tt, tc.Expected1, index) @@ -103,50 +102,50 @@ func TestWidgetZone_Find(t *testing.T) { } func TestWidgetZone_Remove(t *testing.T) { - wid := id.NewWidgetID() + wid := NewWidgetID() testCases := []struct { Name string Section WidgetSectionType - Input id.WidgetID - Expected []id.WidgetID + Input WidgetID + Expected []WidgetID Nil bool }{ { Name: "left: remove a widget from widget section", Section: WidgetSectionLeft, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "left: couldn't find widgetId", Section: WidgetSectionLeft, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "center: remove a widget from widget section", Section: WidgetSectionCenter, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "center: couldn't find widgetId", Section: WidgetSectionCenter, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "right: remove a widget from widget section", Section: WidgetSectionRight, Input: wid, - Expected: []id.WidgetID{}, + Expected: []WidgetID{}, }, { Name: "right: couldn't find widgetId", Section: WidgetSectionRight, - Input: id.NewWidgetID(), - Expected: []id.WidgetID{wid}, + Input: NewWidgetID(), + Expected: []WidgetID{wid}, }, { Name: "nil", diff --git a/pkg/tag/group_builder.go b/pkg/tag/group_builder.go index 76bce1200..4774ca48f 100644 --- a/pkg/tag/group_builder.go +++ b/pkg/tag/group_builder.go @@ -1,9 +1,5 @@ package tag -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - type GroupBuilder struct { g *Group } @@ -21,10 +17,10 @@ 
func GroupFrom(t Tag) *Group { } func (b *GroupBuilder) Build() (*Group, error) { - if id.ID(b.g.id).IsNil() { - return nil, id.ErrInvalidID + if b.g.id.IsNil() { + return nil, ErrInvalidID } - if id.ID(b.g.sceneId).IsNil() { + if b.g.sceneId.IsNil() { return nil, ErrInvalidSceneID } if b.g.label == "" { @@ -34,13 +30,13 @@ func (b *GroupBuilder) Build() (*Group, error) { return b.g, nil } -func (b *GroupBuilder) ID(tid id.TagID) *GroupBuilder { +func (b *GroupBuilder) ID(tid ID) *GroupBuilder { b.g.id = tid return b } func (b *GroupBuilder) NewID() *GroupBuilder { - b.g.id = id.NewTagID() + b.g.id = NewID() return b } @@ -49,7 +45,7 @@ func (b *GroupBuilder) Label(l string) *GroupBuilder { return b } -func (b *GroupBuilder) Scene(sid id.SceneID) *GroupBuilder { +func (b *GroupBuilder) Scene(sid SceneID) *GroupBuilder { b.g.sceneId = sid return b } diff --git a/pkg/tag/group_test.go b/pkg/tag/group_test.go index 160246ada..cda1ad637 100644 --- a/pkg/tag/group_test.go +++ b/pkg/tag/group_test.go @@ -3,7 +3,6 @@ package tag import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -11,20 +10,20 @@ var _ Tag = &Group{} func TestGroupBuilder_NewID(t *testing.T) { b := NewGroup().NewID() - assert.NotEqual(t, id.TagID{}, b.g.id) + assert.NotEqual(t, ID{}, b.g.id) } func TestGroupBuilder_Build(t *testing.T) { - tid := id.NewTagID() - sid := id.NewSceneID() - tags := []id.TagID{ - id.NewTagID(), - id.NewTagID(), + tid := NewID() + sid := NewSceneID() + tags := []ID{ + NewID(), + NewID(), } testCases := []struct { Name, Label string - Id id.TagID - Scene id.SceneID + Id ID + Scene SceneID Tags *List Expected struct { Group Group @@ -34,18 +33,18 @@ func TestGroupBuilder_Build(t *testing.T) { { Name: "fail: nil tag ID", Label: "xxx", - Scene: id.NewSceneID(), + Scene: NewSceneID(), Expected: struct { Group Group Error error }{ - Error: id.ErrInvalidID, + Error: ErrInvalidID, }, }, { Name: "fail: empty label", - Id: 
id.NewTagID(), - Scene: id.NewSceneID(), + Id: NewID(), + Scene: NewSceneID(), Expected: struct { Group Group Error error @@ -56,7 +55,7 @@ func TestGroupBuilder_Build(t *testing.T) { { Name: "fail: nil scene ID", Label: "xxx", - Id: id.NewTagID(), + Id: NewID(), Expected: struct { Group Group Error error diff --git a/pkg/tag/id.go b/pkg/tag/id.go new file mode 100644 index 000000000..2b0e5d43c --- /dev/null +++ b/pkg/tag/id.go @@ -0,0 +1,41 @@ +package tag + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.TagID +type SceneID = id.SceneID +type DatasetID = id.DatasetID +type DatasetSchemaID = id.DatasetSchemaID +type DatasetFieldID = id.DatasetSchemaFieldID + +var NewID = id.NewTagID +var NewSceneID = id.NewSceneID +var NewDatasetID = id.NewDatasetID +var NewDatasetSchemaID = id.NewDatasetSchemaID +var NewDatasetFieldID = id.NewDatasetSchemaFieldID + +var MustID = id.MustTagID +var MustSceneID = id.MustSceneID +var MustDatasetID = id.MustDatasetID +var MustDatasetSchemaID = id.MustDatasetSchemaID +var MustDatasetFieldID = id.MustDatasetSchemaFieldID + +var IDFrom = id.TagIDFrom +var SceneIDFrom = id.SceneIDFrom +var DatasetIDFrom = id.DatasetIDFrom +var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom +var DatasetFieldIDFrom = id.DatasetSchemaFieldIDFrom + +var IDFromRef = id.TagIDFromRef +var SceneIDFromRef = id.SceneIDFromRef +var DatasetIDFromRef = id.DatasetIDFromRef +var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef +var DatasetFieldIDFromRef = id.DatasetSchemaFieldIDFromRef + +var IDFromRefID = id.TagIDFromRefID +var SceneIDFromRefID = id.SceneIDFromRefID +var DatasetIDFromRefID = id.DatasetIDFromRefID +var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID +var DatasetFieldIDFromRefID = id.DatasetSchemaFieldIDFromRefID + +var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/tag/item.go b/pkg/tag/item.go index efefa0274..34ad6b091 100644 --- a/pkg/tag/item.go +++ b/pkg/tag/item.go @@ -1,32 +1,30 @@ package tag -import 
"github.com/reearth/reearth-backend/pkg/id" - type Item struct { tag - parent *id.TagID - linkedDatasetFieldID *id.DatasetSchemaFieldID - linkedDatasetID *id.DatasetID - linkedDatasetSchemaID *id.DatasetSchemaID + parent *ID + linkedDatasetFieldID *DatasetFieldID + linkedDatasetID *DatasetID + linkedDatasetSchemaID *DatasetSchemaID } -func (i *Item) Parent() *id.TagID { +func (i *Item) Parent() *ID { return i.parent.CopyRef() } -func (i *Item) LinkedDatasetFieldID() *id.DatasetSchemaFieldID { +func (i *Item) LinkedDatasetFieldID() *DatasetFieldID { return i.linkedDatasetFieldID.CopyRef() } -func (i *Item) LinkedDatasetID() *id.DatasetID { +func (i *Item) LinkedDatasetID() *DatasetID { return i.linkedDatasetID.CopyRef() } -func (i *Item) LinkedDatasetSchemaID() *id.DatasetSchemaID { +func (i *Item) LinkedDatasetSchemaID() *DatasetSchemaID { return i.linkedDatasetSchemaID.CopyRef() } -func (i *Item) SetParent(p *id.TagID) { +func (i *Item) SetParent(p *ID) { if i == nil { return } diff --git a/pkg/tag/item_builder.go b/pkg/tag/item_builder.go index 4d2cb79a1..ed2a048a9 100644 --- a/pkg/tag/item_builder.go +++ b/pkg/tag/item_builder.go @@ -1,7 +1,5 @@ package tag -import "github.com/reearth/reearth-backend/pkg/id" - type ItemBuilder struct { i *Item } @@ -19,10 +17,10 @@ func ItemFrom(t Tag) *Item { } func (b *ItemBuilder) Build() (*Item, error) { - if id.ID(b.i.id).IsNil() { - return nil, id.ErrInvalidID + if b.i.id.IsNil() { + return nil, ErrInvalidID } - if id.ID(b.i.sceneId).IsNil() { + if b.i.sceneId.IsNil() { return nil, ErrInvalidSceneID } if b.i.label == "" { @@ -31,13 +29,13 @@ func (b *ItemBuilder) Build() (*Item, error) { return b.i, nil } -func (b *ItemBuilder) ID(tid id.TagID) *ItemBuilder { +func (b *ItemBuilder) ID(tid ID) *ItemBuilder { b.i.id = tid return b } func (b *ItemBuilder) NewID() *ItemBuilder { - b.i.id = id.NewTagID() + b.i.id = NewID() return b } @@ -46,27 +44,27 @@ func (b *ItemBuilder) Label(l string) *ItemBuilder { return b } -func (b 
*ItemBuilder) Scene(sid id.SceneID) *ItemBuilder { +func (b *ItemBuilder) Scene(sid SceneID) *ItemBuilder { b.i.sceneId = sid return b } -func (b *ItemBuilder) Parent(p *id.TagID) *ItemBuilder { +func (b *ItemBuilder) Parent(p *ID) *ItemBuilder { b.i.parent = p.CopyRef() return b } -func (b *ItemBuilder) LinkedDatasetFieldID(dfid *id.DatasetSchemaFieldID) *ItemBuilder { +func (b *ItemBuilder) LinkedDatasetFieldID(dfid *DatasetFieldID) *ItemBuilder { b.i.linkedDatasetFieldID = dfid return b } -func (b *ItemBuilder) LinkedDatasetID(did *id.DatasetID) *ItemBuilder { +func (b *ItemBuilder) LinkedDatasetID(did *DatasetID) *ItemBuilder { b.i.linkedDatasetID = did return b } -func (b *ItemBuilder) LinkedDatasetSchemaID(dsid *id.DatasetSchemaID) *ItemBuilder { +func (b *ItemBuilder) LinkedDatasetSchemaID(dsid *DatasetSchemaID) *ItemBuilder { b.i.linkedDatasetSchemaID = dsid return b } diff --git a/pkg/tag/item_test.go b/pkg/tag/item_test.go index a2ab775c1..886f97e2b 100644 --- a/pkg/tag/item_test.go +++ b/pkg/tag/item_test.go @@ -3,7 +3,6 @@ package tag import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -11,22 +10,22 @@ var _ Tag = &Item{} func TestItemBuilder_NewID(t *testing.T) { b := NewItem().NewID() - assert.NotEqual(t, id.TagID{}, b.i.id) + assert.NotEqual(t, ID{}, b.i.id) } func TestItemBuilder_Build(t *testing.T) { - tid := id.NewTagID() - sid := id.NewSceneID() - dfid := id.NewDatasetSchemaFieldID() - did := id.NewDatasetID() - dsid := id.NewDatasetSchemaID() + tid := NewID() + sid := NewSceneID() + dfid := NewDatasetFieldID() + did := NewDatasetID() + dsid := NewDatasetSchemaID() testCases := []struct { Name, Label string - Id id.TagID - Scene id.SceneID - LinkedDatasetFieldID *id.DatasetSchemaFieldID - LinkedDatasetID *id.DatasetID - LinkedDatasetSchemaID *id.DatasetSchemaID + Id ID + Scene SceneID + LinkedDatasetFieldID *DatasetFieldID + LinkedDatasetID *DatasetID + LinkedDatasetSchemaID *DatasetSchemaID 
Expected struct { Item Item Error error @@ -35,18 +34,18 @@ func TestItemBuilder_Build(t *testing.T) { { Name: "fail: nil tag ID", Label: "xxx", - Scene: id.NewSceneID(), + Scene: NewSceneID(), Expected: struct { Item Item Error error }{ - Error: id.ErrInvalidID, + Error: ErrInvalidID, }, }, { Name: "fail: empty label", - Id: id.NewTagID(), - Scene: id.NewSceneID(), + Id: NewID(), + Scene: NewSceneID(), Expected: struct { Item Item Error error @@ -57,7 +56,7 @@ func TestItemBuilder_Build(t *testing.T) { { Name: "fail: nil scene ID", Label: "xxx", - Id: id.NewTagID(), + Id: NewID(), Expected: struct { Item Item Error error diff --git a/pkg/tag/list.go b/pkg/tag/list.go index b0d0779b9..320b4de20 100644 --- a/pkg/tag/list.go +++ b/pkg/tag/list.go @@ -1,27 +1,25 @@ package tag -import "github.com/reearth/reearth-backend/pkg/id" - type List struct { - tags []id.TagID + tags []ID } func NewList() *List { - return &List{tags: []id.TagID{}} + return &List{tags: []ID{}} } -func NewListFromTags(tags []id.TagID) *List { +func NewListFromTags(tags []ID) *List { return &List{tags: tags} } -func (tl *List) Tags() []id.TagID { +func (tl *List) Tags() []ID { if tl == nil || tl.tags == nil { return nil } - return append([]id.TagID{}, tl.tags...) + return append([]ID{}, tl.tags...) } -func (tl *List) Has(tid id.TagID) bool { +func (tl *List) Has(tid ID) bool { if tl == nil || tl.tags == nil { return false } @@ -33,14 +31,14 @@ func (tl *List) Has(tid id.TagID) bool { return false } -func (tl *List) Add(tags ...id.TagID) { +func (tl *List) Add(tags ...ID) { if tl == nil || tl.tags == nil { return } tl.tags = append(tl.tags, tags...) 
} -func (tl *List) Remove(tags ...id.TagID) { +func (tl *List) Remove(tags ...ID) { if tl == nil || tl.tags == nil { return } diff --git a/pkg/tag/list_test.go b/pkg/tag/list_test.go index c8e9f55dd..8e2e4325d 100644 --- a/pkg/tag/list_test.go +++ b/pkg/tag/list_test.go @@ -3,25 +3,24 @@ package tag import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestList_Add(t *testing.T) { - tid := id.NewTagID() + tid := NewID() var tl *List tl.Add(tid) assert.Nil(t, tl.Tags()) tl = NewList() tl.Add(tid) - expected := []id.TagID{tid} + expected := []ID{tid} assert.Equal(t, expected, tl.Tags()) } func TestList_Remove(t *testing.T) { - tid := id.NewTagID() - tid2 := id.NewTagID() - tags := []id.TagID{ + tid := NewID() + tid2 := NewID() + tags := []ID{ tid, tid2, } @@ -30,20 +29,20 @@ func TestList_Remove(t *testing.T) { assert.Nil(t, tl.Tags()) tl = NewListFromTags(tags) tl.Remove(tid2) - expected := []id.TagID{tid} + expected := []ID{tid} assert.Equal(t, expected, tl.Tags()) } func TestList_Has(t *testing.T) { - tid1 := id.NewTagID() - tid2 := id.NewTagID() - tags := []id.TagID{ + tid1 := NewID() + tid2 := NewID() + tags := []ID{ tid1, } testCases := []struct { Name string - Tags []id.TagID - TID id.TagID + Tags []ID + TID ID Expected bool }{ { diff --git a/pkg/tag/tag.go b/pkg/tag/tag.go index 797fbd9f5..166c2ea74 100644 --- a/pkg/tag/tag.go +++ b/pkg/tag/tag.go @@ -2,8 +2,6 @@ package tag import ( "errors" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( @@ -12,23 +10,23 @@ var ( ) type tag struct { - id id.TagID + id ID label string - sceneId id.SceneID + sceneId SceneID } type Tag interface { - ID() id.TagID - Scene() id.SceneID + ID() ID + Scene() SceneID Label() string Rename(string) } -func (t *tag) ID() id.TagID { +func (t *tag) ID() ID { return t.id } -func (t *tag) Scene() id.SceneID { +func (t *tag) Scene() SceneID { return t.sceneId } diff --git a/pkg/user/builder.go b/pkg/user/builder.go index 
495983d5f..e822fb350 100644 --- a/pkg/user/builder.go +++ b/pkg/user/builder.go @@ -1,7 +1,6 @@ package user import ( - "github.com/reearth/reearth-backend/pkg/id" "golang.org/x/text/language" ) @@ -14,8 +13,8 @@ func New() *Builder { } func (b *Builder) Build() (*User, error) { - if id.ID(b.u.id).IsNil() { - return nil, id.ErrInvalidID + if b.u.id.IsNil() { + return nil, ErrInvalidID } return b.u, nil } @@ -28,13 +27,13 @@ func (b *Builder) MustBuild() *User { return r } -func (b *Builder) ID(id id.UserID) *Builder { +func (b *Builder) ID(id ID) *Builder { b.u.id = id return b } func (b *Builder) NewID() *Builder { - b.u.id = id.UserID(id.New()) + b.u.id = NewID() return b } @@ -48,7 +47,7 @@ func (b *Builder) Email(email string) *Builder { return b } -func (b *Builder) Team(team id.TeamID) *Builder { +func (b *Builder) Team(team TeamID) *Builder { b.u.team = team return b } diff --git a/pkg/user/builder_test.go b/pkg/user/builder_test.go index 01a106e5e..193945109 100644 --- a/pkg/user/builder_test.go +++ b/pkg/user/builder_test.go @@ -4,13 +4,12 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" "golang.org/x/text/language" ) func TestBuilder_ID(t *testing.T) { - uid := id.NewUserID() + uid := NewID() b := New().ID(uid).MustBuild() assert.Equal(t, uid, b.ID()) } @@ -26,7 +25,7 @@ func TestBuilder_NewID(t *testing.T) { } func TestBuilder_Team(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() b := New().NewID().Team(tid).MustBuild() assert.Equal(t, tid, b.Team()) } @@ -94,12 +93,12 @@ func TestNew(t *testing.T) { } func TestBuilder_Build(t *testing.T) { - uid := id.NewUserID() - tid := id.NewTeamID() + uid := NewID() + tid := NewTeamID() testCases := []struct { Name, UserName, Lang, Email string - UID id.UserID - TID id.TeamID + UID ID + TID TeamID Auths []Auth Expected *User err error @@ -129,7 +128,7 @@ func TestBuilder_Build(t *testing.T) { }, { Name: "failed invalid id", Expected: nil, - 
err: id.ErrInvalidID, + err: ErrInvalidID, }, } for _, tc := range testCases { @@ -146,12 +145,12 @@ func TestBuilder_Build(t *testing.T) { } func TestBuilder_MustBuild(t *testing.T) { - uid := id.NewUserID() - tid := id.NewTeamID() + uid := NewID() + tid := NewTeamID() testCases := []struct { Name, UserName, Lang, Email string - UID id.UserID - TID id.TeamID + UID ID + TID TeamID Auths []Auth Expected *User err error @@ -181,7 +180,7 @@ func TestBuilder_MustBuild(t *testing.T) { }, { Name: "failed invalid id", Expected: nil, - err: id.ErrInvalidID, + err: ErrInvalidID, }, } for _, tc := range testCases { diff --git a/pkg/user/id.go b/pkg/user/id.go new file mode 100644 index 000000000..a051c0c69 --- /dev/null +++ b/pkg/user/id.go @@ -0,0 +1,23 @@ +package user + +import "github.com/reearth/reearth-backend/pkg/id" + +type ID = id.UserID +type TeamID = id.TeamID + +var NewID = id.NewUserID +var NewTeamID = id.NewTeamID + +var MustID = id.MustUserID +var MustTeamID = id.MustTeamID + +var IDFrom = id.UserIDFrom +var TeamIDFrom = id.TeamIDFrom + +var IDFromRef = id.UserIDFromRef +var TeamIDFromRef = id.TeamIDFromRef + +var IDFromRefID = id.UserIDFromRefID +var TeamIDFromRefID = id.TeamIDFromRefID + +var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/user/initializer.go b/pkg/user/initializer.go index f28c19a10..a50d72297 100644 --- a/pkg/user/initializer.go +++ b/pkg/user/initializer.go @@ -1,7 +1,6 @@ package user import ( - "github.com/reearth/reearth-backend/pkg/id" "golang.org/x/text/language" ) @@ -11,16 +10,16 @@ type InitParams struct { Auth0Sub string Lang *language.Tag Theme *Theme - UserID *id.UserID - TeamID *id.TeamID + UserID *ID + TeamID *TeamID } func Init(p InitParams) (*User, *Team, error) { if p.UserID == nil { - p.UserID = id.NewUserID().Ref() + p.UserID = NewID().Ref() } if p.TeamID == nil { - p.TeamID = id.NewTeamID().Ref() + p.TeamID = NewTeamID().Ref() } if p.Lang == nil { p.Lang = &language.Tag{} @@ -46,7 +45,7 @@ func Init(p InitParams) 
(*User, *Team, error) { t, err := NewTeam(). ID(*p.TeamID). Name(p.Name). - Members(map[id.UserID]Role{u.ID(): RoleOwner}). + Members(map[ID]Role{u.ID(): RoleOwner}). Personal(true). Build() if err != nil { diff --git a/pkg/user/initializer_test.go b/pkg/user/initializer_test.go index 769b87aba..9ca672635 100644 --- a/pkg/user/initializer_test.go +++ b/pkg/user/initializer_test.go @@ -4,17 +4,16 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestInit(t *testing.T) { - uid := id.NewUserID() - tid := id.NewTeamID() + uid := NewID() + tid := NewTeamID() testCases := []struct { Name, Email, Username, Sub string - UID *id.UserID - TID *id.TeamID + UID *ID + TID *TeamID ExpectedUser *User ExpectedTeam *Team Err error @@ -36,7 +35,7 @@ func TestInit(t *testing.T) { ExpectedTeam: NewTeam(). ID(tid). Name("nnn"). - Members(map[id.UserID]Role{uid: RoleOwner}). + Members(map[ID]Role{uid: RoleOwner}). Personal(true). MustBuild(), Err: nil, @@ -58,7 +57,7 @@ func TestInit(t *testing.T) { ExpectedTeam: NewTeam(). NewID(). Name("nnn"). - Members(map[id.UserID]Role{uid: RoleOwner}). + Members(map[ID]Role{uid: RoleOwner}). Personal(true). MustBuild(), Err: nil, @@ -80,7 +79,7 @@ func TestInit(t *testing.T) { ExpectedTeam: NewTeam(). ID(tid). Name("nnn"). - Members(map[id.UserID]Role{uid: RoleOwner}). + Members(map[ID]Role{uid: RoleOwner}). Personal(true). 
MustBuild(), Err: nil, diff --git a/pkg/user/members.go b/pkg/user/members.go index a60d80acc..e42c310e4 100644 --- a/pkg/user/members.go +++ b/pkg/user/members.go @@ -3,8 +3,6 @@ package user import ( "errors" "sort" - - "github.com/reearth/reearth-backend/pkg/id" ) var ( @@ -15,22 +13,22 @@ var ( ) type Members struct { - members map[id.UserID]Role + members map[ID]Role fixed bool } func NewMembers() *Members { - m := &Members{members: map[id.UserID]Role{}} + m := &Members{members: map[ID]Role{}} return m } -func NewFixedMembers(u id.UserID) *Members { - m := &Members{members: map[id.UserID]Role{u: RoleOwner}, fixed: true} +func NewFixedMembers(u ID) *Members { + m := &Members{members: map[ID]Role{u: RoleOwner}, fixed: true} return m } -func NewMembersWith(members map[id.UserID]Role) *Members { - m := &Members{members: map[id.UserID]Role{}} +func NewMembersWith(members map[ID]Role) *Members { + m := &Members{members: map[ID]Role{}} for k, v := range members { m.members[k] = v } @@ -41,15 +39,15 @@ func CopyMembers(members *Members) *Members { return NewMembersWith(members.members) } -func (m *Members) Members() map[id.UserID]Role { - members := make(map[id.UserID]Role) +func (m *Members) Members() map[ID]Role { + members := make(map[ID]Role) for k, v := range m.members { members[k] = v } return members } -func (m *Members) ContainsUser(u id.UserID) bool { +func (m *Members) ContainsUser(u ID) bool { for k := range m.members { if k == u { return true @@ -62,11 +60,11 @@ func (m *Members) Count() int { return len(m.members) } -func (m *Members) GetRole(u id.UserID) Role { +func (m *Members) GetRole(u ID) Role { return m.members[u] } -func (m *Members) UpdateRole(u id.UserID, role Role) error { +func (m *Members) UpdateRole(u ID, role Role) error { if m.fixed { return ErrCannotModifyPersonalTeam } @@ -81,7 +79,7 @@ func (m *Members) UpdateRole(u id.UserID, role Role) error { return nil } -func (m *Members) Join(u id.UserID, role Role) error { +func (m *Members) 
Join(u ID, role Role) error { if m.fixed { return ErrCannotModifyPersonalTeam } @@ -95,7 +93,7 @@ func (m *Members) Join(u id.UserID, role Role) error { return nil } -func (m *Members) Leave(u id.UserID) error { +func (m *Members) Leave(u ID) error { if m.fixed { return ErrCannotModifyPersonalTeam } @@ -107,8 +105,8 @@ func (m *Members) Leave(u id.UserID) error { return nil } -func (m *Members) UsersByRole(role Role) []id.UserID { - users := make([]id.UserID, 0, len(m.members)) +func (m *Members) UsersByRole(role Role) []ID { + users := make([]ID, 0, len(m.members)) for u, r := range m.members { if r == role { users = append(users, u) @@ -122,6 +120,6 @@ func (m *Members) UsersByRole(role Role) []id.UserID { return users } -func (m *Members) IsOnlyOwner(u id.UserID) bool { +func (m *Members) IsOnlyOwner(u ID) bool { return len(m.UsersByRole(RoleOwner)) == 1 && m.members[u] == RoleOwner } diff --git a/pkg/user/members_test.go b/pkg/user/members_test.go index 6216f90a0..a08902b00 100644 --- a/pkg/user/members_test.go +++ b/pkg/user/members_test.go @@ -4,7 +4,6 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -15,30 +14,30 @@ func TestNewMembers(t *testing.T) { } func TestNewMembersWith(t *testing.T) { - uid := id.NewUserID() - m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) assert.NotNil(t, m) - assert.Equal(t, map[id.UserID]Role{uid: RoleOwner}, m.Members()) + assert.Equal(t, map[ID]Role{uid: RoleOwner}, m.Members()) } func TestMembers_ContainsUser(t *testing.T) { - uid1 := id.NewUserID() - uid2 := id.NewUserID() + uid1 := NewID() + uid2 := NewID() testCases := []struct { Name string M *Members - UID id.UserID + UID ID Expected bool }{ { Name: "existing user", - M: NewMembersWith(map[id.UserID]Role{uid1: RoleOwner, uid2: RoleReader}), + M: NewMembersWith(map[ID]Role{uid1: RoleOwner, uid2: RoleReader}), UID: uid1, Expected: 
true, }, { Name: "not existing user", - M: NewMembersWith(map[id.UserID]Role{uid2: RoleReader}), + M: NewMembersWith(map[ID]Role{uid2: RoleReader}), UID: uid1, Expected: false, }, @@ -53,40 +52,40 @@ func TestMembers_ContainsUser(t *testing.T) { } func TestCopyMembers(t *testing.T) { - uid := id.NewUserID() - m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) m2 := CopyMembers(m) assert.Equal(t, m, m2) } func TestMembers_Count(t *testing.T) { - m := NewMembersWith(map[id.UserID]Role{id.NewUserID(): RoleOwner}) + m := NewMembersWith(map[ID]Role{NewID(): RoleOwner}) assert.Equal(t, len(m.Members()), m.Count()) } func TestMembers_GetRole(t *testing.T) { - uid := id.NewUserID() - m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) assert.Equal(t, RoleOwner, m.GetRole(uid)) } func TestMembers_IsOnlyOwner(t *testing.T) { - uid := id.NewUserID() - m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner, id.NewUserID(): RoleReader}) + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner, NewID(): RoleReader}) assert.True(t, m.IsOnlyOwner(uid)) } func TestMembers_Leave(t *testing.T) { - uid := id.NewUserID() + uid := NewID() testCases := []struct { Name string M *Members - UID id.UserID + UID ID err error }{ { Name: "success user left", - M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter, id.NewUserID(): RoleOwner}), + M: NewMembersWith(map[ID]Role{uid: RoleWriter, NewID(): RoleOwner}), UID: uid, err: nil, }, @@ -98,8 +97,8 @@ func TestMembers_Leave(t *testing.T) { }, { Name: "fail user not in the team", - M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter, id.NewUserID(): RoleOwner}), - UID: id.NewUserID(), + M: NewMembersWith(map[ID]Role{uid: RoleWriter, NewID(): RoleOwner}), + UID: NewID(), err: ErrTargetUserNotInTheTeam, }, } @@ -118,23 +117,23 @@ func TestMembers_Leave(t *testing.T) { } func TestMembers_Members(t 
*testing.T) { - uid := id.NewUserID() - m := NewMembersWith(map[id.UserID]Role{uid: RoleOwner}) - assert.Equal(t, map[id.UserID]Role{uid: RoleOwner}, m.Members()) + uid := NewID() + m := NewMembersWith(map[ID]Role{uid: RoleOwner}) + assert.Equal(t, map[ID]Role{uid: RoleOwner}, m.Members()) } func TestMembers_UpdateRole(t *testing.T) { - uid := id.NewUserID() + uid := NewID() testCases := []struct { Name string M *Members - UID id.UserID + UID ID NewRole, Expected Role err error }{ { Name: "success role updated", - M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter}), + M: NewMembersWith(map[ID]Role{uid: RoleWriter}), UID: uid, NewRole: RoleOwner, Expected: RoleOwner, @@ -142,7 +141,7 @@ func TestMembers_UpdateRole(t *testing.T) { }, { Name: "nil role", - M: NewMembersWith(map[id.UserID]Role{uid: RoleOwner}), + M: NewMembersWith(map[ID]Role{uid: RoleOwner}), UID: uid, NewRole: "", Expected: RoleOwner, @@ -157,8 +156,8 @@ func TestMembers_UpdateRole(t *testing.T) { }, { Name: "fail user not in the team", - M: NewMembersWith(map[id.UserID]Role{uid: RoleOwner}), - UID: id.NewUserID(), + M: NewMembersWith(map[ID]Role{uid: RoleOwner}), + UID: NewID(), NewRole: "", err: ErrTargetUserNotInTheTeam, }, @@ -178,18 +177,18 @@ func TestMembers_UpdateRole(t *testing.T) { } func TestMembers_Join(t *testing.T) { - uid := id.NewUserID() - uid2 := id.NewUserID() + uid := NewID() + uid2 := NewID() testCases := []struct { Name string M *Members - UID id.UserID + UID ID JoinRole, ExpectedRole Role err error }{ { Name: "success join user", - M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter}), + M: NewMembersWith(map[ID]Role{uid: RoleWriter}), UID: uid2, JoinRole: "xxx", ExpectedRole: "xxx", @@ -197,7 +196,7 @@ func TestMembers_Join(t *testing.T) { }, { Name: "success join user", - M: NewMembersWith(map[id.UserID]Role{uid: RoleWriter}), + M: NewMembersWith(map[ID]Role{uid: RoleWriter}), UID: uid2, JoinRole: "", ExpectedRole: RoleReader, @@ -212,7 +211,7 @@ func TestMembers_Join(t 
*testing.T) { }, { Name: "fail user already joined", - M: NewMembersWith(map[id.UserID]Role{uid: RoleOwner}), + M: NewMembersWith(map[ID]Role{uid: RoleOwner}), UID: uid, JoinRole: "", err: ErrUserAlreadyJoined, @@ -234,20 +233,20 @@ func TestMembers_Join(t *testing.T) { } func TestMembers_UsersByRole(t *testing.T) { - uid := id.NewUserID() - uid2 := id.NewUserID() + uid := NewID() + uid2 := NewID() testCases := []struct { Name string M *Members Role Role - Expected []id.UserID + Expected []ID err error }{ { Name: "success join user", - M: NewMembersWith(map[id.UserID]Role{uid: "xxx", uid2: "xxx"}), + M: NewMembersWith(map[ID]Role{uid: "xxx", uid2: "xxx"}), Role: "xxx", - Expected: []id.UserID{uid2, uid}, + Expected: []ID{uid2, uid}, }, } for _, tc := range testCases { diff --git a/pkg/user/role.go b/pkg/user/role.go index b6359d347..53efa91c0 100644 --- a/pkg/user/role.go +++ b/pkg/user/role.go @@ -12,11 +12,10 @@ var ( RoleWriter = Role("writer") // RoleOwner is a role who can have full controll of project RoleOwner = Role("owner") - // ErrInvalidRole _ + ErrInvalidRole = errors.New("invalid role") ) -// Role _ type Role string func checkRole(role Role) bool { @@ -31,7 +30,6 @@ func checkRole(role Role) bool { return false } -// RoleFromString _ func RoleFromString(r string) (Role, error) { role := Role(strings.ToLower(r)) diff --git a/pkg/user/team.go b/pkg/user/team.go index 021a392f7..3015389f1 100644 --- a/pkg/user/team.go +++ b/pkg/user/team.go @@ -1,14 +1,12 @@ package user -import "github.com/reearth/reearth-backend/pkg/id" - type Team struct { - id id.TeamID + id TeamID name string members Members } -func (t *Team) ID() id.TeamID { +func (t *Team) ID() TeamID { return t.id } diff --git a/pkg/user/team_builder.go b/pkg/user/team_builder.go index ec62b6897..d6be1f428 100644 --- a/pkg/user/team_builder.go +++ b/pkg/user/team_builder.go @@ -1,25 +1,18 @@ package user -import ( - "github.com/reearth/reearth-backend/pkg/id" -) - -// TeamBuilder _ type 
TeamBuilder struct { t *Team - members map[id.UserID]Role + members map[ID]Role personal bool } -// NewTeam _ func NewTeam() *TeamBuilder { return &TeamBuilder{t: &Team{}} } -// Build _ func (b *TeamBuilder) Build() (*Team, error) { - if id.ID(b.t.id).IsNil() { - return nil, id.ErrInvalidID + if b.t.id.IsNil() { + return nil, ErrInvalidID } if b.members == nil { b.t.members = *NewMembers() @@ -30,7 +23,6 @@ func (b *TeamBuilder) Build() (*Team, error) { return b.t, nil } -// MustBuild _ func (b *TeamBuilder) MustBuild() *Team { r, err := b.Build() if err != nil { @@ -39,31 +31,26 @@ func (b *TeamBuilder) MustBuild() *Team { return r } -// ID _ -func (b *TeamBuilder) ID(id id.TeamID) *TeamBuilder { +func (b *TeamBuilder) ID(id TeamID) *TeamBuilder { b.t.id = id return b } -// NewID _ func (b *TeamBuilder) NewID() *TeamBuilder { - b.t.id = id.TeamID(id.New()) + b.t.id = NewTeamID() return b } -// Name _ func (b *TeamBuilder) Name(name string) *TeamBuilder { b.t.name = name return b } -// Members _ -func (b *TeamBuilder) Members(members map[id.UserID]Role) *TeamBuilder { +func (b *TeamBuilder) Members(members map[ID]Role) *TeamBuilder { b.members = members return b } -// Personal _ func (b *TeamBuilder) Personal(p bool) *TeamBuilder { b.personal = p return b diff --git a/pkg/user/team_builder_test.go b/pkg/user/team_builder_test.go index eb630becc..13cf2aaeb 100644 --- a/pkg/user/team_builder_test.go +++ b/pkg/user/team_builder_test.go @@ -4,18 +4,17 @@ import ( "errors" "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) func TestTeamBuilder_ID(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() tm := NewTeam().ID(tid).MustBuild() assert.Equal(t, tid, tm.ID()) } func TestTeamBuilder_Members(t *testing.T) { - m := map[id.UserID]Role{id.NewUserID(): RoleOwner} + m := map[ID]Role{NewID(): RoleOwner} tm := NewTeam().NewID().Members(m).MustBuild() assert.Equal(t, m, tm.Members().Members()) } @@ -36,12 +35,12 @@ func 
TestTeamBuilder_NewID(t *testing.T) { } func TestTeamBuilder_Build(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() testCases := []struct { Name, UserName string - TID id.TeamID + TID TeamID Personal bool - Members map[id.UserID]Role + Members map[ID]Role Expected *Team err error }{ @@ -50,7 +49,7 @@ func TestTeamBuilder_Build(t *testing.T) { UserName: "xxx", TID: tid, Personal: true, - Expected: NewTeam().ID(tid).Members(map[id.UserID]Role{id.NewUserID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), + Expected: NewTeam().ID(tid).Members(map[ID]Role{NewID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), err: nil, }, { Name: "success create team with nil members", @@ -62,7 +61,7 @@ func TestTeamBuilder_Build(t *testing.T) { { Name: "fail invalid id", Expected: nil, - err: id.ErrInvalidID, + err: ErrInvalidID, }, } @@ -80,12 +79,12 @@ func TestTeamBuilder_Build(t *testing.T) { } func TestTeamBuilder_MustBuild(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() testCases := []struct { Name, UserName string - TID id.TeamID + TID TeamID Personal bool - Members map[id.UserID]Role + Members map[ID]Role Expected *Team err error }{ @@ -94,7 +93,7 @@ func TestTeamBuilder_MustBuild(t *testing.T) { UserName: "xxx", TID: tid, Personal: true, - Expected: NewTeam().ID(tid).Members(map[id.UserID]Role{id.NewUserID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), + Expected: NewTeam().ID(tid).Members(map[ID]Role{NewID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), err: nil, }, { Name: "success create team with nil members", @@ -106,7 +105,7 @@ func TestTeamBuilder_MustBuild(t *testing.T) { { Name: "fail invalid id", Expected: nil, - err: id.ErrInvalidID, + err: ErrInvalidID, }, } diff --git a/pkg/user/team_test.go b/pkg/user/team_test.go index f01e4f843..aa2bcb79e 100644 --- a/pkg/user/team_test.go +++ b/pkg/user/team_test.go @@ -3,12 +3,11 @@ package user import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" 
"github.com/stretchr/testify/assert" ) func TestTeam_ID(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() tm := NewTeam().ID(tid).MustBuild() assert.Equal(t, tid, tm.ID()) } @@ -19,8 +18,8 @@ func TestTeam_Name(t *testing.T) { } func TestTeam_Members(t *testing.T) { - m := map[id.UserID]Role{ - id.NewUserID(): RoleOwner, + m := map[ID]Role{ + NewID(): RoleOwner, } tm := NewTeam().NewID().Members(m).MustBuild() assert.Equal(t, m, tm.Members().Members()) diff --git a/pkg/user/user.go b/pkg/user/user.go index 24359fb92..5277999db 100644 --- a/pkg/user/user.go +++ b/pkg/user/user.go @@ -1,21 +1,20 @@ package user import ( - "github.com/reearth/reearth-backend/pkg/id" "golang.org/x/text/language" ) type User struct { - id id.UserID + id ID name string email string - team id.TeamID + team TeamID auths []Auth lang language.Tag theme Theme } -func (u *User) ID() id.UserID { +func (u *User) ID() ID { return u.id } @@ -27,7 +26,7 @@ func (u *User) Email() string { return u.email } -func (u *User) Team() id.TeamID { +func (u *User) Team() TeamID { return u.team } @@ -47,7 +46,7 @@ func (u *User) UpdateEmail(email string) { u.email = email } -func (u *User) UpdateTeam(team id.TeamID) { +func (u *User) UpdateTeam(team TeamID) { u.team = team } diff --git a/pkg/user/user_test.go b/pkg/user/user_test.go index 9f782760c..262bcbd7f 100644 --- a/pkg/user/user_test.go +++ b/pkg/user/user_test.go @@ -3,22 +3,21 @@ package user import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" "golang.org/x/text/language" ) func TestUser(t *testing.T) { - uid := id.NewUserID() - tid := id.NewTeamID() + uid := NewID() + tid := NewTeamID() testCases := []struct { Name string User *User Expected struct { - Id id.UserID + Id ID Name string Email string - Team id.TeamID + Team TeamID Auths []Auth Lang language.Tag } @@ -35,10 +34,10 @@ func TestUser(t *testing.T) { Sub: "sss", }}).MustBuild(), Expected: struct { - Id id.UserID + Id ID Name 
string Email string - Team id.TeamID + Team TeamID Auths []Auth Lang language.Tag }{ @@ -265,7 +264,7 @@ func TestUser_UpdateLang(t *testing.T) { } func TestUser_UpdateTeam(t *testing.T) { - tid := id.NewTeamID() + tid := NewTeamID() u := New().NewID().MustBuild() u.UpdateTeam(tid) assert.Equal(t, tid, u.Team()) diff --git a/pkg/visualizer/visualizer.go b/pkg/visualizer/visualizer.go index 3e7b2235b..a9a49cf6c 100644 --- a/pkg/visualizer/visualizer.go +++ b/pkg/visualizer/visualizer.go @@ -1,9 +1,7 @@ package visualizer -// Visualizer _ type Visualizer string const ( - // VisualizerCesium _ VisualizerCesium Visualizer = "cesium" ) diff --git a/plugin_manifest_schema.json b/schemas/plugin_manifest.json similarity index 98% rename from plugin_manifest_schema.json rename to schemas/plugin_manifest.json index a149f9b97..037ba9725 100644 --- a/plugin_manifest_schema.json +++ b/schemas/plugin_manifest.json @@ -1,7 +1,7 @@ { - "$id": "https://app.reearth.io/schemas/plugin-manifest", + "$id": "https://reearth.io/schemas/plugin_manifest.json", "$schema": "http://json-schema.org/draft-04/schema", - "description": "Re:Earth plugin manifest schema", + "title": "Re:Earth plugin manifest", "definitions": { "id": { "$id": "#id", diff --git a/plugin_manifest_schema_translation.json b/schemas/plugin_manifest_translation.json similarity index 95% rename from plugin_manifest_schema_translation.json rename to schemas/plugin_manifest_translation.json index 2a6f20ebb..30dad9d84 100644 --- a/plugin_manifest_schema_translation.json +++ b/schemas/plugin_manifest_translation.json @@ -1,7 +1,7 @@ { - "$id": "https://app.reearth.io/schemas/plugin-manifest-translation", + "$id": "https://reearth.io/schemas/plugin_manifest_translation.json", "$schema": "http://json-schema.org/draft-04/schema", - "description": "Re:Earth plugin manifest schema translation", + "title": "Re:Earth plugin manifest translation", "definitions": { "propertySchemaField": { "$id": "#propertySchemaField", From 
ee144e1a85f661494a9f14712a280aacad8ef7f8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Sat, 15 Jan 2022 01:27:34 +0900 Subject: [PATCH 130/253] ci: use golangci-lint-action in pr workflow --- .github/workflows/pr.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 5a060a7cd..9722fdda7 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -19,10 +19,10 @@ jobs: key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} restore-keys: | ${{ runner.os }}-go- - - name: install lint - run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin v1.39.0 - - name: lint - run: $(go env GOPATH)/bin/golangci-lint run --timeout=10m + - name: golangci-lint + uses: golangci/golangci-lint-action@v2 + with: + version: v1.43 - name: test run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic - name: Send coverage report From 691cb787c1945ddc404e804aa8bf7b42aa60acb9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 17 Jan 2022 12:12:38 +0900 Subject: [PATCH 131/253] perf: add indexes of mongo collections (#98) * add indexes of mongo collections * refactor --- internal/app/main.go | 2 +- internal/infrastructure/mongo/asset.go | 2 +- internal/infrastructure/mongo/dataset.go | 2 +- .../infrastructure/mongo/dataset_schema.go | 2 +- internal/infrastructure/mongo/layer.go | 2 +- .../infrastructure/mongo/mongodoc/dataset.go | 14 +++--- .../infrastructure/mongo/mongodoc/layer.go | 48 +++++++++---------- .../infrastructure/mongo/mongodoc/scene.go | 26 +++++----- internal/infrastructure/mongo/mongodoc/tag.go | 16 +++---- .../infrastructure/mongo/mongodoc/team.go | 22 ++++----- internal/infrastructure/mongo/plugin.go | 2 +- internal/infrastructure/mongo/project.go | 2 +- internal/infrastructure/mongo/property.go | 2 +- internal/infrastructure/mongo/scene.go | 2 +- internal/infrastructure/mongo/tag.go | 2 +- 
internal/infrastructure/mongo/team.go | 6 ++- 16 files changed, 78 insertions(+), 74 deletions(-) diff --git a/internal/app/main.go b/internal/app/main.go index 0f4cf6cdc..bdce17444 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -86,7 +86,7 @@ func (w *WebServer) Run() { if w.appServer.Debug { debugLog += " with debug mode" } - log.Infof("server started%s at %s\n", debugLog, w.address) + log.Infof("server started%s at http://%s\n", debugLog, w.address) go func() { err := w.appServer.Start(w.address) diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index cb78cc285..4097a23ce 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -60,7 +60,7 @@ func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *us } func (r *assetRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"team"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "asset", i) } diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index f802db5fc..52a1cc0c7 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -27,7 +27,7 @@ func NewDataset(client *mongodoc.Client) repo.Dataset { } func (r *datasetRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"scene", "schema"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "dataset", i) } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index 00860f6f8..ba2d4d331 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -25,7 +25,7 @@ func NewDatasetSchema(client *mongodoc.Client) repo.DatasetSchema { } func (r *datasetSchemaRepo) init() { - i := 
r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"scene"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "datasetSchema", i) } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index 33eb70896..8475e80fa 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -24,7 +24,7 @@ func NewLayer(client *mongodoc.Client) repo.Layer { } func (r *layerRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"plugin", "extension", "scene", "group.layers", "tags.id", "tags.tags.id"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "layer", i) } diff --git a/internal/infrastructure/mongo/mongodoc/dataset.go b/internal/infrastructure/mongo/mongodoc/dataset.go index 3cfd9e08f..5f36282b5 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset.go +++ b/internal/infrastructure/mongo/mongodoc/dataset.go @@ -7,13 +7,6 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) -type DatasetFieldDocument struct { - Field string - Type string - Value interface{} - Source string -} - type DatasetDocument struct { ID string Source string @@ -22,6 +15,13 @@ type DatasetDocument struct { Scene string } +type DatasetFieldDocument struct { + Field string + Type string + Value interface{} + Source string +} + type DatasetExtendedDocument struct { DatasetDocument Graph []*DatasetExtendedDocument diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go index 5483b31ea..f34b2a54d 100644 --- a/internal/infrastructure/mongo/mongodoc/layer.go +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -8,6 +8,30 @@ import ( "go.mongodb.org/mongo-driver/bson" ) +type LayerDocument struct { + ID string + Name string + Visible bool + Scene string + Plugin *string + Extension *string + Property *string + Infobox 
*LayerInfoboxDocument + Item *LayerItemDocument + Group *LayerGroupDocument + Tags LayerTagListDocument +} + +type LayerItemDocument struct { + LinkedDataset *string +} + +type LayerGroupDocument struct { + Layers []string + LinkedDatasetSchema *string + Root bool +} + type LayerInfoboxFieldDocument struct { ID string Plugin string @@ -28,30 +52,6 @@ type LayerTagDocument struct { type LayerTagListDocument []LayerTagDocument -type LayerItemDocument struct { - LinkedDataset *string -} - -type LayerGroupDocument struct { - Layers []string - LinkedDatasetSchema *string - Root bool -} - -type LayerDocument struct { - ID string - Name string - Visible bool - Scene string - Plugin *string - Extension *string - Property *string - Infobox *LayerInfoboxDocument - Item *LayerItemDocument - Group *LayerGroupDocument - Tags LayerTagListDocument -} - type LayerConsumer struct { Rows []*layer.Layer GroupRows []*layer.Group diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index fbb19a2b6..016815808 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -10,6 +10,19 @@ import ( "github.com/reearth/reearth-backend/pkg/scene" ) +type SceneDocument struct { + ID string + Project string + Team string + RootLayer string + Widgets []SceneWidgetDocument + AlignSystem *WidgetAlignSystemDocument + Plugins []ScenePluginDocument + UpdateAt time.Time + Property string + Clusters []SceneClusterDocument +} + type SceneWidgetDocument struct { ID string Plugin string @@ -30,19 +43,6 @@ type SceneClusterDocument struct { Property string } -type SceneDocument struct { - ID string - Project string - Team string - RootLayer string - Widgets []SceneWidgetDocument - AlignSystem *WidgetAlignSystemDocument - Plugins []ScenePluginDocument - UpdateAt time.Time - Property string - Clusters []SceneClusterDocument -} - type SceneConsumer struct { Rows []*scene.Scene } diff --git 
a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go index 11550237f..3c270efb5 100644 --- a/internal/infrastructure/mongo/mongodoc/tag.go +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -9,6 +9,14 @@ import ( "github.com/reearth/reearth-backend/pkg/tag" ) +type TagDocument struct { + ID string + Label string + Scene string + Item *TagItemDocument + Group *TagGroupDocument +} + type TagItemDocument struct { Parent *string LinkedDatasetFieldID *string @@ -20,14 +28,6 @@ type TagGroupDocument struct { Tags []string } -type TagDocument struct { - ID string - Label string - Scene string - Item *TagItemDocument - Group *TagGroupDocument -} - type TagConsumer struct { Rows []*tag.Tag GroupRows []*tag.Group diff --git a/internal/infrastructure/mongo/mongodoc/team.go b/internal/infrastructure/mongo/mongodoc/team.go index 261d6188f..1006322ee 100644 --- a/internal/infrastructure/mongo/mongodoc/team.go +++ b/internal/infrastructure/mongo/mongodoc/team.go @@ -2,7 +2,7 @@ package mongodoc import ( "github.com/reearth/reearth-backend/pkg/id" - user1 "github.com/reearth/reearth-backend/pkg/user" + "github.com/reearth/reearth-backend/pkg/user" "go.mongodb.org/mongo-driver/bson" ) @@ -18,7 +18,7 @@ type TeamDocument struct { } type TeamConsumer struct { - Rows []*user1.Team + Rows []*user.Team } func (c *TeamConsumer) Consume(raw bson.Raw) error { @@ -38,7 +38,7 @@ func (c *TeamConsumer) Consume(raw bson.Raw) error { return nil } -func NewTeam(team *user1.Team) (*TeamDocument, string) { +func NewTeam(team *user.Team) (*TeamDocument, string) { membersDoc := map[string]TeamMemberDocument{} for user, r := range team.Members().Members() { membersDoc[user.String()] = TeamMemberDocument{ @@ -54,23 +54,23 @@ func NewTeam(team *user1.Team) (*TeamDocument, string) { }, id } -func (d *TeamDocument) Model() (*user1.Team, error) { +func (d *TeamDocument) Model() (*user.Team, error) { tid, err := id.TeamIDFrom(d.ID) if err != nil { return nil, 
err } - members := map[id.UserID]user1.Role{} + members := map[id.UserID]user.Role{} if d.Members != nil { - for user, member := range d.Members { - uid, err := id.UserIDFrom(user) + for uid, member := range d.Members { + uid, err := id.UserIDFrom(uid) if err != nil { return nil, err } - members[uid] = user1.Role(member.Role) + members[uid] = user.Role(member.Role) } } - return user1.NewTeam(). + return user.NewTeam(). ID(tid). Name(d.Name). Members(members). @@ -78,8 +78,8 @@ func (d *TeamDocument) Model() (*user1.Team, error) { Build() } -func NewTeams(teams []*user1.Team) ([]interface{}, []string) { - res := make([]interface{}, 0, len(teams)) +func NewTeams(teams []*user.Team) ([]*TeamDocument, []string) { + res := make([]*TeamDocument, 0, len(teams)) ids := make([]string, 0, len(teams)) for _, d := range teams { if d == nil { diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index ff19986ec..948cb5bcf 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -25,7 +25,7 @@ func NewPlugin(client *mongodoc.Client) repo.Plugin { } func (r *pluginRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"scene"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "plugin", i) } diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 3f81036dc..006cf0a5d 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -25,7 +25,7 @@ func NewProject(client *mongodoc.Client) repo.Project { } func (r *projectRepo) init() { - i := r.client.CreateIndex(context.Background(), []string{"alias"}) + i := r.client.CreateIndex(context.Background(), []string{"alias", "team"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "project", i) } diff --git a/internal/infrastructure/mongo/property.go 
b/internal/infrastructure/mongo/property.go index ed40e2fa3..1fb1bbc67 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -23,7 +23,7 @@ func NewProperty(client *mongodoc.Client) repo.Property { } func (r *propertyRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"scene", "schema"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "property", i) } diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index b27cb323e..af9a2416f 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -25,7 +25,7 @@ func NewScene(client *mongodoc.Client) repo.Scene { } func (r *sceneRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"project"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "scene", i) } diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index 6717cc6fb..5aed97d8e 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -24,7 +24,7 @@ func NewTag(client *mongodoc.Client) repo.Tag { } func (r *tagRepo) init() { - i := r.client.CreateIndex(context.Background(), nil) + i := r.client.CreateIndex(context.Background(), []string{"scene", "group.tags", "item.parent"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "tag", i) } diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index 9249fb6d9..336e366b8 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -69,7 +69,11 @@ func (r *teamRepo) SaveAll(ctx context.Context, teams []*user.Team) error { return nil } docs, ids := mongodoc.NewTeams(teams) - return r.client.SaveAll(ctx, ids, docs) + docs2 := make([]interface{}, 0, len(teams)) + for _, d := 
range docs { + docs2 = append(docs2, d) + } + return r.client.SaveAll(ctx, ids, docs2) } func (r *teamRepo) Remove(ctx context.Context, id id.TeamID) error { From bbe13d8e41cbcf4170c1bcd39c3694cc8124dc50 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 17 Jan 2022 15:52:30 +0900 Subject: [PATCH 132/253] ci: update github action dependencies (#92) Co-authored-by: Renovate Bot --- .github/workflows/main.yml | 2 +- .github/workflows/pr.yml | 2 +- .github/workflows/release.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f4ac34eb8..e0fbff817 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -35,7 +35,7 @@ jobs: - name: test run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic - name: Send coverage report - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v2 with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 9722fdda7..641cb42cc 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -26,7 +26,7 @@ jobs: - name: test run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic - name: Send coverage report - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v2 with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index dd67724ca..4bfc791fd 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,7 +22,7 @@ jobs: token: ${{ secrets.GPT }} - name: Bump tag version id: tag - uses: mathieudutour/github-tag-action@v5.6 + uses: mathieudutour/github-tag-action@v6.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} custom_tag: ${{ github.event.inputs.custom_tag }} From 0d112c57a5ee8c003e0294888a52766343a1a97f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 18 Jan 2022 19:47:16 +0900 Subject: [PATCH 133/253] refactor: unit tests (#99) * rename widget system and plugin system * refactor tests --- .../gql/gqlmodel/convert_property_test.go | 3 + .../adapter/gql/gqlmodel/convert_scene.go | 4 +- .../adapter/gql/resolver_mutation_scene.go | 4 +- .../adapter/gql/resolver_property_test.go | 3 + internal/app/published_test.go | 31 +- internal/infrastructure/fs/file_test.go | 12 +- .../infrastructure/github/fetcher_test.go | 35 +- .../infrastructure/github/plugin_registry.go | 5 +- .../github/plugin_registry_test.go | 19 +- internal/infrastructure/google/fetch_test.go | 2 + .../infrastructure/mongo/mongodoc/scene.go | 8 +- .../infrastructure/mongo/mongodoc/tag_test.go | 60 +-- internal/usecase/gateway/plugin_registry.go | 3 + internal/usecase/interactor/plugin_delete.go | 2 +- internal/usecase/interactor/plugin_upload.go | 2 +- internal/usecase/interactor/property.go | 4 +- internal/usecase/interactor/scene.go | 18 +- pkg/asset/asset_test.go | 24 +- pkg/asset/builder_test.go | 331 +++++++------- pkg/builtin/main_test.go | 39 +- pkg/dataset/value_optional_test.go | 9 + pkg/file/file_test.go | 11 +- pkg/i18n/string_test.go | 46 +- pkg/id/asset_gen_test.go | 2 +- 
pkg/id/cluster_field_gen_test.go | 2 +- pkg/id/dataset_gen_test.go | 2 +- pkg/id/dataset_schema_field_gen_test.go | 2 +- pkg/id/dataset_schema_gen_test.go | 2 +- pkg/id/id_test.go | 142 +++--- pkg/id/infobox_field_gen_test.go | 2 +- pkg/id/layer_gen_test.go | 2 +- pkg/id/plugin_extension_test.go | 13 +- pkg/id/plugin_test.go | 195 ++++----- pkg/id/project_gen_test.go | 2 +- pkg/id/property_gen_test.go | 2 +- pkg/id/property_item_gen_test.go | 2 +- pkg/id/property_schema_field_test.go | 13 +- pkg/id/property_schema_group_test.go | 13 +- pkg/id/property_schema_test.go | 153 +++---- pkg/id/scene_gen_test.go | 2 +- pkg/id/tag_gen_test.go | 2 +- pkg/id/team_gen_test.go | 2 +- pkg/id/user_gen_test.go | 2 +- pkg/id/widget_gen_test.go | 2 +- pkg/layer/decoding/common_test.go | 77 ++-- pkg/layer/decoding/kml_test.go | 158 +++---- pkg/layer/encoding/czml_test.go | 2 + pkg/layer/encoding/geojson_test.go | 2 + pkg/layer/encoding/kml_test.go | 2 + pkg/layer/encoding/shp_test.go | 3 + pkg/layer/layerops/initializer_test.go | 33 +- pkg/layer/tag_test.go | 17 + pkg/plugin/builder_test.go | 169 ++++---- pkg/plugin/extension_builder_test.go | 201 +++++---- pkg/plugin/manifest/convert_test.go | 163 +++---- pkg/plugin/manifest/parser_test.go | 50 +-- .../manifest/parser_translation_test.go | 66 +-- pkg/plugin/plugin_test.go | 22 +- pkg/plugin/repourl/repourl_test.go | 47 +- pkg/project/builder_test.go | 327 +++++++------- pkg/project/project_test.go | 85 ++-- pkg/property/builder_test.go | 39 +- pkg/property/condition_test.go | 14 +- pkg/property/field_builder_test.go | 271 ++++++------ pkg/property/field_test.go | 20 +- pkg/property/group.go | 12 +- pkg/property/group_builder_test.go | 148 ++++--- pkg/property/group_list_builder_test.go | 161 ++++--- pkg/property/group_list_test.go | 249 ++++++----- pkg/property/group_test.go | 233 +++++----- pkg/property/item_test.go | 19 +- pkg/property/link_test.go | 96 +++-- pkg/property/list_test.go | 32 +- pkg/property/schema_builder_test.go | 
194 +++++---- pkg/property/schema_field_builder_test.go | 67 +-- pkg/property/schema_field_test.go | 36 +- pkg/property/schema_group_builder_test.go | 22 +- pkg/property/schema_group_test.go | 38 +- pkg/property/schema_test.go | 36 +- pkg/property/sealed_test.go | 77 ++-- pkg/property/value_camera_test.go | 8 +- pkg/property/value_dataset_test.go | 11 + pkg/property/value_optional_test.go | 9 + pkg/property/value_test.go | 18 +- pkg/property/value_typography_test.go | 31 +- pkg/rerror/error_test.go | 33 +- pkg/scene/builder.go | 18 +- pkg/scene/builder/builder_test.go | 4 +- pkg/scene/builder/encoder_test.go | 46 +- pkg/scene/builder/scene.go | 4 +- pkg/scene/builder/scene_test.go | 34 +- pkg/scene/builder_test.go | 404 ++++++++---------- pkg/scene/cluster_list_test.go | 40 +- pkg/scene/cluster_test.go | 50 ++- pkg/scene/lock_test.go | 22 +- pkg/scene/plugin_system_test.go | 337 --------------- pkg/scene/{plugin_system.go => plugins.go} | 26 +- pkg/scene/plugins_test.go | 355 +++++++++++++++ pkg/scene/scene.go | 16 +- pkg/scene/scene_test.go | 27 +- pkg/scene/sceneops/dataset_migrator_test.go | 46 -- pkg/scene/sceneops/plugin_migrator.go | 12 +- pkg/scene/widget_align_system_test.go | 40 +- pkg/scene/widget_area_test.go | 67 +-- pkg/scene/widget_section_test.go | 28 +- pkg/scene/widget_test.go | 44 +- pkg/scene/widget_zone_test.go | 32 +- pkg/scene/{widget_system.go => widgets.go} | 26 +- ...{widget_system_test.go => widgets_test.go} | 208 ++++----- pkg/shp/reader_test.go | 17 +- pkg/shp/sequentialreader_test.go | 15 +- pkg/shp/shapefile_test.go | 24 +- pkg/shp/shapetype_string_test.go | 12 +- pkg/shp/testdata_test.go | 6 +- pkg/shp/writer_test.go | 27 +- pkg/shp/zipreader_test.go | 12 +- pkg/tag/group_test.go | 20 +- pkg/tag/item_test.go | 24 +- pkg/tag/list_test.go | 10 +- pkg/user/auth.go | 2 +- pkg/user/auth_test.go | 25 +- pkg/user/builder_test.go | 198 +++++---- pkg/user/initializer_test.go | 39 +- pkg/user/members_test.go | 98 +++-- pkg/user/role_test.go | 
33 +- pkg/user/team_builder_test.go | 176 +++++--- pkg/user/user_test.go | 66 +-- pkg/value/bool_test.go | 3 + pkg/value/latlng_test.go | 8 +- pkg/value/latlngheight_test.go | 12 +- pkg/value/number_test.go | 4 +- pkg/value/optional_test.go | 8 + pkg/value/string_test.go | 2 + pkg/value/type_test.go | 1 + pkg/value/value_test.go | 6 + pkg/writer/seeker_closer_test.go | 42 +- 136 files changed, 3831 insertions(+), 3477 deletions(-) delete mode 100644 pkg/scene/plugin_system_test.go rename pkg/scene/{plugin_system.go => plugins.go} (71%) create mode 100644 pkg/scene/plugins_test.go delete mode 100644 pkg/scene/sceneops/dataset_migrator_test.go rename pkg/scene/{widget_system.go => widgets.go} (73%) rename pkg/scene/{widget_system_test.go => widgets_test.go} (61%) diff --git a/internal/adapter/gql/gqlmodel/convert_property_test.go b/internal/adapter/gql/gqlmodel/convert_property_test.go index 400b1e358..208178002 100644 --- a/internal/adapter/gql/gqlmodel/convert_property_test.go +++ b/internal/adapter/gql/gqlmodel/convert_property_test.go @@ -13,6 +13,7 @@ func TestFromPropertyValueAndType(t *testing.T) { v interface{} t ValueType } + tests := []struct { name string args args @@ -37,7 +38,9 @@ func TestFromPropertyValueAndType(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, FromPropertyValueAndType(tt.args.v, tt.args.t)) }) } diff --git a/internal/adapter/gql/gqlmodel/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go index 57855c0f0..5b7953ff0 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -43,7 +43,7 @@ func ToScene(scene *scene.Scene) *Scene { return nil } - sceneWidgets := scene.WidgetSystem().Widgets() + sceneWidgets := scene.Widgets().Widgets() widgets := make([]*SceneWidget, 0, len(sceneWidgets)) for _, w := range sceneWidgets { widgets = append(widgets, ToSceneWidget(w)) @@ -55,7 +55,7 @@ func 
ToScene(scene *scene.Scene) *Scene { clusters = append(clusters, ToCluster(c)) } - scenePlugins := scene.PluginSystem().Plugins() + scenePlugins := scene.Plugins().Plugins() plugins := make([]*ScenePlugin, 0, len(scenePlugins)) for _, sp := range scenePlugins { plugins = append(plugins, ToScenePlugin(sp)) diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index df92cefba..8379a7a86 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -153,7 +153,7 @@ func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.Uplo return &gqlmodel.UploadPluginPayload{ Plugin: gqlmodel.ToPlugin(p), Scene: gqlmodel.ToScene(s), - ScenePlugin: gqlmodel.ToScenePlugin(s.PluginSystem().Plugin(p.ID())), + ScenePlugin: gqlmodel.ToScenePlugin(s.Plugins().Plugin(p.ID())), }, nil } @@ -192,7 +192,7 @@ func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.Upg return &gqlmodel.UpgradePluginPayload{ Scene: gqlmodel.ToScene(s), - ScenePlugin: gqlmodel.ToScenePlugin(s.PluginSystem().Plugin(input.ToPluginID)), + ScenePlugin: gqlmodel.ToScenePlugin(s.Plugins().Plugin(input.ToPluginID)), }, nil } diff --git a/internal/adapter/gql/resolver_property_test.go b/internal/adapter/gql/resolver_property_test.go index 73a8b5219..e6cd7ee40 100644 --- a/internal/adapter/gql/resolver_property_test.go +++ b/internal/adapter/gql/resolver_property_test.go @@ -34,8 +34,11 @@ func Test_actualValue(t *testing.T) { false, }, } + for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() got, err := actualValue(tt.args.datasetLoader, tt.args.value, tt.args.links, tt.args.overridden) if (err != nil) != tt.wantErr { t.Errorf("actualValue() error = %v, wantErr %v", err, tt.wantErr) diff --git a/internal/app/published_test.go b/internal/app/published_test.go index 1553ec773..0cf9398bc 100644 --- a/internal/app/published_test.go +++ 
b/internal/app/published_test.go @@ -36,7 +36,7 @@ func TestPublishedAuthMiddleware(t *testing.T) { return c.String(http.StatusOK, "test") }) - testCases := []struct { + tests := []struct { Name string PublishedName string BasicAuthUsername string @@ -74,12 +74,12 @@ func TestPublishedAuthMiddleware(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() - assert := assert.New(tt) + assert := assert.New(t) req := httptest.NewRequest(http.MethodGet, "/", nil) if tc.BasicAuthUsername != "" { req.Header.Set(echo.HeaderAuthorization, "basic "+base64.StdEncoding.EncodeToString([]byte(tc.BasicAuthUsername+":"+tc.BasicAuthPassword))) @@ -110,7 +110,7 @@ func TestPublishedData(t *testing.T) { return nil, rerror.ErrNotFound }) - testCases := []struct { + tests := []struct { Name string PublishedName string Error error @@ -130,11 +130,12 @@ func TestPublishedData(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert := assert.New(tt) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + assert := assert.New(t) req := httptest.NewRequest(http.MethodGet, "/", nil) res := httptest.NewRecorder() e := echo.New() @@ -156,7 +157,7 @@ func TestPublishedData(t *testing.T) { } func TestPublishedIndex(t *testing.T) { - testCases := []struct { + tests := []struct { Name string PublishedName string Error error @@ -182,12 +183,12 @@ func TestPublishedIndex(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() - assert := assert.New(tt) + assert := assert.New(t) req := httptest.NewRequest(http.MethodGet, "/aaa/bbb", nil) res := httptest.NewRecorder() e := echo.New() diff --git 
a/internal/infrastructure/fs/file_test.go b/internal/infrastructure/fs/file_test.go index 592e75120..a57913d33 100644 --- a/internal/infrastructure/fs/file_test.go +++ b/internal/infrastructure/fs/file_test.go @@ -88,8 +88,8 @@ func TestFile_RemoveAsset(t *testing.T) { for _, tc := range cases { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() fs := mockFs() f, _ := NewFile(fs, "https://example.com/assets") @@ -98,16 +98,16 @@ func TestFile_RemoveAsset(t *testing.T) { err := f.RemoveAsset(context.Background(), u) if tc.Err == nil { - assert.NoError(tt, err) + assert.NoError(t, err) } else { - assert.Same(tt, tc.Err, err) + assert.Same(t, tc.Err, err) } _, err = fs.Stat(filepath.Join("assets", "xxx.txt")) if tc.Deleted { - assert.ErrorIs(tt, err, os.ErrNotExist) + assert.ErrorIs(t, err, os.ErrNotExist) } else { - assert.NoError(tt, err) + assert.NoError(t, err) } }) } diff --git a/internal/infrastructure/github/fetcher_test.go b/internal/infrastructure/github/fetcher_test.go index fc3d9073a..ddee96c34 100644 --- a/internal/infrastructure/github/fetcher_test.go +++ b/internal/infrastructure/github/fetcher_test.go @@ -2,7 +2,6 @@ package github import ( "context" - "errors" "net/http" "net/http/httptest" "testing" @@ -18,28 +17,34 @@ func TestFetchURL(t *testing.T) { server2 := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { rw.WriteHeader(http.StatusBadRequest) })) - testCases := []struct { + + defer func() { + server.Close() + server2.Close() + }() + + tests := []struct { Name, URL string Ctx context.Context - ExpectedErr error + ExpectedErr string }{ { Name: "Fail: nil context", Ctx: nil, URL: server.URL, - ExpectedErr: errors.New("nil Context"), + ExpectedErr: "net/http: nil Context", }, { - Name: "Fail: nil unsupported protocol scheme ", + Name: "Fail: nil unsupported protocol scheme", Ctx: context.Background(), URL: "", - ExpectedErr: errors.New("unsupported 
protocol scheme"), + ExpectedErr: "Get \"\": unsupported protocol scheme \"\"", }, { Name: "Fail: bad request ", Ctx: context.Background(), URL: server2.URL, - ExpectedErr: errors.New("StatusCode=400"), + ExpectedErr: "StatusCode=400", }, { Name: "Success", @@ -47,18 +52,16 @@ func TestFetchURL(t *testing.T) { URL: server.URL, }, } - defer func() { - server.Close() - server2.Close() - }() - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { + t.Run(tc.Name, func(t *testing.T) { body, err := fetchURL(tc.Ctx, tc.URL) - if err != nil { - assert.True(tt, errors.As(tc.ExpectedErr, &err)) + if tc.ExpectedErr != "" { + assert.EqualError(t, err, tc.ExpectedErr) } else { - assert.NotNil(tt, body) + _ = body.Close() + assert.NotNil(t, body) } }) } diff --git a/internal/infrastructure/github/plugin_registry.go b/internal/infrastructure/github/plugin_registry.go index 84a3cbe06..417783f35 100644 --- a/internal/infrastructure/github/plugin_registry.go +++ b/internal/infrastructure/github/plugin_registry.go @@ -5,6 +5,7 @@ import ( "encoding/json" "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/plugin" ) @@ -17,7 +18,6 @@ func NewPluginRegistry() gateway.PluginRegistry { const source = `https://raw.githubusercontent.com/reearth/plugins/main/plugins.json` func (d *pluginRegistry) FetchMetadata(ctx context.Context) ([]*plugin.Metadata, error) { - response, err := fetchURL(ctx, source) if err != nil { return nil, err @@ -28,7 +28,8 @@ func (d *pluginRegistry) FetchMetadata(ctx context.Context) ([]*plugin.Metadata, var result []*plugin.Metadata err = json.NewDecoder(response).Decode(&result) if err != nil { - return nil, err + log.Errorf("plugin_registry: error: %s", err) + return nil, gateway.ErrFailedToFetchDataFromPluginRegistry } return result, nil } diff --git a/internal/infrastructure/github/plugin_registry_test.go 
b/internal/infrastructure/github/plugin_registry_test.go index 7da6ef6be..8f763632b 100644 --- a/internal/infrastructure/github/plugin_registry_test.go +++ b/internal/infrastructure/github/plugin_registry_test.go @@ -2,11 +2,11 @@ package github import ( "context" - "errors" "testing" "time" "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/stretchr/testify/assert" ) @@ -19,8 +19,16 @@ func TestNewPluginRegistry(t *testing.T) { func TestPluginRegistry_FetchMetadata(t *testing.T) { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", - httpmock.NewStringResponder(200, `[{"name": "reearth","description": "Official Plugin", "author": "reearth", "thumbnailUrl": "", "createdAt": "2021-03-16T04:19:57.592Z"}]`)) + + httpmock.RegisterResponder( + "GET", + "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", + httpmock.NewStringResponder( + 200, + `[{"name": "reearth","description": "Official Plugin", "author": "reearth", "thumbnailUrl": "", "createdAt": "2021-03-16T04:19:57.592Z"}]`, + ), + ) + d := NewPluginRegistry() res, err := d.FetchMetadata(context.Background()) tm, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") @@ -40,12 +48,13 @@ func TestPluginRegistry_FetchMetadata(t *testing.T) { httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", httpmock.NewStringResponder(400, `mock bad request`)) _, err = d.FetchMetadata(context.Background()) - assert.True(t, errors.As(errors.New("StatusCode=400"), &err)) + + assert.EqualError(t, err, "StatusCode=400") // fail: unable to marshal httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", httpmock.NewStringResponder(200, `{"hoge": "test"}`)) _, err = d.FetchMetadata(context.Background()) - 
assert.True(t, errors.As(errors.New("cannot unmarshal object into Go value of type []*plugin.Metadata"), &err)) + assert.Equal(t, gateway.ErrFailedToFetchDataFromPluginRegistry, err) } diff --git a/internal/infrastructure/google/fetch_test.go b/internal/infrastructure/google/fetch_test.go index b7e8d2c44..21e9b89be 100644 --- a/internal/infrastructure/google/fetch_test.go +++ b/internal/infrastructure/google/fetch_test.go @@ -22,6 +22,7 @@ func Test_fetchCSV(t *testing.T) { fileId string sheetName string } + tests := []struct { name string setup func() @@ -69,6 +70,7 @@ func Test_fetchCSV(t *testing.T) { wantErr: false, }, } + for _, tt := range tests { tt := tt diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index 016815808..17b8c2f19 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -90,8 +90,8 @@ func (c *SceneIDConsumer) Consume(raw bson.Raw) error { } func NewScene(scene *scene.Scene) (*SceneDocument, string) { - widgets := scene.WidgetSystem().Widgets() - plugins := scene.PluginSystem().Plugins() + widgets := scene.Widgets().Widgets() + plugins := scene.Plugins().Plugins() clusters := scene.Clusters().Clusters() widgetsDoc := make([]SceneWidgetDocument, 0, len(widgets)) @@ -223,9 +223,9 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { Team(tid). RootLayer(lid). Clusters(cl). - WidgetSystem(scene.NewWidgetSystem(ws)). + Widgets(scene.NewWidgets(ws)). WidgetAlignSystem(d.AlignSystem.Model()). - PluginSystem(scene.NewPluginSystem(ps)). + Plugins(scene.NewPlugins(ps)). UpdatedAt(d.UpdateAt). Property(prid). 
Build() diff --git a/internal/infrastructure/mongo/mongodoc/tag_test.go b/internal/infrastructure/mongo/mongodoc/tag_test.go index b4d1869e0..b4cd65614 100644 --- a/internal/infrastructure/mongo/mongodoc/tag_test.go +++ b/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -31,6 +31,7 @@ func TestNewTag(t *testing.T) { type args struct { t tag.Tag } + tests := []struct { name string args args @@ -70,13 +71,14 @@ func TestNewTag(t *testing.T) { want1: ti.ID().String(), }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got, got1 := NewTag(tc.args.t) - assert.Equal(tt, tc.want1, got1) - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want1, got1) + assert.Equal(t, tc.want, got) }) } } @@ -98,6 +100,7 @@ func TestNewTags(t *testing.T) { type args struct { tags []*tag.Tag } + tests := []struct { name string args args @@ -123,13 +126,14 @@ func TestNewTags(t *testing.T) { want1: []string{tgi.ID().String()}, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got, got1 := NewTags(tc.args.tags) - assert.Equal(tt, tc.want, got) - assert.Equal(tt, tc.want1, got1) + assert.Equal(t, tc.want, got) + assert.Equal(t, tc.want1, got1) }) } } @@ -158,6 +162,7 @@ func TestFuncConsumer_Consume(t *testing.T) { type args struct { raw bson.Raw } + tests := []struct { name string fields fields @@ -213,10 +218,11 @@ func TestFuncConsumer_Consume(t *testing.T) { wantErr: true, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() c := &TagConsumer{ Rows: tc.fields.Rows, GroupRows: tc.fields.GroupRows, @@ -224,7 +230,7 @@ func TestFuncConsumer_Consume(t *testing.T) { } if err := c.Consume(tc.args.raw); tc.wantErr { - assert.Error(tt, err) + assert.Error(t, err) } }) } @@ -256,6 +262,7 @@ func 
TestTagDocument_Model(t *testing.T) { Item *TagItemDocument Group *TagGroupDocument } + tests := []struct { name string fields fields @@ -301,10 +308,11 @@ func TestTagDocument_Model(t *testing.T) { wantErr: true, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() d := &TagDocument{ ID: tc.fields.ID, Label: tc.fields.Label, @@ -314,10 +322,10 @@ func TestTagDocument_Model(t *testing.T) { } got, got1, err := d.Model() if tc.wantErr { - assert.Error(tt, err) + assert.Error(t, err) } else { - assert.Equal(tt, tc.want, got) - assert.Equal(tt, tc.want1, got1) + assert.Equal(t, tc.want, got) + assert.Equal(t, tc.want1, got1) } }) } @@ -343,6 +351,7 @@ func TestTagDocument_ModelGroup(t *testing.T) { Item *TagItemDocument Group *TagGroupDocument } + tests := []struct { name string fields fields @@ -392,10 +401,11 @@ func TestTagDocument_ModelGroup(t *testing.T) { wantErr: false, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() d := &TagDocument{ ID: tc.fields.ID, Label: tc.fields.Label, @@ -405,9 +415,9 @@ func TestTagDocument_ModelGroup(t *testing.T) { } got, err := d.ModelGroup() if tc.wantErr { - assert.Error(tt, err) + assert.Error(t, err) } - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want, got) }) } } @@ -432,6 +442,7 @@ func TestTagDocument_ModelItem(t *testing.T) { Item *TagItemDocument Group *TagGroupDocument } + tests := []struct { name string fields fields @@ -474,10 +485,11 @@ func TestTagDocument_ModelItem(t *testing.T) { wantErr: false, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() d := &TagDocument{ ID: tc.fields.ID, Label: tc.fields.Label, @@ -487,9 +499,9 @@ func TestTagDocument_ModelItem(t *testing.T) { } got, err := d.ModelItem() if tc.wantErr 
{ - assert.Error(tt, err) + assert.Error(t, err) } - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want, got) }) } } diff --git a/internal/usecase/gateway/plugin_registry.go b/internal/usecase/gateway/plugin_registry.go index 7c1ef7cf0..a4e81e834 100644 --- a/internal/usecase/gateway/plugin_registry.go +++ b/internal/usecase/gateway/plugin_registry.go @@ -2,10 +2,13 @@ package gateway import ( "context" + "errors" "github.com/reearth/reearth-backend/pkg/plugin" ) +var ErrFailedToFetchDataFromPluginRegistry = errors.New("failed to fetch data from the plugin registry") + type PluginRegistry interface { FetchMetadata(ctx context.Context) ([]*plugin.Metadata, error) } diff --git a/internal/usecase/interactor/plugin_delete.go b/internal/usecase/interactor/plugin_delete.go index 4d2558e00..33177b212 100644 --- a/internal/usecase/interactor/plugin_delete.go +++ b/internal/usecase/interactor/plugin_delete.go @@ -37,7 +37,7 @@ func (i *Plugin) Delete(ctx context.Context, pid id.PluginID, operator *usecase. 
return interfaces.ErrOperationDenied } - if s.PluginSystem().HasPlugin(p.ID()) { + if s.Plugins().HasPlugin(p.ID()) { return interfaces.ErrCannotDeleteUsedPlugin } diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index b934c751c..f1d3b101b 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -173,7 +173,7 @@ func (i *Plugin) installPlugin(ctx context.Context, p *pluginpack.Package, s *sc return err } } - s.PluginSystem().Add(scene.NewPlugin(p.Manifest.Plugin.ID(), ppid)) + s.Plugins().Add(scene.NewPlugin(p.Manifest.Plugin.ID(), ppid)) if pp != nil { if err := i.propertyRepo.Save(ctx, pp); err != nil { diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index ad4809085..a33da91d8 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -404,7 +404,7 @@ func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemPa // Set nameFieldValue to the name field if inp.NameFieldValue != nil { - _ = item.UpdateNameFieldValue(ps, inp.NameFieldValue) + item.RepresentativeField(ps).UpdateUnsafe(inp.NameFieldValue) } err = i.propertyRepo.Save(ctx, p) @@ -524,7 +524,7 @@ func (i *Property) UpdateItems(ctx context.Context, inp interfaces.UpdatePropert if op.Operation == interfaces.ListOperationAdd { g, _ := p.AddListItem(ps, inp.Pointer, op.Index) if op.NameFieldValue != nil { - _ = g.UpdateNameFieldValue(ps, op.NameFieldValue) + g.RepresentativeField(ps).UpdateUnsafe(op.NameFieldValue) } } else if op.Operation == interfaces.ListOperationMove && ptr != nil && op.Index != nil { _, _ = p.MoveListItem(ptr, *op.Index) diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 56b6e7db0..dfb23750a 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -99,7 +99,7 @@ func (i *Scene) Create(ctx 
context.Context, pid id.ProjectID, operator *usecase. return nil, err } - ps := scene.NewPluginSystem([]*scene.Plugin{ + ps := scene.NewPlugins([]*scene.Plugin{ scene.NewPlugin(id.OfficialPluginID, nil), }) @@ -119,7 +119,7 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. Team(prj.Team()). Property(p.ID()). RootLayer(rootLayer.ID()). - PluginSystem(ps). + Plugins(ps). Build() if err != nil { @@ -216,7 +216,7 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, return nil, nil, err } - s.WidgetSystem().Add(widget) + s.Widgets().Add(widget) if !floating { var loc scene.WidgetLocation @@ -278,7 +278,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return nil, nil, err } - widget := scene.WidgetSystem().Widget(param.WidgetID) + widget := scene.Widgets().Widget(param.WidgetID) if widget == nil { return nil, nil, rerror.ErrNotFound } @@ -406,7 +406,7 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID return nil, err } - ws := scene.WidgetSystem() + ws := scene.Widgets() widget := ws.Widget(wid) if widget == nil { @@ -458,7 +458,7 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin return nil, pid, nil, err2 } - if s.PluginSystem().HasPlugin(pid) { + if s.Plugins().HasPlugin(pid) { return nil, pid, nil, interfaces.ErrPluginAlreadyInstalled } @@ -486,7 +486,7 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin propertyID = &prid } - s.PluginSystem().Add(scene.NewPlugin(pid, propertyID)) + s.Plugins().Add(scene.NewPlugin(pid, propertyID)) if p != nil { if err := i.propertyRepo.Save(ctx, p); err != nil { @@ -539,7 +539,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug return nil, err } - ps := scene.PluginSystem() + ps := scene.Plugins() if !ps.Has(pid) { return nil, interfaces.ErrPluginNotInstalled } @@ -553,7 +553,7 @@ func (i *Scene) 
UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug ps.Remove(pid) // remove widgets - removedProperties = append(removedProperties, scene.WidgetSystem().RemoveAllByPlugin(pid)...) + removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid)...) // remove layers res, err := layerops.Processor{ diff --git a/pkg/asset/asset_test.go b/pkg/asset/asset_test.go index d685bd523..f0f6e1b28 100644 --- a/pkg/asset/asset_test.go +++ b/pkg/asset/asset_test.go @@ -11,7 +11,8 @@ func TestAsset(t *testing.T) { aid := NewID() tid := NewTeamID() d := createdAt(aid) - testCases := []struct { + + tests := []struct { Name string Expected struct { ID ID @@ -45,18 +46,19 @@ func TestAsset(t *testing.T) { Actual: New().ID(aid).CreatedAt(d).ContentType("test").Team(tid).Size(10).Name("xxx").URL("tt://xxx.xx").MustBuild(), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected.ID, tc.Actual.ID()) - assert.Equal(tt, tc.Expected.CreatedAt, tc.Actual.CreatedAt()) - assert.Equal(tt, tc.Expected.Team, tc.Actual.Team()) - assert.Equal(tt, tc.Expected.Url, tc.Actual.URL()) - assert.Equal(tt, tc.Expected.Size, tc.Actual.Size()) - assert.Equal(tt, tc.Expected.Name, tc.Actual.Name()) - assert.Equal(tt, tc.Expected.ContentType, tc.Actual.ContentType()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected.ID, tc.Actual.ID()) + assert.Equal(t, tc.Expected.CreatedAt, tc.Actual.CreatedAt()) + assert.Equal(t, tc.Expected.Team, tc.Actual.Team()) + assert.Equal(t, tc.Expected.Url, tc.Actual.URL()) + assert.Equal(t, tc.Expected.Size, tc.Actual.Size()) + assert.Equal(t, tc.Expected.Name, tc.Actual.Name()) + assert.Equal(t, tc.Expected.ContentType, tc.Actual.ContentType()) }) } } diff --git a/pkg/asset/builder_test.go b/pkg/asset/builder_test.go index ada472821..d0523dd0d 100644 --- a/pkg/asset/builder_test.go +++ 
b/pkg/asset/builder_test.go @@ -1,7 +1,6 @@ package asset import ( - "errors" "testing" "time" @@ -12,27 +11,35 @@ func TestBuilder_Build(t *testing.T) { aid := NewID() tid := NewTeamID() d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) - testCases := []struct { - Name, AssetName string - Id ID - CreatedAt time.Time - Team TeamID - Size int64 - Url string - ContentType string - ExpectedAssert *Asset - Err error + + type args struct { + id ID + name string + createdAt time.Time + team TeamID + size int64 + url string + contentType string + } + + tests := []struct { + name string + args args + expected *Asset + err error }{ { - Name: "Valid asset", - CreatedAt: d, - Id: aid, - Team: tid, - AssetName: "xxx", - Size: 10, - Url: "tt://xxx.zz", - ContentType: "bbb", - ExpectedAssert: &Asset{ + name: "Valid asset", + args: args{ + createdAt: d, + id: aid, + team: tid, + name: "xxx", + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + expected: &Asset{ id: aid, createdAt: d, team: tid, @@ -41,70 +48,76 @@ func TestBuilder_Build(t *testing.T) { url: "tt://xxx.zz", contentType: "bbb", }, - Err: nil, }, { - Name: "failed empty size", - Id: NewID(), - CreatedAt: d, - Team: NewTeamID(), - Size: 0, - Url: "tt://xxx.zz", - ContentType: "bbb", - ExpectedAssert: nil, - Err: ErrEmptySize, + name: "failed empty size", + args: args{ + id: NewID(), + createdAt: d, + team: NewTeamID(), + size: 0, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptySize, }, { - Name: "failed empty url", - Id: NewID(), - CreatedAt: d, - Team: NewTeamID(), - Size: 10, - Url: "", - ContentType: "bbb", - ExpectedAssert: nil, - Err: ErrEmptyURL, + name: "failed empty url", + args: args{ + id: NewID(), + createdAt: d, + team: NewTeamID(), + size: 10, + url: "", + contentType: "bbb", + }, + err: ErrEmptyURL, }, { - Name: "failed empty team", - Id: NewID(), - CreatedAt: d, - Team: TeamID{}, - Size: 10, - Url: "tt://xxx.zz", - ContentType: "bbb", - ExpectedAssert: nil, - Err: 
ErrEmptyTeamID, + name: "failed empty team", + args: args{ + id: NewID(), + createdAt: d, + team: TeamID{}, + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptyTeamID, }, { - Name: "failed invalid Id", - Id: ID{}, - CreatedAt: d, - Team: NewTeamID(), - Size: 10, - Url: "tt://xxx.zz", - ContentType: "bbb", - ExpectedAssert: nil, - Err: ErrEmptyTeamID, + name: "failed invalid Id", + args: args{ + id: ID{}, + createdAt: d, + team: NewTeamID(), + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrInvalidID, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - a, err := New(). - ID(tc.Id). - CreatedAt(tc.CreatedAt). - Name(tc.AssetName). - Size(tc.Size). - Team(tc.Team). - ContentType(tc.ContentType). - URL(tc.Url). + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := New(). + ID(tt.args.id). + CreatedAt(tt.args.createdAt). + Name(tt.args.name). + Size(tt.args.size). + Team(tt.args.team). + ContentType(tt.args.contentType). + URL(tt.args.url). 
Build() - if err == nil { - assert.Equal(tt, tc.ExpectedAssert, a) + if tt.err == nil { + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Nil(t, res) + assert.Equal(t, tt.err, err) } }) } @@ -114,27 +127,35 @@ func TestBuilder_MustBuild(t *testing.T) { aid := NewID() tid := NewTeamID() d := time.Date(1900, 1, 1, 00, 00, 0, 1, time.UTC) - testCases := []struct { - name, assetName string - createdAt time.Time - id ID - team TeamID - size int64 - url string - contentType string - expectedAssert *Asset - panic bool + + type args struct { + id ID + name string + createdAt time.Time + team TeamID + size int64 + url string + contentType string + } + + tests := []struct { + name string + args args + expected *Asset + err error }{ { - name: "Valid asset", - createdAt: d, - id: aid, - team: tid, - assetName: "xxx", - size: 10, - url: "tt://xxx.zz", - contentType: "bbb", - expectedAssert: &Asset{ + name: "Valid asset", + args: args{ + createdAt: d, + id: aid, + team: tid, + name: "xxx", + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + expected: &Asset{ id: aid, createdAt: d, team: tid, @@ -143,88 +164,80 @@ func TestBuilder_MustBuild(t *testing.T) { url: "tt://xxx.zz", contentType: "bbb", }, - panic: false, }, { - name: "failed empty size", - createdAt: d, - id: NewID(), - team: NewTeamID(), - size: 0, - url: "tt://xxx.zz", - contentType: "bbb", - expectedAssert: nil, - panic: true, + name: "failed empty size", + args: args{ + createdAt: d, + id: NewID(), + team: NewTeamID(), + size: 0, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptySize, }, { - name: "failed empty url", - createdAt: d, - id: NewID(), - team: NewTeamID(), - size: 10, - url: "", - contentType: "bbb", - expectedAssert: nil, - panic: true, + name: "failed empty url", + args: args{ + createdAt: d, + id: NewID(), + team: NewTeamID(), + size: 10, + url: "", + contentType: "bbb", + }, + err: ErrEmptyURL, }, { - name: 
"failed empty team", - createdAt: d, - id: NewID(), - team: TeamID{}, - size: 10, - url: "tt://xxx.zz", - contentType: "bbb", - expectedAssert: nil, - panic: true, + name: "failed empty team", + args: args{ + createdAt: d, + id: NewID(), + team: TeamID{}, + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrEmptyTeamID, }, { - name: "failed invalid Id", - createdAt: d, - id: ID{}, - team: NewTeamID(), - size: 10, - url: "tt://xxx.zz", - contentType: "bbb", - expectedAssert: nil, - panic: true, + name: "failed invalid Id", + args: args{ + createdAt: d, + id: ID{}, + team: NewTeamID(), + size: 10, + url: "tt://xxx.zz", + contentType: "bbb", + }, + err: ErrInvalidID, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - var a *Asset - if tc.panic { - defer func() { - if r := recover(); r != nil { - assert.Nil(tt, a) - } - }() - a = New(). - ID(tc.id). - CreatedAt(tc.createdAt). - Name(tc.assetName). - Size(tc.size). - Team(tc.team). - ContentType(tc.contentType). - URL(tc.url). - MustBuild() - } else { - a = New(). - ID(tc.id). - CreatedAt(tc.createdAt). - Name(tc.assetName). - Size(tc.size). - Team(tc.team). - ContentType(tc.contentType). - URL(tc.url). - MustBuild() - assert.Equal(tt, tc.expectedAssert, a) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + build := func() *Asset { + t.Helper() + return New(). + ID(tt.args.id). + CreatedAt(tt.args.createdAt). + Name(tt.args.name). + Size(tt.args.size). + Team(tt.args.team). + ContentType(tt.args.contentType). + URL(tt.args.url). 
+ MustBuild() } + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } }) } } diff --git a/pkg/builtin/main_test.go b/pkg/builtin/main_test.go index fa444d161..acd98c551 100644 --- a/pkg/builtin/main_test.go +++ b/pkg/builtin/main_test.go @@ -10,7 +10,7 @@ import ( ) func TestGetPropertySchemaByVisualizer(t *testing.T) { - testCases := []struct { + tests := []struct { name string visualizer visualizer.Visualizer expectedNil bool @@ -26,15 +26,16 @@ func TestGetPropertySchemaByVisualizer(t *testing.T) { expectedNil: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := GetPropertySchemaByVisualizer(tc.visualizer) if tc.expectedNil { - assert.Nil(tt, res) + assert.Nil(t, res) } else { - assert.NotNil(tt, res) + assert.NotNil(t, res) } }) } @@ -44,7 +45,7 @@ func TestPlugin(t *testing.T) { assert.NotNil(t, Plugin()) } func TestGetPlugin(t *testing.T) { - testCases := []struct { + tests := []struct { name string pluginID plugin.ID expectedNil bool @@ -60,22 +61,23 @@ func TestGetPlugin(t *testing.T) { expectedNil: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := GetPlugin(tc.pluginID) if tc.expectedNil { - assert.Nil(tt, res) + assert.Nil(t, res) } else { - assert.NotNil(tt, res) + assert.NotNil(t, res) } }) } } func TestGetPropertySchema(t *testing.T) { - testCases := []struct { + tests := []struct { name string psId property.SchemaID expectedNil bool @@ -91,15 +93,16 @@ func TestGetPropertySchema(t *testing.T) { expectedNil: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { 
+ t.Parallel() res := GetPropertySchema(tc.psId) if tc.expectedNil { - assert.Nil(tt, res) + assert.Nil(t, res) } else { - assert.NotNil(tt, res) + assert.NotNil(t, res) } }) } diff --git a/pkg/dataset/value_optional_test.go b/pkg/dataset/value_optional_test.go index b96decc0e..2ad65aa26 100644 --- a/pkg/dataset/value_optional_test.go +++ b/pkg/dataset/value_optional_test.go @@ -12,6 +12,7 @@ func TestNewNilableValue(t *testing.T) { t ValueType v *Value } + tests := []struct { name string args args @@ -63,6 +64,7 @@ func TestOptionalValueFrom(t *testing.T) { type args struct { v *Value } + tests := []struct { name string args args @@ -211,6 +213,7 @@ func TestOptionalValue_SetValue(t *testing.T) { type args struct { v *Value } + tests := []struct { name string value *OptionalValue @@ -291,8 +294,11 @@ func TestOptionalValue_Clone(t *testing.T) { target: nil, }, } + for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() res := tt.target.Clone() assert.Equal(t, tt.target, res) if tt.target != nil { @@ -306,6 +312,7 @@ func TestOptionalValue_Cast(t *testing.T) { type args struct { t ValueType } + tests := []struct { name string target *OptionalValue @@ -345,7 +352,9 @@ func TestOptionalValue_Cast(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) }) } diff --git a/pkg/file/file_test.go b/pkg/file/file_test.go index 5d71c9ab9..5970c13df 100644 --- a/pkg/file/file_test.go +++ b/pkg/file/file_test.go @@ -32,7 +32,7 @@ func TestReaders(t *testing.T) { "test/foo.bar": "test\n", } - testCases := []struct { + tests := []struct { Name string Archive Iterator Files []string @@ -49,10 +49,11 @@ func TestReaders(t *testing.T) { }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - // tt.Parallel() cannot be used - assert := assert.New(tt) + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { 
+ // t.Parallel() cannot be used + assert := assert.New(t) for i, f := range tc.Files { n, err := tc.Archive.Next() diff --git a/pkg/i18n/string_test.go b/pkg/i18n/string_test.go index ca154298d..88903ed70 100644 --- a/pkg/i18n/string_test.go +++ b/pkg/i18n/string_test.go @@ -8,7 +8,7 @@ import ( ) func TestString_String(t *testing.T) { - testCases := []struct { + tests := []struct { Name, ExpectedStr string I18nString String }{ @@ -23,17 +23,18 @@ func TestString_String(t *testing.T) { I18nString: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.ExpectedStr, tc.I18nString.String()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedStr, tc.I18nString.String()) }) } } func TestStringTranslated(t *testing.T) { - testCases := []struct { + tests := []struct { Name, Lang, ExpectedStr string I18nString String }{ @@ -56,11 +57,12 @@ func TestStringTranslated(t *testing.T) { I18nString: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.ExpectedStr, tc.I18nString.Translated(tc.Lang)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedStr, tc.I18nString.Translated(tc.Lang)) }) } } @@ -71,7 +73,7 @@ func TestStringFrom(t *testing.T) { } func TestStringCopy(t *testing.T) { - testCases := []struct { + tests := []struct { Name string SourceString String }{ @@ -88,13 +90,14 @@ func TestStringCopy(t *testing.T) { SourceString: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.True(tt, reflect.DeepEqual(tc.SourceString, tc.SourceString.Copy())) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.True(t, reflect.DeepEqual(tc.SourceString, tc.SourceString.Copy())) if tc.SourceString == nil { - 
assert.Nil(tt, tc.SourceString.Copy()) + assert.Nil(t, tc.SourceString.Copy()) } }) } @@ -105,7 +108,7 @@ func TestString_StringRef(t *testing.T) { return &s } - testCases := []struct { + tests := []struct { Name string I18nString String Expected *string @@ -121,11 +124,12 @@ func TestString_StringRef(t *testing.T) { Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.I18nString.StringRef()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.I18nString.StringRef()) }) } } diff --git a/pkg/id/asset_gen_test.go b/pkg/id/asset_gen_test.go index 8a8d24a98..11101dddc 100644 --- a/pkg/id/asset_gen_test.go +++ b/pkg/id/asset_gen_test.go @@ -61,6 +61,7 @@ func TestAssetIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestAssetIDsFrom(t *testing.T) { } func TestAssetIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/cluster_field_gen_test.go b/pkg/id/cluster_field_gen_test.go index 7fc5cd187..c8f383db1 100644 --- a/pkg/id/cluster_field_gen_test.go +++ b/pkg/id/cluster_field_gen_test.go @@ -61,6 +61,7 @@ func TestClusterIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestClusterIDsFrom(t *testing.T) { } func TestClusterIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/dataset_gen_test.go b/pkg/id/dataset_gen_test.go index 294b45268..87f2363a0 100644 --- a/pkg/id/dataset_gen_test.go +++ b/pkg/id/dataset_gen_test.go @@ -61,6 +61,7 @@ func TestDatasetIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestDatasetIDsFrom(t *testing.T) { } func TestDatasetIDsFromID(t 
*testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/dataset_schema_field_gen_test.go b/pkg/id/dataset_schema_field_gen_test.go index 9f4f20752..9dbefbc1b 100644 --- a/pkg/id/dataset_schema_field_gen_test.go +++ b/pkg/id/dataset_schema_field_gen_test.go @@ -61,6 +61,7 @@ func TestDatasetSchemaFieldIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestDatasetSchemaFieldIDsFrom(t *testing.T) { } func TestDatasetSchemaFieldIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/dataset_schema_gen_test.go b/pkg/id/dataset_schema_gen_test.go index 257c78f01..48eab60a5 100644 --- a/pkg/id/dataset_schema_gen_test.go +++ b/pkg/id/dataset_schema_gen_test.go @@ -61,6 +61,7 @@ func TestDatasetSchemaIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestDatasetSchemaIDsFrom(t *testing.T) { } func TestDatasetSchemaIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/id_test.go b/pkg/id/id_test.go index 0b90dada5..d68e2146b 100644 --- a/pkg/id/id_test.go +++ b/pkg/id/id_test.go @@ -1,7 +1,6 @@ package id import ( - "errors" "strings" "testing" @@ -13,13 +12,12 @@ func TestID_New(t *testing.T) { id := New() assert.NotNil(t, id) ulID, err := ulid.Parse(id.String()) - assert.NotNil(t, ulID) assert.Nil(t, err) } func TestID_NewAllID(t *testing.T) { - testCases := []struct { + tests := []struct { name string input int expected int @@ -40,16 +38,17 @@ func TestID_NewAllID(t *testing.T) { expected: 5, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := NewAllID(tc.input) - assert.Equal(tt, tc.expected, len(result)) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := 
NewAllID(tt.input) + assert.Equal(t, tt.expected, len(result)) + for _, id := range result { assert.NotNil(t, id) ulID, err := ulid.Parse(id.String()) - assert.NotNil(t, ulID) assert.Nil(t, err) } @@ -58,7 +57,7 @@ func TestID_NewAllID(t *testing.T) { } func TestID_NewIDWith(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string }{ @@ -75,20 +74,21 @@ func TestID_NewIDWith(t *testing.T) { input: "01f2r7kg1fvvffp0gmexgy5hxy", }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := NewIDWith(tc.input) - exResult, exErr := FromID(tc.input) - assert.Equal(tt, exResult, result) - assert.Equal(tt, exErr, err) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := NewIDWith(tt.input) + exResult, exErr := FromID(tt.input) + assert.Equal(t, exResult, result) + assert.Equal(t, exErr, err) }) } } func TestID_FromID(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -130,21 +130,22 @@ func TestID_FromID(t *testing.T) { }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := FromID(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := FromID(tt.input) + assert.Equal(t, tt.expected.result, result) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.err, err) } }) } } func TestID_FromIDRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *ID @@ -165,21 +166,22 @@ func TestID_FromIDRef(t *testing.T) { expected: &ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - 
result := FromIDRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := FromIDRef(&tt.input) + assert.Equal(t, tt.expected, result) + if tt.expected != nil { + assert.Equal(t, tt.expected, result) } }) } } func TestID_MustBeID(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string shouldPanic bool @@ -202,64 +204,54 @@ func TestID_MustBeID(t *testing.T) { expected: ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + if tt.shouldPanic { + assert.Panics(t, func() { MustBeID(tt.input) }) return } - result := MustBeID(tc.input) - assert.Equal(tt, tc.expected, result) + result := MustBeID(tt.input) + assert.Equal(t, tt.expected, result) }) } } func TestID_Copy(t *testing.T) { id := New() - id2 := id.Copy() - assert.Equal(t, id.id, id2.id) - assert.NotSame(t, id.id, id2.id) } func TestID_Timestamp(t *testing.T) { id := New() - assert.Equal(t, ulid.Time(id.id.Time()), id.Timestamp()) } func TestID_String(t *testing.T) { id := MustBeID("01f2r7kg1fvvffp0gmexgy5hxy") - assert.Equal(t, id.String(), "01f2r7kg1fvvffp0gmexgy5hxy") } func TestID_GoString(t *testing.T) { id := MustBeID("01f2r7kg1fvvffp0gmexgy5hxy") - assert.Equal(t, id.GoString(), "id.ID(01f2r7kg1fvvffp0gmexgy5hxy)") } func TestID_IsNil(t *testing.T) { id := ID{} - assert.True(t, id.IsNil()) - id = New() - assert.False(t, id.IsNil()) - } func TestID_Compare(t *testing.T) { id1 := New() id2 := New() - assert.Less(t, id1.Compare(id2), 0) assert.Greater(t, id2.Compare(id1), 0) assert.Equal(t, id1.Compare(id1), 0) @@ -269,29 +261,24 @@ func 
TestID_Compare(t *testing.T) { func TestID_Equal(t *testing.T) { id1 := New() id2 := id1.Copy() - assert.True(t, id1.Equal(id2)) assert.False(t, id1.Equal(New())) } func TestID_IsEmpty(t *testing.T) { id := ID{} - assert.True(t, id.IsEmpty()) - id = New() - assert.False(t, id.IsEmpty()) } func TestID_generateID(t *testing.T) { id := generateID() - assert.NotNil(t, id) } func TestID_generateAllID(t *testing.T) { - testCases := []struct { + tests := []struct { name string input int expected int @@ -312,16 +299,16 @@ func TestID_generateAllID(t *testing.T) { expected: 5, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := generateAllID(tc.input) - assert.Equal(tt, tc.expected, len(result)) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := generateAllID(tt.input) + assert.Equal(t, tt.expected, len(result)) for _, id := range result { assert.NotNil(t, id) ulID, err := ulid.Parse(id.String()) - assert.NotNil(t, ulID) assert.Nil(t, err) } @@ -339,7 +326,7 @@ func TestID_parseID(t *testing.T) { } func TestID_includeUpperCase(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected bool @@ -360,12 +347,13 @@ func TestID_includeUpperCase(t *testing.T) { expected: true, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := includeUpperCase(tc.input) - assert.Equal(tt, tc.expected, result) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result := includeUpperCase(tt.input) + assert.Equal(t, tt.expected, result) }) } } diff --git a/pkg/id/infobox_field_gen_test.go b/pkg/id/infobox_field_gen_test.go index 3f2050c9b..1f6c86b25 100644 --- a/pkg/id/infobox_field_gen_test.go +++ b/pkg/id/infobox_field_gen_test.go @@ -61,6 +61,7 @@ func TestInfoboxFieldIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt 
:= tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestInfoboxFieldIDsFrom(t *testing.T) { } func TestInfoboxFieldIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/layer_gen_test.go b/pkg/id/layer_gen_test.go index c68fd3fb3..a570cdf8d 100644 --- a/pkg/id/layer_gen_test.go +++ b/pkg/id/layer_gen_test.go @@ -61,6 +61,7 @@ func TestLayerIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestLayerIDsFrom(t *testing.T) { } func TestLayerIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/plugin_extension_test.go b/pkg/id/plugin_extension_test.go index 02f95aac0..dc504f873 100644 --- a/pkg/id/plugin_extension_test.go +++ b/pkg/id/plugin_extension_test.go @@ -7,10 +7,10 @@ import ( ) func TestPluginExtensionIDFromRef(t *testing.T) { - t.Parallel() input1 := "testStringId" expected1 := PluginExtensionID(input1) - testCases := []struct { + + tests := []struct { name string input *string expected *PluginExtensionID @@ -26,12 +26,13 @@ func TestPluginExtensionIDFromRef(t *testing.T) { expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() result := PluginExtensionIDFromRef(tc.input) - assert.Equal(tt, tc.expected, result) + assert.Equal(t, tc.expected, result) }) } } diff --git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go index 5df599d80..c2c785477 100644 --- a/pkg/id/plugin_test.go +++ b/pkg/id/plugin_test.go @@ -2,7 +2,6 @@ package id import ( "encoding" - "errors" "strings" "testing" @@ -14,8 +13,7 @@ var _ encoding.TextMarshaler = (*PluginID)(nil) var _ encoding.TextUnmarshaler = (*PluginID)(nil) func TestPluginIDValidator(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string 
expected bool @@ -66,18 +64,18 @@ func TestPluginIDValidator(t *testing.T) { expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, validatePluginName(tc.input)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, validatePluginName(tc.input)) }) } } func TestNewPluginID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string pluginName string version string @@ -141,23 +139,23 @@ func TestNewPluginID(t *testing.T) { expectedError: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() result, err := NewPluginID(tc.pluginName, tc.version, tc.scene) if tc.expectedError { - assert.Error(tt, err) + assert.Error(t, err) } else { - assert.Equal(tt, tc.expected, result) + assert.Equal(t, tc.expected, result) } }) } } func TestPluginIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected PluginID @@ -214,23 +212,23 @@ func TestPluginIDFrom(t *testing.T) { expectedError: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() result, err := PluginIDFrom(tc.input) if tc.expectedError { - assert.Error(tt, err) + assert.Error(t, err) } else { - assert.Equal(tt, tc.expected, result) + assert.Equal(t, tc.expected, result) } }) } } func TestMustPluginID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected PluginID @@ -266,25 +264,25 @@ func TestMustPluginID(t *testing.T) { expectedError: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt 
*testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() if tc.expectedError { - assert.Panics(tt, func() { + assert.Panics(t, func() { _ = MustPluginID(tc.input) }) } else { result := MustPluginID(tc.input) - assert.Equal(tt, tc.expected, result) + assert.Equal(t, tc.expected, result) } }) } } func TestPluginIDFromRef(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected *PluginID @@ -315,16 +313,17 @@ func TestPluginIDFromRef(t *testing.T) { input: "xxxx~ssss~1.0.0", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() if tc.expected == nil { result := PluginIDFromRef(&tc.input) - assert.Nil(tt, result) + assert.Nil(t, result) } else { result := PluginIDFromRef(&tc.input) - assert.Equal(tt, *tc.expected, *result) + assert.Equal(t, *tc.expected, *result) } }) } @@ -375,8 +374,7 @@ func TestPluginID_System(t *testing.T) { } func TestPluginID_Validate(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input PluginID expected bool @@ -400,17 +398,17 @@ func TestPluginID_Validate(t *testing.T) { expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - assert.Equal(tt, tc.expected, tc.input.Validate()) + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, tc.input.Validate()) }) } } func TestPluginID_String(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input PluginID expected string @@ -456,10 +454,11 @@ func TestPluginID_String(t *testing.T) { expected: "reearth", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - assert.Equal(tt, tc.expected, tc.input.String()) + t.Run(tc.name, func(t *testing.T) { + 
assert.Equal(t, tc.expected, tc.input.String()) }) } } @@ -495,8 +494,7 @@ func TestPluginID_StringRef(t *testing.T) { } func TestPluginID_Equal(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input1 PluginID input2 PluginID @@ -545,12 +543,13 @@ func TestPluginID_Equal(t *testing.T) { expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input1.Equal(tc.input2)) - assert.Equal(tt, tc.expected, tc.input2.Equal(tc.input1)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input1.Equal(tc.input2)) + assert.Equal(t, tc.expected, tc.input2.Equal(tc.input1)) }) } @@ -578,8 +577,7 @@ func TestPluginID_UnmarshalText(t *testing.T) { } func TestPluginIDsToStrings(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []PluginID expected []string @@ -609,47 +607,31 @@ func TestPluginIDsToStrings(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, PluginIDsToStrings(tc.input)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, PluginIDsToStrings(tc.input)) }) } - } func TestPluginIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string - expected struct { - res []PluginID - err error - } + expected []PluginID + err error }{ { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []PluginID - err error - }{ - res: make([]PluginID, 0), - err: nil, - }, + name: "Empty slice", + input: make([]string, 0), + expected: []PluginID{}, }, { - name: "1 element", - input: []string{"Test~1.0.0"}, - expected: struct { - res []PluginID - err error - }{ - res: []PluginID{MustPluginID("Test~1.0.0")}, - err: 
nil, - }, + name: "1 element", + input: []string{"Test~1.0.0"}, + expected: []PluginID{MustPluginID("Test~1.0.0")}, }, { name: "multiple elements", @@ -658,47 +640,36 @@ func TestPluginIDsFrom(t *testing.T) { "Test~1.0.1", "Test~1.0.2", }, - expected: struct { - res []PluginID - err error - }{ - res: []PluginID{ - MustPluginID("Test~1.0.0"), - MustPluginID("Test~1.0.1"), - MustPluginID("Test~1.0.2"), - }, - err: nil, + expected: []PluginID{ + MustPluginID("Test~1.0.0"), + MustPluginID("Test~1.0.1"), + MustPluginID("Test~1.0.2"), }, }, { - name: "multiple elements", + name: "invalid element", input: []string{ "Test~1.0.0", "Test~1.0.1", - "Test~1.0.2", - }, - expected: struct { - res []PluginID - err error - }{ - res: nil, - err: ErrInvalidID, + "Test", }, + expected: nil, + err: ErrInvalidID, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() - if tc.expected.err != nil { - _, err := PluginIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + res, err := PluginIDsFrom(tt.input) + if tt.err != nil { + assert.Nil(t, res) + assert.Equal(t, tt.err, err) } else { - res, err := PluginIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) } }) } diff --git a/pkg/id/project_gen_test.go b/pkg/id/project_gen_test.go index 22869ed17..632c37215 100644 --- a/pkg/id/project_gen_test.go +++ b/pkg/id/project_gen_test.go @@ -61,6 +61,7 @@ func TestProjectIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestProjectIDsFrom(t *testing.T) { } func TestProjectIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/property_gen_test.go b/pkg/id/property_gen_test.go index 552b62f13..6b1034afd 100644 --- 
a/pkg/id/property_gen_test.go +++ b/pkg/id/property_gen_test.go @@ -61,6 +61,7 @@ func TestPropertyIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestPropertyIDsFrom(t *testing.T) { } func TestPropertyIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/property_item_gen_test.go b/pkg/id/property_item_gen_test.go index c60703a0c..1536c6738 100644 --- a/pkg/id/property_item_gen_test.go +++ b/pkg/id/property_item_gen_test.go @@ -61,6 +61,7 @@ func TestPropertyItemIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestPropertyItemIDsFrom(t *testing.T) { } func TestPropertyItemIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/property_schema_field_test.go b/pkg/id/property_schema_field_test.go index 0f9850777..ab3320f56 100644 --- a/pkg/id/property_schema_field_test.go +++ b/pkg/id/property_schema_field_test.go @@ -7,10 +7,10 @@ import ( ) func TestPropertySchemaFieldIDFrom(t *testing.T) { - t.Parallel() input1 := "testStringId" expected1 := PropertySchemaFieldID(input1) - testCases := []struct { + + tests := []struct { name string input *string expected *PropertySchemaFieldID @@ -26,12 +26,13 @@ func TestPropertySchemaFieldIDFrom(t *testing.T) { expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() result := PropertySchemaFieldIDFrom(tc.input) - assert.Equal(tt, tc.expected, result) + assert.Equal(t, tc.expected, result) }) } } diff --git a/pkg/id/property_schema_group_test.go b/pkg/id/property_schema_group_test.go index 394a825d6..44c488178 100644 --- a/pkg/id/property_schema_group_test.go +++ b/pkg/id/property_schema_group_test.go @@ -7,10 +7,10 @@ import ( ) 
func TestPropertySchemaGroupIDFrom(t *testing.T) { - t.Parallel() input1 := "testStringId" expected1 := PropertySchemaGroupID(input1) - testCases := []struct { + + tests := []struct { name string input *string expected *PropertySchemaGroupID @@ -26,12 +26,13 @@ func TestPropertySchemaGroupIDFrom(t *testing.T) { expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() result := PropertySchemaGroupIDFrom(tc.input) - assert.Equal(tt, tc.expected, result) + assert.Equal(t, tc.expected, result) }) } } diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index 918dc1646..4f35382b3 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -2,7 +2,6 @@ package id import ( "encoding" - "errors" "testing" "github.com/stretchr/testify/assert" @@ -12,8 +11,7 @@ var _ encoding.TextMarshaler = (*PropertySchemaID)(nil) var _ encoding.TextUnmarshaler = (*PropertySchemaID)(nil) func TestPropertySchemaIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -61,17 +59,17 @@ func TestPropertySchemaIDFrom(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := PropertySchemaIDFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(tt, tc.expected.result, result) - assert.True(tt, errors.As(tc.expected.err, &err)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + result, err := PropertySchemaIDFrom(tt.input) + if tt.expected.err != nil { + assert.Equal(t, tt.expected.result, result) + assert.Equal(t, tt.expected.err, err) } else { - assert.Equal(tt, tc.expected.result, result) - assert.Nil(tt, err) + assert.Equal(t, tt.expected.result, result) + assert.Nil(t, err) } }) } @@ -91,8 +89,7 @@ func 
TestPropertySchemaIDFromExtension(t *testing.T) { } func TestMustPropertySchemaID(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string expected struct { @@ -139,18 +136,18 @@ func TestMustPropertySchemaID(t *testing.T) { }{result: PropertySchemaID{}, err: ErrInvalidID}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - if tc.expected.err != nil { - assert.Panics(tt, func() { - _ = MustPropertySchemaID(tc.input) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.expected.err != nil { + assert.Panics(t, func() { + _ = MustPropertySchemaID(tt.input) }) } else { - result := MustPropertySchemaID(tc.input) - assert.Equal(tt, tc.expected.result, result) + result := MustPropertySchemaID(tt.input) + assert.Equal(t, tt.expected.result, result) } }) } @@ -169,7 +166,7 @@ func TestMustPropertySchemaIDFromExtension(t *testing.T) { } func TestPropertySchemaIDFromRef(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *PropertySchemaID @@ -204,13 +201,13 @@ func TestPropertySchemaIDFromRef(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() - result := PropertySchemaIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) + result := PropertySchemaIDFromRef(&tt.input) + assert.Equal(t, tt.expected, result) }) } } @@ -304,8 +301,7 @@ func TestPropertySchemaID_UnmarshalText(t *testing.T) { } func TestPropertySchemaIDsToStrings(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []PropertySchemaID expected []string @@ -335,47 +331,32 @@ func TestPropertySchemaIDsToStrings(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { 
- tt.Parallel() - assert.Equal(tt, tc.expected, PropertySchemaIDsToStrings(tc.input)) + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expected, PropertySchemaIDsToStrings(tt.input)) }) } } func TestPropertySchemaIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []string - expected struct { - res []PropertySchemaID - err error - } + expected []PropertySchemaID + err error }{ { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []PropertySchemaID - err error - }{ - res: make([]PropertySchemaID, 0), - err: nil, - }, + name: "Empty slice", + input: make([]string, 0), + expected: []PropertySchemaID{}, }, { - name: "1 element", - input: []string{"Test~1.0.0/test"}, - expected: struct { - res []PropertySchemaID - err error - }{ - res: []PropertySchemaID{MustPropertySchemaID("Test~1.0.0/test")}, - err: nil, - }, + name: "1 element", + input: []string{"Test~1.0.0/test"}, + expected: []PropertySchemaID{MustPropertySchemaID("Test~1.0.0/test")}, }, { name: "multiple elements", @@ -384,47 +365,35 @@ func TestPropertySchemaIDsFrom(t *testing.T) { "Test~1.0.1/test", "Test~1.0.2/test", }, - expected: struct { - res []PropertySchemaID - err error - }{ - res: []PropertySchemaID{ - MustPropertySchemaID("Test~1.0.0/test"), - MustPropertySchemaID("Test~1.0.1/test"), - MustPropertySchemaID("Test~1.0.2/test"), - }, - err: nil, + expected: []PropertySchemaID{ + MustPropertySchemaID("Test~1.0.0/test"), + MustPropertySchemaID("Test~1.0.1/test"), + MustPropertySchemaID("Test~1.0.2/test"), }, }, { - name: "multiple elements", + name: "invalid elements", input: []string{ "Test~1.0.0/test", "Test~1.0.1/test", - "Test~1.0.2/test", - }, - expected: struct { - res []PropertySchemaID - err error - }{ - res: nil, - err: ErrInvalidID, + "Test~1.0.2", }, + err: ErrInvalidID, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt 
*testing.T) { - tt.Parallel() + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() - if tc.expected.err != nil { - _, err := PropertySchemaIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) + res, err := PropertySchemaIDsFrom(tt.input) + if tt.err != nil { + assert.Nil(t, res) + assert.Equal(t, tt.err, err) } else { - res, err := PropertySchemaIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) } }) } diff --git a/pkg/id/scene_gen_test.go b/pkg/id/scene_gen_test.go index 0bf3cffa7..5a8e902c2 100644 --- a/pkg/id/scene_gen_test.go +++ b/pkg/id/scene_gen_test.go @@ -61,6 +61,7 @@ func TestSceneIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestSceneIDsFrom(t *testing.T) { } func TestSceneIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/tag_gen_test.go b/pkg/id/tag_gen_test.go index bef35da44..f2181eee0 100644 --- a/pkg/id/tag_gen_test.go +++ b/pkg/id/tag_gen_test.go @@ -61,6 +61,7 @@ func TestTagIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestTagIDsFrom(t *testing.T) { } func TestTagIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/team_gen_test.go b/pkg/id/team_gen_test.go index db29cf9b6..660d521b3 100644 --- a/pkg/id/team_gen_test.go +++ b/pkg/id/team_gen_test.go @@ -61,6 +61,7 @@ func TestTeamIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestTeamIDsFrom(t *testing.T) { } func TestTeamIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/user_gen_test.go b/pkg/id/user_gen_test.go index 0dae4566d..a4d3a212f 100644 --- 
a/pkg/id/user_gen_test.go +++ b/pkg/id/user_gen_test.go @@ -61,6 +61,7 @@ func TestUserIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestUserIDsFrom(t *testing.T) { } func TestUserIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/id/widget_gen_test.go b/pkg/id/widget_gen_test.go index 9dfa8d42a..b49c56fc4 100644 --- a/pkg/id/widget_gen_test.go +++ b/pkg/id/widget_gen_test.go @@ -61,6 +61,7 @@ func TestWidgetIDFrom(t *testing.T) { }, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { @@ -400,7 +401,6 @@ func TestWidgetIDsFrom(t *testing.T) { } func TestWidgetIDsFromID(t *testing.T) { - t.Parallel() tests := []struct { name string input []ID diff --git a/pkg/layer/decoding/common_test.go b/pkg/layer/decoding/common_test.go index 0df6b097c..9ea357238 100644 --- a/pkg/layer/decoding/common_test.go +++ b/pkg/layer/decoding/common_test.go @@ -1,58 +1,83 @@ package decoding import ( - "errors" "testing" "github.com/stretchr/testify/assert" ) func TestRgbafToHex(t *testing.T) { - testCases := []struct { + tests := []struct { name string rgba []float64 expected string err error }{ - {name: "orange", rgba: []float64{1, 0.6471, 0, 1}, expected: "ffa500ff", err: nil}, - {name: "RGBA length error", rgba: []float64{1, 0.6471, 0, 1, 1}, expected: "", err: ErrBadColor}, - {name: "RGBA greater than 1 error", rgba: []float64{1, 1.6471, 0, 1, 1}, expected: "", err: ErrBadColor}, + { + name: "orange", + rgba: []float64{1, 0.6471, 0, 1}, + expected: "ffa500ff", + err: nil}, + { + name: "RGBA length error", + rgba: []float64{1, 0.6471, 0, 1, 1}, + expected: "", + err: ErrBadColor}, + { + name: "RGBA greater than 1 error", + rgba: []float64{1, 1.6471, 0, 1, 1}, + expected: "", + err: ErrBadColor}, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - res, err := 
rgbafToHex(tc.rgba) - if tc.err == nil { - assert.NoError(tt, err) - assert.Equal(tt, tc.expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := rgbafToHex(tt.rgba) + if tt.err == nil { + assert.NoError(t, err) + assert.Equal(t, tt.expected, res) } else { - assert.True(tt, errors.As(err, &tc.err)) + assert.Equal(t, tt.err, err) } }) } } func TestRgbaToHex(t *testing.T) { - testCases := []struct { + tests := []struct { name string rgba []int64 expected string err error }{ - {name: "orange", rgba: []int64{255, 165, 0, 255}, expected: "ffa500ff", err: nil}, - {name: "RGBA length error", rgba: []int64{255, 165, 0}, expected: "", err: ErrBadColor}, - {name: "RGBA bad boundaries ", rgba: []int64{400, 165, 0, 1}, expected: "", err: ErrBadColor}, + { + name: "orange", + rgba: []int64{255, 165, 0, 255}, + expected: "ffa500ff", + err: nil}, + { + name: "RGBA length error", + rgba: []int64{255, 165, 0}, + expected: "", + err: ErrBadColor}, + { + name: "RGBA bad boundaries ", + rgba: []int64{400, 165, 0, 1}, + expected: "", + err: ErrBadColor}, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - res, err := rgbaToHex(tc.rgba) - if err == nil { - assert.Equal(tt, tc.expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res, err := rgbaToHex(tt.rgba) + if tt.err == nil { + assert.Equal(t, tt.expected, res) } else { - assert.True(tt, errors.As(err, &tc.err)) + assert.Equal(t, tt.err, err) } }) } diff --git a/pkg/layer/decoding/kml_test.go b/pkg/layer/decoding/kml_test.go index 781dd0d49..aec6d267c 100644 --- a/pkg/layer/decoding/kml_test.go +++ b/pkg/layer/decoding/kml_test.go @@ -163,7 +163,7 @@ func TestKMLDecoder_Decode(t *testing.T) { } //func TestKMLCoordinatesToLatLng(t *testing.T) { -// testCases := []struct { +// tests := []struct { // name, cords string // expectedLatLng *property.LatLng // 
expectedHeight float64 @@ -197,23 +197,23 @@ func TestKMLDecoder_Decode(t *testing.T) { // err: strconv.ErrSyntax, // }, // } -// for _, tc := range testCases { -// tc := tc -// t.Run(tc.name, func(tt *testing.T) { -// tt.Parallel() -// ll, h, err := coordinatesToLatLngHeight(tc.cords) -// if tc.err == nil { -// assert.True(tt, reflect.DeepEqual(ll, tc.expectedLatLng)) -// assert.Equal(tt, tc.expectedHeight, h) +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// t.Parallel() +// ll, h, err := coordinatesToLatLngHeight(tt.cords) +// if tt.err == nil { +// assert.True(t, reflect.DeepEqual(ll, tt.expectedLatLng)) +// assert.Equal(t, tt.expectedHeight, h) // } else { -// assert.True(tt, errors.As(err, &tc.err)) +// assert.Equal(t, tt.err, err) // } // }) // } //} // //func TestKMLCoordinatesToLatLngList(t *testing.T) { -// testCases := []struct { +// tests := []struct { // name, cords string // expected []property.LatLngHeight // err error @@ -254,15 +254,15 @@ func TestKMLDecoder_Decode(t *testing.T) { // err: strconv.ErrSyntax, // }, // } -// for _, tc := range testCases { -// tc := tc -// t.Run(tc.name, func(tt *testing.T) { -// tt.Parallel() -// res, err := coordinatesToLatLngHeightList(tc.cords) -// if tc.err == nil { -// assert.True(tt, reflect.DeepEqual(res, tc.expected)) +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// t.Parallel() +// res, err := coordinatesToLatLngHeightList(tt.cords) +// if tt.err == nil { +// assert.True(t, reflect.DeepEqual(res, tt.expected)) // } else { -// assert.True(tt, errors.As(err, &tc.err)) +// assert.Equal(t, tt.err, err) // } // }) // } @@ -303,7 +303,7 @@ func TestKMLDecoder_Decode(t *testing.T) { // }, // } // expected := [][]property.LatLngHeight{cl1, cl2} -// testCases := []struct { +// tests := []struct { // name string // polygon *kml.Polygon // expected [][]property.LatLngHeight @@ -380,15 +380,15 @@ func TestKMLDecoder_Decode(t *testing.T) { // 
err: strconv.ErrSyntax, // }, // } -// for _, tc := range testCases { -// tc := tc -// t.Run(tc.name, func(tt *testing.T) { -// tt.Parallel() -// res, err := getPolygon(tc.polygon) -// if tc.err == nil { -// assert.True(tt, reflect.DeepEqual(res, tc.expected)) +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { +// t.Parallel() +// res, err := getPolygon(tt.polygon) +// if tt.err == nil { +// assert.True(t, reflect.DeepEqual(res, tt.expected)) // } else { -// assert.True(tt, errors.As(err, &tc.err)) +// assert.Equal(t, tt.err, err) // } // }) // } @@ -397,7 +397,7 @@ func TestKMLDecoder_Decode(t *testing.T) { func TestKMLparseKML(t *testing.T) { s := layer.NewSceneID() - testCases := []struct { + tests := []struct { name, KMLstr string expected interface{} err error @@ -468,20 +468,17 @@ func TestKMLparseKML(t *testing.T) { err: errors.New("XML syntax error on line 5: element closed by "), }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(tc.KMLstr)), s) - for { - res, err := d.parseKML() - if res != nil { - assert.Equal(tt, tc.expected, res) - break - } else { - assert.Equal(tt, tc.err.Error(), err.Error()) - break - } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(tt.KMLstr)), s) + res, err := d.parseKML() + if tt.expected != nil { + assert.Equal(t, tt.expected, res) + } else { + assert.Equal(t, tt.err.Error(), err.Error()) } }) } @@ -526,7 +523,8 @@ func TestKMLdecodePlacemark(t *testing.T) { pointExt := extensions["Point"] polylineExt := extensions["Polyline"] polygonExt := extensions["Polygon"] - testCases := []struct { + + tests := []struct { name, pt string placemark kml.Placemark expectedLayer *layer.Item @@ -634,22 +632,23 @@ func TestKMLdecodePlacemark(t *testing.T) { err: nil, }, } - for _, tc := range testCases { - 
tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), s) - l, p, err := d.decodePlacemark(tc.placemark) - if err == nil { - assert.NotNil(tt, l) - assert.NotNil(tt, p) - assert.Equal(tt, l.Name(), tc.expectedLayer.Name()) - ps := builtin.GetPropertySchema(propertySchemas[tc.pt]) - fa, _, _, _ := p.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tc.pt])) - fe, _, _, _ := tc.expectedProperty.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tc.pt])) - assert.Equal(tt, fe.Value(), fa.Value()) + l, p, err := d.decodePlacemark(tt.placemark) + if tt.err == nil { + assert.NotNil(t, l) + assert.NotNil(t, p) + assert.Equal(t, l.Name(), tt.expectedLayer.Name()) + ps := builtin.GetPropertySchema(propertySchemas[tt.pt]) + fa, _, _, _ := p.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tt.pt])) + fe, _, _, _ := tt.expectedProperty.GetOrCreateField(ps, property.PointFieldBySchemaGroup(propertyItems, propertyFields[tt.pt])) + assert.Equal(t, fe.Value(), fa.Value()) } else { - assert.True(tt, errors.As(err, &tc.err)) + assert.Equal(t, tt.err, err) } }) } @@ -675,7 +674,7 @@ func TestKMLdecodePlacemark(t *testing.T) { // Plugin(&layer.OfficialPluginID). 
// MustBuild() // var ll layer.Layer = li -// testCases := []struct { +// tests := []struct { // name string // collection *kml.Collection // expectedLayers []*layer.Layer @@ -716,35 +715,36 @@ func TestKMLdecodePlacemark(t *testing.T) { // err: nil, // }, // } -// for _, tc := range testCases { -// tc := tc -// t.Run(tc.name, func(tt *testing.T) { +// +// for _, tt := range tests { +// tt := tt +// t.Run(tt.name, func(t *testing.T) { // d := NewKMLDecoder(xml.NewDecoder(strings.NewReader(``)), s) -// _, lm, pm, _ := d.decodeCollection(*tc.collection, 0) -// //if err == nil { -// // if tc.expectedGroupLayer != nil { -// // assert.NotNil(tt, lg) -// // assert.Equal(tt, tc.expectedGroupLayer.Name(), lg.Name()) +// _, lm, pm, _ := d.decodeCollection(*tt.collection, 0) +// //if tt.err == nil { +// // if tt.expectedGroupLayer != nil { +// // assert.NotNil(t, lg) +// // assert.Equal(t, tt.expectedGroupLayer.Name(), lg.Name()) // // } -// // if tc.expectedLayers != nil { -// // assert.NotNil(tt, ll) -// // assert.True(tt, len(ll) == 1) -// // el := *tc.expectedLayers[0] +// // if tt.expectedLayers != nil { +// // assert.NotNil(t, ll) +// // assert.True(t, len(ll) == 1) +// // el := *tt.expectedLayers[0] // // al := *ll[0] -// // assert.Equal(tt, el.Name(), al.Name()) -// // assert.NotNil(tt, al.Property()) +// // assert.Equal(t, el.Name(), al.Name()) +// // assert.NotNil(t, al.Property()) // // } -// // if tc.expectedProperties != nil { -// // assert.NotNil(tt, pl) -// // assert.True(tt, len(pl) == 1) -// // ep := *tc.expectedProperties[0] +// // if tt.expectedProperties != nil { +// // assert.NotNil(t, pl) +// // assert.True(t, len(pl) == 1) +// // ep := *tt.expectedProperties[0] // // ap := pl.Keys()[0] // // fa, _, _, _ := ap.GetOrCreateField(builtin.GetPropertySchema(propertySchemas["Point"]), property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) // // fe, _, _, _ := ep.GetOrCreateField(builtin.GetPropertySchema(propertySchemas["Point"]), 
property.PointFieldBySchemaGroup(propertyItems, propertyFields["Point"])) -// // assert.Equal(tt, fe.Value(), fa.Value()) +// // assert.Equal(t, fe.Value(), fa.Value()) // // } // //} else { -// // assert.True(tt, errors.As(err, &tc.err)) +// // assert.Equal(t, tt.err, err) // //} // }) // } diff --git a/pkg/layer/encoding/czml_test.go b/pkg/layer/encoding/czml_test.go index 17edc3a70..35824157b 100644 --- a/pkg/layer/encoding/czml_test.go +++ b/pkg/layer/encoding/czml_test.go @@ -256,7 +256,9 @@ func TestCZMLEncoder_Encode(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() expected, _ := json.Marshal(tt.want) writer := bytes.Buffer{} assert.NoError(t, NewCZMLEncoder(&writer).Encode(tt.target)) diff --git a/pkg/layer/encoding/geojson_test.go b/pkg/layer/encoding/geojson_test.go index c7baffff4..dfabcfd19 100644 --- a/pkg/layer/encoding/geojson_test.go +++ b/pkg/layer/encoding/geojson_test.go @@ -214,7 +214,9 @@ func TestGeoJSONEncoder_Encode(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() expected, _ := tt.want().MarshalJSON() writer := bytes.Buffer{} assert.NoError(t, NewGeoJSONEncoder(&writer).Encode(tt.target)) diff --git a/pkg/layer/encoding/kml_test.go b/pkg/layer/encoding/kml_test.go index 61cbc87e4..a64c7f9cf 100644 --- a/pkg/layer/encoding/kml_test.go +++ b/pkg/layer/encoding/kml_test.go @@ -289,7 +289,9 @@ func TestKMLEncoder_Encode(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() we := bytes.Buffer{} _ = tt.want().WriteIndent(&we, "", " ") wa := bytes.Buffer{} diff --git a/pkg/layer/encoding/shp_test.go b/pkg/layer/encoding/shp_test.go index 68d4d265f..1f0437607 100644 --- a/pkg/layer/encoding/shp_test.go +++ b/pkg/layer/encoding/shp_test.go @@ -133,7 +133,10 @@ func TestEncodeSHP(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + // 
sequential test + tmpFile, err := os.CreateTemp(os.TempDir(), "*.shp") assert.NoError(t, err) en := NewSHPEncoder(tmpFile) diff --git a/pkg/layer/layerops/initializer_test.go b/pkg/layer/layerops/initializer_test.go index b38bd827b..f62399e67 100644 --- a/pkg/layer/layerops/initializer_test.go +++ b/pkg/layer/layerops/initializer_test.go @@ -1,7 +1,6 @@ package layerops import ( - "errors" "testing" "github.com/reearth/reearth-backend/pkg/i18n" @@ -34,7 +33,8 @@ func TestInitialize(t *testing.T) { Extensions(es). MustBuild() s := layer.NewSceneID() - testCases := []struct { + + tests := []struct { name string sceneID *layer.SceneID parentLayerID *layer.ID @@ -59,23 +59,24 @@ func TestInitialize(t *testing.T) { err: ErrExtensionTypeMustBePrimitive, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() layerItem, property, err := LayerItem{ - SceneID: *tc.sceneID, - ParentLayerID: *tc.parentLayerID, - Plugin: tc.plugin, - ExtensionID: tc.extID, - Name: tc.name, + SceneID: *tt.sceneID, + ParentLayerID: *tt.parentLayerID, + Plugin: tt.plugin, + ExtensionID: tt.extID, + Name: tt.name, }.Initialize() - if tc.err == nil { - assert.NoError(tt, err) - assert.NotNil(tt, layerItem) - assert.NotNil(tt, property) + if tt.err == nil { + assert.NoError(t, err) + assert.NotNil(t, layerItem) + assert.NotNil(t, property) } else { - assert.True(t, errors.As(err, &tc.err)) + assert.Equal(t, tt.err, err) } }) } diff --git a/pkg/layer/tag_test.go b/pkg/layer/tag_test.go index 3768a2f21..09942fcb6 100644 --- a/pkg/layer/tag_test.go +++ b/pkg/layer/tag_test.go @@ -14,6 +14,7 @@ func TestNewTagItem(t *testing.T) { type args struct { t TagID } + tests := []struct { name string args args @@ -45,6 +46,7 @@ func TestTagItemFrom(t *testing.T) { type args struct { t Tag } + tests := []struct { name string args args @@ -88,6 +90,7 @@ func 
TestTagItemFrom(t *testing.T) { func TestTagItem_ID(t *testing.T) { tag := NewTagID() + tests := []struct { name string target *TagItem @@ -116,6 +119,7 @@ func TestTagItem_ID(t *testing.T) { func TestTagItem_Clone(t *testing.T) { tag := NewTagID() + tests := []struct { name string target *TagItem @@ -145,6 +149,7 @@ func TestTagItem_Clone(t *testing.T) { func TestTagItem_CloneItem(t *testing.T) { tag := NewTagID() + tests := []struct { name string target *TagItem @@ -181,6 +186,7 @@ func TestNewTagGroup(t *testing.T) { t TagID children []*TagItem } + tests := []struct { name string args args @@ -224,6 +230,7 @@ func TestTagGroupFrom(t *testing.T) { type args struct { t Tag } + tests := []struct { name string args args @@ -267,6 +274,7 @@ func TestTagGroupFrom(t *testing.T) { func TestTagGroup_ID(t *testing.T) { tag := NewTagID() + tests := []struct { name string target *TagGroup @@ -335,6 +343,7 @@ func TestTagGroup_Find(t *testing.T) { type args struct { ti TagID } + tests := []struct { name string target *TagGroup @@ -379,6 +388,7 @@ func TestTagGroup_Add(t *testing.T) { type args struct { ti *TagItem } + tests := []struct { name string target *TagGroup @@ -432,6 +442,7 @@ func TestTagGroup_Delete(t *testing.T) { type args struct { ti TagID } + tests := []struct { name string target *TagGroup @@ -542,6 +553,7 @@ func TestNewTagList(t *testing.T) { type args struct { tags []Tag } + tests := []struct { name string args args @@ -615,6 +627,7 @@ func TestTagList_Add(t *testing.T) { type args struct { ti Tag } + tests := []struct { name string target *TagList @@ -707,6 +720,7 @@ func TestTagList_Delete(t *testing.T) { type args struct { ti TagID } + tests := []struct { name string target *TagList @@ -770,6 +784,7 @@ func TestTagList_Find(t *testing.T) { type args struct { ti TagID } + tests := []struct { name string target *TagList @@ -853,6 +868,7 @@ func TestTagList_FindGroup(t *testing.T) { type args struct { ti TagID } + tests := []struct { name string target 
*TagList @@ -906,6 +922,7 @@ func TestTagList_FindItem(t *testing.T) { type args struct { ti TagID } + tests := []struct { name string target *TagList diff --git a/pkg/plugin/builder_test.go b/pkg/plugin/builder_test.go index a773130fb..6252d27e4 100644 --- a/pkg/plugin/builder_test.go +++ b/pkg/plugin/builder_test.go @@ -1,7 +1,6 @@ package plugin import ( - "errors" "testing" "github.com/reearth/reearth-backend/pkg/i18n" @@ -33,7 +32,7 @@ func TestBuilder_Description(t *testing.T) { } func TestBuilder_Schema(t *testing.T) { - testCases := []struct { + tests := []struct { name string sid, expected *PropertySchemaID }{ @@ -48,12 +47,13 @@ func TestBuilder_Schema(t *testing.T) { expected: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - res := New().Schema(tc.sid).MustBuild() - assert.Equal(tt, tc.expected, res.Schema()) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := New().Schema(tt.sid).MustBuild() + assert.Equal(t, tt.expected, res.Schema()) }) } } @@ -75,27 +75,34 @@ func TestBuilder_RepositoryURL(t *testing.T) { } func TestBuilder_Build(t *testing.T) { - testCases := []struct { - name, author, repositoryURL string - id ID - pname, description i18n.String - ext []*Extension - schema *PropertySchemaID - expected *Plugin - err error // skip for now as error is always nil + type args struct { + id ID + author, repositoryURL string + pname, description i18n.String + ext []*Extension + schema *PropertySchemaID + } + + tests := []struct { + name string + args args + expected *Plugin + err error // skip for now as error is always nil }{ { - id: MustID("hoge~0.1.0"), - name: "success build new plugin", - author: "aaa", - repositoryURL: "uuu", - pname: i18n.StringFrom("nnn"), - description: i18n.StringFrom("ddd"), - ext: []*Extension{ - NewExtension().ID("xxx").MustBuild(), - NewExtension().ID("yyy").MustBuild(), + 
name: "success build new plugin", + args: args{ + id: MustID("hoge~0.1.0"), + author: "aaa", + repositoryURL: "uuu", + pname: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + ext: []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + }, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, - schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), expected: &Plugin{ id: MustID("hoge~0.1.0"), name: i18n.StringFrom("nnn"), @@ -111,49 +118,58 @@ func TestBuilder_Build(t *testing.T) { }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() p, err := New(). - ID(tc.id). - Extensions(tc.ext). - RepositoryURL(tc.repositoryURL). - Description(tc.description). - Name(tc.pname). - Schema(tc.schema). - Author(tc.author). + ID(tt.args.id). + Extensions(tt.args.ext). + RepositoryURL(tt.args.repositoryURL). + Description(tt.args.description). + Name(tt.args.pname). + Schema(tt.args.schema). + Author(tt.args.author). 
Build() - if tc.err == nil { - assert.Equal(tt, tc.expected, p) + if tt.err == nil { + assert.Equal(t, tt.expected, p) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.err, err) } }) } } func TestBuilder_MustBuild(t *testing.T) { - testCases := []struct { - name, author, repositoryURL string - id ID - pname, description i18n.String - ext []*Extension - schema *PropertySchemaID - expected *Plugin + type args struct { + author, repositoryURL string + id ID + pname, description i18n.String + ext []*Extension + schema *PropertySchemaID + } + + tests := []struct { + name string + args args + expected *Plugin + err error // skip for now as error is always nil }{ { - id: MustID("hoge~0.1.0"), - name: "success build new plugin", - author: "aaa", - repositoryURL: "uuu", - pname: i18n.StringFrom("nnn"), - description: i18n.StringFrom("ddd"), - ext: []*Extension{ - NewExtension().ID("xxx").MustBuild(), - NewExtension().ID("yyy").MustBuild(), + name: "success build new plugin", + args: args{ + id: MustID("hoge~0.1.0"), + author: "aaa", + repositoryURL: "uuu", + pname: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + ext: []*Extension{ + NewExtension().ID("xxx").MustBuild(), + NewExtension().ID("yyy").MustBuild(), + }, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), }, - schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), expected: &Plugin{ id: MustID("hoge~0.1.0"), name: i18n.StringFrom("nnn"), @@ -169,21 +185,30 @@ func TestBuilder_MustBuild(t *testing.T) { }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - p := New(). - ID(tc.id). - Extensions(tc.ext). - RepositoryURL(tc.repositoryURL). - Description(tc.description). - Name(tc.pname). - Schema(tc.schema). - Author(tc.author). 
- MustBuild() - assert.Equal(tt, tc.expected, p) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + build := func() *Plugin { + t.Helper() + return New(). + ID(tt.args.id). + Extensions(tt.args.ext). + RepositoryURL(tt.args.repositoryURL). + Description(tt.args.description). + Name(tt.args.pname). + Schema(tt.args.schema). + Author(tt.args.author). + MustBuild() + } + + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } }) } } diff --git a/pkg/plugin/extension_builder_test.go b/pkg/plugin/extension_builder_test.go index d281ac021..03d7446d4 100644 --- a/pkg/plugin/extension_builder_test.go +++ b/pkg/plugin/extension_builder_test.go @@ -66,8 +66,8 @@ func TestExtensionBuilder_WidgetLayout(t *testing.T) { } func TestExtensionBuilder_Build(t *testing.T) { - testCases := []struct { - name, icon string + type args struct { + icon string id ExtensionID extensionType ExtensionType system bool @@ -76,26 +76,33 @@ func TestExtensionBuilder_Build(t *testing.T) { schema PropertySchemaID visualizer visualizer.Visualizer widgetLayout *WidgetLayout - expected *Extension - err error + } + + tests := []struct { + name string + args args + expected *Extension + err error }{ { - name: "success not system", - icon: "ttt", - id: "xxx", - extensionType: "ppp", - system: false, - ename: i18n.StringFrom("nnn"), - description: i18n.StringFrom("ddd"), - schema: MustPropertySchemaID("foo~1.1.1/hhh"), - visualizer: "vvv", - widgetLayout: NewWidgetLayout( - false, false, true, false, &WidgetLocation{ - Zone: WidgetZoneOuter, - Section: WidgetSectionLeft, - Area: WidgetAreaTop, - }, - ).Ref(), + name: "success not system", + args: args{ + icon: "ttt", + id: "xxx", + extensionType: "ppp", + system: false, + ename: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), + visualizer: "vvv", + widgetLayout: 
NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }, + ).Ref(), + }, expected: &Extension{ id: "xxx", extensionType: "ppp", @@ -112,17 +119,20 @@ func TestExtensionBuilder_Build(t *testing.T) { }, ).Ref(), }, - err: nil, }, { - name: "fail not system type visualizer", - extensionType: ExtensionTypeVisualizer, - err: ErrInvalidID, + name: "fail not system type visualizer", + args: args{ + extensionType: ExtensionTypeVisualizer, + }, + err: ErrInvalidID, }, { - name: "fail not system type infobox", - extensionType: ExtensionTypeInfobox, - err: ErrInvalidID, + name: "fail not system type infobox", + args: args{ + extensionType: ExtensionTypeInfobox, + }, + err: ErrInvalidID, }, { name: "fail nil id", @@ -130,33 +140,33 @@ func TestExtensionBuilder_Build(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() e, err := NewExtension(). - ID(tc.id). - Visualizer(tc.visualizer). - Schema(tc.schema). - System(tc.system). - Type(tc.extensionType). - Description(tc.description). - Name(tc.ename). - Icon(tc.icon). - WidgetLayout(tc.widgetLayout). + ID(tc.args.id). + Visualizer(tc.args.visualizer). + Schema(tc.args.schema). + System(tc.args.system). + Type(tc.args.extensionType). + Description(tc.args.description). + Name(tc.args.ename). + Icon(tc.args.icon). + WidgetLayout(tc.args.widgetLayout). 
Build() if tc.err == nil { - assert.Equal(tt, tc.expected, e) + assert.Equal(t, tc.expected, e) } else { - assert.Equal(tt, tc.err, err) + assert.Equal(t, tc.err, err) } }) } } func TestExtensionBuilder_MustBuild(t *testing.T) { - testCases := []struct { - name, icon string + type args struct { + icon string id ExtensionID extensionType ExtensionType system bool @@ -166,25 +176,33 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { visualizer visualizer.Visualizer widgetLayout *WidgetLayout singleOnly bool - expected *Extension + } + + tests := []struct { + name string + args args + expected *Extension + err error }{ { - name: "success not system", - icon: "ttt", - id: "xxx", - extensionType: "ppp", - system: false, - ename: i18n.StringFrom("nnn"), - description: i18n.StringFrom("ddd"), - schema: MustPropertySchemaID("foo~1.1.1/hhh"), - visualizer: "vvv", - singleOnly: true, - widgetLayout: NewWidgetLayout( - false, false, true, false, &WidgetLocation{ - Zone: WidgetZoneOuter, - Section: WidgetSectionLeft, - Area: WidgetAreaTop, - }).Ref(), + name: "success not system", + args: args{ + icon: "ttt", + id: "xxx", + extensionType: "ppp", + system: false, + ename: i18n.StringFrom("nnn"), + description: i18n.StringFrom("ddd"), + schema: MustPropertySchemaID("foo~1.1.1/hhh"), + visualizer: "vvv", + singleOnly: true, + widgetLayout: NewWidgetLayout( + false, false, true, false, &WidgetLocation{ + Zone: WidgetZoneOuter, + Section: WidgetSectionLeft, + Area: WidgetAreaTop, + }).Ref(), + }, expected: &Extension{ id: "xxx", extensionType: "ppp", @@ -203,40 +221,51 @@ func TestExtensionBuilder_MustBuild(t *testing.T) { }, }, { - name: "fail not system type visualizer", - extensionType: ExtensionTypeVisualizer, + name: "fail not system type visualizer", + args: args{ + extensionType: ExtensionTypeVisualizer, + }, + err: ErrInvalidID, }, { - name: "fail not system type infobox", - extensionType: ExtensionTypeInfobox, + name: "fail not system type infobox", + args: args{ + 
extensionType: ExtensionTypeInfobox, + }, + err: ErrInvalidID, }, { name: "fail nil id", + err: ErrInvalidID, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - var e *Extension - defer func() { - if r := recover(); r == nil { - assert.Equal(tt, tc.expected, e) - - } - }() - e = NewExtension(). - ID(tc.id). - Visualizer(tc.visualizer). - Schema(tc.schema). - System(tc.system). - Type(tc.extensionType). - Description(tc.description). - Name(tc.ename). - Icon(tc.icon). - SingleOnly(tc.singleOnly). - WidgetLayout(tc.widgetLayout). - MustBuild() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + build := func() *Extension { + t.Helper() + return NewExtension(). + ID(tc.args.id). + Visualizer(tc.args.visualizer). + Schema(tc.args.schema). + System(tc.args.system). + Type(tc.args.extensionType). + Description(tc.args.description). + Name(tc.args.ename). + Icon(tc.args.icon). + SingleOnly(tc.args.singleOnly). + WidgetLayout(tc.args.widgetLayout). 
+ MustBuild() + } + + if tc.err != nil { + assert.PanicsWithValue(t, tc.err, func() { _ = build() }) + } else { + assert.Equal(t, tc.expected, build()) + } }) } } diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index b036b3227..26eb82e07 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -18,7 +18,7 @@ func TestToValue(t *testing.T) { } func TestChoice(t *testing.T) { - testCases := []struct { + tests := []struct { name string ch *Choice expected *property.SchemaFieldChoice @@ -37,11 +37,12 @@ func TestChoice(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, *tc.expected, *tc.ch.choice()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, *tc.expected, *tc.ch.choice()) }) } @@ -53,7 +54,8 @@ func TestManifest(t *testing.T) { a := "aaa" d := "ddd" r := "rrr" - testCases := []struct { + + tests := []struct { name string root *Root expected *Manifest @@ -136,18 +138,19 @@ func TestManifest(t *testing.T) { err: "invalid manifest: invalid plugin id: ", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() m, err := tc.root.manifest(nil) if tc.err == "" { - assert.Equal(tt, tc.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(tt, tc.expected.Plugin.Name(), m.Plugin.Name()) - assert.Equal(tt, len(tc.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) + assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(t, tc.expected.Plugin.Name(), m.Plugin.Name()) + assert.Equal(t, len(tc.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) //assert.Equal(tt,tc.expected.Schema..) 
} else { - assert.Equal(tt, tc.err, err.Error()) + assert.Equal(t, tc.err, err.Error()) } }) } @@ -159,7 +162,8 @@ func TestExtension(t *testing.T) { d := "ddd" i := "xx:/aa.bb" tr := true - testCases := []struct { + + tests := []struct { name string ext Extension sys bool @@ -308,20 +312,20 @@ func TestExtension(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() pe, ps, err := tc.ext.extension(tc.pid, tc.sys) if tc.err == "" { - assert.Equal(tt, tc.expectedPE.ID(), pe.ID()) - assert.Equal(tt, tc.expectedPE.Visualizer(), pe.Visualizer()) - assert.Equal(tt, tc.expectedPE.Type(), pe.Type()) - assert.Equal(tt, tc.expectedPE.Name(), pe.Name()) - assert.Equal(tt, tc.expectedPS.ID(), ps.ID()) - assert.Equal(tt, tc.expectedPS.ID(), ps.ID()) + assert.Equal(t, tc.expectedPE.ID(), pe.ID()) + assert.Equal(t, tc.expectedPE.Visualizer(), pe.Visualizer()) + assert.Equal(t, tc.expectedPE.Type(), pe.Type()) + assert.Equal(t, tc.expectedPE.Name(), pe.Name()) + assert.Equal(t, tc.expectedPS.ID(), ps.ID()) + assert.Equal(t, tc.expectedPS.ID(), ps.ID()) } else { - assert.Equal(tt, tc.err, err.Error()) + assert.Equal(t, tc.err, err.Error()) } }) } @@ -330,7 +334,8 @@ func TestExtension(t *testing.T) { func TestPointer(t *testing.T) { sg := "aaa" f := "xxx" - testCases := []struct { + + tests := []struct { name string pp *PropertyPointer expected *property.Pointer @@ -357,17 +362,20 @@ func TestPointer(t *testing.T) { expected: property.NewPointer(property.SchemaGroupIDFrom(&sg), nil, property.FieldIDFrom(&f)), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.pp.pointer()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.pp.pointer()) }) } } + func TestCondition(t *testing.T) { v := 
toValue("xxx", "string") - testCases := []struct { + + tests := []struct { name string con *PropertyCondition expected *property.Condition @@ -390,11 +398,12 @@ func TestCondition(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.con.condition()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.con.condition()) }) } } @@ -403,7 +412,8 @@ func TestLinkable(t *testing.T) { l := "location" d := "default" u := "url" - testCases := []struct { + + tests := []struct { name string p *PropertyLinkableFields expected property.LinkableFields @@ -431,18 +441,20 @@ func TestLinkable(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.p.linkable()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.p.linkable()) }) } } func TestSchema(t *testing.T) { str := "ddd" - testCases := []struct { + + tests := []struct { name, psid string ps *PropertySchema pid plugin.ID @@ -500,21 +512,22 @@ func TestSchema(t *testing.T) { expected: property.NewSchema().ID(property.MustSchemaID("reearth/marker")).Groups([]*property.SchemaGroup{property.NewSchemaGroup().ID("default").Schema(property.MustSchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild()}).MustBuild(), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res, err := tc.ps.schema(tc.pid, tc.psid) if tc.err == "" { - assert.Equal(tt, len(tc.expected.Groups()), len(res.Groups())) - assert.Equal(tt, tc.expected.LinkableFields(), res.LinkableFields()) - assert.Equal(tt, 
tc.expected.Version(), res.Version()) + assert.Equal(t, len(tc.expected.Groups()), len(res.Groups())) + assert.Equal(t, tc.expected.LinkableFields(), res.LinkableFields()) + assert.Equal(t, tc.expected.Version(), res.Version()) if len(res.Groups()) > 0 { exg := tc.expected.Group(res.Groups()[0].ID()) - assert.NotNil(tt, exg) + assert.NotNil(t, exg) } } else { - assert.Equal(tt, tc.err, err.Error()) + assert.Equal(t, tc.err, err.Error()) } }) } @@ -523,7 +536,8 @@ func TestSchema(t *testing.T) { func TestSchemaGroup(t *testing.T) { str := "marker" des := "ddd" - testCases := []struct { + + tests := []struct { name string psg PropertySchemaGroup sid property.SchemaID @@ -584,22 +598,23 @@ func TestSchemaGroup(t *testing.T) { err: "field (location): invalid value type: xx", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res, err := tc.psg.schemaGroup(tc.sid) if tc.err == "" { - assert.Equal(tt, tc.expected.Title().String(), res.Title().String()) - assert.Equal(tt, tc.expected.Title(), res.Title()) - assert.Equal(tt, tc.expected.Schema(), res.Schema()) - assert.Equal(tt, len(tc.expected.Fields()), len(res.Fields())) + assert.Equal(t, tc.expected.Title().String(), res.Title().String()) + assert.Equal(t, tc.expected.Title(), res.Title()) + assert.Equal(t, tc.expected.Schema(), res.Schema()) + assert.Equal(t, len(tc.expected.Fields()), len(res.Fields())) if len(res.Fields()) > 0 { exf := res.Fields()[0] - assert.NotNil(tt, tc.expected.Field(exf.ID())) + assert.NotNil(t, tc.expected.Field(exf.ID())) } } else { - assert.Equal(tt, tc.err, err.Error()) + assert.Equal(t, tc.err, err.Error()) } }) } @@ -607,7 +622,8 @@ func TestSchemaGroup(t *testing.T) { func TestSchemaField(t *testing.T) { str := "xx" - testCases := []struct { + + tests := []struct { name string psg PropertySchemaField expected *property.SchemaField @@ -746,19 +762,20 @@ 
func TestSchemaField(t *testing.T) { err: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res, err := tc.psg.schemaField() if tc.err == nil { - assert.Equal(tt, tc.expected.Title(), res.Title()) - assert.Equal(tt, tc.expected.Description(), res.Description()) - assert.Equal(tt, tc.expected.Suffix(), res.Suffix()) - assert.Equal(tt, tc.expected.Prefix(), res.Prefix()) - assert.Equal(tt, tc.expected.Choices(), res.Choices()) + assert.Equal(t, tc.expected.Title(), res.Title()) + assert.Equal(t, tc.expected.Description(), res.Description()) + assert.Equal(t, tc.expected.Suffix(), res.Suffix()) + assert.Equal(t, tc.expected.Prefix(), res.Prefix()) + assert.Equal(t, tc.expected.Choices(), res.Choices()) } else { - assert.Equal(tt, tc.err, rerror.Get(err).Err) + assert.Equal(t, tc.err, rerror.Get(err).Err) } }) } @@ -767,7 +784,7 @@ func TestSchemaField(t *testing.T) { func TestLayout(t *testing.T) { tr := true - testCases := []struct { + tests := []struct { name string widgetLayout WidgetLayout expected *plugin.WidgetLayout @@ -808,12 +825,12 @@ func TestLayout(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res := tc.widgetLayout.layout() - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index d90501f37..9f85bd6f0 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -52,7 +52,7 @@ var normalExpected = &Manifest{ } func TestParse(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *Manifest @@ -89,26 +89,26 @@ func TestParse(t *testing.T) { }, } - for _, tc := range testCases { + for 
_, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() m, err := Parse(strings.NewReader(tc.input), nil) if tc.err == nil { - if !assert.NoError(tt, err) { + if !assert.NoError(t, err) { return } - assert.Equal(tt, tc.expected, m) + assert.Equal(t, tc.expected, m) return } - assert.ErrorIs(tt, tc.err, err) + assert.ErrorIs(t, tc.err, err) }) } } func TestParseSystemFromBytes(t *testing.T) { - testCases := []struct { + tests := []struct { name, input string expected *Manifest err error @@ -133,63 +133,63 @@ func TestParseSystemFromBytes(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() m, err := ParseSystemFromBytes([]byte(tc.input), nil) if tc.err == nil { - if !assert.NoError(tt, err) { + if !assert.NoError(t, err) { return } - assert.Equal(tt, tc.expected, m) + assert.Equal(t, tc.expected, m) return } - assert.ErrorIs(tt, tc.err, err) + assert.ErrorIs(t, tc.err, err) }) } } func TestMustParseSystemFromBytes(t *testing.T) { - testCases := []struct { + tests := []struct { name, input string expected *Manifest - err error + fails bool }{ { name: "success create simple manifest", input: minimum, expected: minimumExpected, - err: nil, + fails: false, }, { name: "success create manifest", input: normal, expected: normalExpected, - err: nil, + fails: false, }, { name: "fail not valid JSON", input: "--", expected: nil, - err: ErrFailedToParseManifest, + fails: true, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() - if tc.err != nil { - assert.PanicsWithError(tt, tc.err.Error(), func() { + if tc.fails { + assert.Panics(t, func() { _ = MustParseSystemFromBytes([]byte(tc.input), nil) }) return } m := 
MustParseSystemFromBytes([]byte(tc.input), nil) - assert.Equal(tt, m, tc.expected) + assert.Equal(t, m, tc.expected) }) } } diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index c01d13138..3e31f09d1 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -41,7 +41,7 @@ var expected = &TranslationRoot{ var mergeManifest string func TestParseTranslation(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *TranslationRoot @@ -61,23 +61,23 @@ func TestParseTranslation(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() r := strings.NewReader(tc.input) res, err := ParseTranslation(r) if tc.err != nil { - assert.ErrorIs(tt, err, tc.err) + assert.ErrorIs(t, err, tc.err) return } - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestParseTranslationFromBytes(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *TranslationRoot @@ -97,61 +97,61 @@ func TestParseTranslationFromBytes(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() res, err := ParseTranslationFromBytes([]byte(tc.input)) if tc.err != nil { - assert.ErrorIs(tt, err, tc.err) + assert.ErrorIs(t, err, tc.err) return } - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestMustParseTransSystemFromBytes(t *testing.T) { - testCases := []struct { + tests := []struct { name string input string expected *TranslationRoot - err error + fails bool }{ { name: "success create translation", input: translatedManifest, expected: expected, - err: nil, + fails: 
false, }, { name: "fail not valid YAML", input: "--", expected: nil, - err: ErrFailedToParseManifestTranslation, + fails: true, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() - if tc.err != nil { - assert.PanicsWithError(tt, tc.err.Error(), func() { + if tc.fails { + assert.Panics(t, func() { _ = MustParseTranslationFromBytes([]byte(tc.input)) }) return } res := MustParseTranslationFromBytes([]byte(tc.input)) - assert.Equal(tt, tc.expected, res) + assert.Equal(t, tc.expected, res) }) } } func TestMergeManifestTranslation(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Translations map[string]*TranslationRoot Manifest *Manifest @@ -180,21 +180,21 @@ func TestMergeManifestTranslation(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := MergeManifestTranslation(tc.Manifest, tc.Translations) if tc.Expected == nil { - assert.Nil(tt, res) + assert.Nil(t, res) return } - assert.Equal(tt, tc.Expected.PluginName, res.Plugin.Name()) - assert.Equal(tt, tc.Expected.PluginDesc, res.Plugin.Description()) - assert.Equal(tt, tc.Expected.ExtName, res.Plugin.Extension(plugin.ExtensionID("test_ext")).Name()) - assert.Equal(tt, tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) - assert.Equal(tt, tc.Expected.FieldTitle, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Title()) - assert.Equal(tt, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) + assert.Equal(t, tc.Expected.PluginName, res.Plugin.Name()) + assert.Equal(t, tc.Expected.PluginDesc, res.Plugin.Description()) + assert.Equal(t, tc.Expected.ExtName, res.Plugin.Extension(plugin.ExtensionID("test_ext")).Name()) + assert.Equal(t, 
tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) + assert.Equal(t, tc.Expected.FieldTitle, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Title()) + assert.Equal(t, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) }) } } diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index 6c7ef1993..89c771453 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -8,7 +8,7 @@ import ( ) func TestPlugin_Extension(t *testing.T) { - testCases := []struct { + tests := []struct { name string plugin *Plugin key ExtensionID @@ -33,11 +33,12 @@ func TestPlugin_Extension(t *testing.T) { expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.plugin.Extension(tc.key)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.plugin.Extension(tc.key)) }) } } @@ -47,7 +48,7 @@ func TestPlugin_PropertySchemas(t *testing.T) { ps2 := MustPropertySchemaID("hoge~0.1.0/b") ps3 := MustPropertySchemaID("hoge~0.1.0/c") - testCases := []struct { + tests := []struct { name string plugin *Plugin expected []PropertySchemaID @@ -68,11 +69,12 @@ func TestPlugin_PropertySchemas(t *testing.T) { expected: []PropertySchemaID(nil), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.plugin.PropertySchemas()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.plugin.PropertySchemas()) }) } } diff --git a/pkg/plugin/repourl/repourl_test.go b/pkg/plugin/repourl/repourl_test.go index 1ebb2ab87..6d1ad0103 100644 --- a/pkg/plugin/repourl/repourl_test.go +++ b/pkg/plugin/repourl/repourl_test.go @@ -111,15 +111,15 @@ func TestNew(t *testing.T) { for _, tc := range cases { tc := tc - t.Run(tc.Name, 
func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() ur, _ := url.Parse(tc.Input) u, err := New(ur) if tc.Err != nil { - assert.ErrorIs(tt, err, tc.Err) + assert.ErrorIs(t, err, tc.Err) } else { - assert.NoError(tt, err) - assert.Equal(tt, tc.Expected, u) + assert.NoError(t, err) + assert.Equal(t, tc.Expected, u) } }) } @@ -128,15 +128,16 @@ func TestNew(t *testing.T) { func TestMust(t *testing.T) { for _, tc := range cases { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + ur, _ := url.Parse(tc.Input) if tc.Err != nil { - assert.PanicsWithError(tt, tc.Err.Error(), func() { + assert.PanicsWithError(t, tc.Err.Error(), func() { _ = Must(ur) }) } else { - assert.Equal(tt, tc.Expected, Must(ur)) + assert.Equal(t, tc.Expected, Must(ur)) } }) } @@ -145,14 +146,14 @@ func TestMust(t *testing.T) { func TestParse(t *testing.T) { for _, tc := range cases { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() u, err := Parse(tc.Input) if tc.Err != nil { - assert.ErrorIs(tt, err, tc.Err) + assert.ErrorIs(t, err, tc.Err) } else { - assert.NoError(tt, err) - assert.Equal(tt, tc.Expected, u) + assert.NoError(t, err) + assert.Equal(t, tc.Expected, u) } }) } @@ -161,14 +162,14 @@ func TestParse(t *testing.T) { func TestMustParse(t *testing.T) { for _, tc := range cases { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Err != nil { - assert.PanicsWithError(tt, tc.Err.Error(), func() { + assert.PanicsWithError(t, tc.Err.Error(), func() { _ = MustParse(tc.Input) }) } else { - assert.Equal(tt, tc.Expected, MustParse(tc.Input)) + assert.Equal(t, tc.Expected, MustParse(tc.Input)) } }) } @@ -293,11 +294,11 @@ func TestURL_ArchiveURL(t *testing.T) { }, } - for _, tc := range cases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() 
- assert.Equal(tt, tc.Expected, tc.Input.ArchiveURL().String()) + for _, tt := range cases { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.Expected, tt.Input.ArchiveURL().String()) }) } } diff --git a/pkg/project/builder_test.go b/pkg/project/builder_test.go index 36c29eb3f..38e8401a3 100644 --- a/pkg/project/builder_test.go +++ b/pkg/project/builder_test.go @@ -1,7 +1,6 @@ package project import ( - "errors" "net/url" "reflect" "testing" @@ -64,7 +63,7 @@ func TestBuilder_BasicAuthPassword(t *testing.T) { } func TestBuilder_ImageURL(t *testing.T) { - testCases := []struct { + tests := []struct { name string image *url.URL expectedNil bool @@ -80,16 +79,17 @@ func TestBuilder_ImageURL(t *testing.T) { expectedNil: true, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() tb := New().NewID() - res := tb.ImageURL(tc.image).MustBuild() + res := tb.ImageURL(tt.image).MustBuild() if res.imageURL == nil { - assert.True(tt, tc.expectedNil) + assert.True(t, tt.expectedNil) } else { - assert.False(tt, tc.expectedNil) + assert.False(t, tt.expectedNil) } }) } @@ -157,39 +157,48 @@ func TestBuilder_Build(t *testing.T) { i, _ := url.Parse("ttt://xxx.aa/") pid := NewID() tid := NewTeamID() - testCases := []struct { - name, pname, description, - alias, publicTitle, publicDescription, - publicImage string - id ID - isArchived bool - updatedAt time.Time - publishedAt time.Time - imageURL *url.URL - publicNoIndex bool - team TeamID - visualizer visualizer.Visualizer - publishmentStatus PublishmentStatus - expected *Project - err error + + type args struct { + name, description string + alias, publicTitle string + publicDescription string + publicImage string + id ID + isArchived bool + updatedAt time.Time + publishedAt time.Time + imageURL *url.URL + publicNoIndex bool + team TeamID + visualizer 
visualizer.Visualizer + publishmentStatus PublishmentStatus + } + + tests := []struct { + name string + args args + expected *Project + err error }{ { - name: "build normal project", - pname: "xxx.aaa", - description: "ddd", - alias: "aaaaa", - publicTitle: "ttt", - publicDescription: "dddd", - publicImage: "iii", - id: pid, - isArchived: false, - updatedAt: d, - publishedAt: d, - imageURL: i, - publicNoIndex: true, - team: tid, - visualizer: visualizer.VisualizerCesium, - publishmentStatus: "ppp", + name: "build normal project", + args: args{ + name: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + id: pid, + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, expected: &Project{ id: pid, description: "ddd", @@ -207,59 +216,58 @@ func TestBuilder_Build(t *testing.T) { visualizer: visualizer.VisualizerCesium, publishmentStatus: "ppp", }, - err: nil, }, { - name: "zero updated at", - id: NewID(), - updatedAt: time.Time{}, - expected: nil, - err: nil, + name: "zero updated at", + args: args{ + id: pid, + }, + expected: &Project{ + id: pid, + updatedAt: createdAt(pid), + }, }, { name: "failed invalid id", - - expected: nil, - err: ErrInvalidID, + err: ErrInvalidID, }, { - name: "failed invalid alias", - id: NewID(), - alias: "xxx.aaa", + name: "failed invalid alias", + args: args{ + id: NewID(), + alias: "xxx.aaa", + }, expected: nil, err: ErrInvalidAlias, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() p, err := New(). - ID(tc.id). - PublicNoIndex(tc.publicNoIndex). - PublicDescription(tc.publicDescription). - PublishmentStatus(tc.publishmentStatus). - PublicTitle(tc.publicTitle). - UpdatedAt(tc.updatedAt). 
- PublishedAt(tc.publishedAt). - PublicImage(tc.publicImage). - Team(tc.team). - ImageURL(tc.imageURL). - Name(tc.pname). - Alias(tc.alias). - Visualizer(tc.visualizer). - UpdatedAt(tc.updatedAt). - Description(tc.description). + ID(tt.args.id). + PublicNoIndex(tt.args.publicNoIndex). + PublicDescription(tt.args.publicDescription). + PublishmentStatus(tt.args.publishmentStatus). + PublicTitle(tt.args.publicTitle). + UpdatedAt(tt.args.updatedAt). + PublishedAt(tt.args.publishedAt). + PublicImage(tt.args.publicImage). + Team(tt.args.team). + ImageURL(tt.args.imageURL). + Name(tt.args.name). + Alias(tt.args.alias). + Visualizer(tt.args.visualizer). + UpdatedAt(tt.args.updatedAt). + Description(tt.args.description). Build() - if err == nil { - if tc.expected == nil { - assert.Equal(tt, p.UpdatedAt(), p.CreatedAt()) - } else { - assert.Equal(tt, tc.expected, p) - } + if tt.err == nil { + assert.Equal(t, tt.expected, p) } else { - assert.True(tt, errors.As(err, &tc.err)) + assert.Equal(t, tt.err, err) } }) } @@ -270,39 +278,48 @@ func TestBuilder_MustBuild(t *testing.T) { i, _ := url.Parse("ttt://xxx.aa/") pid := NewID() tid := NewTeamID() - testCases := []struct { - name, pname, description, - alias, publicTitle, publicDescription, - publicImage string - id ID - isArchived bool - updatedAt time.Time - publishedAt time.Time - imageURL *url.URL - publicNoIndex bool - team TeamID - visualizer visualizer.Visualizer - publishmentStatus PublishmentStatus - expected *Project - err error + + type args struct { + name, description string + alias, publicTitle string + publicDescription string + publicImage string + id ID + isArchived bool + updatedAt time.Time + publishedAt time.Time + imageURL *url.URL + publicNoIndex bool + team TeamID + visualizer visualizer.Visualizer + publishmentStatus PublishmentStatus + } + + tests := []struct { + name string + args args + expected *Project + err error }{ { - name: "build normal project", - pname: "xxx.aaa", - description: "ddd", - alias: 
"aaaaa", - publicTitle: "ttt", - publicDescription: "dddd", - publicImage: "iii", - id: pid, - isArchived: false, - updatedAt: d, - publishedAt: d, - imageURL: i, - publicNoIndex: true, - team: tid, - visualizer: visualizer.VisualizerCesium, - publishmentStatus: "ppp", + name: "build normal project", + args: args{ + name: "xxx.aaa", + description: "ddd", + alias: "aaaaa", + publicTitle: "ttt", + publicDescription: "dddd", + publicImage: "iii", + id: pid, + isArchived: false, + updatedAt: d, + publishedAt: d, + imageURL: i, + publicNoIndex: true, + team: tid, + visualizer: visualizer.VisualizerCesium, + publishmentStatus: "ppp", + }, expected: &Project{ id: pid, description: "ddd", @@ -320,60 +337,62 @@ func TestBuilder_MustBuild(t *testing.T) { visualizer: visualizer.VisualizerCesium, publishmentStatus: "ppp", }, - err: nil, }, { - name: "zero updated at", - id: NewID(), - updatedAt: time.Time{}, - expected: nil, - err: nil, + name: "zero updated at", + args: args{ + id: pid, + }, + expected: &Project{ + id: pid, + updatedAt: createdAt(pid), + }, }, { name: "failed invalid id", - - expected: nil, - err: ErrInvalidID, + err: ErrInvalidID, }, { - name: "failed invalid alias", - id: NewID(), - alias: "xxx.aaa", - expected: nil, - err: ErrInvalidAlias, + name: "failed invalid alias", + args: args{ + id: NewID(), + alias: "xxx.aaa", + }, + err: ErrInvalidAlias, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - var p *Project - defer func() { - if r := recover(); r == nil { - if tc.expected == nil { - assert.Equal(tt, p.UpdatedAt(), p.CreatedAt()) - } else { - assert.Equal(tt, tc.expected, p) - } - } - }() - p = New(). - ID(tc.id). - PublicNoIndex(tc.publicNoIndex). - PublicDescription(tc.publicDescription). - PublishmentStatus(tc.publishmentStatus). - PublicTitle(tc.publicTitle). - UpdatedAt(tc.updatedAt). - PublishedAt(tc.publishedAt). - PublicImage(tc.publicImage). - Team(tc.team). - ImageURL(tc.imageURL). 
- Name(tc.pname). - Alias(tc.alias). - Visualizer(tc.visualizer). - UpdatedAt(tc.updatedAt). - Description(tc.description). - MustBuild() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + build := func() *Project { + t.Helper() + return New(). + ID(tt.args.id). + PublicNoIndex(tt.args.publicNoIndex). + PublicDescription(tt.args.publicDescription). + PublishmentStatus(tt.args.publishmentStatus). + PublicTitle(tt.args.publicTitle). + UpdatedAt(tt.args.updatedAt). + PublishedAt(tt.args.publishedAt). + PublicImage(tt.args.publicImage). + Team(tt.args.team). + ImageURL(tt.args.imageURL). + Name(tt.args.name). + Alias(tt.args.alias). + Visualizer(tt.args.visualizer). + UpdatedAt(tt.args.updatedAt). + Description(tt.args.description). + MustBuild() + } + + if tt.err != nil { + assert.PanicsWithValue(t, tt.err, func() { _ = build() }) + } else { + assert.Equal(t, tt.expected, build()) + } }) } } diff --git a/pkg/project/project_test.go b/pkg/project/project_test.go index c471c7089..848c59f96 100644 --- a/pkg/project/project_test.go +++ b/pkg/project/project_test.go @@ -25,11 +25,12 @@ func TestCheckAliasPattern(t *testing.T) { expexted: false, }, } - for _, tc := range testCase { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expexted, CheckAliasPattern(tc.alias)) + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expexted, CheckAliasPattern(tt.alias)) }) } } @@ -74,11 +75,12 @@ func TestProject_MatchWithPublicName(t *testing.T) { expexted: false, }, } - for _, tc := range testCase { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expexted, tc.p.MatchWithPublicName(tc.n)) + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.expexted, tt.p.MatchWithPublicName(tt.n)) }) } } @@ -121,15 +123,16 @@ func 
TestProject_SetImageURL(t *testing.T) { expectedNil: false, }, } - for _, tc := range testCase { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - tc.p.SetImageURL(tc.image) - if tc.expectedNil { - assert.Nil(tt, tc.p.ImageURL()) + + for _, tt := range testCase { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.p.SetImageURL(tt.image) + if tt.expectedNil { + assert.Nil(t, tt.p.ImageURL()) } else { - assert.NotNil(tt, tc.p.ImageURL()) + assert.NotNil(t, tt.p.ImageURL()) } }) } @@ -185,7 +188,7 @@ func TestProject_UpdateVisualizer(t *testing.T) { } func TestProject_UpdateAlias(t *testing.T) { - testCases := []struct { + tests := []struct { name, a string expected string err error @@ -203,16 +206,17 @@ func TestProject_UpdateAlias(t *testing.T) { err: ErrInvalidAlias, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() p := &Project{} - err := p.UpdateAlias(tc.a) - if err == nil { - assert.Equal(tt, tc.expected, p.Alias()) + err := p.UpdateAlias(tt.a) + if tt.err == nil { + assert.Equal(t, tt.expected, p.Alias()) } else { - assert.Equal(tt, tc.err, err) + assert.Equal(t, tt.err, err) } }) } @@ -225,7 +229,7 @@ func TestProject_UpdatePublicImage(t *testing.T) { } func TestProject_PublicName(t *testing.T) { - testCases := []struct { + tests := []struct { name string p *Project expected string @@ -258,12 +262,13 @@ func TestProject_PublicName(t *testing.T) { expected: "", }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - res := tc.p.PublicName() - assert.Equal(tt, tc.expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.p.PublicName() + assert.Equal(t, tt.expected, res) }) } } @@ -289,11 +294,13 @@ func TestProject_IsBasicAuthActive(t *testing.T) { expected: true, }, 
} - for _, tc := range tests { - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - res := tc.p.IsBasicAuthActive() - assert.Equal(tt, tc.expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.p.IsBasicAuthActive() + assert.Equal(t, tt.expected, res) }) } } diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index e29662959..e2e63930f 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -1,7 +1,6 @@ package property import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -40,7 +39,7 @@ func TestBuilder_Items(t *testing.T) { propertySchemaField1ID := FieldID("a") propertySchemaGroup1ID := SchemaGroupID("A") - testCases := []struct { + tests := []struct { Name string Input, Expected []Item }{ @@ -77,16 +76,16 @@ func TestBuilder_Items(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() res := New().NewID(). Scene(NewSceneID()). Schema(MustSchemaID("xxx~1.1.1/aa")). - Items(tc.Input). + Items(tt.Input). 
MustBuild() - assert.Equal(tt, tc.Expected, res.Items()) + assert.Equal(t, tt.Expected, res.Items()) }) } } @@ -99,7 +98,7 @@ func TestBuilder_Build(t *testing.T) { propertySchemaField1ID := FieldID("a") propertySchemaGroup1ID := SchemaGroupID("A") - testCases := []struct { + tests := []struct { Name string Id ID Scene SceneID @@ -181,18 +180,18 @@ func TestBuilder_Build(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := New().ID(tc.Id).Items(tc.Items).Scene(tc.Scene).Schema(tc.Schema).Build() - if err == nil { - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.Schema, res.Schema()) - assert.Equal(tt, tc.Expected.Items, res.Items()) - assert.Equal(tt, tc.Expected.Scene, res.Scene()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := New().ID(tt.Id).Items(tt.Items).Scene(tt.Scene).Schema(tt.Schema).Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected.Id, res.ID()) + assert.Equal(t, tt.Expected.Schema, res.Schema()) + assert.Equal(t, tt.Expected.Items, res.Items()) + assert.Equal(t, tt.Expected.Scene, res.Scene()) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } diff --git a/pkg/property/condition_test.go b/pkg/property/condition_test.go index 74a7ea2fb..0f77682d0 100644 --- a/pkg/property/condition_test.go +++ b/pkg/property/condition_test.go @@ -7,7 +7,7 @@ import ( ) func TestCondition_Clone(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Con, Expected *Condition }{ @@ -29,12 +29,12 @@ func TestCondition_Clone(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Con.Clone() - assert.Equal(tt, tc.Expected, res) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Con.Clone() + assert.Equal(t, 
tt.Expected, res) }) } } diff --git a/pkg/property/field_builder_test.go b/pkg/property/field_builder_test.go index ae37a952b..ee3c42a66 100644 --- a/pkg/property/field_builder_test.go +++ b/pkg/property/field_builder_test.go @@ -1,7 +1,6 @@ package property import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -24,66 +23,60 @@ func TestFieldBuilder_Link(t *testing.T) { func TestFieldBuilder_Build(t *testing.T) { l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) - testCases := []struct { + + type args struct { + Links *Links + Value *Value + Field *SchemaField + Type ValueType + } + + tests := []struct { Name string - Links *Links - Value *Value - SF *SchemaField - Expected struct { - PType ValueType - Links *Links - Value *Value - } - Err error + Args args + Expected *Field + Err error }{ { Name: "fail invalid property id", - Expected: struct { - PType ValueType - Links *Links - Value *Value - }{}, - Err: ErrInvalidID, + Err: ErrInvalidID, }, { - Name: "fail invalid property type", - SF: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), - Value: ValueTypeString.ValueFrom("vvv"), - Expected: struct { - PType ValueType - Links *Links - Value *Value - }{}, - Err: ErrInvalidPropertyType, + Name: "fail invalid property value", + Args: args{ + Field: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("vvv"), + }, + Err: ErrInvalidPropertyValue, }, { - Name: "success", - SF: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), - Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFrom("vvv"), - Expected: struct { - PType ValueType - Links *Links - Value *Value - }{ - PType: ValueTypeString, + Name: "success", + Args: args{ + Field: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), Links: NewLinks([]*Link{l}), + Type: ValueTypeString, Value: ValueTypeString.ValueFrom("vvv"), }, - Err: nil, + Expected: &Field{ + field: "A", + links: 
NewLinks([]*Link{l}), + v: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := NewField(tc.SF).Value(OptionalValueFrom(tc.Value)).Link(tc.Links).Build() - if err == nil { - assert.Equal(tt, tc.Expected.Links, res.Links()) - assert.Equal(tt, tc.Expected.PType, res.Type()) - assert.Equal(tt, tc.Expected.Value, res.Value()) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewField(tt.Args.Field). + Value(NewOptionalValue(tt.Args.Type, tt.Args.Value)). + Link(tt.Args.Links).Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -91,71 +84,66 @@ func TestFieldBuilder_Build(t *testing.T) { func TestFieldBuilder_MustBuild(t *testing.T) { l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) - testCases := []struct { + + type args struct { + Links *Links + Value *Value + Field *SchemaField + Type ValueType + } + + tests := []struct { Name string - Fails bool - Links *Links - Value *Value - SF *SchemaField - Expected struct { - PType ValueType - Links *Links - Value *Value - } + Args args + Expected *Field + Err string }{ { - Name: "fail invalid property id", - Fails: true, - Expected: struct { - PType ValueType - Links *Links - Value *Value - }{}, + Name: "fail invalid property id", + Err: ErrInvalidID.Error(), }, { - Name: "fail invalid property type", - SF: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), - Value: ValueTypeString.ValueFrom("vvv"), - Fails: true, - Expected: struct { - PType ValueType - Links *Links - Value *Value - }{}, + Name: "fail invalid property value", + Args: args{ + Field: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), + Type: ValueTypeString, + Value: ValueTypeString.ValueFrom("vvv"), + }, + Err: 
ErrInvalidPropertyValue.Error(), }, { - Name: "success", - SF: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), - Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFrom("vvv"), - Expected: struct { - PType ValueType - Links *Links - Value *Value - }{ - PType: ValueTypeString, + Name: "success", + Args: args{ + Field: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), Links: NewLinks([]*Link{l}), + Type: ValueTypeString, Value: ValueTypeString.ValueFrom("vvv"), }, + Expected: &Field{ + field: "A", + links: NewLinks([]*Link{l}), + v: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *Field - if tc.Fails { - defer func() { - if r := recover(); r != nil { - assert.Nil(tt, res) - } - }() - res = NewField(tc.SF).Value(OptionalValueFrom(tc.Value)).Link(tc.Links).MustBuild() + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Field { + t.Helper() + return NewField(tt.Args.Field). + Value(NewOptionalValue(tt.Args.Type, tt.Args.Value)). + Link(tt.Args.Links). 
+ MustBuild() + } + + if tt.Err != "" { + assert.PanicsWithError(t, tt.Err, func() { _ = build() }) } else { - res = NewField(tc.SF).Value(OptionalValueFrom(tc.Value)).Link(tc.Links).MustBuild() - assert.Equal(tt, tc.Expected.Links, res.Links()) - assert.Equal(tt, tc.Expected.PType, res.Type()) - assert.Equal(tt, tc.Expected.Value, res.Value()) + assert.Equal(t, tt.Expected, build()) } }) } @@ -168,64 +156,59 @@ func TestNewFieldUnsafe(t *testing.T) { func TestFieldUnsafeBuilder_Build(t *testing.T) { l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) - testCases := []struct { + + type args struct { + Links *Links + Value *Value + Field FieldID + Type ValueType + } + + tests := []struct { Name string - Links *Links - Value *Value - Type ValueType - Field FieldID - Expected struct { - PType ValueType - Field FieldID - Links *Links - Value *Value - } + Args args + Expected *Field }{ { - Name: "success", - Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFrom("vvv"), - Type: ValueTypeString, - Field: "a", - Expected: struct { - PType ValueType - Field FieldID - Links *Links - Value *Value - }{ - PType: ValueTypeString, - Field: "a", + Name: "success", + Args: args{ Links: NewLinks([]*Link{l}), Value: ValueTypeString.ValueFrom("vvv"), + Type: ValueTypeString, + Field: "a", + }, + Expected: &Field{ + field: "a", + links: NewLinks([]*Link{l}), + v: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), }, }, { - Name: "nil value", - Links: NewLinks([]*Link{l}), - Value: nil, - Type: ValueTypeString, - Field: "a", - Expected: struct { - PType ValueType - Field FieldID - Links *Links - Value *Value - }{ - PType: ValueTypeString, - Field: "a", + Name: "nil value", + Args: args{ Links: NewLinks([]*Link{l}), Value: nil, + Type: ValueTypeString, + Field: "a", + }, + Expected: &Field{ + field: "a", + links: NewLinks([]*Link{l}), + v: NewOptionalValue(ValueTypeString, nil), }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt 
*testing.T) { - tt.Parallel() - res := NewFieldUnsafe().ValueUnsafe(NewOptionalValue(tc.Type, tc.Value)).LinksUnsafe(tc.Links).FieldUnsafe(tc.Field).Build() - assert.Equal(tt, tc.Expected.Links, res.Links()) - assert.Equal(tt, tc.Expected.PType, res.Type()) - assert.Equal(tt, tc.Expected.Value, res.Value()) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := NewFieldUnsafe(). + ValueUnsafe(NewOptionalValue(tt.Args.Type, tt.Args.Value)). + LinksUnsafe(tt.Args.Links). + FieldUnsafe(tt.Args.Field). + Build() + assert.Equal(t, tt.Expected, res) }) } } diff --git a/pkg/property/field_test.go b/pkg/property/field_test.go index 272902500..54db81afc 100644 --- a/pkg/property/field_test.go +++ b/pkg/property/field_test.go @@ -15,7 +15,7 @@ func TestField_ActualValue(t *testing.T) { l := NewLink(dsid, dssid, dssfid) ls := NewLinks([]*Link{l}) - testCases := []struct { + tests := []struct { Name string Field *Field DS *dataset.Dataset @@ -44,12 +44,12 @@ func TestField_ActualValue(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Field.ActualValue(tc.DS) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -62,7 +62,7 @@ func TestField_CollectDatasets(t *testing.T) { l := NewLink(dsid, dssid, dssfid) ls := NewLinks([]*Link{l}) - testCases := []struct { + tests := []struct { Name string Field *Field Expected []DatasetID @@ -78,12 +78,12 @@ func TestField_CollectDatasets(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Field.CollectDatasets() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/property/group.go b/pkg/property/group.go 
index 80d1a28bc..cc1ddc106 100644 --- a/pkg/property/group.go +++ b/pkg/property/group.go @@ -231,18 +231,18 @@ func (g *Group) MigrateDataset(q DatasetMigrationParam) { } } -func (g *Group) UpdateNameFieldValue(ps *Schema, value *Value) error { - if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { +func (g *Group) RepresentativeField(schema *Schema) *Field { + if g == nil || schema == nil || !g.Schema().Equal(schema.ID()) { return nil } - if psg := ps.GroupByPointer(NewPointer(&g.itemBase.SchemaGroup, nil, nil)); psg != nil { + if psg := schema.GroupByPointer(NewPointer(&g.itemBase.SchemaGroup, nil, nil)); psg != nil { if representativeField := psg.RepresentativeFieldID(); representativeField != nil { - if f, _ := g.GetOrCreateField(ps, *representativeField); f != nil { - return f.Update(value, psg.Field(*representativeField)) + if f, _ := g.GetOrCreateField(schema, *representativeField); f != nil { + return f } } } - return ErrInvalidPropertyField + return nil } func (p *Group) ValidateSchema(ps *SchemaGroup) error { diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index 0b01b7365..6c1211a90 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -1,7 +1,6 @@ package property import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -13,57 +12,56 @@ func TestGroupBuilder_Build(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - testCases := []struct { - Name string - Id ItemID + + type args struct { + ID ItemID Schema SchemaID SchemaGroup SchemaGroupID Fields []*Field - Expected struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Fields []*Field - } - Err error + } + + tests := []struct { + Name string + Args args + Expected *Group + Err error }{ { Name: "fail invalid id", Err: ErrInvalidID, }, { - Name: "success", - Id: iid, - 
Schema: sid, - SchemaGroup: "a", - Fields: []*Field{f}, - Expected: struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Fields []*Field - }{ - Id: iid, + Name: "success", + Args: args{ + ID: iid, Schema: sid, SchemaGroup: "a", Fields: []*Field{f}, }, - Err: nil, + Expected: &Group{ + itemBase: itemBase{ + ID: iid, + Schema: sid, + SchemaGroup: "a", + }, + fields: []*Field{f}, + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := NewGroup().ID(tc.Id).Fields(tc.Fields).Schema(tc.Schema, tc.SchemaGroup).Build() - if err == nil { - assert.Equal(tt, tc.Expected.Fields, res.Fields()) - assert.Equal(tt, tc.Expected.Schema, res.Schema()) - assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) - assert.Equal(tt, tc.Expected.Id, res.ID()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewGroup(). + ID(tt.Args.ID). + Fields(tt.Args.Fields). + Schema(tt.Args.Schema, tt.Args.SchemaGroup). 
+ Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -75,64 +73,62 @@ func TestGroupBuilder_MustBuild(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - testCases := []struct { - Name string - Fail bool - Id ItemID + + type args struct { + ID ItemID Schema SchemaID SchemaGroup SchemaGroupID Fields []*Field - Expected struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Fields []*Field - } + } + + tests := []struct { + Name string + Args args + Expected *Group + Err error }{ { Name: "fail invalid id", - Fail: true, + Err: ErrInvalidID, }, { - Name: "success", - Id: iid, - Schema: sid, - SchemaGroup: "a", - Fields: []*Field{f}, - Expected: struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Fields []*Field - }{ - Id: iid, + Name: "success", + Args: args{ + ID: iid, Schema: sid, SchemaGroup: "a", Fields: []*Field{f}, }, + Expected: &Group{ + itemBase: itemBase{ + ID: iid, + Schema: sid, + SchemaGroup: "a", + }, + fields: []*Field{f}, + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *Group - if tc.Fail { - defer func() { - if r := recover(); r != nil { - assert.Nil(tt, res) - } - }() - res = NewGroup().ID(tc.Id).Fields(tc.Fields).Schema(tc.Schema, tc.SchemaGroup).MustBuild() - } else { - res = NewGroup().ID(tc.Id).Fields(tc.Fields).Schema(tc.Schema, tc.SchemaGroup).MustBuild() - assert.Equal(tt, tc.Expected.Fields, res.Fields()) - assert.Equal(tt, tc.Expected.Schema, res.Schema()) - assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) - assert.Equal(tt, tc.Expected.Id, res.ID()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Group { + t.Helper() + return 
NewGroup(). + ID(tt.Args.ID). + Fields(tt.Args.Fields). + Schema(tt.Args.Schema, tt.Args.SchemaGroup). + MustBuild() } + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } }) } } diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go index cdf459d84..05e0de8ef 100644 --- a/pkg/property/group_list_builder_test.go +++ b/pkg/property/group_list_builder_test.go @@ -1,7 +1,6 @@ package property import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -11,55 +10,56 @@ func TestGroupListBuilder_Build(t *testing.T) { pid := NewItemID() scid := MustSchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} - testCases := []struct { - Name string - Id ItemID + + type args struct { + ID ItemID Schema SchemaID SchemaGroup SchemaGroupID Groups []*Group - Expected struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Groups []*Group - } - Err error + } + + tests := []struct { + Name string + Args args + Expected *GroupList + Err error }{ { - Name: "success", - Id: pid, - Schema: scid, - SchemaGroup: "aa", - Groups: groups, - Expected: struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Groups []*Group - }{ - Id: pid, + Name: "success", + Args: args{ + ID: pid, Schema: scid, SchemaGroup: "aa", Groups: groups, }, + Expected: &GroupList{ + itemBase: itemBase{ + ID: pid, + Schema: scid, + SchemaGroup: "aa", + }, + groups: groups, + }, }, { Name: "fail invalid id", Err: ErrInvalidID, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := NewGroupList().ID(tc.Id).Schema(tc.Schema, tc.SchemaGroup).Groups(tc.Groups).Build() - if err == nil { - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) - assert.Equal(tt, tc.Expected.Schema, res.Schema()) - assert.Equal(tt, tc.Expected.Groups, 
res.Groups()) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewGroupList(). + ID(tt.Args.ID). + Schema(tt.Args.Schema, tt.Args.SchemaGroup). + Groups(tt.Args.Groups). + Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -74,69 +74,68 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { pid := NewItemID() scid := MustSchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} - testCases := []struct { - Name string - Fails bool - Id ItemID + + type args struct { + ID ItemID Schema SchemaID SchemaGroup SchemaGroupID Groups []*Group - Expected struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Groups []*Group - } + } + + tests := []struct { + Name string + Args args + Err error + Expected *GroupList }{ { - Name: "success", - Id: pid, - Schema: scid, - SchemaGroup: "aa", - Groups: groups, - Expected: struct { - Id ItemID - Schema SchemaID - SchemaGroup SchemaGroupID - Groups []*Group - }{ - Id: pid, + Name: "success", + Args: args{ + ID: pid, Schema: scid, SchemaGroup: "aa", Groups: groups, }, + Expected: &GroupList{ + itemBase: itemBase{ + ID: pid, + Schema: scid, + SchemaGroup: "aa", + }, + groups: groups, + }, }, { - Name: "fail invalid id", - Fails: true, + Name: "fail invalid id", + Err: ErrInvalidID, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *GroupList - if tc.Fails { - defer func() { - if r := recover(); r != nil { - assert.Nil(tt, res) - } - }() - res = NewGroupList().ID(tc.Id).Schema(tc.Schema, tc.SchemaGroup).Groups(tc.Groups).MustBuild() - } else { - res = NewGroupList().ID(tc.Id).Schema(tc.Schema, tc.SchemaGroup).Groups(tc.Groups).MustBuild() - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.SchemaGroup, res.SchemaGroup()) - 
assert.Equal(tt, tc.Expected.Schema, res.Schema()) - assert.Equal(tt, tc.Expected.Groups, res.Groups()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + build := func() *GroupList { + t.Helper() + return NewGroupList(). + ID(tc.Args.ID). + Schema(tc.Args.Schema, tc.Args.SchemaGroup). + Groups(tc.Args.Groups). + MustBuild() } + if tc.Err != nil { + assert.PanicsWithValue(t, tc.Err, func() { _ = build() }) + } else { + assert.Equal(t, tc.Expected, build()) + } }) } } func TestInitGroupListFrom(t *testing.T) { - testCases := []struct { + tests := []struct { Name string SchemaGroup *SchemaGroup ExpectedSG SchemaGroupID @@ -153,13 +152,13 @@ func TestInitGroupListFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := InitGroupFrom(tc.SchemaGroup) - assert.Equal(tt, tc.ExpectedSG, res.SchemaGroup()) - assert.Equal(tt, tc.ExpectedSchema, res.Schema()) + assert.Equal(t, tc.ExpectedSG, res.SchemaGroup()) + assert.Equal(t, tc.ExpectedSchema, res.Schema()) }) } } diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index 343ab2cd6..98dd0fccf 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -14,7 +14,7 @@ func TestGroupList_IDRef(t *testing.T) { } func TestGroupList_SchemaRef(t *testing.T) { - testCases := []struct { + tests := []struct { Name string GL *GroupList ExpectedSG *SchemaGroupID @@ -30,12 +30,13 @@ func TestGroupList_SchemaRef(t *testing.T) { ExpectedSchema: MustSchemaID("xx~1.0.0/aa").Ref(), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.ExpectedSG, tc.GL.SchemaGroupRef()) - assert.Equal(tt, tc.ExpectedSchema, tc.GL.SchemaRef()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedSG, 
tc.GL.SchemaGroupRef()) + assert.Equal(t, tc.ExpectedSchema, tc.GL.SchemaRef()) }) } } @@ -49,7 +50,8 @@ func TestGroupList_HasLinkedField(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} - testCases := []struct { + + tests := []struct { Name string GL *GroupList Expected bool @@ -68,12 +70,13 @@ func TestGroupList_HasLinkedField(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.HasLinkedField()) - assert.Equal(tt, tc.Expected, tc.GL.IsDatasetLinked(dssid, dsid)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.HasLinkedField()) + assert.Equal(t, tc.Expected, tc.GL.IsDatasetLinked(dssid, dsid)) }) } } @@ -87,7 +90,8 @@ func TestGroupList_CollectDatasets(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} - testCases := []struct { + + tests := []struct { Name string GL *GroupList Expected []DatasetID @@ -106,11 +110,12 @@ func TestGroupList_CollectDatasets(t *testing.T) { Expected: []DatasetID{}, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.CollectDatasets()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.CollectDatasets()) }) } } @@ -124,7 +129,8 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: 
[]*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} - testCases := []struct { + + tests := []struct { Name string GL *GroupList Expected []*Field @@ -143,11 +149,12 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { Expected: []*Field{}, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.FieldsByLinkedDataset(dssid, dsid)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.FieldsByLinkedDataset(dssid, dsid)) }) } } @@ -160,7 +167,8 @@ func TestGroupList_IsEmpty(t *testing.T) { dssid := NewDatasetSchemaID() f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} - testCases := []struct { + + tests := []struct { Name string GL *GroupList Expected bool @@ -179,11 +187,12 @@ func TestGroupList_IsEmpty(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.IsEmpty()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.IsEmpty()) }) } } @@ -196,7 +205,8 @@ func TestGroupList_Prune(t *testing.T) { pid := NewItemID() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f, f2}).MustBuild()} pruned := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} - testCases := []struct { + + tests := []struct { Name string GL *GroupList Expected []*Group @@ -210,12 +220,13 @@ func TestGroupList_Prune(t *testing.T) { Expected: pruned, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - 
tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() tc.GL.Prune() - assert.Equal(tt, tc.Expected, tc.GL.Groups()) + assert.Equal(t, tc.Expected, tc.GL.Groups()) }) } } @@ -223,7 +234,8 @@ func TestGroupList_Prune(t *testing.T) { func TestGroupList_GetGroup(t *testing.T) { pid := NewItemID() g := NewGroup().ID(pid).MustBuild() - testCases := []struct { + + tests := []struct { Name string Input ItemID GL *GroupList @@ -245,11 +257,12 @@ func TestGroupList_GetGroup(t *testing.T) { Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.GetGroup(tc.Input)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.GetGroup(tc.Input)) }) } } @@ -259,7 +272,8 @@ func TestGroupList_GroupAt(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string Index int GL *GroupList @@ -283,11 +297,12 @@ func TestGroupList_GroupAt(t *testing.T) { Expected: g3, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.GroupAt(tc.Index)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.GroupAt(tc.Index)) }) } } @@ -297,7 +312,8 @@ func TestGroupList_Has(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string Input ItemID GL *GroupList @@ -319,11 +335,12 @@ func TestGroupList_Has(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - 
assert.Equal(tt, tc.Expected, tc.GL.Has(tc.Input)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.Has(tc.Input)) }) } } @@ -333,7 +350,8 @@ func TestGroupList_Count(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Expected int @@ -347,11 +365,12 @@ func TestGroupList_Count(t *testing.T) { Expected: 4, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.GL.Count()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.GL.Count()) }) } } @@ -361,7 +380,8 @@ func TestGroupList_Add(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Gr *Group @@ -402,12 +422,12 @@ func TestGroupList_Add(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.GL.Add(tc.Gr, tc.Index) - assert.Equal(tt, tc.Expected.Gr, tc.GL.GroupAt(tc.Expected.Index)) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.Add(tt.Gr, tt.Index) + assert.Equal(t, tt.Expected.Gr, tt.GL.GroupAt(tt.Expected.Index)) }) } } @@ -417,7 +437,8 @@ func TestGroupList_AddOrMove(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Gr *Group @@ -471,12 +492,12 @@ func TestGroupList_AddOrMove(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - 
tt.Parallel() - tc.GL.AddOrMove(tc.Gr, tc.Index) - assert.Equal(tt, tc.Expected.Gr, tc.GL.GroupAt(tc.Expected.Index)) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.AddOrMove(tt.Gr, tt.Index) + assert.Equal(t, tt.Expected.Gr, tt.GL.GroupAt(tt.Expected.Index)) }) } } @@ -486,7 +507,8 @@ func TestGroupList_Move(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Id ItemID @@ -511,12 +533,12 @@ func TestGroupList_Move(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.GL.Move(tc.Id, tc.ToIndex) - assert.Equal(tt, tc.Expected.Id, tc.GL.GroupAt(tc.Expected.Index).ID()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.Move(tt.Id, tt.ToIndex) + assert.Equal(t, tt.Expected.Id, tt.GL.GroupAt(tt.Expected.Index).ID()) }) } } @@ -526,7 +548,8 @@ func TestGroupList_MoveAt(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList FromIndex, ToIndex int @@ -558,12 +581,12 @@ func TestGroupList_MoveAt(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.GL.MoveAt(tc.FromIndex, tc.ToIndex) - assert.Equal(tt, tc.Expected, tc.GL.Groups()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.MoveAt(tt.FromIndex, tt.ToIndex) + assert.Equal(t, tt.Expected, tt.GL.Groups()) }) } } @@ -573,7 +596,8 @@ func TestGroupList_RemoveAt(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := 
NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Index int @@ -602,12 +626,12 @@ func TestGroupList_RemoveAt(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.GL.RemoveAt(tc.Index) - assert.Equal(tt, tc.Expected, tc.GL.Groups()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.GL.RemoveAt(tt.Index) + assert.Equal(t, tt.Expected, tt.GL.Groups()) }) } } @@ -616,7 +640,8 @@ func TestGroupList_Remove(t *testing.T) { g2 := NewGroup().ID(NewItemID()).MustBuild() g3 := NewGroup().ID(NewItemID()).MustBuild() g4 := NewGroup().ID(NewItemID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Input ItemID @@ -639,12 +664,12 @@ func TestGroupList_Remove(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.GL.Remove(tc.Input) - assert.Equal(tt, tc.Expected, res) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.GL.Remove(tt.Input) + assert.Equal(t, tt.Expected, res) }) } } @@ -653,7 +678,8 @@ func TestGroupList_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() g := NewGroup().ID(NewItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Schema *Schema @@ -696,13 +722,13 @@ func TestGroupList_GetOrCreateField(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, ok := tc.GL.GetOrCreateField(tc.Schema, tc.Ptr) - assert.Equal(tt, tc.Expected.Field, res) - assert.Equal(tt, tc.Expected.Ok, ok) + for _, tt := range tests { + tt := tt 
+ t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, ok := tt.GL.GetOrCreateField(tt.Schema, tt.Ptr) + assert.Equal(t, tt.Expected.Field, res) + assert.Equal(t, tt.Expected.Ok, ok) }) } } @@ -712,7 +738,8 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() g := NewGroup().ID(NewItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() - testCases := []struct { + + tests := []struct { Name string GL *GroupList Schema *Schema @@ -728,14 +755,14 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.GL.CreateAndAddListItem(tc.Schema, tc.Index) - assert.Equal(tt, tc.Expected.Schema(), res.Schema()) - assert.Equal(tt, tc.Expected.Fields(), res.Fields()) - assert.Equal(tt, tc.Expected.SchemaGroup(), res.SchemaGroup()) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.GL.CreateAndAddListItem(tt.Schema, tt.Index) + assert.Equal(t, tt.Expected.Schema(), res.Schema()) + assert.Equal(t, tt.Expected.Fields(), res.Fields()) + assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) }) } } diff --git a/pkg/property/group_test.go b/pkg/property/group_test.go index 5f038056a..38698c468 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -1,9 +1,9 @@ package property import ( - "errors" "testing" + "github.com/reearth/reearth-backend/pkg/value" "github.com/stretchr/testify/assert" ) @@ -33,7 +33,7 @@ func TestGroup_HasLinkedField(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Expected bool @@ -54,12 +54,13 @@ func TestGroup_HasLinkedField(t 
*testing.T) { Expected: false, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.HasLinkedField() - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.HasLinkedField() + assert.Equal(t, tt.Expected, res) }) } } @@ -73,7 +74,7 @@ func TestGroup_IsDatasetLinked(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group DatasetSchema DatasetSchemaID @@ -96,12 +97,13 @@ func TestGroup_IsDatasetLinked(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.IsDatasetLinked(tc.DatasetSchema, tc.Dataset) - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.IsDatasetLinked(tt.DatasetSchema, tt.Dataset) + assert.Equal(t, tt.Expected, res) }) } } @@ -114,7 +116,7 @@ func TestGroup_CollectDatasets(t *testing.T) { ls := NewLinks([]*Link{l}) f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Expected []DatasetID @@ -130,12 +132,13 @@ func TestGroup_CollectDatasets(t *testing.T) { Expected: []DatasetID{dsid}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.CollectDatasets() - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.CollectDatasets() + assert.Equal(t, tt.Expected, res) }) } } @@ -149,7 +152,7 @@ func TestGroup_FieldsByLinkedDataset(t *testing.T) { ls := NewLinks([]*Link{l}) f := 
NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group DatasetSchema DatasetSchemaID @@ -167,12 +170,13 @@ func TestGroup_FieldsByLinkedDataset(t *testing.T) { Expected: []*Field{f}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.FieldsByLinkedDataset(tc.DatasetSchema, tc.DataSet) - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.FieldsByLinkedDataset(tt.DatasetSchema, tt.DataSet) + assert.Equal(t, tt.Expected, res) }) } } @@ -183,7 +187,7 @@ func TestGroup_IsEmpty(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Expected bool @@ -200,12 +204,13 @@ func TestGroup_IsEmpty(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.IsEmpty() - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.IsEmpty() + assert.Equal(t, tt.Expected, res) }) } } @@ -216,7 +221,7 @@ func TestGroup_Prune(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Expected []*Field @@ -231,12 +236,13 @@ func TestGroup_Prune(t *testing.T) { Expected: []*Field{f}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.Group.Prune() - assert.Equal(tt, tc.Expected, tc.Group.Fields()) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.Group.Prune() + assert.Equal(t, tt.Expected, tt.Group.Fields()) }) } } @@ 
-245,7 +251,8 @@ func TestGroup_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() f := NewField(sf).MustBuild() sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { + + tests := []struct { Name string Group *Group PS *Schema @@ -294,13 +301,14 @@ func TestGroup_GetOrCreateField(t *testing.T) { }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, b := tc.Group.GetOrCreateField(tc.PS, tc.FID) - assert.Equal(tt, tc.Expected.Field, res) - assert.Equal(tt, tc.Expected.Bool, b) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, b := tt.Group.GetOrCreateField(tt.PS, tt.FID) + assert.Equal(t, tt.Expected.Field, res) + assert.Equal(t, tt.Expected.Bool, b) }) } } @@ -312,13 +320,12 @@ func TestGroup_RemoveField(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf2).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Input FieldID Expected []*Field }{ - { Name: "nil group", }, @@ -329,12 +336,13 @@ func TestGroup_RemoveField(t *testing.T) { Expected: []*Field{f}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.Group.RemoveField(tc.Input) - assert.Equal(tt, tc.Expected, tc.Group.Fields()) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + tt.Group.RemoveField(tt.Input) + assert.Equal(t, tt.Expected, tt.Group.Fields()) }) } } @@ -346,12 +354,11 @@ func TestGroup_FieldIDs(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf2).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Expected []FieldID }{ - { Name: "nil group", }, @@ -361,12 +368,13 @@ func TestGroup_FieldIDs(t *testing.T) { Expected: 
[]FieldID{"a", "b"}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.FieldIDs() - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.FieldIDs() + assert.Equal(t, tt.Expected, res) }) } } @@ -378,13 +386,12 @@ func TestGroup_Field(t *testing.T) { f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := NewField(sf2).MustBuild() - testCases := []struct { + tests := []struct { Name string Group *Group Input FieldID Expected *Field }{ - { Name: "nil group", }, @@ -401,29 +408,41 @@ func TestGroup_Field(t *testing.T) { Expected: nil, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.Field(tc.Input) - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Group.Field(tt.Input) + assert.Equal(t, tt.Expected, res) }) } } -func TestGroup_UpdateNameFieldValue(t *testing.T) { +func TestGroup_UpdateRepresentativeFieldValue(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - //f := NewField(sf).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("bb").Schema(MustSchemaID("xx~1.0.0/bb")).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { + sg := NewSchemaGroup().ID("aa"). + Schema(MustSchemaID("xx~1.0.0/aa")). + Fields([]*SchemaField{sf}). + RepresentativeField(FieldID("aa").Ref()). + MustBuild() + sg2 := NewSchemaGroup(). + ID("bb"). + Schema(MustSchemaID("xx~1.0.0/bb")). + Fields([]*SchemaField{sf}). 
+ MustBuild() + + type args struct { + Schema *Schema + Value *Value + } + + tests := []struct { Name string + Args args Group *Group - PS *Schema - Value *Value - FID FieldID + FieldID FieldID Expected *Field - Err error }{ { Name: "nil group", @@ -435,36 +454,34 @@ func TestGroup_UpdateNameFieldValue(t *testing.T) { { Name: "group schema doesn't equal to ps", Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), - PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Args: args{ + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + }, }, { - Name: "update value", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), - Value: ValueTypeString.ValueFrom("abc"), - FID: "aa", - Expected: NewField(sf).Value(OptionalValueFrom(ValueTypeString.ValueFrom("abc"))).MustBuild(), + Name: "invalid property field", + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Args: args{ + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), + Value: ValueTypeString.ValueFrom("abc"), + }, }, { - Name: "invalid property field", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - PS: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), - Value: ValueTypeString.ValueFrom("abc"), - FID: "aa", - Expected: nil, - Err: ErrInvalidPropertyField, + Name: "ok", + Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Args: args{ + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Value: ValueTypeString.ValueFrom("abc"), + }, + Expected: &Field{field: "aa", v: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}}, }, } - for _, tc := range testCases { - tc := tc - 
t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.Group.UpdateNameFieldValue(tc.PS, tc.Value) - if res == nil { - assert.Equal(tt, tc.Expected, tc.Group.Field(tc.FID)) - } else { - assert.True(tt, errors.As(res, &tc.Err)) - } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.Expected, tt.Group.RepresentativeField(tt.Args.Schema)) }) } } diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index 887f469e2..d9df99e2d 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -13,7 +13,8 @@ func TestInitItemFrom(t *testing.T) { iid := NewItemID() propertySchemaID := MustSchemaID("xx~1.0.0/aa") propertySchemaField1ID := SchemaGroupID("aa") - testCases := []struct { + + tests := []struct { Name string SG *SchemaGroup Expected Item @@ -33,16 +34,16 @@ func TestInitItemFrom(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := InitItemFrom(tc.SG) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := InitItemFrom(tt.SG) if res != nil { - assert.Equal(tt, tc.Expected.Schema(), res.Schema()) - assert.Equal(tt, tc.Expected.SchemaGroup(), res.SchemaGroup()) + assert.Equal(t, tt.Expected.Schema(), res.Schema()) + assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) } else { - assert.Nil(tt, tc.Expected) + assert.Nil(t, tt.Expected) } }) } diff --git a/pkg/property/link_test.go b/pkg/property/link_test.go index 5f2c2c944..bd5828e92 100644 --- a/pkg/property/link_test.go +++ b/pkg/property/link_test.go @@ -58,7 +58,7 @@ func TestLinks_IsDatasetLinked(t *testing.T) { NewLink(did1, dsid1, dfid1), } - testCases := []struct { + tests := []struct { Name string DSS DatasetSchemaID DS DatasetID @@ -84,16 +84,17 @@ func TestLinks_IsDatasetLinked(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc 
:= tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Links.IsDatasetLinked(tc.DSS, tc.DS) res2 := tc.Links.HasDataset(tc.DS) res3 := tc.Links.HasDatasetSchema(tc.DSS) - assert.Equal(tt, tc.Expected, res) - assert.Equal(tt, tc.Expected, res2) - assert.Equal(tt, tc.Expected, res3) + assert.Equal(t, tc.Expected, res) + assert.Equal(t, tc.Expected, res2) + assert.Equal(t, tc.Expected, res3) }) } } @@ -103,7 +104,7 @@ func TestLinks_Validate(t *testing.T) { did1 := NewDatasetID() dfid1 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string DSM dataset.SchemaMap DM dataset.Map @@ -143,12 +144,13 @@ func TestLinks_Validate(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Links.Validate(tc.DSM, tc.DM) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -161,7 +163,7 @@ func TestLinks_Replace(t *testing.T) { dfid1 := NewDatasetFieldID() dfid2 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string DSM map[DatasetSchemaID]DatasetSchemaID DM map[DatasetID]DatasetID @@ -219,12 +221,13 @@ func TestLinks_Replace(t *testing.T) { Links: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() tc.Links.Replace(tc.DSM, tc.DM, tc.FM) - assert.Equal(tt, tc.Expected.Links(), tc.Links.Links()) + assert.Equal(t, tc.Expected.Links(), tc.Links.Links()) }) } } @@ -234,7 +237,7 @@ func TestLinks_ApplyDataset(t *testing.T) { did1 := NewDatasetID() dfid1 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string Input *DatasetID Expected, Links *Links @@ -259,12 
+262,13 @@ func TestLinks_ApplyDataset(t *testing.T) { Expected: NewLinks([]*Link{NewLink(did1, dsid1, dfid1)}), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Links.ApplyDataset(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -274,7 +278,7 @@ func TestLink_Dataset(t *testing.T) { did1 := NewDatasetID() dfid1 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string Link *Link Expected *DatasetID @@ -292,11 +296,12 @@ func TestLink_Dataset(t *testing.T) { Expected: did1.Ref(), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { + t.Run(tc.Name, func(t *testing.T) { res := tc.Link.Dataset() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } @@ -307,7 +312,7 @@ func TestLink_DatasetSchema(t *testing.T) { did1 := NewDatasetID() dfid1 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string Link *Link Expected *DatasetSchemaID @@ -321,11 +326,12 @@ func TestLink_DatasetSchema(t *testing.T) { Expected: dsid1.Ref(), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { + t.Run(tc.Name, func(t *testing.T) { res := tc.Link.DatasetSchema() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } @@ -336,7 +342,7 @@ func TestLink_DatasetSchemaField(t *testing.T) { did1 := NewDatasetID() dfid1 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string Link *Link Expected *DatasetFieldID @@ -350,11 +356,12 @@ func TestLink_DatasetSchemaField(t *testing.T) { Expected: dfid1.Ref(), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { + t.Run(tc.Name, func(t *testing.T) { res := 
tc.Link.DatasetSchemaField() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -367,7 +374,7 @@ func TestLink_Value(t *testing.T) { dataset.NewField(dfid1, dataset.ValueTypeString.ValueFrom("aaa"), ""), } - testCases := []struct { + tests := []struct { Name string Link *Link Input *dataset.Dataset @@ -388,20 +395,22 @@ func TestLink_Value(t *testing.T) { Expected: dataset.ValueTypeString.ValueFrom("aaa"), }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { + t.Run(tc.Name, func(t *testing.T) { res := tc.Link.Value(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } + func TestLink_Validate(t *testing.T) { dsid1 := NewDatasetSchemaID() did1 := NewDatasetID() dfid1 := NewDatasetFieldID() - testCases := []struct { + tests := []struct { Name string DS *dataset.Dataset DSS *dataset.Schema @@ -462,12 +471,13 @@ func TestLink_Validate(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Link.Validate(tc.DSS, tc.DS) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/property/list_test.go b/pkg/property/list_test.go index 6ce34a583..8e1273c8d 100644 --- a/pkg/property/list_test.go +++ b/pkg/property/list_test.go @@ -14,7 +14,7 @@ var ( ) func TestMap_Add(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Input *Property M, Expected Map @@ -30,13 +30,13 @@ func TestMap_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() tc.M.Add(tc.Input) - assert.Equal(tt, tc.Expected, tc.M) - assert.Equal(tt, tc.Expected.List(), tc.M.List()) + assert.Equal(t, 
tc.Expected, tc.M) + assert.Equal(t, tc.Expected.List(), tc.M.List()) }) } } @@ -48,7 +48,7 @@ func TestMapFrom(t *testing.T) { } func TestMap_Clone(t *testing.T) { - testCases := []struct { + tests := []struct { Name string M, Expected Map }{ @@ -63,18 +63,18 @@ func TestMap_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.M.Clone() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestMap_Merge(t *testing.T) { - testCases := []struct { + tests := []struct { Name string M1, M2, Expected Map }{ @@ -90,12 +90,12 @@ func TestMap_Merge(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.M1.Merge(tc.M2) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/property/schema_builder_test.go b/pkg/property/schema_builder_test.go index 0e317b705..58dbe761e 100644 --- a/pkg/property/schema_builder_test.go +++ b/pkg/property/schema_builder_test.go @@ -1,7 +1,6 @@ package property import ( - "errors" "fmt" "testing" @@ -12,67 +11,70 @@ func TestSchemaBuilder_Build(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() sg2 := NewSchemaGroup().ID("daa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { - Name string - Id SchemaID + + type args struct { + ID SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields - Expected struct { - Id SchemaID - Version int - Groups []*SchemaGroup - Linkable LinkableFields - } - Err error + } + + tests := []struct { + Name string + Args args + Expected 
*Schema + Err error }{ { Name: "fail: invalid id", Err: ErrInvalidID, }, { - Name: "fail: invalid linkable field", - Id: MustSchemaID("xx~1.0.0/aa"), - Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, - Err: ErrInvalidPropertyLinkableField, + Name: "fail: invalid linkable field", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, + }, + Err: ErrInvalidPropertyLinkableField, }, { - Name: "fail: duplicated field", - Id: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg, sg2}, - Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, MustSchemaID("xx~1.0.0/aa"), []FieldID{"aa"}), + Name: "fail: duplicated field", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Groups: []*SchemaGroup{sg, sg2}, + }, + Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, MustSchemaID("xx~1.0.0/aa"), []FieldID{"aa"}), }, { - Name: "success", - Id: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg}, - Version: 1, - Expected: struct { - Id SchemaID - Version int - Groups []*SchemaGroup - Linkable LinkableFields - }{Id: MustSchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + Name: "success", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Groups: []*SchemaGroup{sg}, + Version: 1, + }, + Expected: &Schema{ + id: MustSchemaID("xx~1.0.0/aa"), + version: 1, + groups: []*SchemaGroup{sg}, + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() res, err := NewSchema(). - ID(tc.Id). - Groups(tc.Groups). - Version(tc.Version). - LinkableFields(tc.Linkable). + ID(tt.Args.ID). + Groups(tt.Args.Groups). + Version(tt.Args.Version). + LinkableFields(tt.Args.Linkable). 
Build() - if err == nil { - assert.Equal(tt, tc.Expected.Linkable, res.LinkableFields()) - assert.Equal(tt, tc.Expected.Groups, res.Groups()) - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.Version, res.Version()) + + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -82,78 +84,74 @@ func TestSchemaBuilder_MustBuild(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() sg2 := NewSchemaGroup().ID("daa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { - Name string - Fails bool - Id SchemaID + + type args struct { + ID SchemaID Version int Groups []*SchemaGroup Linkable LinkableFields - Expected struct { - Id SchemaID - Version int - Groups []*SchemaGroup - Linkable LinkableFields - } + } + + tests := []struct { + Name string + Args args + Expected *Schema + Err string }{ { - Name: "fail: invalid id", - Fails: true, + Name: "fail: invalid id", + Err: ErrInvalidID.Error(), }, { - Name: "fail: invalid linkable field", - Id: MustSchemaID("xx~1.0.0/aa"), - Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, - Fails: true, + Name: "fail: invalid linkable field", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, + }, + Err: ErrInvalidPropertyLinkableField.Error(), }, { - Name: "fail: duplicated field", - Id: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg, sg2}, - Fails: true, + Name: "fail: duplicated field", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Groups: []*SchemaGroup{sg, sg2}, + }, + Err: fmt.Sprintf("%s: %s %s", ErrDuplicatedField, MustSchemaID("xx~1.0.0/aa"), []FieldID{"aa"}), }, { - Name: "success", - Id: MustSchemaID("xx~1.0.0/aa"), - 
Groups: []*SchemaGroup{sg}, - Version: 1, - Expected: struct { - Id SchemaID - Version int - Groups []*SchemaGroup - Linkable LinkableFields - }{Id: MustSchemaID("xx~1.0.0/aa"), Version: 1, Groups: []*SchemaGroup{sg}}, + Name: "success", + Args: args{ + ID: MustSchemaID("xx~1.0.0/aa"), + Groups: []*SchemaGroup{sg}, + Version: 1, + }, + Expected: &Schema{ + id: MustSchemaID("xx~1.0.0/aa"), + version: 1, + groups: []*SchemaGroup{sg}, + }, }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *Schema - if tc.Fails { - defer func() { - if r := recover(); r != nil { - assert.Nil(tt, res) - } - }() - res = NewSchema(). - ID(tc.Id). - Groups(tc.Groups). - Version(tc.Version). - LinkableFields(tc.Linkable). + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Schema { + t.Helper() + return NewSchema(). + ID(tc.Args.ID). + Groups(tc.Args.Groups). + Version(tc.Args.Version). + LinkableFields(tc.Args.Linkable). MustBuild() + } + + if tc.Err != "" { + assert.PanicsWithError(t, tc.Err, func() { _ = build() }) } else { - res = NewSchema(). - ID(tc.Id). - Groups(tc.Groups). - Version(tc.Version). - LinkableFields(tc.Linkable). 
- MustBuild() - assert.Equal(tt, tc.Expected.Linkable, res.LinkableFields()) - assert.Equal(tt, tc.Expected.Groups, res.Groups()) - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.Version, res.Version()) + assert.Equal(t, tc.Expected, build()) } }) } diff --git a/pkg/property/schema_field_builder_test.go b/pkg/property/schema_field_builder_test.go index 164a29f39..6c7a33ef5 100644 --- a/pkg/property/schema_field_builder_test.go +++ b/pkg/property/schema_field_builder_test.go @@ -9,7 +9,7 @@ import ( ) func TestSchemaFieldBuilder_Build(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Id FieldID PropertyType ValueType @@ -53,40 +53,43 @@ func TestSchemaFieldBuilder_Build(t *testing.T) { Err: errors.New("invalid min and max"), }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() res, err := NewSchemaField(). - ID(tc.Id).Name(tc.Fname). - IsAvailableIf(tc.Cond). - Type(tc.PropertyType). - Description(tc.Description). - Choices(tc.Choices). - Prefix(tc.Prefix). - Suffix(tc.Suffix). - DefaultValue(tc.DefaultValue). - MaxRef(tc.MaxRef). - MinRef(tc.MinRef). - Min(tc.Min). - Max(tc.Max). - UI(tc.Ui). - UIRef(&tc.Ui). + ID(tt.Id).Name(tt.Fname). + IsAvailableIf(tt.Cond). + Type(tt.PropertyType). + Description(tt.Description). + Choices(tt.Choices). + Prefix(tt.Prefix). + Suffix(tt.Suffix). + DefaultValue(tt.DefaultValue). + MaxRef(tt.MaxRef). + MinRef(tt.MinRef). + Min(tt.Min). + Max(tt.Max). + UI(tt.Ui). + UIRef(&tt.Ui). 
Build() - if err == nil { - assert.Equal(tt, tc.Expected.Ui, res.UI()) - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.Min, res.Min()) - assert.Equal(tt, tc.Expected.Max, res.Max()) - assert.Equal(tt, tc.Expected.DefaultValue, res.DefaultValue()) - assert.Equal(tt, tc.Expected.Description, res.Description()) - assert.Equal(tt, tc.Expected.Prefix, res.Prefix()) - assert.Equal(tt, tc.Expected.Suffix, res.Suffix()) - assert.Equal(tt, tc.Expected.Choices, res.Choices()) - assert.Equal(tt, tc.Expected.Cond, res.IsAvailableIf()) - assert.Equal(tt, tc.Expected.Fname, res.Title()) - assert.Equal(tt, tc.Expected.PropertyType, res.Type()) + + if tt.Err == nil { + assert.Equal(t, tt.Expected.Ui, res.UI()) + assert.Equal(t, tt.Expected.Id, res.ID()) + assert.Equal(t, tt.Expected.Min, res.Min()) + assert.Equal(t, tt.Expected.Max, res.Max()) + assert.Equal(t, tt.Expected.DefaultValue, res.DefaultValue()) + assert.Equal(t, tt.Expected.Description, res.Description()) + assert.Equal(t, tt.Expected.Prefix, res.Prefix()) + assert.Equal(t, tt.Expected.Suffix, res.Suffix()) + assert.Equal(t, tt.Expected.Choices, res.Choices()) + assert.Equal(t, tt.Expected.Cond, res.IsAvailableIf()) + assert.Equal(t, tt.Expected.Fname, res.Title()) + assert.Equal(t, tt.Expected.PropertyType, res.Type()) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } diff --git a/pkg/property/schema_field_test.go b/pkg/property/schema_field_test.go index e339c3e6a..46f9464f0 100644 --- a/pkg/property/schema_field_test.go +++ b/pkg/property/schema_field_test.go @@ -11,7 +11,8 @@ func TestSchemaField_MinMax(t *testing.T) { getFloatRef := func(f float64) *float64 { return &f } - testCases := []struct { + + tests := []struct { Name string SF *SchemaField Expected struct { @@ -32,19 +33,20 @@ func TestSchemaField_MinMax(t *testing.T) { Name: "nil sf", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt 
*testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() min, max := tc.SF.MinMax() - assert.Equal(tt, tc.Expected.Min, min) - assert.Equal(tt, tc.Expected.Max, max) + assert.Equal(t, tc.Expected.Min, min) + assert.Equal(t, tc.Expected.Max, max) }) } } func TestSchemaField_Choice(t *testing.T) { - testCases := []struct { + tests := []struct { Name, Key string SF *SchemaField Expected *SchemaFieldChoice @@ -91,12 +93,13 @@ func TestSchemaField_Choice(t *testing.T) { Name: "nil sf", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() ch := tc.SF.Choice(tc.Key) - assert.Equal(tt, tc.Expected, ch) + assert.Equal(t, tc.Expected, ch) }) } } @@ -114,7 +117,7 @@ func TestSchemaField_SetTitle(t *testing.T) { } func TestSchemaField_Validate(t *testing.T) { - testCases := []struct { + tests := []struct { Name string SF *SchemaField Input *OptionalValue @@ -200,12 +203,13 @@ func TestSchemaField_Validate(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.SF.Validate(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go index cafc7dd5c..a21909a19 100644 --- a/pkg/property/schema_group_builder_test.go +++ b/pkg/property/schema_group_builder_test.go @@ -21,7 +21,7 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { Title i18n.String } - testCases := []struct { + tests := []struct { Name string ID SchemaGroupID Sid SchemaID @@ -84,10 +84,10 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - 
tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, err := NewSchemaGroup(). ID(tc.ID). Schema(tc.Sid). @@ -97,14 +97,14 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { IsAvailableIf(tc.IsAvailableIf). Build() if tc.Err == nil { - assert.Equal(tt, tc.Expected.IsAvailableIf, res.IsAvailableIf()) - assert.Equal(tt, tc.Expected.Sid, res.Schema()) - assert.Equal(tt, tc.Expected.ID, res.ID()) - assert.Equal(tt, tc.Expected.Title, res.Title()) - assert.Equal(tt, tc.Expected.List, res.IsList()) - assert.Equal(tt, tc.Expected.Fields, res.Fields()) + assert.Equal(t, tc.Expected.IsAvailableIf, res.IsAvailableIf()) + assert.Equal(t, tc.Expected.Sid, res.Schema()) + assert.Equal(t, tc.Expected.ID, res.ID()) + assert.Equal(t, tc.Expected.Title, res.Title()) + assert.Equal(t, tc.Expected.List, res.IsList()) + assert.Equal(t, tc.Expected.Fields, res.Fields()) } else { - assert.Equal(tt, tc.Err, err) + assert.Equal(t, tc.Err, err) } }) } diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go index 0c1a2eea3..d1cdbe01b 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -12,7 +12,7 @@ func TestSchemaGroup(t *testing.T) { sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - testCases := []struct { + tests := []struct { Name string G *SchemaGroup Expected struct { @@ -52,19 +52,19 @@ func TestSchemaGroup(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() - assert.Equal(tt, tc.Expected.GID, tc.G.ID()) - assert.Equal(tt, tc.Expected.GIDRef, tc.G.IDRef()) - assert.Equal(tt, tc.Expected.SID, tc.G.Schema()) - assert.Equal(tt, tc.Expected.SIDRef, tc.G.SchemaRef()) - assert.Equal(tt, tc.Expected.Fields, tc.G.Fields()) - assert.Equal(tt, tc.Expected.IsList, tc.G.IsList()) - assert.Equal(tt, 
tc.Expected.IsAvailableIf, tc.G.IsAvailableIf()) - assert.Equal(tt, tc.Expected.Title, tc.G.Title()) + assert.Equal(t, tc.Expected.GID, tc.G.ID()) + assert.Equal(t, tc.Expected.GIDRef, tc.G.IDRef()) + assert.Equal(t, tc.Expected.SID, tc.G.Schema()) + assert.Equal(t, tc.Expected.SIDRef, tc.G.SchemaRef()) + assert.Equal(t, tc.Expected.Fields, tc.G.Fields()) + assert.Equal(t, tc.Expected.IsList, tc.G.IsList()) + assert.Equal(t, tc.Expected.IsAvailableIf, tc.G.IsAvailableIf()) + assert.Equal(t, tc.Expected.Title, tc.G.Title()) }) } } @@ -74,7 +74,7 @@ func TestSchemaGroup_Field(t *testing.T) { sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - testCases := []struct { + tests := []struct { Name string G *SchemaGroup PTR *Pointer @@ -99,13 +99,13 @@ func TestSchemaGroup_Field(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.G.Field(tc.Input)) - assert.Equal(tt, tc.Expected, tc.G.FieldByPointer(tc.PTR)) - assert.Equal(tt, tc.Expected != nil, tc.G.HasField(tc.Input)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.G.Field(tc.Input)) + assert.Equal(t, tc.Expected, tc.G.FieldByPointer(tc.PTR)) + assert.Equal(t, tc.Expected != nil, tc.G.HasField(tc.Input)) }) } } diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go index 95b4d2065..783675b4f 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -19,7 +19,7 @@ func TestSchema_Field(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { + tests := []struct { Name string S *Schema PTR *Pointer @@ -44,12 +44,12 @@ func TestSchema_Field(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - 
t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.S.Field(tc.Input)) - assert.Equal(tt, tc.Expected, tc.S.FieldByPointer(tc.PTR)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.S.Field(tc.Input)) + assert.Equal(t, tc.Expected, tc.S.FieldByPointer(tc.PTR)) }) } } @@ -59,7 +59,7 @@ func TestSchema_Group(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { + tests := []struct { Name string S *Schema PTR *Pointer @@ -86,13 +86,13 @@ func TestSchema_Group(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.S.Group(tc.Input)) - assert.Equal(tt, tc.Expected, tc.S.GroupByPointer(tc.PTR)) - assert.Equal(tt, tc.Expected, tc.S.GroupByField(tc.InputField)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.S.Group(tc.Input)) + assert.Equal(t, tc.Expected, tc.S.GroupByPointer(tc.PTR)) + assert.Equal(t, tc.Expected, tc.S.GroupByField(tc.InputField)) }) } } @@ -102,7 +102,7 @@ func TestSchema_DetectDuplicatedFields(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() - testCases := []struct { + tests := []struct { Name string S *Schema LF LinkableFields @@ -131,12 +131,12 @@ func TestSchema_DetectDuplicatedFields(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.LF.Validate(tc.S) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/property/sealed_test.go b/pkg/property/sealed_test.go index 
b71907d86..4eb471c74 100644 --- a/pkg/property/sealed_test.go +++ b/pkg/property/sealed_test.go @@ -26,7 +26,7 @@ var ( ) func TestSeal(t *testing.T) { - testCases := []struct { + tests := []struct { Name string MD *Merged DSGL dataset.GraphLoader @@ -163,14 +163,13 @@ func TestSeal(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, err := Seal(context.Background(), tc.MD, tc.DSGL) - if err == nil { - assert.Equal(tt, tc.Expected, res) - } + assert.Equal(t, tc.Expected, res) + assert.Nil(t, err) }) } } @@ -178,7 +177,8 @@ func TestSeal(t *testing.T) { func TestSealProperty(t *testing.T) { pid := NewID() ps := MustSchemaID("xxx~1.1.1/aa") - testCases := []struct { + + tests := []struct { Name string Input *Property Expected *Sealed @@ -199,19 +199,19 @@ func TestSealProperty(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := SealProperty(context.Background(), tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestSealedItemFrom(t *testing.T) { - testCases := []struct { + tests := []struct { Name string MG *MergedGroup DSGL dataset.GraphLoader @@ -357,21 +357,20 @@ func TestSealedItemFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, err := sealedItemFrom(context.Background(), tc.MG, tc.DSGL) - if err == nil { - assert.Equal(tt, tc.Expected, res) - } + assert.Equal(t, tc.Expected, res) + assert.Nil(t, err) }) } } func TestSealed_Interface(t *testing.T) { - testCases := []struct { + tests := []struct { Name string S *Sealed Expected map[string]interface{} @@ 
-460,18 +459,18 @@ func TestSealed_Interface(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.S.Interface() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestSealedItem_Match(t *testing.T) { - testCases := []struct { + tests := []struct { Name string SI *SealedItem Input ItemID @@ -518,19 +517,19 @@ func TestSealedItem_Match(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.SI.Match(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestSealed_ItemBy(t *testing.T) { - testCases := []struct { + tests := []struct { Name string S *Sealed Input *Pointer @@ -805,19 +804,19 @@ func TestSealed_ItemBy(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.S.ItemBy(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestSealed_FieldBy(t *testing.T) { - testCases := []struct { + tests := []struct { Name string S *Sealed Input *Pointer @@ -1053,12 +1052,12 @@ func TestSealed_FieldBy(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.S.FieldBy(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/property/value_camera_test.go b/pkg/property/value_camera_test.go index 0eb0f5357..75e060f7f 100644 --- a/pkg/property/value_camera_test.go +++ b/pkg/property/value_camera_test.go @@ 
-39,12 +39,12 @@ func TestCamera_Clone(t *testing.T) { for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Camera.Clone() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) if tc.Expected != nil { - assert.NotSame(tt, tc.Expected, res) + assert.NotSame(t, tc.Expected, res) } }) } diff --git a/pkg/property/value_dataset_test.go b/pkg/property/value_dataset_test.go index c3562a08a..a701cea66 100644 --- a/pkg/property/value_dataset_test.go +++ b/pkg/property/value_dataset_test.go @@ -13,6 +13,7 @@ func TestNewValueAndDatasetValue(t *testing.T) { d *dataset.Value p *Value } + tests := []struct { name string args args @@ -121,7 +122,9 @@ func TestNewValueAndDatasetValue(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, NewValueAndDatasetValue(tt.args.ty, tt.args.d, tt.args.p)) }) } @@ -151,7 +154,9 @@ func TestValueAndDatasetValue_Type(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Type()) }) } @@ -201,7 +206,9 @@ func TestValueAndDatasetValue_DatasetValuee(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.DatasetValue()) }) } @@ -251,7 +258,9 @@ func TestValueAndDatasetValue_PropertyValue(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.PropertyValue()) }) } @@ -301,7 +310,9 @@ func TestValueAndDatasetValue_Value(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Value()) }) } diff --git a/pkg/property/value_optional_test.go b/pkg/property/value_optional_test.go index f73f49709..b25de3594 100644 --- 
a/pkg/property/value_optional_test.go +++ b/pkg/property/value_optional_test.go @@ -12,6 +12,7 @@ func TestNewNilableValue(t *testing.T) { t ValueType v *Value } + tests := []struct { name string args args @@ -63,6 +64,7 @@ func TestOptionalValueFrom(t *testing.T) { type args struct { v *Value } + tests := []struct { name string args args @@ -211,6 +213,7 @@ func TestOptionalValue_SetValue(t *testing.T) { type args struct { v *Value } + tests := []struct { name string value *OptionalValue @@ -291,8 +294,11 @@ func TestOptionalValue_Clone(t *testing.T) { target: nil, }, } + for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() res := tt.target.Clone() assert.Equal(t, tt.target, res) if tt.target != nil { @@ -306,6 +312,7 @@ func TestOptionalValue_Cast(t *testing.T) { type args struct { t ValueType } + tests := []struct { name string target *OptionalValue @@ -345,7 +352,9 @@ func TestOptionalValue_Cast(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) }) } diff --git a/pkg/property/value_test.go b/pkg/property/value_test.go index 236033bf9..5ceb416dd 100644 --- a/pkg/property/value_test.go +++ b/pkg/property/value_test.go @@ -160,7 +160,9 @@ func TestValue_Interface(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.value.Interface()) }) } @@ -170,6 +172,7 @@ func TestValue_Cast(t *testing.T) { type args struct { t ValueType } + tests := []struct { name string target *Value @@ -215,14 +218,16 @@ func TestValue_Cast(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) }) } } func TestValueFromDataset(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Input *dataset.Value Expected struct { @@ -268,11 +273,11 @@ func 
TestValueFromDataset(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected.V, valueFromDataset(tc.Input)) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected.V, valueFromDataset(tc.Input)) }) } } @@ -281,6 +286,7 @@ func TestValueFromStringOrNumber(t *testing.T) { type args struct { s string } + tests := []struct { name string args args @@ -319,7 +325,9 @@ func TestValueFromStringOrNumber(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, ValueFromStringOrNumber(tt.args.s)) }) } diff --git a/pkg/property/value_typography_test.go b/pkg/property/value_typography_test.go index 9f9e73127..31f149ae4 100644 --- a/pkg/property/value_typography_test.go +++ b/pkg/property/value_typography_test.go @@ -51,19 +51,19 @@ func TestTypography_Clone(t *testing.T) { for _, tc := range testes { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.Typography.Clone() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) if tc.Expected != nil { - assert.NotSame(tt, tc.Expected, res) + assert.NotSame(t, tc.Expected, res) } }) } } func TestTextAlignFrom(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Expected struct { TA TextAlign @@ -132,13 +132,13 @@ func TestTextAlignFrom(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, ok := TextAlignFrom(tc.Name) - assert.Equal(tt, tc.Expected.TA, res) - assert.Equal(tt, tc.Expected.Bool, ok) + assert.Equal(t, tc.Expected.TA, res) + assert.Equal(t, tc.Expected.Bool, ok) }) } } @@ -149,7 +149,8 @@ func TestTextAlignFromRef(t *testing.T) { c := 
TextAlignCenter l := TextAlignLeft r := TextAlignRight - testCases := []struct { + + tests := []struct { Name string Input *string Expected *TextAlign @@ -188,12 +189,12 @@ func TestTextAlignFromRef(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := TextAlignFromRef(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/rerror/error_test.go b/pkg/rerror/error_test.go index bfe3b8190..ff4746b5f 100644 --- a/pkg/rerror/error_test.go +++ b/pkg/rerror/error_test.go @@ -11,45 +11,38 @@ import ( func TestErrInternal(t *testing.T) { werr := errors.New("wrapped") err := ErrInternalBy(werr) - var err2 *Error - assert.Equal(t, "internal", err.Error()) - assert.True(t, errors.As(err, &err2)) - assert.Same(t, errInternal, err2.Label) - assert.True(t, err2.Hidden) + assert.EqualError(t, err, "internal") + assert.IsType(t, err, &Error{}) assert.Same(t, werr, errors.Unwrap(err)) } func TestError(t *testing.T) { werr := errors.New("wrapped") - label := errors.New("label") - var err error = &Error{Label: label, Err: werr} + err := &Error{Label: errors.New("label"), Err: werr} - var err2 *Error - assert.Equal(t, "label: wrapped", err.Error()) - assert.True(t, errors.As(err, &err2)) + assert.EqualError(t, err, "label: wrapped") assert.Same(t, werr, errors.Unwrap(err)) label2 := errors.New("foo") err3 := &Error{Label: label2, Err: err} - assert.Equal(t, "foo.label: wrapped", err3.Error()) + assert.EqualError(t, err3, "foo.label: wrapped") - label3 := errors.New("bar") - err4 := &Error{Label: label3, Err: err3} - assert.Equal(t, "bar.foo.label: wrapped", err4.Error()) + err4 := &Error{Label: errors.New("bar"), Err: err3} + assert.EqualError(t, err4, "bar.foo.label: wrapped") - err5 := Error{ - Label: label, + err5 := &Error{ + Label: errors.New("label"), Err: werr, Hidden: true, } 
- assert.Equal(t, "label", err5.Error()) + assert.EqualError(t, err5, "label") var nilerr *Error - assert.Equal(t, "", nilerr.Error()) + assert.EqualError(t, nilerr, "") assert.Nil(t, nilerr.Unwrap()) err6 := &Error{Label: errors.New("d"), Err: &Error{Label: errors.New("e"), Err: &Error{Label: errors.New("f"), Err: errors.New("g")}}, Separate: true} - assert.Equal(t, "d: e.f: g", err6.Error()) + assert.EqualError(t, err6, "d: e.f: g") } func TestUnwrapErrInternal(t *testing.T) { @@ -69,7 +62,7 @@ func TestFrom(t *testing.T) { func TestFromSep(t *testing.T) { werr := &Error{Label: errors.New("wrapped"), Err: errors.New("wrapped2")} err := FromSep("label", werr) - assert.Equal(t, "label", err.Label.Error()) + assert.EqualError(t, err.Label, "label") assert.Same(t, werr, err.Err) assert.False(t, err.Hidden) assert.True(t, err.Separate) diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go index 8124303ec..a160efbb4 100644 --- a/pkg/scene/builder.go +++ b/pkg/scene/builder.go @@ -22,14 +22,14 @@ func (b *Builder) Build() (*Scene, error) { if b.scene.rootLayer.ID().IsNil() { return nil, ErrInvalidID } - if b.scene.widgetSystem == nil { - b.scene.widgetSystem = NewWidgetSystem(nil) + if b.scene.widgets == nil { + b.scene.widgets = NewWidgets(nil) } if b.scene.widgetAlignSystem == nil { b.scene.widgetAlignSystem = NewWidgetAlignSystem() } - if b.scene.pluginSystem == nil { - b.scene.pluginSystem = NewPluginSystem(nil) + if b.scene.plugins == nil { + b.scene.plugins = NewPlugins(nil) } if b.scene.updatedAt.IsZero() { b.scene.updatedAt = b.scene.CreatedAt() @@ -70,9 +70,8 @@ func (b *Builder) UpdatedAt(updatedAt time.Time) *Builder { return b } -func (b *Builder) WidgetSystem(widgetSystem *WidgetSystem) *Builder { - widgetSystem2 := *widgetSystem - b.scene.widgetSystem = &widgetSystem2 +func (b *Builder) Widgets(widgets *Widgets) *Builder { + b.scene.widgets = widgets return b } @@ -86,9 +85,8 @@ func (b *Builder) RootLayer(rootLayer LayerID) *Builder { return b } -func 
(b *Builder) PluginSystem(pluginSystem *PluginSystem) *Builder { - pluginSystem2 := *pluginSystem - b.scene.pluginSystem = &pluginSystem2 +func (b *Builder) Plugins(plugins *Plugins) *Builder { + b.scene.plugins = plugins return b } diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index d081abb51..b81168969 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -392,10 +392,10 @@ func TestSceneBuilder(t *testing.T) { Project(scene.NewProjectID()). Team(scene.NewTeamID()). Property(scenep.ID()). - WidgetSystem(scene.NewWidgetSystem([]*scene.Widget{ + Widgets(scene.NewWidgets([]*scene.Widget{ sceneWidget1, sceneWidget2, })). - PluginSystem(scene.NewPluginSystem([]*scene.Plugin{scenePlugin1})). + Plugins(scene.NewPlugins([]*scene.Plugin{scenePlugin1})). RootLayer(rootLayer.ID()). MustBuild() diff --git a/pkg/scene/builder/encoder_test.go b/pkg/scene/builder/encoder_test.go index 798b04138..ef81c0f65 100644 --- a/pkg/scene/builder/encoder_test.go +++ b/pkg/scene/builder/encoder_test.go @@ -11,7 +11,7 @@ import ( ) func TestEncoder_Result(t *testing.T) { - testCases := []struct { + tests := []struct { Name string E *encoder Expected []*layerJSON @@ -47,18 +47,19 @@ func TestEncoder_Result(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.E.Result() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestEncoder_Encode(t *testing.T) { - testCases := []struct { + tests := []struct { Name string E *encoder SL merging.SealedLayer @@ -77,12 +78,13 @@ func TestEncoder_Encode(t *testing.T) { Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.E.Encode(tc.SL) 
- assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -138,7 +140,8 @@ func TestEncoder_Layers(t *testing.T) { Property: &sp, Infobox: nil, }} - testCases := []struct { + + tests := []struct { Name string E *encoder SL *merging.SealedLayerItem @@ -164,20 +167,21 @@ func TestEncoder_Layers(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.E.layer(tc.SL) if res == nil { - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) } else { - assert.Equal(tt, tc.Expected.Property, res.Property) - assert.Equal(tt, tc.Expected.Infobox, res.Infobox) - assert.Equal(tt, *tc.Expected.ExtensionID, *res.ExtensionID) - assert.Equal(tt, tc.Expected.ID, res.ID) - assert.Equal(tt, tc.Expected.Name, res.Name) - assert.Equal(tt, *tc.Expected.PluginID, *res.PluginID) + assert.Equal(t, tc.Expected.Property, res.Property) + assert.Equal(t, tc.Expected.Infobox, res.Infobox) + assert.Equal(t, *tc.Expected.ExtensionID, *res.ExtensionID) + assert.Equal(t, tc.Expected.ID, res.ID) + assert.Equal(t, tc.Expected.Name, res.Name) + assert.Equal(t, *tc.Expected.PluginID, *res.PluginID) } }) } diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go index d517afb41..7d17193ef 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -35,7 +35,7 @@ func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Ti } func (b *Builder) plugins(ctx context.Context, s *scene.Scene, p []*property.Property) map[string]propertyJSON { - scenePlugins := s.PluginSystem().Plugins() + scenePlugins := s.Plugins().Plugins() res := map[string]propertyJSON{} for _, sp := range scenePlugins { if sp == nil { @@ -49,7 +49,7 @@ func (b *Builder) plugins(ctx context.Context, s *scene.Scene, p []*property.Pro } func (b *Builder) widgets(ctx context.Context, s *scene.Scene, p 
[]*property.Property) []*widgetJSON { - sceneWidgets := s.WidgetSystem().Widgets() + sceneWidgets := s.Widgets().Widgets() res := make([]*widgetJSON, 0, len(sceneWidgets)) for _, w := range sceneWidgets { if !w.Enabled() { diff --git a/pkg/scene/builder/scene_test.go b/pkg/scene/builder/scene_test.go index f33f0b1b0..7fd36fafd 100644 --- a/pkg/scene/builder/scene_test.go +++ b/pkg/scene/builder/scene_test.go @@ -16,7 +16,8 @@ func TestScene_FindProperty(t *testing.T) { property.New().NewID().Scene(sid).Schema(scid).MustBuild(), property.New().ID(p1).Scene(sid).Schema(scid).MustBuild(), } - testCases := []struct { + + tests := []struct { Name string PL []*property.Property Input property.ID @@ -35,12 +36,13 @@ func TestScene_FindProperty(t *testing.T) { Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := findProperty(tc.PL, tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -55,7 +57,7 @@ func TestScene_ToString(t *testing.T) { wids := []scene.WidgetID{wid, wid2, wid3} widsString := []string{widS, wid2S, wid3S} - testCases := []struct { + tests := []struct { Name string Input []scene.WidgetID Expected []string @@ -71,12 +73,13 @@ func TestScene_ToString(t *testing.T) { Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := toString(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -90,7 +93,7 @@ func TestBuildWidgetAlignSystem(t *testing.T) { Area: scene.WidgetAreaTop, }).Add(wid, -1) - testCases := []struct { + tests := []struct { Name string Input *scene.WidgetAlignSystem Expected *widgetAlignSystemJSON @@ -115,12 +118,13 @@ func TestBuildWidgetAlignSystem(t *testing.T) { 
Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := buildWidgetAlignSystem(tc.Input) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go index 81a92427f..566d5d688 100644 --- a/pkg/scene/builder_test.go +++ b/pkg/scene/builder_test.go @@ -1,7 +1,6 @@ package scene import ( - "errors" "testing" "time" @@ -32,12 +31,12 @@ func TestBuilder_Property(t *testing.T) { assert.Equal(t, pid, b.Property()) } -func TestBuilder_PluginSystem(t *testing.T) { - ps := NewPluginSystem([]*Plugin{ +func TestBuilder_Plugins(t *testing.T) { + ps := NewPlugins([]*Plugin{ NewPlugin(OfficialPluginID, NewPropertyID().Ref()), }) - b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).PluginSystem(ps).MustBuild() - assert.Equal(t, ps, b.PluginSystem()) + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Plugins(ps).MustBuild() + assert.Equal(t, ps, b.Plugins()) } func TestBuilder_Project(t *testing.T) { @@ -46,12 +45,12 @@ func TestBuilder_Project(t *testing.T) { assert.Equal(t, pid, b.Project()) } -func TestBuilder_WidgetSystem(t *testing.T) { - ws := NewWidgetSystem([]*Widget{ +func TestBuilder_Widgets(t *testing.T) { + ws := NewWidgets([]*Widget{ MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", NewPropertyID(), true, false), }) - b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).WidgetSystem(ws).MustBuild() - assert.Equal(t, ws, b.WidgetSystem()) + b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Widgets(ws).MustBuild() + assert.Equal(t, ws, b.Widgets()) } func TestBuilder_WidgetAlignSystem(t *testing.T) { was := NewWidgetAlignSystem() @@ -65,137 +64,124 @@ func TestBuilder_Build(t *testing.T) { pid := NewProjectID() ppid := NewPropertyID() lid := NewLayerID() - ws := NewWidgetSystem([]*Widget{ + 
ws := NewWidgets([]*Widget{ MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), }) was := NewWidgetAlignSystem() - ps := NewPluginSystem([]*Plugin{ + ps := NewPlugins([]*Plugin{ NewPlugin(OfficialPluginID, ppid.Ref()), }) - testCases := []struct { - Name string - Id ID + + type args struct { + ID ID Project ProjectID Team TeamID RootLayer LayerID - WidgetSystem *WidgetSystem + Widgets *Widgets WidgetAlignSystem *WidgetAlignSystem - PluginSystem *PluginSystem + Plugins *Plugins UpdatedAt time.Time Property PropertyID - Expected struct { - Id ID - Project ProjectID - Team TeamID - RootLayer LayerID - WidgetSystem *WidgetSystem - WidgetAlignSystem *WidgetAlignSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property PropertyID - } - err error + } + + tests := []struct { + Name string + Args args + Expected *Scene + Err error }{ { - Name: "fail nil scene id", - Id: ID{}, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: ErrInvalidID, + Name: "fail nil scene id", + Args: args{ + ID: ID{}, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, }, { - Name: "fail nil team id", - Id: sid, - Project: pid, - Team: TeamID{}, - RootLayer: lid, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: ErrInvalidID, + Name: "fail nil team id", + Args: args{ + ID: sid, + Project: pid, + Team: TeamID{}, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, }, { - Name: "fail nil root layer id", - Id: sid, - Project: pid, - Team: tid, - RootLayer: 
LayerID{}, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: ErrInvalidID, + Name: "fail nil root layer id", + Args: args{ + ID: sid, + Project: pid, + Team: tid, + RootLayer: LayerID{}, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, }, { - Name: "success build new scene", - Id: sid, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - Expected: struct { - Id ID - Project ProjectID - Team TeamID - RootLayer LayerID - WidgetSystem *WidgetSystem - WidgetAlignSystem *WidgetAlignSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property PropertyID - }{ - Id: sid, + Name: "success build new scene", + Args: args{ + ID: sid, Project: pid, Team: tid, RootLayer: lid, - WidgetSystem: ws, + Widgets: ws, WidgetAlignSystem: was, - PluginSystem: ps, + Plugins: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, }, - err: nil, + Expected: &Scene{ + id: sid, + project: pid, + team: tid, + rootLayer: lid, + widgets: ws, + widgetAlignSystem: was, + plugins: ps, + updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + property: ppid, + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() res, err := New(). - ID(tc.Id). - WidgetSystem(tc.WidgetSystem). - WidgetAlignSystem(tc.WidgetAlignSystem). - Project(tc.Project). - PluginSystem(tc.PluginSystem). - Property(tc.Property). - RootLayer(tc.RootLayer). - Team(tc.Team). - UpdatedAt(tc.UpdatedAt). + ID(tt.Args.ID). + Widgets(tt.Args.Widgets). + WidgetAlignSystem(tt.Args.WidgetAlignSystem). 
+ Project(tt.Args.Project). + Plugins(tt.Args.Plugins). + Property(tt.Args.Property). + RootLayer(tt.Args.RootLayer). + Team(tt.Args.Team). + UpdatedAt(tt.Args.UpdatedAt). Build() - if err == nil { - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.UpdatedAt, res.UpdatedAt()) - assert.Equal(tt, tc.Expected.Team, res.Team()) - assert.Equal(tt, tc.Expected.RootLayer, res.RootLayer()) - assert.Equal(tt, tc.Expected.Property, res.Property()) - assert.Equal(tt, tc.Expected.PluginSystem, res.PluginSystem()) - assert.Equal(tt, tc.Expected.WidgetSystem, res.WidgetSystem()) - assert.Equal(tt, tc.Expected.Project, res.Project()) + + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -207,141 +193,129 @@ func TestBuilder_MustBuild(t *testing.T) { pid := NewProjectID() ppid := NewPropertyID() lid := NewLayerID() - ws := NewWidgetSystem([]*Widget{ + ws := NewWidgets([]*Widget{ MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), }) was := NewWidgetAlignSystem() - ps := NewPluginSystem([]*Plugin{ + ps := NewPlugins([]*Plugin{ NewPlugin(OfficialPluginID, ppid.Ref()), }) - testCases := []struct { - Name string - Id ID + + type args struct { + ID ID Project ProjectID Team TeamID RootLayer LayerID - WidgetSystem *WidgetSystem + Widgets *Widgets WidgetAlignSystem *WidgetAlignSystem - PluginSystem *PluginSystem + Plugins *Plugins UpdatedAt time.Time Property PropertyID - Expected struct { - Id ID - Project ProjectID - Team TeamID - RootLayer LayerID - WidgetSystem *WidgetSystem - WidgetAlignSystem *WidgetAlignSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property PropertyID - } - err error + } + + tests := []struct { + Name string + Args args + Expected *Scene + Err error }{ { - Name: "fail nil scene id", - Id: ID{}, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - 
UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: ErrInvalidID, + Name: "fail nil scene id", + Args: args{ + ID: ID{}, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, }, { - Name: "fail nil team id", - Id: sid, - Project: pid, - Team: TeamID{}, - RootLayer: lid, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: ErrInvalidID, + Name: "fail nil team id", + Args: args{ + ID: sid, + Project: pid, + Team: TeamID{}, + RootLayer: lid, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, }, { - Name: "fail nil root layer id", - Id: sid, - Project: pid, - Team: tid, - RootLayer: LayerID{}, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - err: ErrInvalidID, + Name: "fail nil root layer id", + Args: args{ + ID: sid, + Project: pid, + Team: tid, + RootLayer: LayerID{}, + Widgets: ws, + WidgetAlignSystem: was, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, + }, + Err: ErrInvalidID, }, { - Name: "success build new scene", - Id: sid, - Project: pid, - Team: tid, - RootLayer: lid, - WidgetSystem: ws, - WidgetAlignSystem: was, - PluginSystem: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, - Expected: struct { - Id ID - Project ProjectID - Team TeamID - RootLayer LayerID - WidgetSystem *WidgetSystem - WidgetAlignSystem *WidgetAlignSystem - PluginSystem *PluginSystem - UpdatedAt time.Time - Property PropertyID - }{ - Id: sid, + Name: "success build new scene", + Args: args{ + ID: sid, Project: pid, Team: tid, RootLayer: lid, - 
WidgetSystem: ws, + Widgets: ws, WidgetAlignSystem: was, - PluginSystem: ps, + Plugins: ps, UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), Property: ppid, }, - err: nil, + Expected: &Scene{ + id: sid, + project: pid, + team: tid, + rootLayer: lid, + widgets: ws, + widgetAlignSystem: was, + plugins: ps, + updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + property: ppid, + }, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *Scene - defer func() { - if r := recover(); r == nil { - assert.Equal(tt, tc.Expected.Id, res.ID()) - assert.Equal(tt, tc.Expected.UpdatedAt, res.UpdatedAt()) - assert.Equal(tt, tc.Expected.Team, res.Team()) - assert.Equal(tt, tc.Expected.RootLayer, res.RootLayer()) - assert.Equal(tt, tc.Expected.Property, res.Property()) - assert.Equal(tt, tc.Expected.PluginSystem, res.PluginSystem()) - assert.Equal(tt, tc.Expected.WidgetSystem, res.WidgetSystem()) - assert.Equal(tt, tc.Expected.WidgetAlignSystem, res.WidgetAlignSystem()) - assert.Equal(tt, tc.Expected.Project, res.Project()) - } - }() - res = New(). - ID(tc.Id). - WidgetSystem(tc.WidgetSystem). - WidgetAlignSystem(tc.WidgetAlignSystem). - Project(tc.Project). - PluginSystem(tc.PluginSystem). - Property(tc.Property). - RootLayer(tc.RootLayer). - Team(tc.Team). - UpdatedAt(tc.UpdatedAt). - MustBuild() + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Scene { + t.Helper() + return New(). + ID(tt.Args.ID). + Widgets(tt.Args.Widgets). + WidgetAlignSystem(tt.Args.WidgetAlignSystem). + Project(tt.Args.Project). + Plugins(tt.Args.Plugins). + Property(tt.Args.Property). + RootLayer(tt.Args.RootLayer). + Team(tt.Args.Team). + UpdatedAt(tt.Args.UpdatedAt). 
+ MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } }) } } diff --git a/pkg/scene/cluster_list_test.go b/pkg/scene/cluster_list_test.go index 1fafc8942..548aca8f2 100644 --- a/pkg/scene/cluster_list_test.go +++ b/pkg/scene/cluster_list_test.go @@ -12,6 +12,7 @@ func TestList_Add(t *testing.T) { type args struct { clusters []*Cluster } + tests := []struct { name string list *ClusterList @@ -30,13 +31,14 @@ func TestList_Add(t *testing.T) { args: args{clusters: []*Cluster{c1}}, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() tc.list.Add(tc.args.clusters...) - assert.Equal(tt, tc.want, tc.list) + assert.Equal(t, tc.want, tc.list) }) } } @@ -44,6 +46,7 @@ func TestList_Add(t *testing.T) { func TestList_Clusters(t *testing.T) { c1, _ := NewCluster(NewClusterID(), "ccc", NewPropertyID()) c2, _ := NewCluster(NewClusterID(), "xxx", NewPropertyID()) + tests := []struct { name string list *ClusterList @@ -60,11 +63,12 @@ func TestList_Clusters(t *testing.T) { want: nil, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.want, tc.list.Clusters()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.want, tc.list.Clusters()) }) } } @@ -75,6 +79,7 @@ func TestList_Has(t *testing.T) { type args struct { tid ClusterID } + tests := []struct { name string list *ClusterList @@ -105,9 +110,12 @@ func TestList_Has(t *testing.T) { want: false, }, } + for _, tc := range tests { - t.Run(tc.name, func(tt *testing.T) { - assert.Equal(tt, tc.want, tc.list.Has(tc.args.tid)) + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.want, tc.list.Has(tc.args.tid)) }) } } @@ -120,6 +128,7 @@ func TestList_Remove(t *testing.T) { type args struct { cluster ClusterID } + tests := 
[]struct { name string list *ClusterList @@ -150,12 +159,13 @@ func TestList_Remove(t *testing.T) { want: nil, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() tc.list.Remove(tc.args.cluster) - assert.Equal(tt, tc.want, tc.list) + assert.Equal(t, tc.want, tc.list) }) } } @@ -170,6 +180,7 @@ func TestClusterList_Get(t *testing.T) { type args struct { cid ClusterID } + tests := []struct { name string list *ClusterList @@ -201,12 +212,13 @@ func TestClusterList_Get(t *testing.T) { want: nil, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got := tc.list.Get(tc.args.cid) - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want, got) }) } } diff --git a/pkg/scene/cluster_test.go b/pkg/scene/cluster_test.go index 01c4d8257..77127e2f8 100644 --- a/pkg/scene/cluster_test.go +++ b/pkg/scene/cluster_test.go @@ -11,6 +11,7 @@ func TestCluster_ID(t *testing.T) { clusterA := &Cluster{ id: cid, } + tests := []struct { name string cluster *Cluster @@ -27,19 +28,22 @@ func TestCluster_ID(t *testing.T) { want: ClusterID{}, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got := tc.cluster.ID() - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want, got) }) } } + func TestCluster_Name(t *testing.T) { clusterA := &Cluster{ name: "clusterA", } + tests := []struct { name string cluster *Cluster @@ -56,12 +60,13 @@ func TestCluster_Name(t *testing.T) { want: "", }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got := tc.cluster.Name() - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want, got) }) } } @@ -70,6 +75,7 @@ func TestCluster_Property(t *testing.T) { clusterA := 
&Cluster{ property: propertyId, } + tests := []struct { name string cluster *Cluster @@ -86,12 +92,13 @@ func TestCluster_Property(t *testing.T) { want: PropertyID{}, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got := tc.cluster.Property() - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.want, got) }) } } @@ -104,6 +111,7 @@ func TestNew(t *testing.T) { name string pid PropertyID } + tests := []struct { name string args args @@ -135,13 +143,14 @@ func TestNew(t *testing.T) { wantErr: true, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() got, err := NewCluster(tc.args.cid, tc.args.name, tc.args.pid) - assert.Equal(tt, tc.wantErr, err != nil) - assert.Equal(tt, tc.want, got) + assert.Equal(t, tc.wantErr, err != nil) + assert.Equal(t, tc.want, got) }) } } @@ -153,6 +162,7 @@ func TestCluster_Rename(t *testing.T) { type args struct { name string } + tests := []struct { name string cluster *Cluster @@ -183,12 +193,13 @@ func TestCluster_Rename(t *testing.T) { want: nil, }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() tc.cluster.Rename(tc.args.name) - assert.Equal(tt, tc.want, tc.cluster) + assert.Equal(t, tc.want, tc.cluster) }) } } @@ -201,6 +212,7 @@ func TestCluster_UpdateProperty(t *testing.T) { type args struct { property PropertyID } + tests := []struct { name string cluster *Cluster @@ -234,10 +246,10 @@ func TestCluster_UpdateProperty(t *testing.T) { for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() tc.cluster.UpdateProperty(tc.args.property) - assert.Equal(tt, tc.want, tc.cluster) + assert.Equal(t, tc.want, tc.cluster) }) } } diff --git 
a/pkg/scene/lock_test.go b/pkg/scene/lock_test.go index fcd36a364..2410090e3 100644 --- a/pkg/scene/lock_test.go +++ b/pkg/scene/lock_test.go @@ -7,7 +7,7 @@ import ( ) func TestLockMode_IsLocked(t *testing.T) { - testCases := []struct { + tests := []struct { Name string LM LockMode Expected bool @@ -28,18 +28,19 @@ func TestLockMode_IsLocked(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.LM.IsLocked() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestLockMode_Validate(t *testing.T) { - testCases := []struct { + tests := []struct { Name string LM LockMode Expected bool @@ -75,12 +76,13 @@ func TestLockMode_Validate(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() _, res := tc.LM.Validate() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/scene/plugin_system_test.go b/pkg/scene/plugin_system_test.go deleted file mode 100644 index 514b7abfb..000000000 --- a/pkg/scene/plugin_system_test.go +++ /dev/null @@ -1,337 +0,0 @@ -package scene - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestNewPluginSystem(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input []*Plugin - Expected *PluginSystem - }{ - { - Name: "nil plugin list", - Input: nil, - Expected: &PluginSystem{plugins: []*Plugin{}}, - }, - { - Name: "plugin list with nil", - Input: []*Plugin{nil}, - Expected: &PluginSystem{plugins: []*Plugin{}}, - }, - { - Name: "plugin list with matched values", - Input: []*Plugin{ - { - plugin: pid, - property: pr, - }, - }, - Expected: 
&PluginSystem{plugins: []*Plugin{ - NewPlugin(pid, pr), - }}, - }, - { - Name: "plugin list with duplicated values", - Input: []*Plugin{ - { - plugin: pid, - property: pr, - }, - { - plugin: pid, - property: pr, - }, - }, - Expected: &PluginSystem{plugins: []*Plugin{ - NewPlugin(pid, pr), - }}, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := NewPluginSystem(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestPluginSystem_Property(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input PluginID - PS *PluginSystem - Expected *PropertyID - }{ - { - Name: "property is found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: pr, - }, - { - Name: "property is nil", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, nil)}), - Expected: nil, - }, - { - Name: "property is not found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), - Expected: nil, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.PS.Property(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestPluginSystem_Plugin(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input PluginID - PS *PluginSystem - Expected *Plugin - }{ - { - Name: "plugin is found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPlugin(pid, pr), - }, - { - Name: "plugin is not found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), - Expected: nil, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.PS.Plugin(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func 
TestPluginSystem_Properties(t *testing.T) { - pr := NewPropertyID().Ref() - pr2 := NewPropertyID().Ref() - testCases := []struct { - Name string - PS *PluginSystem - Expected []PropertyID - }{ - { - Name: "pluginSystem is nil", - PS: nil, - Expected: nil, - }, - { - Name: "get properties", - PS: NewPluginSystem([]*Plugin{ - NewPlugin(MustPluginID("zzz~1.1.1"), pr), - NewPlugin(MustPluginID("xxx~1.1.1"), pr2), - }), - Expected: []PropertyID{*pr, *pr2}, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.PS.Properties() - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestPluginSystem_Has(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input PluginID - PS *PluginSystem - Expected bool - }{ - { - Name: "property is found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: true, - }, - { - Name: "property is not found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), - Expected: false, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.PS.Has(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func TestPluginSystem_HasPlugin(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input PluginID - PS *PluginSystem - Expected bool - }{ - { - Name: "property is found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: true, - }, - { - Name: "property is not found", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), - Expected: false, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.PS.HasPlugin(tc.Input) - assert.Equal(tt, tc.Expected, res) - }) - } -} - -func 
TestPluginSystem_Add(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input *Plugin - PS, Expected *PluginSystem - }{ - { - Name: "add nil plugin", - Input: nil, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - }, - { - Name: "add existing plugin", - Input: NewPlugin(pid, pr), - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - }, - { - Name: "add official plugin", - Input: NewPlugin(OfficialPluginID, pr), - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - }, - { - Name: "add new plugin", - Input: NewPlugin(pid, pr), - PS: NewPluginSystem([]*Plugin{}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.PS.Add(tc.Input) - assert.Equal(tt, tc.Expected, tc.PS) - }) - } -} - -func TestPluginSystem_Remove(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - Input PluginID - PS, Expected *PluginSystem - }{ - { - Name: "remove official plugin", - Input: OfficialPluginID, - PS: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), - }, - { - Name: "remove a plugin", - Input: pid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPluginSystem([]*Plugin{}), - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.PS.Remove(tc.Input) - assert.Equal(tt, tc.Expected, tc.PS) - }) - } -} - -func TestPluginSystem_Upgrade(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - nid := MustPluginID("zzz~1.1.1") - pr := NewPropertyID().Ref() - testCases := []struct { - Name string - 
PID, NewID PluginID - PS, Expected *PluginSystem - }{ - { - Name: "upgrade official plugin", - PID: OfficialPluginID, - PS: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(OfficialPluginID, pr)}), - }, - { - Name: "upgrade a plugin", - PID: pid, - NewID: nid, - PS: NewPluginSystem([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPluginSystem([]*Plugin{NewPlugin(nid, pr)}), - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - tc.PS.Upgrade(tc.PID, tc.NewID) - assert.Equal(tt, tc.Expected, tc.PS) - }) - } -} diff --git a/pkg/scene/plugin_system.go b/pkg/scene/plugins.go similarity index 71% rename from pkg/scene/plugin_system.go rename to pkg/scene/plugins.go index 660881772..deb440376 100644 --- a/pkg/scene/plugin_system.go +++ b/pkg/scene/plugins.go @@ -1,12 +1,12 @@ package scene -type PluginSystem struct { +type Plugins struct { plugins []*Plugin } -func NewPluginSystem(p []*Plugin) *PluginSystem { +func NewPlugins(p []*Plugin) *Plugins { if p == nil { - return &PluginSystem{plugins: []*Plugin{}} + return &Plugins{plugins: []*Plugin{}} } p2 := make([]*Plugin, 0, len(p)) for _, p1 := range p { @@ -25,14 +25,14 @@ func NewPluginSystem(p []*Plugin) *PluginSystem { p2 = append(p2, &p3) } } - return &PluginSystem{plugins: p2} + return &Plugins{plugins: p2} } -func (p *PluginSystem) Plugins() []*Plugin { +func (p *Plugins) Plugins() []*Plugin { return append([]*Plugin{}, p.plugins...) 
} -func (p *PluginSystem) Property(id PluginID) *PropertyID { +func (p *Plugins) Property(id PluginID) *PropertyID { for _, p := range p.plugins { if p.plugin.Equal(id) { return p.property.CopyRef() @@ -41,7 +41,7 @@ func (p *PluginSystem) Property(id PluginID) *PropertyID { return nil } -func (p *PluginSystem) Has(id PluginID) bool { +func (p *Plugins) Has(id PluginID) bool { for _, p2 := range p.plugins { if p2.plugin.Equal(id) { return true @@ -50,7 +50,7 @@ func (p *PluginSystem) Has(id PluginID) bool { return false } -func (p *PluginSystem) HasPlugin(id PluginID) bool { +func (p *Plugins) HasPlugin(id PluginID) bool { name := id.Name() for _, p2 := range p.plugins { if p2.plugin.Name() == name { @@ -60,7 +60,7 @@ func (p *PluginSystem) HasPlugin(id PluginID) bool { return false } -func (p *PluginSystem) Add(sp *Plugin) { +func (p *Plugins) Add(sp *Plugin) { if sp == nil || p.Has(sp.plugin) || sp.plugin.Equal(OfficialPluginID) { return } @@ -68,7 +68,7 @@ func (p *PluginSystem) Add(sp *Plugin) { p.plugins = append(p.plugins, &sp2) } -func (p *PluginSystem) Remove(pid PluginID) { +func (p *Plugins) Remove(pid PluginID) { if pid.Equal(OfficialPluginID) { return } @@ -80,7 +80,7 @@ func (p *PluginSystem) Remove(pid PluginID) { } } -func (p *PluginSystem) Upgrade(pid, newID PluginID) { +func (p *Plugins) Upgrade(pid, newID PluginID) { for i, p2 := range p.plugins { if p2.plugin.Equal(OfficialPluginID) { continue @@ -92,7 +92,7 @@ func (p *PluginSystem) Upgrade(pid, newID PluginID) { } } -func (p *PluginSystem) Properties() []PropertyID { +func (p *Plugins) Properties() []PropertyID { if p == nil { return nil } @@ -105,7 +105,7 @@ func (p *PluginSystem) Properties() []PropertyID { return res } -func (p *PluginSystem) Plugin(pluginID PluginID) *Plugin { +func (p *Plugins) Plugin(pluginID PluginID) *Plugin { for _, pp := range p.plugins { if pp.plugin == pluginID { return pp diff --git a/pkg/scene/plugins_test.go b/pkg/scene/plugins_test.go new file mode 100644 index 
000000000..e0bece931 --- /dev/null +++ b/pkg/scene/plugins_test.go @@ -0,0 +1,355 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewPlugins(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input []*Plugin + Expected *Plugins + }{ + { + Name: "nil plugin list", + Input: nil, + Expected: &Plugins{plugins: []*Plugin{}}, + }, + { + Name: "plugin list with nil", + Input: []*Plugin{nil}, + Expected: &Plugins{plugins: []*Plugin{}}, + }, + { + Name: "plugin list with matched values", + Input: []*Plugin{ + { + plugin: pid, + property: pr, + }, + }, + Expected: &Plugins{plugins: []*Plugin{ + NewPlugin(pid, pr), + }}, + }, + { + Name: "plugin list with duplicated values", + Input: []*Plugin{ + { + plugin: pid, + property: pr, + }, + { + plugin: pid, + property: pr, + }, + }, + Expected: &Plugins{plugins: []*Plugin{ + NewPlugin(pid, pr), + }}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := NewPlugins(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Property(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected *PropertyID + }{ + { + Name: "property is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: pr, + }, + { + Name: "property is nil", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, nil)}), + Expected: nil, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Property(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Plugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr 
:= NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected *Plugin + }{ + { + Name: "plugin is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugin(pid, pr), + }, + { + Name: "plugin is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Plugin(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Properties(t *testing.T) { + pr := NewPropertyID().Ref() + pr2 := NewPropertyID().Ref() + + tests := []struct { + Name string + PS *Plugins + Expected []PropertyID + }{ + { + Name: "plugins is nil", + PS: nil, + Expected: nil, + }, + { + Name: "get properties", + PS: NewPlugins([]*Plugin{ + NewPlugin(MustPluginID("zzz~1.1.1"), pr), + NewPlugin(MustPluginID("xxx~1.1.1"), pr2), + }), + Expected: []PropertyID{*pr, *pr2}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Properties() + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Has(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS *Plugins + Expected bool + }{ + { + Name: "property is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.Has(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_HasPlugin(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS 
*Plugins + Expected bool + }{ + { + Name: "property is found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "property is not found", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.HasPlugin(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + +func TestPlugins_Add(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input *Plugin + PS, Expected *Plugins + }{ + { + Name: "add nil plugin", + Input: nil, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add existing plugin", + Input: NewPlugin(pid, pr), + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add official plugin", + Input: NewPlugin(OfficialPluginID, pr), + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + { + Name: "add new plugin", + Input: NewPlugin(pid, pr), + PS: NewPlugins([]*Plugin{}), + Expected: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.PS.Add(tc.Input) + assert.Equal(t, tc.Expected, tc.PS) + }) + } +} + +func TestPlugins_Remove(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input PluginID + PS, Expected *Plugins + }{ + { + Name: "remove official plugin", + Input: OfficialPluginID, + PS: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + }, + { + Name: "remove a plugin", + Input: pid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: 
NewPlugins([]*Plugin{}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.PS.Remove(tc.Input) + assert.Equal(t, tc.Expected, tc.PS) + }) + } +} + +func TestPlugins_Upgrade(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + nid := MustPluginID("zzz~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + PID, NewID PluginID + PS, Expected *Plugins + }{ + { + Name: "upgrade official plugin", + PID: OfficialPluginID, + PS: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + }, + { + Name: "upgrade a plugin", + PID: pid, + NewID: nid, + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugins([]*Plugin{NewPlugin(nid, pr)}), + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + tc.PS.Upgrade(tc.PID, tc.NewID) + assert.Equal(t, tc.Expected, tc.PS) + }) + } +} diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index 4f371c309..fa543b083 100644 --- a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -12,9 +12,9 @@ type Scene struct { project ProjectID team TeamID rootLayer LayerID - widgetSystem *WidgetSystem + widgets *Widgets widgetAlignSystem *WidgetAlignSystem - pluginSystem *PluginSystem + plugins *Plugins updatedAt time.Time property PropertyID clusters *ClusterList @@ -62,11 +62,11 @@ func (s *Scene) RootLayer() LayerID { return s.rootLayer } -func (s *Scene) WidgetSystem() *WidgetSystem { +func (s *Scene) Widgets() *Widgets { if s == nil { return nil } - return s.widgetSystem + return s.widgets } func (s *Scene) WidgetAlignSystem() *WidgetAlignSystem { @@ -76,11 +76,11 @@ func (s *Scene) WidgetAlignSystem() *WidgetAlignSystem { return s.widgetAlignSystem } -func (s *Scene) PluginSystem() *PluginSystem { +func (s *Scene) Plugins() *Plugins { if s == nil { return nil } - return s.pluginSystem + return s.plugins } func (s *Scene) UpdatedAt() time.Time 
{ @@ -114,8 +114,8 @@ func (s *Scene) Properties() []PropertyID { return nil } ids := []PropertyID{s.property} - ids = append(ids, s.pluginSystem.Properties()...) - ids = append(ids, s.widgetSystem.Properties()...) + ids = append(ids, s.plugins.Properties()...) + ids = append(ids, s.widgets.Properties()...) ids = append(ids, s.clusters.Properties()...) return ids } diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index 04f2abd28..187bd10d7 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -9,7 +9,8 @@ import ( func TestScene_IsTeamIncluded(t *testing.T) { tid := NewTeamID() - testCases := []struct { + + tests := []struct { Name string Teams []TeamID S *Scene @@ -40,12 +41,13 @@ func TestScene_IsTeamIncluded(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.S.IsTeamIncluded(tc.Teams) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } @@ -67,8 +69,8 @@ func TestScene_Properties(t *testing.T) { Team(NewTeamID()). RootLayer(NewLayerID()). Property(pid1). 
- WidgetSystem( - NewWidgetSystem( + Widgets( + NewWidgets( []*Widget{ MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), }, @@ -84,13 +86,13 @@ func TestSceneNil(t *testing.T) { var s *Scene assert.Nil(t, s.Properties()) assert.True(t, s.ID().IsNil()) - assert.Nil(t, s.WidgetSystem()) + assert.Nil(t, s.Widgets()) assert.Nil(t, s.WidgetAlignSystem()) assert.True(t, s.Project().IsNil()) assert.True(t, s.Team().IsNil()) assert.True(t, s.RootLayer().IsNil()) assert.True(t, s.CreatedAt().IsZero()) - assert.Nil(t, s.PluginSystem()) + assert.Nil(t, s.Plugins()) assert.True(t, s.Property().IsNil()) } @@ -110,12 +112,13 @@ func TestScene_Clusters(t *testing.T) { want: NewClusterListFrom([]*Cluster{c1}), }, } + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() - assert.Equal(tt, tc.want, tc.scene.Clusters()) + assert.Equal(t, tc.want, tc.scene.Clusters()) }) } } diff --git a/pkg/scene/sceneops/dataset_migrator_test.go b/pkg/scene/sceneops/dataset_migrator_test.go deleted file mode 100644 index f7ae7fda4..000000000 --- a/pkg/scene/sceneops/dataset_migrator_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package sceneops - -//import ( -// "testing" -// -// "github.com/reearth/reearth-backend/pkg/dataset" -//) -// -//func TestDatasetMigrator_Migrate(t *testing.T) { -// sid := dataset.NewSceneID() -// dsid := dataset.NewID() -// dssid := dataset.NewSchemaID() -// dssfid := dataset.MewFieldID() -// testCases := []struct { -// Name string -// SID dataset.SceneID -// NewDSL []*dataset.Schema -// NewDL dataset.List -// Expected MigrateDatasetResult -// Err error -// }{ -// { -// Name: "", -// SID: sid, -// NewDSL: []*dataset.Schema{ -// dataset.NewSchema(). -// ID(dssid). -// Fields([]*dataset.SchemaField{ -// dataset.NewSchemaField(). 
-// ID(dssfid).MustBuild(), -// }).Scene(sid).MustBuild()}, -// NewDL: dataset.List{ -// dataset.New().ID(dsid).MustBuild(), -// }, -// Expected: MigrateDatasetResult{}, -// Err: nil, -// }, -// } -// for _,tc:=range testCases{ -// tc:=tc -// t.Run(tc.Name, func(tt *testing.T) { -// tt.Parallel() -// res,err:=tc -// }) -// } -//} diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 20d4c1265..5a0a1ef8e 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -42,7 +42,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol return MigratePluginsResult{}, ErrInvalidPlugins } - if !sc.PluginSystem().Has(oldPluginID) { + if !sc.Plugins().Has(oldPluginID) { return MigratePluginsResult{}, ErrPluginNotInstalled } @@ -100,19 +100,19 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol } // ใ‚ทใƒผใƒณใฎใƒ—ใƒฉใ‚ฐใ‚คใƒณ - sc.PluginSystem().Upgrade(oldPluginID, newPluginID) - for _, sp := range sc.PluginSystem().Plugins() { + sc.Plugins().Upgrade(oldPluginID, newPluginID) + for _, sp := range sc.Plugins().Plugins() { if sp.Plugin().Equal(newPluginID) && sp.Property() != nil { propertyIDs = append(propertyIDs, *sp.Property()) } } // ใ‚ทใƒผใƒณใฎใ‚ฆใ‚ฃใ‚ธใ‚งใƒƒใƒˆ - sc.WidgetSystem().ReplacePlugin(oldPluginID, newPluginID) - for _, w := range sc.WidgetSystem().Widgets() { + sc.Widgets().ReplacePlugin(oldPluginID, newPluginID) + for _, w := range sc.Widgets().Widgets() { if w.Plugin().Equal(newPluginID) { if newPlugin.Extension(w.Extension()) == nil { - sc.WidgetSystem().RemoveAllByExtension(oldPluginID, w.Extension()) + sc.Widgets().RemoveAllByExtension(oldPluginID, w.Extension()) } else { propertyIDs = append(propertyIDs, w.Property()) } diff --git a/pkg/scene/widget_align_system_test.go b/pkg/scene/widget_align_system_test.go index b0b5ffe0f..1259bb280 100644 --- a/pkg/scene/widget_align_system_test.go +++ 
b/pkg/scene/widget_align_system_test.go @@ -34,7 +34,7 @@ func TestWidgetAlignSystem_Find(t *testing.T) { wid4 := NewWidgetID() wid5 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input WidgetID Expected1 int @@ -68,15 +68,15 @@ func TestWidgetAlignSystem_Find(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { index, location := (*WidgetAlignSystem)(nil).Find(tc.Input) - assert.Equal(tt, tc.Expected1, index) - assert.Equal(tt, tc.Expected2, location) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, location) return } @@ -85,8 +85,8 @@ func TestWidgetAlignSystem_Find(t *testing.T) { was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid4, wid5}) index, location := was.Find(tc.Input) - assert.Equal(tt, tc.Expected1, index) - assert.Equal(tt, tc.Expected2, location) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, location) }) } } @@ -94,7 +94,7 @@ func TestWidgetAlignSystem_Find(t *testing.T) { func TestWidgetAlignSystem_Remove(t *testing.T) { wid := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Zone WidgetZoneType Input WidgetID @@ -133,10 +133,10 @@ func TestWidgetAlignSystem_Remove(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { (*WidgetZone)(nil).Remove(tc.Input) @@ -146,7 +146,7 @@ func TestWidgetAlignSystem_Remove(t *testing.T) { ws := NewWidgetAlignSystem() ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).Add(wid, -1) ws.Remove(tc.Input) - assert.Equal(tt, tc.Expected, ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).WidgetIDs()) + assert.Equal(t, tc.Expected, 
ws.Zone(tc.Zone).Section(WidgetSectionLeft).Area(WidgetAreaTop).WidgetIDs()) }) } } @@ -158,7 +158,7 @@ func TestWidgetAlignSystem_Move(t *testing.T) { wid4 := NewWidgetID() wid5 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input1 WidgetID Input2 WidgetLocation @@ -242,10 +242,10 @@ func TestWidgetAlignSystem_Move(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { (*WidgetAlignSystem)(nil).Move(tc.Input1, tc.Input2, tc.Input3) @@ -260,8 +260,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { ws.Move(tc.Input1, tc.Input2, tc.Input3) - assert.Equal(tt, tc.ExpectedSource, ws.Area(tc.Source).WidgetIDs()) - assert.Equal(tt, tc.ExpectedDest, ws.Area(tc.Input2).WidgetIDs()) + assert.Equal(t, tc.ExpectedSource, ws.Area(tc.Source).WidgetIDs()) + assert.Equal(t, tc.ExpectedDest, ws.Area(tc.Input2).WidgetIDs()) }) } } @@ -271,6 +271,7 @@ func TestWidgetAlignSystem_SetZone(t *testing.T) { t WidgetZoneType z *WidgetZone } + tests := []struct { name string args args @@ -306,6 +307,7 @@ func TestWidgetAlignSystem_SetZone(t *testing.T) { nil: true, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/scene/widget_area_test.go b/pkg/scene/widget_area_test.go index a69bb2196..57b30afa6 100644 --- a/pkg/scene/widget_area_test.go +++ b/pkg/scene/widget_area_test.go @@ -10,7 +10,7 @@ func TestWidgetArea(t *testing.T) { wid1 := NewWidgetID() wid2 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input1 []WidgetID Input2 WidgetAlignType @@ -36,10 +36,10 @@ func TestWidgetArea(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() wa := NewWidgetArea(tc.Input1, tc.Input2) 
assert.Equal(t, tc.Expected, wa) }) @@ -63,7 +63,7 @@ func TestWidgetArea_Find(t *testing.T) { wid := NewWidgetID() wid2 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input WidgetID Expected int @@ -86,16 +86,16 @@ func TestWidgetArea_Find(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() var wa *WidgetArea if !tc.Nil { wa = NewWidgetArea([]WidgetID{wid}, WidgetAlignStart) } - assert.Equal(tt, tc.Expected, wa.Find(tc.Input)) + assert.Equal(t, tc.Expected, wa.Find(tc.Input)) }) } } @@ -105,7 +105,7 @@ func TestWidgetArea_Add(t *testing.T) { wid2 := NewWidgetID() wid3 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Nil bool Input WidgetID @@ -136,10 +136,10 @@ func TestWidgetArea_Add(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { (*WidgetArea)(nil).Add(wid1, -1) @@ -148,7 +148,7 @@ func TestWidgetArea_Add(t *testing.T) { wa := NewWidgetArea([]WidgetID{wid1, wid2}, WidgetAlignStart) wa.Add(tc.Input, tc.Input2) - assert.Equal(tt, tc.Expected, wa.WidgetIDs()) + assert.Equal(t, tc.Expected, wa.WidgetIDs()) }) } } @@ -157,7 +157,7 @@ func TestWidgetArea_AddAll(t *testing.T) { wid1 := NewWidgetID() wid2 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Nil bool Input []WidgetID @@ -179,10 +179,10 @@ func TestWidgetArea_AddAll(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { (*WidgetArea)(nil).AddAll(nil) @@ -191,13 +191,13 @@ func TestWidgetArea_AddAll(t *testing.T) { wa := NewWidgetArea(nil, WidgetAlignStart) 
wa.AddAll(tc.Input) - assert.Equal(tt, tc.Expected, wa.WidgetIDs()) + assert.Equal(t, tc.Expected, wa.WidgetIDs()) }) } } func TestWidgetArea_SetAlignment(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Nil bool Input WidgetAlignType @@ -220,10 +220,10 @@ func TestWidgetArea_SetAlignment(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() var wa *WidgetArea if !tc.Nil { @@ -239,7 +239,8 @@ func TestWidgetArea_SetAlignment(t *testing.T) { func TestWidgetArea_Remove(t *testing.T) { wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string Input WidgetID Expected []WidgetID @@ -262,10 +263,10 @@ func TestWidgetArea_Remove(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() var wa *WidgetArea if !tc.Nil { @@ -273,7 +274,7 @@ func TestWidgetArea_Remove(t *testing.T) { } wa.Remove(tc.Input) if !tc.Nil { - assert.Equal(tt, tc.Expected, wa.widgetIds) + assert.Equal(t, tc.Expected, wa.widgetIds) } }) } @@ -284,7 +285,7 @@ func TestWidgetArea_Move(t *testing.T) { wid2 := NewWidgetID() wid3 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input1, Input2 int Expected []WidgetID @@ -308,10 +309,10 @@ func TestWidgetArea_Move(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() var wa *WidgetArea if !tc.Nil { @@ -319,7 +320,7 @@ func TestWidgetArea_Move(t *testing.T) { } wa.Move(tc.Input1, tc.Input2) if !tc.Nil { - assert.Equal(tt, tc.Expected, wa.widgetIds) + assert.Equal(t, tc.Expected, wa.widgetIds) } }) } diff --git a/pkg/scene/widget_section_test.go 
b/pkg/scene/widget_section_test.go index e0a35c5df..0328befa2 100644 --- a/pkg/scene/widget_section_test.go +++ b/pkg/scene/widget_section_test.go @@ -29,7 +29,7 @@ func TestWidgetSection_Find(t *testing.T) { wid6 := NewWidgetID() wid7 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input WidgetID Expected1 int @@ -69,15 +69,15 @@ func TestWidgetSection_Find(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { index, area := (*WidgetSection)(nil).Find(tc.Input) - assert.Equal(tt, tc.Expected1, index) - assert.Equal(tt, tc.Expected2, area) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, area) return } @@ -87,8 +87,8 @@ func TestWidgetSection_Find(t *testing.T) { ws.Area(WidgetAreaBottom).AddAll([]WidgetID{wid6, wid7}) index, area := ws.Find(tc.Input) - assert.Equal(tt, tc.Expected1, index) - assert.Equal(tt, tc.Expected2, area) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, area) }) } } @@ -96,7 +96,7 @@ func TestWidgetSection_Find(t *testing.T) { func TestWidgetSection_Remove(t *testing.T) { wid := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Area WidgetAreaType Input WidgetID @@ -147,10 +147,10 @@ func TestWidgetSection_Remove(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { (*WidgetSection)(nil).Remove(tc.Input) @@ -160,7 +160,7 @@ func TestWidgetSection_Remove(t *testing.T) { ws := NewWidgetSection() ws.Area(tc.Area).Add(wid, -1) ws.Remove(tc.Input) - assert.Equal(tt, tc.Expected, ws.Area(tc.Area).WidgetIDs()) + assert.Equal(t, tc.Expected, ws.Area(tc.Area).WidgetIDs()) }) } } @@ -170,6 +170,7 @@ func TestWidgetSection_SetArea(t *testing.T) { 
t WidgetAreaType a *WidgetArea } + tests := []struct { name string args args @@ -212,6 +213,7 @@ func TestWidgetSection_SetArea(t *testing.T) { nil: true, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go index d376d1914..48a3f0aaa 100644 --- a/pkg/scene/widget_test.go +++ b/pkg/scene/widget_test.go @@ -10,7 +10,8 @@ func TestNewWidget(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string ID WidgetID Plugin PluginID @@ -42,20 +43,20 @@ func TestNewWidget(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, err := NewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) if tc.Err == nil { - assert.Equal(tt, tc.ID, res.ID()) - assert.Equal(tt, tc.Property, res.Property()) - assert.Equal(tt, tc.Extension, res.Extension()) - assert.Equal(tt, tc.Enabled, res.Enabled()) - assert.Equal(tt, tc.Extended, res.Extended()) - assert.Equal(tt, tc.Plugin, res.Plugin()) + assert.Equal(t, tc.ID, res.ID()) + assert.Equal(t, tc.Property, res.Property()) + assert.Equal(t, tc.Extension, res.Extension()) + assert.Equal(t, tc.Enabled, res.Enabled()) + assert.Equal(t, tc.Extended, res.Extended()) + assert.Equal(t, tc.Plugin, res.Plugin()) } else { - assert.ErrorIs(tt, err, tc.Err) + assert.ErrorIs(t, err, tc.Err) } }) } @@ -65,7 +66,8 @@ func TestMustNewWidget(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string ID WidgetID Plugin PluginID @@ -97,24 +99,24 @@ func TestMustNewWidget(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + 
t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Err != nil { - assert.PanicsWithError(tt, tc.Err.Error(), func() { + assert.PanicsWithError(t, tc.Err.Error(), func() { MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) }) return } res := MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) - assert.Equal(tt, tc.ID, res.ID()) - assert.Equal(tt, tc.Property, res.Property()) - assert.Equal(tt, tc.Extension, res.Extension()) - assert.Equal(tt, tc.Enabled, res.Enabled()) - assert.Equal(tt, tc.Plugin, res.Plugin()) + assert.Equal(t, tc.ID, res.ID()) + assert.Equal(t, tc.Property, res.Property()) + assert.Equal(t, tc.Extension, res.Extension()) + assert.Equal(t, tc.Enabled, res.Enabled()) + assert.Equal(t, tc.Plugin, res.Plugin()) }) } } diff --git a/pkg/scene/widget_zone_test.go b/pkg/scene/widget_zone_test.go index f28f0d67f..b3aa13458 100644 --- a/pkg/scene/widget_zone_test.go +++ b/pkg/scene/widget_zone_test.go @@ -29,7 +29,7 @@ func TestWidgetZone_Find(t *testing.T) { wid6 := NewWidgetID() wid7 := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Input WidgetID Expected1 int @@ -75,16 +75,16 @@ func TestWidgetZone_Find(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { index, section, area := (*WidgetZone)(nil).Find(tc.Input) - assert.Equal(tt, tc.Expected1, index) - assert.Equal(tt, tc.Expected2, section) - assert.Equal(tt, tc.Expected3, area) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, section) + assert.Equal(t, tc.Expected3, area) return } @@ -94,9 +94,9 @@ func TestWidgetZone_Find(t *testing.T) { ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll([]WidgetID{wid6, wid7}) index, section, area := ez.Find(tc.Input) - assert.Equal(tt, tc.Expected1, index) - assert.Equal(tt, 
tc.Expected2, section) - assert.Equal(tt, tc.Expected3, area) + assert.Equal(t, tc.Expected1, index) + assert.Equal(t, tc.Expected2, section) + assert.Equal(t, tc.Expected3, area) }) } } @@ -104,7 +104,7 @@ func TestWidgetZone_Find(t *testing.T) { func TestWidgetZone_Remove(t *testing.T) { wid := NewWidgetID() - testCases := []struct { + tests := []struct { Name string Section WidgetSectionType Input WidgetID @@ -155,10 +155,10 @@ func TestWidgetZone_Remove(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() if tc.Nil { (*WidgetZone)(nil).Remove(tc.Input) @@ -168,7 +168,7 @@ func TestWidgetZone_Remove(t *testing.T) { ws := NewWidgetZone() ws.Section(tc.Section).Area(WidgetAreaTop).Add(wid, -1) ws.Remove(tc.Input) - assert.Equal(tt, tc.Expected, ws.Section(tc.Section).Area(WidgetAreaTop).WidgetIDs()) + assert.Equal(t, tc.Expected, ws.Section(tc.Section).Area(WidgetAreaTop).WidgetIDs()) }) } } @@ -178,6 +178,7 @@ func TestWidgetZone_SetSection(t *testing.T) { t WidgetSectionType s *WidgetSection } + tests := []struct { name string args args @@ -220,6 +221,7 @@ func TestWidgetZone_SetSection(t *testing.T) { nil: true, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/scene/widget_system.go b/pkg/scene/widgets.go similarity index 73% rename from pkg/scene/widget_system.go rename to pkg/scene/widgets.go index 4ea6fa773..499a8e0b1 100644 --- a/pkg/scene/widget_system.go +++ b/pkg/scene/widgets.go @@ -8,13 +8,13 @@ var ( ErrDuplicatedWidgetInstance = errors.New("duplicated widget instance") ) -type WidgetSystem struct { +type Widgets struct { widgets []*Widget } -func NewWidgetSystem(w []*Widget) *WidgetSystem { +func NewWidgets(w []*Widget) *Widgets { if w == nil { - return &WidgetSystem{widgets: []*Widget{}} + return &Widgets{widgets: []*Widget{}} } w2 := make([]*Widget, 0, 
len(w)) for _, w1 := range w { @@ -33,17 +33,17 @@ func NewWidgetSystem(w []*Widget) *WidgetSystem { w2 = append(w2, &w3) } } - return &WidgetSystem{widgets: w2} + return &Widgets{widgets: w2} } -func (w *WidgetSystem) Widgets() []*Widget { +func (w *Widgets) Widgets() []*Widget { if w == nil { return nil } return append([]*Widget{}, w.widgets...) } -func (w *WidgetSystem) Widget(wid WidgetID) *Widget { +func (w *Widgets) Widget(wid WidgetID) *Widget { if w == nil { return nil } @@ -55,7 +55,7 @@ func (w *WidgetSystem) Widget(wid WidgetID) *Widget { return nil } -func (w *WidgetSystem) Has(wid WidgetID) bool { +func (w *Widgets) Has(wid WidgetID) bool { if w == nil { return false } @@ -67,7 +67,7 @@ func (w *WidgetSystem) Has(wid WidgetID) bool { return false } -func (w *WidgetSystem) Add(sw *Widget) { +func (w *Widgets) Add(sw *Widget) { if w == nil || sw == nil || w.Has(sw.ID()) { return } @@ -75,7 +75,7 @@ func (w *WidgetSystem) Add(sw *Widget) { w.widgets = append(w.widgets, &sw2) } -func (w *WidgetSystem) Remove(wid WidgetID) { +func (w *Widgets) Remove(wid WidgetID) { if w == nil { return } @@ -87,7 +87,7 @@ func (w *WidgetSystem) Remove(wid WidgetID) { } } -func (w *WidgetSystem) RemoveAllByPlugin(p PluginID) (res []PropertyID) { +func (w *Widgets) RemoveAllByPlugin(p PluginID) (res []PropertyID) { if w == nil { return nil } @@ -101,7 +101,7 @@ func (w *WidgetSystem) RemoveAllByPlugin(p PluginID) (res []PropertyID) { return res } -func (w *WidgetSystem) RemoveAllByExtension(p PluginID, e PluginExtensionID) (res []PropertyID) { +func (w *Widgets) RemoveAllByExtension(p PluginID, e PluginExtensionID) (res []PropertyID) { if w == nil { return nil } @@ -115,7 +115,7 @@ func (w *WidgetSystem) RemoveAllByExtension(p PluginID, e PluginExtensionID) (re return res } -func (w *WidgetSystem) ReplacePlugin(oldp, newp PluginID) { +func (w *Widgets) ReplacePlugin(oldp, newp PluginID) { if w == nil || w.widgets == nil { return } @@ -126,7 +126,7 @@ func (w *WidgetSystem) 
ReplacePlugin(oldp, newp PluginID) { } } -func (w *WidgetSystem) Properties() []PropertyID { +func (w *Widgets) Properties() []PropertyID { if w == nil { return nil } diff --git a/pkg/scene/widget_system_test.go b/pkg/scene/widgets_test.go similarity index 61% rename from pkg/scene/widget_system_test.go rename to pkg/scene/widgets_test.go index cf385dec4..53422878e 100644 --- a/pkg/scene/widget_system_test.go +++ b/pkg/scene/widgets_test.go @@ -6,11 +6,12 @@ import ( "github.com/stretchr/testify/assert" ) -func TestNewWidgetSystem(t *testing.T) { +func TestNewWidgets(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string Input []*Widget Expected []*Widget @@ -46,20 +47,21 @@ func TestNewWidgetSystem(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, NewWidgetSystem(tc.Input).Widgets()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, NewWidgets(tc.Input).Widgets()) }) } } -func TestWidgetSystem_Add(t *testing.T) { +func TestWidgets_Add(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string Widgets []*Widget Input *Widget @@ -77,7 +79,7 @@ func TestWidgetSystem_Add(t *testing.T) { Expected: []*Widget{}, }, { - Name: "add to nil widgetSystem", + Name: "add to nil widgets", Input: MustNewWidget(wid, pid, "see", pr, true, false), Expected: nil, Nil: true, @@ -89,28 +91,29 @@ func TestWidgetSystem_Add(t *testing.T) { Expected: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var ws *WidgetSystem + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + var ws *Widgets if 
!tc.Nil { - ws = NewWidgetSystem(tc.Widgets) + ws = NewWidgets(tc.Widgets) } ws.Add(tc.Input) - assert.Equal(tt, tc.Expected, ws.Widgets()) + assert.Equal(t, tc.Expected, ws.Widgets()) }) } } -func TestWidgetSystem_Remove(t *testing.T) { +func TestWidgets_Remove(t *testing.T) { wid := NewWidgetID() wid2 := NewWidgetID() pid := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("xxx~1.1.2") pr := NewPropertyID() - testCases := []struct { + tests := []struct { Name string Input WidgetID Nil bool @@ -120,67 +123,69 @@ func TestWidgetSystem_Remove(t *testing.T) { Input: wid, }, { - Name: "remove from nil widgetSystem", + Name: "remove from nil widgets", Input: wid, Nil: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var ws *WidgetSystem + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + var ws *Widgets if !tc.Nil { - ws = NewWidgetSystem([]*Widget{ + ws = NewWidgets([]*Widget{ MustNewWidget(wid, pid2, "e1", pr, true, false), MustNewWidget(wid2, pid, "e1", pr, true, false), }) - assert.True(tt, ws.Has(tc.Input)) + assert.True(t, ws.Has(tc.Input)) } ws.Remove(tc.Input) - assert.False(tt, ws.Has(tc.Input)) + assert.False(t, ws.Has(tc.Input)) }) } } -func TestWidgetSystem_RemoveAllByPlugin(t *testing.T) { +func TestWidgets_RemoveAllByPlugin(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("xxx~1.1.2") w1 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) w2 := MustNewWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) w3 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) - testCases := []struct { + tests := []struct { Name string PID PluginID - WS, Expected *WidgetSystem + WS, Expected *Widgets ExpectedResult []PropertyID }{ { Name: "remove widgets", PID: pid, - WS: NewWidgetSystem([]*Widget{w1, w2, w3}), - Expected: NewWidgetSystem([]*Widget{w3}), + WS: NewWidgets([]*Widget{w1, w2, w3}), + Expected: 
NewWidgets([]*Widget{w3}), ExpectedResult: []PropertyID{w1.Property(), w2.Property()}, }, { - Name: "remove from nil widgetSystem", + Name: "remove from nil widgets", WS: nil, Expected: nil, ExpectedResult: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.ExpectedResult, tc.WS.RemoveAllByPlugin(tc.PID)) - assert.Equal(tt, tc.Expected, tc.WS) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedResult, tc.WS.RemoveAllByPlugin(tc.PID)) + assert.Equal(t, tc.Expected, tc.WS) }) } } -func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { +func TestWidgets_RemoveAllByExtension(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("xxx~1.1.2") w1 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) @@ -188,19 +193,19 @@ func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { w3 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) w4 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) - testCases := []struct { + tests := []struct { Name string PID PluginID EID PluginExtensionID - WS, Expected *WidgetSystem + WS, Expected *Widgets ExpectedResult []PropertyID }{ { Name: "remove widgets", PID: pid, EID: PluginExtensionID("e1"), - WS: NewWidgetSystem([]*Widget{w1, w2, w3, w4}), - Expected: NewWidgetSystem([]*Widget{w2, w4}), + WS: NewWidgets([]*Widget{w1, w2, w3, w4}), + Expected: NewWidgets([]*Widget{w2, w4}), ExpectedResult: []PropertyID{w1.Property(), w3.Property()}, }, { @@ -212,104 +217,110 @@ func TestWidgetSystem_RemoveAllByExtension(t *testing.T) { ExpectedResult: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.ExpectedResult, tc.WS.RemoveAllByExtension(tc.PID, tc.EID)) - assert.Equal(tt, tc.Expected, tc.WS) + t.Run(tc.Name, func(t 
*testing.T) { + t.Parallel() + assert.Equal(t, tc.ExpectedResult, tc.WS.RemoveAllByExtension(tc.PID, tc.EID)) + assert.Equal(t, tc.Expected, tc.WS) }) } } -func TestWidgetSystem_ReplacePlugin(t *testing.T) { +func TestWidgets_ReplacePlugin(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("zzz~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string PID, NewID PluginID - WS, Expected *WidgetSystem + WS, Expected *Widgets }{ { Name: "replace a widget", PID: pid, NewID: pid2, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), - Expected: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true, false)}), + WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + Expected: NewWidgets([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true, false)}), }, { Name: "replace with nil widget", PID: pid, - WS: NewWidgetSystem(nil), - Expected: NewWidgetSystem(nil), + WS: NewWidgets(nil), + Expected: NewWidgets(nil), }, { - Name: "replace from nil widgetSystem", + Name: "replace from nil widgets", WS: nil, Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() tc.WS.ReplacePlugin(tc.PID, tc.NewID) - assert.Equal(tt, tc.Expected, tc.WS) + assert.Equal(t, tc.Expected, tc.WS) }) } } -func TestWidgetSystem_Properties(t *testing.T) { +func TestWidgets_Properties(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() pr2 := NewPropertyID() wid := NewWidgetID() wid2 := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string - WS *WidgetSystem + WS *Widgets Expected []PropertyID }{ { Name: "get properties", - WS: NewWidgetSystem([]*Widget{ + WS: NewWidgets([]*Widget{ MustNewWidget(wid, pid, "eee", pr, true, false), MustNewWidget(wid2, pid, "eee", pr2, true, false), }), 
Expected: []PropertyID{pr, pr2}, }, { - Name: "get properties from nil widgetSystem", + Name: "get properties from nil widgets", WS: nil, Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.WS.Properties() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } -func TestWidgetSystem_Widgets(t *testing.T) { +func TestWidgets_Widgets(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() pr2 := NewPropertyID() wid := NewWidgetID() wid2 := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string - WS *WidgetSystem + WS *Widgets Expected []*Widget }{ { Name: "get widgets", - WS: NewWidgetSystem([]*Widget{ + WS: NewWidgets([]*Widget{ MustNewWidget(wid, pid, "eee", pr, true, false), MustNewWidget(wid2, pid, "eee", pr2, true, false), }), @@ -319,95 +330,100 @@ func TestWidgetSystem_Widgets(t *testing.T) { }, }, { - Name: "get widgets from nil widgetSystem", + Name: "get widgets from nil widgets", WS: nil, Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.WS.Widgets() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } -func TestWidgetSystem_Widget(t *testing.T) { +func TestWidgets_Widget(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string ID WidgetID - WS *WidgetSystem + WS *Widgets Expected *Widget }{ { Name: "get a widget", ID: wid, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), Expected: MustNewWidget(wid, pid, "eee", pr, true, false), }, { Name: "dont has 
the widget", ID: wid, - WS: NewWidgetSystem([]*Widget{}), + WS: NewWidgets([]*Widget{}), Expected: nil, }, { - Name: "get widget from nil widgetSystem", + Name: "get widget from nil widgets", ID: wid, WS: nil, Expected: nil, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.WS.Widget(tc.ID) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } -func TestWidgetSystem_Has(t *testing.T) { +func TestWidgets_Has(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID() wid := NewWidgetID() - testCases := []struct { + + tests := []struct { Name string ID WidgetID - WS *WidgetSystem + WS *Widgets Expected bool }{ { Name: "has a widget", ID: wid, - WS: NewWidgetSystem([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), Expected: true, }, { Name: "dont has a widget", ID: wid, - WS: NewWidgetSystem([]*Widget{}), + WS: NewWidgets([]*Widget{}), Expected: false, }, { - Name: "has from nil widgetSystem", + Name: "has from nil widgets", ID: wid, WS: nil, Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.WS.Has(tc.ID) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/shp/reader_test.go b/pkg/shp/reader_test.go index 0dbcab2c9..7906715eb 100644 --- a/pkg/shp/reader_test.go +++ b/pkg/shp/reader_test.go @@ -11,6 +11,7 @@ func assertPointsEqual(t *testing.T, a, b []float64, msgAndArgs ...interface{}) if !assert.True(t, len(a) == len(b), msgAndArgs...) { return false } + for k, v := range a { if !assert.True(t, v == b[k], msgAndArgs...) 
{ return false @@ -182,6 +183,7 @@ func TestReadBBox(t *testing.T) { {"test_files/polylinem.shp", Box{0, 0, 25, 25}}, {"test_files/polylinez.shp", Box{0, 0, 25, 25}}, } + for _, tt := range tests { f, _ := os.Open(tt.filename) r, err := ReadFrom(f) @@ -204,16 +206,15 @@ func TestReadBBox(t *testing.T) { } func TestReader(t *testing.T) { - t.Parallel() - testCases := testsData + tests := testData - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - shapes := getShapesFromFile(tc.name, tt) - assert.Equal(tt, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) - tc.tester(tt, tc.points, shapes) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + shapes := getShapesFromFile(tc.name, t) + assert.Equal(t, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + tc.tester(t, tc.points, shapes) }) } diff --git a/pkg/shp/sequentialreader_test.go b/pkg/shp/sequentialreader_test.go index 5df4ec854..2abf12b95 100644 --- a/pkg/shp/sequentialreader_test.go +++ b/pkg/shp/sequentialreader_test.go @@ -37,16 +37,15 @@ func getShapesSequentially(prefix string, t *testing.T) (shapes []Shape) { } func TestSequentialReader(t *testing.T) { - t.Parallel() - testCases := testsData + tests := testData - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - shapes := getShapesSequentially(tc.name, tt) - assert.Equal(tt, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) - tc.tester(tt, tc.points, shapes) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + shapes := getShapesSequentially(tc.name, t) + assert.Equal(t, tc.count, len(shapes), "Number of shapes for %s read was wrong. 
Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + tc.tester(t, tc.points, shapes) }) } } diff --git a/pkg/shp/shapefile_test.go b/pkg/shp/shapefile_test.go index d50142ffa..bfe25738f 100644 --- a/pkg/shp/shapefile_test.go +++ b/pkg/shp/shapefile_test.go @@ -7,8 +7,7 @@ import ( ) func TestBox_ExtendWithPoint(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input struct { b Box @@ -77,12 +76,13 @@ func TestBox_ExtendWithPoint(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() tc.input.b.ExtendWithPoint(tc.input.p) - assert.Equal(tt, tc.expected, tc.input.b) + assert.Equal(t, tc.expected, tc.input.b) }) } @@ -131,8 +131,7 @@ func TestNewPolyLine(t *testing.T) { } func TestBBoxFromPoints(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input []Point expected Box @@ -186,11 +185,12 @@ func TestBBoxFromPoints(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, BBoxFromPoints(tc.input)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, BBoxFromPoints(tc.input)) }) } } diff --git a/pkg/shp/shapetype_string_test.go b/pkg/shp/shapetype_string_test.go index ee910829f..35494974b 100644 --- a/pkg/shp/shapetype_string_test.go +++ b/pkg/shp/shapetype_string_test.go @@ -7,8 +7,7 @@ import ( ) func TestShapeType_String(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input ShapeType expected string @@ -89,11 +88,12 @@ func TestShapeType_String(t *testing.T) { expected: "ShapeType(-1)", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, 
tc.expected, tc.input.String()) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input.String()) }) } } diff --git a/pkg/shp/testdata_test.go b/pkg/shp/testdata_test.go index 3df89b7a8..135826a46 100644 --- a/pkg/shp/testdata_test.go +++ b/pkg/shp/testdata_test.go @@ -4,15 +4,13 @@ import "testing" type testFunc func(*testing.T, [][]float64, []Shape) -type testCaseData struct { +var testData = []struct { name string points [][]float64 tester testFunc shpType ShapeType count int -} - -var testsData = []testCaseData{ +}{ { name: "test_files/point", shpType: POINT, diff --git a/pkg/shp/writer_test.go b/pkg/shp/writer_test.go index 29e118687..26a73fc6d 100644 --- a/pkg/shp/writer_test.go +++ b/pkg/shp/writer_test.go @@ -118,15 +118,14 @@ func TestWriter_Close(t *testing.T) { } func TestWriter(t *testing.T) { - t.Parallel() - testCases := testsData + tests := testData - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - shapes := getShapesFromFile(tc.name, tt) - assert.Equal(tt, tc.count, len(shapes), "Number of shapes for %s read was wrong. Wanted %d, got %d.", tc.name, tc.count, len(shapes)) + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + shapes := getShapesFromFile(tc.name, t) + assert.Equal(t, tc.count, len(shapes), "Number of shapes for %s read was wrong. 
Wanted %d, got %d.", tc.name, tc.count, len(shapes)) for i, shp := range shapes { outputPath := tc.name + "_out_" + fmt.Sprint(i) @@ -134,20 +133,20 @@ func TestWriter(t *testing.T) { shape, _ := CreateFrom(f, tc.shpType) _, err := shape.Write(shp) - assert.Nil(tt, err) + assert.Nil(t, err) err = shape.Close() - assert.Nil(tt, err) + assert.Nil(t, err) err = f.Close() - assert.Nil(tt, err) + assert.Nil(t, err) - shpFromOut := getShapesFromFile(outputPath, tt) - assert.Equal(tt, shpFromOut[0], shp) + shpFromOut := getShapesFromFile(outputPath, t) + assert.Equal(t, shpFromOut[0], shp) - removeShapefile(tt, outputPath) + removeShapefile(t, outputPath) } - tc.tester(tt, tc.points, shapes) + tc.tester(t, tc.points, shapes) }) } diff --git a/pkg/shp/zipreader_test.go b/pkg/shp/zipreader_test.go index f4f317bb5..a72616564 100644 --- a/pkg/shp/zipreader_test.go +++ b/pkg/shp/zipreader_test.go @@ -34,8 +34,7 @@ func TestReadZipFrom(t *testing.T) { } func TestReadZipFromWrongScenarios(t *testing.T) { - t.Parallel() - testCases := []struct { + tests := []struct { name string input string }{ @@ -52,10 +51,11 @@ func TestReadZipFromWrongScenarios(t *testing.T) { input: "test_files/multi.zip", }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.name, func(t *testing.T) { + t.Parallel() ior, err := os.Open(tc.input) assert.Nil(t, err) @@ -65,7 +65,7 @@ func TestReadZipFromWrongScenarios(t *testing.T) { }() _, err = ReadZipFrom(ior) - assert.NotNil(tt, err) + assert.NotNil(t, err) }) } } diff --git a/pkg/tag/group_test.go b/pkg/tag/group_test.go index cda1ad637..1b95edc87 100644 --- a/pkg/tag/group_test.go +++ b/pkg/tag/group_test.go @@ -20,7 +20,8 @@ func TestGroupBuilder_Build(t *testing.T) { NewID(), NewID(), } - testCases := []struct { + + tests := []struct { Name, Label string Id ID Scene SceneID @@ -88,10 +89,11 @@ func TestGroupBuilder_Build(t *testing.T) { }, }, } - for _, tc := 
range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, err := NewGroup(). ID(tc.Id). Scene(tc.Scene). @@ -99,12 +101,12 @@ func TestGroupBuilder_Build(t *testing.T) { Tags(tc.Tags). Build() if tc.Expected.Error == nil { - assert.Equal(tt, tc.Expected.Group.ID(), res.ID()) - assert.Equal(tt, tc.Expected.Group.Scene(), res.Scene()) - assert.Equal(tt, tc.Expected.Group.Label(), res.Label()) - assert.Equal(tt, tc.Expected.Group.Tags(), res.Tags()) + assert.Equal(t, tc.Expected.Group.ID(), res.ID()) + assert.Equal(t, tc.Expected.Group.Scene(), res.Scene()) + assert.Equal(t, tc.Expected.Group.Label(), res.Label()) + assert.Equal(t, tc.Expected.Group.Tags(), res.Tags()) } else { - assert.Equal(tt, tc.Expected.Error, err) + assert.Equal(t, tc.Expected.Error, err) } }) } diff --git a/pkg/tag/item_test.go b/pkg/tag/item_test.go index 886f97e2b..ecee16594 100644 --- a/pkg/tag/item_test.go +++ b/pkg/tag/item_test.go @@ -19,7 +19,8 @@ func TestItemBuilder_Build(t *testing.T) { dfid := NewDatasetFieldID() did := NewDatasetID() dsid := NewDatasetSchemaID() - testCases := []struct { + + tests := []struct { Name, Label string Id ID Scene SceneID @@ -89,10 +90,11 @@ func TestItemBuilder_Build(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res, err := NewItem(). ID(tc.Id). Scene(tc.Scene). @@ -102,14 +104,14 @@ func TestItemBuilder_Build(t *testing.T) { LinkedDatasetFieldID(tc.LinkedDatasetFieldID). 
Build() if tc.Expected.Error == nil { - assert.Equal(tt, tc.Expected.Item.ID(), res.ID()) - assert.Equal(tt, tc.Expected.Item.Scene(), res.Scene()) - assert.Equal(tt, tc.Expected.Item.Label(), res.Label()) - assert.Equal(tt, tc.Expected.Item.LinkedDatasetFieldID(), res.LinkedDatasetFieldID()) - assert.Equal(tt, tc.Expected.Item.LinkedDatasetSchemaID(), res.LinkedDatasetSchemaID()) - assert.Equal(tt, tc.Expected.Item.LinkedDatasetID(), res.LinkedDatasetID()) + assert.Equal(t, tc.Expected.Item.ID(), res.ID()) + assert.Equal(t, tc.Expected.Item.Scene(), res.Scene()) + assert.Equal(t, tc.Expected.Item.Label(), res.Label()) + assert.Equal(t, tc.Expected.Item.LinkedDatasetFieldID(), res.LinkedDatasetFieldID()) + assert.Equal(t, tc.Expected.Item.LinkedDatasetSchemaID(), res.LinkedDatasetSchemaID()) + assert.Equal(t, tc.Expected.Item.LinkedDatasetID(), res.LinkedDatasetID()) } else { - assert.Equal(tt, tc.Expected.Error, err) + assert.Equal(t, tc.Expected.Error, err) } }) } diff --git a/pkg/tag/list_test.go b/pkg/tag/list_test.go index 8e2e4325d..f73289582 100644 --- a/pkg/tag/list_test.go +++ b/pkg/tag/list_test.go @@ -39,7 +39,8 @@ func TestList_Has(t *testing.T) { tags := []ID{ tid1, } - testCases := []struct { + + tests := []struct { Name string Tags []ID TID ID @@ -62,10 +63,11 @@ func TestList_Has(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := NewListFromTags(tc.Tags).Has(tc.TID) assert.Equal(t, tc.Expected, res) }) diff --git a/pkg/user/auth.go b/pkg/user/auth.go index ed1498591..45ac7fd07 100644 --- a/pkg/user/auth.go +++ b/pkg/user/auth.go @@ -8,7 +8,7 @@ type Auth struct { } func AuthFromAuth0Sub(sub string) Auth { - s := strings.Split(sub, "|") + s := strings.SplitN(sub, "|", 2) if len(s) != 2 { return Auth{} } diff --git a/pkg/user/auth_test.go b/pkg/user/auth_test.go index 
f125b3e4b..8719ede1e 100644 --- a/pkg/user/auth_test.go +++ b/pkg/user/auth_test.go @@ -7,7 +7,7 @@ import ( ) func TestAuthFromAuth0Sub(t *testing.T) { - testCases := []struct { + tests := []struct { Name, Sub string Expected Auth }{ @@ -16,7 +16,7 @@ func TestAuthFromAuth0Sub(t *testing.T) { Sub: "xx|yy", Expected: Auth{ Provider: "xx", - Sub: "yy", + Sub: "xx|yy", }, }, { @@ -25,16 +25,18 @@ func TestAuthFromAuth0Sub(t *testing.T) { Expected: Auth{}, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, AuthFromAuth0Sub(tc.Sub)) + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, AuthFromAuth0Sub(tc.Sub)) }) } } func TestAuth_IsAuth0(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Auth Auth Expected bool @@ -57,10 +59,11 @@ func TestAuth_IsAuth0(t *testing.T) { }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected, tc.Auth.IsAuth0()) + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Auth.IsAuth0()) }) } } diff --git a/pkg/user/builder_test.go b/pkg/user/builder_test.go index 193945109..8699ce531 100644 --- a/pkg/user/builder_test.go +++ b/pkg/user/builder_test.go @@ -1,7 +1,6 @@ package user import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -57,7 +56,7 @@ func TestBuilder_Lang(t *testing.T) { } func TestBuilder_LangFrom(t *testing.T) { - testCases := []struct { + tests := []struct { Name, Lang string Expected language.Tag }{ @@ -77,9 +76,11 @@ func TestBuilder_LangFrom(t *testing.T) { Expected: language.Tag{}, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() b := 
New().NewID().LangFrom(tc.Lang).MustBuild() assert.Equal(t, tc.Expected, b.Lang()) }) @@ -95,50 +96,66 @@ func TestNew(t *testing.T) { func TestBuilder_Build(t *testing.T) { uid := NewID() tid := NewTeamID() - testCases := []struct { - Name, UserName, Lang, Email string - UID ID - TID TeamID - Auths []Auth - Expected *User - err error + en, _ := language.Parse("en") + + type args struct { + Name, Lang, Email string + ID ID + Team TeamID + Auths []Auth + } + + tests := []struct { + Name string + Args args + Expected *User + Err error }{ { - Name: "Success build user", - UserName: "xxx", - Email: "xx@yy.zz", - Lang: "en", - UID: uid, - TID: tid, - Auths: []Auth{ - { - Provider: "ppp", - Sub: "sss", + Name: "Success build user", + Args: args{ + Name: "xxx", + Email: "xx@yy.zz", + Lang: "en", + ID: uid, + Team: tid, + Auths: []Auth{ + { + Provider: "ppp", + Sub: "sss", + }, }, }, - Expected: New(). - ID(uid). - Team(tid). - Email("xx@yy.zz"). - Name("xxx"). - Auths([]Auth{{Provider: "ppp", Sub: "sss"}}). - LangFrom("en"). - MustBuild(), - err: nil, + Expected: &User{ + id: uid, + team: tid, + email: "xx@yy.zz", + name: "xxx", + auths: []Auth{{Provider: "ppp", Sub: "sss"}}, + lang: en, + }, }, { - Name: "failed invalid id", - Expected: nil, - err: ErrInvalidID, + Name: "failed invalid id", + Err: ErrInvalidID, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := New().ID(tc.UID).Name(tc.UserName).Auths(tc.Auths).LangFrom(tc.Lang).Email(tc.Email).Team(tc.TID).Build() - if err == nil { - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := New(). + ID(tt.Args.ID). + Name(tt.Args.Name). + Auths(tt.Args.Auths). + LangFrom(tt.Args.Lang). + Email(tt.Args.Email). + Team(tt.Args.Team). 
+ Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -147,53 +164,72 @@ func TestBuilder_Build(t *testing.T) { func TestBuilder_MustBuild(t *testing.T) { uid := NewID() tid := NewTeamID() - testCases := []struct { - Name, UserName, Lang, Email string - UID ID - TID TeamID - Auths []Auth - Expected *User - err error + en, _ := language.Parse("en") + + type args struct { + Name, Lang, Email string + ID ID + Team TeamID + Auths []Auth + } + + tests := []struct { + Name string + Args args + Expected *User + Err error }{ { - Name: "Success build user", - UserName: "xxx", - Email: "xx@yy.zz", - Lang: "en", - UID: uid, - TID: tid, - Auths: []Auth{ - { - Provider: "ppp", - Sub: "sss", + Name: "Success build user", + Args: args{ + Name: "xxx", + Email: "xx@yy.zz", + Lang: "en", + ID: uid, + Team: tid, + Auths: []Auth{ + { + Provider: "ppp", + Sub: "sss", + }, }, }, - Expected: New(). - ID(uid). - Team(tid). - Email("xx@yy.zz"). - Name("xxx"). - Auths([]Auth{{Provider: "ppp", Sub: "sss"}}). - LangFrom("en"). - MustBuild(), - err: nil, + Expected: &User{ + id: uid, + team: tid, + email: "xx@yy.zz", + name: "xxx", + auths: []Auth{{Provider: "ppp", Sub: "sss"}}, + lang: en, + }, }, { - Name: "failed invalid id", - Expected: nil, - err: ErrInvalidID, + Name: "failed invalid id", + Err: ErrInvalidID, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *User - defer func() { - if r := recover(); r == nil { - assert.Equal(tt, tc.Expected, res) - } - }() - - res = New().ID(tc.UID).Name(tc.UserName).Auths(tc.Auths).LangFrom(tc.Lang).Email(tc.Email).Team(tc.TID).MustBuild() + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *User { + t.Helper() + return New(). + ID(tt.Args.ID). + Name(tt.Args.Name). + Auths(tt.Args.Auths). + LangFrom(tt.Args.Lang). 
+ Email(tt.Args.Email). + Team(tt.Args.Team). + MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } }) } } diff --git a/pkg/user/initializer_test.go b/pkg/user/initializer_test.go index 9ca672635..17b242600 100644 --- a/pkg/user/initializer_test.go +++ b/pkg/user/initializer_test.go @@ -1,7 +1,6 @@ package user import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -10,7 +9,8 @@ import ( func TestInit(t *testing.T) { uid := NewID() tid := NewTeamID() - testCases := []struct { + + tests := []struct { Name, Email, Username, Sub string UID *ID TID *TeamID @@ -85,26 +85,27 @@ func TestInit(t *testing.T) { Err: nil, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - u, t, err := Init(InitParams{ - Email: tc.Email, - Name: tc.Username, - Auth0Sub: tc.Sub, - UserID: tc.UID, - TeamID: tc.TID, + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + user, team, err := Init(InitParams{ + Email: tt.Email, + Name: tt.Username, + Auth0Sub: tt.Sub, + UserID: tt.UID, + TeamID: tt.TID, }) - if err == nil { - assert.Equal(tt, tc.ExpectedUser.Email(), u.Email()) - assert.Equal(tt, tc.ExpectedUser.Name(), u.Name()) - assert.Equal(tt, tc.ExpectedUser.Auths(), u.Auths()) + if tt.Err == nil { + assert.Equal(t, tt.ExpectedUser.Email(), user.Email()) + assert.Equal(t, tt.ExpectedUser.Name(), user.Name()) + assert.Equal(t, tt.ExpectedUser.Auths(), user.Auths()) - assert.Equal(tt, tc.ExpectedTeam.Name(), t.Name()) - assert.Equal(tt, tc.ExpectedTeam.IsPersonal(), t.IsPersonal()) + assert.Equal(t, tt.ExpectedTeam.Name(), team.Name()) + assert.Equal(t, tt.ExpectedTeam.IsPersonal(), team.IsPersonal()) } else { - assert.True(tt, errors.As(tc.Err, &err)) + assert.Equal(t, tt.Err, err) } }) } diff --git a/pkg/user/members_test.go b/pkg/user/members_test.go index a08902b00..b3cdad465 100644 
--- a/pkg/user/members_test.go +++ b/pkg/user/members_test.go @@ -1,7 +1,6 @@ package user import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -23,7 +22,8 @@ func TestNewMembersWith(t *testing.T) { func TestMembers_ContainsUser(t *testing.T) { uid1 := NewID() uid2 := NewID() - testCases := []struct { + + tests := []struct { Name string M *Members UID ID @@ -42,11 +42,13 @@ func TestMembers_ContainsUser(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := tc.M.ContainsUser(tc.UID) - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.M.ContainsUser(tt.UID) + assert.Equal(t, tt.Expected, res) }) } } @@ -77,7 +79,8 @@ func TestMembers_IsOnlyOwner(t *testing.T) { func TestMembers_Leave(t *testing.T) { uid := NewID() - testCases := []struct { + + tests := []struct { Name string M *Members UID ID @@ -102,15 +105,16 @@ func TestMembers_Leave(t *testing.T) { err: ErrTargetUserNotInTheTeam, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - err := tc.M.Leave(tc.UID) - if err == nil { - assert.False(tt, tc.M.ContainsUser(tc.UID)) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + err := tt.M.Leave(tt.UID) + if tt.err == nil { + assert.False(t, tt.M.ContainsUser(tt.UID)) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.err, err) } }) } @@ -124,7 +128,8 @@ func TestMembers_Members(t *testing.T) { func TestMembers_UpdateRole(t *testing.T) { uid := NewID() - testCases := []struct { + + tests := []struct { Name string M *Members UID ID @@ -151,26 +156,27 @@ func TestMembers_UpdateRole(t *testing.T) { Name: "fail personal team", M: NewFixedMembers(uid), UID: uid, - NewRole: Role("xxx"), + NewRole: RoleOwner, err: ErrCannotModifyPersonalTeam, }, { Name: "fail user not in 
the team", M: NewMembersWith(map[ID]Role{uid: RoleOwner}), UID: NewID(), - NewRole: "", + NewRole: RoleOwner, err: ErrTargetUserNotInTheTeam, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - err := tc.M.UpdateRole(tc.UID, tc.NewRole) - if err == nil { - assert.Equal(tt, tc.Expected, tc.M.GetRole(tc.UID)) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + err := tt.M.UpdateRole(tt.UID, tt.NewRole) + if tt.err == nil { + assert.Equal(t, tt.Expected, tt.M.GetRole(tt.UID)) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.err, err) } }) } @@ -179,7 +185,8 @@ func TestMembers_UpdateRole(t *testing.T) { func TestMembers_Join(t *testing.T) { uid := NewID() uid2 := NewID() - testCases := []struct { + + tests := []struct { Name string M *Members UID ID @@ -217,16 +224,17 @@ func TestMembers_Join(t *testing.T) { err: ErrUserAlreadyJoined, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - err := tc.M.Join(tc.UID, tc.JoinRole) - if err == nil { - assert.True(tt, tc.M.ContainsUser(tc.UID)) - assert.Equal(tt, tc.ExpectedRole, tc.M.GetRole(tc.UID)) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + err := tt.M.Join(tt.UID, tt.JoinRole) + if tt.err == nil { + assert.True(t, tt.M.ContainsUser(tt.UID)) + assert.Equal(t, tt.ExpectedRole, tt.M.GetRole(tt.UID)) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.err, err) } }) } @@ -235,7 +243,8 @@ func TestMembers_Join(t *testing.T) { func TestMembers_UsersByRole(t *testing.T) { uid := NewID() uid2 := NewID() - testCases := []struct { + + tests := []struct { Name string M *Members Role Role @@ -249,12 +258,13 @@ func TestMembers_UsersByRole(t *testing.T) { Expected: []ID{uid2, uid}, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res 
:= tc.M.UsersByRole(tc.Role) - assert.Equal(tt, tc.Expected, res) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.M.UsersByRole(tt.Role) + assert.Equal(t, tt.Expected, res) }) } } diff --git a/pkg/user/role_test.go b/pkg/user/role_test.go index 446b97eec..a0a9c84ba 100644 --- a/pkg/user/role_test.go +++ b/pkg/user/role_test.go @@ -1,14 +1,13 @@ package user import ( - "errors" "testing" "github.com/stretchr/testify/assert" ) func TestRoleFromString(t *testing.T) { - testCases := []struct { + tests := []struct { Name, Role string Expected Role Err error @@ -27,22 +26,22 @@ func TestRoleFromString(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := RoleFromString(tc.Role) - if err == nil { - assert.Equal(tt, tc.Expected, res) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := RoleFromString(tt.Role) + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(err, &tc.Err)) + assert.Equal(t, tt.Err, err) } }) } } func TestCheckRole(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Input Role Expected bool @@ -69,12 +68,12 @@ func TestCheckRole(t *testing.T) { }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res := checkRole(tc.Input) - assert.Equal(tt, tc.Expected, res) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := checkRole(tt.Input) + assert.Equal(t, tt.Expected, res) }) } } diff --git a/pkg/user/team_builder_test.go b/pkg/user/team_builder_test.go index 13cf2aaeb..d6b693ba5 100644 --- a/pkg/user/team_builder_test.go +++ b/pkg/user/team_builder_test.go @@ -1,7 +1,6 @@ package user import ( - "errors" "testing" "github.com/stretchr/testify/assert" @@ -36,43 +35,72 @@ func TestTeamBuilder_NewID(t 
*testing.T) { func TestTeamBuilder_Build(t *testing.T) { tid := NewTeamID() - testCases := []struct { - Name, UserName string - TID TeamID - Personal bool - Members map[ID]Role - Expected *Team - err error + uid := NewID() + + type args struct { + ID TeamID + Name string + Personal bool + Members map[ID]Role + } + + tests := []struct { + Name string + Args args + Expected *Team + Err error }{ { - Name: "success create team", - UserName: "xxx", - TID: tid, - Personal: true, - Expected: NewTeam().ID(tid).Members(map[ID]Role{NewID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), - err: nil, + Name: "success create team", + Args: args{ + ID: tid, + Name: "xxx", + Personal: true, + Members: map[ID]Role{uid: RoleOwner}, + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: Members{ + members: map[ID]Role{uid: RoleOwner}, + fixed: true, + }, + }, }, { - Name: "success create team with nil members", - UserName: "xxx", - Members: nil, - Expected: NewTeam().ID(tid).MustBuild(), - err: nil, + Name: "success create team with nil members", + Args: args{ + ID: tid, + Name: "xxx", + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: Members{ + members: map[ID]Role{}, + fixed: false, + }, + }, }, { - Name: "fail invalid id", - Expected: nil, - err: ErrInvalidID, + Name: "fail invalid id", + Err: ErrInvalidID, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - res, err := NewTeam().ID(tc.TID).Members(tc.Members).Personal(tc.Personal).Name(tc.UserName).Build() - if err == nil { - assert.Equal(tt, tc.Expected, res) + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := NewTeam(). + ID(tt.Args.ID). + Members(tt.Args.Members). + Personal(tt.Args.Personal). + Name(tt.Args.Name). 
+ Build() + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) } else { - assert.True(tt, errors.As(tc.err, &err)) + assert.Equal(t, tt.Err, err) } }) } @@ -80,45 +108,73 @@ func TestTeamBuilder_Build(t *testing.T) { func TestTeamBuilder_MustBuild(t *testing.T) { tid := NewTeamID() - testCases := []struct { - Name, UserName string - TID TeamID - Personal bool - Members map[ID]Role - Expected *Team - err error + uid := NewID() + + type args struct { + ID TeamID + Name string + Personal bool + Members map[ID]Role + } + + tests := []struct { + Name string + Args args + Expected *Team + Err error }{ { - Name: "success create team", - UserName: "xxx", - TID: tid, - Personal: true, - Expected: NewTeam().ID(tid).Members(map[ID]Role{NewID(): RoleOwner}).Personal(true).Name("xxx").MustBuild(), - err: nil, + Name: "success create team", + Args: args{ + ID: tid, + Name: "xxx", + Personal: true, + Members: map[ID]Role{uid: RoleOwner}, + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: Members{ + members: map[ID]Role{uid: RoleOwner}, + fixed: true, + }, + }, }, { - Name: "success create team with nil members", - UserName: "xxx", - Members: nil, - Expected: NewTeam().ID(tid).MustBuild(), - err: nil, + Name: "success create team with nil members", + Args: args{ + ID: tid, + Name: "xxx", + }, + Expected: &Team{ + id: tid, + name: "xxx", + members: Members{ + members: map[ID]Role{}, + fixed: false, + }, + }, }, { - Name: "fail invalid id", - Expected: nil, - err: ErrInvalidID, + Name: "fail invalid id", + Err: ErrInvalidID, }, } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - var res *Team - defer func() { - if r := recover(); r == nil { - assert.Equal(tt, tc.Expected, res) - } - }() - res = NewTeam().ID(tc.TID).Members(tc.Members).Personal(tc.Personal).Name(tc.UserName).MustBuild() + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + build := func() *Team { + t.Helper() + return 
NewTeam().ID(tt.Args.ID).Members(tt.Args.Members).Personal(tt.Args.Personal).Name(tt.Args.Name).MustBuild() + } + + if tt.Err != nil { + assert.PanicsWithValue(t, tt.Err, func() { _ = build() }) + } else { + assert.Equal(t, tt.Expected, build()) + } }) } } diff --git a/pkg/user/user_test.go b/pkg/user/user_test.go index 262bcbd7f..7e2e80bc3 100644 --- a/pkg/user/user_test.go +++ b/pkg/user/user_test.go @@ -10,7 +10,8 @@ import ( func TestUser(t *testing.T) { uid := NewID() tid := NewTeamID() - testCases := []struct { + + tests := []struct { Name string User *User Expected struct { @@ -53,22 +54,23 @@ func TestUser(t *testing.T) { }, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.Expected.Id, tc.User.ID()) - assert.Equal(tt, tc.Expected.Name, tc.User.Name()) - assert.Equal(tt, tc.Expected.Team, tc.User.Team()) - assert.Equal(tt, tc.Expected.Auths, tc.User.Auths()) - assert.Equal(tt, tc.Expected.Email, tc.User.Email()) - assert.Equal(tt, tc.Expected.Lang, tc.User.Lang()) + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected.Id, tc.User.ID()) + assert.Equal(t, tc.Expected.Name, tc.User.Name()) + assert.Equal(t, tc.Expected.Team, tc.User.Team()) + assert.Equal(t, tc.Expected.Auths, tc.User.Auths()) + assert.Equal(t, tc.Expected.Email, tc.User.Email()) + assert.Equal(t, tc.Expected.Lang, tc.User.Lang()) }) } } func TestUser_AddAuth(t *testing.T) { - testCases := []struct { + tests := []struct { Name string User *User A Auth @@ -101,18 +103,19 @@ func TestUser_AddAuth(t *testing.T) { Expected: false, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.User.AddAuth(tc.A) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestUser_RemoveAuth(t *testing.T) { - 
testCases := []struct { + tests := []struct { Name string User *User A Auth @@ -145,18 +148,19 @@ func TestUser_RemoveAuth(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.User.RemoveAuth(tc.A) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestUser_ContainAuth(t *testing.T) { - testCases := []struct { + tests := []struct { Name string User *User A Auth @@ -189,18 +193,19 @@ func TestUser_ContainAuth(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.User.ContainAuth(tc.A) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } func TestUser_RemoveAuthByProvider(t *testing.T) { - testCases := []struct { + tests := []struct { Name string User *User Provider string @@ -227,12 +232,13 @@ func TestUser_RemoveAuthByProvider(t *testing.T) { Expected: true, }, } - for _, tc := range testCases { + + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.User.RemoveAuthByProvider(tc.Provider) - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/value/bool_test.go b/pkg/value/bool_test.go index 4b96481fe..3fa0fa52e 100644 --- a/pkg/value/bool_test.go +++ b/pkg/value/bool_test.go @@ -49,7 +49,10 @@ func Test_propertyBool_I2V(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p := &propertyBool{} for i, v := range tt.args { got1, got2 := p.I2V(v) diff --git a/pkg/value/latlng_test.go b/pkg/value/latlng_test.go index b774fe0b5..f749c88bd 100644 --- 
a/pkg/value/latlng_test.go +++ b/pkg/value/latlng_test.go @@ -29,12 +29,12 @@ func TestLatLng_Clone(t *testing.T) { for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.LL.Clone() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) if tc.Expected != nil { - assert.NotSame(tt, tc.Expected, res) + assert.NotSame(t, tc.Expected, res) } }) } diff --git a/pkg/value/latlngheight_test.go b/pkg/value/latlngheight_test.go index 5578836c8..2670900de 100644 --- a/pkg/value/latlngheight_test.go +++ b/pkg/value/latlngheight_test.go @@ -7,7 +7,7 @@ import ( ) func TestLatLngHeight_Clone(t *testing.T) { - testCases := []struct { + tests := []struct { Name string LL, Expected *LatLngHeight }{ @@ -29,14 +29,14 @@ func TestLatLngHeight_Clone(t *testing.T) { }, } - for _, tc := range testCases { + for _, tc := range tests { tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() res := tc.LL.Clone() - assert.Equal(tt, tc.Expected, res) + assert.Equal(t, tc.Expected, res) if tc.Expected != nil { - assert.NotSame(tt, tc.Expected, res) + assert.NotSame(t, tc.Expected, res) } }) } diff --git a/pkg/value/number_test.go b/pkg/value/number_test.go index 7ca44da58..06c38a87d 100644 --- a/pkg/value/number_test.go +++ b/pkg/value/number_test.go @@ -78,7 +78,10 @@ func Test_propertyNumber_I2V(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() + p := &propertyNumber{} for i, v := range tt.args { got1, got2 := p.I2V(v) @@ -91,7 +94,6 @@ func Test_propertyNumber_I2V(t *testing.T) { } else { assert.Equal(t, tt.want1, got1, "test %d", i) } - assert.Equal(t, tt.want2, got2, "test %d", i) } }) diff --git a/pkg/value/optional_test.go b/pkg/value/optional_test.go index 70eec8552..b6a43e0bd 100644 --- a/pkg/value/optional_test.go +++ b/pkg/value/optional_test.go @@ 
-11,6 +11,7 @@ func TestNewOptional(t *testing.T) { t Type v *Value } + tests := []struct { name string args args @@ -70,6 +71,7 @@ func TestOptionalFrom(t *testing.T) { type args struct { v *Value } + tests := []struct { name string args args @@ -225,6 +227,7 @@ func TestOptional_SetValue(t *testing.T) { type args struct { v *Value } + tests := []struct { name string value *Optional @@ -319,7 +322,9 @@ func TestOptional_Clone(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() res := tt.target.Clone() assert.Equal(t, tt.target, res) if tt.target != nil { @@ -334,6 +339,7 @@ func TestOptional_Cast(t *testing.T) { t Type p TypePropertyMap } + tests := []struct { name string target *Optional @@ -379,7 +385,9 @@ func TestOptional_Cast(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Cast(tt.args.t, tt.args.p)) }) } diff --git a/pkg/value/string_test.go b/pkg/value/string_test.go index d39b3ccc6..077ce6295 100644 --- a/pkg/value/string_test.go +++ b/pkg/value/string_test.go @@ -45,7 +45,9 @@ func Test_propertyString_I2V(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() p := &propertyString{} for i, v := range tt.args { got1, got2 := p.I2V(v) diff --git a/pkg/value/type_test.go b/pkg/value/type_test.go index 6f0d83ce0..7b32af707 100644 --- a/pkg/value/type_test.go +++ b/pkg/value/type_test.go @@ -40,6 +40,7 @@ func TestType_Default(t *testing.T) { want: false, }, } + for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/value/value_test.go b/pkg/value/value_test.go index 34c47320b..8365c41d7 100644 --- a/pkg/value/value_test.go +++ b/pkg/value/value_test.go @@ -98,6 +98,7 @@ func TestValue_Clone(t *testing.T) { func TestValue_Value(t *testing.T) { u, _ := url.Parse("https://reearth.io") + tests := []struct { name string value *Value 
@@ -257,7 +258,9 @@ func TestValue_Interface(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.value.Interface()) }) } @@ -268,6 +271,7 @@ func TestValue_Cast(t *testing.T) { t Type p TypePropertyMap } + tests := []struct { name string target *Value @@ -313,7 +317,9 @@ func TestValue_Cast(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() assert.Equal(t, tt.want, tt.target.Cast(tt.args.t, tt.args.p)) }) } diff --git a/pkg/writer/seeker_closer_test.go b/pkg/writer/seeker_closer_test.go index 503539794..1eeed3c2c 100644 --- a/pkg/writer/seeker_closer_test.go +++ b/pkg/writer/seeker_closer_test.go @@ -14,7 +14,7 @@ var _ io.WriteSeeker = (*WriterSeeker)(nil) //reference: https://github.com/orcaman/writerseeker/blob/master/writerseeker_test.go func TestWrite(t *testing.T) { - testCases := []struct { + tests := []struct { Name string Input []byte WS *WriterSeeker @@ -31,16 +31,17 @@ func TestWrite(t *testing.T) { err: nil, }, } - for _, tc := range testCases { - tc := tc - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - n, err := tc.WS.Write(tc.Input) - if err == nil { - assert.Equal(tt, tc.ExpectedBuffer, tc.WS.Buffer()) - assert.Equal(tt, tc.ExpectedPosition, n) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + n, err := tt.WS.Write(tt.Input) + if tt.err == nil { + assert.Equal(t, tt.ExpectedBuffer, tt.WS.Buffer()) + assert.Equal(t, tt.ExpectedPosition, n) } else { - assert.True(tt, errors.As(err, &tc.err)) + assert.Equal(t, tt.err, err) } }) } @@ -50,7 +51,7 @@ func TestSeek(t *testing.T) { ws := &WriterSeeker{} _, _ = ws.Write([]byte("xxxxxx")) - testCases := []struct { + tests := []struct { Name string WS *WriterSeeker Whence int @@ -90,16 +91,17 @@ func TestSeek(t *testing.T) { err: errors.New("negative result pos"), }, } - for _, tc := range testCases { - tc := tc - 
t.Run(tc.Name, func(tt *testing.T) { - // this test is sequential - //tt.Parallel() - n, err := tc.WS.Seek(tc.Offset, tc.Whence) - if err == nil { - assert.Equal(tt, tc.ExpectedPosition, n) + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + // This test should be sequential + + n, err := tt.WS.Seek(tt.Offset, tt.Whence) + if tt.err == nil { + assert.Equal(t, tt.ExpectedPosition, n) } else { - assert.True(tt, errors.As(err, &tc.err)) + assert.Equal(t, err, tt.err) } }) } From 559194880947ca16ce06587caa8650e229e67025 Mon Sep 17 00:00:00 2001 From: HideBa <49897538+HideBa@users.noreply.github.com> Date: Fri, 21 Jan 2022 15:23:21 +0900 Subject: [PATCH 134/253] feat: add "clamp to ground" option to file primitive (#95) * feat: update manifest file(file's clamp field) * Update pkg/builtin/manifest.yml Co-authored-by: KaWaite <34051327+KaWaite@users.noreply.github.com> * Update pkg/builtin/manifest_ja.yml Co-authored-by: rot1024 Co-authored-by: KaWaite <34051327+KaWaite@users.noreply.github.com> Co-authored-by: rot1024 --- pkg/builtin/manifest.yml | 5 +++++ pkg/builtin/manifest_ja.yml | 3 +++ pkg/plugin/manifest/schema_gen.go | 2 +- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 6a7c768dc..a4f2df25e 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -1144,6 +1144,11 @@ extensions: label: GeoJSON / TopoJSON - key: czml label: CZML + - id: clampToGround + type: bool + title: Clamp to ground + description: Option to attach the primitive to the ground (keeps primitive on top of the map even with terrain enabled). 
+ defaultValue: false - id: textblock type: block name: Text diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index a6baa0e18..ca2add1eb 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -510,6 +510,9 @@ extensions: kml: KML geojson: GeoJSON / TopoJSON czml: CZML + clampToGround: + title: ๅœฐ่กจใซๅ›บๅฎš + description: ๅœฐ่กจใฎๅ‡นๅ‡ธใซๆฒฟใ†ใ‚ˆใ†ใซใƒใƒชใ‚ดใƒณใชใฉใฎใƒ‡ใƒผใ‚ฟใ‚’่กจ็คบใ—ใพใ™ใ€‚ textblock: name: ใƒ†ใ‚ญใ‚นใƒˆ description: Text block diff --git a/pkg/plugin/manifest/schema_gen.go b/pkg/plugin/manifest/schema_gen.go index 0bb414745..ca2434233 100644 --- a/pkg/plugin/manifest/schema_gen.go +++ b/pkg/plugin/manifest/schema_gen.go @@ -1,6 +1,6 @@ package manifest -// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build612118365/b001/exe/schematyper -o schema_gen.go --package manifest ../../../schemas/plugin_manifest.json" -- DO NOT EDIT +// generated by "/var/folders/lz/nhqy382n28g31wb4f_40gbmc0000gp/T/go-build773179862/b001/exe/schematyper -o schema_gen.go --package manifest ../../../schemas/plugin_manifest.json" -- DO NOT EDIT type Choice struct { Icon string `json:"icon,omitempty"` From ddd0dbfec13167bba9c412c4d21d371aeb6a570f Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Fri, 21 Jan 2022 10:03:41 +0300 Subject: [PATCH 135/253] feat: Infobox and text block padding (#100) * Add infobox padding * infobox and text block padding * fix titles and descriptions * update textBlock padding IDs * add Japanese translation Co-authored-by: basel.issmail --- pkg/builtin/manifest.yml | 56 +++++++++++++++++++++++++++++++++++++ pkg/builtin/manifest_ja.yml | 24 ++++++++++++++++ 2 files changed, 80 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index a4f2df25e..3c4768d4a 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -298,6 +298,34 @@ extensions: - id: title type: string title: Title + - id: 
infoboxPaddingTop + type: number + title: Top padding + min: 0 + max: 40 + suffix: px + description: "The space between the top of the infobox and the title. Min: 0 Max: 40" + - id: infoboxPaddingBottom + type: number + title: Bottom padding + min: 0 + max: 40 + suffix: px + description: "The space between the bottom of the infobox and the last block. Min: 0 Max: 40" + - id: infoboxPaddingLeft + type: number + title: Left padding + min: 0 + max: 40 + suffix: px + description: "The space between the left side of the infobox and the title and blocks. Min: 0 Max: 40" + - id: infoboxPaddingRight + type: number + title: Right padding + min: 0 + max: 40 + suffix: px + description: "The space between the right side of the infobox and the title and blocks. Min: 0 Max: 40" - id: size type: string title: Size Type @@ -1168,6 +1196,34 @@ extensions: - id: markdown type: bool title: Use markdown + - id: paddingTop + type: number + title: Top padding + min: 0 + max: 40 + suffix: px + description: "The space between the top edge of the text block and the uppermost text. Min: 0 Max: 40" + - id: paddingBottom + type: number + title: Bottom padding + min: 0 + max: 40 + suffix: px + description: "The space between the bottom edge of the text block and the last text. Min: 0 Max: 40" + - id: paddingLeft + type: number + title: Left padding + min: 0 + max: 40 + suffix: px + description: "The space between the left edge of the text block and the text. Min: 0 Max: 40" + - id: paddingRight + type: number + title: Right padding + min: 0 + max: 40 + suffix: px + description: "The space between the right edge of the text block and the text. 
Min: 0 Max: 40" - id: typography type: typography title: Font diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index ca2add1eb..547565e7f 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -153,6 +153,18 @@ extensions: propertySchema: default: title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น + paddingTop: + title: ไฝ™็™ฝไธŠ + description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingBottom: + title: ไฝ™็™ฝไธ‹ + description: "ไธ‹็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingLeft: + title: ไฝ™็™ฝๅทฆ + description: "ๅทฆ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingRight: + title: ไฝ™็™ฝๅณ + description: "ๅณ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" fields: title: title: ใ‚ฟใ‚คใƒˆใƒซ @@ -526,6 +538,18 @@ extensions: title: ใ‚ณใƒณใƒ†ใƒณใƒ„ markdown: title: ใƒžใƒผใ‚ฏใƒ€ใ‚ฆใƒณ + paddingTop: + title: ไฝ™็™ฝไธŠ + description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingBottom: + title: ไฝ™็™ฝไธ‹ + description: "ไธ‹็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingLeft: + title: ไฝ™็™ฝๅทฆ + description: "ๅทฆ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + paddingRight: + title: ไฝ™็™ฝๅณ + description: "ๅณ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" typography: title: ใƒ•ใ‚ฉใƒณใƒˆ imageblock: From 17a463d81cf59f3fcfaed4e30b9ea1e5b751bd56 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 25 Jan 2022 11:34:27 +0900 Subject: [PATCH 136/253] refactor: pkg/property, pkg/layer, pkg/plugin (#101) * remove schema field from property.SchemaGroup and Item * rename methods * delete property.FieldUnsafeBuilder * add methods to value * refactor test * refactor usage of references and Clone methods * add cast method to property.Field * add RemoveLayer method to layer/IDList * add IDs and Properties methods to layer/List * rename property/GroupList.GetGroup * fix test * add IDs method to property/List * add nil 
check to property/SchemaField and plugin/Plugin * add SchemaLoaderFromMap to property * add PropertySchema method to manifest/Manifest --- .../adapter/gql/gqlmodel/convert_property.go | 10 +- internal/infrastructure/fs/file.go | 1 + internal/infrastructure/gcs/file.go | 2 + internal/infrastructure/memory/plugin.go | 9 +- .../201217193948_add_scene_default_tile.go | 2 +- .../infrastructure/mongo/mongodoc/dataset.go | 2 +- .../infrastructure/mongo/mongodoc/property.go | 18 +- .../mongo/mongodoc/property_schema.go | 21 +- internal/usecase/interactor/scene.go | 2 +- pkg/dataset/schema_builder.go | 6 +- pkg/dataset/schema_field.go | 13 + pkg/dataset/schema_field_builder.go | 7 +- pkg/dataset/value.go | 33 +- pkg/dataset/value_test.go | 289 ++++++++++++ pkg/layer/decoding/common.go | 3 +- pkg/layer/group.go | 5 +- pkg/layer/id_list.go | 14 +- pkg/layer/id_list_test.go | 7 + pkg/layer/item.go | 5 +- pkg/layer/list.go | 22 + pkg/layer/list_test.go | 67 +++ pkg/layer/merged_test.go | 424 ++++++++++-------- pkg/plugin/builder.go | 7 +- pkg/plugin/extension.go | 96 +--- pkg/plugin/manifest/convert.go | 5 +- pkg/plugin/manifest/convert_test.go | 201 ++++++--- pkg/plugin/manifest/manifest.go | 21 +- pkg/plugin/manifest/manifest_test.go | 104 +++++ pkg/plugin/manifest/parser_test.go | 1 - pkg/plugin/plugin.go | 61 +++ pkg/plugin/plugin_test.go | 48 ++ pkg/plugin/widget.go | 103 +++++ pkg/project/builder.go | 1 + pkg/project/project.go | 2 + pkg/property/builder.go | 16 +- pkg/property/builder_test.go | 166 ++++--- pkg/property/field.go | 29 +- pkg/property/field_builder.go | 79 ++-- pkg/property/field_builder_test.go | 132 +----- pkg/property/field_test.go | 106 ++++- pkg/property/group.go | 33 +- pkg/property/group_builder.go | 5 +- pkg/property/group_builder_test.go | 19 +- pkg/property/group_list.go | 33 +- pkg/property/group_list_builder.go | 5 +- pkg/property/group_list_builder_test.go | 27 +- pkg/property/group_list_test.go | 143 +++--- pkg/property/group_test.go | 71 ++- 
pkg/property/initializer.go | 25 +- pkg/property/initializer_test.go | 51 ++- pkg/property/item.go | 5 +- pkg/property/item_builder.go | 3 +- pkg/property/item_test.go | 23 +- pkg/property/link.go | 28 +- pkg/property/link_test.go | 4 +- pkg/property/list.go | 25 +- pkg/property/list_test.go | 112 ++++- pkg/property/loader.go | 14 + pkg/property/loader_test.go | 23 +- pkg/property/merged_test.go | 50 ++- pkg/property/pointer.go | 10 +- pkg/property/property.go | 10 +- pkg/property/property_test.go | 80 ++-- pkg/property/schema_builder_test.go | 8 +- pkg/property/schema_field.go | 34 +- pkg/property/schema_field_builder.go | 3 +- pkg/property/schema_group.go | 15 - pkg/property/schema_group_builder.go | 7 +- pkg/property/schema_group_builder_test.go | 9 - pkg/property/schema_group_test.go | 18 +- pkg/property/schema_list.go | 17 + pkg/property/schema_test.go | 6 +- pkg/property/value.go | 15 + pkg/property/value_test.go | 63 +++ pkg/scene/builder/builder_test.go | 153 +++---- pkg/scene/plugin.go | 31 +- pkg/scene/plugin_test.go | 14 +- pkg/scene/plugins.go | 16 +- pkg/scene/sceneops/plugin_migrator.go | 2 +- pkg/scene/widget.go | 14 + pkg/scene/widget_test.go | 8 + pkg/scene/widgets.go | 6 +- pkg/user/members.go | 7 + pkg/user/members_test.go | 33 ++ pkg/user/team.go | 12 +- pkg/user/team_builder.go | 4 +- pkg/user/team_builder_test.go | 8 +- pkg/user/team_test.go | 10 +- pkg/value/type.go | 4 + pkg/value/type_test.go | 32 ++ pkg/value/value.go | 4 + pkg/value/value_test.go | 62 +++ 92 files changed, 2297 insertions(+), 1257 deletions(-) create mode 100644 pkg/dataset/value_test.go create mode 100644 pkg/plugin/manifest/manifest_test.go create mode 100644 pkg/plugin/widget.go diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index ce36a8ad6..0803fd4ec 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -211,7 +211,7 @@ func 
ToPropertySchema(propertySchema *property.Schema) *PropertySchema { pgroups := propertySchema.Groups() groups := make([]*PropertySchemaGroup, 0, len(pgroups)) for _, g := range pgroups { - groups = append(groups, ToPropertySchemaGroup(g)) + groups = append(groups, ToPropertySchemaGroup(g, propertySchema.ID())) } return &PropertySchema{ @@ -364,7 +364,7 @@ func ToMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *Merg } } -func ToPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { +func ToPropertySchemaGroup(g *property.SchemaGroup, s property.SchemaID) *PropertySchemaGroup { if g == nil { return nil } @@ -381,7 +381,7 @@ func ToPropertySchemaGroup(g *property.SchemaGroup) *PropertySchemaGroup { } return &PropertySchemaGroup{ SchemaGroupID: g.ID(), - SchemaID: g.Schema(), + SchemaID: s, IsList: g.IsList(), Title: g.Title().StringRef(), Fields: fields, @@ -405,7 +405,7 @@ func ToPropertyGroup(g *property.Group, p *property.Property, gl *property.Group return &PropertyGroup{ ID: g.ID().ID(), - SchemaID: g.Schema(), + SchemaID: p.Schema(), SchemaGroupID: g.SchemaGroup(), Fields: fields, } @@ -424,7 +424,7 @@ func ToPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyG return &PropertyGroupList{ ID: g.ID().ID(), - SchemaID: g.Schema(), + SchemaID: p.Schema(), SchemaGroupID: g.SchemaGroup(), Groups: groups, } diff --git a/internal/infrastructure/fs/file.go b/internal/infrastructure/fs/file.go index a0d4d643b..975fdf4e5 100644 --- a/internal/infrastructure/fs/file.go +++ b/internal/infrastructure/fs/file.go @@ -180,6 +180,7 @@ func getAssetFileURL(base *url.URL, filename string) *url.URL { return nil } + // https://github.com/golang/go/issues/38351 b := *base b.Path = path.Join(b.Path, filename) return &b diff --git a/internal/infrastructure/gcs/file.go b/internal/infrastructure/gcs/file.go index 239d65118..ddd528624 100644 --- a/internal/infrastructure/gcs/file.go +++ b/internal/infrastructure/gcs/file.go @@ -307,6 
+307,8 @@ func getGCSObjectURL(base *url.URL, objectName string) *url.URL { if base == nil { return nil } + + // https://github.com/golang/go/issues/38351 b := *base b.Path = path.Join(b.Path, objectName) return &b diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index d17d7e4a0..e567667c1 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -32,8 +32,7 @@ func (r *Plugin) FindByID(ctx context.Context, id id.PluginID, sids []id.SceneID } for _, p := range r.data { if p.ID().Equal(id) && (p.ID().Scene() == nil || p.ID().Scene().Contains(sids)) { - p2 := *p - return &p2, nil + return p.Clone(), nil } } return nil, rerror.ErrNotFound @@ -51,8 +50,7 @@ func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.Sce } for _, p := range r.data { if p.ID().Equal(id) && (p.ID().Scene() == nil || p.ID().Scene().Contains(sids)) { - p2 := *p - result = append(result, &p2) + result = append(result, p.Clone()) } else { result = append(result, nil) } @@ -73,8 +71,7 @@ func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error { return nil } } - p2 := *p - r.data = append(r.data, &p2) + r.data = append(r.data, p.Clone()) return nil } diff --git a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go index bf1f160f1..d1d5b11f1 100644 --- a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go +++ b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go @@ -56,7 +56,7 @@ func AddSceneDefaultTile(ctx context.Context, c DBClient) error { if g == nil || g.Count() > 0 { continue } - f := property.NewGroup().NewID().Schema(p.Schema(), id.PropertySchemaGroupID("tiles")).MustBuild() + f := property.NewGroup().NewID().SchemaGroup(id.PropertySchemaGroupID("tiles")).MustBuild() g.Add(f, -1) } diff --git 
a/internal/infrastructure/mongo/mongodoc/dataset.go b/internal/infrastructure/mongo/mongodoc/dataset.go index 5f36282b5..45ab1cd61 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset.go +++ b/internal/infrastructure/mongo/mongodoc/dataset.go @@ -183,5 +183,5 @@ func toModelDatasetValue(v interface{}, t string) *dataset.Value { if v2, ok := v.(bson.D); ok { v = v2.Map() } - return dataset.ValueTypeFrom(t).ValueFrom(v) + return dataset.ValueType(t).ValueFrom(v) } diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index 3e962bac2..1767aeba8 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -35,7 +35,6 @@ type PropertyLinkDocument struct { type PropertyItemDocument struct { Type string ID string - Schema string SchemaGroup string Groups []*PropertyItemDocument Fields []*PropertyFieldDocument @@ -149,7 +148,6 @@ func newPropertyItem(f property.Item) *PropertyItemDocument { return &PropertyItemDocument{ Type: t, ID: f.ID().String(), - Schema: f.Schema().String(), SchemaGroup: string(f.SchemaGroup()), Groups: items, Fields: fields, @@ -219,10 +217,9 @@ func toModelPropertyField(f *PropertyFieldDocument) *property.Field { } vt := property.ValueType(f.Type) - field := property.NewFieldUnsafe(). - FieldUnsafe(id.PropertySchemaFieldID(f.Field)). - ValueUnsafe(property.NewOptionalValue(vt, toModelPropertyValue(f.Value, f.Type))). - LinksUnsafe(flinks). + field := property.NewField(property.FieldID(f.Field)). + Value(property.NewOptionalValue(vt, toModelPropertyValue(f.Value, f.Type))). + Links(flinks). 
Build() return field @@ -236,16 +233,11 @@ func toModelPropertyItem(f *PropertyItemDocument) (property.Item, error) { var i property.Item var err error var iid id.PropertyItemID - var sid id.PropertySchemaID iid, err = id.PropertyItemIDFrom(f.ID) if err != nil { return nil, err } - sid, err = id.PropertySchemaIDFrom(f.Schema) - if err != nil { - return nil, err - } gid := id.PropertySchemaGroupID(f.SchemaGroup) if f.Type == typePropertyItemGroup { @@ -256,7 +248,7 @@ func toModelPropertyItem(f *PropertyItemDocument) (property.Item, error) { i, err = property.NewGroup(). ID(iid). - Schema(sid, gid). + SchemaGroup(gid). Fields(fields). Build() } else if f.Type == typePropertyItemGroupList { @@ -273,7 +265,7 @@ func toModelPropertyItem(f *PropertyItemDocument) (property.Item, error) { i, err = property.NewGroupList(). ID(iid). - Schema(sid, gid). + SchemaGroup(gid). Groups(items). Build() } diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go index 3003d380b..97fb36396 100644 --- a/internal/infrastructure/mongo/mongodoc/property_schema.go +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -14,6 +14,14 @@ type PropertySchemaDocument struct { LinkableFields *PropertyLinkableFieldsDocument } +type PropertySchemaGroupDocument struct { + ID string + Fields []*PropertySchemaFieldDocument + List bool + IsAvailableIf *PropertyConditonDocument + Title map[string]string +} + type PropertySchemaFieldDocument struct { ID string Type string @@ -50,14 +58,6 @@ type PropertyConditonDocument struct { Value interface{} } -type PropertySchemaGroupDocument struct { - ID string - Fields []*PropertySchemaFieldDocument - List bool - IsAvailableIf *PropertyConditonDocument - Title map[string]string -} - type PropertySchemaConsumer struct { Rows []*property.Schema } @@ -190,7 +190,7 @@ func (doc *PropertySchemaDocument) Model() (*property.Schema, error) { groups := make([]*property.SchemaGroup, 0, 
len(doc.Groups)) for _, g := range doc.Groups { - g2, err := toModelPropertySchemaGroup(g, pid) + g2, err := g.Model() if err != nil { return nil, err } @@ -248,7 +248,7 @@ func newPropertySchemaGroup(p *property.SchemaGroup) *PropertySchemaGroupDocumen } } -func toModelPropertySchemaGroup(d *PropertySchemaGroupDocument, sid id.PropertySchemaID) (*property.SchemaGroup, error) { +func (d *PropertySchemaGroupDocument) Model() (*property.SchemaGroup, error) { if d == nil { return nil, nil } @@ -264,7 +264,6 @@ func toModelPropertySchemaGroup(d *PropertySchemaGroupDocument, sid id.PropertyS return property.NewSchemaGroup(). ID(id.PropertySchemaGroupID(d.ID)). - Schema(sid). IsList(d.List). Title(d.Title). IsAvailableIf(toModelPropertyCondition(d.IsAvailableIf)). diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index dfb23750a..749463a84 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -111,7 +111,7 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. // add default tile tiles := id.PropertySchemaGroupID("tiles") g := p.GetOrCreateGroupList(schema, property.PointItemBySchema(tiles)) - g.Add(property.NewGroup().NewID().Schema(schema.ID(), tiles).MustBuild(), -1) + g.Add(property.NewGroup().NewID().SchemaGroup(tiles).MustBuild(), -1) scene, err := scene.New(). ID(sceneID). 
diff --git a/pkg/dataset/schema_builder.go b/pkg/dataset/schema_builder.go index 2b4d138cb..b1da43667 100644 --- a/pkg/dataset/schema_builder.go +++ b/pkg/dataset/schema_builder.go @@ -74,12 +74,10 @@ func (b *SchemaBuilder) Fields(fields []*SchemaField) *SchemaBuilder { } if source := f.Source(); source == "" { - copied := *f - b.d.fields[f.ID()] = &copied + b.d.fields[f.ID()] = f.Clone() b.d.order = append(b.d.order, f.ID()) } else if _, ok := sources[source]; !ok { - copied := *f - b.d.fields[f.ID()] = &copied + b.d.fields[f.ID()] = f.Clone() b.d.order = append(b.d.order, f.ID()) sources[source] = struct{}{} } diff --git a/pkg/dataset/schema_field.go b/pkg/dataset/schema_field.go index 6c0856af4..cc7478075 100644 --- a/pkg/dataset/schema_field.go +++ b/pkg/dataset/schema_field.go @@ -49,3 +49,16 @@ func (d *SchemaField) Source() (s string) { } return d.source } + +func (d *SchemaField) Clone() *SchemaField { + if d == nil { + return nil + } + return &SchemaField{ + id: d.id, + name: d.name, + dataType: d.dataType, + source: d.source, + ref: d.ref.CopyRef(), + } +} diff --git a/pkg/dataset/schema_field_builder.go b/pkg/dataset/schema_field_builder.go index b51abd766..d971401b4 100644 --- a/pkg/dataset/schema_field_builder.go +++ b/pkg/dataset/schema_field_builder.go @@ -56,11 +56,6 @@ func (b *SchemaFieldBuilder) Source(source string) *SchemaFieldBuilder { } func (b *SchemaFieldBuilder) Ref(ref *SchemaID) *SchemaFieldBuilder { - if ref == nil { - b.d.ref = nil - } else { - ref2 := *ref - b.d.ref = &ref2 - } + b.d.ref = ref.CopyRef() return b } diff --git a/pkg/dataset/value.go b/pkg/dataset/value.go index eaee64ed8..389ad34a6 100644 --- a/pkg/dataset/value.go +++ b/pkg/dataset/value.go @@ -2,7 +2,6 @@ package dataset import ( "net/url" - "strconv" "github.com/reearth/reearth-backend/pkg/value" ) @@ -13,6 +12,8 @@ type Coordinates = value.Coordinates type Rect = value.Rect type Polygon = value.Polygon +type ValueType value.Type + var ( ValueTypeUnknown = 
ValueType(value.TypeUnknown) ValueTypeBool = ValueType(value.TypeBool) @@ -27,10 +28,8 @@ var ( TypePolygon = ValueType(value.TypePolygon) ) -type ValueType value.Type - -func ValueTypeFrom(t string) ValueType { - return ValueType(value.Type(t)) +func (vt ValueType) Valid() bool { + return value.Type(vt).Default() } func (t ValueType) Default() bool { @@ -52,10 +51,18 @@ func (vt ValueType) MustBeValue(i interface{}) *Value { panic("invalid value") } +func (vt ValueType) None() *OptionalValue { + return NewOptionalValue(vt, nil) +} + type Value struct { v value.Value } +func (v *Value) IsEmpty() bool { + return v == nil || v.v.IsEmpty() +} + func (v *Value) Clone() *Value { if v == nil { return nil @@ -67,6 +74,10 @@ func (v *Value) Clone() *Value { return &Value{v: *vv} } +func (v *Value) Some() *OptionalValue { + return OptionalValueFrom(v) +} + func (v *Value) Type() ValueType { if v == nil { return ValueTypeUnknown @@ -210,16 +221,12 @@ func (v *Value) ValuePolygon() *Polygon { } func ValueFromStringOrNumber(s string) *Value { - if vint, err := strconv.Atoi(s); err == nil { - return ValueTypeNumber.ValueFrom(vint) + if s == "true" || s == "false" || s == "TRUE" || s == "FALSE" || s == "True" || s == "False" { + return ValueTypeBool.ValueFrom(s) } - if vfloat64, err := strconv.ParseFloat(s, 64); err == nil { - return ValueTypeNumber.ValueFrom(vfloat64) - } - - if vbool, err := strconv.ParseBool(s); err == nil { - return ValueTypeBool.ValueFrom(vbool) + if v := ValueTypeNumber.ValueFrom(s); v != nil { + return v } return ValueTypeString.ValueFrom(s) diff --git a/pkg/dataset/value_test.go b/pkg/dataset/value_test.go new file mode 100644 index 000000000..1ec0980f8 --- /dev/null +++ b/pkg/dataset/value_test.go @@ -0,0 +1,289 @@ +package dataset + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/value" + "github.com/stretchr/testify/assert" +) + +func TestValueType_None(t *testing.T) { + tests := []struct { + name string + tr ValueType + want 
*OptionalValue + }{ + { + name: "default", + tr: ValueTypeString, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "unknown", + tr: ValueTypeUnknown, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.None()) + }) + } +} + +func TestValue_IsEmpty(t *testing.T) { + tests := []struct { + name string + value *Value + want bool + }{ + { + name: "empty", + want: true, + }, + { + name: "nil", + want: true, + }, + { + name: "non-empty", + value: ValueTypeString.ValueFrom("foo"), + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.IsEmpty()) + }) + } +} + +func TestValue_Clone(t *testing.T) { + tests := []struct { + name string + value *Value + want *Value + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: &Value{ + v: *value.TypeString.ValueFrom("foo", nil), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Clone()) + }) + } +} + +func TestValue_Some(t *testing.T) { + tests := []struct { + name string + value *Value + want *OptionalValue + }{ + { + name: "ok", + value: &Value{ + v: *value.TypeString.ValueFrom("foo", nil), + }, + want: &OptionalValue{ + ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", nil)), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Some()) + }) + } +} + +func TestValue_Value(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: 
"ok", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if tt.want == nil { + assert.Nil(t, tt.value.Value()) + } else { + assert.Equal(t, tt.want, tt.value.Value()) + } + }) + } +} + +func TestValue_Type(t *testing.T) { + tests := []struct { + name string + value *Value + want ValueType + }{ + { + name: "ok", + value: ValueTypeString.ValueFrom("foo"), + want: ValueTypeString, + }, + { + name: "empty", + value: &Value{}, + want: ValueTypeUnknown, + }, + { + name: "nil", + want: ValueTypeUnknown, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Type()) + }) + } +} + +func TestValue_Interface(t *testing.T) { + tests := []struct { + name string + value *Value + want interface{} + }{ + { + name: "string", + value: ValueTypeString.ValueFrom("foo"), + want: "foo", + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + { + name: "nil", + value: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Interface()) + }) + } +} + +func TestValue_Cast(t *testing.T) { + type args struct { + t ValueType + } + + tests := []struct { + name string + target *Value + args args + want *Value + }{ + { + name: "diff type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeString}, + want: ValueTypeString.ValueFrom("1.1"), + }, + { + name: "same type", + target: ValueTypeNumber.ValueFrom(1.1), + args: args{t: ValueTypeNumber}, + want: ValueTypeNumber.ValueFrom(1.1), + }, + { + name: "failed to cast", + target: ValueTypeLatLng.ValueFrom(LatLng{Lat: 1, Lng: 2}), + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "invalid type", + target: ValueTypeNumber.ValueFrom(1.1), + args: 
args{t: ValueTypeUnknown}, + want: nil, + }, + { + name: "empty", + target: &Value{}, + args: args{t: ValueTypeString}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeString}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Cast(tt.args.t)) + }) + } +} diff --git a/pkg/layer/decoding/common.go b/pkg/layer/decoding/common.go index 10d275790..fbf8481d9 100644 --- a/pkg/layer/decoding/common.go +++ b/pkg/layer/decoding/common.go @@ -87,8 +87,7 @@ func createProperty(t string, v interface{}, sceneID layer.SceneID, styleItem in item := propertyItems field := propertyFields[t] ps := builtin.GetPropertySchema(propertySchema) - p, err := property. - New(). + p, err := property.New(). NewID(). Scene(sceneID). Schema(propertySchema). diff --git a/pkg/layer/group.go b/pkg/layer/group.go index d6827e596..61a0d8877 100644 --- a/pkg/layer/group.go +++ b/pkg/layer/group.go @@ -101,11 +101,10 @@ func (l *Group) IsLinked() bool { } func (l *Group) LinkedDatasetSchema() *DatasetSchemaID { - if l == nil || l.linkedDatasetSchema == nil { + if l == nil { return nil } - id := *l.linkedDatasetSchema - return &id + return l.linkedDatasetSchema.CopyRef() } func (l *Group) Link(ds DatasetSchemaID) { diff --git a/pkg/layer/id_list.go b/pkg/layer/id_list.go index f2e9a3855..d359b54ef 100644 --- a/pkg/layer/id_list.go +++ b/pkg/layer/id_list.go @@ -163,15 +163,19 @@ func (l *IDList) MoveLayerAt(fromIndex int, toIndex int) { l.layers = append(newSlice, l.layers[toIndex:]...) 
} -func (l *IDList) RemoveLayer(id ID) { +func (l *IDList) RemoveLayer(ids ...ID) { if l == nil { return } - for index, layer := range l.layers { - if layer == id { - l.RemoveLayerAt(index) - return + for i := 0; i < len(l.layers); i++ { + layer := l.layers[i] + for _, id := range ids { + if layer == id { + l.RemoveLayerAt(i) + i-- + break + } } } } diff --git a/pkg/layer/id_list_test.go b/pkg/layer/id_list_test.go index c9345551b..b2981981b 100644 --- a/pkg/layer/id_list_test.go +++ b/pkg/layer/id_list_test.go @@ -128,4 +128,11 @@ func TestLayerIDList(t *testing.T) { assert.Equal(t, l3, layers.LayerAt(2)) assert.Equal(t, l4, layers.LayerAt(3)) assert.True(t, layers.HasLayer(l2)) + + // 1, 3 + + layers.RemoveLayer(l2, l4) + assert.Equal(t, 2, layers.LayerCount()) + assert.Equal(t, l1, layers.LayerAt(0)) + assert.Equal(t, l3, layers.LayerAt(1)) } diff --git a/pkg/layer/item.go b/pkg/layer/item.go index e134863a5..971d7ddf5 100644 --- a/pkg/layer/item.go +++ b/pkg/layer/item.go @@ -105,11 +105,10 @@ func (l *Item) IsLinked() bool { } func (l *Item) LinkedDataset() *DatasetID { - if l == nil || l.linkedDataset == nil { + if l == nil { return nil } - id := *l.linkedDataset - return &id + return l.linkedDataset.CopyRef() } func (l *Item) Link(ds DatasetID) { diff --git a/pkg/layer/list.go b/pkg/layer/list.go index 1ce98849a..c43ce677d 100644 --- a/pkg/layer/list.go +++ b/pkg/layer/list.go @@ -9,6 +9,28 @@ func (ll List) Last() *Layer { return ll[len(ll)-1] } +func (ll List) IDs() *IDList { + if len(ll) == 0 { + return nil + } + ids := make([]ID, 0, len(ll)) + for _, l := range ll.Deref() { + ids = append(ids, l.ID()) + } + return NewIDList(ids) +} + +func (ll List) Properties() []PropertyID { + if len(ll) == 0 { + return nil + } + ids := make([]PropertyID, 0, len(ll)) + for _, l := range ll.Deref() { + ids = append(ids, l.Properties()...) 
+ } + return ids +} + func (ll List) Pick(il *IDList) List { if il == nil { return nil diff --git a/pkg/layer/list_test.go b/pkg/layer/list_test.go index 067f2455f..01dc68772 100644 --- a/pkg/layer/list_test.go +++ b/pkg/layer/list_test.go @@ -6,6 +6,73 @@ import ( "github.com/stretchr/testify/assert" ) +func TestList_IDs(t *testing.T) { + sid := NewSceneID() + l1 := NewID() + l2 := NewID() + + tests := []struct { + name string + target List + want *IDList + }{ + { + name: "ok", + target: List{ + New().ID(l1).Scene(sid).Item().MustBuild().LayerRef(), + New().ID(l2).Scene(sid).Group().MustBuild().LayerRef(), + }, + want: NewIDList([]ID{l1, l2}), + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IDs()) + }) + } +} + +func TestList_Properties(t *testing.T) { + sid := NewSceneID() + p1 := NewPropertyID() + p2 := NewPropertyID() + p3 := NewPropertyID() + + tests := []struct { + name string + target List + want []PropertyID + }{ + { + name: "ok", + target: List{ + New().NewID().Scene(sid).Property(&p1).Item().MustBuild().LayerRef(), + New().NewID().Scene(sid).Infobox(NewInfobox([]*InfoboxField{ + {property: p3}, + }, p2)).Group().MustBuild().LayerRef(), + }, + want: []PropertyID{p1, p2, p3}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Properties()) + }) + } +} + func TestList_Remove(t *testing.T) { sid := NewSceneID() l1 := NewItem().NewID().Scene(sid).MustBuild() diff --git a/pkg/layer/merged_test.go b/pkg/layer/merged_test.go index eadd791c0..cdd2d89ac 100644 --- a/pkg/layer/merged_test.go +++ b/pkg/layer/merged_test.go @@ -80,239 +80,277 @@ func TestMerge(t *testing.T) { Infobox(NewInfobox([]*InfoboxField{f2, f3}, ib2pr)). 
MustBuild() - expected1 := &Merged{ - Original: itemLayer1.ID(), - Parent: nil, - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: nil, - LinkedDataset: nil, + tests := []struct { + name string + o Layer + p *Group + want *Merged + }{ + { + name: "nil", + o: nil, + p: nil, + want: nil, }, - } - - expected2 := &Merged{ - Original: itemLayer3.ID(), - Parent: nil, - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: nil, - LinkedDataset: &dataset1, + { + name: "parent only", + o: nil, + p: groupLayer1, + want: nil, }, - Infobox: &MergedInfobox{ - Property: &property.MergedMetadata{ - Original: &ib1pr, - Parent: nil, - LinkedDataset: &dataset1, + { + name: "only original without infobox and link", + o: itemLayer1, + p: nil, + want: &Merged{ + Original: itemLayer1.ID(), + Parent: nil, + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: nil, + LinkedDataset: nil, + }, }, - Fields: []*MergedInfoboxField{ - { - ID: f1.ID(), - Plugin: p, - Extension: e, - Property: &property.MergedMetadata{ - Original: &f1pr, - Parent: nil, - LinkedDataset: &dataset1, - }, + }, + { + name: "only original with infobox", + o: itemLayer3, + p: nil, + want: &Merged{ + Original: itemLayer3.ID(), + Parent: nil, + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: nil, + LinkedDataset: &dataset1, }, - { - ID: f3.ID(), - Plugin: p, - Extension: e, + Infobox: &MergedInfobox{ Property: &property.MergedMetadata{ - Original: &f3pr, + Original: &ib1pr, Parent: nil, LinkedDataset: &dataset1, }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + 
Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, }, }, }, - } - - expected3 := &Merged{ - Original: itemLayer2.ID(), - Parent: groupLayer1.IDRef(), - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: &groupProperty, - LinkedDataset: &dataset1, - }, - } - - expected4 := &Merged{ - Original: itemLayer3.ID(), - Parent: groupLayer1.IDRef(), - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: &groupProperty, - LinkedDataset: &dataset1, - }, - Infobox: &MergedInfobox{ - Property: &property.MergedMetadata{ - Original: &ib1pr, - Parent: nil, - LinkedDataset: &dataset1, + { + name: "original without infobox, parent without infobox", + o: itemLayer2, + p: groupLayer1, + want: &Merged{ + Original: itemLayer2.ID(), + Parent: groupLayer1.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, }, - Fields: []*MergedInfoboxField{ - { - ID: f1.ID(), - Plugin: p, - Extension: e, - Property: &property.MergedMetadata{ - Original: &f1pr, - Parent: nil, - LinkedDataset: &dataset1, - }, + }, + { + name: "original with infobox, parent without infobox", + o: itemLayer3, + p: groupLayer1, + want: &Merged{ + Original: itemLayer3.ID(), + Parent: groupLayer1.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, }, - { - ID: f3.ID(), - Plugin: p, - Extension: e, + Infobox: &MergedInfobox{ Property: &property.MergedMetadata{ - Original: &f3pr, + Original: &ib1pr, Parent: nil, LinkedDataset: &dataset1, }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + 
Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, }, }, }, - } - - expected5 := &Merged{ - Original: itemLayer2.ID(), - Parent: groupLayer2.IDRef(), - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: &groupProperty, - LinkedDataset: &dataset1, - }, - Infobox: &MergedInfobox{ - Property: &property.MergedMetadata{ - Original: nil, - Parent: &ib2pr, - LinkedDataset: &dataset1, - }, - Fields: []*MergedInfoboxField{ - { - ID: f2.ID(), - Plugin: p, - Extension: e, - Property: &property.MergedMetadata{ - Original: &f2pr, - Parent: nil, - LinkedDataset: &dataset1, - }, + { + name: "original without infobox, parent with infobox", + o: itemLayer2, + p: groupLayer2, + want: &Merged{ + Original: itemLayer2.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, }, - { - ID: f3.ID(), - Plugin: p, - Extension: e, + Infobox: &MergedInfobox{ Property: &property.MergedMetadata{ - Original: &f3pr, - Parent: nil, + Original: nil, + Parent: &ib2pr, LinkedDataset: &dataset1, }, + Fields: []*MergedInfoboxField{ + { + ID: f2.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f2pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, }, }, }, - } - - expected6 := &Merged{ - Original: itemLayer3.ID(), - Parent: groupLayer2.IDRef(), - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: &groupProperty, - LinkedDataset: 
&dataset1, - }, - Infobox: &MergedInfobox{ - Property: &property.MergedMetadata{ - Original: &ib1pr, - Parent: &ib2pr, - LinkedDataset: &dataset1, - }, - Fields: []*MergedInfoboxField{ - { - ID: f1.ID(), - Plugin: p, - Extension: e, + { + name: "original with infobox, parent with infobox", + o: itemLayer3, + p: groupLayer2, + want: &Merged{ + Original: itemLayer3.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ Property: &property.MergedMetadata{ - Original: &f1pr, - Parent: nil, + Original: &ib1pr, + Parent: &ib2pr, LinkedDataset: &dataset1, }, + Fields: []*MergedInfoboxField{ + { + ID: f1.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f1pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + { + ID: f3.ID(), + Plugin: p, + Extension: e, + Property: &property.MergedMetadata{ + Original: &f3pr, + Parent: nil, + LinkedDataset: &dataset1, + }, + }, + }, }, - { - ID: f3.ID(), - Plugin: p, - Extension: e, + }, + }, + { + name: "original with infobox but field is empty, parent with infobox", + o: itemLayer4, + p: groupLayer2, + want: &Merged{ + Original: itemLayer4.ID(), + Parent: groupLayer2.IDRef(), + Scene: scene, + PluginID: &p, + ExtensionID: &e, + Property: &property.MergedMetadata{ + Original: &itemProperty, + Parent: &groupProperty, + LinkedDataset: &dataset1, + }, + Infobox: &MergedInfobox{ Property: &property.MergedMetadata{ - Original: &f3pr, - Parent: nil, + Original: &ib1pr, + Parent: &ib2pr, LinkedDataset: &dataset1, }, + Fields: []*MergedInfoboxField{}, }, }, }, } - expected7 := &Merged{ - Original: itemLayer4.ID(), - Parent: groupLayer2.IDRef(), - Scene: scene, - PluginID: &p, - ExtensionID: &e, - Property: &property.MergedMetadata{ - Original: &itemProperty, - Parent: &groupProperty, - LinkedDataset: &dataset1, - }, - 
Infobox: &MergedInfobox{ - Property: &property.MergedMetadata{ - Original: &ib1pr, - Parent: &ib2pr, - LinkedDataset: &dataset1, - }, - Fields: []*MergedInfoboxField{}, - }, + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + actual := Merge(tt.o, tt.p) + assert.Equal(t, tt.want, actual) + }) } - - actual := Merge(nil, nil) - assert.Nil(t, actual) - actual = Merge(nil, groupLayer1) - assert.Nil(t, actual) - actual = Merge(itemLayer1, nil) - assert.Equal(t, expected1, actual) - actual = Merge(itemLayer3, nil) - assert.Equal(t, expected2, actual) - actual = Merge(itemLayer2, groupLayer1) - assert.Equal(t, expected3, actual) - actual = Merge(itemLayer3, groupLayer1) - assert.Equal(t, expected4, actual) - actual = Merge(itemLayer2, groupLayer2) - assert.Equal(t, expected5, actual) - actual = Merge(itemLayer3, groupLayer2) - assert.Equal(t, expected6, actual) - actual = Merge(itemLayer4, groupLayer2) - assert.Equal(t, expected7, actual) } func TestMergedProperties(t *testing.T) { diff --git a/pkg/plugin/builder.go b/pkg/plugin/builder.go index 4bdb9c3f0..7cd0ddd27 100644 --- a/pkg/plugin/builder.go +++ b/pkg/plugin/builder.go @@ -67,11 +67,6 @@ func (b *Builder) Extensions(extensions []*Extension) *Builder { } func (b *Builder) Schema(schema *PropertySchemaID) *Builder { - if schema == nil { - b.p.schema = nil - } else { - sid := *schema - b.p.schema = &sid - } + b.p.schema = schema.CopyRef() return b } diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go index 4578ad30f..199bf016b 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ -79,91 +79,19 @@ func (w *Extension) SetDescription(des i18n.String) { w.description = des.Copy() } -type WidgetLayout struct { - horizontallyExtendable bool - verticallyExtendable bool - extended bool - floating bool - defaultLocation *WidgetLocation -} - -func (l WidgetLayout) Extendable(loc WidgetLocation) bool { - return l.HorizontallyExtendable() && loc.Horizontal() 
|| l.VerticallyExtendable() && loc.Vertical() -} - -func NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, floating bool, defaultLocation *WidgetLocation) WidgetLayout { - return WidgetLayout{ - horizontallyExtendable: horizontallyExtendable, - verticallyExtendable: verticallyExtendable, - extended: extended, - floating: floating, - defaultLocation: defaultLocation.CopyRef(), - } -} - -func (l WidgetLayout) Ref() *WidgetLayout { - return &l -} - -func (l WidgetLayout) HorizontallyExtendable() bool { - return l.horizontallyExtendable -} - -func (l WidgetLayout) VerticallyExtendable() bool { - return l.verticallyExtendable -} - -func (l WidgetLayout) Extended() bool { - return l.extended -} - -func (l WidgetLayout) Floating() bool { - return l.floating -} - -func (l WidgetLayout) DefaultLocation() *WidgetLocation { - if l.defaultLocation == nil { - return nil - } - return l.defaultLocation.CopyRef() -} - -type WidgetLocation struct { - Zone WidgetZoneType - Section WidgetSectionType - Area WidgetAreaType -} - -func (l WidgetLocation) Horizontal() bool { - return l.Section == WidgetSectionCenter -} - -func (l WidgetLocation) Vertical() bool { - return l.Area == WidgetAreaMiddle -} - -func (l *WidgetLocation) CopyRef() *WidgetLocation { - if l == nil { +func (w *Extension) Clone() *Extension { + if w == nil { return nil } - return &WidgetLocation{ - Zone: l.Zone, - Section: l.Section, - Area: l.Area, + return &Extension{ + id: w.id, + extensionType: w.extensionType, + name: w.name.Copy(), + description: w.description.Copy(), + icon: w.icon, + schema: w.schema.Clone(), + visualizer: w.visualizer, + singleOnly: w.singleOnly, + widgetLayout: w.widgetLayout.Clone(), } } - -type WidgetZoneType string -type WidgetSectionType string -type WidgetAreaType string - -const ( - WidgetZoneInner WidgetZoneType = "inner" - WidgetZoneOuter WidgetZoneType = "outer" - WidgetSectionLeft WidgetSectionType = "left" - WidgetSectionCenter WidgetSectionType = "center" - 
WidgetSectionRight WidgetSectionType = "right" - WidgetAreaTop WidgetAreaType = "top" - WidgetAreaMiddle WidgetAreaType = "middle" - WidgetAreaBottom WidgetAreaType = "bottom" -) diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index de85ae1e8..20c7f0f88 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -199,7 +199,7 @@ func (i *PropertySchema) schema(pluginID plugin.ID, idstr string) (*property.Sch // items items := make([]*property.SchemaGroup, 0, len(i.Groups)) for _, d := range i.Groups { - item, err := d.schemaGroup(psid) + item, err := d.schemaGroup() if err != nil { return nil, rerror.From(fmt.Sprintf("item (%s)", d.ID), err) } @@ -240,7 +240,7 @@ func (p *PropertyPointer) pointer() *property.Pointer { ) } -func (i PropertySchemaGroup) schemaGroup(sid property.SchemaID) (*property.SchemaGroup, error) { +func (i PropertySchemaGroup) schemaGroup() (*property.SchemaGroup, error) { title := i.Title var representativeField *property.FieldID if i.RepresentativeField != nil { @@ -259,7 +259,6 @@ func (i PropertySchemaGroup) schemaGroup(sid property.SchemaID) (*property.Schem return property.NewSchemaGroup(). ID(property.SchemaGroupID(i.ID)). - Schema(sid). IsList(i.List). Fields(fields). Title(i18n.StringFrom(title)). 
diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 26eb82e07..6101d40f3 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -183,10 +183,21 @@ func TestExtension(t *testing.T) { Type: "visualizer", Visualizer: &cesium, }, - sys: true, - pid: plugin.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeVisualizer).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypeVisualizer). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + Icon(i). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), }, { name: "primitive", @@ -198,10 +209,20 @@ func TestExtension(t *testing.T) { Type: "primitive", Visualizer: &cesium, }, - sys: true, - pid: plugin.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypePrimitive).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypePrimitive). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). 
+ MustBuild(), }, { name: "widget", @@ -213,10 +234,21 @@ func TestExtension(t *testing.T) { Type: "widget", SingleOnly: &tr, }, - sys: true, - pid: plugin.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeWidget).System(true).Description(i18n.StringFrom("ddd")).SingleOnly(true).MustBuild(), - expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer(""). + Type(plugin.ExtensionTypeWidget). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + SingleOnly(true). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), }, { name: "block", @@ -227,10 +259,19 @@ func TestExtension(t *testing.T) { Schema: nil, Type: "block", }, - sys: true, - pid: plugin.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("").Type(plugin.ExtensionTypeBlock).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium").Name(i18n.StringFrom("Cesium")). + Visualizer(""). + Type(plugin.ExtensionTypeBlock). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). 
+ MustBuild(), }, { name: "infobox", @@ -242,10 +283,20 @@ func TestExtension(t *testing.T) { Type: "infobox", Visualizer: &cesium, }, - sys: true, - pid: plugin.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeInfobox).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypeInfobox). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). + MustBuild(), }, { name: "cluster", @@ -257,10 +308,20 @@ func TestExtension(t *testing.T) { Type: "cluster", Visualizer: &cesium, }, - sys: true, - pid: plugin.OfficialPluginID, - expectedPE: plugin.NewExtension().ID("cesium").Name(i18n.StringFrom("Cesium")).Visualizer("cesium").Type(plugin.ExtensionTypeCluster).System(true).Description(i18n.StringFrom("ddd")).MustBuild(), - expectedPS: property.NewSchema().ID(property.MustSchemaID("reearth/cesium")).MustBuild(), + sys: true, + pid: plugin.OfficialPluginID, + expectedPE: plugin.NewExtension(). + ID("cesium"). + Name(i18n.StringFrom("Cesium")). + Visualizer("cesium"). + Type(plugin.ExtensionTypeCluster). + System(true). + Description(i18n.StringFrom("ddd")). + Schema(property.MustSchemaID("reearth/cesium")). + MustBuild(), + expectedPS: property.NewSchema(). + ID(property.MustSchemaID("reearth/cesium")). 
+ MustBuild(), }, { name: "empty visualizer", @@ -318,12 +379,8 @@ func TestExtension(t *testing.T) { t.Parallel() pe, ps, err := tc.ext.extension(tc.pid, tc.sys) if tc.err == "" { - assert.Equal(t, tc.expectedPE.ID(), pe.ID()) - assert.Equal(t, tc.expectedPE.Visualizer(), pe.Visualizer()) - assert.Equal(t, tc.expectedPE.Type(), pe.Type()) - assert.Equal(t, tc.expectedPE.Name(), pe.Name()) - assert.Equal(t, tc.expectedPS.ID(), ps.ID()) - assert.Equal(t, tc.expectedPS.ID(), ps.ID()) + assert.Equal(t, tc.expectedPE, pe) + assert.Equal(t, tc.expectedPS, ps) } else { assert.Equal(t, tc.err, err.Error()) } @@ -508,8 +565,22 @@ func TestSchema(t *testing.T) { Linkable: nil, Version: 0, }, - pid: plugin.OfficialPluginID, - expected: property.NewSchema().ID(property.MustSchemaID("reearth/marker")).Groups([]*property.SchemaGroup{property.NewSchemaGroup().ID("default").Schema(property.MustSchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild()}).MustBuild(), + pid: plugin.OfficialPluginID, + expected: property. + NewSchema(). + ID(property.MustSchemaID("reearth/marker")). + Groups([]*property.SchemaGroup{ + property.NewSchemaGroup(). + ID("default"). + Fields([]*property.SchemaField{ + property.NewSchemaField(). + ID("location"). + Type(property.ValueTypeLatLng). + MustBuild()}, + ). + MustBuild()}, + ). 
+ MustBuild(), }, } @@ -540,7 +611,6 @@ func TestSchemaGroup(t *testing.T) { tests := []struct { name string psg PropertySchemaGroup - sid property.SchemaID expected *property.SchemaGroup err string }{ @@ -567,8 +637,15 @@ func TestSchemaGroup(t *testing.T) { List: false, Title: "marker", }, - sid: property.MustSchemaID("reearth/cesium"), - expected: property.NewSchemaGroup().ID("default").Title(i18n.StringFrom("marker")).Title(i18n.StringFrom(str)).Schema(property.MustSchemaID("reearth/cesium")).Fields([]*property.SchemaField{property.NewSchemaField().ID("location").Type(property.ValueTypeLatLng).MustBuild()}).MustBuild(), + expected: property.NewSchemaGroup(). + ID("default"). + Title(i18n.StringFrom(str)). + Fields([]*property.SchemaField{ + property.NewSchemaField(). + ID("location"). + Type(property.ValueTypeLatLng). + MustBuild(), + }).MustBuild(), }, { name: "fail invalid schema field", @@ -593,7 +670,6 @@ func TestSchemaGroup(t *testing.T) { List: false, Title: "marker", }, - sid: property.MustSchemaID("reearth/cesium"), expected: nil, err: "field (location): invalid value type: xx", }, @@ -603,11 +679,10 @@ func TestSchemaGroup(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() - res, err := tc.psg.schemaGroup(tc.sid) + res, err := tc.psg.schemaGroup() if tc.err == "" { assert.Equal(t, tc.expected.Title().String(), res.Title().String()) assert.Equal(t, tc.expected.Title(), res.Title()) - assert.Equal(t, tc.expected.Schema(), res.Schema()) assert.Equal(t, len(tc.expected.Fields()), len(res.Fields())) if len(res.Fields()) > 0 { exf := res.Fields()[0] @@ -683,8 +758,14 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, - expected: property.NewSchemaField().ID("aaa").Prefix("xx").Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).Type(property.ValueTypeString).MustBuild(), - err: nil, + expected: property.NewSchemaField(). + ID("aaa"). + Prefix("xx"). + Name(i18n.StringFrom("")). 
+ Description(i18n.StringFrom("")). + Type(property.ValueTypeString). + MustBuild(), + err: nil, }, { name: "success suffix not nil", @@ -702,8 +783,14 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, - expected: property.NewSchemaField().ID("aaa").Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).Suffix("xx").Type(property.ValueTypeString).MustBuild(), - err: nil, + expected: property.NewSchemaField(). + ID("aaa"). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). + Suffix("xx"). + Type(property.ValueTypeString). + MustBuild(), + err: nil, }, { name: "success choices not empty", @@ -727,13 +814,19 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, - expected: property.NewSchemaField().ID("aaa").Choices([]property.SchemaFieldChoice{ - { - Key: "nnn", - Title: i18n.StringFrom("vvv"), - Icon: "aaa", - }, - }).Type(property.ValueTypeString).Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).MustBuild(), + expected: property.NewSchemaField(). + ID("aaa"). + Choices([]property.SchemaFieldChoice{ + { + Key: "nnn", + Title: i18n.StringFrom("vvv"), + Icon: "aaa", + }, + }). + Type(property.ValueTypeString). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). + MustBuild(), err: nil, }, { @@ -758,8 +851,14 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, - expected: property.NewSchemaField().ID("aaa").Choices([]property.SchemaFieldChoice{}).Type(property.ValueTypeString).Name(i18n.StringFrom("")).Description(i18n.StringFrom("")).MustBuild(), - err: nil, + expected: property.NewSchemaField(). + ID("aaa"). + Choices([]property.SchemaFieldChoice{}). + Type(property.ValueTypeString). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("")). 
+ MustBuild(), + err: nil, }, } diff --git a/pkg/plugin/manifest/manifest.go b/pkg/plugin/manifest/manifest.go index 32f5ab719..010fc1078 100644 --- a/pkg/plugin/manifest/manifest.go +++ b/pkg/plugin/manifest/manifest.go @@ -7,13 +7,24 @@ import ( type Manifest struct { Plugin *plugin.Plugin - ExtensionSchema []*property.Schema + ExtensionSchema property.SchemaList Schema *property.Schema } -func (m Manifest) PropertySchemas() []*property.Schema { - if m.Schema == nil { - return append([]*property.Schema{}, m.ExtensionSchema...) +func (m Manifest) PropertySchemas() property.SchemaList { + sl := append(property.SchemaList{}, m.ExtensionSchema...) + if m.Schema != nil { + sl = append(sl, m.Schema) } - return append(m.ExtensionSchema, m.Schema) + return sl +} + +func (m Manifest) PropertySchema(psid property.SchemaID) *property.Schema { + if psid.IsNil() { + return nil + } + if m.Schema != nil && psid.Equal(m.Schema.ID()) { + return m.Schema + } + return m.ExtensionSchema.Find(psid) } diff --git a/pkg/plugin/manifest/manifest_test.go b/pkg/plugin/manifest/manifest_test.go new file mode 100644 index 000000000..070d905cc --- /dev/null +++ b/pkg/plugin/manifest/manifest_test.go @@ -0,0 +1,104 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestManifest_PropertySchemas(t *testing.T) { + s1 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/aa")).MustBuild() + s2 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/bb")).MustBuild() + + tests := []struct { + name string + target Manifest + want property.SchemaList + }{ + { + name: "schema and extensions", + target: Manifest{ + Schema: s1, + ExtensionSchema: property.SchemaList{s2}, + }, + want: property.SchemaList{s2, s1}, + }, + { + name: "schema only", + target: Manifest{ + Schema: s1, + }, + want: property.SchemaList{s1}, + }, + { + name: "extensions only", + target: Manifest{ + ExtensionSchema: 
property.SchemaList{s2}, + }, + want: property.SchemaList{s2}, + }, + { + name: "empty", + target: Manifest{}, + want: property.SchemaList{}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.PropertySchemas()) + }) + } +} + +func TestManifest_PropertySchema(t *testing.T) { + s1 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/aa")).MustBuild() + s2 := property.NewSchema().ID(property.MustSchemaID("xx~1.0.0/bb")).MustBuild() + m := Manifest{ + Schema: s1, + ExtensionSchema: property.SchemaList{s2}, + } + + type args struct { + psid property.SchemaID + } + tests := []struct { + name string + target Manifest + args args + want *property.Schema + }{ + { + name: "schema", + target: m, + args: args{psid: s1.ID()}, + want: s1, + }, + { + name: "extension", + target: m, + args: args{psid: s2.ID()}, + want: s2, + }, + { + name: "empty", + target: Manifest{}, + args: args{psid: s2.ID()}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + res := tt.target.PropertySchema(tt.args.psid) + if tt.want == nil { + assert.Nil(t, res) + } else { + assert.Same(t, tt.want, res) + } + }) + } +} diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 9f85bd6f0..0d82e3742 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -32,7 +32,6 @@ var normalExpected = &Manifest{ ExtensionSchema: []*property.Schema{ property.NewSchema().ID(property.MustSchemaID("aaa~1.1.1/hoge")).Groups([]*property.SchemaGroup{ property.NewSchemaGroup().ID(property.SchemaGroupID("default")). - Schema(property.MustSchemaID("aaa~1.1.1/hoge")). RepresentativeField(property.FieldID("a").Ref()). Fields([]*property.SchemaField{ property.NewSchemaField().ID(property.FieldID("a")). 
diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index e9cbcab55..0a12d09da 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -17,30 +17,52 @@ type Plugin struct { } func (p *Plugin) ID() ID { + if p == nil { + return ID{} + } return p.id } func (p *Plugin) Version() semver.Version { + if p == nil { + return semver.Version{} + } return p.id.Version() } func (p *Plugin) Name() i18n.String { + if p == nil { + return nil + } return p.name.Copy() } func (p *Plugin) Author() string { + if p == nil { + return "" + } return p.author } func (p *Plugin) Description() i18n.String { + if p == nil { + return nil + } return p.description.Copy() } func (p *Plugin) RepositoryURL() string { + if p == nil { + return "" + } return p.repositoryURL } func (p *Plugin) Extensions() []*Extension { + if p == nil || len(p.extensions) == 0 { + return nil + } + if p.extensionOrder == nil { return []*Extension{} } @@ -64,6 +86,9 @@ func (p *Plugin) Extension(id ExtensionID) *Extension { } func (p *Plugin) Schema() *PropertySchemaID { + if p == nil { + return nil + } return p.schema } @@ -82,10 +107,46 @@ func (p *Plugin) PropertySchemas() []PropertySchemaID { return ps } +func (p *Plugin) Clone() *Plugin { + if p == nil { + return nil + } + + var extensions map[ExtensionID]*Extension + if p.extensions != nil { + extensions = make(map[ExtensionID]*Extension, len(p.extensions)) + for _, e := range p.extensions { + extensions[e.ID()] = e.Clone() + } + } + + var extensionOrder []ExtensionID + if p.extensionOrder != nil { + extensionOrder = append([]ExtensionID{}, p.extensionOrder...) 
+ } + + return &Plugin{ + id: p.id.Clone(), + name: p.name.Copy(), + author: p.author, + description: p.description.Copy(), + repositoryURL: p.repositoryURL, + extensions: extensions, + extensionOrder: extensionOrder, + schema: p.schema.CopyRef(), + } +} + func (p *Plugin) Rename(name i18n.String) { + if p == nil { + return + } p.name = name.Copy() } func (p *Plugin) SetDescription(des i18n.String) { + if p == nil { + return + } p.description = des.Copy() } diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index 89c771453..1917a58bb 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -99,3 +99,51 @@ func TestPlugin_Author(t *testing.T) { func TestPlugin_ID(t *testing.T) { assert.Equal(t, New().ID(MustID("xxx~1.1.1")).MustBuild().ID(), MustID("xxx~1.1.1")) } + +func TestPlugin_Clone(t *testing.T) { + tests := []struct { + name string + target *Plugin + }{ + { + name: "ok", + target: &Plugin{ + id: MustID("hoge~0.1.0"), + name: i18n.StringFrom("hoge"), + extensions: map[ExtensionID]*Extension{ + ExtensionID("foo"): { + id: ExtensionID("foo"), + extensionType: ExtensionTypeBlock, + schema: MustPropertySchemaID("hoge~0.1.0/foo"), + }, + ExtensionID("bar"): { + id: ExtensionID("bar"), + extensionType: ExtensionTypePrimitive, + schema: MustPropertySchemaID("hoge~0.1.0/bar"), + }, + }, + extensionOrder: []ExtensionID{"foo", "bar"}, + schema: MustPropertySchemaID("hoge~0.1.0/fff").Ref(), + }, + }, + { + name: "empty", + target: &Plugin{}, + }, + { + name: "nil", + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got := tt.target.Clone() + assert.Equal(t, tt.target, got) + if tt.target != nil { + assert.NotSame(t, tt.target, got) + } + }) + } +} diff --git a/pkg/plugin/widget.go b/pkg/plugin/widget.go new file mode 100644 index 000000000..9c8cc4906 --- /dev/null +++ b/pkg/plugin/widget.go @@ -0,0 +1,103 @@ +package plugin + +type WidgetZoneType string +type WidgetSectionType string 
+type WidgetAreaType string + +const ( + WidgetZoneInner WidgetZoneType = "inner" + WidgetZoneOuter WidgetZoneType = "outer" + WidgetSectionLeft WidgetSectionType = "left" + WidgetSectionCenter WidgetSectionType = "center" + WidgetSectionRight WidgetSectionType = "right" + WidgetAreaTop WidgetAreaType = "top" + WidgetAreaMiddle WidgetAreaType = "middle" + WidgetAreaBottom WidgetAreaType = "bottom" +) + +type WidgetLayout struct { + horizontallyExtendable bool + verticallyExtendable bool + extended bool + floating bool + defaultLocation *WidgetLocation +} + +func (l WidgetLayout) Extendable(loc WidgetLocation) bool { + return l.HorizontallyExtendable() && loc.Horizontal() || l.VerticallyExtendable() && loc.Vertical() +} + +func NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, floating bool, defaultLocation *WidgetLocation) WidgetLayout { + return WidgetLayout{ + horizontallyExtendable: horizontallyExtendable, + verticallyExtendable: verticallyExtendable, + extended: extended, + floating: floating, + defaultLocation: defaultLocation.Clone(), + } +} + +func (l WidgetLayout) Ref() *WidgetLayout { + return &l +} + +func (l WidgetLayout) HorizontallyExtendable() bool { + return l.horizontallyExtendable +} + +func (l WidgetLayout) VerticallyExtendable() bool { + return l.verticallyExtendable +} + +func (l WidgetLayout) Extended() bool { + return l.extended +} + +func (l WidgetLayout) Floating() bool { + return l.floating +} + +func (l WidgetLayout) DefaultLocation() *WidgetLocation { + if l.defaultLocation == nil { + return nil + } + return l.defaultLocation.Clone() +} + +func (l *WidgetLayout) Clone() *WidgetLayout { + if l == nil { + return nil + } + return &WidgetLayout{ + horizontallyExtendable: l.horizontallyExtendable, + verticallyExtendable: l.verticallyExtendable, + extended: l.extended, + floating: l.floating, + defaultLocation: l.defaultLocation.Clone(), + } +} + +type WidgetLocation struct { + Zone WidgetZoneType + Section 
WidgetSectionType + Area WidgetAreaType +} + +func (l WidgetLocation) Horizontal() bool { + return l.Section == WidgetSectionCenter +} + +func (l WidgetLocation) Vertical() bool { + return l.Area == WidgetAreaMiddle +} + +func (l *WidgetLocation) Clone() *WidgetLocation { + if l == nil { + return nil + } + return &WidgetLocation{ + Zone: l.Zone, + Section: l.Section, + Area: l.Area, + } +} diff --git a/pkg/project/builder.go b/pkg/project/builder.go index 79331c1af..47076aa91 100644 --- a/pkg/project/builder.go +++ b/pkg/project/builder.go @@ -95,6 +95,7 @@ func (b *Builder) ImageURL(imageURL *url.URL) *Builder { if imageURL == nil { b.p.imageURL = nil } else { + // https://github.com/golang/go/issues/38351 imageURL2 := *imageURL b.p.imageURL = &imageURL2 } diff --git a/pkg/project/project.go b/pkg/project/project.go index 72cbd8663..ea7f85fce 100644 --- a/pkg/project/project.go +++ b/pkg/project/project.go @@ -79,6 +79,7 @@ func (p *Project) ImageURL() *url.URL { if p == nil || p.imageURL == nil { return nil } + // https://github.com/golang/go/issues/38351 imageURL2 := *p.imageURL return &imageURL2 } @@ -143,6 +144,7 @@ func (p *Project) SetImageURL(imageURL *url.URL) { if imageURL == nil { p.imageURL = nil } else { + // https://github.com/golang/go/issues/38351 imageURL2 := *imageURL p.imageURL = &imageURL2 } diff --git a/pkg/property/builder.go b/pkg/property/builder.go index a43122623..17d8a2385 100644 --- a/pkg/property/builder.go +++ b/pkg/property/builder.go @@ -1,13 +1,5 @@ package property -import ( - "errors" -) - -var ( - ErrInvalidItem = errors.New("invalid item") -) - type Builder struct { p *Property } @@ -26,11 +18,6 @@ func (b *Builder) Build() (*Property, error) { if b.p.schema.IsNil() { return nil, ErrInvalidPropertySchemaID } - for _, i := range b.p.items { - if !i.Schema().Equal(b.p.schema) { - return nil, ErrInvalidItem - } - } return b.p, nil } @@ -68,7 +55,7 @@ func (b *Builder) Items(items []Item) *Builder { return b } - newItems := []Item{} 
+ newItems := make([]Item, 0, len(items)) ids := map[ItemID]struct{}{} for _, f := range items { if f == nil { @@ -80,6 +67,7 @@ func (b *Builder) Items(items []Item) *Builder { ids[f.ID()] = struct{}{} newItems = append(newItems, f) } + b.p.items = newItems return b } diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index e2e63930f..d17d6fc23 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -35,7 +35,6 @@ func TestBuilder_Scene(t *testing.T) { func TestBuilder_Items(t *testing.T) { iid := NewItemID() - propertySchemaID := MustSchemaID("xxx~1.1.1/aa") propertySchemaField1ID := FieldID("a") propertySchemaGroup1ID := SchemaGroupID("A") @@ -51,27 +50,24 @@ func TestBuilder_Items(t *testing.T) { { Name: "has duplicated item", Input: []Item{ - NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). - Build(), + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), }).MustBuild(), - NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). - Build(), + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), }).MustBuild(), }, - Expected: []Item{NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + Expected: []Item{NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). - Build(), + NewField(propertySchemaField1ID). 
+ Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). + MustBuild(), }).MustBuild()}, }, } @@ -98,85 +94,81 @@ func TestBuilder_Build(t *testing.T) { propertySchemaField1ID := FieldID("a") propertySchemaGroup1ID := SchemaGroupID("A") + type args struct { + ID ID + Scene SceneID + Schema SchemaID + Items []Item + } + tests := []struct { Name string - Id ID - Scene SceneID - Schema SchemaID - Items []Item + Args args Err error - Expected struct { - Id ID - Scene SceneID - Schema SchemaID - Items []Item - } + Expected *Property }{ { - Name: "success", - Id: pid, - Scene: sid, - Schema: scid, - Items: []Item{ - NewGroup().ID(iid).Schema(scid, propertySchemaGroup1ID). - Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). - Build(), - }).MustBuild()}, - Expected: struct { - Id ID - Scene SceneID - Schema SchemaID - Items []Item - }{ - Id: pid, + Name: "success", + Args: args{ + ID: pid, Scene: sid, Schema: scid, Items: []Item{ - NewGroup().ID(iid).Schema(scid, propertySchemaGroup1ID). - Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). 
- Build(), - }).MustBuild()}, + &Group{ + itemBase: itemBase{ + ID: iid, + SchemaGroup: propertySchemaGroup1ID, + }, + fields: []*Field{ + { + field: propertySchemaField1ID, + v: OptionalValueFrom(ValueTypeString.ValueFrom("xxx")), + }, + }, + }, + }, + }, + Expected: &Property{ + id: pid, + scene: sid, + schema: scid, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: iid, + SchemaGroup: propertySchemaGroup1ID, + }, + fields: []*Field{ + { + field: propertySchemaField1ID, + v: OptionalValueFrom(ValueTypeString.ValueFrom("xxx")), + }, + }, + }, + }, }, }, { - Name: "fail invalid id", - Id: ID{}, - Items: nil, - Err: ErrInvalidID, - }, - { - Name: "fail invalid scene", - Id: pid, - Items: nil, - Err: ErrInvalidSceneID, + Name: "fail invalid id", + Args: args{ + ID: ID{}, + }, + Err: ErrInvalidID, }, { - Name: "fail invalid schema", - Id: pid, - Scene: sid, - Items: nil, - Err: ErrInvalidPropertySchemaID, + Name: "fail invalid scene", + Args: args{ + ID: pid, + }, + Err: ErrInvalidSceneID, }, { - Name: "fail invalid item", - Id: pid, - Scene: sid, - Schema: scid, - Items: []Item{ - NewGroup().ID(iid).Schema(MustSchemaID("zzz~1.1.1/aa"), propertySchemaGroup1ID). - Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). - Build(), - }).MustBuild()}, - Err: ErrInvalidItem, + Name: "fail invalid schema", + Args: args{ + ID: pid, + Scene: sid, + }, + Err: ErrInvalidPropertySchemaID, }, } @@ -184,13 +176,17 @@ func TestBuilder_Build(t *testing.T) { tt := tt t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res, err := New().ID(tt.Id).Items(tt.Items).Scene(tt.Scene).Schema(tt.Schema).Build() + res, err := New(). + ID(tt.Args.ID). + Items(tt.Args.Items). + Scene(tt.Args.Scene). + Schema(tt.Args.Schema). 
+ Build() if tt.Err == nil { - assert.Equal(t, tt.Expected.Id, res.ID()) - assert.Equal(t, tt.Expected.Schema, res.Schema()) - assert.Equal(t, tt.Expected.Items, res.Items()) - assert.Equal(t, tt.Expected.Scene, res.Scene()) + assert.Nil(t, err) + assert.Equal(t, tt.Expected, res) } else { + assert.Nil(t, res) assert.Equal(t, tt.Err, err) } }) diff --git a/pkg/property/field.go b/pkg/property/field.go index ccf54c108..d0584d87c 100644 --- a/pkg/property/field.go +++ b/pkg/property/field.go @@ -21,6 +21,9 @@ type Field struct { } func (p *Field) Clone() *Field { + if p == nil { + return nil + } return &Field{ field: p.field, links: p.links.Clone(), @@ -29,6 +32,9 @@ func (p *Field) Clone() *Field { } func (p *Field) Field() FieldID { + if p == nil { + return FieldID("") + } return p.field } @@ -69,16 +75,12 @@ func (p *Field) ActualValue(ds *dataset.Dataset) *Value { return p.Value() } -func (p *Field) HasLinkedField() bool { - return p.Links().IsLinked() -} - -func (p *Field) CollectDatasets() []DatasetID { +func (p *Field) Datasets() []DatasetID { if p == nil { return nil } - res := []DatasetID{} + res := []DatasetID{} if p.Links().IsLinkedFully() { dsid := p.Links().Last().Dataset() if dsid != nil { @@ -90,7 +92,7 @@ func (p *Field) CollectDatasets() []DatasetID { } func (p *Field) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { - return p.Links().HasDatasetOrSchema(s, i) + return p.Links().HasDatasetSchemaAndDataset(s, i) } func (p *Field) Update(value *Value, field *SchemaField) error { @@ -111,6 +113,14 @@ func (p *Field) UpdateUnsafe(value *Value) { p.v.SetValue(value) } +func (p *Field) Cast(t ValueType) { + if p == nil || p.Type() == t { + return + } + p.v = p.v.Cast(t) + p.Unlink() +} + func (p *Field) Link(links *Links) { if p == nil { return @@ -119,10 +129,7 @@ func (p *Field) Link(links *Links) { } func (p *Field) Unlink() { - if p == nil { - return - } - p.links = nil + p.Link(nil) } func (p *Field) UpdateField(field FieldID) { diff --git 
a/pkg/property/field_builder.go b/pkg/property/field_builder.go index 7cfc3df9b..0f9593516 100644 --- a/pkg/property/field_builder.go +++ b/pkg/property/field_builder.go @@ -1,79 +1,64 @@ package property -type FieldBuilder struct { - p *Field - psf *SchemaField -} +import "fmt" -type FieldUnsafeBuilder struct { +type FieldBuilder struct { p *Field } -func NewField(p *SchemaField) *FieldBuilder { - b := &FieldBuilder{ - p: &Field{}, +func NewField(field FieldID) *FieldBuilder { + return &FieldBuilder{ + p: &Field{ + field: field, + }, } - return b.schemaField(p) } -func (b *FieldBuilder) Build() (*Field, error) { - if b.p.field == FieldID("") { - return nil, ErrInvalidID +func FieldFrom(sf *SchemaField) *FieldBuilder { + if sf == nil { + return NewField("") } - if b.psf != nil && !b.psf.Validate(b.p.v) { - return nil, ErrInvalidPropertyValue + return &FieldBuilder{ + p: &Field{ + field: sf.ID(), + v: NewOptionalValue(sf.Type(), nil), + }, } - return b.p, nil } -func (b *FieldBuilder) MustBuild() *Field { - p, err := b.Build() - if err != nil { - panic(err) +func (b *FieldBuilder) Build() *Field { + if b.p.field == "" || b.p.v == nil { + return nil } - return p + return b.p } -func (b *FieldBuilder) schemaField(p *SchemaField) *FieldBuilder { - if p != nil { - b.psf = p - b.p.field = p.ID() - b.p.v = NewOptionalValue(p.Type(), p.DefaultValue().Clone()) +func (b *FieldBuilder) MustBuild() *Field { + f := b.Build() + if f == nil { + panic(fmt.Sprintf("field ID or type is invalid: id=%s, type=%s", b.p.field, b.p.v.Type())) } - return b + return f } -func (b *FieldBuilder) Value(v *OptionalValue) *FieldBuilder { - b.p.v = v.Clone() +func (b *FieldBuilder) Field(field FieldID) *FieldBuilder { + b.p.field = field return b } -func (b *FieldBuilder) Link(l *Links) *FieldBuilder { - b.p.links = l.Clone() +func (b *FieldBuilder) Value(v *OptionalValue) *FieldBuilder { + b.p.v = v.Clone() return b } -func NewFieldUnsafe() *FieldUnsafeBuilder { - return &FieldUnsafeBuilder{ - 
p: &Field{}, +func (b *FieldBuilder) Type(t ValueType) *FieldBuilder { + if b.p.v.Type() != t { + b.p.v = NewOptionalValue(t, nil) } -} - -func (b *FieldUnsafeBuilder) Build() *Field { - return b.p -} - -func (b *FieldUnsafeBuilder) FieldUnsafe(f FieldID) *FieldUnsafeBuilder { - b.p.field = f - return b -} - -func (b *FieldUnsafeBuilder) ValueUnsafe(v *OptionalValue) *FieldUnsafeBuilder { - b.p.v = v.Clone() return b } -func (b *FieldUnsafeBuilder) LinksUnsafe(l *Links) *FieldUnsafeBuilder { +func (b *FieldBuilder) Links(l *Links) *FieldBuilder { b.p.links = l.Clone() return b } diff --git a/pkg/property/field_builder_test.go b/pkg/property/field_builder_test.go index ee3c42a66..f6041dfb9 100644 --- a/pkg/property/field_builder_test.go +++ b/pkg/property/field_builder_test.go @@ -9,7 +9,7 @@ import ( func TestFieldBuilder_Value(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - b := NewField(p).Value(OptionalValueFrom(v)).MustBuild() + b := FieldFrom(p).Value(OptionalValueFrom(v)).Build() assert.Equal(t, v, b.Value()) } @@ -17,7 +17,7 @@ func TestFieldBuilder_Link(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - b := NewField(p).Link(ls).MustBuild() + b := FieldFrom(p).Links(ls).Build() assert.Equal(t, ls, b.Links()) } @@ -26,37 +26,24 @@ func TestFieldBuilder_Build(t *testing.T) { type args struct { Links *Links - Value *Value - Field *SchemaField - Type ValueType + Field FieldID + Value *OptionalValue } tests := []struct { Name string Args args Expected *Field - Err error }{ { Name: "fail invalid property id", - Err: ErrInvalidID, - }, - { - Name: "fail invalid property value", - Args: args{ - Field: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), - Type: ValueTypeString, - Value: ValueTypeString.ValueFrom("vvv"), - }, - Err: ErrInvalidPropertyValue, }, { 
Name: "success", Args: args{ - Field: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), + Field: "A", Links: NewLinks([]*Link{l}), - Type: ValueTypeString, - Value: ValueTypeString.ValueFrom("vvv"), + Value: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), }, Expected: &Field{ field: "A", @@ -70,14 +57,11 @@ func TestFieldBuilder_Build(t *testing.T) { tt := tt t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res, err := NewField(tt.Args.Field). - Value(NewOptionalValue(tt.Args.Type, tt.Args.Value)). - Link(tt.Args.Links).Build() - if tt.Err == nil { - assert.Equal(t, tt.Expected, res) - } else { - assert.Equal(t, tt.Err, err) - } + res := NewField(tt.Args.Field). + Value(tt.Args.Value). + Links(tt.Args.Links). + Build() + assert.Equal(t, tt.Expected, res) }) } } @@ -87,37 +71,24 @@ func TestFieldBuilder_MustBuild(t *testing.T) { type args struct { Links *Links - Value *Value - Field *SchemaField - Type ValueType + Field FieldID + Value *OptionalValue } tests := []struct { Name string Args args Expected *Field - Err string }{ { Name: "fail invalid property id", - Err: ErrInvalidID.Error(), - }, - { - Name: "fail invalid property value", - Args: args{ - Field: NewSchemaField().ID("A").Type(ValueTypeBool).MustBuild(), - Type: ValueTypeString, - Value: ValueTypeString.ValueFrom("vvv"), - }, - Err: ErrInvalidPropertyValue.Error(), }, { Name: "success", Args: args{ - Field: NewSchemaField().ID("A").Type(ValueTypeString).MustBuild(), + Field: "A", Links: NewLinks([]*Link{l}), - Type: ValueTypeString, - Value: ValueTypeString.ValueFrom("vvv"), + Value: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), }, Expected: &Field{ field: "A", @@ -133,82 +104,17 @@ func TestFieldBuilder_MustBuild(t *testing.T) { t.Parallel() build := func() *Field { - t.Helper() return NewField(tt.Args.Field). - Value(NewOptionalValue(tt.Args.Type, tt.Args.Value)). - Link(tt.Args.Links). + Value(tt.Args.Value). + Links(tt.Args.Links). 
MustBuild() } - if tt.Err != "" { - assert.PanicsWithError(t, tt.Err, func() { _ = build() }) + if tt.Expected == nil { + assert.Panics(t, func() { _ = build() }) } else { assert.Equal(t, tt.Expected, build()) } }) } } - -func TestNewFieldUnsafe(t *testing.T) { - p := NewFieldUnsafe().Build() - assert.NotNil(t, p) -} - -func TestFieldUnsafeBuilder_Build(t *testing.T) { - l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) - - type args struct { - Links *Links - Value *Value - Field FieldID - Type ValueType - } - - tests := []struct { - Name string - Args args - Expected *Field - }{ - { - Name: "success", - Args: args{ - Links: NewLinks([]*Link{l}), - Value: ValueTypeString.ValueFrom("vvv"), - Type: ValueTypeString, - Field: "a", - }, - Expected: &Field{ - field: "a", - links: NewLinks([]*Link{l}), - v: OptionalValueFrom(ValueTypeString.ValueFrom("vvv")), - }, - }, - { - Name: "nil value", - Args: args{ - Links: NewLinks([]*Link{l}), - Value: nil, - Type: ValueTypeString, - Field: "a", - }, - Expected: &Field{ - field: "a", - links: NewLinks([]*Link{l}), - v: NewOptionalValue(ValueTypeString, nil), - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.Name, func(t *testing.T) { - t.Parallel() - res := NewFieldUnsafe(). - ValueUnsafe(NewOptionalValue(tt.Args.Type, tt.Args.Value)). - LinksUnsafe(tt.Args.Links). - FieldUnsafe(tt.Args.Field). - Build() - assert.Equal(t, tt.Expected, res) - }) - } -} diff --git a/pkg/property/field_test.go b/pkg/property/field_test.go index 54db81afc..b842a9b16 100644 --- a/pkg/property/field_test.go +++ b/pkg/property/field_test.go @@ -22,18 +22,26 @@ func TestField_ActualValue(t *testing.T) { Expected *Value }{ { - Name: "nil links", - Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).MustBuild(), + Name: "nil links", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). 
+ MustBuild(), Expected: ValueTypeString.ValueFrom("vvv"), }, { - Name: "nil last link", - Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(&Links{}).MustBuild(), + Name: "nil last link", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + Links(&Links{}). + MustBuild(), Expected: nil, }, { - Name: "dataset value", - Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild(), + Name: "dataset value", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + Links(ls). + MustBuild(), DS: dataset.New(). ID(dsid).Schema(dssid). Fields([]*dataset.Field{ @@ -54,7 +62,7 @@ func TestField_ActualValue(t *testing.T) { } } -func TestField_CollectDatasets(t *testing.T) { +func TestField_Datasets(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() dsid := NewDatasetID() dssid := NewDatasetSchemaID() @@ -68,8 +76,11 @@ func TestField_CollectDatasets(t *testing.T) { Expected []DatasetID }{ { - Name: "list of one datasets", - Field: NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild(), + Name: "list of one datasets", + Field: FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + Links(ls). + MustBuild(), Expected: []DatasetID{dsid}, }, { @@ -82,7 +93,7 @@ func TestField_CollectDatasets(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - res := tc.Field.CollectDatasets() + res := tc.Field.Datasets() assert.Equal(t, tc.Expected, res) }) } @@ -92,7 +103,10 @@ func TestField_Clone(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - b := NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Link(ls).MustBuild() + b := FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). 
+ Links(ls). + MustBuild() r := b.Clone() assert.Equal(t, b, r) } @@ -101,20 +115,84 @@ func TestField(t *testing.T) { did := NewDatasetID() dsid := NewDatasetSchemaID() p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - b := NewField(p).MustBuild() + b := FieldFrom(p).MustBuild() assert.True(t, b.IsEmpty()) l := NewLink(did, dsid, NewDatasetFieldID()) ls := NewLinks([]*Link{l}) b.Link(ls) assert.True(t, b.IsDatasetLinked(dsid, did)) b.Unlink() - assert.False(t, b.HasLinkedField()) + assert.Nil(t, b.Links()) } func TestField_Update(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - b := NewField(p).Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).MustBuild() + b := FieldFrom(p). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). + MustBuild() v := ValueTypeString.ValueFrom("xxx") b.UpdateUnsafe(v) assert.Equal(t, v, b.Value()) } + +func TestField_Cast(t *testing.T) { + dgp := NewLinks([]*Link{ + NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()), + }) + + type args struct { + t ValueType + } + tests := []struct { + name string + target *Field + args args + want *Field + }{ + { + name: "ok", + target: &Field{ + field: FieldID("foobar"), + v: OptionalValueFrom(ValueTypeString.ValueFrom("-123")), + links: dgp.Clone(), + }, + args: args{t: ValueTypeNumber}, + want: &Field{ + field: FieldID("foobar"), + v: OptionalValueFrom(ValueTypeNumber.ValueFrom(-123)), + }, + }, + { + name: "failed", + target: &Field{ + field: FieldID("foobar"), + v: OptionalValueFrom(ValueTypeString.ValueFrom("foo")), + links: dgp.Clone(), + }, + args: args{t: ValueTypeLatLng}, + want: &Field{ + field: FieldID("foobar"), + v: NewOptionalValue(ValueTypeLatLng, nil), + }, + }, + { + name: "empty", + target: &Field{}, + args: args{t: ValueTypeNumber}, + want: &Field{}, + }, + { + name: "nil", + target: nil, + args: args{t: ValueTypeNumber}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + tt.target.Cast(tt.args.t) + assert.Equal(t, tt.want, tt.target) + }) + } +} diff --git a/pkg/property/group.go b/pkg/property/group.go index cc1ddc106..ef96e5888 100644 --- a/pkg/property/group.go +++ b/pkg/property/group.go @@ -45,40 +45,26 @@ func (g *Group) SchemaGroupRef() *SchemaGroupID { return g.itemBase.SchemaGroup.Ref() } -func (g *Group) Schema() SchemaID { - if g == nil { - return SchemaID{} - } - return g.itemBase.Schema -} - -func (g *Group) SchemaRef() *SchemaID { - if g == nil { - return nil - } - return g.itemBase.Schema.Ref() -} - func (g *Group) HasLinkedField() bool { if g == nil { return false } for _, f := range g.fields { - if f.HasLinkedField() { + if f.Links().IsLinked() { return true } } return false } -func (g *Group) CollectDatasets() []DatasetID { +func (g *Group) Datasets() []DatasetID { if g == nil { return nil } res := []DatasetID{} for _, f := range g.fields { - res = append(res, f.CollectDatasets()...) + res = append(res, f.Datasets()...) } return res @@ -90,7 +76,7 @@ func (g *Group) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { } res := []*Field{} for _, f := range g.fields { - if f.Links().IsDatasetLinked(s, i) { + if f.Links().HasSchemaAndDataset(s, i) { res = append(res, f) } } @@ -137,8 +123,6 @@ func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset return } - g.itemBase.Schema = newSchema.ID() - for _, f := range g.fields { if !f.MigrateSchema(ctx, newSchema, dl) { g.RemoveField(f.Field()) @@ -149,7 +133,7 @@ func (g *Group) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset } func (g *Group) GetOrCreateField(ps *Schema, fid FieldID) (*Field, bool) { - if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { + if g == nil || ps == nil { return nil, false } psg := ps.Group(g.SchemaGroup()) @@ -169,7 +153,7 @@ func (g *Group) GetOrCreateField(ps *Schema, fid FieldID) (*Field, bool) { } // if the field does not exist, create it here - field, _ = 
NewField(psf).Build() + field = FieldFrom(psf).Type(psf.Type()).Build() if field == nil { return nil, false } @@ -232,7 +216,7 @@ func (g *Group) MigrateDataset(q DatasetMigrationParam) { } func (g *Group) RepresentativeField(schema *Schema) *Field { - if g == nil || schema == nil || !g.Schema().Equal(schema.ID()) { + if g == nil || schema == nil { return nil } if psg := schema.GroupByPointer(NewPointer(&g.itemBase.SchemaGroup, nil, nil)); psg != nil { @@ -252,9 +236,6 @@ func (p *Group) ValidateSchema(ps *SchemaGroup) error { if ps == nil { return errors.New("invalid schema") } - if !p.Schema().Equal(ps.Schema()) { - return errors.New("invalid schema id") - } if p.SchemaGroup() != ps.ID() { return errors.New("invalid schema group id") } diff --git a/pkg/property/group_builder.go b/pkg/property/group_builder.go index 0c65712ce..8212cbff3 100644 --- a/pkg/property/group_builder.go +++ b/pkg/property/group_builder.go @@ -14,7 +14,7 @@ func InitGroupFrom(g *SchemaGroup) *Group { if g == nil { return nil } - g2, _ := NewGroup().NewID().Schema(g.Schema(), g.ID()).Build() + g2, _ := NewGroup().NewID().SchemaGroup(g.ID()).Build() return g2 } @@ -48,8 +48,7 @@ func (b *GroupBuilder) NewID() *GroupBuilder { return b } -func (b *GroupBuilder) Schema(s SchemaID, g SchemaGroupID) *GroupBuilder { - b.p.itemBase.Schema = s +func (b *GroupBuilder) SchemaGroup(g SchemaGroupID) *GroupBuilder { b.p.itemBase.SchemaGroup = g return b } diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index 6c1211a90..4ea298e67 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -8,14 +8,12 @@ import ( func TestGroupBuilder_Build(t *testing.T) { iid := NewItemID() - sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() type 
args struct { ID ItemID - Schema SchemaID SchemaGroup SchemaGroupID Fields []*Field } @@ -34,14 +32,12 @@ func TestGroupBuilder_Build(t *testing.T) { Name: "success", Args: args{ ID: iid, - Schema: sid, SchemaGroup: "a", Fields: []*Field{f}, }, Expected: &Group{ itemBase: itemBase{ ID: iid, - Schema: sid, SchemaGroup: "a", }, fields: []*Field{f}, @@ -56,7 +52,7 @@ func TestGroupBuilder_Build(t *testing.T) { res, err := NewGroup(). ID(tt.Args.ID). Fields(tt.Args.Fields). - Schema(tt.Args.Schema, tt.Args.SchemaGroup). + SchemaGroup(tt.Args.SchemaGroup). Build() if tt.Err == nil { assert.Equal(t, tt.Expected, res) @@ -69,14 +65,12 @@ func TestGroupBuilder_Build(t *testing.T) { func TestGroupBuilder_MustBuild(t *testing.T) { iid := NewItemID() - sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() type args struct { ID ItemID - Schema SchemaID SchemaGroup SchemaGroupID Fields []*Field } @@ -95,14 +89,12 @@ func TestGroupBuilder_MustBuild(t *testing.T) { Name: "success", Args: args{ ID: iid, - Schema: sid, SchemaGroup: "a", Fields: []*Field{f}, }, Expected: &Group{ itemBase: itemBase{ ID: iid, - Schema: sid, SchemaGroup: "a", }, fields: []*Field{f}, @@ -120,7 +112,7 @@ func TestGroupBuilder_MustBuild(t *testing.T) { return NewGroup(). ID(tt.Args.ID). Fields(tt.Args.Fields). - Schema(tt.Args.Schema, tt.Args.SchemaGroup). + SchemaGroup(tt.Args.SchemaGroup). 
MustBuild() } @@ -141,8 +133,7 @@ func TestGroupBuilder_NewID(t *testing.T) { func TestGroupBuilder_InitGroupFrom(t *testing.T) { var sg *SchemaGroup assert.Nil(t, InitGroupFrom(sg)) - sg = NewSchemaGroup().ID("a").Schema(MustSchemaID("xx~1.0.0/aa")).MustBuild() + sg = NewSchemaGroup().ID("a").MustBuild() g := InitGroupFrom(sg) assert.Equal(t, sg.ID(), g.SchemaGroup()) - assert.Equal(t, sg.Schema(), g.Schema()) } diff --git a/pkg/property/group_list.go b/pkg/property/group_list.go index 7eeec815d..be258ee7f 100644 --- a/pkg/property/group_list.go +++ b/pkg/property/group_list.go @@ -47,20 +47,6 @@ func (g *GroupList) SchemaGroupRef() *SchemaGroupID { return g.itemBase.SchemaGroup.Ref() } -func (g *GroupList) Schema() SchemaID { - if g == nil { - return SchemaID{} - } - return g.itemBase.Schema -} - -func (g *GroupList) SchemaRef() *SchemaID { - if g == nil { - return nil - } - return g.itemBase.Schema.Ref() -} - func (g *GroupList) HasLinkedField() bool { if g == nil { return false @@ -73,14 +59,14 @@ func (g *GroupList) HasLinkedField() bool { return false } -func (g *GroupList) CollectDatasets() []DatasetID { +func (g *GroupList) Datasets() []DatasetID { if g == nil { return nil } res := []DatasetID{} for _, f := range g.groups { - res = append(res, f.CollectDatasets()...) + res = append(res, f.Datasets()...) } return res @@ -127,8 +113,6 @@ func (g *GroupList) MigrateSchema(ctx context.Context, newSchema *Schema, dl dat return } - g.itemBase.Schema = newSchema.ID() - for _, f := range g.groups { f.MigrateSchema(ctx, newSchema, dl) } @@ -144,8 +128,8 @@ func (g *GroupList) Groups() []*Group { return append([]*Group{}, g.groups...) 
} -// GetGroup returns a group whose id is specified -func (g *GroupList) GetGroup(gid ItemID) *Group { +// Group returns a group whose id is specified +func (g *GroupList) Group(gid ItemID) *Group { if g == nil { return nil } @@ -294,7 +278,7 @@ func (g *GroupList) Empty() { } func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { - if g == nil || ptr == nil || ps == nil || ps.ID() != g.Schema() { + if g == nil || ptr == nil || ps == nil { return nil, false } psg := ps.Group(g.SchemaGroup()) @@ -307,7 +291,7 @@ func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { return nil, false } - i := g.GetGroup(item) + i := g.Group(item) if i == nil { return nil, false } @@ -316,7 +300,7 @@ func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { } func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { - if g == nil || ps == nil || !g.Schema().Equal(ps.ID()) { + if g == nil || ps == nil { return nil } psg := ps.Group(g.SchemaGroup()) @@ -353,9 +337,6 @@ func (p *GroupList) ValidateSchema(ps *SchemaGroup) error { if ps == nil { return errors.New("invalid schema") } - if !p.Schema().Equal(ps.Schema()) { - return errors.New("invalid schema id") - } if p.SchemaGroup() != ps.ID() { return errors.New("invalid schema group id") } diff --git a/pkg/property/group_list_builder.go b/pkg/property/group_list_builder.go index 1affcd1d8..5573d851e 100644 --- a/pkg/property/group_list_builder.go +++ b/pkg/property/group_list_builder.go @@ -14,7 +14,7 @@ func InitGroupListFrom(g *SchemaGroup) *GroupList { if g == nil || !g.IsList() { return nil } - g2, _ := NewGroupList().NewID().Schema(g.Schema(), g.ID()).Build() + g2, _ := NewGroupList().NewID().SchemaGroup(g.ID()).Build() return g2 } @@ -48,8 +48,7 @@ func (b *GroupListBuilder) NewID() *GroupListBuilder { return b } -func (b *GroupListBuilder) Schema(s SchemaID, g SchemaGroupID) *GroupListBuilder { - b.p.itemBase.Schema = s +func (b 
*GroupListBuilder) SchemaGroup(g SchemaGroupID) *GroupListBuilder { b.p.itemBase.SchemaGroup = g return b } diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go index 05e0de8ef..7438d0f5c 100644 --- a/pkg/property/group_list_builder_test.go +++ b/pkg/property/group_list_builder_test.go @@ -8,12 +8,10 @@ import ( func TestGroupListBuilder_Build(t *testing.T) { pid := NewItemID() - scid := MustSchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} type args struct { ID ItemID - Schema SchemaID SchemaGroup SchemaGroupID Groups []*Group } @@ -28,14 +26,12 @@ func TestGroupListBuilder_Build(t *testing.T) { Name: "success", Args: args{ ID: pid, - Schema: scid, SchemaGroup: "aa", Groups: groups, }, Expected: &GroupList{ itemBase: itemBase{ ID: pid, - Schema: scid, SchemaGroup: "aa", }, groups: groups, @@ -53,7 +49,7 @@ func TestGroupListBuilder_Build(t *testing.T) { t.Parallel() res, err := NewGroupList(). ID(tt.Args.ID). - Schema(tt.Args.Schema, tt.Args.SchemaGroup). + SchemaGroup(tt.Args.SchemaGroup). Groups(tt.Args.Groups). Build() if tt.Err == nil { @@ -72,12 +68,10 @@ func TestGroupListBuilder_NewID(t *testing.T) { func TestGroupListBuilder_MustBuild(t *testing.T) { pid := NewItemID() - scid := MustSchemaID("xx~1.0.0/aa") groups := []*Group{NewGroup().ID(pid).MustBuild()} type args struct { ID ItemID - Schema SchemaID SchemaGroup SchemaGroupID Groups []*Group } @@ -92,14 +86,12 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { Name: "success", Args: args{ ID: pid, - Schema: scid, SchemaGroup: "aa", Groups: groups, }, Expected: &GroupList{ itemBase: itemBase{ ID: pid, - Schema: scid, SchemaGroup: "aa", }, groups: groups, @@ -120,7 +112,7 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { t.Helper() return NewGroupList(). ID(tc.Args.ID). - Schema(tc.Args.Schema, tc.Args.SchemaGroup). + SchemaGroup(tc.Args.SchemaGroup). Groups(tc.Args.Groups). 
MustBuild() } @@ -136,19 +128,17 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { func TestInitGroupListFrom(t *testing.T) { tests := []struct { - Name string - SchemaGroup *SchemaGroup - ExpectedSG SchemaGroupID - ExpectedSchema SchemaID + Name string + SchemaGroup *SchemaGroup + ExpectedSG SchemaGroupID }{ { Name: "nil schema group", }, { - Name: "success", - SchemaGroup: NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).MustBuild(), - ExpectedSG: "aa", - ExpectedSchema: MustSchemaID("xx~1.0.0/aa"), + Name: "success", + SchemaGroup: NewSchemaGroup().ID("aa").MustBuild(), + ExpectedSG: "aa", }, } @@ -158,7 +148,6 @@ func TestInitGroupListFrom(t *testing.T) { t.Parallel() res := InitGroupFrom(tc.SchemaGroup) assert.Equal(t, tc.ExpectedSG, res.SchemaGroup()) - assert.Equal(t, tc.ExpectedSchema, res.Schema()) }) } } diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index 98dd0fccf..d2f5a7294 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -25,7 +25,7 @@ func TestGroupList_SchemaRef(t *testing.T) { }, { Name: "success", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), SchemaGroupID("xx")).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup(SchemaGroupID("xx")).MustBuild(), ExpectedSG: SchemaGroupID("xx").Ref(), ExpectedSchema: MustSchemaID("xx~1.0.0/aa").Ref(), }, @@ -36,7 +36,6 @@ func TestGroupList_SchemaRef(t *testing.T) { t.Run(tc.Name, func(t *testing.T) { t.Parallel() assert.Equal(t, tc.ExpectedSG, tc.GL.SchemaGroupRef()) - assert.Equal(t, tc.ExpectedSchema, tc.GL.SchemaRef()) }) } } @@ -47,7 +46,10 @@ func TestGroupList_HasLinkedField(t *testing.T) { v := ValueTypeString.ValueFrom("vvv") dsid := NewDatasetID() dssid := NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). 
+ Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). + MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} @@ -61,12 +63,12 @@ func TestGroupList_HasLinkedField(t *testing.T) { }, { Name: "has linked field", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), Expected: true, }, { Name: "no linked field", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups2).MustBuild(), Expected: false, }, } @@ -81,13 +83,16 @@ func TestGroupList_HasLinkedField(t *testing.T) { } } -func TestGroupList_CollectDatasets(t *testing.T) { +func TestGroupList_Datasets(t *testing.T) { pid := NewItemID() sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") dsid := NewDatasetID() dssid := NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
+ MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} @@ -101,12 +106,12 @@ func TestGroupList_CollectDatasets(t *testing.T) { }, { Name: "one dataset", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), Expected: []DatasetID{dsid}, }, { Name: "empty list", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups2).MustBuild(), Expected: []DatasetID{}, }, } @@ -115,7 +120,7 @@ func TestGroupList_CollectDatasets(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.Expected, tc.GL.CollectDatasets()) + assert.Equal(t, tc.Expected, tc.GL.Datasets()) }) } } @@ -126,7 +131,10 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { v := ValueTypeString.ValueFrom("vvv") dsid := NewDatasetID() dssid := NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
+ MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} groups2 := []*Group{NewGroup().ID(pid).MustBuild()} @@ -140,12 +148,12 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { }, { Name: "one field list", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), Expected: []*Field{f}, }, { Name: "empty list", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups2).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups2).MustBuild(), Expected: []*Field{}, }, } @@ -165,7 +173,10 @@ func TestGroupList_IsEmpty(t *testing.T) { v := ValueTypeString.ValueFrom("vvv") dsid := NewDatasetID() dssid := NewDatasetSchemaID() - f := NewField(sf).Value(OptionalValueFrom(v)).Link(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}).MustBuild() + f := FieldFrom(sf). + Value(OptionalValueFrom(v)). + Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
+ MustBuild() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} tests := []struct { @@ -178,12 +189,12 @@ func TestGroupList_IsEmpty(t *testing.T) { }, { Name: "is empty", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").MustBuild(), Expected: true, }, { Name: "is not empty", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), Expected: false, }, } @@ -200,8 +211,8 @@ func TestGroupList_IsEmpty(t *testing.T) { func TestGroupList_Prune(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - f2 := NewField(sf).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf).MustBuild() pid := NewItemID() groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f, f2}).MustBuild()} pruned := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} @@ -216,7 +227,7 @@ func TestGroupList_Prune(t *testing.T) { }, { Name: "pruned list", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups(groups).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups(groups).MustBuild(), Expected: pruned, }, } @@ -231,7 +242,7 @@ func TestGroupList_Prune(t *testing.T) { } } -func TestGroupList_GetGroup(t *testing.T) { +func TestGroupList_Group(t *testing.T) { pid := NewItemID() g := NewGroup().ID(pid).MustBuild() @@ -247,13 +258,13 @@ func TestGroupList_GetGroup(t *testing.T) { { Name: "found", Input: pid, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g}).MustBuild(), Expected: g, }, { Name: "not found", Input: NewItemID(), - GL: 
NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g}).MustBuild(), Expected: nil, }, } @@ -262,7 +273,7 @@ func TestGroupList_GetGroup(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.Expected, tc.GL.GetGroup(tc.Input)) + assert.Equal(t, tc.Expected, tc.GL.Group(tc.Input)) }) } } @@ -293,7 +304,7 @@ func TestGroupList_GroupAt(t *testing.T) { { Name: "found", Index: 2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: g3, }, } @@ -325,13 +336,13 @@ func TestGroupList_Has(t *testing.T) { { Name: "found", Input: g2.ID(), - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: true, }, { Name: "not found", Input: g3.ID(), - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g4}).MustBuild(), Expected: false, }, } @@ -361,7 +372,7 @@ func TestGroupList_Count(t *testing.T) { }, { Name: "not found", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: 4, }, } @@ -398,7 +409,7 @@ func TestGroupList_Add(t *testing.T) { Name: "index < 0", Index: -1, Gr: g2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ 
-411,7 +422,7 @@ func TestGroupList_Add(t *testing.T) { Name: "len(g) > index > 0 ", Index: 2, Gr: g2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -455,7 +466,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "index < 0", Index: -1, Gr: g2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -468,7 +479,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "len(g) > index > 0 ", Index: 2, Gr: g2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -481,7 +492,7 @@ func TestGroupList_AddOrMove(t *testing.T) { Name: "move group", Index: 2, Gr: g1, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g3, g4}).MustBuild(), Expected: struct { Gr *Group Index int @@ -525,7 +536,7 @@ func TestGroupList_Move(t *testing.T) { Name: "success", Id: g1.ID(), ToIndex: 2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: struct { Id ItemID Index int @@ -562,21 +573,21 @@ func TestGroupList_MoveAt(t *testing.T) { Name: "from = to", FromIndex: 2, ToIndex: 2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: 
NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "from < 0", FromIndex: -1, ToIndex: 2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "success move", FromIndex: 0, ToIndex: 2, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g2, g3, g1, g4}, }, } @@ -609,19 +620,19 @@ func TestGroupList_RemoveAt(t *testing.T) { { Name: "success", Index: 1, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g3, g4}, }, { Name: "index < 0", Index: -1, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, { Name: "index > length", Index: 5, - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: []*Group{g1, g2, g3, g4}, }, } @@ -653,13 +664,13 @@ func TestGroupList_Remove(t *testing.T) { { Name: "success", Input: g1.ID(), - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), Expected: true, }, { Name: "not found", Input: g4.ID(), - 
GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "xx").Groups([]*Group{g1, g2, g3}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3}).MustBuild(), Expected: false, }, } @@ -676,41 +687,34 @@ func TestGroupList_Remove(t *testing.T) { func TestGroupList_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - g := NewGroup().ID(NewItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(NewItemID()).SchemaGroup(sg.ID()).MustBuild() tests := []struct { - Name string - GL *GroupList - Schema *Schema - Ptr *Pointer - Expected struct { - Ok bool - Field *Field - } + Name string + GL *GroupList + Schema *Schema + Ptr *Pointer + Expected *Field + ExpectedOK bool }{ { - Name: "success", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), - Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), - Expected: struct { - Ok bool - Field *Field - }{ - Ok: true, - Field: NewField(sf).MustBuild(), - }, + Name: "success", + GL: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), + Expected: FieldFrom(sf).MustBuild(), + ExpectedOK: true, }, { Name: "can't get a group", - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, { Name: "FieldByItem not ok: sg!=nil", - GL: 
NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").Groups([]*Group{g}).MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Ptr: NewPointer(sg.IDRef(), g.IDRef(), sf.ID().Ref()), }, @@ -727,8 +731,8 @@ func TestGroupList_GetOrCreateField(t *testing.T) { t.Run(tt.Name, func(t *testing.T) { t.Parallel() res, ok := tt.GL.GetOrCreateField(tt.Schema, tt.Ptr) - assert.Equal(t, tt.Expected.Field, res) - assert.Equal(t, tt.Expected.Ok, ok) + assert.Equal(t, tt.Expected, res) + assert.Equal(t, tt.ExpectedOK, ok) }) } } @@ -736,8 +740,8 @@ func TestGroupList_GetOrCreateField(t *testing.T) { func TestGroupList_CreateAndAddListItem(t *testing.T) { getIntRef := func(i int) *int { return &i } sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - g := NewGroup().ID(NewItemID()).Schema(sg.Schema(), sg.ID()).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + g := NewGroup().ID(NewItemID()).SchemaGroup(sg.ID()).MustBuild() tests := []struct { Name string @@ -749,7 +753,7 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { { Name: "success", Index: getIntRef(0), - GL: NewGroupList().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + GL: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Expected: g, }, @@ -760,7 +764,6 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { t.Run(tt.Name, func(t *testing.T) { t.Parallel() res := tt.GL.CreateAndAddListItem(tt.Schema, tt.Index) - assert.Equal(t, tt.Expected.Schema(), res.Schema()) assert.Equal(t, tt.Expected.Fields(), res.Fields()) assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) }) diff --git 
a/pkg/property/group_test.go b/pkg/property/group_test.go index 38698c468..b06b69d8b 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -20,7 +20,7 @@ func TestGroup_SchemaGroup(t *testing.T) { assert.Nil(t, g.SchemaGroupRef()) assert.Equal(t, SchemaGroupID(""), g.SchemaGroup()) pfid := SchemaGroupID("aa") - g = NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), pfid).MustBuild() + g = NewGroup().NewID().SchemaGroup(pfid).MustBuild() assert.Equal(t, pfid, g.SchemaGroup()) assert.Equal(t, pfid.Ref(), g.SchemaGroupRef()) } @@ -30,8 +30,8 @@ func TestGroup_HasLinkedField(t *testing.T) { v := ValueTypeString.ValueFrom("vvv") l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() - f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() + f2 := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() tests := []struct { Name string @@ -71,8 +71,8 @@ func TestGroup_IsDatasetLinked(t *testing.T) { dssid := NewDatasetSchemaID() l := NewLink(dsid, dssid, NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() - f2 := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() + f2 := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() tests := []struct { Name string @@ -108,13 +108,13 @@ func TestGroup_IsDatasetLinked(t *testing.T) { } } -func TestGroup_CollectDatasets(t *testing.T) { +func TestGroup_Datasets(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") dsid := NewDatasetID() l := NewLink(dsid, NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() + f := 
FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() tests := []struct { Name string @@ -137,7 +137,7 @@ func TestGroup_CollectDatasets(t *testing.T) { tt := tt t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res := tt.Group.CollectDatasets() + res := tt.Group.Datasets() assert.Equal(t, tt.Expected, res) }) } @@ -150,7 +150,7 @@ func TestGroup_FieldsByLinkedDataset(t *testing.T) { dssid := NewDatasetSchemaID() l := NewLink(dsid, dssid, NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - f := NewField(sf).Value(OptionalValueFrom(v)).Link(ls).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).Links(ls).MustBuild() tests := []struct { Name string @@ -184,8 +184,8 @@ func TestGroup_FieldsByLinkedDataset(t *testing.T) { func TestGroup_IsEmpty(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - f2 := NewField(sf).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf).MustBuild() tests := []struct { Name string @@ -218,8 +218,8 @@ func TestGroup_IsEmpty(t *testing.T) { func TestGroup_Prune(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - f2 := NewField(sf).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf).MustBuild() tests := []struct { Name string @@ -249,8 +249,8 @@ func TestGroup_Prune(t *testing.T) { func TestGroup_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - f := NewField(sf).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + f := FieldFrom(sf).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() tests := []struct { Name string @@ -267,36 +267,36 @@ 
func TestGroup_GetOrCreateField(t *testing.T) { }, { Name: "nil ps", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), }, { Name: "group schema doesn't equal to ps", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), }, { Name: "create field", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), FID: "aa", Expected: struct { Field *Field Bool bool }{ - Field: NewField(sf).MustBuild(), + Field: FieldFrom(sf).MustBuild(), Bool: true, }, }, { Name: "get field", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").Fields([]*Field{f}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("aa").Fields([]*Field{f}).MustBuild(), PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), FID: "aa", Expected: struct { Field *Field Bool bool }{ - Field: NewField(sf).MustBuild(), + Field: FieldFrom(sf).MustBuild(), Bool: false, }, }, @@ -317,8 +317,8 @@ func TestGroup_RemoveField(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - f2 := NewField(sf2).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf2).MustBuild() tests := []struct { Name string @@ -351,8 +351,8 @@ func TestGroup_FieldIDs(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() v := 
ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - f2 := NewField(sf2).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf2).MustBuild() tests := []struct { Name string @@ -383,8 +383,8 @@ func TestGroup_Field(t *testing.T) { sf := NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() sf2 := NewSchemaField().ID("b").Type(ValueTypeString).MustBuild() v := ValueTypeString.ValueFrom("vvv") - f := NewField(sf).Value(OptionalValueFrom(v)).MustBuild() - f2 := NewField(sf2).MustBuild() + f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() + f2 := FieldFrom(sf2).MustBuild() tests := []struct { Name string @@ -419,16 +419,14 @@ func TestGroup_Field(t *testing.T) { } } -func TestGroup_UpdateRepresentativeFieldValue(t *testing.T) { +func TestGroup_RepresentativeFieldValue(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aa"). - Schema(MustSchemaID("xx~1.0.0/aa")). Fields([]*SchemaField{sf}). RepresentativeField(FieldID("aa").Ref()). MustBuild() sg2 := NewSchemaGroup(). ID("bb"). - Schema(MustSchemaID("xx~1.0.0/bb")). Fields([]*SchemaField{sf}). 
MustBuild() @@ -449,18 +447,11 @@ func TestGroup_UpdateRepresentativeFieldValue(t *testing.T) { }, { Name: "nil ps", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), - }, - { - Name: "group schema doesn't equal to ps", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aaa"), "aa").MustBuild(), - Args: args{ - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), - }, + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), }, { Name: "invalid property field", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), Args: args{ Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), Value: ValueTypeString.ValueFrom("abc"), @@ -468,7 +459,7 @@ func TestGroup_UpdateRepresentativeFieldValue(t *testing.T) { }, { Name: "ok", - Group: NewGroup().NewID().Schema(MustSchemaID("xx~1.0.0/aa"), "aa").MustBuild(), + Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), Args: args{ Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), Value: ValueTypeString.ValueFrom("abc"), diff --git a/pkg/property/initializer.go b/pkg/property/initializer.go index 77a6779da..b66882648 100644 --- a/pkg/property/initializer.go +++ b/pkg/property/initializer.go @@ -48,7 +48,7 @@ func (p *Initializer) Property(scene SceneID) (*Property, error) { if p.Items != nil { items = make([]Item, 0, len(p.Items)) for _, i := range p.Items { - item, err := i.PropertyItem(p.Schema) + item, err := i.PropertyItem() if err != nil { return nil, err } @@ -124,7 +124,7 @@ func (p *InitializerItem) Clone() *InitializerItem { } } -func (p *InitializerItem) PropertyItem(parent SchemaID) (Item, error) { +func (p *InitializerItem) PropertyItem() (Item, error) { if p == nil { return nil, nil } @@ -134,12 +134,12 @@ func (p *InitializerItem) PropertyItem(parent SchemaID) (Item, 
error) { i = NewItemID().Ref() } - pi := NewItem().ID(*i).Schema(parent, p.SchemaItem) + pi := NewItem().ID(*i).SchemaGroup(p.SchemaItem) if p.Groups != nil { groups := make([]*Group, 0, len(p.Groups)) for _, g := range p.Groups { - g2, err := g.PropertyGroup(parent, p.SchemaItem) + g2, err := g.PropertyGroup(p.SchemaItem) if err != nil { return nil, err } @@ -164,16 +164,16 @@ func (p *InitializerItem) PropertyItem(parent SchemaID) (Item, error) { return pi.Group().Fields(fields).Build() } -func (p *InitializerItem) PropertyGroupList(parent SchemaID) *GroupList { - i, _ := p.PropertyItem(parent) +func (p *InitializerItem) PropertyGroupList() *GroupList { + i, _ := p.PropertyItem() if g := ToGroupList(i); g != nil { return g } return nil } -func (p *InitializerItem) PropertyGroup(parent SchemaID) *Group { - i, _ := p.PropertyItem(parent) +func (p *InitializerItem) PropertyGroup() *Group { + i, _ := p.PropertyItem() if g := ToGroup(i); g != nil { return g } @@ -204,7 +204,7 @@ func (p *InitializerGroup) Clone() *InitializerGroup { } } -func (p *InitializerGroup) PropertyGroup(parent SchemaID, parentItem SchemaGroupID) (*Group, error) { +func (p *InitializerGroup) PropertyGroup(parentItem SchemaGroupID) (*Group, error) { if p == nil { return nil, nil } @@ -214,7 +214,7 @@ func (p *InitializerGroup) PropertyGroup(parent SchemaID, parentItem SchemaGroup i = NewItemID().Ref() } - pi := NewItem().ID(*i).Schema(parent, parentItem) + pi := NewItem().ID(*i).SchemaGroup(parentItem) var fields []*Field if p.Fields != nil { @@ -274,7 +274,10 @@ func (p *InitializerField) PropertyField() *Field { plinks = NewLinks(links) } - return NewFieldUnsafe().LinksUnsafe(plinks).FieldUnsafe(p.Field).ValueUnsafe(NewOptionalValue(p.Type, p.Value.Clone())).Build() + return NewField(p.Field). + Value(NewOptionalValue(p.Type, p.Value.Clone())). + Links(plinks). 
+ Build() } type InitializerLink struct { diff --git a/pkg/property/initializer_test.go b/pkg/property/initializer_test.go index 980039bd5..f12d4c853 100644 --- a/pkg/property/initializer_test.go +++ b/pkg/property/initializer_test.go @@ -36,7 +36,7 @@ func TestInitializer_Property(t *testing.T) { } expected := New().ID(*initializer.ID).Schema(initializer.Schema).Scene(sid).Items([]Item{ - NewItem().ID(*initializer.Items[0].ID).Schema(initializer.Schema, initializer.Items[0].SchemaItem).Group().MustBuild(), + NewItem().ID(*initializer.Items[0].ID).SchemaGroup(initializer.Items[0].SchemaItem).Group().MustBuild(), }).MustBuild() actual, err := initializer.Property(sid) @@ -78,7 +78,7 @@ func TestInitializer_PropertyIncludingEmpty(t *testing.T) { // test case 3: should generates a property normally actual, err = initializer.PropertyIncludingEmpty(sid, psid2) expected = New().ID(actual.ID()).Schema(initializer.Schema).Scene(sid).Items([]Item{ - NewItem().ID(*initializer.Items[0].ID).Schema(initializer.Schema, initializer.Items[0].SchemaItem).Group().MustBuild(), + NewItem().ID(*initializer.Items[0].ID).SchemaGroup(initializer.Items[0].SchemaItem).Group().MustBuild(), }).MustBuild() assert.NoError(t, err) assert.Equal(t, expected, actual) @@ -114,26 +114,24 @@ func TestInitializerItem_Clone(t *testing.T) { } func TestInitializerItem_PropertyItem(t *testing.T) { - parent := MustSchemaID("reearth/marker") item := &InitializerItem{ ID: NewItemID().Ref(), SchemaItem: SchemaGroupID("hoge"), } - expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().MustBuild() + expected := NewItem().ID(*item.ID).SchemaGroup(item.SchemaItem).Group().MustBuild() - created, err := item.PropertyItem(parent) + created, err := item.PropertyItem() assert.NoError(t, err) assert.Equal(t, expected, created) item.ID = nil - created, err = item.PropertyItem(parent) + created, err = item.PropertyItem() assert.NoError(t, err) assert.False(t, created.ID().IsNil()) } func 
TestInitializerItem_PropertyGroup(t *testing.T) { - parent := MustSchemaID("reearth/marker") item := &InitializerItem{ ID: NewItemID().Ref(), SchemaItem: SchemaGroupID("hoge"), @@ -144,19 +142,20 @@ func TestInitializerItem_PropertyGroup(t *testing.T) { }}, } - expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).Group().Fields([]*Field{ - NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).ValueUnsafe(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)).Build(), + expected := NewItem().ID(*item.ID).SchemaGroup(item.SchemaItem).Group().Fields([]*Field{ + NewField(item.Fields[0].Field). + Value(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)). + MustBuild(), }).MustBuild() - assert.Equal(t, expected, item.PropertyGroup(parent)) + assert.Equal(t, expected, item.PropertyGroup()) // check if a new id is generated item.ID = nil - assert.False(t, item.PropertyGroup(parent).ID().IsNil()) + assert.False(t, item.PropertyGroup().ID().IsNil()) } func TestInitializerItem_PropertyGroupList(t *testing.T) { - parent := MustSchemaID("reearth/marker") item := &InitializerItem{ ID: NewItemID().Ref(), SchemaItem: SchemaGroupID("hoge"), @@ -165,15 +164,15 @@ func TestInitializerItem_PropertyGroupList(t *testing.T) { }}, } - expected := NewItem().ID(*item.ID).Schema(parent, item.SchemaItem).GroupList().Groups([]*Group{ - NewItem().ID(*item.Groups[0].ID).Schema(parent, item.SchemaItem).Group().MustBuild(), + expected := NewItem().ID(*item.ID).SchemaGroup(item.SchemaItem).GroupList().Groups([]*Group{ + NewItem().ID(*item.Groups[0].ID).SchemaGroup(item.SchemaItem).Group().MustBuild(), }).MustBuild() - assert.Equal(t, expected, item.PropertyGroupList(parent)) + assert.Equal(t, expected, item.PropertyGroupList()) // check if a new id is generated item.ID = nil - assert.False(t, item.PropertyGroupList(parent).ID().IsNil()) + assert.False(t, item.PropertyGroupList().ID().IsNil()) } func TestInitializerGroup_Clone(t *testing.T) { @@ -200,7 +199,6 @@ func 
TestInitializerGroup_Clone(t *testing.T) { } func TestInitializerGroup_PropertyGroup(t *testing.T) { - parent := MustSchemaID("reearth/marker") parentItem := SchemaGroupID("hoge") item := &InitializerGroup{ ID: NewItemID().Ref(), @@ -211,17 +209,19 @@ func TestInitializerGroup_PropertyGroup(t *testing.T) { }}, } - expected := NewItem().ID(*item.ID).Schema(parent, parentItem).Group().Fields([]*Field{ - NewFieldUnsafe().FieldUnsafe(item.Fields[0].Field).ValueUnsafe(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)).Build(), + expected := NewItem().ID(*item.ID).SchemaGroup(parentItem).Group().Fields([]*Field{ + NewField(item.Fields[0].Field). + Value(NewOptionalValue(item.Fields[0].Type, item.Fields[0].Value)). + MustBuild(), }).MustBuild() - p, err := item.PropertyGroup(parent, parentItem) + p, err := item.PropertyGroup(parentItem) assert.NoError(t, err) assert.Equal(t, expected, p) // check if a new id is generated item.ID = nil - p, err = item.PropertyGroup(parent, parentItem) + p, err = item.PropertyGroup(parentItem) assert.NoError(t, err) assert.False(t, p.ID().IsNil()) } @@ -256,11 +256,10 @@ func TestInitializerField_PropertyField(t *testing.T) { }}, } - expected := NewFieldUnsafe(). - FieldUnsafe(field.Field). - ValueUnsafe(NewOptionalValue(field.Type, field.Value)). - LinksUnsafe(NewLinks([]*Link{NewLink(*field.Links[0].Dataset.CopyRef(), field.Links[0].Schema, field.Links[0].Field)})). - Build() + expected := NewField(field.Field). + Value(NewOptionalValue(field.Type, field.Value)). + Links(NewLinks([]*Link{NewLink(*field.Links[0].Dataset.CopyRef(), field.Links[0].Schema, field.Links[0].Field)})). 
+ MustBuild() assert.Equal(t, expected, field.PropertyField()) } diff --git a/pkg/property/item.go b/pkg/property/item.go index 5128be84c..ddfee9145 100644 --- a/pkg/property/item.go +++ b/pkg/property/item.go @@ -11,10 +11,8 @@ type Item interface { IDRef() *ItemID SchemaGroup() SchemaGroupID SchemaGroupRef() *SchemaGroupID - Schema() SchemaID - SchemaRef() *SchemaID HasLinkedField() bool - CollectDatasets() []DatasetID + Datasets() []DatasetID FieldsByLinkedDataset(DatasetSchemaID, DatasetID) []*Field IsDatasetLinked(DatasetSchemaID, DatasetID) bool IsEmpty() bool @@ -26,7 +24,6 @@ type Item interface { type itemBase struct { ID ItemID - Schema SchemaID SchemaGroup SchemaGroupID } diff --git a/pkg/property/item_builder.go b/pkg/property/item_builder.go index fae398b23..64bddc98c 100644 --- a/pkg/property/item_builder.go +++ b/pkg/property/item_builder.go @@ -26,8 +26,7 @@ func (b *ItemBuilder) NewID() *ItemBuilder { return b } -func (b *ItemBuilder) Schema(s SchemaID, g SchemaGroupID) *ItemBuilder { - b.base.Schema = s +func (b *ItemBuilder) SchemaGroup(g SchemaGroupID) *ItemBuilder { b.base.SchemaGroup = g return b } diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index d9df99e2d..f375efbd7 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -8,10 +8,9 @@ import ( func TestInitItemFrom(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sgl := NewSchemaGroup().ID("aa").IsList(true).Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() + sgl := NewSchemaGroup().ID("aa").IsList(true).Fields([]*SchemaField{sf}).MustBuild() iid := NewItemID() - propertySchemaID := MustSchemaID("xx~1.0.0/aa") propertySchemaField1ID := SchemaGroupID("aa") tests := []struct { @@ -25,12 +24,12 @@ func 
TestInitItemFrom(t *testing.T) { { Name: "init item from group", SG: sg, - Expected: NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaField1ID).MustBuild(), + Expected: NewGroup().ID(iid).SchemaGroup(propertySchemaField1ID).MustBuild(), }, { Name: "init item from group list", SG: sgl, - Expected: NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaField1ID).MustBuild(), + Expected: NewGroupList().ID(iid).SchemaGroup(propertySchemaField1ID).MustBuild(), }, } @@ -40,7 +39,6 @@ func TestInitItemFrom(t *testing.T) { t.Parallel() res := InitItemFrom(tt.SG) if res != nil { - assert.Equal(t, tt.Expected.Schema(), res.Schema()) assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) } else { assert.Nil(t, tt.Expected) @@ -55,17 +53,15 @@ func TestToGroup(t *testing.T) { propertySchemaField1ID := FieldID("a") propertySchemaGroup1ID := SchemaGroupID("A") il := []Item{ - NewGroup().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID). + NewGroup().ID(iid).SchemaGroup(propertySchemaGroup1ID). Fields([]*Field{ - NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). - Build(), + NewField(propertySchemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("xxx"))). 
+ MustBuild(), }).MustBuild(), } p := New().NewID().Scene(NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() g := ToGroup(p.ItemBySchema(propertySchemaGroup1ID)) - assert.Equal(t, propertySchemaID, g.Schema()) assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) assert.Equal(t, iid, g.ID()) } @@ -75,11 +71,10 @@ func TestToGroupList(t *testing.T) { propertySchemaID := MustSchemaID("xxx~1.1.1/aa") propertySchemaGroup1ID := SchemaGroupID("A") il := []Item{ - NewGroupList().ID(iid).Schema(propertySchemaID, propertySchemaGroup1ID).MustBuild(), + NewGroupList().ID(iid).SchemaGroup(propertySchemaGroup1ID).MustBuild(), } p := New().NewID().Scene(NewSceneID()).Items(il).Schema(propertySchemaID).MustBuild() g := ToGroupList(p.ItemBySchema(propertySchemaGroup1ID)) - assert.Equal(t, propertySchemaID, g.Schema()) assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) assert.Equal(t, iid, g.ID()) } diff --git a/pkg/property/link.go b/pkg/property/link.go index 4fb8df40e..86f24901a 100644 --- a/pkg/property/link.go +++ b/pkg/property/link.go @@ -22,8 +22,7 @@ func NewLinks(links []*Link) *Links { } links2 := make([]*Link, 0, len(links)) for _, l := range links { - l2 := *l - links2 = append(links2, &l2) + links2 = append(links2, l.Clone()) } return &Links{ links: links2, @@ -161,8 +160,7 @@ func (l *Links) Links() []*Link { } links2 := make([]*Link, 0, len(l.links)) for _, l := range l.links { - l2 := *l - links2 = append(links2, &l2) + links2 = append(links2, l.Clone()) } return links2 } @@ -197,7 +195,7 @@ func (l *Links) DatasetSchemaIDs() []DatasetSchemaID { return schemas } -func (l *Links) IsDatasetLinked(s DatasetSchemaID, dsid DatasetID) bool { +func (l *Links) HasSchemaAndDataset(s DatasetSchemaID, dsid DatasetID) bool { if l == nil { return false } @@ -253,7 +251,7 @@ func (l *Links) HasDatasetSchema(dsid DatasetSchemaID) bool { return false } -func (l *Links) HasDatasetOrSchema(dsid DatasetSchemaID, did DatasetID) bool { +func (l *Links) 
HasDatasetSchemaAndDataset(dsid DatasetSchemaID, did DatasetID) bool { if l == nil { return false } @@ -286,27 +284,24 @@ func NewLinkFieldOnly(ds DatasetSchemaID, f DatasetFieldID) *Link { } func (l *Link) Dataset() *DatasetID { - if l == nil || l.dataset == nil { + if l == nil { return nil } - dataset := *l.dataset - return &dataset + return l.dataset.CopyRef() } func (l *Link) DatasetSchema() *DatasetSchemaID { - if l == nil || l.schema == nil { + if l == nil { return nil } - datasetSchema := *l.schema - return &datasetSchema + return l.schema.CopyRef() } func (l *Link) DatasetSchemaField() *DatasetFieldID { - if l == nil || l.field == nil { + if l == nil { return nil } - field := *l.field - return &field + return l.field.CopyRef() } func (l *Link) Value(ds *dataset.Dataset) *dataset.Value { @@ -369,9 +364,8 @@ func (l *Link) ApplyDataset(ds *DatasetID) *Link { if ds == nil || l.Dataset() != nil { return l.Clone() } - ds2 := *ds return &Link{ - dataset: &ds2, + dataset: ds.CopyRef(), schema: l.DatasetSchema(), field: l.DatasetSchemaField(), } diff --git a/pkg/property/link_test.go b/pkg/property/link_test.go index bd5828e92..b64a97824 100644 --- a/pkg/property/link_test.go +++ b/pkg/property/link_test.go @@ -48,7 +48,7 @@ func TestNewLinks(t *testing.T) { assert.Equal(t, 2, lin.Len()) } -func TestLinks_IsDatasetLinked(t *testing.T) { +func TestLinks_HasSchemaAndDataset(t *testing.T) { dsid1 := NewDatasetSchemaID() dsid2 := NewDatasetSchemaID() did1 := NewDatasetID() @@ -89,7 +89,7 @@ func TestLinks_IsDatasetLinked(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - res := tc.Links.IsDatasetLinked(tc.DSS, tc.DS) + res := tc.Links.HasSchemaAndDataset(tc.DSS, tc.DS) res2 := tc.Links.HasDataset(tc.DS) res3 := tc.Links.HasDatasetSchema(tc.DSS) assert.Equal(t, tc.Expected, res) diff --git a/pkg/property/list.go b/pkg/property/list.go index 7441ed756..62539ac27 100644 --- a/pkg/property/list.go +++ b/pkg/property/list.go @@ -2,16 +2,35 @@ 
package property type List []*Property +func (l List) IDs() []ID { + ids := make([]ID, 0, len(l)) + m := map[ID]struct{}{} + for _, p := range l { + s := p.ID() + if _, ok := m[s]; ok { + continue + } + ids = append(ids, s) + m[s] = struct{}{} + } + return ids +} + func (l List) Schemas() []SchemaID { schemas := make([]SchemaID, 0, len(l)) - m := map[SchemaID]struct{}{} for _, p := range l { s := p.Schema() - if _, ok := m[s]; ok { + skip := false + for _, ss := range schemas { + if ss.Equal(s) { + skip = true + break + } + } + if skip { continue } schemas = append(schemas, s) - m[s] = struct{}{} } return schemas } diff --git a/pkg/property/list_test.go b/pkg/property/list_test.go index 8e1273c8d..d2fe403b8 100644 --- a/pkg/property/list_test.go +++ b/pkg/property/list_test.go @@ -8,11 +8,89 @@ import ( var ( sf = NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg = NewSchemaGroup().ID("aa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg = NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() p = New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() p2 = New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xx~1.0.0/aa")).Items([]Item{InitItemFrom(sg)}).MustBuild() ) +func TestList_IDs(t *testing.T) { + p1 := NewID() + p2 := NewID() + + tests := []struct { + name string + target List + want []ID + }{ + { + name: "ok", + target: List{&Property{id: p1}, &Property{id: p2}, &Property{id: p1}}, + want: []ID{p1, p2}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.IDs()) + }) + } +} + +func TestList_Schemas(t *testing.T) { + ps1 := MustSchemaID("x~1.0.0/a") + ps2 := MustSchemaID("x~1.0.0/b") + + tests := []struct { + name string + target List + want []SchemaID + }{ + { + name: "ok", + target: List{&Property{schema: ps1}, &Property{schema: ps2}, &Property{schema: 
ps1}}, + want: []SchemaID{ps1, ps2}, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Schemas()) + }) + } +} + +func TestList_Map(t *testing.T) { + p1 := NewID() + p2 := NewID() + + tests := []struct { + name string + target List + want Map + }{ + { + name: "ok", + target: List{&Property{id: p1}, &Property{id: p2}, &Property{id: p1}}, + want: Map{ + p1: &Property{id: p1}, + p2: &Property{id: p2}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Map()) + }) + } +} + func TestMap_Add(t *testing.T) { tests := []struct { Name string @@ -30,13 +108,13 @@ func TestMap_Add(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { t.Parallel() - tc.M.Add(tc.Input) - assert.Equal(t, tc.Expected, tc.M) - assert.Equal(t, tc.Expected.List(), tc.M.List()) + tt.M.Add(tt.Input) + assert.Equal(t, tt.Expected, tt.M) + assert.Equal(t, tt.Expected.List(), tt.M.List()) }) } } @@ -63,12 +141,12 @@ func TestMap_Clone(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res := tc.M.Clone() - assert.Equal(t, tc.Expected, res) + res := tt.M.Clone() + assert.Equal(t, tt.Expected, res) }) } } @@ -90,12 +168,12 @@ func TestMap_Merge(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res := tc.M1.Merge(tc.M2) - assert.Equal(t, tc.Expected, res) + res := tt.M1.Merge(tt.M2) + assert.Equal(t, tt.Expected, res) }) } } diff --git a/pkg/property/loader.go b/pkg/property/loader.go index 336161f09..66e7923f2 100644 --- 
a/pkg/property/loader.go +++ b/pkg/property/loader.go @@ -41,3 +41,17 @@ func LoaderFromMap(data map[ID]*Property) Loader { return res, nil } } + +func SchemaLoaderFromMap(data map[SchemaID]*Schema) SchemaLoader { + return func(ctx context.Context, ids ...SchemaID) (SchemaList, error) { + res := make([]*Schema, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} diff --git a/pkg/property/loader_test.go b/pkg/property/loader_test.go index 137704362..d29929721 100644 --- a/pkg/property/loader_test.go +++ b/pkg/property/loader_test.go @@ -33,13 +33,28 @@ func TestLoaderFromMap(t *testing.T) { pid3 := NewID() p1 := New().ID(pid1).Scene(scene).Schema(ps).MustBuild() p2 := New().ID(pid2).Scene(scene).Schema(ps).MustBuild() - p3 := New().ID(pid3).Scene(scene).Schema(ps).MustBuild() + pl := LoaderFromMap(map[ID]*Property{ pid1: p1, pid2: p2, - pid3: p3, }) - res, err := pl(context.Background(), pid1, pid2) - assert.Equal(t, List{p1, p2}, res) + res, err := pl(context.Background(), pid1, pid3, pid2) + assert.Equal(t, List{p1, nil, p2}, res) + assert.NoError(t, err) +} + +func TestSchemaLoaderFromMap(t *testing.T) { + psid1 := MustSchemaID("xxx~1.1.1/aa") + psid2 := MustSchemaID("xxx~1.1.1/bb") + psid3 := MustSchemaID("xxx~1.1.1/cc") + ps1 := NewSchema().ID(psid1).MustBuild() + ps2 := NewSchema().ID(psid2).MustBuild() + + pl := SchemaLoaderFromMap(map[SchemaID]*Schema{ + psid1: ps1, + psid2: ps2, + }) + res, err := pl(context.Background(), psid1, psid3, psid2) + assert.Equal(t, SchemaList{ps1, nil, ps2}, res) assert.NoError(t, err) } diff --git a/pkg/property/merged_test.go b/pkg/property/merged_test.go index dbc18a9e6..50e7d9043 100644 --- a/pkg/property/merged_test.go +++ b/pkg/property/merged_test.go @@ -29,37 +29,55 @@ func TestMerge(t *testing.T) { i8id := NewItemID() fields1 := []*Field{ - 
NewFieldUnsafe().FieldUnsafe(FieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("a"))).Build(), - NewFieldUnsafe().FieldUnsafe(FieldID("b")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("b"))).Build(), - NewFieldUnsafe().FieldUnsafe(FieldID("e")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLink(d2, ds, df)})).Build(), - NewFieldUnsafe().FieldUnsafe(FieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeNumber, nil)).Build(), + NewField(FieldID("a")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("a"))). + MustBuild(), + NewField(FieldID("b")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("b"))). + MustBuild(), + NewField(FieldID("e")). + Value(NewOptionalValue(ValueTypeString, nil)). + Links(NewLinks([]*Link{NewLink(d2, ds, df)})). + MustBuild(), + NewField(FieldID("f")). + Value(NewOptionalValue(ValueTypeNumber, nil)). + MustBuild(), } fields2 := []*Field{ - NewFieldUnsafe().FieldUnsafe(FieldID("a")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("1"))).Build(), - NewFieldUnsafe().FieldUnsafe(FieldID("c")).ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("2"))).Build(), - NewFieldUnsafe().FieldUnsafe(FieldID("d")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).LinksUnsafe(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})).Build(), - NewFieldUnsafe().FieldUnsafe(FieldID("f")).ValueUnsafe(NewOptionalValue(ValueTypeString, nil)).Build(), + NewField(FieldID("a")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("1"))). + MustBuild(), + NewField(FieldID("c")). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("2"))). + MustBuild(), + NewField(FieldID("d")). + Value(NewOptionalValue(ValueTypeString, nil)). + Links(NewLinks([]*Link{NewLinkFieldOnly(ds, df)})). + MustBuild(), + NewField(FieldID("f")). + Value(NewOptionalValue(ValueTypeString, nil)). 
+ MustBuild(), } groups1 := []*Group{ - NewGroup().ID(i7id).Schema(psid, psgid1).Fields(fields1).MustBuild(), + NewGroup().ID(i7id).SchemaGroup(psgid1).Fields(fields1).MustBuild(), } groups2 := []*Group{ - NewGroup().ID(i8id).Schema(psid, psgid1).Fields(fields2).MustBuild(), + NewGroup().ID(i8id).SchemaGroup(psgid1).Fields(fields2).MustBuild(), } items1 := []Item{ - NewGroupList().ID(i1id).Schema(psid, psgid1).Groups(groups1).MustBuild(), - NewGroup().ID(i2id).Schema(psid, psgid2).Fields(fields1).MustBuild(), - NewGroup().ID(i3id).Schema(psid, psgid3).Fields(fields1).MustBuild(), + NewGroupList().ID(i1id).SchemaGroup(psgid1).Groups(groups1).MustBuild(), + NewGroup().ID(i2id).SchemaGroup(psgid2).Fields(fields1).MustBuild(), + NewGroup().ID(i3id).SchemaGroup(psgid3).Fields(fields1).MustBuild(), } items2 := []Item{ - NewGroupList().ID(i4id).Schema(psid, psgid1).Groups(groups2).MustBuild(), - NewGroup().ID(i5id).Schema(psid, psgid2).Fields(fields2).MustBuild(), - NewGroup().ID(i6id).Schema(psid, psgid4).Fields(fields2).MustBuild(), + NewGroupList().ID(i4id).SchemaGroup(psgid1).Groups(groups2).MustBuild(), + NewGroup().ID(i5id).SchemaGroup(psgid2).Fields(fields2).MustBuild(), + NewGroup().ID(i6id).SchemaGroup(psgid4).Fields(fields2).MustBuild(), } sid := NewSceneID() diff --git a/pkg/property/pointer.go b/pkg/property/pointer.go index 8164adb90..57080070c 100644 --- a/pkg/property/pointer.go +++ b/pkg/property/pointer.go @@ -124,11 +124,10 @@ func (p *Pointer) Item() (i ItemID, ok bool) { } func (p *Pointer) ItemRef() *ItemID { - if p == nil || p.item == nil { + if p == nil { return nil } - f := *p.item - return &f + return p.item.CopyRef() } func (p *Pointer) FieldByItem() (i ItemID, f FieldID, ok bool) { @@ -164,11 +163,10 @@ func (p *Pointer) Field() (f FieldID, ok bool) { } func (p *Pointer) FieldRef() *FieldID { - if p == nil || p.field == nil { + if p == nil { return nil } - f := *p.field - return &f + return p.field.CopyRef() } func (p *Pointer) GetAll() (sg 
*SchemaGroupID, i *ItemID, f *FieldID) { diff --git a/pkg/property/property.go b/pkg/property/property.go index ee6db07a0..aad3b92bf 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -70,7 +70,7 @@ func (p *Property) Item(id ItemID) (Item, *GroupList) { return f, nil } if gl := ToGroupList(f); gl != nil { - if i := gl.GetGroup(id); i != nil { + if i := gl.Group(id); i != nil { return i, gl } } @@ -131,12 +131,12 @@ func (p *Property) ListItem(ptr *Pointer) (*Group, *GroupList) { } if sgid, i, ok := ptr.ItemBySchemaGroupAndItem(); ok { if item := ToGroupList(p.ItemBySchema(sgid)); item != nil { - return item.GetGroup(i), item + return item.Group(i), item } } else if iid, ok := ptr.Item(); ok { for _, item := range p.items { litem := ToGroupList(item) - if g := litem.GetGroup(iid); g != nil { + if g := litem.Group(iid); g != nil { return g, litem } } @@ -183,14 +183,14 @@ func (p *Property) IsDatasetLinked(s DatasetSchemaID, i DatasetID) bool { return false } -func (p *Property) CollectDatasets() []DatasetID { +func (p *Property) Datasets() []DatasetID { if p == nil { return nil } res := []DatasetID{} for _, f := range p.items { - res = append(res, f.CollectDatasets()...) + res = append(res, f.Datasets()...) } return res diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index 5f38c5bb0..9e1a5494b 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -46,49 +46,51 @@ func TestPropertyMigrateSchema(t *testing.T) { schemaField7, } schemaGroups := []*SchemaGroup{ - NewSchemaGroup().ID(schemaGroupID).Schema(oldSchema).Fields(schemaFields).MustBuild(), + NewSchemaGroup().ID(schemaGroupID).Fields(schemaFields).MustBuild(), } fields := []*Field{ // should remain - NewFieldUnsafe().FieldUnsafe(schemaField1ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("foobar"))). - Build(), + NewField(schemaField1ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("foobar"))). 
+ MustBuild(), // should be removed because of max - NewFieldUnsafe().FieldUnsafe(schemaField2ID). - ValueUnsafe(OptionalValueFrom(ValueTypeNumber.ValueFrom(101))). - Build(), + NewField(schemaField2ID). + Value(OptionalValueFrom(ValueTypeNumber.ValueFrom(101))). + MustBuild(), // should remain - NewFieldUnsafe().FieldUnsafe(schemaField3ID). - ValueUnsafe(OptionalValueFrom(ValueTypeNumber.ValueFrom(1))). - Build(), + NewField(schemaField3ID). + Value(OptionalValueFrom(ValueTypeNumber.ValueFrom(1))). + MustBuild(), // should be removed because of choices - NewFieldUnsafe().FieldUnsafe(schemaField4ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("z"))). - Build(), + NewField(schemaField4ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("z"))). + MustBuild(), // should remain - NewFieldUnsafe().FieldUnsafe(schemaField5ID). - LinksUnsafe(NewLinks([]*Link{ + NewField(schemaField5ID). + Type(ValueTypeString). + Links(NewLinks([]*Link{ NewLink(datasetID, datasetSchemaID, datasetFieldID), })). - Build(), + MustBuild(), // should be removed because of linked dataset field value type - NewFieldUnsafe().FieldUnsafe(schemaField6ID). - LinksUnsafe(NewLinks([]*Link{ + NewField(schemaField6ID). + Type(ValueTypeString). + Links(NewLinks([]*Link{ NewLink(datasetID, datasetSchemaID, datasetFieldID), })). - Build(), + MustBuild(), // should be removed because of type - NewFieldUnsafe().FieldUnsafe(schemaField7ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). - Build(), + NewField(schemaField7ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). + MustBuild(), // should be removed because of not existing field - NewFieldUnsafe().FieldUnsafe(schemaField8ID). - ValueUnsafe(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). - Build(), + NewField(schemaField8ID). + Value(OptionalValueFrom(ValueTypeString.ValueFrom("hogehoge"))). 
+ MustBuild(), } items := []Item{ - NewGroup().NewID().Schema(oldSchema, schemaGroupID).Fields(fields).MustBuild(), + NewGroup().NewID().SchemaGroup(schemaGroupID).Fields(fields).MustBuild(), } datasetFields := []*dataset.Field{ @@ -106,7 +108,6 @@ func TestPropertyMigrateSchema(t *testing.T) { assert.Equal(t, schema.ID(), property.Schema()) assert.Equal(t, 1, len(property.Items())) - assert.Equal(t, schema.ID(), newGroup.Schema()) assert.Equal(t, 3, len(newFields)) assert.NotNil(t, newGroup.Field(schemaField1ID)) assert.NotNil(t, newGroup.Field(schemaField3ID)) @@ -122,9 +123,9 @@ func TestGetOrCreateItem(t *testing.T) { sg2id := SchemaGroupID("d") sf1 := NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() - sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() + sg1 := NewSchemaGroup().ID(sg1id).Fields([]*SchemaField{sf1}).MustBuild() sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() - sg2 := NewSchemaGroup().ID(sg2id).Schema(sid).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() + sg2 := NewSchemaGroup().ID(sg2id).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() s := NewSchema().ID(sid).Groups([]*SchemaGroup{sg1, sg2}).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() @@ -135,7 +136,6 @@ func TestGetOrCreateItem(t *testing.T) { i, _ := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) assert.NotNil(t, i) - assert.Equal(t, sid, i.Schema()) assert.Equal(t, sg1id, i.SchemaGroup()) assert.Equal(t, i, ToGroup(p.ItemBySchema(sg1id))) assert.Equal(t, []Item{i}, p.Items()) @@ -151,7 +151,6 @@ func TestGetOrCreateItem(t *testing.T) { i3, _ := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) assert.NotNil(t, i3) - assert.Equal(t, sid, i3.Schema()) assert.Equal(t, sg2id, i3.SchemaGroup()) assert.Equal(t, i3, ToGroupList(p.ItemBySchema(sg2id))) assert.Equal(t, []Item{i, i3}, p.Items()) @@ -172,9 +171,9 @@ func TestGetOrCreateField(t *testing.T) { sg2id := SchemaGroupID("d") sf1 := 
NewSchemaField().ID(sf1id).Type(ValueTypeString).MustBuild() - sg1 := NewSchemaGroup().ID(sg1id).Schema(sid).Fields([]*SchemaField{sf1}).MustBuild() + sg1 := NewSchemaGroup().ID(sg1id).Fields([]*SchemaField{sf1}).MustBuild() sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() - sg2 := NewSchemaGroup().ID(sg2id).Schema(sid).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() + sg2 := NewSchemaGroup().ID(sg2id).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() s := NewSchema().ID(sid).Groups([]*SchemaGroup{sg1, sg2}).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() @@ -188,7 +187,6 @@ func TestGetOrCreateField(t *testing.T) { assert.True(t, created) assert.Equal(t, sf1id, f.Field()) i := ToGroup(p.ItemBySchema(sg1id)) - assert.Equal(t, sid, i.Schema()) assert.Equal(t, sg1id, i.SchemaGroup()) assert.Equal(t, []*Field{f}, i.Fields()) field, _, _ := p.Field(PointFieldBySchemaGroup(sg1id, sf1id)) @@ -221,7 +219,7 @@ func TestAddListItem(t *testing.T) { sfid := FieldID("a") sgid := SchemaGroupID("b") sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID(sgid).Schema(sid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() + sg := NewSchemaGroup().ID(sgid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() ps := NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() @@ -241,9 +239,9 @@ func TestMoveListItem(t *testing.T) { sceneID := NewSceneID() sid, _ := SchemaIDFrom("hoge~1.0.0/test") sgid := SchemaGroupID("b") - g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() - g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() - gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() + g1 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + g2 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + gl := NewGroupList().NewID().SchemaGroup(sgid).Groups([]*Group{g1, g2}).MustBuild() p := 
New().NewID().Scene(sceneID).Schema(sid).Items([]Item{gl}).MustBuild() assert.Equal(t, []*Group{g1, g2}, gl.Groups()) @@ -256,9 +254,9 @@ func TestRemoveListItem(t *testing.T) { sceneID := NewSceneID() sid, _ := SchemaIDFrom("hoge~1.0.0/test") sgid := SchemaGroupID("b") - g1 := NewGroup().NewID().Schema(sid, sgid).MustBuild() - g2 := NewGroup().NewID().Schema(sid, sgid).MustBuild() - gl := NewGroupList().NewID().Schema(sid, sgid).Groups([]*Group{g1, g2}).MustBuild() + g1 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + g2 := NewGroup().NewID().SchemaGroup(sgid).MustBuild() + gl := NewGroupList().NewID().SchemaGroup(sgid).Groups([]*Group{g1, g2}).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).Items([]Item{gl}).MustBuild() assert.Equal(t, []*Group{g1, g2}, gl.Groups()) diff --git a/pkg/property/schema_builder_test.go b/pkg/property/schema_builder_test.go index 58dbe761e..1343943e8 100644 --- a/pkg/property/schema_builder_test.go +++ b/pkg/property/schema_builder_test.go @@ -9,8 +9,8 @@ import ( func TestSchemaBuilder_Build(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Fields([]*SchemaField{sf}).MustBuild() type args struct { ID SchemaID @@ -82,8 +82,8 @@ func TestSchemaBuilder_Build(t *testing.T) { func TestSchemaBuilder_MustBuild(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := 
NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() + sg2 := NewSchemaGroup().ID("daa").Fields([]*SchemaField{sf}).MustBuild() type args struct { ID SchemaID diff --git a/pkg/property/schema_field.go b/pkg/property/schema_field.go index 825233f3f..221d86bb5 100644 --- a/pkg/property/schema_field.go +++ b/pkg/property/schema_field.go @@ -26,39 +26,56 @@ type SchemaFieldChoice struct { } func (p *SchemaField) ID() FieldID { + if p == nil { + return "" + } return p.id } func (p *SchemaField) Type() ValueType { + if p == nil { + return ValueTypeUnknown + } return p.propertyType } func (p *SchemaField) Title() i18n.String { + if p == nil { + return nil + } return p.title.Copy() } func (p *SchemaField) Description() i18n.String { + if p == nil { + return nil + } return p.description.Copy() } func (p *SchemaField) Prefix() string { + if p == nil { + return "" + } return p.prefix } func (p *SchemaField) Suffix() string { + if p == nil { + return "" + } return p.suffix } func (p *SchemaField) DefaultValue() *Value { - if p == nil || p.defaultValue == nil { + if p == nil { return nil } - v := *p.defaultValue - return &v + return p.defaultValue.Clone() } func (p *SchemaField) UI() *SchemaFieldUI { - if p == nil || p.ui == SchemaFieldUI("") { + if p == nil || p.ui == "" { return nil } ui := p.ui @@ -151,14 +168,23 @@ func (p *SchemaField) Validate(value *OptionalValue) bool { } func (p *SchemaField) SetTitle(title i18n.String) { + if p == nil { + return + } p.title = title.Copy() } func (p *SchemaField) SetDescription(des i18n.String) { + if p == nil { + return + } p.description = des.Copy() } func (c *SchemaFieldChoice) SetTitle(l i18n.String) { + if c == nil { + return + } c.Title = l.Copy() } diff --git a/pkg/property/schema_field_builder.go b/pkg/property/schema_field_builder.go index cb1497c67..fd22d3c44 100644 --- a/pkg/property/schema_field_builder.go +++ b/pkg/property/schema_field_builder.go @@ -72,8 +72,7 @@ func (b *SchemaFieldBuilder) DefaultValue(v 
*Value) *SchemaFieldBuilder { if v == nil { b.p.defaultValue = nil } else { - v2 := *v - b.p.defaultValue = &v2 + b.p.defaultValue = v.Clone() } return b } diff --git a/pkg/property/schema_group.go b/pkg/property/schema_group.go index c1515ecb5..2bc79f34b 100644 --- a/pkg/property/schema_group.go +++ b/pkg/property/schema_group.go @@ -7,7 +7,6 @@ import ( // SchemaGroup represents a group of property that has some fields type SchemaGroup struct { id SchemaGroupID - sid SchemaID fields []*SchemaField list bool isAvailableIf *Condition @@ -30,20 +29,6 @@ func (s *SchemaGroup) IDRef() *SchemaGroupID { return s.id.Ref() } -func (s *SchemaGroup) Schema() SchemaID { - if s == nil { - return SchemaID{} - } - return s.sid -} - -func (s *SchemaGroup) SchemaRef() *SchemaID { - if s == nil { - return nil - } - return &s.sid -} - // Fields returns a slice of fields func (s *SchemaGroup) Fields() []*SchemaField { if s == nil { diff --git a/pkg/property/schema_group_builder.go b/pkg/property/schema_group_builder.go index 7acecdba4..8f55b56e5 100644 --- a/pkg/property/schema_group_builder.go +++ b/pkg/property/schema_group_builder.go @@ -15,7 +15,7 @@ func NewSchemaGroup() *SchemaGroupBuilder { } func (b *SchemaGroupBuilder) Build() (*SchemaGroup, error) { - if b.p.sid.IsNil() { + if b.p.id == "" { return nil, ErrInvalidID } return b.p, nil @@ -34,11 +34,6 @@ func (b *SchemaGroupBuilder) ID(id SchemaGroupID) *SchemaGroupBuilder { return b } -func (b *SchemaGroupBuilder) Schema(sid SchemaID) *SchemaGroupBuilder { - b.p.sid = sid - return b -} - func (b *SchemaGroupBuilder) Fields(fields []*SchemaField) *SchemaGroupBuilder { newFields := []*SchemaField{} ids := map[FieldID]struct{}{} diff --git a/pkg/property/schema_group_builder_test.go b/pkg/property/schema_group_builder_test.go index a21909a19..9a537f981 100644 --- a/pkg/property/schema_group_builder_test.go +++ b/pkg/property/schema_group_builder_test.go @@ -8,13 +8,11 @@ import ( ) func TestSchemaGroupBuilder_Build(t 
*testing.T) { - sid := MustSchemaID("xx~1.0.0/aa") gid := SchemaGroupID("xx") sf := NewSchemaField().ID("ff").Type(ValueTypeString).MustBuild() type expected struct { ID SchemaGroupID - Sid SchemaID Fields []*SchemaField List bool IsAvailableIf *Condition @@ -24,7 +22,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { tests := []struct { Name string ID SchemaGroupID - Sid SchemaID Fields []*SchemaField List bool IsAvailableIf *Condition @@ -39,7 +36,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { { Name: "success", ID: gid, - Sid: sid, Fields: []*SchemaField{sf, nil, sf}, List: true, IsAvailableIf: &Condition{ @@ -49,7 +45,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { Title: i18n.StringFrom("tt"), Expected: expected{ ID: gid, - Sid: sid, Fields: []*SchemaField{sf}, List: true, IsAvailableIf: &Condition{ @@ -62,7 +57,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { { Name: "success: nil name", ID: gid, - Sid: sid, Fields: []*SchemaField{sf}, List: true, IsAvailableIf: &Condition{ @@ -72,7 +66,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { Title: i18n.StringFrom("tt"), Expected: expected{ ID: gid, - Sid: sid, Fields: []*SchemaField{sf}, List: true, IsAvailableIf: &Condition{ @@ -90,7 +83,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { t.Parallel() res, err := NewSchemaGroup(). ID(tc.ID). - Schema(tc.Sid). Fields(tc.Fields). IsList(tc.List). Title(tc.Title). 
@@ -98,7 +90,6 @@ func TestSchemaGroupBuilder_Build(t *testing.T) { Build() if tc.Err == nil { assert.Equal(t, tc.Expected.IsAvailableIf, res.IsAvailableIf()) - assert.Equal(t, tc.Expected.Sid, res.Schema()) assert.Equal(t, tc.Expected.ID, res.ID()) assert.Equal(t, tc.Expected.Title, res.Title()) assert.Equal(t, tc.Expected.List, res.IsList()) diff --git a/pkg/property/schema_group_test.go b/pkg/property/schema_group_test.go index d1cdbe01b..8b7ed9dbd 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -9,7 +9,6 @@ import ( func TestSchemaGroup(t *testing.T) { scid := SchemaGroupID("aa") - sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() tests := []struct { @@ -17,9 +16,7 @@ func TestSchemaGroup(t *testing.T) { G *SchemaGroup Expected struct { GIDRef *SchemaGroupID - SIDRef *SchemaID GID SchemaGroupID - SID SchemaID Fields []*SchemaField Title i18n.String IsAvailableIf *Condition @@ -31,21 +28,17 @@ func TestSchemaGroup(t *testing.T) { }, { Name: "success", - G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), + G: NewSchemaGroup().ID(scid).Fields([]*SchemaField{sf}).MustBuild(), Expected: struct { GIDRef *SchemaGroupID - SIDRef *SchemaID GID SchemaGroupID - SID SchemaID Fields []*SchemaField Title i18n.String IsAvailableIf *Condition IsList bool }{ GIDRef: scid.Ref(), - SIDRef: sid.Ref(), GID: scid, - SID: sid, Fields: []*SchemaField{sf}, Title: nil, }, @@ -59,8 +52,6 @@ func TestSchemaGroup(t *testing.T) { assert.Equal(t, tc.Expected.GID, tc.G.ID()) assert.Equal(t, tc.Expected.GIDRef, tc.G.IDRef()) - assert.Equal(t, tc.Expected.SID, tc.G.Schema()) - assert.Equal(t, tc.Expected.SIDRef, tc.G.SchemaRef()) assert.Equal(t, tc.Expected.Fields, tc.G.Fields()) assert.Equal(t, tc.Expected.IsList, tc.G.IsList()) assert.Equal(t, tc.Expected.IsAvailableIf, tc.G.IsAvailableIf()) @@ -71,7 +62,6 @@ func TestSchemaGroup(t *testing.T) { func 
TestSchemaGroup_Field(t *testing.T) { scid := SchemaGroupID("aa") - sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() tests := []struct { @@ -86,14 +76,14 @@ func TestSchemaGroup_Field(t *testing.T) { }, { Name: "found", - G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), + G: NewSchemaGroup().ID(scid).Fields([]*SchemaField{sf}).MustBuild(), PTR: NewPointer(nil, nil, sf.ID().Ref()), Input: sf.ID(), Expected: sf, }, { Name: "not found", - G: NewSchemaGroup().ID(scid).Schema(sid).Fields([]*SchemaField{sf}).MustBuild(), + G: NewSchemaGroup().ID(scid).Fields([]*SchemaField{sf}).MustBuild(), PTR: NewPointer(nil, nil, FieldID("zz").Ref()), Input: FieldID("zz"), }, @@ -111,7 +101,7 @@ func TestSchemaGroup_Field(t *testing.T) { } func TestSchemaGroup_SetTitle(t *testing.T) { - sg := NewSchemaGroup().ID(SchemaGroupID("aa")).Schema(MustSchemaID("xx~1.0.0/aa")).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID(SchemaGroupID("aa")).Fields([]*SchemaField{sf}).MustBuild() sg.SetTitle(i18n.StringFrom("ttt")) assert.Equal(t, i18n.StringFrom("ttt"), sg.Title()) } diff --git a/pkg/property/schema_list.go b/pkg/property/schema_list.go index 7c1bd0621..e737e6960 100644 --- a/pkg/property/schema_list.go +++ b/pkg/property/schema_list.go @@ -2,10 +2,23 @@ package property type SchemaList []*Schema +func (l SchemaList) Find(psid SchemaID) *Schema { + for _, s := range l { + if s.ID().Equal(psid) { + return s + } + } + return nil +} + func (l SchemaList) Map() SchemaMap { return SchemaMapFrom(l) } +func (l SchemaList) Loader() SchemaLoader { + return SchemaLoaderFromMap(l.Map()) +} + type SchemaMap map[SchemaID]*Schema func SchemaMapFrom(l []*Schema) SchemaMap { @@ -61,3 +74,7 @@ func (m SchemaMap) Merge(m2 SchemaMap) SchemaMap { return m3 } + +func (m SchemaMap) Loader() SchemaLoader { + return SchemaLoaderFromMap(m) +} diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go 
index 783675b4f..56deca6c8 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -17,7 +17,7 @@ func TestSchema_Nil(t *testing.T) { func TestSchema_Field(t *testing.T) { sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() tests := []struct { Name string @@ -57,7 +57,7 @@ func TestSchema_Field(t *testing.T) { func TestSchema_Group(t *testing.T) { sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() tests := []struct { Name string @@ -100,7 +100,7 @@ func TestSchema_Group(t *testing.T) { func TestSchema_DetectDuplicatedFields(t *testing.T) { sid := MustSchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Schema(sid).Fields([]*SchemaField{sf}).MustBuild() + sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() tests := []struct { Name string diff --git a/pkg/property/value.go b/pkg/property/value.go index a6ea52ce7..2d10acc0e 100644 --- a/pkg/property/value.go +++ b/pkg/property/value.go @@ -40,6 +40,13 @@ func (vt ValueType) Valid() bool { return value.Type(vt).Default() } +func (t ValueType) Default() bool { + if _, ok := types[value.Type(t)]; ok { + return true + } + return value.Type(t).Default() +} + func (vt ValueType) ValueFrom(i interface{}) *Value { v := value.Type(vt).ValueFrom(i, types) if v == nil { @@ -55,6 +62,10 @@ func (vt ValueType) MustBeValue(i interface{}) *Value { panic("invalid value") } +func (vt ValueType) None() *OptionalValue { + return NewOptionalValue(vt, nil) +} + type Value struct { v value.Value } @@ -74,6 +85,10 @@ 
func (v *Value) Clone() *Value { return &Value{v: *vv} } +func (v *Value) Some() *OptionalValue { + return OptionalValueFrom(v) +} + func (v *Value) Type() ValueType { if v == nil { return ValueType(value.TypeUnknown) diff --git a/pkg/property/value_test.go b/pkg/property/value_test.go index 5ceb416dd..8df347939 100644 --- a/pkg/property/value_test.go +++ b/pkg/property/value_test.go @@ -8,6 +8,33 @@ import ( "github.com/stretchr/testify/assert" ) +func TestValueType_None(t *testing.T) { + tests := []struct { + name string + tr ValueType + want *OptionalValue + }{ + { + name: "default", + tr: ValueTypeString, + want: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}, + }, + { + name: "unknown", + tr: ValueTypeUnknown, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.None()) + }) + } +} + func TestValue_IsEmpty(t *testing.T) { tests := []struct { name string @@ -72,6 +99,42 @@ func TestValue_Clone(t *testing.T) { } } +func TestValue_Some(t *testing.T) { + tests := []struct { + name string + value *Value + want *OptionalValue + }{ + { + name: "ok", + value: &Value{ + v: *value.TypeString.ValueFrom("foo", types), + }, + want: &OptionalValue{ + ov: *value.OptionalFrom(value.TypeString.ValueFrom("foo", types)), + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Some()) + }) + } +} + func TestValue_Value(t *testing.T) { tests := []struct { name string diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index b81168969..c84c9d9bd 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -72,16 +72,14 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). 
Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). - Build(), - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField2ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). - Build(), + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + property.NewField(propertySchemaField2ID). + Value(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). + MustBuild(), }).MustBuild(), }). MustBuild() @@ -99,16 +97,14 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("yyy"))). - Build(), - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField2ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). - Build(), + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("yyy"))). + MustBuild(), + property.NewField(propertySchemaField2ID). + Value(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). + MustBuild(), }).MustBuild(), }). MustBuild() @@ -124,16 +120,14 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). 
+ property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). - Build(), - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField3ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("test"))). - Build(), + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). + MustBuild(), + property.NewField(propertySchemaField3ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("test"))). + MustBuild(), }).MustBuild(), }). MustBuild() @@ -157,20 +151,24 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). - LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ property.NewLink(ds2id, dss2id, ds2f1), property.NewLink(ds3id, dss3id, ds3f1), })). - Build(), + MustBuild(), }).MustBuild(), }). MustBuild() - layer3ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(scenePropertyID).MustBuild() + layer3ibf1 := layer.NewInfoboxField(). + NewID(). + Plugin(pluginID). + Extension(pluginExtension1ID). + Property(scenePropertyID). + MustBuild() layer3ib := layer.NewInfobox([]*layer.InfoboxField{ layer3ibf1, }, scenePropertyID) @@ -189,12 +187,11 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). 
Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField2ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). - Build(), + property.NewField(propertySchemaField2ID). + Value(property.OptionalValueFrom(property.ValueTypeNumber.ValueFrom(1))). + MustBuild(), }).MustBuild(), }). MustBuild() @@ -216,19 +213,17 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). - LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss3id, ds3f1), })). - Build(), - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField3ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). - Build(), + MustBuild(), + property.NewField(propertySchemaField3ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("xxx"))). + MustBuild(), }).MustBuild(), }). MustBuild() @@ -253,15 +248,14 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). 
- ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). - LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss1id, ds1f2), })). - Build(), + MustBuild(), }).MustBuild(), }). MustBuild() @@ -278,26 +272,24 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID). + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). - LinksUnsafe(property.NewLinks([]*property.Link{ + property.NewField(propertySchemaField1ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss1id, ds1f1), property.NewLinkFieldOnly(dss2id, ds2f1), property.NewLinkFieldOnly(dss3id, ds3f1), })). - Build(), - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField2ID). - ValueUnsafe(property.NewOptionalValue(property.ValueTypeString, nil)). - LinksUnsafe(property.NewLinks([]*property.Link{ + MustBuild(), + property.NewField(propertySchemaField2ID). + Value(property.NewOptionalValue(property.ValueTypeString, nil)). + Links(property.NewLinks([]*property.Link{ property.NewLinkFieldOnly(dss1id, ds1f1), property.NewLinkFieldOnly(dss2id, ds2f1), property.NewLinkFieldOnly(dss3id, ds3f1), })). - Build(), + MustBuild(), }).MustBuild(), }). MustBuild() @@ -315,20 +307,18 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). 
Items([]property.Item{ - property.NewGroupList().NewID().Schema(propertySchemaID, propertySchemaGroup2ID).Groups([]*property.Group{ - property.NewGroup().ID(propertyItemID1).Schema(propertySchemaID, propertySchemaGroup2ID). + property.NewGroupList().NewID().SchemaGroup(propertySchemaGroup2ID).Groups([]*property.Group{ + property.NewGroup().ID(propertyItemID1).SchemaGroup(propertySchemaGroup2ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("XYZ"))). - Build(), + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("XYZ"))). + MustBuild(), }).MustBuild(), - property.NewGroup().ID(propertyItemID2).Schema(propertySchemaID, propertySchemaGroup2ID). + property.NewGroup().ID(propertyItemID2).SchemaGroup(propertySchemaGroup2ID). Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("ZYX"))). - Build(), + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("ZYX"))). + MustBuild(), }).MustBuild(), }).MustBuild(), }). @@ -357,11 +347,10 @@ func TestSceneBuilder(t *testing.T) { Scene(sceneID). Schema(propertySchemaID). Items([]property.Item{ - property.NewGroup().NewID().Schema(propertySchemaID, propertySchemaGroup1ID).Fields([]*property.Field{ - property.NewFieldUnsafe(). - FieldUnsafe(propertySchemaField1ID). - ValueUnsafe(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("hogehoge"))). - Build(), + property.NewGroup().NewID().SchemaGroup(propertySchemaGroup1ID).Fields([]*property.Field{ + property.NewField(propertySchemaField1ID). + Value(property.OptionalValueFrom(property.ValueTypeString.ValueFrom("hogehoge"))). + MustBuild(), }).MustBuild(), }). 
MustBuild() diff --git a/pkg/scene/plugin.go b/pkg/scene/plugin.go index 3228304b2..e003d2c66 100644 --- a/pkg/scene/plugin.go +++ b/pkg/scene/plugin.go @@ -6,25 +6,32 @@ type Plugin struct { } func NewPlugin(plugin PluginID, property *PropertyID) *Plugin { - if property != nil { - property2 := *property - property = &property2 - } return &Plugin{ plugin: plugin, - property: property, + property: property.CopyRef(), } } -func (s Plugin) Plugin() PluginID { +func (s *Plugin) Plugin() PluginID { + if s == nil { + return PluginID{} + } return s.plugin } -func (s Plugin) Property() *PropertyID { - property := s.property - if property != nil { - property2 := *property - property = &property2 +func (s *Plugin) Property() *PropertyID { + if s == nil { + return nil + } + return s.property.CopyRef() +} + +func (s *Plugin) Clone() *Plugin { + if s == nil { + return nil + } + return &Plugin{ + plugin: s.plugin.Clone(), + property: s.property.CopyRef(), } - return property } diff --git a/pkg/scene/plugin_test.go b/pkg/scene/plugin_test.go index bce10b3c0..afcf49df4 100644 --- a/pkg/scene/plugin_test.go +++ b/pkg/scene/plugin_test.go @@ -9,12 +9,16 @@ import ( func TestPlugin(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID().Ref() + res := NewPlugin(pid, pr) - p := Plugin{ + assert.Equal(t, &Plugin{ plugin: pid, property: pr, - } - assert.Equal(t, &p, res) - assert.Equal(t, pid, p.Plugin()) - assert.Equal(t, pr, p.Property()) + }, res) + assert.Equal(t, pid, res.Plugin()) + assert.Equal(t, pr, res.Property()) + + cl := res.Clone() + assert.Equal(t, res, cl) + assert.NotSame(t, res, cl) } diff --git a/pkg/scene/plugins.go b/pkg/scene/plugins.go index deb440376..a0367934b 100644 --- a/pkg/scene/plugins.go +++ b/pkg/scene/plugins.go @@ -4,12 +4,12 @@ type Plugins struct { plugins []*Plugin } -func NewPlugins(p []*Plugin) *Plugins { - if p == nil { +func NewPlugins(plugins []*Plugin) *Plugins { + if plugins == nil { return &Plugins{plugins: []*Plugin{}} } - p2 
:= make([]*Plugin, 0, len(p)) - for _, p1 := range p { + p2 := make([]*Plugin, 0, len(plugins)) + for _, p1 := range plugins { if p1 == nil { continue } @@ -21,8 +21,7 @@ func NewPlugins(p []*Plugin) *Plugins { } } if !duplicated { - p3 := *p1 - p2 = append(p2, &p3) + p2 = append(p2, p1) } } return &Plugins{plugins: p2} @@ -64,8 +63,7 @@ func (p *Plugins) Add(sp *Plugin) { if sp == nil || p.Has(sp.plugin) || sp.plugin.Equal(OfficialPluginID) { return } - sp2 := *sp - p.plugins = append(p.plugins, &sp2) + p.plugins = append(p.plugins, sp) } func (p *Plugins) Remove(pid PluginID) { @@ -107,7 +105,7 @@ func (p *Plugins) Properties() []PropertyID { func (p *Plugins) Plugin(pluginID PluginID) *Plugin { for _, pp := range p.plugins { - if pp.plugin == pluginID { + if pp.plugin.Equal(pluginID) { return pp } } diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 5a0a1ef8e..e49e2d7a3 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -236,7 +236,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol func collectDatasetIDs(properties []*property.Property) []property.DatasetID { res := []property.DatasetID{} for _, p := range properties { - res = append(res, p.CollectDatasets()...) + res = append(res, p.Datasets()...) 
} return res } diff --git a/pkg/scene/widget.go b/pkg/scene/widget.go index e52a61b5b..f8d485855 100644 --- a/pkg/scene/widget.go +++ b/pkg/scene/widget.go @@ -75,3 +75,17 @@ func (w *Widget) SetExtended(extended bool) { } w.extended = extended } + +func (w *Widget) Clone() *Widget { + if w == nil { + return nil + } + return &Widget{ + id: w.id, + plugin: w.plugin.Clone(), + extension: w.extension, + property: w.property, + enabled: w.enabled, + extended: w.extended, + } +} diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go index 48a3f0aaa..9c1d7a281 100644 --- a/pkg/scene/widget_test.go +++ b/pkg/scene/widget_test.go @@ -132,3 +132,11 @@ func TestWidget_SetExtended(t *testing.T) { res.SetExtended(true) assert.True(t, res.Extended()) } + +func TestWidget_Clone(t *testing.T) { + res := MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res2 := res.Clone() + assert.Equal(t, res, res2) + assert.NotSame(t, res, res2) + assert.Nil(t, (*Widget)(nil).Clone()) +} diff --git a/pkg/scene/widgets.go b/pkg/scene/widgets.go index 499a8e0b1..657320751 100644 --- a/pkg/scene/widgets.go +++ b/pkg/scene/widgets.go @@ -29,8 +29,7 @@ func NewWidgets(w []*Widget) *Widgets { } } if !duplicated { - w3 := *w1 - w2 = append(w2, &w3) + w2 = append(w2, w1) } } return &Widgets{widgets: w2} @@ -71,8 +70,7 @@ func (w *Widgets) Add(sw *Widget) { if w == nil || sw == nil || w.Has(sw.ID()) { return } - sw2 := *sw - w.widgets = append(w.widgets, &sw2) + w.widgets = append(w.widgets, sw) } func (w *Widgets) Remove(wid WidgetID) { diff --git a/pkg/user/members.go b/pkg/user/members.go index e42c310e4..cf2e0f5bc 100644 --- a/pkg/user/members.go +++ b/pkg/user/members.go @@ -123,3 +123,10 @@ func (m *Members) UsersByRole(role Role) []ID { func (m *Members) IsOnlyOwner(u ID) bool { return len(m.UsersByRole(RoleOwner)) == 1 && m.members[u] == RoleOwner } + +func (m *Members) Fixed() bool { + if m == nil { + return false + } + return m.fixed +} 
diff --git a/pkg/user/members_test.go b/pkg/user/members_test.go index b3cdad465..ffc29f717 100644 --- a/pkg/user/members_test.go +++ b/pkg/user/members_test.go @@ -268,3 +268,36 @@ func TestMembers_UsersByRole(t *testing.T) { }) } } + +func TestMembers_Fixed(t *testing.T) { + tests := []struct { + name string + target *Members + want bool + }{ + { + name: "true", + target: &Members{ + fixed: true, + }, + want: true, + }, + { + name: "empty", + target: &Members{}, + want: false, + }, + { + name: "nil", + want: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Fixed()) + }) + } +} diff --git a/pkg/user/team.go b/pkg/user/team.go index 3015389f1..2a1312bcc 100644 --- a/pkg/user/team.go +++ b/pkg/user/team.go @@ -3,7 +3,7 @@ package user type Team struct { id TeamID name string - members Members + members *Members } func (t *Team) ID() TeamID { @@ -15,13 +15,13 @@ func (t *Team) Name() string { } func (t *Team) Members() *Members { - return &t.members + return t.members } -func (t *Team) Rename(name string) { - t.name = name +func (t *Team) IsPersonal() bool { + return t.members.Fixed() } -func (t *Team) IsPersonal() bool { - return t.members.fixed +func (t *Team) Rename(name string) { + t.name = name } diff --git a/pkg/user/team_builder.go b/pkg/user/team_builder.go index d6be1f428..ebb9e986f 100644 --- a/pkg/user/team_builder.go +++ b/pkg/user/team_builder.go @@ -15,9 +15,9 @@ func (b *TeamBuilder) Build() (*Team, error) { return nil, ErrInvalidID } if b.members == nil { - b.t.members = *NewMembers() + b.t.members = NewMembers() } else { - b.t.members = *NewMembersWith(b.members) + b.t.members = NewMembersWith(b.members) } b.t.members.fixed = b.personal return b.t, nil diff --git a/pkg/user/team_builder_test.go b/pkg/user/team_builder_test.go index d6b693ba5..0c898fa04 100644 --- a/pkg/user/team_builder_test.go +++ b/pkg/user/team_builder_test.go @@ -61,7 +61,7 @@ func 
TestTeamBuilder_Build(t *testing.T) { Expected: &Team{ id: tid, name: "xxx", - members: Members{ + members: &Members{ members: map[ID]Role{uid: RoleOwner}, fixed: true, }, @@ -75,7 +75,7 @@ func TestTeamBuilder_Build(t *testing.T) { Expected: &Team{ id: tid, name: "xxx", - members: Members{ + members: &Members{ members: map[ID]Role{}, fixed: false, }, @@ -134,7 +134,7 @@ func TestTeamBuilder_MustBuild(t *testing.T) { Expected: &Team{ id: tid, name: "xxx", - members: Members{ + members: &Members{ members: map[ID]Role{uid: RoleOwner}, fixed: true, }, @@ -148,7 +148,7 @@ func TestTeamBuilder_MustBuild(t *testing.T) { Expected: &Team{ id: tid, name: "xxx", - members: Members{ + members: &Members{ members: map[ID]Role{}, fixed: false, }, diff --git a/pkg/user/team_test.go b/pkg/user/team_test.go index aa2bcb79e..3f0ea578e 100644 --- a/pkg/user/team_test.go +++ b/pkg/user/team_test.go @@ -25,13 +25,13 @@ func TestTeam_Members(t *testing.T) { assert.Equal(t, m, tm.Members().Members()) } +func TestTeam_IsPersonal(t *testing.T) { + tm := NewTeam().NewID().Personal(true).MustBuild() + assert.Equal(t, true, tm.IsPersonal()) +} + func TestTeam_Rename(t *testing.T) { tm := NewTeam().NewID().Name("ttt").MustBuild() tm.Rename("ccc") assert.Equal(t, "ccc", tm.Name()) } - -func TestTeam_IsPersonal(t *testing.T) { - tm := NewTeam().NewID().Personal(true).MustBuild() - assert.Equal(t, true, tm.IsPersonal()) -} diff --git a/pkg/value/type.go b/pkg/value/type.go index 963747c4b..6c0ee631f 100644 --- a/pkg/value/type.go +++ b/pkg/value/type.go @@ -30,6 +30,10 @@ func (t Type) Default() bool { return ok } +func (t Type) None() *Optional { + return NewOptional(t, nil) +} + func (t Type) ValueFrom(i interface{}, p TypePropertyMap) *Value { if t == TypeUnknown || i == nil { return nil diff --git a/pkg/value/type_test.go b/pkg/value/type_test.go index 7b32af707..90191df2f 100644 --- a/pkg/value/type_test.go +++ b/pkg/value/type_test.go @@ -50,6 +50,38 @@ func TestType_Default(t *testing.T) { 
} } +func TestType_None(t *testing.T) { + tests := []struct { + name string + tr Type + want *Optional + }{ + { + name: "default", + tr: TypeString, + want: &Optional{t: TypeString}, + }, + { + name: "custom", + tr: Type("foo"), + want: &Optional{t: Type("foo")}, + }, + { + name: "unknown", + tr: TypeUnknown, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.tr.None()) + }) + } +} + func TestType_ValueFrom(t *testing.T) { tpm := TypePropertyMap{ Type("foo"): &tpmock{}, diff --git a/pkg/value/value.go b/pkg/value/value.go index aac797f1e..ffaa79209 100644 --- a/pkg/value/value.go +++ b/pkg/value/value.go @@ -21,6 +21,10 @@ func (v *Value) Clone() *Value { return v.t.ValueFrom(v.v, v.p) } +func (v *Value) Some() *Optional { + return OptionalFrom(v) +} + func (v *Value) Value() interface{} { if v == nil { return nil diff --git a/pkg/value/value_test.go b/pkg/value/value_test.go index 8365c41d7..ebd7fdd98 100644 --- a/pkg/value/value_test.go +++ b/pkg/value/value_test.go @@ -96,6 +96,68 @@ func TestValue_Clone(t *testing.T) { } } +func TestValue_Some(t *testing.T) { + tp := &tpmock{} + tpm := TypePropertyMap{ + Type("hoge"): tp, + } + + tests := []struct { + name string + value *Value + want *Optional + }{ + { + name: "ok", + value: &Value{ + t: TypeString, + v: "foo", + }, + want: &Optional{ + t: TypeString, + v: &Value{ + t: TypeString, + v: "foo", + }, + }, + }, + { + name: "custom type property", + value: &Value{ + t: Type("hoge"), + v: "fooa", + p: tpm, + }, + want: &Optional{ + t: Type("hoge"), + v: &Value{ + t: Type("hoge"), + v: "fooa", + p: tpm, + }, + }, + }, + { + name: "nil", + value: nil, + want: nil, + }, + { + name: "empty", + value: &Value{}, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.value.Some()) + }) + } +} + func TestValue_Value(t *testing.T) { u, _ := 
url.Parse("https://reearth.io") From 9a8b025d2a9fa2667fb417c22e29f63e9bf39c25 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 25 Jan 2022 11:56:57 +0900 Subject: [PATCH 137/253] ci: update github workflows --- .github/workflows/{main.yml => build.yml} | 92 +++++++---------------- .github/workflows/{pr.yml => ci.yml} | 14 +++- README.md | 2 +- 3 files changed, 38 insertions(+), 70 deletions(-) rename .github/workflows/{main.yml => build.yml} (63%) rename .github/workflows/{pr.yml => ci.yml} (88%) diff --git a/.github/workflows/main.yml b/.github/workflows/build.yml similarity index 63% rename from .github/workflows/main.yml rename to .github/workflows/build.yml index e0fbff817..b4c211dfb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/build.yml @@ -1,51 +1,26 @@ -name: main +name: build on: - push: - branches: - - main - tags-ignore: - - "*" + workflow_run: + workflows: [ci] + type: completed + branches: [main] jobs: - main: + info: runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} outputs: - branch: ${{ steps.info.outputs.branch }} sha_short: ${{ steps.info.outputs.sha_short }} new_tag: ${{ steps.info.outputs.new_tag }} new_tag_short: ${{ steps.info.outputs.new_tag_short }} steps: - - name: set up - uses: actions/setup-go@v2 - with: - go-version: 1.17 - id: go - name: checkout uses: actions/checkout@v2 - - name: cache - uses: actions/cache@v2 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - name: golangci-lint - uses: golangci/golangci-lint-action@v2 - with: - version: v1.43 - - name: test - run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic - - name: Send coverage report - uses: codecov/codecov-action@v2 - with: - token: ${{ secrets.CODECOV_TOKEN }} - file: coverage.txt - name: Fetch tags run: git fetch --prune --unshallow --tags - name: Get info id: info # The tag name should be retrieved lazily, as tagging may be delayed. run: | - echo "::set-output name=branch::${GITHUB_REF##*/}" echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" TAG=$(git tag --points-at HEAD) if [[ ! -z "$TAG" ]]; then @@ -56,7 +31,7 @@ jobs: name: Build and release runs-on: ubuntu-latest needs: - - main + - info if: github.event.repository.full_name == 'reearth/reearth-backend' env: NAME: reearth-backend @@ -70,17 +45,17 @@ jobs: with: go-version: 1.17 - name: Run GoReleaser for nightly - if: "!needs.main.outputs.new_tag" + if: "!needs.info.outputs.new_tag" uses: goreleaser/goreleaser-action@v2 with: args: release --rm-dist --snapshot env: GORELEASER_CURRENT_TAG: 0.0.0 - name: Rename artifacts - if: "!needs.main.outputs.new_tag" + if: "!needs.info.outputs.new_tag" run: for f in dist/${NAME}_*.*; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_nightly/'); done - name: Create GitHub release for nightly - if: "!needs.main.outputs.new_tag" + if: "!needs.info.outputs.new_tag" uses: ncipollo/release-action@v1 with: artifacts: dist/${{ env.NAME }}_*.* @@ -91,33 +66,33 @@ jobs: prerelease: true allowUpdates: true - name: Run GoReleaser - if: needs.main.outputs.new_tag + if: needs.info.outputs.new_tag uses: goreleaser/goreleaser-action@v2 with: args: release --rm-dist env: - GORELEASER_CURRENT_TAG: ${{ needs.main.outputs.new_tag }} + GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.new_tag }} - name: Download latest changelog - if: needs.main.outputs.new_tag + if: needs.info.outputs.new_tag uses: dawidd6/action-download-artifact@v2 with: workflow: release.yml - name: changelog-${{ needs.main.outputs.new_tag }} + name: changelog-${{ needs.info.outputs.new_tag }} - 
name: Create GitHub release - if: needs.main.outputs.new_tag + if: needs.info.outputs.new_tag uses: ncipollo/release-action@v1 with: artifacts: dist/${{ env.NAME }}_*.* commit: ${{ github.sha }} - name: ${{ needs.main.outputs.new_tag }} - tag: ${{ needs.main.outputs.new_tag }} + name: ${{ needs.info.outputs.new_tag }} + tag: ${{ needs.info.outputs.new_tag }} bodyFile: CHANGELOG_latest.md docker: name: Build and push Docker image runs-on: ubuntu-latest if: github.event.repository.full_name == 'reearth/reearth-backend' needs: - - main + - info env: IMAGE_NAME: reearth/reearth-backend steps: @@ -133,22 +108,22 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push nightly - if: "!needs.main.outputs.new_tag" + if: "!needs.info.outputs.new_tag" id: docker_build uses: docker/build-push-action@v2 with: context: . platforms: linux/amd64,linux/arm64 push: true - build-args: VERSION=0.0.0-SNAPSHOT-${{ needs.main.outputs.sha_short }} + build-args: VERSION=0.0.0-SNAPSHOT-${{ needs.info.outputs.sha_short }} tags: ${{ env.IMAGE_NAME }}:nightly - cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:nightly - cache-to: type=inline + cache-from: type=gha + cache-to: type=gha,mode=max - name: Get Docker tags id: tags - if: needs.main.outputs.new_tag + if: needs.info.outputs.new_tag env: - TAG: ${{ needs.main.outputs.new_tag_short }} + TAG: ${{ needs.info.outputs.new_tag_short }} run: | TAGS=$IMAGE_NAME:$TAG if [[ ! $TAG =~ '-' ]]; then @@ -158,26 +133,13 @@ jobs: fi echo "::set-output name=tags::$TAGS" - name: Build and push release - if: needs.main.outputs.new_tag + if: needs.info.outputs.new_tag uses: docker/build-push-action@v2 with: context: . 
platforms: linux/amd64,linux/arm64 push: true - build-args: VERSION=${{ needs.main.outputs.new_tag_short }} + build-args: VERSION=${{ needs.info.outputs.new_tag_short }} tags: ${{ steps.tags.outputs.tags }} cache-from: type=registry,ref=${IMAGE_NAME}:latest cache-to: type=inline - slack-notification: - if: github.event.workflow_run.conclusion == 'success' && always() - name: Slack Notification - needs: - - main - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/pr.yml b/.github/workflows/ci.yml similarity index 88% rename from .github/workflows/pr.yml rename to .github/workflows/ci.yml index 641cb42cc..628c8d073 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/ci.yml @@ -1,8 +1,12 @@ -name: pr -on: [pull_request] +name: CI +on: + push: + branches: [main] + tags-ignore: ["*"] + pull_request: jobs: - pr: - name: pr + ci: + name: CI runs-on: ubuntu-latest steps: - name: set up @@ -23,6 +27,8 @@ jobs: uses: golangci/golangci-lint-action@v2 with: version: v1.43 + args: --timeout=10m + skip-go-installation: true - name: test run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic - name: Send coverage report diff --git a/README.md b/README.md index 79ea7382d..a1fd9e582 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # reearth-backend -[![main](https://github.com/reearth/reearth-backend/actions/workflows/main.yml/badge.svg)](https://github.com/reearth/reearth-backend/actions/workflows/main.yml) [![codecov](https://codecov.io/gh/reearth/reearth-backend/branch/main/graph/badge.svg?token=4UV79645UP)](https://codecov.io/gh/reearth/reearth-backend) [![Go Report Card](https://goreportcard.com/badge/github.com/reearth/reearth-backend)](https://goreportcard.com/report/github.com/reearth/reearth-backend) [![Go Reference](https://pkg.go.dev/badge/github.com/reearth/reearth-backend.svg)](https://pkg.go.dev/github.com/reearth/reearth-backend) +[![CI](https://github.com/reearth/reearth-backend/actions/workflows/ci.yml/badge.svg)](https://github.com/reearth/reearth-backend/actions/workflows/main.yml) [![codecov](https://codecov.io/gh/reearth/reearth-backend/branch/main/graph/badge.svg?token=4UV79645UP)](https://codecov.io/gh/reearth/reearth-backend) [![Go Report Card](https://goreportcard.com/badge/github.com/reearth/reearth-backend)](https://goreportcard.com/report/github.com/reearth/reearth-backend) [![Go Reference](https://pkg.go.dev/badge/github.com/reearth/reearth-backend.svg)](https://pkg.go.dev/github.com/reearth/reearth-backend) This is the back-end repository of [Re:Earth](https://github.com/reearth/reearth). 
From b158e13715c65f70a0c4e1a5afc42ec0acbf3bd4 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 25 Jan 2022 11:58:38 +0900 Subject: [PATCH 138/253] ci: fix github workflows --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 628c8d073..f2c6f9554 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,7 +40,7 @@ jobs: if: github.event.repository.full_name == 'reearth/reearth-backend' && always() name: Slack Notification needs: - - pr + - ci runs-on: ubuntu-latest steps: - name: Slack Notification From 98ac44bebebbc72385c223e3d8594a3d5ab9fc5e Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 25 Jan 2022 12:02:00 +0900 Subject: [PATCH 139/253] ci: fix github workflows [ci skip] --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b4c211dfb..556b5562b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,7 +2,7 @@ name: build on: workflow_run: workflows: [ci] - type: completed + type: [completed] branches: [main] jobs: info: From 01662550471c5ae1812e80131c332d43c39e7a96 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 25 Jan 2022 12:17:25 +0900 Subject: [PATCH 140/253] ci: fix github workflows [ci skip] --- .github/workflows/build.yml | 11 ++++++++--- .github/workflows/deploy_test.yml | 9 ++++----- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 556b5562b..e9e93a4d5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -6,8 +6,9 @@ on: branches: [main] jobs: info: + name: Collect information runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} + if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' outputs: sha_short: 
${{ steps.info.outputs.sha_short }} new_tag: ${{ steps.info.outputs.new_tag }} @@ -27,12 +28,17 @@ jobs: echo "::set-output name=new_tag::$TAG" echo "::set-output name=new_tag_short::${TAG#v}" fi + - name: Show info + env: + SHA_SHORT: ${{ steps.info.outputs.sha_short }} + NEW_TAG: ${{ steps.info.outputs.new_tag }} + NEW_TAG_SHORT: ${{ steps.info.outputs.new_tag_short }} + run: echo "sha_short=$SHA_SHORT, new_tag=$NEW_TAG, new_tag_short=$NEW_TAG_SHORT" build: name: Build and release runs-on: ubuntu-latest needs: - info - if: github.event.repository.full_name == 'reearth/reearth-backend' env: NAME: reearth-backend steps: @@ -90,7 +96,6 @@ jobs: docker: name: Build and push Docker image runs-on: ubuntu-latest - if: github.event.repository.full_name == 'reearth/reearth-backend' needs: - info env: diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index c79401d5f..afa1580e7 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -1,17 +1,16 @@ name: deploy_test on: workflow_run: - workflows: - - main - types: - - completed + workflows: [build] + types: [completed] + branches: [main] env: IMAGE: reearth/reearth-backend:nightly IMAGE_GCP: us.gcr.io/reearth-oss/reearth-backend:nightly GCP_REGION: us-central1 jobs: deploy_test: - name: deploy_test + name: Deploy app to test env runs-on: ubuntu-latest if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' steps: From 3a34d02ab728baec35ea539dc179baa4bcd9ec93 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 27 Jan 2022 19:23:36 +0900 Subject: [PATCH 141/253] ci: update github release workflow [ci skip] --- .github/workflows/release.yml | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4bfc791fd..ff5bc7254 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -28,13 +28,7 @@ jobs: custom_tag: ${{ github.event.inputs.custom_tag }} dry_run: true - name: Prepare git-cliff - id: cliff_pre - run: | - touch CHANGELOG.md CHANGELOG_latest.md - PREV_TAG=$(git describe --abbrev=0 --tags $(git rev-list --tags --max-count=1) 2> /dev/null) - if [[ $PREV_TAG == v*.*.* ]]; then - echo "::set-output name=latest::--latest" - fi + run: touch CHANGELOG.md - name: Generate changelog uses: orhun/git-cliff-action@v1 env: @@ -42,21 +36,27 @@ jobs: with: config: .github/cliff.toml args: --verbose --tag ${{ steps.tag.outputs.new_tag }} - - name: Generate latest changelog - uses: orhun/git-cliff-action@v1 - id: changelog - env: - OUTPUT: CHANGELOG_latest.md - with: - config: .github/cliff.toml - args: --verbose --strip all --tag ${{ steps.tag.outputs.new_tag }} ${{ steps.cliff_pre.outputs.latest }} - name: Format changelogs env: URL: ${{ github.event.repository.html_url }} run: | URL=${URL//\//\\\/} - sed -i -E 's///g; s/\(#([0-9]+)\)/([#\1]('"$URL"'\/pull\/\1))/g; s/`([a-zA-Z0-9]+)`/[`\1`]('"$URL"'\/commit\/\1)/g' CHANGELOG*.md - sed -i '/^## .*$/d; 1d; 2d' CHANGELOG_latest.md + sed -i -E 's///g; s/\(#([0-9]+)\)/([#\1]('"$URL"'\/pull\/\1))/g; s/`([a-zA-Z0-9]+)`/[`\1`]('"$URL"'\/commit\/\1)/g' CHANGELOG.md + - name: Generate CHANGELOG_latest.md + uses: actions/github-script@v5 + with: + script: | + const fs = require("fs"); + const changelog = fs.readFileSync("CHANGELOG.md", "utf8"); + const lines = changelog.split("\n"); + const h = lines + .map((l, i) => [l, i]) + .filter(l => l[0].startsWith("## ")) + .map(l => l[1]) + .slice(0, 2); + if (!h.length) throw new Error("failed to get the changelog of the latest version"); + const m = lines.slice(h[0] + 1, h[1]).join("\n").trim(); + fs.writeFileSync("CHANGELOG_latest.md", m); - name: Upload latest changelog uses: actions/upload-artifact@v2 with: From b398db39f6e88ffb724dd063abe64806e9224612 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 27 Jan 2022 10:24:30 
+0000 Subject: [PATCH 142/253] v0.4.0 --- CHANGELOG.md | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6aea2a5f0..02a442bf9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,24 @@ # Changelog All notable changes to this project will be documented in this file. -## 0.3.0 - 2021-12-23 +## 0.4.0 - 2022-01-27 + +### ๐Ÿš€ Features + +- Add "clamp to ground" option to file primitive ([#95](https://github.com/reearth/reearth-backend/pull/95)) [`559194`](https://github.com/reearth/reearth-backend/commit/559194) +- Infobox and text block padding ([#100](https://github.com/reearth/reearth-backend/pull/100)) [`ddd0db`](https://github.com/reearth/reearth-backend/commit/ddd0db) + +### โšก๏ธ Performance + +- Add indexes of mongo collections ([#98](https://github.com/reearth/reearth-backend/pull/98)) [`691cb7`](https://github.com/reearth/reearth-backend/commit/691cb7) + +### โœจ Refactor + +- Pkg/id, use ID aliases, move JSON schemas ([#97](https://github.com/reearth/reearth-backend/pull/97)) [`1265ac`](https://github.com/reearth/reearth-backend/commit/1265ac) +- Unit tests ([#99](https://github.com/reearth/reearth-backend/pull/99)) [`0d112c`](https://github.com/reearth/reearth-backend/commit/0d112c) +- Pkg/property, pkg/layer, pkg/plugin ([#101](https://github.com/reearth/reearth-backend/pull/101)) [`17a463`](https://github.com/reearth/reearth-backend/commit/17a463) + +## 0.3.0 - 2022-01-11 ### ๐Ÿš€ Features From 805d788f7b00c5eac9df341013533e202d3561c7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 1 Feb 2022 22:03:27 +0900 Subject: [PATCH 143/253] fix: scene exporter should export layers and tags while maintaining the tree structure (#104) * export layers and tags while maintaining the tree structure * fix * export invislbe layers --- internal/infrastructure/memory/tag.go | 18 +- internal/infrastructure/memory/tag_test.go | 20 +- internal/infrastructure/mongo/mongodoc/tag.go | 2 +- 
.../infrastructure/mongo/mongodoc/tag_test.go | 8 +- internal/infrastructure/mongo/tag.go | 7 + internal/usecase/interactor/project.go | 4 + internal/usecase/interactor/tag.go | 2 +- internal/usecase/repo/tag.go | 23 + pkg/layer/group.go | 3 + pkg/layer/id.go | 2 + pkg/layer/merged.go | 59 ++- pkg/layer/merged_test.go | 21 + pkg/layer/merging/merged.go | 36 +- pkg/layer/merging/merger.go | 2 +- pkg/layer/merging/merger_test.go | 14 +- pkg/layer/merging/sealed.go | 31 ++ pkg/layer/merging/sealer.go | 54 +- pkg/scene/builder/builder.go | 9 +- pkg/scene/builder/builder_test.go | 478 ++++++++++++------ pkg/scene/builder/encoder.go | 76 ++- pkg/scene/builder/encoder_test.go | 79 ++- pkg/scene/builder/scene.go | 51 +- pkg/tag/group.go | 7 +- pkg/tag/group_builder.go | 11 +- pkg/tag/group_test.go | 6 +- pkg/tag/item.go | 12 + pkg/tag/item_builder.go | 8 + pkg/tag/list.go | 116 ++++- pkg/tag/list_test.go | 91 +++- pkg/tag/loader.go | 46 ++ pkg/tag/map.go | 48 ++ 31 files changed, 1061 insertions(+), 283 deletions(-) create mode 100644 pkg/tag/loader.go create mode 100644 pkg/tag/map.go diff --git a/internal/infrastructure/memory/tag.go b/internal/infrastructure/memory/tag.go index 2cbba397d..d3059a029 100644 --- a/internal/infrastructure/memory/tag.go +++ b/internal/infrastructure/memory/tag.go @@ -13,7 +13,7 @@ import ( type Tag struct { lock sync.Mutex - data map[id.TagID]tag.Tag + data tag.Map } func NewTag() repo.Tag { @@ -50,6 +50,13 @@ func (t *Tag) FindByIDs(ctx context.Context, tids []id.TagID, ids []id.SceneID) return res, nil } +func (t *Tag) FindByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { + t.lock.Lock() + defer t.lock.Unlock() + + return t.data.All().FilterByScene(sceneID).Refs(), nil +} + func (t *Tag) FindItemByID(ctx context.Context, tagID id.TagID, ids []id.SceneID) (*tag.Item, error) { t.lock.Lock() defer t.lock.Unlock() @@ -116,14 +123,7 @@ func (t *Tag) FindRootsByScene(ctx context.Context, sceneID id.SceneID) ([]*tag. 
t.lock.Lock() defer t.lock.Unlock() - var res []*tag.Tag - for _, tag := range t.data { - tag := tag - if tag.Scene() == sceneID { - res = append(res, &tag) - } - } - return res, nil + return t.data.All().FilterByScene(sceneID).Roots().Refs(), nil } func (t *Tag) Save(ctx context.Context, tag tag.Tag) error { diff --git a/internal/infrastructure/memory/tag_test.go b/internal/infrastructure/memory/tag_test.go index c6d220bc3..155c34727 100644 --- a/internal/infrastructure/memory/tag_test.go +++ b/internal/infrastructure/memory/tag_test.go @@ -4,11 +4,9 @@ import ( "context" "testing" - "github.com/reearth/reearth-backend/pkg/rerror" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/tag" - "github.com/stretchr/testify/assert" ) @@ -41,7 +39,7 @@ func TestTag_FindByIDs(t *testing.T) { sid2 := id.NewSceneID() sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() tti := tag.Tag(t1) @@ -64,7 +62,7 @@ func TestTag_FindRootsByScene(t *testing.T) { sid := id.NewSceneID() sid2 := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() tti := tag.Tag(t1) @@ -87,7 +85,7 @@ func TestTag_FindGroupByID(t *testing.T) { sid := id.NewSceneID() sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := 
tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) @@ -110,7 +108,7 @@ func TestTag_FindItemByID(t *testing.T) { sid := id.NewSceneID() sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) @@ -208,7 +206,7 @@ func TestTag_Remove(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) @@ -228,7 +226,7 @@ func TestTag_RemoveAll(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) @@ -252,7 +250,7 @@ func TestTag_RemoveByScene(t *testing.T) { sid := id.NewSceneID() sid2 := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewItem().NewID().Scene(sid2).Label("item").Build() t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) @@ -276,7 +274,7 @@ func TestTag_FindGroupByItem(t *testing.T) { sid := id.NewSceneID() sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.NewListFromTags([]id.TagID{t1.ID()}) + tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := 
tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) diff --git a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go index 3c270efb5..68c394b64 100644 --- a/internal/infrastructure/mongo/mongodoc/tag.go +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -177,6 +177,6 @@ func (d *TagDocument) ModelGroup() (*tag.Group, error) { ID(tid). Label(d.Label). Scene(sid). - Tags(tag.NewListFromTags(ids)). + Tags(tag.IDListFrom(ids)). Build() } diff --git a/internal/infrastructure/mongo/mongodoc/tag_test.go b/internal/infrastructure/mongo/mongodoc/tag_test.go index b4cd65614..c9934ef6b 100644 --- a/internal/infrastructure/mongo/mongodoc/tag_test.go +++ b/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -25,7 +25,7 @@ func TestNewTag(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Tags(tag.IDListFrom([]id.TagID{ti.ID()})). Scene(sid). Build() type args struct { @@ -93,7 +93,7 @@ func TestNewTags(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Tags(tag.IDListFrom([]id.TagID{ti.ID()})). Scene(sid). Build() tgi := tag.Tag(tg) @@ -252,7 +252,7 @@ func TestTagDocument_Model(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Tags(tag.IDListFrom([]id.TagID{ti.ID()})). Scene(sid). Build() type fields struct { @@ -341,7 +341,7 @@ func TestTagDocument_ModelGroup(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.NewListFromTags([]id.TagID{ti.ID()})). + Tags(tag.IDListFrom([]id.TagID{ti.ID()})). Scene(sid). 
Build() type fields struct { diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index 5aed97d8e..ab82b26c5 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -51,6 +51,13 @@ func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) return filterTags(ids, res), nil } +func (r *tagRepo) FindByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + filter := bson.M{ + "scene": id.String(), + } + return r.find(ctx, nil, filter) +} + func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID, f []id.SceneID) (*tag.Item, error) { filter := r.sceneFilter(bson.D{ {Key: "id", Value: id.String()}, diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 889ad4a37..0bc54e742 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -29,6 +29,7 @@ type Project struct { layerRepo repo.Layer datasetRepo repo.Dataset datasetSchemaRepo repo.DatasetSchema + tagRepo repo.Tag transaction repo.Transaction file gateway.File } @@ -46,6 +47,7 @@ func NewProject(r *repo.Container, gr *gateway.Container) interfaces.Project { layerRepo: r.Layer, datasetRepo: r.Dataset, datasetSchemaRepo: r.DatasetSchema, + tagRepo: r.Tag, transaction: r.Transaction, file: gr.File, } @@ -315,6 +317,8 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP repo.LayerLoaderFrom(i.layerRepo, scenes), repo.PropertyLoaderFrom(i.propertyRepo, scenes), repo.DatasetGraphLoaderFrom(i.datasetRepo, scenes), + repo.TagLoaderFrom(i.tagRepo, scenes), + repo.TagSceneLoaderFrom(i.tagRepo, scenes), ).BuildScene(ctx, w, s, time.Now()) }() diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go index 5beb030bb..52678dee2 100644 --- a/internal/usecase/interactor/tag.go +++ b/internal/usecase/interactor/tag.go @@ -103,7 +103,7 @@ func (i *Tag) CreateGroup(ctx 
context.Context, inp interfaces.CreateTagGroupPara return nil, interfaces.ErrOperationDenied } - list := tag.NewListFromTags(inp.Tags) + list := tag.IDListFrom(inp.Tags) group, err := tag.NewGroup(). NewID(). Label(inp.Label). diff --git a/internal/usecase/repo/tag.go b/internal/usecase/repo/tag.go index 456642f7c..b3c8bcda1 100644 --- a/internal/usecase/repo/tag.go +++ b/internal/usecase/repo/tag.go @@ -10,6 +10,7 @@ import ( type Tag interface { FindByID(context.Context, id.TagID, []id.SceneID) (tag.Tag, error) FindByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Tag, error) + FindByScene(context.Context, id.SceneID) ([]*tag.Tag, error) FindItemByID(context.Context, id.TagID, []id.SceneID) (*tag.Item, error) FindItemByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Item, error) FindGroupByID(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) @@ -22,3 +23,25 @@ type Tag interface { RemoveAll(context.Context, []id.TagID) error RemoveByScene(context.Context, id.SceneID) error } + +func TagLoaderFrom(r Tag, scenes []id.SceneID) tag.Loader { + return func(ctx context.Context, ids ...id.TagID) ([]*tag.Tag, error) { + return r.FindByIDs(ctx, ids, scenes) + } +} + +func TagSceneLoaderFrom(r Tag, scenes []id.SceneID) tag.SceneLoader { + return func(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + found := false + for _, s := range scenes { + if id == s { + found = true + break + } + } + if !found { + return nil, nil + } + return r.FindByScene(ctx, id) + } +} diff --git a/pkg/layer/group.go b/pkg/layer/group.go index 61a0d8877..8a7ed345f 100644 --- a/pkg/layer/group.go +++ b/pkg/layer/group.go @@ -180,6 +180,9 @@ func (l *Group) ValidateProperties(pm property.Map) error { } func (l *Group) Tags() *TagList { + if l == nil { + return nil + } if l.layerBase.tags == nil { l.layerBase.tags = NewTagList(nil) } diff --git a/pkg/layer/id.go b/pkg/layer/id.go index b34b0271d..eb9b6dad1 100644 --- a/pkg/layer/id.go +++ b/pkg/layer/id.go @@ -60,10 
+60,12 @@ var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID type IDSet = id.LayerIDSet type InfoboxFIeldIDSet = id.InfoboxFieldIDSet type DatasetIDSet = id.DatasetIDSet +type TagIDSet = id.TagIDSet var NewIDSet = id.NewLayerIDSet var NewInfoboxFIeldIDSet = id.NewInfoboxFieldIDSet var NewDatasetIDSet = id.NewDatasetIDSet +var NewTagIDSet = id.NewTagIDSet var OfficialPluginID = id.OfficialPluginID var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/layer/merged.go b/pkg/layer/merged.go index dfbeb2e8f..ed7c98d2b 100644 --- a/pkg/layer/merged.go +++ b/pkg/layer/merged.go @@ -14,6 +14,14 @@ type Merged struct { Infobox *MergedInfobox PluginID *PluginID ExtensionID *PluginExtensionID + IsVisible bool + Tags []MergedTag +} + +// MergedTag represents a merged tag from two layers +type MergedTag struct { + ID TagID + Tags []MergedTag } // MergedInfobox represents a merged info box from two layers @@ -48,8 +56,37 @@ func Merge(o Layer, p *Group) *Merged { Parent: p.Property(), LinkedDataset: ToLayerItem(o).LinkedDataset(), }, - Infobox: MergeInfobox(o.Infobox(), p.Infobox(), ToLayerItem(o).LinkedDataset()), + IsVisible: o.IsVisible(), + Tags: MergeTags(o.Tags(), p.Tags()), + Infobox: MergeInfobox(o.Infobox(), p.Infobox(), ToLayerItem(o).LinkedDataset()), + } +} + +// MergeInfobox merges two tag lists +func MergeTags(o, _p *TagList) []MergedTag { + // Currently parent tags are ignored + tags := o.Tags() + if len(tags) == 0 { + return nil + } + res := make([]MergedTag, 0, len(tags)) + for _, t := range tags { + tags := TagGroupFrom(t).Children() + + var tags2 []MergedTag + if len(tags) > 0 { + tags2 = make([]MergedTag, 0, len(tags)) + for _, t := range tags { + tags2 = append(tags2, MergedTag{ID: t.ID()}) + } + } + + res = append(res, MergedTag{ + ID: t.ID(), + Tags: tags2, + }) } + return res } // MergeInfobox merges two infoboxes @@ -149,3 +186,23 @@ func (m *Merged) Properties() []PropertyID { } return result } + +func (m *Merged) AllTags() (res []MergedTag) { + 
if m == nil { + return nil + } + for _, t := range m.Tags { + res = append(res, append([]MergedTag{t}, t.Tags...)...) + } + return res +} + +func (m *Merged) AllTagIDs() (res []TagID) { + if m == nil { + return nil + } + for _, t := range m.AllTags() { + res = append(res, t.ID) + } + return res +} diff --git a/pkg/layer/merged_test.go b/pkg/layer/merged_test.go index cdd2d89ac..e10ba5856 100644 --- a/pkg/layer/merged_test.go +++ b/pkg/layer/merged_test.go @@ -13,6 +13,9 @@ func TestMerge(t *testing.T) { p := MustPluginID("xxx~1.1.1") e := PluginExtensionID("foo") + t1 := NewTagID() + t2 := NewTagID() + t3 := NewTagID() itemProperty := NewPropertyID() groupProperty := NewPropertyID() ib1pr := NewPropertyID() @@ -32,6 +35,7 @@ func TestMerge(t *testing.T) { Plugin(&p). Extension(&e). Property(&itemProperty). + IsVisible(false). MustBuild() // no-infobox itemLayer2 := NewItem(). @@ -41,6 +45,7 @@ func TestMerge(t *testing.T) { Extension(&e). Property(&itemProperty). LinkedDataset(&dataset1). + Tags(NewTagList([]Tag{NewTagGroup(t1, []*TagItem{NewTagItem(t2)}), NewTagItem(t3)})). MustBuild() // infobox itemLayer3 := NewItem(). @@ -69,6 +74,7 @@ func TestMerge(t *testing.T) { Plugin(&p). Extension(&e). Property(&groupProperty). + Tags(NewTagList([]Tag{NewTagGroup(t1, []*TagItem{NewTagItem(t2)}), NewTagItem(t3)})). MustBuild() // infobox groupLayer2 := NewGroup(). 
@@ -108,6 +114,7 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: false, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: nil, @@ -125,6 +132,7 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: true, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: nil, @@ -171,6 +179,11 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: true, + Tags: []MergedTag{ + {ID: t1, Tags: []MergedTag{{ID: t2}}}, + {ID: t3}, + }, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: &groupProperty, @@ -188,6 +201,7 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: true, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: &groupProperty, @@ -234,6 +248,11 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: true, + Tags: []MergedTag{ + {ID: t1, Tags: []MergedTag{{ID: t2}}}, + {ID: t3}, + }, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: &groupProperty, @@ -280,6 +299,7 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: true, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: &groupProperty, @@ -326,6 +346,7 @@ func TestMerge(t *testing.T) { Scene: scene, PluginID: &p, ExtensionID: &e, + IsVisible: true, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: &groupProperty, diff --git a/pkg/layer/merging/merged.go b/pkg/layer/merging/merged.go index 3111b1723..2ba6e0a65 100644 --- a/pkg/layer/merging/merged.go +++ b/pkg/layer/merging/merged.go @@ -13,6 +13,7 @@ var ( type MergedLayer interface { Common() *MergedLayerCommon AllDatasets() []layer.DatasetID + AllTags() []layer.TagID } type MergedLayerGroup struct { @@ -59,6 +60,10 @@ func (l *MergedLayerCommon) Datasets() []layer.DatasetID { return l.datasetIDSet().All() } +func 
(l *MergedLayerCommon) Tags() []layer.TagID { + return l.tagIDSet().All() +} + func (l *MergedLayerCommon) datasetIDSet() *layer.DatasetIDSet { if l == nil { return nil @@ -72,6 +77,15 @@ func (l *MergedLayerCommon) datasetIDSet() *layer.DatasetIDSet { return res } +func (l *MergedLayerCommon) tagIDSet() *layer.TagIDSet { + if l == nil { + return nil + } + res := layer.NewTagIDSet() + res.Add(l.Merged.AllTagIDs()...) + return res +} + func (l *MergedLayerItem) AllDatasets() []layer.DatasetID { if l == nil { return nil @@ -79,11 +93,14 @@ func (l *MergedLayerItem) AllDatasets() []layer.DatasetID { return l.Datasets() } -func (l *MergedLayerGroup) AllDatasets() []layer.DatasetID { - return l.allDatasetIDSet().All() +func (l *MergedLayerItem) AllTags() []layer.TagID { + if l == nil { + return nil + } + return l.Tags() } -func (l *MergedLayerGroup) allDatasetIDSet() *layer.DatasetIDSet { +func (l *MergedLayerGroup) AllDatasets() []layer.DatasetID { if l == nil { return nil } @@ -91,5 +108,16 @@ func (l *MergedLayerGroup) allDatasetIDSet() *layer.DatasetIDSet { for _, l := range l.Children { d.Add(l.AllDatasets()...) } - return d + return d.All() +} + +func (l *MergedLayerGroup) AllTags() []layer.TagID { + if l == nil { + return nil + } + d := l.tagIDSet() + for _, l := range l.Children { + d.Add(l.AllTags()...) 
+ } + return d.All() } diff --git a/pkg/layer/merging/merger.go b/pkg/layer/merging/merger.go index 41a9a0bdf..7c1153bfe 100644 --- a/pkg/layer/merging/merger.go +++ b/pkg/layer/merging/merger.go @@ -37,7 +37,7 @@ func (m *Merger) MergeLayer(ctx context.Context, l layer.Layer, parent *layer.Gr children := make([]MergedLayer, 0, len(layers)) for _, c := range layers { - if c == nil || !(*c).IsVisible() { + if c == nil { continue } ml, err := m.MergeLayer(ctx, *c, lg) diff --git a/pkg/layer/merging/merger_test.go b/pkg/layer/merging/merger_test.go index 52c81b0b7..573e72fab 100644 --- a/pkg/layer/merging/merger_test.go +++ b/pkg/layer/merging/merger_test.go @@ -42,6 +42,7 @@ func TestMergeLayer(t *testing.T) { Property(&itemProperty). LinkedDataset(&dataset1). Infobox(layer.NewInfobox(nil, ib1pr)). + IsVisible(false). MustBuild(), layer.NewGroup(). ID(l2). @@ -51,6 +52,7 @@ func TestMergeLayer(t *testing.T) { layer.NewInfoboxField().ID(l1if1).Plugin(p).Extension(e).Property(fpr).MustBuild(), }, ib2pr)). Layers(layer.NewIDList([]layer.ID{l1})). + IsVisible(false). 
MustBuild(), }) @@ -80,8 +82,9 @@ func TestMergeLayer(t *testing.T) { expected := &MergedLayerGroup{ MergedLayerCommon: MergedLayerCommon{ Merged: layer.Merged{ - Original: l2, - Scene: scene, + Original: l2, + Scene: scene, + IsVisible: false, Property: &property.MergedMetadata{ Original: &groupProperty, }, @@ -112,9 +115,10 @@ func TestMergeLayer(t *testing.T) { &MergedLayerItem{ MergedLayerCommon{ Merged: layer.Merged{ - Original: l1, - Parent: &l2, - Scene: scene, + Original: l1, + Parent: &l2, + Scene: scene, + IsVisible: false, Property: &property.MergedMetadata{ Original: &itemProperty, Parent: &groupProperty, diff --git a/pkg/layer/merging/sealed.go b/pkg/layer/merging/sealed.go index 2ca70a018..4a6dbf665 100644 --- a/pkg/layer/merging/sealed.go +++ b/pkg/layer/merging/sealed.go @@ -13,6 +13,8 @@ var ( type SealedLayer interface { Common() *SealedLayerCommon Flatten() []*SealedLayerItem + Group() *SealedLayerGroup + Item() *SealedLayerItem } type SealedLayerGroup struct { @@ -28,6 +30,7 @@ type SealedLayerCommon struct { layer.Merged Property *property.Sealed Infobox *SealedInfobox + Tags []SealedTag } type SealedInfobox struct { @@ -41,6 +44,12 @@ type SealedInfoboxField struct { Property *property.Sealed } +type SealedTag struct { + ID layer.TagID + Label string + Tags []SealedTag +} + func (l *SealedLayerGroup) Common() *SealedLayerCommon { if l == nil { return nil @@ -59,6 +68,17 @@ func (l *SealedLayerGroup) Flatten() []*SealedLayerItem { return layers } +func (l *SealedLayerGroup) Item() *SealedLayerItem { + return nil +} + +func (l *SealedLayerGroup) Group() *SealedLayerGroup { + if l == nil { + return nil + } + return l +} + func (l *SealedLayerItem) Common() *SealedLayerCommon { if l == nil { return nil @@ -72,3 +92,14 @@ func (l *SealedLayerItem) Flatten() []*SealedLayerItem { } return []*SealedLayerItem{l} } + +func (l *SealedLayerItem) Item() *SealedLayerItem { + if l == nil { + return nil + } + return l +} + +func (*SealedLayerItem) Group() 
*SealedLayerGroup { + return nil +} diff --git a/pkg/layer/merging/sealer.go b/pkg/layer/merging/sealer.go index 94c0948c4..d4a6bacf1 100644 --- a/pkg/layer/merging/sealer.go +++ b/pkg/layer/merging/sealer.go @@ -4,39 +4,52 @@ import ( "context" "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/tag" ) type Sealer struct { DatasetGraphLoader dataset.GraphLoader + TagLoader tag.Loader } func (s *Sealer) Seal(ctx context.Context, m MergedLayer) (SealedLayer, error) { if s == nil || m == nil { return nil, nil } - return s.sealLayer(ctx, m) + + var tagMap tag.Map + if tags := m.AllTags(); len(tags) > 0 { + tags2, err := s.TagLoader(ctx, tags...) + if err != nil { + return nil, err + } + tagMap = tag.MapFromRefList(tags2) + } + + return s.sealLayer(ctx, m, tagMap) } -func (s *Sealer) sealLayer(ctx context.Context, m MergedLayer) (SealedLayer, error) { +func (s *Sealer) sealLayer(ctx context.Context, m MergedLayer, tagMap tag.Map) (SealedLayer, error) { if s == nil || m == nil { return nil, nil } if g, ok := m.(*MergedLayerGroup); ok { - return s.sealLayerGroup(ctx, g) + return s.sealLayerGroup(ctx, g, tagMap) } if i, ok := m.(*MergedLayerItem); ok { - return s.sealLayerItem(ctx, i) + return s.sealLayerItem(ctx, i, tagMap) } return nil, nil } -func (s *Sealer) sealLayerGroup(ctx context.Context, m *MergedLayerGroup) (*SealedLayerGroup, error) { +func (s *Sealer) sealLayerGroup(ctx context.Context, m *MergedLayerGroup, tagMap tag.Map) (*SealedLayerGroup, error) { if s == nil || m == nil { return nil, nil } - c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon) + c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon, tagMap) if err != nil { return nil, err } @@ -46,7 +59,7 @@ func (s *Sealer) sealLayerGroup(ctx context.Context, m *MergedLayerGroup) (*Seal children := make([]SealedLayer, 0, len(m.Children)) for _, c := range m.Children 
{ - s, err := s.sealLayer(ctx, c) + s, err := s.sealLayer(ctx, c, tagMap) if err != nil { return nil, err } @@ -59,11 +72,11 @@ func (s *Sealer) sealLayerGroup(ctx context.Context, m *MergedLayerGroup) (*Seal }, nil } -func (s *Sealer) sealLayerItem(ctx context.Context, m *MergedLayerItem) (*SealedLayerItem, error) { +func (s *Sealer) sealLayerItem(ctx context.Context, m *MergedLayerItem, tagMap tag.Map) (*SealedLayerItem, error) { if s == nil || m == nil { return nil, nil } - c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon) + c, err := s.sealLayerCommon(ctx, &m.MergedLayerCommon, tagMap) if err != nil { return nil, err } @@ -75,7 +88,7 @@ func (s *Sealer) sealLayerItem(ctx context.Context, m *MergedLayerItem) (*Sealed }, nil } -func (s *Sealer) sealLayerCommon(ctx context.Context, m *MergedLayerCommon) (*SealedLayerCommon, error) { +func (s *Sealer) sealLayerCommon(ctx context.Context, m *MergedLayerCommon, tagMap tag.Map) (*SealedLayerCommon, error) { if s == nil || m == nil { return nil, nil } @@ -87,10 +100,12 @@ func (s *Sealer) sealLayerCommon(ctx context.Context, m *MergedLayerCommon) (*Se if err != nil { return nil, err } + tags := s.sealTags(m.Merged.Tags, tagMap) return &SealedLayerCommon{ Merged: m.Merged, Property: p, Infobox: ib, + Tags: tags, }, nil } @@ -137,3 +152,22 @@ func (s *Sealer) sealProperty(ctx context.Context, m *property.Merged) (*propert } return property.Seal(ctx, m, s.DatasetGraphLoader) } + +func (s *Sealer) sealTags(m []layer.MergedTag, tagMap tag.Map) []SealedTag { + if len(m) == 0 { + return nil + } + res := make([]SealedTag, 0, len(m)) + for _, t := range m { + tt := SealedTag{ + ID: t.ID, + Tags: s.sealTags(t.Tags, tagMap), + Label: "", + } + if ttt, ok := tagMap[t.ID]; ok { + tt.Label = ttt.Label() + } + res = append(res, tt) + } + return res +} diff --git a/pkg/scene/builder/builder.go b/pkg/scene/builder/builder.go index 2d6eff88f..b3123c572 100644 --- a/pkg/scene/builder/builder.go +++ b/pkg/scene/builder/builder.go @@ 
-12,6 +12,7 @@ import ( "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" ) const ( @@ -21,14 +22,16 @@ const ( type Builder struct { ploader property.Loader + tloader tag.SceneLoader exporter *encoding.Exporter encoder *encoder } -func New(ll layer.Loader, pl property.Loader, dl dataset.GraphLoader) *Builder { +func New(ll layer.Loader, pl property.Loader, dl dataset.GraphLoader, tl tag.Loader, tsl tag.SceneLoader) *Builder { e := &encoder{} return &Builder{ ploader: pl, + tloader: tsl, encoder: e, exporter: &encoding.Exporter{ Merger: &merging.Merger{ @@ -37,6 +40,7 @@ func New(ll layer.Loader, pl property.Loader, dl dataset.GraphLoader) *Builder { }, Sealer: &merging.Sealer{ DatasetGraphLoader: dl, + TagLoader: tl, }, Encoder: e, }, @@ -73,6 +77,5 @@ func (b *Builder) buildScene(ctx context.Context, s *scene.Scene, publishedAt ti } layers := b.encoder.Result() - res := b.scene(ctx, s, publishedAt, layers, p) - return res, nil + return b.scene(ctx, s, publishedAt, layers, p) } diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index c84c9d9bd..40bc388fd 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -9,10 +9,13 @@ import ( "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" "github.com/stretchr/testify/assert" ) func TestSceneBuilder(t *testing.T) { + publishedAt := time.Date(2019, time.August, 15, 0, 0, 0, 0, time.Local) + // ids sceneID := scene.NewID() scenePropertyID := property.NewID() @@ -66,6 +69,21 @@ func TestSceneBuilder(t *testing.T) { ), }).Scene(sceneID).Schema(dss3id).Source("ds3").MustBuild() + // tags + tag1 := tag.NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := 
tag.NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := tag.NewItem().NewID().Label("unused").Scene(sceneID).MustBuild() + tag4 := tag.NewGroup().NewID().Label("bar").Scene(sceneID).Tags(tag.IDListFrom(([]tag.ID{ + tag1.ID(), tag2.ID(), tag3.ID(), + }))).MustBuild() + tag5 := tag.NewItem().NewID().Label("dummy").Scene(scene.NewID()).MustBuild() // dummy + tags := tag.List{tag1, tag2, tag3, tag4, tag5} + + // layer tags + ltag1 := layer.NewTagItem(tag1.ID()) + ltag2 := layer.NewTagItem(tag2.ID()) + ltag3 := layer.NewTagGroup(tag4.ID(), []*layer.TagItem{ltag2}) + // layer1: normal layer item layer1p := property.New(). NewID(). @@ -89,6 +107,7 @@ func TestSceneBuilder(t *testing.T) { Plugin(&pluginID). Extension(&pluginExtension1ID). Property(layer1p.IDRef()). + Tags(layer.NewTagList([]layer.Tag{ltag1, ltag3})). MustBuild() // layer2: normal layer group @@ -114,6 +133,7 @@ func TestSceneBuilder(t *testing.T) { Plugin(&pluginID). Extension(&pluginExtension1ID). Property(layer21p.IDRef()). + Tags(layer.NewTagList([]layer.Tag{ltag2})). MustBuild() layer2p := property.New(). NewID(). @@ -131,7 +151,12 @@ func TestSceneBuilder(t *testing.T) { }).MustBuild(), }). MustBuild() - layer2ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(layer2p.ID()).MustBuild() + layer2ibf1 := layer.NewInfoboxField(). + NewID(). + Plugin(pluginID). + Extension(pluginExtension1ID). + Property(layer2p.ID()). + MustBuild() layer2ib := layer.NewInfobox([]*layer.InfoboxField{ layer2ibf1, }, scenePropertyID) @@ -143,6 +168,7 @@ func TestSceneBuilder(t *testing.T) { Property(layer2p.IDRef()). Infobox(layer2ib). Layers(layer.NewIDList([]layer.ID{layer21.ID()})). + Tags(layer.NewTagList([]layer.Tag{ltag1, ltag3})). MustBuild() // layer3: full-linked layer item with infobox @@ -227,7 +253,12 @@ func TestSceneBuilder(t *testing.T) { }).MustBuild(), }). 
MustBuild() - layer4ibf1 := layer.NewInfoboxField().NewID().Plugin(pluginID).Extension(pluginExtension1ID).Property(layer4p.ID()).MustBuild() + layer4ibf1 := layer.NewInfoboxField(). + NewID(). + Plugin(pluginID). + Extension(pluginExtension1ID). + Property(layer4p.ID()). + MustBuild() layer4ib := layer.NewInfobox([]*layer.InfoboxField{ layer4ibf1, }, scenePropertyID) @@ -363,14 +394,14 @@ func TestSceneBuilder(t *testing.T) { pluginExtension1ID, scenePropertyID, false, - true) + false) sceneWidget2 := scene.MustNewWidget( sceneWidgetID2, pluginID, pluginExtension2ID, scenePropertyID, true, - false) + true) scenePlugin1 := scene.NewPlugin(pluginID, &scenePropertyID) assert.Equal(t, sceneWidgetID1, sceneWidget1.ID()) @@ -416,166 +447,327 @@ func TestSceneBuilder(t *testing.T) { layer51p, layer6p, }) + tloader := tag.LoaderFrom(tags) + tsloader := tag.SceneLoaderFrom(tags) - // exec - sb := New(lloader, ploader, dloader) - publishedAt := time.Date(2019, time.August, 15, 0, 0, 0, 0, time.Local) - result, err := sb.buildScene(context.Background(), scene, publishedAt) - - // general - assert.NoError(t, err) - assert.Equal(t, sceneID.String(), result.ID) - assert.Equal(t, version, result.SchemaVersion) - assert.Equal(t, publishedAt, result.PublishedAt) - - // property - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", + expectedLayer1 := &layerJSON{ + ID: layer1.ID().String(), + PluginID: layer1.Plugin().StringRef(), + ExtensionID: layer1.Extension().StringRef(), + Name: layer1.Name(), + IsVisible: true, + PropertyID: layer1.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "b": float64(1), + }, }, - }, result.Property, "property") - - // plugins - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", + Infobox: nil, + Tags: []tagJSON{ + {ID: tag1.ID().String(), Label: tag1.Label(), Tags: nil}, + {ID: tag4.ID().String(), Label: tag4.Label(), 
Tags: []tagJSON{ + {ID: tag2.ID().String(), Label: tag2.Label(), Tags: nil}, + }}, }, - }, result.Plugins[pluginID.String()], "plugin1 property") + Children: nil, + } - // widgets - assert.Equal(t, 1, len(result.Widgets), "widgets len") - resWidget1 := result.Widgets[0] - assert.Equal(t, pluginID.String(), resWidget1.PluginID, "widget1 plugin") - assert.Equal(t, string(pluginExtension2ID), resWidget1.ExtensionID, "widget1 extension") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", + expectedLayer2 := &layerJSON{ + ID: layer2.ID().String(), + PluginID: layer2.Plugin().StringRef(), + ExtensionID: layer2.Extension().StringRef(), + Name: layer2.Name(), + IsVisible: true, + PropertyID: layer2.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, }, - }, resWidget1.Property, "widget1 property") - - // layers - assert.Equal(t, 6, len(result.Layers), "layers len") - - // layer1 - resLayer1 := result.Layers[0] - assert.Equal(t, layer1.ID().String(), resLayer1.ID, "layer1 id") - assert.Equal(t, pluginID.StringRef(), resLayer1.PluginID, "layer1 plugin id") - assert.Equal(t, pluginExtension1ID.StringRef(), resLayer1.ExtensionID, "layer1 extension id") - assert.Nil(t, resLayer1.Infobox, "layer1 infobox") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "xxx", - "b": float64(1), + Infobox: &infoboxJSON{ + Fields: []infoboxFieldJSON{ + { + ID: layer2ibf1.ID().String(), + PluginID: layer2ibf1.Plugin().String(), + ExtensionID: layer2ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, + }, + }, + }, + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, }, - }, resLayer1.Property, "layer1 prpperty") - - // layer2 - resLayer2 := result.Layers[1] - assert.Equal(t, layer21.ID().String(), resLayer2.ID, "layer๏ผ’ id") - 
assert.Equal(t, pluginID.StringRef(), resLayer2.PluginID, "layer๏ผ’ plugin id") - assert.Equal(t, pluginExtension1ID.StringRef(), resLayer2.ExtensionID, "layer๏ผ’ extension id") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", + Tags: []tagJSON{ + {ID: tag1.ID().String(), Label: tag1.Label()}, + {ID: tag4.ID().String(), Label: tag4.Label(), Tags: []tagJSON{ + {ID: tag2.ID().String(), Label: tag2.Label()}, + }}, + }, + Children: []*layerJSON{ + { + ID: layer21.ID().String(), + PluginID: layer21.Plugin().StringRef(), + ExtensionID: layer21.Extension().StringRef(), + Name: layer21.Name(), + IsVisible: true, + PropertyID: layer21.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "yyy", + "b": float64(1), + "c": "test", + }, + }, + Infobox: &infoboxJSON{ + Fields: []infoboxFieldJSON{ + { + ID: layer2ibf1.ID().String(), + PluginID: layer2ibf1.Plugin().String(), + ExtensionID: layer2ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "xxx", + "c": "test", + }, + }, + }, + }, + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + }, + Tags: []tagJSON{ + {ID: tag2.ID().String(), Label: tag2.Label()}, + }, + }, }, - }, resLayer2.Infobox.Property, "layer2 infobox property") - assert.Equal(t, 1, len(resLayer2.Infobox.Fields), "layer2 infobox fields len") - assert.Equal(t, pluginID.String(), resLayer2.Infobox.Fields[0].PluginID, "layer2 infobox field1 plugin") - assert.Equal(t, string(pluginExtension1ID), resLayer2.Infobox.Fields[0].ExtensionID, "layer2 infobox field1 extension") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "xxx", - "c": "test", + } + + expectedLayer3 := &layerJSON{ + ID: layer3.ID().String(), + PluginID: layer3.Plugin().StringRef(), + ExtensionID: layer3.Extension().StringRef(), + Name: layer3.Name(), + IsVisible: true, + PropertyID: 
layer3.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "b", + }, }, - }, resLayer2.Infobox.Fields[0].Property, "layer2 infobox field1 property") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "yyy", - "b": float64(1), - "c": "test", + Infobox: &infoboxJSON{ + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + Fields: []infoboxFieldJSON{ + { + ID: layer3ibf1.ID().String(), + PluginID: layer3ibf1.Plugin().String(), + ExtensionID: layer3ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + }, + }, }, - }, resLayer2.Property, "layer2 prpperty") + } - // layer3 - resLayer3 := result.Layers[2] - assert.Equal(t, layer3.ID().String(), resLayer3.ID, "layer3 id") - assert.Equal(t, pluginID.StringRef(), resLayer3.PluginID, "layer3 plugin id") - assert.Equal(t, pluginExtension1ID.StringRef(), resLayer3.ExtensionID, "layer3 extension id") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", + expectedLayer4 := &layerJSON{ + ID: layer4.ID().String(), + PluginID: layer4.Plugin().StringRef(), + ExtensionID: layer4.Extension().StringRef(), + Name: layer4.Name(), + IsVisible: true, + PropertyID: layer4.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": nil, + "c": "xxx", + }, }, - }, resLayer3.Infobox.Property, "layer3 infobox property") - assert.Equal(t, 1, len(resLayer3.Infobox.Fields), "layer3 infobox fields len") - assert.Equal(t, pluginID.String(), resLayer3.Infobox.Fields[0].PluginID, "layer3 infobox field1 plugin") - assert.Equal(t, string(pluginExtension1ID), resLayer3.Infobox.Fields[0].ExtensionID, "layer3 infobox field1 extension") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", + Infobox: &infoboxJSON{ + Property: map[string]interface{}{ + "A": 
map[string]interface{}{ + "a": "hogehoge", + }, + }, + Fields: []infoboxFieldJSON{ + { + ID: layer4ibf1.ID().String(), + PluginID: layer4ibf1.Plugin().String(), + ExtensionID: layer4ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": nil, + "c": "xxx", + }, + }, + }, + }, }, - }, resLayer3.Infobox.Fields[0].Property, "layer3 infobox field1 property") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "b", + Children: []*layerJSON{ + { + ID: layer41.ID().String(), + PluginID: layer41.Plugin().StringRef(), + ExtensionID: layer41.Extension().StringRef(), + IsVisible: true, + PropertyID: layer41.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "b", + "b": float64(1), + "c": "xxx", + }, + }, + Infobox: &infoboxJSON{ + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + "b": float64(1), + }, + }, + Fields: []infoboxFieldJSON{ + { + ID: layer41ibf1.ID().String(), + PluginID: layer41ibf1.Plugin().String(), + ExtensionID: layer41ibf1.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "b": float64(1), + }, + }, + }, + }, + }, + }, }, - }, resLayer3.Property, "layer3 prpperty") + } - // layer4 - resLayer4 := result.Layers[3] - assert.Equal(t, layer41.ID().String(), resLayer4.ID, "layer4 id") - assert.Equal(t, pluginID.StringRef(), resLayer4.PluginID, "layer4 plugin id") - assert.Equal(t, pluginExtension1ID.StringRef(), resLayer4.ExtensionID, "layer4 extension id") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "hogehoge", - "b": float64(1), - }, - }, resLayer4.Infobox.Property, "layer4 infobox property") - assert.Equal(t, 1, len(resLayer4.Infobox.Fields), "layer4 infobox fields len") - assert.Equal(t, pluginID.String(), resLayer4.Infobox.Fields[0].PluginID, "layer4 infobox field1 plugin") - assert.Equal(t, string(pluginExtension1ID), 
resLayer4.Infobox.Fields[0].ExtensionID, "layer4 infobox field1 extension") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "b": float64(1), + expectedLayer5 := &layerJSON{ + ID: layer5.ID().String(), + PluginID: layer5.Plugin().StringRef(), + ExtensionID: layer5.Extension().StringRef(), + Name: layer5.Name(), + IsVisible: true, + PropertyID: layer5.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": nil, + "b": nil, + }, }, - }, resLayer4.Infobox.Fields[0].Property, "layer4 infobox field1 property") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "b", - "b": float64(1), - "c": "xxx", + Infobox: nil, + Tags: nil, + Children: []*layerJSON{ + { + ID: layer51.ID().String(), + PluginID: layer51.Plugin().StringRef(), + ExtensionID: layer51.Extension().StringRef(), + IsVisible: true, + PropertyID: layer51.Property().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "a", + "b": "b", + }, + }, + }, }, - }, resLayer4.Property, "layer4 prpperty") + } - // layer5 - resLayer5 := result.Layers[4] - assert.Equal(t, layer51.ID().String(), resLayer5.ID, "layer5 id") - assert.Equal(t, pluginID.StringRef(), resLayer5.PluginID, "layer5 plugin id") - assert.Equal(t, pluginExtension1ID.StringRef(), resLayer5.ExtensionID, "layer5 extension id") - assert.Nil(t, resLayer5.Infobox, "layer5 infobox") - assert.Equal(t, map[string]interface{}{ - "A": map[string]interface{}{ - "a": "a", - "b": "b", + expectedLayer6 := &layerJSON{ + ID: layer6.ID().String(), + PluginID: layer6.Plugin().StringRef(), + ExtensionID: layer6.Extension().StringRef(), + Name: layer6.Name(), + IsVisible: true, + PropertyID: layer6.Property().String(), + Property: map[string]interface{}{ + "B": []map[string]interface{}{ + { + "id": propertyItemID1.String(), + "a": "XYZ", + }, + { + "id": propertyItemID2.String(), + "a": "ZYX", + }, + }, }, - }, resLayer5.Property, "layer5 prpperty") + 
Infobox: nil, + Tags: nil, + Children: nil, + } - // layer6 - resLayer6 := result.Layers[5] - assert.Equal(t, layer6.ID().String(), resLayer6.ID, "layer6 id") - assert.Equal(t, pluginID.StringRef(), resLayer6.PluginID, "layer6 plugin id") - assert.Equal(t, pluginExtension1ID.StringRef(), resLayer6.ExtensionID, "layer6 extension id") - assert.Nil(t, resLayer6.Infobox, "layer6 infobox") - assert.Equal(t, map[string]interface{}{ - "B": []map[string]interface{}{ - { - "a": "XYZ", - "id": propertyItemID1.String(), + expectedLayers := []*layerJSON{ + expectedLayer1, + expectedLayer2, + expectedLayer3, + expectedLayer4, + expectedLayer5, + expectedLayer6, + } + + expected := &sceneJSON{ + SchemaVersion: version, + ID: sceneID.String(), + PublishedAt: publishedAt, + Layers: expectedLayers, + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + Plugins: map[string]map[string]interface{}{ + pluginID.String(): { + "A": map[string]interface{}{ + "a": "hogehoge", + }, }, + }, + Widgets: []*widgetJSON{ { - "a": "ZYX", - "id": propertyItemID2.String(), + ID: sceneWidget2.ID().String(), + PluginID: sceneWidget2.Plugin().String(), + ExtensionID: sceneWidget2.Extension().String(), + Property: map[string]interface{}{ + "A": map[string]interface{}{ + "a": "hogehoge", + }, + }, + Extended: true, }, }, - }, resLayer6.Property, "layer6 prpperty") + WidgetAlignSystem: nil, + Tags: []*tagJSON{ + {ID: tag4.ID().String(), Label: tag4.Label(), Tags: []tagJSON{ + {ID: tag1.ID().String(), Label: tag1.Label(), Tags: nil}, + {ID: tag2.ID().String(), Label: tag2.Label(), Tags: nil}, + {ID: tag3.ID().String(), Label: tag3.Label(), Tags: nil}, + }}, + }, + Clusters: []*clusterJSON{}, + } + + // exec + sb := New(lloader, ploader, dloader, tloader, tsloader) + result, err := sb.buildScene(context.Background(), scene, publishedAt) + + assert.NoError(t, err) + assert.Equal(t, expected, result) } diff --git a/pkg/scene/builder/encoder.go 
b/pkg/scene/builder/encoder.go index eb5f2cac1..28d772e16 100644 --- a/pkg/scene/builder/encoder.go +++ b/pkg/scene/builder/encoder.go @@ -9,59 +9,79 @@ import ( var _ encoding.Encoder = &encoder{} type encoder struct { - res []*layerJSON + res *layerJSON } func (e *encoder) Result() []*layerJSON { - if e == nil { + if e == nil || e.res == nil { return nil } - return e.res + return e.res.Children } func (e *encoder) Encode(l merging.SealedLayer) (err error) { if e == nil { return } - e.res = e.layers(l) + e.res = e.layer(l) return } -func (e *encoder) layers(l merging.SealedLayer) []*layerJSON { +func (e *encoder) layer(layer merging.SealedLayer) *layerJSON { + if layer == nil { + return nil + } + l := layer.Common() if l == nil { return nil } - if i, ok := l.(*merging.SealedLayerItem); ok { - layer := e.layer(i) - if layer == nil { - return nil - } - return []*layerJSON{layer} - } else if g, ok := l.(*merging.SealedLayerGroup); ok { - // This encoder does not print group layer representation. - layers := make([]*layerJSON, 0, len(g.Children)) + + var children []*layerJSON + if g := layer.Group(); g != nil { for _, c := range g.Children { - l := e.layers(c) - if l != nil { - layers = append(layers, l...) 
+ if d := e.layer(c); d != nil { + children = append(children, d) } } - return layers } - return nil -} -func (e *encoder) layer(l *merging.SealedLayerItem) *layerJSON { - if l == nil { - return nil + var propertyID string + if l.Property != nil { + propertyID = l.Property.Original.String() } + + var tags []tagJSON + if len(l.Tags) > 0 { + for _, t := range l.Tags { + var tags2 []tagJSON + if len(t.Tags) > 0 { + tags2 = make([]tagJSON, 0, len(t.Tags)) + for _, t := range t.Tags { + tags2 = append(tags2, tagJSON{ + ID: t.ID.String(), + Label: t.Label, + }) + } + } + tags = append(tags, tagJSON{ + ID: t.ID.String(), + Label: t.Label, + Tags: tags2, + }) + } + } + return &layerJSON{ ID: l.Original.String(), PluginID: l.PluginID.StringRef(), ExtensionID: l.ExtensionID.StringRef(), Name: l.Name, Property: e.property(l.Property), + PropertyID: propertyID, Infobox: e.infobox(l.Infobox), + IsVisible: l.IsVisible, + Tags: tags, + Children: children, } } @@ -93,8 +113,18 @@ type layerJSON struct { PluginID *string `json:"pluginId,omitempty"` ExtensionID *string `json:"extensionId,omitempty"` Name string `json:"name,omitempty"` + PropertyID string `json:"propertyId,omitempty"` Property propertyJSON `json:"property,omitempty"` Infobox *infoboxJSON `json:"infobox,omitempty"` + Tags []tagJSON `json:"tags,omitempty"` + IsVisible bool `json:"isVisible"` + Children []*layerJSON `json:"children,omitempty"` +} + +type tagJSON struct { + ID string `json:"id"` + Label string `json:"label"` + Tags []tagJSON `json:"tags,omitempty"` } type infoboxJSON struct { diff --git a/pkg/scene/builder/encoder_test.go b/pkg/scene/builder/encoder_test.go index ef81c0f65..682289769 100644 --- a/pkg/scene/builder/encoder_test.go +++ b/pkg/scene/builder/encoder_test.go @@ -13,25 +13,27 @@ import ( func TestEncoder_Result(t *testing.T) { tests := []struct { Name string - E *encoder + Target *encoder Expected []*layerJSON }{ { Name: "nil encoder", - E: nil, + Target: nil, Expected: nil, }, { Name: 
"success", - E: &encoder{ - res: []*layerJSON{ - { - ID: "xxx", - PluginID: nil, - ExtensionID: nil, - Name: "aaa", - Property: nil, - Infobox: nil, + Target: &encoder{ + res: &layerJSON{ + Children: []*layerJSON{ + { + ID: "xxx", + PluginID: nil, + ExtensionID: nil, + Name: "aaa", + Property: nil, + Infobox: nil, + }, }, }, }, @@ -52,7 +54,7 @@ func TestEncoder_Result(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - res := tc.E.Result() + res := tc.Target.Result() assert.Equal(t, tc.Expected, res) }) } @@ -61,20 +63,20 @@ func TestEncoder_Result(t *testing.T) { func TestEncoder_Encode(t *testing.T) { tests := []struct { Name string - E *encoder - SL merging.SealedLayer + Target *encoder + Input merging.SealedLayer Expected error }{ { Name: "nil encoder", - E: nil, - SL: nil, + Target: nil, + Input: nil, Expected: nil, }, { Name: "success encoding", - E: &encoder{}, - SL: nil, + Target: &encoder{}, + Input: nil, Expected: nil, }, } @@ -83,7 +85,7 @@ func TestEncoder_Encode(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - res := tc.E.Encode(tc.SL) + res := tc.Target.Encode(tc.Input) assert.Equal(t, tc.Expected, res) }) } @@ -139,31 +141,37 @@ func TestEncoder_Layers(t *testing.T) { }, Property: &sp, Infobox: nil, - }} + }, + } tests := []struct { Name string - E *encoder - SL *merging.SealedLayerItem + Target *encoder + Input *merging.SealedLayerItem Expected *layerJSON }{ { Name: "nil layers", - E: &encoder{}, - SL: nil, + Target: &encoder{}, + Input: nil, Expected: nil, }, { - Name: "success", - E: &encoder{}, - SL: sealed, + Name: "success", + Target: &encoder{}, + Input: sealed, Expected: &layerJSON{ ID: lid.String(), PluginID: layer.OfficialPluginID.StringRef(), ExtensionID: ex.StringRef(), Name: "test", - Property: map[string]interface{}{"default": map[string]interface{}{"location": property.LatLng{Lat: 4.4, Lng: 53.4}}}, - Infobox: nil, + PropertyID: pid.String(), + Property: map[string]interface{}{ + 
"default": map[string]interface{}{ + "location": property.LatLng{Lat: 4.4, Lng: 53.4}, + }, + }, + Infobox: nil, }, }, } @@ -172,17 +180,8 @@ func TestEncoder_Layers(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - res := tc.E.layer(tc.SL) - if res == nil { - assert.Equal(t, tc.Expected, res) - } else { - assert.Equal(t, tc.Expected.Property, res.Property) - assert.Equal(t, tc.Expected.Infobox, res.Infobox) - assert.Equal(t, *tc.Expected.ExtensionID, *res.ExtensionID) - assert.Equal(t, tc.Expected.ID, res.ID) - assert.Equal(t, tc.Expected.Name, res.Name) - assert.Equal(t, *tc.Expected.PluginID, *res.PluginID) - } + res := tc.Target.layer(tc.Input) + assert.Equal(t, tc.Expected, res) }) } } diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go index 7d17193ef..fcdbd9363 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -6,6 +6,7 @@ import ( "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/tag" ) type sceneJSON struct { @@ -17,10 +18,16 @@ type sceneJSON struct { Layers []*layerJSON `json:"layers"` Widgets []*widgetJSON `json:"widgets"` WidgetAlignSystem *widgetAlignSystemJSON `json:"widgetAlignSystem"` + Tags []*tagJSON `json:"tags"` Clusters []*clusterJSON `json:"clusters"` } -func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Time, l []*layerJSON, p []*property.Property) *sceneJSON { +func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Time, l []*layerJSON, p []*property.Property) (*sceneJSON, error) { + tags, err := b.tags(ctx, s) + if err != nil { + return nil, err + } + return &sceneJSON{ SchemaVersion: version, ID: s.ID().String(), @@ -30,8 +37,9 @@ func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Ti Widgets: b.widgets(ctx, s, p), Clusters: b.clusters(ctx, s, p), Layers: l, + Tags: tags, WidgetAlignSystem: 
buildWidgetAlignSystem(s.WidgetAlignSystem()), - } + }, nil } func (b *Builder) plugins(ctx context.Context, s *scene.Scene, p []*property.Property) map[string]propertyJSON { @@ -80,6 +88,45 @@ func (b *Builder) clusters(ctx context.Context, s *scene.Scene, p []*property.Pr return res } +func (b *Builder) tags(ctx context.Context, s *scene.Scene) ([]*tagJSON, error) { + tags, err := b.tloader(ctx, s.ID()) + if err != nil { + return nil, err + } + tagMap := tag.MapFromRefList(tags) + rootTags := tag.DerefList(tags).Roots() + stags := make([]*tagJSON, 0, len(rootTags)) + for _, t := range rootTags { + if t == nil { + continue + } + t2 := toTag(t, tagMap) + stags = append(stags, &t2) + } + return stags, nil +} + +func toTag(t tag.Tag, m tag.Map) tagJSON { + var tags []tagJSON + if children := tag.GroupFrom(t).Tags().Tags(); children != nil { + tags = make([]tagJSON, 0, len(children)) + for _, tid := range children { + t, ok := m[tid] + if !ok { + continue + } + t2 := toTag(t, m) + tags = append(tags, t2) + } + } + + return tagJSON{ + ID: t.ID().String(), + Label: t.Label(), + Tags: tags, + } +} + func (b *Builder) property(ctx context.Context, p *property.Property) propertyJSON { return property.SealProperty(ctx, p).Interface() } diff --git a/pkg/tag/group.go b/pkg/tag/group.go index 7dcc6c8a1..0c9c9a889 100644 --- a/pkg/tag/group.go +++ b/pkg/tag/group.go @@ -2,9 +2,12 @@ package tag type Group struct { tag - tags *List + tags *IDList } -func (g *Group) Tags() *List { +func (g *Group) Tags() *IDList { + if g == nil { + return nil + } return g.tags } diff --git a/pkg/tag/group_builder.go b/pkg/tag/group_builder.go index 4774ca48f..77283cb6b 100644 --- a/pkg/tag/group_builder.go +++ b/pkg/tag/group_builder.go @@ -26,10 +26,17 @@ func (b *GroupBuilder) Build() (*Group, error) { if b.g.label == "" { return nil, ErrEmptyLabel } - return b.g, nil } +func (b *GroupBuilder) MustBuild() *Group { + res, err := b.Build() + if err != nil { + panic(err) + } + return res +} + func 
(b *GroupBuilder) ID(tid ID) *GroupBuilder { b.g.id = tid return b @@ -50,7 +57,7 @@ func (b *GroupBuilder) Scene(sid SceneID) *GroupBuilder { return b } -func (b *GroupBuilder) Tags(tl *List) *GroupBuilder { +func (b *GroupBuilder) Tags(tl *IDList) *GroupBuilder { if tl != nil { b.g.tags = tl } diff --git a/pkg/tag/group_test.go b/pkg/tag/group_test.go index 1b95edc87..2be71f42e 100644 --- a/pkg/tag/group_test.go +++ b/pkg/tag/group_test.go @@ -25,7 +25,7 @@ func TestGroupBuilder_Build(t *testing.T) { Name, Label string Id ID Scene SceneID - Tags *List + Tags *IDList Expected struct { Group Group Error error @@ -69,7 +69,7 @@ func TestGroupBuilder_Build(t *testing.T) { Id: tid, Label: "xxx", Scene: sid, - Tags: &List{ + Tags: &IDList{ tags: tags, }, Expected: struct { @@ -82,7 +82,7 @@ func TestGroupBuilder_Build(t *testing.T) { label: "xxx", sceneId: sid, }, - tags: &List{ + tags: &IDList{ tags: tags, }, }, diff --git a/pkg/tag/item.go b/pkg/tag/item.go index 34ad6b091..696dcaf0a 100644 --- a/pkg/tag/item.go +++ b/pkg/tag/item.go @@ -9,18 +9,30 @@ type Item struct { } func (i *Item) Parent() *ID { + if i == nil { + return nil + } return i.parent.CopyRef() } func (i *Item) LinkedDatasetFieldID() *DatasetFieldID { + if i == nil { + return nil + } return i.linkedDatasetFieldID.CopyRef() } func (i *Item) LinkedDatasetID() *DatasetID { + if i == nil { + return nil + } return i.linkedDatasetID.CopyRef() } func (i *Item) LinkedDatasetSchemaID() *DatasetSchemaID { + if i == nil { + return nil + } return i.linkedDatasetSchemaID.CopyRef() } diff --git a/pkg/tag/item_builder.go b/pkg/tag/item_builder.go index ed2a048a9..29eb8790a 100644 --- a/pkg/tag/item_builder.go +++ b/pkg/tag/item_builder.go @@ -29,6 +29,14 @@ func (b *ItemBuilder) Build() (*Item, error) { return b.i, nil } +func (b *ItemBuilder) MustBuild() *Item { + res, err := b.Build() + if err != nil { + panic(err) + } + return res +} + func (b *ItemBuilder) ID(tid ID) *ItemBuilder { b.i.id = tid return b diff 
--git a/pkg/tag/list.go b/pkg/tag/list.go index 320b4de20..42ee1d352 100644 --- a/pkg/tag/list.go +++ b/pkg/tag/list.go @@ -1,25 +1,25 @@ package tag -type List struct { +type IDList struct { tags []ID } -func NewList() *List { - return &List{tags: []ID{}} +func NewIDList() *IDList { + return &IDList{tags: []ID{}} } -func NewListFromTags(tags []ID) *List { - return &List{tags: tags} +func IDListFrom(tags []ID) *IDList { + return &IDList{tags: tags} } -func (tl *List) Tags() []ID { - if tl == nil || tl.tags == nil { +func (tl *IDList) Tags() []ID { + if tl == nil || len(tl.tags) == 0 { return nil } return append([]ID{}, tl.tags...) } -func (tl *List) Has(tid ID) bool { +func (tl *IDList) Has(tid ID) bool { if tl == nil || tl.tags == nil { return false } @@ -31,14 +31,14 @@ func (tl *List) Has(tid ID) bool { return false } -func (tl *List) Add(tags ...ID) { +func (tl *IDList) Add(tags ...ID) { if tl == nil || tl.tags == nil { return } tl.tags = append(tl.tags, tags...) } -func (tl *List) Remove(tags ...ID) { +func (tl *IDList) Remove(tags ...ID) { if tl == nil || tl.tags == nil { return } @@ -52,3 +52,99 @@ func (tl *List) Remove(tags ...ID) { } } } + +type List []Tag + +func DerefList(tags []*Tag) List { + res := make(List, 0, len(tags)) + for _, t := range tags { + if t == nil { + continue + } + res = append(res, *t) + } + return res +} + +func (l List) Items() (res []*Item) { + if len(l) == 0 { + return + } + + res = make([]*Item, 0, len(l)) + for _, t := range l { + if g := ItemFrom(t); g != nil { + res = append(res, g) + } + } + + return res +} + +func (l List) Groups() (res []*Group) { + if len(l) == 0 { + return + } + + res = make([]*Group, 0, len(l)) + for _, t := range l { + if g := GroupFrom(t); g != nil { + res = append(res, g) + } + } + + return res +} + +func (l List) FilterByScene(s SceneID) (res List) { + if len(l) == 0 { + return + } + + res = make(List, 0, len(l)) + for _, t := range l { + if t.Scene() == s { + res = append(res, t) + } + } + + return 
res +} + +func (l List) Roots() (res List) { + if len(l) == 0 { + return + } + + groups := l.Groups() + for _, t := range l { + found := false + for _, u := range groups { + if t.ID() == u.ID() { + continue + } + if u.Tags().Has(t.ID()) { + found = true + } + } + if !found { + res = append(res, t) + } + } + + return res +} + +func (l List) Refs() (res []*Tag) { + if len(l) == 0 { + return + } + + res = make([]*Tag, 0, len(l)) + for _, t := range l { + t := t + res = append(res, &t) + } + + return res +} diff --git a/pkg/tag/list_test.go b/pkg/tag/list_test.go index f73289582..59208d910 100644 --- a/pkg/tag/list_test.go +++ b/pkg/tag/list_test.go @@ -6,34 +6,34 @@ import ( "github.com/stretchr/testify/assert" ) -func TestList_Add(t *testing.T) { +func TesIDtList_Add(t *testing.T) { tid := NewID() - var tl *List + var tl *IDList tl.Add(tid) assert.Nil(t, tl.Tags()) - tl = NewList() + tl = NewIDList() tl.Add(tid) expected := []ID{tid} assert.Equal(t, expected, tl.Tags()) } -func TestList_Remove(t *testing.T) { +func TestIDList_Remove(t *testing.T) { tid := NewID() tid2 := NewID() tags := []ID{ tid, tid2, } - var tl *List + var tl *IDList tl.Remove(tid2) assert.Nil(t, tl.Tags()) - tl = NewListFromTags(tags) + tl = IDListFrom(tags) tl.Remove(tid2) expected := []ID{tid} assert.Equal(t, expected, tl.Tags()) } -func TestList_Has(t *testing.T) { +func TestIDList_Has(t *testing.T) { tid1 := NewID() tid2 := NewID() tags := []ID{ @@ -68,8 +68,83 @@ func TestList_Has(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - res := NewListFromTags(tc.Tags).Has(tc.TID) + res := IDListFrom(tc.Tags).Has(tc.TID) assert.Equal(t, tc.Expected, res) }) } } + +func TestList_Items(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := 
NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag1.ID(), tag2.ID(), + }))).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, []*Item{tag1, tag2, tag3}, tags.Items()) + assert.Nil(t, List(nil).Items()) +} + +func TestList_Groups(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag1.ID(), tag2.ID(), + }))).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, []*Group{tag4}, tags.Groups()) + assert.Nil(t, List(nil).Groups()) +} + +func TestList_FilterByScene(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag1.ID(), tag2.ID(), + }))).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, List{tag1, tag2, tag4}, tags.FilterByScene(sceneID)) + assert.Nil(t, List(nil).FilterByScene(sceneID)) +} + +func TestList_Roots(t *testing.T) { + sceneID := NewSceneID() + sceneID2 := NewSceneID() + tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag1.ID(), tag2.ID(), + }))).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, List{tag3, tag4}, tags.Roots()) + assert.Nil(t, List(nil).Roots()) +} + +func TestList_Refs(t *testing.T) { + sceneID := 
NewSceneID() + sceneID2 := NewSceneID() + var tag1 Tag = NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() + var tag2 Tag = NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() + var tag3 Tag = NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() + var tag4 Tag = NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag1.ID(), tag2.ID(), + }))).MustBuild() + tags := List{tag1, tag2, tag3, tag4} + + assert.Equal(t, []*Tag{&tag1, &tag2, &tag3, &tag4}, tags.Refs()) + assert.Nil(t, List(nil).Refs()) +} diff --git a/pkg/tag/loader.go b/pkg/tag/loader.go new file mode 100644 index 000000000..b90b3511e --- /dev/null +++ b/pkg/tag/loader.go @@ -0,0 +1,46 @@ +package tag + +import "context" + +type Loader func(context.Context, ...ID) ([]*Tag, error) +type SceneLoader func(context.Context, SceneID) ([]*Tag, error) + +func LoaderFrom(data List) Loader { + return func(ctx context.Context, ids ...ID) ([]*Tag, error) { + res := make([]*Tag, 0, len(ids)) + for _, i := range ids { + found := false + for _, d := range data { + if i == d.ID() { + res = append(res, &d) + found = true + break + } + } + if !found { + res = append(res, nil) + } + } + return res, nil + } +} + +func LoaderFromMap(data map[ID]Tag) Loader { + return func(ctx context.Context, ids ...ID) ([]*Tag, error) { + res := make([]*Tag, 0, len(ids)) + for _, i := range ids { + if d, ok := data[i]; ok { + res = append(res, &d) + } else { + res = append(res, nil) + } + } + return res, nil + } +} + +func SceneLoaderFrom(data List) SceneLoader { + return func(ctx context.Context, id SceneID) ([]*Tag, error) { + return data.FilterByScene(id).Refs(), nil + } +} diff --git a/pkg/tag/map.go b/pkg/tag/map.go new file mode 100644 index 000000000..71557e1e2 --- /dev/null +++ b/pkg/tag/map.go @@ -0,0 +1,48 @@ +package tag + +import "sort" + +type Map map[ID]Tag + +func (m Map) All() List { + if m == nil || len(m) == 0 { + return nil + } + res := make(List, 0, len(m)) + for _, t := range m { + 
res = append(res, t) + } + sort.SliceStable(res, func(i, j int) bool { + return res[i].ID().ID().Compare(res[j].ID().ID()) < 0 + }) + return res +} + +func MapFromList(tags []Tag) Map { + res := make(Map) + for _, t := range tags { + if t == nil { + continue + } + + res[t.ID()] = t + } + return res +} + +func MapFromRefList(tags []*Tag) Map { + res := make(Map) + for _, t := range tags { + if t == nil { + continue + } + + t2 := *t + if t2 == nil { + continue + } + + res[t2.ID()] = t2 + } + return res +} From 01a4e6705ac0ee9315a481cbcc20a21515d0d342 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 2 Feb 2022 13:30:49 +0900 Subject: [PATCH 144/253] refactor: graphql resolvers (#105) * refactor gql * refactor context * rename --- internal/adapter/context.go | 84 +++++++ internal/adapter/gql/context.go | 65 +++--- internal/adapter/gql/loader.go | 29 +-- internal/adapter/gql/resolver.go | 12 +- internal/adapter/gql/resolver_asset.go | 5 +- internal/adapter/gql/resolver_dataset.go | 25 +- .../adapter/gql/resolver_dataset_schema.go | 23 +- internal/adapter/gql/resolver_layer.go | 218 ++++-------------- .../adapter/gql/resolver_mutation_asset.go | 10 +- .../adapter/gql/resolver_mutation_dataset.go | 40 +--- .../adapter/gql/resolver_mutation_layer.go | 65 ++---- .../adapter/gql/resolver_mutation_project.go | 20 +- .../adapter/gql/resolver_mutation_property.go | 45 +--- .../adapter/gql/resolver_mutation_scene.go | 59 ++--- internal/adapter/gql/resolver_mutation_tag.go | 30 +-- .../adapter/gql/resolver_mutation_team.go | 30 +-- .../adapter/gql/resolver_mutation_user.go | 20 +- internal/adapter/gql/resolver_plugin.go | 36 +-- internal/adapter/gql/resolver_project.go | 10 +- internal/adapter/gql/resolver_property.go | 153 +++--------- .../adapter/gql/resolver_property_schema.go | 26 +-- internal/adapter/gql/resolver_query.go | 88 ++----- internal/adapter/gql/resolver_scene.go | 65 ++---- internal/adapter/gql/resolver_tag.go | 46 +--- internal/adapter/gql/resolver_team.go | 15 
+- internal/adapter/gql/resolver_user.go | 10 +- internal/adapter/gql/tracer.go | 151 ------------ internal/app/auth.go | 36 +-- internal/app/graphql.go | 46 ++-- internal/app/private.go | 12 +- internal/app/public.go | 8 +- 31 files changed, 372 insertions(+), 1110 deletions(-) create mode 100644 internal/adapter/context.go delete mode 100644 internal/adapter/gql/tracer.go diff --git a/internal/adapter/context.go b/internal/adapter/context.go new file mode 100644 index 000000000..e85980331 --- /dev/null +++ b/internal/adapter/context.go @@ -0,0 +1,84 @@ +package adapter + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/user" +) + +type ContextKey string + +const ( + contextUser ContextKey = "user" + contextOperator ContextKey = "operator" + contextSub ContextKey = "sub" + contextUsecases ContextKey = "usecases" +) + +func AttachUser(ctx context.Context, u *user.User) context.Context { + return context.WithValue(ctx, contextUser, u) +} + +func AttachOperator(ctx context.Context, o *usecase.Operator) context.Context { + return context.WithValue(ctx, contextOperator, o) +} + +func AttachSub(ctx context.Context, sub string) context.Context { + return context.WithValue(ctx, contextSub, sub) +} + +func AttachUsecases(ctx context.Context, u *interfaces.Container) context.Context { + ctx = context.WithValue(ctx, contextUsecases, u) + return ctx +} + +func User(ctx context.Context) *user.User { + if v := ctx.Value(contextUser); v != nil { + if u, ok := v.(*user.User); ok { + return u + } + } + return nil +} + +func Lang(ctx context.Context, lang *string) string { + if lang != nil && *lang != "" { + return *lang + } + + u := User(ctx) + if u == nil { + return "en" // default language + } + + l := u.Lang() + if l.IsRoot() { + return "en" // default language + } + + return l.String() +} + +func Operator(ctx context.Context) 
*usecase.Operator { + if v := ctx.Value(contextOperator); v != nil { + if v2, ok := v.(*usecase.Operator); ok { + return v2 + } + } + return nil +} + +func Sub(ctx context.Context) string { + if v := ctx.Value(contextSub); v != nil { + if v2, ok := v.(string); ok { + return v2 + } + } + return "" +} + +func Usecases(ctx context.Context) *interfaces.Container { + return ctx.Value(contextUsecases).(*interfaces.Container) +} diff --git a/internal/adapter/gql/context.go b/internal/adapter/gql/context.go index df46435bd..fd9a63902 100644 --- a/internal/adapter/gql/context.go +++ b/internal/adapter/gql/context.go @@ -3,59 +3,54 @@ package gql import ( "context" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/user" ) type ContextKey string const ( - ContextUser ContextKey = "user" - ContextOperator ContextKey = "operator" - ContextSub ContextKey = "sub" + contextLoaders ContextKey = "loaders" + contextDataloaders ContextKey = "dataloaders" ) +func AttachUsecases(ctx context.Context, u *interfaces.Container, enableDataLoaders bool) context.Context { + loaders := NewLoaders(u) + dataloaders := loaders.DataLoadersWith(ctx, enableDataLoaders) + + ctx = adapter.AttachUsecases(ctx, u) + ctx = context.WithValue(ctx, contextLoaders, loaders) + ctx = context.WithValue(ctx, contextDataloaders, dataloaders) + + return ctx +} + func getUser(ctx context.Context) *user.User { - if v := ctx.Value(ContextUser); v != nil { - if u, ok := v.(*user.User); ok { - return u - } - } - return nil + return adapter.User(ctx) } func getLang(ctx context.Context, lang *string) string { - if lang != nil && *lang != "" { - return *lang - } + return adapter.Lang(ctx, lang) +} - u := getUser(ctx) - if u == nil { - return "en" // default language - } +func getOperator(ctx context.Context) *usecase.Operator { + return adapter.Operator(ctx) 
+} - l := u.Lang() - if l.IsRoot() { - return "en" // default language - } +func getSub(ctx context.Context) string { + return adapter.Sub(ctx) +} - return l.String() +func usecases(ctx context.Context) *interfaces.Container { + return adapter.Usecases(ctx) } -func getOperator(ctx context.Context) *usecase.Operator { - if v := ctx.Value(ContextOperator); v != nil { - if v2, ok := v.(*usecase.Operator); ok { - return v2 - } - } - return nil +func loaders(ctx context.Context) *Loaders { + return ctx.Value(contextLoaders).(*Loaders) } -func getSub(ctx context.Context) string { - if v := ctx.Value(ContextSub); v != nil { - if v2, ok := v.(string); ok { - return v2 - } - } - return "" +func dataloaders(ctx context.Context) *DataLoaders { + return ctx.Value(contextDataloaders).(*DataLoaders) } diff --git a/internal/adapter/gql/loader.go b/internal/adapter/gql/loader.go index c2df08e3c..78c50ef92 100644 --- a/internal/adapter/gql/loader.go +++ b/internal/adapter/gql/loader.go @@ -45,9 +45,12 @@ type DataLoaders struct { TagGroup TagGroupDataLoader } -func NewLoaders(usecases interfaces.Container) Loaders { - return Loaders{ - usecases: usecases, +func NewLoaders(usecases *interfaces.Container) *Loaders { + if usecases == nil { + return nil + } + return &Loaders{ + usecases: *usecases, Asset: NewAssetLoader(usecases.Asset), Dataset: NewDatasetLoader(usecases.Dataset), Layer: NewLayerLoader(usecases.Layer), @@ -61,15 +64,15 @@ func NewLoaders(usecases interfaces.Container) Loaders { } } -func (l Loaders) DataLoadersWith(ctx context.Context, enabled bool) DataLoaders { +func (l Loaders) DataLoadersWith(ctx context.Context, enabled bool) *DataLoaders { if enabled { return l.DataLoaders(ctx) } return l.OrdinaryDataLoaders(ctx) } -func (l Loaders) DataLoaders(ctx context.Context) DataLoaders { - return DataLoaders{ +func (l Loaders) DataLoaders(ctx context.Context) *DataLoaders { + return &DataLoaders{ Asset: l.Asset.DataLoader(ctx), Dataset: l.Dataset.DataLoader(ctx), 
DatasetSchema: l.Dataset.SchemaDataLoader(ctx), @@ -89,8 +92,8 @@ func (l Loaders) DataLoaders(ctx context.Context) DataLoaders { } } -func (l Loaders) OrdinaryDataLoaders(ctx context.Context) DataLoaders { - return DataLoaders{ +func (l Loaders) OrdinaryDataLoaders(ctx context.Context) *DataLoaders { + return &DataLoaders{ Asset: l.Asset.OrdinaryDataLoader(ctx), Dataset: l.Dataset.OrdinaryDataLoader(ctx), DatasetSchema: l.Dataset.SchemaOrdinaryDataLoader(ctx), @@ -109,13 +112,3 @@ func (l Loaders) OrdinaryDataLoaders(ctx context.Context) DataLoaders { TagGroup: l.Tag.GroupDataLoader(ctx), } } - -type dataLoadersKey struct{} - -func DataLoadersFromContext(ctx context.Context) DataLoaders { - return ctx.Value(dataLoadersKey{}).(DataLoaders) -} - -func DataLoadersKey() interface{} { - return dataLoadersKey{} -} diff --git a/internal/adapter/gql/resolver.go b/internal/adapter/gql/resolver.go index e708250ab..ac987de32 100644 --- a/internal/adapter/gql/resolver.go +++ b/internal/adapter/gql/resolver.go @@ -4,8 +4,6 @@ package gql import ( "errors" - - "github.com/reearth/reearth-backend/internal/usecase/interfaces" ) // THIS CODE IS A STARTING POINT ONLY. IT WILL NOT BE UPDATED WITH SCHEMA CHANGES. 
@@ -14,15 +12,11 @@ var ErrNotImplemented = errors.New("not impleneted yet") var ErrUnauthorized = errors.New("unauthorized") type Resolver struct { - usecases interfaces.Container - loaders Loaders - debug bool + debug bool } -func NewResolver(loaders Loaders, debug bool) ResolverRoot { +func NewResolver(debug bool) ResolverRoot { return &Resolver{ - usecases: loaders.usecases, - loaders: loaders, - debug: debug, + debug: debug, } } diff --git a/internal/adapter/gql/resolver_asset.go b/internal/adapter/gql/resolver_asset.go index b10cd1c4c..a3938eaf6 100644 --- a/internal/adapter/gql/resolver_asset.go +++ b/internal/adapter/gql/resolver_asset.go @@ -14,8 +14,5 @@ func (r *Resolver) Asset() AssetResolver { type assetResolver struct{ *Resolver } func (r *assetResolver) Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) + return dataloaders(ctx).Team.Load(id.TeamID(obj.TeamID)) } diff --git a/internal/adapter/gql/resolver_dataset.go b/internal/adapter/gql/resolver_dataset.go index 511a71d67..1002d1b4d 100644 --- a/internal/adapter/gql/resolver_dataset.go +++ b/internal/adapter/gql/resolver_dataset.go @@ -18,17 +18,11 @@ func (r *Resolver) DatasetField() DatasetFieldResolver { type datasetResolver struct{ *Resolver } func (r *datasetResolver) Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) } func (r *datasetResolver) Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) { - exit := trace(ctx) - defer exit() - - ds, err := DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + ds, err := dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) if 
err != nil || ds == nil || ds.RepresentativeFieldID == nil { return nil, err } @@ -46,24 +40,15 @@ func (r *datasetResolver) Name(ctx context.Context, obj *gqlmodel.Dataset) (*str type datasetFieldResolver struct{ *Resolver } func (r *datasetFieldResolver) Field(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchemaField, error) { - exit := trace(ctx) - defer exit() - - ds, err := DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + ds, err := dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) return ds.Field(obj.FieldID), err } func (r *datasetFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) } func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.Value == nil { return nil, nil } @@ -71,5 +56,5 @@ func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *gqlmodel.Datas if !ok { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(idstr)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(idstr)) } diff --git a/internal/adapter/gql/resolver_dataset_schema.go b/internal/adapter/gql/resolver_dataset_schema.go index 4b4e77920..d72cd9537 100644 --- a/internal/adapter/gql/resolver_dataset_schema.go +++ b/internal/adapter/gql/resolver_dataset_schema.go @@ -19,16 +19,10 @@ func (r *Resolver) DatasetSchemaField() DatasetSchemaFieldResolver { type datasetSchemaResolver struct{ *Resolver } func (r *datasetSchemaResolver) Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return 
DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) { - exit := trace(ctx) - defer exit() - if obj.RepresentativeFieldID == nil { return nil, nil } @@ -42,27 +36,18 @@ func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *gq } func (r *datasetSchemaResolver) Datasets(ctx context.Context, obj *gqlmodel.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Dataset.FindBySchema(ctx, obj.ID, first, last, before, after) + return loaders(ctx).Dataset.FindBySchema(ctx, obj.ID, first, last, before, after) } type datasetSchemaFieldResolver struct{ *Resolver } func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) } func (r *datasetSchemaFieldResolver) Ref(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - if obj.RefID == nil { return nil, nil } - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.RefID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.RefID)) } diff --git a/internal/adapter/gql/resolver_layer.go b/internal/adapter/gql/resolver_layer.go index 1e6ca0283..adbe737a0 100644 --- a/internal/adapter/gql/resolver_layer.go +++ b/internal/adapter/gql/resolver_layer.go @@ -46,17 +46,11 @@ func (r *Resolver) LayerTagGroup() LayerTagGroupResolver { type infoboxResolver struct{ *Resolver } func (r 
*infoboxResolver) Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) } func (r *infoboxResolver) Layer(ctx context.Context, obj *gqlmodel.Infobox) (gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) if err != nil || layer == nil { return nil, err } @@ -64,20 +58,14 @@ func (r *infoboxResolver) Layer(ctx context.Context, obj *gqlmodel.Infobox) (gql } func (r *infoboxResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } func (r *infoboxResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.MergedInfobox, error) { - exit := trace(ctx) - defer exit() - - ml, err := r.loaders.Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) + ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) if err != nil || ml == nil { return nil, err } @@ -85,17 +73,11 @@ func (r *infoboxResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox) (*g } func (r *infoboxResolver) Scene(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - 
layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) if err != nil || layer == nil { return nil, err } @@ -109,7 +91,7 @@ func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox return nil, nil } - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) if err != nil { return nil, err } @@ -119,10 +101,7 @@ func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox type infoboxFieldResolver struct{ *Resolver } func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *gqlmodel.InfoboxField) (gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) if err != nil { return nil, err } @@ -130,10 +109,7 @@ func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *gqlmodel.InfoboxF } func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Infobox, error) { - exit := trace(ctx) - defer exit() - - layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) if err != nil || layer == nil { return nil, err } @@ -145,24 +121,15 @@ func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *gqlmodel.Infobo } func (r *infoboxFieldResolver) Property(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) } func (r *infoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - 
- return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + return dataloaders(ctx).Plugin.Load(obj.PluginID) } func (r *infoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - plugin, err := DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + plugin, err := dataloaders(ctx).Plugin.Load(obj.PluginID) if err != nil { return nil, err } @@ -170,20 +137,14 @@ func (r *infoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.Info } func (r *infoboxFieldResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.MergedInfoboxField, error) { - exit := trace(ctx) - defer exit() - - ml, err := r.loaders.Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) + ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) if err != nil || ml == nil || ml.Infobox == nil { return nil, err } @@ -191,17 +152,11 @@ func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox } func (r *infoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := 
dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) if err != nil { return nil, err } @@ -211,43 +166,31 @@ func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.In type layerGroupResolver struct{ *Resolver } func (r *layerGroupResolver) Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) { - exit := trace(ctx) - defer exit() - if obj.ParentID != nil { - return DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + return dataloaders(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) } - return r.loaders.Layer.FetchParent(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParent(ctx, id.LayerID(obj.ID)) } func (r *layerGroupResolver) Property(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.PropertyID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) } func (r *layerGroupResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - if obj.PluginID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + return dataloaders(ctx).Plugin.Load(*obj.PluginID) } func (r *layerGroupResolver) Extension(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.PluginExtension, error) { - exit := trace(ctx) - defer exit() - if obj.PluginID == nil || obj.ExtensionID == nil { return nil, nil } - plugin, err := DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + plugin, err := dataloaders(ctx).Plugin.Load(*obj.PluginID) if err != nil { return nil, err } @@ -255,27 +198,18 @@ func (r *layerGroupResolver) Extension(ctx context.Context, obj *gqlmodel.LayerG } func (r *layerGroupResolver) ParentLayer(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { - exit := trace(ctx) 
- defer exit() - - return r.loaders.Layer.FetchParent(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParent(ctx, id.LayerID(obj.ID)) } func (r *layerGroupResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetSchemaID == nil { return nil, nil } - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) } func (r *layerGroupResolver) Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - layers, err := DataLoadersFromContext(ctx).Layer.LoadAll(id.LayerIDsFromIDRef(obj.LayerIds)) + layers, err := dataloaders(ctx).Layer.LoadAll(id.LayerIDsFromIDRef(obj.LayerIds)) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -283,20 +217,14 @@ func (r *layerGroupResolver) Layers(ctx context.Context, obj *gqlmodel.LayerGrou } func (r *layerGroupResolver) Scene(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - if obj.PluginID == nil { return nil, nil } - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) if err != nil { return nil, err } @@ -306,43 +234,31 @@ func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Laye type layerItemResolver struct{ *Resolver } func (r *layerItemResolver) Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { - exit := trace(ctx) 
- defer exit() - if obj.ParentID != nil { - return DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + return dataloaders(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) } - return r.loaders.Layer.FetchParent(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParent(ctx, id.LayerID(obj.ID)) } func (r *layerItemResolver) Property(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.PropertyID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) } func (r *layerItemResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - if obj.PluginID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + return dataloaders(ctx).Plugin.Load(*obj.PluginID) } func (r *layerItemResolver) Extension(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.PluginExtension, error) { - exit := trace(ctx) - defer exit() - if obj.PluginID == nil || obj.ExtensionID == nil { return nil, nil } - plugin, err := DataLoadersFromContext(ctx).Plugin.Load(*obj.PluginID) + plugin, err := dataloaders(ctx).Plugin.Load(*obj.PluginID) if err != nil { return nil, err } @@ -350,40 +266,28 @@ func (r *layerItemResolver) Extension(ctx context.Context, obj *gqlmodel.LayerIt } func (r *layerItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } func (r *layerItemResolver) Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) { - exit := trace(ctx) - defer 
exit() - if obj.ParentID == nil { - return r.loaders.Layer.FetchParentAndMerged(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParentAndMerged(ctx, id.LayerID(obj.ID)) } - return r.loaders.Layer.FetchMerged(ctx, id.LayerID(obj.ID), id.LayerIDFromRefID(obj.ParentID)) + return loaders(ctx).Layer.FetchMerged(ctx, id.LayerID(obj.ID), id.LayerIDFromRefID(obj.ParentID)) } func (r *layerItemResolver) Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - if obj.PluginID == nil { return nil, nil } - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) if err != nil { return nil, err } @@ -393,55 +297,37 @@ func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Layer type mergedLayerResolver struct{ *Resolver } func (r *mergedLayerResolver) Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).LayerItem.Load(id.LayerID(obj.OriginalID)) + return dataloaders(ctx).LayerItem.Load(id.LayerID(obj.OriginalID)) } func (r *mergedLayerResolver) Parent(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerGroup, error) { - exit := trace(ctx) - defer exit() - if obj.ParentID == nil { return nil, nil } - return DataLoadersFromContext(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + return dataloaders(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) } func (r *mergedLayerResolver) Scene(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - if 
obj.ParentID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } type mergedInfoboxResolver struct{ *Resolver } func (r *mergedInfoboxResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } type mergedInfoboxFieldResolver struct{ *Resolver } func (r *mergedInfoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + return dataloaders(ctx).Plugin.Load(obj.PluginID) } func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - plugin, err := DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + plugin, err := dataloaders(ctx).Plugin.Load(obj.PluginID) if err != nil { return nil, err } @@ -449,17 +335,11 @@ func (r *mergedInfoboxFieldResolver) Extension(ctx context.Context, obj *gqlmode } func (r *mergedInfoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) if err != nil { return nil, err } @@ -469,10 +349,7 @@ func (r 
*mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmo type layerTagItemResolver struct{ *Resolver } func (r *layerTagItemResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) { - exit := trace(ctx) - defer exit() - - t, err := DataLoadersFromContext(ctx).Tag.Load(id.TagID(obj.TagID)) + t, err := dataloaders(ctx).Tag.Load(id.TagID(obj.TagID)) if err != nil { return nil, err } @@ -482,10 +359,7 @@ func (r *layerTagItemResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagIt type layerTagGroupResolver struct{ *Resolver } func (r *layerTagGroupResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagGroup) (gqlmodel.Tag, error) { - exit := trace(ctx) - defer exit() - - t, err := DataLoadersFromContext(ctx).Tag.Load(id.TagID(obj.TagID)) + t, err := dataloaders(ctx).Tag.Load(id.TagID(obj.TagID)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_asset.go b/internal/adapter/gql/resolver_mutation_asset.go index a22e5dc87..482d7208a 100644 --- a/internal/adapter/gql/resolver_mutation_asset.go +++ b/internal/adapter/gql/resolver_mutation_asset.go @@ -9,10 +9,7 @@ import ( ) func (r *mutationResolver) CreateAsset(ctx context.Context, input gqlmodel.CreateAssetInput) (*gqlmodel.CreateAssetPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Asset.Create(ctx, interfaces.CreateAssetParam{ + res, err := usecases(ctx).Asset.Create(ctx, interfaces.CreateAssetParam{ TeamID: id.TeamID(input.TeamID), File: gqlmodel.FromFile(&input.File), }, getOperator(ctx)) @@ -24,10 +21,7 @@ func (r *mutationResolver) CreateAsset(ctx context.Context, input gqlmodel.Creat } func (r *mutationResolver) RemoveAsset(ctx context.Context, input gqlmodel.RemoveAssetInput) (*gqlmodel.RemoveAssetPayload, error) { - exit := trace(ctx) - defer exit() - - res, err2 := r.usecases.Asset.Remove(ctx, id.AssetID(input.AssetID), getOperator(ctx)) + res, err2 := usecases(ctx).Asset.Remove(ctx, 
id.AssetID(input.AssetID), getOperator(ctx)) if err2 != nil { return nil, err2 } diff --git a/internal/adapter/gql/resolver_mutation_dataset.go b/internal/adapter/gql/resolver_mutation_dataset.go index 51a2d652d..dcdcbf39b 100644 --- a/internal/adapter/gql/resolver_mutation_dataset.go +++ b/internal/adapter/gql/resolver_mutation_dataset.go @@ -9,10 +9,7 @@ import ( ) func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Dataset.UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ + res, err := usecases(ctx).Dataset.UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ SchemaId: id.DatasetSchemaID(input.SchemaID), Name: input.Name, }, getOperator(ctx)) @@ -24,10 +21,7 @@ func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input gqlmod } func (r *mutationResolver) AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Dataset.AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ + res, err := usecases(ctx).Dataset.AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ SceneId: id.SceneID(input.SceneID), }) if err != nil { @@ -38,10 +32,7 @@ func (r *mutationResolver) AddDynamicDatasetSchema(ctx context.Context, input gq } func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input gqlmodel.AddDynamicDatasetInput) (*gqlmodel.AddDynamicDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - dss, ds, err := r.usecases.Dataset.AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ + dss, ds, err := usecases(ctx).Dataset.AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ SchemaId: id.DatasetSchemaID(input.DatasetSchemaID), Author: input.Author, Content: input.Content, 
@@ -57,10 +48,7 @@ func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input gqlmodel } func (r *mutationResolver) SyncDataset(ctx context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - dss, ds, err := r.usecases.Dataset.Sync(ctx, id.SceneID(input.SceneID), input.URL, getOperator(ctx)) + dss, ds, err := usecases(ctx).Dataset.Sync(ctx, id.SceneID(input.SceneID), input.URL, getOperator(ctx)) if err != nil { return nil, err } @@ -83,10 +71,7 @@ func (r *mutationResolver) SyncDataset(ctx context.Context, input gqlmodel.SyncD } func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Dataset.RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ + res, err := usecases(ctx).Dataset.RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ SchemaId: id.DatasetSchemaID(input.SchemaID), Force: input.Force, }, getOperator(ctx)) @@ -98,10 +83,7 @@ func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input gqlmod } func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input gqlmodel.AddDatasetSchemaInput) (*gqlmodel.AddDatasetSchemaPayload, error) { - exit := trace(ctx) - defer exit() - - res, err2 := r.usecases.Dataset.AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ + res, err2 := usecases(ctx).Dataset.AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ SceneId: id.SceneID(input.SceneID), Name: input.Name, RepresentativeField: id.DatasetSchemaFieldIDFromRefID(input.Representativefield), @@ -114,10 +96,7 @@ func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input gqlmodel. 
} func (r *mutationResolver) ImportDataset(ctx context.Context, input gqlmodel.ImportDatasetInput) (*gqlmodel.ImportDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Dataset.ImportDataset(ctx, interfaces.ImportDatasetParam{ + res, err := usecases(ctx).Dataset.ImportDataset(ctx, interfaces.ImportDatasetParam{ SceneId: id.SceneID(input.SceneID), SchemaId: id.DatasetSchemaIDFromRefID(input.DatasetSchemaID), File: gqlmodel.FromFile(&input.File), @@ -130,10 +109,7 @@ func (r *mutationResolver) ImportDataset(ctx context.Context, input gqlmodel.Imp } func (r *mutationResolver) ImportDatasetFromGoogleSheet(ctx context.Context, input gqlmodel.ImportDatasetFromGoogleSheetInput) (*gqlmodel.ImportDatasetPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Dataset.ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ + res, err := usecases(ctx).Dataset.ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ Token: input.AccessToken, FileID: input.FileID, SheetName: input.SheetName, diff --git a/internal/adapter/gql/resolver_mutation_layer.go b/internal/adapter/gql/resolver_mutation_layer.go index e745793be..843af6a2b 100644 --- a/internal/adapter/gql/resolver_mutation_layer.go +++ b/internal/adapter/gql/resolver_mutation_layer.go @@ -9,10 +9,7 @@ import ( ) func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddLayerItemInput) (*gqlmodel.AddLayerItemPayload, error) { - exit := trace(ctx) - defer exit() - - layer, parent, err := r.usecases.Layer.AddItem(ctx, interfaces.AddLayerItemInput{ + layer, parent, err := usecases(ctx).Layer.AddItem(ctx, interfaces.AddLayerItemInput{ ParentLayerID: id.LayerID(input.ParentLayerID), PluginID: &input.PluginID, ExtensionID: &input.ExtensionID, @@ -33,10 +30,7 @@ func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddL } func (r *mutationResolver) AddLayerGroup(ctx context.Context, 
input gqlmodel.AddLayerGroupInput) (*gqlmodel.AddLayerGroupPayload, error) { - exit := trace(ctx) - defer exit() - - layer, parent, err := r.usecases.Layer.AddGroup(ctx, interfaces.AddLayerGroupInput{ + layer, parent, err := usecases(ctx).Layer.AddGroup(ctx, interfaces.AddLayerGroupInput{ ParentLayerID: id.LayerID(input.ParentLayerID), PluginID: input.PluginID, ExtensionID: input.ExtensionID, @@ -57,10 +51,7 @@ func (r *mutationResolver) AddLayerGroup(ctx context.Context, input gqlmodel.Add } func (r *mutationResolver) RemoveLayer(ctx context.Context, input gqlmodel.RemoveLayerInput) (*gqlmodel.RemoveLayerPayload, error) { - exit := trace(ctx) - defer exit() - - id, layer, err := r.usecases.Layer.Remove(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + id, layer, err := usecases(ctx).Layer.Remove(ctx, id.LayerID(input.LayerID), getOperator(ctx)) if err != nil { return nil, err } @@ -72,10 +63,7 @@ func (r *mutationResolver) RemoveLayer(ctx context.Context, input gqlmodel.Remov } func (r *mutationResolver) UpdateLayer(ctx context.Context, input gqlmodel.UpdateLayerInput) (*gqlmodel.UpdateLayerPayload, error) { - exit := trace(ctx) - defer exit() - - layer, err := r.usecases.Layer.Update(ctx, interfaces.UpdateLayerInput{ + layer, err := usecases(ctx).Layer.Update(ctx, interfaces.UpdateLayerInput{ LayerID: id.LayerID(input.LayerID), Name: input.Name, Visible: input.Visible, @@ -90,10 +78,7 @@ func (r *mutationResolver) UpdateLayer(ctx context.Context, input gqlmodel.Updat } func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLayerInput) (*gqlmodel.MoveLayerPayload, error) { - exit := trace(ctx) - defer exit() - - targetLayerID, layerGroupFrom, layerGroupTo, index, err := r.usecases.Layer.Move(ctx, interfaces.MoveLayerInput{ + targetLayerID, layerGroupFrom, layerGroupTo, index, err := usecases(ctx).Layer.Move(ctx, interfaces.MoveLayerInput{ LayerID: id.LayerID(input.LayerID), DestLayerID: id.LayerIDFromRefID(input.DestLayerID), Index: 
gqlmodel.RefToIndex(input.Index), @@ -111,10 +96,7 @@ func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLay } func (r *mutationResolver) CreateInfobox(ctx context.Context, input gqlmodel.CreateInfoboxInput) (*gqlmodel.CreateInfoboxPayload, error) { - exit := trace(ctx) - defer exit() - - layer, err := r.usecases.Layer.CreateInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + layer, err := usecases(ctx).Layer.CreateInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) if err != nil { return nil, err } @@ -125,10 +107,7 @@ func (r *mutationResolver) CreateInfobox(ctx context.Context, input gqlmodel.Cre } func (r *mutationResolver) RemoveInfobox(ctx context.Context, input gqlmodel.RemoveInfoboxInput) (*gqlmodel.RemoveInfoboxPayload, error) { - exit := trace(ctx) - defer exit() - - layer, err := r.usecases.Layer.RemoveInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + layer, err := usecases(ctx).Layer.RemoveInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) if err != nil { return nil, err } @@ -139,10 +118,7 @@ func (r *mutationResolver) RemoveInfobox(ctx context.Context, input gqlmodel.Rem } func (r *mutationResolver) AddInfoboxField(ctx context.Context, input gqlmodel.AddInfoboxFieldInput) (*gqlmodel.AddInfoboxFieldPayload, error) { - exit := trace(ctx) - defer exit() - - infoboxField, layer, err := r.usecases.Layer.AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ + infoboxField, layer, err := usecases(ctx).Layer.AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ LayerID: id.LayerID(input.LayerID), PluginID: input.PluginID, ExtensionID: input.ExtensionID, @@ -159,10 +135,7 @@ func (r *mutationResolver) AddInfoboxField(ctx context.Context, input gqlmodel.A } func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) { - exit := trace(ctx) - defer exit() - - infoboxField, layer, index, err := 
r.usecases.Layer.MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ + infoboxField, layer, index, err := usecases(ctx).Layer.MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ LayerID: id.LayerID(input.LayerID), InfoboxFieldID: id.InfoboxFieldID(input.InfoboxFieldID), Index: input.Index, @@ -179,10 +152,7 @@ func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input gqlmodel. } func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) { - exit := trace(ctx) - defer exit() - - infoboxField, layer, err := r.usecases.Layer.RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ + infoboxField, layer, err := usecases(ctx).Layer.RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ LayerID: id.LayerID(input.LayerID), InfoboxFieldID: id.InfoboxFieldID(input.InfoboxFieldID), }, getOperator(ctx)) @@ -197,10 +167,7 @@ func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input gqlmode } func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) { - exit := trace(ctx) - defer exit() - - l, l2, err := r.usecases.Layer.ImportLayer(ctx, interfaces.ImportLayerParam{ + l, l2, err := usecases(ctx).Layer.ImportLayer(ctx, interfaces.ImportLayerParam{ LayerID: id.LayerID(input.LayerID), File: gqlmodel.FromFile(&input.File), Format: gqlmodel.FromLayerEncodingFormat(input.Format), @@ -216,10 +183,7 @@ func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.Impor } func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) { - exit := trace(ctx) - defer exit() - - layer, err := r.usecases.Layer.AttachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + layer, err := usecases(ctx).Layer.AttachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), 
getOperator(ctx)) if err != nil { return nil, err } @@ -229,10 +193,7 @@ func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel. } func (r *mutationResolver) DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) { - exit := trace(ctx) - defer exit() - - layer, err := r.usecases.Layer.DetachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + layer, err := usecases(ctx).Layer.DetachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_project.go b/internal/adapter/gql/resolver_mutation_project.go index 1cc522553..60d054792 100644 --- a/internal/adapter/gql/resolver_mutation_project.go +++ b/internal/adapter/gql/resolver_mutation_project.go @@ -10,10 +10,7 @@ import ( ) func (r *mutationResolver) CreateProject(ctx context.Context, input gqlmodel.CreateProjectInput) (*gqlmodel.ProjectPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Project.Create(ctx, interfaces.CreateProjectParam{ + res, err := usecases(ctx).Project.Create(ctx, interfaces.CreateProjectParam{ TeamID: id.TeamID(input.TeamID), Visualizer: visualizer.Visualizer(input.Visualizer), Name: input.Name, @@ -30,9 +27,6 @@ func (r *mutationResolver) CreateProject(ctx context.Context, input gqlmodel.Cre } func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.UpdateProjectInput) (*gqlmodel.ProjectPayload, error) { - exit := trace(ctx) - defer exit() - deletePublicImage := false if input.DeletePublicImage != nil { deletePublicImage = *input.DeletePublicImage @@ -43,7 +37,7 @@ func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.Upd deleteImageURL = *input.DeleteImageURL } - res, err := r.usecases.Project.Update(ctx, interfaces.UpdateProjectParam{ + res, err := usecases(ctx).Project.Update(ctx, 
interfaces.UpdateProjectParam{ ID: id.ProjectID(input.ProjectID), Name: input.Name, Description: input.Description, @@ -68,10 +62,7 @@ func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.Upd } func (r *mutationResolver) PublishProject(ctx context.Context, input gqlmodel.PublishProjectInput) (*gqlmodel.ProjectPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Project.Publish(ctx, interfaces.PublishProjectParam{ + res, err := usecases(ctx).Project.Publish(ctx, interfaces.PublishProjectParam{ ID: id.ProjectID(input.ProjectID), Alias: input.Alias, Status: gqlmodel.FromPublishmentStatus(input.Status), @@ -84,10 +75,7 @@ func (r *mutationResolver) PublishProject(ctx context.Context, input gqlmodel.Pu } func (r *mutationResolver) DeleteProject(ctx context.Context, input gqlmodel.DeleteProjectInput) (*gqlmodel.DeleteProjectPayload, error) { - exit := trace(ctx) - defer exit() - - err := r.usecases.Project.Delete(ctx, id.ProjectID(input.ProjectID), getOperator(ctx)) + err := usecases(ctx).Project.Delete(ctx, id.ProjectID(input.ProjectID), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_property.go b/internal/adapter/gql/resolver_mutation_property.go index 88d7bbee6..c117fda9e 100644 --- a/internal/adapter/gql/resolver_mutation_property.go +++ b/internal/adapter/gql/resolver_mutation_property.go @@ -11,9 +11,6 @@ import ( ) func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmodel.UpdatePropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - var v *property.Value if input.Value != nil { v = gqlmodel.FromPropertyValueAndType(input.Value, input.Type) @@ -22,7 +19,7 @@ func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmod } } - pp, pgl, pg, pf, err := r.usecases.Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ + pp, pgl, pg, pf, err := 
usecases(ctx).Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), Value: v, @@ -38,10 +35,7 @@ func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmod } func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmodel.RemovePropertyFieldInput) (*gqlmodel.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - p, err := r.usecases.Property.RemoveField(ctx, interfaces.RemovePropertyFieldParam{ + p, err := usecases(ctx).Property.RemoveField(ctx, interfaces.RemovePropertyFieldParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), }, getOperator(ctx)) @@ -55,10 +49,7 @@ func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmod } func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmodel.UploadFileToPropertyInput) (*gqlmodel.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - p, pgl, pg, pf, err := r.usecases.Property.UploadFile(ctx, interfaces.UploadFileParam{ + p, pgl, pg, pf, err := usecases(ctx).Property.UploadFile(ctx, interfaces.UploadFileParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), File: gqlmodel.FromFile(&input.File), @@ -74,10 +65,7 @@ func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmo } func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input gqlmodel.LinkDatasetToPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - p, pgl, pg, pf, err := r.usecases.Property.LinkValue(ctx, interfaces.LinkPropertyValueParam{ + p, pgl, pg, pf, err := usecases(ctx).Property.LinkValue(ctx, interfaces.LinkPropertyValueParam{ PropertyID: 
id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), Links: gqlmodel.FromPropertyFieldLink( @@ -97,10 +85,7 @@ func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input } func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmodel.UnlinkPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { - exit := trace(ctx) - defer exit() - - p, pgl, pg, pf, err := r.usecases.Property.UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ + p, pgl, pg, pf, err := usecases(ctx).Property.UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), }, getOperator(ctx)) @@ -115,9 +100,6 @@ func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmod } func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.AddPropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - var v *property.Value if input.NameFieldType != nil { v = gqlmodel.FromPropertyValueAndType(input.NameFieldValue, *input.NameFieldType) @@ -126,7 +108,7 @@ func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.A } } - p, pgl, pi, err := r.usecases.Property.AddItem(ctx, interfaces.AddPropertyItemParam{ + p, pgl, pi, err := usecases(ctx).Property.AddItem(ctx, interfaces.AddPropertyItemParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, nil, nil), Index: input.Index, @@ -144,10 +126,7 @@ func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.A } func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel.MovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - - p, pgl, pi, err := r.usecases.Property.MoveItem(ctx, interfaces.MovePropertyItemParam{ + 
p, pgl, pi, err := usecases(ctx).Property.MoveItem(ctx, interfaces.MovePropertyItemParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, &input.ItemID, nil), Index: input.Index, @@ -163,10 +142,7 @@ func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel. } func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmodel.RemovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - - p, err := r.usecases.Property.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ + p, err := usecases(ctx).Property.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, &input.ItemID, nil), }, getOperator(ctx)) @@ -180,9 +156,6 @@ func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmode } func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmodel.UpdatePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { - exit := trace(ctx) - defer exit() - op := make([]interfaces.UpdatePropertyItemsOperationParam, 0, len(input.Operations)) for _, o := range input.Operations { var v *property.Value @@ -201,7 +174,7 @@ func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmod }) } - p, err2 := r.usecases.Property.UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ + p, err2 := usecases(ctx).Property.UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ PropertyID: id.PropertyID(input.PropertyID), Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, nil, nil), Operations: op, diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index 8379a7a86..26377eeea 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -12,10 +12,7 @@ import ( ) func (r *mutationResolver) CreateScene(ctx 
context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Scene.Create( + res, err := usecases(ctx).Scene.Create( ctx, id.ProjectID(input.ProjectID), getOperator(ctx), @@ -30,10 +27,7 @@ func (r *mutationResolver) CreateScene(ctx context.Context, input gqlmodel.Creat } func (r *mutationResolver) AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) { - exit := trace(ctx) - defer exit() - - scene, widget, err := r.usecases.Scene.AddWidget( + scene, widget, err := usecases(ctx).Scene.AddWidget( ctx, id.SceneID(input.SceneID), input.PluginID, @@ -51,10 +45,7 @@ func (r *mutationResolver) AddWidget(ctx context.Context, input gqlmodel.AddWidg } func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) { - exit := trace(ctx) - defer exit() - - scene, widget, err := r.usecases.Scene.UpdateWidget(ctx, interfaces.UpdateWidgetParam{ + scene, widget, err := usecases(ctx).Scene.UpdateWidget(ctx, interfaces.UpdateWidgetParam{ SceneID: id.SceneID(input.SceneID), WidgetID: id.WidgetID(input.WidgetID), Enabled: input.Enabled, @@ -73,10 +64,7 @@ func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.Upda } func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) { - exit := trace(ctx) - defer exit() - - scene, err := r.usecases.Scene.RemoveWidget(ctx, + scene, err := usecases(ctx).Scene.RemoveWidget(ctx, id.SceneID(input.SceneID), id.WidgetID(input.WidgetID), getOperator(ctx), @@ -92,10 +80,7 @@ func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.Remo } func (r *mutationResolver) UpdateWidgetAlignSystem(ctx context.Context, input gqlmodel.UpdateWidgetAlignSystemInput) (*gqlmodel.UpdateWidgetAlignSystemPayload, error) { - exit := trace(ctx) - defer 
exit() - - scene, err := r.usecases.Scene.UpdateWidgetAlignSystem(ctx, interfaces.UpdateWidgetAlignSystemParam{ + scene, err := usecases(ctx).Scene.UpdateWidgetAlignSystem(ctx, interfaces.UpdateWidgetAlignSystemParam{ SceneID: id.SceneID(input.SceneID), Location: *gqlmodel.FromSceneWidgetLocation(input.Location), Align: gqlmodel.FromWidgetAlignType(input.Align), @@ -110,10 +95,7 @@ func (r *mutationResolver) UpdateWidgetAlignSystem(ctx context.Context, input gq } func (r *mutationResolver) InstallPlugin(ctx context.Context, input gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) { - exit := trace(ctx) - defer exit() - - scene, pl, pr, err := r.usecases.Scene.InstallPlugin(ctx, + scene, pl, pr, err := usecases(ctx).Scene.InstallPlugin(ctx, id.SceneID(input.SceneID), input.PluginID, getOperator(ctx), @@ -131,18 +113,15 @@ func (r *mutationResolver) InstallPlugin(ctx context.Context, input gqlmodel.Ins } func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) { - exit := trace(ctx) - defer exit() - operator := getOperator(ctx) var p *plugin.Plugin var s *scene.Scene var err error if input.File != nil { - p, s, err = r.usecases.Plugin.Upload(ctx, input.File.File, id.SceneID(input.SceneID), operator) + p, s, err = usecases(ctx).Plugin.Upload(ctx, input.File.File, id.SceneID(input.SceneID), operator) } else if input.URL != nil { - p, s, err = r.usecases.Plugin.UploadFromRemote(ctx, input.URL, id.SceneID(input.SceneID), operator) + p, s, err = usecases(ctx).Plugin.UploadFromRemote(ctx, input.URL, id.SceneID(input.SceneID), operator) } else { return nil, errors.New("either file or url is required") } @@ -158,10 +137,7 @@ func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.Uplo } func (r *mutationResolver) UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) { - exit := trace(ctx) - defer 
exit() - - scene, err := r.usecases.Scene.UninstallPlugin(ctx, + scene, err := usecases(ctx).Scene.UninstallPlugin(ctx, id.SceneID(input.SceneID), id.PluginID(input.PluginID), getOperator(ctx), @@ -177,10 +153,7 @@ func (r *mutationResolver) UninstallPlugin(ctx context.Context, input gqlmodel.U } func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) { - exit := trace(ctx) - defer exit() - - s, err := r.usecases.Scene.UpgradePlugin(ctx, + s, err := usecases(ctx).Scene.UpgradePlugin(ctx, id.SceneID(input.SceneID), input.PluginID, input.ToPluginID, @@ -197,9 +170,7 @@ func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.Upg } func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) { - exit := trace(ctx) - defer exit() - s, c, err := r.usecases.Scene.AddCluster(ctx, id.SceneID(input.SceneID), input.Name, getOperator(ctx)) + s, c, err := usecases(ctx).Scene.AddCluster(ctx, id.SceneID(input.SceneID), input.Name, getOperator(ctx)) if err != nil { return nil, err } @@ -211,9 +182,7 @@ func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClu } func (r *mutationResolver) UpdateCluster(ctx context.Context, input gqlmodel.UpdateClusterInput) (*gqlmodel.UpdateClusterPayload, error) { - exit := trace(ctx) - defer exit() - s, c, err := r.usecases.Scene.UpdateCluster(ctx, interfaces.UpdateClusterParam{ + s, c, err := usecases(ctx).Scene.UpdateCluster(ctx, interfaces.UpdateClusterParam{ ClusterID: id.ClusterID(input.ClusterID), SceneID: id.SceneID(input.SceneID), Name: input.Name, @@ -230,9 +199,7 @@ func (r *mutationResolver) UpdateCluster(ctx context.Context, input gqlmodel.Upd } func (r *mutationResolver) RemoveCluster(ctx context.Context, input gqlmodel.RemoveClusterInput) (*gqlmodel.RemoveClusterPayload, error) { - exit := trace(ctx) - defer exit() - s, err := 
r.usecases.Scene.RemoveCluster(ctx, id.SceneID(input.SceneID), id.ClusterID(input.ClusterID), getOperator(ctx)) + s, err := usecases(ctx).Scene.RemoveCluster(ctx, id.SceneID(input.SceneID), id.ClusterID(input.ClusterID), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_tag.go b/internal/adapter/gql/resolver_mutation_tag.go index ceffc8459..99bd76cac 100644 --- a/internal/adapter/gql/resolver_mutation_tag.go +++ b/internal/adapter/gql/resolver_mutation_tag.go @@ -9,10 +9,7 @@ import ( ) func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.CreateTagItemInput) (*gqlmodel.CreateTagItemPayload, error) { - exit := trace(ctx) - defer exit() - - tag, parent, err := r.usecases.Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ + tag, parent, err := usecases(ctx).Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ Label: input.Label, SceneID: id.SceneID(input.SceneID), Parent: id.TagIDFromRefID(input.Parent), @@ -31,10 +28,7 @@ func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.Cre } func (r *mutationResolver) CreateTagGroup(ctx context.Context, input gqlmodel.CreateTagGroupInput) (*gqlmodel.CreateTagGroupPayload, error) { - exit := trace(ctx) - defer exit() - - tag, err := r.usecases.Tag.CreateGroup(ctx, interfaces.CreateTagGroupParam{ + tag, err := usecases(ctx).Tag.CreateGroup(ctx, interfaces.CreateTagGroupParam{ Label: input.Label, SceneID: id.SceneID(input.SceneID), Tags: id.TagIDsFromIDRef(input.Tags), @@ -48,10 +42,7 @@ func (r *mutationResolver) CreateTagGroup(ctx context.Context, input gqlmodel.Cr } func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) { - exit := trace(ctx) - defer exit() - - tag, err := r.usecases.Tag.UpdateTag(ctx, interfaces.UpdateTagParam{ + tag, err := usecases(ctx).Tag.UpdateTag(ctx, interfaces.UpdateTagParam{ Label: input.Label, SceneID: id.SceneID(input.SceneID), 
TagID: id.TagID(input.TagID), @@ -65,10 +56,7 @@ func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateT } func (r *mutationResolver) AttachTagItemToGroup(ctx context.Context, input gqlmodel.AttachTagItemToGroupInput) (*gqlmodel.AttachTagItemToGroupPayload, error) { - exit := trace(ctx) - defer exit() - - tag, err := r.usecases.Tag.AttachItemToGroup(ctx, interfaces.AttachItemToGroupParam{ + tag, err := usecases(ctx).Tag.AttachItemToGroup(ctx, interfaces.AttachItemToGroupParam{ ItemID: id.TagID(input.ItemID), GroupID: id.TagID(input.GroupID), }, getOperator(ctx)) @@ -81,10 +69,7 @@ func (r *mutationResolver) AttachTagItemToGroup(ctx context.Context, input gqlmo } func (r *mutationResolver) DetachTagItemFromGroup(ctx context.Context, input gqlmodel.DetachTagItemFromGroupInput) (*gqlmodel.DetachTagItemFromGroupPayload, error) { - exit := trace(ctx) - defer exit() - - tag, err := r.usecases.Tag.DetachItemFromGroup(ctx, interfaces.DetachItemToGroupParam{ + tag, err := usecases(ctx).Tag.DetachItemFromGroup(ctx, interfaces.DetachItemToGroupParam{ ItemID: id.TagID(input.ItemID), GroupID: id.TagID(input.GroupID), }, getOperator(ctx)) @@ -97,10 +82,7 @@ func (r *mutationResolver) DetachTagItemFromGroup(ctx context.Context, input gql } func (r *mutationResolver) RemoveTag(ctx context.Context, input gqlmodel.RemoveTagInput) (*gqlmodel.RemoveTagPayload, error) { - exit := trace(ctx) - defer exit() - - tagID, layers, err := r.usecases.Tag.Remove(ctx, id.TagID(input.TagID), getOperator(ctx)) + tagID, layers, err := usecases(ctx).Tag.Remove(ctx, id.TagID(input.TagID), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_team.go b/internal/adapter/gql/resolver_mutation_team.go index f40d87c1a..4595933a4 100644 --- a/internal/adapter/gql/resolver_mutation_team.go +++ b/internal/adapter/gql/resolver_mutation_team.go @@ -8,10 +8,7 @@ import ( ) func (r *mutationResolver) CreateTeam(ctx context.Context, input 
gqlmodel.CreateTeamInput) (*gqlmodel.CreateTeamPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Team.Create(ctx, input.Name, getUser(ctx).ID()) + res, err := usecases(ctx).Team.Create(ctx, input.Name, getUser(ctx).ID()) if err != nil { return nil, err } @@ -20,10 +17,7 @@ func (r *mutationResolver) CreateTeam(ctx context.Context, input gqlmodel.Create } func (r *mutationResolver) DeleteTeam(ctx context.Context, input gqlmodel.DeleteTeamInput) (*gqlmodel.DeleteTeamPayload, error) { - exit := trace(ctx) - defer exit() - - if err := r.usecases.Team.Remove(ctx, id.TeamID(input.TeamID), getOperator(ctx)); err != nil { + if err := usecases(ctx).Team.Remove(ctx, id.TeamID(input.TeamID), getOperator(ctx)); err != nil { return nil, err } @@ -31,10 +25,7 @@ func (r *mutationResolver) DeleteTeam(ctx context.Context, input gqlmodel.Delete } func (r *mutationResolver) UpdateTeam(ctx context.Context, input gqlmodel.UpdateTeamInput) (*gqlmodel.UpdateTeamPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Team.Update(ctx, id.TeamID(input.TeamID), input.Name, getOperator(ctx)) + res, err := usecases(ctx).Team.Update(ctx, id.TeamID(input.TeamID), input.Name, getOperator(ctx)) if err != nil { return nil, err } @@ -43,10 +34,7 @@ func (r *mutationResolver) UpdateTeam(ctx context.Context, input gqlmodel.Update } func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input gqlmodel.AddMemberToTeamInput) (*gqlmodel.AddMemberToTeamPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Team.AddMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) + res, err := usecases(ctx).Team.AddMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) if err != nil { return nil, err } @@ -55,10 +43,7 @@ func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input gqlmodel.A } func (r 
*mutationResolver) RemoveMemberFromTeam(ctx context.Context, input gqlmodel.RemoveMemberFromTeamInput) (*gqlmodel.RemoveMemberFromTeamPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Team.RemoveMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), getOperator(ctx)) + res, err := usecases(ctx).Team.RemoveMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), getOperator(ctx)) if err != nil { return nil, err } @@ -67,10 +52,7 @@ func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input gqlmo } func (r *mutationResolver) UpdateMemberOfTeam(ctx context.Context, input gqlmodel.UpdateMemberOfTeamInput) (*gqlmodel.UpdateMemberOfTeamPayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.Team.UpdateMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) + res, err := usecases(ctx).Team.UpdateMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_user.go b/internal/adapter/gql/resolver_mutation_user.go index 7f1930a18..e62362359 100644 --- a/internal/adapter/gql/resolver_mutation_user.go +++ b/internal/adapter/gql/resolver_mutation_user.go @@ -9,15 +9,12 @@ import ( ) func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) { - exit := trace(ctx) - defer exit() - secret := "" if input.Secret != nil { secret = *input.Secret } - u, team, err := r.usecases.User.Signup(ctx, interfaces.SignupParam{ + u, team, err := usecases(ctx).User.Signup(ctx, interfaces.SignupParam{ Sub: getSub(ctx), Lang: input.Lang, Theme: gqlmodel.ToTheme(input.Theme), @@ -33,10 +30,7 @@ func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInpu } func (r *mutationResolver) UpdateMe(ctx context.Context, input gqlmodel.UpdateMeInput) 
(*gqlmodel.UpdateMePayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.User.UpdateMe(ctx, interfaces.UpdateMeParam{ + res, err := usecases(ctx).User.UpdateMe(ctx, interfaces.UpdateMeParam{ Name: input.Name, Email: input.Email, Lang: input.Lang, @@ -52,10 +46,7 @@ func (r *mutationResolver) UpdateMe(ctx context.Context, input gqlmodel.UpdateMe } func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input gqlmodel.RemoveMyAuthInput) (*gqlmodel.UpdateMePayload, error) { - exit := trace(ctx) - defer exit() - - res, err := r.usecases.User.RemoveMyAuth(ctx, input.Auth, getOperator(ctx)) + res, err := usecases(ctx).User.RemoveMyAuth(ctx, input.Auth, getOperator(ctx)) if err != nil { return nil, err } @@ -64,10 +55,7 @@ func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input gqlmodel.Remo } func (r *mutationResolver) DeleteMe(ctx context.Context, input gqlmodel.DeleteMeInput) (*gqlmodel.DeleteMePayload, error) { - exit := trace(ctx) - defer exit() - - if err := r.usecases.User.DeleteMe(ctx, id.UserID(input.UserID), getOperator(ctx)); err != nil { + if err := usecases(ctx).User.DeleteMe(ctx, id.UserID(input.UserID), getOperator(ctx)); err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_plugin.go b/internal/adapter/gql/resolver_plugin.go index 2648729e2..7440fb18a 100644 --- a/internal/adapter/gql/resolver_plugin.go +++ b/internal/adapter/gql/resolver_plugin.go @@ -18,36 +18,27 @@ func (r *Resolver) PluginExtension() PluginExtensionResolver { type pluginResolver struct{ *Resolver } func (r *pluginResolver) PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - if obj.PropertySchemaID == nil { return nil, nil } - return DataLoadersFromContext(ctx).PropertySchema.Load(*obj.PropertySchemaID) + return dataloaders(ctx).PropertySchema.Load(*obj.PropertySchemaID) } func (r *pluginResolver) Scene(ctx context.Context, obj *gqlmodel.Plugin) 
(*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - if obj.SceneID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*obj.SceneID)) + return dataloaders(ctx).Scene.Load(id.SceneID(*obj.SceneID)) } func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Plugin, sceneID *id.ID) (*gqlmodel.ScenePlugin, error) { - exit := trace(ctx) - defer exit() - if sceneID == nil && obj.SceneID != nil { sceneID = obj.SceneID } if sceneID == nil { return nil, nil } - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(*sceneID)) + s, err := dataloaders(ctx).Scene.Load(id.SceneID(*sceneID)) return s.Plugin(obj.ID), err } @@ -68,31 +59,19 @@ func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *gqlmode type pluginExtensionResolver struct{ *Resolver } func (r *pluginExtensionResolver) Plugin(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + return dataloaders(ctx).Plugin.Load(obj.PluginID) } func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.PropertySchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.PropertySchemaID) } func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID id.ID) (*gqlmodel.SceneWidget, error) { - exit := trace(ctx) - defer exit() - - s, err := DataLoadersFromContext(ctx).Scene.Load(id.SceneID(sceneID)) + s, err := dataloaders(ctx).Scene.Load(id.SceneID(sceneID)) return s.Widget(obj.PluginID, obj.ExtensionID), err } func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - if s, ok := 
obj.AllTranslatedName[getLang(ctx, lang)]; ok { return s, nil } @@ -100,9 +79,6 @@ func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmo } func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { return s, nil } diff --git a/internal/adapter/gql/resolver_project.go b/internal/adapter/gql/resolver_project.go index c3a0b9610..fe8cb010e 100644 --- a/internal/adapter/gql/resolver_project.go +++ b/internal/adapter/gql/resolver_project.go @@ -15,17 +15,11 @@ func (r *Resolver) Project() ProjectResolver { type projectResolver struct{ *Resolver } func (r *projectResolver) Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) + return dataloaders(ctx).Team.Load(id.TeamID(obj.TeamID)) } func (r *projectResolver) Scene(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - s, err := r.loaders.Scene.FindByProject(ctx, id.ProjectID(obj.ID)) + s, err := loaders(ctx).Scene.FindByProject(ctx, id.ProjectID(obj.ID)) if err != nil && err != rerror.ErrNotFound { return nil, err } diff --git a/internal/adapter/gql/resolver_property.go b/internal/adapter/gql/resolver_property.go index f0885bfd1..8ccd73b93 100644 --- a/internal/adapter/gql/resolver_property.go +++ b/internal/adapter/gql/resolver_property.go @@ -44,17 +44,11 @@ func (r *Resolver) PropertyGroup() PropertyGroupResolver { type propertyResolver struct{ *Resolver } func (r *propertyResolver) Schema(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return 
dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (r *propertyResolver) Layer(ctx context.Context, obj *gqlmodel.Property) (gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - l, err := r.loaders.Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) + l, err := loaders(ctx).Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) if err != nil || errors.Is(err, rerror.ErrNotFound) { return nil, nil } @@ -62,10 +56,7 @@ func (r *propertyResolver) Layer(ctx context.Context, obj *gqlmodel.Property) (g } func (r *propertyResolver) Merged(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.MergedProperty, error) { - exit := trace(ctx) - defer exit() - - l, err := r.loaders.Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) + l, err := loaders(ctx).Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) if err != nil { if errors.Is(err, rerror.ErrNotFound) { return nil, nil @@ -94,24 +85,15 @@ func (r *propertyResolver) Merged(ctx context.Context, obj *gqlmodel.Property) ( type propertyFieldResolver struct{ *Resolver } func (r *propertyFieldResolver) Parent(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.ParentID)) + return dataloaders(ctx).Property.Load(id.PropertyID(obj.ParentID)) } func (r *propertyFieldResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (r *propertyFieldResolver) Field(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchemaField, error) { - exit := trace(ctx) - defer exit() - - schema, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + schema, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) if err != nil { return nil, err } @@ -119,33 
+101,24 @@ func (r *propertyFieldResolver) Field(ctx context.Context, obj *gqlmodel.Propert } func (r *propertyFieldResolver) ActualValue(ctx context.Context, obj *gqlmodel.PropertyField) (interface{}, error) { - exit := trace(ctx) - defer exit() - - datasetLoader := DataLoadersFromContext(ctx).Dataset + datasetLoader := dataloaders(ctx).Dataset return actualValue(datasetLoader, obj.Value, obj.Links, false) } type propertyFieldLinkResolver struct{ *Resolver } func (r *propertyFieldLinkResolver) Dataset(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.DatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) } func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetField, error) { - exit := trace(ctx) - defer exit() - if obj.DatasetID == nil { return nil, nil } - d, err := DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + d, err := dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) if err != nil { return nil, err } @@ -153,80 +126,59 @@ func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *gqlmo } func (r *propertyFieldLinkResolver) DatasetSchema(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) } func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchemaField, error) { - exit := trace(ctx) - defer exit() - - ds, err := DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + ds, err := 
dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) return ds.Field(obj.DatasetSchemaFieldID), err } type mergedPropertyResolver struct{ *Resolver } func (r *mergedPropertyResolver) Original(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.OriginalID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) } func (r *mergedPropertyResolver) Parent(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.ParentID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.ParentID)) } func (r *mergedPropertyResolver) Schema(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - if obj.SchemaID == nil { if propertyID := obj.PropertyID(); propertyID != nil { - property, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) + property, err := dataloaders(ctx).Property.Load(id.PropertyID(*propertyID)) if err != nil { return nil, err } if property == nil { return nil, nil } - return DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) + return dataloaders(ctx).PropertySchema.Load(property.SchemaID) } return nil, nil } - return DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(*obj.SchemaID) } func (r *mergedPropertyResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return 
dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *gqlmodel.MergedProperty) ([]*gqlmodel.MergedPropertyGroup, error) { - exit := trace(ctx) - defer exit() - if obj.Groups != nil { return obj.Groups, nil } - m, err := r.loaders.Property.FetchMerged(ctx, obj.OriginalID, obj.ParentID, obj.LinkedDatasetID) + m, err := loaders(ctx).Property.FetchMerged(ctx, obj.OriginalID, obj.ParentID, obj.LinkedDatasetID) if err != nil || m == nil { return nil, err } @@ -236,13 +188,10 @@ func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *gqlmodel.Merge type mergedPropertyGroupResolver struct{ *Resolver } func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) { - exit := trace(ctx) - defer exit() - if obj.OriginalID == nil || obj.OriginalPropertyID == nil { return nil, nil } - p, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + p, err := dataloaders(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) if err != nil { return nil, err } @@ -253,13 +202,10 @@ func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *gqlmode } func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertyGroup, error) { - exit := trace(ctx) - defer exit() - if obj.ParentID == nil || obj.ParentPropertyID == nil { return nil, nil } - p, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + p, err := dataloaders(ctx).Property.Load(id.PropertyID(*obj.ParentID)) if err != nil { return nil, err } @@ -270,94 +216,67 @@ func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj *gqlmodel. 
} func (r *mergedPropertyGroupResolver) OriginalProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.OriginalID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) } func (r *mergedPropertyGroupResolver) ParentProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.ParentID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.ParentID)) } func (r *mergedPropertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - if obj.SchemaID == nil { if propertyID := obj.PropertyID(); propertyID != nil { - property, err := DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*propertyID)) + property, err := dataloaders(ctx).Property.Load(id.PropertyID(*propertyID)) if err != nil { return nil, err } if property == nil { return nil, nil } - return DataLoadersFromContext(ctx).PropertySchema.Load(property.SchemaID) + return dataloaders(ctx).PropertySchema.Load(property.SchemaID) } return nil, nil } - return DataLoadersFromContext(ctx).PropertySchema.Load(*obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(*obj.SchemaID) } func (r *mergedPropertyGroupResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Dataset, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } type mergedPropertyFieldResolver struct{ *Resolver } func (r 
*mergedPropertyFieldResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (r *mergedPropertyFieldResolver) Field(ctx context.Context, obj *gqlmodel.MergedPropertyField) (*gqlmodel.PropertySchemaField, error) { - exit := trace(ctx) - defer exit() - - s, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + s, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) return s.Field(obj.FieldID), err } func (r *mergedPropertyFieldResolver) ActualValue(ctx context.Context, obj *gqlmodel.MergedPropertyField) (interface{}, error) { - exit := trace(ctx) - defer exit() - - datasetLoader := DataLoadersFromContext(ctx).Dataset + datasetLoader := dataloaders(ctx).Dataset return actualValue(datasetLoader, obj.Value, obj.Links, obj.Overridden) } type propertyGroupListResolver struct{ *Resolver } func (*propertyGroupListResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (*propertyGroupListResolver) SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroupList) (*gqlmodel.PropertySchemaGroup, error) { - exit := trace(ctx) - defer exit() - - s, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + s, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) if err != nil { return nil, err } @@ -367,17 +286,11 @@ func (*propertyGroupListResolver) SchemaGroup(ctx context.Context, obj *gqlmodel type propertyGroupResolver struct{ *Resolver } func (*propertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer 
exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (*propertyGroupResolver) SchemaGroup(ctx context.Context, obj *gqlmodel.PropertyGroup) (*gqlmodel.PropertySchemaGroup, error) { - exit := trace(ctx) - defer exit() - - s, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + s, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_property_schema.go b/internal/adapter/gql/resolver_property_schema.go index 5efe5196c..433c60221 100644 --- a/internal/adapter/gql/resolver_property_schema.go +++ b/internal/adapter/gql/resolver_property_schema.go @@ -25,9 +25,6 @@ func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { type propertySchemaFieldResolver struct{ *Resolver } func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } @@ -35,9 +32,6 @@ func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj * } func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { - exit := trace(ctx) - defer exit() - if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { return s, nil } @@ -47,41 +41,29 @@ func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, type propertyLinkableFieldsResolver struct{ *Resolver } func (r *propertyLinkableFieldsResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (r 
*propertyLinkableFieldsResolver) LatlngField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) { - exit := trace(ctx) - defer exit() - if obj.Latlng == nil { return nil, nil } - ps, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + ps, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) return ps.Field(*obj.Latlng), err } func (r *propertyLinkableFieldsResolver) URLField(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchemaField, error) { - exit := trace(ctx) - defer exit() - if obj.URL == nil { return nil, nil } - ps, err := DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + ps, err := dataloaders(ctx).PropertySchema.Load(obj.SchemaID) return ps.Field(*obj.URL), err } type propertySchemaGroupResolver struct{ *Resolver } func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(obj.SchemaID) + return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *string) (string, error) { diff --git a/internal/adapter/gql/resolver_query.go b/internal/adapter/gql/resolver_query.go index 5d8775c7a..946dfb744 100644 --- a/internal/adapter/gql/resolver_query.go +++ b/internal/adapter/gql/resolver_query.go @@ -15,16 +15,10 @@ func (r *Resolver) Query() QueryResolver { type queryResolver struct{ *Resolver } func (r *queryResolver) Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Asset.FindByTeam(ctx, teamID, first, last, before, after) + return loaders(ctx).Asset.FindByTeam(ctx, teamID, first, last, before, after) } func (r 
*queryResolver) Me(ctx context.Context) (*gqlmodel.User, error) { - exit := trace(ctx) - defer exit() - u := getUser(ctx) if u == nil { return nil, nil @@ -33,10 +27,7 @@ func (r *queryResolver) Me(ctx context.Context) (*gqlmodel.User, error) { } func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) { - exit := trace(ctx) - defer exit() - - dataloaders := DataLoadersFromContext(ctx) + dataloaders := dataloaders(ctx) switch typeArg { case gqlmodel.NodeTypeAsset: result, err := dataloaders.Asset.Load(id.AssetID(i)) @@ -103,10 +94,7 @@ func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg gqlmodel.Node } func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) { - exit := trace(ctx) - defer exit() - - dataloaders := DataLoadersFromContext(ctx) + dataloaders := dataloaders(ctx) switch typeArg { case gqlmodel.NodeTypeAsset: data, err := dataloaders.Asset.LoadAll(id.AssetIDsFromIDRef(ids)) @@ -214,16 +202,10 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } func (r *queryResolver) PropertySchema(ctx context.Context, i id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).PropertySchema.Load(i) + return dataloaders(ctx).PropertySchema.Load(i) } func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertySchemaID) ([]*gqlmodel.PropertySchema, error) { - exit := trace(ctx) - defer exit() - ids2 := make([]id.PropertySchemaID, 0, len(ids)) for _, i := range ids { if i != nil { @@ -231,7 +213,7 @@ func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertyS } } - data, err := DataLoadersFromContext(ctx).PropertySchema.LoadAll(ids2) + data, err := dataloaders(ctx).PropertySchema.LoadAll(ids2) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -240,16 +222,10 @@ func (r *queryResolver) 
PropertySchemas(ctx context.Context, ids []*id.PropertyS } func (r *queryResolver) Plugin(ctx context.Context, id id.PluginID) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Plugin.Load(id) + return dataloaders(ctx).Plugin.Load(id) } func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - ids2 := make([]id.PluginID, 0, len(ids)) for _, i := range ids { if i != nil { @@ -257,7 +233,7 @@ func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gql } } - data, err := DataLoadersFromContext(ctx).Plugin.LoadAll(ids2) + data, err := dataloaders(ctx).Plugin.LoadAll(ids2) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -266,10 +242,7 @@ func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gql } func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - dataloaders := DataLoadersFromContext(ctx) + dataloaders := dataloaders(ctx) result, err := dataloaders.Layer.Load(id.LayerID(layerID)) if result == nil || *result == nil { return nil, nil @@ -278,64 +251,37 @@ func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (gqlmodel.Laye } func (r *queryResolver) Scene(ctx context.Context, projectID id.ID) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Scene.FindByProject(ctx, id.ProjectID(projectID)) + return loaders(ctx).Scene.FindByProject(ctx, id.ProjectID(projectID)) } func (r *queryResolver) Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Project.FindByTeam(ctx, id.TeamID(teamID), first, last, before, after) + return loaders(ctx).Project.FindByTeam(ctx, id.TeamID(teamID), first, last, 
before, after) } func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Dataset.FindSchemaByScene(ctx, sceneID, first, last, before, after) + return loaders(ctx).Dataset.FindSchemaByScene(ctx, sceneID, first, last, before, after) } func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Dataset.FindDynamicSchemasByScene(ctx, sceneID) + return loaders(ctx).Dataset.FindDynamicSchemasByScene(ctx, sceneID) } func (r *queryResolver) Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Dataset.FindBySchema(ctx, datasetSchemaID, first, last, before, after) + return loaders(ctx).Dataset.FindBySchema(ctx, datasetSchemaID, first, last, before, after) } func (r *queryResolver) SceneLock(ctx context.Context, sceneID id.ID) (*gqlmodel.SceneLockMode, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Scene.FetchLock(ctx, id.SceneID(sceneID)) + return loaders(ctx).Scene.FetchLock(ctx, id.SceneID(sceneID)) } func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.User.SearchUser(ctx, nameOrEmail) + return loaders(ctx).User.SearchUser(ctx, nameOrEmail) } func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Project.CheckAlias(ctx, alias) + return loaders(ctx).Project.CheckAlias(ctx, alias) } func (r *queryResolver) InstallablePlugins(ctx context.Context) 
([]*gqlmodel.PluginMetadata, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Plugin.FetchPluginMetadata(ctx) + return loaders(ctx).Plugin.FetchPluginMetadata(ctx) } diff --git a/internal/adapter/gql/resolver_scene.go b/internal/adapter/gql/resolver_scene.go index fa9dd710b..946247418 100644 --- a/internal/adapter/gql/resolver_scene.go +++ b/internal/adapter/gql/resolver_scene.go @@ -27,31 +27,19 @@ func (r *Resolver) Cluster() ClusterResolver { type sceneResolver struct{ *Resolver } func (r *sceneResolver) Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Project.Load(id.ProjectID(obj.ProjectID)) + return dataloaders(ctx).Project.Load(id.ProjectID(obj.ProjectID)) } func (r *sceneResolver) Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.TeamID)) + return dataloaders(ctx).Team.Load(id.TeamID(obj.TeamID)) } func (r *sceneResolver) Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) } func (r *sceneResolver) RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, error) { - exit := trace(ctx) - defer exit() - - layer, err := DataLoadersFromContext(ctx).Layer.Load(id.LayerID(obj.RootLayerID)) + layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.RootLayerID)) if err != nil { return nil, err } @@ -66,17 +54,11 @@ func (r *sceneResolver) RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gq } func (r *sceneResolver) DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { - 
exit := trace(ctx) - defer exit() - - return r.loaders.Dataset.FindSchemaByScene(ctx, obj.ID, first, last, before, after) + return loaders(ctx).Dataset.FindSchemaByScene(ctx, obj.ID, first, last, before, after) } func (r *sceneResolver) LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlmodel.SceneLockMode, error) { - exit := trace(ctx) - defer exit() - - sl, err := r.loaders.Scene.FetchLock(ctx, id.SceneID(obj.ID)) + sl, err := loaders(ctx).Scene.FetchLock(ctx, id.SceneID(obj.ID)) if err != nil { return gqlmodel.SceneLockModeFree, err } @@ -84,10 +66,7 @@ func (r *sceneResolver) LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlm } func (r *sceneResolver) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) { - exit := trace(ctx) - defer exit() - - tags, err := r.usecases.Tag.FetchByScene(ctx, id.SceneID(obj.ID), getOperator(ctx)) + tags, err := usecases(ctx).Tag.FetchByScene(ctx, id.SceneID(obj.ID), getOperator(ctx)) if err != nil { return nil, err } @@ -102,35 +81,23 @@ func (r *sceneResolver) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmod type scenePluginResolver struct{ *Resolver } func (r *scenePluginResolver) Plugin(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + return dataloaders(ctx).Plugin.Load(obj.PluginID) } func (r *scenePluginResolver) Property(ctx context.Context, obj *gqlmodel.ScenePlugin) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - if obj.PropertyID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) } type sceneWidgetResolver struct{ *Resolver } func (r *sceneWidgetResolver) Plugin(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Plugin, error) { - exit := trace(ctx) - defer exit() - - return 
DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + return dataloaders(ctx).Plugin.Load(obj.PluginID) } func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.PluginExtension, error) { - exit := trace(ctx) - defer exit() - - plugin, err := DataLoadersFromContext(ctx).Plugin.Load(obj.PluginID) + plugin, err := dataloaders(ctx).Plugin.Load(obj.PluginID) if err != nil { return nil, err } @@ -143,17 +110,11 @@ func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *gqlmodel.Scene } func (r *sceneWidgetResolver) Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) } type clusterResolver struct{ *Resolver } func (r *clusterResolver) Property(ctx context.Context, obj *gqlmodel.Cluster) (*gqlmodel.Property, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) } diff --git a/internal/adapter/gql/resolver_tag.go b/internal/adapter/gql/resolver_tag.go index 5a378e543..7eb572436 100644 --- a/internal/adapter/gql/resolver_tag.go +++ b/internal/adapter/gql/resolver_tag.go @@ -14,51 +14,36 @@ func (r *Resolver) TagItem() TagItemResolver { } func (t tagItemResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetSchema, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) + return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) } func (t tagItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) { - 
exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - return DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) } func (t tagItemResolver) LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) { - exit := trace(ctx) - defer exit() - if obj.LinkedDatasetID == nil { return nil, nil } - ds, err := DataLoadersFromContext(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + ds, err := dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) return ds.Field(*obj.LinkedDatasetFieldID), err } func (t tagItemResolver) Parent(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.TagGroup, error) { - exit := trace(ctx) - defer exit() - if obj.ParentID == nil { return nil, nil } - return DataLoadersFromContext(ctx).TagGroup.Load(id.TagID(*obj.ParentID)) + return dataloaders(ctx).TagGroup.Load(id.TagID(*obj.ParentID)) } func (tg tagItemResolver) Layers(ctx context.Context, obj *gqlmodel.TagItem) ([]gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - return tg.loaders.Layer.FetchByTag(ctx, id.TagID(obj.ID)) + return loaders(ctx).Layer.FetchByTag(ctx, id.TagID(obj.ID)) } type tagGroupResolver struct{ *Resolver } @@ -67,10 +52,7 @@ func (r *Resolver) TagGroup() TagGroupResolver { return &tagGroupResolver{r} } -func (tg tagGroupResolver) Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) { - exit := trace(ctx) - defer exit() - +func (r tagGroupResolver) Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) { tagIds := make([]id.TagID, 0, len(obj.TagIds)) for _, i := range obj.TagIds { if i == nil { @@ -78,23 +60,17 @@ func (tg tagGroupResolver) Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([] } tagIds = append(tagIds, id.TagID(*i)) } - tagItems, err := DataLoadersFromContext(ctx).TagItem.LoadAll(tagIds) + tagItems, err := 
dataloaders(ctx).TagItem.LoadAll(tagIds) if len(err) > 0 && err[0] != nil { return nil, err[0] } return tagItems, nil } -func (tg tagGroupResolver) Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Scene.Load(id.SceneID(obj.SceneID)) +func (r tagGroupResolver) Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) { + return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) } -func (tg tagGroupResolver) Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) { - exit := trace(ctx) - defer exit() - - return tg.loaders.Layer.FetchByTag(ctx, id.TagID(obj.ID)) +func (r tagGroupResolver) Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) { + return loaders(ctx).Layer.FetchByTag(ctx, id.TagID(obj.ID)) } diff --git a/internal/adapter/gql/resolver_team.go b/internal/adapter/gql/resolver_team.go index 3501bb73d..c7a1c49c4 100644 --- a/internal/adapter/gql/resolver_team.go +++ b/internal/adapter/gql/resolver_team.go @@ -19,24 +19,15 @@ func (r *Resolver) TeamMember() TeamMemberResolver { type teamResolver struct{ *Resolver } func (r *teamResolver) Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Asset.FindByTeam(ctx, obj.ID, first, last, before, after) + return loaders(ctx).Asset.FindByTeam(ctx, obj.ID, first, last, before, after) } func (r *teamResolver) Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Project.FindByTeam(ctx, id.TeamID(obj.ID), first, last, before, after) + return loaders(ctx).Project.FindByTeam(ctx, id.TeamID(obj.ID), first, last, 
before, after) } type teamMemberResolver struct{ *Resolver } func (r *teamMemberResolver) User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).User.Load(id.UserID(obj.UserID)) + return dataloaders(ctx).User.Load(id.UserID(obj.UserID)) } diff --git a/internal/adapter/gql/resolver_user.go b/internal/adapter/gql/resolver_user.go index 3caba5b5b..ad443f5dd 100644 --- a/internal/adapter/gql/resolver_user.go +++ b/internal/adapter/gql/resolver_user.go @@ -14,15 +14,9 @@ func (r *Resolver) User() UserResolver { type userResolver struct{ *Resolver } func (r *userResolver) MyTeam(ctx context.Context, obj *gqlmodel.User) (*gqlmodel.Team, error) { - exit := trace(ctx) - defer exit() - - return DataLoadersFromContext(ctx).Team.Load(id.TeamID(obj.MyTeamID)) + return dataloaders(ctx).Team.Load(id.TeamID(obj.MyTeamID)) } func (r *userResolver) Teams(ctx context.Context, obj *gqlmodel.User) ([]*gqlmodel.Team, error) { - exit := trace(ctx) - defer exit() - - return r.loaders.Team.FindByUser(ctx, id.UserID(obj.ID)) + return loaders(ctx).Team.FindByUser(ctx, id.UserID(obj.ID)) } diff --git a/internal/adapter/gql/tracer.go b/internal/adapter/gql/tracer.go deleted file mode 100644 index 2fa9c8efd..000000000 --- a/internal/adapter/gql/tracer.go +++ /dev/null @@ -1,151 +0,0 @@ -package gql - -import ( - "context" - "fmt" - "sort" - "sync" - "time" - - "github.com/99designs/gqlgen/graphql" -) - -type tracerKeyStruct struct{} - -var tracerKey = tracerKeyStruct{} - -type Tracer struct { - Spans sync.Map -} - -type span struct { - Name string - StartedAt int64 - EndedAt int64 -} - -func (t *Tracer) AddSpan(s *span) { - if t == nil { - return - } - - var spans []*span - if ss, ok := t.Spans.Load(s.Name); ok { - if ss, ok := ss.([]*span); ok { - spans = append(ss, s) - } else { - spans = []*span{s} - } - } else { - spans = []*span{s} - } - - t.Spans.Store(s.Name, spans) -} - -func (t *Tracer) 
Print() { - if t == nil { - return - } - - type result struct { - Name string - Max int64 - Min int64 - Avr float64 - Count int - } - var results []result - - t.Spans.Range(func(key, value interface{}) bool { - name := key.(string) - ss := value.([]*span) - - var max, min, sum int64 - for i, s := range ss { - d := s.Duration() - sum += d - if i == 0 { - max = d - min = d - } else { - if max < d { - max = d - } - if min > d { - min = d - } - } - } - - results = append(results, result{ - Name: name, - Max: max, - Min: min, - Avr: float64(sum) / float64(len(ss)), - Count: len(ss), - }) - return true - }) - - sort.Slice(results, func(i, j int) bool { - return results[i].Avr > results[j].Avr - }) - - println("\nGraphQL tracing --------------------------------") - for _, r := range results { - if r.Count == 1 { - fmt.Printf("%s: %.2fms\n", r.Name, float64(r.Min)/1000000.0) - } else { - fmt.Printf("%s: %.2f~%.2fms (avr:%.2fms) (%d)\n", r.Name, float64(r.Min)/1000000.0, float64(r.Max)/1000000.0, r.Avr/1000000.0, r.Count) - } - } - println("------------------------------------------------\n") -} - -func (s *span) Start() { - s.StartedAt = time.Now().UnixNano() -} - -func (s *span) End() { - s.EndedAt = time.Now().UnixNano() -} - -func (s *span) Duration() int64 { - return s.EndedAt - s.StartedAt -} - -func AttachTracer(ctx context.Context, t *Tracer) context.Context { - return context.WithValue(ctx, tracerKey, t) -} - -func ExitTracer(ctx context.Context) { - getTracer(ctx).Print() -} - -func getTracer(ctx context.Context) *Tracer { - if t, ok := ctx.Value(tracerKey).(*Tracer); ok { - return t - } - return nil -} - -func trace(ctx context.Context) func() { - t := getTracer(ctx) - fc := graphql.GetFieldContext(ctx) - - name := fc.Field.Name - if object := fc.Field.ObjectDefinition; object != nil { - name = object.Name + "." 
+ name - } - - s := &span{ - Name: name, - } - s.Start() - t.AddSpan(s) - - return func() { - s.End() - } -} diff --git a/internal/app/auth.go b/internal/app/auth.go index 4743aea1e..73aeaf6a7 100644 --- a/internal/app/auth.go +++ b/internal/app/auth.go @@ -4,7 +4,7 @@ import ( "context" "github.com/labstack/echo/v4" - "github.com/reearth/reearth-backend/internal/adapter/gql" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" @@ -28,9 +28,6 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { userID = u } - // attach sub - ctx = context.WithValue(ctx, gql.ContextSub, sub) - // debug mode if cfg.Debug { if userID := c.Request().Header.Get(debugUserHeader); userID != "" { @@ -61,30 +58,6 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { if err != nil && err != rerror.ErrNotFound { return err } - - // Auth0 accounts are already merged into one so it doesn't need to fetch more info from Auth0 - // - // if u == nil && token != "" { - // // user not found by sub - - // // fetch user profile from Auth0 - // data, err := cfg.Gateways.Authenticator.FetchUser(token) - // if err != nil { - // return err - // } - - // // if !data.EmailVerified { - // // return errors.New("email is not verified") - // // } - - // u, err = cfg.Repos.User.FindByEmail(ctx, data.Email) - // if err != nil && err != rerror.ErrNotFound { - // return err - // } - // if u == nil { - // return rerror.ErrUserNotFound - // } - // } } // save a new sub @@ -94,15 +67,14 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } } - // attach operator op, err := generateOperator(ctx, cfg, u) if err != nil { return err } - ctx = context.WithValue(ctx, gql.ContextOperator, op) - // attach user - ctx = context.WithValue(ctx, gql.ContextUser, u) + ctx = adapter.AttachSub(ctx, sub) + ctx = adapter.AttachOperator(ctx, op) + ctx = 
adapter.AttachUser(ctx, u) c.SetRequest(req.WithContext(ctx)) return next(c) diff --git a/internal/app/graphql.go b/internal/app/graphql.go index 811aa3310..4faf675bd 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -10,6 +10,7 @@ import ( "github.com/99designs/gqlgen/graphql/playground" "github.com/labstack/echo/v4" "github.com/ravilushqa/otelgqlgen" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/adapter/gql" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/vektah/gqlparser/v2/gqlerror" @@ -17,35 +18,6 @@ import ( const enableDataLoaders = true -func dataLoaderMiddleware(container gql.Loaders) echo.MiddlewareFunc { - return func(next echo.HandlerFunc) echo.HandlerFunc { - return func(echoCtx echo.Context) error { - req := echoCtx.Request() - ctx := req.Context() - - ctx = context.WithValue(ctx, gql.DataLoadersKey(), container.DataLoadersWith(ctx, enableDataLoaders)) - echoCtx.SetRequest(req.WithContext(ctx)) - return next(echoCtx) - } - } -} - -func tracerMiddleware(enabled bool) echo.MiddlewareFunc { - return func(next echo.HandlerFunc) echo.HandlerFunc { - return func(echoCtx echo.Context) error { - if !enabled { - return next(echoCtx) - } - req := echoCtx.Request() - ctx := req.Context() - t := &gql.Tracer{} - echoCtx.SetRequest(req.WithContext(gql.AttachTracer(ctx, t))) - defer t.Print() - return next(echoCtx) - } - } -} - func graphqlAPI( ec *echo.Echo, r *echo.Group, @@ -53,7 +25,6 @@ func graphqlAPI( usecases interfaces.Container, ) { playgroundEnabled := conf.Debug || conf.Config.Dev - controllers := gql.NewLoaders(usecases) if playgroundEnabled { r.GET("/graphql", echo.WrapHandler( @@ -62,20 +33,24 @@ func graphqlAPI( } schema := gql.NewExecutableSchema(gql.Config{ - Resolvers: gql.NewResolver(controllers, conf.Debug), + Resolvers: gql.NewResolver(conf.Debug), }) srv := handler.NewDefaultServer(schema) srv.Use(otelgqlgen.Middleware()) + if 
conf.Config.GraphQL.ComplexityLimit > 0 { srv.Use(extension.FixedComplexityLimit(conf.Config.GraphQL.ComplexityLimit)) } + if playgroundEnabled { srv.Use(extension.Introspection{}) } + srv.Use(extension.AutomaticPersistedQuery{ Cache: lru.New(30), }) + srv.SetErrorPresenter( // show more detailed error messgage in debug mode func(ctx context.Context, e error) *gqlerror.Error { @@ -87,7 +62,14 @@ func graphqlAPI( ) r.POST("/graphql", func(c echo.Context) error { + req := c.Request() + ctx := req.Context() + + ctx = adapter.AttachUsecases(ctx, &usecases) + ctx = gql.AttachUsecases(ctx, &usecases, enableDataLoaders) + c.SetRequest(req.WithContext(ctx)) + srv.ServeHTTP(c.Response(), c.Request()) return nil - }, dataLoaderMiddleware(controllers), tracerMiddleware(false)) + }) } diff --git a/internal/app/private.go b/internal/app/private.go index 4ee108cd1..af3515572 100644 --- a/internal/app/private.go +++ b/internal/app/private.go @@ -8,14 +8,13 @@ import ( "strings" "github.com/labstack/echo/v4" - "github.com/reearth/reearth-backend/internal/adapter/gql" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer/encoding" "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/rerror" - "github.com/reearth/reearth-backend/pkg/user" ) // TODO: move to adapter and usecase layer @@ -60,14 +59,16 @@ func privateAPI( ) { r.GET("/layers/:param", func(c echo.Context) error { ctx := c.Request().Context() - user := c.Request().Context().Value(gql.ContextUser).(*user.User) + user := adapter.User(c.Request().Context()) if user == nil { return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrUnauthorized} } - op := c.Request().Context().Value(gql.ContextOperator).(*usecase.Operator) + + op := adapter.Operator(c.Request().Context()) if 
op == nil { return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} } + param := c.Param("param") params := strings.Split(param, ".") if len(params) != 2 { @@ -78,6 +79,7 @@ func privateAPI( if err != nil { return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadID} } + scenes, err := repos.Scene.FindIDsByTeam(ctx, op.ReadableTeams) if err != nil { if errors.Is(rerror.ErrNotFound, err) { @@ -85,6 +87,7 @@ func privateAPI( } return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} } + layer, err := repos.Layer.FindByID(ctx, lid, scenes) if err != nil { if errors.Is(rerror.ErrNotFound, err) { @@ -92,6 +95,7 @@ func privateAPI( } return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} } + err = checkScene(ctx, layer.Scene(), op, repos.Scene) if err != nil { if errors.Is(ErrOpDenied, err) { diff --git a/internal/app/public.go b/internal/app/public.go index c536605a3..bcc3d5de9 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -18,8 +18,12 @@ func publicAPI( repos *repo.Container, gateways *gateway.Container, ) { - controller := http1.NewUserController(interactor.NewUser(repos, gateways, conf.SignupSecret)) - publishedController := http1.NewPublishedController(interactor.NewPublished(repos.Project, gateways.File, "")) + controller := http1.NewUserController( + interactor.NewUser(repos, gateways, conf.SignupSecret), + ) + publishedController := http1.NewPublishedController( + interactor.NewPublished(repos.Project, gateways.File, ""), + ) r.GET("/ping", func(c echo.Context) error { return c.JSON(http.StatusOK, "pong") From d0c1bfd9a961edff22166f608c13f6e4510464a4 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Fri, 4 Feb 2022 09:32:21 +0300 Subject: [PATCH 145/253] update go modules (#103) --- go.mod | 68 ++++++++++----------- go.sum | 185 +++++++++++++++++++++++++++++++++------------------------ 2 files changed, 140 insertions(+), 113 
deletions(-) diff --git a/go.mod b/go.mod index 04ae21e81..3c139bb7e 100644 --- a/go.mod +++ b/go.mod @@ -1,75 +1,73 @@ module github.com/reearth/reearth-backend require ( - cloud.google.com/go/profiler v0.1.1 - cloud.google.com/go/storage v1.18.2 - github.com/99designs/gqlgen v0.14.0 + cloud.google.com/go/profiler v0.1.2 + cloud.google.com/go/storage v1.19.0 + github.com/99designs/gqlgen v0.16.0 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 github.com/auth0/go-jwt-middleware v1.0.1 github.com/blang/semver v3.5.1+incompatible - github.com/form3tech-oss/jwt-go v3.2.2+incompatible - github.com/goccy/go-yaml v1.9.4 + github.com/form3tech-oss/jwt-go v3.2.5+incompatible + github.com/goccy/go-yaml v1.9.5 github.com/google/uuid v1.3.0 github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d - github.com/jarcoal/httpmock v1.0.8 + github.com/jarcoal/httpmock v1.1.0 github.com/joho/godotenv v1.4.0 github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 - github.com/labstack/echo/v4 v4.6.1 + github.com/labstack/echo/v4 v4.6.3 github.com/labstack/gommon v0.3.1 - github.com/mitchellh/mapstructure v1.4.2 + github.com/mitchellh/mapstructure v1.4.3 github.com/oklog/ulid v1.3.1 github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 - github.com/ravilushqa/otelgqlgen v0.2.0 + github.com/ravilushqa/otelgqlgen v0.4.1 github.com/sirupsen/logrus v1.8.1 - github.com/spf13/afero v1.6.0 + github.com/spf13/afero v1.8.0 github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 github.com/stretchr/testify v1.7.0 github.com/twpayne/go-kml v1.5.2 - github.com/uber/jaeger-client-go v2.29.1+incompatible + github.com/uber/jaeger-client-go v2.30.0+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/dataloaden v0.3.0 - github.com/vektah/gqlparser/v2 v2.2.0 - go.mongodb.org/mongo-driver v1.7.4 - 
go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.27.0 - go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0 - go.opentelemetry.io/otel v1.2.0 - go.opentelemetry.io/otel/sdk v1.2.0 + github.com/vektah/gqlparser/v2 v2.3.1 + go.mongodb.org/mongo-driver v1.8.2 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.28.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.28.0 + go.opentelemetry.io/otel v1.3.0 + go.opentelemetry.io/otel/sdk v1.3.0 golang.org/x/text v0.3.7 - golang.org/x/tools v0.1.7 - google.golang.org/api v0.60.0 + golang.org/x/tools v0.1.9 + google.golang.org/api v0.66.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 ) require ( - cloud.google.com/go v0.97.0 // indirect + cloud.google.com/go v0.100.2 // indirect + cloud.google.com/go/compute v0.1.0 // indirect + cloud.google.com/go/iam v0.1.1 // indirect cloud.google.com/go/trace v1.0.0 // indirect github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect - github.com/census-instrumentation/opencensus-proto v0.3.0 // indirect - github.com/cespare/xxhash/v2 v2.1.2 // indirect - github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 // indirect - github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect - github.com/envoyproxy/go-control-plane v0.10.0 // indirect - github.com/envoyproxy/protoc-gen-validate v0.6.2 // indirect github.com/fatih/color v1.12.0 // indirect github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect + 
github.com/go-logr/logr v1.2.1 // indirect + github.com/go-logr/stdr v1.2.0 // indirect github.com/go-stack/stack v1.8.0 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/golang/snappy v0.0.3 // indirect - github.com/google/go-cmp v0.5.6 // indirect - github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0 // indirect + github.com/google/go-cmp v0.5.7 // indirect + github.com/google/pprof v0.0.0-20211214055906-6f57359322fd // indirect github.com/googleapis/gax-go/v2 v2.1.1 // indirect github.com/gorilla/websocket v1.4.2 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect @@ -92,20 +90,20 @@ require ( github.com/xdg-go/stringprep v1.0.2 // indirect github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect - go.opentelemetry.io/contrib v0.23.0 // indirect - go.opentelemetry.io/otel/trace v1.2.0 // indirect + go.opentelemetry.io/contrib v1.3.0 // indirect + go.opentelemetry.io/otel/trace v1.3.0 // indirect go.uber.org/atomic v1.7.0 // indirect - golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect + golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa // indirect golang.org/x/mod v0.5.1 // indirect golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 // indirect - golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5 // indirect + golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/sys v0.0.0-20211103235746-7861aae1554b // indirect + golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 // indirect golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto 
v0.0.0-20211101144312-62acf1d99145 // indirect - google.golang.org/grpc v1.41.0 // indirect + google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5 // indirect + google.golang.org/grpc v1.40.1 // indirect google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect diff --git a/go.sum b/go.sum index fd80d1f09..9c8c4570e 100644 --- a/go.sum +++ b/go.sum @@ -3,6 +3,7 @@ cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= @@ -15,6 +16,7 @@ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOY cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= @@ -25,18 +27,25 @@ cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvE cloud.google.com/go v0.90.0/go.mod 
h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= -cloud.google.com/go v0.97.0 h1:3DXvAyifywvq64LfkKaMOmkWPS1CikIQdMe2lY9vxU8= cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= +cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v0.1.0 h1:rSUBvAyVwNJ5uQCKNJFMwPtTvJkfN38b6Pvb9zZoqJ8= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/profiler v0.1.1 h1:seMHZtcgOwZXAOKDZuW2sN3u1yKjYG19dUkElb4mbcQ= -cloud.google.com/go/profiler v0.1.1/go.mod h1:zG22vSCuJKJMvIlLpX3FhNjOsifaoLdPAYc4yLw5Iw4= +cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= +cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= +cloud.google.com/go/profiler v0.1.2 
h1:QKCWcmQA9kVXsDGlCpRH0tudZg7xg/jfgw7m0Kc4nfo= +cloud.google.com/go/profiler v0.1.2/go.mod h1:Ei3jL4tlaM/zPyJKR7E1Txd5oWhA5zGfmXgEHFtCB5g= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -46,13 +55,16 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.18.2 h1:5NQw6tOn3eMm0oE8vTkfjau18kjL79FlMjy/CHTpmoY= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= +cloud.google.com/go/storage v1.19.0 h1:XOQSnPJD8hRtZJ3VdCyK0mBZsGGImrzPAMbSWcHSe6Q= +cloud.google.com/go/storage v1.19.0/go.mod h1:6rgiTRjOqI/Zd9YKimub5TIB4d+p3LH33V3ZE1DMuUM= cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.14.0 h1:Wg8aNYQUjMR/4v+W3xD+7SizOy6lSvVeQ06AobNQAXI= github.com/99designs/gqlgen v0.14.0/go.mod h1:S7z4boV+Nx4VvzMUpVrY/YuHjFX4n7rDyuTqvAkuoRE= +github.com/99designs/gqlgen v0.16.0 h1:7Qc4Ll3mfN3doAyUWOgtGLcBGu+KDgK48HdkBGLZVFs= +github.com/99designs/gqlgen v0.16.0/go.mod h1:nbeSjFkqphIqpZsYe1ULVz0yfH8hjpJdJIQoX/e0G2I= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb 
v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 h1:38fNtfhHY6bs22b/D6+hDzO6JR0rDzpGPD36dY2uPL4= @@ -83,13 +95,7 @@ github.com/aws/aws-sdk-go v1.35.5/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+ github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.3.0 h1:t/LhUZLVitR1Ow2YOnduCsavhwFUklBMoGVYUCqmCqk= -github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -97,14 +103,7 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod 
h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4 h1:hzAQntlaYRkVSFEfj9OTWlVV1H155FMD8BTKktLv0QI= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1 h1:zH8ljVhhq7yC0MIeUL/IviMtY8hx2mK8cN9wEYb8ggw= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -122,25 +121,26 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= -github.com/envoyproxy/go-control-plane v0.10.0 h1:WVt4HEPbdRbRD/PKKPbPnIVavO6gk/h673jWyIJ016k= 
-github.com/envoyproxy/go-control-plane v0.10.0/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v0.6.2 h1:JiO+kJTpmYGjEodY7O1Zk8oZcNz1+f30UtwtXoFUPzE= -github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= +github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-logr/logr v1.2.0/go.mod 
h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.1 h1:DX7uPQ4WgAWfoh+NGGlbJQswnYIVvz0SRlLS3rPZQDA= +github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/stdr v1.2.0 h1:j4LrlVXgrbIWO83mmQUnK0Hi+YnbD+vzrE1z/EphbFE= +github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= @@ -177,8 +177,8 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= -github.com/goccy/go-yaml v1.9.4 h1:S0GCYjwHKVI6IHqio7QWNKNThUl6NLzFd/g8Z65Axw8= -github.com/goccy/go-yaml v1.9.4/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= +github.com/goccy/go-yaml v1.9.5 h1:Eh/+3uk9kLxG4koCX6lRMAPS1OaMSAi+FJcya0INdB0= +github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= @@ -233,8 +233,9 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -250,14 +251,15 @@ github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0 
h1:zHs+jv3LO743/zFGcByu2KmpbliCU2AhjcGgrdTwSG4= -github.com/google/pprof v0.0.0-20211008130755-947d60d73cc0/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/pprof v0.0.0-20211214055906-6f57359322fd h1:1FjCyPC+syAzJ5/2S8fqdZK1R22vvA0J7JZKcuOIQ7Y= +github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -268,6 +270,7 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3 h1:eHv/jVY/JNop1xg2J9cBb4EzyMpWZoNCP1BslSAIkOI= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3/go.mod h1:h/KNeRx7oYU4SpA4SoY7W2/NxDKEEVuwA6j9A27L4OI= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -297,8 +300,8 @@ github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d h1:sQbbvtUo github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:xVHEhsiSJJnT0jlcQpQUg+GyoLf0i0xciM1kqWTGT58= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jarcoal/httpmock v1.0.8 
h1:8kI16SoO6LQKgPE7PvQuV+YuD/inwHd7fOOe2zMbo4k= -github.com/jarcoal/httpmock v1.0.8/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= +github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE= +github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= @@ -316,6 +319,7 @@ github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dv github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= +github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= @@ -330,18 +334,20 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.6.1 h1:OMVsrnNFzYlGSdaiYGHbgWQnr+JM7NG+B9suCPie14M= github.com/labstack/echo/v4 v4.6.1/go.mod h1:RnjgMWNDB9g/HucVWhQYNQP9PvbYf6adqftqryo7s9k= +github.com/labstack/echo/v4 v4.6.3 h1:VhPuIZYxsbPmo4m9KAkMU/el2442eB7EBFFhNTTT9ac= +github.com/labstack/echo/v4 v4.6.3/go.mod 
h1:Hk5OiHj0kDqmFq7aHe7eDqI7CUhuCrfpupQtLGGLm7A= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/labstack/gommon v0.3.1 h1:OomWaJXm7xR6L1HmEtGyQf26TEn7V6X88mktX9kee9o= github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= +github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -354,8 +360,9 @@ github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9 github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.2 h1:6h7AQ0yhTcIsmFmnAwQls75jp2Gzs4iB8W7pjMO+rqo= 
-github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= @@ -375,12 +382,12 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/ravilushqa/otelgqlgen v0.2.0 h1:sLjnXsft8pD+qRhti0pbiH541ZdZOXurlX4h/HW67Yc= -github.com/ravilushqa/otelgqlgen v0.2.0/go.mod h1:XYDoucLfl1iAFyIL8UCgvumZjCx9J11m5jrlrzdrTNw= +github.com/ravilushqa/otelgqlgen v0.4.1 h1:wiTepKKEp3N28PZlPkXeACWu7dBGRwik8RhBdgIe1FM= +github.com/ravilushqa/otelgqlgen v0.4.1/go.mod h1:a0jceFiBN7nUZMfJ8voFwg/C58QCm/90rT4IIlt5hDQ= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= 
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -406,9 +413,8 @@ github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYl github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= -github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/afero v1.8.0 h1:5MmtuhAgYeU6qpa7w7bP0dv6MBYuup0vekhSpSkoq60= +github.com/spf13/afero v1.8.0/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 h1:Fod/tm/5c19889+T6j7mXxg/tEJrcLuDJxR/98raj80= @@ -432,8 +438,8 @@ github.com/twpayne/go-kml v1.5.2 h1:rFMw2/EwgkVssGS2MT6YfWSPZz6BgcJkLxQ53jnE8rQ= github.com/twpayne/go-kml v1.5.2/go.mod h1:kz8jAiIz6FIdU2Zjce9qGlVtgFYES9vt7BTPBHf5jl4= github.com/twpayne/go-polyline v1.0.0/go.mod h1:ICh24bcLYBX8CknfvNPKqoTbe+eg+MX1NPyJmSBo7pU= github.com/twpayne/go-waypoint v0.0.0-20200706203930-b263a7f6e4e8/go.mod h1:qj5pHncxKhu9gxtZEYWypA/z097sxhFlbTyOyt9gcnU= -github.com/uber/jaeger-client-go v2.29.1+incompatible h1:R9ec3zO3sGpzs0abd43Y+fBZRJ9uiH6lXyR/+u6brW4= -github.com/uber/jaeger-client-go v2.29.1+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= +github.com/uber/jaeger-client-go v2.30.0+incompatible 
h1:D6wyKGCecFaSRUpo8lCVbaOOb6ThwMmTEbhRwtKR97o= +github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= @@ -449,8 +455,9 @@ github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+ github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= -github.com/vektah/gqlparser/v2 v2.2.0 h1:bAc3slekAAJW6sZTi07aGq0OrfaCjj4jxARAaC7g2EM= github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= +github.com/vektah/gqlparser/v2 v2.3.1 h1:blIC0fCxGIr9pVjsc+BVI8XjYUtc2nCFRfnmP7FuFMk= +github.com/vektah/gqlparser/v2 v2.3.1/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= @@ -467,10 +474,11 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= 
go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.7.4 h1:sllcioag8Mec0LYkftYWq+cKNPIR4Kqq3iv9ZXY0g/E= -go.mongodb.org/mongo-driver v1.7.4/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= +go.mongodb.org/mongo-driver v1.8.0/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.8.2 h1:8ssUXufb90ujcIvR6MyE1SchaNj0SFxsakiZgxIyrMk= +go.mongodb.org/mongo-driver v1.8.2/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -479,12 +487,12 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/contrib v0.23.0 h1:MgRuo0JZZX8J9WLRjyd7OpTSbaLOdQXXJa6SnZvlWLM= -go.opentelemetry.io/contrib v0.23.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.27.0 h1:lt97RYNVVZE9YCZG4MgGhzOpOsoktyvR12NJbRnrsso= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.27.0/go.mod h1:95JJ0PE9JgjaFL3yLUhPIe4nu+1tm9IarHTcXhrAJ54= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0 h1:y1BbYi2c/agRbWm1YLKAk3gJFUMExNMDRxTVIoYy5pU= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.27.0/go.mod h1:KdKx74FeuSamMc33LytyiMuxhuT1v5wfIgUF3lcFGdw= +go.opentelemetry.io/contrib v1.3.0 h1:p9Gd+3dD7yB+AIph2Ltg11QDX6Y+yWMH0YQVTpTTP2c= +go.opentelemetry.io/contrib v1.3.0/go.mod 
h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.28.0 h1:w5fHM6jfxOm0zeKS9fTFZSyktW4Xzcw0REGXEwXQGko= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.28.0/go.mod h1:mG9tj72wNEUZGwJ/9IqfJ1nByl1aW0McYkY5Hjm8SM0= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.28.0 h1:gQqm6bGgJrF1b+qvUPM28NqOQUNot8lYxcbrG4hcyyQ= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.28.0/go.mod h1:aM2EjzJt4BHMoDrzAO40IJSGMayznRWts38juP4m0HQ= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0 h1:qW6j1kJU24yo2xIu16Py4m4AXn1dd+s2uKllGnTFAm0= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0/go.mod h1:7W3JSDYTtH3qKKHrS1fMiwLtK7iZFLPq1+7htfspX/E= go.opentelemetry.io/contrib/propagators/b3 v1.2.0 h1:+zQjl3DBSOle9GEhHuhqzDUKtYcVSfbHSNv24hsoOJ0= @@ -492,20 +500,22 @@ go.opentelemetry.io/contrib/propagators/b3 v1.2.0/go.mod h1:kO8hNKCfa1YmQJ0lM7pz go.opentelemetry.io/otel v1.0.0-RC3/go.mod h1:Ka5j3ua8tZs4Rkq4Ex3hwgBgOchyPVq5S6P2lz//nKQ= go.opentelemetry.io/otel v1.0.0/go.mod h1:AjRVh9A5/5DE7S+mZtTR6t8vpKKryam+0lREnfmS4cg= go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= -go.opentelemetry.io/otel v1.2.0 h1:YOQDvxO1FayUcT9MIhJhgMyNO1WqoduiyvQHzGN0kUQ= go.opentelemetry.io/otel v1.2.0/go.mod h1:aT17Fk0Z1Nor9e0uisf98LrntPGMnk4frBO9+dkf69I= +go.opentelemetry.io/otel v1.3.0 h1:APxLf0eiBwLl+SOXiJJCVYzA1OOJNyAoV8C5RNRyy7Y= +go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= go.opentelemetry.io/otel/internal/metric v0.23.0 h1:mPfzm9Iqhw7G2nDBmUAjFTfPqLZPbOW2k7QI57ITbaI= go.opentelemetry.io/otel/internal/metric v0.23.0/go.mod h1:z+RPiDJe30YnCrOhFGivwBS+DU1JU/PiLKkk4re2DNY= go.opentelemetry.io/otel/metric v0.23.0 h1:mYCcDxi60P4T27/0jchIDFa1WHEfQeU3zH9UEMpnj2c= go.opentelemetry.io/otel/metric 
v0.23.0/go.mod h1:G/Nn9InyNnIv7J6YVkQfpc0JCfKBNJaERBGw08nqmVQ= go.opentelemetry.io/otel/sdk v1.0.1/go.mod h1:HrdXne+BiwsOHYYkBE5ysIcv2bvdZstxzmCQhxTcZkI= -go.opentelemetry.io/otel/sdk v1.2.0 h1:wKN260u4DesJYhyjxDa7LRFkuhH7ncEVKU37LWcyNIo= -go.opentelemetry.io/otel/sdk v1.2.0/go.mod h1:jNN8QtpvbsKhgaC6V5lHiejMoKD+V8uadoSafgHPx1U= +go.opentelemetry.io/otel/sdk v1.3.0 h1:3278edCoH89MEJ0Ky8WQXVmDQv3FX4ZJ3Pp+9fJreAI= +go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= go.opentelemetry.io/otel/trace v1.0.0-RC3/go.mod h1:VUt2TUYd8S2/ZRX09ZDFZQwn2RqfMB5MzO17jBojGxo= go.opentelemetry.io/otel/trace v1.0.0/go.mod h1:PXTWqayeFUlJV1YDNhsJYB184+IvAH814St6o6ajzIs= go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= -go.opentelemetry.io/otel/trace v1.2.0 h1:Ys3iqbqZhcf28hHzrm5WAquMkDHNZTUkw7KHbuNjej0= go.opentelemetry.io/otel/trace v1.2.0/go.mod h1:N5FLswTubnxKxOJHM7XZC074qpeEdLy3CgAVsdMucK0= +go.opentelemetry.io/otel/trace v1.3.0 h1:doy8Hzb1RJ+I3yFhtDmwNc7tIyw1tNMOIsyPzp1NOGY= +go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -515,14 +525,15 @@ golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaE golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto 
v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ= +golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa h1:idItI2DDfCokpg0N51B2VtiLdJ4vAuXC9fnCb2gACo4= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -558,7 +569,6 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= 
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -592,15 +602,15 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210716203947-853a461950ff/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210913180222-943fd674d43e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 h1:VrJZAjbekhoRn7n5FBujY31gboH+iB3pdLxn3gE9FjU= golang.org/x/net v0.0.0-20211101193420-4a448f8816b3/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= 
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -619,8 +629,8 @@ golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5 h1:v79phzBz03tsVCUTbvTBmmC3CUXF5mKYt7DA4ZVldpM= -golang.org/x/oauth2 v0.0.0-20211028175245-ba495a64dcb5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -676,6 +686,7 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -689,17 +700,20 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210910150752-751e447fb3d0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211103235746-7861aae1554b h1:1VkfZQv42XQlA/jchYumAnv1UPo6RgF9rJFkTgZIxO4= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 h1:XfKQ4OlFl8okEOr5UvAqFRVj8pY/4yfcXrddB8qAbU0= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -765,6 +779,7 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200701151220-7cb253f4c4f8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200815165600-90abf76919f3/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -772,14 +787,15 @@ golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= 
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ= -golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= +golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -815,8 +831,12 @@ google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqiv google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= google.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E= -google.golang.org/api v0.60.0 h1:eq/zs5WPH4J9undYM9IP1O7dSr7Yh8Y0GtSCpzGzIUk= -google.golang.org/api v0.60.0/go.mod h1:d7rl65NZAkEQ90JFzqBjcRq1TVeG5ZoGV3sSpEnnVb4= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0/go.mod 
h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= +google.golang.org/api v0.65.0/go.mod h1:ArYhxgGadlWmqO1IqVujw6Cs8IdD33bTmzKo2Sh+cbg= +google.golang.org/api v0.66.0 h1:CbGy4LEiXCVCiNEDFgGpWOVwsDT7E2Qej1ZvN1P7KPg= +google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -861,7 +881,9 @@ google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -887,9 +909,16 @@ google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEc google.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211016002631-37fc39342514/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211021150943-2b146023228c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145 h1:vum3nDKdleYb+aePXKFEDT2+ghuH00EgYp9B7Q7EZZE= -google.golang.org/genproto v0.0.0-20211101144312-62acf1d99145/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5 h1:zzNejm+EgrbLfDZ6lu9Uud2IVvHySPl8vQzf04laR5Q= +google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc 
v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -915,8 +944,8 @@ google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.41.0 h1:f+PlOh7QV4iIJkPrx5NQ7qaNGFQ3OTse67yaDHfju4E= -google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= +google.golang.org/grpc v1.40.1 h1:pnP7OclFFFgFi4VHQDQDaoXUVauOFyktqTsqqgzFKbc= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= From 70026969e1e18843ff209b3dc52f7bf5e23b75e9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 9 Feb 2022 20:07:46 +0900 Subject: [PATCH 146/253] feat: implement property.Diff and plugin/manifest.Diff (#107) * impl property.Diff * impl manifest.Diff * fix error --- .../adapter/gql/gqlmodel/convert_property.go | 15 +- .../infrastructure/mongo/mongodoc/property.go | 2 +- .../mongo/mongodoc/property_schema.go | 36 +- internal/usecase/interactor/property.go | 2 +- pkg/plugin/manifest/convert.go | 23 +- pkg/plugin/manifest/convert_test.go | 185 +++--- pkg/plugin/manifest/diff.go | 104 +++ pkg/plugin/manifest/diff_test.go | 248 ++++++++ pkg/plugin/manifest/parser_test.go | 4 +- pkg/plugin/manifest/parser_translation.go | 2 +- .../manifest/parser_translation_test.go | 6 +- pkg/property/diff.go | 107 ++++ pkg/property/diff_test.go | 600 ++++++++++++++++++ pkg/property/field.go | 67 +- 
pkg/property/field_test.go | 181 +++++- pkg/property/group.go | 126 +++- pkg/property/group_builder.go | 6 +- pkg/property/group_builder_test.go | 2 +- pkg/property/group_list.go | 136 +++- pkg/property/group_list_builder.go | 24 +- pkg/property/group_list_builder_test.go | 42 +- pkg/property/group_list_test.go | 461 +++++++++++--- pkg/property/group_test.go | 59 +- pkg/property/id_test.go | 9 + pkg/property/item.go | 20 +- pkg/property/item_test.go | 39 ++ pkg/property/pointer.go | 123 +++- pkg/property/property.go | 284 ++++++--- pkg/property/property_test.go | 431 ++++++++++++- pkg/property/schema.go | 129 +--- pkg/property/schema_builder.go | 21 +- pkg/property/schema_builder_test.go | 35 +- pkg/property/schema_field_test.go | 6 + pkg/property/schema_group_list.go | 148 +++++ pkg/property/schema_group_list_test.go | 377 +++++++++++ pkg/property/schema_group_test.go | 5 + pkg/property/schema_pointer.go | 18 + pkg/property/schema_pointer_test.go | 36 ++ pkg/property/schema_test.go | 139 +--- 39 files changed, 3558 insertions(+), 700 deletions(-) create mode 100644 pkg/plugin/manifest/diff.go create mode 100644 pkg/plugin/manifest/diff_test.go create mode 100644 pkg/property/diff.go create mode 100644 pkg/property/diff_test.go create mode 100644 pkg/property/id_test.go create mode 100644 pkg/property/schema_group_list.go create mode 100644 pkg/property/schema_group_list_test.go create mode 100644 pkg/property/schema_pointer.go create mode 100644 pkg/property/schema_pointer_test.go diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index 0803fd4ec..fb88aaa35 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -208,7 +208,7 @@ func ToPropertySchema(propertySchema *property.Schema) *PropertySchema { return nil } - pgroups := propertySchema.Groups() + pgroups := propertySchema.Groups().Groups() groups := make([]*PropertySchemaGroup, 0, 
len(pgroups)) for _, g := range pgroups { groups = append(groups, ToPropertySchemaGroup(g, propertySchema.ID())) @@ -222,10 +222,17 @@ func ToPropertySchema(propertySchema *property.Schema) *PropertySchema { } func ToPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields) *PropertyLinkableFields { + var latlng, url *id.PropertySchemaFieldID + if l.LatLng != nil { + latlng = &l.LatLng.Field + } + if l.URL != nil { + url = &l.URL.Field + } return &PropertyLinkableFields{ SchemaID: sid, - Latlng: l.LatLng.FieldRef(), - URL: l.URL.FieldRef(), + Latlng: latlng, + URL: url, } } @@ -397,7 +404,7 @@ func ToPropertyGroup(g *property.Group, p *property.Property, gl *property.Group return nil } - gfields := g.Fields() + gfields := g.Fields(nil) fields := make([]*PropertyField, 0, len(gfields)) for _, f := range gfields { fields = append(fields, ToPropertyField(f, p, gl, g)) diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index 1767aeba8..556766fcd 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -131,7 +131,7 @@ func newPropertyItem(f property.Item) *PropertyItemDocument { if g := property.ToGroup(f); g != nil { t = typePropertyItemGroup - pfields := g.Fields() + pfields := g.Fields(nil) fields = make([]*PropertyFieldDocument, 0, len(pfields)) for _, r := range pfields { fields = append(fields, newPropertyField(r)) diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go index 97fb36396..41cf99139 100644 --- a/internal/infrastructure/mongo/mongodoc/property_schema.go +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -42,14 +42,13 @@ type PropertySchemaFieldChoiceDocument struct { } type PropertyLinkableFieldsDocument struct { - LatLng *PropertyPointerDocument - URL *PropertyPointerDocument + LatLng 
*PropertySchemaFieldPointerDocument + URL *PropertySchemaFieldPointerDocument } -type PropertyPointerDocument struct { - SchemaGroupID *string - ItemID *string - FieldID *string +type PropertySchemaFieldPointerDocument struct { + SchemaGroupID string + FieldID string } type PropertyConditonDocument struct { @@ -113,7 +112,7 @@ func NewPropertySchema(m *property.Schema) (*PropertySchemaDocument, string) { return nil, "" } - pgroups := m.Groups() + pgroups := m.Groups().Groups() groups := make([]*PropertySchemaGroupDocument, 0, len(pgroups)) for _, f := range pgroups { groups = append(groups, newPropertySchemaGroup(f)) @@ -200,7 +199,7 @@ func (doc *PropertySchemaDocument) Model() (*property.Schema, error) { return property.NewSchema(). ID(pid). Version(doc.Version). - Groups(groups). + Groups(property.NewSchemaGroupList(groups)). LinkableFields(toModelPropertyLinkableFields(doc.LinkableFields)). Build() } @@ -288,25 +287,22 @@ func toModelPropertyLinkableFields(l *PropertyLinkableFieldsDocument) property.L } } -func toModelPropertyPointer(p *PropertyPointerDocument) *property.Pointer { +func toModelPropertyPointer(p *PropertySchemaFieldPointerDocument) *property.SchemaFieldPointer { if p == nil { return nil } - return property.NewPointer( - id.PropertySchemaGroupIDFrom(p.SchemaGroupID), - id.PropertyItemIDFromRef(p.ItemID), - id.PropertySchemaFieldIDFrom(p.FieldID), - ) + return &property.SchemaFieldPointer{ + SchemaGroup: property.SchemaGroupID(p.SchemaGroupID), + Field: property.FieldID(p.FieldID), + } } -func newDocPropertyPointer(p *property.Pointer) *PropertyPointerDocument { +func newDocPropertyPointer(p *property.SchemaFieldPointer) *PropertySchemaFieldPointerDocument { if p == nil { return nil } - schemaGroupID, itemID, fieldID := p.GetAll() - return &PropertyPointerDocument{ - SchemaGroupID: schemaGroupID.StringRef(), - ItemID: itemID.StringRef(), - FieldID: fieldID.StringRef(), + return &PropertySchemaFieldPointerDocument{ + SchemaGroupID: 
p.SchemaGroup.String(), + FieldID: p.Field.String(), } } diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index a33da91d8..3d612a0cb 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -243,7 +243,7 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara if v == nil { return nil, nil, nil, nil, interfaces.ErrInvalidPropertyValue } - if err = field.Update(v, ps.Field(field.Field())); err != nil { + if err = field.Update(v, ps.Groups().Field(field.Field())); err != nil { return nil, nil, nil, nil, err } diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index 20c7f0f88..9901c1622 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -196,21 +196,25 @@ func (i *PropertySchema) schema(pluginID plugin.ID, idstr string) (*property.Sch Build() } - // items - items := make([]*property.SchemaGroup, 0, len(i.Groups)) + // groups + groups := make([]*property.SchemaGroup, 0, len(i.Groups)) for _, d := range i.Groups { item, err := d.schemaGroup() if err != nil { return nil, rerror.From(fmt.Sprintf("item (%s)", d.ID), err) } - items = append(items, item) + groups = append(groups, item) + } + sgroups := property.NewSchemaGroupList(groups) + if sgroups == nil { + return nil, fmt.Errorf("invalid group; it is empty or it may contain some duplicated groups or fields") } // schema schema, err := property.NewSchema(). ID(psid). Version(int(i.Version)). - Groups(items). + Groups(sgroups). LinkableFields(i.Linkable.linkable()). 
Build() if err != nil { @@ -229,15 +233,14 @@ func (p *PropertyLinkableFields) linkable() property.LinkableFields { } } -func (p *PropertyPointer) pointer() *property.Pointer { +func (p *PropertyPointer) pointer() *property.SchemaFieldPointer { if p == nil || p.FieldID == "" && p.SchemaGroupID == "" { return nil } - return property.NewPointer( - property.SchemaGroupIDFrom(&p.SchemaGroupID), - nil, - property.FieldIDFrom(&p.FieldID), - ) + return &property.SchemaFieldPointer{ + SchemaGroup: property.SchemaGroupID(p.SchemaGroupID), + Field: property.FieldID(p.FieldID), + } } func (i PropertySchemaGroup) schemaGroup() (*property.SchemaGroup, error) { diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 6101d40f3..54b42d224 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -38,11 +38,11 @@ func TestChoice(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - assert.Equal(t, *tc.expected, *tc.ch.choice()) + assert.Equal(t, *tt.expected, *tt.ch.choice()) }) } @@ -81,7 +81,16 @@ func TestManifest(t *testing.T) { Version: "1.1.1", }, expected: &Manifest{ - Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), + Plugin: plugin.New(). + ID(plugin.OfficialPluginID). + Name(i18n.StringFrom("aaa")). + Extensions([]*plugin.Extension{ + plugin.NewExtension(). + ID("cesium"). + Visualizer("cesium"). + Type("visualizer"). 
+ System(true).MustBuild(), + }).MustBuild(), ExtensionSchema: nil, Schema: nil, }, @@ -119,7 +128,18 @@ func TestManifest(t *testing.T) { Version: "1.1.1", }, expected: &Manifest{ - Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("aaa")).Extensions([]*plugin.Extension{plugin.NewExtension().ID("cesium").Visualizer("cesium").Type("visualizer").System(true).MustBuild()}).MustBuild(), + Plugin: plugin.New(). + ID(plugin.OfficialPluginID). + Name(i18n.StringFrom("aaa")). + Extensions([]*plugin.Extension{ + plugin.NewExtension(). + ID("cesium"). + Visualizer("cesium"). + Type("visualizer"). + System(true). + MustBuild(), + }). + MustBuild(), ExtensionSchema: nil, Schema: nil, }, @@ -133,24 +153,26 @@ func TestManifest(t *testing.T) { System: false, }, expected: &Manifest{ - Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), + Plugin: plugin.New(). + ID(plugin.OfficialPluginID). + Name(i18n.StringFrom("reearth")). + MustBuild(), }, err: "invalid manifest: invalid plugin id: ", }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - m, err := tc.root.manifest(nil) - if tc.err == "" { - assert.Equal(t, tc.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(t, tc.expected.Plugin.Name(), m.Plugin.Name()) - assert.Equal(t, len(tc.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) - //assert.Equal(tt,tc.expected.Schema..) 
+ m, err := tt.root.manifest(nil) + if tt.err == "" { + assert.Equal(t, tt.expected.Plugin.ID(), m.Plugin.ID()) + assert.Equal(t, tt.expected.Plugin.Name(), m.Plugin.Name()) + assert.Equal(t, len(tt.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) } else { - assert.Equal(t, tc.err, err.Error()) + assert.Equal(t, tt.err, err.Error()) } }) } @@ -373,16 +395,16 @@ func TestExtension(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - pe, ps, err := tc.ext.extension(tc.pid, tc.sys) - if tc.err == "" { - assert.Equal(t, tc.expectedPE, pe) - assert.Equal(t, tc.expectedPS, ps) + pe, ps, err := tt.ext.extension(tt.pid, tt.sys) + if tt.err == "" { + assert.Equal(t, tt.expectedPE, pe) + assert.Equal(t, tt.expectedPS, ps) } else { - assert.Equal(t, tc.err, err.Error()) + assert.Equal(t, tt.err, err.Error()) } }) } @@ -395,7 +417,7 @@ func TestPointer(t *testing.T) { tests := []struct { name string pp *PropertyPointer - expected *property.Pointer + expected *property.SchemaFieldPointer }{ { name: "failed nil PropertyPointer", @@ -416,15 +438,18 @@ func TestPointer(t *testing.T) { FieldID: "xxx", SchemaGroupID: "aaa", }, - expected: property.NewPointer(property.SchemaGroupIDFrom(&sg), nil, property.FieldIDFrom(&f)), + expected: &property.SchemaFieldPointer{ + SchemaGroup: property.SchemaGroupID(sg), + Field: property.FieldID(f), + }, }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.expected, tc.pp.pointer()) + assert.Equal(t, tt.expected, tt.pp.pointer()) }) } } @@ -456,11 +481,11 @@ func TestCondition(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { 
t.Parallel() - assert.Equal(t, tc.expected, tc.con.condition()) + assert.Equal(t, tt.expected, tt.con.condition()) }) } } @@ -493,17 +518,17 @@ func TestLinkable(t *testing.T) { }, }, expected: property.LinkableFields{ - LatLng: property.NewPointer(property.SchemaGroupIDFrom(&d), nil, property.FieldIDFrom(&l)), - URL: property.NewPointer(property.SchemaGroupIDFrom(&d), nil, property.FieldIDFrom(&u)), + LatLng: &property.SchemaFieldPointer{SchemaGroup: property.SchemaGroupID(d), Field: property.FieldID(l)}, + URL: &property.SchemaFieldPointer{SchemaGroup: property.SchemaGroupID(d), Field: property.FieldID(u)}, }, }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.expected, tc.p.linkable()) + assert.Equal(t, tt.expected, tt.p.linkable()) }) } } @@ -569,7 +594,7 @@ func TestSchema(t *testing.T) { expected: property. NewSchema(). ID(property.MustSchemaID("reearth/marker")). - Groups([]*property.SchemaGroup{ + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ property.NewSchemaGroup(). ID("default"). Fields([]*property.SchemaField{ @@ -579,26 +604,26 @@ func TestSchema(t *testing.T) { MustBuild()}, ). MustBuild()}, - ). + )). 
MustBuild(), }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - res, err := tc.ps.schema(tc.pid, tc.psid) - if tc.err == "" { - assert.Equal(t, len(tc.expected.Groups()), len(res.Groups())) - assert.Equal(t, tc.expected.LinkableFields(), res.LinkableFields()) - assert.Equal(t, tc.expected.Version(), res.Version()) - if len(res.Groups()) > 0 { - exg := tc.expected.Group(res.Groups()[0].ID()) + res, err := tt.ps.schema(tt.pid, tt.psid) + if tt.err == "" { + assert.Equal(t, tt.expected.Groups().Len(), res.Groups().Len()) + assert.Equal(t, tt.expected.LinkableFields(), res.LinkableFields()) + assert.Equal(t, tt.expected.Version(), res.Version()) + if res.Groups().Len() > 0 { + exg := tt.expected.Groups().Group(res.Groups().Groups()[0].ID()) assert.NotNil(t, exg) } } else { - assert.Equal(t, tc.err, err.Error()) + assert.Equal(t, tt.err, err.Error()) } }) } @@ -675,21 +700,21 @@ func TestSchemaGroup(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - res, err := tc.psg.schemaGroup() - if tc.err == "" { - assert.Equal(t, tc.expected.Title().String(), res.Title().String()) - assert.Equal(t, tc.expected.Title(), res.Title()) - assert.Equal(t, len(tc.expected.Fields()), len(res.Fields())) + res, err := tt.psg.schemaGroup() + if tt.err == "" { + assert.Equal(t, tt.expected.Title().String(), res.Title().String()) + assert.Equal(t, tt.expected.Title(), res.Title()) + assert.Equal(t, len(tt.expected.Fields()), len(res.Fields())) if len(res.Fields()) > 0 { exf := res.Fields()[0] - assert.NotNil(t, tc.expected.Field(exf.ID())) + assert.NotNil(t, tt.expected.Field(exf.ID())) } } else { - assert.Equal(t, tc.err, err.Error()) + assert.Equal(t, tt.err, err.Error()) } }) } @@ -862,19 +887,19 @@ func TestSchemaField(t 
*testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - res, err := tc.psg.schemaField() - if tc.err == nil { - assert.Equal(t, tc.expected.Title(), res.Title()) - assert.Equal(t, tc.expected.Description(), res.Description()) - assert.Equal(t, tc.expected.Suffix(), res.Suffix()) - assert.Equal(t, tc.expected.Prefix(), res.Prefix()) - assert.Equal(t, tc.expected.Choices(), res.Choices()) + res, err := tt.psg.schemaField() + if tt.err == nil { + assert.Equal(t, tt.expected.Title(), res.Title()) + assert.Equal(t, tt.expected.Description(), res.Description()) + assert.Equal(t, tt.expected.Suffix(), res.Suffix()) + assert.Equal(t, tt.expected.Prefix(), res.Prefix()) + assert.Equal(t, tt.expected.Choices(), res.Choices()) } else { - assert.Equal(t, tc.err, rerror.Get(err).Err) + assert.Equal(t, tt.err, rerror.Get(err).Err) } }) } @@ -924,12 +949,12 @@ func TestLayout(t *testing.T) { }, } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { t.Parallel() - res := tc.widgetLayout.layout() - assert.Equal(t, tc.expected, res) + res := tt.widgetLayout.layout() + assert.Equal(t, tt.expected, res) }) } } diff --git a/pkg/plugin/manifest/diff.go b/pkg/plugin/manifest/diff.go new file mode 100644 index 000000000..32c18daa7 --- /dev/null +++ b/pkg/plugin/manifest/diff.go @@ -0,0 +1,104 @@ +package manifest + +import ( + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" +) + +type Diff struct { + From plugin.ID + To plugin.ID + PropertySchemaDiff property.SchemaDiff + PropertySchemaDeleted bool + DeletedExtensions []DiffExtensionDeleted + UpdatedExtensions []DiffExtensionUpdated +} + +type DiffExtensionUpdated struct { + ExtensionID plugin.ExtensionID + 
OldType plugin.ExtensionType + NewType plugin.ExtensionType + PropertySchemaDiff property.SchemaDiff +} + +type DiffExtensionDeleted struct { + ExtensionID plugin.ExtensionID + PropertySchemaID property.SchemaID +} + +func DiffFrom(old, new Manifest) (d Diff) { + d.From = old.Plugin.ID() + d.To = new.Plugin.ID() + + oldsid, newsid := old.Plugin.Schema(), new.Plugin.Schema() + if oldsid != nil && newsid == nil { + d.PropertySchemaDiff.From = *oldsid + d.PropertySchemaDeleted = true + } else if oldsid != nil && newsid != nil { + d.PropertySchemaDiff = property.SchemaDiffFrom(old.PropertySchema(*oldsid), old.PropertySchema(*newsid)) + } + + for _, e := range old.Plugin.Extensions() { + ne := new.Plugin.Extension(e.ID()) + if ne == nil { + d.DeletedExtensions = append(d.DeletedExtensions, DiffExtensionDeleted{ + ExtensionID: e.ID(), + PropertySchemaID: e.Schema(), + }) + continue + } + + oldps, newps := old.PropertySchema(e.Schema()), new.PropertySchema(ne.Schema()) + diff := DiffExtensionUpdated{ + ExtensionID: e.ID(), + OldType: e.Type(), + NewType: ne.Type(), + PropertySchemaDiff: property.SchemaDiffFrom(oldps, newps), + } + + if diff.OldType != diff.NewType || !diff.PropertySchemaDiff.IsEmpty() { + d.UpdatedExtensions = append(d.UpdatedExtensions, diff) + } + } + + return +} + +func (d *Diff) IsEmpty() bool { + return d == nil || len(d.DeletedExtensions) == 0 && len(d.UpdatedExtensions) == 0 && d.PropertySchemaDiff.IsEmpty() && !d.PropertySchemaDeleted +} + +func (d Diff) DeletedPropertySchemas() []id.PropertySchemaID { + s := make([]id.PropertySchemaID, 0, len(d.DeletedExtensions)+1) + if d.PropertySchemaDeleted { + s = append(s, d.PropertySchemaDiff.From) + } + for _, e := range d.DeletedExtensions { + skip := false + for _, ss := range s { + if ss.Equal(e.PropertySchemaID) { + skip = true + break + } + } + if skip { + continue + } + s = append(s, e.PropertySchemaID) + } + return s +} + +func (d Diff) PropertySchmaDiffs() []property.SchemaDiff { + s := 
make([]property.SchemaDiff, 0, len(d.UpdatedExtensions)+1) + if !d.PropertySchemaDeleted && (!d.PropertySchemaDiff.IsEmpty() || d.PropertySchemaDiff.IsIDChanged()) { + s = append(s, d.PropertySchemaDiff) + } + for _, e := range d.UpdatedExtensions { + if !e.PropertySchemaDiff.IsEmpty() || e.PropertySchemaDiff.IsIDChanged() { + s = append(s, e.PropertySchemaDiff) + } + } + return s +} diff --git a/pkg/plugin/manifest/diff_test.go b/pkg/plugin/manifest/diff_test.go new file mode 100644 index 000000000..d35b35892 --- /dev/null +++ b/pkg/plugin/manifest/diff_test.go @@ -0,0 +1,248 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/stretchr/testify/assert" +) + +func TestDiffFrom(t *testing.T) { + oldp := plugin.MustID("aaaaaa~1.0.0") + newp := plugin.MustID("aaaaaa~1.1.0") + oldps := property.MustSchemaID("aaaaaa~1.0.0/_") + olde1ps := property.MustSchemaID("aaaaaa~1.0.0/a") + olde2ps := property.MustSchemaID("aaaaaa~1.0.0/b") + olde3ps := property.MustSchemaID("aaaaaa~1.0.0/c") + olde4ps := property.MustSchemaID("aaaaaa~1.0.0/d") + olde5ps := property.MustSchemaID("aaaaaa~1.0.0/e") + newe1ps := property.MustSchemaID("aaaaaa~1.1.0/a") + old := Manifest{ + Plugin: plugin.New().ID(oldp).Schema(&oldps).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID("a").Schema(olde1ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + plugin.NewExtension().ID("b").Schema(olde2ps).MustBuild(), // deleted + plugin.NewExtension().ID("c").Schema(olde3ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + plugin.NewExtension().ID("d").Schema(olde4ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + plugin.NewExtension().ID("e").Schema(olde5ps).Type(plugin.ExtensionTypeBlock).MustBuild(), + }).MustBuild(), + Schema: property.NewSchema().ID(oldps).MustBuild(), + ExtensionSchema: []*property.Schema{ + property.NewSchema().ID(olde1ps).MustBuild(), + 
property.NewSchema().ID(olde2ps).MustBuild(), + property.NewSchema().ID(olde3ps).MustBuild(), + property.NewSchema().ID(olde4ps).MustBuild(), + property.NewSchema().ID(olde5ps).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("x").Fields([]*property.SchemaField{ + property.NewSchemaField().ID("y").Type(property.ValueTypeString).MustBuild(), + }).MustBuild(), // updated + })).MustBuild(), + }, + } + new := Manifest{ + Plugin: plugin.New().ID(newp).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID("a").Schema(newe1ps).Type(plugin.ExtensionTypePrimitive).MustBuild(), // updated + plugin.NewExtension().ID("c").Schema(olde3ps).Type(plugin.ExtensionTypeBlock).MustBuild(), // same + plugin.NewExtension().ID("d").Schema(olde4ps).Type(plugin.ExtensionTypeBlock).MustBuild(), // property schema update + plugin.NewExtension().ID("e").Schema(olde5ps).Type(plugin.ExtensionTypeBlock).MustBuild(), // property schema update + }).MustBuild(), + ExtensionSchema: []*property.Schema{ + property.NewSchema().ID(newe1ps).MustBuild(), + property.NewSchema().ID(olde3ps).MustBuild(), + property.NewSchema().ID(olde4ps).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("x").MustBuild(), // added + })).MustBuild(), + property.NewSchema().ID(olde5ps).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("x").Fields([]*property.SchemaField{ + property.NewSchemaField().ID("y").Type(property.ValueTypeBool).MustBuild(), + }).MustBuild(), // updated + })).MustBuild(), + }, + } + + type args struct { + old Manifest + new Manifest + } + tests := []struct { + name string + args args + want Diff + }{ + { + name: "diff", + args: args{old: old, new: new}, + want: Diff{ + From: oldp, + To: newp, + PropertySchemaDiff: property.SchemaDiff{From: oldps}, + PropertySchemaDeleted: true, + DeletedExtensions: []DiffExtensionDeleted{{ExtensionID: "b", PropertySchemaID: olde2ps}}, + 
UpdatedExtensions: []DiffExtensionUpdated{ + { + ExtensionID: "a", + OldType: plugin.ExtensionTypeBlock, + NewType: plugin.ExtensionTypePrimitive, + PropertySchemaDiff: property.SchemaDiff{From: olde1ps, To: newe1ps}, + }, + { + ExtensionID: "e", + OldType: plugin.ExtensionTypeBlock, + NewType: plugin.ExtensionTypeBlock, + PropertySchemaDiff: property.SchemaDiff{ + From: olde5ps, + To: olde5ps, + TypeChanged: []property.SchemaDiffTypeChanged{ + {SchemaFieldPointer: property.SchemaFieldPointer{SchemaGroup: "x", Field: "y"}, NewType: property.ValueTypeBool}, + }, + }, + }, + }, + }, + }, + { + name: "same", + args: args{ + old: old, + new: old, + }, + want: Diff{ + From: oldp, + To: oldp, + PropertySchemaDiff: property.SchemaDiff{From: oldps, To: oldps}, + }, + }, + { + name: "nil", + args: args{}, + want: Diff{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, DiffFrom(tt.args.old, tt.args.new)) + }) + } +} + +func TestDiff_IsEmpty(t *testing.T) { + tests := []struct { + name string + target *Diff + want bool + }{ + { + name: "presemt", + target: &Diff{ + PropertySchemaDeleted: true, + }, + want: false, + }, + { + name: "empty", + target: &Diff{}, + want: true, + }, + { + name: "empty2", + target: &Diff{ + From: plugin.MustID("a~1.0.0"), + }, + want: true, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestDiff_DeletedPropertySchemas(t *testing.T) { + ps1 := property.MustSchemaID("a~1.0.0/a") + ps2 := property.MustSchemaID("a~1.0.0/b") + tests := []struct { + name string + target Diff + want []property.SchemaID + }{ + { + name: "ok", + target: Diff{ + PropertySchemaDiff: property.SchemaDiff{ + From: ps1, + }, + PropertySchemaDeleted: true, + DeletedExtensions: []DiffExtensionDeleted{ + {PropertySchemaID: ps2}, + {PropertySchemaID: ps2}, + }, + }, + want: 
[]property.SchemaID{ + ps1, + ps2, + }, + }, + { + name: "empty", + target: Diff{}, + want: []property.SchemaID{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.DeletedPropertySchemas()) + }) + } +} + +func TestDiff_PropertySchmaDiffs(t *testing.T) { + ps1 := property.MustSchemaID("a~1.0.0/a") + ps2 := property.MustSchemaID("a~1.0.0/b") + tests := []struct { + name string + target Diff + want []property.SchemaDiff + }{ + { + name: "ok", + target: Diff{ + PropertySchemaDiff: property.SchemaDiff{ + From: ps1, + }, + UpdatedExtensions: []DiffExtensionUpdated{ + {PropertySchemaDiff: property.SchemaDiff{ + From: ps2, + }}, + }, + }, + want: []property.SchemaDiff{ + { + From: ps1, + }, + { + From: ps2, + }, + }, + }, + { + name: "empty", + target: Diff{}, + want: []property.SchemaDiff{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.PropertySchmaDiffs()) + }) + } +} diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index 0d82e3742..bb8df4fc3 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -30,7 +30,7 @@ var normalExpected = &Manifest{ MustBuild(), }).MustBuild(), ExtensionSchema: []*property.Schema{ - property.NewSchema().ID(property.MustSchemaID("aaa~1.1.1/hoge")).Groups([]*property.SchemaGroup{ + property.NewSchema().ID(property.MustSchemaID("aaa~1.1.1/hoge")).Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ property.NewSchemaGroup().ID(property.SchemaGroupID("default")). RepresentativeField(property.FieldID("a").Ref()). Fields([]*property.SchemaField{ @@ -46,7 +46,7 @@ var normalExpected = &Manifest{ Type(property.ValueTypeNumber). 
MustBuild(), }).MustBuild(), - }).MustBuild(), + })).MustBuild(), }, } diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 0816f4e5f..75a2eac62 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -104,7 +104,7 @@ func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Mani } for key, tsg := range te.PropertySchema { - psg := ps.Group(property.SchemaGroupID(key)) + psg := ps.Groups().Group(property.SchemaGroupID(key)) if psg == nil { continue } diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index 3e31f09d1..10da92274 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -192,9 +192,9 @@ func TestMergeManifestTranslation(t *testing.T) { assert.Equal(t, tc.Expected.PluginName, res.Plugin.Name()) assert.Equal(t, tc.Expected.PluginDesc, res.Plugin.Description()) assert.Equal(t, tc.Expected.ExtName, res.Plugin.Extension(plugin.ExtensionID("test_ext")).Name()) - assert.Equal(t, tc.Expected.PsTitle, res.ExtensionSchema[0].Group("test_ps").Title()) - assert.Equal(t, tc.Expected.FieldTitle, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Title()) - assert.Equal(t, tc.Expected.FieldDesc, res.ExtensionSchema[0].Group("test_ps").Field("test_field").Description()) + assert.Equal(t, tc.Expected.PsTitle, res.ExtensionSchema[0].Groups().Group("test_ps").Title()) + assert.Equal(t, tc.Expected.FieldTitle, res.ExtensionSchema[0].Groups().Group("test_ps").Field("test_field").Title()) + assert.Equal(t, tc.Expected.FieldDesc, res.ExtensionSchema[0].Groups().Group("test_ps").Field("test_field").Description()) }) } } diff --git a/pkg/property/diff.go b/pkg/property/diff.go new file mode 100644 index 000000000..2e7c14987 --- /dev/null +++ b/pkg/property/diff.go @@ -0,0 +1,107 @@ +package property + +type SchemaDiff struct { + From 
SchemaID + To SchemaID + Deleted []SchemaDiffDeleted + Moved []SchemaDiffMoved + TypeChanged []SchemaDiffTypeChanged +} + +type SchemaDiffDeleted SchemaFieldPointer + +type SchemaDiffMoved struct { + From SchemaFieldPointer + To SchemaFieldPointer + ToList bool +} + +type SchemaDiffTypeChanged struct { + SchemaFieldPointer + NewType ValueType +} + +func SchemaDiffFrom(old, new *Schema) (d SchemaDiff) { + if old != nil { + d.From = old.ID() + } + if new != nil { + d.To = new.ID() + } + if old == nil || new == nil || old == new { + return + } + + for _, gf := range old.Groups().GroupAndFields() { + ngf := new.Groups().GroupAndField(gf.Field.ID()) + if ngf == nil { + d.Deleted = append(d.Deleted, SchemaDiffDeleted(gf.SchemaFieldPointer())) + continue + } + + if ngf.Group.ID() != gf.Group.ID() { + d.Moved = append(d.Moved, SchemaDiffMoved{ + From: gf.SchemaFieldPointer(), + To: ngf.SchemaFieldPointer(), + ToList: ngf.Group.IsList(), + }) + } + + if ngf.Field.Type() != gf.Field.Type() { + d.TypeChanged = append(d.TypeChanged, SchemaDiffTypeChanged{ + SchemaFieldPointer: ngf.SchemaFieldPointer(), + NewType: ngf.Field.Type(), + }) + } + } + + return +} + +func SchemaDiffFromProperty(old *Property, new *Schema) (d SchemaDiff) { + return SchemaDiffFrom(old.GuessSchema(), new) +} + +func (d *SchemaDiff) Migrate(p *Property) (res bool) { + if d.IsEmpty() { + return + } + + res = p.updateSchema(d.To) + + for _, dd := range d.Deleted { + if p.RemoveFields(SchemaFieldPointer(dd).Pointer()) { + res = true + } + } + + for _, dm := range d.Moved { + if dm.ToList { + // group -> list and list -> list are not supported; just delete + if p.RemoveFields(dm.From.Pointer()) { + res = true + } + continue + } + + if p.MoveFields(dm.From.Pointer(), dm.To.Pointer()) { + res = true + } + } + + for _, dt := range d.TypeChanged { + if p.Cast(dt.Pointer(), dt.NewType) { + res = true + } + } + + return +} + +func (d *SchemaDiff) IsEmpty() bool { + return d == nil || len(d.Deleted) == 0 && 
len(d.Moved) == 0 && len(d.TypeChanged) == 0 +} + +func (d *SchemaDiff) IsIDChanged() bool { + return d != nil && !d.From.Equal(d.To) +} diff --git a/pkg/property/diff_test.go b/pkg/property/diff_test.go new file mode 100644 index 000000000..205e4d016 --- /dev/null +++ b/pkg/property/diff_test.go @@ -0,0 +1,600 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestSchemaDiffFrom(t *testing.T) { + ps1 := MustSchemaID("x~1.0.0/a") + ps2 := MustSchemaID("x~1.0.0/b") + + type args struct { + old *Schema + new *Schema + } + tests := []struct { + name string + args args + want SchemaDiff + }{ + { + name: "diff", + args: args{ + old: &Schema{ + id: ps1, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "aa", propertyType: ValueTypeString}, // deleted + {id: "ab", propertyType: ValueTypeString}, + {id: "ac", propertyType: ValueTypeString}, + {id: "ad", propertyType: ValueTypeString}, + }}, + }}, + }, + new: &Schema{ + id: ps2, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "ab", propertyType: ValueTypeNumber}, // type changed + {id: "ae", propertyType: ValueTypeString}, // added + }}, + {id: "b", list: true, fields: []*SchemaField{ + {id: "ac", propertyType: ValueTypeString}, // moved + {id: "ad", propertyType: ValueTypeNumber}, // moved and type changed + }}, + }}, + }, + }, + want: SchemaDiff{ + From: ps1, + To: ps2, + Deleted: []SchemaDiffDeleted{ + {SchemaGroup: "a", Field: "aa"}, + }, + Moved: []SchemaDiffMoved{ + {From: SchemaFieldPointer{SchemaGroup: "a", Field: "ac"}, To: SchemaFieldPointer{SchemaGroup: "b", Field: "ac"}, ToList: true}, + {From: SchemaFieldPointer{SchemaGroup: "a", Field: "ad"}, To: SchemaFieldPointer{SchemaGroup: "b", Field: "ad"}, ToList: true}, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: "a", Field: "ab"}, 
NewType: ValueTypeNumber}, + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: "b", Field: "ad"}, NewType: ValueTypeNumber}, + }, + }, + }, + { + name: "no diff", + args: args{ + old: &Schema{ + id: ps1, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "aa", propertyType: ValueTypeNumber}, + }}, + }}, + }, + new: &Schema{ + id: ps2, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: "a", fields: []*SchemaField{ + {id: "aa", propertyType: ValueTypeNumber}, + }}, + {id: "b", list: true, fields: []*SchemaField{ + {id: "ba", propertyType: ValueTypeString}, // added + }}, + }}, + }, + }, + want: SchemaDiff{ + From: ps1, + To: ps2, + }, + }, + { + name: "same schemas", + args: args{ + old: testSchema1, + new: testSchema1, + }, + want: SchemaDiff{ + From: testSchema1.ID(), + To: testSchema1.ID(), + }, + }, + { + name: "nil", + args: args{ + old: nil, + new: nil, + }, + want: SchemaDiff{}, + }, + { + name: "old nil", + args: args{ + old: nil, + new: testSchema1, + }, + want: SchemaDiff{ + To: testSchema1.ID(), + }, + }, + { + name: "new nil", + args: args{ + old: testSchema1, + new: nil, + }, + want: SchemaDiff{ + From: testSchema1.ID(), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, SchemaDiffFrom(tt.args.old, tt.args.new)) + }) + } +} + +func TestSchemaDiffFromProperty(t *testing.T) { + ps := MustSchemaID("x~1.0.0/a") + + type args struct { + old *Property + new *Schema + } + tests := []struct { + name string + args args + want SchemaDiff + }{ + { + name: "diff", + args: args{ + old: testProperty1, + new: &Schema{ + id: ps, + groups: &SchemaGroupList{groups: []*SchemaGroup{ + {id: testSchemaGroup1.ID(), fields: []*SchemaField{ + {id: testSchemaField1.ID(), propertyType: ValueTypeNumber}, // type changed + {id: testSchemaField3.ID(), propertyType: ValueTypeNumber}, // moved and type changed + {id: "xxxx", propertyType: 
ValueTypeString}, // added + }}, + {id: testSchemaGroup2.ID(), list: true, fields: []*SchemaField{}}, + }}, + }, + }, + want: SchemaDiff{ + From: testProperty1.Schema(), + To: ps, + Deleted: nil, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testSchemaGroup2.ID(), Field: testSchemaField3.ID()}, + To: SchemaFieldPointer{SchemaGroup: testSchemaGroup1.ID(), Field: testSchemaField3.ID()}, + }, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: testSchemaGroup1.ID(), Field: testSchemaField1.ID()}, NewType: ValueTypeNumber}, + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: testSchemaGroup1.ID(), Field: testSchemaField3.ID()}, NewType: ValueTypeNumber}, + }, + }, + }, + { + name: "no diff", + args: args{ + old: testProperty1, + new: testSchema1, + }, + want: SchemaDiff{ + From: testProperty1.Schema(), + To: testSchema1.ID(), + }, + }, + { + name: "nil", + args: args{ + old: nil, + new: nil, + }, + want: SchemaDiff{}, + }, + { + name: "old nil", + args: args{ + old: nil, + new: testSchema1, + }, + want: SchemaDiff{ + To: testSchema1.ID(), + }, + }, + { + name: "new nil", + args: args{ + old: testProperty1, + new: nil, + }, + want: SchemaDiff{ + From: testProperty1.Schema(), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, SchemaDiffFromProperty(tt.args.old, tt.args.new)) + }) + } +} + +func TestSchemaDiff_Migrate(t *testing.T) { + itemID := NewItemID() + newSchemaID := MustSchemaID("x~1.0.0/ax") + + tests := []struct { + name string + target *SchemaDiff + args *Property + want bool + wantProperty *Property + only bool + }{ + { + name: "deleted and type changed", + target: &SchemaDiff{ + To: newSchemaID, + Deleted: []SchemaDiffDeleted{ + {SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: testGroupList1.SchemaGroup(), 
Field: testField2.Field()}, NewType: ValueTypeString}, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + &GroupList{ + itemBase: itemBase{ + ID: testGroupList1.ID(), + SchemaGroup: testGroupList1.SchemaGroup(), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: testGroup2.ID(), + SchemaGroup: testGroup2.SchemaGroup(), + }, + fields: []*Field{ + {field: testField2.Field(), v: NewOptionalValue(ValueTypeString, nil)}, // type changed + }, + }, + }, + }, + }, + }, + }, + { + name: "moved", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: "x", Field: testField1.Field()}, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + &Group{ + itemBase: itemBase{ + ID: itemID, + SchemaGroup: "x", + }, + fields: []*Field{testField1}, + }, + }, + }, + }, + { + name: "moved and type changed", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: "x", Field: testField1.Field()}, + }, + }, + TypeChanged: []SchemaDiffTypeChanged{ + {SchemaFieldPointer: SchemaFieldPointer{SchemaGroup: "x", Field: testField1.Field()}, NewType: ValueTypeNumber}, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + 
id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + &Group{ + itemBase: itemBase{ + ID: itemID, + SchemaGroup: "x", + }, + fields: []*Field{ + {field: testField1.Field(), v: NewOptionalValue(ValueTypeNumber, nil)}, + }, + }, + }, + }, + }, + { + name: "group -> list", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: testGroup2.SchemaGroup(), Field: testField1.Field()}, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + }, + }, + }, + { + name: "group -> list (ToList)", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}, + To: SchemaFieldPointer{SchemaGroup: testGroup2.SchemaGroup(), Field: testField1.Field()}, + ToList: true, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + &Group{ + itemBase: itemBase{ + ID: testGroup1.ID(), + SchemaGroup: testGroup1.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + testGroupList1, + }, + }, + }, + { + name: "list -> group", + target: &SchemaDiff{ + To: newSchemaID, + Moved: []SchemaDiffMoved{ + { + From: SchemaFieldPointer{SchemaGroup: testGroup2.SchemaGroup(), Field: testField2.Field()}, + To: SchemaFieldPointer{SchemaGroup: 
testGroup1.SchemaGroup(), Field: testField2.Field()}, + }, + }, + }, + args: testProperty1.Clone(), + want: true, + wantProperty: &Property{ + id: testProperty1.ID(), + scene: testProperty1.Scene(), + schema: newSchemaID, + items: []Item{ + testGroup1, + &GroupList{ + itemBase: itemBase{ + ID: testGroupList1.ID(), + SchemaGroup: testGroupList1.SchemaGroup(), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: testGroup2.ID(), + SchemaGroup: testGroup2.SchemaGroup(), + }, + fields: []*Field{}, // deleted + }, + }, + }, + }, + }, + }, + { + name: "empty", + target: &SchemaDiff{}, + args: testProperty1, + want: false, + wantProperty: testProperty1, + }, + { + name: "nil property", + target: &SchemaDiff{ + To: newSchemaID, + Deleted: []SchemaDiffDeleted{{SchemaGroup: testGroup1.SchemaGroup(), Field: testField1.Field()}}, + }, + args: nil, + want: false, + wantProperty: nil, + }, + { + name: "nil", + target: nil, + args: nil, + want: false, + wantProperty: nil, + }, + } + + only := false + for _, tt := range tests { + if tt.only { + only = true + break + } + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + // t.Parallel() // Cannot run tests in parallel due to mocking NewItemID + if only && !tt.only { + t.SkipNow() + } + defer mockNewItemID(itemID)() + assert.Equal(t, tt.want, tt.target.Migrate(tt.args)) + assert.Equal(t, tt.wantProperty, tt.args) + }) + } +} + +func TestSchemaDiff_IsEmpty(t *testing.T) { + tests := []struct { + name string + target *SchemaDiff + want bool + }{ + { + name: "present", + target: &SchemaDiff{ + Deleted: []SchemaDiffDeleted{{SchemaGroup: "", Field: ""}}, + }, + want: false, + }, + { + name: "empty", + target: &SchemaDiff{}, + want: true, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestSchemaDiff_IsIDChanged(t *testing.T) { + tests := []struct { + 
name string + target *SchemaDiff + want bool + }{ + { + name: "changed1", + target: &SchemaDiff{ + From: id.MustPropertySchemaID("a~1.0.0/a"), + To: id.MustPropertySchemaID("a~1.0.1/a"), + }, + want: true, + }, + { + name: "changed2", + target: &SchemaDiff{ + From: id.MustPropertySchemaID("a~1.0.0/a"), + }, + want: true, + }, + { + name: "changed3", + target: &SchemaDiff{ + To: id.MustPropertySchemaID("a~1.0.0/a"), + }, + want: true, + }, + { + name: "unchanged1", + target: &SchemaDiff{ + From: id.MustPropertySchemaID("a~1.0.0/a"), + To: id.MustPropertySchemaID("a~1.0.0/a"), + }, + want: false, + }, + { + name: "empty", + target: &SchemaDiff{}, + want: false, + }, + { + name: "nil", + target: nil, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.IsIDChanged()) + }) + } +} diff --git a/pkg/property/field.go b/pkg/property/field.go index d0584d87c..32617a3ed 100644 --- a/pkg/property/field.go +++ b/pkg/property/field.go @@ -32,10 +32,14 @@ func (p *Field) Clone() *Field { } func (p *Field) Field() FieldID { + return p.field +} + +func (p *Field) FieldRef() *FieldID { if p == nil { - return FieldID("") + return nil } - return p.field + return p.field.Ref() } func (p *Field) Links() *Links { @@ -59,20 +63,32 @@ func (p *Field) Value() *Value { return p.v.Value() } -func (p *Field) ActualValue(ds *dataset.Dataset) *Value { +func (p *Field) TypeAndValue() *OptionalValue { + if p == nil { + return nil + } + return p.v +} + +func (p *Field) ActualValue(ds *dataset.Dataset) *ValueAndDatasetValue { + if p == nil { + return nil + } + + var dv *dataset.Value if p.links != nil { if l := p.links.Last(); l != nil { - ldid := l.Dataset() - ldsfid := l.DatasetSchemaField() - if ldid != nil || ldsfid != nil || ds.ID() == *ldid { - if f := ds.Field(*ldsfid); f != nil { - return valueFromDataset(f.Value()) - } + d := l.Dataset() + if d != nil && ds.ID() == *d && l.DatasetSchemaField() != nil { + dv = 
ds.Field(*l.DatasetSchemaField()).Value() + } else { + return nil } + } else { + return nil } - return nil } - return p.Value() + return NewValueAndDatasetValue(p.Type(), dv, p.Value()) } func (p *Field) Datasets() []DatasetID { @@ -113,12 +129,13 @@ func (p *Field) UpdateUnsafe(value *Value) { p.v.SetValue(value) } -func (p *Field) Cast(t ValueType) { - if p == nil || p.Type() == t { - return +func (p *Field) Cast(t ValueType) bool { + if p == nil || t == ValueTypeUnknown || p.Type() == ValueTypeUnknown || p.Type() == t { + return false } p.v = p.v.Cast(t) p.Unlink() + return true } func (p *Field) Link(links *Links) { @@ -140,7 +157,7 @@ func (p *Field) UpdateField(field FieldID) { } func (p *Field) IsEmpty() bool { - return p != nil && p.Value().IsEmpty() && p.Links().IsEmpty() + return p == nil || p.Value().IsEmpty() && p.Links().IsEmpty() } func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) bool { @@ -149,7 +166,7 @@ func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset } fid := p.Field() - schemaField := newSchema.Field(fid) + schemaField := newSchema.Groups().Field(fid) // If field is not found in new schema, this field should be removed invalid := schemaField == nil @@ -175,13 +192,6 @@ func (p *Field) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset return !invalid } -func (p *Field) DatasetValue(ctx context.Context, d dataset.GraphLoader) (*dataset.Value, error) { - if p == nil { - return nil, nil - } - return p.links.DatasetValue(ctx, d) -} - func (p *Field) MigrateDataset(q DatasetMigrationParam) { if p == nil { return @@ -193,15 +203,12 @@ func (p *Field) MigrateDataset(q DatasetMigrationParam) { } } -func (p *Field) ValidateSchema(ps *SchemaField) error { - if p == nil { +func (f *Field) GuessSchema() *SchemaField { + if f == nil { return nil } - if ps == nil { - return errors.New("schema not found") - } - if p.v == nil { - return errors.New("invalid field value and 
type") + if f, err := NewSchemaField().ID(f.Field()).Type(f.Type()).Build(); err == nil { + return f } return nil } diff --git a/pkg/property/field_test.go b/pkg/property/field_test.go index b842a9b16..24d0c7386 100644 --- a/pkg/property/field_test.go +++ b/pkg/property/field_test.go @@ -7,29 +7,33 @@ import ( "github.com/stretchr/testify/assert" ) +var ( + testField1 = NewField(testSchemaField1.ID()).Value(OptionalValueFrom(ValueTypeString.ValueFrom("aaa"))).MustBuild() + testField2 = NewField(testSchemaField3.ID()).Value(NewOptionalValue(ValueTypeLatLng, nil)).MustBuild() +) + func TestField_ActualValue(t *testing.T) { p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() dsid := NewDatasetID() dssid := NewDatasetSchemaID() dssfid := NewDatasetFieldID() - l := NewLink(dsid, dssid, dssfid) - ls := NewLinks([]*Link{l}) + ls := NewLinks([]*Link{NewLink(dsid, dssid, dssfid)}) tests := []struct { Name string Field *Field DS *dataset.Dataset - Expected *Value + Expected *ValueAndDatasetValue }{ { Name: "nil links", Field: FieldFrom(p). Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). MustBuild(), - Expected: ValueTypeString.ValueFrom("vvv"), + Expected: NewValueAndDatasetValue(ValueTypeString, nil, ValueTypeString.ValueFrom("vvv")), }, { - Name: "nil last link", + Name: "empty link", Field: FieldFrom(p). Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). Links(&Links{}). @@ -48,7 +52,13 @@ func TestField_ActualValue(t *testing.T) { dataset.NewField(dssfid, dataset.ValueTypeString.ValueFrom("xxx"), "")}, ). 
MustBuild(), - Expected: ValueTypeString.ValueFrom("xxx"), + Expected: NewValueAndDatasetValue(ValueTypeString, dataset.ValueTypeString.ValueFrom("xxx"), ValueTypeString.ValueFrom("vvv")), + }, + { + Name: "dataset value missing", + Field: NewField("a").Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Links(ls).Build(), + DS: dataset.New().ID(dsid).Schema(dssid).MustBuild(), + Expected: NewValueAndDatasetValue(ValueTypeString, nil, ValueTypeString.ValueFrom("vvv")), }, } @@ -100,29 +110,120 @@ func TestField_Datasets(t *testing.T) { } func TestField_Clone(t *testing.T) { - p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() l := NewLink(NewDatasetID(), NewDatasetSchemaID(), NewDatasetFieldID()) ls := NewLinks([]*Link{l}) - b := FieldFrom(p). - Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))). - Links(ls). - MustBuild() - r := b.Clone() - assert.Equal(t, b, r) + b := NewField("a").Value(OptionalValueFrom(ValueTypeString.ValueFrom("vvv"))).Links(ls).Build() + + tests := []struct { + name string + target *Field + want *Field + }{ + { + name: "ok", + target: b, + want: b, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + r := b.Clone() + assert.Equal(t, b, r) + if tt.want != nil { + assert.NotSame(t, b, r) + } + }) + } } -func TestField(t *testing.T) { +func TestField_IsEmpty(t *testing.T) { + tests := []struct { + name string + target *Field + want bool + }{ + { + name: "empty", + target: &Field{}, + want: true, + }, + { + name: "empty value", + target: NewField("a").Value(NewOptionalValue(ValueTypeString, nil)).Build(), + want: true, + }, + { + name: "not empty", + target: NewField("a").Value(OptionalValueFrom(ValueTypeString.ValueFrom("x"))).Build(), + want: false, + }, + { + name: "nil", + target: nil, + want: true, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + 
assert.Equal(t, tt.want, tt.target.IsEmpty()) + }) + } +} + +func TestField_Link(t *testing.T) { did := NewDatasetID() dsid := NewDatasetSchemaID() - p := NewSchemaField().ID("A").Type(ValueTypeString).MustBuild() - b := FieldFrom(p).MustBuild() - assert.True(t, b.IsEmpty()) - l := NewLink(did, dsid, NewDatasetFieldID()) - ls := NewLinks([]*Link{l}) - b.Link(ls) - assert.True(t, b.IsDatasetLinked(dsid, did)) - b.Unlink() - assert.Nil(t, b.Links()) + dfid := NewDatasetFieldID() + l := NewLinks([]*Link{NewLink(did, dsid, dfid)}) + + tests := []struct { + name string + target *Field + args *Links + }{ + { + name: "link", + target: testField1.Clone(), + args: l, + }, + { + name: "unlink", + target: NewField("a").Value(NewOptionalValue(ValueTypeString, nil)).Links(l).Build(), + args: nil, + }, + { + name: "empty", + target: &Field{}, + args: nil, + }, + { + name: "nil", + target: nil, + args: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.target.Link(tt.args) + if tt.target != nil { + assert.Equal(t, tt.args, tt.target.links) + } + }) + } } func TestField_Update(t *testing.T) { @@ -190,9 +291,43 @@ func TestField_Cast(t *testing.T) { } for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { + t.Parallel() tt.target.Cast(tt.args.t) assert.Equal(t, tt.want, tt.target) }) } } + +func TestField_GuessSchema(t *testing.T) { + tests := []struct { + name string + target *Field + want *SchemaField + }{ + { + name: "ok", + target: &Field{field: "a", v: NewOptionalValue(ValueTypeLatLng, nil)}, + want: &SchemaField{id: "a", propertyType: ValueTypeLatLng}, + }, + { + name: "empty", + target: &Field{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.GuessSchema()) + }) + } +} diff --git a/pkg/property/group.go b/pkg/property/group.go index 
ef96e5888..b2e6bf2d3 100644 --- a/pkg/property/group.go +++ b/pkg/property/group.go @@ -3,7 +3,6 @@ package property import ( "context" "errors" - "fmt" "github.com/reearth/reearth-backend/pkg/dataset" ) @@ -18,9 +17,6 @@ type Group struct { var _ Item = &Group{} func (g *Group) ID() ItemID { - if g == nil { - return ItemID{} - } return g.itemBase.ID } @@ -32,9 +28,6 @@ func (g *Group) IDRef() *ItemID { } func (g *Group) SchemaGroup() SchemaGroupID { - if g == nil { - return SchemaGroupID("") - } return g.itemBase.SchemaGroup } @@ -106,15 +99,18 @@ func (g *Group) IsEmpty() bool { return true } -func (g *Group) Prune() { +func (g *Group) Prune() (res bool) { if g == nil { return } for _, f := range g.fields { if f.IsEmpty() { - g.RemoveField(f.Field()) + if g.RemoveField(f.Field()) { + res = true + } } } + return } // TODO: group migration @@ -136,7 +132,7 @@ func (g *Group) GetOrCreateField(ps *Schema, fid FieldID) (*Field, bool) { if g == nil || ps == nil { return nil, false } - psg := ps.Group(g.SchemaGroup()) + psg := ps.Groups().Group(g.SchemaGroup()) if psg == nil { return nil, false } @@ -158,20 +154,31 @@ func (g *Group) GetOrCreateField(ps *Schema, fid FieldID) (*Field, bool) { return nil, false } - g.fields = append(g.fields, field) + g.AddFields(field) return field, true } -func (g *Group) RemoveField(fid FieldID) { +func (g *Group) AddFields(fields ...*Field) { if g == nil { return } + for _, f := range fields { + _ = g.RemoveField(f.Field()) + g.fields = append(g.fields, f) + } +} + +func (g *Group) RemoveField(fid FieldID) (res bool) { + if g == nil { + return false + } for i, f := range g.fields { if f.Field() == fid { g.fields = append(g.fields[:i], g.fields[i+1:]...) 
- return + return true } } + return false } func (g *Group) FieldIDs() []FieldID { @@ -185,14 +192,6 @@ func (g *Group) FieldIDs() []FieldID { return fields } -// Fields returns a slice of fields -func (g *Group) Fields() []*Field { - if g == nil { - return nil - } - return append([]*Field{}, g.fields...) -} - // Field returns a field whose id is specified func (g *Group) Field(fid FieldID) *Field { if g == nil { @@ -219,7 +218,7 @@ func (g *Group) RepresentativeField(schema *Schema) *Field { if g == nil || schema == nil { return nil } - if psg := schema.GroupByPointer(NewPointer(&g.itemBase.SchemaGroup, nil, nil)); psg != nil { + if psg := schema.Groups().Group(g.itemBase.SchemaGroup); psg != nil { if representativeField := psg.RepresentativeFieldID(); representativeField != nil { if f, _ := g.GetOrCreateField(schema, *representativeField); f != nil { return f @@ -241,10 +240,89 @@ func (p *Group) ValidateSchema(ps *SchemaGroup) error { } for _, i := range p.fields { - if err := i.ValidateSchema(ps.Field(i.Field())); err != nil { - return fmt.Errorf("%s: %w", i.Field(), err) + f := ps.Field(i.Field()) + if f.Type() != i.Type() { + return errors.New("invalid field type") } } return nil } + +func (p *Group) Clone() *Group { + if p == nil { + return nil + } + fields := make([]*Field, 0, len(p.fields)) + for _, f := range p.fields { + fields = append(fields, f.Clone()) + } + return &Group{ + fields: fields, + itemBase: p.itemBase, + } +} + +func (p *Group) CloneItem() Item { + return p.Clone() +} + +func (g *Group) Fields(p *Pointer) []*Field { + if g == nil || len(g.fields) == 0 || (p != nil && !p.TestItem(g.SchemaGroup(), g.ID())) { + return nil + } + + if fid, ok := p.Field(); ok { + if f := g.Field(fid); f != nil { + return []*Field{f} + } + return nil + } + + return append(g.fields[:0:0], g.fields...) 
+} + +func (g *Group) RemoveFields(ptr *Pointer) (res bool) { + if g == nil || ptr == nil { + return false + } + if f, ok := ptr.FieldIfItemIs(g.SchemaGroup(), g.ID()); ok { + if g.RemoveField(f) { + res = true + } + } + return +} + +func (p *Group) GroupAndFields(ptr *Pointer) []GroupAndField { + if p == nil || len(p.fields) == 0 { + return nil + } + res := []GroupAndField{} + for _, f := range p.fields { + if ptr == nil || ptr.Test(p.SchemaGroup(), p.ID(), f.Field()) { + res = append(res, GroupAndField{ + Group: p, + Field: f, + }) + } + } + return res +} + +func (g *Group) GuessSchema() *SchemaGroup { + if g == nil { + return nil + } + + fields := make([]*SchemaField, 0, len(g.fields)) + for _, f := range g.fields { + if sf := f.GuessSchema(); sf != nil { + fields = append(fields, sf) + } + } + + // TODO: error handling + sg, _ := NewSchemaGroup().ID(g.SchemaGroup()).Fields(fields).Build() + return sg +} diff --git a/pkg/property/group_builder.go b/pkg/property/group_builder.go index 8212cbff3..6ec41ced7 100644 --- a/pkg/property/group_builder.go +++ b/pkg/property/group_builder.go @@ -22,6 +22,9 @@ func (b *GroupBuilder) Build() (*Group, error) { if b.p.itemBase.ID.IsNil() { return nil, ErrInvalidID } + if b.p.itemBase.SchemaGroup == "" { + return nil, ErrInvalidID + } return b.p, nil } @@ -44,7 +47,8 @@ func (b *GroupBuilder) ID(id ItemID) *GroupBuilder { } func (b *GroupBuilder) NewID() *GroupBuilder { - b.p.itemBase.ID = NewItemID() + nid := NewItemID + b.p.itemBase.ID = nid() return b } diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index 4ea298e67..e5a16a0a9 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -126,7 +126,7 @@ func TestGroupBuilder_MustBuild(t *testing.T) { } func TestGroupBuilder_NewID(t *testing.T) { - g := NewGroup().NewID().MustBuild() + g := NewGroup().NewID().SchemaGroup("x").MustBuild() assert.False(t, g.ID().IsNil()) } diff --git 
a/pkg/property/group_list.go b/pkg/property/group_list.go index be258ee7f..a07083b74 100644 --- a/pkg/property/group_list.go +++ b/pkg/property/group_list.go @@ -99,13 +99,16 @@ func (g *GroupList) IsEmpty() bool { return g != nil && (g.groups == nil || len(g.groups) == 0) } -func (g *GroupList) Prune() { +func (g *GroupList) Prune() (res bool) { if g == nil { return } for _, f := range g.groups { - f.Prune() + if f.Prune() { + res = true + } } + return } func (g *GroupList) MigrateSchema(ctx context.Context, newSchema *Schema, dl dataset.Loader) { @@ -133,14 +136,133 @@ func (g *GroupList) Group(gid ItemID) *Group { if g == nil { return nil } - for _, f := range g.groups { - if f.ID() == gid { - return f + for _, g := range g.groups { + if g.ID() == gid { + return g } } return nil } +func (g *GroupList) GroupByPointer(ptr *Pointer) *Group { + if g == nil { + return nil + } + gid, ok := ptr.Item() + if !ok { + return nil + } + return g.Group(gid) +} + +func (p *GroupList) Clone() *GroupList { + if p == nil { + return nil + } + groups := make([]*Group, 0, len(p.groups)) + for _, g := range p.groups { + groups = append(groups, g.Clone()) + } + return &GroupList{ + groups: groups, + itemBase: p.itemBase, + } +} + +func (p *GroupList) CloneItem() Item { + return p.Clone() +} + +func (g *GroupList) Fields(ptr *Pointer) []*Field { + if g == nil || len(g.groups) == 0 || (ptr != nil && !ptr.TestSchemaGroup(g.SchemaGroup())) { + return nil + } + + if pi, ok := ptr.Item(); ok && g.ID() != pi { + return g.Group(pi).Fields(ptr) + } + + if fid, ok := ptr.Field(); ok { + ptr = PointFieldOnly(fid) + } + + var fields []*Field + for _, g := range g.groups { + if f := g.Fields(ptr); len(f) > 0 { + fields = append(fields, f...) 
+ } + } + return fields +} + +func (g *GroupList) RemoveFields(ptr *Pointer) (res bool) { + if g == nil { + return + } + + if i, ok := ptr.Item(); ok && g.ID() != i { + return g.GroupByPointer(ptr).RemoveFields(ptr) + } + + if i, ok := ptr.ItemBySchemaGroup(); ok && g.SchemaGroup() != i { + return g.GroupByPointer(ptr).RemoveFields(ptr) + } + + if fid, ok := ptr.Field(); ok { + for _, g := range g.groups { + if g.RemoveField(fid) { + res = true + } + } + } + + return +} + +func (p *GroupList) GroupAndFields(ptr *Pointer) []GroupAndField { + if p == nil || len(p.groups) == 0 { + return nil + } + res := []GroupAndField{} + for _, g := range p.groups { + if ptr == nil || ptr.TestItem(g.SchemaGroup(), g.ID()) { + for _, r := range g.GroupAndFields(ptr) { + res = append(res, GroupAndField{ + ParentGroup: p, + Group: r.Group, + Field: r.Field, + }) + } + } + } + return res +} + +func (g *GroupList) GuessSchema() *SchemaGroup { + if g == nil { + return nil + } + + fieldm := map[FieldID]struct{}{} + fields := []*SchemaField{} + + for _, g := range g.groups { + if gsg := g.GuessSchema(); gsg != nil { + for _, f := range gsg.Fields() { + if _, ok := fieldm[f.ID()]; ok { + continue + } + fields = append(fields, f) + fieldm[f.ID()] = struct{}{} + } + } + } + + // TODO: error handling + sg, _ := NewSchemaGroup().ID(g.SchemaGroup()).IsList(true).Fields(fields).Build() + return sg +} + // GroupAt returns a group whose index is specified func (g *GroupList) GroupAt(i int) *Group { if g == nil || i < 0 || i > len(g.groups)-1 { @@ -281,7 +403,7 @@ func (g *GroupList) GetOrCreateField(ps *Schema, ptr *Pointer) (*Field, bool) { if g == nil || ptr == nil || ps == nil { return nil, false } - psg := ps.Group(g.SchemaGroup()) + psg := ps.Groups().Group(g.SchemaGroup()) if psg == nil { return nil, false } @@ -303,7 +425,7 @@ func (g *GroupList) CreateAndAddListItem(ps *Schema, index *int) *Group { if g == nil || ps == nil { return nil } - psg := ps.Group(g.SchemaGroup()) + psg := 
ps.Groups().Group(g.SchemaGroup()) if psg == nil { return nil } diff --git a/pkg/property/group_list_builder.go b/pkg/property/group_list_builder.go index 5573d851e..cd3b8bdd7 100644 --- a/pkg/property/group_list_builder.go +++ b/pkg/property/group_list_builder.go @@ -1,5 +1,9 @@ package property +import "errors" + +var ErrInvalidGroupInGroupList = errors.New("cannot contain an invalid property group in the property group list") + type GroupListBuilder struct { p *GroupList } @@ -22,6 +26,14 @@ func (b *GroupListBuilder) Build() (*GroupList, error) { if b.p.itemBase.ID.IsNil() { return nil, ErrInvalidID } + if b.p.itemBase.SchemaGroup == "" { + return nil, ErrInvalidID + } + for _, g := range b.p.groups { + if g.SchemaGroup() != b.p.SchemaGroup() { + return nil, ErrInvalidGroupInGroupList + } + } return b.p, nil } @@ -53,18 +65,18 @@ func (b *GroupListBuilder) SchemaGroup(g SchemaGroupID) *GroupListBuilder { return b } -func (b *GroupListBuilder) Groups(fields []*Group) *GroupListBuilder { +func (b *GroupListBuilder) Groups(groups []*Group) *GroupListBuilder { newGroups := []*Group{} ids := map[ItemID]struct{}{} - for _, f := range fields { - if f == nil { + for _, g := range groups { + if g == nil { continue } - if _, ok := ids[f.ID()]; ok { + if _, ok := ids[g.ID()]; ok { continue } - ids[f.ID()] = struct{}{} - newGroups = append(newGroups, f) + ids[g.ID()] = struct{}{} + newGroups = append(newGroups, g) } b.p.groups = newGroups return b diff --git a/pkg/property/group_list_builder_test.go b/pkg/property/group_list_builder_test.go index 7438d0f5c..a9046cfa9 100644 --- a/pkg/property/group_list_builder_test.go +++ b/pkg/property/group_list_builder_test.go @@ -8,7 +8,7 @@ import ( func TestGroupListBuilder_Build(t *testing.T) { pid := NewItemID() - groups := []*Group{NewGroup().ID(pid).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("x").MustBuild()} type args struct { ID ItemID @@ -26,17 +26,26 @@ func TestGroupListBuilder_Build(t *testing.T) { 
Name: "success", Args: args{ ID: pid, - SchemaGroup: "aa", + SchemaGroup: "x", Groups: groups, }, Expected: &GroupList{ itemBase: itemBase{ ID: pid, - SchemaGroup: "aa", + SchemaGroup: "x", }, groups: groups, }, }, + { + Name: "fail invalid group", + Args: args{ + ID: pid, + SchemaGroup: "aa", + Groups: groups, + }, + Err: ErrInvalidGroupInGroupList, + }, { Name: "fail invalid id", Err: ErrInvalidID, @@ -62,13 +71,13 @@ func TestGroupListBuilder_Build(t *testing.T) { } func TestGroupListBuilder_NewID(t *testing.T) { - b := NewGroupList().NewID().MustBuild() + b := NewGroupList().NewID().SchemaGroup("x").MustBuild() assert.NotNil(t, b.ID()) } func TestGroupListBuilder_MustBuild(t *testing.T) { pid := NewItemID() - groups := []*Group{NewGroup().ID(pid).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("x").MustBuild()} type args struct { ID ItemID @@ -86,17 +95,26 @@ func TestGroupListBuilder_MustBuild(t *testing.T) { Name: "success", Args: args{ ID: pid, - SchemaGroup: "aa", + SchemaGroup: "x", Groups: groups, }, Expected: &GroupList{ itemBase: itemBase{ ID: pid, - SchemaGroup: "aa", + SchemaGroup: "x", }, groups: groups, }, }, + { + Name: "fail invalid group", + Args: args{ + ID: pid, + SchemaGroup: "aa", + Groups: groups, + }, + Err: ErrInvalidGroupInGroupList, + }, { Name: "fail invalid id", Err: ErrInvalidID, @@ -130,7 +148,7 @@ func TestInitGroupListFrom(t *testing.T) { tests := []struct { Name string SchemaGroup *SchemaGroup - ExpectedSG SchemaGroupID + Expected SchemaGroupID }{ { Name: "nil schema group", @@ -138,7 +156,7 @@ func TestInitGroupListFrom(t *testing.T) { { Name: "success", SchemaGroup: NewSchemaGroup().ID("aa").MustBuild(), - ExpectedSG: "aa", + Expected: "aa", }, } @@ -147,7 +165,11 @@ func TestInitGroupListFrom(t *testing.T) { t.Run(tc.Name, func(t *testing.T) { t.Parallel() res := InitGroupFrom(tc.SchemaGroup) - assert.Equal(t, tc.ExpectedSG, res.SchemaGroup()) + if tc.Expected != "" { + assert.Equal(t, tc.Expected, 
res.SchemaGroup()) + } else { + assert.Nil(t, res) + } }) } } diff --git a/pkg/property/group_list_test.go b/pkg/property/group_list_test.go index d2f5a7294..08886d060 100644 --- a/pkg/property/group_list_test.go +++ b/pkg/property/group_list_test.go @@ -6,11 +6,16 @@ import ( "github.com/stretchr/testify/assert" ) +var ( + testGroupList1 = NewGroupList().NewID().SchemaGroup(testSchemaGroup2.ID()).Groups([]*Group{testGroup2}).MustBuild() +) + func TestGroupList_IDRef(t *testing.T) { - var b *GroupList - assert.Nil(t, b.IDRef()) - b = NewGroupList().NewID().MustBuild() - assert.NotNil(t, b.IDRef()) + id := NewItemID() + assert.Nil(t, (*GroupList)(nil).IDRef()) + assert.Equal(t, &id, (&GroupList{ + itemBase: itemBase{ID: id}, + }).IDRef()) } func TestGroupList_SchemaRef(t *testing.T) { @@ -50,8 +55,8 @@ func TestGroupList_HasLinkedField(t *testing.T) { Value(OptionalValueFrom(v)). Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). MustBuild() - groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} - groups2 := []*Group{NewGroup().ID(pid).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).SchemaGroup("xx").MustBuild()} tests := []struct { Name string @@ -93,8 +98,8 @@ func TestGroupList_Datasets(t *testing.T) { Value(OptionalValueFrom(v)). Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). MustBuild() - groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} - groups2 := []*Group{NewGroup().ID(pid).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).SchemaGroup("xx").MustBuild()} tests := []struct { Name string @@ -135,8 +140,8 @@ func TestGroupList_FieldsByLinkedDataset(t *testing.T) { Value(OptionalValueFrom(v)). Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). 
MustBuild() - groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} - groups2 := []*Group{NewGroup().ID(pid).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} + groups2 := []*Group{NewGroup().ID(pid).SchemaGroup("xx").MustBuild()} tests := []struct { Name string @@ -177,7 +182,7 @@ func TestGroupList_IsEmpty(t *testing.T) { Value(OptionalValueFrom(v)). Links(&Links{links: []*Link{NewLink(dsid, dssid, NewDatasetFieldID())}}). MustBuild() - groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} tests := []struct { Name string @@ -214,8 +219,8 @@ func TestGroupList_Prune(t *testing.T) { f := FieldFrom(sf).Value(OptionalValueFrom(v)).MustBuild() f2 := FieldFrom(sf).MustBuild() pid := NewItemID() - groups := []*Group{NewGroup().ID(pid).Fields([]*Field{f, f2}).MustBuild()} - pruned := []*Group{NewGroup().ID(pid).Fields([]*Field{f}).MustBuild()} + groups := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f, f2}).MustBuild()} + pruned := []*Group{NewGroup().ID(pid).SchemaGroup("xx").Fields([]*Field{f}).MustBuild()} tests := []struct { Name string @@ -244,7 +249,7 @@ func TestGroupList_Prune(t *testing.T) { func TestGroupList_Group(t *testing.T) { pid := NewItemID() - g := NewGroup().ID(pid).MustBuild() + g := NewGroup().ID(pid).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -279,10 +284,10 @@ func TestGroupList_Group(t *testing.T) { } func TestGroupList_GroupAt(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := 
NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -319,10 +324,10 @@ func TestGroupList_GroupAt(t *testing.T) { } func TestGroupList_Has(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -357,10 +362,10 @@ func TestGroupList_Has(t *testing.T) { } func TestGroupList_Count(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -387,10 +392,10 @@ func TestGroupList_Count(t *testing.T) { } func TestGroupList_Add(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -444,10 +449,10 @@ func TestGroupList_Add(t *testing.T) { } func TestGroupList_AddOrMove(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - 
g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -514,33 +519,29 @@ func TestGroupList_AddOrMove(t *testing.T) { } func TestGroupList_Move(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { - Name string - GL *GroupList - Id ItemID - ToIndex int - Expected struct { - Id ItemID - Index int - } + Name string + GL *GroupList + ID ItemID + ToIndex int + ExpectedID ItemID + ExpectedIndex int }{ { Name: "nil group list", }, { - Name: "success", - Id: g1.ID(), - ToIndex: 2, - GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), - Expected: struct { - Id ItemID - Index int - }{Id: g1.ID(), Index: 2}, + Name: "success", + ID: g1.ID(), + ToIndex: 2, + GL: NewGroupList().NewID().SchemaGroup("xx").Groups([]*Group{g1, g2, g3, g4}).MustBuild(), + ExpectedID: g1.ID(), + ExpectedIndex: 2, }, } @@ -548,17 +549,20 @@ func TestGroupList_Move(t *testing.T) { tt := tt t.Run(tt.Name, func(t *testing.T) { t.Parallel() - tt.GL.Move(tt.Id, tt.ToIndex) - assert.Equal(t, tt.Expected.Id, tt.GL.GroupAt(tt.Expected.Index).ID()) + tt.GL.Move(tt.ID, tt.ToIndex) + g := tt.GL.GroupAt(tt.ExpectedIndex) + if !tt.ExpectedID.IsNil() { + assert.Equal(t, 
tt.ExpectedID, g.ID()) + } }) } } func TestGroupList_MoveAt(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -603,10 +607,10 @@ func TestGroupList_MoveAt(t *testing.T) { } func TestGroupList_RemoveAt(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -647,10 +651,10 @@ func TestGroupList_RemoveAt(t *testing.T) { } } func TestGroupList_Remove(t *testing.T) { - g1 := NewGroup().ID(NewItemID()).MustBuild() - g2 := NewGroup().ID(NewItemID()).MustBuild() - g3 := NewGroup().ID(NewItemID()).MustBuild() - g4 := NewGroup().ID(NewItemID()).MustBuild() + g1 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g2 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g3 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() + g4 := NewGroup().ID(NewItemID()).SchemaGroup("xx").MustBuild() tests := []struct { Name string @@ -689,10 +693,11 @@ func TestGroupList_GetOrCreateField(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aa").Fields([]*SchemaField{sf}).MustBuild() g := 
NewGroup().ID(NewItemID()).SchemaGroup(sg.ID()).MustBuild() + s := NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild() tests := []struct { Name string - GL *GroupList + Target *GroupList Schema *Schema Ptr *Pointer Expected *Field @@ -700,28 +705,28 @@ func TestGroupList_GetOrCreateField(t *testing.T) { }{ { Name: "success", - GL: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Target: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), + Schema: s, Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), Expected: FieldFrom(sf).MustBuild(), ExpectedOK: true, }, { Name: "can't get a group", - GL: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Target: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), + Schema: s, Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, { - Name: "FieldByItem not ok: sg!=nil", - GL: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Name: "FieldByItem not ok: sg != nil", + Target: NewGroupList().NewID().SchemaGroup("aa").Groups([]*Group{g}).MustBuild(), + Schema: s, Ptr: NewPointer(sg.IDRef(), g.IDRef(), sf.ID().Ref()), }, { Name: "psg == nil", - GL: NewGroupList().NewID().Groups([]*Group{g}).MustBuild(), - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Target: nil, + Schema: s, Ptr: NewPointer(nil, g.IDRef(), sf.ID().Ref()), }, } @@ -730,7 +735,7 @@ func TestGroupList_GetOrCreateField(t *testing.T) { tt := tt t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res, ok := tt.GL.GetOrCreateField(tt.Schema, tt.Ptr) + res, ok := tt.Target.GetOrCreateField(tt.Schema, tt.Ptr) assert.Equal(t, 
tt.Expected, res) assert.Equal(t, tt.ExpectedOK, ok) }) @@ -754,7 +759,7 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { Name: "success", Index: getIntRef(0), GL: NewGroupList().NewID().SchemaGroup("aa").MustBuild(), - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), Expected: g, }, } @@ -764,8 +769,302 @@ func TestGroupList_CreateAndAddListItem(t *testing.T) { t.Run(tt.Name, func(t *testing.T) { t.Parallel() res := tt.GL.CreateAndAddListItem(tt.Schema, tt.Index) - assert.Equal(t, tt.Expected.Fields(), res.Fields()) + assert.Equal(t, tt.Expected.Fields(nil), res.Fields(nil)) assert.Equal(t, tt.Expected.SchemaGroup(), res.SchemaGroup()) }) } } + +func TestGroupList_Clone(t *testing.T) { + tests := []struct { + name string + target *GroupList + n bool + }{ + { + name: "ok", + target: testGroupList1.Clone(), + }, + { + name: "nil", + n: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.Clone() + if tt.n { + assert.Nil(t, res) + } else { + assert.Equal(t, tt.target, res) + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestGroupList_CloneItem(t *testing.T) { + tests := []struct { + name string + target *GroupList + n bool + }{ + { + name: "ok", + target: testGroupList1.Clone(), + }, + { + name: "nil", + n: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.CloneItem() + if tt.n { + assert.Nil(t, res) + } else { + assert.Equal(t, tt.target, res) + assert.NotSame(t, tt.target, res) + } + }) + } +} + +func TestGroupList_Fields(t *testing.T) { + type args struct { + p *Pointer + } + tests := []struct { + name string + target *GroupList + args args + want []*Field + }{ + { + name: "all", + target: testGroupList1, + args: args{p: nil}, + want: []*Field{testField2}, + }, + { + name: "specified", + 
target: testGroupList1, + args: args{p: PointFieldOnly(testField2.Field())}, + want: []*Field{testField2}, + }, + { + name: "not found", + target: testGroupList1, + args: args{p: PointFieldOnly("xxxxxx")}, + want: nil, + }, + { + name: "empty", + target: &GroupList{}, + args: args{p: PointFieldOnly(testField2.Field())}, + want: nil, + }, + { + name: "nil", + target: nil, + args: args{p: PointFieldOnly(testField2.Field())}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Fields(tt.args.p)) + }) + } +} + +func TestGroupList_RemoveFields(t *testing.T) { + type args struct { + p *Pointer + } + tests := []struct { + name string + target *GroupList + args args + want bool + wantFields []*Field + }{ + { + name: "nil pointer", + target: testGroupList1.Clone(), + args: args{p: nil}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "specified", + target: testGroupList1.Clone(), + args: args{p: PointFieldOnly(testField2.Field())}, + want: true, + wantFields: nil, + }, + { + name: "specified schema group", + target: testGroupList1.Clone(), + args: args{p: PointItemBySchema(testGroupList1.SchemaGroup())}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "specified item", + target: testGroupList1.Clone(), + args: args{p: PointItem(testGroupList1.ID())}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "not found", + target: testGroupList1.Clone(), + args: args{p: PointFieldOnly("xxxxxx")}, + want: false, + wantFields: []*Field{testField2}, + }, + { + name: "empty", + target: &GroupList{}, + args: args{p: PointFieldOnly(testField1.Field())}, + want: false, + wantFields: nil, + }, + { + name: "nil", + target: nil, + args: args{p: PointFieldOnly(testField1.Field())}, + want: false, + wantFields: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.RemoveFields(tt.args.p)) + if 
tt.target != nil { + assert.Equal(t, tt.wantFields, tt.target.Fields(nil)) + } + }) + } +} + +func TestGroupList_GroupAndFields(t *testing.T) { + tests := []struct { + name string + target *GroupList + args *Pointer + want []GroupAndField + }{ + { + name: "all", + target: testGroupList1, + args: nil, + want: []GroupAndField{ + {ParentGroup: testGroupList1, Group: testGroup2, Field: testField2}, + }, + }, + { + name: "specified", + target: testGroupList1, + args: PointFieldByItem(testGroup2.ID(), testField2.Field()), + want: []GroupAndField{ + {ParentGroup: testGroupList1, Group: testGroup2, Field: testField2}, + }, + }, + { + name: "specified but not found", + target: testGroupList1, + args: PointFieldByItem(testGroup1.ID(), testField2.Field()), + want: []GroupAndField{}, + }, + { + name: "empty", + target: &GroupList{}, + args: nil, + want: nil, + }, + { + name: "nil", + target: nil, + args: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + res := tt.target.GroupAndFields(tt.args) + assert.Equal(t, tt.want, res) + for i, r := range res { + assert.Same(t, tt.want[i].Field, r.Field) + assert.Same(t, tt.want[i].Group, r.Group) + assert.Same(t, tt.want[i].ParentGroup, r.ParentGroup) + } + }) + } +} + +func TestGroupList_GuessSchema(t *testing.T) { + tests := []struct { + name string + target *GroupList + want *SchemaGroup + }{ + { + name: "ok", + target: &GroupList{ + itemBase: itemBase{ + SchemaGroup: "aa", + }, + groups: []*Group{ + { + itemBase: itemBase{ + SchemaGroup: "aa", + }, + fields: []*Field{ + {field: "a", v: NewOptionalValue(ValueTypeLatLng, nil)}, + }, + }, + { + itemBase: itemBase{ + SchemaGroup: "aa", + }, + fields: []*Field{ + {field: "b", v: NewOptionalValue(ValueTypeString, nil)}, + }, + }, + }, + }, + want: &SchemaGroup{ + id: "aa", + list: true, + fields: []*SchemaField{ + {id: "a", propertyType: ValueTypeLatLng}, + {id: "b", propertyType: ValueTypeString}, + }, + }, + }, + { + name: "empty", + 
target: &GroupList{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.GuessSchema()) + }) + } +} diff --git a/pkg/property/group_test.go b/pkg/property/group_test.go index b06b69d8b..51cd993c4 100644 --- a/pkg/property/group_test.go +++ b/pkg/property/group_test.go @@ -7,18 +7,25 @@ import ( "github.com/stretchr/testify/assert" ) +var ( + testGroup1 = NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild() + testGroup2 = NewGroup().NewID().SchemaGroup(testSchemaGroup2.ID()).Fields([]*Field{testField2}).MustBuild() +) + func TestGroup_IDRef(t *testing.T) { - gid := NewItemID() - var g *Group - assert.Nil(t, g.IDRef()) - g = NewGroup().ID(gid).MustBuild() - assert.Equal(t, gid.Ref(), g.IDRef()) + id := NewItemID() + assert.Nil(t, (*Group)(nil).IDRef()) + assert.Equal(t, &id, (&Group{ + itemBase: itemBase{ + ID: id, + }, + }).IDRef()) } func TestGroup_SchemaGroup(t *testing.T) { var g *Group assert.Nil(t, g.SchemaGroupRef()) - assert.Equal(t, SchemaGroupID(""), g.SchemaGroup()) + pfid := SchemaGroupID("aa") g = NewGroup().NewID().SchemaGroup(pfid).MustBuild() assert.Equal(t, pfid, g.SchemaGroup()) @@ -45,12 +52,12 @@ func TestGroup_HasLinkedField(t *testing.T) { }, { Name: "true", - Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), Expected: true, }, { Name: "false", - Group: NewGroup().NewID().Fields([]*Field{f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f2}).MustBuild(), Expected: false, }, } @@ -86,14 +93,14 @@ func TestGroup_IsDatasetLinked(t *testing.T) { }, { Name: "true", - Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), Dataset: dsid, DatasetSchema: dssid, Expected: true, }, { Name: 
"false", - Group: NewGroup().NewID().Fields([]*Field{f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f2}).MustBuild(), Expected: false, }, } @@ -128,7 +135,7 @@ func TestGroup_Datasets(t *testing.T) { }, { Name: "normal case", - Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), Expected: []DatasetID{dsid}, }, } @@ -166,7 +173,7 @@ func TestGroup_FieldsByLinkedDataset(t *testing.T) { Name: "normal case", DataSet: dsid, DatasetSchema: dssid, - Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), Expected: []*Field{f}, }, } @@ -195,12 +202,12 @@ func TestGroup_IsEmpty(t *testing.T) { { Name: "true case", - Group: NewGroup().NewID().Fields([]*Field{f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f2}).MustBuild(), Expected: true, }, { Name: "false case", - Group: NewGroup().NewID().Fields([]*Field{f}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f}).MustBuild(), Expected: false, }, } @@ -232,7 +239,7 @@ func TestGroup_Prune(t *testing.T) { }, { Name: "normal case", - Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), Expected: []*Field{f}, }, } @@ -242,7 +249,7 @@ func TestGroup_Prune(t *testing.T) { t.Run(tt.Name, func(t *testing.T) { t.Parallel() tt.Group.Prune() - assert.Equal(t, tt.Expected, tt.Group.Fields()) + assert.Equal(t, tt.Expected, tt.Group.Fields(nil)) }) } } @@ -272,12 +279,12 @@ func TestGroup_GetOrCreateField(t *testing.T) { { Name: "group schema doesn't equal to ps", Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), - PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + PS: 
NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), }, { Name: "create field", Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), - PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), FID: "aa", Expected: struct { Field *Field @@ -290,7 +297,7 @@ func TestGroup_GetOrCreateField(t *testing.T) { { Name: "get field", Group: NewGroup().NewID().SchemaGroup("aa").Fields([]*Field{f}).MustBuild(), - PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + PS: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), FID: "aa", Expected: struct { Field *Field @@ -332,7 +339,7 @@ func TestGroup_RemoveField(t *testing.T) { { Name: "normal case", Input: "b", - Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), Expected: []*Field{f}, }, } @@ -342,7 +349,7 @@ func TestGroup_RemoveField(t *testing.T) { t.Run(tt.Name, func(t *testing.T) { t.Parallel() tt.Group.RemoveField(tt.Input) - assert.Equal(t, tt.Expected, tt.Group.Fields()) + assert.Equal(t, tt.Expected, tt.Group.Fields(nil)) }) } } @@ -364,7 +371,7 @@ func TestGroup_FieldIDs(t *testing.T) { }, { Name: "normal case", - Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), Expected: []FieldID{"a", "b"}, }, } @@ -397,13 +404,13 @@ func TestGroup_Field(t *testing.T) { }, { Name: "normal case", - Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Group: NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), Input: "a", Expected: f, }, { Name: "normal case", - Group: NewGroup().NewID().Fields([]*Field{f, f2}).MustBuild(), + Group: 
NewGroup().NewID().SchemaGroup("x").Fields([]*Field{f, f2}).MustBuild(), Input: "x", Expected: nil, }, @@ -453,7 +460,7 @@ func TestGroup_RepresentativeFieldValue(t *testing.T) { Name: "invalid property field", Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), Args: args{ - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups([]*SchemaGroup{sg2}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/bb")).Groups(NewSchemaGroupList([]*SchemaGroup{sg2})).MustBuild(), Value: ValueTypeString.ValueFrom("abc"), }, }, @@ -461,7 +468,7 @@ func TestGroup_RepresentativeFieldValue(t *testing.T) { Name: "ok", Group: NewGroup().NewID().SchemaGroup("aa").MustBuild(), Args: args{ - Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups([]*SchemaGroup{sg}).MustBuild(), + Schema: NewSchema().ID(MustSchemaID("xx~1.0.0/aa")).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), Value: ValueTypeString.ValueFrom("abc"), }, Expected: &Field{field: "aa", v: &OptionalValue{ov: *value.NewOptional(value.TypeString, nil)}}, diff --git a/pkg/property/id_test.go b/pkg/property/id_test.go new file mode 100644 index 000000000..137bb5954 --- /dev/null +++ b/pkg/property/id_test.go @@ -0,0 +1,9 @@ +package property + +func mockNewItemID(id ItemID) func() { + original := NewItemID + NewItemID = func() ItemID { return id } + return func() { + NewItemID = original + } +} diff --git a/pkg/property/item.go b/pkg/property/item.go index ddfee9145..377d52ceb 100644 --- a/pkg/property/item.go +++ b/pkg/property/item.go @@ -16,10 +16,15 @@ type Item interface { FieldsByLinkedDataset(DatasetSchemaID, DatasetID) []*Field IsDatasetLinked(DatasetSchemaID, DatasetID) bool IsEmpty() bool - Prune() + Prune() bool MigrateSchema(context.Context, *Schema, dataset.Loader) MigrateDataset(DatasetMigrationParam) ValidateSchema(*SchemaGroup) error + Fields(*Pointer) []*Field + RemoveFields(*Pointer) bool + CloneItem() Item + GroupAndFields(*Pointer) []GroupAndField + GuessSchema() 
*SchemaGroup } type itemBase struct { @@ -46,3 +51,16 @@ func InitItemFrom(psg *SchemaGroup) Item { } return InitGroupFrom(psg) } + +type GroupAndField struct { + ParentGroup *GroupList + Group *Group + Field *Field +} + +func (f GroupAndField) SchemaFieldPointer() SchemaFieldPointer { + return SchemaFieldPointer{ + SchemaGroup: f.Group.SchemaGroup(), + Field: f.Field.Field(), + } +} diff --git a/pkg/property/item_test.go b/pkg/property/item_test.go index f375efbd7..fd4d8b52a 100644 --- a/pkg/property/item_test.go +++ b/pkg/property/item_test.go @@ -78,3 +78,42 @@ func TestToGroupList(t *testing.T) { assert.Equal(t, propertySchemaGroup1ID, g.SchemaGroup()) assert.Equal(t, iid, g.ID()) } + +func TestGroupAndField_SchemaFieldPointer(t *testing.T) { + tests := []struct { + name string + target GroupAndField + want SchemaFieldPointer + }{ + { + name: "group", + target: GroupAndField{ + ParentGroup: nil, + Group: testGroup1, + Field: testField1, + }, + want: SchemaFieldPointer{ + SchemaGroup: testGroup1.SchemaGroup(), + Field: testField1.Field(), + }, + }, + { + name: "group list", + target: GroupAndField{ + ParentGroup: testGroupList1, + Group: testGroup2, + Field: testField2, + }, + want: SchemaFieldPointer{ + SchemaGroup: testGroup2.SchemaGroup(), + Field: testField2.Field(), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.SchemaFieldPointer()) + }) + } +} diff --git a/pkg/property/pointer.go b/pkg/property/pointer.go index 57080070c..b30b2420a 100644 --- a/pkg/property/pointer.go +++ b/pkg/property/pointer.go @@ -2,9 +2,9 @@ package property // Pointer is a pointer to a field and an item in properties and schemas type Pointer struct { - schemaItem *SchemaGroupID - item *ItemID - field *FieldID + schemaGroup *SchemaGroupID + item *ItemID + field *FieldID } // NewPointer creates a new Pointer. 
@@ -13,18 +13,23 @@ func NewPointer(sg *SchemaGroupID, i *ItemID, f *FieldID) *Pointer { return nil } return &Pointer{ - schemaItem: sg.CopyRef(), - item: i.CopyRef(), - field: f.CopyRef(), + schemaGroup: sg.CopyRef(), + item: i.CopyRef(), + field: f.CopyRef(), } } +// PointToEverything creates a new Pointer pointing to all items and fields. +func PointToEverything() *Pointer { + return &Pointer{} +} + // PointField creates a new Pointer pointing the field in properties. func PointField(sg *SchemaGroupID, i *ItemID, f FieldID) *Pointer { return &Pointer{ - schemaItem: sg.CopyRef(), - item: i.CopyRef(), - field: &f, + schemaGroup: sg.CopyRef(), + item: i.CopyRef(), + field: &f, } } @@ -38,7 +43,7 @@ func PointFieldOnly(fid FieldID) *Pointer { // PointItemBySchema creates a new Pointer pointing the schema item in property schemas. func PointItemBySchema(sg SchemaGroupID) *Pointer { return &Pointer{ - schemaItem: &sg, + schemaGroup: &sg, } } @@ -52,8 +57,8 @@ func PointItem(i ItemID) *Pointer { // PointFieldBySchemaGroup creates a new Pointer pointing to the field of the schema field in properties. 
func PointFieldBySchemaGroup(sg SchemaGroupID, f FieldID) *Pointer { return &Pointer{ - schemaItem: &sg, - field: &f, + schemaGroup: &sg, + field: &f, } } @@ -70,29 +75,29 @@ func (p *Pointer) Clone() *Pointer { return nil } return &Pointer{ - field: p.field.CopyRef(), - item: p.item.CopyRef(), - schemaItem: p.schemaItem.CopyRef(), + field: p.field.CopyRef(), + item: p.item.CopyRef(), + schemaGroup: p.schemaGroup.CopyRef(), } } func (p *Pointer) ItemBySchemaGroupAndItem() (i SchemaGroupID, i2 ItemID, ok bool) { - if p == nil || p.schemaItem == nil || p.item == nil { + if p == nil || p.schemaGroup == nil || p.item == nil { ok = false return } - i = *p.schemaItem + i = *p.schemaGroup i2 = *p.item ok = true return } func (p *Pointer) ItemBySchemaGroup() (i SchemaGroupID, ok bool) { - if p == nil || p.schemaItem == nil { + if p == nil || p.schemaGroup == nil { ok = false return } - i = *p.schemaItem + i = *p.schemaGroup ok = true return } @@ -102,8 +107,8 @@ func (p *Pointer) SchemaGroupAndItem() (i SchemaGroupID, i2 ItemID, ok bool) { if p == nil { return } - if p.schemaItem != nil { - i = *p.schemaItem + if p.schemaGroup != nil { + i = *p.schemaGroup ok = true } if p.item != nil { @@ -131,7 +136,7 @@ func (p *Pointer) ItemRef() *ItemID { } func (p *Pointer) FieldByItem() (i ItemID, f FieldID, ok bool) { - if p == nil || p.item == nil || p.schemaItem != nil || p.field == nil { + if p == nil || p.item == nil || p.schemaGroup != nil || p.field == nil { ok = false return } @@ -142,11 +147,11 @@ func (p *Pointer) FieldByItem() (i ItemID, f FieldID, ok bool) { } func (p *Pointer) FieldBySchemaGroup() (sg SchemaGroupID, f FieldID, ok bool) { - if p == nil || p.schemaItem == nil || p.item != nil || p.field == nil { + if p == nil || p.schemaGroup == nil || p.item != nil || p.field == nil { ok = false return } - sg = *p.schemaItem + sg = *p.schemaGroup f = *p.field ok = true return @@ -163,17 +168,81 @@ func (p *Pointer) Field() (f FieldID, ok bool) { } func (p *Pointer) 
FieldRef() *FieldID { - if p == nil { + f, ok := p.Field() + if !ok { return nil } - return p.field.CopyRef() + return f.Ref() +} + +func (p *Pointer) FieldOnly() (f FieldID, ok bool) { + if p == nil || p.field == nil || p.item != nil || p.schemaGroup != nil { + ok = false + return + } + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldOnlyRef() *FieldID { + f, ok := p.FieldOnly() + if !ok { + return nil + } + return f.Ref() +} + +func (p *Pointer) FieldIfItemIs(sg SchemaGroupID, i ItemID) (f FieldID, ok bool) { + if p == nil || p.field == nil || !p.TestItem(sg, i) { + ok = false + return + } + f = *p.field + ok = true + return +} + +func (p *Pointer) FieldIfItemIsRef(sg SchemaGroupID, i ItemID) *FieldID { + f, ok := p.FieldIfItemIs(sg, i) + if !ok { + return nil + } + return f.Ref() +} + +func (p *Pointer) Test(sg SchemaGroupID, i ItemID, f FieldID) bool { + return p.TestItem(sg, i) && p.TestField(f) +} + +func (p *Pointer) TestItem(sg SchemaGroupID, i ItemID) bool { + return p.TestSchemaGroup(sg) && (p.item == nil || *p.item == i) +} + +func (p *Pointer) TestSchemaGroup(sg SchemaGroupID) bool { + return p != nil && (p.schemaGroup == nil || *p.schemaGroup == sg) +} + +func (p *Pointer) TestField(f FieldID) bool { + return p != nil && (p.field == nil || *p.field == f) +} + +func (p *Pointer) AllFields() *Pointer { + if p == nil || p.schemaGroup == nil && p.item == nil { + return nil + } + return &Pointer{ + schemaGroup: p.schemaGroup.CopyRef(), + item: p.item.CopyRef(), + field: nil, + } } func (p *Pointer) GetAll() (sg *SchemaGroupID, i *ItemID, f *FieldID) { if p == nil { return } - sg = p.schemaItem.CopyRef() + sg = p.schemaGroup.CopyRef() i = p.item.CopyRef() f = p.field.CopyRef() return diff --git a/pkg/property/property.go b/pkg/property/property.go index aad3b92bf..dee03d965 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -39,15 +39,9 @@ func (p *Property) Field(ptr *Pointer) (*Field, *GroupList, *Group) { return nil, 
nil, nil } - if iid, fid, ok := ptr.FieldByItem(); ok { - if i, gl := p.Item(iid); i != nil { - g := ToGroup(i) - return g.Field(fid), gl, g - } - } else if sgid, fid, ok := ptr.FieldBySchemaGroup(); ok { - if i := p.ItemBySchema(sgid); i != nil { - g := ToGroup(i) - return g.Field(fid), nil, g + if g, gl := p.GroupAndList(ptr); g != nil { + if fields := g.Fields(ptr); len(fields) > 0 { + return fields[0], gl, g } } @@ -61,20 +55,35 @@ func (p *Property) Items() []Item { return append([]Item{}, p.items...) } -func (p *Property) Item(id ItemID) (Item, *GroupList) { - if p == nil { - return nil, nil +func (p *Property) Item(ptr *Pointer) Item { + if p == nil || ptr == nil || ptr.FieldOnlyRef() != nil { + return nil } - for _, f := range p.items { - if f.ID() == id { - return f, nil + + for _, i := range p.items { + if ptr.TestItem(i.SchemaGroup(), i.ID()) { + return i } - if gl := ToGroupList(f); gl != nil { - if i := gl.Group(id); i != nil { - return i, gl + } + + return nil +} + +func (p *Property) GroupAndList(ptr *Pointer) (*Group, *GroupList) { + if p == nil || ptr == nil { + return nil, nil + } + + for _, i := range p.items { + if ptr.TestItem(i.SchemaGroup(), i.ID()) { + if gl := ToGroupList(i); gl != nil { + return gl.GroupByPointer(ptr), gl + } else if g := ToGroup(i); g != nil { + return g, nil } } } + return nil, nil } @@ -113,18 +122,6 @@ func (p *Property) GroupListBySchema(id SchemaGroupID) *GroupList { return nil } -func (p *Property) ItemByPointer(ptr *Pointer) (Item, *GroupList) { - if p == nil || ptr == nil { - return nil, nil - } - if pid, ok := ptr.Item(); ok { - return p.Item(pid) - } else if sgid, ok := ptr.ItemBySchemaGroup(); ok { - return p.ItemBySchema(sgid), nil - } - return nil, nil -} - func (p *Property) ListItem(ptr *Pointer) (*Group, *GroupList) { if p == nil { return nil, nil @@ -160,6 +157,47 @@ func (p *Property) HasLinkedField() bool { return false } +func (p *Property) Clone() *Property { + if p == nil { + return nil + } + + items 
:= make([]Item, 0, len(p.items)) + for _, i := range p.items { + items = append(items, i.CloneItem()) + } + + return &Property{ + id: p.id, + schema: p.schema, + scene: p.scene, + items: items, + } +} + +func (p *Property) Fields(ptr *Pointer) []*Field { + if p == nil || len(p.items) == 0 { + return nil + } + res := []*Field{} + for _, g := range p.items { + res = append(res, g.Fields(ptr)...) + } + return res +} + +func (p *Property) RemoveFields(ptr *Pointer) (res bool) { + if p == nil { + return + } + for _, g := range p.items { + if g.RemoveFields(ptr) { + res = true + } + } + return +} + func (p *Property) FieldsByLinkedDataset(s DatasetSchemaID, i DatasetID) []*Field { if p == nil { return nil @@ -197,15 +235,13 @@ func (p *Property) Datasets() []DatasetID { } func (p *Property) RemoveItem(ptr *Pointer) { - if p == nil { - return - } - sgid, iid, ok := ptr.SchemaGroupAndItem() - if !ok { + if p == nil || ptr == nil { return } - for i, item := range p.items { - if item.ID() == iid || item.SchemaGroup() == sgid { + + for i := 0; i < len(p.items); i++ { + item := p.items[i] + if ptr.TestItem(item.SchemaGroup(), item.ID()) { p.items = append(p.items[:i], p.items[i+1:]...) 
return } @@ -222,21 +258,25 @@ func (p *Property) RemoveField(ptr *Pointer) { return } - item, _ := p.ItemByPointer(ptr) - if group := ToGroup(item); group != nil { + if group := ToGroup(p.Item(ptr)); group != nil { group.RemoveField(fid) } } -func (p *Property) Prune() { +func (p *Property) Prune() (res bool) { if p == nil { return } - for _, f := range p.items { - if f.IsEmpty() { - p.RemoveItem(PointItem(f.ID())) + for _, i := range p.items { + if i.Prune() { + res = true + } + if i.IsEmpty() { + p.RemoveItem(PointItem(i.ID())) + res = true } } + return } func (p *Property) UpdateValue(ps *Schema, ptr *Pointer, v *Value) (*Field, *GroupList, *Group, error) { @@ -246,7 +286,7 @@ func (p *Property) UpdateValue(ps *Schema, ptr *Pointer, v *Value) (*Field, *Gro return nil, nil, nil, nil } - if err := field.Update(v, ps.Field(field.Field())); err != nil { + if err := field.Update(v, ps.Groups().Field(field.Field())); err != nil { return nil, nil, nil, err } @@ -292,8 +332,11 @@ func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) return nil, nil } - if item, pgl := p.ItemByPointer(ptr); item != nil { - return item, pgl + if g, gl := p.GroupAndList(ptr); g != nil || gl != nil { + if g == nil { + return gl, nil + } + return g, gl } psgid, ok := ptr.ItemBySchemaGroup() @@ -301,18 +344,14 @@ func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) return nil, nil } - psg := ps.Group(psgid) + psg := ps.Groups().Group(psgid) if psg == nil { return nil, nil } ni := InitItemFrom(psg) if ni != nil { - if p.items == nil { - p.items = []Item{ni} - } else { - p.items = append(p.items, ni) - } + p.items = append(p.items, ni) } return ni, nil // root item @@ -325,9 +364,9 @@ func (p *Property) GetOrCreateGroup(ps *Schema, ptr *Pointer) (*Group, *GroupLis var psg *SchemaGroup if psgid, ok := ptr.ItemBySchemaGroup(); ok { - psg = ps.Group(psgid) + psg = ps.Groups().Group(psgid) } else if f, ok := ptr.Field(); ok { - psg = 
ps.GroupByField(f) + psg = ps.Groups().GroupByField(f) } if psg == nil { return nil, nil @@ -337,6 +376,29 @@ func (p *Property) GetOrCreateGroup(ps *Schema, ptr *Pointer) (*Group, *GroupLis return ToGroup(item), gl } +func (p *Property) GetOrCreateRootGroup(ptr *Pointer) (*Group, bool) { + if p == nil || ptr == nil { + return nil, false + } + + if i := p.Item(ptr); i != nil { + return ToGroup(i), false + } + + sg, ok := ptr.ItemBySchemaGroup() + if !ok { + return nil, false + } + + ng, err := NewGroup().NewID().SchemaGroup(sg).Build() + if err != nil { + return nil, false + } + + p.items = append(p.items, ng) + return ng, true +} + func (p *Property) GetOrCreateGroupList(ps *Schema, ptr *Pointer) *GroupList { if p == nil || ps == nil || ptr == nil || !ps.ID().Equal(p.Schema()) { return nil @@ -344,9 +406,9 @@ func (p *Property) GetOrCreateGroupList(ps *Schema, ptr *Pointer) *GroupList { var psg *SchemaGroup if psgid, ok := ptr.ItemBySchemaGroup(); ok { - psg = ps.Group(psgid) + psg = ps.Groups().Group(psgid) } else if f, ok := ptr.Field(); ok { - psg = ps.GroupByField(f) + psg = ps.Groups().GroupByField(f) } if psg == nil { return nil @@ -397,22 +459,19 @@ func (p *Property) UpdateLinkableValue(s *Schema, v *Value) { return } - var ptr *Pointer - switch v.Type() { - case ValueTypeLatLng: - ptr = s.linkable.LatLng - case ValueTypeURL: - ptr = s.linkable.URL + sfid := s.linkable.FieldByType(v.Type()) + if sfid == nil { + return } - sf := s.FieldByPointer(ptr) + sf := s.Groups().GroupAndField(*sfid) if sf == nil { return } - f, _, _, ok := p.GetOrCreateField(s, ptr) + f, _, _, ok := p.GetOrCreateField(s, sf.Pointer()) if ok { - if err := f.Update(v, sf); err != nil { + if err := f.Update(v, sf.Field); err != nil { p.Prune() } } @@ -423,20 +482,17 @@ func (p *Property) AutoLinkField(s *Schema, v ValueType, d DatasetSchemaID, df * return } - var ptr *Pointer - switch v { - case ValueTypeLatLng: - ptr = s.linkable.LatLng - case ValueTypeURL: - ptr = s.linkable.URL + 
sfid := s.linkable.FieldByType(v) + if sfid == nil { + return } - sf := s.FieldByPointer(ptr) + sf := s.Groups().GroupAndField(*sfid) if sf == nil { return } - f, _, _, ok := p.GetOrCreateField(s, ptr) + f, _, _, ok := p.GetOrCreateField(s, sf.Pointer()) if ok { if ds == nil { f.Link(NewLinks([]*Link{NewLinkFieldOnly(d, *df)})) @@ -483,10 +539,90 @@ func (p *Property) ValidateSchema(ps *Schema) error { for _, i := range p.items { sg := i.SchemaGroup() - if err := i.ValidateSchema(ps.Group(sg)); err != nil { + if err := i.ValidateSchema(ps.Groups().Group(sg)); err != nil { return fmt.Errorf("%s (%s): %w", p.ID(), sg, err) } } return nil } + +// MoveFields moves fields between items. Only fields in Groups can be moved to another Group, fields in GroupLists will simply be deleted. +func (p *Property) MoveFields(from, to *Pointer) (res bool) { + if p == nil { + return + } + + fields := p.GroupAndFields(from) + if len(fields) == 0 { + return + } + + toGroup, created := p.GetOrCreateRootGroup(to) + if created { + res = true + } + + for _, f := range fields { + if f.Group.RemoveField(f.Field.Field()) { + res = true + } + // For root group only + if f.ParentGroup == nil && toGroup != nil { + // NOTE: currently changing the field ID is not supported + toGroup.AddFields(f.Field) + res = true + } + } + + return +} + +func (p *Property) GroupAndFields(ptr *Pointer) []GroupAndField { + if p == nil || len(p.items) == 0 { + return nil + } + res := []GroupAndField{} + for _, i := range p.items { + if ptr == nil || ptr.TestSchemaGroup(i.SchemaGroup()) { + res = append(res, i.GroupAndFields(ptr)...) 
+ } + } + return res +} + +// Cast changes the type of fields that are matches the pointer +func (p *Property) Cast(ptr *Pointer, t ValueType) (res bool) { + for _, f := range p.Fields(ptr) { + if f.Cast(t) { + res = true + } + } + return +} + +func (p *Property) GuessSchema() *Schema { + if p == nil { + return nil + } + + groups := make([]*SchemaGroup, 0, len(p.items)) + for _, i := range p.items { + if g := i.GuessSchema(); g != nil { + groups = append(groups, g) + } + } + + if s, err := NewSchema().ID(p.Schema()).Groups(NewSchemaGroupList(groups)).Build(); err == nil { + return s + } + return nil +} + +func (p *Property) updateSchema(s SchemaID) bool { + if p == nil || s.IsNil() || p.schema.Equal(s) { + return false + } + p.schema = s.Clone() + return true +} diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index 9e1a5494b..9d2fbc410 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -6,13 +6,18 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) -func TestPropertyMigrateSchema(t *testing.T) { +var ( + testProperty1 = New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{testGroup1, testGroupList1}).MustBuild() +) + +func TestProperty_MigrateSchema(t *testing.T) { sceneID := NewSceneID() - oldSchema, _ := SchemaIDFrom("hoge~1.0.0/test") - newSchema, _ := SchemaIDFrom("hoge~1.0.0/test2") + oldSchema := MustSchemaID("hoge~1.0.0/test") + newSchema := MustSchemaID("hoge~1.0.0/test2") schemaField1ID := FieldID("a") schemaField2ID := FieldID("b") schemaField3ID := FieldID("c") @@ -45,9 +50,9 @@ func TestPropertyMigrateSchema(t *testing.T) { schemaField6, schemaField7, } - schemaGroups := []*SchemaGroup{ + schemaGroups := NewSchemaGroupList([]*SchemaGroup{ NewSchemaGroup().ID(schemaGroupID).Fields(schemaFields).MustBuild(), - } + }) fields := []*Field{ // 
should remain @@ -104,7 +109,7 @@ func TestPropertyMigrateSchema(t *testing.T) { property.MigrateSchema(context.Background(), schema, dataset.LoaderFrom([]*dataset.Dataset{ds})) newGroup := ToGroup(property.ItemBySchema(schemaGroupID)) - newFields := newGroup.Fields() + newFields := newGroup.Fields(nil) assert.Equal(t, schema.ID(), property.Schema()) assert.Equal(t, 1, len(property.Items())) @@ -126,7 +131,7 @@ func TestGetOrCreateItem(t *testing.T) { sg1 := NewSchemaGroup().ID(sg1id).Fields([]*SchemaField{sf1}).MustBuild() sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() sg2 := NewSchemaGroup().ID(sg2id).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() - s := NewSchema().ID(sid).Groups([]*SchemaGroup{sg1, sg2}).MustBuild() + s := NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg1, sg2})).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() @@ -134,13 +139,15 @@ func TestGetOrCreateItem(t *testing.T) { assert.Nil(t, p.ItemBySchema(sg1id)) assert.Equal(t, []Item{}, p.Items()) - i, _ := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + i, gl := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + assert.Nil(t, gl) assert.NotNil(t, i) assert.Equal(t, sg1id, i.SchemaGroup()) assert.Equal(t, i, ToGroup(p.ItemBySchema(sg1id))) assert.Equal(t, []Item{i}, p.Items()) - i2, _ := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + i2, gl := p.GetOrCreateItem(s, PointItemBySchema(sg1id)) + assert.Nil(t, gl) assert.NotNil(t, i2) assert.Equal(t, i, i2) assert.Equal(t, i2, ToGroup(p.ItemBySchema(sg1id))) @@ -149,13 +156,15 @@ func TestGetOrCreateItem(t *testing.T) { // group list assert.Nil(t, p.ItemBySchema(sg2id)) - i3, _ := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + i3, gl := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + assert.Nil(t, gl) assert.NotNil(t, i3) assert.Equal(t, sg2id, i3.SchemaGroup()) assert.Equal(t, i3, ToGroupList(p.ItemBySchema(sg2id))) assert.Equal(t, []Item{i, i3}, p.Items()) - i4, _ := 
p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + i4, gl := p.GetOrCreateItem(s, PointItemBySchema(sg2id)) + assert.Nil(t, gl) assert.NotNil(t, i4) assert.Equal(t, i3, i4) assert.Equal(t, i4, ToGroupList(p.ItemBySchema(sg2id))) @@ -174,7 +183,7 @@ func TestGetOrCreateField(t *testing.T) { sg1 := NewSchemaGroup().ID(sg1id).Fields([]*SchemaField{sf1}).MustBuild() sf2 := NewSchemaField().ID(sf2id).Type(ValueTypeString).MustBuild() sg2 := NewSchemaGroup().ID(sg2id).Fields([]*SchemaField{sf2}).IsList(true).MustBuild() - s := NewSchema().ID(sid).Groups([]*SchemaGroup{sg1, sg2}).MustBuild() + s := NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg1, sg2})).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() @@ -188,7 +197,7 @@ func TestGetOrCreateField(t *testing.T) { assert.Equal(t, sf1id, f.Field()) i := ToGroup(p.ItemBySchema(sg1id)) assert.Equal(t, sg1id, i.SchemaGroup()) - assert.Equal(t, []*Field{f}, i.Fields()) + assert.Equal(t, []*Field{f}, i.Fields(nil)) field, _, _ := p.Field(PointFieldBySchemaGroup(sg1id, sf1id)) assert.Equal(t, f, field) @@ -220,7 +229,7 @@ func TestAddListItem(t *testing.T) { sgid := SchemaGroupID("b") sf := NewSchemaField().ID(sfid).Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID(sgid).Fields([]*SchemaField{sf}).IsList(true).MustBuild() - ps := NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild() + ps := NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild() p := New().NewID().Scene(sceneID).Schema(sid).MustBuild() item, _ := p.AddListItem(ps, PointItemBySchema(sgid), nil) @@ -270,3 +279,397 @@ func TestRemoveListItem(t *testing.T) { assert.Equal(t, []*Group{}, gl.Groups()) assert.Equal(t, 0, len(p.Items())) } + +func TestPointer_Test(t *testing.T) { + itemID := NewItemID() + + type args struct { + sg SchemaGroupID + i ItemID + f FieldID + want bool + } + tests := []struct { + name string + target *Pointer + args []args + }{ + { + name: "schema group only", + target: 
&Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: true}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: false}, + }, + }, + { + name: "item only", + target: &Pointer{item: itemID.Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: true}, + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + }, + }, + { + name: "schema group and item", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: itemID.Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: true}, + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: false}, + {sg: SchemaGroupID("yy"), i: NewItemID(), f: FieldID("a"), want: false}, + }, + }, + { + name: "all", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: itemID.Ref(), field: FieldID("a").Ref()}, + args: []args{ + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("a"), want: true}, + {sg: SchemaGroupID("yy"), i: itemID, f: FieldID("a"), want: false}, + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: false}, + {sg: SchemaGroupID("xx"), i: itemID, f: FieldID("b"), want: false}, + }, + }, + { + name: "empty", + target: &Pointer{}, + args: []args{ + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), want: true}, + {sg: SchemaGroupID("yy"), i: NewItemID(), f: FieldID("b"), want: true}, + {sg: SchemaGroupID("zz"), i: NewItemID(), f: FieldID("c"), want: true}, + }, + }, + { + name: "nil", + target: nil, + args: []args{ + {sg: SchemaGroupID("xx"), i: NewItemID(), f: FieldID("a"), 
want: false}, + {sg: SchemaGroupID("yy"), i: NewItemID(), f: FieldID("b"), want: false}, + {sg: SchemaGroupID("zz"), i: NewItemID(), f: FieldID("c"), want: false}, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + for i, a := range tt.args { + assert.Equal(t, a.want, tt.target.Test(a.sg, a.i, a.f), "test %d", i) + } + }) + } +} + +func TestPointer_TestItem(t *testing.T) { + iid := NewItemID() + + type args struct { + sg SchemaGroupID + i ItemID + } + tests := []struct { + name string + target *Pointer + args args + want bool + }{ + { + name: "true schema group only", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "true item only", + target: &Pointer{item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "true schema group and item", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "true empty", + target: &Pointer{}, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: true, + }, + { + name: "false schema group only", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("yy"), i: iid}, + want: false, + }, + { + name: "false item only", + target: &Pointer{item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: NewItemID()}, + want: false, + }, + { + name: "false schema group and item", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref(), item: iid.Ref()}, + args: args{sg: SchemaGroupID("xx"), i: NewItemID()}, + want: false, + }, + { + name: "false nil", + target: nil, + args: args{sg: SchemaGroupID("xx"), i: iid}, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.TestItem(tt.args.sg, tt.args.i)) + }) + } +} + +func TestPointer_TestSchemaGroup(t *testing.T) { + 
type args struct { + sg SchemaGroupID + } + tests := []struct { + name string + target *Pointer + args args + want bool + }{ + { + name: "true", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("xx")}, + want: true, + }, + { + name: "false", + target: &Pointer{schemaGroup: SchemaGroupID("xx").Ref()}, + args: args{sg: SchemaGroupID("yy")}, + want: false, + }, + { + name: "empty", + target: &Pointer{}, + args: args{sg: SchemaGroupID("xx")}, + want: true, + }, + { + name: "nil", + target: nil, + args: args{sg: SchemaGroupID("xx")}, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.TestSchemaGroup(tt.args.sg)) + }) + } +} + +func TestPointer_TestField(t *testing.T) { + type args struct { + f FieldID + } + tests := []struct { + name string + target *Pointer + args args + want bool + }{ + { + name: "true", + target: &Pointer{field: FieldID("xx").Ref()}, + args: args{f: FieldID("xx")}, + want: true, + }, + { + name: "false", + target: &Pointer{field: FieldID("xx").Ref()}, + args: args{f: FieldID("yy")}, + want: false, + }, + { + name: "empty", + target: &Pointer{}, + args: args{f: FieldID("xx")}, + want: true, + }, + { + name: "nil", + target: nil, + args: args{f: FieldID("xx")}, + want: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.TestField(tt.args.f)) + }) + } +} + +func TestProperty_MoveFields(t *testing.T) { + itemID1 := NewItemID() + itemID2 := NewItemID() + + type args struct { + from *Pointer + to *Pointer + } + tests := []struct { + name string + target *Property + args args + wantRes bool + wantFieldsFrom []*Field + wantFieldsTo []*Field + }{ + { + name: "same group", + target: testProperty1.Clone(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(testGroup1.SchemaGroup().Ref(), nil, 
FieldID("x").Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{testField1}, // changing field ID is not supported + wantFieldsTo: []*Field{testField1}, + }, + { + name: "group -> group", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroup().NewID().SchemaGroup("x").Fields([]*Field{testField2}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, testField1.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField2, testField1}, + }, + { + name: "group -> group (new)", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, testField1.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField1}, + }, + { + name: "group -> group (rename)", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroup().NewID().SchemaGroup("x").Fields([]*Field{}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, FieldID("y").Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField1}, // changing field ID is not supported + }, + { + name: "group -> group (field nil)", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + 
NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroup().NewID().SchemaGroup("x").Fields([]*Field{}).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, nil), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, + wantFieldsTo: []*Field{testField1}, + }, + { + name: "group -> list", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(testSchemaGroup1.ID()).Fields([]*Field{testField1}).MustBuild(), + NewGroupList().NewID().SchemaGroup(testSchemaGroup2.ID()).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(testSchemaGroup2.ID().Ref(), nil, testField1.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, // deleted + wantFieldsTo: []*Field{}, // not moved + }, + { + name: "list -> group", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroup().NewID().SchemaGroup(SchemaGroupID("x")).Fields([]*Field{testField1}).MustBuild(), + NewGroupList().NewID().SchemaGroup(SchemaGroupID("y")).Groups([]*Group{ + NewGroup().ID(itemID1).SchemaGroup(SchemaGroupID("y")).Fields([]*Field{testField2}).MustBuild(), + }).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(SchemaGroupID("y").Ref(), itemID1.Ref(), testField2.Field().Ref()), + to: NewPointer(SchemaGroupID("x").Ref(), nil, testField2.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, // deleted + wantFieldsTo: []*Field{testField1}, // not moved + }, + { + name: "list -> list", + target: New().NewID().Schema(testSchema1.ID()).Scene(id.NewSceneID()).Items([]Item{ + NewGroupList().NewID().SchemaGroup(SchemaGroupID("x")).Groups([]*Group{ + 
NewGroup().ID(itemID1).SchemaGroup(SchemaGroupID("x")).Fields([]*Field{testField1}).MustBuild(), + }).MustBuild(), + NewGroupList().NewID().SchemaGroup(SchemaGroupID("y")).Groups([]*Group{ + NewGroup().ID(itemID2).SchemaGroup(SchemaGroupID("y")).Fields([]*Field{testField2}).MustBuild(), + }).MustBuild(), + }).MustBuild(), + args: args{ + from: NewPointer(SchemaGroupID("x").Ref(), itemID1.Ref(), testField1.Field().Ref()), + to: NewPointer(SchemaGroupID("y").Ref(), itemID2.Ref(), testField2.Field().Ref()), + }, + wantRes: true, + wantFieldsFrom: []*Field{}, // deleted + wantFieldsTo: []*Field{testField2}, // not moved + }, + { + name: "nil", + target: nil, + args: args{ + from: NewPointer(testGroup1.SchemaGroup().Ref(), nil, testField1.Field().Ref()), + to: NewPointer(testGroup1.SchemaGroup().Ref(), nil, FieldID("x").Ref()), + }, + wantRes: false, + wantFieldsFrom: nil, + wantFieldsTo: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.wantRes, tt.target.MoveFields(tt.args.from, tt.args.to)) + assert.Equal(t, tt.wantFieldsFrom, tt.target.Fields(tt.args.from.AllFields())) + assert.Equal(t, tt.wantFieldsTo, tt.target.Fields(tt.args.to.AllFields())) + }) + } +} diff --git a/pkg/property/schema.go b/pkg/property/schema.go index f6cd4f2b4..e7bb2925a 100644 --- a/pkg/property/schema.go +++ b/pkg/property/schema.go @@ -3,13 +3,13 @@ package property type Schema struct { id SchemaID version int - groups []*SchemaGroup + groups *SchemaGroupList linkable LinkableFields } type LinkableFields struct { - LatLng *Pointer - URL *Pointer + LatLng *SchemaFieldPointer + URL *SchemaFieldPointer } func (p *Schema) ID() SchemaID { @@ -27,102 +27,11 @@ func (p *Schema) Version() int { return p.version } -func (p *Schema) Fields() []*SchemaField { +func (p *Schema) Groups() *SchemaGroupList { if p == nil { return nil } - fields := []*SchemaField{} - for _, g := range p.groups { - fields = append(fields, 
g.Fields()...) - } - return fields -} - -func (p *Schema) Field(id FieldID) *SchemaField { - if p == nil { - return nil - } - for _, g := range p.groups { - if f := g.Field(id); f != nil { - return f - } - } - return nil -} - -func (p *Schema) FieldByPointer(ptr *Pointer) *SchemaField { - if p == nil { - return nil - } - g := p.GroupByPointer(ptr) - if g == nil { - return nil - } - return g.FieldByPointer(ptr) -} - -func (p *Schema) Groups() []*SchemaGroup { - if p == nil { - return nil - } - return append([]*SchemaGroup{}, p.groups...) -} - -func (p *Schema) Group(id SchemaGroupID) *SchemaGroup { - if p == nil { - return nil - } - for _, f := range p.groups { - if f.ID() == id { - return f - } - } - return nil -} - -func (p *Schema) GroupByField(id FieldID) *SchemaGroup { - if p == nil { - return nil - } - for _, f := range p.groups { - if f.HasField(id) { - return f - } - } - return nil -} - -func (p *Schema) GroupByPointer(ptr *Pointer) *SchemaGroup { - if p == nil { - return nil - } - - if gid, ok := ptr.ItemBySchemaGroup(); ok { - return p.Group(gid) - } - if fid, ok := ptr.Field(); ok { - for _, g := range p.groups { - if g.HasField(fid) { - return g - } - } - } - - return nil -} - -func (s *Schema) DetectDuplicatedFields() []FieldID { - duplicated := []FieldID{} - ids := map[FieldID]struct{}{} - for _, f := range s.Fields() { - i := f.ID() - if _, ok := ids[i]; ok { - duplicated = append(duplicated, i) - return duplicated - } - ids[i] = struct{}{} - } - return nil + return p.groups } func (p *Schema) LinkableFields() LinkableFields { @@ -132,10 +41,10 @@ func (p *Schema) LinkableFields() LinkableFields { return p.linkable.Clone() } -func (l LinkableFields) Clone() LinkableFields { +func (p LinkableFields) Clone() LinkableFields { return LinkableFields{ - LatLng: l.LatLng.Clone(), - URL: l.URL.Clone(), + LatLng: p.LatLng.Clone(), + URL: p.URL.Clone(), } } @@ -144,14 +53,32 @@ func (l LinkableFields) Validate(s *Schema) bool { return false } if l.LatLng != nil 
{ - if f := s.FieldByPointer(l.LatLng); f == nil { + if f := s.Groups().Field(l.LatLng.Field); f == nil { return false } } if l.URL != nil { - if f := s.FieldByPointer(l.URL); f == nil { + if f := s.Groups().Field(l.URL.Field); f == nil { return false } } return true } + +func (l LinkableFields) PointerByType(ty ValueType) *SchemaFieldPointer { + switch ty { + case ValueTypeLatLng: + return l.LatLng + case ValueTypeURL: + return l.URL + } + return nil +} + +func (l LinkableFields) FieldByType(ty ValueType) *FieldID { + p := l.PointerByType(ty) + if p == nil { + return nil + } + return p.Field.Ref() +} diff --git a/pkg/property/schema_builder.go b/pkg/property/schema_builder.go index 82c01de75..d0873f914 100644 --- a/pkg/property/schema_builder.go +++ b/pkg/property/schema_builder.go @@ -2,7 +2,6 @@ package property import ( "errors" - "fmt" ) var ( @@ -11,7 +10,6 @@ var ( ErrInvalidValue = errors.New("invalid value") ErrInvalidPropertyLinkableField = errors.New("invalid property linkable field") ErrInvalidVersion = errors.New("invalid version") - ErrDuplicatedField = errors.New("duplicated field") ) type SchemaBuilder struct { @@ -26,9 +24,6 @@ func (b *SchemaBuilder) Build() (*Schema, error) { if b.p.id.IsNil() { return nil, ErrInvalidID } - if d := b.p.DetectDuplicatedFields(); len(d) > 0 { - return nil, fmt.Errorf("%s: %s %s", ErrDuplicatedField, b.p.id, d) - } if !b.p.linkable.Validate(b.p) { return nil, ErrInvalidPropertyLinkableField } @@ -53,20 +48,8 @@ func (b *SchemaBuilder) Version(version int) *SchemaBuilder { return b } -func (b *SchemaBuilder) Groups(groups []*SchemaGroup) *SchemaBuilder { - newGroups := []*SchemaGroup{} - ids := map[SchemaGroupID]struct{}{} - for _, f := range groups { - if f == nil { - continue - } - if _, ok := ids[f.ID()]; ok { - continue - } - ids[f.ID()] = struct{}{} - newGroups = append(newGroups, f) - } - b.p.groups = newGroups +func (b *SchemaBuilder) Groups(groups *SchemaGroupList) *SchemaBuilder { + b.p.groups = groups 
return b } diff --git a/pkg/property/schema_builder_test.go b/pkg/property/schema_builder_test.go index 1343943e8..447192ca7 100644 --- a/pkg/property/schema_builder_test.go +++ b/pkg/property/schema_builder_test.go @@ -1,7 +1,6 @@ package property import ( - "fmt" "testing" "github.com/stretchr/testify/assert" @@ -10,12 +9,11 @@ import ( func TestSchemaBuilder_Build(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Fields([]*SchemaField{sf}).MustBuild() type args struct { ID SchemaID Version int - Groups []*SchemaGroup + Groups *SchemaGroupList Linkable LinkableFields } @@ -33,29 +31,21 @@ func TestSchemaBuilder_Build(t *testing.T) { Name: "fail: invalid linkable field", Args: args{ ID: MustSchemaID("xx~1.0.0/aa"), - Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, + Linkable: LinkableFields{LatLng: &SchemaFieldPointer{Field: FieldID("xx")}}, }, Err: ErrInvalidPropertyLinkableField, }, - { - Name: "fail: duplicated field", - Args: args{ - ID: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg, sg2}, - }, - Err: fmt.Errorf("%s: %s %s", ErrDuplicatedField, MustSchemaID("xx~1.0.0/aa"), []FieldID{"aa"}), - }, { Name: "success", Args: args{ ID: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg}, + Groups: NewSchemaGroupList([]*SchemaGroup{sg}), Version: 1, }, Expected: &Schema{ id: MustSchemaID("xx~1.0.0/aa"), version: 1, - groups: []*SchemaGroup{sg}, + groups: NewSchemaGroupList([]*SchemaGroup{sg}), }, }, } @@ -83,12 +73,11 @@ func TestSchemaBuilder_Build(t *testing.T) { func TestSchemaBuilder_MustBuild(t *testing.T) { sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() - sg2 := NewSchemaGroup().ID("daa").Fields([]*SchemaField{sf}).MustBuild() type args struct { ID SchemaID Version int - Groups 
[]*SchemaGroup + Groups *SchemaGroupList Linkable LinkableFields } @@ -106,29 +95,21 @@ func TestSchemaBuilder_MustBuild(t *testing.T) { Name: "fail: invalid linkable field", Args: args{ ID: MustSchemaID("xx~1.0.0/aa"), - Linkable: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, + Linkable: LinkableFields{LatLng: &SchemaFieldPointer{Field: FieldID("xx")}}, }, Err: ErrInvalidPropertyLinkableField.Error(), }, - { - Name: "fail: duplicated field", - Args: args{ - ID: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg, sg2}, - }, - Err: fmt.Sprintf("%s: %s %s", ErrDuplicatedField, MustSchemaID("xx~1.0.0/aa"), []FieldID{"aa"}), - }, { Name: "success", Args: args{ ID: MustSchemaID("xx~1.0.0/aa"), - Groups: []*SchemaGroup{sg}, + Groups: NewSchemaGroupList([]*SchemaGroup{sg}), Version: 1, }, Expected: &Schema{ id: MustSchemaID("xx~1.0.0/aa"), version: 1, - groups: []*SchemaGroup{sg}, + groups: NewSchemaGroupList([]*SchemaGroup{sg}), }, }, } diff --git a/pkg/property/schema_field_test.go b/pkg/property/schema_field_test.go index 46f9464f0..8cf0ec5ea 100644 --- a/pkg/property/schema_field_test.go +++ b/pkg/property/schema_field_test.go @@ -7,6 +7,12 @@ import ( "github.com/stretchr/testify/assert" ) +var ( + testSchemaField1 = NewSchemaField().ID("a").Type(ValueTypeString).MustBuild() + testSchemaField2 = NewSchemaField().ID("b").Type(ValueTypeNumber).MustBuild() + testSchemaField3 = NewSchemaField().ID("c").Type(ValueTypeLatLng).MustBuild() +) + func TestSchemaField_MinMax(t *testing.T) { getFloatRef := func(f float64) *float64 { return &f diff --git a/pkg/property/schema_group_list.go b/pkg/property/schema_group_list.go new file mode 100644 index 000000000..ae119ef21 --- /dev/null +++ b/pkg/property/schema_group_list.go @@ -0,0 +1,148 @@ +package property + +import "github.com/reearth/reearth-backend/pkg/id" + +type SchemaGroupList struct { + groups []*SchemaGroup +} + +func NewSchemaGroupList(p []*SchemaGroup) *SchemaGroupList { + sgl := 
&SchemaGroupList{ + groups: append(p[:0:0], p...), + } + if len(sgl.duplicatedGroups()) > 0 { + return nil + } + return sgl +} + +func (p *SchemaGroupList) Len() int { + if p == nil { + return 0 + } + return len(p.groups) +} + +func (p *SchemaGroupList) Groups() []*SchemaGroup { + if p == nil { + return nil + } + return append(p.groups[:0:0], p.groups...) +} + +func (p *SchemaGroupList) Fields() []*SchemaField { + if p == nil { + return nil + } + + fields := []*SchemaField{} + for _, g := range p.groups { + fields = append(fields, g.Fields()...) + } + return fields +} + +func (p *SchemaGroupList) GroupAndFields() []SchemaGroupAndField { + if p == nil { + return nil + } + fields := []SchemaGroupAndField{} + for _, g := range p.groups { + for _, f := range g.Fields() { + fields = append(fields, SchemaGroupAndField{Group: g, Field: f}) + } + } + return fields +} + +func (p *SchemaGroupList) Field(id id.PropertySchemaFieldID) *SchemaField { + if p == nil { + return nil + } + + for _, g := range p.groups { + if f := g.Field(id); f != nil { + return f + } + } + return nil +} + +func (p *SchemaGroupList) Group(id id.PropertySchemaGroupID) *SchemaGroup { + if p == nil { + return nil + } + + for _, f := range p.groups { + if f.ID() == id { + return f + } + } + return nil +} + +func (p *SchemaGroupList) GroupByField(id id.PropertySchemaFieldID) *SchemaGroup { + if p == nil { + return nil + } + + for _, f := range p.groups { + if f.HasField(id) { + return f + } + } + + return nil +} + +func (p *SchemaGroupList) GroupAndField(f FieldID) *SchemaGroupAndField { + if p == nil { + return nil + } + for _, g := range p.groups { + if gf := g.Field(f); gf != nil { + return &SchemaGroupAndField{Group: g, Field: gf} + } + } + return nil +} + +func (s *SchemaGroupList) duplicatedGroups() []SchemaGroupID { + if s == nil { + return nil + } + + var duplicated []SchemaGroupID + ids := map[SchemaGroupID]struct{}{} + for _, f := range s.Groups() { + i := f.ID() + if _, ok := ids[i]; ok { + 
duplicated = append(duplicated, i) + } + ids[i] = struct{}{} + } + return duplicated +} + +type SchemaGroupAndField struct { + Group *SchemaGroup + Field *SchemaField +} + +func (gf SchemaGroupAndField) IsEmpty() bool { + return gf.Group == nil && gf.Field == nil +} + +func (gf SchemaGroupAndField) Pointer() *Pointer { + if gf.Group == nil && gf.Field == nil { + return nil + } + return NewPointer(gf.Group.ID().Ref(), nil, gf.Field.ID().Ref()) +} + +func (f SchemaGroupAndField) SchemaFieldPointer() SchemaFieldPointer { + return SchemaFieldPointer{ + SchemaGroup: f.Group.ID(), + Field: f.Field.ID(), + } +} diff --git a/pkg/property/schema_group_list_test.go b/pkg/property/schema_group_list_test.go new file mode 100644 index 000000000..e9223a897 --- /dev/null +++ b/pkg/property/schema_group_list_test.go @@ -0,0 +1,377 @@ +package property + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +var ( + testSchemaGroupList1 = NewSchemaGroupList([]*SchemaGroup{testSchemaGroup1, testSchemaGroup2}) +) + +func TestNewSchemaGroupList(t *testing.T) { + type args struct { + p []*SchemaGroup + } + tests := []struct { + name string + args args + want *SchemaGroupList + }{ + { + name: "ok", + args: args{ + p: []*SchemaGroup{testSchemaGroup1, testSchemaGroup2}, + }, + want: &SchemaGroupList{groups: []*SchemaGroup{testSchemaGroup1, testSchemaGroup2}}, + }, + { + name: "duplicated groups", + args: args{ + p: []*SchemaGroup{testSchemaGroup1, testSchemaGroup1}, + }, + want: nil, + }, + { + name: "nil", + args: args{ + p: nil, + }, + want: &SchemaGroupList{groups: nil}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, NewSchemaGroupList(tt.args.p)) + }) + } +} + +func TestSchemaGroupList_Field(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + input id.PropertySchemaFieldID + want *SchemaField + }{ + { + name: "nil schema", + }, + { + name: 
"found", + target: testSchemaGroupList1, + input: testSchemaField1.ID(), + want: testSchemaField1, + }, + { + name: "not found", + target: testSchemaGroupList1, + input: id.PropertySchemaFieldID("zz"), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Field(tt.input)) + }) + } +} + +func TestSchemaGroupList_Group(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + input SchemaGroupID + want *SchemaGroup + }{ + { + name: "nil schema", + target: nil, + input: testSchemaGroup1.ID(), + want: nil, + }, + { + name: "found", + target: testSchemaGroupList1, + input: testSchemaGroup1.ID(), + want: testSchemaGroup1, + }, + { + name: "not found", + target: testSchemaGroupList1, + input: SchemaGroupID("zz"), + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Group(tt.input)) + }) + } +} + +func TestSchemaGroupList_GroupByField(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + input FieldID + want *SchemaGroup + }{ + { + name: "nil schema", + target: nil, + input: testSchemaField1.ID(), + want: nil, + }, + { + name: "found", + target: testSchemaGroupList1, + input: testSchemaField1.ID(), + want: testSchemaGroup1, + }, + { + name: "not found", + target: testSchemaGroupList1, + input: FieldID("zz"), + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.GroupByField(tt.input)) + }) + } +} + +func TestSchemaGroupList_GroupAndFields(t *testing.T) { + tests := []struct { + name string + target *SchemaGroupList + want []SchemaGroupAndField + }{ + { + name: "ok", + target: testSchemaGroupList1, + want: []SchemaGroupAndField{ + {Group: testSchemaGroup1, Field: testSchemaField1}, + {Group: testSchemaGroup1, Field: testSchemaField2}, + {Group: 
testSchemaGroup2, Field: testSchemaField3}, + }, + }, + { + name: "empty", + target: &SchemaGroupList{}, + want: []SchemaGroupAndField{}, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.GroupAndFields() + assert.Equal(t, tt.want, res) + if len(tt.want) > 0 { + for i, gf := range res { + assert.Same(t, tt.want[i].Group, gf.Group) + assert.Same(t, tt.want[i].Field, gf.Field) + } + } + }) + } +} + +func TestSchemaGroupList_GroupAndField(t *testing.T) { + type args struct { + f FieldID + } + tests := []struct { + name string + args args + target *SchemaGroupList + want *SchemaGroupAndField + }{ + { + name: "ok1", + target: testSchemaGroupList1, + args: args{f: testSchemaField1.ID()}, + want: &SchemaGroupAndField{Group: testSchemaGroup1, Field: testSchemaField1}, + }, + { + name: "ok2", + target: testSchemaGroupList1, + args: args{f: testSchemaField2.ID()}, + want: &SchemaGroupAndField{Group: testSchemaGroup1, Field: testSchemaField2}, + }, + { + name: "ok3", + target: testSchemaGroupList1, + args: args{f: testSchemaField3.ID()}, + want: &SchemaGroupAndField{Group: testSchemaGroup2, Field: testSchemaField3}, + }, + { + name: "not found", + target: testSchemaGroupList1, + args: args{f: "ddd"}, + want: nil, + }, + { + name: "empty", + target: &SchemaGroupList{}, + want: nil, + }, + { + name: "nil", + target: nil, + want: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + res := tt.target.GroupAndField(tt.args.f) + assert.Equal(t, tt.want, res) + if tt.want != nil { + assert.Same(t, tt.want.Group, res.Group) + assert.Same(t, tt.want.Field, res.Field) + } + }) + } +} + +func TestSchemaGroupAndField_IsEmpty(t *testing.T) { + tests := []struct { + name string + target SchemaGroupAndField + want bool + }{ + { + name: "present", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: 
testSchemaField1, + }, + want: false, + }, + { + name: "empty", + target: SchemaGroupAndField{}, + want: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gf := SchemaGroupAndField{ + Group: tt.target.Group, + Field: tt.target.Field, + } + assert.Equal(t, tt.want, gf.IsEmpty()) + }) + } +} + +func TestSchemaGroupAndField_Pointer(t *testing.T) { + tests := []struct { + name string + target SchemaGroupAndField + want *Pointer + }{ + { + name: "ok", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: testSchemaField1, + }, + want: &Pointer{ + schemaGroup: testSchemaGroup1.ID().Ref(), + item: nil, + field: testSchemaField1.ID().Ref(), + }, + }, + { + name: "nil group", + target: SchemaGroupAndField{ + Group: nil, + Field: testSchemaField1, + }, + want: &Pointer{ + schemaGroup: nil, + item: nil, + field: testSchemaField1.ID().Ref(), + }, + }, + { + name: "nil field", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: nil, + }, + want: &Pointer{ + schemaGroup: testSchemaGroup1.ID().Ref(), + item: nil, + field: nil, + }, + }, + { + name: "empty", + target: SchemaGroupAndField{}, + want: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.Pointer()) + }) + } +} + +func TestSchemaGroupAndField_SchemaFieldPointer(t *testing.T) { + tests := []struct { + name string + target SchemaGroupAndField + want SchemaFieldPointer + }{ + { + name: "ok", + target: SchemaGroupAndField{ + Group: testSchemaGroup1, + Field: testSchemaField1, + }, + want: SchemaFieldPointer{ + SchemaGroup: testSchemaGroup1.ID(), + Field: testSchemaField1.ID(), + }, + }, + { + name: "empty", + target: SchemaGroupAndField{}, + want: SchemaFieldPointer{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.target.SchemaFieldPointer()) + }) + } +} diff --git a/pkg/property/schema_group_test.go 
b/pkg/property/schema_group_test.go index 8b7ed9dbd..383d69107 100644 --- a/pkg/property/schema_group_test.go +++ b/pkg/property/schema_group_test.go @@ -7,6 +7,11 @@ import ( "github.com/stretchr/testify/assert" ) +var ( + testSchemaGroup1 = NewSchemaGroup().ID("aa").Fields([]*SchemaField{testSchemaField1, testSchemaField2}).MustBuild() + testSchemaGroup2 = NewSchemaGroup().ID("bb").Fields([]*SchemaField{testSchemaField3}).IsList(true).MustBuild() +) + func TestSchemaGroup(t *testing.T) { scid := SchemaGroupID("aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() diff --git a/pkg/property/schema_pointer.go b/pkg/property/schema_pointer.go new file mode 100644 index 000000000..8d0431c1e --- /dev/null +++ b/pkg/property/schema_pointer.go @@ -0,0 +1,18 @@ +package property + +type SchemaFieldPointer struct { + SchemaGroup SchemaGroupID + Field FieldID +} + +func (p SchemaFieldPointer) Pointer() *Pointer { + return PointFieldBySchemaGroup(p.SchemaGroup, p.Field) +} + +func (p *SchemaFieldPointer) Clone() *SchemaFieldPointer { + if p == nil { + return p + } + p2 := *p + return &p2 +} diff --git a/pkg/property/schema_pointer_test.go b/pkg/property/schema_pointer_test.go new file mode 100644 index 000000000..60643e153 --- /dev/null +++ b/pkg/property/schema_pointer_test.go @@ -0,0 +1,36 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaFieldPointer_Pointer(t *testing.T) { + tests := []struct { + name string + target *SchemaFieldPointer + want *Pointer + }{ + { + name: "ok", + target: &SchemaFieldPointer{ + SchemaGroup: SchemaGroupID("a"), + Field: FieldID("b"), + }, + want: &Pointer{ + schemaGroup: SchemaGroupID("a").Ref(), + item: nil, + field: FieldID("b").Ref(), + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.Pointer()) + }) + } +} diff --git a/pkg/property/schema_test.go 
b/pkg/property/schema_test.go index 56deca6c8..4c01cf2af 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -3,102 +3,18 @@ package property import ( "testing" + "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) -func TestSchema_Nil(t *testing.T) { - var s *Schema - assert.Nil(t, s.IDRef()) - assert.Nil(t, nil, s.Fields()) - assert.Nil(t, nil, s.Groups()) - assert.Equal(t, LinkableFields{}, s.LinkableFields()) -} - -func TestSchema_Field(t *testing.T) { - sid := MustSchemaID("xx~1.0.0/aa") - sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() - - tests := []struct { - Name string - S *Schema - PTR *Pointer - Input FieldID - Expected *SchemaField - }{ - { - Name: "nil schema", - }, - { - Name: "found", - S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - PTR: NewPointer(nil, nil, sf.ID().Ref()), - Input: sf.ID(), - Expected: sf, - }, - { - Name: "not found", - S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - PTR: NewPointer(nil, nil, FieldID("zz").Ref()), - Input: FieldID("zz"), - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.Expected, tc.S.Field(tc.Input)) - assert.Equal(t, tc.Expected, tc.S.FieldByPointer(tc.PTR)) - }) - } -} - -func TestSchema_Group(t *testing.T) { - sid := MustSchemaID("xx~1.0.0/aa") - sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() - sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() - - tests := []struct { - Name string - S *Schema - PTR *Pointer - Input SchemaGroupID - InputField FieldID - Expected *SchemaGroup - }{ - { - Name: "nil schema", - }, - { - Name: "found", - S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - PTR: NewPointer(sg.IDRef(), nil, sf.ID().Ref()), - InputField: sf.ID(), - Input: sg.ID(), - Expected: sg, - }, - { - Name: 
"not found", - S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - PTR: NewPointer(nil, nil, FieldID("zz").Ref()), - Input: SchemaGroupID("zz"), - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.Expected, tc.S.Group(tc.Input)) - assert.Equal(t, tc.Expected, tc.S.GroupByPointer(tc.PTR)) - assert.Equal(t, tc.Expected, tc.S.GroupByField(tc.InputField)) - }) - } -} +var ( + testSchema1 = NewSchema().ID(id.MustPropertySchemaID("xx~1.0.0/aa")).Groups( + NewSchemaGroupList([]*SchemaGroup{testSchemaGroup1, testSchemaGroup2}), + ).MustBuild() +) -func TestSchema_DetectDuplicatedFields(t *testing.T) { - sid := MustSchemaID("xx~1.0.0/aa") +func TestLinkableField_Validate(t *testing.T) { + sid := id.MustPropertySchemaID("xx~1.0.0/aa") sf := NewSchemaField().ID("aa").Type(ValueTypeString).MustBuild() sg := NewSchemaGroup().ID("aaa").Fields([]*SchemaField{sf}).MustBuild() @@ -109,34 +25,45 @@ func TestSchema_DetectDuplicatedFields(t *testing.T) { Expected bool }{ { - Name: "nil schema", + Name: "nil schema", + S: nil, + LF: LinkableFields{}, + Expected: false, }, { - Name: "invalid: URL", - S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - LF: LinkableFields{URL: NewPointer(nil, nil, FieldID("xx").Ref())}, + Name: "invalid: URL", + S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + LF: LinkableFields{ + URL: &SchemaFieldPointer{ + Field: id.PropertySchemaFieldID("xx"), + }, + }, Expected: false, }, { - Name: "invalid: Lng", - S: NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), - LF: LinkableFields{LatLng: NewPointer(nil, nil, FieldID("xx").Ref())}, + Name: "invalid: Lng", + S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), + LF: LinkableFields{ + LatLng: &SchemaFieldPointer{ + Field: id.PropertySchemaFieldID("xx"), + }, + }, Expected: false, }, { - Name: "success", - S: 
NewSchema().ID(sid).Groups([]*SchemaGroup{sg}).MustBuild(), + Name: "empty", + S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), LF: LinkableFields{}, Expected: true, }, } - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { t.Parallel() - res := tc.LF.Validate(tc.S) - assert.Equal(t, tc.Expected, res) + res := tt.LF.Validate(tt.S) + assert.Equal(t, tt.Expected, res) }) } } From 67a618eaf79114384d1c68400d7c14471d8f3520 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 9 Feb 2022 20:16:11 +0900 Subject: [PATCH 147/253] feat: support 3rd party plugin translation (#109) * add i18n.String.WithDefault, rename i18n.String.Copy * add id constructors to property * make schema group fields nil * change schema id pattern * fix test * refactor manifest translation * support 3rd party plugin translation --- internal/infrastructure/fs/common.go | 26 -- internal/infrastructure/fs/file_test.go | 20 +- internal/infrastructure/fs/plugin.go | 63 ++++- .../infrastructure/fs/plugin_repository.go | 2 +- internal/infrastructure/fs/plugin_test.go | 39 +++ internal/infrastructure/fs/property_schema.go | 2 +- pkg/builtin/main.go | 14 +- pkg/i18n/string.go | 34 ++- pkg/i18n/string_test.go | 161 +++++++++++-- pkg/id/property_schema.go | 4 +- pkg/id/property_schema_test.go | 32 ++- pkg/plugin/builder.go | 4 +- pkg/plugin/extension.go | 12 +- pkg/plugin/extension_builder.go | 4 +- pkg/plugin/manifest/convert.go | 133 ++++++---- pkg/plugin/manifest/convert_test.go | 194 ++++++++++----- pkg/plugin/manifest/diff_test.go | 2 +- pkg/plugin/manifest/parser.go | 12 +- pkg/plugin/manifest/parser_test.go | 8 +- pkg/plugin/manifest/parser_translation.go | 139 +---------- .../manifest/parser_translation_test.go | 68 +----- pkg/plugin/manifest/schema_translation.go | 228 ++++++++++++++++++ .../manifest/schema_translation_test.go | 188 +++++++++++++++ 
.../manifest/testdata/translation_merge.yml | 34 --- pkg/plugin/plugin.go | 22 +- pkg/plugin/plugin_test.go | 12 - pkg/plugin/pluginpack/package.go | 39 ++- pkg/plugin/pluginpack/package_test.go | 18 +- pkg/plugin/pluginpack/testdata/test.zip | Bin 789 -> 1804 bytes .../pluginpack/testdata/test/reearth_ja.yml | 1 + .../testdata/test/reearth_zh-CN.yml | 1 + pkg/property/id.go | 2 + pkg/property/schema_field.go | 12 +- pkg/property/schema_field_builder.go | 4 +- pkg/property/schema_group.go | 4 +- pkg/property/schema_group_builder.go | 7 +- 36 files changed, 1062 insertions(+), 483 deletions(-) create mode 100644 internal/infrastructure/fs/plugin_test.go create mode 100644 pkg/plugin/manifest/schema_translation_test.go delete mode 100644 pkg/plugin/manifest/testdata/translation_merge.yml create mode 100644 pkg/plugin/pluginpack/testdata/test/reearth_ja.yml create mode 100644 pkg/plugin/pluginpack/testdata/test/reearth_zh-CN.yml diff --git a/internal/infrastructure/fs/common.go b/internal/infrastructure/fs/common.go index c4bfccff1..0eb93b450 100644 --- a/internal/infrastructure/fs/common.go +++ b/internal/infrastructure/fs/common.go @@ -1,34 +1,8 @@ package fs -import ( - "path/filepath" - - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin/manifest" - "github.com/reearth/reearth-backend/pkg/rerror" - "github.com/spf13/afero" -) - const ( assetDir = "assets" pluginDir = "plugins" publishedDir = "published" manifestFilePath = "reearth.yml" ) - -func readManifest(fs afero.Fs, pid id.PluginID) (*manifest.Manifest, error) { - f, err := fs.Open(filepath.Join(pluginDir, pid.String(), manifestFilePath)) - if err != nil { - return nil, rerror.ErrInternalBy(err) - } - defer func() { - _ = f.Close() - }() - - m, err := manifest.Parse(f, nil) - if err != nil { - return nil, err - } - - return m, nil -} diff --git a/internal/infrastructure/fs/file_test.go b/internal/infrastructure/fs/file_test.go index a57913d33..065a8bfa9 100644 --- 
a/internal/infrastructure/fs/file_test.go +++ b/internal/infrastructure/fs/file_test.go @@ -249,15 +249,17 @@ func TestGetAssetFileURL(t *testing.T) { } func mockFs() afero.Fs { + files := map[string]string{ + "assets/xxx.txt": "hello", + "plugins/aaa~1.0.0/foo.js": "bar", + "published/s.json": "{}", + } + fs := afero.NewMemMapFs() - f, _ := fs.Create("assets/xxx.txt") - _, _ = f.WriteString("hello") - _ = f.Close() - f, _ = fs.Create("plugins/aaa~1.0.0/foo.js") - _, _ = f.WriteString("bar") - _ = f.Close() - f, _ = fs.Create("published/s.json") - _, _ = f.WriteString("{}") - _ = f.Close() + for name, content := range files { + f, _ := fs.Create(name) + _, _ = f.WriteString(content) + _ = f.Close() + } return fs } diff --git a/internal/infrastructure/fs/plugin.go b/internal/infrastructure/fs/plugin.go index a201b5146..f5d4c5686 100644 --- a/internal/infrastructure/fs/plugin.go +++ b/internal/infrastructure/fs/plugin.go @@ -3,10 +3,13 @@ package fs import ( "context" "errors" + "path/filepath" + "regexp" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/spf13/afero" ) @@ -22,7 +25,7 @@ func NewPlugin(fs afero.Fs) repo.Plugin { } func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { - m, err := readManifest(r.fs, pid) + m, err := readPluginManifest(r.fs, pid) if err != nil { return nil, err } @@ -54,3 +57,61 @@ func (r *pluginRepo) Save(ctx context.Context, p *plugin.Plugin) error { func (r *pluginRepo) Remove(ctx context.Context, pid id.PluginID) error { return rerror.ErrInternalBy(errors.New("read only")) } + +var translationFileNameRegexp = regexp.MustCompile(`reearth_([a-zA-Z]+(?:-[a-zA-Z]+)?).yml`) + +func readPluginManifest(fs afero.Fs, pid id.PluginID) (*manifest.Manifest, error) { + 
base := filepath.Join(pluginDir, pid.String()) + translationMap, err := readPluginTranslation(fs, base) + if err != nil { + return nil, err + } + + f, err := fs.Open(filepath.Join(base, manifestFilePath)) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = f.Close() + }() + + m, err := manifest.Parse(f, nil, translationMap.TranslatedRef()) + if err != nil { + return nil, err + } + + return m, nil +} + +func readPluginTranslation(fs afero.Fs, base string) (manifest.TranslationMap, error) { + d, err := afero.ReadDir(fs, base) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + + translationMap := manifest.TranslationMap{} + for _, e := range d { + if e.IsDir() { + continue + } + name := e.Name() + lang := translationFileNameRegexp.FindStringSubmatch(name) + if len(lang) == 0 { + continue + } + langfile, err := fs.Open(filepath.Join(base, name)) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = langfile.Close() + }() + t, err := manifest.ParseTranslation(langfile) + if err != nil { + return nil, err + } + translationMap[lang[1]] = t + } + + return translationMap, nil +} diff --git a/internal/infrastructure/fs/plugin_repository.go b/internal/infrastructure/fs/plugin_repository.go index aae3a63e0..04c8083ab 100644 --- a/internal/infrastructure/fs/plugin_repository.go +++ b/internal/infrastructure/fs/plugin_repository.go @@ -26,5 +26,5 @@ func (r *pluginRepository) Data(ctx context.Context, id id.PluginID) (file.Itera } func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manifest.Manifest, error) { - return readManifest(r.fs, id) + return readPluginManifest(r.fs, id) } diff --git a/internal/infrastructure/fs/plugin_test.go b/internal/infrastructure/fs/plugin_test.go new file mode 100644 index 000000000..67bd4cb85 --- /dev/null +++ b/internal/infrastructure/fs/plugin_test.go @@ -0,0 +1,39 @@ +package fs + +import ( + "context" + "testing" + + 
"github.com/reearth/reearth-backend/pkg/i18n" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestPlugin(t *testing.T) { + ctx := context.Background() + fs := NewPlugin(mockPluginFS()) + p, err := fs.FindByID(ctx, plugin.MustID("testplugin~1.0.0"), nil) + assert.NoError(t, err) + assert.Equal(t, plugin.New().ID(plugin.MustID("testplugin~1.0.0")).Name(i18n.String{ + "en": "testplugin", + "ja": "ใƒ†ใ‚นใƒˆใƒ—ใƒฉใ‚ฐใ‚คใƒณ", + "zh-CN": "ๆต‹่ฏ•ๆ’ไปถ", + }).MustBuild(), p) +} + +func mockPluginFS() afero.Fs { + files := map[string]string{ + "plugins/testplugin~1.0.0/reearth.yml": `{ "id": "testplugin", "version": "1.0.0", "name": "testplugin" }`, + "plugins/testplugin~1.0.0/reearth_ja.yml": `{ "name": "ใƒ†ใ‚นใƒˆใƒ—ใƒฉใ‚ฐใ‚คใƒณ" }`, + "plugins/testplugin~1.0.0/reearth_zh-CN.yml": `{ "name": "ๆต‹่ฏ•ๆ’ไปถ" }`, + } + + fs := afero.NewMemMapFs() + for name, content := range files { + f, _ := fs.Create(name) + _, _ = f.WriteString(content) + _ = f.Close() + } + return fs +} diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go index 2ae587a7d..d1d92fb67 100644 --- a/internal/infrastructure/fs/property_schema.go +++ b/internal/infrastructure/fs/property_schema.go @@ -22,7 +22,7 @@ func NewPropertySchema(fs afero.Fs) repo.PropertySchema { } func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (*property.Schema, error) { - m, err := readManifest(r.fs, i.Plugin()) + m, err := readPluginManifest(r.fs, i.Plugin()) if err != nil { return nil, err } diff --git a/pkg/builtin/main.go b/pkg/builtin/main.go index e64c659c2..b2979386b 100644 --- a/pkg/builtin/main.go +++ b/pkg/builtin/main.go @@ -15,14 +15,16 @@ var pluginManifestJSON []byte //go:embed manifest_ja.yml var pluginManifestJSON_ja []byte -var pluginTranslationList = map[string]*manifest.TranslationRoot{"ja": manifest.MustParseTranslationFromBytes(pluginManifestJSON_ja)} 
-var pluginManifest = manifest.MergeManifestTranslation(manifest.MustParseSystemFromBytes(pluginManifestJSON, nil), pluginTranslationList) - -// MUST NOT CHANGE -var PropertySchemaIDVisualizerCesium = property.MustSchemaID("reearth/cesium") +var pluginTranslationList = manifest.TranslationMap{ + "ja": manifest.MustParseTranslationFromBytes(pluginManifestJSON_ja), +} +var pluginManifest = manifest.MustParseSystemFromBytes(pluginManifestJSON, nil, pluginTranslationList.TranslatedRef()) // MUST NOT CHANGE -var PropertySchemaIDInfobox = property.MustSchemaID("reearth/infobox") +var ( + PropertySchemaIDVisualizerCesium = property.MustSchemaID("reearth/cesium") + PropertySchemaIDInfobox = property.MustSchemaID("reearth/infobox") +) func GetPropertySchemaByVisualizer(v visualizer.Visualizer) *property.Schema { for _, p := range pluginManifest.ExtensionSchema { diff --git a/pkg/i18n/string.go b/pkg/i18n/string.go index f4bc4dde9..410b84fed 100644 --- a/pkg/i18n/string.go +++ b/pkg/i18n/string.go @@ -1,12 +1,36 @@ package i18n +const DefaultLang = "en" + type String map[string]string // key should use BCP 47 representation func StringFrom(s string) String { if s == "" { + return String{} + } + return String{DefaultLang: s} +} + +func (s String) WithDefault(d string) String { + if s == nil && d == "" { return nil } - return String{"en": s} + + res := s.Clone() + if res == nil { + res = String{} + } + if d != "" { + res[DefaultLang] = d + } + return res +} + +func (s String) WithDefaultRef(d *string) String { + if d == nil { + return s.Clone() + } + return s.WithDefault(*d) } func (s String) Translated(lang ...string) string { @@ -21,8 +45,8 @@ func (s String) Translated(lang ...string) string { return s.String() } -func (s String) Copy() String { - if s == nil { +func (s String) Clone() String { + if len(s) == 0 { return nil } s2 := make(String, len(s)) @@ -36,13 +60,13 @@ func (s String) String() string { if s == nil { return "" } - return s["en"] + return s[DefaultLang] } 
func (s String) StringRef() *string { if s == nil { return nil } - st := s["en"] + st := s[DefaultLang] return &st } diff --git a/pkg/i18n/string_test.go b/pkg/i18n/string_test.go index 88903ed70..8aa6affc1 100644 --- a/pkg/i18n/string_test.go +++ b/pkg/i18n/string_test.go @@ -1,7 +1,6 @@ package i18n import ( - "reflect" "testing" "github.com/stretchr/testify/assert" @@ -9,18 +8,18 @@ import ( func TestString_String(t *testing.T) { tests := []struct { - Name, ExpectedStr string - I18nString String + Name, Expected string + Target String }{ { - Name: "en string", - ExpectedStr: "foo", - I18nString: String{"en": "foo"}, + Name: "en string", + Expected: "foo", + Target: String{"en": "foo"}, }, { - Name: "nil string", - ExpectedStr: "", - I18nString: nil, + Name: "nil string", + Expected: "", + Target: nil, }, } @@ -28,7 +27,115 @@ func TestString_String(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.ExpectedStr, tc.I18nString.String()) + assert.Equal(t, tc.Expected, tc.Target.String()) + }) + } +} + +func TestString_WithDefault(t *testing.T) { + tests := []struct { + Name string + Target String + Input string + Expected String + }{ + { + Name: "ok", + Target: String{"en": "foo", "ja": "bar"}, + Input: "x", + Expected: String{"en": "x", "ja": "bar"}, + }, + { + Name: "empty default", + Target: String{"en": "foo"}, + Input: "", + Expected: String{"en": "foo"}, + }, + { + Name: "empty", + Target: String{}, + Input: "x", + Expected: String{"en": "x"}, + }, + { + Name: "empty string and empty default", + Target: String{}, + Input: "", + Expected: String{}, + }, + { + Name: "nil string", + Target: nil, + Input: "x", + Expected: String{"en": "x"}, + }, + { + Name: "nil string and empty default", + Target: nil, + Input: "", + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Target.WithDefault(tc.Input)) + }) + } +} + +func 
TestString_WithDefaultRef(t *testing.T) { + tests := []struct { + Name string + Target String + Input *string + Expected String + }{ + { + Name: "ok", + Target: String{"en": "foo", "ja": "bar"}, + Input: sr("x"), + Expected: String{"en": "x", "ja": "bar"}, + }, + { + Name: "nil default", + Target: String{"en": "foo", "ja": "bar"}, + Input: nil, + Expected: String{"en": "foo", "ja": "bar"}, + }, + { + Name: "empty default", + Target: String{"en": "foo"}, + Input: sr(""), + Expected: String{"en": "foo"}, + }, + { + Name: "empty", + Target: String{}, + Input: sr("x"), + Expected: String{"en": "x"}, + }, + { + Name: "empty string and empty default", + Target: String{}, + Input: sr(""), + Expected: String{}, + }, + { + Name: "nil string", + Target: nil, + Input: sr("x"), + Expected: String{"en": "x"}, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.Expected, tc.Target.WithDefaultRef(tc.Input)) }) } } @@ -69,25 +176,28 @@ func TestStringTranslated(t *testing.T) { func TestStringFrom(t *testing.T) { assert.Equal(t, String{"en": "foo"}, StringFrom("foo")) - assert.Nil(t, String(nil), StringFrom("")) + assert.Equal(t, String{}, StringFrom("")) } -func TestStringCopy(t *testing.T) { +func TestString_Clone(t *testing.T) { tests := []struct { - Name string - SourceString String + Name string + Target, Expected String }{ { - Name: "String with content", - SourceString: String{"ja": "foo"}, + Name: "String with content", + Target: String{"ja": "foo"}, + Expected: String{"ja": "foo"}, }, { - Name: "empty String", - SourceString: String{}, + Name: "empty String", + Target: String{}, + Expected: nil, }, { - Name: "nil", - SourceString: nil, + Name: "nil", + Target: nil, + Expected: nil, }, } @@ -95,10 +205,9 @@ func TestStringCopy(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.True(t, reflect.DeepEqual(tc.SourceString, tc.SourceString.Copy())) - if tc.SourceString == nil 
{ - assert.Nil(t, tc.SourceString.Copy()) - } + res := tc.Target.Clone() + assert.Equal(t, tc.Expected, res) + assert.NotSame(t, tc.Target, res) }) } } @@ -133,3 +242,7 @@ func TestString_StringRef(t *testing.T) { }) } } + +func sr(s string) *string { + return &s +} diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go index 91795e251..7fc594785 100644 --- a/pkg/id/property_schema.go +++ b/pkg/id/property_schema.go @@ -7,7 +7,7 @@ import ( const schemaSystemIDPrefix = "reearth" -var schemaNameRe = regexp.MustCompile("^[a-zA-Z0-9_-]+$") +var schemaIDRe = regexp.MustCompile("^[a-zA-Z0-9][a-zA-Z0-9_-]*$|^@$") // PropertySchemaID is an ID for PropertySchema. type PropertySchemaID struct { @@ -18,7 +18,7 @@ type PropertySchemaID struct { // PropertySchemaIDFrom generates a new PropertySchemaID from a string. func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { ids := strings.SplitN(id, "/", 2) - if len(ids) < 2 || !schemaNameRe.MatchString(ids[len(ids)-1]) { + if len(ids) < 2 || !schemaIDRe.MatchString(ids[len(ids)-1]) { return PropertySchemaID{}, ErrInvalidID } pid, err := PluginIDFrom(ids[0]) diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index 4f35382b3..cb8f234f4 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -20,7 +20,7 @@ func TestPropertySchemaIDFrom(t *testing.T) { } }{ { - name: "success:valid name", + name: "success", input: "test~1.0.0/Test_Test-01", expected: struct { result PropertySchemaID @@ -34,7 +34,21 @@ func TestPropertySchemaIDFrom(t *testing.T) { }, }, { - name: "fail:invalid name", + name: "success: @", + input: "test~1.0.0/@", + expected: struct { + result PropertySchemaID + err error + }{ + result: PropertySchemaID{ + plugin: MustPluginID("test~1.0.0"), + id: "@", + }, + err: nil, + }, + }, + { + name: "fail 1", input: "Test", expected: struct { result PropertySchemaID @@ -42,7 +56,7 @@ func TestPropertySchemaIDFrom(t *testing.T) { }{result: 
PropertySchemaID{}, err: ErrInvalidID}, }, { - name: "fail:invalid name", + name: "fail 2", input: "Test/+dsad", expected: struct { result PropertySchemaID @@ -50,8 +64,16 @@ func TestPropertySchemaIDFrom(t *testing.T) { }{result: PropertySchemaID{}, err: ErrInvalidID}, }, { - name: "fail:invalid name", - input: "Test/dsa d", + name: "fail 3", + input: "Test/-", + expected: struct { + result PropertySchemaID + err error + }{result: PropertySchemaID{}, err: ErrInvalidID}, + }, + { + name: "fail 4", + input: "Test/__", expected: struct { result PropertySchemaID err error diff --git a/pkg/plugin/builder.go b/pkg/plugin/builder.go index 7cd0ddd27..6fad423d5 100644 --- a/pkg/plugin/builder.go +++ b/pkg/plugin/builder.go @@ -31,7 +31,7 @@ func (b *Builder) ID(id ID) *Builder { } func (b *Builder) Name(name i18n.String) *Builder { - b.p.name = name.Copy() + b.p.name = name.Clone() return b } @@ -41,7 +41,7 @@ func (b *Builder) Author(author string) *Builder { } func (b *Builder) Description(description i18n.String) *Builder { - b.p.description = description.Copy() + b.p.description = description.Clone() return b } diff --git a/pkg/plugin/extension.go b/pkg/plugin/extension.go index 199bf016b..083b8ae12 100644 --- a/pkg/plugin/extension.go +++ b/pkg/plugin/extension.go @@ -40,11 +40,11 @@ func (w *Extension) Type() ExtensionType { } func (w *Extension) Name() i18n.String { - return w.name.Copy() + return w.name.Clone() } func (w *Extension) Description() i18n.String { - return w.description.Copy() + return w.description.Clone() } func (w *Extension) Icon() string { @@ -71,12 +71,12 @@ func (w *Extension) WidgetLayout() *WidgetLayout { } func (w *Extension) Rename(name i18n.String) { - w.name = name.Copy() + w.name = name.Clone() } func (w *Extension) SetDescription(des i18n.String) { - w.description = des.Copy() + w.description = des.Clone() } func (w *Extension) Clone() *Extension { @@ -86,8 +86,8 @@ func (w *Extension) Clone() *Extension { return &Extension{ id: w.id, 
extensionType: w.extensionType, - name: w.name.Copy(), - description: w.description.Copy(), + name: w.name.Clone(), + description: w.description.Clone(), icon: w.icon, schema: w.schema.Clone(), visualizer: w.visualizer, diff --git a/pkg/plugin/extension_builder.go b/pkg/plugin/extension_builder.go index bd8781ff7..a28142b2b 100644 --- a/pkg/plugin/extension_builder.go +++ b/pkg/plugin/extension_builder.go @@ -42,7 +42,7 @@ func (b *ExtensionBuilder) ID(id ExtensionID) *ExtensionBuilder { } func (b *ExtensionBuilder) Name(name i18n.String) *ExtensionBuilder { - b.p.name = name.Copy() + b.p.name = name.Clone() return b } @@ -52,7 +52,7 @@ func (b *ExtensionBuilder) Type(extensionType ExtensionType) *ExtensionBuilder { } func (b *ExtensionBuilder) Description(description i18n.String) *ExtensionBuilder { - b.p.description = description.Copy() + b.p.description = description.Clone() return b } diff --git a/pkg/plugin/manifest/convert.go b/pkg/plugin/manifest/convert.go index 9901c1622..9883e1f0e 100644 --- a/pkg/plugin/manifest/convert.go +++ b/pkg/plugin/manifest/convert.go @@ -13,7 +13,7 @@ import ( var errInvalidManifestWith = rerror.With(ErrInvalidManifest) -func (i *Root) manifest(sid *plugin.SceneID) (*Manifest, error) { +func (i *Root) manifest(sid *plugin.SceneID, tl *TranslatedRoot) (*Manifest, error) { var pid plugin.ID var err error if i.System && string(i.ID) == plugin.OfficialPluginID.Name() { @@ -27,7 +27,11 @@ func (i *Root) manifest(sid *plugin.SceneID) (*Manifest, error) { var pluginSchema *property.Schema if i.Schema != nil { - schema, err := i.Schema.schema(pid, "@") + var ts *TranslatedPropertySchema + if tl != nil { + ts = &tl.Schema + } + schema, err := i.Schema.schema(pid, "@", ts) if err != nil { return nil, errInvalidManifestWith(rerror.From("plugin property schema", err)) } @@ -42,7 +46,12 @@ func (i *Root) manifest(sid *plugin.SceneID) (*Manifest, error) { } for _, e := range i.Extensions { - extension, extensionSchema, err2 := 
e.extension(pid, i.System) + var te *TranslatedExtension + if tl != nil { + te = tl.Extensions[string(e.ID)] + } + + extension, extensionSchema, err2 := e.extension(pid, i.System, te) if err2 != nil { return nil, errInvalidManifestWith(rerror.From(fmt.Sprintf("ext (%s)", e.ID), err2)) } @@ -50,22 +59,27 @@ func (i *Root) manifest(sid *plugin.SceneID) (*Manifest, error) { extensionSchemas = append(extensionSchemas, extensionSchema) } - var author, desc, repository string + var author, repository string if i.Author != nil { author = *i.Author } - if i.Description != nil { - desc = *i.Description - } if i.Repository != nil { repository = *i.Repository } + var name, desc i18n.String + if tl != nil { + name = tl.Name + desc = tl.Description + } + name = name.WithDefault(i.Name) + desc = desc.WithDefaultRef(i.Description) + p, err := plugin.New(). ID(pid). - Name(i18n.StringFrom(i.Name)). + Name(name). Author(author). - Description(i18n.StringFrom(desc)). + Description(desc). RepositoryURL(repository). Schema(pluginSchema.IDRef()). Extensions(extensions). 
@@ -81,9 +95,13 @@ func (i *Root) manifest(sid *plugin.SceneID) (*Manifest, error) { }, nil } -func (i Extension) extension(pluginID plugin.ID, sys bool) (*plugin.Extension, *property.Schema, error) { +func (i Extension) extension(pluginID plugin.ID, sys bool, te *TranslatedExtension) (*plugin.Extension, *property.Schema, error) { eid := string(i.ID) - schema, err := i.Schema.schema(pluginID, eid) + var ts *TranslatedPropertySchema + if te != nil { + ts = &te.PropertySchema + } + schema, err := i.Schema.schema(pluginID, eid, ts) if err != nil { return nil, nil, rerror.From("property schema", err) } @@ -122,11 +140,8 @@ func (i Extension) extension(pluginID plugin.ID, sys bool) (*plugin.Extension, * return nil, nil, fmt.Errorf("invalid type: %s", i.Type) } - var desc, icon string + var icon string var singleOnly bool - if i.Description != nil { - desc = *i.Description - } if i.Icon != nil { icon = *i.Icon } @@ -134,10 +149,18 @@ func (i Extension) extension(pluginID plugin.ID, sys bool) (*plugin.Extension, * singleOnly = *i.SingleOnly } + var name, desc i18n.String + if te != nil { + name = te.Name + desc = te.Description + } + name = name.WithDefault(i.Name) + desc = desc.WithDefaultRef(i.Description) + ext, err := plugin.NewExtension(). ID(plugin.ExtensionID(eid)). - Name(i18n.StringFrom(i.Name)). - Description(i18n.StringFrom(desc)). + Name(name). + Description(desc). Visualizer(viz). Type(typ). SingleOnly(singleOnly). 
@@ -184,7 +207,7 @@ func (l *WidgetLayout) layout() *plugin.WidgetLayout { return plugin.NewWidgetLayout(horizontallyExtendable, verticallyExtendable, extended, l.Floating, dl).Ref() } -func (i *PropertySchema) schema(pluginID plugin.ID, idstr string) (*property.Schema, error) { +func (i *PropertySchema) schema(pluginID plugin.ID, idstr string, ts *TranslatedPropertySchema) (*property.Schema, error) { psid, err := property.SchemaIDFrom(pluginID.String() + "/" + idstr) if err != nil { return nil, fmt.Errorf("invalid id: %s", pluginID.String()+"/"+idstr) @@ -199,7 +222,12 @@ func (i *PropertySchema) schema(pluginID plugin.ID, idstr string) (*property.Sch // groups groups := make([]*property.SchemaGroup, 0, len(i.Groups)) for _, d := range i.Groups { - item, err := d.schemaGroup() + var tg *TranslatedPropertySchemaGroup + if ts != nil { + tg = (*ts)[string(d.ID)] + } + + item, err := d.schemaGroup(tg) if err != nil { return nil, rerror.From(fmt.Sprintf("item (%s)", d.ID), err) } @@ -243,28 +271,41 @@ func (p *PropertyPointer) pointer() *property.SchemaFieldPointer { } } -func (i PropertySchemaGroup) schemaGroup() (*property.SchemaGroup, error) { - title := i.Title +func (i PropertySchemaGroup) schemaGroup(tg *TranslatedPropertySchemaGroup) (*property.SchemaGroup, error) { + var title i18n.String + if tg != nil { + title = tg.Title.Clone() + } + title = title.WithDefault(i.Title) + var representativeField *property.FieldID if i.RepresentativeField != nil { representativeField = property.FieldID(*i.RepresentativeField).Ref() } // fields - fields := make([]*property.SchemaField, 0, len(i.Fields)) - for _, d := range i.Fields { - field, err := d.schemaField() - if err != nil { - return nil, rerror.From(fmt.Sprintf("field (%s)", d.ID), err) + var fields []*property.SchemaField + if len(i.Fields) > 0 { + fields = make([]*property.SchemaField, 0, len(i.Fields)) + for _, d := range i.Fields { + var tf *TranslatedPropertySchemaField + if tg != nil { + tf = 
tg.Fields[string(d.ID)] + } + + field, err := d.schemaField(tf) + if err != nil { + return nil, rerror.From(fmt.Sprintf("field (%s)", d.ID), err) + } + fields = append(fields, field) } - fields = append(fields, field) } return property.NewSchemaGroup(). ID(property.SchemaGroupID(i.ID)). IsList(i.List). Fields(fields). - Title(i18n.StringFrom(title)). + Title(title). RepresentativeField(representativeField). IsAvailableIf(i.AvailableIf.condition()). Build() @@ -280,19 +321,21 @@ func (o *PropertyCondition) condition() *property.Condition { } } -func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { +func (i PropertySchemaField) schemaField(tf *TranslatedPropertySchemaField) (*property.SchemaField, error) { t := property.ValueType(i.Type) if !t.Valid() { return nil, fmt.Errorf("invalid value type: %s", i.Type) } - var title, desc, prefix, suffix string - if i.Title != nil { - title = *i.Title - } - if i.Description != nil { - desc = *i.Description + var title, desc i18n.String + if tf != nil { + title = tf.Title.Clone() + desc = tf.Description.Clone() } + title = title.WithDefaultRef(i.Title) + desc = desc.WithDefaultRef(i.Description) + + var prefix, suffix string if i.Prefix != nil { prefix = *i.Prefix } @@ -307,14 +350,19 @@ func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { if c.Key == "" { continue } - choices = append(choices, *c.choice()) + + var t i18n.String + if tf != nil { + t = tf.Choices[c.Key] + } + choices = append(choices, c.choice(t)) } } f, err := property.NewSchemaField(). ID(property.FieldID(i.ID)). - Name(i18n.StringFrom(title)). - Description(i18n.StringFrom(desc)). + Name(title). + Description(desc). Type(t). Prefix(prefix). Suffix(suffix). 
@@ -331,13 +379,10 @@ func (i PropertySchemaField) schemaField() (*property.SchemaField, error) { return f, err } -func (c *Choice) choice() *property.SchemaFieldChoice { - if c == nil { - return nil - } - return &property.SchemaFieldChoice{ +func (c Choice) choice(t i18n.String) property.SchemaFieldChoice { + return property.SchemaFieldChoice{ Key: c.Key, - Title: i18n.StringFrom(c.Label), + Title: t.WithDefault(c.Label), Icon: c.Icon, } } diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index 54b42d224..c82140953 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -20,17 +20,18 @@ func TestToValue(t *testing.T) { func TestChoice(t *testing.T) { tests := []struct { name string - ch *Choice - expected *property.SchemaFieldChoice + ch Choice + tc i18n.String + expected property.SchemaFieldChoice }{ { name: "success", - ch: &Choice{ + ch: Choice{ Icon: "aaa", Key: "nnn", Label: "vvv", }, - expected: &property.SchemaFieldChoice{ + expected: property.SchemaFieldChoice{ Key: "nnn", Title: i18n.StringFrom("vvv"), Icon: "aaa", @@ -42,10 +43,9 @@ func TestChoice(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - assert.Equal(t, *tt.expected, *tt.ch.choice()) + assert.Equal(t, tt.expected, tt.ch.choice(tt.tc)) }) } - } func TestManifest(t *testing.T) { @@ -58,7 +58,9 @@ func TestManifest(t *testing.T) { tests := []struct { name string root *Root + scene *plugin.SceneID expected *Manifest + tl *TranslatedRoot err string }{ { @@ -72,27 +74,53 @@ func TestManifest(t *testing.T) { Description: nil, ID: "cesium", Name: "", - Schema: nil, Type: "visualizer", Visualizer: &cesium, }}, Repository: &r, System: true, Version: "1.1.1", + Schema: &PropertySchema{ + Groups: []PropertySchemaGroup{ + {ID: "default"}, + }, + }, + }, + tl: &TranslatedRoot{ + Name: i18n.String{"ja": "A"}, + Description: i18n.String{"ja": "B"}, + Extensions: map[string]*TranslatedExtension{"cesium": {Name: 
i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}}}, + Schema: TranslatedPropertySchema{"default": {Title: i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}}}, }, expected: &Manifest{ Plugin: plugin.New(). ID(plugin.OfficialPluginID). - Name(i18n.StringFrom("aaa")). + Name(i18n.String{"en": "aaa", "ja": "A"}). + Author(a). + RepositoryURL(r). + Description(i18n.String{"en": d, "ja": "B"}). + Schema(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "@").Ref()). Extensions([]*plugin.Extension{ plugin.NewExtension(). ID("cesium"). + Name(i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}). Visualizer("cesium"). Type("visualizer"). - System(true).MustBuild(), + Schema(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "cesium")). + System(true). + MustBuild(), }).MustBuild(), - ExtensionSchema: nil, - Schema: nil, + ExtensionSchema: property.SchemaList{ + property.NewSchema(). + ID(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "cesium")). + MustBuild(), + }, + Schema: property.NewSchema(). + ID(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "@")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("default").Title(i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}).MustBuild(), + })). 
+ MustBuild(), }, }, { @@ -103,9 +131,7 @@ func TestManifest(t *testing.T) { System: true, }, expected: &Manifest{ - Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), - ExtensionSchema: nil, - Schema: nil, + Plugin: plugin.New().ID(plugin.OfficialPluginID).Name(i18n.StringFrom("reearth")).MustBuild(), }, }, { @@ -166,13 +192,13 @@ func TestManifest(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - m, err := tt.root.manifest(nil) + m, err := tt.root.manifest(tt.scene, tt.tl) if tt.err == "" { - assert.Equal(t, tt.expected.Plugin.ID(), m.Plugin.ID()) - assert.Equal(t, tt.expected.Plugin.Name(), m.Plugin.Name()) - assert.Equal(t, len(tt.expected.Plugin.Extensions()), len(m.Plugin.Extensions())) + assert.Equal(t, tt.expected, m) + assert.NoError(t, err) } else { - assert.Equal(t, tt.err, err.Error()) + assert.Nil(t, m) + assert.EqualError(t, err, tt.err) } }) } @@ -189,6 +215,7 @@ func TestExtension(t *testing.T) { name string ext Extension sys bool + tl *TranslatedExtension pid plugin.ID expectedPE *plugin.Extension expectedPS *property.Schema @@ -201,24 +228,38 @@ func TestExtension(t *testing.T) { ID: "cesium", Name: "Cesium", Icon: &i, - Schema: nil, - Type: "visualizer", - Visualizer: &cesium, + Schema: &PropertySchema{ + Groups: []PropertySchemaGroup{ + {ID: "default"}, + }, + }, + Type: "visualizer", + Visualizer: &cesium, }, sys: true, pid: plugin.OfficialPluginID, + tl: &TranslatedExtension{ + Name: i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}, + Description: i18n.String{"ja": "DDD"}, + PropertySchema: TranslatedPropertySchema{ + "default": {Title: i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}}, + }, + }, expectedPE: plugin.NewExtension(). ID("cesium"). - Name(i18n.StringFrom("Cesium")). + Name(i18n.String{"en": "Cesium", "ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}). Visualizer("cesium"). Type(plugin.ExtensionTypeVisualizer). System(true). - Description(i18n.StringFrom("ddd")). + Description(i18n.String{"en": "ddd", "ja": "DDD"}). 
Schema(property.MustSchemaID("reearth/cesium")). Icon(i). MustBuild(), expectedPS: property.NewSchema(). ID(property.MustSchemaID("reearth/cesium")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + property.NewSchemaGroup().ID("default").Title(i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}).MustBuild(), + })). MustBuild(), }, { @@ -399,12 +440,15 @@ func TestExtension(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - pe, ps, err := tt.ext.extension(tt.pid, tt.sys) + pe, ps, err := tt.ext.extension(tt.pid, tt.sys, tt.tl) if tt.err == "" { assert.Equal(t, tt.expectedPE, pe) assert.Equal(t, tt.expectedPS, ps) + assert.Nil(t, err) } else { - assert.Equal(t, tt.err, err.Error()) + assert.EqualError(t, err, tt.err) + assert.Nil(t, pe) + assert.Nil(t, ps) } }) } @@ -540,12 +584,13 @@ func TestSchema(t *testing.T) { name, psid string ps *PropertySchema pid plugin.ID + tl *TranslatedPropertySchema expected *property.Schema err string }{ { name: "fail invalid id", - psid: "@", + psid: "~", ps: &PropertySchema{ Groups: nil, Linkable: nil, @@ -553,7 +598,7 @@ func TestSchema(t *testing.T) { }, pid: plugin.MustID("aaa~1.1.1"), expected: nil, - err: "invalid id: aaa~1.1.1/@", + err: "invalid id: aaa~1.1.1/~", }, { name: "success nil PropertySchema", @@ -563,7 +608,7 @@ func TestSchema(t *testing.T) { expected: property.NewSchema().ID(property.MustSchemaID("reearth/marker")).MustBuild(), }, { - name: "success ", + name: "success", psid: "marker", ps: &PropertySchema{ Groups: []PropertySchemaGroup{{ @@ -590,6 +635,9 @@ func TestSchema(t *testing.T) { Linkable: nil, Version: 0, }, + tl: &TranslatedPropertySchema{ + "default": {Title: i18n.String{"ja": "ใƒžใƒผใ‚ซใƒผ"}}, + }, pid: plugin.OfficialPluginID, expected: property. NewSchema(). @@ -597,6 +645,7 @@ func TestSchema(t *testing.T) { Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ property.NewSchemaGroup(). ID("default"). + Title(i18n.String{"en": "marker", "ja": "ใƒžใƒผใ‚ซใƒผ"}). 
Fields([]*property.SchemaField{ property.NewSchemaField(). ID("location"). @@ -613,17 +662,13 @@ func TestSchema(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - res, err := tt.ps.schema(tt.pid, tt.psid) + res, err := tt.ps.schema(tt.pid, tt.psid, tt.tl) if tt.err == "" { - assert.Equal(t, tt.expected.Groups().Len(), res.Groups().Len()) - assert.Equal(t, tt.expected.LinkableFields(), res.LinkableFields()) - assert.Equal(t, tt.expected.Version(), res.Version()) - if res.Groups().Len() > 0 { - exg := tt.expected.Groups().Group(res.Groups().Groups()[0].ID()) - assert.NotNil(t, exg) - } + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) } else { - assert.Equal(t, tt.err, err.Error()) + assert.Nil(t, res) + assert.EqualError(t, err, tt.err) } }) } @@ -636,6 +681,7 @@ func TestSchemaGroup(t *testing.T) { tests := []struct { name string psg PropertySchemaGroup + tl *TranslatedPropertySchemaGroup expected *property.SchemaGroup err string }{ @@ -662,13 +708,21 @@ func TestSchemaGroup(t *testing.T) { List: false, Title: "marker", }, + tl: &TranslatedPropertySchemaGroup{ + Title: i18n.String{"ja": "ใƒžใƒผใ‚ซใƒผ"}, + Description: i18n.String{"ja": "่ชฌๆ˜Ž"}, + Fields: map[string]*TranslatedPropertySchemaField{ + "location": {Title: i18n.String{"en": "x"}}, + }, + }, expected: property.NewSchemaGroup(). ID("default"). - Title(i18n.StringFrom(str)). + Title(i18n.String{"en": str, "ja": "ใƒžใƒผใ‚ซใƒผ"}). Fields([]*property.SchemaField{ property.NewSchemaField(). ID("location"). Type(property.ValueTypeLatLng). + Name(i18n.String{"en": "x"}). 
MustBuild(), }).MustBuild(), }, @@ -704,17 +758,13 @@ func TestSchemaGroup(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - res, err := tt.psg.schemaGroup() + res, err := tt.psg.schemaGroup(tt.tl) if tt.err == "" { - assert.Equal(t, tt.expected.Title().String(), res.Title().String()) - assert.Equal(t, tt.expected.Title(), res.Title()) - assert.Equal(t, len(tt.expected.Fields()), len(res.Fields())) - if len(res.Fields()) > 0 { - exf := res.Fields()[0] - assert.NotNil(t, tt.expected.Field(exf.ID())) - } + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) } else { - assert.Equal(t, tt.err, err.Error()) + assert.Nil(t, res) + assert.EqualError(t, err, tt.err) } }) } @@ -726,11 +776,12 @@ func TestSchemaField(t *testing.T) { tests := []struct { name string psg PropertySchemaField + tl *TranslatedPropertySchemaField expected *property.SchemaField err error }{ { - name: "success name not nil", + name: "success", psg: PropertySchemaField{ AvailableIf: nil, Choices: nil, @@ -745,8 +796,18 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, - expected: property.NewSchemaField().ID("aaa").Name(i18n.StringFrom("xx")).Description(i18n.StringFrom("")).Type(property.ValueTypeString).MustBuild(), - err: nil, + tl: &TranslatedPropertySchemaField{ + Title: i18n.String{"en": "TITLE", "ja": "ใ‚ฟใ‚คใƒˆใƒซ"}, + Description: i18n.String{"ja": "่ชฌๆ˜Ž"}, + Choices: map[string]i18n.String{"A": {"en": "a"}}, + }, + expected: property.NewSchemaField(). + ID("aaa"). + Name(i18n.String{"en": str, "ja": "ใ‚ฟใ‚คใƒˆใƒซ"}). + Description(i18n.String{"ja": "่ชฌๆ˜Ž"}). + Type(property.ValueTypeString). 
+ MustBuild(), + err: nil, }, { name: "success description not nil", @@ -764,8 +825,13 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, - expected: property.NewSchemaField().ID("aaa").Name(i18n.StringFrom("")).Description(i18n.StringFrom("xx")).Type(property.ValueTypeString).MustBuild(), - err: nil, + expected: property.NewSchemaField(). + ID("aaa"). + Name(i18n.StringFrom("")). + Description(i18n.StringFrom("xx")). + Type(property.ValueTypeString). + MustBuild(), + err: nil, }, { name: "success prefix not nil", @@ -827,6 +893,9 @@ func TestSchemaField(t *testing.T) { Key: "nnn", Label: "vvv", }, + { + Key: "z", + }, }, DefaultValue: nil, Description: nil, @@ -839,14 +908,21 @@ func TestSchemaField(t *testing.T) { Type: "string", UI: nil, }, + tl: &TranslatedPropertySchemaField{ + Choices: map[string]i18n.String{"nnn": {"ja": "a"}, "z": {"en": "Z"}}, + }, expected: property.NewSchemaField(). ID("aaa"). Choices([]property.SchemaFieldChoice{ { Key: "nnn", - Title: i18n.StringFrom("vvv"), + Title: i18n.String{"en": "vvv", "ja": "a"}, Icon: "aaa", }, + { + Key: "z", + Title: i18n.String{"en": "Z"}, + }, }). Type(property.ValueTypeString). Name(i18n.StringFrom("")). 
@@ -891,14 +967,12 @@ func TestSchemaField(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - res, err := tt.psg.schemaField() + res, err := tt.psg.schemaField(tt.tl) if tt.err == nil { - assert.Equal(t, tt.expected.Title(), res.Title()) - assert.Equal(t, tt.expected.Description(), res.Description()) - assert.Equal(t, tt.expected.Suffix(), res.Suffix()) - assert.Equal(t, tt.expected.Prefix(), res.Prefix()) - assert.Equal(t, tt.expected.Choices(), res.Choices()) + assert.Equal(t, tt.expected, res) + assert.Nil(t, err) } else { + assert.Nil(t, res) assert.Equal(t, tt.err, rerror.Get(err).Err) } }) diff --git a/pkg/plugin/manifest/diff_test.go b/pkg/plugin/manifest/diff_test.go index d35b35892..9a0bd0f7e 100644 --- a/pkg/plugin/manifest/diff_test.go +++ b/pkg/plugin/manifest/diff_test.go @@ -11,7 +11,7 @@ import ( func TestDiffFrom(t *testing.T) { oldp := plugin.MustID("aaaaaa~1.0.0") newp := plugin.MustID("aaaaaa~1.1.0") - oldps := property.MustSchemaID("aaaaaa~1.0.0/_") + oldps := property.MustSchemaID("aaaaaa~1.0.0/@") olde1ps := property.MustSchemaID("aaaaaa~1.0.0/a") olde2ps := property.MustSchemaID("aaaaaa~1.0.0/b") olde3ps := property.MustSchemaID("aaaaaa~1.0.0/c") diff --git a/pkg/plugin/manifest/parser.go b/pkg/plugin/manifest/parser.go index e6729cb8b..292d4b7ca 100644 --- a/pkg/plugin/manifest/parser.go +++ b/pkg/plugin/manifest/parser.go @@ -16,14 +16,14 @@ var ( ErrSystemManifest = errors.New("cannot build system manifest") ) -func Parse(source io.Reader, scene *plugin.SceneID) (*Manifest, error) { +func Parse(source io.Reader, scene *plugin.SceneID, tl *TranslatedRoot) (*Manifest, error) { root := Root{} if err := yaml.NewDecoder(source).Decode(&root); err != nil { return nil, ErrFailedToParseManifest // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) } - manifest, err := root.manifest(scene) + manifest, err := root.manifest(scene, tl) if err != nil { return nil, err } @@ -34,14 +34,14 @@ func Parse(source 
io.Reader, scene *plugin.SceneID) (*Manifest, error) { return manifest, nil } -func ParseSystemFromBytes(source []byte, scene *plugin.SceneID) (*Manifest, error) { +func ParseSystemFromBytes(source []byte, scene *plugin.SceneID, tl *TranslatedRoot) (*Manifest, error) { root := Root{} if err := yaml.Unmarshal(source, &root); err != nil { return nil, ErrFailedToParseManifest // return nil, fmt.Errorf("failed to parse plugin manifest: %w", err) } - manifest, err := root.manifest(scene) + manifest, err := root.manifest(scene, tl) if err != nil { return nil, err } @@ -49,8 +49,8 @@ func ParseSystemFromBytes(source []byte, scene *plugin.SceneID) (*Manifest, erro return manifest, nil } -func MustParseSystemFromBytes(source []byte, scene *plugin.SceneID) *Manifest { - m, err := ParseSystemFromBytes(source, scene) +func MustParseSystemFromBytes(source []byte, scene *plugin.SceneID, tl *TranslatedRoot) *Manifest { + m, err := ParseSystemFromBytes(source, scene, tl) if err != nil { panic(err) } diff --git a/pkg/plugin/manifest/parser_test.go b/pkg/plugin/manifest/parser_test.go index bb8df4fc3..30fbde104 100644 --- a/pkg/plugin/manifest/parser_test.go +++ b/pkg/plugin/manifest/parser_test.go @@ -92,7 +92,7 @@ func TestParse(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() - m, err := Parse(strings.NewReader(tc.input), nil) + m, err := Parse(strings.NewReader(tc.input), nil, nil) if tc.err == nil { if !assert.NoError(t, err) { return @@ -136,7 +136,7 @@ func TestParseSystemFromBytes(t *testing.T) { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() - m, err := ParseSystemFromBytes([]byte(tc.input), nil) + m, err := ParseSystemFromBytes([]byte(tc.input), nil, nil) if tc.err == nil { if !assert.NoError(t, err) { return @@ -182,12 +182,12 @@ func TestMustParseSystemFromBytes(t *testing.T) { if tc.fails { assert.Panics(t, func() { - _ = MustParseSystemFromBytes([]byte(tc.input), nil) + _ = MustParseSystemFromBytes([]byte(tc.input), nil, nil) }) 
return } - m := MustParseSystemFromBytes([]byte(tc.input), nil) + m := MustParseSystemFromBytes([]byte(tc.input), nil, nil) assert.Equal(t, m, tc.expected) }) } diff --git a/pkg/plugin/manifest/parser_translation.go b/pkg/plugin/manifest/parser_translation.go index 75a2eac62..0fec6c142 100644 --- a/pkg/plugin/manifest/parser_translation.go +++ b/pkg/plugin/manifest/parser_translation.go @@ -8,8 +8,6 @@ import ( "io" "github.com/goccy/go-yaml" - "github.com/reearth/reearth-backend/pkg/plugin" - "github.com/reearth/reearth-backend/pkg/property" ) var ( @@ -17,150 +15,27 @@ var ( ErrFailedToParseManifestTranslation error = errors.New("failed to parse plugin manifest translation") ) -func ParseTranslation(source io.Reader) (*TranslationRoot, error) { +func ParseTranslation(source io.Reader) (TranslationRoot, error) { root := TranslationRoot{} if err := yaml.NewDecoder(source).Decode(&root); err != nil { - return nil, ErrFailedToParseManifestTranslation - // return nil, fmt.Errorf("failed to parse plugin manifest translation: %w", err) + return root, ErrFailedToParseManifestTranslation } - return &root, nil + return root, nil } -func ParseTranslationFromBytes(source []byte) (*TranslationRoot, error) { +func ParseTranslationFromBytes(source []byte) (TranslationRoot, error) { tr := TranslationRoot{} if err := yaml.Unmarshal(source, &tr); err != nil { - return nil, ErrFailedToParseManifestTranslation - // return nil, fmt.Errorf("failed to parse plugin manifest translation: %w", err) + return tr, ErrFailedToParseManifestTranslation } - return &tr, nil + return tr, nil } -func MustParseTranslationFromBytes(source []byte) *TranslationRoot { +func MustParseTranslationFromBytes(source []byte) TranslationRoot { m, err := ParseTranslationFromBytes(source) if err != nil { panic(err) } return m } - -func MergeManifestTranslation(m *Manifest, tl map[string]*TranslationRoot) *Manifest { - for lang, t := range tl { - if t == nil { - continue - } - - if t.Name != nil { - name := 
m.Plugin.Name() - if name == nil { - name = map[string]string{} - } - name[lang] = *t.Name - m.Plugin.Rename(name) - } - - if t.Description != nil { - des := m.Plugin.Description() - if des == nil { - des = map[string]string{} - } - des[lang] = *t.Description - m.Plugin.SetDescription(des) - } - - for key, te := range t.Extensions { - ext := m.Plugin.Extension(plugin.ExtensionID(key)) - if ext == nil { - continue - } - - if te.Name != nil { - name := ext.Name() - if name == nil { - name = map[string]string{} - } - name[lang] = *te.Name - ext.Rename(name) - } - - if te.Description != nil { - des := ext.Description() - if des == nil { - des = map[string]string{} - } - des[lang] = *te.Description - ext.SetDescription(des) - } - - var ps *property.Schema - for _, s := range m.ExtensionSchema { - if s.ID() == ext.Schema() { - ps = s - break - } - } - if ps == nil { - continue - } - - for key, tsg := range te.PropertySchema { - psg := ps.Groups().Group(property.SchemaGroupID(key)) - if psg == nil { - continue - } - - if tsg.Title != nil { - t := psg.Title() - if t == nil { - t = map[string]string{} - } - t[lang] = *tsg.Title - psg.SetTitle(t) - } - - // PropertySchemaGroup does not have description for now - // if tsg.Description != nil { - // t := psg.Description() - // t[lang] = *tsg.Description - // psg.SetDescription(t) - // } - - for key, tsf := range tsg.Fields { - psf := psg.Field(property.FieldID(key)) - if psf == nil { - continue - } - - if tsf.Title != nil { - t := psf.Title() - if t == nil { - t = map[string]string{} - } - t[lang] = *tsf.Title - psf.SetTitle(t) - } - - if tsf.Description != nil { - t := psf.Description() - if t == nil { - t = map[string]string{} - } - t[lang] = *tsf.Description - psf.SetDescription(t) - } - - for key, label := range tsf.Choices { - psfc := psf.Choice(key) - if psfc == nil { - continue - } - - psfc.Title[lang] = label - } - } - } - } - } - - return m -} diff --git a/pkg/plugin/manifest/parser_translation_test.go 
b/pkg/plugin/manifest/parser_translation_test.go index 10da92274..84ff8066a 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -5,14 +5,12 @@ import ( "strings" "testing" - "github.com/reearth/reearth-backend/pkg/i18n" - "github.com/reearth/reearth-backend/pkg/plugin" "github.com/stretchr/testify/assert" ) //go:embed testdata/translation.yml var translatedManifest string -var expected = &TranslationRoot{ +var expected = TranslationRoot{ Description: sr("test plugin desc"), Extensions: map[string]TranslationExtension{ "test_ext": { @@ -37,14 +35,11 @@ var expected = &TranslationRoot{ Schema: nil, } -//go:embed testdata/translation_merge.yml -var mergeManifest string - func TestParseTranslation(t *testing.T) { tests := []struct { name string input string - expected *TranslationRoot + expected TranslationRoot err error }{ { @@ -56,7 +51,7 @@ func TestParseTranslation(t *testing.T) { { name: "fail not valid JSON", input: "", - expected: nil, + expected: TranslationRoot{}, err: ErrFailedToParseManifestTranslation, }, } @@ -80,7 +75,7 @@ func TestParseTranslationFromBytes(t *testing.T) { tests := []struct { name string input string - expected *TranslationRoot + expected TranslationRoot err error }{ { @@ -92,7 +87,7 @@ func TestParseTranslationFromBytes(t *testing.T) { { name: "fail not valid YAML", input: "--", - expected: nil, + expected: TranslationRoot{}, err: ErrFailedToParseManifestTranslation, }, } @@ -115,7 +110,7 @@ func TestMustParseTransSystemFromBytes(t *testing.T) { tests := []struct { name string input string - expected *TranslationRoot + expected TranslationRoot fails bool }{ { @@ -127,7 +122,7 @@ func TestMustParseTransSystemFromBytes(t *testing.T) { { name: "fail not valid YAML", input: "--", - expected: nil, + expected: TranslationRoot{}, fails: true, }, } @@ -150,55 +145,6 @@ func TestMustParseTransSystemFromBytes(t *testing.T) { } } -func TestMergeManifestTranslation(t *testing.T) { - 
tests := []struct { - Name string - Translations map[string]*TranslationRoot - Manifest *Manifest - Expected *struct { - PluginName, PluginDesc, ExtName, PsTitle, FieldTitle, FieldDesc i18n.String - } - }{ - { - Name: "nil translition list", - Translations: nil, - Manifest: nil, - Expected: nil, - }, - { - Name: "nil translition list", - Translations: map[string]*TranslationRoot{"xx": MustParseTranslationFromBytes([]byte(translatedManifest))}, - Manifest: MustParseSystemFromBytes([]byte(mergeManifest), nil), - Expected: &struct{ PluginName, PluginDesc, ExtName, PsTitle, FieldTitle, FieldDesc i18n.String }{ - PluginName: i18n.String{"en": "aaa", "xx": "test plugin name"}, - PluginDesc: i18n.String{"en": "ddd", "xx": "test plugin desc"}, - ExtName: i18n.String{"en": "ttt", "xx": "test ext name"}, - PsTitle: i18n.String{"en": "sss", "xx": "test ps title"}, - FieldTitle: i18n.String{"en": "nnn", "xx": "test field name"}, - FieldDesc: i18n.String{"en": "kkk", "xx": "test field desc"}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { - t.Parallel() - res := MergeManifestTranslation(tc.Manifest, tc.Translations) - if tc.Expected == nil { - assert.Nil(t, res) - return - } - assert.Equal(t, tc.Expected.PluginName, res.Plugin.Name()) - assert.Equal(t, tc.Expected.PluginDesc, res.Plugin.Description()) - assert.Equal(t, tc.Expected.ExtName, res.Plugin.Extension(plugin.ExtensionID("test_ext")).Name()) - assert.Equal(t, tc.Expected.PsTitle, res.ExtensionSchema[0].Groups().Group("test_ps").Title()) - assert.Equal(t, tc.Expected.FieldTitle, res.ExtensionSchema[0].Groups().Group("test_ps").Field("test_field").Title()) - assert.Equal(t, tc.Expected.FieldDesc, res.ExtensionSchema[0].Groups().Group("test_ps").Field("test_field").Description()) - }) - } -} - func sr(s string) *string { return &s } diff --git a/pkg/plugin/manifest/schema_translation.go b/pkg/plugin/manifest/schema_translation.go index 7d512c19f..acd606e2b 100644 --- 
a/pkg/plugin/manifest/schema_translation.go +++ b/pkg/plugin/manifest/schema_translation.go @@ -1,5 +1,7 @@ package manifest +import "github.com/reearth/reearth-backend/pkg/i18n" + type TranslationExtension struct { Description *string `json:"description,omitempty"` Name *string `json:"name,omitempty"` @@ -26,3 +28,229 @@ type TranslationRoot struct { Name *string `json:"name,omitempty"` Schema TranslationPropertySchema `json:"schema,omitempty"` } + +type TranslationMap map[string]TranslationRoot + +type TranslatedExtension struct { + Description i18n.String + Name i18n.String + PropertySchema TranslatedPropertySchema +} + +type TranslatedPropertySchema map[string]*TranslatedPropertySchemaGroup + +type TranslatedPropertySchemaField struct { + Choices map[string]i18n.String + Description i18n.String + Title i18n.String +} + +type TranslatedPropertySchemaGroup struct { + Description i18n.String + Fields map[string]*TranslatedPropertySchemaField + Title i18n.String +} + +type TranslatedRoot struct { + Description i18n.String + Extensions map[string]*TranslatedExtension + Name i18n.String + Schema TranslatedPropertySchema +} + +func (tm TranslationMap) Translated() (res TranslatedRoot) { + if len(tm) == 0 { + return TranslatedRoot{} + } + + res.Name = tm.name() + res.Description = tm.description() + res.Schema.setPropertySchema(tm.propertySchemas("")) + + for l, t := range tm { + for eid, e := range t.Extensions { + te := res.getOrCreateExtension(eid) + + if e.Name != nil { + if te.Name == nil { + te.Name = i18n.String{} + } + te.Name[l] = *e.Name + } + + if e.Description != nil { + if te.Description == nil { + te.Description = i18n.String{} + } + te.Description[l] = *e.Description + } + + if len(e.PropertySchema) > 0 { + te.PropertySchema.setPropertySchema(tm.propertySchemas(eid)) + } + } + } + + return res +} + +func (tm TranslationMap) TranslatedRef() *TranslatedRoot { + if len(tm) == 0 { + return nil + } + + t := tm.Translated() + return &t +} +func (t 
TranslationRoot) propertySchema(eid string) (res TranslationPropertySchema) { + if eid == "" { + return t.Schema + } + for eid2, e := range t.Extensions { + if eid == eid2 { + return e.PropertySchema + } + } + return +} + +func (tm TranslationMap) name() i18n.String { + name := i18n.String{} + for l, t := range tm { + if t.Name == nil { + continue + } + name[l] = *t.Name + } + if len(name) == 0 { + return nil + } + return name +} + +func (tm TranslationMap) description() i18n.String { + desc := i18n.String{} + for l, t := range tm { + if t.Description == nil { + continue + } + desc[l] = *t.Description + } + if len(desc) == 0 { + return nil + } + return desc +} + +func (tm TranslationMap) propertySchemas(eid string) map[string]TranslationPropertySchema { + if len(tm) == 0 { + return nil + } + + res := make(map[string]TranslationPropertySchema) + for l, tl := range tm { + s := tl.propertySchema(eid) + res[l] = s + } + return res +} + +func (t *TranslatedRoot) getOrCreateExtension(eid string) *TranslatedExtension { + if eid == "" { + return nil + } + if t.Extensions == nil { + t.Extensions = map[string]*TranslatedExtension{} + } + if e, ok := t.Extensions[eid]; ok { + return e + } + g := &TranslatedExtension{} + t.Extensions[eid] = g + return g +} + +func (t *TranslatedPropertySchema) getOrCreateGroup(gid string) *TranslatedPropertySchemaGroup { + if gid == "" { + return nil + } + if t == nil || *t == nil { + *t = TranslatedPropertySchema{} + } + if g := (*t)[gid]; g != nil { + return g + } + g := &TranslatedPropertySchemaGroup{} + (*t)[gid] = g + return g +} + +func (t *TranslatedPropertySchemaGroup) getOrCreateField(fid string) *TranslatedPropertySchemaField { + if fid == "" { + return nil + } + if t.Fields == nil { + t.Fields = map[string]*TranslatedPropertySchemaField{} + } + if f := t.Fields[fid]; f != nil { + return f + } + f := &TranslatedPropertySchemaField{} + t.Fields[fid] = f + return f +} + +func (t *TranslatedPropertySchema) setPropertySchema(schemas 
map[string]TranslationPropertySchema) { + for l, tl := range schemas { + for gid, g := range tl { + if t == nil || *t == nil { + *t = TranslatedPropertySchema{} + } + + tg := t.getOrCreateGroup(gid) + + if g.Title != nil { + if tg.Title == nil { + tg.Title = i18n.String{} + } + tg.Title[l] = *g.Title + } + + if g.Description != nil { + if tg.Description == nil { + tg.Description = i18n.String{} + } + tg.Description[l] = *g.Description + } + + for fid, f := range g.Fields { + tf := tg.getOrCreateField(fid) + if f.Title != nil { + if tf.Title == nil { + tf.Title = i18n.String{} + } + tf.Title[l] = *f.Title + } + + if f.Description != nil { + if tf.Description == nil { + tf.Description = i18n.String{} + } + tf.Description[l] = *f.Description + } + + if len(f.Choices) > 0 { + if tf.Choices == nil { + tf.Choices = map[string]i18n.String{} + } + for cid, c := range f.Choices { + if tf.Choices[cid] == nil { + tf.Choices[cid] = i18n.String{} + } + tf.Choices[cid][l] = c + } + } + } + } + } +} diff --git a/pkg/plugin/manifest/schema_translation_test.go b/pkg/plugin/manifest/schema_translation_test.go new file mode 100644 index 000000000..ba158d2c3 --- /dev/null +++ b/pkg/plugin/manifest/schema_translation_test.go @@ -0,0 +1,188 @@ +package manifest + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/stretchr/testify/assert" +) + +func TestTranslationMap_Translated(t *testing.T) { + m := TranslationMap{ + "en": TranslationRoot{ + Name: sr("Name"), + Description: sr("desc"), + Extensions: map[string]TranslationExtension{ + "a": { + Name: sr("ext"), + PropertySchema: TranslationPropertySchema{ + "default": { + Fields: map[string]TranslationPropertySchemaField{ + "foo": {Title: sr("foo"), Choices: map[string]string{"A": "AAA", "B": "BBB"}}, + "hoge": {Title: sr("hoge")}, + }, + }, + }, + }, + }, + Schema: TranslationPropertySchema{ + "another": { + Fields: map[string]TranslationPropertySchemaField{ + "foo": {Choices: map[string]string{"A": 
"AAA"}}, + }, + }, + }, + }, + "ja": TranslationRoot{ + Name: sr("ๅๅ‰"), + Extensions: map[string]TranslationExtension{ + "a": { + Name: sr("extJA"), + Description: sr("DESC!"), + PropertySchema: TranslationPropertySchema{ + "default": { + Fields: map[string]TranslationPropertySchemaField{ + "foo": { + Title: sr("foo!"), + Description: sr("DESC"), + Choices: map[string]string{"B": "BBB!", "C": "CCC!"}, + }, + "bar": {Title: sr("bar!")}, + }, + }, + }, + }, + "b": { + Name: sr("ext2"), + PropertySchema: TranslationPropertySchema{}, + }, + }, + Schema: TranslationPropertySchema{ + "default": { + Fields: map[string]TranslationPropertySchemaField{ + "a": {Title: sr("ใ‚")}, + }, + }, + }, + }, + "zh-CN": TranslationRoot{ + Name: sr("ๅ‘ฝๅ"), + Schema: TranslationPropertySchema{ + "another": { + Description: sr("ๆ่ฟฐ"), + }, + }, + }, + } + + expected := TranslatedRoot{ + Name: i18n.String{"en": "Name", "ja": "ๅๅ‰", "zh-CN": "ๅ‘ฝๅ"}, + Description: i18n.String{"en": "desc"}, + Extensions: map[string]*TranslatedExtension{ + "a": { + Name: i18n.String{"en": "ext", "ja": "extJA"}, + Description: i18n.String{"ja": "DESC!"}, + PropertySchema: TranslatedPropertySchema{ + "default": &TranslatedPropertySchemaGroup{ + Fields: map[string]*TranslatedPropertySchemaField{ + "foo": { + Title: i18n.String{"en": "foo", "ja": "foo!"}, + Description: i18n.String{"ja": "DESC"}, + Choices: map[string]i18n.String{ + "A": {"en": "AAA"}, + "B": {"en": "BBB", "ja": "BBB!"}, + "C": {"ja": "CCC!"}, + }, + }, + "hoge": { + Title: i18n.String{"en": "hoge"}, + }, + "bar": { + Title: i18n.String{"ja": "bar!"}, + }, + }, + }, + }, + }, + "b": { + Name: i18n.String{"ja": "ext2"}, + }, + }, + Schema: TranslatedPropertySchema{ + "default": { + Title: nil, + Description: nil, + Fields: map[string]*TranslatedPropertySchemaField{ + "a": {Title: i18n.String{"ja": "ใ‚"}}, + }, + }, + "another": { + Title: nil, + Description: i18n.String{"zh-CN": "ๆ่ฟฐ"}, + Fields: 
map[string]*TranslatedPropertySchemaField{ + "foo": {Choices: map[string]i18n.String{"A": {"en": "AAA"}}}, + }, + }, + }, + } + + assert.Equal(t, expected, m.Translated()) + assert.Equal(t, TranslatedRoot{}, TranslationMap{}.Translated()) + assert.Equal(t, TranslatedRoot{}, TranslationMap(nil).Translated()) +} + +func TestTranslatedPropertySchema_getOrCreateGroup(t *testing.T) { + target := TranslatedPropertySchema{} + expected := TranslatedPropertySchema{ + "a": {Title: i18n.String{"ja": "A"}}, + } + + group := target.getOrCreateGroup("a") + assert.Equal(t, &TranslatedPropertySchemaGroup{}, group) + + group.Title = i18n.String{"ja": "A"} + assert.Equal(t, expected, target) +} + +func TestTranslatedPropertySchema_getOrCreateField(t *testing.T) { + target := TranslatedPropertySchemaGroup{} + expected := TranslatedPropertySchemaGroup{ + Fields: map[string]*TranslatedPropertySchemaField{ + "a": {Title: i18n.String{"ja": "A"}}, + }, + } + + field := target.getOrCreateField("a") + assert.Equal(t, &TranslatedPropertySchemaField{}, field) + + field.Title = i18n.String{"ja": "A"} + assert.Equal(t, expected, target) +} + +func TestTranslatedPropertySchema_setPropertySchema(t *testing.T) { + target := TranslatedPropertySchema{ + "a": nil, + "b": {}, + } + expected := TranslatedPropertySchema{ + "a": { + Title: i18n.String{"ja": "A"}, + Fields: map[string]*TranslatedPropertySchemaField{ + "f": {Title: i18n.String{"en": "F"}}, + }}, + "b": {Title: i18n.String{"en": "B"}}, + } + + target.setPropertySchema(map[string]TranslationPropertySchema{ + "en": { + "a": { + Fields: map[string]TranslationPropertySchemaField{ + "f": {Title: sr("F")}, + }, + }, + "b": {Title: sr("B")}, + }, + "ja": {"a": {Title: sr("A")}}, + }) + assert.Equal(t, expected, target) +} diff --git a/pkg/plugin/manifest/testdata/translation_merge.yml b/pkg/plugin/manifest/testdata/translation_merge.yml deleted file mode 100644 index 6f23f4a08..000000000 --- a/pkg/plugin/manifest/testdata/translation_merge.yml +++ 
/dev/null @@ -1,34 +0,0 @@ -{ - "id": "xxx", - "name": "aaa", - "version": "1.1.1", - "description": "ddd", - "extensions": - [ - { - "id": "test_ext", - "name": "ttt", - "visualizer": "cesium", - "type": "primitive", - "schema": - { - "groups": - [ - { - "id": "test_ps", - "title": "sss", - "fields": - [ - { - "id": "test_field", - "title": "nnn", - "type": "string", - "description": "kkk", - }, - ], - }, - ], - }, - }, - ], -} diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 0a12d09da..37b2d5beb 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -34,7 +34,7 @@ func (p *Plugin) Name() i18n.String { if p == nil { return nil } - return p.name.Copy() + return p.name.Clone() } func (p *Plugin) Author() string { @@ -48,7 +48,7 @@ func (p *Plugin) Description() i18n.String { if p == nil { return nil } - return p.description.Copy() + return p.description.Clone() } func (p *Plugin) RepositoryURL() string { @@ -127,26 +127,12 @@ func (p *Plugin) Clone() *Plugin { return &Plugin{ id: p.id.Clone(), - name: p.name.Copy(), + name: p.name.Clone(), author: p.author, - description: p.description.Copy(), + description: p.description.Clone(), repositoryURL: p.repositoryURL, extensions: extensions, extensionOrder: extensionOrder, schema: p.schema.CopyRef(), } } - -func (p *Plugin) Rename(name i18n.String) { - if p == nil { - return - } - p.name = name.Copy() -} - -func (p *Plugin) SetDescription(des i18n.String) { - if p == nil { - return - } - p.description = des.Copy() -} diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index 1917a58bb..9b01cda42 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -79,18 +79,6 @@ func TestPlugin_PropertySchemas(t *testing.T) { } } -func TestPlugin_Rename(t *testing.T) { - p := New().Name(i18n.StringFrom("x")).MustBuild() - p.Rename(i18n.StringFrom("z")) - assert.Equal(t, i18n.StringFrom("z"), p.Name()) -} - -func TestPlugin_SetDescription(t *testing.T) { - p := New().MustBuild() - 
p.SetDescription(i18n.StringFrom("xxx")) - assert.Equal(t, i18n.StringFrom("xxx"), p.Description()) -} - func TestPlugin_Author(t *testing.T) { p := New().Author("xx").MustBuild() assert.Equal(t, "xx", p.Author()) diff --git a/pkg/plugin/pluginpack/package.go b/pkg/plugin/pluginpack/package.go index c7e9344c8..cc7544541 100644 --- a/pkg/plugin/pluginpack/package.go +++ b/pkg/plugin/pluginpack/package.go @@ -6,6 +6,7 @@ import ( "io" "path" "path/filepath" + "regexp" "github.com/reearth/reearth-backend/pkg/file" "github.com/reearth/reearth-backend/pkg/plugin" @@ -15,6 +16,8 @@ import ( const manfiestFilePath = "reearth.yml" +var translationFileNameRegexp = regexp.MustCompile(`reearth_([a-zA-Z]+(?:-[a-zA-Z]+)?).yml`) + type Package struct { Manifest *manifest.Manifest Files file.Iterator @@ -32,6 +35,7 @@ func PackageFromZip(r io.Reader, scene *plugin.SceneID, sizeLimit int64) (*Packa } basePath := file.ZipBasePath(zr) + f, err := zr.Open(path.Join(basePath, manfiestFilePath)) if err != nil { return nil, rerror.From("manifest open error", err) @@ -40,7 +44,12 @@ func PackageFromZip(r io.Reader, scene *plugin.SceneID, sizeLimit int64) (*Packa _ = f.Close() }() - m, err := manifest.Parse(f, scene) + translations, err := readTranslation(zr, basePath) + if err != nil { + return nil, err + } + + m, err := manifest.Parse(f, scene, translations.TranslatedRef()) if err != nil { return nil, rerror.From("invalid manifest", err) } @@ -56,3 +65,31 @@ func iterator(a file.Iterator, prefix string) file.Iterator { return p == manfiestFilePath || filepath.Ext(p) != ".js" }) } + +func readTranslation(fs *zip.Reader, base string) (manifest.TranslationMap, error) { + translationMap := manifest.TranslationMap{} + for _, f := range fs.File { + if filepath.Dir(f.Name) != base { + continue + } + + lang := translationFileNameRegexp.FindStringSubmatch(filepath.Base(f.Name)) + if len(lang) == 0 { + continue + } + langfile, err := f.Open() + if err != nil { + return nil, 
rerror.ErrInternalBy(err) + } + defer func() { + _ = langfile.Close() + }() + t, err := manifest.ParseTranslation(langfile) + if err != nil { + return nil, err + } + translationMap[lang[1]] = t + } + + return translationMap, nil +} diff --git a/pkg/plugin/pluginpack/package_test.go b/pkg/plugin/pluginpack/package_test.go index ecd2a8073..461107bf6 100644 --- a/pkg/plugin/pluginpack/package_test.go +++ b/pkg/plugin/pluginpack/package_test.go @@ -12,22 +12,22 @@ import ( ) func TestPackageFromZip(t *testing.T) { + expected := &manifest.Manifest{ + Plugin: plugin.New(). + ID(plugin.MustID("testplugin~1.0.1")). + Name(i18n.String{"en": "testplugin", "ja": "ใƒ†ใ‚นใƒˆใƒ—ใƒฉใ‚ฐใ‚คใƒณ", "zh-CN": "ๆต‹่ฏ•ๆ’ไปถ"}). + MustBuild(), + } + f, err := os.Open("testdata/test.zip") assert.NoError(t, err) defer func() { _ = f.Close() }() - expected := plugin.New(). - ID(plugin.MustID("testplugin~1.0.1")). - Name(i18n.StringFrom("testplugin")). - MustBuild() - - p, err := PackageFromZip(f, nil, 1000) + p, err := PackageFromZip(f, nil, 10000) assert.NoError(t, err) - assert.Equal(t, &manifest.Manifest{ - Plugin: expected, - }, p.Manifest) + assert.Equal(t, expected, p.Manifest) var files []string for { diff --git a/pkg/plugin/pluginpack/testdata/test.zip b/pkg/plugin/pluginpack/testdata/test.zip index b13b0aba34f9750cd98e656a318482707130cddb..0d371acbe428c4d8d123d08cbb4d70400e987e89 100644 GIT binary patch literal 1804 zcmWIWW@h1H0D;&!o*`fclwf6$VJJy0F3}GS;bdUGG=n+m3J{l8a5FHnd}U-{U=aZ- z3;=2e(Hsoxy={YUmg^Z;f}|uE82Dh?^<0AEgG=&@Qo-hAZ4OSnG!?^~w=>TBH5*7A z*l!flVOV5Pcsy>;+XE%DDhw1Y`0x1`RLw|hc97u=GM>?`)OP)X`6tdA+3O!O1GYCW z*M0X}_3V0G-KX43>KPc)`2VUkFfvF@2`ib=)%H@{>!{DY!_undaVmQXf9C$$+rjF* z&ssEyL!oiYuYY@XYxo3Ru1WXujg5@GToQZP{7d$7eK)^X%MKqYW0#GzD!#n0{b#Y_ zqV;=T$F>|?k)0#kZBcsILc+(Yz0M%7s8sUUK8c%262Co{7bO}6TwjrXb6=)e#pREn7@qV}`d1PWoe!{1NLM)Ct z+7_%?uRquFG%1J)SlSnCY1Q*G-m`AORWEtwvlqp7X`J0Kw<+wR)Gdc%kksQPM?ZWx zSdh}DE|Rvu^47256D%7|u6Rin{k5N2`%j(Yk^JE++b(ZQ{AMq>^V+AvSG8pg@1+^n 
z1aDj$)Af34Ro}J?u9>$u1dii@>7P)Wo8ajQFfX zy~^Aia4h}+#vllzrG{z+rM$%4R3$3~rN_-}kDGQrZti&8JpFO=%EwI`9ycv{+`L&y zp_U7jXdre*6;2EWS_KDOFgpf~A@pjnPUEC4eP8V>M4z~QphVF9On0yhN(XnE@DX`ayZ zJ%1+nQec3Vx31Q?Gv}E-C03$r^j zFD11?FRK{tkGq)uNY2kI&d*8J%gImIP|8Tn$;nqJ&o9bJQPQ+V3i51VHeZ@6ogfV~ z9)!hUR=}dUDnr-V5AMmAz&sDaXu*#Z)6ceczgR!@S^uObySG7Nmyt=18CRJCEE&K+ zfZ+fraA743E2M-$vn9YAWE8IQ1!fcj!;(gOm{CZj4bV(bX@k{FTuB~cCa?$tn#sTf zbsj8aU>N{tFqXs*aV)NM4Rfpj!&}D^n8DCg4m1yI3P*$^C|#hYab$f->yCBV=NVgd00kS+UJ literal 789 zcmWIWW@Zs#-~d8N&Hq6RP#^=OIT#ceGV@YWEA+C8Lqm8O*n?tN5}Eb{B{BkWX$3a} zBg>P7;GM|?N zssx!F0Jrl*olP)M01O;~Bsb8`qSVyHqLK`~%G?~VwUR*VU>MEX)nNgrd;&KG1!#Hd z>S>@=2&T_+p6Gl_Ey4QzWXBYmI=Gfh>eL z1!O<6_d$GCpi@dxi%az3eh2v Date: Mon, 14 Feb 2022 12:42:25 +0300 Subject: [PATCH 148/253] feat: improve the Infobox style (manifest) (#110) * improved infobox style * fix mask id * fix order * update ja title * set min/max for height and outlineWidth * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> Co-authored-by: HideBa Co-authored-by: lavalse Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> --- pkg/builtin/manifest.yml | 65 ++++++++++++++++++++++++++++++++----- pkg/builtin/manifest_ja.yml | 48 +++++++++++++++++++-------- 2 files changed, 91 insertions(+), 22 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 3c4768d4a..e4bc0784c 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -298,6 +298,50 @@ extensions: - id: title type: string title: Title + - id: showTitle + type: bool 
+ title: Show Title + - id: position + type: string + title: Position + defaultValue: right + choices: + - key: right + label: Right + - key: middle + label: Middle + - key: left + label: Left + - id: size + type: string + title: Width + defaultValue: small + choices: + - key: small + label: Small + - key: medium + label: Medium + - key: large + label: Large + - id: heightType + type: string + title: Height Type + defaultValue: auto + choices: + - key: auto + label: Auto + - key: manual + label: Manual + - id: height + type: number + title: Height + min: 284 + max: 2048 + suffix: px + availableIf: + field: heightType + type: string + value: manual - id: infoboxPaddingTop type: number title: Top padding @@ -326,19 +370,22 @@ extensions: max: 40 suffix: px description: "The space between the right side of the infobox and the title and blocks. Min: 0 Max: 40" - - id: size - type: string - title: Size Type - defaultValue: small - choices: - - key: small - label: Small - - key: large - label: Large - id: bgcolor type: string title: Background Color ui: color + - id: outlineWidth + type: number + title: Outline width + suffix: px + max: 20 + - id: outlineColor + type: string + title: Outline Color + ui: color + - id: useMask + type: bool + title: Use Mask - id: typography type: typography title: Font diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 547565e7f..e0b05e420 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -152,29 +152,51 @@ extensions: description: ้–ฒ่ฆง่€…ใŒๅœฐๅ›ณไธŠใฎใƒฌใ‚คใƒคใƒผใ‚’ใ‚ฏใƒชใƒƒใ‚ฏใ—ใŸๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใƒœใƒƒใ‚ฏใ‚นใงใ™ใ€‚ใƒ†ใ‚ญใ‚นใƒˆใ‚„็”ปๅƒใ€ๅ‹•็”ปใชใฉใฎใ‚ณใƒณใƒ†ใƒณใƒ„ใ‚’่กจ็คบใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ propertySchema: default: - title: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚น - paddingTop: - title: ไฝ™็™ฝไธŠ - description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" - paddingBottom: - title: ไฝ™็™ฝไธ‹ - description: 
"ไธ‹็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" - paddingLeft: - title: ไฝ™็™ฝๅทฆ - description: "ๅทฆ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" - paddingRight: - title: ไฝ™็™ฝๅณ - description: "ๅณ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40ox" + title: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ fields: title: title: ใ‚ฟใ‚คใƒˆใƒซ + showTitle: + title: ใ‚ฟใ‚คใƒˆใƒซ่กจ็คบ + position: + title: ไฝ็ฝฎ + choices: + right: ๅณ + middle: ไธญๅคฎ + left: ๅทฆ size: title: ใ‚ตใ‚คใ‚บ choices: small: ๅฐ + medium: ไธญ large: ๅคง + heightType: + title: ้ซ˜ใ•่จญๅฎš + choices: + auto: ่‡ชๅ‹• + manual: ๆ‰‹ๅ‹• + height: + title: ้ซ˜ใ• + infoboxPaddingTop: + title: ไฝ™็™ฝไธŠ + description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" + infoboxPaddingBottom: + title: ไฝ™็™ฝไธ‹ + description: "ไธ‹็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" + infoboxPaddingLeft: + title: ไฝ™็™ฝๅทฆ + description: "ๅทฆ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" + infoboxPaddingRight: + title: ไฝ™็™ฝๅณ + description: "ๅณ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐpxใƒปๆœ€ๅคง40px" bgcolor: title: ่ƒŒๆ™ฏ่‰ฒ + outlineWidth: + title: ็ทšๅน… + outlineColor: + title: ็ทš่‰ฒ + useMask: + title: ่ƒŒๆ™ฏใ‚ชใƒผใƒใƒผใƒฌใ‚ค typography: title: ใƒ•ใ‚ฉใƒณใƒˆ marker: From 1738812428e6fa2896237fa58f7916ec060c5d56 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 15 Feb 2022 11:07:50 +0900 Subject: [PATCH 149/253] chore: update all dependencies (#111) Co-authored-by: Renovate Bot --- docker-compose.yml | 2 +- go.mod | 38 +++++++-------- go.sum | 115 +++++++++++++++++++++++---------------------- 3 files changed, 78 insertions(+), 77 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index d51821c97..74e85d775 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -15,7 +15,7 @@ services: depends_on: - reearth-mongo reearth-mongo: - image: mongo:4.2.0-bionic + image: mongo:4.4.6-bionic ports: - 
27017:27017 volumes: diff --git a/go.mod b/go.mod index 3c139bb7e..641d28698 100644 --- a/go.mod +++ b/go.mod @@ -1,10 +1,10 @@ module github.com/reearth/reearth-backend require ( - cloud.google.com/go/profiler v0.1.2 - cloud.google.com/go/storage v1.19.0 + cloud.google.com/go/profiler v0.2.0 + cloud.google.com/go/storage v1.20.0 github.com/99designs/gqlgen v0.16.0 - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 github.com/auth0/go-jwt-middleware v1.0.1 github.com/blang/semver v3.5.1+incompatible github.com/form3tech-oss/jwt-go v3.2.5+incompatible @@ -25,7 +25,7 @@ require ( github.com/pkg/errors v0.9.1 github.com/ravilushqa/otelgqlgen v0.4.1 github.com/sirupsen/logrus v1.8.1 - github.com/spf13/afero v1.8.0 + github.com/spf13/afero v1.8.1 github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 github.com/stretchr/testify v1.7.0 github.com/twpayne/go-kml v1.5.2 @@ -33,21 +33,21 @@ require ( github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/dataloaden v0.3.0 github.com/vektah/gqlparser/v2 v2.3.1 - go.mongodb.org/mongo-driver v1.8.2 - go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.28.0 - go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.28.0 - go.opentelemetry.io/otel v1.3.0 - go.opentelemetry.io/otel/sdk v1.3.0 + go.mongodb.org/mongo-driver v1.8.3 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 + go.opentelemetry.io/otel v1.4.0 + go.opentelemetry.io/otel/sdk v1.4.0 golang.org/x/text v0.3.7 golang.org/x/tools v0.1.9 - google.golang.org/api v0.66.0 + google.golang.org/api v0.68.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 ) require ( cloud.google.com/go v0.100.2 // indirect - 
cloud.google.com/go/compute v0.1.0 // indirect + cloud.google.com/go/compute v1.2.0 // indirect cloud.google.com/go/iam v0.1.1 // indirect cloud.google.com/go/trace v1.0.0 // indirect github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect @@ -59,15 +59,15 @@ require ( github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/fatih/color v1.12.0 // indirect github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect - github.com/go-logr/logr v1.2.1 // indirect - github.com/go-logr/stdr v1.2.0 // indirect + github.com/go-logr/logr v1.2.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect github.com/go-stack/stack v1.8.0 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/golang/snappy v0.0.3 // indirect github.com/google/go-cmp v0.5.7 // indirect - github.com/google/pprof v0.0.0-20211214055906-6f57359322fd // indirect + github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 // indirect github.com/googleapis/gax-go/v2 v2.1.1 // indirect github.com/gorilla/websocket v1.4.2 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect @@ -91,19 +91,19 @@ require ( github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect go.opentelemetry.io/contrib v1.3.0 // indirect - go.opentelemetry.io/otel/trace v1.3.0 // indirect + go.opentelemetry.io/otel/trace v1.4.0 // indirect go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa // indirect golang.org/x/mod v0.5.1 // indirect - golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 // indirect + golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/sys 
v0.0.0-20220114195835-da31bd327af9 // indirect + golang.org/x/sys v0.0.0-20220207234003-57398862261d // indirect golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5 // indirect - google.golang.org/grpc v1.40.1 // indirect + google.golang.org/genproto v0.0.0-20220207185906-7721543eae58 // indirect + google.golang.org/grpc v1.44.0 // indirect google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect diff --git a/go.sum b/go.sum index 9c8c4570e..589c8fa4c 100644 --- a/go.sum +++ b/go.sum @@ -23,7 +23,6 @@ cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvEY= cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= @@ -38,14 +37,15 @@ cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvf cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/compute v0.1.0 h1:rSUBvAyVwNJ5uQCKNJFMwPtTvJkfN38b6Pvb9zZoqJ8= cloud.google.com/go/compute v0.1.0/go.mod 
h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.2.0 h1:EKki8sSdvDU0OO9mAXGwPXOTOgPz2l08R0/IutDH11I= +cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= -cloud.google.com/go/profiler v0.1.2 h1:QKCWcmQA9kVXsDGlCpRH0tudZg7xg/jfgw7m0Kc4nfo= -cloud.google.com/go/profiler v0.1.2/go.mod h1:Ei3jL4tlaM/zPyJKR7E1Txd5oWhA5zGfmXgEHFtCB5g= +cloud.google.com/go/profiler v0.2.0 h1:TZEKR39niWTuvpak6VNg+D8J5qTzJnyaD1Yl4BOU+d8= +cloud.google.com/go/profiler v0.2.0/go.mod h1:Rn0g4ZAbYR1sLVP7GAmCZxid4dmtD/nURxcaxf6pngI= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -57,8 +57,8 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= -cloud.google.com/go/storage v1.19.0 h1:XOQSnPJD8hRtZJ3VdCyK0mBZsGGImrzPAMbSWcHSe6Q= -cloud.google.com/go/storage v1.19.0/go.mod h1:6rgiTRjOqI/Zd9YKimub5TIB4d+p3LH33V3ZE1DMuUM= +cloud.google.com/go/storage v1.20.0 h1:kv3rQ3clEQdxqokkCCgQo+bxPqcuXiROjxvnKb8Oqdk= +cloud.google.com/go/storage v1.20.0/go.mod h1:TiC1o6FxNCG8y5gB7rqCsFZCIYPMPZCO81ppOoEPLGI= cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= 
cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= @@ -67,8 +67,8 @@ github.com/99designs/gqlgen v0.16.0 h1:7Qc4Ll3mfN3doAyUWOgtGLcBGu+KDgK48HdkBGLZV github.com/99designs/gqlgen v0.16.0/go.mod h1:nbeSjFkqphIqpZsYe1ULVz0yfH8hjpJdJIQoX/e0G2I= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0 h1:38fNtfhHY6bs22b/D6+hDzO6JR0rDzpGPD36dY2uPL4= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.0.0/go.mod h1:jE23wM1jvwSKgdGcoOkj5j9n1VWtncW36pL2bK1JU+0= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 h1:JLLDOHEcoREA54hzOnjr8KQcZCvX0E8KhosjE0F1jaQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0/go.mod h1:Pe8G2QFgCaohbU/zHRBjn0YaFh9z8/HtuEDh/Lyo04E= github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= @@ -96,6 +96,7 @@ github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdn github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod 
h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -103,7 +104,11 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= @@ -121,6 +126,7 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane 
v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= @@ -137,10 +143,12 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.1 h1:DX7uPQ4WgAWfoh+NGGlbJQswnYIVvz0SRlLS3rPZQDA= github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/stdr v1.2.0 h1:j4LrlVXgrbIWO83mmQUnK0Hi+YnbD+vzrE1z/EphbFE= +github.com/go-logr/logr v1.2.2 h1:ahHml/yUpnlb96Rp8HCvtYVPY8ZYpxq3g7UYchIYwbs= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= @@ -256,10 +264,9 @@ github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLe 
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20211214055906-6f57359322fd h1:1FjCyPC+syAzJ5/2S8fqdZK1R22vvA0J7JZKcuOIQ7Y= -github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 h1:hQb7P4XOakoaN+LET7TJ7PNoBsGm8Tf4lNtAdNwkxDE= +github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -334,10 +341,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.6.1/go.mod h1:RnjgMWNDB9g/HucVWhQYNQP9PvbYf6adqftqryo7s9k= github.com/labstack/echo/v4 v4.6.3 h1:VhPuIZYxsbPmo4m9KAkMU/el2442eB7EBFFhNTTT9ac= github.com/labstack/echo/v4 v4.6.3/go.mod h1:Hk5OiHj0kDqmFq7aHe7eDqI7CUhuCrfpupQtLGGLm7A= -github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/labstack/gommon 
v0.3.1 h1:OomWaJXm7xR6L1HmEtGyQf26TEn7V6X88mktX9kee9o= github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= @@ -348,13 +353,11 @@ github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsI github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= @@ -413,8 +416,8 @@ github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYl github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.8.0 h1:5MmtuhAgYeU6qpa7w7bP0dv6MBYuup0vekhSpSkoq60= 
-github.com/spf13/afero v1.8.0/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/spf13/afero v1.8.1 h1:izYHOT71f9iZ7iq37Uqjael60/vYC6vMtzedudZ0zEk= +github.com/spf13/afero v1.8.1/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 h1:Fod/tm/5c19889+T6j7mXxg/tEJrcLuDJxR/98raj80= @@ -449,7 +452,6 @@ github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= @@ -476,9 +478,8 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.8.0/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= -go.mongodb.org/mongo-driver v1.8.2 h1:8ssUXufb90ujcIvR6MyE1SchaNj0SFxsakiZgxIyrMk= -go.mongodb.org/mongo-driver v1.8.2/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.8.3 
h1:TDKlTkGDKm9kkJVUOAXDK5/fkqKHJVwYQSpoRfB43R4= +go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -489,33 +490,31 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib v1.3.0 h1:p9Gd+3dD7yB+AIph2Ltg11QDX6Y+yWMH0YQVTpTTP2c= go.opentelemetry.io/contrib v1.3.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.28.0 h1:w5fHM6jfxOm0zeKS9fTFZSyktW4Xzcw0REGXEwXQGko= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.28.0/go.mod h1:mG9tj72wNEUZGwJ/9IqfJ1nByl1aW0McYkY5Hjm8SM0= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.28.0 h1:gQqm6bGgJrF1b+qvUPM28NqOQUNot8lYxcbrG4hcyyQ= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.28.0/go.mod h1:aM2EjzJt4BHMoDrzAO40IJSGMayznRWts38juP4m0HQ= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0 h1:qW6j1kJU24yo2xIu16Py4m4AXn1dd+s2uKllGnTFAm0= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.24.0/go.mod h1:7W3JSDYTtH3qKKHrS1fMiwLtK7iZFLPq1+7htfspX/E= -go.opentelemetry.io/contrib/propagators/b3 v1.2.0 h1:+zQjl3DBSOle9GEhHuhqzDUKtYcVSfbHSNv24hsoOJ0= -go.opentelemetry.io/contrib/propagators/b3 v1.2.0/go.mod h1:kO8hNKCfa1YmQJ0lM7pzfJGvbXEipn/S7afbOfaw2Kc= -go.opentelemetry.io/otel v1.0.0-RC3/go.mod h1:Ka5j3ua8tZs4Rkq4Ex3hwgBgOchyPVq5S6P2lz//nKQ= -go.opentelemetry.io/otel v1.0.0/go.mod h1:AjRVh9A5/5DE7S+mZtTR6t8vpKKryam+0lREnfmS4cg= 
+go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0 h1:BLXo2v0bW3iq8NhgSf/1X6Cu7UcfkNk3yyuxNcZB1wk= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0/go.mod h1:+0RWgKCuTYtJaZo9Io/D2PAvkMZsRkmYaNgHhwzrCDM= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 h1:PG5cMt7dHmNmuhQczPRF4nOfAUkZe0tezDZEtckz28k= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0/go.mod h1:V35q3VIMKbgD3FkIiAISJJpSUQxpn2zKQ0pQc7bx9Eg= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0 h1:sdwza9BScvbOFaZLhvKDQc54vQ8CWM8jD9BO2t+rP4E= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0/go.mod h1:4vatbW3QwS11DK0H0SB7FR31/VbthXcYorswdkVXdyg= +go.opentelemetry.io/contrib/propagators/b3 v1.4.0 h1:wDb2ct7xMzossYpx44w81skxkEyeT2IRnBgYKqyEork= +go.opentelemetry.io/contrib/propagators/b3 v1.4.0/go.mod h1:K399DN23drp0RQGXCbSPOt9075HopQigMgUL99oR8hc= go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= -go.opentelemetry.io/otel v1.2.0/go.mod h1:aT17Fk0Z1Nor9e0uisf98LrntPGMnk4frBO9+dkf69I= -go.opentelemetry.io/otel v1.3.0 h1:APxLf0eiBwLl+SOXiJJCVYzA1OOJNyAoV8C5RNRyy7Y= +go.opentelemetry.io/otel v1.1.0/go.mod h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= -go.opentelemetry.io/otel/internal/metric v0.23.0 h1:mPfzm9Iqhw7G2nDBmUAjFTfPqLZPbOW2k7QI57ITbaI= -go.opentelemetry.io/otel/internal/metric v0.23.0/go.mod h1:z+RPiDJe30YnCrOhFGivwBS+DU1JU/PiLKkk4re2DNY= -go.opentelemetry.io/otel/metric v0.23.0 h1:mYCcDxi60P4T27/0jchIDFa1WHEfQeU3zH9UEMpnj2c= -go.opentelemetry.io/otel/metric v0.23.0/go.mod h1:G/Nn9InyNnIv7J6YVkQfpc0JCfKBNJaERBGw08nqmVQ= -go.opentelemetry.io/otel/sdk v1.0.1/go.mod h1:HrdXne+BiwsOHYYkBE5ysIcv2bvdZstxzmCQhxTcZkI= -go.opentelemetry.io/otel/sdk v1.3.0 
h1:3278edCoH89MEJ0Ky8WQXVmDQv3FX4ZJ3Pp+9fJreAI= +go.opentelemetry.io/otel v1.4.0 h1:7ESuKPq6zpjRaY5nvVDGiuwK7VAJ8MwkKnmNJ9whNZ4= +go.opentelemetry.io/otel v1.4.0/go.mod h1:jeAqMFKy2uLIxCtKxoFj0FAL5zAPKQagc3+GtBWakzk= +go.opentelemetry.io/otel/internal/metric v0.24.0 h1:O5lFy6kAl0LMWBjzy3k//M8VjEaTDWL9DPJuqZmWIAA= +go.opentelemetry.io/otel/internal/metric v0.24.0/go.mod h1:PSkQG+KuApZjBpC6ea6082ZrWUUy/w132tJ/LOU3TXk= +go.opentelemetry.io/otel/metric v0.24.0 h1:Rg4UYHS6JKR1Sw1TxnI13z7q/0p/XAbgIqUTagvLJuU= +go.opentelemetry.io/otel/metric v0.24.0/go.mod h1:tpMFnCD9t+BEGiWY2bWF5+AwjuAdM0lSowQ4SBA3/K4= go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= -go.opentelemetry.io/otel/trace v1.0.0-RC3/go.mod h1:VUt2TUYd8S2/ZRX09ZDFZQwn2RqfMB5MzO17jBojGxo= -go.opentelemetry.io/otel/trace v1.0.0/go.mod h1:PXTWqayeFUlJV1YDNhsJYB184+IvAH814St6o6ajzIs= +go.opentelemetry.io/otel/sdk v1.4.0 h1:LJE4SW3jd4lQTESnlpQZcBhQ3oci0U2MLR5uhicfTHQ= +go.opentelemetry.io/otel/sdk v1.4.0/go.mod h1:71GJPNJh4Qju6zJuYl1CrYtXbrgfau/M9UAggqiy1UE= go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= -go.opentelemetry.io/otel/trace v1.2.0/go.mod h1:N5FLswTubnxKxOJHM7XZC074qpeEdLy3CgAVsdMucK0= -go.opentelemetry.io/otel/trace v1.3.0 h1:doy8Hzb1RJ+I3yFhtDmwNc7tIyw1tNMOIsyPzp1NOGY= +go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= +go.opentelemetry.io/otel/trace v1.4.0 h1:4OOUrPZdVFQkbzl/JSdvGCWIdw5ONXXxzHlaLlWppmo= +go.opentelemetry.io/otel/trace v1.4.0/go.mod h1:uc3eRsqDfWs9R7b92xbQbU42/eTNz4N+gLP8qJCi4aE= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -608,11 +607,10 @@ golang.org/x/net 
v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210716203947-853a461950ff/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210913180222-943fd674d43e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211101193420-4a448f8816b3 h1:VrJZAjbekhoRn7n5FBujY31gboH+iB3pdLxn3gE9FjU= -golang.org/x/net v0.0.0-20211101193420-4a448f8816b3/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -660,7 +658,6 @@ golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -702,7 +699,6 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210910150752-751e447fb3d0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -711,10 +707,13 @@ golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 h1:XfKQ4OlFl8okEOr5UvAqFRVj8pY/4yfcXrddB8qAbU0= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220207234003-57398862261d 
h1:Bm7BNOQt2Qv7ZqysjeLjgCBanX+88Z/OtdvsrEv1Djc= +golang.org/x/sys v0.0.0-20220207234003-57398862261d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -835,8 +834,9 @@ google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3h google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= google.golang.org/api v0.65.0/go.mod h1:ArYhxgGadlWmqO1IqVujw6Cs8IdD33bTmzKo2Sh+cbg= -google.golang.org/api v0.66.0 h1:CbGy4LEiXCVCiNEDFgGpWOVwsDT7E2Qej1ZvN1P7KPg= google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= +google.golang.org/api v0.68.0 h1:9eJiHhwJKIYX6sX2fUZxQLi7pDRA/MYu8c12q6WbJik= +google.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7TH8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -895,8 +895,6 @@ google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxH google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto 
v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210721163202-f1cecdd8b78a/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= @@ -917,8 +915,10 @@ google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5 h1:zzNejm+EgrbLfDZ6lu9Uud2IVvHySPl8vQzf04laR5Q= -google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220204002441-d6cc3cc0770e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207185906-7721543eae58 h1:i67FGOy2/zGfhE3YgHdrOrcFbOBhqdcRoBrsDqSQrOI= +google.golang.org/genproto v0.0.0-20220207185906-7721543eae58/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.19.0/go.mod 
h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -944,8 +944,9 @@ google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.40.1 h1:pnP7OclFFFgFi4VHQDQDaoXUVauOFyktqTsqqgzFKbc= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0 h1:weqSxi/TMs1SqFRMHCtBgXRs8k3X39QIDEZ0pRcttUg= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= From 1ad0cec9df639cd8b00ec90198629e60cbd00dd8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 15 Feb 2022 11:30:54 +0900 Subject: [PATCH 150/253] ci: update renovate config --- .github/renovate.json | 40 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 36 insertions(+), 4 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index b27f006a3..0ecc3d9cf 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -4,8 +4,7 @@ ":semanticCommits", ":semanticCommitScopeDisabled", ":maintainLockFilesWeekly", - ":enableVulnerabilityAlertsWithLabel(security)", - ":semanticCommitTypeAll(chore)" + ":enableVulnerabilityAlertsWithLabel(security)" ], "postUpdateOptions": [ "gomodTidy", @@ -13,11 +12,44 @@ ], 
"packageRules": [ { + "enabledManagers": [ + "gomod" + ], + "packagePatterns": [ + "*" + ], + "groupName": "dependencies", + "groupSlug": "gomod", + "semanticCommitType": "chore", + "extends": [ + "schedule:earlyMondays" + ] + }, + { + "enabledManagers": [ + "dockerfile", + "docker-compose" + ], + "packagePatterns": [ + "*" + ], + "groupName": "docker dependencies", + "groupSlug": "docker", + "semanticCommitType": "chore", + "extends": [ + "schedule:earlyMondays" + ] + }, + { + "enabledManagers": [ + "github-actions" + ], "packagePatterns": [ "*" ], - "groupName": "all dependencies", - "groupSlug": "all", + "groupName": "github actions dependencies", + "groupSlug": "github-actions", + "semanticCommitType": "ci", "extends": [ "schedule:earlyMondays" ] From 2dc192764b4212bae498ef518a0fef4ec0952acf Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 15 Feb 2022 15:18:57 +0900 Subject: [PATCH 151/253] feat: overwrite installation of new plug-ins without removing (automatic property migration) (#113) --- .../adapter/gql/gqlmodel/convert_scene.go | 2 +- internal/app/repo.go | 4 +- internal/infrastructure/memory/layer.go | 83 ++++- internal/infrastructure/memory/plugin.go | 7 +- internal/infrastructure/memory/property.go | 77 +++- .../infrastructure/memory/property_schema.go | 11 +- internal/infrastructure/mongo/container.go | 3 +- internal/infrastructure/mongo/layer.go | 89 ++++- ...220214180713_split_schema_of_properties.go | 50 +++ .../infrastructure/mongo/migration/client.go | 8 +- .../mongo/migration/migrations.go | 9 +- .../infrastructure/mongo/mongodoc/client.go | 41 ++- .../mongo/mongodoc/clientcol.go | 8 + .../infrastructure/mongo/mongodoc/property.go | 23 +- .../infrastructure/mongo/mongodoc/scene.go | 5 +- internal/infrastructure/mongo/property.go | 36 +- .../infrastructure/mongo/property_schema.go | 4 +- internal/usecase/interactor/plugin.go | 2 + internal/usecase/interactor/plugin_delete.go | 58 --- internal/usecase/interactor/plugin_upload.go | 322 
++++++++++++---- .../usecase/interactor/plugin_upload_test.go | 345 ++++++++++++++++++ internal/usecase/interactor/scene.go | 12 +- internal/usecase/interfaces/plugin.go | 7 +- internal/usecase/repo/layer.go | 4 + internal/usecase/repo/property.go | 3 + pkg/id/plugin.go | 9 + pkg/id/plugin_test.go | 16 + pkg/id/property_schema.go | 55 ++- pkg/id/property_schema_test.go | 71 ++-- pkg/layer/decoding/reearth.go | 19 +- pkg/layer/id.go | 3 +- pkg/layer/infobox.go | 17 +- pkg/layer/infobox_field.go | 2 +- pkg/layer/infobox_test.go | 30 +- pkg/layer/initializer.go | 12 +- pkg/layer/layer.go | 5 +- pkg/layer/layerops/processor.go | 2 +- pkg/layer/loader.go | 3 + pkg/plugin/id.go | 3 +- pkg/plugin/manifest/convert_test.go | 8 +- pkg/plugin/manifest/diff.go | 4 +- pkg/plugin/manifest/diff_test.go | 6 +- pkg/property/diff.go | 33 ++ pkg/property/diff_test.go | 19 + pkg/property/id.go | 3 +- pkg/property/property.go | 4 + pkg/scene/builder.go | 10 +- pkg/scene/builder/builder_test.go | 2 +- pkg/scene/builder/scene.go | 2 +- pkg/scene/builder_test.go | 133 +++---- pkg/scene/plugin.go | 7 + pkg/scene/plugin_test.go | 3 + pkg/scene/plugins.go | 38 +- pkg/scene/plugins_test.go | 148 +++++++- pkg/scene/scene.go | 26 +- pkg/scene/scene_test.go | 10 +- pkg/scene/sceneops/plugin_migrator.go | 4 +- pkg/scene/widgets.go | 39 +- pkg/scene/widgets_test.go | 78 ++-- 59 files changed, 1502 insertions(+), 535 deletions(-) create mode 100644 internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go delete mode 100644 internal/usecase/interactor/plugin_delete.go create mode 100644 internal/usecase/interactor/plugin_upload_test.go diff --git a/internal/adapter/gql/gqlmodel/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go index 5b7953ff0..c99bf2e74 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -71,7 +71,7 @@ func ToScene(scene *scene.Scene) *Scene { UpdatedAt: scene.UpdatedAt(), 
Clusters: clusters, Widgets: widgets, - WidgetAlignSystem: ToWidgetAlignSystem(scene.WidgetAlignSystem()), + WidgetAlignSystem: ToWidgetAlignSystem(scene.Widgets().Alignment()), Plugins: plugins, } } diff --git a/internal/app/repo.go b/internal/app/repo.go index 27d12a694..53e5e311b 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -34,10 +34,10 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. SetMonitor(otelmongo.NewMonitor()), ) if err != nil { - log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) + log.Fatalf("repo initialization error: %+v\n", err) } if err := mongorepo.InitRepos(ctx, repos, client, "reearth"); err != nil { - log.Fatalln(fmt.Sprintf("Failed to init mongo: %+v", err)) + log.Fatalf("Failed to init mongo: %+v\n", err) } // File diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index d59bc308d..06d87f128 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -137,6 +137,67 @@ func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id return result, nil } +func (r *Layer) FindParentsByIDs(_ context.Context, ids []id.LayerID, scenes []id.SceneID) (layer.GroupList, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.GroupList{} + for _, l := range r.data { + if !isSceneIncludes(l.Scene(), scenes) { + continue + } + gl, ok := l.(*layer.Group) + if !ok { + continue + } + for _, cl := range gl.Layers().Layers() { + if cl.Contains(ids) { + res = append(res, gl) + } + } + } + + return res, nil +} + +func (r *Layer) FindByPluginAndExtension(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.List{} + for _, l := range r.data { + l := l + if !isSceneIncludes(l.Scene(), scenes) { + continue + } + + if p := l.Plugin(); p != nil && p.Equal(pid) { + e := l.Extension() 
+ if eid == nil || e != nil && *e == *eid { + res = append(res, &l) + } + } + } + + return res, nil +} + +func (r *Layer) FindByPluginAndExtensionOfBlocks(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + res := layer.List{} + for _, l := range r.data { + l := l + if !isSceneIncludes(l.Scene(), scenes) || len(l.Infobox().FieldsByPlugin(pid, eid)) == 0 { + continue + } + res = append(res, &l) + } + + return res, nil +} + func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { r.lock.Lock() defer r.lock.Unlock() @@ -187,6 +248,7 @@ func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List res := layer.List{} for _, l := range r.data { + l := l if l.Scene() == sceneID { res = append(res, &l) } @@ -226,6 +288,20 @@ func (r *Layer) SaveAll(ctx context.Context, ll layer.List) error { return nil } +func (r *Layer) UpdatePlugin(ctx context.Context, old id.PluginID, new id.PluginID, scenes []id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, l := range r.data { + p := l.Plugin() + if p != nil && p.Equal(old) && isSceneIncludes(l.Scene(), scenes) { + l.SetPlugin(&new) + r.data[l.ID()] = l + } + } + return nil +} + func (r *Layer) Remove(ctx context.Context, id id.LayerID) error { r.lock.Lock() defer r.lock.Unlock() @@ -260,9 +336,10 @@ func (r *Layer) FindByTag(ctx context.Context, tagID id.TagID, s []id.SceneID) ( r.lock.Lock() defer r.lock.Unlock() var res layer.List - for _, layer := range r.data { - if layer.Tags().Has(tagID) { - res = append(res, &layer) + for _, l := range r.data { + l := l + if l.Tags().Has(tagID) { + res = append(res, &l) } } diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index e567667c1..79f0a4477 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -66,9 
+66,10 @@ func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error { if p.ID().System() { return errors.New("cannnot save system plugin") } - for _, p := range r.data { - if p.ID().Equal(p.ID()) { - return nil + for i, q := range r.data { + if q.ID().Equal(p.ID()) { + r.data = append(r.data[:i], r.data[i+1:]...) + break } } r.data = append(r.data, p.Clone()) diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index 38f25fcf3..75ac2b893 100644 --- a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -2,6 +2,7 @@ package memory import ( "context" + "sort" "sync" "github.com/reearth/reearth-backend/pkg/id" @@ -13,12 +14,12 @@ import ( type Property struct { lock sync.Mutex - data map[id.PropertyID]property.Property + data property.Map } func NewProperty() repo.Property { return &Property{ - data: map[id.PropertyID]property.Property{}, + data: property.Map{}, } } @@ -28,7 +29,7 @@ func (r *Property) FindByID(ctx context.Context, id id.PropertyID, f []id.SceneI p, ok := r.data[id] if ok && isSceneIncludes(p.Scene(), f) { - return &p, nil + return p, nil } return nil, rerror.ErrNotFound } @@ -40,8 +41,9 @@ func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.Sc result := property.List{} for _, id := range ids { if d, ok := r.data[id]; ok { + d := d if isSceneIncludes(d.Scene(), f) { - result = append(result, &d) + result = append(result, d) continue } } @@ -56,8 +58,9 @@ func (r *Property) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, di result := property.List{} for _, p := range r.data { + p := p if p.IsDatasetLinked(sid, did) { - result = append(result, &p) + result = append(result, p) } } return result, nil @@ -69,22 +72,66 @@ func (r *Property) FindLinkedAll(ctx context.Context, s id.SceneID) (property.Li result := property.List{} for _, p := range r.data { + p := p if p.Scene() != s { continue } if p.HasLinkedField() { - p2 := 
p - result = append(result, &p2) + result = append(result, p) } } return result, nil } +func (r *Property) FindBySchema(_ context.Context, schemas []id.PropertySchemaID, s id.SceneID) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, p := range r.data { + p := p + if p.Scene() != s { + continue + } + for _, s := range schemas { + if p.Schema().Equal(s) { + result = append(result, p) + break + } + } + } + sort.Slice(result, func(i, j int) bool { + return result[i].ID().ID().Compare(result[j].ID().ID()) < 0 + }) + return result, nil +} + +func (r *Property) FindByPlugin(_ context.Context, plugin id.PluginID, s id.SceneID) (property.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + result := property.List{} + for _, p := range r.data { + p := p + if p.Scene() != s { + continue + } + if p.Schema().Plugin().Equal(plugin) { + result = append(result, p) + break + } + } + sort.Slice(result, func(i, j int) bool { + return result[i].ID().ID().Compare(result[j].ID().ID()) < 0 + }) + return result, nil +} + func (r *Property) Save(ctx context.Context, p *property.Property) error { r.lock.Lock() defer r.lock.Unlock() - r.data[p.ID()] = *p + r.data[p.ID()] = p return nil } @@ -93,7 +140,19 @@ func (r *Property) SaveAll(ctx context.Context, pl property.List) error { defer r.lock.Unlock() for _, p := range pl { - r.data[p.ID()] = *p + r.data[p.ID()] = p + } + return nil +} + +func (r *Property) UpdateSchemaPlugin(ctx context.Context, old id.PluginID, new id.PluginID, s id.SceneID) error { + r.lock.Lock() + defer r.lock.Unlock() + + for _, p := range r.data { + if s := p.Schema(); s.Plugin().Equal(old) { + p.SetSchema(id.NewPropertySchemaID(new, s.ID())) + } } return nil } diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index 9d2aa51de..a65d869b9 100644 --- a/internal/infrastructure/memory/property_schema.go +++ 
b/internal/infrastructure/memory/property_schema.go @@ -22,7 +22,7 @@ func NewPropertySchema() repo.PropertySchema { } func (r *PropertySchema) initMap() { - if r.data != nil { + if r.data == nil { r.data = map[string]*property.Schema{} } } @@ -68,7 +68,7 @@ func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { defer r.lock.Unlock() r.initMap() - if p.ID().System() { + if p.ID().Plugin().System() { return errors.New("cannnot save system property schema") } r.data[p.ID().String()] = p @@ -80,10 +80,11 @@ func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) err defer r.lock.Unlock() r.initMap() - for _, ps := range p { - if err := r.Save(ctx, ps); err != nil { - return err + for _, p := range p { + if p.ID().Plugin().System() { + return errors.New("cannnot save system property schema") } + r.data[p.ID().String()] = p } return nil } diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index ec82c46cf..95b2251ce 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -38,7 +38,8 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas c.Lock = lock // migration - if err := (migration.Client{Client: client, Config: c.Config}).Migrate(ctx); err != nil { + m := migration.Client{Client: client, Config: c.Config} + if err := m.Migrate(ctx); err != nil { return err } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index 8475e80fa..fe744d5a8 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -108,6 +108,36 @@ func (r *layerRepo) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, return r.findGroups(ctx, nil, filter) } +func (r *layerRepo) FindParentsByIDs(ctx context.Context, ids []id.LayerID, scenes []id.SceneID) (layer.GroupList, error) { + f := bson.M{ + "group.layers": bson.M{"$in": 
id.LayerIDsToStrings(ids)}, + } + filter := r.sceneFilter(f, scenes) + return r.findGroups(ctx, nil, filter) +} + +func (r *layerRepo) FindByPluginAndExtension(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { + f := bson.M{ + "plugin": pid.String(), + } + if eid != nil { + f["extension"] = eid.String() + } + filter := r.sceneFilter(f, scenes) + return r.find(ctx, nil, filter) +} + +func (r *layerRepo) FindByPluginAndExtensionOfBlocks(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { + f := bson.M{ + "infobox.fields.plugin": pid.String(), + } + if eid != nil { + f["infobox.fields.extension"] = eid.String() + } + filter := r.sceneFilter(f, scenes) + return r.find(ctx, nil, filter) +} + func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { filter := r.sceneFilterD(bson.D{ {Key: "$or", Value: []bson.D{ @@ -146,6 +176,25 @@ func (r *layerRepo) SaveAll(ctx context.Context, layers layer.List) error { return r.client.SaveAll(ctx, ids, docs) } +func (r *layerRepo) UpdatePlugin(ctx context.Context, old, new id.PluginID, scenes []id.SceneID) error { + return r.client.UpdateManyMany( + ctx, + []mongodoc.Update{ + { + Filter: r.sceneFilter(bson.M{"plugin": old.String()}, scenes), + Update: bson.M{"plugin": new.String()}, + }, + { + Filter: r.sceneFilter(bson.M{"infobox.fields": bson.M{"$type": "array"}}, scenes), + Update: bson.M{"infobox.fields.$[if].plugin": new.String()}, + ArrayFilters: []interface{}{ + bson.M{"if.plugin": old.String()}, + }, + }, + }, + ) +} + func (r *layerRepo) Remove(ctx context.Context, id id.LayerID) error { return r.client.RemoveOne(ctx, id.String()) } @@ -224,16 +273,7 @@ func (r *layerRepo) findGroupOne(ctx context.Context, filter bson.D) (*layer.Gro return c.GroupRows[0], nil } -// func (r *layerRepo) paginate(ctx context.Context, filter bson.D, pagination 
*usecase.Pagination) (layer.List, *usecase.PageInfo, error) { -// var c mongodoc.LayerConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.Rows, pageInfo, nil -// } - -func (r *layerRepo) findItems(ctx context.Context, dst layer.ItemList, filter bson.D) (layer.ItemList, error) { +func (r *layerRepo) findItems(ctx context.Context, dst layer.ItemList, filter interface{}) (layer.ItemList, error) { c := mongodoc.LayerConsumer{ ItemRows: dst, } @@ -246,16 +286,7 @@ func (r *layerRepo) findItems(ctx context.Context, dst layer.ItemList, filter bs return c.ItemRows, nil } -// func (r *layerRepo) paginateItems(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.ItemList, *usecase.PageInfo, error) { -// var c mongodoc.LayerConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.ItemRows, pageInfo, nil -// } - -func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter bson.D) (layer.GroupList, error) { +func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter interface{}) (layer.GroupList, error) { c := mongodoc.LayerConsumer{ GroupRows: dst, } @@ -268,6 +299,24 @@ func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter return c.GroupRows, nil } +// func (r *layerRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.List, *usecase.PageInfo, error) { +// var c mongodoc.LayerConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, rerror.ErrInternalBy(err2) +// } +// return c.Rows, pageInfo, nil +// } + +// func (r *layerRepo) paginateItems(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.ItemList, *usecase.PageInfo, error) { +// var c 
mongodoc.LayerConsumer +// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) +// if err2 != nil { +// return nil, nil, rerror.ErrInternalBy(err2) +// } +// return c.ItemRows, pageInfo, nil +// } + // func (r *layerRepo) paginateGroups(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.GroupList, *usecase.PageInfo, error) { // var c mongodoc.LayerConsumer // pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) diff --git a/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go b/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go new file mode 100644 index 000000000..211466bc7 --- /dev/null +++ b/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go @@ -0,0 +1,50 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +func SplitSchemaOfProperties(ctx context.Context, c DBClient) error { + col := c.WithCollection("property") + + return col.Find(ctx, bson.M{ + "schema": bson.M{"$exists": true}, + }, &mongodoc.BatchConsumer{ + Size: 50, + Callback: func(rows []bson.Raw) error { + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: SplitSchemaOfProperties: hit properties: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.PropertyDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + if doc.Schema == "" { + continue + } + + s, err := id.PropertySchemaIDFrom(doc.Schema) + if err != nil { + return err + } + + doc.Schema = "" + doc.SchemaPlugin = s.Plugin().String() + doc.SchemaName = s.ID() + + ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git 
a/internal/infrastructure/mongo/migration/client.go b/internal/infrastructure/mongo/migration/client.go index c6b344c92..23377dadd 100644 --- a/internal/infrastructure/mongo/migration/client.go +++ b/internal/infrastructure/mongo/migration/client.go @@ -25,7 +25,9 @@ func (c Client) Migrate(ctx context.Context) (err error) { return fmt.Errorf("Failed to load config: %w", rerror.UnwrapErrInternal(err)) } defer func() { - err = c.Config.Unlock(ctx) + if err2 := c.Config.Unlock(ctx); err == nil && err2 != nil { + err = err2 + } }() nextMigrations := config.NextMigrations(migrationKeys()) @@ -36,7 +38,9 @@ func (c Client) Migrate(ctx context.Context) (err error) { var tx repo.Tx defer func() { if tx != nil { - err = tx.End(ctx) + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } } }() diff --git a/internal/infrastructure/mongo/migration/migrations.go b/internal/infrastructure/mongo/migration/migrations.go index b59e6461f..0e27e773c 100644 --- a/internal/infrastructure/mongo/migration/migrations.go +++ b/internal/infrastructure/mongo/migration/migrations.go @@ -3,8 +3,9 @@ package migration var migrations = map[int64]MigrationFunc{ - 201217132559: AddSceneWidgetId, - 201217193948: AddSceneDefaultTile, - 210310145844: RemovePreviewToken, - 210730175108: AddSceneAlignSystem, + 201217132559: AddSceneWidgetId, + 201217193948: AddSceneDefaultTile, + 210310145844: RemovePreviewToken, + 210730175108: AddSceneAlignSystem, + 220214180713: SplitSchemaOfProperties, } diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index 520606c36..65d3de709 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -120,7 +120,7 @@ var ( ) func (c *Client) SaveOne(ctx context.Context, col string, id string, replacement interface{}) error { - _, err := c.Collection(col).ReplaceOne(ctx, bson.D{{Key: "id", Value: id}}, replacement, replaceOption) + _, err := 
c.Collection(col).ReplaceOne(ctx, bson.M{"id": id}, replacement, replaceOption) if err != nil { return rerror.ErrInternalBy(err) } @@ -140,7 +140,7 @@ func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates id := ids[i] writeModels = append(writeModels, &mongo.ReplaceOneModel{ Upsert: &upsert, - Filter: bson.D{{Key: "id", Value: id}}, + Filter: bson.M{"id": id}, Replacement: u, }) } @@ -152,6 +152,43 @@ func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates return nil } +func (c *Client) UpdateMany(ctx context.Context, col string, filter, update interface{}) error { + _, err := c.Collection(col).UpdateMany(ctx, filter, bson.M{ + "$set": update, + }) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + +type Update struct { + Filter interface{} + Update interface{} + ArrayFilters []interface{} +} + +func (c *Client) UpdateManyMany(ctx context.Context, col string, updates []Update) error { + writeModels := make([]mongo.WriteModel, 0, len(updates)) + for _, w := range updates { + wm := mongo.NewUpdateManyModel().SetFilter(w.Filter).SetUpdate(bson.M{ + "$set": w.Update, + }) + if len(w.ArrayFilters) > 0 { + wm.SetArrayFilters(options.ArrayFilters{ + Filters: w.ArrayFilters, + }) + } + writeModels = append(writeModels, wm) + } + + _, err := c.Collection(col).BulkWrite(ctx, writeModels) + if err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} + func getCursor(raw bson.Raw, key string) (*usecase.Cursor, error) { val, err := raw.LookupErr(key) if err != nil { diff --git a/internal/infrastructure/mongo/mongodoc/clientcol.go b/internal/infrastructure/mongo/mongodoc/clientcol.go index 21916b4cf..4ac86f023 100644 --- a/internal/infrastructure/mongo/mongodoc/clientcol.go +++ b/internal/infrastructure/mongo/mongodoc/clientcol.go @@ -40,6 +40,14 @@ func (c *ClientCollection) SaveAll(ctx context.Context, ids []string, updates [] return c.Client.SaveAll(ctx, c.CollectionName, ids, updates) } 
+func (c *ClientCollection) UpdateMany(ctx context.Context, filter interface{}, update interface{}) error { + return c.Client.UpdateMany(ctx, c.CollectionName, filter, update) +} + +func (c *ClientCollection) UpdateManyMany(ctx context.Context, updates []Update) error { + return c.Client.UpdateManyMany(ctx, c.CollectionName, updates) +} + func (c *ClientCollection) RemoveOne(ctx context.Context, id string) error { return c.Client.RemoveOne(ctx, c.CollectionName, id) } diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index 556766fcd..f9eb64295 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -13,10 +13,12 @@ const ( ) type PropertyDocument struct { - ID string - Scene string - Schema string - Items []*PropertyItemDocument + ID string + Scene string + Schema string `bson:",omitempty"` // compatibility + SchemaPlugin string + SchemaName string + Items []*PropertyItemDocument } type PropertyFieldDocument struct { @@ -162,10 +164,11 @@ func NewProperty(property *property.Property) (*PropertyDocument, string) { pid := property.ID().String() items := property.Items() doc := PropertyDocument{ - ID: pid, - Schema: property.Schema().String(), - Items: make([]*PropertyItemDocument, 0, len(items)), - Scene: property.Scene().String(), + ID: pid, + SchemaPlugin: property.Schema().Plugin().String(), + SchemaName: property.Schema().ID(), + Items: make([]*PropertyItemDocument, 0, len(items)), + Scene: property.Scene().String(), } for _, f := range items { doc.Items = append(doc.Items, newPropertyItem(f)) @@ -286,7 +289,7 @@ func (doc *PropertyDocument) Model() (*property.Property, error) { if err != nil { return nil, err } - psid, err := id.PropertySchemaIDFrom(doc.Schema) + pl, err := id.PluginIDFrom(doc.SchemaPlugin) if err != nil { return nil, err } @@ -303,7 +306,7 @@ func (doc *PropertyDocument) Model() (*property.Property, error) { 
return property.New(). ID(pid). Scene(sid). - Schema(psid). + Schema(id.NewPropertySchemaID(pl, doc.SchemaName)). Items(items). Build() } diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index 17b8c2f19..133400ede 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -131,7 +131,7 @@ func NewScene(scene *scene.Scene) (*SceneDocument, string) { RootLayer: scene.RootLayer().String(), Widgets: widgetsDoc, Plugins: pluginsDoc, - AlignSystem: NewWidgetAlignSystem(scene.WidgetAlignSystem()), + AlignSystem: NewWidgetAlignSystem(scene.Widgets().Alignment()), UpdateAt: scene.UpdatedAt(), Property: scene.Property().String(), Clusters: clsuterDoc, @@ -223,8 +223,7 @@ func (d *SceneDocument) Model() (*scene.Scene, error) { Team(tid). RootLayer(lid). Clusters(cl). - Widgets(scene.NewWidgets(ws)). - WidgetAlignSystem(d.AlignSystem.Model()). + Widgets(scene.NewWidgets(ws, d.AlignSystem.Model())). Plugins(scene.NewPlugins(ps)). UpdatedAt(d.UpdateAt). Property(prid). 
diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index 1fb1bbc67..dde0f6482 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -78,6 +78,31 @@ func (r *propertyRepo) FindByDataset(ctx context.Context, sid id.DatasetSchemaID return r.find(ctx, nil, filter) } +func (r *propertyRepo) FindBySchema(ctx context.Context, psids []id.PropertySchemaID, sid id.SceneID) (property.List, error) { + if len(psids) == 0 { + return nil, nil + } + + filters := make([]bson.M, 0, len(psids)) + for _, s := range psids { + filters = append(filters, bson.M{ + "schemaplugin": s.Plugin().String(), + "schemaname": s.ID(), + "scene": sid.String(), + }) + } + filter := bson.M{"$and": filters} + return r.find(ctx, nil, filter) +} + +func (r *propertyRepo) FindByPlugin(ctx context.Context, pid id.PluginID, sid id.SceneID) (property.List, error) { + filter := bson.M{ + "schemaplugin": pid.String(), + "scene": sid.String(), + } + return r.find(ctx, nil, filter) +} + func (r *propertyRepo) Save(ctx context.Context, property *property.Property) error { doc, id := mongodoc.NewProperty(property) return r.client.SaveOne(ctx, id, doc) @@ -91,6 +116,15 @@ func (r *propertyRepo) SaveAll(ctx context.Context, properties property.List) er return r.client.SaveAll(ctx, ids, docs) } +func (r *propertyRepo) UpdateSchemaPlugin(ctx context.Context, old, new id.PluginID, s id.SceneID) error { + return r.client.UpdateMany(ctx, bson.M{ + "schemaplugin": old, + "scene": s.String(), + }, bson.M{ + "schemaplugin": new, + }) +} + func (r *propertyRepo) Remove(ctx context.Context, id id.PropertyID) error { return r.client.RemoveOne(ctx, id.String()) } @@ -113,7 +147,7 @@ func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) er return nil } -func (r *propertyRepo) find(ctx context.Context, dst property.List, filter bson.D) (property.List, error) { +func (r *propertyRepo) find(ctx context.Context, 
dst property.List, filter interface{}) (property.List, error) { c := mongodoc.PropertyConsumer{ Rows: dst, } diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index 50cb71121..e71e15eec 100644 --- a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -89,7 +89,7 @@ func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySch } func (r *propertySchemaRepo) Save(ctx context.Context, m *property.Schema) error { - if m.ID().System() { + if m.ID().Plugin().System() { return errors.New("cannnot save system property schema") } @@ -99,7 +99,7 @@ func (r *propertySchemaRepo) Save(ctx context.Context, m *property.Schema) error func (r *propertySchemaRepo) SaveAll(ctx context.Context, m property.SchemaList) error { for _, ps := range m { - if ps.ID().System() { + if ps.ID().Plugin().System() { return errors.New("cannnot save system property schema") } } diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index 3ea24883b..b18add6cb 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -17,6 +17,7 @@ type Plugin struct { pluginRepo repo.Plugin propertySchemaRepo repo.PropertySchema propertyRepo repo.Property + layerRepo repo.Layer file gateway.File pluginRepository gateway.PluginRepository transaction repo.Transaction @@ -28,6 +29,7 @@ func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { commonScene: commonScene{ sceneRepo: r.Scene, }, + layerRepo: r.Layer, pluginRepo: r.Plugin, propertySchemaRepo: r.PropertySchema, propertyRepo: r.Property, diff --git a/internal/usecase/interactor/plugin_delete.go b/internal/usecase/interactor/plugin_delete.go deleted file mode 100644 index 33177b212..000000000 --- a/internal/usecase/interactor/plugin_delete.go +++ /dev/null @@ -1,58 +0,0 @@ -package interactor - -import ( - "context" - - 
"github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" -) - -func (i *Plugin) Delete(ctx context.Context, pid id.PluginID, operator *usecase.Operator) (err error) { - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - p, err := i.pluginRepo.FindByID(ctx, pid, nil) - if err != nil { - return err - } - - sid := p.ID().Scene() - if sid == nil || p.ID().System() { - return interfaces.ErrCannotDeletePublicPlugin - } - - s, err := i.sceneRepo.FindByID(ctx, *sid, operator.WritableTeams) - if err != nil { - return err - } - if s == nil { - return interfaces.ErrOperationDenied - } - - if s.Plugins().HasPlugin(p.ID()) { - return interfaces.ErrCannotDeleteUsedPlugin - } - - if err := i.pluginRepo.Remove(ctx, p.ID()); err != nil { - return err - } - if ps := p.PropertySchemas(); len(ps) > 0 { - if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { - return err - } - } - if err := i.file.RemovePlugin(ctx, p.ID()); err != nil { - return err - } - - tx.Commit() - return nil -} diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index f1d3b101b..4101a47f0 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -2,7 +2,6 @@ package interactor import ( "context" - "errors" "io" "net/http" "net/url" @@ -11,6 +10,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/manifest" "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" "github.com/reearth/reearth-backend/pkg/plugin/repourl" "github.com/reearth/reearth-backend/pkg/property" @@ -21,25 +21,10 @@ import ( var 
pluginPackageSizeLimit int64 = 10 * 1024 * 1024 // 10MB func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - if err := i.CanWriteScene(ctx, sid, operator); err != nil { return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) - if err != nil { - return nil, nil, err - } - p, err := pluginpack.PackageFromZip(r, &sid, pluginPackageSizeLimit) if err != nil { return nil, nil, &rerror.Error{ @@ -49,57 +34,15 @@ func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operat } } - for { - f, err := p.Files.Next() - if err != nil { - return nil, nil, rerror.ErrInternalBy(err) - } - if f == nil { - break - } - if err := i.file.UploadPluginFile(ctx, p.Manifest.Plugin.ID(), f); err != nil { - return nil, nil, rerror.ErrInternalBy(err) - } - } - - if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { - if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { - return nil, nil, rerror.ErrInternalBy(err) - } - } - if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { - return nil, nil, rerror.ErrInternalBy(err) - } - - if err := i.installPlugin(ctx, p, s); err != nil { - return nil, nil, rerror.ErrInternalBy(err) - } - - tx.Commit() - return p.Manifest.Plugin, s, nil + return i.upload(ctx, p, sid, operator) } func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { - ru, err := repourl.New(u) - if err != nil { - return nil, nil, err - } - - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - if err := i.CanWriteScene(ctx, sid, operator); err != nil { 
return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + ru, err := repourl.New(u) if err != nil { return nil, nil, err } @@ -113,26 +56,62 @@ func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneI if err != nil { return nil, nil, interfaces.ErrInvalidPluginPackage } - - defer func() { - _ = res.Body.Close() - }() - if res.StatusCode != 200 { return nil, nil, interfaces.ErrInvalidPluginPackage } p, err := pluginpack.PackageFromZip(res.Body, &sid, pluginPackageSizeLimit) if err != nil { + _ = res.Body.Close() return nil, nil, interfaces.ErrInvalidPluginPackage } - if p, err := i.pluginRepo.FindByID(ctx, p.Manifest.Plugin.ID(), []id.SceneID{sid}); err != nil && !errors.Is(err, rerror.ErrNotFound) { + _ = res.Body.Close() + return i.upload(ctx, p, sid, operator) +} + +func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { + if err := i.CanWriteScene(ctx, sid, operator); err != nil { + return nil, nil, err + } + + s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + if err != nil { return nil, nil, err - } else if p != nil { - return nil, nil, interfaces.ErrPluginAlreadyInstalled } + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + newpid := p.Manifest.Plugin.ID() + oldpid := s.Plugins().PluginByName(newpid.Name()).PluginRef() + var oldp *plugin.Plugin + if oldpid != nil { + oldp, err = i.pluginRepo.FindByID(ctx, *oldpid, []id.SceneID{sid}) + if err != nil { + return nil, nil, err + } + } + + // new (oldpid == nil): upload files, save plugin and properties -> install + // same (oldpid.Equal(newpid)): delete old files -> upload files, save plugin and property schemas -> migrate + // diff (!oldpid.Equal(newpid)): upload files, save plugin and property schemas -> migrate -> delete old 
files + + if oldpid != nil && oldpid.Equal(newpid) { + // same only: delete old files + if err := i.file.RemovePlugin(ctx, *oldpid); err != nil { + return nil, nil, err + } + } + + // uploads files for { f, err := p.Files.Next() if err != nil { @@ -146,25 +125,54 @@ func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneI } } + // save plugin and property schemas if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { return nil, nil, err } } + if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { return nil, nil, err } - if err := i.installPlugin(ctx, p, s); err != nil { - return nil, nil, err + if oldpid == nil { + // new: install plugin + if err := i.installScenePlugin(ctx, p, s); err != nil { + return nil, nil, err + } + } else { + // same, diff: migrate + if err := i.migrateScenePlugin(ctx, p, s, oldp); err != nil { + return nil, nil, err + } + } + + if oldpid != nil && !oldpid.Equal(newpid) { + // diff only: delete old files + if err := i.file.RemovePlugin(ctx, *oldpid); err != nil { + return nil, nil, err + } + + if oldpid.Scene() != nil { + // remove old scene plugin + if err := i.pluginRepo.Remove(ctx, *oldpid); err != nil { + return nil, nil, err + } + if ps := oldp.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { + return nil, nil, err + } + } + } } tx.Commit() return p.Manifest.Plugin, s, nil } -// installPlugin installs the plugin to the scene -func (i *Plugin) installPlugin(ctx context.Context, p *pluginpack.Package, s *scene.Scene) (err error) { +// installScenePlugin installs the plugin to the scene +func (i *Plugin) installScenePlugin(ctx context.Context, p *pluginpack.Package, s *scene.Scene) (err error) { var ppid *id.PropertyID var pp *property.Property if psid := p.Manifest.Plugin.Schema(); psid != nil { @@ -173,6 +181,7 @@ func (i *Plugin) installPlugin(ctx context.Context, p *pluginpack.Package, s 
*sc return err } } + s.Plugins().Add(scene.NewPlugin(p.Manifest.Plugin.ID(), ppid)) if pp != nil { @@ -185,3 +194,172 @@ func (i *Plugin) installPlugin(ctx context.Context, p *pluginpack.Package, s *sc } return nil } + +func (i *Plugin) migrateScenePlugin(ctx context.Context, p *pluginpack.Package, s *scene.Scene, oldp *plugin.Plugin) (err error) { + if oldp == nil || p.Manifest == nil { + return nil + } + + oldPManifest, err := i.pluginManifestFromPlugin(ctx, oldp) + if err != nil { + return err + } + + diff := manifest.DiffFrom(oldPManifest, *p.Manifest) + updatedProperties := property.List{} + + // update scene + var spp *id.PropertyID + if to := diff.PropertySchemaDiff.To; !to.IsNil() && diff.PropertySchemaDiff.From.IsNil() { + // new plugin property + p, err := property.New().NewID().Scene(s.ID()).Schema(to).Build() + if err != nil { + return err + } + spp = p.ID().Ref() + updatedProperties = append(updatedProperties, p) + } + + sp := s.Plugins().Plugin(diff.From) + if sp != nil && sp.Property() != nil && diff.PropertySchemaDeleted { + // plugin property should be removed + if err := i.propertyRepo.Remove(ctx, *sp.Property()); err != nil { + return err + } + } + + s.Plugins().Upgrade(diff.From, diff.To, spp, diff.PropertySchemaDeleted) + if err := i.sceneRepo.Save(ctx, s); err != nil { + return err + } + + // delete layers, blocks and widgets + for _, e := range diff.DeletedExtensions { + deletedProperties, err := i.deleteLayersByPluginExtension(ctx, s.ID().Ref(), diff.From, &e.ExtensionID) + if err != nil { + return err + } + + if deletedProperties2, err := i.deleteBlocksByPluginExtension(ctx, s.ID().Ref(), diff.From, &e.ExtensionID); err != nil { + return err + } else { + deletedProperties = append(deletedProperties, deletedProperties2...) + } + + deletedProperties = append(deletedProperties, s.Widgets().RemoveAllByPlugin(diff.From, e.ExtensionID.Ref())...) 
+ + if len(deletedProperties) > 0 { + if err := i.propertyRepo.RemoveAll(ctx, deletedProperties); err != nil { + return err + } + } + } + + // migrate layers + if err := i.layerRepo.UpdatePlugin(ctx, diff.From, diff.To, []id.SceneID{s.ID()}); err != nil { + return err + } + + // migrate properties + updatedPropertySchemas := diff.PropertySchmaDiffs() + updatedPropertySchemaIDs := updatedPropertySchemas.FromSchemas() + pl, err := i.propertyRepo.FindBySchema(ctx, updatedPropertySchemaIDs, s.ID()) + if err != nil { + return err + } + for _, p := range pl { + if e := updatedPropertySchemas.FindByFrom(p.Schema()); e != nil && e.Migrate(p) { + updatedProperties = append(updatedProperties, p) + } + } + if len(updatedProperties) > 0 { + if err := i.propertyRepo.SaveAll(ctx, updatedProperties); err != nil { + return err + } + } + + if err := i.propertyRepo.UpdateSchemaPlugin(ctx, diff.From, diff.To, s.ID()); err != nil { + return err + } + + // delete unused schemas and properties + if deleted := diff.DeletedPropertySchemas(); len(deleted) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, deleted); err != nil { + return err + } + } + + return nil +} + +func (i *Plugin) deleteLayersByPluginExtension(ctx context.Context, sid *id.SceneID, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { + var scenes []id.SceneID + if sid != nil { + scenes = []id.SceneID{*sid} + } + + // delete layers + deletedLayers := []id.LayerID{} + layers, err := i.layerRepo.FindByPluginAndExtension(ctx, p, e, scenes) + if err != nil { + return nil, err + } + deletedLayers = append(deletedLayers, layers.IDs().Layers()...) + + parentLayers, err := i.layerRepo.FindParentsByIDs(ctx, deletedLayers, scenes) + if err != nil { + return nil, err + } + + for _, p := range parentLayers { + p.Layers().RemoveLayer(deletedLayers...) 
+ } + if err := i.layerRepo.SaveAll(ctx, parentLayers.ToLayerList()); err != nil { + return nil, err + } + if err := i.layerRepo.RemoveAll(ctx, deletedLayers); err != nil { + return nil, err + } + + return layers.Properties(), nil +} + +func (i *Plugin) deleteBlocksByPluginExtension(ctx context.Context, sid *id.SceneID, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { + var scenes []id.SceneID + if sid != nil { + scenes = []id.SceneID{*sid} + } + + layers, err := i.layerRepo.FindByPluginAndExtensionOfBlocks(ctx, p, e, scenes) + if err != nil { + return nil, err + } + + var deletedProperties []id.PropertyID + for _, l := range layers.Deref() { + deletedProperties = append(deletedProperties, l.Infobox().RemoveAllByPlugin(p, e)...) + } + + if err := i.layerRepo.SaveAll(ctx, layers); err != nil { + return nil, err + } + return deletedProperties, nil +} + +func (i *Plugin) pluginManifestFromPlugin(ctx context.Context, p *plugin.Plugin) (manifest.Manifest, error) { + schemas, err := i.propertySchemaRepo.FindByIDs(ctx, p.PropertySchemas()) + if err != nil { + return manifest.Manifest{}, err + } + + var s *property.Schema + if ps := p.Schema(); ps != nil { + s = schemas.Find(*ps) + } + + return manifest.Manifest{ + Plugin: p, + ExtensionSchema: schemas, + Schema: s, + }, nil +} diff --git a/internal/usecase/interactor/plugin_upload_test.go b/internal/usecase/interactor/plugin_upload_test.go new file mode 100644 index 000000000..fdde61001 --- /dev/null +++ b/internal/usecase/interactor/plugin_upload_test.go @@ -0,0 +1,345 @@ +package interactor + +import ( + "archive/zip" + "bytes" + "context" + "io" + "os" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + 
"github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +const mockPluginManifest = `{ + "id": "testplugin", + "version": "1.0.1", + "name": "testplugin", + "extensions": [ + { + "id": "block", + "type": "block", + "schema": { + "groups": [ + { + "id": "default", + "fields": [ + { + "id": "field", + "type": "string" + } + ] + } + ] + } + } + ] +}` + +var mockPluginID = id.MustPluginID("testplugin~1.0.1") +var mockPluginFiles = map[string]string{ + "reearth.yml": mockPluginManifest, + "block.js": "// barfoo", +} +var mockPluginArchiveZip bytes.Buffer + +func init() { + zbuf := bytes.Buffer{} + zw := zip.NewWriter(&zbuf) + for p, f := range mockPluginFiles { + w, _ := zw.Create(p) + _, _ = w.Write([]byte(f)) + } + _ = zw.Close() + mockPluginArchiveZip = zbuf +} + +func mockFS(files map[string]string) afero.Fs { + mfs := afero.NewMemMapFs() + for n, c := range files { + f, err := mfs.Create(n) + if err != nil { + panic(err) + } + _, _ = f.Write([]byte(c)) + _ = f.Close() + } + return mfs +} + +func TestPlugin_Upload_New(t *testing.T) { + // upload a new plugin + ctx := context.Background() + team := id.NewTeamID() + sid := id.NewSceneID() + pid := mockPluginID.WithScene(sid.Ref()) + + repos := memory.InitRepos(nil) + mfs := mockFS(nil) + files, err := fs.NewFile(mfs, "") + assert.NoError(t, err) + scene := scene.New().ID(sid).Team(team).RootLayer(id.NewLayerID()).MustBuild() + _ = repos.Scene.Save(ctx, scene) + + uc := &Plugin{ + commonScene: commonScene{sceneRepo: repos.Scene}, + pluginRepo: repos.Plugin, + propertySchemaRepo: repos.PropertySchema, + propertyRepo: repos.Property, + layerRepo: repos.Layer, + file: files, + transaction: repos.Transaction, + } + op := &usecase.Operator{ + ReadableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + } + + reader := 
bytes.NewReader(mockPluginArchiveZip.Bytes()) + pl, s, err := uc.Upload(ctx, reader, scene.ID(), op) + assert.NoError(t, err) + assert.Equal(t, scene.ID(), s.ID()) + assert.Equal(t, pid, pl.ID()) + + // scene + nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) + assert.NoError(t, err) + assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) + + // plugin + npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) + assert.NoError(t, err) + assert.Equal(t, pid, npl.ID()) + + npf, err := mfs.Open("plugins/" + pid.String() + "/block.js") + assert.NoError(t, err) + npfc, _ := io.ReadAll(npf) + assert.Equal(t, "// barfoo", string(npfc)) +} + +func TestPlugin_Upload_SameVersion(t *testing.T) { + // upgrade plugin to the same version + // 1 extension is deleted -> property schema, layers, and properties of the extension should be deleted + // old plugin files should be deleted + + ctx := context.Background() + team := id.NewTeamID() + sid := id.NewSceneID() + pid := mockPluginID.WithScene(sid.Ref()) + eid := id.PluginExtensionID("marker") + + repos := memory.InitRepos(nil) + mfs := mockFS(map[string]string{ + "plugins/" + pid.String() + "/hogehoge": "foobar", + }) + files, err := fs.NewFile(mfs, "") + assert.NoError(t, err) + + ps := property.NewSchema().ID(property.NewSchemaID(pid, eid.String())).MustBuild() + pl := plugin.New().ID(pid).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(eid).Type(plugin.ExtensionTypePrimitive).Schema(ps.ID()).MustBuild(), + }).MustBuild() + + p := property.New().NewID().Schema(ps.ID()).Scene(sid).MustBuild() + pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(pid.Ref()).Extension(eid.Ref()).Property(p.IDRef()).MustBuild() + rootLayer := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() + scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ + scene.NewPlugin(pid, nil), + })).MustBuild() + + 
_ = repos.PropertySchema.Save(ctx, ps) + _ = repos.Plugin.Save(ctx, pl) + _ = repos.Property.Save(ctx, p) + _ = repos.Layer.SaveAll(ctx, layer.List{pluginLayer.LayerRef(), rootLayer.LayerRef()}) + _ = repos.Scene.Save(ctx, scene) + + uc := &Plugin{ + commonScene: commonScene{sceneRepo: repos.Scene}, + pluginRepo: repos.Plugin, + propertySchemaRepo: repos.PropertySchema, + propertyRepo: repos.Property, + layerRepo: repos.Layer, + file: files, + transaction: repos.Transaction, + } + op := &usecase.Operator{ + ReadableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + } + + reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) + pl, s, err := uc.Upload(ctx, reader, scene.ID(), op) + + assert.NoError(t, err) + assert.Equal(t, scene.ID(), s.ID()) + assert.Equal(t, pid, pl.ID()) + + // scene + nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) + assert.NoError(t, err) + assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) + + // plugin + npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) + assert.NoError(t, err) + assert.Equal(t, pid, npl.ID()) + + nlps, err := repos.PropertySchema.FindByID(ctx, ps.ID()) + assert.Nil(t, nlps) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + _, err = mfs.Open("plugins/" + pid.String() + "/hogehoge") + assert.True(t, os.IsNotExist(err)) // deleted + + npf, err := mfs.Open("plugins/" + pid.String() + "/block.js") + assert.NoError(t, err) + npfc, _ := io.ReadAll(npf) + assert.Equal(t, "// barfoo", string(npfc)) + + // layer + nlp, err := repos.Property.FindByID(ctx, p.ID(), nil) + assert.Nil(t, nlp) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID(), nil) + assert.Nil(t, nl) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nrl, err := repos.Layer.FindGroupByID(ctx, rootLayer.ID(), nil) + assert.NoError(t, err) + assert.Equal(t, []layer.ID{}, nrl.Layers().Layers()) // deleted +} + +func TestPlugin_Upload_DiffVersion(t 
*testing.T) { + // upgrade plugin to the different version + // plugin ID of property and layers should be updated + + ctx := context.Background() + team := id.NewTeamID() + sid := id.NewSceneID() + oldpid := id.MustPluginID("testplugin~1.0.0").WithScene(sid.Ref()) + pid := mockPluginID.WithScene(sid.Ref()) + eid := id.PluginExtensionID("block") + nlpsid := id.NewPropertySchemaID(pid, eid.String()) + + repos := memory.InitRepos(nil) + mfs := mockFS(map[string]string{ + "plugins/" + oldpid.String() + "/hogehoge": "foobar", + }) + files, err := fs.NewFile(mfs, "") + assert.NoError(t, err) + + oldpsf := property.NewSchemaField().ID("field").Type(property.ValueTypeNumber).MustBuild() + oldpsg := property.NewSchemaGroup().ID("default").Fields([]*property.SchemaField{oldpsf}).MustBuild() + oldps := property.NewSchema().ID(property.NewSchemaID(oldpid, eid.String())).Groups(property.NewSchemaGroupList( + []*property.SchemaGroup{oldpsg}, + )).MustBuild() + oldpl := plugin.New().ID(oldpid).Extensions([]*plugin.Extension{ + plugin.NewExtension().ID(eid).Type(plugin.ExtensionTypeBlock).Schema(oldps.ID()).MustBuild(), + }).MustBuild() + + pf := property.NewField("field").Value(property.ValueTypeNumber.ValueFrom(100).Some()).MustBuild() + pg := property.NewGroup().NewID().SchemaGroup(oldpsg.ID()).Fields([]*property.Field{pf}).MustBuild() + oldp := property.New().NewID().Schema(oldps.ID()).Scene(sid).Items([]property.Item{pg}).MustBuild() + oldp2 := property.New().NewID().Schema(oldps.ID()).Scene(sid).MustBuild() + oldp3 := property.New().NewID().Schema(oldps.ID()).Scene(sid).MustBuild() + ib := layer.NewInfobox([]*layer.InfoboxField{ + layer.NewInfoboxField().NewID().Plugin(oldp3.Schema().Plugin()).Extension(plugin.ExtensionID(oldp3.Schema().ID())).Property(oldp3.ID()).MustBuild(), + }, oldp2.ID()) + pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(oldpid.Ref()).Extension(eid.Ref()).Property(oldp.IDRef()).Infobox(ib).MustBuild() + rootLayer := 
layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() + scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ + scene.NewPlugin(oldpid, nil), + })).MustBuild() + + _ = repos.PropertySchema.Save(ctx, oldps) + _ = repos.Plugin.Save(ctx, oldpl) + _ = repos.Property.SaveAll(ctx, property.List{oldp, oldp2, oldp3}) + _ = repos.Layer.SaveAll(ctx, layer.List{pluginLayer.LayerRef(), rootLayer.LayerRef()}) + _ = repos.Scene.Save(ctx, scene) + + uc := &Plugin{ + commonScene: commonScene{sceneRepo: repos.Scene}, + pluginRepo: repos.Plugin, + propertySchemaRepo: repos.PropertySchema, + propertyRepo: repos.Property, + layerRepo: repos.Layer, + file: files, + transaction: repos.Transaction, + } + op := &usecase.Operator{ + ReadableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + } + + reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) + oldpl, s, err := uc.Upload(ctx, reader, scene.ID(), op) + + assert.NoError(t, err) + assert.Equal(t, scene.ID(), s.ID()) + assert.Equal(t, pid, oldpl.ID()) + + // scene + nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) + assert.NoError(t, err) + assert.False(t, nscene.Plugins().HasPlugin(oldpid)) + assert.True(t, nscene.Plugins().HasPlugin(pid)) + + // plugin + opl, err := repos.Plugin.FindByID(ctx, oldpid, []id.SceneID{scene.ID()}) + assert.Nil(t, opl) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) + assert.NoError(t, err) + assert.Equal(t, pid, npl.ID()) + + olps, err := repos.PropertySchema.FindByID(ctx, oldps.ID()) + assert.Nil(t, olps) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + + nlps, err := repos.PropertySchema.FindByID(ctx, nlpsid) + assert.NoError(t, err) + assert.Equal(t, nlpsid, nlps.ID()) + + _, err = mfs.Open("plugins/" + oldpid.String() + "/hogehoge") + assert.True(t, os.IsNotExist(err)) // deleted + 
+ npf, err := mfs.Open("plugins/" + pid.String() + "/block.js") + assert.NoError(t, err) + npfc, _ := io.ReadAll(npf) + assert.Equal(t, "// barfoo", string(npfc)) + + // layer + nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID(), nil) + assert.NoError(t, err) + assert.Equal(t, pid, *nl.Plugin()) + assert.Equal(t, eid, *nl.Extension()) + assert.Equal(t, oldp.ID(), *nl.Property()) + assert.Equal(t, oldp2.ID(), nl.Infobox().Property()) + assert.Equal(t, oldp3.ID(), nl.Infobox().FieldAt(0).Property()) + + nlp, err := repos.Property.FindByID(ctx, *nl.Property(), nil) + assert.NoError(t, err) + assert.Equal(t, *nl.Property(), nlp.ID()) + assert.Equal(t, nlpsid, nlp.Schema()) + assert.Equal(t, property.ValueTypeString.ValueFrom("100"), property.ToGroup(nlp.ItemBySchema("default")).Field("field").Value()) + + nlp2, err := repos.Property.FindByID(ctx, oldp2.ID(), nil) + assert.NoError(t, err) + assert.Equal(t, nlpsid, nlp2.Schema()) + + nlp3, err := repos.Property.FindByID(ctx, oldp3.ID(), nil) + assert.NoError(t, err) + assert.Equal(t, nlpsid, nlp3.Schema()) +} diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 749463a84..7cc165d32 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -233,7 +233,7 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, Area: scene.WidgetAreaTop, } } - s.WidgetAlignSystem().Area(loc).Add(widget.ID(), -1) + s.Widgets().Alignment().Area(loc).Add(widget.ID(), -1) } err = i.propertyRepo.Save(ctx, property) @@ -282,7 +282,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP if widget == nil { return nil, nil, rerror.ErrNotFound } - _, location := scene.WidgetAlignSystem().Find(param.WidgetID) + _, location := scene.Widgets().Alignment().Find(param.WidgetID) _, extension, err := i.getPlugin(ctx, scene.ID(), widget.Plugin(), widget.Extension()) if err != nil { @@ -304,7 +304,7 @@ func (i *Scene) 
UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP if param.Index != nil { index = *param.Index } - scene.WidgetAlignSystem().Move(widget.ID(), location, index) + scene.Widgets().Alignment().Move(widget.ID(), location, index) } if param.Extended != nil { @@ -360,7 +360,7 @@ func (i *Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.Up return nil, err } - area := scene.WidgetAlignSystem().Area(param.Location) + area := scene.Widgets().Alignment().Area(param.Location) if area == nil { return nil, errors.New("invalid location") @@ -414,7 +414,7 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID } ws.Remove(wid) - scene.WidgetAlignSystem().Remove(wid) + scene.Widgets().Alignment().Remove(wid) err2 = i.propertyRepo.Remove(ctx, widget.Property()) if err2 != nil { @@ -553,7 +553,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug ps.Remove(pid) // remove widgets - removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid)...) + removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid, nil)...) 
// remove layers res, err := layerops.Processor{ diff --git a/internal/usecase/interfaces/plugin.go b/internal/usecase/interfaces/plugin.go index ca0fe9054..0de5c4a89 100644 --- a/internal/usecase/interfaces/plugin.go +++ b/internal/usecase/interfaces/plugin.go @@ -13,16 +13,13 @@ import ( ) var ( - ErrPluginAlreadyRegistered = errors.New("plugin already registered") - ErrInvalidPluginPackage = errors.New("invalid plugin package") - ErrCannotDeletePublicPlugin = errors.New("cannot delete public plugin") - ErrCannotDeleteUsedPlugin = errors.New("cannot delete plugin used by at least one scene") + ErrPluginAlreadyRegistered = errors.New("plugin already registered") + ErrInvalidPluginPackage = errors.New("invalid plugin package") ) type Plugin interface { Fetch(context.Context, []id.PluginID, *usecase.Operator) ([]*plugin.Plugin, error) Upload(context.Context, io.Reader, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) UploadFromRemote(context.Context, *url.URL, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) - Delete(context.Context, id.PluginID, *usecase.Operator) error FetchPluginMetadata(context.Context, *usecase.Operator) ([]*plugin.Metadata, error) } diff --git a/internal/usecase/repo/layer.go b/internal/usecase/repo/layer.go index 75b134bf3..c3293b39c 100644 --- a/internal/usecase/repo/layer.go +++ b/internal/usecase/repo/layer.go @@ -17,11 +17,15 @@ type Layer interface { FindGroupByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.GroupList, error) FindGroupBySceneAndLinkedDatasetSchema(context.Context, id.SceneID, id.DatasetSchemaID) (layer.GroupList, error) FindParentByID(context.Context, id.LayerID, []id.SceneID) (*layer.Group, error) + FindParentsByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.GroupList, error) + FindByPluginAndExtension(context.Context, id.PluginID, *id.PluginExtensionID, []id.SceneID) (layer.List, error) + FindByPluginAndExtensionOfBlocks(context.Context, id.PluginID, 
*id.PluginExtensionID, []id.SceneID) (layer.List, error) FindByProperty(context.Context, id.PropertyID, []id.SceneID) (layer.Layer, error) FindByScene(context.Context, id.SceneID) (layer.List, error) FindByTag(context.Context, id.TagID, []id.SceneID) (layer.List, error) Save(context.Context, layer.Layer) error SaveAll(context.Context, layer.List) error + UpdatePlugin(context.Context, id.PluginID, id.PluginID, []id.SceneID) error Remove(context.Context, id.LayerID) error RemoveAll(context.Context, []id.LayerID) error RemoveByScene(context.Context, id.SceneID) error diff --git a/internal/usecase/repo/property.go b/internal/usecase/repo/property.go index 469ea2396..91555b3ab 100644 --- a/internal/usecase/repo/property.go +++ b/internal/usecase/repo/property.go @@ -12,8 +12,11 @@ type Property interface { FindByIDs(context.Context, []id.PropertyID, []id.SceneID) (property.List, error) FindLinkedAll(context.Context, id.SceneID) (property.List, error) FindByDataset(context.Context, id.DatasetSchemaID, id.DatasetID) (property.List, error) + FindBySchema(context.Context, []id.PropertySchemaID, id.SceneID) (property.List, error) + FindByPlugin(context.Context, id.PluginID, id.SceneID) (property.List, error) Save(context.Context, *property.Property) error SaveAll(context.Context, property.List) error + UpdateSchemaPlugin(context.Context, id.PluginID, id.PluginID, id.SceneID) error Remove(context.Context, id.PropertyID) error RemoveAll(context.Context, []id.PropertyID) error RemoveByScene(context.Context, id.SceneID) error diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go index 4d951529e..ead781d85 100644 --- a/pkg/id/plugin.go +++ b/pkg/id/plugin.go @@ -105,6 +105,15 @@ func PluginIDFromRef(id *string) *PluginID { return &did } +func (d PluginID) WithScene(sid *SceneID) PluginID { + return PluginID{ + name: d.name, + version: d.version, + sys: d.sys, + scene: sid.CopyRef(), + } +} + // Clone duplicates the PluginID func (d PluginID) Clone() PluginID { return PluginID{ diff 
--git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go index c2c785477..69f242507 100644 --- a/pkg/id/plugin_test.go +++ b/pkg/id/plugin_test.go @@ -329,6 +329,22 @@ func TestPluginIDFromRef(t *testing.T) { } } +func TestPluginID_WithScene(t *testing.T) { + sid := NewSceneID().Ref() + + assert.Equal(t, PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: sid, + }, PluginID{ + name: "aaa", + version: "1.0.0", + sys: false, + scene: nil, + }.WithScene(sid)) +} + func TestPluginID_Clone(t *testing.T) { p := PluginID{ name: "aaa", diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go index 7fc594785..812c95145 100644 --- a/pkg/id/property_schema.go +++ b/pkg/id/property_schema.go @@ -5,8 +5,6 @@ import ( "strings" ) -const schemaSystemIDPrefix = "reearth" - var schemaIDRe = regexp.MustCompile("^[a-zA-Z0-9][a-zA-Z0-9_-]*$|^@$") // PropertySchemaID is an ID for PropertySchema. @@ -15,6 +13,14 @@ type PropertySchemaID struct { id string } +// NewPropertySchemaID generates a new PropertySchemaID from a plugin ID and name. +func NewPropertySchemaID(p PluginID, name string) PropertySchemaID { + if p.IsNil() || !schemaIDRe.MatchString(name) { + return PropertySchemaID{} + } + return PropertySchemaID{plugin: p.Clone(), id: name} +} + // PropertySchemaIDFrom generates a new PropertySchemaID from a string. func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { ids := strings.SplitN(id, "/", 2) @@ -28,29 +34,6 @@ func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { return PropertySchemaID{plugin: pid, id: ids[1]}, nil } -// PropertySchemaIDFromExtension generates a new PropertySchemaID from a plugin ID and an extension ID. -func PropertySchemaIDFromExtension(p PluginID, e PluginExtensionID) (PropertySchemaID, error) { - return PropertySchemaID{plugin: p, id: e.String()}, nil -} - -// MustPropertySchemaID generates a new PropertySchemaID from a string, but panics if the string cannot be parsed. 
-func MustPropertySchemaID(id string) PropertySchemaID { - did, err := PropertySchemaIDFrom(id) - if err != nil { - panic(err) - } - return did -} - -// MustPropertySchemaIDFromExtension generates a new PropertySchemaID from a plugin ID and an extension ID, but panics if the string cannot be parsed. -func MustPropertySchemaIDFromExtension(p PluginID, e PluginExtensionID) PropertySchemaID { - did, err := PropertySchemaIDFromExtension(p, e) - if err != nil { - panic(err) - } - return did -} - // PropertySchemaIDFromRef generates a new PropertySchemaID from a string ref. func PropertySchemaIDFromRef(id *string) *PropertySchemaID { if id == nil { @@ -63,6 +46,15 @@ func PropertySchemaIDFromRef(id *string) *PropertySchemaID { return &did } +// MustPropertySchemaID generates a new PropertySchemaID from a string, but panics if the string cannot be parsed. +func MustPropertySchemaID(id string) PropertySchemaID { + did, err := PropertySchemaIDFrom(id) + if err != nil { + panic(err) + } + return did +} + // Clone duplicates the PropertySchemaID func (d PropertySchemaID) Clone() PropertySchemaID { return PropertySchemaID{ @@ -71,6 +63,14 @@ func (d PropertySchemaID) Clone() PropertySchemaID { } } +// WithPlugin duplicates the PropertySchemaID but its plugin ID is changed +func (d PropertySchemaID) WithPlugin(plugin PluginID) PropertySchemaID { + return PropertySchemaID{ + plugin: plugin.Clone(), + id: d.id, + } +} + // ID returns a fragment of just ID. func (d PropertySchemaID) ID() string { return d.id @@ -81,11 +81,6 @@ func (d PropertySchemaID) Plugin() PluginID { return d.plugin } -// System returns if it is system ID -func (d PropertySchemaID) System() bool { - return d.id == schemaSystemIDPrefix || strings.HasPrefix(d.id, schemaSystemIDPrefix+"/") -} - // String returns a string representation. 
func (d PropertySchemaID) String() string { if d.IsNil() { diff --git a/pkg/id/property_schema_test.go b/pkg/id/property_schema_test.go index cb8f234f4..f604b26ac 100644 --- a/pkg/id/property_schema_test.go +++ b/pkg/id/property_schema_test.go @@ -10,6 +10,21 @@ import ( var _ encoding.TextMarshaler = (*PropertySchemaID)(nil) var _ encoding.TextUnmarshaler = (*PropertySchemaID)(nil) +func TestNewPropertySchemaID(t *testing.T) { + pluginID := MustPluginID("test~2.0.0") + pluginExtensionID := "test2" + propertySchemaID := NewPropertySchemaID(pluginID, pluginExtensionID) + + assert.NotNil(t, propertySchemaID) + assert.Equal(t, PropertySchemaID{ + plugin: MustPluginID("test~2.0.0"), + id: "test2", + }, propertySchemaID) + + assert.Equal(t, PropertySchemaID{}, NewPropertySchemaID(PluginID{}, "a")) + assert.Equal(t, PropertySchemaID{}, NewPropertySchemaID(pluginID, "")) +} + func TestPropertySchemaIDFrom(t *testing.T) { tests := []struct { name string @@ -97,19 +112,6 @@ func TestPropertySchemaIDFrom(t *testing.T) { } } -func TestPropertySchemaIDFromExtension(t *testing.T) { - pluginID := MustPluginID("test~2.0.0") - pluginExtensionID := PluginExtensionID("test2") - propertySchemaID, err := PropertySchemaIDFromExtension(pluginID, pluginExtensionID) - - assert.NotNil(t, propertySchemaID) - assert.Equal(t, PropertySchemaID{ - plugin: MustPluginID("test~2.0.0"), - id: "test2", - }, propertySchemaID) - assert.Nil(t, err) -} - func TestMustPropertySchemaID(t *testing.T) { tests := []struct { name string @@ -175,18 +177,6 @@ func TestMustPropertySchemaID(t *testing.T) { } } -func TestMustPropertySchemaIDFromExtension(t *testing.T) { - pluginID := MustPluginID("test~2.0.0") - pluginExtensionID := PluginExtensionID("test2") - propertySchemaID := MustPropertySchemaIDFromExtension(pluginID, pluginExtensionID) - - assert.NotNil(t, propertySchemaID) - assert.Equal(t, PropertySchemaID{ - plugin: MustPluginID("test~2.0.0"), - id: "test2", - }, propertySchemaID) -} - func 
TestPropertySchemaIDFromRef(t *testing.T) { tests := []struct { name string @@ -250,6 +240,27 @@ func TestPropertySchemaID_Clone(t *testing.T) { assert.NotSame(t, p, c) } +func TestPropertySchemaID_WithPlugin(t *testing.T) { + c := PropertySchemaID{ + id: "xxx", + plugin: PluginID{ + name: "aaa", + version: "1.0.0", + }, + }.WithPlugin(PluginID{ + name: "aaa", + version: "1.1.0", + }) + + assert.Equal(t, PropertySchemaID{ + id: "xxx", + plugin: PluginID{ + name: "aaa", + version: "1.1.0", + }, + }, c) +} + func TestPropertySchemaID_ID(t *testing.T) { propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") assert.Equal(t, propertySchemaID.ID(), "test") @@ -260,16 +271,6 @@ func TestPropertySchemaID_Plugin(t *testing.T) { assert.Equal(t, MustPluginID("Test~2.0.0"), propertySchemaID.Plugin()) } -func TestPropertySchemaID_System(t *testing.T) { - propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") - assert.False(t, propertySchemaID.System()) - extinctionName := schemaSystemIDPrefix - propertySchemaID = MustPropertySchemaIDFromExtension(MustPluginID("test~2.0.0"), *PluginExtensionIDFromRef(&extinctionName)) - assert.True(t, propertySchemaID.System()) - propertySchemaID = MustPropertySchemaID("Test~2.0.0/" + schemaSystemIDPrefix) - assert.True(t, propertySchemaID.System()) -} - func TestPropertySchemaID_String(t *testing.T) { propertySchemaID := MustPropertySchemaID("Test~2.0.0/test") assert.Equal(t, propertySchemaID.String(), "Test~2.0.0/test") diff --git a/pkg/layer/decoding/reearth.go b/pkg/layer/decoding/reearth.go index a365b4c2a..ee484d3ed 100644 --- a/pkg/layer/decoding/reearth.go +++ b/pkg/layer/decoding/reearth.go @@ -92,12 +92,8 @@ func (l *ReearthLayer) layer() *layer.Initializer { } var psid *property.SchemaID - if l.Plugin != nil || l.Extension != nil { - psid2, err := layer.PropertySchemaIDFromExtension(*l.Plugin, *l.Extension) - if err == nil { - // if there is an error, property schema id will be nil. 
- psid = psid2.Ref() - } + if l.Plugin != nil && l.Extension != nil { + psid = layer.NewPropertySchemaID(*l.Plugin, l.Extension.String()).Ref() } var pr *property.Initializer @@ -156,18 +152,11 @@ type ReearthInfoboxField struct { } func (f *ReearthInfoboxField) infoboxField() *layer.InitializerInfoboxField { - if f == nil { + if f == nil || f.Plugin.IsNil() || f.Extension == "" { return nil } - var psid *property.SchemaID - { - psid2, err := layer.PropertySchemaIDFromExtension(f.Plugin, f.Extension) - if err == nil { - // if there is an error, property schema id will be nil. - psid = psid2.Ref() - } - } + psid := layer.NewPropertySchemaID(f.Plugin, f.Extension.String()).Ref() var pr *property.Initializer if f.Property != nil { diff --git a/pkg/layer/id.go b/pkg/layer/id.go index eb9b6dad1..f14db47ac 100644 --- a/pkg/layer/id.go +++ b/pkg/layer/id.go @@ -21,6 +21,7 @@ var NewInfoboxFieldID = id.NewInfoboxFieldID var NewTagID = id.NewTagID var NewSceneID = id.NewSceneID var NewPropertyID = id.NewPropertyID +var NewPropertySchemaID = id.NewPropertySchemaID var NewDatasetID = id.NewDatasetID var NewDatasetSchemaID = id.NewDatasetSchemaID @@ -30,8 +31,6 @@ var MustTagID = id.MustTagID var MustSceneID = id.MustSceneID var MustPluginID = id.MustPluginID var MustPropertyID = id.MustPropertyID -var PropertySchemaIDFromExtension = id.PropertySchemaIDFromExtension -var MustPropertySchemaIDFromExtension = id.MustPropertySchemaIDFromExtension var IDFrom = id.LayerIDFrom var InfoboxFieldIDFrom = id.InfoboxFieldIDFrom diff --git a/pkg/layer/infobox.go b/pkg/layer/infobox.go index f446e2a5f..26ca009cb 100644 --- a/pkg/layer/infobox.go +++ b/pkg/layer/infobox.go @@ -66,6 +66,19 @@ func (i *Infobox) FieldAt(index int) *InfoboxField { return i.fields[index] } +func (i *Infobox) FieldsByPlugin(pid PluginID, eid *PluginExtensionID) []*InfoboxField { + if i == nil { + return nil + } + fields := make([]*InfoboxField, 0, len(i.fields)) + for _, f := range i.fields { + if 
f.Plugin().Equal(pid) && (eid == nil || f.Extension() == *eid) { + fields = append(fields, f) + } + } + return fields +} + func (i *Infobox) Has(id InfoboxFieldID) bool { _, ok := i.ids[id] return ok @@ -124,14 +137,14 @@ func (i *Infobox) Remove(field InfoboxFieldID) { } } -func (i *Infobox) RemoveAllByPlugin(pid PluginID) []PropertyID { +func (i *Infobox) RemoveAllByPlugin(pid PluginID, eid *PluginExtensionID) []PropertyID { if i == nil { return nil } var properties []PropertyID for j := 0; j < len(i.fields); j++ { - if i.fields[j].plugin.Equal(pid) { + if i.fields[j].plugin.Equal(pid) && (eid == nil || i.fields[j].Extension() == *eid) { properties = append(properties, i.fields[j].Property()) i.fields = append(i.fields[:j], i.fields[j+1:]...) j-- diff --git a/pkg/layer/infobox_field.go b/pkg/layer/infobox_field.go index b7e5379da..3c363e0e8 100644 --- a/pkg/layer/infobox_field.go +++ b/pkg/layer/infobox_field.go @@ -47,7 +47,7 @@ func (i *InfoboxField) ValidateProperty(pm property.Map) error { if lp == nil { return errors.New("property does not exist") } - if !lp.Schema().Equal(MustPropertySchemaIDFromExtension(i.plugin, i.extension)) { + if !lp.Schema().Equal(NewPropertySchemaID(i.plugin, i.extension.String())) { return errors.New("property has a invalid schema") } diff --git a/pkg/layer/infobox_test.go b/pkg/layer/infobox_test.go index 1b6d77c75..ed9626725 100644 --- a/pkg/layer/infobox_test.go +++ b/pkg/layer/infobox_test.go @@ -50,18 +50,36 @@ func TestInfobox(t *testing.T) { assert.Equal(t, f4, infobox.FieldAt(2)) } +func TestInfobox_FieldsByPlugin(t *testing.T) { + pid1 := MustPluginID("xxx~1.1.1") + pid2 := MustPluginID("xxy~1.1.1") + f1 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "a", property: NewPropertyID()} + f2 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "b", property: NewPropertyID()} + f3 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "c", property: NewPropertyID()} + f4 := 
&InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "d", property: NewPropertyID()} + infobox := NewInfobox([]*InfoboxField{f1, f2, f3, f4}, NewPropertyID()) + + assert.Equal(t, []*InfoboxField(nil), (*Infobox)(nil).FieldsByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f1, f3}, infobox.FieldsByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f2, f4}, infobox.FieldsByPlugin(pid2, nil)) + assert.Equal(t, []*InfoboxField{f2}, infobox.FieldsByPlugin(pid2, PluginExtensionID("b").Ref())) +} + func TestInfobox_RemoveAllByPlugin(t *testing.T) { pid1 := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("xxy~1.1.1") f1 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "a", property: NewPropertyID()} f2 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "b", property: NewPropertyID()} f3 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid1, extension: "c", property: NewPropertyID()} - infobox := NewInfobox([]*InfoboxField{f1, f2, f3}, NewPropertyID()) + f4 := &InfoboxField{id: NewInfoboxFieldID(), plugin: pid2, extension: "d", property: NewPropertyID()} + infobox := NewInfobox([]*InfoboxField{f1, f2, f3, f4}, NewPropertyID()) - assert.Equal(t, []PropertyID(nil), (*Infobox)(nil).RemoveAllByPlugin(pid1)) - assert.Equal(t, []*InfoboxField{f1, f2, f3}, infobox.fields) - assert.Equal(t, []PropertyID{f1.Property(), f3.Property()}, infobox.RemoveAllByPlugin(pid1)) - assert.Equal(t, []*InfoboxField{f2}, infobox.fields) - assert.Equal(t, []PropertyID(nil), infobox.RemoveAllByPlugin(pid1)) + assert.Equal(t, []PropertyID(nil), (*Infobox)(nil).RemoveAllByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f1, f2, f3, f4}, infobox.fields) + assert.Equal(t, []PropertyID{f1.Property(), f3.Property()}, infobox.RemoveAllByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f2, f4}, infobox.fields) + assert.Equal(t, []PropertyID(nil), infobox.RemoveAllByPlugin(pid1, nil)) + assert.Equal(t, []*InfoboxField{f2, f4}, infobox.fields) + 
assert.Equal(t, []PropertyID{f4.Property()}, infobox.RemoveAllByPlugin(pid2, PluginExtensionID("d").Ref())) assert.Equal(t, []*InfoboxField{f2}, infobox.fields) } diff --git a/pkg/layer/initializer.go b/pkg/layer/initializer.go index 8d0624b4c..a9f7c86a5 100644 --- a/pkg/layer/initializer.go +++ b/pkg/layer/initializer.go @@ -265,10 +265,7 @@ func (i *InitializerInfoboxField) InfoboxField(scene SceneID) (*InfoboxField, *p return nil, nil, nil } - psid, err := PropertySchemaIDFromExtension(i.Plugin, i.Extension) - if err != nil { - return nil, nil, err - } + psid := NewPropertySchemaID(i.Plugin, i.Extension.String()) fid := i.ID if i.ID == nil { @@ -278,12 +275,13 @@ func (i *InitializerInfoboxField) InfoboxField(scene SceneID) (*InfoboxField, *p pid := i.PropertyID var p *property.Property if pid == nil { - p, err = i.Property.PropertyIncludingEmpty(scene, psid) + p2, err := i.Property.PropertyIncludingEmpty(scene, psid) if err != nil { return nil, nil, ErrInitializationPropertyWith(err) } - if p != nil { - pid = p.IDRef() + if p2 != nil { + p = p2 + pid = p2.IDRef() } } if pid == nil { diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go index 1119e7cf7..d841a45e7 100644 --- a/pkg/layer/layer.go +++ b/pkg/layer/layer.go @@ -207,10 +207,7 @@ func (l *layerBase) ValidateProperties(pm property.Map) error { return errors.New("layer should have plugin id and extension id") } - psid, err := PropertySchemaIDFromExtension(*l.plugin, *l.extension) - if err != nil { - return errors.New("layer has invalid plugin id and extension id") - } + psid := NewPropertySchemaID(*l.plugin, l.extension.String()) lp := pm[*l.property] if lp == nil { diff --git a/pkg/layer/layerops/processor.go b/pkg/layer/layerops/processor.go index ce59fbd7c..786037a3b 100644 --- a/pkg/layer/layerops/processor.go +++ b/pkg/layer/layerops/processor.go @@ -25,7 +25,7 @@ func (p Processor) UninstallPlugin(ctx context.Context, pluginID layer.PluginID) if !parentRemoved { if pid := l.Plugin(); pid == nil || 
!pid.Equal(pluginID) { // delete infobox fields - removedProperties := l.Infobox().RemoveAllByPlugin(pluginID) + removedProperties := l.Infobox().RemoveAllByPlugin(pluginID, nil) if len(removedProperties) > 0 { res.RemovedProperties = append(res.RemovedProperties, removedProperties...) res.ModifiedLayers = append(res.ModifiedLayers, &l) diff --git a/pkg/layer/loader.go b/pkg/layer/loader.go index 896c8284d..ef4e597f6 100644 --- a/pkg/layer/loader.go +++ b/pkg/layer/loader.go @@ -52,6 +52,9 @@ func (l Loader) Walk(ctx context.Context, walker func(Layer, GroupList) error, i return err } for _, l := range loaded.Deref() { + if l == nil { + continue + } if err := walker(l, parents); err == WalkerSkipChildren { continue } else if err != nil { diff --git a/pkg/plugin/id.go b/pkg/plugin/id.go index ff0220caf..29d4b9777 100644 --- a/pkg/plugin/id.go +++ b/pkg/plugin/id.go @@ -9,6 +9,7 @@ type SceneID = id.SceneID var NewID = id.NewPluginID var NewSceneID = id.NewSceneID +var NewPropertySchemaID = id.NewPropertySchemaID var MustID = id.MustPluginID var MustSceneID = id.MustSceneID @@ -25,7 +26,5 @@ var PropertySchemaIDFromRef = id.PropertySchemaIDFromRef var SceneIDFromRefID = id.SceneIDFromRefID -var PropertySchemaIDFromExtension = id.PropertySchemaIDFromExtension - var OfficialPluginID = id.OfficialPluginID var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/plugin/manifest/convert_test.go b/pkg/plugin/manifest/convert_test.go index c82140953..8643b91af 100644 --- a/pkg/plugin/manifest/convert_test.go +++ b/pkg/plugin/manifest/convert_test.go @@ -99,24 +99,24 @@ func TestManifest(t *testing.T) { Author(a). RepositoryURL(r). Description(i18n.String{"en": d, "ja": "B"}). - Schema(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "@").Ref()). + Schema(property.NewSchemaID(plugin.OfficialPluginID, "@").Ref()). Extensions([]*plugin.Extension{ plugin.NewExtension(). ID("cesium"). Name(i18n.String{"ja": "ใ‚ปใ‚ธใ‚ฆใƒ "}). Visualizer("cesium"). Type("visualizer"). 
- Schema(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "cesium")). + Schema(property.NewSchemaID(plugin.OfficialPluginID, "cesium")). System(true). MustBuild(), }).MustBuild(), ExtensionSchema: property.SchemaList{ property.NewSchema(). - ID(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "cesium")). + ID(property.NewSchemaID(plugin.OfficialPluginID, "cesium")). MustBuild(), }, Schema: property.NewSchema(). - ID(property.MustSchemaIDFromExtension(plugin.OfficialPluginID, "@")). + ID(property.NewSchemaID(plugin.OfficialPluginID, "@")). Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ property.NewSchemaGroup().ID("default").Title(i18n.String{"ja": "ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ"}).MustBuild(), })). diff --git a/pkg/plugin/manifest/diff.go b/pkg/plugin/manifest/diff.go index 32c18daa7..4453a3d78 100644 --- a/pkg/plugin/manifest/diff.go +++ b/pkg/plugin/manifest/diff.go @@ -90,8 +90,8 @@ func (d Diff) DeletedPropertySchemas() []id.PropertySchemaID { return s } -func (d Diff) PropertySchmaDiffs() []property.SchemaDiff { - s := make([]property.SchemaDiff, 0, len(d.UpdatedExtensions)+1) +func (d Diff) PropertySchmaDiffs() property.SchemaDiffList { + s := make(property.SchemaDiffList, 0, len(d.UpdatedExtensions)+1) if !d.PropertySchemaDeleted && (!d.PropertySchemaDiff.IsEmpty() || d.PropertySchemaDiff.IsIDChanged()) { s = append(s, d.PropertySchemaDiff) } diff --git a/pkg/plugin/manifest/diff_test.go b/pkg/plugin/manifest/diff_test.go index 9a0bd0f7e..ed55ce1ee 100644 --- a/pkg/plugin/manifest/diff_test.go +++ b/pkg/plugin/manifest/diff_test.go @@ -210,7 +210,7 @@ func TestDiff_PropertySchmaDiffs(t *testing.T) { tests := []struct { name string target Diff - want []property.SchemaDiff + want property.SchemaDiffList }{ { name: "ok", @@ -224,7 +224,7 @@ func TestDiff_PropertySchmaDiffs(t *testing.T) { }}, }, }, - want: []property.SchemaDiff{ + want: property.SchemaDiffList{ { From: ps1, }, @@ -236,7 +236,7 @@ func TestDiff_PropertySchmaDiffs(t 
*testing.T) { { name: "empty", target: Diff{}, - want: []property.SchemaDiff{}, + want: property.SchemaDiffList{}, }, } diff --git a/pkg/property/diff.go b/pkg/property/diff.go index 2e7c14987..ade7c7d0e 100644 --- a/pkg/property/diff.go +++ b/pkg/property/diff.go @@ -105,3 +105,36 @@ func (d *SchemaDiff) IsEmpty() bool { func (d *SchemaDiff) IsIDChanged() bool { return d != nil && !d.From.Equal(d.To) } + +type SchemaDiffList []SchemaDiff + +func (l SchemaDiffList) FindByFrom(from SchemaID) *SchemaDiff { + for _, d := range l { + if d.From.Equal(from) { + return &d + } + } + return nil +} + +func (l SchemaDiffList) FromSchemas() []SchemaID { + if len(l) == 0 { + return nil + } + + res := make([]SchemaID, 0, len(l)) + for _, d := range l { + s := d.From + found := false + for _, r := range res { + if r.Equal(s) { + found = true + break + } + } + if !found { + res = append(res, s) + } + } + return res +} diff --git a/pkg/property/diff_test.go b/pkg/property/diff_test.go index 205e4d016..63a6c8629 100644 --- a/pkg/property/diff_test.go +++ b/pkg/property/diff_test.go @@ -598,3 +598,22 @@ func TestSchemaDiff_IsIDChanged(t *testing.T) { }) } } + +func TestSchemaDiffList_FindByFrom(t *testing.T) { + p1 := MustSchemaID("a~1.0.0/a") + p2 := MustSchemaID("a~1.0.0/b") + + assert.Equal(t, &SchemaDiff{From: p1}, SchemaDiffList{{From: p1}}.FindByFrom(p1)) + assert.Nil(t, SchemaDiffList{}.FindByFrom(p2)) + assert.Nil(t, SchemaDiffList{}.FindByFrom(p1)) + assert.Nil(t, SchemaDiffList(nil).FindByFrom(p1)) +} + +func TestSchemaDiffList_FromSchemas(t *testing.T) { + p1 := MustSchemaID("a~1.0.0/a") + p2 := MustSchemaID("a~1.0.0/b") + + assert.Equal(t, []SchemaID{p1, p2}, SchemaDiffList{{From: p1}, {From: p2}, {From: p2}}.FromSchemas()) + assert.Nil(t, SchemaDiffList{}.FromSchemas()) + assert.Nil(t, SchemaDiffList(nil).FromSchemas()) +} diff --git a/pkg/property/id.go b/pkg/property/id.go index 156d14439..93f0d40ac 100644 --- a/pkg/property/id.go +++ b/pkg/property/id.go @@ -18,6 
+18,7 @@ type SceneID = id.SceneID var NewID = id.NewPropertyID var NewItemID = id.NewPropertyItemID +var NewSchemaID = id.NewPropertySchemaID var NewDatasetID = id.NewDatasetID var NewDatasetFieldID = id.NewDatasetSchemaFieldID var NewDatasetSchemaID = id.NewDatasetSchemaID @@ -26,7 +27,6 @@ var NewSceneID = id.NewSceneID var MustID = id.MustPropertyID var MustItemID = id.MustPropertyItemID var MustSchemaID = id.MustPropertySchemaID -var MustSchemaIDFromExtension = id.MustPropertySchemaIDFromExtension var MustDatasetID = id.MustDatasetID var MustDatasetFieldID = id.MustDatasetSchemaFieldID var MustDatasetSchemaID = id.MustDatasetSchemaID @@ -36,7 +36,6 @@ var IDFrom = id.PropertyIDFrom var ItemIDFrom = id.PropertyItemIDFrom var FieldIDFrom = id.PropertySchemaFieldIDFrom var SchemaIDFrom = id.PropertySchemaIDFrom -var SchemaIDFromExtension = id.PropertySchemaIDFromExtension var SchemaGroupIDFrom = id.PropertySchemaGroupIDFrom var DatasetIDFrom = id.DatasetIDFrom var DatasetFieldIDFrom = id.DatasetSchemaFieldIDFrom diff --git a/pkg/property/property.go b/pkg/property/property.go index dee03d965..1da56fe34 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -626,3 +626,7 @@ func (p *Property) updateSchema(s SchemaID) bool { p.schema = s.Clone() return true } + +func (p *Property) SetSchema(schema SchemaID) { + p.schema = schema.Clone() +} diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go index a160efbb4..70a0b41e9 100644 --- a/pkg/scene/builder.go +++ b/pkg/scene/builder.go @@ -23,10 +23,7 @@ func (b *Builder) Build() (*Scene, error) { return nil, ErrInvalidID } if b.scene.widgets == nil { - b.scene.widgets = NewWidgets(nil) - } - if b.scene.widgetAlignSystem == nil { - b.scene.widgetAlignSystem = NewWidgetAlignSystem() + b.scene.widgets = NewWidgets(nil, nil) } if b.scene.plugins == nil { b.scene.plugins = NewPlugins(nil) @@ -75,11 +72,6 @@ func (b *Builder) Widgets(widgets *Widgets) *Builder { return b } -func (b *Builder) 
WidgetAlignSystem(widgetAlignSystem *WidgetAlignSystem) *Builder { - b.scene.widgetAlignSystem = widgetAlignSystem - return b -} - func (b *Builder) RootLayer(rootLayer LayerID) *Builder { b.scene.rootLayer = rootLayer return b diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index 40bc388fd..61d661523 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -414,7 +414,7 @@ func TestSceneBuilder(t *testing.T) { Property(scenep.ID()). Widgets(scene.NewWidgets([]*scene.Widget{ sceneWidget1, sceneWidget2, - })). + }, nil)). Plugins(scene.NewPlugins([]*scene.Plugin{scenePlugin1})). RootLayer(rootLayer.ID()). MustBuild() diff --git a/pkg/scene/builder/scene.go b/pkg/scene/builder/scene.go index fcdbd9363..af1ff48f8 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -38,7 +38,7 @@ func (b *Builder) scene(ctx context.Context, s *scene.Scene, publishedAt time.Ti Clusters: b.clusters(ctx, s, p), Layers: l, Tags: tags, - WidgetAlignSystem: buildWidgetAlignSystem(s.WidgetAlignSystem()), + WidgetAlignSystem: buildWidgetAlignSystem(s.Widgets().Alignment()), }, nil } diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go index 566d5d688..8b56b6e59 100644 --- a/pkg/scene/builder_test.go +++ b/pkg/scene/builder_test.go @@ -48,15 +48,10 @@ func TestBuilder_Project(t *testing.T) { func TestBuilder_Widgets(t *testing.T) { ws := NewWidgets([]*Widget{ MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", NewPropertyID(), true, false), - }) + }, nil) b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Widgets(ws).MustBuild() assert.Equal(t, ws, b.Widgets()) } -func TestBuilder_WidgetAlignSystem(t *testing.T) { - was := NewWidgetAlignSystem() - b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).WidgetAlignSystem(was).MustBuild() - assert.Equal(t, was, b.WidgetAlignSystem()) -} func TestBuilder_Build(t *testing.T) { tid := NewTeamID() @@ -66,22 +61,20 @@ func 
TestBuilder_Build(t *testing.T) { lid := NewLayerID() ws := NewWidgets([]*Widget{ MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), - }) - was := NewWidgetAlignSystem() + }, nil) ps := NewPlugins([]*Plugin{ NewPlugin(OfficialPluginID, ppid.Ref()), }) type args struct { - ID ID - Project ProjectID - Team TeamID - RootLayer LayerID - Widgets *Widgets - WidgetAlignSystem *WidgetAlignSystem - Plugins *Plugins - UpdatedAt time.Time - Property PropertyID + ID ID + Project ProjectID + Team TeamID + RootLayer LayerID + Widgets *Widgets + Plugins *Plugins + UpdatedAt time.Time + Property PropertyID } tests := []struct { @@ -93,71 +86,66 @@ func TestBuilder_Build(t *testing.T) { { Name: "fail nil scene id", Args: args{ - ID: ID{}, - Project: pid, - Team: tid, - RootLayer: lid, - Widgets: ws, - WidgetAlignSystem: was, - Plugins: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + ID: ID{}, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, }, Err: ErrInvalidID, }, { Name: "fail nil team id", Args: args{ - ID: sid, - Project: pid, - Team: TeamID{}, - RootLayer: lid, - Widgets: ws, - WidgetAlignSystem: was, - Plugins: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + ID: sid, + Project: pid, + Team: TeamID{}, + RootLayer: lid, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, }, Err: ErrInvalidID, }, { Name: "fail nil root layer id", Args: args{ - ID: sid, - Project: pid, - Team: tid, - RootLayer: LayerID{}, - Widgets: ws, - WidgetAlignSystem: was, - Plugins: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + ID: sid, + Project: pid, + Team: tid, + RootLayer: LayerID{}, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, }, Err: ErrInvalidID, }, { Name: 
"success build new scene", Args: args{ - ID: sid, - Project: pid, - Team: tid, - RootLayer: lid, - Widgets: ws, - WidgetAlignSystem: was, - Plugins: ps, - UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - Property: ppid, + ID: sid, + Project: pid, + Team: tid, + RootLayer: lid, + Widgets: ws, + Plugins: ps, + UpdatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + Property: ppid, }, Expected: &Scene{ - id: sid, - project: pid, - team: tid, - rootLayer: lid, - widgets: ws, - widgetAlignSystem: was, - plugins: ps, - updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - property: ppid, + id: sid, + project: pid, + team: tid, + rootLayer: lid, + widgets: ws, + plugins: ps, + updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + property: ppid, }, }, } @@ -169,7 +157,6 @@ func TestBuilder_Build(t *testing.T) { res, err := New(). ID(tt.Args.ID). Widgets(tt.Args.Widgets). - WidgetAlignSystem(tt.Args.WidgetAlignSystem). Project(tt.Args.Project). Plugins(tt.Args.Plugins). Property(tt.Args.Property). @@ -195,7 +182,7 @@ func TestBuilder_MustBuild(t *testing.T) { lid := NewLayerID() ws := NewWidgets([]*Widget{ MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), - }) + }, nil) was := NewWidgetAlignSystem() ps := NewPlugins([]*Plugin{ NewPlugin(OfficialPluginID, ppid.Ref()), @@ -278,15 +265,14 @@ func TestBuilder_MustBuild(t *testing.T) { Property: ppid, }, Expected: &Scene{ - id: sid, - project: pid, - team: tid, - rootLayer: lid, - widgets: ws, - widgetAlignSystem: was, - plugins: ps, - updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), - property: ppid, + id: sid, + project: pid, + team: tid, + rootLayer: lid, + widgets: ws, + plugins: ps, + updatedAt: time.Date(2000, 1, 1, 1, 1, 0, 0, time.UTC), + property: ppid, }, }, } @@ -301,7 +287,6 @@ func TestBuilder_MustBuild(t *testing.T) { return New(). ID(tt.Args.ID). Widgets(tt.Args.Widgets). - WidgetAlignSystem(tt.Args.WidgetAlignSystem). Project(tt.Args.Project). 
Plugins(tt.Args.Plugins). Property(tt.Args.Property). diff --git a/pkg/scene/plugin.go b/pkg/scene/plugin.go index e003d2c66..3a5900bb1 100644 --- a/pkg/scene/plugin.go +++ b/pkg/scene/plugin.go @@ -19,6 +19,13 @@ func (s *Plugin) Plugin() PluginID { return s.plugin } +func (s *Plugin) PluginRef() *PluginID { + if s == nil { + return nil + } + return s.plugin.Ref() +} + func (s *Plugin) Property() *PropertyID { if s == nil { return nil diff --git a/pkg/scene/plugin_test.go b/pkg/scene/plugin_test.go index afcf49df4..9df0fb896 100644 --- a/pkg/scene/plugin_test.go +++ b/pkg/scene/plugin_test.go @@ -16,9 +16,12 @@ func TestPlugin(t *testing.T) { property: pr, }, res) assert.Equal(t, pid, res.Plugin()) + assert.Equal(t, &pid, res.PluginRef()) assert.Equal(t, pr, res.Property()) cl := res.Clone() assert.Equal(t, res, cl) assert.NotSame(t, res, cl) + + assert.Nil(t, (*Plugin)(nil).PluginRef()) } diff --git a/pkg/scene/plugins.go b/pkg/scene/plugins.go index a0367934b..de525e534 100644 --- a/pkg/scene/plugins.go +++ b/pkg/scene/plugins.go @@ -50,7 +50,15 @@ func (p *Plugins) Has(id PluginID) bool { } func (p *Plugins) HasPlugin(id PluginID) bool { - name := id.Name() + for _, p2 := range p.plugins { + if p2.plugin.Equal(id) { + return true + } + } + return false +} + +func (p *Plugins) HasPluginByName(name string) bool { for _, p2 := range p.plugins { if p2.plugin.Name() == name { return true @@ -60,7 +68,7 @@ func (p *Plugins) HasPlugin(id PluginID) bool { } func (p *Plugins) Add(sp *Plugin) { - if sp == nil || p.Has(sp.plugin) || sp.plugin.Equal(OfficialPluginID) { + if sp == nil || p.HasPluginByName(sp.plugin.Name()) || sp.plugin.Equal(OfficialPluginID) { return } p.plugins = append(p.plugins, sp) @@ -78,13 +86,24 @@ func (p *Plugins) Remove(pid PluginID) { } } -func (p *Plugins) Upgrade(pid, newID PluginID) { +func (p *Plugins) Upgrade(from, to PluginID, pr *PropertyID, deleteProperty bool) { + if p == nil || from.IsNil() || to.IsNil() { + return + } + for i, p2 := 
range p.plugins { if p2.plugin.Equal(OfficialPluginID) { continue } - if p2.plugin.Equal(pid) { - p.plugins[i] = &Plugin{plugin: newID, property: p2.property} + if p2.plugin.Equal(from) { + var newpr *PropertyID + if !deleteProperty { + newpr = pr.CopyRef() + if newpr == nil { + newpr = p2.property.CopyRef() + } + } + p.plugins[i] = &Plugin{plugin: to, property: newpr} return } } @@ -111,3 +130,12 @@ func (p *Plugins) Plugin(pluginID PluginID) *Plugin { } return nil } + +func (p *Plugins) PluginByName(name string) *Plugin { + for _, pp := range p.plugins { + if pp.plugin.Name() == name { + return pp + } + } + return nil +} diff --git a/pkg/scene/plugins_test.go b/pkg/scene/plugins_test.go index e0bece931..a2eb40507 100644 --- a/pkg/scene/plugins_test.go +++ b/pkg/scene/plugins_test.go @@ -139,6 +139,40 @@ func TestPlugins_Plugin(t *testing.T) { } } +func TestPlugins_PluginByName(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input string + PS *Plugins + Expected *Plugin + }{ + { + Name: "plugin is found", + Input: "xxx", + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: NewPlugin(pid, pr), + }, + { + Name: "plugin is not found", + Input: "xxz", + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: nil, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.PluginByName(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + func TestPlugins_Properties(t *testing.T) { pr := NewPropertyID().Ref() pr2 := NewPropertyID().Ref() @@ -218,15 +252,15 @@ func TestPlugins_HasPlugin(t *testing.T) { Expected bool }{ { - Name: "property is found", + Name: "plugin is found", Input: pid, PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), Expected: true, }, { - Name: "property is not found", + Name: "plugin is not found", Input: pid, - PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + PS: 
NewPlugins([]*Plugin{NewPlugin(MustPluginID("xxx~1.2.1"), pr)}), Expected: false, }, } @@ -241,6 +275,40 @@ func TestPlugins_HasPlugin(t *testing.T) { } } +func TestPlugins_HasPluginByName(t *testing.T) { + pid := MustPluginID("xxx~1.1.1") + pr := NewPropertyID().Ref() + + tests := []struct { + Name string + Input string + PS *Plugins + Expected bool + }{ + { + Name: "plugin is found", + Input: "xxx", + PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + Expected: true, + }, + { + Name: "plugin is not found", + Input: "xxxx", + PS: NewPlugins([]*Plugin{NewPlugin(MustPluginID("zzz~1.1.1"), pr)}), + Expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.PS.HasPluginByName(tc.Input) + assert.Equal(t, tc.Expected, res) + }) + } +} + func TestPlugins_Add(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pr := NewPropertyID().Ref() @@ -323,33 +391,77 @@ func TestPlugins_Upgrade(t *testing.T) { pid := MustPluginID("xxx~1.1.1") nid := MustPluginID("zzz~1.1.1") pr := NewPropertyID().Ref() + pr2 := NewPropertyID().Ref() + + type args struct { + From PluginID + To PluginID + Property *PropertyID + DeleteProperty bool + } tests := []struct { - Name string - PID, NewID PluginID - PS, Expected *Plugins + name string + args args + target *Plugins + want *Plugins }{ { - Name: "upgrade official plugin", - PID: OfficialPluginID, - PS: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), - Expected: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + name: "upgrade a plugin", + args: args{ + From: pid, + To: nid, + }, + target: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(nid, pr)}), }, { - Name: "upgrade a plugin", - PID: pid, - NewID: nid, - PS: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), - Expected: NewPlugins([]*Plugin{NewPlugin(nid, pr)}), + name: "upgrade a plugin with changing property", + args: args{ + From: pid, + To: nid, + Property: pr2, + }, + target: 
NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(nid, pr2)}), + }, + { + name: "upgrade a plugin with deleting property", + args: args{ + From: pid, + To: nid, + Property: pr2, + DeleteProperty: true, + }, + target: NewPlugins([]*Plugin{NewPlugin(pid, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(nid, nil)}), + }, + { + name: "upgrade official plugin", + args: args{ + From: OfficialPluginID, + To: nid, + }, + target: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + want: NewPlugins([]*Plugin{NewPlugin(OfficialPluginID, pr)}), + }, + { + name: "nil", + args: args{ + From: pid, + To: nid, + }, + target: nil, + want: nil, }, } for _, tc := range tests { tc := tc - t.Run(tc.Name, func(t *testing.T) { + t.Run(tc.name, func(t *testing.T) { t.Parallel() - tc.PS.Upgrade(tc.PID, tc.NewID) - assert.Equal(t, tc.Expected, tc.PS) + tc.target.Upgrade(tc.args.From, tc.args.To, tc.args.Property, tc.args.DeleteProperty) + assert.Equal(t, tc.want, tc.target) }) } } diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index fa543b083..d22c0d176 100644 --- a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -8,16 +8,15 @@ import ( var ErrSceneIsLocked error = errors.New("scene is locked") type Scene struct { - id ID - project ProjectID - team TeamID - rootLayer LayerID - widgets *Widgets - widgetAlignSystem *WidgetAlignSystem - plugins *Plugins - updatedAt time.Time - property PropertyID - clusters *ClusterList + id ID + project ProjectID + team TeamID + rootLayer LayerID + widgets *Widgets + plugins *Plugins + updatedAt time.Time + property PropertyID + clusters *ClusterList } func (s *Scene) ID() ID { @@ -69,13 +68,6 @@ func (s *Scene) Widgets() *Widgets { return s.widgets } -func (s *Scene) WidgetAlignSystem() *WidgetAlignSystem { - if s == nil { - return nil - } - return s.widgetAlignSystem -} - func (s *Scene) Plugins() *Plugins { if s == nil { return nil diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index 
187bd10d7..07088ec7e 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -70,13 +70,10 @@ func TestScene_Properties(t *testing.T) { RootLayer(NewLayerID()). Property(pid1). Widgets( - NewWidgets( - []*Widget{ - MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), - }, - ), + NewWidgets([]*Widget{ + MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), + }, nil), ). - WidgetAlignSystem(NewWidgetAlignSystem()). MustBuild() assert.Equal(t, []PropertyID{pid1, pid2}, s.Properties()) @@ -87,7 +84,6 @@ func TestSceneNil(t *testing.T) { assert.Nil(t, s.Properties()) assert.True(t, s.ID().IsNil()) assert.Nil(t, s.Widgets()) - assert.Nil(t, s.WidgetAlignSystem()) assert.True(t, s.Project().IsNil()) assert.True(t, s.Team().IsNil()) assert.True(t, s.RootLayer().IsNil()) diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index e49e2d7a3..beec7d526 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -100,7 +100,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol } // ใ‚ทใƒผใƒณใฎใƒ—ใƒฉใ‚ฐใ‚คใƒณ - sc.Plugins().Upgrade(oldPluginID, newPluginID) + sc.Plugins().Upgrade(oldPluginID, newPluginID, nil, false) for _, sp := range sc.Plugins().Plugins() { if sp.Plugin().Equal(newPluginID) && sp.Property() != nil { propertyIDs = append(propertyIDs, *sp.Property()) @@ -112,7 +112,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol for _, w := range sc.Widgets().Widgets() { if w.Plugin().Equal(newPluginID) { if newPlugin.Extension(w.Extension()) == nil { - sc.Widgets().RemoveAllByExtension(oldPluginID, w.Extension()) + sc.Widgets().RemoveAllByPlugin(oldPluginID, w.Extension().Ref()) } else { propertyIDs = append(propertyIDs, w.Property()) } diff --git a/pkg/scene/widgets.go b/pkg/scene/widgets.go index 657320751..91d139301 100644 --- a/pkg/scene/widgets.go +++ 
b/pkg/scene/widgets.go @@ -10,11 +10,15 @@ var ( type Widgets struct { widgets []*Widget + align *WidgetAlignSystem } -func NewWidgets(w []*Widget) *Widgets { +func NewWidgets(w []*Widget, a *WidgetAlignSystem) *Widgets { + if a == nil { + a = NewWidgetAlignSystem() + } if w == nil { - return &Widgets{widgets: []*Widget{}} + return &Widgets{widgets: []*Widget{}, align: a} } w2 := make([]*Widget, 0, len(w)) for _, w1 := range w { @@ -32,7 +36,7 @@ func NewWidgets(w []*Widget) *Widgets { w2 = append(w2, w1) } } - return &Widgets{widgets: w2} + return &Widgets{widgets: w2, align: a} } func (w *Widgets) Widgets() []*Widget { @@ -42,6 +46,13 @@ func (w *Widgets) Widgets() []*Widget { return append([]*Widget{}, w.widgets...) } +func (w *Widgets) Alignment() *WidgetAlignSystem { + if w == nil { + return nil + } + return w.align +} + func (w *Widgets) Widget(wid WidgetID) *Widget { if w == nil { return nil @@ -85,27 +96,15 @@ func (w *Widgets) Remove(wid WidgetID) { } } -func (w *Widgets) RemoveAllByPlugin(p PluginID) (res []PropertyID) { - if w == nil { - return nil - } - for i := 0; i < len(w.widgets); i++ { - if w.widgets[i].plugin.Equal(p) { - res = append(res, w.widgets[i].Property()) - w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) - i-- - } - } - return res -} - -func (w *Widgets) RemoveAllByExtension(p PluginID, e PluginExtensionID) (res []PropertyID) { +func (w *Widgets) RemoveAllByPlugin(p PluginID, e *PluginExtensionID) (res []PropertyID) { if w == nil { return nil } for i := 0; i < len(w.widgets); i++ { - if w.widgets[i].Plugin().Equal(p) && w.widgets[i].Extension() == e { - res = append(res, w.widgets[i].Property()) + ww := w.widgets[i] + if ww.Plugin().Equal(p) && (e == nil || ww.Extension() == *e) { + res = append(res, ww.Property()) + w.align.Remove(ww.ID()) w.widgets = append(w.widgets[:i], w.widgets[i+1:]...) 
i-- } diff --git a/pkg/scene/widgets_test.go b/pkg/scene/widgets_test.go index 53422878e..cd283f670 100644 --- a/pkg/scene/widgets_test.go +++ b/pkg/scene/widgets_test.go @@ -51,7 +51,7 @@ func TestNewWidgets(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.Expected, NewWidgets(tc.Input).Widgets()) + assert.Equal(t, tc.Expected, NewWidgets(tc.Input, nil).Widgets()) }) } } @@ -98,7 +98,7 @@ func TestWidgets_Add(t *testing.T) { t.Parallel() var ws *Widgets if !tc.Nil { - ws = NewWidgets(tc.Widgets) + ws = NewWidgets(tc.Widgets, nil) } ws.Add(tc.Input) assert.Equal(t, tc.Expected, ws.Widgets()) @@ -138,7 +138,7 @@ func TestWidgets_Remove(t *testing.T) { ws = NewWidgets([]*Widget{ MustNewWidget(wid, pid2, "e1", pr, true, false), MustNewWidget(wid2, pid, "e1", pr, true, false), - }) + }, nil) assert.True(t, ws.Has(tc.Input)) } ws.Remove(tc.Input) @@ -157,61 +157,27 @@ func TestWidgets_RemoveAllByPlugin(t *testing.T) { tests := []struct { Name string PID PluginID + EID *PluginExtensionID WS, Expected *Widgets ExpectedResult []PropertyID }{ { Name: "remove widgets", PID: pid, - WS: NewWidgets([]*Widget{w1, w2, w3}), - Expected: NewWidgets([]*Widget{w3}), + WS: NewWidgets([]*Widget{w1, w2, w3}, nil), + Expected: NewWidgets([]*Widget{w3}, nil), ExpectedResult: []PropertyID{w1.Property(), w2.Property()}, }, - { - Name: "remove from nil widgets", - WS: nil, - Expected: nil, - ExpectedResult: nil, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.ExpectedResult, tc.WS.RemoveAllByPlugin(tc.PID)) - assert.Equal(t, tc.Expected, tc.WS) - }) - } -} - -func TestWidgets_RemoveAllByExtension(t *testing.T) { - pid := MustPluginID("xxx~1.1.1") - pid2 := MustPluginID("xxx~1.1.2") - w1 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) - w2 := MustNewWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) - w3 := MustNewWidget(NewWidgetID(), 
pid, "e1", NewPropertyID(), true, false) - w4 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) - - tests := []struct { - Name string - PID PluginID - EID PluginExtensionID - WS, Expected *Widgets - ExpectedResult []PropertyID - }{ { Name: "remove widgets", PID: pid, - EID: PluginExtensionID("e1"), - WS: NewWidgets([]*Widget{w1, w2, w3, w4}), - Expected: NewWidgets([]*Widget{w2, w4}), - ExpectedResult: []PropertyID{w1.Property(), w3.Property()}, + EID: PluginExtensionID("e2").Ref(), + WS: NewWidgets([]*Widget{w1, w2, w3}, nil), + Expected: NewWidgets([]*Widget{w1, w3}, nil), + ExpectedResult: []PropertyID{w2.Property()}, }, { - Name: "remove widgets from nil widget system", - PID: pid, - EID: PluginExtensionID("e1"), + Name: "remove from nil widgets", WS: nil, Expected: nil, ExpectedResult: nil, @@ -222,7 +188,7 @@ func TestWidgets_RemoveAllByExtension(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.Equal(t, tc.ExpectedResult, tc.WS.RemoveAllByExtension(tc.PID, tc.EID)) + assert.Equal(t, tc.ExpectedResult, tc.WS.RemoveAllByPlugin(tc.PID, tc.EID)) assert.Equal(t, tc.Expected, tc.WS) }) } @@ -243,14 +209,14 @@ func TestWidgets_ReplacePlugin(t *testing.T) { Name: "replace a widget", PID: pid, NewID: pid2, - WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), - Expected: NewWidgets([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true, false)}), + WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}, nil), + Expected: NewWidgets([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true, false)}, nil), }, { Name: "replace with nil widget", PID: pid, - WS: NewWidgets(nil), - Expected: NewWidgets(nil), + WS: NewWidgets(nil, nil), + Expected: NewWidgets(nil, nil), }, { Name: "replace from nil widgets", @@ -286,7 +252,7 @@ func TestWidgets_Properties(t *testing.T) { WS: NewWidgets([]*Widget{ MustNewWidget(wid, pid, "eee", pr, true, false), MustNewWidget(wid2, pid, "eee", pr2, true, 
false), - }), + }, nil), Expected: []PropertyID{pr, pr2}, }, { @@ -323,7 +289,7 @@ func TestWidgets_Widgets(t *testing.T) { WS: NewWidgets([]*Widget{ MustNewWidget(wid, pid, "eee", pr, true, false), MustNewWidget(wid2, pid, "eee", pr2, true, false), - }), + }, nil), Expected: []*Widget{ MustNewWidget(wid, pid, "eee", pr, true, false), MustNewWidget(wid2, pid, "eee", pr2, true, false), @@ -360,13 +326,13 @@ func TestWidgets_Widget(t *testing.T) { { Name: "get a widget", ID: wid, - WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}, nil), Expected: MustNewWidget(wid, pid, "eee", pr, true, false), }, { Name: "dont has the widget", ID: wid, - WS: NewWidgets([]*Widget{}), + WS: NewWidgets([]*Widget{}, nil), Expected: nil, }, { @@ -401,13 +367,13 @@ func TestWidgets_Has(t *testing.T) { { Name: "has a widget", ID: wid, - WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}), + WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}, nil), Expected: true, }, { Name: "dont has a widget", ID: wid, - WS: NewWidgets([]*Widget{}), + WS: NewWidgets([]*Widget{}, nil), Expected: false, }, { From 6084365498b1c717ba1ac30748b657d2f455d2a7 Mon Sep 17 00:00:00 2001 From: mimoham24 <69579255+mimoham24@users.noreply.github.com> Date: Wed, 16 Feb 2022 04:48:28 +0300 Subject: [PATCH 152/253] feat: update infobox style fields (#115) * fix show title * add description for the height * add japanese translation Co-authored-by: HideBa --- pkg/builtin/manifest.yml | 2 ++ pkg/builtin/manifest_ja.yml | 1 + 2 files changed, 3 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index e4bc0784c..c98700081 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -301,6 +301,7 @@ extensions: - id: showTitle type: bool title: Show Title + defaultValue: true - id: position type: string title: Position @@ -338,6 +339,7 @@ 
extensions: min: 284 max: 2048 suffix: px + description: "This sets the infobox height. Min: 284 Max: 2048" availableIf: field: heightType type: string diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index e0b05e420..99ebb28ef 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -177,6 +177,7 @@ extensions: manual: ๆ‰‹ๅ‹• height: title: ้ซ˜ใ• + description: ใ‚คใƒณใƒ•ใ‚ฉใƒœใƒƒใ‚ฏใ‚นใฎ้ซ˜ใ•ใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ284pxใƒปๆœ€ๅคง2048px infoboxPaddingTop: title: ไฝ™็™ฝไธŠ description: "ไธŠ็ซฏใฎไฝ™็™ฝใ‚’่จญๅฎšใ—ใพใ™ใ€‚ๆœ€ๅฐ0pxใƒปๆœ€ๅคง40px" From 5009c5edaea6b2e5b2ea7e2340551a1222a55910 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 17 Feb 2022 22:08:12 +0900 Subject: [PATCH 153/253] fix: property field in groups in list cannot be updated correctly --- internal/usecase/interactor/property.go | 1 - internal/usecase/interactor/property_test.go | 166 +++++++++++++++++ pkg/property/property.go | 16 +- pkg/property/property_test.go | 177 ++++++++++++++++++- 4 files changed, 354 insertions(+), 6 deletions(-) create mode 100644 internal/usecase/interactor/property_test.go diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index 3d612a0cb..d2ddbe86b 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -457,7 +457,6 @@ func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItem } func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemovePropertyItemParam, operator *usecase.Operator) (p *property.Property, err error) { - tx, err := i.transaction.Begin() if err != nil { return diff --git a/internal/usecase/interactor/property_test.go b/internal/usecase/interactor/property_test.go new file mode 100644 index 000000000..c479ee53a --- /dev/null +++ b/internal/usecase/interactor/property_test.go @@ -0,0 +1,166 @@ +package interactor + +import ( + "context" + "testing" + + 
"github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/stretchr/testify/assert" +) + +func TestProperty_AddItem(t *testing.T) { + ctx := context.Background() + memory := memory.InitRepos(nil) + + team := id.NewTeamID() + scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() + psg := property.NewSchemaGroup().ID("foobar").IsList(true).Fields([]*property.SchemaField{ + property.NewSchemaField().ID("field").Type(property.ValueTypeString).MustBuild(), + }).MustBuild() + ps := property.NewSchema().ID(property.MustSchemaID("xxx~1.1.1/aa")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + psg, + })). + MustBuild() + p := property.New().NewID().Scene(scene.ID()).Schema(ps.ID()).MustBuild() + _ = memory.Scene.Save(ctx, scene) + _ = memory.PropertySchema.Save(ctx, ps) + _ = memory.Property.Save(ctx, p) + + uc := &Property{ + commonScene: commonScene{sceneRepo: memory.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + propertyRepo: memory.Property, + propertySchemaRepo: memory.PropertySchema, + transaction: memory.Transaction, + } + op := &usecase.Operator{ + ReadableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + } + + index := -1 + np, npl, npg, err := uc.AddItem(ctx, interfaces.AddPropertyItemParam{ + PropertyID: p.ID(), + Index: &index, + Pointer: property.PointItemBySchema(psg.ID()), + }, op) + assert.NoError(t, err) + assert.NotNil(t, np) + assert.NotNil(t, npl) + assert.NotNil(t, npg) + assert.Equal(t, p.ID(), np.ID()) + assert.Equal(t, psg.ID(), npl.SchemaGroup()) + assert.Equal(t, psg.ID(), npg.SchemaGroup()) + + assert.Same(t, npl, property.ToGroupList(np.ItemBySchema(psg.ID()))) + 
assert.Equal(t, npg, npl.GroupAt(0)) + assert.Equal(t, 1, len(npl.Groups())) + + np2, _ := memory.Property.FindByID(ctx, p.ID(), nil) + assert.Equal(t, np, np2) +} + +func TestProperty_RemoveItem(t *testing.T) { + ctx := context.Background() + memory := memory.InitRepos(nil) + + team := id.NewTeamID() + scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() + psg := property.NewSchemaGroup().ID("foobar").IsList(true).MustBuild() + ps := property.NewSchema().ID(property.MustSchemaID("xxx~1.1.1/aa")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{ + psg, + })). + MustBuild() + pg := property.NewGroup().NewID().SchemaGroup(psg.ID()).MustBuild() + pl := property.NewGroupList().NewID().SchemaGroup(psg.ID()).Groups([]*property.Group{pg}).MustBuild() + p := property.New().NewID().Scene(scene.ID()).Schema(ps.ID()).Items([]property.Item{pl}).MustBuild() + _ = memory.Scene.Save(ctx, scene) + _ = memory.PropertySchema.Save(ctx, ps) + _ = memory.Property.Save(ctx, p) + + uc := &Property{ + commonScene: commonScene{sceneRepo: memory.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + propertyRepo: memory.Property, + propertySchemaRepo: memory.PropertySchema, + transaction: memory.Transaction, + } + op := &usecase.Operator{ + ReadableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + } + + np, err := uc.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ + PropertyID: p.ID(), + Pointer: property.NewPointer(psg.IDRef(), pg.IDRef(), nil), + }, op) + assert.NoError(t, err) + assert.NotNil(t, np) + assert.Equal(t, p.ID(), np.ID()) + + npl := property.ToGroupList(np.ItemBySchema(psg.ID())) + assert.Equal(t, 0, len(npl.Groups())) + + np2, _ := memory.Property.FindByID(ctx, p.ID(), nil) + assert.Equal(t, np, np2) +} + +func TestProperty_UpdateValue_FieldOfGroupInList(t *testing.T) { + ctx := context.Background() + memory := memory.InitRepos(nil) + + team := id.NewTeamID() + scene := 
scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() + psf := property.NewSchemaField().ID("field").Type(property.ValueTypeString).MustBuild() + psg := property.NewSchemaGroup().ID("foobar").IsList(true).Fields([]*property.SchemaField{psf}).MustBuild() + ps := property.NewSchema().ID(property.MustSchemaID("xxx~1.1.1/aa")). + Groups(property.NewSchemaGroupList([]*property.SchemaGroup{psg})). + MustBuild() + pg := property.NewGroup().NewID().SchemaGroup(psg.ID()).MustBuild() + pl := property.NewGroupList().NewID().SchemaGroup(psg.ID()).Groups([]*property.Group{pg}).MustBuild() + p := property.New().NewID().Scene(scene.ID()).Schema(ps.ID()).Items([]property.Item{pl}).MustBuild() + _ = memory.Scene.Save(ctx, scene) + _ = memory.PropertySchema.Save(ctx, ps) + _ = memory.Property.Save(ctx, p) + + uc := &Property{ + commonScene: commonScene{sceneRepo: memory.Scene}, + commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + propertyRepo: memory.Property, + propertySchemaRepo: memory.PropertySchema, + transaction: memory.Transaction, + } + op := &usecase.Operator{ + ReadableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + } + + np, npl, npg, npf, err := uc.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ + PropertyID: p.ID(), + Pointer: property.PointField(psg.IDRef(), pg.IDRef(), psf.ID()), + Value: property.ValueTypeString.ValueFrom("aaaa"), + }, op) + + assert.NoError(t, err) + assert.NotNil(t, np) + assert.NotNil(t, npl) + assert.NotNil(t, npg) + assert.NotNil(t, npf) + assert.Equal(t, p.ID(), np.ID()) + assert.Equal(t, pl.ID(), npl.ID()) + assert.Equal(t, []*property.Group{pg}, npl.Groups()) + assert.Equal(t, pg.ID(), npg.ID()) + assert.Same(t, npf, npg.Field(psf.ID())) + assert.Equal(t, psf.ID(), npf.Field()) + assert.Equal(t, property.ValueTypeString.ValueFrom("aaaa"), npf.Value()) + + np2, _ := memory.Property.FindByID(ctx, p.ID(), nil) + assert.Equal(t, np, np2) +} diff --git a/pkg/property/property.go 
b/pkg/property/property.go index 1da56fe34..c72f6c5aa 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -75,7 +75,7 @@ func (p *Property) GroupAndList(ptr *Pointer) (*Group, *GroupList) { } for _, i := range p.items { - if ptr.TestItem(i.SchemaGroup(), i.ID()) { + if ptr.TestSchemaGroup(i.SchemaGroup()) { if gl := ToGroupList(i); gl != nil { return gl.GroupByPointer(ptr), gl } else if g := ToGroup(i); g != nil { @@ -234,6 +234,14 @@ func (p *Property) Datasets() []DatasetID { return res } +func (p *Property) AddItem(i Item) bool { + if p == nil || p.ItemBySchema(i.SchemaGroup()) != nil || p.Item(PointItem(i.ID())) != nil { + return false + } + p.items = append(p.items, i) + return true +} + func (p *Property) RemoveItem(ptr *Pointer) { if p == nil || ptr == nil { return @@ -283,6 +291,7 @@ func (p *Property) UpdateValue(ps *Schema, ptr *Pointer, v *Value) (*Field, *Gro field, gl, g, created := p.GetOrCreateField(ps, ptr) if field == nil || created && v == nil { // The field is empty and will be removed by prune, so it does not make sense + // p.Prune() return nil, nil, nil, nil } @@ -351,7 +360,7 @@ func (p *Property) GetOrCreateItem(ps *Schema, ptr *Pointer) (Item, *GroupList) ni := InitItemFrom(psg) if ni != nil { - p.items = append(p.items, ni) + _ = p.AddItem(ni) } return ni, nil // root item @@ -395,8 +404,7 @@ func (p *Property) GetOrCreateRootGroup(ptr *Pointer) (*Group, bool) { return nil, false } - p.items = append(p.items, ng) - return ng, true + return ng, p.AddItem(ng) } func (p *Property) GetOrCreateGroupList(ps *Schema, ptr *Pointer) *GroupList { diff --git a/pkg/property/property_test.go b/pkg/property/property_test.go index 9d2fbc410..aaca5c8dd 100644 --- a/pkg/property/property_test.go +++ b/pkg/property/property_test.go @@ -274,7 +274,7 @@ func TestRemoveListItem(t *testing.T) { assert.Equal(t, []*Group{g2}, gl.Groups()) assert.Equal(t, 1, len(p.Items())) - ok = p.RemoveListItem(PointItem(g2.ID())) + ok = 
p.RemoveListItem(NewPointer(sgid.Ref(), g2.IDRef(), nil)) assert.True(t, ok) assert.Equal(t, []*Group{}, gl.Groups()) assert.Equal(t, 0, len(p.Items())) @@ -673,3 +673,178 @@ func TestProperty_MoveFields(t *testing.T) { }) } } + +func TestProperty_GroupAndList(t *testing.T) { + type args struct { + ptr *Pointer + } + + pgid1 := NewItemID() + pgid2 := NewItemID() + + tests := []struct { + name string + target *Property + args args + want *Group + want1 *GroupList + }{ + { + name: "found", + target: &Property{ + items: []Item{ + &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + }, + args: args{ + ptr: &Pointer{ + schemaGroup: SchemaGroupID("aaaa").Ref(), + item: pgid2.Ref(), + field: nil, + }, + }, + want: &Group{ + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + want1: &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + { + name: "list only", + target: &Property{ + items: []Item{ + &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + }, + args: args{ + ptr: &Pointer{ + schemaGroup: SchemaGroupID("aaaa").Ref(), + item: pgid1.Ref(), + field: nil, + }, + }, + want: nil, + want1: &GroupList{ + itemBase: itemBase{ + ID: pgid1, + SchemaGroup: SchemaGroupID("aaaa"), + }, + groups: []*Group{ + { + itemBase: itemBase{ + ID: pgid2, + SchemaGroup: SchemaGroupID("aaaa"), + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got, got1 := tt.target.GroupAndList(tt.args.ptr) + assert.Equal(t, 
tt.want, got) + assert.Equal(t, tt.want1, got1) + }) + } +} + +func TestProperty_AddItem(t *testing.T) { + type args struct { + i Item + } + + iid := NewItemID() + + tests := []struct { + name string + target *Property + args args + want bool + wantItems []Item + }{ + { + name: "ok", + target: &Property{}, + args: args{i: &Group{}}, + want: true, + wantItems: []Item{&Group{}}, + }, + { + name: "schema group duplicated", + target: &Property{items: []Item{&Group{itemBase: itemBase{SchemaGroup: "a"}}}}, + args: args{i: &Group{itemBase: itemBase{SchemaGroup: "a"}}}, + want: false, + wantItems: []Item{&Group{itemBase: itemBase{SchemaGroup: "a"}}}, + }, + { + name: "id duplicated", + target: &Property{items: []Item{&Group{itemBase: itemBase{ID: iid}}}}, + args: args{i: &Group{itemBase: itemBase{ID: iid}}}, + want: false, + wantItems: []Item{&Group{itemBase: itemBase{ID: iid}}}, + }, + { + name: "nil", + target: nil, + args: args{i: &Group{}}, + want: false, + wantItems: nil, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tt.want, tt.target.AddItem(tt.args.i)) + if tt.target != nil { + assert.Equal(t, tt.wantItems, tt.target.items) + } + }) + } +} From 861c4bb02ce392788d7085d623c96bc1a4952e31 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 18 Feb 2022 16:08:25 +0900 Subject: [PATCH 154/253] fix: scenes and properties are not updated properly when plugin is updated --- internal/usecase/interactor/plugin_upload.go | 39 ++++++++-------- .../usecase/interactor/plugin_upload_test.go | 18 +++++++- pkg/scene/widgets_test.go | 29 ++++++------ pkg/value/bool.go | 8 ++-- pkg/value/coordinates.go | 4 +- pkg/value/latlng.go | 6 +-- pkg/value/latlngheight.go | 6 +-- pkg/value/number.go | 46 +++++++++++-------- pkg/value/polygon.go | 4 +- pkg/value/rect.go | 4 +- pkg/value/string.go | 12 +++-- pkg/value/url.go | 13 ++---- 12 files changed, 105 insertions(+), 84 deletions(-) diff --git 
a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index 4101a47f0..96d5a7fdc 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -90,14 +90,20 @@ func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.Scene } }() + var oldPManifest *manifest.Manifest newpid := p.Manifest.Plugin.ID() oldpid := s.Plugins().PluginByName(newpid.Name()).PluginRef() - var oldp *plugin.Plugin if oldpid != nil { - oldp, err = i.pluginRepo.FindByID(ctx, *oldpid, []id.SceneID{sid}) + oldPlugin, err := i.pluginRepo.FindByID(ctx, *oldpid, []id.SceneID{sid}) if err != nil { return nil, nil, err } + + oldPManifest2, err := i.pluginManifestFromPlugin(ctx, oldPlugin) + if err != nil { + return nil, nil, err + } + oldPManifest = &oldPManifest2 } // new (oldpid == nil): upload files, save plugin and properties -> install @@ -136,19 +142,19 @@ func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.Scene return nil, nil, err } - if oldpid == nil { + if oldPManifest == nil { // new: install plugin if err := i.installScenePlugin(ctx, p, s); err != nil { return nil, nil, err } } else { // same, diff: migrate - if err := i.migrateScenePlugin(ctx, p, s, oldp); err != nil { + if err := i.migrateScenePlugin(ctx, *oldPManifest, p, s); err != nil { return nil, nil, err } } - if oldpid != nil && !oldpid.Equal(newpid) { + if oldpid != nil && oldPManifest != nil && !oldpid.Equal(newpid) { // diff only: delete old files if err := i.file.RemovePlugin(ctx, *oldpid); err != nil { return nil, nil, err @@ -159,7 +165,7 @@ func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.Scene if err := i.pluginRepo.Remove(ctx, *oldpid); err != nil { return nil, nil, err } - if ps := oldp.PropertySchemas(); len(ps) > 0 { + if ps := oldPManifest.Plugin.PropertySchemas(); len(ps) > 0 { if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { return nil, nil, err } 
@@ -195,17 +201,12 @@ func (i *Plugin) installScenePlugin(ctx context.Context, p *pluginpack.Package, return nil } -func (i *Plugin) migrateScenePlugin(ctx context.Context, p *pluginpack.Package, s *scene.Scene, oldp *plugin.Plugin) (err error) { - if oldp == nil || p.Manifest == nil { +func (i *Plugin) migrateScenePlugin(ctx context.Context, oldm manifest.Manifest, p *pluginpack.Package, s *scene.Scene) (err error) { + if oldm.Plugin == nil || p.Manifest == nil { return nil } - oldPManifest, err := i.pluginManifestFromPlugin(ctx, oldp) - if err != nil { - return err - } - - diff := manifest.DiffFrom(oldPManifest, *p.Manifest) + diff := manifest.DiffFrom(oldm, *p.Manifest) updatedProperties := property.List{} // update scene @@ -220,8 +221,7 @@ func (i *Plugin) migrateScenePlugin(ctx context.Context, p *pluginpack.Package, updatedProperties = append(updatedProperties, p) } - sp := s.Plugins().Plugin(diff.From) - if sp != nil && sp.Property() != nil && diff.PropertySchemaDeleted { + if sp := s.Plugins().Plugin(diff.From); sp != nil && sp.Property() != nil && diff.PropertySchemaDeleted { // plugin property should be removed if err := i.propertyRepo.Remove(ctx, *sp.Property()); err != nil { return err @@ -229,9 +229,6 @@ func (i *Plugin) migrateScenePlugin(ctx context.Context, p *pluginpack.Package, } s.Plugins().Upgrade(diff.From, diff.To, spp, diff.PropertySchemaDeleted) - if err := i.sceneRepo.Save(ctx, s); err != nil { - return err - } // delete layers, blocks and widgets for _, e := range diff.DeletedExtensions { @@ -255,6 +252,10 @@ func (i *Plugin) migrateScenePlugin(ctx context.Context, p *pluginpack.Package, } } + if err := i.sceneRepo.Save(ctx, s); err != nil { + return err + } + // migrate layers if err := i.layerRepo.UpdatePlugin(ctx, diff.From, diff.To, []id.SceneID{s.ID()}); err != nil { return err diff --git a/internal/usecase/interactor/plugin_upload_test.go b/internal/usecase/interactor/plugin_upload_test.go index fdde61001..4f5414622 100644 --- 
a/internal/usecase/interactor/plugin_upload_test.go +++ b/internal/usecase/interactor/plugin_upload_test.go @@ -137,6 +137,8 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { sid := id.NewSceneID() pid := mockPluginID.WithScene(sid.Ref()) eid := id.PluginExtensionID("marker") + eid2 := id.PluginExtensionID("widget") + wid := id.NewWidgetID() repos := memory.InitRepos(nil) mfs := mockFS(map[string]string{ @@ -146,16 +148,21 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { assert.NoError(t, err) ps := property.NewSchema().ID(property.NewSchemaID(pid, eid.String())).MustBuild() + ps2 := property.NewSchema().ID(property.NewSchemaID(pid, eid2.String())).MustBuild() pl := plugin.New().ID(pid).Extensions([]*plugin.Extension{ plugin.NewExtension().ID(eid).Type(plugin.ExtensionTypePrimitive).Schema(ps.ID()).MustBuild(), + plugin.NewExtension().ID(eid2).Type(plugin.ExtensionTypeWidget).Schema(ps2.ID()).MustBuild(), }).MustBuild() p := property.New().NewID().Schema(ps.ID()).Scene(sid).MustBuild() + p2 := property.New().NewID().Schema(ps2.ID()).Scene(sid).MustBuild() pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(pid.Ref()).Extension(eid.Ref()).Property(p.IDRef()).MustBuild() rootLayer := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ scene.NewPlugin(pid, nil), - })).MustBuild() + })).Widgets(scene.NewWidgets([]*scene.Widget{ + scene.MustNewWidget(wid, pid, eid2, p2.ID(), false, false), + }, nil)).MustBuild() _ = repos.PropertySchema.Save(ctx, ps) _ = repos.Plugin.Save(ctx, pl) @@ -188,6 +195,11 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) assert.NoError(t, err) assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) + assert.Nil(t, nscene.Widgets().Widget(wid)) + + nlp2, err := repos.Property.FindByID(ctx, p.ID(), nil) + 
assert.Nil(t, nlp2) // deleted + assert.Equal(t, rerror.ErrNotFound, err) // plugin npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) @@ -198,6 +210,10 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { assert.Nil(t, nlps) // deleted assert.Equal(t, rerror.ErrNotFound, err) + nlps2, err := repos.PropertySchema.FindByID(ctx, ps2.ID()) + assert.Nil(t, nlps2) // deleted + assert.Equal(t, rerror.ErrNotFound, err) + _, err = mfs.Open("plugins/" + pid.String() + "/hogehoge") assert.True(t, os.IsNotExist(err)) // deleted diff --git a/pkg/scene/widgets_test.go b/pkg/scene/widgets_test.go index cd283f670..83411a165 100644 --- a/pkg/scene/widgets_test.go +++ b/pkg/scene/widgets_test.go @@ -155,30 +155,31 @@ func TestWidgets_RemoveAllByPlugin(t *testing.T) { w3 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) tests := []struct { - Name string - PID PluginID - EID *PluginExtensionID - WS, Expected *Widgets - ExpectedResult []PropertyID + Name string + ArgsPID PluginID + ArgsEID *PluginExtensionID + Target, Expected *Widgets + ExpectedResult []PropertyID }{ { Name: "remove widgets", - PID: pid, - WS: NewWidgets([]*Widget{w1, w2, w3}, nil), + ArgsPID: pid, + ArgsEID: nil, + Target: NewWidgets([]*Widget{w1, w2, w3}, nil), Expected: NewWidgets([]*Widget{w3}, nil), ExpectedResult: []PropertyID{w1.Property(), w2.Property()}, }, { - Name: "remove widgets", - PID: pid, - EID: PluginExtensionID("e2").Ref(), - WS: NewWidgets([]*Widget{w1, w2, w3}, nil), + Name: "remove widgets of extension", + ArgsPID: pid, + ArgsEID: PluginExtensionID("e2").Ref(), + Target: NewWidgets([]*Widget{w1, w2, w3}, nil), Expected: NewWidgets([]*Widget{w1, w3}, nil), ExpectedResult: []PropertyID{w2.Property()}, }, { Name: "remove from nil widgets", - WS: nil, + Target: nil, Expected: nil, ExpectedResult: nil, }, @@ -188,8 +189,8 @@ func TestWidgets_RemoveAllByPlugin(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - assert.Equal(t, 
tc.ExpectedResult, tc.WS.RemoveAllByPlugin(tc.PID, tc.EID)) - assert.Equal(t, tc.Expected, tc.WS) + assert.Equal(t, tc.ExpectedResult, tc.Target.RemoveAllByPlugin(tc.ArgsPID, tc.ArgsEID)) + assert.Equal(t, tc.Expected, tc.Target) }) } } diff --git a/pkg/value/bool.go b/pkg/value/bool.go index 70a31278f..68f172f32 100644 --- a/pkg/value/bool.go +++ b/pkg/value/bool.go @@ -6,7 +6,7 @@ var TypeBool Type = "bool" type propertyBool struct{} -func (*propertyBool) I2V(i interface{}) (interface{}, bool) { +func (p *propertyBool) I2V(i interface{}) (interface{}, bool) { switch v := i.(type) { case bool: return v, true @@ -16,13 +16,11 @@ func (*propertyBool) I2V(i interface{}) (interface{}, bool) { } case *bool: if v != nil { - return *v, true + return p.I2V(*v) } case *string: if v != nil { - if b, err := strconv.ParseBool(*v); err == nil { - return b, true - } + return p.I2V(*v) } } return nil, false diff --git a/pkg/value/coordinates.go b/pkg/value/coordinates.go index 402889697..06ade2678 100644 --- a/pkg/value/coordinates.go +++ b/pkg/value/coordinates.go @@ -32,12 +32,12 @@ var TypeCoordinates Type = "coordinates" type propertyCoordinates struct{} -func (*propertyCoordinates) I2V(i interface{}) (interface{}, bool) { +func (p *propertyCoordinates) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(Coordinates); ok { return v, true } else if v, ok := i.(*Coordinates); ok { if v != nil { - return *v, true + return p.I2V(*v) } return nil, false } else if v2, ok := i.([]float64); ok { diff --git a/pkg/value/latlng.go b/pkg/value/latlng.go index 2bf97ab9d..f824df3fd 100644 --- a/pkg/value/latlng.go +++ b/pkg/value/latlng.go @@ -21,7 +21,7 @@ var TypeLatLng Type = "latlng" type propertyLatLng struct{} -func (*propertyLatLng) I2V(i interface{}) (interface{}, bool) { +func (p *propertyLatLng) I2V(i interface{}) (interface{}, bool) { switch v := i.(type) { case LatLng: return v, true @@ -29,11 +29,11 @@ func (*propertyLatLng) I2V(i interface{}) (interface{}, bool) { return 
LatLng{Lat: v.Lat, Lng: v.Lng}, true case *LatLng: if v != nil { - return *v, true + return p.I2V(*v) } case *LatLngHeight: if v != nil { - return LatLng{Lat: v.Lat, Lng: v.Lng}, true + return p.I2V(*v) } } diff --git a/pkg/value/latlngheight.go b/pkg/value/latlngheight.go index f2120899d..9dead9880 100644 --- a/pkg/value/latlngheight.go +++ b/pkg/value/latlngheight.go @@ -23,7 +23,7 @@ var TypeLatLngHeight Type = "latlngheight" type propertyLatLngHeight struct{} -func (*propertyLatLngHeight) I2V(i interface{}) (interface{}, bool) { +func (p *propertyLatLngHeight) I2V(i interface{}) (interface{}, bool) { switch v := i.(type) { case LatLngHeight: return v, true @@ -31,11 +31,11 @@ func (*propertyLatLngHeight) I2V(i interface{}) (interface{}, bool) { return LatLngHeight{Lat: v.Lat, Lng: v.Lng, Height: 0}, true case *LatLngHeight: if v != nil { - return *v, true + return p.I2V(*v) } case *LatLng: if v != nil { - return LatLngHeight{Lat: v.Lat, Lng: v.Lng, Height: 0}, true + return p.I2V(*v) } } diff --git a/pkg/value/number.go b/pkg/value/number.go index 53ce8bfeb..5ac456966 100644 --- a/pkg/value/number.go +++ b/pkg/value/number.go @@ -9,7 +9,7 @@ var TypeNumber Type = "number" type propertyNumber struct{} -func (*propertyNumber) I2V(i interface{}) (interface{}, bool) { +func (p *propertyNumber) I2V(i interface{}) (interface{}, bool) { switch v := i.(type) { case float64: return v, true @@ -45,69 +45,75 @@ func (*propertyNumber) I2V(i interface{}) (interface{}, bool) { if vfloat64, err := strconv.ParseFloat(v, 64); err == nil { return vfloat64, true } + case bool: + if v { + return float64(1), true + } else { + return float64(0), true + } case *float64: if v != nil { - return *v, true + return p.I2V(*v) } case *float32: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *int: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *int8: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *int16: if v != nil { - return 
float64(*v), true + return p.I2V(*v) } case *int32: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *int64: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *uint: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *uint8: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *uint16: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *uint32: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *uint64: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *uintptr: if v != nil { - return float64(*v), true + return p.I2V(*v) } case *json.Number: if v != nil { - if f, err := v.Float64(); err == nil { - return f, true - } + return p.I2V(*v) } case *string: if v != nil { - if vfloat64, err := strconv.ParseFloat(*v, 64); err == nil { - return vfloat64, true - } + return p.I2V(*v) + } + case *bool: + if v != nil { + return p.I2V(*v) } } return nil, false diff --git a/pkg/value/polygon.go b/pkg/value/polygon.go index 4dccecfdf..2e1e7a0db 100644 --- a/pkg/value/polygon.go +++ b/pkg/value/polygon.go @@ -16,14 +16,14 @@ func PolygonFrom(rings [][]float64) Polygon { type propertyPolygon struct{} -func (*propertyPolygon) I2V(i interface{}) (interface{}, bool) { +func (p *propertyPolygon) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(Polygon); ok { return v, true } if v, ok := i.(*Polygon); ok { if v != nil { - return *v, true + return p.I2V(*v) } return nil, false } diff --git a/pkg/value/rect.go b/pkg/value/rect.go index 90caf01df..33d5c92de 100644 --- a/pkg/value/rect.go +++ b/pkg/value/rect.go @@ -13,12 +13,12 @@ type Rect struct { type propertyRect struct{} -func (*propertyRect) I2V(i interface{}) (interface{}, bool) { +func (p *propertyRect) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(Rect); ok { return v, true } else if v, ok := i.(*Rect); ok { if v != nil { - return *v, true + return p.I2V(*v) } return nil, false } diff --git 
a/pkg/value/string.go b/pkg/value/string.go index 03bcdd943..e5a7fb9ed 100644 --- a/pkg/value/string.go +++ b/pkg/value/string.go @@ -9,15 +9,19 @@ var TypeString Type = "string" type propertyString struct{} -func (*propertyString) I2V(i interface{}) (interface{}, bool) { +func (p *propertyString) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(string); ok { return v, true - } else if v, ok := i.(*string); ok && v != nil { - return *v, true } else if v, ok := i.(float64); ok { return strconv.FormatFloat(v, 'f', -1, 64), true + } else if v, ok := i.(bool); ok && v { + return "true", true + } else if v, ok := i.(*string); ok && v != nil { + return p.I2V(*v) } else if v, ok := i.(*float64); ok && v != nil { - return strconv.FormatFloat(*v, 'f', -1, 64), true + return p.I2V(*v) + } else if v, ok := i.(*bool); ok && v != nil { + return p.I2V(*v) } else if v, ok := i.(fmt.Stringer); ok && v != nil { return v.String(), true } diff --git a/pkg/value/url.go b/pkg/value/url.go index 07f2a5c64..c64df75df 100644 --- a/pkg/value/url.go +++ b/pkg/value/url.go @@ -6,16 +6,13 @@ var TypeURL Type = "url" type propertyURL struct{} -func (*propertyURL) I2V(i interface{}) (interface{}, bool) { +func (p *propertyURL) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(url.URL); ok { return &v, true } - if v, ok := i.(*url.URL); ok { - if v == nil { - return nil, false - } - return v, true + if v, ok := i.(*url.URL); ok && v != nil { + return p.I2V(*v) // clone URL } if v, ok := i.(string); ok { @@ -25,9 +22,7 @@ func (*propertyURL) I2V(i interface{}) (interface{}, bool) { } if v, ok := i.(*string); ok && v != nil { - if u, err := url.Parse(*v); err == nil { - return u, true - } + return p.I2V(*v) } return nil, false From f66f9a3045d3dc2b2b4336c1e7b9783a89250894 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 18 Feb 2022 19:34:27 +0900 Subject: [PATCH 155/253] fix: scene widgets and blocks are not update properly when plugin is updated --- 
internal/infrastructure/mongo/property.go | 4 +- internal/usecase/interactor/plugin_upload.go | 1 + .../usecase/interactor/plugin_upload_test.go | 78 ++++++++++++------- internal/usecase/interactor/scene.go | 2 +- pkg/property/schema_list.go | 2 +- pkg/scene/builder/builder_test.go | 4 +- pkg/scene/builder_test.go | 6 +- pkg/scene/scene_test.go | 2 +- pkg/scene/sceneops/plugin_migrator.go | 2 +- pkg/scene/widget.go | 9 ++- pkg/scene/widget_test.go | 10 +-- pkg/scene/widgets.go | 6 +- pkg/scene/widgets_test.go | 58 +++++++------- 13 files changed, 109 insertions(+), 75 deletions(-) diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index dde0f6482..20cf68f3a 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -118,10 +118,10 @@ func (r *propertyRepo) SaveAll(ctx context.Context, properties property.List) er func (r *propertyRepo) UpdateSchemaPlugin(ctx context.Context, old, new id.PluginID, s id.SceneID) error { return r.client.UpdateMany(ctx, bson.M{ - "schemaplugin": old, + "schemaplugin": old.String(), "scene": s.String(), }, bson.M{ - "schemaplugin": new, + "schemaplugin": new.String(), }) } diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index 96d5a7fdc..b6b2e0cf4 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -228,6 +228,7 @@ func (i *Plugin) migrateScenePlugin(ctx context.Context, oldm manifest.Manifest, } } + s.Widgets().UpgradePlugin(diff.From, diff.To) s.Plugins().Upgrade(diff.From, diff.To, spp, diff.PropertySchemaDeleted) // delete layers, blocks and widgets diff --git a/internal/usecase/interactor/plugin_upload_test.go b/internal/usecase/interactor/plugin_upload_test.go index 4f5414622..e83ec65b4 100644 --- a/internal/usecase/interactor/plugin_upload_test.go +++ b/internal/usecase/interactor/plugin_upload_test.go @@ -42,6 +42,11 @@ 
const mockPluginManifest = `{ } ] } + }, + { + "id": "widget", + "type": "widget", + "schema": {} } ] }` @@ -127,6 +132,9 @@ func TestPlugin_Upload_New(t *testing.T) { assert.Equal(t, "// barfoo", string(npfc)) } +// The plugin and its files should be replaced with the new one (old files are deleted) +// Properties that schema is changed should be migrated +// Layers, widgets, blocks, properties, and property schemas that extension is deleted should deleted func TestPlugin_Upload_SameVersion(t *testing.T) { // upgrade plugin to the same version // 1 extension is deleted -> property schema, layers, and properties of the extension should be deleted @@ -136,9 +144,9 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { team := id.NewTeamID() sid := id.NewSceneID() pid := mockPluginID.WithScene(sid.Ref()) - eid := id.PluginExtensionID("marker") - eid2 := id.PluginExtensionID("widget") - wid := id.NewWidgetID() + eid1 := id.PluginExtensionID("marker") + eid2 := id.PluginExtensionID("widget2") + wid1 := id.NewWidgetID() repos := memory.InitRepos(nil) mfs := mockFS(map[string]string{ @@ -147,26 +155,26 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { files, err := fs.NewFile(mfs, "") assert.NoError(t, err) - ps := property.NewSchema().ID(property.NewSchemaID(pid, eid.String())).MustBuild() + ps := property.NewSchema().ID(property.NewSchemaID(pid, eid1.String())).MustBuild() ps2 := property.NewSchema().ID(property.NewSchemaID(pid, eid2.String())).MustBuild() pl := plugin.New().ID(pid).Extensions([]*plugin.Extension{ - plugin.NewExtension().ID(eid).Type(plugin.ExtensionTypePrimitive).Schema(ps.ID()).MustBuild(), + plugin.NewExtension().ID(eid1).Type(plugin.ExtensionTypePrimitive).Schema(ps.ID()).MustBuild(), plugin.NewExtension().ID(eid2).Type(plugin.ExtensionTypeWidget).Schema(ps2.ID()).MustBuild(), }).MustBuild() - p := property.New().NewID().Schema(ps.ID()).Scene(sid).MustBuild() + p1 := property.New().NewID().Schema(ps.ID()).Scene(sid).MustBuild() p2 := 
property.New().NewID().Schema(ps2.ID()).Scene(sid).MustBuild() - pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(pid.Ref()).Extension(eid.Ref()).Property(p.IDRef()).MustBuild() + pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(pid.Ref()).Extension(eid1.Ref()).Property(p1.IDRef()).MustBuild() rootLayer := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ scene.NewPlugin(pid, nil), })).Widgets(scene.NewWidgets([]*scene.Widget{ - scene.MustNewWidget(wid, pid, eid2, p2.ID(), false, false), + scene.MustWidget(wid1, pid, eid2, p2.ID(), false, false), }, nil)).MustBuild() _ = repos.PropertySchema.Save(ctx, ps) _ = repos.Plugin.Save(ctx, pl) - _ = repos.Property.Save(ctx, p) + _ = repos.Property.Save(ctx, p1) _ = repos.Layer.SaveAll(ctx, layer.List{pluginLayer.LayerRef(), rootLayer.LayerRef()}) _ = repos.Scene.Save(ctx, scene) @@ -195,9 +203,9 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) assert.NoError(t, err) assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) - assert.Nil(t, nscene.Widgets().Widget(wid)) + assert.Nil(t, nscene.Widgets().Widget(wid1)) - nlp2, err := repos.Property.FindByID(ctx, p.ID(), nil) + nlp2, err := repos.Property.FindByID(ctx, p1.ID(), nil) assert.Nil(t, nlp2) // deleted assert.Equal(t, rerror.ErrNotFound, err) @@ -223,7 +231,7 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { assert.Equal(t, "// barfoo", string(npfc)) // layer - nlp, err := repos.Property.FindByID(ctx, p.ID(), nil) + nlp, err := repos.Property.FindByID(ctx, p1.ID(), nil) assert.Nil(t, nlp) // deleted assert.Equal(t, rerror.ErrNotFound, err) @@ -236,6 +244,10 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { assert.Equal(t, []layer.ID{}, nrl.Layers().Layers()) // deleted } +// The plugin and its files should be newrly 
created (old plugin and files are deleted if the plugin is private) +// Properties that schema is changed should be migrated +// Layers, widgets, blocks, properties, and property schemas that extension is deleted should deleted +// Plugin field of layers, widgets, block, properties, and property schemas are replaced with the new plugin ID func TestPlugin_Upload_DiffVersion(t *testing.T) { // upgrade plugin to the different version // plugin ID of property and layers should be updated @@ -245,8 +257,11 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { sid := id.NewSceneID() oldpid := id.MustPluginID("testplugin~1.0.0").WithScene(sid.Ref()) pid := mockPluginID.WithScene(sid.Ref()) - eid := id.PluginExtensionID("block") - nlpsid := id.NewPropertySchemaID(pid, eid.String()) + eid1 := id.PluginExtensionID("block") + eid2 := id.PluginExtensionID("widget") + nlpsid1 := id.NewPropertySchemaID(pid, eid1.String()) + nlpsid2 := id.NewPropertySchemaID(pid, eid2.String()) + wid := id.NewWidgetID() repos := memory.InitRepos(nil) mfs := mockFS(map[string]string{ @@ -257,11 +272,13 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { oldpsf := property.NewSchemaField().ID("field").Type(property.ValueTypeNumber).MustBuild() oldpsg := property.NewSchemaGroup().ID("default").Fields([]*property.SchemaField{oldpsf}).MustBuild() - oldps := property.NewSchema().ID(property.NewSchemaID(oldpid, eid.String())).Groups(property.NewSchemaGroupList( + oldps := property.NewSchema().ID(property.NewSchemaID(oldpid, eid1.String())).Groups(property.NewSchemaGroupList( []*property.SchemaGroup{oldpsg}, )).MustBuild() + oldps2 := property.NewSchema().ID(property.NewSchemaID(oldpid, eid2.String())).MustBuild() oldpl := plugin.New().ID(oldpid).Extensions([]*plugin.Extension{ - plugin.NewExtension().ID(eid).Type(plugin.ExtensionTypeBlock).Schema(oldps.ID()).MustBuild(), + plugin.NewExtension().ID(eid1).Type(plugin.ExtensionTypeBlock).Schema(oldps.ID()).MustBuild(), + 
plugin.NewExtension().ID(eid2).Type(plugin.ExtensionTypeWidget).Schema(oldps2.ID()).MustBuild(), }).MustBuild() pf := property.NewField("field").Value(property.ValueTypeNumber.ValueFrom(100).Some()).MustBuild() @@ -269,18 +286,21 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { oldp := property.New().NewID().Schema(oldps.ID()).Scene(sid).Items([]property.Item{pg}).MustBuild() oldp2 := property.New().NewID().Schema(oldps.ID()).Scene(sid).MustBuild() oldp3 := property.New().NewID().Schema(oldps.ID()).Scene(sid).MustBuild() + oldp4 := property.New().NewID().Schema(oldps2.ID()).Scene(sid).MustBuild() ib := layer.NewInfobox([]*layer.InfoboxField{ layer.NewInfoboxField().NewID().Plugin(oldp3.Schema().Plugin()).Extension(plugin.ExtensionID(oldp3.Schema().ID())).Property(oldp3.ID()).MustBuild(), }, oldp2.ID()) - pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(oldpid.Ref()).Extension(eid.Ref()).Property(oldp.IDRef()).Infobox(ib).MustBuild() + pluginLayer := layer.NewItem().NewID().Scene(sid).Plugin(oldpid.Ref()).Extension(eid1.Ref()).Property(oldp.IDRef()).Infobox(ib).MustBuild() rootLayer := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{pluginLayer.ID()})).Root(true).MustBuild() scene := scene.New().ID(sid).Team(team).RootLayer(rootLayer.ID()).Plugins(scene.NewPlugins([]*scene.Plugin{ scene.NewPlugin(oldpid, nil), - })).MustBuild() + })).Widgets(scene.NewWidgets([]*scene.Widget{ + scene.MustWidget(wid, oldpid, eid2, oldp4.ID(), true, false), + }, nil)).MustBuild() - _ = repos.PropertySchema.Save(ctx, oldps) + _ = repos.PropertySchema.SaveAll(ctx, property.SchemaList{oldps, oldps2}) _ = repos.Plugin.Save(ctx, oldpl) - _ = repos.Property.SaveAll(ctx, property.List{oldp, oldp2, oldp3}) + _ = repos.Property.SaveAll(ctx, property.List{oldp, oldp2, oldp3, oldp4}) _ = repos.Layer.SaveAll(ctx, layer.List{pluginLayer.LayerRef(), rootLayer.LayerRef()}) _ = repos.Scene.Save(ctx, scene) @@ -310,6 +330,8 @@ func TestPlugin_Upload_DiffVersion(t 
*testing.T) { assert.NoError(t, err) assert.False(t, nscene.Plugins().HasPlugin(oldpid)) assert.True(t, nscene.Plugins().HasPlugin(pid)) + assert.Equal(t, pid, nscene.Widgets().Widget(wid).Plugin()) + assert.Equal(t, eid2, nscene.Widgets().Widget(wid).Extension()) // plugin opl, err := repos.Plugin.FindByID(ctx, oldpid, []id.SceneID{scene.ID()}) @@ -324,9 +346,13 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { assert.Nil(t, olps) // deleted assert.Equal(t, rerror.ErrNotFound, err) - nlps, err := repos.PropertySchema.FindByID(ctx, nlpsid) + nlps1, err := repos.PropertySchema.FindByID(ctx, nlpsid1) + assert.NoError(t, err) + assert.Equal(t, nlpsid1, nlps1.ID()) + + nlps2, err := repos.PropertySchema.FindByID(ctx, nlpsid2) assert.NoError(t, err) - assert.Equal(t, nlpsid, nlps.ID()) + assert.Equal(t, nlpsid2, nlps2.ID()) _, err = mfs.Open("plugins/" + oldpid.String() + "/hogehoge") assert.True(t, os.IsNotExist(err)) // deleted @@ -340,7 +366,7 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID(), nil) assert.NoError(t, err) assert.Equal(t, pid, *nl.Plugin()) - assert.Equal(t, eid, *nl.Extension()) + assert.Equal(t, eid1, *nl.Extension()) assert.Equal(t, oldp.ID(), *nl.Property()) assert.Equal(t, oldp2.ID(), nl.Infobox().Property()) assert.Equal(t, oldp3.ID(), nl.Infobox().FieldAt(0).Property()) @@ -348,14 +374,14 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { nlp, err := repos.Property.FindByID(ctx, *nl.Property(), nil) assert.NoError(t, err) assert.Equal(t, *nl.Property(), nlp.ID()) - assert.Equal(t, nlpsid, nlp.Schema()) + assert.Equal(t, nlpsid1, nlp.Schema()) assert.Equal(t, property.ValueTypeString.ValueFrom("100"), property.ToGroup(nlp.ItemBySchema("default")).Field("field").Value()) nlp2, err := repos.Property.FindByID(ctx, oldp2.ID(), nil) assert.NoError(t, err) - assert.Equal(t, nlpsid, nlp2.Schema()) + assert.Equal(t, nlpsid1, nlp2.Schema()) nlp3, err := repos.Property.FindByID(ctx, 
oldp3.ID(), nil) assert.NoError(t, err) - assert.Equal(t, nlpsid, nlp3.Schema()) + assert.Equal(t, nlpsid1, nlp3.Schema()) } diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 7cc165d32..ea8ca2c4a 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -555,7 +555,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug // remove widgets removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid, nil)...) - // remove layers + // remove layers and blocks res, err := layerops.Processor{ LayerLoader: repo.LayerLoaderFrom(i.layerRepo, []id.SceneID{sid}), RootLayerID: scene.RootLayer(), diff --git a/pkg/property/schema_list.go b/pkg/property/schema_list.go index e737e6960..da83e4365 100644 --- a/pkg/property/schema_list.go +++ b/pkg/property/schema_list.go @@ -4,7 +4,7 @@ type SchemaList []*Schema func (l SchemaList) Find(psid SchemaID) *Schema { for _, s := range l { - if s.ID().Equal(psid) { + if s != nil && s.ID().Equal(psid) { return s } } diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index 61d661523..7fe1ac367 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -388,14 +388,14 @@ func TestSceneBuilder(t *testing.T) { sceneWidgetID1 := scene.NewWidgetID() sceneWidgetID2 := scene.NewWidgetID() - sceneWidget1 := scene.MustNewWidget( + sceneWidget1 := scene.MustWidget( sceneWidgetID1, pluginID, pluginExtension1ID, scenePropertyID, false, false) - sceneWidget2 := scene.MustNewWidget( + sceneWidget2 := scene.MustWidget( sceneWidgetID2, pluginID, pluginExtension2ID, diff --git a/pkg/scene/builder_test.go b/pkg/scene/builder_test.go index 8b56b6e59..9ee94b06a 100644 --- a/pkg/scene/builder_test.go +++ b/pkg/scene/builder_test.go @@ -47,7 +47,7 @@ func TestBuilder_Project(t *testing.T) { func TestBuilder_Widgets(t *testing.T) { ws := NewWidgets([]*Widget{ - 
MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", NewPropertyID(), true, false), + MustWidget(NewWidgetID(), OfficialPluginID, "xxx", NewPropertyID(), true, false), }, nil) b := New().NewID().RootLayer(NewLayerID()).Team(NewTeamID()).Widgets(ws).MustBuild() assert.Equal(t, ws, b.Widgets()) @@ -60,7 +60,7 @@ func TestBuilder_Build(t *testing.T) { ppid := NewPropertyID() lid := NewLayerID() ws := NewWidgets([]*Widget{ - MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), + MustWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), }, nil) ps := NewPlugins([]*Plugin{ NewPlugin(OfficialPluginID, ppid.Ref()), @@ -181,7 +181,7 @@ func TestBuilder_MustBuild(t *testing.T) { ppid := NewPropertyID() lid := NewLayerID() ws := NewWidgets([]*Widget{ - MustNewWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), + MustWidget(NewWidgetID(), OfficialPluginID, "xxx", ppid, true, false), }, nil) was := NewWidgetAlignSystem() ps := NewPlugins([]*Plugin{ diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index 07088ec7e..7b061c4a1 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -71,7 +71,7 @@ func TestScene_Properties(t *testing.T) { Property(pid1). Widgets( NewWidgets([]*Widget{ - MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), + MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", pid2, true, false), }, nil), ). 
MustBuild() diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index beec7d526..124a1f6bd 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -108,7 +108,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol } // ใ‚ทใƒผใƒณใฎใ‚ฆใ‚ฃใ‚ธใ‚งใƒƒใƒˆ - sc.Widgets().ReplacePlugin(oldPluginID, newPluginID) + sc.Widgets().UpgradePlugin(oldPluginID, newPluginID) for _, w := range sc.Widgets().Widgets() { if w.Plugin().Equal(newPluginID) { if newPlugin.Extension(w.Extension()) == nil { diff --git a/pkg/scene/widget.go b/pkg/scene/widget.go index f8d485855..3a1cb7bc4 100644 --- a/pkg/scene/widget.go +++ b/pkg/scene/widget.go @@ -24,7 +24,7 @@ func NewWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, prope }, nil } -func MustNewWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, property PropertyID, enabled bool, extended bool) *Widget { +func MustWidget(wid WidgetID, plugin PluginID, extension PluginExtensionID, property PropertyID, enabled bool, extended bool) *Widget { w, err := NewWidget(wid, plugin, extension, property, enabled, extended) if err != nil { panic(err) @@ -89,3 +89,10 @@ func (w *Widget) Clone() *Widget { extended: w.extended, } } + +func (w *Widget) SetPlugin(pid PluginID) { + if w == nil || pid.IsNil() { + return + } + w.plugin = pid.Clone() +} diff --git a/pkg/scene/widget_test.go b/pkg/scene/widget_test.go index 9c1d7a281..48db36e66 100644 --- a/pkg/scene/widget_test.go +++ b/pkg/scene/widget_test.go @@ -106,12 +106,12 @@ func TestMustNewWidget(t *testing.T) { if tc.Err != nil { assert.PanicsWithError(t, tc.Err.Error(), func() { - MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + MustWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) }) return } - res := MustNewWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) + res := 
MustWidget(tc.ID, tc.Plugin, tc.Extension, tc.Property, tc.Enabled, tc.Extended) assert.Equal(t, tc.ID, res.ID()) assert.Equal(t, tc.Property, res.Property()) assert.Equal(t, tc.Extension, res.Extension()) @@ -122,19 +122,19 @@ func TestMustNewWidget(t *testing.T) { } func TestWidget_SetEnabled(t *testing.T) { - res := MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res := MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) res.SetEnabled(true) assert.True(t, res.Enabled()) } func TestWidget_SetExtended(t *testing.T) { - res := MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res := MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) res.SetExtended(true) assert.True(t, res.Extended()) } func TestWidget_Clone(t *testing.T) { - res := MustNewWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) + res := MustWidget(NewWidgetID(), MustPluginID("xxx~1.1.1"), "eee", NewPropertyID(), false, false) res2 := res.Clone() assert.Equal(t, res, res2) assert.NotSame(t, res, res2) diff --git a/pkg/scene/widgets.go b/pkg/scene/widgets.go index 91d139301..a671d7759 100644 --- a/pkg/scene/widgets.go +++ b/pkg/scene/widgets.go @@ -112,13 +112,13 @@ func (w *Widgets) RemoveAllByPlugin(p PluginID, e *PluginExtensionID) (res []Pro return res } -func (w *Widgets) ReplacePlugin(oldp, newp PluginID) { - if w == nil || w.widgets == nil { +func (w *Widgets) UpgradePlugin(oldp, newp PluginID) { + if w == nil || w.widgets == nil || oldp.Equal(newp) || oldp.IsNil() || newp.IsNil() { return } for _, ww := range w.widgets { if ww.plugin.Equal(oldp) { - ww.plugin = newp + ww.SetPlugin(newp) } } } diff --git a/pkg/scene/widgets_test.go b/pkg/scene/widgets_test.go index 83411a165..5cc70332c 100644 --- a/pkg/scene/widgets_test.go +++ b/pkg/scene/widgets_test.go @@ -29,20 +29,20 @@ func 
TestNewWidgets(t *testing.T) { { Name: "widget list", Input: []*Widget{ - MustNewWidget(wid, pid, "see", pr, true, false), + MustWidget(wid, pid, "see", pr, true, false), }, Expected: []*Widget{ - MustNewWidget(wid, pid, "see", pr, true, false), + MustWidget(wid, pid, "see", pr, true, false), }, }, { Name: "widget list with duplicatd values", Input: []*Widget{ - MustNewWidget(wid, pid, "see", pr, true, false), - MustNewWidget(wid, pid, "see", pr, true, false), + MustWidget(wid, pid, "see", pr, true, false), + MustWidget(wid, pid, "see", pr, true, false), }, Expected: []*Widget{ - MustNewWidget(wid, pid, "see", pr, true, false), + MustWidget(wid, pid, "see", pr, true, false), }, }, } @@ -70,8 +70,8 @@ func TestWidgets_Add(t *testing.T) { }{ { Name: "add new widget", - Input: MustNewWidget(wid, pid, "see", pr, true, false), - Expected: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, + Input: MustWidget(wid, pid, "see", pr, true, false), + Expected: []*Widget{MustWidget(wid, pid, "see", pr, true, false)}, }, { Name: "add nil widget", @@ -80,15 +80,15 @@ func TestWidgets_Add(t *testing.T) { }, { Name: "add to nil widgets", - Input: MustNewWidget(wid, pid, "see", pr, true, false), + Input: MustWidget(wid, pid, "see", pr, true, false), Expected: nil, Nil: true, }, { Name: "add existing widget", - Widgets: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, - Input: MustNewWidget(wid, pid, "see", pr, true, false), - Expected: []*Widget{MustNewWidget(wid, pid, "see", pr, true, false)}, + Widgets: []*Widget{MustWidget(wid, pid, "see", pr, true, false)}, + Input: MustWidget(wid, pid, "see", pr, true, false), + Expected: []*Widget{MustWidget(wid, pid, "see", pr, true, false)}, }, } @@ -136,8 +136,8 @@ func TestWidgets_Remove(t *testing.T) { var ws *Widgets if !tc.Nil { ws = NewWidgets([]*Widget{ - MustNewWidget(wid, pid2, "e1", pr, true, false), - MustNewWidget(wid2, pid, "e1", pr, true, false), + MustWidget(wid, pid2, "e1", pr, true, false), + 
MustWidget(wid2, pid, "e1", pr, true, false), }, nil) assert.True(t, ws.Has(tc.Input)) } @@ -150,9 +150,9 @@ func TestWidgets_Remove(t *testing.T) { func TestWidgets_RemoveAllByPlugin(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("xxx~1.1.2") - w1 := MustNewWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) - w2 := MustNewWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) - w3 := MustNewWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) + w1 := MustWidget(NewWidgetID(), pid, "e1", NewPropertyID(), true, false) + w2 := MustWidget(NewWidgetID(), pid, "e2", NewPropertyID(), true, false) + w3 := MustWidget(NewWidgetID(), pid2, "e1", NewPropertyID(), true, false) tests := []struct { Name string @@ -195,7 +195,7 @@ func TestWidgets_RemoveAllByPlugin(t *testing.T) { } } -func TestWidgets_ReplacePlugin(t *testing.T) { +func TestWidgets_UpgradePlugin(t *testing.T) { pid := MustPluginID("xxx~1.1.1") pid2 := MustPluginID("zzz~1.1.1") pr := NewPropertyID() @@ -210,8 +210,8 @@ func TestWidgets_ReplacePlugin(t *testing.T) { Name: "replace a widget", PID: pid, NewID: pid2, - WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}, nil), - Expected: NewWidgets([]*Widget{MustNewWidget(wid, pid2, "eee", pr, true, false)}, nil), + WS: NewWidgets([]*Widget{MustWidget(wid, pid, "eee", pr, true, false)}, nil), + Expected: NewWidgets([]*Widget{MustWidget(wid, pid2, "eee", pr, true, false)}, nil), }, { Name: "replace with nil widget", @@ -230,7 +230,7 @@ func TestWidgets_ReplacePlugin(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - tc.WS.ReplacePlugin(tc.PID, tc.NewID) + tc.WS.UpgradePlugin(tc.PID, tc.NewID) assert.Equal(t, tc.Expected, tc.WS) }) } @@ -251,8 +251,8 @@ func TestWidgets_Properties(t *testing.T) { { Name: "get properties", WS: NewWidgets([]*Widget{ - MustNewWidget(wid, pid, "eee", pr, true, false), - MustNewWidget(wid2, pid, "eee", pr2, true, false), + MustWidget(wid, 
pid, "eee", pr, true, false), + MustWidget(wid2, pid, "eee", pr2, true, false), }, nil), Expected: []PropertyID{pr, pr2}, }, @@ -288,12 +288,12 @@ func TestWidgets_Widgets(t *testing.T) { { Name: "get widgets", WS: NewWidgets([]*Widget{ - MustNewWidget(wid, pid, "eee", pr, true, false), - MustNewWidget(wid2, pid, "eee", pr2, true, false), + MustWidget(wid, pid, "eee", pr, true, false), + MustWidget(wid2, pid, "eee", pr2, true, false), }, nil), Expected: []*Widget{ - MustNewWidget(wid, pid, "eee", pr, true, false), - MustNewWidget(wid2, pid, "eee", pr2, true, false), + MustWidget(wid, pid, "eee", pr, true, false), + MustWidget(wid2, pid, "eee", pr2, true, false), }, }, { @@ -327,8 +327,8 @@ func TestWidgets_Widget(t *testing.T) { { Name: "get a widget", ID: wid, - WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}, nil), - Expected: MustNewWidget(wid, pid, "eee", pr, true, false), + WS: NewWidgets([]*Widget{MustWidget(wid, pid, "eee", pr, true, false)}, nil), + Expected: MustWidget(wid, pid, "eee", pr, true, false), }, { Name: "dont has the widget", @@ -368,7 +368,7 @@ func TestWidgets_Has(t *testing.T) { { Name: "has a widget", ID: wid, - WS: NewWidgets([]*Widget{MustNewWidget(wid, pid, "eee", pr, true, false)}, nil), + WS: NewWidgets([]*Widget{MustWidget(wid, pid, "eee", pr, true, false)}, nil), Expected: true, }, { From fbbca4d6985db327f83a6eca87521052d2a4e623 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 22 Feb 2022 15:56:34 +0900 Subject: [PATCH 156/253] chore: increase batch size of db migration [ci skip] --- .../mongo/migration/201217132559_add_scene_widget_id.go | 2 +- .../mongo/migration/201217193948_add_scene_default_tile.go | 2 +- .../mongo/migration/210310145844_remove_preview_token.go | 2 +- .../mongo/migration/210730175108_add_scene_align_system.go | 2 +- .../migration/220214180713_split_schema_of_properties.go | 2 +- internal/infrastructure/mongo/migration/migrations.go | 3 +++ tools/cmd/migrategen/main.go | 7 +++++++ 7 
files changed, 15 insertions(+), 5 deletions(-) diff --git a/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go b/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go index 95cf2167a..862da984b 100644 --- a/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go +++ b/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go @@ -13,7 +13,7 @@ func AddSceneWidgetId(ctx context.Context, c DBClient) error { col := c.WithCollection("scene") return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ - Size: 50, + Size: 1000, Callback: func(rows []bson.Raw) error { ids := make([]string, 0, len(rows)) diff --git a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go index d1d5b11f1..0cfbe9e7f 100644 --- a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go +++ b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go @@ -47,7 +47,7 @@ func AddSceneDefaultTile(ctx context.Context, c DBClient) error { log.Infof("migration: AddSceneDefaultTile: filter: %+v\n", filter) return col.Find(ctx, filter, &mongodoc.PropertyBatchConsumer{ - Size: 50, + Size: 1000, Callback: func(properties []*property.Property) error { log.Infof("migration: AddSceneDefaultTile: hit properties: %d\n", len(properties)) diff --git a/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go b/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go index ab1908d25..05a4b6630 100644 --- a/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go +++ b/internal/infrastructure/mongo/migration/210310145844_remove_preview_token.go @@ -12,7 +12,7 @@ func RemovePreviewToken(ctx context.Context, c DBClient) error { col := c.WithCollection("project") return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ - Size: 50, + 
Size: 1000, Callback: func(rows []bson.Raw) error { ids := make([]string, 0, len(rows)) diff --git a/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go b/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go index 3c659c779..ebbfee702 100644 --- a/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go +++ b/internal/infrastructure/mongo/migration/210730175108_add_scene_align_system.go @@ -16,7 +16,7 @@ func AddSceneAlignSystem(ctx context.Context, c DBClient) error { col := c.WithCollection("scene") return col.Find(ctx, bson.D{}, &mongodoc.BatchConsumer{ - Size: 50, + Size: 1000, Callback: func(rows []bson.Raw) error { ids := make([]string, 0, len(rows)) newRows := make([]interface{}, 0, len(rows)) diff --git a/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go b/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go index 211466bc7..df80ee240 100644 --- a/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go +++ b/internal/infrastructure/mongo/migration/220214180713_split_schema_of_properties.go @@ -15,7 +15,7 @@ func SplitSchemaOfProperties(ctx context.Context, c DBClient) error { return col.Find(ctx, bson.M{ "schema": bson.M{"$exists": true}, }, &mongodoc.BatchConsumer{ - Size: 50, + Size: 1000, Callback: func(rows []bson.Raw) error { ids := make([]string, 0, len(rows)) newRows := make([]interface{}, 0, len(rows)) diff --git a/internal/infrastructure/mongo/migration/migrations.go b/internal/infrastructure/mongo/migration/migrations.go index 0e27e773c..3116f661f 100644 --- a/internal/infrastructure/mongo/migration/migrations.go +++ b/internal/infrastructure/mongo/migration/migrations.go @@ -2,6 +2,9 @@ package migration +// WARNING: +// If the migration takes too long, the deployment may fail in a serverless environment. 
+// Set the batch size to as large a value as possible without using up the RAM of the deployment destination. var migrations = map[int64]MigrationFunc{ 201217132559: AddSceneWidgetId, 201217193948: AddSceneDefaultTile, diff --git a/tools/cmd/migrategen/main.go b/tools/cmd/migrategen/main.go index 5ecb63456..b4f383702 100644 --- a/tools/cmd/migrategen/main.go +++ b/tools/cmd/migrategen/main.go @@ -101,6 +101,10 @@ import "context" func {{.Name}}(ctx context.Context, c DBClient) error { // TODO: Write your migration code here + // WARNING: + // If the migration takes too long, the deployment may fail in a serverless environment. + // Set the batch size to as large a value as possible without using up the RAM of the deployment destination. + return nil } `)) @@ -109,6 +113,9 @@ var templ2 = template.Must(template.New("generated2").Parse(`// Code generated b package migration +// WARNING: +// If the migration takes too long, the deployment may fail in a serverless environment. +// Set the batch size to as large a value as possible without using up the RAM of the deployment destination. 
var migrations = map[int64]MigrationFunc{ {{range .}} {{.Key}}: {{.Name}}, {{end}}} From 7501062cd57d29207d47d60ca2f50d3ae4868ec0 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 22 Feb 2022 16:10:10 +0900 Subject: [PATCH 157/253] ci: add CODEOWNERS [ci skip] --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..15f679477 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +* @rot1024 +/pkg/builtin/manifest.yml @HideBa +/pkg/builtin/manifest_ja.yml @HideBa From 6979a8d7156fdacb32e70ee3fdd5f48346f14bf5 Mon Sep 17 00:00:00 2001 From: issmail-basel Date: Thu, 24 Feb 2022 06:38:43 +0000 Subject: [PATCH 158/253] v0.5.0 --- CHANGELOG.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 02a442bf9..2367b7257 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,32 @@ # Changelog All notable changes to this project will be documented in this file. 
+## 0.5.0 - 2022-02-22 + +### ๐Ÿš€ Features + +- Implement property.Diff and plugin/manifest.Diff ([#107](https://github.com/reearth/reearth-backend/pull/107)) [`700269`](https://github.com/reearth/reearth-backend/commit/700269) +- Support 3rd party plugin translation ([#109](https://github.com/reearth/reearth-backend/pull/109)) [`67a618`](https://github.com/reearth/reearth-backend/commit/67a618) +- Improve the Infobox style (manifest) ([#110](https://github.com/reearth/reearth-backend/pull/110)) [`7aebcd`](https://github.com/reearth/reearth-backend/commit/7aebcd) +- Overwrite installation of new plug-ins without removing (automatic property migration) ([#113](https://github.com/reearth/reearth-backend/pull/113)) [`2dc192`](https://github.com/reearth/reearth-backend/commit/2dc192) +- Update infobox style fields ([#115](https://github.com/reearth/reearth-backend/pull/115)) [`608436`](https://github.com/reearth/reearth-backend/commit/608436) + +### ๐Ÿ”ง Bug Fixes + +- Scene exporter should export layers and tags while maintaining the tree structure ([#104](https://github.com/reearth/reearth-backend/pull/104)) [`805d78`](https://github.com/reearth/reearth-backend/commit/805d78) +- Property field in groups in list cannot be updated correctly [`5009c5`](https://github.com/reearth/reearth-backend/commit/5009c5) +- Scenes and properties are not updated properly when plugin is updated [`861c4b`](https://github.com/reearth/reearth-backend/commit/861c4b) +- Scene widgets and blocks are not update properly when plugin is updated [`f66f9a`](https://github.com/reearth/reearth-backend/commit/f66f9a) + +### โœจ Refactor + +- Graphql resolvers ([#105](https://github.com/reearth/reearth-backend/pull/105)) [`01a4e6`](https://github.com/reearth/reearth-backend/commit/01a4e6) + +### Miscellaneous Tasks + +- Update all dependencies ([#111](https://github.com/reearth/reearth-backend/pull/111)) [`173881`](https://github.com/reearth/reearth-backend/commit/173881) +- Increase batch size of 
db migration [ci skip] [`fbbca4`](https://github.com/reearth/reearth-backend/commit/fbbca4) + ## 0.4.0 - 2022-01-27 ### ๐Ÿš€ Features From 496099cef4e0575233d7d79ccb02380a53e0795c Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Fri, 4 Mar 2022 15:56:52 +0900 Subject: [PATCH 159/253] ci: update renovate schedule (#121) Co-authored-by: rot1024 --- .github/renovate.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index 0ecc3d9cf..6eeb20039 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -21,8 +21,8 @@ "groupName": "dependencies", "groupSlug": "gomod", "semanticCommitType": "chore", - "extends": [ - "schedule:earlyMondays" + "schedule": [ + "before 3am on the fourth day of the month" ] }, { @@ -36,8 +36,8 @@ "groupName": "docker dependencies", "groupSlug": "docker", "semanticCommitType": "chore", - "extends": [ - "schedule:earlyMondays" + "schedule": [ + "before 3am on the fourth day of the month" ] }, { @@ -50,8 +50,8 @@ "groupName": "github actions dependencies", "groupSlug": "github-actions", "semanticCommitType": "ci", - "extends": [ - "schedule:earlyMondays" + "schedule": [ + "before 3am on the fourth day of the month" ] } ] From d1a38e294efb2d2f08d268caf27a7650a384265d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 16:22:40 +0900 Subject: [PATCH 160/253] chore: update dependencies (#117) Co-authored-by: Renovate Bot Co-authored-by: yk Co-authored-by: rot1024 --- go.mod | 29 +++++++++++----------- go.sum | 75 ++++++++++++++++++++++++++++---------------------------- tools.go | 4 +-- 3 files changed, 54 insertions(+), 54 deletions(-) diff --git a/go.mod b/go.mod index 641d28698..7804b96f4 100644 --- a/go.mod +++ b/go.mod @@ -2,8 +2,8 @@ module github.com/reearth/reearth-backend require ( cloud.google.com/go/profiler v0.2.0 - cloud.google.com/go/storage 
v1.20.0 - github.com/99designs/gqlgen v0.16.0 + cloud.google.com/go/storage v1.21.0 + github.com/99designs/gqlgen v0.17.1 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 github.com/auth0/go-jwt-middleware v1.0.1 github.com/blang/semver v3.5.1+incompatible @@ -17,13 +17,13 @@ require ( github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 - github.com/labstack/echo/v4 v4.6.3 + github.com/labstack/echo/v4 v4.7.0 github.com/labstack/gommon v0.3.1 github.com/mitchellh/mapstructure v1.4.3 github.com/oklog/ulid v1.3.1 github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 - github.com/ravilushqa/otelgqlgen v0.4.1 + github.com/ravilushqa/otelgqlgen v0.5.1 github.com/sirupsen/logrus v1.8.1 github.com/spf13/afero v1.8.1 github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 @@ -32,22 +32,22 @@ require ( github.com/uber/jaeger-client-go v2.30.0+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/dataloaden v0.3.0 - github.com/vektah/gqlparser/v2 v2.3.1 - go.mongodb.org/mongo-driver v1.8.3 + github.com/vektah/gqlparser/v2 v2.4.1 + go.mongodb.org/mongo-driver v1.8.4 go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0 go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 - go.opentelemetry.io/otel v1.4.0 - go.opentelemetry.io/otel/sdk v1.4.0 + go.opentelemetry.io/otel v1.4.1 + go.opentelemetry.io/otel/sdk v1.4.1 golang.org/x/text v0.3.7 golang.org/x/tools v0.1.9 - google.golang.org/api v0.68.0 + google.golang.org/api v0.70.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 ) require ( cloud.google.com/go v0.100.2 // indirect - cloud.google.com/go/compute v1.2.0 // indirect + cloud.google.com/go/compute v1.3.0 // indirect cloud.google.com/go/iam v0.1.1 // indirect cloud.google.com/go/trace v1.0.0 // indirect 
github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect @@ -73,6 +73,7 @@ require ( github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/klauspost/compress v1.13.6 // indirect + github.com/matryer/moq v0.2.3 // indirect github.com/mattn/go-colorable v0.1.11 // indirect github.com/mattn/go-isatty v0.0.14 // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect @@ -90,19 +91,19 @@ require ( github.com/xdg-go/stringprep v1.0.2 // indirect github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect - go.opentelemetry.io/contrib v1.3.0 // indirect - go.opentelemetry.io/otel/trace v1.4.0 // indirect + go.opentelemetry.io/contrib v1.4.0 // indirect + go.opentelemetry.io/otel/trace v1.4.1 // indirect go.uber.org/atomic v1.7.0 // indirect golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa // indirect golang.org/x/mod v0.5.1 // indirect golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/sys v0.0.0-20220207234003-57398862261d // indirect + golang.org/x/sys v0.0.0-20220209214540-3681064d5158 // indirect golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20220207185906-7721543eae58 // indirect + google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c // indirect google.golang.org/grpc v1.44.0 // indirect google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect diff --git a/go.sum b/go.sum index 589c8fa4c..80bd7c534 100644 --- a/go.sum +++ b/go.sum @@ -38,8 +38,9 @@ cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUM 
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= -cloud.google.com/go/compute v1.2.0 h1:EKki8sSdvDU0OO9mAXGwPXOTOgPz2l08R0/IutDH11I= cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw= +cloud.google.com/go/compute v1.3.0 h1:mPL/MzDDYHsh5tHRS9mhmhWlcgClCrCa6ApQCU6wnHI= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= @@ -57,14 +58,14 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= -cloud.google.com/go/storage v1.20.0 h1:kv3rQ3clEQdxqokkCCgQo+bxPqcuXiROjxvnKb8Oqdk= -cloud.google.com/go/storage v1.20.0/go.mod h1:TiC1o6FxNCG8y5gB7rqCsFZCIYPMPZCO81ppOoEPLGI= +cloud.google.com/go/storage v1.21.0 h1:HwnT2u2D309SFDHQII6m18HlrCi3jAXhUMTLOWXYH14= +cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA= cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.14.0/go.mod h1:S7z4boV+Nx4VvzMUpVrY/YuHjFX4n7rDyuTqvAkuoRE= 
-github.com/99designs/gqlgen v0.16.0 h1:7Qc4Ll3mfN3doAyUWOgtGLcBGu+KDgK48HdkBGLZVFs= github.com/99designs/gqlgen v0.16.0/go.mod h1:nbeSjFkqphIqpZsYe1ULVz0yfH8hjpJdJIQoX/e0G2I= +github.com/99designs/gqlgen v0.17.1 h1:i2qQMPKHQjHgBWYIpO4TsaQpPqMHCPK1+h95ipvH8VU= +github.com/99designs/gqlgen v0.17.1/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 h1:JLLDOHEcoREA54hzOnjr8KQcZCvX0E8KhosjE0F1jaQ= @@ -187,7 +188,6 @@ github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/V github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= github.com/goccy/go-yaml v1.9.5 h1:Eh/+3uk9kLxG4koCX6lRMAPS1OaMSAi+FJcya0INdB0= github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= @@ -283,9 +283,6 @@ github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c2 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod 
h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= -github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= @@ -327,7 +324,6 @@ github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o= github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak= github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.11.1/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= @@ -341,17 +337,17 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.6.3 h1:VhPuIZYxsbPmo4m9KAkMU/el2442eB7EBFFhNTTT9ac= github.com/labstack/echo/v4 v4.6.3/go.mod h1:Hk5OiHj0kDqmFq7aHe7eDqI7CUhuCrfpupQtLGGLm7A= +github.com/labstack/echo/v4 v4.7.0 h1:8wHgZhoE9OT1NSLw6sfrX7ZGpWMtO5Zlfr68+BIo180= +github.com/labstack/echo/v4 v4.7.0/go.mod h1:xkCDAdFCIf8jsFQ5NnbK7oqaF/yU1A1X20Ltm0OvSks= github.com/labstack/gommon v0.3.1 h1:OomWaJXm7xR6L1HmEtGyQf26TEn7V6X88mktX9kee9o= 
github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= -github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/matryer/moq v0.2.3 h1:Q06vEqnBYjjfx5KKgHfYRKE/lvlRu+Nj+xodG4YdHnU= github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -362,7 +358,6 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= -github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= @@ -374,8 +369,6 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e 
h1:fD57ERR4JtEqsWb github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= -github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/paulmach/go.geojson v1.4.0 h1:5x5moCkCtDo5x8af62P9IOAYGQcYHtxz2QJ3x1DoCgY= @@ -389,21 +382,18 @@ github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qR github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/ravilushqa/otelgqlgen v0.4.1 h1:wiTepKKEp3N28PZlPkXeACWu7dBGRwik8RhBdgIe1FM= -github.com/ravilushqa/otelgqlgen v0.4.1/go.mod h1:a0jceFiBN7nUZMfJ8voFwg/C58QCm/90rT4IIlt5hDQ= +github.com/ravilushqa/otelgqlgen v0.5.1 h1:KW9ZpELSnuQlQM2OXgxSeEMWhwt7sPdEL/B2TpcbPM4= +github.com/ravilushqa/otelgqlgen v0.5.1/go.mod h1:ZJey0LrlbpEUXzFhZ5HILWEBg6wUKJvX2Vx3NXHGFkk= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rs/cors v1.6.0/go.mod 
h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -445,7 +435,6 @@ github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaO github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= -github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= @@ -454,12 +443,13 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw github.com/valyala/bytebufferpool v1.0.0/go.mod 
h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= -github.com/vektah/gqlparser/v2 v2.3.1 h1:blIC0fCxGIr9pVjsc+BVI8XjYUtc2nCFRfnmP7FuFMk= github.com/vektah/gqlparser/v2 v2.3.1/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= +github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= +github.com/vektah/gqlparser/v2 v2.4.1 h1:QOyEn8DAPMUMARGMeshKDkDgNmVoEaEGiDB0uWxcSlQ= +github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= @@ -478,8 +468,9 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.8.3 h1:TDKlTkGDKm9kkJVUOAXDK5/fkqKHJVwYQSpoRfB43R4= go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.8.4 h1:NruvZPPL0PBcRJKmbswoWSrmHeUvzdxA3GCPfD/NEOA= +go.mongodb.org/mongo-driver v1.8.4/go.mod 
h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -488,8 +479,8 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/contrib v1.3.0 h1:p9Gd+3dD7yB+AIph2Ltg11QDX6Y+yWMH0YQVTpTTP2c= -go.opentelemetry.io/contrib v1.3.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= +go.opentelemetry.io/contrib v1.4.0 h1:o+obgKZArn1GbM8zPCLYU9LZCI7lL6GcTZArn0qz1yw= +go.opentelemetry.io/contrib v1.4.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0 h1:BLXo2v0bW3iq8NhgSf/1X6Cu7UcfkNk3yyuxNcZB1wk= go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0/go.mod h1:+0RWgKCuTYtJaZo9Io/D2PAvkMZsRkmYaNgHhwzrCDM= go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 h1:PG5cMt7dHmNmuhQczPRF4nOfAUkZe0tezDZEtckz28k= @@ -501,20 +492,22 @@ go.opentelemetry.io/contrib/propagators/b3 v1.4.0/go.mod h1:K399DN23drp0RQGXCbSP go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= go.opentelemetry.io/otel v1.1.0/go.mod h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= -go.opentelemetry.io/otel v1.4.0 h1:7ESuKPq6zpjRaY5nvVDGiuwK7VAJ8MwkKnmNJ9whNZ4= go.opentelemetry.io/otel v1.4.0/go.mod h1:jeAqMFKy2uLIxCtKxoFj0FAL5zAPKQagc3+GtBWakzk= +go.opentelemetry.io/otel v1.4.1 
h1:QbINgGDDcoQUoMJa2mMaWno49lja9sHwp6aoa2n3a4g= +go.opentelemetry.io/otel v1.4.1/go.mod h1:StM6F/0fSwpd8dKWDCdRr7uRvEPYdW0hBSlbdTiUde4= go.opentelemetry.io/otel/internal/metric v0.24.0 h1:O5lFy6kAl0LMWBjzy3k//M8VjEaTDWL9DPJuqZmWIAA= go.opentelemetry.io/otel/internal/metric v0.24.0/go.mod h1:PSkQG+KuApZjBpC6ea6082ZrWUUy/w132tJ/LOU3TXk= go.opentelemetry.io/otel/metric v0.24.0 h1:Rg4UYHS6JKR1Sw1TxnI13z7q/0p/XAbgIqUTagvLJuU= go.opentelemetry.io/otel/metric v0.24.0/go.mod h1:tpMFnCD9t+BEGiWY2bWF5+AwjuAdM0lSowQ4SBA3/K4= go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= -go.opentelemetry.io/otel/sdk v1.4.0 h1:LJE4SW3jd4lQTESnlpQZcBhQ3oci0U2MLR5uhicfTHQ= -go.opentelemetry.io/otel/sdk v1.4.0/go.mod h1:71GJPNJh4Qju6zJuYl1CrYtXbrgfau/M9UAggqiy1UE= +go.opentelemetry.io/otel/sdk v1.4.1 h1:J7EaW71E0v87qflB4cDolaqq3AcujGrtyIPGQoZOB0Y= +go.opentelemetry.io/otel/sdk v1.4.1/go.mod h1:NBwHDgDIBYjwK2WNu1OPgsIc2IJzmBXNnvIJxJc8BpE= go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= -go.opentelemetry.io/otel/trace v1.4.0 h1:4OOUrPZdVFQkbzl/JSdvGCWIdw5ONXXxzHlaLlWppmo= go.opentelemetry.io/otel/trace v1.4.0/go.mod h1:uc3eRsqDfWs9R7b92xbQbU42/eTNz4N+gLP8qJCi4aE= +go.opentelemetry.io/otel/trace v1.4.1 h1:O+16qcdTrT7zxv2J6GejTPFinSwA++cYerC5iSiF8EQ= +go.opentelemetry.io/otel/trace v1.4.1/go.mod h1:iYEVbroFCNut9QkwEczV9vMRPHNKSSwYZjulEtsmhFc= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -708,9 +701,11 @@ golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys 
v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220207234003-57398862261d h1:Bm7BNOQt2Qv7ZqysjeLjgCBanX+88Z/OtdvsrEv1Djc= golang.org/x/sys v0.0.0-20220207234003-57398862261d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158 h1:rm+CHSpPEEW2IsXUib1ThaHIjuBVZjxNgSKmBLFfD4c= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -774,7 +769,6 @@ golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200701151220-7cb253f4c4f8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= @@ -785,7 +779,6 @@ golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= @@ -835,8 +828,11 @@ google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tD google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= google.golang.org/api v0.65.0/go.mod h1:ArYhxgGadlWmqO1IqVujw6Cs8IdD33bTmzKo2Sh+cbg= google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= -google.golang.org/api v0.68.0 h1:9eJiHhwJKIYX6sX2fUZxQLi7pDRA/MYu8c12q6WbJik= +google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= google.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7TH8= +google.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80= +google.golang.org/api v0.70.0 h1:67zQnAE0T2rB0A3CwLSas0K+SbVzSxP+zTLkQLexeiw= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -915,10 +911,15 @@ google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220204002441-d6cc3cc0770e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220207185906-7721543eae58 h1:i67FGOy2/zGfhE3YgHdrOrcFbOBhqdcRoBrsDqSQrOI= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220207185906-7721543eae58/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c h1:TU4rFa5APdKTq0s6B7WTsH6Xmx0Knj86s6Biz56mErE= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod 
h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -993,5 +994,3 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= -sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k= diff --git a/tools.go b/tools.go index f4aea7332..24546391f 100644 --- a/tools.go +++ b/tools.go @@ -1,9 +1,9 @@ -// +build tools +//go:build tools package main import ( - _ "github.com/99designs/gqlgen/cmd" + _ "github.com/99designs/gqlgen" _ "github.com/idubinskiy/schematyper" _ "github.com/vektah/dataloaden" ) From 7843321f13251b528948cd032d1355c0f24f1a2d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 7 Mar 2022 17:24:54 +0900 Subject: [PATCH 161/253] perf: reduce database queries to obtain scene IDs (#119) --- .../adapter/gql/resolver_mutation_dataset.go | 2 +- internal/app/app.go | 72 +++++---- internal/app/auth.go | 23 ++- internal/app/graphql.go | 6 +- internal/app/private.go | 32 +--- internal/app/public.go | 128 ++++++++++++---- .../app/{published_test.go => public_test.go} | 94 +++++++----- internal/app/published.go | 102 ------------- internal/app/usecase.go | 21 +++ internal/infrastructure/memory/scene.go | 62 ++------ internal/infrastructure/memory/team.go | 8 +- .../infrastructure/mongo/mongodoc/scene.go | 2 +- .../infrastructure/mongo/mongodoc/team.go | 2 +- internal/infrastructure/mongo/scene.go | 119 +++------------ internal/infrastructure/mongo/team.go | 30 +--- internal/usecase/interactor/asset.go | 4 +- internal/usecase/interactor/common.go | 92 +++++------- internal/usecase/interactor/dataset.go | 69 
++++----- internal/usecase/interactor/layer.go | 45 +++--- internal/usecase/interactor/plugin.go | 8 +- internal/usecase/interactor/plugin_upload.go | 8 +- .../usecase/interactor/plugin_upload_test.go | 22 +-- internal/usecase/interactor/project.go | 17 +-- internal/usecase/interactor/property.go | 27 ++-- internal/usecase/interactor/property_test.go | 8 +- internal/usecase/interactor/scene.go | 29 ++-- internal/usecase/interactor/tag.go | 24 ++- internal/usecase/interactor/team.go | 16 +- internal/usecase/interactor/user.go | 2 +- internal/usecase/interfaces/dataset.go | 2 +- internal/usecase/operator.go | 141 ++++++------------ internal/usecase/repo/scene.go | 6 +- internal/usecase/repo/team.go | 4 +- pkg/scene/id.go | 33 ++++ pkg/scene/id_test.go | 37 +++++ pkg/scene/list.go | 51 +++++++ pkg/scene/list_test.go | 44 ++++++ pkg/user/id.go | 33 ++++ pkg/user/id_test.go | 37 +++++ pkg/user/role.go | 31 +++- pkg/user/role_test.go | 91 +++++++++++ pkg/user/team_list.go | 65 ++++++++ pkg/user/team_list_test.go | 84 +++++++++++ 43 files changed, 1013 insertions(+), 720 deletions(-) rename internal/app/{published_test.go => public_test.go} (71%) delete mode 100644 internal/app/published.go create mode 100644 internal/app/usecase.go create mode 100644 pkg/scene/id_test.go create mode 100644 pkg/scene/list.go create mode 100644 pkg/scene/list_test.go create mode 100644 pkg/user/id_test.go create mode 100644 pkg/user/team_list.go create mode 100644 pkg/user/team_list_test.go diff --git a/internal/adapter/gql/resolver_mutation_dataset.go b/internal/adapter/gql/resolver_mutation_dataset.go index dcdcbf39b..12928aedb 100644 --- a/internal/adapter/gql/resolver_mutation_dataset.go +++ b/internal/adapter/gql/resolver_mutation_dataset.go @@ -72,7 +72,7 @@ func (r *mutationResolver) SyncDataset(ctx context.Context, input gqlmodel.SyncD func (r *mutationResolver) RemoveDatasetSchema(ctx context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, 
error) { res, err := usecases(ctx).Dataset.RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ - SchemaId: id.DatasetSchemaID(input.SchemaID), + SchemaID: id.DatasetSchemaID(input.SchemaID), Force: input.Force, }, getOperator(ctx)) if err != nil { diff --git a/internal/app/app.go b/internal/app/app.go index 9584738e8..864c017a6 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -2,8 +2,10 @@ package app import ( "errors" + "io/fs" "net/http" "net/http/pprof" + "os" "github.com/99designs/gqlgen/graphql/playground" "github.com/labstack/echo/v4" @@ -23,12 +25,12 @@ func initEcho(cfg *ServerConfig) *echo.Echo { e.Debug = cfg.Debug e.HideBanner = true e.HidePort = true + e.HTTPErrorHandler = errorHandler(e.DefaultHTTPErrorHandler) + // basic middleware logger := GetEchoLogger() e.Logger = logger - e.Use(logger.Hook()) - - e.Use(middleware.Recover(), otelecho.Middleware("reearth-backend")) + e.Use(logger.Hook(), middleware.Recover(), otelecho.Middleware("reearth-backend")) origins := allowedOrigins(cfg) if len(origins) > 0 { e.Use( @@ -38,8 +40,8 @@ func initEcho(cfg *ServerConfig) *echo.Echo { ) } + // enable pprof if e.Debug { - // enable pprof pprofGroup := e.Group("/debug/pprof") pprofGroup.Any("/cmdline", echo.WrapHandler(http.HandlerFunc(pprof.Cmdline))) pprofGroup.Any("/profile", echo.WrapHandler(http.HandlerFunc(pprof.Profile))) @@ -48,42 +50,45 @@ func initEcho(cfg *ServerConfig) *echo.Echo { pprofGroup.Any("/*", echo.WrapHandler(http.HandlerFunc(pprof.Index))) } - e.HTTPErrorHandler = func(err error, c echo.Context) { - if c.Response().Committed { - return - } - - code, msg := errorMessage(err, func(f string, args ...interface{}) { - c.Echo().Logger.Errorf(f, args...) 
- }) - if err := c.JSON(code, map[string]string{ - "error": msg, - }); err != nil { - e.DefaultHTTPErrorHandler(err, c) - } - } - + // GraphQL Playground without auth if cfg.Debug || cfg.Config.Dev { - // GraphQL Playground without auth e.GET("/graphql", echo.WrapHandler( playground.Handler("reearth-backend", "/api/graphql"), )) } + // init usecases + var publishedIndexHTML string + if cfg.Config.Published.IndexURL == nil || cfg.Config.Published.IndexURL.String() == "" { + if html, err := fs.ReadFile(os.DirFS("."), "web/published.html"); err == nil { + publishedIndexHTML = string(html) + } + } usecases := interactor.NewContainer(cfg.Repos, cfg.Gateways, interactor.ContainerConfig{ - SignupSecret: cfg.Config.SignupSecret, + SignupSecret: cfg.Config.SignupSecret, + PublishedIndexHTML: publishedIndexHTML, + PublishedIndexURL: cfg.Config.Published.IndexURL, }) + e.Use(UsecaseMiddleware(&usecases)) + + // apis api := e.Group("/api") - publicAPI(e, api, cfg.Config, cfg.Repos, cfg.Gateways) - jwks := &JwksSyncOnce{} + api.GET("/ping", Ping()) + api.POST("/signup", Signup()) + api.GET("/published/:name", PublishedMetadata()) + api.GET("/published_data/:name", PublishedData()) + privateApi := api.Group("") + jwks := &JwksSyncOnce{} authRequired(privateApi, jwks, cfg) - graphqlAPI(e, privateApi, cfg, usecases) + graphqlAPI(e, privateApi, cfg) privateAPI(e, privateApi, cfg.Repos) published := e.Group("/p") - publishedRoute(e, published, cfg.Config, cfg.Repos, cfg.Gateways) + auth := PublishedAuthMiddleware() + published.GET("/:name/data.json", PublishedData(), auth) + published.GET("/:name/", PublishedIndex(), auth) serveFiles(e, cfg.Gateways.File) web(e, cfg.Config.Web, cfg.Config.Auth0) @@ -91,6 +96,23 @@ func initEcho(cfg *ServerConfig) *echo.Echo { return e } +func errorHandler(next func(error, echo.Context)) func(error, echo.Context) { + return func(err error, c echo.Context) { + if c.Response().Committed { + return + } + + code, msg := errorMessage(err, func(f string, 
args ...interface{}) { + c.Echo().Logger.Errorf(f, args...) + }) + if err := c.JSON(code, map[string]string{ + "error": msg, + }); err != nil { + next(err, c) + } + } +} + func authRequired(g *echo.Group, jwks Jwks, cfg *ServerConfig) { g.Use(jwtEchoMiddleware(jwks, cfg)) g.Use(parseJwtMiddleware(cfg)) diff --git a/internal/app/auth.go b/internal/app/auth.go index 73aeaf6a7..b20a99976 100644 --- a/internal/app/auth.go +++ b/internal/app/auth.go @@ -86,11 +86,30 @@ func generateOperator(ctx context.Context, cfg *ServerConfig, u *user.User) (*us if u == nil { return nil, nil } - teams, err := cfg.Repos.Team.FindByUser(ctx, u.ID()) + + uid := u.ID() + teams, err := cfg.Repos.Team.FindByUser(ctx, uid) + if err != nil { + return nil, err + } + scenes, err := cfg.Repos.Scene.FindByTeam(ctx, teams.IDs()...) if err != nil { return nil, err } - return usecase.OperatorFrom(u.ID(), teams), nil + + readableTeams := teams.FilterByUserRole(uid, user.RoleReader).IDs() + writableTeams := teams.FilterByUserRole(uid, user.RoleWriter).IDs() + owningTeams := teams.FilterByUserRole(uid, user.RoleOwner).IDs() + + return &usecase.Operator{ + User: uid, + ReadableTeams: readableTeams, + WritableTeams: writableTeams, + OwningTeams: owningTeams, + ReadableScenes: scenes.FilterByTeam(readableTeams...).IDs(), + WritableScenes: scenes.FilterByTeam(writableTeams...).IDs(), + OwningScenes: scenes.FilterByTeam(owningTeams...).IDs(), + }, nil } func addAuth0SubToUser(ctx context.Context, u *user.User, a user.Auth, cfg *ServerConfig) error { diff --git a/internal/app/graphql.go b/internal/app/graphql.go index 4faf675bd..be0cd8ffb 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -12,7 +12,6 @@ import ( "github.com/ravilushqa/otelgqlgen" "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/adapter/gql" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/vektah/gqlparser/v2/gqlerror" ) @@ -22,7 +21,6 @@ func 
graphqlAPI( ec *echo.Echo, r *echo.Group, conf *ServerConfig, - usecases interfaces.Container, ) { playgroundEnabled := conf.Debug || conf.Config.Dev @@ -65,8 +63,8 @@ func graphqlAPI( req := c.Request() ctx := req.Context() - ctx = adapter.AttachUsecases(ctx, &usecases) - ctx = gql.AttachUsecases(ctx, &usecases, enableDataLoaders) + usecases := adapter.Usecases(ctx) + ctx = gql.AttachUsecases(ctx, usecases, enableDataLoaders) c.SetRequest(req.WithContext(ctx)) srv.ServeHTTP(c.Response(), c.Request()) diff --git a/internal/app/private.go b/internal/app/private.go index af3515572..c616ecec1 100644 --- a/internal/app/private.go +++ b/internal/app/private.go @@ -1,7 +1,6 @@ package app import ( - "context" "errors" "io" "net/http" @@ -9,7 +8,6 @@ import ( "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/internal/adapter" - "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer/encoding" @@ -27,17 +25,6 @@ var ( ErrBadParameter = errors.New("id.ext is needed") ) -func checkScene(ctx context.Context, id id.SceneID, op *usecase.Operator, sr repo.Scene) error { - res, err := sr.HasSceneTeam(ctx, id, op.ReadableTeams) - if err != nil { - return err - } - if !res { - return ErrOpDenied - } - return nil -} - func getEncoder(w io.Writer, ext string) (encoding.Encoder, string) { switch strings.ToLower(ext) { case "kml": @@ -68,6 +55,7 @@ func privateAPI( if op == nil { return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} } + scenes := op.AllReadableScenes() param := c.Param("param") params := strings.Split(param, ".") @@ -80,14 +68,6 @@ func privateAPI( return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadID} } - scenes, err := repos.Scene.FindIDsByTeam(ctx, op.ReadableTeams) - if err != nil { - if errors.Is(rerror.ErrNotFound, err) { - return &echo.HTTPError{Code: 
http.StatusNotFound, Message: err} - } - return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} - } - layer, err := repos.Layer.FindByID(ctx, lid, scenes) if err != nil { if errors.Is(rerror.ErrNotFound, err) { @@ -95,14 +75,8 @@ func privateAPI( } return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} } - - err = checkScene(ctx, layer.Scene(), op, repos.Scene) - if err != nil { - if errors.Is(ErrOpDenied, err) { - return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} - } - - return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} + if !op.IsReadableScene(layer.Scene()) { + return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} } ext := params[1] diff --git a/internal/app/public.go b/internal/app/public.go index bcc3d5de9..174c3862e 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -1,73 +1,145 @@ package app import ( + "context" + "crypto/subtle" "fmt" "net/http" + "net/url" "github.com/labstack/echo/v4" + "github.com/labstack/echo/v4/middleware" + "github.com/reearth/reearth-backend/internal/adapter" http1 "github.com/reearth/reearth-backend/internal/adapter/http" - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/internal/usecase/interactor" - "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/rerror" ) -func publicAPI( - ec *echo.Echo, - r *echo.Group, - conf *Config, - repos *repo.Container, - gateways *gateway.Container, -) { - controller := http1.NewUserController( - interactor.NewUser(repos, gateways, conf.SignupSecret), - ) - publishedController := http1.NewPublishedController( - interactor.NewPublished(repos.Project, gateways.File, ""), - ) - - r.GET("/ping", func(c echo.Context) error { +func Ping() echo.HandlerFunc { + return func(c echo.Context) error { return 
c.JSON(http.StatusOK, "pong") - }) + } +} - r.POST("/signup", func(c echo.Context) error { +func Signup() echo.HandlerFunc { + return func(c echo.Context) error { var inp http1.CreateUserInput if err := c.Bind(&inp); err != nil { return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} } + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + output, err := controller.CreateUser(c.Request().Context(), inp) if err != nil { return err } return c.JSON(http.StatusOK, output) - }) + } +} - r.GET("/published/:name", func(c echo.Context) error { +func PublishedMetadata() echo.HandlerFunc { + return func(c echo.Context) error { name := c.Param("name") if name == "" { - return echo.ErrNotFound + return rerror.ErrNotFound + } + + contr, err := publishedController(c) + if err != nil { + return err } - res, err := publishedController.Metadata(c.Request().Context(), name) + res, err := contr.Metadata(c.Request().Context(), name) if err != nil { return err } return c.JSON(http.StatusOK, res) - }) + } +} - r.GET("/published_data/:name", func(c echo.Context) error { +func PublishedData() echo.HandlerFunc { + return func(c echo.Context) error { name := c.Param("name") if name == "" { - return echo.ErrNotFound + return rerror.ErrNotFound + } + + contr, err := publishedController(c) + if err != nil { + return err } - r, err := publishedController.Data(c.Request().Context(), name) + r, err := contr.Data(c.Request().Context(), name) if err != nil { return err } return c.Stream(http.StatusOK, "application/json", r) + } +} + +func PublishedIndex() echo.HandlerFunc { + return func(c echo.Context) error { + contr, err := publishedController(c) + if err != nil { + return err + } + + index, err := contr.Index(c.Request().Context(), c.Param("name"), &url.URL{ + Scheme: "http", + Host: c.Request().Host, + Path: c.Request().URL.Path, + }) + if err != nil { + return err + } + if index == "" { + 
return rerror.ErrNotFound + } + return c.HTML(http.StatusOK, index) + } +} + +func PublishedAuthMiddleware() echo.MiddlewareFunc { + key := struct{}{} + return middleware.BasicAuthWithConfig(middleware.BasicAuthConfig{ + Validator: func(user string, password string, c echo.Context) (bool, error) { + md, ok := c.Request().Context().Value(key).(interfaces.ProjectPublishedMetadata) + if !ok { + return true, echo.ErrNotFound + } + return !md.IsBasicAuthActive || subtle.ConstantTimeCompare([]byte(user), []byte(md.BasicAuthUsername)) == 1 && subtle.ConstantTimeCompare([]byte(password), []byte(md.BasicAuthPassword)) == 1, nil + }, + Skipper: func(c echo.Context) bool { + name := c.Param("name") + if name == "" { + return true + } + + contr, err := publishedController(c) + if err != nil { + return false + } + + md, err := contr.Metadata(c.Request().Context(), name) + if err != nil { + return true + } + + c.SetRequest(c.Request().WithContext(context.WithValue(c.Request().Context(), key, md))) + return !md.IsBasicAuthActive + }, }) } + +func publishedController(c echo.Context) (*http1.PublishedController, error) { + uc := adapter.Usecases(c.Request().Context()) + if uc.Published == nil { + return nil, rerror.ErrNotFound + } + return http1.NewPublishedController(uc.Published), nil +} diff --git a/internal/app/published_test.go b/internal/app/public_test.go similarity index 71% rename from internal/app/published_test.go rename to internal/app/public_test.go index 0cf9398bc..e5e1b9460 100644 --- a/internal/app/published_test.go +++ b/internal/app/public_test.go @@ -17,25 +17,6 @@ import ( ) func TestPublishedAuthMiddleware(t *testing.T) { - h := PublishedAuthMiddleware(func(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { - if name == "active" { - return interfaces.ProjectPublishedMetadata{ - IsBasicAuthActive: true, - BasicAuthUsername: "fooo", - BasicAuthPassword: "baar", - }, nil - } else if name == "inactive" { - return 
interfaces.ProjectPublishedMetadata{ - IsBasicAuthActive: false, - BasicAuthUsername: "fooo", - BasicAuthPassword: "baar", - }, nil - } - return interfaces.ProjectPublishedMetadata{}, rerror.ErrNotFound - })(func(c echo.Context) error { - return c.String(http.StatusOK, "test") - }) - tests := []struct { Name string PublishedName string @@ -89,8 +70,13 @@ func TestPublishedAuthMiddleware(t *testing.T) { c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) + m := UsecaseMiddleware(&interfaces.Container{ + Published: &mockPublished{}, + }) - err := h(c) + err := m(PublishedAuthMiddleware()(func(c echo.Context) error { + return c.String(http.StatusOK, "test") + }))(c) if tc.Error == nil { assert.NoError(err) assert.Equal(http.StatusOK, res.Code) @@ -103,13 +89,6 @@ func TestPublishedAuthMiddleware(t *testing.T) { } func TestPublishedData(t *testing.T) { - h := PublishedData(func(ctx context.Context, name string) (io.Reader, error) { - if name == "prj" { - return strings.NewReader("aaa"), nil - } - return nil, rerror.ErrNotFound - }) - tests := []struct { Name string PublishedName string @@ -142,8 +121,12 @@ func TestPublishedData(t *testing.T) { c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) + m := UsecaseMiddleware(&interfaces.Container{ + Published: &mockPublished{}, + }) + + err := m(PublishedData())(c) - err := h(c) if tc.Error == nil { assert.NoError(err) assert.Equal(http.StatusOK, res.Code) @@ -169,7 +152,7 @@ func TestPublishedIndex(t *testing.T) { }, { Name: "empty index", - Error: echo.ErrNotFound, + Error: rerror.ErrNotFound, EmptyIndex: true, }, { @@ -195,17 +178,11 @@ func TestPublishedIndex(t *testing.T) { c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) + m := UsecaseMiddleware(&interfaces.Container{ + Published: &mockPublished{EmptyIndex: tc.EmptyIndex}, + }) - err := PublishedIndex(func(ctx context.Context, name string, url *url.URL) (string, 
error) { - if tc.EmptyIndex { - return "", nil - } - if name == "prj" { - assert.Equal("http://example.com/aaa/bbb", url.String()) - return "index", nil - } - return "", rerror.ErrNotFound - })(c) + err := m(PublishedIndex())(c) if tc.Error == nil { assert.NoError(err) @@ -218,3 +195,42 @@ func TestPublishedIndex(t *testing.T) { }) } } + +type mockPublished struct { + interfaces.Published + EmptyIndex bool +} + +func (p *mockPublished) Metadata(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error) { + if name == "active" { + return interfaces.ProjectPublishedMetadata{ + IsBasicAuthActive: true, + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, nil + } else if name == "inactive" { + return interfaces.ProjectPublishedMetadata{ + IsBasicAuthActive: false, + BasicAuthUsername: "fooo", + BasicAuthPassword: "baar", + }, nil + } + return interfaces.ProjectPublishedMetadata{}, rerror.ErrNotFound +} + +func (p *mockPublished) Data(ctx context.Context, name string) (io.Reader, error) { + if name == "prj" { + return strings.NewReader("aaa"), nil + } + return nil, rerror.ErrNotFound +} + +func (p *mockPublished) Index(ctx context.Context, name string, url *url.URL) (string, error) { + if p.EmptyIndex { + return "", nil + } + if name == "prj" && url.String() == "http://example.com/aaa/bbb" { + return "index", nil + } + return "", rerror.ErrNotFound +} diff --git a/internal/app/published.go b/internal/app/published.go deleted file mode 100644 index 339ae21a9..000000000 --- a/internal/app/published.go +++ /dev/null @@ -1,102 +0,0 @@ -package app - -import ( - "context" - "crypto/subtle" - "io" - "io/fs" - "net/http" - "net/url" - "os" - - "github.com/labstack/echo/v4" - "github.com/labstack/echo/v4/middleware" - http1 "github.com/reearth/reearth-backend/internal/adapter/http" - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/internal/usecase/interactor" - 
"github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/internal/usecase/repo" -) - -func publishedRoute( - ec *echo.Echo, - r *echo.Group, - conf *Config, - repos *repo.Container, - gateways *gateway.Container, -) { - contr := http1.NewPublishedController(publishedUsecaseFrom(conf.Published.IndexURL, repos.Project, gateways.File, os.DirFS("."))) - auth := PublishedAuthMiddleware(contr.Metadata) - r.GET("/:name/data.json", PublishedData(contr.Data), auth) - r.GET("/:name/", PublishedIndex(contr.Index), auth) -} - -func PublishedData(data func(ctx context.Context, name string) (io.Reader, error)) echo.HandlerFunc { - return func(c echo.Context) error { - r, err := data(c.Request().Context(), c.Param("name")) - if err != nil { - return err - } - - return c.Stream(http.StatusOK, "application/json", r) - } -} - -func PublishedIndex(index func(ctx context.Context, name string, url *url.URL) (string, error)) echo.HandlerFunc { - return func(c echo.Context) error { - index, err := index(c.Request().Context(), c.Param("name"), &url.URL{ - Scheme: "http", - Host: c.Request().Host, - Path: c.Request().URL.Path, - }) - if err != nil { - return err - } - if index == "" { - return echo.ErrNotFound - } - return c.HTML(http.StatusOK, index) - } -} - -func PublishedAuthMiddleware(metadata func(ctx context.Context, name string) (interfaces.ProjectPublishedMetadata, error)) echo.MiddlewareFunc { - key := struct{}{} - return middleware.BasicAuthWithConfig(middleware.BasicAuthConfig{ - Validator: func(user string, password string, c echo.Context) (bool, error) { - md, ok := c.Request().Context().Value(key).(interfaces.ProjectPublishedMetadata) - if !ok { - return true, echo.ErrNotFound - } - return !md.IsBasicAuthActive || subtle.ConstantTimeCompare([]byte(user), []byte(md.BasicAuthUsername)) == 1 && subtle.ConstantTimeCompare([]byte(password), []byte(md.BasicAuthPassword)) == 1, nil - }, - Skipper: func(c echo.Context) bool { - name := 
c.Param("name") - if name == "" { - return true - } - - md, err := metadata(c.Request().Context(), name) - if err != nil { - return true - } - - c.SetRequest(c.Request().WithContext(context.WithValue(c.Request().Context(), key, md))) - return !md.IsBasicAuthActive - }, - }) -} - -func publishedUsecaseFrom(indexURL *url.URL, p repo.Project, f gateway.File, ff fs.FS) interfaces.Published { - var i interfaces.Published - if indexURL == nil || indexURL.String() == "" { - html, err := fs.ReadFile(ff, "web/published.html") - if err == nil { - i = interactor.NewPublished(p, f, string(html)) - } else { - i = interactor.NewPublished(p, f, "") - } - } else { - i = interactor.NewPublishedWithURL(p, f, indexURL) - } - return i -} diff --git a/internal/app/usecase.go b/internal/app/usecase.go new file mode 100644 index 000000000..f4eb67098 --- /dev/null +++ b/internal/app/usecase.go @@ -0,0 +1,21 @@ +package app + +import ( + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +func UsecaseMiddleware(uc *interfaces.Container) echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + req := c.Request() + ctx := req.Context() + + ctx = adapter.AttachUsecases(ctx, uc) + + c.SetRequest(req.WithContext(ctx)) + return next(c) + } + } +} diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index 2dedfd3cb..cdfd48c42 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -5,21 +5,20 @@ import ( "sync" "time" + "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" - - "github.com/reearth/reearth-backend/internal/usecase/repo" ) type Scene struct { lock sync.Mutex - data 
map[id.SceneID]scene.Scene + data map[id.SceneID]*scene.Scene } func NewScene() repo.Scene { return &Scene{ - data: map[id.SceneID]scene.Scene{}, + data: map[id.SceneID]*scene.Scene{}, } } @@ -29,20 +28,20 @@ func (r *Scene) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*sc s, ok := r.data[id] if ok && isTeamIncludes(s.Team(), f) { - return &s, nil + return s, nil } return nil, rerror.ErrNotFound } -func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) ([]*scene.Scene, error) { +func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) (scene.List, error) { r.lock.Lock() defer r.lock.Unlock() - result := []*scene.Scene{} + result := scene.List{} for _, id := range ids { if d, ok := r.data[id]; ok { if isTeamIncludes(d.Team(), f) { - result = append(result, &d) + result = append(result, d) continue } } @@ -58,68 +57,31 @@ func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamI for _, d := range r.data { if d.Project() == id && isTeamIncludes(d.Team(), f) { - return &d, nil + return d, nil } } return nil, rerror.ErrNotFound } -func (r *Scene) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) { +func (r *Scene) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { r.lock.Lock() defer r.lock.Unlock() - result := []id.SceneID{} + result := scene.List{} for _, d := range r.data { if isTeamIncludes(d.Team(), teams) { - result = append(result, d.ID()) + result = append(result, d) } } return result, nil } -func (r *Scene) HasSceneTeam(ctx context.Context, id id.SceneID, teams []id.TeamID) (bool, error) { - r.lock.Lock() - defer r.lock.Unlock() - - s, ok := r.data[id] - if !ok { - return false, rerror.ErrNotFound - } - return s.IsTeamIncluded(teams), nil -} - -func (r *Scene) HasScenesTeam(ctx context.Context, id []id.SceneID, teams []id.TeamID) ([]bool, error) { - r.lock.Lock() - defer r.lock.Unlock() - - if id == nil { - return nil, nil - } 
- if len(teams) == 0 { - return make([]bool, len(id)), nil - } - res := make([]bool, 0, len(id)) - for _, i := range id { - if teams == nil { - res = append(res, false) - continue - } - s, ok := r.data[i] - if !ok { - res = append(res, false) - continue - } - res = append(res, s.IsTeamIncluded(teams)) - } - return res, nil -} - func (r *Scene) Save(ctx context.Context, s *scene.Scene) error { r.lock.Lock() defer r.lock.Unlock() s.SetUpdatedAt(time.Now()) - r.data[s.ID()] = *s + r.data[s.ID()] = s return nil } diff --git a/internal/infrastructure/memory/team.go b/internal/infrastructure/memory/team.go index 9d2f46e66..b8619ed5b 100644 --- a/internal/infrastructure/memory/team.go +++ b/internal/infrastructure/memory/team.go @@ -21,11 +21,11 @@ func NewTeam() repo.Team { } } -func (r *Team) FindByUser(ctx context.Context, i id.UserID) ([]*user.Team, error) { +func (r *Team) FindByUser(ctx context.Context, i id.UserID) (user.TeamList, error) { r.lock.Lock() defer r.lock.Unlock() - result := []*user.Team{} + result := user.TeamList{} for _, d := range r.data { if d.Members().ContainsUser(i) { result = append(result, &d) @@ -34,11 +34,11 @@ func (r *Team) FindByUser(ctx context.Context, i id.UserID) ([]*user.Team, error return result, nil } -func (r *Team) FindByIDs(ctx context.Context, ids []id.TeamID) ([]*user.Team, error) { +func (r *Team) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamList, error) { r.lock.Lock() defer r.lock.Unlock() - result := []*user.Team{} + result := user.TeamList{} for _, id := range ids { if d, ok := r.data[id]; ok { result = append(result, &d) diff --git a/internal/infrastructure/mongo/mongodoc/scene.go b/internal/infrastructure/mongo/mongodoc/scene.go index 133400ede..98e4c50d6 100644 --- a/internal/infrastructure/mongo/mongodoc/scene.go +++ b/internal/infrastructure/mongo/mongodoc/scene.go @@ -44,7 +44,7 @@ type SceneClusterDocument struct { } type SceneConsumer struct { - Rows []*scene.Scene + Rows scene.List } func (c 
*SceneConsumer) Consume(raw bson.Raw) error { diff --git a/internal/infrastructure/mongo/mongodoc/team.go b/internal/infrastructure/mongo/mongodoc/team.go index 1006322ee..f440aaa1d 100644 --- a/internal/infrastructure/mongo/mongodoc/team.go +++ b/internal/infrastructure/mongo/mongodoc/team.go @@ -18,7 +18,7 @@ type TeamDocument struct { } type TeamConsumer struct { - Rows []*user.Team + Rows user.TeamList } func (c *TeamConsumer) Consume(raw bson.Raw) error { diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index af9a2416f..ee38b94d9 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -7,11 +7,9 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" - "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" - "go.mongodb.org/mongo-driver/mongo/options" ) type sceneRepo struct { @@ -32,19 +30,19 @@ func (r *sceneRepo) init() { } func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*scene.Scene, error) { - filter := r.teamFilter(bson.D{ - {Key: "id", Value: id.String()}, + filter := r.teamFilter(bson.M{ + "id": id.String(), }, f) return r.findOne(ctx, filter) } -func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) ([]*scene.Scene, error) { - filter := r.teamFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.SceneIDsToStrings(ids)}, - }}, +func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) (scene.List, error) { + filter := r.teamFilter(bson.M{ + "id": bson.M{ + "$in": id.SceneIDsToStrings(ids), + }, }, f) - dst := make([]*scene.Scene, 0, len(ids)) + dst := make(scene.List, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { return nil, err @@ 
-53,72 +51,22 @@ func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.Team } func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamID) (*scene.Scene, error) { - filter := r.teamFilter(bson.D{ - {Key: "project", Value: id.String()}, + filter := r.teamFilter(bson.M{ + "project": id.String(), }, f) return r.findOne(ctx, filter) } -func (r *sceneRepo) FindIDsByTeam(ctx context.Context, teams []id.TeamID) ([]id.SceneID, error) { - filter := bson.D{ - {Key: "team", Value: bson.D{ - {Key: "$in", Value: id.TeamIDsToStrings(teams)}, - }}, - } - c := mongodoc.SceneIDConsumer{ - Rows: []id.SceneID{}, - } - if err := r.client.Find(ctx, filter, &c); err != nil { +func (r *sceneRepo) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { + res, err := r.find(ctx, nil, bson.M{ + "team": bson.M{"$in": id.TeamIDsToStrings(teams)}, + }) + if err != nil { if err != mongo.ErrNilDocument && err != mongo.ErrNoDocuments { return nil, err } } - return c.Rows, nil -} - -func (r *sceneRepo) HasSceneTeam(ctx context.Context, sceneID id.SceneID, temaIDs []id.TeamID) (bool, error) { - filter := bson.D{ - {Key: "id", Value: sceneID.String()}, - {Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(temaIDs)}}}, - } - res, err2 := r.client.Collection().CountDocuments(ctx, filter) - if err2 != nil { - return false, rerror.ErrInternalBy(err2) - } - return res == 1, nil -} - -func (r *sceneRepo) HasScenesTeam(ctx context.Context, sceneIDs []id.SceneID, teamIDs []id.TeamID) ([]bool, error) { - cursor, err2 := r.client.Collection().Find(ctx, bson.D{ - {Key: "id", Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(sceneIDs)}}}, - {Key: "team", Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teamIDs)}}}, - }, &options.FindOptions{ - Projection: bson.D{{Key: "id", Value: 1}, {Key: "_id", Value: 0}}, - }) - - if err2 != nil { - return nil, rerror.ErrInternalBy(err2) - } - - var res []struct{ ID string } - err2 = 
cursor.All(ctx, res) - if err2 != nil { - return nil, rerror.ErrInternalBy(err2) - } - - res2 := make([]bool, 0, len(sceneIDs)) - for _, sid := range sceneIDs { - ok := false - for _, r := range res { - if r.ID == sid.String() { - ok = true - break - } - } - res2 = append(res2, ok) - } - - return res2, nil + return res, nil } func (r *sceneRepo) Save(ctx context.Context, scene *scene.Scene) error { @@ -130,7 +78,7 @@ func (r *sceneRepo) Remove(ctx context.Context, id id.SceneID) error { return r.client.RemoveOne(ctx, id.String()) } -func (r *sceneRepo) find(ctx context.Context, dst []*scene.Scene, filter bson.D) ([]*scene.Scene, error) { +func (r *sceneRepo) find(ctx context.Context, dst []*scene.Scene, filter interface{}) ([]*scene.Scene, error) { c := mongodoc.SceneConsumer{ Rows: dst, } @@ -140,7 +88,7 @@ func (r *sceneRepo) find(ctx context.Context, dst []*scene.Scene, filter bson.D) return c.Rows, nil } -func (r *sceneRepo) findOne(ctx context.Context, filter bson.D) (*scene.Scene, error) { +func (r *sceneRepo) findOne(ctx context.Context, filter interface{}) (*scene.Scene, error) { dst := make([]*scene.Scene, 0, 1) c := mongodoc.SceneConsumer{ Rows: dst, @@ -151,37 +99,14 @@ func (r *sceneRepo) findOne(ctx context.Context, filter bson.D) (*scene.Scene, e return c.Rows[0], nil } -// func (r *sceneRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*scene.Scene, *usecase.PageInfo, error) { -// var c mongodoc.SceneConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.Rows, pageInfo, nil -// } - -func filterScenes(ids []id.SceneID, rows []*scene.Scene) []*scene.Scene { - res := make([]*scene.Scene, 0, len(ids)) - for _, id := range ids { - var r2 *scene.Scene - for _, r := range rows { - if r.ID() == id { - r2 = r - break - } - } - res = append(res, r2) - } - return res +func filterScenes(ids []id.SceneID, rows 
scene.List) scene.List { + return rows.FilterByID(ids...) } -func (*sceneRepo) teamFilter(filter bson.D, teams []id.TeamID) bson.D { +func (*sceneRepo) teamFilter(filter bson.M, teams []id.TeamID) bson.M { if teams == nil { return filter } - filter = append(filter, bson.E{ - Key: "team", - Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teams)}}, - }) + filter["team"] = bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teams)}} return filter } diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index 336e366b8..11691abe3 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -29,7 +29,7 @@ func (r *teamRepo) init() { } } -func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) ([]*user.Team, error) { +func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) (user.TeamList, error) { filter := bson.D{ {Key: "members." + strings.Replace(id.String(), ".", "", -1), Value: bson.D{ {Key: "$exists", Value: true}, @@ -38,7 +38,7 @@ func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) ([]*user.Team, return r.find(ctx, nil, filter) } -func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) ([]*user.Team, error) { +func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamList, error) { filter := bson.D{ {Key: "id", Value: bson.D{ {Key: "$in", Value: id.TeamIDsToStrings(ids)}, @@ -87,7 +87,7 @@ func (r *teamRepo) RemoveAll(ctx context.Context, ids []id.TeamID) error { return r.client.RemoveAll(ctx, id.TeamIDsToStrings(ids)) } -func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) ([]*user.Team, error) { +func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) (user.TeamList, error) { c := mongodoc.TeamConsumer{ Rows: dst, } @@ -108,26 +108,6 @@ func (r *teamRepo) findOne(ctx context.Context, filter bson.D) (*user.Team, erro return c.Rows[0], nil } -// func (r *teamRepo) paginate(ctx 
context.Context, filter bson.D, pagination *usecase.Pagination) ([]*user.Team, *usecase.PageInfo, error) { -// var c mongodoc.TeamConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.Rows, pageInfo, nil -// } - -func filterTeams(ids []id.TeamID, rows []*user.Team) []*user.Team { - res := make([]*user.Team, 0, len(ids)) - for _, id := range ids { - var r2 *user.Team - for _, r := range rows { - if r.ID() == id { - r2 = r - break - } - } - res = append(res, r2) - } - return res +func filterTeams(ids []id.TeamID, rows user.TeamList) user.TeamList { + return rows.FilterByID(ids...) } diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index 9c5e9e729..34f91db8b 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -31,7 +31,7 @@ func NewAsset(r *repo.Container, gr *gateway.Container) interfaces.Asset { } func (i *Asset) Fetch(ctx context.Context, assets []id.AssetID, operator *usecase.Operator) ([]*asset.Asset, error) { - return i.assetRepo.FindByIDs(ctx, assets, operator.ReadableTeams) + return i.assetRepo.FindByIDs(ctx, assets, operator.AllReadableTeams()) } func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { @@ -86,7 +86,7 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope } func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { - asset, err := i.assetRepo.FindByID(ctx, aid, operator.WritableTeams) + asset, err := i.assetRepo.FindByID(ctx, aid, operator.AllWritableTeams()) if err != nil { return aid, err } diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index 6c5a383d2..b3d0bb11f 100644 --- a/internal/usecase/interactor/common.go +++ 
b/internal/usecase/interactor/common.go @@ -3,6 +3,7 @@ package interactor import ( "context" "errors" + "net/url" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" @@ -16,21 +17,31 @@ import ( ) type ContainerConfig struct { - SignupSecret string + SignupSecret string + PublishedIndexHTML string + PublishedIndexURL *url.URL } func NewContainer(r *repo.Container, g *gateway.Container, config ContainerConfig) interfaces.Container { + var published interfaces.Published + if config.PublishedIndexURL != nil && config.PublishedIndexURL.String() != "" { + published = NewPublishedWithURL(r.Project, g.File, config.PublishedIndexURL) + } else { + published = NewPublished(r.Project, g.File, config.PublishedIndexHTML) + } + return interfaces.Container{ - Asset: NewAsset(r, g), - Dataset: NewDataset(r, g), - Layer: NewLayer(r), - Plugin: NewPlugin(r, g), - Project: NewProject(r, g), - Property: NewProperty(r, g), - Scene: NewScene(r, g), - Tag: NewTag(r), - Team: NewTeam(r), - User: NewUser(r, g, config.SignupSecret), + Asset: NewAsset(r, g), + Dataset: NewDataset(r, g), + Layer: NewLayer(r), + Plugin: NewPlugin(r, g), + Project: NewProject(r, g), + Property: NewProperty(r, g), + Published: published, + Scene: NewScene(r, g), + Tag: NewTag(r), + Team: NewTeam(r), + User: NewUser(r, g, config.SignupSecret), } } @@ -57,7 +68,7 @@ func (i common) CanReadTeam(t id.TeamID, op *usecase.Operator) error { if err := i.OnlyOperator(op); err != nil { return err } - if !op.IsReadableTeamIncluded(t) { + if !op.IsReadableTeam(t) { return interfaces.ErrOperationDenied } return nil @@ -67,65 +78,44 @@ func (i common) CanWriteTeam(t id.TeamID, op *usecase.Operator) error { if err := i.OnlyOperator(op); err != nil { return err } - if !op.IsWritableTeamIncluded(t) { + if !op.IsWritableTeam(t) { return interfaces.ErrOperationDenied } return nil } -type commonScene struct { - common - sceneRepo repo.Scene -} - -func (i 
commonScene) OnlyReadableScenes(ctx context.Context, op *usecase.Operator) ([]id.SceneID, error) { - if err := i.OnlyOperator(op); err != nil { - return nil, err - } - scenes, err := i.sceneRepo.FindIDsByTeam(ctx, op.ReadableTeams) - if err != nil { - return nil, err - } - return scenes, nil -} - -func (i commonScene) OnlyWritableScenes(ctx context.Context, op *usecase.Operator) ([]id.SceneID, error) { +func (i common) CanReadScene(t id.SceneID, op *usecase.Operator) error { if err := i.OnlyOperator(op); err != nil { - return nil, err + return err } - scenes, err := i.sceneRepo.FindIDsByTeam(ctx, op.WritableTeams) - if err != nil { - return nil, err + if !op.IsReadableScene(t) { + return interfaces.ErrOperationDenied } - return scenes, nil + return nil } -func (i commonScene) CanReadScene(ctx context.Context, s id.SceneID, op *usecase.Operator) error { +func (i common) CanWriteScene(t id.SceneID, op *usecase.Operator) error { if err := i.OnlyOperator(op); err != nil { return err } - res, err := i.sceneRepo.HasSceneTeam(ctx, s, op.ReadableTeams) - if err != nil { - return err - } - if !res { + if !op.IsWritableScene(t) { return interfaces.ErrOperationDenied } return nil } -func (i commonScene) CanWriteScene(ctx context.Context, s id.SceneID, op *usecase.Operator) error { +func (i common) OnlyReadableScenes(op *usecase.Operator) ([]id.SceneID, error) { if err := i.OnlyOperator(op); err != nil { - return err - } - res, err := i.sceneRepo.HasSceneTeam(ctx, s, op.WritableTeams) - if err != nil { - return err + return nil, err } - if !res { - return interfaces.ErrOperationDenied + return op.AllReadableScenes(), nil +} + +func (i common) OnlyWritableScenes(op *usecase.Operator) ([]id.SceneID, error) { + if err := i.OnlyOperator(op); err != nil { + return nil, err } - return nil + return op.AllWritableScenes(), nil } type commonSceneLock struct { @@ -236,7 +226,7 @@ func (d ProjectDeleter) Delete(ctx context.Context, prj *project.Project, force } // Fetch scene - s, err := 
d.Scene.FindByProject(ctx, prj.ID(), operator.WritableTeams) + s, err := d.Scene.FindByProject(ctx, prj.ID(), operator.AllWritableTeams()) if err != nil && !errors.Is(err, rerror.ErrNotFound) { return err } diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index 563504812..27e122b29 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -26,7 +26,7 @@ import ( var extensionForLinkedLayers = id.PluginExtensionID("marker") type Dataset struct { - commonScene + common commonSceneLock datasetRepo repo.Dataset datasetSchemaRepo repo.DatasetSchema @@ -41,7 +41,6 @@ type Dataset struct { func NewDataset(r *repo.Container, gr *gateway.Container) interfaces.Dataset { return &Dataset{ - commonScene: commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, datasetRepo: r.Dataset, datasetSchemaRepo: r.DatasetSchema, @@ -64,7 +63,7 @@ func (i *Dataset) DynamicSchemaFields() []*dataset.SchemaField { } func (i *Dataset) UpdateDatasetSchema(ctx context.Context, inp interfaces.UpdateDatasetSchemaParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -96,7 +95,6 @@ func (i *Dataset) UpdateDatasetSchema(ctx context.Context, inp interfaces.Update } func (i *Dataset) AddDynamicDatasetSchema(ctx context.Context, inp interfaces.AddDynamicDatasetSchemaParam) (_ *dataset.Schema, err error) { - // Begin Db transaction tx, err := i.transaction.Begin() if err != nil { @@ -180,7 +178,7 @@ func (i *Dataset) AddDynamicDataset(ctx context.Context, inp interfaces.AddDynam } func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatasetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { - if err := i.CanWriteScene(ctx, inp.SceneId, operator); err != nil { + if err := 
i.CanWriteScene(inp.SceneId, operator); err != nil { return nil, err } if inp.File == nil { @@ -196,7 +194,7 @@ func (i *Dataset) ImportDataset(ctx context.Context, inp interfaces.ImportDatase } func (i *Dataset) ImportDatasetFromGoogleSheet(ctx context.Context, inp interfaces.ImportDatasetFromGoogleSheetParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { - if err := i.CanWriteScene(ctx, inp.SceneId, operator); err != nil { + if err := i.CanWriteScene(inp.SceneId, operator); err != nil { return nil, err } @@ -349,7 +347,7 @@ func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name str } func (i *Dataset) Fetch(ctx context.Context, ids []id.DatasetID, operator *usecase.Operator) (dataset.List, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -357,7 +355,7 @@ func (i *Dataset) Fetch(ctx context.Context, ids []id.DatasetID, operator *useca } func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, operator *usecase.Operator) (dataset.List, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -386,7 +384,7 @@ func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, op } func (i *Dataset) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID, operator *usecase.Operator) (dataset.SchemaList, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -395,7 +393,7 @@ func (i *Dataset) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID, ope } func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, depth int, operator *usecase.Operator) (dataset.SchemaList, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return 
nil, err } @@ -427,7 +425,7 @@ func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, d } func (i *Dataset) FindBySchema(ctx context.Context, ds id.DatasetSchemaID, p *usecase.Pagination, operator *usecase.Operator) (dataset.List, *usecase.PageInfo, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, nil, err } @@ -436,7 +434,7 @@ func (i *Dataset) FindBySchema(ctx context.Context, ds id.DatasetSchemaID, p *us } func (i *Dataset) FindSchemaByScene(ctx context.Context, sid id.SceneID, p *usecase.Pagination, operator *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) { - if err := i.CanReadScene(ctx, sid, operator); err != nil { + if err := i.CanReadScene(sid, operator); err != nil { return nil, nil, err } @@ -448,7 +446,7 @@ func (i *Dataset) FindDynamicSchemaByScene(ctx context.Context, sid id.SceneID) } func (i *Dataset) Sync(ctx context.Context, sceneID id.SceneID, url string, operator *usecase.Operator) (dss dataset.SchemaList, ds dataset.List, err error) { - if err := i.CanWriteScene(ctx, sceneID, operator); err != nil { + if err := i.CanWriteScene(sceneID, operator); err != nil { return nil, nil, err } @@ -525,7 +523,7 @@ func (i *Dataset) Sync(ctx context.Context, sceneID id.SceneID, url string, oper } func (i *Dataset) AddDatasetSchema(ctx context.Context, inp interfaces.AddDatasetSchemaParam, operator *usecase.Operator) (ds *dataset.Schema, err error) { - if err := i.CanWriteScene(ctx, inp.SceneId, operator); err != nil { + if err := i.CanWriteScene(inp.SceneId, operator); err != nil { return nil, err } @@ -560,28 +558,25 @@ func (i *Dataset) AddDatasetSchema(ctx context.Context, inp interfaces.AddDatase } func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.RemoveDatasetSchemaParam, operator *usecase.Operator) (_ id.DatasetSchemaID, err error) { - if operator == nil { - return inp.SchemaId, 
interfaces.ErrOperationDenied - } - scenes, err := i.sceneRepo.FindIDsByTeam(ctx, operator.WritableTeams) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } - s, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaId, scenes) + s, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaID, scenes) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } if s == nil { - return inp.SchemaId, rerror.ErrNotFound + return inp.SchemaID, rerror.ErrNotFound } - datasets, err := i.datasetRepo.FindBySchemaAll(ctx, inp.SchemaId) + datasets, err := i.datasetRepo.FindBySchemaAll(ctx, inp.SchemaID) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } if (inp.Force == nil || !*inp.Force) && len(datasets) != 0 { - return inp.SchemaId, errors.New("can not remove non-empty schema") + return inp.SchemaID, errors.New("can not remove non-empty schema") } // Begin Db transaction @@ -599,23 +594,23 @@ func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.Remove dsids := []id.DatasetID{} var properties []*property.Property for _, d := range datasets { - properties, err = i.propertyRepo.FindByDataset(ctx, inp.SchemaId, d.ID()) + properties, err = i.propertyRepo.FindByDataset(ctx, inp.SchemaID, d.ID()) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } for _, p := range properties { // unlinking fields - p.UnlinkAllByDataset(inp.SchemaId, d.ID()) + p.UnlinkAllByDataset(inp.SchemaID, d.ID()) } dsids = append(dsids, d.ID()) } // unlink layers (items and groups) and save - layers, err := i.layerRepo.FindAllByDatasetSchema(ctx, inp.SchemaId) + layers, err := i.layerRepo.FindAllByDatasetSchema(ctx, inp.SchemaID) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } for _, li := range layers.ToLayerItemList() { @@ -627,7 +622,7 @@ func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.Remove groupItems, err := 
i.layerRepo.FindItemByIDs(ctx, lg.Layers().Layers(), scenes) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } // unlink layers group items @@ -641,24 +636,24 @@ func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.Remove err = i.propertyRepo.SaveAll(ctx, properties) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } err = i.layerRepo.SaveAll(ctx, layers) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } err = i.datasetRepo.RemoveAll(ctx, dsids) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } - err = i.datasetSchemaRepo.Remove(ctx, inp.SchemaId) + err = i.datasetSchemaRepo.Remove(ctx, inp.SchemaID) if err != nil { - return inp.SchemaId, err + return inp.SchemaID, err } tx.Commit() - return inp.SchemaId, nil + return inp.SchemaID, nil } diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index e7f1b3486..bc11ae644 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -27,7 +27,7 @@ import ( // TODO: ใƒฌใ‚คใƒคใƒผไฝœๆˆใฎใƒ‰ใƒกใ‚คใƒณใƒญใ‚ธใƒƒใ‚ฏใŒใ“ใ“ใซๅคšใๆผใ‚Œๅ‡บใ—ใฆใ„ใ‚‹ใฎใงใƒ‰ใƒกใ‚คใƒณๅฑคใซ็งปใ™ type Layer struct { - commonScene + common commonSceneLock layerRepo repo.Layer tagRepo repo.Tag @@ -43,7 +43,6 @@ type Layer struct { func NewLayer(r *repo.Container) interfaces.Layer { return &Layer{ - commonScene: commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, layerRepo: r.Layer, tagRepo: r.Tag, @@ -59,7 +58,7 @@ func NewLayer(r *repo.Container) interfaces.Layer { } func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) (layer.List, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -68,7 +67,7 @@ func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.O } func (i *Layer) 
FetchGroup(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Group, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -77,7 +76,7 @@ func (i *Layer) FetchGroup(ctx context.Context, ids []id.LayerID, operator *usec } func (i *Layer) FetchItem(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Item, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -86,7 +85,7 @@ func (i *Layer) FetchItem(ctx context.Context, ids []id.LayerID, operator *useca } func (i *Layer) FetchParent(ctx context.Context, pid id.LayerID, operator *usecase.Operator) (*layer.Group, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -95,7 +94,7 @@ func (i *Layer) FetchParent(ctx context.Context, pid id.LayerID, operator *useca } func (i *Layer) FetchByProperty(ctx context.Context, pid id.PropertyID, operator *usecase.Operator) (layer.Layer, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -104,7 +103,7 @@ func (i *Layer) FetchByProperty(ctx context.Context, pid id.PropertyID, operator } func (i *Layer) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -137,7 +136,7 @@ func (i *Layer) FetchMerged(ctx context.Context, org id.LayerID, parent *id.Laye } func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := 
i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -155,7 +154,7 @@ func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operat } func (i *Layer) FetchByTag(ctx context.Context, tag id.TagID, operator *usecase.Operator) (layer.List, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -173,7 +172,7 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, err } @@ -258,7 +257,7 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, err } @@ -476,7 +475,7 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return lid, nil, err } @@ -544,7 +543,7 @@ func (i *Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, ope } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -588,7 +587,7 @@ func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operato } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return inp.LayerID, nil, nil, -1, err } @@ -652,7 +651,7 @@ func (i *Layer) CreateInfobox(ctx context.Context, lid id.LayerID, operator *use } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -705,7 +704,7 @@ func (i *Layer) RemoveInfobox(ctx context.Context, layerID id.LayerID, 
operator } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -752,7 +751,7 @@ func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFi } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, err } @@ -827,7 +826,7 @@ func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfobox } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return inp.InfoboxFieldID, nil, -1, err } @@ -870,7 +869,7 @@ func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInf } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return inp.InfoboxFieldID, nil, err } @@ -941,7 +940,7 @@ func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam if inp.File == nil { return nil, nil, interfaces.ErrFileNotIncluded } - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, err } @@ -1029,7 +1028,7 @@ func (i *Layer) AttachTag(ctx context.Context, layerID id.LayerID, tagID id.TagI } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -1077,7 +1076,7 @@ func (i *Layer) DetachTag(ctx context.Context, layerID id.LayerID, tagID id.TagI } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index b18add6cb..c9788c759 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -13,7 +13,7 @@ import ( type Plugin struct { common - 
commonScene + sceneRepo repo.Scene pluginRepo repo.Plugin propertySchemaRepo repo.PropertySchema propertyRepo repo.Property @@ -26,9 +26,7 @@ type Plugin struct { func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { return &Plugin{ - commonScene: commonScene{ - sceneRepo: r.Scene, - }, + sceneRepo: r.Scene, layerRepo: r.Layer, pluginRepo: r.Plugin, propertySchemaRepo: r.PropertySchema, @@ -41,7 +39,7 @@ func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { } func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index b6b2e0cf4..443289a34 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -21,7 +21,7 @@ import ( var pluginPackageSizeLimit int64 = 10 * 1024 * 1024 // 10MB func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { - if err := i.CanWriteScene(ctx, sid, operator); err != nil { + if err := i.CanWriteScene(sid, operator); err != nil { return nil, nil, err } @@ -38,7 +38,7 @@ func (i *Plugin) Upload(ctx context.Context, r io.Reader, sid id.SceneID, operat } func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneID, operator *usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { - if err := i.CanWriteScene(ctx, sid, operator); err != nil { + if err := i.CanWriteScene(sid, operator); err != nil { return nil, nil, err } @@ -71,11 +71,11 @@ func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneI } func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.SceneID, operator 
*usecase.Operator) (_ *plugin.Plugin, _ *scene.Scene, err error) { - if err := i.CanWriteScene(ctx, sid, operator); err != nil { + if err := i.CanWriteScene(sid, operator); err != nil { return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) if err != nil { return nil, nil, err } diff --git a/internal/usecase/interactor/plugin_upload_test.go b/internal/usecase/interactor/plugin_upload_test.go index e83ec65b4..1c04afb36 100644 --- a/internal/usecase/interactor/plugin_upload_test.go +++ b/internal/usecase/interactor/plugin_upload_test.go @@ -97,7 +97,7 @@ func TestPlugin_Upload_New(t *testing.T) { _ = repos.Scene.Save(ctx, scene) uc := &Plugin{ - commonScene: commonScene{sceneRepo: repos.Scene}, + sceneRepo: repos.Scene, pluginRepo: repos.Plugin, propertySchemaRepo: repos.PropertySchema, propertyRepo: repos.Property, @@ -106,8 +106,8 @@ func TestPlugin_Upload_New(t *testing.T) { transaction: repos.Transaction, } op := &usecase.Operator{ - ReadableTeams: []id.TeamID{team}, - WritableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{sid}, } reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) @@ -179,7 +179,7 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { _ = repos.Scene.Save(ctx, scene) uc := &Plugin{ - commonScene: commonScene{sceneRepo: repos.Scene}, + sceneRepo: repos.Scene, pluginRepo: repos.Plugin, propertySchemaRepo: repos.PropertySchema, propertyRepo: repos.Property, @@ -188,8 +188,8 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { transaction: repos.Transaction, } op := &usecase.Operator{ - ReadableTeams: []id.TeamID{team}, - WritableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{sid}, } reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) @@ -305,7 +305,7 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { _ = repos.Scene.Save(ctx, scene) uc := 
&Plugin{ - commonScene: commonScene{sceneRepo: repos.Scene}, + sceneRepo: repos.Scene, pluginRepo: repos.Plugin, propertySchemaRepo: repos.PropertySchema, propertyRepo: repos.Property, @@ -314,15 +314,15 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { transaction: repos.Transaction, } op := &usecase.Operator{ - ReadableTeams: []id.TeamID{team}, - WritableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{sid}, } reader := bytes.NewReader(mockPluginArchiveZip.Bytes()) - oldpl, s, err := uc.Upload(ctx, reader, scene.ID(), op) + oldpl, s2, err := uc.Upload(ctx, reader, sid, op) assert.NoError(t, err) - assert.Equal(t, scene.ID(), s.ID()) + assert.Equal(t, scene.ID(), s2.ID()) assert.Equal(t, pid, oldpl.ID()) // scene diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 0bc54e742..83bcebabb 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -18,7 +18,7 @@ import ( ) type Project struct { - commonScene + common commonSceneLock assetRepo repo.Asset projectRepo repo.Project @@ -36,7 +36,6 @@ type Project struct { func NewProject(r *repo.Container, gr *gateway.Container) interfaces.Project { return &Project{ - commonScene: commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, assetRepo: r.Asset, projectRepo: r.Project, @@ -57,7 +56,7 @@ func (i *Project) Fetch(ctx context.Context, ids []id.ProjectID, operator *useca if err := i.OnlyOperator(operator); err != nil { return nil, err } - return i.projectRepo.FindByIDs(ctx, ids, operator.ReadableTeams) + return i.projectRepo.FindByIDs(ctx, ids, operator.AllReadableTeams()) } func (i *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*project.Project, *usecase.PageInfo, error) { @@ -131,7 +130,7 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o return nil, 
err } - prj, err := i.projectRepo.FindByID(ctx, p.ID, operator.WritableTeams) + prj, err := i.projectRepo.FindByID(ctx, p.ID, operator.AllWritableTeams()) if err != nil { return nil, err } @@ -227,7 +226,6 @@ func (i *Project) CheckAlias(ctx context.Context, alias string) (bool, error) { } func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -242,7 +240,7 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP return nil, err } - prj, err := i.projectRepo.FindByID(ctx, params.ID, operator.WritableTeams) + prj, err := i.projectRepo.FindByID(ctx, params.ID, operator.AllWritableTeams()) if err != nil { return nil, err } @@ -251,7 +249,7 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP return nil, err } - s, err := i.sceneRepo.FindByProject(ctx, params.ID, operator.WritableTeams) + s, err := i.sceneRepo.FindByProject(ctx, params.ID, operator.AllWritableTeams()) if err != nil { return nil, err } @@ -339,8 +337,7 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP prj.UpdatePublishmentStatus(params.Status) prj.SetPublishedAt(time.Now()) - err = i.projectRepo.Save(ctx, prj) - if err != nil { + if err := i.projectRepo.Save(ctx, prj); err != nil { return nil, err } @@ -363,7 +360,7 @@ func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator * return err } - prj, err := i.projectRepo.FindByID(ctx, projectID, operator.WritableTeams) + prj, err := i.projectRepo.FindByID(ctx, projectID, operator.AllWritableTeams()) if err != nil { return err } diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index d2ddbe86b..418243bfe 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -15,7 +15,7 @@ import ( ) type Property struct 
{ - commonScene + common commonSceneLock propertyRepo repo.Property propertySchemaRepo repo.PropertySchema @@ -29,7 +29,6 @@ type Property struct { func NewProperty(r *repo.Container, gr *gateway.Container) interfaces.Property { return &Property{ - commonScene: commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, propertyRepo: r.Property, propertySchemaRepo: r.PropertySchema, @@ -43,7 +42,7 @@ func NewProperty(r *repo.Container, gr *gateway.Container) interfaces.Property { } func (i *Property) Fetch(ctx context.Context, ids []id.PropertyID, operator *usecase.Operator) ([]*property.Property, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -60,7 +59,7 @@ func (i *Property) FetchSchema(ctx context.Context, ids []id.PropertySchemaID, o } func (i *Property) FetchMerged(ctx context.Context, org, parent *id.PropertyID, linked *id.DatasetID, operator *usecase.Operator) (*property.Merged, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -103,7 +102,7 @@ func (i *Property) UpdateValue(ctx context.Context, inp interfaces.UpdatePropert } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, nil, nil, err } @@ -148,7 +147,7 @@ func (i *Property) RemoveField(ctx context.Context, inp interfaces.RemovePropert } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -189,7 +188,7 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara return nil, nil, nil, nil, interfaces.ErrInvalidFile } - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, nil, nil, err } @@ -203,7 +202,7 
@@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara return nil, nil, nil, nil, err } - propertyScene, err := i.sceneRepo.FindByID(ctx, p.Scene(), operator.WritableTeams) + propertyScene, err := i.sceneRepo.FindByID(ctx, p.Scene(), operator.AllWritableTeams()) if err != nil { return nil, nil, nil, nil, err } @@ -267,7 +266,7 @@ func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyVal } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, nil, nil, err } @@ -329,7 +328,7 @@ func (i *Property) UnlinkValue(ctx context.Context, inp interfaces.UnlinkPropert } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, nil, nil, err } @@ -378,7 +377,7 @@ func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemPa } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, nil, err } @@ -428,7 +427,7 @@ func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItem } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, nil, err } @@ -467,7 +466,7 @@ func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemoveProperty } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -495,7 +494,7 @@ func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemoveProperty } func (i *Property) UpdateItems(ctx context.Context, inp interfaces.UpdatePropertyItemsParam, operator *usecase.Operator) (*property.Property, error) { - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } diff 
--git a/internal/usecase/interactor/property_test.go b/internal/usecase/interactor/property_test.go index c479ee53a..b6875d619 100644 --- a/internal/usecase/interactor/property_test.go +++ b/internal/usecase/interactor/property_test.go @@ -33,7 +33,6 @@ func TestProperty_AddItem(t *testing.T) { _ = memory.Property.Save(ctx, p) uc := &Property{ - commonScene: commonScene{sceneRepo: memory.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, propertyRepo: memory.Property, propertySchemaRepo: memory.PropertySchema, @@ -86,7 +85,6 @@ func TestProperty_RemoveItem(t *testing.T) { _ = memory.Property.Save(ctx, p) uc := &Property{ - commonScene: commonScene{sceneRepo: memory.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, propertyRepo: memory.Property, propertySchemaRepo: memory.PropertySchema, @@ -131,15 +129,15 @@ func TestProperty_UpdateValue_FieldOfGroupInList(t *testing.T) { _ = memory.Property.Save(ctx, p) uc := &Property{ - commonScene: commonScene{sceneRepo: memory.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: memory.SceneLock}, + sceneRepo: memory.Scene, propertyRepo: memory.Property, propertySchemaRepo: memory.PropertySchema, transaction: memory.Transaction, } op := &usecase.Operator{ - ReadableTeams: []id.TeamID{team}, - WritableTeams: []id.TeamID{team}, + WritableTeams: []id.TeamID{team}, + WritableScenes: []id.SceneID{scene.ID()}, } np, npl, npg, npf, err := uc.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index ea8ca2c4a..348216664 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -21,7 +21,7 @@ import ( ) type Scene struct { - commonScene + common commonSceneLock sceneRepo repo.Scene sceneLockRepo repo.SceneLock @@ -37,7 +37,6 @@ type Scene struct { func NewScene(r *repo.Container, g *gateway.Container) interfaces.Scene { return &Scene{ - commonScene: 
commonScene{sceneRepo: r.Scene}, commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, sceneRepo: r.Scene, sceneLockRepo: r.SceneLock, @@ -56,14 +55,14 @@ func (i *Scene) Fetch(ctx context.Context, ids []id.SceneID, operator *usecase.O if err := i.OnlyOperator(operator); err != nil { return nil, err } - return i.sceneRepo.FindByIDs(ctx, ids, operator.ReadableTeams) + return i.sceneRepo.FindByIDs(ctx, ids, operator.AllReadableTeams()) } func (i *Scene) FindByProject(ctx context.Context, id id.ProjectID, operator *usecase.Operator) (*scene.Scene, error) { if err := i.OnlyOperator(operator); err != nil { return nil, err } - res, err := i.sceneRepo.FindByProject(ctx, id, operator.ReadableTeams) + res, err := i.sceneRepo.FindByProject(ctx, id, operator.AllReadableTeams()) return res, err } @@ -82,7 +81,7 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. return nil, err } - prj, err := i.projectRepo.FindByID(ctx, pid, operator.WritableTeams) + prj, err := i.projectRepo.FindByID(ctx, pid, operator.AllWritableTeams()) if err != nil { return nil, err } @@ -174,7 +173,7 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) if err != nil { return nil, nil, err } @@ -270,7 +269,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return nil, nil, err } - scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.WritableTeams) + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.AllWritableTeams()) if err2 != nil { return nil, nil, err2 } @@ -352,7 +351,7 @@ func (i *Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.Up return nil, err } - scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.WritableTeams) + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, 
operator.AllWritableTeams()) if err2 != nil { return nil, err2 } @@ -393,7 +392,7 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID return nil, interfaces.ErrOperationDenied } - scene, err2 := i.sceneRepo.FindByID(ctx, id, operator.WritableTeams) + scene, err2 := i.sceneRepo.FindByID(ctx, id, operator.AllWritableTeams()) if err2 != nil { return nil, err2 } @@ -445,7 +444,7 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin return nil, pid, nil, interfaces.ErrOperationDenied } - s, err2 := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + s, err2 := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) if err2 != nil { return nil, pid, nil, err2 } @@ -521,7 +520,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug return nil, err } - scene, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + scene, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) if err != nil { return nil, err } @@ -623,7 +622,7 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, return nil, err } - s, err := i.sceneRepo.FindByID(ctx, sid, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) if err != nil { return nil, err } @@ -704,7 +703,7 @@ func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.AllWritableTeams()) if err != nil { return nil, nil, err } @@ -754,7 +753,7 @@ func (i *Scene) UpdateCluster(ctx context.Context, param interfaces.UpdateCluste return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, param.SceneID, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, param.SceneID, operator.AllWritableTeams()) if err != nil { return nil, nil, err } @@ -797,7 +796,7 @@ func (i *Scene) 
RemoveCluster(ctx context.Context, sceneID id.SceneID, clusterID return nil, err } - s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.WritableTeams) + s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.AllWritableTeams()) if err != nil { return nil, err } diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go index 52678dee2..d0bf305eb 100644 --- a/internal/usecase/interactor/tag.go +++ b/internal/usecase/interactor/tag.go @@ -14,7 +14,7 @@ import ( ) type Tag struct { - commonScene + common tagRepo repo.Tag layerRepo repo.Layer sceneRepo repo.Scene @@ -23,7 +23,6 @@ type Tag struct { func NewTag(r *repo.Container) interfaces.Tag { return &Tag{ - commonScene: commonScene{sceneRepo: r.Scene}, tagRepo: r.Tag, layerRepo: r.Layer, sceneRepo: r.Scene, @@ -42,7 +41,7 @@ func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, } }() - if err := i.CanWriteScene(ctx, inp.SceneID, operator); err != nil { + if err := i.CanWriteScene(inp.SceneID, operator); err != nil { return nil, nil, interfaces.ErrOperationDenied } @@ -99,7 +98,7 @@ func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupPara } }() - if err := i.CanWriteScene(ctx, inp.SceneID, operator); err != nil { + if err := i.CanWriteScene(inp.SceneID, operator); err != nil { return nil, interfaces.ErrOperationDenied } @@ -124,7 +123,7 @@ func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupPara } func (i *Tag) Fetch(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Tag, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -133,8 +132,7 @@ func (i *Tag) Fetch(ctx context.Context, ids []id.TagID, operator *usecase.Opera } func (i *Tag) FetchByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Tag, error) { - err := i.CanReadScene(ctx, sid, operator) - if err != nil { + if 
err := i.CanReadScene(sid, operator); err != nil { return nil, err } @@ -142,7 +140,7 @@ func (i *Tag) FetchByScene(ctx context.Context, sid id.SceneID, operator *usecas } func (i *Tag) FetchItem(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Item, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -151,7 +149,7 @@ func (i *Tag) FetchItem(ctx context.Context, ids []id.TagID, operator *usecase.O } func (i *Tag) FetchGroup(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Group, error) { - scenes, err := i.OnlyReadableScenes(ctx, operator) + scenes, err := i.OnlyReadableScenes(operator) if err != nil { return nil, err } @@ -170,7 +168,7 @@ func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemTo } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -217,7 +215,7 @@ func (i *Tag) DetachItemFromGroup(ctx context.Context, inp interfaces.DetachItem } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, err } @@ -261,7 +259,7 @@ func (i *Tag) UpdateTag(ctx context.Context, inp interfaces.UpdateTagParam, oper } }() - if err := i.CanWriteScene(ctx, inp.SceneID, operator); err != nil { + if err := i.CanWriteScene(inp.SceneID, operator); err != nil { return nil, interfaces.ErrOperationDenied } @@ -293,7 +291,7 @@ func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Oper } }() - scenes, err := i.OnlyWritableScenes(ctx, operator) + scenes, err := i.OnlyWritableScenes(operator) if err != nil { return nil, nil, err } diff --git a/internal/usecase/interactor/team.go b/internal/usecase/interactor/team.go index fdff3135a..6201f82b2 100644 --- a/internal/usecase/interactor/team.go +++ b/internal/usecase/interactor/team.go @@ 
-28,8 +28,8 @@ func NewTeam(r *repo.Container) interfaces.Team { } func (i *Team) Fetch(ctx context.Context, ids []id.TeamID, operator *usecase.Operator) ([]*user.Team, error) { - if operator == nil { - return nil, interfaces.ErrOperationDenied + if err := i.OnlyOperator(operator); err != nil { + return nil, err } res, err := i.teamRepo.FindByIDs(ctx, ids) res2, err := i.filterTeams(res, operator, err) @@ -37,8 +37,8 @@ func (i *Team) Fetch(ctx context.Context, ids []id.TeamID, operator *usecase.Ope } func (i *Team) FindByUser(ctx context.Context, id id.UserID, operator *usecase.Operator) ([]*user.Team, error) { - if operator == nil { - return nil, interfaces.ErrOperationDenied + if err := i.OnlyOperator(operator); err != nil { + return nil, err } res, err := i.teamRepo.FindByUser(ctx, id) res2, err := i.filterTeams(res, operator, err) @@ -46,7 +46,6 @@ func (i *Team) FindByUser(ctx context.Context, id id.UserID, operator *usecase.O } func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID) (_ *user.Team, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -80,7 +79,6 @@ func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID) (_ } func (i *Team) Update(ctx context.Context, id id.TeamID, name string, operator *usecase.Operator) (_ *user.Team, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -118,7 +116,6 @@ func (i *Team) Update(ctx context.Context, id id.TeamID, name string, operator * } func (i *Team) AddMember(ctx context.Context, id id.TeamID, u id.UserID, role user.Role, operator *usecase.Operator) (_ *user.Team, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -164,7 +161,6 @@ func (i *Team) AddMember(ctx context.Context, id id.TeamID, u id.UserID, role us } func (i *Team) RemoveMember(ctx context.Context, id id.TeamID, u id.UserID, operator *usecase.Operator) (_ *user.Team, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ 
-209,7 +205,6 @@ func (i *Team) RemoveMember(ctx context.Context, id id.TeamID, u id.UserID, oper } func (i *Team) UpdateMember(ctx context.Context, id id.TeamID, u id.UserID, role user.Role, operator *usecase.Operator) (_ *user.Team, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -254,7 +249,6 @@ func (i *Team) UpdateMember(ctx context.Context, id id.TeamID, u id.UserID, role } func (i *Team) Remove(ctx context.Context, id id.TeamID, operator *usecase.Operator) (err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -305,7 +299,7 @@ func (i *Team) filterTeams(teams []*user.Team, operator *usecase.Operator, err e return make([]*user.Team, len(teams)), nil } for i, t := range teams { - if t == nil || !operator.IsReadableTeamIncluded(t.ID()) { + if t == nil || !operator.IsReadableTeam(t.ID()) { teams[i] = nil } } diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 491a110ad..08d6ef6bd 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -69,7 +69,7 @@ func (i *User) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Ope teamIDs = append(teamIDs, t.ID()) } } - if !operator.IsReadableTeamsIncluded(teamIDs) { + if !operator.IsReadableTeam(teamIDs...) 
{ res[k] = nil } } diff --git a/internal/usecase/interfaces/dataset.go b/internal/usecase/interfaces/dataset.go index 9f6eee029..15e9ab6e1 100644 --- a/internal/usecase/interfaces/dataset.go +++ b/internal/usecase/interfaces/dataset.go @@ -44,7 +44,7 @@ type ImportDatasetFromGoogleSheetParam struct { } type RemoveDatasetSchemaParam struct { - SchemaId id.DatasetSchemaID + SchemaID id.DatasetSchemaID Force *bool } diff --git a/internal/usecase/operator.go b/internal/usecase/operator.go index e1c606f1b..753edd745 100644 --- a/internal/usecase/operator.go +++ b/internal/usecase/operator.go @@ -2,41 +2,18 @@ package usecase import ( "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/pkg/user" ) type Operator struct { - User id.UserID - ReadableTeams []id.TeamID - WritableTeams []id.TeamID - OwningTeams []id.TeamID -} - -func OperatorFrom(u id.UserID, teams []*user.Team) *Operator { - rt := []id.TeamID{} - wt := []id.TeamID{} - ot := []id.TeamID{} - for _, t := range teams { - r := t.Members().GetRole(u) - if r == user.Role("") { - continue - } - tid := t.ID() - rt = append(rt, tid) - if r == user.RoleWriter { - wt = append(wt, tid) - } else if r == user.RoleOwner { - wt = append(wt, tid) - ot = append(ot, tid) - } - } - - return &Operator{ - User: u, - ReadableTeams: rt, - WritableTeams: wt, - OwningTeams: ot, - } + User user.ID + ReadableTeams user.TeamIDList + WritableTeams user.TeamIDList + OwningTeams user.TeamIDList + ReadableScenes scene.IDList + WritableScenes scene.IDList + OwningScenes scene.IDList } func (o *Operator) Teams(r user.Role) []id.TeamID { @@ -55,80 +32,50 @@ func (o *Operator) Teams(r user.Role) []id.TeamID { return nil } -func (o *Operator) IsReadableTeamIncluded(team id.TeamID) bool { - if o == nil { - return false - } - for _, t := range o.ReadableTeams { - if t == team { - return true - } - } - return false +func (o *Operator) AllReadableTeams() user.TeamIDList { + 
return append(o.ReadableTeams, o.AllWritableTeams()...) } -func (o *Operator) IsWritableTeamIncluded(team id.TeamID) bool { - if o == nil { - return false - } - for _, t := range o.WritableTeams { - if t == team { - return true - } - } - return false +func (o *Operator) AllWritableTeams() user.TeamIDList { + return append(o.WritableTeams, o.AllOwningTeams()...) } -func (o *Operator) IsOwningTeamIncluded(team id.TeamID) bool { - if o == nil { - return false - } - for _, t := range o.OwningTeams { - if t == team { - return true - } - } - return false +func (o *Operator) AllOwningTeams() user.TeamIDList { + return o.OwningTeams } -func (o *Operator) IsReadableTeamsIncluded(teams []id.TeamID) bool { - if o == nil { - return false - } - for _, t := range teams { - for _, t2 := range o.ReadableTeams { - if t == t2 { - return true - } - } - } - return false +func (o *Operator) IsReadableTeam(team ...id.TeamID) bool { + return o.AllReadableTeams().Filter(team...).Len() > 0 } -func (o *Operator) IsWritableTeamsIncluded(teams []id.TeamID) bool { - if o == nil { - return false - } - for _, t := range teams { - for _, t2 := range o.WritableTeams { - if t == t2 { - return true - } - } - } - return false +func (o *Operator) IsWritableTeam(team ...id.TeamID) bool { + return o.AllWritableTeams().Filter(team...).Len() > 0 } -func (o *Operator) IsOwningTeamsIncluded(teams []id.TeamID) bool { - if o == nil { - return false - } - for _, t := range teams { - for _, t2 := range o.OwningTeams { - if t == t2 { - return true - } - } - } - return false +func (o *Operator) IsOwningTeam(team ...id.TeamID) bool { + return o.AllOwningTeams().Filter(team...).Len() > 0 +} + +func (o *Operator) AllReadableScenes() scene.IDList { + return append(o.ReadableScenes, o.AllWritableScenes()...) +} + +func (o *Operator) AllWritableScenes() scene.IDList { + return append(o.WritableScenes, o.AllOwningScenes()...) 
+} + +func (o *Operator) AllOwningScenes() scene.IDList { + return o.OwningScenes +} + +func (o *Operator) IsReadableScene(scene ...id.SceneID) bool { + return o.AllReadableScenes().Includes(scene...) +} + +func (o *Operator) IsWritableScene(scene ...id.SceneID) bool { + return o.AllWritableScenes().Includes(scene...) +} + +func (o *Operator) IsOwningScene(scene ...id.SceneID) bool { + return o.AllOwningScenes().Includes(scene...) } diff --git a/internal/usecase/repo/scene.go b/internal/usecase/repo/scene.go index a599a6ae7..c1c0bebe7 100644 --- a/internal/usecase/repo/scene.go +++ b/internal/usecase/repo/scene.go @@ -9,11 +9,9 @@ import ( type Scene interface { FindByID(context.Context, id.SceneID, []id.TeamID) (*scene.Scene, error) - FindByIDs(context.Context, []id.SceneID, []id.TeamID) ([]*scene.Scene, error) + FindByIDs(context.Context, []id.SceneID, []id.TeamID) (scene.List, error) + FindByTeam(context.Context, ...id.TeamID) (scene.List, error) FindByProject(context.Context, id.ProjectID, []id.TeamID) (*scene.Scene, error) - FindIDsByTeam(context.Context, []id.TeamID) ([]id.SceneID, error) - HasSceneTeam(context.Context, id.SceneID, []id.TeamID) (bool, error) - HasScenesTeam(context.Context, []id.SceneID, []id.TeamID) ([]bool, error) Save(context.Context, *scene.Scene) error Remove(context.Context, id.SceneID) error } diff --git a/internal/usecase/repo/team.go b/internal/usecase/repo/team.go index 60b75f354..cc8c251c6 100644 --- a/internal/usecase/repo/team.go +++ b/internal/usecase/repo/team.go @@ -8,8 +8,8 @@ import ( ) type Team interface { - FindByUser(context.Context, id.UserID) ([]*user.Team, error) - FindByIDs(context.Context, []id.TeamID) ([]*user.Team, error) + FindByUser(context.Context, id.UserID) (user.TeamList, error) + FindByIDs(context.Context, []id.TeamID) (user.TeamList, error) FindByID(context.Context, id.TeamID) (*user.Team, error) Save(context.Context, *user.Team) error SaveAll(context.Context, []*user.Team) error diff --git 
a/pkg/scene/id.go b/pkg/scene/id.go index 0947fac44..57bbbbc82 100644 --- a/pkg/scene/id.go +++ b/pkg/scene/id.go @@ -66,3 +66,36 @@ var ErrInvalidID = id.ErrInvalidID func createdAt(i ID) time.Time { return id.ID(i).Timestamp() } + +type IDList []ID + +func (l IDList) Filter(ids ...ID) IDList { + if l == nil { + return nil + } + + res := make(IDList, 0, len(l)) + for _, t := range l { + for _, t2 := range ids { + if t == t2 { + res = append(res, t) + } + } + } + return res +} + +func (l IDList) Includes(ids ...ID) bool { + for _, t := range l { + for _, t2 := range ids { + if t == t2 { + return true + } + } + } + return false +} + +func (l IDList) Len() int { + return len(l) +} diff --git a/pkg/scene/id_test.go b/pkg/scene/id_test.go new file mode 100644 index 000000000..635b62f3e --- /dev/null +++ b/pkg/scene/id_test.go @@ -0,0 +1,37 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIDList_Filter(t *testing.T) { + t1 := NewID() + t2 := NewID() + t3 := NewID() + t4 := NewID() + assert.Equal(t, IDList{t1}, IDList{t1, t2, t3}.Filter(t1)) + assert.Equal(t, IDList{t1, t3}, IDList{t1, t2, t3}.Filter(t1, t3)) + assert.Equal(t, IDList{}, IDList{t1, t2, t3}.Filter(t4)) + assert.Equal(t, IDList(nil), IDList(nil).Filter(t4)) +} + +func TestIDList_Includes(t *testing.T) { + t1 := NewID() + t2 := NewID() + t3 := NewID() + assert.True(t, IDList{t1, t2, t3}.Includes(t1)) + assert.False(t, IDList{t1, t2}.Includes(t3)) + assert.False(t, IDList(nil).Includes(t1)) +} + +func TestIDList_Len(t *testing.T) { + t1 := NewID() + t2 := NewID() + t3 := NewID() + assert.Equal(t, 2, IDList{t1, t2}.Len()) + assert.Equal(t, 3, IDList{t1, t2, t3}.Len()) + assert.Equal(t, 0, IDList{}.Len()) + assert.Equal(t, 0, IDList(nil).Len()) +} diff --git a/pkg/scene/list.go b/pkg/scene/list.go new file mode 100644 index 000000000..2a05b1897 --- /dev/null +++ b/pkg/scene/list.go @@ -0,0 +1,51 @@ +package scene + +type List []*Scene + +func (l List) IDs() []ID 
{ + if l == nil { + return nil + } + + res := make([]ID, 0, len(l)) + for _, s := range l { + res = append(res, s.ID()) + } + return res +} + +func (l List) FilterByID(ids ...ID) List { + if l == nil { + return nil + } + + res := make(List, 0, len(l)) + for _, s := range l { + sid2 := s.ID() + for _, sid := range ids { + if sid == sid2 { + res = append(res, s) + break + } + } + } + return res +} + +func (l List) FilterByTeam(teams ...TeamID) List { + if l == nil { + return nil + } + + res := make(List, 0, len(l)) + for _, s := range l { + st := s.Team() + for _, t := range teams { + if t == st { + res = append(res, s) + break + } + } + } + return res +} diff --git a/pkg/scene/list_test.go b/pkg/scene/list_test.go new file mode 100644 index 000000000..c1e53d2e4 --- /dev/null +++ b/pkg/scene/list_test.go @@ -0,0 +1,44 @@ +package scene + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_FilterByID(t *testing.T) { + sid1 := NewID() + sid2 := NewID() + t1 := &Scene{id: sid1} + t2 := &Scene{id: sid2} + + assert.Equal(t, List{t1}, List{t1, t2}.FilterByID(sid1)) + assert.Equal(t, List{t2}, List{t1, t2}.FilterByID(sid2)) + assert.Equal(t, List{t1, t2}, List{t1, t2}.FilterByID(sid1, sid2)) + assert.Equal(t, List{}, List{t1, t2}.FilterByID(NewID())) + assert.Equal(t, List(nil), List(nil).FilterByID(sid1)) +} + +func TestList_FilterByTeam(t *testing.T) { + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Scene{id: NewID(), team: tid1} + t2 := &Scene{id: NewID(), team: tid2} + + assert.Equal(t, List{t1}, List{t1, t2}.FilterByTeam(tid1)) + assert.Equal(t, List{t2}, List{t1, t2}.FilterByTeam(tid2)) + assert.Equal(t, List{t1, t2}, List{t1, t2}.FilterByTeam(tid1, tid2)) + assert.Equal(t, List{}, List{t1, t2}.FilterByTeam(NewTeamID())) + assert.Equal(t, List(nil), List(nil).FilterByTeam(tid1)) +} + +func TestList_IDs(t *testing.T) { + sid1 := NewID() + sid2 := NewID() + t1 := &Scene{id: sid1} + t2 := &Scene{id: sid2} + + assert.Equal(t, 
[]ID{sid1, sid2}, List{t1, t2}.IDs()) + assert.Equal(t, []ID{}, List{}.IDs()) + assert.Equal(t, []ID(nil), List(nil).IDs()) +} diff --git a/pkg/user/id.go b/pkg/user/id.go index a051c0c69..fb59016db 100644 --- a/pkg/user/id.go +++ b/pkg/user/id.go @@ -21,3 +21,36 @@ var IDFromRefID = id.UserIDFromRefID var TeamIDFromRefID = id.TeamIDFromRefID var ErrInvalidID = id.ErrInvalidID + +type TeamIDList []TeamID + +func (l TeamIDList) Filter(ids ...TeamID) TeamIDList { + if l == nil { + return nil + } + + res := make(TeamIDList, 0, len(l)) + for _, t := range l { + for _, t2 := range ids { + if t == t2 { + res = append(res, t) + } + } + } + return res +} + +func (l TeamIDList) Includes(ids ...TeamID) bool { + for _, t := range l { + for _, t2 := range ids { + if t == t2 { + return true + } + } + } + return false +} + +func (k TeamIDList) Len() int { + return len(k) +} diff --git a/pkg/user/id_test.go b/pkg/user/id_test.go new file mode 100644 index 000000000..d37cc0912 --- /dev/null +++ b/pkg/user/id_test.go @@ -0,0 +1,37 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTeamIDList_Filter(t *testing.T) { + t1 := NewTeamID() + t2 := NewTeamID() + t3 := NewTeamID() + t4 := NewTeamID() + assert.Equal(t, TeamIDList{t1}, TeamIDList{t1, t2, t3}.Filter(t1)) + assert.Equal(t, TeamIDList{t1, t3}, TeamIDList{t1, t2, t3}.Filter(t1, t3)) + assert.Equal(t, TeamIDList{}, TeamIDList{t1, t2, t3}.Filter(t4)) + assert.Equal(t, TeamIDList(nil), TeamIDList(nil).Filter(t4)) +} + +func TestTeamIDList_Includes(t *testing.T) { + t1 := NewTeamID() + t2 := NewTeamID() + t3 := NewTeamID() + assert.True(t, TeamIDList{t1, t2, t3}.Includes(t1)) + assert.False(t, TeamIDList{t1, t2}.Includes(t3)) + assert.False(t, TeamIDList(nil).Includes(t1)) +} + +func TestTeamIDList_Len(t *testing.T) { + t1 := NewTeamID() + t2 := NewTeamID() + t3 := NewTeamID() + assert.Equal(t, 2, TeamIDList{t1, t2}.Len()) + assert.Equal(t, 3, TeamIDList{t1, t2, t3}.Len()) + 
assert.Equal(t, 0, TeamIDList{}.Len()) + assert.Equal(t, 0, TeamIDList(nil).Len()) +} diff --git a/pkg/user/role.go b/pkg/user/role.go index 53efa91c0..272856142 100644 --- a/pkg/user/role.go +++ b/pkg/user/role.go @@ -6,12 +6,18 @@ import ( ) var ( - // RoleReader is a role who can read project - RoleReader = Role("reader") - // RoleWriter is a role who can read and write project - RoleWriter = Role("writer") // RoleOwner is a role who can have full controll of project RoleOwner = Role("owner") + // RoleWriter is a role who can read and write project + RoleWriter = Role("writer") + // RoleReader is a role who can read project + RoleReader = Role("reader") + + roles = []Role{ + RoleOwner, + RoleWriter, + RoleReader, + } ErrInvalidRole = errors.New("invalid role") ) @@ -20,11 +26,11 @@ type Role string func checkRole(role Role) bool { switch role { - case RoleReader: + case RoleOwner: return true case RoleWriter: return true - case RoleOwner: + case RoleReader: return true } return false @@ -38,3 +44,16 @@ func RoleFromString(r string) (Role, error) { } return role, ErrInvalidRole } + +func (r Role) Includes(role Role) bool { + for i, r2 := range roles { + if r == r2 { + for _, r3 := range roles[i:] { + if role == r3 { + return true + } + } + } + } + return false +} diff --git a/pkg/user/role_test.go b/pkg/user/role_test.go index a0a9c84ba..757a52c06 100644 --- a/pkg/user/role_test.go +++ b/pkg/user/role_test.go @@ -77,3 +77,94 @@ func TestCheckRole(t *testing.T) { }) } } + +func TestRole_Includes(t *testing.T) { + tests := []struct { + Name string + Target Role + Input Role + Expected bool + }{ + { + Name: "reader and reader", + Target: RoleReader, + Input: RoleReader, + Expected: true, + }, + { + Name: "reader and writer", + Target: RoleReader, + Input: RoleWriter, + Expected: false, + }, + { + Name: "reader and owner", + Target: RoleReader, + Input: RoleOwner, + Expected: false, + }, + { + Name: "writer and reader", + Target: RoleWriter, + Input: RoleReader, + 
Expected: true, + }, + { + Name: "writer and writer", + Target: RoleWriter, + Input: RoleWriter, + Expected: true, + }, + { + Name: "writer and owner", + Target: RoleWriter, + Input: RoleOwner, + Expected: false, + }, + { + Name: "owner and readner", + Target: RoleOwner, + Input: RoleReader, + Expected: true, + }, + { + Name: "owner and writer", + Target: RoleOwner, + Input: RoleWriter, + Expected: true, + }, + { + Name: "owner and owner", + Target: RoleOwner, + Input: RoleOwner, + Expected: true, + }, + { + Name: "unknown role", + Target: Role("xxx"), + Input: Role("yyy"), + Expected: false, + }, + { + Name: "unknown role 2", + Target: RoleOwner, + Input: Role("yyy"), + Expected: false, + }, + { + Name: "unknown role 3", + Target: Role("xxx"), + Input: RoleOwner, + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := tt.Target.Includes(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/pkg/user/team_list.go b/pkg/user/team_list.go new file mode 100644 index 000000000..3a557982b --- /dev/null +++ b/pkg/user/team_list.go @@ -0,0 +1,65 @@ +package user + +type TeamList []*Team + +func (l TeamList) FilterByID(ids ...TeamID) TeamList { + if l == nil { + return nil + } + + res := make(TeamList, 0, len(l)) + for _, id := range ids { + var t2 *Team + for _, t := range l { + if t.ID() == id { + t2 = t + break + } + } + if t2 != nil { + res = append(res, t2) + } + } + return res +} +func (l TeamList) FilterByUserRole(u ID, r Role) TeamList { + if l == nil || u.IsNil() || r == "" { + return nil + } + + res := make(TeamList, 0, len(l)) + for _, t := range l { + tr := t.Members().GetRole(u) + if tr == r { + res = append(res, t) + } + } + return res +} + +func (l TeamList) FilterByUserRoleIncluding(u ID, r Role) TeamList { + if l == nil || u.IsNil() || r == "" { + return nil + } + + res := make(TeamList, 0, len(l)) + for _, t := range l { + tr := t.Members().GetRole(u) + if 
tr.Includes(r) { + res = append(res, t) + } + } + return res +} + +func (l TeamList) IDs() []TeamID { + if l == nil { + return nil + } + + res := make([]TeamID, 0, len(l)) + for _, t := range l { + res = append(res, t.ID()) + } + return res +} diff --git a/pkg/user/team_list_test.go b/pkg/user/team_list_test.go new file mode 100644 index 000000000..043493f58 --- /dev/null +++ b/pkg/user/team_list_test.go @@ -0,0 +1,84 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTeamList_FilterByID(t *testing.T) { + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{id: tid1} + t2 := &Team{id: tid2} + + assert.Equal(t, TeamList{t1}, TeamList{t1, t2}.FilterByID(tid1)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByID(tid2)) + assert.Equal(t, TeamList{t1, t2}, TeamList{t1, t2}.FilterByID(tid1, tid2)) + assert.Equal(t, TeamList{}, TeamList{t1, t2}.FilterByID(NewTeamID())) + assert.Equal(t, TeamList(nil), TeamList(nil).FilterByID(tid1)) +} + +func TestTeamList_FilterByUserRole(t *testing.T) { + uid := NewID() + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{ + id: tid1, + members: &Members{ + members: map[ID]Role{ + uid: RoleReader, + }, + }, + } + t2 := &Team{ + id: tid2, + members: &Members{ + members: map[ID]Role{ + uid: RoleOwner, + }, + }, + } + + assert.Equal(t, TeamList{t1}, TeamList{t1, t2}.FilterByUserRole(uid, RoleReader)) + assert.Equal(t, TeamList{}, TeamList{t1, t2}.FilterByUserRole(uid, RoleWriter)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByUserRole(uid, RoleOwner)) + assert.Equal(t, TeamList(nil), TeamList(nil).FilterByUserRole(uid, RoleOwner)) +} + +func TestTeamList_FilterByUserRoleIncluding(t *testing.T) { + uid := NewID() + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{ + id: tid1, + members: &Members{ + members: map[ID]Role{ + uid: RoleReader, + }, + }, + } + t2 := &Team{ + id: tid2, + members: &Members{ + members: map[ID]Role{ + uid: RoleOwner, + }, + }, + } + + 
assert.Equal(t, TeamList{t1, t2}, TeamList{t1, t2}.FilterByUserRoleIncluding(uid, RoleReader)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByUserRoleIncluding(uid, RoleWriter)) + assert.Equal(t, TeamList{t2}, TeamList{t1, t2}.FilterByUserRoleIncluding(uid, RoleOwner)) + assert.Equal(t, TeamList(nil), TeamList(nil).FilterByUserRoleIncluding(uid, RoleOwner)) +} +func TestTeamList_IDs(t *testing.T) { + tid1 := NewTeamID() + tid2 := NewTeamID() + t1 := &Team{id: tid1} + t2 := &Team{id: tid2} + + assert.Equal(t, []TeamID{tid1, tid2}, TeamList{t1, t2}.IDs()) + assert.Equal(t, []TeamID{}, TeamList{}.IDs()) + assert.Equal(t, []TeamID(nil), TeamList(nil).IDs()) +} From db72827e668d1d8eb73372439e6b564515ffb894 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 9 Mar 2022 06:41:07 +0000 Subject: [PATCH 162/253] ci: update github actions dependencies (major) (#118) Co-authored-by: Renovate Bot --- .github/workflows/build.yml | 8 ++++---- .github/workflows/ci.yml | 6 +++--- .github/workflows/release.yml | 6 +++--- .github/workflows/renovate.yml | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e9e93a4d5..015969a79 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,7 +15,7 @@ jobs: new_tag_short: ${{ steps.info.outputs.new_tag_short }} steps: - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Fetch tags run: git fetch --prune --unshallow --tags - name: Get info @@ -43,11 +43,11 @@ jobs: NAME: reearth-backend steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Set up Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: go-version: 1.17 - name: Run GoReleaser for nightly @@ -102,7 +102,7 @@ jobs: IMAGE_NAME: reearth/reearth-backend steps: - name: Checkout - uses: actions/checkout@v2 + uses: 
actions/checkout@v3 - name: Set up QEMU uses: docker/setup-qemu-action@v1 - name: Set up Docker Buildx diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f2c6f9554..6f0a4616c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,12 +10,12 @@ jobs: runs-on: ubuntu-latest steps: - name: set up - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: go-version: 1.17 id: go - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: cache uses: actions/cache@v2 with: @@ -24,7 +24,7 @@ jobs: restore-keys: | ${{ runner.os }}-go- - name: golangci-lint - uses: golangci/golangci-lint-action@v2 + uses: golangci/golangci-lint-action@v3 with: version: v1.43 args: --timeout=10m diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ff5bc7254..a3edea4f0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,7 +16,7 @@ jobs: git config --global user.name "${{ github.actor }}" git config --global user.email "${{ github.actor }}@users.noreply.github.com" - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 token: ${{ secrets.GPT }} @@ -43,7 +43,7 @@ jobs: URL=${URL//\//\\\/} sed -i -E 's///g; s/\(#([0-9]+)\)/([#\1]('"$URL"'\/pull\/\1))/g; s/`([a-zA-Z0-9]+)`/[`\1`]('"$URL"'\/commit\/\1)/g' CHANGELOG.md - name: Generate CHANGELOG_latest.md - uses: actions/github-script@v5 + uses: actions/github-script@v6 with: script: | const fs = require("fs"); @@ -58,7 +58,7 @@ jobs: const m = lines.slice(h[0] + 1, h[1]).join("\n").trim(); fs.writeFileSync("CHANGELOG_latest.md", m); - name: Upload latest changelog - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: changelog-${{ steps.tag.outputs.new_tag }} path: CHANGELOG_latest.md diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml index 37cf8d272..0db94b406 100644 --- a/.github/workflows/renovate.yml +++ 
b/.github/workflows/renovate.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 2 - name: fix From b89c323b107df20241bc7671c636268c6e8db590 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Fri, 11 Mar 2022 10:17:47 +0300 Subject: [PATCH 163/253] feat: authentication system (#108) Co-authored-by: rot1024 Co-authored-by: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Co-authored-by: yk Co-authored-by: maherhamoui6 Co-authored-by: mimoham24 <69579255+mimoham24@users.noreply.github.com> --- docker-compose.yml | 4 + go.mod | 20 +- go.sum | 74 +- internal/adapter/gql/generated.go | 6 +- .../adapter/gql/resolver_mutation_user.go | 5 +- internal/adapter/http/user.go | 76 +- internal/app/app.go | 26 +- internal/app/{auth.go => auth_client.go} | 0 internal/app/auth_server.go | 249 ++++ internal/app/config.go | 71 ++ internal/app/jwt.go | 232 +--- internal/app/main.go | 6 +- internal/app/public.go | 69 +- internal/app/repo.go | 14 + internal/infrastructure/mailer/sendgrid.go | 32 + .../infrastructure/mailer/sendgrid_test.go | 41 + internal/infrastructure/mailer/smtp.go | 121 ++ internal/infrastructure/mailer/smtp_test.go | 144 +++ .../infrastructure/memory/auth_request.go | 75 ++ internal/infrastructure/memory/user.go | 35 + internal/infrastructure/mongo/auth_request.go | 64 ++ internal/infrastructure/mongo/container.go | 1 + .../mongo/mongodoc/auth_request.go | 116 ++ .../infrastructure/mongo/mongodoc/config.go | 24 +- .../infrastructure/mongo/mongodoc/user.go | 89 +- internal/infrastructure/mongo/user.go | 12 + internal/usecase/gateway/mailer.go | 7 +- internal/usecase/interactor/auth.go | 403 +++++++ internal/usecase/interactor/common.go | 3 +- .../usecase/interactor/emails/auth_html.tmpl | 435 +++++++ .../usecase/interactor/emails/auth_text.tmpl | 7 + internal/usecase/interactor/user.go | 388 ++++++- 
internal/usecase/interfaces/user.go | 30 +- internal/usecase/repo/auth_request.go | 16 + internal/usecase/repo/container.go | 1 + internal/usecase/repo/user.go | 2 + pkg/auth/builder.go | 102 ++ pkg/auth/client.go | 115 ++ pkg/auth/request.go | 143 +++ pkg/config/config.go | 6 + pkg/id/auth_request_gen.go | 297 +++++ pkg/id/auth_request_gen_test.go | 1011 +++++++++++++++++ pkg/id/gen.go | 2 + pkg/log/log.go | 4 + pkg/tag/list_test.go | 2 +- pkg/user/auth.go | 16 +- pkg/user/auth_test.go | 27 + pkg/user/builder.go | 32 +- pkg/user/builder_test.go | 120 +- pkg/user/initializer.go | 9 +- pkg/user/initializer_test.go | 62 +- pkg/user/password_reset.go | 44 + pkg/user/password_reset_test.go | 103 ++ pkg/user/user.go | 126 +- pkg/user/user_test.go | 296 +++++ pkg/user/verification.go | 71 ++ pkg/user/verification_test.go | 215 ++++ schema.graphql | 6 +- 58 files changed, 5350 insertions(+), 357 deletions(-) rename internal/app/{auth.go => auth_client.go} (100%) create mode 100644 internal/app/auth_server.go create mode 100644 internal/infrastructure/mailer/sendgrid.go create mode 100644 internal/infrastructure/mailer/sendgrid_test.go create mode 100644 internal/infrastructure/mailer/smtp.go create mode 100644 internal/infrastructure/mailer/smtp_test.go create mode 100644 internal/infrastructure/memory/auth_request.go create mode 100644 internal/infrastructure/mongo/auth_request.go create mode 100644 internal/infrastructure/mongo/mongodoc/auth_request.go create mode 100644 internal/usecase/interactor/auth.go create mode 100644 internal/usecase/interactor/emails/auth_html.tmpl create mode 100644 internal/usecase/interactor/emails/auth_text.tmpl create mode 100644 internal/usecase/repo/auth_request.go create mode 100644 pkg/auth/builder.go create mode 100644 pkg/auth/client.go create mode 100644 pkg/auth/request.go create mode 100644 pkg/id/auth_request_gen.go create mode 100644 pkg/id/auth_request_gen_test.go create mode 100644 pkg/user/password_reset.go create mode 100644 
pkg/user/password_reset_test.go create mode 100644 pkg/user/verification.go create mode 100644 pkg/user/verification_test.go diff --git a/docker-compose.yml b/docker-compose.yml index 74e85d775..942e5c7ad 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,6 +12,10 @@ services: environment: REEARTH_ENV: docker REEARTH_DB_URL: mongodb://reearth-mongo + REEARTH_MAILER: smtp + REEARTH_SMTP_URL: #add later + REEARTH_SMTP_USER: #add later + REEARTH_SMTP_PASSWORD: #add later depends_on: - reearth-mongo reearth-mongo: diff --git a/go.mod b/go.mod index 7804b96f4..deed69fdd 100644 --- a/go.mod +++ b/go.mod @@ -5,11 +5,13 @@ require ( cloud.google.com/go/storage v1.21.0 github.com/99designs/gqlgen v0.17.1 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 - github.com/auth0/go-jwt-middleware v1.0.1 + github.com/auth0/go-jwt-middleware/v2 v2.0.0 github.com/blang/semver v3.5.1+incompatible - github.com/form3tech-oss/jwt-go v3.2.5+incompatible + github.com/caos/oidc v1.0.0 github.com/goccy/go-yaml v1.9.5 + github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f github.com/google/uuid v1.3.0 + github.com/gorilla/mux v1.8.0 github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d github.com/jarcoal/httpmock v1.1.0 @@ -24,6 +26,7 @@ require ( github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 github.com/ravilushqa/otelgqlgen v0.5.1 + github.com/sendgrid/sendgrid-go v3.11.1+incompatible github.com/sirupsen/logrus v1.8.1 github.com/spf13/afero v1.8.1 github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 @@ -38,11 +41,13 @@ require ( go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 go.opentelemetry.io/otel v1.4.1 go.opentelemetry.io/otel/sdk v1.4.1 + golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce golang.org/x/text v0.3.7 golang.org/x/tools v0.1.9 google.golang.org/api v0.70.0 gopkg.in/go-playground/colors.v1 
v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 + gopkg.in/square/go-jose.v2 v2.6.0 ) require ( @@ -54,10 +59,12 @@ require ( github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect + github.com/caos/logging v0.0.2 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dgryski/trifles v0.0.0-20200705224438-cafc02a1ee2b // indirect github.com/fatih/color v1.12.0 // indirect + github.com/felixge/httpsnoop v1.0.2 // indirect github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect github.com/go-logr/logr v1.2.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect @@ -69,6 +76,10 @@ require ( github.com/google/go-cmp v0.5.7 // indirect github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 // indirect github.com/googleapis/gax-go/v2 v2.1.1 // indirect + github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect + github.com/gorilla/handlers v1.5.1 // indirect + github.com/gorilla/schema v1.2.0 // indirect + github.com/gorilla/securecookie v1.1.1 // indirect github.com/gorilla/websocket v1.4.2 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect github.com/hashicorp/golang-lru v0.5.4 // indirect @@ -76,11 +87,12 @@ require ( github.com/matryer/moq v0.2.3 // indirect github.com/mattn/go-colorable v0.1.11 // indirect github.com/mattn/go-isatty v0.0.14 // indirect - github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sendgrid/rest v2.6.6+incompatible // indirect github.com/smartystreets/assertions v1.1.1 // indirect + github.com/smartystreets/goconvey v1.6.4 // indirect github.com/stretchr/objx v0.2.0 // indirect 
github.com/tidwall/pretty v1.0.1 // indirect github.com/urfave/cli/v2 v2.3.0 // indirect @@ -94,7 +106,6 @@ require ( go.opentelemetry.io/contrib v1.4.0 // indirect go.opentelemetry.io/otel/trace v1.4.1 // indirect go.uber.org/atomic v1.7.0 // indirect - golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa // indirect golang.org/x/mod v0.5.1 // indirect golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect @@ -107,7 +118,6 @@ require ( google.golang.org/grpc v1.44.0 // indirect google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect - gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) diff --git a/go.sum b/go.sum index 80bd7c534..d66fe4c6a 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,4 @@ +cloud.google.com/go v0.16.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -89,12 +90,17 @@ github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= -github.com/auth0/go-jwt-middleware v1.0.1 h1:/fsQ4vRr4zod1wKReUH+0A3ySRjGiT9G34kypO/EKwI= -github.com/auth0/go-jwt-middleware v1.0.1/go.mod h1:YSeUX3z6+TF2H+7padiEqNJ73Zy9vXW72U//IgN0BIM= +github.com/auth0/go-jwt-middleware/v2 v2.0.0 h1:jft2yYteA6wpwTj1uxSLwE0TlHCjodMQvX7+eyqJiOQ= 
+github.com/auth0/go-jwt-middleware/v2 v2.0.0/go.mod h1:/y7nPmfWDnJhCbFq22haCAU7vufwsOUzTthLVleE6/8= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= github.com/aws/aws-sdk-go v1.35.5/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bradfitz/gomemcache v0.0.0-20170208213004-1952afaa557d/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= +github.com/caos/logging v0.0.2 h1:ebg5C/HN0ludYR+WkvnFjwSExF4wvyiWPyWGcKMYsoo= +github.com/caos/logging v0.0.2/go.mod h1:9LKiDE2ChuGv6CHYif/kiugrfEXu9AwDiFWSreX7Wp0= +github.com/caos/oidc v1.0.0 h1:3sHkYf8zsuARR89qO9CyvfYhHGdliWPcou4glzGMXmQ= +github.com/caos/oidc v1.0.0/go.mod h1:4l0PPwdc6BbrdCFhNrRTUddsG292uHGa7gE2DSEIqoU= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -132,11 +138,11 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/felixge/httpsnoop v1.0.2 h1:+nS9g82KMXccJ/wp0zyRW9ZBHFETmMGtkk+2CTTrW4o= github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible 
h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= +github.com/fsnotify/fsnotify v1.4.3-0.20170329110642-4da3e2cfbabc/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/garyburd/redigo v1.1.1-0.20170914051019-70e1b1943d4f/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 h1:Uc+IZ7gYqAf/rSGFplbWBSHaGolEQlNLgMgSE3ccnIQ= github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813/go.mod h1:P+oSoE9yhSRvsmYyZsshflcR6ePWYLql6UU1amW13IM= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= @@ -158,6 +164,7 @@ github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+ github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.6.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.1 h1:UQhStjbkDClarlmv0am7OXXO4/GaPdCGiUiMTvi28sg= @@ -190,12 +197,15 @@ github.com/goccy/go-yaml v1.9.5 h1:Eh/+3uk9kLxG4koCX6lRMAPS1OaMSAi+FJcya0INdB0= github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f h1:16RtHeWGkJMc80Etb8RPCcKevXGldr57+LOyZt8zOlg= +github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f/go.mod 
h1:ijRvpgDJDI262hYq/IQVYgf8hd8IHUs93Ol0kvMBAx4= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/lint v0.0.0-20170918230701-e5d664eb928e/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= @@ -224,12 +234,14 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree 
v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.1.1-0.20171103154506-982329095285/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -244,6 +256,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-github/v31 v31.0.0/go.mod h1:NQPZol8/1sMoWYGN2yaALIBytu17gAWfhbweiEed3pM= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -272,6 +286,8 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go v2.0.0+incompatible h1:j0GKcs05QVmm7yesiZq2+9cxHkNK9YM6zKx4D2qucQU= +github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go/v2 v2.0.4/go.mod 
h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -283,10 +299,17 @@ github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c2 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= -github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= +github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= +github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= +github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= +github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gregjones/httpcache v0.0.0-20170920190843-316c5e0ff04e/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/h2non/parth 
v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= @@ -294,6 +317,7 @@ github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v0.0.0-20170914154624-68e816d1c783/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= @@ -303,6 +327,7 @@ github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1: github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d h1:sQbbvtUoen3Tfl9G/079tXeqniwPH6TgM/lU4y7lQN8= github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:xVHEhsiSJJnT0jlcQpQUg+GyoLf0i0xciM1kqWTGT58= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/log15 v0.0.0-20170622235902-74a0988b5f80/go.mod h1:cOaXtrgN4ScfRrD9Bre7U1thNq5RtJ8ZoP4iXVGRj6o= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE= github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= @@ -333,6 +358,7 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxv github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= 
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= @@ -345,19 +371,23 @@ github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3 github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= +github.com/magiconair/properties v1.7.4-0.20170902060319-8d7837e64d3c/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/matryer/moq v0.2.3 h1:Q06vEqnBYjjfx5KKgHfYRKE/lvlRu+Nj+xodG4YdHnU= github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= +github.com/mattn/go-colorable v0.0.10-0.20170816031813-ad5389df28cd/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.2/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/mapstructure v0.0.0-20170523030023-d0303fe80992/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= @@ -373,6 +403,7 @@ github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+ github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/paulmach/go.geojson v1.4.0 h1:5x5moCkCtDo5x8af62P9IOAYGQcYHtxz2QJ3x1DoCgY= github.com/paulmach/go.geojson v1.4.0/go.mod h1:YaKx1hKpWF+T2oj2lFJPsW/t1Q5e1jQI61eoQSTwpIs= +github.com/pelletier/go-toml v1.0.1-0.20170904195809-1d6b12b7cb29/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -391,6 +422,10 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sendgrid/rest 
v2.6.6+incompatible h1:3rO5UTPhLQo6fjytWwdwRWclP101CqErg2klf8LneB4= +github.com/sendgrid/rest v2.6.6+incompatible/go.mod h1:kXX7q3jZtJXK5c5qK83bSGMdV6tsOE70KbHoqJls4lE= +github.com/sendgrid/sendgrid-go v3.11.1+incompatible h1:ai0+woZ3r/+tKLQExznak5XerOFoD6S7ePO0lMV8WXo= +github.com/sendgrid/sendgrid-go v3.11.1+incompatible/go.mod h1:QRQt+LX/NmgVEvmdRw0VT/QgUn499+iza2FnDca9fg8= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= @@ -400,16 +435,20 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/assertions v1.1.1 h1:T/YLemO5Yp7KPzS+lVtu+WsHn8yoSwTfItdAd1r3cck= github.com/smartystreets/assertions v1.1.1/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v0.0.0-20170901052352-ee1bd8ee15a1/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.8.1 h1:izYHOT71f9iZ7iq37Uqjael60/vYC6vMtzedudZ0zEk= github.com/spf13/afero v1.8.1/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/spf13/cast v1.1.0/go.mod 
h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/jwalterweatherman v0.0.0-20170901151539-12bd96e66386/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.1-0.20170901120850-7aff26db30c1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/viper v1.0.0/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM= github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 h1:Fod/tm/5c19889+T6j7mXxg/tEJrcLuDJxR/98raj80= github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2/go.mod h1:h98Zzl76KWv7bG0FHBMA9MAcDhwcIyE7q570tDP7CmY= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -437,8 +476,6 @@ github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVK github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= -github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= -github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= @@ -524,8 +561,9 @@ golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= 
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa h1:idItI2DDfCokpg0N51B2VtiLdJ4vAuXC9fnCb2gACo4= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce h1:Roh6XWxHFKrPgC/EQhVubSAGQ6Ozk6IdxHSzt1mR0EI= +golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -602,8 +640,10 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210913180222-943fd674d43e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod 
h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -622,6 +662,7 @@ golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20170517211232-f52d1811a629/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -654,6 +695,7 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191206220618-eeba5f6aabab/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -719,6 +761,7 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7 
h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -793,6 +836,7 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.0.0-20170921000349-586095a6e407/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -841,6 +885,7 @@ google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20170918111702-1e559d0a00ee/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto 
v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -920,6 +965,7 @@ google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c h1:TU4rFa5APdKTq0s6B7WTsH6Xmx0Knj86s6Biz56mErE= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/grpc v1.2.1-0.20170921194603-d4b75ebd4f9f/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -968,16 +1014,20 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b h1:QRR6H1YWRnHb4Y/HeNFCTJLFVxaq6wH4YuVdsUOr75U= +gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/go-playground/colors.v1 v1.2.0 
h1:SPweMUve+ywPrfwao+UvfD5Ah78aOLUkT5RlJiZn52c= gopkg.in/go-playground/colors.v1 v1.2.0/go.mod h1:AvbqcMpNXVl5gBrM20jBm3VjjKBbH/kI5UnqjU7lxFI= gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY= gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= +gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI= +gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index f7e13ce8f..91f53d42d 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -7522,13 +7522,13 @@ type RemoveAssetPayload { assetId: ID! } -type SignupPayload { +type UpdateMePayload { user: User! - team: Team! } -type UpdateMePayload { +type SignupPayload { user: User! + team: Team! 
} type DeleteMePayload { diff --git a/internal/adapter/gql/resolver_mutation_user.go b/internal/adapter/gql/resolver_mutation_user.go index e62362359..b42475f12 100644 --- a/internal/adapter/gql/resolver_mutation_user.go +++ b/internal/adapter/gql/resolver_mutation_user.go @@ -14,13 +14,14 @@ func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInpu secret = *input.Secret } + sub := getSub(ctx) u, team, err := usecases(ctx).User.Signup(ctx, interfaces.SignupParam{ - Sub: getSub(ctx), + Sub: &sub, Lang: input.Lang, Theme: gqlmodel.ToTheme(input.Theme), UserID: id.UserIDFromRefID(input.UserID), TeamID: id.TeamIDFromRefID(input.TeamID), - Secret: secret, + Secret: &secret, }) if err != nil { return nil, err diff --git a/internal/adapter/http/user.go b/internal/adapter/http/user.go index 414ca04eb..b2d7a0a3f 100644 --- a/internal/adapter/http/user.go +++ b/internal/adapter/http/user.go @@ -17,6 +17,31 @@ func NewUserController(usecase interfaces.User) *UserController { } } +type PasswordResetInput struct { + Email string `json:"email"` + Token string `json:"token"` + Password string `json:"password"` +} + +type SignupInput struct { + Sub *string `json:"sub"` + Secret *string `json:"secret"` + UserID *id.UserID `json:"userId"` + TeamID *id.TeamID `json:"teamId"` + Name *string `json:"username"` + Email *string `json:"email"` + Password *string `json:"password"` +} + +type CreateVerificationInput struct { + Email string `json:"email"` +} + +type VerifyUserOutput struct { + UserID string `json:"userId"` + Verified bool `json:"verified"` +} + type CreateUserInput struct { Sub string `json:"sub"` Secret string `json:"secret"` @@ -24,26 +49,63 @@ type CreateUserInput struct { TeamID *id.TeamID `json:"teamId"` } -type CreateUserOutput struct { +type SignupOutput struct { ID string `json:"id"` Name string `json:"name"` Email string `json:"email"` } -func (c *UserController) CreateUser(ctx context.Context, input CreateUserInput) (interface{}, error) { 
+func (c *UserController) Signup(ctx context.Context, input SignupInput) (interface{}, error) { u, _, err := c.usecase.Signup(ctx, interfaces.SignupParam{ - Sub: input.Sub, - Secret: input.Secret, - UserID: input.UserID, - TeamID: input.TeamID, + Sub: input.Sub, + Secret: input.Secret, + UserID: input.UserID, + TeamID: input.TeamID, + Name: input.Name, + Email: input.Email, + Password: input.Password, }) if err != nil { return nil, err } + if err := c.usecase.CreateVerification(ctx, *input.Email); err != nil { + return nil, err + } - return CreateUserOutput{ + return SignupOutput{ ID: u.ID().String(), Name: u.Name(), Email: u.Email(), }, nil } + +func (c *UserController) CreateVerification(ctx context.Context, input CreateVerificationInput) error { + if err := c.usecase.CreateVerification(ctx, input.Email); err != nil { + return err + } + return nil +} + +func (c *UserController) VerifyUser(ctx context.Context, code string) (interface{}, error) { + u, err := c.usecase.VerifyUser(ctx, code) + if err != nil { + return nil, err + } + return VerifyUserOutput{ + UserID: u.ID().String(), + Verified: u.Verification().IsVerified(), + }, nil +} + +func (c *UserController) StartPasswordReset(ctx context.Context, input PasswordResetInput) error { + err := c.usecase.StartPasswordReset(ctx, input.Email) + if err != nil { + return err + } + + return nil +} + +func (c *UserController) PasswordReset(ctx context.Context, input PasswordResetInput) error { + return c.usecase.PasswordReset(ctx, input.Password, input.Token) +} diff --git a/internal/app/app.go b/internal/app/app.go index 864c017a6..58afca7f1 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -1,6 +1,7 @@ package app import ( + "context" "errors" "io/fs" "net/http" @@ -16,7 +17,7 @@ import ( "go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho" ) -func initEcho(cfg *ServerConfig) *echo.Echo { +func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { if cfg.Config == nil { 
log.Fatalln("ServerConfig.Config is nil") } @@ -68,27 +69,34 @@ func initEcho(cfg *ServerConfig) *echo.Echo { SignupSecret: cfg.Config.SignupSecret, PublishedIndexHTML: publishedIndexHTML, PublishedIndexURL: cfg.Config.Published.IndexURL, + AuthSrvUIDomain: cfg.Config.AuthSrv.UIDomain, }) e.Use(UsecaseMiddleware(&usecases)) + // auth srv + auth := e.Group("") + authEndPoints(ctx, e, auth, cfg) + // apis api := e.Group("/api") api.GET("/ping", Ping()) api.POST("/signup", Signup()) + api.POST("/signup/verify", StartSignupVerify()) + api.POST("/signup/verify/:code", SignupVerify()) + api.POST("/password-reset", PasswordReset()) api.GET("/published/:name", PublishedMetadata()) api.GET("/published_data/:name", PublishedData()) privateApi := api.Group("") - jwks := &JwksSyncOnce{} - authRequired(privateApi, jwks, cfg) + authRequired(privateApi, cfg) graphqlAPI(e, privateApi, cfg) privateAPI(e, privateApi, cfg.Repos) published := e.Group("/p") - auth := PublishedAuthMiddleware() - published.GET("/:name/data.json", PublishedData(), auth) - published.GET("/:name/", PublishedIndex(), auth) + publishedAuth := PublishedAuthMiddleware() + published.GET("/:name/data.json", PublishedData(), publishedAuth) + published.GET("/:name/", PublishedIndex(), publishedAuth) serveFiles(e, cfg.Gateways.File) web(e, cfg.Config.Web, cfg.Config.Auth0) @@ -113,9 +121,9 @@ func errorHandler(next func(error, echo.Context)) func(error, echo.Context) { } } -func authRequired(g *echo.Group, jwks Jwks, cfg *ServerConfig) { - g.Use(jwtEchoMiddleware(jwks, cfg)) - g.Use(parseJwtMiddleware(cfg)) +func authRequired(g *echo.Group, cfg *ServerConfig) { + g.Use(jwtEchoMiddleware(cfg)) + g.Use(parseJwtMiddleware()) g.Use(authMiddleware(cfg)) } diff --git a/internal/app/auth.go b/internal/app/auth_client.go similarity index 100% rename from internal/app/auth.go rename to internal/app/auth_client.go diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go new file mode 100644 index 
000000000..bdb895da0 --- /dev/null +++ b/internal/app/auth_server.go @@ -0,0 +1,249 @@ +package app + +import ( + "context" + "crypto/sha256" + "encoding/json" + "net/http" + "net/url" + "strings" + + "github.com/caos/oidc/pkg/op" + "github.com/golang/gddo/httputil/header" + "github.com/gorilla/mux" + "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" +) + +var ( + loginEndpoint = "api/login" + logoutEndpoint = "api/logout" + jwksEndpoint = ".well-known/jwks.json" +) + +func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *ServerConfig) { + + userUsecase := interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, cfg.Config.AuthSrv.UIDomain) + + domain, err := url.Parse(cfg.Config.AuthSrv.Domain) + if err != nil { + panic("not valid auth domain") + } + domain.Path = "/" + + config := &op.Config{ + Issuer: domain.String(), + CryptoKey: sha256.Sum256([]byte(cfg.Config.AuthSrv.Key)), + GrantTypeRefreshToken: true, + } + + var dn *interactor.AuthDNConfig = nil + if cfg.Config.AuthSrv.DN != nil { + dn = &interactor.AuthDNConfig{ + CommonName: cfg.Config.AuthSrv.DN.CN, + Organization: cfg.Config.AuthSrv.DN.O, + OrganizationalUnit: cfg.Config.AuthSrv.DN.OU, + Country: cfg.Config.AuthSrv.DN.C, + Locality: cfg.Config.AuthSrv.DN.L, + Province: cfg.Config.AuthSrv.DN.ST, + StreetAddress: cfg.Config.AuthSrv.DN.Street, + PostalCode: cfg.Config.AuthSrv.DN.PostalCode, + } + } + + storage, err := interactor.NewAuthStorage( + ctx, + &interactor.StorageConfig{ + Domain: domain.String(), + Debug: cfg.Debug, + DN: dn, + }, + cfg.Repos.AuthRequest, + cfg.Repos.Config, + userUsecase.GetUserBySubject, + ) + if err != nil { + e.Logger.Fatal(err) + } + + handler, err := op.NewOpenIDProvider( + ctx, + config, + storage, + op.WithHttpInterceptors(jsonToFormHandler()), + op.WithHttpInterceptors(setURLVarsHandler()), + 
op.WithCustomEndSessionEndpoint(op.NewEndpoint(logoutEndpoint)), + op.WithCustomKeysEndpoint(op.NewEndpoint(jwksEndpoint)), + ) + if err != nil { + e.Logger.Fatal(err) + } + + router := handler.HttpHandler().(*mux.Router) + + if err := router.Walk(muxToEchoMapper(r)); err != nil { + e.Logger.Fatal(err) + } + + // Actual login endpoint + r.POST(loginEndpoint, login(ctx, cfg, storage, userUsecase)) + + r.GET(logoutEndpoint, logout()) + + // used for auth0/auth0-react; the logout endpoint URL is hard-coded + // can be removed when the mentioned issue is solved + // https://github.com/auth0/auth0-spa-js/issues/845 + r.GET("v2/logout", logout()) + +} + +func setURLVarsHandler() func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/authorize/callback" { + handler.ServeHTTP(w, r) + return + } + + r2 := mux.SetURLVars(r, map[string]string{"id": r.URL.Query().Get("id")}) + handler.ServeHTTP(w, r2) + }) + } +} + +func jsonToFormHandler() func(handler http.Handler) http.Handler { + return func(handler http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/oauth/token" { + handler.ServeHTTP(w, r) + return + } + + if r.Header.Get("Content-Type") != "" { + value, _ := header.ParseValueAndParams(r.Header, "Content-Type") + if value != "application/json" { + // Content-Type header is not application/json + handler.ServeHTTP(w, r) + return + } + } + + if err := r.ParseForm(); err != nil { + return + } + + var result map[string]string + + if err := json.NewDecoder(r.Body).Decode(&result); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + for key, value := range result { + r.Form.Set(key, value) + } + + handler.ServeHTTP(w, r) + }) + } +} + +func muxToEchoMapper(r *echo.Group) func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error { + 
return func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error { + path, err := route.GetPathTemplate() + if err != nil { + return err + } + + methods, err := route.GetMethods() + if err != nil { + r.Any(path, echo.WrapHandler(route.GetHandler())) + return nil + } + + for _, method := range methods { + r.Add(method, path, echo.WrapHandler(route.GetHandler())) + } + + return nil + } +} + +type loginForm struct { + Email string `json:"username" form:"username"` + Password string `json:"password" form:"password"` + AuthRequestID string `json:"id" form:"id"` +} + +func login(ctx context.Context, cfg *ServerConfig, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { + return func(ec echo.Context) error { + + request := new(loginForm) + err := ec.Bind(request) + if err != nil { + ec.Logger().Error("filed to parse login request") + return err + } + + authRequest, err := storage.AuthRequestByID(ctx, request.AuthRequestID) + if err != nil { + ec.Logger().Error("filed to parse login request") + return err + } + + if len(request.Email) == 0 || len(request.Password) == 0 { + ec.Logger().Error("credentials are not provided") + return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "invalid login")) + } + + // check user credentials from db + user, err := userUsecase.GetUserByCredentials(ctx, interfaces.GetUserByCredentials{ + Email: request.Email, + Password: request.Password, + }) + if err != nil { + ec.Logger().Error("wrong credentials!") + return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "invalid login")) + } + + // Complete the auth request && set the subject + err = storage.(*interactor.AuthStorage).CompleteAuthRequest(ctx, request.AuthRequestID, user.GetAuthByProvider("reearth").Sub) + if err != nil { + ec.Logger().Error("failed to complete the auth request !") + return ec.Redirect(http.StatusFound, 
redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "invalid login")) + } + + return ec.Redirect(http.StatusFound, "/authorize/callback?id="+request.AuthRequestID) + } +} + +func logout() func(ec echo.Context) error { + return func(ec echo.Context) error { + u := ec.QueryParam("returnTo") + return ec.Redirect(http.StatusTemporaryRedirect, u) + } +} + +func redirectURL(domain string, secure bool, requestID string, error string) string { + domain = strings.TrimPrefix(domain, "http://") + domain = strings.TrimPrefix(domain, "https://") + + schema := "http" + if secure { + schema = "https" + } + + u := url.URL{ + Scheme: schema, + Host: domain, + Path: "login", + } + + queryValues := u.Query() + queryValues.Set("id", requestID) + queryValues.Set("error", error) + u.RawQuery = queryValues.Encode() + + return u.String() +} diff --git a/internal/app/config.go b/internal/app/config.go index d6c7e1a51..8ae478a88 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -1,6 +1,7 @@ package app import ( + "encoding/json" "fmt" "net/url" "os" @@ -18,6 +19,11 @@ type Config struct { Dev bool DB string `default:"mongodb://localhost"` Auth0 Auth0Config + AuthSrv AuthSrvConfig + Auth AuthConfigs + Mailer string + SMTP SMTPConfig + SendGrid SendGridConfig GraphQL GraphQLConfig Published PublishedConfig GCPProject string `envconfig:"GOOGLE_CLOUD_PROJECT"` @@ -39,6 +45,24 @@ type Auth0Config struct { WebClientID string } +type AuthSrvConfig struct { + Domain string `default:"http://localhost:8080"` + UIDomain string `default:"http://localhost:3000"` + Key string + DN *AuthDNConfig +} + +type AuthDNConfig struct { + CN string + O []string + OU []string + C []string + L []string + ST []string + Street []string + PostalCode []string +} + type GraphQLConfig struct { ComplexityLimit int `default:"6000"` } @@ -52,6 +76,20 @@ type GCSConfig struct { PublicationCacheControl string } +type SendGridConfig struct { + Email string + Name string + API string +} 
+ +type SMTPConfig struct { + Host string + Port string + SMTPUsername string + Email string + Password string +} + func ReadConfig(debug bool) (*Config, error) { // load .env if err := godotenv.Load(".env"); err != nil && !os.IsNotExist(err) { @@ -80,3 +118,36 @@ func (c Config) Print() string { } return s } + +type AuthConfig struct { + ISS string + AUD []string + ALG *string + TTL *int +} + +type AuthConfigs []AuthConfig + +// Decode is a custom decoder for AuthConfigs +func (ipd *AuthConfigs) Decode(value string) error { + var providers []AuthConfig + + err := json.Unmarshal([]byte(value), &providers) + if err != nil { + return fmt.Errorf("invalid identity providers json: %w", err) + } + + for i := range providers { + if providers[i].TTL == nil { + providers[i].TTL = new(int) + *providers[i].TTL = 5 + } + if providers[i].ALG == nil { + providers[i].ALG = new(string) + *providers[i].ALG = "RS256" + } + } + + *ipd = providers + return nil +} diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 1cececd81..cd51e5e64 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -2,17 +2,13 @@ package app import ( "context" - "encoding/json" - "errors" - "net/http" - "strings" - "sync" - - jwtmiddleware "github.com/auth0/go-jwt-middleware" - // TODO: github.com/form3tech-oss/jwt-go is decrepated. 
- // Alternative is https://github.com/golang-jwt/jwt, but go-jwt-middleware still uses github.comform3tech-oss/jwt-go - // See also https://github.com/auth0/go-jwt-middleware/issues/73 - "github.com/form3tech-oss/jwt-go" + "fmt" + "net/url" + "time" + + jwtmiddleware "github.com/auth0/go-jwt-middleware/v2" + "github.com/auth0/go-jwt-middleware/v2/jwks" + "github.com/auth0/go-jwt-middleware/v2/validator" "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/pkg/log" ) @@ -20,120 +16,77 @@ import ( type contextKey string const ( - userProfileKey = "auth0_user" - debugUserHeader = "X-Reearth-Debug-User" - contextAuth0AccessToken contextKey = "auth0AccessToken" - contextAuth0Sub contextKey = "auth0Sub" - contextUser contextKey = "reearth_user" + debugUserHeader = "X-Reearth-Debug-User" + contextAuth0Sub contextKey = "auth0Sub" + contextUser contextKey = "reearth_user" ) -type JSONWebKeys struct { - Kty string `json:"kty"` - Kid string `json:"kid"` - Use string `json:"use"` - N string `json:"n"` - E string `json:"e"` - X5c []string `json:"x5c"` -} +type MultiValidator []*validator.Validator -type Jwks interface { - GetJwks(string) ([]JSONWebKeys, error) -} +func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { + validators := make([]*validator.Validator, 0, len(providers)) + for _, p := range providers { -type JwksSyncOnce struct { - jwks []JSONWebKeys - once sync.Once -} - -func (jso *JwksSyncOnce) GetJwks(publicKeyURL string) ([]JSONWebKeys, error) { - var err error - jso.once.Do(func() { - jso.jwks, err = fetchJwks(publicKeyURL) - }) - - if err != nil { - return nil, err - } + issuerURL, err := url.Parse(p.ISS) + if err != nil { + return nil, fmt.Errorf("failed to parse the issuer url: %w", err) + } - return jso.jwks, nil -} + provider := jwks.NewCachingProvider(issuerURL, time.Duration(*p.TTL)*time.Minute) -func fetchJwks(publicKeyURL string) ([]JSONWebKeys, error) { - resp, err := http.Get(publicKeyURL) - var res struct { - Jwks 
[]JSONWebKeys `json:"keys"` - } + algorithm := validator.SignatureAlgorithm(*p.ALG) - if err != nil { - return nil, err + v, err := validator.New( + provider.KeyFunc, + algorithm, + p.ISS, + p.AUD, + ) + if err != nil { + return nil, err + } + validators = append(validators, v) } - defer func() { - _ = resp.Body.Close() - }() - - err = json.NewDecoder(resp.Body).Decode(&res) + return validators, nil +} - if err != nil { - return nil, err +// ValidateToken Trys to validate the token with each validator +// NOTE: the last validation error only is returned +func (mv MultiValidator) ValidateToken(ctx context.Context, tokenString string) (res interface{}, err error) { + for _, v := range mv { + res, err = v.ValidateToken(ctx, tokenString) + if err == nil { + return + } } - - return res.Jwks, nil + return } -func getPemCert(token *jwt.Token, publicKeyURL string, jwks Jwks) (string, error) { - cert := "" - keys, err := jwks.GetJwks(publicKeyURL) +// Validate the access token and inject the user clams into ctx +func jwtEchoMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { + jwtValidator, err := NewMultiValidator(cfg.Config.Auth) if err != nil { - return cert, err + log.Fatalf("failed to set up the validator: %v", err) } - for k := range keys { - if token.Header["kid"] == keys[k].Kid { - cert = "-----BEGIN CERTIFICATE-----\n" + keys[k].X5c[0] + "\n-----END CERTIFICATE-----" - } - } + middleware := jwtmiddleware.New(jwtValidator.ValidateToken) - if cert == "" { - err := errors.New("unable to find appropriate key") - return cert, err - } - - return cert, nil + return echo.WrapMiddleware(middleware.CheckJWT) } -func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { - iss := urlFromDomain(cfg.Config.Auth0.Domain) - aud := cfg.Config.Auth0.Audience - +// load claim from ctx and inject the user sub into ctx +func parseJwtMiddleware() echo.MiddlewareFunc { return func(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { req := c.Request() ctx 
:= req.Context() - token := ctx.Value(userProfileKey) - if userProfile, ok := token.(*jwt.Token); ok { - claims := userProfile.Claims.(jwt.MapClaims) - - // Verify 'iss' claim - checkIss := claims.VerifyIssuer(iss, false) - if !checkIss { - return errorResponse(c, "invalid issuer") - } - - // Verify 'aud' claim - if !verifyAudience(claims, aud) { - return errorResponse(c, "invalid audience") - } + rawClaims := ctx.Value(jwtmiddleware.ContextKey{}) + if claims, ok := rawClaims.(*validator.ValidatedClaims); ok { // attach sub and access token to context - if sub, ok := claims["sub"].(string); ok { - ctx = context.WithValue(ctx, contextAuth0Sub, sub) - } - if user, ok := claims["https://reearth.io/user_id"].(string); ok { - ctx = context.WithValue(ctx, contextUser, user) - } - ctx = context.WithValue(ctx, contextAuth0AccessToken, userProfile.Raw) + ctx = context.WithValue(ctx, contextAuth0Sub, claims.RegisteredClaims.Subject) } c.SetRequest(req.WithContext(ctx)) @@ -141,84 +94,3 @@ func parseJwtMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } } } - -func jwtEchoMiddleware(jwks Jwks, cfg *ServerConfig) echo.MiddlewareFunc { - jwksURL := urlFromDomain(cfg.Config.Auth0.Domain) + ".well-known/jwks.json" - - jwtMiddleware := jwtmiddleware.New(jwtmiddleware.Options{ - CredentialsOptional: cfg.Debug, - UserProperty: userProfileKey, - SigningMethod: jwt.SigningMethodRS256, - // Make jwtmiddleware return an error object by not writing ErrorHandler to ResponseWriter - ErrorHandler: func(w http.ResponseWriter, req *http.Request, err string) {}, - ValidationKeyGetter: func(token *jwt.Token) (interface{}, error) { - cert, err := getPemCert(token, jwksURL, jwks) - if err != nil { - log.Errorf("jwt: %s", err) - return nil, err - } - result, _ := jwt.ParseRSAPublicKeyFromPEM([]byte(cert)) - return result, nil - }, - }) - - return func(next echo.HandlerFunc) echo.HandlerFunc { - return func(c echo.Context) error { - err := jwtMiddleware.CheckJWT(c.Response(), c.Request()) - if 
err != nil { - return errorResponse(c, err.Error()) - } - return next(c) - } - } -} - -func urlFromDomain(path string) string { - if path == "" { - return path - } - if !strings.HasPrefix(path, "http://") && !strings.HasPrefix(path, "https://") { - path = "https://" + path - } - if path[len(path)-1] != '/' { - path += "/" - } - return path -} - -// WORKAROUND: golang-jwt/jwt-go supports multiple audiences, but go-jwt-middleware still uses github.comform3tech-oss/jwt-go -func verifyAudience(claims jwt.MapClaims, aud string) bool { - if aud == "" { - return true - } - - auds, ok := claims["aud"].([]string) - if !ok { - auds2, ok := claims["aud"].([]interface{}) - if ok { - for _, a := range auds2 { - if aa, ok := a.(string); ok { - auds = append(auds, aa) - } - } - } else { - a, ok := claims["aud"].(string) - if !ok || a == "" { - return false - } - auds = append(auds, a) - } - } - - for _, a := range auds { - if jwt.MapClaims(map[string]interface{}{"aud": a}).VerifyAudience(aud, true) { - return true - } - } - return false -} - -func errorResponse(c echo.Context, err string) error { - res := map[string]string{"error": err} - return c.JSON(http.StatusUnauthorized, res) -} diff --git a/internal/app/main.go b/internal/app/main.go index bdce17444..4750ab597 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -40,7 +40,7 @@ func Start(debug bool, version string) { repos, gateways := initReposAndGateways(ctx, conf, debug) // Start web server - NewServer(&ServerConfig{ + NewServer(ctx, &ServerConfig{ Config: conf, Debug: debug, Repos: repos, @@ -60,7 +60,7 @@ type ServerConfig struct { Gateways *gateway.Container } -func NewServer(cfg *ServerConfig) *WebServer { +func NewServer(ctx context.Context, cfg *ServerConfig) *WebServer { port := cfg.Config.Port if port == "" { port = "8080" @@ -75,7 +75,7 @@ func NewServer(cfg *ServerConfig) *WebServer { address: address, } - w.appServer = initEcho(cfg) + w.appServer = initEcho(ctx, cfg) return w } diff --git 
a/internal/app/public.go b/internal/app/public.go index 174c3862e..e1624f0d8 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -23,7 +23,7 @@ func Ping() echo.HandlerFunc { func Signup() echo.HandlerFunc { return func(c echo.Context) error { - var inp http1.CreateUserInput + var inp http1.SignupInput if err := c.Bind(&inp); err != nil { return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} } @@ -31,7 +31,72 @@ func Signup() echo.HandlerFunc { uc := adapter.Usecases(c.Request().Context()) controller := http1.NewUserController(uc.User) - output, err := controller.CreateUser(c.Request().Context(), inp) + output, err := controller.Signup(c.Request().Context(), inp) + if err != nil { + return err + } + + return c.JSON(http.StatusOK, output) + } +} + +func PasswordReset() echo.HandlerFunc { + return func(c echo.Context) error { + var inp http1.PasswordResetInput + if err := c.Bind(&inp); err != nil { + return err + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + if len(inp.Email) > 0 { + if err := controller.StartPasswordReset(c.Request().Context(), inp); err != nil { + return err + } + return c.JSON(http.StatusOK, true) + } + + if len(inp.Token) > 0 && len(inp.Password) > 0 { + if err := controller.PasswordReset(c.Request().Context(), inp); err != nil { + return err + } + return c.JSON(http.StatusOK, true) + } + + return &echo.HTTPError{Code: http.StatusBadRequest, Message: "Bad reset password request"} + } +} + +func StartSignupVerify() echo.HandlerFunc { + return func(c echo.Context) error { + var inp http1.CreateVerificationInput + if err := c.Bind(&inp); err != nil { + return &echo.HTTPError{Code: http.StatusBadRequest, Message: fmt.Errorf("failed to parse request body: %w", err)} + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + if err := 
controller.CreateVerification(c.Request().Context(), inp); err != nil { + return err + } + + return c.NoContent(http.StatusOK) + } +} + +func SignupVerify() echo.HandlerFunc { + return func(c echo.Context) error { + code := c.Param("code") + if len(code) == 0 { + return echo.ErrBadRequest + } + + uc := adapter.Usecases(c.Request().Context()) + controller := http1.NewUserController(uc.User) + + output, err := controller.VerifyUser(c.Request().Context(), code) if err != nil { return err } diff --git a/internal/app/repo.go b/internal/app/repo.go index 53e5e311b..fdff03a08 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -5,6 +5,8 @@ import ( "fmt" "time" + "github.com/reearth/reearth-backend/internal/infrastructure/mailer" + "github.com/reearth/reearth-backend/internal/infrastructure/github" "github.com/reearth/reearth-backend/internal/infrastructure/google" "github.com/spf13/afero" @@ -70,6 +72,9 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. // google gateways.Google = google.NewGoogle() + // SMTP Mailer + gateways.Mailer = initMailer(conf) + // release lock of all scenes if err := repos.SceneLock.ReleaseAllLock(context.Background()); err != nil { log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) @@ -77,3 +82,12 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. 
return repos, gateways } + +func initMailer(conf *Config) gateway.Mailer { + if conf.Mailer == "sendgrid" { + return mailer.NewWithSendGrid(conf.SendGrid.Name, conf.SendGrid.Email, conf.SendGrid.API) + } else if conf.Mailer == "smtp" { + return mailer.NewWithSMTP(conf.SMTP.Host, conf.SMTP.Port, conf.SMTP.SMTPUsername, conf.SMTP.Email, conf.SMTP.Password) + } + return nil +} diff --git a/internal/infrastructure/mailer/sendgrid.go b/internal/infrastructure/mailer/sendgrid.go new file mode 100644 index 000000000..cd078d0ff --- /dev/null +++ b/internal/infrastructure/mailer/sendgrid.go @@ -0,0 +1,32 @@ +package mailer + +import ( + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/sendgrid/sendgrid-go" + "github.com/sendgrid/sendgrid-go/helpers/mail" +) + +type sendgridMailer struct { + api string + // sender data + name string + email string +} + +func NewWithSendGrid(senderName, senderEmail, api string) gateway.Mailer { + return &sendgridMailer{ + name: senderName, + email: senderEmail, + api: api, + } +} + +func (m *sendgridMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { + contact := to[0] + sender := mail.NewEmail(m.name, m.email) + receiver := mail.NewEmail(contact.Name, contact.Email) + message := mail.NewSingleEmail(sender, subject, receiver, plainContent, htmlContent) + client := sendgrid.NewSendClient(m.api) + _, err := client.Send(message) + return err +} diff --git a/internal/infrastructure/mailer/sendgrid_test.go b/internal/infrastructure/mailer/sendgrid_test.go new file mode 100644 index 000000000..2a7251ed3 --- /dev/null +++ b/internal/infrastructure/mailer/sendgrid_test.go @@ -0,0 +1,41 @@ +package mailer + +import ( + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/stretchr/testify/assert" +) + +func TestNewWithSendGrid(t *testing.T) { + type args struct { + senderName string + senderEmail string + api string + } + tests := []struct { + name 
string + args args + want gateway.Mailer + }{ + { + name: "should create a sendGrid mailer", + args: args{ + senderName: "test sender", + senderEmail: "sender@test.com", + api: "TEST_API", + }, + want: &sendgridMailer{ + api: "TEST_API", + name: "test sender", + email: "sender@test.com", + }, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + got := NewWithSendGrid(tc.args.senderName, tc.args.senderEmail, tc.args.api) + assert.Equal(tt, tc.want, got) + }) + } +} diff --git a/internal/infrastructure/mailer/smtp.go b/internal/infrastructure/mailer/smtp.go new file mode 100644 index 000000000..e54127d7f --- /dev/null +++ b/internal/infrastructure/mailer/smtp.go @@ -0,0 +1,121 @@ +package mailer + +import ( + "bytes" + "errors" + "fmt" + "io" + "mime/multipart" + "net/mail" + "net/smtp" + "net/textproto" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +type smtpMailer struct { + host string + port string + email string + username string + password string +} + +type message struct { + to []string + from string + subject string + plainContent string + htmlContent string +} + +func (m *message) encodeContent() (string, error) { + buf := bytes.NewBuffer(nil) + writer := multipart.NewWriter(buf) + boundary := writer.Boundary() + + altBuffer, err := writer.CreatePart(textproto.MIMEHeader{"Content-Type": {"multipart/alternative; boundary=" + boundary}}) + if err != nil { + return "", err + } + altWriter := multipart.NewWriter(altBuffer) + err = altWriter.SetBoundary(boundary) + if err != nil { + return "", err + } + var content io.Writer + content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/plain"}}) + if err != nil { + return "", err + } + + _, err = content.Write([]byte(m.plainContent + "\r\n\r\n")) + if err != nil { + return "", err + } + content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/html"}}) + if err != nil { + return "", err + } + _, err = 
content.Write([]byte(m.htmlContent + "\r\n")) + if err != nil { + return "", err + } + _ = altWriter.Close() + return buf.String(), nil +} + +func (m *message) encodeMessage() ([]byte, error) { + buf := bytes.NewBuffer(nil) + buf.WriteString(fmt.Sprintf("Subject: %s\n", m.subject)) + buf.WriteString(fmt.Sprintf("From: %s\n", m.from)) + buf.WriteString(fmt.Sprintf("To: %s\n", strings.Join(m.to, ","))) + content, err := m.encodeContent() + if err != nil { + return nil, err + } + buf.WriteString(content) + + return buf.Bytes(), nil +} + +func NewWithSMTP(host, port, username, email, password string) gateway.Mailer { + return &smtpMailer{ + host: host, + port: port, + username: username, + email: email, + password: password, + } +} + +func (m *smtpMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { + emails := make([]string, 0, len(to)) + for _, c := range to { + _, err := mail.ParseAddress(c.Email) + if err != nil { + return fmt.Errorf("invalid email %s", c.Email) + } + emails = append(emails, c.Email) + } + + msg := &message{ + to: emails, + from: m.email, + subject: subject, + plainContent: plainContent, + htmlContent: htmlContent, + } + + encodedMsg, err := msg.encodeMessage() + if err != nil { + return err + } + + auth := smtp.PlainAuth("", m.username, m.password, m.host) + if len(m.host) == 0 { + return errors.New("invalid smtp url") + } + return smtp.SendMail(m.host+":"+m.port, auth, m.email, emails, encodedMsg) +} diff --git a/internal/infrastructure/mailer/smtp_test.go b/internal/infrastructure/mailer/smtp_test.go new file mode 100644 index 000000000..ed217d9cb --- /dev/null +++ b/internal/infrastructure/mailer/smtp_test.go @@ -0,0 +1,144 @@ +package mailer + +import ( + "strings" + "testing" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + + "github.com/stretchr/testify/assert" +) + +func TestNewWithSMTP(t *testing.T) { + type args struct { + host string + port string + email string + username string + 
password string + } + tests := []struct { + name string + args args + want gateway.Mailer + }{ + { + name: "should create mailer with given args", + args: args{ + host: "x.x.x", + port: "8080", + username: "foo", + email: "xxx@test.com", + password: "foo.pass", + }, + want: &smtpMailer{ + host: "x.x.x", + port: "8080", + username: "foo", + email: "xxx@test.com", + password: "foo.pass", + }, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + got := NewWithSMTP(tc.args.host, tc.args.port, tc.args.username, tc.args.email, tc.args.password) + assert.Equal(tt, tc.want, got) + }) + } +} + +func Test_message_encodeContent(t *testing.T) { + // subject and receiver email are not needed for encoding the content + tests := []struct { + name string + plainContent string + htmlContent string + wantContentTypes []string + wantPlain bool + wantHtml bool + wantErr bool + }{ + { + name: "should return encoded message content", + plainContent: "plain content", + htmlContent: `

html content

`, + wantContentTypes: []string{ + "Content-Type: multipart/alternative", + "Content-Type: text/plain", + "Content-Type: text/html", + }, + wantPlain: true, + wantHtml: true, + wantErr: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m := &message{ + plainContent: tc.plainContent, + htmlContent: tc.htmlContent, + } + got, err := m.encodeContent() + gotTypes := true + for _, ct := range tc.wantContentTypes { + gotTypes = strings.Contains(got, ct) && gotTypes + } + assert.Equal(tt, tc.wantErr, err != nil) + assert.True(tt, gotTypes) + assert.Equal(tt, tc.wantPlain, strings.Contains(got, tc.plainContent)) + assert.Equal(tt, tc.wantHtml, strings.Contains(got, tc.htmlContent)) + }) + } +} + +func Test_message_encodeMessage(t *testing.T) { + tests := []struct { + name string + to []string + subject string + plainContent string + htmlContent string + wantTo bool + wantSubject bool + wantPlain bool + wantHtml bool + wantErr bool + }{ + { + name: "should return encoded message", + to: []string{"someone@email.com"}, + subject: "test", + plainContent: "plain content", + htmlContent: `

html content

`, + wantTo: true, + wantSubject: true, + wantPlain: true, + wantHtml: true, + wantErr: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + m := &message{ + to: []string{"someone@email.com"}, + subject: "test", + plainContent: tc.plainContent, + htmlContent: tc.htmlContent, + } + got, err := m.encodeMessage() + str := string(got) + assert.Equal(tt, tc.wantErr, err != nil) + assert.Equal(tt, tc.wantSubject, strings.Contains(str, tc.subject)) + assert.Equal(tt, tc.wantTo, strings.Contains(str, tc.to[0])) + assert.Equal(tt, tc.wantPlain, strings.Contains(str, tc.plainContent)) + assert.Equal(tt, tc.wantHtml, strings.Contains(str, tc.htmlContent)) + }) + } +} diff --git a/internal/infrastructure/memory/auth_request.go b/internal/infrastructure/memory/auth_request.go new file mode 100644 index 000000000..daabf8c64 --- /dev/null +++ b/internal/infrastructure/memory/auth_request.go @@ -0,0 +1,75 @@ +package memory + +import ( + "context" + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type AuthRequest struct { + lock sync.Mutex + data map[id.AuthRequestID]auth.Request +} + +func NewAuthRequest() repo.AuthRequest { + return &AuthRequest{ + data: map[id.AuthRequestID]auth.Request{}, + } +} + +func (r *AuthRequest) FindByID(_ context.Context, id id.AuthRequestID) (*auth.Request, error) { + r.lock.Lock() + defer r.lock.Unlock() + + d, ok := r.data[id] + if ok { + return &d, nil + } + return &auth.Request{}, rerror.ErrNotFound +} + +func (r *AuthRequest) FindByCode(_ context.Context, s string) (*auth.Request, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, ar := range r.data { + if ar.GetCode() == s { + return &ar, nil + } + } + + return &auth.Request{}, rerror.ErrNotFound +} + +func (r *AuthRequest) FindBySubject(_ context.Context, s 
string) (*auth.Request, error) { + r.lock.Lock() + defer r.lock.Unlock() + + for _, ar := range r.data { + if ar.GetSubject() == s { + return &ar, nil + } + } + + return &auth.Request{}, rerror.ErrNotFound +} + +func (r *AuthRequest) Save(_ context.Context, request *auth.Request) error { + r.lock.Lock() + defer r.lock.Unlock() + + r.data[request.ID()] = *request + return nil +} + +func (r *AuthRequest) Remove(_ context.Context, requestID id.AuthRequestID) error { + r.lock.Lock() + defer r.lock.Unlock() + + delete(r.data, requestID) + return nil +} diff --git a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go index 76d4383f8..4024b1ad7 100644 --- a/internal/infrastructure/memory/user.go +++ b/internal/infrastructure/memory/user.go @@ -72,6 +72,24 @@ func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, return nil, rerror.ErrNotFound } +func (r *User) FindByPasswordResetRequest(ctx context.Context, token string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if token == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + pwdReq := u.PasswordReset() + if pwdReq != nil && pwdReq.Token == token { + return &u, nil + } + } + + return nil, rerror.ErrNotFound +} + func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error) { r.lock.Lock() defer r.lock.Unlock() @@ -113,3 +131,20 @@ func (r *User) Remove(ctx context.Context, user id.UserID) error { delete(r.data, user) return nil } + +func (r *User) FindByVerification(ctx context.Context, code string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if code == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if u.Verification() != nil && u.Verification().Code() == code { + return &u, nil + } + } + + return nil, rerror.ErrNotFound +} diff --git a/internal/infrastructure/mongo/auth_request.go b/internal/infrastructure/mongo/auth_request.go new file mode 
100644 index 000000000..247e1f6e4 --- /dev/null +++ b/internal/infrastructure/mongo/auth_request.go @@ -0,0 +1,64 @@ +package mongo + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "go.mongodb.org/mongo-driver/bson" +) + +type authRequestRepo struct { + client *mongodoc.ClientCollection +} + +func NewAuthRequest(client *mongodoc.Client) repo.AuthRequest { + r := &authRequestRepo{client: client.WithCollection("authRequest")} + r.init() + return r +} + +func (r *authRequestRepo) init() { + i := r.client.CreateIndex(context.Background(), []string{"code", "subject"}) + if len(i) > 0 { + log.Infof("mongo: %s: index created: %s", "authRequest", i) + } +} + +func (r *authRequestRepo) FindByID(ctx context.Context, id2 id.AuthRequestID) (*auth.Request, error) { + filter := bson.D{{Key: "id", Value: id2.String()}} + return r.findOne(ctx, filter) +} + +func (r *authRequestRepo) FindByCode(ctx context.Context, s string) (*auth.Request, error) { + filter := bson.D{{Key: "code", Value: s}} + return r.findOne(ctx, filter) +} + +func (r *authRequestRepo) FindBySubject(ctx context.Context, s string) (*auth.Request, error) { + filter := bson.D{{Key: "subject", Value: s}} + return r.findOne(ctx, filter) +} + +func (r *authRequestRepo) Save(ctx context.Context, request *auth.Request) error { + doc, id1 := mongodoc.NewAuthRequest(request) + return r.client.SaveOne(ctx, id1, doc) +} + +func (r *authRequestRepo) Remove(ctx context.Context, requestID id.AuthRequestID) error { + return r.client.RemoveOne(ctx, requestID.String()) +} + +func (r *authRequestRepo) findOne(ctx context.Context, filter bson.D) (*auth.Request, error) { + dst := make([]*auth.Request, 0, 1) + c := mongodoc.AuthRequestConsumer{ + Rows: dst, + } + if err := 
r.client.FindOne(ctx, filter, &c); err != nil { + return nil, err + } + return c.Rows[0], nil +} diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index 95b2251ce..2dcd699be 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -21,6 +21,7 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas client := mongodoc.NewClient(databaseName, mc) c.Asset = NewAsset(client) + c.AuthRequest = NewAuthRequest(client) c.Config = NewConfig(client, lock) c.DatasetSchema = NewDatasetSchema(client) c.Dataset = NewDataset(client) diff --git a/internal/infrastructure/mongo/mongodoc/auth_request.go b/internal/infrastructure/mongo/mongodoc/auth_request.go new file mode 100644 index 000000000..245e94c2d --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/auth_request.go @@ -0,0 +1,116 @@ +package mongodoc + +import ( + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +type AuthRequestDocument struct { + ID string + ClientID string + Subject string + Code string + State string + ResponseType string + Scopes []string + Audiences []string + RedirectURI string + Nonce string + CodeChallenge *CodeChallengeDocument + CreatedAt time.Time + AuthorizedAt *time.Time +} + +type CodeChallengeDocument struct { + Challenge string + Method string +} + +type AuthRequestConsumer struct { + Rows []*auth.Request +} + +func (a *AuthRequestConsumer) Consume(raw bson.Raw) error { + if raw == nil { + return nil + } + + var doc AuthRequestDocument + if err := bson.Unmarshal(raw, &doc); err != nil { + return err + } + request, err := doc.Model() + if err != nil { + return err + } + a.Rows = append(a.Rows, request) + return nil +} + +func NewAuthRequest(req *auth.Request) (*AuthRequestDocument, string) { + if req == nil { + return nil, "" + } + 
reqID := req.GetID() + var cc *CodeChallengeDocument + if req.GetCodeChallenge() != nil { + cc = &CodeChallengeDocument{ + Challenge: req.GetCodeChallenge().Challenge, + Method: string(req.GetCodeChallenge().Method), + } + } + return &AuthRequestDocument{ + ID: reqID, + ClientID: req.GetClientID(), + Subject: req.GetSubject(), + Code: req.GetCode(), + State: req.GetState(), + ResponseType: string(req.GetResponseType()), + Scopes: req.GetScopes(), + Audiences: req.GetAudience(), + RedirectURI: req.GetRedirectURI(), + Nonce: req.GetNonce(), + CodeChallenge: cc, + CreatedAt: req.CreatedAt(), + AuthorizedAt: req.AuthorizedAt(), + }, reqID +} + +func (d *AuthRequestDocument) Model() (*auth.Request, error) { + if d == nil { + return nil, nil + } + + ulid, err := id.AuthRequestIDFrom(d.ID) + if err != nil { + return nil, err + } + + var cc *oidc.CodeChallenge + if d.CodeChallenge != nil { + cc = &oidc.CodeChallenge{ + Challenge: d.CodeChallenge.Challenge, + Method: oidc.CodeChallengeMethod(d.CodeChallenge.Method), + } + } + var req = auth.NewRequest(). + ID(ulid). + ClientID(d.ClientID). + Subject(d.Subject). + Code(d.Code). + State(d.State). + ResponseType(oidc.ResponseType(d.ResponseType)). + Scopes(d.Scopes). + Audiences(d.Audiences). + RedirectURI(d.RedirectURI). + Nonce(d.Nonce). + CodeChallenge(cc). + CreatedAt(d.CreatedAt). + AuthorizedAt(d.AuthorizedAt). 
+ MustBuild() + return req, nil +} diff --git a/internal/infrastructure/mongo/mongodoc/config.go b/internal/infrastructure/mongo/mongodoc/config.go index b54649013..d20ca1288 100644 --- a/internal/infrastructure/mongo/mongodoc/config.go +++ b/internal/infrastructure/mongo/mongodoc/config.go @@ -4,19 +4,39 @@ import "github.com/reearth/reearth-backend/pkg/config" type ConfigDocument struct { Migration int64 + Auth *Auth +} + +type Auth struct { + Cert string + Key string } func NewConfig(c config.Config) ConfigDocument { - return ConfigDocument{ + d := ConfigDocument{ Migration: c.Migration, } + if c.Auth != nil { + d.Auth = &Auth{ + Cert: c.Auth.Cert, + Key: c.Auth.Key, + } + } + return d } func (c *ConfigDocument) Model() *config.Config { if c == nil { return &config.Config{} } - return &config.Config{ + m := &config.Config{ Migration: c.Migration, } + if c.Auth != nil { + m.Auth = &config.Auth{ + Cert: c.Auth.Cert, + Key: c.Auth.Key, + } + } + return m } diff --git a/internal/infrastructure/mongo/mongodoc/user.go b/internal/infrastructure/mongo/mongodoc/user.go index de3030c23..2f52da3e5 100644 --- a/internal/infrastructure/mongo/mongodoc/user.go +++ b/internal/infrastructure/mongo/mongodoc/user.go @@ -1,6 +1,8 @@ package mongodoc import ( + "time" + "go.mongodb.org/mongo-driver/bson" "github.com/reearth/reearth-backend/pkg/id" @@ -8,15 +10,29 @@ import ( user1 "github.com/reearth/reearth-backend/pkg/user" ) +type PasswordResetDocument struct { + Token string + CreatedAt time.Time +} + type UserDocument struct { - ID string - Name string - Email string - Auth0Sub string - Auth0SubList []string - Team string - Lang string - Theme string + ID string + Name string + Email string + Auth0Sub string + Auth0SubList []string + Team string + Lang string + Theme string + Password []byte + PasswordReset *PasswordResetDocument + Verification *UserVerificationDoc +} + +type UserVerificationDoc struct { + Code string + Expiration time.Time + Verified bool } type UserConsumer 
struct { @@ -47,15 +63,35 @@ func NewUser(user *user1.User) (*UserDocument, string) { for _, a := range auths { authsdoc = append(authsdoc, a.Sub) } + var v *UserVerificationDoc + if user.Verification() != nil { + v = &UserVerificationDoc{ + Code: user.Verification().Code(), + Expiration: user.Verification().Expiration(), + Verified: user.Verification().IsVerified(), + } + } + pwdReset := user.PasswordReset() + + var pwdResetDoc *PasswordResetDocument + if pwdReset != nil { + pwdResetDoc = &PasswordResetDocument{ + Token: pwdReset.Token, + CreatedAt: pwdReset.CreatedAt, + } + } return &UserDocument{ - ID: id, - Name: user.Name(), - Email: user.Email(), - Auth0SubList: authsdoc, - Team: user.Team().String(), - Lang: user.Lang().String(), - Theme: string(user.Theme()), + ID: id, + Name: user.Name(), + Email: user.Email(), + Auth0SubList: authsdoc, + Team: user.Team().String(), + Lang: user.Lang().String(), + Theme: string(user.Theme()), + Verification: v, + Password: user.Password(), + PasswordReset: pwdResetDoc, }, id } @@ -75,17 +111,36 @@ func (d *UserDocument) Model() (*user1.User, error) { if d.Auth0Sub != "" { auths = append(auths, user.AuthFromAuth0Sub(d.Auth0Sub)) } - user, err := user1.New(). + var v *user.Verification + if d.Verification != nil { + v = user.VerificationFrom(d.Verification.Code, d.Verification.Expiration, d.Verification.Verified) + } + + u, err := user1.New(). ID(uid). Name(d.Name). Email(d.Email). Auths(auths). Team(tid). LangFrom(d.Lang). + Verification(v). + Password(d.Password). + PasswordReset(d.PasswordReset.Model()). Theme(user.Theme(d.Theme)). 
Build() + if err != nil { return nil, err } - return user, nil + return u, nil +} + +func (d *PasswordResetDocument) Model() *user1.PasswordReset { + if d == nil { + return nil + } + return &user1.PasswordReset{ + Token: d.Token, + CreatedAt: d.CreatedAt, + } } diff --git a/internal/infrastructure/mongo/user.go b/internal/infrastructure/mongo/user.go index d553cb86d..b5b542e6e 100644 --- a/internal/infrastructure/mongo/user.go +++ b/internal/infrastructure/mongo/user.go @@ -73,6 +73,18 @@ func (r *userRepo) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (* return r.findOne(ctx, filter) } +func (r *userRepo) FindByVerification(ctx context.Context, code string) (*user.User, error) { + filter := bson.D{{Key: "verification.code", Value: code}} + return r.findOne(ctx, filter) +} + +func (r *userRepo) FindByPasswordResetRequest(ctx context.Context, pwdResetToken string) (*user.User, error) { + filter := bson.D{ + {Key: "passwordreset.token", Value: pwdResetToken}, + } + return r.findOne(ctx, filter) +} + func (r *userRepo) Save(ctx context.Context, user *user.User) error { doc, id := mongodoc.NewUser(user) return r.client.SaveOne(ctx, id, doc) diff --git a/internal/usecase/gateway/mailer.go b/internal/usecase/gateway/mailer.go index 27f530855..3784d29fc 100644 --- a/internal/usecase/gateway/mailer.go +++ b/internal/usecase/gateway/mailer.go @@ -1,5 +1,10 @@ package gateway +type Contact struct { + Email string + Name string +} + type Mailer interface { - SendMail(to, content string) error + SendMail(toContacts []Contact, subject, plainContent, htmlContent string) error } diff --git a/internal/usecase/interactor/auth.go b/internal/usecase/interactor/auth.go new file mode 100644 index 000000000..5cc6d8306 --- /dev/null +++ b/internal/usecase/interactor/auth.go @@ -0,0 +1,403 @@ +package interactor + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "errors" + "fmt" + "math/big" + "time" + + 
"github.com/caos/oidc/pkg/oidc" + "github.com/caos/oidc/pkg/op" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/auth" + config2 "github.com/reearth/reearth-backend/pkg/config" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" + "gopkg.in/square/go-jose.v2" +) + +type AuthStorage struct { + appConfig *StorageConfig + getUserBySubject func(context.Context, string) (*user.User, error) + clients map[string]op.Client + requests repo.AuthRequest + keySet jose.JSONWebKeySet + key *rsa.PrivateKey + sigKey jose.SigningKey +} + +type StorageConfig struct { + Domain string `default:"http://localhost:8080"` + Debug bool + DN *AuthDNConfig +} + +type AuthDNConfig struct { + CommonName string + Organization []string + OrganizationalUnit []string + Country []string + Province []string + Locality []string + StreetAddress []string + PostalCode []string +} + +var dummyName = pkix.Name{ + CommonName: "Dummy company, INC.", + Organization: []string{"Dummy company, INC."}, + OrganizationalUnit: []string{"Dummy OU"}, + Country: []string{"US"}, + Province: []string{"Dummy"}, + Locality: []string{"Dummy locality"}, + StreetAddress: []string{"Dummy street"}, + PostalCode: []string{"1"}, +} + +func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRequest, config repo.Config, getUserBySubject func(context.Context, string) (*user.User, error)) (op.Storage, error) { + + client := auth.NewLocalClient(cfg.Debug) + + name := dummyName + if cfg.DN != nil { + name = pkix.Name{ + CommonName: cfg.DN.CommonName, + Organization: cfg.DN.Organization, + OrganizationalUnit: cfg.DN.OrganizationalUnit, + Country: cfg.DN.Country, + Province: cfg.DN.Province, + Locality: cfg.DN.Locality, + StreetAddress: cfg.DN.StreetAddress, + PostalCode: cfg.DN.PostalCode, + } + } + c, err := config.LockAndLoad(ctx) + if err != nil { + return nil, 
fmt.Errorf("Could not load auth config: %w\n", err) + } + defer func() { + if err := config.Unlock(ctx); err != nil { + log.Errorf("auth: Could not release config lock: %s\n", err) + } + }() + + var keyBytes, certBytes []byte + if c.Auth != nil { + keyBytes = []byte(c.Auth.Key) + certBytes = []byte(c.Auth.Cert) + } else { + keyBytes, certBytes, err = generateCert(name) + if err != nil { + return nil, fmt.Errorf("Could not generate raw cert: %w\n", err) + } + c.Auth = &config2.Auth{ + Key: string(keyBytes), + Cert: string(certBytes), + } + + if err := config.Save(ctx, c); err != nil { + return nil, fmt.Errorf("Could not save raw cert: %w\n", err) + } + } + + key, sigKey, keySet, err := initKeys(keyBytes, certBytes) + if err != nil { + return nil, fmt.Errorf("Fail to init keys: %w\n", err) + } + + return &AuthStorage{ + appConfig: cfg, + getUserBySubject: getUserBySubject, + requests: request, + key: key, + sigKey: *sigKey, + keySet: *keySet, + clients: map[string]op.Client{ + client.GetID(): client, + }, + }, nil +} + +func initKeys(keyBytes, certBytes []byte) (*rsa.PrivateKey, *jose.SigningKey, *jose.JSONWebKeySet, error) { + + block, _ := pem.Decode(keyBytes) + if block == nil { + return nil, nil, nil, fmt.Errorf("failed to decode the key bytes") + } + + key, err := x509.ParsePKCS1PrivateKey(block.Bytes) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse the private key bytes: %w\n", err) + } + + cert, err := x509.ParseCertificate(certBytes) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to parse the cert bytes: %w\n", err) + } + + keyID := "RE01" + sk := jose.SigningKey{ + Algorithm: jose.RS256, + Key: jose.JSONWebKey{Key: key, Use: "sig", Algorithm: string(jose.RS256), KeyID: keyID, Certificates: []*x509.Certificate{cert}}, + } + + return key, &sk, &jose.JSONWebKeySet{ + Keys: []jose.JSONWebKey{ + {Key: key.Public(), Use: "sig", Algorithm: string(jose.RS256), KeyID: keyID, Certificates: []*x509.Certificate{cert}}, + }, + }, nil 
+} + +func generateCert(name pkix.Name) (keyPem, certPem []byte, err error) { + key, err := rsa.GenerateKey(rand.Reader, 2048) + if err != nil { + err = fmt.Errorf("failed to generate key: %w\n", err) + return + } + + keyPem = pem.EncodeToMemory(&pem.Block{ + Type: "RSA PRIVATE KEY", + Bytes: x509.MarshalPKCS1PrivateKey(key), + }) + + cert := &x509.Certificate{ + SerialNumber: big.NewInt(1), + Subject: name, + NotBefore: time.Now(), + NotAfter: time.Now().AddDate(100, 0, 0), + IsCA: true, + KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign | x509.KeyUsageCRLSign, + } + + certPem, err = x509.CreateCertificate(rand.Reader, cert, cert, key.Public(), key) + if err != nil { + err = fmt.Errorf("failed to create the cert: %w\n", err) + } + + return +} + +func (s *AuthStorage) Health(_ context.Context) error { + return nil +} + +func (s *AuthStorage) CreateAuthRequest(ctx context.Context, authReq *oidc.AuthRequest, _ string) (op.AuthRequest, error) { + audiences := []string{ + s.appConfig.Domain, + } + if s.appConfig.Debug { + audiences = append(audiences, "http://localhost:8080") + } + + var cc *oidc.CodeChallenge + if authReq.CodeChallenge != "" { + cc = &oidc.CodeChallenge{ + Challenge: authReq.CodeChallenge, + Method: authReq.CodeChallengeMethod, + } + } + var request = auth.NewRequest(). + NewID(). + ClientID(authReq.ClientID). + State(authReq.State). + ResponseType(authReq.ResponseType). + Scopes(authReq.Scopes). + Audiences(audiences). + RedirectURI(authReq.RedirectURI). + Nonce(authReq.Nonce). + CodeChallenge(cc). + CreatedAt(time.Now().UTC()). + AuthorizedAt(nil). 
+ MustBuild() + + if err := s.requests.Save(ctx, request); err != nil { + return nil, err + } + return request, nil +} + +func (s *AuthStorage) AuthRequestByID(ctx context.Context, requestID string) (op.AuthRequest, error) { + if requestID == "" { + return nil, errors.New("invalid id") + } + reqId, err := id.AuthRequestIDFrom(requestID) + if err != nil { + return nil, err + } + request, err := s.requests.FindByID(ctx, reqId) + if err != nil { + return nil, err + } + return request, nil +} + +func (s *AuthStorage) AuthRequestByCode(ctx context.Context, code string) (op.AuthRequest, error) { + if code == "" { + return nil, errors.New("invalid code") + } + return s.requests.FindByCode(ctx, code) +} + +func (s *AuthStorage) AuthRequestBySubject(ctx context.Context, subject string) (op.AuthRequest, error) { + if subject == "" { + return nil, errors.New("invalid subject") + } + + return s.requests.FindBySubject(ctx, subject) +} + +func (s *AuthStorage) SaveAuthCode(ctx context.Context, requestID, code string) error { + + request, err := s.AuthRequestByID(ctx, requestID) + if err != nil { + return err + } + request2 := request.(*auth.Request) + request2.SetCode(code) + err = s.updateRequest(ctx, requestID, *request2) + return err +} + +func (s *AuthStorage) DeleteAuthRequest(_ context.Context, requestID string) error { + delete(s.clients, requestID) + return nil +} + +func (s *AuthStorage) CreateAccessToken(_ context.Context, _ op.TokenRequest) (string, time.Time, error) { + return "id", time.Now().UTC().Add(5 * time.Hour), nil +} + +func (s *AuthStorage) CreateAccessAndRefreshTokens(_ context.Context, request op.TokenRequest, _ string) (accessTokenID string, newRefreshToken string, expiration time.Time, err error) { + authReq := request.(*auth.Request) + return "id", authReq.GetID(), time.Now().UTC().Add(5 * time.Minute), nil +} + +func (s *AuthStorage) TokenRequestByRefreshToken(ctx context.Context, refreshToken string) (op.RefreshTokenRequest, error) { + r, err := 
s.AuthRequestByID(ctx, refreshToken) + if err != nil { + return nil, err + } + return r.(op.RefreshTokenRequest), err +} + +func (s *AuthStorage) TerminateSession(_ context.Context, _, _ string) error { + return errors.New("not implemented") +} + +func (s *AuthStorage) GetSigningKey(_ context.Context, keyCh chan<- jose.SigningKey) { + keyCh <- s.sigKey +} + +func (s *AuthStorage) GetKeySet(_ context.Context) (*jose.JSONWebKeySet, error) { + return &s.keySet, nil +} + +func (s *AuthStorage) GetKeyByIDAndUserID(_ context.Context, kid, _ string) (*jose.JSONWebKey, error) { + return &s.keySet.Key(kid)[0], nil +} + +func (s *AuthStorage) GetClientByClientID(_ context.Context, clientID string) (op.Client, error) { + + if clientID == "" { + return nil, errors.New("invalid client id") + } + + client, exists := s.clients[clientID] + if !exists { + return nil, errors.New("not found") + } + + return client, nil +} + +func (s *AuthStorage) AuthorizeClientIDSecret(_ context.Context, _ string, _ string) error { + return nil +} + +func (s *AuthStorage) SetUserinfoFromToken(ctx context.Context, userinfo oidc.UserInfoSetter, _, _, _ string) error { + return s.SetUserinfoFromScopes(ctx, userinfo, "", "", []string{}) +} + +func (s *AuthStorage) SetUserinfoFromScopes(ctx context.Context, userinfo oidc.UserInfoSetter, subject, _ string, _ []string) error { + + request, err := s.AuthRequestBySubject(ctx, subject) + if err != nil { + return err + } + + u, err := s.getUserBySubject(ctx, subject) + if err != nil { + return err + } + + userinfo.SetSubject(request.GetSubject()) + userinfo.SetEmail(u.Email(), true) + userinfo.SetName(u.Name()) + userinfo.AppendClaims("lang", u.Lang()) + userinfo.AppendClaims("theme", u.Theme()) + + return nil +} + +func (s *AuthStorage) GetPrivateClaimsFromScopes(_ context.Context, _, _ string, _ []string) (map[string]interface{}, error) { + return map[string]interface{}{"private_claim": "test"}, nil +} + +func (s *AuthStorage) SetIntrospectionFromToken(ctx 
context.Context, introspect oidc.IntrospectionResponse, _, subject, clientID string) error { + if err := s.SetUserinfoFromScopes(ctx, introspect, subject, clientID, []string{}); err != nil { + return err + } + request, err := s.AuthRequestBySubject(ctx, subject) + if err != nil { + return err + } + introspect.SetClientID(request.GetClientID()) + return nil +} + +func (s *AuthStorage) ValidateJWTProfileScopes(_ context.Context, _ string, scope []string) ([]string, error) { + return scope, nil +} + +func (s *AuthStorage) RevokeToken(_ context.Context, _ string, _ string, _ string) *oidc.Error { + // TODO implement me + panic("implement me") +} + +func (s *AuthStorage) CompleteAuthRequest(ctx context.Context, requestId, sub string) error { + request, err := s.AuthRequestByID(ctx, requestId) + if err != nil { + return err + } + req := request.(*auth.Request) + req.Complete(sub) + err = s.updateRequest(ctx, requestId, *req) + return err +} + +func (s *AuthStorage) updateRequest(ctx context.Context, requestID string, req auth.Request) error { + if requestID == "" { + return errors.New("invalid id") + } + reqId, err := id.AuthRequestIDFrom(requestID) + if err != nil { + return err + } + + if _, err := s.requests.FindByID(ctx, reqId); err != nil { + return err + } + + if err := s.requests.Save(ctx, &req); err != nil { + return err + } + + return nil +} diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index b3d0bb11f..dcd3039ec 100644 --- a/internal/usecase/interactor/common.go +++ b/internal/usecase/interactor/common.go @@ -18,6 +18,7 @@ import ( type ContainerConfig struct { SignupSecret string + AuthSrvUIDomain string PublishedIndexHTML string PublishedIndexURL *url.URL } @@ -41,7 +42,7 @@ func NewContainer(r *repo.Container, g *gateway.Container, config ContainerConfi Scene: NewScene(r, g), Tag: NewTag(r), Team: NewTeam(r), - User: NewUser(r, g, config.SignupSecret), + User: NewUser(r, g, config.SignupSecret, 
config.AuthSrvUIDomain), } } diff --git a/internal/usecase/interactor/emails/auth_html.tmpl b/internal/usecase/interactor/emails/auth_html.tmpl new file mode 100644 index 000000000..9d1d1e3ac --- /dev/null +++ b/internal/usecase/interactor/emails/auth_html.tmpl @@ -0,0 +1,435 @@ + + + + + + + Re:Earth reset password + + + + + + + + + + + + + \ No newline at end of file diff --git a/internal/usecase/interactor/emails/auth_text.tmpl b/internal/usecase/interactor/emails/auth_text.tmpl new file mode 100644 index 000000000..0ed590d5b --- /dev/null +++ b/internal/usecase/interactor/emails/auth_text.tmpl @@ -0,0 +1,7 @@ +Hi {{ .UserName }}: +{{ .Message }} + +To {{ .ActionLabel }}: +{{ .ActionURL }} + +{{ .Suffix }} \ No newline at end of file diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 08d6ef6bd..138757981 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -1,14 +1,20 @@ package interactor import ( + "bytes" "context" + _ "embed" "errors" + htmlTmpl "html/template" + "net/mail" + textTmpl "text/template" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/project" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/user" @@ -28,10 +34,57 @@ type User struct { transaction repo.Transaction file gateway.File authenticator gateway.Authenticator + mailer gateway.Mailer signupSecret string + authSrvUIDomain string } -func NewUser(r *repo.Container, g *gateway.Container, signupSecret string) interfaces.User { +type mailContent struct { + UserName string + Message string + Suffix string + ActionLabel string + ActionURL htmlTmpl.URL +} + 
+var ( + //go:embed emails/auth_html.tmpl + autHTMLTMPLStr string + //go:embed emails/auth_text.tmpl + authTextTMPLStr string + + authTextTMPL *textTmpl.Template + authHTMLTMPL *htmlTmpl.Template + + signupMailContent mailContent + passwordResetMailContent mailContent +) + +func init() { + var err error + authTextTMPL, err = textTmpl.New("passwordReset").Parse(authTextTMPLStr) + if err != nil { + log.Panicf("password reset email template parse error: %s\n", err) + } + authHTMLTMPL, err = htmlTmpl.New("passwordReset").Parse(autHTMLTMPLStr) + if err != nil { + log.Panicf("password reset email template parse error: %s\n", err) + } + + signupMailContent = mailContent{ + Message: "Thank you for signing up to Re:Earth. Please verify your email address by clicking the button below.", + Suffix: "You can use this email address to log in to Re:Earth account anytime.", + ActionLabel: "Activate your account and log in", + } + + passwordResetMailContent = mailContent{ + Message: "Thank you for using Re:Earth. Weโ€™ve received a request to reset your password. 
If this was you, please click the link below to confirm and change your password.", + Suffix: "If you did not mean to reset your password, then you can ignore this email.", + ActionLabel: "Confirm to reset your password", + } +} + +func NewUser(r *repo.Container, g *gateway.Container, signupSecret, authSrcUIDomain string) interfaces.User { return &User{ userRepo: r.User, teamRepo: r.Team, @@ -46,6 +99,8 @@ func NewUser(r *repo.Container, g *gateway.Container, signupSecret string) inter file: g.File, authenticator: g.Authenticator, signupSecret: signupSecret, + authSrvUIDomain: authSrcUIDomain, + mailer: g.Mailer, } } @@ -77,17 +132,19 @@ func (i *User) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Ope } func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user.User, _ *user.Team, err error) { - if i.signupSecret != "" && inp.Secret != i.signupSecret { - return nil, nil, interfaces.ErrSignupInvalidSecret - } - - if len(inp.Sub) == 0 { - return nil, nil, errors.New("sub is required") + var team *user.Team + var email, name string + var auth *user.Auth + var tx repo.Tx + isOidc := inp.Secret != nil && inp.Sub != nil + isAuth := inp.Name != nil && inp.Email != nil && inp.Password != nil + if !isAuth && !isOidc { + return } - tx, err := i.transaction.Begin() + tx, err = i.transaction.Begin() if err != nil { - return + return nil, nil, err } defer func() { if err2 := tx.End(ctx); err == nil && err2 != nil { @@ -95,25 +152,43 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
} }() - // Check if user and team already exists - existed, err := i.userRepo.FindByAuth0Sub(ctx, inp.Sub) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return nil, nil, err - } - if existed != nil { - return nil, nil, errors.New("existed user") - } + if isOidc { + // Auth0 + if i.signupSecret != "" && *inp.Secret != i.signupSecret { + return nil, nil, interfaces.ErrSignupInvalidSecret + } - if inp.UserID != nil { - existed, err := i.userRepo.FindByID(ctx, *inp.UserID) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return nil, nil, err + if len(*inp.Sub) == 0 { + return nil, nil, errors.New("sub is required") } - if existed != nil { - return nil, nil, errors.New("existed user") + name, email, auth, err = i.oidcSignup(ctx, inp) + if err != nil { + return + } + + } else if isAuth { + if *inp.Name == "" { + return nil, nil, interfaces.ErrSignupInvalidName + } + if _, err := mail.ParseAddress(*inp.Email); err != nil { + return nil, nil, interfaces.ErrInvalidUserEmail + } + if *inp.Password == "" { + return nil, nil, interfaces.ErrSignupInvalidPassword + } + + var unverifiedUser *user.User + var unverifiedTeam *user.Team + name, email, unverifiedUser, unverifiedTeam, err = i.reearthSignup(ctx, inp) + if err != nil { + return + } + if unverifiedUser != nil && unverifiedTeam != nil { + return unverifiedUser, unverifiedTeam, nil } } + // Check if team already exists if inp.TeamID != nil { existed, err := i.teamRepo.FindByID(ctx, *inp.TeamID) if err != nil && !errors.Is(err, rerror.ErrNotFound) { @@ -124,27 +199,12 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
} } - // Fetch user info - ui, err := i.authenticator.FetchUser(inp.Sub) - if err != nil { - return nil, nil, err - } - - // Check if user and team already exists - var team *user.Team - existed, err = i.userRepo.FindByEmail(ctx, ui.Email) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return nil, nil, err - } - if existed != nil { - return nil, nil, errors.New("existed user") - } - // Initialize user and team u, team, err = user.Init(user.InitParams{ - Email: ui.Email, - Name: ui.Name, - Auth0Sub: inp.Sub, + Email: email, + Name: name, + Sub: auth, + Password: *inp.Password, Lang: inp.Lang, Theme: inp.Theme, UserID: inp.UserID, @@ -159,11 +219,189 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. if err := i.teamRepo.Save(ctx, team); err != nil { return nil, nil, err } + if tx != nil { + tx.Commit() + } - tx.Commit() return u, team, nil } +func (i *User) reearthSignup(ctx context.Context, inp interfaces.SignupParam) (string, string, *user.User, *user.Team, error) { + // Check if user email already exists + existed, err := i.userRepo.FindByEmail(ctx, *inp.Email) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, nil, err + } + + if existed != nil { + if existed.Verification().IsVerified() { + return "", "", nil, nil, errors.New("existed user email") + } else { + // if user exists but not verified -> create a new verification + if err := i.CreateVerification(ctx, *inp.Email); err != nil { + return "", "", nil, nil, err + } else { + team, err := i.teamRepo.FindByID(ctx, existed.Team()) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, nil, err + } + return "", "", existed, team, nil + } + } + } + + return *inp.Name, *inp.Email, nil, nil, nil +} + +func (i *User) oidcSignup(ctx context.Context, inp interfaces.SignupParam) (string, string, *user.Auth, error) { + // Check if user already exists + existed, err := i.userRepo.FindByAuth0Sub(ctx, *inp.Sub) + if err != nil 
&& !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, err + } + if existed != nil { + return "", "", nil, errors.New("existed user") + } + + if inp.UserID != nil { + existed, err := i.userRepo.FindByID(ctx, *inp.UserID) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, err + } + if existed != nil { + return "", "", nil, errors.New("existed user") + } + } + + // Fetch user info + ui, err := i.authenticator.FetchUser(*inp.Sub) + if err != nil { + return "", "", nil, err + } + + // Check if user and team already exists + existed, err = i.userRepo.FindByEmail(ctx, ui.Email) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, err + } + if existed != nil { + return "", "", nil, errors.New("existed user") + } + + return ui.Name, ui.Email, user.AuthFromAuth0Sub(*inp.Sub).Ref(), nil +} + +func (i *User) GetUserByCredentials(ctx context.Context, inp interfaces.GetUserByCredentials) (u *user.User, err error) { + u, err = i.userRepo.FindByNameOrEmail(ctx, inp.Email) + if err != nil && !errors.Is(rerror.ErrNotFound, err) { + return nil, err + } else if u == nil { + return nil, interfaces.ErrInvalidUserEmail + } + matched, err := u.MatchPassword(inp.Password) + if err != nil { + return nil, err + } + if !matched { + return nil, interfaces.ErrSignupInvalidPassword + } + return u, nil +} + +func (i *User) GetUserBySubject(ctx context.Context, sub string) (u *user.User, err error) { + u, err = i.userRepo.FindByAuth0Sub(ctx, sub) + if err != nil { + return nil, err + } + return u, nil +} + +func (i *User) StartPasswordReset(ctx context.Context, email string) error { + tx, err := i.transaction.Begin() + if err != nil { + return err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + u, err := i.userRepo.FindByEmail(ctx, email) + if err != nil { + return err + } + + pr := user.NewPasswordReset() + u.SetPasswordReset(pr) + + if err := i.userRepo.Save(ctx, u); err != nil { 
+ return err + } + + var TextOut, HTMLOut bytes.Buffer + link := i.authSrvUIDomain + "/?pwd-reset-token=" + pr.Token + passwordResetMailContent.UserName = u.Name() + passwordResetMailContent.ActionURL = htmlTmpl.URL(link) + + if err := authTextTMPL.Execute(&TextOut, passwordResetMailContent); err != nil { + return err + } + if err := authHTMLTMPL.Execute(&HTMLOut, passwordResetMailContent); err != nil { + return err + } + + err = i.mailer.SendMail([]gateway.Contact{ + { + Email: u.Email(), + Name: u.Name(), + }, + }, "Password reset", TextOut.String(), HTMLOut.String()) + if err != nil { + return err + } + + tx.Commit() + return nil +} + +func (i *User) PasswordReset(ctx context.Context, password, token string) error { + tx, err := i.transaction.Begin() + if err != nil { + return err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + u, err := i.userRepo.FindByPasswordResetRequest(ctx, token) + if err != nil { + return err + } + + passwordReset := u.PasswordReset() + ok := passwordReset.Validate(token) + + if !ok { + return interfaces.ErrUserInvalidPasswordReset + } + + u.SetPasswordReset(nil) + + if err := u.SetPassword(password); err != nil { + return err + } + + if err := i.userRepo.Save(ctx, u); err != nil { + return err + } + + tx.Commit() + return nil +} + func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operator *usecase.Operator) (u *user.User, err error) { if err := i.OnlyOperator(operator); err != nil { return nil, err @@ -375,3 +613,67 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase tx.Commit() return nil } + +func (i *User) CreateVerification(ctx context.Context, email string) error { + tx, err := i.transaction.Begin() + if err != nil { + return err + } + u, err := i.userRepo.FindByEmail(ctx, email) + if err != nil { + return err + } + + vr := user.NewVerification() + u.SetVerification(vr) + err = i.userRepo.Save(ctx, u) + if err != nil { + 
return err + } + + var TextOut, HTMLOut bytes.Buffer + link := i.authSrvUIDomain + "/?user-verification-token=" + vr.Code() + signupMailContent.UserName = email + signupMailContent.ActionURL = htmlTmpl.URL(link) + + if err := authTextTMPL.Execute(&TextOut, signupMailContent); err != nil { + return err + } + if err := authHTMLTMPL.Execute(&HTMLOut, signupMailContent); err != nil { + return err + } + + err = i.mailer.SendMail([]gateway.Contact{ + { + Email: u.Email(), + Name: u.Name(), + }, + }, "email verification", TextOut.String(), HTMLOut.String()) + if err != nil { + return err + } + tx.Commit() + return nil +} + +func (i *User) VerifyUser(ctx context.Context, code string) (*user.User, error) { + tx, err := i.transaction.Begin() + if err != nil { + return nil, err + } + u, err := i.userRepo.FindByVerification(ctx, code) + if err != nil { + return nil, err + } + if u.Verification().IsExpired() { + return nil, errors.New("verification expired") + } + u.Verification().SetVerified(true) + err = i.userRepo.Save(ctx, u) + if err != nil { + return nil, err + } + + tx.Commit() + return u, nil +} diff --git a/internal/usecase/interfaces/user.go b/internal/usecase/interfaces/user.go index c933d8961..acca0bb50 100644 --- a/internal/usecase/interfaces/user.go +++ b/internal/usecase/interfaces/user.go @@ -13,17 +13,29 @@ import ( var ( ErrUserInvalidPasswordConfirmation = errors.New("invalid password confirmation") + ErrUserInvalidPasswordReset = errors.New("invalid password reset request") ErrUserInvalidLang = errors.New("invalid lang") ErrSignupInvalidSecret = errors.New("invalid secret") + ErrSignupInvalidName = errors.New("invalid name") + ErrInvalidUserEmail = errors.New("invalid email") + ErrSignupInvalidPassword = errors.New("invalid password") ) type SignupParam struct { - Sub string - Lang *language.Tag - Theme *user.Theme - UserID *id.UserID - TeamID *id.TeamID - Secret string + Sub *string + UserID *id.UserID + Secret *string + Name *string + Email *string + 
Password *string + Lang *language.Tag + Theme *user.Theme + TeamID *id.TeamID +} + +type GetUserByCredentials struct { + Email string + Password string } type UpdateMeParam struct { @@ -38,6 +50,12 @@ type UpdateMeParam struct { type User interface { Fetch(context.Context, []id.UserID, *usecase.Operator) ([]*user.User, error) Signup(context.Context, SignupParam) (*user.User, *user.Team, error) + CreateVerification(context.Context, string) error + VerifyUser(context.Context, string) (*user.User, error) + GetUserByCredentials(context.Context, GetUserByCredentials) (*user.User, error) + GetUserBySubject(context.Context, string) (*user.User, error) + StartPasswordReset(context.Context, string) error + PasswordReset(context.Context, string, string) error UpdateMe(context.Context, UpdateMeParam, *usecase.Operator) (*user.User, error) RemoveMyAuth(context.Context, string, *usecase.Operator) (*user.User, error) SearchUser(context.Context, string, *usecase.Operator) (*user.User, error) diff --git a/internal/usecase/repo/auth_request.go b/internal/usecase/repo/auth_request.go new file mode 100644 index 000000000..378926bb2 --- /dev/null +++ b/internal/usecase/repo/auth_request.go @@ -0,0 +1,16 @@ +package repo + +import ( + "context" + + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/reearth/reearth-backend/pkg/id" +) + +type AuthRequest interface { + FindByID(context.Context, id.AuthRequestID) (*auth.Request, error) + FindByCode(context.Context, string) (*auth.Request, error) + FindBySubject(context.Context, string) (*auth.Request, error) + Save(context.Context, *auth.Request) error + Remove(context.Context, id.AuthRequestID) error +} diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index 39ea3717d..e29c96baf 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -2,6 +2,7 @@ package repo type Container struct { Asset Asset + AuthRequest AuthRequest Config Config DatasetSchema 
DatasetSchema Dataset Dataset diff --git a/internal/usecase/repo/user.go b/internal/usecase/repo/user.go index 2b4152785..fdc3f2769 100644 --- a/internal/usecase/repo/user.go +++ b/internal/usecase/repo/user.go @@ -13,6 +13,8 @@ type User interface { FindByAuth0Sub(context.Context, string) (*user.User, error) FindByEmail(context.Context, string) (*user.User, error) FindByNameOrEmail(context.Context, string) (*user.User, error) + FindByVerification(context.Context, string) (*user.User, error) + FindByPasswordResetRequest(context.Context, string) (*user.User, error) Save(context.Context, *user.User) error Remove(context.Context, id.UserID) error } diff --git a/pkg/auth/builder.go b/pkg/auth/builder.go new file mode 100644 index 000000000..9eed6e642 --- /dev/null +++ b/pkg/auth/builder.go @@ -0,0 +1,102 @@ +package auth + +import ( + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/id" +) + +type RequestBuilder struct { + r *Request +} + +func NewRequest() *RequestBuilder { + return &RequestBuilder{r: &Request{}} +} + +func (b *RequestBuilder) Build() (*Request, error) { + if id.ID(b.r.id).IsNil() { + return nil, id.ErrInvalidID + } + b.r.createdAt = time.Now() + return b.r, nil +} + +func (b *RequestBuilder) MustBuild() *Request { + r, err := b.Build() + if err != nil { + panic(err) + } + return r +} + +func (b *RequestBuilder) ID(id id.AuthRequestID) *RequestBuilder { + b.r.id = id + return b +} + +func (b *RequestBuilder) NewID() *RequestBuilder { + b.r.id = id.AuthRequestID(id.New()) + return b +} + +func (b *RequestBuilder) ClientID(id string) *RequestBuilder { + b.r.clientID = id + return b +} + +func (b *RequestBuilder) Subject(subject string) *RequestBuilder { + b.r.subject = subject + return b +} + +func (b *RequestBuilder) Code(code string) *RequestBuilder { + b.r.code = code + return b +} + +func (b *RequestBuilder) State(state string) *RequestBuilder { + b.r.state = state + return b +} + +func (b *RequestBuilder) 
ResponseType(rt oidc.ResponseType) *RequestBuilder { + b.r.responseType = rt + return b +} + +func (b *RequestBuilder) Scopes(scopes []string) *RequestBuilder { + b.r.scopes = scopes + return b +} + +func (b *RequestBuilder) Audiences(audiences []string) *RequestBuilder { + b.r.audiences = audiences + return b +} + +func (b *RequestBuilder) RedirectURI(redirectURI string) *RequestBuilder { + b.r.redirectURI = redirectURI + return b +} + +func (b *RequestBuilder) Nonce(nonce string) *RequestBuilder { + b.r.nonce = nonce + return b +} + +func (b *RequestBuilder) CodeChallenge(CodeChallenge *oidc.CodeChallenge) *RequestBuilder { + b.r.codeChallenge = CodeChallenge + return b +} + +func (b *RequestBuilder) CreatedAt(createdAt time.Time) *RequestBuilder { + b.r.createdAt = createdAt + return b +} + +func (b *RequestBuilder) AuthorizedAt(authorizedAt *time.Time) *RequestBuilder { + b.r.authorizedAt = authorizedAt + return b +} diff --git a/pkg/auth/client.go b/pkg/auth/client.go new file mode 100644 index 000000000..930757100 --- /dev/null +++ b/pkg/auth/client.go @@ -0,0 +1,115 @@ +package auth + +import ( + "fmt" + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/caos/oidc/pkg/op" +) + +type Client struct { + id string + applicationType op.ApplicationType + authMethod oidc.AuthMethod + accessTokenType op.AccessTokenType + responseTypes []oidc.ResponseType + grantTypes []oidc.GrantType + allowedScopes []string + redirectURIs []string + logoutRedirectURIs []string + loginURI string + idTokenLifetime time.Duration + clockSkew time.Duration + devMode bool +} + +func NewLocalClient(devMode bool) op.Client { + return &Client{ + id: "01FH69GFQ4DFCXS5XD91JK4HZ1", + applicationType: op.ApplicationTypeWeb, + authMethod: oidc.AuthMethodNone, + accessTokenType: op.AccessTokenTypeJWT, + responseTypes: []oidc.ResponseType{oidc.ResponseTypeCode}, + grantTypes: []oidc.GrantType{oidc.GrantTypeCode, oidc.GrantTypeRefreshToken}, + redirectURIs: []string{"http://localhost:3000"}, 
+ allowedScopes: []string{"openid", "profile", "email"}, + loginURI: "http://localhost:3000/login?id=%s", + idTokenLifetime: 5 * time.Minute, + clockSkew: 0, + devMode: devMode, + } +} + +func (c *Client) GetID() string { + return c.id +} + +func (c *Client) RedirectURIs() []string { + return c.redirectURIs +} + +func (c *Client) PostLogoutRedirectURIs() []string { + return c.logoutRedirectURIs +} + +func (c *Client) LoginURL(id string) string { + return fmt.Sprintf(c.loginURI, id) +} + +func (c *Client) ApplicationType() op.ApplicationType { + return c.applicationType +} + +func (c *Client) AuthMethod() oidc.AuthMethod { + return c.authMethod +} + +func (c *Client) IDTokenLifetime() time.Duration { + return c.idTokenLifetime +} + +func (c *Client) AccessTokenType() op.AccessTokenType { + return c.accessTokenType +} + +func (c *Client) ResponseTypes() []oidc.ResponseType { + return c.responseTypes +} + +func (c *Client) GrantTypes() []oidc.GrantType { + return c.grantTypes +} + +func (c *Client) DevMode() bool { + return c.devMode +} + +func (c *Client) RestrictAdditionalIdTokenScopes() func(scopes []string) []string { + return func(scopes []string) []string { + return scopes + } +} + +func (c *Client) RestrictAdditionalAccessTokenScopes() func(scopes []string) []string { + return func(scopes []string) []string { + return scopes + } +} + +func (c *Client) IsScopeAllowed(scope string) bool { + for _, clientScope := range c.allowedScopes { + if clientScope == scope { + return true + } + } + return false +} + +func (c *Client) IDTokenUserinfoClaimsAssertion() bool { + return false +} + +func (c *Client) ClockSkew() time.Duration { + return c.clockSkew +} diff --git a/pkg/auth/request.go b/pkg/auth/request.go new file mode 100644 index 000000000..c2645b3b0 --- /dev/null +++ b/pkg/auth/request.go @@ -0,0 +1,143 @@ +package auth + +import ( + "time" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/id" +) + +var essentialScopes = 
[]string{"openid", "profile", "email"} + +type Request struct { + id id.AuthRequestID + clientID string + subject string + code string + state string + responseType oidc.ResponseType + scopes []string + audiences []string + redirectURI string + nonce string + codeChallenge *oidc.CodeChallenge + createdAt time.Time + authorizedAt *time.Time +} + +func (a *Request) ID() id.AuthRequestID { + return a.id +} + +func (a *Request) GetID() string { + return a.id.String() +} + +func (a *Request) GetACR() string { + return "" +} + +func (a *Request) GetAMR() []string { + return []string{ + "password", + } +} + +func (a *Request) GetAudience() []string { + if a.audiences == nil { + return make([]string, 0) + } + + return a.audiences +} + +func (a *Request) GetAuthTime() time.Time { + return a.createdAt +} + +func (a *Request) GetClientID() string { + return a.clientID +} + +func (a *Request) GetResponseMode() oidc.ResponseMode { + // TODO make sure about this + return oidc.ResponseModeQuery +} + +func (a *Request) GetCode() string { + return a.code +} + +func (a *Request) GetState() string { + return a.state +} + +func (a *Request) GetCodeChallenge() *oidc.CodeChallenge { + return a.codeChallenge +} + +func (a *Request) GetNonce() string { + return a.nonce +} + +func (a *Request) GetRedirectURI() string { + return a.redirectURI +} + +func (a *Request) GetResponseType() oidc.ResponseType { + return a.responseType +} + +func (a *Request) GetScopes() []string { + return unique(append(a.scopes, essentialScopes...)) +} + +func (a *Request) SetCurrentScopes(scopes []string) { + a.scopes = unique(append(scopes, essentialScopes...)) +} + +func (a *Request) GetSubject() string { + return a.subject +} + +func (a *Request) CreatedAt() time.Time { + return a.createdAt +} + +func (a *Request) SetCreatedAt(createdAt time.Time) { + a.createdAt = createdAt +} + +func (a *Request) AuthorizedAt() *time.Time { + return a.authorizedAt +} + +func (a *Request) SetAuthorizedAt(authorizedAt 
*time.Time) { + a.authorizedAt = authorizedAt +} + +func (a *Request) Done() bool { + return a.authorizedAt != nil +} + +func (a *Request) Complete(sub string) { + a.subject = sub + now := time.Now() + a.authorizedAt = &now +} + +func (a *Request) SetCode(code string) { + a.code = code +} + +func unique(list []string) []string { + allKeys := make(map[string]struct{}) + var uniqueList []string + for _, item := range list { + if _, ok := allKeys[item]; !ok { + allKeys[item] = struct{}{} + uniqueList = append(uniqueList, item) + } + } + return uniqueList +} diff --git a/pkg/config/config.go b/pkg/config/config.go index fd069478a..a0f48115f 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -4,6 +4,12 @@ import "sort" type Config struct { Migration int64 + Auth *Auth +} + +type Auth struct { + Cert string + Key string } func (c *Config) NextMigrations(migrations []int64) []int64 { diff --git a/pkg/id/auth_request_gen.go b/pkg/id/auth_request_gen.go new file mode 100644 index 000000000..76a36140a --- /dev/null +++ b/pkg/id/auth_request_gen.go @@ -0,0 +1,297 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import "encoding/json" + +// AuthRequestID is an ID for AuthRequest. +type AuthRequestID ID + +// NewAuthRequestID generates a new AuthRequestId. +func NewAuthRequestID() AuthRequestID { + return AuthRequestID(New()) +} + +// AuthRequestIDFrom generates a new AuthRequestID from a string. +func AuthRequestIDFrom(i string) (nid AuthRequestID, err error) { + var did ID + did, err = FromID(i) + if err != nil { + return + } + nid = AuthRequestID(did) + return +} + +// MustAuthRequestID generates a new AuthRequestID from a string, but panics if the string cannot be parsed. +func MustAuthRequestID(i string) AuthRequestID { + did, err := FromID(i) + if err != nil { + panic(err) + } + return AuthRequestID(did) +} + +// AuthRequestIDFromRef generates a new AuthRequestID from a string ref. 
+func AuthRequestIDFromRef(i *string) *AuthRequestID { + did := FromIDRef(i) + if did == nil { + return nil + } + nid := AuthRequestID(*did) + return &nid +} + +// AuthRequestIDFromRefID generates a new AuthRequestID from a ref of a generic ID. +func AuthRequestIDFromRefID(i *ID) *AuthRequestID { + if i == nil { + return nil + } + nid := AuthRequestID(*i) + return &nid +} + +// ID returns a domain ID. +func (d AuthRequestID) ID() ID { + return ID(d) +} + +// String returns a string representation. +func (d AuthRequestID) String() string { + return ID(d).String() +} + +// GoString implements fmt.GoStringer interface. +func (d AuthRequestID) GoString() string { + return "id.AuthRequestID(" + d.String() + ")" +} + +// RefString returns a reference of string representation. +func (d AuthRequestID) RefString() *string { + id := ID(d).String() + return &id +} + +// Ref returns a reference. +func (d AuthRequestID) Ref() *AuthRequestID { + d2 := d + return &d2 +} + +// Contains returns whether the id is contained in the slice. +func (d AuthRequestID) Contains(ids []AuthRequestID) bool { + for _, i := range ids { + if d.ID().Equal(i.ID()) { + return true + } + } + return false +} + +// CopyRef returns a copy of a reference. +func (d *AuthRequestID) CopyRef() *AuthRequestID { + if d == nil { + return nil + } + d2 := *d + return &d2 +} + +// IDRef returns a reference of a domain id. +func (d *AuthRequestID) IDRef() *ID { + if d == nil { + return nil + } + id := ID(*d) + return &id +} + +// StringRef returns a reference of a string representation. 
+func (d *AuthRequestID) StringRef() *string { + if d == nil { + return nil + } + id := ID(*d).String() + return &id +} + +// MarhsalJSON implements json.Marhsaler interface +func (d *AuthRequestID) MarhsalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarhsalJSON implements json.Unmarshaler interface +func (d *AuthRequestID) UnmarhsalJSON(bs []byte) (err error) { + var idstr string + if err = json.Unmarshal(bs, &idstr); err != nil { + return + } + *d, err = AuthRequestIDFrom(idstr) + return +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *AuthRequestID) MarshalText() ([]byte, error) { + if d == nil { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (d *AuthRequestID) UnmarshalText(text []byte) (err error) { + *d, err = AuthRequestIDFrom(string(text)) + return +} + +// Ref returns true if a ID is nil or zero-value +func (d AuthRequestID) IsNil() bool { + return ID(d).IsNil() +} + +// AuthRequestIDToKeys converts IDs into a string slice. +func AuthRequestIDToKeys(ids []AuthRequestID) []string { + keys := make([]string, 0, len(ids)) + for _, i := range ids { + keys = append(keys, i.String()) + } + return keys +} + +// AuthRequestIDsFrom converts a string slice into a ID slice. +func AuthRequestIDsFrom(ids []string) ([]AuthRequestID, error) { + dids := make([]AuthRequestID, 0, len(ids)) + for _, i := range ids { + did, err := AuthRequestIDFrom(i) + if err != nil { + return nil, err + } + dids = append(dids, did) + } + return dids, nil +} + +// AuthRequestIDsFromID converts a generic ID slice into a ID slice. +func AuthRequestIDsFromID(ids []ID) []AuthRequestID { + dids := make([]AuthRequestID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, AuthRequestID(i)) + } + return dids +} + +// AuthRequestIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
+func AuthRequestIDsFromIDRef(ids []*ID) []AuthRequestID { + dids := make([]AuthRequestID, 0, len(ids)) + for _, i := range ids { + if i != nil { + dids = append(dids, AuthRequestID(*i)) + } + } + return dids +} + +// AuthRequestIDsToID converts a ID slice into a generic ID slice. +func AuthRequestIDsToID(ids []AuthRequestID) []ID { + dids := make([]ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.ID()) + } + return dids +} + +// AuthRequestIDsToIDRef converts a ID ref slice into a generic ID ref slice. +func AuthRequestIDsToIDRef(ids []*AuthRequestID) []*ID { + dids := make([]*ID, 0, len(ids)) + for _, i := range ids { + dids = append(dids, i.IDRef()) + } + return dids +} + +// AuthRequestIDSet represents a set of AuthRequestIDs +type AuthRequestIDSet struct { + m map[AuthRequestID]struct{} + s []AuthRequestID +} + +// NewAuthRequestIDSet creates a new AuthRequestIDSet +func NewAuthRequestIDSet() *AuthRequestIDSet { + return &AuthRequestIDSet{} +} + +// Add adds a new ID if it does not exists in the set +func (s *AuthRequestIDSet) Add(p ...AuthRequestID) { + if s == nil || p == nil { + return + } + if s.m == nil { + s.m = map[AuthRequestID]struct{}{} + } + for _, i := range p { + if _, ok := s.m[i]; !ok { + if s.s == nil { + s.s = []AuthRequestID{} + } + s.m[i] = struct{}{} + s.s = append(s.s, i) + } + } +} + +// AddRef adds a new ID ref if it does not exists in the set +func (s *AuthRequestIDSet) AddRef(p *AuthRequestID) { + if s == nil || p == nil { + return + } + s.Add(*p) +} + +// Has checks if the ID exists in the set +func (s *AuthRequestIDSet) Has(p AuthRequestID) bool { + if s == nil || s.m == nil { + return false + } + _, ok := s.m[p] + return ok +} + +// Clear clears all stored IDs +func (s *AuthRequestIDSet) Clear() { + if s == nil { + return + } + s.m = nil + s.s = nil +} + +// All returns stored all IDs as a slice +func (s *AuthRequestIDSet) All() []AuthRequestID { + if s == nil { + return nil + } + return append([]AuthRequestID{}, 
s.s...) +} + +// Clone returns a cloned set +func (s *AuthRequestIDSet) Clone() *AuthRequestIDSet { + if s == nil { + return NewAuthRequestIDSet() + } + s2 := NewAuthRequestIDSet() + s2.Add(s.s...) + return s2 +} + +// Merge returns a merged set +func (s *AuthRequestIDSet) Merge(s2 *AuthRequestIDSet) *AuthRequestIDSet { + if s == nil { + return nil + } + s3 := s.Clone() + if s2 == nil { + return s3 + } + s3.Add(s2.s...) + return s3 +} diff --git a/pkg/id/auth_request_gen_test.go b/pkg/id/auth_request_gen_test.go new file mode 100644 index 000000000..5f84e7592 --- /dev/null +++ b/pkg/id/auth_request_gen_test.go @@ -0,0 +1,1011 @@ +// Code generated by gen, DO NOT EDIT. + +package id + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/oklog/ulid" + "github.com/stretchr/testify/assert" +) + +func TestNewAuthRequestID(t *testing.T) { + id := NewAuthRequestID() + assert.NotNil(t, id) + ulID, err := ulid.Parse(id.String()) + + assert.NotNil(t, ulID) + assert.Nil(t, err) +} + +func TestAuthRequestIDFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + expected struct { + result AuthRequestID + err error + } + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: struct { + result AuthRequestID + err error + }{ + AuthRequestID{}, + ErrInvalidID, + }, + }, + { + name: "Fail:Not valid string", + input: "", + expected: struct { + result AuthRequestID + err error + }{ + AuthRequestID{}, + ErrInvalidID, + }, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: struct { + result AuthRequestID + err error + }{ + AuthRequestID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + nil, + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result, err := AuthRequestIDFrom(tc.input) + assert.Equal(tt, tc.expected.result, result) + if err != nil { + assert.True(tt, errors.As(tc.expected.err, &err)) + } + }) + } +} 
+ +func TestMustAuthRequestID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input string + shouldPanic bool + expected AuthRequestID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + shouldPanic: true, + }, + { + name: "Fail:Not valid string", + input: "", + shouldPanic: true, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + shouldPanic: false, + expected: AuthRequestID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.shouldPanic { + assert.Panics(tt, func() { MustAuthRequestID(tc.input) }) + return + } + result := MustAuthRequestID(tc.input) + assert.Equal(tt, tc.expected, result) + }) + } +} + +func TestAuthRequestIDFromRef(t *testing.T) { + testCases := []struct { + name string + input string + expected *AuthRequestID + }{ + { + name: "Fail:Not valid string", + input: "testMustFail", + expected: nil, + }, + { + name: "Fail:Not valid string", + input: "", + expected: nil, + }, + { + name: "success:valid string", + input: "01f2r7kg1fvvffp0gmexgy5hxy", + expected: &AuthRequestID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + result := AuthRequestIDFromRef(&tc.input) + assert.Equal(tt, tc.expected, result) + if tc.expected != nil { + assert.Equal(tt, *tc.expected, *result) + } + }) + } +} + +func TestAuthRequestIDFromRefID(t *testing.T) { + id := New() + + subId := AuthRequestIDFromRefID(&id) + + assert.NotNil(t, subId) + assert.Equal(t, subId.id, id.id) +} + +func TestAuthRequestID_ID(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + idOrg := subId.ID() + + assert.Equal(t, id, idOrg) +} + +func TestAuthRequestID_String(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + assert.Equal(t, subId.String(), id.String()) +} + +func 
TestAuthRequestID_GoString(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + assert.Equal(t, subId.GoString(), "id.AuthRequestID("+id.String()+")") +} + +func TestAuthRequestID_RefString(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + refString := subId.StringRef() + + assert.NotNil(t, refString) + assert.Equal(t, *refString, id.String()) +} + +func TestAuthRequestID_Ref(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + subIdRef := subId.Ref() + + assert.Equal(t, *subId, *subIdRef) +} + +func TestAuthRequestID_Contains(t *testing.T) { + id := NewAuthRequestID() + id2 := NewAuthRequestID() + assert.True(t, id.Contains([]AuthRequestID{id, id2})) + assert.False(t, id.Contains([]AuthRequestID{id2})) +} + +func TestAuthRequestID_CopyRef(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + subIdCopyRef := subId.CopyRef() + + assert.Equal(t, *subId, *subIdCopyRef) + assert.NotSame(t, subId, subIdCopyRef) +} + +func TestAuthRequestID_IDRef(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + assert.Equal(t, id, *subId.IDRef()) +} + +func TestAuthRequestID_StringRef(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + assert.Equal(t, *subId.StringRef(), id.String()) +} + +func TestAuthRequestID_MarhsalJSON(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + res, err := subId.MarhsalJSON() + exp, _ := json.Marshal(subId.String()) + + assert.Nil(t, err) + assert.Equal(t, exp, res) +} + +func TestAuthRequestID_UnmarhsalJSON(t *testing.T) { + jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" + + subId := &AuthRequestID{} + + err := subId.UnmarhsalJSON([]byte(jsonString)) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) +} + +func TestAuthRequestID_MarshalText(t *testing.T) { + id := New() + subId := AuthRequestIDFromRefID(&id) + + res, err := subId.MarshalText() + + assert.Nil(t, err) + 
assert.Equal(t, []byte(id.String()), res) +} + +func TestAuthRequestID_UnmarshalText(t *testing.T) { + text := []byte("01f3zhcaq35403zdjnd6dcm0t2") + + subId := &AuthRequestID{} + + err := subId.UnmarshalText(text) + + assert.Nil(t, err) + assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) + +} + +func TestAuthRequestID_IsNil(t *testing.T) { + subId := AuthRequestID{} + + assert.True(t, subId.IsNil()) + + id := New() + subId = *AuthRequestIDFromRefID(&id) + + assert.False(t, subId.IsNil()) +} + +func TestAuthRequestIDToKeys(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []AuthRequestID + expected []string + }{ + { + name: "Empty slice", + input: make([]AuthRequestID, 0), + expected: make([]string, 0), + }, + { + name: "1 element", + input: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + }, + { + name: "multiple elements", + input: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, AuthRequestIDToKeys(tc.input)) + }) + } + +} + +func TestAuthRequestIDsFrom(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []string + expected struct { + res []AuthRequestID + err error + } + }{ + { + name: "Empty slice", + input: make([]string, 0), + expected: struct { + res []AuthRequestID + err error + }{ + res: make([]AuthRequestID, 0), + err: nil, + }, + }, + { + name: "1 element", + input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, + expected: struct { + res []AuthRequestID + err error + }{ + res: 
[]AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, + err: nil, + }, + }, + { + name: "multiple elements", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "01f3zhcaq35403zdjnd6dcm0t2", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []AuthRequestID + err error + }{ + res: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + err: nil, + }, + }, + { + name: "Fail:Not valid string", + input: []string{ + "01f3zhcaq35403zdjnd6dcm0t1", + "testMustFail", + "01f3zhcaq35403zdjnd6dcm0t3", + }, + expected: struct { + res []AuthRequestID + err error + }{ + res: nil, + err: ErrInvalidID, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + if tc.expected.err != nil { + _, err := AuthRequestIDsFrom(tc.input) + assert.True(tt, errors.Is(err, ErrInvalidID)) + } else { + res, err := AuthRequestIDsFrom(tc.input) + assert.Equal(tt, tc.expected.res, res) + assert.Nil(tt, err) + } + + }) + } +} + +func TestAuthRequestIDsFromID(t *testing.T) { + t.Parallel() + testCases := []struct { + name string + input []ID + expected []AuthRequestID + }{ + { + name: "Empty slice", + input: make([]ID, 0), + expected: make([]AuthRequestID, 0), + }, + { + name: "1 element", + input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + 
tt.Parallel() + + res := AuthRequestIDsFromID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestAuthRequestIDsFromIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + + testCases := []struct { + name string + input []*ID + expected []AuthRequestID + }{ + { + name: "Empty slice", + input: make([]*ID, 0), + expected: make([]AuthRequestID, 0), + }, + { + name: "1 element", + input: []*ID{&id1}, + expected: []AuthRequestID{MustAuthRequestID(id1.String())}, + }, + { + name: "multiple elements", + input: []*ID{&id1, &id2, &id3}, + expected: []AuthRequestID{ + MustAuthRequestID(id1.String()), + MustAuthRequestID(id2.String()), + MustAuthRequestID(id3.String()), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AuthRequestIDsFromIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestAuthRequestIDsToID(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []AuthRequestID + expected []ID + }{ + { + name: "Empty slice", + input: make([]AuthRequestID, 0), + expected: make([]ID, 0), + }, + { + name: "1 element", + input: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + { + name: "multiple elements", + input: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: []ID{ + MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), + MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AuthRequestIDsToID(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + 
+func TestAuthRequestIDsToIDRef(t *testing.T) { + t.Parallel() + + id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") + subId1 := MustAuthRequestID(id1.String()) + id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") + subId2 := MustAuthRequestID(id2.String()) + id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") + subId3 := MustAuthRequestID(id3.String()) + + testCases := []struct { + name string + input []*AuthRequestID + expected []*ID + }{ + { + name: "Empty slice", + input: make([]*AuthRequestID, 0), + expected: make([]*ID, 0), + }, + { + name: "1 element", + input: []*AuthRequestID{&subId1}, + expected: []*ID{&id1}, + }, + { + name: "multiple elements", + input: []*AuthRequestID{&subId1, &subId2, &subId3}, + expected: []*ID{&id1, &id2, &id3}, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + res := AuthRequestIDsToIDRef(tc.input) + assert.Equal(tt, tc.expected, res) + }) + } +} + +func TestNewAuthRequestIDSet(t *testing.T) { + AuthRequestIdSet := NewAuthRequestIDSet() + + assert.NotNil(t, AuthRequestIdSet) + assert.Empty(t, AuthRequestIdSet.m) + assert.Empty(t, AuthRequestIdSet.s) +} + +func TestAuthRequestIDSet_Add(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input []AuthRequestID + expected *AuthRequestIDSet + }{ + { + name: "Empty slice", + input: make([]AuthRequestID, 0), + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{}, + s: nil, + }, + }, + { + name: "1 element", + input: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: 
&AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + { + name: "multiple elements with duplication", + input: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewAuthRequestIDSet() + set.Add(tc.input...) 
+ assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestAuthRequestIDSet_AddRef(t *testing.T) { + t.Parallel() + + AuthRequestId := MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1") + + testCases := []struct { + name string + input *AuthRequestID + expected *AuthRequestIDSet + }{ + { + name: "Empty slice", + input: nil, + expected: &AuthRequestIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "1 element", + input: &AuthRequestId, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + set := NewAuthRequestIDSet() + set.AddRef(tc.input) + assert.Equal(tt, tc.expected, set) + }) + } +} + +func TestAuthRequestIDSet_Has(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + AuthRequestIDSet + AuthRequestID + } + expected bool + }{ + { + name: "Empty Set", + input: struct { + AuthRequestIDSet + AuthRequestID + }{AuthRequestIDSet: AuthRequestIDSet{}, AuthRequestID: MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: false, + }, + { + name: "Set Contains the element", + input: struct { + AuthRequestIDSet + AuthRequestID + }{AuthRequestIDSet: AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, AuthRequestID: MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + expected: true, + }, + { + name: "Set does not Contains the element", + input: struct { + AuthRequestIDSet + AuthRequestID + }{AuthRequestIDSet: AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, AuthRequestID: MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, + expected: 
false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.expected, tc.input.AuthRequestIDSet.Has(tc.input.AuthRequestID)) + }) + } +} + +func TestAuthRequestIDSet_Clear(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input AuthRequestIDSet + expected AuthRequestIDSet + }{ + { + name: "Empty Set", + input: AuthRequestIDSet{}, + expected: AuthRequestIDSet{ + m: nil, + s: nil, + }, + }, + { + name: "Set Contains the element", + input: AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: AuthRequestIDSet{ + m: nil, + s: nil, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + set := tc.input + p := &set + p.Clear() + assert.Equal(tt, tc.expected, *p) + }) + } +} + +func TestAuthRequestIDSet_All(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *AuthRequestIDSet + expected []AuthRequestID + }{ + { + name: "Empty slice", + input: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{}, + s: nil, + }, + expected: make([]AuthRequestID, 0), + }, + { + name: "1 element", + input: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + { + name: "multiple elements", + input: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + 
MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.All()) + }) + } +} + +func TestAuthRequestIDSet_Clone(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input *AuthRequestIDSet + expected *AuthRequestIDSet + }{ + { + name: "nil set", + input: nil, + expected: NewAuthRequestIDSet(), + }, + { + name: "Empty set", + input: NewAuthRequestIDSet(), + expected: NewAuthRequestIDSet(), + }, + { + name: "1 element", + input: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "multiple elements", + input: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, + }, + s: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + 
MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + clone := tc.input.Clone() + assert.Equal(tt, tc.expected, clone) + assert.False(tt, tc.input == clone) + }) + } +} + +func TestAuthRequestIDSet_Merge(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + input struct { + a *AuthRequestIDSet + b *AuthRequestIDSet + } + expected *AuthRequestIDSet + }{ + { + name: "Empty Set", + input: struct { + a *AuthRequestIDSet + b *AuthRequestIDSet + }{ + a: &AuthRequestIDSet{}, + b: &AuthRequestIDSet{}, + }, + expected: &AuthRequestIDSet{}, + }, + { + name: "1 Empty Set", + input: struct { + a *AuthRequestIDSet + b *AuthRequestIDSet + }{ + a: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &AuthRequestIDSet{}, + }, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + }, + { + name: "2 non Empty Set", + input: struct { + a *AuthRequestIDSet + b *AuthRequestIDSet + }{ + a: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, + }, + b: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, + s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, + }, + }, + expected: &AuthRequestIDSet{ + m: map[AuthRequestID]struct{}{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, + }, + s: []AuthRequestID{ + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), + MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), + }, + }, + }, + } + + for _, tc 
:= range testCases { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) + }) + } +} diff --git a/pkg/id/gen.go b/pkg/id/gen.go index 7c4519f8a..0fb715580 100644 --- a/pkg/id/gen.go +++ b/pkg/id/gen.go @@ -11,6 +11,7 @@ //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=user_gen.go --name=User //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_field_gen.go --name=DatasetSchemaField //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=infobox_field_gen.go --name=InfoboxField +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=auth_request_gen.go --name=AuthRequest //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=tag_gen.go --name=Tag //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=cluster_gen.go --name=Cluster @@ -29,6 +30,7 @@ //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=user_gen_test.go --name=User //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_field_gen_test.go --name=DatasetSchemaField //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=infobox_field_gen_test.go --name=InfoboxField +//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=auth_request_gen_test.go --name=AuthRequest //go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=cluster_field_gen_test.go --name=Cluster package id diff --git a/pkg/log/log.go b/pkg/log/log.go index 23ed1c844..a42a6f002 100644 --- a/pkg/log/log.go +++ b/pkg/log/log.go @@ -102,3 +102,7 
@@ func Errorln(args ...interface{}) { func Fatalln(args ...interface{}) { logrus.Fatalln(args...) } + +func Panicf(format string, args ...interface{}) { + logrus.Panicf(format, args...) +} diff --git a/pkg/tag/list_test.go b/pkg/tag/list_test.go index 59208d910..bf4775bb7 100644 --- a/pkg/tag/list_test.go +++ b/pkg/tag/list_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TesIDtList_Add(t *testing.T) { +func TestIDtList_Add(t *testing.T) { tid := NewID() var tl *IDList tl.Add(tid) diff --git a/pkg/user/auth.go b/pkg/user/auth.go index 45ac7fd07..f68ab6152 100644 --- a/pkg/user/auth.go +++ b/pkg/user/auth.go @@ -1,6 +1,8 @@ package user -import "strings" +import ( + "strings" +) type Auth struct { Provider string @@ -18,3 +20,15 @@ func AuthFromAuth0Sub(sub string) Auth { func (a Auth) IsAuth0() bool { return a.Provider == "auth0" } + +func (a Auth) Ref() *Auth { + a2 := a + return &a2 +} + +func GenReearthSub(userID string) *Auth { + return &Auth{ + Provider: "reearth", + Sub: "reearth|" + userID, + } +} diff --git a/pkg/user/auth_test.go b/pkg/user/auth_test.go index 8719ede1e..ff9d7b976 100644 --- a/pkg/user/auth_test.go +++ b/pkg/user/auth_test.go @@ -3,6 +3,8 @@ package user import ( "testing" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" ) @@ -67,3 +69,28 @@ func TestAuth_IsAuth0(t *testing.T) { }) } } + +func TestGenReearthSub(t *testing.T) { + uid := id.NewUserID() + + tests := []struct { + name string + input string + want *Auth + }{ + { + name: "should return reearth sub", + input: uid.String(), + want: &Auth{ + Provider: "reearth", + Sub: "reearth|" + uid.String(), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := GenReearthSub(tt.input) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/user/builder.go b/pkg/user/builder.go index e822fb350..264eba119 100644 --- a/pkg/user/builder.go +++ b/pkg/user/builder.go @@ -5,7 +5,8 @@ import ( ) 
type Builder struct { - u *User + u *User + passwordText string } func New() *Builder { @@ -16,6 +17,11 @@ func (b *Builder) Build() (*User, error) { if b.u.id.IsNil() { return nil, ErrInvalidID } + if b.passwordText != "" { + if err := b.u.SetPassword(b.passwordText); err != nil { + return nil, ErrEncodingPassword + } + } return b.u, nil } @@ -47,6 +53,20 @@ func (b *Builder) Email(email string) *Builder { return b } +func (b *Builder) Password(p []byte) *Builder { + if p == nil { + b.u.password = nil + } else { + b.u.password = append(p[:0:0], p...) + } + return b +} + +func (b *Builder) PasswordPlainText(p string) *Builder { + b.passwordText = p + return b +} + func (b *Builder) Team(team TeamID) *Builder { b.u.team = team return b @@ -75,3 +95,13 @@ func (b *Builder) Auths(auths []Auth) *Builder { b.u.auths = append([]Auth{}, auths...) return b } + +func (b *Builder) PasswordReset(pr *PasswordReset) *Builder { + b.u.passwordReset = pr + return b +} + +func (b *Builder) Verification(v *Verification) *Builder { + b.u.verification = v + return b +} diff --git a/pkg/user/builder_test.go b/pkg/user/builder_test.go index 8699ce531..dc02a227d 100644 --- a/pkg/user/builder_test.go +++ b/pkg/user/builder_test.go @@ -2,6 +2,7 @@ package user import ( "testing" + "time" "github.com/stretchr/testify/assert" "golang.org/x/text/language" @@ -11,6 +12,7 @@ func TestBuilder_ID(t *testing.T) { uid := NewID() b := New().ID(uid).MustBuild() assert.Equal(t, uid, b.ID()) + assert.Nil(t, b.passwordReset) } func TestBuilder_Name(t *testing.T) { @@ -87,6 +89,31 @@ func TestBuilder_LangFrom(t *testing.T) { } } +func TestBuilder_PasswordReset(t *testing.T) { + testCases := []struct { + Name, Token string + CreatedAt time.Time + Expected PasswordReset + }{ + { + Name: "Test1", + Token: "xyz", + CreatedAt: time.Unix(0, 0), + Expected: PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(0, 0), + }, + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(tt *testing.T) { + 
tt.Parallel() + // u := New().NewID().PasswordReset(tc.Token, tc.CreatedAt).MustBuild() + // assert.Equal(t, tc.Expected, *u.passwordReset) + }) + } +} + func TestNew(t *testing.T) { b := New() assert.NotNil(t, b) @@ -97,12 +124,14 @@ func TestBuilder_Build(t *testing.T) { uid := NewID() tid := NewTeamID() en, _ := language.Parse("en") + pass, _ := encodePassword("pass") type args struct { Name, Lang, Email string ID ID Team TeamID Auths []Auth + PasswordBin []byte } tests := []struct { @@ -114,11 +143,12 @@ func TestBuilder_Build(t *testing.T) { { Name: "Success build user", Args: args{ - Name: "xxx", - Email: "xx@yy.zz", - Lang: "en", - ID: uid, - Team: tid, + Name: "xxx", + Email: "xx@yy.zz", + Lang: "en", + ID: uid, + Team: tid, + PasswordBin: pass, Auths: []Auth{ { Provider: "ppp", @@ -127,16 +157,18 @@ func TestBuilder_Build(t *testing.T) { }, }, Expected: &User{ - id: uid, - team: tid, - email: "xx@yy.zz", - name: "xxx", - auths: []Auth{{Provider: "ppp", Sub: "sss"}}, - lang: en, + id: uid, + team: tid, + email: "xx@yy.zz", + name: "xxx", + password: pass, + auths: []Auth{{Provider: "ppp", Sub: "sss"}}, + lang: en, }, }, { - Name: "failed invalid id", - Err: ErrInvalidID, + Name: "failed invalid id", + Expected: nil, + Err: ErrInvalidID, }, } @@ -146,6 +178,7 @@ func TestBuilder_Build(t *testing.T) { t.Parallel() res, err := New(). ID(tt.Args.ID). + Password(pass). Name(tt.Args.Name). Auths(tt.Args.Auths). LangFrom(tt.Args.Lang). 
@@ -165,11 +198,13 @@ func TestBuilder_MustBuild(t *testing.T) { uid := NewID() tid := NewTeamID() en, _ := language.Parse("en") + pass, _ := encodePassword("pass") type args struct { Name, Lang, Email string ID ID Team TeamID + PasswordBin []byte Auths []Auth } @@ -182,11 +217,12 @@ func TestBuilder_MustBuild(t *testing.T) { { Name: "Success build user", Args: args{ - Name: "xxx", - Email: "xx@yy.zz", - Lang: "en", - ID: uid, - Team: tid, + Name: "xxx", + Email: "xx@yy.zz", + Lang: "en", + ID: uid, + Team: tid, + PasswordBin: pass, Auths: []Auth{ { Provider: "ppp", @@ -195,12 +231,13 @@ func TestBuilder_MustBuild(t *testing.T) { }, }, Expected: &User{ - id: uid, - team: tid, - email: "xx@yy.zz", - name: "xxx", - auths: []Auth{{Provider: "ppp", Sub: "sss"}}, - lang: en, + id: uid, + team: tid, + email: "xx@yy.zz", + name: "xxx", + password: pass, + auths: []Auth{{Provider: "ppp", Sub: "sss"}}, + lang: en, }, }, { Name: "failed invalid id", @@ -217,6 +254,7 @@ func TestBuilder_MustBuild(t *testing.T) { t.Helper() return New(). ID(tt.Args.ID). + Password(pass). Name(tt.Args.Name). Auths(tt.Args.Auths). LangFrom(tt.Args.Lang). 
@@ -233,3 +271,37 @@ func TestBuilder_MustBuild(t *testing.T) { }) } } + +func TestBuilder_Verification(t *testing.T) { + tests := []struct { + name string + input *Verification + want *Builder + }{ + { + name: "should return verification", + input: &Verification{ + verified: true, + code: "xxx", + expiration: time.Time{}, + }, + + want: &Builder{ + u: &User{ + verification: &Verification{ + verified: true, + code: "xxx", + expiration: time.Time{}, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b := New() + b.Verification(tt.input) + assert.Equal(t, tt.want, b) + }) + } +} diff --git a/pkg/user/initializer.go b/pkg/user/initializer.go index a50d72297..eadd1756e 100644 --- a/pkg/user/initializer.go +++ b/pkg/user/initializer.go @@ -7,7 +7,8 @@ import ( type InitParams struct { Email string Name string - Auth0Sub string + Sub *Auth + Password string Lang *language.Tag Theme *Theme UserID *ID @@ -28,13 +29,17 @@ func Init(p InitParams) (*User, *Team, error) { t := ThemeDefault p.Theme = &t } + if p.Sub == nil { + p.Sub = GenReearthSub(p.UserID.String()) + } u, err := New(). ID(*p.UserID). Name(p.Name). Email(p.Email). - Auths([]Auth{AuthFromAuth0Sub(p.Auth0Sub)}). + Auths([]Auth{*p.Sub}). Lang(*p.Lang). + PasswordPlainText(p.Password). Theme(*p.Theme). 
Build() if err != nil { diff --git a/pkg/user/initializer_test.go b/pkg/user/initializer_test.go index 17b242600..f99c5e307 100644 --- a/pkg/user/initializer_test.go +++ b/pkg/user/initializer_test.go @@ -9,28 +9,35 @@ import ( func TestInit(t *testing.T) { uid := NewID() tid := NewTeamID() - + expectedSub := Auth{ + Provider: "###", + Sub: "###", + } tests := []struct { - Name, Email, Username, Sub string - UID *ID - TID *TeamID - ExpectedUser *User - ExpectedTeam *Team - Err error + Name, Email, Username string + Sub Auth + UID *ID + TID *TeamID + ExpectedUser *User + ExpectedTeam *Team + Err error }{ { Name: "Success create user", Email: "xx@yy.zz", Username: "nnn", - Sub: "###", - UID: &uid, - TID: &tid, + Sub: Auth{ + Provider: "###", + Sub: "###", + }, + UID: &uid, + TID: &tid, ExpectedUser: New(). ID(uid). Email("xx@yy.zz"). Name("nnn"). Team(tid). - Auths([]Auth{AuthFromAuth0Sub("###")}). + Auths([]Auth{expectedSub}). MustBuild(), ExpectedTeam: NewTeam(). ID(tid). @@ -44,15 +51,18 @@ func TestInit(t *testing.T) { Name: "Success nil team id", Email: "xx@yy.zz", Username: "nnn", - Sub: "###", - UID: &uid, - TID: nil, + Sub: Auth{ + Provider: "###", + Sub: "###", + }, + UID: &uid, + TID: nil, ExpectedUser: New(). ID(uid). Email("xx@yy.zz"). Name("nnn"). Team(tid). - Auths([]Auth{AuthFromAuth0Sub("###")}). + Auths([]Auth{expectedSub}). MustBuild(), ExpectedTeam: NewTeam(). NewID(). @@ -66,15 +76,18 @@ func TestInit(t *testing.T) { Name: "Success nil id", Email: "xx@yy.zz", Username: "nnn", - Sub: "###", - UID: nil, - TID: &tid, + Sub: Auth{ + Provider: "###", + Sub: "###", + }, + UID: nil, + TID: &tid, ExpectedUser: New(). NewID(). Email("xx@yy.zz"). Name("nnn"). Team(tid). - Auths([]Auth{AuthFromAuth0Sub("###")}). + Auths([]Auth{expectedSub}). MustBuild(), ExpectedTeam: NewTeam(). ID(tid). 
@@ -85,17 +98,16 @@ func TestInit(t *testing.T) { Err: nil, }, } - for _, tt := range tests { tt := tt t.Run(tt.Name, func(t *testing.T) { t.Parallel() user, team, err := Init(InitParams{ - Email: tt.Email, - Name: tt.Username, - Auth0Sub: tt.Sub, - UserID: tt.UID, - TeamID: tt.TID, + Email: tt.Email, + Name: tt.Username, + Sub: &tt.Sub, + UserID: tt.UID, + TeamID: tt.TID, }) if tt.Err == nil { assert.Equal(t, tt.ExpectedUser.Email(), user.Email()) diff --git a/pkg/user/password_reset.go b/pkg/user/password_reset.go new file mode 100644 index 000000000..6ec208723 --- /dev/null +++ b/pkg/user/password_reset.go @@ -0,0 +1,44 @@ +package user + +import ( + "time" + + "github.com/google/uuid" +) + +var timeNow = time.Now + +type PasswordReset struct { + Token string + CreatedAt time.Time +} + +func NewPasswordReset() *PasswordReset { + return &PasswordReset{ + Token: generateToken(), + CreatedAt: timeNow(), + } +} + +func PasswordResetFrom(token string, createdAt time.Time) *PasswordReset { + return &PasswordReset{ + Token: token, + CreatedAt: createdAt, + } +} + +func generateToken() string { + return uuid.New().String() +} + +func (pr *PasswordReset) Validate(token string) bool { + return pr != nil && pr.Token == token && pr.CreatedAt.Add(24*time.Hour).After(time.Now()) +} + +func (pr *PasswordReset) Clone() *PasswordReset { + if pr == nil { + return nil + } + pr2 := PasswordResetFrom(pr.Token, pr.CreatedAt) + return pr2 +} diff --git a/pkg/user/password_reset_test.go b/pkg/user/password_reset_test.go new file mode 100644 index 000000000..253a7b92c --- /dev/null +++ b/pkg/user/password_reset_test.go @@ -0,0 +1,103 @@ +package user + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestNewPasswordReset(t *testing.T) { + mockTime := time.Now() + timeNow = func() time.Time { + return mockTime + } + pr := NewPasswordReset() + assert.NotNil(t, pr) + assert.NotEmpty(t, pr.Token) + assert.Equal(t, mockTime, pr.CreatedAt) +} + +func 
TestPasswordReset_Validate(t *testing.T) { + tests := []struct { + name string + pr *PasswordReset + token string + want bool + }{ + { + name: "valid", + pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Now(), + }, + token: "xyz", + want: true, + }, + { + name: "wrong token", + pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Now(), + }, + token: "xxx", + want: false, + }, + { + name: "old request", + pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Now().Add(-24 * time.Hour), + }, + token: "xyz", + want: false, + }, + { + name: "nil request", + pr: nil, + token: "xyz", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.pr.Validate(tt.token)) + }) + } +} + +func Test_generateToken(t *testing.T) { + t1 := generateToken() + t2 := generateToken() + + assert.NotNil(t, t1) + assert.NotNil(t, t2) + assert.NotEmpty(t, t1) + assert.NotEmpty(t, t2) + assert.NotEqual(t, t1, t2) + +} + +func TestPasswordResetFrom(t *testing.T) { + tests := []struct { + name string + token string + createdAt time.Time + want *PasswordReset + }{ + { + name: "prFrom", + token: "xyz", + createdAt: time.Unix(1, 1), + want: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, PasswordResetFrom(tt.token, tt.createdAt)) + }) + } +} diff --git a/pkg/user/user.go b/pkg/user/user.go index 5277999db..c3dc7fd99 100644 --- a/pkg/user/user.go +++ b/pkg/user/user.go @@ -1,17 +1,34 @@ package user import ( + "errors" + "unicode" + + "golang.org/x/crypto/bcrypt" + "golang.org/x/text/language" ) +var ( + ErrEncodingPassword = errors.New("error encoding password") + ErrInvalidPassword = errors.New("error invalid password") + ErrPasswordLength = errors.New("password at least 8 characters") + ErrPasswordUpper = errors.New("password should have upper case letters") + ErrPasswordLower = errors.New("password should 
have lower case letters") + ErrPasswordNumber = errors.New("password should have numbers") +) + type User struct { - id ID - name string - email string - team TeamID - auths []Auth - lang language.Tag - theme Theme + id ID + name string + email string + password []byte + team TeamID + auths []Auth + lang language.Tag + theme Theme + verification *Verification + passwordReset *PasswordReset } func (u *User) ID() ID { @@ -38,6 +55,10 @@ func (u *User) Theme() Theme { return u.theme } +func (u *User) Password() []byte { + return u.password +} + func (u *User) UpdateName(name string) { u.name = name } @@ -58,6 +79,10 @@ func (u *User) UpdateTheme(t Theme) { u.theme = t } +func (u *User) Verification() *Verification { + return u.verification +} + func (u *User) Auths() []Auth { if u == nil { return nil @@ -101,6 +126,18 @@ func (u *User) RemoveAuth(a Auth) bool { return false } +func (u *User) GetAuthByProvider(provider string) *Auth { + if u == nil || u.auths == nil { + return nil + } + for _, b := range u.auths { + if provider == b.Provider { + return &b + } + } + return nil +} + func (u *User) RemoveAuthByProvider(provider string) bool { if u == nil || provider == "auth0" { return false @@ -117,3 +154,78 @@ func (u *User) RemoveAuthByProvider(provider string) bool { func (u *User) ClearAuths() { u.auths = []Auth{} } + +func (u *User) SetPassword(pass string) error { + if err := validatePassword(pass); err != nil { + return err + } + p, err := encodePassword(pass) + if err != nil { + return err + } + u.password = p + return nil +} + +func (u *User) MatchPassword(pass string) (bool, error) { + if u == nil || len(u.password) == 0 { + return false, nil + } + return verifyPassword(pass, u.password) +} + +func encodePassword(pass string) ([]byte, error) { + bytes, err := bcrypt.GenerateFromPassword([]byte(pass), 14) + return bytes, err +} + +func verifyPassword(toVerify string, encoded []byte) (bool, error) { + err := bcrypt.CompareHashAndPassword(encoded, 
[]byte(toVerify)) + if err != nil { + if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) { + return false, nil + } + return false, err + } + return true, nil +} + +func (u *User) PasswordReset() *PasswordReset { + return u.passwordReset +} + +func (u *User) SetPasswordReset(pr *PasswordReset) { + u.passwordReset = pr.Clone() +} + +func (u *User) SetVerification(v *Verification) { + u.verification = v +} + +func validatePassword(pass string) error { + var hasNum, hasUpper, hasLower bool + for _, c := range pass { + switch { + case unicode.IsNumber(c): + hasNum = true + case unicode.IsUpper(c): + hasUpper = true + case unicode.IsLower(c) || c == ' ': + hasLower = true + } + } + if len(pass) < 8 { + return ErrPasswordLength + } + if !hasLower { + return ErrPasswordLower + } + if !hasUpper { + return ErrPasswordUpper + } + if !hasNum { + return ErrPasswordNumber + } + + return nil +} diff --git a/pkg/user/user_test.go b/pkg/user/user_test.go index 7e2e80bc3..008e2f8fa 100644 --- a/pkg/user/user_test.go +++ b/pkg/user/user_test.go @@ -2,6 +2,7 @@ package user import ( "testing" + "time" "github.com/stretchr/testify/assert" "golang.org/x/text/language" @@ -281,3 +282,298 @@ func TestUser_UpdateName(t *testing.T) { u.UpdateName("xxx") assert.Equal(t, "xxx", u.Name()) } + +func TestUser_GetAuthByProvider(t *testing.T) { + testCases := []struct { + Name string + User *User + Provider string + Expected *Auth + }{ + { + Name: "existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + Provider: "xxx", + Expected: &Auth{ + Provider: "xxx", + Sub: "zzz", + }, + }, + { + Name: "not existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + Provider: "yyy", + Expected: nil, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + res := tc.User.GetAuthByProvider(tc.Provider) + assert.Equal(tt, tc.Expected, res) + }) + } +} + 
+func TestUser_MatchPassword(t *testing.T) { + encodedPass, _ := encodePassword("test") + type args struct { + pass string + } + tests := []struct { + name string + password []byte + args args + want bool + wantErr bool + }{ + { + name: "passwords should match", + password: encodedPass, + args: args{ + pass: "test", + }, + want: true, + wantErr: false, + }, + { + name: "passwords shouldn't match", + password: encodedPass, + args: args{ + pass: "xxx", + }, + want: false, + wantErr: false, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + u := &User{ + password: tc.password, + } + got, err := u.MatchPassword(tc.args.pass) + assert.Equal(tt, tc.want, got) + if tc.wantErr { + assert.Error(tt, err) + } else { + assert.NoError(tt, err) + } + }) + } +} + +func TestUser_SetPassword(t *testing.T) { + type args struct { + pass string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "should set non-latin characters password", + args: args{ + pass: "ร€รชรฎรดรปtest1", + }, + want: "ร€รชรฎรดรปtest1", + }, + { + name: "should set latin characters password", + args: args{ + pass: "Testabc1", + }, + want: "Testabc1", + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + u := &User{} + _ = u.SetPassword(tc.args.pass) + got, err := verifyPassword(tc.want, u.password) + assert.NoError(tt, err) + assert.True(tt, got) + }) + } +} + +func TestUser_PasswordReset(t *testing.T) { + testCases := []struct { + Name string + User *User + Expected *PasswordReset + }{ + { + Name: "not password request", + User: New().NewID().MustBuild(), + Expected: nil, + }, + { + Name: "create new password request over existing one", + User: New().NewID().PasswordReset(&PasswordReset{"xzy", time.Unix(0, 0)}).MustBuild(), + Expected: &PasswordReset{ + Token: "xzy", + CreatedAt: time.Unix(0, 0), + }, + }, + } + for _, tc := range testCases { + tc := tc + t.Run(tc.Name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, 
tc.Expected, tc.User.PasswordReset()) + }) + } +} + +func TestUser_SetPasswordReset(t *testing.T) { + tests := []struct { + Name string + User *User + Pr *PasswordReset + Expected *PasswordReset + }{ + { + Name: "nil", + User: New().NewID().MustBuild(), + Pr: nil, + Expected: nil, + }, + { + Name: "nil", + User: New().NewID().MustBuild(), + Pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + Expected: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + { + Name: "create new password request", + User: New().NewID().MustBuild(), + Pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + Expected: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + { + Name: "create new password request over existing one", + User: New().NewID().PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), + Pr: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + Expected: &PasswordReset{ + Token: "xyz", + CreatedAt: time.Unix(1, 1), + }, + }, + { + Name: "remove none existing password request", + User: New().NewID().MustBuild(), + Pr: nil, + Expected: nil, + }, + { + Name: "remove existing password request", + User: New().NewID().PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), + Pr: nil, + Expected: nil, + }, + } + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + tt.User.SetPasswordReset(tt.Pr) + assert.Equal(t, tt.Expected, tt.User.PasswordReset()) + }) + } +} + +func TestUser_SetVerification(t *testing.T) { + input := &User{} + v := &Verification{ + verified: false, + code: "xxx", + expiration: time.Time{}, + } + input.SetVerification(v) + assert.Equal(t, v, input.verification) +} + +func TestUser_Verification(t *testing.T) { + v := NewVerification() + tests := []struct { + name string + verification *Verification + want *Verification + }{ + { + name: "should return the same verification", + verification: v, + want: v, + }, + } + for _, tt := range tests 
{ + t.Run(tt.name, func(t *testing.T) { + u := &User{ + verification: tt.verification, + } + assert.Equal(t, tt.want, u.Verification()) + }) + } +} + +func Test_ValidatePassword(t *testing.T) { + + tests := []struct { + name string + pass string + wantErr bool + }{ + { + name: "should pass", + pass: "Abcdafgh1", + wantErr: false, + }, + { + name: "shouldn't pass: length<8", + pass: "Aafgh1", + wantErr: true, + }, + { + name: "shouldn't pass: don't have numbers", + pass: "Abcdefghi", + wantErr: true, + }, + { + name: "shouldn't pass: don't have upper", + pass: "abcdefghi1", + wantErr: true, + }, + { + name: "shouldn't pass: don't have lower", + pass: "ABCDEFGHI1", + wantErr: true, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + out := validatePassword(tc.pass) + assert.Equal(tt, out != nil, tc.wantErr) + }) + } +} diff --git a/pkg/user/verification.go b/pkg/user/verification.go new file mode 100644 index 000000000..2b7215f0c --- /dev/null +++ b/pkg/user/verification.go @@ -0,0 +1,71 @@ +package user + +import ( + "time" + + uuid "github.com/google/uuid" +) + +type Verification struct { + verified bool + code string + expiration time.Time +} + +func (v *Verification) IsVerified() bool { + if v == nil { + return false + } + return v.verified +} + +func (v *Verification) Code() string { + if v == nil { + return "" + } + return v.code +} + +func (v *Verification) Expiration() time.Time { + if v == nil { + return time.Time{} + } + return v.expiration +} + +func generateCode() string { + return uuid.NewString() +} + +func (v *Verification) IsExpired() bool { + if v == nil { + return true + } + now := time.Now() + return now.After(v.expiration) +} + +func (v *Verification) SetVerified(b bool) { + if v == nil { + return + } + v.verified = b +} + +func NewVerification() *Verification { + v := &Verification{ + verified: false, + code: generateCode(), + expiration: time.Now().Add(time.Hour * 24), + } + return v +} + +func VerificationFrom(c 
string, e time.Time, b bool) *Verification { + v := &Verification{ + verified: b, + code: c, + expiration: e, + } + return v +} diff --git a/pkg/user/verification_test.go b/pkg/user/verification_test.go new file mode 100644 index 000000000..342c5937b --- /dev/null +++ b/pkg/user/verification_test.go @@ -0,0 +1,215 @@ +package user + +import ( + "testing" + "time" + + "github.com/google/uuid" + + "github.com/stretchr/testify/assert" +) + +func TestNewVerification(t *testing.T) { + type fields struct { + verified bool + code bool + expiration bool + } + + tests := []struct { + name string + want fields + }{ + { + name: "init verification struct", + + want: fields{ + verified: false, + code: true, + expiration: true, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := NewVerification() + assert.Equal(t, tt.want.verified, got.IsVerified()) + assert.Equal(t, tt.want.code, len(got.Code()) > 0) + assert.Equal(t, tt.want.expiration, !got.Expiration().IsZero()) + }) + } +} + +func TestVerification_Code(t *testing.T) { + tests := []struct { + name string + verification *Verification + want string + }{ + { + name: "should return a code string", + verification: &Verification{ + verified: false, + code: "xxx", + expiration: time.Time{}, + }, + want: "xxx", + }, + { + name: "should return a empty string", + want: "", + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + + assert.Equal(tt, tc.want, tc.verification.Code()) + }) + } +} + +func TestVerification_Expiration(t *testing.T) { + e := time.Now() + + tests := []struct { + name string + verification *Verification + want time.Time + }{ + { + name: "should return now date", + verification: &Verification{ + verified: false, + code: "", + expiration: e, + }, + want: e, + }, + { + name: "should return zero time", + verification: nil, + want: time.Time{}, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { 
+ tt.Parallel() + assert.Equal(tt, tc.want, tc.verification.Expiration()) + }) + } +} + +func TestVerification_IsExpired(t *testing.T) { + tim, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") + tim2 := time.Now().Add(time.Hour * 24) + + type fields struct { + verified bool + code string + expiration time.Time + } + tests := []struct { + name string + fields fields + want bool + }{ + { + name: "should be expired", + fields: fields{ + verified: false, + code: "xxx", + expiration: tim, + }, + want: true, + }, + { + name: "shouldn't be expired", + fields: fields{ + verified: false, + code: "xxx", + expiration: tim2, + }, + want: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + v := &Verification{ + verified: tc.fields.verified, + code: tc.fields.code, + expiration: tc.fields.expiration, + } + assert.Equal(tt, tc.want, v.IsExpired()) + }) + } +} + +func TestVerification_IsVerified(t *testing.T) { + tests := []struct { + name string + verification *Verification + want bool + }{ + { + name: "should return true", + verification: &Verification{ + verified: true, + }, + want: true, + }, + { + name: "should return false", + verification: nil, + want: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + assert.Equal(tt, tc.want, tc.verification.IsVerified()) + }) + } +} + +func TestVerification_SetVerified(t *testing.T) { + tests := []struct { + name string + verification *Verification + input bool + want bool + }{ + { + name: "should set true", + verification: &Verification{ + verified: false, + }, + input: true, + want: true, + }, + { + name: "should return false", + verification: nil, + want: false, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.verification.SetVerified(tc.input) + assert.Equal(tt, tc.want, tc.verification.IsVerified()) + }) + } +} + +func Test_generateCode(t 
*testing.T) { + str := generateCode() + _, err := uuid.Parse(str) + assert.NoError(t, err) +} diff --git a/schema.graphql b/schema.graphql index d45c51cb1..76c819213 100644 --- a/schema.graphql +++ b/schema.graphql @@ -1261,13 +1261,13 @@ type RemoveAssetPayload { assetId: ID! } -type SignupPayload { +type UpdateMePayload { user: User! - team: Team! } -type UpdateMePayload { +type SignupPayload { user: User! + team: Team! } type DeleteMePayload { From 9bde8a4adc9ab560b856768dc125aacd20c747c6 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Fri, 11 Mar 2022 11:50:33 +0300 Subject: [PATCH 164/253] fix: load auth client domain from config (#124) --- internal/app/auth_server.go | 7 ++++--- internal/app/config.go | 2 +- internal/usecase/interactor/auth.go | 9 +++++---- pkg/auth/client.go | 6 +++--- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index bdb895da0..87ba02f0f 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -55,9 +55,10 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server storage, err := interactor.NewAuthStorage( ctx, &interactor.StorageConfig{ - Domain: domain.String(), - Debug: cfg.Debug, - DN: dn, + Domain: domain.String(), + ClientDomain: cfg.Config.AuthSrv.UIDomain, + Debug: cfg.Debug, + DN: dn, }, cfg.Repos.AuthRequest, cfg.Repos.Config, diff --git a/internal/app/config.go b/internal/app/config.go index 8ae478a88..14cd77130 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -47,7 +47,7 @@ type Auth0Config struct { type AuthSrvConfig struct { Domain string `default:"http://localhost:8080"` - UIDomain string `default:"http://localhost:3000"` + UIDomain string `default:"http://localhost:8080"` Key string DN *AuthDNConfig } diff --git a/internal/usecase/interactor/auth.go b/internal/usecase/interactor/auth.go index 5cc6d8306..a3f3b084d 100644 --- 
a/internal/usecase/interactor/auth.go +++ b/internal/usecase/interactor/auth.go @@ -34,9 +34,10 @@ type AuthStorage struct { } type StorageConfig struct { - Domain string `default:"http://localhost:8080"` - Debug bool - DN *AuthDNConfig + Domain string `default:"http://localhost:8080"` + ClientDomain string `default:"http://localhost:8080"` + Debug bool + DN *AuthDNConfig } type AuthDNConfig struct { @@ -63,7 +64,7 @@ var dummyName = pkix.Name{ func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRequest, config repo.Config, getUserBySubject func(context.Context, string) (*user.User, error)) (op.Storage, error) { - client := auth.NewLocalClient(cfg.Debug) + client := auth.NewLocalClient(cfg.Debug, cfg.ClientDomain) name := dummyName if cfg.DN != nil { diff --git a/pkg/auth/client.go b/pkg/auth/client.go index 930757100..884848c38 100644 --- a/pkg/auth/client.go +++ b/pkg/auth/client.go @@ -24,7 +24,7 @@ type Client struct { devMode bool } -func NewLocalClient(devMode bool) op.Client { +func NewLocalClient(devMode bool, clientDomain string) op.Client { return &Client{ id: "01FH69GFQ4DFCXS5XD91JK4HZ1", applicationType: op.ApplicationTypeWeb, @@ -32,9 +32,9 @@ func NewLocalClient(devMode bool) op.Client { accessTokenType: op.AccessTokenTypeJWT, responseTypes: []oidc.ResponseType{oidc.ResponseTypeCode}, grantTypes: []oidc.GrantType{oidc.GrantTypeCode, oidc.GrantTypeRefreshToken}, - redirectURIs: []string{"http://localhost:3000"}, + redirectURIs: []string{clientDomain}, allowedScopes: []string{"openid", "profile", "email"}, - loginURI: "http://localhost:3000/login?id=%s", + loginURI: clientDomain + "/login?id=%s", idTokenLifetime: 5 * time.Minute, clockSkew: 0, devMode: devMode, From 27c2f0caed5414705c142b627f76a9e522f6a2c8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 14 Mar 2022 13:02:06 +0900 Subject: [PATCH 165/253] fix: signup fails when password is not set --- internal/usecase/interactor/user.go | 46 ++++++++++++++--------------- 
pkg/user/initializer.go | 12 ++++---- 2 files changed, 30 insertions(+), 28 deletions(-) diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 138757981..47de634bd 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -133,13 +133,18 @@ func (i *User) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Ope func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user.User, _ *user.Team, err error) { var team *user.Team - var email, name string var auth *user.Auth + var email, name string var tx repo.Tx - isOidc := inp.Secret != nil && inp.Sub != nil + + isOidc := inp.Sub != nil && inp.Password == nil isAuth := inp.Name != nil && inp.Email != nil && inp.Password != nil if !isAuth && !isOidc { - return + return nil, nil, errors.New("invalid params") + } + + if i.signupSecret != "" && *inp.Secret != i.signupSecret { + return nil, nil, interfaces.ErrSignupInvalidSecret } tx, err = i.transaction.Begin() @@ -152,12 +157,18 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. } }() - if isOidc { - // Auth0 - if i.signupSecret != "" && *inp.Secret != i.signupSecret { - return nil, nil, interfaces.ErrSignupInvalidSecret + // Check if team already exists + if inp.TeamID != nil { + existed, err := i.teamRepo.FindByID(ctx, *inp.TeamID) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return nil, nil, errors.New("existed team") } + } + if isOidc { if len(*inp.Sub) == 0 { return nil, nil, errors.New("sub is required") } @@ -165,7 +176,6 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. if err != nil { return } - } else if isAuth { if *inp.Name == "" { return nil, nil, interfaces.ErrSignupInvalidName @@ -188,41 +198,31 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
} } - // Check if team already exists - if inp.TeamID != nil { - existed, err := i.teamRepo.FindByID(ctx, *inp.TeamID) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return nil, nil, err - } - if existed != nil { - return nil, nil, errors.New("existed team") - } - } - // Initialize user and team u, team, err = user.Init(user.InitParams{ Email: email, Name: name, Sub: auth, - Password: *inp.Password, + Password: inp.Password, Lang: inp.Lang, Theme: inp.Theme, UserID: inp.UserID, TeamID: inp.TeamID, }) + if err != nil { return nil, nil, err } + if err := i.userRepo.Save(ctx, u); err != nil { return nil, nil, err } + if err := i.teamRepo.Save(ctx, team); err != nil { return nil, nil, err } - if tx != nil { - tx.Commit() - } + tx.Commit() return u, team, nil } diff --git a/pkg/user/initializer.go b/pkg/user/initializer.go index eadd1756e..070b6f086 100644 --- a/pkg/user/initializer.go +++ b/pkg/user/initializer.go @@ -8,7 +8,7 @@ type InitParams struct { Email string Name string Sub *Auth - Password string + Password *string Lang *language.Tag Theme *Theme UserID *ID @@ -33,15 +33,17 @@ func Init(p InitParams) (*User, *Team, error) { p.Sub = GenReearthSub(p.UserID.String()) } - u, err := New(). + b := New(). ID(*p.UserID). Name(p.Name). Email(p.Email). Auths([]Auth{*p.Sub}). Lang(*p.Lang). - PasswordPlainText(p.Password). - Theme(*p.Theme). 
- Build() + Theme(*p.Theme) + if p.Password != nil { + b = b.PasswordPlainText(*p.Password) + } + u, err := b.Build() if err != nil { return nil, nil, err } From aab26c3a07ffa3a4b050276b09d3121252b8f5ca Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 14 Mar 2022 15:10:24 +0900 Subject: [PATCH 166/253] feat: default mailer that outputs mails into stdout --- docker-compose.yml | 4 - internal/app/repo.go | 14 +- internal/infrastructure/mailer/common.go | 112 ++++++++++++++++ .../mailer/{smtp_test.go => common_test.go} | 43 ------ internal/infrastructure/mailer/direct.go | 125 ++++++++++++++++++ internal/infrastructure/mailer/logger.go | 21 +++ internal/infrastructure/mailer/sendgrid.go | 34 ++--- .../infrastructure/mailer/sendgrid_test.go | 41 ------ internal/infrastructure/mailer/smtp.go | 85 ++---------- 9 files changed, 297 insertions(+), 182 deletions(-) create mode 100644 internal/infrastructure/mailer/common.go rename internal/infrastructure/mailer/{smtp_test.go => common_test.go} (74%) create mode 100644 internal/infrastructure/mailer/direct.go create mode 100644 internal/infrastructure/mailer/logger.go delete mode 100644 internal/infrastructure/mailer/sendgrid_test.go diff --git a/docker-compose.yml b/docker-compose.yml index 942e5c7ad..74e85d775 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,10 +12,6 @@ services: environment: REEARTH_ENV: docker REEARTH_DB_URL: mongodb://reearth-mongo - REEARTH_MAILER: smtp - REEARTH_SMTP_URL: #add later - REEARTH_SMTP_USER: #add later - REEARTH_SMTP_PASSWORD: #add later depends_on: - reearth-mongo reearth-mongo: diff --git a/internal/app/repo.go b/internal/app/repo.go index fdff03a08..4399297ed 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -72,7 +72,7 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. 
// google gateways.Google = google.NewGoogle() - // SMTP Mailer + // mailer gateways.Mailer = initMailer(conf) // release lock of all scenes @@ -85,9 +85,13 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. func initMailer(conf *Config) gateway.Mailer { if conf.Mailer == "sendgrid" { - return mailer.NewWithSendGrid(conf.SendGrid.Name, conf.SendGrid.Email, conf.SendGrid.API) - } else if conf.Mailer == "smtp" { - return mailer.NewWithSMTP(conf.SMTP.Host, conf.SMTP.Port, conf.SMTP.SMTPUsername, conf.SMTP.Email, conf.SMTP.Password) + log.Infoln("mailer: sendgrid is used") + return mailer.NewSendGrid(conf.SendGrid.Name, conf.SendGrid.Email, conf.SendGrid.API) } - return nil + if conf.Mailer == "smtp" { + log.Infoln("mailer: smtp is used") + return mailer.NewSMTP(conf.SMTP.Host, conf.SMTP.Port, conf.SMTP.SMTPUsername, conf.SMTP.Email, conf.SMTP.Password) + } + log.Infoln("mailer: logger is used") + return mailer.NewLogger() } diff --git a/internal/infrastructure/mailer/common.go b/internal/infrastructure/mailer/common.go new file mode 100644 index 000000000..82f7a1774 --- /dev/null +++ b/internal/infrastructure/mailer/common.go @@ -0,0 +1,112 @@ +package mailer + +import ( + "bytes" + "fmt" + "io" + "mime/multipart" + "net/mail" + "net/textproto" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/log" +) + +func verifyEmails(contacts []gateway.Contact) ([]string, error) { + emails := make([]string, 0, len(contacts)) + for _, c := range contacts { + _, err := mail.ParseAddress(c.Email) + if err != nil { + return nil, fmt.Errorf("invalid email %s", c.Email) + } + emails = append(emails, c.Email) + } + + return emails, nil +} + +type message struct { + to []string + from string + subject string + plainContent string + htmlContent string +} + +func (m *message) encodeContent() (string, error) { + buf := bytes.NewBuffer(nil) + writer := multipart.NewWriter(buf) + boundary := 
writer.Boundary() + + altBuffer, err := writer.CreatePart(textproto.MIMEHeader{"Content-Type": {"multipart/alternative; boundary=" + boundary}}) + if err != nil { + return "", err + } + altWriter := multipart.NewWriter(altBuffer) + err = altWriter.SetBoundary(boundary) + if err != nil { + return "", err + } + var content io.Writer + content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/plain"}}) + if err != nil { + return "", err + } + + _, err = content.Write([]byte(m.plainContent + "\r\n\r\n")) + if err != nil { + return "", err + } + content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/html"}}) + if err != nil { + return "", err + } + _, err = content.Write([]byte(m.htmlContent + "\r\n")) + if err != nil { + return "", err + } + _ = altWriter.Close() + return buf.String(), nil +} + +func (m *message) encodeMessage() ([]byte, error) { + buf := bytes.NewBuffer(nil) + buf.WriteString(fmt.Sprintf("Subject: %s\n", m.subject)) + buf.WriteString(fmt.Sprintf("From: %s\n", m.from)) + buf.WriteString(fmt.Sprintf("To: %s\n", strings.Join(m.to, ","))) + content, err := m.encodeContent() + if err != nil { + return nil, err + } + buf.WriteString(content) + + return buf.Bytes(), nil +} + +type ToList []gateway.Contact + +func (l ToList) String() string { + tos := &strings.Builder{} + for i, t := range l { + if t.Name != "" { + _, _ = tos.WriteString(t.Name) + if t.Email != "" { + _, _ = tos.WriteString(" ") + } + } + if t.Email != "" { + _, _ = tos.WriteString("<") + _, _ = tos.WriteString(t.Email) + _, _ = tos.WriteString(">") + } + if len(l)-1 > i { + _, _ = tos.WriteString(", ") + } + } + return tos.String() +} + +func logMail(to ToList, subject string) { + log.Infof("mailer: mail sent: To: %s, Subject: %s", to, subject) +} diff --git a/internal/infrastructure/mailer/smtp_test.go b/internal/infrastructure/mailer/common_test.go similarity index 74% rename from internal/infrastructure/mailer/smtp_test.go rename to 
internal/infrastructure/mailer/common_test.go index ed217d9cb..a6ecfce53 100644 --- a/internal/infrastructure/mailer/smtp_test.go +++ b/internal/infrastructure/mailer/common_test.go @@ -4,52 +4,9 @@ import ( "strings" "testing" - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/stretchr/testify/assert" ) -func TestNewWithSMTP(t *testing.T) { - type args struct { - host string - port string - email string - username string - password string - } - tests := []struct { - name string - args args - want gateway.Mailer - }{ - { - name: "should create mailer with given args", - args: args{ - host: "x.x.x", - port: "8080", - username: "foo", - email: "xxx@test.com", - password: "foo.pass", - }, - want: &smtpMailer{ - host: "x.x.x", - port: "8080", - username: "foo", - email: "xxx@test.com", - password: "foo.pass", - }, - }, - } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - got := NewWithSMTP(tc.args.host, tc.args.port, tc.args.username, tc.args.email, tc.args.password) - assert.Equal(tt, tc.want, got) - }) - } -} - func Test_message_encodeContent(t *testing.T) { // subject and receiver email are not needed for encoding the content tests := []struct { diff --git a/internal/infrastructure/mailer/direct.go b/internal/infrastructure/mailer/direct.go new file mode 100644 index 000000000..27cedbda1 --- /dev/null +++ b/internal/infrastructure/mailer/direct.go @@ -0,0 +1,125 @@ +package mailer + +import ( + "errors" + "fmt" + "net" + "net/smtp" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +// NOTE: implemented but it does not work expectedly +type direct struct { + from string +} + +func NewDirect(from string) gateway.Mailer { + if from == "" { + from = "reearth@localhost:8080" + } + return &direct{from: from} +} + +func (m *direct) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { + 
emails, err := verifyEmails(to) + if err != nil { + return err + } + + emailHosts, err := m.hosts(emails) + if err != nil { + return err + } + + mxHosts, err := m.lookupHosts(emailHosts) + if err != nil { + return err + } + + msg, err := m.message(emails, subject, plainContent, htmlContent) + if err != nil { + return err + } + + for i, to := range emails { + host := mxHosts[i] + if err := m.send(to, host, msg); err != nil { + return err + } + } + + logMail(to, subject) + return nil +} + +func (m *direct) message(emails []string, subject, plainContent, htmlContent string) ([]byte, error) { + msg := &message{ + to: emails, + from: m.from, + subject: subject, + plainContent: plainContent, + htmlContent: htmlContent, + } + encodedMsg, err := msg.encodeMessage() + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + return encodedMsg, nil +} + +func (*direct) hosts(addresses []string) ([]string, error) { + res := make([]string, 0, len(addresses)) + for _, a := range addresses { + s := strings.SplitAfterN(a, "@", 2) + if len(s) != 2 { + return nil, errors.New("invalid email address") + } + res = append(res, s[1]) + } + return res, nil +} + +func (*direct) lookupHosts(hosts []string) ([]string, error) { + res := make([]string, 0, len(hosts)) + for _, h := range hosts { + mxs, err := net.LookupMX(h) + if err != nil { + return nil, errors.New("invalid email address") + } + if len(mxs) == 0 { + return nil, errors.New("invalid email address") + } + res = append(res, strings.TrimSuffix(mxs[0].Host, ".")) + } + return res, nil +} + +func (m *direct) send(to string, host string, msg []byte) error { + c, err := smtp.Dial(fmt.Sprintf("%s:25", host)) + if err != nil { + return rerror.ErrInternalBy(err) + } + if err := c.Mail(m.from); err != nil { + return rerror.ErrInternalBy(err) + } + if err := c.Rcpt(to); err != nil { + return rerror.ErrInternalBy(err) + } + wc, err := c.Data() + if err != nil { + return rerror.ErrInternalBy(err) + } + if _, err = wc.Write(msg); err != 
nil { + return rerror.ErrInternalBy(err) + } + if err := wc.Close(); err != nil { + return rerror.ErrInternalBy(err) + } + if err := c.Quit(); err != nil { + return rerror.ErrInternalBy(err) + } + return nil +} diff --git a/internal/infrastructure/mailer/logger.go b/internal/infrastructure/mailer/logger.go new file mode 100644 index 000000000..1d52fba21 --- /dev/null +++ b/internal/infrastructure/mailer/logger.go @@ -0,0 +1,21 @@ +package mailer + +import ( + "fmt" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +const loggerSep = "=======================" + +type logger struct{} + +func NewLogger() gateway.Mailer { + return &logger{} +} + +func (m *logger) SendMail(to []gateway.Contact, subject, plainContent, _ string) error { + logMail(to, subject) + fmt.Printf("%s\n%s\n%s\n", loggerSep, plainContent, loggerSep) + return nil +} diff --git a/internal/infrastructure/mailer/sendgrid.go b/internal/infrastructure/mailer/sendgrid.go index cd078d0ff..07ec200ed 100644 --- a/internal/infrastructure/mailer/sendgrid.go +++ b/internal/infrastructure/mailer/sendgrid.go @@ -7,26 +7,30 @@ import ( ) type sendgridMailer struct { - api string - // sender data - name string - email string + name string + email string + client *sendgrid.Client } -func NewWithSendGrid(senderName, senderEmail, api string) gateway.Mailer { +func NewSendGrid(senderName, senderEmail, api string) gateway.Mailer { return &sendgridMailer{ - name: senderName, - email: senderEmail, - api: api, + name: senderName, + email: senderEmail, + client: sendgrid.NewSendClient(api), } } func (m *sendgridMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { - contact := to[0] - sender := mail.NewEmail(m.name, m.email) - receiver := mail.NewEmail(contact.Name, contact.Email) - message := mail.NewSingleEmail(sender, subject, receiver, plainContent, htmlContent) - client := sendgrid.NewSendClient(m.api) - _, err := client.Send(message) - return err + for _, t := 
range to { + sender := mail.NewEmail(m.name, m.email) + receiver := mail.NewEmail(t.Name, t.Email) + message := mail.NewSingleEmail(sender, subject, receiver, plainContent, htmlContent) + _, err := m.client.Send(message) + if err != nil { + return err + } + } + + logMail(to, subject) + return nil } diff --git a/internal/infrastructure/mailer/sendgrid_test.go b/internal/infrastructure/mailer/sendgrid_test.go deleted file mode 100644 index 2a7251ed3..000000000 --- a/internal/infrastructure/mailer/sendgrid_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package mailer - -import ( - "testing" - - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/stretchr/testify/assert" -) - -func TestNewWithSendGrid(t *testing.T) { - type args struct { - senderName string - senderEmail string - api string - } - tests := []struct { - name string - args args - want gateway.Mailer - }{ - { - name: "should create a sendGrid mailer", - args: args{ - senderName: "test sender", - senderEmail: "sender@test.com", - api: "TEST_API", - }, - want: &sendgridMailer{ - api: "TEST_API", - name: "test sender", - email: "sender@test.com", - }, - }, - } - for _, tc := range tests { - t.Run(tc.name, func(tt *testing.T) { - got := NewWithSendGrid(tc.args.senderName, tc.args.senderEmail, tc.args.api) - assert.Equal(tt, tc.want, got) - }) - } -} diff --git a/internal/infrastructure/mailer/smtp.go b/internal/infrastructure/mailer/smtp.go index e54127d7f..be72c3675 100644 --- a/internal/infrastructure/mailer/smtp.go +++ b/internal/infrastructure/mailer/smtp.go @@ -1,15 +1,8 @@ package mailer import ( - "bytes" "errors" - "fmt" - "io" - "mime/multipart" - "net/mail" "net/smtp" - "net/textproto" - "strings" "github.com/reearth/reearth-backend/internal/usecase/gateway" ) @@ -22,65 +15,7 @@ type smtpMailer struct { password string } -type message struct { - to []string - from string - subject string - plainContent string - htmlContent string -} - -func (m *message) encodeContent() (string, 
error) { - buf := bytes.NewBuffer(nil) - writer := multipart.NewWriter(buf) - boundary := writer.Boundary() - - altBuffer, err := writer.CreatePart(textproto.MIMEHeader{"Content-Type": {"multipart/alternative; boundary=" + boundary}}) - if err != nil { - return "", err - } - altWriter := multipart.NewWriter(altBuffer) - err = altWriter.SetBoundary(boundary) - if err != nil { - return "", err - } - var content io.Writer - content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/plain"}}) - if err != nil { - return "", err - } - - _, err = content.Write([]byte(m.plainContent + "\r\n\r\n")) - if err != nil { - return "", err - } - content, err = altWriter.CreatePart(textproto.MIMEHeader{"Content-Type": {"text/html"}}) - if err != nil { - return "", err - } - _, err = content.Write([]byte(m.htmlContent + "\r\n")) - if err != nil { - return "", err - } - _ = altWriter.Close() - return buf.String(), nil -} - -func (m *message) encodeMessage() ([]byte, error) { - buf := bytes.NewBuffer(nil) - buf.WriteString(fmt.Sprintf("Subject: %s\n", m.subject)) - buf.WriteString(fmt.Sprintf("From: %s\n", m.from)) - buf.WriteString(fmt.Sprintf("To: %s\n", strings.Join(m.to, ","))) - content, err := m.encodeContent() - if err != nil { - return nil, err - } - buf.WriteString(content) - - return buf.Bytes(), nil -} - -func NewWithSMTP(host, port, username, email, password string) gateway.Mailer { +func NewSMTP(host, port, username, email, password string) gateway.Mailer { return &smtpMailer{ host: host, port: port, @@ -91,13 +26,9 @@ func NewWithSMTP(host, port, username, email, password string) gateway.Mailer { } func (m *smtpMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlContent string) error { - emails := make([]string, 0, len(to)) - for _, c := range to { - _, err := mail.ParseAddress(c.Email) - if err != nil { - return fmt.Errorf("invalid email %s", c.Email) - } - emails = append(emails, c.Email) + emails, err := verifyEmails(to) + if err != 
nil { + return err } msg := &message{ @@ -117,5 +48,11 @@ func (m *smtpMailer) SendMail(to []gateway.Contact, subject, plainContent, htmlC if len(m.host) == 0 { return errors.New("invalid smtp url") } - return smtp.SendMail(m.host+":"+m.port, auth, m.email, emails, encodedMsg) + + if err := smtp.SendMail(m.host+":"+m.port, auth, m.email, emails, encodedMsg); err != nil { + return err + } + + logMail(to, subject) + return nil } From d1e3a8ca73c0440d3c489d704d392c74c9355bdf Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 15 Mar 2022 11:16:03 +0900 Subject: [PATCH 167/253] fix: logger panics --- internal/app/auth_server.go | 7 +++---- internal/app/echo-logrus.go | 14 +++++++------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index 87ba02f0f..ab7796557 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -4,6 +4,7 @@ import ( "context" "crypto/sha256" "encoding/json" + "fmt" "net/http" "net/url" "strings" @@ -23,7 +24,6 @@ var ( ) func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *ServerConfig) { - userUsecase := interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, cfg.Config.AuthSrv.UIDomain) domain, err := url.Parse(cfg.Config.AuthSrv.Domain) @@ -78,13 +78,13 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server op.WithCustomKeysEndpoint(op.NewEndpoint(jwksEndpoint)), ) if err != nil { - e.Logger.Fatal(err) + e.Logger.Fatal(fmt.Errorf("auth: init failed: %w", err)) } router := handler.HttpHandler().(*mux.Router) if err := router.Walk(muxToEchoMapper(r)); err != nil { - e.Logger.Fatal(err) + e.Logger.Fatal(fmt.Errorf("auth: walk failed: %w", err)) } // Actual login endpoint @@ -96,7 +96,6 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server // can be removed when the mentioned issue is solved // https://github.com/auth0/auth0-spa-js/issues/845 r.GET("v2/logout", 
logout()) - } func setURLVarsHandler() func(handler http.Handler) http.Handler { diff --git a/internal/app/echo-logrus.go b/internal/app/echo-logrus.go index ead1a14eb..340fd07ce 100644 --- a/internal/app/echo-logrus.go +++ b/internal/app/echo-logrus.go @@ -115,37 +115,37 @@ func (l *Logger) Panicj(j log.JSON) { // Print string log func (l *Logger) Print(i ...interface{}) { - logrus.Print(i[0].(string)) + logrus.Print(i...) } // Debug string log func (l *Logger) Debug(i ...interface{}) { - logrus.Debug(i[0].(string)) + logrus.Debug(i...) } // Info string log func (l *Logger) Info(i ...interface{}) { - logrus.Info(i[0].(string)) + logrus.Info(i...) } // Warn string log func (l *Logger) Warn(i ...interface{}) { - logrus.Warn(i[0].(string)) + logrus.Warn(i...) } // Error string log func (l *Logger) Error(i ...interface{}) { - logrus.Error(i[0].(string)) + logrus.Error(i...) } // Fatal string log func (l *Logger) Fatal(i ...interface{}) { - logrus.Fatal(i[0].(string)) + logrus.Fatal(i...) } // Panic string log func (l *Logger) Panic(i ...interface{}) { - logrus.Panic(i[0].(string)) + logrus.Panic(i...) 
} // Printf print json log From 83a66a4b8d3a2c961c52ad6cdf690374d67b32b6 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 15 Mar 2022 11:31:59 +0900 Subject: [PATCH 168/253] fix: set auth server dev mode automatically --- internal/app/config.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/internal/app/config.go b/internal/app/config.go index 14cd77130..39852a8bb 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -7,6 +7,7 @@ import ( "os" "strings" + "github.com/caos/oidc/pkg/op" "github.com/joho/godotenv" "github.com/kelseyhightower/envconfig" "github.com/reearth/reearth-backend/pkg/log" @@ -103,6 +104,9 @@ func ReadConfig(debug bool) (*Config, error) { if debug { c.Dev = true + if _, ok := os.LookupEnv(op.OidcDevMode); !ok { + _ = os.Setenv(op.OidcDevMode, "1") + } } return &c, err From 82cf28ce694391b6f81223b7c2591c6594c68ae5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 15 Mar 2022 12:33:52 +0900 Subject: [PATCH 169/253] refactor: remove filter args from repos to prevent implementation errors in the use case layer (#122) --- internal/adapter/gql/resolver_mutation_tag.go | 5 +- internal/app/app.go | 35 +-- internal/app/auth_client.go | 29 +- internal/app/private.go | 10 +- internal/app/public_test.go | 21 +- internal/app/usecase.go | 34 ++- internal/infrastructure/adapter/plugin.go | 19 +- .../infrastructure/adapter/property_schema.go | 11 + internal/infrastructure/fs/plugin.go | 17 +- internal/infrastructure/fs/plugin_test.go | 2 +- internal/infrastructure/fs/property_schema.go | 12 +- internal/infrastructure/memory/asset.go | 60 +++-- internal/infrastructure/memory/dataset.go | 64 +++-- .../infrastructure/memory/dataset_schema.go | 79 ++++-- internal/infrastructure/memory/layer.go | 171 ++++++------ internal/infrastructure/memory/layer_test.go | 3 +- internal/infrastructure/memory/plugin.go | 42 ++- internal/infrastructure/memory/project.go | 40 +-- internal/infrastructure/memory/property.go | 87 +++--- 
.../infrastructure/memory/property_schema.go | 41 ++- internal/infrastructure/memory/scene.go | 44 ++-- internal/infrastructure/memory/tag.go | 206 ++++++++------- internal/infrastructure/memory/tag_test.go | 46 ++-- internal/infrastructure/memory/util.go | 27 -- internal/infrastructure/mongo/asset.go | 65 +++-- internal/infrastructure/mongo/container.go | 29 ++ internal/infrastructure/mongo/dataset.go | 119 +++++---- .../infrastructure/mongo/dataset_schema.go | 127 +++++---- internal/infrastructure/mongo/layer.go | 249 ++++++++---------- .../201217193948_add_scene_default_tile.go | 2 +- ...4648_add_scene_field_to_property_schema.go | 42 +++ .../mongo/migration/migrations.go | 3 + .../infrastructure/mongo/mongodoc/client.go | 33 +-- .../mongo/mongodoc/clientcol.go | 8 +- .../infrastructure/mongo/mongodoc/dataset.go | 5 +- .../mongo/mongodoc/dataset_schema.go | 5 +- .../infrastructure/mongo/mongodoc/layer.go | 9 +- .../infrastructure/mongo/mongodoc/property.go | 5 +- .../mongo/mongodoc/property_schema.go | 8 +- internal/infrastructure/mongo/mongodoc/tag.go | 9 +- .../infrastructure/mongo/mongodoc/tag_test.go | 35 ++- .../infrastructure/mongo/mongodoc/util.go | 101 ++++++- .../mongo/mongodoc/util_test.go | 91 +++++++ internal/infrastructure/mongo/plugin.go | 123 +++------ internal/infrastructure/mongo/project.go | 97 +++---- internal/infrastructure/mongo/property.go | 128 +++++---- .../infrastructure/mongo/property_schema.go | 67 ++--- internal/infrastructure/mongo/scene.go | 60 +++-- internal/infrastructure/mongo/tag.go | 136 +++++----- internal/infrastructure/mongo/team.go | 6 +- internal/usecase/interactor/asset.go | 34 ++- internal/usecase/interactor/common.go | 26 +- internal/usecase/interactor/dataset.go | 56 +--- internal/usecase/interactor/layer.go | 174 ++++-------- internal/usecase/interactor/layer_test.go | 7 +- internal/usecase/interactor/plugin.go | 16 +- internal/usecase/interactor/plugin_upload.go | 30 +-- .../usecase/interactor/plugin_upload_test.go | 
30 +-- internal/usecase/interactor/project.go | 47 +--- internal/usecase/interactor/property.go | 86 ++---- internal/usecase/interactor/property_test.go | 14 +- internal/usecase/interactor/scene.go | 147 ++++------- internal/usecase/interactor/tag.go | 66 ++--- internal/usecase/interfaces/tag.go | 5 +- internal/usecase/pageinfo.go | 10 + internal/usecase/repo/asset.go | 7 +- internal/usecase/repo/container.go | 94 ++++++- internal/usecase/repo/dataset.go | 19 +- internal/usecase/repo/dataset_schema.go | 5 +- internal/usecase/repo/layer.go | 31 +-- internal/usecase/repo/plugin.go | 9 +- internal/usecase/repo/project.go | 5 +- internal/usecase/repo/property.go | 9 +- internal/usecase/repo/property_schema.go | 1 + internal/usecase/repo/scene.go | 7 +- internal/usecase/repo/tag.go | 19 +- pkg/plugin/list.go | 50 ++++ pkg/plugin/list_test.go | 45 ++++ pkg/plugin/loader.go | 2 +- pkg/plugin/plugin.go | 4 + pkg/property/schema.go | 4 + pkg/property/schema_list.go | 12 + pkg/property/schema_list_test.go | 45 ++++ pkg/scene/id.go | 11 + pkg/scene/id_test.go | 10 + pkg/scene/sceneops/dataset_migrator.go | 9 +- pkg/scene/sceneops/plugin_migrator.go | 2 +- pkg/user/id.go | 11 + pkg/user/id_test.go | 10 + tools/cmd/migrategen/main.go | 2 + 90 files changed, 2199 insertions(+), 1639 deletions(-) delete mode 100644 internal/infrastructure/memory/util.go create mode 100644 internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go create mode 100644 internal/infrastructure/mongo/mongodoc/util_test.go create mode 100644 pkg/plugin/list.go create mode 100644 pkg/plugin/list_test.go create mode 100644 pkg/property/schema_list_test.go diff --git a/internal/adapter/gql/resolver_mutation_tag.go b/internal/adapter/gql/resolver_mutation_tag.go index 99bd76cac..0ed5c8cef 100644 --- a/internal/adapter/gql/resolver_mutation_tag.go +++ b/internal/adapter/gql/resolver_mutation_tag.go @@ -43,9 +43,8 @@ func (r *mutationResolver) CreateTagGroup(ctx 
context.Context, input gqlmodel.Cr func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) { tag, err := usecases(ctx).Tag.UpdateTag(ctx, interfaces.UpdateTagParam{ - Label: input.Label, - SceneID: id.SceneID(input.SceneID), - TagID: id.TagID(input.TagID), + Label: input.Label, + TagID: id.TagID(input.TagID), }, getOperator(ctx)) if err != nil { return nil, err diff --git a/internal/app/app.go b/internal/app/app.go index 58afca7f1..b0344349b 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -31,7 +31,11 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { // basic middleware logger := GetEchoLogger() e.Logger = logger - e.Use(logger.Hook(), middleware.Recover(), otelecho.Middleware("reearth-backend")) + e.Use( + logger.Hook(), + middleware.Recover(), + otelecho.Middleware("reearth-backend"), + ) origins := allowedOrigins(cfg) if len(origins) > 0 { e.Use( @@ -41,6 +45,12 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { ) } + e.Use( + jwtEchoMiddleware(cfg), + parseJwtMiddleware(), + authMiddleware(cfg), + ) + // enable pprof if e.Debug { pprofGroup := e.Group("/debug/pprof") @@ -65,14 +75,13 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { publishedIndexHTML = string(html) } } - usecases := interactor.NewContainer(cfg.Repos, cfg.Gateways, interactor.ContainerConfig{ + + e.Use(UsecaseMiddleware(cfg.Repos, cfg.Gateways, interactor.ContainerConfig{ SignupSecret: cfg.Config.SignupSecret, PublishedIndexHTML: publishedIndexHTML, PublishedIndexURL: cfg.Config.Published.IndexURL, AuthSrvUIDomain: cfg.Config.AuthSrv.UIDomain, - }) - - e.Use(UsecaseMiddleware(&usecases)) + })) // auth srv auth := e.Group("") @@ -88,15 +97,13 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { api.GET("/published/:name", PublishedMetadata()) api.GET("/published_data/:name", PublishedData()) - privateApi := api.Group("") - 
authRequired(privateApi, cfg) + privateApi := api.Group("", AuthRequiredMiddleware()) graphqlAPI(e, privateApi, cfg) privateAPI(e, privateApi, cfg.Repos) - published := e.Group("/p") - publishedAuth := PublishedAuthMiddleware() - published.GET("/:name/data.json", PublishedData(), publishedAuth) - published.GET("/:name/", PublishedIndex(), publishedAuth) + published := e.Group("/p", PublishedAuthMiddleware()) + published.GET("/:name/data.json", PublishedData()) + published.GET("/:name/", PublishedIndex()) serveFiles(e, cfg.Gateways.File) web(e, cfg.Config.Web, cfg.Config.Auth0) @@ -121,12 +128,6 @@ func errorHandler(next func(error, echo.Context)) func(error, echo.Context) { } } -func authRequired(g *echo.Group, cfg *ServerConfig) { - g.Use(jwtEchoMiddleware(cfg)) - g.Use(parseJwtMiddleware()) - g.Use(authMiddleware(cfg)) -} - func allowedOrigins(cfg *ServerConfig) []string { if cfg == nil { return nil diff --git a/internal/app/auth_client.go b/internal/app/auth_client.go index b20a99976..5aad87ec2 100644 --- a/internal/app/auth_client.go +++ b/internal/app/auth_client.go @@ -67,14 +67,19 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } } - op, err := generateOperator(ctx, cfg, u) - if err != nil { - return err + if sub != "" { + ctx = adapter.AttachSub(ctx, sub) } - ctx = adapter.AttachSub(ctx, sub) - ctx = adapter.AttachOperator(ctx, op) - ctx = adapter.AttachUser(ctx, u) + if u != nil { + op, err := generateOperator(ctx, cfg, u) + if err != nil { + return err + } + + ctx = adapter.AttachUser(ctx, u) + ctx = adapter.AttachOperator(ctx, op) + } c.SetRequest(req.WithContext(ctx)) return next(c) @@ -121,3 +126,15 @@ func addAuth0SubToUser(ctx context.Context, u *user.User, a user.Auth, cfg *Serv } return nil } + +func AuthRequiredMiddleware() echo.MiddlewareFunc { + return func(next echo.HandlerFunc) echo.HandlerFunc { + return func(c echo.Context) error { + ctx := c.Request().Context() + if adapter.Operator(ctx) == nil { + return 
echo.ErrUnauthorized + } + return next(c) + } + } +} diff --git a/internal/app/private.go b/internal/app/private.go index c616ecec1..1884bddde 100644 --- a/internal/app/private.go +++ b/internal/app/private.go @@ -55,7 +55,7 @@ func privateAPI( if op == nil { return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} } - scenes := op.AllReadableScenes() + repos := repos.Filtered(repo.TeamFilterFromOperator(op), repo.SceneFilterFromOperator(op)) param := c.Param("param") params := strings.Split(param, ".") @@ -68,7 +68,7 @@ func privateAPI( return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadID} } - layer, err := repos.Layer.FindByID(ctx, lid, scenes) + layer, err := repos.Layer.FindByID(ctx, lid) if err != nil { if errors.Is(rerror.ErrNotFound, err) { return &echo.HTTPError{Code: http.StatusNotFound, Message: err} @@ -88,11 +88,11 @@ func privateAPI( ex := &encoding.Exporter{ Merger: &merging.Merger{ - LayerLoader: repo.LayerLoaderFrom(repos.Layer, scenes), - PropertyLoader: repo.PropertyLoaderFrom(repos.Property, scenes), + LayerLoader: repo.LayerLoaderFrom(repos.Layer), + PropertyLoader: repo.PropertyLoaderFrom(repos.Property), }, Sealer: &merging.Sealer{ - DatasetGraphLoader: repo.DatasetGraphLoaderFrom(repos.Dataset, scenes), + DatasetGraphLoader: repo.DatasetGraphLoaderFrom(repos.Dataset), }, Encoder: e, } diff --git a/internal/app/public_test.go b/internal/app/public_test.go index e5e1b9460..f7ff0819c 100644 --- a/internal/app/public_test.go +++ b/internal/app/public_test.go @@ -11,6 +11,7 @@ import ( "testing" "github.com/labstack/echo/v4" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/stretchr/testify/assert" @@ -70,9 +71,7 @@ func TestPublishedAuthMiddleware(t *testing.T) { c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) - m := 
UsecaseMiddleware(&interfaces.Container{ - Published: &mockPublished{}, - }) + m := mockPublishedUsecaseMiddleware(false) err := m(PublishedAuthMiddleware()(func(c echo.Context) error { return c.String(http.StatusOK, "test") @@ -121,9 +120,7 @@ func TestPublishedData(t *testing.T) { c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) - m := UsecaseMiddleware(&interfaces.Container{ - Published: &mockPublished{}, - }) + m := mockPublishedUsecaseMiddleware(false) err := m(PublishedData())(c) @@ -178,9 +175,7 @@ func TestPublishedIndex(t *testing.T) { c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) - m := UsecaseMiddleware(&interfaces.Container{ - Published: &mockPublished{EmptyIndex: tc.EmptyIndex}, - }) + m := mockPublishedUsecaseMiddleware(tc.EmptyIndex) err := m(PublishedIndex())(c) @@ -196,6 +191,14 @@ func TestPublishedIndex(t *testing.T) { } } +func mockPublishedUsecaseMiddleware(emptyIndex bool) echo.MiddlewareFunc { + return ContextMiddleware(func(ctx context.Context) context.Context { + return adapter.AttachUsecases(ctx, &interfaces.Container{ + Published: &mockPublished{EmptyIndex: emptyIndex}, + }) + }) +} + type mockPublished struct { interfaces.Published EmptyIndex bool diff --git a/internal/app/usecase.go b/internal/app/usecase.go index f4eb67098..69b68572c 100644 --- a/internal/app/usecase.go +++ b/internal/app/usecase.go @@ -1,20 +1,40 @@ package app import ( + "context" + "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/internal/adapter" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interactor" + "github.com/reearth/reearth-backend/internal/usecase/repo" ) -func UsecaseMiddleware(uc *interfaces.Container) echo.MiddlewareFunc { +func UsecaseMiddleware(r *repo.Container, g *gateway.Container, config interactor.ContainerConfig) 
echo.MiddlewareFunc { + return ContextMiddleware(func(ctx context.Context) context.Context { + var r2 *repo.Container + if op := adapter.Operator(ctx); op != nil && r != nil { + // apply filters to repos + r3 := r.Filtered( + repo.TeamFilterFromOperator(op), + repo.SceneFilterFromOperator(op), + ) + r2 = &r3 + } else { + r2 = r + } + + uc := interactor.NewContainer(r2, g, config) + ctx = adapter.AttachUsecases(ctx, &uc) + return ctx + }) +} + +func ContextMiddleware(fn func(ctx context.Context) context.Context) echo.MiddlewareFunc { return func(next echo.HandlerFunc) echo.HandlerFunc { return func(c echo.Context) error { req := c.Request() - ctx := req.Context() - - ctx = adapter.AttachUsecases(ctx, uc) - - c.SetRequest(req.WithContext(ctx)) + c.SetRequest(req.WithContext(fn(req.Context()))) return next(c) } } diff --git a/internal/infrastructure/adapter/plugin.go b/internal/infrastructure/adapter/plugin.go index c956d03e7..195267281 100644 --- a/internal/infrastructure/adapter/plugin.go +++ b/internal/infrastructure/adapter/plugin.go @@ -24,9 +24,20 @@ func NewPlugin(readers []repo.Plugin, writer repo.Plugin) repo.Plugin { } } -func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { +func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { + readers := make([]repo.Plugin, 0, len(r.readers)) + for _, r := range r.readers { + readers = append(readers, r.Filtered(f)) + } + return &pluginRepo{ + readers: readers, + writer: r.writer.Filtered(f), + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { for _, re := range r.readers { - if res, err := re.FindByID(ctx, id, sids); err != nil { + if res, err := re.FindByID(ctx, id); err != nil { if errors.Is(err, rerror.ErrNotFound) { continue } else { @@ -39,10 +50,10 @@ func (r *pluginRepo) FindByID(ctx context.Context, id id.PluginID, sids []id.Sce return nil, rerror.ErrNotFound } -func (r *pluginRepo) 
FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { results := make([]*plugin.Plugin, 0, len(ids)) for _, id := range ids { - res, err := r.FindByID(ctx, id, sids) + res, err := r.FindByID(ctx, id) if err != nil && err != rerror.ErrNotFound { return nil, err } diff --git a/internal/infrastructure/adapter/property_schema.go b/internal/infrastructure/adapter/property_schema.go index af3bbeaec..4576067d7 100644 --- a/internal/infrastructure/adapter/property_schema.go +++ b/internal/infrastructure/adapter/property_schema.go @@ -24,6 +24,17 @@ func NewPropertySchema(readers []repo.PropertySchema, writer repo.PropertySchema } } +func (r *propertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { + readers := make([]repo.PropertySchema, 0, len(r.readers)) + for _, r := range r.readers { + readers = append(readers, r.Filtered(f)) + } + return &propertySchema{ + readers: readers, + writer: r.writer.Filtered(f), + } +} + func (r *propertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { for _, re := range r.readers { if res, err := re.FindByID(ctx, id); err != nil { diff --git a/internal/infrastructure/fs/plugin.go b/internal/infrastructure/fs/plugin.go index f5d4c5686..8b993ac49 100644 --- a/internal/infrastructure/fs/plugin.go +++ b/internal/infrastructure/fs/plugin.go @@ -16,6 +16,7 @@ import ( type pluginRepo struct { fs afero.Fs + f repo.SceneFilter } func NewPlugin(fs afero.Fs) repo.Plugin { @@ -24,24 +25,30 @@ func NewPlugin(fs afero.Fs) repo.Plugin { } } -func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { +func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { + return &pluginRepo{ + fs: r.fs, + f: f.Clone(), + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID) (*plugin.Plugin, 
error) { m, err := readPluginManifest(r.fs, pid) if err != nil { return nil, err } - sid := m.Plugin.ID().Scene() - if sid != nil && !sid.Contains(sids) { + if s := m.Plugin.ID().Scene(); s != nil && !r.f.CanRead(*s) { return nil, nil } return m.Plugin, nil } -func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { results := make([]*plugin.Plugin, 0, len(ids)) for _, id := range ids { - res, err := r.FindByID(ctx, id, sids) + res, err := r.FindByID(ctx, id) if err != nil { return nil, err } diff --git a/internal/infrastructure/fs/plugin_test.go b/internal/infrastructure/fs/plugin_test.go index 67bd4cb85..b15296921 100644 --- a/internal/infrastructure/fs/plugin_test.go +++ b/internal/infrastructure/fs/plugin_test.go @@ -13,7 +13,7 @@ import ( func TestPlugin(t *testing.T) { ctx := context.Background() fs := NewPlugin(mockPluginFS()) - p, err := fs.FindByID(ctx, plugin.MustID("testplugin~1.0.0"), nil) + p, err := fs.FindByID(ctx, plugin.MustID("testplugin~1.0.0")) assert.NoError(t, err) assert.Equal(t, plugin.New().ID(plugin.MustID("testplugin~1.0.0")).Name(i18n.String{ "en": "testplugin", diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go index d1d92fb67..47353ad3e 100644 --- a/internal/infrastructure/fs/property_schema.go +++ b/internal/infrastructure/fs/property_schema.go @@ -13,6 +13,7 @@ import ( type propertySchema struct { fs afero.Fs + f repo.SceneFilter } func NewPropertySchema(fs afero.Fs) repo.PropertySchema { @@ -21,6 +22,13 @@ func NewPropertySchema(fs afero.Fs) repo.PropertySchema { } } +func (r *propertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { + return &propertySchema{ + fs: r.fs, + f: f.Clone(), + } +} + func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (*property.Schema, error) { m, err := 
readPluginManifest(r.fs, i.Plugin()) if err != nil { @@ -36,7 +44,9 @@ func (r *propertySchema) FindByID(ctx context.Context, i id.PropertySchemaID) (* continue } if ps.ID().Equal(i) { - return ps, nil + if s := ps.Scene(); s == nil || r.f.CanRead(*s) { + return ps, nil + } } } diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index 9bde832cd..72758d87e 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -14,6 +14,7 @@ import ( type Asset struct { lock sync.Mutex data map[id.AssetID]*asset.Asset + f repo.TeamFilter } func NewAsset() repo.Asset { @@ -22,25 +23,33 @@ func NewAsset() repo.Asset { } } -func (r *Asset) FindByID(ctx context.Context, id id.AssetID, teams []id.TeamID) (*asset.Asset, error) { +func (r *Asset) Filtered(f repo.TeamFilter) repo.Asset { + return &Asset{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { r.lock.Lock() defer r.lock.Unlock() d, ok := r.data[id] - if ok { + if ok && r.f.CanRead(d.Team()) { return d, nil } return &asset.Asset{}, rerror.ErrNotFound } -func (r *Asset) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id.TeamID) ([]*asset.Asset, error) { +func (r *Asset) FindByIDs(ctx context.Context, ids []id.AssetID) ([]*asset.Asset, error) { r.lock.Lock() defer r.lock.Unlock() result := []*asset.Asset{} for _, id := range ids { if d, ok := r.data[id]; ok { - if isTeamIncludes(d.Team(), teams) { + if r.f.CanRead(d.Team()) { result = append(result, d) continue } @@ -50,23 +59,11 @@ func (r *Asset) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id.Team return result, nil } -func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { - r.lock.Lock() - defer r.lock.Unlock() - - r.data[a.ID()] = a - return nil -} - -func (r *Asset) Remove(ctx context.Context, id id.AssetID) 
error { - r.lock.Lock() - defer r.lock.Unlock() - - delete(r.data, id) - return nil -} - func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + if !r.f.CanRead(id) { + return nil, usecase.EmptyPageInfo(), nil + } + r.lock.Lock() defer r.lock.Unlock() @@ -93,3 +90,26 @@ func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecas true, ), nil } + +func (r *Asset) Save(ctx context.Context, a *asset.Asset) error { + if !r.f.CanWrite(a.Team()) { + return repo.ErrOperationDenied + } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[a.ID()] = a + return nil +} + +func (r *Asset) Remove(ctx context.Context, id id.AssetID) error { + r.lock.Lock() + defer r.lock.Unlock() + + if a, ok := r.data[id]; ok && r.f.CanWrite(a.Team()) { + delete(r.data, id) + } + + return nil +} diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go index c3a119c15..cea85ccd8 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -13,35 +13,44 @@ import ( type Dataset struct { lock sync.Mutex - data map[id.DatasetID]dataset.Dataset + data map[id.DatasetID]*dataset.Dataset + f repo.SceneFilter } func NewDataset() repo.Dataset { return &Dataset{ - data: map[id.DatasetID]dataset.Dataset{}, + data: map[id.DatasetID]*dataset.Dataset{}, } } -func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID, f []id.SceneID) (*dataset.Dataset, error) { +func (r *Dataset) Filtered(f repo.SceneFilter) repo.Dataset { + return &Dataset{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID) (*dataset.Dataset, error) { r.lock.Lock() defer r.lock.Unlock() p, ok := r.data[id] - if ok && isSceneIncludes(p.Scene(), f) { - return &p, nil + if ok && r.f.CanRead(p.Scene()) { + return p, nil } 
return nil, rerror.ErrNotFound } -func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { +func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID) (dataset.List, error) { r.lock.Lock() defer r.lock.Unlock() result := dataset.List{} for _, id := range ids { if d, ok := r.data[id]; ok { - if isSceneIncludes(d.Scene(), f) { - result = append(result, &d) + if r.f.CanRead(d.Scene()) { + result = append(result, d) continue } } @@ -50,15 +59,14 @@ func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.Scen return result, nil } -func (r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, f []id.SceneID, p *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { +func (r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, p *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { r.lock.Lock() defer r.lock.Unlock() result := dataset.List{} for _, d := range r.data { - if d.Schema() == id && isSceneIncludes(d.Scene(), f) { - dd := d - result = append(result, &dd) + if d.Schema() == id && r.f.CanRead(d.Scene()) { + result = append(result, d) } } @@ -85,23 +93,21 @@ func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) (d result := dataset.List{} for _, d := range r.data { - if d.Schema() == id { - dd := d - result = append(result, &dd) + if d.Schema() == id && r.f.CanRead(d.Scene()) { + result = append(result, d) } } return result, nil } -func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, f []id.SceneID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { +func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { r.lock.Lock() defer r.lock.Unlock() result := make(dataset.List, 0, len(fields)) next := i for _, nextField := range fields { - d, _ := r.FindByID(ctx, next, f) - if d != nil { + if d := r.data[next]; d != nil && 
r.f.CanRead(d.Scene()) { result = append(result, d) if f := d.Field(nextField); f != nil { if f.Type() == dataset.ValueTypeRef { @@ -119,10 +125,14 @@ func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, f []id.SceneID, } func (r *Dataset) Save(ctx context.Context, d *dataset.Dataset) error { + if !r.f.CanWrite(d.Scene()) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() - r.data[d.ID()] = *d + r.data[d.ID()] = d return nil } @@ -131,7 +141,9 @@ func (r *Dataset) SaveAll(ctx context.Context, dl dataset.List) error { defer r.lock.Unlock() for _, d := range dl { - r.data[d.ID()] = *d + if r.f.CanWrite(d.Scene()) { + r.data[d.ID()] = d + } } return nil } @@ -140,7 +152,9 @@ func (r *Dataset) Remove(ctx context.Context, id id.DatasetID) error { r.lock.Lock() defer r.lock.Unlock() - delete(r.data, id) + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } return nil } @@ -149,12 +163,18 @@ func (r *Dataset) RemoveAll(ctx context.Context, ids []id.DatasetID) error { defer r.lock.Unlock() for _, id := range ids { - delete(r.data, id) + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } } return nil } func (r *Dataset) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go index 372196f64..dfad074cf 100644 --- a/internal/infrastructure/memory/dataset_schema.go +++ b/internal/infrastructure/memory/dataset_schema.go @@ -13,35 +13,44 @@ import ( type DatasetSchema struct { lock sync.Mutex - data map[id.DatasetSchemaID]dataset.Schema + data map[id.DatasetSchemaID]*dataset.Schema + f repo.SceneFilter } func NewDatasetSchema() repo.DatasetSchema { return &DatasetSchema{ - data: map[id.DatasetSchemaID]dataset.Schema{}, + data: map[id.DatasetSchemaID]*dataset.Schema{}, } } -func (r 
*DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID, f []id.SceneID) (*dataset.Schema, error) { +func (r *DatasetSchema) Filtered(f repo.SceneFilter) repo.DatasetSchema { + return &DatasetSchema{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { r.lock.Lock() defer r.lock.Unlock() p, ok := r.data[id] - if ok { - return &p, nil + if ok && r.f.CanRead(p.Scene()) { + return p, nil } return nil, rerror.ErrNotFound } -func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { +func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID) (dataset.SchemaList, error) { r.lock.Lock() defer r.lock.Unlock() result := dataset.SchemaList{} for _, id := range ids { - if d, ok := r.data[id]; ok { + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { d2 := d - result = append(result, &d2) + result = append(result, d2) } else { result = append(result, nil) } @@ -50,6 +59,10 @@ func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, } func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) { + if !r.f.CanRead(s) { + return nil, usecase.EmptyPageInfo(), nil + } + r.lock.Lock() defer r.lock.Unlock() @@ -57,7 +70,7 @@ func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecas for _, d := range r.data { if d.Scene() == s { d2 := d - result = append(result, &d2) + result = append(result, d2) } } @@ -79,28 +92,36 @@ func (r *DatasetSchema) FindByScene(ctx context.Context, s id.SceneID, p *usecas } func (r *DatasetSchema) FindBySceneAll(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + if !r.f.CanRead(s) { + return nil, nil + } + r.lock.Lock() defer r.lock.Unlock() 
result := dataset.SchemaList{} for _, d := range r.data { - if d.Scene() == s { + if d.Scene() == s && r.f.CanRead(d.Scene()) { d2 := d - result = append(result, &d2) + result = append(result, d2) } } return result, nil } func (r *DatasetSchema) FindAllDynamicByScene(ctx context.Context, s id.SceneID) (dataset.SchemaList, error) { + if !r.f.CanRead(s) { + return nil, nil + } + r.lock.Lock() defer r.lock.Unlock() result := dataset.SchemaList{} for _, d := range r.data { - if d.Scene() == s && d.Dynamic() { + if d.Scene() == s && d.Dynamic() && r.f.CanRead(d.Scene()) { d2 := d - result = append(result, &d2) + result = append(result, d2) } } return result, nil @@ -111,13 +132,17 @@ func (r *DatasetSchema) FindDynamicByID(ctx context.Context, id id.DatasetSchema defer r.lock.Unlock() p, ok := r.data[id] - if ok && p.Dynamic() { - return &p, nil + if ok && p.Dynamic() && r.f.CanRead(p.Scene()) { + return p, nil } return nil, rerror.ErrNotFound } func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, src string) (dataset.SchemaList, error) { + if !r.f.CanRead(s) { + return nil, rerror.ErrNotFound + } + r.lock.Lock() defer r.lock.Unlock() @@ -125,17 +150,21 @@ func (r *DatasetSchema) FindBySceneAndSource(ctx context.Context, s id.SceneID, for _, d := range r.data { if d.Scene() == s && d.Source() == src { d2 := d - result = append(result, &d2) + result = append(result, d2) } } return result, nil } func (r *DatasetSchema) Save(ctx context.Context, d *dataset.Schema) error { + if !r.f.CanWrite(d.Scene()) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() - r.data[d.ID()] = *d + r.data[d.ID()] = d return nil } @@ -144,7 +173,9 @@ func (r *DatasetSchema) SaveAll(ctx context.Context, dl dataset.SchemaList) erro defer r.lock.Unlock() for _, d := range dl { - r.data[d.ID()] = *d + if r.f.CanWrite(d.Scene()) { + r.data[d.ID()] = d + } } return nil } @@ -153,7 +184,9 @@ func (r *DatasetSchema) Remove(ctx context.Context, id 
id.DatasetSchemaID) error r.lock.Lock() defer r.lock.Unlock() - delete(r.data, id) + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } return nil } @@ -162,12 +195,18 @@ func (r *DatasetSchema) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) defer r.lock.Unlock() for _, id := range ids { - delete(r.data, id) + if d, ok := r.data[id]; ok && r.f.CanWrite(d.Scene()) { + delete(r.data, id) + } } return nil } func (r *DatasetSchema) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index 06d87f128..afdddde30 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -13,6 +13,7 @@ import ( type Layer struct { lock sync.Mutex data map[id.LayerID]layer.Layer + f repo.SceneFilter } func NewLayer() repo.Layer { @@ -21,46 +22,50 @@ func NewLayer() repo.Layer { } } -func (r *Layer) FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (layer.Layer, error) { +func (r *Layer) Filtered(f repo.SceneFilter) repo.Layer { + return &Layer{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *Layer) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, error) { r.lock.Lock() defer r.lock.Unlock() res, ok := r.data[id] - if ok && isSceneIncludes(res.Scene(), f) { + if ok && r.f.CanRead(res.Scene()) { return res, nil } return nil, rerror.ErrNotFound } -func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { +func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID) (layer.List, error) { r.lock.Lock() defer r.lock.Unlock() result := layer.List{} for _, id := range ids { - if d, ok := r.data[id]; ok { - if isSceneIncludes(d.Scene(), f) { - result = append(result, &d) - continue - 
} + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + result = append(result, &d) + continue } result = append(result, nil) } return result, nil } -func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { +func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID) (layer.GroupList, error) { r.lock.Lock() defer r.lock.Unlock() result := layer.GroupList{} for _, id := range ids { if d, ok := r.data[id]; ok { - if lg := layer.GroupFromLayer(d); lg != nil { - if isSceneIncludes(lg.Scene(), f) { - result = append(result, lg) - continue - } + if lg := layer.GroupFromLayer(d); lg != nil && r.f.CanRead(lg.Scene()) { + result = append(result, lg) + continue } result = append(result, nil) } @@ -68,18 +73,16 @@ func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.Sce return result, nil } -func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, error) { +func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID) (layer.ItemList, error) { r.lock.Lock() defer r.lock.Unlock() result := layer.ItemList{} for _, id := range ids { if d, ok := r.data[id]; ok { - if li := layer.ItemFromLayer(d); li != nil { - if isSceneIncludes(li.Scene(), f) { - result = append(result, li) - continue - } + if li := layer.ItemFromLayer(d); li != nil && r.f.CanRead(li.Scene()) { + result = append(result, li) + continue } result = append(result, nil) } @@ -87,7 +90,7 @@ func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.Scen return result, nil } -func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Item, error) { +func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID) (*layer.Item, error) { r.lock.Lock() defer r.lock.Unlock() @@ -95,15 +98,13 @@ func (r *Layer) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) if !ok { return &layer.Item{}, nil } - if li := 
layer.ItemFromLayer(d); li != nil { - if isSceneIncludes(li.Scene(), f) { - return li, nil - } + if li := layer.ItemFromLayer(d); li != nil && r.f.CanRead(li.Scene()) { + return li, nil } return nil, rerror.ErrNotFound } -func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { +func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { r.lock.Lock() defer r.lock.Unlock() @@ -111,10 +112,8 @@ func (r *Layer) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID if !ok { return &layer.Group{}, nil } - if lg := layer.GroupFromLayer(d); lg != nil { - if isSceneIncludes(lg.Scene(), f) { - return lg, nil - } + if lg := layer.GroupFromLayer(d); lg != nil && r.f.CanRead(lg.Scene()) { + return lg, nil } return nil, rerror.ErrNotFound } @@ -128,7 +127,7 @@ func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id if l.Scene() != s { continue } - if lg, ok := l.(*layer.Group); ok { + if lg := layer.ToLayerGroup(l); lg != nil && r.f.CanRead(lg.Scene()) { if dsid := lg.LinkedDatasetSchema(); dsid != nil && *dsid == ds { result = append(result, lg) } @@ -137,22 +136,17 @@ func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id return result, nil } -func (r *Layer) FindParentsByIDs(_ context.Context, ids []id.LayerID, scenes []id.SceneID) (layer.GroupList, error) { +func (r *Layer) FindParentsByIDs(_ context.Context, ids []id.LayerID) (layer.GroupList, error) { r.lock.Lock() defer r.lock.Unlock() res := layer.GroupList{} for _, l := range r.data { - if !isSceneIncludes(l.Scene(), scenes) { - continue - } - gl, ok := l.(*layer.Group) - if !ok { - continue - } - for _, cl := range gl.Layers().Layers() { - if cl.Contains(ids) { - res = append(res, gl) + if lg := layer.ToLayerGroup(l); lg != nil && r.f.CanRead(l.Scene()) { + for _, cl := range lg.Layers().Layers() { + if cl.Contains(ids) { + res = append(res, lg) + } } } } @@ -160,18 +154,14 @@ 
func (r *Layer) FindParentsByIDs(_ context.Context, ids []id.LayerID, scenes []i return res, nil } -func (r *Layer) FindByPluginAndExtension(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { +func (r *Layer) FindByPluginAndExtension(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { r.lock.Lock() defer r.lock.Unlock() res := layer.List{} for _, l := range r.data { l := l - if !isSceneIncludes(l.Scene(), scenes) { - continue - } - - if p := l.Plugin(); p != nil && p.Equal(pid) { + if r.f.CanRead(l.Scene()) && l.Plugin() != nil && l.Plugin().Equal(pid) { e := l.Extension() if eid == nil || e != nil && *e == *eid { res = append(res, &l) @@ -182,14 +172,14 @@ func (r *Layer) FindByPluginAndExtension(_ context.Context, pid id.PluginID, eid return res, nil } -func (r *Layer) FindByPluginAndExtensionOfBlocks(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { +func (r *Layer) FindByPluginAndExtensionOfBlocks(_ context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { r.lock.Lock() defer r.lock.Unlock() res := layer.List{} for _, l := range r.data { l := l - if !isSceneIncludes(l.Scene(), scenes) || len(l.Infobox().FieldsByPlugin(pid, eid)) == 0 { + if !r.f.CanRead(l.Scene()) || len(l.Infobox().FieldsByPlugin(pid, eid)) == 0 { continue } res = append(res, &l) @@ -198,12 +188,12 @@ func (r *Layer) FindByPluginAndExtensionOfBlocks(_ context.Context, pid id.Plugi return res, nil } -func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { +func (r *Layer) FindByProperty(ctx context.Context, id id.PropertyID) (layer.Layer, error) { r.lock.Lock() defer r.lock.Unlock() for _, l := range r.data { - if !isSceneIncludes(l.Scene(), f) { + if !r.f.CanRead(l.Scene()) { continue } if pid := l.Property(); pid != nil && *pid == id { @@ -221,21 +211,16 @@ func (r *Layer) 
FindByProperty(ctx context.Context, id id.PropertyID, f []id.Sce return nil, rerror.ErrNotFound } -func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { +func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { r.lock.Lock() defer r.lock.Unlock() for _, l := range r.data { - if !isSceneIncludes(l.Scene(), f) { - continue - } - gl, ok := l.(*layer.Group) - if !ok { - continue - } - for _, cl := range gl.Layers().Layers() { - if cl == id { - return gl, nil + if lg := layer.ToLayerGroup(l); lg != nil && r.f.CanRead(l.Scene()) { + for _, cl := range lg.Layers().Layers() { + if cl == id { + return lg, nil + } } } } @@ -243,6 +228,10 @@ func (r *Layer) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneI } func (r *Layer) FindByScene(ctx context.Context, sceneID id.SceneID) (layer.List, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + r.lock.Lock() defer r.lock.Unlock() @@ -262,14 +251,34 @@ func (r *Layer) FindAllByDatasetSchema(ctx context.Context, datasetSchemaID id.D res := layer.List{} for _, l := range r.data { - if d := layer.ToLayerGroup(l).LinkedDatasetSchema(); d != nil && *d == datasetSchemaID { + l := l + if d := layer.ToLayerGroup(l).LinkedDatasetSchema(); d != nil && *d == datasetSchemaID && r.f.CanRead(l.Scene()) { res = append(res, &l) } } return res, nil } +func (r *Layer) FindByTag(ctx context.Context, tagID id.TagID) (layer.List, error) { + r.lock.Lock() + defer r.lock.Unlock() + + var res layer.List + for _, l := range r.data { + l := l + if l.Tags().Has(tagID) && r.f.CanRead(l.Scene()) { + res = append(res, &l) + } + } + + return res, nil +} + func (r *Layer) Save(ctx context.Context, l layer.Layer) error { + if !r.f.CanWrite(l.Scene()) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() @@ -283,18 +292,20 @@ func (r *Layer) SaveAll(ctx context.Context, ll layer.List) error { for _, l := range ll { layer := *l - 
r.data[layer.ID()] = layer + if r.f.CanWrite(layer.Scene()) { + r.data[layer.ID()] = layer + } } return nil } -func (r *Layer) UpdatePlugin(ctx context.Context, old id.PluginID, new id.PluginID, scenes []id.SceneID) error { +func (r *Layer) UpdatePlugin(ctx context.Context, old id.PluginID, new id.PluginID) error { r.lock.Lock() defer r.lock.Unlock() for _, l := range r.data { p := l.Plugin() - if p != nil && p.Equal(old) && isSceneIncludes(l.Scene(), scenes) { + if p != nil && p.Equal(old) && r.f.CanWrite(l.Scene()) { l.SetPlugin(&new) r.data[l.ID()] = l } @@ -306,7 +317,9 @@ func (r *Layer) Remove(ctx context.Context, id id.LayerID) error { r.lock.Lock() defer r.lock.Unlock() - delete(r.data, id) + if l, ok := r.data[id]; ok && l != nil && r.f.CanWrite(l.Scene()) { + delete(r.data, id) + } return nil } @@ -315,12 +328,18 @@ func (r *Layer) RemoveAll(ctx context.Context, ids []id.LayerID) error { defer r.lock.Unlock() for _, id := range ids { - delete(r.data, id) + if l, ok := r.data[id]; ok && l != nil && r.f.CanWrite(l.Scene()) { + delete(r.data, id) + } } return nil } func (r *Layer) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + r.lock.Lock() defer r.lock.Unlock() @@ -331,17 +350,3 @@ func (r *Layer) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { } return nil } - -func (r *Layer) FindByTag(ctx context.Context, tagID id.TagID, s []id.SceneID) (layer.List, error) { - r.lock.Lock() - defer r.lock.Unlock() - var res layer.List - for _, l := range r.data { - l := l - if l.Tags().Has(tagID) { - res = append(res, &l) - } - } - - return res, nil -} diff --git a/internal/infrastructure/memory/layer_test.go b/internal/infrastructure/memory/layer_test.go index 1ca83948f..d3d2693c3 100644 --- a/internal/infrastructure/memory/layer_test.go +++ b/internal/infrastructure/memory/layer_test.go @@ -13,7 +13,6 @@ import ( func TestLayer_FindByTag(t *testing.T) { ctx := context.Background() sid := 
id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tl := layer.NewTagList([]layer.Tag{layer.NewTagGroup(t1.ID(), nil)}) lg := layer.New().NewID().Tags(tl).Scene(sid).Group().MustBuild() @@ -24,7 +23,7 @@ func TestLayer_FindByTag(t *testing.T) { }, } - out, err := repo.FindByTag(ctx, t1.ID(), sl) + out, err := repo.FindByTag(ctx, t1.ID()) assert.NoError(t, err) assert.Equal(t, layer.List{lg.LayerRef()}, out) } diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index 79f0a4477..d9361716b 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -15,6 +15,7 @@ import ( type Plugin struct { lock sync.Mutex data []*plugin.Plugin + f repo.SceneFilter } func NewPlugin() repo.Plugin { @@ -23,7 +24,15 @@ func NewPlugin() repo.Plugin { } } -func (r *Plugin) FindByID(ctx context.Context, id id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { +func (r *Plugin) Filtered(f repo.SceneFilter) repo.Plugin { + return &Plugin{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *Plugin) FindByID(ctx context.Context, id id.PluginID) (*plugin.Plugin, error) { r.lock.Lock() defer r.lock.Unlock() @@ -31,14 +40,16 @@ func (r *Plugin) FindByID(ctx context.Context, id id.PluginID, sids []id.SceneID return p, nil } for _, p := range r.data { - if p.ID().Equal(id) && (p.ID().Scene() == nil || p.ID().Scene().Contains(sids)) { - return p.Clone(), nil + if p.ID().Equal(id) { + if s := p.ID().Scene(); s == nil || r.f.CanRead(*s) { + return p.Clone(), nil + } } } return nil, rerror.ErrNotFound } -func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { +func (r *Plugin) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { r.lock.Lock() defer r.lock.Unlock() @@ -49,10 +60,12 @@ func (r *Plugin) 
FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.Sce continue } for _, p := range r.data { - if p.ID().Equal(id) && (p.ID().Scene() == nil || p.ID().Scene().Contains(sids)) { - result = append(result, p.Clone()) - } else { - result = append(result, nil) + if p.ID().Equal(id) { + if s := p.ID().Scene(); s == nil || r.f.CanRead(*s) { + result = append(result, p.Clone()) + } else { + result = append(result, nil) + } } } } @@ -66,6 +79,10 @@ func (r *Plugin) Save(ctx context.Context, p *plugin.Plugin) error { if p.ID().System() { return errors.New("cannnot save system plugin") } + if s := p.ID().Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } + for i, q := range r.data { if q.ID().Equal(p.ID()) { r.data = append(r.data[:i], r.data[i+1:]...) @@ -81,10 +98,13 @@ func (r *Plugin) Remove(ctx context.Context, id id.PluginID) error { defer r.lock.Unlock() for i := 0; i < len(r.data); i++ { - if r.data[i].ID().Equal(id) { - r.data = append(r.data[:i], r.data[i+1:]...) - i-- + if p := r.data[i]; p.ID().Equal(id) { + if s := p.ID().Scene(); s == nil || r.f.CanWrite(*s) { + r.data = append(r.data[:i], r.data[i+1:]...) 
+ i-- + } } } + return nil } diff --git a/internal/infrastructure/memory/project.go b/internal/infrastructure/memory/project.go index cb061fa4d..e6c28ca3b 100644 --- a/internal/infrastructure/memory/project.go +++ b/internal/infrastructure/memory/project.go @@ -15,6 +15,7 @@ import ( type Project struct { lock sync.Mutex data map[id.ProjectID]*project.Project + f repo.TeamFilter } func NewProject() repo.Project { @@ -23,6 +24,14 @@ func NewProject() repo.Project { } } +func (r *Project) Filtered(f repo.TeamFilter) repo.Project { + return &Project{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { r.lock.Lock() defer r.lock.Unlock() @@ -51,29 +60,26 @@ func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagin ), nil } -func (r *Project) FindByIDs(ctx context.Context, ids []id.ProjectID, filter []id.TeamID) ([]*project.Project, error) { +func (r *Project) FindByIDs(ctx context.Context, ids []id.ProjectID) ([]*project.Project, error) { r.lock.Lock() defer r.lock.Unlock() result := []*project.Project{} for _, id := range ids { - if d, ok := r.data[id]; ok { - if isTeamIncludes(d.Team(), filter) { - result = append(result, d) - continue - } + if d, ok := r.data[id]; ok && r.f.CanRead(d.Team()) { + result = append(result, d) + continue } result = append(result, nil) } return result, nil } -func (r *Project) FindByID(ctx context.Context, id id.ProjectID, filter []id.TeamID) (*project.Project, error) { +func (r *Project) FindByID(ctx context.Context, id id.ProjectID) (*project.Project, error) { r.lock.Lock() defer r.lock.Unlock() - p, ok := r.data[id] - if ok && isTeamIncludes(p.Team(), filter) { + if p, ok := r.data[id]; ok && r.f.CanRead(p.Team()) { return p, nil } return nil, rerror.ErrNotFound @@ -87,7 +93,7 @@ func (r *Project) 
FindByPublicName(ctx context.Context, name string) (*project.P return nil, nil } for _, p := range r.data { - if p.MatchWithPublicName(name) { + if p.MatchWithPublicName(name) && r.f.CanRead(p.Team()) { return p, nil } } @@ -99,7 +105,7 @@ func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err e defer r.lock.Unlock() for _, p := range r.data { - if p.Team() == team { + if p.Team() == team && r.f.CanRead(p.Team()) { c++ } } @@ -107,6 +113,10 @@ func (r *Project) CountByTeam(ctx context.Context, team id.TeamID) (c int, err e } func (r *Project) Save(ctx context.Context, p *project.Project) error { + if !r.f.CanWrite(p.Team()) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() @@ -115,14 +125,12 @@ func (r *Project) Save(ctx context.Context, p *project.Project) error { return nil } -func (r *Project) Remove(ctx context.Context, projectID id.ProjectID) error { +func (r *Project) Remove(ctx context.Context, id id.ProjectID) error { r.lock.Lock() defer r.lock.Unlock() - for sid := range r.data { - if sid == projectID { - delete(r.data, sid) - } + if p, ok := r.data[id]; ok && r.f.CanRead(p.Team()) { + delete(r.data, id) } return nil } diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index 75ac2b893..c4dd03505 100644 --- a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -15,6 +15,7 @@ import ( type Property struct { lock sync.Mutex data property.Map + f repo.SceneFilter } func NewProperty() repo.Property { @@ -23,29 +24,33 @@ func NewProperty() repo.Property { } } -func (r *Property) FindByID(ctx context.Context, id id.PropertyID, f []id.SceneID) (*property.Property, error) { +func (r *Property) Filtered(f repo.SceneFilter) repo.Property { + return &Property{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *Property) FindByID(ctx context.Context, id 
id.PropertyID) (*property.Property, error) { r.lock.Lock() defer r.lock.Unlock() - p, ok := r.data[id] - if ok && isSceneIncludes(p.Scene(), f) { + if p, ok := r.data[id]; ok && r.f.CanRead(p.Scene()) { return p, nil } return nil, rerror.ErrNotFound } -func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { +func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID) (property.List, error) { r.lock.Lock() defer r.lock.Unlock() result := property.List{} for _, id := range ids { - if d, ok := r.data[id]; ok { - d := d - if isSceneIncludes(d.Scene(), f) { - result = append(result, d) - continue - } + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + result = append(result, d) + continue } result = append(result, nil) } @@ -58,8 +63,7 @@ func (r *Property) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, di result := property.List{} for _, p := range r.data { - p := p - if p.IsDatasetLinked(sid, did) { + if p.IsDatasetLinked(sid, did) && r.f.CanRead(p.Scene()) { result = append(result, p) } } @@ -67,30 +71,33 @@ func (r *Property) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, di } func (r *Property) FindLinkedAll(ctx context.Context, s id.SceneID) (property.List, error) { + if !r.f.CanRead(s) { + return nil, nil + } + r.lock.Lock() defer r.lock.Unlock() result := property.List{} for _, p := range r.data { - p := p - if p.Scene() != s { - continue - } - if p.HasLinkedField() { + if p.Scene() == s && p.HasLinkedField() { result = append(result, p) } } return result, nil } -func (r *Property) FindBySchema(_ context.Context, schemas []id.PropertySchemaID, s id.SceneID) (property.List, error) { +func (r *Property) FindBySchema(_ context.Context, schemas []id.PropertySchemaID, scene id.SceneID) (property.List, error) { + if !r.f.CanRead(scene) { + return nil, nil + } + r.lock.Lock() defer r.lock.Unlock() result := property.List{} for _, p := range r.data { - p := p - if p.Scene() 
!= s { + if p.Scene() != scene { continue } for _, s := range schemas { @@ -106,17 +113,17 @@ func (r *Property) FindBySchema(_ context.Context, schemas []id.PropertySchemaID return result, nil } -func (r *Property) FindByPlugin(_ context.Context, plugin id.PluginID, s id.SceneID) (property.List, error) { +func (r *Property) FindByPlugin(_ context.Context, plugin id.PluginID, scene id.SceneID) (property.List, error) { + if !r.f.CanRead(scene) { + return nil, nil + } + r.lock.Lock() defer r.lock.Unlock() result := property.List{} for _, p := range r.data { - p := p - if p.Scene() != s { - continue - } - if p.Schema().Plugin().Equal(plugin) { + if p.Scene() == scene && p.Schema().Plugin().Equal(plugin) { result = append(result, p) break } @@ -128,6 +135,10 @@ func (r *Property) FindByPlugin(_ context.Context, plugin id.PluginID, s id.Scen } func (r *Property) Save(ctx context.Context, p *property.Property) error { + if !r.f.CanWrite(p.Scene()) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() @@ -140,17 +151,23 @@ func (r *Property) SaveAll(ctx context.Context, pl property.List) error { defer r.lock.Unlock() for _, p := range pl { - r.data[p.ID()] = p + if r.f.CanWrite(p.Scene()) { + r.data[p.ID()] = p + } } return nil } -func (r *Property) UpdateSchemaPlugin(ctx context.Context, old id.PluginID, new id.PluginID, s id.SceneID) error { +func (r *Property) UpdateSchemaPlugin(ctx context.Context, old id.PluginID, new id.PluginID, scene id.SceneID) error { + if !r.f.CanWrite(scene) { + return nil + } + r.lock.Lock() defer r.lock.Unlock() for _, p := range r.data { - if s := p.Schema(); s.Plugin().Equal(old) { + if s := p.Schema(); s.Plugin().Equal(old) && p.Scene() == scene { p.SetSchema(id.NewPropertySchemaID(new, s.ID())) } } @@ -161,7 +178,9 @@ func (r *Property) Remove(ctx context.Context, id id.PropertyID) error { r.lock.Lock() defer r.lock.Unlock() - delete(r.data, id) + if p, ok := r.data[id]; ok && r.f.CanWrite(p.Scene()) { + 
delete(r.data, id) + } return nil } @@ -170,12 +189,18 @@ func (r *Property) RemoveAll(ctx context.Context, ids []id.PropertyID) error { defer r.lock.Unlock() for _, id := range ids { - delete(r.data, id) + if p, ok := r.data[id]; ok && r.f.CanWrite(p.Scene()) { + delete(r.data, id) + } } return nil } func (r *Property) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index a65d869b9..5df9558da 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -15,6 +15,7 @@ import ( type PropertySchema struct { lock sync.Mutex data map[string]*property.Schema + f repo.SceneFilter } func NewPropertySchema() repo.PropertySchema { @@ -27,6 +28,14 @@ func (r *PropertySchema) initMap() { } } +func (r *PropertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { + return &PropertySchema{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { r.lock.Lock() defer r.lock.Unlock() @@ -38,7 +47,9 @@ func (r *PropertySchema) FindByID(ctx context.Context, id id.PropertySchemaID) ( r.initMap() p, ok := r.data[id.String()] if ok { - return p, nil + if s := p.Scene(); s == nil || r.f.CanRead(*s) { + return p, nil + } } return nil, rerror.ErrNotFound } @@ -55,7 +66,9 @@ func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaI continue } if d, ok := r.data[id.String()]; ok { - result = append(result, d) + if s := d.Scene(); s == nil || r.f.CanRead(*s) { + result = append(result, d) + } } else { result = append(result, nil) } @@ -64,6 +77,10 @@ func (r *PropertySchema) FindByIDs(ctx context.Context, ids []id.PropertySchemaI 
} func (r *PropertySchema) Save(ctx context.Context, p *property.Schema) error { + if s := p.Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() @@ -82,9 +99,11 @@ func (r *PropertySchema) SaveAll(ctx context.Context, p property.SchemaList) err r.initMap() for _, p := range p { if p.ID().Plugin().System() { - return errors.New("cannnot save system property schema") + continue + } + if s := p.Scene(); s == nil || r.f.CanRead(*s) { + r.data[p.ID().String()] = p } - r.data[p.ID().String()] = p } return nil } @@ -94,7 +113,13 @@ func (r *PropertySchema) Remove(ctx context.Context, id id.PropertySchemaID) err defer r.lock.Unlock() r.initMap() - delete(r.data, id.String()) + + if d, ok := r.data[id.String()]; ok { + if s := d.Scene(); s == nil || r.f.CanRead(*s) { + delete(r.data, id.String()) + } + } + return nil } @@ -104,7 +129,11 @@ func (r *PropertySchema) RemoveAll(ctx context.Context, ids []id.PropertySchemaI r.initMap() for _, id := range ids { - delete(r.data, id.String()) + if d, ok := r.data[id.String()]; ok { + if s := d.Scene(); s == nil || r.f.CanRead(*s) { + delete(r.data, id.String()) + } + } } return nil } diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index cdfd48c42..b067222fb 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -9,11 +9,13 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" ) type Scene struct { lock sync.Mutex data map[id.SceneID]*scene.Scene + f repo.TeamFilter } func NewScene() repo.Scene { @@ -22,28 +24,33 @@ func NewScene() repo.Scene { } } -func (r *Scene) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*scene.Scene, error) { +func (r *Scene) Filtered(f repo.TeamFilter) repo.Scene { + return &Scene{ + // note data 
is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} + +func (r *Scene) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, error) { r.lock.Lock() defer r.lock.Unlock() - s, ok := r.data[id] - if ok && isTeamIncludes(s.Team(), f) { + if s, ok := r.data[id]; ok && r.f.CanRead(s.Team()) { return s, nil } return nil, rerror.ErrNotFound } -func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) (scene.List, error) { +func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID) (scene.List, error) { r.lock.Lock() defer r.lock.Unlock() result := scene.List{} for _, id := range ids { - if d, ok := r.data[id]; ok { - if isTeamIncludes(d.Team(), f) { - result = append(result, d) - continue - } + if d, ok := r.data[id]; ok && r.f.CanRead(d.Team()) { + result = append(result, d) + continue } result = append(result, nil) @@ -51,12 +58,12 @@ func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) return result, nil } -func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamID) (*scene.Scene, error) { +func (r *Scene) FindByProject(ctx context.Context, id id.ProjectID) (*scene.Scene, error) { r.lock.Lock() defer r.lock.Unlock() for _, d := range r.data { - if d.Project() == id && isTeamIncludes(d.Team(), f) { + if d.Project() == id && r.f.CanRead(d.Team()) { return d, nil } } @@ -69,7 +76,7 @@ func (r *Scene) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, result := scene.List{} for _, d := range r.data { - if isTeamIncludes(d.Team(), teams) { + if user.TeamIDList(teams).Includes(d.Team()) && r.f.CanRead(d.Team()) { result = append(result, d) } } @@ -77,6 +84,10 @@ func (r *Scene) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, } func (r *Scene) Save(ctx context.Context, s *scene.Scene) error { + if !r.f.CanWrite(s.Team()) { + return repo.ErrOperationDenied + } + r.lock.Lock() defer r.lock.Unlock() @@ -85,14 +96,13 @@ 
func (r *Scene) Save(ctx context.Context, s *scene.Scene) error { return nil } -func (r *Scene) Remove(ctx context.Context, sceneID id.SceneID) error { +func (r *Scene) Remove(ctx context.Context, id id.SceneID) error { r.lock.Lock() defer r.lock.Unlock() - for sid := range r.data { - if sid == sceneID { - delete(r.data, sid) - } + if s, ok := r.data[id]; ok && r.f.CanWrite(s.Team()) { + delete(r.data, id) } + return nil } diff --git a/internal/infrastructure/memory/tag.go b/internal/infrastructure/memory/tag.go index d3059a029..a92e7639b 100644 --- a/internal/infrastructure/memory/tag.go +++ b/internal/infrastructure/memory/tag.go @@ -14,6 +14,7 @@ import ( type Tag struct { lock sync.Mutex data tag.Map + f repo.SceneFilter } func NewTag() repo.Tag { @@ -22,173 +23,192 @@ func NewTag() repo.Tag { } } -func (t *Tag) FindByID(ctx context.Context, tagID id.TagID, ids []id.SceneID) (tag.Tag, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) Filtered(f repo.SceneFilter) repo.Tag { + return &Tag{ + // note data is shared between the source repo and mutex cannot work well + data: r.data, + f: f.Clone(), + } +} - res, ok := t.data[tagID] - if ok && isSceneIncludes(res.Scene(), ids) { +func (r *Tag) FindByID(ctx context.Context, tagID id.TagID) (tag.Tag, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if res, ok := r.data[tagID]; ok && r.f.CanRead(res.Scene()) { return res, nil } return nil, rerror.ErrNotFound } -func (t *Tag) FindByIDs(ctx context.Context, tids []id.TagID, ids []id.SceneID) ([]*tag.Tag, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindByIDs(ctx context.Context, tids []id.TagID) ([]*tag.Tag, error) { + r.lock.Lock() + defer r.lock.Unlock() var res []*tag.Tag for _, id := range tids { - if d, ok := t.data[id]; ok { - if isSceneIncludes(d.Scene(), ids) { - res = append(res, &d) - continue - } + if d, ok := r.data[id]; ok && r.f.CanRead(d.Scene()) { + res = append(res, &d) + continue } res = append(res, nil) } return res, 
nil } -func (t *Tag) FindByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() - return t.data.All().FilterByScene(sceneID).Refs(), nil + return r.data.All().FilterByScene(sceneID).Refs(), nil } -func (t *Tag) FindItemByID(ctx context.Context, tagID id.TagID, ids []id.SceneID) (*tag.Item, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindItemByID(ctx context.Context, tagID id.TagID) (*tag.Item, error) { + r.lock.Lock() + defer r.lock.Unlock() - if d, ok := t.data[tagID]; ok { - if res := tag.ItemFrom(d); res != nil { - if isSceneIncludes(res.Scene(), ids) { - return res, nil - } + if d, ok := r.data[tagID]; ok { + if res := tag.ItemFrom(d); res != nil && r.f.CanRead(res.Scene()) { + return res, nil } } return nil, rerror.ErrNotFound } -func (t *Tag) FindItemByIDs(ctx context.Context, tagIDs []id.TagID, ids []id.SceneID) ([]*tag.Item, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindItemByIDs(ctx context.Context, tagIDs []id.TagID) ([]*tag.Item, error) { + r.lock.Lock() + defer r.lock.Unlock() var res []*tag.Item for _, id := range tagIDs { - if d, ok := t.data[id]; ok { - if ti := tag.ItemFrom(d); ti != nil { - if isSceneIncludes(ti.Scene(), ids) { - res = append(res, ti) - } + if d, ok := r.data[id]; ok { + if ti := tag.ItemFrom(d); ti != nil && r.f.CanRead(ti.Scene()) { + res = append(res, ti) } } } return res, nil } -func (t *Tag) FindGroupByID(ctx context.Context, tagID id.TagID, ids []id.SceneID) (*tag.Group, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindGroupByID(ctx context.Context, tagID id.TagID) (*tag.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() - if d, ok := t.data[tagID]; ok { - if res := tag.GroupFrom(d); res != nil { - if 
isSceneIncludes(res.Scene(), ids) { - return res, nil - } + if d, ok := r.data[tagID]; ok { + if tg := tag.GroupFrom(d); tg != nil && r.f.CanRead(tg.Scene()) { + return tg, nil } } return nil, rerror.ErrNotFound } -func (t *Tag) FindGroupByIDs(ctx context.Context, tagIDs []id.TagID, ids []id.SceneID) ([]*tag.Group, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindGroupByIDs(ctx context.Context, tagIDs []id.TagID) ([]*tag.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() var res []*tag.Group for _, id := range tagIDs { - if d, ok := t.data[id]; ok { - if tg := tag.GroupFrom(d); tg != nil { - if isSceneIncludes(tg.Scene(), ids) { - res = append(res, tg) - } + if d, ok := r.data[id]; ok { + if tg := tag.GroupFrom(d); tg != nil && r.f.CanRead(tg.Scene()) { + res = append(res, tg) } } } return res, nil } -func (t *Tag) FindRootsByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindRootsByScene(ctx context.Context, sceneID id.SceneID) ([]*tag.Tag, error) { + if !r.f.CanRead(sceneID) { + return nil, nil + } + + r.lock.Lock() + defer r.lock.Unlock() - return t.data.All().FilterByScene(sceneID).Roots().Refs(), nil + return r.data.All().FilterByScene(sceneID).Roots().Refs(), nil } -func (t *Tag) Save(ctx context.Context, tag tag.Tag) error { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) FindGroupByItem(ctx context.Context, tagID id.TagID) (*tag.Group, error) { + r.lock.Lock() + defer r.lock.Unlock() - t.data[tag.ID()] = tag - return nil -} + for _, tg := range r.data { + if res := tag.GroupFrom(tg); res != nil { + tags := res.Tags() + for _, item := range tags.Tags() { + if item == tagID { + return res, nil + } + } + } + } -func (t *Tag) SaveAll(ctx context.Context, tags []*tag.Tag) error { - t.lock.Lock() - defer t.lock.Unlock() + return nil, rerror.ErrNotFound +} - for _, tagRef := range tags { - tag := *tagRef - t.data[tag.ID()] = tag +func (r *Tag) Save(ctx 
context.Context, tag tag.Tag) error { + if !r.f.CanWrite(tag.Scene()) { + return repo.ErrOperationDenied } + + r.lock.Lock() + defer r.lock.Unlock() + + r.data[tag.ID()] = tag return nil } -func (t *Tag) Remove(ctx context.Context, tagID id.TagID) error { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) SaveAll(ctx context.Context, tags []*tag.Tag) error { + r.lock.Lock() + defer r.lock.Unlock() - delete(t.data, tagID) + for _, tagRef := range tags { + tag := *tagRef + if r.f.CanWrite(tag.Scene()) { + r.data[tag.ID()] = tag + } + } return nil } -func (t *Tag) RemoveAll(ctx context.Context, ids []id.TagID) error { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) Remove(ctx context.Context, id id.TagID) error { + r.lock.Lock() + defer r.lock.Unlock() - for _, tagID := range ids { - delete(t.data, tagID) + if t, ok := r.data[id]; ok && r.f.CanWrite(t.Scene()) { + delete(r.data, id) } return nil } -func (t *Tag) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) RemoveAll(ctx context.Context, ids []id.TagID) error { + r.lock.Lock() + defer r.lock.Unlock() - for tid, v := range t.data { - if v.Scene() == sceneID { - delete(t.data, tid) + for _, id := range ids { + if t, ok := r.data[id]; ok && r.f.CanWrite(t.Scene()) { + delete(r.data, id) } } return nil } -func (t *Tag) FindGroupByItem(ctx context.Context, tagID id.TagID, s []id.SceneID) (*tag.Group, error) { - t.lock.Lock() - defer t.lock.Unlock() +func (r *Tag) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } + + r.lock.Lock() + defer r.lock.Unlock() - for _, tg := range t.data { - if res := tag.GroupFrom(tg); res != nil { - tags := res.Tags() - for _, item := range tags.Tags() { - if item == tagID { - return res, nil - } - } + for tid, v := range r.data { + if v.Scene() == sceneID { + delete(r.data, tid) } } - - return nil, rerror.ErrNotFound + return nil } diff --git 
a/internal/infrastructure/memory/tag_test.go b/internal/infrastructure/memory/tag_test.go index 155c34727..89264591a 100644 --- a/internal/infrastructure/memory/tag_test.go +++ b/internal/infrastructure/memory/tag_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/tag" @@ -19,17 +20,16 @@ func TestNewTag(t *testing.T) { func TestTag_FindByID(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tti := tag.Tag(t1) repo := Tag{ data: map[id.TagID]tag.Tag{t1.ID(): tti}, } - out, err := repo.FindByID(ctx, t1.ID(), sl) + out, err := repo.FindByID(ctx, t1.ID()) assert.NoError(t, err) assert.Equal(t, tti, out) - _, err = repo.FindByID(ctx, id.TagID{}, sl) + _, err = repo.FindByID(ctx, id.TagID{}) assert.Same(t, rerror.ErrNotFound, err) } @@ -37,7 +37,6 @@ func TestTag_FindByIDs(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() sid2 := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() @@ -52,7 +51,7 @@ func TestTag_FindByIDs(t *testing.T) { t3.ID(): tti2, }, } - out, err := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + out, err := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) assert.NoError(t, err) assert.Equal(t, []*tag.Tag{&tti, &ttg}, out) } @@ -83,7 +82,6 @@ func TestTag_FindRootsByScene(t *testing.T) { func TestTag_FindGroupByID(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := 
tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() @@ -95,18 +93,17 @@ func TestTag_FindGroupByID(t *testing.T) { t2.ID(): ttg, }, } - out, err := repo.FindGroupByID(ctx, t2.ID(), sl) + out, err := repo.FindGroupByID(ctx, t2.ID()) assert.NoError(t, err) assert.Equal(t, t2, out) - _, err = repo.FindGroupByID(ctx, id.TagID{}, []id.SceneID{}) + _, err = repo.FindGroupByID(ctx, id.TagID{}) assert.Same(t, rerror.ErrNotFound, err) } func TestTag_FindItemByID(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() @@ -118,33 +115,34 @@ func TestTag_FindItemByID(t *testing.T) { t2.ID(): ttg, }, } - out, err := repo.FindItemByID(ctx, t1.ID(), sl) + out, err := repo.FindItemByID(ctx, t1.ID()) assert.NoError(t, err) assert.Equal(t, t1, out) - _, err = repo.FindItemByID(ctx, id.TagID{}, sl) + _, err = repo.FindItemByID(ctx, id.TagID{}) assert.Same(t, rerror.ErrNotFound, err) } func TestTag_FindGroupByIDs(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Build() t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group2").Build() ttg := tag.Tag(t1) ttg2 := tag.Tag(t2) - repo := Tag{ + r := Tag{ data: map[id.TagID]tag.Tag{ t1.ID(): ttg, t2.ID(): ttg2, }, } - out, err := repo.FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + out, err := r.FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) assert.NoError(t, err) assert.Equal(t, []*tag.Group{t1, t2}, out) - out, err = repo.FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, []id.SceneID{}) + out, err = r.Filtered(repo.SceneFilter{ + Readable: []id.SceneID{}, + }).FindGroupByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) assert.NoError(t, err) assert.Equal(t, 0, len(out)) } @@ -152,22 +150,23 @@ func 
TestTag_FindGroupByIDs(t *testing.T) { func TestTag_FindItemByIDs(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() t2, _ := tag.NewItem().NewID().Scene(sid).Label("item2").Build() tti := tag.Tag(t1) tti2 := tag.Tag(t2) - repo := Tag{ + r := Tag{ data: map[id.TagID]tag.Tag{ t1.ID(): tti, t2.ID(): tti2, }, } - out, err := repo.FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + out, err := r.FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) assert.NoError(t, err) assert.Equal(t, []*tag.Item{t1, t2}, out) - out, err = repo.FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, []id.SceneID{}) + out, err = r.Filtered(repo.SceneFilter{ + Readable: []id.SceneID{}, + }).FindItemByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) assert.NoError(t, err) assert.Equal(t, 0, len(out)) } @@ -176,13 +175,12 @@ func TestTag_Save(t *testing.T) { ctx := context.Background() repo := NewTag() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tti := tag.Tag(t1) err := repo.Save(ctx, tti) assert.NoError(t, err) - out, _ := repo.FindByID(ctx, t1.ID(), sl) + out, _ := repo.FindByID(ctx, t1.ID()) assert.Equal(t, tti, out) } @@ -190,7 +188,6 @@ func TestTag_SaveAll(t *testing.T) { ctx := context.Background() repo := NewTag() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() t2, _ := tag.NewItem().NewID().Scene(sid).Label("item2").Build() tti := tag.Tag(t1) @@ -198,7 +195,7 @@ func TestTag_SaveAll(t *testing.T) { err := repo.SaveAll(ctx, []*tag.Tag{&tti, &tti2}) assert.NoError(t, err) - out, _ := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}, sl) + out, _ := repo.FindByIDs(ctx, []id.TagID{t1.ID(), t2.ID()}) assert.Equal(t, []*tag.Tag{&tti, &tti2}, out) } @@ -272,7 +269,6 @@ func TestTag_RemoveByScene(t *testing.T) { func TestTag_FindGroupByItem(t *testing.T) { ctx := 
context.Background() sid := id.NewSceneID() - sl := []id.SceneID{sid} t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() tl := tag.IDListFrom([]id.TagID{t1.ID()}) t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() @@ -284,7 +280,7 @@ func TestTag_FindGroupByItem(t *testing.T) { t2.ID(): ttg, }, } - out, err := repo.FindGroupByItem(ctx, t1.ID(), sl) + out, err := repo.FindGroupByItem(ctx, t1.ID()) assert.NoError(t, err) assert.Equal(t, t2, out) } diff --git a/internal/infrastructure/memory/util.go b/internal/infrastructure/memory/util.go deleted file mode 100644 index 01d3a80ef..000000000 --- a/internal/infrastructure/memory/util.go +++ /dev/null @@ -1,27 +0,0 @@ -package memory - -import "github.com/reearth/reearth-backend/pkg/id" - -func isTeamIncludes(id id.TeamID, ids []id.TeamID) bool { - if ids == nil { - return true - } - for _, i := range ids { - if id == i { - return true - } - } - return false -} - -func isSceneIncludes(id id.SceneID, ids []id.SceneID) bool { - if ids == nil { - return true - } - for _, i := range ids { - if id == i { - return true - } - } - return false -} diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index 4097a23ce..214efe305 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -16,6 +16,7 @@ import ( type assetRepo struct { client *mongodoc.ClientCollection + f repo.TeamFilter } func NewAsset(client *mongodoc.Client) repo.Asset { @@ -24,17 +25,23 @@ func NewAsset(client *mongodoc.Client) repo.Asset { return r } -func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID, teams []id.TeamID) (*asset.Asset, error) { - filter := assetFilter(bson.M{ +func (r *assetRepo) Filtered(f repo.TeamFilter) repo.Asset { + return &assetRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, error) { + return r.findOne(ctx, bson.M{ "id": 
id.String(), - }, teams) - return r.findOne(ctx, filter) + }) } -func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id.TeamID) ([]*asset.Asset, error) { - filter := assetFilter(bson.M{ +func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID) ([]*asset.Asset, error) { + filter := bson.M{ "id": bson.M{"$in": id.AssetIDsToStrings(ids)}, - }, teams) + } dst := make([]*asset.Asset, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -43,20 +50,27 @@ func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID, teams []id. return filterAssets(ids, res), nil } +func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + if !r.f.CanRead(id) { + return nil, usecase.EmptyPageInfo(), nil + } + return r.paginate(ctx, bson.M{ + "team": id.String(), + }, pagination) +} + func (r *assetRepo) Save(ctx context.Context, asset *asset.Asset) error { + if !r.f.CanWrite(asset.Team()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewAsset(asset) return r.client.SaveOne(ctx, id, doc) } func (r *assetRepo) Remove(ctx context.Context, id id.AssetID) error { - return r.client.RemoveOne(ctx, id.String()) -} - -func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { - filter := bson.D{ - {Key: "team", Value: id.String()}, - } - return r.paginate(ctx, filter, pagination) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{ + "id": id.String(), + })) } func (r *assetRepo) init() { @@ -66,11 +80,11 @@ func (r *assetRepo) init() { } } -func (r *assetRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { +func (r *assetRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { var c mongodoc.AssetConsumer - 
pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) - if err2 != nil { - return nil, nil, rerror.ErrInternalBy(err2) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) } return c.Rows, pageInfo, nil } @@ -79,7 +93,7 @@ func (r *assetRepo) find(ctx context.Context, dst []*asset.Asset, filter interfa c := mongodoc.AssetConsumer{ Rows: dst, } - if err2 := r.client.Find(ctx, filter, &c); err2 != nil { + if err2 := r.client.Find(ctx, r.readFilter(filter), &c); err2 != nil { return nil, rerror.ErrInternalBy(err2) } return c.Rows, nil @@ -90,7 +104,7 @@ func (r *assetRepo) findOne(ctx context.Context, filter interface{}) (*asset.Ass c := mongodoc.AssetConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil @@ -111,7 +125,10 @@ func filterAssets(ids []id.AssetID, rows []*asset.Asset) []*asset.Asset { return res } -func assetFilter(filter bson.M, teams []id.TeamID) bson.M { - filter["team"] = bson.M{"$in": id.TeamIDsToStrings(teams)} - return filter +func (r *assetRepo) readFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Readable) +} + +func (r *assetRepo) writeFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index 2dcd699be..5e67f1c7e 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -6,6 +6,10 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/migration" "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + 
"github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" + "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" ) @@ -46,3 +50,28 @@ func InitRepos(ctx context.Context, c *repo.Container, mc *mongo.Client, databas return nil } + +func applyTeamFilter(filter interface{}, ids user.TeamIDList) interface{} { + if ids == nil { + return filter + } + return mongodoc.And(filter, "team", bson.M{"$in": id.TeamIDsToStrings(ids)}) +} + +func applySceneFilter(filter interface{}, ids scene.IDList) interface{} { + if ids == nil { + return filter + } + return mongodoc.And(filter, "scene", bson.M{"$in": id.SceneIDsToStrings(ids)}) +} + +func applyOptionalSceneFilter(filter interface{}, ids scene.IDList) interface{} { + if ids == nil { + return filter + } + return mongodoc.And(filter, "", bson.M{"$or": []bson.M{ + {"scene": bson.M{"$in": id.SceneIDsToStrings(ids)}}, + {"scene": nil}, + {"scene": ""}, + }}) +} diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index 52a1cc0c7..66bb748d5 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -18,6 +18,7 @@ import ( type datasetRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewDataset(client *mongodoc.Client) repo.Dataset { @@ -26,6 +27,13 @@ func NewDataset(client *mongodoc.Client) repo.Dataset { return r } +func (r *datasetRepo) Filtered(f repo.SceneFilter) repo.Dataset { + return &datasetRepo{ + client: r.client, + f: f.Clone(), + } +} + func (r *datasetRepo) init() { i := r.client.CreateIndex(context.Background(), []string{"scene", "schema"}) if len(i) > 0 { @@ -33,17 +41,16 @@ func (r *datasetRepo) init() { } } -func (r *datasetRepo) FindByID(ctx context.Context, id2 id.DatasetID, f []id.SceneID) (*dataset.Dataset, error) { - filter := r.sceneFilter(bson.D{{Key: "id", Value: id.ID(id2).String()}}, f) - return r.findOne(ctx, filter) +func (r *datasetRepo) 
FindByID(ctx context.Context, id id.DatasetID) (*dataset.Dataset, error) { + return r.findOne(ctx, bson.M{"id": id.String()}) } -func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id.SceneID) (dataset.List, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.DatasetIDsToStrings(ids)}, - }}, - }, f) +func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID) (dataset.List, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.DatasetIDsToStrings(ids), + }, + } dst := make([]*dataset.Dataset, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -52,23 +59,21 @@ func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID, f []id. return filterDatasets(ids, res), nil } -func (r *datasetRepo) FindBySchema(ctx context.Context, schemaID id.DatasetSchemaID, f []id.SceneID, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { - filter := r.sceneFilter(bson.D{ - {Key: "schema", Value: id.ID(schemaID).String()}, - }, f) - return r.paginate(ctx, filter, pagination) +func (r *datasetRepo) FindBySchema(ctx context.Context, schemaID id.DatasetSchemaID, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + return r.paginate(ctx, bson.M{ + "schema": id.ID(schemaID).String(), + }, pagination) } func (r *datasetRepo) FindBySchemaAll(ctx context.Context, schemaID id.DatasetSchemaID) (dataset.List, error) { - filter := bson.D{ - {Key: "schema", Value: id.ID(schemaID).String()}, - } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "schema": id.ID(schemaID).String(), + }) } -func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.SceneID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { +func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { if len(fields) == 0 { - d, err := r.FindByID(ctx, did, f) + d, 
err := r.FindByID(ctx, did) if err != nil { return nil, err } @@ -79,18 +84,18 @@ func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc firstField := fieldsstr[0] aggfilter := bson.D{} - if f != nil { + if r.f.Readable != nil { aggfilter = append(aggfilter, bson.E{Key: "$in", Value: []interface{}{ "$$g.scene", - id.SceneIDsToStrings(f), + id.SceneIDsToStrings(r.f.Readable), }}) } pipeline := bson.D{ - {Key: "$match", Value: r.sceneFilter(bson.D{ - {Key: "id", Value: did.String()}, - {Key: "fields.id", Value: firstField}, - }, f)}, + {Key: "$match", Value: r.readFilter(bson.M{ + "id": did.String(), + "fields.id": firstField, + })}, {Key: "$limit", Value: 1}, {Key: "$addFields", Value: bson.D{ {Key: "field", Value: bson.D{ @@ -116,7 +121,7 @@ func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc {Key: "connectToField", Value: "id"}, {Key: "depthField", Value: "depth"}, {Key: "as", Value: "graph"}, - {Key: "restrictSearchWithMatch", Value: r.sceneFilter(bson.D{}, f)}, + {Key: "restrictSearchWithMatch", Value: r.readFilter(bson.M{})}, }}, {Key: "$addFields", Value: bson.D{ {Key: "firstGraph", Value: bson.D{ @@ -251,6 +256,9 @@ func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, f []id.Sc } func (r *datasetRepo) Save(ctx context.Context, dataset *dataset.Dataset) error { + if !r.f.CanWrite(dataset.Scene()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewDataset(dataset) return r.client.SaveOne(ctx, id, doc) } @@ -259,62 +267,66 @@ func (r *datasetRepo) SaveAll(ctx context.Context, datasetList dataset.List) err if datasetList == nil || len(datasetList) == 0 { return nil } - docs, ids := mongodoc.NewDatasets(datasetList) + docs, ids := mongodoc.NewDatasets(datasetList, r.f.Writable) return r.client.SaveAll(ctx, ids, docs) } func (r *datasetRepo) Remove(ctx context.Context, id id.DatasetID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, 
r.writeFilter(bson.M{"id": id.String()})) } func (r *datasetRepo) RemoveAll(ctx context.Context, ids []id.DatasetID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.DatasetIDsToStrings(ids)) + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": id.DatasetIDsToStrings(ids)}, + })) } func (r *datasetRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, + if !r.f.CanWrite(sceneID) { + return nil } - _, err := r.client.Collection().DeleteMany(ctx, filter) + _, err := r.client.Collection().DeleteMany(ctx, bson.D{ + {Key: "scene", Value: sceneID.String()}, + }) if err != nil { return rerror.ErrInternalBy(err) } return nil } -func (r *datasetRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { - var c mongodoc.DatasetConsumer - pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) - if err2 != nil { - return nil, nil, rerror.ErrInternalBy(err2) - } - return c.Rows, pageInfo, nil -} - -func (r *datasetRepo) find(ctx context.Context, dst dataset.List, filter bson.D) (dataset.List, error) { +func (r *datasetRepo) find(ctx context.Context, dst dataset.List, filter interface{}) (dataset.List, error) { c := mongodoc.DatasetConsumer{ Rows: dst, } - if err2 := r.client.Find(ctx, filter, &c); err2 != nil { + if err2 := r.client.Find(ctx, r.readFilter(filter), &c); err2 != nil { return nil, rerror.ErrInternalBy(err2) } return c.Rows, nil } -func (r *datasetRepo) findOne(ctx context.Context, filter bson.D) (*dataset.Dataset, error) { +func (r *datasetRepo) findOne(ctx context.Context, filter interface{}) (*dataset.Dataset, error) { dst := make([]*dataset.Dataset, 0, 1) c := mongodoc.DatasetConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return 
c.Rows[0], nil } +func (r *datasetRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { + var c mongodoc.DatasetConsumer + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) + } + return c.Rows, pageInfo, nil +} + func filterDatasets(ids []id.DatasetID, rows []*dataset.Dataset) []*dataset.Dataset { res := make([]*dataset.Dataset, 0, len(ids)) for _, id := range ids { @@ -330,13 +342,10 @@ func filterDatasets(ids []id.DatasetID, rows []*dataset.Dataset) []*dataset.Data return res } -func (*datasetRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { - if scenes == nil { - return filter - } - filter = append(filter, bson.E{ - Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, - }) - return filter +func (r *datasetRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *datasetRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index ba2d4d331..aacc45eb4 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -16,6 +16,7 @@ import ( type datasetSchemaRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewDatasetSchema(client *mongodoc.Client) repo.DatasetSchema { @@ -31,19 +32,25 @@ func (r *datasetSchemaRepo) init() { } } -func (r *datasetSchemaRepo) FindByID(ctx context.Context, id2 id.DatasetSchemaID, f []id.SceneID) (*dataset.Schema, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: id.ID(id2).String()}, - }, f) - return r.findOne(ctx, filter) +func (r *datasetSchemaRepo) Filtered(f repo.SceneFilter) repo.DatasetSchema { + return &datasetSchemaRepo{ + 
client: r.client, + f: f.Clone(), + } } -func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID, f []id.SceneID) (dataset.SchemaList, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.DatasetSchemaIDsToStrings(ids)}, - }}, - }, f) +func (r *datasetSchemaRepo) FindByID(ctx context.Context, id id.DatasetSchemaID) (*dataset.Schema, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + +func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID) (dataset.SchemaList, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.DatasetSchemaIDsToStrings(ids), + }, + } dst := make([]*dataset.Schema, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -53,44 +60,54 @@ func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids []id.DatasetSchem } func (r *datasetSchemaRepo) FindByScene(ctx context.Context, sceneID id.SceneID, pagination *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, + if !r.f.CanRead(sceneID) { + return nil, usecase.EmptyPageInfo(), nil } - return r.paginate(ctx, filter, pagination) + return r.paginate(ctx, bson.M{ + "scene": sceneID.String(), + }, pagination) } func (r *datasetSchemaRepo) FindBySceneAll(ctx context.Context, sceneID id.SceneID) (dataset.SchemaList, error) { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, + if !r.f.CanRead(sceneID) { + return nil, nil } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "scene": sceneID.String(), + }) } func (r *datasetSchemaRepo) FindDynamicByID(ctx context.Context, sid id.DatasetSchemaID) (*dataset.Schema, error) { - filter := bson.D{ - {Key: "id", Value: id.ID(sid).String()}, - {Key: "dynamic", Value: true}, - } - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "id": id.ID(sid).String(), + "dynamic": true, + }) } func (r 
*datasetSchemaRepo) FindAllDynamicByScene(ctx context.Context, sceneID id.SceneID) (dataset.SchemaList, error) { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, - {Key: "dynamic", Value: true}, + if !r.f.CanRead(sceneID) { + return nil, nil } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "scene": sceneID.String(), + "dynamic": true, + }) } func (r *datasetSchemaRepo) FindBySceneAndSource(ctx context.Context, sceneID id.SceneID, source string) (dataset.SchemaList, error) { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, - {Key: "source", Value: string(source)}, + if !r.f.CanRead(sceneID) { + return nil, nil } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "scene": sceneID.String(), + "source": string(source), + }) } func (r *datasetSchemaRepo) Save(ctx context.Context, datasetSchema *dataset.Schema) error { + if !r.f.CanWrite(datasetSchema.Scene()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewDatasetSchema(datasetSchema) return r.client.SaveOne(ctx, id, doc) } @@ -99,58 +116,61 @@ func (r *datasetSchemaRepo) SaveAll(ctx context.Context, datasetSchemas dataset. 
if datasetSchemas == nil || len(datasetSchemas) == 0 { return nil } - docs, ids := mongodoc.NewDatasetSchemas(datasetSchemas) + docs, ids := mongodoc.NewDatasetSchemas(datasetSchemas, r.f.Writable) return r.client.SaveAll(ctx, ids, docs) } -func (r *datasetSchemaRepo) Remove(ctx context.Context, datasetSchemaID id.DatasetSchemaID) error { - return r.client.RemoveOne(ctx, datasetSchemaID.String()) +func (r *datasetSchemaRepo) Remove(ctx context.Context, id id.DatasetSchemaID) error { + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *datasetSchemaRepo) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.DatasetSchemaIDsToStrings(ids)) + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": id.DatasetSchemaIDsToStrings(ids)}, + })) } func (r *datasetSchemaRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, + if !r.f.CanWrite(sceneID) { + return nil } - _, err := r.client.Collection().DeleteMany(ctx, filter) - if err != nil { + if _, err := r.client.Collection().DeleteMany(ctx, bson.M{ + "scene": sceneID.String(), + }); err != nil { return rerror.ErrInternalBy(err) } return nil } -func (r *datasetSchemaRepo) find(ctx context.Context, dst []*dataset.Schema, filter bson.D) ([]*dataset.Schema, error) { +func (r *datasetSchemaRepo) find(ctx context.Context, dst []*dataset.Schema, filter interface{}) ([]*dataset.Schema, error) { c := mongodoc.DatasetSchemaConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil } -func (r *datasetSchemaRepo) findOne(ctx context.Context, filter bson.D) (*dataset.Schema, error) { +func (r *datasetSchemaRepo) findOne(ctx context.Context, filter interface{}) (*dataset.Schema, error) { dst := 
make([]*dataset.Schema, 0, 1) c := mongodoc.DatasetSchemaConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil } -func (r *datasetSchemaRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*dataset.Schema, *usecase.PageInfo, error) { +func (r *datasetSchemaRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*dataset.Schema, *usecase.PageInfo, error) { var c mongodoc.DatasetSchemaConsumer - pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) - if err2 != nil { - return nil, nil, rerror.ErrInternalBy(err2) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) } return c.Rows, pageInfo, nil } @@ -170,13 +190,10 @@ func filterDatasetSchemas(ids []id.DatasetSchemaID, rows []*dataset.Schema) []*d return res } -func (*datasetSchemaRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { - if scenes == nil { - return filter - } - filter = append(filter, bson.E{ - Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, - }) - return filter +func (r *datasetSchemaRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *datasetSchemaRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index fe744d5a8..e5e1b3878 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -15,6 +15,7 @@ import ( type layerRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewLayer(client *mongodoc.Client) repo.Layer { @@ -30,19 +31,25 @@ func (r *layerRepo) init() { } } -func (r *layerRepo) 
FindByID(ctx context.Context, id id.LayerID, f []id.SceneID) (layer.Layer, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "id", Value: id.String()}, - }, f) - return r.findOne(ctx, filter) +func (r *layerRepo) Filtered(f repo.SceneFilter) repo.Layer { + return &layerRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) } -func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.List, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.LayerIDsToStrings(ids)}, - }}, - }, f) +func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID) (layer.List, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.LayerIDsToStrings(ids), + }, + } dst := make([]*layer.Layer, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -52,25 +59,23 @@ func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID, f []id.Scen } func (r *layerRepo) FindAllByDatasetSchema(ctx context.Context, dsid id.DatasetSchemaID) (layer.List, error) { - filter := bson.D{ - {Key: "group.linkeddatasetschema", Value: dsid.String()}, - } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "group.linkeddatasetschema": dsid.String(), + }) } -func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Item, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "id", Value: id.String()}, - }, f) - return r.findItemOne(ctx, filter) +func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID) (*layer.Item, error) { + return r.findItemOne(ctx, bson.M{ + "id": id.String(), + }) } -func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.ItemList, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: 
id.LayerIDsToStrings(ids)}, - }}, - }, f) +func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID) (layer.ItemList, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.LayerIDsToStrings(ids), + }, + } dst := make([]*layer.Item, 0, len(ids)) res, err := r.findItems(ctx, dst, filter) if err != nil { @@ -79,19 +84,18 @@ func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID, f []id. return filterLayerItems(ids, res), nil } -func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "id", Value: id.String()}, - }, f) - return r.findGroupOne(ctx, filter) +func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { + return r.findGroupOne(ctx, bson.M{ + "id": id.String(), + }) } -func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id.SceneID) (layer.GroupList, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.LayerIDsToStrings(ids)}, - }}, - }, f) +func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID) (layer.GroupList, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.LayerIDsToStrings(ids), + }, + } dst := make([]*layer.Group, 0, len(ids)) res, err := r.findGroups(ctx, dst, filter) if err != nil { @@ -101,69 +105,76 @@ func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID, f []id } func (r *layerRepo) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, sceneID id.SceneID, datasetSchemaID id.DatasetSchemaID) (layer.GroupList, error) { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, - {Key: "group.linkeddatasetschema", Value: datasetSchemaID.String()}, - } - return r.findGroups(ctx, nil, filter) + return r.findGroups(ctx, nil, bson.M{ + "scene": sceneID.String(), + "group.linkeddatasetschema": datasetSchemaID.String(), + }) } -func (r *layerRepo) FindParentsByIDs(ctx 
context.Context, ids []id.LayerID, scenes []id.SceneID) (layer.GroupList, error) { - f := bson.M{ +func (r *layerRepo) FindParentsByIDs(ctx context.Context, ids []id.LayerID) (layer.GroupList, error) { + return r.findGroups(ctx, nil, bson.M{ "group.layers": bson.M{"$in": id.LayerIDsToStrings(ids)}, - } - filter := r.sceneFilter(f, scenes) - return r.findGroups(ctx, nil, filter) + }) } -func (r *layerRepo) FindByPluginAndExtension(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { - f := bson.M{ +func (r *layerRepo) FindByPluginAndExtension(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { + filter := bson.M{ "plugin": pid.String(), } if eid != nil { - f["extension"] = eid.String() + filter["extension"] = eid.String() } - filter := r.sceneFilter(f, scenes) return r.find(ctx, nil, filter) } -func (r *layerRepo) FindByPluginAndExtensionOfBlocks(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID, scenes []id.SceneID) (layer.List, error) { - f := bson.M{ +func (r *layerRepo) FindByPluginAndExtensionOfBlocks(ctx context.Context, pid id.PluginID, eid *id.PluginExtensionID) (layer.List, error) { + filter := bson.M{ "infobox.fields.plugin": pid.String(), } if eid != nil { - f["infobox.fields.extension"] = eid.String() + filter["infobox.fields.extension"] = eid.String() } - filter := r.sceneFilter(f, scenes) return r.find(ctx, nil, filter) } -func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID, f []id.SceneID) (layer.Layer, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "$or", Value: []bson.D{ - {{Key: "property", Value: id.String()}}, - {{Key: "infobox.property", Value: id.String()}}, - {{Key: "infobox.fields.property", Value: id.String()}}, - }}, - }, f) - return r.findOne(ctx, filter) +func (r *layerRepo) FindByProperty(ctx context.Context, id id.PropertyID) (layer.Layer, error) { + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + 
{"property": id.String()}, + {"infobox.property": id.String()}, + {"infobox.fields.property": id.String()}, + }, + }) } -func (r *layerRepo) FindParentByID(ctx context.Context, id id.LayerID, f []id.SceneID) (*layer.Group, error) { - filter := r.sceneFilterD(bson.D{ - {Key: "group.layers", Value: id.String()}, - }, f) - return r.findGroupOne(ctx, filter) +func (r *layerRepo) FindParentByID(ctx context.Context, id id.LayerID) (*layer.Group, error) { + return r.findGroupOne(ctx, bson.M{ + "group.layers": id.String(), + }) } func (r *layerRepo) FindByScene(ctx context.Context, id id.SceneID) (layer.List, error) { - filter := bson.D{ - {Key: "scene", Value: id.String()}, + if !r.f.CanRead(id) { + return nil, nil } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "scene": id.String(), + }) +} + +func (r *layerRepo) FindByTag(ctx context.Context, tagID id.TagID) (layer.List, error) { + return r.find(ctx, nil, bson.M{ + "$or": []bson.M{ + {"tags.id": tagID.String()}, + {"tags.tags.id": tagID.String()}, + }, + }) } func (r *layerRepo) Save(ctx context.Context, layer layer.Layer) error { + if !r.f.CanWrite(layer.Scene()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewLayer(layer) return r.client.SaveOne(ctx, id, doc) } @@ -172,20 +183,20 @@ func (r *layerRepo) SaveAll(ctx context.Context, layers layer.List) error { if layers == nil || len(layers) == 0 { return nil } - docs, ids := mongodoc.NewLayers(layers) + docs, ids := mongodoc.NewLayers(layers, r.f.Writable) return r.client.SaveAll(ctx, ids, docs) } -func (r *layerRepo) UpdatePlugin(ctx context.Context, old, new id.PluginID, scenes []id.SceneID) error { +func (r *layerRepo) UpdatePlugin(ctx context.Context, old, new id.PluginID) error { return r.client.UpdateManyMany( ctx, []mongodoc.Update{ { - Filter: r.sceneFilter(bson.M{"plugin": old.String()}, scenes), + Filter: r.writeFilter(bson.M{"plugin": old.String()}), Update: bson.M{"plugin": new.String()}, }, { - Filter: 
r.sceneFilter(bson.M{"infobox.fields": bson.M{"$type": "array"}}, scenes), + Filter: r.writeFilter(bson.M{"infobox.fields": bson.M{"$type": "array"}}), Update: bson.M{"infobox.fields.$[if].plugin": new.String()}, ArrayFilters: []interface{}{ bson.M{"if.plugin": old.String()}, @@ -196,17 +207,22 @@ func (r *layerRepo) UpdatePlugin(ctx context.Context, old, new id.PluginID, scen } func (r *layerRepo) Remove(ctx context.Context, id id.LayerID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *layerRepo) RemoveAll(ctx context.Context, ids []id.LayerID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.LayerIDsToStrings(ids)) + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": id.LayerIDsToStrings(ids)}, + })) } func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } filter := bson.D{ {Key: "scene", Value: sceneID.String()}, } @@ -217,32 +233,19 @@ func (r *layerRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error return nil } -func (r *layerRepo) FindByTag(ctx context.Context, tagID id.TagID, f []id.SceneID) (layer.List, error) { - ids := []id.TagID{tagID} - tags := id.TagIDsToStrings(ids) - filter := r.sceneFilter(bson.M{ - "$or": []bson.M{ - {"tags.id": bson.M{"$in": tags}}, - {"tags.tags.id": bson.M{"$in": tags}}, - }, - }, f) - - return r.find(ctx, nil, filter) -} - func (r *layerRepo) find(ctx context.Context, dst layer.List, filter interface{}) (layer.List, error) { c := mongodoc.LayerConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil } -func (r *layerRepo) findOne(ctx context.Context, filter bson.D) (layer.Layer, error) { +func (r *layerRepo) findOne(ctx context.Context, filter interface{}) 
(layer.Layer, error) { c := mongodoc.LayerConsumer{} - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } if len(c.Rows) == 0 { @@ -251,9 +254,9 @@ func (r *layerRepo) findOne(ctx context.Context, filter bson.D) (layer.Layer, er return *c.Rows[0], nil } -func (r *layerRepo) findItemOne(ctx context.Context, filter bson.D) (*layer.Item, error) { +func (r *layerRepo) findItemOne(ctx context.Context, filter interface{}) (*layer.Item, error) { c := mongodoc.LayerConsumer{} - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } if len(c.ItemRows) == 0 { @@ -262,9 +265,9 @@ func (r *layerRepo) findItemOne(ctx context.Context, filter bson.D) (*layer.Item return c.ItemRows[0], nil } -func (r *layerRepo) findGroupOne(ctx context.Context, filter bson.D) (*layer.Group, error) { +func (r *layerRepo) findGroupOne(ctx context.Context, filter interface{}) (*layer.Group, error) { c := mongodoc.LayerConsumer{} - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } if len(c.GroupRows) == 0 { @@ -280,7 +283,7 @@ func (r *layerRepo) findItems(ctx context.Context, dst layer.ItemList, filter in if c.ItemRows != nil { c.Rows = make(layer.List, 0, len(c.ItemRows)) } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.ItemRows, nil @@ -293,39 +296,12 @@ func (r *layerRepo) findGroups(ctx context.Context, dst layer.GroupList, filter if c.GroupRows != nil { c.Rows = make(layer.List, 0, len(c.GroupRows)) } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.GroupRows, nil } -// func (r *layerRepo) 
paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.List, *usecase.PageInfo, error) { -// var c mongodoc.LayerConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.Rows, pageInfo, nil -// } - -// func (r *layerRepo) paginateItems(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.ItemList, *usecase.PageInfo, error) { -// var c mongodoc.LayerConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.ItemRows, pageInfo, nil -// } - -// func (r *layerRepo) paginateGroups(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (layer.GroupList, *usecase.PageInfo, error) { -// var c mongodoc.LayerConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.GroupRows, pageInfo, nil -// } - func filterLayers(ids []id.LayerID, rows []*layer.Layer) []*layer.Layer { res := make([]*layer.Layer, 0, len(ids)) for _, id := range ids { @@ -374,21 +350,10 @@ func filterLayerGroups(ids []id.LayerID, rows []*layer.Group) []*layer.Group { return res } -func (*layerRepo) sceneFilterD(filter bson.D, scenes []id.SceneID) bson.D { - if scenes == nil { - return filter - } - filter = append(filter, bson.E{ - Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, - }) - return filter +func (r *layerRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) } -func (*layerRepo) sceneFilter(filter bson.M, scenes []id.SceneID) bson.M { - if scenes == nil { - return filter - } - filter["scene"] = bson.M{"$in": id.SceneIDsToStrings(scenes)} - return filter +func (r *layerRepo) writeFilter(filter interface{}) interface{} { + return 
applySceneFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go index 0cfbe9e7f..38bd4c177 100644 --- a/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go +++ b/internal/infrastructure/mongo/migration/201217193948_add_scene_default_tile.go @@ -60,7 +60,7 @@ func AddSceneDefaultTile(ctx context.Context, c DBClient) error { g.Add(f, -1) } - docs, ids := mongodoc.NewProperties(properties) + docs, ids := mongodoc.NewProperties(properties, nil) return col.SaveAll(ctx, ids, docs) }, diff --git a/internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go b/internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go new file mode 100644 index 000000000..4aad91156 --- /dev/null +++ b/internal/infrastructure/mongo/migration/220309174648_add_scene_field_to_property_schema.go @@ -0,0 +1,42 @@ +package migration + +import ( + "context" + + "github.com/labstack/gommon/log" + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "github.com/reearth/reearth-backend/pkg/id" + "go.mongodb.org/mongo-driver/bson" +) + +func AddSceneFieldToPropertySchema(ctx context.Context, c DBClient) error { + col := c.WithCollection("propertySchema") + + return col.Find(ctx, bson.M{}, &mongodoc.BatchConsumer{ + Size: 1000, + Callback: func(rows []bson.Raw) error { + ids := make([]string, 0, len(rows)) + newRows := make([]interface{}, 0, len(rows)) + + log.Infof("migration: AddSceneFieldToPropertySchema: hit property schemas: %d\n", len(rows)) + + for _, row := range rows { + var doc mongodoc.PropertySchemaDocument + if err := bson.Unmarshal(row, &doc); err != nil { + return err + } + + s, err := id.PropertySchemaIDFrom(doc.ID) + if err != nil || s.Plugin().Scene() == nil { + continue + } + + doc.Scene = s.Plugin().Scene().StringRef() + 
ids = append(ids, doc.ID) + newRows = append(newRows, doc) + } + + return col.SaveAll(ctx, ids, newRows) + }, + }) +} diff --git a/internal/infrastructure/mongo/migration/migrations.go b/internal/infrastructure/mongo/migration/migrations.go index 3116f661f..2329b12cc 100644 --- a/internal/infrastructure/mongo/migration/migrations.go +++ b/internal/infrastructure/mongo/migration/migrations.go @@ -2,6 +2,8 @@ package migration +// To add a new migration, run go run ./tools/cmd/migrategen migration_name + // WARNING: // If the migration takes too long, the deployment may fail in a serverless environment. // Set the batch size to as large a value as possible without using up the RAM of the deployment destination. @@ -11,4 +13,5 @@ var migrations = map[int64]MigrationFunc{ 210310145844: RemovePreviewToken, 210730175108: AddSceneAlignSystem, 220214180713: SplitSchemaOfProperties, + 220309174648: AddSceneFieldToPropertySchema, } diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index 65d3de709..4ca44677a 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -88,24 +88,16 @@ func (c *Client) Count(ctx context.Context, col string, filter interface{}) (int return count, nil } -func (c *Client) RemoveAll(ctx context.Context, col string, ids []string) error { - if len(ids) == 0 { - return nil - } - filter := bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: ids}, - }}, - } - _, err := c.Collection(col).DeleteMany(ctx, filter) +func (c *Client) RemoveOne(ctx context.Context, col string, f interface{}) error { + _, err := c.Collection(col).DeleteOne(ctx, f) if err != nil { return rerror.ErrInternalBy(err) } return nil } -func (c *Client) RemoveOne(ctx context.Context, col string, id string) error { - _, err := c.Collection(col).DeleteOne(ctx, bson.D{{Key: "id", Value: id}}) +func (c *Client) RemoveAll(ctx context.Context, col string, f 
interface{}) error { + _, err := c.Collection(col).DeleteMany(ctx, f) if err != nil { return rerror.ErrInternalBy(err) } @@ -113,10 +105,7 @@ func (c *Client) RemoveOne(ctx context.Context, col string, id string) error { } var ( - upsert = true - replaceOption = &options.ReplaceOptions{ - Upsert: &upsert, - } + replaceOption = (&options.ReplaceOptions{}).SetUpsert(true) ) func (c *Client) SaveOne(ctx context.Context, col string, id string, replacement interface{}) error { @@ -138,11 +127,13 @@ func (c *Client) SaveAll(ctx context.Context, col string, ids []string, updates writeModels := make([]mongo.WriteModel, 0, len(updates)) for i, u := range updates { id := ids[i] - writeModels = append(writeModels, &mongo.ReplaceOneModel{ - Upsert: &upsert, - Filter: bson.M{"id": id}, - Replacement: u, - }) + writeModels = append( + writeModels, + (&mongo.ReplaceOneModel{}). + SetUpsert(true). + SetFilter(bson.M{"id": id}). + SetReplacement(u), + ) } _, err := c.Collection(col).BulkWrite(ctx, writeModels) diff --git a/internal/infrastructure/mongo/mongodoc/clientcol.go b/internal/infrastructure/mongo/mongodoc/clientcol.go index 4ac86f023..97241643d 100644 --- a/internal/infrastructure/mongo/mongodoc/clientcol.go +++ b/internal/infrastructure/mongo/mongodoc/clientcol.go @@ -48,12 +48,12 @@ func (c *ClientCollection) UpdateManyMany(ctx context.Context, updates []Update) return c.Client.UpdateManyMany(ctx, c.CollectionName, updates) } -func (c *ClientCollection) RemoveOne(ctx context.Context, id string) error { - return c.Client.RemoveOne(ctx, c.CollectionName, id) +func (c *ClientCollection) RemoveOne(ctx context.Context, f interface{}) error { + return c.Client.RemoveOne(ctx, c.CollectionName, f) } -func (c *ClientCollection) RemoveAll(ctx context.Context, ids []string) error { - return c.Client.RemoveAll(ctx, c.CollectionName, ids) +func (c *ClientCollection) RemoveAll(ctx context.Context, f interface{}) error { + return c.Client.RemoveAll(ctx, c.CollectionName, f) } func (c 
*ClientCollection) CreateIndex(ctx context.Context, keys []string) []string { diff --git a/internal/infrastructure/mongo/mongodoc/dataset.go b/internal/infrastructure/mongo/mongodoc/dataset.go index 45ab1cd61..4d8d41fe2 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset.go +++ b/internal/infrastructure/mongo/mongodoc/dataset.go @@ -5,6 +5,7 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" ) type DatasetDocument struct { @@ -162,11 +163,11 @@ func NewDataset(dataset *dataset.Dataset) (*DatasetDocument, string) { return &doc, did } -func NewDatasets(datasets []*dataset.Dataset) ([]interface{}, []string) { +func NewDatasets(datasets []*dataset.Dataset, f scene.IDList) ([]interface{}, []string) { res := make([]interface{}, 0, len(datasets)) ids := make([]string, 0, len(datasets)) for _, d := range datasets { - if d == nil { + if d == nil || f != nil && !f.Includes(d.Scene()) { continue } r, id := NewDataset(d) diff --git a/internal/infrastructure/mongo/mongodoc/dataset_schema.go b/internal/infrastructure/mongo/mongodoc/dataset_schema.go index 80fb6a975..4f4c99b2e 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset_schema.go +++ b/internal/infrastructure/mongo/mongodoc/dataset_schema.go @@ -5,6 +5,7 @@ import ( "github.com/reearth/reearth-backend/pkg/dataset" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" ) type DatasetSchemaDocument struct { @@ -114,11 +115,11 @@ func NewDatasetSchema(dataset *dataset.Schema) (*DatasetSchemaDocument, string) return &doc, did } -func NewDatasetSchemas(datasetSchemas []*dataset.Schema) ([]interface{}, []string) { +func NewDatasetSchemas(datasetSchemas []*dataset.Schema, f scene.IDList) ([]interface{}, []string) { res := make([]interface{}, 0, len(datasetSchemas)) ids := make([]string, 0, len(datasetSchemas)) for _, d := range datasetSchemas { - if d == nil { + if d == nil 
|| f != nil && !f.Includes(d.Scene()) { continue } r, id := NewDatasetSchema(d) diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go index f34b2a54d..0800dd171 100644 --- a/internal/infrastructure/mongo/mongodoc/layer.go +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -5,6 +5,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/scene" "go.mongodb.org/mongo-driver/bson" ) @@ -136,14 +137,18 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { }, id } -func NewLayers(layers layer.List) ([]interface{}, []string) { +func NewLayers(layers layer.List, f scene.IDList) ([]interface{}, []string) { res := make([]interface{}, 0, len(layers)) ids := make([]string, 0, len(layers)) for _, d := range layers { if d == nil { continue } - r, id := NewLayer(*d) + d2 := *d + if d2 == nil || f != nil && !f.Includes(d2.Scene()) { + continue + } + r, id := NewLayer(d2) res = append(res, r) ids = append(ids, id) } diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index f9eb64295..5d85b62c5 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -5,6 +5,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" ) const ( @@ -176,7 +177,7 @@ func NewProperty(property *property.Property) (*PropertyDocument, string) { return &doc, pid } -func NewProperties(properties []*property.Property) ([]interface{}, []string) { +func NewProperties(properties []*property.Property, f scene.IDList) ([]interface{}, []string) { if properties == nil { return nil, nil } @@ -184,7 +185,7 @@ func NewProperties(properties []*property.Property) ([]interface{}, []string) { res := make([]interface{}, 0, len(properties)) ids 
:= make([]string, 0, len(properties)) for _, d := range properties { - if d == nil { + if d == nil || f != nil && !f.Includes(d.Scene()) { continue } r, id := NewProperty(d) diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go index 41cf99139..5aadc26db 100644 --- a/internal/infrastructure/mongo/mongodoc/property_schema.go +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -5,10 +5,12 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/scene" ) type PropertySchemaDocument struct { ID string + Scene *string `bson:",omitempty"` Version int Groups []*PropertySchemaGroupDocument LinkableFields *PropertyLinkableFieldsDocument @@ -121,13 +123,14 @@ func NewPropertySchema(m *property.Schema) (*PropertySchemaDocument, string) { id := m.ID().String() return &PropertySchemaDocument{ ID: id, + Scene: m.Scene().StringRef(), Version: m.Version(), Groups: groups, LinkableFields: ToDocPropertyLinkableFields(m.LinkableFields()), }, id } -func NewPropertySchemas(ps []*property.Schema) ([]interface{}, []string) { +func NewPropertySchemas(ps []*property.Schema, f scene.IDList) ([]interface{}, []string) { if ps == nil { return nil, nil } @@ -138,6 +141,9 @@ func NewPropertySchemas(ps []*property.Schema) ([]interface{}, []string) { if d == nil { continue } + if s := d.Scene(); s != nil && f != nil && !f.Includes(*s) { + continue + } r, id := NewPropertySchema(d) res = append(res, r) ids = append(ids, id) diff --git a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go index 68c394b64..2e07de4e3 100644 --- a/internal/infrastructure/mongo/mongodoc/tag.go +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -6,6 +6,7 @@ import ( "go.mongodb.org/mongo-driver/bson" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" 
"github.com/reearth/reearth-backend/pkg/tag" ) @@ -91,14 +92,18 @@ func NewTag(t tag.Tag) (*TagDocument, string) { }, tid } -func NewTags(tags []*tag.Tag) ([]interface{}, []string) { +func NewTags(tags []*tag.Tag, f scene.IDList) ([]interface{}, []string) { res := make([]interface{}, 0, len(tags)) ids := make([]string, 0, len(tags)) for _, d := range tags { if d == nil { continue } - r, tid := NewTag(*d) + d2 := *d + if f != nil && !f.Includes(d2.Scene()) { + continue + } + r, tid := NewTag(d2) res = append(res, r) ids = append(ids, tid) } diff --git a/internal/infrastructure/mongo/mongodoc/tag_test.go b/internal/infrastructure/mongo/mongodoc/tag_test.go index c9934ef6b..020b96775 100644 --- a/internal/infrastructure/mongo/mongodoc/tag_test.go +++ b/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/pkg/tag" "github.com/stretchr/testify/assert" "go.mongodb.org/mongo-driver/bson" @@ -97,8 +98,10 @@ func TestNewTags(t *testing.T) { Scene(sid). 
Build() tgi := tag.Tag(tg) + type args struct { tags []*tag.Tag + f scene.IDList } tests := []struct { @@ -125,13 +128,43 @@ func TestNewTags(t *testing.T) { }, want1: []string{tgi.ID().String()}, }, + { + name: "filtered tags 1", + args: args{ + tags: []*tag.Tag{ + &tgi, + }, + f: scene.IDList{tgi.Scene()}, + }, + want: []interface{}{ + &TagDocument{ + ID: tg.ID().String(), + Label: "group", + Scene: sid.ID().String(), + Item: nil, + Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, + }, + }, + want1: []string{tgi.ID().String()}, + }, + { + name: "filtered tags 2", + args: args{ + tags: []*tag.Tag{ + &tgi, + }, + f: scene.IDList{}, + }, + want: []interface{}{}, + want1: []string{}, + }, } for _, tc := range tests { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() - got, got1 := NewTags(tc.args.tags) + got, got1 := NewTags(tc.args.tags, tc.args.f) assert.Equal(t, tc.want, got) assert.Equal(t, tc.want1, got1) }) diff --git a/internal/infrastructure/mongo/mongodoc/util.go b/internal/infrastructure/mongo/mongodoc/util.go index e2e144c18..9d854e29f 100644 --- a/internal/infrastructure/mongo/mongodoc/util.go +++ b/internal/infrastructure/mongo/mongodoc/util.go @@ -15,10 +15,58 @@ func convertDToM(i interface{}) interface{} { a = append(a, convertDToM(e)) } return a + case []bson.M: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []bson.D: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []bson.A: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a + case []interface{}: + a := make([]interface{}, 0, len(i2)) + for _, e := range i2 { + a = append(a, convertDToM(e)) + } + return a } return i } +func appendI(f interface{}, elements ...interface{}) interface{} { + switch f2 := f.(type) { + case []bson.D: + res := make([]interface{}, 0, len(f2)) + for _, e := 
range f2 { + res = append(res, e) + } + return append(res, elements...) + case []bson.M: + res := make([]interface{}, 0, len(f2)+len(elements)) + for _, e := range f2 { + res = append(res, e) + } + return append(res, elements...) + case bson.A: + res := make([]interface{}, 0, len(f2)+len(elements)) + return append(res, append(f2, elements...)...) + case []interface{}: + res := make([]interface{}, 0, len(f2)+len(elements)) + return append(res, append(f2, elements...)...) + } + return f +} + func appendE(f interface{}, elements ...bson.E) interface{} { switch f2 := f.(type) { case bson.D: @@ -27,10 +75,59 @@ func appendE(f interface{}, elements ...bson.E) interface{} { } return f2 case bson.M: + f3 := make(bson.M, len(f2)) + for k, v := range f2 { + f3[k] = v + } for _, e := range elements { - f2[e.Key] = e.Value + f3[e.Key] = e.Value } - return f2 + return f3 } return f } + +func getE(f interface{}, k string) interface{} { + switch g := f.(type) { + case bson.D: + for _, e := range g { + if e.Key == k { + return e.Value + } + } + case bson.M: + return g[k] + } + return nil +} + +func And(filter interface{}, key string, f interface{}) interface{} { + if f == nil { + return filter + } + if key != "" && getE(filter, key) != nil { + return filter + } + var g interface{} + if key == "" { + g = f + } else { + g = bson.M{key: f} + } + if getE(filter, "$or") != nil { + return bson.M{ + "$and": []interface{}{filter, g}, + } + } + if and := getE(filter, "$and"); and != nil { + return bson.M{ + "$and": appendI(and, g), + } + } + if key == "" { + return bson.M{ + "$and": []interface{}{filter, g}, + } + } + return appendE(filter, bson.E{Key: key, Value: f}) +} diff --git a/internal/infrastructure/mongo/mongodoc/util_test.go b/internal/infrastructure/mongo/mongodoc/util_test.go new file mode 100644 index 000000000..8bcde2c14 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/util_test.go @@ -0,0 +1,91 @@ +package mongodoc + +import ( + "testing" + + 
"github.com/stretchr/testify/assert" + "go.mongodb.org/mongo-driver/bson" +) + +func TestConvertDToM(t *testing.T) { + assert.Equal(t, bson.M{"a": "b"}, convertDToM(bson.M{"a": "b"})) + assert.Equal(t, bson.M{"a": "b"}, convertDToM(bson.D{{Key: "a", Value: "b"}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM([]bson.D{{{Key: "a", Value: "b"}}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM([]bson.M{{"a": "b"}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM(bson.A{bson.D{{Key: "a", Value: "b"}}})) + assert.Equal(t, []interface{}{bson.M{"a": "b"}}, convertDToM([]interface{}{bson.D{{Key: "a", Value: "b"}}})) +} + +func TestAppendI(t *testing.T) { + assert.Equal(t, []interface{}{bson.M{"a": "b"}, "x"}, appendI([]bson.M{{"a": "b"}}, "x")) + assert.Equal(t, []interface{}{bson.D{{Key: "a", Value: "b"}}, "x"}, appendI([]bson.D{{{Key: "a", Value: "b"}}}, "x")) + assert.Equal(t, []interface{}{bson.D{{Key: "a", Value: "b"}}, "x"}, appendI(bson.A{bson.D{{Key: "a", Value: "b"}}}, "x")) + assert.Equal(t, []interface{}{bson.D{{Key: "a", Value: "b"}}, "x"}, appendI([]interface{}{bson.D{{Key: "a", Value: "b"}}}, "x")) +} + +func TestAppendE(t *testing.T) { + assert.Equal(t, bson.M{"a": "b", "c": "d"}, appendE(bson.M{"a": "b"}, bson.E{Key: "c", Value: "d"})) + assert.Equal(t, bson.D{{Key: "a", Value: "b"}, {Key: "c", Value: "d"}}, appendE(bson.D{{Key: "a", Value: "b"}}, bson.E{Key: "c", Value: "d"})) + assert.Equal(t, []bson.M{}, appendE([]bson.M{}, bson.E{Key: "c", Value: "d"})) +} + +func TestGetE(t *testing.T) { + assert.Equal(t, "b", getE(bson.M{"a": "b"}, "a")) + assert.Nil(t, getE(bson.M{"a": "b"}, "b")) + assert.Equal(t, "b", getE(bson.D{{Key: "a", Value: "b"}}, "a")) + assert.Nil(t, getE(bson.D{{Key: "a", Value: "b"}}, "b")) + assert.Nil(t, getE(bson.A{}, "b")) +} + +func TestAnd(t *testing.T) { + assert.Equal(t, bson.M{"x": "y"}, And(bson.M{}, "x", "y")) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "x", "y")) 
+ assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"$or": []bson.M{{"a": "b"}}}, + bson.M{"x": "y"}, + }, + }, And(bson.M{"$or": []bson.M{{"a": "b"}}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + bson.M{"x": "y"}, + }, + }, And(bson.M{"$and": []bson.M{{"a": "b"}}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + bson.M{"x": "y"}, + }, + }, And(bson.M{"$and": []interface{}{bson.M{"a": "b"}}}, "x", "y")) + + assert.Equal(t, bson.D{{Key: "x", Value: "y"}}, And(bson.D{}, "x", "y")) + assert.Equal(t, bson.D{{Key: "x", Value: "z"}}, And(bson.D{{Key: "x", Value: "z"}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, + bson.M{"x": "y"}, + }, + }, And(bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, "x", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + bson.M{"x": "y"}, + }, + }, And(bson.D{{Key: "$and", Value: []bson.M{{"a": "b"}}}}, "x", "y")) + + assert.Equal(t, bson.M{"$and": []interface{}{bson.M{}, "y"}}, And(bson.M{}, "", "y")) + assert.Equal(t, bson.M{"$and": []interface{}{bson.D{}, "y"}}, And(bson.D{}, "", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, + "y", + }, + }, And(bson.D{{Key: "$or", Value: []bson.M{{"a": "b"}}}}, "", "y")) + assert.Equal(t, bson.M{ + "$and": []interface{}{ + bson.M{"a": "b"}, + "y", + }, + }, And(bson.D{{Key: "$and", Value: []bson.M{{"a": "b"}}}}, "", "y")) +} diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index 948cb5bcf..9bd95e563 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -12,10 +12,12 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/rerror" ) type 
pluginRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewPlugin(client *mongodoc.Client) repo.Plugin { @@ -31,69 +33,45 @@ func (r *pluginRepo) init() { } } -func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID, sids []id.SceneID) (*plugin.Plugin, error) { +func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { + return &pluginRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID) (*plugin.Plugin, error) { // TODO: separate built-in plugins to another repository if p := builtin.GetPlugin(pid); p != nil { return p, nil } - - pids := pid.String() - filter := bson.M{ - "$or": []bson.M{ - { - "id": pids, - "scene": nil, - }, - { - "id": pids, - "scene": "", - }, - { - "id": pids, - "scene": bson.M{ - "$in": id.SceneIDsToStrings(sids), - }, - }, - }, + if s := pid.Scene(); s != nil && !r.f.CanRead(*s) { + return nil, rerror.ErrNotFound } - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "id": pid.String(), + }) } -func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { +func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { // TODO: separate built-in plugins to another repository // exclude built-in - b := map[string]*plugin.Plugin{} + b := plugin.Map{} ids2 := make([]id.PluginID, 0, len(ids)) for _, id := range ids { if p := builtin.GetPlugin(id); p != nil { - b[id.String()] = p - } else { + b[id] = p + } else if s := id.Scene(); s == nil || r.f.CanRead(*s) { ids2 = append(ids2, id) } } - res := make([]*plugin.Plugin, 0, len(ids2)) + res := make(plugin.List, 0, len(ids2)) var err error if len(ids2) > 0 { - keys := id.PluginIDsToStrings(ids2) filter := bson.M{ - "$or": []bson.M{ - { - "id": bson.M{"$in": keys}, - "scene": nil, - }, - { - "id": bson.M{"$in": keys}, - "scene": "", - }, - { - "id": bson.M{"$in": keys}, - "scene": bson.M{ - 
"$in": id.SceneIDsToStrings(sids), - }, - }, - }, + "id": bson.M{"$in": id.PluginIDsToStrings(ids2)}, } dst := make([]*plugin.Plugin, 0, len(ids2)) res, err = r.find(ctx, dst, filter) @@ -102,46 +80,29 @@ func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID, sids []id } } - // combine built-in and mongo results - results := make([]*plugin.Plugin, 0, len(ids)) - for _, id := range ids { - if p, ok := b[id.String()]; ok { - results = append(results, p) - continue - } - found := false - for _, p := range res { - if p != nil && p.ID().Equal(id) { - results = append(results, p) - found = true - break - } - } - if !found { - results = append(results, nil) - } - } - - return filterPlugins(ids, results), nil + return res.Concat(b.List()).MapToIDs(ids), nil } func (r *pluginRepo) Save(ctx context.Context, plugin *plugin.Plugin) error { if plugin.ID().System() { return errors.New("cannnot save system plugin") } + if s := plugin.ID().Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewPlugin(plugin) return r.client.SaveOne(ctx, id, doc) } func (r *pluginRepo) Remove(ctx context.Context, id id.PluginID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *pluginRepo) find(ctx context.Context, dst []*plugin.Plugin, filter interface{}) ([]*plugin.Plugin, error) { c := mongodoc.PluginConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil @@ -152,32 +113,16 @@ func (r *pluginRepo) findOne(ctx context.Context, filter interface{}) (*plugin.P c := mongodoc.PluginConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil } -// func (r *pluginRepo) paginate(ctx 
context.Context, filter bson.D, pagination *usecase.Pagination) ([]*plugin.Plugin, *usecase.PageInfo, error) { -// var c mongodoc.PluginConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.Rows, pageInfo, nil -// } - -func filterPlugins(ids []id.PluginID, rows []*plugin.Plugin) []*plugin.Plugin { - res := make([]*plugin.Plugin, 0, len(ids)) - for _, id := range ids { - var r2 *plugin.Plugin - for _, r := range rows { - if r.ID().Equal(id) { - r2 = r - break - } - } - res = append(res, r2) - } - return res +func (r *pluginRepo) readFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Readable) +} + +func (r *pluginRepo) writeFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 006cf0a5d..8b10f6994 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -16,6 +16,7 @@ import ( type projectRepo struct { client *mongodoc.ClientCollection + f repo.TeamFilter } func NewProject(client *mongodoc.Client) repo.Project { @@ -31,12 +32,19 @@ func (r *projectRepo) init() { } } -func (r *projectRepo) FindByIDs(ctx context.Context, ids []id.ProjectID, f []id.TeamID) ([]*project.Project, error) { - filter := r.teamFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.ProjectIDsToStrings(ids)}, - }}, - }, f) +func (r *projectRepo) Filtered(f repo.TeamFilter) repo.Project { + return &projectRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *projectRepo) FindByIDs(ctx context.Context, ids []id.ProjectID) ([]*project.Project, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.ProjectIDsToStrings(ids), + }, + } dst := make([]*project.Project, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -45,79 
+53,79 @@ func (r *projectRepo) FindByIDs(ctx context.Context, ids []id.ProjectID, f []id. return filterProjects(ids, res), nil } -func (r *projectRepo) FindByID(ctx context.Context, id id.ProjectID, f []id.TeamID) (*project.Project, error) { - filter := r.teamFilter(bson.D{ - {Key: "id", Value: id.String()}, - }, f) - return r.findOne(ctx, filter) +func (r *projectRepo) FindByID(ctx context.Context, id id.ProjectID) (*project.Project, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) } func (r *projectRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { - filter := bson.D{ - {Key: "team", Value: id.String()}, + if !r.f.CanRead(id) { + return nil, usecase.EmptyPageInfo(), nil } - return r.paginate(ctx, filter, pagination) + return r.paginate(ctx, bson.M{ + "team": id.String(), + }, pagination) } func (r *projectRepo) FindByPublicName(ctx context.Context, name string) (*project.Project, error) { - var filter bson.D - if name == "" { - return nil, nil - } - - filter = bson.D{ - {Key: "$or", Value: []bson.D{ - {{Key: "alias", Value: name}, {Key: "publishmentstatus", Value: "limited"}}, - {{Key: "domains.domain", Value: name}, {Key: "publishmentstatus", Value: "public"}}, - {{Key: "alias", Value: name}, {Key: "publishmentstatus", Value: "public"}}, - }}, + return nil, rerror.ErrNotFound } - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"alias": name, "publishmentstatus": "limited"}, + {"domains.domain": name, "publishmentstatus": "public"}, + {"alias": name, "publishmentstatus": "public"}, + }, + }) } func (r *projectRepo) CountByTeam(ctx context.Context, team id.TeamID) (int, error) { - count, err := r.client.Count(ctx, bson.D{ - {Key: "team", Value: team.String()}, + count, err := r.client.Count(ctx, bson.M{ + "team": team.String(), }) return int(count), err } func (r *projectRepo) Save(ctx context.Context, project *project.Project) 
error { + if !r.f.CanWrite(project.Team()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewProject(project) return r.client.SaveOne(ctx, id, doc) } func (r *projectRepo) Remove(ctx context.Context, id id.ProjectID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } -func (r *projectRepo) find(ctx context.Context, dst []*project.Project, filter bson.D) ([]*project.Project, error) { +func (r *projectRepo) find(ctx context.Context, dst []*project.Project, filter interface{}) ([]*project.Project, error) { c := mongodoc.ProjectConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil } -func (r *projectRepo) findOne(ctx context.Context, filter bson.D) (*project.Project, error) { +func (r *projectRepo) findOne(ctx context.Context, filter interface{}) (*project.Project, error) { dst := make([]*project.Project, 0, 1) c := mongodoc.ProjectConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil } -func (r *projectRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { +func (r *projectRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { var c mongodoc.ProjectConsumer - pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) - if err2 != nil { - return nil, nil, rerror.ErrInternalBy(err2) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + if err != nil { + return nil, nil, rerror.ErrInternalBy(err) } return c.Rows, pageInfo, nil } @@ -137,13 +145,10 @@ func filterProjects(ids []id.ProjectID, rows []*project.Project) 
[]*project.Proj return res } -func (*projectRepo) teamFilter(filter bson.D, teams []id.TeamID) bson.D { - if teams == nil { - return filter - } - filter = append(filter, bson.E{ - Key: "team", - Value: bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teams)}}, - }) - return filter +func (r *projectRepo) readFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Readable) +} + +func (r *projectRepo) writeFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index 20cf68f3a..a0f3de5a8 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -14,6 +14,7 @@ import ( type propertyRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewProperty(client *mongodoc.Client) repo.Property { @@ -29,15 +30,25 @@ func (r *propertyRepo) init() { } } -func (r *propertyRepo) FindByID(ctx context.Context, id2 id.PropertyID, f []id.SceneID) (*property.Property, error) { - filter := r.sceneFilter(bson.D{{Key: "id", Value: id.ID(id2).String()}}, f) - return r.findOne(ctx, filter) +func (r *propertyRepo) Filtered(f repo.SceneFilter) repo.Property { + return &propertyRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *propertyRepo) FindByID(ctx context.Context, id id.PropertyID) (*property.Property, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) } -func (r *propertyRepo) FindByIDs(ctx context.Context, ids []id.PropertyID, f []id.SceneID) (property.List, error) { - filter := r.sceneFilter(bson.D{{Key: "id", Value: bson.D{{ - Key: "$in", Value: id.PropertyIDsToStrings(ids), - }}}}, f) +func (r *propertyRepo) FindByIDs(ctx context.Context, ids []id.PropertyID) (property.List, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.PropertyIDsToStrings(ids), + }, + } dst := make(property.List, 0, len(ids)) res, err := r.find(ctx, dst, 
filter) if err != nil { @@ -47,39 +58,35 @@ func (r *propertyRepo) FindByIDs(ctx context.Context, ids []id.PropertyID, f []i } func (r *propertyRepo) FindLinkedAll(ctx context.Context, id id.SceneID) (property.List, error) { - filter := bson.D{ - {Key: "scene", Value: id.String()}, - {Key: "fields", Value: bson.D{ - {Key: "$elemMatch", Value: bson.D{ - {Key: "links", Value: bson.D{{ - Key: "$not", Value: bson.D{{ - Key: "$size", Value: 0, - }}}, - }}, - }}, - }}, - } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "scene": id.String(), + "fields": bson.M{ + "$elemMatch": bson.M{ + "links": bson.M{ + "$not": bson.M{ + "$size": 0, + }, + }, + }, + }, + }) } func (r *propertyRepo) FindByDataset(ctx context.Context, sid id.DatasetSchemaID, did id.DatasetID) (property.List, error) { - sids := sid.String() - pids := did.String() - filter := bson.D{ - {Key: "$or", Value: []bson.D{ - {{Key: "fields.links.dataset", Value: pids}}, // for compatibility - {{Key: "items.fields.links.dataset", Value: pids}}, - {{Key: "items.groups.fields.links.dataset", Value: pids}}, - {{Key: "fields.links.schema", Value: sids}}, // for compatibility - {{Key: "items.fields.links.schema", Value: sids}}, - {{Key: "items.groups.fields.links.schema", Value: sids}}, - }}, - } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "$or": []bson.M{ + {"fields.links.dataset": did.String()}, // for compatibility + {"items.fields.links.dataset": did.String()}, + {"items.groups.fields.links.dataset": did.String()}, + {"fields.links.schema": sid.String()}, // for compatibility + {"items.fields.links.schema": sid.String()}, + {"items.groups.fields.links.schema": sid.String()}, + }, + }) } func (r *propertyRepo) FindBySchema(ctx context.Context, psids []id.PropertySchemaID, sid id.SceneID) (property.List, error) { - if len(psids) == 0 { + if len(psids) == 0 || !r.f.CanRead(sid) { return nil, nil } @@ -96,6 +103,12 @@ func (r *propertyRepo) FindBySchema(ctx 
context.Context, psids []id.PropertySche } func (r *propertyRepo) FindByPlugin(ctx context.Context, pid id.PluginID, sid id.SceneID) (property.List, error) { + if !r.f.CanRead(sid) { + return nil, rerror.ErrNotFound + } + if s := pid.Scene(); s != nil && !r.f.CanRead(*s) { + return nil, rerror.ErrNotFound + } filter := bson.M{ "schemaplugin": pid.String(), "scene": sid.String(), @@ -104,6 +117,9 @@ func (r *propertyRepo) FindByPlugin(ctx context.Context, pid id.PluginID, sid id } func (r *propertyRepo) Save(ctx context.Context, property *property.Property) error { + if !r.f.CanWrite(property.Scene()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewProperty(property) return r.client.SaveOne(ctx, id, doc) } @@ -112,11 +128,14 @@ func (r *propertyRepo) SaveAll(ctx context.Context, properties property.List) er if len(properties) == 0 { return nil } - docs, ids := mongodoc.NewProperties(properties) + docs, ids := mongodoc.NewProperties(properties, r.f.Writable) return r.client.SaveAll(ctx, ids, docs) } func (r *propertyRepo) UpdateSchemaPlugin(ctx context.Context, old, new id.PluginID, s id.SceneID) error { + if !r.f.CanWrite(s) { + return nil + } return r.client.UpdateMany(ctx, bson.M{ "schemaplugin": old.String(), "scene": s.String(), @@ -126,17 +145,22 @@ func (r *propertyRepo) UpdateSchemaPlugin(ctx context.Context, old, new id.Plugi } func (r *propertyRepo) Remove(ctx context.Context, id id.PropertyID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *propertyRepo) RemoveAll(ctx context.Context, ids []id.PropertyID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.PropertyIDsToStrings(ids)) + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": id.PropertyIDsToStrings(ids)}, + })) } func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { + if !r.f.CanWrite(sceneID) { + return nil + } 
filter := bson.D{ {Key: "scene", Value: sceneID.String()}, } @@ -151,32 +175,23 @@ func (r *propertyRepo) find(ctx context.Context, dst property.List, filter inter c := mongodoc.PropertyConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil } -func (r *propertyRepo) findOne(ctx context.Context, filter bson.D) (*property.Property, error) { +func (r *propertyRepo) findOne(ctx context.Context, filter interface{}) (*property.Property, error) { dst := make(property.List, 0, 1) c := mongodoc.PropertyConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil } -// func (r *propertyRepo) paginate(ctx context.Context, filter bson.D, pagination *usecase.Pagination) (property.List, *usecase.PageInfo, error) { -// var c propertyConsumer -// pageInfo, err2 := r.client.Paginate(ctx, filter, pagination, &c) -// if err2 != nil { -// return nil, nil, rerror.ErrInternalBy(err2) -// } -// return c.rows, pageInfo, nil -// } - func filterProperties(ids []id.PropertyID, rows property.List) property.List { res := make(property.List, 0, len(ids)) for _, id := range ids { @@ -192,13 +207,10 @@ func filterProperties(ids []id.PropertyID, rows property.List) property.List { return res } -func (*propertyRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { - if scenes == nil { - return filter - } - filter = append(filter, bson.E{ - Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, - }) - return filter +func (r *propertyRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *propertyRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) } diff --git 
a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index e71e15eec..f06933f59 100644 --- a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -15,6 +15,7 @@ import ( type propertySchemaRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewPropertySchema(client *mongodoc.Client) repo.PropertySchema { @@ -30,6 +31,13 @@ func (r *propertySchemaRepo) init() { } } +func (r *propertySchemaRepo) Filtered(f repo.SceneFilter) repo.PropertySchema { + return &propertySchemaRepo{ + client: r.client, + f: f.Clone(), + } +} + func (r *propertySchemaRepo) FindByID(ctx context.Context, id id.PropertySchemaID) (*property.Schema, error) { if ps := builtin.GetPropertySchema(id); ps != nil { return ps, nil @@ -41,12 +49,12 @@ func (r *propertySchemaRepo) FindByID(ctx context.Context, id id.PropertySchemaI func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { // exclude built-in - b := map[string]*property.Schema{} + b := property.SchemaMap{} ids2 := make([]id.PropertySchemaID, 0, len(ids)) for _, id := range ids { if p := builtin.GetPropertySchema(id); p != nil { - b[id.String()] = p - } else { + b[id] = p + } else if s := id.Plugin().Scene(); s == nil || r.f.CanRead(*s) { ids2 = append(ids2, id) } } @@ -65,81 +73,76 @@ func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySch } } - // combine built-in and mongo results - results := make(property.SchemaList, 0, len(ids)) - for _, id := range ids { - if p, ok := b[id.String()]; ok { - results = append(results, p) - continue - } - found := false - for _, p := range res { - if p != nil && p.ID().Equal(id) { - results = append(results, p) - found = true - break - } - } - if !found { - results = append(results, nil) - } - } - - return results, nil + return res.Concat(b.List()).MapToIDs(ids), nil } func (r 
*propertySchemaRepo) Save(ctx context.Context, m *property.Schema) error { if m.ID().Plugin().System() { return errors.New("cannnot save system property schema") } + if s := m.Scene(); s != nil && !r.f.CanWrite(*s) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewPropertySchema(m) return r.client.SaveOne(ctx, id, doc) } func (r *propertySchemaRepo) SaveAll(ctx context.Context, m property.SchemaList) error { + savable := make(property.SchemaList, 0, len(m)) for _, ps := range m { if ps.ID().Plugin().System() { - return errors.New("cannnot save system property schema") + continue } + savable = append(savable, ps) } if len(m) == 0 { return nil } - docs, ids := mongodoc.NewPropertySchemas(m) + docs, ids := mongodoc.NewPropertySchemas(savable, r.f.Writable) return r.client.SaveAll(ctx, ids, docs) } func (r *propertySchemaRepo) Remove(ctx context.Context, id id.PropertySchemaID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *propertySchemaRepo) RemoveAll(ctx context.Context, ids []id.PropertySchemaID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.PropertySchemaIDsToStrings(ids)) + return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ + "id": bson.M{"$in": id.PropertySchemaIDsToStrings(ids)}, + })) } -func (r *propertySchemaRepo) find(ctx context.Context, dst property.SchemaList, filter bson.D) (property.SchemaList, error) { +func (r *propertySchemaRepo) find(ctx context.Context, dst property.SchemaList, filter interface{}) (property.SchemaList, error) { c := mongodoc.PropertySchemaConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil } -func (r *propertySchemaRepo) findOne(ctx context.Context, filter bson.D) (*property.Schema, error) { +func (r *propertySchemaRepo) findOne(ctx context.Context, filter 
interface{}) (*property.Schema, error) { dst := make(property.SchemaList, 0, 1) c := mongodoc.PropertySchemaConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil } + +func (r *propertySchemaRepo) readFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Readable) +} + +func (r *propertySchemaRepo) writeFilter(filter interface{}) interface{} { + return applyOptionalSceneFilter(filter, r.f.Writable) +} diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index ee38b94d9..140ed42e2 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -8,12 +8,14 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" ) type sceneRepo struct { client *mongodoc.ClientCollection + f repo.TeamFilter } func NewScene(client *mongodoc.Client) repo.Scene { @@ -29,19 +31,25 @@ func (r *sceneRepo) init() { } } -func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID, f []id.TeamID) (*scene.Scene, error) { - filter := r.teamFilter(bson.M{ +func (r *sceneRepo) Filtered(f repo.TeamFilter) repo.Scene { + return &sceneRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, error) { + return r.findOne(ctx, bson.M{ "id": id.String(), - }, f) - return r.findOne(ctx, filter) + }) } -func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.TeamID) (scene.List, error) { - filter := r.teamFilter(bson.M{ +func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID) (scene.List, error) { + filter := bson.M{ "id": bson.M{ "$in": 
id.SceneIDsToStrings(ids), }, - }, f) + } dst := make(scene.List, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -50,39 +58,43 @@ func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID, f []id.Team return filterScenes(ids, res), nil } -func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID, f []id.TeamID) (*scene.Scene, error) { - filter := r.teamFilter(bson.M{ +func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID) (*scene.Scene, error) { + filter := bson.M{ "project": id.String(), - }, f) + } return r.findOne(ctx, filter) } func (r *sceneRepo) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { + if r.f.Readable != nil { + teams = user.TeamIDList(teams).Filter(r.f.Readable...) + } res, err := r.find(ctx, nil, bson.M{ - "team": bson.M{"$in": id.TeamIDsToStrings(teams)}, + "team": bson.M{"$in": user.TeamIDList(teams).Strings()}, }) - if err != nil { - if err != mongo.ErrNilDocument && err != mongo.ErrNoDocuments { - return nil, err - } + if err != nil && err != mongo.ErrNilDocument && err != mongo.ErrNoDocuments { + return nil, err } return res, nil } func (r *sceneRepo) Save(ctx context.Context, scene *scene.Scene) error { + if !r.f.CanWrite(scene.Team()) { + return repo.ErrOperationDenied + } doc, id := mongodoc.NewScene(scene) return r.client.SaveOne(ctx, id, doc) } func (r *sceneRepo) Remove(ctx context.Context, id id.SceneID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *sceneRepo) find(ctx context.Context, dst []*scene.Scene, filter interface{}) ([]*scene.Scene, error) { c := mongodoc.SceneConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil @@ -93,7 +105,7 @@ func (r *sceneRepo) findOne(ctx context.Context, filter interface{}) (*scene.Sce c := 
mongodoc.SceneConsumer{ Rows: dst, } - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows[0], nil @@ -103,10 +115,10 @@ func filterScenes(ids []id.SceneID, rows scene.List) scene.List { return rows.FilterByID(ids...) } -func (*sceneRepo) teamFilter(filter bson.M, teams []id.TeamID) bson.M { - if teams == nil { - return filter - } - filter["team"] = bson.D{{Key: "$in", Value: id.TeamIDsToStrings(teams)}} - return filter +func (r *sceneRepo) readFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Readable) +} + +func (r *sceneRepo) writeFilter(filter interface{}) interface{} { + return applyTeamFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index ab82b26c5..9fb6914b3 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -15,6 +15,7 @@ import ( type tagRepo struct { client *mongodoc.ClientCollection + f repo.SceneFilter } func NewTag(client *mongodoc.Client) repo.Tag { @@ -30,19 +31,25 @@ func (r *tagRepo) init() { } } -func (r *tagRepo) FindByID(ctx context.Context, id id.TagID, f []id.SceneID) (tag.Tag, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: id.String()}, - }, f) - return r.findOne(ctx, filter) +func (r *tagRepo) Filtered(f repo.SceneFilter) repo.Tag { + return &tagRepo{ + client: r.client, + f: f.Clone(), + } +} + +func (r *tagRepo) FindByID(ctx context.Context, id id.TagID) (tag.Tag, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) } -func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Tag, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TagIDsToStrings(ids)}, - }}, - }, f) +func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID) ([]*tag.Tag, error) { + filter := bson.M{ + "id": 
bson.M{ + "$in": id.TagIDsToStrings(ids), + }, + } dst := make([]*tag.Tag, 0, len(ids)) res, err := r.find(ctx, dst, filter) if err != nil { @@ -52,25 +59,28 @@ func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) } func (r *tagRepo) FindByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { + if !r.f.CanRead(id) { + return nil, nil + } filter := bson.M{ "scene": id.String(), } return r.find(ctx, nil, filter) } -func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID, f []id.SceneID) (*tag.Item, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: id.String()}, - }, f) +func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID) (*tag.Item, error) { + filter := bson.M{ + "id": id.String(), + } return r.findItemOne(ctx, filter) } -func (r *tagRepo) FindItemByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Item, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TagIDsToStrings(ids)}, - }}, - }, f) +func (r *tagRepo) FindItemByIDs(ctx context.Context, ids []id.TagID) ([]*tag.Item, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.TagIDsToStrings(ids), + }, + } dst := make([]*tag.Item, 0, len(ids)) res, err := r.findItems(ctx, dst, filter) if err != nil { @@ -79,19 +89,19 @@ func (r *tagRepo) FindItemByIDs(ctx context.Context, ids []id.TagID, f []id.Scen return filterTagItems(ids, res), nil } -func (r *tagRepo) FindGroupByID(ctx context.Context, id id.TagID, f []id.SceneID) (*tag.Group, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: id.String()}, - }, f) +func (r *tagRepo) FindGroupByID(ctx context.Context, id id.TagID) (*tag.Group, error) { + filter := bson.M{ + "id": id.String(), + } return r.findGroupOne(ctx, filter) } -func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID, f []id.SceneID) ([]*tag.Group, error) { - filter := r.sceneFilter(bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: 
id.TagIDsToStrings(ids)}, - }}, - }, f) +func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID) ([]*tag.Group, error) { + filter := bson.M{ + "id": bson.M{ + "$in": id.TagIDsToStrings(ids), + }, + } dst := make([]*tag.Group, 0, len(ids)) res, err := r.findGroups(ctx, dst, filter) if err != nil { @@ -101,25 +111,22 @@ func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID, f []id.Sce } func (r *tagRepo) FindRootsByScene(ctx context.Context, id id.SceneID) ([]*tag.Tag, error) { - filter := bson.M{ + return r.find(ctx, nil, bson.M{ "scene": id.String(), "item.parent": nil, - } - return r.find(ctx, nil, filter) + }) } -func (r *tagRepo) FindGroupByItem(ctx context.Context, tagID id.TagID, f []id.SceneID) (*tag.Group, error) { - ids := []id.TagID{tagID} - filter := r.sceneFilter(bson.D{ - {Key: "group.tags", Value: bson.D{ - {Key: "$in", Value: id.TagIDsToStrings(ids)}, - }}, - }, f) - - return r.findGroupOne(ctx, filter) +func (r *tagRepo) FindGroupByItem(ctx context.Context, tagID id.TagID) (*tag.Group, error) { + return r.findGroupOne(ctx, bson.M{ + "group.tags": tagID.String(), + }) } func (r *tagRepo) Save(ctx context.Context, tag tag.Tag) error { + if !r.f.CanWrite(tag.Scene()) { + return repo.ErrOperationDenied + } doc, tid := mongodoc.NewTag(tag) return r.client.SaveOne(ctx, tid, doc) } @@ -128,19 +135,21 @@ func (r *tagRepo) SaveAll(ctx context.Context, tags []*tag.Tag) error { if tags == nil { return nil } - docs, ids := mongodoc.NewTags(tags) + docs, ids := mongodoc.NewTags(tags, r.f.Writable) return r.client.SaveAll(ctx, ids, docs) } func (r *tagRepo) Remove(ctx context.Context, id id.TagID) error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } func (r *tagRepo) RemoveAll(ctx context.Context, ids []id.TagID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.TagIDsToStrings(ids)) + return r.client.RemoveAll(ctx, 
r.writeFilter(bson.M{ + "id": bson.M{"$in": id.TagIDsToStrings(ids)}, + })) } func (r *tagRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { @@ -158,7 +167,7 @@ func (r *tagRepo) find(ctx context.Context, dst []*tag.Tag, filter interface{}) c := mongodoc.TagConsumer{ Rows: dst, } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.Rows, nil @@ -166,7 +175,7 @@ func (r *tagRepo) find(ctx context.Context, dst []*tag.Tag, filter interface{}) func (r *tagRepo) findOne(ctx context.Context, filter interface{}) (tag.Tag, error) { c := mongodoc.TagConsumer{} - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } if len(c.Rows) == 0 { @@ -175,9 +184,9 @@ func (r *tagRepo) findOne(ctx context.Context, filter interface{}) (tag.Tag, err return *c.Rows[0], nil } -func (r *tagRepo) findItemOne(ctx context.Context, filter bson.D) (*tag.Item, error) { +func (r *tagRepo) findItemOne(ctx context.Context, filter interface{}) (*tag.Item, error) { c := mongodoc.TagConsumer{} - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } if len(c.ItemRows) == 0 { @@ -186,9 +195,9 @@ func (r *tagRepo) findItemOne(ctx context.Context, filter bson.D) (*tag.Item, er return c.ItemRows[0], nil } -func (r *tagRepo) findGroupOne(ctx context.Context, filter bson.D) (*tag.Group, error) { +func (r *tagRepo) findGroupOne(ctx context.Context, filter interface{}) (*tag.Group, error) { c := mongodoc.TagConsumer{} - if err := r.client.FindOne(ctx, filter, &c); err != nil { + if err := r.client.FindOne(ctx, r.readFilter(filter), &c); err != nil { return nil, err } if len(c.GroupRows) == 0 { @@ -197,27 +206,27 @@ func (r *tagRepo) findGroupOne(ctx context.Context, filter bson.D) (*tag.Group, return 
c.GroupRows[0], nil } -func (r *tagRepo) findItems(ctx context.Context, dst []*tag.Item, filter bson.D) ([]*tag.Item, error) { +func (r *tagRepo) findItems(ctx context.Context, dst []*tag.Item, filter interface{}) ([]*tag.Item, error) { c := mongodoc.TagConsumer{ ItemRows: dst, } if c.ItemRows != nil { c.Rows = make([]*tag.Tag, 0, len(c.ItemRows)) } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.ItemRows, nil } -func (r *tagRepo) findGroups(ctx context.Context, dst []*tag.Group, filter bson.D) ([]*tag.Group, error) { +func (r *tagRepo) findGroups(ctx context.Context, dst []*tag.Group, filter interface{}) ([]*tag.Group, error) { c := mongodoc.TagConsumer{ GroupRows: dst, } if c.GroupRows != nil { c.Rows = make([]*tag.Tag, 0, len(c.GroupRows)) } - if err := r.client.Find(ctx, filter, &c); err != nil { + if err := r.client.Find(ctx, r.readFilter(filter), &c); err != nil { return nil, err } return c.GroupRows, nil @@ -271,13 +280,10 @@ func filterTagGroups(ids []id.TagID, rows []*tag.Group) []*tag.Group { return res } -func (*tagRepo) sceneFilter(filter bson.D, scenes []id.SceneID) bson.D { - if scenes == nil { - return filter - } - filter = append(filter, bson.E{ - Key: "scene", - Value: bson.D{{Key: "$in", Value: id.SceneIDsToStrings(scenes)}}, - }) - return filter +func (r *tagRepo) readFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Readable) +} + +func (r *tagRepo) writeFilter(filter interface{}) interface{} { + return applySceneFilter(filter, r.f.Writable) } diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index 11691abe3..d6ec04f56 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -77,14 +77,16 @@ func (r *teamRepo) SaveAll(ctx context.Context, teams []*user.Team) error { } func (r *teamRepo) Remove(ctx context.Context, id id.TeamID) 
error { - return r.client.RemoveOne(ctx, id.String()) + return r.client.RemoveOne(ctx, bson.M{"id": id.String()}) } func (r *teamRepo) RemoveAll(ctx context.Context, ids []id.TeamID) error { if len(ids) == 0 { return nil } - return r.client.RemoveAll(ctx, id.TeamIDsToStrings(ids)) + return r.client.RemoveAll(ctx, bson.M{ + "id": bson.M{"$in": id.TeamIDsToStrings(ids)}, + }) } func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) (user.TeamList, error) { diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index 34f91db8b..ce99847f9 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -15,23 +15,19 @@ import ( type Asset struct { common - assetRepo repo.Asset - teamRepo repo.Team - transaction repo.Transaction - file gateway.File + repos *repo.Container + gateways *gateway.Container } -func NewAsset(r *repo.Container, gr *gateway.Container) interfaces.Asset { +func NewAsset(r *repo.Container, g *gateway.Container) interfaces.Asset { return &Asset{ - assetRepo: r.Asset, - teamRepo: r.Team, - transaction: r.Transaction, - file: gr.File, + repos: r, + gateways: g, } } func (i *Asset) Fetch(ctx context.Context, assets []id.AssetID, operator *usecase.Operator) ([]*asset.Asset, error) { - return i.assetRepo.FindByIDs(ctx, assets, operator.AllReadableTeams()) + return i.repos.Asset.FindByIDs(ctx, assets) } func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { @@ -39,7 +35,7 @@ func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagina return nil, nil, err } - return i.assetRepo.FindByTeam(ctx, tid, p) + return i.repos.Asset.FindByTeam(ctx, tid, p) } func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { @@ -51,7 +47,7 @@ func (i *Asset) Create(ctx context.Context, 
inp interfaces.CreateAssetParam, ope return nil, interfaces.ErrFileNotIncluded } - tx, err := i.transaction.Begin() + tx, err := i.repos.Transaction.Begin() if err != nil { return } @@ -61,7 +57,7 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope } }() - url, err := i.file.UploadAsset(ctx, inp.File) + url, err := i.gateways.File.UploadAsset(ctx, inp.File) if err != nil { return nil, err } @@ -77,7 +73,7 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope return nil, err } - if err = i.assetRepo.Save(ctx, result); err != nil { + if err = i.repos.Asset.Save(ctx, result); err != nil { return } @@ -86,12 +82,12 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope } func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { - asset, err := i.assetRepo.FindByID(ctx, aid, operator.AllWritableTeams()) + asset, err := i.repos.Asset.FindByID(ctx, aid) if err != nil { return aid, err } - tx, err := i.transaction.Begin() + tx, err := i.repos.Transaction.Begin() if err != nil { return } @@ -101,7 +97,7 @@ func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Op } }() - team, err := i.teamRepo.FindByID(ctx, asset.Team()) + team, err := i.repos.Team.FindByID(ctx, asset.Team()) if err != nil { return aid, err } @@ -111,12 +107,12 @@ func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Op } if url, _ := url.Parse(asset.URL()); url != nil { - if err = i.file.RemoveAsset(ctx, url); err != nil { + if err = i.gateways.File.RemoveAsset(ctx, url); err != nil { return aid, err } } - if err = i.assetRepo.Remove(ctx, aid); err != nil { + if err = i.repos.Asset.Remove(ctx, aid); err != nil { return } diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index dcd3039ec..15b134497 100644 --- a/internal/usecase/interactor/common.go +++ 
b/internal/usecase/interactor/common.go @@ -55,16 +55,6 @@ func (common) OnlyOperator(op *usecase.Operator) error { return nil } -func (i common) IsMe(u id.UserID, op *usecase.Operator) error { - if err := i.OnlyOperator(op); err != nil { - return err - } - if op.User != u { - return interfaces.ErrOperationDenied - } - return nil -} - func (i common) CanReadTeam(t id.TeamID, op *usecase.Operator) error { if err := i.OnlyOperator(op); err != nil { return err @@ -105,20 +95,6 @@ func (i common) CanWriteScene(t id.SceneID, op *usecase.Operator) error { return nil } -func (i common) OnlyReadableScenes(op *usecase.Operator) ([]id.SceneID, error) { - if err := i.OnlyOperator(op); err != nil { - return nil, err - } - return op.AllReadableScenes(), nil -} - -func (i common) OnlyWritableScenes(op *usecase.Operator) ([]id.SceneID, error) { - if err := i.OnlyOperator(op); err != nil { - return nil, err - } - return op.AllWritableScenes(), nil -} - type commonSceneLock struct { sceneLockRepo repo.SceneLock } @@ -227,7 +203,7 @@ func (d ProjectDeleter) Delete(ctx context.Context, prj *project.Project, force } // Fetch scene - s, err := d.Scene.FindByProject(ctx, prj.ID(), operator.AllWritableTeams()) + s, err := d.Scene.FindByProject(ctx, prj.ID()) if err != nil && !errors.Is(err, rerror.ErrNotFound) { return err } diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index 27e122b29..cb5c3106e 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -63,11 +63,7 @@ func (i *Dataset) DynamicSchemaFields() []*dataset.SchemaField { } func (i *Dataset) UpdateDatasetSchema(ctx context.Context, inp interfaces.UpdateDatasetSchemaParam, operator *usecase.Operator) (_ *dataset.Schema, err error) { - scenes, err := i.OnlyWritableScenes(operator) - if err != nil { - return nil, err - } - schema, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaId, scenes) + schema, err := i.datasetSchemaRepo.FindByID(ctx, 
inp.SchemaId) if err != nil { return nil, err } @@ -223,7 +219,6 @@ func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name str } }() - scenes := []id.SceneID{sceneId} csv := dataset.NewCSVParser(content, name, separator) err = csv.Init() if err != nil { @@ -232,7 +227,7 @@ func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name str // replacment mode if schemaId != nil { - dss, err := i.datasetSchemaRepo.FindByID(ctx, *schemaId, scenes) + dss, err := i.datasetSchemaRepo.FindByID(ctx, *schemaId) if err != nil { return nil, err } @@ -283,7 +278,7 @@ func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name str for _, lg := range layergroups { if lg.Layers().LayerCount() > 0 { - children, err := i.layerRepo.FindByIDs(ctx, lg.Layers().Layers(), scenes) + children, err := i.layerRepo.FindByIDs(ctx, lg.Layers().Layers()) if err != nil { return nil, err } @@ -347,18 +342,10 @@ func (i *Dataset) importDataset(ctx context.Context, content io.Reader, name str } func (i *Dataset) Fetch(ctx context.Context, ids []id.DatasetID, operator *usecase.Operator) (dataset.List, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - return i.datasetRepo.FindByIDs(ctx, ids, scenes) + return i.datasetRepo.FindByIDs(ctx, ids) } func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, operator *usecase.Operator) (dataset.List, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } if depth < 0 || depth > 3 { return nil, interfaces.ErrDatasetInvalidDepth } @@ -367,7 +354,7 @@ func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, op next := id done := false for { - d, err := i.datasetRepo.FindByID(ctx, next, scenes) + d, err := i.datasetRepo.FindByID(ctx, next) if err != nil { return nil, err } @@ -384,20 +371,10 @@ func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, op } func 
(i *Dataset) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID, operator *usecase.Operator) (dataset.SchemaList, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.datasetSchemaRepo.FindByIDs(ctx, ids, scenes) + return i.datasetSchemaRepo.FindByIDs(ctx, ids) } func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, depth int, operator *usecase.Operator) (dataset.SchemaList, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - if depth < 0 || depth > 3 { return nil, interfaces.ErrDatasetInvalidDepth } @@ -407,7 +384,7 @@ func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, d next := id done := false for { - d, err := i.datasetSchemaRepo.FindByID(ctx, next, scenes) + d, err := i.datasetSchemaRepo.FindByID(ctx, next) if err != nil { return nil, err } @@ -425,12 +402,7 @@ func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, d } func (i *Dataset) FindBySchema(ctx context.Context, ds id.DatasetSchemaID, p *usecase.Pagination, operator *usecase.Operator) (dataset.List, *usecase.PageInfo, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, nil, err - } - - return i.datasetRepo.FindBySchema(ctx, ds, scenes, p) + return i.datasetRepo.FindBySchema(ctx, ds, p) } func (i *Dataset) FindSchemaByScene(ctx context.Context, sid id.SceneID, p *usecase.Pagination, operator *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) { @@ -558,18 +530,16 @@ func (i *Dataset) AddDatasetSchema(ctx context.Context, inp interfaces.AddDatase } func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.RemoveDatasetSchemaParam, operator *usecase.Operator) (_ id.DatasetSchemaID, err error) { - scenes, err := i.OnlyWritableScenes(operator) + s, err := i.datasetSchemaRepo.FindByID(ctx, inp.SchemaID) if err != nil { return inp.SchemaID, err } - s, err := 
i.datasetSchemaRepo.FindByID(ctx, inp.SchemaID, scenes) - if err != nil { - return inp.SchemaID, err - } - if s == nil { return inp.SchemaID, rerror.ErrNotFound } + if err := i.CanWriteScene(s.Scene(), operator); err != nil { + return inp.SchemaID, err + } datasets, err := i.datasetRepo.FindBySchemaAll(ctx, inp.SchemaID) if err != nil { @@ -620,7 +590,7 @@ func (i *Dataset) RemoveDatasetSchema(ctx context.Context, inp interfaces.Remove for _, lg := range layers.ToLayerGroupList() { lg.Unlink() - groupItems, err := i.layerRepo.FindItemByIDs(ctx, lg.Layers().Layers(), scenes) + groupItems, err := i.layerRepo.FindItemByIDs(ctx, lg.Layers().Layers()) if err != nil { return inp.SchemaID, err } diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index bc11ae644..e2f15c811 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -58,61 +58,31 @@ func NewLayer(r *repo.Container) interfaces.Layer { } func (i *Layer) Fetch(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) (layer.List, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.layerRepo.FindByIDs(ctx, ids, scenes) + return i.layerRepo.FindByIDs(ctx, ids) } func (i *Layer) FetchGroup(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Group, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.layerRepo.FindGroupByIDs(ctx, ids, scenes) + return i.layerRepo.FindGroupByIDs(ctx, ids) } func (i *Layer) FetchItem(ctx context.Context, ids []id.LayerID, operator *usecase.Operator) ([]*layer.Item, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.layerRepo.FindItemByIDs(ctx, ids, scenes) + return i.layerRepo.FindItemByIDs(ctx, ids) } func (i *Layer) FetchParent(ctx context.Context, pid id.LayerID, operator *usecase.Operator) 
(*layer.Group, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.layerRepo.FindParentByID(ctx, pid, scenes) + return i.layerRepo.FindParentByID(ctx, pid) } func (i *Layer) FetchByProperty(ctx context.Context, pid id.PropertyID, operator *usecase.Operator) (layer.Layer, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.layerRepo.FindByProperty(ctx, pid, scenes) + return i.layerRepo.FindByProperty(ctx, pid) } func (i *Layer) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - ids := []id.LayerID{org} if parent != nil { ids = append(ids, *parent) } - layers, err := i.layerRepo.FindByIDs(ctx, ids, scenes) + layers, err := i.layerRepo.FindByIDs(ctx, ids) if err != nil { return nil, err } @@ -136,16 +106,11 @@ func (i *Layer) FetchMerged(ctx context.Context, org id.LayerID, parent *id.Laye } func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operator *usecase.Operator) (*layer.Merged, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - orgl, err := i.layerRepo.FindItemByID(ctx, org, scenes) + orgl, err := i.layerRepo.FindItemByID(ctx, org) if err != nil { return nil, err } - parent, err := i.layerRepo.FindParentByID(ctx, org, scenes) + parent, err := i.layerRepo.FindParentByID(ctx, org) if err != nil { return nil, err } @@ -154,11 +119,7 @@ func (i *Layer) FetchParentAndMerged(ctx context.Context, org id.LayerID, operat } func (i *Layer) FetchByTag(ctx context.Context, tag id.TagID, operator *usecase.Operator) (layer.List, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - return i.layerRepo.FindByTag(ctx, tag, scenes) + return i.layerRepo.FindByTag(ctx, tag) } func (i *Layer) 
AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, operator *usecase.Operator) (_ *layer.Item, _ *layer.Group, err error) { @@ -172,16 +133,8 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o } }() - scenes, err := i.OnlyWritableScenes(operator) - if err != nil { - return nil, nil, err - } - - parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID, scenes) + parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID) if err != nil { - if errors.Is(err, rerror.ErrNotFound) { - return nil, nil, err - } return nil, nil, err } @@ -257,18 +210,11 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, } }() - scenes, err := i.OnlyWritableScenes(operator) + parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID) if err != nil { return nil, nil, err } - parentLayer, err := i.layerRepo.FindGroupByID(ctx, inp.ParentLayerID, scenes) - if err != nil { - return nil, nil, err - } - - layerScenes := []id.SceneID{parentLayer.Scene()} - // check scene lock if err := i.CheckSceneLock(ctx, parentLayer.Scene()); err != nil { return nil, nil, err @@ -295,7 +241,7 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, var datasetSchema *dataset.Schema var ds dataset.List if inp.LinkedDatasetSchemaID != nil { - datasetSchema2, err := i.datasetSchemaRepo.FindByID(ctx, *inp.LinkedDatasetSchemaID, layerScenes) + datasetSchema2, err := i.datasetSchemaRepo.FindByID(ctx, *inp.LinkedDatasetSchemaID) if err != nil { return nil, nil, err } @@ -436,9 +382,9 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, return layerGroup, parentLayer, nil } -func (i *Layer) fetchAllChildren(ctx context.Context, l layer.Layer, scenes []id.SceneID) ([]id.LayerID, []id.PropertyID, error) { +func (i *Layer) fetchAllChildren(ctx context.Context, l layer.Layer) ([]id.LayerID, []id.PropertyID, error) { lidl := 
layer.ToLayerGroup(l).Layers().Layers() - layers, err := i.layerRepo.FindByIDs(ctx, lidl, scenes) + layers, err := i.layerRepo.FindByIDs(ctx, lidl) if err != nil { return nil, nil, err } @@ -447,7 +393,7 @@ func (i *Layer) fetchAllChildren(ctx context.Context, l layer.Layer, scenes []id lg := layer.ToLayerGroup(*ll) li := layer.ToLayerItem(*ll) if lg != nil { - childrenLayers, childrenProperties, err := i.fetchAllChildren(ctx, lg, scenes) + childrenLayers, childrenProperties, err := i.fetchAllChildren(ctx, lg) if err != nil { return nil, nil, err } @@ -464,7 +410,6 @@ func (i *Layer) fetchAllChildren(ctx context.Context, l layer.Layer, scenes []id } func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Operator) (_ id.LayerID, _ *layer.Group, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -475,13 +420,11 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op } }() - scenes, err := i.OnlyWritableScenes(operator) + l, err := i.layerRepo.FindByID(ctx, lid) if err != nil { return lid, nil, err } - - l, err := i.layerRepo.FindByID(ctx, lid, scenes) - if err != nil { + if err := i.CanWriteScene(l.Scene(), operator); err != nil { return lid, nil, err } @@ -493,7 +436,7 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op return lid, nil, errors.New("root layer cannot be deleted") } - parentLayer, err := i.layerRepo.FindParentByID(ctx, lid, scenes) + parentLayer, err := i.layerRepo.FindParentByID(ctx, lid) if err != nil && err != rerror.ErrNotFound { return lid, nil, err } @@ -513,7 +456,7 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op return lid, nil, err } } - layers, properties, err := i.fetchAllChildren(ctx, l, scenes) + layers, properties, err := i.fetchAllChildren(ctx, l) if err != nil { return lid, nil, err } @@ -532,7 +475,6 @@ func (i *Layer) Remove(ctx context.Context, lid id.LayerID, operator *usecase.Op } func (i 
*Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, operator *usecase.Operator) (_ layer.Layer, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -543,13 +485,11 @@ func (i *Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, ope } }() - scenes, err := i.OnlyWritableScenes(operator) + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID) if err != nil { return nil, err } - - layer, err := i.layerRepo.FindByID(ctx, inp.LayerID, scenes) - if err != nil { + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { return nil, err } @@ -576,7 +516,6 @@ func (i *Layer) Update(ctx context.Context, inp interfaces.UpdateLayerInput, ope } func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operator *usecase.Operator) (_ id.LayerID, _ *layer.Group, _ *layer.Group, _ int, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -587,13 +526,11 @@ func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operato } }() - scenes, err := i.OnlyWritableScenes(operator) + parentLayer, err := i.layerRepo.FindParentByID(ctx, inp.LayerID) if err != nil { return inp.LayerID, nil, nil, -1, err } - - parentLayer, err := i.layerRepo.FindParentByID(ctx, inp.LayerID, scenes) - if err != nil { + if err := i.CanWriteScene(parentLayer.Scene(), operator); err != nil { return inp.LayerID, nil, nil, -1, err } @@ -608,7 +545,7 @@ func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operato } else if parentLayer.IsLinked() { return inp.LayerID, nil, nil, -1, interfaces.ErrLinkedLayerItemCannotBeMoved } else { - toParentLayer, err = i.layerRepo.FindGroupByID(ctx, *inp.DestLayerID, scenes) + toParentLayer, err = i.layerRepo.FindGroupByID(ctx, *inp.DestLayerID) if err != nil { return inp.LayerID, nil, nil, -1, err } @@ -640,7 +577,6 @@ func (i *Layer) Move(ctx context.Context, inp interfaces.MoveLayerInput, operato } func (i *Layer) CreateInfobox(ctx context.Context, 
lid id.LayerID, operator *usecase.Operator) (_ layer.Layer, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -651,13 +587,11 @@ func (i *Layer) CreateInfobox(ctx context.Context, lid id.LayerID, operator *use } }() - scenes, err := i.OnlyWritableScenes(operator) + l, err := i.layerRepo.FindByID(ctx, lid) if err != nil { return nil, err } - - l, err := i.layerRepo.FindByID(ctx, lid, scenes) - if err != nil { + if err := i.CanWriteScene(l.Scene(), operator); err != nil { return nil, err } @@ -704,13 +638,11 @@ func (i *Layer) RemoveInfobox(ctx context.Context, layerID id.LayerID, operator } }() - scenes, err := i.OnlyWritableScenes(operator) + layer, err := i.layerRepo.FindByID(ctx, layerID) if err != nil { return nil, err } - - layer, err := i.layerRepo.FindByID(ctx, layerID, scenes) - if err != nil { + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { return nil, err } @@ -751,13 +683,11 @@ func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFi } }() - scenes, err := i.OnlyWritableScenes(operator) + l, err := i.layerRepo.FindByID(ctx, inp.LayerID) if err != nil { return nil, nil, err } - - l, err := i.layerRepo.FindByID(ctx, inp.LayerID, scenes) - if err != nil { + if err := i.CanWriteScene(l.Scene(), operator); err != nil { return nil, nil, err } @@ -815,7 +745,6 @@ func (i *Layer) AddInfoboxField(ctx context.Context, inp interfaces.AddInfoboxFi } func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfoboxFieldParam, operator *usecase.Operator) (_ id.InfoboxFieldID, _ layer.Layer, _ int, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -826,13 +755,11 @@ func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfobox } }() - scenes, err := i.OnlyWritableScenes(operator) + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID) if err != nil { return inp.InfoboxFieldID, nil, -1, err } - - layer, err := i.layerRepo.FindByID(ctx, inp.LayerID, 
scenes) - if err != nil { + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { return inp.InfoboxFieldID, nil, -1, err } @@ -858,7 +785,6 @@ func (i *Layer) MoveInfoboxField(ctx context.Context, inp interfaces.MoveInfobox } func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInfoboxFieldParam, operator *usecase.Operator) (_ id.InfoboxFieldID, _ layer.Layer, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -869,13 +795,11 @@ func (i *Layer) RemoveInfoboxField(ctx context.Context, inp interfaces.RemoveInf } }() - scenes, err := i.OnlyWritableScenes(operator) + layer, err := i.layerRepo.FindByID(ctx, inp.LayerID) if err != nil { return inp.InfoboxFieldID, nil, err } - - layer, err := i.layerRepo.FindByID(ctx, inp.LayerID, scenes) - if err != nil { + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { return inp.InfoboxFieldID, nil, err } @@ -905,7 +829,7 @@ func (i *Layer) getPlugin(ctx context.Context, sid id.SceneID, p *id.PluginID, e return nil, nil, nil } - plugin, err := i.pluginRepo.FindByID(ctx, *p, []id.SceneID{sid}) + plugin, err := i.pluginRepo.FindByID(ctx, *p) if err != nil { if errors.Is(err, rerror.ErrNotFound) { return nil, nil, interfaces.ErrPluginNotFound @@ -926,6 +850,9 @@ func (i *Layer) getPlugin(ctx context.Context, sid id.SceneID, p *id.PluginID, e } func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam, operator *usecase.Operator) (_ layer.List, _ *layer.Group, err error) { + if inp.File == nil { + return nil, nil, interfaces.ErrFileNotIncluded + } tx, err := i.transaction.Begin() if err != nil { @@ -937,17 +864,14 @@ func (i *Layer) ImportLayer(ctx context.Context, inp interfaces.ImportLayerParam } }() - if inp.File == nil { - return nil, nil, interfaces.ErrFileNotIncluded - } - scenes, err := i.OnlyWritableScenes(operator) + parent, err := i.layerRepo.FindGroupByID(ctx, inp.LayerID) if err != nil { return nil, nil, err } - parent, err := 
i.layerRepo.FindGroupByID(ctx, inp.LayerID, scenes) - if err != nil { + if err := i.CanWriteScene(parent.Scene(), operator); err != nil { return nil, nil, err } + var decoder decoding.Decoder switch inp.Format { case decoding.LayerEncodingFormatKML: @@ -1028,18 +952,16 @@ func (i *Layer) AttachTag(ctx context.Context, layerID id.LayerID, tagID id.TagI } }() - scenes, err := i.OnlyWritableScenes(operator) + // ensure the tag exists + t, err := i.tagRepo.FindByID(ctx, tagID) if err != nil { return nil, err } - - // ensure the tag exists - t, err := i.tagRepo.FindByID(ctx, tagID, scenes) - if err != nil { + if err := i.CanWriteScene(t.Scene(), operator); err != nil { return nil, err } - l, err := i.layerRepo.FindByID(ctx, layerID, scenes) + l, err := i.layerRepo.FindByID(ctx, layerID) if err != nil { return nil, err } @@ -1076,13 +998,11 @@ func (i *Layer) DetachTag(ctx context.Context, layerID id.LayerID, tagID id.TagI } }() - scenes, err := i.OnlyWritableScenes(operator) + layer, err := i.layerRepo.FindByID(ctx, layerID) if err != nil { return nil, err } - - layer, err := i.layerRepo.FindByID(ctx, layerID, scenes) - if err != nil { + if err := i.CanWriteScene(layer.Scene(), operator); err != nil { return nil, err } diff --git a/internal/usecase/interactor/layer_test.go b/internal/usecase/interactor/layer_test.go index 7c9b084f6..7076886e6 100644 --- a/internal/usecase/interactor/layer_test.go +++ b/internal/usecase/interactor/layer_test.go @@ -24,13 +24,14 @@ func TestCreateInfobox(t *testing.T) { _ = db.Layer.Save(ctx, l) i, _ := il.CreateInfobox(ctx, l.ID(), &usecase.Operator{ - WritableTeams: []id.TeamID{scene.Team()}, + WritableScenes: []id.SceneID{scene.ID()}, }) assert.NotNil(t, i) - l, _ = db.Layer.FindItemByID(ctx, l.ID(), nil) + l, err := db.Layer.FindItemByID(ctx, l.ID()) + assert.NoError(t, err) infobox := l.Infobox() assert.NotNil(t, infobox) - property, _ := db.Property.FindByID(ctx, infobox.Property(), nil) + property, _ := db.Property.FindByID(ctx, 
infobox.Property()) assert.NotNil(t, property) assert.NotNil(t, property.Schema()) } diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index c9788c759..86fa504f1 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -39,24 +39,12 @@ func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { } func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - res, err := i.pluginRepo.FindByIDs(ctx, ids, scenes) - return res, err + return i.pluginRepo.FindByIDs(ctx, ids) } func (i *Plugin) FetchPluginMetadata(ctx context.Context, operator *usecase.Operator) ([]*plugin.Metadata, error) { if err := i.OnlyOperator(operator); err != nil { return nil, err } - - res, err := i.pluginRegistry.FetchMetadata(ctx) - if err != nil { - return nil, err - } - - return res, nil + return i.pluginRegistry.FetchMetadata(ctx) } diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index 443289a34..c5a574ef5 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -75,7 +75,7 @@ func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.Scene return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) + s, err := i.sceneRepo.FindByID(ctx, sid) if err != nil { return nil, nil, err } @@ -94,7 +94,7 @@ func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.Scene newpid := p.Manifest.Plugin.ID() oldpid := s.Plugins().PluginByName(newpid.Name()).PluginRef() if oldpid != nil { - oldPlugin, err := i.pluginRepo.FindByID(ctx, *oldpid, []id.SceneID{sid}) + oldPlugin, err := i.pluginRepo.FindByID(ctx, *oldpid) if err != nil { return nil, nil, err } @@ -233,12 +233,12 @@ func (i 
*Plugin) migrateScenePlugin(ctx context.Context, oldm manifest.Manifest, // delete layers, blocks and widgets for _, e := range diff.DeletedExtensions { - deletedProperties, err := i.deleteLayersByPluginExtension(ctx, s.ID().Ref(), diff.From, &e.ExtensionID) + deletedProperties, err := i.deleteLayersByPluginExtension(ctx, diff.From, &e.ExtensionID) if err != nil { return err } - if deletedProperties2, err := i.deleteBlocksByPluginExtension(ctx, s.ID().Ref(), diff.From, &e.ExtensionID); err != nil { + if deletedProperties2, err := i.deleteBlocksByPluginExtension(ctx, diff.From, &e.ExtensionID); err != nil { return err } else { deletedProperties = append(deletedProperties, deletedProperties2...) @@ -258,7 +258,7 @@ func (i *Plugin) migrateScenePlugin(ctx context.Context, oldm manifest.Manifest, } // migrate layers - if err := i.layerRepo.UpdatePlugin(ctx, diff.From, diff.To, []id.SceneID{s.ID()}); err != nil { + if err := i.layerRepo.UpdatePlugin(ctx, diff.From, diff.To); err != nil { return err } @@ -294,21 +294,16 @@ func (i *Plugin) migrateScenePlugin(ctx context.Context, oldm manifest.Manifest, return nil } -func (i *Plugin) deleteLayersByPluginExtension(ctx context.Context, sid *id.SceneID, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { - var scenes []id.SceneID - if sid != nil { - scenes = []id.SceneID{*sid} - } - +func (i *Plugin) deleteLayersByPluginExtension(ctx context.Context, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { // delete layers deletedLayers := []id.LayerID{} - layers, err := i.layerRepo.FindByPluginAndExtension(ctx, p, e, scenes) + layers, err := i.layerRepo.FindByPluginAndExtension(ctx, p, e) if err != nil { return nil, err } deletedLayers = append(deletedLayers, layers.IDs().Layers()...) 
- parentLayers, err := i.layerRepo.FindParentsByIDs(ctx, deletedLayers, scenes) + parentLayers, err := i.layerRepo.FindParentsByIDs(ctx, deletedLayers) if err != nil { return nil, err } @@ -326,13 +321,8 @@ func (i *Plugin) deleteLayersByPluginExtension(ctx context.Context, sid *id.Scen return layers.Properties(), nil } -func (i *Plugin) deleteBlocksByPluginExtension(ctx context.Context, sid *id.SceneID, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { - var scenes []id.SceneID - if sid != nil { - scenes = []id.SceneID{*sid} - } - - layers, err := i.layerRepo.FindByPluginAndExtensionOfBlocks(ctx, p, e, scenes) +func (i *Plugin) deleteBlocksByPluginExtension(ctx context.Context, p id.PluginID, e *id.PluginExtensionID) ([]id.PropertyID, error) { + layers, err := i.layerRepo.FindByPluginAndExtensionOfBlocks(ctx, p, e) if err != nil { return nil, err } diff --git a/internal/usecase/interactor/plugin_upload_test.go b/internal/usecase/interactor/plugin_upload_test.go index 1c04afb36..5817ed732 100644 --- a/internal/usecase/interactor/plugin_upload_test.go +++ b/internal/usecase/interactor/plugin_upload_test.go @@ -117,12 +117,12 @@ func TestPlugin_Upload_New(t *testing.T) { assert.Equal(t, pid, pl.ID()) // scene - nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) + nscene, err := repos.Scene.FindByID(ctx, scene.ID()) assert.NoError(t, err) assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) // plugin - npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) + npl, err := repos.Plugin.FindByID(ctx, pid) assert.NoError(t, err) assert.Equal(t, pid, npl.ID()) @@ -200,17 +200,17 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { assert.Equal(t, pid, pl.ID()) // scene - nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) + nscene, err := repos.Scene.FindByID(ctx, scene.ID()) assert.NoError(t, err) assert.True(t, nscene.Plugins().HasPlugin(pl.ID())) assert.Nil(t, nscene.Widgets().Widget(wid1)) - nlp2, err := 
repos.Property.FindByID(ctx, p1.ID(), nil) + nlp2, err := repos.Property.FindByID(ctx, p1.ID()) assert.Nil(t, nlp2) // deleted assert.Equal(t, rerror.ErrNotFound, err) // plugin - npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) + npl, err := repos.Plugin.FindByID(ctx, pid) assert.NoError(t, err) assert.Equal(t, pid, npl.ID()) @@ -231,15 +231,15 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { assert.Equal(t, "// barfoo", string(npfc)) // layer - nlp, err := repos.Property.FindByID(ctx, p1.ID(), nil) + nlp, err := repos.Property.FindByID(ctx, p1.ID()) assert.Nil(t, nlp) // deleted assert.Equal(t, rerror.ErrNotFound, err) - nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID(), nil) + nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID()) assert.Nil(t, nl) // deleted assert.Equal(t, rerror.ErrNotFound, err) - nrl, err := repos.Layer.FindGroupByID(ctx, rootLayer.ID(), nil) + nrl, err := repos.Layer.FindGroupByID(ctx, rootLayer.ID()) assert.NoError(t, err) assert.Equal(t, []layer.ID{}, nrl.Layers().Layers()) // deleted } @@ -326,7 +326,7 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { assert.Equal(t, pid, oldpl.ID()) // scene - nscene, err := repos.Scene.FindByID(ctx, scene.ID(), nil) + nscene, err := repos.Scene.FindByID(ctx, scene.ID()) assert.NoError(t, err) assert.False(t, nscene.Plugins().HasPlugin(oldpid)) assert.True(t, nscene.Plugins().HasPlugin(pid)) @@ -334,11 +334,11 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { assert.Equal(t, eid2, nscene.Widgets().Widget(wid).Extension()) // plugin - opl, err := repos.Plugin.FindByID(ctx, oldpid, []id.SceneID{scene.ID()}) + opl, err := repos.Plugin.FindByID(ctx, oldpid) assert.Nil(t, opl) // deleted assert.Equal(t, rerror.ErrNotFound, err) - npl, err := repos.Plugin.FindByID(ctx, pid, []id.SceneID{scene.ID()}) + npl, err := repos.Plugin.FindByID(ctx, pid) assert.NoError(t, err) assert.Equal(t, pid, npl.ID()) @@ -363,7 +363,7 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { 
assert.Equal(t, "// barfoo", string(npfc)) // layer - nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID(), nil) + nl, err := repos.Layer.FindByID(ctx, pluginLayer.ID()) assert.NoError(t, err) assert.Equal(t, pid, *nl.Plugin()) assert.Equal(t, eid1, *nl.Extension()) @@ -371,17 +371,17 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { assert.Equal(t, oldp2.ID(), nl.Infobox().Property()) assert.Equal(t, oldp3.ID(), nl.Infobox().FieldAt(0).Property()) - nlp, err := repos.Property.FindByID(ctx, *nl.Property(), nil) + nlp, err := repos.Property.FindByID(ctx, *nl.Property()) assert.NoError(t, err) assert.Equal(t, *nl.Property(), nlp.ID()) assert.Equal(t, nlpsid1, nlp.Schema()) assert.Equal(t, property.ValueTypeString.ValueFrom("100"), property.ToGroup(nlp.ItemBySchema("default")).Field("field").Value()) - nlp2, err := repos.Property.FindByID(ctx, oldp2.ID(), nil) + nlp2, err := repos.Property.FindByID(ctx, oldp2.ID()) assert.NoError(t, err) assert.Equal(t, nlpsid1, nlp2.Schema()) - nlp3, err := repos.Property.FindByID(ctx, oldp3.ID(), nil) + nlp3, err := repos.Property.FindByID(ctx, oldp3.ID()) assert.NoError(t, err) assert.Equal(t, nlpsid1, nlp3.Schema()) } diff --git a/internal/usecase/interactor/project.go b/internal/usecase/interactor/project.go index 83bcebabb..0ba5c8a7b 100644 --- a/internal/usecase/interactor/project.go +++ b/internal/usecase/interactor/project.go @@ -53,20 +53,18 @@ func NewProject(r *repo.Container, gr *gateway.Container) interfaces.Project { } func (i *Project) Fetch(ctx context.Context, ids []id.ProjectID, operator *usecase.Operator) ([]*project.Project, error) { - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - return i.projectRepo.FindByIDs(ctx, ids, operator.AllReadableTeams()) + return i.projectRepo.FindByIDs(ctx, ids) } func (i *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*project.Project, *usecase.PageInfo, error) { - if err := 
i.CanReadTeam(id, operator); err != nil { - return nil, nil, err - } return i.projectRepo.FindByTeam(ctx, id, p) } func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, operator *usecase.Operator) (_ *project.Project, err error) { + if err := i.CanWriteTeam(p.TeamID, operator); err != nil { + return nil, err + } + tx, err := i.transaction.Begin() if err != nil { return @@ -77,10 +75,6 @@ func (i *Project) Create(ctx context.Context, p interfaces.CreateProjectParam, o } }() - if err := i.CanWriteTeam(p.TeamID, operator); err != nil { - return nil, err - } - pb := project.New(). NewID(). Team(p.TeamID). @@ -126,15 +120,10 @@ func (i *Project) Update(ctx context.Context, p interfaces.UpdateProjectParam, o } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - - prj, err := i.projectRepo.FindByID(ctx, p.ID, operator.AllWritableTeams()) + prj, err := i.projectRepo.FindByID(ctx, p.ID) if err != nil { return nil, err } - if err := i.CanWriteTeam(prj.Team(), operator); err != nil { return nil, err } @@ -236,20 +225,15 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - - prj, err := i.projectRepo.FindByID(ctx, params.ID, operator.AllWritableTeams()) + prj, err := i.projectRepo.FindByID(ctx, params.ID) if err != nil { return nil, err } - if err := i.CanWriteTeam(prj.Team(), operator); err != nil { return nil, err } - s, err := i.sceneRepo.FindByProject(ctx, params.ID, operator.AllWritableTeams()) + s, err := i.sceneRepo.FindByProject(ctx, params.ID) if err != nil { return nil, err } @@ -312,10 +296,10 @@ func (i *Project) Publish(ctx context.Context, params interfaces.PublishProjectP }() err = builder.New( - repo.LayerLoaderFrom(i.layerRepo, scenes), - repo.PropertyLoaderFrom(i.propertyRepo, scenes), - repo.DatasetGraphLoaderFrom(i.datasetRepo, scenes), - repo.TagLoaderFrom(i.tagRepo, scenes), + 
repo.LayerLoaderFrom(i.layerRepo), + repo.PropertyLoaderFrom(i.propertyRepo), + repo.DatasetGraphLoaderFrom(i.datasetRepo), + repo.TagLoaderFrom(i.tagRepo), repo.TagSceneLoaderFrom(i.tagRepo, scenes), ).BuildScene(ctx, w, s, time.Now()) }() @@ -356,15 +340,10 @@ func (i *Project) Delete(ctx context.Context, projectID id.ProjectID, operator * } }() - if err := i.OnlyOperator(operator); err != nil { - return err - } - - prj, err := i.projectRepo.FindByID(ctx, projectID, operator.AllWritableTeams()) + prj, err := i.projectRepo.FindByID(ctx, projectID) if err != nil { return err } - if err := i.CanWriteTeam(prj.Team(), operator); err != nil { return err } diff --git a/internal/usecase/interactor/property.go b/internal/usecase/interactor/property.go index 418243bfe..54e29fc9e 100644 --- a/internal/usecase/interactor/property.go +++ b/internal/usecase/interactor/property.go @@ -42,28 +42,14 @@ func NewProperty(r *repo.Container, gr *gateway.Container) interfaces.Property { } func (i *Property) Fetch(ctx context.Context, ids []id.PropertyID, operator *usecase.Operator) ([]*property.Property, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.propertyRepo.FindByIDs(ctx, ids, scenes) + return i.propertyRepo.FindByIDs(ctx, ids) } func (i *Property) FetchSchema(ctx context.Context, ids []id.PropertySchemaID, operator *usecase.Operator) ([]*property.Schema, error) { - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - res, err := i.propertySchemaRepo.FindByIDs(ctx, ids) - return res, err + return i.propertySchemaRepo.FindByIDs(ctx, ids) } func (i *Property) FetchMerged(ctx context.Context, org, parent *id.PropertyID, linked *id.DatasetID, operator *usecase.Operator) (*property.Merged, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - ids := []id.PropertyID{} if org != nil { ids = append(ids, *org) @@ -71,7 +57,7 @@ func (i *Property) FetchMerged(ctx 
context.Context, org, parent *id.PropertyID, if parent != nil { ids = append(ids, *parent) } - props, err := i.propertyRepo.FindByIDs(ctx, ids, scenes) + props, err := i.propertyRepo.FindByIDs(ctx, ids) if err != nil { return nil, err } @@ -102,13 +88,11 @@ func (i *Property) UpdateValue(ctx context.Context, inp interfaces.UpdatePropert } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, nil, nil, nil, err } - - p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, nil, nil, nil, err } @@ -136,7 +120,6 @@ func (i *Property) UpdateValue(ctx context.Context, inp interfaces.UpdatePropert } func (i *Property) RemoveField(ctx context.Context, inp interfaces.RemovePropertyFieldParam, operator *usecase.Operator) (p *property.Property, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -147,13 +130,11 @@ func (i *Property) RemoveField(ctx context.Context, inp interfaces.RemovePropert } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, err } - - p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, err } @@ -188,13 +169,11 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara return nil, nil, nil, nil, interfaces.ErrInvalidFile } - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, nil, nil, nil, err } - - p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, nil, nil, nil, err } @@ -202,7 +181,7 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara 
return nil, nil, nil, nil, err } - propertyScene, err := i.sceneRepo.FindByID(ctx, p.Scene(), operator.AllWritableTeams()) + propertyScene, err := i.sceneRepo.FindByID(ctx, p.Scene()) if err != nil { return nil, nil, nil, nil, err } @@ -255,7 +234,6 @@ func (i *Property) UploadFile(ctx context.Context, inp interfaces.UploadFilePara } func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyValueParam, operator *usecase.Operator) (p *property.Property, pgl *property.GroupList, pg *property.Group, field *property.Field, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -266,13 +244,11 @@ func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyVal } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, nil, nil, nil, err } - - p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, nil, nil, nil, err } @@ -287,16 +263,14 @@ func (i *Property) LinkValue(ctx context.Context, inp interfaces.LinkPropertyVal field, pgl, pg, _ = p.GetOrCreateField(ps, inp.Pointer) - propertyScenes := []id.SceneID{p.Scene()} - if inp.Links != nil { dsids := inp.Links.DatasetSchemaIDs() dids := inp.Links.DatasetIDs() - dss, err := i.datasetSchemaRepo.FindByIDs(ctx, dsids, propertyScenes) + dss, err := i.datasetSchemaRepo.FindByIDs(ctx, dsids) if err != nil { return nil, nil, nil, nil, err } - ds, err := i.datasetRepo.FindByIDs(ctx, dids, propertyScenes) + ds, err := i.datasetRepo.FindByIDs(ctx, dids) if err != nil { return nil, nil, nil, nil, err } @@ -328,13 +302,11 @@ func (i *Property) UnlinkValue(ctx context.Context, inp interfaces.UnlinkPropert } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, nil, nil, nil, err } - - p, err = i.propertyRepo.FindByID(ctx, 
inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, nil, nil, nil, err } @@ -366,7 +338,6 @@ func (i *Property) UnlinkValue(ctx context.Context, inp interfaces.UnlinkPropert } func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, pg *property.Group, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -377,13 +348,11 @@ func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemPa } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, nil, nil, err } - - p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, nil, nil, err } @@ -416,7 +385,6 @@ func (i *Property) AddItem(ctx context.Context, inp interfaces.AddPropertyItemPa } func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItemParam, operator *usecase.Operator) (p *property.Property, _ *property.GroupList, _ *property.Group, err error) { - tx, err := i.transaction.Begin() if err != nil { return @@ -427,13 +395,11 @@ func (i *Property) MoveItem(ctx context.Context, inp interfaces.MovePropertyItem } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, nil, nil, err } - - p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, nil, nil, err } @@ -466,13 +432,11 @@ func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemoveProperty } }() - scenes, err := i.OnlyWritableScenes(operator) + p, err = i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, err } - - p, err = i.propertyRepo.FindByID(ctx, 
inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, err } @@ -494,13 +458,11 @@ func (i *Property) RemoveItem(ctx context.Context, inp interfaces.RemoveProperty } func (i *Property) UpdateItems(ctx context.Context, inp interfaces.UpdatePropertyItemsParam, operator *usecase.Operator) (*property.Property, error) { - scenes, err := i.OnlyWritableScenes(operator) + p, err := i.propertyRepo.FindByID(ctx, inp.PropertyID) if err != nil { return nil, err } - - p, err := i.propertyRepo.FindByID(ctx, inp.PropertyID, scenes) - if err != nil { + if err := i.CanWriteScene(p.Scene(), operator); err != nil { return nil, err } diff --git a/internal/usecase/interactor/property_test.go b/internal/usecase/interactor/property_test.go index b6875d619..0db04fdf9 100644 --- a/internal/usecase/interactor/property_test.go +++ b/internal/usecase/interactor/property_test.go @@ -39,8 +39,8 @@ func TestProperty_AddItem(t *testing.T) { transaction: memory.Transaction, } op := &usecase.Operator{ - ReadableTeams: []id.TeamID{team}, - WritableTeams: []id.TeamID{team}, + ReadableScenes: []id.SceneID{scene.ID()}, + WritableScenes: []id.SceneID{scene.ID()}, } index := -1 @@ -61,7 +61,7 @@ func TestProperty_AddItem(t *testing.T) { assert.Equal(t, npg, npl.GroupAt(0)) assert.Equal(t, 1, len(npl.Groups())) - np2, _ := memory.Property.FindByID(ctx, p.ID(), nil) + np2, _ := memory.Property.FindByID(ctx, p.ID()) assert.Equal(t, np, np2) } @@ -91,8 +91,8 @@ func TestProperty_RemoveItem(t *testing.T) { transaction: memory.Transaction, } op := &usecase.Operator{ - ReadableTeams: []id.TeamID{team}, - WritableTeams: []id.TeamID{team}, + ReadableScenes: []id.SceneID{scene.ID()}, + WritableScenes: []id.SceneID{scene.ID()}, } np, err := uc.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ @@ -106,7 +106,7 @@ func TestProperty_RemoveItem(t *testing.T) { npl := property.ToGroupList(np.ItemBySchema(psg.ID())) assert.Equal(t, 0, len(npl.Groups())) - 
np2, _ := memory.Property.FindByID(ctx, p.ID(), nil) + np2, _ := memory.Property.FindByID(ctx, p.ID()) assert.Equal(t, np, np2) } @@ -159,6 +159,6 @@ func TestProperty_UpdateValue_FieldOfGroupInList(t *testing.T) { assert.Equal(t, psf.ID(), npf.Field()) assert.Equal(t, property.ValueTypeString.ValueFrom("aaaa"), npf.Value()) - np2, _ := memory.Property.FindByID(ctx, p.ID(), nil) + np2, _ := memory.Property.FindByID(ctx, p.ID()) assert.Equal(t, np, np2) } diff --git a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index 348216664..fe8dc936c 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -52,18 +52,11 @@ func NewScene(r *repo.Container, g *gateway.Container) interfaces.Scene { } func (i *Scene) Fetch(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]*scene.Scene, error) { - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - return i.sceneRepo.FindByIDs(ctx, ids, operator.AllReadableTeams()) + return i.sceneRepo.FindByIDs(ctx, ids) } func (i *Scene) FindByProject(ctx context.Context, id id.ProjectID, operator *usecase.Operator) (*scene.Scene, error) { - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - res, err := i.sceneRepo.FindByProject(ctx, id, operator.AllReadableTeams()) - return res, err + return i.sceneRepo.FindByProject(ctx, id) } func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase.Operator) (_ *scene.Scene, err error) { @@ -77,15 +70,10 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. 
} }() - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - - prj, err := i.projectRepo.FindByID(ctx, pid, operator.AllWritableTeams()) + prj, err := i.projectRepo.FindByID(ctx, pid) if err != nil { return nil, err } - if err := i.CanWriteTeam(prj.Team(), operator); err != nil { return nil, err } @@ -164,16 +152,7 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, nil, interfaces.ErrOperationDenied - } - - // check scene lock - if err := i.CheckSceneLock(ctx, sid); err != nil { - return nil, nil, err - } - - s, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) + s, err := i.sceneRepo.FindByID(ctx, sid) if err != nil { return nil, nil, err } @@ -181,6 +160,11 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, return nil, nil, err } + // check scene lock + if err := i.CheckSceneLock(ctx, sid); err != nil { + return nil, nil, err + } + _, extension, err := i.getPlugin(ctx, sid, pid, eid) if err != nil { return nil, nil, err @@ -260,16 +244,7 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, nil, interfaces.ErrOperationDenied - } - - // check scene lock - if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { - return nil, nil, err - } - - scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.AllWritableTeams()) + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID) if err2 != nil { return nil, nil, err2 } @@ -277,6 +252,11 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return nil, nil, err } + // check scene lock + if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { + return nil, nil, err + } + widget := scene.Widgets().Widget(param.WidgetID) if widget == nil { return nil, nil, rerror.ErrNotFound @@ -342,16 +322,7 @@ func (i 
*Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.Up } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, interfaces.ErrOperationDenied - } - - // check scene lock - if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { - return nil, err - } - - scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID, operator.AllWritableTeams()) + scene, err2 := i.sceneRepo.FindByID(ctx, param.SceneID) if err2 != nil { return nil, err2 } @@ -359,6 +330,11 @@ func (i *Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.Up return nil, err } + // check scene lock + if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { + return nil, err + } + area := scene.Widgets().Alignment().Area(param.Location) if area == nil { @@ -388,11 +364,7 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, interfaces.ErrOperationDenied - } - - scene, err2 := i.sceneRepo.FindByID(ctx, id, operator.AllWritableTeams()) + scene, err2 := i.sceneRepo.FindByID(ctx, id) if err2 != nil { return nil, err2 } @@ -440,11 +412,7 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin } }() - if operator == nil { - return nil, pid, nil, interfaces.ErrOperationDenied - } - - s, err2 := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) + s, err2 := i.sceneRepo.FindByID(ctx, sid) if err2 != nil { return nil, pid, nil, err2 } @@ -461,7 +429,7 @@ func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plugin return nil, pid, nil, interfaces.ErrPluginAlreadyInstalled } - plugin, err := i.pluginRepo.FindByID(ctx, pid, []id.SceneID{sid}) + plugin, err := i.pluginRepo.FindByID(ctx, pid) if err != nil { if errors.Is(err2, rerror.ErrNotFound) { return nil, pid, nil, interfaces.ErrPluginNotFound @@ -516,11 +484,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug } }() - if err 
:= i.OnlyOperator(operator); err != nil { - return nil, err - } - - scene, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) + scene, err := i.sceneRepo.FindByID(ctx, sid) if err != nil { return nil, err } @@ -528,7 +492,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug return nil, err } - pl, err := i.pluginRepo.FindByID(ctx, pid, []id.SceneID{sid}) + pl, err := i.pluginRepo.FindByID(ctx, pid) if err != nil { return nil, err } @@ -556,7 +520,7 @@ func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.Plug // remove layers and blocks res, err := layerops.Processor{ - LayerLoader: repo.LayerLoaderFrom(i.layerRepo, []id.SceneID{sid}), + LayerLoader: repo.LayerLoaderFrom(i.layerRepo), RootLayerID: scene.RootLayer(), }.UninstallPlugin(ctx, pid) if err != nil { @@ -618,11 +582,7 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - - s, err := i.sceneRepo.FindByID(ctx, sid, operator.AllWritableTeams()) + s, err := i.sceneRepo.FindByID(ctx, sid) if err != nil { return nil, err } @@ -636,11 +596,10 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, defer i.ReleaseSceneLock(ctx, sid) - scenes := []id.SceneID{s.ID()} pluginMigrator := sceneops.PluginMigrator{ - Property: repo.PropertyLoaderFrom(i.propertyRepo, scenes), + Property: repo.PropertyLoaderFrom(i.propertyRepo), PropertySchema: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), - Dataset: repo.DatasetLoaderFrom(i.datasetRepo, scenes), + Dataset: repo.DatasetLoaderFrom(i.datasetRepo), Layer: repo.LayerLoaderBySceneFrom(i.layerRepo), Plugin: repo.PluginLoaderFrom(i.pluginRepo), } @@ -668,7 +627,7 @@ func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, } func (i *Scene) getPlugin(ctx context.Context, sid id.SceneID, p id.PluginID, e id.PluginExtensionID) (*plugin.Plugin, 
*plugin.Extension, error) { - plugin, err2 := i.pluginRepo.FindByID(ctx, p, []id.SceneID{sid}) + plugin, err2 := i.pluginRepo.FindByID(ctx, p) if err2 != nil { if errors.Is(err2, rerror.ErrNotFound) { return nil, nil, interfaces.ErrPluginNotFound @@ -695,16 +654,15 @@ func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, nil, interfaces.ErrOperationDenied + s, err := i.sceneRepo.FindByID(ctx, sceneID) + if err != nil { + return nil, nil, err } - - if err := i.CheckSceneLock(ctx, sceneID); err != nil { + if err := i.CanWriteTeam(s.Team(), operator); err != nil { return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.AllWritableTeams()) - if err != nil { + if err := i.CheckSceneLock(ctx, sceneID); err != nil { return nil, nil, err } @@ -725,8 +683,7 @@ func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, return nil, nil, err } - err = i.sceneRepo.Save(ctx, s) - if err != nil { + if err := i.sceneRepo.Save(ctx, s); err != nil { return nil, nil, err } @@ -745,18 +702,18 @@ func (i *Scene) UpdateCluster(ctx context.Context, param interfaces.UpdateCluste } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, nil, interfaces.ErrOperationDenied + s, err := i.sceneRepo.FindByID(ctx, param.SceneID) + if err != nil { + return nil, nil, err } - - if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { + if err := i.CanWriteTeam(s.Team(), operator); err != nil { return nil, nil, err } - s, err := i.sceneRepo.FindByID(ctx, param.SceneID, operator.AllWritableTeams()) - if err != nil { + if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { return nil, nil, err } + cluster := s.Clusters().Get(param.ClusterID) if cluster == nil { return nil, nil, rerror.ErrNotFound @@ -768,8 +725,7 @@ func (i *Scene) UpdateCluster(ctx context.Context, param interfaces.UpdateCluste cluster.UpdateProperty(*param.PropertyID) } 
- err = i.sceneRepo.Save(ctx, s) - if err != nil { + if err := i.sceneRepo.Save(ctx, s); err != nil { return nil, nil, err } @@ -788,22 +744,21 @@ func (i *Scene) RemoveCluster(ctx context.Context, sceneID id.SceneID, clusterID } }() - if err := i.OnlyOperator(operator); err != nil { - return nil, interfaces.ErrOperationDenied + s, err := i.sceneRepo.FindByID(ctx, sceneID) + if err != nil { + return nil, err } - - if err := i.CheckSceneLock(ctx, sceneID); err != nil { + if err := i.CanWriteTeam(s.Team(), operator); err != nil { return nil, err } - s, err := i.sceneRepo.FindByID(ctx, sceneID, operator.AllWritableTeams()) - if err != nil { + if err := i.CheckSceneLock(ctx, sceneID); err != nil { return nil, err } + s.Clusters().Remove(clusterID) - err = i.sceneRepo.Save(ctx, s) - if err != nil { + if err := i.sceneRepo.Save(ctx, s); err != nil { return nil, err } diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go index d0bf305eb..21b54e67b 100644 --- a/internal/usecase/interactor/tag.go +++ b/internal/usecase/interactor/tag.go @@ -47,7 +47,7 @@ func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, var parent *tag.Group if inp.Parent != nil { - parent, err = i.tagRepo.FindGroupByID(ctx, *inp.Parent, []id.SceneID{inp.SceneID}) + parent, err = i.tagRepo.FindGroupByID(ctx, *inp.Parent) if err != nil { return nil, nil, err } @@ -123,38 +123,19 @@ func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupPara } func (i *Tag) Fetch(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Tag, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.tagRepo.FindByIDs(ctx, ids, scenes) + return i.tagRepo.FindByIDs(ctx, ids) } func (i *Tag) FetchByScene(ctx context.Context, sid id.SceneID, operator *usecase.Operator) ([]*tag.Tag, error) { - if err := i.CanReadScene(sid, operator); err != nil { - return nil, err - } - return 
i.tagRepo.FindRootsByScene(ctx, sid) } func (i *Tag) FetchItem(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Item, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.tagRepo.FindItemByIDs(ctx, ids, scenes) + return i.tagRepo.FindItemByIDs(ctx, ids) } func (i *Tag) FetchGroup(ctx context.Context, ids []id.TagID, operator *usecase.Operator) ([]*tag.Group, error) { - scenes, err := i.OnlyReadableScenes(operator) - if err != nil { - return nil, err - } - - return i.tagRepo.FindGroupByIDs(ctx, ids, scenes) + return i.tagRepo.FindGroupByIDs(ctx, ids) } func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemToGroupParam, operator *usecase.Operator) (*tag.Group, error) { @@ -168,21 +149,19 @@ func (i *Tag) AttachItemToGroup(ctx context.Context, inp interfaces.AttachItemTo } }() - scenes, err := i.OnlyWritableScenes(operator) + // make sure item exist + ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID) if err != nil { return nil, err } - - // make sure item exist - ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes) - if err != nil { + if err := i.CanWriteScene(ti.Scene(), operator); err != nil { return nil, err } if ti.Parent() != nil { return nil, errors.New("tag is already added to the group") } - tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID, scenes) + tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID) if err != nil { return nil, err } @@ -215,18 +194,16 @@ func (i *Tag) DetachItemFromGroup(ctx context.Context, inp interfaces.DetachItem } }() - scenes, err := i.OnlyWritableScenes(operator) + // make sure item exist + ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID) if err != nil { return nil, err } - - // make sure item exist - ti, err := i.tagRepo.FindItemByID(ctx, inp.ItemID, scenes) - if err != nil { + if err := i.CanWriteScene(ti.Scene(), operator); err != nil { return nil, err } - tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID, scenes) 
+ tg, err := i.tagRepo.FindGroupByID(ctx, inp.GroupID) if err != nil { return nil, err } @@ -259,14 +236,13 @@ func (i *Tag) UpdateTag(ctx context.Context, inp interfaces.UpdateTagParam, oper } }() - if err := i.CanWriteScene(inp.SceneID, operator); err != nil { - return nil, interfaces.ErrOperationDenied - } - - tg, err := i.tagRepo.FindByID(ctx, inp.TagID, []id.SceneID{inp.SceneID}) + tg, err := i.tagRepo.FindByID(ctx, inp.TagID) if err != nil { return nil, err } + if err := i.CanWriteScene(tg.Scene(), operator); err != nil { + return nil, err + } if inp.Label != nil { tg.Rename(*inp.Label) @@ -291,13 +267,11 @@ func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Oper } }() - scenes, err := i.OnlyWritableScenes(operator) + t, err := i.tagRepo.FindByID(ctx, tagID) if err != nil { return nil, nil, err } - - t, err := i.tagRepo.FindByID(ctx, tagID, scenes) - if err != nil { + if err := i.CanWriteScene(t.Scene(), operator); err != nil { return nil, nil, err } @@ -308,7 +282,7 @@ func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Oper } if item := tag.ToTagItem(t); item != nil { - g, err := i.tagRepo.FindGroupByItem(ctx, item.ID(), scenes) + g, err := i.tagRepo.FindGroupByItem(ctx, item.ID()) if err != nil && !errors.Is(rerror.ErrNotFound, err) { return nil, nil, err } @@ -320,7 +294,7 @@ func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Oper } } - ls, err := i.layerRepo.FindByTag(ctx, tagID, scenes) + ls, err := i.layerRepo.FindByTag(ctx, tagID) if err != nil && !errors.Is(rerror.ErrNotFound, err) { return nil, nil, err } diff --git a/internal/usecase/interfaces/tag.go b/internal/usecase/interfaces/tag.go index 0aec3dbbe..bc18e7e7c 100644 --- a/internal/usecase/interfaces/tag.go +++ b/internal/usecase/interfaces/tag.go @@ -38,9 +38,8 @@ type DetachItemToGroupParam struct { } type UpdateTagParam struct { - Label *string - SceneID id.SceneID - TagID id.TagID + Label *string + TagID id.TagID } 
type Tag interface { diff --git a/internal/usecase/pageinfo.go b/internal/usecase/pageinfo.go index fa6a0ace3..53fa8f5b6 100644 --- a/internal/usecase/pageinfo.go +++ b/internal/usecase/pageinfo.go @@ -27,6 +27,16 @@ func NewPageInfo(totalCount int, startCursor *Cursor, endCursor *Cursor, hasNext } } +func EmptyPageInfo() *PageInfo { + return &PageInfo{ + totalCount: 0, + startCursor: nil, + endCursor: nil, + hasNextPage: false, + hasPreviousPage: false, + } +} + func (p *PageInfo) TotalCount() int { if p == nil { return 0 diff --git a/internal/usecase/repo/asset.go b/internal/usecase/repo/asset.go index 7648fd5c7..e14a42d6c 100644 --- a/internal/usecase/repo/asset.go +++ b/internal/usecase/repo/asset.go @@ -9,9 +9,10 @@ import ( ) type Asset interface { + Filtered(TeamFilter) Asset + FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) + FindByID(context.Context, id.AssetID) (*asset.Asset, error) + FindByIDs(context.Context, []id.AssetID) ([]*asset.Asset, error) Save(context.Context, *asset.Asset) error Remove(context.Context, id.AssetID) error - FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) - FindByID(context.Context, id.AssetID, []id.TeamID) (*asset.Asset, error) - FindByIDs(context.Context, []id.AssetID, []id.TeamID) ([]*asset.Asset, error) } diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index e29c96baf..6e5bee8ed 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -1,5 +1,17 @@ package repo +import ( + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" +) + +var ( + ErrOperationDenied = errors.New("operation denied") +) + type Container struct { Asset Asset AuthRequest AuthRequest @@ -7,15 +19,91 @@ type Container struct { DatasetSchema DatasetSchema Dataset Dataset 
Layer Layer + Lock Lock Plugin Plugin Project Project PropertySchema PropertySchema Property Property Scene Scene + SceneLock SceneLock Tag Tag Team Team - User User - SceneLock SceneLock Transaction Transaction - Lock Lock + User User +} + +func (c Container) Filtered(team TeamFilter, scene SceneFilter) Container { + return Container{ + Asset: c.Asset.Filtered(team), + AuthRequest: c.AuthRequest, + Config: c.Config, + DatasetSchema: c.DatasetSchema.Filtered(scene), + Dataset: c.Dataset.Filtered(scene), + Layer: c.Layer.Filtered(scene), + Lock: c.Lock, + Plugin: c.Plugin.Filtered(scene), + Project: c.Project.Filtered(team), + PropertySchema: c.PropertySchema.Filtered(scene), + Property: c.Property.Filtered(scene), + Scene: c.Scene.Filtered(team), + SceneLock: c.SceneLock, + Tag: c.Tag.Filtered(scene), + Team: c.Team, + Transaction: c.Transaction, + User: c.User, + } +} + +type TeamFilter struct { + Readable user.TeamIDList + Writable user.TeamIDList +} + +func TeamFilterFromOperator(o *usecase.Operator) TeamFilter { + return TeamFilter{ + Readable: o.AllReadableTeams(), + Writable: o.AllWritableTeams(), + } +} + +func (f TeamFilter) Clone() TeamFilter { + return TeamFilter{ + Readable: f.Readable.Clone(), + Writable: f.Writable.Clone(), + } +} + +func (f TeamFilter) CanRead(id user.TeamID) bool { + return f.Readable == nil || f.Readable.Includes(id) +} + +func (f TeamFilter) CanWrite(id user.TeamID) bool { + return f.Writable == nil || f.Writable.Includes(id) +} + +type SceneFilter struct { + Readable scene.IDList + Writable scene.IDList +} + +func SceneFilterFromOperator(o *usecase.Operator) SceneFilter { + return SceneFilter{ + Readable: o.AllReadableScenes(), + Writable: o.AllWritableScenes(), + } +} + +func (f SceneFilter) Clone() SceneFilter { + return SceneFilter{ + Readable: f.Readable.Clone(), + Writable: f.Writable.Clone(), + } +} + +func (f SceneFilter) CanRead(id scene.ID) bool { + return f.Readable == nil || f.Readable.Includes(id) +} + +func (f 
SceneFilter) CanWrite(id scene.ID) bool { + return f.Writable == nil || f.Writable.Includes(id) } diff --git a/internal/usecase/repo/dataset.go b/internal/usecase/repo/dataset.go index 9cb0df1bf..8a5a1f09b 100644 --- a/internal/usecase/repo/dataset.go +++ b/internal/usecase/repo/dataset.go @@ -9,11 +9,12 @@ import ( ) type Dataset interface { - FindByID(context.Context, id.DatasetID, []id.SceneID) (*dataset.Dataset, error) - FindByIDs(context.Context, []id.DatasetID, []id.SceneID) (dataset.List, error) - FindBySchema(context.Context, id.DatasetSchemaID, []id.SceneID, *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) + Filtered(SceneFilter) Dataset + FindByID(context.Context, id.DatasetID) (*dataset.Dataset, error) + FindByIDs(context.Context, []id.DatasetID) (dataset.List, error) + FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) FindBySchemaAll(context.Context, id.DatasetSchemaID) (dataset.List, error) - FindGraph(context.Context, id.DatasetID, []id.SceneID, []id.DatasetSchemaFieldID) (dataset.List, error) + FindGraph(context.Context, id.DatasetID, []id.DatasetSchemaFieldID) (dataset.List, error) Save(context.Context, *dataset.Dataset) error SaveAll(context.Context, dataset.List) error Remove(context.Context, id.DatasetID) error @@ -21,16 +22,16 @@ type Dataset interface { RemoveByScene(context.Context, id.SceneID) error } -func DatasetLoaderFrom(r Dataset, scenes []id.SceneID) dataset.Loader { +func DatasetLoaderFrom(r Dataset) dataset.Loader { return func(ctx context.Context, ids ...id.DatasetID) (dataset.List, error) { - return r.FindByIDs(ctx, ids, scenes) + return r.FindByIDs(ctx, ids) } } -func DatasetGraphLoaderFrom(r Dataset, scenes []id.SceneID) dataset.GraphLoader { +func DatasetGraphLoaderFrom(r Dataset) dataset.GraphLoader { return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (dataset.List, *dataset.Field, error) { if len(fields) <= 1 { - d, 
err := r.FindByID(ctx, root, scenes) + d, err := r.FindByID(ctx, root) if err != nil { return nil, nil, err } @@ -41,7 +42,7 @@ func DatasetGraphLoaderFrom(r Dataset, scenes []id.SceneID) dataset.GraphLoader return dataset.List{d}, field, nil } - list2, err := r.FindGraph(ctx, root, scenes, fields) + list2, err := r.FindGraph(ctx, root, fields) if err != nil { return nil, nil, err } diff --git a/internal/usecase/repo/dataset_schema.go b/internal/usecase/repo/dataset_schema.go index 2e42eae95..50cc2b06d 100644 --- a/internal/usecase/repo/dataset_schema.go +++ b/internal/usecase/repo/dataset_schema.go @@ -9,8 +9,9 @@ import ( ) type DatasetSchema interface { - FindByID(context.Context, id.DatasetSchemaID, []id.SceneID) (*dataset.Schema, error) - FindByIDs(context.Context, []id.DatasetSchemaID, []id.SceneID) (dataset.SchemaList, error) + Filtered(SceneFilter) DatasetSchema + FindByID(context.Context, id.DatasetSchemaID) (*dataset.Schema, error) + FindByIDs(context.Context, []id.DatasetSchemaID) (dataset.SchemaList, error) FindByScene(context.Context, id.SceneID, *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) FindBySceneAll(context.Context, id.SceneID) (dataset.SchemaList, error) FindBySceneAndSource(context.Context, id.SceneID, string) (dataset.SchemaList, error) diff --git a/internal/usecase/repo/layer.go b/internal/usecase/repo/layer.go index c3293b39c..28d5edad3 100644 --- a/internal/usecase/repo/layer.go +++ b/internal/usecase/repo/layer.go @@ -8,32 +8,33 @@ import ( ) type Layer interface { - FindByID(context.Context, id.LayerID, []id.SceneID) (layer.Layer, error) - FindByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.List, error) - FindItemByID(context.Context, id.LayerID, []id.SceneID) (*layer.Item, error) - FindItemByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.ItemList, error) + Filtered(SceneFilter) Layer + FindByID(context.Context, id.LayerID) (layer.Layer, error) + FindByIDs(context.Context, []id.LayerID) 
(layer.List, error) + FindItemByID(context.Context, id.LayerID) (*layer.Item, error) + FindItemByIDs(context.Context, []id.LayerID) (layer.ItemList, error) FindAllByDatasetSchema(context.Context, id.DatasetSchemaID) (layer.List, error) - FindGroupByID(context.Context, id.LayerID, []id.SceneID) (*layer.Group, error) - FindGroupByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.GroupList, error) + FindGroupByID(context.Context, id.LayerID) (*layer.Group, error) + FindGroupByIDs(context.Context, []id.LayerID) (layer.GroupList, error) FindGroupBySceneAndLinkedDatasetSchema(context.Context, id.SceneID, id.DatasetSchemaID) (layer.GroupList, error) - FindParentByID(context.Context, id.LayerID, []id.SceneID) (*layer.Group, error) - FindParentsByIDs(context.Context, []id.LayerID, []id.SceneID) (layer.GroupList, error) - FindByPluginAndExtension(context.Context, id.PluginID, *id.PluginExtensionID, []id.SceneID) (layer.List, error) - FindByPluginAndExtensionOfBlocks(context.Context, id.PluginID, *id.PluginExtensionID, []id.SceneID) (layer.List, error) - FindByProperty(context.Context, id.PropertyID, []id.SceneID) (layer.Layer, error) + FindParentByID(context.Context, id.LayerID) (*layer.Group, error) + FindParentsByIDs(context.Context, []id.LayerID) (layer.GroupList, error) + FindByPluginAndExtension(context.Context, id.PluginID, *id.PluginExtensionID) (layer.List, error) + FindByPluginAndExtensionOfBlocks(context.Context, id.PluginID, *id.PluginExtensionID) (layer.List, error) + FindByProperty(context.Context, id.PropertyID) (layer.Layer, error) FindByScene(context.Context, id.SceneID) (layer.List, error) - FindByTag(context.Context, id.TagID, []id.SceneID) (layer.List, error) + FindByTag(context.Context, id.TagID) (layer.List, error) Save(context.Context, layer.Layer) error SaveAll(context.Context, layer.List) error - UpdatePlugin(context.Context, id.PluginID, id.PluginID, []id.SceneID) error + UpdatePlugin(context.Context, id.PluginID, id.PluginID) error 
Remove(context.Context, id.LayerID) error RemoveAll(context.Context, []id.LayerID) error RemoveByScene(context.Context, id.SceneID) error } -func LayerLoaderFrom(r Layer, scenes []id.SceneID) layer.Loader { +func LayerLoaderFrom(r Layer) layer.Loader { return func(ctx context.Context, ids ...id.LayerID) (layer.List, error) { - return r.FindByIDs(ctx, ids, scenes) + return r.FindByIDs(ctx, ids) } } diff --git a/internal/usecase/repo/plugin.go b/internal/usecase/repo/plugin.go index c8a472a71..f30d2f8dc 100644 --- a/internal/usecase/repo/plugin.go +++ b/internal/usecase/repo/plugin.go @@ -8,14 +8,15 @@ import ( ) type Plugin interface { - FindByID(context.Context, id.PluginID, []id.SceneID) (*plugin.Plugin, error) - FindByIDs(context.Context, []id.PluginID, []id.SceneID) ([]*plugin.Plugin, error) + Filtered(SceneFilter) Plugin + FindByID(context.Context, id.PluginID) (*plugin.Plugin, error) + FindByIDs(context.Context, []id.PluginID) ([]*plugin.Plugin, error) Save(context.Context, *plugin.Plugin) error Remove(context.Context, id.PluginID) error } func PluginLoaderFrom(r Plugin) plugin.Loader { - return func(ctx context.Context, ids []id.PluginID, sids []id.SceneID) ([]*plugin.Plugin, error) { - return r.FindByIDs(ctx, ids, sids) + return func(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + return r.FindByIDs(ctx, ids) } } diff --git a/internal/usecase/repo/project.go b/internal/usecase/repo/project.go index c5921a9c6..1c47719d4 100644 --- a/internal/usecase/repo/project.go +++ b/internal/usecase/repo/project.go @@ -9,8 +9,9 @@ import ( ) type Project interface { - FindByIDs(context.Context, []id.ProjectID, []id.TeamID) ([]*project.Project, error) - FindByID(context.Context, id.ProjectID, []id.TeamID) (*project.Project, error) + Filtered(TeamFilter) Project + FindByIDs(context.Context, []id.ProjectID) ([]*project.Project, error) + FindByID(context.Context, id.ProjectID) (*project.Project, error) FindByTeam(context.Context, id.TeamID, 
*usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) FindByPublicName(context.Context, string) (*project.Project, error) CountByTeam(context.Context, id.TeamID) (int, error) diff --git a/internal/usecase/repo/property.go b/internal/usecase/repo/property.go index 91555b3ab..9c2464dc7 100644 --- a/internal/usecase/repo/property.go +++ b/internal/usecase/repo/property.go @@ -8,8 +8,9 @@ import ( ) type Property interface { - FindByID(context.Context, id.PropertyID, []id.SceneID) (*property.Property, error) - FindByIDs(context.Context, []id.PropertyID, []id.SceneID) (property.List, error) + Filtered(SceneFilter) Property + FindByID(context.Context, id.PropertyID) (*property.Property, error) + FindByIDs(context.Context, []id.PropertyID) (property.List, error) FindLinkedAll(context.Context, id.SceneID) (property.List, error) FindByDataset(context.Context, id.DatasetSchemaID, id.DatasetID) (property.List, error) FindBySchema(context.Context, []id.PropertySchemaID, id.SceneID) (property.List, error) @@ -22,8 +23,8 @@ type Property interface { RemoveByScene(context.Context, id.SceneID) error } -func PropertyLoaderFrom(r Property, scenes []id.SceneID) property.Loader { +func PropertyLoaderFrom(r Property) property.Loader { return func(ctx context.Context, ids ...id.PropertyID) (property.List, error) { - return r.FindByIDs(ctx, ids, scenes) + return r.FindByIDs(ctx, ids) } } diff --git a/internal/usecase/repo/property_schema.go b/internal/usecase/repo/property_schema.go index 952eddda4..6911af050 100644 --- a/internal/usecase/repo/property_schema.go +++ b/internal/usecase/repo/property_schema.go @@ -8,6 +8,7 @@ import ( ) type PropertySchema interface { + Filtered(SceneFilter) PropertySchema FindByID(context.Context, id.PropertySchemaID) (*property.Schema, error) FindByIDs(context.Context, []id.PropertySchemaID) (property.SchemaList, error) Save(context.Context, *property.Schema) error diff --git a/internal/usecase/repo/scene.go b/internal/usecase/repo/scene.go 
index c1c0bebe7..9d0b1f137 100644 --- a/internal/usecase/repo/scene.go +++ b/internal/usecase/repo/scene.go @@ -8,10 +8,11 @@ import ( ) type Scene interface { - FindByID(context.Context, id.SceneID, []id.TeamID) (*scene.Scene, error) - FindByIDs(context.Context, []id.SceneID, []id.TeamID) (scene.List, error) + Filtered(TeamFilter) Scene + FindByID(context.Context, id.SceneID) (*scene.Scene, error) + FindByIDs(context.Context, []id.SceneID) (scene.List, error) FindByTeam(context.Context, ...id.TeamID) (scene.List, error) - FindByProject(context.Context, id.ProjectID, []id.TeamID) (*scene.Scene, error) + FindByProject(context.Context, id.ProjectID) (*scene.Scene, error) Save(context.Context, *scene.Scene) error Remove(context.Context, id.SceneID) error } diff --git a/internal/usecase/repo/tag.go b/internal/usecase/repo/tag.go index b3c8bcda1..86b9f886e 100644 --- a/internal/usecase/repo/tag.go +++ b/internal/usecase/repo/tag.go @@ -8,15 +8,16 @@ import ( ) type Tag interface { - FindByID(context.Context, id.TagID, []id.SceneID) (tag.Tag, error) - FindByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Tag, error) + Filtered(SceneFilter) Tag + FindByID(context.Context, id.TagID) (tag.Tag, error) + FindByIDs(context.Context, []id.TagID) ([]*tag.Tag, error) FindByScene(context.Context, id.SceneID) ([]*tag.Tag, error) - FindItemByID(context.Context, id.TagID, []id.SceneID) (*tag.Item, error) - FindItemByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Item, error) - FindGroupByID(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) - FindGroupByIDs(context.Context, []id.TagID, []id.SceneID) ([]*tag.Group, error) + FindItemByID(context.Context, id.TagID) (*tag.Item, error) + FindItemByIDs(context.Context, []id.TagID) ([]*tag.Item, error) + FindGroupByID(context.Context, id.TagID) (*tag.Group, error) + FindGroupByIDs(context.Context, []id.TagID) ([]*tag.Group, error) FindRootsByScene(context.Context, id.SceneID) ([]*tag.Tag, error) - 
FindGroupByItem(context.Context, id.TagID, []id.SceneID) (*tag.Group, error) + FindGroupByItem(context.Context, id.TagID) (*tag.Group, error) Save(context.Context, tag.Tag) error SaveAll(context.Context, []*tag.Tag) error Remove(context.Context, id.TagID) error @@ -24,9 +25,9 @@ type Tag interface { RemoveByScene(context.Context, id.SceneID) error } -func TagLoaderFrom(r Tag, scenes []id.SceneID) tag.Loader { +func TagLoaderFrom(r Tag) tag.Loader { return func(ctx context.Context, ids ...id.TagID) ([]*tag.Tag, error) { - return r.FindByIDs(ctx, ids, scenes) + return r.FindByIDs(ctx, ids) } } diff --git a/pkg/plugin/list.go b/pkg/plugin/list.go new file mode 100644 index 000000000..6e28f86d8 --- /dev/null +++ b/pkg/plugin/list.go @@ -0,0 +1,50 @@ +package plugin + +import "sort" + +type List []*Plugin + +func (l List) Find(p ID) *Plugin { + for _, q := range l { + if q.ID().Equal(p) { + return q + } + } + return nil +} + +func (l List) Concat(m List) List { + return append(l, m...) +} + +func (l List) MapToIDs(ids []ID) List { + res := make(List, 0, len(ids)) + for _, id := range ids { + res = append(res, l.Find(id)) + } + return res +} + +func (l List) Map() Map { + m := make(Map, len(l)) + for _, p := range l { + m[p.ID()] = p + } + return m +} + +type Map map[ID]*Plugin + +func (m Map) List() List { + if m == nil { + return nil + } + res := make(List, 0, len(m)) + for _, p := range m { + res = append(res, p) + } + sort.SliceStable(res, func(i, j int) bool { + return res[i].ID().String() > res[j].ID().String() + }) + return res +} diff --git a/pkg/plugin/list_test.go b/pkg/plugin/list_test.go new file mode 100644 index 000000000..08bba230c --- /dev/null +++ b/pkg/plugin/list_test.go @@ -0,0 +1,45 @@ +package plugin + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestList_Find(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, p1, List{p1, p2}.Find(p1.ID())) + assert.Nil(t, 
List{p1, p2}.Find(MustID("hoge~1.0.0"))) + assert.Nil(t, List(nil).Find(p1.ID())) +} + +func TestList_Concat(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, List{p1, p2, p2}, List{p1, p2}.Concat(List{p2})) + assert.Equal(t, List{p1}, List(nil).Concat(List{p1})) + assert.Equal(t, List{p1}, List{p1}.Concat(nil)) +} + +func TestList_Map(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, Map{p1.ID(): p1, p2.ID(): p2}, List{p1, p2}.Map()) + assert.Equal(t, Map{}, List(nil).Map()) +} + +func TestList_MapToIDs(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, List{nil, p2}, List{p1, p2}.MapToIDs([]ID{MustID("hoge~1.0.0"), p2.ID()})) + assert.Equal(t, List{}, List{p1, p2}.MapToIDs(nil)) + assert.Equal(t, List{nil}, List(nil).MapToIDs([]ID{p1.ID()})) +} + +func TestMap_List(t *testing.T) { + p1 := &Plugin{id: MustID("foo~1.0.0")} + p2 := &Plugin{id: MustID("bar~1.0.0")} + assert.Equal(t, List{p1, p2}, Map{p1.ID(): p1, p2.ID(): p2}.List()) + assert.Nil(t, Map(nil).List()) +} diff --git a/pkg/plugin/loader.go b/pkg/plugin/loader.go index 628b889ba..3a789fbc0 100644 --- a/pkg/plugin/loader.go +++ b/pkg/plugin/loader.go @@ -4,4 +4,4 @@ import ( "context" ) -type Loader func(context.Context, []ID, []SceneID) ([]*Plugin, error) +type Loader func(context.Context, []ID) ([]*Plugin, error) diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 37b2d5beb..8f1196ee6 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -30,6 +30,10 @@ func (p *Plugin) Version() semver.Version { return p.id.Version() } +func (p *Plugin) Scene() *SceneID { + return p.ID().Scene() +} + func (p *Plugin) Name() i18n.String { if p == nil { return nil diff --git a/pkg/property/schema.go b/pkg/property/schema.go index e7bb2925a..5b23a417c 100644 --- a/pkg/property/schema.go +++ 
b/pkg/property/schema.go @@ -23,6 +23,10 @@ func (p *Schema) IDRef() *SchemaID { return p.id.Ref() } +func (p *Schema) Scene() *SceneID { + return p.id.Plugin().Scene() +} + func (p *Schema) Version() int { return p.version } diff --git a/pkg/property/schema_list.go b/pkg/property/schema_list.go index da83e4365..322523c2b 100644 --- a/pkg/property/schema_list.go +++ b/pkg/property/schema_list.go @@ -19,6 +19,18 @@ func (l SchemaList) Loader() SchemaLoader { return SchemaLoaderFromMap(l.Map()) } +func (l SchemaList) Concat(m SchemaList) SchemaList { + return append(l, m...) +} + +func (l SchemaList) MapToIDs(ids []SchemaID) SchemaList { + results := make(SchemaList, 0, len(ids)) + for _, id := range ids { + results = append(results, l.Find(id)) + } + return results +} + type SchemaMap map[SchemaID]*Schema func SchemaMapFrom(l []*Schema) SchemaMap { diff --git a/pkg/property/schema_list_test.go b/pkg/property/schema_list_test.go new file mode 100644 index 000000000..0a485e5a5 --- /dev/null +++ b/pkg/property/schema_list_test.go @@ -0,0 +1,45 @@ +package property + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaList_Find(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, p1, SchemaList{p1, p2}.Find(p1.ID())) + assert.Nil(t, SchemaList{p1, p2}.Find(MustSchemaID("hoge~1.0.0/a"))) + assert.Nil(t, SchemaList(nil).Find(p1.ID())) +} + +func TestSchemaList_Concat(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, SchemaList{p1, p2, p2}, SchemaList{p1, p2}.Concat(SchemaList{p2})) + assert.Equal(t, SchemaList{p1}, SchemaList(nil).Concat(SchemaList{p1})) + assert.Equal(t, SchemaList{p1}, SchemaList{p1}.Concat(nil)) +} + +func TestSchemaList_Map(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, 
SchemaMap{p1.ID(): p1, p2.ID(): p2}, SchemaList{p1, p2}.Map()) + assert.Equal(t, SchemaMap{}, SchemaList(nil).Map()) +} + +func TestSchemaList_MapToIDs(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, SchemaList{nil, p2}, SchemaList{p1, p2}.MapToIDs([]SchemaID{MustSchemaID("hoge~1.0.0/a"), p2.ID()})) + assert.Equal(t, SchemaList{}, SchemaList{p1, p2}.MapToIDs(nil)) + assert.Equal(t, SchemaList{nil}, SchemaList(nil).MapToIDs([]SchemaID{p1.ID()})) +} + +func TestSchemaMap_List(t *testing.T) { + p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} + p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} + assert.Equal(t, SchemaList{p1, p2}, SchemaMap{p1.ID(): p1, p2.ID(): p2}.List()) + assert.Nil(t, SchemaMap(nil).List()) +} diff --git a/pkg/scene/id.go b/pkg/scene/id.go index 57bbbbc82..4326ae467 100644 --- a/pkg/scene/id.go +++ b/pkg/scene/id.go @@ -69,6 +69,13 @@ func createdAt(i ID) time.Time { type IDList []ID +func (l IDList) Clone() IDList { + if l == nil { + return nil + } + return append(IDList{}, l...) 
+} + func (l IDList) Filter(ids ...ID) IDList { if l == nil { return nil @@ -99,3 +106,7 @@ func (l IDList) Includes(ids ...ID) bool { func (l IDList) Len() int { return len(l) } + +func (k IDList) Strings() []string { + return id.SceneIDsToStrings(k) +} diff --git a/pkg/scene/id_test.go b/pkg/scene/id_test.go index 635b62f3e..a76de6e56 100644 --- a/pkg/scene/id_test.go +++ b/pkg/scene/id_test.go @@ -6,6 +6,16 @@ import ( "github.com/stretchr/testify/assert" ) +func TestIDList_Clone(t *testing.T) { + t1 := NewID() + t2 := NewID() + t3 := NewID() + ids := IDList{t1, t2, t3} + assert.Equal(t, ids, ids.Clone()) + assert.NotSame(t, ids, ids.Clone()) + assert.Nil(t, IDList(nil).Clone()) +} + func TestIDList_Filter(t *testing.T) { t1 := NewID() t2 := NewID() diff --git a/pkg/scene/sceneops/dataset_migrator.go b/pkg/scene/sceneops/dataset_migrator.go index c4902292b..c67c42a4f 100644 --- a/pkg/scene/sceneops/dataset_migrator.go +++ b/pkg/scene/sceneops/dataset_migrator.go @@ -38,7 +38,6 @@ func (r MigrateDatasetResult) Merge(r2 MigrateDatasetResult) MigrateDatasetResul // NOTE: DatasetSchemaใฎๅ‰Š้™คใซใฏๅฏพๅฟœใ—ใฆใ„ใชใ„๏ผˆ่‡ชๅ‹•็š„ใซๅ‰Š้™คใ•ใ‚Œใชใ„๏ผ‰ func (srv DatasetMigrator) Migrate(ctx context.Context, sid dataset.SceneID, newdsl []*dataset.Schema, newdl dataset.List) (MigrateDatasetResult, error) { - scenes := []dataset.SceneID{sid} result := MigrateDatasetResult{} // ๅ‰Š้™คๅฏพ่ฑก @@ -95,7 +94,7 @@ func (srv DatasetMigrator) Migrate(ctx context.Context, sid dataset.SceneID, new } // ๅคใ„DSใฎDใ‚’ๆŽขใ—ๅ‡บใ™ - olddl, _, err := srv.DatasetRepo.FindBySchema(ctx, oldds.ID(), scenes, nil) + olddl, _, err := srv.DatasetRepo.FindBySchema(ctx, oldds.ID(), nil) if err != nil { return MigrateDatasetResult{}, err } @@ -162,8 +161,6 @@ func (srv DatasetMigrator) Migrate(ctx context.Context, sid dataset.SceneID, new } func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid dataset.SceneID, oldds *dataset.Schema, newds *dataset.Schema, diff dataset.Diff) 
(MigrateDatasetResult, error) { - scenes := []dataset.SceneID{sid} - // ๅ‰ใฎใƒ‡ใƒผใ‚ฟใ‚ปใƒƒใƒˆใ‚นใ‚ญใƒผใƒžใซ็ดใฅใ„ใŸใƒฌใ‚คใƒคใƒผใ‚ฐใƒซใƒผใƒ—ใ‚’ๅ–ๅพ— layerGroups, err := srv.LayerRepo.FindGroupBySceneAndLinkedDatasetSchema(ctx, sid, oldds.ID()) if err != nil { @@ -175,7 +172,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid dataset.SceneID removedLayers := []layer.ID{} for _, lg := range layerGroups { - layers, err := srv.LayerRepo.FindByIDs(ctx, lg.Layers().Layers(), scenes) + layers, err := srv.LayerRepo.FindByIDs(ctx, lg.Layers().Layers()) if err != nil { return MigrateDatasetResult{}, err } @@ -215,7 +212,7 @@ func (srv DatasetMigrator) migrateLayer(ctx context.Context, sid dataset.SceneID // ใƒ—ใƒฉใ‚ฐใ‚คใƒณใ‚’ๅ–ๅพ— var plug *plugin.Plugin if pid := lg.Plugin(); pid != nil { - plug2, err := srv.Plugin(ctx, []plugin.ID{*pid}, []dataset.SceneID{sid}) + plug2, err := srv.Plugin(ctx, []plugin.ID{*pid}) if err != nil || len(plug2) < 1 { return MigrateDatasetResult{}, err } diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 124a1f6bd..6550c8eb7 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -46,7 +46,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol return MigratePluginsResult{}, ErrPluginNotInstalled } - plugins, err := s.Plugin(ctx, []plugin.ID{oldPluginID, newPluginID}, []scene.ID{sc.ID()}) + plugins, err := s.Plugin(ctx, []plugin.ID{oldPluginID, newPluginID}) if err != nil || len(plugins) < 2 { return MigratePluginsResult{}, ErrInvalidPlugins } diff --git a/pkg/user/id.go b/pkg/user/id.go index fb59016db..b134a6682 100644 --- a/pkg/user/id.go +++ b/pkg/user/id.go @@ -24,6 +24,13 @@ var ErrInvalidID = id.ErrInvalidID type TeamIDList []TeamID +func (l TeamIDList) Clone() TeamIDList { + if l == nil { + return nil + } + return append(TeamIDList{}, l...) 
+} + func (l TeamIDList) Filter(ids ...TeamID) TeamIDList { if l == nil { return nil @@ -54,3 +61,7 @@ func (l TeamIDList) Includes(ids ...TeamID) bool { func (k TeamIDList) Len() int { return len(k) } + +func (k TeamIDList) Strings() []string { + return id.TeamIDsToStrings(k) +} diff --git a/pkg/user/id_test.go b/pkg/user/id_test.go index d37cc0912..de4ee0a9b 100644 --- a/pkg/user/id_test.go +++ b/pkg/user/id_test.go @@ -6,6 +6,16 @@ import ( "github.com/stretchr/testify/assert" ) +func TestTeamIDList_Clone(t *testing.T) { + t1 := NewTeamID() + t2 := NewTeamID() + t3 := NewTeamID() + ids := TeamIDList{t1, t2, t3} + assert.Equal(t, ids, ids.Clone()) + assert.NotSame(t, ids, ids.Clone()) + assert.Nil(t, TeamIDList(nil).Clone()) +} + func TestTeamIDList_Filter(t *testing.T) { t1 := NewTeamID() t2 := NewTeamID() diff --git a/tools/cmd/migrategen/main.go b/tools/cmd/migrategen/main.go index b4f383702..8f891e423 100644 --- a/tools/cmd/migrategen/main.go +++ b/tools/cmd/migrategen/main.go @@ -113,6 +113,8 @@ var templ2 = template.Must(template.New("generated2").Parse(`// Code generated b package migration +// To add a new migration, run go run ./tools/cmd/migrategen migration_name + // WARNING: // If the migration takes too long, the deployment may fail in a serverless environment. // Set the batch size to as large a value as possible without using up the RAM of the deployment destination. 
From ce230993860480d6907cfb10f7e5ebd5907b36c4 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Wed, 16 Mar 2022 10:18:16 +0300 Subject: [PATCH 170/253] fix: auth server bugs and auth client bugs (#125) Co-authored-by: maherhamoui6 Co-authored-by: rot1024 --- internal/app/app.go | 1 + internal/app/auth_server.go | 34 +++++----- internal/app/jwt.go | 2 +- internal/app/public.go | 16 +++-- internal/infrastructure/memory/user.go | 17 +++++ .../infrastructure/mongo/mongodoc/client.go | 37 +++++++---- .../mongo/mongodoc/clientcol.go | 4 ++ internal/infrastructure/mongo/user.go | 7 +- internal/usecase/interactor/user.go | 64 ++++++++++++++----- internal/usecase/interfaces/user.go | 1 + internal/usecase/repo/user.go | 1 + pkg/user/user.go | 12 ++++ pkg/user/user_test.go | 39 +++++++++++ 13 files changed, 180 insertions(+), 55 deletions(-) diff --git a/internal/app/app.go b/internal/app/app.go index b0344349b..3f98f93e8 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -97,6 +97,7 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { api.GET("/published/:name", PublishedMetadata()) api.GET("/published_data/:name", PublishedData()) + // authenticated endpoints privateApi := api.Group("", AuthRequiredMiddleware()) graphqlAPI(e, privateApi, cfg) privateAPI(e, privateApi, cfg.Repos) diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index ab7796557..f1f63796c 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -4,7 +4,6 @@ import ( "context" "crypto/sha256" "encoding/json" - "fmt" "net/http" "net/url" "strings" @@ -15,12 +14,14 @@ import ( "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/internal/usecase/interactor" "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/log" ) -var ( +const ( loginEndpoint = "api/login" logoutEndpoint = "api/logout" jwksEndpoint = ".well-known/jwks.json" + 
authProvider = "reearth" ) func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *ServerConfig) { @@ -65,7 +66,7 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server userUsecase.GetUserBySubject, ) if err != nil { - e.Logger.Fatal(err) + log.Fatalf("auth: init failed: %s\n", err) } handler, err := op.NewOpenIDProvider( @@ -78,13 +79,13 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server op.WithCustomKeysEndpoint(op.NewEndpoint(jwksEndpoint)), ) if err != nil { - e.Logger.Fatal(fmt.Errorf("auth: init failed: %w", err)) + log.Fatalf("auth: init failed: %s\n", err) } router := handler.HttpHandler().(*mux.Router) if err := router.Walk(muxToEchoMapper(r)); err != nil { - e.Logger.Fatal(fmt.Errorf("auth: walk failed: %w", err)) + log.Fatalf("auth: walk failed: %s\n", err) } // Actual login endpoint @@ -178,23 +179,22 @@ type loginForm struct { func login(ctx context.Context, cfg *ServerConfig, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { return func(ec echo.Context) error { - request := new(loginForm) err := ec.Bind(request) if err != nil { - ec.Logger().Error("filed to parse login request") - return err + log.Errorln("auth: filed to parse login request") + return ec.Redirect(http.StatusFound, redirectURL(ec.Request().Referer(), !cfg.Debug, "", "Bad request!")) } authRequest, err := storage.AuthRequestByID(ctx, request.AuthRequestID) if err != nil { - ec.Logger().Error("filed to parse login request") - return err + log.Errorf("auth: filed to parse login request: %s\n", err) + return ec.Redirect(http.StatusFound, redirectURL(ec.Request().Referer(), !cfg.Debug, "", "Bad request!")) } if len(request.Email) == 0 || len(request.Password) == 0 { - ec.Logger().Error("credentials are not provided") - return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "invalid login")) + log.Errorln("auth: one of 
credentials are not provided") + return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "Bad request!")) } // check user credentials from db @@ -203,15 +203,15 @@ func login(ctx context.Context, cfg *ServerConfig, storage op.Storage, userUseca Password: request.Password, }) if err != nil { - ec.Logger().Error("wrong credentials!") - return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "invalid login")) + log.Errorf("auth: wrong credentials: %s\n", err) + return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "Login failed; Invalid user ID or password.")) } // Complete the auth request && set the subject - err = storage.(*interactor.AuthStorage).CompleteAuthRequest(ctx, request.AuthRequestID, user.GetAuthByProvider("reearth").Sub) + err = storage.(*interactor.AuthStorage).CompleteAuthRequest(ctx, request.AuthRequestID, user.GetAuthByProvider(authProvider).Sub) if err != nil { - ec.Logger().Error("failed to complete the auth request !") - return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "invalid login")) + log.Errorf("auth: failed to complete the auth request: %s\n", err) + return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "Bad request!")) } return ec.Redirect(http.StatusFound, "/authorize/callback?id="+request.AuthRequestID) diff --git a/internal/app/jwt.go b/internal/app/jwt.go index cd51e5e64..f0d5927a3 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -70,7 +70,7 @@ func jwtEchoMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { log.Fatalf("failed to set up the validator: %v", err) } - middleware := jwtmiddleware.New(jwtValidator.ValidateToken) + middleware := jwtmiddleware.New(jwtValidator.ValidateToken, jwtmiddleware.WithCredentialsOptional(true)) 
return echo.WrapMiddleware(middleware.CheckJWT) } diff --git a/internal/app/public.go b/internal/app/public.go index e1624f0d8..fedca4d12 100644 --- a/internal/app/public.go +++ b/internal/app/public.go @@ -50,18 +50,22 @@ func PasswordReset() echo.HandlerFunc { uc := adapter.Usecases(c.Request().Context()) controller := http1.NewUserController(uc.User) - if len(inp.Email) > 0 { + isStartingNewRequest := len(inp.Email) > 0 && len(inp.Token) == 0 && len(inp.Password) == 0 + isSettingNewPassword := len(inp.Email) > 0 && len(inp.Token) > 0 && len(inp.Password) > 0 + + if isStartingNewRequest { if err := controller.StartPasswordReset(c.Request().Context(), inp); err != nil { - return err + c.Logger().Error("an attempt to start reset password failed. internal error: %w", err) } - return c.JSON(http.StatusOK, true) + return c.JSON(http.StatusOK, echo.Map{"message": "If that email address is in our database, we will send you an email to reset your password."}) } - if len(inp.Token) > 0 && len(inp.Password) > 0 { + if isSettingNewPassword { if err := controller.PasswordReset(c.Request().Context(), inp); err != nil { - return err + c.Logger().Error("an attempt to Set password failed. 
internal error: %w", err) + return c.JSON(http.StatusBadRequest, echo.Map{"message": "Bad set password request"}) } - return c.JSON(http.StatusOK, true) + return c.JSON(http.StatusOK, echo.Map{"message": "Password is updated successfully"}) } return &echo.HTTPError{Code: http.StatusBadRequest, Message: "Bad reset password request"} diff --git a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go index 4024b1ad7..e1134efdb 100644 --- a/internal/infrastructure/memory/user.go +++ b/internal/infrastructure/memory/user.go @@ -107,6 +107,23 @@ func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error return nil, rerror.ErrNotFound } +func (r *User) FindByName(ctx context.Context, name string) (*user.User, error) { + r.lock.Lock() + defer r.lock.Unlock() + + if name == "" { + return nil, rerror.ErrInvalidParams + } + + for _, u := range r.data { + if u.Name() == name { + return &u, nil + } + } + + return nil, rerror.ErrNotFound +} + func (r *User) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index 4ca44677a..7b6bc4117 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -304,24 +304,35 @@ func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p } func (c *Client) CreateIndex(ctx context.Context, col string, keys []string) []string { + return c.CreateUniqueIndex(ctx, col, keys, []string{}) +} + +func (c *Client) CreateUniqueIndex(ctx context.Context, col string, keys, uniqueKeys []string) []string { coll := c.Collection(col) indexedKeys := indexes(ctx, coll) - newIndexes := []mongo.IndexModel{} + // store unique keys as map to check them in an efficient way + ukm := map[string]struct{}{} + for _, k := range append([]string{"id"}, uniqueKeys...) 
{ + ukm[k] = struct{}{} + } + + var newIndexes []mongo.IndexModel for _, k := range append([]string{"id"}, keys...) { - if _, ok := indexedKeys[k]; !ok { - indexBg := true - unique := k == "id" - newIndexes = append(newIndexes, mongo.IndexModel{ - Keys: map[string]int{ - k: 1, - }, - Options: &options.IndexOptions{ - Background: &indexBg, - Unique: &unique, - }, - }) + if _, ok := indexedKeys[k]; ok { + continue } + indexBg := true + _, isUnique := ukm[k] + newIndexes = append(newIndexes, mongo.IndexModel{ + Keys: map[string]int{ + k: 1, + }, + Options: &options.IndexOptions{ + Background: &indexBg, + Unique: &isUnique, + }, + }) } if len(newIndexes) > 0 { diff --git a/internal/infrastructure/mongo/mongodoc/clientcol.go b/internal/infrastructure/mongo/mongodoc/clientcol.go index 97241643d..b6c6d5d3d 100644 --- a/internal/infrastructure/mongo/mongodoc/clientcol.go +++ b/internal/infrastructure/mongo/mongodoc/clientcol.go @@ -59,3 +59,7 @@ func (c *ClientCollection) RemoveAll(ctx context.Context, f interface{}) error { func (c *ClientCollection) CreateIndex(ctx context.Context, keys []string) []string { return c.Client.CreateIndex(ctx, c.CollectionName, keys) } + +func (c *ClientCollection) CreateUniqueIndex(ctx context.Context, keys, uniqueKeys []string) []string { + return c.Client.CreateUniqueIndex(ctx, c.CollectionName, keys, uniqueKeys) +} diff --git a/internal/infrastructure/mongo/user.go b/internal/infrastructure/mongo/user.go index b5b542e6e..0958cfc6c 100644 --- a/internal/infrastructure/mongo/user.go +++ b/internal/infrastructure/mongo/user.go @@ -23,7 +23,7 @@ func NewUser(client *mongodoc.Client) repo.User { } func (r *userRepo) init() { - i := r.client.CreateIndex(context.Background(), []string{"email", "auth0sublist"}) + i := r.client.CreateUniqueIndex(context.Background(), []string{"email", "name", "auth0sublist"}, []string{"name"}) if len(i) > 0 { log.Infof("mongo: %s: index created: %s", "user", i) } @@ -65,6 +65,11 @@ func (r *userRepo) 
FindByEmail(ctx context.Context, email string) (*user.User, e return r.findOne(ctx, filter) } +func (r *userRepo) FindByName(ctx context.Context, name string) (*user.User, error) { + filter := bson.D{{Key: "name", Value: name}} + return r.findOne(ctx, filter) +} + func (r *userRepo) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, error) { filter := bson.D{{Key: "$or", Value: []bson.D{ {{Key: "email", Value: nameOrEmail}}, diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index 47de634bd..eea781aee 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -177,7 +177,7 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. return } } else if isAuth { - if *inp.Name == "" { + if _, err := mail.ParseAddress(*inp.Name); err == nil || *inp.Name == "" { return nil, nil, interfaces.ErrSignupInvalidName } if _, err := mail.ParseAddress(*inp.Email); err != nil { @@ -228,28 +228,38 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user. 
func (i *User) reearthSignup(ctx context.Context, inp interfaces.SignupParam) (string, string, *user.User, *user.Team, error) { // Check if user email already exists - existed, err := i.userRepo.FindByEmail(ctx, *inp.Email) + existedByEmail, err := i.userRepo.FindByEmail(ctx, *inp.Email) if err != nil && !errors.Is(err, rerror.ErrNotFound) { return "", "", nil, nil, err } - if existed != nil { - if existed.Verification().IsVerified() { - return "", "", nil, nil, errors.New("existed user email") - } else { - // if user exists but not verified -> create a new verification - if err := i.CreateVerification(ctx, *inp.Email); err != nil { - return "", "", nil, nil, err - } else { - team, err := i.teamRepo.FindByID(ctx, existed.Team()) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, nil, err - } - return "", "", existed, team, nil - } + if existedByEmail != nil { + if existedByEmail.Verification() != nil && existedByEmail.Verification().IsVerified() { + return "", "", nil, nil, errors.New("existed email") + } + + // if user exists but not verified -> create a new verification + if err := i.CreateVerification(ctx, *inp.Email); err != nil { + return "", "", nil, nil, err } + + team, err := i.teamRepo.FindByID(ctx, existedByEmail.Team()) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, nil, err + } + return "", "", existedByEmail, team, nil } + existedByName, err := i.userRepo.FindByName(ctx, *inp.Name) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return "", "", nil, nil, err + } + + if existedByName != nil { + return "", "", nil, nil, errors.New("taken username") + } + + // !existedByName && !existedByEmail return *inp.Name, *inp.Email, nil, nil, nil } @@ -305,6 +315,9 @@ func (i *User) GetUserByCredentials(ctx context.Context, inp interfaces.GetUserB if !matched { return nil, interfaces.ErrSignupInvalidPassword } + if u.Verification() == nil || !u.Verification().IsVerified() { + return nil, 
interfaces.ErrNotVerifiedUser + } return u, nil } @@ -430,7 +443,15 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato return nil, err } - if p.Name != nil { + if p.Name != nil && *p.Name != u.Name() { + // username should not be a valid mail + if _, err := mail.ParseAddress(*p.Name); err == nil { + return nil, interfaces.ErrSignupInvalidName + } + // make sure the username is not exists + if userByName, _ := i.userRepo.FindByName(ctx, *p.Name); userByName != nil { + return nil, interfaces.ErrSignupInvalidName + } oldName := u.Name() u.UpdateName(*p.Name) @@ -456,9 +477,18 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato u.UpdateTheme(*p.Theme) } + if p.Password != nil && u.HasAuthProvider("reearth") { + if err := u.SetPassword(*p.Password); err != nil { + return nil, err + } + } + // Update Auth0 users if p.Name != nil || p.Email != nil || p.Password != nil { for _, a := range u.Auths() { + if a.Provider != "auth0" { + continue + } if _, err := i.authenticator.UpdateUser(gateway.AuthenticatorUpdateUserParam{ ID: a.Sub, Name: p.Name, diff --git a/internal/usecase/interfaces/user.go b/internal/usecase/interfaces/user.go index acca0bb50..c80cf7db7 100644 --- a/internal/usecase/interfaces/user.go +++ b/internal/usecase/interfaces/user.go @@ -18,6 +18,7 @@ var ( ErrSignupInvalidSecret = errors.New("invalid secret") ErrSignupInvalidName = errors.New("invalid name") ErrInvalidUserEmail = errors.New("invalid email") + ErrNotVerifiedUser = errors.New("not verified user") ErrSignupInvalidPassword = errors.New("invalid password") ) diff --git a/internal/usecase/repo/user.go b/internal/usecase/repo/user.go index fdc3f2769..8fc7abec6 100644 --- a/internal/usecase/repo/user.go +++ b/internal/usecase/repo/user.go @@ -12,6 +12,7 @@ type User interface { FindByID(context.Context, id.UserID) (*user.User, error) FindByAuth0Sub(context.Context, string) (*user.User, error) FindByEmail(context.Context, string) 
(*user.User, error) + FindByName(context.Context, string) (*user.User, error) FindByNameOrEmail(context.Context, string) (*user.User, error) FindByVerification(context.Context, string) (*user.User, error) FindByPasswordResetRequest(context.Context, string) (*user.User, error) diff --git a/pkg/user/user.go b/pkg/user/user.go index c3dc7fd99..cc48e0e02 100644 --- a/pkg/user/user.go +++ b/pkg/user/user.go @@ -102,6 +102,18 @@ func (u *User) ContainAuth(a Auth) bool { return false } +func (u *User) HasAuthProvider(p string) bool { + if u == nil { + return false + } + for _, b := range u.auths { + if b.Provider == p { + return true + } + } + return false +} + func (u *User) AddAuth(a Auth) bool { if u == nil { return false diff --git a/pkg/user/user_test.go b/pkg/user/user_test.go index 008e2f8fa..d3244466d 100644 --- a/pkg/user/user_test.go +++ b/pkg/user/user_test.go @@ -205,6 +205,45 @@ func TestUser_ContainAuth(t *testing.T) { } } +func TestUser_HasAuthProvider(t *testing.T) { + tests := []struct { + Name string + User *User + P string + Expected bool + }{ + { + Name: "nil user", + User: nil, + Expected: false, + }, + { + Name: "not existing auth", + User: New().NewID().MustBuild(), + P: "auth0", + Expected: false, + }, + { + Name: "existing auth", + User: New().NewID().Auths([]Auth{{ + Provider: "xxx", + Sub: "zzz", + }}).MustBuild(), + P: "xxx", + Expected: true, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + res := tc.User.HasAuthProvider(tc.P) + assert.Equal(t, tc.Expected, res) + }) + } +} + func TestUser_RemoveAuthByProvider(t *testing.T) { tests := []struct { Name string From 73994348b1d0cc612cde2b4f232ce86228fe668e Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Wed, 16 Mar 2022 16:30:18 +0900 Subject: [PATCH 171/253] feat: assets filtering & pagination (#81) * wip: asset filtering * asset filtering * refactoring * fix naming * add ability to search by key 
word code refactoring * make the pagination cursor optional * code restructuring * fix sort default value * fix sort default value * fix sort * pagination refactoring * support sort with pagination * - remove unused code * - refactor pagination code * - fix paginationFilter return type * - code refactoring * - fix pagination when it contains reverse order * fix memory Co-authored-by: yk Co-authored-by: rot1024 --- internal/adapter/gql/generated.go | 138 ++++++++++++++---- internal/adapter/gql/gqlmodel/convert.go | 12 ++ .../adapter/gql/gqlmodel/convert_asset.go | 15 ++ internal/adapter/gql/gqlmodel/models_gen.go | 50 +++++++ internal/adapter/gql/loader_asset.go | 8 +- internal/adapter/gql/resolver_query.go | 4 +- internal/adapter/gql/resolver_team.go | 7 +- internal/infrastructure/memory/asset.go | 22 ++- internal/infrastructure/mongo/asset.go | 31 +++- internal/infrastructure/mongo/dataset.go | 2 +- .../infrastructure/mongo/dataset_schema.go | 2 +- .../infrastructure/mongo/mongodoc/client.go | 96 +++++++----- .../mongo/mongodoc/clientcol.go | 4 +- .../mongo/mongodoc/pagination.go | 49 +++++++ internal/infrastructure/mongo/project.go | 2 +- internal/usecase/interactor/asset.go | 8 +- internal/usecase/interfaces/asset.go | 10 +- internal/usecase/repo/asset.go | 8 +- pkg/asset/sort_type.go | 37 +++++ pkg/asset/sort_type_test.go | 79 ++++++++++ schema.graphql | 20 ++- 21 files changed, 510 insertions(+), 94 deletions(-) create mode 100644 internal/infrastructure/mongo/mongodoc/pagination.go create mode 100644 pkg/asset/sort_type.go create mode 100644 pkg/asset/sort_type_test.go diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 91f53d42d..fc5853785 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -781,7 +781,7 @@ type ComplexityRoot struct { } Query struct { - Assets func(childComplexity int, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + Assets 
func(childComplexity int, teamID id.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) int CheckProjectAlias func(childComplexity int, alias string) int DatasetSchemas func(childComplexity int, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int Datasets func(childComplexity int, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int @@ -1303,7 +1303,7 @@ type QueryResolver interface { Plugins(ctx context.Context, id []*id.PluginID) ([]*gqlmodel.Plugin, error) Layer(ctx context.Context, id id.ID) (gqlmodel.Layer, error) Scene(ctx context.Context, projectID id.ID) (*gqlmodel.Scene, error) - Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) + Assets(ctx context.Context, teamID id.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) @@ -4990,7 +4990,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Assets(childComplexity, args["teamId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + return e.complexity.Query.Assets(childComplexity, args["teamId"].(id.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), 
args["pagination"].(*gqlmodel.Pagination)), true case "Query.checkProjectAlias": if e.complexity.Query.CheckProjectAlias == nil { @@ -6357,6 +6357,13 @@ type Rect { north: Float! } +input Pagination{ + first: Int + last: Int + after: Cursor + before: Cursor +} + enum TextAlign { LEFT CENTER @@ -6405,6 +6412,12 @@ type Asset implements Node { team: Team @goField(forceResolver: true) } +enum AssetSortType { + DATE + SIZE + NAME +} + # User type User implements Node { @@ -7823,10 +7836,9 @@ type Query { scene(projectId: ID!): Scene assets( teamId: ID! - first: Int - last: Int - after: Cursor - before: Cursor + keyword: String + sort: AssetSortType + pagination: Pagination ): AssetConnection! projects( teamId: ID! @@ -9152,42 +9164,33 @@ func (ec *executionContext) field_Query_assets_args(ctx context.Context, rawArgs } } args["teamId"] = arg0 - var arg1 *int - if tmp, ok := rawArgs["first"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) - arg1, err = ec.unmarshalOInt2แš–int(ctx, tmp) - if err != nil { - return nil, err - } - } - args["first"] = arg1 - var arg2 *int - if tmp, ok := rawArgs["last"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) - arg2, err = ec.unmarshalOInt2แš–int(ctx, tmp) + var arg1 *string + if tmp, ok := rawArgs["keyword"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("keyword")) + arg1, err = ec.unmarshalOString2แš–string(ctx, tmp) if err != nil { return nil, err } } - args["last"] = arg2 - var arg3 *usecase.Cursor - if tmp, ok := rawArgs["after"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) - arg3, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + args["keyword"] = arg1 + var arg2 *gqlmodel.AssetSortType + if tmp, ok := rawArgs["sort"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sort")) + arg2, err = 
ec.unmarshalOAssetSortType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetSortType(ctx, tmp) if err != nil { return nil, err } } - args["after"] = arg3 - var arg4 *usecase.Cursor - if tmp, ok := rawArgs["before"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) - arg4, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, tmp) + args["sort"] = arg2 + var arg3 *gqlmodel.Pagination + if tmp, ok := rawArgs["pagination"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pagination")) + arg3, err = ec.unmarshalOPagination2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPagination(ctx, tmp) if err != nil { return nil, err } } - args["before"] = arg4 + args["pagination"] = arg3 return args, nil } @@ -26092,7 +26095,7 @@ func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.Col fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Assets(rctx, args["teamId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().Assets(rctx, args["teamId"].(id.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)) }) if err != nil { ec.Error(ctx, err) @@ -33800,6 +33803,53 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont return it, nil } +func (ec *executionContext) unmarshalInputPagination(ctx context.Context, obj interface{}) (gqlmodel.Pagination, error) { + var it gqlmodel.Pagination + asMap := map[string]interface{}{} + for k, v := range obj.(map[string]interface{}) { + asMap[k] = v + } + + for k, v := range asMap { + switch k { + case "first": + var err 
error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("first")) + it.First, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "last": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("last")) + it.Last, err = ec.unmarshalOInt2แš–int(ctx, v) + if err != nil { + return it, err + } + case "after": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("after")) + it.After, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, v) + if err != nil { + return it, err + } + case "before": + var err error + + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("before")) + it.Before, err = ec.unmarshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, v) + if err != nil { + return it, err + } + } + } + + return it, nil +} + func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Context, obj interface{}) (gqlmodel.PublishProjectInput, error) { var it gqlmodel.PublishProjectInput asMap := map[string]interface{}{} @@ -45199,6 +45249,22 @@ func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ec._Asset(ctx, sel, v) } +func (ec *executionContext) unmarshalOAssetSortType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetSortType(ctx context.Context, v interface{}) (*gqlmodel.AssetSortType, error) { + if v == nil { + return nil, nil + } + var res = new(gqlmodel.AssetSortType) + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOAssetSortType2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetSortType(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetSortType) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return v +} + func (ec *executionContext) 
marshalOAttachTagItemToGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AttachTagItemToGroupPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -45587,6 +45653,14 @@ func (ec *executionContext) marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘back return ec._Node(ctx, sel, v) } +func (ec *executionContext) unmarshalOPagination2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPagination(ctx context.Context, v interface{}) (*gqlmodel.Pagination, error) { + if v == nil { + return nil, nil + } + res, err := ec.unmarshalInputPagination(ctx, v) + return &res, graphql.ErrorOnPath(ctx, err) +} + func (ec *executionContext) marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { if v == nil { return graphql.Null diff --git a/internal/adapter/gql/gqlmodel/convert.go b/internal/adapter/gql/gqlmodel/convert.go index f25dbf027..eecaa6363 100644 --- a/internal/adapter/gql/gqlmodel/convert.go +++ b/internal/adapter/gql/gqlmodel/convert.go @@ -85,3 +85,15 @@ func FromListOperation(op ListOperation) interfaces.ListOperation { } return interfaces.ListOperation("") } + +func ToPagination(pagination *Pagination) *usecase.Pagination { + if pagination == nil { + return nil + } + return &usecase.Pagination{ + Before: pagination.Before, + After: pagination.After, + First: pagination.First, + Last: pagination.Last, + } +} diff --git a/internal/adapter/gql/gqlmodel/convert_asset.go b/internal/adapter/gql/gqlmodel/convert_asset.go index 0f60ad769..1404ac643 100644 --- a/internal/adapter/gql/gqlmodel/convert_asset.go +++ b/internal/adapter/gql/gqlmodel/convert_asset.go @@ -18,3 +18,18 @@ func ToAsset(a *asset.Asset) *Asset { ContentType: a.ContentType(), } } + +func 
AssetSortTypeFrom(ast *AssetSortType) *asset.SortType { + if ast == nil { + return nil + } + switch *ast { + case AssetSortTypeDate: + return &asset.SortTypeID + case AssetSortTypeName: + return &asset.SortTypeName + case AssetSortTypeSize: + return &asset.SortTypeSize + } + return &asset.SortTypeID +} diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 34314fdd0..ee7a0f6bc 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -661,6 +661,13 @@ type PageInfo struct { HasPreviousPage bool `json:"hasPreviousPage"` } +type Pagination struct { + First *int `json:"first"` + Last *int `json:"last"` + After *usecase.Cursor `json:"after"` + Before *usecase.Cursor `json:"before"` +} + type Plugin struct { ID id.PluginID `json:"id"` SceneID *id.ID `json:"sceneId"` @@ -1374,6 +1381,49 @@ type WidgetZone struct { Right *WidgetSection `json:"right"` } +type AssetSortType string + +const ( + AssetSortTypeDate AssetSortType = "DATE" + AssetSortTypeSize AssetSortType = "SIZE" + AssetSortTypeName AssetSortType = "NAME" +) + +var AllAssetSortType = []AssetSortType{ + AssetSortTypeDate, + AssetSortTypeSize, + AssetSortTypeName, +} + +func (e AssetSortType) IsValid() bool { + switch e { + case AssetSortTypeDate, AssetSortTypeSize, AssetSortTypeName: + return true + } + return false +} + +func (e AssetSortType) String() string { + return string(e) +} + +func (e *AssetSortType) UnmarshalGQL(v interface{}) error { + str, ok := v.(string) + if !ok { + return fmt.Errorf("enums must be strings") + } + + *e = AssetSortType(str) + if !e.IsValid() { + return fmt.Errorf("%s is not a valid AssetSortType", str) + } + return nil +} + +func (e AssetSortType) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(e.String())) +} + type LayerEncodingFormat string const ( diff --git a/internal/adapter/gql/loader_asset.go b/internal/adapter/gql/loader_asset.go index 936af5194..fdf9159e8 
100644 --- a/internal/adapter/gql/loader_asset.go +++ b/internal/adapter/gql/loader_asset.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/asset" "github.com/reearth/reearth-backend/pkg/id" ) @@ -32,9 +33,10 @@ func (c *AssetLoader) Fetch(ctx context.Context, ids []id.AssetID) ([]*gqlmodel. return assets, nil } -func (c *AssetLoader) FindByTeam(ctx context.Context, teamID id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.AssetConnection, error) { - p := usecase.NewPagination(first, last, before, after) - assets, pi, err := c.usecase.FindByTeam(ctx, id.TeamID(teamID), p, getOperator(ctx)) +func (c *AssetLoader) FindByTeam(ctx context.Context, teamID id.ID, keyword *string, sort *asset.SortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { + p := gqlmodel.ToPagination(pagination) + + assets, pi, err := c.usecase.FindByTeam(ctx, id.TeamID(teamID), keyword, sort, p, getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_query.go b/internal/adapter/gql/resolver_query.go index 946dfb744..9adad2b10 100644 --- a/internal/adapter/gql/resolver_query.go +++ b/internal/adapter/gql/resolver_query.go @@ -14,8 +14,8 @@ func (r *Resolver) Query() QueryResolver { type queryResolver struct{ *Resolver } -func (r *queryResolver) Assets(ctx context.Context, teamID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { - return loaders(ctx).Asset.FindByTeam(ctx, teamID, first, last, before, after) +func (r *queryResolver) Assets(ctx context.Context, teamID id.ID, keyword *string, sortType *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { + return loaders(ctx).Asset.FindByTeam(ctx, 
teamID, keyword, gqlmodel.AssetSortTypeFrom(sortType), pagination) } func (r *queryResolver) Me(ctx context.Context) (*gqlmodel.User, error) { diff --git a/internal/adapter/gql/resolver_team.go b/internal/adapter/gql/resolver_team.go index c7a1c49c4..021412d78 100644 --- a/internal/adapter/gql/resolver_team.go +++ b/internal/adapter/gql/resolver_team.go @@ -19,7 +19,12 @@ func (r *Resolver) TeamMember() TeamMemberResolver { type teamResolver struct{ *Resolver } func (r *teamResolver) Assets(ctx context.Context, obj *gqlmodel.Team, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.AssetConnection, error) { - return loaders(ctx).Asset.FindByTeam(ctx, obj.ID, first, last, before, after) + return loaders(ctx).Asset.FindByTeam(ctx, obj.ID, nil, nil, &gqlmodel.Pagination{ + First: first, + Last: last, + After: after, + Before: before, + }) } func (r *teamResolver) Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index 72758d87e..557023f98 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -2,6 +2,8 @@ package memory import ( "context" + "sort" + "strings" "sync" "github.com/reearth/reearth-backend/internal/usecase" @@ -59,7 +61,7 @@ func (r *Asset) FindByIDs(ctx context.Context, ids []id.AssetID) ([]*asset.Asset return result, nil } -func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { +func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, filter repo.AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) { if !r.f.CanRead(id) { return nil, usecase.EmptyPageInfo(), nil } @@ -69,11 +71,27 @@ func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecas 
result := []*asset.Asset{} for _, d := range r.data { - if d.Team() == id { + if d.Team() == id && (filter.Keyword == nil || strings.Contains(d.Name(), *filter.Keyword)) { result = append(result, d) } } + if filter.Sort != nil { + s := *filter.Sort + sort.SliceStable(result, func(i, j int) bool { + if s == asset.SortTypeID { + return result[i].ID().ID().Compare(result[j].ID().ID()) < 0 + } + if s == asset.SortTypeSize { + return result[i].Size() < result[j].Size() + } + if s == asset.SortTypeName { + return strings.Compare(result[i].Name(), result[j].Name()) < 0 + } + return false + }) + } + var startCursor, endCursor *usecase.Cursor if len(result) > 0 { _startCursor := usecase.Cursor(result[0].ID().String()) diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index 214efe305..827069c07 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -2,8 +2,8 @@ package mongo import ( "context" - - "go.mongodb.org/mongo-driver/bson" + "fmt" + "regexp" "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase" @@ -12,6 +12,8 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/log" "github.com/reearth/reearth-backend/pkg/rerror" + "go.mongodb.org/mongo-driver/bson" + "go.mongodb.org/mongo-driver/bson/primitive" ) type assetRepo struct { @@ -50,13 +52,22 @@ func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID) ([]*asset.A return filterAssets(ids, res), nil } -func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { +func (r *assetRepo) FindByTeam(ctx context.Context, id id.TeamID, uFilter repo.AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) { if !r.f.CanRead(id) { return nil, usecase.EmptyPageInfo(), nil } - return r.paginate(ctx, bson.M{ + + var filter interface{} = 
bson.M{ "team": id.String(), - }, pagination) + } + + if uFilter.Keyword != nil { + filter = mongodoc.And(filter, "name", bson.M{ + "$regex": primitive.Regex{Pattern: fmt.Sprintf(".*%s.*", regexp.QuoteMeta(*uFilter.Keyword)), Options: "i"}, + }) + } + + return r.paginate(ctx, filter, uFilter.Sort, uFilter.Pagination) } func (r *assetRepo) Save(ctx context.Context, asset *asset.Asset) error { @@ -80,9 +91,15 @@ func (r *assetRepo) init() { } } -func (r *assetRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { +func (r *assetRepo) paginate(ctx context.Context, filter interface{}, sort *asset.SortType, pagination *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) { + var sortstr *string + if sort != nil { + sortstr2 := string(*sort) + sortstr = &sortstr2 + } + var c mongodoc.AssetConsumer - pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), sortstr, pagination, &c) if err != nil { return nil, nil, rerror.ErrInternalBy(err) } diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index 66bb748d5..66f62dbb5 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -320,7 +320,7 @@ func (r *datasetRepo) findOne(ctx context.Context, filter interface{}) (*dataset func (r *datasetRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { var c mongodoc.DatasetConsumer - pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), nil, pagination, &c) if err != nil { return nil, nil, rerror.ErrInternalBy(err) } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index aacc45eb4..928a5d4ed 100644 --- 
a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -168,7 +168,7 @@ func (r *datasetSchemaRepo) findOne(ctx context.Context, filter interface{}) (*d func (r *datasetSchemaRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*dataset.Schema, *usecase.PageInfo, error) { var c mongodoc.DatasetSchemaConsumer - pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), nil, pagination, &c) if err != nil { return nil, nil, rerror.ErrInternalBy(err) } diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index 7b6bc4117..7257c2fd5 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -193,45 +193,30 @@ func getCursor(raw bson.Raw, key string) (*usecase.Cursor, error) { return &c, nil } -func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p *usecase.Pagination, consumer Consumer) (*usecase.PageInfo, error) { +func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, sort *string, p *Pagination, consumer Consumer) (*usecase.PageInfo, error) { if p == nil { return nil, nil } coll := c.Collection(col) + const key = "id" - key := "id" + findOptions := options.Find().SetCollation(&options.Collation{Strength: 1, Locale: "en"}) + + sortOptions, sortKey := sortOptionsFrom(sort, p, key) + + findOptions.Sort = sortOptions count, err := coll.CountDocuments(ctx, filter) if err != nil { return nil, fmt.Errorf("failed to count documents: %v", err.Error()) } - reverse := false - var limit int64 - findOptions := options.Find() - if first := p.First; first != nil { - limit = int64(*first) - findOptions.Sort = bson.D{ - {Key: key, Value: 1}, - } - if after := p.After; after != nil { - filter = appendE(filter, bson.E{Key: key, Value: bson.D{ - {Key: 
"$gt", Value: *after}, - }}) - } - } - if last := p.Last; last != nil { - reverse = true - limit = int64(*last) - findOptions.Sort = bson.D{ - {Key: key, Value: -1}, - } - if before := p.Before; before != nil { - filter = appendE(filter, bson.E{Key: key, Value: bson.D{ - {Key: "$lt", Value: *before}, - }}) - } + filter, limit, err := paginationFilter(ctx, coll, p, sortKey, key, filter) + if err != nil { + return nil, err } + + // ๆ›ดใซ่ชญใ‚ใ‚‹่ฆ็ด ใŒใ‚ใ‚‹ใฎใ‹็ขบใ‹ใ‚ใ‚‹ใŸใ‚ใซไธ€ใคๅคšใ‚ใซ่ชญใฟๅ‡บใ™ // Read one more element so that we can see whether there's a further one limit++ findOptions.Limit = &limit @@ -261,13 +246,6 @@ func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p results = results[:len(results)-1] } - if reverse { - for i := len(results)/2 - 1; i >= 0; i-- { - opp := len(results) - 1 - i - results[i], results[opp] = results[opp], results[i] - } - } - for _, result := range results { if err := consumer.Consume(result); err != nil { return nil, err @@ -303,6 +281,56 @@ func (c *Client) Paginate(ctx context.Context, col string, filter interface{}, p return usecase.NewPageInfo(int(count), startCursor, endCursor, hasNextPage, hasPreviousPage), nil } +func sortOptionsFrom(sort *string, p *Pagination, key string) (bson.D, string) { + var sortOptions bson.D + var sortKey = "" + if sort != nil && len(*sort) > 0 && *sort != "id" { + sortKey = *sort + sortOptions = append(sortOptions, bson.E{Key: sortKey, Value: p.SortDirection()}) + } + sortOptions = append(sortOptions, bson.E{Key: key, Value: p.SortDirection()}) + return sortOptions, sortKey +} + +func paginationFilter(ctx context.Context, coll *mongo.Collection, p *Pagination, sortKey, key string, filter interface{}) (interface{}, int64, error) { + limit, op, cur, err := p.Parameters() + if err != nil { + return nil, 0, fmt.Errorf("failed to parse pagination parameters: %w", err) + } + + var paginationFilter bson.M + + if cur != nil { + + if sortKey == "" { + 
paginationFilter = bson.M{key: bson.M{op: *cur}} + } else { + var curObj bson.M + if err := coll.FindOne(ctx, bson.M{key: *cur}).Decode(&curObj); err != nil { + return nil, 0, fmt.Errorf("failed to find cursor element") + } + if curObj[sortKey] == nil { + return nil, 0, fmt.Errorf("invalied sort key") + } + paginationFilter = bson.M{ + "$or": []bson.M{ + {sortKey: bson.M{op: curObj[sortKey]}}, + { + sortKey: curObj[sortKey], + key: bson.M{op: *cur}, + }, + }, + } + } + } + + return And( + filter, + "", + paginationFilter, + ), limit, nil +} + func (c *Client) CreateIndex(ctx context.Context, col string, keys []string) []string { return c.CreateUniqueIndex(ctx, col, keys, []string{}) } diff --git a/internal/infrastructure/mongo/mongodoc/clientcol.go b/internal/infrastructure/mongo/mongodoc/clientcol.go index b6c6d5d3d..5a2d29ae4 100644 --- a/internal/infrastructure/mongo/mongodoc/clientcol.go +++ b/internal/infrastructure/mongo/mongodoc/clientcol.go @@ -28,8 +28,8 @@ func (c *ClientCollection) Count(ctx context.Context, filter interface{}) (int64 return c.Client.Count(ctx, c.CollectionName, filter) } -func (c *ClientCollection) Paginate(ctx context.Context, filter interface{}, p *usecase.Pagination, consumer Consumer) (*usecase.PageInfo, error) { - return c.Client.Paginate(ctx, c.CollectionName, filter, p, consumer) +func (c *ClientCollection) Paginate(ctx context.Context, filter interface{}, sort *string, p *usecase.Pagination, consumer Consumer) (*usecase.PageInfo, error) { + return c.Client.Paginate(ctx, c.CollectionName, filter, sort, PaginationFrom(p), consumer) } func (c *ClientCollection) SaveOne(ctx context.Context, id string, replacement interface{}) error { diff --git a/internal/infrastructure/mongo/mongodoc/pagination.go b/internal/infrastructure/mongo/mongodoc/pagination.go new file mode 100644 index 000000000..ce0f75042 --- /dev/null +++ b/internal/infrastructure/mongo/mongodoc/pagination.go @@ -0,0 +1,49 @@ +package mongodoc + +import ( + "errors" + + 
"github.com/reearth/reearth-backend/internal/usecase" +) + +type Pagination struct { + Before *string + After *string + First *int + Last *int +} + +func PaginationFrom(pagination *usecase.Pagination) *Pagination { + if pagination == nil { + return nil + } + return &Pagination{ + Before: (*string)(pagination.Before), + After: (*string)(pagination.After), + First: pagination.First, + Last: pagination.Last, + } +} + +func (p *Pagination) SortDirection() int { + if p != nil && p.Last != nil { + return -1 + } + return 1 +} + +func (p *Pagination) Parameters() (limit int64, op string, cursor *string, err error) { + if first, after := p.First, p.After; first != nil { + limit = int64(*first) + op = "$gt" + cursor = after + return + } + if last, before := p.Last, p.Before; last != nil { + limit = int64(*last) + op = "$lt" + cursor = before + return + } + return 0, "", nil, errors.New("neither first nor last are set") +} diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 8b10f6994..7377b3307 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -123,7 +123,7 @@ func (r *projectRepo) findOne(ctx context.Context, filter interface{}) (*project func (r *projectRepo) paginate(ctx context.Context, filter bson.M, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { var c mongodoc.ProjectConsumer - pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), pagination, &c) + pageInfo, err := r.client.Paginate(ctx, r.readFilter(filter), nil, pagination, &c) if err != nil { return nil, nil, rerror.ErrInternalBy(err) } diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index ce99847f9..1bf1331d7 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -30,12 +30,16 @@ func (i *Asset) Fetch(ctx context.Context, assets []id.AssetID, operator *usecas return 
i.repos.Asset.FindByIDs(ctx, assets) } -func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { +func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, keyword *string, sort *asset.SortType, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { if err := i.CanReadTeam(tid, operator); err != nil { return nil, nil, err } - return i.repos.Asset.FindByTeam(ctx, tid, p) + return i.repos.Asset.FindByTeam(ctx, tid, repo.AssetFilter{ + Sort: sort, + Keyword: keyword, + Pagination: p, + }) } func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { diff --git a/internal/usecase/interfaces/asset.go b/internal/usecase/interfaces/asset.go index 57a43e22f..1028d7b27 100644 --- a/internal/usecase/interfaces/asset.go +++ b/internal/usecase/interfaces/asset.go @@ -10,6 +10,14 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) +type AssetFilterType string + +const ( + AssetFilterDate AssetFilterType = "DATE" + AssetFilterSize AssetFilterType = "SIZE" + AssetFilterName AssetFilterType = "NAME" +) + type CreateAssetParam struct { TeamID id.TeamID File *file.File @@ -21,7 +29,7 @@ var ( type Asset interface { Fetch(context.Context, []id.AssetID, *usecase.Operator) ([]*asset.Asset, error) - FindByTeam(context.Context, id.TeamID, *usecase.Pagination, *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) + FindByTeam(context.Context, id.TeamID, *string, *asset.SortType, *usecase.Pagination, *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) Create(context.Context, CreateAssetParam, *usecase.Operator) (*asset.Asset, error) Remove(context.Context, id.AssetID, *usecase.Operator) (id.AssetID, error) } diff --git a/internal/usecase/repo/asset.go b/internal/usecase/repo/asset.go index e14a42d6c..5010b5101 100644 --- 
a/internal/usecase/repo/asset.go +++ b/internal/usecase/repo/asset.go @@ -8,9 +8,15 @@ import ( "github.com/reearth/reearth-backend/pkg/id" ) +type AssetFilter struct { + Sort *asset.SortType + Keyword *string + Pagination *usecase.Pagination +} + type Asset interface { Filtered(TeamFilter) Asset - FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*asset.Asset, *usecase.PageInfo, error) + FindByTeam(context.Context, id.TeamID, AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) FindByID(context.Context, id.AssetID) (*asset.Asset, error) FindByIDs(context.Context, []id.AssetID) ([]*asset.Asset, error) Save(context.Context, *asset.Asset) error diff --git a/pkg/asset/sort_type.go b/pkg/asset/sort_type.go new file mode 100644 index 000000000..ab44259a0 --- /dev/null +++ b/pkg/asset/sort_type.go @@ -0,0 +1,37 @@ +package asset + +import ( + "errors" + "strings" +) + +var ( + SortTypeID = SortType("id") + SortTypeName = SortType("name") + SortTypeSize = SortType("size") + + ErrInvalidSortType = errors.New("invalid sort type") +) + +type SortType string + +func check(role SortType) bool { + switch role { + case SortTypeID: + return true + case SortTypeName: + return true + case SortTypeSize: + return true + } + return false +} + +func SortTypeFromString(r string) (SortType, error) { + role := SortType(strings.ToLower(r)) + + if check(role) { + return role, nil + } + return role, ErrInvalidSortType +} diff --git a/pkg/asset/sort_type_test.go b/pkg/asset/sort_type_test.go new file mode 100644 index 000000000..4e5cf49c8 --- /dev/null +++ b/pkg/asset/sort_type_test.go @@ -0,0 +1,79 @@ +package asset + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSortTypeFromString(t *testing.T) { + tests := []struct { + Name, Role string + Expected SortType + Err error + }{ + { + Name: "Success id", + Role: "id", + Expected: SortType("id"), + Err: nil, + }, + { + Name: "fail invalid sort type", + Role: "xxx", + Expected: SortType("xxx"), + 
Err: ErrInvalidSortType, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res, err := SortTypeFromString(tt.Role) + if tt.Err == nil { + assert.Equal(t, tt.Expected, res) + } else { + assert.Equal(t, tt.Err, err) + } + }) + } +} + +func TestCheck(t *testing.T) { + tests := []struct { + Name string + Input SortType + Expected bool + }{ + { + Name: "check id", + Input: SortType("id"), + Expected: true, + }, + { + Name: "check name", + Input: SortType("name"), + Expected: true, + }, + { + Name: "check size", + Input: SortType("size"), + Expected: true, + }, + { + Name: "check unknown sort type", + Input: SortType("xxx"), + Expected: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + res := check(tt.Input) + assert.Equal(t, tt.Expected, res) + }) + } +} diff --git a/schema.graphql b/schema.graphql index 76c819213..4876423fe 100644 --- a/schema.graphql +++ b/schema.graphql @@ -96,6 +96,13 @@ type Rect { north: Float! } +input Pagination{ + first: Int + last: Int + after: Cursor + before: Cursor +} + enum TextAlign { LEFT CENTER @@ -144,6 +151,12 @@ type Asset implements Node { team: Team @goField(forceResolver: true) } +enum AssetSortType { + DATE + SIZE + NAME +} + # User type User implements Node { @@ -1562,10 +1575,9 @@ type Query { scene(projectId: ID!): Scene assets( teamId: ID! - first: Int - last: Int - after: Cursor - before: Cursor + keyword: String + sort: AssetSortType + pagination: Pagination ): AssetConnection! projects( teamId: ID! 
From 232e75e814af1055d19727bca08ffc9e6b835538 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 16 Mar 2022 17:57:00 +0900 Subject: [PATCH 172/253] fix: auth0 setting is not used by JWT verification middleware --- internal/app/config.go | 39 ++++++++++++++++++++++++++----------- internal/app/config_test.go | 21 ++++++++++++++++++++ internal/app/jwt.go | 20 +++++++++++++------ 3 files changed, 63 insertions(+), 17 deletions(-) create mode 100644 internal/app/config_test.go diff --git a/internal/app/config.go b/internal/app/config.go index 39852a8bb..3b4fdabaa 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -123,6 +123,34 @@ func (c Config) Print() string { return s } +func (c Config) Auths() []AuthConfig { + if ac := c.Auth0.AuthConfig(); ac != nil { + return append(c.Auth, *ac) + } + return c.Auth +} + +func (c Auth0Config) AuthConfig() *AuthConfig { + domain := c.Domain + if c.Domain == "" { + return nil + } + if !strings.HasPrefix(domain, "https://") && !strings.HasPrefix(domain, "http://") { + domain = "https://" + domain + } + if !strings.HasSuffix(domain, "/") { + domain = domain + "/" + } + aud := []string{} + if c.Audience != "" { + aud = append(aud, c.Audience) + } + return &AuthConfig{ + ISS: domain, + AUD: aud, + } +} + type AuthConfig struct { ISS string AUD []string @@ -141,17 +169,6 @@ func (ipd *AuthConfigs) Decode(value string) error { return fmt.Errorf("invalid identity providers json: %w", err) } - for i := range providers { - if providers[i].TTL == nil { - providers[i].TTL = new(int) - *providers[i].TTL = 5 - } - if providers[i].ALG == nil { - providers[i].ALG = new(string) - *providers[i].ALG = "RS256" - } - } - *ipd = providers return nil } diff --git a/internal/app/config_test.go b/internal/app/config_test.go new file mode 100644 index 000000000..66ce6f96b --- /dev/null +++ b/internal/app/config_test.go @@ -0,0 +1,21 @@ +package app + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func 
TestAuth0Config_AuthConfig(t *testing.T) { + assert.Equal(t, &AuthConfig{ + ISS: "https://hoge.auth0.com/", + AUD: []string{"xxx"}, + }, Auth0Config{ + Domain: "hoge.auth0.com", + Audience: "xxx", + }.AuthConfig()) + assert.Nil(t, Auth0Config{ + Domain: "", + Audience: "xxx", + }.AuthConfig()) +} diff --git a/internal/app/jwt.go b/internal/app/jwt.go index f0d5927a3..32e9020a9 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -19,6 +19,7 @@ const ( debugUserHeader = "X-Reearth-Debug-User" contextAuth0Sub contextKey = "auth0Sub" contextUser contextKey = "reearth_user" + defaultJWTTTL = 5 * time.Minute ) type MultiValidator []*validator.Validator @@ -26,15 +27,24 @@ type MultiValidator []*validator.Validator func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { validators := make([]*validator.Validator, 0, len(providers)) for _, p := range providers { - issuerURL, err := url.Parse(p.ISS) if err != nil { return nil, fmt.Errorf("failed to parse the issuer url: %w", err) } - provider := jwks.NewCachingProvider(issuerURL, time.Duration(*p.TTL)*time.Minute) + var ttl time.Duration + if p.TTL != nil { + ttl = time.Duration(*p.TTL) * time.Minute + } else { + ttl = defaultJWTTTL + } + provider := jwks.NewCachingProvider(issuerURL, ttl) - algorithm := validator.SignatureAlgorithm(*p.ALG) + alg := "RS256" + if p.ALG != nil && *p.ALG != "" { + alg = *p.ALG + } + algorithm := validator.SignatureAlgorithm(alg) v, err := validator.New( provider.KeyFunc, @@ -64,8 +74,7 @@ func (mv MultiValidator) ValidateToken(ctx context.Context, tokenString string) // Validate the access token and inject the user clams into ctx func jwtEchoMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { - - jwtValidator, err := NewMultiValidator(cfg.Config.Auth) + jwtValidator, err := NewMultiValidator(cfg.Config.Auths()) if err != nil { log.Fatalf("failed to set up the validator: %v", err) } @@ -84,7 +93,6 @@ func parseJwtMiddleware() echo.MiddlewareFunc { rawClaims := 
ctx.Value(jwtmiddleware.ContextKey{}) if claims, ok := rawClaims.(*validator.ValidatedClaims); ok { - // attach sub and access token to context ctx = context.WithValue(ctx, contextAuth0Sub, claims.RegisteredClaims.Subject) } From 7caf68f64eacd39709ecec07b97fce9a9aa1fcc8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 16 Mar 2022 18:14:59 +0900 Subject: [PATCH 173/253] fix: invalid mongo queries of pagination --- .../infrastructure/mongo/mongodoc/util.go | 22 +++++++++++++++++++ .../mongo/mongodoc/util_test.go | 8 +++++++ 2 files changed, 30 insertions(+) diff --git a/internal/infrastructure/mongo/mongodoc/util.go b/internal/infrastructure/mongo/mongodoc/util.go index 9d854e29f..175407ed7 100644 --- a/internal/infrastructure/mongo/mongodoc/util.go +++ b/internal/infrastructure/mongo/mongodoc/util.go @@ -105,6 +105,28 @@ func And(filter interface{}, key string, f interface{}) interface{} { if f == nil { return filter } + if g, ok := f.(bson.M); ok && g == nil { + return filter + } + if g, ok := f.(bson.D); ok && g == nil { + return filter + } + if g, ok := f.(bson.A); ok && g == nil { + return filter + } + if g, ok := f.([]interface{}); ok && g == nil { + return filter + } + if g, ok := f.([]bson.M); ok && g == nil { + return filter + } + if g, ok := f.([]bson.D); ok && g == nil { + return filter + } + if g, ok := f.([]bson.A); ok && g == nil { + return filter + } + if key != "" && getE(filter, key) != nil { return filter } diff --git a/internal/infrastructure/mongo/mongodoc/util_test.go b/internal/infrastructure/mongo/mongodoc/util_test.go index 8bcde2c14..806e0a128 100644 --- a/internal/infrastructure/mongo/mongodoc/util_test.go +++ b/internal/infrastructure/mongo/mongodoc/util_test.go @@ -40,6 +40,14 @@ func TestGetE(t *testing.T) { func TestAnd(t *testing.T) { assert.Equal(t, bson.M{"x": "y"}, And(bson.M{}, "x", "y")) assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "x", "y")) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", nil)) + 
assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", bson.M(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", bson.D(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", bson.A(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []bson.M(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []bson.D(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []bson.A(nil))) + assert.Equal(t, bson.M{"x": "z"}, And(bson.M{"x": "z"}, "", []interface{}(nil))) assert.Equal(t, bson.M{ "$and": []interface{}{ bson.M{"$or": []bson.M{{"a": "b"}}}, From 570fe7a79dc0c789f69d4a7066188cc91547c577 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 17 Mar 2022 11:29:33 +0900 Subject: [PATCH 174/253] fix: auth config not loaded expectedly --- internal/app/app.go | 18 +++-- internal/app/config.go | 106 ++++++++++++++++++++-------- internal/app/config_test.go | 11 +++ internal/app/web.go | 21 +++--- internal/usecase/interactor/auth.go | 3 - pkg/auth/client.go | 4 +- 6 files changed, 114 insertions(+), 49 deletions(-) diff --git a/internal/app/app.go b/internal/app/app.go index 3f98f93e8..627146806 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -84,16 +84,20 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { })) // auth srv - auth := e.Group("") - authEndPoints(ctx, e, auth, cfg) + if !cfg.Config.AuthSrv.Disabled { + auth := e.Group("") + authEndPoints(ctx, e, auth, cfg) + } // apis api := e.Group("/api") api.GET("/ping", Ping()) - api.POST("/signup", Signup()) - api.POST("/signup/verify", StartSignupVerify()) - api.POST("/signup/verify/:code", SignupVerify()) - api.POST("/password-reset", PasswordReset()) + if cfg.Config.AuthSrv.Disabled { + api.POST("/signup", Signup()) + api.POST("/signup/verify", StartSignupVerify()) + api.POST("/signup/verify/:code", SignupVerify()) + api.POST("/password-reset", PasswordReset()) + } api.GET("/published/:name", PublishedMetadata()) 
api.GET("/published_data/:name", PublishedData()) @@ -107,7 +111,7 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { published.GET("/:name/", PublishedIndex()) serveFiles(e, cfg.Gateways.File) - web(e, cfg.Config.Web, cfg.Config.Auth0) + web(e, cfg.Config.Web, cfg.Config.Auths()) return e } diff --git a/internal/app/config.go b/internal/app/config.go index 3b4fdabaa..f3b3e1311 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -10,32 +10,40 @@ import ( "github.com/caos/oidc/pkg/op" "github.com/joho/godotenv" "github.com/kelseyhightower/envconfig" + "github.com/reearth/reearth-backend/pkg/auth" "github.com/reearth/reearth-backend/pkg/log" ) const configPrefix = "reearth" type Config struct { - Port string `default:"8080" envconfig:"PORT"` - Dev bool - DB string `default:"mongodb://localhost"` - Auth0 Auth0Config - AuthSrv AuthSrvConfig - Auth AuthConfigs - Mailer string - SMTP SMTPConfig - SendGrid SendGridConfig - GraphQL GraphQLConfig - Published PublishedConfig - GCPProject string `envconfig:"GOOGLE_CLOUD_PROJECT"` - Profiler string - Tracer string - TracerSample float64 - GCS GCSConfig - AssetBaseURL string `default:"http://localhost:8080/assets"` - Origins []string - Web WebConfig - SignupSecret string + Port string `default:"8080" envconfig:"PORT"` + Dev bool + DB string `default:"mongodb://localhost"` + Mailer string + SMTP SMTPConfig + SendGrid SendGridConfig + GraphQL GraphQLConfig + Published PublishedConfig + GCPProject string `envconfig:"GOOGLE_CLOUD_PROJECT"` + Profiler string + Tracer string + TracerSample float64 + GCS GCSConfig + AssetBaseURL string `default:"http://localhost:8080/assets"` + Origins []string + Web WebConfig + SignupSecret string + SignupDisabled bool + // auth + Auth AuthConfigs + Auth0 Auth0Config + AuthSrv AuthSrvConfig + Auth_ISS string + Auth_AUD string + Auth_ALG *string + Auth_TTL *int + Auth_ClientID *string } type Auth0Config struct { @@ -47,13 +55,32 @@ type Auth0Config struct { } type 
AuthSrvConfig struct { + Disabled bool Domain string `default:"http://localhost:8080"` UIDomain string `default:"http://localhost:8080"` Key string - DN *AuthDNConfig + DN *AuthSrvDNConfig } -type AuthDNConfig struct { +func (c AuthSrvConfig) AuthConfig(debug bool) *AuthConfig { + if c.Disabled { + return nil + } + var aud []string + if debug { + aud = []string{"http://localhost:8080", c.Domain} + } else { + aud = []string{c.Domain} + } + clientID := auth.ClientID + return &AuthConfig{ + ISS: c.Domain, + AUD: aud, + ClientID: &clientID, + } +} + +type AuthSrvDNConfig struct { CN string O []string OU []string @@ -123,11 +150,27 @@ func (c Config) Print() string { return s } -func (c Config) Auths() []AuthConfig { +func (c Config) Auths() (res []AuthConfig) { if ac := c.Auth0.AuthConfig(); ac != nil { - return append(c.Auth, *ac) + res = append(res, *ac) + } + if c.Auth_ISS != "" { + var aud []string + if len(c.Auth_AUD) > 0 { + aud = append(aud, c.Auth_AUD) + } + res = append(res, AuthConfig{ + ISS: c.Auth_ISS, + AUD: aud, + ALG: c.Auth_ALG, + TTL: c.Auth_TTL, + ClientID: c.Auth_ClientID, + }) + } + if ac := c.AuthSrv.AuthConfig(c.Dev); ac != nil { + res = append(res, *ac) } - return c.Auth + return append(res, c.Auth...) 
} func (c Auth0Config) AuthConfig() *AuthConfig { @@ -152,16 +195,21 @@ func (c Auth0Config) AuthConfig() *AuthConfig { } type AuthConfig struct { - ISS string - AUD []string - ALG *string - TTL *int + ISS string + AUD []string + ALG *string + TTL *int + ClientID *string } type AuthConfigs []AuthConfig // Decode is a custom decoder for AuthConfigs func (ipd *AuthConfigs) Decode(value string) error { + if value == "" { + return nil + } + var providers []AuthConfig err := json.Unmarshal([]byte(value), &providers) diff --git a/internal/app/config_test.go b/internal/app/config_test.go index 66ce6f96b..6f12a995b 100644 --- a/internal/app/config_test.go +++ b/internal/app/config_test.go @@ -19,3 +19,14 @@ func TestAuth0Config_AuthConfig(t *testing.T) { Audience: "xxx", }.AuthConfig()) } + +func TestReadConfig(t *testing.T) { + t.Setenv("REEARTH_AUTH", `[{"iss":"bar"}]`) + t.Setenv("REEARTH_AUTH_ISS", "hoge") + t.Setenv("REEARTH_AUTH_AUD", "foo") + cfg, err := ReadConfig(false) + assert.NoError(t, err) + assert.Equal(t, AuthConfigs([]AuthConfig{{ISS: "bar"}}), cfg.Auth) + assert.Equal(t, "hoge", cfg.Auth_ISS) + assert.Equal(t, "foo", cfg.Auth_AUD) +} diff --git a/internal/app/web.go b/internal/app/web.go index 6bf1d6d50..9a195fc30 100644 --- a/internal/app/web.go +++ b/internal/app/web.go @@ -10,7 +10,7 @@ import ( type WebConfig map[string]string -func web(e *echo.Echo, wc WebConfig, ac Auth0Config) { +func web(e *echo.Echo, wc WebConfig, a []AuthConfig) { if _, err := os.Stat("web"); err != nil { return // web won't be delivered } @@ -18,14 +18,17 @@ func web(e *echo.Echo, wc WebConfig, ac Auth0Config) { e.Logger.Info("web: web directory will be delivered\n") config := map[string]string{} - if ac.Domain != "" { - config["auth0Domain"] = ac.Domain - } - if ac.WebClientID != "" { - config["auth0ClientId"] = ac.WebClientID - } - if ac.Audience != "" { - config["auth0Audience"] = ac.Audience + if len(a) > 0 { + ac := a[0] + if ac.ISS != "" { + config["auth0Domain"] = ac.ISS 
+ } + if ac.ClientID != nil { + config["auth0ClientId"] = *ac.ClientID + } + if len(ac.AUD) > 0 { + config["auth0Audience"] = ac.AUD[0] + } } for k, v := range wc { config[k] = v diff --git a/internal/usecase/interactor/auth.go b/internal/usecase/interactor/auth.go index a3f3b084d..ed75c3385 100644 --- a/internal/usecase/interactor/auth.go +++ b/internal/usecase/interactor/auth.go @@ -63,7 +63,6 @@ var dummyName = pkix.Name{ } func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRequest, config repo.Config, getUserBySubject func(context.Context, string) (*user.User, error)) (op.Storage, error) { - client := auth.NewLocalClient(cfg.Debug, cfg.ClientDomain) name := dummyName @@ -127,7 +126,6 @@ func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRe } func initKeys(keyBytes, certBytes []byte) (*rsa.PrivateKey, *jose.SigningKey, *jose.JSONWebKeySet, error) { - block, _ := pem.Decode(keyBytes) if block == nil { return nil, nil, nil, fmt.Errorf("failed to decode the key bytes") @@ -255,7 +253,6 @@ func (s *AuthStorage) AuthRequestBySubject(ctx context.Context, subject string) } func (s *AuthStorage) SaveAuthCode(ctx context.Context, requestID, code string) error { - request, err := s.AuthRequestByID(ctx, requestID) if err != nil { return err diff --git a/pkg/auth/client.go b/pkg/auth/client.go index 884848c38..83316be67 100644 --- a/pkg/auth/client.go +++ b/pkg/auth/client.go @@ -8,6 +8,8 @@ import ( "github.com/caos/oidc/pkg/op" ) +const ClientID = "01FH69GFQ4DFCXS5XD91JK4HZ1" + type Client struct { id string applicationType op.ApplicationType @@ -26,7 +28,7 @@ type Client struct { func NewLocalClient(devMode bool, clientDomain string) op.Client { return &Client{ - id: "01FH69GFQ4DFCXS5XD91JK4HZ1", + id: ClientID, applicationType: op.ApplicationTypeWeb, authMethod: oidc.AuthMethodNone, accessTokenType: op.AccessTokenTypeJWT, From 5df25f2356fb55a6112619d00d6932be7a301348 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 
17 Mar 2022 11:54:03 +0900 Subject: [PATCH 175/253] fix: users cannot creates a new team and scene --- internal/adapter/gql/resolver_mutation_team.go | 2 +- internal/infrastructure/fs/plugin.go | 2 +- internal/infrastructure/fs/property_schema.go | 2 +- internal/infrastructure/memory/asset.go | 2 +- internal/infrastructure/memory/dataset.go | 2 +- internal/infrastructure/memory/dataset_schema.go | 2 +- internal/infrastructure/memory/layer.go | 2 +- internal/infrastructure/memory/plugin.go | 2 +- internal/infrastructure/memory/project.go | 2 +- internal/infrastructure/memory/property.go | 2 +- internal/infrastructure/memory/property_schema.go | 2 +- internal/infrastructure/memory/scene.go | 2 +- internal/infrastructure/memory/tag.go | 2 +- internal/infrastructure/mongo/asset.go | 2 +- internal/infrastructure/mongo/dataset.go | 2 +- internal/infrastructure/mongo/dataset_schema.go | 2 +- internal/infrastructure/mongo/layer.go | 2 +- internal/infrastructure/mongo/plugin.go | 2 +- internal/infrastructure/mongo/project.go | 2 +- internal/infrastructure/mongo/property.go | 2 +- internal/infrastructure/mongo/property_schema.go | 2 +- internal/infrastructure/mongo/scene.go | 2 +- internal/infrastructure/mongo/tag.go | 2 +- internal/usecase/interactor/scene.go | 14 ++++++++------ internal/usecase/interactor/team.go | 9 ++++----- internal/usecase/interactor/team_test.go | 8 ++++---- internal/usecase/interfaces/team.go | 2 +- internal/usecase/operator.go | 12 ++++++++++++ internal/usecase/repo/container.go | 14 ++++++++++++++ 29 files changed, 66 insertions(+), 39 deletions(-) diff --git a/internal/adapter/gql/resolver_mutation_team.go b/internal/adapter/gql/resolver_mutation_team.go index 4595933a4..0e4037a17 100644 --- a/internal/adapter/gql/resolver_mutation_team.go +++ b/internal/adapter/gql/resolver_mutation_team.go @@ -8,7 +8,7 @@ import ( ) func (r *mutationResolver) CreateTeam(ctx context.Context, input gqlmodel.CreateTeamInput) (*gqlmodel.CreateTeamPayload, error) { - 
res, err := usecases(ctx).Team.Create(ctx, input.Name, getUser(ctx).ID()) + res, err := usecases(ctx).Team.Create(ctx, input.Name, getUser(ctx).ID(), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/infrastructure/fs/plugin.go b/internal/infrastructure/fs/plugin.go index 8b993ac49..e19501088 100644 --- a/internal/infrastructure/fs/plugin.go +++ b/internal/infrastructure/fs/plugin.go @@ -28,7 +28,7 @@ func NewPlugin(fs afero.Fs) repo.Plugin { func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { return &pluginRepo{ fs: r.fs, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/fs/property_schema.go b/internal/infrastructure/fs/property_schema.go index 47353ad3e..ffb0b9619 100644 --- a/internal/infrastructure/fs/property_schema.go +++ b/internal/infrastructure/fs/property_schema.go @@ -25,7 +25,7 @@ func NewPropertySchema(fs afero.Fs) repo.PropertySchema { func (r *propertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { return &propertySchema{ fs: r.fs, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index 557023f98..08df3ed25 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -29,7 +29,7 @@ func (r *Asset) Filtered(f repo.TeamFilter) repo.Asset { return &Asset{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go index cea85ccd8..3c4d3f7a9 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -27,7 +27,7 @@ func (r *Dataset) Filtered(f repo.SceneFilter) repo.Dataset { return &Dataset{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git 
a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go index dfad074cf..75081eb64 100644 --- a/internal/infrastructure/memory/dataset_schema.go +++ b/internal/infrastructure/memory/dataset_schema.go @@ -27,7 +27,7 @@ func (r *DatasetSchema) Filtered(f repo.SceneFilter) repo.DatasetSchema { return &DatasetSchema{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index afdddde30..eefab91ab 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -26,7 +26,7 @@ func (r *Layer) Filtered(f repo.SceneFilter) repo.Layer { return &Layer{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index d9361716b..68b8a8ad3 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -28,7 +28,7 @@ func (r *Plugin) Filtered(f repo.SceneFilter) repo.Plugin { return &Plugin{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/project.go b/internal/infrastructure/memory/project.go index e6c28ca3b..48c6127c3 100644 --- a/internal/infrastructure/memory/project.go +++ b/internal/infrastructure/memory/project.go @@ -28,7 +28,7 @@ func (r *Project) Filtered(f repo.TeamFilter) repo.Project { return &Project{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index c4dd03505..715481dd4 100644 --- 
a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -28,7 +28,7 @@ func (r *Property) Filtered(f repo.SceneFilter) repo.Property { return &Property{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index 5df9558da..e67b39630 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -32,7 +32,7 @@ func (r *PropertySchema) Filtered(f repo.SceneFilter) repo.PropertySchema { return &PropertySchema{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index b067222fb..7032a92bf 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -28,7 +28,7 @@ func (r *Scene) Filtered(f repo.TeamFilter) repo.Scene { return &Scene{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/memory/tag.go b/internal/infrastructure/memory/tag.go index a92e7639b..ec206bfd6 100644 --- a/internal/infrastructure/memory/tag.go +++ b/internal/infrastructure/memory/tag.go @@ -27,7 +27,7 @@ func (r *Tag) Filtered(f repo.SceneFilter) repo.Tag { return &Tag{ // note data is shared between the source repo and mutex cannot work well data: r.data, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index 827069c07..c06206be4 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -30,7 +30,7 @@ func NewAsset(client *mongodoc.Client) repo.Asset { func (r *assetRepo) 
Filtered(f repo.TeamFilter) repo.Asset { return &assetRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index 66f62dbb5..be2db28cf 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -30,7 +30,7 @@ func NewDataset(client *mongodoc.Client) repo.Dataset { func (r *datasetRepo) Filtered(f repo.SceneFilter) repo.Dataset { return &datasetRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index 928a5d4ed..2ad8d995d 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -35,7 +35,7 @@ func (r *datasetSchemaRepo) init() { func (r *datasetSchemaRepo) Filtered(f repo.SceneFilter) repo.DatasetSchema { return &datasetSchemaRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index e5e1b3878..e12834c65 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -34,7 +34,7 @@ func (r *layerRepo) init() { func (r *layerRepo) Filtered(f repo.SceneFilter) repo.Layer { return &layerRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index 9bd95e563..ebadde8ec 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -36,7 +36,7 @@ func (r *pluginRepo) init() { func (r *pluginRepo) Filtered(f repo.SceneFilter) repo.Plugin { return &pluginRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 7377b3307..257b088fa 100644 --- 
a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -35,7 +35,7 @@ func (r *projectRepo) init() { func (r *projectRepo) Filtered(f repo.TeamFilter) repo.Project { return &projectRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index a0f3de5a8..f6311259d 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -33,7 +33,7 @@ func (r *propertyRepo) init() { func (r *propertyRepo) Filtered(f repo.SceneFilter) repo.Property { return &propertyRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index f06933f59..87a28b078 100644 --- a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -34,7 +34,7 @@ func (r *propertySchemaRepo) init() { func (r *propertySchemaRepo) Filtered(f repo.SceneFilter) repo.PropertySchema { return &propertySchemaRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index 140ed42e2..4c001ffe0 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -34,7 +34,7 @@ func (r *sceneRepo) init() { func (r *sceneRepo) Filtered(f repo.TeamFilter) repo.Scene { return &sceneRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index 9fb6914b3..7daae79f1 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -34,7 +34,7 @@ func (r *tagRepo) init() { func (r *tagRepo) Filtered(f repo.SceneFilter) repo.Tag { return &tagRepo{ client: r.client, - f: f.Clone(), + f: r.f.Merge(f), } } diff --git 
a/internal/usecase/interactor/scene.go b/internal/usecase/interactor/scene.go index fe8dc936c..f45c5eed4 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -74,7 +74,8 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. if err != nil { return nil, err } - if err := i.CanWriteTeam(prj.Team(), operator); err != nil { + team := prj.Team() + if err := i.CanWriteTeam(team, operator); err != nil { return nil, err } @@ -100,7 +101,7 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. g := p.GetOrCreateGroupList(schema, property.PointItemBySchema(tiles)) g.Add(property.NewGroup().NewID().SchemaGroup(tiles).MustBuild(), -1) - scene, err := scene.New(). + res, err := scene.New(). ID(sceneID). Project(pid). Team(prj.Team()). @@ -114,24 +115,25 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. } if p != nil { - err = i.propertyRepo.Save(ctx, p) + err = i.propertyRepo.Filtered(repo.SceneFilter{Writable: scene.IDList{sceneID}}).Save(ctx, p) if err != nil { return nil, err } } - err = i.layerRepo.Save(ctx, rootLayer) + err = i.layerRepo.Filtered(repo.SceneFilter{Writable: scene.IDList{sceneID}}).Save(ctx, rootLayer) if err != nil { return nil, err } - err = i.sceneRepo.Save(ctx, scene) + err = i.sceneRepo.Save(ctx, res) if err != nil { return nil, err } + operator.AddNewScene(team, sceneID) tx.Commit() - return scene, err + return res, err } func (s *Scene) FetchLock(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]scene.LockMode, error) { diff --git a/internal/usecase/interactor/team.go b/internal/usecase/interactor/team.go index 6201f82b2..0f012b3d1 100644 --- a/internal/usecase/interactor/team.go +++ b/internal/usecase/interactor/team.go @@ -45,7 +45,7 @@ func (i *Team) FindByUser(ctx context.Context, id id.UserID, operator *usecase.O return res2, err } -func (i *Team) Create(ctx context.Context, name string, 
firstUser id.UserID) (_ *user.Team, err error) { +func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID, operator *usecase.Operator) (_ *user.Team, err error) { tx, err := i.transaction.Begin() if err != nil { return @@ -64,16 +64,15 @@ func (i *Team) Create(ctx context.Context, name string, firstUser id.UserID) (_ return nil, err } - err = team.Members().Join(firstUser, user.RoleOwner) - if err != nil { + if err := team.Members().Join(firstUser, user.RoleOwner); err != nil { return nil, err } - err = i.teamRepo.Save(ctx, team) - if err != nil { + if err := i.teamRepo.Save(ctx, team); err != nil { return nil, err } + operator.AddNewTeam(team.ID()) tx.Commit() return team, nil } diff --git a/internal/usecase/interactor/team_test.go b/internal/usecase/interactor/team_test.go index d160009cd..a334e340d 100644 --- a/internal/usecase/interactor/team_test.go +++ b/internal/usecase/interactor/team_test.go @@ -16,11 +16,10 @@ func TestCreateTeam(t *testing.T) { db := memory.InitRepos(nil) - user := user.New().NewID().Team(id.NewTeamID()).MustBuild() - + u := user.New().NewID().Team(id.NewTeamID()).MustBuild() teamUC := NewTeam(db) - - team, err := teamUC.Create(ctx, "team name", user.ID()) + op := &usecase.Operator{User: u.ID()} + team, err := teamUC.Create(ctx, "team name", u.ID(), op) assert.Nil(t, err) assert.NotNil(t, team) @@ -33,4 +32,5 @@ func TestCreateTeam(t *testing.T) { assert.NotEmpty(t, resultTeams) assert.Equal(t, resultTeams[0].ID(), team.ID()) assert.Equal(t, resultTeams[0].Name(), "team name") + assert.Equal(t, user.TeamIDList{resultTeams[0].ID()}, op.OwningTeams) } diff --git a/internal/usecase/interfaces/team.go b/internal/usecase/interfaces/team.go index a4ba619fa..503b07bb0 100644 --- a/internal/usecase/interfaces/team.go +++ b/internal/usecase/interfaces/team.go @@ -18,7 +18,7 @@ var ( type Team interface { Fetch(context.Context, []id.TeamID, *usecase.Operator) ([]*user.Team, error) FindByUser(context.Context, id.UserID, 
*usecase.Operator) ([]*user.Team, error) - Create(context.Context, string, id.UserID) (*user.Team, error) + Create(context.Context, string, id.UserID, *usecase.Operator) (*user.Team, error) Update(context.Context, id.TeamID, string, *usecase.Operator) (*user.Team, error) AddMember(context.Context, id.TeamID, id.UserID, user.Role, *usecase.Operator) (*user.Team, error) RemoveMember(context.Context, id.TeamID, id.UserID, *usecase.Operator) (*user.Team, error) diff --git a/internal/usecase/operator.go b/internal/usecase/operator.go index 753edd745..28eddf88a 100644 --- a/internal/usecase/operator.go +++ b/internal/usecase/operator.go @@ -79,3 +79,15 @@ func (o *Operator) IsWritableScene(scene ...id.SceneID) bool { func (o *Operator) IsOwningScene(scene ...id.SceneID) bool { return o.AllOwningScenes().Includes(scene...) } + +func (o *Operator) AddNewTeam(team id.TeamID) { + o.OwningTeams = append(o.OwningTeams, team) +} + +func (o *Operator) AddNewScene(team id.TeamID, scene id.SceneID) { + if o.IsOwningTeam(team) { + o.OwningScenes = append(o.OwningScenes, scene) + } else if o.IsWritableTeam(team) { + o.WritableScenes = append(o.WritableScenes, scene) + } +} diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index 6e5bee8ed..7099ed5d0 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -73,6 +73,13 @@ func (f TeamFilter) Clone() TeamFilter { } } +func (f TeamFilter) Merge(g TeamFilter) TeamFilter { + return TeamFilter{ + Readable: append(f.Readable, g.Readable...), + Writable: append(f.Writable, g.Writable...), + } +} + func (f TeamFilter) CanRead(id user.TeamID) bool { return f.Readable == nil || f.Readable.Includes(id) } @@ -93,6 +100,13 @@ func SceneFilterFromOperator(o *usecase.Operator) SceneFilter { } } +func (f SceneFilter) Merge(g SceneFilter) SceneFilter { + return SceneFilter{ + Readable: append(f.Readable, g.Readable...), + Writable: append(f.Writable, g.Writable...), + } +} + func (f 
SceneFilter) Clone() SceneFilter { return SceneFilter{ Readable: f.Readable.Clone(), From 982a71aeb4e7778c945ed64e91a4c9bfbb288717 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 17 Mar 2022 12:17:05 +0900 Subject: [PATCH 176/253] fix: auth server certificate is not saved as pem format --- internal/usecase/interactor/auth.go | 35 ++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/internal/usecase/interactor/auth.go b/internal/usecase/interactor/auth.go index ed75c3385..1f01ebb65 100644 --- a/internal/usecase/interactor/auth.go +++ b/internal/usecase/interactor/auth.go @@ -80,11 +80,11 @@ func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRe } c, err := config.LockAndLoad(ctx) if err != nil { - return nil, fmt.Errorf("Could not load auth config: %w\n", err) + return nil, fmt.Errorf("could not load auth config: %w\n", err) } defer func() { if err := config.Unlock(ctx); err != nil { - log.Errorf("auth: Could not release config lock: %s\n", err) + log.Errorf("auth: could not release config lock: %s\n", err) } }() @@ -95,7 +95,7 @@ func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRe } else { keyBytes, certBytes, err = generateCert(name) if err != nil { - return nil, fmt.Errorf("Could not generate raw cert: %w\n", err) + return nil, fmt.Errorf("could not generate raw cert: %w\n", err) } c.Auth = &config2.Auth{ Key: string(keyBytes), @@ -103,13 +103,14 @@ func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRe } if err := config.Save(ctx, c); err != nil { - return nil, fmt.Errorf("Could not save raw cert: %w\n", err) + return nil, fmt.Errorf("could not save raw cert: %w\n", err) } + log.Info("auth: init a new private key and certificate") } key, sigKey, keySet, err := initKeys(keyBytes, certBytes) if err != nil { - return nil, fmt.Errorf("Fail to init keys: %w\n", err) + return nil, fmt.Errorf("could not init keys: %w\n", err) } return &AuthStorage{ 
@@ -126,17 +127,25 @@ func NewAuthStorage(ctx context.Context, cfg *StorageConfig, request repo.AuthRe } func initKeys(keyBytes, certBytes []byte) (*rsa.PrivateKey, *jose.SigningKey, *jose.JSONWebKeySet, error) { - block, _ := pem.Decode(keyBytes) - if block == nil { + keyBlock, _ := pem.Decode(keyBytes) + if keyBlock == nil { return nil, nil, nil, fmt.Errorf("failed to decode the key bytes") } - - key, err := x509.ParsePKCS1PrivateKey(block.Bytes) + key, err := x509.ParsePKCS1PrivateKey(keyBlock.Bytes) if err != nil { return nil, nil, nil, fmt.Errorf("failed to parse the private key bytes: %w\n", err) } - cert, err := x509.ParseCertificate(certBytes) + var certActualBytes []byte + certBlock, _ := pem.Decode(certBytes) + if certBlock == nil { + certActualBytes = certBytes // backwards compatibility + } else { + certActualBytes = certBlock.Bytes + } + + var cert *x509.Certificate + cert, err = x509.ParseCertificate(certActualBytes) if err != nil { return nil, nil, nil, fmt.Errorf("failed to parse the cert bytes: %w\n", err) } @@ -175,11 +184,15 @@ func generateCert(name pkix.Name) (keyPem, certPem []byte, err error) { KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign | x509.KeyUsageCRLSign, } - certPem, err = x509.CreateCertificate(rand.Reader, cert, cert, key.Public(), key) + certBytes, err := x509.CreateCertificate(rand.Reader, cert, cert, key.Public(), key) if err != nil { err = fmt.Errorf("failed to create the cert: %w\n", err) } + certPem = pem.EncodeToMemory(&pem.Block{ + Type: "CERTIFICATE", + Bytes: certBytes, + }) return } From f4cc3f9aa6edb94a9264f88c733af75251b1c4af Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 17 Mar 2022 12:41:16 +0900 Subject: [PATCH 177/253] fix: repo filters are not merged expectedly --- internal/usecase/repo/container.go | 38 +++++++++++++++++--- internal/usecase/repo/container_test.go | 47 +++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 4 deletions(-) create mode 100644 
internal/usecase/repo/container_test.go diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index 7099ed5d0..fff4b4889 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -74,9 +74,24 @@ func (f TeamFilter) Clone() TeamFilter { } func (f TeamFilter) Merge(g TeamFilter) TeamFilter { + var r, w user.TeamIDList + if f.Readable != nil || g.Readable != nil { + if f.Readable == nil { + r = append(g.Readable[:0:0], g.Readable...) + } else { + r = append(f.Readable, g.Readable...) + } + } + if f.Writable != nil || g.Writable != nil { + if f.Writable == nil { + w = append(g.Writable[:0:0], g.Writable...) + } else { + w = append(f.Writable, g.Writable...) + } + } return TeamFilter{ - Readable: append(f.Readable, g.Readable...), - Writable: append(f.Writable, g.Writable...), + Readable: r, + Writable: w, } } @@ -101,9 +116,24 @@ func SceneFilterFromOperator(o *usecase.Operator) SceneFilter { } func (f SceneFilter) Merge(g SceneFilter) SceneFilter { + var r, w scene.IDList + if f.Readable != nil || g.Readable != nil { + if f.Readable == nil { + r = append(g.Readable[:0:0], g.Readable...) + } else { + r = append(f.Readable, g.Readable...) + } + } + if f.Writable != nil || g.Writable != nil { + if f.Writable == nil { + w = append(g.Writable[:0:0], g.Writable...) + } else { + w = append(f.Writable, g.Writable...) 
+ } + } return SceneFilter{ - Readable: append(f.Readable, g.Readable...), - Writable: append(f.Writable, g.Writable...), + Readable: r, + Writable: w, } } diff --git a/internal/usecase/repo/container_test.go b/internal/usecase/repo/container_test.go new file mode 100644 index 000000000..6b60ce97d --- /dev/null +++ b/internal/usecase/repo/container_test.go @@ -0,0 +1,47 @@ +package repo + +import ( + "testing" + + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/stretchr/testify/assert" +) + +func TestTeamFilter_Merge(t *testing.T) { + a := user.NewTeamID() + b := user.NewTeamID() + assert.Equal(t, TeamFilter{ + Readable: user.TeamIDList{a, b}, + Writable: user.TeamIDList{b, a}, + }, TeamFilter{ + Readable: user.TeamIDList{a}, + Writable: user.TeamIDList{b}, + }.Merge(TeamFilter{ + Readable: user.TeamIDList{b}, + Writable: user.TeamIDList{a}, + })) + assert.Equal(t, TeamFilter{Readable: user.TeamIDList{}}, TeamFilter{}.Merge(TeamFilter{Readable: user.TeamIDList{}})) + assert.Equal(t, TeamFilter{Readable: user.TeamIDList{}}, TeamFilter{Readable: user.TeamIDList{}}.Merge(TeamFilter{})) + assert.Equal(t, TeamFilter{Writable: user.TeamIDList{}}, TeamFilter{}.Merge(TeamFilter{Writable: user.TeamIDList{}})) + assert.Equal(t, TeamFilter{Writable: user.TeamIDList{}}, TeamFilter{Writable: user.TeamIDList{}}.Merge(TeamFilter{})) +} + +func TestSceneFilter_Merge(t *testing.T) { + a := scene.NewID() + b := scene.NewID() + assert.Equal(t, SceneFilter{ + Readable: scene.IDList{a, b}, + Writable: scene.IDList{b, a}, + }, SceneFilter{ + Readable: scene.IDList{a}, + Writable: scene.IDList{b}, + }.Merge(SceneFilter{ + Readable: scene.IDList{b}, + Writable: scene.IDList{a}, + })) + assert.Equal(t, SceneFilter{Readable: scene.IDList{}}, SceneFilter{}.Merge(SceneFilter{Readable: scene.IDList{}})) + assert.Equal(t, SceneFilter{Readable: scene.IDList{}}, SceneFilter{Readable: scene.IDList{}}.Merge(SceneFilter{})) + 
assert.Equal(t, SceneFilter{Writable: scene.IDList{}}, SceneFilter{}.Merge(SceneFilter{Writable: scene.IDList{}})) + assert.Equal(t, SceneFilter{Writable: scene.IDList{}}, SceneFilter{Writable: scene.IDList{}}.Merge(SceneFilter{})) +} From 690a914b09aecede6a84f8290d06532982c47814 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Fri, 18 Mar 2022 09:26:57 +0300 Subject: [PATCH 178/253] fix end points disabling config (#126) --- internal/app/app.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/app/app.go b/internal/app/app.go index 627146806..cb6ff834e 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -92,7 +92,7 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { // apis api := e.Group("/api") api.GET("/ping", Ping()) - if cfg.Config.AuthSrv.Disabled { + if !cfg.Config.AuthSrv.Disabled { api.POST("/signup", Signup()) api.POST("/signup/verify", StartSignupVerify()) api.POST("/signup/verify/:code", SignupVerify()) From 3f2582c666bbcf5eed87b57f10878eeaf26786f5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 23 Mar 2022 16:41:43 +0900 Subject: [PATCH 179/253] refactor: http api to export layers --- internal/adapter/gql/resolver.go | 7 +-- internal/app/app.go | 15 +++-- internal/app/graphql.go | 32 ++++------ internal/app/private.go | 92 +++------------------------- internal/usecase/interactor/layer.go | 47 ++++++++++++-- internal/usecase/interfaces/layer.go | 2 + pkg/layer/encoding/czml.go | 4 ++ pkg/layer/encoding/encoder.go | 18 ++++++ pkg/layer/encoding/geojson.go | 4 ++ pkg/layer/encoding/kml.go | 4 ++ pkg/layer/encoding/shp.go | 4 ++ pkg/scene/builder/encoder.go | 4 ++ 12 files changed, 110 insertions(+), 123 deletions(-) diff --git a/internal/adapter/gql/resolver.go b/internal/adapter/gql/resolver.go index ac987de32..40f45c3a2 100644 --- a/internal/adapter/gql/resolver.go +++ b/internal/adapter/gql/resolver.go @@ -12,11 +12,8 @@ var ErrNotImplemented = 
errors.New("not impleneted yet") var ErrUnauthorized = errors.New("unauthorized") type Resolver struct { - debug bool } -func NewResolver(debug bool) ResolverRoot { - return &Resolver{ - debug: debug, - } +func NewResolver() ResolverRoot { + return &Resolver{} } diff --git a/internal/app/app.go b/internal/app/app.go index cb6ff834e..6b64c1460 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -62,7 +62,8 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { } // GraphQL Playground without auth - if cfg.Debug || cfg.Config.Dev { + gqldev := cfg.Debug || cfg.Config.Dev + if gqldev { e.GET("/graphql", echo.WrapHandler( playground.Handler("reearth-backend", "/api/graphql"), )) @@ -92,19 +93,17 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { // apis api := e.Group("/api") api.GET("/ping", Ping()) + api.POST("/graphql", GraphqlAPI(cfg.Config.GraphQL, gqldev), AuthRequiredMiddleware()) + api.GET("/published/:name", PublishedMetadata()) + api.GET("/published_data/:name", PublishedData()) + api.GET("/layers/:param", ExportLayer(), AuthRequiredMiddleware()) + if !cfg.Config.AuthSrv.Disabled { api.POST("/signup", Signup()) api.POST("/signup/verify", StartSignupVerify()) api.POST("/signup/verify/:code", SignupVerify()) api.POST("/password-reset", PasswordReset()) } - api.GET("/published/:name", PublishedMetadata()) - api.GET("/published_data/:name", PublishedData()) - - // authenticated endpoints - privateApi := api.Group("", AuthRequiredMiddleware()) - graphqlAPI(e, privateApi, cfg) - privateAPI(e, privateApi, cfg.Repos) published := e.Group("/p", PublishedAuthMiddleware()) published.GET("/:name/data.json", PublishedData()) diff --git a/internal/app/graphql.go b/internal/app/graphql.go index be0cd8ffb..22835f043 100644 --- a/internal/app/graphql.go +++ b/internal/app/graphql.go @@ -7,7 +7,6 @@ import ( "github.com/99designs/gqlgen/graphql/handler" "github.com/99designs/gqlgen/graphql/handler/extension" 
"github.com/99designs/gqlgen/graphql/handler/lru" - "github.com/99designs/gqlgen/graphql/playground" "github.com/labstack/echo/v4" "github.com/ravilushqa/otelgqlgen" "github.com/reearth/reearth-backend/internal/adapter" @@ -17,31 +16,22 @@ import ( const enableDataLoaders = true -func graphqlAPI( - ec *echo.Echo, - r *echo.Group, - conf *ServerConfig, -) { - playgroundEnabled := conf.Debug || conf.Config.Dev - - if playgroundEnabled { - r.GET("/graphql", echo.WrapHandler( - playground.Handler("reearth-backend", "/api/graphql"), - )) - } - +func GraphqlAPI( + conf GraphQLConfig, + dev bool, +) echo.HandlerFunc { schema := gql.NewExecutableSchema(gql.Config{ - Resolvers: gql.NewResolver(conf.Debug), + Resolvers: gql.NewResolver(), }) srv := handler.NewDefaultServer(schema) srv.Use(otelgqlgen.Middleware()) - if conf.Config.GraphQL.ComplexityLimit > 0 { - srv.Use(extension.FixedComplexityLimit(conf.Config.GraphQL.ComplexityLimit)) + if conf.ComplexityLimit > 0 { + srv.Use(extension.FixedComplexityLimit(conf.ComplexityLimit)) } - if playgroundEnabled { + if dev { srv.Use(extension.Introspection{}) } @@ -52,14 +42,14 @@ func graphqlAPI( srv.SetErrorPresenter( // show more detailed error messgage in debug mode func(ctx context.Context, e error) *gqlerror.Error { - if conf.Debug { + if dev { return gqlerror.ErrorPathf(graphql.GetFieldContext(ctx).Path(), e.Error()) } return graphql.DefaultErrorPresenter(ctx, e) }, ) - r.POST("/graphql", func(c echo.Context) error { + return func(c echo.Context) error { req := c.Request() ctx := req.Context() @@ -69,5 +59,5 @@ func graphqlAPI( srv.ServeHTTP(c.Response(), c.Request()) return nil - }) + } } diff --git a/internal/app/private.go b/internal/app/private.go index 1884bddde..11a731bc4 100644 --- a/internal/app/private.go +++ b/internal/app/private.go @@ -1,112 +1,36 @@ package app import ( - "errors" - "io" "net/http" "strings" "github.com/labstack/echo/v4" "github.com/reearth/reearth-backend/internal/adapter" - 
"github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/layer/encoding" - "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/rerror" ) -// TODO: move to adapter and usecase layer - -var ( - ErrOpDenied = errors.New("operation denied") - ErrUnauthorized = errors.New("Unauthorized") - ErrUnknowFormat = errors.New("unknown file format") - ErrBadID = errors.New("bad id") - ErrBadParameter = errors.New("id.ext is needed") -) - -func getEncoder(w io.Writer, ext string) (encoding.Encoder, string) { - switch strings.ToLower(ext) { - case "kml": - return encoding.NewKMLEncoder(w), "application/xml" - case "geojson": - return encoding.NewGeoJSONEncoder(w), "application/json" - case "czml": - return encoding.NewCZMLEncoder(w), "application/json" - case "shp": - return encoding.NewSHPEncoder(w), "application/octet-stream" - } - return nil, "" -} - -func privateAPI( - ec *echo.Echo, - r *echo.Group, - repos *repo.Container, -) { - r.GET("/layers/:param", func(c echo.Context) error { +func ExportLayer() echo.HandlerFunc { + return func(c echo.Context) error { ctx := c.Request().Context() - user := adapter.User(c.Request().Context()) - if user == nil { - return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrUnauthorized} - } - - op := adapter.Operator(c.Request().Context()) - if op == nil { - return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} - } - repos := repos.Filtered(repo.TeamFilterFromOperator(op), repo.SceneFilterFromOperator(op)) + u := adapter.Usecases(ctx) param := c.Param("param") params := strings.Split(param, ".") if len(params) != 2 { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadParameter} + return rerror.ErrNotFound } lid, err := id.LayerIDFrom(params[0]) if err != nil { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrBadID} + return rerror.ErrNotFound } - 
layer, err := repos.Layer.FindByID(ctx, lid) + reader, mime, err := u.Layer.Export(ctx, lid, params[1]) if err != nil { - if errors.Is(rerror.ErrNotFound, err) { - return &echo.HTTPError{Code: http.StatusNotFound, Message: err} - } - return &echo.HTTPError{Code: http.StatusInternalServerError, Message: err} - } - if !op.IsReadableScene(layer.Scene()) { - return &echo.HTTPError{Code: http.StatusUnauthorized, Message: ErrOpDenied} - } - ext := params[1] - - reader, writer := io.Pipe() - e, mime := getEncoder(writer, strings.ToLower(ext)) - if e == nil { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: ErrUnknowFormat} - } - - ex := &encoding.Exporter{ - Merger: &merging.Merger{ - LayerLoader: repo.LayerLoaderFrom(repos.Layer), - PropertyLoader: repo.PropertyLoaderFrom(repos.Property), - }, - Sealer: &merging.Sealer{ - DatasetGraphLoader: repo.DatasetGraphLoaderFrom(repos.Dataset), - }, - Encoder: e, + return err } - go func() { - defer func() { - _ = writer.Close() - }() - err = ex.ExportLayerByID(ctx, lid) - }() - - if err != nil { - return &echo.HTTPError{Code: http.StatusBadRequest, Message: err} - } return c.Stream(http.StatusOK, mime, reader) - }) + } } diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index e2f15c811..33f34193f 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -5,23 +5,25 @@ import ( "encoding/json" "encoding/xml" "errors" + "io" "strings" - "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/rerror" - "github.com/reearth/reearth-backend/pkg/shp" - "github.com/reearth/reearth-backend/pkg/tag" - "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/dataset" 
"github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/layer/encoding" "github.com/reearth/reearth-backend/pkg/layer/layerops" + "github.com/reearth/reearth-backend/pkg/layer/merging" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/shp" + "github.com/reearth/reearth-backend/pkg/tag" ) // TODO: ใƒฌใ‚คใƒคใƒผไฝœๆˆใฎใƒ‰ใƒกใ‚คใƒณใƒญใ‚ธใƒƒใ‚ฏใŒใ“ใ“ใซๅคšใๆผใ‚Œๅ‡บใ—ใฆใ„ใ‚‹ใฎใงใƒ‰ใƒกใ‚คใƒณๅฑคใซ็งปใ™ @@ -122,6 +124,41 @@ func (i *Layer) FetchByTag(ctx context.Context, tag id.TagID, operator *usecase. return i.layerRepo.FindByTag(ctx, tag) } +func (l *Layer) Export(ctx context.Context, lid id.LayerID, ext string) (io.Reader, string, error) { + _, err := l.layerRepo.FindByID(ctx, lid) + if err != nil { + return nil, "", err + } + + reader, writer := io.Pipe() + e := encoding.EncoderFromExt(strings.ToLower(ext), writer) + if e == nil { + return nil, "", rerror.ErrNotFound + } + ex := &encoding.Exporter{ + Merger: &merging.Merger{ + LayerLoader: repo.LayerLoaderFrom(l.layerRepo), + PropertyLoader: repo.PropertyLoaderFrom(l.propertyRepo), + }, + Sealer: &merging.Sealer{ + DatasetGraphLoader: repo.DatasetGraphLoaderFrom(l.datasetRepo), + }, + Encoder: e, + } + + go func() { + defer func() { + _ = writer.Close() + }() + err = ex.ExportLayerByID(ctx, lid) + }() + + if err != nil { + return nil, "", err + } + return reader, e.MimeType(), nil +} + func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, operator *usecase.Operator) (_ *layer.Item, _ *layer.Group, err error) { tx, err := i.transaction.Begin() if err != nil { diff --git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go index c4c9f2904..e9cfa5a1e 100644 --- 
a/internal/usecase/interfaces/layer.go +++ b/internal/usecase/interfaces/layer.go @@ -3,6 +3,7 @@ package interfaces import ( "context" "errors" + "io" "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/pkg/file" @@ -92,6 +93,7 @@ type Layer interface { FetchMerged(context.Context, id.LayerID, *id.LayerID, *usecase.Operator) (*layer.Merged, error) FetchParentAndMerged(context.Context, id.LayerID, *usecase.Operator) (*layer.Merged, error) FetchByTag(context.Context, id.TagID, *usecase.Operator) (layer.List, error) + Export(context.Context, id.LayerID, string) (io.Reader, string, error) AddItem(context.Context, AddLayerItemInput, *usecase.Operator) (*layer.Item, *layer.Group, error) AddGroup(context.Context, AddLayerGroupInput, *usecase.Operator) (*layer.Group, *layer.Group, error) Remove(context.Context, id.LayerID, *usecase.Operator) (id.LayerID, *layer.Group, error) diff --git a/pkg/layer/encoding/czml.go b/pkg/layer/encoding/czml.go index e7117233a..23b0d1fe0 100644 --- a/pkg/layer/encoding/czml.go +++ b/pkg/layer/encoding/czml.go @@ -20,6 +20,10 @@ func NewCZMLEncoder(w io.Writer) *CZMLEncoder { } } +func (*CZMLEncoder) MimeType() string { + return "application/json" +} + func (e *CZMLEncoder) stringToCZMLColor(s string) *czml.Color { c := getColor(s) if c == nil { diff --git a/pkg/layer/encoding/encoder.go b/pkg/layer/encoding/encoder.go index 20b1d2d88..0a0d805cc 100644 --- a/pkg/layer/encoding/encoder.go +++ b/pkg/layer/encoding/encoder.go @@ -1,9 +1,27 @@ package encoding import ( + "io" + "github.com/reearth/reearth-backend/pkg/layer/merging" ) +var encoders = map[string]func(w io.Writer) Encoder{ + "kml": func(w io.Writer) Encoder { return NewKMLEncoder(w) }, + "geojson": func(w io.Writer) Encoder { return NewGeoJSONEncoder(w) }, + "czml": func(w io.Writer) Encoder { return NewCZMLEncoder(w) }, + "shp": func(w io.Writer) Encoder { return NewSHPEncoder(w) }, +} + type Encoder interface { Encode(merging.SealedLayer) 
error + MimeType() string +} + +func EncoderFromExt(ext string, w io.Writer) Encoder { + e := encoders[ext] + if e == nil { + return nil + } + return e(w) } diff --git a/pkg/layer/encoding/geojson.go b/pkg/layer/encoding/geojson.go index 6fb4c4e0c..2940813d7 100644 --- a/pkg/layer/encoding/geojson.go +++ b/pkg/layer/encoding/geojson.go @@ -20,6 +20,10 @@ func NewGeoJSONEncoder(w io.Writer) *GeoJSONEncoder { } } +func (*GeoJSONEncoder) MimeType() string { + return "application/json" +} + func (e *GeoJSONEncoder) polygonToFloat(p property.Polygon) [][][]float64 { var res [][][]float64 for _, c := range p { diff --git a/pkg/layer/encoding/kml.go b/pkg/layer/encoding/kml.go index 9c00f111f..d3f0f25a5 100644 --- a/pkg/layer/encoding/kml.go +++ b/pkg/layer/encoding/kml.go @@ -20,6 +20,10 @@ func NewKMLEncoder(w io.Writer) *KMLEncoder { } } +func (*KMLEncoder) MimeType() string { + return "application/xml" +} + // generates a composite string of layer name and id to be used as style tag id func generateKMLStyleId(id string) string { return id + "_style" diff --git a/pkg/layer/encoding/shp.go b/pkg/layer/encoding/shp.go index 842e0f5bb..93677778c 100644 --- a/pkg/layer/encoding/shp.go +++ b/pkg/layer/encoding/shp.go @@ -21,6 +21,10 @@ func NewSHPEncoder(w io.Writer) *SHPEncoder { } } +func (*SHPEncoder) MimeType() string { + return "application/octet-stream" +} + func coordsToPoints(coords property.Coordinates) []shp.Point { var res []shp.Point for _, l := range coords { diff --git a/pkg/scene/builder/encoder.go b/pkg/scene/builder/encoder.go index 28d772e16..1907e194c 100644 --- a/pkg/scene/builder/encoder.go +++ b/pkg/scene/builder/encoder.go @@ -12,6 +12,10 @@ type encoder struct { res *layerJSON } +func (*encoder) MimeType() string { + return "application/json" +} + func (e *encoder) Result() []*layerJSON { if e == nil || e.res == nil { return nil From 58a6d13ecaead6818561cbc20c2693d92eacb6d5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 23 Mar 2022 16:43:16 +0900 
Subject: [PATCH 180/253] fix: auth is no longer required for GraphQL endpoint --- internal/app/app.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/app/app.go b/internal/app/app.go index 6b64c1460..64d4e87e6 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -93,7 +93,7 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { // apis api := e.Group("/api") api.GET("/ping", Ping()) - api.POST("/graphql", GraphqlAPI(cfg.Config.GraphQL, gqldev), AuthRequiredMiddleware()) + api.POST("/graphql", GraphqlAPI(cfg.Config.GraphQL, gqldev)) api.GET("/published/:name", PublishedMetadata()) api.GET("/published_data/:name", PublishedData()) api.GET("/layers/:param", ExportLayer(), AuthRequiredMiddleware()) From 89adc36cfe8f3504b0d38bcdb01a81debb429784 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 24 Mar 2022 18:55:04 +0900 Subject: [PATCH 181/253] fix: rename auth srv default client ID (#128) * fix: rename auth srv default client ID * fix test error --- .github/workflows/ci.yml | 2 +- pkg/auth/client.go | 2 +- pkg/user/builder_test.go | 25 ------------------------- 3 files changed, 2 insertions(+), 27 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f0a4616c..e2290d812 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: args: --timeout=10m skip-go-installation: true - name: test - run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic + run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic -timeout 15m - name: Send coverage report uses: codecov/codecov-action@v2 with: diff --git a/pkg/auth/client.go b/pkg/auth/client.go index 83316be67..0c7f6b5b6 100644 --- a/pkg/auth/client.go +++ b/pkg/auth/client.go @@ -8,7 +8,7 @@ import ( "github.com/caos/oidc/pkg/op" ) -const ClientID = "01FH69GFQ4DFCXS5XD91JK4HZ1" +const ClientID = "reearth-authsrv-client-default" type Client struct { id string diff --git a/pkg/user/builder_test.go b/pkg/user/builder_test.go index dc02a227d..031ea979d 100644 --- a/pkg/user/builder_test.go +++ b/pkg/user/builder_test.go @@ -89,31 +89,6 @@ func TestBuilder_LangFrom(t *testing.T) { } } -func TestBuilder_PasswordReset(t *testing.T) { - testCases := []struct { - Name, Token string - CreatedAt time.Time - Expected PasswordReset - }{ - { - Name: "Test1", - Token: "xyz", - CreatedAt: time.Unix(0, 0), - Expected: PasswordReset{ - Token: "xyz", - CreatedAt: time.Unix(0, 0), - }, - }, - } - for _, tc := range testCases { - t.Run(tc.Name, func(tt *testing.T) { - tt.Parallel() - // u := New().NewID().PasswordReset(tc.Token, tc.CreatedAt).MustBuild() - // assert.Equal(t, tc.Expected, *u.passwordReset) - }) - } -} - func TestNew(t *testing.T) { b := New() assert.NotNil(t, b) From dccd9c7dae07b4065baf2cc23cfebe59c36caf36 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 24 Mar 2022 19:15:17 +0900 Subject: [PATCH 182/253] ci: fix deploy_test github actions --- .github/workflows/deploy_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index afa1580e7..88d97d808 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' steps: - - uses: google-github-actions/setup-gcloud@master + - uses: 
google-github-actions/setup-gcloud@v0 with: project_id: ${{ secrets.GCP_PROJECT }} service_account_key: ${{ secrets.GCP_SA_KEY }} From 47be6ab8f813343edd8e1e9b3d28db36f30d0908 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 31 Mar 2022 20:17:21 +0900 Subject: [PATCH 183/253] fix: signup API is disabled when auth server is disabled, users and auth requests in mongo cannot be deleted (#132) --- internal/app/app.go | 2 +- internal/infrastructure/mongo/auth_request.go | 13 ++-- internal/infrastructure/mongo/property.go | 7 +- internal/infrastructure/mongo/scene.go | 19 ++--- internal/infrastructure/mongo/tag.go | 7 +- internal/infrastructure/mongo/team.go | 29 +++----- internal/infrastructure/mongo/user.go | 71 +++++++++---------- 7 files changed, 61 insertions(+), 87 deletions(-) diff --git a/internal/app/app.go b/internal/app/app.go index 64d4e87e6..4ab7737b8 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -97,9 +97,9 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { api.GET("/published/:name", PublishedMetadata()) api.GET("/published_data/:name", PublishedData()) api.GET("/layers/:param", ExportLayer(), AuthRequiredMiddleware()) + api.POST("/signup", Signup()) if !cfg.Config.AuthSrv.Disabled { - api.POST("/signup", Signup()) api.POST("/signup/verify", StartSignupVerify()) api.POST("/signup/verify/:code", SignupVerify()) api.POST("/password-reset", PasswordReset()) diff --git a/internal/infrastructure/mongo/auth_request.go b/internal/infrastructure/mongo/auth_request.go index 247e1f6e4..b0d9bf503 100644 --- a/internal/infrastructure/mongo/auth_request.go +++ b/internal/infrastructure/mongo/auth_request.go @@ -29,18 +29,15 @@ func (r *authRequestRepo) init() { } func (r *authRequestRepo) FindByID(ctx context.Context, id2 id.AuthRequestID) (*auth.Request, error) { - filter := bson.D{{Key: "id", Value: id2.String()}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"id": id2.String()}) } func (r *authRequestRepo) 
FindByCode(ctx context.Context, s string) (*auth.Request, error) { - filter := bson.D{{Key: "code", Value: s}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"code": s}) } func (r *authRequestRepo) FindBySubject(ctx context.Context, s string) (*auth.Request, error) { - filter := bson.D{{Key: "subject", Value: s}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"subject": s}) } func (r *authRequestRepo) Save(ctx context.Context, request *auth.Request) error { @@ -49,10 +46,10 @@ func (r *authRequestRepo) Save(ctx context.Context, request *auth.Request) error } func (r *authRequestRepo) Remove(ctx context.Context, requestID id.AuthRequestID) error { - return r.client.RemoveOne(ctx, requestID.String()) + return r.client.RemoveOne(ctx, bson.M{"id": requestID.String()}) } -func (r *authRequestRepo) findOne(ctx context.Context, filter bson.D) (*auth.Request, error) { +func (r *authRequestRepo) findOne(ctx context.Context, filter interface{}) (*auth.Request, error) { dst := make([]*auth.Request, 0, 1) c := mongodoc.AuthRequestConsumer{ Rows: dst, diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index f6311259d..0c5d81b6a 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -161,10 +161,9 @@ func (r *propertyRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) er if !r.f.CanWrite(sceneID) { return nil } - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, - } - _, err := r.client.Collection().DeleteMany(ctx, filter) + _, err := r.client.Collection().DeleteMany(ctx, bson.M{ + "scene": sceneID.String(), + }) if err != nil { return rerror.ErrInternalBy(err) } diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index 4c001ffe0..acbbc6ecd 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -45,24 +45,17 @@ func (r *sceneRepo) FindByID(ctx 
context.Context, id id.SceneID) (*scene.Scene, } func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID) (scene.List, error) { - filter := bson.M{ + return r.find(ctx, make(scene.List, 0, len(ids)), bson.M{ "id": bson.M{ "$in": id.SceneIDsToStrings(ids), }, - } - dst := make(scene.List, 0, len(ids)) - res, err := r.find(ctx, dst, filter) - if err != nil { - return nil, err - } - return filterScenes(ids, res), nil + }) } func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID) (*scene.Scene, error) { - filter := bson.M{ + return r.findOne(ctx, bson.M{ "project": id.String(), - } - return r.findOne(ctx, filter) + }) } func (r *sceneRepo) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { @@ -111,10 +104,6 @@ func (r *sceneRepo) findOne(ctx context.Context, filter interface{}) (*scene.Sce return c.Rows[0], nil } -func filterScenes(ids []id.SceneID, rows scene.List) scene.List { - return rows.FilterByID(ids...) -} - func (r *sceneRepo) readFilter(filter interface{}) interface{} { return applyTeamFilter(filter, r.f.Readable) } diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index 7daae79f1..001b90cdd 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -153,10 +153,9 @@ func (r *tagRepo) RemoveAll(ctx context.Context, ids []id.TagID) error { } func (r *tagRepo) RemoveByScene(ctx context.Context, sceneID id.SceneID) error { - filter := bson.D{ - {Key: "scene", Value: sceneID.String()}, - } - _, err := r.client.Collection().DeleteMany(ctx, filter) + _, err := r.client.Collection().DeleteMany(ctx, bson.M{ + "scene": sceneID.String(), + }) if err != nil { return rerror.ErrInternalBy(err) } diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index d6ec04f56..1b058b8ca 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -30,22 +30,18 @@ func (r *teamRepo) 
init() { } func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) (user.TeamList, error) { - filter := bson.D{ - {Key: "members." + strings.Replace(id.String(), ".", "", -1), Value: bson.D{ - {Key: "$exists", Value: true}, - }}, - } - return r.find(ctx, nil, filter) + return r.find(ctx, nil, bson.M{ + "members." + strings.Replace(id.String(), ".", "", -1): bson.M{ + "$exists": true, + }, + }) } func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamList, error) { - filter := bson.D{ - {Key: "id", Value: bson.D{ - {Key: "$in", Value: id.TeamIDsToStrings(ids)}, - }}, - } dst := make([]*user.Team, 0, len(ids)) - res, err := r.find(ctx, dst, filter) + res, err := r.find(ctx, dst, bson.M{ + "id": bson.M{"$in": id.TeamIDsToStrings(ids)}, + }) if err != nil { return nil, err } @@ -53,10 +49,7 @@ func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamLis } func (r *teamRepo) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { - filter := bson.D{ - {Key: "id", Value: id.String()}, - } - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"id": id.String()}) } func (r *teamRepo) Save(ctx context.Context, team *user.Team) error { @@ -89,7 +82,7 @@ func (r *teamRepo) RemoveAll(ctx context.Context, ids []id.TeamID) error { }) } -func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) (user.TeamList, error) { +func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter interface{}) (user.TeamList, error) { c := mongodoc.TeamConsumer{ Rows: dst, } @@ -99,7 +92,7 @@ func (r *teamRepo) find(ctx context.Context, dst []*user.Team, filter bson.D) (u return c.Rows, nil } -func (r *teamRepo) findOne(ctx context.Context, filter bson.D) (*user.Team, error) { +func (r *teamRepo) findOne(ctx context.Context, filter interface{}) (*user.Team, error) { dst := make([]*user.Team, 0, 1) c := mongodoc.TeamConsumer{ Rows: dst, diff --git a/internal/infrastructure/mongo/user.go 
b/internal/infrastructure/mongo/user.go index 0958cfc6c..5cec81f16 100644 --- a/internal/infrastructure/mongo/user.go +++ b/internal/infrastructure/mongo/user.go @@ -30,11 +30,10 @@ func (r *userRepo) init() { } func (r *userRepo) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { - filter := bson.D{{Key: "id", Value: bson.D{ - {Key: "$in", Value: id.UserIDsToStrings(ids)}, - }}} dst := make([]*user.User, 0, len(ids)) - res, err := r.find(ctx, dst, filter) + res, err := r.find(ctx, dst, bson.M{ + "id": bson.M{"$in": id.UserIDsToStrings(ids)}, + }) if err != nil { return nil, err } @@ -42,52 +41,51 @@ func (r *userRepo) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User } func (r *userRepo) FindByID(ctx context.Context, id2 id.UserID) (*user.User, error) { - filter := bson.D{{Key: "id", Value: id.ID(id2).String()}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"id": id2.String()}) } func (r *userRepo) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, error) { - filter := bson.D{ - {Key: "$or", Value: []bson.D{ - {{Key: "auth0sub", Value: auth0sub}}, - {{Key: "auth0sublist", Value: bson.D{ - {Key: "$elemMatch", Value: bson.D{ - {Key: "$eq", Value: auth0sub}, - }}, - }}}, - }}, - } - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"auth0sub": auth0sub}, + { + "auth0sublist": bson.M{ + "$elemMatch": bson.M{ + "$eq": auth0sub, + }, + }, + }, + }, + }) } func (r *userRepo) FindByEmail(ctx context.Context, email string) (*user.User, error) { - filter := bson.D{{Key: "email", Value: email}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"email": email}) } func (r *userRepo) FindByName(ctx context.Context, name string) (*user.User, error) { - filter := bson.D{{Key: "name", Value: name}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{"name": name}) } func (r *userRepo) FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user.User, 
error) { - filter := bson.D{{Key: "$or", Value: []bson.D{ - {{Key: "email", Value: nameOrEmail}}, - {{Key: "name", Value: nameOrEmail}}, - }}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "$or": []bson.M{ + {"email": nameOrEmail}, + {"name": nameOrEmail}, + }, + }) } func (r *userRepo) FindByVerification(ctx context.Context, code string) (*user.User, error) { - filter := bson.D{{Key: "verification.code", Value: code}} - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "verification.code": code, + }) } func (r *userRepo) FindByPasswordResetRequest(ctx context.Context, pwdResetToken string) (*user.User, error) { - filter := bson.D{ - {Key: "passwordreset.token", Value: pwdResetToken}, - } - return r.findOne(ctx, filter) + return r.findOne(ctx, bson.M{ + "passwordreset.token": pwdResetToken, + }) } func (r *userRepo) Save(ctx context.Context, user *user.User) error { @@ -96,10 +94,10 @@ func (r *userRepo) Save(ctx context.Context, user *user.User) error { } func (r *userRepo) Remove(ctx context.Context, user id.UserID) error { - return r.client.RemoveOne(ctx, user.String()) + return r.client.RemoveOne(ctx, bson.M{"id": user.String()}) } -func (r *userRepo) find(ctx context.Context, dst []*user.User, filter bson.D) ([]*user.User, error) { +func (r *userRepo) find(ctx context.Context, dst []*user.User, filter interface{}) ([]*user.User, error) { c := mongodoc.UserConsumer{ Rows: dst, } @@ -109,10 +107,9 @@ func (r *userRepo) find(ctx context.Context, dst []*user.User, filter bson.D) ([ return c.Rows, nil } -func (r *userRepo) findOne(ctx context.Context, filter bson.D) (*user.User, error) { - dst := make([]*user.User, 0, 1) +func (r *userRepo) findOne(ctx context.Context, filter interface{}) (*user.User, error) { c := mongodoc.UserConsumer{ - Rows: dst, + Rows: make([]*user.User, 0, 1), } if err := r.client.FindOne(ctx, filter, &c); err != nil { return nil, err From 3cbb456259420c863902928e7c684a2e2628de91 Mon Sep 17 00:00:00 2001 
From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Mon, 4 Apr 2022 12:40:32 +0300 Subject: [PATCH 184/253] fix: auth to work with zero config (#131) Co-authored-by: rot1024 --- .env.example | 50 +++++++++++++++++++++++++++++++++++++ internal/app/auth_server.go | 10 ++++++++ internal/app/config.go | 27 ++++++++++++-------- internal/app/config_test.go | 8 +++--- internal/app/jwt.go | 3 ++- internal/app/web.go | 3 ++- 6 files changed, 86 insertions(+), 15 deletions(-) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 000000000..98c11c801 --- /dev/null +++ b/.env.example @@ -0,0 +1,50 @@ +# General +PORT=8080 +REEARTH_DB=mongodb://localhost +REEARTH_DEV=false + +# GCP +GOOGLE_CLOUD_PROJECT= +GCS_BUCKETNAME= +GCS_PUBLICATIONCACHECONTROL= + +# Local Auth serv +REEARTH_AUTH0_DOMAIN=https://example.auth0.com +REEARTH_AUTH0_AUDIENCE=https://api.reearth.example.com +REEARTH_AUTH0_CLIENTID= +REEARTH_AUTH0_CLIENTSECRET= +REEARTH_AUTH0_WEBCLIENTID= + +# Auth client +#REEARTH_AUTH_ISS=https://hoge.com +#REEARTH_AUTH_AUD=https://api.reearth.example.com +# If you want to use multiple auth servers +#REEARTH_AUTH=[{"ISS":"https://hoge.com","AUD":["https://api.reearth.example.com"]}] + +# Auth server +# If you want to restrict signups, set secret +REEARTH_SIGNUP_SECRET= +# If you want to run auth server on localhost, set to true +REEARTH_AUTHSRV_DEV=true +REEARTH_AUTHSRV_DISABLED=false +REEARTH_AUTHSRV_UIDOMAIN=https://reearth.example.com +REEARTH_AUTHSRV_DOMAIN=https://api.reearth.example.com +# Any random long string (keep it secrit) +REEARTH_AUTHSRV_KEY=abcdefghijklmnopqrstuvwxyz + +# Available mailers: [log, smtp, sendgrid] +REEARTH_MAILER=log + +#SendGrid config +#REEARTH_MAILER=sendgrid +#REEARTH_SENDGRID_EMAIL=noreplay@test.com +#REEARTH_SENDGRID_NAME= +#REEARTH_SENDGRID_API= + +#SMTP config +#REEARTH_MAILER=smtp +#REEARTH_SMTP_EMAIL=noreplay@test.com +#REEARTH_SMTP_HOST=smtp.sendgrid.net 
+#REEARTH_SMTP_PORT=587 +#REEARTH_SMTP_SMTPUSERNAME=apikey +#REEARTH_SMTP_PASSWORD=Your_SendGrid_Token diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index f1f63796c..0a2c10e81 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -6,6 +6,8 @@ import ( "encoding/json" "net/http" "net/url" + "os" + "strconv" "strings" "github.com/caos/oidc/pkg/op" @@ -97,6 +99,14 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server // can be removed when the mentioned issue is solved // https://github.com/auth0/auth0-spa-js/issues/845 r.GET("v2/logout", logout()) + + debugMsg := "" + if dev, ok := os.LookupEnv(op.OidcDevMode); ok { + if isDev, _ := strconv.ParseBool(dev); isDev { + debugMsg = " with debug mode" + } + } + log.Infof("auth: oidc server started%s at %s", debugMsg, domain.String()) } func setURLVarsHandler() func(handler http.Handler) http.Handler { diff --git a/internal/app/config.go b/internal/app/config.go index f3b3e1311..128e265f7 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -55,6 +55,7 @@ type Auth0Config struct { } type AuthSrvConfig struct { + Dev bool Disabled bool Domain string `default:"http://localhost:8080"` UIDomain string `default:"http://localhost:8080"` @@ -131,6 +132,8 @@ func ReadConfig(debug bool) (*Config, error) { if debug { c.Dev = true + } + if c.Dev || c.AuthSrv.Dev { if _, ok := os.LookupEnv(op.OidcDevMode); !ok { _ = os.Setenv(op.OidcDevMode, "1") } @@ -173,24 +176,28 @@ func (c Config) Auths() (res []AuthConfig) { return append(res, c.Auth...) 
} +func prepareUrl(url string) string { + if !strings.HasPrefix(url, "https://") && !strings.HasPrefix(url, "http://") { + url = "https://" + url + } + url = strings.TrimSuffix(url, "/") + return url +} + func (c Auth0Config) AuthConfig() *AuthConfig { - domain := c.Domain if c.Domain == "" { return nil } - if !strings.HasPrefix(domain, "https://") && !strings.HasPrefix(domain, "http://") { - domain = "https://" + domain - } - if !strings.HasSuffix(domain, "/") { - domain = domain + "/" - } + domain := prepareUrl(c.Domain) + aud := []string{} if c.Audience != "" { - aud = append(aud, c.Audience) + aud = append(aud, prepareUrl(c.Audience)) } return &AuthConfig{ - ISS: domain, - AUD: aud, + ISS: domain, + AUD: aud, + ClientID: &c.ClientID, } } diff --git a/internal/app/config_test.go b/internal/app/config_test.go index 6f12a995b..d24059025 100644 --- a/internal/app/config_test.go +++ b/internal/app/config_test.go @@ -7,11 +7,13 @@ import ( ) func TestAuth0Config_AuthConfig(t *testing.T) { + s := "" assert.Equal(t, &AuthConfig{ - ISS: "https://hoge.auth0.com/", - AUD: []string{"xxx"}, + ISS: "https://hoge.auth0.com", + AUD: []string{"https://xxx"}, + ClientID: &s, }, Auth0Config{ - Domain: "hoge.auth0.com", + Domain: "hoge.auth0.com/", Audience: "xxx", }.AuthConfig()) assert.Nil(t, Auth0Config{ diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 32e9020a9..d2ad9410e 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -28,6 +28,7 @@ func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { validators := make([]*validator.Validator, 0, len(providers)) for _, p := range providers { issuerURL, err := url.Parse(p.ISS) + issuerURL.Path = "/" if err != nil { return nil, fmt.Errorf("failed to parse the issuer url: %w", err) } @@ -49,7 +50,7 @@ func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { v, err := validator.New( provider.KeyFunc, algorithm, - p.ISS, + issuerURL.String(), p.AUD, ) if err != nil { diff --git 
a/internal/app/web.go b/internal/app/web.go index 9a195fc30..f8d811d33 100644 --- a/internal/app/web.go +++ b/internal/app/web.go @@ -3,6 +3,7 @@ package app import ( "net/http" "os" + "strings" "github.com/labstack/echo/v4" "github.com/labstack/echo/v4/middleware" @@ -21,7 +22,7 @@ func web(e *echo.Echo, wc WebConfig, a []AuthConfig) { if len(a) > 0 { ac := a[0] if ac.ISS != "" { - config["auth0Domain"] = ac.ISS + config["auth0Domain"] = strings.TrimSuffix(ac.ISS, "/") } if ac.ClientID != nil { config["auth0ClientId"] = *ac.ClientID From fef60e6e7656f18f466f08f8fa726909d8821831 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 6 Apr 2022 14:32:29 +0900 Subject: [PATCH 185/253] feat: support sign up with information provided by OIDC providers (#130) --- internal/adapter/context.go | 25 +- internal/adapter/gql/context.go | 4 - .../adapter/gql/resolver_mutation_user.go | 39 +- internal/adapter/http/user.go | 93 +-- internal/app/auth_client.go | 18 +- internal/app/jwt.go | 33 +- .../infrastructure/auth0/authenticator.go | 16 - .../auth0/authenticator_test.go | 17 +- internal/infrastructure/mailer/mock.go | 39 ++ .../infrastructure/memory/auth_request.go | 18 +- internal/infrastructure/memory/team.go | 16 +- internal/infrastructure/memory/user.go | 24 +- .../infrastructure/mongo/mongodoc/user.go | 2 +- internal/usecase/cursor.go | 2 - internal/usecase/gateway/authenticator.go | 1 - internal/usecase/interactor/team_test.go | 2 +- internal/usecase/interactor/user.go | 218 +------ internal/usecase/interactor/user_signup.go | 388 +++++++++++++ .../usecase/interactor/user_signup_test.go | 549 ++++++++++++++++++ internal/usecase/interfaces/user.go | 32 +- internal/usecase/pagination.go | 2 + pkg/user/auth.go | 2 +- pkg/user/auth_test.go | 13 +- pkg/user/builder.go | 19 +- pkg/user/builder_test.go | 36 +- pkg/user/password.go | 123 ++++ pkg/user/password_test.go | 51 ++ pkg/user/theme.go | 4 + pkg/user/user.go | 72 +-- pkg/user/user_test.go | 91 +-- pkg/user/verification.go | 
47 +- 31 files changed, 1496 insertions(+), 500 deletions(-) create mode 100644 internal/infrastructure/mailer/mock.go create mode 100644 internal/usecase/interactor/user_signup.go create mode 100644 internal/usecase/interactor/user_signup_test.go create mode 100644 pkg/user/password.go create mode 100644 pkg/user/password_test.go diff --git a/internal/adapter/context.go b/internal/adapter/context.go index e85980331..b09e2b07e 100644 --- a/internal/adapter/context.go +++ b/internal/adapter/context.go @@ -13,10 +13,19 @@ type ContextKey string const ( contextUser ContextKey = "user" contextOperator ContextKey = "operator" - contextSub ContextKey = "sub" + contextAuthInfo ContextKey = "authinfo" contextUsecases ContextKey = "usecases" ) +type AuthInfo struct { + Token string + Sub string + Iss string + Name string + Email string + EmailVerified *bool +} + func AttachUser(ctx context.Context, u *user.User) context.Context { return context.WithValue(ctx, contextUser, u) } @@ -25,8 +34,8 @@ func AttachOperator(ctx context.Context, o *usecase.Operator) context.Context { return context.WithValue(ctx, contextOperator, o) } -func AttachSub(ctx context.Context, sub string) context.Context { - return context.WithValue(ctx, contextSub, sub) +func AttachAuthInfo(ctx context.Context, a AuthInfo) context.Context { + return context.WithValue(ctx, contextAuthInfo, a) } func AttachUsecases(ctx context.Context, u *interfaces.Container) context.Context { @@ -70,13 +79,13 @@ func Operator(ctx context.Context) *usecase.Operator { return nil } -func Sub(ctx context.Context) string { - if v := ctx.Value(contextSub); v != nil { - if v2, ok := v.(string); ok { - return v2 +func GetAuthInfo(ctx context.Context) *AuthInfo { + if v := ctx.Value(contextAuthInfo); v != nil { + if v2, ok := v.(AuthInfo); ok { + return &v2 } } - return "" + return nil } func Usecases(ctx context.Context) *interfaces.Container { diff --git a/internal/adapter/gql/context.go b/internal/adapter/gql/context.go index 
fd9a63902..0c5a599bc 100644 --- a/internal/adapter/gql/context.go +++ b/internal/adapter/gql/context.go @@ -39,10 +39,6 @@ func getOperator(ctx context.Context) *usecase.Operator { return adapter.Operator(ctx) } -func getSub(ctx context.Context) string { - return adapter.Sub(ctx) -} - func usecases(ctx context.Context) *interfaces.Container { return adapter.Usecases(ctx) } diff --git a/internal/adapter/gql/resolver_mutation_user.go b/internal/adapter/gql/resolver_mutation_user.go index b42475f12..ba86f06ad 100644 --- a/internal/adapter/gql/resolver_mutation_user.go +++ b/internal/adapter/gql/resolver_mutation_user.go @@ -3,31 +3,44 @@ package gql import ( "context" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" ) func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) { - secret := "" - if input.Secret != nil { - secret = *input.Secret + au := adapter.GetAuthInfo(ctx) + + var u *user.User + var t *user.Team + var err error + + if au != nil { + u, t, err = usecases(ctx).User.SignupOIDC(ctx, interfaces.SignupOIDCParam{ + Sub: au.Sub, + AccessToken: au.Token, + Issuer: au.Iss, + Email: au.Email, + Name: au.Name, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + Lang: input.Lang, + Theme: gqlmodel.ToTheme(input.Theme), + UserID: id.UserIDFromRefID(input.UserID), + TeamID: id.TeamIDFromRefID(input.TeamID), + }, + }) + } else { + return nil, interfaces.ErrOperationDenied } - sub := getSub(ctx) - u, team, err := usecases(ctx).User.Signup(ctx, interfaces.SignupParam{ - Sub: &sub, - Lang: input.Lang, - Theme: gqlmodel.ToTheme(input.Theme), - UserID: id.UserIDFromRefID(input.UserID), - TeamID: id.TeamIDFromRefID(input.TeamID), - Secret: &secret, - }) if err != nil { return 
nil, err } - return &gqlmodel.SignupPayload{User: gqlmodel.ToUser(u), Team: gqlmodel.ToTeam(team)}, nil + return &gqlmodel.SignupPayload{User: gqlmodel.ToUser(u), Team: gqlmodel.ToTeam(t)}, nil } func (r *mutationResolver) UpdateMe(ctx context.Context, input gqlmodel.UpdateMeInput) (*gqlmodel.UpdateMePayload, error) { diff --git a/internal/adapter/http/user.go b/internal/adapter/http/user.go index b2d7a0a3f..40f5bfca0 100644 --- a/internal/adapter/http/user.go +++ b/internal/adapter/http/user.go @@ -2,9 +2,13 @@ package http import ( "context" + "errors" + "github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" ) type UserController struct { @@ -24,13 +28,15 @@ type PasswordResetInput struct { } type SignupInput struct { - Sub *string `json:"sub"` - Secret *string `json:"secret"` - UserID *id.UserID `json:"userId"` - TeamID *id.TeamID `json:"teamId"` - Name *string `json:"username"` - Email *string `json:"email"` - Password *string `json:"password"` + Sub *string `json:"sub"` + Secret *string `json:"secret"` + UserID *id.UserID `json:"userId"` + TeamID *id.TeamID `json:"teamId"` + Name *string `json:"name"` + Email *string `json:"email"` + Password *string `json:"password"` + Theme *user.Theme `json:"theme"` + Lang *language.Tag `json:"lang"` } type CreateVerificationInput struct { @@ -55,21 +61,50 @@ type SignupOutput struct { Email string `json:"email"` } -func (c *UserController) Signup(ctx context.Context, input SignupInput) (interface{}, error) { - u, _, err := c.usecase.Signup(ctx, interfaces.SignupParam{ - Sub: input.Sub, - Secret: input.Secret, - UserID: input.UserID, - TeamID: input.TeamID, - Name: input.Name, - Email: input.Email, - Password: input.Password, - }) - if err != nil { - return nil, err +func (c *UserController) Signup(ctx context.Context, input SignupInput) 
(SignupOutput, error) { + var u *user.User + var err error + + if au := adapter.GetAuthInfo(ctx); au != nil { + var name string + if input.Name != nil { + name = *input.Name + } + + u, _, err = c.usecase.SignupOIDC(ctx, interfaces.SignupOIDCParam{ + Sub: au.Sub, + AccessToken: au.Token, + Issuer: au.Iss, + Email: au.Email, + Name: name, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + UserID: input.UserID, + TeamID: input.TeamID, + Lang: input.Lang, + Theme: input.Theme, + }, + }) + } else if input.Name != nil && input.Email != nil && input.Password != nil { + u, _, err = c.usecase.Signup(ctx, interfaces.SignupParam{ + Sub: input.Sub, + Name: *input.Name, + Email: *input.Email, + Password: *input.Password, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + UserID: input.UserID, + TeamID: input.TeamID, + Lang: input.Lang, + Theme: input.Theme, + }, + }) + } else { + err = errors.New("invalid params") } - if err := c.usecase.CreateVerification(ctx, *input.Email); err != nil { - return nil, err + + if err != nil { + return SignupOutput{}, err } return SignupOutput{ @@ -80,16 +115,13 @@ func (c *UserController) Signup(ctx context.Context, input SignupInput) (interfa } func (c *UserController) CreateVerification(ctx context.Context, input CreateVerificationInput) error { - if err := c.usecase.CreateVerification(ctx, input.Email); err != nil { - return err - } - return nil + return c.usecase.CreateVerification(ctx, input.Email) } -func (c *UserController) VerifyUser(ctx context.Context, code string) (interface{}, error) { +func (c *UserController) VerifyUser(ctx context.Context, code string) (VerifyUserOutput, error) { u, err := c.usecase.VerifyUser(ctx, code) if err != nil { - return nil, err + return VerifyUserOutput{}, err } return VerifyUserOutput{ UserID: u.ID().String(), @@ -98,12 +130,7 @@ func (c *UserController) VerifyUser(ctx context.Context, code string) (interface } func (c *UserController) StartPasswordReset(ctx context.Context, input 
PasswordResetInput) error { - err := c.usecase.StartPasswordReset(ctx, input.Email) - if err != nil { - return err - } - - return nil + return c.usecase.StartPasswordReset(ctx, input.Email) } func (c *UserController) PasswordReset(ctx context.Context, input PasswordResetInput) error { diff --git a/internal/app/auth_client.go b/internal/app/auth_client.go index 5aad87ec2..ee32430d0 100644 --- a/internal/app/auth_client.go +++ b/internal/app/auth_client.go @@ -17,13 +17,11 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { req := c.Request() ctx := req.Context() - var sub, userID string + var userID string var u *user.User // get sub from context - if s, ok := ctx.Value(contextAuth0Sub).(string); ok { - sub = s - } + au := adapter.GetAuthInfo(ctx) if u, ok := ctx.Value(contextUser).(string); ok { userID = u } @@ -51,26 +49,22 @@ func authMiddleware(cfg *ServerConfig) echo.MiddlewareFunc { } } - if u == nil && sub != "" { + if u == nil && au != nil { var err error // find user - u, err = cfg.Repos.User.FindByAuth0Sub(ctx, sub) + u, err = cfg.Repos.User.FindByAuth0Sub(ctx, au.Sub) if err != nil && err != rerror.ErrNotFound { return err } } // save a new sub - if u != nil && sub != "" { - if err := addAuth0SubToUser(ctx, u, user.AuthFromAuth0Sub(sub), cfg); err != nil { + if u != nil && au != nil { + if err := addAuth0SubToUser(ctx, u, user.AuthFromAuth0Sub(au.Sub), cfg); err != nil { return err } } - if sub != "" { - ctx = adapter.AttachSub(ctx, sub) - } - if u != nil { op, err := generateOperator(ctx, cfg, u) if err != nil { diff --git a/internal/app/jwt.go b/internal/app/jwt.go index d2ad9410e..94476673f 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -4,12 +4,14 @@ import ( "context" "fmt" "net/url" + "strings" "time" jwtmiddleware "github.com/auth0/go-jwt-middleware/v2" "github.com/auth0/go-jwt-middleware/v2/jwks" "github.com/auth0/go-jwt-middleware/v2/validator" "github.com/labstack/echo/v4" + 
"github.com/reearth/reearth-backend/internal/adapter" "github.com/reearth/reearth-backend/pkg/log" ) @@ -17,11 +19,21 @@ type contextKey string const ( debugUserHeader = "X-Reearth-Debug-User" - contextAuth0Sub contextKey = "auth0Sub" contextUser contextKey = "reearth_user" defaultJWTTTL = 5 * time.Minute ) +type customClaims struct { + Name string `json:"name"` + Nickname string `json:"nickname"` + Email string `json:"email"` + EmailVerified *bool `json:"email_verified"` +} + +func (c *customClaims) Validate(ctx context.Context) error { + return nil +} + type MultiValidator []*validator.Validator func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { @@ -52,6 +64,9 @@ func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { algorithm, issuerURL.String(), p.AUD, + validator.WithCustomClaims(func() validator.CustomClaims { + return &customClaims{} + }), ) if err != nil { return nil, err @@ -94,8 +109,20 @@ func parseJwtMiddleware() echo.MiddlewareFunc { rawClaims := ctx.Value(jwtmiddleware.ContextKey{}) if claims, ok := rawClaims.(*validator.ValidatedClaims); ok { - // attach sub and access token to context - ctx = context.WithValue(ctx, contextAuth0Sub, claims.RegisteredClaims.Subject) + // attach auth info to context + customClaims := claims.CustomClaims.(*customClaims) + name := customClaims.Nickname + if name == "" { + name = customClaims.Name + } + ctx = adapter.AttachAuthInfo(ctx, adapter.AuthInfo{ + Token: strings.TrimPrefix(c.Request().Header.Get("Authorization"), "Bearer "), + Sub: claims.RegisteredClaims.Subject, + Iss: claims.RegisteredClaims.Issuer, + Name: name, + Email: customClaims.Email, + EmailVerified: customClaims.EmailVerified, + }) } c.SetRequest(req.WithContext(ctx)) diff --git a/internal/infrastructure/auth0/authenticator.go b/internal/infrastructure/auth0/authenticator.go index 91847d9a1..b5ac5c0bb 100644 --- a/internal/infrastructure/auth0/authenticator.go +++ b/internal/infrastructure/auth0/authenticator.go 
@@ -72,22 +72,6 @@ func New(domain, clientID, clientSecret string) *Auth0 { } } -func (a *Auth0) FetchUser(id string) (gateway.AuthenticatorUser, error) { - if err := a.updateToken(); err != nil { - return gateway.AuthenticatorUser{}, err - } - - var r response - r, err := a.exec(http.MethodGet, "api/v2/users/"+id, a.token, nil) - if err != nil { - if !a.disableLogging { - log.Errorf("auth0: fetch user: %+v", err) - } - return gateway.AuthenticatorUser{}, errors.New("failed to auth") - } - return r.Into(), nil -} - func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway.AuthenticatorUser, err error) { err = a.updateToken() if err != nil { diff --git a/internal/infrastructure/auth0/authenticator_test.go b/internal/infrastructure/auth0/authenticator_test.go index ac293563c..3ab188526 100644 --- a/internal/infrastructure/auth0/authenticator_test.go +++ b/internal/infrastructure/auth0/authenticator_test.go @@ -50,20 +50,8 @@ func TestAuth0(t *testing.T) { assert.True(t, a.needsFetchToken()) a.current = func() time.Time { return current } - r, err := a.FetchUser(userID) - assert.NoError(t, err) - assert.Equal(t, gateway.AuthenticatorUser{ - ID: userID, - Email: userEmail, - EmailVerified: true, - Name: userName, - }, r) - - _, err = a.FetchUser(token) - assert.Error(t, err) - newEmail := "xxxxx" - r, err = a.UpdateUser(gateway.AuthenticatorUpdateUserParam{ + r, err := a.UpdateUser(gateway.AuthenticatorUpdateUserParam{ ID: userID, Email: &newEmail, }) @@ -76,9 +64,6 @@ func TestAuth0(t *testing.T) { }, r) a.current = func() time.Time { return current2 } - _, err = a.FetchUser(token + "a") - assert.Error(t, err) - assert.Equal(t, current2.Add(time.Second*expiresIn), a.expireAt) } func res(i interface{}) io.ReadCloser { diff --git a/internal/infrastructure/mailer/mock.go b/internal/infrastructure/mailer/mock.go new file mode 100644 index 000000000..b3d04f9bc --- /dev/null +++ b/internal/infrastructure/mailer/mock.go @@ -0,0 +1,39 @@ +package mailer 
+ +import ( + "sync" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" +) + +type Mock struct { + lock sync.Mutex + mails []Mail +} + +type Mail struct { + To []gateway.Contact + Subject string + PlainContent string + HTMLContent string +} + +func NewMock() *Mock { + return &Mock{} +} + +func (m *Mock) SendMail(to []gateway.Contact, subject, text, html string) error { + m.lock.Lock() + defer m.lock.Unlock() + m.mails = append(m.mails, Mail{ + To: append([]gateway.Contact{}, to...), + Subject: subject, + PlainContent: text, + HTMLContent: html, + }) + return nil +} + +func (m *Mock) Mails() []Mail { + return append([]Mail{}, m.mails...) +} diff --git a/internal/infrastructure/memory/auth_request.go b/internal/infrastructure/memory/auth_request.go index daabf8c64..02daad83d 100644 --- a/internal/infrastructure/memory/auth_request.go +++ b/internal/infrastructure/memory/auth_request.go @@ -12,12 +12,12 @@ import ( type AuthRequest struct { lock sync.Mutex - data map[id.AuthRequestID]auth.Request + data map[id.AuthRequestID]*auth.Request } func NewAuthRequest() repo.AuthRequest { return &AuthRequest{ - data: map[id.AuthRequestID]auth.Request{}, + data: map[id.AuthRequestID]*auth.Request{}, } } @@ -27,9 +27,9 @@ func (r *AuthRequest) FindByID(_ context.Context, id id.AuthRequestID) (*auth.Re d, ok := r.data[id] if ok { - return &d, nil + return d, nil } - return &auth.Request{}, rerror.ErrNotFound + return nil, rerror.ErrNotFound } func (r *AuthRequest) FindByCode(_ context.Context, s string) (*auth.Request, error) { @@ -38,11 +38,11 @@ func (r *AuthRequest) FindByCode(_ context.Context, s string) (*auth.Request, er for _, ar := range r.data { if ar.GetCode() == s { - return &ar, nil + return ar, nil } } - return &auth.Request{}, rerror.ErrNotFound + return nil, rerror.ErrNotFound } func (r *AuthRequest) FindBySubject(_ context.Context, s string) (*auth.Request, error) { @@ -51,18 +51,18 @@ func (r *AuthRequest) FindBySubject(_ context.Context, s string) 
(*auth.Request, for _, ar := range r.data { if ar.GetSubject() == s { - return &ar, nil + return ar, nil } } - return &auth.Request{}, rerror.ErrNotFound + return nil, rerror.ErrNotFound } func (r *AuthRequest) Save(_ context.Context, request *auth.Request) error { r.lock.Lock() defer r.lock.Unlock() - r.data[request.ID()] = *request + r.data[request.ID()] = request return nil } diff --git a/internal/infrastructure/memory/team.go b/internal/infrastructure/memory/team.go index b8619ed5b..d779c06f3 100644 --- a/internal/infrastructure/memory/team.go +++ b/internal/infrastructure/memory/team.go @@ -12,12 +12,12 @@ import ( type Team struct { lock sync.Mutex - data map[id.TeamID]user.Team + data map[id.TeamID]*user.Team } func NewTeam() repo.Team { return &Team{ - data: map[id.TeamID]user.Team{}, + data: map[id.TeamID]*user.Team{}, } } @@ -28,7 +28,7 @@ func (r *Team) FindByUser(ctx context.Context, i id.UserID) (user.TeamList, erro result := user.TeamList{} for _, d := range r.data { if d.Members().ContainsUser(i) { - result = append(result, &d) + result = append(result, d) } } return result, nil @@ -41,7 +41,7 @@ func (r *Team) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamList, e result := user.TeamList{} for _, id := range ids { if d, ok := r.data[id]; ok { - result = append(result, &d) + result = append(result, d) } else { result = append(result, nil) } @@ -55,16 +55,16 @@ func (r *Team) FindByID(ctx context.Context, id id.TeamID) (*user.Team, error) { d, ok := r.data[id] if ok { - return &d, nil + return d, nil } - return &user.Team{}, rerror.ErrNotFound + return nil, rerror.ErrNotFound } func (r *Team) Save(ctx context.Context, t *user.Team) error { r.lock.Lock() defer r.lock.Unlock() - r.data[t.ID()] = *t + r.data[t.ID()] = t return nil } @@ -73,7 +73,7 @@ func (r *Team) SaveAll(ctx context.Context, teams []*user.Team) error { defer r.lock.Unlock() for _, t := range teams { - r.data[t.ID()] = *t + r.data[t.ID()] = t } return nil } diff --git 
a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go index e1134efdb..0da5e7d82 100644 --- a/internal/infrastructure/memory/user.go +++ b/internal/infrastructure/memory/user.go @@ -12,12 +12,12 @@ import ( type User struct { lock sync.Mutex - data map[id.UserID]user.User + data map[id.UserID]*user.User } func NewUser() repo.User { return &User{ - data: map[id.UserID]user.User{}, + data: map[id.UserID]*user.User{}, } } @@ -28,7 +28,7 @@ func (r *User) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, er result := []*user.User{} for _, id := range ids { if d, ok := r.data[id]; ok { - result = append(result, &d) + result = append(result, d) } else { result = append(result, nil) } @@ -42,16 +42,16 @@ func (r *User) FindByID(ctx context.Context, id id.UserID) (*user.User, error) { d, ok := r.data[id] if ok { - return &d, nil + return d, nil } - return &user.User{}, rerror.ErrNotFound + return nil, rerror.ErrNotFound } func (r *User) Save(ctx context.Context, u *user.User) error { r.lock.Lock() defer r.lock.Unlock() - r.data[u.ID()] = *u + r.data[u.ID()] = u return nil } @@ -65,7 +65,7 @@ func (r *User) FindByAuth0Sub(ctx context.Context, auth0sub string) (*user.User, for _, u := range r.data { if u.ContainAuth(user.AuthFromAuth0Sub(auth0sub)) { - return &u, nil + return u, nil } } @@ -83,7 +83,7 @@ func (r *User) FindByPasswordResetRequest(ctx context.Context, token string) (*u for _, u := range r.data { pwdReq := u.PasswordReset() if pwdReq != nil && pwdReq.Token == token { - return &u, nil + return u, nil } } @@ -100,7 +100,7 @@ func (r *User) FindByEmail(ctx context.Context, email string) (*user.User, error for _, u := range r.data { if u.Email() == email { - return &u, nil + return u, nil } } @@ -117,7 +117,7 @@ func (r *User) FindByName(ctx context.Context, name string) (*user.User, error) for _, u := range r.data { if u.Name() == name { - return &u, nil + return u, nil } } @@ -134,7 +134,7 @@ func (r *User) 
FindByNameOrEmail(ctx context.Context, nameOrEmail string) (*user for _, u := range r.data { if u.Email() == nameOrEmail || u.Name() == nameOrEmail { - return &u, nil + return u, nil } } @@ -159,7 +159,7 @@ func (r *User) FindByVerification(ctx context.Context, code string) (*user.User, for _, u := range r.data { if u.Verification() != nil && u.Verification().Code() == code { - return &u, nil + return u, nil } } diff --git a/internal/infrastructure/mongo/mongodoc/user.go b/internal/infrastructure/mongo/mongodoc/user.go index 2f52da3e5..ae16cdef7 100644 --- a/internal/infrastructure/mongo/mongodoc/user.go +++ b/internal/infrastructure/mongo/mongodoc/user.go @@ -124,7 +124,7 @@ func (d *UserDocument) Model() (*user1.User, error) { Team(tid). LangFrom(d.Lang). Verification(v). - Password(d.Password). + EncodedPassword(d.Password). PasswordReset(d.PasswordReset.Model()). Theme(user.Theme(d.Theme)). Build() diff --git a/internal/usecase/cursor.go b/internal/usecase/cursor.go index 6e3e5973b..aed24547c 100644 --- a/internal/usecase/cursor.go +++ b/internal/usecase/cursor.go @@ -1,3 +1 @@ package usecase - -type Cursor string diff --git a/internal/usecase/gateway/authenticator.go b/internal/usecase/gateway/authenticator.go index fa4fa0bbc..86a30b6a7 100644 --- a/internal/usecase/gateway/authenticator.go +++ b/internal/usecase/gateway/authenticator.go @@ -15,6 +15,5 @@ type AuthenticatorUser struct { } type Authenticator interface { - FetchUser(string) (AuthenticatorUser, error) UpdateUser(AuthenticatorUpdateUserParam) (AuthenticatorUser, error) } diff --git a/internal/usecase/interactor/team_test.go b/internal/usecase/interactor/team_test.go index a334e340d..739bbbe3c 100644 --- a/internal/usecase/interactor/team_test.go +++ b/internal/usecase/interactor/team_test.go @@ -16,7 +16,7 @@ func TestCreateTeam(t *testing.T) { db := memory.InitRepos(nil) - u := user.New().NewID().Team(id.NewTeamID()).MustBuild() + u := 
user.New().NewID().Email("aaa@bbb.com").Team(id.NewTeamID()).MustBuild() teamUC := NewTeam(db) op := &usecase.Operator{User: u.ID()} team, err := teamUC.Create(ctx, "team name", u.ID(), op) diff --git a/internal/usecase/interactor/user.go b/internal/usecase/interactor/user.go index eea781aee..a73ec7c0a 100644 --- a/internal/usecase/interactor/user.go +++ b/internal/usecase/interactor/user.go @@ -56,7 +56,6 @@ var ( authTextTMPL *textTmpl.Template authHTMLTMPL *htmlTmpl.Template - signupMailContent mailContent passwordResetMailContent mailContent ) @@ -71,12 +70,6 @@ func init() { log.Panicf("password reset email template parse error: %s\n", err) } - signupMailContent = mailContent{ - Message: "Thank you for signing up to Re:Earth. Please verify your email address by clicking the button below.", - Suffix: "You can use this email address to log in to Re:Earth account anytime.", - ActionLabel: "Activate your account and log in", - } - passwordResetMailContent = mailContent{ Message: "Thank you for using Re:Earth. Weโ€™ve received a request to reset your password. 
If this was you, please click the link below to confirm and change your password.", Suffix: "If you did not mean to reset your password, then you can ignore this email.", @@ -131,176 +124,6 @@ func (i *User) Fetch(ctx context.Context, ids []id.UserID, operator *usecase.Ope return res, nil } -func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (u *user.User, _ *user.Team, err error) { - var team *user.Team - var auth *user.Auth - var email, name string - var tx repo.Tx - - isOidc := inp.Sub != nil && inp.Password == nil - isAuth := inp.Name != nil && inp.Email != nil && inp.Password != nil - if !isAuth && !isOidc { - return nil, nil, errors.New("invalid params") - } - - if i.signupSecret != "" && *inp.Secret != i.signupSecret { - return nil, nil, interfaces.ErrSignupInvalidSecret - } - - tx, err = i.transaction.Begin() - if err != nil { - return nil, nil, err - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - // Check if team already exists - if inp.TeamID != nil { - existed, err := i.teamRepo.FindByID(ctx, *inp.TeamID) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return nil, nil, err - } - if existed != nil { - return nil, nil, errors.New("existed team") - } - } - - if isOidc { - if len(*inp.Sub) == 0 { - return nil, nil, errors.New("sub is required") - } - name, email, auth, err = i.oidcSignup(ctx, inp) - if err != nil { - return - } - } else if isAuth { - if _, err := mail.ParseAddress(*inp.Name); err == nil || *inp.Name == "" { - return nil, nil, interfaces.ErrSignupInvalidName - } - if _, err := mail.ParseAddress(*inp.Email); err != nil { - return nil, nil, interfaces.ErrInvalidUserEmail - } - if *inp.Password == "" { - return nil, nil, interfaces.ErrSignupInvalidPassword - } - - var unverifiedUser *user.User - var unverifiedTeam *user.Team - name, email, unverifiedUser, unverifiedTeam, err = i.reearthSignup(ctx, inp) - if err != nil { - return - } - if unverifiedUser != nil && 
unverifiedTeam != nil { - return unverifiedUser, unverifiedTeam, nil - } - } - - // Initialize user and team - u, team, err = user.Init(user.InitParams{ - Email: email, - Name: name, - Sub: auth, - Password: inp.Password, - Lang: inp.Lang, - Theme: inp.Theme, - UserID: inp.UserID, - TeamID: inp.TeamID, - }) - - if err != nil { - return nil, nil, err - } - - if err := i.userRepo.Save(ctx, u); err != nil { - return nil, nil, err - } - - if err := i.teamRepo.Save(ctx, team); err != nil { - return nil, nil, err - } - - tx.Commit() - return u, team, nil -} - -func (i *User) reearthSignup(ctx context.Context, inp interfaces.SignupParam) (string, string, *user.User, *user.Team, error) { - // Check if user email already exists - existedByEmail, err := i.userRepo.FindByEmail(ctx, *inp.Email) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, nil, err - } - - if existedByEmail != nil { - if existedByEmail.Verification() != nil && existedByEmail.Verification().IsVerified() { - return "", "", nil, nil, errors.New("existed email") - } - - // if user exists but not verified -> create a new verification - if err := i.CreateVerification(ctx, *inp.Email); err != nil { - return "", "", nil, nil, err - } - - team, err := i.teamRepo.FindByID(ctx, existedByEmail.Team()) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, nil, err - } - return "", "", existedByEmail, team, nil - } - - existedByName, err := i.userRepo.FindByName(ctx, *inp.Name) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, nil, err - } - - if existedByName != nil { - return "", "", nil, nil, errors.New("taken username") - } - - // !existedByName && !existedByEmail - return *inp.Name, *inp.Email, nil, nil, nil -} - -func (i *User) oidcSignup(ctx context.Context, inp interfaces.SignupParam) (string, string, *user.Auth, error) { - // Check if user already exists - existed, err := i.userRepo.FindByAuth0Sub(ctx, *inp.Sub) - if err != nil 
&& !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, err - } - if existed != nil { - return "", "", nil, errors.New("existed user") - } - - if inp.UserID != nil { - existed, err := i.userRepo.FindByID(ctx, *inp.UserID) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, err - } - if existed != nil { - return "", "", nil, errors.New("existed user") - } - } - - // Fetch user info - ui, err := i.authenticator.FetchUser(*inp.Sub) - if err != nil { - return "", "", nil, err - } - - // Check if user and team already exists - existed, err = i.userRepo.FindByEmail(ctx, ui.Email) - if err != nil && !errors.Is(err, rerror.ErrNotFound) { - return "", "", nil, err - } - if existed != nil { - return "", "", nil, errors.New("existed user") - } - - return ui.Name, ui.Email, user.AuthFromAuth0Sub(*inp.Sub).Ref(), nil -} - func (i *User) GetUserByCredentials(ctx context.Context, inp interfaces.GetUserByCredentials) (u *user.User, err error) { u, err = i.userRepo.FindByNameOrEmail(ctx, inp.Email) if err != nil && !errors.Is(rerror.ErrNotFound, err) { @@ -468,7 +291,9 @@ func (i *User) UpdateMe(ctx context.Context, p interfaces.UpdateMeParam, operato } } if p.Email != nil { - u.UpdateEmail(*p.Email) + if err := u.UpdateEmail(*p.Email); err != nil { + return nil, err + } } if p.Lang != nil { u.UpdateLang(*p.Lang) @@ -645,45 +470,14 @@ func (i *User) DeleteMe(ctx context.Context, userID id.UserID, operator *usecase } func (i *User) CreateVerification(ctx context.Context, email string) error { - tx, err := i.transaction.Begin() - if err != nil { - return err - } u, err := i.userRepo.FindByEmail(ctx, email) if err != nil { return err } - - vr := user.NewVerification() - u.SetVerification(vr) - err = i.userRepo.Save(ctx, u) - if err != nil { - return err - } - - var TextOut, HTMLOut bytes.Buffer - link := i.authSrvUIDomain + "/?user-verification-token=" + vr.Code() - signupMailContent.UserName = email - signupMailContent.ActionURL = htmlTmpl.URL(link) 
- - if err := authTextTMPL.Execute(&TextOut, signupMailContent); err != nil { - return err - } - if err := authHTMLTMPL.Execute(&HTMLOut, signupMailContent); err != nil { - return err - } - - err = i.mailer.SendMail([]gateway.Contact{ - { - Email: u.Email(), - Name: u.Name(), - }, - }, "email verification", TextOut.String(), HTMLOut.String()) - if err != nil { - return err + if u.Verification().IsVerified() { + return nil } - tx.Commit() - return nil + return i.createVerification(ctx, u) } func (i *User) VerifyUser(ctx context.Context, code string) (*user.User, error) { diff --git a/internal/usecase/interactor/user_signup.go b/internal/usecase/interactor/user_signup.go new file mode 100644 index 000000000..19cff4009 --- /dev/null +++ b/internal/usecase/interactor/user_signup.go @@ -0,0 +1,388 @@ +package interactor + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + htmlTmpl "html/template" + "net/http" + "net/url" + "path" + "strings" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/user" +) + +func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (*user.User, *user.Team, error) { + if inp.Password == "" { + return nil, nil, interfaces.ErrSignupInvalidPassword + } + if inp.Name == "" { + return nil, nil, interfaces.ErrSignupInvalidName + } + if err := i.verifySignupSecret(inp.Secret); err != nil { + return nil, nil, err + } + + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // Check if user and team already exists + existedUser, existedTeam, err := i.userAlreadyExists(ctx, inp.User.UserID, inp.Sub, &inp.Name, inp.User.TeamID) + if err != nil { + return nil, nil, err + } + + if 
existedUser != nil { + if existedUser.Verification() == nil || !existedUser.Verification().IsVerified() { + // if user exists but not verified -> create a new verification + if err := i.createVerification(ctx, existedUser); err != nil { + return nil, nil, err + } + return existedUser, existedTeam, nil + } + return nil, nil, interfaces.ErrUserAlreadyExists + } + + // Initialize user and team + var auth *user.Auth + if inp.Sub != nil { + auth = user.AuthFromAuth0Sub(*inp.Sub).Ref() + } + u, team, err := user.Init(user.InitParams{ + Email: inp.Email, + Name: inp.Name, + Sub: auth, + Password: &inp.Password, + Lang: inp.User.Lang, + Theme: inp.User.Theme, + UserID: inp.User.UserID, + TeamID: inp.User.TeamID, + }) + if err != nil { + return nil, nil, err + } + + if err := i.userRepo.Save(ctx, u); err != nil { + return nil, nil, err + } + if err := i.teamRepo.Save(ctx, team); err != nil { + return nil, nil, err + } + + if err := i.createVerification(ctx, u); err != nil { + return nil, nil, err + } + + tx.Commit() + return u, team, nil +} + +func (i *User) SignupOIDC(ctx context.Context, inp interfaces.SignupOIDCParam) (u *user.User, _ *user.Team, err error) { + if err := i.verifySignupSecret(inp.Secret); err != nil { + return nil, nil, err + } + + sub := inp.Sub + name := inp.Name + email := inp.Email + if sub == "" || email == "" { + ui, err := getUserInfoFromISS(ctx, inp.Issuer, inp.AccessToken) + if err != nil { + return nil, nil, err + } + sub = ui.Sub + email = ui.Email + if name == "" { + name = ui.Nickname + } + if name == "" { + name = ui.Name + } + if name == "" { + name = ui.Email + } + } + + tx, err := i.transaction.Begin() + if err != nil { + return nil, nil, err + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + // Check if user and team already exists + if existedUser, existedTeam, err := i.userAlreadyExists(ctx, inp.User.UserID, &sub, &name, inp.User.TeamID); err != nil { + return nil, nil, err + } else if 
existedUser != nil || existedTeam != nil { + return nil, nil, interfaces.ErrUserAlreadyExists + } + + // Initialize user and team + u, team, err := user.Init(user.InitParams{ + Email: email, + Name: name, + Sub: user.AuthFromAuth0Sub(sub).Ref(), + Lang: inp.User.Lang, + Theme: inp.User.Theme, + UserID: inp.User.UserID, + TeamID: inp.User.TeamID, + }) + if err != nil { + return nil, nil, err + } + + if err := i.userRepo.Save(ctx, u); err != nil { + return nil, nil, err + } + if err := i.teamRepo.Save(ctx, team); err != nil { + return nil, nil, err + } + + tx.Commit() + return u, team, nil +} + +func (i *User) verifySignupSecret(secret *string) error { + if i.signupSecret != "" && (secret == nil || *secret != i.signupSecret) { + return interfaces.ErrSignupInvalidSecret + } + return nil +} + +func (i *User) userAlreadyExists(ctx context.Context, userID *id.UserID, sub *string, name *string, teamID *id.TeamID) (*user.User, *user.Team, error) { + // Check if user already exists + var existedUser *user.User + var err error + + if userID != nil { + existedUser, err = i.userRepo.FindByID(ctx, *userID) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + } else if sub != nil { + // Check if user already exists + existedUser, err = i.userRepo.FindByAuth0Sub(ctx, *sub) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + } else if name != nil { + existedUser, err = i.userRepo.FindByName(ctx, *name) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + } + + if existedUser != nil { + team, err := i.teamRepo.FindByID(ctx, existedUser.Team()) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + return existedUser, team, nil + } + + // Check if team already exists + if teamID != nil { + existed, err := i.teamRepo.FindByID(ctx, *teamID) + if err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, nil, err + } + if existed != nil { + return 
nil, existed, nil + } + } + + return nil, nil, nil +} + +func getUserInfoFromISS(ctx context.Context, iss, accessToken string) (UserInfo, error) { + if accessToken == "" { + return UserInfo{}, errors.New("invalid access token") + } + if iss == "" { + return UserInfo{}, errors.New("invalid issuer") + } + + var u string + c, err := getOpenIDConfiguration(ctx, iss) + if err != nil { + u2 := issToURL(iss, "/userinfo") + if u2 == nil { + return UserInfo{}, errors.New("invalid iss") + } + u = u2.String() + } else { + u = c.UserinfoEndpoint + } + return getUserInfo(ctx, u, accessToken) +} + +type OpenIDConfiguration struct { + UserinfoEndpoint string `json:"userinfo_endpoint"` +} + +func getOpenIDConfiguration(ctx context.Context, iss string) (c OpenIDConfiguration, err error) { + url := issToURL(iss, "/.well-known/openid-configuration") + if url == nil { + err = errors.New("invalid iss") + return + } + + if ctx == nil { + ctx = context.Background() + } + + req, err2 := http.NewRequestWithContext(ctx, http.MethodGet, url.String(), nil) + if err2 != nil { + err = err2 + return + } + + res, err2 := http.DefaultClient.Do(req) + if err2 != nil { + err = err2 + return + } + + if res.StatusCode != http.StatusOK { + err = errors.New("could not get user info") + return + } + + if err2 := json.NewDecoder(res.Body).Decode(&c); err2 != nil { + err = fmt.Errorf("could not get user info: %w", err2) + return + } + + return +} + +type UserInfo struct { + Sub string `json:"sub"` + Name string `json:"name"` + Nickname string `json:"nickname"` + Email string `json:"email"` + Error string `json:"error"` +} + +func getUserInfo(ctx context.Context, url, accessToken string) (ui UserInfo, err error) { + if ctx == nil { + ctx = context.Background() + } + + req, err2 := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err2 != nil { + err = err2 + return + } + + req.Header.Set("Authorization", "Bearer "+accessToken) + res, err2 := http.DefaultClient.Do(req) + if err2 != nil { + err = 
err2 + return + } + + if res.StatusCode != http.StatusOK { + err = errors.New("could not get user info") + return + } + + if err2 := json.NewDecoder(res.Body).Decode(&ui); err2 != nil { + err = fmt.Errorf("could not get user info: %w", err2) + return + } + + if ui.Error != "" { + err = fmt.Errorf("could not get user info: %s", ui.Error) + return + } + if ui.Sub == "" { + err = fmt.Errorf("could not get user info: invalid response") + return + } + if ui.Email == "" { + err = fmt.Errorf("could not get user info: email scope missing") + return + } + + return +} + +func issToURL(iss, p string) *url.URL { + if iss == "" { + return nil + } + + if !strings.HasPrefix(iss, "https://") && !strings.HasPrefix(iss, "http://") { + iss = "https://" + iss + } + + u, err := url.Parse(iss) + if err == nil { + u.Path = path.Join(u.Path, p) + if u.Path == "/" { + u.Path = "" + } + return u + } + + return nil +} + +func (i *User) createVerification(ctx context.Context, u *user.User) error { + vr := user.NewVerification() + u.SetVerification(vr) + + if err := i.userRepo.Save(ctx, u); err != nil { + return err + } + + var text, html bytes.Buffer + link := i.authSrvUIDomain + "/?user-verification-token=" + vr.Code() + signupMailContent := mailContent{ + Message: "Thank you for signing up to Re:Earth. 
Please verify your email address by clicking the button below.", + Suffix: "You can use this email address to log in to Re:Earth account anytime.", + ActionLabel: "Activate your account and log in", + UserName: u.Email(), + ActionURL: htmlTmpl.URL(link), + } + if err := authTextTMPL.Execute(&text, signupMailContent); err != nil { + return err + } + if err := authHTMLTMPL.Execute(&html, signupMailContent); err != nil { + return err + } + + if err := i.mailer.SendMail( + []gateway.Contact{ + { + Email: u.Email(), + Name: u.Name(), + }, + }, + "email verification", + text.String(), + html.String(), + ); err != nil { + return err + } + + return nil +} diff --git a/internal/usecase/interactor/user_signup_test.go b/internal/usecase/interactor/user_signup_test.go new file mode 100644 index 000000000..8fb19be00 --- /dev/null +++ b/internal/usecase/interactor/user_signup_test.go @@ -0,0 +1,549 @@ +package interactor + +import ( + "context" + "errors" + "net/http" + "net/url" + "testing" + "time" + + "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/internal/infrastructure/mailer" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/user" + "github.com/stretchr/testify/assert" + "golang.org/x/text/language" +) + +func TestUser_Signup(t *testing.T) { + user.DefaultPasswordEncoder = &user.NoopPasswordEncoder{} + uid := id.NewUserID() + tid := id.NewTeamID() + mocktime := time.Time{} + mockcode := "CODECODE" + + defer user.MockNow(mocktime)() + defer user.MockGenerateVerificationCode(mockcode)() + + tests := []struct { + name string + signupSecret string + authSrvUIDomain string + createUserBefore *user.User + args interfaces.SignupParam + wantUser *user.User + wantTeam *user.Team + wantMailTo []gateway.Contact + wantMailSubject 
string + wantMailContent string + wantError error + }{ + { + name: "without secret", + signupSecret: "", + authSrvUIDomain: "https://reearth.io", + args: interfaces.SignupParam{ + Sub: sr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("aaa@bbb.com"). + PasswordPlainText("PAss00!!"). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: "NAME"}}, + wantMailSubject: "email verification", + wantMailContent: "https://reearth.io/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "existing but not valdiated user", + signupSecret: "", + authSrvUIDomain: "", + createUserBefore: user.New(). + ID(uid). + Team(tid). + Email("aaa@bbb.com"). + MustBuild(), + args: interfaces.SignupParam{ + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Email("aaa@bbb.com"). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: nil, + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: ""}}, + wantMailSubject: "email verification", + wantMailContent: "/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "existing and valdiated user", + signupSecret: "", + authSrvUIDomain: "", + createUserBefore: user.New(). + ID(uid). + Team(tid). + Email("aaa@bbb.com"). + Verification(user.VerificationFrom(mockcode, mocktime, true)). 
+ MustBuild(), + args: interfaces.SignupParam{ + Sub: sr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: nil, + wantTeam: nil, + wantError: interfaces.ErrUserAlreadyExists, + }, + { + name: "without secret 2", + signupSecret: "", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: sr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + Secret: sr("hogehoge"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("aaa@bbb.com"). + PasswordPlainText("PAss00!!"). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: "NAME"}}, + wantMailSubject: "email verification", + wantMailContent: "/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "with secret", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: sr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + Secret: sr("SECRET"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + Lang: &language.Japanese, + Theme: user.ThemeDark.Ref(), + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("aaa@bbb.com"). + PasswordPlainText("PAss00!!"). + Lang(language.Japanese). + Theme(user.ThemeDark). + Verification(user.VerificationFrom(mockcode, mocktime.Add(24*time.Hour), false)). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). 
+ MustBuild(), + wantMailTo: []gateway.Contact{{Email: "aaa@bbb.com", Name: "NAME"}}, + wantMailSubject: "email verification", + wantMailContent: "/?user-verification-token=CODECODE", + wantError: nil, + }, + { + name: "invalid secret", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: sr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + Secret: sr("SECRET!"), + }, + wantError: interfaces.ErrSignupInvalidSecret, + }, + { + name: "invalid secret 2", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupParam{ + Sub: sr("SUB"), + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00!!", + }, + wantError: interfaces.ErrSignupInvalidSecret, + }, + { + name: "invalid email", + args: interfaces.SignupParam{ + Email: "aaa", + Name: "NAME", + Password: "PAss00!!", + }, + wantError: user.ErrInvalidEmail, + }, + { + name: "invalid password", + args: interfaces.SignupParam{ + Email: "aaa@bbb.com", + Name: "NAME", + Password: "PAss00", + }, + wantError: user.ErrPasswordLength, + }, + { + name: "invalid name", + args: interfaces.SignupParam{ + Email: "aaa@bbb.com", + Name: "", + Password: "PAss00!!", + }, + wantError: interfaces.ErrSignupInvalidName, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + // t.Parallel() cannot be used + r := memory.InitRepos(nil) + if tt.createUserBefore != nil { + assert.NoError(t, r.User.Save( + context.Background(), + tt.createUserBefore), + ) + } + m := mailer.NewMock() + g := &gateway.Container{Mailer: m} + uc := NewUser(r, g, tt.signupSecret, tt.authSrvUIDomain) + user, team, err := uc.Signup(context.Background(), tt.args) + assert.Equal(t, tt.wantUser, user) + assert.Equal(t, tt.wantTeam, team) + assert.Equal(t, tt.wantError, err) + mails := m.Mails() + if tt.wantMailSubject == "" { + assert.Empty(t, mails) + } else { + assert.Equal(t, 1, len(mails)) + assert.Equal(t, tt.wantMailSubject, mails[0].Subject) + 
assert.Equal(t, tt.wantMailTo, mails[0].To) + assert.Contains(t, mails[0].PlainContent, tt.wantMailContent) + } + }) + } +} + +func TestUser_SignupOIDC(t *testing.T) { + httpmock.Activate() + defer httpmock.DeactivateAndReset() + + httpmock.RegisterResponder( + "GET", + "https://issuer/.well-known/openid-configuration", + httpmock.NewStringResponder(200, `{"userinfo_endpoint":"https://issuer/userinfo"}`), + ) + + httpmock.RegisterResponder( + "GET", + "https://issuer/userinfo", + func(req *http.Request) (*http.Response, error) { + if req.Header.Get("Authorization") == "Bearer accesstoken" { + return httpmock.NewStringResponse(200, `{"sub":"SUB","email":"x@y.z","name":"NAME"}`), nil + } + return httpmock.NewStringResponse(401, "Unauthorized"), nil + }, + ) + + user.DefaultPasswordEncoder = &user.NoopPasswordEncoder{} + uid := id.NewUserID() + tid := id.NewTeamID() + mocktime := time.Time{} + mockcode := "CODECODE" + + defer user.MockNow(mocktime)() + defer user.MockGenerateVerificationCode(mockcode)() + + tests := []struct { + name string + signupSecret string + authSrvUIDomain string + createUserBefore *user.User + args interfaces.SignupOIDCParam + wantUser *user.User + wantTeam *user.Team + wantMail *mailer.Mail + wantMailTo string + wantMailSubject string + wantMailContent string + wantError error + }{ + { + name: "userinfo", + signupSecret: "", + authSrvUIDomain: "", + args: interfaces.SignupOIDCParam{ + AccessToken: "accesstoken", + Issuer: "https://issuer", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("NAME"). + Auths([]user.Auth{{Provider: "", Sub: "SUB"}}). + Email("x@y.z"). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("NAME"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). 
+ MustBuild(), + wantError: nil, + }, + { + name: "no userinfo", + signupSecret: "", + authSrvUIDomain: "", + args: interfaces.SignupOIDCParam{ + Email: "aaa@bbb.com", + Name: "name", + AccessToken: "accesstoken", + Issuer: "https://issuer", + Sub: "sub", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("name"). + Auths([]user.Auth{{Provider: "", Sub: "sub"}}). + Email("aaa@bbb.com"). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("name"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantError: nil, + }, + { + name: "no userinfo with secret", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupOIDCParam{ + Email: "aaa@bbb.com", + Name: "name", + AccessToken: "accesstoken", + Issuer: "https://issuer", + Sub: "sub", + Secret: sr("SECRET"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantUser: user.New(). + ID(uid). + Team(tid). + Name("name"). + Auths([]user.Auth{{Provider: "", Sub: "sub"}}). + Email("aaa@bbb.com"). + MustBuild(), + wantTeam: user.NewTeam(). + ID(tid). + Name("name"). + Members(map[id.UserID]user.Role{uid: user.RoleOwner}). + Personal(true). + MustBuild(), + wantError: nil, + }, + { + name: "existed but not validated user", + signupSecret: "", + authSrvUIDomain: "", + createUserBefore: user.New(). + ID(uid). + Email("aaa@bbb.com"). + MustBuild(), + args: interfaces.SignupOIDCParam{ + AccessToken: "accesstoken", + Issuer: "https://issuer", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantError: interfaces.ErrUserAlreadyExists, + }, + { + name: "existed and verified user", + signupSecret: "", + authSrvUIDomain: "", + createUserBefore: user.New(). + ID(uid). + Email("aaa@bbb.com"). + Verification(user.VerificationFrom(mockcode, mocktime, true)). 
+ MustBuild(), + args: interfaces.SignupOIDCParam{ + AccessToken: "accesstoken", + Issuer: "https://issuer", + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantError: interfaces.ErrUserAlreadyExists, + }, + { + name: "invalid secret", + signupSecret: "SECRET", + authSrvUIDomain: "", + args: interfaces.SignupOIDCParam{ + Email: "aaa@bbb.com", + Name: "name", + AccessToken: "accesstoken", + Issuer: "https://issuer", + Sub: "sub", + Secret: sr("SECRET!"), + User: interfaces.SignupUserParam{ + UserID: &uid, + TeamID: &tid, + }, + }, + wantError: interfaces.ErrSignupInvalidSecret, + }, + { + name: "invalid email", + args: interfaces.SignupOIDCParam{ + Email: "aaabbbcom", + Name: "name", + AccessToken: "accesstoken", + Issuer: "https://issuer", + Sub: "sub", + }, + wantError: user.ErrInvalidEmail, + }, + { + name: "invalid access token", + args: interfaces.SignupOIDCParam{ + Email: "", + Name: "", + AccessToken: "", + Issuer: "https://issuer", + Sub: "sub", + }, + wantError: errors.New("invalid access token"), + }, + { + name: "invalid issuer", + args: interfaces.SignupOIDCParam{ + Email: "", + Name: "", + AccessToken: "access token", + Issuer: "", + Sub: "sub", + }, + wantError: errors.New("invalid issuer"), + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + // t.Parallel() cannot be used + r := memory.InitRepos(nil) + if tt.createUserBefore != nil { + assert.NoError(t, r.User.Save( + context.Background(), + tt.createUserBefore), + ) + } + m := mailer.NewMock() + g := &gateway.Container{Mailer: m} + uc := NewUser(r, g, tt.signupSecret, tt.authSrvUIDomain) + user, team, err := uc.SignupOIDC(context.Background(), tt.args) + assert.Equal(t, tt.wantUser, user) + assert.Equal(t, tt.wantTeam, team) + assert.Equal(t, tt.wantError, err) + assert.Empty(t, m.Mails()) + }) + } +} + +func TestIssToURL(t *testing.T) { + assert.Nil(t, issToURL("", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: 
"iss.com"}, issToURL("iss.com", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com"}, issToURL("https://iss.com", "")) + assert.Equal(t, &url.URL{Scheme: "http", Host: "iss.com"}, issToURL("http://iss.com", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: ""}, issToURL("https://iss.com/", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge"}, issToURL("https://iss.com/hoge", "")) + assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge/foobar"}, issToURL("https://iss.com/hoge", "foobar")) +} + +func sr(s string) *string { + return &s +} diff --git a/internal/usecase/interfaces/user.go b/internal/usecase/interfaces/user.go index c80cf7db7..79456c69c 100644 --- a/internal/usecase/interfaces/user.go +++ b/internal/usecase/interfaces/user.go @@ -20,18 +20,33 @@ var ( ErrInvalidUserEmail = errors.New("invalid email") ErrNotVerifiedUser = errors.New("not verified user") ErrSignupInvalidPassword = errors.New("invalid password") + ErrUserAlreadyExists = errors.New("user already exists") ) type SignupParam struct { - Sub *string - UserID *id.UserID + Sub *string // required by Auth0 + Email string + Name string + Password string Secret *string - Name *string - Email *string - Password *string - Lang *language.Tag - Theme *user.Theme - TeamID *id.TeamID + User SignupUserParam +} + +type SignupOIDCParam struct { + AccessToken string + Issuer string + Sub string + Email string + Name string + Secret *string + User SignupUserParam +} + +type SignupUserParam struct { + UserID *id.UserID + Lang *language.Tag + Theme *user.Theme + TeamID *id.TeamID } type GetUserByCredentials struct { @@ -51,6 +66,7 @@ type UpdateMeParam struct { type User interface { Fetch(context.Context, []id.UserID, *usecase.Operator) ([]*user.User, error) Signup(context.Context, SignupParam) (*user.User, *user.Team, error) + SignupOIDC(context.Context, SignupOIDCParam) (*user.User, *user.Team, error) 
CreateVerification(context.Context, string) error VerifyUser(context.Context, string) (*user.User, error) GetUserByCredentials(context.Context, GetUserByCredentials) (*user.User, error) diff --git a/internal/usecase/pagination.go b/internal/usecase/pagination.go index 46932c09c..e6ec0645b 100644 --- a/internal/usecase/pagination.go +++ b/internal/usecase/pagination.go @@ -1,5 +1,7 @@ package usecase +type Cursor string + type Pagination struct { Before *Cursor After *Cursor diff --git a/pkg/user/auth.go b/pkg/user/auth.go index f68ab6152..a20ed8990 100644 --- a/pkg/user/auth.go +++ b/pkg/user/auth.go @@ -12,7 +12,7 @@ type Auth struct { func AuthFromAuth0Sub(sub string) Auth { s := strings.SplitN(sub, "|", 2) if len(s) != 2 { - return Auth{} + return Auth{Provider: "", Sub: sub} } return Auth{Provider: s[0], Sub: sub} } diff --git a/pkg/user/auth_test.go b/pkg/user/auth_test.go index ff9d7b976..83682003c 100644 --- a/pkg/user/auth_test.go +++ b/pkg/user/auth_test.go @@ -14,7 +14,7 @@ func TestAuthFromAuth0Sub(t *testing.T) { Expected Auth }{ { - Name: "Create Auth", + Name: "with provider", Sub: "xx|yy", Expected: Auth{ Provider: "xx", @@ -22,7 +22,15 @@ func TestAuthFromAuth0Sub(t *testing.T) { }, }, { - Name: "Create empty Auth", + Name: "without provider", + Sub: "yy", + Expected: Auth{ + Provider: "", + Sub: "yy", + }, + }, + { + Name: "empty", Sub: "", Expected: Auth{}, }, @@ -87,6 +95,7 @@ func TestGenReearthSub(t *testing.T) { }, }, } + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := GenReearthSub(tt.input) diff --git a/pkg/user/builder.go b/pkg/user/builder.go index 264eba119..c3fa6129a 100644 --- a/pkg/user/builder.go +++ b/pkg/user/builder.go @@ -7,6 +7,7 @@ import ( type Builder struct { u *User passwordText string + email string } func New() *Builder { @@ -17,11 +18,17 @@ func (b *Builder) Build() (*User, error) { if b.u.id.IsNil() { return nil, ErrInvalidID } + if b.u.theme == "" { + b.u.theme = ThemeDefault + } if b.passwordText 
!= "" { if err := b.u.SetPassword(b.passwordText); err != nil { - return nil, ErrEncodingPassword + return nil, err } } + if err := b.u.UpdateEmail(b.email); err != nil { + return nil, err + } return b.u, nil } @@ -49,16 +56,12 @@ func (b *Builder) Name(name string) *Builder { } func (b *Builder) Email(email string) *Builder { - b.u.email = email + b.email = email return b } -func (b *Builder) Password(p []byte) *Builder { - if p == nil { - b.u.password = nil - } else { - b.u.password = append(p[:0:0], p...) - } +func (b *Builder) EncodedPassword(p EncodedPassword) *Builder { + b.u.password = p.Clone() return b } diff --git a/pkg/user/builder_test.go b/pkg/user/builder_test.go index 031ea979d..de88a84b7 100644 --- a/pkg/user/builder_test.go +++ b/pkg/user/builder_test.go @@ -10,29 +10,29 @@ import ( func TestBuilder_ID(t *testing.T) { uid := NewID() - b := New().ID(uid).MustBuild() + b := New().ID(uid).Email("aaa@bbb.com").MustBuild() assert.Equal(t, uid, b.ID()) assert.Nil(t, b.passwordReset) } func TestBuilder_Name(t *testing.T) { - b := New().NewID().Name("xxx").MustBuild() + b := New().NewID().Name("xxx").Email("aaa@bbb.com").MustBuild() assert.Equal(t, "xxx", b.Name()) } func TestBuilder_NewID(t *testing.T) { - b := New().NewID().MustBuild() + b := New().NewID().Email("aaa@bbb.com").MustBuild() assert.NotNil(t, b.ID()) } func TestBuilder_Team(t *testing.T) { tid := NewTeamID() - b := New().NewID().Team(tid).MustBuild() + b := New().NewID().Email("aaa@bbb.com").Team(tid).MustBuild() assert.Equal(t, tid, b.Team()) } func TestBuilder_Auths(t *testing.T) { - b := New().NewID().Auths([]Auth{ + b := New().NewID().Email("aaa@bbb.com").Auths([]Auth{ { Provider: "xxx", Sub: "aaa", @@ -53,7 +53,7 @@ func TestBuilder_Email(t *testing.T) { func TestBuilder_Lang(t *testing.T) { l := language.Make("en") - b := New().NewID().Lang(l).MustBuild() + b := New().NewID().Email("aaa@bbb.com").Lang(l).MustBuild() assert.Equal(t, l, b.Lang()) } @@ -83,7 +83,7 @@ func 
TestBuilder_LangFrom(t *testing.T) { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() - b := New().NewID().LangFrom(tc.Lang).MustBuild() + b := New().NewID().Email("aaa@bbb.com").LangFrom(tc.Lang).MustBuild() assert.Equal(t, tc.Expected, b.Lang()) }) } @@ -96,10 +96,12 @@ func TestNew(t *testing.T) { } func TestBuilder_Build(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + uid := NewID() tid := NewTeamID() - en, _ := language.Parse("en") - pass, _ := encodePassword("pass") + pass := MustEncodedPassword("abcDEF0!") type args struct { Name, Lang, Email string @@ -138,7 +140,8 @@ func TestBuilder_Build(t *testing.T) { name: "xxx", password: pass, auths: []Auth{{Provider: "ppp", Sub: "sss"}}, - lang: en, + lang: language.English, + theme: ThemeDefault, }, }, { Name: "failed invalid id", @@ -153,7 +156,7 @@ func TestBuilder_Build(t *testing.T) { t.Parallel() res, err := New(). ID(tt.Args.ID). - Password(pass). + EncodedPassword(pass). Name(tt.Args.Name). Auths(tt.Args.Auths). LangFrom(tt.Args.Lang). @@ -170,10 +173,12 @@ func TestBuilder_Build(t *testing.T) { } func TestBuilder_MustBuild(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + uid := NewID() tid := NewTeamID() - en, _ := language.Parse("en") - pass, _ := encodePassword("pass") + pass := MustEncodedPassword("abcDEF0!") type args struct { Name, Lang, Email string @@ -212,7 +217,8 @@ func TestBuilder_MustBuild(t *testing.T) { name: "xxx", password: pass, auths: []Auth{{Provider: "ppp", Sub: "sss"}}, - lang: en, + lang: language.English, + theme: ThemeDefault, }, }, { Name: "failed invalid id", @@ -229,7 +235,7 @@ func TestBuilder_MustBuild(t *testing.T) { t.Helper() return New(). ID(tt.Args.ID). - Password(pass). + EncodedPassword(pass). Name(tt.Args.Name). Auths(tt.Args.Auths). LangFrom(tt.Args.Lang). 
diff --git a/pkg/user/password.go b/pkg/user/password.go new file mode 100644 index 000000000..6481ed204 --- /dev/null +++ b/pkg/user/password.go @@ -0,0 +1,123 @@ +package user + +import ( + "bytes" + "errors" + "unicode" + + "golang.org/x/crypto/bcrypt" +) + +var ( + DefaultPasswordEncoder PasswordEncoder = &BcryptPasswordEncoder{} + ErrEncodingPassword = errors.New("encoding password") + ErrInvalidPassword = errors.New("invalid password") + ErrPasswordLength = errors.New("password at least 8 characters") + ErrPasswordUpper = errors.New("password should have upper case letters") + ErrPasswordLower = errors.New("password should have lower case letters") + ErrPasswordNumber = errors.New("password should have numbers") +) + +type PasswordEncoder interface { + Encode(string) ([]byte, error) + Verify(string, []byte) (bool, error) +} + +type BcryptPasswordEncoder struct{} + +func (BcryptPasswordEncoder) Encode(pass string) ([]byte, error) { + return bcrypt.GenerateFromPassword([]byte(pass), 14) +} + +func (BcryptPasswordEncoder) Verify(s string, p []byte) (bool, error) { + err := bcrypt.CompareHashAndPassword(p, []byte(s)) + if err != nil { + if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) { + return false, nil + } + return false, err + } + return true, nil +} + +type NoopPasswordEncoder struct{} + +func (m NoopPasswordEncoder) Encode(pass string) ([]byte, error) { + return []byte(pass), nil +} + +func (m NoopPasswordEncoder) Verify(s string, p []byte) (bool, error) { + return bytes.Equal([]byte(s), []byte(p)), nil +} + +type MockPasswordEncoder struct{ Mock []byte } + +func (m MockPasswordEncoder) Encode(pass string) ([]byte, error) { + return append(m.Mock[:0:0], m.Mock...), nil +} + +func (m MockPasswordEncoder) Verify(s string, p []byte) (bool, error) { + return bytes.Equal(m.Mock, []byte(s)), nil +} + +type EncodedPassword []byte + +func NewEncodedPassword(pass string) (EncodedPassword, error) { + if err := ValidatePasswordFormat(pass); err != nil { + 
return nil, err + } + got, err := DefaultPasswordEncoder.Encode(pass) + if err != nil { + return nil, ErrEncodingPassword + } + return got, nil +} + +func MustEncodedPassword(pass string) EncodedPassword { + p, err := NewEncodedPassword(pass) + if err != nil { + panic(err) + } + return p +} + +func (p EncodedPassword) Clone() EncodedPassword { + if p == nil { + return nil + } + return append(p[:0:0], p...) +} + +func (p EncodedPassword) Verify(toVerify string) (bool, error) { + if len(toVerify) == 0 || len(p) == 0 { + return false, nil + } + return DefaultPasswordEncoder.Verify(toVerify, p) +} + +func ValidatePasswordFormat(pass string) error { + var hasNum, hasUpper, hasLower bool + for _, c := range pass { + switch { + case unicode.IsNumber(c): + hasNum = true + case unicode.IsUpper(c): + hasUpper = true + case unicode.IsLower(c) || c == ' ': + hasLower = true + } + } + if len(pass) < 8 { + return ErrPasswordLength + } + if !hasLower { + return ErrPasswordLower + } + if !hasUpper { + return ErrPasswordUpper + } + if !hasNum { + return ErrPasswordNumber + } + return nil +} diff --git a/pkg/user/password_test.go b/pkg/user/password_test.go new file mode 100644 index 000000000..fb962df88 --- /dev/null +++ b/pkg/user/password_test.go @@ -0,0 +1,51 @@ +package user + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/crypto/bcrypt" +) + +func TestBcryptPasswordEncoder(t *testing.T) { + got, err := (&BcryptPasswordEncoder{}).Encode("abc") + assert.NoError(t, err) + err = bcrypt.CompareHashAndPassword(got, []byte("abc")) + assert.NoError(t, err) + + ok, err := (&BcryptPasswordEncoder{}).Verify("abc", got) + assert.NoError(t, err) + assert.True(t, ok) + ok, err = (&BcryptPasswordEncoder{}).Verify("abcd", got) + assert.NoError(t, err) + assert.False(t, ok) +} + +func TestMockPasswordEncoder(t *testing.T) { + got, err := (&MockPasswordEncoder{Mock: []byte("ABC")}).Encode("ABC") + assert.NoError(t, err) + assert.Equal(t, got, []byte("ABC")) + 
got, err = (&MockPasswordEncoder{Mock: []byte("ABC")}).Encode("abc") + assert.NoError(t, err) + assert.Equal(t, got, []byte("ABC")) + + ok, err := (&MockPasswordEncoder{Mock: []byte("ABC")}).Verify("ABC", got) + assert.NoError(t, err) + assert.True(t, ok) + ok, err = (&MockPasswordEncoder{Mock: []byte("ABC")}).Verify("abc", got) + assert.NoError(t, err) + assert.False(t, ok) +} + +func TestNoopPasswordEncoder(t *testing.T) { + got, err := (&NoopPasswordEncoder{}).Encode("abc") + assert.NoError(t, err) + assert.Equal(t, got, []byte("abc")) + + ok, err := (&NoopPasswordEncoder{}).Verify("abc", got) + assert.NoError(t, err) + assert.True(t, ok) + ok, err = (&NoopPasswordEncoder{}).Verify("abcd", got) + assert.NoError(t, err) + assert.False(t, ok) +} diff --git a/pkg/user/theme.go b/pkg/user/theme.go index 8ec4e75e9..0fa5a92c8 100644 --- a/pkg/user/theme.go +++ b/pkg/user/theme.go @@ -7,3 +7,7 @@ const ( ThemeLight Theme = "light" ThemeDark Theme = "dark" ) + +func (t Theme) Ref() *Theme { + return &t +} diff --git a/pkg/user/user.go b/pkg/user/user.go index cc48e0e02..177f16725 100644 --- a/pkg/user/user.go +++ b/pkg/user/user.go @@ -2,27 +2,20 @@ package user import ( "errors" - "unicode" - - "golang.org/x/crypto/bcrypt" + "net/mail" "golang.org/x/text/language" ) var ( - ErrEncodingPassword = errors.New("error encoding password") - ErrInvalidPassword = errors.New("error invalid password") - ErrPasswordLength = errors.New("password at least 8 characters") - ErrPasswordUpper = errors.New("password should have upper case letters") - ErrPasswordLower = errors.New("password should have lower case letters") - ErrPasswordNumber = errors.New("password should have numbers") + ErrInvalidEmail = errors.New("invalid email") ) type User struct { id ID name string email string - password []byte + password EncodedPassword team TeamID auths []Auth lang language.Tag @@ -63,8 +56,12 @@ func (u *User) UpdateName(name string) { u.name = name } -func (u *User) UpdateEmail(email string) 
{ +func (u *User) UpdateEmail(email string) error { + if _, err := mail.ParseAddress(email); err != nil { + return ErrInvalidEmail + } u.email = email + return nil } func (u *User) UpdateTeam(team TeamID) { @@ -168,10 +165,7 @@ func (u *User) ClearAuths() { } func (u *User) SetPassword(pass string) error { - if err := validatePassword(pass); err != nil { - return err - } - p, err := encodePassword(pass) + p, err := NewEncodedPassword(pass) if err != nil { return err } @@ -180,26 +174,10 @@ func (u *User) SetPassword(pass string) error { } func (u *User) MatchPassword(pass string) (bool, error) { - if u == nil || len(u.password) == 0 { + if u == nil { return false, nil } - return verifyPassword(pass, u.password) -} - -func encodePassword(pass string) ([]byte, error) { - bytes, err := bcrypt.GenerateFromPassword([]byte(pass), 14) - return bytes, err -} - -func verifyPassword(toVerify string, encoded []byte) (bool, error) { - err := bcrypt.CompareHashAndPassword(encoded, []byte(toVerify)) - if err != nil { - if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) { - return false, nil - } - return false, err - } - return true, nil + return u.password.Verify(pass) } func (u *User) PasswordReset() *PasswordReset { @@ -213,31 +191,3 @@ func (u *User) SetPasswordReset(pr *PasswordReset) { func (u *User) SetVerification(v *Verification) { u.verification = v } - -func validatePassword(pass string) error { - var hasNum, hasUpper, hasLower bool - for _, c := range pass { - switch { - case unicode.IsNumber(c): - hasNum = true - case unicode.IsUpper(c): - hasUpper = true - case unicode.IsLower(c) || c == ' ': - hasLower = true - } - } - if len(pass) < 8 { - return ErrPasswordLength - } - if !hasLower { - return ErrPasswordLower - } - if !hasUpper { - return ErrPasswordUpper - } - if !hasNum { - return ErrPasswordNumber - } - - return nil -} diff --git a/pkg/user/user_test.go b/pkg/user/user_test.go index d3244466d..39d37d980 100644 --- a/pkg/user/user_test.go +++ 
b/pkg/user/user_test.go @@ -84,7 +84,7 @@ func TestUser_AddAuth(t *testing.T) { }, { Name: "add new auth", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), A: Auth{ Provider: "xxx", Sub: "zzz", @@ -93,7 +93,7 @@ func TestUser_AddAuth(t *testing.T) { }, { Name: "existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -129,7 +129,7 @@ func TestUser_RemoveAuth(t *testing.T) { }, { Name: "remove auth0", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), A: Auth{ Provider: "auth0", Sub: "zzz", @@ -138,7 +138,7 @@ func TestUser_RemoveAuth(t *testing.T) { }, { Name: "existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -174,7 +174,7 @@ func TestUser_ContainAuth(t *testing.T) { }, { Name: "not existing auth", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), A: Auth{ Provider: "auth0", Sub: "zzz", @@ -183,7 +183,7 @@ func TestUser_ContainAuth(t *testing.T) { }, { Name: "existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -219,13 +219,13 @@ func TestUser_HasAuthProvider(t *testing.T) { }, { Name: "not existing auth", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), P: "auth0", Expected: false, }, { Name: "existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -258,13 +258,13 @@ func TestUser_RemoveAuthByProvider(t *testing.T) { }, { Name: "remove auth0", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), Provider: "auth0", Expected: false, }, { Name: 
"existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -284,7 +284,7 @@ func TestUser_RemoveAuthByProvider(t *testing.T) { } func TestUser_ClearAuths(t *testing.T) { - u := New().NewID().Auths([]Auth{{ + u := New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild() @@ -298,26 +298,27 @@ func TestUser_Auths(t *testing.T) { } func TestUser_UpdateEmail(t *testing.T) { - u := New().NewID().MustBuild() - u.UpdateEmail("ff@xx.zz") - assert.Equal(t, "ff@xx.zz", u.Email()) + u := New().NewID().Email("abc@abc.com").MustBuild() + assert.NoError(t, u.UpdateEmail("abc@xyz.com")) + assert.Equal(t, "abc@xyz.com", u.Email()) + assert.Error(t, u.UpdateEmail("abcxyz")) } func TestUser_UpdateLang(t *testing.T) { - u := New().NewID().MustBuild() + u := New().NewID().Email("aaa@bbb.com").MustBuild() u.UpdateLang(language.Make("en")) assert.Equal(t, language.Make("en"), u.Lang()) } func TestUser_UpdateTeam(t *testing.T) { tid := NewTeamID() - u := New().NewID().MustBuild() + u := New().NewID().Email("aaa@bbb.com").MustBuild() u.UpdateTeam(tid) assert.Equal(t, tid, u.Team()) } func TestUser_UpdateName(t *testing.T) { - u := New().NewID().MustBuild() + u := New().NewID().Email("aaa@bbb.com").MustBuild() u.UpdateName("xxx") assert.Equal(t, "xxx", u.Name()) } @@ -331,7 +332,7 @@ func TestUser_GetAuthByProvider(t *testing.T) { }{ { Name: "existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -343,7 +344,7 @@ func TestUser_GetAuthByProvider(t *testing.T) { }, { Name: "not existing auth", - User: New().NewID().Auths([]Auth{{ + User: New().NewID().Email("aaa@bbb.com").Auths([]Auth{{ Provider: "xxx", Sub: "zzz", }}).MustBuild(), @@ -362,10 +363,15 @@ func TestUser_GetAuthByProvider(t *testing.T) { } func TestUser_MatchPassword(t *testing.T) { - 
encodedPass, _ := encodePassword("test") + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + + password := MustEncodedPassword("abcDEF0!") + type args struct { pass string } + tests := []struct { name string password []byte @@ -374,17 +380,17 @@ func TestUser_MatchPassword(t *testing.T) { wantErr bool }{ { - name: "passwords should match", - password: encodedPass, + name: "should match", + password: password, args: args{ - pass: "test", + pass: "abcDEF0!", }, want: true, wantErr: false, }, { - name: "passwords shouldn't match", - password: encodedPass, + name: "should not match", + password: password, args: args{ pass: "xxx", }, @@ -392,7 +398,9 @@ func TestUser_MatchPassword(t *testing.T) { wantErr: false, }, } + for _, tc := range tests { + tc := tc t.Run(tc.name, func(tt *testing.T) { u := &User{ password: tc.password, @@ -409,6 +417,9 @@ func TestUser_MatchPassword(t *testing.T) { } func TestUser_SetPassword(t *testing.T) { + // bcrypt is not suitable for unit tests as it requires heavy computation + DefaultPasswordEncoder = &NoopPasswordEncoder{} + type args struct { pass string } @@ -432,11 +443,13 @@ func TestUser_SetPassword(t *testing.T) { want: "Testabc1", }, } + for _, tc := range tests { + tc := tc t.Run(tc.name, func(tt *testing.T) { u := &User{} _ = u.SetPassword(tc.args.pass) - got, err := verifyPassword(tc.want, u.password) + got, err := u.password.Verify(tc.want) assert.NoError(tt, err) assert.True(tt, got) }) @@ -451,18 +464,19 @@ func TestUser_PasswordReset(t *testing.T) { }{ { Name: "not password request", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), Expected: nil, }, { Name: "create new password request over existing one", - User: New().NewID().PasswordReset(&PasswordReset{"xzy", time.Unix(0, 0)}).MustBuild(), + User: New().NewID().Email("aaa@bbb.com").PasswordReset(&PasswordReset{"xzy", time.Unix(0, 0)}).MustBuild(), 
Expected: &PasswordReset{ Token: "xzy", CreatedAt: time.Unix(0, 0), }, }, } + for _, tc := range testCases { tc := tc t.Run(tc.Name, func(tt *testing.T) { @@ -481,13 +495,13 @@ func TestUser_SetPasswordReset(t *testing.T) { }{ { Name: "nil", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), Pr: nil, Expected: nil, }, { Name: "nil", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), Pr: &PasswordReset{ Token: "xyz", CreatedAt: time.Unix(1, 1), @@ -499,7 +513,7 @@ func TestUser_SetPasswordReset(t *testing.T) { }, { Name: "create new password request", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), Pr: &PasswordReset{ Token: "xyz", CreatedAt: time.Unix(1, 1), @@ -511,7 +525,7 @@ func TestUser_SetPasswordReset(t *testing.T) { }, { Name: "create new password request over existing one", - User: New().NewID().PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), + User: New().NewID().Email("aaa@bbb.com").PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), Pr: &PasswordReset{ Token: "xyz", CreatedAt: time.Unix(1, 1), @@ -523,18 +537,20 @@ func TestUser_SetPasswordReset(t *testing.T) { }, { Name: "remove none existing password request", - User: New().NewID().MustBuild(), + User: New().NewID().Email("aaa@bbb.com").MustBuild(), Pr: nil, Expected: nil, }, { Name: "remove existing password request", - User: New().NewID().PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), + User: New().NewID().Email("aaa@bbb.com").PasswordReset(&PasswordReset{"xzy", time.Now()}).MustBuild(), Pr: nil, Expected: nil, }, } + for _, tt := range tests { + tt := tt t.Run(tt.Name, func(t *testing.T) { tt.User.SetPasswordReset(tt.Pr) assert.Equal(t, tt.Expected, tt.User.PasswordReset()) @@ -566,7 +582,9 @@ func TestUser_Verification(t *testing.T) { want: v, }, } + for _, tt := range tests { + tt := tt t.Run(tt.name, func(t *testing.T) { u := &User{ 
verification: tt.verification, @@ -577,7 +595,6 @@ func TestUser_Verification(t *testing.T) { } func Test_ValidatePassword(t *testing.T) { - tests := []struct { name string pass string @@ -609,9 +626,11 @@ func Test_ValidatePassword(t *testing.T) { wantErr: true, }, } + for _, tc := range tests { + tc := tc t.Run(tc.name, func(tt *testing.T) { - out := validatePassword(tc.pass) + out := ValidatePasswordFormat(tc.pass) assert.Equal(tt, out != nil, tc.wantErr) }) } diff --git a/pkg/user/verification.go b/pkg/user/verification.go index 2b7215f0c..9ed458cb8 100644 --- a/pkg/user/verification.go +++ b/pkg/user/verification.go @@ -6,6 +6,35 @@ import ( uuid "github.com/google/uuid" ) +var Now = time.Now +var GenerateVerificationCode = generateCode + +func MockNow(t time.Time) func() { + Now = func() time.Time { return t } + return func() { Now = time.Now } +} + +func MockGenerateVerificationCode(code string) func() { + GenerateVerificationCode = func() string { return code } + return func() { GenerateVerificationCode = generateCode } +} + +func NewVerification() *Verification { + return &Verification{ + verified: false, + code: GenerateVerificationCode(), + expiration: Now().Add(time.Hour * 24), + } +} + +func VerificationFrom(c string, e time.Time, b bool) *Verification { + return &Verification{ + verified: b, + code: c, + expiration: e, + } +} + type Verification struct { verified bool code string @@ -51,21 +80,3 @@ func (v *Verification) SetVerified(b bool) { } v.verified = b } - -func NewVerification() *Verification { - v := &Verification{ - verified: false, - code: generateCode(), - expiration: time.Now().Add(time.Hour * 24), - } - return v -} - -func VerificationFrom(c string, e time.Time, b bool) *Verification { - v := &Verification{ - verified: b, - code: c, - expiration: e, - } - return v -} From 3e6dff17eb60d1476be6ea2f141a3d818951f07e Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 6 Apr 2022 14:45:39 +0900 Subject: [PATCH 186/253] fix: 
property.SchemaListMap.List test fails --- pkg/property/schema_list_test.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pkg/property/schema_list_test.go b/pkg/property/schema_list_test.go index 0a485e5a5..cb22dc673 100644 --- a/pkg/property/schema_list_test.go +++ b/pkg/property/schema_list_test.go @@ -40,6 +40,9 @@ func TestSchemaList_MapToIDs(t *testing.T) { func TestSchemaMap_List(t *testing.T) { p1 := &Schema{id: MustSchemaID("foo~1.0.0/a")} p2 := &Schema{id: MustSchemaID("bar~1.0.0/a")} - assert.Equal(t, SchemaList{p1, p2}, SchemaMap{p1.ID(): p1, p2.ID(): p2}.List()) + list := SchemaMap{p1.ID(): p1, p2.ID(): p2}.List() + assert.Len(t, list, 2) + assert.Contains(t, list, p1) + assert.Contains(t, list, p2) assert.Nil(t, SchemaMap(nil).List()) } From 10691ac385eae2039d259010bec27e645d4e9183 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 6 Apr 2022 15:51:20 +0900 Subject: [PATCH 187/253] fix: errors when auth srv domain is not specified --- internal/app/app.go | 2 +- internal/app/auth_server.go | 4 ++-- internal/app/config.go | 33 ++++++++++++++++++++++++--------- 3 files changed, 27 insertions(+), 12 deletions(-) diff --git a/internal/app/app.go b/internal/app/app.go index 4ab7737b8..ccd40e163 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -81,7 +81,7 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { SignupSecret: cfg.Config.SignupSecret, PublishedIndexHTML: publishedIndexHTML, PublishedIndexURL: cfg.Config.Published.IndexURL, - AuthSrvUIDomain: cfg.Config.AuthSrv.UIDomain, + AuthSrvUIDomain: cfg.Config.Host_Web, })) // auth srv diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index 0a2c10e81..4d4902132 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -27,7 +27,7 @@ const ( ) func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *ServerConfig) { - userUsecase := interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, 
cfg.Config.AuthSrv.UIDomain) + userUsecase := interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, cfg.Config.Host_Web) domain, err := url.Parse(cfg.Config.AuthSrv.Domain) if err != nil { @@ -59,7 +59,7 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server ctx, &interactor.StorageConfig{ Domain: domain.String(), - ClientDomain: cfg.Config.AuthSrv.UIDomain, + ClientDomain: cfg.Config.Host_Web, Debug: cfg.Debug, DN: dn, }, diff --git a/internal/app/config.go b/internal/app/config.go index 128e265f7..354f1cc47 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -18,6 +18,8 @@ const configPrefix = "reearth" type Config struct { Port string `default:"8080" envconfig:"PORT"` + Host string `default:"http://localhost:8080"` + Host_Web string Dev bool DB string `default:"mongodb://localhost"` Mailer string @@ -57,25 +59,32 @@ type Auth0Config struct { type AuthSrvConfig struct { Dev bool Disabled bool - Domain string `default:"http://localhost:8080"` - UIDomain string `default:"http://localhost:8080"` + Domain string Key string DN *AuthSrvDNConfig } -func (c AuthSrvConfig) AuthConfig(debug bool) *AuthConfig { +func (c AuthSrvConfig) AuthConfig(debug bool, host string) *AuthConfig { if c.Disabled { return nil } + + domain := c.Domain + if domain == "" { + domain = host + } + var aud []string - if debug { - aud = []string{"http://localhost:8080", c.Domain} + if debug && host != "" && c.Domain != "" { + aud = []string{host, c.Domain} } else { - aud = []string{c.Domain} + aud = []string{domain} } + clientID := auth.ClientID + return &AuthConfig{ - ISS: c.Domain, + ISS: domain, AUD: aud, ClientID: &clientID, } @@ -130,10 +139,16 @@ func ReadConfig(debug bool) (*Config, error) { var c Config err := envconfig.Process(configPrefix, &c) + // defailt values if debug { c.Dev = true } - if c.Dev || c.AuthSrv.Dev { + if c.Host_Web == "" { + c.Host_Web = c.Host + } + + // overwrite env vars + if !c.AuthSrv.Disabled && (c.Dev 
|| c.AuthSrv.Dev || c.AuthSrv.Domain == "") { if _, ok := os.LookupEnv(op.OidcDevMode); !ok { _ = os.Setenv(op.OidcDevMode, "1") } @@ -170,7 +185,7 @@ func (c Config) Auths() (res []AuthConfig) { ClientID: c.Auth_ClientID, }) } - if ac := c.AuthSrv.AuthConfig(c.Dev); ac != nil { + if ac := c.AuthSrv.AuthConfig(c.Dev, c.Host); ac != nil { res = append(res, *ac) } return append(res, c.Auth...) From 64807342db26d2ada5a50dc2e870ca9a442e91c7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 6 Apr 2022 20:05:35 +0900 Subject: [PATCH 188/253] fix: errors when auth srv domain is not specified --- internal/adapter/http/user.go | 2 +- internal/app/auth_server.go | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/internal/adapter/http/user.go b/internal/adapter/http/user.go index 40f5bfca0..834d26f4a 100644 --- a/internal/adapter/http/user.go +++ b/internal/adapter/http/user.go @@ -32,7 +32,7 @@ type SignupInput struct { Secret *string `json:"secret"` UserID *id.UserID `json:"userId"` TeamID *id.TeamID `json:"teamId"` - Name *string `json:"name"` + Name *string `json:"username"` Email *string `json:"email"` Password *string `json:"password"` Theme *user.Theme `json:"theme"` diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index 4d4902132..7d80907ca 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -29,9 +29,13 @@ const ( func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *ServerConfig) { userUsecase := interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, cfg.Config.Host_Web) - domain, err := url.Parse(cfg.Config.AuthSrv.Domain) + d := cfg.Config.AuthSrv.Domain + if d == "" { + d = cfg.Config.Host + } + domain, err := url.Parse(d) if err != nil { - panic("not valid auth domain") + log.Panicf("auth: not valid auth domain: %s", d) } domain.Path = "/" From 83f9b135f57e74d55361b0a51bd6ce1ce29b16c6 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 6 Apr 2022 20:06:11 
+0900 Subject: [PATCH 189/253] chore: update docker-compose config --- docker-compose.yml | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 74e85d775..239ab7510 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,19 +3,21 @@ services: reearth-backend: build: context: . - args: - TAG: debug + image: reearth/reearth-backend:nightly + environment: + REEARTH_DB: mongodb://reearth-mongo ports: - '8080:8080' - env_file: - - ./.env - environment: - REEARTH_ENV: docker - REEARTH_DB_URL: mongodb://reearth-mongo + # env_file: + # - ./.env + links: + - reearth-mongo depends_on: - reearth-mongo + volumes: + - ./data:/reearth/data reearth-mongo: - image: mongo:4.4.6-bionic + image: mongo:4.4-focal ports: - 27017:27017 volumes: From cb6ca40507df61526b555b204b0581ab6dac16a1 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 7 Apr 2022 17:17:14 +0900 Subject: [PATCH 190/253] fix: login redirect does not work --- internal/app/auth_server.go | 97 +++++++++++++++++++++++-------------- internal/app/config.go | 45 +++++++++++++++++ internal/app/config_test.go | 6 +++ 3 files changed, 111 insertions(+), 37 deletions(-) diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index 7d80907ca..5f96d9a51 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -4,11 +4,11 @@ import ( "context" "crypto/sha256" "encoding/json" + "errors" "net/http" "net/url" "os" "strconv" - "strings" "github.com/caos/oidc/pkg/op" "github.com/golang/gddo/httputil/header" @@ -17,6 +17,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interactor" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/user" ) const ( @@ -29,13 +30,9 @@ const ( func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *ServerConfig) { userUsecase := 
interactor.NewUser(cfg.Repos, cfg.Gateways, cfg.Config.SignupSecret, cfg.Config.Host_Web) - d := cfg.Config.AuthSrv.Domain - if d == "" { - d = cfg.Config.Host - } - domain, err := url.Parse(d) - if err != nil { - log.Panicf("auth: not valid auth domain: %s", d) + domain := cfg.Config.AuthServeDomainURL() + if domain == nil || domain.String() == "" { + log.Panicf("auth: not valid auth domain: %s", domain) } domain.Path = "/" @@ -95,7 +92,7 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server } // Actual login endpoint - r.POST(loginEndpoint, login(ctx, cfg, storage, userUsecase)) + r.POST(loginEndpoint, login(ctx, domain, storage, userUsecase)) r.GET(logoutEndpoint, logout()) @@ -191,44 +188,68 @@ type loginForm struct { AuthRequestID string `json:"id" form:"id"` } -func login(ctx context.Context, cfg *ServerConfig, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { +func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { return func(ec echo.Context) error { request := new(loginForm) err := ec.Bind(request) if err != nil { log.Errorln("auth: filed to parse login request") - return ec.Redirect(http.StatusFound, redirectURL(ec.Request().Referer(), !cfg.Debug, "", "Bad request!")) + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/login", "", "Bad request!"), + ) } - authRequest, err := storage.AuthRequestByID(ctx, request.AuthRequestID) - if err != nil { + if _, err := storage.AuthRequestByID(ctx, request.AuthRequestID); err != nil { log.Errorf("auth: filed to parse login request: %s\n", err) - return ec.Redirect(http.StatusFound, redirectURL(ec.Request().Referer(), !cfg.Debug, "", "Bad request!")) + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/login", "", "Bad request!"), + ) } if len(request.Email) == 0 || len(request.Password) == 0 { log.Errorln("auth: one of credentials are not provided") - return 
ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "Bad request!")) + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/login", request.AuthRequestID, "Bad request!"), + ) } // check user credentials from db - user, err := userUsecase.GetUserByCredentials(ctx, interfaces.GetUserByCredentials{ + u, err := userUsecase.GetUserByCredentials(ctx, interfaces.GetUserByCredentials{ Email: request.Email, Password: request.Password, }) + var auth *user.Auth + if err == nil { + auth = u.GetAuthByProvider(authProvider) + if auth == nil { + err = errors.New("The account is not signed up with Re:Earth") + } + } if err != nil { log.Errorf("auth: wrong credentials: %s\n", err) - return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "Login failed; Invalid user ID or password.")) + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/login", request.AuthRequestID, "Login failed; Invalid user ID or password."), + ) } // Complete the auth request && set the subject - err = storage.(*interactor.AuthStorage).CompleteAuthRequest(ctx, request.AuthRequestID, user.GetAuthByProvider(authProvider).Sub) + err = storage.(*interactor.AuthStorage).CompleteAuthRequest(ctx, request.AuthRequestID, auth.Sub) if err != nil { log.Errorf("auth: failed to complete the auth request: %s\n", err) - return ec.Redirect(http.StatusFound, redirectURL(authRequest.GetRedirectURI(), !cfg.Debug, request.AuthRequestID, "Bad request!")) + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/login", request.AuthRequestID, "Bad request!"), + ) } - return ec.Redirect(http.StatusFound, "/authorize/callback?id="+request.AuthRequestID) + return ec.Redirect( + http.StatusFound, + redirectURL(url, "/authorize/callback", request.AuthRequestID, ""), + ) } } @@ -239,25 +260,27 @@ func logout() func(ec echo.Context) error { } } -func redirectURL(domain string, secure bool, requestID 
string, error string) string { - domain = strings.TrimPrefix(domain, "http://") - domain = strings.TrimPrefix(domain, "https://") - - schema := "http" - if secure { - schema = "https" +func redirectURL(u *url.URL, p string, requestID, err string) string { + v := cloneURL(u) + if p == "" { + p = "/login" } - - u := url.URL{ - Scheme: schema, - Host: domain, - Path: "login", - } - + v.Path = p queryValues := u.Query() queryValues.Set("id", requestID) - queryValues.Set("error", error) - u.RawQuery = queryValues.Encode() + if err != "" { + queryValues.Set("error", err) + } + v.RawQuery = queryValues.Encode() + return v.String() +} - return u.String() +func cloneURL(u *url.URL) *url.URL { + return &url.URL{ + Scheme: u.Scheme, + Opaque: u.Opaque, + User: u.User, + Host: u.Host, + Path: u.Path, + } } diff --git a/internal/app/config.go b/internal/app/config.go index 354f1cc47..56bb8a8a4 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -143,6 +143,17 @@ func ReadConfig(debug bool) (*Config, error) { if debug { c.Dev = true } + c.Host = addHTTPScheme(c.Host) + if c.Host_Web == "" { + c.Host_Web = c.Host + } else { + c.Host_Web = addHTTPScheme(c.Host_Web) + } + if c.AuthSrv.Domain == "" { + c.AuthSrv.Domain = c.Host + } else { + c.AuthSrv.Domain = addHTTPScheme(c.AuthSrv.Domain) + } if c.Host_Web == "" { c.Host_Web = c.Host } @@ -242,3 +253,37 @@ func (ipd *AuthConfigs) Decode(value string) error { *ipd = providers return nil } + +func (c Config) HostURL() *url.URL { + u, err := url.Parse(c.Host) + if err != nil { + u = nil + } + return u +} + +func (c Config) HostWebURL() *url.URL { + u, err := url.Parse(c.Host_Web) + if err != nil { + u = nil + } + return u +} + +func (c Config) AuthServeDomainURL() *url.URL { + u, err := url.Parse(c.AuthSrv.Domain) + if err != nil { + u = nil + } + return u +} + +func addHTTPScheme(host string) string { + if host == "" { + return "" + } + if !strings.HasPrefix(host, "https://") && !strings.HasPrefix(host, "http://") { 
+ host = "http://" + host + } + return host +} diff --git a/internal/app/config_test.go b/internal/app/config_test.go index d24059025..40b5a2fab 100644 --- a/internal/app/config_test.go +++ b/internal/app/config_test.go @@ -32,3 +32,9 @@ func TestReadConfig(t *testing.T) { assert.Equal(t, "hoge", cfg.Auth_ISS) assert.Equal(t, "foo", cfg.Auth_AUD) } + +func Test_AddHTTPScheme(t *testing.T) { + assert.Equal(t, "http://a", addHTTPScheme("a")) + assert.Equal(t, "http://a", addHTTPScheme("http://a")) + assert.Equal(t, "https://a", addHTTPScheme("https://a")) +} From adeda41232d8aab7bc16b70d28597786db6e9789 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Thu, 7 Apr 2022 11:49:13 +0300 Subject: [PATCH 191/253] chore: add log for GraphQL Playground endpoint (#133) * add log * change log message Co-authored-by: rot1024 --- internal/app/app.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/app/app.go b/internal/app/app.go index ccd40e163..8b007d4f4 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -67,6 +67,7 @@ func initEcho(ctx context.Context, cfg *ServerConfig) *echo.Echo { e.GET("/graphql", echo.WrapHandler( playground.Handler("reearth-backend", "/api/graphql"), )) + log.Infof("gql: GraphQL Playground is available") } // init usecases From 0c0e28c83eafddaea601a6d1ecd1eb60fa27f05a Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 7 Apr 2022 17:52:10 +0900 Subject: [PATCH 192/253] fix: enable auth srv dev mode when no domain is specified --- .env.example | 6 ++++-- internal/app/config.go | 14 +++++++------- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.env.example b/.env.example index 98c11c801..7e4aba5a5 100644 --- a/.env.example +++ b/.env.example @@ -1,6 +1,8 @@ # General PORT=8080 REEARTH_DB=mongodb://localhost +REEARTH_HOST=https://localhost:8080 +REEARTH_HOST_WEB=https://localhost:3000 REEARTH_DEV=false # GCP @@ -35,13 +37,13 @@ REEARTH_AUTHSRV_KEY=abcdefghijklmnopqrstuvwxyz # 
Available mailers: [log, smtp, sendgrid] REEARTH_MAILER=log -#SendGrid config +# SendGrid config #REEARTH_MAILER=sendgrid #REEARTH_SENDGRID_EMAIL=noreplay@test.com #REEARTH_SENDGRID_NAME= #REEARTH_SENDGRID_API= -#SMTP config +# SMTP config #REEARTH_MAILER=smtp #REEARTH_SMTP_EMAIL=noreplay@test.com #REEARTH_SMTP_HOST=smtp.sendgrid.net diff --git a/internal/app/config.go b/internal/app/config.go index 56bb8a8a4..3b2061cc8 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -139,6 +139,13 @@ func ReadConfig(debug bool) (*Config, error) { var c Config err := envconfig.Process(configPrefix, &c) + // overwrite env vars + if !c.AuthSrv.Disabled && (c.Dev || c.AuthSrv.Dev || c.AuthSrv.Domain == "") { + if _, ok := os.LookupEnv(op.OidcDevMode); !ok { + _ = os.Setenv(op.OidcDevMode, "1") + } + } + // defailt values if debug { c.Dev = true @@ -158,13 +165,6 @@ func ReadConfig(debug bool) (*Config, error) { c.Host_Web = c.Host } - // overwrite env vars - if !c.AuthSrv.Disabled && (c.Dev || c.AuthSrv.Dev || c.AuthSrv.Domain == "") { - if _, ok := os.LookupEnv(op.OidcDevMode); !ok { - _ = os.Setenv(op.OidcDevMode, "1") - } - } - return &c, err } From e96f78ae3a33efdb36b067812c9d97fc7a67b8b8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Thu, 7 Apr 2022 19:23:05 +0900 Subject: [PATCH 193/253] fix: add a trailing slash to jwt audiences --- internal/app/jwt.go | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 94476673f..84b72c739 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -59,11 +59,20 @@ func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { } algorithm := validator.SignatureAlgorithm(alg) + // add a trailing slash (auth0-spa-js adds a trailing slash to audiences) + aud := append([]string{}, p.AUD...) 
+ for i, a := range aud { + if !strings.HasSuffix(a, "/") { + a += "/" + } + aud[i] = a + } + v, err := validator.New( provider.KeyFunc, algorithm, issuerURL.String(), - p.AUD, + aud, validator.WithCustomClaims(func() validator.CustomClaims { return &customClaims{} }), From 0ce79ff58c3a0a52ad7637437f24d5877d4dd0dc Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 8 Apr 2022 15:25:54 +0900 Subject: [PATCH 194/253] fix: allow separate auth server ui domain --- internal/app/auth_server.go | 21 +++++++++++---------- internal/app/config.go | 18 ++++++++++++++++-- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/internal/app/auth_server.go b/internal/app/auth_server.go index 5f96d9a51..9430fa3cd 100644 --- a/internal/app/auth_server.go +++ b/internal/app/auth_server.go @@ -36,6 +36,8 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server } domain.Path = "/" + uidomain := cfg.Config.AuthServeUIDomainURL() + config := &op.Config{ Issuer: domain.String(), CryptoKey: sha256.Sum256([]byte(cfg.Config.AuthSrv.Key)), @@ -92,7 +94,7 @@ func authEndPoints(ctx context.Context, e *echo.Echo, r *echo.Group, cfg *Server } // Actual login endpoint - r.POST(loginEndpoint, login(ctx, domain, storage, userUsecase)) + r.POST(loginEndpoint, login(ctx, domain, uidomain, storage, userUsecase)) r.GET(logoutEndpoint, logout()) @@ -188,7 +190,7 @@ type loginForm struct { AuthRequestID string `json:"id" form:"id"` } -func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { +func login(ctx context.Context, url, uiurl *url.URL, storage op.Storage, userUsecase interfaces.User) func(ctx echo.Context) error { return func(ec echo.Context) error { request := new(loginForm) err := ec.Bind(request) @@ -196,7 +198,7 @@ func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase in log.Errorln("auth: filed to parse login request") return ec.Redirect( http.StatusFound, - 
redirectURL(url, "/login", "", "Bad request!"), + redirectURL(uiurl, "/login", "", "Bad request!"), ) } @@ -204,7 +206,7 @@ func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase in log.Errorf("auth: filed to parse login request: %s\n", err) return ec.Redirect( http.StatusFound, - redirectURL(url, "/login", "", "Bad request!"), + redirectURL(uiurl, "/login", "", "Bad request!"), ) } @@ -212,7 +214,7 @@ func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase in log.Errorln("auth: one of credentials are not provided") return ec.Redirect( http.StatusFound, - redirectURL(url, "/login", request.AuthRequestID, "Bad request!"), + redirectURL(uiurl, "/login", request.AuthRequestID, "Bad request!"), ) } @@ -232,7 +234,7 @@ func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase in log.Errorf("auth: wrong credentials: %s\n", err) return ec.Redirect( http.StatusFound, - redirectURL(url, "/login", request.AuthRequestID, "Login failed; Invalid user ID or password."), + redirectURL(uiurl, "/login", request.AuthRequestID, "Login failed; Invalid user ID or password."), ) } @@ -242,7 +244,7 @@ func login(ctx context.Context, url *url.URL, storage op.Storage, userUsecase in log.Errorf("auth: failed to complete the auth request: %s\n", err) return ec.Redirect( http.StatusFound, - redirectURL(url, "/login", request.AuthRequestID, "Bad request!"), + redirectURL(uiurl, "/login", request.AuthRequestID, "Bad request!"), ) } @@ -262,10 +264,9 @@ func logout() func(ec echo.Context) error { func redirectURL(u *url.URL, p string, requestID, err string) string { v := cloneURL(u) - if p == "" { - p = "/login" + if p != "" { + v.Path = p } - v.Path = p queryValues := u.Query() queryValues.Set("id", requestID) if err != "" { diff --git a/internal/app/config.go b/internal/app/config.go index 3b2061cc8..62d6daace 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -60,6 +60,7 @@ type AuthSrvConfig struct { Dev bool 
Disabled bool Domain string + UIDomain string Key string DN *AuthSrvDNConfig } @@ -75,7 +76,7 @@ func (c AuthSrvConfig) AuthConfig(debug bool, host string) *AuthConfig { } var aud []string - if debug && host != "" && c.Domain != "" { + if debug && host != "" && c.Domain != "" && c.Domain != host { aud = []string{host, c.Domain} } else { aud = []string{domain} @@ -146,7 +147,7 @@ func ReadConfig(debug bool) (*Config, error) { } } - // defailt values + // default values if debug { c.Dev = true } @@ -164,6 +165,11 @@ func ReadConfig(debug bool) (*Config, error) { if c.Host_Web == "" { c.Host_Web = c.Host } + if c.AuthSrv.UIDomain == "" { + c.AuthSrv.UIDomain = c.Host_Web + } else { + c.AuthSrv.UIDomain = addHTTPScheme(c.AuthSrv.UIDomain) + } return &c, err } @@ -278,6 +284,14 @@ func (c Config) AuthServeDomainURL() *url.URL { return u } +func (c Config) AuthServeUIDomainURL() *url.URL { + u, err := url.Parse(c.AuthSrv.UIDomain) + if err != nil { + u = nil + } + return u +} + func addHTTPScheme(host string) string { if host == "" { return "" From db6ff776aba4219ff560239a59b3d53129961fde Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 8 Apr 2022 08:46:24 +0000 Subject: [PATCH 195/253] v0.6.0 --- CHANGELOG.md | 51 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2367b7257..c22e0f84a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,56 @@ # Changelog All notable changes to this project will be documented in this file. 
-## 0.5.0 - 2022-02-22 +## 0.6.0 - 2022-04-08 + +### ๐Ÿš€ Features + +- Authentication system ([#108](https://github.com/reearth/reearth-backend/pull/108)) [`b89c32`](https://github.com/reearth/reearth-backend/commit/b89c32) +- Default mailer that outputs mails into stdout [`aab26c`](https://github.com/reearth/reearth-backend/commit/aab26c) +- Assets filtering & pagination ([#81](https://github.com/reearth/reearth-backend/pull/81)) [`739943`](https://github.com/reearth/reearth-backend/commit/739943) +- Support sign up with information provided by OIDC providers ([#130](https://github.com/reearth/reearth-backend/pull/130)) [`fef60e`](https://github.com/reearth/reearth-backend/commit/fef60e) + +### ๐Ÿ”ง Bug Fixes + +- Load auth client domain from config ([#124](https://github.com/reearth/reearth-backend/pull/124)) [`9bde8a`](https://github.com/reearth/reearth-backend/commit/9bde8a) +- Signup fails when password is not set [`27c2f0`](https://github.com/reearth/reearth-backend/commit/27c2f0) +- Logger panics [`d1e3a8`](https://github.com/reearth/reearth-backend/commit/d1e3a8) +- Set auth server dev mode automatically [`83a66a`](https://github.com/reearth/reearth-backend/commit/83a66a) +- Auth server bugs and auth client bugs ([#125](https://github.com/reearth/reearth-backend/pull/125)) [`ce2309`](https://github.com/reearth/reearth-backend/commit/ce2309) +- Auth0 setting is not used by JWT verification middleware [`232e75`](https://github.com/reearth/reearth-backend/commit/232e75) +- Invalid mongo queries of pagination [`7caf68`](https://github.com/reearth/reearth-backend/commit/7caf68) +- Auth config not loaded expectedly [`570fe7`](https://github.com/reearth/reearth-backend/commit/570fe7) +- Users cannot creates a new team and scene [`5df25f`](https://github.com/reearth/reearth-backend/commit/5df25f) +- Auth server certificate is not saved as pem format [`982a71`](https://github.com/reearth/reearth-backend/commit/982a71) +- Repo filters are not merged expectedly 
[`f4cc3f`](https://github.com/reearth/reearth-backend/commit/f4cc3f) +- Auth is no longer required for GraphQL endpoint [`58a6d1`](https://github.com/reearth/reearth-backend/commit/58a6d1) +- Rename auth srv default client ID ([#128](https://github.com/reearth/reearth-backend/pull/128)) [`89adc3`](https://github.com/reearth/reearth-backend/commit/89adc3) +- Signup API is disabled when auth server is disabled, users and auth requests in mongo cannot be deleted ([#132](https://github.com/reearth/reearth-backend/pull/132)) [`47be6a`](https://github.com/reearth/reearth-backend/commit/47be6a) +- Auth to work with zero config ([#131](https://github.com/reearth/reearth-backend/pull/131)) [`3cbb45`](https://github.com/reearth/reearth-backend/commit/3cbb45) +- Property.SchemaListMap.List test fails [`3e6dff`](https://github.com/reearth/reearth-backend/commit/3e6dff) +- Errors when auth srv domain is not specified [`10691a`](https://github.com/reearth/reearth-backend/commit/10691a) +- Errors when auth srv domain is not specified [`648073`](https://github.com/reearth/reearth-backend/commit/648073) +- Login redirect does not work [`cb6ca4`](https://github.com/reearth/reearth-backend/commit/cb6ca4) +- Enable auth srv dev mode when no domain is specified [`0c0e28`](https://github.com/reearth/reearth-backend/commit/0c0e28) +- Add a trailing slash to jwt audiences [`e96f78`](https://github.com/reearth/reearth-backend/commit/e96f78) +- Allow separate auth server ui domain [`0ce79f`](https://github.com/reearth/reearth-backend/commit/0ce79f) + +### โšก๏ธ Performance + +- Reduce database queries to obtain scene IDs ([#119](https://github.com/reearth/reearth-backend/pull/119)) [`784332`](https://github.com/reearth/reearth-backend/commit/784332) + +### โœจ Refactor + +- Remove filter args from repos to prevent implementation errors in the use case layer ([#122](https://github.com/reearth/reearth-backend/pull/122)) [`82cf28`](https://github.com/reearth/reearth-backend/commit/82cf28) +- 
Http api to export layers [`3f2582`](https://github.com/reearth/reearth-backend/commit/3f2582) + +### Miscellaneous Tasks + +- Update dependencies ([#117](https://github.com/reearth/reearth-backend/pull/117)) [`d1a38e`](https://github.com/reearth/reearth-backend/commit/d1a38e) +- Update docker-compose config [`83f9b1`](https://github.com/reearth/reearth-backend/commit/83f9b1) +- Add log for GraphQL Playground endpoint ([#133](https://github.com/reearth/reearth-backend/pull/133)) [`adeda4`](https://github.com/reearth/reearth-backend/commit/adeda4) + +## 0.5.0 - 2022-02-24 ### ๐Ÿš€ Features From 1b9b6bea7dc3850f9047b27db18fb281ff5c09a9 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Mon, 11 Apr 2022 08:43:27 +0300 Subject: [PATCH 196/253] chore: update dependencies (#134) Co-authored-by: rot1024 --- .github/workflows/build.yml | 2 +- .github/workflows/ci.yml | 7 +-- Dockerfile | 2 +- go.mod | 52 ++++++++--------- go.sum | 113 ++++++++++++++++++++---------------- 5 files changed, 94 insertions(+), 82 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 015969a79..6a95b9b1b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -49,7 +49,7 @@ jobs: - name: Set up Go uses: actions/setup-go@v3 with: - go-version: 1.17 + go-version: 1.18 - name: Run GoReleaser for nightly if: "!needs.info.outputs.new_tag" uses: goreleaser/goreleaser-action@v2 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e2290d812..4a14e47c8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: - name: set up uses: actions/setup-go@v3 with: - go-version: 1.17 + go-version: 1.18 id: go - name: checkout uses: actions/checkout@v3 @@ -26,11 +26,10 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v3 with: - version: v1.43 + version: v1.45 args: --timeout=10m - skip-go-installation: true - name: test - run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic -timeout 15m + run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic -timeout 10m - name: Send coverage report uses: codecov/codecov-action@v2 with: diff --git a/Dockerfile b/Dockerfile index f2edbd9cb..bb2a5155d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.17-alpine AS build +FROM golang:1.18-alpine AS build ARG TAG=release ARG REV ARG VERSION diff --git a/go.mod b/go.mod index deed69fdd..91c57ea5b 100644 --- a/go.mod +++ b/go.mod @@ -3,11 +3,11 @@ module github.com/reearth/reearth-backend require ( cloud.google.com/go/profiler v0.2.0 cloud.google.com/go/storage v1.21.0 - github.com/99designs/gqlgen v0.17.1 + github.com/99designs/gqlgen v0.17.2 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 - github.com/auth0/go-jwt-middleware/v2 v2.0.0 + github.com/auth0/go-jwt-middleware/v2 v2.0.1 github.com/blang/semver v3.5.1+incompatible - github.com/caos/oidc v1.0.0 + github.com/caos/oidc v1.2.0 github.com/goccy/go-yaml v1.9.5 github.com/golang/gddo v0.0.0-20210115222349-20d68f94ee1f github.com/google/uuid v1.3.0 @@ -19,32 +19,32 @@ require ( github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 - github.com/labstack/echo/v4 v4.7.0 + github.com/labstack/echo/v4 v4.7.2 github.com/labstack/gommon v0.3.1 github.com/mitchellh/mapstructure v1.4.3 github.com/oklog/ulid v1.3.1 github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 - github.com/ravilushqa/otelgqlgen v0.5.1 + github.com/ravilushqa/otelgqlgen v0.6.0 github.com/sendgrid/sendgrid-go v3.11.1+incompatible github.com/sirupsen/logrus v1.8.1 - github.com/spf13/afero v1.8.1 + github.com/spf13/afero v1.8.2 github.com/square/mongo-lock v0.0.0-20201208161834-4db518ed7fb2 - github.com/stretchr/testify v1.7.0 + github.com/stretchr/testify v1.7.1 github.com/twpayne/go-kml v1.5.2 github.com/uber/jaeger-client-go 
v2.30.0+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/dataloaden v0.3.0 github.com/vektah/gqlparser/v2 v2.4.1 - go.mongodb.org/mongo-driver v1.8.4 - go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0 - go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 - go.opentelemetry.io/otel v1.4.1 - go.opentelemetry.io/otel/sdk v1.4.1 - golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce + go.mongodb.org/mongo-driver v1.9.0 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0 + go.opentelemetry.io/otel v1.6.1 + go.opentelemetry.io/otel/sdk v1.6.1 + golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 golang.org/x/text v0.3.7 - golang.org/x/tools v0.1.9 - google.golang.org/api v0.70.0 + golang.org/x/tools v0.1.10 + google.golang.org/api v0.74.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 gopkg.in/square/go-jose.v2 v2.6.0 @@ -52,7 +52,7 @@ require ( require ( cloud.google.com/go v0.100.2 // indirect - cloud.google.com/go/compute v1.3.0 // indirect + cloud.google.com/go/compute v1.5.0 // indirect cloud.google.com/go/iam v0.1.1 // indirect cloud.google.com/go/trace v1.0.0 // indirect github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect @@ -66,7 +66,7 @@ require ( github.com/fatih/color v1.12.0 // indirect github.com/felixge/httpsnoop v1.0.2 // indirect github.com/gedex/inflector v0.0.0-20170307190818-16278e9db813 // indirect - github.com/go-logr/logr v1.2.2 // indirect + github.com/go-logr/logr v1.2.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-stack/stack v1.8.0 // indirect github.com/golang-jwt/jwt v3.2.2+incompatible // indirect @@ -75,7 +75,7 @@ require ( github.com/golang/snappy v0.0.3 // indirect github.com/google/go-cmp v0.5.7 // indirect github.com/google/pprof 
v0.0.0-20220113144219-d25a53d42d00 // indirect - github.com/googleapis/gax-go/v2 v2.1.1 // indirect + github.com/googleapis/gax-go/v2 v2.2.0 // indirect github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect github.com/gorilla/handlers v1.5.1 // indirect github.com/gorilla/schema v1.2.0 // indirect @@ -104,22 +104,22 @@ require ( github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect go.opentelemetry.io/contrib v1.4.0 // indirect - go.opentelemetry.io/otel/trace v1.4.1 // indirect + go.opentelemetry.io/otel/trace v1.6.1 // indirect go.uber.org/atomic v1.7.0 // indirect - golang.org/x/mod v0.5.1 // indirect - golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect - golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 // indirect + golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect + golang.org/x/net v0.0.0-20220325170049-de3da57026de // indirect + golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/sys v0.0.0-20220209214540-3681064d5158 // indirect + golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886 // indirect golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c // indirect - google.golang.org/grpc v1.44.0 // indirect + google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb // indirect + google.golang.org/grpc v1.45.0 // indirect google.golang.org/protobuf v1.27.1 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) -go 1.17 +go 1.18 diff --git a/go.sum b/go.sum index d66fe4c6a..ea8ee7338 100644 --- a/go.sum +++ b/go.sum @@ -40,8 +40,9 @@ 
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw= -cloud.google.com/go/compute v1.3.0 h1:mPL/MzDDYHsh5tHRS9mhmhWlcgClCrCa6ApQCU6wnHI= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0 h1:b1zWmYuuHz7gO9kDcM/EpHGr06UgsYNRpNJzI2kFiLM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= @@ -64,9 +65,8 @@ cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKu cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.16.0/go.mod h1:nbeSjFkqphIqpZsYe1ULVz0yfH8hjpJdJIQoX/e0G2I= -github.com/99designs/gqlgen v0.17.1 h1:i2qQMPKHQjHgBWYIpO4TsaQpPqMHCPK1+h95ipvH8VU= -github.com/99designs/gqlgen v0.17.1/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o= +github.com/99designs/gqlgen v0.17.2 h1:yczvlwMsfcVu/JtejqfrLwXuSP0yZFhmcss3caEvHw8= +github.com/99designs/gqlgen v0.17.2/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod 
h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 h1:JLLDOHEcoREA54hzOnjr8KQcZCvX0E8KhosjE0F1jaQ= @@ -90,8 +90,8 @@ github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= -github.com/auth0/go-jwt-middleware/v2 v2.0.0 h1:jft2yYteA6wpwTj1uxSLwE0TlHCjodMQvX7+eyqJiOQ= -github.com/auth0/go-jwt-middleware/v2 v2.0.0/go.mod h1:/y7nPmfWDnJhCbFq22haCAU7vufwsOUzTthLVleE6/8= +github.com/auth0/go-jwt-middleware/v2 v2.0.1 h1:zAgDKL7nsfVBFl31GGxsSXkhuRzYe1fVtJcO3aMSrFU= +github.com/auth0/go-jwt-middleware/v2 v2.0.1/go.mod h1:kDt7JgUuDEp1VutfUmO4ZxBLL51vlNu/56oDfXc5E0Y= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= github.com/aws/aws-sdk-go v1.35.5/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= @@ -99,8 +99,8 @@ github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnweb github.com/bradfitz/gomemcache v0.0.0-20170208213004-1952afaa557d/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= github.com/caos/logging v0.0.2 h1:ebg5C/HN0ludYR+WkvnFjwSExF4wvyiWPyWGcKMYsoo= github.com/caos/logging v0.0.2/go.mod h1:9LKiDE2ChuGv6CHYif/kiugrfEXu9AwDiFWSreX7Wp0= -github.com/caos/oidc v1.0.0 h1:3sHkYf8zsuARR89qO9CyvfYhHGdliWPcou4glzGMXmQ= -github.com/caos/oidc v1.0.0/go.mod h1:4l0PPwdc6BbrdCFhNrRTUddsG292uHGa7gE2DSEIqoU= +github.com/caos/oidc v1.2.0 h1:dTy5bcT2WQbwPgytEZiG8SV1bCgHUXyDdaPDCNtRdEU= +github.com/caos/oidc v1.2.0/go.mod h1:4l0PPwdc6BbrdCFhNrRTUddsG292uHGa7gE2DSEIqoU= 
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -151,8 +151,9 @@ github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.2 h1:ahHml/yUpnlb96Rp8HCvtYVPY8ZYpxq3g7UYchIYwbs= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= @@ -286,13 +287,13 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go v2.0.0+incompatible h1:j0GKcs05QVmm7yesiZq2+9cxHkNK9YM6zKx4D2qucQU= github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod 
h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0 h1:s7jOdKSaksJVOxE0Y/S32otcfiP+UQ0cL8/GTKaONwE= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3 h1:eHv/jVY/JNop1xg2J9cBb4EzyMpWZoNCP1BslSAIkOI= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3/go.mod h1:h/KNeRx7oYU4SpA4SoY7W2/NxDKEEVuwA6j9A27L4OI= @@ -363,9 +364,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/labstack/echo/v4 v4.6.3/go.mod h1:Hk5OiHj0kDqmFq7aHe7eDqI7CUhuCrfpupQtLGGLm7A= -github.com/labstack/echo/v4 v4.7.0 h1:8wHgZhoE9OT1NSLw6sfrX7ZGpWMtO5Zlfr68+BIo180= -github.com/labstack/echo/v4 v4.7.0/go.mod h1:xkCDAdFCIf8jsFQ5NnbK7oqaF/yU1A1X20Ltm0OvSks= +github.com/labstack/echo/v4 v4.7.2 h1:Kv2/p8OaQ+M6Ex4eGimg9b9e6icoxA42JSlOR3msKtI= +github.com/labstack/echo/v4 v4.7.2/go.mod h1:xkCDAdFCIf8jsFQ5NnbK7oqaF/yU1A1X20Ltm0OvSks= github.com/labstack/gommon v0.3.1 h1:OomWaJXm7xR6L1HmEtGyQf26TEn7V6X88mktX9kee9o= github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM= github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= @@ -413,8 +413,8 @@ github.com/pkg/sftp 
v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qR github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/ravilushqa/otelgqlgen v0.5.1 h1:KW9ZpELSnuQlQM2OXgxSeEMWhwt7sPdEL/B2TpcbPM4= -github.com/ravilushqa/otelgqlgen v0.5.1/go.mod h1:ZJey0LrlbpEUXzFhZ5HILWEBg6wUKJvX2Vx3NXHGFkk= +github.com/ravilushqa/otelgqlgen v0.6.0 h1:SbBmlE1/6Z4NDjCn96Ksbb41BIEhIf3dBT1WlGzNOr4= +github.com/ravilushqa/otelgqlgen v0.6.0/go.mod h1:QP2vU3CSdJ2kYJkZl1zvTZWGgd2qEUxSWp1agqZjZ9A= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -441,8 +441,8 @@ github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIK github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v0.0.0-20170901052352-ee1bd8ee15a1/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.8.1 h1:izYHOT71f9iZ7iq37Uqjael60/vYC6vMtzedudZ0zEk= -github.com/spf13/afero v1.8.1/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= +github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cast v1.1.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/jwalterweatherman 
v0.0.0-20170901151539-12bd96e66386/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= @@ -461,8 +461,9 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.0.1 h1:WE4RBSZ1x6McVVC8S/Md+Qse8YUv6HRObAx6ke00NY8= github.com/tidwall/pretty v1.0.1/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= @@ -482,8 +483,6 @@ github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52 github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= -github.com/vektah/gqlparser/v2 v2.2.0/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= -github.com/vektah/gqlparser/v2 v2.3.1/go.mod h1:i3mQIGIrbK2PD1RrCeMTlVbkF2FJ6WkU1KJlJlC+3F4= github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/vektah/gqlparser/v2 v2.4.1 h1:QOyEn8DAPMUMARGMeshKDkDgNmVoEaEGiDB0uWxcSlQ= github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= @@ -505,9 +504,9 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec 
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= -go.mongodb.org/mongo-driver v1.8.3/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= -go.mongodb.org/mongo-driver v1.8.4 h1:NruvZPPL0PBcRJKmbswoWSrmHeUvzdxA3GCPfD/NEOA= go.mongodb.org/mongo-driver v1.8.4/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.9.0 h1:f3aLGJvQmBl8d9S40IL+jEyBC6hfLPbJjv9t5hEM9ck= +go.mongodb.org/mongo-driver v1.9.0/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -518,33 +517,34 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib v1.4.0 h1:o+obgKZArn1GbM8zPCLYU9LZCI7lL6GcTZArn0qz1yw= go.opentelemetry.io/contrib v1.4.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0 h1:BLXo2v0bW3iq8NhgSf/1X6Cu7UcfkNk3yyuxNcZB1wk= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.29.0/go.mod h1:+0RWgKCuTYtJaZo9Io/D2PAvkMZsRkmYaNgHhwzrCDM= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0 h1:PG5cMt7dHmNmuhQczPRF4nOfAUkZe0tezDZEtckz28k= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.29.0/go.mod h1:V35q3VIMKbgD3FkIiAISJJpSUQxpn2zKQ0pQc7bx9Eg= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0 
h1:Qj331G/6VDsXEYMgeRRV7d+zGa6KKDkJtP6q8DdGeZg= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0/go.mod h1:3k7OUuzfLbCMIVZybXejR2TzJcog/5/HSnUj3m0ISso= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0 h1:401vSW2p/bBvNuAyy8AIT7PoLHQCtuuGVK+ttC5FmwQ= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0/go.mod h1:OfY26sPTH7bTcD8Fxwj/nlC7wmCCP7SR996JVh93sys= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0 h1:sdwza9BScvbOFaZLhvKDQc54vQ8CWM8jD9BO2t+rP4E= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0/go.mod h1:4vatbW3QwS11DK0H0SB7FR31/VbthXcYorswdkVXdyg= -go.opentelemetry.io/contrib/propagators/b3 v1.4.0 h1:wDb2ct7xMzossYpx44w81skxkEyeT2IRnBgYKqyEork= -go.opentelemetry.io/contrib/propagators/b3 v1.4.0/go.mod h1:K399DN23drp0RQGXCbSPOt9075HopQigMgUL99oR8hc= +go.opentelemetry.io/contrib/propagators/b3 v1.6.0 h1:rHeNbko1wNe1Sazpw5IJD83x43lfzMnDb8vckdKxRu8= +go.opentelemetry.io/contrib/propagators/b3 v1.6.0/go.mod h1:6kJAkL2/nNqP9AYhm/8j4dzVU8BfpcvYr2cy25RGBak= go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= go.opentelemetry.io/otel v1.1.0/go.mod h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= -go.opentelemetry.io/otel v1.4.0/go.mod h1:jeAqMFKy2uLIxCtKxoFj0FAL5zAPKQagc3+GtBWakzk= -go.opentelemetry.io/otel v1.4.1 h1:QbINgGDDcoQUoMJa2mMaWno49lja9sHwp6aoa2n3a4g= -go.opentelemetry.io/otel v1.4.1/go.mod h1:StM6F/0fSwpd8dKWDCdRr7uRvEPYdW0hBSlbdTiUde4= +go.opentelemetry.io/otel v1.6.0/go.mod h1:bfJD2DZVw0LBxghOTlgnlI0CV3hLDu9XF/QKOUXMTQQ= +go.opentelemetry.io/otel v1.6.1 h1:6r1YrcTenBvYa1x491d0GGpTVBsNECmrc/K6b+zDeis= +go.opentelemetry.io/otel v1.6.1/go.mod h1:blzUabWHkX6LJewxvadmzafgh/wnvBSDBdOuwkAtrWQ= go.opentelemetry.io/otel/internal/metric v0.24.0 
h1:O5lFy6kAl0LMWBjzy3k//M8VjEaTDWL9DPJuqZmWIAA= go.opentelemetry.io/otel/internal/metric v0.24.0/go.mod h1:PSkQG+KuApZjBpC6ea6082ZrWUUy/w132tJ/LOU3TXk= go.opentelemetry.io/otel/metric v0.24.0 h1:Rg4UYHS6JKR1Sw1TxnI13z7q/0p/XAbgIqUTagvLJuU= go.opentelemetry.io/otel/metric v0.24.0/go.mod h1:tpMFnCD9t+BEGiWY2bWF5+AwjuAdM0lSowQ4SBA3/K4= go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= -go.opentelemetry.io/otel/sdk v1.4.1 h1:J7EaW71E0v87qflB4cDolaqq3AcujGrtyIPGQoZOB0Y= -go.opentelemetry.io/otel/sdk v1.4.1/go.mod h1:NBwHDgDIBYjwK2WNu1OPgsIc2IJzmBXNnvIJxJc8BpE= +go.opentelemetry.io/otel/sdk v1.6.0/go.mod h1:PjLRUfDsoPy0zl7yrDGSUqjj43tL7rEtFdCEiGlxXRM= +go.opentelemetry.io/otel/sdk v1.6.1 h1:ZmcNyMhcuAYIb/Nr6QhBPTMopMTbov/47wHt1gibkoY= +go.opentelemetry.io/otel/sdk v1.6.1/go.mod h1:IVYrddmFZ+eJqu2k38qD3WezFR2pymCzm8tdxyh3R4E= go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= -go.opentelemetry.io/otel/trace v1.4.0/go.mod h1:uc3eRsqDfWs9R7b92xbQbU42/eTNz4N+gLP8qJCi4aE= -go.opentelemetry.io/otel/trace v1.4.1 h1:O+16qcdTrT7zxv2J6GejTPFinSwA++cYerC5iSiF8EQ= -go.opentelemetry.io/otel/trace v1.4.1/go.mod h1:iYEVbroFCNut9QkwEczV9vMRPHNKSSwYZjulEtsmhFc= +go.opentelemetry.io/otel/trace v1.6.0/go.mod h1:qs7BrU5cZ8dXQHBGxHMOxwME/27YH2qEp4/+tZLLwJE= +go.opentelemetry.io/otel/trace v1.6.1 h1:f8c93l5tboBYZna1nWk0W9DYyMzJXDWdZcJZ0Kb400U= +go.opentelemetry.io/otel/trace v1.6.1/go.mod h1:RkFRM1m0puWIq10oxImnGEduNBzxiN7TXluRBtE+5j0= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -562,8 +562,8 @@ golang.org/x/crypto 
v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce h1:Roh6XWxHFKrPgC/EQhVubSAGQ6Ozk6IdxHSzt1mR0EI= -golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 h1:tkVvjkPTB7pnW3jnid7kNyAMPVWllTNOf/qKDze4p9o= +golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -599,8 +599,9 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net 
v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -638,11 +639,11 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210913180222-943fd674d43e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de h1:pZB1TWnKi+o4bENlbzAgLrEbY4RMYmUIRobMcSmfeYc= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -660,8 +661,10 @@ golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 h1:RerP+noqYHUQ8CMRcPlC2nvTa4dcBIjegkuWdcUDuqg= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a h1:qfl7ob3DIEs3Ml9oLuPwY2N04gymzAW04WsUQHIClgM= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20170517211232-f52d1811a629/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -746,8 +749,11 @@ golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220207234003-57398862261d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220209214540-3681064d5158 h1:rm+CHSpPEEW2IsXUib1ThaHIjuBVZjxNgSKmBLFfD4c= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220328115105-d36c6a25d886 h1:eJv7u3ksNXoLbGSKuv2s/SIO4tJVxc/A+MTpzxDgz/Q= +golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -770,7 +776,6 @@ golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiT golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -829,8 +834,9 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= +golang.org/x/tools v0.1.10/go.mod 
h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -875,8 +881,10 @@ google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oY google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= google.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7TH8= google.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80= -google.golang.org/api v0.70.0 h1:67zQnAE0T2rB0A3CwLSas0K+SbVzSxP+zTLkQLexeiw= google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0 h1:ExR2D+5TYIrMphWgs5JCgwRhEDlPDXXrLwHHMgPHTXE= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -963,8 +971,12 @@ google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20220207185906-7721543eae58/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c h1:TU4rFa5APdKTq0s6B7WTsH6Xmx0Knj86s6Biz56mErE= 
google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb h1:0m9wktIpOxGw+SSKmydXWB3Z3GTfcPP6+q75HCQa6HI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= google.golang.org/grpc v1.2.1-0.20170921194603-d4b75ebd4f9f/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -992,8 +1004,9 @@ google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.44.0 h1:weqSxi/TMs1SqFRMHCtBgXRs8k3X39QIDEZ0pRcttUg= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= +google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod 
h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= From 82843fa1c89eaed3729ed67065e42d953236e423 Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Mon, 18 Apr 2022 15:38:39 +0900 Subject: [PATCH 197/253] fix: renovate bot not running on schedule (#136) --- .github/renovate.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index 6eeb20039..e45642d20 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -15,7 +15,7 @@ "enabledManagers": [ "gomod" ], - "packagePatterns": [ + "matchPackagePatterns": [ "*" ], "groupName": "dependencies", @@ -30,7 +30,7 @@ "dockerfile", "docker-compose" ], - "packagePatterns": [ + "matchPackagePatterns": [ "*" ], "groupName": "docker dependencies", @@ -44,7 +44,7 @@ "enabledManagers": [ "github-actions" ], - "packagePatterns": [ + "matchPackagePatterns": [ "*" ], "groupName": "github actions dependencies", @@ -55,4 +55,4 @@ ] } ] -} +} \ No newline at end of file From e2721725d59014ea065c0f2989343a058436e961 Mon Sep 17 00:00:00 2001 From: KaWaite Date: Mon, 18 Apr 2022 17:30:46 +0900 Subject: [PATCH 198/253] ci: fix renovate schedule syntax --- .github/renovate.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/renovate.json b/.github/renovate.json index e45642d20..a35fb4239 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -22,7 +22,7 @@ "groupSlug": "gomod", "semanticCommitType": "chore", "schedule": [ - "before 3am on the fourth day of the month" + "before 3:00 am on the 4th day of the month" ] }, { @@ -37,7 +37,7 @@ "groupSlug": "docker", "semanticCommitType": "chore", "schedule": [ - "before 3am on the fourth day of the month" + "before 3:00 am on the 4th day of the month" ] }, { From 98510048bb464e796e7e47cb1c6695ef528e4cf1 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 19 Apr 2022 16:46:08 +0900 Subject: [PATCH 199/253] fix: aud was changed and jwt could not 
be validated correctly --- .gitignore | 4 ++-- internal/app/config.go | 7 +------ internal/app/config_test.go | 2 +- internal/app/jwt.go | 11 +---------- 4 files changed, 5 insertions(+), 19 deletions(-) diff --git a/.gitignore b/.gitignore index c7310eea7..de2f5f191 100644 --- a/.gitignore +++ b/.gitignore @@ -24,7 +24,7 @@ __debug_bin /bin /debug /mongo -/.env -/.env.local +/.env* +!/.env.example /coverage.txt /web diff --git a/internal/app/config.go b/internal/app/config.go index 62d6daace..b96e0bd03 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -221,14 +221,9 @@ func (c Auth0Config) AuthConfig() *AuthConfig { return nil } domain := prepareUrl(c.Domain) - - aud := []string{} - if c.Audience != "" { - aud = append(aud, prepareUrl(c.Audience)) - } return &AuthConfig{ ISS: domain, - AUD: aud, + AUD: []string{c.Audience}, ClientID: &c.ClientID, } } diff --git a/internal/app/config_test.go b/internal/app/config_test.go index 40b5a2fab..eff6fca6f 100644 --- a/internal/app/config_test.go +++ b/internal/app/config_test.go @@ -10,7 +10,7 @@ func TestAuth0Config_AuthConfig(t *testing.T) { s := "" assert.Equal(t, &AuthConfig{ ISS: "https://hoge.auth0.com", - AUD: []string{"https://xxx"}, + AUD: []string{"xxx"}, ClientID: &s, }, Auth0Config{ Domain: "hoge.auth0.com/", diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 84b72c739..94476673f 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -59,20 +59,11 @@ func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { } algorithm := validator.SignatureAlgorithm(alg) - // add a trailing slash (auth0-spa-js adds a trailing slash to audiences) - aud := append([]string{}, p.AUD...) 
- for i, a := range aud { - if !strings.HasSuffix(a, "/") { - a += "/" - } - aud[i] = a - } - v, err := validator.New( provider.KeyFunc, algorithm, issuerURL.String(), - aud, + p.AUD, validator.WithCustomClaims(func() validator.CustomClaims { return &customClaims{} }), From 7ec76aac1729a6ec62a7bc7738b6a5b63bde17b6 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 20 Apr 2022 12:18:15 +0900 Subject: [PATCH 200/253] fix: auth audiences were unintentionally required --- internal/app/config.go | 6 +++++- internal/app/config_test.go | 34 ++++++++++++++++++++++++++++++++-- internal/app/jwt.go | 9 ++++++++- 3 files changed, 45 insertions(+), 4 deletions(-) diff --git a/internal/app/config.go b/internal/app/config.go index b96e0bd03..0cd45a16a 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -221,9 +221,13 @@ func (c Auth0Config) AuthConfig() *AuthConfig { return nil } domain := prepareUrl(c.Domain) + var aud []string + if len(c.Audience) > 0 { + aud = []string{c.Audience} + } return &AuthConfig{ ISS: domain, - AUD: []string{c.Audience}, + AUD: aud, ClientID: &c.ClientID, } } diff --git a/internal/app/config_test.go b/internal/app/config_test.go index eff6fca6f..d19faab23 100644 --- a/internal/app/config_test.go +++ b/internal/app/config_test.go @@ -3,6 +3,7 @@ package app import ( "testing" + "github.com/reearth/reearth-backend/pkg/auth" "github.com/stretchr/testify/assert" ) @@ -23,13 +24,42 @@ func TestAuth0Config_AuthConfig(t *testing.T) { } func TestReadConfig(t *testing.T) { + clientID := auth.ClientID + localAuth := AuthConfig{ + ISS: "http://localhost:8080", + AUD: []string{"http://localhost:8080"}, + ClientID: &clientID, + } + + cfg, err := ReadConfig(false) + assert.NoError(t, err) + assert.Nil(t, cfg.Auth) + assert.Equal(t, []AuthConfig{localAuth}, cfg.Auths()) + t.Setenv("REEARTH_AUTH", `[{"iss":"bar"}]`) t.Setenv("REEARTH_AUTH_ISS", "hoge") - t.Setenv("REEARTH_AUTH_AUD", "foo") - cfg, err := ReadConfig(false) + cfg, err = ReadConfig(false) 
assert.NoError(t, err) assert.Equal(t, AuthConfigs([]AuthConfig{{ISS: "bar"}}), cfg.Auth) + assert.Equal(t, []AuthConfig{ + {ISS: "hoge"}, // REEARTH_AUTH_* + localAuth, // local auth srv + {ISS: "bar"}, // REEARTH_AUTH + }, cfg.Auths()) assert.Equal(t, "hoge", cfg.Auth_ISS) + assert.Equal(t, "", cfg.Auth_AUD) + + t.Setenv("REEARTH_AUTH_AUD", "foo") + t.Setenv("REEARTH_AUTH0_DOMAIN", "foo") + t.Setenv("REEARTH_AUTH0_CLIENTID", clientID) + cfg, err = ReadConfig(false) + assert.NoError(t, err) + assert.Equal(t, []AuthConfig{ + {ISS: "https://foo", ClientID: &clientID}, // Auth0 + {ISS: "hoge", AUD: []string{"foo"}}, // REEARTH_AUTH_* + localAuth, // local auth srv + {ISS: "bar"}, // REEARTH_AUTH + }, cfg.Auths()) assert.Equal(t, "foo", cfg.Auth_AUD) } diff --git a/internal/app/jwt.go b/internal/app/jwt.go index 94476673f..58c678eed 100644 --- a/internal/app/jwt.go +++ b/internal/app/jwt.go @@ -59,11 +59,18 @@ func NewMultiValidator(providers []AuthConfig) (MultiValidator, error) { } algorithm := validator.SignatureAlgorithm(alg) + var aud []string + if p.AUD != nil { + aud = p.AUD + } else { + aud = []string{} + } + v, err := validator.New( provider.KeyFunc, algorithm, issuerURL.String(), - p.AUD, + aud, validator.WithCustomClaims(func() validator.CustomClaims { return &customClaims{} }), From 04a098dacf6b51b331bb9a171a82554c9a85efaa Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 20 Apr 2022 12:42:50 +0900 Subject: [PATCH 201/253] refactor: introduce generics, reorganize GraphQL schema (#135) * add util pkg * impl * add tests for util * rename * remove ids from gqlgen models * add list to util * add tests for idx.List * add tests for set * fix test * fix lint error * use lang scalar * add me type to gql schema --- go.mod | 2 + go.sum | 5 + gqlgen.yml | 16 +- internal/adapter/context.go | 13 +- internal/adapter/gql/context.go | 3 +- internal/adapter/gql/generated.go | 8360 +++++++++++------ .../gql/gqldataloader/assetloader_gen.go | 27 +- 
.../adapter/gql/gqldataloader/dataloader.go | 32 +- .../gql/gqldataloader/datasetloader_gen.go | 27 +- .../gqldataloader/datasetschemaloader_gen.go | 27 +- .../gql/gqldataloader/layergrouploader_gen.go | 27 +- .../gql/gqldataloader/layeritemloader_gen.go | 27 +- .../gql/gqldataloader/layerloader_gen.go | 27 +- .../gql/gqldataloader/pluginloader_gen.go | 27 +- .../gql/gqldataloader/projectloader_gen.go | 27 +- .../gql/gqldataloader/propertyloader_gen.go | 27 +- .../gqldataloader/propertyschemaloader_gen.go | 27 +- .../gql/gqldataloader/sceneloader_gen.go | 27 +- .../gql/gqldataloader/taggrouploader_gen.go | 27 +- .../gql/gqldataloader/tagitemloader_gen.go | 27 +- .../gql/gqldataloader/tagloader_gen.go | 27 +- .../gql/gqldataloader/teamloader_gen.go | 27 +- .../gql/gqldataloader/userloader_gen.go | 27 +- .../adapter/gql/gqlmodel/convert_asset.go | 6 +- .../adapter/gql/gqlmodel/convert_dataset.go | 49 +- .../adapter/gql/gqlmodel/convert_layer.go | 127 +- .../adapter/gql/gqlmodel/convert_plugin.go | 49 +- .../adapter/gql/gqlmodel/convert_project.go | 4 +- .../adapter/gql/gqlmodel/convert_property.go | 240 +- .../adapter/gql/gqlmodel/convert_scene.go | 67 +- .../gql/gqlmodel/convert_scene_align.go | 12 +- internal/adapter/gql/gqlmodel/convert_tag.go | 29 +- internal/adapter/gql/gqlmodel/convert_team.go | 51 - internal/adapter/gql/gqlmodel/convert_user.go | 87 +- internal/adapter/gql/gqlmodel/models.go | 64 +- internal/adapter/gql/gqlmodel/models_gen.go | 875 +- internal/adapter/gql/gqlmodel/scalar.go | 92 - internal/adapter/gql/gqlmodel/scalar_id.go | 174 + internal/adapter/gql/loader_asset.go | 36 +- internal/adapter/gql/loader_dataset.go | 96 +- internal/adapter/gql/loader_layer.go | 121 +- internal/adapter/gql/loader_plugin.go | 38 +- internal/adapter/gql/loader_project.go | 37 +- internal/adapter/gql/loader_property.go | 65 +- internal/adapter/gql/loader_scene.go | 55 +- internal/adapter/gql/loader_tag.go | 76 +- internal/adapter/gql/loader_team.go | 35 +- 
internal/adapter/gql/loader_user.go | 30 +- internal/adapter/gql/resolver_asset.go | 3 +- internal/adapter/gql/resolver_dataset.go | 15 +- .../adapter/gql/resolver_dataset_schema.go | 7 +- internal/adapter/gql/resolver_layer.go | 80 +- .../adapter/gql/resolver_mutation_asset.go | 16 +- .../adapter/gql/resolver_mutation_dataset.go | 80 +- .../adapter/gql/resolver_mutation_layer.go | 134 +- .../adapter/gql/resolver_mutation_project.go | 27 +- .../adapter/gql/resolver_mutation_property.go | 146 +- .../adapter/gql/resolver_mutation_scene.go | 139 +- internal/adapter/gql/resolver_mutation_tag.go | 64 +- .../adapter/gql/resolver_mutation_team.go | 35 +- .../adapter/gql/resolver_mutation_user.go | 48 +- internal/adapter/gql/resolver_plugin.go | 20 +- internal/adapter/gql/resolver_project.go | 5 +- internal/adapter/gql/resolver_property.go | 43 +- .../adapter/gql/resolver_property_schema.go | 9 +- internal/adapter/gql/resolver_query.go | 104 +- internal/adapter/gql/resolver_scene.go | 25 +- internal/adapter/gql/resolver_tag.go | 24 +- internal/adapter/gql/resolver_team.go | 5 +- internal/adapter/gql/resolver_user.go | 15 +- internal/infrastructure/fs/file.go | 7 +- internal/infrastructure/gcs/file.go | 7 +- internal/infrastructure/memory/asset.go | 4 +- internal/infrastructure/memory/dataset.go | 6 +- .../infrastructure/memory/dataset_schema.go | 4 +- internal/infrastructure/memory/layer.go | 12 +- internal/infrastructure/memory/project.go | 2 +- internal/infrastructure/memory/property.go | 13 +- internal/infrastructure/memory/scene.go | 4 +- internal/infrastructure/memory/scene_lock.go | 2 +- internal/infrastructure/memory/tag.go | 11 +- internal/infrastructure/memory/tag_test.go | 16 +- internal/infrastructure/memory/team.go | 4 +- internal/infrastructure/memory/user.go | 2 +- internal/infrastructure/mongo/asset.go | 4 +- internal/infrastructure/mongo/container.go | 7 +- internal/infrastructure/mongo/dataset.go | 18 +- .../infrastructure/mongo/dataset_schema.go | 10 +- 
internal/infrastructure/mongo/layer.go | 20 +- .../201217132559_add_scene_widget_id.go | 2 +- .../infrastructure/mongo/mongodoc/dataset.go | 8 +- .../mongo/mongodoc/dataset_schema.go | 8 +- .../infrastructure/mongo/mongodoc/layer.go | 4 +- .../infrastructure/mongo/mongodoc/property.go | 4 +- .../mongo/mongodoc/property_schema.go | 6 +- .../mongo/mongodoc/scene_align.go | 2 +- internal/infrastructure/mongo/mongodoc/tag.go | 22 +- .../infrastructure/mongo/mongodoc/tag_test.go | 46 +- internal/infrastructure/mongo/project.go | 4 +- internal/infrastructure/mongo/property.go | 8 +- internal/infrastructure/mongo/scene.go | 7 +- internal/infrastructure/mongo/scene_lock.go | 4 +- internal/infrastructure/mongo/tag.go | 22 +- internal/infrastructure/mongo/team.go | 8 +- internal/infrastructure/mongo/user.go | 4 +- internal/usecase/interactor/dataset.go | 6 +- internal/usecase/interactor/layer.go | 2 +- internal/usecase/interactor/tag.go | 9 +- internal/usecase/interactor/team.go | 4 +- internal/usecase/interfaces/dataset.go | 2 +- internal/usecase/interfaces/layer.go | 2 +- internal/usecase/interfaces/tag.go | 2 +- internal/usecase/operator.go | 14 +- internal/usecase/repo/asset.go | 2 +- internal/usecase/repo/container.go | 8 +- internal/usecase/repo/dataset.go | 8 +- internal/usecase/repo/dataset_schema.go | 4 +- internal/usecase/repo/layer.go | 10 +- internal/usecase/repo/project.go | 2 +- internal/usecase/repo/property.go | 4 +- internal/usecase/repo/scene.go | 2 +- internal/usecase/repo/scene_lock.go | 2 +- internal/usecase/repo/tag.go | 8 +- internal/usecase/repo/team.go | 4 +- internal/usecase/repo/user.go | 2 +- pkg/asset/asset.go | 2 +- pkg/asset/asset_test.go | 2 +- pkg/asset/id.go | 9 - pkg/auth/builder.go | 4 +- pkg/dataset/graph_iterator_test.go | 10 +- pkg/dataset/id.go | 19 +- pkg/dataset/list_test.go | 4 +- pkg/id/asset_gen.go | 314 - pkg/id/asset_gen_test.go | 976 -- pkg/id/auth_request_gen.go | 297 - pkg/id/auth_request_gen_test.go | 1011 -- 
pkg/id/cluster_field_gen_test.go | 976 -- pkg/id/cluster_gen.go | 314 - pkg/id/common.go | 5 + pkg/id/dataset_gen.go | 314 - pkg/id/dataset_gen_test.go | 976 -- pkg/id/dataset_schema_field_gen.go | 314 - pkg/id/dataset_schema_field_gen_test.go | 976 -- pkg/id/dataset_schema_gen.go | 314 - pkg/id/dataset_schema_gen_test.go | 976 -- pkg/id/gen.go | 36 - pkg/id/id.go | 341 +- pkg/id/id.tmpl | 314 - pkg/id/id_test.go | 359 - pkg/id/id_test.tmpl | 978 -- pkg/id/idx/id.go | 140 + pkg/id/idx/id_test.go | 26 + pkg/id/idx/list.go | 115 + pkg/id/idx/list_test.go | 249 + pkg/id/idx/set.go | 85 + pkg/id/idx/set_test.go | 86 + pkg/id/idx/string.go | 38 + pkg/id/idx/string_test.go | 29 + pkg/id/idx/ulid.go | 54 + pkg/id/infobox_field_gen.go | 314 - pkg/id/infobox_field_gen_test.go | 976 -- pkg/id/layer_gen.go | 314 - pkg/id/layer_gen_test.go | 976 -- pkg/id/plugin.go | 1 - pkg/id/plugin_extension.go | 35 - pkg/id/plugin_extension_test.go | 64 - pkg/id/project_gen.go | 314 - pkg/id/project_gen_test.go | 976 -- pkg/id/property_gen.go | 314 - pkg/id/property_gen_test.go | 976 -- pkg/id/property_item_gen.go | 314 - pkg/id/property_item_gen_test.go | 976 -- pkg/id/property_schema.go | 7 +- pkg/id/property_schema_field.go | 39 - pkg/id/property_schema_field_test.go | 64 - pkg/id/property_schema_group.go | 39 - pkg/id/property_schema_group_test.go | 64 - pkg/id/scene_gen.go | 314 - pkg/id/scene_gen_test.go | 976 -- pkg/id/tag_gen.go | 314 - pkg/id/tag_gen_test.go | 976 -- pkg/id/team_gen.go | 314 - pkg/id/team_gen_test.go | 976 -- pkg/id/user_gen.go | 314 - pkg/id/user_gen_test.go | 976 -- pkg/id/widget_gen.go | 314 - pkg/id/widget_gen_test.go | 976 -- pkg/layer/builder.go | 2 +- pkg/layer/group_builder.go | 2 +- pkg/layer/id.go | 12 +- pkg/layer/id_list.go | 11 +- pkg/layer/initializer.go | 2 +- pkg/layer/initializer_test.go | 4 +- pkg/layer/item_builder.go | 2 +- pkg/layer/layer.go | 2 +- pkg/layer/merged.go | 2 +- pkg/layer/merging/merged.go | 22 +- pkg/plugin/id.go | 3 - 
pkg/project/builder_test.go | 4 +- pkg/project/id.go | 9 - pkg/project/project.go | 2 +- pkg/property/builder_test.go | 2 +- pkg/property/group_builder_test.go | 2 +- pkg/property/id.go | 30 +- pkg/property/initializer_test.go | 10 +- pkg/property/list.go | 19 +- pkg/property/pointer.go | 16 +- pkg/property/schema_group_builder.go | 2 +- pkg/property/schema_group_list.go | 8 +- pkg/property/schema_group_list_test.go | 5 +- pkg/property/schema_test.go | 4 +- pkg/scene/builder.go | 4 +- pkg/scene/builder/builder_test.go | 8 +- pkg/scene/builder/scene.go | 2 +- pkg/scene/id.go | 61 +- pkg/scene/id_test.go | 47 - pkg/scene/scene.go | 2 +- pkg/scene/scene_test.go | 10 +- pkg/scene/sceneops/dataset_migrator.go | 2 +- pkg/scene/widget_align_system_test.go | 38 +- pkg/scene/widget_area.go | 55 +- pkg/scene/widget_area_test.go | 56 +- pkg/scene/widget_section_test.go | 20 +- pkg/scene/widget_zone_test.go | 20 +- pkg/tag/group.go | 13 +- pkg/tag/group_builder.go | 7 +- pkg/tag/group_test.go | 12 +- pkg/tag/id.go | 18 +- pkg/tag/list.go | 53 - pkg/tag/list_test.go | 88 +- pkg/tag/map.go | 2 +- pkg/user/id.go | 47 +- pkg/user/id_test.go | 47 - pkg/user/members.go | 2 +- pkg/util/list.go | 130 + pkg/util/list_test.go | 168 + pkg/util/map.go | 126 + pkg/util/map_test.go | 180 + pkg/util/slice.go | 135 + pkg/util/slice_test.go | 95 + pkg/util/util.go | 25 + pkg/util/util_test.go | 52 + pkg/value/ref.go | 8 +- schema.graphql | 192 +- 244 files changed, 10326 insertions(+), 28283 deletions(-) delete mode 100644 internal/adapter/gql/gqlmodel/convert_team.go create mode 100644 internal/adapter/gql/gqlmodel/scalar_id.go delete mode 100644 pkg/id/asset_gen.go delete mode 100644 pkg/id/asset_gen_test.go delete mode 100644 pkg/id/auth_request_gen.go delete mode 100644 pkg/id/auth_request_gen_test.go delete mode 100644 pkg/id/cluster_field_gen_test.go delete mode 100644 pkg/id/cluster_gen.go create mode 100644 pkg/id/common.go delete mode 100644 pkg/id/dataset_gen.go delete mode 100644 
pkg/id/dataset_gen_test.go delete mode 100644 pkg/id/dataset_schema_field_gen.go delete mode 100644 pkg/id/dataset_schema_field_gen_test.go delete mode 100644 pkg/id/dataset_schema_gen.go delete mode 100644 pkg/id/dataset_schema_gen_test.go delete mode 100644 pkg/id/gen.go delete mode 100644 pkg/id/id.tmpl delete mode 100644 pkg/id/id_test.go delete mode 100644 pkg/id/id_test.tmpl create mode 100644 pkg/id/idx/id.go create mode 100644 pkg/id/idx/id_test.go create mode 100644 pkg/id/idx/list.go create mode 100644 pkg/id/idx/list_test.go create mode 100644 pkg/id/idx/set.go create mode 100644 pkg/id/idx/set_test.go create mode 100644 pkg/id/idx/string.go create mode 100644 pkg/id/idx/string_test.go create mode 100644 pkg/id/idx/ulid.go delete mode 100644 pkg/id/infobox_field_gen.go delete mode 100644 pkg/id/infobox_field_gen_test.go delete mode 100644 pkg/id/layer_gen.go delete mode 100644 pkg/id/layer_gen_test.go delete mode 100644 pkg/id/plugin_extension.go delete mode 100644 pkg/id/plugin_extension_test.go delete mode 100644 pkg/id/project_gen.go delete mode 100644 pkg/id/project_gen_test.go delete mode 100644 pkg/id/property_gen.go delete mode 100644 pkg/id/property_gen_test.go delete mode 100644 pkg/id/property_item_gen.go delete mode 100644 pkg/id/property_item_gen_test.go delete mode 100644 pkg/id/property_schema_field.go delete mode 100644 pkg/id/property_schema_field_test.go delete mode 100644 pkg/id/property_schema_group.go delete mode 100644 pkg/id/property_schema_group_test.go delete mode 100644 pkg/id/scene_gen.go delete mode 100644 pkg/id/scene_gen_test.go delete mode 100644 pkg/id/tag_gen.go delete mode 100644 pkg/id/tag_gen_test.go delete mode 100644 pkg/id/team_gen.go delete mode 100644 pkg/id/team_gen_test.go delete mode 100644 pkg/id/user_gen.go delete mode 100644 pkg/id/user_gen_test.go delete mode 100644 pkg/id/widget_gen.go delete mode 100644 pkg/id/widget_gen_test.go delete mode 100644 pkg/scene/id_test.go delete mode 100644 pkg/user/id_test.go 
create mode 100644 pkg/util/list.go create mode 100644 pkg/util/list_test.go create mode 100644 pkg/util/map.go create mode 100644 pkg/util/map_test.go create mode 100644 pkg/util/slice.go create mode 100644 pkg/util/slice_test.go create mode 100644 pkg/util/util.go create mode 100644 pkg/util/util_test.go diff --git a/go.mod b/go.mod index 91c57ea5b..4c9e3186c 100644 --- a/go.mod +++ b/go.mod @@ -26,6 +26,7 @@ require ( github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 github.com/ravilushqa/otelgqlgen v0.6.0 + github.com/samber/lo v1.11.0 github.com/sendgrid/sendgrid-go v3.11.1+incompatible github.com/sirupsen/logrus v1.8.1 github.com/spf13/afero v1.8.2 @@ -42,6 +43,7 @@ require ( go.opentelemetry.io/otel v1.6.1 go.opentelemetry.io/otel/sdk v1.6.1 golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 + golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 golang.org/x/text v0.3.7 golang.org/x/tools v0.1.10 google.golang.org/api v0.74.0 diff --git a/go.sum b/go.sum index ea8ee7338..0f2e1bfee 100644 --- a/go.sum +++ b/go.sum @@ -422,6 +422,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/samber/lo v1.11.0 h1:JfeYozXL1xfkhRUFOfH13ociyeiLSC/GRJjGKI668xM= +github.com/samber/lo v1.11.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A= github.com/sendgrid/rest v2.6.6+incompatible h1:3rO5UTPhLQo6fjytWwdwRWclP101CqErg2klf8LneB4= github.com/sendgrid/rest v2.6.6+incompatible/go.mod h1:kXX7q3jZtJXK5c5qK83bSGMdV6tsOE70KbHoqJls4lE= github.com/sendgrid/sendgrid-go v3.11.1+incompatible h1:ai0+woZ3r/+tKLQExznak5XerOFoD6S7ePO0lMV8WXo= @@ -464,6 +466,7 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ 
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/thoas/go-funk v0.9.1 h1:O549iLZqPpTUQ10ykd26sZhzD+rmR5pWhuElrhbC20M= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.0.1 h1:WE4RBSZ1x6McVVC8S/Md+Qse8YUv6HRObAx6ke00NY8= github.com/tidwall/pretty v1.0.1/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= @@ -574,6 +577,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 h1:K3x+yU+fbot38x5bQbU2QqUAVyYLEktdNH2GxZLnM3U= +golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= diff --git a/gqlgen.yml b/gqlgen.yml index 3f1c7a163..cece7ccde 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -17,25 +17,13 @@ models: model: github.com/99designs/gqlgen/graphql.Time FileSize: model: github.com/99designs/gqlgen/graphql.Int64 - ID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID Cursor: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Cursor URL: model: 
github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.URL - PluginID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PluginID - PluginExtensionID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PluginExtensionID - PropertySchemaID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaID - PropertySchemaGroupID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaGroupID - PropertySchemaFieldID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchemaFieldID - DatasetSchemaFieldID: - model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.DatasetSchemaFieldID TranslatedString: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Map Lang: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Lang + ID: + model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID diff --git a/internal/adapter/context.go b/internal/adapter/context.go index b09e2b07e..4025c2de4 100644 --- a/internal/adapter/context.go +++ b/internal/adapter/context.go @@ -6,6 +6,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" ) type ContextKey string @@ -17,6 +18,8 @@ const ( contextUsecases ContextKey = "usecases" ) +var defaultLang = language.English + type AuthInfo struct { Token string Sub string @@ -52,19 +55,19 @@ func User(ctx context.Context) *user.User { return nil } -func Lang(ctx context.Context, lang *string) string { - if lang != nil && *lang != "" { - return *lang +func Lang(ctx context.Context, lang *language.Tag) string { + if lang != nil && !lang.IsRoot() { + return lang.String() } u := User(ctx) if u == nil { - return "en" // default language + return defaultLang.String() } l := u.Lang() if 
l.IsRoot() { - return "en" // default language + return defaultLang.String() } return l.String() diff --git a/internal/adapter/gql/context.go b/internal/adapter/gql/context.go index 0c5a599bc..b6ab687ae 100644 --- a/internal/adapter/gql/context.go +++ b/internal/adapter/gql/context.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/user" + "golang.org/x/text/language" ) type ContextKey string @@ -31,7 +32,7 @@ func getUser(ctx context.Context) *user.User { return adapter.User(ctx) } -func getLang(ctx context.Context, lang *string) string { +func getLang(ctx context.Context, lang *language.Tag) string { return adapter.Lang(ctx, lang) } diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index fc5853785..d78aa027c 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -17,7 +17,6 @@ import ( "github.com/99designs/gqlgen/graphql/introspection" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" gqlparser "github.com/vektah/gqlparser/v2" "github.com/vektah/gqlparser/v2/ast" "golang.org/x/text/language" @@ -53,6 +52,7 @@ type ResolverRoot interface { LayerItem() LayerItemResolver LayerTagGroup() LayerTagGroupResolver LayerTagItem() LayerTagItemResolver + Me() MeResolver MergedInfobox() MergedInfoboxResolver MergedInfoboxField() MergedInfoboxFieldResolver MergedLayer() MergedLayerResolver @@ -80,7 +80,6 @@ type ResolverRoot interface { TagItem() TagItemResolver Team() TeamResolver TeamMember() TeamMemberResolver - User() UserResolver } type DirectiveRoot struct { @@ -406,6 +405,18 @@ type ComplexityRoot struct { TagID func(childComplexity int) int } + Me struct { + Auths func(childComplexity int) int + Email func(childComplexity int) int + ID 
func(childComplexity int) int + Lang func(childComplexity int) int + MyTeam func(childComplexity int) int + MyTeamID func(childComplexity int) int + Name func(childComplexity int) int + Teams func(childComplexity int) int + Theme func(childComplexity int) int + } + MergedInfobox struct { Fields func(childComplexity int) int Property func(childComplexity int) int @@ -578,9 +589,9 @@ type ComplexityRoot struct { RepositoryURL func(childComplexity int) int Scene func(childComplexity int) int SceneID func(childComplexity int) int - ScenePlugin func(childComplexity int, sceneID *id.ID) int - TranslatedDescription func(childComplexity int, lang *string) int - TranslatedName func(childComplexity int, lang *string) int + ScenePlugin func(childComplexity int, sceneID *gqlmodel.ID) int + TranslatedDescription func(childComplexity int, lang *language.Tag) int + TranslatedName func(childComplexity int, lang *language.Tag) int Version func(childComplexity int) int } @@ -595,10 +606,10 @@ type ComplexityRoot struct { PluginID func(childComplexity int) int PropertySchema func(childComplexity int) int PropertySchemaID func(childComplexity int) int - SceneWidget func(childComplexity int, sceneID id.ID) int + SceneWidget func(childComplexity int, sceneID gqlmodel.ID) int SingleOnly func(childComplexity int) int - TranslatedDescription func(childComplexity int, lang *string) int - TranslatedName func(childComplexity int, lang *string) int + TranslatedDescription func(childComplexity int, lang *language.Tag) int + TranslatedName func(childComplexity int, lang *language.Tag) int Type func(childComplexity int) int Visualizer func(childComplexity int) int WidgetLayout func(childComplexity int) int @@ -752,8 +763,8 @@ type ComplexityRoot struct { Prefix func(childComplexity int) int Suffix func(childComplexity int) int Title func(childComplexity int) int - TranslatedDescription func(childComplexity int, lang *string) int - TranslatedTitle func(childComplexity int, lang *string) int + 
TranslatedDescription func(childComplexity int, lang *language.Tag) int + TranslatedTitle func(childComplexity int, lang *language.Tag) int Type func(childComplexity int) int UI func(childComplexity int) int } @@ -763,7 +774,7 @@ type ComplexityRoot struct { Icon func(childComplexity int) int Key func(childComplexity int) int Title func(childComplexity int) int - TranslatedTitle func(childComplexity int, lang *string) int + TranslatedTitle func(childComplexity int, lang *language.Tag) int } PropertySchemaGroup struct { @@ -777,27 +788,26 @@ type ComplexityRoot struct { SchemaGroupID func(childComplexity int) int SchemaID func(childComplexity int) int Title func(childComplexity int) int - TranslatedTitle func(childComplexity int, lang *string) int + TranslatedTitle func(childComplexity int, lang *language.Tag) int } Query struct { - Assets func(childComplexity int, teamID id.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) int + Assets func(childComplexity int, teamID gqlmodel.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) int CheckProjectAlias func(childComplexity int, alias string) int - DatasetSchemas func(childComplexity int, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int - Datasets func(childComplexity int, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int - DynamicDatasetSchemas func(childComplexity int, sceneID id.ID) int + DatasetSchemas func(childComplexity int, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + Datasets func(childComplexity int, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + DynamicDatasetSchemas func(childComplexity int, sceneID gqlmodel.ID) int InstallablePlugins func(childComplexity int) int - Layer func(childComplexity int, id id.ID) int + Layer func(childComplexity int, id 
gqlmodel.ID) int Me func(childComplexity int) int - Node func(childComplexity int, id id.ID, typeArg gqlmodel.NodeType) int - Nodes func(childComplexity int, id []*id.ID, typeArg gqlmodel.NodeType) int - Plugin func(childComplexity int, id id.PluginID) int - Plugins func(childComplexity int, id []*id.PluginID) int - Projects func(childComplexity int, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int - PropertySchema func(childComplexity int, id id.PropertySchemaID) int - PropertySchemas func(childComplexity int, id []*id.PropertySchemaID) int - Scene func(childComplexity int, projectID id.ID) int - SceneLock func(childComplexity int, sceneID id.ID) int + Node func(childComplexity int, id gqlmodel.ID, typeArg gqlmodel.NodeType) int + Nodes func(childComplexity int, id []gqlmodel.ID, typeArg gqlmodel.NodeType) int + Plugin func(childComplexity int, id gqlmodel.ID) int + Plugins func(childComplexity int, id []gqlmodel.ID) int + Projects func(childComplexity int, teamID gqlmodel.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int + PropertySchema func(childComplexity int, id gqlmodel.ID) int + PropertySchemas func(childComplexity int, id []gqlmodel.ID) int + Scene func(childComplexity int, projectID gqlmodel.ID) int SearchUser func(childComplexity int, nameOrEmail string) int } @@ -855,7 +865,6 @@ type ComplexityRoot struct { DatasetSchemas func(childComplexity int, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int DynamicDatasetSchemas func(childComplexity int) int ID func(childComplexity int) int - LockMode func(childComplexity int) int Plugins func(childComplexity int) int Project func(childComplexity int) int ProjectID func(childComplexity int) int @@ -891,12 +900,6 @@ type ComplexityRoot struct { PropertyID func(childComplexity int) int } - SearchedUser struct { - UserEmail func(childComplexity int) int - UserID func(childComplexity 
int) int - UserName func(childComplexity int) int - } - SignupPayload struct { Team func(childComplexity int) int User func(childComplexity int) int @@ -979,7 +982,7 @@ type ComplexityRoot struct { } UpdateMePayload struct { - User func(childComplexity int) int + Me func(childComplexity int) int } UpdateMemberOfTeamPayload struct { @@ -1015,15 +1018,9 @@ type ComplexityRoot struct { } User struct { - Auths func(childComplexity int) int - Email func(childComplexity int) int - ID func(childComplexity int) int - Lang func(childComplexity int) int - MyTeam func(childComplexity int) int - MyTeamID func(childComplexity int) int - Name func(childComplexity int) int - Teams func(childComplexity int) int - Theme func(childComplexity int) int + Email func(childComplexity int) int + ID func(childComplexity int) int + Name func(childComplexity int) int } WidgetAlignSystem struct { @@ -1135,6 +1132,10 @@ type LayerTagGroupResolver interface { type LayerTagItemResolver interface { Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) } +type MeResolver interface { + Teams(ctx context.Context, obj *gqlmodel.Me) ([]*gqlmodel.Team, error) + MyTeam(ctx context.Context, obj *gqlmodel.Me) (*gqlmodel.Team, error) +} type MergedInfoboxResolver interface { Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) } @@ -1237,16 +1238,16 @@ type MutationResolver interface { } type PluginResolver interface { Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.Scene, error) - TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) - TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) + TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) PropertySchema(ctx context.Context, obj *gqlmodel.Plugin) (*gqlmodel.PropertySchema, 
error) } type PluginExtensionResolver interface { Plugin(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.Plugin, error) - SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID id.ID) (*gqlmodel.SceneWidget, error) + SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID gqlmodel.ID) (*gqlmodel.SceneWidget, error) PropertySchema(ctx context.Context, obj *gqlmodel.PluginExtension) (*gqlmodel.PropertySchema, error) - TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) - TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) + TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) } type ProjectResolver interface { Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) @@ -1283,33 +1284,32 @@ type PropertyLinkableFieldsResolver interface { Schema(ctx context.Context, obj *gqlmodel.PropertyLinkableFields) (*gqlmodel.PropertySchema, error) } type PropertySchemaFieldResolver interface { - TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) - TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) + TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) } type PropertySchemaFieldChoiceResolver interface { - TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *language.Tag) (string, error) } type PropertySchemaGroupResolver interface { Schema(ctx 
context.Context, obj *gqlmodel.PropertySchemaGroup) (*gqlmodel.PropertySchema, error) - TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *string) (string, error) + TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *language.Tag) (string, error) } type QueryResolver interface { - Me(ctx context.Context) (*gqlmodel.User, error) - Node(ctx context.Context, id id.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) - Nodes(ctx context.Context, id []*id.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) - PropertySchema(ctx context.Context, id id.PropertySchemaID) (*gqlmodel.PropertySchema, error) - PropertySchemas(ctx context.Context, id []*id.PropertySchemaID) ([]*gqlmodel.PropertySchema, error) - Plugin(ctx context.Context, id id.PluginID) (*gqlmodel.Plugin, error) - Plugins(ctx context.Context, id []*id.PluginID) ([]*gqlmodel.Plugin, error) - Layer(ctx context.Context, id id.ID) (gqlmodel.Layer, error) - Scene(ctx context.Context, projectID id.ID) (*gqlmodel.Scene, error) - Assets(ctx context.Context, teamID id.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) - Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) - DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) - Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) - SceneLock(ctx context.Context, sceneID id.ID) (*gqlmodel.SceneLockMode, error) - DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*gqlmodel.DatasetSchema, error) - SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) + Me(ctx context.Context) 
(*gqlmodel.Me, error) + Node(ctx context.Context, id gqlmodel.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) + Nodes(ctx context.Context, id []gqlmodel.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) + PropertySchema(ctx context.Context, id gqlmodel.ID) (*gqlmodel.PropertySchema, error) + PropertySchemas(ctx context.Context, id []gqlmodel.ID) ([]*gqlmodel.PropertySchema, error) + Plugin(ctx context.Context, id gqlmodel.ID) (*gqlmodel.Plugin, error) + Plugins(ctx context.Context, id []gqlmodel.ID) ([]*gqlmodel.Plugin, error) + Layer(ctx context.Context, id gqlmodel.ID) (gqlmodel.Layer, error) + Scene(ctx context.Context, projectID gqlmodel.ID) (*gqlmodel.Scene, error) + Assets(ctx context.Context, teamID gqlmodel.ID, keyword *string, sort *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) + Projects(ctx context.Context, teamID gqlmodel.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) + DatasetSchemas(ctx context.Context, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) + Datasets(ctx context.Context, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) + DynamicDatasetSchemas(ctx context.Context, sceneID gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) + SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.User, error) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) InstallablePlugins(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) } @@ -1318,7 +1318,6 @@ type SceneResolver interface { Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) RootLayer(ctx context.Context, obj *gqlmodel.Scene) 
(*gqlmodel.LayerGroup, error) - LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlmodel.SceneLockMode, error) DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) @@ -1351,10 +1350,6 @@ type TeamResolver interface { type TeamMemberResolver interface { User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) } -type UserResolver interface { - Teams(ctx context.Context, obj *gqlmodel.User) ([]*gqlmodel.Team, error) - MyTeam(ctx context.Context, obj *gqlmodel.User) (*gqlmodel.Team, error) -} type executableSchema struct { resolvers ResolverRoot @@ -2643,6 +2638,69 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.LayerTagItem.TagID(childComplexity), true + case "Me.auths": + if e.complexity.Me.Auths == nil { + break + } + + return e.complexity.Me.Auths(childComplexity), true + + case "Me.email": + if e.complexity.Me.Email == nil { + break + } + + return e.complexity.Me.Email(childComplexity), true + + case "Me.id": + if e.complexity.Me.ID == nil { + break + } + + return e.complexity.Me.ID(childComplexity), true + + case "Me.lang": + if e.complexity.Me.Lang == nil { + break + } + + return e.complexity.Me.Lang(childComplexity), true + + case "Me.myTeam": + if e.complexity.Me.MyTeam == nil { + break + } + + return e.complexity.Me.MyTeam(childComplexity), true + + case "Me.myTeamId": + if e.complexity.Me.MyTeamID == nil { + break + } + + return e.complexity.Me.MyTeamID(childComplexity), true + + case "Me.name": + if e.complexity.Me.Name == nil { + break + } + + return e.complexity.Me.Name(childComplexity), true + + case "Me.teams": + if e.complexity.Me.Teams == nil { + break + } + + return e.complexity.Me.Teams(childComplexity), true + + case "Me.theme": + if e.complexity.Me.Theme == nil { + break + } + + 
return e.complexity.Me.Theme(childComplexity), true + case "MergedInfobox.fields": if e.complexity.MergedInfobox.Fields == nil { break @@ -3960,7 +4018,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Plugin.ScenePlugin(childComplexity, args["sceneId"].(*id.ID)), true + return e.complexity.Plugin.ScenePlugin(childComplexity, args["sceneId"].(*gqlmodel.ID)), true case "Plugin.translatedDescription": if e.complexity.Plugin.TranslatedDescription == nil { @@ -3972,7 +4030,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Plugin.TranslatedDescription(childComplexity, args["lang"].(*string)), true + return e.complexity.Plugin.TranslatedDescription(childComplexity, args["lang"].(*language.Tag)), true case "Plugin.translatedName": if e.complexity.Plugin.TranslatedName == nil { @@ -3984,7 +4042,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Plugin.TranslatedName(childComplexity, args["lang"].(*string)), true + return e.complexity.Plugin.TranslatedName(childComplexity, args["lang"].(*language.Tag)), true case "Plugin.version": if e.complexity.Plugin.Version == nil { @@ -4073,7 +4131,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PluginExtension.SceneWidget(childComplexity, args["sceneId"].(id.ID)), true + return e.complexity.PluginExtension.SceneWidget(childComplexity, args["sceneId"].(gqlmodel.ID)), true case "PluginExtension.singleOnly": if e.complexity.PluginExtension.SingleOnly == nil { @@ -4092,7 +4150,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PluginExtension.TranslatedDescription(childComplexity, args["lang"].(*string)), true + return 
e.complexity.PluginExtension.TranslatedDescription(childComplexity, args["lang"].(*language.Tag)), true case "PluginExtension.translatedName": if e.complexity.PluginExtension.TranslatedName == nil { @@ -4104,7 +4162,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PluginExtension.TranslatedName(childComplexity, args["lang"].(*string)), true + return e.complexity.PluginExtension.TranslatedName(childComplexity, args["lang"].(*language.Tag)), true case "PluginExtension.type": if e.complexity.PluginExtension.Type == nil { @@ -4830,7 +4888,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PropertySchemaField.TranslatedDescription(childComplexity, args["lang"].(*string)), true + return e.complexity.PropertySchemaField.TranslatedDescription(childComplexity, args["lang"].(*language.Tag)), true case "PropertySchemaField.translatedTitle": if e.complexity.PropertySchemaField.TranslatedTitle == nil { @@ -4842,7 +4900,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PropertySchemaField.TranslatedTitle(childComplexity, args["lang"].(*string)), true + return e.complexity.PropertySchemaField.TranslatedTitle(childComplexity, args["lang"].(*language.Tag)), true case "PropertySchemaField.type": if e.complexity.PropertySchemaField.Type == nil { @@ -4896,7 +4954,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PropertySchemaFieldChoice.TranslatedTitle(childComplexity, args["lang"].(*string)), true + return e.complexity.PropertySchemaFieldChoice.TranslatedTitle(childComplexity, args["lang"].(*language.Tag)), true case "PropertySchemaGroup.allTranslatedTitle": if e.complexity.PropertySchemaGroup.AllTranslatedTitle == nil { @@ -4978,7 +5036,7 @@ func (e *executableSchema) 
Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.PropertySchemaGroup.TranslatedTitle(childComplexity, args["lang"].(*string)), true + return e.complexity.PropertySchemaGroup.TranslatedTitle(childComplexity, args["lang"].(*language.Tag)), true case "Query.assets": if e.complexity.Query.Assets == nil { @@ -4990,7 +5048,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Assets(childComplexity, args["teamId"].(id.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)), true + return e.complexity.Query.Assets(childComplexity, args["teamId"].(gqlmodel.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)), true case "Query.checkProjectAlias": if e.complexity.Query.CheckProjectAlias == nil { @@ -5014,7 +5072,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.DatasetSchemas(childComplexity, args["sceneId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + return e.complexity.Query.DatasetSchemas(childComplexity, args["sceneId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true case "Query.datasets": if e.complexity.Query.Datasets == nil { @@ -5026,7 +5084,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Datasets(childComplexity, args["datasetSchemaId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + return e.complexity.Query.Datasets(childComplexity, args["datasetSchemaId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), 
args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true case "Query.dynamicDatasetSchemas": if e.complexity.Query.DynamicDatasetSchemas == nil { @@ -5038,7 +5096,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.DynamicDatasetSchemas(childComplexity, args["sceneId"].(id.ID)), true + return e.complexity.Query.DynamicDatasetSchemas(childComplexity, args["sceneId"].(gqlmodel.ID)), true case "Query.installablePlugins": if e.complexity.Query.InstallablePlugins == nil { @@ -5057,7 +5115,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Layer(childComplexity, args["id"].(id.ID)), true + return e.complexity.Query.Layer(childComplexity, args["id"].(gqlmodel.ID)), true case "Query.me": if e.complexity.Query.Me == nil { @@ -5076,7 +5134,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Node(childComplexity, args["id"].(id.ID), args["type"].(gqlmodel.NodeType)), true + return e.complexity.Query.Node(childComplexity, args["id"].(gqlmodel.ID), args["type"].(gqlmodel.NodeType)), true case "Query.nodes": if e.complexity.Query.Nodes == nil { @@ -5088,7 +5146,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Nodes(childComplexity, args["id"].([]*id.ID), args["type"].(gqlmodel.NodeType)), true + return e.complexity.Query.Nodes(childComplexity, args["id"].([]gqlmodel.ID), args["type"].(gqlmodel.NodeType)), true case "Query.plugin": if e.complexity.Query.Plugin == nil { @@ -5100,7 +5158,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Plugin(childComplexity, args["id"].(id.PluginID)), true + return e.complexity.Query.Plugin(childComplexity, 
args["id"].(gqlmodel.ID)), true case "Query.plugins": if e.complexity.Query.Plugins == nil { @@ -5112,7 +5170,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Plugins(childComplexity, args["id"].([]*id.PluginID)), true + return e.complexity.Query.Plugins(childComplexity, args["id"].([]gqlmodel.ID)), true case "Query.projects": if e.complexity.Query.Projects == nil { @@ -5124,7 +5182,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.Projects(childComplexity, args["teamId"].(id.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true + return e.complexity.Query.Projects(childComplexity, args["teamId"].(gqlmodel.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)), true case "Query.propertySchema": if e.complexity.Query.PropertySchema == nil { @@ -5136,7 +5194,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.PropertySchema(childComplexity, args["id"].(id.PropertySchemaID)), true + return e.complexity.Query.PropertySchema(childComplexity, args["id"].(gqlmodel.ID)), true case "Query.propertySchemas": if e.complexity.Query.PropertySchemas == nil { @@ -5148,7 +5206,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - return e.complexity.Query.PropertySchemas(childComplexity, args["id"].([]*id.PropertySchemaID)), true + return e.complexity.Query.PropertySchemas(childComplexity, args["id"].([]gqlmodel.ID)), true case "Query.scene": if e.complexity.Query.Scene == nil { @@ -5160,19 +5218,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return 0, false } - 
return e.complexity.Query.Scene(childComplexity, args["projectId"].(id.ID)), true - - case "Query.sceneLock": - if e.complexity.Query.SceneLock == nil { - break - } - - args, err := ec.field_Query_sceneLock_args(context.TODO(), rawArgs) - if err != nil { - return 0, false - } - - return e.complexity.Query.SceneLock(childComplexity, args["sceneId"].(id.ID)), true + return e.complexity.Query.Scene(childComplexity, args["projectId"].(gqlmodel.ID)), true case "Query.searchUser": if e.complexity.Query.SearchUser == nil { @@ -5352,13 +5398,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Scene.ID(childComplexity), true - case "Scene.lockMode": - if e.complexity.Scene.LockMode == nil { - break - } - - return e.complexity.Scene.LockMode(childComplexity), true - case "Scene.plugins": if e.complexity.Scene.Plugins == nil { break @@ -5548,27 +5587,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.SceneWidget.PropertyID(childComplexity), true - case "SearchedUser.userEmail": - if e.complexity.SearchedUser.UserEmail == nil { - break - } - - return e.complexity.SearchedUser.UserEmail(childComplexity), true - - case "SearchedUser.userId": - if e.complexity.SearchedUser.UserID == nil { - break - } - - return e.complexity.SearchedUser.UserID(childComplexity), true - - case "SearchedUser.userName": - if e.complexity.SearchedUser.UserName == nil { - break - } - - return e.complexity.SearchedUser.UserName(childComplexity), true - case "SignupPayload.team": if e.complexity.SignupPayload.Team == nil { break @@ -5915,12 +5933,12 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.UpdateLayerPayload.Layer(childComplexity), true - case "UpdateMePayload.user": - if e.complexity.UpdateMePayload.User == nil { + case "UpdateMePayload.me": + if e.complexity.UpdateMePayload.Me == nil { break } - return 
e.complexity.UpdateMePayload.User(childComplexity), true + return e.complexity.UpdateMePayload.Me(childComplexity), true case "UpdateMemberOfTeamPayload.team": if e.complexity.UpdateMemberOfTeamPayload.Team == nil { @@ -5999,13 +6017,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.UploadPluginPayload.ScenePlugin(childComplexity), true - case "User.auths": - if e.complexity.User.Auths == nil { - break - } - - return e.complexity.User.Auths(childComplexity), true - case "User.email": if e.complexity.User.Email == nil { break @@ -6020,27 +6031,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.User.ID(childComplexity), true - case "User.lang": - if e.complexity.User.Lang == nil { - break - } - - return e.complexity.User.Lang(childComplexity), true - - case "User.myTeam": - if e.complexity.User.MyTeam == nil { - break - } - - return e.complexity.User.MyTeam(childComplexity), true - - case "User.myTeamId": - if e.complexity.User.MyTeamID == nil { - break - } - - return e.complexity.User.MyTeamID(childComplexity), true - case "User.name": if e.complexity.User.Name == nil { break @@ -6048,20 +6038,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.User.Name(childComplexity), true - case "User.teams": - if e.complexity.User.Teams == nil { - break - } - - return e.complexity.User.Teams(childComplexity), true - - case "User.theme": - if e.complexity.User.Theme == nil { - break - } - - return e.complexity.User.Theme(childComplexity), true - case "WidgetAlignSystem.inner": if e.complexity.WidgetAlignSystem.Inner == nil { break @@ -6310,12 +6286,6 @@ scalar DateTime scalar URL scalar Lang scalar FileSize -scalar PluginID -scalar PluginExtensionID -scalar PropertySchemaID -scalar PropertySchemaGroupID -scalar PropertySchemaFieldID -scalar DatasetSchemaFieldID scalar TranslatedString type LatLng { @@ -6424,20 
+6394,20 @@ type User implements Node { id: ID! name: String! email: String! +} + +type Me { + id: ID! + name: String! + email: String! lang: Lang! theme: Theme! - myTeamId: ID! auths: [String!]! + myTeamId: ID! teams: [Team!]! @goField(forceResolver: true) myTeam: Team! @goField(forceResolver: true) } -type SearchedUser { - userId: ID! - userName: String! - userEmail: String! -} - type ProjectAliasAvailability { alias: String! available: Boolean! @@ -6517,21 +6487,21 @@ enum PublishmentStatus { # Plugin type Plugin { - id: PluginID! + id: ID! sceneId: ID name: String! version: String! description: String! author: String! repositoryUrl: String! - propertySchemaId: PropertySchemaID + propertySchemaId: ID extensions: [PluginExtension!]! scenePlugin(sceneId: ID): ScenePlugin allTranslatedDescription: TranslatedString allTranslatedName: TranslatedString scene: Scene @goField(forceResolver: true) - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) propertySchema: PropertySchema @goField(forceResolver: true) } @@ -6593,8 +6563,8 @@ enum PluginExtensionType { } type PluginExtension { - extensionId: PluginExtensionID! - pluginId: PluginID! + extensionId: ID! + pluginId: ID! type: PluginExtensionType! name: String! description: String! @@ -6602,14 +6572,14 @@ type PluginExtension { singleOnly: Boolean widgetLayout: WidgetLayout visualizer: Visualizer - propertySchemaId: PropertySchemaID! + propertySchemaId: ID! allTranslatedName: TranslatedString allTranslatedDescription: TranslatedString plugin: Plugin @goField(forceResolver: true) sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) propertySchema: PropertySchema @goField(forceResolver: true) - translatedName(lang: String): String! 
@goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) } # Scene @@ -6630,7 +6600,6 @@ type Scene implements Node { team: Team @goField(forceResolver: true) property: Property @goField(forceResolver: true) rootLayer: LayerGroup @goField(forceResolver: true) - lockMode: SceneLockMode! @goField(forceResolver: true) datasetSchemas( first: Int last: Int @@ -6642,18 +6611,10 @@ type Scene implements Node { clusters: [Cluster!]! } -enum SceneLockMode { - FREE - PENDING - DATASET_SYNCING - PLUGIN_UPGRADING - PUBLISHING -} - type SceneWidget { id: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! propertyId: ID! enabled: Boolean! extended: Boolean! @@ -6663,7 +6624,7 @@ type SceneWidget { } type ScenePlugin { - pluginId: PluginID! + pluginId: ID! propertyId: ID plugin: Plugin @goField(forceResolver: true) property: Property @goField(forceResolver: true) @@ -6694,36 +6655,36 @@ type WidgetArea { # Property type PropertySchema { - id: PropertySchemaID! + id: ID! groups: [PropertySchemaGroup!]! linkableFields: PropertyLinkableFields! } type PropertyLinkableFields { - schemaId: PropertySchemaID! - latlng: PropertySchemaFieldID - url: PropertySchemaFieldID + schemaId: ID! + latlng: ID + url: ID latlngField: PropertySchemaField @goField(forceResolver: true) urlField: PropertySchemaField @goField(forceResolver: true) schema: PropertySchema @goField(forceResolver: true) } type PropertySchemaGroup { - schemaGroupId: PropertySchemaGroupID! - schemaId: PropertySchemaID! + schemaGroupId: ID! + schemaId: ID! fields: [PropertySchemaField!]! isList: Boolean! 
isAvailableIf: PropertyCondition title: String allTranslatedTitle: TranslatedString - representativeFieldId: PropertySchemaFieldID + representativeFieldId: ID representativeField: PropertySchemaField schema: PropertySchema @goField(forceResolver: true) - translatedTitle(lang: String): String! @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) } type PropertySchemaField { - fieldId: PropertySchemaFieldID! + fieldId: ID! type: ValueType! title: String! description: String! @@ -6737,8 +6698,8 @@ type PropertySchemaField { isAvailableIf: PropertyCondition allTranslatedTitle: TranslatedString allTranslatedDescription: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) } enum PropertySchemaFieldUI { @@ -6758,18 +6719,18 @@ type PropertySchemaFieldChoice { title: String! icon: String allTranslatedTitle: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) } type PropertyCondition { - fieldId: PropertySchemaFieldID! + fieldId: ID! type: ValueType! value: Any } type Property implements Node { id: ID! - schemaId: PropertySchemaID! + schemaId: ID! items: [PropertyItem!]! schema: PropertySchema @goField(forceResolver: true) layer: Layer @goField(forceResolver: true) @@ -6780,8 +6741,8 @@ union PropertyItem = PropertyGroup | PropertyGroupList type PropertyGroup { id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaGroupID! + schemaId: ID! + schemaGroupId: ID! fields: [PropertyField!]! 
schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -6789,8 +6750,8 @@ type PropertyGroup { type PropertyGroupList { id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaGroupID! + schemaId: ID! + schemaGroupId: ID! groups: [PropertyGroup!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -6799,8 +6760,8 @@ type PropertyGroupList { type PropertyField { id: String! parentId: ID! - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! + schemaId: ID! + fieldId: ID! links: [PropertyFieldLink!] type: ValueType! value: Any @@ -6824,7 +6785,7 @@ type MergedProperty { originalId: ID parentId: ID # note: schemaId will not always be set - schemaId: PropertySchemaID + schemaId: ID linkedDatasetId: ID original: Property @goField(forceResolver: true) parent: Property @goField(forceResolver: true) @@ -6838,9 +6799,9 @@ type MergedPropertyGroup { parentPropertyId: ID originalId: ID parentId: ID - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! # note: schemaId will not always be set - schemaId: PropertySchemaID + schemaId: ID linkedDatasetId: ID fields: [MergedPropertyField!]! groups: [MergedPropertyGroup!]! @@ -6853,8 +6814,8 @@ type MergedPropertyGroup { } type MergedPropertyField { - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! + schemaId: ID! + fieldId: ID! value: Any type: ValueType! links: [PropertyFieldLink!] @@ -6923,8 +6884,8 @@ interface Layer { name: String! isVisible: Boolean! propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID infobox: Infobox # parentId will not be always set parentId: ID @@ -6950,8 +6911,8 @@ type LayerItem implements Layer { name: String! isVisible: Boolean! 
propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID infobox: Infobox # parentId will not be always set parentId: ID @@ -6973,8 +6934,8 @@ type LayerGroup implements Layer { name: String! isVisible: Boolean! propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID infobox: Infobox # parentId will not be always set parentId: ID @@ -7010,8 +6971,8 @@ type InfoboxField { sceneId: ID! layerId: ID! propertyId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! linkedDatasetId: ID layer: Layer! @goField(forceResolver: true) infobox: Infobox! @goField(forceResolver: true) @@ -7061,8 +7022,8 @@ type MergedInfobox { type MergedInfoboxField { originalId: ID! sceneID: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! property: MergedProperty plugin: Plugin @goField(forceResolver: true) extension: PluginExtension @goField(forceResolver: true) @@ -7231,8 +7192,8 @@ input WidgetLocationInput { input AddWidgetInput { sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! } input UpdateWidgetInput { @@ -7257,18 +7218,18 @@ input RemoveWidgetInput { input InstallPluginInput { sceneId: ID! - pluginId: PluginID! + pluginId: ID! } input UninstallPluginInput { sceneId: ID! - pluginId: PluginID! + pluginId: ID! } input UpgradePluginInput { sceneId: ID! - pluginId: PluginID! - toPluginId: PluginID! + pluginId: ID! + toPluginId: ID! } input SyncDatasetInput { @@ -7278,33 +7239,33 @@ input SyncDatasetInput { input UpdatePropertyValueInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! value: Any type: ValueType! } input RemovePropertyFieldInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! 
+ fieldId: ID! } input UploadFileToPropertyInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! file: Upload! } input LinkDatasetToPropertyValueInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! datasetSchemaIds: [ID!]! datasetSchemaFieldIds: [ID!]! datasetIds: [ID!] @@ -7312,14 +7273,14 @@ input LinkDatasetToPropertyValueInput { input UnlinkPropertyValueInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! } input AddPropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! index: Int nameFieldValue: Any nameFieldType: ValueType @@ -7327,20 +7288,20 @@ input AddPropertyItemInput { input MovePropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! itemId: ID! index: Int! } input RemovePropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! itemId: ID! } input UpdatePropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! operations: [UpdatePropertyItemOperationInput!]! } @@ -7354,8 +7315,8 @@ input UpdatePropertyItemOperationInput { input AddLayerItemInput { parentLayerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! index: Int name: String lat: Float @@ -7364,12 +7325,12 @@ input AddLayerItemInput { input AddLayerGroupInput { parentLayerId: ID! - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID index: Int linkedDatasetSchemaID: ID name: String - representativeFieldId: DatasetSchemaFieldID + representativeFieldId: ID } input RemoveLayerInput { @@ -7398,8 +7359,8 @@ input RemoveInfoboxInput { input AddInfoboxFieldInput { layerId: ID! - pluginId: PluginID! 
- extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! index: Int } @@ -7536,7 +7497,7 @@ type RemoveAssetPayload { } type UpdateMePayload { - user: User! + me: Me! } type SignupPayload { @@ -7615,7 +7576,7 @@ type InstallPluginPayload { } type UninstallPluginPayload { - pluginId: PluginID! + pluginId: ID! scene: Scene! } @@ -7825,13 +7786,13 @@ type DatasetEdge { # Query type Query { - me: User + me: Me node(id: ID!, type: NodeType!): Node nodes(id: [ID!]!, type: NodeType!): [Node]! - propertySchema(id: PropertySchemaID!): PropertySchema - propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! - plugin(id: PluginID!): Plugin - plugins(id: [PluginID!]!): [Plugin!]! + propertySchema(id: ID!): PropertySchema + propertySchemas(id: [ID!]!): [PropertySchema!]! + plugin(id: ID!): Plugin + plugins(id: [ID!]!): [Plugin!]! layer(id: ID!): Layer scene(projectId: ID!): Scene assets( @@ -7862,9 +7823,8 @@ type Query { after: Cursor before: Cursor ): DatasetConnection! - sceneLock(sceneId: ID!): SceneLockMode dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! - searchUser(nameOrEmail: String!): SearchedUser + searchUser(nameOrEmail: String!): User checkProjectAlias(alias: String!): ProjectAliasAvailability! installablePlugins: [PluginMetadata!]! 
} @@ -8990,10 +8950,10 @@ func (ec *executionContext) field_Mutation_uploadPlugin_args(ctx context.Context func (ec *executionContext) field_PluginExtension_sceneWidget_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["sceneId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9005,10 +8965,10 @@ func (ec *executionContext) field_PluginExtension_sceneWidget_args(ctx context.C func (ec *executionContext) field_PluginExtension_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9020,10 +8980,10 @@ func (ec *executionContext) field_PluginExtension_translatedDescription_args(ctx func (ec *executionContext) field_PluginExtension_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ 
-9035,10 +8995,10 @@ func (ec *executionContext) field_PluginExtension_translatedName_args(ctx contex func (ec *executionContext) field_Plugin_scenePlugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *id.ID + var arg0 *gqlmodel.ID if tmp, ok := rawArgs["sceneId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - arg0, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9050,10 +9010,10 @@ func (ec *executionContext) field_Plugin_scenePlugin_args(ctx context.Context, r func (ec *executionContext) field_Plugin_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9065,10 +9025,10 @@ func (ec *executionContext) field_Plugin_translatedDescription_args(ctx context. 
func (ec *executionContext) field_Plugin_translatedName_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9080,10 +9040,10 @@ func (ec *executionContext) field_Plugin_translatedName_args(ctx context.Context func (ec *executionContext) field_PropertySchemaFieldChoice_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9095,10 +9055,10 @@ func (ec *executionContext) field_PropertySchemaFieldChoice_translatedTitle_args func (ec *executionContext) field_PropertySchemaField_translatedDescription_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9110,10 +9070,10 @@ func (ec *executionContext) field_PropertySchemaField_translatedDescription_args func (ec *executionContext) field_PropertySchemaField_translatedTitle_args(ctx 
context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9125,10 +9085,10 @@ func (ec *executionContext) field_PropertySchemaField_translatedTitle_args(ctx c func (ec *executionContext) field_PropertySchemaGroup_translatedTitle_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 *string + var arg0 *language.Tag if tmp, ok := rawArgs["lang"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("lang")) - arg0, err = ec.unmarshalOString2แš–string(ctx, tmp) + arg0, err = ec.unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, tmp) if err != nil { return nil, err } @@ -9155,10 +9115,10 @@ func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs func (ec *executionContext) field_Query_assets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["teamId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9212,10 +9172,10 @@ func (ec *executionContext) field_Query_checkProjectAlias_args(ctx context.Conte func (ec *executionContext) field_Query_datasetSchemas_args(ctx context.Context, rawArgs 
map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["sceneId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9263,10 +9223,10 @@ func (ec *executionContext) field_Query_datasetSchemas_args(ctx context.Context, func (ec *executionContext) field_Query_datasets_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["datasetSchemaId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9314,10 +9274,10 @@ func (ec *executionContext) field_Query_datasets_args(ctx context.Context, rawAr func (ec *executionContext) field_Query_dynamicDatasetSchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["sceneId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9329,10 +9289,10 @@ func (ec 
*executionContext) field_Query_dynamicDatasetSchemas_args(ctx context.C func (ec *executionContext) field_Query_layer_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9344,10 +9304,10 @@ func (ec *executionContext) field_Query_layer_args(ctx context.Context, rawArgs func (ec *executionContext) field_Query_node_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9368,10 +9328,10 @@ func (ec *executionContext) field_Query_node_args(ctx context.Context, rawArgs m func (ec *executionContext) field_Query_nodes_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 []*id.ID + var arg0 []gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, tmp) + arg0, err = 
ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, tmp) if err != nil { return nil, err } @@ -9392,10 +9352,10 @@ func (ec *executionContext) field_Query_nodes_args(ctx context.Context, rawArgs func (ec *executionContext) field_Query_plugin_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.PluginID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9407,10 +9367,10 @@ func (ec *executionContext) field_Query_plugin_args(ctx context.Context, rawArgs func (ec *executionContext) field_Query_plugins_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 []*id.PluginID + var arg0 []gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginIDแš„(ctx, tmp) + arg0, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, tmp) if err != nil { return nil, err } @@ -9422,10 +9382,10 @@ func (ec *executionContext) field_Query_plugins_args(ctx context.Context, rawArg func (ec *executionContext) field_Query_projects_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["teamId"]; ok { ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("teamId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9482,10 +9442,10 @@ func (ec *executionContext) field_Query_projects_args(ctx context.Context, rawAr func (ec *executionContext) field_Query_propertySchema_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.PropertySchemaID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -9497,10 +9457,10 @@ func (ec *executionContext) field_Query_propertySchema_args(ctx context.Context, func (ec *executionContext) field_Query_propertySchemas_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 []*id.PropertySchemaID + var arg0 []gqlmodel.ID if tmp, ok := rawArgs["id"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("id")) - arg0, err = ec.unmarshalNPropertySchemaID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaIDแš„(ctx, tmp) + arg0, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, tmp) if err != nil { return nil, err } @@ -9509,28 +9469,13 @@ func (ec *executionContext) field_Query_propertySchemas_args(ctx context.Context return args, nil } -func (ec *executionContext) field_Query_sceneLock_args(ctx 
context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { - var err error - args := map[string]interface{}{} - var arg0 id.ID - if tmp, ok := rawArgs["sceneId"]; ok { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) - if err != nil { - return nil, err - } - } - args["sceneId"] = arg0 - return args, nil -} - func (ec *executionContext) field_Query_scene_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { var err error args := map[string]interface{}{} - var arg0 id.ID + var arg0 gqlmodel.ID if tmp, ok := rawArgs["projectId"]; ok { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) - arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, tmp) + arg0, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, tmp) if err != nil { return nil, err } @@ -10334,9 +10279,9 @@ func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.Collect } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { @@ -10404,9 +10349,9 @@ func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { @@ -11133,9 +11078,9 @@ func (ec *executionContext) _Cluster_id(ctx context.Context, field graphql.Colle } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { @@ -11203,9 +11148,9 @@ func (ec *executionContext) _Cluster_propertyId(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { @@ -11512,9 +11457,9 @@ func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.Colle } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.Dataset) (ret graphql.Marshaler) { @@ -11582,9 +11527,9 @@ func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { @@ -11923,9 +11868,9 @@ func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { @@ -11958,9 +11903,9 @@ func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field gr } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { @@ -12191,9 +12136,9 @@ func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(id.ID) + 
res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { @@ -12296,9 +12241,9 @@ func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field gr } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { @@ -12363,9 +12308,9 @@ func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Con if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { @@ -12743,9 +12688,9 @@ func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field gr } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { @@ -12883,9 +12828,9 @@ func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { @@ -12915,9 +12860,9 @@ func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { @@ -13014,9 +12959,9 @@ func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, 
res) } func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteProjectPayload) (ret graphql.Marshaler) { @@ -13049,9 +12994,9 @@ func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteTeamPayload) (ret graphql.Marshaler) { @@ -13084,9 +13029,9 @@ func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _DetachTagFromLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DetachTagFromLayerPayload) (ret graphql.Marshaler) { @@ -13294,9 +13239,9 @@ func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql. 
} return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { @@ -13329,9 +13274,9 @@ func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql. } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { @@ -13364,9 +13309,9 @@ func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { @@ -13431,9 +13376,9 @@ func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, 
field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { @@ -13629,9 +13574,9 @@ func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql. } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { @@ -13664,9 +13609,9 @@ func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { @@ -13699,9 +13644,9 @@ func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) 
_InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { @@ -13734,9 +13679,9 @@ func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { @@ -13769,9 +13714,9 @@ func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field gr } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { @@ -13804,9 +13749,9 @@ func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field } return graphql.Null } - res := resTmp.(id.PluginExtensionID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret 
graphql.Marshaler) { @@ -13836,9 +13781,9 @@ func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { @@ -14410,9 +14355,9 @@ func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.Co } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14445,9 +14390,9 @@ func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14547,9 +14492,9 @@ func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := 
resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14579,9 +14524,9 @@ func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PluginID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14611,9 +14556,9 @@ func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PluginExtensionID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14675,9 +14620,9 @@ func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = 
res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14707,9 +14652,9 @@ func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Contex if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -14777,9 +14722,9 @@ func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field grap } return graphql.Null } - res := resTmp.([]*id.ID) + res := resTmp.([]gqlmodel.ID) fc.Result = res - return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) + return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { @@ -15106,9 +15051,9 @@ func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15141,9 +15086,9 @@ func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15243,9 +15188,9 @@ func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15275,9 +15220,9 @@ func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PluginID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec 
*executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15307,9 +15252,9 @@ func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field gr if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PluginExtensionID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15371,9 +15316,9 @@ func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15403,9 +15348,9 @@ func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerItem_tags(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { @@ -15729,9 +15674,9 @@ func (ec *executionContext) _LayerTagGroup_tagId(ctx context.Context, field grap } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerTagGroup_children(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { @@ -15831,9 +15776,9 @@ func (ec *executionContext) _LayerTagItem_tagId(ctx context.Context, field graph } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _LayerTagItem_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { @@ -15868,7 +15813,7 @@ func (ec *executionContext) _LayerTagItem_tag(ctx context.Context, field graphql return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15876,7 +15821,7 @@ func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field gr } 
}() fc := &graphql.FieldContext{ - Object: "MergedInfobox", + Object: "Me", Field: field, Args: nil, IsMethod: false, @@ -15886,7 +15831,7 @@ func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field gr ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.SceneID, nil + return obj.ID, nil }) if err != nil { ec.Error(ctx, err) @@ -15898,12 +15843,12 @@ func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field gr } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15911,7 +15856,7 @@ func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field g } }() fc := &graphql.FieldContext{ - Object: "MergedInfobox", + Object: "Me", Field: field, Args: nil, IsMethod: false, @@ -15921,21 +15866,24 @@ func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field g ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Property, nil + return obj.Name, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if 
!graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(*gqlmodel.MergedProperty) + res := resTmp.(string) fc.Result = res - return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) + return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15943,7 +15891,7 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra } }() fc := &graphql.FieldContext{ - Object: "MergedInfobox", + Object: "Me", Field: field, Args: nil, IsMethod: false, @@ -15953,7 +15901,7 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Fields, nil + return obj.Email, nil }) if err != nil { ec.Error(ctx, err) @@ -15965,12 +15913,12 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra } return graphql.Null } - res := resTmp.([]*gqlmodel.MergedInfoboxField) + res := resTmp.(string) fc.Result = res - return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) + return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret 
graphql.Marshaler) { +func (ec *executionContext) _Me_lang(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -15978,31 +15926,34 @@ func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field grap } }() fc := &graphql.FieldContext{ - Object: "MergedInfobox", + Object: "Me", Field: field, Args: nil, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedInfobox().Scene(rctx, obj) + return obj.Lang, nil }) if err != nil { ec.Error(ctx, err) return graphql.Null } if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } return graphql.Null } - res := resTmp.(*gqlmodel.Scene) + res := resTmp.(language.Tag) fc.Result = res - return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) + return ec.marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_theme(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16010,7 +15961,7 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "Me", Field: field, Args: nil, IsMethod: false, @@ -16020,7 +15971,7 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx 
context.Context, ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.OriginalID, nil + return obj.Theme, nil }) if err != nil { ec.Error(ctx, err) @@ -16032,12 +15983,12 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.Theme) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16045,7 +15996,7 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "Me", Field: field, Args: nil, IsMethod: false, @@ -16055,7 +16006,7 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.SceneID, nil + return obj.Auths, nil }) if err != nil { ec.Error(ctx, err) @@ -16067,12 +16018,12 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.([]string) fc.Result = res - return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16080,7 +16031,7 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "Me", Field: field, Args: nil, IsMethod: false, @@ -16090,7 +16041,7 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.PluginID, nil + return obj.MyTeamID, nil }) if err != nil { ec.Error(ctx, err) @@ -16102,12 +16053,12 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_teams(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) @@ -16115,7 +16066,77 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "Me", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Me().Teams(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _Me_myTeam(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "Me", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Me().MyTeam(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*gqlmodel.Team) + fc.Result = res + return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) +} + +func (ec *executionContext) 
_MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", Field: field, Args: nil, IsMethod: false, @@ -16125,7 +16146,7 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.ExtensionID, nil + return obj.SceneID, nil }) if err != nil { ec.Error(ctx, err) @@ -16137,12 +16158,12 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.PluginExtensionID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { +func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16150,7 +16171,246 @@ func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, fi } }() fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Property, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.MergedProperty) + fc.Result = res + return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]*gqlmodel.MergedInfoboxField) + fc.Result = res + return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: true, + } + + ctx = 
graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedInfobox().Scene(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.Scene) + fc.Result = res + return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OriginalID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, 
+ IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SceneID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PluginID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + 
Object: "MergedInfoboxField", + Field: field, + Args: nil, + IsMethod: false, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.ExtensionID, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "MergedInfoboxField", Field: field, Args: nil, IsMethod: false, @@ -16332,9 +16592,9 @@ func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { @@ -16364,9 +16624,9 @@ func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { @@ -16399,9 +16659,9 @@ func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field grap } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { @@ -16591,9 +16851,9 @@ func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, fiel if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { @@ -16623,9 +16883,9 @@ func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { @@ -16655,9 +16915,9 @@ func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PropertySchemaID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { @@ -16687,9 +16947,9 @@ func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedProperty_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { @@ -16885,9 +17145,9 @@ func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, f } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { @@ -16920,9 +17180,9 @@ func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { @@ -17182,9 +17442,9 @@ func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context. 
if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17214,9 +17474,9 @@ func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Co if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17246,9 +17506,9 @@ func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17278,9 +17538,9 @@ func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := 
resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17313,9 +17573,9 @@ func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Conte } return graphql.Null } - res := resTmp.(id.PropertySchemaGroupID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17345,9 +17605,9 @@ func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PropertySchemaID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17377,9 +17637,9 @@ func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Con if resTmp == nil { return 
graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { @@ -17674,9 +17934,9 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context. } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { @@ -17779,9 +18039,9 @@ func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { @@ -20549,9 +20809,9 @@ func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.Collec } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return 
ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { @@ -20581,9 +20841,9 @@ func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.C if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { @@ -20788,9 +21048,9 @@ func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PropertySchemaID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { @@ -20988,7 +21248,7 @@ func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Plugin().TranslatedName(rctx, obj, 
args["lang"].(*string)) + return ec.resolvers.Plugin().TranslatedName(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21030,7 +21290,7 @@ func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, f fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Plugin().TranslatedDescription(rctx, obj, args["lang"].(*string)) + return ec.resolvers.Plugin().TranslatedDescription(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21109,9 +21369,9 @@ func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.PluginExtensionID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { @@ -21144,9 +21404,9 @@ func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { @@ -21415,9 +21675,9 @@ func (ec *executionContext) 
_PluginExtension_propertySchemaId(ctx context.Contex } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { @@ -21541,7 +21801,7 @@ func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, fi fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PluginExtension().SceneWidget(rctx, obj, args["sceneId"].(id.ID)) + return ec.resolvers.PluginExtension().SceneWidget(rctx, obj, args["sceneId"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -21612,7 +21872,7 @@ func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PluginExtension().TranslatedName(rctx, obj, args["lang"].(*string)) + return ec.resolvers.PluginExtension().TranslatedName(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21654,7 +21914,7 @@ func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.C fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PluginExtension().TranslatedDescription(rctx, obj, args["lang"].(*string)) + return 
ec.resolvers.PluginExtension().TranslatedDescription(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21876,9 +22136,9 @@ func (ec *executionContext) _Project_id(ctx context.Context, field graphql.Colle } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Project_isArchived(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { @@ -22430,9 +22690,9 @@ func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { @@ -22911,9 +23171,9 @@ func (ec *executionContext) _Property_id(ctx context.Context, field graphql.Coll } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { @@ -22946,9 +23206,9 @@ func (ec *executionContext) _Property_schemaId(ctx 
context.Context, field graphq } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Property_items(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { @@ -23112,9 +23372,9 @@ func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { @@ -23249,9 +23509,9 @@ func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { @@ -23284,9 +23544,9 @@ func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.PropertySchemaID) 
+ res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { @@ -23319,9 +23579,9 @@ func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field gr } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { @@ -23578,9 +23838,9 @@ func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, fi if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { @@ -23613,9 +23873,9 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Conte } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { @@ -23648,9 +23908,9 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context. } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { @@ -23878,9 +24138,9 @@ func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { @@ -23913,9 +24173,9 @@ func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { @@ -23948,9 +24208,9 @@ func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.PropertySchemaGroupID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { @@ -24082,9 +24342,9 @@ func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { @@ -24117,9 +24377,9 @@ func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, fie } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { @@ -24152,9 +24412,9 @@ func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context } return graphql.Null } - res := resTmp.(id.PropertySchemaGroupID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { @@ -24353,9 +24613,9 @@ func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { @@ -24385,9 +24645,9 @@ func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PropertySchemaFieldID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, 
field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { @@ -24417,9 +24677,9 @@ func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, fie if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PropertySchemaFieldID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { @@ -24548,9 +24808,9 @@ func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { @@ -24653,9 +24913,9 @@ func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, fi } return graphql.Null } - res := resTmp.(id.PropertySchemaFieldID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return 
ec.marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { @@ -25108,7 +25368,7 @@ func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Con fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaField().TranslatedTitle(rctx, obj, args["lang"].(*string)) + return ec.resolvers.PropertySchemaField().TranslatedTitle(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25150,7 +25410,7 @@ func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx conte fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaField().TranslatedDescription(rctx, obj, args["lang"].(*string)) + return ec.resolvers.PropertySchemaField().TranslatedDescription(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25326,7 +25586,7 @@ func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx conte fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaFieldChoice().TranslatedTitle(rctx, obj, args["lang"].(*string)) + return ec.resolvers.PropertySchemaFieldChoice().TranslatedTitle(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25373,9 
+25633,9 @@ func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Conte } return graphql.Null } - res := resTmp.(id.PropertySchemaGroupID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { @@ -25408,9 +25668,9 @@ func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, f } return graphql.Null } - res := resTmp.(id.PropertySchemaID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { @@ -25606,9 +25866,9 @@ func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx conte if resTmp == nil { return graphql.Null } - res := resTmp.(*id.PropertySchemaFieldID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { @@ -25700,7 +25960,7 @@ func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Con fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaGroup().TranslatedTitle(rctx, obj, args["lang"].(*string)) + return ec.resolvers.PropertySchemaGroup().TranslatedTitle(rctx, obj, args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25744,9 +26004,9 @@ func (ec *executionContext) _Query_me(ctx context.Context, field graphql.Collect if resTmp == nil { return graphql.Null } - res := resTmp.(*gqlmodel.User) + res := resTmp.(*gqlmodel.Me) fc.Result = res - return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) + return ec.marshalOMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx, field.Selections, res) } func (ec *executionContext) _Query_node(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -25774,7 +26034,7 @@ func (ec *executionContext) _Query_node(ctx context.Context, field graphql.Colle fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Node(rctx, args["id"].(id.ID), args["type"].(gqlmodel.NodeType)) + return ec.resolvers.Query().Node(rctx, args["id"].(gqlmodel.ID), args["type"].(gqlmodel.NodeType)) }) if err != nil { ec.Error(ctx, err) @@ -25813,7 +26073,7 @@ func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.Coll fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return 
ec.resolvers.Query().Nodes(rctx, args["id"].([]*id.ID), args["type"].(gqlmodel.NodeType)) + return ec.resolvers.Query().Nodes(rctx, args["id"].([]gqlmodel.ID), args["type"].(gqlmodel.NodeType)) }) if err != nil { ec.Error(ctx, err) @@ -25855,7 +26115,7 @@ func (ec *executionContext) _Query_propertySchema(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().PropertySchema(rctx, args["id"].(id.PropertySchemaID)) + return ec.resolvers.Query().PropertySchema(rctx, args["id"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -25894,7 +26154,7 @@ func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field gr fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().PropertySchemas(rctx, args["id"].([]*id.PropertySchemaID)) + return ec.resolvers.Query().PropertySchemas(rctx, args["id"].([]gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -25936,7 +26196,7 @@ func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.Col fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Plugin(rctx, args["id"].(id.PluginID)) + return ec.resolvers.Query().Plugin(rctx, args["id"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -25975,7 +26235,7 @@ func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.Co fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Plugins(rctx, args["id"].([]*id.PluginID)) + return 
ec.resolvers.Query().Plugins(rctx, args["id"].([]gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26017,7 +26277,7 @@ func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.Coll fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Layer(rctx, args["id"].(id.ID)) + return ec.resolvers.Query().Layer(rctx, args["id"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26056,7 +26316,7 @@ func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.Coll fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Scene(rctx, args["projectId"].(id.ID)) + return ec.resolvers.Query().Scene(rctx, args["projectId"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26095,7 +26355,7 @@ func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.Col fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Assets(rctx, args["teamId"].(id.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)) + return ec.resolvers.Query().Assets(rctx, args["teamId"].(gqlmodel.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)) }) if err != nil { ec.Error(ctx, err) @@ -26137,7 +26397,7 @@ func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.C fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Projects(rctx, 
args["teamId"].(id.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().Projects(rctx, args["teamId"].(gqlmodel.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -26179,7 +26439,7 @@ func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field gra fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().DatasetSchemas(rctx, args["sceneId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().DatasetSchemas(rctx, args["sceneId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -26221,7 +26481,7 @@ func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.C fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Datasets(rctx, args["datasetSchemaId"].(id.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().Datasets(rctx, args["datasetSchemaId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -26238,45 +26498,6 @@ func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.C return 
ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Query_sceneLock(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "Query", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_sceneLock_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().SceneLock(rctx, args["sceneId"].(id.ID)) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.SceneLockMode) - fc.Result = res - return ec.marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx, field.Selections, res) -} - func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -26302,7 +26523,7 @@ func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, fi fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().DynamicDatasetSchemas(rctx, args["sceneId"].(id.ID)) + return ec.resolvers.Query().DynamicDatasetSchemas(rctx, args["sceneId"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26353,9 
+26574,9 @@ func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*gqlmodel.SearchedUser) + res := resTmp.(*gqlmodel.User) fc.Result = res - return ec.marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSearchedUser(ctx, field.Selections, res) + return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { @@ -26676,9 +26897,9 @@ func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, fie } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _RemoveClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { @@ -26746,9 +26967,9 @@ func (ec *executionContext) _RemoveClusterPayload_clusterId(ctx context.Context, } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { @@ -26781,9 +27002,9 @@ func (ec *executionContext) 
_RemoveDatasetSchemaPayload_schemaId(ctx context.Con } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { @@ -26816,9 +27037,9 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx contex } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { @@ -26921,9 +27142,9 @@ func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, fie } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { @@ -27026,9 +27247,9 @@ func (ec *executionContext) _RemoveTagPayload_tagId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.ID) + res := 
resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { @@ -27131,9 +27352,9 @@ func (ec *executionContext) _RemoveWidgetPayload_widgetId(ctx context.Context, f } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27166,9 +27387,9 @@ func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.Collect } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27201,9 +27422,9 @@ func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql. 
} return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27236,9 +27457,9 @@ func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27271,9 +27492,9 @@ func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27376,9 +27597,9 @@ func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27646,41 +27867,6 @@ func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql. return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_lockMode(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Scene().LockMode(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(gqlmodel.SceneLockMode) - fc.Result = res - return ec.marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx, field.Selections, res) -} - func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -27753,9 +27939,9 @@ func (ec *executionContext) _Scene_tagIds(ctx context.Context, field graphql.Col } return graphql.Null } - res := resTmp.([]*id.ID) + res := resTmp.([]gqlmodel.ID) fc.Result = res - return 
ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) + return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } func (ec *executionContext) _Scene_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { @@ -27858,9 +28044,9 @@ func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { @@ -27890,9 +28076,9 @@ func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field g if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { @@ -27989,9 +28175,9 @@ func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.C } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { @@ -28024,9 +28210,9 @@ func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { @@ -28059,9 +28245,9 @@ func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field } return graphql.Null } - res := resTmp.(id.PluginExtensionID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { @@ -28094,9 +28280,9 @@ func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, 
res) } func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { @@ -28265,111 +28451,6 @@ func (ec *executionContext) _SceneWidget_property(ctx context.Context, field gra return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _SearchedUser_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SearchedUser) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "SearchedUser", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.UserID, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(id.ID) - fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) -} - -func (ec *executionContext) _SearchedUser_userName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SearchedUser) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "SearchedUser", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in 
children - return obj.UserName, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - -func (ec *executionContext) _SearchedUser_userEmail(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SearchedUser) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "SearchedUser", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.UserEmail, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -28470,9 +28551,9 @@ func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, fie } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _SyncDatasetPayload_url(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { @@ -28610,9 +28691,9 @@ func (ec *executionContext) _TagGroup_id(ctx context.Context, field graphql.Coll } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { @@ -28645,9 +28726,9 @@ func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { @@ -28712,9 +28793,9 @@ func (ec *executionContext) _TagGroup_tagIds(ctx context.Context, field graphql. 
if resTmp == nil { return graphql.Null } - res := resTmp.([]*id.ID) + res := resTmp.([]gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) + return ec.marshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { @@ -28849,9 +28930,9 @@ func (ec *executionContext) _TagItem_id(ctx context.Context, field graphql.Colle } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { @@ -28884,9 +28965,9 @@ func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql. 
} return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { @@ -28951,9 +29032,9 @@ func (ec *executionContext) _TagItem_parentId(ctx context.Context, field graphql if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { @@ -28983,9 +29064,9 @@ func (ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { @@ -29015,9 +29096,9 @@ func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { @@ -29047,9 +29128,9 @@ func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, f if resTmp == nil { return graphql.Null } - res := resTmp.(*id.ID) + res := resTmp.(*gqlmodel.ID) fc.Result = res - return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TagItem_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { @@ -29245,9 +29326,9 @@ func (ec *executionContext) _Team_id(ctx context.Context, field graphql.Collecte } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { @@ -29469,9 +29550,9 @@ func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphq } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { @@ -29827,9 +29908,9 @@ func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context } return graphql.Null } - res := resTmp.(id.PluginID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { @@ -30004,7 +30085,7 @@ func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _UpdateMePayload_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMePayload) (ret graphql.Marshaler) { +func (ec *executionContext) _UpdateMePayload_me(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMePayload) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -30022,7 +30103,7 @@ func (ec *executionContext) _UpdateMePayload_user(ctx context.Context, field gra ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.User, nil + return obj.Me, nil }) if err != nil { ec.Error(ctx, err) @@ -30034,9 +30115,9 @@ func (ec *executionContext) 
_UpdateMePayload_user(ctx context.Context, field gra } return graphql.Null } - res := resTmp.(*gqlmodel.User) + res := resTmp.(*gqlmodel.Me) fc.Result = res - return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) + return ec.marshalNMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx, field.Selections, res) } func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMemberOfTeamPayload) (ret graphql.Marshaler) { @@ -30454,9 +30535,9 @@ func (ec *executionContext) _User_id(ctx context.Context, field graphql.Collecte } return graphql.Null } - res := resTmp.(id.ID) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { @@ -30529,216 +30610,6 @@ func (ec *executionContext) _User_email(ctx context.Context, field graphql.Colle return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _User_lang(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Lang, nil - }) - if err != nil { - 
ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(language.Tag) - fc.Result = res - return ec.marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, field.Selections, res) -} - -func (ec *executionContext) _User_theme(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Theme, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(gqlmodel.Theme) - fc.Result = res - return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, field.Selections, res) -} - -func (ec *executionContext) _User_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.MyTeamID, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if 
resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(id.ID) - fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, field.Selections, res) -} - -func (ec *executionContext) _User_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Auths, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.([]string) - fc.Result = res - return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) -} - -func (ec *executionContext) _User_teams(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.User().Teams(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return 
graphql.Null - } - res := resTmp.([]*gqlmodel.Team) - fc.Result = res - return ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx, field.Selections, res) -} - -func (ec *executionContext) _User_myTeam(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.User().MyTeam(rctx, obj) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(*gqlmodel.Team) - fc.Result = res - return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) -} - func (ec *executionContext) _WidgetAlignSystem_inner(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -30833,9 +30704,9 @@ func (ec *executionContext) _WidgetArea_widgetIds(ctx context.Context, field gra } return graphql.Null } - res := resTmp.([]*id.ID) + res := resTmp.([]gqlmodel.ID) fc.Result = res - return ec.marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, field.Selections, res) + return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } func (ec *executionContext) _WidgetArea_align(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { @@ -31423,14 +31294,14 @@ func (ec *executionContext) ___Directive_description(ctx context.Context, field Object: "__Directive", Field: field, Args: nil, - IsMethod: false, + IsMethod: true, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Description(), nil }) if err != nil { ec.Error(ctx, err) @@ -31439,9 +31310,9 @@ func (ec *executionContext) ___Directive_description(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*string) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOString2แš–string(ctx, field.Selections, res) } func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { @@ -31595,14 +31466,14 @@ func (ec *executionContext) ___EnumValue_description(ctx context.Context, field Object: "__EnumValue", Field: field, Args: nil, - IsMethod: false, + IsMethod: true, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Description(), nil }) if err != nil { ec.Error(ctx, err) @@ -31611,9 +31482,9 @@ func (ec *executionContext) ___EnumValue_description(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*string) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOString2แš–string(ctx, field.Selections, res) } func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, 
field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { @@ -31729,14 +31600,14 @@ func (ec *executionContext) ___Field_description(ctx context.Context, field grap Object: "__Field", Field: field, Args: nil, - IsMethod: false, + IsMethod: true, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Description(), nil }) if err != nil { ec.Error(ctx, err) @@ -31745,9 +31616,9 @@ func (ec *executionContext) ___Field_description(ctx context.Context, field grap if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*string) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOString2แš–string(ctx, field.Selections, res) } func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { @@ -31933,14 +31804,14 @@ func (ec *executionContext) ___InputValue_description(ctx context.Context, field Object: "__InputValue", Field: field, Args: nil, - IsMethod: false, + IsMethod: true, IsResolver: false, } ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Description, nil + return obj.Description(), nil }) if err != nil { ec.Error(ctx, err) @@ -31949,9 +31820,9 @@ func (ec *executionContext) ___InputValue_description(ctx context.Context, field if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*string) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOString2แš–string(ctx, field.Selections, res) } func (ec *executionContext) ___InputValue_type(ctx context.Context, field 
graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { @@ -32021,6 +31892,38 @@ func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, fiel return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) ___Schema_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Schema", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { @@ -32284,9 +32187,9 @@ func (ec *executionContext) ___Type_description(ctx context.Context, field graph if resTmp == nil { return graphql.Null } - res := resTmp.(string) + res := resTmp.(*string) fc.Result = res - return ec.marshalOString2string(ctx, field.Selections, res) + return ec.marshalOString2แš–string(ctx, field.Selections, res) } func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { @@ -32495,6 +32398,38 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, 
field.Selections, res) } +func (ec *executionContext) ___Type_specifiedByURL(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + fc := &graphql.FieldContext{ + Object: "__Type", + Field: field, + Args: nil, + IsMethod: true, + IsResolver: false, + } + + ctx = graphql.WithFieldContext(ctx, fc) + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SpecifiedByURL(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2แš–string(ctx, field.Selections, res) +} + // endregion **************************** field.gotpl ***************************** // region **************************** input.gotpl ***************************** @@ -32512,7 +32447,7 @@ func (ec *executionContext) unmarshalInputAddClusterInput(ctx context.Context, o var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32543,7 +32478,7 @@ func (ec *executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32559,7 +32494,7 @@ func (ec 
*executionContext) unmarshalInputAddDatasetSchemaInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("representativefield")) - it.Representativefield, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.Representativefield, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32582,7 +32517,7 @@ func (ec *executionContext) unmarshalInputAddDynamicDatasetInput(ctx context.Con var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) - it.DatasetSchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.DatasetSchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32645,7 +32580,7 @@ func (ec *executionContext) unmarshalInputAddDynamicDatasetSchemaInput(ctx conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32668,7 +32603,7 @@ func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32676,7 +32611,7 @@ func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Conte var err error ctx := 
graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32684,7 +32619,7 @@ func (ec *executionContext) unmarshalInputAddInfoboxFieldInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) - it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + it.ExtensionID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32715,7 +32650,7 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parentLayerId")) - it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32723,7 +32658,7 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32731,7 +32666,7 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) - 
it.ExtensionID, err = ec.unmarshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + it.ExtensionID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32747,7 +32682,7 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetSchemaID")) - it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32763,7 +32698,7 @@ func (ec *executionContext) unmarshalInputAddLayerGroupInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("representativeFieldId")) - it.RepresentativeFieldID, err = ec.unmarshalODatasetSchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšDatasetSchemaFieldID(ctx, v) + it.RepresentativeFieldID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32786,7 +32721,7 @@ func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parentLayerId")) - it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ParentLayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32794,7 +32729,7 @@ func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32802,7 +32737,7 @@ func (ec *executionContext) unmarshalInputAddLayerItemInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) - it.ExtensionID, err = ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + it.ExtensionID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32857,7 +32792,7 @@ func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32865,7 +32800,7 @@ func (ec *executionContext) unmarshalInputAddMemberToTeamInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) - it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32896,7 +32831,7 @@ func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32904,7 +32839,7 @@ func (ec *executionContext) unmarshalInputAddPropertyItemInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32951,7 +32886,7 @@ func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32959,7 +32894,7 @@ func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32967,7 +32902,7 @@ func (ec *executionContext) unmarshalInputAddWidgetInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("extensionId")) - it.ExtensionID, err = 
ec.unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx, v) + it.ExtensionID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32990,7 +32925,7 @@ func (ec *executionContext) unmarshalInputAttachTagItemToGroupInput(ctx context. var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemID")) - it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -32998,7 +32933,7 @@ func (ec *executionContext) unmarshalInputAttachTagItemToGroupInput(ctx context. var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupID")) - it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33021,7 +32956,7 @@ func (ec *executionContext) unmarshalInputAttachTagToLayerInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) - it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33029,7 +32964,7 @@ func (ec *executionContext) unmarshalInputAttachTagToLayerInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerID")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33052,7 +32987,7 @@ func (ec *executionContext) unmarshalInputCreateAssetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33083,7 +33018,7 @@ func (ec *executionContext) unmarshalInputCreateInfoboxInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33106,7 +33041,7 @@ func (ec *executionContext) unmarshalInputCreateProjectInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33177,7 +33112,7 @@ func (ec *executionContext) unmarshalInputCreateSceneInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) - it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33200,7 +33135,7 @@ 
func (ec *executionContext) unmarshalInputCreateTagGroupInput(ctx context.Contex var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33216,7 +33151,7 @@ func (ec *executionContext) unmarshalInputCreateTagGroupInput(ctx context.Contex var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tags")) - it.Tags, err = ec.unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + it.Tags, err = ec.unmarshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) if err != nil { return it, err } @@ -33239,7 +33174,7 @@ func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33255,7 +33190,7 @@ func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("parent")) - it.Parent, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.Parent, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33263,7 +33198,7 @@ func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("linkedDatasetSchemaID")) - it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LinkedDatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33271,7 +33206,7 @@ func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetID")) - it.LinkedDatasetID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LinkedDatasetID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33279,7 +33214,7 @@ func (ec *executionContext) unmarshalInputCreateTagItemInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("linkedDatasetField")) - it.LinkedDatasetField, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LinkedDatasetField, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33325,7 +33260,7 @@ func (ec *executionContext) unmarshalInputDeleteMeInput(ctx context.Context, obj var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) - it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33348,7 +33283,7 @@ func (ec *executionContext) unmarshalInputDeleteProjectInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) - 
it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33371,7 +33306,7 @@ func (ec *executionContext) unmarshalInputDeleteTeamInput(ctx context.Context, o var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33394,7 +33329,7 @@ func (ec *executionContext) unmarshalInputDetachTagFromLayerInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) - it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33402,7 +33337,7 @@ func (ec *executionContext) unmarshalInputDetachTagFromLayerInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerID")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33425,7 +33360,7 @@ func (ec *executionContext) unmarshalInputDetachTagItemFromGroupInput(ctx contex var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemID")) - it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33433,7 +33368,7 @@ func (ec *executionContext) unmarshalInputDetachTagItemFromGroupInput(ctx contex var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("groupID")) - it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.GroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33480,7 +33415,7 @@ func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33488,7 +33423,7 @@ func (ec *executionContext) unmarshalInputImportDatasetFromGoogleSheetInput(ctx var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) - it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33519,7 +33454,7 @@ func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, 
err } @@ -33527,7 +33462,7 @@ func (ec *executionContext) unmarshalInputImportDatasetInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaId")) - it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.DatasetSchemaID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33550,7 +33485,7 @@ func (ec *executionContext) unmarshalInputImportLayerInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33589,7 +33524,7 @@ func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33597,7 +33532,7 @@ func (ec *executionContext) unmarshalInputInstallPluginInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33620,7 +33555,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err 
error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33628,7 +33563,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33636,7 +33571,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33644,7 +33579,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33652,7 +33587,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err error ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("datasetSchemaIds")) - it.DatasetSchemaIds, err = ec.unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + it.DatasetSchemaIds, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) if err != nil { return it, err } @@ -33660,7 +33595,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetSchemaFieldIds")) - it.DatasetSchemaFieldIds, err = ec.unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + it.DatasetSchemaFieldIds, err = ec.unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) if err != nil { return it, err } @@ -33668,7 +33603,7 @@ func (ec *executionContext) unmarshalInputLinkDatasetToPropertyValueInput(ctx co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("datasetIds")) - it.DatasetIds, err = ec.unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx, v) + it.DatasetIds, err = ec.unmarshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, v) if err != nil { return it, err } @@ -33691,7 +33626,7 @@ func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33699,7 +33634,7 @@ func (ec *executionContext) unmarshalInputMoveInfoboxFieldInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("infoboxFieldId")) 
- it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33730,7 +33665,7 @@ func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33738,7 +33673,7 @@ func (ec *executionContext) unmarshalInputMoveLayerInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("destLayerId")) - it.DestLayerID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.DestLayerID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33769,7 +33704,7 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33777,7 +33712,7 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = 
ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33785,7 +33720,7 @@ func (ec *executionContext) unmarshalInputMovePropertyItemInput(ctx context.Cont var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33863,7 +33798,7 @@ func (ec *executionContext) unmarshalInputPublishProjectInput(ctx context.Contex var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) - it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33902,7 +33837,7 @@ func (ec *executionContext) unmarshalInputRemoveAssetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("assetId")) - it.AssetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.AssetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33925,7 +33860,7 @@ func (ec *executionContext) unmarshalInputRemoveClusterInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId")) - it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ClusterID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33933,7 +33868,7 @@ func (ec *executionContext) unmarshalInputRemoveClusterInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33956,7 +33891,7 @@ func (ec *executionContext) unmarshalInputRemoveDatasetSchemaInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaId")) - it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33987,7 +33922,7 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -33995,7 +33930,7 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxFieldInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("infoboxFieldId")) - it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.InfoboxFieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } 
@@ -34018,7 +33953,7 @@ func (ec *executionContext) unmarshalInputRemoveInfoboxInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34041,7 +33976,7 @@ func (ec *executionContext) unmarshalInputRemoveLayerInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34064,7 +33999,7 @@ func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context. var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34072,7 +34007,7 @@ func (ec *executionContext) unmarshalInputRemoveMemberFromTeamInput(ctx context. 
var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) - it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34118,7 +34053,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34126,7 +34061,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34134,7 +34069,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34142,7 +34077,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyFieldInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - 
it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34165,7 +34100,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34173,7 +34108,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34181,7 +34116,7 @@ func (ec *executionContext) unmarshalInputRemovePropertyItemInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34204,7 +34139,7 @@ func (ec *executionContext) unmarshalInputRemoveTagInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagID")) - it.TagID, err = 
ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34227,7 +34162,7 @@ func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34235,7 +34170,7 @@ func (ec *executionContext) unmarshalInputRemoveWidgetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("widgetId")) - it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34274,7 +34209,7 @@ func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj i var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) - it.UserID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.UserID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34282,7 +34217,7 @@ func (ec *executionContext) unmarshalInputSignupInput(ctx context.Context, obj i var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34313,7 +34248,7 @@ func (ec *executionContext) unmarshalInputSyncDatasetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34344,7 +34279,7 @@ func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34352,7 +34287,7 @@ func (ec *executionContext) unmarshalInputUninstallPluginInput(ctx context.Conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34375,7 +34310,7 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err 
} @@ -34383,7 +34318,7 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34391,7 +34326,7 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34399,7 +34334,7 @@ func (ec *executionContext) unmarshalInputUnlinkPropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34422,7 +34357,7 @@ func (ec *executionContext) unmarshalInputUpdateClusterInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("clusterId")) - it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ClusterID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34430,7 +34365,7 @@ func (ec *executionContext) 
unmarshalInputUpdateClusterInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34446,7 +34381,7 @@ func (ec *executionContext) unmarshalInputUpdateClusterInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34469,7 +34404,7 @@ func (ec *executionContext) unmarshalInputUpdateDatasetSchemaInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaId")) - it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SchemaID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34500,7 +34435,7 @@ func (ec *executionContext) unmarshalInputUpdateLayerInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("layerId")) - it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.LayerID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34602,7 +34537,7 @@ func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - 
it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34610,7 +34545,7 @@ func (ec *executionContext) unmarshalInputUpdateMemberOfTeamInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("userId")) - it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.UserID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34641,7 +34576,7 @@ func (ec *executionContext) unmarshalInputUpdateProjectInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("projectId")) - it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ProjectID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34776,7 +34711,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34784,7 +34719,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemInput(ctx context.Co var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + 
it.SchemaGroupID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34823,7 +34758,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyItemOperationInput(ctx c var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34870,7 +34805,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34878,7 +34813,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34886,7 +34821,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = 
ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34894,7 +34829,7 @@ func (ec *executionContext) unmarshalInputUpdatePropertyValueInput(ctx context.C var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34933,7 +34868,7 @@ func (ec *executionContext) unmarshalInputUpdateTagInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("tagId")) - it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TagID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34941,7 +34876,7 @@ func (ec *executionContext) unmarshalInputUpdateTagInput(ctx context.Context, ob var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -34972,7 +34907,7 @@ func (ec *executionContext) unmarshalInputUpdateTeamInput(ctx context.Context, o var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("teamId")) - it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.TeamID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return 
it, err } @@ -35003,7 +34938,7 @@ func (ec *executionContext) unmarshalInputUpdateWidgetAlignSystemInput(ctx conte var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35042,7 +34977,7 @@ func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35050,7 +34985,7 @@ func (ec *executionContext) unmarshalInputUpdateWidgetInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("widgetId")) - it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.WidgetID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35105,7 +35040,7 @@ func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35113,7 +35048,7 @@ func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, 
graphql.NewPathWithField("pluginId")) - it.PluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.PluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35121,7 +35056,7 @@ func (ec *executionContext) unmarshalInputUpgradePluginInput(ctx context.Context var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("toPluginId")) - it.ToPluginID, err = ec.unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, v) + it.ToPluginID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35144,7 +35079,7 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("propertyId")) - it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.PropertyID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35152,7 +35087,7 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("schemaGroupId")) - it.SchemaGroupID, err = ec.unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx, v) + it.SchemaGroupID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35160,7 +35095,7 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. 
var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("itemId")) - it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.ItemID, err = ec.unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35168,7 +35103,7 @@ func (ec *executionContext) unmarshalInputUploadFileToPropertyInput(ctx context. var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("fieldId")) - it.FieldID, err = ec.unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx, v) + it.FieldID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35199,7 +35134,7 @@ func (ec *executionContext) unmarshalInputUploadPluginInput(ctx context.Context, var err error ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("sceneId")) - it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, v) + it.SceneID, err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, v) if err != nil { return it, err } @@ -35440,7 +35375,6 @@ var addClusterPayloadImplementors = []string{"AddClusterPayload"} func (ec *executionContext) _AddClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddClusterPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addClusterPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35448,12 +35382,22 @@ func (ec *executionContext) _AddClusterPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("AddClusterPayload") case "scene": - out.Values[i] = ec._AddClusterPayload_scene(ctx, field, obj) + innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddClusterPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "cluster": - out.Values[i] = ec._AddClusterPayload_cluster(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddClusterPayload_cluster(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35472,7 +35416,6 @@ var addDatasetSchemaPayloadImplementors = []string{"AddDatasetSchemaPayload"} func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addDatasetSchemaPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35480,7 +35423,12 @@ func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("AddDatasetSchemaPayload") case "datasetSchema": - out.Values[i] = ec._AddDatasetSchemaPayload_datasetSchema(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddDatasetSchemaPayload_datasetSchema(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -35496,7 +35444,6 @@ var addDynamicDatasetPayloadImplementors = []string{"AddDynamicDatasetPayload"} func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDynamicDatasetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35504,9 +35451,19 @@ func (ec *executionContext) _AddDynamicDatasetPayload(ctx 
context.Context, sel a case "__typename": out.Values[i] = graphql.MarshalString("AddDynamicDatasetPayload") case "datasetSchema": - out.Values[i] = ec._AddDynamicDatasetPayload_datasetSchema(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddDynamicDatasetPayload_datasetSchema(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "dataset": - out.Values[i] = ec._AddDynamicDatasetPayload_dataset(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddDynamicDatasetPayload_dataset(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -35522,7 +35479,6 @@ var addDynamicDatasetSchemaPayloadImplementors = []string{"AddDynamicDatasetSche func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddDynamicDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addDynamicDatasetSchemaPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35530,7 +35486,12 @@ func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, case "__typename": out.Values[i] = graphql.MarshalString("AddDynamicDatasetSchemaPayload") case "datasetSchema": - out.Values[i] = ec._AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -35546,7 +35507,6 @@ var addInfoboxFieldPayloadImplementors = []string{"AddInfoboxFieldPayload"} func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddInfoboxFieldPayload) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, addInfoboxFieldPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35554,12 +35514,22 @@ func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("AddInfoboxFieldPayload") case "infoboxField": - out.Values[i] = ec._AddInfoboxFieldPayload_infoboxField(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddInfoboxFieldPayload_infoboxField(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "layer": - out.Values[i] = ec._AddInfoboxFieldPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddInfoboxFieldPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35578,7 +35548,6 @@ var addLayerGroupPayloadImplementors = []string{"AddLayerGroupPayload"} func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddLayerGroupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addLayerGroupPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35586,17 +35555,32 @@ func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("AddLayerGroupPayload") case "layer": - out.Values[i] = ec._AddLayerGroupPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddLayerGroupPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - out.Values[i] = ec._AddLayerGroupPayload_parentLayer(ctx, field, obj) + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec._AddLayerGroupPayload_parentLayer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "index": - out.Values[i] = ec._AddLayerGroupPayload_index(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddLayerGroupPayload_index(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -35612,7 +35596,6 @@ var addLayerItemPayloadImplementors = []string{"AddLayerItemPayload"} func (ec *executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddLayerItemPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addLayerItemPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35620,17 +35603,32 @@ func (ec *executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("AddLayerItemPayload") case "layer": - out.Values[i] = ec._AddLayerItemPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddLayerItemPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - out.Values[i] = ec._AddLayerItemPayload_parentLayer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddLayerItemPayload_parentLayer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "index": - out.Values[i] = ec._AddLayerItemPayload_index(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddLayerItemPayload_index(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + 
strconv.Quote(field.Name)) } @@ -35646,7 +35644,6 @@ var addMemberToTeamPayloadImplementors = []string{"AddMemberToTeamPayload"} func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddMemberToTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addMemberToTeamPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35654,7 +35651,12 @@ func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("AddMemberToTeamPayload") case "team": - out.Values[i] = ec._AddMemberToTeamPayload_team(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddMemberToTeamPayload_team(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35673,7 +35675,6 @@ var addWidgetPayloadImplementors = []string{"AddWidgetPayload"} func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AddWidgetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, addWidgetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35681,12 +35682,22 @@ func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("AddWidgetPayload") case "scene": - out.Values[i] = ec._AddWidgetPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AddWidgetPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "sceneWidget": - out.Values[i] = ec._AddWidgetPayload_sceneWidget(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._AddWidgetPayload_sceneWidget(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35705,7 +35716,6 @@ var assetImplementors = []string{"Asset", "Node"} func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Asset) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, assetImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35713,43 +35723,79 @@ func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, ob case "__typename": out.Values[i] = graphql.MarshalString("Asset") case "id": - out.Values[i] = ec._Asset_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "createdAt": - out.Values[i] = ec._Asset_createdAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_createdAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "teamId": - out.Values[i] = ec._Asset_teamId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_teamId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._Asset_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "size": - out.Values[i] = ec._Asset_size(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_size(ctx, field, obj) + } + + out.Values[i] = 
innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "url": - out.Values[i] = ec._Asset_url(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_url(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "contentType": - out.Values[i] = ec._Asset_contentType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Asset_contentType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "team": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -35757,6 +35803,11 @@ func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, ob }() res = ec._Asset_team(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35773,7 +35824,6 @@ var assetConnectionImplementors = []string{"AssetConnection"} func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AssetConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, assetConnectionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35781,22 +35831,42 @@ func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("AssetConnection") case "edges": - out.Values[i] = ec._AssetConnection_edges(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AssetConnection_edges(ctx, field, obj) + } + + out.Values[i] = 
innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "nodes": - out.Values[i] = ec._AssetConnection_nodes(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AssetConnection_nodes(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - out.Values[i] = ec._AssetConnection_pageInfo(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AssetConnection_pageInfo(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - out.Values[i] = ec._AssetConnection_totalCount(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AssetConnection_totalCount(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35815,7 +35885,6 @@ var assetEdgeImplementors = []string{"AssetEdge"} func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AssetEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, assetEdgeImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35823,12 +35892,22 @@ func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("AssetEdge") case "cursor": - out.Values[i] = ec._AssetEdge_cursor(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AssetEdge_cursor(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "node": - out.Values[i] = ec._AssetEdge_node(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AssetEdge_node(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: 
panic("unknown field " + strconv.Quote(field.Name)) } @@ -35844,7 +35923,6 @@ var attachTagItemToGroupPayloadImplementors = []string{"AttachTagItemToGroupPayl func (ec *executionContext) _AttachTagItemToGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AttachTagItemToGroupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, attachTagItemToGroupPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35852,7 +35930,12 @@ func (ec *executionContext) _AttachTagItemToGroupPayload(ctx context.Context, se case "__typename": out.Values[i] = graphql.MarshalString("AttachTagItemToGroupPayload") case "tag": - out.Values[i] = ec._AttachTagItemToGroupPayload_tag(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AttachTagItemToGroupPayload_tag(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35871,7 +35954,6 @@ var attachTagToLayerPayloadImplementors = []string{"AttachTagToLayerPayload"} func (ec *executionContext) _AttachTagToLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.AttachTagToLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, attachTagToLayerPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35879,7 +35961,12 @@ func (ec *executionContext) _AttachTagToLayerPayload(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("AttachTagToLayerPayload") case "layer": - out.Values[i] = ec._AttachTagToLayerPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._AttachTagToLayerPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35898,7 +35985,6 @@ var cameraImplementors = 
[]string{"Camera"} func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Camera) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, cameraImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35906,37 +35992,72 @@ func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Camera") case "lat": - out.Values[i] = ec._Camera_lat(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_lat(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "lng": - out.Values[i] = ec._Camera_lng(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_lng(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "altitude": - out.Values[i] = ec._Camera_altitude(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_altitude(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "heading": - out.Values[i] = ec._Camera_heading(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_heading(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "pitch": - out.Values[i] = ec._Camera_pitch(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_pitch(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "roll": - out.Values[i] = ec._Camera_roll(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_roll(ctx, field, obj) + } + + 
out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "fov": - out.Values[i] = ec._Camera_fov(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Camera_fov(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -35955,7 +36076,6 @@ var clusterImplementors = []string{"Cluster"} func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Cluster) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, clusterImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -35963,23 +36083,39 @@ func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Cluster") case "id": - out.Values[i] = ec._Cluster_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Cluster_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._Cluster_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Cluster_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._Cluster_propertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Cluster_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -35987,6 +36123,11 @@ 
func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, }() res = ec._Cluster_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36003,7 +36144,6 @@ var createAssetPayloadImplementors = []string{"CreateAssetPayload"} func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateAssetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createAssetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36011,7 +36151,12 @@ func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("CreateAssetPayload") case "asset": - out.Values[i] = ec._CreateAssetPayload_asset(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateAssetPayload_asset(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36030,7 +36175,6 @@ var createInfoboxPayloadImplementors = []string{"CreateInfoboxPayload"} func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateInfoboxPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createInfoboxPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36038,7 +36182,12 @@ func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("CreateInfoboxPayload") case "layer": - out.Values[i] = ec._CreateInfoboxPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateInfoboxPayload_layer(ctx, field, obj) + } + + 
out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36057,7 +36206,6 @@ var createScenePayloadImplementors = []string{"CreateScenePayload"} func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateScenePayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createScenePayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36065,7 +36213,12 @@ func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("CreateScenePayload") case "scene": - out.Values[i] = ec._CreateScenePayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateScenePayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36084,7 +36237,6 @@ var createTagGroupPayloadImplementors = []string{"CreateTagGroupPayload"} func (ec *executionContext) _CreateTagGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTagGroupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createTagGroupPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36092,7 +36244,12 @@ func (ec *executionContext) _CreateTagGroupPayload(ctx context.Context, sel ast. 
case "__typename": out.Values[i] = graphql.MarshalString("CreateTagGroupPayload") case "tag": - out.Values[i] = ec._CreateTagGroupPayload_tag(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateTagGroupPayload_tag(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36111,7 +36268,6 @@ var createTagItemPayloadImplementors = []string{"CreateTagItemPayload"} func (ec *executionContext) _CreateTagItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTagItemPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createTagItemPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36119,12 +36275,22 @@ func (ec *executionContext) _CreateTagItemPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("CreateTagItemPayload") case "tag": - out.Values[i] = ec._CreateTagItemPayload_tag(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateTagItemPayload_tag(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "parent": - out.Values[i] = ec._CreateTagItemPayload_parent(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateTagItemPayload_parent(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36140,7 +36306,6 @@ var createTeamPayloadImplementors = []string{"CreateTeamPayload"} func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.CreateTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, createTeamPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ 
-36148,7 +36313,12 @@ func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("CreateTeamPayload") case "team": - out.Values[i] = ec._CreateTeamPayload_team(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._CreateTeamPayload_team(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36167,7 +36337,6 @@ var datasetImplementors = []string{"Dataset", "Node"} func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Dataset) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36175,28 +36344,49 @@ func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Dataset") case "id": - out.Values[i] = ec._Dataset_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Dataset_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - out.Values[i] = ec._Dataset_source(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Dataset_source(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._Dataset_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Dataset_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - out.Values[i] = ec._Dataset_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + return ec._Dataset_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36204,10 +36394,16 @@ func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, }() res = ec._Dataset_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "name": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36215,6 +36411,11 @@ func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, }() res = ec._Dataset_name(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36231,7 +36432,6 @@ var datasetConnectionImplementors = []string{"DatasetConnection"} func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetConnectionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36239,22 +36439,42 @@ func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("DatasetConnection") case "edges": - out.Values[i] = ec._DatasetConnection_edges(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetConnection_edges(ctx, field, obj) + } + + 
out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "nodes": - out.Values[i] = ec._DatasetConnection_nodes(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetConnection_nodes(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - out.Values[i] = ec._DatasetConnection_pageInfo(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetConnection_pageInfo(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - out.Values[i] = ec._DatasetConnection_totalCount(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetConnection_totalCount(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36273,7 +36493,6 @@ var datasetEdgeImplementors = []string{"DatasetEdge"} func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetEdgeImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36281,12 +36500,22 @@ func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("DatasetEdge") case "cursor": - out.Values[i] = ec._DatasetEdge_cursor(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetEdge_cursor(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "node": - out.Values[i] = ec._DatasetEdge_node(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetEdge_node(ctx, field, obj) + } + + 
out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36302,7 +36531,6 @@ var datasetFieldImplementors = []string{"DatasetField"} func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36310,30 +36538,56 @@ func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("DatasetField") case "fieldId": - out.Values[i] = ec._DatasetField_fieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetField_fieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._DatasetField_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetField_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - out.Values[i] = ec._DatasetField_source(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetField_source(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - out.Values[i] = ec._DatasetField_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetField_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "value": - out.Values[i] = ec._DatasetField_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + return ec._DatasetField_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36341,10 +36595,16 @@ func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.Selection }() res = ec._DatasetField_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "field": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36352,10 +36612,16 @@ func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.Selection }() res = ec._DatasetField_field(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "valueRef": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36363,6 +36629,11 @@ func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.Selection }() res = ec._DatasetField_valueRef(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36379,7 +36650,6 @@ var datasetSchemaImplementors = []string{"DatasetSchema", "Node"} func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchema) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 
for i, field := range fields { @@ -36387,37 +36657,73 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchema") case "id": - out.Values[i] = ec._DatasetSchema_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - out.Values[i] = ec._DatasetSchema_source(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_source(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._DatasetSchema_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._DatasetSchema_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - out.Values[i] = ec._DatasetSchema_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "representativeFieldId": - out.Values[i] = ec._DatasetSchema_representativeFieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_representativeFieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case 
"dynamic": - out.Values[i] = ec._DatasetSchema_dynamic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchema_dynamic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "datasets": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36428,10 +36734,16 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.Selectio atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36439,10 +36751,16 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.Selectio }() res = ec._DatasetSchema_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "representativeField": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36450,6 +36768,11 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.Selectio }() res = ec._DatasetSchema_representativeField(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36466,7 +36789,6 @@ var datasetSchemaConnectionImplementors = []string{"DatasetSchemaConnection"} func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaConnection) 
graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaConnectionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36474,22 +36796,42 @@ func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchemaConnection") case "edges": - out.Values[i] = ec._DatasetSchemaConnection_edges(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaConnection_edges(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "nodes": - out.Values[i] = ec._DatasetSchemaConnection_nodes(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaConnection_nodes(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - out.Values[i] = ec._DatasetSchemaConnection_pageInfo(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaConnection_pageInfo(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - out.Values[i] = ec._DatasetSchemaConnection_totalCount(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaConnection_totalCount(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36508,7 +36850,6 @@ var datasetSchemaEdgeImplementors = []string{"DatasetSchemaEdge"} func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaEdgeImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, 
field := range fields { @@ -36516,12 +36857,22 @@ func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchemaEdge") case "cursor": - out.Values[i] = ec._DatasetSchemaEdge_cursor(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaEdge_cursor(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "node": - out.Values[i] = ec._DatasetSchemaEdge_node(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaEdge_node(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -36537,7 +36888,6 @@ var datasetSchemaFieldImplementors = []string{"DatasetSchemaField", "Node"} func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DatasetSchemaField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, datasetSchemaFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36545,35 +36895,66 @@ func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchemaField") case "id": - out.Values[i] = ec._DatasetSchemaField_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaField_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - out.Values[i] = ec._DatasetSchemaField_source(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaField_source(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._DatasetSchemaField_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaField_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - out.Values[i] = ec._DatasetSchemaField_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaField_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._DatasetSchemaField_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaField_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "refId": - out.Values[i] = ec._DatasetSchemaField_refId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DatasetSchemaField_refId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36581,10 +36962,16 @@ func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.Sel }() res = ec._DatasetSchemaField_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "ref": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36592,6 +36979,11 @@ func (ec *executionContext) 
_DatasetSchemaField(ctx context.Context, sel ast.Sel }() res = ec._DatasetSchemaField_ref(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36608,7 +37000,6 @@ var deleteMePayloadImplementors = []string{"DeleteMePayload"} func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteMePayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, deleteMePayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36616,7 +37007,12 @@ func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("DeleteMePayload") case "userId": - out.Values[i] = ec._DeleteMePayload_userId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DeleteMePayload_userId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36635,7 +37031,6 @@ var deleteProjectPayloadImplementors = []string{"DeleteProjectPayload"} func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteProjectPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, deleteProjectPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36643,7 +37038,12 @@ func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("DeleteProjectPayload") case "projectId": - out.Values[i] = ec._DeleteProjectPayload_projectId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DeleteProjectPayload_projectId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + 
if out.Values[i] == graphql.Null { invalids++ } @@ -36662,7 +37062,6 @@ var deleteTeamPayloadImplementors = []string{"DeleteTeamPayload"} func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DeleteTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, deleteTeamPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36670,7 +37069,12 @@ func (ec *executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("DeleteTeamPayload") case "teamId": - out.Values[i] = ec._DeleteTeamPayload_teamId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DeleteTeamPayload_teamId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36689,7 +37093,6 @@ var detachTagFromLayerPayloadImplementors = []string{"DetachTagFromLayerPayload" func (ec *executionContext) _DetachTagFromLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DetachTagFromLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, detachTagFromLayerPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36697,7 +37100,12 @@ func (ec *executionContext) _DetachTagFromLayerPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("DetachTagFromLayerPayload") case "layer": - out.Values[i] = ec._DetachTagFromLayerPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DetachTagFromLayerPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36716,7 +37124,6 @@ var detachTagItemFromGroupPayloadImplementors = []string{"DetachTagItemFromGroup func (ec 
*executionContext) _DetachTagItemFromGroupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.DetachTagItemFromGroupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, detachTagItemFromGroupPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36724,7 +37131,12 @@ func (ec *executionContext) _DetachTagItemFromGroupPayload(ctx context.Context, case "__typename": out.Values[i] = graphql.MarshalString("DetachTagItemFromGroupPayload") case "tag": - out.Values[i] = ec._DetachTagItemFromGroupPayload_tag(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._DetachTagItemFromGroupPayload_tag(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36743,7 +37155,6 @@ var importDatasetPayloadImplementors = []string{"ImportDatasetPayload"} func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ImportDatasetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, importDatasetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36751,7 +37162,12 @@ func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("ImportDatasetPayload") case "datasetSchema": - out.Values[i] = ec._ImportDatasetPayload_datasetSchema(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ImportDatasetPayload_datasetSchema(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36770,7 +37186,6 @@ var importLayerPayloadImplementors = []string{"ImportLayerPayload"} func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ImportLayerPayload) 
graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, importLayerPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36778,12 +37193,22 @@ func (ec *executionContext) _ImportLayerPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("ImportLayerPayload") case "layers": - out.Values[i] = ec._ImportLayerPayload_layers(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ImportLayerPayload_layers(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - out.Values[i] = ec._ImportLayerPayload_parentLayer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ImportLayerPayload_parentLayer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -36802,7 +37227,6 @@ var infoboxImplementors = []string{"Infobox"} func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Infobox) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, infoboxImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36810,30 +37234,56 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Infobox") case "sceneId": - out.Values[i] = ec._Infobox_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Infobox_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "layerId": - out.Values[i] = ec._Infobox_layerId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Infobox_layerId(ctx, field, obj) + } + 
+ out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._Infobox_propertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Infobox_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - out.Values[i] = ec._Infobox_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Infobox_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "linkedDatasetId": - out.Values[i] = ec._Infobox_linkedDatasetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Infobox_linkedDatasetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "layer": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36844,10 +37294,16 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36855,10 +37311,16 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, }() res = ec._Infobox_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc 
:= func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36866,10 +37328,16 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, }() res = ec._Infobox_linkedDataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "merged": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36877,10 +37345,16 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, }() res = ec._Infobox_merged(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36888,6 +37362,11 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, }() res = ec._Infobox_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36904,7 +37383,6 @@ var infoboxFieldImplementors = []string{"InfoboxField"} func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.InfoboxField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, infoboxFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -36912,40 +37390,76 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("InfoboxField") case "id": - out.Values[i] = 
ec._InfoboxField_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InfoboxField_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._InfoboxField_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InfoboxField_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "layerId": - out.Values[i] = ec._InfoboxField_layerId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InfoboxField_layerId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._InfoboxField_propertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InfoboxField_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - out.Values[i] = ec._InfoboxField_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InfoboxField_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extensionId": - out.Values[i] = ec._InfoboxField_extensionId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InfoboxField_extensionId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "linkedDatasetId": - out.Values[i] = ec._InfoboxField_linkedDatasetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._InfoboxField_linkedDatasetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "layer": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36956,10 +37470,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "infobox": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36970,10 +37490,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36981,10 +37507,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -36992,10 +37524,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_plugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + 
}) case "extension": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37003,10 +37541,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_extension(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37014,10 +37558,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_linkedDataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "merged": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37025,10 +37575,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_merged(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37036,10 +37592,16 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scenePlugin": field := field 
- out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37047,6 +37609,11 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection }() res = ec._InfoboxField_scenePlugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37063,7 +37630,6 @@ var installPluginPayloadImplementors = []string{"InstallPluginPayload"} func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.InstallPluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, installPluginPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37071,12 +37637,22 @@ func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("InstallPluginPayload") case "scene": - out.Values[i] = ec._InstallPluginPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InstallPluginPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "scenePlugin": - out.Values[i] = ec._InstallPluginPayload_scenePlugin(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._InstallPluginPayload_scenePlugin(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -37095,7 +37671,6 @@ var latLngImplementors = []string{"LatLng"} func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LatLng) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, 
latLngImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37103,12 +37678,22 @@ func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("LatLng") case "lat": - out.Values[i] = ec._LatLng_lat(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LatLng_lat(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "lng": - out.Values[i] = ec._LatLng_lng(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LatLng_lng(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -37127,7 +37712,6 @@ var latLngHeightImplementors = []string{"LatLngHeight"} func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LatLngHeight) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, latLngHeightImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37135,17 +37719,32 @@ func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("LatLngHeight") case "lat": - out.Values[i] = ec._LatLngHeight_lat(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LatLngHeight_lat(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "lng": - out.Values[i] = ec._LatLngHeight_lng(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LatLngHeight_lng(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "height": - out.Values[i] = ec._LatLngHeight_height(ctx, field, obj) + innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { + return ec._LatLngHeight_height(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -37164,7 +37763,6 @@ var layerGroupImplementors = []string{"LayerGroup", "Layer"} func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerGroupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37172,55 +37770,121 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("LayerGroup") case "id": - out.Values[i] = ec._LayerGroup_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._LayerGroup_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._LayerGroup_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isVisible": - out.Values[i] = ec._LayerGroup_isVisible(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_isVisible(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._LayerGroup_propertyId(ctx, 
field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "pluginId": - out.Values[i] = ec._LayerGroup_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "extensionId": - out.Values[i] = ec._LayerGroup_extensionId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_extensionId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "infobox": - out.Values[i] = ec._LayerGroup_infobox(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_infobox(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "parentId": - out.Values[i] = ec._LayerGroup_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetSchemaId": - out.Values[i] = ec._LayerGroup_linkedDatasetSchemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_linkedDatasetSchemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "root": - out.Values[i] = ec._LayerGroup_root(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_root(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "layerIds": - out.Values[i] = ec._LayerGroup_layerIds(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_layerIds(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tags": - 
out.Values[i] = ec._LayerGroup_tags(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerGroup_tags(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37228,10 +37892,16 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37239,10 +37909,16 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37250,10 +37926,16 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_plugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "extension": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37261,10 +37943,16 @@ func 
(ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_extension(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDatasetSchema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37272,10 +37960,16 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_linkedDatasetSchema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "layers": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37286,10 +37980,16 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37297,10 +37997,16 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scenePlugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37308,6 +38014,11 @@ func (ec *executionContext) _LayerGroup(ctx 
context.Context, sel ast.SelectionSe }() res = ec._LayerGroup_scenePlugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37324,7 +38035,6 @@ var layerItemImplementors = []string{"LayerItem", "Layer"} func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerItem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerItemImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37332,45 +38042,101 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("LayerItem") case "id": - out.Values[i] = ec._LayerItem_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._LayerItem_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._LayerItem_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isVisible": - out.Values[i] = ec._LayerItem_isVisible(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_isVisible(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - 
out.Values[i] = ec._LayerItem_propertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "pluginId": - out.Values[i] = ec._LayerItem_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "extensionId": - out.Values[i] = ec._LayerItem_extensionId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_extensionId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "infobox": - out.Values[i] = ec._LayerItem_infobox(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_infobox(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "parentId": - out.Values[i] = ec._LayerItem_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetId": - out.Values[i] = ec._LayerItem_linkedDatasetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_linkedDatasetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "tags": - out.Values[i] = ec._LayerItem_tags(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerItem_tags(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37378,10 +38144,16 @@ func (ec *executionContext) _LayerItem(ctx 
context.Context, sel ast.SelectionSet }() res = ec._LayerItem_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37389,10 +38161,16 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = ec._LayerItem_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37400,10 +38178,16 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = ec._LayerItem_plugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "extension": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37411,10 +38195,16 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = ec._LayerItem_extension(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37422,10 +38212,16 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = 
ec._LayerItem_linkedDataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "merged": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37433,10 +38229,16 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = ec._LayerItem_merged(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37444,10 +38246,16 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = ec._LayerItem_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scenePlugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37455,6 +38263,11 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet }() res = ec._LayerItem_scenePlugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37471,7 +38284,6 @@ var layerTagGroupImplementors = []string{"LayerTagGroup", "LayerTag"} func (ec *executionContext) _LayerTagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerTagGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerTagGroupImplementors) - out := 
graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37479,18 +38291,29 @@ func (ec *executionContext) _LayerTagGroup(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("LayerTagGroup") case "tagId": - out.Values[i] = ec._LayerTagGroup_tagId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerTagGroup_tagId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "children": - out.Values[i] = ec._LayerTagGroup_children(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerTagGroup_children(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tag": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37498,6 +38321,11 @@ func (ec *executionContext) _LayerTagGroup(ctx context.Context, sel ast.Selectio }() res = ec._LayerTagGroup_tag(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37514,7 +38342,6 @@ var layerTagItemImplementors = []string{"LayerTagItem", "LayerTag"} func (ec *executionContext) _LayerTagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.LayerTagItem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, layerTagItemImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37522,13 +38349,19 @@ func (ec *executionContext) _LayerTagItem(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("LayerTagItem") case "tagId": 
- out.Values[i] = ec._LayerTagItem_tagId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._LayerTagItem_tagId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tag": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37536,6 +38369,142 @@ func (ec *executionContext) _LayerTagItem(ctx context.Context, sel ast.Selection }() res = ec._LayerTagItem_tag(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch() + if invalids > 0 { + return graphql.Null + } + return out +} + +var meImplementors = []string{"Me"} + +func (ec *executionContext) _Me(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Me) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, meImplementors) + out := graphql.NewFieldSet(fields) + var invalids uint32 + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Me") + case "id": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "name": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "email": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_email(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + 
atomic.AddUint32(&invalids, 1) + } + case "lang": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_lang(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "theme": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_theme(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "auths": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_auths(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "myTeamId": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Me_myTeamId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + if out.Values[i] == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + case "teams": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Me_teams(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) + case "myTeam": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Me_myTeam(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37552,7 +38521,6 @@ var mergedInfoboxImplementors = []string{"MergedInfobox"} func (ec *executionContext) _MergedInfobox(ctx context.Context, sel 
ast.SelectionSet, obj *gqlmodel.MergedInfobox) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37560,20 +38528,36 @@ func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("MergedInfobox") case "sceneID": - out.Values[i] = ec._MergedInfobox_sceneID(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfobox_sceneID(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": - out.Values[i] = ec._MergedInfobox_property(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfobox_property(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "fields": - out.Values[i] = ec._MergedInfobox_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfobox_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37581,6 +38565,11 @@ func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.Selectio }() res = ec._MergedInfobox_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37597,7 +38586,6 @@ var mergedInfoboxFieldImplementors = []string{"MergedInfoboxField"} func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.MergedInfoboxField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedInfoboxFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37605,30 +38593,56 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("MergedInfoboxField") case "originalId": - out.Values[i] = ec._MergedInfoboxField_originalId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfoboxField_originalId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneID": - out.Values[i] = ec._MergedInfoboxField_sceneID(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfoboxField_sceneID(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - out.Values[i] = ec._MergedInfoboxField_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfoboxField_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extensionId": - out.Values[i] = ec._MergedInfoboxField_extensionId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfoboxField_extensionId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": - out.Values[i] = ec._MergedInfoboxField_property(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedInfoboxField_property(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "plugin": field := field - out.Concurrently(i, 
func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37636,10 +38650,16 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel }() res = ec._MergedInfoboxField_plugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "extension": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37647,10 +38667,16 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel }() res = ec._MergedInfoboxField_extension(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37658,10 +38684,16 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel }() res = ec._MergedInfoboxField_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scenePlugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37669,6 +38701,11 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel }() res = ec._MergedInfoboxField_scenePlugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ 
-37685,7 +38722,6 @@ var mergedLayerImplementors = []string{"MergedLayer"} func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedLayer) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedLayerImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37693,24 +38729,50 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("MergedLayer") case "originalId": - out.Values[i] = ec._MergedLayer_originalId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedLayer_originalId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parentId": - out.Values[i] = ec._MergedLayer_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedLayer_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "sceneID": - out.Values[i] = ec._MergedLayer_sceneID(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedLayer_sceneID(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": - out.Values[i] = ec._MergedLayer_property(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedLayer_property(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "infobox": - out.Values[i] = ec._MergedLayer_infobox(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedLayer_infobox(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "original": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37718,10 +38780,16 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS }() res = ec._MergedLayer_original(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37729,10 +38797,16 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS }() res = ec._MergedLayer_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37740,6 +38814,11 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS }() res = ec._MergedLayer_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37756,7 +38835,6 @@ var mergedPropertyImplementors = []string{"MergedProperty"} func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedProperty) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37764,16 +38842,37 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("MergedProperty") case "originalId": 
- out.Values[i] = ec._MergedProperty_originalId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedProperty_originalId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "parentId": - out.Values[i] = ec._MergedProperty_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedProperty_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "schemaId": - out.Values[i] = ec._MergedProperty_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedProperty_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetId": - out.Values[i] = ec._MergedProperty_linkedDatasetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedProperty_linkedDatasetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "original": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37781,10 +38880,16 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti }() res = ec._MergedProperty_original(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37792,10 +38897,16 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti }() res = ec._MergedProperty_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "schema": field := field - 
out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37803,10 +38914,16 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti }() res = ec._MergedProperty_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37814,10 +38931,16 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti }() res = ec._MergedProperty_linkedDataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "groups": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37828,6 +38951,11 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37844,7 +38972,6 @@ var mergedPropertyFieldImplementors = []string{"MergedPropertyField"} func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedPropertyField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37852,32 +38979,63 @@ func (ec *executionContext) _MergedPropertyField(ctx 
context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("MergedPropertyField") case "schemaId": - out.Values[i] = ec._MergedPropertyField_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyField_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fieldId": - out.Values[i] = ec._MergedPropertyField_fieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyField_fieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "value": - out.Values[i] = ec._MergedPropertyField_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyField_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "type": - out.Values[i] = ec._MergedPropertyField_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyField_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "links": - out.Values[i] = ec._MergedPropertyField_links(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyField_links(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "overridden": - out.Values[i] = ec._MergedPropertyField_overridden(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyField_overridden(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37885,10 +39043,16 @@ func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyField_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "field": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37896,10 +39060,16 @@ func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyField_field(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "actualValue": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37907,6 +39077,11 @@ func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyField_actualValue(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -37923,7 +39098,6 @@ var mergedPropertyGroupImplementors = []string{"MergedPropertyGroup"} func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MergedPropertyGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mergedPropertyGroupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -37931,35 +39105,81 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = 
graphql.MarshalString("MergedPropertyGroup") case "originalPropertyId": - out.Values[i] = ec._MergedPropertyGroup_originalPropertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_originalPropertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "parentPropertyId": - out.Values[i] = ec._MergedPropertyGroup_parentPropertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_parentPropertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "originalId": - out.Values[i] = ec._MergedPropertyGroup_originalId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_originalId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "parentId": - out.Values[i] = ec._MergedPropertyGroup_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "schemaGroupId": - out.Values[i] = ec._MergedPropertyGroup_schemaGroupId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_schemaGroupId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._MergedPropertyGroup_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetId": - out.Values[i] = ec._MergedPropertyGroup_linkedDatasetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_linkedDatasetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "fields": - 
out.Values[i] = ec._MergedPropertyGroup_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "groups": - out.Values[i] = ec._MergedPropertyGroup_groups(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MergedPropertyGroup_groups(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "originalProperty": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37967,10 +39187,16 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyGroup_originalProperty(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "parentProperty": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37978,10 +39204,16 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyGroup_parentProperty(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "original": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -37989,10 +39221,16 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se }() res = 
ec._MergedPropertyGroup_original(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38000,10 +39238,16 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyGroup_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38011,10 +39255,16 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyGroup_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38022,6 +39272,11 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se }() res = ec._MergedPropertyGroup_linkedDataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -38038,7 +39293,6 @@ var moveInfoboxFieldPayloadImplementors = []string{"MoveInfoboxFieldPayload"} func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MoveInfoboxFieldPayload) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, moveInfoboxFieldPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38046,17 +39300,32 @@ func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("MoveInfoboxFieldPayload") case "infoboxFieldId": - out.Values[i] = ec._MoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "layer": - out.Values[i] = ec._MoveInfoboxFieldPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MoveInfoboxFieldPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "index": - out.Values[i] = ec._MoveInfoboxFieldPayload_index(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MoveInfoboxFieldPayload_index(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38075,7 +39344,6 @@ var moveLayerPayloadImplementors = []string{"MoveLayerPayload"} func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.MoveLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, moveLayerPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38083,22 +39351,42 @@ func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("MoveLayerPayload") case "layerId": - out.Values[i] = ec._MoveLayerPayload_layerId(ctx, field, obj) + innerFunc := func(ctx context.Context) 
(res graphql.Marshaler) { + return ec._MoveLayerPayload_layerId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "fromParentLayer": - out.Values[i] = ec._MoveLayerPayload_fromParentLayer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MoveLayerPayload_fromParentLayer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "toParentLayer": - out.Values[i] = ec._MoveLayerPayload_toParentLayer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MoveLayerPayload_toParentLayer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "index": - out.Values[i] = ec._MoveLayerPayload_index(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._MoveLayerPayload_index(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38117,7 +39405,6 @@ var mutationImplementors = []string{"Mutation"} func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, mutationImplementors) - ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ Object: "Mutation", }) @@ -38125,137 +39412,462 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: field.Name, + Field: field, + }) + switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("Mutation") case "createAsset": - out.Values[i] = ec._Mutation_createAsset(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._Mutation_createAsset(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeAsset": - out.Values[i] = ec._Mutation_removeAsset(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeAsset(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "signup": - out.Values[i] = ec._Mutation_signup(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_signup(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateMe": - out.Values[i] = ec._Mutation_updateMe(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateMe(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeMyAuth": - out.Values[i] = ec._Mutation_removeMyAuth(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeMyAuth(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "deleteMe": - out.Values[i] = ec._Mutation_deleteMe(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteMe(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "createTeam": - out.Values[i] = ec._Mutation_createTeam(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTeam(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "deleteTeam": - out.Values[i] = ec._Mutation_deleteTeam(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteTeam(ctx, field) + } + + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateTeam": - out.Values[i] = ec._Mutation_updateTeam(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateTeam(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addMemberToTeam": - out.Values[i] = ec._Mutation_addMemberToTeam(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addMemberToTeam(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeMemberFromTeam": - out.Values[i] = ec._Mutation_removeMemberFromTeam(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeMemberFromTeam(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateMemberOfTeam": - out.Values[i] = ec._Mutation_updateMemberOfTeam(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateMemberOfTeam(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "createProject": - out.Values[i] = ec._Mutation_createProject(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createProject(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateProject": - out.Values[i] = ec._Mutation_updateProject(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateProject(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "publishProject": - out.Values[i] = ec._Mutation_publishProject(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_publishProject(ctx, field) 
+ } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "deleteProject": - out.Values[i] = ec._Mutation_deleteProject(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteProject(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "createScene": - out.Values[i] = ec._Mutation_createScene(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createScene(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addWidget": - out.Values[i] = ec._Mutation_addWidget(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addWidget(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateWidget": - out.Values[i] = ec._Mutation_updateWidget(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateWidget(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateWidgetAlignSystem": - out.Values[i] = ec._Mutation_updateWidgetAlignSystem(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateWidgetAlignSystem(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeWidget": - out.Values[i] = ec._Mutation_removeWidget(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeWidget(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "installPlugin": - out.Values[i] = ec._Mutation_installPlugin(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_installPlugin(ctx, field) + } + 
+ out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "uninstallPlugin": - out.Values[i] = ec._Mutation_uninstallPlugin(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uninstallPlugin(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "uploadPlugin": - out.Values[i] = ec._Mutation_uploadPlugin(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uploadPlugin(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "upgradePlugin": - out.Values[i] = ec._Mutation_upgradePlugin(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_upgradePlugin(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addCluster": - out.Values[i] = ec._Mutation_addCluster(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addCluster(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateCluster": - out.Values[i] = ec._Mutation_updateCluster(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateCluster(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeCluster": - out.Values[i] = ec._Mutation_removeCluster(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeCluster(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateDatasetSchema": - out.Values[i] = ec._Mutation_updateDatasetSchema(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateDatasetSchema(ctx, field) + } + 
+ out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "syncDataset": - out.Values[i] = ec._Mutation_syncDataset(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_syncDataset(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addDynamicDatasetSchema": - out.Values[i] = ec._Mutation_addDynamicDatasetSchema(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDynamicDatasetSchema(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addDynamicDataset": - out.Values[i] = ec._Mutation_addDynamicDataset(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDynamicDataset(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeDatasetSchema": - out.Values[i] = ec._Mutation_removeDatasetSchema(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeDatasetSchema(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "importDataset": - out.Values[i] = ec._Mutation_importDataset(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importDataset(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "importDatasetFromGoogleSheet": - out.Values[i] = ec._Mutation_importDatasetFromGoogleSheet(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importDatasetFromGoogleSheet(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addDatasetSchema": - out.Values[i] = ec._Mutation_addDatasetSchema(ctx, field) + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDatasetSchema(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updatePropertyValue": - out.Values[i] = ec._Mutation_updatePropertyValue(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updatePropertyValue(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removePropertyField": - out.Values[i] = ec._Mutation_removePropertyField(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removePropertyField(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "uploadFileToProperty": - out.Values[i] = ec._Mutation_uploadFileToProperty(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uploadFileToProperty(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "linkDatasetToPropertyValue": - out.Values[i] = ec._Mutation_linkDatasetToPropertyValue(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_linkDatasetToPropertyValue(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "unlinkPropertyValue": - out.Values[i] = ec._Mutation_unlinkPropertyValue(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_unlinkPropertyValue(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addPropertyItem": - out.Values[i] = ec._Mutation_addPropertyItem(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addPropertyItem(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, 
innerFunc) + case "movePropertyItem": - out.Values[i] = ec._Mutation_movePropertyItem(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_movePropertyItem(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removePropertyItem": - out.Values[i] = ec._Mutation_removePropertyItem(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removePropertyItem(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updatePropertyItems": - out.Values[i] = ec._Mutation_updatePropertyItems(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updatePropertyItems(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addLayerItem": - out.Values[i] = ec._Mutation_addLayerItem(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addLayerItem(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addLayerGroup": - out.Values[i] = ec._Mutation_addLayerGroup(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addLayerGroup(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeLayer": - out.Values[i] = ec._Mutation_removeLayer(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeLayer(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateLayer": - out.Values[i] = ec._Mutation_updateLayer(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateLayer(ctx, field) + } + + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "moveLayer": - out.Values[i] = ec._Mutation_moveLayer(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_moveLayer(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "createInfobox": - out.Values[i] = ec._Mutation_createInfobox(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createInfobox(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeInfobox": - out.Values[i] = ec._Mutation_removeInfobox(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeInfobox(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "addInfoboxField": - out.Values[i] = ec._Mutation_addInfoboxField(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addInfoboxField(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "moveInfoboxField": - out.Values[i] = ec._Mutation_moveInfoboxField(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_moveInfoboxField(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeInfoboxField": - out.Values[i] = ec._Mutation_removeInfoboxField(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeInfoboxField(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "importLayer": - out.Values[i] = ec._Mutation_importLayer(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importLayer(ctx, field) + } + + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "attachTagToLayer": - out.Values[i] = ec._Mutation_attachTagToLayer(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_attachTagToLayer(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "detachTagFromLayer": - out.Values[i] = ec._Mutation_detachTagFromLayer(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_detachTagFromLayer(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "createTagItem": - out.Values[i] = ec._Mutation_createTagItem(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTagItem(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "createTagGroup": - out.Values[i] = ec._Mutation_createTagGroup(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTagGroup(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "attachTagItemToGroup": - out.Values[i] = ec._Mutation_attachTagItemToGroup(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_attachTagItemToGroup(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "detachTagItemFromGroup": - out.Values[i] = ec._Mutation_detachTagItemFromGroup(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_detachTagItemFromGroup(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "updateTag": - out.Values[i] = ec._Mutation_updateTag(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._Mutation_updateTag(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "removeTag": - out.Values[i] = ec._Mutation_removeTag(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeTag(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -38271,7 +39883,6 @@ var pageInfoImplementors = []string{"PageInfo"} func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PageInfo) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pageInfoImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38279,16 +39890,36 @@ func (ec *executionContext) _PageInfo(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("PageInfo") case "startCursor": - out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PageInfo_startCursor(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "endCursor": - out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PageInfo_endCursor(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "hasNextPage": - out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PageInfo_hasNextPage(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "hasPreviousPage": - out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PageInfo_hasPreviousPage(ctx, field, obj) + } + + out.Values[i] = 
innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38307,7 +39938,6 @@ var pluginImplementors = []string{"Plugin"} func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Plugin) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pluginImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38315,53 +39945,114 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Plugin") case "id": - out.Values[i] = ec._Plugin_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._Plugin_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "name": - out.Values[i] = ec._Plugin_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "version": - out.Values[i] = ec._Plugin_version(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_version(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._Plugin_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "author": - 
out.Values[i] = ec._Plugin_author(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_author(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "repositoryUrl": - out.Values[i] = ec._Plugin_repositoryUrl(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_repositoryUrl(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertySchemaId": - out.Values[i] = ec._Plugin_propertySchemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_propertySchemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "extensions": - out.Values[i] = ec._Plugin_extensions(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_extensions(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "scenePlugin": - out.Values[i] = ec._Plugin_scenePlugin(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_scenePlugin(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedDescription": - out.Values[i] = ec._Plugin_allTranslatedDescription(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_allTranslatedDescription(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedName": - out.Values[i] = ec._Plugin_allTranslatedName(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Plugin_allTranslatedName(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38369,10 +40060,16 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o }() res = ec._Plugin_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "translatedName": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38383,10 +40080,16 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "translatedDescription": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38397,10 +40100,16 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "propertySchema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38408,6 +40117,11 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o }() res = ec._Plugin_propertySchema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -38424,7 +40138,6 @@ var pluginExtensionImplementors = []string{"PluginExtension"} func (ec 
*executionContext) _PluginExtension(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PluginExtension) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pluginExtensionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38432,53 +40145,114 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("PluginExtension") case "extensionId": - out.Values[i] = ec._PluginExtension_extensionId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_extensionId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - out.Values[i] = ec._PluginExtension_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - out.Values[i] = ec._PluginExtension_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._PluginExtension_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._PluginExtension_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == 
graphql.Null { atomic.AddUint32(&invalids, 1) } case "icon": - out.Values[i] = ec._PluginExtension_icon(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_icon(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "singleOnly": - out.Values[i] = ec._PluginExtension_singleOnly(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_singleOnly(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "widgetLayout": - out.Values[i] = ec._PluginExtension_widgetLayout(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_widgetLayout(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "visualizer": - out.Values[i] = ec._PluginExtension_visualizer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_visualizer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "propertySchemaId": - out.Values[i] = ec._PluginExtension_propertySchemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_propertySchemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "allTranslatedName": - out.Values[i] = ec._PluginExtension_allTranslatedName(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_allTranslatedName(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedDescription": - out.Values[i] = ec._PluginExtension_allTranslatedDescription(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginExtension_allTranslatedDescription(ctx, field, obj) + } + + out.Values[i] = 
innerFunc(ctx) + case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38486,10 +40260,16 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select }() res = ec._PluginExtension_plugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "sceneWidget": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38497,10 +40277,16 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select }() res = ec._PluginExtension_sceneWidget(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "propertySchema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38508,10 +40294,16 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select }() res = ec._PluginExtension_propertySchema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "translatedName": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38522,10 +40314,16 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case 
"translatedDescription": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38536,6 +40334,11 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -38552,7 +40355,6 @@ var pluginMetadataImplementors = []string{"PluginMetadata"} func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PluginMetadata) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, pluginMetadataImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38560,27 +40362,52 @@ func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("PluginMetadata") case "name": - out.Values[i] = ec._PluginMetadata_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginMetadata_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec._PluginMetadata_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginMetadata_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "author": - out.Values[i] = ec._PluginMetadata_author(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginMetadata_author(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "thumbnailUrl": - 
out.Values[i] = ec._PluginMetadata_thumbnailUrl(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginMetadata_thumbnailUrl(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "createdAt": - out.Values[i] = ec._PluginMetadata_createdAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PluginMetadata_createdAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38599,7 +40426,6 @@ var projectImplementors = []string{"Project", "Node"} func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Project) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38607,97 +40433,193 @@ func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Project") case "id": - out.Values[i] = ec._Project_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isArchived": - out.Values[i] = ec._Project_isArchived(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_isArchived(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isBasicAuthActive": - out.Values[i] = ec._Project_isBasicAuthActive(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_isBasicAuthActive(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "basicAuthUsername": - out.Values[i] = ec._Project_basicAuthUsername(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_basicAuthUsername(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "basicAuthPassword": - out.Values[i] = ec._Project_basicAuthPassword(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_basicAuthPassword(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "createdAt": - out.Values[i] = ec._Project_createdAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_createdAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "updatedAt": - out.Values[i] = ec._Project_updatedAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_updatedAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publishedAt": - out.Values[i] = ec._Project_publishedAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_publishedAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "name": - out.Values[i] = ec._Project_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._Project_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_description(ctx, field, obj) + } + 
+ out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "alias": - out.Values[i] = ec._Project_alias(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_alias(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicTitle": - out.Values[i] = ec._Project_publicTitle(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_publicTitle(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicDescription": - out.Values[i] = ec._Project_publicDescription(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_publicDescription(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicImage": - out.Values[i] = ec._Project_publicImage(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_publicImage(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicNoIndex": - out.Values[i] = ec._Project_publicNoIndex(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_publicNoIndex(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "imageUrl": - out.Values[i] = ec._Project_imageUrl(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_imageUrl(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "teamId": - out.Values[i] = ec._Project_teamId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + 
return ec._Project_teamId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "visualizer": - out.Values[i] = ec._Project_visualizer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_visualizer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publishmentStatus": - out.Values[i] = ec._Project_publishmentStatus(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Project_publishmentStatus(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "team": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38705,10 +40627,16 @@ func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, }() res = ec._Project_team(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38716,6 +40644,11 @@ func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, }() res = ec._Project_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -38732,7 +40665,6 @@ var projectAliasAvailabilityImplementors = []string{"ProjectAliasAvailability"} func (ec *executionContext) _ProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.ProjectAliasAvailability) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectAliasAvailabilityImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38740,12 +40672,22 @@ func (ec *executionContext) _ProjectAliasAvailability(ctx context.Context, sel a case "__typename": out.Values[i] = graphql.MarshalString("ProjectAliasAvailability") case "alias": - out.Values[i] = ec._ProjectAliasAvailability_alias(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectAliasAvailability_alias(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "available": - out.Values[i] = ec._ProjectAliasAvailability_available(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectAliasAvailability_available(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38764,7 +40706,6 @@ var projectConnectionImplementors = []string{"ProjectConnection"} func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectConnection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectConnectionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38772,22 +40713,42 @@ func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("ProjectConnection") case "edges": - out.Values[i] = ec._ProjectConnection_edges(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectConnection_edges(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "nodes": - out.Values[i] = ec._ProjectConnection_nodes(ctx, field, 
obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectConnection_nodes(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - out.Values[i] = ec._ProjectConnection_pageInfo(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectConnection_pageInfo(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - out.Values[i] = ec._ProjectConnection_totalCount(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectConnection_totalCount(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38806,7 +40767,6 @@ var projectEdgeImplementors = []string{"ProjectEdge"} func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectEdge) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectEdgeImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38814,12 +40774,22 @@ func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("ProjectEdge") case "cursor": - out.Values[i] = ec._ProjectEdge_cursor(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectEdge_cursor(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "node": - out.Values[i] = ec._ProjectEdge_node(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectEdge_node(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -38835,7 +40805,6 @@ var projectPayloadImplementors = 
[]string{"ProjectPayload"} func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ProjectPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, projectPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38843,7 +40812,12 @@ func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("ProjectPayload") case "project": - out.Values[i] = ec._ProjectPayload_project(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ProjectPayload_project(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -38862,7 +40836,6 @@ var propertyImplementors = []string{"Property", "Node"} func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Property) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38870,23 +40843,39 @@ func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Property") case "id": - out.Values[i] = ec._Property_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Property_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._Property_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Property_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "items": - out.Values[i] = ec._Property_items(ctx, field, 
obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Property_items(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38894,10 +40883,16 @@ func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, }() res = ec._Property_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "layer": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38905,10 +40900,16 @@ func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, }() res = ec._Property_layer(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "merged": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -38916,6 +40917,11 @@ func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, }() res = ec._Property_merged(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -38932,7 +40938,6 @@ var propertyConditionImplementors = []string{"PropertyCondition"} func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyCondition) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, propertyConditionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38940,17 +40945,32 @@ func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("PropertyCondition") case "fieldId": - out.Values[i] = ec._PropertyCondition_fieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyCondition_fieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "type": - out.Values[i] = ec._PropertyCondition_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyCondition_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "value": - out.Values[i] = ec._PropertyCondition_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyCondition_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -38966,7 +40986,6 @@ var propertyFieldImplementors = []string{"PropertyField"} func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -38974,37 +40993,73 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("PropertyField") case "id": - out.Values[i] = ec._PropertyField_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_id(ctx, field, obj) + } + + 
out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parentId": - out.Values[i] = ec._PropertyField_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._PropertyField_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fieldId": - out.Values[i] = ec._PropertyField_fieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_fieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "links": - out.Values[i] = ec._PropertyField_links(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_links(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "type": - out.Values[i] = ec._PropertyField_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "value": - out.Values[i] = ec._PropertyField_value(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyField_value(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -39012,10 +41067,16 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio }() res = ec._PropertyField_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39023,10 +41084,16 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio }() res = ec._PropertyField_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "field": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39034,10 +41101,16 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio }() res = ec._PropertyField_field(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "actualValue": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39045,6 +41118,11 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio }() res = ec._PropertyField_actualValue(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39061,7 +41139,6 @@ var propertyFieldLinkImplementors = []string{"PropertyFieldLink"} func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.SelectionSet, obj 
*gqlmodel.PropertyFieldLink) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldLinkImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39069,20 +41146,36 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("PropertyFieldLink") case "datasetId": - out.Values[i] = ec._PropertyFieldLink_datasetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyFieldLink_datasetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "datasetSchemaId": - out.Values[i] = ec._PropertyFieldLink_datasetSchemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyFieldLink_datasetSchemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "datasetSchemaFieldId": - out.Values[i] = ec._PropertyFieldLink_datasetSchemaFieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyFieldLink_datasetSchemaFieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "dataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39090,10 +41183,16 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele }() res = ec._PropertyFieldLink_dataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "datasetField": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39101,10 +41200,16 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele }() res = ec._PropertyFieldLink_datasetField(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "datasetSchema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39112,10 +41217,16 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele }() res = ec._PropertyFieldLink_datasetSchema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "datasetSchemaField": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39123,6 +41234,11 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele }() res = ec._PropertyFieldLink_datasetSchemaField(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39139,7 +41255,6 @@ var propertyFieldPayloadImplementors = []string{"PropertyFieldPayload"} func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyFieldPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyFieldPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39147,12 +41262,22 @@ func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.S case "__typename": 
out.Values[i] = graphql.MarshalString("PropertyFieldPayload") case "property": - out.Values[i] = ec._PropertyFieldPayload_property(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyFieldPayload_property(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "propertyField": - out.Values[i] = ec._PropertyFieldPayload_propertyField(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyFieldPayload_propertyField(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -39168,7 +41293,6 @@ var propertyGroupImplementors = []string{"PropertyGroup", "PropertyItem"} func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39176,28 +41300,49 @@ func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("PropertyGroup") case "id": - out.Values[i] = ec._PropertyGroup_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroup_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._PropertyGroup_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroup_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaGroupId": - out.Values[i] = ec._PropertyGroup_schemaGroupId(ctx, field, obj) + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec._PropertyGroup_schemaGroupId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - out.Values[i] = ec._PropertyGroup_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroup_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39205,10 +41350,16 @@ func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.Selectio }() res = ec._PropertyGroup_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "schemaGroup": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39216,6 +41367,11 @@ func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.Selectio }() res = ec._PropertyGroup_schemaGroup(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39232,7 +41388,6 @@ var propertyGroupListImplementors = []string{"PropertyGroupList", "PropertyItem" func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyGroupList) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyGroupListImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39240,28 
+41395,49 @@ func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("PropertyGroupList") case "id": - out.Values[i] = ec._PropertyGroupList_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroupList_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._PropertyGroupList_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroupList_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaGroupId": - out.Values[i] = ec._PropertyGroupList_schemaGroupId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroupList_schemaGroupId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "groups": - out.Values[i] = ec._PropertyGroupList_groups(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyGroupList_groups(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39269,10 +41445,16 @@ func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.Sele }() res = ec._PropertyGroupList_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "schemaGroup": field := field - out.Concurrently(i, func() (res 
graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39280,6 +41462,11 @@ func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.Sele }() res = ec._PropertyGroupList_schemaGroup(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39296,7 +41483,6 @@ var propertyItemPayloadImplementors = []string{"PropertyItemPayload"} func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyItemPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertyItemPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39304,12 +41490,22 @@ func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("PropertyItemPayload") case "property": - out.Values[i] = ec._PropertyItemPayload_property(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyItemPayload_property(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "propertyItem": - out.Values[i] = ec._PropertyItemPayload_propertyItem(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyItemPayload_propertyItem(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -39325,7 +41521,6 @@ var propertyLinkableFieldsImplementors = []string{"PropertyLinkableFields"} func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertyLinkableFields) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, propertyLinkableFieldsImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39333,17 +41528,33 @@ func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("PropertyLinkableFields") case "schemaId": - out.Values[i] = ec._PropertyLinkableFields_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyLinkableFields_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "latlng": - out.Values[i] = ec._PropertyLinkableFields_latlng(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyLinkableFields_latlng(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "url": - out.Values[i] = ec._PropertyLinkableFields_url(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertyLinkableFields_url(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "latlngField": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39351,10 +41562,16 @@ func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast }() res = ec._PropertyLinkableFields_latlngField(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "urlField": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39362,10 +41579,16 @@ func (ec *executionContext) 
_PropertyLinkableFields(ctx context.Context, sel ast }() res = ec._PropertyLinkableFields_urlField(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39373,6 +41596,11 @@ func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast }() res = ec._PropertyLinkableFields_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39389,7 +41617,6 @@ var propertySchemaImplementors = []string{"PropertySchema"} func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchema) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39397,17 +41624,32 @@ func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("PropertySchema") case "id": - out.Values[i] = ec._PropertySchema_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchema_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "groups": - out.Values[i] = ec._PropertySchema_groups(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchema_groups(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "linkableFields": - out.Values[i] = ec._PropertySchema_linkableFields(ctx, field, obj) + 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchema_linkableFields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -39426,7 +41668,6 @@ var propertySchemaFieldImplementors = []string{"PropertySchemaField"} func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaField) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39434,48 +41675,119 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("PropertySchemaField") case "fieldId": - out.Values[i] = ec._PropertySchemaField_fieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_fieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - out.Values[i] = ec._PropertySchemaField_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "title": - out.Values[i] = ec._PropertySchemaField_title(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_title(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - out.Values[i] = ec._PropertySchemaField_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_description(ctx, field, obj) + } + + out.Values[i] 
= innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "prefix": - out.Values[i] = ec._PropertySchemaField_prefix(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_prefix(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "suffix": - out.Values[i] = ec._PropertySchemaField_suffix(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_suffix(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "defaultValue": - out.Values[i] = ec._PropertySchemaField_defaultValue(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_defaultValue(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "ui": - out.Values[i] = ec._PropertySchemaField_ui(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_ui(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "min": - out.Values[i] = ec._PropertySchemaField_min(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_min(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "max": - out.Values[i] = ec._PropertySchemaField_max(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_max(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "choices": - out.Values[i] = ec._PropertySchemaField_choices(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_choices(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "isAvailableIf": - out.Values[i] = ec._PropertySchemaField_isAvailableIf(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._PropertySchemaField_isAvailableIf(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedTitle": - out.Values[i] = ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedDescription": - out.Values[i] = ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "translatedTitle": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39486,10 +41798,16 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "translatedDescription": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39500,6 +41818,11 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39516,7 +41839,6 @@ var propertySchemaFieldChoiceImplementors = []string{"PropertySchemaFieldChoice" func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, propertySchemaFieldChoiceImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39524,22 +41846,43 @@ func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("PropertySchemaFieldChoice") case "key": - out.Values[i] = ec._PropertySchemaFieldChoice_key(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaFieldChoice_key(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "title": - out.Values[i] = ec._PropertySchemaFieldChoice_title(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaFieldChoice_title(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "icon": - out.Values[i] = ec._PropertySchemaFieldChoice_icon(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaFieldChoice_icon(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedTitle": - out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "translatedTitle": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39550,6 +41893,11 @@ func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { 
+ return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39566,7 +41914,6 @@ var propertySchemaGroupImplementors = []string{"PropertySchemaGroup"} func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PropertySchemaGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, propertySchemaGroupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39574,38 +41921,84 @@ func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("PropertySchemaGroup") case "schemaGroupId": - out.Values[i] = ec._PropertySchemaGroup_schemaGroupId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_schemaGroupId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - out.Values[i] = ec._PropertySchemaGroup_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - out.Values[i] = ec._PropertySchemaGroup_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isList": - out.Values[i] = ec._PropertySchemaGroup_isList(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_isList(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case 
"isAvailableIf": - out.Values[i] = ec._PropertySchemaGroup_isAvailableIf(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_isAvailableIf(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "title": - out.Values[i] = ec._PropertySchemaGroup_title(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_title(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "allTranslatedTitle": - out.Values[i] = ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "representativeFieldId": - out.Values[i] = ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "representativeField": - out.Values[i] = ec._PropertySchemaGroup_representativeField(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._PropertySchemaGroup_representativeField(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "schema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39613,10 +42006,16 @@ func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.Se }() res = ec._PropertySchemaGroup_schema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "translatedTitle": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39627,6 +42026,11 @@ func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.Se atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39643,7 +42047,6 @@ var queryImplementors = []string{"Query"} func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, queryImplementors) - ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ Object: "Query", }) @@ -39651,12 +42054,18 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: field.Name, + Field: field, + }) + switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("Query") case "me": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39664,10 +42073,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_me(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "node": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39675,10 +42093,19 @@ func (ec *executionContext) 
_Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_node(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "nodes": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39689,10 +42116,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "propertySchema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39700,10 +42136,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_propertySchema(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "propertySchemas": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39714,10 +42159,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return 
ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39725,10 +42179,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_plugin(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "plugins": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39739,10 +42202,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "layer": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39750,10 +42222,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_layer(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "scene": field := field - out.Concurrently(i, func() (res 
graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39761,10 +42242,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_scene(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "assets": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39775,10 +42265,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "projects": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39789,10 +42288,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "datasetSchemas": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39803,10 +42311,19 @@ func 
(ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "datasets": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39817,21 +42334,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res - }) - case "sceneLock": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._Query_sceneLock(ctx, field) - return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "dynamicDatasetSchemas": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39842,10 +42357,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "searchUser": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -39853,10 +42377,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr }() res = ec._Query_searchUser(ctx, field) return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "checkProjectAlias": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39867,10 +42400,19 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "installablePlugins": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -39881,11 +42423,29 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr atomic.AddUint32(&invalids, 1) } return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) + } + + out.Concurrently(i, func() graphql.Marshaler { + return rrm(innerCtx) }) case "__type": - out.Values[i] = ec._Query___type(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___type(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + case "__schema": - out.Values[i] = ec._Query___schema(ctx, field) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + 
return ec._Query___schema(ctx, field) + } + + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -39901,7 +42461,6 @@ var rectImplementors = []string{"Rect"} func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Rect) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, rectImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39909,22 +42468,42 @@ func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Rect") case "west": - out.Values[i] = ec._Rect_west(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Rect_west(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "south": - out.Values[i] = ec._Rect_south(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Rect_south(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "east": - out.Values[i] = ec._Rect_east(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Rect_east(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "north": - out.Values[i] = ec._Rect_north(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Rect_north(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -39943,7 +42522,6 @@ var removeAssetPayloadImplementors = []string{"RemoveAssetPayload"} func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveAssetPayload) graphql.Marshaler { fields := 
graphql.CollectFields(ec.OperationContext, sel, removeAssetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39951,7 +42529,12 @@ func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveAssetPayload") case "assetId": - out.Values[i] = ec._RemoveAssetPayload_assetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveAssetPayload_assetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -39970,7 +42553,6 @@ var removeClusterPayloadImplementors = []string{"RemoveClusterPayload"} func (ec *executionContext) _RemoveClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveClusterPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeClusterPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -39978,12 +42560,22 @@ func (ec *executionContext) _RemoveClusterPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("RemoveClusterPayload") case "scene": - out.Values[i] = ec._RemoveClusterPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveClusterPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "clusterId": - out.Values[i] = ec._RemoveClusterPayload_clusterId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveClusterPayload_clusterId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40002,7 +42594,6 @@ var removeDatasetSchemaPayloadImplementors = []string{"RemoveDatasetSchemaPayloa func (ec *executionContext) 
_RemoveDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeDatasetSchemaPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40010,7 +42601,12 @@ func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveDatasetSchemaPayload") case "schemaId": - out.Values[i] = ec._RemoveDatasetSchemaPayload_schemaId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveDatasetSchemaPayload_schemaId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40029,7 +42625,6 @@ var removeInfoboxFieldPayloadImplementors = []string{"RemoveInfoboxFieldPayload" func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveInfoboxFieldPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxFieldPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40037,12 +42632,22 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveInfoboxFieldPayload") case "infoboxFieldId": - out.Values[i] = ec._RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "layer": - out.Values[i] = ec._RemoveInfoboxFieldPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveInfoboxFieldPayload_layer(ctx, 
field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40061,7 +42666,6 @@ var removeInfoboxPayloadImplementors = []string{"RemoveInfoboxPayload"} func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveInfoboxPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeInfoboxPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40069,7 +42673,12 @@ func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("RemoveInfoboxPayload") case "layer": - out.Values[i] = ec._RemoveInfoboxPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveInfoboxPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40088,7 +42697,6 @@ var removeLayerPayloadImplementors = []string{"RemoveLayerPayload"} func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeLayerPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40096,12 +42704,22 @@ func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveLayerPayload") case "layerId": - out.Values[i] = ec._RemoveLayerPayload_layerId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveLayerPayload_layerId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - out.Values[i] = ec._RemoveLayerPayload_parentLayer(ctx, field, obj) + 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveLayerPayload_parentLayer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40120,7 +42738,6 @@ var removeMemberFromTeamPayloadImplementors = []string{"RemoveMemberFromTeamPayl func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveMemberFromTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeMemberFromTeamPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40128,7 +42745,12 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, se case "__typename": out.Values[i] = graphql.MarshalString("RemoveMemberFromTeamPayload") case "team": - out.Values[i] = ec._RemoveMemberFromTeamPayload_team(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveMemberFromTeamPayload_team(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40147,7 +42769,6 @@ var removeTagPayloadImplementors = []string{"RemoveTagPayload"} func (ec *executionContext) _RemoveTagPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveTagPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeTagPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40155,12 +42776,22 @@ func (ec *executionContext) _RemoveTagPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("RemoveTagPayload") case "tagId": - out.Values[i] = ec._RemoveTagPayload_tagId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveTagPayload_tagId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { invalids++ } case "updatedLayers": - out.Values[i] = ec._RemoveTagPayload_updatedLayers(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveTagPayload_updatedLayers(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40179,7 +42810,6 @@ var removeWidgetPayloadImplementors = []string{"RemoveWidgetPayload"} func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.RemoveWidgetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, removeWidgetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40187,12 +42817,22 @@ func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("RemoveWidgetPayload") case "scene": - out.Values[i] = ec._RemoveWidgetPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveWidgetPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "widgetId": - out.Values[i] = ec._RemoveWidgetPayload_widgetId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._RemoveWidgetPayload_widgetId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40211,7 +42851,6 @@ var sceneImplementors = []string{"Scene", "Node"} func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Scene) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, sceneImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40219,60 +42858,116 @@ func (ec *executionContext) _Scene(ctx context.Context, sel 
ast.SelectionSet, ob case "__typename": out.Values[i] = graphql.MarshalString("Scene") case "id": - out.Values[i] = ec._Scene_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "projectId": - out.Values[i] = ec._Scene_projectId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_projectId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "teamId": - out.Values[i] = ec._Scene_teamId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_teamId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._Scene_propertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "createdAt": - out.Values[i] = ec._Scene_createdAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_createdAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "updatedAt": - out.Values[i] = ec._Scene_updatedAt(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_updatedAt(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "rootLayerId": - out.Values[i] = ec._Scene_rootLayerId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._Scene_rootLayerId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "widgets": - out.Values[i] = ec._Scene_widgets(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_widgets(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "plugins": - out.Values[i] = ec._Scene_plugins(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_plugins(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "widgetAlignSystem": - out.Values[i] = ec._Scene_widgetAlignSystem(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_widgetAlignSystem(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "dynamicDatasetSchemas": - out.Values[i] = ec._Scene_dynamicDatasetSchemas(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_dynamicDatasetSchemas(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "project": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40280,10 +42975,16 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob }() res = ec._Scene_project(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "team": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) @@ -40291,10 +42992,16 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob }() res = ec._Scene_team(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40302,10 +43009,16 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob }() res = ec._Scene_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "rootLayer": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40313,24 +43026,16 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob }() res = ec._Scene_rootLayer(ctx, field, obj) return res - }) - case "lockMode": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._Scene_lockMode(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "datasetSchemas": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40341,15 +43046,26 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return 
innerFunc(ctx) + }) case "tagIds": - out.Values[i] = ec._Scene_tagIds(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_tagIds(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tags": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40360,9 +43076,19 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "clusters": - out.Values[i] = ec._Scene_clusters(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Scene_clusters(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } @@ -40381,7 +43107,6 @@ var scenePluginImplementors = []string{"ScenePlugin"} func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.ScenePlugin) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, scenePluginImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40389,15 +43114,26 @@ func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("ScenePlugin") case "pluginId": - out.Values[i] = ec._ScenePlugin_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ScenePlugin_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._ScenePlugin_propertyId(ctx, 
field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._ScenePlugin_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40405,10 +43141,16 @@ func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionS }() res = ec._ScenePlugin_plugin(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40416,6 +43158,11 @@ func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionS }() res = ec._ScenePlugin_property(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40432,7 +43179,6 @@ var sceneWidgetImplementors = []string{"SceneWidget"} func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SceneWidget) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, sceneWidgetImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40440,38 +43186,69 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("SceneWidget") case "id": - out.Values[i] = ec._SceneWidget_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SceneWidget_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - out.Values[i] = ec._SceneWidget_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SceneWidget_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extensionId": - out.Values[i] = ec._SceneWidget_extensionId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SceneWidget_extensionId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - out.Values[i] = ec._SceneWidget_propertyId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SceneWidget_propertyId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "enabled": - out.Values[i] = ec._SceneWidget_enabled(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SceneWidget_enabled(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extended": - out.Values[i] = ec._SceneWidget_extended(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SceneWidget_extended(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "plugin": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40479,10 +43256,16 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS }() res = ec._SceneWidget_plugin(ctx, field, obj) return res + 
} + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "extension": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40490,10 +43273,16 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS }() res = ec._SceneWidget_extension(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "property": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40501,44 +43290,12 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS }() res = ec._SceneWidget_property(ctx, field, obj) return res - }) - default: - panic("unknown field " + strconv.Quote(field.Name)) - } - } - out.Dispatch() - if invalids > 0 { - return graphql.Null - } - return out -} - -var searchedUserImplementors = []string{"SearchedUser"} + } -func (ec *executionContext) _SearchedUser(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SearchedUser) graphql.Marshaler { - fields := graphql.CollectFields(ec.OperationContext, sel, searchedUserImplementors) + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) - out := graphql.NewFieldSet(fields) - var invalids uint32 - for i, field := range fields { - switch field.Name { - case "__typename": - out.Values[i] = graphql.MarshalString("SearchedUser") - case "userId": - out.Values[i] = ec._SearchedUser_userId(ctx, field, obj) - if out.Values[i] == graphql.Null { - invalids++ - } - case "userName": - out.Values[i] = ec._SearchedUser_userName(ctx, field, obj) - if out.Values[i] == graphql.Null { - invalids++ - } - case "userEmail": - out.Values[i] = 
ec._SearchedUser_userEmail(ctx, field, obj) - if out.Values[i] == graphql.Null { - invalids++ - } + }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -40554,7 +43311,6 @@ var signupPayloadImplementors = []string{"SignupPayload"} func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SignupPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, signupPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40562,12 +43318,22 @@ func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("SignupPayload") case "user": - out.Values[i] = ec._SignupPayload_user(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SignupPayload_user(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "team": - out.Values[i] = ec._SignupPayload_team(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SignupPayload_team(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40586,7 +43352,6 @@ var syncDatasetPayloadImplementors = []string{"SyncDatasetPayload"} func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.SyncDatasetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, syncDatasetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40594,22 +43359,42 @@ func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("SyncDatasetPayload") case "sceneId": - out.Values[i] = ec._SyncDatasetPayload_sceneId(ctx, field, obj) + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec._SyncDatasetPayload_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "url": - out.Values[i] = ec._SyncDatasetPayload_url(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SyncDatasetPayload_url(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "datasetSchema": - out.Values[i] = ec._SyncDatasetPayload_datasetSchema(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SyncDatasetPayload_datasetSchema(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "dataset": - out.Values[i] = ec._SyncDatasetPayload_dataset(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._SyncDatasetPayload_dataset(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40628,7 +43413,6 @@ var tagGroupImplementors = []string{"TagGroup", "Tag"} func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagGroup) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, tagGroupImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40636,25 +43420,46 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("TagGroup") case "id": - out.Values[i] = ec._TagGroup_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagGroup_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._TagGroup_sceneId(ctx, field, obj) + innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagGroup_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "label": - out.Values[i] = ec._TagGroup_label(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagGroup_label(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tagIds": - out.Values[i] = ec._TagGroup_tagIds(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagGroup_tagIds(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "tags": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40665,10 +43470,16 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "scene": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40676,10 +43487,16 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, }() res = ec._TagGroup_scene(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "layers": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40690,6 +43507,11 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel 
ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40706,7 +43528,6 @@ var tagItemImplementors = []string{"TagItem", "Tag"} func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TagItem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, tagItemImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40714,31 +43535,67 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("TagItem") case "id": - out.Values[i] = ec._TagItem_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagItem_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - out.Values[i] = ec._TagItem_sceneId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagItem_sceneId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "label": - out.Values[i] = ec._TagItem_label(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagItem_label(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parentId": - out.Values[i] = ec._TagItem_parentId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagItem_parentId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetID": - out.Values[i] = ec._TagItem_linkedDatasetID(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._TagItem_linkedDatasetID(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetSchemaID": - out.Values[i] = ec._TagItem_linkedDatasetSchemaID(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagItem_linkedDatasetSchemaID(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetFieldID": - out.Values[i] = ec._TagItem_linkedDatasetFieldID(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TagItem_linkedDatasetFieldID(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "linkedDatasetSchema": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40746,10 +43603,16 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, }() res = ec._TagItem_linkedDatasetSchema(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDataset": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40757,10 +43620,16 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, }() res = ec._TagItem_linkedDataset(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "linkedDatasetField": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40768,10 +43637,16 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, }() res = 
ec._TagItem_linkedDatasetField(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "parent": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40779,10 +43654,16 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, }() res = ec._TagItem_parent(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "layers": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40793,6 +43674,11 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40809,7 +43695,6 @@ var teamImplementors = []string{"Team", "Node"} func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Team) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, teamImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40817,28 +43702,49 @@ func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Team") case "id": - out.Values[i] = ec._Team_id(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Team_id(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - out.Values[i] = ec._Team_name(ctx, 
field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Team_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "members": - out.Values[i] = ec._Team_members(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Team_members(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "personal": - out.Values[i] = ec._Team_personal(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Team_personal(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "assets": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40849,10 +43755,16 @@ func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) case "projects": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40863,6 +43775,11 @@ func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj atomic.AddUint32(&invalids, 1) } return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40879,7 +43796,6 @@ var teamMemberImplementors = []string{"TeamMember"} func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.TeamMember) 
graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, teamMemberImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40887,18 +43803,29 @@ func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("TeamMember") case "userId": - out.Values[i] = ec._TeamMember_userId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TeamMember_userId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "role": - out.Values[i] = ec._TeamMember_role(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._TeamMember_role(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "user": field := field - out.Concurrently(i, func() (res graphql.Marshaler) { + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -40906,6 +43833,11 @@ func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSe }() res = ec._TeamMember_user(ctx, field, obj) return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40922,7 +43854,6 @@ var typographyImplementors = []string{"Typography"} func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Typography) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, typographyImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40930,21 +43861,61 @@ func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSe case "__typename": 
out.Values[i] = graphql.MarshalString("Typography") case "fontFamily": - out.Values[i] = ec._Typography_fontFamily(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_fontFamily(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "fontWeight": - out.Values[i] = ec._Typography_fontWeight(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_fontWeight(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "fontSize": - out.Values[i] = ec._Typography_fontSize(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_fontSize(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "color": - out.Values[i] = ec._Typography_color(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_color(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "textAlign": - out.Values[i] = ec._Typography_textAlign(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_textAlign(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "bold": - out.Values[i] = ec._Typography_bold(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_bold(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "italic": - out.Values[i] = ec._Typography_italic(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_italic(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "underline": - out.Values[i] = ec._Typography_underline(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._Typography_underline(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -40960,7 +43931,6 
@@ var uninstallPluginPayloadImplementors = []string{"UninstallPluginPayload"} func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UninstallPluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, uninstallPluginPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -40968,12 +43938,22 @@ func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("UninstallPluginPayload") case "pluginId": - out.Values[i] = ec._UninstallPluginPayload_pluginId(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UninstallPluginPayload_pluginId(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "scene": - out.Values[i] = ec._UninstallPluginPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UninstallPluginPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -40992,7 +43972,6 @@ var updateClusterPayloadImplementors = []string{"UpdateClusterPayload"} func (ec *executionContext) _UpdateClusterPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateClusterPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateClusterPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41000,12 +43979,22 @@ func (ec *executionContext) _UpdateClusterPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("UpdateClusterPayload") case "scene": - out.Values[i] = ec._UpdateClusterPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._UpdateClusterPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "cluster": - out.Values[i] = ec._UpdateClusterPayload_cluster(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateClusterPayload_cluster(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41024,7 +44013,6 @@ var updateDatasetSchemaPayloadImplementors = []string{"UpdateDatasetSchemaPayloa func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateDatasetSchemaPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateDatasetSchemaPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41032,7 +44020,12 @@ func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("UpdateDatasetSchemaPayload") case "datasetSchema": - out.Values[i] = ec._UpdateDatasetSchemaPayload_datasetSchema(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateDatasetSchemaPayload_datasetSchema(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41048,7 +44041,6 @@ var updateLayerPayloadImplementors = []string{"UpdateLayerPayload"} func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateLayerPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateLayerPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41056,7 +44048,12 @@ func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = 
graphql.MarshalString("UpdateLayerPayload") case "layer": - out.Values[i] = ec._UpdateLayerPayload_layer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateLayerPayload_layer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41075,15 +44072,19 @@ var updateMePayloadImplementors = []string{"UpdateMePayload"} func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateMePayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateMePayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("UpdateMePayload") - case "user": - out.Values[i] = ec._UpdateMePayload_user(ctx, field, obj) + case "me": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateMePayload_me(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41102,7 +44103,6 @@ var updateMemberOfTeamPayloadImplementors = []string{"UpdateMemberOfTeamPayload" func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateMemberOfTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateMemberOfTeamPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41110,7 +44110,12 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("UpdateMemberOfTeamPayload") case "team": - out.Values[i] = ec._UpdateMemberOfTeamPayload_team(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateMemberOfTeamPayload_team(ctx, field, obj) + } + + out.Values[i] = 
innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41129,7 +44134,6 @@ var updateTagPayloadImplementors = []string{"UpdateTagPayload"} func (ec *executionContext) _UpdateTagPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTagPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateTagPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41137,7 +44141,12 @@ func (ec *executionContext) _UpdateTagPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("UpdateTagPayload") case "tag": - out.Values[i] = ec._UpdateTagPayload_tag(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateTagPayload_tag(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41156,7 +44165,6 @@ var updateTeamPayloadImplementors = []string{"UpdateTeamPayload"} func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateTeamPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateTeamPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41164,7 +44172,12 @@ func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("UpdateTeamPayload") case "team": - out.Values[i] = ec._UpdateTeamPayload_team(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateTeamPayload_team(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41183,7 +44196,6 @@ var updateWidgetAlignSystemPayloadImplementors = []string{"UpdateWidgetAlignSyst func (ec *executionContext) _UpdateWidgetAlignSystemPayload(ctx context.Context, sel 
ast.SelectionSet, obj *gqlmodel.UpdateWidgetAlignSystemPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetAlignSystemPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41191,7 +44203,12 @@ func (ec *executionContext) _UpdateWidgetAlignSystemPayload(ctx context.Context, case "__typename": out.Values[i] = graphql.MarshalString("UpdateWidgetAlignSystemPayload") case "scene": - out.Values[i] = ec._UpdateWidgetAlignSystemPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateWidgetAlignSystemPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41210,7 +44227,6 @@ var updateWidgetPayloadImplementors = []string{"UpdateWidgetPayload"} func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpdateWidgetPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, updateWidgetPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41218,12 +44234,22 @@ func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("UpdateWidgetPayload") case "scene": - out.Values[i] = ec._UpdateWidgetPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateWidgetPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "sceneWidget": - out.Values[i] = ec._UpdateWidgetPayload_sceneWidget(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpdateWidgetPayload_sceneWidget(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ 
-41242,7 +44268,6 @@ var upgradePluginPayloadImplementors = []string{"UpgradePluginPayload"} func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UpgradePluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, upgradePluginPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41250,12 +44275,22 @@ func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("UpgradePluginPayload") case "scene": - out.Values[i] = ec._UpgradePluginPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpgradePluginPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "scenePlugin": - out.Values[i] = ec._UpgradePluginPayload_scenePlugin(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UpgradePluginPayload_scenePlugin(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41274,7 +44309,6 @@ var uploadPluginPayloadImplementors = []string{"UploadPluginPayload"} func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.UploadPluginPayload) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, uploadPluginPayloadImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41282,17 +44316,32 @@ func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("UploadPluginPayload") case "plugin": - out.Values[i] = ec._UploadPluginPayload_plugin(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._UploadPluginPayload_plugin(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "scene": - out.Values[i] = ec._UploadPluginPayload_scene(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UploadPluginPayload_scene(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "scenePlugin": - out.Values[i] = ec._UploadPluginPayload_scenePlugin(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._UploadPluginPayload_scenePlugin(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41311,7 +44360,6 @@ var userImplementors = []string{"User", "Node"} func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.User) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, userImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41319,68 +44367,35 @@ func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("User") case "id": - out.Values[i] = ec._User_id(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - case "name": - out.Values[i] = ec._User_name(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._User_id(ctx, field, obj) } - case "email": - out.Values[i] = ec._User_email(ctx, field, obj) + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + invalids++ } - case "lang": - out.Values[i] = ec._User_lang(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + case "name": + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec._User_name(ctx, field, obj) } - case "theme": - out.Values[i] = ec._User_theme(ctx, field, obj) + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + invalids++ } - case "myTeamId": - out.Values[i] = ec._User_myTeamId(ctx, field, obj) - if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + case "email": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._User_email(ctx, field, obj) } - case "auths": - out.Values[i] = ec._User_auths(ctx, field, obj) + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { - atomic.AddUint32(&invalids, 1) + invalids++ } - case "teams": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._User_teams(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res - }) - case "myTeam": - field := field - out.Concurrently(i, func() (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._User_myTeam(ctx, field, obj) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res - }) default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41396,7 +44411,6 @@ var widgetAlignSystemImplementors = []string{"WidgetAlignSystem"} func (ec *executionContext) _WidgetAlignSystem(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetAlignSystem) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetAlignSystemImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41404,9 +44418,19 @@ func (ec *executionContext) _WidgetAlignSystem(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("WidgetAlignSystem") case 
"inner": - out.Values[i] = ec._WidgetAlignSystem_inner(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetAlignSystem_inner(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "outer": - out.Values[i] = ec._WidgetAlignSystem_outer(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetAlignSystem_outer(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41422,7 +44446,6 @@ var widgetAreaImplementors = []string{"WidgetArea"} func (ec *executionContext) _WidgetArea(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetArea) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetAreaImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41430,12 +44453,22 @@ func (ec *executionContext) _WidgetArea(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("WidgetArea") case "widgetIds": - out.Values[i] = ec._WidgetArea_widgetIds(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetArea_widgetIds(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "align": - out.Values[i] = ec._WidgetArea_align(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetArea_align(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41454,7 +44487,6 @@ var widgetExtendableImplementors = []string{"WidgetExtendable"} func (ec *executionContext) _WidgetExtendable(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetExtendable) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetExtendableImplementors) - out := 
graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41462,12 +44494,22 @@ func (ec *executionContext) _WidgetExtendable(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("WidgetExtendable") case "vertically": - out.Values[i] = ec._WidgetExtendable_vertically(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetExtendable_vertically(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "horizontally": - out.Values[i] = ec._WidgetExtendable_horizontally(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetExtendable_horizontally(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41486,7 +44528,6 @@ var widgetLayoutImplementors = []string{"WidgetLayout"} func (ec *executionContext) _WidgetLayout(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetLayout) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetLayoutImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41494,22 +44535,42 @@ func (ec *executionContext) _WidgetLayout(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("WidgetLayout") case "extendable": - out.Values[i] = ec._WidgetLayout_extendable(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLayout_extendable(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "extended": - out.Values[i] = ec._WidgetLayout_extended(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLayout_extended(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { 
invalids++ } case "floating": - out.Values[i] = ec._WidgetLayout_floating(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLayout_floating(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "defaultLocation": - out.Values[i] = ec._WidgetLayout_defaultLocation(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLayout_defaultLocation(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41525,7 +44586,6 @@ var widgetLocationImplementors = []string{"WidgetLocation"} func (ec *executionContext) _WidgetLocation(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetLocation) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetLocationImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41533,17 +44593,32 @@ func (ec *executionContext) _WidgetLocation(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("WidgetLocation") case "zone": - out.Values[i] = ec._WidgetLocation_zone(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLocation_zone(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "section": - out.Values[i] = ec._WidgetLocation_section(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLocation_section(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "area": - out.Values[i] = ec._WidgetLocation_area(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetLocation_area(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { invalids++ } @@ -41562,7 +44637,6 @@ var widgetSectionImplementors = []string{"WidgetSection"} func (ec *executionContext) _WidgetSection(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetSection) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetSectionImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41570,11 +44644,26 @@ func (ec *executionContext) _WidgetSection(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("WidgetSection") case "top": - out.Values[i] = ec._WidgetSection_top(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetSection_top(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "middle": - out.Values[i] = ec._WidgetSection_middle(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetSection_middle(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "bottom": - out.Values[i] = ec._WidgetSection_bottom(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetSection_bottom(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41590,7 +44679,6 @@ var widgetZoneImplementors = []string{"WidgetZone"} func (ec *executionContext) _WidgetZone(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.WidgetZone) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, widgetZoneImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41598,11 +44686,26 @@ func (ec *executionContext) _WidgetZone(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("WidgetZone") case "left": - out.Values[i] = ec._WidgetZone_left(ctx, field, obj) + 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetZone_left(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "center": - out.Values[i] = ec._WidgetZone_center(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetZone_center(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "right": - out.Values[i] = ec._WidgetZone_right(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec._WidgetZone_right(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41618,7 +44721,6 @@ var __DirectiveImplementors = []string{"__Directive"} func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41626,24 +44728,49 @@ func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("__Directive") case "name": - out.Values[i] = ec.___Directive_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___Directive_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "locations": - out.Values[i] = ec.___Directive_locations(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_locations(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if 
out.Values[i] == graphql.Null { invalids++ } case "args": - out.Values[i] = ec.___Directive_args(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_args(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "isRepeatable": - out.Values[i] = ec.___Directive_isRepeatable(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Directive_isRepeatable(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41662,7 +44789,6 @@ var __EnumValueImplementors = []string{"__EnumValue"} func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41670,19 +44796,39 @@ func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("__EnumValue") case "name": - out.Values[i] = ec.___EnumValue_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___EnumValue_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "isDeprecated": - out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_isDeprecated(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case 
"deprecationReason": - out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___EnumValue_deprecationReason(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41698,7 +44844,6 @@ var __FieldImplementors = []string{"__Field"} func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41706,29 +44851,59 @@ func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("__Field") case "name": - out.Values[i] = ec.___Field_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___Field_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "args": - out.Values[i] = ec.___Field_args(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_args(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "type": - out.Values[i] = ec.___Field_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "isDeprecated": - out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) + innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { + return ec.___Field_isDeprecated(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "deprecationReason": - out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Field_deprecationReason(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41744,7 +44919,6 @@ var __InputValueImplementors = []string{"__InputValue"} func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41752,19 +44926,39 @@ func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("__InputValue") case "name": - out.Values[i] = ec.___InputValue_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "description": - out.Values[i] = ec.___InputValue_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "type": - out.Values[i] = ec.___InputValue_type(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___InputValue_type(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "defaultValue": - out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + return ec.___InputValue_defaultValue(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -41780,29 +44974,60 @@ var __SchemaImplementors = []string{"__Schema"} func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { switch field.Name { case "__typename": out.Values[i] = graphql.MarshalString("__Schema") + case "description": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "types": - out.Values[i] = ec.___Schema_types(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_types(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "queryType": - out.Values[i] = ec.___Schema_queryType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_queryType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "mutationType": - out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_mutationType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "subscriptionType": - out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Schema_subscriptionType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "directives": - out.Values[i] = ec.___Schema_directives(ctx, field, obj) + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + return ec.___Schema_directives(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } @@ -41821,7 +45046,6 @@ var __TypeImplementors = []string{"__Type"} func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler { fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors) - out := graphql.NewFieldSet(fields) var invalids uint32 for i, field := range fields { @@ -41829,26 +45053,78 @@ func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("__Type") case "kind": - out.Values[i] = ec.___Type_kind(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_kind(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + if out.Values[i] == graphql.Null { invalids++ } case "name": - out.Values[i] = ec.___Type_name(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_name(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "description": - out.Values[i] = ec.___Type_description(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_description(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "fields": - out.Values[i] = ec.___Type_fields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_fields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "interfaces": - out.Values[i] = ec.___Type_interfaces(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_interfaces(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "possibleTypes": - out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) + innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { + return ec.___Type_possibleTypes(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "enumValues": - out.Values[i] = ec.___Type_enumValues(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_enumValues(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "inputFields": - out.Values[i] = ec.___Type_inputFields(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_inputFields(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + case "ofType": - out.Values[i] = ec.___Type_ofType(ctx, field, obj) + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_ofType(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + + case "specifiedByURL": + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + return ec.___Type_specifiedByURL(ctx, field, obj) + } + + out.Values[i] = innerFunc(ctx) + default: panic("unknown field " + strconv.Quote(field.Name)) } @@ -42643,27 +45919,28 @@ func (ec *executionContext) marshalNFileSize2int64(ctx context.Context, sel ast. 
} func (ec *executionContext) unmarshalNFloat2float64(ctx context.Context, v interface{}) (float64, error) { - res, err := graphql.UnmarshalFloat(v) + res, err := graphql.UnmarshalFloatContext(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } func (ec *executionContext) marshalNFloat2float64(ctx context.Context, sel ast.SelectionSet, v float64) graphql.Marshaler { - res := graphql.MarshalFloat(v) + res := graphql.MarshalFloatContext(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } } - return res + return graphql.WrapContextMarshaler(ctx, res) } -func (ec *executionContext) unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (id.ID, error) { - res, err := gqlmodel.UnmarshalID(v) +func (ec *executionContext) unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, v interface{}) (gqlmodel.ID, error) { + tmp, err := graphql.UnmarshalString(v) + res := gqlmodel.ID(tmp) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v id.ID) graphql.Marshaler { - res := gqlmodel.MarshalID(v) +func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ID) graphql.Marshaler { + res := graphql.MarshalString(string(v)) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") @@ -42672,20 +45949,16 @@ func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backen return res } -func (ec *executionContext) unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, v interface{}) ([]*id.ID, error) { +func (ec 
*executionContext) unmarshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, v interface{}) ([]gqlmodel.ID, error) { var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error - res := make([]*id.ID, len(vSlice)) + res := make([]gqlmodel.ID, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, vSlice[i]) + res[i], err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, vSlice[i]) if err != nil { return nil, err } @@ -42693,10 +45966,10 @@ func (ec *executionContext) unmarshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearth return res, nil } -func (ec *executionContext) marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.ID) graphql.Marshaler { +func (ec *executionContext) marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.ID) graphql.Marshaler { ret := make(graphql.Array, len(v)) for i := range v { - ret[i] = ec.marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, sel, v[i]) + ret[i] = ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, sel, v[i]) } for _, e := range ret { @@ -42708,27 +45981,6 @@ func (ec *executionContext) marshalNID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ret } -func (ec *executionContext) unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (*id.ID, error) { - res, err := gqlmodel.UnmarshalID(v) - return &res, 
graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v *id.ID) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := gqlmodel.MarshalID(*v) - if res == graphql.Null { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - } - return res -} - func (ec *executionContext) unmarshalNImportDatasetFromGoogleSheetInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetFromGoogleSheetInput(ctx context.Context, v interface{}) (gqlmodel.ImportDatasetFromGoogleSheetInput, error) { res, err := ec.unmarshalInputImportDatasetFromGoogleSheetInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) @@ -43092,184 +46344,17 @@ func (ec *executionContext) marshalNListOperation2githubแš—comแš‹reearthแš‹reear return v } -func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedInfoboxField) graphql.Marshaler { - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret 
{ - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._MergedInfoboxField(ctx, sel, v) -} - -func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyField) graphql.Marshaler { - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyField) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return 
ec._MergedPropertyField(ctx, sel, v) -} - -func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyGroup) graphql.Marshaler { - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Me) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._MergedPropertyGroup(ctx, sel, v) -} - -func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { - res, err := 
ec.unmarshalInputMoveInfoboxFieldInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerInput(ctx context.Context, v interface{}) (gqlmodel.MoveLayerInput, error) { - res, err := ec.unmarshalInputMoveLayerInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMovePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.MovePropertyItemInput, error) { - res, err := ec.unmarshalInputMovePropertyItemInput(ctx, v) - return res, graphql.ErrorOnPath(ctx, err) + return ec._Me(ctx, sel, v) } -func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Node) graphql.Marshaler { +func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedInfoboxField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43293,7 +46378,7 @@ func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘b if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, sel, v[i]) + ret[i] = ec.marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43304,30 +46389,26 @@ func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘b } wg.Wait() - return ret -} - -func (ec *executionContext) 
unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, v interface{}) (gqlmodel.NodeType, error) { - var res gqlmodel.NodeType - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } -func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.NodeType) graphql.Marshaler { - return v + return ret } -func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PageInfo) graphql.Marshaler { +func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PageInfo(ctx, sel, v) + return ec._MergedInfoboxField(ctx, sel, v) } -func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Plugin) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43351,7 +46432,7 @@ func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reeart if !isLen1 
{ defer wg.Done() } - ret[i] = ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, sel, v[i]) + ret[i] = ec.marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43371,17 +46452,17 @@ func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reeart return ret } -func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._Plugin(ctx, sel, v) + return ec._MergedPropertyField(ctx, sel, v) } -func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginExtension) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.MergedPropertyGroup) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43405,7 +46486,7 @@ func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = 
ec.marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, sel, v[i]) + ret[i] = ec.marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43425,82 +46506,123 @@ func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { +func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PluginExtension(ctx, sel, v) + return ec._MergedPropertyGroup(ctx, sel, v) } -func (ec *executionContext) unmarshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, v interface{}) (id.PluginExtensionID, error) { - res, err := gqlmodel.UnmarshalPluginExtensionID(v) +func (ec *executionContext) unmarshalNMoveInfoboxFieldInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldInput(ctx context.Context, v interface{}) (gqlmodel.MoveInfoboxFieldInput, error) { + res, err := ec.unmarshalInputMoveInfoboxFieldInput(ctx, v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNPluginExtensionID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, sel ast.SelectionSet, v id.PluginExtensionID) graphql.Marshaler { - 
res := gqlmodel.MarshalPluginExtensionID(v) - if res == graphql.Null { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") +func (ec *executionContext) unmarshalNMoveLayerInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerInput(ctx context.Context, v interface{}) (gqlmodel.MoveLayerInput, error) { + res, err := ec.unmarshalInputMoveLayerInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) unmarshalNMovePropertyItemInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMovePropertyItemInput(ctx context.Context, v interface{}) (gqlmodel.MovePropertyItemInput, error) { + res, err := ec.unmarshalInputMovePropertyItemInput(ctx, v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.Node) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) } + } - return res + wg.Wait() + + return ret } -func (ec *executionContext) unmarshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, v interface{}) (gqlmodel.PluginExtensionType, error) { - var res gqlmodel.PluginExtensionType +func 
(ec *executionContext) unmarshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, v interface{}) (gqlmodel.NodeType, error) { + var res gqlmodel.NodeType err := res.UnmarshalGQL(v) return res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PluginExtensionType) graphql.Marshaler { +func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNodeType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.NodeType) graphql.Marshaler { return v } -func (ec *executionContext) unmarshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (id.PluginID, error) { - res, err := gqlmodel.UnmarshalPluginID(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalNPluginID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v id.PluginID) graphql.Marshaler { - res := gqlmodel.MarshalPluginID(v) - if res == graphql.Null { +func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PageInfo) graphql.Marshaler { + if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } + return graphql.Null } - return res + return ec._PageInfo(ctx, sel, v) } -func (ec *executionContext) unmarshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginIDแš„(ctx context.Context, v interface{}) ([]*id.PluginID, error) { - var vSlice []interface{} - if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = 
tmp1 - } else { - vSlice = []interface{}{v} - } +func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Plugin) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) } - var err error - res := make([]*id.PluginID, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, vSlice[i]) - if err != nil { - return nil, err + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) } - } - return res, nil -} -func (ec *executionContext) marshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.PluginID) graphql.Marshaler { - ret := make(graphql.Array, len(v)) - for i := range v { - ret[i] = ec.marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx, sel, v[i]) } + wg.Wait() for _, e := range ret { if e == graphql.Null { @@ -43511,28 +46633,17 @@ func (ec *executionContext) marshalNPluginID2แš•แš–githubแš—comแš‹reearthแš‹reea return ret } -func (ec *executionContext) unmarshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (*id.PluginID, error) { - res, err := gqlmodel.UnmarshalPluginID(v) - 
return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalNPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v *id.PluginID) graphql.Marshaler { +func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - res := gqlmodel.MarshalPluginID(*v) - if res == graphql.Null { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - } - return res + return ec._Plugin(ctx, sel, v) } -func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginMetadata) graphql.Marshaler { +func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginExtension) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43556,7 +46667,7 @@ func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx, sel, v[i]) + ret[i] = ec.marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43576,17 +46687,27 @@ func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearth return ret } -func (ec 
*executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginMetadata) graphql.Marshaler { +func (ec *executionContext) marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PluginMetadata(ctx, sel, v) + return ec._PluginExtension(ctx, sel, v) } -func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Project) graphql.Marshaler { +func (ec *executionContext) unmarshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, v interface{}) (gqlmodel.PluginExtensionType, error) { + var res gqlmodel.PluginExtensionType + err := res.UnmarshalGQL(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PluginExtensionType) graphql.Marshaler { + return v +} + +func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginMetadata) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43610,7 +46731,7 @@ func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reear if 
!isLen1 { defer wg.Done() } - ret[i] = ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, sel, v[i]) + ret[i] = ec.marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43621,48 +46742,26 @@ func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reear } wg.Wait() - return ret -} - -func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._Project(ctx, sel, v) -} - -func (ec *executionContext) marshalNProjectAliasAvailability2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectAliasAvailability) graphql.Marshaler { - return ec._ProjectAliasAvailability(ctx, sel, &v) -} - -func (ec *executionContext) marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectAliasAvailability) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + for _, e := range ret { + if e == graphql.Null { + return graphql.Null } - return graphql.Null } - return ec._ProjectAliasAvailability(ctx, sel, v) -} -func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectConnection) 
graphql.Marshaler { - return ec._ProjectConnection(ctx, sel, &v) + return ret } -func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectConnection) graphql.Marshaler { +func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginMetadata) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._ProjectConnection(ctx, sel, v) + return ec._PluginMetadata(ctx, sel, v) } -func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ProjectEdge) graphql.Marshaler { +func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Project) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43686,7 +46785,7 @@ func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹r if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx, sel, v[i]) + ret[i] = ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43697,36 +46796,48 @@ func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹r } wg.Wait() - for _, e := range ret { - if e == graphql.Null { - return graphql.Null + return ret 
+} + +func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") } + return graphql.Null } + return ec._Project(ctx, sel, v) +} - return ret +func (ec *executionContext) marshalNProjectAliasAvailability2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectAliasAvailability) graphql.Marshaler { + return ec._ProjectAliasAvailability(ctx, sel, &v) } -func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectEdge) graphql.Marshaler { +func (ec *executionContext) marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectAliasAvailability) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._ProjectEdge(ctx, sel, v) + return ec._ProjectAliasAvailability(ctx, sel, v) } -func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { +func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v gqlmodel.ProjectConnection) graphql.Marshaler { + return ec._ProjectConnection(ctx, sel, 
&v) +} + +func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._Property(ctx, sel, v) + return ec._ProjectConnection(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyField) graphql.Marshaler { +func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ProjectEdge) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43750,7 +46861,7 @@ func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, sel, v[i]) + ret[i] = ec.marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43770,27 +46881,27 @@ func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { +func (ec *executionContext) 
marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertyField(ctx, sel, v) + return ec._ProjectEdge(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldLink) graphql.Marshaler { +func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertyFieldLink(ctx, sel, v) + return ec._Property(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43814,7 +46925,7 @@ func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, sel, v[i]) + ret[i] = 
ec.marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43834,27 +46945,27 @@ func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertyGroup(ctx, sel, v) + return ec._PropertyField(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldLink) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertyItem(ctx, sel, v) + return ec._PropertyFieldLink(ctx, sel, v) } -func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.PropertyItem) graphql.Marshaler { +func (ec *executionContext) 
marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertyGroup) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43878,7 +46989,7 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43898,17 +47009,27 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree return ret } -func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyLinkableFields) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertyLinkableFields(ctx, sel, v) + return ec._PropertyGroup(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchema) graphql.Marshaler { +func (ec *executionContext) 
marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + return ec._PropertyItem(ctx, sel, v) +} + +func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.PropertyItem) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43932,7 +47053,7 @@ func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearth if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, sel, v[i]) } if isLen1 { f(i) @@ -43952,17 +47073,17 @@ func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearth return ret } -func (ec *executionContext) marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { +func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyLinkableFields) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertySchema(ctx, sel, v) + return 
ec._PropertyLinkableFields(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaField) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchema) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -43986,7 +47107,7 @@ func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = ec.marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, sel, v[i]) } if isLen1 { f(i) @@ -44006,42 +47127,17 @@ func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹ree return ret } -func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - return ec._PropertySchemaField(ctx, sel, v) -} - -func (ec *executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { +func (ec *executionContext) 
marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertySchemaFieldChoice(ctx, sel, v) -} - -func (ec *executionContext) unmarshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, v interface{}) (id.PropertySchemaFieldID, error) { - res, err := gqlmodel.UnmarshalPropertySchemaFieldID(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalNPropertySchemaFieldID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaFieldID) graphql.Marshaler { - res := gqlmodel.MarshalPropertySchemaFieldID(v) - if res == graphql.Null { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - } - return res + return ec._PropertySchema(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaField) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup isLen1 := len(v) == 1 @@ -44065,7 +47161,7 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹ree if !isLen1 { defer wg.Done() } - ret[i] = 
ec.marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, sel, v[i]) + ret[i] = ec.marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, sel, v[i]) } if isLen1 { f(i) @@ -44085,72 +47181,60 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹ree return ret } -func (ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { +func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - return ec._PropertySchemaGroup(ctx, sel, v) -} - -func (ec *executionContext) unmarshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, v interface{}) (id.PropertySchemaGroupID, error) { - res, err := gqlmodel.UnmarshalPropertySchemaGroupID(v) - return res, graphql.ErrorOnPath(ctx, err) + return ec._PropertySchemaField(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchemaGroupID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaGroupID) graphql.Marshaler { - res := gqlmodel.MarshalPropertySchemaGroupID(v) - if res == graphql.Null { +func (ec *executionContext) 
marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { + if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } + return graphql.Null } - return res -} - -func (ec *executionContext) unmarshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (id.PropertySchemaID, error) { - res, err := gqlmodel.UnmarshalPropertySchemaID(v) - return res, graphql.ErrorOnPath(ctx, err) + return ec._PropertySchemaFieldChoice(ctx, sel, v) } -func (ec *executionContext) marshalNPropertySchemaID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v id.PropertySchemaID) graphql.Marshaler { - res := gqlmodel.MarshalPropertySchemaID(v) - if res == graphql.Null { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } +func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PropertySchemaGroup) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) } - return res -} - -func (ec *executionContext) unmarshalNPropertySchemaID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaIDแš„(ctx context.Context, v interface{}) ([]*id.PropertySchemaID, error) { - var vSlice []interface{} - if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], } - } - var 
err error - res := make([]*id.PropertySchemaID, len(vSlice)) - for i := range vSlice { - ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = ec.unmarshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, vSlice[i]) - if err != nil { - return nil, err + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) } - } - return res, nil -} -func (ec *executionContext) marshalNPropertySchemaID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.PropertySchemaID) graphql.Marshaler { - ret := make(graphql.Array, len(v)) - for i := range v { - ret[i] = ec.marshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx, sel, v[i]) } + wg.Wait() for _, e := range ret { if e == graphql.Null { @@ -44161,25 +47245,14 @@ func (ec *executionContext) marshalNPropertySchemaID2แš•แš–githubแš—comแš‹reeart return ret } -func (ec *executionContext) unmarshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (*id.PropertySchemaID, error) { - res, err := gqlmodel.UnmarshalPropertySchemaID(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalNPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaID) graphql.Marshaler { +func (ec *executionContext) 
marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { ec.Errorf(ctx, "must not be null") } return graphql.Null } - res := gqlmodel.MarshalPropertySchemaID(*v) - if res == graphql.Null { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") - } - } - return res + return ec._PropertySchemaGroup(ctx, sel, v) } func (ec *executionContext) unmarshalNPublishProjectInput2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishProjectInput(ctx context.Context, v interface{}) (gqlmodel.PublishProjectInput, error) { @@ -44277,16 +47350,6 @@ func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ec._Scene(ctx, sel, v) } -func (ec *executionContext) unmarshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, v interface{}) (gqlmodel.SceneLockMode, error) { - var res gqlmodel.SceneLockMode - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalNSceneLockMode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v gqlmodel.SceneLockMode) graphql.Marshaler { - return v -} - func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.ScenePlugin) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup @@ -44418,11 +47481,7 @@ func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.S func (ec 
*executionContext) unmarshalNString2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error res := make([]string, len(vSlice)) @@ -44744,11 +47803,7 @@ func (ec *executionContext) unmarshalNUpdatePropertyItemInput2githubแš—comแš‹ree func (ec *executionContext) unmarshalNUpdatePropertyItemOperationInput2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdatePropertyItemOperationInputแš„(ctx context.Context, v interface{}) ([]*gqlmodel.UpdatePropertyItemOperationInput, error) { var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error res := make([]*gqlmodel.UpdatePropertyItemOperationInput, len(vSlice)) @@ -44973,11 +48028,7 @@ func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Conte func (ec *executionContext) unmarshalN__DirectiveLocation2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error res := make([]string, len(vSlice)) @@ -45239,7 +48290,8 @@ func (ec *executionContext) marshalOAny2interface(ctx context.Context, sel ast.S if v == nil { return graphql.Null } - return graphql.MarshalAny(v) + res := graphql.MarshalAny(v) + return res } func (ec *executionContext) marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Asset) graphql.Marshaler { @@ -45285,7 +48337,8 @@ func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interf } func (ec 
*executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { - return graphql.MarshalBoolean(v) + res := graphql.MarshalBoolean(v) + return res } func (ec *executionContext) unmarshalOBoolean2แš–bool(ctx context.Context, v interface{}) (*bool, error) { @@ -45300,7 +48353,8 @@ func (ec *executionContext) marshalOBoolean2แš–bool(ctx context.Context, sel ast if v == nil { return graphql.Null } - return graphql.MarshalBoolean(*v) + res := graphql.MarshalBoolean(*v) + return res } func (ec *executionContext) marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.CreateAssetPayload) graphql.Marshaler { @@ -45357,7 +48411,8 @@ func (ec *executionContext) marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearth if v == nil { return graphql.Null } - return gqlmodel.MarshalCursor(*v) + res := gqlmodel.MarshalCursor(*v) + return res } func (ec *executionContext) marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Dataset) graphql.Marshaler { @@ -45388,21 +48443,6 @@ func (ec *executionContext) marshalODatasetSchemaField2แš–githubแš—comแš‹reearth return ec._DatasetSchemaField(ctx, sel, v) } -func (ec *executionContext) unmarshalODatasetSchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšDatasetSchemaFieldID(ctx context.Context, v interface{}) (*id.DatasetSchemaFieldID, error) { - if v == nil { - return nil, nil - } - res, err := gqlmodel.UnmarshalDatasetSchemaFieldID(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalODatasetSchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšDatasetSchemaFieldID(ctx context.Context, sel ast.SelectionSet, v *id.DatasetSchemaFieldID) graphql.Marshaler { - if v == nil { - return 
graphql.Null - } - return gqlmodel.MarshalDatasetSchemaFieldID(*v) -} - func (ec *executionContext) unmarshalODateTime2แš–timeแšTime(ctx context.Context, v interface{}) (*time.Time, error) { if v == nil { return nil, nil @@ -45415,7 +48455,8 @@ func (ec *executionContext) marshalODateTime2แš–timeแšTime(ctx context.Context, if v == nil { return graphql.Null } - return graphql.MarshalTime(*v) + res := graphql.MarshalTime(*v) + return res } func (ec *executionContext) marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMePayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DeleteMePayload) graphql.Marshaler { @@ -45457,7 +48498,7 @@ func (ec *executionContext) unmarshalOFloat2แš–float64(ctx context.Context, v in if v == nil { return nil, nil } - res, err := graphql.UnmarshalFloat(v) + res, err := graphql.UnmarshalFloatContext(ctx, v) return &res, graphql.ErrorOnPath(ctx, err) } @@ -45465,26 +48506,23 @@ func (ec *executionContext) marshalOFloat2แš–float64(ctx context.Context, sel as if v == nil { return graphql.Null } - return graphql.MarshalFloat(*v) + res := graphql.MarshalFloatContext(*v) + return graphql.WrapContextMarshaler(ctx, res) } -func (ec *executionContext) unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, v interface{}) ([]*id.ID, error) { +func (ec *executionContext) unmarshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, v interface{}) ([]gqlmodel.ID, error) { if v == nil { return nil, nil } var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error - res := make([]*id.ID, len(vSlice)) + res := make([]gqlmodel.ID, len(vSlice)) for i := range vSlice { ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) - res[i], err = 
ec.unmarshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, vSlice[i]) + res[i], err = ec.unmarshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, vSlice[i]) if err != nil { return nil, err } @@ -45492,13 +48530,13 @@ func (ec *executionContext) unmarshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearth return res, nil } -func (ec *executionContext) marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []*id.ID) graphql.Marshaler { +func (ec *executionContext) marshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx context.Context, sel ast.SelectionSet, v []gqlmodel.ID) graphql.Marshaler { if v == nil { return graphql.Null } ret := make(graphql.Array, len(v)) for i := range v { - ret[i] = ec.marshalNID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx, sel, v[i]) + ret[i] = ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, sel, v[i]) } for _, e := range ret { @@ -45510,19 +48548,21 @@ func (ec *executionContext) marshalOID2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ret } -func (ec *executionContext) unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, v interface{}) (*id.ID, error) { +func (ec *executionContext) unmarshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, v interface{}) (*gqlmodel.ID, error) { if v == nil { return nil, nil } - res, err := gqlmodel.UnmarshalID(v) + tmp, err := graphql.UnmarshalString(v) + res := gqlmodel.ID(tmp) return &res, graphql.ErrorOnPath(ctx, err) } -func (ec *executionContext) marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšID(ctx context.Context, sel ast.SelectionSet, v *id.ID) graphql.Marshaler { +func (ec *executionContext) 
marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ID) graphql.Marshaler { if v == nil { return graphql.Null } - return gqlmodel.MarshalID(*v) + res := graphql.MarshalString(string(*v)) + return res } func (ec *executionContext) marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ImportDatasetPayload) graphql.Marshaler { @@ -45565,7 +48605,8 @@ func (ec *executionContext) marshalOInt2แš–int(ctx context.Context, sel ast.Sele if v == nil { return graphql.Null } - return graphql.MarshalInt(*v) + res := graphql.MarshalInt(*v) + return res } func (ec *executionContext) unmarshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx context.Context, v interface{}) (*language.Tag, error) { @@ -45580,7 +48621,8 @@ func (ec *executionContext) marshalOLang2แš–golangแš—orgแš‹xแš‹textแš‹language if v == nil { return graphql.Null } - return gqlmodel.MarshalLang(*v) + res := gqlmodel.MarshalLang(*v) + return res } func (ec *executionContext) marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Layer) graphql.Marshaler { @@ -45604,6 +48646,13 @@ func (ec *executionContext) marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reeart return ec._LayerItem(ctx, sel, v) } +func (ec *executionContext) marshalOMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Me) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec._Me(ctx, sel, v) +} + func (ec *executionContext) marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx context.Context, sel ast.SelectionSet, v 
*gqlmodel.MergedInfobox) graphql.Marshaler { if v == nil { return graphql.Null @@ -45675,36 +48724,6 @@ func (ec *executionContext) marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹ return ec._PluginExtension(ctx, sel, v) } -func (ec *executionContext) unmarshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, v interface{}) (*id.PluginExtensionID, error) { - if v == nil { - return nil, nil - } - res, err := gqlmodel.UnmarshalPluginExtensionID(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOPluginExtensionID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginExtensionID(ctx context.Context, sel ast.SelectionSet, v *id.PluginExtensionID) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return gqlmodel.MarshalPluginExtensionID(*v) -} - -func (ec *executionContext) unmarshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, v interface{}) (*id.PluginID, error) { - if v == nil { - return nil, nil - } - res, err := gqlmodel.UnmarshalPluginID(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOPluginID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPluginID(ctx context.Context, sel ast.SelectionSet, v *id.PluginID) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return gqlmodel.MarshalPluginID(*v) -} - func (ec *executionContext) marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { if v == nil { return graphql.Null @@ -45876,21 +48895,6 @@ func (ec *executionContext) marshalOPropertySchemaFieldChoice2แš•แš–githubแš—com return ret } -func (ec *executionContext) unmarshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx 
context.Context, v interface{}) (*id.PropertySchemaFieldID, error) { - if v == nil { - return nil, nil - } - res, err := gqlmodel.UnmarshalPropertySchemaFieldID(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOPropertySchemaFieldID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaFieldID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaFieldID) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return gqlmodel.MarshalPropertySchemaFieldID(*v) -} - func (ec *executionContext) unmarshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx context.Context, v interface{}) (*gqlmodel.PropertySchemaFieldUI, error) { if v == nil { return nil, nil @@ -45914,36 +48918,6 @@ func (ec *executionContext) marshalOPropertySchemaGroup2แš–githubแš—comแš‹reeart return ec._PropertySchemaGroup(ctx, sel, v) } -func (ec *executionContext) unmarshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, v interface{}) (*id.PropertySchemaGroupID, error) { - if v == nil { - return nil, nil - } - res, err := gqlmodel.UnmarshalPropertySchemaGroupID(v) - return &res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOPropertySchemaGroupID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaGroupID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaGroupID) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return gqlmodel.MarshalPropertySchemaGroupID(*v) -} - -func (ec *executionContext) unmarshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, v interface{}) (*id.PropertySchemaID, error) { - if v == nil { - return nil, nil - } - res, err := gqlmodel.UnmarshalPropertySchemaID(v) - return &res, graphql.ErrorOnPath(ctx, err) 
-} - -func (ec *executionContext) marshalOPropertySchemaID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹pkgแš‹idแšPropertySchemaID(ctx context.Context, sel ast.SelectionSet, v *id.PropertySchemaID) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return gqlmodel.MarshalPropertySchemaID(*v) -} - func (ec *executionContext) marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.RemoveAssetPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -46014,22 +48988,6 @@ func (ec *executionContext) marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘ return ec._Scene(ctx, sel, v) } -func (ec *executionContext) unmarshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, v interface{}) (*gqlmodel.SceneLockMode, error) { - if v == nil { - return nil, nil - } - var res = new(gqlmodel.SceneLockMode) - err := res.UnmarshalGQL(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOSceneLockMode2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneLockMode(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneLockMode) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return v -} - func (ec *executionContext) marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ScenePlugin) graphql.Marshaler { if v == nil { return graphql.Null @@ -46044,13 +49002,6 @@ func (ec *executionContext) marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reea return ec._SceneWidget(ctx, sel, v) } -func (ec *executionContext) marshalOSearchedUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSearchedUser(ctx 
context.Context, sel ast.SelectionSet, v *gqlmodel.SearchedUser) graphql.Marshaler { - if v == nil { - return graphql.Null - } - return ec._SearchedUser(ctx, sel, v) -} - func (ec *executionContext) marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SignupPayload) graphql.Marshaler { if v == nil { return graphql.Null @@ -46058,26 +49009,13 @@ func (ec *executionContext) marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹re return ec._SignupPayload(ctx, sel, v) } -func (ec *executionContext) unmarshalOString2string(ctx context.Context, v interface{}) (string, error) { - res, err := graphql.UnmarshalString(v) - return res, graphql.ErrorOnPath(ctx, err) -} - -func (ec *executionContext) marshalOString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { - return graphql.MarshalString(v) -} - func (ec *executionContext) unmarshalOString2แš•stringแš„(ctx context.Context, v interface{}) ([]string, error) { if v == nil { return nil, nil } var vSlice []interface{} if v != nil { - if tmp1, ok := v.([]interface{}); ok { - vSlice = tmp1 - } else { - vSlice = []interface{}{v} - } + vSlice = graphql.CoerceList(v) } var err error res := make([]string, len(vSlice)) @@ -46121,7 +49059,8 @@ func (ec *executionContext) marshalOString2แš–string(ctx context.Context, sel as if v == nil { return graphql.Null } - return graphql.MarshalString(*v) + res := graphql.MarshalString(*v) + return res } func (ec *executionContext) marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SyncDatasetPayload) graphql.Marshaler { @@ -46196,7 +49135,8 @@ func (ec *executionContext) marshalOTranslatedString2map(ctx context.Context, se if v == nil { return graphql.Null } - return gqlmodel.MarshalMap(v) + res := 
gqlmodel.MarshalMap(v) + return res } func (ec *executionContext) unmarshalOURL2แš–netแš‹urlแšURL(ctx context.Context, v interface{}) (*url.URL, error) { @@ -46211,7 +49151,8 @@ func (ec *executionContext) marshalOURL2แš–netแš‹urlแšURL(ctx context.Context, if v == nil { return graphql.Null } - return gqlmodel.MarshalURL(*v) + res := gqlmodel.MarshalURL(*v) + return res } func (ec *executionContext) marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UninstallPluginPayload) graphql.Marshaler { @@ -46303,7 +49244,8 @@ func (ec *executionContext) marshalOUpload2แš–githubแš—comแš‹99designsแš‹gqlgen if v == nil { return graphql.Null } - return graphql.MarshalUpload(*v) + res := graphql.MarshalUpload(*v) + return res } func (ec *executionContext) marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginPayload(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.UploadPluginPayload) graphql.Marshaler { diff --git a/internal/adapter/gql/gqldataloader/assetloader_gen.go b/internal/adapter/gql/gqldataloader/assetloader_gen.go index 805dc9542..556880b44 100644 --- a/internal/adapter/gql/gqldataloader/assetloader_gen.go +++ b/internal/adapter/gql/gqldataloader/assetloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // AssetLoaderConfig captures the config to create a new AssetLoader type AssetLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.AssetID) ([]*gqlmodel.Asset, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewAssetLoader(config AssetLoaderConfig) *AssetLoader { // AssetLoader 
batches and caches requests type AssetLoader struct { // this method provides the data for the loader - fetch func(keys []id.AssetID) ([]*gqlmodel.Asset, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type AssetLoader struct { // INTERNAL // lazily created cache - cache map[id.AssetID]*gqlmodel.Asset + cache map[gqlmodel.ID]*gqlmodel.Asset // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type AssetLoader struct { } type assetLoaderBatch struct { - keys []id.AssetID + keys []gqlmodel.ID data []*gqlmodel.Asset error []error closing bool @@ -64,14 +63,14 @@ type assetLoaderBatch struct { } // Load a Asset by key, batching and caching will be applied automatically -func (l *AssetLoader) Load(key id.AssetID) (*gqlmodel.Asset, error) { +func (l *AssetLoader) Load(key gqlmodel.ID) (*gqlmodel.Asset, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Asset. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *AssetLoader) LoadThunk(key id.AssetID) func() (*gqlmodel.Asset, error) { +func (l *AssetLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Asset, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *AssetLoader) LoadThunk(key id.AssetID) func() (*gqlmodel.Asset, error) // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *AssetLoader) LoadAll(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { +func (l *AssetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { results := make([]func() (*gqlmodel.Asset, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *AssetLoader) LoadAll(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { // LoadAllThunk returns a function that when called will block waiting for a Assets. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *AssetLoader) LoadAllThunk(keys []id.AssetID) func() ([]*gqlmodel.Asset, []error) { +func (l *AssetLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Asset, []error) { results := make([]func() (*gqlmodel.Asset, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *AssetLoader) LoadAllThunk(keys []id.AssetID) func() ([]*gqlmodel.Asset, // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *AssetLoader) Prime(key id.AssetID, value *gqlmodel.Asset) bool { +func (l *AssetLoader) Prime(key gqlmodel.ID, value *gqlmodel.Asset) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *AssetLoader) Prime(key id.AssetID, value *gqlmodel.Asset) bool { } // Clear the value at key from the cache, if it exists -func (l *AssetLoader) Clear(key id.AssetID) { +func (l *AssetLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *AssetLoader) unsafeSet(key id.AssetID, value *gqlmodel.Asset) { +func (l *AssetLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Asset) { if l.cache == nil { - l.cache = map[id.AssetID]*gqlmodel.Asset{} + l.cache = map[gqlmodel.ID]*gqlmodel.Asset{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *assetLoaderBatch) keyIndex(l *AssetLoader, key id.AssetID) int { +func (b *assetLoaderBatch) keyIndex(l *AssetLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/dataloader.go b/internal/adapter/gql/gqldataloader/dataloader.go index 6c15aced6..19ac7e7c8 100644 --- a/internal/adapter/gql/gqldataloader/dataloader.go +++ b/internal/adapter/gql/gqldataloader/dataloader.go @@ -1,18 +1,18 @@ package gqldataloader -//go:generate go run github.com/vektah/dataloaden AssetLoader github.com/reearth/reearth-backend/pkg/id.AssetID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Asset -//go:generate go run github.com/vektah/dataloaden DatasetLoader github.com/reearth/reearth-backend/pkg/id.DatasetID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Dataset -//go:generate go run github.com/vektah/dataloaden DatasetSchemaLoader github.com/reearth/reearth-backend/pkg/id.DatasetSchemaID 
*github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.DatasetSchema -//go:generate go run github.com/vektah/dataloaden LayerLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Layer -//go:generate go run github.com/vektah/dataloaden LayerGroupLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerGroup -//go:generate go run github.com/vektah/dataloaden LayerItemLoader github.com/reearth/reearth-backend/pkg/id.LayerID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerItem -//go:generate go run github.com/vektah/dataloaden PluginLoader github.com/reearth/reearth-backend/pkg/id.PluginID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Plugin -//go:generate go run github.com/vektah/dataloaden ProjectLoader github.com/reearth/reearth-backend/pkg/id.ProjectID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Project -//go:generate go run github.com/vektah/dataloaden PropertyLoader github.com/reearth/reearth-backend/pkg/id.PropertyID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Property -//go:generate go run github.com/vektah/dataloaden PropertySchemaLoader github.com/reearth/reearth-backend/pkg/id.PropertySchemaID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchema -//go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/pkg/id.SceneID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Scene -//go:generate go run github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/pkg/id.TeamID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Team -//go:generate go run github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/pkg/id.UserID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.User 
-//go:generate go run github.com/vektah/dataloaden TagLoader github.com/reearth/reearth-backend/pkg/id.TagID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Tag -//go:generate go run github.com/vektah/dataloaden TagItemLoader github.com/reearth/reearth-backend/pkg/id.TagID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagItem -//go:generate go run github.com/vektah/dataloaden TagGroupLoader github.com/reearth/reearth-backend/pkg/id.TagID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagGroup +//go:generate go run github.com/vektah/dataloaden AssetLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Asset +//go:generate go run github.com/vektah/dataloaden DatasetLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Dataset +//go:generate go run github.com/vektah/dataloaden DatasetSchemaLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.DatasetSchema +//go:generate go run github.com/vektah/dataloaden LayerLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Layer +//go:generate go run github.com/vektah/dataloaden LayerGroupLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerGroup +//go:generate go run github.com/vektah/dataloaden LayerItemLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.LayerItem +//go:generate go run github.com/vektah/dataloaden PluginLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID 
*github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Plugin +//go:generate go run github.com/vektah/dataloaden ProjectLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Project +//go:generate go run github.com/vektah/dataloaden PropertyLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Property +//go:generate go run github.com/vektah/dataloaden PropertySchemaLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.PropertySchema +//go:generate go run github.com/vektah/dataloaden SceneLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Scene +//go:generate go run github.com/vektah/dataloaden TeamLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Team +//go:generate go run github.com/vektah/dataloaden UserLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.User +//go:generate go run github.com/vektah/dataloaden TagLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Tag +//go:generate go run github.com/vektah/dataloaden TagItemLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagItem +//go:generate go run github.com/vektah/dataloaden TagGroupLoader github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID *github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.TagGroup diff --git a/internal/adapter/gql/gqldataloader/datasetloader_gen.go 
b/internal/adapter/gql/gqldataloader/datasetloader_gen.go index 9b1cb1120..57b115ab7 100644 --- a/internal/adapter/gql/gqldataloader/datasetloader_gen.go +++ b/internal/adapter/gql/gqldataloader/datasetloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // DatasetLoaderConfig captures the config to create a new DatasetLoader type DatasetLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewDatasetLoader(config DatasetLoaderConfig) *DatasetLoader { // DatasetLoader batches and caches requests type DatasetLoader struct { // this method provides the data for the loader - fetch func(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type DatasetLoader struct { // INTERNAL // lazily created cache - cache map[id.DatasetID]*gqlmodel.Dataset + cache map[gqlmodel.ID]*gqlmodel.Dataset // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type DatasetLoader struct { } type datasetLoaderBatch struct { - keys []id.DatasetID + keys []gqlmodel.ID data []*gqlmodel.Dataset error []error closing bool @@ -64,14 +63,14 @@ type datasetLoaderBatch struct { } // Load a Dataset by key, batching and caching will be applied automatically -func (l *DatasetLoader) Load(key id.DatasetID) (*gqlmodel.Dataset, error) { +func (l *DatasetLoader) Load(key gqlmodel.ID) (*gqlmodel.Dataset, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Dataset. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*gqlmodel.Dataset, error) { +func (l *DatasetLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Dataset, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *DatasetLoader) LoadThunk(key id.DatasetID) func() (*gqlmodel.Dataset, e // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *DatasetLoader) LoadAll(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) { +func (l *DatasetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { results := make([]func() (*gqlmodel.Dataset, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *DatasetLoader) LoadAll(keys []id.DatasetID) ([]*gqlmodel.Dataset, []err // LoadAllThunk returns a function that when called will block waiting for a Datasets. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *DatasetLoader) LoadAllThunk(keys []id.DatasetID) func() ([]*gqlmodel.Dataset, []error) { +func (l *DatasetLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Dataset, []error) { results := make([]func() (*gqlmodel.Dataset, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *DatasetLoader) LoadAllThunk(keys []id.DatasetID) func() ([]*gqlmodel.Da // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *DatasetLoader) Prime(key id.DatasetID, value *gqlmodel.Dataset) bool { +func (l *DatasetLoader) Prime(key gqlmodel.ID, value *gqlmodel.Dataset) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *DatasetLoader) Prime(key id.DatasetID, value *gqlmodel.Dataset) bool { } // Clear the value at key from the cache, if it exists -func (l *DatasetLoader) Clear(key id.DatasetID) { +func (l *DatasetLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *DatasetLoader) unsafeSet(key id.DatasetID, value *gqlmodel.Dataset) { +func (l *DatasetLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Dataset) { if l.cache == nil { - l.cache = map[id.DatasetID]*gqlmodel.Dataset{} + l.cache = map[gqlmodel.ID]*gqlmodel.Dataset{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *datasetLoaderBatch) keyIndex(l *DatasetLoader, key id.DatasetID) int { +func (b *datasetLoaderBatch) keyIndex(l *DatasetLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go b/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go index e8ca12d9a..9fe7e8ffa 100644 --- 
a/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go +++ b/internal/adapter/gql/gqldataloader/datasetschemaloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // DatasetSchemaLoaderConfig captures the config to create a new DatasetSchemaLoader type DatasetSchemaLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewDatasetSchemaLoader(config DatasetSchemaLoaderConfig) *DatasetSchemaLoad // DatasetSchemaLoader batches and caches requests type DatasetSchemaLoader struct { // this method provides the data for the loader - fetch func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type DatasetSchemaLoader struct { // INTERNAL // lazily created cache - cache map[id.DatasetSchemaID]*gqlmodel.DatasetSchema + cache map[gqlmodel.ID]*gqlmodel.DatasetSchema // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type DatasetSchemaLoader struct { } type datasetSchemaLoaderBatch struct { - keys []id.DatasetSchemaID + keys []gqlmodel.ID data []*gqlmodel.DatasetSchema error []error closing bool @@ -64,14 +63,14 @@ type datasetSchemaLoaderBatch struct { } // Load a DatasetSchema by key, batching and caching will be applied automatically -func (l *DatasetSchemaLoader) Load(key id.DatasetSchemaID) (*gqlmodel.DatasetSchema, error) { +func (l *DatasetSchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.DatasetSchema, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a DatasetSchema. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*gqlmodel.DatasetSchema, error) { +func (l *DatasetSchemaLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.DatasetSchema, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *DatasetSchemaLoader) LoadThunk(key id.DatasetSchemaID) func() (*gqlmode // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *DatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { +func (l *DatasetSchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { results := make([]func() (*gqlmodel.DatasetSchema, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *DatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*gqlmodel.Da // LoadAllThunk returns a function that when called will block waiting for a DatasetSchemas. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *DatasetSchemaLoader) LoadAllThunk(keys []id.DatasetSchemaID) func() ([]*gqlmodel.DatasetSchema, []error) { +func (l *DatasetSchemaLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.DatasetSchema, []error) { results := make([]func() (*gqlmodel.DatasetSchema, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *DatasetSchemaLoader) LoadAllThunk(keys []id.DatasetSchemaID) func() ([] // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *DatasetSchemaLoader) Prime(key id.DatasetSchemaID, value *gqlmodel.DatasetSchema) bool { +func (l *DatasetSchemaLoader) Prime(key gqlmodel.ID, value *gqlmodel.DatasetSchema) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *DatasetSchemaLoader) Prime(key id.DatasetSchemaID, value *gqlmodel.Data } // Clear the value at key from the cache, if it exists -func (l *DatasetSchemaLoader) Clear(key id.DatasetSchemaID) { +func (l *DatasetSchemaLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *DatasetSchemaLoader) unsafeSet(key id.DatasetSchemaID, value *gqlmodel.DatasetSchema) { +func (l *DatasetSchemaLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.DatasetSchema) { if l.cache == nil { - l.cache = map[id.DatasetSchemaID]*gqlmodel.DatasetSchema{} + l.cache = map[gqlmodel.ID]*gqlmodel.DatasetSchema{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *datasetSchemaLoaderBatch) keyIndex(l *DatasetSchemaLoader, key id.DatasetSchemaID) int { +func (b 
*datasetSchemaLoaderBatch) keyIndex(l *DatasetSchemaLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/layergrouploader_gen.go b/internal/adapter/gql/gqldataloader/layergrouploader_gen.go index 5c93dad38..0e46cf893 100644 --- a/internal/adapter/gql/gqldataloader/layergrouploader_gen.go +++ b/internal/adapter/gql/gqldataloader/layergrouploader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // LayerGroupLoaderConfig captures the config to create a new LayerGroupLoader type LayerGroupLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewLayerGroupLoader(config LayerGroupLoaderConfig) *LayerGroupLoader { // LayerGroupLoader batches and caches requests type LayerGroupLoader struct { // this method provides the data for the loader - fetch func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type LayerGroupLoader struct { // INTERNAL // lazily created cache - cache map[id.LayerID]*gqlmodel.LayerGroup + cache map[gqlmodel.ID]*gqlmodel.LayerGroup // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type LayerGroupLoader struct { } type layerGroupLoaderBatch struct { - keys []id.LayerID + keys []gqlmodel.ID data []*gqlmodel.LayerGroup error []error closing bool @@ -64,14 +63,14 @@ type layerGroupLoaderBatch struct { } // Load a LayerGroup by key, batching and caching will be applied automatically -func (l *LayerGroupLoader) Load(key id.LayerID) (*gqlmodel.LayerGroup, error) { +func (l *LayerGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerGroup, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a LayerGroup. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.LayerGroup, error) { +func (l *LayerGroupLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.LayerGroup, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *LayerGroupLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.LayerGrou // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *LayerGroupLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { +func (l *LayerGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { results := make([]func() (*gqlmodel.LayerGroup, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *LayerGroupLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerGroup, [ // LoadAllThunk returns a function that when called will block waiting for a LayerGroups. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *LayerGroupLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.LayerGroup, []error) { +func (l *LayerGroupLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.LayerGroup, []error) { results := make([]func() (*gqlmodel.LayerGroup, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *LayerGroupLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.L // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *LayerGroupLoader) Prime(key id.LayerID, value *gqlmodel.LayerGroup) bool { +func (l *LayerGroupLoader) Prime(key gqlmodel.ID, value *gqlmodel.LayerGroup) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *LayerGroupLoader) Prime(key id.LayerID, value *gqlmodel.LayerGroup) boo } // Clear the value at key from the cache, if it exists -func (l *LayerGroupLoader) Clear(key id.LayerID) { +func (l *LayerGroupLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *LayerGroupLoader) unsafeSet(key id.LayerID, value *gqlmodel.LayerGroup) { +func (l *LayerGroupLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.LayerGroup) { if l.cache == nil { - l.cache = map[id.LayerID]*gqlmodel.LayerGroup{} + l.cache = map[gqlmodel.ID]*gqlmodel.LayerGroup{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *layerGroupLoaderBatch) keyIndex(l *LayerGroupLoader, key id.LayerID) int { +func (b *layerGroupLoaderBatch) keyIndex(l *LayerGroupLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/layeritemloader_gen.go 
b/internal/adapter/gql/gqldataloader/layeritemloader_gen.go index 9593fd1ed..97562fba2 100644 --- a/internal/adapter/gql/gqldataloader/layeritemloader_gen.go +++ b/internal/adapter/gql/gqldataloader/layeritemloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // LayerItemLoaderConfig captures the config to create a new LayerItemLoader type LayerItemLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewLayerItemLoader(config LayerItemLoaderConfig) *LayerItemLoader { // LayerItemLoader batches and caches requests type LayerItemLoader struct { // this method provides the data for the loader - fetch func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type LayerItemLoader struct { // INTERNAL // lazily created cache - cache map[id.LayerID]*gqlmodel.LayerItem + cache map[gqlmodel.ID]*gqlmodel.LayerItem // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type LayerItemLoader struct { } type layerItemLoaderBatch struct { - keys []id.LayerID + keys []gqlmodel.ID data []*gqlmodel.LayerItem error []error closing bool @@ -64,14 +63,14 @@ type layerItemLoaderBatch struct { } // Load a LayerItem by key, batching and caching will be applied automatically -func (l *LayerItemLoader) Load(key id.LayerID) (*gqlmodel.LayerItem, error) { +func (l *LayerItemLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerItem, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a LayerItem. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.LayerItem, error) { +func (l *LayerItemLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.LayerItem, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *LayerItemLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.LayerItem, // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *LayerItemLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { +func (l *LayerItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { results := make([]func() (*gqlmodel.LayerItem, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *LayerItemLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerItem, []e // LoadAllThunk returns a function that when called will block waiting for a LayerItems. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *LayerItemLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.LayerItem, []error) { +func (l *LayerItemLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.LayerItem, []error) { results := make([]func() (*gqlmodel.LayerItem, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *LayerItemLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.La // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *LayerItemLoader) Prime(key id.LayerID, value *gqlmodel.LayerItem) bool { +func (l *LayerItemLoader) Prime(key gqlmodel.ID, value *gqlmodel.LayerItem) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *LayerItemLoader) Prime(key id.LayerID, value *gqlmodel.LayerItem) bool } // Clear the value at key from the cache, if it exists -func (l *LayerItemLoader) Clear(key id.LayerID) { +func (l *LayerItemLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *LayerItemLoader) unsafeSet(key id.LayerID, value *gqlmodel.LayerItem) { +func (l *LayerItemLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.LayerItem) { if l.cache == nil { - l.cache = map[id.LayerID]*gqlmodel.LayerItem{} + l.cache = map[gqlmodel.ID]*gqlmodel.LayerItem{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *layerItemLoaderBatch) keyIndex(l *LayerItemLoader, key id.LayerID) int { +func (b *layerItemLoaderBatch) keyIndex(l *LayerItemLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/layerloader_gen.go b/internal/adapter/gql/gqldataloader/layerloader_gen.go index 
1dbe12af4..f1cc7d4bd 100644 --- a/internal/adapter/gql/gqldataloader/layerloader_gen.go +++ b/internal/adapter/gql/gqldataloader/layerloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // LayerLoaderConfig captures the config to create a new LayerLoader type LayerLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewLayerLoader(config LayerLoaderConfig) *LayerLoader { // LayerLoader batches and caches requests type LayerLoader struct { // this method provides the data for the loader - fetch func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type LayerLoader struct { // INTERNAL // lazily created cache - cache map[id.LayerID]*gqlmodel.Layer + cache map[gqlmodel.ID]*gqlmodel.Layer // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type LayerLoader struct { } type layerLoaderBatch struct { - keys []id.LayerID + keys []gqlmodel.ID data []*gqlmodel.Layer error []error closing bool @@ -64,14 +63,14 @@ type layerLoaderBatch struct { } // Load a Layer by key, batching and caching will be applied automatically -func (l *LayerLoader) Load(key id.LayerID) (*gqlmodel.Layer, error) { +func (l *LayerLoader) Load(key gqlmodel.ID) (*gqlmodel.Layer, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Layer. 
// This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.Layer, error) { +func (l *LayerLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Layer, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *LayerLoader) LoadThunk(key id.LayerID) func() (*gqlmodel.Layer, error) // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *LayerLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { +func (l *LayerLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { results := make([]func() (*gqlmodel.Layer, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *LayerLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { // LoadAllThunk returns a function that when called will block waiting for a Layers. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *LayerLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.Layer, []error) { +func (l *LayerLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Layer, []error) { results := make([]func() (*gqlmodel.Layer, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *LayerLoader) LoadAllThunk(keys []id.LayerID) func() ([]*gqlmodel.Layer, // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *LayerLoader) Prime(key id.LayerID, value *gqlmodel.Layer) bool { +func (l *LayerLoader) Prime(key gqlmodel.ID, value *gqlmodel.Layer) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *LayerLoader) Prime(key id.LayerID, value *gqlmodel.Layer) bool { } // Clear the value at key from the cache, if it exists -func (l *LayerLoader) Clear(key id.LayerID) { +func (l *LayerLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *LayerLoader) unsafeSet(key id.LayerID, value *gqlmodel.Layer) { +func (l *LayerLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Layer) { if l.cache == nil { - l.cache = map[id.LayerID]*gqlmodel.Layer{} + l.cache = map[gqlmodel.ID]*gqlmodel.Layer{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *layerLoaderBatch) keyIndex(l *LayerLoader, key id.LayerID) int { +func (b *layerLoaderBatch) keyIndex(l *LayerLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/pluginloader_gen.go b/internal/adapter/gql/gqldataloader/pluginloader_gen.go index b4a868a3a..e8b53ba23 100644 --- a/internal/adapter/gql/gqldataloader/pluginloader_gen.go +++ b/internal/adapter/gql/gqldataloader/pluginloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // PluginLoaderConfig captures the config to create a new PluginLoader type PluginLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewPluginLoader(config PluginLoaderConfig) *PluginLoader 
{ // PluginLoader batches and caches requests type PluginLoader struct { // this method provides the data for the loader - fetch func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type PluginLoader struct { // INTERNAL // lazily created cache - cache map[id.PluginID]*gqlmodel.Plugin + cache map[gqlmodel.ID]*gqlmodel.Plugin // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type PluginLoader struct { } type pluginLoaderBatch struct { - keys []id.PluginID + keys []gqlmodel.ID data []*gqlmodel.Plugin error []error closing bool @@ -64,14 +63,14 @@ type pluginLoaderBatch struct { } // Load a Plugin by key, batching and caching will be applied automatically -func (l *PluginLoader) Load(key id.PluginID) (*gqlmodel.Plugin, error) { +func (l *PluginLoader) Load(key gqlmodel.ID) (*gqlmodel.Plugin, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Plugin. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*gqlmodel.Plugin, error) { +func (l *PluginLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Plugin, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *PluginLoader) LoadThunk(key id.PluginID) func() (*gqlmodel.Plugin, erro // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *PluginLoader) LoadAll(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { +func (l *PluginLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { results := make([]func() (*gqlmodel.Plugin, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *PluginLoader) LoadAll(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) // LoadAllThunk returns a function that when called will block waiting for a Plugins. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PluginLoader) LoadAllThunk(keys []id.PluginID) func() ([]*gqlmodel.Plugin, []error) { +func (l *PluginLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Plugin, []error) { results := make([]func() (*gqlmodel.Plugin, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *PluginLoader) LoadAllThunk(keys []id.PluginID) func() ([]*gqlmodel.Plug // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *PluginLoader) Prime(key id.PluginID, value *gqlmodel.Plugin) bool { +func (l *PluginLoader) Prime(key gqlmodel.ID, value *gqlmodel.Plugin) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *PluginLoader) Prime(key id.PluginID, value *gqlmodel.Plugin) bool { } // Clear the value at key from the cache, if it exists -func (l *PluginLoader) Clear(key id.PluginID) { +func (l *PluginLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *PluginLoader) unsafeSet(key id.PluginID, value *gqlmodel.Plugin) { +func (l *PluginLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Plugin) { if l.cache == nil { - l.cache = map[id.PluginID]*gqlmodel.Plugin{} + l.cache = map[gqlmodel.ID]*gqlmodel.Plugin{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *pluginLoaderBatch) keyIndex(l *PluginLoader, key id.PluginID) int { +func (b *pluginLoaderBatch) keyIndex(l *PluginLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/projectloader_gen.go b/internal/adapter/gql/gqldataloader/projectloader_gen.go index 686d83223..31540c646 100644 --- a/internal/adapter/gql/gqldataloader/projectloader_gen.go +++ b/internal/adapter/gql/gqldataloader/projectloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // ProjectLoaderConfig captures the config to create a new ProjectLoader type ProjectLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewProjectLoader(config 
ProjectLoaderConfig) *ProjectLoader { // ProjectLoader batches and caches requests type ProjectLoader struct { // this method provides the data for the loader - fetch func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type ProjectLoader struct { // INTERNAL // lazily created cache - cache map[id.ProjectID]*gqlmodel.Project + cache map[gqlmodel.ID]*gqlmodel.Project // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type ProjectLoader struct { } type projectLoaderBatch struct { - keys []id.ProjectID + keys []gqlmodel.ID data []*gqlmodel.Project error []error closing bool @@ -64,14 +63,14 @@ type projectLoaderBatch struct { } // Load a Project by key, batching and caching will be applied automatically -func (l *ProjectLoader) Load(key id.ProjectID) (*gqlmodel.Project, error) { +func (l *ProjectLoader) Load(key gqlmodel.ID) (*gqlmodel.Project, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Project. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*gqlmodel.Project, error) { +func (l *ProjectLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Project, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *ProjectLoader) LoadThunk(key id.ProjectID) func() (*gqlmodel.Project, e // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *ProjectLoader) LoadAll(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { +func (l *ProjectLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { results := make([]func() (*gqlmodel.Project, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *ProjectLoader) LoadAll(keys []id.ProjectID) ([]*gqlmodel.Project, []err // LoadAllThunk returns a function that when called will block waiting for a Projects. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ProjectLoader) LoadAllThunk(keys []id.ProjectID) func() ([]*gqlmodel.Project, []error) { +func (l *ProjectLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Project, []error) { results := make([]func() (*gqlmodel.Project, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *ProjectLoader) LoadAllThunk(keys []id.ProjectID) func() ([]*gqlmodel.Pr // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *ProjectLoader) Prime(key id.ProjectID, value *gqlmodel.Project) bool { +func (l *ProjectLoader) Prime(key gqlmodel.ID, value *gqlmodel.Project) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *ProjectLoader) Prime(key id.ProjectID, value *gqlmodel.Project) bool { } // Clear the value at key from the cache, if it exists -func (l *ProjectLoader) Clear(key id.ProjectID) { +func (l *ProjectLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *ProjectLoader) unsafeSet(key id.ProjectID, value *gqlmodel.Project) { +func (l *ProjectLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Project) { if l.cache == nil { - l.cache = map[id.ProjectID]*gqlmodel.Project{} + l.cache = map[gqlmodel.ID]*gqlmodel.Project{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *projectLoaderBatch) keyIndex(l *ProjectLoader, key id.ProjectID) int { +func (b *projectLoaderBatch) keyIndex(l *ProjectLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/propertyloader_gen.go b/internal/adapter/gql/gqldataloader/propertyloader_gen.go index bcd8b64ec..20a20430e 100644 --- a/internal/adapter/gql/gqldataloader/propertyloader_gen.go +++ b/internal/adapter/gql/gqldataloader/propertyloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // PropertyLoaderConfig captures the config to create a new PropertyLoader type PropertyLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 
+33,7 @@ func NewPropertyLoader(config PropertyLoaderConfig) *PropertyLoader { // PropertyLoader batches and caches requests type PropertyLoader struct { // this method provides the data for the loader - fetch func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type PropertyLoader struct { // INTERNAL // lazily created cache - cache map[id.PropertyID]*gqlmodel.Property + cache map[gqlmodel.ID]*gqlmodel.Property // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type PropertyLoader struct { } type propertyLoaderBatch struct { - keys []id.PropertyID + keys []gqlmodel.ID data []*gqlmodel.Property error []error closing bool @@ -64,14 +63,14 @@ type propertyLoaderBatch struct { } // Load a Property by key, batching and caching will be applied automatically -func (l *PropertyLoader) Load(key id.PropertyID) (*gqlmodel.Property, error) { +func (l *PropertyLoader) Load(key gqlmodel.ID) (*gqlmodel.Property, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Property. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*gqlmodel.Property, error) { +func (l *PropertyLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Property, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *PropertyLoader) LoadThunk(key id.PropertyID) func() (*gqlmodel.Property // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *PropertyLoader) LoadAll(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { +func (l *PropertyLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { results := make([]func() (*gqlmodel.Property, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *PropertyLoader) LoadAll(keys []id.PropertyID) ([]*gqlmodel.Property, [] // LoadAllThunk returns a function that when called will block waiting for a Propertys. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PropertyLoader) LoadAllThunk(keys []id.PropertyID) func() ([]*gqlmodel.Property, []error) { +func (l *PropertyLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Property, []error) { results := make([]func() (*gqlmodel.Property, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *PropertyLoader) LoadAllThunk(keys []id.PropertyID) func() ([]*gqlmodel. // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *PropertyLoader) Prime(key id.PropertyID, value *gqlmodel.Property) bool { +func (l *PropertyLoader) Prime(key gqlmodel.ID, value *gqlmodel.Property) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *PropertyLoader) Prime(key id.PropertyID, value *gqlmodel.Property) bool } // Clear the value at key from the cache, if it exists -func (l *PropertyLoader) Clear(key id.PropertyID) { +func (l *PropertyLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *PropertyLoader) unsafeSet(key id.PropertyID, value *gqlmodel.Property) { +func (l *PropertyLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Property) { if l.cache == nil { - l.cache = map[id.PropertyID]*gqlmodel.Property{} + l.cache = map[gqlmodel.ID]*gqlmodel.Property{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *propertyLoaderBatch) keyIndex(l *PropertyLoader, key id.PropertyID) int { +func (b *propertyLoaderBatch) keyIndex(l *PropertyLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go b/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go index f4c4379b7..253e408cf 100644 --- a/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go +++ b/internal/adapter/gql/gqldataloader/propertyschemaloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // PropertySchemaLoaderConfig captures the config to create a new PropertySchemaLoader type PropertySchemaLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, 
[]error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewPropertySchemaLoader(config PropertySchemaLoaderConfig) *PropertySchemaL // PropertySchemaLoader batches and caches requests type PropertySchemaLoader struct { // this method provides the data for the loader - fetch func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type PropertySchemaLoader struct { // INTERNAL // lazily created cache - cache map[id.PropertySchemaID]*gqlmodel.PropertySchema + cache map[gqlmodel.ID]*gqlmodel.PropertySchema // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type PropertySchemaLoader struct { } type propertySchemaLoaderBatch struct { - keys []id.PropertySchemaID + keys []gqlmodel.ID data []*gqlmodel.PropertySchema error []error closing bool @@ -64,14 +63,14 @@ type propertySchemaLoaderBatch struct { } // Load a PropertySchema by key, batching and caching will be applied automatically -func (l *PropertySchemaLoader) Load(key id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { +func (l *PropertySchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.PropertySchema, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a PropertySchema. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*gqlmodel.PropertySchema, error) { +func (l *PropertySchemaLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.PropertySchema, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *PropertySchemaLoader) LoadThunk(key id.PropertySchemaID) func() (*gqlmo // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *PropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { +func (l *PropertySchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { results := make([]func() (*gqlmodel.PropertySchema, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *PropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*gqlmodel. // LoadAllThunk returns a function that when called will block waiting for a PropertySchemas. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *PropertySchemaLoader) LoadAllThunk(keys []id.PropertySchemaID) func() ([]*gqlmodel.PropertySchema, []error) { +func (l *PropertySchemaLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.PropertySchema, []error) { results := make([]func() (*gqlmodel.PropertySchema, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *PropertySchemaLoader) LoadAllThunk(keys []id.PropertySchemaID) func() ( // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *PropertySchemaLoader) Prime(key id.PropertySchemaID, value *gqlmodel.PropertySchema) bool { +func (l *PropertySchemaLoader) Prime(key gqlmodel.ID, value *gqlmodel.PropertySchema) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *PropertySchemaLoader) Prime(key id.PropertySchemaID, value *gqlmodel.Pr } // Clear the value at key from the cache, if it exists -func (l *PropertySchemaLoader) Clear(key id.PropertySchemaID) { +func (l *PropertySchemaLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *PropertySchemaLoader) unsafeSet(key id.PropertySchemaID, value *gqlmodel.PropertySchema) { +func (l *PropertySchemaLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.PropertySchema) { if l.cache == nil { - l.cache = map[id.PropertySchemaID]*gqlmodel.PropertySchema{} + l.cache = map[gqlmodel.ID]*gqlmodel.PropertySchema{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *propertySchemaLoaderBatch) keyIndex(l *PropertySchemaLoader, key id.PropertySchemaID) int { +func (b *propertySchemaLoaderBatch) keyIndex(l *PropertySchemaLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/sceneloader_gen.go b/internal/adapter/gql/gqldataloader/sceneloader_gen.go index 29edcd912..233b4cdde 100644 --- a/internal/adapter/gql/gqldataloader/sceneloader_gen.go +++ b/internal/adapter/gql/gqldataloader/sceneloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // SceneLoaderConfig captures the config to create a new SceneLoader type SceneLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) + Fetch func(keys 
[]gqlmodel.ID) ([]*gqlmodel.Scene, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewSceneLoader(config SceneLoaderConfig) *SceneLoader { // SceneLoader batches and caches requests type SceneLoader struct { // this method provides the data for the loader - fetch func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type SceneLoader struct { // INTERNAL // lazily created cache - cache map[id.SceneID]*gqlmodel.Scene + cache map[gqlmodel.ID]*gqlmodel.Scene // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type SceneLoader struct { } type sceneLoaderBatch struct { - keys []id.SceneID + keys []gqlmodel.ID data []*gqlmodel.Scene error []error closing bool @@ -64,14 +63,14 @@ type sceneLoaderBatch struct { } // Load a Scene by key, batching and caching will be applied automatically -func (l *SceneLoader) Load(key id.SceneID) (*gqlmodel.Scene, error) { +func (l *SceneLoader) Load(key gqlmodel.ID) (*gqlmodel.Scene, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Scene. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*gqlmodel.Scene, error) { +func (l *SceneLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Scene, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *SceneLoader) LoadThunk(key id.SceneID) func() (*gqlmodel.Scene, error) // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *SceneLoader) LoadAll(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { +func (l *SceneLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { results := make([]func() (*gqlmodel.Scene, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *SceneLoader) LoadAll(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { // LoadAllThunk returns a function that when called will block waiting for a Scenes. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneLoader) LoadAllThunk(keys []id.SceneID) func() ([]*gqlmodel.Scene, []error) { +func (l *SceneLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Scene, []error) { results := make([]func() (*gqlmodel.Scene, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *SceneLoader) LoadAllThunk(keys []id.SceneID) func() ([]*gqlmodel.Scene, // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *SceneLoader) Prime(key id.SceneID, value *gqlmodel.Scene) bool { +func (l *SceneLoader) Prime(key gqlmodel.ID, value *gqlmodel.Scene) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *SceneLoader) Prime(key id.SceneID, value *gqlmodel.Scene) bool { } // Clear the value at key from the cache, if it exists -func (l *SceneLoader) Clear(key id.SceneID) { +func (l *SceneLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *SceneLoader) unsafeSet(key id.SceneID, value *gqlmodel.Scene) { +func (l *SceneLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Scene) { if l.cache == nil { - l.cache = map[id.SceneID]*gqlmodel.Scene{} + l.cache = map[gqlmodel.ID]*gqlmodel.Scene{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *sceneLoaderBatch) keyIndex(l *SceneLoader, key id.SceneID) int { +func (b *sceneLoaderBatch) keyIndex(l *SceneLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/taggrouploader_gen.go b/internal/adapter/gql/gqldataloader/taggrouploader_gen.go index 5a1ca6d5a..d50e6e693 100644 --- a/internal/adapter/gql/gqldataloader/taggrouploader_gen.go +++ b/internal/adapter/gql/gqldataloader/taggrouploader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // TagGroupLoaderConfig captures the config to create a new TagGroupLoader type TagGroupLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewTagGroupLoader(config 
TagGroupLoaderConfig) *TagGroupLoader { // TagGroupLoader batches and caches requests type TagGroupLoader struct { // this method provides the data for the loader - fetch func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type TagGroupLoader struct { // INTERNAL // lazily created cache - cache map[id.TagID]*gqlmodel.TagGroup + cache map[gqlmodel.ID]*gqlmodel.TagGroup // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type TagGroupLoader struct { } type tagGroupLoaderBatch struct { - keys []id.TagID + keys []gqlmodel.ID data []*gqlmodel.TagGroup error []error closing bool @@ -64,14 +63,14 @@ type tagGroupLoaderBatch struct { } // Load a TagGroup by key, batching and caching will be applied automatically -func (l *TagGroupLoader) Load(key id.TagID) (*gqlmodel.TagGroup, error) { +func (l *TagGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.TagGroup, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a TagGroup. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TagGroupLoader) LoadThunk(key id.TagID) func() (*gqlmodel.TagGroup, error) { +func (l *TagGroupLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.TagGroup, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *TagGroupLoader) LoadThunk(key id.TagID) func() (*gqlmodel.TagGroup, err // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *TagGroupLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { +func (l *TagGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { results := make([]func() (*gqlmodel.TagGroup, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *TagGroupLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagGroup, []error // LoadAllThunk returns a function that when called will block waiting for a TagGroups. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TagGroupLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.TagGroup, []error) { +func (l *TagGroupLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.TagGroup, []error) { results := make([]func() (*gqlmodel.TagGroup, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *TagGroupLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.TagGr // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *TagGroupLoader) Prime(key id.TagID, value *gqlmodel.TagGroup) bool { +func (l *TagGroupLoader) Prime(key gqlmodel.ID, value *gqlmodel.TagGroup) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *TagGroupLoader) Prime(key id.TagID, value *gqlmodel.TagGroup) bool { } // Clear the value at key from the cache, if it exists -func (l *TagGroupLoader) Clear(key id.TagID) { +func (l *TagGroupLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *TagGroupLoader) unsafeSet(key id.TagID, value *gqlmodel.TagGroup) { +func (l *TagGroupLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.TagGroup) { if l.cache == nil { - l.cache = map[id.TagID]*gqlmodel.TagGroup{} + l.cache = map[gqlmodel.ID]*gqlmodel.TagGroup{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *tagGroupLoaderBatch) keyIndex(l *TagGroupLoader, key id.TagID) int { +func (b *tagGroupLoaderBatch) keyIndex(l *TagGroupLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/tagitemloader_gen.go b/internal/adapter/gql/gqldataloader/tagitemloader_gen.go index 7931dec24..90ad04a4c 100644 --- a/internal/adapter/gql/gqldataloader/tagitemloader_gen.go +++ b/internal/adapter/gql/gqldataloader/tagitemloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // TagItemLoaderConfig captures the config to create a new TagItemLoader type TagItemLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func 
NewTagItemLoader(config TagItemLoaderConfig) *TagItemLoader { // TagItemLoader batches and caches requests type TagItemLoader struct { // this method provides the data for the loader - fetch func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type TagItemLoader struct { // INTERNAL // lazily created cache - cache map[id.TagID]*gqlmodel.TagItem + cache map[gqlmodel.ID]*gqlmodel.TagItem // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type TagItemLoader struct { } type tagItemLoaderBatch struct { - keys []id.TagID + keys []gqlmodel.ID data []*gqlmodel.TagItem error []error closing bool @@ -64,14 +63,14 @@ type tagItemLoaderBatch struct { } // Load a TagItem by key, batching and caching will be applied automatically -func (l *TagItemLoader) Load(key id.TagID) (*gqlmodel.TagItem, error) { +func (l *TagItemLoader) Load(key gqlmodel.ID) (*gqlmodel.TagItem, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a TagItem. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TagItemLoader) LoadThunk(key id.TagID) func() (*gqlmodel.TagItem, error) { +func (l *TagItemLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.TagItem, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *TagItemLoader) LoadThunk(key id.TagID) func() (*gqlmodel.TagItem, error // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *TagItemLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { +func (l *TagItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { results := make([]func() (*gqlmodel.TagItem, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *TagItemLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagItem, []error) // LoadAllThunk returns a function that when called will block waiting for a TagItems. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TagItemLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.TagItem, []error) { +func (l *TagItemLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.TagItem, []error) { results := make([]func() (*gqlmodel.TagItem, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *TagItemLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.TagIte // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *TagItemLoader) Prime(key id.TagID, value *gqlmodel.TagItem) bool { +func (l *TagItemLoader) Prime(key gqlmodel.ID, value *gqlmodel.TagItem) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *TagItemLoader) Prime(key id.TagID, value *gqlmodel.TagItem) bool { } // Clear the value at key from the cache, if it exists -func (l *TagItemLoader) Clear(key id.TagID) { +func (l *TagItemLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *TagItemLoader) unsafeSet(key id.TagID, value *gqlmodel.TagItem) { +func (l *TagItemLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.TagItem) { if l.cache == nil { - l.cache = map[id.TagID]*gqlmodel.TagItem{} + l.cache = map[gqlmodel.ID]*gqlmodel.TagItem{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *tagItemLoaderBatch) keyIndex(l *TagItemLoader, key id.TagID) int { +func (b *tagItemLoaderBatch) keyIndex(l *TagItemLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/tagloader_gen.go b/internal/adapter/gql/gqldataloader/tagloader_gen.go index 3d4cee6c2..909c22983 100644 --- a/internal/adapter/gql/gqldataloader/tagloader_gen.go +++ b/internal/adapter/gql/gqldataloader/tagloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // TagLoaderConfig captures the config to create a new TagLoader type TagLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.TagID) ([]*gqlmodel.Tag, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewTagLoader(config TagLoaderConfig) *TagLoader { // TagLoader 
batches and caches requests type TagLoader struct { // this method provides the data for the loader - fetch func(keys []id.TagID) ([]*gqlmodel.Tag, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type TagLoader struct { // INTERNAL // lazily created cache - cache map[id.TagID]*gqlmodel.Tag + cache map[gqlmodel.ID]*gqlmodel.Tag // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type TagLoader struct { } type tagLoaderBatch struct { - keys []id.TagID + keys []gqlmodel.ID data []*gqlmodel.Tag error []error closing bool @@ -64,14 +63,14 @@ type tagLoaderBatch struct { } // Load a Tag by key, batching and caching will be applied automatically -func (l *TagLoader) Load(key id.TagID) (*gqlmodel.Tag, error) { +func (l *TagLoader) Load(key gqlmodel.ID) (*gqlmodel.Tag, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Tag. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TagLoader) LoadThunk(key id.TagID) func() (*gqlmodel.Tag, error) { +func (l *TagLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Tag, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *TagLoader) LoadThunk(key id.TagID) func() (*gqlmodel.Tag, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *TagLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.Tag, []error) { +func (l *TagLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { results := make([]func() (*gqlmodel.Tag, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *TagLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.Tag, []error) { // LoadAllThunk returns a function that when called will block waiting for a Tags. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TagLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.Tag, []error) { +func (l *TagLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Tag, []error) { results := make([]func() (*gqlmodel.Tag, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *TagLoader) LoadAllThunk(keys []id.TagID) func() ([]*gqlmodel.Tag, []err // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *TagLoader) Prime(key id.TagID, value *gqlmodel.Tag) bool { +func (l *TagLoader) Prime(key gqlmodel.ID, value *gqlmodel.Tag) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *TagLoader) Prime(key id.TagID, value *gqlmodel.Tag) bool { } // Clear the value at key from the cache, if it exists -func (l *TagLoader) Clear(key id.TagID) { +func (l *TagLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *TagLoader) unsafeSet(key id.TagID, value *gqlmodel.Tag) { +func (l *TagLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Tag) { if l.cache == nil { - l.cache = map[id.TagID]*gqlmodel.Tag{} + l.cache = map[gqlmodel.ID]*gqlmodel.Tag{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *tagLoaderBatch) keyIndex(l *TagLoader, key id.TagID) int { +func (b *tagLoaderBatch) keyIndex(l *TagLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/teamloader_gen.go b/internal/adapter/gql/gqldataloader/teamloader_gen.go index ac7e4dd3a..9c5fbb903 100644 --- a/internal/adapter/gql/gqldataloader/teamloader_gen.go +++ b/internal/adapter/gql/gqldataloader/teamloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // TeamLoaderConfig captures the config to create a new TeamLoader type TeamLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.TeamID) ([]*gqlmodel.Team, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewTeamLoader(config TeamLoaderConfig) *TeamLoader { // TeamLoader batches and caches requests type TeamLoader struct { // 
this method provides the data for the loader - fetch func(keys []id.TeamID) ([]*gqlmodel.Team, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type TeamLoader struct { // INTERNAL // lazily created cache - cache map[id.TeamID]*gqlmodel.Team + cache map[gqlmodel.ID]*gqlmodel.Team // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type TeamLoader struct { } type teamLoaderBatch struct { - keys []id.TeamID + keys []gqlmodel.ID data []*gqlmodel.Team error []error closing bool @@ -64,14 +63,14 @@ type teamLoaderBatch struct { } // Load a Team by key, batching and caching will be applied automatically -func (l *TeamLoader) Load(key id.TeamID) (*gqlmodel.Team, error) { +func (l *TeamLoader) Load(key gqlmodel.ID) (*gqlmodel.Team, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a Team. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*gqlmodel.Team, error) { +func (l *TeamLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.Team, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *TeamLoader) LoadThunk(key id.TeamID) func() (*gqlmodel.Team, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *TeamLoader) LoadAll(keys []id.TeamID) ([]*gqlmodel.Team, []error) { +func (l *TeamLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { results := make([]func() (*gqlmodel.Team, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *TeamLoader) LoadAll(keys []id.TeamID) ([]*gqlmodel.Team, []error) { // LoadAllThunk returns a function that when called will block waiting for a Teams. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *TeamLoader) LoadAllThunk(keys []id.TeamID) func() ([]*gqlmodel.Team, []error) { +func (l *TeamLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.Team, []error) { results := make([]func() (*gqlmodel.Team, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *TeamLoader) LoadAllThunk(keys []id.TeamID) func() ([]*gqlmodel.Team, [] // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *TeamLoader) Prime(key id.TeamID, value *gqlmodel.Team) bool { +func (l *TeamLoader) Prime(key gqlmodel.ID, value *gqlmodel.Team) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *TeamLoader) Prime(key id.TeamID, value *gqlmodel.Team) bool { } // Clear the value at key from the cache, if it exists -func (l *TeamLoader) Clear(key id.TeamID) { +func (l *TeamLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *TeamLoader) unsafeSet(key id.TeamID, value *gqlmodel.Team) { +func (l *TeamLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.Team) { if l.cache == nil { - l.cache = map[id.TeamID]*gqlmodel.Team{} + l.cache = map[gqlmodel.ID]*gqlmodel.Team{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *teamLoaderBatch) keyIndex(l *TeamLoader, key id.TeamID) int { +func (b *teamLoaderBatch) keyIndex(l *TeamLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqldataloader/userloader_gen.go b/internal/adapter/gql/gqldataloader/userloader_gen.go index 84959213e..85bd08cde 100644 --- a/internal/adapter/gql/gqldataloader/userloader_gen.go +++ b/internal/adapter/gql/gqldataloader/userloader_gen.go @@ -7,13 +7,12 @@ import ( "time" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) // UserLoaderConfig captures the config to create a new UserLoader type UserLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []id.UserID) ([]*gqlmodel.User, []error) + Fetch func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -34,7 +33,7 @@ func NewUserLoader(config UserLoaderConfig) *UserLoader { // UserLoader batches and caches requests type 
UserLoader struct { // this method provides the data for the loader - fetch func(keys []id.UserID) ([]*gqlmodel.User, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) // how long to done before sending a batch wait time.Duration @@ -45,7 +44,7 @@ type UserLoader struct { // INTERNAL // lazily created cache - cache map[id.UserID]*gqlmodel.User + cache map[gqlmodel.ID]*gqlmodel.User // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,7 +55,7 @@ type UserLoader struct { } type userLoaderBatch struct { - keys []id.UserID + keys []gqlmodel.ID data []*gqlmodel.User error []error closing bool @@ -64,14 +63,14 @@ type userLoaderBatch struct { } // Load a User by key, batching and caching will be applied automatically -func (l *UserLoader) Load(key id.UserID) (*gqlmodel.User, error) { +func (l *UserLoader) Load(key gqlmodel.ID) (*gqlmodel.User, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a User. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *UserLoader) LoadThunk(key id.UserID) func() (*gqlmodel.User, error) { +func (l *UserLoader) LoadThunk(key gqlmodel.ID) func() (*gqlmodel.User, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() @@ -114,7 +113,7 @@ func (l *UserLoader) LoadThunk(key id.UserID) func() (*gqlmodel.User, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *UserLoader) LoadAll(keys []id.UserID) ([]*gqlmodel.User, []error) { +func (l *UserLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { results := make([]func() (*gqlmodel.User, error), len(keys)) for i, key := range keys { @@ -132,7 +131,7 @@ func (l *UserLoader) LoadAll(keys []id.UserID) ([]*gqlmodel.User, []error) { // LoadAllThunk returns a function that when called will block waiting for a Users. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *UserLoader) LoadAllThunk(keys []id.UserID) func() ([]*gqlmodel.User, []error) { +func (l *UserLoader) LoadAllThunk(keys []gqlmodel.ID) func() ([]*gqlmodel.User, []error) { results := make([]func() (*gqlmodel.User, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) @@ -150,7 +149,7 @@ func (l *UserLoader) LoadAllThunk(keys []id.UserID) func() ([]*gqlmodel.User, [] // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *UserLoader) Prime(key id.UserID, value *gqlmodel.User) bool { +func (l *UserLoader) Prime(key gqlmodel.ID, value *gqlmodel.User) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -164,22 +163,22 @@ func (l *UserLoader) Prime(key id.UserID, value *gqlmodel.User) bool { } // Clear the value at key from the cache, if it exists -func (l *UserLoader) Clear(key id.UserID) { +func (l *UserLoader) Clear(key gqlmodel.ID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *UserLoader) unsafeSet(key id.UserID, value *gqlmodel.User) { +func (l *UserLoader) unsafeSet(key gqlmodel.ID, value *gqlmodel.User) { if l.cache == nil { - l.cache = map[id.UserID]*gqlmodel.User{} + l.cache = map[gqlmodel.ID]*gqlmodel.User{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *userLoaderBatch) keyIndex(l *UserLoader, key id.UserID) int { +func (b *userLoaderBatch) keyIndex(l *UserLoader, key gqlmodel.ID) int { for i, existingKey := range b.keys { if key == existingKey { return i diff --git a/internal/adapter/gql/gqlmodel/convert_asset.go b/internal/adapter/gql/gqlmodel/convert_asset.go index 1404ac643..675eb8a19 100644 --- a/internal/adapter/gql/gqlmodel/convert_asset.go +++ b/internal/adapter/gql/gqlmodel/convert_asset.go @@ -8,10 +8,11 @@ func ToAsset(a *asset.Asset) *Asset { if a == nil { return nil } + return &Asset{ - ID: a.ID().ID(), + ID: IDFrom(a.ID()), CreatedAt: a.CreatedAt(), - TeamID: a.Team().ID(), + TeamID: IDFrom(a.Team()), Name: a.Name(), Size: a.Size(), URL: a.URL(), @@ -23,6 +24,7 @@ func AssetSortTypeFrom(ast *AssetSortType) *asset.SortType { if ast == nil { return nil } + switch *ast { case AssetSortTypeDate: return &asset.SortTypeID diff --git a/internal/adapter/gql/gqlmodel/convert_dataset.go b/internal/adapter/gql/gqlmodel/convert_dataset.go index 3c0969ac8..29f020a1d 100644 --- a/internal/adapter/gql/gqlmodel/convert_dataset.go 
+++ b/internal/adapter/gql/gqlmodel/convert_dataset.go @@ -2,6 +2,7 @@ package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/dataset" + "github.com/reearth/reearth-backend/pkg/util" "github.com/reearth/reearth-backend/pkg/value" ) @@ -16,8 +17,8 @@ func ToDatasetField(f *dataset.Field, parent *dataset.Dataset) *DatasetField { } return &DatasetField{ - SchemaID: parent.Schema().ID(), - FieldID: f.Field().ID(), + SchemaID: IDFrom(parent.Schema()), + FieldID: IDFrom(f.Field()), Type: ToValueType(value.Type(f.Type())), Value: ToDatasetValue(f.Value()), Source: f.Source(), @@ -29,17 +30,13 @@ func ToDataset(ds *dataset.Dataset) *Dataset { return nil } - dsFields := ds.Fields() - fields := make([]*DatasetField, 0, len(dsFields)) - for _, f := range dsFields { - fields = append(fields, ToDatasetField(f, ds)) - } - return &Dataset{ - ID: ds.ID().ID(), - SchemaID: ds.Schema().ID(), + ID: IDFrom(ds.ID()), + SchemaID: IDFrom(ds.Schema()), Source: ds.Source(), - Fields: fields, + Fields: util.FilterMapR(ds.Fields(), func(f *dataset.Field) *DatasetField { + return ToDatasetField(f, ds) + }), } } @@ -48,25 +45,21 @@ func ToDatasetSchema(ds *dataset.Schema) *DatasetSchema { return nil } - dsFields := ds.Fields() - fields := make([]*DatasetSchemaField, 0, len(dsFields)) - for _, f := range dsFields { - fields = append(fields, &DatasetSchemaField{ - ID: f.ID().ID(), - Name: f.Name(), - Type: ToValueType(value.Type(f.Type())), - SchemaID: ds.ID().ID(), - Source: f.Source(), - RefID: f.Ref().IDRef(), - }) - } - return &DatasetSchema{ - ID: ds.ID().ID(), + ID: IDFrom(ds.ID()), Source: ds.Source(), Name: ds.Name(), - SceneID: ds.Scene().ID(), - RepresentativeFieldID: ds.RepresentativeField().IDRef().IDRef(), - Fields: fields, + SceneID: IDFrom(ds.Scene()), + RepresentativeFieldID: IDFromRef(ds.RepresentativeField().IDRef()), + Fields: util.Map(ds.Fields(), func(f *dataset.SchemaField) *DatasetSchemaField { + return &DatasetSchemaField{ + ID: IDFrom(f.ID()), + Name: 
f.Name(), + Type: ToValueType(value.Type(f.Type())), + SchemaID: IDFrom(ds.ID()), + Source: f.Source(), + RefID: IDFromRef(f.Ref()), + } + }), } } diff --git a/internal/adapter/gql/gqlmodel/convert_layer.go b/internal/adapter/gql/gqlmodel/convert_layer.go index 8863d2bd0..00fca5667 100644 --- a/internal/adapter/gql/gqlmodel/convert_layer.go +++ b/internal/adapter/gql/gqlmodel/convert_layer.go @@ -4,6 +4,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/reearth/reearth-backend/pkg/layer/decoding" + "github.com/reearth/reearth-backend/pkg/util" ) func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { @@ -12,16 +13,16 @@ func ToLayerItem(l *layer.Item, parent *id.LayerID) *LayerItem { } return &LayerItem{ - ID: l.ID().ID(), - SceneID: l.Scene().ID(), + ID: IDFrom(l.ID()), + SceneID: IDFrom(l.Scene()), Name: l.Name(), IsVisible: l.IsVisible(), - PropertyID: l.Property().IDRef(), - PluginID: l.Plugin(), - ExtensionID: l.Extension(), + PropertyID: IDFromRef(l.Property()), + PluginID: IDFromPluginIDRef(l.Plugin()), + ExtensionID: IDFromStringRef(l.Extension()), Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), l.LinkedDataset()), - LinkedDatasetID: l.LinkedDataset().IDRef(), - ParentID: parent.IDRef(), + LinkedDatasetID: IDFromRef(l.LinkedDataset()), + ParentID: IDFromRef[id.Layer](parent), Tags: ToLayerTagList(l.Tags(), l.Scene()), } } @@ -31,25 +32,19 @@ func ToLayerGroup(l *layer.Group, parent *id.LayerID) *LayerGroup { return nil } - laLayers := l.Layers().Layers() - layers := make([]*id.ID, 0, len(laLayers)) - for _, lay := range laLayers { - layers = append(layers, lay.IDRef()) - } - return &LayerGroup{ - ID: l.ID().ID(), - SceneID: l.Scene().ID(), + ID: IDFrom(l.ID()), + SceneID: IDFrom(l.Scene()), Name: l.Name(), IsVisible: l.IsVisible(), - PropertyID: l.Property().IDRef(), - PluginID: l.Plugin(), - ExtensionID: l.Extension(), + PropertyID: IDFromRef(l.Property()), + PluginID: 
IDFromPluginIDRef(l.Plugin()), + ExtensionID: IDFromStringRef(l.Extension()), Infobox: ToInfobox(l.Infobox(), l.ID(), l.Scene(), nil), - LinkedDatasetSchemaID: l.LinkedDatasetSchema().IDRef(), - LayerIds: layers, + LinkedDatasetSchemaID: IDFromRef(l.LinkedDatasetSchema()), + LayerIds: util.Map(l.Layers().Layers(), IDFrom[id.Layer]), Root: l.IsRoot(), - ParentID: parent.IDRef(), + ParentID: IDFromRef[id.Layer](parent), Tags: ToLayerTagList(l.Tags(), l.Scene()), } } @@ -58,6 +53,7 @@ func ToLayer(l layer.Layer, parent *id.LayerID) Layer { if l == nil { return nil } + switch la := l.(type) { case *layer.Item: return ToLayerItem(la, parent) @@ -68,32 +64,23 @@ func ToLayer(l layer.Layer, parent *id.LayerID) Layer { } func ToLayers(layers layer.List, parent *id.LayerID) []Layer { - if len(layers) == 0 { - return nil - } - - result := make([]Layer, 0, len(layers)) - for _, l := range layers { - if l == nil { - continue - } - result = append(result, ToLayer(*l, parent)) - } - - return result + return util.Map(layers, func(l *layer.Layer) Layer { + return ToLayer(*l, parent) + }) } func ToInfoboxField(ibf *layer.InfoboxField, parentSceneID id.SceneID, parentDatasetID *id.DatasetID) *InfoboxField { if ibf == nil { return nil } + return &InfoboxField{ - ID: ibf.ID().ID(), - SceneID: parentSceneID.ID(), - PluginID: ibf.Plugin(), - ExtensionID: ibf.Extension(), - PropertyID: ibf.Property().ID(), - LinkedDatasetID: parentDatasetID.IDRef(), + ID: IDFrom(ibf.ID()), + SceneID: IDFrom(parentSceneID), + PluginID: IDFromPluginID(ibf.Plugin()), + ExtensionID: ID(ibf.Extension()), + PropertyID: IDFrom(ibf.Property()), + LinkedDatasetID: IDFromRef[id.Dataset](parentDatasetID), } } @@ -108,11 +95,11 @@ func ToInfobox(ib *layer.Infobox, parent id.LayerID, parentSceneID id.SceneID, p } return &Infobox{ - SceneID: parentSceneID.ID(), - PropertyID: ib.Property().ID(), + SceneID: IDFrom(parentSceneID), + PropertyID: IDFrom(ib.Property()), Fields: fields, - LayerID: parent.ID(), - 
LinkedDatasetID: parentDatasetID.IDRef(), + LayerID: IDFrom(parent), + LinkedDatasetID: IDFromRef[id.Dataset](parentDatasetID), } } @@ -122,9 +109,9 @@ func ToMergedLayer(layer *layer.Merged) *MergedLayer { } return &MergedLayer{ - SceneID: layer.Scene.ID(), - OriginalID: layer.Original.ID(), - ParentID: layer.Parent.IDRef(), + SceneID: IDFrom(layer.Scene), + OriginalID: IDFrom(layer.Original), + ParentID: IDFromRef(layer.Parent), Infobox: ToMergedInfobox(layer.Infobox, layer.Scene), Property: ToMergedPropertyFromMetadata(layer.Property), } @@ -135,14 +122,11 @@ func ToMergedInfobox(ib *layer.MergedInfobox, sceneID id.SceneID) *MergedInfobox return nil } - fields := make([]*MergedInfoboxField, 0, len(ib.Fields)) - for _, f := range ib.Fields { - fields = append(fields, ToMergedInfoboxField(f, sceneID)) - } - return &MergedInfobox{ - SceneID: sceneID.ID(), - Fields: fields, + SceneID: IDFrom(sceneID), + Fields: util.Map(ib.Fields, func(f *layer.MergedInfoboxField) *MergedInfoboxField { + return ToMergedInfoboxField(f, sceneID) + }), Property: ToMergedPropertyFromMetadata(ib.Property), } } @@ -153,10 +137,10 @@ func ToMergedInfoboxField(ibf *layer.MergedInfoboxField, sceneID id.SceneID) *Me } return &MergedInfoboxField{ - SceneID: sceneID.ID(), - OriginalID: ibf.ID.ID(), - PluginID: ibf.Plugin, - ExtensionID: ibf.Extension, + SceneID: IDFrom(sceneID), + OriginalID: IDFrom(ibf.ID), + PluginID: IDFromPluginID(ibf.Plugin), + ExtensionID: ID(ibf.Extension), Property: ToMergedPropertyFromMetadata(ibf.Property), } } @@ -181,14 +165,13 @@ func ToLayerTagList(t *layer.TagList, sid id.SceneID) []LayerTag { if t.IsEmpty() { return nil } - tags := t.Tags() - gtags := make([]LayerTag, 0, len(tags)) - for _, t := range tags { - if gt := ToLayerTag(t); gt != nil { - gtags = append(gtags, gt) + + return util.FilterMap(t.Tags(), func(v layer.Tag) *LayerTag { + if t := ToLayerTag(v); t != nil { + return &t } - } - return gtags + return nil + }) } func ToLayerTag(l layer.Tag) LayerTag 
{ @@ -209,7 +192,7 @@ func ToLayerTagItem(t *layer.TagItem) *LayerTagItem { return nil } return &LayerTagItem{ - TagID: t.ID().ID(), + TagID: IDFrom(t.ID()), } } @@ -217,15 +200,9 @@ func ToLayerTagGroup(t *layer.TagGroup) *LayerTagGroup { if t == nil { return nil } - children := t.Children() - tags := make([]*LayerTagItem, 0, len(children)) - for _, c := range children { - if t := ToLayerTagItem(c); t != nil { - tags = append(tags, t) - } - } + return &LayerTagGroup{ - TagID: t.ID().ID(), - Children: tags, + TagID: IDFrom(t.ID()), + Children: util.FilterMapR(t.Children(), ToLayerTagItem), } } diff --git a/internal/adapter/gql/gqlmodel/convert_plugin.go b/internal/adapter/gql/gqlmodel/convert_plugin.go index 883a7e229..9b5b7596c 100644 --- a/internal/adapter/gql/gqlmodel/convert_plugin.go +++ b/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -2,6 +2,7 @@ package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/util" ) func ToPlugin(p *plugin.Plugin) *Plugin { @@ -9,29 +10,10 @@ func ToPlugin(p *plugin.Plugin) *Plugin { return nil } - pid := p.ID() - pluginExtensions := p.Extensions() - extensions := make([]*PluginExtension, 0, len(pluginExtensions)) - for _, pe := range pluginExtensions { - extensions = append(extensions, &PluginExtension{ - ExtensionID: pe.ID(), - PluginID: pid, - Type: ToPluginExtensionType(pe.Type()), - Visualizer: ToVisualizerRef(pe.Visualizer()), - Name: pe.Name().String(), - Description: pe.Description().String(), - Icon: pe.Icon(), - SingleOnly: BoolToRef(pe.SingleOnly()), - WidgetLayout: ToPluginWidgetLayout(pe.WidgetLayout()), - PropertySchemaID: pe.Schema(), - AllTranslatedDescription: pe.Description(), - AllTranslatedName: pe.Name(), - }) - } - + pid := IDFromPluginID(p.ID()) return &Plugin{ ID: pid, - SceneID: pid.Scene().IDRef(), + SceneID: IDFromRef(p.ID().Scene()), Name: p.Name().String(), Description: p.Description().String(), AllTranslatedDescription: p.Description(), @@ 
-39,8 +21,23 @@ func ToPlugin(p *plugin.Plugin) *Plugin { Author: p.Author(), RepositoryURL: p.RepositoryURL(), Version: p.Version().String(), - PropertySchemaID: p.Schema(), - Extensions: extensions, + PropertySchemaID: IDFromPropertySchemaIDRef(p.Schema()), + Extensions: util.Map(p.Extensions(), func(pe *plugin.Extension) *PluginExtension { + return &PluginExtension{ + ExtensionID: ID(pe.ID()), + PluginID: pid, + Type: ToPluginExtensionType(pe.Type()), + Visualizer: ToVisualizerRef(pe.Visualizer()), + Name: pe.Name().String(), + Description: pe.Description().String(), + Icon: pe.Icon(), + SingleOnly: BoolToRef(pe.SingleOnly()), + WidgetLayout: ToPluginWidgetLayout(pe.WidgetLayout()), + PropertySchemaID: IDFromPropertySchemaID(pe.Schema()), + AllTranslatedDescription: pe.Description(), + AllTranslatedName: pe.Name(), + } + }), } } @@ -60,9 +57,9 @@ func ToPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { return PluginExtensionType("") } -func ToPluginMetadata(t *plugin.Metadata) (*PluginMetadata, error) { +func ToPluginMetadata(t *plugin.Metadata) *PluginMetadata { if t == nil { - return nil, nil + return nil } return &PluginMetadata{ @@ -71,7 +68,7 @@ func ToPluginMetadata(t *plugin.Metadata) (*PluginMetadata, error) { ThumbnailURL: t.ThumbnailUrl, Author: t.Author, CreatedAt: t.CreatedAt, - }, nil + } } func ToPluginWidgetLayout(wl *plugin.WidgetLayout) *WidgetLayout { diff --git a/internal/adapter/gql/gqlmodel/convert_project.go b/internal/adapter/gql/gqlmodel/convert_project.go index 863c95ff9..ce8a1249a 100644 --- a/internal/adapter/gql/gqlmodel/convert_project.go +++ b/internal/adapter/gql/gqlmodel/convert_project.go @@ -41,7 +41,7 @@ func ToProject(p *project.Project) *Project { } return &Project{ - ID: p.ID().ID(), + ID: IDFrom(p.ID()), CreatedAt: p.CreatedAt(), IsArchived: p.IsArchived(), IsBasicAuthActive: p.IsBasicAuthActive(), @@ -54,7 +54,7 @@ func ToProject(p *project.Project) *Project { PublishedAt: publishedAtRes, UpdatedAt: 
p.UpdatedAt(), Visualizer: Visualizer(p.Visualizer()), - TeamID: p.Team().ID(), + TeamID: IDFrom(p.Team()), PublishmentStatus: ToPublishmentStatus(p.PublishmentStatus()), PublicTitle: p.PublicTitle(), PublicDescription: p.PublicDescription(), diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index fb88aaa35..94053895a 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -5,6 +5,7 @@ import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/util" "github.com/reearth/reearth-backend/pkg/value" ) @@ -119,22 +120,14 @@ func ToPropertyField(f *property.Field, parent *property.Property, gl *property. return nil } - var links []*PropertyFieldLink - if flinks := f.Links(); flinks != nil { - links = make([]*PropertyFieldLink, 0, flinks.Len()) - for _, l := range flinks.Links() { - links = append(links, ToPropertyFieldLink(l)) - } - } - return &PropertyField{ ID: propertyFieldID(parent, gl, g, f), - ParentID: parent.ID().ID(), - SchemaID: parent.Schema(), - FieldID: f.Field(), + ParentID: IDFrom(parent.ID()), + SchemaID: IDFromPropertySchemaID(parent.Schema()), + FieldID: ID(f.Field()), Value: ToPropertyValue(f.Value()), Type: ToValueType(value.Type(f.Type())), - Links: links, + Links: util.Map(f.Links().Links(), ToPropertyFieldLink), } } @@ -150,38 +143,43 @@ func ToPropertyFieldLinks(flinks *property.Links) []*PropertyFieldLink { return links } -func FromPropertyFieldLink(datasetSchema, ds, fields []*id.ID) *property.Links { +func FromPropertyFieldLink(datasetSchema, ds, fields []ID) (*property.Links, error) { if len(datasetSchema) != len(fields) || (ds != nil && len(ds) != len(fields) && len(ds) > 1) { - return nil + return nil, nil } links := make([]*property.Link, 0, len(datasetSchema)) for i, dss := range datasetSchema { f := fields[i] - if dss == nil 
|| f == nil { - return nil + dsid, dsfid, err := ToID2[id.DatasetSchema, id.DatasetField](dss, f) + if err != nil { + return nil, err } - dsid := id.DatasetSchemaID(*dss) - dsfid := id.DatasetSchemaFieldID(*f) if len(ds) == 0 || (len(ds) == 1 && i > 0) { links = append(links, property.NewLinkFieldOnly(dsid, dsfid)) } else { - d := ds[i] - if d == nil { - return nil + did, err := ToID[id.Dataset](ds[i]) + if err != nil { + return nil, err } - links = append(links, property.NewLink(id.DatasetID(*d), dsid, dsfid)) + links = append(links, property.NewLink(did, dsid, dsfid)) } } - return property.NewLinks(links) + return property.NewLinks(links), nil } func ToPropertyFieldLink(link *property.Link) *PropertyFieldLink { + ds := link.DatasetSchema() + df := link.DatasetSchemaField() + if ds == nil || df == nil { + return nil + } + return &PropertyFieldLink{ - DatasetID: link.Dataset().IDRef(), - DatasetSchemaID: link.DatasetSchema().ID(), - DatasetSchemaFieldID: link.DatasetSchemaField().ID(), + DatasetID: IDFromRef(link.Dataset()), + DatasetSchemaID: IDFrom(*ds), + DatasetSchemaFieldID: IDFrom(*df), } } @@ -197,8 +195,8 @@ func ToProperty(property *property.Property) *Property { } return &Property{ - ID: property.ID().ID(), - SchemaID: property.Schema(), + ID: IDFrom(property.ID()), + SchemaID: IDFromPropertySchemaID(property.Schema()), Items: items, } } @@ -208,21 +206,18 @@ func ToPropertySchema(propertySchema *property.Schema) *PropertySchema { return nil } - pgroups := propertySchema.Groups().Groups() - groups := make([]*PropertySchemaGroup, 0, len(pgroups)) - for _, g := range pgroups { - groups = append(groups, ToPropertySchemaGroup(g, propertySchema.ID())) - } - + psid := propertySchema.ID() return &PropertySchema{ - ID: propertySchema.ID(), - Groups: groups, + ID: IDFromPropertySchemaID(psid), + Groups: util.Map(propertySchema.Groups().Groups(), func(g *property.SchemaGroup) *PropertySchemaGroup { + return ToPropertySchemaGroup(g, psid) + }), LinkableFields: 
ToPropertyLinkableFields(propertySchema.ID(), propertySchema.LinkableFields()), } } func ToPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields) *PropertyLinkableFields { - var latlng, url *id.PropertySchemaFieldID + var latlng, url *id.PropertyFieldID if l.LatLng != nil { latlng = &l.LatLng.Field } @@ -230,9 +225,9 @@ func ToPropertyLinkableFields(sid id.PropertySchemaID, l property.LinkableFields url = &l.URL.Field } return &PropertyLinkableFields{ - SchemaID: sid, - Latlng: latlng, - URL: url, + SchemaID: IDFromPropertySchemaID(sid), + Latlng: IDFromStringRef(latlng), + URL: IDFromStringRef(url), } } @@ -241,31 +236,25 @@ func ToPropertySchemaField(f *property.SchemaField) *PropertySchemaField { return nil } - var choices []*PropertySchemaFieldChoice - if c := f.Choices(); c != nil { - choices = make([]*PropertySchemaFieldChoice, 0, len(c)) - for _, k := range c { - choices = append(choices, &PropertySchemaFieldChoice{ - Key: k.Key, - Title: k.Title.String(), - AllTranslatedTitle: k.Title, - Icon: stringToRef(k.Icon), - }) - } - } - return &PropertySchemaField{ - FieldID: f.ID(), - Type: ToValueType(value.Type(f.Type())), - Title: f.Title().String(), - Description: f.Description().String(), - Prefix: stringToRef(f.Prefix()), - Suffix: stringToRef(f.Suffix()), - DefaultValue: ToPropertyValue(f.DefaultValue()), - UI: ToPropertySchemaFieldUI(f.UI()), - Min: f.Min(), - Max: f.Max(), - Choices: choices, + FieldID: ID(f.ID()), + Type: ToValueType(value.Type(f.Type())), + Title: f.Title().String(), + Description: f.Description().String(), + Prefix: stringToRef(f.Prefix()), + Suffix: stringToRef(f.Suffix()), + DefaultValue: ToPropertyValue(f.DefaultValue()), + UI: ToPropertySchemaFieldUI(f.UI()), + Min: f.Min(), + Max: f.Max(), + Choices: util.Map(f.Choices(), func(c property.SchemaFieldChoice) *PropertySchemaFieldChoice { + return &PropertySchemaFieldChoice{ + Key: c.Key, + Title: c.Title.String(), + AllTranslatedTitle: c.Title, + Icon: 
stringToRef(c.Icon), + } + }), IsAvailableIf: ToPropertyConditon(f.IsAvailableIf()), AllTranslatedTitle: f.Title(), AllTranslatedDescription: f.Description(), @@ -276,6 +265,7 @@ func ToPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI if ui == nil { return nil } + ui2 := PropertySchemaFieldUI("") switch *ui { case property.SchemaFieldUIMultiline: @@ -307,10 +297,11 @@ func ToMergedPropertyFromMetadata(m *property.MergedMetadata) *MergedProperty { if m == nil { return nil } + return &MergedProperty{ - OriginalID: m.Original.IDRef(), - ParentID: m.Parent.IDRef(), - LinkedDatasetID: m.LinkedDataset.IDRef(), + OriginalID: IDFromRef(m.Original), + ParentID: IDFromRef(m.Parent), + LinkedDatasetID: IDFromRef(m.LinkedDataset), Groups: nil, // resolved by graphql resolver } } @@ -319,16 +310,15 @@ func ToMergedProperty(m *property.Merged) *MergedProperty { if m == nil { return nil } - groups := make([]*MergedPropertyGroup, 0, len(m.Groups)) - for _, f := range m.Groups { - groups = append(groups, ToMergedPropertyGroup(f, m)) - } + return &MergedProperty{ - OriginalID: m.Original.IDRef(), - ParentID: m.Parent.IDRef(), - SchemaID: &m.Schema, - LinkedDatasetID: m.LinkedDataset.IDRef(), - Groups: groups, + OriginalID: IDFromRef(m.Original), + ParentID: IDFromRef(m.Parent), + SchemaID: IDFromPropertySchemaIDRef(m.Schema.Ref()), + LinkedDatasetID: IDFromRef(m.LinkedDataset), + Groups: util.Map(m.Groups, func(g *property.MergedGroup) *MergedPropertyGroup { + return ToMergedPropertyGroup(g, m) + }), } } @@ -336,24 +326,21 @@ func ToMergedPropertyGroup(f *property.MergedGroup, p *property.Merged) *MergedP if f == nil { return nil } - fields := make([]*MergedPropertyField, 0, len(f.Fields)) - for _, f2 := range f.Fields { - fields = append(fields, ToMergedPropertyField(f2, p.Schema)) - } - groups := make([]*MergedPropertyGroup, 0, len(f.Groups)) - for _, f2 := range f.Groups { - groups = append(groups, ToMergedPropertyGroup(f2, p)) - } + return 
&MergedPropertyGroup{ - OriginalPropertyID: p.Original.IDRef(), - ParentPropertyID: p.Parent.IDRef(), - OriginalID: f.Original.IDRef(), - SchemaGroupID: f.SchemaGroup, - ParentID: f.Parent.IDRef(), - SchemaID: p.Schema.Ref(), - LinkedDatasetID: f.LinkedDataset.IDRef(), - Fields: fields, - Groups: groups, + OriginalPropertyID: IDFromRef(p.Original), + ParentPropertyID: IDFromRef(p.Parent), + OriginalID: IDFromRef(f.Original), + SchemaGroupID: ID(f.SchemaGroup), + ParentID: IDFromRef(f.Parent), + SchemaID: IDFromPropertySchemaIDRef(p.Schema.Ref()), + LinkedDatasetID: IDFromRef(f.LinkedDataset), + Fields: util.Map(f.Fields, func(f *property.MergedField) *MergedPropertyField { + return ToMergedPropertyField(f, p.Schema) + }), + Groups: util.Map(f.Groups, func(g *property.MergedGroup) *MergedPropertyGroup { + return ToMergedPropertyGroup(g, p) + }), } } @@ -361,9 +348,10 @@ func ToMergedPropertyField(f *property.MergedField, s id.PropertySchemaID) *Merg if f == nil { return nil } + return &MergedPropertyField{ - FieldID: f.ID, - SchemaID: s, + FieldID: ID(f.ID), + SchemaID: IDFromPropertySchemaID(s), Links: ToPropertyFieldLinks(f.Links), Value: ToPropertyValue(f.Value), Type: ToValueType(value.Type(f.Type)), @@ -375,10 +363,13 @@ func ToPropertySchemaGroup(g *property.SchemaGroup, s property.SchemaID) *Proper if g == nil { return nil } + gfields := g.Fields() fields := make([]*PropertySchemaField, 0, len(gfields)) + var representativeField *PropertySchemaField representativeFieldID := g.RepresentativeFieldID() + for _, f := range gfields { f2 := ToPropertySchemaField(f) fields = append(fields, f2) @@ -386,13 +377,14 @@ func ToPropertySchemaGroup(g *property.SchemaGroup, s property.SchemaID) *Proper representativeField = f2 } } + return &PropertySchemaGroup{ - SchemaGroupID: g.ID(), - SchemaID: s, + SchemaGroupID: ID(g.ID()), + SchemaID: IDFromPropertySchemaID(s), IsList: g.IsList(), Title: g.Title().StringRef(), Fields: fields, - RepresentativeFieldID: 
representativeFieldID, + RepresentativeFieldID: IDFromStringRef(representativeFieldID), RepresentativeField: representativeField, AllTranslatedTitle: g.Title(), IsAvailableIf: ToPropertyConditon(g.IsAvailableIf()), @@ -404,36 +396,28 @@ func ToPropertyGroup(g *property.Group, p *property.Property, gl *property.Group return nil } - gfields := g.Fields(nil) - fields := make([]*PropertyField, 0, len(gfields)) - for _, f := range gfields { - fields = append(fields, ToPropertyField(f, p, gl, g)) - } - return &PropertyGroup{ - ID: g.ID().ID(), - SchemaID: p.Schema(), - SchemaGroupID: g.SchemaGroup(), - Fields: fields, + ID: IDFrom(g.ID()), + SchemaID: IDFromPropertySchemaID(p.Schema()), + SchemaGroupID: ID(g.SchemaGroup()), + Fields: util.Map(g.Fields(nil), func(f *property.Field) *PropertyField { + return ToPropertyField(f, p, gl, g) + }), } } -func ToPropertyGroupList(g *property.GroupList, p *property.Property) *PropertyGroupList { - if g == nil { +func ToPropertyGroupList(gl *property.GroupList, p *property.Property) *PropertyGroupList { + if gl == nil { return nil } - ggroups := g.Groups() - groups := make([]*PropertyGroup, 0, len(ggroups)) - for _, f := range ggroups { - groups = append(groups, ToPropertyGroup(f, p, g)) - } - return &PropertyGroupList{ - ID: g.ID().ID(), - SchemaID: p.Schema(), - SchemaGroupID: g.SchemaGroup(), - Groups: groups, + ID: IDFrom(gl.ID()), + SchemaID: IDFromPropertySchemaID(p.Schema()), + SchemaGroupID: ID(gl.SchemaGroup()), + Groups: util.Map(gl.Groups(), func(g *property.Group) *PropertyGroup { + return ToPropertyGroup(g, p, gl) + }), } } @@ -456,15 +440,14 @@ func ToPropertyConditon(c *property.Condition) *PropertyCondition { } return &PropertyCondition{ - FieldID: c.Field, + FieldID: ID(c.Field), Value: ToPropertyValue(c.Value), Type: ToValueType(value.Type(c.Value.Type())), } } -func FromPointer(schemaItem *id.PropertySchemaGroupID, item *id.ID, field *id.PropertySchemaFieldID) *property.Pointer { - i := 
id.PropertyItemIDFromRefID(item) - return property.NewPointer(schemaItem, i, field) +func FromPointer(schemaItem *id.PropertySchemaGroupID, item *ID, field *id.PropertyFieldID) *property.Pointer { + return property.NewPointer(schemaItem, ToIDRef[id.PropertyItem](item), field) } func ToPropertyLatLng(lat, lng *float64) *property.LatLng { @@ -495,12 +478,3 @@ func propertyFieldID(property *property.Property, groupList *property.GroupList, return sb.String() } - -func getPropertySchemaFieldIDFromGQLPropertyFieldID(i string) string { - const sep = "_" - s := strings.Split(i, sep) - if len(s) > 0 { - return s[len(s)-1] - } - return "" -} diff --git a/internal/adapter/gql/gqlmodel/convert_scene.go b/internal/adapter/gql/gqlmodel/convert_scene.go index c99bf2e74..a172d1ca2 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene.go +++ b/internal/adapter/gql/gqlmodel/convert_scene.go @@ -2,6 +2,7 @@ package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/util" ) func ToSceneWidget(w *scene.Widget) *SceneWidget { @@ -10,10 +11,10 @@ func ToSceneWidget(w *scene.Widget) *SceneWidget { } return &SceneWidget{ - ID: w.ID().ID(), - PluginID: w.Plugin(), - ExtensionID: w.Extension(), - PropertyID: w.Property().ID(), + ID: IDFrom(w.ID()), + PluginID: IDFromPluginID(w.Plugin()), + ExtensionID: ID(w.Extension()), + PropertyID: IDFrom(w.Property()), Enabled: w.Enabled(), Extended: w.Extended(), } @@ -25,16 +26,16 @@ func ToScenePlugin(sp *scene.Plugin) *ScenePlugin { } return &ScenePlugin{ - PluginID: sp.Plugin(), - PropertyID: sp.Property().IDRef(), + PluginID: IDFromPluginID(sp.Plugin()), + PropertyID: IDFromRef(sp.Property()), } } func ToCluster(c *scene.Cluster) *Cluster { return &Cluster{ - ID: c.ID().ID(), + ID: IDFrom(c.ID()), Name: c.Name(), - PropertyID: c.Property().ID(), + PropertyID: IDFrom(c.Property()), } } @@ -43,51 +44,17 @@ func ToScene(scene *scene.Scene) *Scene { return nil } - sceneWidgets := 
scene.Widgets().Widgets() - widgets := make([]*SceneWidget, 0, len(sceneWidgets)) - for _, w := range sceneWidgets { - widgets = append(widgets, ToSceneWidget(w)) - } - - cl := scene.Clusters().Clusters() - clusters := make([]*Cluster, 0, len(cl)) - for _, c := range cl { - clusters = append(clusters, ToCluster(c)) - } - - scenePlugins := scene.Plugins().Plugins() - plugins := make([]*ScenePlugin, 0, len(scenePlugins)) - for _, sp := range scenePlugins { - plugins = append(plugins, ToScenePlugin(sp)) - } - return &Scene{ - ID: scene.ID().ID(), - ProjectID: scene.Project().ID(), - PropertyID: scene.Property().ID(), - TeamID: scene.Team().ID(), - RootLayerID: scene.RootLayer().ID(), + ID: IDFrom(scene.ID()), + ProjectID: IDFrom(scene.Project()), + PropertyID: IDFrom(scene.Property()), + TeamID: IDFrom(scene.Team()), + RootLayerID: IDFrom(scene.RootLayer()), CreatedAt: scene.CreatedAt(), UpdatedAt: scene.UpdatedAt(), - Clusters: clusters, - Widgets: widgets, + Plugins: util.Map(scene.Plugins().Plugins(), ToScenePlugin), + Clusters: util.Map(scene.Clusters().Clusters(), ToCluster), + Widgets: util.Map(scene.Widgets().Widgets(), ToSceneWidget), WidgetAlignSystem: ToWidgetAlignSystem(scene.Widgets().Alignment()), - Plugins: plugins, - } -} - -func ToSceneLockMode(lm scene.LockMode) SceneLockMode { - switch lm { - case scene.LockModeFree: - return SceneLockModeFree - case scene.LockModePending: - return SceneLockModePending - case scene.LockModeDatasetSyncing: - return SceneLockModeDatasetSyncing - case scene.LockModePluginUpgrading: - return SceneLockModePluginUpgrading - case scene.LockModePublishing: - return SceneLockModePublishing } - return SceneLockMode("invalid") } diff --git a/internal/adapter/gql/gqlmodel/convert_scene_align.go b/internal/adapter/gql/gqlmodel/convert_scene_align.go index c453ec7aa..644f2ed73 100644 --- a/internal/adapter/gql/gqlmodel/convert_scene_align.go +++ b/internal/adapter/gql/gqlmodel/convert_scene_align.go @@ -3,6 +3,7 @@ package 
gqlmodel import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/util" ) func ToWidgetAlignSystem(sas *scene.WidgetAlignSystem) *WidgetAlignSystem { @@ -17,6 +18,7 @@ func ToWidgetZone(z *scene.WidgetZone) *WidgetZone { if z == nil { return nil } + return &WidgetZone{ Left: ToWidgetSection(z.Section(scene.WidgetSectionLeft)), Center: ToWidgetSection(z.Section(scene.WidgetSectionCenter)), @@ -28,6 +30,7 @@ func ToWidgetSection(s *scene.WidgetSection) *WidgetSection { if s == nil { return nil } + return &WidgetSection{ Top: ToWidgetArea(s.Area(scene.WidgetAreaTop)), Middle: ToWidgetArea(s.Area(scene.WidgetAreaMiddle)), @@ -39,13 +42,9 @@ func ToWidgetArea(a *scene.WidgetArea) *WidgetArea { if a == nil { return nil } - wids := a.WidgetIDs() - ids := make([]*id.ID, 0, len(wids)) - for _, wid := range wids { - ids = append(ids, wid.IDRef()) - } + return &WidgetArea{ - WidgetIds: ids, + WidgetIds: util.Map(a.WidgetIDs(), IDFrom[id.Widget]), Align: ToWidgetAlignType(a.Alignment()), } } @@ -66,6 +65,7 @@ func FromSceneWidgetLocation(l *WidgetLocationInput) *scene.WidgetLocation { if l == nil { return nil } + return &scene.WidgetLocation{ Zone: FromSceneWidgetZoneType(l.Zone), Section: FromSceneWidgetSectionType(l.Section), diff --git a/internal/adapter/gql/gqlmodel/convert_tag.go b/internal/adapter/gql/gqlmodel/convert_tag.go index 34de71a7b..71d95711a 100644 --- a/internal/adapter/gql/gqlmodel/convert_tag.go +++ b/internal/adapter/gql/gqlmodel/convert_tag.go @@ -3,20 +3,22 @@ package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/tag" + "github.com/reearth/reearth-backend/pkg/util" ) func ToTagItem(ti *tag.Item) *TagItem { if ti == nil { return nil } + return &TagItem{ - ID: ti.ID().ID(), - SceneID: ti.Scene().ID(), + ID: IDFrom(ti.ID()), + SceneID: IDFrom(ti.Scene()), Label: ti.Label(), - ParentID: ti.Parent().IDRef(), - 
LinkedDatasetID: ti.LinkedDatasetID().IDRef(), - LinkedDatasetSchemaID: ti.LinkedDatasetSchemaID().IDRef(), - LinkedDatasetFieldID: ti.LinkedDatasetFieldID().IDRef(), + ParentID: IDFromRef(ti.Parent()), + LinkedDatasetID: IDFromRef(ti.LinkedDatasetID()), + LinkedDatasetSchemaID: IDFromRef(ti.LinkedDatasetSchemaID()), + LinkedDatasetFieldID: IDFromRef(ti.LinkedDatasetFieldID()), } } @@ -24,18 +26,12 @@ func ToTagGroup(tg *tag.Group) *TagGroup { if tg == nil { return nil } - tags := tg.Tags().Tags() - ids := make([]*id.ID, 0, len(tags)) - for _, tid := range tags { - if !tid.IsNil() { - ids = append(ids, tid.IDRef()) - } - } + return &TagGroup{ - ID: tg.ID().ID(), - SceneID: tg.Scene().ID(), + ID: IDFrom(tg.ID()), + SceneID: IDFrom(tg.Scene()), Label: tg.Label(), - TagIds: ids, + TagIds: util.Map(tg.Tags(), IDFrom[id.Tag]), } } @@ -43,6 +39,7 @@ func ToTag(t tag.Tag) Tag { if t == nil { return nil } + switch ty := t.(type) { case *tag.Item: return ToTagItem(ty) diff --git a/internal/adapter/gql/gqlmodel/convert_team.go b/internal/adapter/gql/gqlmodel/convert_team.go deleted file mode 100644 index 034784acc..000000000 --- a/internal/adapter/gql/gqlmodel/convert_team.go +++ /dev/null @@ -1,51 +0,0 @@ -package gqlmodel - -import ( - "github.com/reearth/reearth-backend/pkg/user" -) - -func ToTeam(t *user.Team) *Team { - if t == nil { - return nil - } - - memberMap := t.Members().Members() - members := make([]*TeamMember, 0, len(memberMap)) - for u, r := range memberMap { - members = append(members, &TeamMember{ - UserID: u.ID(), - Role: toRole(r), - }) - } - - return &Team{ - ID: t.ID().ID(), - Name: t.Name(), - Personal: t.IsPersonal(), - Members: members, - } -} - -func FromRole(r Role) user.Role { - switch r { - case RoleReader: - return user.RoleReader - case RoleWriter: - return user.RoleWriter - case RoleOwner: - return user.RoleOwner - } - return user.Role("") -} - -func toRole(r user.Role) Role { - switch r { - case user.RoleReader: - return RoleReader - case 
user.RoleWriter: - return RoleWriter - case user.RoleOwner: - return RoleOwner - } - return Role("") -} diff --git a/internal/adapter/gql/gqlmodel/convert_user.go b/internal/adapter/gql/gqlmodel/convert_user.go index d7ff342e0..4a4ceb68b 100644 --- a/internal/adapter/gql/gqlmodel/convert_user.go +++ b/internal/adapter/gql/gqlmodel/convert_user.go @@ -2,46 +2,45 @@ package gqlmodel import ( "github.com/reearth/reearth-backend/pkg/user" + "github.com/reearth/reearth-backend/pkg/util" ) -func ToUser(user *user.User) *User { - if user == nil { +func ToUser(u *user.User) *User { + if u == nil { return nil } - auths := user.Auths() - authsgql := make([]string, 0, len(auths)) - for _, a := range auths { - authsgql = append(authsgql, a.Provider) - } + return &User{ - ID: user.ID().ID(), - Name: user.Name(), - Email: user.Email(), - Lang: user.Lang(), - Theme: Theme(user.Theme()), - MyTeamID: user.Team().ID(), - Auths: authsgql, + ID: IDFrom(u.ID()), + Name: u.Name(), + Email: u.Email(), } } -func ToSearchedUser(u *user.User) *SearchedUser { +func ToMe(u *user.User) *Me { if u == nil { return nil } - return &SearchedUser{ - UserID: u.ID().ID(), - UserName: u.Name(), - UserEmail: u.Email(), + + return &Me{ + ID: IDFrom(u.ID()), + Name: u.Name(), + Email: u.Email(), + Lang: u.Lang(), + Theme: Theme(u.Theme()), + MyTeamID: IDFrom(u.Team()), + Auths: util.Map(u.Auths(), func(a user.Auth) string { + return a.Provider + }), } } func ToTheme(t *Theme) *user.Theme { - th := user.ThemeDefault - if t == nil { return nil } + th := user.ThemeDefault switch *t { case ThemeDark: th = user.ThemeDark @@ -50,3 +49,49 @@ func ToTheme(t *Theme) *user.Theme { } return &th } + +func ToTeam(t *user.Team) *Team { + if t == nil { + return nil + } + + memberMap := t.Members().Members() + members := make([]*TeamMember, 0, len(memberMap)) + for u, r := range memberMap { + members = append(members, &TeamMember{ + UserID: IDFrom(u), + Role: ToRole(r), + }) + } + + return &Team{ + ID: IDFrom(t.ID()), + 
Name: t.Name(), + Personal: t.IsPersonal(), + Members: members, + } +} + +func FromRole(r Role) user.Role { + switch r { + case RoleReader: + return user.RoleReader + case RoleWriter: + return user.RoleWriter + case RoleOwner: + return user.RoleOwner + } + return user.Role("") +} + +func ToRole(r user.Role) Role { + switch r { + case user.RoleReader: + return RoleReader + case user.RoleWriter: + return RoleWriter + case user.RoleOwner: + return RoleOwner + } + return Role("") +} diff --git a/internal/adapter/gql/gqlmodel/models.go b/internal/adapter/gql/gqlmodel/models.go index 1886a626e..9d8384b85 100644 --- a/internal/adapter/gql/gqlmodel/models.go +++ b/internal/adapter/gql/gqlmodel/models.go @@ -20,8 +20,8 @@ func (l *PropertyFieldLink) Copy() *PropertyFieldLink { } } -func (d *Dataset) Field(id id.ID) *DatasetField { - if d == nil || id.IsNil() { +func (d *Dataset) Field(id ID) *DatasetField { + if d == nil || id == "" { return nil } for _, f := range d.Fields { @@ -32,8 +32,8 @@ func (d *Dataset) Field(id id.ID) *DatasetField { return nil } -func (d *DatasetSchema) Field(id id.ID) *DatasetSchemaField { - if d == nil || id.IsNil() { +func (d *DatasetSchema) Field(id ID) *DatasetSchemaField { + if d == nil || id == "" { return nil } for _, f := range d.Fields { @@ -44,14 +44,14 @@ func (d *DatasetSchema) Field(id id.ID) *DatasetSchemaField { return nil } -func (d *Property) Field(id id.PropertySchemaFieldID) *PropertyField { +func (d *Property) Field(id id.PropertyFieldID) *PropertyField { if d == nil || id == "" { return nil } for _, g := range d.Items { if gi, ok := g.(*PropertyGroup); ok { for _, f := range gi.Fields { - if s := getPropertySchemaFieldIDFromGQLPropertyFieldID(f.ID); s == string(id) { + if f.ID == string(id) { return f } } @@ -60,7 +60,7 @@ func (d *Property) Field(id id.PropertySchemaFieldID) *PropertyField { return nil } -func (d *PropertySchema) Field(id id.PropertySchemaFieldID) *PropertySchemaField { +func (d *PropertySchema) Field(id ID) 
*PropertySchemaField { if d == nil || id == "" { return nil } @@ -74,7 +74,7 @@ func (d *PropertySchema) Field(id id.PropertySchemaFieldID) *PropertySchemaField return nil } -func (d *Plugin) Extension(id id.PluginExtensionID) *PluginExtension { +func (d *Plugin) Extension(id ID) *PluginExtension { if d == nil || id == "" { return nil } @@ -86,8 +86,8 @@ func (d *Plugin) Extension(id id.PluginExtensionID) *PluginExtension { return nil } -func (d *Infobox) Field(id id.ID) *InfoboxField { - if d == nil || id.IsNil() { +func (d *Infobox) Field(id ID) *InfoboxField { + if d == nil || id == "" { return nil } for _, f := range d.Fields { @@ -98,8 +98,8 @@ func (d *Infobox) Field(id id.ID) *InfoboxField { return nil } -func (d *MergedInfobox) Field(id id.ID) *MergedInfoboxField { - if d == nil || id.IsNil() { +func (d *MergedInfobox) Field(id ID) *MergedInfoboxField { + if d == nil || id == "" { return nil } for _, f := range d.Fields { @@ -110,7 +110,7 @@ func (d *MergedInfobox) Field(id id.ID) *MergedInfoboxField { return nil } -func AttachParentLayer(layers []*Layer, parent id.ID) []Layer { +func AttachParentLayer(layers []*Layer, parent ID) []Layer { if layers == nil { return nil } @@ -140,19 +140,19 @@ func NewEmptyPageInfo() *PageInfo { return ToPageInfo(usecase.NewPageInfo(0, nil, nil, false, false)) } -func (d *PropertyGroup) Field(id id.PropertySchemaFieldID) *PropertyField { +func (d *PropertyGroup) Field(id ID) *PropertyField { if d == nil || id == "" { return nil } for _, f := range d.Fields { - if s := getPropertySchemaFieldIDFromGQLPropertyFieldID(f.ID); s == string(id) { + if f.ID == string(id) { return f } } return nil } -func (d *PropertySchema) Group(id id.PropertySchemaGroupID) *PropertySchemaGroup { +func (d *PropertySchema) Group(id ID) *PropertySchemaGroup { if d == nil || id == "" { return nil } @@ -164,8 +164,8 @@ func (d *PropertySchema) Group(id id.PropertySchemaGroupID) *PropertySchemaGroup return nil } -func (d *Property) Item(id id.ID) 
PropertyItem { - if d == nil || id.IsNil() { +func (d *Property) Item(id ID) PropertyItem { + if d == nil || id == "" { return nil } for _, f := range d.Items { @@ -187,8 +187,8 @@ func (d *Property) Item(id id.ID) PropertyItem { return nil } -func (d *PropertyGroupList) Group(id id.ID) *PropertyGroup { - if d == nil || id.IsNil() { +func (d *PropertyGroupList) Group(id ID) *PropertyGroup { + if d == nil || id == "" { return nil } for _, f := range d.Groups { @@ -199,7 +199,7 @@ func (d *PropertyGroupList) Group(id id.ID) *PropertyGroup { return nil } -func (d *MergedProperty) PropertyID() *id.ID { +func (d *MergedProperty) PropertyID() *ID { if d.OriginalID != nil { return d.OriginalID } else if d.ParentID != nil { @@ -208,8 +208,8 @@ func (d *MergedProperty) PropertyID() *id.ID { return nil } -func (d *MergedProperty) GroupByOriginal(id id.ID) *MergedPropertyGroup { - if d == nil || id.IsNil() { +func (d *MergedProperty) GroupByOriginal(id ID) *MergedPropertyGroup { + if d == nil || id == "" { return nil } for _, f := range d.Groups { @@ -220,8 +220,8 @@ func (d *MergedProperty) GroupByOriginal(id id.ID) *MergedPropertyGroup { return nil } -func (d *MergedProperty) GroupByParent(id id.ID) *MergedPropertyGroup { - if d == nil || id.IsNil() { +func (d *MergedProperty) GroupByParent(id ID) *MergedPropertyGroup { + if d == nil || id == "" { return nil } for _, f := range d.Groups { @@ -232,7 +232,7 @@ func (d *MergedProperty) GroupByParent(id id.ID) *MergedPropertyGroup { return nil } -func (d *MergedPropertyGroup) PropertyID() *id.ID { +func (d *MergedPropertyGroup) PropertyID() *ID { if d.OriginalID != nil { return d.OriginalID } else if d.ParentID != nil { @@ -241,8 +241,8 @@ func (d *MergedPropertyGroup) PropertyID() *id.ID { return nil } -func (d *MergedPropertyGroup) GroupByOriginal(id id.ID) *MergedPropertyGroup { - if d == nil || id.IsNil() { +func (d *MergedPropertyGroup) GroupByOriginal(id ID) *MergedPropertyGroup { + if d == nil || id == "" { return nil } 
for _, f := range d.Groups { @@ -253,8 +253,8 @@ func (d *MergedPropertyGroup) GroupByOriginal(id id.ID) *MergedPropertyGroup { return nil } -func (d *MergedPropertyGroup) GroupByParent(id id.ID) *MergedPropertyGroup { - if d == nil || id.IsNil() { +func (d *MergedPropertyGroup) GroupByParent(id ID) *MergedPropertyGroup { + if d == nil || id == "" { return nil } for _, f := range d.Groups { @@ -265,7 +265,7 @@ func (d *MergedPropertyGroup) GroupByParent(id id.ID) *MergedPropertyGroup { return nil } -func (s *Scene) Widget(pluginID id.PluginID, extensionID id.PluginExtensionID) *SceneWidget { +func (s *Scene) Widget(pluginID, extensionID ID) *SceneWidget { if s == nil { return nil } @@ -277,7 +277,7 @@ func (s *Scene) Widget(pluginID id.PluginID, extensionID id.PluginExtensionID) * return nil } -func (s *Scene) Plugin(pluginID id.PluginID) *ScenePlugin { +func (s *Scene) Plugin(pluginID ID) *ScenePlugin { if s == nil { return nil } diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index ee7a0f6bc..cdcf705cb 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -11,7 +11,6 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" "golang.org/x/text/language" ) @@ -36,7 +35,7 @@ type Tag interface { } type AddClusterInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` Name string `json:"name"` } @@ -46,9 +45,9 @@ type AddClusterPayload struct { } type AddDatasetSchemaInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` Name string `json:"name"` - Representativefield *id.ID `json:"representativefield"` + Representativefield *ID `json:"representativefield"` } type AddDatasetSchemaPayload struct { @@ -56,7 +55,7 @@ type AddDatasetSchemaPayload struct { } type AddDynamicDatasetInput struct { - DatasetSchemaID id.ID 
`json:"datasetSchemaId"` + DatasetSchemaID ID `json:"datasetSchemaId"` Author string `json:"author"` Content string `json:"content"` Lat *float64 `json:"lat"` @@ -70,7 +69,7 @@ type AddDynamicDatasetPayload struct { } type AddDynamicDatasetSchemaInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` } type AddDynamicDatasetSchemaPayload struct { @@ -78,10 +77,10 @@ type AddDynamicDatasetSchemaPayload struct { } type AddInfoboxFieldInput struct { - LayerID id.ID `json:"layerId"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` - Index *int `json:"index"` + LayerID ID `json:"layerId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + Index *int `json:"index"` } type AddInfoboxFieldPayload struct { @@ -90,13 +89,13 @@ type AddInfoboxFieldPayload struct { } type AddLayerGroupInput struct { - ParentLayerID id.ID `json:"parentLayerId"` - PluginID *id.PluginID `json:"pluginId"` - ExtensionID *id.PluginExtensionID `json:"extensionId"` - Index *int `json:"index"` - LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` - Name *string `json:"name"` - RepresentativeFieldID *id.DatasetSchemaFieldID `json:"representativeFieldId"` + ParentLayerID ID `json:"parentLayerId"` + PluginID *ID `json:"pluginId"` + ExtensionID *ID `json:"extensionId"` + Index *int `json:"index"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaID"` + Name *string `json:"name"` + RepresentativeFieldID *ID `json:"representativeFieldId"` } type AddLayerGroupPayload struct { @@ -106,13 +105,13 @@ type AddLayerGroupPayload struct { } type AddLayerItemInput struct { - ParentLayerID id.ID `json:"parentLayerId"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` - Index *int `json:"index"` - Name *string `json:"name"` - Lat *float64 `json:"lat"` - Lng *float64 `json:"lng"` + ParentLayerID ID `json:"parentLayerId"` + PluginID ID `json:"pluginId"` + ExtensionID ID 
`json:"extensionId"` + Index *int `json:"index"` + Name *string `json:"name"` + Lat *float64 `json:"lat"` + Lng *float64 `json:"lng"` } type AddLayerItemPayload struct { @@ -122,9 +121,9 @@ type AddLayerItemPayload struct { } type AddMemberToTeamInput struct { - TeamID id.ID `json:"teamId"` - UserID id.ID `json:"userId"` - Role Role `json:"role"` + TeamID ID `json:"teamId"` + UserID ID `json:"userId"` + Role Role `json:"role"` } type AddMemberToTeamPayload struct { @@ -132,17 +131,17 @@ type AddMemberToTeamPayload struct { } type AddPropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - Index *int `json:"index"` - NameFieldValue interface{} `json:"nameFieldValue"` - NameFieldType *ValueType `json:"nameFieldType"` + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + Index *int `json:"index"` + NameFieldValue interface{} `json:"nameFieldValue"` + NameFieldType *ValueType `json:"nameFieldType"` } type AddWidgetInput struct { - SceneID id.ID `json:"sceneId"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` } type AddWidgetPayload struct { @@ -151,9 +150,9 @@ type AddWidgetPayload struct { } type Asset struct { - ID id.ID `json:"id"` + ID ID `json:"id"` CreatedAt time.Time `json:"createdAt"` - TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` Name string `json:"name"` Size int64 `json:"size"` URL string `json:"url"` @@ -176,8 +175,8 @@ type AssetEdge struct { } type AttachTagItemToGroupInput struct { - ItemID id.ID `json:"itemID"` - GroupID id.ID `json:"groupID"` + ItemID ID `json:"itemID"` + GroupID ID `json:"groupID"` } type AttachTagItemToGroupPayload struct { @@ -185,8 +184,8 @@ type AttachTagItemToGroupPayload struct { } type AttachTagToLayerInput struct { - TagID id.ID `json:"tagID"` - LayerID id.ID 
`json:"layerID"` + TagID ID `json:"tagID"` + LayerID ID `json:"layerID"` } type AttachTagToLayerPayload struct { @@ -204,14 +203,14 @@ type Camera struct { } type Cluster struct { - ID id.ID `json:"id"` + ID ID `json:"id"` Name string `json:"name"` - PropertyID id.ID `json:"propertyId"` + PropertyID ID `json:"propertyId"` Property *Property `json:"property"` } type CreateAssetInput struct { - TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` File graphql.Upload `json:"file"` } @@ -220,7 +219,7 @@ type CreateAssetPayload struct { } type CreateInfoboxInput struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` } type CreateInfoboxPayload struct { @@ -228,7 +227,7 @@ type CreateInfoboxPayload struct { } type CreateProjectInput struct { - TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` Visualizer Visualizer `json:"visualizer"` Name *string `json:"name"` Description *string `json:"description"` @@ -238,7 +237,7 @@ type CreateProjectInput struct { } type CreateSceneInput struct { - ProjectID id.ID `json:"projectId"` + ProjectID ID `json:"projectId"` } type CreateScenePayload struct { @@ -246,9 +245,9 @@ type CreateScenePayload struct { } type CreateTagGroupInput struct { - SceneID id.ID `json:"sceneId"` - Label string `json:"label"` - Tags []*id.ID `json:"tags"` + SceneID ID `json:"sceneId"` + Label string `json:"label"` + Tags []ID `json:"tags"` } type CreateTagGroupPayload struct { @@ -256,12 +255,12 @@ type CreateTagGroupPayload struct { } type CreateTagItemInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` Label string `json:"label"` - Parent *id.ID `json:"parent"` - LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` - LinkedDatasetID *id.ID `json:"linkedDatasetID"` - LinkedDatasetField *id.ID `json:"linkedDatasetField"` + Parent *ID `json:"parent"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaID"` + LinkedDatasetID *ID `json:"linkedDatasetID"` + LinkedDatasetField *ID 
`json:"linkedDatasetField"` } type CreateTagItemPayload struct { @@ -278,9 +277,9 @@ type CreateTeamPayload struct { } type Dataset struct { - ID id.ID `json:"id"` + ID ID `json:"id"` Source string `json:"source"` - SchemaID id.ID `json:"schemaId"` + SchemaID ID `json:"schemaId"` Fields []*DatasetField `json:"fields"` Schema *DatasetSchema `json:"schema"` Name *string `json:"name"` @@ -301,8 +300,8 @@ type DatasetEdge struct { } type DatasetField struct { - FieldID id.ID `json:"fieldId"` - SchemaID id.ID `json:"schemaId"` + FieldID ID `json:"fieldId"` + SchemaID ID `json:"schemaId"` Source string `json:"source"` Type ValueType `json:"type"` Value interface{} `json:"value"` @@ -312,12 +311,12 @@ type DatasetField struct { } type DatasetSchema struct { - ID id.ID `json:"id"` + ID ID `json:"id"` Source string `json:"source"` Name string `json:"name"` - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` Fields []*DatasetSchemaField `json:"fields"` - RepresentativeFieldID *id.ID `json:"representativeFieldId"` + RepresentativeFieldID *ID `json:"representativeFieldId"` Dynamic *bool `json:"dynamic"` Datasets *DatasetConnection `json:"datasets"` Scene *Scene `json:"scene"` @@ -339,12 +338,12 @@ type DatasetSchemaEdge struct { } type DatasetSchemaField struct { - ID id.ID `json:"id"` + ID ID `json:"id"` Source string `json:"source"` Name string `json:"name"` Type ValueType `json:"type"` - SchemaID id.ID `json:"schemaId"` - RefID *id.ID `json:"refId"` + SchemaID ID `json:"schemaId"` + RefID *ID `json:"refId"` Schema *DatasetSchema `json:"schema"` Ref *DatasetSchema `json:"ref"` } @@ -352,32 +351,32 @@ type DatasetSchemaField struct { func (DatasetSchemaField) IsNode() {} type DeleteMeInput struct { - UserID id.ID `json:"userId"` + UserID ID `json:"userId"` } type DeleteMePayload struct { - UserID id.ID `json:"userId"` + UserID ID `json:"userId"` } type DeleteProjectInput struct { - ProjectID id.ID `json:"projectId"` + ProjectID ID `json:"projectId"` } type 
DeleteProjectPayload struct { - ProjectID id.ID `json:"projectId"` + ProjectID ID `json:"projectId"` } type DeleteTeamInput struct { - TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` } type DeleteTeamPayload struct { - TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` } type DetachTagFromLayerInput struct { - TagID id.ID `json:"tagID"` - LayerID id.ID `json:"layerID"` + TagID ID `json:"tagID"` + LayerID ID `json:"layerID"` } type DetachTagFromLayerPayload struct { @@ -385,8 +384,8 @@ type DetachTagFromLayerPayload struct { } type DetachTagItemFromGroupInput struct { - ItemID id.ID `json:"itemID"` - GroupID id.ID `json:"groupID"` + ItemID ID `json:"itemID"` + GroupID ID `json:"groupID"` } type DetachTagItemFromGroupPayload struct { @@ -397,14 +396,14 @@ type ImportDatasetFromGoogleSheetInput struct { AccessToken string `json:"accessToken"` FileID string `json:"fileId"` SheetName string `json:"sheetName"` - SceneID id.ID `json:"sceneId"` - DatasetSchemaID *id.ID `json:"datasetSchemaId"` + SceneID ID `json:"sceneId"` + DatasetSchemaID *ID `json:"datasetSchemaId"` } type ImportDatasetInput struct { File graphql.Upload `json:"file"` - SceneID id.ID `json:"sceneId"` - DatasetSchemaID *id.ID `json:"datasetSchemaId"` + SceneID ID `json:"sceneId"` + DatasetSchemaID *ID `json:"datasetSchemaId"` } type ImportDatasetPayload struct { @@ -412,7 +411,7 @@ type ImportDatasetPayload struct { } type ImportLayerInput struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` File graphql.Upload `json:"file"` Format LayerEncodingFormat `json:"format"` } @@ -423,11 +422,11 @@ type ImportLayerPayload struct { } type Infobox struct { - SceneID id.ID `json:"sceneId"` - LayerID id.ID `json:"layerId"` - PropertyID id.ID `json:"propertyId"` + SceneID ID `json:"sceneId"` + LayerID ID `json:"layerId"` + PropertyID ID `json:"propertyId"` Fields []*InfoboxField `json:"fields"` - LinkedDatasetID *id.ID `json:"linkedDatasetId"` + LinkedDatasetID *ID 
`json:"linkedDatasetId"` Layer Layer `json:"layer"` Property *Property `json:"property"` LinkedDataset *Dataset `json:"linkedDataset"` @@ -436,27 +435,27 @@ type Infobox struct { } type InfoboxField struct { - ID id.ID `json:"id"` - SceneID id.ID `json:"sceneId"` - LayerID id.ID `json:"layerId"` - PropertyID id.ID `json:"propertyId"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` - LinkedDatasetID *id.ID `json:"linkedDatasetId"` - Layer Layer `json:"layer"` - Infobox *Infobox `json:"infobox"` - Property *Property `json:"property"` - Plugin *Plugin `json:"plugin"` - Extension *PluginExtension `json:"extension"` - LinkedDataset *Dataset `json:"linkedDataset"` - Merged *MergedInfoboxField `json:"merged"` - Scene *Scene `json:"scene"` - ScenePlugin *ScenePlugin `json:"scenePlugin"` + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + LayerID ID `json:"layerId"` + PropertyID ID `json:"propertyId"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Layer Layer `json:"layer"` + Infobox *Infobox `json:"infobox"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedInfoboxField `json:"merged"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } type InstallPluginInput struct { - SceneID id.ID `json:"sceneId"` - PluginID id.PluginID `json:"pluginId"` + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` } type InstallPluginPayload struct { @@ -476,57 +475,57 @@ type LatLngHeight struct { } type LayerGroup struct { - ID id.ID `json:"id"` - SceneID id.ID `json:"sceneId"` - Name string `json:"name"` - IsVisible bool `json:"isVisible"` - PropertyID *id.ID `json:"propertyId"` - PluginID *id.PluginID `json:"pluginId"` - ExtensionID *id.PluginExtensionID `json:"extensionId"` - Infobox *Infobox 
`json:"infobox"` - ParentID *id.ID `json:"parentId"` - LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaId"` - Root bool `json:"root"` - LayerIds []*id.ID `json:"layerIds"` - Tags []LayerTag `json:"tags"` - Parent *LayerGroup `json:"parent"` - Property *Property `json:"property"` - Plugin *Plugin `json:"plugin"` - Extension *PluginExtension `json:"extension"` - LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` - Layers []Layer `json:"layers"` - Scene *Scene `json:"scene"` - ScenePlugin *ScenePlugin `json:"scenePlugin"` + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + Name string `json:"name"` + IsVisible bool `json:"isVisible"` + PropertyID *ID `json:"propertyId"` + PluginID *ID `json:"pluginId"` + ExtensionID *ID `json:"extensionId"` + Infobox *Infobox `json:"infobox"` + ParentID *ID `json:"parentId"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaId"` + Root bool `json:"root"` + LayerIds []ID `json:"layerIds"` + Tags []LayerTag `json:"tags"` + Parent *LayerGroup `json:"parent"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` + Layers []Layer `json:"layers"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } func (LayerGroup) IsLayer() {} type LayerItem struct { - ID id.ID `json:"id"` - SceneID id.ID `json:"sceneId"` - Name string `json:"name"` - IsVisible bool `json:"isVisible"` - PropertyID *id.ID `json:"propertyId"` - PluginID *id.PluginID `json:"pluginId"` - ExtensionID *id.PluginExtensionID `json:"extensionId"` - Infobox *Infobox `json:"infobox"` - ParentID *id.ID `json:"parentId"` - LinkedDatasetID *id.ID `json:"linkedDatasetId"` - Tags []LayerTag `json:"tags"` - Parent *LayerGroup `json:"parent"` - Property *Property `json:"property"` - Plugin *Plugin `json:"plugin"` - Extension *PluginExtension `json:"extension"` - LinkedDataset *Dataset `json:"linkedDataset"` - 
Merged *MergedLayer `json:"merged"` - Scene *Scene `json:"scene"` - ScenePlugin *ScenePlugin `json:"scenePlugin"` + ID ID `json:"id"` + SceneID ID `json:"sceneId"` + Name string `json:"name"` + IsVisible bool `json:"isVisible"` + PropertyID *ID `json:"propertyId"` + PluginID *ID `json:"pluginId"` + ExtensionID *ID `json:"extensionId"` + Infobox *Infobox `json:"infobox"` + ParentID *ID `json:"parentId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Tags []LayerTag `json:"tags"` + Parent *LayerGroup `json:"parent"` + Property *Property `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + LinkedDataset *Dataset `json:"linkedDataset"` + Merged *MergedLayer `json:"merged"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } func (LayerItem) IsLayer() {} type LayerTagGroup struct { - TagID id.ID `json:"tagId"` + TagID ID `json:"tagId"` Children []*LayerTagItem `json:"children"` Tag Tag `json:"tag"` } @@ -534,45 +533,57 @@ type LayerTagGroup struct { func (LayerTagGroup) IsLayerTag() {} type LayerTagItem struct { - TagID id.ID `json:"tagId"` - Tag Tag `json:"tag"` + TagID ID `json:"tagId"` + Tag Tag `json:"tag"` } func (LayerTagItem) IsLayerTag() {} type LinkDatasetToPropertyValueInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - DatasetSchemaIds []*id.ID `json:"datasetSchemaIds"` - DatasetSchemaFieldIds []*id.ID `json:"datasetSchemaFieldIds"` - DatasetIds []*id.ID `json:"datasetIds"` + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` + DatasetSchemaIds []ID `json:"datasetSchemaIds"` + DatasetSchemaFieldIds []ID `json:"datasetSchemaFieldIds"` + DatasetIds []ID `json:"datasetIds"` +} + +type Me struct { + ID ID `json:"id"` + Name string `json:"name"` + Email 
string `json:"email"` + Lang language.Tag `json:"lang"` + Theme Theme `json:"theme"` + Auths []string `json:"auths"` + MyTeamID ID `json:"myTeamId"` + Teams []*Team `json:"teams"` + MyTeam *Team `json:"myTeam"` } type MergedInfobox struct { - SceneID id.ID `json:"sceneID"` + SceneID ID `json:"sceneID"` Property *MergedProperty `json:"property"` Fields []*MergedInfoboxField `json:"fields"` Scene *Scene `json:"scene"` } type MergedInfoboxField struct { - OriginalID id.ID `json:"originalId"` - SceneID id.ID `json:"sceneID"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` - Property *MergedProperty `json:"property"` - Plugin *Plugin `json:"plugin"` - Extension *PluginExtension `json:"extension"` - Scene *Scene `json:"scene"` - ScenePlugin *ScenePlugin `json:"scenePlugin"` + OriginalID ID `json:"originalId"` + SceneID ID `json:"sceneID"` + PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + Property *MergedProperty `json:"property"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + Scene *Scene `json:"scene"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` } type MergedLayer struct { - OriginalID id.ID `json:"originalId"` - ParentID *id.ID `json:"parentId"` - SceneID id.ID `json:"sceneID"` + OriginalID ID `json:"originalId"` + ParentID *ID `json:"parentId"` + SceneID ID `json:"sceneID"` Property *MergedProperty `json:"property"` Infobox *MergedInfobox `json:"infobox"` Original *LayerItem `json:"original"` @@ -581,10 +592,10 @@ type MergedLayer struct { } type MergedProperty struct { - OriginalID *id.ID `json:"originalId"` - ParentID *id.ID `json:"parentId"` - SchemaID *id.PropertySchemaID `json:"schemaId"` - LinkedDatasetID *id.ID `json:"linkedDatasetId"` + OriginalID *ID `json:"originalId"` + ParentID *ID `json:"parentId"` + SchemaID *ID `json:"schemaId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` Original *Property `json:"original"` Parent *Property `json:"parent"` 
Schema *PropertySchema `json:"schema"` @@ -593,65 +604,65 @@ type MergedProperty struct { } type MergedPropertyField struct { - SchemaID id.PropertySchemaID `json:"schemaId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Value interface{} `json:"value"` - Type ValueType `json:"type"` - Links []*PropertyFieldLink `json:"links"` - Overridden bool `json:"overridden"` - Schema *PropertySchema `json:"schema"` - Field *PropertySchemaField `json:"field"` - ActualValue interface{} `json:"actualValue"` + SchemaID ID `json:"schemaId"` + FieldID ID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` + Links []*PropertyFieldLink `json:"links"` + Overridden bool `json:"overridden"` + Schema *PropertySchema `json:"schema"` + Field *PropertySchemaField `json:"field"` + ActualValue interface{} `json:"actualValue"` } type MergedPropertyGroup struct { - OriginalPropertyID *id.ID `json:"originalPropertyId"` - ParentPropertyID *id.ID `json:"parentPropertyId"` - OriginalID *id.ID `json:"originalId"` - ParentID *id.ID `json:"parentId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - SchemaID *id.PropertySchemaID `json:"schemaId"` - LinkedDatasetID *id.ID `json:"linkedDatasetId"` - Fields []*MergedPropertyField `json:"fields"` - Groups []*MergedPropertyGroup `json:"groups"` - OriginalProperty *Property `json:"originalProperty"` - ParentProperty *Property `json:"parentProperty"` - Original *PropertyGroup `json:"original"` - Parent *PropertyGroup `json:"parent"` - Schema *PropertySchema `json:"schema"` - LinkedDataset *Dataset `json:"linkedDataset"` + OriginalPropertyID *ID `json:"originalPropertyId"` + ParentPropertyID *ID `json:"parentPropertyId"` + OriginalID *ID `json:"originalId"` + ParentID *ID `json:"parentId"` + SchemaGroupID ID `json:"schemaGroupId"` + SchemaID *ID `json:"schemaId"` + LinkedDatasetID *ID `json:"linkedDatasetId"` + Fields []*MergedPropertyField `json:"fields"` + Groups []*MergedPropertyGroup `json:"groups"` + 
OriginalProperty *Property `json:"originalProperty"` + ParentProperty *Property `json:"parentProperty"` + Original *PropertyGroup `json:"original"` + Parent *PropertyGroup `json:"parent"` + Schema *PropertySchema `json:"schema"` + LinkedDataset *Dataset `json:"linkedDataset"` } type MoveInfoboxFieldInput struct { - LayerID id.ID `json:"layerId"` - InfoboxFieldID id.ID `json:"infoboxFieldId"` - Index int `json:"index"` + LayerID ID `json:"layerId"` + InfoboxFieldID ID `json:"infoboxFieldId"` + Index int `json:"index"` } type MoveInfoboxFieldPayload struct { - InfoboxFieldID id.ID `json:"infoboxFieldId"` + InfoboxFieldID ID `json:"infoboxFieldId"` Layer Layer `json:"layer"` Index int `json:"index"` } type MoveLayerInput struct { - LayerID id.ID `json:"layerId"` - DestLayerID *id.ID `json:"destLayerId"` - Index *int `json:"index"` + LayerID ID `json:"layerId"` + DestLayerID *ID `json:"destLayerId"` + Index *int `json:"index"` } type MoveLayerPayload struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` FromParentLayer *LayerGroup `json:"fromParentLayer"` ToParentLayer *LayerGroup `json:"toParentLayer"` Index int `json:"index"` } type MovePropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID id.ID `json:"itemId"` - Index int `json:"index"` + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + ItemID ID `json:"itemId"` + Index int `json:"index"` } type PageInfo struct { @@ -669,42 +680,42 @@ type Pagination struct { } type Plugin struct { - ID id.PluginID `json:"id"` - SceneID *id.ID `json:"sceneId"` - Name string `json:"name"` - Version string `json:"version"` - Description string `json:"description"` - Author string `json:"author"` - RepositoryURL string `json:"repositoryUrl"` - PropertySchemaID *id.PropertySchemaID `json:"propertySchemaId"` - Extensions []*PluginExtension `json:"extensions"` - ScenePlugin *ScenePlugin `json:"scenePlugin"` - 
AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` - AllTranslatedName map[string]string `json:"allTranslatedName"` - Scene *Scene `json:"scene"` - TranslatedName string `json:"translatedName"` - TranslatedDescription string `json:"translatedDescription"` - PropertySchema *PropertySchema `json:"propertySchema"` + ID ID `json:"id"` + SceneID *ID `json:"sceneId"` + Name string `json:"name"` + Version string `json:"version"` + Description string `json:"description"` + Author string `json:"author"` + RepositoryURL string `json:"repositoryUrl"` + PropertySchemaID *ID `json:"propertySchemaId"` + Extensions []*PluginExtension `json:"extensions"` + ScenePlugin *ScenePlugin `json:"scenePlugin"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + AllTranslatedName map[string]string `json:"allTranslatedName"` + Scene *Scene `json:"scene"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string `json:"translatedDescription"` + PropertySchema *PropertySchema `json:"propertySchema"` } type PluginExtension struct { - ExtensionID id.PluginExtensionID `json:"extensionId"` - PluginID id.PluginID `json:"pluginId"` - Type PluginExtensionType `json:"type"` - Name string `json:"name"` - Description string `json:"description"` - Icon string `json:"icon"` - SingleOnly *bool `json:"singleOnly"` - WidgetLayout *WidgetLayout `json:"widgetLayout"` - Visualizer *Visualizer `json:"visualizer"` - PropertySchemaID id.PropertySchemaID `json:"propertySchemaId"` - AllTranslatedName map[string]string `json:"allTranslatedName"` - AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` - Plugin *Plugin `json:"plugin"` - SceneWidget *SceneWidget `json:"sceneWidget"` - PropertySchema *PropertySchema `json:"propertySchema"` - TranslatedName string `json:"translatedName"` - TranslatedDescription string `json:"translatedDescription"` + ExtensionID ID `json:"extensionId"` + PluginID ID `json:"pluginId"` + Type 
PluginExtensionType `json:"type"` + Name string `json:"name"` + Description string `json:"description"` + Icon string `json:"icon"` + SingleOnly *bool `json:"singleOnly"` + WidgetLayout *WidgetLayout `json:"widgetLayout"` + Visualizer *Visualizer `json:"visualizer"` + PropertySchemaID ID `json:"propertySchemaId"` + AllTranslatedName map[string]string `json:"allTranslatedName"` + AllTranslatedDescription map[string]string `json:"allTranslatedDescription"` + Plugin *Plugin `json:"plugin"` + SceneWidget *SceneWidget `json:"sceneWidget"` + PropertySchema *PropertySchema `json:"propertySchema"` + TranslatedName string `json:"translatedName"` + TranslatedDescription string `json:"translatedDescription"` } type PluginMetadata struct { @@ -716,7 +727,7 @@ type PluginMetadata struct { } type Project struct { - ID id.ID `json:"id"` + ID ID `json:"id"` IsArchived bool `json:"isArchived"` IsBasicAuthActive bool `json:"isBasicAuthActive"` BasicAuthUsername string `json:"basicAuthUsername"` @@ -732,7 +743,7 @@ type Project struct { PublicImage string `json:"publicImage"` PublicNoIndex bool `json:"publicNoIndex"` ImageURL *url.URL `json:"imageUrl"` - TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` Visualizer Visualizer `json:"visualizer"` PublishmentStatus PublishmentStatus `json:"publishmentStatus"` Team *Team `json:"team"` @@ -763,40 +774,40 @@ type ProjectPayload struct { } type Property struct { - ID id.ID `json:"id"` - SchemaID id.PropertySchemaID `json:"schemaId"` - Items []PropertyItem `json:"items"` - Schema *PropertySchema `json:"schema"` - Layer Layer `json:"layer"` - Merged *MergedProperty `json:"merged"` + ID ID `json:"id"` + SchemaID ID `json:"schemaId"` + Items []PropertyItem `json:"items"` + Schema *PropertySchema `json:"schema"` + Layer Layer `json:"layer"` + Merged *MergedProperty `json:"merged"` } func (Property) IsNode() {} type PropertyCondition struct { - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Type ValueType `json:"type"` - Value 
interface{} `json:"value"` + FieldID ID `json:"fieldId"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` } type PropertyField struct { - ID string `json:"id"` - ParentID id.ID `json:"parentId"` - SchemaID id.PropertySchemaID `json:"schemaId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Links []*PropertyFieldLink `json:"links"` - Type ValueType `json:"type"` - Value interface{} `json:"value"` - Parent *Property `json:"parent"` - Schema *PropertySchema `json:"schema"` - Field *PropertySchemaField `json:"field"` - ActualValue interface{} `json:"actualValue"` + ID string `json:"id"` + ParentID ID `json:"parentId"` + SchemaID ID `json:"schemaId"` + FieldID ID `json:"fieldId"` + Links []*PropertyFieldLink `json:"links"` + Type ValueType `json:"type"` + Value interface{} `json:"value"` + Parent *Property `json:"parent"` + Schema *PropertySchema `json:"schema"` + Field *PropertySchemaField `json:"field"` + ActualValue interface{} `json:"actualValue"` } type PropertyFieldLink struct { - DatasetID *id.ID `json:"datasetId"` - DatasetSchemaID id.ID `json:"datasetSchemaId"` - DatasetSchemaFieldID id.ID `json:"datasetSchemaFieldId"` + DatasetID *ID `json:"datasetId"` + DatasetSchemaID ID `json:"datasetSchemaId"` + DatasetSchemaFieldID ID `json:"datasetSchemaFieldId"` Dataset *Dataset `json:"dataset"` DatasetField *DatasetField `json:"datasetField"` DatasetSchema *DatasetSchema `json:"datasetSchema"` @@ -809,23 +820,23 @@ type PropertyFieldPayload struct { } type PropertyGroup struct { - ID id.ID `json:"id"` - SchemaID id.PropertySchemaID `json:"schemaId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - Fields []*PropertyField `json:"fields"` - Schema *PropertySchema `json:"schema"` - SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` + ID ID `json:"id"` + SchemaID ID `json:"schemaId"` + SchemaGroupID ID `json:"schemaGroupId"` + Fields []*PropertyField `json:"fields"` + Schema *PropertySchema `json:"schema"` + SchemaGroup 
*PropertySchemaGroup `json:"schemaGroup"` } func (PropertyGroup) IsPropertyItem() {} type PropertyGroupList struct { - ID id.ID `json:"id"` - SchemaID id.PropertySchemaID `json:"schemaId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - Groups []*PropertyGroup `json:"groups"` - Schema *PropertySchema `json:"schema"` - SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` + ID ID `json:"id"` + SchemaID ID `json:"schemaId"` + SchemaGroupID ID `json:"schemaGroupId"` + Groups []*PropertyGroup `json:"groups"` + Schema *PropertySchema `json:"schema"` + SchemaGroup *PropertySchemaGroup `json:"schemaGroup"` } func (PropertyGroupList) IsPropertyItem() {} @@ -836,22 +847,22 @@ type PropertyItemPayload struct { } type PropertyLinkableFields struct { - SchemaID id.PropertySchemaID `json:"schemaId"` - Latlng *id.PropertySchemaFieldID `json:"latlng"` - URL *id.PropertySchemaFieldID `json:"url"` - LatlngField *PropertySchemaField `json:"latlngField"` - URLField *PropertySchemaField `json:"urlField"` - Schema *PropertySchema `json:"schema"` + SchemaID ID `json:"schemaId"` + Latlng *ID `json:"latlng"` + URL *ID `json:"url"` + LatlngField *PropertySchemaField `json:"latlngField"` + URLField *PropertySchemaField `json:"urlField"` + Schema *PropertySchema `json:"schema"` } type PropertySchema struct { - ID id.PropertySchemaID `json:"id"` + ID ID `json:"id"` Groups []*PropertySchemaGroup `json:"groups"` LinkableFields *PropertyLinkableFields `json:"linkableFields"` } type PropertySchemaField struct { - FieldID id.PropertySchemaFieldID `json:"fieldId"` + FieldID ID `json:"fieldId"` Type ValueType `json:"type"` Title string `json:"title"` Description string `json:"description"` @@ -878,21 +889,21 @@ type PropertySchemaFieldChoice struct { } type PropertySchemaGroup struct { - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - SchemaID id.PropertySchemaID `json:"schemaId"` - Fields []*PropertySchemaField `json:"fields"` - IsList bool `json:"isList"` - 
IsAvailableIf *PropertyCondition `json:"isAvailableIf"` - Title *string `json:"title"` - AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` - RepresentativeFieldID *id.PropertySchemaFieldID `json:"representativeFieldId"` - RepresentativeField *PropertySchemaField `json:"representativeField"` - Schema *PropertySchema `json:"schema"` - TranslatedTitle string `json:"translatedTitle"` + SchemaGroupID ID `json:"schemaGroupId"` + SchemaID ID `json:"schemaId"` + Fields []*PropertySchemaField `json:"fields"` + IsList bool `json:"isList"` + IsAvailableIf *PropertyCondition `json:"isAvailableIf"` + Title *string `json:"title"` + AllTranslatedTitle map[string]string `json:"allTranslatedTitle"` + RepresentativeFieldID *ID `json:"representativeFieldId"` + RepresentativeField *PropertySchemaField `json:"representativeField"` + Schema *PropertySchema `json:"schema"` + TranslatedTitle string `json:"translatedTitle"` } type PublishProjectInput struct { - ProjectID id.ID `json:"projectId"` + ProjectID ID `json:"projectId"` Alias *string `json:"alias"` Status PublishmentStatus `json:"status"` } @@ -905,44 +916,44 @@ type Rect struct { } type RemoveAssetInput struct { - AssetID id.ID `json:"assetId"` + AssetID ID `json:"assetId"` } type RemoveAssetPayload struct { - AssetID id.ID `json:"assetId"` + AssetID ID `json:"assetId"` } type RemoveClusterInput struct { - ClusterID id.ID `json:"clusterId"` - SceneID id.ID `json:"sceneId"` + ClusterID ID `json:"clusterId"` + SceneID ID `json:"sceneId"` } type RemoveClusterPayload struct { Scene *Scene `json:"scene"` - ClusterID id.ID `json:"clusterId"` + ClusterID ID `json:"clusterId"` } type RemoveDatasetSchemaInput struct { - SchemaID id.ID `json:"schemaId"` + SchemaID ID `json:"schemaId"` Force *bool `json:"force"` } type RemoveDatasetSchemaPayload struct { - SchemaID id.ID `json:"schemaId"` + SchemaID ID `json:"schemaId"` } type RemoveInfoboxFieldInput struct { - LayerID id.ID `json:"layerId"` - InfoboxFieldID id.ID 
`json:"infoboxFieldId"` + LayerID ID `json:"layerId"` + InfoboxFieldID ID `json:"infoboxFieldId"` } type RemoveInfoboxFieldPayload struct { - InfoboxFieldID id.ID `json:"infoboxFieldId"` + InfoboxFieldID ID `json:"infoboxFieldId"` Layer Layer `json:"layer"` } type RemoveInfoboxInput struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` } type RemoveInfoboxPayload struct { @@ -950,17 +961,17 @@ type RemoveInfoboxPayload struct { } type RemoveLayerInput struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` } type RemoveLayerPayload struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` ParentLayer *LayerGroup `json:"parentLayer"` } type RemoveMemberFromTeamInput struct { - TeamID id.ID `json:"teamId"` - UserID id.ID `json:"userId"` + TeamID ID `json:"teamId"` + UserID ID `json:"userId"` } type RemoveMemberFromTeamPayload struct { @@ -972,45 +983,45 @@ type RemoveMyAuthInput struct { } type RemovePropertyFieldInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` } type RemovePropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID id.ID `json:"itemId"` + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` + ItemID ID `json:"itemId"` } type RemoveTagInput struct { - TagID id.ID `json:"tagID"` + TagID ID `json:"tagID"` } type RemoveTagPayload struct { - TagID id.ID `json:"tagId"` + TagID ID `json:"tagId"` UpdatedLayers []Layer `json:"updatedLayers"` } type RemoveWidgetInput struct { - SceneID id.ID `json:"sceneId"` - WidgetID id.ID `json:"widgetId"` + SceneID ID `json:"sceneId"` + WidgetID ID `json:"widgetId"` } type 
RemoveWidgetPayload struct { Scene *Scene `json:"scene"` - WidgetID id.ID `json:"widgetId"` + WidgetID ID `json:"widgetId"` } type Scene struct { - ID id.ID `json:"id"` - ProjectID id.ID `json:"projectId"` - TeamID id.ID `json:"teamId"` - PropertyID id.ID `json:"propertyId"` + ID ID `json:"id"` + ProjectID ID `json:"projectId"` + TeamID ID `json:"teamId"` + PropertyID ID `json:"propertyId"` CreatedAt time.Time `json:"createdAt"` UpdatedAt time.Time `json:"updatedAt"` - RootLayerID id.ID `json:"rootLayerId"` + RootLayerID ID `json:"rootLayerId"` Widgets []*SceneWidget `json:"widgets"` Plugins []*ScenePlugin `json:"plugins"` WidgetAlignSystem *WidgetAlignSystem `json:"widgetAlignSystem"` @@ -1019,9 +1030,8 @@ type Scene struct { Team *Team `json:"team"` Property *Property `json:"property"` RootLayer *LayerGroup `json:"rootLayer"` - LockMode SceneLockMode `json:"lockMode"` DatasetSchemas *DatasetSchemaConnection `json:"datasetSchemas"` - TagIds []*id.ID `json:"tagIds"` + TagIds []ID `json:"tagIds"` Tags []Tag `json:"tags"` Clusters []*Cluster `json:"clusters"` } @@ -1029,35 +1039,29 @@ type Scene struct { func (Scene) IsNode() {} type ScenePlugin struct { - PluginID id.PluginID `json:"pluginId"` - PropertyID *id.ID `json:"propertyId"` - Plugin *Plugin `json:"plugin"` - Property *Property `json:"property"` + PluginID ID `json:"pluginId"` + PropertyID *ID `json:"propertyId"` + Plugin *Plugin `json:"plugin"` + Property *Property `json:"property"` } type SceneWidget struct { - ID id.ID `json:"id"` - PluginID id.PluginID `json:"pluginId"` - ExtensionID id.PluginExtensionID `json:"extensionId"` - PropertyID id.ID `json:"propertyId"` - Enabled bool `json:"enabled"` - Extended bool `json:"extended"` - Plugin *Plugin `json:"plugin"` - Extension *PluginExtension `json:"extension"` - Property *Property `json:"property"` -} - -type SearchedUser struct { - UserID id.ID `json:"userId"` - UserName string `json:"userName"` - UserEmail string `json:"userEmail"` + ID ID `json:"id"` + 
PluginID ID `json:"pluginId"` + ExtensionID ID `json:"extensionId"` + PropertyID ID `json:"propertyId"` + Enabled bool `json:"enabled"` + Extended bool `json:"extended"` + Plugin *Plugin `json:"plugin"` + Extension *PluginExtension `json:"extension"` + Property *Property `json:"property"` } type SignupInput struct { Lang *language.Tag `json:"lang"` Theme *Theme `json:"theme"` - UserID *id.ID `json:"userId"` - TeamID *id.ID `json:"teamId"` + UserID *ID `json:"userId"` + TeamID *ID `json:"teamId"` Secret *string `json:"secret"` } @@ -1067,22 +1071,22 @@ type SignupPayload struct { } type SyncDatasetInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` URL string `json:"url"` } type SyncDatasetPayload struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` URL string `json:"url"` DatasetSchema []*DatasetSchema `json:"datasetSchema"` Dataset []*Dataset `json:"dataset"` } type TagGroup struct { - ID id.ID `json:"id"` - SceneID id.ID `json:"sceneId"` + ID ID `json:"id"` + SceneID ID `json:"sceneId"` Label string `json:"label"` - TagIds []*id.ID `json:"tagIds"` + TagIds []ID `json:"tagIds"` Tags []*TagItem `json:"tags"` Scene *Scene `json:"scene"` Layers []Layer `json:"layers"` @@ -1091,13 +1095,13 @@ type TagGroup struct { func (TagGroup) IsTag() {} type TagItem struct { - ID id.ID `json:"id"` - SceneID id.ID `json:"sceneId"` + ID ID `json:"id"` + SceneID ID `json:"sceneId"` Label string `json:"label"` - ParentID *id.ID `json:"parentId"` - LinkedDatasetID *id.ID `json:"linkedDatasetID"` - LinkedDatasetSchemaID *id.ID `json:"linkedDatasetSchemaID"` - LinkedDatasetFieldID *id.ID `json:"linkedDatasetFieldID"` + ParentID *ID `json:"parentId"` + LinkedDatasetID *ID `json:"linkedDatasetID"` + LinkedDatasetSchemaID *ID `json:"linkedDatasetSchemaID"` + LinkedDatasetFieldID *ID `json:"linkedDatasetFieldID"` LinkedDatasetSchema *DatasetSchema `json:"linkedDatasetSchema"` LinkedDataset *Dataset `json:"linkedDataset"` LinkedDatasetField 
*DatasetField `json:"linkedDatasetField"` @@ -1108,7 +1112,7 @@ type TagItem struct { func (TagItem) IsTag() {} type Team struct { - ID id.ID `json:"id"` + ID ID `json:"id"` Name string `json:"name"` Members []*TeamMember `json:"members"` Personal bool `json:"personal"` @@ -1119,7 +1123,7 @@ type Team struct { func (Team) IsNode() {} type TeamMember struct { - UserID id.ID `json:"userId"` + UserID ID `json:"userId"` Role Role `json:"role"` User *User `json:"user"` } @@ -1136,27 +1140,27 @@ type Typography struct { } type UninstallPluginInput struct { - SceneID id.ID `json:"sceneId"` - PluginID id.PluginID `json:"pluginId"` + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` } type UninstallPluginPayload struct { - PluginID id.PluginID `json:"pluginId"` - Scene *Scene `json:"scene"` + PluginID ID `json:"pluginId"` + Scene *Scene `json:"scene"` } type UnlinkPropertyValueInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` } type UpdateClusterInput struct { - ClusterID id.ID `json:"clusterId"` - SceneID id.ID `json:"sceneId"` + ClusterID ID `json:"clusterId"` + SceneID ID `json:"sceneId"` Name *string `json:"name"` - PropertyID *id.ID `json:"propertyId"` + PropertyID *ID `json:"propertyId"` } type UpdateClusterPayload struct { @@ -1165,7 +1169,7 @@ type UpdateClusterPayload struct { } type UpdateDatasetSchemaInput struct { - SchemaID id.ID `json:"schemaId"` + SchemaID ID `json:"schemaId"` Name string `json:"name"` } @@ -1174,7 +1178,7 @@ type UpdateDatasetSchemaPayload struct { } type UpdateLayerInput struct { - LayerID id.ID `json:"layerId"` + LayerID ID `json:"layerId"` Name *string `json:"name"` Visible *bool `json:"visible"` } @@ -1193,13 +1197,13 @@ type UpdateMeInput 
struct { } type UpdateMePayload struct { - User *User `json:"user"` + Me *Me `json:"me"` } type UpdateMemberOfTeamInput struct { - TeamID id.ID `json:"teamId"` - UserID id.ID `json:"userId"` - Role Role `json:"role"` + TeamID ID `json:"teamId"` + UserID ID `json:"userId"` + Role Role `json:"role"` } type UpdateMemberOfTeamPayload struct { @@ -1207,7 +1211,7 @@ type UpdateMemberOfTeamPayload struct { } type UpdateProjectInput struct { - ProjectID id.ID `json:"projectId"` + ProjectID ID `json:"projectId"` Name *string `json:"name"` Description *string `json:"description"` Archived *bool `json:"archived"` @@ -1225,31 +1229,31 @@ type UpdateProjectInput struct { } type UpdatePropertyItemInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID id.PropertySchemaGroupID `json:"schemaGroupId"` + PropertyID ID `json:"propertyId"` + SchemaGroupID ID `json:"schemaGroupId"` Operations []*UpdatePropertyItemOperationInput `json:"operations"` } type UpdatePropertyItemOperationInput struct { Operation ListOperation `json:"operation"` - ItemID *id.ID `json:"itemId"` + ItemID *ID `json:"itemId"` Index *int `json:"index"` NameFieldValue interface{} `json:"nameFieldValue"` NameFieldType *ValueType `json:"nameFieldType"` } type UpdatePropertyValueInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - Value interface{} `json:"value"` - Type ValueType `json:"type"` + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` + Value interface{} `json:"value"` + Type ValueType `json:"type"` } type UpdateTagInput struct { - TagID id.ID `json:"tagId"` - SceneID id.ID `json:"sceneId"` + TagID ID `json:"tagId"` + SceneID ID `json:"sceneId"` Label *string `json:"label"` } @@ -1258,7 +1262,7 @@ type UpdateTagPayload struct { } type UpdateTeamInput struct { - 
TeamID id.ID `json:"teamId"` + TeamID ID `json:"teamId"` Name string `json:"name"` } @@ -1267,7 +1271,7 @@ type UpdateTeamPayload struct { } type UpdateWidgetAlignSystemInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` Location *WidgetLocationInput `json:"location"` Align *WidgetAreaAlign `json:"align"` } @@ -1277,8 +1281,8 @@ type UpdateWidgetAlignSystemPayload struct { } type UpdateWidgetInput struct { - SceneID id.ID `json:"sceneId"` - WidgetID id.ID `json:"widgetId"` + SceneID ID `json:"sceneId"` + WidgetID ID `json:"widgetId"` Enabled *bool `json:"enabled"` Location *WidgetLocationInput `json:"location"` Extended *bool `json:"extended"` @@ -1291,9 +1295,9 @@ type UpdateWidgetPayload struct { } type UpgradePluginInput struct { - SceneID id.ID `json:"sceneId"` - PluginID id.PluginID `json:"pluginId"` - ToPluginID id.PluginID `json:"toPluginId"` + SceneID ID `json:"sceneId"` + PluginID ID `json:"pluginId"` + ToPluginID ID `json:"toPluginId"` } type UpgradePluginPayload struct { @@ -1302,15 +1306,15 @@ type UpgradePluginPayload struct { } type UploadFileToPropertyInput struct { - PropertyID id.ID `json:"propertyId"` - SchemaGroupID *id.PropertySchemaGroupID `json:"schemaGroupId"` - ItemID *id.ID `json:"itemId"` - FieldID id.PropertySchemaFieldID `json:"fieldId"` - File graphql.Upload `json:"file"` + PropertyID ID `json:"propertyId"` + SchemaGroupID *ID `json:"schemaGroupId"` + ItemID *ID `json:"itemId"` + FieldID ID `json:"fieldId"` + File graphql.Upload `json:"file"` } type UploadPluginInput struct { - SceneID id.ID `json:"sceneId"` + SceneID ID `json:"sceneId"` File *graphql.Upload `json:"file"` URL *url.URL `json:"url"` } @@ -1322,15 +1326,9 @@ type UploadPluginPayload struct { } type User struct { - ID id.ID `json:"id"` - Name string `json:"name"` - Email string `json:"email"` - Lang language.Tag `json:"lang"` - Theme Theme `json:"theme"` - MyTeamID id.ID `json:"myTeamId"` - Auths []string `json:"auths"` - Teams []*Team 
`json:"teams"` - MyTeam *Team `json:"myTeam"` + ID ID `json:"id"` + Name string `json:"name"` + Email string `json:"email"` } func (User) IsNode() {} @@ -1341,7 +1339,7 @@ type WidgetAlignSystem struct { } type WidgetArea struct { - WidgetIds []*id.ID `json:"widgetIds"` + WidgetIds []ID `json:"widgetIds"` Align WidgetAreaAlign `json:"align"` } @@ -1763,53 +1761,6 @@ func (e Role) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } -type SceneLockMode string - -const ( - SceneLockModeFree SceneLockMode = "FREE" - SceneLockModePending SceneLockMode = "PENDING" - SceneLockModeDatasetSyncing SceneLockMode = "DATASET_SYNCING" - SceneLockModePluginUpgrading SceneLockMode = "PLUGIN_UPGRADING" - SceneLockModePublishing SceneLockMode = "PUBLISHING" -) - -var AllSceneLockMode = []SceneLockMode{ - SceneLockModeFree, - SceneLockModePending, - SceneLockModeDatasetSyncing, - SceneLockModePluginUpgrading, - SceneLockModePublishing, -} - -func (e SceneLockMode) IsValid() bool { - switch e { - case SceneLockModeFree, SceneLockModePending, SceneLockModeDatasetSyncing, SceneLockModePluginUpgrading, SceneLockModePublishing: - return true - } - return false -} - -func (e SceneLockMode) String() string { - return string(e) -} - -func (e *SceneLockMode) UnmarshalGQL(v interface{}) error { - str, ok := v.(string) - if !ok { - return fmt.Errorf("enums must be strings") - } - - *e = SceneLockMode(str) - if !e.IsValid() { - return fmt.Errorf("%s is not a valid SceneLockMode", str) - } - return nil -} - -func (e SceneLockMode) MarshalGQL(w io.Writer) { - fmt.Fprint(w, strconv.Quote(e.String())) -} - type TextAlign string const ( diff --git a/internal/adapter/gql/gqlmodel/scalar.go b/internal/adapter/gql/gqlmodel/scalar.go index 143666b3b..7968d1e22 100644 --- a/internal/adapter/gql/gqlmodel/scalar.go +++ b/internal/adapter/gql/gqlmodel/scalar.go @@ -10,7 +10,6 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/reearth/reearth-backend/internal/usecase" - 
"github.com/reearth/reearth-backend/pkg/id" "golang.org/x/text/language" ) @@ -51,19 +50,6 @@ func UnmarshalLang(v interface{}) (language.Tag, error) { return language.Tag{}, errors.New("invalid lang") } -func MarshalID(t id.ID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalID(v interface{}) (id.ID, error) { - if tmpStr, ok := v.(string); ok { - return id.NewIDWith(tmpStr) - } - return id.ID{}, errors.New("invalid ID") -} - func MarshalCursor(t usecase.Cursor) graphql.Marshaler { return graphql.WriterFunc(func(w io.Writer) { _, _ = io.WriteString(w, strconv.Quote(string(t))) @@ -77,84 +63,6 @@ func UnmarshalCursor(v interface{}) (usecase.Cursor, error) { return usecase.Cursor(""), errors.New("invalid cursor") } -func MarshalPluginID(t id.PluginID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalPluginID(v interface{}) (id.PluginID, error) { - if tmpStr, ok := v.(string); ok { - return id.PluginIDFrom(tmpStr) - } - return id.PluginID{}, errors.New("invalid ID") -} - -func MarshalPluginExtensionID(t id.PluginExtensionID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalPluginExtensionID(v interface{}) (id.PluginExtensionID, error) { - if tmpStr, ok := v.(string); ok { - return id.PluginExtensionID(tmpStr), nil - } - return id.PluginExtensionID(""), errors.New("invalid ID") -} - -func MarshalPropertySchemaID(t id.PropertySchemaID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { - if tmpStr, ok := v.(string); ok { - return id.PropertySchemaIDFrom(tmpStr) - } - return id.PropertySchemaID{}, errors.New("invalid ID") -} 
- -func MarshalPropertySchemaGroupID(t id.PropertySchemaGroupID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalPropertySchemaGroupID(v interface{}) (id.PropertySchemaGroupID, error) { - if tmpStr, ok := v.(string); ok { - return id.PropertySchemaGroupID(tmpStr), nil - } - return id.PropertySchemaGroupID(""), errors.New("invalid ID") -} - -func MarshalPropertySchemaFieldID(t id.PropertySchemaFieldID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalPropertySchemaFieldID(v interface{}) (id.PropertySchemaFieldID, error) { - if tmpStr, ok := v.(string); ok { - return id.PropertySchemaFieldID(tmpStr), nil - } - return id.PropertySchemaFieldID(""), errors.New("invalid ID") -} - -func MarshalDatasetSchemaFieldID(t id.DatasetSchemaFieldID) graphql.Marshaler { - return graphql.WriterFunc(func(w io.Writer) { - _, _ = io.WriteString(w, strconv.Quote(t.String())) - }) -} - -func UnmarshalDatasetSchemaFieldID(v interface{}) (id.DatasetSchemaFieldID, error) { - if tmpStr, ok := v.(string); ok { - return id.DatasetSchemaFieldIDFrom(tmpStr) - } - return id.NewDatasetSchemaFieldID(), errors.New("invalid ID") -} - func MarshalMap(val map[string]string) graphql.Marshaler { return graphql.WriterFunc(func(w io.Writer) { _ = json.NewEncoder(w).Encode(val) diff --git a/internal/adapter/gql/gqlmodel/scalar_id.go b/internal/adapter/gql/gqlmodel/scalar_id.go new file mode 100644 index 000000000..80cf8d422 --- /dev/null +++ b/internal/adapter/gql/gqlmodel/scalar_id.go @@ -0,0 +1,174 @@ +package gqlmodel + +import ( + "errors" + "io" + "strconv" + + "github.com/99designs/gqlgen/graphql" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/id/idx" +) + +type ID string + +func MarshalPropertyFieldID(t id.PropertyFieldID) graphql.Marshaler { + return 
graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertyFieldID(v interface{}) (id.PropertyFieldID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertyFieldID(tmpStr), nil + } + return id.PropertyFieldID(""), errors.New("invalid ID") +} + +func MarshalDatasetFieldID(t id.DatasetFieldID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalDatasetFieldID(v interface{}) (id.DatasetFieldID, error) { + if tmpStr, ok := v.(string); ok { + return id.DatasetFieldIDFrom(tmpStr) + } + return id.NewDatasetFieldID(), errors.New("invalid ID") +} + +func MarshalPluginID(t id.PluginID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPluginID(v interface{}) (id.PluginID, error) { + if tmpStr, ok := v.(string); ok { + return id.PluginIDFrom(tmpStr) + } + return id.PluginID{}, errors.New("invalid ID") +} + +func MarshalPluginExtensionID(t id.PluginExtensionID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPluginExtensionID(v interface{}) (id.PluginExtensionID, error) { + if tmpStr, ok := v.(string); ok { + return id.PluginExtensionID(tmpStr), nil + } + return id.PluginExtensionID(""), errors.New("invalid ID") +} + +func MarshalPropertySchemaID(t id.PropertySchemaID) graphql.Marshaler { + return graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaID(v interface{}) (id.PropertySchemaID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaIDFrom(tmpStr) + } + return id.PropertySchemaID{}, errors.New("invalid ID") +} + +func MarshalPropertySchemaGroupID(t id.PropertySchemaGroupID) graphql.Marshaler { + return 
graphql.WriterFunc(func(w io.Writer) { + _, _ = io.WriteString(w, strconv.Quote(t.String())) + }) +} + +func UnmarshalPropertySchemaGroupID(v interface{}) (id.PropertySchemaGroupID, error) { + if tmpStr, ok := v.(string); ok { + return id.PropertySchemaGroupID(tmpStr), nil + } + return id.PropertySchemaGroupID(""), errors.New("invalid ID") +} + +func IDFrom[T idx.Type](i idx.ID[T]) ID { + return ID(i.String()) +} + +func IDFromRef[T idx.Type](i *idx.ID[T]) *ID { + return (*ID)(i.StringRef()) +} + +func IDFromStringRef[T idx.Type](i *idx.StringID[T]) *ID { + return (*ID)(i) +} + +func IDFromPluginID(i id.PluginID) ID { + return ID(i.String()) +} + +func IDFromPluginIDRef(i *id.PluginID) *ID { + return (*ID)(i.StringRef()) +} + +func IDFromPropertySchemaID(i id.PropertySchemaID) ID { + return ID(i.String()) +} + +func IDFromPropertySchemaIDRef(i *id.PropertySchemaID) *ID { + return (*ID)(i.StringRef()) +} + +func ToID[A idx.Type](a ID) (idx.ID[A], error) { + return idx.From[A](string(a)) +} + +func ToID2[A, B idx.Type](a, b ID) (ai idx.ID[A], bi idx.ID[B], err error) { + ai, err = ToID[A](a) + if err != nil { + return + } + bi, err = ToID[B](b) + return +} + +func ToID3[A, B, C idx.Type](a, b, c ID) (ai idx.ID[A], bi idx.ID[B], ci idx.ID[C], err error) { + ai, bi, err = ToID2[A, B](a, b) + if err != nil { + return + } + ci, err = ToID[C](c) + return +} + +func ToIDRef[A idx.Type](a *ID) *idx.ID[A] { + return idx.FromRef[A]((*string)(a)) +} + +func ToStringIDRef[T idx.Type](a *ID) *idx.StringID[T] { + return idx.StringIDFromRef[T]((*string)(a)) +} + +func ToPropertySchemaID(a ID) (id.PropertySchemaID, error) { + return id.PropertySchemaIDFrom((string)(a)) +} + +func ToPluginID(a ID) (id.PluginID, error) { + return id.PluginIDFrom((string)(a)) +} + +func ToPluginID2(a, b ID) (ai id.PluginID, bi id.PluginID, err error) { + ai, err = id.PluginIDFrom((string)(a)) + if err != nil { + return + } + bi, err = ToPluginID(b) + return ai, bi, err +} + +func 
ToPropertySchemaIDRef(a *ID) *id.PropertySchemaID { + return id.PropertySchemaIDFromRef((*string)(a)) +} + +func ToPluginIDRef(a *ID) *id.PluginID { + return id.PluginIDFromRef((*string)(a)) +} diff --git a/internal/adapter/gql/loader_asset.go b/internal/adapter/gql/loader_asset.go index fdf9159e8..03f7fc563 100644 --- a/internal/adapter/gql/loader_asset.go +++ b/internal/adapter/gql/loader_asset.go @@ -9,6 +9,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/asset" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type AssetLoader struct { @@ -19,24 +20,27 @@ func NewAssetLoader(usecase interfaces.Asset) *AssetLoader { return &AssetLoader{usecase: usecase} } -func (c *AssetLoader) Fetch(ctx context.Context, ids []id.AssetID) ([]*gqlmodel.Asset, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *AssetLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToID[id.Asset]) if err != nil { return nil, []error{err} } - assets := make([]*gqlmodel.Asset, 0, len(res)) - for _, a := range res { - assets = append(assets, gqlmodel.ToAsset(a)) + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) + if err != nil { + return nil, []error{err} } - return assets, nil + return util.Map(res, gqlmodel.ToAsset), nil } -func (c *AssetLoader) FindByTeam(ctx context.Context, teamID id.ID, keyword *string, sort *asset.SortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { - p := gqlmodel.ToPagination(pagination) +func (c *AssetLoader) FindByTeam(ctx context.Context, teamID gqlmodel.ID, keyword *string, sort *asset.SortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { + tid, err := gqlmodel.ToID[id.Team](teamID) + if err != nil { + return nil, err + } - assets, pi, err := c.usecase.FindByTeam(ctx, id.TeamID(teamID), keyword, 
sort, p, getOperator(ctx)) + assets, pi, err := c.usecase.FindByTeam(ctx, tid, keyword, sort, gqlmodel.ToPagination(pagination), getOperator(ctx)) if err != nil { return nil, err } @@ -47,7 +51,7 @@ func (c *AssetLoader) FindByTeam(ctx context.Context, teamID id.ID, keyword *str asset := gqlmodel.ToAsset(a) edges = append(edges, &gqlmodel.AssetEdge{ Node: asset, - Cursor: usecase.Cursor(asset.ID.String()), + Cursor: usecase.Cursor(asset.ID), }) nodes = append(nodes, asset) } @@ -63,15 +67,15 @@ func (c *AssetLoader) FindByTeam(ctx context.Context, teamID id.ID, keyword *str // data loader type AssetDataLoader interface { - Load(id.AssetID) (*gqlmodel.Asset, error) - LoadAll([]id.AssetID) ([]*gqlmodel.Asset, []error) + Load(gqlmodel.ID) (*gqlmodel.Asset, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Asset, []error) } func (c *AssetLoader) DataLoader(ctx context.Context) AssetDataLoader { return gqldataloader.NewAssetLoader(gqldataloader.AssetLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { return c.Fetch(ctx, keys) }, }) @@ -86,8 +90,8 @@ type ordinaryAssetLoader struct { c *AssetLoader } -func (l *ordinaryAssetLoader) Load(key id.AssetID) (*gqlmodel.Asset, error) { - res, errs := l.c.Fetch(l.ctx, []id.AssetID{key}) +func (l *ordinaryAssetLoader) Load(key gqlmodel.ID) (*gqlmodel.Asset, error) { + res, errs := l.c.Fetch(l.ctx, []gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -97,6 +101,6 @@ func (l *ordinaryAssetLoader) Load(key id.AssetID) (*gqlmodel.Asset, error) { return nil, nil } -func (l *ordinaryAssetLoader) LoadAll(keys []id.AssetID) ([]*gqlmodel.Asset, []error) { +func (l *ordinaryAssetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Asset, []error) { return l.c.Fetch(l.ctx, keys) } diff --git a/internal/adapter/gql/loader_dataset.go b/internal/adapter/gql/loader_dataset.go index 0c7f0c2e4..ca476e7c7 
100644 --- a/internal/adapter/gql/loader_dataset.go +++ b/internal/adapter/gql/loader_dataset.go @@ -8,6 +8,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type DatasetLoader struct { @@ -18,8 +19,13 @@ func NewDatasetLoader(usecase interfaces.Dataset) *DatasetLoader { return &DatasetLoader{usecase: usecase} } -func (c *DatasetLoader) Fetch(ctx context.Context, ids []id.DatasetID) ([]*gqlmodel.Dataset, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *DatasetLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { + datasetids, err := util.TryMap(ids, gqlmodel.ToID[id.Dataset]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, datasetids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -32,8 +38,13 @@ func (c *DatasetLoader) Fetch(ctx context.Context, ids []id.DatasetID) ([]*gqlmo return datasets, nil } -func (c *DatasetLoader) FetchSchema(ctx context.Context, ids []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { - res, err := c.usecase.FetchSchema(ctx, ids, getOperator(ctx)) +func (c *DatasetLoader) FetchSchema(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { + schemaids, err := util.TryMap(ids, gqlmodel.ToID[id.DatasetSchema]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchSchema(ctx, schemaids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -46,8 +57,13 @@ func (c *DatasetLoader) FetchSchema(ctx context.Context, ids []id.DatasetSchemaI return schemas, nil } -func (c *DatasetLoader) GraphFetch(ctx context.Context, i id.DatasetID, depth int) ([]*gqlmodel.Dataset, []error) { - res, err := c.usecase.GraphFetch(ctx, i, depth, getOperator(ctx)) +func (c *DatasetLoader) GraphFetch(ctx 
context.Context, i gqlmodel.ID, depth int) ([]*gqlmodel.Dataset, []error) { + did, err := gqlmodel.ToID[id.Dataset](i) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.GraphFetch(ctx, did, depth, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -60,8 +76,13 @@ func (c *DatasetLoader) GraphFetch(ctx context.Context, i id.DatasetID, depth in return datasets, nil } -func (c *DatasetLoader) GraphFetchSchema(ctx context.Context, i id.ID, depth int) ([]*gqlmodel.DatasetSchema, []error) { - res, err := c.usecase.GraphFetchSchema(ctx, id.DatasetSchemaID(i), depth, getOperator(ctx)) +func (c *DatasetLoader) GraphFetchSchema(ctx context.Context, i gqlmodel.ID, depth int) ([]*gqlmodel.DatasetSchema, []error) { + did, err := gqlmodel.ToID[id.DatasetSchema](i) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.GraphFetchSchema(ctx, did, depth, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -74,8 +95,13 @@ func (c *DatasetLoader) GraphFetchSchema(ctx context.Context, i id.ID, depth int return schemas, nil } -func (c *DatasetLoader) FindSchemaByScene(ctx context.Context, i id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { - res, pi, err := c.usecase.FindSchemaByScene(ctx, id.SceneID(i), usecase.NewPagination(first, last, before, after), getOperator(ctx)) +func (c *DatasetLoader) FindSchemaByScene(ctx context.Context, i gqlmodel.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { + sid, err := gqlmodel.ToID[id.Scene](i) + if err != nil { + return nil, err + } + + res, pi, err := c.usecase.FindSchemaByScene(ctx, sid, usecase.NewPagination(first, last, before, after), getOperator(ctx)) if err != nil { return nil, err } @@ -86,7 +112,7 @@ func (c *DatasetLoader) FindSchemaByScene(ctx context.Context, i id.ID, first *i ds := gqlmodel.ToDatasetSchema(dataset) edges = 
append(edges, &gqlmodel.DatasetSchemaEdge{ Node: ds, - Cursor: usecase.Cursor(ds.ID.String()), + Cursor: usecase.Cursor(ds.ID), }) nodes = append(nodes, ds) } @@ -99,8 +125,13 @@ func (c *DatasetLoader) FindSchemaByScene(ctx context.Context, i id.ID, first *i }, nil } -func (c *DatasetLoader) FindDynamicSchemasByScene(ctx context.Context, sid id.ID) ([]*gqlmodel.DatasetSchema, error) { - res, err := c.usecase.FindDynamicSchemaByScene(ctx, id.SceneID(sid)) +func (c *DatasetLoader) FindDynamicSchemasByScene(ctx context.Context, sid gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) { + sceneid, err := gqlmodel.ToID[id.Scene](sid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FindDynamicSchemaByScene(ctx, sceneid) if err != nil { return nil, err } @@ -113,9 +144,14 @@ func (c *DatasetLoader) FindDynamicSchemasByScene(ctx context.Context, sid id.ID return dss, nil } -func (c *DatasetLoader) FindBySchema(ctx context.Context, dsid id.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { +func (c *DatasetLoader) FindBySchema(ctx context.Context, dsid gqlmodel.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { + schemaid, err := gqlmodel.ToID[id.DatasetSchema](dsid) + if err != nil { + return nil, err + } + p := usecase.NewPagination(first, last, before, after) - res, pi, err2 := c.usecase.FindBySchema(ctx, id.DatasetSchemaID(dsid), p, getOperator(ctx)) + res, pi, err2 := c.usecase.FindBySchema(ctx, schemaid, p, getOperator(ctx)) if err2 != nil { return nil, err2 } @@ -126,7 +162,7 @@ func (c *DatasetLoader) FindBySchema(ctx context.Context, dsid id.ID, first *int ds := gqlmodel.ToDataset(dataset) edges = append(edges, &gqlmodel.DatasetEdge{ Node: ds, - Cursor: usecase.Cursor(ds.ID.String()), + Cursor: usecase.Cursor(ds.ID), }) nodes = append(nodes, ds) } @@ -144,15 +180,15 @@ func (c *DatasetLoader) FindBySchema(ctx context.Context, 
dsid id.ID, first *int // data loader type DatasetDataLoader interface { - Load(id.DatasetID) (*gqlmodel.Dataset, error) - LoadAll([]id.DatasetID) ([]*gqlmodel.Dataset, []error) + Load(gqlmodel.ID) (*gqlmodel.Dataset, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Dataset, []error) } func (c *DatasetLoader) DataLoader(ctx context.Context) DatasetDataLoader { return gqldataloader.NewDatasetLoader(gqldataloader.DatasetLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { return c.Fetch(ctx, keys) }, }) @@ -167,8 +203,8 @@ type ordinaryDatasetLoader struct { c *DatasetLoader } -func (l *ordinaryDatasetLoader) Load(key id.DatasetID) (*gqlmodel.Dataset, error) { - res, errs := l.c.Fetch(l.ctx, []id.DatasetID{key}) +func (l *ordinaryDatasetLoader) Load(key gqlmodel.ID) (*gqlmodel.Dataset, error) { + res, errs := l.c.Fetch(l.ctx, []gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -178,20 +214,20 @@ func (l *ordinaryDatasetLoader) Load(key id.DatasetID) (*gqlmodel.Dataset, error return nil, nil } -func (l *ordinaryDatasetLoader) LoadAll(keys []id.DatasetID) ([]*gqlmodel.Dataset, []error) { +func (l *ordinaryDatasetLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Dataset, []error) { return l.c.Fetch(l.ctx, keys) } type DatasetSchemaDataLoader interface { - Load(id.DatasetSchemaID) (*gqlmodel.DatasetSchema, error) - LoadAll([]id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) + Load(gqlmodel.ID) (*gqlmodel.DatasetSchema, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) } func (c *DatasetLoader) SchemaDataLoader(ctx context.Context) DatasetSchemaDataLoader { return gqldataloader.NewDatasetSchemaLoader(gqldataloader.DatasetSchemaLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + Fetch: func(keys 
[]gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { return c.FetchSchema(ctx, keys) }, }) @@ -199,18 +235,18 @@ func (c *DatasetLoader) SchemaDataLoader(ctx context.Context) DatasetSchemaDataL func (c *DatasetLoader) SchemaOrdinaryDataLoader(ctx context.Context) DatasetSchemaDataLoader { return &ordinaryDatasetSchemaLoader{ - fetch: func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { return c.FetchSchema(ctx, keys) }, } } type ordinaryDatasetSchemaLoader struct { - fetch func(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) } -func (l *ordinaryDatasetSchemaLoader) Load(key id.DatasetSchemaID) (*gqlmodel.DatasetSchema, error) { - res, errs := l.fetch([]id.DatasetSchemaID{key}) +func (l *ordinaryDatasetSchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.DatasetSchema, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -220,6 +256,6 @@ func (l *ordinaryDatasetSchemaLoader) Load(key id.DatasetSchemaID) (*gqlmodel.Da return nil, nil } -func (l *ordinaryDatasetSchemaLoader) LoadAll(keys []id.DatasetSchemaID) ([]*gqlmodel.DatasetSchema, []error) { +func (l *ordinaryDatasetSchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.DatasetSchema, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_layer.go b/internal/adapter/gql/loader_layer.go index ea9822024..b5f7293af 100644 --- a/internal/adapter/gql/loader_layer.go +++ b/internal/adapter/gql/loader_layer.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type LayerLoader struct { @@ -17,8 +18,13 @@ func NewLayerLoader(usecase interfaces.Layer) *LayerLoader { return &LayerLoader{usecase: 
usecase} } -func (c *LayerLoader) Fetch(ctx context.Context, ids []id.LayerID) ([]*gqlmodel.Layer, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *LayerLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { + layerids, err := util.TryMap(ids, gqlmodel.ToID[id.Layer]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, layerids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -36,8 +42,13 @@ func (c *LayerLoader) Fetch(ctx context.Context, ids []id.LayerID) ([]*gqlmodel. return layers, nil } -func (c *LayerLoader) FetchGroup(ctx context.Context, ids []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { - res, err := c.usecase.FetchGroup(ctx, ids, getOperator(ctx)) +func (c *LayerLoader) FetchGroup(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { + layerids, err := util.TryMap(ids, gqlmodel.ToID[id.Layer]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchGroup(ctx, layerids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -50,8 +61,13 @@ func (c *LayerLoader) FetchGroup(ctx context.Context, ids []id.LayerID) ([]*gqlm return layerGroups, nil } -func (c *LayerLoader) FetchItem(ctx context.Context, ids []id.LayerID) ([]*gqlmodel.LayerItem, []error) { - res, err := c.usecase.FetchItem(ctx, ids, getOperator(ctx)) +func (c *LayerLoader) FetchItem(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { + layerids, err := util.TryMap(ids, gqlmodel.ToID[id.Layer]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchItem(ctx, layerids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -64,8 +80,13 @@ func (c *LayerLoader) FetchItem(ctx context.Context, ids []id.LayerID) ([]*gqlmo return layerItems, nil } -func (c *LayerLoader) FetchParent(ctx context.Context, lid id.LayerID) (*gqlmodel.LayerGroup, error) { - res, err := 
c.usecase.FetchParent(ctx, id.LayerID(lid), getOperator(ctx)) +func (c *LayerLoader) FetchParent(ctx context.Context, lid gqlmodel.ID) (*gqlmodel.LayerGroup, error) { + layerid, err := gqlmodel.ToID[id.Layer](lid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FetchParent(ctx, layerid, getOperator(ctx)) if err != nil { return nil, err } @@ -73,8 +94,13 @@ func (c *LayerLoader) FetchParent(ctx context.Context, lid id.LayerID) (*gqlmode return gqlmodel.ToLayerGroup(res, nil), nil } -func (c *LayerLoader) FetchByProperty(ctx context.Context, pid id.PropertyID) (gqlmodel.Layer, error) { - res, err := c.usecase.FetchByProperty(ctx, pid, getOperator(ctx)) +func (c *LayerLoader) FetchByProperty(ctx context.Context, pid gqlmodel.ID) (gqlmodel.Layer, error) { + propertyid, err := gqlmodel.ToID[id.Property](pid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FetchByProperty(ctx, propertyid, getOperator(ctx)) if err != nil { return nil, err } @@ -82,8 +108,13 @@ func (c *LayerLoader) FetchByProperty(ctx context.Context, pid id.PropertyID) (g return gqlmodel.ToLayer(res, nil), nil } -func (c *LayerLoader) FetchMerged(ctx context.Context, org id.LayerID, parent *id.LayerID) (*gqlmodel.MergedLayer, error) { - res, err2 := c.usecase.FetchMerged(ctx, org, parent, getOperator(ctx)) +func (c *LayerLoader) FetchMerged(ctx context.Context, org gqlmodel.ID, parent *gqlmodel.ID) (*gqlmodel.MergedLayer, error) { + orgid, err := gqlmodel.ToID[id.Layer](org) + if err != nil { + return nil, err + } + + res, err2 := c.usecase.FetchMerged(ctx, orgid, gqlmodel.ToIDRef[id.Layer](parent), getOperator(ctx)) if err2 != nil { return nil, err2 } @@ -91,8 +122,13 @@ func (c *LayerLoader) FetchMerged(ctx context.Context, org id.LayerID, parent *i return gqlmodel.ToMergedLayer(res), nil } -func (c *LayerLoader) FetchParentAndMerged(ctx context.Context, org id.LayerID) (*gqlmodel.MergedLayer, error) { - res, err2 := c.usecase.FetchParentAndMerged(ctx, org, 
getOperator(ctx)) +func (c *LayerLoader) FetchParentAndMerged(ctx context.Context, org gqlmodel.ID) (*gqlmodel.MergedLayer, error) { + orgid, err := gqlmodel.ToID[id.Layer](org) + if err != nil { + return nil, err + } + + res, err2 := c.usecase.FetchParentAndMerged(ctx, orgid, getOperator(ctx)) if err2 != nil { return nil, err2 } @@ -100,8 +136,13 @@ func (c *LayerLoader) FetchParentAndMerged(ctx context.Context, org id.LayerID) return gqlmodel.ToMergedLayer(res), nil } -func (c *LayerLoader) FetchByTag(ctx context.Context, tag id.TagID) ([]gqlmodel.Layer, error) { - res, err2 := c.usecase.FetchByTag(ctx, tag, getOperator(ctx)) +func (c *LayerLoader) FetchByTag(ctx context.Context, tag gqlmodel.ID) ([]gqlmodel.Layer, error) { + tagid, err := gqlmodel.ToID[id.Tag](tag) + if err != nil { + return nil, err + } + + res, err2 := c.usecase.FetchByTag(ctx, tagid, getOperator(ctx)) if err2 != nil { return nil, err2 } @@ -121,15 +162,15 @@ func (c *LayerLoader) FetchByTag(ctx context.Context, tag id.TagID) ([]gqlmodel. 
// data loader type LayerDataLoader interface { - Load(id.LayerID) (*gqlmodel.Layer, error) - LoadAll([]id.LayerID) ([]*gqlmodel.Layer, []error) + Load(gqlmodel.ID) (*gqlmodel.Layer, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Layer, []error) } func (c *LayerLoader) DataLoader(ctx context.Context) LayerDataLoader { return gqldataloader.NewLayerLoader(gqldataloader.LayerLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { return c.Fetch(ctx, keys) }, }) @@ -137,18 +178,18 @@ func (c *LayerLoader) DataLoader(ctx context.Context) LayerDataLoader { func (c *LayerLoader) OrdinaryDataLoader(ctx context.Context) LayerDataLoader { return &ordinaryLayerLoader{ - fetch: func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { return c.Fetch(ctx, keys) }, } } type ordinaryLayerLoader struct { - fetch func(keys []id.LayerID) ([]*gqlmodel.Layer, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) } -func (l *ordinaryLayerLoader) Load(key id.LayerID) (*gqlmodel.Layer, error) { - res, errs := l.fetch([]id.LayerID{key}) +func (l *ordinaryLayerLoader) Load(key gqlmodel.ID) (*gqlmodel.Layer, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -158,20 +199,20 @@ func (l *ordinaryLayerLoader) Load(key id.LayerID) (*gqlmodel.Layer, error) { return nil, nil } -func (l *ordinaryLayerLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.Layer, []error) { +func (l *ordinaryLayerLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Layer, []error) { return l.fetch(keys) } type LayerItemDataLoader interface { - Load(id.LayerID) (*gqlmodel.LayerItem, error) - LoadAll([]id.LayerID) ([]*gqlmodel.LayerItem, []error) + Load(gqlmodel.ID) (*gqlmodel.LayerItem, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) } func (c 
*LayerLoader) ItemDataLoader(ctx context.Context) LayerItemDataLoader { return gqldataloader.NewLayerItemLoader(gqldataloader.LayerItemLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { return c.FetchItem(ctx, keys) }, }) @@ -179,18 +220,18 @@ func (c *LayerLoader) ItemDataLoader(ctx context.Context) LayerItemDataLoader { func (c *LayerLoader) ItemOrdinaryDataLoader(ctx context.Context) LayerItemDataLoader { return &ordinaryLayerItemLoader{ - fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { return c.FetchItem(ctx, keys) }, } } type ordinaryLayerItemLoader struct { - fetch func(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) } -func (l *ordinaryLayerItemLoader) Load(key id.LayerID) (*gqlmodel.LayerItem, error) { - res, errs := l.fetch([]id.LayerID{key}) +func (l *ordinaryLayerItemLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerItem, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -200,20 +241,20 @@ func (l *ordinaryLayerItemLoader) Load(key id.LayerID) (*gqlmodel.LayerItem, err return nil, nil } -func (l *ordinaryLayerItemLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerItem, []error) { +func (l *ordinaryLayerItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerItem, []error) { return l.fetch(keys) } type LayerGroupDataLoader interface { - Load(id.LayerID) (*gqlmodel.LayerGroup, error) - LoadAll([]id.LayerID) ([]*gqlmodel.LayerGroup, []error) + Load(gqlmodel.ID) (*gqlmodel.LayerGroup, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) } func (c *LayerLoader) GroupDataLoader(ctx context.Context) LayerGroupDataLoader { return gqldataloader.NewLayerGroupLoader(gqldataloader.LayerGroupLoaderConfig{ 
Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { return c.FetchGroup(ctx, keys) }, }) @@ -221,18 +262,18 @@ func (c *LayerLoader) GroupDataLoader(ctx context.Context) LayerGroupDataLoader func (c *LayerLoader) GroupOrdinaryDataLoader(ctx context.Context) LayerGroupDataLoader { return &ordinaryLayerGroupLoader{ - fetch: func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { return c.FetchGroup(ctx, keys) }, } } type ordinaryLayerGroupLoader struct { - fetch func(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) } -func (l *ordinaryLayerGroupLoader) Load(key id.LayerID) (*gqlmodel.LayerGroup, error) { - res, errs := l.fetch([]id.LayerID{key}) +func (l *ordinaryLayerGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.LayerGroup, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -242,6 +283,6 @@ func (l *ordinaryLayerGroupLoader) Load(key id.LayerID) (*gqlmodel.LayerGroup, e return nil, nil } -func (l *ordinaryLayerGroupLoader) LoadAll(keys []id.LayerID) ([]*gqlmodel.LayerGroup, []error) { +func (l *ordinaryLayerGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.LayerGroup, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_plugin.go b/internal/adapter/gql/loader_plugin.go index cc52dd29d..d103ddd0a 100644 --- a/internal/adapter/gql/loader_plugin.go +++ b/internal/adapter/gql/loader_plugin.go @@ -6,7 +6,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqldataloader" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" - "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type 
PluginLoader struct { @@ -17,8 +17,13 @@ func NewPluginLoader(usecase interfaces.Plugin) *PluginLoader { return &PluginLoader{usecase: usecase} } -func (c *PluginLoader) Fetch(ctx context.Context, ids []id.PluginID) ([]*gqlmodel.Plugin, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *PluginLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToPluginID) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -37,30 +42,21 @@ func (c *PluginLoader) FetchPluginMetadata(ctx context.Context) ([]*gqlmodel.Plu return nil, err } - pluginMetaList := make([]*gqlmodel.PluginMetadata, 0, len(res)) - for _, md := range res { - pm, err := gqlmodel.ToPluginMetadata(md) - if err != nil { - return nil, err - } - pluginMetaList = append(pluginMetaList, pm) - } - - return pluginMetaList, nil + return util.Map(res, gqlmodel.ToPluginMetadata), nil } // data loader type PluginDataLoader interface { - Load(id.PluginID) (*gqlmodel.Plugin, error) - LoadAll([]id.PluginID) ([]*gqlmodel.Plugin, []error) + Load(gqlmodel.ID) (*gqlmodel.Plugin, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Plugin, []error) } func (c *PluginLoader) DataLoader(ctx context.Context) PluginDataLoader { return gqldataloader.NewPluginLoader(gqldataloader.PluginLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { return c.Fetch(ctx, keys) }, }) @@ -68,18 +64,18 @@ func (c *PluginLoader) DataLoader(ctx context.Context) PluginDataLoader { func (c *PluginLoader) OrdinaryDataLoader(ctx context.Context) PluginDataLoader { return &ordinaryPluginLoader{ - fetch: func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { 
return c.Fetch(ctx, keys) }, } } type ordinaryPluginLoader struct { - fetch func(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) } -func (l *ordinaryPluginLoader) Load(key id.PluginID) (*gqlmodel.Plugin, error) { - res, errs := l.fetch([]id.PluginID{key}) +func (l *ordinaryPluginLoader) Load(key gqlmodel.ID) (*gqlmodel.Plugin, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -89,6 +85,6 @@ func (l *ordinaryPluginLoader) Load(key id.PluginID) (*gqlmodel.Plugin, error) { return nil, nil } -func (l *ordinaryPluginLoader) LoadAll(keys []id.PluginID) ([]*gqlmodel.Plugin, []error) { +func (l *ordinaryPluginLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Plugin, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_project.go b/internal/adapter/gql/loader_project.go index d97282af0..7cea429f2 100644 --- a/internal/adapter/gql/loader_project.go +++ b/internal/adapter/gql/loader_project.go @@ -8,6 +8,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type ProjectLoader struct { @@ -18,8 +19,13 @@ func NewProjectLoader(usecase interfaces.Project) *ProjectLoader { return &ProjectLoader{usecase: usecase} } -func (c *ProjectLoader) Fetch(ctx context.Context, ids []id.ProjectID) ([]*gqlmodel.Project, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *ProjectLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Project, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToID[id.Project]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, ids2, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -32,8 +38,13 @@ func (c *ProjectLoader) Fetch(ctx context.Context, ids []id.ProjectID) 
([]*gqlmo return projects, nil } -func (c *ProjectLoader) FindByTeam(ctx context.Context, teamID id.TeamID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { - res, pi, err := c.usecase.FindByTeam(ctx, teamID, usecase.NewPagination(first, last, before, after), getOperator(ctx)) +func (c *ProjectLoader) FindByTeam(ctx context.Context, teamID gqlmodel.ID, first *int, last *int, before *usecase.Cursor, after *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + tid, err := gqlmodel.ToID[id.Team](teamID) + if err != nil { + return nil, err + } + + res, pi, err := c.usecase.FindByTeam(ctx, tid, usecase.NewPagination(first, last, before, after), getOperator(ctx)) if err != nil { return nil, err } @@ -44,7 +55,7 @@ func (c *ProjectLoader) FindByTeam(ctx context.Context, teamID id.TeamID, first prj := gqlmodel.ToProject(p) edges = append(edges, &gqlmodel.ProjectEdge{ Node: prj, - Cursor: usecase.Cursor(prj.ID.String()), + Cursor: usecase.Cursor(prj.ID), }) nodes = append(nodes, prj) } @@ -69,15 +80,15 @@ func (c *ProjectLoader) CheckAlias(ctx context.Context, alias string) (*gqlmodel // data loaders type ProjectDataLoader interface { - Load(id.ProjectID) (*gqlmodel.Project, error) - LoadAll([]id.ProjectID) ([]*gqlmodel.Project, []error) + Load(gqlmodel.ID) (*gqlmodel.Project, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Project, []error) } func (c *ProjectLoader) DataLoader(ctx context.Context) ProjectDataLoader { return gqldataloader.NewProjectLoader(gqldataloader.ProjectLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { return c.Fetch(ctx, keys) }, }) @@ -85,18 +96,18 @@ func (c *ProjectLoader) DataLoader(ctx context.Context) ProjectDataLoader { func (c *ProjectLoader) OrdinaryDataLoader(ctx context.Context) ProjectDataLoader { return &ordinaryProjectLoader{ - 
fetch: func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { return c.Fetch(ctx, keys) }, } } type ordinaryProjectLoader struct { - fetch func(keys []id.ProjectID) ([]*gqlmodel.Project, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) } -func (l *ordinaryProjectLoader) Load(key id.ProjectID) (*gqlmodel.Project, error) { - res, errs := l.fetch([]id.ProjectID{key}) +func (l *ordinaryProjectLoader) Load(key gqlmodel.ID) (*gqlmodel.Project, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -106,6 +117,6 @@ func (l *ordinaryProjectLoader) Load(key id.ProjectID) (*gqlmodel.Project, error return nil, nil } -func (l *ordinaryProjectLoader) LoadAll(keys []id.ProjectID) ([]*gqlmodel.Project, []error) { +func (l *ordinaryProjectLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Project, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_property.go b/internal/adapter/gql/loader_property.go index b562a9acf..0bba8f3f6 100644 --- a/internal/adapter/gql/loader_property.go +++ b/internal/adapter/gql/loader_property.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type PropertyLoader struct { @@ -17,8 +18,13 @@ func NewPropertyLoader(usecase interfaces.Property) *PropertyLoader { return &PropertyLoader{usecase: usecase} } -func (c *PropertyLoader) Fetch(ctx context.Context, ids []id.PropertyID) ([]*gqlmodel.Property, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *PropertyLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Property, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToID[id.Property]) + if err != nil { + return nil, []error{err} + } + + res, err := 
c.usecase.Fetch(ctx, ids2, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -31,23 +37,28 @@ func (c *PropertyLoader) Fetch(ctx context.Context, ids []id.PropertyID) ([]*gql return properties, nil } -func (c *PropertyLoader) FetchSchema(ctx context.Context, ids []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { - res, err := c.usecase.FetchSchema(ctx, ids, getOperator(ctx)) +func (c *PropertyLoader) FetchSchema(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { + ids2, err := util.TryMap(ids, gqlmodel.ToPropertySchemaID) if err != nil { return nil, []error{err} } - schemas := make([]*gqlmodel.PropertySchema, 0, len(res)) - for _, propertySchema := range res { - schemas = append(schemas, gqlmodel.ToPropertySchema(propertySchema)) + res, err := c.usecase.FetchSchema(ctx, ids2, getOperator(ctx)) + if err != nil { + return nil, []error{err} } - return schemas, nil + return util.Map(res, gqlmodel.ToPropertySchema), nil } -func (c *PropertyLoader) FetchMerged(ctx context.Context, org, parent, linked *id.ID) (*gqlmodel.MergedProperty, error) { - res, err := c.usecase.FetchMerged(ctx, id.PropertyIDFromRefID(org), id.PropertyIDFromRefID(parent), id.DatasetIDFromRefID(linked), getOperator(ctx)) - +func (c *PropertyLoader) FetchMerged(ctx context.Context, org, parent, linked *gqlmodel.ID) (*gqlmodel.MergedProperty, error) { + res, err := c.usecase.FetchMerged( + ctx, + gqlmodel.ToIDRef[id.Property](org), + gqlmodel.ToIDRef[id.Property](parent), + gqlmodel.ToIDRef[id.Dataset](linked), + getOperator(ctx), + ) if err != nil { return nil, err } @@ -58,15 +69,15 @@ func (c *PropertyLoader) FetchMerged(ctx context.Context, org, parent, linked *i // data loader type PropertyDataLoader interface { - Load(id.PropertyID) (*gqlmodel.Property, error) - LoadAll([]id.PropertyID) ([]*gqlmodel.Property, []error) + Load(gqlmodel.ID) (*gqlmodel.Property, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Property, []error) } func (c 
*PropertyLoader) DataLoader(ctx context.Context) PropertyDataLoader { return gqldataloader.NewPropertyLoader(gqldataloader.PropertyLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { return c.Fetch(ctx, keys) }, }) @@ -74,18 +85,18 @@ func (c *PropertyLoader) DataLoader(ctx context.Context) PropertyDataLoader { func (c *PropertyLoader) OrdinaryDataLoader(ctx context.Context) PropertyDataLoader { return &ordinaryPropertyLoader{ - fetch: func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { return c.Fetch(ctx, keys) }, } } type ordinaryPropertyLoader struct { - fetch func(keys []id.PropertyID) ([]*gqlmodel.Property, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) } -func (l *ordinaryPropertyLoader) Load(key id.PropertyID) (*gqlmodel.Property, error) { - res, errs := l.fetch([]id.PropertyID{key}) +func (l *ordinaryPropertyLoader) Load(key gqlmodel.ID) (*gqlmodel.Property, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -95,20 +106,20 @@ func (l *ordinaryPropertyLoader) Load(key id.PropertyID) (*gqlmodel.Property, er return nil, nil } -func (l *ordinaryPropertyLoader) LoadAll(keys []id.PropertyID) ([]*gqlmodel.Property, []error) { +func (l *ordinaryPropertyLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Property, []error) { return l.fetch(keys) } type PropertySchemaDataLoader interface { - Load(id.PropertySchemaID) (*gqlmodel.PropertySchema, error) - LoadAll([]id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) + Load(gqlmodel.ID) (*gqlmodel.PropertySchema, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) } func (c *PropertyLoader) SchemaDataLoader(ctx context.Context) PropertySchemaDataLoader { return 
gqldataloader.NewPropertySchemaLoader(gqldataloader.PropertySchemaLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { return c.FetchSchema(ctx, keys) }, }) @@ -116,18 +127,18 @@ func (c *PropertyLoader) SchemaDataLoader(ctx context.Context) PropertySchemaDat func (c *PropertyLoader) SchemaOrdinaryDataLoader(ctx context.Context) PropertySchemaDataLoader { return &ordinaryPropertySchemaLoader{ - fetch: func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { return c.FetchSchema(ctx, keys) }, } } type ordinaryPropertySchemaLoader struct { - fetch func(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) } -func (l *ordinaryPropertySchemaLoader) Load(key id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { - res, errs := l.fetch([]id.PropertySchemaID{key}) +func (l *ordinaryPropertySchemaLoader) Load(key gqlmodel.ID) (*gqlmodel.PropertySchema, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -137,6 +148,6 @@ func (l *ordinaryPropertySchemaLoader) Load(key id.PropertySchemaID) (*gqlmodel. 
return nil, nil } -func (l *ordinaryPropertySchemaLoader) LoadAll(keys []id.PropertySchemaID) ([]*gqlmodel.PropertySchema, []error) { +func (l *ordinaryPropertySchemaLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.PropertySchema, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_scene.go b/internal/adapter/gql/loader_scene.go index 4391c26a9..89a09cb4c 100644 --- a/internal/adapter/gql/loader_scene.go +++ b/internal/adapter/gql/loader_scene.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type SceneLoader struct { @@ -17,8 +18,13 @@ func NewSceneLoader(usecase interfaces.Scene) *SceneLoader { return &SceneLoader{usecase: usecase} } -func (c *SceneLoader) Fetch(ctx context.Context, ids []id.SceneID) ([]*gqlmodel.Scene, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *SceneLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { + pids, err := util.TryMap(ids, gqlmodel.ToID[id.Scene]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, pids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -30,53 +36,32 @@ func (c *SceneLoader) Fetch(ctx context.Context, ids []id.SceneID) ([]*gqlmodel. 
return scenes, nil } -func (c *SceneLoader) FindByProject(ctx context.Context, projectID id.ProjectID) (*gqlmodel.Scene, error) { - res, err := c.usecase.FindByProject(ctx, projectID, getOperator(ctx)) +func (c *SceneLoader) FindByProject(ctx context.Context, projectID gqlmodel.ID) (*gqlmodel.Scene, error) { + pid, err := gqlmodel.ToID[id.Project](projectID) if err != nil { return nil, err } - return gqlmodel.ToScene(res), nil -} - -func (c *SceneLoader) FetchLock(ctx context.Context, sid id.SceneID) (*gqlmodel.SceneLockMode, error) { - res, err := c.usecase.FetchLock(ctx, []id.SceneID{sid}, getOperator(ctx)) + res, err := c.usecase.FindByProject(ctx, pid, getOperator(ctx)) if err != nil { return nil, err } - if len(res) > 0 { - return nil, nil - } - sl := gqlmodel.ToSceneLockMode(res[0]) - return &sl, nil -} - -func (c *SceneLoader) FetchLockAll(ctx context.Context, sid []id.SceneID) ([]gqlmodel.SceneLockMode, []error) { - res, err := c.usecase.FetchLock(ctx, sid, getOperator(ctx)) - if err != nil { - return nil, []error{err} - } - - res2 := make([]gqlmodel.SceneLockMode, 0, len(res)) - for _, r := range res { - res2 = append(res2, gqlmodel.ToSceneLockMode(r)) - } - return res2, nil + return gqlmodel.ToScene(res), nil } // data loader type SceneDataLoader interface { - Load(id.SceneID) (*gqlmodel.Scene, error) - LoadAll([]id.SceneID) ([]*gqlmodel.Scene, []error) + Load(gqlmodel.ID) (*gqlmodel.Scene, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Scene, []error) } func (c *SceneLoader) DataLoader(ctx context.Context) SceneDataLoader { return gqldataloader.NewSceneLoader(gqldataloader.SceneLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { return c.Fetch(ctx, keys) }, }) @@ -84,18 +69,18 @@ func (c *SceneLoader) DataLoader(ctx context.Context) SceneDataLoader { func (c *SceneLoader) OrdinaryDataLoader(ctx context.Context) 
SceneDataLoader { return &ordinarySceneLoader{ - fetch: func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { return c.Fetch(ctx, keys) }, } } type ordinarySceneLoader struct { - fetch func(keys []id.SceneID) ([]*gqlmodel.Scene, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) } -func (l *ordinarySceneLoader) Load(key id.SceneID) (*gqlmodel.Scene, error) { - res, errs := l.fetch([]id.SceneID{key}) +func (l *ordinarySceneLoader) Load(key gqlmodel.ID) (*gqlmodel.Scene, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -105,6 +90,6 @@ func (l *ordinarySceneLoader) Load(key id.SceneID) (*gqlmodel.Scene, error) { return nil, nil } -func (l *ordinarySceneLoader) LoadAll(keys []id.SceneID) ([]*gqlmodel.Scene, []error) { +func (l *ordinarySceneLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Scene, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_tag.go b/internal/adapter/gql/loader_tag.go index fcfd823e1..c88542b95 100644 --- a/internal/adapter/gql/loader_tag.go +++ b/internal/adapter/gql/loader_tag.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type TagLoader struct { @@ -17,8 +18,13 @@ func NewTagLoader(usecase interfaces.Tag) *TagLoader { return &TagLoader{usecase: usecase} } -func (c *TagLoader) Fetch(ctx context.Context, ids []id.TagID) ([]*gqlmodel.Tag, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *TagLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { + tagids, err := util.TryMap(ids, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, tagids, getOperator(ctx)) if err != nil { return 
nil, []error{err} } @@ -34,8 +40,13 @@ func (c *TagLoader) Fetch(ctx context.Context, ids []id.TagID) ([]*gqlmodel.Tag, return tags, nil } -func (c *TagLoader) FetchGroup(ctx context.Context, ids []id.TagID) ([]*gqlmodel.TagGroup, []error) { - res, err := c.usecase.FetchGroup(ctx, ids, getOperator(ctx)) +func (c *TagLoader) FetchGroup(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { + tids, err := util.TryMap(ids, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchGroup(ctx, tids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -51,8 +62,13 @@ func (c *TagLoader) FetchGroup(ctx context.Context, ids []id.TagID) ([]*gqlmodel return tagGroups, nil } -func (c *TagLoader) FetchItem(ctx context.Context, ids []id.TagID) ([]*gqlmodel.TagItem, []error) { - res, err := c.usecase.FetchItem(ctx, ids, getOperator(ctx)) +func (c *TagLoader) FetchItem(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { + tids, err := util.TryMap(ids, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.FetchItem(ctx, tids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -71,15 +87,15 @@ func (c *TagLoader) FetchItem(ctx context.Context, ids []id.TagID) ([]*gqlmodel. 
// data loaders type TagDataLoader interface { - Load(id.TagID) (*gqlmodel.Tag, error) - LoadAll([]id.TagID) ([]*gqlmodel.Tag, []error) + Load(gqlmodel.ID) (*gqlmodel.Tag, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Tag, []error) } func (c *TagLoader) DataLoader(ctx context.Context) TagDataLoader { return gqldataloader.NewTagLoader(gqldataloader.TagLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.TagID) ([]*gqlmodel.Tag, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { return c.Fetch(ctx, keys) }, }) @@ -87,18 +103,18 @@ func (c *TagLoader) DataLoader(ctx context.Context) TagDataLoader { func (c *TagLoader) OrdinaryDataLoader(ctx context.Context) TagDataLoader { return &ordinaryTagLoader{ - fetch: func(keys []id.TagID) ([]*gqlmodel.Tag, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { return c.Fetch(ctx, keys) }, } } type ordinaryTagLoader struct { - fetch func(keys []id.TagID) ([]*gqlmodel.Tag, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) } -func (t *ordinaryTagLoader) Load(key id.TagID) (*gqlmodel.Tag, error) { - res, errs := t.fetch([]id.TagID{key}) +func (t *ordinaryTagLoader) Load(key gqlmodel.ID) (*gqlmodel.Tag, error) { + res, errs := t.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -108,20 +124,20 @@ func (t *ordinaryTagLoader) Load(key id.TagID) (*gqlmodel.Tag, error) { return nil, nil } -func (t *ordinaryTagLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.Tag, []error) { +func (t *ordinaryTagLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Tag, []error) { return t.fetch(keys) } type TagItemDataLoader interface { - Load(id.TagID) (*gqlmodel.TagItem, error) - LoadAll([]id.TagID) ([]*gqlmodel.TagItem, []error) + Load(gqlmodel.ID) (*gqlmodel.TagItem, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.TagItem, []error) } func (c *TagLoader) ItemDataLoader(ctx context.Context) TagItemDataLoader { return 
gqldataloader.NewTagItemLoader(gqldataloader.TagItemLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { return c.FetchItem(ctx, keys) }, }) @@ -129,18 +145,18 @@ func (c *TagLoader) ItemDataLoader(ctx context.Context) TagItemDataLoader { func (c *TagLoader) ItemOrdinaryDataLoader(ctx context.Context) TagItemDataLoader { return &ordinaryTagItemLoader{ - fetch: func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { return c.FetchItem(ctx, keys) }, } } type ordinaryTagItemLoader struct { - fetch func(keys []id.TagID) ([]*gqlmodel.TagItem, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) } -func (t *ordinaryTagItemLoader) Load(key id.TagID) (*gqlmodel.TagItem, error) { - res, errs := t.fetch([]id.TagID{key}) +func (t *ordinaryTagItemLoader) Load(key gqlmodel.ID) (*gqlmodel.TagItem, error) { + res, errs := t.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -150,20 +166,20 @@ func (t *ordinaryTagItemLoader) Load(key id.TagID) (*gqlmodel.TagItem, error) { return nil, nil } -func (t *ordinaryTagItemLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagItem, []error) { +func (t *ordinaryTagItemLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagItem, []error) { return t.fetch(keys) } type TagGroupDataLoader interface { - Load(id.TagID) (*gqlmodel.TagGroup, error) - LoadAll([]id.TagID) ([]*gqlmodel.TagGroup, []error) + Load(gqlmodel.ID) (*gqlmodel.TagGroup, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) } func (c *TagLoader) GroupDataLoader(ctx context.Context) TagGroupDataLoader { return gqldataloader.NewTagGroupLoader(gqldataloader.TagGroupLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { + Fetch: func(keys []gqlmodel.ID) 
([]*gqlmodel.TagGroup, []error) { return c.FetchGroup(ctx, keys) }, }) @@ -171,18 +187,18 @@ func (c *TagLoader) GroupDataLoader(ctx context.Context) TagGroupDataLoader { func (c *TagLoader) GroupOrdinaryDataLoader(ctx context.Context) TagGroupDataLoader { return &ordinaryTagGroupLoader{ - fetch: func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { return c.FetchGroup(ctx, keys) }, } } type ordinaryTagGroupLoader struct { - fetch func(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) } -func (t *ordinaryTagGroupLoader) Load(key id.TagID) (*gqlmodel.TagGroup, error) { - res, errs := t.fetch([]id.TagID{key}) +func (t *ordinaryTagGroupLoader) Load(key gqlmodel.ID) (*gqlmodel.TagGroup, error) { + res, errs := t.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -192,6 +208,6 @@ func (t *ordinaryTagGroupLoader) Load(key id.TagID) (*gqlmodel.TagGroup, error) return nil, nil } -func (t *ordinaryTagGroupLoader) LoadAll(keys []id.TagID) ([]*gqlmodel.TagGroup, []error) { +func (t *ordinaryTagGroupLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.TagGroup, []error) { return t.fetch(keys) } diff --git a/internal/adapter/gql/loader_team.go b/internal/adapter/gql/loader_team.go index 0e041df98..bfaeee35b 100644 --- a/internal/adapter/gql/loader_team.go +++ b/internal/adapter/gql/loader_team.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type TeamLoader struct { @@ -17,8 +18,13 @@ func NewTeamLoader(usecase interfaces.Team) *TeamLoader { return &TeamLoader{usecase: usecase} } -func (c *TeamLoader) Fetch(ctx context.Context, ids []id.TeamID) ([]*gqlmodel.Team, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) 
+func (c *TeamLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Team, []error) { + uids, err := util.TryMap(ids, gqlmodel.ToID[id.Team]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, uids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -30,8 +36,13 @@ func (c *TeamLoader) Fetch(ctx context.Context, ids []id.TeamID) ([]*gqlmodel.Te return teams, nil } -func (c *TeamLoader) FindByUser(ctx context.Context, uid id.UserID) ([]*gqlmodel.Team, error) { - res, err := c.usecase.FindByUser(ctx, uid, getOperator(ctx)) +func (c *TeamLoader) FindByUser(ctx context.Context, uid gqlmodel.ID) ([]*gqlmodel.Team, error) { + userid, err := gqlmodel.ToID[id.User](uid) + if err != nil { + return nil, err + } + + res, err := c.usecase.FindByUser(ctx, userid, getOperator(ctx)) if err != nil { return nil, err } @@ -45,15 +56,15 @@ func (c *TeamLoader) FindByUser(ctx context.Context, uid id.UserID) ([]*gqlmodel // data loader type TeamDataLoader interface { - Load(id.TeamID) (*gqlmodel.Team, error) - LoadAll([]id.TeamID) ([]*gqlmodel.Team, []error) + Load(gqlmodel.ID) (*gqlmodel.Team, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.Team, []error) } func (c *TeamLoader) DataLoader(ctx context.Context) TeamDataLoader { return gqldataloader.NewTeamLoader(gqldataloader.TeamLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.TeamID) ([]*gqlmodel.Team, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { return c.Fetch(ctx, keys) }, }) @@ -61,18 +72,18 @@ func (c *TeamLoader) DataLoader(ctx context.Context) TeamDataLoader { func (c *TeamLoader) OrdinaryDataLoader(ctx context.Context) TeamDataLoader { return &ordinaryTeamLoader{ - fetch: func(keys []id.TeamID) ([]*gqlmodel.Team, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { return c.Fetch(ctx, keys) }, } } type ordinaryTeamLoader struct { - fetch func(keys []id.TeamID) ([]*gqlmodel.Team, 
[]error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) } -func (l *ordinaryTeamLoader) Load(key id.TeamID) (*gqlmodel.Team, error) { - res, errs := l.fetch([]id.TeamID{key}) +func (l *ordinaryTeamLoader) Load(key gqlmodel.ID) (*gqlmodel.Team, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -82,6 +93,6 @@ func (l *ordinaryTeamLoader) Load(key id.TeamID) (*gqlmodel.Team, error) { return nil, nil } -func (l *ordinaryTeamLoader) LoadAll(keys []id.TeamID) ([]*gqlmodel.Team, []error) { +func (l *ordinaryTeamLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.Team, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/loader_user.go b/internal/adapter/gql/loader_user.go index a18bf4be3..dcd0462c8 100644 --- a/internal/adapter/gql/loader_user.go +++ b/internal/adapter/gql/loader_user.go @@ -7,6 +7,7 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) type UserLoader struct { @@ -17,8 +18,13 @@ func NewUserLoader(usecase interfaces.User) *UserLoader { return &UserLoader{usecase: usecase} } -func (c *UserLoader) Fetch(ctx context.Context, ids []id.UserID) ([]*gqlmodel.User, []error) { - res, err := c.usecase.Fetch(ctx, ids, getOperator(ctx)) +func (c *UserLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.User, []error) { + uids, err := util.TryMap(ids, gqlmodel.ToID[id.User]) + if err != nil { + return nil, []error{err} + } + + res, err := c.usecase.Fetch(ctx, uids, getOperator(ctx)) if err != nil { return nil, []error{err} } @@ -31,27 +37,27 @@ func (c *UserLoader) Fetch(ctx context.Context, ids []id.UserID) ([]*gqlmodel.Us return users, nil } -func (c *UserLoader) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) { +func (c *UserLoader) SearchUser(ctx context.Context, 
nameOrEmail string) (*gqlmodel.User, error) { res, err := c.usecase.SearchUser(ctx, nameOrEmail, getOperator(ctx)) if err != nil { return nil, err } - return gqlmodel.ToSearchedUser(res), nil + return gqlmodel.ToUser(res), nil } // data loader type UserDataLoader interface { - Load(id.UserID) (*gqlmodel.User, error) - LoadAll([]id.UserID) ([]*gqlmodel.User, []error) + Load(gqlmodel.ID) (*gqlmodel.User, error) + LoadAll([]gqlmodel.ID) ([]*gqlmodel.User, []error) } func (c *UserLoader) DataLoader(ctx context.Context) UserDataLoader { return gqldataloader.NewUserLoader(gqldataloader.UserLoaderConfig{ Wait: dataLoaderWait, MaxBatch: dataLoaderMaxBatch, - Fetch: func(keys []id.UserID) ([]*gqlmodel.User, []error) { + Fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { return c.Fetch(ctx, keys) }, }) @@ -59,18 +65,18 @@ func (c *UserLoader) DataLoader(ctx context.Context) UserDataLoader { func (c *UserLoader) OrdinaryDataLoader(ctx context.Context) UserDataLoader { return &ordinaryUserLoader{ - fetch: func(keys []id.UserID) ([]*gqlmodel.User, []error) { + fetch: func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { return c.Fetch(ctx, keys) }, } } type ordinaryUserLoader struct { - fetch func(keys []id.UserID) ([]*gqlmodel.User, []error) + fetch func(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) } -func (l *ordinaryUserLoader) Load(key id.UserID) (*gqlmodel.User, error) { - res, errs := l.fetch([]id.UserID{key}) +func (l *ordinaryUserLoader) Load(key gqlmodel.ID) (*gqlmodel.User, error) { + res, errs := l.fetch([]gqlmodel.ID{key}) if len(errs) > 0 { return nil, errs[0] } @@ -80,6 +86,6 @@ func (l *ordinaryUserLoader) Load(key id.UserID) (*gqlmodel.User, error) { return nil, nil } -func (l *ordinaryUserLoader) LoadAll(keys []id.UserID) ([]*gqlmodel.User, []error) { +func (l *ordinaryUserLoader) LoadAll(keys []gqlmodel.ID) ([]*gqlmodel.User, []error) { return l.fetch(keys) } diff --git a/internal/adapter/gql/resolver_asset.go 
b/internal/adapter/gql/resolver_asset.go index a3938eaf6..f9d85fabf 100644 --- a/internal/adapter/gql/resolver_asset.go +++ b/internal/adapter/gql/resolver_asset.go @@ -4,7 +4,6 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) func (r *Resolver) Asset() AssetResolver { @@ -14,5 +13,5 @@ func (r *Resolver) Asset() AssetResolver { type assetResolver struct{ *Resolver } func (r *assetResolver) Team(ctx context.Context, obj *gqlmodel.Asset) (*gqlmodel.Team, error) { - return dataloaders(ctx).Team.Load(id.TeamID(obj.TeamID)) + return dataloaders(ctx).Team.Load(obj.TeamID) } diff --git a/internal/adapter/gql/resolver_dataset.go b/internal/adapter/gql/resolver_dataset.go index 1002d1b4d..4edd29807 100644 --- a/internal/adapter/gql/resolver_dataset.go +++ b/internal/adapter/gql/resolver_dataset.go @@ -4,7 +4,6 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) func (r *Resolver) Dataset() DatasetResolver { @@ -18,11 +17,11 @@ func (r *Resolver) DatasetField() DatasetFieldResolver { type datasetResolver struct{ *Resolver } func (r *datasetResolver) Schema(ctx context.Context, obj *gqlmodel.Dataset) (*gqlmodel.DatasetSchema, error) { - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) } func (r *datasetResolver) Name(ctx context.Context, obj *gqlmodel.Dataset) (*string, error) { - ds, err := dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + ds, err := dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) if err != nil || ds == nil || ds.RepresentativeFieldID == nil { return nil, err } @@ -40,21 +39,21 @@ func (r *datasetResolver) Name(ctx context.Context, obj *gqlmodel.Dataset) (*str type datasetFieldResolver struct{ *Resolver } func (r *datasetFieldResolver) Field(ctx context.Context, obj 
*gqlmodel.DatasetField) (*gqlmodel.DatasetSchemaField, error) { - ds, err := dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + ds, err := dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) return ds.Field(obj.FieldID), err } func (r *datasetFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.DatasetSchema, error) { - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) } func (r *datasetFieldResolver) ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) { - if obj.Value == nil { + if obj.Value == nil || obj.Type != gqlmodel.ValueTypeRef { return nil, nil } - idstr, ok := (obj.Value).(id.ID) + idstr, ok := (obj.Value).(string) if !ok { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(idstr)) + return dataloaders(ctx).Dataset.Load(gqlmodel.ID(idstr)) } diff --git a/internal/adapter/gql/resolver_dataset_schema.go b/internal/adapter/gql/resolver_dataset_schema.go index d72cd9537..b106e9bb9 100644 --- a/internal/adapter/gql/resolver_dataset_schema.go +++ b/internal/adapter/gql/resolver_dataset_schema.go @@ -5,7 +5,6 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" ) func (r *Resolver) DatasetSchema() DatasetSchemaResolver { @@ -19,7 +18,7 @@ func (r *Resolver) DatasetSchemaField() DatasetSchemaFieldResolver { type datasetSchemaResolver struct{ *Resolver } func (r *datasetSchemaResolver) Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r *datasetSchemaResolver) RepresentativeField(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) { @@ -42,12 +41,12 @@ func (r 
*datasetSchemaResolver) Datasets(ctx context.Context, obj *gqlmodel.Data type datasetSchemaFieldResolver struct{ *Resolver } func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.SchemaID)) + return dataloaders(ctx).DatasetSchema.Load(obj.SchemaID) } func (r *datasetSchemaFieldResolver) Ref(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { if obj.RefID == nil { return nil, nil } - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.RefID)) + return dataloaders(ctx).DatasetSchema.Load(*obj.RefID) } diff --git a/internal/adapter/gql/resolver_layer.go b/internal/adapter/gql/resolver_layer.go index adbe737a0..b1e35179a 100644 --- a/internal/adapter/gql/resolver_layer.go +++ b/internal/adapter/gql/resolver_layer.go @@ -4,7 +4,6 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) func (r *Resolver) LayerItem() LayerItemResolver { @@ -46,11 +45,11 @@ func (r *Resolver) LayerTagGroup() LayerTagGroupResolver { type infoboxResolver struct{ *Resolver } func (r *infoboxResolver) Property(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Property, error) { - return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(obj.PropertyID) } func (r *infoboxResolver) Layer(ctx context.Context, obj *gqlmodel.Infobox) (gqlmodel.Layer, error) { - layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) if err != nil || layer == nil { return nil, err } @@ -61,11 +60,11 @@ func (r *infoboxResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.Infob if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return 
dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) } func (r *infoboxResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.MergedInfobox, error) { - ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) + ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, obj.LayerID) if err != nil || ml == nil { return nil, err } @@ -73,15 +72,16 @@ func (r *infoboxResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox) (*g } func (r *infoboxResolver) Scene(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox) (*gqlmodel.ScenePlugin, error) { - layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) if err != nil || layer == nil { return nil, err } - var pluginID *id.PluginID + + var pluginID *gqlmodel.ID if lg, ok := (*layer).(*gqlmodel.LayerGroup); ok { pluginID = lg.PluginID } else if li, ok := (*layer).(*gqlmodel.LayerItem); ok { @@ -91,7 +91,7 @@ func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox return nil, nil } - s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) if err != nil { return nil, err } @@ -101,7 +101,7 @@ func (r *infoboxResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Infobox type infoboxFieldResolver struct{ *Resolver } func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *gqlmodel.InfoboxField) (gqlmodel.Layer, error) { - layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) if err != nil { return nil, err } @@ -109,7 +109,7 @@ func (r *infoboxFieldResolver) Layer(ctx context.Context, obj *gqlmodel.InfoboxF } func (r *infoboxFieldResolver) 
Infobox(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Infobox, error) { - layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.LayerID)) + layer, err := dataloaders(ctx).Layer.Load(obj.LayerID) if err != nil || layer == nil { return nil, err } @@ -121,7 +121,7 @@ func (r *infoboxFieldResolver) Infobox(ctx context.Context, obj *gqlmodel.Infobo } func (r *infoboxFieldResolver) Property(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Property, error) { - return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(obj.PropertyID) } func (r *infoboxFieldResolver) Plugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Plugin, error) { @@ -140,11 +140,11 @@ func (r *infoboxFieldResolver) LinkedDataset(ctx context.Context, obj *gqlmodel. if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) } func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.MergedInfoboxField, error) { - ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, id.LayerID(obj.LayerID)) + ml, err := loaders(ctx).Layer.FetchParentAndMerged(ctx, obj.LayerID) if err != nil || ml == nil || ml.Infobox == nil { return nil, err } @@ -152,11 +152,11 @@ func (r *infoboxFieldResolver) Merged(ctx context.Context, obj *gqlmodel.Infobox } func (r *infoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r *infoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.InfoboxField) (*gqlmodel.ScenePlugin, error) { - s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) if err != nil { return nil, err } @@ -167,16 +167,16 @@ 
type layerGroupResolver struct{ *Resolver } func (r *layerGroupResolver) Parent(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.LayerGroup, error) { if obj.ParentID != nil { - return dataloaders(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + return dataloaders(ctx).LayerGroup.Load(*obj.ParentID) } - return loaders(ctx).Layer.FetchParent(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParent(ctx, obj.ID) } func (r *layerGroupResolver) Property(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Property, error) { if obj.PropertyID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) + return dataloaders(ctx).Property.Load(*obj.PropertyID) } func (r *layerGroupResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Plugin, error) { @@ -198,18 +198,18 @@ func (r *layerGroupResolver) Extension(ctx context.Context, obj *gqlmodel.LayerG } func (r *layerGroupResolver) ParentLayer(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { - return loaders(ctx).Layer.FetchParent(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParent(ctx, obj.ID) } func (r *layerGroupResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.DatasetSchema, error) { if obj.LinkedDatasetSchemaID == nil { return nil, nil } - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) + return dataloaders(ctx).DatasetSchema.Load(*obj.LinkedDatasetSchemaID) } func (r *layerGroupResolver) Layers(ctx context.Context, obj *gqlmodel.LayerGroup) ([]gqlmodel.Layer, error) { - layers, err := dataloaders(ctx).Layer.LoadAll(id.LayerIDsFromIDRef(obj.LayerIds)) + layers, err := dataloaders(ctx).Layer.LoadAll(obj.LayerIds) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -217,14 +217,14 @@ func (r *layerGroupResolver) Layers(ctx context.Context, obj *gqlmodel.LayerGrou } func (r *layerGroupResolver) Scene(ctx 
context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r *layerGroupResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerGroup) (*gqlmodel.ScenePlugin, error) { if obj.PluginID == nil { return nil, nil } - s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) if err != nil { return nil, err } @@ -235,16 +235,16 @@ type layerItemResolver struct{ *Resolver } func (r *layerItemResolver) Parent(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.LayerGroup, error) { if obj.ParentID != nil { - return dataloaders(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + return dataloaders(ctx).LayerGroup.Load(*obj.ParentID) } - return loaders(ctx).Layer.FetchParent(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParent(ctx, obj.ID) } func (r *layerItemResolver) Property(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Property, error) { if obj.PropertyID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) + return dataloaders(ctx).Property.Load(*obj.PropertyID) } func (r *layerItemResolver) Plugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Plugin, error) { @@ -269,25 +269,25 @@ func (r *layerItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.Lay if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) } func (r *layerItemResolver) Merged(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.MergedLayer, error) { if obj.ParentID == nil { - return loaders(ctx).Layer.FetchParentAndMerged(ctx, id.LayerID(obj.ID)) + return loaders(ctx).Layer.FetchParentAndMerged(ctx, obj.ID) } - return loaders(ctx).Layer.FetchMerged(ctx, id.LayerID(obj.ID), 
id.LayerIDFromRefID(obj.ParentID)) + return loaders(ctx).Layer.FetchMerged(ctx, obj.ID, obj.ParentID) } func (r *layerItemResolver) Scene(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.LayerItem) (*gqlmodel.ScenePlugin, error) { if obj.PluginID == nil { return nil, nil } - s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) if err != nil { return nil, err } @@ -297,27 +297,27 @@ func (r *layerItemResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Layer type mergedLayerResolver struct{ *Resolver } func (r *mergedLayerResolver) Original(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerItem, error) { - return dataloaders(ctx).LayerItem.Load(id.LayerID(obj.OriginalID)) + return dataloaders(ctx).LayerItem.Load(obj.OriginalID) } func (r *mergedLayerResolver) Parent(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.LayerGroup, error) { if obj.ParentID == nil { return nil, nil } - return dataloaders(ctx).LayerGroup.Load(id.LayerID(*obj.ParentID)) + return dataloaders(ctx).LayerGroup.Load(*obj.ParentID) } func (r *mergedLayerResolver) Scene(ctx context.Context, obj *gqlmodel.MergedLayer) (*gqlmodel.Scene, error) { if obj.ParentID == nil { return nil, nil } - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } type mergedInfoboxResolver struct{ *Resolver } func (r *mergedInfoboxResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfobox) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } type mergedInfoboxFieldResolver struct{ *Resolver } @@ -335,11 +335,11 @@ func (r *mergedInfoboxFieldResolver) Extension(ctx 
context.Context, obj *gqlmode } func (r *mergedInfoboxFieldResolver) Scene(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.MergedInfoboxField) (*gqlmodel.ScenePlugin, error) { - s, err := dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + s, err := dataloaders(ctx).Scene.Load(obj.SceneID) if err != nil { return nil, err } @@ -349,7 +349,7 @@ func (r *mergedInfoboxFieldResolver) ScenePlugin(ctx context.Context, obj *gqlmo type layerTagItemResolver struct{ *Resolver } func (r *layerTagItemResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagItem) (gqlmodel.Tag, error) { - t, err := dataloaders(ctx).Tag.Load(id.TagID(obj.TagID)) + t, err := dataloaders(ctx).Tag.Load(obj.TagID) if err != nil { return nil, err } @@ -359,7 +359,7 @@ func (r *layerTagItemResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagIt type layerTagGroupResolver struct{ *Resolver } func (r *layerTagGroupResolver) Tag(ctx context.Context, obj *gqlmodel.LayerTagGroup) (gqlmodel.Tag, error) { - t, err := dataloaders(ctx).Tag.Load(id.TagID(obj.TagID)) + t, err := dataloaders(ctx).Tag.Load(obj.TagID) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_asset.go b/internal/adapter/gql/resolver_mutation_asset.go index 482d7208a..ef0188838 100644 --- a/internal/adapter/gql/resolver_mutation_asset.go +++ b/internal/adapter/gql/resolver_mutation_asset.go @@ -9,8 +9,13 @@ import ( ) func (r *mutationResolver) CreateAsset(ctx context.Context, input gqlmodel.CreateAssetInput) (*gqlmodel.CreateAssetPayload, error) { + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Asset.Create(ctx, interfaces.CreateAssetParam{ - TeamID: id.TeamID(input.TeamID), + TeamID: tid, File: 
gqlmodel.FromFile(&input.File), }, getOperator(ctx)) if err != nil { @@ -21,10 +26,15 @@ func (r *mutationResolver) CreateAsset(ctx context.Context, input gqlmodel.Creat } func (r *mutationResolver) RemoveAsset(ctx context.Context, input gqlmodel.RemoveAssetInput) (*gqlmodel.RemoveAssetPayload, error) { - res, err2 := usecases(ctx).Asset.Remove(ctx, id.AssetID(input.AssetID), getOperator(ctx)) + aid, err := gqlmodel.ToID[id.Asset](input.AssetID) + if err != nil { + return nil, err + } + + res, err2 := usecases(ctx).Asset.Remove(ctx, aid, getOperator(ctx)) if err2 != nil { return nil, err2 } - return &gqlmodel.RemoveAssetPayload{AssetID: res.ID()}, nil + return &gqlmodel.RemoveAssetPayload{AssetID: gqlmodel.IDFrom(res)}, nil } diff --git a/internal/adapter/gql/resolver_mutation_dataset.go b/internal/adapter/gql/resolver_mutation_dataset.go index 12928aedb..87d80e6a0 100644 --- a/internal/adapter/gql/resolver_mutation_dataset.go +++ b/internal/adapter/gql/resolver_mutation_dataset.go @@ -6,11 +6,17 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input gqlmodel.UpdateDatasetSchemaInput) (*gqlmodel.UpdateDatasetSchemaPayload, error) { + dsid, err := gqlmodel.ToID[id.DatasetSchema](input.SchemaID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Dataset.UpdateDatasetSchema(ctx, interfaces.UpdateDatasetSchemaParam{ - SchemaId: id.DatasetSchemaID(input.SchemaID), + SchemaId: dsid, Name: input.Name, }, getOperator(ctx)) if err != nil { @@ -21,8 +27,13 @@ func (r *mutationResolver) UpdateDatasetSchema(ctx context.Context, input gqlmod } func (r *mutationResolver) AddDynamicDatasetSchema(ctx context.Context, input gqlmodel.AddDynamicDatasetSchemaInput) (*gqlmodel.AddDynamicDatasetSchemaPayload, error) { 
+ sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Dataset.AddDynamicDatasetSchema(ctx, interfaces.AddDynamicDatasetSchemaParam{ - SceneId: id.SceneID(input.SceneID), + SceneId: sid, }) if err != nil { return nil, err @@ -32,8 +43,13 @@ func (r *mutationResolver) AddDynamicDatasetSchema(ctx context.Context, input gq } func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input gqlmodel.AddDynamicDatasetInput) (*gqlmodel.AddDynamicDatasetPayload, error) { + dsid, err := gqlmodel.ToID[id.DatasetSchema](input.DatasetSchemaID) + if err != nil { + return nil, err + } + dss, ds, err := usecases(ctx).Dataset.AddDynamicDataset(ctx, interfaces.AddDynamicDatasetParam{ - SchemaId: id.DatasetSchemaID(input.DatasetSchemaID), + SchemaId: dsid, Author: input.Author, Content: input.Content, Lat: input.Lat, @@ -48,57 +64,68 @@ func (r *mutationResolver) AddDynamicDataset(ctx context.Context, input gqlmodel } func (r *mutationResolver) SyncDataset(ctx context.Context, input gqlmodel.SyncDatasetInput) (*gqlmodel.SyncDatasetPayload, error) { - dss, ds, err := usecases(ctx).Dataset.Sync(ctx, id.SceneID(input.SceneID), input.URL, getOperator(ctx)) + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) if err != nil { return nil, err } - schemas := make([]*gqlmodel.DatasetSchema, 0, len(dss)) - datasets := make([]*gqlmodel.Dataset, 0, len(ds)) - for _, d := range dss { - schemas = append(schemas, gqlmodel.ToDatasetSchema(d)) - } - for _, d := range ds { - datasets = append(datasets, gqlmodel.ToDataset(d)) + dss, ds, err := usecases(ctx).Dataset.Sync(ctx, sid, input.URL, getOperator(ctx)) + if err != nil { + return nil, err } return &gqlmodel.SyncDatasetPayload{ SceneID: input.SceneID, URL: input.URL, - DatasetSchema: schemas, - Dataset: datasets, + DatasetSchema: util.Map(dss, gqlmodel.ToDatasetSchema), + Dataset: util.Map(ds, gqlmodel.ToDataset), }, nil } func (r *mutationResolver) RemoveDatasetSchema(ctx 
context.Context, input gqlmodel.RemoveDatasetSchemaInput) (*gqlmodel.RemoveDatasetSchemaPayload, error) { + sid, err := gqlmodel.ToID[id.DatasetSchema](input.SchemaID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Dataset.RemoveDatasetSchema(ctx, interfaces.RemoveDatasetSchemaParam{ - SchemaID: id.DatasetSchemaID(input.SchemaID), + SchemaID: sid, Force: input.Force, }, getOperator(ctx)) if err != nil { return nil, err } - return &gqlmodel.RemoveDatasetSchemaPayload{SchemaID: res.ID()}, nil + return &gqlmodel.RemoveDatasetSchemaPayload{SchemaID: gqlmodel.IDFrom(res)}, nil } func (r *mutationResolver) AddDatasetSchema(ctx context.Context, input gqlmodel.AddDatasetSchemaInput) (*gqlmodel.AddDatasetSchemaPayload, error) { - res, err2 := usecases(ctx).Dataset.AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ - SceneId: id.SceneID(input.SceneID), + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Dataset.AddDatasetSchema(ctx, interfaces.AddDatasetSchemaParam{ + SceneId: sid, Name: input.Name, - RepresentativeField: id.DatasetSchemaFieldIDFromRefID(input.Representativefield), + RepresentativeField: gqlmodel.ToIDRef[id.DatasetField](input.Representativefield), }, getOperator(ctx)) - if err2 != nil { - return nil, err2 + if err != nil { + return nil, err } return &gqlmodel.AddDatasetSchemaPayload{DatasetSchema: gqlmodel.ToDatasetSchema(res)}, nil } func (r *mutationResolver) ImportDataset(ctx context.Context, input gqlmodel.ImportDatasetInput) (*gqlmodel.ImportDatasetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Dataset.ImportDataset(ctx, interfaces.ImportDatasetParam{ - SceneId: id.SceneID(input.SceneID), - SchemaId: id.DatasetSchemaIDFromRefID(input.DatasetSchemaID), + SceneId: sid, + SchemaId: gqlmodel.ToIDRef[id.DatasetSchema](input.DatasetSchemaID), File: 
gqlmodel.FromFile(&input.File), }, getOperator(ctx)) if err != nil { @@ -109,12 +136,17 @@ func (r *mutationResolver) ImportDataset(ctx context.Context, input gqlmodel.Imp } func (r *mutationResolver) ImportDatasetFromGoogleSheet(ctx context.Context, input gqlmodel.ImportDatasetFromGoogleSheetInput) (*gqlmodel.ImportDatasetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Dataset.ImportDatasetFromGoogleSheet(ctx, interfaces.ImportDatasetFromGoogleSheetParam{ Token: input.AccessToken, FileID: input.FileID, SheetName: input.SheetName, - SceneId: id.SceneID(input.SceneID), - SchemaId: id.DatasetSchemaIDFromRefID(input.DatasetSchemaID), + SceneId: sid, + SchemaId: gqlmodel.ToIDRef[id.DatasetSchema](input.DatasetSchemaID), }, getOperator(ctx)) if err != nil { return nil, err diff --git a/internal/adapter/gql/resolver_mutation_layer.go b/internal/adapter/gql/resolver_mutation_layer.go index 843af6a2b..0a244ff02 100644 --- a/internal/adapter/gql/resolver_mutation_layer.go +++ b/internal/adapter/gql/resolver_mutation_layer.go @@ -6,13 +6,24 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/samber/lo" ) func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddLayerItemInput) (*gqlmodel.AddLayerItemPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.ParentLayerID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + layer, parent, err := usecases(ctx).Layer.AddItem(ctx, interfaces.AddLayerItemInput{ - ParentLayerID: id.LayerID(input.ParentLayerID), - PluginID: &input.PluginID, - ExtensionID: &input.ExtensionID, + ParentLayerID: lid, + PluginID: &pid, + ExtensionID: lo.ToPtr(id.PluginExtensionID(input.ExtensionID)), Index: 
input.Index, Name: gqlmodel.RefToString(input.Name), LatLng: gqlmodel.ToPropertyLatLng(input.Lat, input.Lng), @@ -30,14 +41,19 @@ func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddL } func (r *mutationResolver) AddLayerGroup(ctx context.Context, input gqlmodel.AddLayerGroupInput) (*gqlmodel.AddLayerGroupPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.ParentLayerID) + if err != nil { + return nil, err + } + layer, parent, err := usecases(ctx).Layer.AddGroup(ctx, interfaces.AddLayerGroupInput{ - ParentLayerID: id.LayerID(input.ParentLayerID), - PluginID: input.PluginID, - ExtensionID: input.ExtensionID, + ParentLayerID: lid, + PluginID: gqlmodel.ToPluginIDRef(input.PluginID), + ExtensionID: gqlmodel.ToStringIDRef[id.PluginExtension](input.ExtensionID), Index: input.Index, Name: gqlmodel.RefToString(input.Name), - LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(input.LinkedDatasetSchemaID), - RepresentativeFieldId: input.RepresentativeFieldID, + LinkedDatasetSchemaID: gqlmodel.ToIDRef[id.DatasetSchema](input.LinkedDatasetSchemaID), + RepresentativeFieldId: gqlmodel.ToIDRef[id.DatasetField](input.RepresentativeFieldID), }, getOperator(ctx)) if err != nil { return nil, err @@ -51,20 +67,30 @@ func (r *mutationResolver) AddLayerGroup(ctx context.Context, input gqlmodel.Add } func (r *mutationResolver) RemoveLayer(ctx context.Context, input gqlmodel.RemoveLayerInput) (*gqlmodel.RemoveLayerPayload, error) { - id, layer, err := usecases(ctx).Layer.Remove(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + id, layer, err := usecases(ctx).Layer.Remove(ctx, lid, getOperator(ctx)) if err != nil { return nil, err } return &gqlmodel.RemoveLayerPayload{ - LayerID: id.ID(), + LayerID: gqlmodel.IDFrom(id), ParentLayer: gqlmodel.ToLayerGroup(layer, nil), }, nil } func (r *mutationResolver) UpdateLayer(ctx context.Context, input 
gqlmodel.UpdateLayerInput) (*gqlmodel.UpdateLayerPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + layer, err := usecases(ctx).Layer.Update(ctx, interfaces.UpdateLayerInput{ - LayerID: id.LayerID(input.LayerID), + LayerID: lid, Name: input.Name, Visible: input.Visible, }, getOperator(ctx)) @@ -78,9 +104,14 @@ func (r *mutationResolver) UpdateLayer(ctx context.Context, input gqlmodel.Updat } func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLayerInput) (*gqlmodel.MoveLayerPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + targetLayerID, layerGroupFrom, layerGroupTo, index, err := usecases(ctx).Layer.Move(ctx, interfaces.MoveLayerInput{ - LayerID: id.LayerID(input.LayerID), - DestLayerID: id.LayerIDFromRefID(input.DestLayerID), + LayerID: lid, + DestLayerID: gqlmodel.ToIDRef[id.Layer](input.DestLayerID), Index: gqlmodel.RefToIndex(input.Index), }, getOperator(ctx)) if err != nil { @@ -88,7 +119,7 @@ func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLay } return &gqlmodel.MoveLayerPayload{ - LayerID: targetLayerID.ID(), + LayerID: gqlmodel.IDFrom(targetLayerID), FromParentLayer: gqlmodel.ToLayerGroup(layerGroupFrom, nil), ToParentLayer: gqlmodel.ToLayerGroup(layerGroupTo, nil), Index: index, @@ -96,7 +127,12 @@ func (r *mutationResolver) MoveLayer(ctx context.Context, input gqlmodel.MoveLay } func (r *mutationResolver) CreateInfobox(ctx context.Context, input gqlmodel.CreateInfoboxInput) (*gqlmodel.CreateInfoboxPayload, error) { - layer, err := usecases(ctx).Layer.CreateInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.CreateInfobox(ctx, lid, getOperator(ctx)) if err != nil { return nil, err } @@ -107,7 +143,12 @@ func (r *mutationResolver) 
CreateInfobox(ctx context.Context, input gqlmodel.Cre } func (r *mutationResolver) RemoveInfobox(ctx context.Context, input gqlmodel.RemoveInfoboxInput) (*gqlmodel.RemoveInfoboxPayload, error) { - layer, err := usecases(ctx).Layer.RemoveInfobox(ctx, id.LayerID(input.LayerID), getOperator(ctx)) + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + layer, err := usecases(ctx).Layer.RemoveInfobox(ctx, lid, getOperator(ctx)) if err != nil { return nil, err } @@ -118,10 +159,20 @@ func (r *mutationResolver) RemoveInfobox(ctx context.Context, input gqlmodel.Rem } func (r *mutationResolver) AddInfoboxField(ctx context.Context, input gqlmodel.AddInfoboxFieldInput) (*gqlmodel.AddInfoboxFieldPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + infoboxField, layer, err := usecases(ctx).Layer.AddInfoboxField(ctx, interfaces.AddInfoboxFieldParam{ - LayerID: id.LayerID(input.LayerID), - PluginID: input.PluginID, - ExtensionID: input.ExtensionID, + LayerID: lid, + PluginID: pid, + ExtensionID: id.PluginExtensionID(input.ExtensionID), Index: input.Index, }, getOperator(ctx)) if err != nil { @@ -135,9 +186,14 @@ func (r *mutationResolver) AddInfoboxField(ctx context.Context, input gqlmodel.A } func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input gqlmodel.MoveInfoboxFieldInput) (*gqlmodel.MoveInfoboxFieldPayload, error) { + lid, ifid, err := gqlmodel.ToID2[id.Layer, id.InfoboxField](input.LayerID, input.InfoboxFieldID) + if err != nil { + return nil, err + } + infoboxField, layer, index, err := usecases(ctx).Layer.MoveInfoboxField(ctx, interfaces.MoveInfoboxFieldParam{ - LayerID: id.LayerID(input.LayerID), - InfoboxFieldID: id.InfoboxFieldID(input.InfoboxFieldID), + LayerID: lid, + InfoboxFieldID: ifid, Index: input.Index, }, getOperator(ctx)) if err != nil { @@ -145,30 
+201,40 @@ func (r *mutationResolver) MoveInfoboxField(ctx context.Context, input gqlmodel. } return &gqlmodel.MoveInfoboxFieldPayload{ - InfoboxFieldID: infoboxField.ID(), + InfoboxFieldID: gqlmodel.IDFrom(infoboxField), Layer: gqlmodel.ToLayer(layer, nil), Index: index, }, nil } func (r *mutationResolver) RemoveInfoboxField(ctx context.Context, input gqlmodel.RemoveInfoboxFieldInput) (*gqlmodel.RemoveInfoboxFieldPayload, error) { + lid, ifid, err := gqlmodel.ToID2[id.Layer, id.InfoboxField](input.LayerID, input.InfoboxFieldID) + if err != nil { + return nil, err + } + infoboxField, layer, err := usecases(ctx).Layer.RemoveInfoboxField(ctx, interfaces.RemoveInfoboxFieldParam{ - LayerID: id.LayerID(input.LayerID), - InfoboxFieldID: id.InfoboxFieldID(input.InfoboxFieldID), + LayerID: lid, + InfoboxFieldID: ifid, }, getOperator(ctx)) if err != nil { return nil, err } return &gqlmodel.RemoveInfoboxFieldPayload{ - InfoboxFieldID: infoboxField.ID(), + InfoboxFieldID: gqlmodel.IDFrom(infoboxField), Layer: gqlmodel.ToLayer(layer, nil), }, nil } func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.ImportLayerInput) (*gqlmodel.ImportLayerPayload, error) { + lid, err := gqlmodel.ToID[id.Layer](input.LayerID) + if err != nil { + return nil, err + } + l, l2, err := usecases(ctx).Layer.ImportLayer(ctx, interfaces.ImportLayerParam{ - LayerID: id.LayerID(input.LayerID), + LayerID: lid, File: gqlmodel.FromFile(&input.File), Format: gqlmodel.FromLayerEncodingFormat(input.Format), }, getOperator(ctx)) @@ -183,20 +249,32 @@ func (r *mutationResolver) ImportLayer(ctx context.Context, input gqlmodel.Impor } func (r *mutationResolver) AttachTagToLayer(ctx context.Context, input gqlmodel.AttachTagToLayerInput) (*gqlmodel.AttachTagToLayerPayload, error) { - layer, err := usecases(ctx).Layer.AttachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + lid, tid, err := gqlmodel.ToID2[id.Layer, id.Tag](input.LayerID, input.TagID) if err != nil { 
return nil, err } + + layer, err := usecases(ctx).Layer.AttachTag(ctx, lid, tid, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.AttachTagToLayerPayload{ Layer: gqlmodel.ToLayer(layer, nil), }, nil } func (r *mutationResolver) DetachTagFromLayer(ctx context.Context, input gqlmodel.DetachTagFromLayerInput) (*gqlmodel.DetachTagFromLayerPayload, error) { - layer, err := usecases(ctx).Layer.DetachTag(ctx, id.LayerID(input.LayerID), id.TagID(input.TagID), getOperator(ctx)) + lid, tid, err := gqlmodel.ToID2[id.Layer, id.Tag](input.LayerID, input.TagID) if err != nil { return nil, err } + + layer, err := usecases(ctx).Layer.DetachTag(ctx, lid, tid, getOperator(ctx)) + if err != nil { + return nil, err + } + return &gqlmodel.DetachTagFromLayerPayload{ Layer: gqlmodel.ToLayer(layer, nil), }, nil diff --git a/internal/adapter/gql/resolver_mutation_project.go b/internal/adapter/gql/resolver_mutation_project.go index 60d054792..5fc8b7878 100644 --- a/internal/adapter/gql/resolver_mutation_project.go +++ b/internal/adapter/gql/resolver_mutation_project.go @@ -10,8 +10,13 @@ import ( ) func (r *mutationResolver) CreateProject(ctx context.Context, input gqlmodel.CreateProjectInput) (*gqlmodel.ProjectPayload, error) { + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Project.Create(ctx, interfaces.CreateProjectParam{ - TeamID: id.TeamID(input.TeamID), + TeamID: tid, Visualizer: visualizer.Visualizer(input.Visualizer), Name: input.Name, Description: input.Description, @@ -37,8 +42,13 @@ func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.Upd deleteImageURL = *input.DeleteImageURL } + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Project.Update(ctx, interfaces.UpdateProjectParam{ - ID: id.ProjectID(input.ProjectID), + ID: pid, Name: input.Name, Description: input.Description, Alias: 
input.Alias, @@ -62,8 +72,13 @@ func (r *mutationResolver) UpdateProject(ctx context.Context, input gqlmodel.Upd } func (r *mutationResolver) PublishProject(ctx context.Context, input gqlmodel.PublishProjectInput) (*gqlmodel.ProjectPayload, error) { + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + res, err := usecases(ctx).Project.Publish(ctx, interfaces.PublishProjectParam{ - ID: id.ProjectID(input.ProjectID), + ID: pid, Alias: input.Alias, Status: gqlmodel.FromPublishmentStatus(input.Status), }, getOperator(ctx)) @@ -75,10 +90,14 @@ func (r *mutationResolver) PublishProject(ctx context.Context, input gqlmodel.Pu } func (r *mutationResolver) DeleteProject(ctx context.Context, input gqlmodel.DeleteProjectInput) (*gqlmodel.DeleteProjectPayload, error) { - err := usecases(ctx).Project.Delete(ctx, id.ProjectID(input.ProjectID), getOperator(ctx)) + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) if err != nil { return nil, err } + if err := usecases(ctx).Project.Delete(ctx, pid, getOperator(ctx)); err != nil { + return nil, err + } + return &gqlmodel.DeleteProjectPayload{ProjectID: input.ProjectID}, nil } diff --git a/internal/adapter/gql/resolver_mutation_property.go b/internal/adapter/gql/resolver_mutation_property.go index c117fda9e..e2ba6c9b7 100644 --- a/internal/adapter/gql/resolver_mutation_property.go +++ b/internal/adapter/gql/resolver_mutation_property.go @@ -8,6 +8,8 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/util" + "github.com/samber/lo" ) func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmodel.UpdatePropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { @@ -19,10 +21,19 @@ func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmod } } + pid, err := 
gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + pp, pgl, pg, pf, err := usecases(ctx).Property.UpdateValue(ctx, interfaces.UpdatePropertyValueParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), - Value: v, + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + gqlmodel.ToStringIDRef[id.PropertyField](&input.FieldID), + ), + Value: v, }, getOperator(ctx)) if err != nil { return nil, err @@ -35,9 +46,18 @@ func (r *mutationResolver) UpdatePropertyValue(ctx context.Context, input gqlmod } func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmodel.RemovePropertyFieldInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + p, err := usecases(ctx).Property.RemoveField(ctx, interfaces.RemovePropertyFieldParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + gqlmodel.ToStringIDRef[id.PropertyField](&input.FieldID), + ), }, getOperator(ctx)) if err != nil { return nil, err @@ -49,10 +69,19 @@ func (r *mutationResolver) RemovePropertyField(ctx context.Context, input gqlmod } func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmodel.UploadFileToPropertyInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + p, pgl, pg, pf, err := usecases(ctx).Property.UploadFile(ctx, interfaces.UploadFileParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, 
&input.FieldID), - File: gqlmodel.FromFile(&input.File), + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + gqlmodel.ToStringIDRef[id.PropertyField](&input.FieldID), + ), + File: gqlmodel.FromFile(&input.File), }, getOperator(ctx)) if err != nil { return nil, err @@ -65,14 +94,24 @@ func (r *mutationResolver) UploadFileToProperty(ctx context.Context, input gqlmo } func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input gqlmodel.LinkDatasetToPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + links, err := gqlmodel.FromPropertyFieldLink(input.DatasetSchemaIds, input.DatasetIds, input.DatasetSchemaFieldIds) + if err != nil { + return nil, err + } + p, pgl, pg, pf, err := usecases(ctx).Property.LinkValue(ctx, interfaces.LinkPropertyValueParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), - Links: gqlmodel.FromPropertyFieldLink( - input.DatasetSchemaIds, - input.DatasetIds, - input.DatasetSchemaFieldIds, + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + lo.ToPtr(id.PropertyFieldID(input.FieldID)), ), + Links: links, }, getOperator(ctx)) if err != nil { return nil, err @@ -85,9 +124,18 @@ func (r *mutationResolver) LinkDatasetToPropertyValue(ctx context.Context, input } func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmodel.UnlinkPropertyValueInput) (*gqlmodel.PropertyFieldPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + p, pgl, pg, pf, err := usecases(ctx).Property.UnlinkValue(ctx, interfaces.UnlinkPropertyValueParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: 
gqlmodel.FromPointer(input.SchemaGroupID, input.ItemID, &input.FieldID), + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + gqlmodel.ToStringIDRef[id.PropertySchemaGroup](input.SchemaGroupID), + input.ItemID, + lo.ToPtr(id.PropertyFieldID(input.FieldID)), + ), }, getOperator(ctx)) if err != nil { return nil, err @@ -100,6 +148,11 @@ func (r *mutationResolver) UnlinkPropertyValue(ctx context.Context, input gqlmod } func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.AddPropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + var v *property.Value if input.NameFieldType != nil { v = gqlmodel.FromPropertyValueAndType(input.NameFieldValue, *input.NameFieldType) @@ -109,8 +162,8 @@ func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.A } p, pgl, pi, err := usecases(ctx).Property.AddItem(ctx, interfaces.AddPropertyItemParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, nil, nil), + PropertyID: pid, + Pointer: gqlmodel.FromPointer(gqlmodel.ToStringIDRef[id.PropertySchemaGroup](&input.SchemaGroupID), nil, nil), Index: input.Index, NameFieldValue: v, }, getOperator(ctx)) @@ -126,10 +179,19 @@ func (r *mutationResolver) AddPropertyItem(ctx context.Context, input gqlmodel.A } func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel.MovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + p, pgl, pi, err := usecases(ctx).Property.MoveItem(ctx, interfaces.MovePropertyItemParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, &input.ItemID, nil), - Index: input.Index, + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + lo.ToPtr(id.PropertySchemaGroupID(input.SchemaGroupID)), + 
&input.ItemID, + nil, + ), + Index: input.Index, }, getOperator(ctx)) if err != nil { return nil, err @@ -142,9 +204,18 @@ func (r *mutationResolver) MovePropertyItem(ctx context.Context, input gqlmodel. } func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmodel.RemovePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + p, err := usecases(ctx).Property.RemoveItem(ctx, interfaces.RemovePropertyItemParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, &input.ItemID, nil), + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + lo.ToPtr(id.PropertySchemaGroupID(input.SchemaGroupID)), + &input.ItemID, + nil, + ), }, getOperator(ctx)) if err != nil { return nil, err @@ -156,28 +227,39 @@ func (r *mutationResolver) RemovePropertyItem(ctx context.Context, input gqlmode } func (r *mutationResolver) UpdatePropertyItems(ctx context.Context, input gqlmodel.UpdatePropertyItemInput) (*gqlmodel.PropertyItemPayload, error) { - op := make([]interfaces.UpdatePropertyItemsOperationParam, 0, len(input.Operations)) - for _, o := range input.Operations { + pid, err := gqlmodel.ToID[id.Property](input.PropertyID) + if err != nil { + return nil, err + } + + operations, err := util.TryMap(input.Operations, func(o *gqlmodel.UpdatePropertyItemOperationInput) (interfaces.UpdatePropertyItemsOperationParam, error) { var v *property.Value if o.NameFieldType != nil { v = gqlmodel.FromPropertyValueAndType(o.NameFieldValue, *o.NameFieldType) if v == nil { - return nil, errors.New("invalid name field value") + return interfaces.UpdatePropertyItemsOperationParam{}, errors.New("invalid name field value") } } - op = append(op, interfaces.UpdatePropertyItemsOperationParam{ + return interfaces.UpdatePropertyItemsOperationParam{ Operation: gqlmodel.FromListOperation(o.Operation), - ItemID: 
id.PropertyItemIDFromRefID(o.ItemID), + ItemID: gqlmodel.ToIDRef[id.PropertyItem](o.ItemID), Index: o.Index, NameFieldValue: v, - }) + }, nil + }) + if err != nil { + return nil, err } p, err2 := usecases(ctx).Property.UpdateItems(ctx, interfaces.UpdatePropertyItemsParam{ - PropertyID: id.PropertyID(input.PropertyID), - Pointer: gqlmodel.FromPointer(&input.SchemaGroupID, nil, nil), - Operations: op, + PropertyID: pid, + Pointer: gqlmodel.FromPointer( + lo.ToPtr(id.PropertySchemaGroupID(input.SchemaGroupID)), + nil, + nil, + ), + Operations: operations, }, getOperator(ctx)) if err2 != nil { return nil, err2 diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index 26377eeea..8c4e5ee55 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ b/internal/adapter/gql/resolver_mutation_scene.go @@ -12,11 +12,12 @@ import ( ) func (r *mutationResolver) CreateScene(ctx context.Context, input gqlmodel.CreateSceneInput) (*gqlmodel.CreateScenePayload, error) { - res, err := usecases(ctx).Scene.Create( - ctx, - id.ProjectID(input.ProjectID), - getOperator(ctx), - ) + pid, err := gqlmodel.ToID[id.Project](input.ProjectID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Scene.Create(ctx, pid, getOperator(ctx)) if err != nil { return nil, err } @@ -27,10 +28,20 @@ func (r *mutationResolver) CreateScene(ctx context.Context, input gqlmodel.Creat } func (r *mutationResolver) AddWidget(ctx context.Context, input gqlmodel.AddWidgetInput) (*gqlmodel.AddWidgetPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + scene, widget, err := usecases(ctx).Scene.AddWidget( ctx, - id.SceneID(input.SceneID), - input.PluginID, + sid, + pid, id.PluginExtensionID(input.ExtensionID), getOperator(ctx), ) @@ -45,9 +56,14 @@ func (r *mutationResolver) AddWidget(ctx 
context.Context, input gqlmodel.AddWidg } func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.UpdateWidgetInput) (*gqlmodel.UpdateWidgetPayload, error) { + sid, wid, err := gqlmodel.ToID2[id.Scene, id.Widget](input.SceneID, input.WidgetID) + if err != nil { + return nil, err + } + scene, widget, err := usecases(ctx).Scene.UpdateWidget(ctx, interfaces.UpdateWidgetParam{ - SceneID: id.SceneID(input.SceneID), - WidgetID: id.WidgetID(input.WidgetID), + SceneID: sid, + WidgetID: wid, Enabled: input.Enabled, Extended: input.Extended, Location: gqlmodel.FromSceneWidgetLocation(input.Location), @@ -64,9 +80,14 @@ func (r *mutationResolver) UpdateWidget(ctx context.Context, input gqlmodel.Upda } func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.RemoveWidgetInput) (*gqlmodel.RemoveWidgetPayload, error) { + sid, wid, err := gqlmodel.ToID2[id.Scene, id.Widget](input.SceneID, input.WidgetID) + if err != nil { + return nil, err + } + scene, err := usecases(ctx).Scene.RemoveWidget(ctx, - id.SceneID(input.SceneID), - id.WidgetID(input.WidgetID), + sid, + wid, getOperator(ctx), ) if err != nil { @@ -80,8 +101,13 @@ func (r *mutationResolver) RemoveWidget(ctx context.Context, input gqlmodel.Remo } func (r *mutationResolver) UpdateWidgetAlignSystem(ctx context.Context, input gqlmodel.UpdateWidgetAlignSystemInput) (*gqlmodel.UpdateWidgetAlignSystemPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + scene, err := usecases(ctx).Scene.UpdateWidgetAlignSystem(ctx, interfaces.UpdateWidgetAlignSystemParam{ - SceneID: id.SceneID(input.SceneID), + SceneID: sid, Location: *gqlmodel.FromSceneWidgetLocation(input.Location), Align: gqlmodel.FromWidgetAlignType(input.Align), }, getOperator(ctx)) @@ -95,33 +121,43 @@ func (r *mutationResolver) UpdateWidgetAlignSystem(ctx context.Context, input gq } func (r *mutationResolver) InstallPlugin(ctx context.Context, input 
gqlmodel.InstallPluginInput) (*gqlmodel.InstallPluginPayload, error) { - scene, pl, pr, err := usecases(ctx).Scene.InstallPlugin(ctx, - id.SceneID(input.SceneID), - input.PluginID, - getOperator(ctx), - ) + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + + scene, pl, pr, err := usecases(ctx).Scene.InstallPlugin(ctx, sid, pid, getOperator(ctx)) if err != nil { return nil, err } return &gqlmodel.InstallPluginPayload{ Scene: gqlmodel.ToScene(scene), ScenePlugin: &gqlmodel.ScenePlugin{ - PluginID: pl, - PropertyID: pr.IDRef(), + PluginID: gqlmodel.IDFromPluginID(pl), + PropertyID: gqlmodel.IDFromRef(pr), }, }, nil } func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.UploadPluginInput) (*gqlmodel.UploadPluginPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + operator := getOperator(ctx) var p *plugin.Plugin var s *scene.Scene - var err error if input.File != nil { - p, s, err = usecases(ctx).Plugin.Upload(ctx, input.File.File, id.SceneID(input.SceneID), operator) + p, s, err = usecases(ctx).Plugin.Upload(ctx, input.File.File, sid, operator) } else if input.URL != nil { - p, s, err = usecases(ctx).Plugin.UploadFromRemote(ctx, input.URL, id.SceneID(input.SceneID), operator) + p, s, err = usecases(ctx).Plugin.UploadFromRemote(ctx, input.URL, sid, operator) } else { return nil, errors.New("either file or url is required") } @@ -137,11 +173,17 @@ func (r *mutationResolver) UploadPlugin(ctx context.Context, input gqlmodel.Uplo } func (r *mutationResolver) UninstallPlugin(ctx context.Context, input gqlmodel.UninstallPluginInput) (*gqlmodel.UninstallPluginPayload, error) { - scene, err := usecases(ctx).Scene.UninstallPlugin(ctx, - id.SceneID(input.SceneID), - id.PluginID(input.PluginID), - getOperator(ctx), - ) + sid, err := 
gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, err := gqlmodel.ToPluginID(input.PluginID) + if err != nil { + return nil, err + } + + scene, err := usecases(ctx).Scene.UninstallPlugin(ctx, sid, pid, getOperator(ctx)) if err != nil { return nil, err } @@ -153,10 +195,20 @@ func (r *mutationResolver) UninstallPlugin(ctx context.Context, input gqlmodel.U } func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.UpgradePluginInput) (*gqlmodel.UpgradePluginPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + pid, topid, err := gqlmodel.ToPluginID2(input.PluginID, input.ToPluginID) + if err != nil { + return nil, err + } + s, err := usecases(ctx).Scene.UpgradePlugin(ctx, - id.SceneID(input.SceneID), - input.PluginID, - input.ToPluginID, + sid, + pid, + topid, getOperator(ctx), ) if err != nil { @@ -165,12 +217,17 @@ func (r *mutationResolver) UpgradePlugin(ctx context.Context, input gqlmodel.Upg return &gqlmodel.UpgradePluginPayload{ Scene: gqlmodel.ToScene(s), - ScenePlugin: gqlmodel.ToScenePlugin(s.Plugins().Plugin(input.ToPluginID)), + ScenePlugin: gqlmodel.ToScenePlugin(s.Plugins().Plugin(topid)), }, nil } func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClusterInput) (*gqlmodel.AddClusterPayload, error) { - s, c, err := usecases(ctx).Scene.AddCluster(ctx, id.SceneID(input.SceneID), input.Name, getOperator(ctx)) + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + s, c, err := usecases(ctx).Scene.AddCluster(ctx, sid, input.Name, getOperator(ctx)) if err != nil { return nil, err } @@ -182,11 +239,16 @@ func (r *mutationResolver) AddCluster(ctx context.Context, input gqlmodel.AddClu } func (r *mutationResolver) UpdateCluster(ctx context.Context, input gqlmodel.UpdateClusterInput) (*gqlmodel.UpdateClusterPayload, error) { + sid, cid, err := gqlmodel.ToID2[id.Scene, 
id.Cluster](input.SceneID, input.ClusterID) + if err != nil { + return nil, err + } + s, c, err := usecases(ctx).Scene.UpdateCluster(ctx, interfaces.UpdateClusterParam{ - ClusterID: id.ClusterID(input.ClusterID), - SceneID: id.SceneID(input.SceneID), + ClusterID: cid, + SceneID: sid, Name: input.Name, - PropertyID: id.PropertyIDFromRefID(input.PropertyID), + PropertyID: gqlmodel.ToIDRef[id.Property](input.PropertyID), }, getOperator(ctx)) if err != nil { return nil, err @@ -199,7 +261,12 @@ func (r *mutationResolver) UpdateCluster(ctx context.Context, input gqlmodel.Upd } func (r *mutationResolver) RemoveCluster(ctx context.Context, input gqlmodel.RemoveClusterInput) (*gqlmodel.RemoveClusterPayload, error) { - s, err := usecases(ctx).Scene.RemoveCluster(ctx, id.SceneID(input.SceneID), id.ClusterID(input.ClusterID), getOperator(ctx)) + sid, cid, err := gqlmodel.ToID2[id.Scene, id.Cluster](input.SceneID, input.ClusterID) + if err != nil { + return nil, err + } + + s, err := usecases(ctx).Scene.RemoveCluster(ctx, sid, cid, getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_tag.go b/internal/adapter/gql/resolver_mutation_tag.go index 0ed5c8cef..dcfdaceed 100644 --- a/internal/adapter/gql/resolver_mutation_tag.go +++ b/internal/adapter/gql/resolver_mutation_tag.go @@ -6,16 +6,22 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/util" ) func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.CreateTagItemInput) (*gqlmodel.CreateTagItemPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + tag, parent, err := usecases(ctx).Tag.CreateItem(ctx, interfaces.CreateTagItemParam{ Label: input.Label, - SceneID: id.SceneID(input.SceneID), - Parent: 
id.TagIDFromRefID(input.Parent), - LinkedDatasetSchemaID: id.DatasetSchemaIDFromRefID(input.LinkedDatasetSchemaID), - LinkedDatasetID: id.DatasetIDFromRefID(input.LinkedDatasetID), - LinkedDatasetField: id.DatasetSchemaFieldIDFromRefID(input.LinkedDatasetField), + SceneID: sid, + Parent: gqlmodel.ToIDRef[id.Tag](input.Parent), + LinkedDatasetSchemaID: gqlmodel.ToIDRef[id.DatasetSchema](input.LinkedDatasetSchemaID), + LinkedDatasetID: gqlmodel.ToIDRef[id.Dataset](input.LinkedDatasetID), + LinkedDatasetField: gqlmodel.ToIDRef[id.DatasetField](input.LinkedDatasetField), }, getOperator(ctx)) if err != nil { return nil, err @@ -28,10 +34,20 @@ func (r *mutationResolver) CreateTagItem(ctx context.Context, input gqlmodel.Cre } func (r *mutationResolver) CreateTagGroup(ctx context.Context, input gqlmodel.CreateTagGroupInput) (*gqlmodel.CreateTagGroupPayload, error) { + sid, err := gqlmodel.ToID[id.Scene](input.SceneID) + if err != nil { + return nil, err + } + + tags, err := util.TryMap(input.Tags, gqlmodel.ToID[id.Tag]) + if err != nil { + return nil, err + } + tag, err := usecases(ctx).Tag.CreateGroup(ctx, interfaces.CreateTagGroupParam{ Label: input.Label, - SceneID: id.SceneID(input.SceneID), - Tags: id.TagIDsFromIDRef(input.Tags), + SceneID: sid, + Tags: tags, }, getOperator(ctx)) if err != nil { return nil, err @@ -42,9 +58,14 @@ func (r *mutationResolver) CreateTagGroup(ctx context.Context, input gqlmodel.Cr } func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateTagInput) (*gqlmodel.UpdateTagPayload, error) { + tid, err := gqlmodel.ToID[id.Tag](input.TagID) + if err != nil { + return nil, err + } + tag, err := usecases(ctx).Tag.UpdateTag(ctx, interfaces.UpdateTagParam{ Label: input.Label, - TagID: id.TagID(input.TagID), + TagID: tid, }, getOperator(ctx)) if err != nil { return nil, err @@ -55,9 +76,14 @@ func (r *mutationResolver) UpdateTag(ctx context.Context, input gqlmodel.UpdateT } func (r *mutationResolver) AttachTagItemToGroup(ctx 
context.Context, input gqlmodel.AttachTagItemToGroupInput) (*gqlmodel.AttachTagItemToGroupPayload, error) { + iid, gid, err := gqlmodel.ToID2[id.Tag, id.Tag](input.ItemID, input.GroupID) + if err != nil { + return nil, err + } + tag, err := usecases(ctx).Tag.AttachItemToGroup(ctx, interfaces.AttachItemToGroupParam{ - ItemID: id.TagID(input.ItemID), - GroupID: id.TagID(input.GroupID), + ItemID: iid, + GroupID: gid, }, getOperator(ctx)) if err != nil { return nil, err @@ -68,9 +94,14 @@ func (r *mutationResolver) AttachTagItemToGroup(ctx context.Context, input gqlmo } func (r *mutationResolver) DetachTagItemFromGroup(ctx context.Context, input gqlmodel.DetachTagItemFromGroupInput) (*gqlmodel.DetachTagItemFromGroupPayload, error) { + iid, gid, err := gqlmodel.ToID2[id.Tag, id.Tag](input.ItemID, input.GroupID) + if err != nil { + return nil, err + } + tag, err := usecases(ctx).Tag.DetachItemFromGroup(ctx, interfaces.DetachItemToGroupParam{ - ItemID: id.TagID(input.ItemID), - GroupID: id.TagID(input.GroupID), + ItemID: iid, + GroupID: gid, }, getOperator(ctx)) if err != nil { return nil, err @@ -81,7 +112,12 @@ func (r *mutationResolver) DetachTagItemFromGroup(ctx context.Context, input gql } func (r *mutationResolver) RemoveTag(ctx context.Context, input gqlmodel.RemoveTagInput) (*gqlmodel.RemoveTagPayload, error) { - tagID, layers, err := usecases(ctx).Tag.Remove(ctx, id.TagID(input.TagID), getOperator(ctx)) + tid, err := gqlmodel.ToID[id.Tag](input.TagID) + if err != nil { + return nil, err + } + + _, layers, err := usecases(ctx).Tag.Remove(ctx, tid, getOperator(ctx)) if err != nil { return nil, err } @@ -96,7 +132,7 @@ func (r *mutationResolver) RemoveTag(ctx context.Context, input gqlmodel.RemoveT } return &gqlmodel.RemoveTagPayload{ - TagID: tagID.ID(), + TagID: input.TagID, UpdatedLayers: updatedLayers, }, nil } diff --git a/internal/adapter/gql/resolver_mutation_team.go b/internal/adapter/gql/resolver_mutation_team.go index 0e4037a17..46e78e035 100644 --- 
a/internal/adapter/gql/resolver_mutation_team.go +++ b/internal/adapter/gql/resolver_mutation_team.go @@ -17,7 +17,12 @@ func (r *mutationResolver) CreateTeam(ctx context.Context, input gqlmodel.Create } func (r *mutationResolver) DeleteTeam(ctx context.Context, input gqlmodel.DeleteTeamInput) (*gqlmodel.DeleteTeamPayload, error) { - if err := usecases(ctx).Team.Remove(ctx, id.TeamID(input.TeamID), getOperator(ctx)); err != nil { + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + + if err := usecases(ctx).Team.Remove(ctx, tid, getOperator(ctx)); err != nil { return nil, err } @@ -25,7 +30,12 @@ func (r *mutationResolver) DeleteTeam(ctx context.Context, input gqlmodel.Delete } func (r *mutationResolver) UpdateTeam(ctx context.Context, input gqlmodel.UpdateTeamInput) (*gqlmodel.UpdateTeamPayload, error) { - res, err := usecases(ctx).Team.Update(ctx, id.TeamID(input.TeamID), input.Name, getOperator(ctx)) + tid, err := gqlmodel.ToID[id.Team](input.TeamID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.Update(ctx, tid, input.Name, getOperator(ctx)) if err != nil { return nil, err } @@ -34,7 +44,12 @@ func (r *mutationResolver) UpdateTeam(ctx context.Context, input gqlmodel.Update } func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input gqlmodel.AddMemberToTeamInput) (*gqlmodel.AddMemberToTeamPayload, error) { - res, err := usecases(ctx).Team.AddMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) + tid, uid, err := gqlmodel.ToID2[id.Team, id.User](input.TeamID, input.UserID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.AddMember(ctx, tid, uid, gqlmodel.FromRole(input.Role), getOperator(ctx)) if err != nil { return nil, err } @@ -43,7 +58,12 @@ func (r *mutationResolver) AddMemberToTeam(ctx context.Context, input gqlmodel.A } func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input 
gqlmodel.RemoveMemberFromTeamInput) (*gqlmodel.RemoveMemberFromTeamPayload, error) { - res, err := usecases(ctx).Team.RemoveMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), getOperator(ctx)) + tid, uid, err := gqlmodel.ToID2[id.Team, id.User](input.TeamID, input.UserID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.RemoveMember(ctx, tid, uid, getOperator(ctx)) if err != nil { return nil, err } @@ -52,7 +72,12 @@ func (r *mutationResolver) RemoveMemberFromTeam(ctx context.Context, input gqlmo } func (r *mutationResolver) UpdateMemberOfTeam(ctx context.Context, input gqlmodel.UpdateMemberOfTeamInput) (*gqlmodel.UpdateMemberOfTeamPayload, error) { - res, err := usecases(ctx).Team.UpdateMember(ctx, id.TeamID(input.TeamID), id.UserID(input.UserID), gqlmodel.FromRole(input.Role), getOperator(ctx)) + tid, uid, err := gqlmodel.ToID2[id.Team, id.User](input.TeamID, input.UserID) + if err != nil { + return nil, err + } + + res, err := usecases(ctx).Team.UpdateMember(ctx, tid, uid, gqlmodel.FromRole(input.Role), getOperator(ctx)) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_mutation_user.go b/internal/adapter/gql/resolver_mutation_user.go index ba86f06ad..ce70025f0 100644 --- a/internal/adapter/gql/resolver_mutation_user.go +++ b/internal/adapter/gql/resolver_mutation_user.go @@ -7,35 +7,28 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/user" ) func (r *mutationResolver) Signup(ctx context.Context, input gqlmodel.SignupInput) (*gqlmodel.SignupPayload, error) { au := adapter.GetAuthInfo(ctx) - - var u *user.User - var t *user.Team - var err error - - if au != nil { - u, t, err = usecases(ctx).User.SignupOIDC(ctx, interfaces.SignupOIDCParam{ - Sub: au.Sub, - AccessToken: au.Token, - Issuer: au.Iss, - Email: au.Email, - Name: 
au.Name, - Secret: input.Secret, - User: interfaces.SignupUserParam{ - Lang: input.Lang, - Theme: gqlmodel.ToTheme(input.Theme), - UserID: id.UserIDFromRefID(input.UserID), - TeamID: id.TeamIDFromRefID(input.TeamID), - }, - }) - } else { + if au == nil { return nil, interfaces.ErrOperationDenied } + u, t, err := usecases(ctx).User.SignupOIDC(ctx, interfaces.SignupOIDCParam{ + Sub: au.Sub, + AccessToken: au.Token, + Issuer: au.Iss, + Email: au.Email, + Name: au.Name, + Secret: input.Secret, + User: interfaces.SignupUserParam{ + Lang: input.Lang, + Theme: gqlmodel.ToTheme(input.Theme), + UserID: gqlmodel.ToIDRef[id.User](input.UserID), + TeamID: gqlmodel.ToIDRef[id.Team](input.TeamID), + }, + }) if err != nil { return nil, err } @@ -56,7 +49,7 @@ func (r *mutationResolver) UpdateMe(ctx context.Context, input gqlmodel.UpdateMe return nil, err } - return &gqlmodel.UpdateMePayload{User: gqlmodel.ToUser(res)}, nil + return &gqlmodel.UpdateMePayload{Me: gqlmodel.ToMe(res)}, nil } func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input gqlmodel.RemoveMyAuthInput) (*gqlmodel.UpdateMePayload, error) { @@ -65,11 +58,16 @@ func (r *mutationResolver) RemoveMyAuth(ctx context.Context, input gqlmodel.Remo return nil, err } - return &gqlmodel.UpdateMePayload{User: gqlmodel.ToUser(res)}, nil + return &gqlmodel.UpdateMePayload{Me: gqlmodel.ToMe(res)}, nil } func (r *mutationResolver) DeleteMe(ctx context.Context, input gqlmodel.DeleteMeInput) (*gqlmodel.DeleteMePayload, error) { - if err := usecases(ctx).User.DeleteMe(ctx, id.UserID(input.UserID), getOperator(ctx)); err != nil { + uid, err := gqlmodel.ToID[id.User](input.UserID) + if err != nil { + return nil, err + } + + if err := usecases(ctx).User.DeleteMe(ctx, uid, getOperator(ctx)); err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_plugin.go b/internal/adapter/gql/resolver_plugin.go index 7440fb18a..c46945134 100644 --- a/internal/adapter/gql/resolver_plugin.go +++ 
b/internal/adapter/gql/resolver_plugin.go @@ -4,7 +4,7 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" + "golang.org/x/text/language" ) func (r *Resolver) Plugin() PluginResolver { @@ -28,28 +28,28 @@ func (r *pluginResolver) Scene(ctx context.Context, obj *gqlmodel.Plugin) (*gqlm if obj.SceneID == nil { return nil, nil } - return dataloaders(ctx).Scene.Load(id.SceneID(*obj.SceneID)) + return dataloaders(ctx).Scene.Load(*obj.SceneID) } -func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Plugin, sceneID *id.ID) (*gqlmodel.ScenePlugin, error) { +func (r *pluginResolver) ScenePlugin(ctx context.Context, obj *gqlmodel.Plugin, sceneID *gqlmodel.ID) (*gqlmodel.ScenePlugin, error) { if sceneID == nil && obj.SceneID != nil { sceneID = obj.SceneID } if sceneID == nil { return nil, nil } - s, err := dataloaders(ctx).Scene.Load(id.SceneID(*sceneID)) + s, err := dataloaders(ctx).Scene.Load(*sceneID) return s.Plugin(obj.ID), err } -func (r *pluginResolver) TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) { +func (r *pluginResolver) TranslatedName(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { return s, nil } return obj.Name, nil } -func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *string) (string, error) { +func (r *pluginResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.Plugin, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { return s, nil } @@ -66,19 +66,19 @@ func (r *pluginExtensionResolver) PropertySchema(ctx context.Context, obj *gqlmo return dataloaders(ctx).PropertySchema.Load(obj.PropertySchemaID) } -func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID id.ID) 
(*gqlmodel.SceneWidget, error) { - s, err := dataloaders(ctx).Scene.Load(id.SceneID(sceneID)) +func (r *pluginExtensionResolver) SceneWidget(ctx context.Context, obj *gqlmodel.PluginExtension, sceneID gqlmodel.ID) (*gqlmodel.SceneWidget, error) { + s, err := dataloaders(ctx).Scene.Load(sceneID) return s.Widget(obj.PluginID, obj.ExtensionID), err } -func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { +func (r *pluginExtensionResolver) TranslatedName(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedName[getLang(ctx, lang)]; ok { return s, nil } return obj.Name, nil } -func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *string) (string, error) { +func (r *pluginExtensionResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PluginExtension, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { return s, nil } diff --git a/internal/adapter/gql/resolver_project.go b/internal/adapter/gql/resolver_project.go index fe8cb010e..6aa38add1 100644 --- a/internal/adapter/gql/resolver_project.go +++ b/internal/adapter/gql/resolver_project.go @@ -4,7 +4,6 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" ) @@ -15,11 +14,11 @@ func (r *Resolver) Project() ProjectResolver { type projectResolver struct{ *Resolver } func (r *projectResolver) Team(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Team, error) { - return dataloaders(ctx).Team.Load(id.TeamID(obj.TeamID)) + return dataloaders(ctx).Team.Load(obj.TeamID) } func (r *projectResolver) Scene(ctx context.Context, obj *gqlmodel.Project) (*gqlmodel.Scene, error) { - s, err := loaders(ctx).Scene.FindByProject(ctx, 
id.ProjectID(obj.ID)) + s, err := loaders(ctx).Scene.FindByProject(ctx, obj.ID) if err != nil && err != rerror.ErrNotFound { return nil, err } diff --git a/internal/adapter/gql/resolver_property.go b/internal/adapter/gql/resolver_property.go index 8ccd73b93..26a539bc5 100644 --- a/internal/adapter/gql/resolver_property.go +++ b/internal/adapter/gql/resolver_property.go @@ -5,7 +5,6 @@ import ( "errors" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/rerror" ) @@ -48,7 +47,7 @@ func (r *propertyResolver) Schema(ctx context.Context, obj *gqlmodel.Property) ( } func (r *propertyResolver) Layer(ctx context.Context, obj *gqlmodel.Property) (gqlmodel.Layer, error) { - l, err := loaders(ctx).Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) + l, err := loaders(ctx).Layer.FetchByProperty(ctx, obj.ID) if err != nil || errors.Is(err, rerror.ErrNotFound) { return nil, nil } @@ -56,7 +55,7 @@ func (r *propertyResolver) Layer(ctx context.Context, obj *gqlmodel.Property) (g } func (r *propertyResolver) Merged(ctx context.Context, obj *gqlmodel.Property) (*gqlmodel.MergedProperty, error) { - l, err := loaders(ctx).Layer.FetchByProperty(ctx, id.PropertyID(obj.ID)) + l, err := loaders(ctx).Layer.FetchByProperty(ctx, obj.ID) if err != nil { if errors.Is(err, rerror.ErrNotFound) { return nil, nil @@ -85,7 +84,7 @@ func (r *propertyResolver) Merged(ctx context.Context, obj *gqlmodel.Property) ( type propertyFieldResolver struct{ *Resolver } func (r *propertyFieldResolver) Parent(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.Property, error) { - return dataloaders(ctx).Property.Load(id.PropertyID(obj.ParentID)) + return dataloaders(ctx).Property.Load(obj.ParentID) } func (r *propertyFieldResolver) Schema(ctx context.Context, obj *gqlmodel.PropertyField) (*gqlmodel.PropertySchema, error) { @@ -111,14 +110,14 @@ func (r *propertyFieldLinkResolver) Dataset(ctx 
context.Context, obj *gqlmodel.P if obj.DatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + return dataloaders(ctx).Dataset.Load(*obj.DatasetID) } func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetField, error) { if obj.DatasetID == nil { return nil, nil } - d, err := dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.DatasetID)) + d, err := dataloaders(ctx).Dataset.Load(*obj.DatasetID) if err != nil { return nil, err } @@ -126,11 +125,11 @@ func (r *propertyFieldLinkResolver) DatasetField(ctx context.Context, obj *gqlmo } func (r *propertyFieldLinkResolver) DatasetSchema(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchema, error) { - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + return dataloaders(ctx).DatasetSchema.Load(obj.DatasetSchemaID) } func (r *propertyFieldLinkResolver) DatasetSchemaField(ctx context.Context, obj *gqlmodel.PropertyFieldLink) (*gqlmodel.DatasetSchemaField, error) { - ds, err := dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(obj.DatasetSchemaID)) + ds, err := dataloaders(ctx).DatasetSchema.Load(obj.DatasetSchemaID) return ds.Field(obj.DatasetSchemaFieldID), err } @@ -140,20 +139,20 @@ func (r *mergedPropertyResolver) Original(ctx context.Context, obj *gqlmodel.Mer if obj.OriginalID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + return dataloaders(ctx).Property.Load(*obj.OriginalID) } func (r *mergedPropertyResolver) Parent(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.Property, error) { if obj.ParentID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + return dataloaders(ctx).Property.Load(*obj.ParentID) } func (r *mergedPropertyResolver) Schema(ctx context.Context, obj *gqlmodel.MergedProperty) (*gqlmodel.PropertySchema, error) 
{ if obj.SchemaID == nil { if propertyID := obj.PropertyID(); propertyID != nil { - property, err := dataloaders(ctx).Property.Load(id.PropertyID(*propertyID)) + property, err := dataloaders(ctx).Property.Load(*propertyID) if err != nil { return nil, err } @@ -171,7 +170,7 @@ func (r *mergedPropertyResolver) LinkedDataset(ctx context.Context, obj *gqlmode if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) } func (r *mergedPropertyResolver) Groups(ctx context.Context, obj *gqlmodel.MergedProperty) ([]*gqlmodel.MergedPropertyGroup, error) { @@ -191,7 +190,7 @@ func (r *mergedPropertyGroupResolver) Original(ctx context.Context, obj *gqlmode if obj.OriginalID == nil || obj.OriginalPropertyID == nil { return nil, nil } - p, err := dataloaders(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + p, err := dataloaders(ctx).Property.Load(*obj.OriginalID) if err != nil { return nil, err } @@ -205,7 +204,7 @@ func (r *mergedPropertyGroupResolver) Parent(ctx context.Context, obj *gqlmodel. 
if obj.ParentID == nil || obj.ParentPropertyID == nil { return nil, nil } - p, err := dataloaders(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + p, err := dataloaders(ctx).Property.Load(*obj.ParentID) if err != nil { return nil, err } @@ -219,20 +218,20 @@ func (r *mergedPropertyGroupResolver) OriginalProperty(ctx context.Context, obj if obj.OriginalID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.OriginalID)) + return dataloaders(ctx).Property.Load(*obj.OriginalID) } func (r *mergedPropertyGroupResolver) ParentProperty(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.Property, error) { if obj.ParentID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.ParentID)) + return dataloaders(ctx).Property.Load(*obj.ParentID) } func (r *mergedPropertyGroupResolver) Schema(ctx context.Context, obj *gqlmodel.MergedPropertyGroup) (*gqlmodel.PropertySchema, error) { if obj.SchemaID == nil { if propertyID := obj.PropertyID(); propertyID != nil { - property, err := dataloaders(ctx).Property.Load(id.PropertyID(*propertyID)) + property, err := dataloaders(ctx).Property.Load(*propertyID) if err != nil { return nil, err } @@ -250,7 +249,7 @@ func (r *mergedPropertyGroupResolver) LinkedDataset(ctx context.Context, obj *gq if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) } type mergedPropertyFieldResolver struct{ *Resolver } @@ -305,7 +304,7 @@ func actualValue(datasetLoader DatasetDataLoader, value interface{}, links []*gq if len(links) > 1 && links[0].DatasetID != nil && links[len(links)-1].DatasetID == nil { dsid := *links[0].DatasetID for i, link := range links { - ds, err := datasetLoader.Load(id.DatasetID(dsid)) + ds, err := datasetLoader.Load(dsid) if err != nil { return nil, err } @@ -317,9 +316,9 @@ func actualValue(datasetLoader 
DatasetDataLoader, value interface{}, links []*gq return nil, nil } if field.Value != nil { - val, ok := (field.Value).(id.ID) + val, ok := (field.Value).(string) if ok { - dsid = val + dsid = gqlmodel.ID(val) } else { return nil, nil } @@ -330,7 +329,7 @@ func actualValue(datasetLoader DatasetDataLoader, value interface{}, links []*gq } } else if lastLink := links[len(links)-1]; lastLink.DatasetID != nil { // ไธ€็•ชๆœ€ๅพŒใฎใƒชใƒณใ‚ฏใ‚’ๅ–ๅพ— - ds, err := datasetLoader.Load(id.DatasetID(*lastLink.DatasetID)) + ds, err := datasetLoader.Load(*lastLink.DatasetID) if err != nil { return nil, err } diff --git a/internal/adapter/gql/resolver_property_schema.go b/internal/adapter/gql/resolver_property_schema.go index 433c60221..5dea300b4 100644 --- a/internal/adapter/gql/resolver_property_schema.go +++ b/internal/adapter/gql/resolver_property_schema.go @@ -4,6 +4,7 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" + "golang.org/x/text/language" ) func (r *Resolver) PropertySchemaField() PropertySchemaFieldResolver { @@ -24,14 +25,14 @@ func (r *Resolver) PropertySchemaGroup() PropertySchemaGroupResolver { type propertySchemaFieldResolver struct{ *Resolver } -func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { +func (r *propertySchemaFieldResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } return obj.Title, nil } -func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *string) (string, error) { +func (r *propertySchemaFieldResolver) TranslatedDescription(ctx context.Context, obj *gqlmodel.PropertySchemaField, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedDescription[getLang(ctx, lang)]; ok { return s, nil } @@ -66,7 
+67,7 @@ func (r *propertySchemaGroupResolver) Schema(ctx context.Context, obj *gqlmodel. return dataloaders(ctx).PropertySchema.Load(obj.SchemaID) } -func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *string) (string, error) { +func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaGroup, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } @@ -79,7 +80,7 @@ func (r *propertySchemaGroupResolver) TranslatedTitle(ctx context.Context, obj * type propertySchemaFieldChoiceResolver struct{ *Resolver } -func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *string) (string, error) { +func (r *propertySchemaFieldChoiceResolver) TranslatedTitle(ctx context.Context, obj *gqlmodel.PropertySchemaFieldChoice, lang *language.Tag) (string, error) { if s, ok := obj.AllTranslatedTitle[getLang(ctx, lang)]; ok { return s, nil } diff --git a/internal/adapter/gql/resolver_query.go b/internal/adapter/gql/resolver_query.go index 9adad2b10..f3258f931 100644 --- a/internal/adapter/gql/resolver_query.go +++ b/internal/adapter/gql/resolver_query.go @@ -5,7 +5,6 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" ) func (r *Resolver) Query() QueryResolver { @@ -14,77 +13,77 @@ func (r *Resolver) Query() QueryResolver { type queryResolver struct{ *Resolver } -func (r *queryResolver) Assets(ctx context.Context, teamID id.ID, keyword *string, sortType *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, error) { +func (r *queryResolver) Assets(ctx context.Context, teamID gqlmodel.ID, keyword *string, sortType *gqlmodel.AssetSortType, pagination *gqlmodel.Pagination) (*gqlmodel.AssetConnection, 
error) { return loaders(ctx).Asset.FindByTeam(ctx, teamID, keyword, gqlmodel.AssetSortTypeFrom(sortType), pagination) } -func (r *queryResolver) Me(ctx context.Context) (*gqlmodel.User, error) { +func (r *queryResolver) Me(ctx context.Context) (*gqlmodel.Me, error) { u := getUser(ctx) if u == nil { return nil, nil } - return gqlmodel.ToUser(u), nil + return gqlmodel.ToMe(u), nil } -func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) { +func (r *queryResolver) Node(ctx context.Context, i gqlmodel.ID, typeArg gqlmodel.NodeType) (gqlmodel.Node, error) { dataloaders := dataloaders(ctx) switch typeArg { case gqlmodel.NodeTypeAsset: - result, err := dataloaders.Asset.Load(id.AssetID(i)) + result, err := dataloaders.Asset.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeDataset: - result, err := dataloaders.Dataset.Load(id.DatasetID(i)) + result, err := dataloaders.Dataset.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeDatasetSchema: - result, err := dataloaders.DatasetSchema.Load(id.DatasetSchemaID(i)) + result, err := dataloaders.DatasetSchema.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeLayerItem: - result, err := dataloaders.LayerItem.Load(id.LayerID(i)) + result, err := dataloaders.LayerItem.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeLayerGroup: - result, err := dataloaders.LayerGroup.Load(id.LayerID(i)) + result, err := dataloaders.LayerGroup.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeProject: - result, err := dataloaders.Project.Load(id.ProjectID(i)) + result, err := dataloaders.Project.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeProperty: - result, err := dataloaders.Property.Load(id.PropertyID(i)) + result, err := dataloaders.Property.Load(i) if result == nil { 
return nil, nil } return result, err case gqlmodel.NodeTypeScene: - result, err := dataloaders.Scene.Load(id.SceneID(i)) + result, err := dataloaders.Scene.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeTeam: - result, err := dataloaders.Team.Load(id.TeamID(i)) + result, err := dataloaders.Team.Load(i) if result == nil { return nil, nil } return result, err case gqlmodel.NodeTypeUser: - result, err := dataloaders.User.Load(id.UserID(i)) + result, err := dataloaders.User.Load(i) if result == nil { return nil, nil } @@ -93,11 +92,11 @@ func (r *queryResolver) Node(ctx context.Context, i id.ID, typeArg gqlmodel.Node return nil, nil } -func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) { +func (r *queryResolver) Nodes(ctx context.Context, ids []gqlmodel.ID, typeArg gqlmodel.NodeType) ([]gqlmodel.Node, error) { dataloaders := dataloaders(ctx) switch typeArg { case gqlmodel.NodeTypeAsset: - data, err := dataloaders.Asset.LoadAll(id.AssetIDsFromIDRef(ids)) + data, err := dataloaders.Asset.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -107,7 +106,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeDataset: - data, err := dataloaders.Dataset.LoadAll(id.DatasetIDsFromIDRef(ids)) + data, err := dataloaders.Dataset.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -117,7 +116,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeDatasetSchema: - data, err := dataloaders.DatasetSchema.LoadAll(id.DatasetSchemaIDsFromIDRef(ids)) + data, err := dataloaders.DatasetSchema.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -127,7 +126,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case 
gqlmodel.NodeTypeLayerItem: - data, err := dataloaders.LayerItem.LoadAll(id.LayerIDsFromIDRef(ids)) + data, err := dataloaders.LayerItem.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -137,7 +136,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeLayerGroup: - data, err := dataloaders.LayerGroup.LoadAll(id.LayerIDsFromIDRef(ids)) + data, err := dataloaders.LayerGroup.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -147,7 +146,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeProject: - data, err := dataloaders.Project.LoadAll(id.ProjectIDsFromIDRef(ids)) + data, err := dataloaders.Project.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -157,7 +156,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeProperty: - data, err := dataloaders.Property.LoadAll(id.PropertyIDsFromIDRef(ids)) + data, err := dataloaders.Property.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -167,7 +166,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeScene: - data, err := dataloaders.Scene.LoadAll(id.SceneIDsFromIDRef(ids)) + data, err := dataloaders.Scene.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -177,7 +176,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeTeam: - data, err := dataloaders.Team.LoadAll(id.TeamIDsFromIDRef(ids)) + data, err := dataloaders.Team.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -187,7 +186,7 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } return nodes, nil case gqlmodel.NodeTypeUser: - data, err 
:= dataloaders.User.LoadAll(id.UserIDsFromIDRef(ids)) + data, err := dataloaders.User.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -201,80 +200,59 @@ func (r *queryResolver) Nodes(ctx context.Context, ids []*id.ID, typeArg gqlmode } } -func (r *queryResolver) PropertySchema(ctx context.Context, i id.PropertySchemaID) (*gqlmodel.PropertySchema, error) { +func (r *queryResolver) PropertySchema(ctx context.Context, i gqlmodel.ID) (*gqlmodel.PropertySchema, error) { return dataloaders(ctx).PropertySchema.Load(i) } -func (r *queryResolver) PropertySchemas(ctx context.Context, ids []*id.PropertySchemaID) ([]*gqlmodel.PropertySchema, error) { - ids2 := make([]id.PropertySchemaID, 0, len(ids)) - for _, i := range ids { - if i != nil { - ids2 = append(ids2, *i) - } - } - - data, err := dataloaders(ctx).PropertySchema.LoadAll(ids2) +func (r *queryResolver) PropertySchemas(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.PropertySchema, error) { + data, err := dataloaders(ctx).PropertySchema.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } - return data, nil } -func (r *queryResolver) Plugin(ctx context.Context, id id.PluginID) (*gqlmodel.Plugin, error) { +func (r *queryResolver) Plugin(ctx context.Context, id gqlmodel.ID) (*gqlmodel.Plugin, error) { return dataloaders(ctx).Plugin.Load(id) } -func (r *queryResolver) Plugins(ctx context.Context, ids []*id.PluginID) ([]*gqlmodel.Plugin, error) { - ids2 := make([]id.PluginID, 0, len(ids)) - for _, i := range ids { - if i != nil { - ids2 = append(ids2, *i) - } - } - - data, err := dataloaders(ctx).Plugin.LoadAll(ids2) +func (r *queryResolver) Plugins(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmodel.Plugin, error) { + data, err := dataloaders(ctx).Plugin.LoadAll(ids) if len(err) > 0 && err[0] != nil { return nil, err[0] } - return data, nil } -func (r *queryResolver) Layer(ctx context.Context, layerID id.ID) (gqlmodel.Layer, error) { - dataloaders := dataloaders(ctx) - result, 
err := dataloaders.Layer.Load(id.LayerID(layerID)) +func (r *queryResolver) Layer(ctx context.Context, layerID gqlmodel.ID) (gqlmodel.Layer, error) { + result, err := dataloaders(ctx).Layer.Load(layerID) if result == nil || *result == nil { return nil, nil } return *result, err } -func (r *queryResolver) Scene(ctx context.Context, projectID id.ID) (*gqlmodel.Scene, error) { - return loaders(ctx).Scene.FindByProject(ctx, id.ProjectID(projectID)) +func (r *queryResolver) Scene(ctx context.Context, projectID gqlmodel.ID) (*gqlmodel.Scene, error) { + return loaders(ctx).Scene.FindByProject(ctx, projectID) } -func (r *queryResolver) Projects(ctx context.Context, teamID id.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { - return loaders(ctx).Project.FindByTeam(ctx, id.TeamID(teamID), first, last, before, after) +func (r *queryResolver) Projects(ctx context.Context, teamID gqlmodel.ID, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { + return loaders(ctx).Project.FindByTeam(ctx, teamID, first, last, before, after) } -func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { +func (r *queryResolver) DatasetSchemas(ctx context.Context, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetSchemaConnection, error) { return loaders(ctx).Dataset.FindSchemaByScene(ctx, sceneID, first, last, before, after) } -func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID id.ID) ([]*gqlmodel.DatasetSchema, error) { +func (r *queryResolver) DynamicDatasetSchemas(ctx context.Context, sceneID gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) { return loaders(ctx).Dataset.FindDynamicSchemasByScene(ctx, sceneID) } -func (r 
*queryResolver) Datasets(ctx context.Context, datasetSchemaID id.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { +func (r *queryResolver) Datasets(ctx context.Context, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) { return loaders(ctx).Dataset.FindBySchema(ctx, datasetSchemaID, first, last, before, after) } -func (r *queryResolver) SceneLock(ctx context.Context, sceneID id.ID) (*gqlmodel.SceneLockMode, error) { - return loaders(ctx).Scene.FetchLock(ctx, id.SceneID(sceneID)) -} - -func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.SearchedUser, error) { +func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.User, error) { return loaders(ctx).User.SearchUser(ctx, nameOrEmail) } diff --git a/internal/adapter/gql/resolver_scene.go b/internal/adapter/gql/resolver_scene.go index 946247418..eff213ba7 100644 --- a/internal/adapter/gql/resolver_scene.go +++ b/internal/adapter/gql/resolver_scene.go @@ -27,19 +27,19 @@ func (r *Resolver) Cluster() ClusterResolver { type sceneResolver struct{ *Resolver } func (r *sceneResolver) Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) { - return dataloaders(ctx).Project.Load(id.ProjectID(obj.ProjectID)) + return dataloaders(ctx).Project.Load(obj.ProjectID) } func (r *sceneResolver) Team(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Team, error) { - return dataloaders(ctx).Team.Load(id.TeamID(obj.TeamID)) + return dataloaders(ctx).Team.Load(obj.TeamID) } func (r *sceneResolver) Property(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Property, error) { - return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(obj.PropertyID) } func (r *sceneResolver) RootLayer(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.LayerGroup, 
error) { - layer, err := dataloaders(ctx).Layer.Load(id.LayerID(obj.RootLayerID)) + layer, err := dataloaders(ctx).Layer.Load(obj.RootLayerID) if err != nil { return nil, err } @@ -57,16 +57,13 @@ func (r *sceneResolver) DatasetSchemas(ctx context.Context, obj *gqlmodel.Scene, return loaders(ctx).Dataset.FindSchemaByScene(ctx, obj.ID, first, last, before, after) } -func (r *sceneResolver) LockMode(ctx context.Context, obj *gqlmodel.Scene) (gqlmodel.SceneLockMode, error) { - sl, err := loaders(ctx).Scene.FetchLock(ctx, id.SceneID(obj.ID)) +func (r *sceneResolver) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) { + sid, err := gqlmodel.ToID[id.Scene](obj.ID) if err != nil { - return gqlmodel.SceneLockModeFree, err + return nil, err } - return *sl, nil -} -func (r *sceneResolver) Tags(ctx context.Context, obj *gqlmodel.Scene) ([]gqlmodel.Tag, error) { - tags, err := usecases(ctx).Tag.FetchByScene(ctx, id.SceneID(obj.ID), getOperator(ctx)) + tags, err := usecases(ctx).Tag.FetchByScene(ctx, sid, getOperator(ctx)) if err != nil { return nil, err } @@ -87,7 +84,7 @@ func (r *scenePluginResolver) Property(ctx context.Context, obj *gqlmodel.SceneP if obj.PropertyID == nil { return nil, nil } - return dataloaders(ctx).Property.Load(id.PropertyID(*obj.PropertyID)) + return dataloaders(ctx).Property.Load(*obj.PropertyID) } type sceneWidgetResolver struct{ *Resolver } @@ -110,11 +107,11 @@ func (r *sceneWidgetResolver) Extension(ctx context.Context, obj *gqlmodel.Scene } func (r *sceneWidgetResolver) Property(ctx context.Context, obj *gqlmodel.SceneWidget) (*gqlmodel.Property, error) { - return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return dataloaders(ctx).Property.Load(obj.PropertyID) } type clusterResolver struct{ *Resolver } func (r *clusterResolver) Property(ctx context.Context, obj *gqlmodel.Cluster) (*gqlmodel.Property, error) { - return dataloaders(ctx).Property.Load(id.PropertyID(obj.PropertyID)) + return 
dataloaders(ctx).Property.Load(obj.PropertyID) } diff --git a/internal/adapter/gql/resolver_tag.go b/internal/adapter/gql/resolver_tag.go index 7eb572436..54bcfd580 100644 --- a/internal/adapter/gql/resolver_tag.go +++ b/internal/adapter/gql/resolver_tag.go @@ -4,7 +4,6 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) type tagItemResolver struct{ *Resolver } @@ -17,21 +16,21 @@ func (t tagItemResolver) LinkedDatasetSchema(ctx context.Context, obj *gqlmodel. if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).DatasetSchema.Load(id.DatasetSchemaID(*obj.LinkedDatasetSchemaID)) + return dataloaders(ctx).DatasetSchema.Load(*obj.LinkedDatasetSchemaID) } func (t tagItemResolver) LinkedDataset(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.Dataset, error) { if obj.LinkedDatasetID == nil { return nil, nil } - return dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + return dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) } func (t tagItemResolver) LinkedDatasetField(ctx context.Context, obj *gqlmodel.TagItem) (*gqlmodel.DatasetField, error) { if obj.LinkedDatasetID == nil { return nil, nil } - ds, err := dataloaders(ctx).Dataset.Load(id.DatasetID(*obj.LinkedDatasetID)) + ds, err := dataloaders(ctx).Dataset.Load(*obj.LinkedDatasetID) return ds.Field(*obj.LinkedDatasetFieldID), err } @@ -39,11 +38,11 @@ func (t tagItemResolver) Parent(ctx context.Context, obj *gqlmodel.TagItem) (*gq if obj.ParentID == nil { return nil, nil } - return dataloaders(ctx).TagGroup.Load(id.TagID(*obj.ParentID)) + return dataloaders(ctx).TagGroup.Load(*obj.ParentID) } func (tg tagItemResolver) Layers(ctx context.Context, obj *gqlmodel.TagItem) ([]gqlmodel.Layer, error) { - return loaders(ctx).Layer.FetchByTag(ctx, id.TagID(obj.ID)) + return loaders(ctx).Layer.FetchByTag(ctx, obj.ID) } type tagGroupResolver struct{ *Resolver } @@ -53,14 +52,7 @@ func (r 
*Resolver) TagGroup() TagGroupResolver { } func (r tagGroupResolver) Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]*gqlmodel.TagItem, error) { - tagIds := make([]id.TagID, 0, len(obj.TagIds)) - for _, i := range obj.TagIds { - if i == nil { - continue - } - tagIds = append(tagIds, id.TagID(*i)) - } - tagItems, err := dataloaders(ctx).TagItem.LoadAll(tagIds) + tagItems, err := dataloaders(ctx).TagItem.LoadAll(obj.TagIds) if len(err) > 0 && err[0] != nil { return nil, err[0] } @@ -68,9 +60,9 @@ func (r tagGroupResolver) Tags(ctx context.Context, obj *gqlmodel.TagGroup) ([]* } func (r tagGroupResolver) Scene(ctx context.Context, obj *gqlmodel.TagGroup) (*gqlmodel.Scene, error) { - return dataloaders(ctx).Scene.Load(id.SceneID(obj.SceneID)) + return dataloaders(ctx).Scene.Load(obj.SceneID) } func (r tagGroupResolver) Layers(ctx context.Context, obj *gqlmodel.TagGroup) ([]gqlmodel.Layer, error) { - return loaders(ctx).Layer.FetchByTag(ctx, id.TagID(obj.ID)) + return loaders(ctx).Layer.FetchByTag(ctx, obj.ID) } diff --git a/internal/adapter/gql/resolver_team.go b/internal/adapter/gql/resolver_team.go index 021412d78..67455e63e 100644 --- a/internal/adapter/gql/resolver_team.go +++ b/internal/adapter/gql/resolver_team.go @@ -5,7 +5,6 @@ import ( "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" "github.com/reearth/reearth-backend/internal/usecase" - "github.com/reearth/reearth-backend/pkg/id" ) func (r *Resolver) Team() TeamResolver { @@ -28,11 +27,11 @@ func (r *teamResolver) Assets(ctx context.Context, obj *gqlmodel.Team, first *in } func (r *teamResolver) Projects(ctx context.Context, obj *gqlmodel.Team, includeArchived *bool, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.ProjectConnection, error) { - return loaders(ctx).Project.FindByTeam(ctx, id.TeamID(obj.ID), first, last, before, after) + return loaders(ctx).Project.FindByTeam(ctx, obj.ID, first, last, before, after) } type teamMemberResolver struct{ 
*Resolver } func (r *teamMemberResolver) User(ctx context.Context, obj *gqlmodel.TeamMember) (*gqlmodel.User, error) { - return dataloaders(ctx).User.Load(id.UserID(obj.UserID)) + return dataloaders(ctx).User.Load(obj.UserID) } diff --git a/internal/adapter/gql/resolver_user.go b/internal/adapter/gql/resolver_user.go index ad443f5dd..cb0c12ecf 100644 --- a/internal/adapter/gql/resolver_user.go +++ b/internal/adapter/gql/resolver_user.go @@ -4,19 +4,18 @@ import ( "context" "github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel" - "github.com/reearth/reearth-backend/pkg/id" ) -func (r *Resolver) User() UserResolver { - return &userResolver{r} +func (r *Resolver) Me() MeResolver { + return &meResolver{r} } -type userResolver struct{ *Resolver } +type meResolver struct{ *Resolver } -func (r *userResolver) MyTeam(ctx context.Context, obj *gqlmodel.User) (*gqlmodel.Team, error) { - return dataloaders(ctx).Team.Load(id.TeamID(obj.MyTeamID)) +func (r *meResolver) MyTeam(ctx context.Context, obj *gqlmodel.Me) (*gqlmodel.Team, error) { + return dataloaders(ctx).Team.Load(obj.MyTeamID) } -func (r *userResolver) Teams(ctx context.Context, obj *gqlmodel.User) ([]*gqlmodel.Team, error) { - return loaders(ctx).Team.FindByUser(ctx, id.UserID(obj.ID)) +func (r *meResolver) Teams(ctx context.Context, obj *gqlmodel.Me) ([]*gqlmodel.Team, error) { + return loaders(ctx).Team.FindByUser(ctx, obj.ID) } diff --git a/internal/infrastructure/fs/file.go b/internal/infrastructure/fs/file.go index 975fdf4e5..6ea331fff 100644 --- a/internal/infrastructure/fs/file.go +++ b/internal/infrastructure/fs/file.go @@ -43,7 +43,7 @@ func (f *fileRepo) ReadAsset(ctx context.Context, filename string) (io.ReadClose } func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, error) { - filename := sanitize.Path(id.New().String() + path.Ext(file.Path)) + filename := sanitize.Path(newAssetID() + path.Ext(file.Path)) if err := f.upload(ctx, filepath.Join(assetDir, filename), 
file.Content); err != nil { return nil, err } @@ -185,3 +185,8 @@ func getAssetFileURL(base *url.URL, filename string) *url.URL { b.Path = path.Join(b.Path, filename) return &b } + +func newAssetID() string { + // TODO: replace + return id.NewAssetID().String() +} diff --git a/internal/infrastructure/gcs/file.go b/internal/infrastructure/gcs/file.go index ddd528624..154fde67a 100644 --- a/internal/infrastructure/gcs/file.go +++ b/internal/infrastructure/gcs/file.go @@ -71,7 +71,7 @@ func (f *fileRepo) UploadAsset(ctx context.Context, file *file.File) (*url.URL, return nil, gateway.ErrFileTooLarge } - sn := sanitize.Path(id.New().String() + path.Ext(file.Path)) + sn := sanitize.Path(newAssetID() + path.Ext(file.Path)) if sn == "" { return nil, gateway.ErrInvalidFile } @@ -328,3 +328,8 @@ func getGCSObjectNameFromURL(base, u *url.URL) string { return p } + +func newAssetID() string { + // TODO: replace + return id.NewAssetID().String() +} diff --git a/internal/infrastructure/memory/asset.go b/internal/infrastructure/memory/asset.go index 08df3ed25..73a463f98 100644 --- a/internal/infrastructure/memory/asset.go +++ b/internal/infrastructure/memory/asset.go @@ -44,7 +44,7 @@ func (r *Asset) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, erro return &asset.Asset{}, rerror.ErrNotFound } -func (r *Asset) FindByIDs(ctx context.Context, ids []id.AssetID) ([]*asset.Asset, error) { +func (r *Asset) FindByIDs(ctx context.Context, ids id.AssetIDList) ([]*asset.Asset, error) { r.lock.Lock() defer r.lock.Unlock() @@ -80,7 +80,7 @@ func (r *Asset) FindByTeam(ctx context.Context, id id.TeamID, filter repo.AssetF s := *filter.Sort sort.SliceStable(result, func(i, j int) bool { if s == asset.SortTypeID { - return result[i].ID().ID().Compare(result[j].ID().ID()) < 0 + return result[i].ID().Compare(result[j].ID()) < 0 } if s == asset.SortTypeSize { return result[i].Size() < result[j].Size() diff --git a/internal/infrastructure/memory/dataset.go 
b/internal/infrastructure/memory/dataset.go index 3c4d3f7a9..71634f88a 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -42,7 +42,7 @@ func (r *Dataset) FindByID(ctx context.Context, id id.DatasetID) (*dataset.Datas return nil, rerror.ErrNotFound } -func (r *Dataset) FindByIDs(ctx context.Context, ids []id.DatasetID) (dataset.List, error) { +func (r *Dataset) FindByIDs(ctx context.Context, ids id.DatasetIDList) (dataset.List, error) { r.lock.Lock() defer r.lock.Unlock() @@ -100,7 +100,7 @@ func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) (d return result, nil } -func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { +func (r *Dataset) FindGraph(ctx context.Context, i id.DatasetID, fields id.DatasetFieldIDList) (dataset.List, error) { r.lock.Lock() defer r.lock.Unlock() @@ -158,7 +158,7 @@ func (r *Dataset) Remove(ctx context.Context, id id.DatasetID) error { return nil } -func (r *Dataset) RemoveAll(ctx context.Context, ids []id.DatasetID) error { +func (r *Dataset) RemoveAll(ctx context.Context, ids id.DatasetIDList) error { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/dataset_schema.go b/internal/infrastructure/memory/dataset_schema.go index 75081eb64..faa2a77aa 100644 --- a/internal/infrastructure/memory/dataset_schema.go +++ b/internal/infrastructure/memory/dataset_schema.go @@ -42,7 +42,7 @@ func (r *DatasetSchema) FindByID(ctx context.Context, id id.DatasetSchemaID) (*d return nil, rerror.ErrNotFound } -func (r *DatasetSchema) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID) (dataset.SchemaList, error) { +func (r *DatasetSchema) FindByIDs(ctx context.Context, ids id.DatasetSchemaIDList) (dataset.SchemaList, error) { r.lock.Lock() defer r.lock.Unlock() @@ -190,7 +190,7 @@ func (r *DatasetSchema) Remove(ctx context.Context, id id.DatasetSchemaID) error return nil } 
-func (r *DatasetSchema) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) error { +func (r *DatasetSchema) RemoveAll(ctx context.Context, ids id.DatasetSchemaIDList) error { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index eefab91ab..c15939edb 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -41,7 +41,7 @@ func (r *Layer) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, error return nil, rerror.ErrNotFound } -func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID) (layer.List, error) { +func (r *Layer) FindByIDs(ctx context.Context, ids id.LayerIDList) (layer.List, error) { r.lock.Lock() defer r.lock.Unlock() @@ -56,7 +56,7 @@ func (r *Layer) FindByIDs(ctx context.Context, ids []id.LayerID) (layer.List, er return result, nil } -func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID) (layer.GroupList, error) { +func (r *Layer) FindGroupByIDs(ctx context.Context, ids id.LayerIDList) (layer.GroupList, error) { r.lock.Lock() defer r.lock.Unlock() @@ -73,7 +73,7 @@ func (r *Layer) FindGroupByIDs(ctx context.Context, ids []id.LayerID) (layer.Gro return result, nil } -func (r *Layer) FindItemByIDs(ctx context.Context, ids []id.LayerID) (layer.ItemList, error) { +func (r *Layer) FindItemByIDs(ctx context.Context, ids id.LayerIDList) (layer.ItemList, error) { r.lock.Lock() defer r.lock.Unlock() @@ -136,7 +136,7 @@ func (r *Layer) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, s id return result, nil } -func (r *Layer) FindParentsByIDs(_ context.Context, ids []id.LayerID) (layer.GroupList, error) { +func (r *Layer) FindParentsByIDs(_ context.Context, ids id.LayerIDList) (layer.GroupList, error) { r.lock.Lock() defer r.lock.Unlock() @@ -144,7 +144,7 @@ func (r *Layer) FindParentsByIDs(_ context.Context, ids []id.LayerID) (layer.Gro for _, l := range r.data { if lg := 
layer.ToLayerGroup(l); lg != nil && r.f.CanRead(l.Scene()) { for _, cl := range lg.Layers().Layers() { - if cl.Contains(ids) { + if ids.Has(cl) { res = append(res, lg) } } @@ -323,7 +323,7 @@ func (r *Layer) Remove(ctx context.Context, id id.LayerID) error { return nil } -func (r *Layer) RemoveAll(ctx context.Context, ids []id.LayerID) error { +func (r *Layer) RemoveAll(ctx context.Context, ids id.LayerIDList) error { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/project.go b/internal/infrastructure/memory/project.go index 48c6127c3..4c1563e44 100644 --- a/internal/infrastructure/memory/project.go +++ b/internal/infrastructure/memory/project.go @@ -60,7 +60,7 @@ func (r *Project) FindByTeam(ctx context.Context, id id.TeamID, p *usecase.Pagin ), nil } -func (r *Project) FindByIDs(ctx context.Context, ids []id.ProjectID) ([]*project.Project, error) { +func (r *Project) FindByIDs(ctx context.Context, ids id.ProjectIDList) ([]*project.Project, error) { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index 715481dd4..2d8c74571 100644 --- a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -2,7 +2,6 @@ package memory import ( "context" - "sort" "sync" "github.com/reearth/reearth-backend/pkg/id" @@ -42,7 +41,7 @@ func (r *Property) FindByID(ctx context.Context, id id.PropertyID) (*property.Pr return nil, rerror.ErrNotFound } -func (r *Property) FindByIDs(ctx context.Context, ids []id.PropertyID) (property.List, error) { +func (r *Property) FindByIDs(ctx context.Context, ids id.PropertyIDList) (property.List, error) { r.lock.Lock() defer r.lock.Unlock() @@ -107,9 +106,7 @@ func (r *Property) FindBySchema(_ context.Context, schemas []id.PropertySchemaID } } } - sort.Slice(result, func(i, j int) bool { - return result[i].ID().ID().Compare(result[j].ID().ID()) < 0 - }) + result.Sort() return result, nil } @@ 
-128,9 +125,7 @@ func (r *Property) FindByPlugin(_ context.Context, plugin id.PluginID, scene id. break } } - sort.Slice(result, func(i, j int) bool { - return result[i].ID().ID().Compare(result[j].ID().ID()) < 0 - }) + result.Sort() return result, nil } @@ -184,7 +179,7 @@ func (r *Property) Remove(ctx context.Context, id id.PropertyID) error { return nil } -func (r *Property) RemoveAll(ctx context.Context, ids []id.PropertyID) error { +func (r *Property) RemoveAll(ctx context.Context, ids id.PropertyIDList) error { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index 7032a92bf..826f041bc 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -42,7 +42,7 @@ func (r *Scene) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, erro return nil, rerror.ErrNotFound } -func (r *Scene) FindByIDs(ctx context.Context, ids []id.SceneID) (scene.List, error) { +func (r *Scene) FindByIDs(ctx context.Context, ids id.SceneIDList) (scene.List, error) { r.lock.Lock() defer r.lock.Unlock() @@ -76,7 +76,7 @@ func (r *Scene) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, result := scene.List{} for _, d := range r.data { - if user.TeamIDList(teams).Includes(d.Team()) && r.f.CanRead(d.Team()) { + if user.TeamIDList(teams).Has(d.Team()) && r.f.CanRead(d.Team()) { result = append(result, d) } } diff --git a/internal/infrastructure/memory/scene_lock.go b/internal/infrastructure/memory/scene_lock.go index 2ac0ef28e..3b65a725a 100644 --- a/internal/infrastructure/memory/scene_lock.go +++ b/internal/infrastructure/memory/scene_lock.go @@ -29,7 +29,7 @@ func (r *sceneLock) GetLock(ctx context.Context, sceneID id.SceneID) (scene.Lock return scene.LockModeFree, nil } -func (r *sceneLock) GetAllLock(ctx context.Context, sceneID []id.SceneID) ([]scene.LockMode, error) { +func (r *sceneLock) GetAllLock(ctx context.Context, sceneID 
id.SceneIDList) ([]scene.LockMode, error) { res := make([]scene.LockMode, 0, len(sceneID)) for _, si := range sceneID { if si.IsNil() { diff --git a/internal/infrastructure/memory/tag.go b/internal/infrastructure/memory/tag.go index ec206bfd6..300fcce87 100644 --- a/internal/infrastructure/memory/tag.go +++ b/internal/infrastructure/memory/tag.go @@ -41,7 +41,7 @@ func (r *Tag) FindByID(ctx context.Context, tagID id.TagID) (tag.Tag, error) { return nil, rerror.ErrNotFound } -func (r *Tag) FindByIDs(ctx context.Context, tids []id.TagID) ([]*tag.Tag, error) { +func (r *Tag) FindByIDs(ctx context.Context, tids id.TagIDList) ([]*tag.Tag, error) { r.lock.Lock() defer r.lock.Unlock() @@ -79,7 +79,7 @@ func (r *Tag) FindItemByID(ctx context.Context, tagID id.TagID) (*tag.Item, erro return nil, rerror.ErrNotFound } -func (r *Tag) FindItemByIDs(ctx context.Context, tagIDs []id.TagID) ([]*tag.Item, error) { +func (r *Tag) FindItemByIDs(ctx context.Context, tagIDs id.TagIDList) ([]*tag.Item, error) { r.lock.Lock() defer r.lock.Unlock() @@ -106,7 +106,7 @@ func (r *Tag) FindGroupByID(ctx context.Context, tagID id.TagID) (*tag.Group, er return nil, rerror.ErrNotFound } -func (r *Tag) FindGroupByIDs(ctx context.Context, tagIDs []id.TagID) ([]*tag.Group, error) { +func (r *Tag) FindGroupByIDs(ctx context.Context, tagIDs id.TagIDList) ([]*tag.Group, error) { r.lock.Lock() defer r.lock.Unlock() @@ -138,8 +138,7 @@ func (r *Tag) FindGroupByItem(ctx context.Context, tagID id.TagID) (*tag.Group, for _, tg := range r.data { if res := tag.GroupFrom(tg); res != nil { - tags := res.Tags() - for _, item := range tags.Tags() { + for _, item := range res.Tags() { if item == tagID { return res, nil } @@ -185,7 +184,7 @@ func (r *Tag) Remove(ctx context.Context, id id.TagID) error { return nil } -func (r *Tag) RemoveAll(ctx context.Context, ids []id.TagID) error { +func (r *Tag) RemoveAll(ctx context.Context, ids id.TagIDList) error { r.lock.Lock() defer r.lock.Unlock() diff --git 
a/internal/infrastructure/memory/tag_test.go b/internal/infrastructure/memory/tag_test.go index 89264591a..82130cf20 100644 --- a/internal/infrastructure/memory/tag_test.go +++ b/internal/infrastructure/memory/tag_test.go @@ -38,7 +38,7 @@ func TestTag_FindByIDs(t *testing.T) { sid := id.NewSceneID() sid2 := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() tti := tag.Tag(t1) @@ -61,7 +61,7 @@ func TestTag_FindRootsByScene(t *testing.T) { sid := id.NewSceneID() sid2 := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() t3, _ := tag.NewItem().NewID().Scene(sid2).Label("item2").Build() tti := tag.Tag(t1) @@ -83,7 +83,7 @@ func TestTag_FindGroupByID(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) @@ -105,7 +105,7 @@ func TestTag_FindItemByID(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) @@ -203,7 +203,7 @@ func TestTag_Remove(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := 
id.TagIDList{t1.ID()} t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) @@ -223,7 +223,7 @@ func TestTag_RemoveAll(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) @@ -247,7 +247,7 @@ func TestTag_RemoveByScene(t *testing.T) { sid := id.NewSceneID() sid2 := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewItem().NewID().Scene(sid2).Label("item").Build() t3, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) @@ -270,7 +270,7 @@ func TestTag_FindGroupByItem(t *testing.T) { ctx := context.Background() sid := id.NewSceneID() t1, _ := tag.NewItem().NewID().Scene(sid).Label("item").Build() - tl := tag.IDListFrom([]id.TagID{t1.ID()}) + tl := id.TagIDList{t1.ID()} t2, _ := tag.NewGroup().NewID().Scene(sid).Label("group").Tags(tl).Build() tti := tag.Tag(t1) ttg := tag.Tag(t2) diff --git a/internal/infrastructure/memory/team.go b/internal/infrastructure/memory/team.go index d779c06f3..625baeded 100644 --- a/internal/infrastructure/memory/team.go +++ b/internal/infrastructure/memory/team.go @@ -34,7 +34,7 @@ func (r *Team) FindByUser(ctx context.Context, i id.UserID) (user.TeamList, erro return result, nil } -func (r *Team) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamList, error) { +func (r *Team) FindByIDs(ctx context.Context, ids id.TeamIDList) (user.TeamList, error) { r.lock.Lock() defer r.lock.Unlock() @@ -86,7 +86,7 @@ func (r *Team) Remove(ctx context.Context, id id.TeamID) error { return nil } -func (r *Team) RemoveAll(ctx 
context.Context, ids []id.TeamID) error { +func (r *Team) RemoveAll(ctx context.Context, ids id.TeamIDList) error { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/memory/user.go b/internal/infrastructure/memory/user.go index 0da5e7d82..16512cbbf 100644 --- a/internal/infrastructure/memory/user.go +++ b/internal/infrastructure/memory/user.go @@ -21,7 +21,7 @@ func NewUser() repo.User { } } -func (r *User) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { +func (r *User) FindByIDs(ctx context.Context, ids id.UserIDList) ([]*user.User, error) { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/mongo/asset.go b/internal/infrastructure/mongo/asset.go index c06206be4..1ad65a3e4 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -40,9 +40,9 @@ func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, }) } -func (r *assetRepo) FindByIDs(ctx context.Context, ids []id.AssetID) ([]*asset.Asset, error) { +func (r *assetRepo) FindByIDs(ctx context.Context, ids id.AssetIDList) ([]*asset.Asset, error) { filter := bson.M{ - "id": bson.M{"$in": id.AssetIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, } dst := make([]*asset.Asset, 0, len(ids)) res, err := r.find(ctx, dst, filter) diff --git a/internal/infrastructure/mongo/container.go b/internal/infrastructure/mongo/container.go index 5e67f1c7e..a890f3343 100644 --- a/internal/infrastructure/mongo/container.go +++ b/internal/infrastructure/mongo/container.go @@ -6,7 +6,6 @@ import ( "github.com/reearth/reearth-backend/internal/infrastructure/mongo/migration" "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" "github.com/reearth/reearth-backend/internal/usecase/repo" - "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/scene" "github.com/reearth/reearth-backend/pkg/user" "go.mongodb.org/mongo-driver/bson" @@ -55,14 +54,14 @@ 
func applyTeamFilter(filter interface{}, ids user.TeamIDList) interface{} { if ids == nil { return filter } - return mongodoc.And(filter, "team", bson.M{"$in": id.TeamIDsToStrings(ids)}) + return mongodoc.And(filter, "team", bson.M{"$in": ids.Strings()}) } func applySceneFilter(filter interface{}, ids scene.IDList) interface{} { if ids == nil { return filter } - return mongodoc.And(filter, "scene", bson.M{"$in": id.SceneIDsToStrings(ids)}) + return mongodoc.And(filter, "scene", bson.M{"$in": ids.Strings()}) } func applyOptionalSceneFilter(filter interface{}, ids scene.IDList) interface{} { @@ -70,7 +69,7 @@ func applyOptionalSceneFilter(filter interface{}, ids scene.IDList) interface{} return filter } return mongodoc.And(filter, "", bson.M{"$or": []bson.M{ - {"scene": bson.M{"$in": id.SceneIDsToStrings(ids)}}, + {"scene": bson.M{"$in": ids.Strings()}}, {"scene": nil}, {"scene": ""}, }}) diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index be2db28cf..eda57615e 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -45,10 +45,10 @@ func (r *datasetRepo) FindByID(ctx context.Context, id id.DatasetID) (*dataset.D return r.findOne(ctx, bson.M{"id": id.String()}) } -func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID) (dataset.List, error) { +func (r *datasetRepo) FindByIDs(ctx context.Context, ids id.DatasetIDList) (dataset.List, error) { filter := bson.M{ "id": bson.M{ - "$in": id.DatasetIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*dataset.Dataset, 0, len(ids)) @@ -61,17 +61,17 @@ func (r *datasetRepo) FindByIDs(ctx context.Context, ids []id.DatasetID) (datase func (r *datasetRepo) FindBySchema(ctx context.Context, schemaID id.DatasetSchemaID, pagination *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) { return r.paginate(ctx, bson.M{ - "schema": id.ID(schemaID).String(), + "schema": schemaID.String(), }, pagination) } 
func (r *datasetRepo) FindBySchemaAll(ctx context.Context, schemaID id.DatasetSchemaID) (dataset.List, error) { return r.find(ctx, nil, bson.M{ - "schema": id.ID(schemaID).String(), + "schema": schemaID.String(), }) } -func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, fields []id.DatasetSchemaFieldID) (dataset.List, error) { +func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, fields id.DatasetFieldIDList) (dataset.List, error) { if len(fields) == 0 { d, err := r.FindByID(ctx, did) if err != nil { @@ -80,14 +80,14 @@ func (r *datasetRepo) FindGraph(ctx context.Context, did id.DatasetID, fields [] return dataset.List{d}, nil } - fieldsstr := id.DatasetSchemaFieldIDsToStrings(fields) + fieldsstr := fields.Strings() firstField := fieldsstr[0] aggfilter := bson.D{} if r.f.Readable != nil { aggfilter = append(aggfilter, bson.E{Key: "$in", Value: []interface{}{ "$$g.scene", - id.SceneIDsToStrings(r.f.Readable), + r.f.Readable.Strings(), }}) } @@ -275,12 +275,12 @@ func (r *datasetRepo) Remove(ctx context.Context, id id.DatasetID) error { return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } -func (r *datasetRepo) RemoveAll(ctx context.Context, ids []id.DatasetID) error { +func (r *datasetRepo) RemoveAll(ctx context.Context, ids id.DatasetIDList) error { if len(ids) == 0 { return nil } return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ - "id": bson.M{"$in": id.DatasetIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, })) } diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index 2ad8d995d..384a2fa91 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -45,10 +45,10 @@ func (r *datasetSchemaRepo) FindByID(ctx context.Context, id id.DatasetSchemaID) }) } -func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids []id.DatasetSchemaID) (dataset.SchemaList, error) { +func (r 
*datasetSchemaRepo) FindByIDs(ctx context.Context, ids id.DatasetSchemaIDList) (dataset.SchemaList, error) { filter := bson.M{ "id": bson.M{ - "$in": id.DatasetSchemaIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*dataset.Schema, 0, len(ids)) @@ -79,7 +79,7 @@ func (r *datasetSchemaRepo) FindBySceneAll(ctx context.Context, sceneID id.Scene func (r *datasetSchemaRepo) FindDynamicByID(ctx context.Context, sid id.DatasetSchemaID) (*dataset.Schema, error) { return r.findOne(ctx, bson.M{ - "id": id.ID(sid).String(), + "id": sid.String(), "dynamic": true, }) } @@ -124,12 +124,12 @@ func (r *datasetSchemaRepo) Remove(ctx context.Context, id id.DatasetSchemaID) e return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } -func (r *datasetSchemaRepo) RemoveAll(ctx context.Context, ids []id.DatasetSchemaID) error { +func (r *datasetSchemaRepo) RemoveAll(ctx context.Context, ids id.DatasetSchemaIDList) error { if len(ids) == 0 { return nil } return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ - "id": bson.M{"$in": id.DatasetSchemaIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, })) } diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index e12834c65..83a8829ed 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -44,10 +44,10 @@ func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, e }) } -func (r *layerRepo) FindByIDs(ctx context.Context, ids []id.LayerID) (layer.List, error) { +func (r *layerRepo) FindByIDs(ctx context.Context, ids id.LayerIDList) (layer.List, error) { filter := bson.M{ "id": bson.M{ - "$in": id.LayerIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*layer.Layer, 0, len(ids)) @@ -70,10 +70,10 @@ func (r *layerRepo) FindItemByID(ctx context.Context, id id.LayerID) (*layer.Ite }) } -func (r *layerRepo) FindItemByIDs(ctx context.Context, ids []id.LayerID) (layer.ItemList, error) { +func (r 
*layerRepo) FindItemByIDs(ctx context.Context, ids id.LayerIDList) (layer.ItemList, error) { filter := bson.M{ "id": bson.M{ - "$in": id.LayerIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*layer.Item, 0, len(ids)) @@ -90,10 +90,10 @@ func (r *layerRepo) FindGroupByID(ctx context.Context, id id.LayerID) (*layer.Gr }) } -func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids []id.LayerID) (layer.GroupList, error) { +func (r *layerRepo) FindGroupByIDs(ctx context.Context, ids id.LayerIDList) (layer.GroupList, error) { filter := bson.M{ "id": bson.M{ - "$in": id.LayerIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*layer.Group, 0, len(ids)) @@ -111,9 +111,9 @@ func (r *layerRepo) FindGroupBySceneAndLinkedDatasetSchema(ctx context.Context, }) } -func (r *layerRepo) FindParentsByIDs(ctx context.Context, ids []id.LayerID) (layer.GroupList, error) { +func (r *layerRepo) FindParentsByIDs(ctx context.Context, ids id.LayerIDList) (layer.GroupList, error) { return r.findGroups(ctx, nil, bson.M{ - "group.layers": bson.M{"$in": id.LayerIDsToStrings(ids)}, + "group.layers": bson.M{"$in": ids.Strings()}, }) } @@ -210,12 +210,12 @@ func (r *layerRepo) Remove(ctx context.Context, id id.LayerID) error { return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } -func (r *layerRepo) RemoveAll(ctx context.Context, ids []id.LayerID) error { +func (r *layerRepo) RemoveAll(ctx context.Context, ids id.LayerIDList) error { if len(ids) == 0 { return nil } return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ - "id": bson.M{"$in": id.LayerIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, })) } diff --git a/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go b/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go index 862da984b..2d516d3c5 100644 --- a/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go +++ 
b/internal/infrastructure/mongo/migration/201217132559_add_scene_widget_id.go @@ -30,7 +30,7 @@ func AddSceneWidgetId(ctx context.Context, c DBClient) error { widgets := make([]mongodoc.SceneWidgetDocument, 0, len(doc.Widgets)) for _, w := range doc.Widgets { if w.ID == "" { - w.ID = id.New().String() + w.ID = id.NewWidgetID().String() } widgets = append(widgets, w) } diff --git a/internal/infrastructure/mongo/mongodoc/dataset.go b/internal/infrastructure/mongo/mongodoc/dataset.go index 4d8d41fe2..3cef6b719 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset.go +++ b/internal/infrastructure/mongo/mongodoc/dataset.go @@ -122,7 +122,7 @@ func (doc *DatasetDocument) Model() (*dataset.Dataset, error) { } fields := make([]*dataset.Field, 0, len(doc.Fields)) for _, field := range doc.Fields { - fid, err := id.DatasetSchemaFieldIDFrom(field.Field) + fid, err := id.DatasetFieldIDFrom(field.Field) if err != nil { return nil, err } @@ -147,8 +147,8 @@ func NewDataset(dataset *dataset.Dataset) (*DatasetDocument, string) { var doc DatasetDocument doc.ID = did doc.Source = dataset.Source() - doc.Scene = id.ID(dataset.Scene()).String() - doc.Schema = id.ID(dataset.Schema()).String() + doc.Scene = dataset.Scene().String() + doc.Schema = dataset.Schema().String() fields := dataset.Fields() doc.Fields = make([]*DatasetFieldDocument, 0, len(fields)) @@ -167,7 +167,7 @@ func NewDatasets(datasets []*dataset.Dataset, f scene.IDList) ([]interface{}, [] res := make([]interface{}, 0, len(datasets)) ids := make([]string, 0, len(datasets)) for _, d := range datasets { - if d == nil || f != nil && !f.Includes(d.Scene()) { + if d == nil || f != nil && !f.Has(d.Scene()) { continue } r, id := NewDataset(d) diff --git a/internal/infrastructure/mongo/mongodoc/dataset_schema.go b/internal/infrastructure/mongo/mongodoc/dataset_schema.go index 4f4c99b2e..45ef65e04 100644 --- a/internal/infrastructure/mongo/mongodoc/dataset_schema.go +++ 
b/internal/infrastructure/mongo/mongodoc/dataset_schema.go @@ -58,7 +58,7 @@ func (d *DatasetSchemaDocument) Model() (*dataset.Schema, error) { fields := make([]*dataset.SchemaField, 0, len(d.Fields)) for _, field := range d.Fields { - fid, err := id.DatasetSchemaFieldIDFrom(field.ID) + fid, err := id.DatasetFieldIDFrom(field.ID) if err != nil { return nil, err } @@ -81,7 +81,7 @@ func (d *DatasetSchemaDocument) Model() (*dataset.Schema, error) { Scene(scene). Fields(fields) if d.RepresentativeField != nil { - dsfid, err := id.DatasetSchemaFieldIDFrom(*d.RepresentativeField) + dsfid, err := id.DatasetFieldIDFrom(*d.RepresentativeField) if err != nil { return nil, err } @@ -96,7 +96,7 @@ func NewDatasetSchema(dataset *dataset.Schema) (*DatasetSchemaDocument, string) ID: did, Name: dataset.Name(), Source: dataset.Source(), - Scene: id.ID(dataset.Scene()).String(), + Scene: dataset.Scene().String(), RepresentativeField: dataset.RepresentativeFieldID().StringRef(), Dynamic: dataset.Dynamic(), } @@ -119,7 +119,7 @@ func NewDatasetSchemas(datasetSchemas []*dataset.Schema, f scene.IDList) ([]inte res := make([]interface{}, 0, len(datasetSchemas)) ids := make([]string, 0, len(datasetSchemas)) for _, d := range datasetSchemas { - if d == nil || f != nil && !f.Includes(d.Scene()) { + if d == nil || f != nil && !f.Has(d.Scene()) { continue } r, id := NewDatasetSchema(d) diff --git a/internal/infrastructure/mongo/mongodoc/layer.go b/internal/infrastructure/mongo/mongodoc/layer.go index 0800dd171..3d8686cd6 100644 --- a/internal/infrastructure/mongo/mongodoc/layer.go +++ b/internal/infrastructure/mongo/mongodoc/layer.go @@ -92,7 +92,7 @@ func NewLayer(l layer.Layer) (*LayerDocument, string) { if lg := layer.GroupFromLayer(l); lg != nil { group = &LayerGroupDocument{ - Layers: id.LayerIDsToStrings(lg.Layers().Layers()), + Layers: lg.Layers().Strings(), LinkedDatasetSchema: lg.LinkedDatasetSchema().StringRef(), Root: lg.IsRoot(), } @@ -145,7 +145,7 @@ func NewLayers(layers 
layer.List, f scene.IDList) ([]interface{}, []string) { continue } d2 := *d - if d2 == nil || f != nil && !f.Includes(d2.Scene()) { + if d2 == nil || f != nil && !f.Has(d2.Scene()) { continue } r, id := NewLayer(d2) diff --git a/internal/infrastructure/mongo/mongodoc/property.go b/internal/infrastructure/mongo/mongodoc/property.go index 5d85b62c5..05c5608a0 100644 --- a/internal/infrastructure/mongo/mongodoc/property.go +++ b/internal/infrastructure/mongo/mongodoc/property.go @@ -185,7 +185,7 @@ func NewProperties(properties []*property.Property, f scene.IDList) ([]interface res := make([]interface{}, 0, len(properties)) ids := make([]string, 0, len(properties)) for _, d := range properties { - if d == nil || f != nil && !f.Includes(d.Scene()) { + if d == nil || f != nil && !f.Has(d.Scene()) { continue } r, id := NewProperty(d) @@ -207,7 +207,7 @@ func toModelPropertyField(f *PropertyFieldDocument) *property.Field { var link *property.Link d := id.DatasetIDFromRef(l.Dataset) ds := id.DatasetSchemaIDFromRef(l.Schema) - df := id.DatasetSchemaFieldIDFromRef(l.Field) + df := id.DatasetFieldIDFromRef(l.Field) if d != nil && ds != nil && df != nil { link = property.NewLink(*d, *ds, *df) } else if ds != nil && df != nil { diff --git a/internal/infrastructure/mongo/mongodoc/property_schema.go b/internal/infrastructure/mongo/mongodoc/property_schema.go index 5aadc26db..f2d0d357f 100644 --- a/internal/infrastructure/mongo/mongodoc/property_schema.go +++ b/internal/infrastructure/mongo/mongodoc/property_schema.go @@ -141,7 +141,7 @@ func NewPropertySchemas(ps []*property.Schema, f scene.IDList) ([]interface{}, [ if d == nil { continue } - if s := d.Scene(); s != nil && f != nil && !f.Includes(*s) { + if s := d.Scene(); s != nil && f != nil && !f.Has(*s) { continue } r, id := NewPropertySchema(d) @@ -169,7 +169,7 @@ func ToModelPropertySchemaField(f *PropertySchemaFieldDocument) (*property.Schem vt := property.ValueType(f.Type) return property.NewSchemaField(). 
- ID(id.PropertySchemaFieldID(f.ID)). + ID(id.PropertyFieldID(f.ID)). Type(vt). Name(f.Name). Description(f.Description). @@ -228,7 +228,7 @@ func toModelPropertyCondition(d *PropertyConditonDocument) *property.Condition { } return &property.Condition{ - Field: id.PropertySchemaFieldID(d.Field), + Field: id.PropertyFieldID(d.Field), Value: toModelPropertyValue(d.Value, d.Type), } } diff --git a/internal/infrastructure/mongo/mongodoc/scene_align.go b/internal/infrastructure/mongo/mongodoc/scene_align.go index 836ef2527..16677e248 100644 --- a/internal/infrastructure/mongo/mongodoc/scene_align.go +++ b/internal/infrastructure/mongo/mongodoc/scene_align.go @@ -82,7 +82,7 @@ func NewWidgetArea(a *scene.WidgetArea) *WidgetAreaDocument { } return &WidgetAreaDocument{ - WidgetIDs: id.WidgetIDsToStrings(a.WidgetIDs()), + WidgetIDs: a.WidgetIDs().Strings(), Align: string(a.Alignment()), } } diff --git a/internal/infrastructure/mongo/mongodoc/tag.go b/internal/infrastructure/mongo/mongodoc/tag.go index 2e07de4e3..e1f737cb6 100644 --- a/internal/infrastructure/mongo/mongodoc/tag.go +++ b/internal/infrastructure/mongo/mongodoc/tag.go @@ -65,11 +65,8 @@ func NewTag(t tag.Tag) (*TagDocument, string) { var group *TagGroupDocument var item *TagItemDocument if tg := tag.GroupFrom(t); tg != nil { - tags := tg.Tags() - ids := tags.Tags() - group = &TagGroupDocument{ - Tags: id.TagIDsToStrings(ids), + Tags: tg.Tags().Strings(), } } @@ -100,7 +97,7 @@ func NewTags(tags []*tag.Tag, f scene.IDList) ([]interface{}, []string) { continue } d2 := *d - if f != nil && !f.Includes(d2.Scene()) { + if f != nil && !f.Has(d2.Scene()) { continue } r, tid := NewTag(d2) @@ -151,7 +148,7 @@ func (d *TagDocument) ModelItem() (*tag.Item, error) { Parent(id.TagIDFromRef(d.Item.Parent)). LinkedDatasetSchemaID(id.DatasetSchemaIDFromRef(d.Item.LinkedDatasetSchemaID)). LinkedDatasetID(id.DatasetIDFromRef(d.Item.LinkedDatasetID)). 
- LinkedDatasetFieldID(id.DatasetSchemaFieldIDFromRef(d.Item.LinkedDatasetFieldID)). + LinkedDatasetFieldID(id.DatasetFieldIDFromRef(d.Item.LinkedDatasetFieldID)). Build() } @@ -164,24 +161,21 @@ func (d *TagDocument) ModelGroup() (*tag.Group, error) { if err != nil { return nil, err } + sid, err := id.SceneIDFrom(d.Scene) if err != nil { return nil, err } - ids := make([]id.TagID, 0, len(d.Group.Tags)) - for _, lgid := range d.Group.Tags { - tagId, err := id.TagIDFrom(lgid) - if err != nil { - return nil, err - } - ids = append(ids, tagId) + tags, err := id.TagIDListFrom(d.Group.Tags) + if err != nil { + return nil, err } return tag.NewGroup(). ID(tid). Label(d.Label). Scene(sid). - Tags(tag.IDListFrom(ids)). + Tags(tags). Build() } diff --git a/internal/infrastructure/mongo/mongodoc/tag_test.go b/internal/infrastructure/mongo/mongodoc/tag_test.go index 020b96775..9101c9fe3 100644 --- a/internal/infrastructure/mongo/mongodoc/tag_test.go +++ b/internal/infrastructure/mongo/mongodoc/tag_test.go @@ -14,7 +14,7 @@ func TestNewTag(t *testing.T) { sid := id.NewSceneID() dssid := id.NewDatasetSchemaID() dsid := id.NewDatasetID() - dssfid := id.NewDatasetSchemaFieldID() + dssfid := id.NewDatasetFieldID() ti, _ := tag.NewItem(). NewID(). Label("Item"). @@ -26,7 +26,7 @@ func TestNewTag(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.IDListFrom([]id.TagID{ti.ID()})). + Tags(tag.IDList{ti.ID()}). Scene(sid). 
Build() type args struct { @@ -47,7 +47,7 @@ func TestNewTag(t *testing.T) { want: &TagDocument{ ID: tg.ID().String(), Label: "group", - Scene: sid.ID().String(), + Scene: sid.String(), Item: nil, Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, }, @@ -61,11 +61,11 @@ func TestNewTag(t *testing.T) { want: &TagDocument{ ID: ti.ID().String(), Label: "Item", - Scene: sid.ID().String(), + Scene: sid.String(), Item: &TagItemDocument{ - LinkedDatasetFieldID: dssfid.RefString(), - LinkedDatasetID: dsid.RefString(), - LinkedDatasetSchemaID: dssid.RefString(), + LinkedDatasetFieldID: dssfid.StringRef(), + LinkedDatasetID: dsid.StringRef(), + LinkedDatasetSchemaID: dssid.StringRef(), }, Group: nil, }, @@ -94,7 +94,7 @@ func TestNewTags(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.IDListFrom([]id.TagID{ti.ID()})). + Tags(id.TagIDList{ti.ID()}). Scene(sid). Build() tgi := tag.Tag(tg) @@ -121,7 +121,7 @@ func TestNewTags(t *testing.T) { &TagDocument{ ID: tg.ID().String(), Label: "group", - Scene: sid.ID().String(), + Scene: sid.String(), Item: nil, Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, }, @@ -140,7 +140,7 @@ func TestNewTags(t *testing.T) { &TagDocument{ ID: tg.ID().String(), Label: "group", - Scene: sid.ID().String(), + Scene: sid.String(), Item: nil, Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, }, @@ -273,7 +273,7 @@ func TestTagDocument_Model(t *testing.T) { sid := id.NewSceneID() dssid := id.NewDatasetSchemaID() dsid := id.NewDatasetID() - dssfid := id.NewDatasetSchemaFieldID() + dssfid := id.NewDatasetFieldID() ti, _ := tag.NewItem(). NewID(). Label("Item"). @@ -285,7 +285,7 @@ func TestTagDocument_Model(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.IDListFrom([]id.TagID{ti.ID()})). + Tags(tag.IDList{ti.ID()}). Scene(sid). 
Build() type fields struct { @@ -308,11 +308,11 @@ func TestTagDocument_Model(t *testing.T) { fields: fields{ ID: ti.ID().String(), Label: "Item", - Scene: sid.ID().String(), + Scene: sid.String(), Item: &TagItemDocument{ - LinkedDatasetFieldID: dssfid.RefString(), - LinkedDatasetID: dsid.RefString(), - LinkedDatasetSchemaID: dssid.RefString(), + LinkedDatasetFieldID: dssfid.StringRef(), + LinkedDatasetID: dsid.StringRef(), + LinkedDatasetSchemaID: dssid.StringRef(), }, Group: nil, }, @@ -325,7 +325,7 @@ func TestTagDocument_Model(t *testing.T) { fields: fields{ ID: tg.ID().String(), Label: "group", - Scene: sid.ID().String(), + Scene: sid.String(), Item: nil, Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, }, @@ -374,7 +374,7 @@ func TestTagDocument_ModelGroup(t *testing.T) { tg, _ := tag.NewGroup(). NewID(). Label("group"). - Tags(tag.IDListFrom([]id.TagID{ti.ID()})). + Tags(tag.IDList{ti.ID()}). Scene(sid). Build() type fields struct { @@ -426,7 +426,7 @@ func TestTagDocument_ModelGroup(t *testing.T) { fields: fields{ ID: tg.ID().String(), Label: "group", - Scene: sid.ID().String(), + Scene: sid.String(), Item: nil, Group: &TagGroupDocument{Tags: []string{ti.ID().String()}}, }, @@ -459,7 +459,7 @@ func TestTagDocument_ModelItem(t *testing.T) { sid := id.NewSceneID() dssid := id.NewDatasetSchemaID() dsid := id.NewDatasetID() - dssfid := id.NewDatasetSchemaFieldID() + dssfid := id.NewDatasetFieldID() ti, _ := tag.NewItem(). NewID(). Label("Item"). 
@@ -508,9 +508,9 @@ func TestTagDocument_ModelItem(t *testing.T) { Label: ti.Label(), Scene: ti.Scene().String(), Item: &TagItemDocument{ - LinkedDatasetFieldID: dssfid.RefString(), - LinkedDatasetID: dsid.RefString(), - LinkedDatasetSchemaID: dssid.RefString(), + LinkedDatasetFieldID: dssfid.StringRef(), + LinkedDatasetID: dsid.StringRef(), + LinkedDatasetSchemaID: dssid.StringRef(), }, Group: nil, }, diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 257b088fa..0e64aefed 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -39,10 +39,10 @@ func (r *projectRepo) Filtered(f repo.TeamFilter) repo.Project { } } -func (r *projectRepo) FindByIDs(ctx context.Context, ids []id.ProjectID) ([]*project.Project, error) { +func (r *projectRepo) FindByIDs(ctx context.Context, ids id.ProjectIDList) ([]*project.Project, error) { filter := bson.M{ "id": bson.M{ - "$in": id.ProjectIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*project.Project, 0, len(ids)) diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index 0c5d81b6a..ce170852a 100644 --- a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -43,10 +43,10 @@ func (r *propertyRepo) FindByID(ctx context.Context, id id.PropertyID) (*propert }) } -func (r *propertyRepo) FindByIDs(ctx context.Context, ids []id.PropertyID) (property.List, error) { +func (r *propertyRepo) FindByIDs(ctx context.Context, ids id.PropertyIDList) (property.List, error) { filter := bson.M{ "id": bson.M{ - "$in": id.PropertyIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make(property.List, 0, len(ids)) @@ -148,12 +148,12 @@ func (r *propertyRepo) Remove(ctx context.Context, id id.PropertyID) error { return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } -func (r *propertyRepo) RemoveAll(ctx context.Context, ids 
[]id.PropertyID) error { +func (r *propertyRepo) RemoveAll(ctx context.Context, ids id.PropertyIDList) error { if len(ids) == 0 { return nil } return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ - "id": bson.M{"$in": id.PropertyIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, })) } diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index acbbc6ecd..9702ca42f 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -44,10 +44,10 @@ func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, }) } -func (r *sceneRepo) FindByIDs(ctx context.Context, ids []id.SceneID) (scene.List, error) { +func (r *sceneRepo) FindByIDs(ctx context.Context, ids id.SceneIDList) (scene.List, error) { return r.find(ctx, make(scene.List, 0, len(ids)), bson.M{ "id": bson.M{ - "$in": id.SceneIDsToStrings(ids), + "$in": ids.Strings(), }, }) } @@ -59,8 +59,9 @@ func (r *sceneRepo) FindByProject(ctx context.Context, id id.ProjectID) (*scene. } func (r *sceneRepo) FindByTeam(ctx context.Context, teams ...id.TeamID) (scene.List, error) { + teams2 := id.TeamIDList(teams) if r.f.Readable != nil { - teams = user.TeamIDList(teams).Filter(r.f.Readable...) + teams2 = teams2.Intersect(r.f.Readable) } res, err := r.find(ctx, nil, bson.M{ - "team": bson.M{"$in": user.TeamIDList(teams).Strings()}, + "team": bson.M{"$in": teams2.Strings()}, diff --git a/internal/infrastructure/mongo/scene_lock.go b/internal/infrastructure/mongo/scene_lock.go index 8d9f66670..da8cff8a1 100644 --- a/internal/infrastructure/mongo/scene_lock.go +++ b/internal/infrastructure/mongo/scene_lock.go @@ -36,10 +36,10 @@ func (r *sceneLockRepo) GetLock(ctx context.Context, sceneID id.SceneID) (scene.
return c.Rows[0], nil } -func (r *sceneLockRepo) GetAllLock(ctx context.Context, ids []id.SceneID) ([]scene.LockMode, error) { +func (r *sceneLockRepo) GetAllLock(ctx context.Context, ids id.SceneIDList) ([]scene.LockMode, error) { filter := bson.D{ {Key: "scene", Value: bson.D{ - {Key: "$in", Value: id.SceneIDsToStrings(ids)}, + {Key: "$in", Value: ids.Strings()}, }}, } c := mongodoc.SceneLockConsumer{ diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index 001b90cdd..d37a3eed6 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -44,10 +44,10 @@ func (r *tagRepo) FindByID(ctx context.Context, id id.TagID) (tag.Tag, error) { }) } -func (r *tagRepo) FindByIDs(ctx context.Context, ids []id.TagID) ([]*tag.Tag, error) { +func (r *tagRepo) FindByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Tag, error) { filter := bson.M{ "id": bson.M{ - "$in": id.TagIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*tag.Tag, 0, len(ids)) @@ -75,10 +75,10 @@ func (r *tagRepo) FindItemByID(ctx context.Context, id id.TagID) (*tag.Item, err return r.findItemOne(ctx, filter) } -func (r *tagRepo) FindItemByIDs(ctx context.Context, ids []id.TagID) ([]*tag.Item, error) { +func (r *tagRepo) FindItemByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Item, error) { filter := bson.M{ "id": bson.M{ - "$in": id.TagIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*tag.Item, 0, len(ids)) @@ -96,10 +96,10 @@ func (r *tagRepo) FindGroupByID(ctx context.Context, id id.TagID) (*tag.Group, e return r.findGroupOne(ctx, filter) } -func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids []id.TagID) ([]*tag.Group, error) { +func (r *tagRepo) FindGroupByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Group, error) { filter := bson.M{ "id": bson.M{ - "$in": id.TagIDsToStrings(ids), + "$in": ids.Strings(), }, } dst := make([]*tag.Group, 0, len(ids)) @@ -143,12 +143,12 @@ func (r *tagRepo) 
Remove(ctx context.Context, id id.TagID) error { return r.client.RemoveOne(ctx, r.writeFilter(bson.M{"id": id.String()})) } -func (r *tagRepo) RemoveAll(ctx context.Context, ids []id.TagID) error { +func (r *tagRepo) RemoveAll(ctx context.Context, ids id.TagIDList) error { if len(ids) == 0 { return nil } return r.client.RemoveAll(ctx, r.writeFilter(bson.M{ - "id": bson.M{"$in": id.TagIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, })) } @@ -231,7 +231,7 @@ func (r *tagRepo) findGroups(ctx context.Context, dst []*tag.Group, filter inter return c.GroupRows, nil } -func filterTags(ids []id.TagID, rows []*tag.Tag) []*tag.Tag { +func filterTags(ids id.TagIDList, rows []*tag.Tag) []*tag.Tag { res := make([]*tag.Tag, 0, len(ids)) for _, tid := range ids { var r2 *tag.Tag @@ -249,7 +249,7 @@ func filterTags(ids []id.TagID, rows []*tag.Tag) []*tag.Tag { return res } -func filterTagItems(ids []id.TagID, rows []*tag.Item) []*tag.Item { +func filterTagItems(ids id.TagIDList, rows []*tag.Item) []*tag.Item { res := make([]*tag.Item, 0, len(ids)) for _, tid := range ids { var r2 *tag.Item @@ -264,7 +264,7 @@ func filterTagItems(ids []id.TagID, rows []*tag.Item) []*tag.Item { return res } -func filterTagGroups(ids []id.TagID, rows []*tag.Group) []*tag.Group { +func filterTagGroups(ids id.TagIDList, rows []*tag.Group) []*tag.Group { res := make([]*tag.Group, 0, len(ids)) for _, tid := range ids { var r2 *tag.Group diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index 1b058b8ca..f52ea730c 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -37,10 +37,10 @@ func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) (user.TeamList, }) } -func (r *teamRepo) FindByIDs(ctx context.Context, ids []id.TeamID) (user.TeamList, error) { +func (r *teamRepo) FindByIDs(ctx context.Context, ids id.TeamIDList) (user.TeamList, error) { dst := make([]*user.Team, 0, len(ids)) res, err := 
r.find(ctx, dst, bson.M{ - "id": bson.M{"$in": id.TeamIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, }) if err != nil { return nil, err @@ -73,12 +73,12 @@ func (r *teamRepo) Remove(ctx context.Context, id id.TeamID) error { return r.client.RemoveOne(ctx, bson.M{"id": id.String()}) } -func (r *teamRepo) RemoveAll(ctx context.Context, ids []id.TeamID) error { +func (r *teamRepo) RemoveAll(ctx context.Context, ids id.TeamIDList) error { if len(ids) == 0 { return nil } return r.client.RemoveAll(ctx, bson.M{ - "id": bson.M{"$in": id.TeamIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, }) } diff --git a/internal/infrastructure/mongo/user.go b/internal/infrastructure/mongo/user.go index 5cec81f16..4c53b037d 100644 --- a/internal/infrastructure/mongo/user.go +++ b/internal/infrastructure/mongo/user.go @@ -29,10 +29,10 @@ func (r *userRepo) init() { } } -func (r *userRepo) FindByIDs(ctx context.Context, ids []id.UserID) ([]*user.User, error) { +func (r *userRepo) FindByIDs(ctx context.Context, ids id.UserIDList) ([]*user.User, error) { dst := make([]*user.User, 0, len(ids)) res, err := r.find(ctx, dst, bson.M{ - "id": bson.M{"$in": id.UserIDsToStrings(ids)}, + "id": bson.M{"$in": ids.Strings()}, }) if err != nil { return nil, err diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index cb5c3106e..9ec5d54cd 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -360,7 +360,7 @@ func (i *Dataset) GraphFetch(ctx context.Context, id id.DatasetID, depth int, op } res = append(res, d) next, done = it.Next(d) - if next.ID().IsNil() { + if next.IsNil() { return nil, rerror.ErrInternalBy(errors.New("next id is nil")) } if done { @@ -390,7 +390,7 @@ func (i *Dataset) GraphFetchSchema(ctx context.Context, id id.DatasetSchemaID, d } res = append(res, d) next, done = it.Next(d) - if next.ID().IsNil() { + if next.IsNil() { return nil, rerror.ErrInternalBy(errors.New("next id is 
nil")) } if done { @@ -480,7 +480,7 @@ func (i *Dataset) Sync(ctx context.Context, sceneID id.SceneID, url string, oper if err := i.layerRepo.SaveAll(ctx, result.Layers.List()); err != nil { return nil, nil, err } - if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers.All()); err != nil { + if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers.List()); err != nil { return nil, nil, err } if err := i.datasetRepo.RemoveAll(ctx, result.RemovedDatasets); err != nil { diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index 33f34193f..026584a8e 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -345,7 +345,7 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, } // create item layers - var representativeFieldID *id.DatasetSchemaFieldID + var representativeFieldID *id.DatasetFieldID if inp.RepresentativeFieldId != nil { representativeFieldID = inp.RepresentativeFieldId } else { diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go index 21b54e67b..f8b6a8db9 100644 --- a/internal/usecase/interactor/tag.go +++ b/internal/usecase/interactor/tag.go @@ -102,12 +102,11 @@ func (i *Tag) CreateGroup(ctx context.Context, inp interfaces.CreateTagGroupPara return nil, interfaces.ErrOperationDenied } - list := tag.IDListFrom(inp.Tags) group, err := tag.NewGroup(). NewID(). Label(inp.Label). Scene(inp.SceneID). - Tags(list). + Tags(inp.Tags). 
Build() if err != nil { @@ -212,7 +211,7 @@ func (i *Tag) DetachItemFromGroup(ctx context.Context, inp interfaces.DetachItem return nil, errors.New("tag item is not attached to the group") } - tg.Tags().Remove(inp.ItemID) + tg.RemoveTag(inp.ItemID) ti.SetParent(nil) tgt := tag.Tag(tg) @@ -276,7 +275,7 @@ func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Oper } if group := tag.ToTagGroup(t); group != nil { - if len(group.Tags().Tags()) != 0 { + if len(group.Tags()) != 0 { return nil, nil, interfaces.ErrNonemptyTagGroupCannotDelete } } @@ -287,7 +286,7 @@ func (i *Tag) Remove(ctx context.Context, tagID id.TagID, operator *usecase.Oper return nil, nil, err } if g != nil { - g.Tags().Remove(item.ID()) + g.RemoveTag(item.ID()) if err := i.tagRepo.Save(ctx, g); err != nil { return nil, nil, err } diff --git a/internal/usecase/interactor/team.go b/internal/usecase/interactor/team.go index 0f012b3d1..d0030521d 100644 --- a/internal/usecase/interactor/team.go +++ b/internal/usecase/interactor/team.go @@ -185,7 +185,7 @@ func (i *Team) RemoveMember(ctx context.Context, id id.TeamID, u id.UserID, oper return nil, interfaces.ErrOperationDenied } - if u.ID() == operator.User.ID() { + if u == operator.User { return nil, interfaces.ErrOwnerCannotLeaveTheTeam } @@ -229,7 +229,7 @@ func (i *Team) UpdateMember(ctx context.Context, id id.TeamID, u id.UserID, role return nil, interfaces.ErrOperationDenied } - if u.ID() == operator.User.ID() { + if u == operator.User { return nil, interfaces.ErrCannotChangeOwnerRole } diff --git a/internal/usecase/interfaces/dataset.go b/internal/usecase/interfaces/dataset.go index 15e9ab6e1..4328d2f7a 100644 --- a/internal/usecase/interfaces/dataset.go +++ b/internal/usecase/interfaces/dataset.go @@ -13,7 +13,7 @@ import ( type AddDatasetSchemaParam struct { SceneId id.SceneID Name string - RepresentativeField *id.DatasetSchemaFieldID + RepresentativeField *id.DatasetFieldID } type AddDynamicDatasetSchemaParam struct { diff 
--git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go index e9cfa5a1e..f43779c4a 100644 --- a/internal/usecase/interfaces/layer.go +++ b/internal/usecase/interfaces/layer.go @@ -29,7 +29,7 @@ type AddLayerGroupInput struct { ExtensionID *id.PluginExtensionID Index *int LinkedDatasetSchemaID *id.DatasetSchemaID - RepresentativeFieldId *id.DatasetSchemaFieldID + RepresentativeFieldId *id.DatasetFieldID Name string } diff --git a/internal/usecase/interfaces/tag.go b/internal/usecase/interfaces/tag.go index bc18e7e7c..cad800eec 100644 --- a/internal/usecase/interfaces/tag.go +++ b/internal/usecase/interfaces/tag.go @@ -20,7 +20,7 @@ type CreateTagItemParam struct { Parent *id.TagID LinkedDatasetSchemaID *id.DatasetSchemaID LinkedDatasetID *id.DatasetID - LinkedDatasetField *id.DatasetSchemaFieldID + LinkedDatasetField *id.DatasetFieldID } type CreateTagGroupParam struct { diff --git a/internal/usecase/operator.go b/internal/usecase/operator.go index 28eddf88a..5da9a52a4 100644 --- a/internal/usecase/operator.go +++ b/internal/usecase/operator.go @@ -16,7 +16,7 @@ type Operator struct { OwningScenes scene.IDList } -func (o *Operator) Teams(r user.Role) []id.TeamID { +func (o *Operator) Teams(r user.Role) user.TeamIDList { if o == nil { return nil } @@ -45,15 +45,15 @@ func (o *Operator) AllOwningTeams() user.TeamIDList { } func (o *Operator) IsReadableTeam(team ...id.TeamID) bool { - return o.AllReadableTeams().Filter(team...).Len() > 0 + return o.AllReadableTeams().Intersect(team).Len() > 0 } func (o *Operator) IsWritableTeam(team ...id.TeamID) bool { - return o.AllWritableTeams().Filter(team...).Len() > 0 + return o.AllWritableTeams().Intersect(team).Len() > 0 } func (o *Operator) IsOwningTeam(team ...id.TeamID) bool { - return o.AllOwningTeams().Filter(team...).Len() > 0 + return o.AllOwningTeams().Intersect(team).Len() > 0 } func (o *Operator) AllReadableScenes() scene.IDList { @@ -69,15 +69,15 @@ func (o *Operator) AllOwningScenes() 
scene.IDList { } func (o *Operator) IsReadableScene(scene ...id.SceneID) bool { - return o.AllReadableScenes().Includes(scene...) + return o.AllReadableScenes().Has(scene...) } func (o *Operator) IsWritableScene(scene ...id.SceneID) bool { - return o.AllWritableScenes().Includes(scene...) + return o.AllWritableScenes().Has(scene...) } func (o *Operator) IsOwningScene(scene ...id.SceneID) bool { - return o.AllOwningScenes().Includes(scene...) + return o.AllOwningScenes().Has(scene...) } func (o *Operator) AddNewTeam(team id.TeamID) { diff --git a/internal/usecase/repo/asset.go b/internal/usecase/repo/asset.go index 5010b5101..a01e5016d 100644 --- a/internal/usecase/repo/asset.go +++ b/internal/usecase/repo/asset.go @@ -18,7 +18,7 @@ type Asset interface { Filtered(TeamFilter) Asset FindByTeam(context.Context, id.TeamID, AssetFilter) ([]*asset.Asset, *usecase.PageInfo, error) FindByID(context.Context, id.AssetID) (*asset.Asset, error) - FindByIDs(context.Context, []id.AssetID) ([]*asset.Asset, error) + FindByIDs(context.Context, id.AssetIDList) ([]*asset.Asset, error) Save(context.Context, *asset.Asset) error Remove(context.Context, id.AssetID) error } diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index fff4b4889..6f991c05d 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -96,11 +96,11 @@ func (f TeamFilter) Merge(g TeamFilter) TeamFilter { } func (f TeamFilter) CanRead(id user.TeamID) bool { - return f.Readable == nil || f.Readable.Includes(id) + return f.Readable == nil || f.Readable.Has(id) } func (f TeamFilter) CanWrite(id user.TeamID) bool { - return f.Writable == nil || f.Writable.Includes(id) + return f.Writable == nil || f.Writable.Has(id) } type SceneFilter struct { @@ -145,9 +145,9 @@ func (f SceneFilter) Clone() SceneFilter { } func (f SceneFilter) CanRead(id scene.ID) bool { - return f.Readable == nil || f.Readable.Includes(id) + return f.Readable == nil || 
f.Readable.Has(id) } func (f SceneFilter) CanWrite(id scene.ID) bool { - return f.Writable == nil || f.Writable.Includes(id) + return f.Writable == nil || f.Writable.Has(id) } diff --git a/internal/usecase/repo/dataset.go b/internal/usecase/repo/dataset.go index 8a5a1f09b..572234f75 100644 --- a/internal/usecase/repo/dataset.go +++ b/internal/usecase/repo/dataset.go @@ -11,14 +11,14 @@ import ( type Dataset interface { Filtered(SceneFilter) Dataset FindByID(context.Context, id.DatasetID) (*dataset.Dataset, error) - FindByIDs(context.Context, []id.DatasetID) (dataset.List, error) + FindByIDs(context.Context, id.DatasetIDList) (dataset.List, error) FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) FindBySchemaAll(context.Context, id.DatasetSchemaID) (dataset.List, error) - FindGraph(context.Context, id.DatasetID, []id.DatasetSchemaFieldID) (dataset.List, error) + FindGraph(context.Context, id.DatasetID, id.DatasetFieldIDList) (dataset.List, error) Save(context.Context, *dataset.Dataset) error SaveAll(context.Context, dataset.List) error Remove(context.Context, id.DatasetID) error - RemoveAll(context.Context, []id.DatasetID) error + RemoveAll(context.Context, id.DatasetIDList) error RemoveByScene(context.Context, id.SceneID) error } @@ -29,7 +29,7 @@ func DatasetLoaderFrom(r Dataset) dataset.Loader { } func DatasetGraphLoaderFrom(r Dataset) dataset.GraphLoader { - return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetSchemaFieldID) (dataset.List, *dataset.Field, error) { + return func(ctx context.Context, root id.DatasetID, fields ...id.DatasetFieldID) (dataset.List, *dataset.Field, error) { if len(fields) <= 1 { d, err := r.FindByID(ctx, root) if err != nil { diff --git a/internal/usecase/repo/dataset_schema.go b/internal/usecase/repo/dataset_schema.go index 50cc2b06d..fe99bfb64 100644 --- a/internal/usecase/repo/dataset_schema.go +++ b/internal/usecase/repo/dataset_schema.go @@ -11,7 
+11,7 @@ import ( type DatasetSchema interface { Filtered(SceneFilter) DatasetSchema FindByID(context.Context, id.DatasetSchemaID) (*dataset.Schema, error) - FindByIDs(context.Context, []id.DatasetSchemaID) (dataset.SchemaList, error) + FindByIDs(context.Context, id.DatasetSchemaIDList) (dataset.SchemaList, error) FindByScene(context.Context, id.SceneID, *usecase.Pagination) (dataset.SchemaList, *usecase.PageInfo, error) FindBySceneAll(context.Context, id.SceneID) (dataset.SchemaList, error) FindBySceneAndSource(context.Context, id.SceneID, string) (dataset.SchemaList, error) @@ -20,6 +20,6 @@ type DatasetSchema interface { Save(context.Context, *dataset.Schema) error SaveAll(context.Context, dataset.SchemaList) error Remove(context.Context, id.DatasetSchemaID) error - RemoveAll(context.Context, []id.DatasetSchemaID) error + RemoveAll(context.Context, id.DatasetSchemaIDList) error RemoveByScene(context.Context, id.SceneID) error } diff --git a/internal/usecase/repo/layer.go b/internal/usecase/repo/layer.go index 28d5edad3..46870661f 100644 --- a/internal/usecase/repo/layer.go +++ b/internal/usecase/repo/layer.go @@ -10,15 +10,15 @@ import ( type Layer interface { Filtered(SceneFilter) Layer FindByID(context.Context, id.LayerID) (layer.Layer, error) - FindByIDs(context.Context, []id.LayerID) (layer.List, error) + FindByIDs(context.Context, id.LayerIDList) (layer.List, error) FindItemByID(context.Context, id.LayerID) (*layer.Item, error) - FindItemByIDs(context.Context, []id.LayerID) (layer.ItemList, error) + FindItemByIDs(context.Context, id.LayerIDList) (layer.ItemList, error) FindAllByDatasetSchema(context.Context, id.DatasetSchemaID) (layer.List, error) FindGroupByID(context.Context, id.LayerID) (*layer.Group, error) - FindGroupByIDs(context.Context, []id.LayerID) (layer.GroupList, error) + FindGroupByIDs(context.Context, id.LayerIDList) (layer.GroupList, error) FindGroupBySceneAndLinkedDatasetSchema(context.Context, id.SceneID, id.DatasetSchemaID) 
(layer.GroupList, error) FindParentByID(context.Context, id.LayerID) (*layer.Group, error) - FindParentsByIDs(context.Context, []id.LayerID) (layer.GroupList, error) + FindParentsByIDs(context.Context, id.LayerIDList) (layer.GroupList, error) FindByPluginAndExtension(context.Context, id.PluginID, *id.PluginExtensionID) (layer.List, error) FindByPluginAndExtensionOfBlocks(context.Context, id.PluginID, *id.PluginExtensionID) (layer.List, error) FindByProperty(context.Context, id.PropertyID) (layer.Layer, error) @@ -28,7 +28,7 @@ type Layer interface { SaveAll(context.Context, layer.List) error UpdatePlugin(context.Context, id.PluginID, id.PluginID) error Remove(context.Context, id.LayerID) error - RemoveAll(context.Context, []id.LayerID) error + RemoveAll(context.Context, id.LayerIDList) error RemoveByScene(context.Context, id.SceneID) error } diff --git a/internal/usecase/repo/project.go b/internal/usecase/repo/project.go index 1c47719d4..f65a1b6d8 100644 --- a/internal/usecase/repo/project.go +++ b/internal/usecase/repo/project.go @@ -10,7 +10,7 @@ import ( type Project interface { Filtered(TeamFilter) Project - FindByIDs(context.Context, []id.ProjectID) ([]*project.Project, error) + FindByIDs(context.Context, id.ProjectIDList) ([]*project.Project, error) FindByID(context.Context, id.ProjectID) (*project.Project, error) FindByTeam(context.Context, id.TeamID, *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) FindByPublicName(context.Context, string) (*project.Project, error) diff --git a/internal/usecase/repo/property.go b/internal/usecase/repo/property.go index 9c2464dc7..c614db033 100644 --- a/internal/usecase/repo/property.go +++ b/internal/usecase/repo/property.go @@ -10,7 +10,7 @@ import ( type Property interface { Filtered(SceneFilter) Property FindByID(context.Context, id.PropertyID) (*property.Property, error) - FindByIDs(context.Context, []id.PropertyID) (property.List, error) + FindByIDs(context.Context, id.PropertyIDList) (property.List, 
error) FindLinkedAll(context.Context, id.SceneID) (property.List, error) FindByDataset(context.Context, id.DatasetSchemaID, id.DatasetID) (property.List, error) FindBySchema(context.Context, []id.PropertySchemaID, id.SceneID) (property.List, error) @@ -19,7 +19,7 @@ type Property interface { SaveAll(context.Context, property.List) error UpdateSchemaPlugin(context.Context, id.PluginID, id.PluginID, id.SceneID) error Remove(context.Context, id.PropertyID) error - RemoveAll(context.Context, []id.PropertyID) error + RemoveAll(context.Context, id.PropertyIDList) error RemoveByScene(context.Context, id.SceneID) error } diff --git a/internal/usecase/repo/scene.go b/internal/usecase/repo/scene.go index 9d0b1f137..236f27708 100644 --- a/internal/usecase/repo/scene.go +++ b/internal/usecase/repo/scene.go @@ -10,7 +10,7 @@ import ( type Scene interface { Filtered(TeamFilter) Scene FindByID(context.Context, id.SceneID) (*scene.Scene, error) - FindByIDs(context.Context, []id.SceneID) (scene.List, error) + FindByIDs(context.Context, id.SceneIDList) (scene.List, error) FindByTeam(context.Context, ...id.TeamID) (scene.List, error) FindByProject(context.Context, id.ProjectID) (*scene.Scene, error) Save(context.Context, *scene.Scene) error diff --git a/internal/usecase/repo/scene_lock.go b/internal/usecase/repo/scene_lock.go index d0358adef..b8397a68b 100644 --- a/internal/usecase/repo/scene_lock.go +++ b/internal/usecase/repo/scene_lock.go @@ -9,7 +9,7 @@ import ( type SceneLock interface { GetLock(context.Context, id.SceneID) (scene.LockMode, error) - GetAllLock(context.Context, []id.SceneID) ([]scene.LockMode, error) + GetAllLock(context.Context, id.SceneIDList) ([]scene.LockMode, error) SaveLock(context.Context, id.SceneID, scene.LockMode) error ReleaseAllLock(context.Context) error } diff --git a/internal/usecase/repo/tag.go b/internal/usecase/repo/tag.go index 86b9f886e..343a283ce 100644 --- a/internal/usecase/repo/tag.go +++ b/internal/usecase/repo/tag.go @@ -10,18 +10,18 @@ 
import ( type Tag interface { Filtered(SceneFilter) Tag FindByID(context.Context, id.TagID) (tag.Tag, error) - FindByIDs(context.Context, []id.TagID) ([]*tag.Tag, error) + FindByIDs(context.Context, id.TagIDList) ([]*tag.Tag, error) FindByScene(context.Context, id.SceneID) ([]*tag.Tag, error) FindItemByID(context.Context, id.TagID) (*tag.Item, error) - FindItemByIDs(context.Context, []id.TagID) ([]*tag.Item, error) + FindItemByIDs(context.Context, id.TagIDList) ([]*tag.Item, error) FindGroupByID(context.Context, id.TagID) (*tag.Group, error) - FindGroupByIDs(context.Context, []id.TagID) ([]*tag.Group, error) + FindGroupByIDs(context.Context, id.TagIDList) ([]*tag.Group, error) FindRootsByScene(context.Context, id.SceneID) ([]*tag.Tag, error) FindGroupByItem(context.Context, id.TagID) (*tag.Group, error) Save(context.Context, tag.Tag) error SaveAll(context.Context, []*tag.Tag) error Remove(context.Context, id.TagID) error - RemoveAll(context.Context, []id.TagID) error + RemoveAll(context.Context, id.TagIDList) error RemoveByScene(context.Context, id.SceneID) error } diff --git a/internal/usecase/repo/team.go b/internal/usecase/repo/team.go index cc8c251c6..8ac266224 100644 --- a/internal/usecase/repo/team.go +++ b/internal/usecase/repo/team.go @@ -9,10 +9,10 @@ import ( type Team interface { FindByUser(context.Context, id.UserID) (user.TeamList, error) - FindByIDs(context.Context, []id.TeamID) (user.TeamList, error) + FindByIDs(context.Context, id.TeamIDList) (user.TeamList, error) FindByID(context.Context, id.TeamID) (*user.Team, error) Save(context.Context, *user.Team) error SaveAll(context.Context, []*user.Team) error Remove(context.Context, id.TeamID) error - RemoveAll(context.Context, []id.TeamID) error + RemoveAll(context.Context, id.TeamIDList) error } diff --git a/internal/usecase/repo/user.go b/internal/usecase/repo/user.go index 8fc7abec6..9ebb545be 100644 --- a/internal/usecase/repo/user.go +++ b/internal/usecase/repo/user.go @@ -8,7 +8,7 @@ import ( ) 
type User interface { - FindByIDs(context.Context, []id.UserID) ([]*user.User, error) + FindByIDs(context.Context, id.UserIDList) ([]*user.User, error) FindByID(context.Context, id.UserID) (*user.User, error) FindByAuth0Sub(context.Context, string) (*user.User, error) FindByEmail(context.Context, string) (*user.User, error) diff --git a/pkg/asset/asset.go b/pkg/asset/asset.go index ad417d0da..2f04f913a 100644 --- a/pkg/asset/asset.go +++ b/pkg/asset/asset.go @@ -49,5 +49,5 @@ func (a *Asset) CreatedAt() time.Time { if a == nil { return time.Time{} } - return createdAt(a.id) + return a.id.Timestamp() } diff --git a/pkg/asset/asset_test.go b/pkg/asset/asset_test.go index f0f6e1b28..3863f4c95 100644 --- a/pkg/asset/asset_test.go +++ b/pkg/asset/asset_test.go @@ -10,7 +10,7 @@ import ( func TestAsset(t *testing.T) { aid := NewID() tid := NewTeamID() - d := createdAt(aid) + d := aid.Timestamp() tests := []struct { Name string diff --git a/pkg/asset/id.go b/pkg/asset/id.go index 7a2599923..8fb4f56cc 100644 --- a/pkg/asset/id.go +++ b/pkg/asset/id.go @@ -1,8 +1,6 @@ package asset import ( - "time" - "github.com/reearth/reearth-backend/pkg/id" ) @@ -21,11 +19,4 @@ var TeamIDFrom = id.TeamIDFrom var IDFromRef = id.AssetIDFromRef var TeamIDFromRef = id.TeamIDFromRef -var IDFromRefID = id.AssetIDFromRefID -var TeamIDFromRefID = id.TeamIDFromRefID - var ErrInvalidID = id.ErrInvalidID - -func createdAt(i ID) time.Time { - return id.ID(i).Timestamp() -} diff --git a/pkg/auth/builder.go b/pkg/auth/builder.go index 9eed6e642..a2de4e7e3 100644 --- a/pkg/auth/builder.go +++ b/pkg/auth/builder.go @@ -16,7 +16,7 @@ func NewRequest() *RequestBuilder { } func (b *RequestBuilder) Build() (*Request, error) { - if id.ID(b.r.id).IsNil() { + if b.r.id.IsNil() { return nil, id.ErrInvalidID } b.r.createdAt = time.Now() @@ -37,7 +37,7 @@ func (b *RequestBuilder) ID(id id.AuthRequestID) *RequestBuilder { } func (b *RequestBuilder) NewID() *RequestBuilder { - b.r.id = id.AuthRequestID(id.New()) + 
b.r.id = id.NewAuthRequestID() return b } diff --git a/pkg/dataset/graph_iterator_test.go b/pkg/dataset/graph_iterator_test.go index 04837c039..65d880fba 100644 --- a/pkg/dataset/graph_iterator_test.go +++ b/pkg/dataset/graph_iterator_test.go @@ -18,18 +18,18 @@ func TestDatasetGraphIterator(t *testing.T) { d32id := NewID() d0, _ := New().ID(d0id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(NewFieldID(), ValueTypeRef.ValueFrom(d11id.ID()), ""), - NewField(NewFieldID(), ValueTypeRef.ValueFrom(d12id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d11id), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d12id), ""), }).Build() d11, _ := New().ID(d11id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(NewFieldID(), ValueTypeRef.ValueFrom(d21id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d21id), ""), }).Build() d12, _ := New().ID(d12id).Schema(dsid).Scene(sid).Fields([]*Field{ NewField(NewFieldID(), ValueTypeString.ValueFrom("hoge"), ""), }).Build() d21, _ := New().ID(d21id).Schema(dsid).Scene(sid).Fields([]*Field{ - NewField(NewFieldID(), ValueTypeRef.ValueFrom(d31id.ID()), ""), - NewField(NewFieldID(), ValueTypeRef.ValueFrom(d32id.ID()), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d31id), ""), + NewField(NewFieldID(), ValueTypeRef.ValueFrom(d32id), ""), }).Build() d31, _ := New().ID(d31id).Schema(dsid).Scene(sid).Fields([]*Field{ NewField(NewFieldID(), ValueTypeString.ValueFrom("foo"), ""), diff --git a/pkg/dataset/id.go b/pkg/dataset/id.go index 701ffb114..47e93d428 100644 --- a/pkg/dataset/id.go +++ b/pkg/dataset/id.go @@ -3,43 +3,38 @@ package dataset import "github.com/reearth/reearth-backend/pkg/id" type ID = id.DatasetID -type FieldID = id.DatasetSchemaFieldID +type FieldID = id.DatasetFieldID type SchemaID = id.DatasetSchemaID type SceneID = id.SceneID var NewID = id.NewDatasetID var NewSchemaID = id.NewDatasetSchemaID -var NewFieldID = id.NewDatasetSchemaFieldID +var NewFieldID = id.NewDatasetFieldID var 
NewSceneID = id.NewSceneID var MustID = id.MustDatasetID var MustSchemaID = id.MustDatasetSchemaID -var MustFieldID = id.MustDatasetSchemaFieldID +var MustFieldID = id.MustDatasetFieldID var MustSceneID = id.MustSceneID var IDFrom = id.DatasetIDFrom var SchemaIDFrom = id.DatasetSchemaIDFrom -var FieldIDFrom = id.DatasetSchemaFieldIDFrom +var FieldIDFrom = id.DatasetFieldIDFrom var SceneIDFrom = id.SceneIDFrom var IDFromRef = id.DatasetIDFromRef var SchemaIDFromRef = id.DatasetSchemaIDFromRef -var FieldIDFromRef = id.DatasetSchemaFieldIDFromRef +var FieldIDFromRef = id.DatasetFieldIDFromRef var SceneIDFromRef = id.SceneIDFromRef -var IDFromRefID = id.DatasetIDFromRefID -var SchemaIDFromRefID = id.DatasetSchemaIDFromRefID -var FieldIDFromRefID = id.DatasetSchemaFieldIDFromRefID -var SceneIDFromRefID = id.SceneIDFromRefID - type IDSet = id.DatasetIDSet type SchemaIDSet = id.DatasetSchemaIDSet -type FieldIDSet = id.DatasetSchemaFieldIDSet +type FieldIDSet = id.DatasetFieldIDSet type SceneIDSet = id.SceneIDSet var NewIDSet = id.NewDatasetIDSet var NewSchemaIDset = id.NewDatasetSchemaIDSet -var NewFieldIDset = id.NewDatasetSchemaFieldIDSet +var NewFieldIDset = id.NewDatasetFieldIDSet var NewSceneIDset = id.NewSceneIDSet var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/dataset/list_test.go b/pkg/dataset/list_test.go index f98cd3c22..dde511008 100644 --- a/pkg/dataset/list_test.go +++ b/pkg/dataset/list_test.go @@ -50,8 +50,8 @@ func TestDatasetMapGraphSearchByFields(t *testing.T) { fid2 := NewFieldID() fid3 := NewFieldID() sid := NewSceneID() - v1 := ValueTypeRef.ValueFrom(did2.ID()) - v2 := ValueTypeRef.ValueFrom(did3.ID()) + v1 := ValueTypeRef.ValueFrom(did2) + v2 := ValueTypeRef.ValueFrom(did3) v3 := ValueTypeString.ValueFrom("value") f3 := NewField(fid3, v3, "") d1, _ := New().ID(did1).Scene(sid).Fields([]*Field{ diff --git a/pkg/id/asset_gen.go b/pkg/id/asset_gen.go deleted file mode 100644 index ac082c89e..000000000 --- a/pkg/id/asset_gen.go +++ /dev/null @@ 
-1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// AssetID is an ID for Asset. -type AssetID ID - -// NewAssetID generates a new AssetId. -func NewAssetID() AssetID { - return AssetID(New()) -} - -// AssetIDFrom generates a new AssetID from a string. -func AssetIDFrom(i string) (nid AssetID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = AssetID(did) - return -} - -// MustAssetID generates a new AssetID from a string, but panics if the string cannot be parsed. -func MustAssetID(i string) AssetID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return AssetID(did) -} - -// AssetIDFromRef generates a new AssetID from a string ref. -func AssetIDFromRef(i *string) *AssetID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := AssetID(*did) - return &nid -} - -// AssetIDFromRefID generates a new AssetID from a ref of a generic ID. -func AssetIDFromRefID(i *ID) *AssetID { - if i == nil || i.IsNil() { - return nil - } - nid := AssetID(*i) - return &nid -} - -// ID returns a domain ID. -func (d AssetID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d AssetID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d AssetID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d AssetID) GoString() string { - return "AssetID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d AssetID) Ref() *AssetID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. 
-func (d AssetID) Contains(ids []AssetID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *AssetID) CopyRef() *AssetID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *AssetID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *AssetID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *AssetID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *AssetID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = AssetIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *AssetID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *AssetID) UnmarshalText(text []byte) (err error) { - *d, err = AssetIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d AssetID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *AssetID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// AssetIDsToStrings converts IDs into a string slice. -func AssetIDsToStrings(ids []AssetID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// AssetIDsFrom converts a string slice into a ID slice. 
-func AssetIDsFrom(ids []string) ([]AssetID, error) { - dids := make([]AssetID, 0, len(ids)) - for _, i := range ids { - did, err := AssetIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// AssetIDsFromID converts a generic ID slice into a ID slice. -func AssetIDsFromID(ids []ID) []AssetID { - dids := make([]AssetID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, AssetID(i)) - } - return dids -} - -// AssetIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func AssetIDsFromIDRef(ids []*ID) []AssetID { - dids := make([]AssetID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, AssetID(*i)) - } - } - return dids -} - -// AssetIDsToID converts a ID slice into a generic ID slice. -func AssetIDsToID(ids []AssetID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// AssetIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func AssetIDsToIDRef(ids []*AssetID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// AssetIDSet represents a set of AssetIDs -type AssetIDSet struct { - m map[AssetID]struct{} - s []AssetID -} - -// NewAssetIDSet creates a new AssetIDSet -func NewAssetIDSet() *AssetIDSet { - return &AssetIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *AssetIDSet) Add(p ...AssetID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[AssetID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []AssetID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *AssetIDSet) AddRef(p *AssetID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *AssetIDSet) Has(p AssetID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *AssetIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *AssetIDSet) All() []AssetID { - if s == nil { - return nil - } - return append([]AssetID{}, s.s...) -} - -// Clone returns a cloned set -func (s *AssetIDSet) Clone() *AssetIDSet { - if s == nil { - return NewAssetIDSet() - } - s2 := NewAssetIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *AssetIDSet) Merge(s2 *AssetIDSet) *AssetIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/asset_gen_test.go b/pkg/id/asset_gen_test.go deleted file mode 100644 index 11101dddc..000000000 --- a/pkg/id/asset_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewAssetID(t *testing.T) { - id := NewAssetID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestAssetIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result AssetID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result AssetID - err error - }{ - result: AssetID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result AssetID - err error - }{ - result: AssetID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result AssetID - err error - }{ - result: AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := AssetIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustAssetID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected AssetID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustAssetID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func 
TestAssetIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *AssetID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &AssetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := AssetIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestAssetIDFromRefID(t *testing.T) { - id := New() - id2 := AssetIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, AssetIDFromRefID(nil)) - assert.Nil(t, AssetIDFromRefID(&ID{})) -} - -func TestAssetID_ID(t *testing.T) { - id := New() - id2 := AssetIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestAssetID_String(t *testing.T) { - id := New() - id2 := AssetIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", AssetID{}.String()) -} - -func TestAssetID_RefString(t *testing.T) { - id := NewAssetID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, AssetID{}.RefString()) -} - -func TestAssetID_GoString(t *testing.T) { - id := New() - id2 := AssetIDFromRefID(&id) - assert.Equal(t, "AssetID("+id.String()+")", id2.GoString()) - assert.Equal(t, "AssetID()", AssetID{}.GoString()) -} - -func TestAssetID_Ref(t *testing.T) { - id := NewAssetID() - assert.Equal(t, AssetID(id), *id.Ref()) - assert.Nil(t, (&AssetID{}).Ref()) -} - -func TestAssetID_Contains(t *testing.T) { - id := NewAssetID() - id2 := NewAssetID() - assert.True(t, id.Contains([]AssetID{id, id2})) - assert.False(t, AssetID{}.Contains([]AssetID{id, id2, {}})) - assert.False(t, id.Contains([]AssetID{id2})) -} - -func TestAssetID_CopyRef(t *testing.T) { - id := NewAssetID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - 
assert.NotSame(t, id, id2) - assert.Nil(t, (*AssetID)(nil).CopyRef()) -} - -func TestAssetID_IDRef(t *testing.T) { - id := New() - id2 := AssetIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&AssetID{}).IDRef()) - assert.Nil(t, (*AssetID)(nil).IDRef()) -} - -func TestAssetID_StringRef(t *testing.T) { - id := NewAssetID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&AssetID{}).StringRef()) - assert.Nil(t, (*AssetID)(nil).StringRef()) -} - -func TestAssetID_MarhsalJSON(t *testing.T) { - id := NewAssetID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&AssetID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*AssetID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestAssetID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustAssetID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &AssetID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestAssetID_MarshalText(t *testing.T) { - id := New() - res, err := AssetIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&AssetID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*AssetID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestAssetID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &AssetID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestAssetID_IsNil(t *testing.T) { - assert.True(t, AssetID{}.IsNil()) - assert.False(t, NewAssetID().IsNil()) -} - -func TestAssetID_IsNilRef(t *testing.T) { - assert.True(t, AssetID{}.Ref().IsNilRef()) - assert.True(t, (*AssetID)(nil).IsNilRef()) - assert.False(t, 
NewAssetID().Ref().IsNilRef()) -} - -func TestAssetIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []AssetID - expected []string - }{ - { - name: "Empty slice", - input: make([]AssetID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, AssetIDsToStrings(tt.input)) - }) - } -} - -func TestAssetIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []AssetID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []AssetID - err error - }{ - res: make([]AssetID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []AssetID - err error - }{ - res: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []AssetID - err error - }{ - res: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []AssetID - err error - }{ - res: nil, - err: ErrInvalidID, - 
}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := AssetIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestAssetIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []AssetID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]AssetID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := AssetIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestAssetIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []AssetID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]AssetID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []AssetID{MustAssetID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []AssetID{ - MustAssetID(id1.String()), - MustAssetID(id2.String()), - MustAssetID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := AssetIDsFromIDRef(tc.input) - 
assert.Equal(t, tc.expected, res) - }) - } -} - -func TestAssetIDsToID(t *testing.T) { - tests := []struct { - name string - input []AssetID - expected []ID - }{ - { - name: "Empty slice", - input: make([]AssetID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := AssetIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestAssetIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustAssetID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustAssetID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustAssetID(id3.String()) - - tests := []struct { - name string - input []*AssetID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*AssetID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*AssetID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*AssetID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := AssetIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewAssetIDSet(t *testing.T) { - AssetIdSet := NewAssetIDSet() - assert.NotNil(t, AssetIdSet) - assert.Empty(t, AssetIdSet.m) - assert.Empty(t, AssetIdSet.s) -} - -func TestAssetIDSet_Add(t *testing.T) { - tests := 
[]struct { - name string - input []AssetID - expected *AssetIDSet - }{ - { - name: "Empty slice", - input: make([]AssetID, 0), - expected: &AssetIDSet{ - m: map[AssetID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewAssetIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestAssetIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *AssetID - expected *AssetIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &AssetIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewAssetIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestAssetIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *AssetIDSet - input AssetID - expected bool - }{ - { - name: "Empty Set", - target: &AssetIDSet{}, - input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestAssetIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *AssetIDSet - expected *AssetIDSet - }{ - { - name: "Empty set", - input: &AssetIDSet{}, - expected: &AssetIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains 
the element", - input: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &AssetIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestAssetIDSet_All(t *testing.T) { - tests := []struct { - name string - input *AssetIDSet - expected []AssetID - }{ - { - name: "Empty", - input: &AssetIDSet{ - m: map[AssetID]struct{}{}, - s: nil, - }, - expected: make([]AssetID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &AssetIDSet{ - m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestAssetIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *AssetIDSet - expected *AssetIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewAssetIDSet(), - }, - { - name: "Empty set", - input: NewAssetIDSet(), - expected: NewAssetIDSet(), - }, - { - name: "1 element", - input: 
&AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &AssetIDSet{ - m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestAssetIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *AssetIDSet - b *AssetIDSet - } - expected *AssetIDSet - }{ - { - name: "Nil Set", - input: struct { - a *AssetIDSet - b *AssetIDSet - }{ - a: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *AssetIDSet - b *AssetIDSet - }{ - a: &AssetIDSet{}, - b: &AssetIDSet{}, - 
}, - expected: &AssetIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *AssetIDSet - b *AssetIDSet - }{ - a: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &AssetIDSet{}, - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *AssetIDSet - b *AssetIDSet - }{ - a: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &AssetIDSet{ - m: map[AssetID]struct{}{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []AssetID{MustAssetID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &AssetIDSet{ - m: map[AssetID]struct{}{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []AssetID{ - MustAssetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAssetID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/auth_request_gen.go b/pkg/id/auth_request_gen.go deleted file mode 100644 index 76a36140a..000000000 --- a/pkg/id/auth_request_gen.go +++ /dev/null @@ -1,297 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// AuthRequestID is an ID for AuthRequest. -type AuthRequestID ID - -// NewAuthRequestID generates a new AuthRequestId. -func NewAuthRequestID() AuthRequestID { - return AuthRequestID(New()) -} - -// AuthRequestIDFrom generates a new AuthRequestID from a string. 
-func AuthRequestIDFrom(i string) (nid AuthRequestID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = AuthRequestID(did) - return -} - -// MustAuthRequestID generates a new AuthRequestID from a string, but panics if the string cannot be parsed. -func MustAuthRequestID(i string) AuthRequestID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return AuthRequestID(did) -} - -// AuthRequestIDFromRef generates a new AuthRequestID from a string ref. -func AuthRequestIDFromRef(i *string) *AuthRequestID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := AuthRequestID(*did) - return &nid -} - -// AuthRequestIDFromRefID generates a new AuthRequestID from a ref of a generic ID. -func AuthRequestIDFromRefID(i *ID) *AuthRequestID { - if i == nil { - return nil - } - nid := AuthRequestID(*i) - return &nid -} - -// ID returns a domain ID. -func (d AuthRequestID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d AuthRequestID) String() string { - return ID(d).String() -} - -// GoString implements fmt.GoStringer interface. -func (d AuthRequestID) GoString() string { - return "id.AuthRequestID(" + d.String() + ")" -} - -// RefString returns a reference of string representation. -func (d AuthRequestID) RefString() *string { - id := ID(d).String() - return &id -} - -// Ref returns a reference. -func (d AuthRequestID) Ref() *AuthRequestID { - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d AuthRequestID) Contains(ids []AuthRequestID) bool { - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *AuthRequestID) CopyRef() *AuthRequestID { - if d == nil { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. 
-func (d *AuthRequestID) IDRef() *ID { - if d == nil { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *AuthRequestID) StringRef() *string { - if d == nil { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *AuthRequestID) MarhsalJSON() ([]byte, error) { - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *AuthRequestID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = AuthRequestIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *AuthRequestID) MarshalText() ([]byte, error) { - if d == nil { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *AuthRequestID) UnmarshalText(text []byte) (err error) { - *d, err = AuthRequestIDFrom(string(text)) - return -} - -// Ref returns true if a ID is nil or zero-value -func (d AuthRequestID) IsNil() bool { - return ID(d).IsNil() -} - -// AuthRequestIDToKeys converts IDs into a string slice. -func AuthRequestIDToKeys(ids []AuthRequestID) []string { - keys := make([]string, 0, len(ids)) - for _, i := range ids { - keys = append(keys, i.String()) - } - return keys -} - -// AuthRequestIDsFrom converts a string slice into a ID slice. -func AuthRequestIDsFrom(ids []string) ([]AuthRequestID, error) { - dids := make([]AuthRequestID, 0, len(ids)) - for _, i := range ids { - did, err := AuthRequestIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// AuthRequestIDsFromID converts a generic ID slice into a ID slice. 
-func AuthRequestIDsFromID(ids []ID) []AuthRequestID { - dids := make([]AuthRequestID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, AuthRequestID(i)) - } - return dids -} - -// AuthRequestIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func AuthRequestIDsFromIDRef(ids []*ID) []AuthRequestID { - dids := make([]AuthRequestID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, AuthRequestID(*i)) - } - } - return dids -} - -// AuthRequestIDsToID converts a ID slice into a generic ID slice. -func AuthRequestIDsToID(ids []AuthRequestID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// AuthRequestIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func AuthRequestIDsToIDRef(ids []*AuthRequestID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// AuthRequestIDSet represents a set of AuthRequestIDs -type AuthRequestIDSet struct { - m map[AuthRequestID]struct{} - s []AuthRequestID -} - -// NewAuthRequestIDSet creates a new AuthRequestIDSet -func NewAuthRequestIDSet() *AuthRequestIDSet { - return &AuthRequestIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *AuthRequestIDSet) Add(p ...AuthRequestID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[AuthRequestID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []AuthRequestID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *AuthRequestIDSet) AddRef(p *AuthRequestID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *AuthRequestIDSet) Has(p AuthRequestID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all 
stored IDs -func (s *AuthRequestIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *AuthRequestIDSet) All() []AuthRequestID { - if s == nil { - return nil - } - return append([]AuthRequestID{}, s.s...) -} - -// Clone returns a cloned set -func (s *AuthRequestIDSet) Clone() *AuthRequestIDSet { - if s == nil { - return NewAuthRequestIDSet() - } - s2 := NewAuthRequestIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *AuthRequestIDSet) Merge(s2 *AuthRequestIDSet) *AuthRequestIDSet { - if s == nil { - return nil - } - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/auth_request_gen_test.go b/pkg/id/auth_request_gen_test.go deleted file mode 100644 index 5f84e7592..000000000 --- a/pkg/id/auth_request_gen_test.go +++ /dev/null @@ -1,1011 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "errors" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewAuthRequestID(t *testing.T) { - id := NewAuthRequestID() - assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - - assert.NotNil(t, ulID) - assert.Nil(t, err) -} - -func TestAuthRequestIDFrom(t *testing.T) { - t.Parallel() - testCases := []struct { - name string - input string - expected struct { - result AuthRequestID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result AuthRequestID - err error - }{ - AuthRequestID{}, - ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result AuthRequestID - err error - }{ - AuthRequestID{}, - ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result AuthRequestID - err error - }{ - AuthRequestID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, - }, - }, - } - 
for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result, err := AuthRequestIDFrom(tc.input) - assert.Equal(tt, tc.expected.result, result) - if err != nil { - assert.True(tt, errors.As(tc.expected.err, &err)) - } - }) - } -} - -func TestMustAuthRequestID(t *testing.T) { - t.Parallel() - testCases := []struct { - name string - input string - shouldPanic bool - expected AuthRequestID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: AuthRequestID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - if tc.shouldPanic { - assert.Panics(tt, func() { MustBeID(tc.input) }) - return - } - result := MustAuthRequestID(tc.input) - assert.Equal(tt, tc.expected, result) - }) - } -} - -func TestAuthRequestIDFromRef(t *testing.T) { - testCases := []struct { - name string - input string - expected *AuthRequestID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &AuthRequestID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - result := AuthRequestIDFromRef(&tc.input) - assert.Equal(tt, tc.expected, result) - if tc.expected != nil { - assert.Equal(tt, *tc.expected, *result) - } - }) - } -} - -func TestAuthRequestIDFromRefID(t *testing.T) { - id := New() - - subId := AuthRequestIDFromRefID(&id) - - assert.NotNil(t, subId) - assert.Equal(t, subId.id, id.id) -} - -func TestAuthRequestID_ID(t *testing.T) { - id 
:= New() - subId := AuthRequestIDFromRefID(&id) - - idOrg := subId.ID() - - assert.Equal(t, id, idOrg) -} - -func TestAuthRequestID_String(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - assert.Equal(t, subId.String(), id.String()) -} - -func TestAuthRequestID_GoString(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - assert.Equal(t, subId.GoString(), "id.AuthRequestID("+id.String()+")") -} - -func TestAuthRequestID_RefString(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - refString := subId.StringRef() - - assert.NotNil(t, refString) - assert.Equal(t, *refString, id.String()) -} - -func TestAuthRequestID_Ref(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - subIdRef := subId.Ref() - - assert.Equal(t, *subId, *subIdRef) -} - -func TestAuthRequestID_Contains(t *testing.T) { - id := NewAuthRequestID() - id2 := NewAuthRequestID() - assert.True(t, id.Contains([]AuthRequestID{id, id2})) - assert.False(t, id.Contains([]AuthRequestID{id2})) -} - -func TestAuthRequestID_CopyRef(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - subIdCopyRef := subId.CopyRef() - - assert.Equal(t, *subId, *subIdCopyRef) - assert.NotSame(t, subId, subIdCopyRef) -} - -func TestAuthRequestID_IDRef(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - assert.Equal(t, id, *subId.IDRef()) -} - -func TestAuthRequestID_StringRef(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - assert.Equal(t, *subId.StringRef(), id.String()) -} - -func TestAuthRequestID_MarhsalJSON(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - res, err := subId.MarhsalJSON() - exp, _ := json.Marshal(subId.String()) - - assert.Nil(t, err) - assert.Equal(t, exp, res) -} - -func TestAuthRequestID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - - subId := &AuthRequestID{} - - err := 
subId.UnmarhsalJSON([]byte(jsonString)) - - assert.Nil(t, err) - assert.Equal(t, "01f3zhkysvcxsnzepyyqtq21fb", subId.String()) -} - -func TestAuthRequestID_MarshalText(t *testing.T) { - id := New() - subId := AuthRequestIDFromRefID(&id) - - res, err := subId.MarshalText() - - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) -} - -func TestAuthRequestID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - - subId := &AuthRequestID{} - - err := subId.UnmarshalText(text) - - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", subId.String()) - -} - -func TestAuthRequestID_IsNil(t *testing.T) { - subId := AuthRequestID{} - - assert.True(t, subId.IsNil()) - - id := New() - subId = *AuthRequestIDFromRefID(&id) - - assert.False(t, subId.IsNil()) -} - -func TestAuthRequestIDToKeys(t *testing.T) { - t.Parallel() - testCases := []struct { - name string - input []AuthRequestID - expected []string - }{ - { - name: "Empty slice", - input: make([]AuthRequestID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, AuthRequestIDToKeys(tc.input)) - }) - } - -} - -func TestAuthRequestIDsFrom(t *testing.T) { - t.Parallel() - testCases := []struct { - name string - input []string - expected struct { - res []AuthRequestID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { 
- res []AuthRequestID - err error - }{ - res: make([]AuthRequestID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []AuthRequestID - err error - }{ - res: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []AuthRequestID - err error - }{ - res: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []AuthRequestID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - if tc.expected.err != nil { - _, err := AuthRequestIDsFrom(tc.input) - assert.True(tt, errors.As(ErrInvalidID, &err)) - } else { - res, err := AuthRequestIDsFrom(tc.input) - assert.Equal(tt, tc.expected.res, res) - assert.Nil(tt, err) - } - - }) - } -} - -func TestAuthRequestIDsFromID(t *testing.T) { - t.Parallel() - testCases := []struct { - name string - input []ID - expected []AuthRequestID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]AuthRequestID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []AuthRequestID{ - 
MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - res := AuthRequestIDsFromID(tc.input) - assert.Equal(tt, tc.expected, res) - }) - } -} - -func TestAuthRequestIDsFromIDRef(t *testing.T) { - t.Parallel() - - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - testCases := []struct { - name string - input []*ID - expected []AuthRequestID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]AuthRequestID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []AuthRequestID{MustAuthRequestID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []AuthRequestID{ - MustAuthRequestID(id1.String()), - MustAuthRequestID(id2.String()), - MustAuthRequestID(id3.String()), - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - res := AuthRequestIDsFromIDRef(tc.input) - assert.Equal(tt, tc.expected, res) - }) - } -} - -func TestAuthRequestIDsToID(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input []AuthRequestID - expected []ID - }{ - { - name: "Empty slice", - input: make([]AuthRequestID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - res := AuthRequestIDsToID(tc.input) - assert.Equal(tt, tc.expected, res) - }) - } -} - -func TestAuthRequestIDsToIDRef(t *testing.T) { - t.Parallel() - - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - subId1 := MustAuthRequestID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - subId2 := MustAuthRequestID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - subId3 := MustAuthRequestID(id3.String()) - - testCases := []struct { - name string - input []*AuthRequestID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*AuthRequestID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*AuthRequestID{&subId1}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*AuthRequestID{&subId1, &subId2, &subId3}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - res := AuthRequestIDsToIDRef(tc.input) - assert.Equal(tt, tc.expected, res) - }) - } -} - -func TestNewAuthRequestIDSet(t *testing.T) { - AuthRequestIdSet := NewAuthRequestIDSet() - - assert.NotNil(t, AuthRequestIdSet) - assert.Empty(t, AuthRequestIdSet.m) - assert.Empty(t, AuthRequestIdSet.s) -} - -func TestAuthRequestIDSet_Add(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input []AuthRequestID - expected *AuthRequestIDSet - }{ - { - name: "Empty slice", - input: make([]AuthRequestID, 0), - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - set := NewAuthRequestIDSet() - set.Add(tc.input...) 
- assert.Equal(tt, tc.expected, set) - }) - } -} - -func TestAuthRequestIDSet_AddRef(t *testing.T) { - t.Parallel() - - AuthRequestId := MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1") - - testCases := []struct { - name string - input *AuthRequestID - expected *AuthRequestIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &AuthRequestIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: &AuthRequestId, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - set := NewAuthRequestIDSet() - set.AddRef(tc.input) - assert.Equal(tt, tc.expected, set) - }) - } -} - -func TestAuthRequestIDSet_Has(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - AuthRequestIDSet - AuthRequestID - } - expected bool - }{ - { - name: "Empty Set", - input: struct { - AuthRequestIDSet - AuthRequestID - }{AuthRequestIDSet: AuthRequestIDSet{}, AuthRequestID: MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: false, - }, - { - name: "Set Contains the element", - input: struct { - AuthRequestIDSet - AuthRequestID - }{AuthRequestIDSet: AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, AuthRequestID: MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: true, - }, - { - name: "Set does not Contains the element", - input: struct { - AuthRequestIDSet - AuthRequestID - }{AuthRequestIDSet: AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, AuthRequestID: MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: 
false, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - assert.Equal(tt, tc.expected, tc.input.AuthRequestIDSet.Has(tc.input.AuthRequestID)) - }) - } -} - -func TestAuthRequestIDSet_Clear(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input AuthRequestIDSet - expected AuthRequestIDSet - }{ - { - name: "Empty Set", - input: AuthRequestIDSet{}, - expected: AuthRequestIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "Set Contains the element", - input: AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: AuthRequestIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - set := tc.input - p := &set - p.Clear() - assert.Equal(tt, tc.expected, *p) - }) - } -} - -func TestAuthRequestIDSet_All(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input *AuthRequestIDSet - expected []AuthRequestID - }{ - { - name: "Empty slice", - input: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{}, - s: nil, - }, - expected: make([]AuthRequestID, 0), - }, - { - name: "1 element", - input: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - 
MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.All()) - }) - } -} - -func TestAuthRequestIDSet_Clone(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input *AuthRequestIDSet - expected *AuthRequestIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewAuthRequestIDSet(), - }, - { - name: "Empty set", - input: NewAuthRequestIDSet(), - expected: NewAuthRequestIDSet(), - }, - { - name: "1 element", - input: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - 
MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - clone := tc.input.Clone() - assert.Equal(tt, tc.expected, clone) - assert.False(tt, tc.input == clone) - }) - } -} - -func TestAuthRequestIDSet_Merge(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input struct { - a *AuthRequestIDSet - b *AuthRequestIDSet - } - expected *AuthRequestIDSet - }{ - { - name: "Empty Set", - input: struct { - a *AuthRequestIDSet - b *AuthRequestIDSet - }{ - a: &AuthRequestIDSet{}, - b: &AuthRequestIDSet{}, - }, - expected: &AuthRequestIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *AuthRequestIDSet - b *AuthRequestIDSet - }{ - a: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &AuthRequestIDSet{}, - }, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *AuthRequestIDSet - b *AuthRequestIDSet - }{ - a: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []AuthRequestID{MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &AuthRequestIDSet{ - m: map[AuthRequestID]struct{}{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []AuthRequestID{ - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t1"), - MustAuthRequestID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc 
:= range testCases { - tc := tc - t.Run(tc.name, func(tt *testing.T) { - tt.Parallel() - - assert.Equal(tt, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/cluster_field_gen_test.go b/pkg/id/cluster_field_gen_test.go deleted file mode 100644 index c8f383db1..000000000 --- a/pkg/id/cluster_field_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewClusterID(t *testing.T) { - id := NewClusterID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestClusterIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result ClusterID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result ClusterID - err error - }{ - result: ClusterID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result ClusterID - err error - }{ - result: ClusterID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result ClusterID - err error - }{ - result: ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := ClusterIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustClusterID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected ClusterID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: 
"success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustClusterID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestClusterIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *ClusterID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &ClusterID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := ClusterIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestClusterIDFromRefID(t *testing.T) { - id := New() - id2 := ClusterIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, ClusterIDFromRefID(nil)) - assert.Nil(t, ClusterIDFromRefID(&ID{})) -} - -func TestClusterID_ID(t *testing.T) { - id := New() - id2 := ClusterIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestClusterID_String(t *testing.T) { - id := New() - id2 := ClusterIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", ClusterID{}.String()) -} - -func TestClusterID_RefString(t *testing.T) { - id := NewClusterID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, ClusterID{}.RefString()) -} - -func TestClusterID_GoString(t *testing.T) { - id := New() - id2 := ClusterIDFromRefID(&id) - assert.Equal(t, "ClusterID("+id.String()+")", id2.GoString()) - assert.Equal(t, "ClusterID()", ClusterID{}.GoString()) -} - -func TestClusterID_Ref(t *testing.T) { - id 
:= NewClusterID() - assert.Equal(t, ClusterID(id), *id.Ref()) - assert.Nil(t, (&ClusterID{}).Ref()) -} - -func TestClusterID_Contains(t *testing.T) { - id := NewClusterID() - id2 := NewClusterID() - assert.True(t, id.Contains([]ClusterID{id, id2})) - assert.False(t, ClusterID{}.Contains([]ClusterID{id, id2, {}})) - assert.False(t, id.Contains([]ClusterID{id2})) -} - -func TestClusterID_CopyRef(t *testing.T) { - id := NewClusterID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*ClusterID)(nil).CopyRef()) -} - -func TestClusterID_IDRef(t *testing.T) { - id := New() - id2 := ClusterIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&ClusterID{}).IDRef()) - assert.Nil(t, (*ClusterID)(nil).IDRef()) -} - -func TestClusterID_StringRef(t *testing.T) { - id := NewClusterID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&ClusterID{}).StringRef()) - assert.Nil(t, (*ClusterID)(nil).StringRef()) -} - -func TestClusterID_MarhsalJSON(t *testing.T) { - id := NewClusterID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&ClusterID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*ClusterID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestClusterID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustClusterID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &ClusterID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestClusterID_MarshalText(t *testing.T) { - id := New() - res, err := ClusterIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&ClusterID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*ClusterID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - 
-func TestClusterID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &ClusterID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestClusterID_IsNil(t *testing.T) { - assert.True(t, ClusterID{}.IsNil()) - assert.False(t, NewClusterID().IsNil()) -} - -func TestClusterID_IsNilRef(t *testing.T) { - assert.True(t, ClusterID{}.Ref().IsNilRef()) - assert.True(t, (*ClusterID)(nil).IsNilRef()) - assert.False(t, NewClusterID().Ref().IsNilRef()) -} - -func TestClusterIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []ClusterID - expected []string - }{ - { - name: "Empty slice", - input: make([]ClusterID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, ClusterIDsToStrings(tt.input)) - }) - } -} - -func TestClusterIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []ClusterID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []ClusterID - err error - }{ - res: make([]ClusterID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []ClusterID - err error - }{ - res: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple 
elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []ClusterID - err error - }{ - res: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []ClusterID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := ClusterIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestClusterIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []ClusterID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]ClusterID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ClusterIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestClusterIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := 
MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []ClusterID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]ClusterID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []ClusterID{MustClusterID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []ClusterID{ - MustClusterID(id1.String()), - MustClusterID(id2.String()), - MustClusterID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ClusterIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestClusterIDsToID(t *testing.T) { - tests := []struct { - name string - input []ClusterID - expected []ID - }{ - { - name: "Empty slice", - input: make([]ClusterID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ClusterIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestClusterIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustClusterID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustClusterID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustClusterID(id3.String()) - - tests := []struct { - name string - input []*ClusterID - expected []*ID - }{ - { - name: 
"Empty slice", - input: make([]*ClusterID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*ClusterID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*ClusterID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ClusterIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewClusterIDSet(t *testing.T) { - ClusterIdSet := NewClusterIDSet() - assert.NotNil(t, ClusterIdSet) - assert.Empty(t, ClusterIdSet.m) - assert.Empty(t, ClusterIdSet.s) -} - -func TestClusterIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []ClusterID - expected *ClusterIDSet - }{ - { - name: "Empty slice", - input: make([]ClusterID, 0), - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - 
expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewClusterIDSet() - set.Add(tc.input...) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestClusterIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *ClusterID - expected *ClusterIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &ClusterIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewClusterIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestClusterIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *ClusterIDSet - input ClusterID - expected bool - }{ - { - name: "Empty Set", - target: &ClusterIDSet{}, - input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: 
MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestClusterIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *ClusterIDSet - expected *ClusterIDSet - }{ - { - name: "Empty set", - input: &ClusterIDSet{}, - expected: &ClusterIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &ClusterIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestClusterIDSet_All(t *testing.T) { - tests := []struct { - name string - input *ClusterIDSet - expected []ClusterID - }{ - { - name: "Empty", - input: &ClusterIDSet{ - m: map[ClusterID]struct{}{}, - s: nil, - }, - expected: make([]ClusterID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &ClusterIDSet{ - m: map[ClusterID]struct{}{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []ClusterID{ - 
MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestClusterIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *ClusterIDSet - expected *ClusterIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewClusterIDSet(), - }, - { - name: "Empty set", - input: NewClusterIDSet(), - expected: NewClusterIDSet(), - }, - { - name: "1 element", - input: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &ClusterIDSet{ - m: map[ClusterID]struct{}{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func 
TestClusterIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *ClusterIDSet - b *ClusterIDSet - } - expected *ClusterIDSet - }{ - { - name: "Nil Set", - input: struct { - a *ClusterIDSet - b *ClusterIDSet - }{ - a: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *ClusterIDSet - b *ClusterIDSet - }{ - a: &ClusterIDSet{}, - b: &ClusterIDSet{}, - }, - expected: &ClusterIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *ClusterIDSet - b *ClusterIDSet - }{ - a: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &ClusterIDSet{}, - }, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *ClusterIDSet - b *ClusterIDSet - }{ - a: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &ClusterIDSet{ - m: map[ClusterID]struct{}{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []ClusterID{MustClusterID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &ClusterIDSet{ - m: map[ClusterID]struct{}{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []ClusterID{ - MustClusterID("01f3zhcaq35403zdjnd6dcm0t1"), - MustClusterID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - 
t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/cluster_gen.go b/pkg/id/cluster_gen.go deleted file mode 100644 index 038849217..000000000 --- a/pkg/id/cluster_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// ClusterID is an ID for Cluster. -type ClusterID ID - -// NewClusterID generates a new ClusterId. -func NewClusterID() ClusterID { - return ClusterID(New()) -} - -// ClusterIDFrom generates a new ClusterID from a string. -func ClusterIDFrom(i string) (nid ClusterID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = ClusterID(did) - return -} - -// MustClusterID generates a new ClusterID from a string, but panics if the string cannot be parsed. -func MustClusterID(i string) ClusterID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return ClusterID(did) -} - -// ClusterIDFromRef generates a new ClusterID from a string ref. -func ClusterIDFromRef(i *string) *ClusterID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := ClusterID(*did) - return &nid -} - -// ClusterIDFromRefID generates a new ClusterID from a ref of a generic ID. -func ClusterIDFromRefID(i *ID) *ClusterID { - if i == nil || i.IsNil() { - return nil - } - nid := ClusterID(*i) - return &nid -} - -// ID returns a domain ID. -func (d ClusterID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d ClusterID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d ClusterID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d ClusterID) GoString() string { - return "ClusterID(" + d.String() + ")" -} - -// Ref returns a reference. 
-func (d ClusterID) Ref() *ClusterID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d ClusterID) Contains(ids []ClusterID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *ClusterID) CopyRef() *ClusterID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *ClusterID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *ClusterID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *ClusterID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *ClusterID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = ClusterIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *ClusterID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *ClusterID) UnmarshalText(text []byte) (err error) { - *d, err = ClusterIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d ClusterID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *ClusterID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// ClusterIDsToStrings converts IDs into a string slice. 
-func ClusterIDsToStrings(ids []ClusterID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// ClusterIDsFrom converts a string slice into a ID slice. -func ClusterIDsFrom(ids []string) ([]ClusterID, error) { - dids := make([]ClusterID, 0, len(ids)) - for _, i := range ids { - did, err := ClusterIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// ClusterIDsFromID converts a generic ID slice into a ID slice. -func ClusterIDsFromID(ids []ID) []ClusterID { - dids := make([]ClusterID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, ClusterID(i)) - } - return dids -} - -// ClusterIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func ClusterIDsFromIDRef(ids []*ID) []ClusterID { - dids := make([]ClusterID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, ClusterID(*i)) - } - } - return dids -} - -// ClusterIDsToID converts a ID slice into a generic ID slice. -func ClusterIDsToID(ids []ClusterID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// ClusterIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func ClusterIDsToIDRef(ids []*ClusterID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// ClusterIDSet represents a set of ClusterIDs -type ClusterIDSet struct { - m map[ClusterID]struct{} - s []ClusterID -} - -// NewClusterIDSet creates a new ClusterIDSet -func NewClusterIDSet() *ClusterIDSet { - return &ClusterIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *ClusterIDSet) Add(p ...ClusterID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[ClusterID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []ClusterID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *ClusterIDSet) AddRef(p *ClusterID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *ClusterIDSet) Has(p ClusterID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *ClusterIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *ClusterIDSet) All() []ClusterID { - if s == nil { - return nil - } - return append([]ClusterID{}, s.s...) -} - -// Clone returns a cloned set -func (s *ClusterIDSet) Clone() *ClusterIDSet { - if s == nil { - return NewClusterIDSet() - } - s2 := NewClusterIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *ClusterIDSet) Merge(s2 *ClusterIDSet) *ClusterIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) 
- return s3 -} diff --git a/pkg/id/common.go b/pkg/id/common.go new file mode 100644 index 000000000..28b5aa31d --- /dev/null +++ b/pkg/id/common.go @@ -0,0 +1,5 @@ +package id + +import "github.com/reearth/reearth-backend/pkg/id/idx" + +var ErrInvalidID = idx.ErrInvalidID diff --git a/pkg/id/dataset_gen.go b/pkg/id/dataset_gen.go deleted file mode 100644 index 8dec9b216..000000000 --- a/pkg/id/dataset_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// DatasetID is an ID for Dataset. -type DatasetID ID - -// NewDatasetID generates a new DatasetId. -func NewDatasetID() DatasetID { - return DatasetID(New()) -} - -// DatasetIDFrom generates a new DatasetID from a string. -func DatasetIDFrom(i string) (nid DatasetID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = DatasetID(did) - return -} - -// MustDatasetID generates a new DatasetID from a string, but panics if the string cannot be parsed. -func MustDatasetID(i string) DatasetID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return DatasetID(did) -} - -// DatasetIDFromRef generates a new DatasetID from a string ref. -func DatasetIDFromRef(i *string) *DatasetID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := DatasetID(*did) - return &nid -} - -// DatasetIDFromRefID generates a new DatasetID from a ref of a generic ID. -func DatasetIDFromRefID(i *ID) *DatasetID { - if i == nil || i.IsNil() { - return nil - } - nid := DatasetID(*i) - return &nid -} - -// ID returns a domain ID. -func (d DatasetID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d DatasetID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. 
-func (d DatasetID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d DatasetID) GoString() string { - return "DatasetID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d DatasetID) Ref() *DatasetID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d DatasetID) Contains(ids []DatasetID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *DatasetID) CopyRef() *DatasetID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *DatasetID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *DatasetID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *DatasetID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *DatasetID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = DatasetIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *DatasetID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *DatasetID) UnmarshalText(text []byte) (err error) { - *d, err = DatasetIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d DatasetID) IsNil() 
bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *DatasetID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// DatasetIDsToStrings converts IDs into a string slice. -func DatasetIDsToStrings(ids []DatasetID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// DatasetIDsFrom converts a string slice into a ID slice. -func DatasetIDsFrom(ids []string) ([]DatasetID, error) { - dids := make([]DatasetID, 0, len(ids)) - for _, i := range ids { - did, err := DatasetIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// DatasetIDsFromID converts a generic ID slice into a ID slice. -func DatasetIDsFromID(ids []ID) []DatasetID { - dids := make([]DatasetID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, DatasetID(i)) - } - return dids -} - -// DatasetIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func DatasetIDsFromIDRef(ids []*ID) []DatasetID { - dids := make([]DatasetID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, DatasetID(*i)) - } - } - return dids -} - -// DatasetIDsToID converts a ID slice into a generic ID slice. -func DatasetIDsToID(ids []DatasetID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// DatasetIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func DatasetIDsToIDRef(ids []*DatasetID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// DatasetIDSet represents a set of DatasetIDs -type DatasetIDSet struct { - m map[DatasetID]struct{} - s []DatasetID -} - -// NewDatasetIDSet creates a new DatasetIDSet -func NewDatasetIDSet() *DatasetIDSet { - return &DatasetIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *DatasetIDSet) Add(p ...DatasetID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[DatasetID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []DatasetID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *DatasetIDSet) AddRef(p *DatasetID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *DatasetIDSet) Has(p DatasetID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *DatasetIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *DatasetIDSet) All() []DatasetID { - if s == nil { - return nil - } - return append([]DatasetID{}, s.s...) -} - -// Clone returns a cloned set -func (s *DatasetIDSet) Clone() *DatasetIDSet { - if s == nil { - return NewDatasetIDSet() - } - s2 := NewDatasetIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *DatasetIDSet) Merge(s2 *DatasetIDSet) *DatasetIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/dataset_gen_test.go b/pkg/id/dataset_gen_test.go deleted file mode 100644 index 87f2363a0..000000000 --- a/pkg/id/dataset_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewDatasetID(t *testing.T) { - id := NewDatasetID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestDatasetIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result DatasetID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result DatasetID - err error - }{ - result: DatasetID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result DatasetID - err error - }{ - result: DatasetID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result DatasetID - err error - }{ - result: DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := DatasetIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustDatasetID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected DatasetID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustDatasetID(tt.input) - assert.Equal(t, tt.expected, 
result) - }) - } -} - -func TestDatasetIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *DatasetID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &DatasetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := DatasetIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestDatasetIDFromRefID(t *testing.T) { - id := New() - id2 := DatasetIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, DatasetIDFromRefID(nil)) - assert.Nil(t, DatasetIDFromRefID(&ID{})) -} - -func TestDatasetID_ID(t *testing.T) { - id := New() - id2 := DatasetIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestDatasetID_String(t *testing.T) { - id := New() - id2 := DatasetIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", DatasetID{}.String()) -} - -func TestDatasetID_RefString(t *testing.T) { - id := NewDatasetID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, DatasetID{}.RefString()) -} - -func TestDatasetID_GoString(t *testing.T) { - id := New() - id2 := DatasetIDFromRefID(&id) - assert.Equal(t, "DatasetID("+id.String()+")", id2.GoString()) - assert.Equal(t, "DatasetID()", DatasetID{}.GoString()) -} - -func TestDatasetID_Ref(t *testing.T) { - id := NewDatasetID() - assert.Equal(t, DatasetID(id), *id.Ref()) - assert.Nil(t, (&DatasetID{}).Ref()) -} - -func TestDatasetID_Contains(t *testing.T) { - id := NewDatasetID() - id2 := NewDatasetID() - assert.True(t, id.Contains([]DatasetID{id, id2})) - assert.False(t, DatasetID{}.Contains([]DatasetID{id, id2, {}})) - assert.False(t, id.Contains([]DatasetID{id2})) -} - -func TestDatasetID_CopyRef(t *testing.T) 
{ - id := NewDatasetID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*DatasetID)(nil).CopyRef()) -} - -func TestDatasetID_IDRef(t *testing.T) { - id := New() - id2 := DatasetIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&DatasetID{}).IDRef()) - assert.Nil(t, (*DatasetID)(nil).IDRef()) -} - -func TestDatasetID_StringRef(t *testing.T) { - id := NewDatasetID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&DatasetID{}).StringRef()) - assert.Nil(t, (*DatasetID)(nil).StringRef()) -} - -func TestDatasetID_MarhsalJSON(t *testing.T) { - id := NewDatasetID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&DatasetID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*DatasetID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestDatasetID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustDatasetID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &DatasetID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestDatasetID_MarshalText(t *testing.T) { - id := New() - res, err := DatasetIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&DatasetID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*DatasetID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestDatasetID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &DatasetID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestDatasetID_IsNil(t *testing.T) { - assert.True(t, DatasetID{}.IsNil()) - assert.False(t, NewDatasetID().IsNil()) -} - -func TestDatasetID_IsNilRef(t *testing.T) { 
- assert.True(t, DatasetID{}.Ref().IsNilRef()) - assert.True(t, (*DatasetID)(nil).IsNilRef()) - assert.False(t, NewDatasetID().Ref().IsNilRef()) -} - -func TestDatasetIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []DatasetID - expected []string - }{ - { - name: "Empty slice", - input: make([]DatasetID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, DatasetIDsToStrings(tt.input)) - }) - } -} - -func TestDatasetIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []DatasetID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []DatasetID - err error - }{ - res: make([]DatasetID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []DatasetID - err error - }{ - res: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []DatasetID - err error - }{ - res: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - 
"01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []DatasetID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := DatasetIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestDatasetIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []DatasetID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]DatasetID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []DatasetID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]DatasetID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []DatasetID{MustDatasetID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []DatasetID{ - MustDatasetID(id1.String()), - 
MustDatasetID(id2.String()), - MustDatasetID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetIDsToID(t *testing.T) { - tests := []struct { - name string - input []DatasetID - expected []ID - }{ - { - name: "Empty slice", - input: make([]DatasetID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustDatasetID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustDatasetID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustDatasetID(id3.String()) - - tests := []struct { - name string - input []*DatasetID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*DatasetID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*DatasetID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*DatasetID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetIDsToIDRef(tc.input) - assert.Equal(t, 
tc.expected, res) - }) - } -} - -func TestNewDatasetIDSet(t *testing.T) { - DatasetIdSet := NewDatasetIDSet() - assert.NotNil(t, DatasetIdSet) - assert.Empty(t, DatasetIdSet.m) - assert.Empty(t, DatasetIdSet.s) -} - -func TestDatasetIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []DatasetID - expected *DatasetIDSet - }{ - { - name: "Empty slice", - input: make([]DatasetID, 0), - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewDatasetIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestDatasetIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *DatasetID - expected *DatasetIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &DatasetIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewDatasetIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestDatasetIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *DatasetIDSet - input DatasetID - expected bool - }{ - { - name: "Empty Set", - target: &DatasetIDSet{}, - input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestDatasetIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *DatasetIDSet - expected *DatasetIDSet - }{ - { - name: "Empty set", - input: &DatasetIDSet{}, - expected: &DatasetIDSet{}, - }, - { - name: 
"Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &DatasetIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestDatasetIDSet_All(t *testing.T) { - tests := []struct { - name string - input *DatasetIDSet - expected []DatasetID - }{ - { - name: "Empty", - input: &DatasetIDSet{ - m: map[DatasetID]struct{}{}, - s: nil, - }, - expected: make([]DatasetID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &DatasetIDSet{ - m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestDatasetIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *DatasetIDSet - expected *DatasetIDSet - }{ - { - name: "nil set", - input: nil, - expected: 
NewDatasetIDSet(), - }, - { - name: "Empty set", - input: NewDatasetIDSet(), - expected: NewDatasetIDSet(), - }, - { - name: "1 element", - input: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &DatasetIDSet{ - m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestDatasetIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *DatasetIDSet - b *DatasetIDSet - } - expected *DatasetIDSet - }{ - { - name: "Nil Set", - input: struct { - a *DatasetIDSet - b *DatasetIDSet - }{ - a: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &DatasetIDSet{ - m: 
map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *DatasetIDSet - b *DatasetIDSet - }{ - a: &DatasetIDSet{}, - b: &DatasetIDSet{}, - }, - expected: &DatasetIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *DatasetIDSet - b *DatasetIDSet - }{ - a: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &DatasetIDSet{}, - }, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *DatasetIDSet - b *DatasetIDSet - }{ - a: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &DatasetIDSet{ - m: map[DatasetID]struct{}{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []DatasetID{MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &DatasetIDSet{ - m: map[DatasetID]struct{}{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []DatasetID{ - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/dataset_schema_field_gen.go b/pkg/id/dataset_schema_field_gen.go deleted file mode 100644 index c607fb680..000000000 --- a/pkg/id/dataset_schema_field_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package id - -import "encoding/json" - -// DatasetSchemaFieldID is an ID for DatasetSchemaField. -type DatasetSchemaFieldID ID - -// NewDatasetSchemaFieldID generates a new DatasetSchemaFieldId. -func NewDatasetSchemaFieldID() DatasetSchemaFieldID { - return DatasetSchemaFieldID(New()) -} - -// DatasetSchemaFieldIDFrom generates a new DatasetSchemaFieldID from a string. -func DatasetSchemaFieldIDFrom(i string) (nid DatasetSchemaFieldID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = DatasetSchemaFieldID(did) - return -} - -// MustDatasetSchemaFieldID generates a new DatasetSchemaFieldID from a string, but panics if the string cannot be parsed. -func MustDatasetSchemaFieldID(i string) DatasetSchemaFieldID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return DatasetSchemaFieldID(did) -} - -// DatasetSchemaFieldIDFromRef generates a new DatasetSchemaFieldID from a string ref. -func DatasetSchemaFieldIDFromRef(i *string) *DatasetSchemaFieldID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := DatasetSchemaFieldID(*did) - return &nid -} - -// DatasetSchemaFieldIDFromRefID generates a new DatasetSchemaFieldID from a ref of a generic ID. -func DatasetSchemaFieldIDFromRefID(i *ID) *DatasetSchemaFieldID { - if i == nil || i.IsNil() { - return nil - } - nid := DatasetSchemaFieldID(*i) - return &nid -} - -// ID returns a domain ID. -func (d DatasetSchemaFieldID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d DatasetSchemaFieldID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d DatasetSchemaFieldID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. 
-func (d DatasetSchemaFieldID) GoString() string { - return "DatasetSchemaFieldID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d DatasetSchemaFieldID) Ref() *DatasetSchemaFieldID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d DatasetSchemaFieldID) Contains(ids []DatasetSchemaFieldID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *DatasetSchemaFieldID) CopyRef() *DatasetSchemaFieldID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *DatasetSchemaFieldID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *DatasetSchemaFieldID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *DatasetSchemaFieldID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *DatasetSchemaFieldID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = DatasetSchemaFieldIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *DatasetSchemaFieldID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *DatasetSchemaFieldID) UnmarshalText(text []byte) (err error) { - *d, err = DatasetSchemaFieldIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d 
DatasetSchemaFieldID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *DatasetSchemaFieldID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// DatasetSchemaFieldIDsToStrings converts IDs into a string slice. -func DatasetSchemaFieldIDsToStrings(ids []DatasetSchemaFieldID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// DatasetSchemaFieldIDsFrom converts a string slice into a ID slice. -func DatasetSchemaFieldIDsFrom(ids []string) ([]DatasetSchemaFieldID, error) { - dids := make([]DatasetSchemaFieldID, 0, len(ids)) - for _, i := range ids { - did, err := DatasetSchemaFieldIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// DatasetSchemaFieldIDsFromID converts a generic ID slice into a ID slice. -func DatasetSchemaFieldIDsFromID(ids []ID) []DatasetSchemaFieldID { - dids := make([]DatasetSchemaFieldID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, DatasetSchemaFieldID(i)) - } - return dids -} - -// DatasetSchemaFieldIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func DatasetSchemaFieldIDsFromIDRef(ids []*ID) []DatasetSchemaFieldID { - dids := make([]DatasetSchemaFieldID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, DatasetSchemaFieldID(*i)) - } - } - return dids -} - -// DatasetSchemaFieldIDsToID converts a ID slice into a generic ID slice. -func DatasetSchemaFieldIDsToID(ids []DatasetSchemaFieldID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// DatasetSchemaFieldIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func DatasetSchemaFieldIDsToIDRef(ids []*DatasetSchemaFieldID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// DatasetSchemaFieldIDSet represents a set of DatasetSchemaFieldIDs -type DatasetSchemaFieldIDSet struct { - m map[DatasetSchemaFieldID]struct{} - s []DatasetSchemaFieldID -} - -// NewDatasetSchemaFieldIDSet creates a new DatasetSchemaFieldIDSet -func NewDatasetSchemaFieldIDSet() *DatasetSchemaFieldIDSet { - return &DatasetSchemaFieldIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *DatasetSchemaFieldIDSet) Add(p ...DatasetSchemaFieldID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[DatasetSchemaFieldID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []DatasetSchemaFieldID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *DatasetSchemaFieldIDSet) AddRef(p *DatasetSchemaFieldID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *DatasetSchemaFieldIDSet) Has(p DatasetSchemaFieldID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *DatasetSchemaFieldIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *DatasetSchemaFieldIDSet) All() []DatasetSchemaFieldID { - if s == nil { - return nil - } - return append([]DatasetSchemaFieldID{}, s.s...) -} - -// Clone returns a cloned set -func (s *DatasetSchemaFieldIDSet) Clone() *DatasetSchemaFieldIDSet { - if s == nil { - return NewDatasetSchemaFieldIDSet() - } - s2 := NewDatasetSchemaFieldIDSet() - s2.Add(s.s...) 
- return s2 -} - -// Merge returns a merged set -func (s *DatasetSchemaFieldIDSet) Merge(s2 *DatasetSchemaFieldIDSet) *DatasetSchemaFieldIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/dataset_schema_field_gen_test.go b/pkg/id/dataset_schema_field_gen_test.go deleted file mode 100644 index 9dbefbc1b..000000000 --- a/pkg/id/dataset_schema_field_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewDatasetSchemaFieldID(t *testing.T) { - id := NewDatasetSchemaFieldID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestDatasetSchemaFieldIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result DatasetSchemaFieldID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result DatasetSchemaFieldID - err error - }{ - result: DatasetSchemaFieldID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result DatasetSchemaFieldID - err error - }{ - result: DatasetSchemaFieldID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result DatasetSchemaFieldID - err error - }{ - result: DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := DatasetSchemaFieldIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustDatasetSchemaFieldID(t *testing.T) { - tests := []struct { - name string - input string - 
shouldPanic bool - expected DatasetSchemaFieldID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustDatasetSchemaFieldID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestDatasetSchemaFieldIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *DatasetSchemaFieldID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &DatasetSchemaFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := DatasetSchemaFieldIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestDatasetSchemaFieldIDFromRefID(t *testing.T) { - id := New() - id2 := DatasetSchemaFieldIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, DatasetSchemaFieldIDFromRefID(nil)) - assert.Nil(t, DatasetSchemaFieldIDFromRefID(&ID{})) -} - -func TestDatasetSchemaFieldID_ID(t *testing.T) { - id := New() - id2 := DatasetSchemaFieldIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestDatasetSchemaFieldID_String(t *testing.T) { - id := New() - id2 := DatasetSchemaFieldIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", DatasetSchemaFieldID{}.String()) -} - -func 
TestDatasetSchemaFieldID_RefString(t *testing.T) { - id := NewDatasetSchemaFieldID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, DatasetSchemaFieldID{}.RefString()) -} - -func TestDatasetSchemaFieldID_GoString(t *testing.T) { - id := New() - id2 := DatasetSchemaFieldIDFromRefID(&id) - assert.Equal(t, "DatasetSchemaFieldID("+id.String()+")", id2.GoString()) - assert.Equal(t, "DatasetSchemaFieldID()", DatasetSchemaFieldID{}.GoString()) -} - -func TestDatasetSchemaFieldID_Ref(t *testing.T) { - id := NewDatasetSchemaFieldID() - assert.Equal(t, DatasetSchemaFieldID(id), *id.Ref()) - assert.Nil(t, (&DatasetSchemaFieldID{}).Ref()) -} - -func TestDatasetSchemaFieldID_Contains(t *testing.T) { - id := NewDatasetSchemaFieldID() - id2 := NewDatasetSchemaFieldID() - assert.True(t, id.Contains([]DatasetSchemaFieldID{id, id2})) - assert.False(t, DatasetSchemaFieldID{}.Contains([]DatasetSchemaFieldID{id, id2, {}})) - assert.False(t, id.Contains([]DatasetSchemaFieldID{id2})) -} - -func TestDatasetSchemaFieldID_CopyRef(t *testing.T) { - id := NewDatasetSchemaFieldID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*DatasetSchemaFieldID)(nil).CopyRef()) -} - -func TestDatasetSchemaFieldID_IDRef(t *testing.T) { - id := New() - id2 := DatasetSchemaFieldIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&DatasetSchemaFieldID{}).IDRef()) - assert.Nil(t, (*DatasetSchemaFieldID)(nil).IDRef()) -} - -func TestDatasetSchemaFieldID_StringRef(t *testing.T) { - id := NewDatasetSchemaFieldID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&DatasetSchemaFieldID{}).StringRef()) - assert.Nil(t, (*DatasetSchemaFieldID)(nil).StringRef()) -} - -func TestDatasetSchemaFieldID_MarhsalJSON(t *testing.T) { - id := NewDatasetSchemaFieldID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = 
(&DatasetSchemaFieldID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*DatasetSchemaFieldID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestDatasetSchemaFieldID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustDatasetSchemaFieldID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &DatasetSchemaFieldID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestDatasetSchemaFieldID_MarshalText(t *testing.T) { - id := New() - res, err := DatasetSchemaFieldIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&DatasetSchemaFieldID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*DatasetSchemaFieldID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestDatasetSchemaFieldID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &DatasetSchemaFieldID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestDatasetSchemaFieldID_IsNil(t *testing.T) { - assert.True(t, DatasetSchemaFieldID{}.IsNil()) - assert.False(t, NewDatasetSchemaFieldID().IsNil()) -} - -func TestDatasetSchemaFieldID_IsNilRef(t *testing.T) { - assert.True(t, DatasetSchemaFieldID{}.Ref().IsNilRef()) - assert.True(t, (*DatasetSchemaFieldID)(nil).IsNilRef()) - assert.False(t, NewDatasetSchemaFieldID().Ref().IsNilRef()) -} - -func TestDatasetSchemaFieldIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []DatasetSchemaFieldID - expected []string - }{ - { - name: "Empty slice", - input: make([]DatasetSchemaFieldID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: 
"multiple elements", - input: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, DatasetSchemaFieldIDsToStrings(tt.input)) - }) - } -} - -func TestDatasetSchemaFieldIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []DatasetSchemaFieldID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []DatasetSchemaFieldID - err error - }{ - res: make([]DatasetSchemaFieldID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []DatasetSchemaFieldID - err error - }{ - res: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []DatasetSchemaFieldID - err error - }{ - res: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []DatasetSchemaFieldID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := DatasetSchemaFieldIDsFrom(tc.input) - if 
tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestDatasetSchemaFieldIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []DatasetSchemaFieldID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]DatasetSchemaFieldID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaFieldIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetSchemaFieldIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []DatasetSchemaFieldID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]DatasetSchemaFieldID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID(id1.String()), - MustDatasetSchemaFieldID(id2.String()), - MustDatasetSchemaFieldID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - 
t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaFieldIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetSchemaFieldIDsToID(t *testing.T) { - tests := []struct { - name string - input []DatasetSchemaFieldID - expected []ID - }{ - { - name: "Empty slice", - input: make([]DatasetSchemaFieldID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaFieldIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetSchemaFieldIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustDatasetSchemaFieldID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustDatasetSchemaFieldID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustDatasetSchemaFieldID(id3.String()) - - tests := []struct { - name string - input []*DatasetSchemaFieldID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*DatasetSchemaFieldID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*DatasetSchemaFieldID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*DatasetSchemaFieldID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t 
*testing.T) { - t.Parallel() - res := DatasetSchemaFieldIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewDatasetSchemaFieldIDSet(t *testing.T) { - DatasetSchemaFieldIdSet := NewDatasetSchemaFieldIDSet() - assert.NotNil(t, DatasetSchemaFieldIdSet) - assert.Empty(t, DatasetSchemaFieldIdSet.m) - assert.Empty(t, DatasetSchemaFieldIdSet.s) -} - -func TestDatasetSchemaFieldIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []DatasetSchemaFieldID - expected *DatasetSchemaFieldIDSet - }{ - { - name: "Empty slice", - input: make([]DatasetSchemaFieldID, 0), - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewDatasetSchemaFieldIDSet() - set.Add(tc.input...) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestDatasetSchemaFieldIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaFieldID - expected *DatasetSchemaFieldIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &DatasetSchemaFieldIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewDatasetSchemaFieldIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestDatasetSchemaFieldIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *DatasetSchemaFieldIDSet - input DatasetSchemaFieldID - expected bool - }{ - { - name: "Empty Set", - target: &DatasetSchemaFieldIDSet{}, - input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestDatasetSchemaFieldIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaFieldIDSet - expected *DatasetSchemaFieldIDSet - }{ - { - name: "Empty set", - input: &DatasetSchemaFieldIDSet{}, - expected: &DatasetSchemaFieldIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &DatasetSchemaFieldIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestDatasetSchemaFieldIDSet_All(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaFieldIDSet - expected []DatasetSchemaFieldID - }{ - { - name: "Empty", - input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{}, - s: nil, - }, - expected: make([]DatasetSchemaFieldID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &DatasetSchemaFieldIDSet{ - m: 
map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestDatasetSchemaFieldIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaFieldIDSet - expected *DatasetSchemaFieldIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewDatasetSchemaFieldIDSet(), - }, - { - name: "Empty set", - input: NewDatasetSchemaFieldIDSet(), - expected: NewDatasetSchemaFieldIDSet(), - }, - { - name: "1 element", - input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestDatasetSchemaFieldIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *DatasetSchemaFieldIDSet - b *DatasetSchemaFieldIDSet - } - expected *DatasetSchemaFieldIDSet - }{ - { - name: "Nil Set", - input: struct { - a *DatasetSchemaFieldIDSet - b *DatasetSchemaFieldIDSet - }{ - a: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *DatasetSchemaFieldIDSet - b *DatasetSchemaFieldIDSet - }{ - a: &DatasetSchemaFieldIDSet{}, - b: &DatasetSchemaFieldIDSet{}, - }, - expected: &DatasetSchemaFieldIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *DatasetSchemaFieldIDSet - b *DatasetSchemaFieldIDSet - }{ - a: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &DatasetSchemaFieldIDSet{}, - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *DatasetSchemaFieldIDSet - b *DatasetSchemaFieldIDSet - }{ - a: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []DatasetSchemaFieldID{MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &DatasetSchemaFieldIDSet{ - m: map[DatasetSchemaFieldID]struct{}{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []DatasetSchemaFieldID{ - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - 
}) - } -} diff --git a/pkg/id/dataset_schema_gen.go b/pkg/id/dataset_schema_gen.go deleted file mode 100644 index 66efb0a3e..000000000 --- a/pkg/id/dataset_schema_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// DatasetSchemaID is an ID for DatasetSchema. -type DatasetSchemaID ID - -// NewDatasetSchemaID generates a new DatasetSchemaId. -func NewDatasetSchemaID() DatasetSchemaID { - return DatasetSchemaID(New()) -} - -// DatasetSchemaIDFrom generates a new DatasetSchemaID from a string. -func DatasetSchemaIDFrom(i string) (nid DatasetSchemaID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = DatasetSchemaID(did) - return -} - -// MustDatasetSchemaID generates a new DatasetSchemaID from a string, but panics if the string cannot be parsed. -func MustDatasetSchemaID(i string) DatasetSchemaID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return DatasetSchemaID(did) -} - -// DatasetSchemaIDFromRef generates a new DatasetSchemaID from a string ref. -func DatasetSchemaIDFromRef(i *string) *DatasetSchemaID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := DatasetSchemaID(*did) - return &nid -} - -// DatasetSchemaIDFromRefID generates a new DatasetSchemaID from a ref of a generic ID. -func DatasetSchemaIDFromRefID(i *ID) *DatasetSchemaID { - if i == nil || i.IsNil() { - return nil - } - nid := DatasetSchemaID(*i) - return &nid -} - -// ID returns a domain ID. -func (d DatasetSchemaID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d DatasetSchemaID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d DatasetSchemaID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. 
-func (d DatasetSchemaID) GoString() string { - return "DatasetSchemaID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d DatasetSchemaID) Ref() *DatasetSchemaID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d DatasetSchemaID) Contains(ids []DatasetSchemaID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *DatasetSchemaID) CopyRef() *DatasetSchemaID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *DatasetSchemaID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *DatasetSchemaID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *DatasetSchemaID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *DatasetSchemaID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = DatasetSchemaIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *DatasetSchemaID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *DatasetSchemaID) UnmarshalText(text []byte) (err error) { - *d, err = DatasetSchemaIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d DatasetSchemaID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a 
ID is nil or zero-value -func (d *DatasetSchemaID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// DatasetSchemaIDsToStrings converts IDs into a string slice. -func DatasetSchemaIDsToStrings(ids []DatasetSchemaID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// DatasetSchemaIDsFrom converts a string slice into a ID slice. -func DatasetSchemaIDsFrom(ids []string) ([]DatasetSchemaID, error) { - dids := make([]DatasetSchemaID, 0, len(ids)) - for _, i := range ids { - did, err := DatasetSchemaIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// DatasetSchemaIDsFromID converts a generic ID slice into a ID slice. -func DatasetSchemaIDsFromID(ids []ID) []DatasetSchemaID { - dids := make([]DatasetSchemaID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, DatasetSchemaID(i)) - } - return dids -} - -// DatasetSchemaIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func DatasetSchemaIDsFromIDRef(ids []*ID) []DatasetSchemaID { - dids := make([]DatasetSchemaID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, DatasetSchemaID(*i)) - } - } - return dids -} - -// DatasetSchemaIDsToID converts a ID slice into a generic ID slice. -func DatasetSchemaIDsToID(ids []DatasetSchemaID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// DatasetSchemaIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func DatasetSchemaIDsToIDRef(ids []*DatasetSchemaID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// DatasetSchemaIDSet represents a set of DatasetSchemaIDs -type DatasetSchemaIDSet struct { - m map[DatasetSchemaID]struct{} - s []DatasetSchemaID -} - -// NewDatasetSchemaIDSet creates a new DatasetSchemaIDSet -func NewDatasetSchemaIDSet() *DatasetSchemaIDSet { - return &DatasetSchemaIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *DatasetSchemaIDSet) Add(p ...DatasetSchemaID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[DatasetSchemaID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []DatasetSchemaID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *DatasetSchemaIDSet) AddRef(p *DatasetSchemaID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *DatasetSchemaIDSet) Has(p DatasetSchemaID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *DatasetSchemaIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *DatasetSchemaIDSet) All() []DatasetSchemaID { - if s == nil { - return nil - } - return append([]DatasetSchemaID{}, s.s...) -} - -// Clone returns a cloned set -func (s *DatasetSchemaIDSet) Clone() *DatasetSchemaIDSet { - if s == nil { - return NewDatasetSchemaIDSet() - } - s2 := NewDatasetSchemaIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *DatasetSchemaIDSet) Merge(s2 *DatasetSchemaIDSet) *DatasetSchemaIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) 
- return s3 -} diff --git a/pkg/id/dataset_schema_gen_test.go b/pkg/id/dataset_schema_gen_test.go deleted file mode 100644 index 48eab60a5..000000000 --- a/pkg/id/dataset_schema_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewDatasetSchemaID(t *testing.T) { - id := NewDatasetSchemaID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestDatasetSchemaIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result DatasetSchemaID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result DatasetSchemaID - err error - }{ - result: DatasetSchemaID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result DatasetSchemaID - err error - }{ - result: DatasetSchemaID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result DatasetSchemaID - err error - }{ - result: DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := DatasetSchemaIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustDatasetSchemaID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected DatasetSchemaID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - 
shouldPanic: false, - expected: DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustDatasetSchemaID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestDatasetSchemaIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *DatasetSchemaID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &DatasetSchemaID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := DatasetSchemaIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestDatasetSchemaIDFromRefID(t *testing.T) { - id := New() - id2 := DatasetSchemaIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, DatasetSchemaIDFromRefID(nil)) - assert.Nil(t, DatasetSchemaIDFromRefID(&ID{})) -} - -func TestDatasetSchemaID_ID(t *testing.T) { - id := New() - id2 := DatasetSchemaIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestDatasetSchemaID_String(t *testing.T) { - id := New() - id2 := DatasetSchemaIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", DatasetSchemaID{}.String()) -} - -func TestDatasetSchemaID_RefString(t *testing.T) { - id := NewDatasetSchemaID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, DatasetSchemaID{}.RefString()) -} - -func TestDatasetSchemaID_GoString(t *testing.T) { - id := New() - id2 := DatasetSchemaIDFromRefID(&id) - assert.Equal(t, "DatasetSchemaID("+id.String()+")", id2.GoString()) - assert.Equal(t, "DatasetSchemaID()", 
DatasetSchemaID{}.GoString()) -} - -func TestDatasetSchemaID_Ref(t *testing.T) { - id := NewDatasetSchemaID() - assert.Equal(t, DatasetSchemaID(id), *id.Ref()) - assert.Nil(t, (&DatasetSchemaID{}).Ref()) -} - -func TestDatasetSchemaID_Contains(t *testing.T) { - id := NewDatasetSchemaID() - id2 := NewDatasetSchemaID() - assert.True(t, id.Contains([]DatasetSchemaID{id, id2})) - assert.False(t, DatasetSchemaID{}.Contains([]DatasetSchemaID{id, id2, {}})) - assert.False(t, id.Contains([]DatasetSchemaID{id2})) -} - -func TestDatasetSchemaID_CopyRef(t *testing.T) { - id := NewDatasetSchemaID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*DatasetSchemaID)(nil).CopyRef()) -} - -func TestDatasetSchemaID_IDRef(t *testing.T) { - id := New() - id2 := DatasetSchemaIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&DatasetSchemaID{}).IDRef()) - assert.Nil(t, (*DatasetSchemaID)(nil).IDRef()) -} - -func TestDatasetSchemaID_StringRef(t *testing.T) { - id := NewDatasetSchemaID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&DatasetSchemaID{}).StringRef()) - assert.Nil(t, (*DatasetSchemaID)(nil).StringRef()) -} - -func TestDatasetSchemaID_MarhsalJSON(t *testing.T) { - id := NewDatasetSchemaID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&DatasetSchemaID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*DatasetSchemaID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestDatasetSchemaID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustDatasetSchemaID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &DatasetSchemaID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestDatasetSchemaID_MarshalText(t *testing.T) { - id := New() - res, err := 
DatasetSchemaIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&DatasetSchemaID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*DatasetSchemaID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestDatasetSchemaID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &DatasetSchemaID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestDatasetSchemaID_IsNil(t *testing.T) { - assert.True(t, DatasetSchemaID{}.IsNil()) - assert.False(t, NewDatasetSchemaID().IsNil()) -} - -func TestDatasetSchemaID_IsNilRef(t *testing.T) { - assert.True(t, DatasetSchemaID{}.Ref().IsNilRef()) - assert.True(t, (*DatasetSchemaID)(nil).IsNilRef()) - assert.False(t, NewDatasetSchemaID().Ref().IsNilRef()) -} - -func TestDatasetSchemaIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []DatasetSchemaID - expected []string - }{ - { - name: "Empty slice", - input: make([]DatasetSchemaID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, DatasetSchemaIDsToStrings(tt.input)) - }) - } -} - -func TestDatasetSchemaIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []DatasetSchemaID - 
err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []DatasetSchemaID - err error - }{ - res: make([]DatasetSchemaID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []DatasetSchemaID - err error - }{ - res: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []DatasetSchemaID - err error - }{ - res: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []DatasetSchemaID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := DatasetSchemaIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestDatasetSchemaIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []DatasetSchemaID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]DatasetSchemaID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: 
[]DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetSchemaIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []DatasetSchemaID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]DatasetSchemaID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []DatasetSchemaID{MustDatasetSchemaID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []DatasetSchemaID{ - MustDatasetSchemaID(id1.String()), - MustDatasetSchemaID(id2.String()), - MustDatasetSchemaID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetSchemaIDsToID(t *testing.T) { - tests := []struct { - name string - input []DatasetSchemaID - expected []ID - }{ - { - name: "Empty slice", - input: make([]DatasetSchemaID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestDatasetSchemaIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustDatasetSchemaID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustDatasetSchemaID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustDatasetSchemaID(id3.String()) - - tests := []struct { - name string - input []*DatasetSchemaID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*DatasetSchemaID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*DatasetSchemaID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*DatasetSchemaID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := DatasetSchemaIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewDatasetSchemaIDSet(t *testing.T) { - DatasetSchemaIdSet := NewDatasetSchemaIDSet() - assert.NotNil(t, DatasetSchemaIdSet) - assert.Empty(t, DatasetSchemaIdSet.m) - assert.Empty(t, DatasetSchemaIdSet.s) -} - -func TestDatasetSchemaIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []DatasetSchemaID - expected *DatasetSchemaIDSet - }{ - { - name: "Empty slice", - input: make([]DatasetSchemaID, 0), - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewDatasetSchemaIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestDatasetSchemaIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaID - expected *DatasetSchemaIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &DatasetSchemaIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewDatasetSchemaIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestDatasetSchemaIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *DatasetSchemaIDSet - input DatasetSchemaID - expected bool - }{ - { - name: "Empty Set", - target: &DatasetSchemaIDSet{}, - input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestDatasetSchemaIDSet_Clear(t *testing.T) { - tests := 
[]struct { - name string - input *DatasetSchemaIDSet - expected *DatasetSchemaIDSet - }{ - { - name: "Empty set", - input: &DatasetSchemaIDSet{}, - expected: &DatasetSchemaIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &DatasetSchemaIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestDatasetSchemaIDSet_All(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaIDSet - expected []DatasetSchemaID - }{ - { - name: "Empty", - input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{}, - s: nil, - }, - expected: make([]DatasetSchemaID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - 
MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestDatasetSchemaIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *DatasetSchemaIDSet - expected *DatasetSchemaIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewDatasetSchemaIDSet(), - }, - { - name: "Empty set", - input: NewDatasetSchemaIDSet(), - expected: NewDatasetSchemaIDSet(), - }, - { - name: "1 element", - input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - 
clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestDatasetSchemaIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *DatasetSchemaIDSet - b *DatasetSchemaIDSet - } - expected *DatasetSchemaIDSet - }{ - { - name: "Nil Set", - input: struct { - a *DatasetSchemaIDSet - b *DatasetSchemaIDSet - }{ - a: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *DatasetSchemaIDSet - b *DatasetSchemaIDSet - }{ - a: &DatasetSchemaIDSet{}, - b: &DatasetSchemaIDSet{}, - }, - expected: &DatasetSchemaIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *DatasetSchemaIDSet - b *DatasetSchemaIDSet - }{ - a: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &DatasetSchemaIDSet{}, - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *DatasetSchemaIDSet - b *DatasetSchemaIDSet - }{ - a: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: 
[]DatasetSchemaID{MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &DatasetSchemaIDSet{ - m: map[DatasetSchemaID]struct{}{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []DatasetSchemaID{ - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t1"), - MustDatasetSchemaID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/gen.go b/pkg/id/gen.go deleted file mode 100644 index 0fb715580..000000000 --- a/pkg/id/gen.go +++ /dev/null @@ -1,36 +0,0 @@ -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=asset_gen.go --name=Asset -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_gen.go --name=Dataset -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_gen.go --name=DatasetSchema -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=layer_gen.go --name=Layer -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=project_gen.go --name=Project -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=property_gen.go --name=Property -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=property_item_gen.go --name=PropertyItem -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=scene_gen.go --name=Scene -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=widget_gen.go --name=Widget -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl 
--output=team_gen.go --name=Team -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=user_gen.go --name=User -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=dataset_schema_field_gen.go --name=DatasetSchemaField -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=infobox_field_gen.go --name=InfoboxField -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=auth_request_gen.go --name=AuthRequest -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=tag_gen.go --name=Tag -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id.tmpl --output=cluster_gen.go --name=Cluster - -// Testing -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=asset_gen_test.go --name=Asset -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=tag_gen_test.go --name=Tag -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_gen_test.go --name=Dataset -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_gen_test.go --name=DatasetSchema -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=layer_gen_test.go --name=Layer -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=project_gen_test.go --name=Project -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=property_gen_test.go --name=Property -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=property_item_gen_test.go --name=PropertyItem -//go:generate go run 
github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=scene_gen_test.go --name=Scene -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=widget_gen_test.go --name=Widget -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=team_gen_test.go --name=Team -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=user_gen_test.go --name=User -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=dataset_schema_field_gen_test.go --name=DatasetSchemaField -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=infobox_field_gen_test.go --name=InfoboxField -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=auth_request_gen_test.go --name=AuthRequest -//go:generate go run github.com/reearth/reearth-backend/tools/cmd/gen --template=id_test.tmpl --output=cluster_field_gen_test.go --name=Cluster - -package id diff --git a/pkg/id/id.go b/pkg/id/id.go index c821a30ed..a2f5e3e14 100644 --- a/pkg/id/id.go +++ b/pkg/id/id.go @@ -1,141 +1,204 @@ package id -import ( - "errors" - "math/rand" - "strings" - "sync" - "time" - - "github.com/oklog/ulid" -) - -var ( - entropyLock sync.Mutex - // not safe for concurrent - entropy = ulid.Monotonic(rand.New(rand.NewSource(time.Now().UnixNano())), 0) - ErrInvalidID = errors.New("invalid ID") -) - -type ID struct { - id ulid.ULID -} - -func New() ID { - return ID{generateID()} -} - -func NewAllID(n int) []ID { - if n <= 0 { - return []ID{} - } - if n == 1 { - return []ID{New()} - } - ids := make([]ID, 0, n) - generated := generateAllID(n) - for _, id := range generated { - ids = append(ids, ID{id}) - } - return ids -} - -func NewIDWith(id string) (ID, error) { - return FromID(id) -} - -func FromID(id string) (ID, error) { - 
parsedID, e := parseID(id) - if e != nil { - return ID{}, ErrInvalidID - } - return ID{parsedID}, nil -} - -func FromIDRef(id *string) *ID { - if id == nil || *id == "" { - return nil - } - parsedID, err := parseID(*id) - if err != nil { - return nil - } - nid := ID{parsedID} - return &nid -} - -func MustBeID(id string) ID { - parsedID, err := parseID(id) - if err != nil { - panic("invalid id") - } - return ID{parsedID} -} - -func (i ID) Copy() ID { - return ID{i.id} -} - -func (i ID) Timestamp() time.Time { - return ulid.Time(i.id.Time()) -} - -// String implements fmt.Stringer interface. -func (i ID) String() string { - return strings.ToLower(ulid.ULID(i.id).String()) -} - -// GoString implements fmt.GoStringer interface. -func (i ID) GoString() string { - return "id.ID(" + i.String() + ")" -} - -func (i ID) IsNil() bool { - return i.id.Compare(ulid.ULID{}) == 0 -} - -func (i ID) Compare(i2 ID) int { - return i.id.Compare(i2.id) -} - -func (i ID) Equal(i2 ID) bool { - return i.id.Compare(i2.id) == 0 -} - -func (i *ID) IsEmpty() bool { - return i == nil || (*i).IsNil() -} - -func generateID() ulid.ULID { - entropyLock.Lock() - newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) - entropyLock.Unlock() - return newID -} - -func generateAllID(n int) []ulid.ULID { - ids := make([]ulid.ULID, 0, n) - entropyLock.Lock() - for i := 0; i < n; i++ { - newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) - ids = append(ids, newID) - } - entropyLock.Unlock() - return ids -} - -func parseID(id string) (parsedID ulid.ULID, e error) { - if includeUpperCase(id) { - return parsedID, ErrInvalidID - } - return ulid.Parse(id) -} - -func includeUpperCase(s string) bool { - for _, c := range s { - if 'A' <= c && c <= 'Z' { - return true - } - } - return false -} +import "github.com/reearth/reearth-backend/pkg/id/idx" + +type Asset struct{} +type AuthRequest struct{} +type Dataset struct{} +type DatasetField struct{} +type DatasetSchema struct{} +type Cluster 
struct{} +type InfoboxField struct{} +type Layer struct{} +type PluginExtension struct{} +type Project struct{} +type Property struct{} +type PropertyItem struct{} +type PropertyField struct{} +type PropertySchemaGroup struct{} +type Scene struct{} +type Tag struct{} +type Team struct{} +type User struct{} +type Widget struct{} + +func (Asset) Type() string { return "asset" } +func (AuthRequest) Type() string { return "authRequest" } +func (Dataset) Type() string { return "dataset" } +func (DatasetField) Type() string { return "datasetField" } +func (DatasetSchema) Type() string { return "datasetSchema" } +func (Cluster) Type() string { return "cluster" } +func (InfoboxField) Type() string { return "infoboxField" } +func (Layer) Type() string { return "layer" } +func (PluginExtension) Type() string { return "pluginExtension" } +func (Project) Type() string { return "project" } +func (Property) Type() string { return "property" } +func (PropertyItem) Type() string { return "propertyItem" } +func (PropertyField) Type() string { return "propertyField" } +func (PropertySchemaGroup) Type() string { return "propertySchemaGroup" } +func (Scene) Type() string { return "scene" } +func (Tag) Type() string { return "tag" } +func (Team) Type() string { return "team" } +func (User) Type() string { return "user" } +func (Widget) Type() string { return "widget" } + +type AssetID = idx.ID[Asset] +type AuthRequestID = idx.ID[AuthRequest] +type DatasetID = idx.ID[Dataset] +type DatasetFieldID = idx.ID[DatasetField] +type DatasetSchemaID = idx.ID[DatasetSchema] +type ClusterID = idx.ID[Cluster] +type InfoboxFieldID = idx.ID[InfoboxField] +type LayerID = idx.ID[Layer] +type ProjectID = idx.ID[Project] +type PropertyID = idx.ID[Property] +type PropertyItemID = idx.ID[PropertyItem] +type SceneID = idx.ID[Scene] +type TagID = idx.ID[Tag] +type TeamID = idx.ID[Team] +type UserID = idx.ID[User] +type WidgetID = idx.ID[Widget] + +type PluginExtensionID = idx.StringID[PluginExtension] +type 
PropertySchemaGroupID = idx.StringID[PropertySchemaGroup] +type PropertyFieldID = idx.StringID[PropertyField] + +var NewAssetID = idx.New[Asset] +var NewAuthRequestID = idx.New[AuthRequest] +var NewDatasetID = idx.New[Dataset] +var NewDatasetFieldID = idx.New[DatasetField] +var NewDatasetSchemaID = idx.New[DatasetSchema] +var NewClusterID = idx.New[Cluster] +var NewInfoboxFieldID = idx.New[InfoboxField] +var NewLayerID = idx.New[Layer] +var NewProjectID = idx.New[Project] +var NewPropertyID = idx.New[Property] +var NewPropertyItemID = idx.New[PropertyItem] +var NewSceneID = idx.New[Scene] +var NewTagID = idx.New[Tag] +var NewTeamID = idx.New[Team] +var NewUserID = idx.New[User] +var NewWidgetID = idx.New[Widget] + +var MustAssetID = idx.Must[Asset] +var MustAuthRequestID = idx.Must[AuthRequest] +var MustDatasetID = idx.Must[Dataset] +var MustDatasetFieldID = idx.Must[DatasetField] +var MustDatasetSchemaID = idx.Must[DatasetSchema] +var MustClusterID = idx.Must[Cluster] +var MustInfoboxFieldID = idx.Must[InfoboxField] +var MustLayerID = idx.Must[Layer] +var MustProjectID = idx.Must[Project] +var MustPropertyID = idx.Must[Property] +var MustPropertyItemID = idx.Must[PropertyItem] +var MustSceneID = idx.Must[Scene] +var MustTagID = idx.Must[Tag] +var MustTeamID = idx.Must[Team] +var MustUserID = idx.Must[User] +var MustWidgetID = idx.Must[Widget] + +var AssetIDFrom = idx.From[Asset] +var AuthRequestIDFrom = idx.From[AuthRequest] +var DatasetIDFrom = idx.From[Dataset] +var DatasetFieldIDFrom = idx.From[DatasetField] +var DatasetSchemaIDFrom = idx.From[DatasetSchema] +var ClusterIDFrom = idx.From[Cluster] +var InfoboxFieldIDFrom = idx.From[InfoboxField] +var LayerIDFrom = idx.From[Layer] +var ProjectIDFrom = idx.From[Project] +var PropertyIDFrom = idx.From[Property] +var PropertyItemIDFrom = idx.From[PropertyItem] +var SceneIDFrom = idx.From[Scene] +var TagIDFrom = idx.From[Tag] +var TeamIDFrom = idx.From[Team] +var UserIDFrom = idx.From[User] +var WidgetIDFrom = 
idx.From[Widget] + +var AssetIDFromRef = idx.FromRef[Asset] +var AuthRequestIDFromRef = idx.FromRef[AuthRequest] +var DatasetIDFromRef = idx.FromRef[Dataset] +var DatasetFieldIDFromRef = idx.FromRef[DatasetField] +var DatasetSchemaIDFromRef = idx.FromRef[DatasetSchema] +var ClusterIDFromRef = idx.FromRef[Cluster] +var InfoboxFieldIDFromRef = idx.FromRef[InfoboxField] +var LayerIDFromRef = idx.FromRef[Layer] +var ProjectIDFromRef = idx.FromRef[Project] +var PropertyIDFromRef = idx.FromRef[Property] +var PropertyItemIDFromRef = idx.FromRef[PropertyItem] +var SceneIDFromRef = idx.FromRef[Scene] +var TagIDFromRef = idx.FromRef[Tag] +var TeamIDFromRef = idx.FromRef[Team] +var UserIDFromRef = idx.FromRef[User] +var WidgetIDFromRef = idx.FromRef[Widget] + +var PluginExtensionIDFromRef = idx.StringIDFromRef[PluginExtension] +var PropertyFieldIDFromRef = idx.StringIDFromRef[PropertyField] +var PropertySchemaGroupIDFromRef = idx.StringIDFromRef[PropertySchemaGroup] + +type AssetIDList = idx.List[Asset] +type AuthRequestIDList = idx.List[AuthRequest] +type DatasetIDList = idx.List[Dataset] +type DatasetFieldIDList = idx.List[DatasetField] +type DatasetSchemaIDList = idx.List[DatasetSchema] +type ClusterIDList = idx.List[Cluster] +type InfoboxFieldIDList = idx.List[InfoboxField] +type LayerIDList = idx.List[Layer] +type ProjectIDList = idx.List[Project] +type PropertyIDList = idx.List[Property] +type PropertyItemIDList = idx.List[PropertyItem] +type SceneIDList = idx.List[Scene] +type TagIDList = idx.List[Tag] +type TeamIDList = idx.List[Team] +type UserIDList = idx.List[User] +type WidgetIDList = idx.List[Widget] + +var AssetIDListFrom = idx.ListFrom[Asset] +var AuthRequestIDListFrom = idx.ListFrom[AuthRequest] +var DatasetIDListFrom = idx.ListFrom[Dataset] +var DatasetFieldIDListFrom = idx.ListFrom[DatasetField] +var DatasetSchemaIDListFrom = idx.ListFrom[DatasetSchema] +var ClusterIDListFrom = idx.ListFrom[Cluster] +var InfoboxFieldIDListFrom = idx.ListFrom[InfoboxField] 
+var LayerIDListFrom = idx.ListFrom[Layer] +var ProjectIDListFrom = idx.ListFrom[Project] +var PropertyIDListFrom = idx.ListFrom[Property] +var PropertyItemIDListFrom = idx.ListFrom[PropertyItem] +var SceneIDListFrom = idx.ListFrom[Scene] +var TagIDListFrom = idx.ListFrom[Tag] +var TeamIDListFrom = idx.ListFrom[Team] +var UserIDListFrom = idx.ListFrom[User] +var WidgetIDListFrom = idx.ListFrom[Widget] + +type AssetIDSet = idx.Set[Asset] +type AuthRequestIDSet = idx.Set[AuthRequest] +type DatasetIDSet = idx.Set[Dataset] +type DatasetFieldIDSet = idx.Set[DatasetField] +type DatasetSchemaIDSet = idx.Set[DatasetSchema] +type ClusterIDSet = idx.Set[Cluster] +type InfoboxFieldIDSet = idx.Set[InfoboxField] +type LayerIDSet = idx.Set[Layer] +type ProjectIDSet = idx.Set[Project] +type PropertyIDSet = idx.Set[Property] +type PropertyItemIDSet = idx.Set[PropertyItem] +type SceneIDSet = idx.Set[Scene] +type TagIDSet = idx.Set[Tag] +type TeamIDSet = idx.Set[Team] +type UserIDSet = idx.Set[User] +type WidgetIDSet = idx.Set[Widget] + +var NewAssetIDSet = idx.NewSet[Asset] +var NewAuthRequestIDSet = idx.NewSet[AuthRequest] +var NewDatasetIDSet = idx.NewSet[Dataset] +var NewDatasetFieldIDSet = idx.NewSet[DatasetField] +var NewDatasetSchemaIDSet = idx.NewSet[DatasetSchema] +var NewClusterIDSet = idx.NewSet[Cluster] +var NewInfoboxFieldIDSet = idx.NewSet[InfoboxField] +var NewLayerIDSet = idx.NewSet[Layer] +var NewProjectIDSet = idx.NewSet[Project] +var NewPropertyIDSet = idx.NewSet[Property] +var NewPropertyItemIDSet = idx.NewSet[PropertyItem] +var NewSceneIDSet = idx.NewSet[Scene] +var NewTagIDSet = idx.NewSet[Tag] +var NewTeamIDSet = idx.NewSet[Team] +var NewUserIDSet = idx.NewSet[User] +var NewWidgetIDSet = idx.NewSet[Widget] diff --git a/pkg/id/id.tmpl b/pkg/id/id.tmpl deleted file mode 100644 index f20661ee9..000000000 --- a/pkg/id/id.tmpl +++ /dev/null @@ -1,314 +0,0 @@ -{{ $name := index .Flags.name 0 }} - -package {{.PackageName}} - -import "encoding/json" - -// {{$name}}ID 
is an ID for {{$name}}. -type {{$name}}ID ID - -// New{{$name}}ID generates a new {{$name}}Id. -func New{{$name}}ID() {{$name}}ID { - return {{$name}}ID(New()) -} - -// {{$name}}IDFrom generates a new {{$name}}ID from a string. -func {{$name}}IDFrom(i string) (nid {{$name}}ID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = {{$name}}ID(did) - return -} - -// Must{{$name}}ID generates a new {{$name}}ID from a string, but panics if the string cannot be parsed. -func Must{{$name}}ID(i string) {{$name}}ID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return {{$name}}ID(did) -} - -// {{$name}}IDFromRef generates a new {{$name}}ID from a string ref. -func {{$name}}IDFromRef(i *string) *{{$name}}ID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := {{$name}}ID(*did) - return &nid -} - -// {{$name}}IDFromRefID generates a new {{$name}}ID from a ref of a generic ID. -func {{$name}}IDFromRefID(i *ID) *{{$name}}ID { - if i == nil || i.IsNil() { - return nil - } - nid := {{$name}}ID(*i) - return &nid -} - -// ID returns a domain ID. -func (d {{$name}}ID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d {{$name}}ID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d {{$name}}ID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d {{$name}}ID) GoString() string { - return "{{$name}}ID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d {{$name}}ID) Ref() *{{$name}}ID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. 
-func (d {{$name}}ID) Contains(ids []{{$name}}ID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *{{$name}}ID) CopyRef() *{{$name}}ID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *{{$name}}ID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *{{$name}}ID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *{{$name}}ID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *{{$name}}ID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = {{$name}}IDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *{{$name}}ID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *{{$name}}ID) UnmarshalText(text []byte) (err error) { - *d, err = {{$name}}IDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d {{$name}}ID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *{{$name}}ID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// {{$name}}IDsToStrings converts IDs into a string slice. 
-func {{$name}}IDsToStrings(ids []{{$name}}ID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// {{$name}}IDsFrom converts a string slice into a ID slice. -func {{$name}}IDsFrom(ids []string) ([]{{$name}}ID, error) { - dids := make([]{{$name}}ID, 0, len(ids)) - for _, i := range ids { - did, err := {{$name}}IDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// {{$name}}IDsFromID converts a generic ID slice into a ID slice. -func {{$name}}IDsFromID(ids []ID) []{{$name}}ID { - dids := make([]{{$name}}ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, {{$name}}ID(i)) - } - return dids -} - -// {{$name}}IDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func {{$name}}IDsFromIDRef(ids []*ID) []{{$name}}ID { - dids := make([]{{$name}}ID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, {{$name}}ID(*i)) - } - } - return dids -} - -// {{$name}}IDsToID converts a ID slice into a generic ID slice. -func {{$name}}IDsToID(ids []{{$name}}ID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// {{$name}}IDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func {{$name}}IDsToIDRef(ids []*{{$name}}ID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// {{$name}}IDSet represents a set of {{$name}}IDs -type {{$name}}IDSet struct { - m map[{{$name}}ID]struct{} - s []{{$name}}ID -} - -// New{{$name}}IDSet creates a new {{$name}}IDSet -func New{{$name}}IDSet() *{{$name}}IDSet { - return &{{$name}}IDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *{{$name}}IDSet) Add(p ...{{$name}}ID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[{{$name}}ID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []{{$name}}ID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *{{$name}}IDSet) AddRef(p *{{$name}}ID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *{{$name}}IDSet) Has(p {{$name}}ID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *{{$name}}IDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *{{$name}}IDSet) All() []{{$name}}ID { - if s == nil { - return nil - } - return append([]{{$name}}ID{}, s.s...) -} - -// Clone returns a cloned set -func (s *{{$name}}IDSet) Clone() *{{$name}}IDSet { - if s == nil { - return New{{$name}}IDSet() - } - s2 := New{{$name}}IDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *{{$name}}IDSet) Merge(s2 *{{$name}}IDSet) *{{$name}}IDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) 
- return s3 -} diff --git a/pkg/id/id_test.go b/pkg/id/id_test.go deleted file mode 100644 index d68e2146b..000000000 --- a/pkg/id/id_test.go +++ /dev/null @@ -1,359 +0,0 @@ -package id - -import ( - "strings" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestID_New(t *testing.T) { - id := New() - assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - assert.NotNil(t, ulID) - assert.Nil(t, err) -} - -func TestID_NewAllID(t *testing.T) { - tests := []struct { - name string - input int - expected int - }{ - { - name: "success: Zero ID", - input: 0, - expected: 0, - }, - { - name: "success: One ID", - input: 1, - expected: 1, - }, - { - name: "success: Multiple IDs", - input: 5, - expected: 5, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := NewAllID(tt.input) - assert.Equal(t, tt.expected, len(result)) - - for _, id := range result { - assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - assert.NotNil(t, ulID) - assert.Nil(t, err) - } - }) - } -} - -func TestID_NewIDWith(t *testing.T) { - tests := []struct { - name string - input string - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - }, - { - name: "Fail:Not valid string", - input: "", - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := NewIDWith(tt.input) - exResult, exErr := FromID(tt.input) - assert.Equal(t, exResult, result) - assert.Equal(t, exErr, err) - }) - } -} - -func TestID_FromID(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result ID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result ID - err error - }{ - ID{}, - ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - 
result ID - err error - }{ - ID{}, - ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result ID - err error - }{ - ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := FromID(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestID_FromIDRef(t *testing.T) { - tests := []struct { - name string - input string - expected *ID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := FromIDRef(&tt.input) - assert.Equal(t, tt.expected, result) - if tt.expected != nil { - assert.Equal(t, tt.expected, result) - } - }) - } -} - -func TestID_MustBeID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected ID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustBeID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestID_Copy(t *testing.T) { - id := New() - id2 := id.Copy() - assert.Equal(t, 
id.id, id2.id) - assert.NotSame(t, id.id, id2.id) -} - -func TestID_Timestamp(t *testing.T) { - id := New() - assert.Equal(t, ulid.Time(id.id.Time()), id.Timestamp()) -} - -func TestID_String(t *testing.T) { - id := MustBeID("01f2r7kg1fvvffp0gmexgy5hxy") - assert.Equal(t, id.String(), "01f2r7kg1fvvffp0gmexgy5hxy") -} - -func TestID_GoString(t *testing.T) { - id := MustBeID("01f2r7kg1fvvffp0gmexgy5hxy") - assert.Equal(t, id.GoString(), "id.ID(01f2r7kg1fvvffp0gmexgy5hxy)") -} - -func TestID_IsNil(t *testing.T) { - id := ID{} - assert.True(t, id.IsNil()) - id = New() - assert.False(t, id.IsNil()) -} - -func TestID_Compare(t *testing.T) { - id1 := New() - id2 := New() - assert.Less(t, id1.Compare(id2), 0) - assert.Greater(t, id2.Compare(id1), 0) - assert.Equal(t, id1.Compare(id1), 0) - assert.Equal(t, id2.Compare(id2), 0) -} - -func TestID_Equal(t *testing.T) { - id1 := New() - id2 := id1.Copy() - assert.True(t, id1.Equal(id2)) - assert.False(t, id1.Equal(New())) -} - -func TestID_IsEmpty(t *testing.T) { - id := ID{} - assert.True(t, id.IsEmpty()) - id = New() - assert.False(t, id.IsEmpty()) -} - -func TestID_generateID(t *testing.T) { - id := generateID() - assert.NotNil(t, id) -} - -func TestID_generateAllID(t *testing.T) { - tests := []struct { - name string - input int - expected int - }{ - { - name: "success: Zero ID", - input: 0, - expected: 0, - }, - { - name: "success: One ID", - input: 1, - expected: 1, - }, - { - name: "success: Multiple IDs", - input: 5, - expected: 5, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := generateAllID(tt.input) - assert.Equal(t, tt.expected, len(result)) - for _, id := range result { - assert.NotNil(t, id) - ulID, err := ulid.Parse(id.String()) - assert.NotNil(t, ulID) - assert.Nil(t, err) - } - }) - } -} - -func TestID_parseID(t *testing.T) { - _, err := parseID("") - assert.Error(t, err) - - id, err := parseID("01f2r7kg1fvvffp0gmexgy5hxy") - assert.Nil(t, err) 
- assert.Equal(t, strings.ToLower(id.String()), "01f2r7kg1fvvffp0gmexgy5hxy") -} - -func TestID_includeUpperCase(t *testing.T) { - tests := []struct { - name string - input string - expected bool - }{ - { - name: "All small letters", - input: "abcd", - expected: false, - }, - { - name: "Contains Upper case", - input: "Abcd", - expected: true, - }, - { - name: "Contains Upper case", - input: "abcD", - expected: true, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := includeUpperCase(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} diff --git a/pkg/id/id_test.tmpl b/pkg/id/id_test.tmpl deleted file mode 100644 index 6778786a9..000000000 --- a/pkg/id/id_test.tmpl +++ /dev/null @@ -1,978 +0,0 @@ -{{ $name := index .Flags.name 0 }} - -package {{.PackageName}} - -import ( - "encoding" - "errors" - "strings" - "testing" - - "github.com/blang/semver" - "github.com/stretchr/testify/assert" -) - -func TestNew{{$name}}ID(t *testing.T) { - id := New{{$name}}ID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func Test{{$name}}IDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result {{$name}}ID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result {{$name}}ID - err error - }{ - result: {{$name}}ID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result {{$name}}ID - err error - }{ - result: {{$name}}ID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result {{$name}}ID - err error - }{ - result: {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := 
{{$name}}IDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMust{{$name}}ID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected {{$name}}ID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: {{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := Must{{$name}}ID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func Test{{$name}}IDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *{{$name}}ID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &{{$name}}ID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := {{$name}}IDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func Test{{$name}}IDFromRefID(t *testing.T) { - id := New() - id2 := {{$name}}IDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, {{$name}}IDFromRefID(nil)) - assert.Nil(t, {{$name}}IDFromRefID(&ID{})) -} - -func Test{{$name}}ID_ID(t *testing.T) { - id := New() - id2 := {{$name}}IDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func Test{{$name}}ID_String(t *testing.T) { - id := New() - id2 := {{$name}}IDFromRefID(&id) - assert.Equal(t, 
id.String(), id2.String()) - assert.Equal(t, "", {{$name}}ID{}.String()) -} - -func Test{{$name}}ID_RefString(t *testing.T) { - id := New{{$name}}ID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, {{$name}}ID{}.RefString()) -} - -func Test{{$name}}ID_GoString(t *testing.T) { - id := New() - id2 := {{$name}}IDFromRefID(&id) - assert.Equal(t, "{{$name}}ID(" + id.String() + ")", id2.GoString()) - assert.Equal(t, "{{$name}}ID()", {{$name}}ID{}.GoString()) -} - -func Test{{$name}}ID_Ref(t *testing.T) { - id := New{{$name}}ID() - assert.Equal(t, {{$name}}ID(id), *id.Ref()) - assert.Nil(t, (&{{$name}}ID{}).Ref()) -} - -func Test{{$name}}ID_Contains(t *testing.T) { - id := New{{$name}}ID() - id2 := New{{$name}}ID() - assert.True(t, id.Contains([]{{$name}}ID{id, id2})) - assert.False(t, {{$name}}ID{}.Contains([]{{$name}}ID{id, id2, {}})) - assert.False(t, id.Contains([]{{$name}}ID{id2})) -} - -func Test{{$name}}ID_CopyRef(t *testing.T) { - id := New{{$name}}ID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*{{$name}}ID)(nil).CopyRef()) -} - -func Test{{$name}}ID_IDRef(t *testing.T) { - id := New() - id2 := {{$name}}IDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&{{$name}}ID{}).IDRef()) - assert.Nil(t, (*{{$name}}ID)(nil).IDRef()) -} - -func Test{{$name}}ID_StringRef(t *testing.T) { - id := New{{$name}}ID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&{{$name}}ID{}).StringRef()) - assert.Nil(t, (*{{$name}}ID)(nil).StringRef()) -} - -func Test{{$name}}ID_MarhsalJSON(t *testing.T) { - id := New{{$name}}ID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&{{$name}}ID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*{{$name}}ID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func Test{{$name}}ID_UnmarhsalJSON(t *testing.T) { - 
jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := Must{{$name}}ID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &{{$name}}ID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func Test{{$name}}ID_MarshalText(t *testing.T) { - id := New() - res, err := {{$name}}IDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&{{$name}}ID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*{{$name}}ID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func Test{{$name}}ID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &{{$name}}ID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func Test{{$name}}ID_IsNil(t *testing.T) { - assert.True(t, {{$name}}ID{}.IsNil()) - assert.False(t, New{{$name}}ID().IsNil()) -} - -func Test{{$name}}ID_IsNilRef(t *testing.T) { - assert.True(t, {{$name}}ID{}.Ref().IsNilRef()) - assert.True(t, (*{{$name}}ID)(nil).IsNilRef()) - assert.False(t, New{{$name}}ID().Ref().IsNilRef()) -} - -func Test{{$name}}IDsToStrings(t *testing.T) { - tests := []struct { - name string - input []{{$name}}ID - expected []string - }{ - { - name: "Empty slice", - input: make([]{{$name}}ID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - 
assert.Equal(t, tt.expected, {{$name}}IDsToStrings(tt.input)) - }) - } -} - -func Test{{$name}}IDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []{{$name}}ID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []{{$name}}ID - err error - }{ - res: make([]{{$name}}ID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []{{$name}}ID - err error - }{ - res: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []{{$name}}ID - err error - }{ - res: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []{{$name}}ID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := {{$name}}IDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func Test{{$name}}IDsFromID(t *testing.T) { - t.Parallel() - tests := []struct { - name string - input []ID - expected []{{$name}}ID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]{{$name}}ID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: 
[]ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := {{$name}}IDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func Test{{$name}}IDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []{{$name}}ID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]{{$name}}ID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []{{$name}}ID{Must{{$name}}ID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []{{$name}}ID{ - Must{{$name}}ID(id1.String()), - Must{{$name}}ID(id2.String()), - Must{{$name}}ID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := {{$name}}IDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func Test{{$name}}IDsToID(t *testing.T) { - tests := []struct { - name string - input []{{$name}}ID - expected []ID - }{ - { - name: "Empty slice", - input: make([]{{$name}}ID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - 
MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := {{$name}}IDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func Test{{$name}}IDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := Must{{$name}}ID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := Must{{$name}}ID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := Must{{$name}}ID(id3.String()) - - tests := []struct { - name string - input []*{{$name}}ID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*{{$name}}ID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*{{$name}}ID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*{{$name}}ID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := {{$name}}IDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNew{{$name}}IDSet(t *testing.T) { - {{$name}}IdSet := New{{$name}}IDSet() - assert.NotNil(t, {{$name}}IdSet) - assert.Empty(t, {{$name}}IdSet.m) - assert.Empty(t, {{$name}}IdSet.s) -} - -func Test{{$name}}IDSet_Add(t *testing.T) { - tests := []struct { - name string - input []{{$name}}ID - expected *{{$name}}IDSet - }{ - { - name: "Empty slice", - input: make([]{{$name}}ID, 0), - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - 
input: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := New{{$name}}IDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func Test{{$name}}IDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *{{$name}}ID - expected *{{$name}}IDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &{{$name}}IDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := New{{$name}}IDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func Test{{$name}}IDSet_Has(t *testing.T) { - tests := []struct { - name string - target *{{$name}}IDSet - input {{$name}}ID - expected bool - }{ - { - name: "Empty Set", - target: &{{$name}}IDSet{}, - input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func Test{{$name}}IDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *{{$name}}IDSet - expected *{{$name}}IDSet - }{ - { - name: "Empty set", - input: 
&{{$name}}IDSet{}, - expected: &{{$name}}IDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &{{$name}}IDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func Test{{$name}}IDSet_All(t *testing.T) { - tests := []struct { - name string - input *{{$name}}IDSet - expected []{{$name}}ID - }{ - { - name: "Empty", - input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{}, - s: nil, - }, - expected: make([]{{$name}}ID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func Test{{$name}}IDSet_Clone(t *testing.T) { - 
tests := []struct { - name string - input *{{$name}}IDSet - expected *{{$name}}IDSet - }{ - { - name: "nil set", - input: nil, - expected: New{{$name}}IDSet(), - }, - { - name: "Empty set", - input: New{{$name}}IDSet(), - expected: New{{$name}}IDSet(), - }, - { - name: "1 element", - input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func Test{{$name}}IDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *{{$name}}IDSet - b *{{$name}}IDSet - } - expected *{{$name}}IDSet - }{ - { - name: "Nil Set", - input: struct { - a *{{$name}}IDSet - b *{{$name}}IDSet - }{ - a: &{{$name}}IDSet{ - m: 
map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *{{$name}}IDSet - b *{{$name}}IDSet - }{ - a: &{{$name}}IDSet{}, - b: &{{$name}}IDSet{}, - }, - expected: &{{$name}}IDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *{{$name}}IDSet - b *{{$name}}IDSet - }{ - a: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &{{$name}}IDSet{}, - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *{{$name}}IDSet - b *{{$name}}IDSet - }{ - a: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []{{$name}}ID{Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &{{$name}}IDSet{ - m: map[{{$name}}ID]struct{}{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []{{$name}}ID{ - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t1"), - Must{{$name}}ID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/idx/id.go b/pkg/id/idx/id.go new file 
mode 100644 index 000000000..9547dca21 --- /dev/null +++ b/pkg/id/idx/id.go @@ -0,0 +1,140 @@ +package idx + +import ( + "errors" + "strings" + "time" + + "github.com/oklog/ulid" + "github.com/reearth/reearth-backend/pkg/util" + "github.com/samber/lo" +) + +var ErrInvalidID = errors.New("invalid ID") + +type Type interface { + Type() string +} + +type ID[T Type] struct { + id ulid.ULID +} + +func New[T Type]() ID[T] { + return ID[T]{id: generateID()} +} + +func NewAll[T Type](n int) (l List[T]) { + if n <= 0 { + return + } + if n == 1 { + return List[T]{New[T]()} + } + return lo.Map(generateAllID(n), func(id ulid.ULID, _ int) ID[T] { + return ID[T]{id: id} + }) +} + +func From[T Type](id string) (ID[T], error) { + parsedID, e := parseID(id) + if e != nil { + return ID[T]{}, ErrInvalidID + } + return ID[T]{id: parsedID}, nil +} + +func Must[T Type](id string) ID[T] { + return util.Must(From[T](id)) +} + +func FromRef[T Type](id *string) *ID[T] { + if id == nil { + return nil + } + nid, err := From[T](*id) + if err != nil { + return nil + } + return &nid +} + +func (id ID[T]) Ref() *ID[T] { + return &id +} + +func (id ID[T]) Clone() ID[T] { + return ID[T]{id: id.id} +} + +func (id *ID[T]) CloneRef() *ID[T] { + if id == nil { + return nil + } + i := id.Clone() + return &i +} + +func (id *ID[T]) CopyRef() *ID[T] { + return id.CloneRef() +} + +func (ID[T]) Type() string { + var t T + return t.Type() +} + +func (id ID[T]) Timestamp() time.Time { + return ulid.Time(id.id.Time()) +} + +// String implements fmt.Stringer interface. +func (id ID[T]) String() string { + if id.IsEmpty() { + return "" + } + return strings.ToLower(ulid.ULID(id.id).String()) +} + +func (id *ID[T]) StringRef() *string { + if id == nil { + return nil + } + s := id.String() + return &s +} + +// GoString implements fmt.GoStringer interface. 
+func (id ID[T]) GoString() string { + return id.Type() + "ID(" + id.String() + ")" +} + +func (id ID[T]) Compare(id2 ID[T]) int { + return id.id.Compare(id2.id) +} + +func (i ID[T]) Equal(i2 ID[T]) bool { + return i.id.Compare(i2.id) == 0 +} + +func (id ID[T]) IsEmpty() bool { + return id.id.Compare(ulid.ULID{}) == 0 +} + +func (id *ID[T]) IsNil() bool { + return id == nil || (*id).IsEmpty() +} + +// MarshalText implements encoding.TextMarshaler interface +func (d *ID[T]) MarshalText() ([]byte, error) { + if d.IsNil() { + return nil, nil + } + return []byte(d.String()), nil +} + +// UnmarshalText implements encoding.TextUnmarshaler interface +func (id *ID[T]) UnmarshalText(b []byte) (err error) { + *id, err = From[T](string(b)) + return +} diff --git a/pkg/id/idx/id_test.go b/pkg/id/idx/id_test.go new file mode 100644 index 000000000..10b116a58 --- /dev/null +++ b/pkg/id/idx/id_test.go @@ -0,0 +1,26 @@ +package idx + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type TID = ID[T] + +// T is a dummy ID type for unit tests +type T struct{} + +func (T) Type() string { return "_" } + +var idstr = mustParseID("01fzxycwmq7n84q8kessktvb8z") + +func TestID_String(t *testing.T) { + assert.Equal(t, "01fzxycwmq7n84q8kessktvb8z", TID{id: idstr}.String()) + assert.Equal(t, "", ID[T]{}.String()) +} + +func TestID_GoString(t *testing.T) { + assert.Equal(t, "_ID(01fzxycwmq7n84q8kessktvb8z)", TID{id: idstr}.GoString()) + assert.Equal(t, "_ID()", TID{}.GoString()) +} diff --git a/pkg/id/idx/list.go b/pkg/id/idx/list.go new file mode 100644 index 000000000..8c8dcffdc --- /dev/null +++ b/pkg/id/idx/list.go @@ -0,0 +1,115 @@ +package idx + +import ( + "github.com/reearth/reearth-backend/pkg/util" + "golang.org/x/exp/slices" +) + +type List[T Type] []ID[T] + +type RefList[T Type] []*ID[T] + +func ListFrom[T Type](ids []string) (List[T], error) { + return util.TryMap(ids, From[T]) +} + +func MustList[T Type](ids []string) List[T] { + return 
util.Must(ListFrom[T](ids)) +} + +func (l List[T]) list() util.List[ID[T]] { + return util.List[ID[T]](l) +} + +func (l List[T]) Has(ids ...ID[T]) bool { + return l.list().Has(ids...) +} + +func (l List[T]) At(i int) *ID[T] { + return l.list().At(i) +} + +func (l List[T]) Index(id ID[T]) int { + return l.list().Index(id) +} + +func (l List[T]) Len() int { + return l.list().Len() +} + +func (l List[T]) Ref() *List[T] { + return (*List[T])(l.list().Ref()) +} + +func (l List[T]) Refs() RefList[T] { + return l.list().Refs() +} + +func (l List[T]) Delete(ids ...ID[T]) List[T] { + return List[T](l.list().Delete(ids...)) +} + +func (l List[T]) DeleteAt(i int) List[T] { + return List[T](l.list().DeleteAt(i)) +} + +func (l List[T]) Add(ids ...ID[T]) List[T] { + return List[T](l.list().Add(ids...)) +} + +func (l List[T]) AddUniq(ids ...ID[T]) List[T] { + return List[T](l.list().AddUniq(ids...)) +} + +func (l List[T]) Insert(i int, ids ...ID[T]) List[T] { + return List[T](l.list().Insert(i, ids...)) +} + +func (l List[T]) Move(e ID[T], to int) List[T] { + return List[T](l.list().Move(e, to)) +} + +func (l List[T]) MoveAt(from, to int) List[T] { + return List[T](l.list().MoveAt(from, to)) +} + +func (l List[T]) Reverse() List[T] { + return List[T](l.list().Reverse()) +} + +func (l List[T]) Concat(m List[T]) List[T] { + return List[T](l.list().Concat(m)) +} + +func (l List[T]) Intersect(m List[T]) List[T] { + return List[T](l.list().Intersect(m)) +} + +func (l List[T]) Strings() []string { + return util.Map(l, func(id ID[T]) string { + return id.String() + }) +} + +func (l List[T]) Clone() List[T] { + return util.Map(l, func(id ID[T]) ID[T] { + return id.Clone() + }) +} + +func (l List[T]) Sort() List[T] { + m := l.list().Copy() + slices.SortStableFunc(m, func(a, b ID[T]) bool { + return a.Compare(b) <= 0 + }) + return List[T](m) +} + +func (l RefList[T]) Deref() List[T] { + return util.FilterMap(l, func(id *ID[T]) *ID[T] { + if id != nil && !(*id).IsNil() { + return id + } + 
return nil + }) +} diff --git a/pkg/id/idx/list_test.go b/pkg/id/idx/list_test.go new file mode 100644 index 000000000..9552e094d --- /dev/null +++ b/pkg/id/idx/list_test.go @@ -0,0 +1,249 @@ +package idx + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestListFrom(t *testing.T) { + id := Must[T]("01g0nzan4qnb2f2s9ehrgv62a3") + + ids, err := ListFrom[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3"}) + assert.NoError(t, err) + assert.Equal(t, List[T]{id}, ids) + + ids, err = ListFrom[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3", "a"}) + assert.Equal(t, ErrInvalidID, err) + assert.Nil(t, ids) +} + +func TestMustList(t *testing.T) { + id := Must[T]("01g0nzan4qnb2f2s9ehrgv62a3") + + ids := MustList[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3"}) + assert.Equal(t, List[T]{id}, ids) + + assert.PanicsWithValue(t, ErrInvalidID, func() { + _ = MustList[T]([]string{"01g0nzan4qnb2f2s9ehrgv62a3", "a"}) + }) +} + +func TestList_Has(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.True(t, l.Has(a)) + assert.True(t, l.Has(a, c)) + assert.False(t, l.Has(c)) + assert.False(t, List[T](nil).Has(a)) +} + +func TestList_At(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).At(0)) + assert.Nil(t, l.At(-1)) + assert.Equal(t, &a, l.At(0)) + assert.Equal(t, &b, l.At(1)) + assert.Nil(t, l.At(2)) +} + +func TestList_Index(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, -1, List[T](nil).Index(a)) + assert.Equal(t, 0, l.Index(a)) + assert.Equal(t, 1, l.Index(b)) + assert.Equal(t, -1, l.Index(c)) +} + +func TestList_Len(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Equal(t, 0, List[T](nil).Len()) + assert.Equal(t, 2, l.Len()) +} + +func TestList_Ref(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Ref()) + assert.Equal(t, &List[T]{a, b}, l.Ref()) +} 
+ +func TestList_Refs(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Refs()) + assert.Equal(t, RefList[T]{&a, &b}, l.Refs()) +} + +func TestList_Delete(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, (List[T])(nil).Delete(b)) + assert.Equal(t, List[T]{a, c}, l.Delete(b)) + assert.Equal(t, List[T]{a, b, c}, l) +} + +func TestList_DeleteAt(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, (List[T])(nil).DeleteAt(1)) + assert.Equal(t, List[T]{a, c}, l.DeleteAt(1)) + assert.Equal(t, List[T]{a, b, c}, l) +} + +func TestList_Add(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b}, List[T](nil).Add(a, b)) + assert.Equal(t, List[T]{a, b, c, a}, l.Add(c, a)) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_AddUniq(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b}, List[T](nil).AddUniq(a, b)) + assert.Equal(t, List[T]{a, b, c}, l.AddUniq(c, a)) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Insert(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b, c}, l.Insert(-1, c)) + assert.Equal(t, List[T]{c, a, b}, l.Insert(0, c)) + assert.Equal(t, List[T]{a, c, b}, l.Insert(1, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(2, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(3, c)) +} + +func TestList_Move(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, List[T](nil).Move(a, -1)) + assert.Equal(t, List[T]{c, a, b}, l.Move(c, 0)) + assert.Equal(t, List[T]{a, b, c}, l) + assert.Equal(t, List[T]{a, b}, l.Move(c, -1)) + assert.Equal(t, List[T]{c, a, b}, l.Move(c, 0)) + assert.Equal(t, List[T]{a, c, b}, l.Move(b, 10)) +} + +func TestList_MoveAt(t 
*testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, List[T](nil).MoveAt(0, -1)) + assert.Equal(t, List[T]{c, a, b}, l.MoveAt(2, 0)) + assert.Equal(t, List[T]{a, b, c}, l) + assert.Equal(t, List[T]{a, b}, l.MoveAt(2, -1)) + assert.Equal(t, List[T]{c, a, b}, l.MoveAt(2, 0)) + assert.Equal(t, List[T]{a, c, b}, l.MoveAt(1, 10)) +} + +func TestList_Reverse(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b, c} + + assert.Nil(t, List[T](nil).Reverse()) + assert.Equal(t, List[T]{c, b, a}, l.Reverse()) + assert.Equal(t, List[T]{a, b, c}, l) +} + +func TestList_Concat(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, c}, List[T](nil).Concat(List[T]{a, c})) + assert.Equal(t, List[T]{a, b, a, c}, l.Concat(List[T]{a, c})) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Intersect(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Intersect(List[T]{c, a})) + assert.Equal(t, List[T]{a}, l.Intersect(List[T]{c, a})) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Strings(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Strings()) + assert.Equal(t, []string{a.String(), b.String()}, l.Strings()) +} + +func TestList_Clone(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Clone()) + assert.Equal(t, List[T]{a, b}, l.Clone()) + assert.NotSame(t, l, l.Clone()) +} + +func TestList_Sort(t *testing.T) { + a := New[T]() + b := New[T]() + l := List[T]{b, a, a} + + assert.Nil(t, List[T](nil).Sort()) + assert.Equal(t, List[T]{a, a, b}, l.Sort()) +} + +func TestList_Deref(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + l := RefList[T]{&b, &a, nil, &c} + + assert.Nil(t, RefList[T](nil).Deref()) + assert.Equal(t, List[T]{b, a, c}, l.Deref()) +} diff 
--git a/pkg/id/idx/set.go b/pkg/id/idx/set.go new file mode 100644 index 000000000..c7d899e76 --- /dev/null +++ b/pkg/id/idx/set.go @@ -0,0 +1,85 @@ +package idx + +type Set[T Type] struct { + l List[T] + m map[ID[T]]struct{} +} + +func NewSet[T Type](id ...ID[T]) *Set[T] { + s := &Set[T]{} + s.Add(id...) + return s +} + +func (s *Set[T]) Has(id ...ID[T]) bool { + if s == nil || s.m == nil { + return false + } + for _, i := range id { + if _, ok := s.m[i]; ok { + return true + } + } + return false +} + +func (s *Set[T]) List() List[T] { + if s == nil { + return nil + } + return s.l.Clone() +} + +func (s *Set[T]) Clone() *Set[T] { + if s == nil { + return nil + } + return NewSet(s.l...) +} + +func (s *Set[T]) Add(id ...ID[T]) { + if s == nil { + return + } + for _, i := range id { + if !s.Has(i) { + if s.m == nil { + s.m = map[ID[T]]struct{}{} + } + s.m[i] = struct{}{} + s.l = append(s.l, i) + } + } +} + +func (s *Set[T]) Merge(sets ...*Set[T]) { + if s == nil { + return + } + for _, t := range sets { + if t != nil { + s.Add(t.l...) + } + } +} + +func (s *Set[T]) Concat(sets ...*Set[T]) *Set[T] { + if s == nil { + return nil + } + ns := s.Clone() + ns.Merge(sets...) 
+ return ns +} + +func (s *Set[T]) Delete(id ...ID[T]) { + if s == nil { + return + } + for _, i := range id { + s.l = s.l.Delete(i) + if s.m != nil { + delete(s.m, i) + } + } +} diff --git a/pkg/id/idx/set_test.go b/pkg/id/idx/set_test.go new file mode 100644 index 000000000..8628a7f51 --- /dev/null +++ b/pkg/id/idx/set_test.go @@ -0,0 +1,86 @@ +package idx + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSet_NewSet(t *testing.T) { + a := New[T]() + assert.Equal(t, &Set[T]{ + l: nil, + m: nil, + }, NewSet[T]()) + assert.Equal(t, &Set[T]{ + l: List[T]{a}, + m: map[ID[T]]struct{}{ + a: {}, + }, + }, NewSet(a)) +} + +func TestSet_Has(t *testing.T) { + a := New[T]() + b := New[T]() + assert.False(t, (*Set[T])(nil).Has(a, b)) + assert.True(t, NewSet(a).Has(a, b)) + assert.False(t, NewSet(a).Has(b)) +} + +func TestSet_List(t *testing.T) { + a := New[T]() + b := New[T]() + assert.Nil(t, (*Set[T])(nil).List()) + assert.Nil(t, NewSet[T]().List()) + assert.Equal(t, List[T]{a, b}, NewSet(a, b).List()) +} + +func TestSet_Clone(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a, b) + assert.Nil(t, (*Set[T])(nil).Clone()) + assert.Equal(t, &Set[T]{}, NewSet[T]().Clone()) + assert.Equal(t, s, s.Clone()) + assert.NotSame(t, s, s.Clone()) +} + +func TestSet_Add(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a) + (*Set[T])(nil).Add(a, b) + s.Add(a, b) + assert.Equal(t, NewSet(a, b), s) +} + +func TestSet_Merge(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a) + u := NewSet(a, b) + (*Set[T])(nil).Merge(u) + s.Merge(u) + assert.Equal(t, NewSet(a, b), s) +} + +func TestSet_Concat(t *testing.T) { + a := New[T]() + b := New[T]() + s := NewSet(a) + u := NewSet(a, b) + assert.Nil(t, (*Set[T])(nil).Concat(u)) + assert.Equal(t, NewSet(a, b), s.Concat(u)) + assert.Equal(t, NewSet(a), s) +} + +func TestSet_Delete(t *testing.T) { + a := New[T]() + b := New[T]() + c := New[T]() + s := NewSet(a, b, c) + 
(*Set[T])(nil).Delete(a, b) + s.Delete(a, b) + assert.Equal(t, NewSet(c), s) +} diff --git a/pkg/id/idx/string.go b/pkg/id/idx/string.go new file mode 100644 index 000000000..7b431d19b --- /dev/null +++ b/pkg/id/idx/string.go @@ -0,0 +1,38 @@ +package idx + +type StringID[T Type] string + +func StringIDFromRef[T Type](id *string) *StringID[T] { + if id == nil { + return nil + } + id2 := StringID[T](*id) + return &id2 +} + +func (id StringID[T]) Ref() *StringID[T] { + if id == "" { + return nil + } + return &id +} + +func (id *StringID[T]) CloneRef() *StringID[T] { + if id == nil { + return nil + } + id2 := *id + return &id2 +} + +func (id StringID[_]) String() string { + return string(id) +} + +func (id *StringID[_]) StringRef() *string { + if id == nil { + return nil + } + id2 := string(*id) + return &id2 +} diff --git a/pkg/id/idx/string_test.go b/pkg/id/idx/string_test.go new file mode 100644 index 000000000..cd6254d86 --- /dev/null +++ b/pkg/id/idx/string_test.go @@ -0,0 +1,29 @@ +package idx + +import ( + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestStringID_Ref(t *testing.T) { + assert.Equal(t, lo.ToPtr(StringID[T]("a")), StringID[T]("a").Ref()) +} + +func TestStringID_CloneRef(t *testing.T) { + s := lo.ToPtr(StringID[T]("a")) + res := s.CloneRef() + assert.Equal(t, s, res) + assert.NotSame(t, s, res) + assert.Nil(t, (*StringID[T])(nil).CloneRef()) +} + +func TestStringID_String(t *testing.T) { + assert.Equal(t, "a", StringID[T]("a").String()) +} + +func TestStringID_StringRef(t *testing.T) { + assert.Equal(t, lo.ToPtr("a"), lo.ToPtr(StringID[T]("a")).StringRef()) + assert.Nil(t, (*StringID[T])(nil).StringRef()) +} diff --git a/pkg/id/idx/ulid.go b/pkg/id/idx/ulid.go new file mode 100644 index 000000000..c93c0c54a --- /dev/null +++ b/pkg/id/idx/ulid.go @@ -0,0 +1,54 @@ +package idx + +import ( + "math/rand" + "sync" + "time" + + "github.com/oklog/ulid" + "github.com/reearth/reearth-backend/pkg/util" +) + +var ( + 
entropyLock sync.Mutex + // not safe for concurrent + entropy = ulid.Monotonic(rand.New(rand.NewSource(time.Now().UnixNano())), 0) +) + +func generateID() ulid.ULID { + entropyLock.Lock() + newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) + entropyLock.Unlock() + return newID +} + +func generateAllID(n int) []ulid.ULID { + ids := make([]ulid.ULID, 0, n) + entropyLock.Lock() + for i := 0; i < n; i++ { + newID := ulid.MustNew(ulid.Timestamp(time.Now().UTC()), entropy) + ids = append(ids, newID) + } + entropyLock.Unlock() + return ids +} + +func parseID(id string) (parsedID ulid.ULID, e error) { + if includeUpperCase(id) { + return parsedID, ErrInvalidID + } + return ulid.Parse(id) +} + +func includeUpperCase(s string) bool { + for _, c := range s { + if 'A' <= c && c <= 'Z' { + return true + } + } + return false +} + +func mustParseID(id string) ulid.ULID { + return util.Must(parseID(id)) +} diff --git a/pkg/id/infobox_field_gen.go b/pkg/id/infobox_field_gen.go deleted file mode 100644 index fed9630c2..000000000 --- a/pkg/id/infobox_field_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// InfoboxFieldID is an ID for InfoboxField. -type InfoboxFieldID ID - -// NewInfoboxFieldID generates a new InfoboxFieldId. -func NewInfoboxFieldID() InfoboxFieldID { - return InfoboxFieldID(New()) -} - -// InfoboxFieldIDFrom generates a new InfoboxFieldID from a string. -func InfoboxFieldIDFrom(i string) (nid InfoboxFieldID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = InfoboxFieldID(did) - return -} - -// MustInfoboxFieldID generates a new InfoboxFieldID from a string, but panics if the string cannot be parsed. -func MustInfoboxFieldID(i string) InfoboxFieldID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return InfoboxFieldID(did) -} - -// InfoboxFieldIDFromRef generates a new InfoboxFieldID from a string ref. 
-func InfoboxFieldIDFromRef(i *string) *InfoboxFieldID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := InfoboxFieldID(*did) - return &nid -} - -// InfoboxFieldIDFromRefID generates a new InfoboxFieldID from a ref of a generic ID. -func InfoboxFieldIDFromRefID(i *ID) *InfoboxFieldID { - if i == nil || i.IsNil() { - return nil - } - nid := InfoboxFieldID(*i) - return &nid -} - -// ID returns a domain ID. -func (d InfoboxFieldID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d InfoboxFieldID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d InfoboxFieldID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d InfoboxFieldID) GoString() string { - return "InfoboxFieldID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d InfoboxFieldID) Ref() *InfoboxFieldID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d InfoboxFieldID) Contains(ids []InfoboxFieldID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *InfoboxFieldID) CopyRef() *InfoboxFieldID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *InfoboxFieldID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. 
-func (d *InfoboxFieldID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *InfoboxFieldID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *InfoboxFieldID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = InfoboxFieldIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *InfoboxFieldID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *InfoboxFieldID) UnmarshalText(text []byte) (err error) { - *d, err = InfoboxFieldIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d InfoboxFieldID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *InfoboxFieldID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// InfoboxFieldIDsToStrings converts IDs into a string slice. -func InfoboxFieldIDsToStrings(ids []InfoboxFieldID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// InfoboxFieldIDsFrom converts a string slice into a ID slice. -func InfoboxFieldIDsFrom(ids []string) ([]InfoboxFieldID, error) { - dids := make([]InfoboxFieldID, 0, len(ids)) - for _, i := range ids { - did, err := InfoboxFieldIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// InfoboxFieldIDsFromID converts a generic ID slice into a ID slice. 
-func InfoboxFieldIDsFromID(ids []ID) []InfoboxFieldID { - dids := make([]InfoboxFieldID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, InfoboxFieldID(i)) - } - return dids -} - -// InfoboxFieldIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func InfoboxFieldIDsFromIDRef(ids []*ID) []InfoboxFieldID { - dids := make([]InfoboxFieldID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, InfoboxFieldID(*i)) - } - } - return dids -} - -// InfoboxFieldIDsToID converts a ID slice into a generic ID slice. -func InfoboxFieldIDsToID(ids []InfoboxFieldID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// InfoboxFieldIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func InfoboxFieldIDsToIDRef(ids []*InfoboxFieldID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// InfoboxFieldIDSet represents a set of InfoboxFieldIDs -type InfoboxFieldIDSet struct { - m map[InfoboxFieldID]struct{} - s []InfoboxFieldID -} - -// NewInfoboxFieldIDSet creates a new InfoboxFieldIDSet -func NewInfoboxFieldIDSet() *InfoboxFieldIDSet { - return &InfoboxFieldIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *InfoboxFieldIDSet) Add(p ...InfoboxFieldID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[InfoboxFieldID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []InfoboxFieldID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *InfoboxFieldIDSet) AddRef(p *InfoboxFieldID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *InfoboxFieldIDSet) Has(p InfoboxFieldID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - 
return ok -} - -// Clear clears all stored IDs -func (s *InfoboxFieldIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *InfoboxFieldIDSet) All() []InfoboxFieldID { - if s == nil { - return nil - } - return append([]InfoboxFieldID{}, s.s...) -} - -// Clone returns a cloned set -func (s *InfoboxFieldIDSet) Clone() *InfoboxFieldIDSet { - if s == nil { - return NewInfoboxFieldIDSet() - } - s2 := NewInfoboxFieldIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *InfoboxFieldIDSet) Merge(s2 *InfoboxFieldIDSet) *InfoboxFieldIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/infobox_field_gen_test.go b/pkg/id/infobox_field_gen_test.go deleted file mode 100644 index 1f6c86b25..000000000 --- a/pkg/id/infobox_field_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewInfoboxFieldID(t *testing.T) { - id := NewInfoboxFieldID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestInfoboxFieldIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result InfoboxFieldID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result InfoboxFieldID - err error - }{ - result: InfoboxFieldID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result InfoboxFieldID - err error - }{ - result: InfoboxFieldID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result InfoboxFieldID - err error - }{ - result: InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - 
err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := InfoboxFieldIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustInfoboxFieldID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected InfoboxFieldID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustInfoboxFieldID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestInfoboxFieldIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *InfoboxFieldID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &InfoboxFieldID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := InfoboxFieldIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestInfoboxFieldIDFromRefID(t *testing.T) { - id := New() - id2 := InfoboxFieldIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, InfoboxFieldIDFromRefID(nil)) - assert.Nil(t, InfoboxFieldIDFromRefID(&ID{})) -} - -func TestInfoboxFieldID_ID(t *testing.T) { - id := New() - id2 
:= InfoboxFieldIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestInfoboxFieldID_String(t *testing.T) { - id := New() - id2 := InfoboxFieldIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", InfoboxFieldID{}.String()) -} - -func TestInfoboxFieldID_RefString(t *testing.T) { - id := NewInfoboxFieldID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, InfoboxFieldID{}.RefString()) -} - -func TestInfoboxFieldID_GoString(t *testing.T) { - id := New() - id2 := InfoboxFieldIDFromRefID(&id) - assert.Equal(t, "InfoboxFieldID("+id.String()+")", id2.GoString()) - assert.Equal(t, "InfoboxFieldID()", InfoboxFieldID{}.GoString()) -} - -func TestInfoboxFieldID_Ref(t *testing.T) { - id := NewInfoboxFieldID() - assert.Equal(t, InfoboxFieldID(id), *id.Ref()) - assert.Nil(t, (&InfoboxFieldID{}).Ref()) -} - -func TestInfoboxFieldID_Contains(t *testing.T) { - id := NewInfoboxFieldID() - id2 := NewInfoboxFieldID() - assert.True(t, id.Contains([]InfoboxFieldID{id, id2})) - assert.False(t, InfoboxFieldID{}.Contains([]InfoboxFieldID{id, id2, {}})) - assert.False(t, id.Contains([]InfoboxFieldID{id2})) -} - -func TestInfoboxFieldID_CopyRef(t *testing.T) { - id := NewInfoboxFieldID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*InfoboxFieldID)(nil).CopyRef()) -} - -func TestInfoboxFieldID_IDRef(t *testing.T) { - id := New() - id2 := InfoboxFieldIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&InfoboxFieldID{}).IDRef()) - assert.Nil(t, (*InfoboxFieldID)(nil).IDRef()) -} - -func TestInfoboxFieldID_StringRef(t *testing.T) { - id := NewInfoboxFieldID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&InfoboxFieldID{}).StringRef()) - assert.Nil(t, (*InfoboxFieldID)(nil).StringRef()) -} - -func TestInfoboxFieldID_MarhsalJSON(t *testing.T) { - id := NewInfoboxFieldID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := 
json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&InfoboxFieldID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*InfoboxFieldID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestInfoboxFieldID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustInfoboxFieldID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &InfoboxFieldID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestInfoboxFieldID_MarshalText(t *testing.T) { - id := New() - res, err := InfoboxFieldIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&InfoboxFieldID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*InfoboxFieldID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestInfoboxFieldID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &InfoboxFieldID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestInfoboxFieldID_IsNil(t *testing.T) { - assert.True(t, InfoboxFieldID{}.IsNil()) - assert.False(t, NewInfoboxFieldID().IsNil()) -} - -func TestInfoboxFieldID_IsNilRef(t *testing.T) { - assert.True(t, InfoboxFieldID{}.Ref().IsNilRef()) - assert.True(t, (*InfoboxFieldID)(nil).IsNilRef()) - assert.False(t, NewInfoboxFieldID().Ref().IsNilRef()) -} - -func TestInfoboxFieldIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []InfoboxFieldID - expected []string - }{ - { - name: "Empty slice", - input: make([]InfoboxFieldID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []InfoboxFieldID{ - 
MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, InfoboxFieldIDsToStrings(tt.input)) - }) - } -} - -func TestInfoboxFieldIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []InfoboxFieldID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []InfoboxFieldID - err error - }{ - res: make([]InfoboxFieldID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []InfoboxFieldID - err error - }{ - res: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []InfoboxFieldID - err error - }{ - res: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []InfoboxFieldID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := InfoboxFieldIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - 
-func TestInfoboxFieldIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []InfoboxFieldID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]InfoboxFieldID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := InfoboxFieldIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestInfoboxFieldIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []InfoboxFieldID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]InfoboxFieldID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []InfoboxFieldID{MustInfoboxFieldID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []InfoboxFieldID{ - MustInfoboxFieldID(id1.String()), - MustInfoboxFieldID(id2.String()), - MustInfoboxFieldID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := InfoboxFieldIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestInfoboxFieldIDsToID(t *testing.T) { - tests := []struct { - name string - input []InfoboxFieldID - expected []ID - }{ - { - name: 
"Empty slice", - input: make([]InfoboxFieldID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := InfoboxFieldIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestInfoboxFieldIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustInfoboxFieldID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustInfoboxFieldID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustInfoboxFieldID(id3.String()) - - tests := []struct { - name string - input []*InfoboxFieldID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*InfoboxFieldID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*InfoboxFieldID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*InfoboxFieldID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := InfoboxFieldIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewInfoboxFieldIDSet(t *testing.T) { - InfoboxFieldIdSet := NewInfoboxFieldIDSet() - assert.NotNil(t, InfoboxFieldIdSet) - assert.Empty(t, InfoboxFieldIdSet.m) - assert.Empty(t, InfoboxFieldIdSet.s) -} - -func TestInfoboxFieldIDSet_Add(t *testing.T) { - tests := 
[]struct { - name string - input []InfoboxFieldID - expected *InfoboxFieldIDSet - }{ - { - name: "Empty slice", - input: make([]InfoboxFieldID, 0), - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewInfoboxFieldIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestInfoboxFieldIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *InfoboxFieldID - expected *InfoboxFieldIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &InfoboxFieldIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewInfoboxFieldIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestInfoboxFieldIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *InfoboxFieldIDSet - input InfoboxFieldID - expected bool - }{ - { - name: "Empty Set", - target: &InfoboxFieldIDSet{}, - input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestInfoboxFieldIDSet_Clear(t *testing.T) { - tests := []struct { - name string - 
input *InfoboxFieldIDSet - expected *InfoboxFieldIDSet - }{ - { - name: "Empty set", - input: &InfoboxFieldIDSet{}, - expected: &InfoboxFieldIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &InfoboxFieldIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestInfoboxFieldIDSet_All(t *testing.T) { - tests := []struct { - name string - input *InfoboxFieldIDSet - expected []InfoboxFieldID - }{ - { - name: "Empty", - input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{}, - s: nil, - }, - expected: make([]InfoboxFieldID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, 
tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestInfoboxFieldIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *InfoboxFieldIDSet - expected *InfoboxFieldIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewInfoboxFieldIDSet(), - }, - { - name: "Empty set", - input: NewInfoboxFieldIDSet(), - expected: NewInfoboxFieldIDSet(), - }, - { - name: "1 element", - input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - 
-func TestInfoboxFieldIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *InfoboxFieldIDSet - b *InfoboxFieldIDSet - } - expected *InfoboxFieldIDSet - }{ - { - name: "Nil Set", - input: struct { - a *InfoboxFieldIDSet - b *InfoboxFieldIDSet - }{ - a: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *InfoboxFieldIDSet - b *InfoboxFieldIDSet - }{ - a: &InfoboxFieldIDSet{}, - b: &InfoboxFieldIDSet{}, - }, - expected: &InfoboxFieldIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *InfoboxFieldIDSet - b *InfoboxFieldIDSet - }{ - a: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &InfoboxFieldIDSet{}, - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *InfoboxFieldIDSet - b *InfoboxFieldIDSet - }{ - a: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []InfoboxFieldID{MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &InfoboxFieldIDSet{ - m: map[InfoboxFieldID]struct{}{ - 
MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []InfoboxFieldID{ - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t1"), - MustInfoboxFieldID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/layer_gen.go b/pkg/id/layer_gen.go deleted file mode 100644 index 1c22b32de..000000000 --- a/pkg/id/layer_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// LayerID is an ID for Layer. -type LayerID ID - -// NewLayerID generates a new LayerId. -func NewLayerID() LayerID { - return LayerID(New()) -} - -// LayerIDFrom generates a new LayerID from a string. -func LayerIDFrom(i string) (nid LayerID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = LayerID(did) - return -} - -// MustLayerID generates a new LayerID from a string, but panics if the string cannot be parsed. -func MustLayerID(i string) LayerID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return LayerID(did) -} - -// LayerIDFromRef generates a new LayerID from a string ref. -func LayerIDFromRef(i *string) *LayerID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := LayerID(*did) - return &nid -} - -// LayerIDFromRefID generates a new LayerID from a ref of a generic ID. -func LayerIDFromRefID(i *ID) *LayerID { - if i == nil || i.IsNil() { - return nil - } - nid := LayerID(*i) - return &nid -} - -// ID returns a domain ID. -func (d LayerID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d LayerID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. 
-func (d LayerID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d LayerID) GoString() string { - return "LayerID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d LayerID) Ref() *LayerID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d LayerID) Contains(ids []LayerID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *LayerID) CopyRef() *LayerID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *LayerID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *LayerID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *LayerID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *LayerID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = LayerIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *LayerID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *LayerID) UnmarshalText(text []byte) (err error) { - *d, err = LayerIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d LayerID) IsNil() bool { - return ID(d).IsNil() -} - -// 
IsNilRef returns true if a ID is nil or zero-value -func (d *LayerID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// LayerIDsToStrings converts IDs into a string slice. -func LayerIDsToStrings(ids []LayerID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// LayerIDsFrom converts a string slice into a ID slice. -func LayerIDsFrom(ids []string) ([]LayerID, error) { - dids := make([]LayerID, 0, len(ids)) - for _, i := range ids { - did, err := LayerIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// LayerIDsFromID converts a generic ID slice into a ID slice. -func LayerIDsFromID(ids []ID) []LayerID { - dids := make([]LayerID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, LayerID(i)) - } - return dids -} - -// LayerIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func LayerIDsFromIDRef(ids []*ID) []LayerID { - dids := make([]LayerID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, LayerID(*i)) - } - } - return dids -} - -// LayerIDsToID converts a ID slice into a generic ID slice. -func LayerIDsToID(ids []LayerID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// LayerIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func LayerIDsToIDRef(ids []*LayerID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// LayerIDSet represents a set of LayerIDs -type LayerIDSet struct { - m map[LayerID]struct{} - s []LayerID -} - -// NewLayerIDSet creates a new LayerIDSet -func NewLayerIDSet() *LayerIDSet { - return &LayerIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *LayerIDSet) Add(p ...LayerID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[LayerID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []LayerID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *LayerIDSet) AddRef(p *LayerID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *LayerIDSet) Has(p LayerID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *LayerIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *LayerIDSet) All() []LayerID { - if s == nil { - return nil - } - return append([]LayerID{}, s.s...) -} - -// Clone returns a cloned set -func (s *LayerIDSet) Clone() *LayerIDSet { - if s == nil { - return NewLayerIDSet() - } - s2 := NewLayerIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *LayerIDSet) Merge(s2 *LayerIDSet) *LayerIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/layer_gen_test.go b/pkg/id/layer_gen_test.go deleted file mode 100644 index a570cdf8d..000000000 --- a/pkg/id/layer_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. 
- -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewLayerID(t *testing.T) { - id := NewLayerID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestLayerIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result LayerID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result LayerID - err error - }{ - result: LayerID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result LayerID - err error - }{ - result: LayerID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result LayerID - err error - }{ - result: LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := LayerIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustLayerID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected LayerID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustLayerID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func 
TestLayerIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *LayerID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &LayerID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := LayerIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestLayerIDFromRefID(t *testing.T) { - id := New() - id2 := LayerIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, LayerIDFromRefID(nil)) - assert.Nil(t, LayerIDFromRefID(&ID{})) -} - -func TestLayerID_ID(t *testing.T) { - id := New() - id2 := LayerIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestLayerID_String(t *testing.T) { - id := New() - id2 := LayerIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", LayerID{}.String()) -} - -func TestLayerID_RefString(t *testing.T) { - id := NewLayerID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, LayerID{}.RefString()) -} - -func TestLayerID_GoString(t *testing.T) { - id := New() - id2 := LayerIDFromRefID(&id) - assert.Equal(t, "LayerID("+id.String()+")", id2.GoString()) - assert.Equal(t, "LayerID()", LayerID{}.GoString()) -} - -func TestLayerID_Ref(t *testing.T) { - id := NewLayerID() - assert.Equal(t, LayerID(id), *id.Ref()) - assert.Nil(t, (&LayerID{}).Ref()) -} - -func TestLayerID_Contains(t *testing.T) { - id := NewLayerID() - id2 := NewLayerID() - assert.True(t, id.Contains([]LayerID{id, id2})) - assert.False(t, LayerID{}.Contains([]LayerID{id, id2, {}})) - assert.False(t, id.Contains([]LayerID{id2})) -} - -func TestLayerID_CopyRef(t *testing.T) { - id := NewLayerID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - 
assert.NotSame(t, id, id2) - assert.Nil(t, (*LayerID)(nil).CopyRef()) -} - -func TestLayerID_IDRef(t *testing.T) { - id := New() - id2 := LayerIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&LayerID{}).IDRef()) - assert.Nil(t, (*LayerID)(nil).IDRef()) -} - -func TestLayerID_StringRef(t *testing.T) { - id := NewLayerID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&LayerID{}).StringRef()) - assert.Nil(t, (*LayerID)(nil).StringRef()) -} - -func TestLayerID_MarhsalJSON(t *testing.T) { - id := NewLayerID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&LayerID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*LayerID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestLayerID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustLayerID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &LayerID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestLayerID_MarshalText(t *testing.T) { - id := New() - res, err := LayerIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&LayerID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*LayerID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestLayerID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &LayerID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestLayerID_IsNil(t *testing.T) { - assert.True(t, LayerID{}.IsNil()) - assert.False(t, NewLayerID().IsNil()) -} - -func TestLayerID_IsNilRef(t *testing.T) { - assert.True(t, LayerID{}.Ref().IsNilRef()) - assert.True(t, (*LayerID)(nil).IsNilRef()) - assert.False(t, 
NewLayerID().Ref().IsNilRef()) -} - -func TestLayerIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []LayerID - expected []string - }{ - { - name: "Empty slice", - input: make([]LayerID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, LayerIDsToStrings(tt.input)) - }) - } -} - -func TestLayerIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []LayerID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []LayerID - err error - }{ - res: make([]LayerID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []LayerID - err error - }{ - res: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []LayerID - err error - }{ - res: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []LayerID - err error - }{ - res: nil, - err: ErrInvalidID, - 
}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := LayerIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestLayerIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []LayerID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]LayerID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := LayerIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestLayerIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []LayerID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]LayerID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []LayerID{MustLayerID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []LayerID{ - MustLayerID(id1.String()), - MustLayerID(id2.String()), - MustLayerID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := LayerIDsFromIDRef(tc.input) - 
assert.Equal(t, tc.expected, res) - }) - } -} - -func TestLayerIDsToID(t *testing.T) { - tests := []struct { - name string - input []LayerID - expected []ID - }{ - { - name: "Empty slice", - input: make([]LayerID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := LayerIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestLayerIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustLayerID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustLayerID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustLayerID(id3.String()) - - tests := []struct { - name string - input []*LayerID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*LayerID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*LayerID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*LayerID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := LayerIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewLayerIDSet(t *testing.T) { - LayerIdSet := NewLayerIDSet() - assert.NotNil(t, LayerIdSet) - assert.Empty(t, LayerIdSet.m) - assert.Empty(t, LayerIdSet.s) -} - -func TestLayerIDSet_Add(t *testing.T) { - tests := 
[]struct { - name string - input []LayerID - expected *LayerIDSet - }{ - { - name: "Empty slice", - input: make([]LayerID, 0), - expected: &LayerIDSet{ - m: map[LayerID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewLayerIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestLayerIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *LayerID - expected *LayerIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &LayerIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewLayerIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestLayerIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *LayerIDSet - input LayerID - expected bool - }{ - { - name: "Empty Set", - target: &LayerIDSet{}, - input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestLayerIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *LayerIDSet - expected *LayerIDSet - }{ - { - name: "Empty set", - input: &LayerIDSet{}, - expected: &LayerIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains 
the element", - input: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &LayerIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestLayerIDSet_All(t *testing.T) { - tests := []struct { - name string - input *LayerIDSet - expected []LayerID - }{ - { - name: "Empty", - input: &LayerIDSet{ - m: map[LayerID]struct{}{}, - s: nil, - }, - expected: make([]LayerID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &LayerIDSet{ - m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestLayerIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *LayerIDSet - expected *LayerIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewLayerIDSet(), - }, - { - name: "Empty set", - input: NewLayerIDSet(), - expected: NewLayerIDSet(), - }, - { - name: "1 element", - input: 
&LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &LayerIDSet{ - m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestLayerIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *LayerIDSet - b *LayerIDSet - } - expected *LayerIDSet - }{ - { - name: "Nil Set", - input: struct { - a *LayerIDSet - b *LayerIDSet - }{ - a: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *LayerIDSet - b *LayerIDSet - }{ - a: &LayerIDSet{}, - b: &LayerIDSet{}, - 
}, - expected: &LayerIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *LayerIDSet - b *LayerIDSet - }{ - a: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &LayerIDSet{}, - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *LayerIDSet - b *LayerIDSet - }{ - a: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &LayerIDSet{ - m: map[LayerID]struct{}{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []LayerID{MustLayerID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &LayerIDSet{ - m: map[LayerID]struct{}{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []LayerID{ - MustLayerID("01f3zhcaq35403zdjnd6dcm0t1"), - MustLayerID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go index ead781d85..052e0986a 100644 --- a/pkg/id/plugin.go +++ b/pkg/id/plugin.go @@ -7,7 +7,6 @@ import ( "github.com/blang/semver" ) -// PluginID is an ID for Plugin. 
type PluginID struct { name string version string diff --git a/pkg/id/plugin_extension.go b/pkg/id/plugin_extension.go deleted file mode 100644 index c401a6c50..000000000 --- a/pkg/id/plugin_extension.go +++ /dev/null @@ -1,35 +0,0 @@ -package id - -type PluginExtensionID string - -func PluginExtensionIDFromRef(id *string) *PluginExtensionID { - if id == nil { - return nil - } - id2 := PluginExtensionID(*id) - return &id2 -} - -func (id PluginExtensionID) Ref() *PluginExtensionID { - return &id -} - -func (id *PluginExtensionID) CopyRef() *PluginExtensionID { - if id == nil { - return nil - } - id2 := *id - return &id2 -} - -func (id PluginExtensionID) String() string { - return string(id) -} - -func (id *PluginExtensionID) StringRef() *string { - if id == nil { - return nil - } - id2 := string(*id) - return &id2 -} diff --git a/pkg/id/plugin_extension_test.go b/pkg/id/plugin_extension_test.go deleted file mode 100644 index dc504f873..000000000 --- a/pkg/id/plugin_extension_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package id - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestPluginExtensionIDFromRef(t *testing.T) { - input1 := "testStringId" - expected1 := PluginExtensionID(input1) - - tests := []struct { - name string - input *string - expected *PluginExtensionID - }{ - { - name: "success:string input", - input: &input1, - expected: &expected1, - }, - { - name: "fail:nil pointer", - input: nil, - expected: nil, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - result := PluginExtensionIDFromRef(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestPluginExtensionID_Ref(t *testing.T) { - pluginExtensionID := PluginExtensionID("test") - - assert.Equal(t, &pluginExtensionID, pluginExtensionID.Ref()) -} - -func TestPluginExtensionID_CopyRef(t *testing.T) { - pluginExtensionID := PluginExtensionID("test") - - assert.Equal(t, pluginExtensionID, 
*pluginExtensionID.CopyRef()) - - assert.False(t, pluginExtensionID.Ref() == pluginExtensionID.CopyRef()) -} - -func TestPluginExtensionID_String(t *testing.T) { - pluginExtensionID := PluginExtensionID("test") - - assert.Equal(t, "test", pluginExtensionID.String()) -} - -func TestPluginExtensionID_StringRef(t *testing.T) { - pluginExtensionID := PluginExtensionID("test") - - assert.Equal(t, "test", *pluginExtensionID.StringRef()) -} diff --git a/pkg/id/project_gen.go b/pkg/id/project_gen.go deleted file mode 100644 index aa29ab430..000000000 --- a/pkg/id/project_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// ProjectID is an ID for Project. -type ProjectID ID - -// NewProjectID generates a new ProjectId. -func NewProjectID() ProjectID { - return ProjectID(New()) -} - -// ProjectIDFrom generates a new ProjectID from a string. -func ProjectIDFrom(i string) (nid ProjectID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = ProjectID(did) - return -} - -// MustProjectID generates a new ProjectID from a string, but panics if the string cannot be parsed. -func MustProjectID(i string) ProjectID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return ProjectID(did) -} - -// ProjectIDFromRef generates a new ProjectID from a string ref. -func ProjectIDFromRef(i *string) *ProjectID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := ProjectID(*did) - return &nid -} - -// ProjectIDFromRefID generates a new ProjectID from a ref of a generic ID. -func ProjectIDFromRefID(i *ID) *ProjectID { - if i == nil || i.IsNil() { - return nil - } - nid := ProjectID(*i) - return &nid -} - -// ID returns a domain ID. -func (d ProjectID) ID() ID { - return ID(d) -} - -// String returns a string representation. 
-func (d ProjectID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d ProjectID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d ProjectID) GoString() string { - return "ProjectID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d ProjectID) Ref() *ProjectID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d ProjectID) Contains(ids []ProjectID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *ProjectID) CopyRef() *ProjectID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *ProjectID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. 
-func (d *ProjectID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *ProjectID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *ProjectID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = ProjectIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *ProjectID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *ProjectID) UnmarshalText(text []byte) (err error) { - *d, err = ProjectIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d ProjectID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *ProjectID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// ProjectIDsToStrings converts IDs into a string slice. -func ProjectIDsToStrings(ids []ProjectID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// ProjectIDsFrom converts a string slice into a ID slice. -func ProjectIDsFrom(ids []string) ([]ProjectID, error) { - dids := make([]ProjectID, 0, len(ids)) - for _, i := range ids { - did, err := ProjectIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// ProjectIDsFromID converts a generic ID slice into a ID slice. 
-func ProjectIDsFromID(ids []ID) []ProjectID { - dids := make([]ProjectID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, ProjectID(i)) - } - return dids -} - -// ProjectIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func ProjectIDsFromIDRef(ids []*ID) []ProjectID { - dids := make([]ProjectID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, ProjectID(*i)) - } - } - return dids -} - -// ProjectIDsToID converts a ID slice into a generic ID slice. -func ProjectIDsToID(ids []ProjectID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// ProjectIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func ProjectIDsToIDRef(ids []*ProjectID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// ProjectIDSet represents a set of ProjectIDs -type ProjectIDSet struct { - m map[ProjectID]struct{} - s []ProjectID -} - -// NewProjectIDSet creates a new ProjectIDSet -func NewProjectIDSet() *ProjectIDSet { - return &ProjectIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *ProjectIDSet) Add(p ...ProjectID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[ProjectID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []ProjectID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *ProjectIDSet) AddRef(p *ProjectID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *ProjectIDSet) Has(p ProjectID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *ProjectIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all 
IDs as a slice -func (s *ProjectIDSet) All() []ProjectID { - if s == nil { - return nil - } - return append([]ProjectID{}, s.s...) -} - -// Clone returns a cloned set -func (s *ProjectIDSet) Clone() *ProjectIDSet { - if s == nil { - return NewProjectIDSet() - } - s2 := NewProjectIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *ProjectIDSet) Merge(s2 *ProjectIDSet) *ProjectIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/project_gen_test.go b/pkg/id/project_gen_test.go deleted file mode 100644 index 632c37215..000000000 --- a/pkg/id/project_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewProjectID(t *testing.T) { - id := NewProjectID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestProjectIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result ProjectID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result ProjectID - err error - }{ - result: ProjectID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result ProjectID - err error - }{ - result: ProjectID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result ProjectID - err error - }{ - result: ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := ProjectIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } 
-} - -func TestMustProjectID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected ProjectID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustProjectID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestProjectIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *ProjectID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &ProjectID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := ProjectIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestProjectIDFromRefID(t *testing.T) { - id := New() - id2 := ProjectIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, ProjectIDFromRefID(nil)) - assert.Nil(t, ProjectIDFromRefID(&ID{})) -} - -func TestProjectID_ID(t *testing.T) { - id := New() - id2 := ProjectIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestProjectID_String(t *testing.T) { - id := New() - id2 := ProjectIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", ProjectID{}.String()) -} - -func TestProjectID_RefString(t *testing.T) { - id := NewProjectID() - assert.Equal(t, id.String(), 
*id.RefString()) - assert.Nil(t, ProjectID{}.RefString()) -} - -func TestProjectID_GoString(t *testing.T) { - id := New() - id2 := ProjectIDFromRefID(&id) - assert.Equal(t, "ProjectID("+id.String()+")", id2.GoString()) - assert.Equal(t, "ProjectID()", ProjectID{}.GoString()) -} - -func TestProjectID_Ref(t *testing.T) { - id := NewProjectID() - assert.Equal(t, ProjectID(id), *id.Ref()) - assert.Nil(t, (&ProjectID{}).Ref()) -} - -func TestProjectID_Contains(t *testing.T) { - id := NewProjectID() - id2 := NewProjectID() - assert.True(t, id.Contains([]ProjectID{id, id2})) - assert.False(t, ProjectID{}.Contains([]ProjectID{id, id2, {}})) - assert.False(t, id.Contains([]ProjectID{id2})) -} - -func TestProjectID_CopyRef(t *testing.T) { - id := NewProjectID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*ProjectID)(nil).CopyRef()) -} - -func TestProjectID_IDRef(t *testing.T) { - id := New() - id2 := ProjectIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&ProjectID{}).IDRef()) - assert.Nil(t, (*ProjectID)(nil).IDRef()) -} - -func TestProjectID_StringRef(t *testing.T) { - id := NewProjectID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&ProjectID{}).StringRef()) - assert.Nil(t, (*ProjectID)(nil).StringRef()) -} - -func TestProjectID_MarhsalJSON(t *testing.T) { - id := NewProjectID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&ProjectID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*ProjectID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestProjectID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustProjectID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &ProjectID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestProjectID_MarshalText(t 
*testing.T) { - id := New() - res, err := ProjectIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&ProjectID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*ProjectID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestProjectID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &ProjectID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestProjectID_IsNil(t *testing.T) { - assert.True(t, ProjectID{}.IsNil()) - assert.False(t, NewProjectID().IsNil()) -} - -func TestProjectID_IsNilRef(t *testing.T) { - assert.True(t, ProjectID{}.Ref().IsNilRef()) - assert.True(t, (*ProjectID)(nil).IsNilRef()) - assert.False(t, NewProjectID().Ref().IsNilRef()) -} - -func TestProjectIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []ProjectID - expected []string - }{ - { - name: "Empty slice", - input: make([]ProjectID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, ProjectIDsToStrings(tt.input)) - }) - } -} - -func TestProjectIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []ProjectID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res 
[]ProjectID - err error - }{ - res: make([]ProjectID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []ProjectID - err error - }{ - res: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []ProjectID - err error - }{ - res: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []ProjectID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := ProjectIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestProjectIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []ProjectID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]ProjectID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - 
t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ProjectIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestProjectIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []ProjectID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]ProjectID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []ProjectID{MustProjectID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []ProjectID{ - MustProjectID(id1.String()), - MustProjectID(id2.String()), - MustProjectID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ProjectIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestProjectIDsToID(t *testing.T) { - tests := []struct { - name string - input []ProjectID - expected []ID - }{ - { - name: "Empty slice", - input: make([]ProjectID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ProjectIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestProjectIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := 
MustProjectID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustProjectID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustProjectID(id3.String()) - - tests := []struct { - name string - input []*ProjectID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*ProjectID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*ProjectID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*ProjectID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := ProjectIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewProjectIDSet(t *testing.T) { - ProjectIdSet := NewProjectIDSet() - assert.NotNil(t, ProjectIdSet) - assert.Empty(t, ProjectIdSet.m) - assert.Empty(t, ProjectIdSet.s) -} - -func TestProjectIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []ProjectID - expected *ProjectIDSet - }{ - { - name: "Empty slice", - input: make([]ProjectID, 0), - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewProjectIDSet() - set.Add(tc.input...) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestProjectIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *ProjectID - expected *ProjectIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &ProjectIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewProjectIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestProjectIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *ProjectIDSet - input ProjectID - expected bool - }{ - { - name: "Empty Set", - target: &ProjectIDSet{}, - input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestProjectIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *ProjectIDSet - expected *ProjectIDSet - }{ - { - name: "Empty set", - input: &ProjectIDSet{}, - expected: &ProjectIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &ProjectIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestProjectIDSet_All(t *testing.T) { - tests := []struct { - name string - input *ProjectIDSet - expected []ProjectID - }{ - { - name: "Empty", - input: &ProjectIDSet{ - m: map[ProjectID]struct{}{}, - s: nil, - }, - expected: make([]ProjectID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &ProjectIDSet{ - m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestProjectIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *ProjectIDSet - expected *ProjectIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewProjectIDSet(), - }, - { - name: "Empty set", - input: NewProjectIDSet(), - expected: NewProjectIDSet(), - }, - { - name: "1 element", - input: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &ProjectIDSet{ - m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestProjectIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *ProjectIDSet - b *ProjectIDSet - } - expected *ProjectIDSet - }{ - { - name: "Nil Set", - input: struct { - a *ProjectIDSet - b *ProjectIDSet - }{ - a: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *ProjectIDSet - b *ProjectIDSet - }{ - a: &ProjectIDSet{}, - b: &ProjectIDSet{}, - }, - expected: &ProjectIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *ProjectIDSet - b *ProjectIDSet - }{ - a: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &ProjectIDSet{}, - }, - expected: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *ProjectIDSet - b *ProjectIDSet - }{ - a: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &ProjectIDSet{ - m: map[ProjectID]struct{}{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []ProjectID{MustProjectID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: 
&ProjectIDSet{ - m: map[ProjectID]struct{}{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []ProjectID{ - MustProjectID("01f3zhcaq35403zdjnd6dcm0t1"), - MustProjectID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/property_gen.go b/pkg/id/property_gen.go deleted file mode 100644 index dfd762aae..000000000 --- a/pkg/id/property_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// PropertyID is an ID for Property. -type PropertyID ID - -// NewPropertyID generates a new PropertyId. -func NewPropertyID() PropertyID { - return PropertyID(New()) -} - -// PropertyIDFrom generates a new PropertyID from a string. -func PropertyIDFrom(i string) (nid PropertyID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = PropertyID(did) - return -} - -// MustPropertyID generates a new PropertyID from a string, but panics if the string cannot be parsed. -func MustPropertyID(i string) PropertyID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return PropertyID(did) -} - -// PropertyIDFromRef generates a new PropertyID from a string ref. -func PropertyIDFromRef(i *string) *PropertyID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := PropertyID(*did) - return &nid -} - -// PropertyIDFromRefID generates a new PropertyID from a ref of a generic ID. -func PropertyIDFromRefID(i *ID) *PropertyID { - if i == nil || i.IsNil() { - return nil - } - nid := PropertyID(*i) - return &nid -} - -// ID returns a domain ID. -func (d PropertyID) ID() ID { - return ID(d) -} - -// String returns a string representation. 
-func (d PropertyID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d PropertyID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d PropertyID) GoString() string { - return "PropertyID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d PropertyID) Ref() *PropertyID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d PropertyID) Contains(ids []PropertyID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *PropertyID) CopyRef() *PropertyID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *PropertyID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. 
-func (d *PropertyID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *PropertyID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *PropertyID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = PropertyIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *PropertyID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *PropertyID) UnmarshalText(text []byte) (err error) { - *d, err = PropertyIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d PropertyID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *PropertyID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// PropertyIDsToStrings converts IDs into a string slice. -func PropertyIDsToStrings(ids []PropertyID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// PropertyIDsFrom converts a string slice into a ID slice. -func PropertyIDsFrom(ids []string) ([]PropertyID, error) { - dids := make([]PropertyID, 0, len(ids)) - for _, i := range ids { - did, err := PropertyIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// PropertyIDsFromID converts a generic ID slice into a ID slice. 
-func PropertyIDsFromID(ids []ID) []PropertyID { - dids := make([]PropertyID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, PropertyID(i)) - } - return dids -} - -// PropertyIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func PropertyIDsFromIDRef(ids []*ID) []PropertyID { - dids := make([]PropertyID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, PropertyID(*i)) - } - } - return dids -} - -// PropertyIDsToID converts a ID slice into a generic ID slice. -func PropertyIDsToID(ids []PropertyID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// PropertyIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func PropertyIDsToIDRef(ids []*PropertyID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// PropertyIDSet represents a set of PropertyIDs -type PropertyIDSet struct { - m map[PropertyID]struct{} - s []PropertyID -} - -// NewPropertyIDSet creates a new PropertyIDSet -func NewPropertyIDSet() *PropertyIDSet { - return &PropertyIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *PropertyIDSet) Add(p ...PropertyID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[PropertyID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []PropertyID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *PropertyIDSet) AddRef(p *PropertyID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *PropertyIDSet) Has(p PropertyID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *PropertyIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil 
-} - -// All returns stored all IDs as a slice -func (s *PropertyIDSet) All() []PropertyID { - if s == nil { - return nil - } - return append([]PropertyID{}, s.s...) -} - -// Clone returns a cloned set -func (s *PropertyIDSet) Clone() *PropertyIDSet { - if s == nil { - return NewPropertyIDSet() - } - s2 := NewPropertyIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *PropertyIDSet) Merge(s2 *PropertyIDSet) *PropertyIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/property_gen_test.go b/pkg/id/property_gen_test.go deleted file mode 100644 index 6b1034afd..000000000 --- a/pkg/id/property_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewPropertyID(t *testing.T) { - id := NewPropertyID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestPropertyIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result PropertyID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result PropertyID - err error - }{ - result: PropertyID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result PropertyID - err error - }{ - result: PropertyID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result PropertyID - err error - }{ - result: PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := PropertyIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil 
{ - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustPropertyID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected PropertyID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustPropertyID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPropertyIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *PropertyID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &PropertyID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := PropertyIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPropertyIDFromRefID(t *testing.T) { - id := New() - id2 := PropertyIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, PropertyIDFromRefID(nil)) - assert.Nil(t, PropertyIDFromRefID(&ID{})) -} - -func TestPropertyID_ID(t *testing.T) { - id := New() - id2 := PropertyIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestPropertyID_String(t *testing.T) { - id := New() - id2 := PropertyIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", PropertyID{}.String()) -} - -func TestPropertyID_RefString(t 
*testing.T) { - id := NewPropertyID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, PropertyID{}.RefString()) -} - -func TestPropertyID_GoString(t *testing.T) { - id := New() - id2 := PropertyIDFromRefID(&id) - assert.Equal(t, "PropertyID("+id.String()+")", id2.GoString()) - assert.Equal(t, "PropertyID()", PropertyID{}.GoString()) -} - -func TestPropertyID_Ref(t *testing.T) { - id := NewPropertyID() - assert.Equal(t, PropertyID(id), *id.Ref()) - assert.Nil(t, (&PropertyID{}).Ref()) -} - -func TestPropertyID_Contains(t *testing.T) { - id := NewPropertyID() - id2 := NewPropertyID() - assert.True(t, id.Contains([]PropertyID{id, id2})) - assert.False(t, PropertyID{}.Contains([]PropertyID{id, id2, {}})) - assert.False(t, id.Contains([]PropertyID{id2})) -} - -func TestPropertyID_CopyRef(t *testing.T) { - id := NewPropertyID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*PropertyID)(nil).CopyRef()) -} - -func TestPropertyID_IDRef(t *testing.T) { - id := New() - id2 := PropertyIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&PropertyID{}).IDRef()) - assert.Nil(t, (*PropertyID)(nil).IDRef()) -} - -func TestPropertyID_StringRef(t *testing.T) { - id := NewPropertyID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&PropertyID{}).StringRef()) - assert.Nil(t, (*PropertyID)(nil).StringRef()) -} - -func TestPropertyID_MarhsalJSON(t *testing.T) { - id := NewPropertyID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&PropertyID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*PropertyID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestPropertyID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustPropertyID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &PropertyID{} - err := 
id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestPropertyID_MarshalText(t *testing.T) { - id := New() - res, err := PropertyIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&PropertyID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*PropertyID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestPropertyID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &PropertyID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestPropertyID_IsNil(t *testing.T) { - assert.True(t, PropertyID{}.IsNil()) - assert.False(t, NewPropertyID().IsNil()) -} - -func TestPropertyID_IsNilRef(t *testing.T) { - assert.True(t, PropertyID{}.Ref().IsNilRef()) - assert.True(t, (*PropertyID)(nil).IsNilRef()) - assert.False(t, NewPropertyID().Ref().IsNilRef()) -} - -func TestPropertyIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []PropertyID - expected []string - }{ - { - name: "Empty slice", - input: make([]PropertyID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, PropertyIDsToStrings(tt.input)) - }) - } -} - -func TestPropertyIDsFrom(t *testing.T) { - tests := []struct { - name string - input 
[]string - expected struct { - res []PropertyID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []PropertyID - err error - }{ - res: make([]PropertyID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []PropertyID - err error - }{ - res: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []PropertyID - err error - }{ - res: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []PropertyID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := PropertyIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestPropertyIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []PropertyID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]PropertyID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []PropertyID{ - 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := PropertyIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestPropertyIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []PropertyID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]PropertyID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []PropertyID{MustPropertyID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []PropertyID{ - MustPropertyID(id1.String()), - MustPropertyID(id2.String()), - MustPropertyID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := PropertyIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestPropertyIDsToID(t *testing.T) { - tests := []struct { - name string - input []PropertyID - expected []ID - }{ - { - name: "Empty slice", - input: make([]PropertyID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t 
*testing.T) { - t.Parallel() - res := PropertyIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestPropertyIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustPropertyID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustPropertyID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustPropertyID(id3.String()) - - tests := []struct { - name string - input []*PropertyID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*PropertyID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*PropertyID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*PropertyID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := PropertyIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewPropertyIDSet(t *testing.T) { - PropertyIdSet := NewPropertyIDSet() - assert.NotNil(t, PropertyIdSet) - assert.Empty(t, PropertyIdSet.m) - assert.Empty(t, PropertyIdSet.s) -} - -func TestPropertyIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []PropertyID - expected *PropertyIDSet - }{ - { - name: "Empty slice", - input: make([]PropertyID, 0), - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{ - 
MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewPropertyIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestPropertyIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *PropertyID - expected *PropertyIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &PropertyIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewPropertyIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestPropertyIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *PropertyIDSet - input PropertyID - expected bool - }{ - { - name: "Empty Set", - target: &PropertyIDSet{}, - input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestPropertyIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *PropertyIDSet - expected *PropertyIDSet - }{ - { - name: "Empty set", - input: &PropertyIDSet{}, - expected: 
&PropertyIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &PropertyIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestPropertyIDSet_All(t *testing.T) { - tests := []struct { - name string - input *PropertyIDSet - expected []PropertyID - }{ - { - name: "Empty", - input: &PropertyIDSet{ - m: map[PropertyID]struct{}{}, - s: nil, - }, - expected: make([]PropertyID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &PropertyIDSet{ - m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestPropertyIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *PropertyIDSet - expected 
*PropertyIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewPropertyIDSet(), - }, - { - name: "Empty set", - input: NewPropertyIDSet(), - expected: NewPropertyIDSet(), - }, - { - name: "1 element", - input: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &PropertyIDSet{ - m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestPropertyIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *PropertyIDSet - b *PropertyIDSet - } - expected *PropertyIDSet - }{ - { - name: "Nil Set", - input: struct { - a *PropertyIDSet - b *PropertyIDSet - }{ - a: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *PropertyIDSet - b *PropertyIDSet - }{ - a: &PropertyIDSet{}, - b: &PropertyIDSet{}, - }, - expected: &PropertyIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *PropertyIDSet - b *PropertyIDSet - }{ - a: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &PropertyIDSet{}, - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *PropertyIDSet - b *PropertyIDSet - }{ - a: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &PropertyIDSet{ - m: map[PropertyID]struct{}{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []PropertyID{MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &PropertyIDSet{ - m: map[PropertyID]struct{}{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []PropertyID{ - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/property_item_gen.go b/pkg/id/property_item_gen.go deleted file mode 100644 index c749aaab4..000000000 --- a/pkg/id/property_item_gen.go +++ /dev/null @@ -1,314 +0,0 
@@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// PropertyItemID is an ID for PropertyItem. -type PropertyItemID ID - -// NewPropertyItemID generates a new PropertyItemId. -func NewPropertyItemID() PropertyItemID { - return PropertyItemID(New()) -} - -// PropertyItemIDFrom generates a new PropertyItemID from a string. -func PropertyItemIDFrom(i string) (nid PropertyItemID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = PropertyItemID(did) - return -} - -// MustPropertyItemID generates a new PropertyItemID from a string, but panics if the string cannot be parsed. -func MustPropertyItemID(i string) PropertyItemID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return PropertyItemID(did) -} - -// PropertyItemIDFromRef generates a new PropertyItemID from a string ref. -func PropertyItemIDFromRef(i *string) *PropertyItemID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := PropertyItemID(*did) - return &nid -} - -// PropertyItemIDFromRefID generates a new PropertyItemID from a ref of a generic ID. -func PropertyItemIDFromRefID(i *ID) *PropertyItemID { - if i == nil || i.IsNil() { - return nil - } - nid := PropertyItemID(*i) - return &nid -} - -// ID returns a domain ID. -func (d PropertyItemID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d PropertyItemID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d PropertyItemID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d PropertyItemID) GoString() string { - return "PropertyItemID(" + d.String() + ")" -} - -// Ref returns a reference. 
-func (d PropertyItemID) Ref() *PropertyItemID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d PropertyItemID) Contains(ids []PropertyItemID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *PropertyItemID) CopyRef() *PropertyItemID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *PropertyItemID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *PropertyItemID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *PropertyItemID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *PropertyItemID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = PropertyItemIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *PropertyItemID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *PropertyItemID) UnmarshalText(text []byte) (err error) { - *d, err = PropertyItemIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d PropertyItemID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *PropertyItemID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// PropertyItemIDsToStrings 
converts IDs into a string slice. -func PropertyItemIDsToStrings(ids []PropertyItemID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// PropertyItemIDsFrom converts a string slice into a ID slice. -func PropertyItemIDsFrom(ids []string) ([]PropertyItemID, error) { - dids := make([]PropertyItemID, 0, len(ids)) - for _, i := range ids { - did, err := PropertyItemIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// PropertyItemIDsFromID converts a generic ID slice into a ID slice. -func PropertyItemIDsFromID(ids []ID) []PropertyItemID { - dids := make([]PropertyItemID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, PropertyItemID(i)) - } - return dids -} - -// PropertyItemIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func PropertyItemIDsFromIDRef(ids []*ID) []PropertyItemID { - dids := make([]PropertyItemID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, PropertyItemID(*i)) - } - } - return dids -} - -// PropertyItemIDsToID converts a ID slice into a generic ID slice. -func PropertyItemIDsToID(ids []PropertyItemID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// PropertyItemIDsToIDRef converts a ID ref slice into a generic ID ref slice. 
-func PropertyItemIDsToIDRef(ids []*PropertyItemID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// PropertyItemIDSet represents a set of PropertyItemIDs -type PropertyItemIDSet struct { - m map[PropertyItemID]struct{} - s []PropertyItemID -} - -// NewPropertyItemIDSet creates a new PropertyItemIDSet -func NewPropertyItemIDSet() *PropertyItemIDSet { - return &PropertyItemIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *PropertyItemIDSet) Add(p ...PropertyItemID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[PropertyItemID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []PropertyItemID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *PropertyItemIDSet) AddRef(p *PropertyItemID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *PropertyItemIDSet) Has(p PropertyItemID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *PropertyItemIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *PropertyItemIDSet) All() []PropertyItemID { - if s == nil { - return nil - } - return append([]PropertyItemID{}, s.s...) -} - -// Clone returns a cloned set -func (s *PropertyItemIDSet) Clone() *PropertyItemIDSet { - if s == nil { - return NewPropertyItemIDSet() - } - s2 := NewPropertyItemIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *PropertyItemIDSet) Merge(s2 *PropertyItemIDSet) *PropertyItemIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) 
- return s3 -} diff --git a/pkg/id/property_item_gen_test.go b/pkg/id/property_item_gen_test.go deleted file mode 100644 index 1536c6738..000000000 --- a/pkg/id/property_item_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewPropertyItemID(t *testing.T) { - id := NewPropertyItemID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestPropertyItemIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result PropertyItemID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result PropertyItemID - err error - }{ - result: PropertyItemID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result PropertyItemID - err error - }{ - result: PropertyItemID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result PropertyItemID - err error - }{ - result: PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := PropertyItemIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustPropertyItemID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected PropertyItemID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - 
expected: PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustPropertyItemID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPropertyItemIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *PropertyItemID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &PropertyItemID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := PropertyItemIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestPropertyItemIDFromRefID(t *testing.T) { - id := New() - id2 := PropertyItemIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, PropertyItemIDFromRefID(nil)) - assert.Nil(t, PropertyItemIDFromRefID(&ID{})) -} - -func TestPropertyItemID_ID(t *testing.T) { - id := New() - id2 := PropertyItemIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestPropertyItemID_String(t *testing.T) { - id := New() - id2 := PropertyItemIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", PropertyItemID{}.String()) -} - -func TestPropertyItemID_RefString(t *testing.T) { - id := NewPropertyItemID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, PropertyItemID{}.RefString()) -} - -func TestPropertyItemID_GoString(t *testing.T) { - id := New() - id2 := PropertyItemIDFromRefID(&id) - assert.Equal(t, "PropertyItemID("+id.String()+")", id2.GoString()) - assert.Equal(t, "PropertyItemID()", PropertyItemID{}.GoString()) -} - -func 
TestPropertyItemID_Ref(t *testing.T) { - id := NewPropertyItemID() - assert.Equal(t, PropertyItemID(id), *id.Ref()) - assert.Nil(t, (&PropertyItemID{}).Ref()) -} - -func TestPropertyItemID_Contains(t *testing.T) { - id := NewPropertyItemID() - id2 := NewPropertyItemID() - assert.True(t, id.Contains([]PropertyItemID{id, id2})) - assert.False(t, PropertyItemID{}.Contains([]PropertyItemID{id, id2, {}})) - assert.False(t, id.Contains([]PropertyItemID{id2})) -} - -func TestPropertyItemID_CopyRef(t *testing.T) { - id := NewPropertyItemID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*PropertyItemID)(nil).CopyRef()) -} - -func TestPropertyItemID_IDRef(t *testing.T) { - id := New() - id2 := PropertyItemIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&PropertyItemID{}).IDRef()) - assert.Nil(t, (*PropertyItemID)(nil).IDRef()) -} - -func TestPropertyItemID_StringRef(t *testing.T) { - id := NewPropertyItemID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&PropertyItemID{}).StringRef()) - assert.Nil(t, (*PropertyItemID)(nil).StringRef()) -} - -func TestPropertyItemID_MarhsalJSON(t *testing.T) { - id := NewPropertyItemID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&PropertyItemID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*PropertyItemID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestPropertyItemID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustPropertyItemID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &PropertyItemID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestPropertyItemID_MarshalText(t *testing.T) { - id := New() - res, err := PropertyItemIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, 
[]byte(id.String()), res) - - res, err = (&PropertyItemID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*PropertyItemID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestPropertyItemID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &PropertyItemID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestPropertyItemID_IsNil(t *testing.T) { - assert.True(t, PropertyItemID{}.IsNil()) - assert.False(t, NewPropertyItemID().IsNil()) -} - -func TestPropertyItemID_IsNilRef(t *testing.T) { - assert.True(t, PropertyItemID{}.Ref().IsNilRef()) - assert.True(t, (*PropertyItemID)(nil).IsNilRef()) - assert.False(t, NewPropertyItemID().Ref().IsNilRef()) -} - -func TestPropertyItemIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []PropertyItemID - expected []string - }{ - { - name: "Empty slice", - input: make([]PropertyItemID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, PropertyItemIDsToStrings(tt.input)) - }) - } -} - -func TestPropertyItemIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []PropertyItemID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res 
[]PropertyItemID - err error - }{ - res: make([]PropertyItemID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []PropertyItemID - err error - }{ - res: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []PropertyItemID - err error - }{ - res: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []PropertyItemID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := PropertyItemIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestPropertyItemIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []PropertyItemID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]PropertyItemID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - 
MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := PropertyItemIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestPropertyItemIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []PropertyItemID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]PropertyItemID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []PropertyItemID{MustPropertyItemID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []PropertyItemID{ - MustPropertyItemID(id1.String()), - MustPropertyItemID(id2.String()), - MustPropertyItemID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := PropertyItemIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestPropertyItemIDsToID(t *testing.T) { - tests := []struct { - name string - input []PropertyItemID - expected []ID - }{ - { - name: "Empty slice", - input: make([]PropertyItemID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - 
t.Parallel() - res := PropertyItemIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestPropertyItemIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustPropertyItemID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustPropertyItemID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustPropertyItemID(id3.String()) - - tests := []struct { - name string - input []*PropertyItemID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*PropertyItemID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*PropertyItemID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*PropertyItemID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := PropertyItemIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewPropertyItemIDSet(t *testing.T) { - PropertyItemIdSet := NewPropertyItemIDSet() - assert.NotNil(t, PropertyItemIdSet) - assert.Empty(t, PropertyItemIdSet.m) - assert.Empty(t, PropertyItemIdSet.s) -} - -func TestPropertyItemIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []PropertyItemID - expected *PropertyItemIDSet - }{ - { - name: "Empty slice", - input: make([]PropertyItemID, 0), - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - 
MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewPropertyItemIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestPropertyItemIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *PropertyItemID - expected *PropertyItemIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &PropertyItemIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewPropertyItemIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestPropertyItemIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *PropertyItemIDSet - input PropertyItemID - expected bool - }{ - { - name: "Empty Set", - target: &PropertyItemIDSet{}, - input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestPropertyItemIDSet_Clear(t *testing.T) { - tests := []struct { - name string - 
input *PropertyItemIDSet - expected *PropertyItemIDSet - }{ - { - name: "Empty set", - input: &PropertyItemIDSet{}, - expected: &PropertyItemIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &PropertyItemIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestPropertyItemIDSet_All(t *testing.T) { - tests := []struct { - name string - input *PropertyItemIDSet - expected []PropertyItemID - }{ - { - name: "Empty", - input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{}, - s: nil, - }, - expected: make([]PropertyItemID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, 
tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestPropertyItemIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *PropertyItemIDSet - expected *PropertyItemIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewPropertyItemIDSet(), - }, - { - name: "Empty set", - input: NewPropertyItemIDSet(), - expected: NewPropertyItemIDSet(), - }, - { - name: "1 element", - input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - 
-func TestPropertyItemIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *PropertyItemIDSet - b *PropertyItemIDSet - } - expected *PropertyItemIDSet - }{ - { - name: "Nil Set", - input: struct { - a *PropertyItemIDSet - b *PropertyItemIDSet - }{ - a: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *PropertyItemIDSet - b *PropertyItemIDSet - }{ - a: &PropertyItemIDSet{}, - b: &PropertyItemIDSet{}, - }, - expected: &PropertyItemIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *PropertyItemIDSet - b *PropertyItemIDSet - }{ - a: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &PropertyItemIDSet{}, - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *PropertyItemIDSet - b *PropertyItemIDSet - }{ - a: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []PropertyItemID{MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &PropertyItemIDSet{ - m: map[PropertyItemID]struct{}{ - 
MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []PropertyItemID{ - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t1"), - MustPropertyItemID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/property_schema.go b/pkg/id/property_schema.go index 812c95145..8ba76ff2f 100644 --- a/pkg/id/property_schema.go +++ b/pkg/id/property_schema.go @@ -5,9 +5,8 @@ import ( "strings" ) -var schemaIDRe = regexp.MustCompile("^[a-zA-Z0-9][a-zA-Z0-9_-]*$|^@$") +var propertySchemaIDRe = regexp.MustCompile("^[a-zA-Z0-9][a-zA-Z0-9_-]*$|^@$") -// PropertySchemaID is an ID for PropertySchema. type PropertySchemaID struct { plugin PluginID id string @@ -15,7 +14,7 @@ type PropertySchemaID struct { // NewPropertySchemaID generates a new PropertySchemaID from a plugin ID and name. func NewPropertySchemaID(p PluginID, name string) PropertySchemaID { - if p.IsNil() || !schemaIDRe.MatchString(name) { + if p.IsNil() || !propertySchemaIDRe.MatchString(name) { return PropertySchemaID{} } return PropertySchemaID{plugin: p.Clone(), id: name} @@ -24,7 +23,7 @@ func NewPropertySchemaID(p PluginID, name string) PropertySchemaID { // PropertySchemaIDFrom generates a new PropertySchemaID from a string. 
func PropertySchemaIDFrom(id string) (PropertySchemaID, error) { ids := strings.SplitN(id, "/", 2) - if len(ids) < 2 || !schemaIDRe.MatchString(ids[len(ids)-1]) { + if len(ids) < 2 || !propertySchemaIDRe.MatchString(ids[len(ids)-1]) { return PropertySchemaID{}, ErrInvalidID } pid, err := PluginIDFrom(ids[0]) diff --git a/pkg/id/property_schema_field.go b/pkg/id/property_schema_field.go deleted file mode 100644 index b8b9f6d2c..000000000 --- a/pkg/id/property_schema_field.go +++ /dev/null @@ -1,39 +0,0 @@ -package id - -type PropertySchemaFieldID string - -func PropertySchemaFieldIDFrom(str *string) *PropertySchemaFieldID { - if str == nil || *str == "" { - return nil - } - id := PropertySchemaFieldID(*str) - return &id -} - -func (id PropertySchemaFieldID) Ref() *PropertySchemaFieldID { - if id == "" { - return nil - } - id2 := id - return &id2 -} - -func (id *PropertySchemaFieldID) CopyRef() *PropertySchemaFieldID { - if id == nil || *id == "" { - return nil - } - id2 := *id - return &id2 -} - -func (id PropertySchemaFieldID) String() string { - return string(id) -} - -func (id *PropertySchemaFieldID) StringRef() *string { - if id == nil || *id == "" { - return nil - } - str := string(*id) - return &str -} diff --git a/pkg/id/property_schema_field_test.go b/pkg/id/property_schema_field_test.go deleted file mode 100644 index ab3320f56..000000000 --- a/pkg/id/property_schema_field_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package id - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestPropertySchemaFieldIDFrom(t *testing.T) { - input1 := "testStringId" - expected1 := PropertySchemaFieldID(input1) - - tests := []struct { - name string - input *string - expected *PropertySchemaFieldID - }{ - { - name: "success:string input", - input: &input1, - expected: &expected1, - }, - { - name: "fail:nil pointer", - input: nil, - expected: nil, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - result 
:= PropertySchemaFieldIDFrom(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestPropertySchemaFieldID_Ref(t *testing.T) { - propertySchemaFieldID := PropertySchemaFieldID("test") - - assert.Equal(t, &propertySchemaFieldID, propertySchemaFieldID.Ref()) -} - -func TestPropertySchemaFieldID_CopyRef(t *testing.T) { - propertySchemaFieldID := PropertySchemaFieldID("test") - - assert.Equal(t, propertySchemaFieldID, *propertySchemaFieldID.CopyRef()) - - assert.False(t, propertySchemaFieldID.Ref() == propertySchemaFieldID.CopyRef()) -} - -func TestPropertySchemaFieldID_String(t *testing.T) { - propertySchemaFieldID := PropertySchemaFieldID("test") - - assert.Equal(t, "test", propertySchemaFieldID.String()) -} - -func TestPropertySchemaFieldID_StringRef(t *testing.T) { - propertySchemaFieldID := PropertySchemaFieldID("test") - - assert.Equal(t, "test", *propertySchemaFieldID.StringRef()) -} diff --git a/pkg/id/property_schema_group.go b/pkg/id/property_schema_group.go deleted file mode 100644 index cfba5c321..000000000 --- a/pkg/id/property_schema_group.go +++ /dev/null @@ -1,39 +0,0 @@ -package id - -type PropertySchemaGroupID string - -func PropertySchemaGroupIDFrom(str *string) *PropertySchemaGroupID { - if str == nil || *str == "" { - return nil - } - id := PropertySchemaGroupID(*str) - return &id -} - -func (id PropertySchemaGroupID) Ref() *PropertySchemaGroupID { - if id == "" { - return nil - } - id2 := id - return &id2 -} - -func (id *PropertySchemaGroupID) CopyRef() *PropertySchemaGroupID { - if id == nil || *id == "" { - return nil - } - id2 := *id - return &id2 -} - -func (id PropertySchemaGroupID) String() string { - return string(id) -} - -func (id *PropertySchemaGroupID) StringRef() *string { - if id == nil || *id == "" { - return nil - } - str := string(*id) - return &str -} diff --git a/pkg/id/property_schema_group_test.go b/pkg/id/property_schema_group_test.go deleted file mode 100644 index 44c488178..000000000 --- 
a/pkg/id/property_schema_group_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package id - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestPropertySchemaGroupIDFrom(t *testing.T) { - input1 := "testStringId" - expected1 := PropertySchemaGroupID(input1) - - tests := []struct { - name string - input *string - expected *PropertySchemaGroupID - }{ - { - name: "success:string input", - input: &input1, - expected: &expected1, - }, - { - name: "fail:nil pointer", - input: nil, - expected: nil, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - result := PropertySchemaGroupIDFrom(tc.input) - assert.Equal(t, tc.expected, result) - }) - } -} - -func TestPropertySchemaGroupID_Ref(t *testing.T) { - PropertySchemaGroupID := PropertySchemaGroupID("test") - - assert.Equal(t, &PropertySchemaGroupID, PropertySchemaGroupID.Ref()) -} - -func TestPropertySchemaGroupID_CopyRef(t *testing.T) { - PropertySchemaGroupID := PropertySchemaGroupID("test") - - assert.Equal(t, PropertySchemaGroupID, *PropertySchemaGroupID.CopyRef()) - - assert.False(t, PropertySchemaGroupID.Ref() == PropertySchemaGroupID.CopyRef()) -} - -func TestPropertySchemaGroupID_String(t *testing.T) { - PropertySchemaGroupID := PropertySchemaGroupID("test") - - assert.Equal(t, "test", PropertySchemaGroupID.String()) -} - -func TestPropertySchemaGroupID_StringRef(t *testing.T) { - PropertySchemaGroupID := PropertySchemaGroupID("test") - - assert.Equal(t, "test", *PropertySchemaGroupID.StringRef()) -} diff --git a/pkg/id/scene_gen.go b/pkg/id/scene_gen.go deleted file mode 100644 index cd2c6d0bd..000000000 --- a/pkg/id/scene_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// SceneID is an ID for Scene. -type SceneID ID - -// NewSceneID generates a new SceneId. -func NewSceneID() SceneID { - return SceneID(New()) -} - -// SceneIDFrom generates a new SceneID from a string. 
-func SceneIDFrom(i string) (nid SceneID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = SceneID(did) - return -} - -// MustSceneID generates a new SceneID from a string, but panics if the string cannot be parsed. -func MustSceneID(i string) SceneID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return SceneID(did) -} - -// SceneIDFromRef generates a new SceneID from a string ref. -func SceneIDFromRef(i *string) *SceneID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := SceneID(*did) - return &nid -} - -// SceneIDFromRefID generates a new SceneID from a ref of a generic ID. -func SceneIDFromRefID(i *ID) *SceneID { - if i == nil || i.IsNil() { - return nil - } - nid := SceneID(*i) - return &nid -} - -// ID returns a domain ID. -func (d SceneID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d SceneID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d SceneID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d SceneID) GoString() string { - return "SceneID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d SceneID) Ref() *SceneID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d SceneID) Contains(ids []SceneID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *SceneID) CopyRef() *SceneID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. 
-func (d *SceneID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *SceneID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *SceneID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *SceneID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = SceneIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *SceneID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *SceneID) UnmarshalText(text []byte) (err error) { - *d, err = SceneIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d SceneID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *SceneID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// SceneIDsToStrings converts IDs into a string slice. -func SceneIDsToStrings(ids []SceneID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// SceneIDsFrom converts a string slice into a ID slice. -func SceneIDsFrom(ids []string) ([]SceneID, error) { - dids := make([]SceneID, 0, len(ids)) - for _, i := range ids { - did, err := SceneIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// SceneIDsFromID converts a generic ID slice into a ID slice. 
-func SceneIDsFromID(ids []ID) []SceneID { - dids := make([]SceneID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, SceneID(i)) - } - return dids -} - -// SceneIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func SceneIDsFromIDRef(ids []*ID) []SceneID { - dids := make([]SceneID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, SceneID(*i)) - } - } - return dids -} - -// SceneIDsToID converts a ID slice into a generic ID slice. -func SceneIDsToID(ids []SceneID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// SceneIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func SceneIDsToIDRef(ids []*SceneID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// SceneIDSet represents a set of SceneIDs -type SceneIDSet struct { - m map[SceneID]struct{} - s []SceneID -} - -// NewSceneIDSet creates a new SceneIDSet -func NewSceneIDSet() *SceneIDSet { - return &SceneIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *SceneIDSet) Add(p ...SceneID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[SceneID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []SceneID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *SceneIDSet) AddRef(p *SceneID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *SceneIDSet) Has(p SceneID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *SceneIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *SceneIDSet) All() []SceneID { - if s == nil 
{ - return nil - } - return append([]SceneID{}, s.s...) -} - -// Clone returns a cloned set -func (s *SceneIDSet) Clone() *SceneIDSet { - if s == nil { - return NewSceneIDSet() - } - s2 := NewSceneIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *SceneIDSet) Merge(s2 *SceneIDSet) *SceneIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/scene_gen_test.go b/pkg/id/scene_gen_test.go deleted file mode 100644 index 5a8e902c2..000000000 --- a/pkg/id/scene_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewSceneID(t *testing.T) { - id := NewSceneID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestSceneIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result SceneID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result SceneID - err error - }{ - result: SceneID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result SceneID - err error - }{ - result: SceneID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result SceneID - err error - }{ - result: SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := SceneIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustSceneID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - 
expected SceneID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustSceneID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestSceneIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *SceneID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &SceneID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := SceneIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestSceneIDFromRefID(t *testing.T) { - id := New() - id2 := SceneIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, SceneIDFromRefID(nil)) - assert.Nil(t, SceneIDFromRefID(&ID{})) -} - -func TestSceneID_ID(t *testing.T) { - id := New() - id2 := SceneIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestSceneID_String(t *testing.T) { - id := New() - id2 := SceneIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", SceneID{}.String()) -} - -func TestSceneID_RefString(t *testing.T) { - id := NewSceneID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, SceneID{}.RefString()) -} - -func TestSceneID_GoString(t *testing.T) { - id := New() - id2 := SceneIDFromRefID(&id) - 
assert.Equal(t, "SceneID("+id.String()+")", id2.GoString()) - assert.Equal(t, "SceneID()", SceneID{}.GoString()) -} - -func TestSceneID_Ref(t *testing.T) { - id := NewSceneID() - assert.Equal(t, SceneID(id), *id.Ref()) - assert.Nil(t, (&SceneID{}).Ref()) -} - -func TestSceneID_Contains(t *testing.T) { - id := NewSceneID() - id2 := NewSceneID() - assert.True(t, id.Contains([]SceneID{id, id2})) - assert.False(t, SceneID{}.Contains([]SceneID{id, id2, {}})) - assert.False(t, id.Contains([]SceneID{id2})) -} - -func TestSceneID_CopyRef(t *testing.T) { - id := NewSceneID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*SceneID)(nil).CopyRef()) -} - -func TestSceneID_IDRef(t *testing.T) { - id := New() - id2 := SceneIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&SceneID{}).IDRef()) - assert.Nil(t, (*SceneID)(nil).IDRef()) -} - -func TestSceneID_StringRef(t *testing.T) { - id := NewSceneID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&SceneID{}).StringRef()) - assert.Nil(t, (*SceneID)(nil).StringRef()) -} - -func TestSceneID_MarhsalJSON(t *testing.T) { - id := NewSceneID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&SceneID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*SceneID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestSceneID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustSceneID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &SceneID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestSceneID_MarshalText(t *testing.T) { - id := New() - res, err := SceneIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&SceneID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, 
res) - - res, err = (*SceneID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestSceneID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &SceneID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestSceneID_IsNil(t *testing.T) { - assert.True(t, SceneID{}.IsNil()) - assert.False(t, NewSceneID().IsNil()) -} - -func TestSceneID_IsNilRef(t *testing.T) { - assert.True(t, SceneID{}.Ref().IsNilRef()) - assert.True(t, (*SceneID)(nil).IsNilRef()) - assert.False(t, NewSceneID().Ref().IsNilRef()) -} - -func TestSceneIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []SceneID - expected []string - }{ - { - name: "Empty slice", - input: make([]SceneID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, SceneIDsToStrings(tt.input)) - }) - } -} - -func TestSceneIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []SceneID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []SceneID - err error - }{ - res: make([]SceneID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []SceneID - err error - }{ - res: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: 
nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []SceneID - err error - }{ - res: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []SceneID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := SceneIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestSceneIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []SceneID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]SceneID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := SceneIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestSceneIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := 
MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []SceneID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]SceneID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []SceneID{MustSceneID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []SceneID{ - MustSceneID(id1.String()), - MustSceneID(id2.String()), - MustSceneID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := SceneIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestSceneIDsToID(t *testing.T) { - tests := []struct { - name string - input []SceneID - expected []ID - }{ - { - name: "Empty slice", - input: make([]SceneID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := SceneIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestSceneIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustSceneID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustSceneID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustSceneID(id3.String()) - - tests := []struct { - name string - input []*SceneID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*SceneID, 0), - 
expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*SceneID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*SceneID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := SceneIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewSceneIDSet(t *testing.T) { - SceneIdSet := NewSceneIDSet() - assert.NotNil(t, SceneIdSet) - assert.Empty(t, SceneIdSet.m) - assert.Empty(t, SceneIdSet.s) -} - -func TestSceneIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []SceneID - expected *SceneIDSet - }{ - { - name: "Empty slice", - input: make([]SceneID, 0), - expected: &SceneIDSet{ - m: map[SceneID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - 
MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewSceneIDSet() - set.Add(tc.input...) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestSceneIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *SceneID - expected *SceneIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &SceneIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewSceneIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestSceneIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *SceneIDSet - input SceneID - expected bool - }{ - { - name: "Empty Set", - target: &SceneIDSet{}, - input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, 
tc.target.Has(tc.input)) - }) - } -} - -func TestSceneIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *SceneIDSet - expected *SceneIDSet - }{ - { - name: "Empty set", - input: &SceneIDSet{}, - expected: &SceneIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &SceneIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestSceneIDSet_All(t *testing.T) { - tests := []struct { - name string - input *SceneIDSet - expected []SceneID - }{ - { - name: "Empty", - input: &SceneIDSet{ - m: map[SceneID]struct{}{}, - s: nil, - }, - expected: make([]SceneID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &SceneIDSet{ - m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - 
}) - } -} - -func TestSceneIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *SceneIDSet - expected *SceneIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewSceneIDSet(), - }, - { - name: "Empty set", - input: NewSceneIDSet(), - expected: NewSceneIDSet(), - }, - { - name: "1 element", - input: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &SceneIDSet{ - m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestSceneIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *SceneIDSet - b *SceneIDSet - } - expected *SceneIDSet - }{ - { - name: "Nil Set", - input: struct { - a *SceneIDSet - b *SceneIDSet - }{ - a: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *SceneIDSet - b *SceneIDSet - }{ - a: &SceneIDSet{}, - b: &SceneIDSet{}, - }, - expected: &SceneIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *SceneIDSet - b *SceneIDSet - }{ - a: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &SceneIDSet{}, - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *SceneIDSet - b *SceneIDSet - }{ - a: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &SceneIDSet{ - m: map[SceneID]struct{}{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []SceneID{MustSceneID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &SceneIDSet{ - m: map[SceneID]struct{}{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []SceneID{ - MustSceneID("01f3zhcaq35403zdjnd6dcm0t1"), - MustSceneID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/tag_gen.go b/pkg/id/tag_gen.go deleted file mode 100644 index 27e34057d..000000000 --- a/pkg/id/tag_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// TagID is an ID for Tag. 
-type TagID ID - -// NewTagID generates a new TagId. -func NewTagID() TagID { - return TagID(New()) -} - -// TagIDFrom generates a new TagID from a string. -func TagIDFrom(i string) (nid TagID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = TagID(did) - return -} - -// MustTagID generates a new TagID from a string, but panics if the string cannot be parsed. -func MustTagID(i string) TagID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return TagID(did) -} - -// TagIDFromRef generates a new TagID from a string ref. -func TagIDFromRef(i *string) *TagID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := TagID(*did) - return &nid -} - -// TagIDFromRefID generates a new TagID from a ref of a generic ID. -func TagIDFromRefID(i *ID) *TagID { - if i == nil || i.IsNil() { - return nil - } - nid := TagID(*i) - return &nid -} - -// ID returns a domain ID. -func (d TagID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d TagID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d TagID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d TagID) GoString() string { - return "TagID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d TagID) Ref() *TagID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d TagID) Contains(ids []TagID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *TagID) CopyRef() *TagID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. 
-func (d *TagID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. -func (d *TagID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *TagID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *TagID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = TagIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *TagID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *TagID) UnmarshalText(text []byte) (err error) { - *d, err = TagIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d TagID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *TagID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// TagIDsToStrings converts IDs into a string slice. -func TagIDsToStrings(ids []TagID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// TagIDsFrom converts a string slice into a ID slice. -func TagIDsFrom(ids []string) ([]TagID, error) { - dids := make([]TagID, 0, len(ids)) - for _, i := range ids { - did, err := TagIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// TagIDsFromID converts a generic ID slice into a ID slice. 
-func TagIDsFromID(ids []ID) []TagID { - dids := make([]TagID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, TagID(i)) - } - return dids -} - -// TagIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func TagIDsFromIDRef(ids []*ID) []TagID { - dids := make([]TagID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, TagID(*i)) - } - } - return dids -} - -// TagIDsToID converts a ID slice into a generic ID slice. -func TagIDsToID(ids []TagID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// TagIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func TagIDsToIDRef(ids []*TagID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// TagIDSet represents a set of TagIDs -type TagIDSet struct { - m map[TagID]struct{} - s []TagID -} - -// NewTagIDSet creates a new TagIDSet -func NewTagIDSet() *TagIDSet { - return &TagIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *TagIDSet) Add(p ...TagID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[TagID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []TagID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *TagIDSet) AddRef(p *TagID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *TagIDSet) Has(p TagID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *TagIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *TagIDSet) All() []TagID { - if s == nil { - return nil - } - return append([]TagID{}, s.s...) 
-} - -// Clone returns a cloned set -func (s *TagIDSet) Clone() *TagIDSet { - if s == nil { - return NewTagIDSet() - } - s2 := NewTagIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *TagIDSet) Merge(s2 *TagIDSet) *TagIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/tag_gen_test.go b/pkg/id/tag_gen_test.go deleted file mode 100644 index f2181eee0..000000000 --- a/pkg/id/tag_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewTagID(t *testing.T) { - id := NewTagID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestTagIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result TagID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result TagID - err error - }{ - result: TagID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result TagID - err error - }{ - result: TagID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result TagID - err error - }{ - result: TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := TagIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustTagID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected TagID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, 
- }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustTagID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTagIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *TagID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &TagID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := TagIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTagIDFromRefID(t *testing.T) { - id := New() - id2 := TagIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, TagIDFromRefID(nil)) - assert.Nil(t, TagIDFromRefID(&ID{})) -} - -func TestTagID_ID(t *testing.T) { - id := New() - id2 := TagIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestTagID_String(t *testing.T) { - id := New() - id2 := TagIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", TagID{}.String()) -} - -func TestTagID_RefString(t *testing.T) { - id := NewTagID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, TagID{}.RefString()) -} - -func TestTagID_GoString(t *testing.T) { - id := New() - id2 := TagIDFromRefID(&id) - assert.Equal(t, "TagID("+id.String()+")", id2.GoString()) - assert.Equal(t, "TagID()", TagID{}.GoString()) -} - -func TestTagID_Ref(t *testing.T) { - id := 
NewTagID() - assert.Equal(t, TagID(id), *id.Ref()) - assert.Nil(t, (&TagID{}).Ref()) -} - -func TestTagID_Contains(t *testing.T) { - id := NewTagID() - id2 := NewTagID() - assert.True(t, id.Contains([]TagID{id, id2})) - assert.False(t, TagID{}.Contains([]TagID{id, id2, {}})) - assert.False(t, id.Contains([]TagID{id2})) -} - -func TestTagID_CopyRef(t *testing.T) { - id := NewTagID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*TagID)(nil).CopyRef()) -} - -func TestTagID_IDRef(t *testing.T) { - id := New() - id2 := TagIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&TagID{}).IDRef()) - assert.Nil(t, (*TagID)(nil).IDRef()) -} - -func TestTagID_StringRef(t *testing.T) { - id := NewTagID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&TagID{}).StringRef()) - assert.Nil(t, (*TagID)(nil).StringRef()) -} - -func TestTagID_MarhsalJSON(t *testing.T) { - id := NewTagID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&TagID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*TagID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestTagID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustTagID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &TagID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestTagID_MarshalText(t *testing.T) { - id := New() - res, err := TagIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&TagID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*TagID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestTagID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &TagID{} - err := 
id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestTagID_IsNil(t *testing.T) { - assert.True(t, TagID{}.IsNil()) - assert.False(t, NewTagID().IsNil()) -} - -func TestTagID_IsNilRef(t *testing.T) { - assert.True(t, TagID{}.Ref().IsNilRef()) - assert.True(t, (*TagID)(nil).IsNilRef()) - assert.False(t, NewTagID().Ref().IsNilRef()) -} - -func TestTagIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []TagID - expected []string - }{ - { - name: "Empty slice", - input: make([]TagID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, TagIDsToStrings(tt.input)) - }) - } -} - -func TestTagIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []TagID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []TagID - err error - }{ - res: make([]TagID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []TagID - err error - }{ - res: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []TagID - err error - }{ - res: []TagID{ - 
MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []TagID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := TagIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestTagIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []TagID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]TagID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TagIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestTagIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []TagID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]TagID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []TagID{MustTagID(id1.String())}, - }, - { - 
name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []TagID{ - MustTagID(id1.String()), - MustTagID(id2.String()), - MustTagID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TagIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestTagIDsToID(t *testing.T) { - tests := []struct { - name string - input []TagID - expected []ID - }{ - { - name: "Empty slice", - input: make([]TagID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TagIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestTagIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustTagID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustTagID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustTagID(id3.String()) - - tests := []struct { - name string - input []*TagID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*TagID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*TagID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*TagID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TagIDsToIDRef(tc.input) - 
assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewTagIDSet(t *testing.T) { - TagIdSet := NewTagIDSet() - assert.NotNil(t, TagIdSet) - assert.Empty(t, TagIdSet.m) - assert.Empty(t, TagIdSet.s) -} - -func TestTagIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []TagID - expected *TagIDSet - }{ - { - name: "Empty slice", - input: make([]TagID, 0), - expected: &TagIDSet{ - m: map[TagID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewTagIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestTagIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *TagID - expected *TagIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &TagIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustTagID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewTagIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestTagIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *TagIDSet - input TagID - expected bool - }{ - { - name: "Empty Set", - target: &TagIDSet{}, - input: MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestTagIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *TagIDSet - expected *TagIDSet - }{ - { - name: "Empty set", - input: &TagIDSet{}, - expected: &TagIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &TagIDSet{ - m: 
map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &TagIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestTagIDSet_All(t *testing.T) { - tests := []struct { - name string - input *TagIDSet - expected []TagID - }{ - { - name: "Empty", - input: &TagIDSet{ - m: map[TagID]struct{}{}, - s: nil, - }, - expected: make([]TagID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &TagIDSet{ - m: map[TagID]struct{}{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestTagIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *TagIDSet - expected *TagIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewTagIDSet(), - }, - { - name: "Empty set", - input: NewTagIDSet(), - expected: NewTagIDSet(), - }, - { - name: "1 element", - input: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &TagIDSet{ - m: map[TagID]struct{}{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestTagIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *TagIDSet - b *TagIDSet - } - expected *TagIDSet - }{ - { - name: "Nil Set", - input: struct { - a *TagIDSet - b *TagIDSet - }{ - a: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *TagIDSet - b *TagIDSet - }{ - a: &TagIDSet{}, - b: &TagIDSet{}, - }, - expected: &TagIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *TagIDSet - b *TagIDSet - }{ - a: &TagIDSet{ - m: 
map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &TagIDSet{}, - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *TagIDSet - b *TagIDSet - }{ - a: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &TagIDSet{ - m: map[TagID]struct{}{MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []TagID{MustTagID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &TagIDSet{ - m: map[TagID]struct{}{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []TagID{ - MustTagID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTagID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/team_gen.go b/pkg/id/team_gen.go deleted file mode 100644 index c4d0cf4c8..000000000 --- a/pkg/id/team_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// TeamID is an ID for Team. -type TeamID ID - -// NewTeamID generates a new TeamId. -func NewTeamID() TeamID { - return TeamID(New()) -} - -// TeamIDFrom generates a new TeamID from a string. -func TeamIDFrom(i string) (nid TeamID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = TeamID(did) - return -} - -// MustTeamID generates a new TeamID from a string, but panics if the string cannot be parsed. -func MustTeamID(i string) TeamID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return TeamID(did) -} - -// TeamIDFromRef generates a new TeamID from a string ref. 
-func TeamIDFromRef(i *string) *TeamID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := TeamID(*did) - return &nid -} - -// TeamIDFromRefID generates a new TeamID from a ref of a generic ID. -func TeamIDFromRefID(i *ID) *TeamID { - if i == nil || i.IsNil() { - return nil - } - nid := TeamID(*i) - return &nid -} - -// ID returns a domain ID. -func (d TeamID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d TeamID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d TeamID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d TeamID) GoString() string { - return "TeamID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d TeamID) Ref() *TeamID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d TeamID) Contains(ids []TeamID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *TeamID) CopyRef() *TeamID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *TeamID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. 
-func (d *TeamID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *TeamID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *TeamID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = TeamIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *TeamID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *TeamID) UnmarshalText(text []byte) (err error) { - *d, err = TeamIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d TeamID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *TeamID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// TeamIDsToStrings converts IDs into a string slice. -func TeamIDsToStrings(ids []TeamID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// TeamIDsFrom converts a string slice into a ID slice. -func TeamIDsFrom(ids []string) ([]TeamID, error) { - dids := make([]TeamID, 0, len(ids)) - for _, i := range ids { - did, err := TeamIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// TeamIDsFromID converts a generic ID slice into a ID slice. -func TeamIDsFromID(ids []ID) []TeamID { - dids := make([]TeamID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, TeamID(i)) - } - return dids -} - -// TeamIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
-func TeamIDsFromIDRef(ids []*ID) []TeamID { - dids := make([]TeamID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, TeamID(*i)) - } - } - return dids -} - -// TeamIDsToID converts a ID slice into a generic ID slice. -func TeamIDsToID(ids []TeamID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// TeamIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func TeamIDsToIDRef(ids []*TeamID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// TeamIDSet represents a set of TeamIDs -type TeamIDSet struct { - m map[TeamID]struct{} - s []TeamID -} - -// NewTeamIDSet creates a new TeamIDSet -func NewTeamIDSet() *TeamIDSet { - return &TeamIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *TeamIDSet) Add(p ...TeamID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[TeamID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []TeamID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *TeamIDSet) AddRef(p *TeamID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *TeamIDSet) Has(p TeamID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *TeamIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *TeamIDSet) All() []TeamID { - if s == nil { - return nil - } - return append([]TeamID{}, s.s...) -} - -// Clone returns a cloned set -func (s *TeamIDSet) Clone() *TeamIDSet { - if s == nil { - return NewTeamIDSet() - } - s2 := NewTeamIDSet() - s2.Add(s.s...) 
- return s2 -} - -// Merge returns a merged set -func (s *TeamIDSet) Merge(s2 *TeamIDSet) *TeamIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/team_gen_test.go b/pkg/id/team_gen_test.go deleted file mode 100644 index 660d521b3..000000000 --- a/pkg/id/team_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewTeamID(t *testing.T) { - id := NewTeamID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestTeamIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result TeamID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result TeamID - err error - }{ - result: TeamID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result TeamID - err error - }{ - result: TeamID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result TeamID - err error - }{ - result: TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := TeamIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustTeamID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected TeamID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: 
"01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustTeamID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTeamIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *TeamID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &TeamID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := TeamIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestTeamIDFromRefID(t *testing.T) { - id := New() - id2 := TeamIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, TeamIDFromRefID(nil)) - assert.Nil(t, TeamIDFromRefID(&ID{})) -} - -func TestTeamID_ID(t *testing.T) { - id := New() - id2 := TeamIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestTeamID_String(t *testing.T) { - id := New() - id2 := TeamIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", TeamID{}.String()) -} - -func TestTeamID_RefString(t *testing.T) { - id := NewTeamID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, TeamID{}.RefString()) -} - -func TestTeamID_GoString(t *testing.T) { - id := New() - id2 := TeamIDFromRefID(&id) - assert.Equal(t, "TeamID("+id.String()+")", id2.GoString()) - assert.Equal(t, "TeamID()", TeamID{}.GoString()) -} - -func TestTeamID_Ref(t *testing.T) { - id := NewTeamID() - assert.Equal(t, TeamID(id), *id.Ref()) - assert.Nil(t, (&TeamID{}).Ref()) -} - -func 
TestTeamID_Contains(t *testing.T) { - id := NewTeamID() - id2 := NewTeamID() - assert.True(t, id.Contains([]TeamID{id, id2})) - assert.False(t, TeamID{}.Contains([]TeamID{id, id2, {}})) - assert.False(t, id.Contains([]TeamID{id2})) -} - -func TestTeamID_CopyRef(t *testing.T) { - id := NewTeamID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*TeamID)(nil).CopyRef()) -} - -func TestTeamID_IDRef(t *testing.T) { - id := New() - id2 := TeamIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&TeamID{}).IDRef()) - assert.Nil(t, (*TeamID)(nil).IDRef()) -} - -func TestTeamID_StringRef(t *testing.T) { - id := NewTeamID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&TeamID{}).StringRef()) - assert.Nil(t, (*TeamID)(nil).StringRef()) -} - -func TestTeamID_MarhsalJSON(t *testing.T) { - id := NewTeamID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&TeamID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*TeamID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestTeamID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustTeamID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &TeamID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestTeamID_MarshalText(t *testing.T) { - id := New() - res, err := TeamIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&TeamID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*TeamID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestTeamID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &TeamID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, 
"01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestTeamID_IsNil(t *testing.T) { - assert.True(t, TeamID{}.IsNil()) - assert.False(t, NewTeamID().IsNil()) -} - -func TestTeamID_IsNilRef(t *testing.T) { - assert.True(t, TeamID{}.Ref().IsNilRef()) - assert.True(t, (*TeamID)(nil).IsNilRef()) - assert.False(t, NewTeamID().Ref().IsNilRef()) -} - -func TestTeamIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []TeamID - expected []string - }{ - { - name: "Empty slice", - input: make([]TeamID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, TeamIDsToStrings(tt.input)) - }) - } -} - -func TestTeamIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []TeamID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []TeamID - err error - }{ - res: make([]TeamID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []TeamID - err error - }{ - res: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []TeamID - err error - }{ - res: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []TeamID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := TeamIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestTeamIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []TeamID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]TeamID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TeamIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestTeamIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []TeamID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]TeamID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []TeamID{MustTeamID(id1.String())}, - }, - { - name: "multiple 
elements", - input: []*ID{&id1, &id2, &id3}, - expected: []TeamID{ - MustTeamID(id1.String()), - MustTeamID(id2.String()), - MustTeamID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TeamIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestTeamIDsToID(t *testing.T) { - tests := []struct { - name string - input []TeamID - expected []ID - }{ - { - name: "Empty slice", - input: make([]TeamID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := TeamIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestTeamIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustTeamID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustTeamID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustTeamID(id3.String()) - - tests := []struct { - name string - input []*TeamID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*TeamID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*TeamID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*TeamID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := 
TeamIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewTeamIDSet(t *testing.T) { - TeamIdSet := NewTeamIDSet() - assert.NotNil(t, TeamIdSet) - assert.Empty(t, TeamIdSet.m) - assert.Empty(t, TeamIdSet.s) -} - -func TestTeamIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []TeamID - expected *TeamIDSet - }{ - { - name: "Empty slice", - input: make([]TeamID, 0), - expected: &TeamIDSet{ - m: map[TeamID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewTeamIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestTeamIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *TeamID - expected *TeamIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &TeamIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewTeamIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestTeamIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *TeamIDSet - input TeamID - expected bool - }{ - { - name: "Empty Set", - target: &TeamIDSet{}, - input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestTeamIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *TeamIDSet - expected *TeamIDSet - }{ - { - name: "Empty set", - input: &TeamIDSet{}, - expected: &TeamIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: 
&TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &TeamIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestTeamIDSet_All(t *testing.T) { - tests := []struct { - name string - input *TeamIDSet - expected []TeamID - }{ - { - name: "Empty", - input: &TeamIDSet{ - m: map[TeamID]struct{}{}, - s: nil, - }, - expected: make([]TeamID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &TeamIDSet{ - m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestTeamIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *TeamIDSet - expected *TeamIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewTeamIDSet(), - }, - { - name: "Empty set", - input: NewTeamIDSet(), - expected: NewTeamIDSet(), - }, - { - name: "1 element", - input: &TeamIDSet{ - m: 
map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &TeamIDSet{ - m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestTeamIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *TeamIDSet - b *TeamIDSet - } - expected *TeamIDSet - }{ - { - name: "Nil Set", - input: struct { - a *TeamIDSet - b *TeamIDSet - }{ - a: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *TeamIDSet - b *TeamIDSet - }{ - a: &TeamIDSet{}, - b: &TeamIDSet{}, - }, - expected: &TeamIDSet{}, - }, - { - name: "1 Empty Set", - 
input: struct { - a *TeamIDSet - b *TeamIDSet - }{ - a: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &TeamIDSet{}, - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *TeamIDSet - b *TeamIDSet - }{ - a: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &TeamIDSet{ - m: map[TeamID]struct{}{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []TeamID{MustTeamID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &TeamIDSet{ - m: map[TeamID]struct{}{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []TeamID{ - MustTeamID("01f3zhcaq35403zdjnd6dcm0t1"), - MustTeamID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/user_gen.go b/pkg/id/user_gen.go deleted file mode 100644 index bb2eb0b3d..000000000 --- a/pkg/id/user_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// UserID is an ID for User. -type UserID ID - -// NewUserID generates a new UserId. -func NewUserID() UserID { - return UserID(New()) -} - -// UserIDFrom generates a new UserID from a string. -func UserIDFrom(i string) (nid UserID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = UserID(did) - return -} - -// MustUserID generates a new UserID from a string, but panics if the string cannot be parsed. 
-func MustUserID(i string) UserID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return UserID(did) -} - -// UserIDFromRef generates a new UserID from a string ref. -func UserIDFromRef(i *string) *UserID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := UserID(*did) - return &nid -} - -// UserIDFromRefID generates a new UserID from a ref of a generic ID. -func UserIDFromRefID(i *ID) *UserID { - if i == nil || i.IsNil() { - return nil - } - nid := UserID(*i) - return &nid -} - -// ID returns a domain ID. -func (d UserID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d UserID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d UserID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d UserID) GoString() string { - return "UserID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d UserID) Ref() *UserID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d UserID) Contains(ids []UserID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *UserID) CopyRef() *UserID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *UserID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. 
-func (d *UserID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *UserID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *UserID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = UserIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *UserID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *UserID) UnmarshalText(text []byte) (err error) { - *d, err = UserIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d UserID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *UserID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// UserIDsToStrings converts IDs into a string slice. -func UserIDsToStrings(ids []UserID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// UserIDsFrom converts a string slice into a ID slice. -func UserIDsFrom(ids []string) ([]UserID, error) { - dids := make([]UserID, 0, len(ids)) - for _, i := range ids { - did, err := UserIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// UserIDsFromID converts a generic ID slice into a ID slice. -func UserIDsFromID(ids []ID) []UserID { - dids := make([]UserID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, UserID(i)) - } - return dids -} - -// UserIDsFromIDRef converts a ref of a generic ID slice into a ID slice. 
-func UserIDsFromIDRef(ids []*ID) []UserID { - dids := make([]UserID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, UserID(*i)) - } - } - return dids -} - -// UserIDsToID converts a ID slice into a generic ID slice. -func UserIDsToID(ids []UserID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// UserIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func UserIDsToIDRef(ids []*UserID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// UserIDSet represents a set of UserIDs -type UserIDSet struct { - m map[UserID]struct{} - s []UserID -} - -// NewUserIDSet creates a new UserIDSet -func NewUserIDSet() *UserIDSet { - return &UserIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *UserIDSet) Add(p ...UserID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[UserID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []UserID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *UserIDSet) AddRef(p *UserID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *UserIDSet) Has(p UserID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *UserIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s *UserIDSet) All() []UserID { - if s == nil { - return nil - } - return append([]UserID{}, s.s...) -} - -// Clone returns a cloned set -func (s *UserIDSet) Clone() *UserIDSet { - if s == nil { - return NewUserIDSet() - } - s2 := NewUserIDSet() - s2.Add(s.s...) 
- return s2 -} - -// Merge returns a merged set -func (s *UserIDSet) Merge(s2 *UserIDSet) *UserIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/user_gen_test.go b/pkg/id/user_gen_test.go deleted file mode 100644 index a4d3a212f..000000000 --- a/pkg/id/user_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewUserID(t *testing.T) { - id := NewUserID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestUserIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result UserID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result UserID - err error - }{ - result: UserID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result UserID - err error - }{ - result: UserID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result UserID - err error - }{ - result: UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := UserIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustUserID(t *testing.T) { - tests := []struct { - name string - input string - shouldPanic bool - expected UserID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: 
"01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustUserID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestUserIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *UserID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &UserID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := UserIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestUserIDFromRefID(t *testing.T) { - id := New() - id2 := UserIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, UserIDFromRefID(nil)) - assert.Nil(t, UserIDFromRefID(&ID{})) -} - -func TestUserID_ID(t *testing.T) { - id := New() - id2 := UserIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestUserID_String(t *testing.T) { - id := New() - id2 := UserIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", UserID{}.String()) -} - -func TestUserID_RefString(t *testing.T) { - id := NewUserID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, UserID{}.RefString()) -} - -func TestUserID_GoString(t *testing.T) { - id := New() - id2 := UserIDFromRefID(&id) - assert.Equal(t, "UserID("+id.String()+")", id2.GoString()) - assert.Equal(t, "UserID()", UserID{}.GoString()) -} - -func TestUserID_Ref(t *testing.T) { - id := NewUserID() - assert.Equal(t, UserID(id), *id.Ref()) - assert.Nil(t, (&UserID{}).Ref()) -} - -func 
TestUserID_Contains(t *testing.T) { - id := NewUserID() - id2 := NewUserID() - assert.True(t, id.Contains([]UserID{id, id2})) - assert.False(t, UserID{}.Contains([]UserID{id, id2, {}})) - assert.False(t, id.Contains([]UserID{id2})) -} - -func TestUserID_CopyRef(t *testing.T) { - id := NewUserID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*UserID)(nil).CopyRef()) -} - -func TestUserID_IDRef(t *testing.T) { - id := New() - id2 := UserIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&UserID{}).IDRef()) - assert.Nil(t, (*UserID)(nil).IDRef()) -} - -func TestUserID_StringRef(t *testing.T) { - id := NewUserID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&UserID{}).StringRef()) - assert.Nil(t, (*UserID)(nil).StringRef()) -} - -func TestUserID_MarhsalJSON(t *testing.T) { - id := NewUserID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&UserID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*UserID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestUserID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustUserID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &UserID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestUserID_MarshalText(t *testing.T) { - id := New() - res, err := UserIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - assert.Equal(t, []byte(id.String()), res) - - res, err = (&UserID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*UserID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestUserID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &UserID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, 
"01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestUserID_IsNil(t *testing.T) { - assert.True(t, UserID{}.IsNil()) - assert.False(t, NewUserID().IsNil()) -} - -func TestUserID_IsNilRef(t *testing.T) { - assert.True(t, UserID{}.Ref().IsNilRef()) - assert.True(t, (*UserID)(nil).IsNilRef()) - assert.False(t, NewUserID().Ref().IsNilRef()) -} - -func TestUserIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []UserID - expected []string - }{ - { - name: "Empty slice", - input: make([]UserID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, UserIDsToStrings(tt.input)) - }) - } -} - -func TestUserIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []UserID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []UserID - err error - }{ - res: make([]UserID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []UserID - err error - }{ - res: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []UserID - err error - }{ - res: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []UserID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := UserIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestUserIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []UserID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]UserID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := UserIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestUserIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []UserID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]UserID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []UserID{MustUserID(id1.String())}, - }, - { - name: "multiple 
elements", - input: []*ID{&id1, &id2, &id3}, - expected: []UserID{ - MustUserID(id1.String()), - MustUserID(id2.String()), - MustUserID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := UserIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestUserIDsToID(t *testing.T) { - tests := []struct { - name string - input []UserID - expected []ID - }{ - { - name: "Empty slice", - input: make([]UserID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := UserIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestUserIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustUserID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustUserID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 := MustUserID(id3.String()) - - tests := []struct { - name string - input []*UserID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*UserID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*UserID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*UserID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := 
UserIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewUserIDSet(t *testing.T) { - UserIdSet := NewUserIDSet() - assert.NotNil(t, UserIdSet) - assert.Empty(t, UserIdSet.m) - assert.Empty(t, UserIdSet.s) -} - -func TestUserIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []UserID - expected *UserIDSet - }{ - { - name: "Empty slice", - input: make([]UserID, 0), - expected: &UserIDSet{ - m: map[UserID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewUserIDSet() - set.Add(tc.input...) 
- assert.Equal(t, tc.expected, set) - }) - } -} - -func TestUserIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *UserID - expected *UserIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &UserIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustUserID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewUserIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestUserIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *UserIDSet - input UserID - expected bool - }{ - { - name: "Empty Set", - target: &UserIDSet{}, - input: MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestUserIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *UserIDSet - expected *UserIDSet - }{ - { - name: "Empty set", - input: &UserIDSet{}, - expected: &UserIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: 
&UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &UserIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestUserIDSet_All(t *testing.T) { - tests := []struct { - name string - input *UserIDSet - expected []UserID - }{ - { - name: "Empty", - input: &UserIDSet{ - m: map[UserID]struct{}{}, - s: nil, - }, - expected: make([]UserID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &UserIDSet{ - m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestUserIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *UserIDSet - expected *UserIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewUserIDSet(), - }, - { - name: "Empty set", - input: NewUserIDSet(), - expected: NewUserIDSet(), - }, - { - name: "1 element", - input: &UserIDSet{ - m: 
map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &UserIDSet{ - m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func TestUserIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *UserIDSet - b *UserIDSet - } - expected *UserIDSet - }{ - { - name: "Nil Set", - input: struct { - a *UserIDSet - b *UserIDSet - }{ - a: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *UserIDSet - b *UserIDSet - }{ - a: &UserIDSet{}, - b: &UserIDSet{}, - }, - expected: &UserIDSet{}, - }, - { - name: "1 Empty Set", - 
input: struct { - a *UserIDSet - b *UserIDSet - }{ - a: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &UserIDSet{}, - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *UserIDSet - b *UserIDSet - }{ - a: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &UserIDSet{ - m: map[UserID]struct{}{MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []UserID{MustUserID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &UserIDSet{ - m: map[UserID]struct{}{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []UserID{ - MustUserID("01f3zhcaq35403zdjnd6dcm0t1"), - MustUserID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/id/widget_gen.go b/pkg/id/widget_gen.go deleted file mode 100644 index 01ed68756..000000000 --- a/pkg/id/widget_gen.go +++ /dev/null @@ -1,314 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import "encoding/json" - -// WidgetID is an ID for Widget. -type WidgetID ID - -// NewWidgetID generates a new WidgetId. -func NewWidgetID() WidgetID { - return WidgetID(New()) -} - -// WidgetIDFrom generates a new WidgetID from a string. -func WidgetIDFrom(i string) (nid WidgetID, err error) { - var did ID - did, err = FromID(i) - if err != nil { - return - } - nid = WidgetID(did) - return -} - -// MustWidgetID generates a new WidgetID from a string, but panics if the string cannot be parsed. 
-func MustWidgetID(i string) WidgetID { - did, err := FromID(i) - if err != nil { - panic(err) - } - return WidgetID(did) -} - -// WidgetIDFromRef generates a new WidgetID from a string ref. -func WidgetIDFromRef(i *string) *WidgetID { - did := FromIDRef(i) - if did == nil { - return nil - } - nid := WidgetID(*did) - return &nid -} - -// WidgetIDFromRefID generates a new WidgetID from a ref of a generic ID. -func WidgetIDFromRefID(i *ID) *WidgetID { - if i == nil || i.IsNil() { - return nil - } - nid := WidgetID(*i) - return &nid -} - -// ID returns a domain ID. -func (d WidgetID) ID() ID { - return ID(d) -} - -// String returns a string representation. -func (d WidgetID) String() string { - if d.IsNil() { - return "" - } - return ID(d).String() -} - -// StringRef returns a reference of the string representation. -func (d WidgetID) RefString() *string { - if d.IsNil() { - return nil - } - str := d.String() - return &str -} - -// GoString implements fmt.GoStringer interface. -func (d WidgetID) GoString() string { - return "WidgetID(" + d.String() + ")" -} - -// Ref returns a reference. -func (d WidgetID) Ref() *WidgetID { - if d.IsNil() { - return nil - } - d2 := d - return &d2 -} - -// Contains returns whether the id is contained in the slice. -func (d WidgetID) Contains(ids []WidgetID) bool { - if d.IsNil() { - return false - } - for _, i := range ids { - if d.ID().Equal(i.ID()) { - return true - } - } - return false -} - -// CopyRef returns a copy of a reference. -func (d *WidgetID) CopyRef() *WidgetID { - if d.IsNilRef() { - return nil - } - d2 := *d - return &d2 -} - -// IDRef returns a reference of a domain id. -func (d *WidgetID) IDRef() *ID { - if d.IsNilRef() { - return nil - } - id := ID(*d) - return &id -} - -// StringRef returns a reference of a string representation. 
-func (d *WidgetID) StringRef() *string { - if d.IsNilRef() { - return nil - } - id := ID(*d).String() - return &id -} - -// MarhsalJSON implements json.Marhsaler interface -func (d *WidgetID) MarhsalJSON() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return json.Marshal(d.String()) -} - -// UnmarhsalJSON implements json.Unmarshaler interface -func (d *WidgetID) UnmarhsalJSON(bs []byte) (err error) { - var idstr string - if err = json.Unmarshal(bs, &idstr); err != nil { - return - } - *d, err = WidgetIDFrom(idstr) - return -} - -// MarshalText implements encoding.TextMarshaler interface -func (d *WidgetID) MarshalText() ([]byte, error) { - if d.IsNilRef() { - return nil, nil - } - return []byte(d.String()), nil -} - -// UnmarshalText implements encoding.TextUnmarshaler interface -func (d *WidgetID) UnmarshalText(text []byte) (err error) { - *d, err = WidgetIDFrom(string(text)) - return -} - -// IsNil returns true if a ID is zero-value -func (d WidgetID) IsNil() bool { - return ID(d).IsNil() -} - -// IsNilRef returns true if a ID is nil or zero-value -func (d *WidgetID) IsNilRef() bool { - return d == nil || ID(*d).IsNil() -} - -// WidgetIDsToStrings converts IDs into a string slice. -func WidgetIDsToStrings(ids []WidgetID) []string { - strs := make([]string, 0, len(ids)) - for _, i := range ids { - strs = append(strs, i.String()) - } - return strs -} - -// WidgetIDsFrom converts a string slice into a ID slice. -func WidgetIDsFrom(ids []string) ([]WidgetID, error) { - dids := make([]WidgetID, 0, len(ids)) - for _, i := range ids { - did, err := WidgetIDFrom(i) - if err != nil { - return nil, err - } - dids = append(dids, did) - } - return dids, nil -} - -// WidgetIDsFromID converts a generic ID slice into a ID slice. 
-func WidgetIDsFromID(ids []ID) []WidgetID { - dids := make([]WidgetID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, WidgetID(i)) - } - return dids -} - -// WidgetIDsFromIDRef converts a ref of a generic ID slice into a ID slice. -func WidgetIDsFromIDRef(ids []*ID) []WidgetID { - dids := make([]WidgetID, 0, len(ids)) - for _, i := range ids { - if i != nil { - dids = append(dids, WidgetID(*i)) - } - } - return dids -} - -// WidgetIDsToID converts a ID slice into a generic ID slice. -func WidgetIDsToID(ids []WidgetID) []ID { - dids := make([]ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.ID()) - } - return dids -} - -// WidgetIDsToIDRef converts a ID ref slice into a generic ID ref slice. -func WidgetIDsToIDRef(ids []*WidgetID) []*ID { - dids := make([]*ID, 0, len(ids)) - for _, i := range ids { - dids = append(dids, i.IDRef()) - } - return dids -} - -// WidgetIDSet represents a set of WidgetIDs -type WidgetIDSet struct { - m map[WidgetID]struct{} - s []WidgetID -} - -// NewWidgetIDSet creates a new WidgetIDSet -func NewWidgetIDSet() *WidgetIDSet { - return &WidgetIDSet{} -} - -// Add adds a new ID if it does not exists in the set -func (s *WidgetIDSet) Add(p ...WidgetID) { - if s == nil || p == nil { - return - } - if s.m == nil { - s.m = map[WidgetID]struct{}{} - } - for _, i := range p { - if _, ok := s.m[i]; !ok { - if s.s == nil { - s.s = []WidgetID{} - } - s.m[i] = struct{}{} - s.s = append(s.s, i) - } - } -} - -// AddRef adds a new ID ref if it does not exists in the set -func (s *WidgetIDSet) AddRef(p *WidgetID) { - if s == nil || p == nil { - return - } - s.Add(*p) -} - -// Has checks if the ID exists in the set -func (s *WidgetIDSet) Has(p WidgetID) bool { - if s == nil || s.m == nil { - return false - } - _, ok := s.m[p] - return ok -} - -// Clear clears all stored IDs -func (s *WidgetIDSet) Clear() { - if s == nil { - return - } - s.m = nil - s.s = nil -} - -// All returns stored all IDs as a slice -func (s 
*WidgetIDSet) All() []WidgetID { - if s == nil { - return nil - } - return append([]WidgetID{}, s.s...) -} - -// Clone returns a cloned set -func (s *WidgetIDSet) Clone() *WidgetIDSet { - if s == nil { - return NewWidgetIDSet() - } - s2 := NewWidgetIDSet() - s2.Add(s.s...) - return s2 -} - -// Merge returns a merged set -func (s *WidgetIDSet) Merge(s2 *WidgetIDSet) *WidgetIDSet { - s3 := s.Clone() - if s2 == nil { - return s3 - } - s3.Add(s2.s...) - return s3 -} diff --git a/pkg/id/widget_gen_test.go b/pkg/id/widget_gen_test.go deleted file mode 100644 index b49c56fc4..000000000 --- a/pkg/id/widget_gen_test.go +++ /dev/null @@ -1,976 +0,0 @@ -// Code generated by gen, DO NOT EDIT. - -package id - -import ( - "encoding/json" - "testing" - - "github.com/oklog/ulid" - "github.com/stretchr/testify/assert" -) - -func TestNewWidgetID(t *testing.T) { - id := NewWidgetID() - assert.NotNil(t, id) - u, err := ulid.Parse(id.String()) - assert.NotNil(t, u) - assert.Nil(t, err) -} - -func TestWidgetIDFrom(t *testing.T) { - tests := []struct { - name string - input string - expected struct { - result WidgetID - err error - } - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: struct { - result WidgetID - err error - }{ - result: WidgetID{}, - err: ErrInvalidID, - }, - }, - { - name: "Fail:Not valid string", - input: "", - expected: struct { - result WidgetID - err error - }{ - result: WidgetID{}, - err: ErrInvalidID, - }, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: struct { - result WidgetID - err error - }{ - result: WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - err: nil, - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result, err := WidgetIDFrom(tt.input) - assert.Equal(t, tt.expected.result, result) - if tt.expected.err != nil { - assert.Equal(t, tt.expected.err, err) - } - }) - } -} - -func TestMustWidgetID(t *testing.T) { - 
tests := []struct { - name string - input string - shouldPanic bool - expected WidgetID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - shouldPanic: true, - }, - { - name: "Fail:Not valid string", - input: "", - shouldPanic: true, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - shouldPanic: false, - expected: WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if tt.shouldPanic { - assert.Panics(t, func() { MustBeID(tt.input) }) - return - } - result := MustWidgetID(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestWidgetIDFromRef(t *testing.T) { - tests := []struct { - name string - input string - expected *WidgetID - }{ - { - name: "Fail:Not valid string", - input: "testMustFail", - expected: nil, - }, - { - name: "Fail:Not valid string", - input: "", - expected: nil, - }, - { - name: "success:valid string", - input: "01f2r7kg1fvvffp0gmexgy5hxy", - expected: &WidgetID{ulid.MustParse("01f2r7kg1fvvffp0gmexgy5hxy")}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - result := WidgetIDFromRef(&tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} - -func TestWidgetIDFromRefID(t *testing.T) { - id := New() - id2 := WidgetIDFromRefID(&id) - assert.Equal(t, id.id, id2.id) - assert.Nil(t, WidgetIDFromRefID(nil)) - assert.Nil(t, WidgetIDFromRefID(&ID{})) -} - -func TestWidgetID_ID(t *testing.T) { - id := New() - id2 := WidgetIDFromRefID(&id) - assert.Equal(t, id, id2.ID()) -} - -func TestWidgetID_String(t *testing.T) { - id := New() - id2 := WidgetIDFromRefID(&id) - assert.Equal(t, id.String(), id2.String()) - assert.Equal(t, "", WidgetID{}.String()) -} - -func TestWidgetID_RefString(t *testing.T) { - id := NewWidgetID() - assert.Equal(t, id.String(), *id.RefString()) - assert.Nil(t, WidgetID{}.RefString()) -} - -func 
TestWidgetID_GoString(t *testing.T) { - id := New() - id2 := WidgetIDFromRefID(&id) - assert.Equal(t, "WidgetID("+id.String()+")", id2.GoString()) - assert.Equal(t, "WidgetID()", WidgetID{}.GoString()) -} - -func TestWidgetID_Ref(t *testing.T) { - id := NewWidgetID() - assert.Equal(t, WidgetID(id), *id.Ref()) - assert.Nil(t, (&WidgetID{}).Ref()) -} - -func TestWidgetID_Contains(t *testing.T) { - id := NewWidgetID() - id2 := NewWidgetID() - assert.True(t, id.Contains([]WidgetID{id, id2})) - assert.False(t, WidgetID{}.Contains([]WidgetID{id, id2, {}})) - assert.False(t, id.Contains([]WidgetID{id2})) -} - -func TestWidgetID_CopyRef(t *testing.T) { - id := NewWidgetID().Ref() - id2 := id.CopyRef() - assert.Equal(t, id, id2) - assert.NotSame(t, id, id2) - assert.Nil(t, (*WidgetID)(nil).CopyRef()) -} - -func TestWidgetID_IDRef(t *testing.T) { - id := New() - id2 := WidgetIDFromRefID(&id) - assert.Equal(t, &id, id2.IDRef()) - assert.Nil(t, (&WidgetID{}).IDRef()) - assert.Nil(t, (*WidgetID)(nil).IDRef()) -} - -func TestWidgetID_StringRef(t *testing.T) { - id := NewWidgetID() - assert.Equal(t, id.String(), *id.StringRef()) - assert.Nil(t, (&WidgetID{}).StringRef()) - assert.Nil(t, (*WidgetID)(nil).StringRef()) -} - -func TestWidgetID_MarhsalJSON(t *testing.T) { - id := NewWidgetID() - res, err := id.MarhsalJSON() - assert.Nil(t, err) - exp, _ := json.Marshal(id.String()) - assert.Equal(t, exp, res) - - res, err = (&WidgetID{}).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*WidgetID)(nil).MarhsalJSON() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestWidgetID_UnmarhsalJSON(t *testing.T) { - jsonString := "\"01f3zhkysvcxsnzepyyqtq21fb\"" - id := MustWidgetID("01f3zhkysvcxsnzepyyqtq21fb") - id2 := &WidgetID{} - err := id2.UnmarhsalJSON([]byte(jsonString)) - assert.Nil(t, err) - assert.Equal(t, id, *id2) -} - -func TestWidgetID_MarshalText(t *testing.T) { - id := New() - res, err := WidgetIDFromRefID(&id).MarshalText() - assert.Nil(t, err) - 
assert.Equal(t, []byte(id.String()), res) - - res, err = (&WidgetID{}).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) - - res, err = (*WidgetID)(nil).MarshalText() - assert.Nil(t, err) - assert.Nil(t, res) -} - -func TestWidgetID_UnmarshalText(t *testing.T) { - text := []byte("01f3zhcaq35403zdjnd6dcm0t2") - id2 := &WidgetID{} - err := id2.UnmarshalText(text) - assert.Nil(t, err) - assert.Equal(t, "01f3zhcaq35403zdjnd6dcm0t2", id2.String()) -} - -func TestWidgetID_IsNil(t *testing.T) { - assert.True(t, WidgetID{}.IsNil()) - assert.False(t, NewWidgetID().IsNil()) -} - -func TestWidgetID_IsNilRef(t *testing.T) { - assert.True(t, WidgetID{}.Ref().IsNilRef()) - assert.True(t, (*WidgetID)(nil).IsNilRef()) - assert.False(t, NewWidgetID().Ref().IsNilRef()) -} - -func TestWidgetIDsToStrings(t *testing.T) { - tests := []struct { - name string - input []WidgetID - expected []string - }{ - { - name: "Empty slice", - input: make([]WidgetID, 0), - expected: make([]string, 0), - }, - { - name: "1 element", - input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []string{"01f3zhcaq35403zdjnd6dcm0t2"}, - }, - { - name: "multiple elements", - input: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tt.expected, WidgetIDsToStrings(tt.input)) - }) - } -} - -func TestWidgetIDsFrom(t *testing.T) { - tests := []struct { - name string - input []string - expected struct { - res []WidgetID - err error - } - }{ - { - name: "Empty slice", - input: make([]string, 0), - expected: struct { - res []WidgetID - err error - }{ - res: make([]WidgetID, 0), - err: nil, - }, - }, - { - name: "1 element", - input: 
[]string{"01f3zhcaq35403zdjnd6dcm0t2"}, - expected: struct { - res []WidgetID - err error - }{ - res: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, - err: nil, - }, - }, - { - name: "multiple elements", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "01f3zhcaq35403zdjnd6dcm0t2", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []WidgetID - err error - }{ - res: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - err: nil, - }, - }, - { - name: "error", - input: []string{ - "01f3zhcaq35403zdjnd6dcm0t1", - "x", - "01f3zhcaq35403zdjnd6dcm0t3", - }, - expected: struct { - res []WidgetID - err error - }{ - res: nil, - err: ErrInvalidID, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res, err := WidgetIDsFrom(tc.input) - if tc.expected.err != nil { - assert.Equal(t, tc.expected.err, err) - assert.Nil(t, res) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.expected.res, res) - } - }) - } -} - -func TestWidgetIDsFromID(t *testing.T) { - tests := []struct { - name string - input []ID - expected []WidgetID - }{ - { - name: "Empty slice", - input: make([]ID, 0), - expected: make([]WidgetID, 0), - }, - { - name: "1 element", - input: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := WidgetIDsFromID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - 
-func TestWidgetIDsFromIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - - tests := []struct { - name string - input []*ID - expected []WidgetID - }{ - { - name: "Empty slice", - input: make([]*ID, 0), - expected: make([]WidgetID, 0), - }, - { - name: "1 element", - input: []*ID{&id1}, - expected: []WidgetID{MustWidgetID(id1.String())}, - }, - { - name: "multiple elements", - input: []*ID{&id1, &id2, &id3}, - expected: []WidgetID{ - MustWidgetID(id1.String()), - MustWidgetID(id2.String()), - MustWidgetID(id3.String()), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := WidgetIDsFromIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestWidgetIDsToID(t *testing.T) { - tests := []struct { - name string - input []WidgetID - expected []ID - }{ - { - name: "Empty slice", - input: make([]WidgetID, 0), - expected: make([]ID, 0), - }, - { - name: "1 element", - input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, - expected: []ID{MustBeID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - { - name: "multiple elements", - input: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: []ID{ - MustBeID("01f3zhcaq35403zdjnd6dcm0t1"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t2"), - MustBeID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := WidgetIDsToID(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestWidgetIDsToIDRef(t *testing.T) { - id1 := MustBeID("01f3zhcaq35403zdjnd6dcm0t1") - id21 := MustWidgetID(id1.String()) - id2 := MustBeID("01f3zhcaq35403zdjnd6dcm0t2") - id22 := MustWidgetID(id2.String()) - id3 := MustBeID("01f3zhcaq35403zdjnd6dcm0t3") - id23 
:= MustWidgetID(id3.String()) - - tests := []struct { - name string - input []*WidgetID - expected []*ID - }{ - { - name: "Empty slice", - input: make([]*WidgetID, 0), - expected: make([]*ID, 0), - }, - { - name: "1 element", - input: []*WidgetID{&id21}, - expected: []*ID{&id1}, - }, - { - name: "multiple elements", - input: []*WidgetID{&id21, &id22, &id23}, - expected: []*ID{&id1, &id2, &id3}, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - res := WidgetIDsToIDRef(tc.input) - assert.Equal(t, tc.expected, res) - }) - } -} - -func TestNewWidgetIDSet(t *testing.T) { - WidgetIdSet := NewWidgetIDSet() - assert.NotNil(t, WidgetIdSet) - assert.Empty(t, WidgetIdSet.m) - assert.Empty(t, WidgetIdSet.s) -} - -func TestWidgetIDSet_Add(t *testing.T) { - tests := []struct { - name string - input []WidgetID - expected *WidgetIDSet - }{ - { - name: "Empty slice", - input: make([]WidgetID, 0), - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{}, - s: nil, - }, - }, - { - name: "1 element", - input: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - { - name: "multiple elements with duplication", - input: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - 
MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewWidgetIDSet() - set.Add(tc.input...) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestWidgetIDSet_AddRef(t *testing.T) { - tests := []struct { - name string - input *WidgetID - expected *WidgetIDSet - }{ - { - name: "Empty slice", - input: nil, - expected: &WidgetIDSet{ - m: nil, - s: nil, - }, - }, - { - name: "1 element", - input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1").Ref(), - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - set := NewWidgetIDSet() - set.AddRef(tc.input) - assert.Equal(t, tc.expected, set) - }) - } -} - -func TestWidgetIDSet_Has(t *testing.T) { - tests := []struct { - name string - target *WidgetIDSet - input WidgetID - expected bool - }{ - { - name: "Empty Set", - target: &WidgetIDSet{}, - input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: false, - }, - { - name: "Set Contains the element", - target: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - expected: true, - }, - { - name: "Set does not Contains the element", - target: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: 
[]WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - input: MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - expected: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.target.Has(tc.input)) - }) - } -} - -func TestWidgetIDSet_Clear(t *testing.T) { - tests := []struct { - name string - input *WidgetIDSet - expected *WidgetIDSet - }{ - { - name: "Empty set", - input: &WidgetIDSet{}, - expected: &WidgetIDSet{}, - }, - { - name: "Nil set", - input: nil, - expected: nil, - }, - { - name: "Contains the element", - input: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &WidgetIDSet{ - m: nil, - s: nil, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.input.Clear() - assert.Equal(t, tc.expected, tc.input) - }) - } -} - -func TestWidgetIDSet_All(t *testing.T) { - tests := []struct { - name string - input *WidgetIDSet - expected []WidgetID - }{ - { - name: "Empty", - input: &WidgetIDSet{ - m: map[WidgetID]struct{}{}, - s: nil, - }, - expected: make([]WidgetID, 0), - }, - { - name: "Nil", - input: nil, - expected: nil, - }, - { - name: "1 element", - input: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - { - name: "multiple elements", - input: &WidgetIDSet{ - m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: 
[]WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - assert.Equal(t, tc.expected, tc.input.All()) - }) - } -} - -func TestWidgetIDSet_Clone(t *testing.T) { - tests := []struct { - name string - input *WidgetIDSet - expected *WidgetIDSet - }{ - { - name: "nil set", - input: nil, - expected: NewWidgetIDSet(), - }, - { - name: "Empty set", - input: NewWidgetIDSet(), - expected: NewWidgetIDSet(), - }, - { - name: "1 element", - input: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "multiple elements", - input: &WidgetIDSet{ - m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"): {}, - }, - s: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t3"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - clone := tc.input.Clone() - assert.Equal(t, tc.expected, clone) - assert.NotSame(t, tc.input, clone) - }) - } -} - -func 
TestWidgetIDSet_Merge(t *testing.T) { - tests := []struct { - name string - input struct { - a *WidgetIDSet - b *WidgetIDSet - } - expected *WidgetIDSet - }{ - { - name: "Nil Set", - input: struct { - a *WidgetIDSet - b *WidgetIDSet - }{ - a: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: nil, - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "Empty Set", - input: struct { - a *WidgetIDSet - b *WidgetIDSet - }{ - a: &WidgetIDSet{}, - b: &WidgetIDSet{}, - }, - expected: &WidgetIDSet{}, - }, - { - name: "1 Empty Set", - input: struct { - a *WidgetIDSet - b *WidgetIDSet - }{ - a: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &WidgetIDSet{}, - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - }, - { - name: "2 non Empty Set", - input: struct { - a *WidgetIDSet - b *WidgetIDSet - }{ - a: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1")}, - }, - b: &WidgetIDSet{ - m: map[WidgetID]struct{}{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}}, - s: []WidgetID{MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2")}, - }, - }, - expected: &WidgetIDSet{ - m: map[WidgetID]struct{}{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"): {}, - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"): {}, - }, - s: []WidgetID{ - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t1"), - MustWidgetID("01f3zhcaq35403zdjnd6dcm0t2"), - }, - }, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - 
assert.Equal(t, tc.expected, tc.input.a.Merge(tc.input.b)) - }) - } -} diff --git a/pkg/layer/builder.go b/pkg/layer/builder.go index 651d378bf..21612b6f4 100644 --- a/pkg/layer/builder.go +++ b/pkg/layer/builder.go @@ -54,7 +54,7 @@ func (b *Builder) Plugin(plugin *PluginID) *Builder { } func (b *Builder) Extension(extension *PluginExtensionID) *Builder { - b.base.extension = extension.CopyRef() + b.base.extension = extension.CloneRef() return b } diff --git a/pkg/layer/group_builder.go b/pkg/layer/group_builder.go index 4a1898177..230792c82 100644 --- a/pkg/layer/group_builder.go +++ b/pkg/layer/group_builder.go @@ -83,7 +83,7 @@ func (b *GroupBuilder) Plugin(plugin *PluginID) *GroupBuilder { } func (b *GroupBuilder) Extension(extension *PluginExtensionID) *GroupBuilder { - b.l.extension = extension.CopyRef() + b.l.extension = extension.CloneRef() return b } diff --git a/pkg/layer/id.go b/pkg/layer/id.go index f14db47ac..048c996b2 100644 --- a/pkg/layer/id.go +++ b/pkg/layer/id.go @@ -48,18 +48,12 @@ var PropertyIDFromRef = id.PropertyIDFromRef var DatasetIDFromRef = id.DatasetIDFromRef var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef -var IDFromRefID = id.LayerIDFromRefID -var InfoboxFieldIDFromRefID = id.InfoboxFieldIDFromRefID -var TagIDFromRefID = id.TagIDFromRefID -var SceneIDFromRefID = id.SceneIDFromRefID -var PropertyIDFromRefID = id.PropertyIDFromRefID -var DatasetIDFromRefID = id.DatasetIDFromRefID -var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID - type IDSet = id.LayerIDSet type InfoboxFIeldIDSet = id.InfoboxFieldIDSet type DatasetIDSet = id.DatasetIDSet +type DatasetIDList = id.DatasetIDList type TagIDSet = id.TagIDSet +type TagIDList = id.TagIDList var NewIDSet = id.NewLayerIDSet var NewInfoboxFIeldIDSet = id.NewInfoboxFieldIDSet @@ -71,6 +65,6 @@ var ErrInvalidID = id.ErrInvalidID func sortIDs(a []ID) { sort.SliceStable(a, func(i, j int) bool { - return id.ID(a[i]).Compare(id.ID(a[j])) < 0 + return a[i].Compare(a[j]) < 0 }) } diff 
--git a/pkg/layer/id_list.go b/pkg/layer/id_list.go index d359b54ef..224745d80 100644 --- a/pkg/layer/id_list.go +++ b/pkg/layer/id_list.go @@ -1,7 +1,9 @@ package layer +import "github.com/reearth/reearth-backend/pkg/id" + type IDList struct { - layers []ID + layers id.LayerIDList // for checking duplication layerIDs map[ID]struct{} } @@ -209,3 +211,10 @@ func (l *IDList) Empty() { l.layers = nil l.layerIDs = nil } + +func (l *IDList) Strings() []string { + if l == nil { + return nil + } + return l.layers.Strings() +} diff --git a/pkg/layer/initializer.go b/pkg/layer/initializer.go index a9f7c86a5..7fc9ba2d7 100644 --- a/pkg/layer/initializer.go +++ b/pkg/layer/initializer.go @@ -80,7 +80,7 @@ func (i *Initializer) Clone() *Initializer { return &Initializer{ ID: i.ID.CopyRef(), Plugin: i.Plugin.CopyRef(), - Extension: i.Extension.CopyRef(), + Extension: i.Extension.CloneRef(), Name: i.Name, Infobox: i.Infobox.Clone(), PropertyID: i.PropertyID.CopyRef(), diff --git a/pkg/layer/initializer_test.go b/pkg/layer/initializer_test.go index 725ad4f2a..3f7493b3d 100644 --- a/pkg/layer/initializer_test.go +++ b/pkg/layer/initializer_test.go @@ -93,7 +93,7 @@ func TestInitializer_Layer(t *testing.T) { i.ID = nil actual, err = i.Layer(sid) assert.NoError(t, err) - assert.False(t, actual.RootLayer().ID().IsNil()) + assert.False(t, actual.RootLayer().ID().IsEmpty()) } func TestInitializerInfobox_Clone(t *testing.T) { @@ -183,5 +183,5 @@ func TestInitializerInfoboxField_InfoboxField(t *testing.T) { i.ID = nil actual, _, err = i.InfoboxField(sid) assert.NoError(t, err) - assert.False(t, actual.ID().IsNil()) + assert.False(t, actual.ID().IsEmpty()) } diff --git a/pkg/layer/item_builder.go b/pkg/layer/item_builder.go index 5375ee317..f03ef5f80 100644 --- a/pkg/layer/item_builder.go +++ b/pkg/layer/item_builder.go @@ -78,7 +78,7 @@ func (b *ItemBuilder) Plugin(plugin *PluginID) *ItemBuilder { } func (b *ItemBuilder) Extension(extension *PluginExtensionID) *ItemBuilder { - 
b.l.extension = extension.CopyRef() + b.l.extension = extension.CloneRef() return b } diff --git a/pkg/layer/layer.go b/pkg/layer/layer.go index d841a45e7..6f00affee 100644 --- a/pkg/layer/layer.go +++ b/pkg/layer/layer.go @@ -123,7 +123,7 @@ func (l *layerBase) Extension() *PluginExtensionID { if l == nil { return nil } - return l.extension.CopyRef() + return l.extension.CloneRef() } func (l *layerBase) Property() *PropertyID { diff --git a/pkg/layer/merged.go b/pkg/layer/merged.go index ed7c98d2b..6ac4217d5 100644 --- a/pkg/layer/merged.go +++ b/pkg/layer/merged.go @@ -50,7 +50,7 @@ func Merge(o Layer, p *Group) *Merged { Scene: o.Scene(), Name: o.Name(), PluginID: o.Plugin().CopyRef(), - ExtensionID: o.Extension().CopyRef(), + ExtensionID: o.Extension().CloneRef(), Property: &property.MergedMetadata{ Original: o.Property(), Parent: p.Property(), diff --git a/pkg/layer/merging/merged.go b/pkg/layer/merging/merged.go index 2ba6e0a65..a6d274b05 100644 --- a/pkg/layer/merging/merged.go +++ b/pkg/layer/merging/merged.go @@ -12,8 +12,8 @@ var ( type MergedLayer interface { Common() *MergedLayerCommon - AllDatasets() []layer.DatasetID - AllTags() []layer.TagID + AllDatasets() layer.DatasetIDList + AllTags() layer.TagIDList } type MergedLayerGroup struct { @@ -56,12 +56,12 @@ func (l *MergedLayerItem) Common() *MergedLayerCommon { return &l.MergedLayerCommon } -func (l *MergedLayerCommon) Datasets() []layer.DatasetID { - return l.datasetIDSet().All() +func (l *MergedLayerCommon) Datasets() layer.DatasetIDList { + return l.datasetIDSet().List() } func (l *MergedLayerCommon) Tags() []layer.TagID { - return l.tagIDSet().All() + return l.tagIDSet().List() } func (l *MergedLayerCommon) datasetIDSet() *layer.DatasetIDSet { @@ -86,21 +86,21 @@ func (l *MergedLayerCommon) tagIDSet() *layer.TagIDSet { return res } -func (l *MergedLayerItem) AllDatasets() []layer.DatasetID { +func (l *MergedLayerItem) AllDatasets() layer.DatasetIDList { if l == nil { return nil } return 
l.Datasets() } -func (l *MergedLayerItem) AllTags() []layer.TagID { +func (l *MergedLayerItem) AllTags() layer.TagIDList { if l == nil { return nil } return l.Tags() } -func (l *MergedLayerGroup) AllDatasets() []layer.DatasetID { +func (l *MergedLayerGroup) AllDatasets() layer.DatasetIDList { if l == nil { return nil } @@ -108,10 +108,10 @@ func (l *MergedLayerGroup) AllDatasets() []layer.DatasetID { for _, l := range l.Children { d.Add(l.AllDatasets()...) } - return d.All() + return d.List() } -func (l *MergedLayerGroup) AllTags() []layer.TagID { +func (l *MergedLayerGroup) AllTags() layer.TagIDList { if l == nil { return nil } @@ -119,5 +119,5 @@ func (l *MergedLayerGroup) AllTags() []layer.TagID { for _, l := range l.Children { d.Add(l.AllTags()...) } - return d.All() + return d.List() } diff --git a/pkg/plugin/id.go b/pkg/plugin/id.go index 29d4b9777..082f9f94f 100644 --- a/pkg/plugin/id.go +++ b/pkg/plugin/id.go @@ -20,11 +20,8 @@ var SceneIDFrom = id.SceneIDFrom var PropertySchemaIDFrom = id.PropertySchemaIDFrom var IDFromRef = id.PluginIDFromRef -var ExtensionIDFromRef = id.PluginExtensionIDFromRef var SceneIDFromRef = id.SceneIDFromRef var PropertySchemaIDFromRef = id.PropertySchemaIDFromRef -var SceneIDFromRefID = id.SceneIDFromRefID - var OfficialPluginID = id.OfficialPluginID var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/project/builder_test.go b/pkg/project/builder_test.go index 38e8401a3..3da84227d 100644 --- a/pkg/project/builder_test.go +++ b/pkg/project/builder_test.go @@ -224,7 +224,7 @@ func TestBuilder_Build(t *testing.T) { }, expected: &Project{ id: pid, - updatedAt: createdAt(pid), + updatedAt: pid.Timestamp(), }, }, { @@ -345,7 +345,7 @@ func TestBuilder_MustBuild(t *testing.T) { }, expected: &Project{ id: pid, - updatedAt: createdAt(pid), + updatedAt: pid.Timestamp(), }, }, { diff --git a/pkg/project/id.go b/pkg/project/id.go index cafbea20f..da16b1f43 100644 --- a/pkg/project/id.go +++ b/pkg/project/id.go @@ -1,8 +1,6 @@ package 
project import ( - "time" - "github.com/reearth/reearth-backend/pkg/id" ) @@ -21,11 +19,4 @@ var TeamIDFrom = id.TeamIDFrom var IDFromRef = id.ProjectIDFromRef var TeamIDFromRef = id.TeamIDFromRef -var IDFromRefID = id.ProjectIDFromRefID -var TeamIDFromRefID = id.TeamIDFromRefID - var ErrInvalidID = id.ErrInvalidID - -func createdAt(i ID) time.Time { - return id.ID(i).Timestamp() -} diff --git a/pkg/project/project.go b/pkg/project/project.go index ea7f85fce..d026f0b9f 100644 --- a/pkg/project/project.go +++ b/pkg/project/project.go @@ -109,7 +109,7 @@ func (p *Project) Team() TeamID { } func (p *Project) CreatedAt() time.Time { - return createdAt(p.id) + return p.id.Timestamp() } func (p *Project) Visualizer() visualizer.Visualizer { diff --git a/pkg/property/builder_test.go b/pkg/property/builder_test.go index d17d6fc23..312cc3ef2 100644 --- a/pkg/property/builder_test.go +++ b/pkg/property/builder_test.go @@ -19,7 +19,7 @@ func TestBuilder_ID(t *testing.T) { func TestBuilder_NewID(t *testing.T) { p := New().NewID().Scene(NewSceneID()).Schema(MustSchemaID("xxx~1.1.1/aa")).MustBuild() - assert.False(t, p.ID().IsNil()) + assert.False(t, p.ID().IsEmpty()) } func TestBuilder_Schema(t *testing.T) { diff --git a/pkg/property/group_builder_test.go b/pkg/property/group_builder_test.go index e5a16a0a9..8dd6dc3d4 100644 --- a/pkg/property/group_builder_test.go +++ b/pkg/property/group_builder_test.go @@ -127,7 +127,7 @@ func TestGroupBuilder_MustBuild(t *testing.T) { func TestGroupBuilder_NewID(t *testing.T) { g := NewGroup().NewID().SchemaGroup("x").MustBuild() - assert.False(t, g.ID().IsNil()) + assert.False(t, g.ID().IsEmpty()) } func TestGroupBuilder_InitGroupFrom(t *testing.T) { diff --git a/pkg/property/id.go b/pkg/property/id.go index 93f0d40ac..25fdd7911 100644 --- a/pkg/property/id.go +++ b/pkg/property/id.go @@ -1,18 +1,16 @@ package property import ( - "sort" - "github.com/reearth/reearth-backend/pkg/id" ) type ID = id.PropertyID type ItemID = id.PropertyItemID 
-type FieldID = id.PropertySchemaFieldID +type FieldID = id.PropertyFieldID type SchemaID = id.PropertySchemaID type SchemaGroupID = id.PropertySchemaGroupID type DatasetID = id.DatasetID -type DatasetFieldID = id.DatasetSchemaFieldID +type DatasetFieldID = id.DatasetFieldID type DatasetSchemaID = id.DatasetSchemaID type SceneID = id.SceneID @@ -20,7 +18,7 @@ var NewID = id.NewPropertyID var NewItemID = id.NewPropertyItemID var NewSchemaID = id.NewPropertySchemaID var NewDatasetID = id.NewDatasetID -var NewDatasetFieldID = id.NewDatasetSchemaFieldID +var NewDatasetFieldID = id.NewDatasetFieldID var NewDatasetSchemaID = id.NewDatasetSchemaID var NewSceneID = id.NewSceneID @@ -28,17 +26,15 @@ var MustID = id.MustPropertyID var MustItemID = id.MustPropertyItemID var MustSchemaID = id.MustPropertySchemaID var MustDatasetID = id.MustDatasetID -var MustDatasetFieldID = id.MustDatasetSchemaFieldID +var MustDatasetFieldID = id.MustDatasetFieldID var MustDatasetSchemaID = id.MustDatasetSchemaID var MustSceneID = id.MustSceneID var IDFrom = id.PropertyIDFrom var ItemIDFrom = id.PropertyItemIDFrom -var FieldIDFrom = id.PropertySchemaFieldIDFrom var SchemaIDFrom = id.PropertySchemaIDFrom -var SchemaGroupIDFrom = id.PropertySchemaGroupIDFrom var DatasetIDFrom = id.DatasetIDFrom -var DatasetFieldIDFrom = id.DatasetSchemaFieldIDFrom +var DatasetFieldIDFrom = id.DatasetFieldIDFrom var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom var SceneIDFrom = id.SceneIDFrom @@ -46,27 +42,15 @@ var IDFromRef = id.PropertyIDFromRef var ItemIDFromRef = id.PropertyItemIDFromRef var SchemaIDFromRef = id.PropertySchemaIDFromRef var DatasetIDFromRef = id.DatasetIDFromRef -var DatasetFieldIDFromRef = id.DatasetSchemaFieldIDFromRef +var DatasetFieldIDFromRef = id.DatasetFieldIDFromRef var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef var SceneIDFromRef = id.SceneIDFromRef -var IDFromRefID = id.PropertyIDFromRefID -var ItemIDFromRefID = id.PropertyItemIDFromRefID -var DatasetIDFromRefID = 
id.DatasetIDFromRefID -var DatasetFieldIDFromRefID = id.DatasetSchemaFieldIDFromRefID -var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID -var SceneIDFromRefID = id.SceneIDFromRefID - type IDSet = id.PropertyIDSet +type IDList = id.PropertyIDList type ItemIDSet = id.PropertyItemIDSet var NewIDSet = id.NewPropertyIDSet var NewItemIDSet = id.NewPropertyItemIDSet var ErrInvalidID = id.ErrInvalidID - -func sortIDs(a []ID) { - sort.SliceStable(a, func(i, j int) bool { - return id.ID(a[i]).Compare(id.ID(a[j])) < 0 - }) -} diff --git a/pkg/property/initializer_test.go b/pkg/property/initializer_test.go index f12d4c853..efa2f8ed4 100644 --- a/pkg/property/initializer_test.go +++ b/pkg/property/initializer_test.go @@ -47,7 +47,7 @@ func TestInitializer_Property(t *testing.T) { initializer.ID = nil actual, err = initializer.Property(sid) assert.NoError(t, err) - assert.False(t, actual.ID().IsNil()) + assert.False(t, actual.ID().IsEmpty()) } func TestInitializer_PropertyIncludingEmpty(t *testing.T) { @@ -128,7 +128,7 @@ func TestInitializerItem_PropertyItem(t *testing.T) { item.ID = nil created, err = item.PropertyItem() assert.NoError(t, err) - assert.False(t, created.ID().IsNil()) + assert.False(t, created.ID().IsEmpty()) } func TestInitializerItem_PropertyGroup(t *testing.T) { @@ -152,7 +152,7 @@ func TestInitializerItem_PropertyGroup(t *testing.T) { // check if a new id is generated item.ID = nil - assert.False(t, item.PropertyGroup().ID().IsNil()) + assert.False(t, item.PropertyGroup().ID().IsEmpty()) } func TestInitializerItem_PropertyGroupList(t *testing.T) { @@ -172,7 +172,7 @@ func TestInitializerItem_PropertyGroupList(t *testing.T) { // check if a new id is generated item.ID = nil - assert.False(t, item.PropertyGroupList().ID().IsNil()) + assert.False(t, item.PropertyGroupList().ID().IsEmpty()) } func TestInitializerGroup_Clone(t *testing.T) { @@ -223,7 +223,7 @@ func TestInitializerGroup_PropertyGroup(t *testing.T) { item.ID = nil p, err = 
item.PropertyGroup(parentItem) assert.NoError(t, err) - assert.False(t, p.ID().IsNil()) + assert.False(t, p.ID().IsEmpty()) } func TestInitializerField_Clone(t *testing.T) { diff --git a/pkg/property/list.go b/pkg/property/list.go index 62539ac27..af1cae0a0 100644 --- a/pkg/property/list.go +++ b/pkg/property/list.go @@ -1,5 +1,11 @@ package property +import ( + "sort" + + "github.com/samber/lo" +) + type List []*Property func (l List) IDs() []ID { @@ -35,6 +41,12 @@ func (l List) Schemas() []SchemaID { return schemas } +func (l List) Sort() { + sort.Slice(l, func(i, j int) bool { + return l[i].ID().Compare(l[j].ID()) < 0 + }) +} + func (l List) Map() Map { m := make(Map, len(l)) return m.Add(l...) @@ -94,12 +106,7 @@ func (m Map) Merge(m2 Map) Map { } func (m Map) Keys() []ID { - keys := make([]ID, 0, len(m)) - for k := range m { - keys = append(keys, k) - } - sortIDs(keys) - return keys + return IDList(lo.Keys(m)).Sort() } func (m Map) Len() int { diff --git a/pkg/property/pointer.go b/pkg/property/pointer.go index b30b2420a..990eb73f6 100644 --- a/pkg/property/pointer.go +++ b/pkg/property/pointer.go @@ -13,9 +13,9 @@ func NewPointer(sg *SchemaGroupID, i *ItemID, f *FieldID) *Pointer { return nil } return &Pointer{ - schemaGroup: sg.CopyRef(), + schemaGroup: sg.CloneRef(), item: i.CopyRef(), - field: f.CopyRef(), + field: f.CloneRef(), } } @@ -27,7 +27,7 @@ func PointToEverything() *Pointer { // PointField creates a new Pointer pointing the field in properties. 
func PointField(sg *SchemaGroupID, i *ItemID, f FieldID) *Pointer { return &Pointer{ - schemaGroup: sg.CopyRef(), + schemaGroup: sg.CloneRef(), item: i.CopyRef(), field: &f, } @@ -75,9 +75,9 @@ func (p *Pointer) Clone() *Pointer { return nil } return &Pointer{ - field: p.field.CopyRef(), + field: p.field.CloneRef(), item: p.item.CopyRef(), - schemaGroup: p.schemaGroup.CopyRef(), + schemaGroup: p.schemaGroup.CloneRef(), } } @@ -232,7 +232,7 @@ func (p *Pointer) AllFields() *Pointer { return nil } return &Pointer{ - schemaGroup: p.schemaGroup.CopyRef(), + schemaGroup: p.schemaGroup.CloneRef(), item: p.item.CopyRef(), field: nil, } @@ -242,8 +242,8 @@ func (p *Pointer) GetAll() (sg *SchemaGroupID, i *ItemID, f *FieldID) { if p == nil { return } - sg = p.schemaGroup.CopyRef() + sg = p.schemaGroup.CloneRef() i = p.item.CopyRef() - f = p.field.CopyRef() + f = p.field.CloneRef() return } diff --git a/pkg/property/schema_group_builder.go b/pkg/property/schema_group_builder.go index 711dad7c1..b0277026e 100644 --- a/pkg/property/schema_group_builder.go +++ b/pkg/property/schema_group_builder.go @@ -72,6 +72,6 @@ func (b *SchemaGroupBuilder) Title(title i18n.String) *SchemaGroupBuilder { } func (b *SchemaGroupBuilder) RepresentativeField(representativeField *FieldID) *SchemaGroupBuilder { - b.p.representativeField = representativeField.CopyRef() + b.p.representativeField = representativeField.CloneRef() return b } diff --git a/pkg/property/schema_group_list.go b/pkg/property/schema_group_list.go index ae119ef21..195a951d2 100644 --- a/pkg/property/schema_group_list.go +++ b/pkg/property/schema_group_list.go @@ -1,7 +1,5 @@ package property -import "github.com/reearth/reearth-backend/pkg/id" - type SchemaGroupList struct { groups []*SchemaGroup } @@ -55,7 +53,7 @@ func (p *SchemaGroupList) GroupAndFields() []SchemaGroupAndField { return fields } -func (p *SchemaGroupList) Field(id id.PropertySchemaFieldID) *SchemaField { +func (p *SchemaGroupList) Field(id FieldID) 
*SchemaField { if p == nil { return nil } @@ -68,7 +66,7 @@ func (p *SchemaGroupList) Field(id id.PropertySchemaFieldID) *SchemaField { return nil } -func (p *SchemaGroupList) Group(id id.PropertySchemaGroupID) *SchemaGroup { +func (p *SchemaGroupList) Group(id SchemaGroupID) *SchemaGroup { if p == nil { return nil } @@ -81,7 +79,7 @@ func (p *SchemaGroupList) Group(id id.PropertySchemaGroupID) *SchemaGroup { return nil } -func (p *SchemaGroupList) GroupByField(id id.PropertySchemaFieldID) *SchemaGroup { +func (p *SchemaGroupList) GroupByField(id FieldID) *SchemaGroup { if p == nil { return nil } diff --git a/pkg/property/schema_group_list_test.go b/pkg/property/schema_group_list_test.go index e9223a897..12627e0fb 100644 --- a/pkg/property/schema_group_list_test.go +++ b/pkg/property/schema_group_list_test.go @@ -3,7 +3,6 @@ package property import ( "testing" - "github.com/reearth/reearth-backend/pkg/id" "github.com/stretchr/testify/assert" ) @@ -54,7 +53,7 @@ func TestSchemaGroupList_Field(t *testing.T) { tests := []struct { name string target *SchemaGroupList - input id.PropertySchemaFieldID + input FieldID want *SchemaField }{ { @@ -69,7 +68,7 @@ func TestSchemaGroupList_Field(t *testing.T) { { name: "not found", target: testSchemaGroupList1, - input: id.PropertySchemaFieldID("zz"), + input: FieldID("zz"), }, } diff --git a/pkg/property/schema_test.go b/pkg/property/schema_test.go index 4c01cf2af..4e5739075 100644 --- a/pkg/property/schema_test.go +++ b/pkg/property/schema_test.go @@ -35,7 +35,7 @@ func TestLinkableField_Validate(t *testing.T) { S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), LF: LinkableFields{ URL: &SchemaFieldPointer{ - Field: id.PropertySchemaFieldID("xx"), + Field: FieldID("xx"), }, }, Expected: false, @@ -45,7 +45,7 @@ func TestLinkableField_Validate(t *testing.T) { S: NewSchema().ID(sid).Groups(NewSchemaGroupList([]*SchemaGroup{sg})).MustBuild(), LF: LinkableFields{ LatLng: &SchemaFieldPointer{ - Field: 
id.PropertySchemaFieldID("xx"), + Field: FieldID("xx"), }, }, Expected: false, diff --git a/pkg/scene/builder.go b/pkg/scene/builder.go index 70a0b41e9..074ca5dd0 100644 --- a/pkg/scene/builder.go +++ b/pkg/scene/builder.go @@ -16,10 +16,10 @@ func (b *Builder) Build() (*Scene, error) { if b.scene.id.IsNil() { return nil, ErrInvalidID } - if b.scene.team.ID().IsNil() { + if b.scene.team.IsNil() { return nil, ErrInvalidID } - if b.scene.rootLayer.ID().IsNil() { + if b.scene.rootLayer.IsNil() { return nil, ErrInvalidID } if b.scene.widgets == nil { diff --git a/pkg/scene/builder/builder_test.go b/pkg/scene/builder/builder_test.go index 7fe1ac367..8a0ad664f 100644 --- a/pkg/scene/builder/builder_test.go +++ b/pkg/scene/builder/builder_test.go @@ -45,7 +45,7 @@ func TestSceneBuilder(t *testing.T) { ds1 := dataset.New().ID(ds1id).Fields([]*dataset.Field{ dataset.NewField( ds1f1, - dataset.ValueTypeRef.ValueFrom(ds2id.ID()), + dataset.ValueTypeRef.ValueFrom(ds2id), "ds1f1", ), dataset.NewField( @@ -57,7 +57,7 @@ func TestSceneBuilder(t *testing.T) { ds2 := dataset.New().ID(ds2id).Fields([]*dataset.Field{ dataset.NewField( ds2f1, - dataset.ValueTypeRef.ValueFrom(ds3id.ID()), + dataset.ValueTypeRef.ValueFrom(ds3id), "ds2", ), }).Scene(sceneID).Schema(dss2id).Source("ds2").MustBuild() @@ -73,9 +73,9 @@ func TestSceneBuilder(t *testing.T) { tag1 := tag.NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() tag2 := tag.NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() tag3 := tag.NewItem().NewID().Label("unused").Scene(sceneID).MustBuild() - tag4 := tag.NewGroup().NewID().Label("bar").Scene(sceneID).Tags(tag.IDListFrom(([]tag.ID{ + tag4 := tag.NewGroup().NewID().Label("bar").Scene(sceneID).Tags(tag.IDList{ tag1.ID(), tag2.ID(), tag3.ID(), - }))).MustBuild() + }).MustBuild() tag5 := tag.NewItem().NewID().Label("dummy").Scene(scene.NewID()).MustBuild() // dummy tags := tag.List{tag1, tag2, tag3, tag4, tag5} diff --git a/pkg/scene/builder/scene.go 
b/pkg/scene/builder/scene.go index af1ff48f8..f74bc5a24 100644 --- a/pkg/scene/builder/scene.go +++ b/pkg/scene/builder/scene.go @@ -108,7 +108,7 @@ func (b *Builder) tags(ctx context.Context, s *scene.Scene) ([]*tagJSON, error) func toTag(t tag.Tag, m tag.Map) tagJSON { var tags []tagJSON - if children := tag.GroupFrom(t).Tags().Tags(); children != nil { + if children := tag.GroupFrom(t).Tags(); children != nil { tags = make([]tagJSON, 0, len(children)) for _, tid := range children { t, ok := m[tid] diff --git a/pkg/scene/id.go b/pkg/scene/id.go index 4326ae467..b14133c8f 100644 --- a/pkg/scene/id.go +++ b/pkg/scene/id.go @@ -1,8 +1,6 @@ package scene import ( - "time" - "github.com/reearth/reearth-backend/pkg/id" ) @@ -16,6 +14,9 @@ type PluginExtensionID = id.PluginExtensionID type ProjectID = id.ProjectID type TeamID = id.TeamID +type IDList = id.SceneIDList +type WidgetIDList = id.WidgetIDList + var NewID = id.NewSceneID var NewWidgetID = id.NewWidgetID var NewClusterID = id.NewClusterID @@ -52,61 +53,5 @@ var PluginIDFromRef = id.PluginIDFromRef var ProjectIDFromRef = id.ProjectIDFromRef var TeamIDFromRef = id.TeamIDFromRef -var IDFromRefID = id.SceneIDFromRefID -var WidgetIDFromRefID = id.WidgetIDFromRefID -var ClusterIDFromRefID = id.ClusterIDFromRefID -var LayerIDFromRefID = id.LayerIDFromRefID -var PropertyIDFromRefID = id.PropertyIDFromRefID -var ProjectIDFromRefID = id.ProjectIDFromRefID -var TeamIDFromRefID = id.TeamIDFromRefID - var OfficialPluginID = id.OfficialPluginID var ErrInvalidID = id.ErrInvalidID - -func createdAt(i ID) time.Time { - return id.ID(i).Timestamp() -} - -type IDList []ID - -func (l IDList) Clone() IDList { - if l == nil { - return nil - } - return append(IDList{}, l...) 
-} - -func (l IDList) Filter(ids ...ID) IDList { - if l == nil { - return nil - } - - res := make(IDList, 0, len(l)) - for _, t := range l { - for _, t2 := range ids { - if t == t2 { - res = append(res, t) - } - } - } - return res -} - -func (l IDList) Includes(ids ...ID) bool { - for _, t := range l { - for _, t2 := range ids { - if t == t2 { - return true - } - } - } - return false -} - -func (l IDList) Len() int { - return len(l) -} - -func (k IDList) Strings() []string { - return id.SceneIDsToStrings(k) -} diff --git a/pkg/scene/id_test.go b/pkg/scene/id_test.go deleted file mode 100644 index a76de6e56..000000000 --- a/pkg/scene/id_test.go +++ /dev/null @@ -1,47 +0,0 @@ -package scene - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestIDList_Clone(t *testing.T) { - t1 := NewID() - t2 := NewID() - t3 := NewID() - ids := IDList{t1, t2, t3} - assert.Equal(t, ids, ids.Clone()) - assert.NotSame(t, ids, ids.Clone()) - assert.Nil(t, IDList(nil).Clone()) -} - -func TestIDList_Filter(t *testing.T) { - t1 := NewID() - t2 := NewID() - t3 := NewID() - t4 := NewID() - assert.Equal(t, IDList{t1}, IDList{t1, t2, t3}.Filter(t1)) - assert.Equal(t, IDList{t1, t3}, IDList{t1, t2, t3}.Filter(t1, t3)) - assert.Equal(t, IDList{}, IDList{t1, t2, t3}.Filter(t4)) - assert.Equal(t, IDList(nil), IDList(nil).Filter(t4)) -} - -func TestIDList_Includes(t *testing.T) { - t1 := NewID() - t2 := NewID() - t3 := NewID() - assert.True(t, IDList{t1, t2, t3}.Includes(t1)) - assert.False(t, IDList{t1, t2}.Includes(t3)) - assert.False(t, IDList(nil).Includes(t1)) -} - -func TestIDList_Len(t *testing.T) { - t1 := NewID() - t2 := NewID() - t3 := NewID() - assert.Equal(t, 2, IDList{t1, t2}.Len()) - assert.Equal(t, 3, IDList{t1, t2, t3}.Len()) - assert.Equal(t, 0, IDList{}.Len()) - assert.Equal(t, 0, IDList(nil).Len()) -} diff --git a/pkg/scene/scene.go b/pkg/scene/scene.go index d22c0d176..3957edbb9 100644 --- a/pkg/scene/scene.go +++ b/pkg/scene/scene.go @@ -30,7 +30,7 @@ 
func (s *Scene) CreatedAt() time.Time { if s == nil { return time.Time{} } - return createdAt(s.id) + return s.id.Timestamp() } func (s *Scene) Project() ProjectID { diff --git a/pkg/scene/scene_test.go b/pkg/scene/scene_test.go index 7b061c4a1..83e6c771f 100644 --- a/pkg/scene/scene_test.go +++ b/pkg/scene/scene_test.go @@ -82,14 +82,14 @@ func TestScene_Properties(t *testing.T) { func TestSceneNil(t *testing.T) { var s *Scene assert.Nil(t, s.Properties()) - assert.True(t, s.ID().IsNil()) + assert.True(t, s.ID().IsEmpty()) assert.Nil(t, s.Widgets()) - assert.True(t, s.Project().IsNil()) - assert.True(t, s.Team().IsNil()) - assert.True(t, s.RootLayer().IsNil()) + assert.True(t, s.Project().IsEmpty()) + assert.True(t, s.Team().IsEmpty()) + assert.True(t, s.RootLayer().IsEmpty()) assert.True(t, s.CreatedAt().IsZero()) assert.Nil(t, s.Plugins()) - assert.True(t, s.Property().IsNil()) + assert.True(t, s.Property().IsEmpty()) } func TestScene_Clusters(t *testing.T) { diff --git a/pkg/scene/sceneops/dataset_migrator.go b/pkg/scene/sceneops/dataset_migrator.go index c67c42a4f..5dde9729a 100644 --- a/pkg/scene/sceneops/dataset_migrator.go +++ b/pkg/scene/sceneops/dataset_migrator.go @@ -32,7 +32,7 @@ func (r MigrateDatasetResult) Merge(r2 MigrateDatasetResult) MigrateDatasetResul return MigrateDatasetResult{ Layers: r.Layers.Merge(r2.Layers), Properties: r.Properties.Merge(r2.Properties), - RemovedLayers: r.RemovedLayers.Merge(r2.RemovedLayers), + RemovedLayers: r.RemovedLayers.Concat(r2.RemovedLayers), } } diff --git a/pkg/scene/widget_align_system_test.go b/pkg/scene/widget_align_system_test.go index 1259bb280..8050c9e78 100644 --- a/pkg/scene/widget_align_system_test.go +++ b/pkg/scene/widget_align_system_test.go @@ -81,8 +81,8 @@ func TestWidgetAlignSystem_Find(t *testing.T) { } was := NewWidgetAlignSystem() - was.Zone(WidgetZoneInner).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid1, wid2, wid3}) - 
was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid4, wid5}) + was.Zone(WidgetZoneInner).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll(WidgetIDList{wid1, wid2, wid3}) + was.Zone(WidgetZoneOuter).Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll(WidgetIDList{wid4, wid5}) index, location := was.Find(tc.Input) assert.Equal(t, tc.Expected1, index) @@ -98,32 +98,32 @@ func TestWidgetAlignSystem_Remove(t *testing.T) { Name string Zone WidgetZoneType Input WidgetID - Expected []WidgetID + Expected WidgetIDList Nil bool }{ { Name: "inner: remove a widget from widget section", Zone: WidgetZoneInner, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "inner: couldn't find widgetId", Zone: WidgetZoneInner, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "outer: remove a widget from widget section", Zone: WidgetZoneOuter, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "outer: couldn't find widgetId", Zone: WidgetZoneOuter, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "nil", @@ -164,8 +164,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Input2 WidgetLocation Input3 int Source WidgetLocation - ExpectedSource []WidgetID - ExpectedDest []WidgetID + ExpectedSource WidgetIDList + ExpectedDest WidgetIDList Nil bool }{ { @@ -182,8 +182,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionLeft, Area: WidgetAreaTop, }, - ExpectedSource: []WidgetID{wid2, wid1, wid3}, - ExpectedDest: []WidgetID{wid2, wid1, wid3}, + ExpectedSource: WidgetIDList{wid2, wid1, wid3}, + ExpectedDest: WidgetIDList{wid2, wid1, wid3}, }, { Name: "move a widget in the same area with negative index", @@ -199,8 +199,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionLeft, Area: WidgetAreaTop, }, - ExpectedSource: []WidgetID{wid2, wid3, wid1}, - ExpectedDest: 
[]WidgetID{wid2, wid3, wid1}, + ExpectedSource: WidgetIDList{wid2, wid3, wid1}, + ExpectedDest: WidgetIDList{wid2, wid3, wid1}, }, { Name: "move a widget to a different area with positive index", @@ -216,8 +216,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionRight, Area: WidgetAreaTop, }, - ExpectedSource: []WidgetID{wid2, wid3}, - ExpectedDest: []WidgetID{wid4, wid1, wid5}, + ExpectedSource: WidgetIDList{wid2, wid3}, + ExpectedDest: WidgetIDList{wid4, wid1, wid5}, }, { Name: "move a widget to a different area with negative index", @@ -233,8 +233,8 @@ func TestWidgetAlignSystem_Move(t *testing.T) { Section: WidgetSectionCenter, Area: WidgetAreaMiddle, }, - ExpectedSource: []WidgetID{wid2, wid3}, - ExpectedDest: []WidgetID{wid4, wid5, wid1}, + ExpectedSource: WidgetIDList{wid2, wid3}, + ExpectedDest: WidgetIDList{wid4, wid5, wid1}, }, { Name: "nil", @@ -253,9 +253,9 @@ func TestWidgetAlignSystem_Move(t *testing.T) { } ws := NewWidgetAlignSystem() - ws.Area(tc.Source).AddAll([]WidgetID{wid1, wid2, wid3}) + ws.Area(tc.Source).AddAll(WidgetIDList{wid1, wid2, wid3}) if tc.Source != tc.Input2 { - ws.Area(tc.Input2).AddAll([]WidgetID{wid4, wid5}) + ws.Area(tc.Input2).AddAll(WidgetIDList{wid4, wid5}) } ws.Move(tc.Input1, tc.Input2, tc.Input3) diff --git a/pkg/scene/widget_area.go b/pkg/scene/widget_area.go index 81d98f4a4..fe69840be 100644 --- a/pkg/scene/widget_area.go +++ b/pkg/scene/widget_area.go @@ -1,8 +1,12 @@ package scene +import ( + "github.com/samber/lo" +) + // WidgetArea has the widgets and alignment information found in each part area of a section. type WidgetArea struct { - widgetIds []WidgetID + widgetIds WidgetIDList align WidgetAlignType } @@ -22,12 +26,12 @@ func NewWidgetArea(widgetIds []WidgetID, align WidgetAlignType) *WidgetArea { } // WidgetIds will return a slice of widget ids from a specific area. 
-func (a *WidgetArea) WidgetIDs() []WidgetID { +func (a *WidgetArea) WidgetIDs() WidgetIDList { if a == nil { return nil } - return append([]WidgetID{}, a.widgetIds...) + return a.widgetIds.Clone() } // Alignment will return the alignment of a specific area. @@ -43,21 +47,21 @@ func (a *WidgetArea) Find(wid WidgetID) int { if a == nil { return -1 } - - for i, w := range a.widgetIds { - if w == wid { - return i - } - } - return -1 + return lo.IndexOf(a.widgetIds, wid) } func (a *WidgetArea) Add(wid WidgetID, index int) { - if a == nil || wid.Contains(a.widgetIds) { + if a == nil || a.widgetIds.Has(wid) { return } - a.widgetIds = insertWidgetID(a.widgetIds, wid, index) + if i := a.widgetIds.Index(wid); i >= 0 { + a.widgetIds = a.widgetIds.DeleteAt(i) + if i < index { + index-- + } + } + a.widgetIds = a.widgetIds.Insert(index, wid) } func (a *WidgetArea) AddAll(wids []WidgetID) { @@ -65,15 +69,7 @@ func (a *WidgetArea) AddAll(wids []WidgetID) { return } - widgetIds := make([]WidgetID, 0, len(wids)) - for _, w := range wids { - if w.Contains(a.widgetIds) || w.Contains(widgetIds) { - continue - } - widgetIds = append(widgetIds, w) - } - - a.widgetIds = widgetIds + a.widgetIds = a.widgetIds.AddUniq(wids...) } func (a *WidgetArea) SetAlignment(at WidgetAlignType) { @@ -95,7 +91,7 @@ func (a *WidgetArea) Remove(wid WidgetID) { for i, w := range a.widgetIds { if w == wid { - a.widgetIds = removeWidgetID(a.widgetIds, i) + a.widgetIds = a.widgetIds.DeleteAt(i) return } } @@ -107,18 +103,5 @@ func (a *WidgetArea) Move(from, to int) { } wid := a.widgetIds[from] - a.widgetIds = insertWidgetID(removeWidgetID(a.widgetIds, from), wid, to) -} - -// insertWidgetID is used in moveInt to add the widgetID to a new position(index). -func insertWidgetID(array []WidgetID, value WidgetID, index int) []WidgetID { - if index < 0 { - return append(array, value) - } - return append(array[:index], append([]WidgetID{value}, array[index:]...)...) 
-} - -// removeWidgetID is used in moveInt to remove the widgetID from original position(index). -func removeWidgetID(array []WidgetID, index int) []WidgetID { - return append(array[:index], array[index+1:]...) + a.widgetIds = a.widgetIds.DeleteAt(from).Insert(to, wid) } diff --git a/pkg/scene/widget_area_test.go b/pkg/scene/widget_area_test.go index 57b30afa6..fa65c3d80 100644 --- a/pkg/scene/widget_area_test.go +++ b/pkg/scene/widget_area_test.go @@ -12,27 +12,27 @@ func TestWidgetArea(t *testing.T) { tests := []struct { Name string - Input1 []WidgetID + Input1 WidgetIDList Input2 WidgetAlignType Expected *WidgetArea }{ { Name: "New widget area with proper widget ids and widget align type", - Input1: []WidgetID{wid1, wid2}, + Input1: WidgetIDList{wid1, wid2}, Input2: WidgetAlignEnd, - Expected: &WidgetArea{widgetIds: []WidgetID{wid1, wid2}, align: WidgetAlignEnd}, + Expected: &WidgetArea{widgetIds: WidgetIDList{wid1, wid2}, align: WidgetAlignEnd}, }, { Name: "New widget area with duplicated widget ids", - Input1: []WidgetID{wid1, wid1}, + Input1: WidgetIDList{wid1}, Input2: WidgetAlignEnd, - Expected: &WidgetArea{widgetIds: []WidgetID{wid1}, align: WidgetAlignEnd}, + Expected: &WidgetArea{widgetIds: WidgetIDList{wid1}, align: WidgetAlignEnd}, }, { Name: "New widget area with wrong widget align type", - Input1: []WidgetID{wid1, wid2}, + Input1: WidgetIDList{wid1, wid2}, Input2: "wrong", - Expected: &WidgetArea{widgetIds: []WidgetID{wid1, wid2}, align: WidgetAlignStart}, + Expected: &WidgetArea{widgetIds: WidgetIDList{wid1, wid2}, align: WidgetAlignStart}, }, } @@ -48,7 +48,7 @@ func TestWidgetArea(t *testing.T) { func TestWidgetArea_WidgetIDs(t *testing.T) { wid := NewWidgetID() - wa := NewWidgetArea([]WidgetID{wid}, WidgetAlignStart) + wa := NewWidgetArea(WidgetIDList{wid}, WidgetAlignStart) assert.Equal(t, wa.widgetIds, wa.WidgetIDs()) assert.Nil(t, (*WidgetArea)(nil).WidgetIDs()) } @@ -93,7 +93,7 @@ func TestWidgetArea_Find(t *testing.T) { var wa *WidgetArea if 
!tc.Nil { - wa = NewWidgetArea([]WidgetID{wid}, WidgetAlignStart) + wa = NewWidgetArea(WidgetIDList{wid}, WidgetAlignStart) } assert.Equal(t, tc.Expected, wa.Find(tc.Input)) }) @@ -110,25 +110,25 @@ func TestWidgetArea_Add(t *testing.T) { Nil bool Input WidgetID Input2 int - Expected []WidgetID + Expected WidgetIDList }{ { Name: "add a widget id", Input: wid3, Input2: -1, - Expected: []WidgetID{wid1, wid2, wid3}, + Expected: WidgetIDList{wid1, wid2, wid3}, }, { Name: "add a widget id but already exists", Input: wid1, Input2: -1, - Expected: []WidgetID{wid1, wid2}, + Expected: WidgetIDList{wid1, wid2}, }, { Name: "insert a widget id", Input: wid3, Input2: 1, - Expected: []WidgetID{wid1, wid3, wid2}, + Expected: WidgetIDList{wid1, wid3, wid2}, }, { Name: "nil widget area", @@ -146,7 +146,7 @@ func TestWidgetArea_Add(t *testing.T) { return } - wa := NewWidgetArea([]WidgetID{wid1, wid2}, WidgetAlignStart) + wa := NewWidgetArea(WidgetIDList{wid1, wid2}, WidgetAlignStart) wa.Add(tc.Input, tc.Input2) assert.Equal(t, tc.Expected, wa.WidgetIDs()) }) @@ -160,18 +160,18 @@ func TestWidgetArea_AddAll(t *testing.T) { tests := []struct { Name string Nil bool - Input []WidgetID - Expected []WidgetID + Input WidgetIDList + Expected WidgetIDList }{ { Name: "add widget ids", - Input: []WidgetID{wid1, wid2}, - Expected: []WidgetID{wid1, wid2}, + Input: WidgetIDList{wid1, wid2}, + Expected: WidgetIDList{wid1, wid2}, }, { Name: "add widget ids but duplicated", - Input: []WidgetID{wid1, wid1, wid2}, - Expected: []WidgetID{wid1, wid2}, + Input: WidgetIDList{wid1, wid1, wid2}, + Expected: WidgetIDList{wid1, wid2}, }, { Name: "nil widget area", @@ -243,18 +243,18 @@ func TestWidgetArea_Remove(t *testing.T) { tests := []struct { Name string Input WidgetID - Expected []WidgetID + Expected WidgetIDList Nil bool }{ { Name: "Remove a widget from widget area", Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "Remove a widget from widget area that doesn't exist", 
Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "Return nil if no widget area", @@ -270,7 +270,7 @@ func TestWidgetArea_Remove(t *testing.T) { var wa *WidgetArea if !tc.Nil { - wa = NewWidgetArea([]WidgetID{wid}, "") + wa = NewWidgetArea(WidgetIDList{wid}, "") } wa.Remove(tc.Input) if !tc.Nil { @@ -288,20 +288,20 @@ func TestWidgetArea_Move(t *testing.T) { tests := []struct { Name string Input1, Input2 int - Expected []WidgetID + Expected WidgetIDList Nil bool }{ { Name: "Move widget Id", Input1: 1, Input2: 2, - Expected: []WidgetID{wid, wid3, wid2}, + Expected: WidgetIDList{wid, wid3, wid2}, }, { Name: "Move widget Id", Input1: 2, Input2: 0, - Expected: []WidgetID{wid3, wid, wid2}, + Expected: WidgetIDList{wid3, wid, wid2}, }, { Name: "Nil", @@ -316,7 +316,7 @@ func TestWidgetArea_Move(t *testing.T) { var wa *WidgetArea if !tc.Nil { - wa = NewWidgetArea([]WidgetID{wid, wid2, wid3}, "") + wa = NewWidgetArea(WidgetIDList{wid, wid2, wid3}, "") } wa.Move(tc.Input1, tc.Input2) if !tc.Nil { diff --git a/pkg/scene/widget_section_test.go b/pkg/scene/widget_section_test.go index 0328befa2..9531cd8f2 100644 --- a/pkg/scene/widget_section_test.go +++ b/pkg/scene/widget_section_test.go @@ -82,9 +82,9 @@ func TestWidgetSection_Find(t *testing.T) { } ws := NewWidgetSection() - ws.Area(WidgetAreaTop).AddAll([]WidgetID{wid1, wid2, wid3}) - ws.Area(WidgetAreaMiddle).AddAll([]WidgetID{wid4, wid5}) - ws.Area(WidgetAreaBottom).AddAll([]WidgetID{wid6, wid7}) + ws.Area(WidgetAreaTop).AddAll(WidgetIDList{wid1, wid2, wid3}) + ws.Area(WidgetAreaMiddle).AddAll(WidgetIDList{wid4, wid5}) + ws.Area(WidgetAreaBottom).AddAll(WidgetIDList{wid6, wid7}) index, area := ws.Find(tc.Input) assert.Equal(t, tc.Expected1, index) @@ -100,44 +100,44 @@ func TestWidgetSection_Remove(t *testing.T) { Name string Area WidgetAreaType Input WidgetID - Expected []WidgetID + Expected WidgetIDList Nil bool }{ { Name: "top: remove a widget from widget section", Area: 
WidgetAreaTop, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "top: couldn't find widgetId", Area: WidgetAreaTop, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "middle: remove a widget from widget section", Area: WidgetAreaMiddle, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "middle: couldn't find widgetId", Area: WidgetAreaMiddle, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "bottom: remove a widget from widget section", Area: WidgetAreaBottom, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "bottom: couldn't find widgetId", Area: WidgetAreaBottom, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "nil", diff --git a/pkg/scene/widget_zone_test.go b/pkg/scene/widget_zone_test.go index b3aa13458..d9ad579a1 100644 --- a/pkg/scene/widget_zone_test.go +++ b/pkg/scene/widget_zone_test.go @@ -89,9 +89,9 @@ func TestWidgetZone_Find(t *testing.T) { } ez := NewWidgetZone() - ez.Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll([]WidgetID{wid1, wid2, wid3}) - ez.Section(WidgetSectionCenter).Area(WidgetAreaTop).AddAll([]WidgetID{wid4, wid5}) - ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll([]WidgetID{wid6, wid7}) + ez.Section(WidgetSectionLeft).Area(WidgetAreaTop).AddAll(WidgetIDList{wid1, wid2, wid3}) + ez.Section(WidgetSectionCenter).Area(WidgetAreaTop).AddAll(WidgetIDList{wid4, wid5}) + ez.Section(WidgetSectionRight).Area(WidgetAreaTop).AddAll(WidgetIDList{wid6, wid7}) index, section, area := ez.Find(tc.Input) assert.Equal(t, tc.Expected1, index) @@ -108,44 +108,44 @@ func TestWidgetZone_Remove(t *testing.T) { Name string Section WidgetSectionType Input WidgetID - Expected []WidgetID + Expected WidgetIDList Nil bool }{ { Name: "left: remove a widget from widget section", Section: WidgetSectionLeft, Input: wid, - Expected: 
[]WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "left: couldn't find widgetId", Section: WidgetSectionLeft, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "center: remove a widget from widget section", Section: WidgetSectionCenter, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "center: couldn't find widgetId", Section: WidgetSectionCenter, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "right: remove a widget from widget section", Section: WidgetSectionRight, Input: wid, - Expected: []WidgetID{}, + Expected: WidgetIDList{}, }, { Name: "right: couldn't find widgetId", Section: WidgetSectionRight, Input: NewWidgetID(), - Expected: []WidgetID{wid}, + Expected: WidgetIDList{wid}, }, { Name: "nil", diff --git a/pkg/tag/group.go b/pkg/tag/group.go index 0c9c9a889..0a5302615 100644 --- a/pkg/tag/group.go +++ b/pkg/tag/group.go @@ -2,12 +2,19 @@ package tag type Group struct { tag - tags *IDList + tags IDList } -func (g *Group) Tags() *IDList { +func (g *Group) Tags() IDList { if g == nil { return nil } - return g.tags + return g.tags.Clone() +} + +func (g *Group) RemoveTag(ids ...ID) { + if g == nil { + return + } + g.tags = g.tags.Delete(ids...) 
} diff --git a/pkg/tag/group_builder.go b/pkg/tag/group_builder.go index 77283cb6b..e84d28f3e 100644 --- a/pkg/tag/group_builder.go +++ b/pkg/tag/group_builder.go @@ -57,10 +57,7 @@ func (b *GroupBuilder) Scene(sid SceneID) *GroupBuilder { return b } -func (b *GroupBuilder) Tags(tl *IDList) *GroupBuilder { - if tl != nil { - b.g.tags = tl - } - +func (b *GroupBuilder) Tags(tl IDList) *GroupBuilder { + b.g.tags = tl.Clone() return b } diff --git a/pkg/tag/group_test.go b/pkg/tag/group_test.go index 2be71f42e..c979dce2b 100644 --- a/pkg/tag/group_test.go +++ b/pkg/tag/group_test.go @@ -16,7 +16,7 @@ func TestGroupBuilder_NewID(t *testing.T) { func TestGroupBuilder_Build(t *testing.T) { tid := NewID() sid := NewSceneID() - tags := []ID{ + tags := IDList{ NewID(), NewID(), } @@ -25,7 +25,7 @@ func TestGroupBuilder_Build(t *testing.T) { Name, Label string Id ID Scene SceneID - Tags *IDList + Tags IDList Expected struct { Group Group Error error @@ -69,9 +69,7 @@ func TestGroupBuilder_Build(t *testing.T) { Id: tid, Label: "xxx", Scene: sid, - Tags: &IDList{ - tags: tags, - }, + Tags: tags, Expected: struct { Group Group Error error @@ -82,9 +80,7 @@ func TestGroupBuilder_Build(t *testing.T) { label: "xxx", sceneId: sid, }, - tags: &IDList{ - tags: tags, - }, + tags: tags, }, }, }, diff --git a/pkg/tag/id.go b/pkg/tag/id.go index 2b0e5d43c..d8f606cc1 100644 --- a/pkg/tag/id.go +++ b/pkg/tag/id.go @@ -6,36 +6,32 @@ type ID = id.TagID type SceneID = id.SceneID type DatasetID = id.DatasetID type DatasetSchemaID = id.DatasetSchemaID -type DatasetFieldID = id.DatasetSchemaFieldID +type DatasetFieldID = id.DatasetFieldID + +type IDList = id.TagIDList var NewID = id.NewTagID var NewSceneID = id.NewSceneID var NewDatasetID = id.NewDatasetID var NewDatasetSchemaID = id.NewDatasetSchemaID -var NewDatasetFieldID = id.NewDatasetSchemaFieldID +var NewDatasetFieldID = id.NewDatasetFieldID var MustID = id.MustTagID var MustSceneID = id.MustSceneID var MustDatasetID = id.MustDatasetID 
var MustDatasetSchemaID = id.MustDatasetSchemaID -var MustDatasetFieldID = id.MustDatasetSchemaFieldID +var MustDatasetFieldID = id.MustDatasetFieldID var IDFrom = id.TagIDFrom var SceneIDFrom = id.SceneIDFrom var DatasetIDFrom = id.DatasetIDFrom var DatasetSchemaIDFrom = id.DatasetSchemaIDFrom -var DatasetFieldIDFrom = id.DatasetSchemaFieldIDFrom +var DatasetFieldIDFrom = id.DatasetFieldIDFrom var IDFromRef = id.TagIDFromRef var SceneIDFromRef = id.SceneIDFromRef var DatasetIDFromRef = id.DatasetIDFromRef var DatasetSchemaIDFromRef = id.DatasetSchemaIDFromRef -var DatasetFieldIDFromRef = id.DatasetSchemaFieldIDFromRef - -var IDFromRefID = id.TagIDFromRefID -var SceneIDFromRefID = id.SceneIDFromRefID -var DatasetIDFromRefID = id.DatasetIDFromRefID -var DatasetSchemaIDFromRefID = id.DatasetSchemaIDFromRefID -var DatasetFieldIDFromRefID = id.DatasetSchemaFieldIDFromRefID +var DatasetFieldIDFromRef = id.DatasetFieldIDFromRef var ErrInvalidID = id.ErrInvalidID diff --git a/pkg/tag/list.go b/pkg/tag/list.go index 42ee1d352..a4b0535aa 100644 --- a/pkg/tag/list.go +++ b/pkg/tag/list.go @@ -1,58 +1,5 @@ package tag -type IDList struct { - tags []ID -} - -func NewIDList() *IDList { - return &IDList{tags: []ID{}} -} - -func IDListFrom(tags []ID) *IDList { - return &IDList{tags: tags} -} - -func (tl *IDList) Tags() []ID { - if tl == nil || len(tl.tags) == 0 { - return nil - } - return append([]ID{}, tl.tags...) -} - -func (tl *IDList) Has(tid ID) bool { - if tl == nil || tl.tags == nil { - return false - } - for _, tag := range tl.tags { - if tag == tid { - return true - } - } - return false -} - -func (tl *IDList) Add(tags ...ID) { - if tl == nil || tl.tags == nil { - return - } - tl.tags = append(tl.tags, tags...) -} - -func (tl *IDList) Remove(tags ...ID) { - if tl == nil || tl.tags == nil { - return - } - for i := 0; i < len(tl.tags); i++ { - for _, tid := range tags { - if tl.tags[i] == tid { - tl.tags = append(tl.tags[:i], tl.tags[i+1:]...) 
- i-- - break - } - } - } -} - type List []Tag func DerefList(tags []*Tag) List { diff --git a/pkg/tag/list_test.go b/pkg/tag/list_test.go index bf4775bb7..19ccadb1e 100644 --- a/pkg/tag/list_test.go +++ b/pkg/tag/list_test.go @@ -6,83 +6,15 @@ import ( "github.com/stretchr/testify/assert" ) -func TestIDtList_Add(t *testing.T) { - tid := NewID() - var tl *IDList - tl.Add(tid) - assert.Nil(t, tl.Tags()) - tl = NewIDList() - tl.Add(tid) - expected := []ID{tid} - assert.Equal(t, expected, tl.Tags()) -} - -func TestIDList_Remove(t *testing.T) { - tid := NewID() - tid2 := NewID() - tags := []ID{ - tid, - tid2, - } - var tl *IDList - tl.Remove(tid2) - assert.Nil(t, tl.Tags()) - tl = IDListFrom(tags) - tl.Remove(tid2) - expected := []ID{tid} - assert.Equal(t, expected, tl.Tags()) -} - -func TestIDList_Has(t *testing.T) { - tid1 := NewID() - tid2 := NewID() - tags := []ID{ - tid1, - } - - tests := []struct { - Name string - Tags []ID - TID ID - Expected bool - }{ - { - Name: "false: nil tag list", - Expected: false, - }, - { - Name: "false: tag not found", - Tags: tags, - TID: tid2, - Expected: false, - }, - { - Name: "true: tag found", - Tags: tags, - TID: tid1, - Expected: true, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { - t.Parallel() - res := IDListFrom(tc.Tags).Has(tc.TID) - assert.Equal(t, tc.Expected, res) - }) - } -} - func TestList_Items(t *testing.T) { sceneID := NewSceneID() sceneID2 := NewSceneID() tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() - tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ tag1.ID(), tag2.ID(), - }))).MustBuild() + }).MustBuild() tags := List{tag1, tag2, tag3, tag4} assert.Equal(t, []*Item{tag1, tag2, tag3}, tags.Items()) @@ -95,9 +27,9 @@ func 
TestList_Groups(t *testing.T) { tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() - tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ tag1.ID(), tag2.ID(), - }))).MustBuild() + }).MustBuild() tags := List{tag1, tag2, tag3, tag4} assert.Equal(t, []*Group{tag4}, tags.Groups()) @@ -110,9 +42,9 @@ func TestList_FilterByScene(t *testing.T) { tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() - tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ tag1.ID(), tag2.ID(), - }))).MustBuild() + }).MustBuild() tags := List{tag1, tag2, tag3, tag4} assert.Equal(t, List{tag1, tag2, tag4}, tags.FilterByScene(sceneID)) @@ -125,9 +57,9 @@ func TestList_Roots(t *testing.T) { tag1 := NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() tag2 := NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() tag3 := NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() - tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + tag4 := NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ tag1.ID(), tag2.ID(), - }))).MustBuild() + }).MustBuild() tags := List{tag1, tag2, tag3, tag4} assert.Equal(t, List{tag3, tag4}, tags.Roots()) @@ -140,9 +72,9 @@ func TestList_Refs(t *testing.T) { var tag1 Tag = NewItem().NewID().Label("hoge").Scene(sceneID).MustBuild() var tag2 Tag = NewItem().NewID().Label("foo").Scene(sceneID).MustBuild() var tag3 Tag = NewItem().NewID().Label("foo").Scene(sceneID2).MustBuild() - var tag4 Tag = 
NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDListFrom(([]ID{ + var tag4 Tag = NewGroup().NewID().Label("bar").Scene(sceneID).Tags(IDList{ tag1.ID(), tag2.ID(), - }))).MustBuild() + }).MustBuild() tags := List{tag1, tag2, tag3, tag4} assert.Equal(t, []*Tag{&tag1, &tag2, &tag3, &tag4}, tags.Refs()) diff --git a/pkg/tag/map.go b/pkg/tag/map.go index 71557e1e2..26371e388 100644 --- a/pkg/tag/map.go +++ b/pkg/tag/map.go @@ -13,7 +13,7 @@ func (m Map) All() List { res = append(res, t) } sort.SliceStable(res, func(i, j int) bool { - return res[i].ID().ID().Compare(res[j].ID().ID()) < 0 + return res[i].ID().Compare(res[j].ID()) < 0 }) return res } diff --git a/pkg/user/id.go b/pkg/user/id.go index b134a6682..12968a973 100644 --- a/pkg/user/id.go +++ b/pkg/user/id.go @@ -17,51 +17,6 @@ var TeamIDFrom = id.TeamIDFrom var IDFromRef = id.UserIDFromRef var TeamIDFromRef = id.TeamIDFromRef -var IDFromRefID = id.UserIDFromRefID -var TeamIDFromRefID = id.TeamIDFromRefID - var ErrInvalidID = id.ErrInvalidID -type TeamIDList []TeamID - -func (l TeamIDList) Clone() TeamIDList { - if l == nil { - return nil - } - return append(TeamIDList{}, l...) 
-} - -func (l TeamIDList) Filter(ids ...TeamID) TeamIDList { - if l == nil { - return nil - } - - res := make(TeamIDList, 0, len(l)) - for _, t := range l { - for _, t2 := range ids { - if t == t2 { - res = append(res, t) - } - } - } - return res -} - -func (l TeamIDList) Includes(ids ...TeamID) bool { - for _, t := range l { - for _, t2 := range ids { - if t == t2 { - return true - } - } - } - return false -} - -func (k TeamIDList) Len() int { - return len(k) -} - -func (k TeamIDList) Strings() []string { - return id.TeamIDsToStrings(k) -} +type TeamIDList = id.TeamIDList diff --git a/pkg/user/id_test.go b/pkg/user/id_test.go deleted file mode 100644 index de4ee0a9b..000000000 --- a/pkg/user/id_test.go +++ /dev/null @@ -1,47 +0,0 @@ -package user - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestTeamIDList_Clone(t *testing.T) { - t1 := NewTeamID() - t2 := NewTeamID() - t3 := NewTeamID() - ids := TeamIDList{t1, t2, t3} - assert.Equal(t, ids, ids.Clone()) - assert.NotSame(t, ids, ids.Clone()) - assert.Nil(t, TeamIDList(nil).Clone()) -} - -func TestTeamIDList_Filter(t *testing.T) { - t1 := NewTeamID() - t2 := NewTeamID() - t3 := NewTeamID() - t4 := NewTeamID() - assert.Equal(t, TeamIDList{t1}, TeamIDList{t1, t2, t3}.Filter(t1)) - assert.Equal(t, TeamIDList{t1, t3}, TeamIDList{t1, t2, t3}.Filter(t1, t3)) - assert.Equal(t, TeamIDList{}, TeamIDList{t1, t2, t3}.Filter(t4)) - assert.Equal(t, TeamIDList(nil), TeamIDList(nil).Filter(t4)) -} - -func TestTeamIDList_Includes(t *testing.T) { - t1 := NewTeamID() - t2 := NewTeamID() - t3 := NewTeamID() - assert.True(t, TeamIDList{t1, t2, t3}.Includes(t1)) - assert.False(t, TeamIDList{t1, t2}.Includes(t3)) - assert.False(t, TeamIDList(nil).Includes(t1)) -} - -func TestTeamIDList_Len(t *testing.T) { - t1 := NewTeamID() - t2 := NewTeamID() - t3 := NewTeamID() - assert.Equal(t, 2, TeamIDList{t1, t2}.Len()) - assert.Equal(t, 3, TeamIDList{t1, t2, t3}.Len()) - assert.Equal(t, 0, TeamIDList{}.Len()) - 
assert.Equal(t, 0, TeamIDList(nil).Len()) -} diff --git a/pkg/user/members.go b/pkg/user/members.go index cf2e0f5bc..689818514 100644 --- a/pkg/user/members.go +++ b/pkg/user/members.go @@ -114,7 +114,7 @@ func (m *Members) UsersByRole(role Role) []ID { } sort.SliceStable(users, func(a, b int) bool { - return users[a].ID().Compare(users[b].ID()) > 0 + return users[a].Compare(users[b]) > 0 }) return users diff --git a/pkg/util/list.go b/pkg/util/list.go new file mode 100644 index 000000000..d6d6df0e7 --- /dev/null +++ b/pkg/util/list.go @@ -0,0 +1,130 @@ +package util + +import ( + "github.com/samber/lo" + "golang.org/x/exp/slices" +) + +type List[T comparable] []T + +func (l List[T]) Has(elements ...T) bool { + return Any(elements, func(e T) bool { + return slices.Contains(l, e) + }) +} + +func (l List[T]) At(i int) *T { + if len(l) == 0 || i < 0 || len(l) <= i { + return nil + } + e := l[i] + return &e +} + +func (l List[T]) Index(e T) int { + return slices.Index(l, e) +} + +func (l List[T]) Len() int { + return len(l) +} + +func (l List[T]) Copy() List[T] { + if l == nil { + return nil + } + return slices.Clone(l) +} + +func (l List[T]) Ref() *List[T] { + if l == nil { + return nil + } + return &l +} + +func (l List[T]) Refs() []*T { + return Map(l, func(e T) *T { + return &e + }) +} + +func (l List[T]) Delete(elements ...T) List[T] { + if l == nil { + return nil + } + m := l.Copy() + for _, e := range elements { + if j := l.Index(e); j >= 0 { + m = slices.Delete[[]T](m, j, j+1) + } + } + return m +} + +func (l List[T]) DeleteAt(i int) List[T] { + if l == nil { + return nil + } + m := l.Copy() + return slices.Delete(m, i, i+1) +} + +func (l List[T]) Add(elements ...T) List[T] { + res := l.Copy() + for _, e := range elements { + res = append(res, e) + } + return res +} + +func (l List[T]) AddUniq(elements ...T) List[T] { + res := append(List[T]{}, l...) 
+ for _, id := range elements { + if !res.Has(id) { + res = append(res, id) + } + } + return res +} + +func (l List[T]) Insert(i int, elements ...T) List[T] { + if i < 0 || len(l) < i { + return l.Add(elements...) + } + return slices.Insert(l, i, elements...) +} + +func (l List[T]) Move(e T, to int) List[T] { + return l.MoveAt(l.Index(e), to) +} + +func (l List[T]) MoveAt(from, to int) List[T] { + if from < 0 || from == to || len(l) <= from { + return l.Copy() + } + e := l[from] + if from < to { + to-- + } + m := l.DeleteAt(from) + if to < 0 { + return m + } + return m.Insert(to, e) +} + +func (l List[T]) Reverse() List[T] { + return lo.Reverse(l.Copy()) +} + +func (l List[T]) Concat(m []T) List[T] { + return append(l, m...) +} + +func (l List[T]) Intersect(m []T) List[T] { + if l == nil { + return nil + } + return lo.Intersect(m, l) +} diff --git a/pkg/util/list_test.go b/pkg/util/list_test.go new file mode 100644 index 000000000..e54db50d4 --- /dev/null +++ b/pkg/util/list_test.go @@ -0,0 +1,168 @@ +package util + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type T struct{} + +func TestList_Has(t *testing.T) { + l := List[int]{1, 2} + + assert.True(t, l.Has(1)) + assert.True(t, l.Has(1, 3)) + assert.False(t, l.Has(3)) + assert.False(t, List[int](nil).Has(1)) +} + +func TestList_At(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).At(0)) + assert.Nil(t, l.At(-1)) + assert.Equal(t, &a, l.At(0)) + assert.Equal(t, &b, l.At(1)) + assert.Nil(t, l.At(2)) +} + +func TestList_Index(t *testing.T) { + l := List[string]{"a", "b"} + + assert.Equal(t, -1, List[string](nil).Index("a")) + assert.Equal(t, 0, l.Index("a")) + assert.Equal(t, 1, l.Index("b")) + assert.Equal(t, -1, l.Index("c")) +} + +func TestList_Len(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Equal(t, 0, List[T](nil).Len()) + assert.Equal(t, 2, l.Len()) +} + +func TestList_Copy(t *testing.T) { + a := &T{} + b := &T{} + l := 
List[*T]{a, b} + + assert.Nil(t, List[*T](nil).Copy()) + assert.Equal(t, List[*T]{a, b}, l.Copy()) + assert.NotSame(t, l, l.Copy()) + assert.Same(t, a, l.Copy()[0]) + assert.Same(t, b, l.Copy()[1]) +} + +func TestList_Ref(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Ref()) + assert.Equal(t, &List[T]{a, b}, l.Ref()) +} + +func TestList_Refs(t *testing.T) { + a := T{} + b := T{} + l := List[T]{a, b} + + assert.Nil(t, List[T](nil).Refs()) + assert.Equal(t, []*T{&a, &b}, l.Refs()) +} + +func TestList_Delete(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).Delete("a")) + assert.Equal(t, List[string]{"a", "c"}, l.Delete("b")) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_DeleteAt(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).DeleteAt(1)) + assert.Equal(t, List[string]{"a", "c"}, l.DeleteAt(1)) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_Add(t *testing.T) { + l := List[string]{"a", "b"} + + assert.Equal(t, List[string]{"a"}, (List[string])(nil).Add("a")) + assert.Equal(t, List[string]{"a", "b", "c", "d"}, l.Add("c", "d")) + assert.Equal(t, List[string]{"a", "b"}, l) +} + +func TestList_AddUniq(t *testing.T) { + l := List[string]{"a", "b"} + + assert.Equal(t, List[string]{"a"}, (List[string])(nil).AddUniq("a")) + assert.Equal(t, List[string]{"a", "b", "c"}, l.AddUniq("a", "c")) + assert.Equal(t, List[string]{"a", "b"}, l) +} + +func TestList_Insert(t *testing.T) { + a := T{} + b := T{} + c := T{} + l := List[T]{a, b} + + assert.Equal(t, List[T]{a, b, c}, l.Insert(-1, c)) + assert.Equal(t, List[T]{c, a, b}, l.Insert(0, c)) + assert.Equal(t, List[T]{a, c, b}, l.Insert(1, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(2, c)) + assert.Equal(t, List[T]{a, b, c}, l.Insert(3, c)) + assert.Equal(t, List[T]{a, b}, l) +} + +func TestList_Move(t *testing.T) { + l := List[string]{"a", "b", "c"} + + 
assert.Nil(t, (List[string])(nil).Move("a", -1)) + assert.Equal(t, List[string]{"b", "c"}, l.Move("a", -1)) + assert.Equal(t, List[string]{"a", "b", "c"}, l) + assert.Equal(t, List[string]{"c", "a", "b"}, l.Move("c", 0)) + assert.Equal(t, List[string]{"a", "c", "b"}, l.Move("b", 10)) +} + +func TestList_MoveAt(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).MoveAt(0, -1)) + assert.Equal(t, List[string]{"b", "c"}, l.MoveAt(0, -1)) + assert.Equal(t, List[string]{"a", "b", "c"}, l) + assert.Equal(t, List[string]{"c", "a", "b"}, l.MoveAt(2, 0)) + assert.Equal(t, List[string]{"a", "c", "b"}, l.MoveAt(1, 10)) +} + +func TestList_Reverse(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).Reverse()) + assert.Equal(t, List[string]{"c", "b", "a"}, l.Reverse()) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_Concat(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Equal(t, List[string]{"a"}, (List[string])(nil).Concat(List[string]{"a"})) + assert.Equal(t, List[string]{"a", "b", "c", "d", "e"}, l.Concat(List[string]{"d", "e"})) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} + +func TestList_Intersect(t *testing.T) { + l := List[string]{"a", "b", "c"} + + assert.Nil(t, (List[string])(nil).Intersect(List[string]{"a"})) + assert.Equal(t, List[string]{"a", "b"}, l.Intersect(List[string]{"b", "e", "a"})) + assert.Equal(t, List[string]{"a", "b", "c"}, l) +} diff --git a/pkg/util/map.go b/pkg/util/map.go new file mode 100644 index 000000000..6d40dd47b --- /dev/null +++ b/pkg/util/map.go @@ -0,0 +1,126 @@ +package util + +import "sync" + +type SyncMap[K, V any] struct { + m sync.Map +} + +func (m *SyncMap[K, V]) Load(key K) (vv V, _ bool) { + v, ok := m.m.Load(key) + if ok { + vv = v.(V) + } + return vv, ok +} + +func (m *SyncMap[K, V]) LoadAll(keys ...K) (r []V) { + for _, k := range keys { + v, ok := m.Load(k) + if ok { + r = append(r, v) + } + } + return r +} + 
+func (m *SyncMap[K, V]) Store(key K, value V) { + m.m.Store(key, value) +} + +func (m *SyncMap[K, V]) LoadOrStore(key K, value V) (vv V, _ bool) { + v, ok := m.m.LoadOrStore(key, value) + if ok { + vv = v.(V) + } + return vv, ok +} + +func (m *SyncMap[K, V]) Delete(key K) { + m.m.Delete(key) +} + +func (m *SyncMap[K, V]) DeleteAll(key ...K) { + for _, k := range key { + m.Delete(k) + } +} + +func (m *SyncMap[K, V]) Range(f func(key K, value V) bool) { + m.m.Range(func(key, value any) bool { + return f(key.(K), value.(V)) + }) +} + +func (m *SyncMap[K, V]) Find(f func(key K, value V) bool) (v V) { + m.Range(func(key K, value V) bool { + if f(key, value) { + v = value + return false + } + return true + }) + return +} + +func (m *SyncMap[K, V]) FindAll(f func(key K, value V) bool) (v []V) { + m.Range(func(key K, value V) bool { + if f(key, value) { + v = append(v, value) + } + return true + }) + return +} + +func (m *SyncMap[K, V]) Clone() *SyncMap[K, V] { + if m == nil { + return nil + } + n := &SyncMap[K, V]{} + m.Range(func(key K, value V) bool { + n.Store(key, value) + return true + }) + return n +} + +func (m *SyncMap[K, V]) Map(f func(K, V) V) *SyncMap[K, V] { + n := m.Clone() + n.Range(func(key K, value V) bool { + n.Store(key, f(key, value)) + return true + }) + return n +} + +func (m *SyncMap[K, V]) Merge(n *SyncMap[K, V]) { + n.Range(func(key K, value V) bool { + m.Store(key, value) + return true + }) +} + +func (m *SyncMap[K, V]) Keys() (l []K) { + m.Range(func(key K, _ V) bool { + l = append(l, key) + return true + }) + return l +} + +func (m *SyncMap[K, V]) Values() (l []V) { + m.Range(func(_ K, value V) bool { + l = append(l, value) + return true + }) + return l +} + +func (m *SyncMap[K, V]) Len() (i int) { + m.m.Range(func(_ any, _ any) bool { + i++ + return true + }) + return +} diff --git a/pkg/util/map_test.go b/pkg/util/map_test.go new file mode 100644 index 000000000..2ebc2b8af --- /dev/null +++ b/pkg/util/map_test.go @@ -0,0 +1,180 @@ +package 
util + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/exp/slices" +) + +func TestSyncMap_Load_Store(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + + res, ok := s.Load("a") + assert.Equal(t, 1, res) + assert.True(t, ok) + + res, ok = s.Load("b") + assert.Equal(t, 0, res) + assert.False(t, ok) +} + +func TestSyncMap_LoadAll(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + assert.Equal(t, []int{1, 2}, s.LoadAll("a", "b", "c")) + assert.Equal(t, []int(nil), s.LoadAll("d")) +} + +func TestSyncMap_LoadOrStore(t *testing.T) { + s := &SyncMap[string, string]{} + res, ok := s.LoadOrStore("a", "A") + assert.Equal(t, "", res) + assert.False(t, ok) + res, ok = s.LoadOrStore("a", "AA") + assert.Equal(t, "A", res) + assert.True(t, ok) + res, ok = s.Load("a") + assert.Equal(t, "A", res) + assert.True(t, ok) +} + +func TestSyncMap_Delete(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + + s.Delete("a") + res, ok := s.Load("a") + assert.Equal(t, 0, res) + assert.False(t, ok) + + s.Delete("b") // no panic +} + +func TestSyncMap_DeleteAll(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + s.DeleteAll("a", "b") + res, ok := s.Load("a") + assert.Equal(t, 0, res) + assert.False(t, ok) + res, ok = s.Load("b") + assert.Equal(t, 0, res) + assert.False(t, ok) + + s.DeleteAll("c") // no panic +} + +func TestSyncMap_Range(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + var vv int + s.Range(func(k string, v int) bool { + if k == "a" { + vv = v + return true + } + return false + }) + assert.Equal(t, 1, vv) +} + +func TestSyncMap_Find(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + res := s.Find(func(k string, v int) bool { + return k == "a" + }) + assert.Equal(t, 1, res) + + res = s.Find(func(k string, v int) bool { + return k == "c" + }) + assert.Equal(t, 0, res) +} + +func 
TestSyncMap_FindAll(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + + res := s.FindAll(func(k string, v int) bool { + return k == "a" || k == "b" + }) + slices.Sort(res) + assert.Equal(t, []int{1, 2}, res) + + res = s.FindAll(func(k string, v int) bool { + return k == "c" + }) + assert.Equal(t, []int(nil), res) +} + +func TestSyncMap_Map(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + u := s.Map(func(k string, v int) int { + if k == "a" { + return 3 + } + return v + }) + + keys := u.Keys() + slices.Sort(keys) + values := u.Values() + slices.Sort(values) + assert.Equal(t, []string{"a", "b"}, keys) + assert.Equal(t, []int{2, 3}, values) +} + +func TestSyncMap_Merge(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + u := &SyncMap[string, int]{} + u.Store("c", 3) + s.Merge(u) + + keys := s.Keys() + slices.Sort(keys) + values := s.Values() + slices.Sort(values) + assert.Equal(t, []string{"a", "b", "c"}, keys) + assert.Equal(t, []int{1, 2, 3}, values) +} + +func TestSyncMap_Keys(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + keys := s.Keys() + slices.Sort(keys) + assert.Equal(t, []string{"a", "b"}, keys) +} + +func TestSyncMap_Values(t *testing.T) { + s := &SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + values := s.Values() + slices.Sort(values) + assert.Equal(t, []int{1, 2}, values) +} + +func TestSyncMap_Len(t *testing.T) { + s := SyncMap[string, int]{} + s.Store("a", 1) + s.Store("b", 2) + assert.Equal(t, 2, s.Len()) +} diff --git a/pkg/util/slice.go b/pkg/util/slice.go new file mode 100644 index 000000000..be65b65ac --- /dev/null +++ b/pkg/util/slice.go @@ -0,0 +1,135 @@ +package util + +import "github.com/samber/lo" + +type Element[T any] struct { + Index int + Element T +} + +// Enumerate returns a new slice with each element and its index. 
+func Enumerate[T any](collection []T) []Element[T] { + if collection == nil { + return nil + } + + return lo.Map(collection, func(e T, i int) Element[T] { + return Element[T]{ + Index: i, + Element: e, + } + }) +} + +// Map is similar to lo.Map, but accepts an iteratee without the index argument. +func Map[T any, V any](collection []T, iteratee func(v T) V) []V { + if collection == nil { + return nil + } + + return lo.Map(collection, func(v T, _ int) V { + return iteratee(v) + }) +} + +// TryMap is similar to Map, but when an error occurs in the iteratee, it terminates the iteration and returns an error. +func TryMap[T any, V any](collection []T, iteratee func(v T) (V, error)) ([]V, error) { + if collection == nil { + return nil, nil + } + + m := make([]V, 0, len(collection)) + for _, e := range collection { + j, err := iteratee(e) + if err != nil { + return nil, err + } + m = append(m, j) + } + return m, nil +} + +// FilterMap is similar to Map, but if the iteratee returns nil, that element will be omitted from the new slice. +func FilterMap[T any, V any](collection []T, iteratee func(v T) *V) []V { + if collection == nil { + return nil + } + + m := make([]V, 0, len(collection)) + for _, e := range collection { + if j := iteratee(e); j != nil { + m = append(m, *j) + } + } + return m +} + +// FilterMapOk is similar to FilterMap, but the iteratee can return a boolean as the second return value, +// and it is false, that element will be omitted from the new slice. +func FilterMapOk[T any, V any](collection []T, iteratee func(v T) (V, bool)) []V { + if collection == nil { + return nil + } + + m := make([]V, 0, len(collection)) + for _, e := range collection { + if j, ok := iteratee(e); ok { + m = append(m, j) + } + } + return m +} + +// FilterMapR is similar to FilterMap, but if the return value of the iteratee is not nil, +// it is not dereferenced and is used as the value of the new element. 
+func FilterMapR[T any, V any](collection []T, iteratee func(v T) *V) []*V { + if collection == nil { + return nil + } + + m := make([]*V, 0, len(collection)) + for _, e := range collection { + if j := iteratee(e); j != nil { + m = append(m, j) + } + } + return m +} + +// https://github.com/samber/lo/issues/54 +func All[T any](collection []T, predicate func(T) bool) bool { + for _, e := range collection { + if !predicate(e) { + return false + } + } + return true +} + +// https://github.com/samber/lo/issues/54 +func Any[T any](collection []T, predicate func(T) bool) bool { + for _, e := range collection { + if predicate(e) { + return true + } + } + return false +} + +// Filter is similar to lo.Filter, but accepts an iteratee without the index argument. +func Filter[T any](collection []T, iteratee func(v T) bool) []T { + if collection == nil { + return nil + } + + return lo.Filter(collection, func(v T, _ int) bool { + return iteratee(v) + }) +} + +// DerefSlice drops nil elements in the slice and return a new slice with dereferenced elements. 
+func DerefSlice[T any](collection []*T) []T { + return FilterMap(collection, func(e *T) *T { + return e + }) +} diff --git a/pkg/util/slice_test.go b/pkg/util/slice_test.go new file mode 100644 index 000000000..051ac8932 --- /dev/null +++ b/pkg/util/slice_test.go @@ -0,0 +1,95 @@ +package util + +import ( + "errors" + "testing" + + "github.com/samber/lo" + "github.com/stretchr/testify/assert" +) + +func TestEnumerate(t *testing.T) { + assert.Nil(t, Enumerate[int](nil)) + assert.Equal(t, []Element[int]{ + {Index: 0, Element: 3}, + {Index: 1, Element: 2}, + {Index: 2, Element: 1}, + }, Enumerate([]int{3, 2, 1})) +} + +func TestMap(t *testing.T) { + assert.Nil(t, Map[int, bool](nil, nil)) + assert.Equal(t, []bool{true, false, true}, Map([]int{1, 0, 2}, func(i int) bool { return i != 0 })) +} + +func TestTryMap(t *testing.T) { + res, err := TryMap[int, bool](nil, nil) + assert.Nil(t, res) + assert.NoError(t, err) + + iteratee := func(i int) (bool, error) { + if i == 0 { + return false, errors.New("aaa") + } + return true, nil + } + res, err = TryMap([]int{1, 2, 3}, iteratee) + assert.Equal(t, []bool{true, true, true}, res) + assert.NoError(t, err) + + res, err = TryMap([]int{1, 0, 3}, iteratee) + assert.Nil(t, res) + assert.Equal(t, errors.New("aaa"), err) +} + +func TestFilterMap(t *testing.T) { + assert.Nil(t, FilterMap[int, bool](nil, nil)) + assert.Equal(t, []bool{true, false}, FilterMap([]int{1, 0, 2}, func(i int) *bool { + if i == 0 { + return nil + } + return lo.ToPtr(i == 1) + })) +} + +func TestFilterMapOk(t *testing.T) { + assert.Nil(t, FilterMapOk[int, bool](nil, nil)) + assert.Equal(t, []bool{true, false}, FilterMapOk([]int{1, 0, 2}, func(i int) (bool, bool) { + if i == 0 { + return false, false + } + return i == 1, true + })) +} + +func TestFilterR(t *testing.T) { + assert.Nil(t, FilterMapR[int, bool](nil, nil)) + assert.Equal(t, []*bool{lo.ToPtr(true), lo.ToPtr(false)}, FilterMapR([]int{1, 0, 2}, func(i int) *bool { + if i == 0 { + return nil + } + 
return lo.ToPtr(i == 1) + })) +} + +func TestAll(t *testing.T) { + assert.True(t, All([]int{1, 2, 3}, func(i int) bool { return i < 4 })) + assert.False(t, All([]int{1, 2, 3}, func(i int) bool { return i < 3 })) +} + +func TestAny(t *testing.T) { + assert.True(t, Any([]int{1, 2, 3}, func(i int) bool { return i == 1 })) + assert.False(t, Any([]int{1, 2, 3}, func(i int) bool { return i == 4 })) +} + +func TestFilter(t *testing.T) { + assert.Nil(t, Filter[int](nil, nil)) + assert.Equal(t, []int{1, 2}, Filter([]int{1, 0, 2}, func(i int) bool { + return i != 0 + })) +} + +func TestDerefSlice(t *testing.T) { + assert.Nil(t, DerefSlice[int](nil)) + assert.Equal(t, []int{1, 0, 2}, DerefSlice([]*int{lo.ToPtr(1), nil, lo.ToPtr(0), lo.ToPtr(2)})) +} diff --git a/pkg/util/util.go b/pkg/util/util.go new file mode 100644 index 000000000..6c4b1383c --- /dev/null +++ b/pkg/util/util.go @@ -0,0 +1,25 @@ +package util + +func Must[T any](v T, err error) T { + if err != nil { + panic(err) + } + return v +} + +func IsZero[T comparable](v T) bool { + var z T + return v == z +} + +func IsNotZero[T comparable](v T) bool { + return !IsZero(v) +} + +func Deref[T any](r *T) T { + if r == nil { + var z T + return z + } + return *r +} diff --git a/pkg/util/util_test.go b/pkg/util/util_test.go new file mode 100644 index 000000000..c017afb43 --- /dev/null +++ b/pkg/util/util_test.go @@ -0,0 +1,52 @@ +package util + +import ( + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestMust(t *testing.T) { + a := &struct{}{} + err := errors.New("ERR") + assert.Same(t, a, Must(a, nil)) + assert.PanicsWithValue(t, err, func() { + _ = Must(a, err) + }) +} + +func TestIsZero(t *testing.T) { + assert.True(t, IsZero(0)) + assert.False(t, IsZero(-1)) + assert.True(t, IsZero(struct { + A int + B string + }{})) + assert.False(t, IsZero(struct { + A int + B string + }{A: 1})) + assert.True(t, IsZero((*(struct{}))(nil))) + assert.False(t, IsZero((*(struct{}))(&struct{}{}))) +} + +func 
TestIsNotZero(t *testing.T) { + assert.False(t, IsNotZero(0)) + assert.True(t, IsNotZero(-1)) + assert.False(t, IsNotZero(struct { + A int + B string + }{})) + assert.True(t, IsNotZero(struct { + A int + B string + }{A: 1})) + assert.False(t, IsNotZero((*(struct{}))(nil))) + assert.True(t, IsNotZero((*(struct{}))(&struct{}{}))) +} + +func TestDeref(t *testing.T) { + assert.Equal(t, struct{ A int }{}, Deref((*(struct{ A int }))(nil))) + assert.Equal(t, struct{ A int }{A: 1}, Deref((*(struct{ A int }))(&struct{ A int }{A: 1}))) +} diff --git a/pkg/value/ref.go b/pkg/value/ref.go index f065ee915..0fed13345 100644 --- a/pkg/value/ref.go +++ b/pkg/value/ref.go @@ -1,6 +1,6 @@ package value -import "github.com/reearth/reearth-backend/pkg/id" +import "fmt" var TypeRef Type = "ref" @@ -13,11 +13,11 @@ func (*propertyRef) I2V(i interface{}) (interface{}, bool) { if v, ok := i.(*string); ok { return *v, true } - if v, ok := i.(id.ID); ok { + if v, ok := i.(fmt.Stringer); ok { return v.String(), true } - if v, ok := i.(*id.ID); ok && v != nil { - return v.String(), true + if v, ok := i.(*fmt.Stringer); ok && v != nil { + return (*v).String(), true } return nil, false } diff --git a/schema.graphql b/schema.graphql index 4876423fe..4060ae435 100644 --- a/schema.graphql +++ b/schema.graphql @@ -49,12 +49,6 @@ scalar DateTime scalar URL scalar Lang scalar FileSize -scalar PluginID -scalar PluginExtensionID -scalar PropertySchemaID -scalar PropertySchemaGroupID -scalar PropertySchemaFieldID -scalar DatasetSchemaFieldID scalar TranslatedString type LatLng { @@ -163,6 +157,12 @@ type User implements Node { id: ID! name: String! email: String! +} + +type Me { + id: ID! + name: String! + email: String! lang: Lang! theme: Theme! myTeamId: ID! @@ -171,12 +171,6 @@ type User implements Node { myTeam: Team! @goField(forceResolver: true) } -type SearchedUser { - userId: ID! - userName: String! - userEmail: String! -} - type ProjectAliasAvailability { alias: String! available: Boolean! 
@@ -256,21 +250,21 @@ enum PublishmentStatus { # Plugin type Plugin { - id: PluginID! + id: ID! sceneId: ID name: String! version: String! description: String! author: String! repositoryUrl: String! - propertySchemaId: PropertySchemaID + propertySchemaId: ID extensions: [PluginExtension!]! scenePlugin(sceneId: ID): ScenePlugin allTranslatedDescription: TranslatedString allTranslatedName: TranslatedString scene: Scene @goField(forceResolver: true) - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) propertySchema: PropertySchema @goField(forceResolver: true) } @@ -332,8 +326,8 @@ enum PluginExtensionType { } type PluginExtension { - extensionId: PluginExtensionID! - pluginId: PluginID! + extensionId: ID! + pluginId: ID! type: PluginExtensionType! name: String! description: String! @@ -341,14 +335,14 @@ type PluginExtension { singleOnly: Boolean widgetLayout: WidgetLayout visualizer: Visualizer - propertySchemaId: PropertySchemaID! + propertySchemaId: ID! allTranslatedName: TranslatedString allTranslatedDescription: TranslatedString plugin: Plugin @goField(forceResolver: true) sceneWidget(sceneId: ID!): SceneWidget @goField(forceResolver: true) propertySchema: PropertySchema @goField(forceResolver: true) - translatedName(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + translatedName(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) } # Scene @@ -369,7 +363,6 @@ type Scene implements Node { team: Team @goField(forceResolver: true) property: Property @goField(forceResolver: true) rootLayer: LayerGroup @goField(forceResolver: true) - lockMode: SceneLockMode! 
@goField(forceResolver: true) datasetSchemas( first: Int last: Int @@ -381,18 +374,10 @@ type Scene implements Node { clusters: [Cluster!]! } -enum SceneLockMode { - FREE - PENDING - DATASET_SYNCING - PLUGIN_UPGRADING - PUBLISHING -} - type SceneWidget { id: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! propertyId: ID! enabled: Boolean! extended: Boolean! @@ -402,7 +387,7 @@ type SceneWidget { } type ScenePlugin { - pluginId: PluginID! + pluginId: ID! propertyId: ID plugin: Plugin @goField(forceResolver: true) property: Property @goField(forceResolver: true) @@ -433,36 +418,36 @@ type WidgetArea { # Property type PropertySchema { - id: PropertySchemaID! + id: ID! groups: [PropertySchemaGroup!]! linkableFields: PropertyLinkableFields! } type PropertyLinkableFields { - schemaId: PropertySchemaID! - latlng: PropertySchemaFieldID - url: PropertySchemaFieldID + schemaId: ID! + latlng: ID + url: ID latlngField: PropertySchemaField @goField(forceResolver: true) urlField: PropertySchemaField @goField(forceResolver: true) schema: PropertySchema @goField(forceResolver: true) } type PropertySchemaGroup { - schemaGroupId: PropertySchemaGroupID! - schemaId: PropertySchemaID! + schemaGroupId: ID! + schemaId: ID! fields: [PropertySchemaField!]! isList: Boolean! isAvailableIf: PropertyCondition title: String allTranslatedTitle: TranslatedString - representativeFieldId: PropertySchemaFieldID + representativeFieldId: ID representativeField: PropertySchemaField schema: PropertySchema @goField(forceResolver: true) - translatedTitle(lang: String): String! @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) } type PropertySchemaField { - fieldId: PropertySchemaFieldID! + fieldId: ID! type: ValueType! title: String! description: String! 
@@ -476,8 +461,8 @@ type PropertySchemaField { isAvailableIf: PropertyCondition allTranslatedTitle: TranslatedString allTranslatedDescription: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) - translatedDescription(lang: String): String! @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) + translatedDescription(lang: Lang): String! @goField(forceResolver: true) } enum PropertySchemaFieldUI { @@ -497,18 +482,18 @@ type PropertySchemaFieldChoice { title: String! icon: String allTranslatedTitle: TranslatedString - translatedTitle(lang: String): String! @goField(forceResolver: true) + translatedTitle(lang: Lang): String! @goField(forceResolver: true) } type PropertyCondition { - fieldId: PropertySchemaFieldID! + fieldId: ID! type: ValueType! value: Any } type Property implements Node { id: ID! - schemaId: PropertySchemaID! + schemaId: ID! items: [PropertyItem!]! schema: PropertySchema @goField(forceResolver: true) layer: Layer @goField(forceResolver: true) @@ -519,8 +504,8 @@ union PropertyItem = PropertyGroup | PropertyGroupList type PropertyGroup { id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaGroupID! + schemaId: ID! + schemaGroupId: ID! fields: [PropertyField!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -528,8 +513,8 @@ type PropertyGroup { type PropertyGroupList { id: ID! - schemaId: PropertySchemaID! - schemaGroupId: PropertySchemaGroupID! + schemaId: ID! + schemaGroupId: ID! groups: [PropertyGroup!]! schema: PropertySchema @goField(forceResolver: true) schemaGroup: PropertySchemaGroup @goField(forceResolver: true) @@ -538,8 +523,8 @@ type PropertyGroupList { type PropertyField { id: String! parentId: ID! - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! + schemaId: ID! + fieldId: ID! links: [PropertyFieldLink!] type: ValueType! 
value: Any @@ -563,7 +548,7 @@ type MergedProperty { originalId: ID parentId: ID # note: schemaId will not always be set - schemaId: PropertySchemaID + schemaId: ID linkedDatasetId: ID original: Property @goField(forceResolver: true) parent: Property @goField(forceResolver: true) @@ -577,9 +562,9 @@ type MergedPropertyGroup { parentPropertyId: ID originalId: ID parentId: ID - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! # note: schemaId will not always be set - schemaId: PropertySchemaID + schemaId: ID linkedDatasetId: ID fields: [MergedPropertyField!]! groups: [MergedPropertyGroup!]! @@ -592,8 +577,8 @@ type MergedPropertyGroup { } type MergedPropertyField { - schemaId: PropertySchemaID! - fieldId: PropertySchemaFieldID! + schemaId: ID! + fieldId: ID! value: Any type: ValueType! links: [PropertyFieldLink!] @@ -662,8 +647,8 @@ interface Layer { name: String! isVisible: Boolean! propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID infobox: Infobox # parentId will not be always set parentId: ID @@ -689,8 +674,8 @@ type LayerItem implements Layer { name: String! isVisible: Boolean! propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID infobox: Infobox # parentId will not be always set parentId: ID @@ -712,8 +697,8 @@ type LayerGroup implements Layer { name: String! isVisible: Boolean! propertyId: ID - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID infobox: Infobox # parentId will not be always set parentId: ID @@ -749,8 +734,8 @@ type InfoboxField { sceneId: ID! layerId: ID! propertyId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! linkedDatasetId: ID layer: Layer! @goField(forceResolver: true) infobox: Infobox! @goField(forceResolver: true) @@ -800,8 +785,8 @@ type MergedInfobox { type MergedInfoboxField { originalId: ID! sceneID: ID! - pluginId: PluginID! 
- extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! property: MergedProperty plugin: Plugin @goField(forceResolver: true) extension: PluginExtension @goField(forceResolver: true) @@ -970,8 +955,8 @@ input WidgetLocationInput { input AddWidgetInput { sceneId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! } input UpdateWidgetInput { @@ -996,18 +981,18 @@ input RemoveWidgetInput { input InstallPluginInput { sceneId: ID! - pluginId: PluginID! + pluginId: ID! } input UninstallPluginInput { sceneId: ID! - pluginId: PluginID! + pluginId: ID! } input UpgradePluginInput { sceneId: ID! - pluginId: PluginID! - toPluginId: PluginID! + pluginId: ID! + toPluginId: ID! } input SyncDatasetInput { @@ -1017,33 +1002,33 @@ input SyncDatasetInput { input UpdatePropertyValueInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! value: Any type: ValueType! } input RemovePropertyFieldInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! } input UploadFileToPropertyInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! file: Upload! } input LinkDatasetToPropertyValueInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! datasetSchemaIds: [ID!]! datasetSchemaFieldIds: [ID!]! datasetIds: [ID!] @@ -1051,14 +1036,14 @@ input LinkDatasetToPropertyValueInput { input UnlinkPropertyValueInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID + schemaGroupId: ID itemId: ID - fieldId: PropertySchemaFieldID! + fieldId: ID! } input AddPropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! 
index: Int nameFieldValue: Any nameFieldType: ValueType @@ -1066,20 +1051,20 @@ input AddPropertyItemInput { input MovePropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! itemId: ID! index: Int! } input RemovePropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! itemId: ID! } input UpdatePropertyItemInput { propertyId: ID! - schemaGroupId: PropertySchemaGroupID! + schemaGroupId: ID! operations: [UpdatePropertyItemOperationInput!]! } @@ -1093,8 +1078,8 @@ input UpdatePropertyItemOperationInput { input AddLayerItemInput { parentLayerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! index: Int name: String lat: Float @@ -1103,12 +1088,12 @@ input AddLayerItemInput { input AddLayerGroupInput { parentLayerId: ID! - pluginId: PluginID - extensionId: PluginExtensionID + pluginId: ID + extensionId: ID index: Int linkedDatasetSchemaID: ID name: String - representativeFieldId: DatasetSchemaFieldID + representativeFieldId: ID } input RemoveLayerInput { @@ -1137,8 +1122,8 @@ input RemoveInfoboxInput { input AddInfoboxFieldInput { layerId: ID! - pluginId: PluginID! - extensionId: PluginExtensionID! + pluginId: ID! + extensionId: ID! index: Int } @@ -1275,7 +1260,7 @@ type RemoveAssetPayload { } type UpdateMePayload { - user: User! + me: Me! } type SignupPayload { @@ -1354,7 +1339,7 @@ type InstallPluginPayload { } type UninstallPluginPayload { - pluginId: PluginID! + pluginId: ID! scene: Scene! } @@ -1564,13 +1549,13 @@ type DatasetEdge { # Query type Query { - me: User + me: Me node(id: ID!, type: NodeType!): Node nodes(id: [ID!]!, type: NodeType!): [Node]! - propertySchema(id: PropertySchemaID!): PropertySchema - propertySchemas(id: [PropertySchemaID!]!): [PropertySchema!]! - plugin(id: PluginID!): Plugin - plugins(id: [PluginID!]!): [Plugin!]! + propertySchema(id: ID!): PropertySchema + propertySchemas(id: [ID!]!): [PropertySchema!]! 
+ plugin(id: ID!): Plugin + plugins(id: [ID!]!): [Plugin!]! layer(id: ID!): Layer scene(projectId: ID!): Scene assets( @@ -1601,9 +1586,8 @@ type Query { after: Cursor before: Cursor ): DatasetConnection! - sceneLock(sceneId: ID!): SceneLockMode dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! - searchUser(nameOrEmail: String!): SearchedUser + searchUser(nameOrEmail: String!): User checkProjectAlias(alias: String!): ProjectAliasAvailability! installablePlugins: [PluginMetadata!]! } From 742be871cf1073e729bf97aa6f284cc47d22024e Mon Sep 17 00:00:00 2001 From: issmail-basel Date: Wed, 20 Apr 2022 07:13:00 +0000 Subject: [PATCH 202/253] v0.6.1 --- CHANGELOG.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c22e0f84a..e59172bf5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,22 @@ # Changelog All notable changes to this project will be documented in this file. +## 0.6.1 - 2022-04-20 + +### ๐Ÿ”ง Bug Fixes + +- Renovate bot not running on schedule ([#136](https://github.com/reearth/reearth-backend/pull/136)) [`82843f`](https://github.com/reearth/reearth-backend/commit/82843f) +- Aud was changed and jwt could not be validated correctly [`985100`](https://github.com/reearth/reearth-backend/commit/985100) +- Auth audiences were unintentionally required [`7ec76a`](https://github.com/reearth/reearth-backend/commit/7ec76a) + +### โœจ Refactor + +- Introduce generics, reorganize GraphQL schema ([#135](https://github.com/reearth/reearth-backend/pull/135)) [`04a098`](https://github.com/reearth/reearth-backend/commit/04a098) + +### Miscellaneous Tasks + +- Update dependencies ([#134](https://github.com/reearth/reearth-backend/pull/134)) [`1b9b6b`](https://github.com/reearth/reearth-backend/commit/1b9b6b) + ## 0.6.0 - 2022-04-08 ### ๐Ÿš€ Features From a79376175e2fb983f3e367b53b068bc93a53c5ec Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 20 Apr 2022 19:04:27 +0900 Subject: [PATCH 203/253] fix: signup api 
requires password field --- internal/adapter/http/user.go | 4 +- internal/usecase/interactor/user_signup.go | 5 +- .../usecase/interactor/user_signup_test.go | 47 +++++++++---------- internal/usecase/interfaces/user.go | 2 +- 4 files changed, 26 insertions(+), 32 deletions(-) diff --git a/internal/adapter/http/user.go b/internal/adapter/http/user.go index 834d26f4a..e71cf91fc 100644 --- a/internal/adapter/http/user.go +++ b/internal/adapter/http/user.go @@ -85,12 +85,12 @@ func (c *UserController) Signup(ctx context.Context, input SignupInput) (SignupO Theme: input.Theme, }, }) - } else if input.Name != nil && input.Email != nil && input.Password != nil { + } else if input.Name != nil && input.Email != nil { u, _, err = c.usecase.Signup(ctx, interfaces.SignupParam{ Sub: input.Sub, Name: *input.Name, Email: *input.Email, - Password: *input.Password, + Password: input.Password, Secret: input.Secret, User: interfaces.SignupUserParam{ UserID: input.UserID, diff --git a/internal/usecase/interactor/user_signup.go b/internal/usecase/interactor/user_signup.go index 19cff4009..8450a1450 100644 --- a/internal/usecase/interactor/user_signup.go +++ b/internal/usecase/interactor/user_signup.go @@ -20,9 +20,6 @@ import ( ) func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (*user.User, *user.Team, error) { - if inp.Password == "" { - return nil, nil, interfaces.ErrSignupInvalidPassword - } if inp.Name == "" { return nil, nil, interfaces.ErrSignupInvalidName } @@ -66,7 +63,7 @@ func (i *User) Signup(ctx context.Context, inp interfaces.SignupParam) (*user.Us Email: inp.Email, Name: inp.Name, Sub: auth, - Password: &inp.Password, + Password: inp.Password, Lang: inp.User.Lang, Theme: inp.User.Theme, UserID: inp.User.UserID, diff --git a/internal/usecase/interactor/user_signup_test.go b/internal/usecase/interactor/user_signup_test.go index 8fb19be00..23b7cc520 100644 --- a/internal/usecase/interactor/user_signup_test.go +++ 
b/internal/usecase/interactor/user_signup_test.go @@ -15,6 +15,7 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/interfaces" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/user" + "github.com/samber/lo" "github.com/stretchr/testify/assert" "golang.org/x/text/language" ) @@ -47,10 +48,10 @@ func TestUser_Signup(t *testing.T) { signupSecret: "", authSrvUIDomain: "https://reearth.io", args: interfaces.SignupParam{ - Sub: sr("SUB"), + Sub: lo.ToPtr("SUB"), Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", + Password: lo.ToPtr("PAss00!!"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -88,7 +89,7 @@ func TestUser_Signup(t *testing.T) { args: interfaces.SignupParam{ Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", + Password: lo.ToPtr("PAss00!!"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -117,10 +118,10 @@ func TestUser_Signup(t *testing.T) { Verification(user.VerificationFrom(mockcode, mocktime, true)). 
MustBuild(), args: interfaces.SignupParam{ - Sub: sr("SUB"), + Sub: lo.ToPtr("SUB"), Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", + Password: lo.ToPtr("PAss00!!"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -135,11 +136,11 @@ func TestUser_Signup(t *testing.T) { signupSecret: "", authSrvUIDomain: "", args: interfaces.SignupParam{ - Sub: sr("SUB"), + Sub: lo.ToPtr("SUB"), Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", - Secret: sr("hogehoge"), + Password: lo.ToPtr("PAss00!!"), + Secret: lo.ToPtr("hogehoge"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -170,11 +171,11 @@ func TestUser_Signup(t *testing.T) { signupSecret: "SECRET", authSrvUIDomain: "", args: interfaces.SignupParam{ - Sub: sr("SUB"), + Sub: lo.ToPtr("SUB"), Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", - Secret: sr("SECRET"), + Password: lo.ToPtr("PAss00!!"), + Secret: lo.ToPtr("SECRET"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -209,11 +210,11 @@ func TestUser_Signup(t *testing.T) { signupSecret: "SECRET", authSrvUIDomain: "", args: interfaces.SignupParam{ - Sub: sr("SUB"), + Sub: lo.ToPtr("SUB"), Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", - Secret: sr("SECRET!"), + Password: lo.ToPtr("PAss00!!"), + Secret: lo.ToPtr("SECRET!"), }, wantError: interfaces.ErrSignupInvalidSecret, }, @@ -222,10 +223,10 @@ func TestUser_Signup(t *testing.T) { signupSecret: "SECRET", authSrvUIDomain: "", args: interfaces.SignupParam{ - Sub: sr("SUB"), + Sub: lo.ToPtr("SUB"), Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00!!", + Password: lo.ToPtr("PAss00!!"), }, wantError: interfaces.ErrSignupInvalidSecret, }, @@ -234,7 +235,7 @@ func TestUser_Signup(t *testing.T) { args: interfaces.SignupParam{ Email: "aaa", Name: "NAME", - Password: "PAss00!!", + Password: lo.ToPtr("PAss00!!"), }, wantError: user.ErrInvalidEmail, }, @@ -243,7 +244,7 @@ func TestUser_Signup(t *testing.T) { args: 
interfaces.SignupParam{ Email: "aaa@bbb.com", Name: "NAME", - Password: "PAss00", + Password: lo.ToPtr("PAss00"), }, wantError: user.ErrPasswordLength, }, @@ -252,7 +253,7 @@ func TestUser_Signup(t *testing.T) { args: interfaces.SignupParam{ Email: "aaa@bbb.com", Name: "", - Password: "PAss00!!", + Password: lo.ToPtr("PAss00!!"), }, wantError: interfaces.ErrSignupInvalidName, }, @@ -400,7 +401,7 @@ func TestUser_SignupOIDC(t *testing.T) { AccessToken: "accesstoken", Issuer: "https://issuer", Sub: "sub", - Secret: sr("SECRET"), + Secret: lo.ToPtr("SECRET"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -468,7 +469,7 @@ func TestUser_SignupOIDC(t *testing.T) { AccessToken: "accesstoken", Issuer: "https://issuer", Sub: "sub", - Secret: sr("SECRET!"), + Secret: lo.ToPtr("SECRET!"), User: interfaces.SignupUserParam{ UserID: &uid, TeamID: &tid, @@ -543,7 +544,3 @@ func TestIssToURL(t *testing.T) { assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge"}, issToURL("https://iss.com/hoge", "")) assert.Equal(t, &url.URL{Scheme: "https", Host: "iss.com", Path: "/hoge/foobar"}, issToURL("https://iss.com/hoge", "foobar")) } - -func sr(s string) *string { - return &s -} diff --git a/internal/usecase/interfaces/user.go b/internal/usecase/interfaces/user.go index 79456c69c..c3a743bc4 100644 --- a/internal/usecase/interfaces/user.go +++ b/internal/usecase/interfaces/user.go @@ -27,7 +27,7 @@ type SignupParam struct { Sub *string // required by Auth0 Email string Name string - Password string + Password *string Secret *string User SignupUserParam } From 2c022035470a79862eb0089b80b6ebea93090f67 Mon Sep 17 00:00:00 2001 From: KeisukeYamashita <19yamashita15@gmail.com> Date: Tue, 26 Apr 2022 16:47:49 +0900 Subject: [PATCH 204/253] chore: typo `secrit` on env example (#137) --- .env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 7e4aba5a5..b4b236ff6 100644 --- a/.env.example +++ 
b/.env.example @@ -31,7 +31,7 @@ REEARTH_AUTHSRV_DEV=true REEARTH_AUTHSRV_DISABLED=false REEARTH_AUTHSRV_UIDOMAIN=https://reearth.example.com REEARTH_AUTHSRV_DOMAIN=https://api.reearth.example.com -# Any random long string (keep it secrit) +# Any random long string (keep it secret) REEARTH_AUTHSRV_KEY=abcdefghijklmnopqrstuvwxyz # Available mailers: [log, smtp, sendgrid] From 4f72b87c28b15665b613e0cc50bf90136641b9b6 Mon Sep 17 00:00:00 2001 From: KaWaite <34051327+KaWaite@users.noreply.github.com> Date: Tue, 10 May 2022 12:40:01 +0900 Subject: [PATCH 205/253] feat: add an opacity slider to map tiles (#138) * add slider ui type, add opacity to tile * Add description * update manifest "title"'s to keep consistent --- internal/adapter/gql/generated.go | 27 ++++++++-------- .../adapter/gql/gqlmodel/convert_property.go | 2 ++ internal/adapter/gql/gqlmodel/models_gen.go | 6 ++-- pkg/builtin/manifest.yml | 32 ++++++++++++------- pkg/builtin/manifest_ja.yml | 3 ++ pkg/property/schema_field_ui.go | 2 ++ schema.graphql | 1 + schemas/plugin_manifest.json | 3 +- 8 files changed, 48 insertions(+), 28 deletions(-) diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index d78aa027c..b28f972bc 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -6402,8 +6402,8 @@ type Me { email: String! lang: Lang! theme: Theme! - auths: [String!]! myTeamId: ID! + auths: [String!]! teams: [Team!]! @goField(forceResolver: true) myTeam: Team! 
@goField(forceResolver: true) } @@ -6708,6 +6708,7 @@ enum PropertySchemaFieldUI { SELECTION COLOR RANGE + SLIDER IMAGE VIDEO FILE @@ -15988,7 +15989,7 @@ func (ec *executionContext) _Me_theme(ctx context.Context, field graphql.Collect return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, field.Selections, res) } -func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16006,7 +16007,7 @@ func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.Collect ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Auths, nil + return obj.MyTeamID, nil }) if err != nil { ec.Error(ctx, err) @@ -16018,12 +16019,12 @@ func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.Collect } return graphql.Null } - res := resTmp.([]string) + res := resTmp.(gqlmodel.ID) fc.Result = res - return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { +func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) @@ -16041,7 +16042,7 @@ func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.Coll ctx = 
graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.MyTeamID, nil + return obj.Auths, nil }) if err != nil { ec.Error(ctx, err) @@ -16053,9 +16054,9 @@ func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.Coll } return graphql.Null } - res := resTmp.(gqlmodel.ID) + res := resTmp.([]string) fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) + return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) } func (ec *executionContext) _Me_teams(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { @@ -38446,9 +38447,9 @@ func (ec *executionContext) _Me(ctx context.Context, sel ast.SelectionSet, obj * if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } - case "auths": + case "myTeamId": innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_auths(ctx, field, obj) + return ec._Me_myTeamId(ctx, field, obj) } out.Values[i] = innerFunc(ctx) @@ -38456,9 +38457,9 @@ func (ec *executionContext) _Me(ctx context.Context, sel ast.SelectionSet, obj * if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } - case "myTeamId": + case "auths": innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_myTeamId(ctx, field, obj) + return ec._Me_auths(ctx, field, obj) } out.Values[i] = innerFunc(ctx) diff --git a/internal/adapter/gql/gqlmodel/convert_property.go b/internal/adapter/gql/gqlmodel/convert_property.go index 94053895a..16fbcce52 100644 --- a/internal/adapter/gql/gqlmodel/convert_property.go +++ b/internal/adapter/gql/gqlmodel/convert_property.go @@ -276,6 +276,8 @@ func ToPropertySchemaFieldUI(ui *property.SchemaFieldUI) *PropertySchemaFieldUI ui2 = PropertySchemaFieldUIColor case 
property.SchemaFieldUIRange: ui2 = PropertySchemaFieldUIRange + case property.SchemaFieldUISlider: + ui2 = PropertySchemaFieldUISlider case property.SchemaFieldUIImage: ui2 = PropertySchemaFieldUIImage case property.SchemaFieldUIVideo: diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index cdcf705cb..b4de11063 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -555,8 +555,8 @@ type Me struct { Email string `json:"email"` Lang language.Tag `json:"lang"` Theme Theme `json:"theme"` - Auths []string `json:"auths"` MyTeamID ID `json:"myTeamId"` + Auths []string `json:"auths"` Teams []*Team `json:"teams"` MyTeam *Team `json:"myTeam"` } @@ -1628,6 +1628,7 @@ const ( PropertySchemaFieldUISelection PropertySchemaFieldUI = "SELECTION" PropertySchemaFieldUIColor PropertySchemaFieldUI = "COLOR" PropertySchemaFieldUIRange PropertySchemaFieldUI = "RANGE" + PropertySchemaFieldUISlider PropertySchemaFieldUI = "SLIDER" PropertySchemaFieldUIImage PropertySchemaFieldUI = "IMAGE" PropertySchemaFieldUIVideo PropertySchemaFieldUI = "VIDEO" PropertySchemaFieldUIFile PropertySchemaFieldUI = "FILE" @@ -1640,6 +1641,7 @@ var AllPropertySchemaFieldUI = []PropertySchemaFieldUI{ PropertySchemaFieldUISelection, PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, + PropertySchemaFieldUISlider, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, @@ -1648,7 +1650,7 @@ var AllPropertySchemaFieldUI = []PropertySchemaFieldUI{ func (e PropertySchemaFieldUI) IsValid() bool { switch e { - case PropertySchemaFieldUILayer, PropertySchemaFieldUIMultiline, PropertySchemaFieldUISelection, PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, PropertySchemaFieldUICameraPose: + case PropertySchemaFieldUILayer, PropertySchemaFieldUIMultiline, PropertySchemaFieldUISelection, 
PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, PropertySchemaFieldUISlider, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, PropertySchemaFieldUICameraPose: return true } return false diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index c98700081..5caf26b22 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -24,7 +24,7 @@ extensions: description: Show elevation when close to the surface. - id: terrainType type: string - title: Terrain Type + title: Terrain type description: Specify terrain type. defaultValue: cesium choices: @@ -89,7 +89,7 @@ extensions: description: Enable camera limiter. - id: cameraLimitterShowHelper type: bool - title: Show Helper + title: Show helper defaultValue: false description: Display the limiter boundaries. - id: cameraLimitterTargetArea @@ -118,7 +118,7 @@ extensions: fields: - id: tile_type type: string - title: Tile Type + title: Tile type defaultValue: default choices: - key: default @@ -158,6 +158,14 @@ extensions: title: Maximum zoom level min: 0 max: 30 + - id: tile_opacity + type: number + title: Opacity + description: "Change the opacity of the selected tile map. Min: 0 Max: 1" + defaultValue: 1 + ui: slider + min: 0 + max: 1 - id: theme title: Publish Theme description: Set your theme. @@ -248,20 +256,20 @@ extensions: max: 1 - id: brightness_shift type: number - title: Fog Brightness + title: Fog brightness defaultValue: 0.03 description: "Set brightness of the fog. Min: -1 Max: 1" min: -1 max: 1 - id: hue_shift type: number - title: Fog Hue + title: Fog hue description: "Set hue of the fog. Min: -1 Max: 1" min: -1 max: 1 - id: surturation_shift type: number - title: Fog Saturation + title: Fog saturation description: "Set saturation of the fog. 
Min: -1 Max: 1" min: -1 max: 1 @@ -300,7 +308,7 @@ extensions: title: Title - id: showTitle type: bool - title: Show Title + title: Show title defaultValue: true - id: position type: string @@ -326,7 +334,7 @@ extensions: label: Large - id: heightType type: string - title: Height Type + title: Height type defaultValue: auto choices: - key: auto @@ -374,7 +382,7 @@ extensions: description: "The space between the right side of the infobox and the title and blocks. Min: 0 Max: 40" - id: bgcolor type: string - title: Background Color + title: Background color ui: color - id: outlineWidth type: number @@ -383,11 +391,11 @@ extensions: max: 20 - id: outlineColor type: string - title: Outline Color + title: Outline color ui: color - id: useMask type: bool - title: Use Mask + title: Use mask - id: typography type: typography title: Font @@ -1028,7 +1036,7 @@ extensions: schema: groups: - id: default - title: 3D Model + title: 3D model fields: - id: model type: url diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 99ebb28ef..cea488d93 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -79,6 +79,9 @@ extensions: title: ๆœ€ๅฐใƒฌใƒ™ใƒซ tile_maxLevel: title: ๆœ€ๅคงใƒฌใƒ™ใƒซ + tile_opacity: + title: ไธ้€ๆ˜Žๆ€ง + description: NEEDS DESCRIPTION atmosphere: title: ๅคงๆฐ— description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚ diff --git a/pkg/property/schema_field_ui.go b/pkg/property/schema_field_ui.go index 3b8b40f05..9deab26ad 100644 --- a/pkg/property/schema_field_ui.go +++ b/pkg/property/schema_field_ui.go @@ -7,6 +7,7 @@ const ( SchemaFieldUISelection SchemaFieldUI = "selection" SchemaFieldUIColor SchemaFieldUI = "color" SchemaFieldUIRange SchemaFieldUI = "range" + SchemaFieldUISlider SchemaFieldUI = "slider" SchemaFieldUIImage SchemaFieldUI = "image" SchemaFieldUIVideo SchemaFieldUI = "video" SchemaFieldUIFile SchemaFieldUI = "file" @@ -21,6 +22,7 @@ var ( SchemaFieldUISelection, SchemaFieldUIColor, 
SchemaFieldUIRange, + SchemaFieldUISlider, SchemaFieldUIImage, SchemaFieldUIVideo, SchemaFieldUIFile, diff --git a/schema.graphql b/schema.graphql index 4060ae435..5c43a54a2 100644 --- a/schema.graphql +++ b/schema.graphql @@ -471,6 +471,7 @@ enum PropertySchemaFieldUI { SELECTION COLOR RANGE + SLIDER IMAGE VIDEO FILE diff --git a/schemas/plugin_manifest.json b/schemas/plugin_manifest.json index 037ba9725..b681c9202 100644 --- a/schemas/plugin_manifest.json +++ b/schemas/plugin_manifest.json @@ -139,6 +139,7 @@ "selection", "buttons", "range", + "slider", "image", "video", "file", @@ -438,4 +439,4 @@ } }, "$ref": "#/definitions/root" -} +} \ No newline at end of file From 35f9db587c2f2d396db0a6f6398c879db7667eec Mon Sep 17 00:00:00 2001 From: KeisukeYamashita <19yamashita15@gmail.com> Date: Wed, 11 May 2022 11:02:11 +0900 Subject: [PATCH 206/253] test: add Mongo Asset's `FindByID` unit testing (#139) --- .github/workflows/ci.yml | 7 +++ internal/infrastructure/mongo/asset_test.go | 66 +++++++++++++++++++++ internal/infrastructure/mongo/mongo_test.go | 43 ++++++++++++++ 3 files changed, 116 insertions(+) create mode 100644 internal/infrastructure/mongo/asset_test.go create mode 100644 internal/infrastructure/mongo/mongo_test.go diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4a14e47c8..607d65e7c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,6 +8,11 @@ jobs: ci: name: CI runs-on: ubuntu-latest + services: + mongo: + image: mongo:4.4-focal + ports: + - 27017:27017 steps: - name: set up uses: actions/setup-go@v3 @@ -30,6 +35,8 @@ jobs: args: --timeout=10m - name: test run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic -timeout 10m + env: + REEARTH_DB: mongodb://localhost - name: Send coverage report uses: codecov/codecov-action@v2 with: diff --git a/internal/infrastructure/mongo/asset_test.go b/internal/infrastructure/mongo/asset_test.go new file mode 100644 index 000000000..135683dfa --- /dev/null +++ b/internal/infrastructure/mongo/asset_test.go @@ -0,0 +1,66 @@ +package mongo + +import ( + "context" + "testing" + "time" + + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestFindByID(t *testing.T) { + tests := []struct { + Name string + Expected struct { + Name string + Asset *asset.Asset + } + }{ + { + Expected: struct { + Name string + Asset *asset.Asset + }{ + Asset: asset.New(). + NewID(). + CreatedAt(time.Now()). + Team(id.NewTeamID()). + Name("name"). + Size(10). + URL("hxxps://https://reearth.io/"). + ContentType("json"). + MustBuild(), + }, + }, + } + + initDB := connect(t) + + for _, tc := range tests { + tc := tc + + t.Run(tc.Name, func(t *testing.T) { + t.Parallel() + + client, dropDB := initDB() + defer dropDB() + + repo := NewAsset(client) + ctx := context.Background() + err := repo.Save(ctx, tc.Expected.Asset) + assert.NoError(t, err) + + got, err := repo.FindByID(ctx, tc.Expected.Asset.ID()) + assert.NoError(t, err) + assert.Equal(t, tc.Expected.Asset.ID(), got.ID()) + assert.Equal(t, tc.Expected.Asset.CreatedAt(), got.CreatedAt()) + assert.Equal(t, tc.Expected.Asset.Team(), got.Team()) + assert.Equal(t, tc.Expected.Asset.URL(), got.URL()) + assert.Equal(t, tc.Expected.Asset.Size(), got.Size()) + assert.Equal(t, tc.Expected.Asset.Name(), got.Name()) + assert.Equal(t, tc.Expected.Asset.ContentType(), got.ContentType()) + }) + } +} diff --git a/internal/infrastructure/mongo/mongo_test.go b/internal/infrastructure/mongo/mongo_test.go new file mode 100644 index 000000000..ec132989d --- /dev/null +++ 
b/internal/infrastructure/mongo/mongo_test.go @@ -0,0 +1,43 @@ +package mongo + +import ( + "context" + "encoding/hex" + "os" + "testing" + "time" + + "github.com/reearth/reearth-backend/internal/infrastructure/mongo/mongodoc" + "go.mongodb.org/mongo-driver/mongo" + "go.mongodb.org/mongo-driver/mongo/options" + "go.mongodb.org/mongo-driver/x/mongo/driver/uuid" +) + +func connect(t *testing.T) func() (*mongodoc.Client, func()) { + t.Helper() + + // Skip unit testing if "REEARTH_DB" is not configured + // See details: https://github.com/reearth/reearth/issues/273 + db := os.Getenv("REEARTH_DB") + if db == "" { + t.SkipNow() + return nil + } + + c, _ := mongo.Connect( + context.Background(), + options.Client(). + ApplyURI(db). + SetConnectTimeout(time.Second*10), + ) + + return func() (*mongodoc.Client, func()) { + database, _ := uuid.New() + databaseName := "reearth-test-" + hex.EncodeToString(database[:]) + client := mongodoc.NewClient(databaseName, c) + + return client, func() { + _ = c.Database(databaseName).Drop(context.Background()) + } + } +} From 58e1b02608c29881bc950043f96fb309a2f95fe2 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 11 May 2022 12:51:34 +0900 Subject: [PATCH 207/253] fix: "$in needs an array" error from mongo FindByIDs (#142) --- internal/infrastructure/mongo/asset.go | 4 ++++ internal/infrastructure/mongo/dataset.go | 4 ++++ internal/infrastructure/mongo/dataset_schema.go | 4 ++++ internal/infrastructure/mongo/layer.go | 4 ++++ internal/infrastructure/mongo/plugin.go | 4 ++++ internal/infrastructure/mongo/project.go | 16 ++++++++++------ internal/infrastructure/mongo/property.go | 4 ++++ internal/infrastructure/mongo/property_schema.go | 4 ++++ internal/infrastructure/mongo/scene.go | 4 ++++ internal/infrastructure/mongo/tag.go | 4 ++++ internal/infrastructure/mongo/team.go | 4 ++++ internal/infrastructure/mongo/user.go | 4 ++++ 12 files changed, 54 insertions(+), 6 deletions(-) diff --git a/internal/infrastructure/mongo/asset.go 
b/internal/infrastructure/mongo/asset.go index 1ad65a3e4..46a78c4e3 100644 --- a/internal/infrastructure/mongo/asset.go +++ b/internal/infrastructure/mongo/asset.go @@ -41,6 +41,10 @@ func (r *assetRepo) FindByID(ctx context.Context, id id.AssetID) (*asset.Asset, } func (r *assetRepo) FindByIDs(ctx context.Context, ids id.AssetIDList) ([]*asset.Asset, error) { + if len(ids) == 0 { + return nil, nil + } + filter := bson.M{ "id": bson.M{"$in": ids.Strings()}, } diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index eda57615e..eb53d48a0 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -46,6 +46,10 @@ func (r *datasetRepo) FindByID(ctx context.Context, id id.DatasetID) (*dataset.D } func (r *datasetRepo) FindByIDs(ctx context.Context, ids id.DatasetIDList) (dataset.List, error) { + if len(ids) == 0 { + return nil, nil + } + filter := bson.M{ "id": bson.M{ "$in": ids.Strings(), diff --git a/internal/infrastructure/mongo/dataset_schema.go b/internal/infrastructure/mongo/dataset_schema.go index 384a2fa91..a16c00b65 100644 --- a/internal/infrastructure/mongo/dataset_schema.go +++ b/internal/infrastructure/mongo/dataset_schema.go @@ -46,6 +46,10 @@ func (r *datasetSchemaRepo) FindByID(ctx context.Context, id id.DatasetSchemaID) } func (r *datasetSchemaRepo) FindByIDs(ctx context.Context, ids id.DatasetSchemaIDList) (dataset.SchemaList, error) { + if len(ids) == 0 { + return nil, nil + } + filter := bson.M{ "id": bson.M{ "$in": ids.Strings(), diff --git a/internal/infrastructure/mongo/layer.go b/internal/infrastructure/mongo/layer.go index 83a8829ed..6db761329 100644 --- a/internal/infrastructure/mongo/layer.go +++ b/internal/infrastructure/mongo/layer.go @@ -45,6 +45,10 @@ func (r *layerRepo) FindByID(ctx context.Context, id id.LayerID) (layer.Layer, e } func (r *layerRepo) FindByIDs(ctx context.Context, ids id.LayerIDList) (layer.List, error) { + if len(ids) == 0 { + 
return nil, nil + } + filter := bson.M{ "id": bson.M{ "$in": ids.Strings(), diff --git a/internal/infrastructure/mongo/plugin.go b/internal/infrastructure/mongo/plugin.go index ebadde8ec..8469f84c9 100644 --- a/internal/infrastructure/mongo/plugin.go +++ b/internal/infrastructure/mongo/plugin.go @@ -54,6 +54,10 @@ func (r *pluginRepo) FindByID(ctx context.Context, pid id.PluginID) (*plugin.Plu } func (r *pluginRepo) FindByIDs(ctx context.Context, ids []id.PluginID) ([]*plugin.Plugin, error) { + if len(ids) == 0 { + return nil, nil + } + // TODO: separate built-in plugins to another repository // exclude built-in b := plugin.Map{} diff --git a/internal/infrastructure/mongo/project.go b/internal/infrastructure/mongo/project.go index 0e64aefed..4d5309801 100644 --- a/internal/infrastructure/mongo/project.go +++ b/internal/infrastructure/mongo/project.go @@ -39,7 +39,17 @@ func (r *projectRepo) Filtered(f repo.TeamFilter) repo.Project { } } +func (r *projectRepo) FindByID(ctx context.Context, id id.ProjectID) (*project.Project, error) { + return r.findOne(ctx, bson.M{ + "id": id.String(), + }) +} + func (r *projectRepo) FindByIDs(ctx context.Context, ids id.ProjectIDList) ([]*project.Project, error) { + if len(ids) == 0 { + return nil, nil + } + filter := bson.M{ "id": bson.M{ "$in": ids.Strings(), @@ -53,12 +63,6 @@ func (r *projectRepo) FindByIDs(ctx context.Context, ids id.ProjectIDList) ([]*p return filterProjects(ids, res), nil } -func (r *projectRepo) FindByID(ctx context.Context, id id.ProjectID) (*project.Project, error) { - return r.findOne(ctx, bson.M{ - "id": id.String(), - }) -} - func (r *projectRepo) FindByTeam(ctx context.Context, id id.TeamID, pagination *usecase.Pagination) ([]*project.Project, *usecase.PageInfo, error) { if !r.f.CanRead(id) { return nil, usecase.EmptyPageInfo(), nil diff --git a/internal/infrastructure/mongo/property.go b/internal/infrastructure/mongo/property.go index ce170852a..2632b3fe1 100644 --- 
a/internal/infrastructure/mongo/property.go +++ b/internal/infrastructure/mongo/property.go @@ -44,6 +44,10 @@ func (r *propertyRepo) FindByID(ctx context.Context, id id.PropertyID) (*propert } func (r *propertyRepo) FindByIDs(ctx context.Context, ids id.PropertyIDList) (property.List, error) { + if len(ids) == 0 { + return nil, nil + } + filter := bson.M{ "id": bson.M{ "$in": ids.Strings(), diff --git a/internal/infrastructure/mongo/property_schema.go b/internal/infrastructure/mongo/property_schema.go index 87a28b078..4da9fdc7d 100644 --- a/internal/infrastructure/mongo/property_schema.go +++ b/internal/infrastructure/mongo/property_schema.go @@ -48,6 +48,10 @@ func (r *propertySchemaRepo) FindByID(ctx context.Context, id id.PropertySchemaI } func (r *propertySchemaRepo) FindByIDs(ctx context.Context, ids []id.PropertySchemaID) (property.SchemaList, error) { + if len(ids) == 0 { + return nil, nil + } + // exclude built-in b := property.SchemaMap{} ids2 := make([]id.PropertySchemaID, 0, len(ids)) diff --git a/internal/infrastructure/mongo/scene.go b/internal/infrastructure/mongo/scene.go index 9702ca42f..a9567316c 100644 --- a/internal/infrastructure/mongo/scene.go +++ b/internal/infrastructure/mongo/scene.go @@ -45,6 +45,10 @@ func (r *sceneRepo) FindByID(ctx context.Context, id id.SceneID) (*scene.Scene, } func (r *sceneRepo) FindByIDs(ctx context.Context, ids id.SceneIDList) (scene.List, error) { + if len(ids) == 0 { + return nil, nil + } + return r.find(ctx, make(scene.List, 0, len(ids)), bson.M{ "id": bson.M{ "$in": ids.Strings(), diff --git a/internal/infrastructure/mongo/tag.go b/internal/infrastructure/mongo/tag.go index d37a3eed6..fb634dd66 100644 --- a/internal/infrastructure/mongo/tag.go +++ b/internal/infrastructure/mongo/tag.go @@ -45,6 +45,10 @@ func (r *tagRepo) FindByID(ctx context.Context, id id.TagID) (tag.Tag, error) { } func (r *tagRepo) FindByIDs(ctx context.Context, ids id.TagIDList) ([]*tag.Tag, error) { + if len(ids) == 0 { + return nil, nil 
+ } + filter := bson.M{ "id": bson.M{ "$in": ids.Strings(), diff --git a/internal/infrastructure/mongo/team.go b/internal/infrastructure/mongo/team.go index f52ea730c..256b936b7 100644 --- a/internal/infrastructure/mongo/team.go +++ b/internal/infrastructure/mongo/team.go @@ -38,6 +38,10 @@ func (r *teamRepo) FindByUser(ctx context.Context, id id.UserID) (user.TeamList, } func (r *teamRepo) FindByIDs(ctx context.Context, ids id.TeamIDList) (user.TeamList, error) { + if len(ids) == 0 { + return nil, nil + } + dst := make([]*user.Team, 0, len(ids)) res, err := r.find(ctx, dst, bson.M{ "id": bson.M{"$in": ids.Strings()}, diff --git a/internal/infrastructure/mongo/user.go b/internal/infrastructure/mongo/user.go index 4c53b037d..be2d85c09 100644 --- a/internal/infrastructure/mongo/user.go +++ b/internal/infrastructure/mongo/user.go @@ -30,6 +30,10 @@ func (r *userRepo) init() { } func (r *userRepo) FindByIDs(ctx context.Context, ids id.UserIDList) ([]*user.User, error) { + if len(ids) == 0 { + return nil, nil + } + dst := make([]*user.User, 0, len(ids)) res, err := r.find(ctx, dst, bson.M{ "id": bson.M{"$in": ids.Strings()}, From cb82d059df9705cc91286fc58737d83af22bb2e4 Mon Sep 17 00:00:00 2001 From: KeisukeYamashita <19yamashita15@gmail.com> Date: Wed, 11 May 2022 12:57:01 +0900 Subject: [PATCH 208/253] ci: bump `actions/cache` to `v3` (#140) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 607d65e7c..eb440a0dd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,7 +22,7 @@ jobs: - name: checkout uses: actions/checkout@v3 - name: cache - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} From 751e666040e9b1807c611641c90daa42a55e8118 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 11 May 2022 16:56:11 +0900 Subject: [PATCH 209/253] test: refactor mongo connect helper 
function --- internal/infrastructure/mongo/asset_test.go | 5 ++--- internal/infrastructure/mongo/mongo_test.go | 12 ++++++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/internal/infrastructure/mongo/asset_test.go b/internal/infrastructure/mongo/asset_test.go index 135683dfa..8a41d87f3 100644 --- a/internal/infrastructure/mongo/asset_test.go +++ b/internal/infrastructure/mongo/asset_test.go @@ -36,7 +36,7 @@ func TestFindByID(t *testing.T) { }, } - initDB := connect(t) + init := connect(t) for _, tc := range tests { tc := tc @@ -44,8 +44,7 @@ func TestFindByID(t *testing.T) { t.Run(tc.Name, func(t *testing.T) { t.Parallel() - client, dropDB := initDB() - defer dropDB() + client := init(t) repo := NewAsset(client) ctx := context.Background() diff --git a/internal/infrastructure/mongo/mongo_test.go b/internal/infrastructure/mongo/mongo_test.go index ec132989d..2e4d503cc 100644 --- a/internal/infrastructure/mongo/mongo_test.go +++ b/internal/infrastructure/mongo/mongo_test.go @@ -13,7 +13,7 @@ import ( "go.mongodb.org/mongo-driver/x/mongo/driver/uuid" ) -func connect(t *testing.T) func() (*mongodoc.Client, func()) { +func connect(t *testing.T) func(*testing.T) *mongodoc.Client { t.Helper() // Skip unit testing if "REEARTH_DB" is not configured @@ -31,13 +31,17 @@ func connect(t *testing.T) func() (*mongodoc.Client, func()) { SetConnectTimeout(time.Second*10), ) - return func() (*mongodoc.Client, func()) { + return func(t *testing.T) *mongodoc.Client { + t.Helper() + database, _ := uuid.New() databaseName := "reearth-test-" + hex.EncodeToString(database[:]) client := mongodoc.NewClient(databaseName, c) - return client, func() { + t.Cleanup(func() { _ = c.Database(databaseName).Drop(context.Background()) - } + }) + + return client } } From c2b969fc775471bcfb71cf9b087fc90339b9b1c8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 11 May 2022 16:59:02 +0900 Subject: [PATCH 210/253] test: util.SyncMap.Range test sometimes fails (#143) --- 
pkg/util/map_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/util/map_test.go b/pkg/util/map_test.go index 2ebc2b8af..d4fed78ea 100644 --- a/pkg/util/map_test.go +++ b/pkg/util/map_test.go @@ -79,9 +79,9 @@ func TestSyncMap_Range(t *testing.T) { s.Range(func(k string, v int) bool { if k == "a" { vv = v - return true + return false } - return false + return true }) assert.Equal(t, 1, vv) } From 651852d9928125044e0e134e011f38b266c2e82f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 11 May 2022 17:20:23 +0900 Subject: [PATCH 211/253] fix: name field is available again in signup api (#144) --- internal/adapter/http/user.go | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/internal/adapter/http/user.go b/internal/adapter/http/user.go index e71cf91fc..e235189b5 100644 --- a/internal/adapter/http/user.go +++ b/internal/adapter/http/user.go @@ -28,11 +28,13 @@ type PasswordResetInput struct { } type SignupInput struct { - Sub *string `json:"sub"` - Secret *string `json:"secret"` - UserID *id.UserID `json:"userId"` - TeamID *id.TeamID `json:"teamId"` - Name *string `json:"username"` + Sub *string `json:"sub"` + Secret *string `json:"secret"` + UserID *id.UserID `json:"userId"` + TeamID *id.TeamID `json:"teamId"` + Name *string `json:"name"` + // Username is an alias of Name + Username *string `json:"username"` Email *string `json:"email"` Password *string `json:"password"` Theme *user.Theme `json:"theme"` @@ -65,10 +67,18 @@ func (c *UserController) Signup(ctx context.Context, input SignupInput) (SignupO var u *user.User var err error + name := input.Name + if name == nil { + name = input.Username + } + if name == nil { + name = input.Email + } + if au := adapter.GetAuthInfo(ctx); au != nil { - var name string - if input.Name != nil { - name = *input.Name + var name2 string + if name != nil { + name2 = *name } u, _, err = c.usecase.SignupOIDC(ctx, interfaces.SignupOIDCParam{ @@ -76,7 
+86,7 @@ func (c *UserController) Signup(ctx context.Context, input SignupInput) (SignupO AccessToken: au.Token, Issuer: au.Iss, Email: au.Email, - Name: name, + Name: name2, Secret: input.Secret, User: interfaces.SignupUserParam{ UserID: input.UserID, @@ -85,10 +95,10 @@ func (c *UserController) Signup(ctx context.Context, input SignupInput) (SignupO Theme: input.Theme, }, }) - } else if input.Name != nil && input.Email != nil { + } else if name != nil && input.Email != nil { u, _, err = c.usecase.Signup(ctx, interfaces.SignupParam{ Sub: input.Sub, - Name: *input.Name, + Name: *name, Email: *input.Email, Password: input.Password, Secret: input.Secret, From ddaeaaee6f3591717c7f162fd19d48dbcf743b44 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 11 May 2022 20:15:31 +0900 Subject: [PATCH 212/253] refactor: retry mongo lock (#145) --- go.mod | 1 + go.sum | 9 +++++ internal/infrastructure/mongo/lock.go | 49 ++++++--------------------- 3 files changed, 20 insertions(+), 39 deletions(-) diff --git a/go.mod b/go.mod index 4c9e3186c..837d8bf32 100644 --- a/go.mod +++ b/go.mod @@ -61,6 +61,7 @@ require ( github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect + github.com/avast/retry-go/v4 v4.0.4 // indirect github.com/caos/logging v0.0.2 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect diff --git a/go.sum b/go.sum index 0f2e1bfee..483176303 100644 --- a/go.sum +++ b/go.sum @@ -92,6 +92,8 @@ github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/auth0/go-jwt-middleware/v2 v2.0.1 h1:zAgDKL7nsfVBFl31GGxsSXkhuRzYe1fVtJcO3aMSrFU= github.com/auth0/go-jwt-middleware/v2 v2.0.1/go.mod 
h1:kDt7JgUuDEp1VutfUmO4ZxBLL51vlNu/56oDfXc5E0Y= +github.com/avast/retry-go/v4 v4.0.4 h1:38hLf0DsRXh+hOF6HbTni0+5QGTNdw9zbaMD7KAO830= +github.com/avast/retry-go/v4 v4.0.4/go.mod h1:HqmLvS2VLdStPCGDFjSuZ9pzlTqVRldCI4w2dO4m1Ms= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= github.com/aws/aws-sdk-go v1.35.5/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= @@ -405,6 +407,7 @@ github.com/paulmach/go.geojson v1.4.0 h1:5x5moCkCtDo5x8af62P9IOAYGQcYHtxz2QJ3x1D github.com/paulmach/go.geojson v1.4.0/go.mod h1:YaKx1hKpWF+T2oj2lFJPsW/t1Q5e1jQI61eoQSTwpIs= github.com/pelletier/go-toml v1.0.1-0.20170904195809-1d6b12b7cb29/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= +github.com/pierrre/gotestcover v0.0.0-20160517101806-924dca7d15f0/go.mod h1:4xpMLz7RBWyB+ElzHu8Llua96TRCB3YwX+l5EP1wmHk= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -415,6 +418,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/ravilushqa/otelgqlgen v0.6.0 h1:SbBmlE1/6Z4NDjCn96Ksbb41BIEhIf3dBT1WlGzNOr4= github.com/ravilushqa/otelgqlgen v0.6.0/go.mod h1:QP2vU3CSdJ2kYJkZl1zvTZWGgd2qEUxSWp1agqZjZ9A= +github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= 
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -505,6 +509,7 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.8.4/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= @@ -644,6 +649,7 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= @@ -740,6 +746,7 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -747,6 +754,7 @@ golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211107104306-e0b2ad06fe42/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -839,6 +847,7 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= golang.org/x/tools v0.1.10/go.mod 
h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= diff --git a/internal/infrastructure/mongo/lock.go b/internal/infrastructure/mongo/lock.go index b42cfca05..b7d9626c4 100644 --- a/internal/infrastructure/mongo/lock.go +++ b/internal/infrastructure/mongo/lock.go @@ -3,10 +3,9 @@ package mongo import ( "context" "errors" - "math/rand" "sync" - "time" + "github.com/avast/retry-go/v4" "github.com/google/uuid" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/log" @@ -22,10 +21,7 @@ type Lock struct { } func NewLock(c *mongo.Collection) (repo.Lock, error) { - hostid, err := uuidString() - if err != nil { - return nil, err - } + hostid := uuid.NewString() l := lock.NewClient(c) if err := l.CreateIndexes(context.Background()); err != nil { @@ -43,31 +39,15 @@ func (r *Lock) Lock(ctx context.Context, name string) error { return repo.ErrAlreadyLocked } - lockID, err := uuidString() - if err != nil { - return err - } - + lockID := uuid.NewString() log.Infof("lock: trying to lock: id=%s, name=%s, host=%s", name, lockID, r.hostid) - // wait and retry - const retry = 10 - for i := 0; i < retry; i++ { - if err := r.l.XLock(ctx, name, lockID, r.details()); err != nil { - if errors.Is(err, lock.ErrAlreadyLocked) { - log.Infof("lock: failed to lock (%d/%d): name=%s, id=%s, host=%s", i+1, retry, name, lockID, r.hostid) - if i >= retry { - return repo.ErrFailedToLock - } - - time.Sleep(time.Second * time.Duration(rand.Intn(1)+(i+1))) - continue - } - - log.Infof("lock: failed to lock: name=%s, id=%s, host=%s, err=%s", name, lockID, r.hostid, err) - return repo.ErrFailedToLock - } else { - break - } + + if err := retry.Do( + func() error { return r.l.XLock(ctx, name, lockID, r.details()) }, + retry.RetryIf(func(err error) bool { return errors.Is(err, lock.ErrAlreadyLocked) }), + ); err != nil { + log.Infof("lock: failed to lock: name=%s, id=%s, host=%s, err=%s", name, lockID, r.hostid, err) + return repo.ErrFailedToLock } 
r.setLockID(name, lockID) @@ -90,15 +70,6 @@ func (r *Lock) Unlock(ctx context.Context, name string) error { return nil } -func uuidString() (string, error) { - u, err := uuid.NewUUID() - if err != nil { - return "", rerror.ErrInternalBy(err) - } - - return u.String(), nil -} - func (r *Lock) details() lock.LockDetails { if r == nil { return lock.LockDetails{} From 89009ba61b7c55314f4c4aeba5378de90ea19391 Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Fri, 13 May 2022 09:35:04 +0300 Subject: [PATCH 213/253] chore: update the go modules (#146) --- go.mod | 70 ++++++++++++++++++++-------------------- go.sum | 100 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 136 insertions(+), 34 deletions(-) diff --git a/go.mod b/go.mod index 837d8bf32..cd5c125d5 100644 --- a/go.mod +++ b/go.mod @@ -2,9 +2,9 @@ module github.com/reearth/reearth-backend require ( cloud.google.com/go/profiler v0.2.0 - cloud.google.com/go/storage v1.21.0 - github.com/99designs/gqlgen v0.17.2 - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 + cloud.google.com/go/storage v1.22.0 + github.com/99designs/gqlgen v0.17.5 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1 github.com/auth0/go-jwt-middleware/v2 v2.0.1 github.com/blang/semver v3.5.1+incompatible github.com/caos/oidc v1.2.0 @@ -14,19 +14,19 @@ require ( github.com/gorilla/mux v1.8.0 github.com/iancoleman/strcase v0.2.0 github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d - github.com/jarcoal/httpmock v1.1.0 + github.com/jarcoal/httpmock v1.2.0 github.com/joho/godotenv v1.4.0 github.com/jonas-p/go-shp v0.1.1 github.com/kelseyhightower/envconfig v1.4.0 github.com/kennygrant/sanitize v1.2.4 github.com/labstack/echo/v4 v4.7.2 github.com/labstack/gommon v0.3.1 - github.com/mitchellh/mapstructure v1.4.3 + github.com/mitchellh/mapstructure v1.5.0 github.com/oklog/ulid v1.3.1 
github.com/paulmach/go.geojson v1.4.0 github.com/pkg/errors v0.9.1 - github.com/ravilushqa/otelgqlgen v0.6.0 - github.com/samber/lo v1.11.0 + github.com/ravilushqa/otelgqlgen v0.6.1 + github.com/samber/lo v1.21.0 github.com/sendgrid/sendgrid-go v3.11.1+incompatible github.com/sirupsen/logrus v1.8.1 github.com/spf13/afero v1.8.2 @@ -36,17 +36,17 @@ require ( github.com/uber/jaeger-client-go v2.30.0+incompatible github.com/uber/jaeger-lib v2.4.1+incompatible github.com/vektah/dataloaden v0.3.0 - github.com/vektah/gqlparser/v2 v2.4.1 - go.mongodb.org/mongo-driver v1.9.0 - go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0 - go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0 - go.opentelemetry.io/otel v1.6.1 - go.opentelemetry.io/otel/sdk v1.6.1 - golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 - golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 + github.com/vektah/gqlparser/v2 v2.4.2 + go.mongodb.org/mongo-driver v1.9.1 + go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0 + go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0 + go.opentelemetry.io/otel v1.7.0 + go.opentelemetry.io/otel/sdk v1.7.0 + golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88 + golang.org/x/exp v0.0.0-20220428152302-39d4317da171 golang.org/x/text v0.3.7 golang.org/x/tools v0.1.10 - google.golang.org/api v0.74.0 + google.golang.org/api v0.79.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 gopkg.in/square/go-jose.v2 v2.6.0 @@ -54,9 +54,10 @@ require ( require ( cloud.google.com/go v0.100.2 // indirect - cloud.google.com/go/compute v1.5.0 // indirect - cloud.google.com/go/iam v0.1.1 // indirect - cloud.google.com/go/trace v1.0.0 // indirect + cloud.google.com/go/compute v1.6.1 // indirect + cloud.google.com/go/iam v0.3.0 // indirect + cloud.google.com/go/trace v1.2.0 // indirect + 
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1 // indirect github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect @@ -76,19 +77,20 @@ require ( github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/golang/snappy v0.0.3 // indirect - github.com/google/go-cmp v0.5.7 // indirect + github.com/google/go-cmp v0.5.8 // indirect github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 // indirect - github.com/googleapis/gax-go/v2 v2.2.0 // indirect + github.com/googleapis/gax-go/v2 v2.3.0 // indirect + github.com/googleapis/go-type-adapters v1.0.0 // indirect github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect github.com/gorilla/handlers v1.5.1 // indirect github.com/gorilla/schema v1.2.0 // indirect github.com/gorilla/securecookie v1.1.1 // indirect - github.com/gorilla/websocket v1.4.2 // indirect + github.com/gorilla/websocket v1.5.0 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/klauspost/compress v1.13.6 // indirect - github.com/matryer/moq v0.2.3 // indirect - github.com/mattn/go-colorable v0.1.11 // indirect + github.com/matryer/moq v0.2.7 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect github.com/mattn/go-isatty v0.0.14 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -98,7 +100,7 @@ require ( github.com/smartystreets/goconvey v1.6.4 // indirect github.com/stretchr/objx v0.2.0 // indirect github.com/tidwall/pretty v1.0.1 // indirect - github.com/urfave/cli/v2 v2.3.0 // indirect + github.com/urfave/cli/v2 v2.4.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/fasttemplate 
v1.2.1 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect @@ -106,20 +108,20 @@ require ( github.com/xdg-go/stringprep v1.0.2 // indirect github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d // indirect go.opencensus.io v0.23.0 // indirect - go.opentelemetry.io/contrib v1.4.0 // indirect - go.opentelemetry.io/otel/trace v1.6.1 // indirect + go.opentelemetry.io/contrib v1.6.0 // indirect + go.opentelemetry.io/otel/trace v1.7.0 // indirect go.uber.org/atomic v1.7.0 // indirect golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect - golang.org/x/net v0.0.0-20220325170049-de3da57026de // indirect - golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a // indirect + golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4 // indirect + golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886 // indirect + golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6 // indirect golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect - golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb // indirect - google.golang.org/grpc v1.45.0 // indirect - google.golang.org/protobuf v1.27.1 // indirect + google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 // indirect + google.golang.org/grpc v1.46.0 // indirect + google.golang.org/protobuf v1.28.0 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect diff --git a/go.sum b/go.sum index 483176303..d4b7c3add 100644 --- a/go.sum +++ b/go.sum @@ -43,10 +43,15 @@ cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC cloud.google.com/go/compute 
v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= cloud.google.com/go/compute v1.5.0 h1:b1zWmYuuHz7gO9kDcM/EpHGr06UgsYNRpNJzI2kFiLM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc= +cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/profiler v0.2.0 h1:TZEKR39niWTuvpak6VNg+D8J5qTzJnyaD1Yl4BOU+d8= cloud.google.com/go/profiler v0.2.0/go.mod h1:Rn0g4ZAbYR1sLVP7GAmCZxid4dmtD/nURxcaxf6pngI= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= @@ -62,15 +67,25 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= cloud.google.com/go/storage v1.21.0 h1:HwnT2u2D309SFDHQII6m18HlrCi3jAXhUMTLOWXYH14= cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA= +cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= +cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= 
+cloud.google.com/go/trace v1.2.0 h1:oIaB4KahkIUOpLSAAjEJ8y2desbjY/x/RfP4O3KAtTI= +cloud.google.com/go/trace v1.2.0/go.mod h1:Wc8y/uYyOhPy12KEnXG9XGrvfMz5F5SrYecQlbW1rwM= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/99designs/gqlgen v0.17.2 h1:yczvlwMsfcVu/JtejqfrLwXuSP0yZFhmcss3caEvHw8= github.com/99designs/gqlgen v0.17.2/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o= +github.com/99designs/gqlgen v0.17.5 h1:bTgv7FQz3+NROg6ooHtlkaJ82Uqrp6e5sAziXTBo1hc= +github.com/99designs/gqlgen v0.17.5/go.mod h1:SNpLVzaF37rRLSAXtu8FKVp5I4zycneMmFX6NT4XGSU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 h1:JLLDOHEcoREA54hzOnjr8KQcZCvX0E8KhosjE0F1jaQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0/go.mod h1:Pe8G2QFgCaohbU/zHRBjn0YaFh9z8/HtuEDh/Lyo04E= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1 h1:LNl8Tg6N6qChoiM6lKC1Z44Z+CkPAdYx45L/lJDgThs= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1/go.mod h1:ZFRHg4zuQuc6sQyx16A53fB7kxPvPguSeE8GFVpT6rQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1 h1:JftlZfpIvptFTzSD/BXuoK9i0nT3D1GSz1EEBr+jjuM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1/go.mod h1:j+FS9VBW3mwtHBmm9KOJEy5Tq68fCp7fE/R9bV/flIM= github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= @@ -117,6 +132,7 @@ 
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XP github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= @@ -136,6 +152,7 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= @@ -259,6 +276,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7 
h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-github/v31 v31.0.0/go.mod h1:NQPZol8/1sMoWYGN2yaALIBytu17gAWfhbweiEed3pM= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= @@ -296,6 +315,10 @@ github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pf github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/googleapis/gax-go/v2 v2.2.0 h1:s7jOdKSaksJVOxE0Y/S32otcfiP+UQ0cL8/GTKaONwE= github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3 h1:eHv/jVY/JNop1xg2J9cBb4EzyMpWZoNCP1BslSAIkOI= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3/go.mod h1:h/KNeRx7oYU4SpA4SoY7W2/NxDKEEVuwA6j9A27L4OI= @@ -312,6 +335,8 @@ github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyC github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/websocket v1.4.2 
h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gregjones/httpcache v0.0.0-20170920190843-316c5e0ff04e/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= @@ -334,6 +359,8 @@ github.com/inconshreveable/log15 v0.0.0-20170622235902-74a0988b5f80/go.mod h1:cO github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE= github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= +github.com/jarcoal/httpmock v1.2.0 h1:gSvTxxFR/MEMfsGrvRbdfpRUMBStovlSRLw0Ep1bwwc= +github.com/jarcoal/httpmock v1.2.0/go.mod h1:oCoTsnAz4+UoOUIf5lJOWV2QQIW5UoeUI6aM2YnWAZk= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= @@ -378,11 +405,15 @@ github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsI github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/matryer/moq v0.2.3 h1:Q06vEqnBYjjfx5KKgHfYRKE/lvlRu+Nj+xodG4YdHnU= github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= +github.com/matryer/moq v0.2.7 h1:RtpiPUM8L7ZSCbSwK+QcZH/E9tgqAkFjKQxsRs25b4w= +github.com/matryer/moq v0.2.7/go.mod 
h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk= github.com/mattn/go-colorable v0.0.10-0.20170816031813-ad5389df28cd/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.2/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= @@ -391,8 +422,11 @@ github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27k github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/mapstructure v0.0.0-20170523030023-d0303fe80992/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/montanaflynn/stats 
v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= @@ -418,6 +452,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/ravilushqa/otelgqlgen v0.6.0 h1:SbBmlE1/6Z4NDjCn96Ksbb41BIEhIf3dBT1WlGzNOr4= github.com/ravilushqa/otelgqlgen v0.6.0/go.mod h1:QP2vU3CSdJ2kYJkZl1zvTZWGgd2qEUxSWp1agqZjZ9A= +github.com/ravilushqa/otelgqlgen v0.6.1 h1:KoRURWiQfthje/G6hG6zDF9QjoEFrmHgb3mAe5kZZ7k= +github.com/ravilushqa/otelgqlgen v0.6.1/go.mod h1:2SUPOCCsJdvbyfLtZI81C/1Q76jSFNFs/2NrtyTI2AQ= github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -428,6 +464,8 @@ github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/samber/lo v1.11.0 h1:JfeYozXL1xfkhRUFOfH13ociyeiLSC/GRJjGKI668xM= github.com/samber/lo v1.11.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A= +github.com/samber/lo v1.21.0 h1:FSby8pJQtX4KmyddTCCGhc3JvnnIVrDA+NW37rG+7G8= +github.com/samber/lo v1.21.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A= github.com/sendgrid/rest v2.6.6+incompatible h1:3rO5UTPhLQo6fjytWwdwRWclP101CqErg2klf8LneB4= github.com/sendgrid/rest v2.6.6+incompatible/go.mod h1:kXX7q3jZtJXK5c5qK83bSGMdV6tsOE70KbHoqJls4lE= github.com/sendgrid/sendgrid-go v3.11.1+incompatible h1:ai0+woZ3r/+tKLQExznak5XerOFoD6S7ePO0lMV8WXo= @@ -484,6 +522,8 @@ github.com/uber/jaeger-lib v2.4.1+incompatible 
h1:td4jdvLcExb4cBISKIpHuGoVXh+dVK github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/urfave/cli/v2 v2.4.0 h1:m2pxjjDFgDxSPtO8WSdbndj17Wu2y8vOT86wE/tjr+I= +github.com/urfave/cli/v2 v2.4.0/go.mod h1:NX9W0zmTvedE5oDoOMs2RTC8RvdK98NTYZE5LbaEYPg= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4= @@ -493,6 +533,8 @@ github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/vektah/gqlparser/v2 v2.4.1 h1:QOyEn8DAPMUMARGMeshKDkDgNmVoEaEGiDB0uWxcSlQ= github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= +github.com/vektah/gqlparser/v2 v2.4.2 h1:29TGc6QmhEUq5fll+2FPoTmhUhR65WEKN4VK/jo0OlM= +github.com/vektah/gqlparser/v2 v2.4.2/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2 h1:akYIkZ28e6A96dkWNJQu3nmCzH3YfwMPQExUYDaRv7w= @@ -515,6 +557,8 @@ go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4S go.mongodb.org/mongo-driver v1.8.4/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.mongodb.org/mongo-driver v1.9.0 h1:f3aLGJvQmBl8d9S40IL+jEyBC6hfLPbJjv9t5hEM9ck= go.mongodb.org/mongo-driver v1.9.0/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= +go.mongodb.org/mongo-driver v1.9.1 h1:m078y9v7sBItkt1aaoe2YlvWEXcD263e1a4E1fBrJ1c= 
+go.mongodb.org/mongo-driver v1.9.1/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -525,34 +569,59 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib v1.4.0 h1:o+obgKZArn1GbM8zPCLYU9LZCI7lL6GcTZArn0qz1yw= go.opentelemetry.io/contrib v1.4.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= +go.opentelemetry.io/contrib v1.6.0 h1:xJawAzMuR3s4Au5p/ABHqYFychHjK2AHB9JvkBuBbTA= +go.opentelemetry.io/contrib v1.6.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0 h1:Qj331G/6VDsXEYMgeRRV7d+zGa6KKDkJtP6q8DdGeZg= go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0/go.mod h1:3k7OUuzfLbCMIVZybXejR2TzJcog/5/HSnUj3m0ISso= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0 h1:bkyJgifVcPo1w8HYf1K0ExtgdmNgxyVa02o/yFDrSAA= +go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0/go.mod h1:rmdIBqEgyXERsERn9CjVXXPL9qAinIsID+X9AhBnzOQ= go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0 h1:401vSW2p/bBvNuAyy8AIT7PoLHQCtuuGVK+ttC5FmwQ= go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0/go.mod h1:OfY26sPTH7bTcD8Fxwj/nlC7wmCCP7SR996JVh93sys= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0 h1:gNKQHn+q326vsi+kOskx9FCz9Jkz2fvxlf1y46dTN14= +go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0/go.mod h1:9WqBmOJ4AOChNHtnRBSCGlKN4PQf1coLTCK57fyXE/s= 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0 h1:sdwza9BScvbOFaZLhvKDQc54vQ8CWM8jD9BO2t+rP4E= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0/go.mod h1:4vatbW3QwS11DK0H0SB7FR31/VbthXcYorswdkVXdyg= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.31.0 h1:woM+Mb4d0A+Dxa3rYPenSN5ZeS9qHUvE8rlObiLRXTY= go.opentelemetry.io/contrib/propagators/b3 v1.6.0 h1:rHeNbko1wNe1Sazpw5IJD83x43lfzMnDb8vckdKxRu8= go.opentelemetry.io/contrib/propagators/b3 v1.6.0/go.mod h1:6kJAkL2/nNqP9AYhm/8j4dzVU8BfpcvYr2cy25RGBak= +go.opentelemetry.io/contrib/propagators/b3 v1.7.0 h1:oRAenUhj+GFttfIp3gj7HYVzBhPOHgq/dWPDSmLCXSY= +go.opentelemetry.io/contrib/propagators/b3 v1.7.0/go.mod h1:gXx7AhL4xXCF42gpm9dQvdohoDa2qeyEx4eIIxqK+h4= go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= go.opentelemetry.io/otel v1.1.0/go.mod h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= go.opentelemetry.io/otel v1.6.0/go.mod h1:bfJD2DZVw0LBxghOTlgnlI0CV3hLDu9XF/QKOUXMTQQ= go.opentelemetry.io/otel v1.6.1 h1:6r1YrcTenBvYa1x491d0GGpTVBsNECmrc/K6b+zDeis= go.opentelemetry.io/otel v1.6.1/go.mod h1:blzUabWHkX6LJewxvadmzafgh/wnvBSDBdOuwkAtrWQ= +go.opentelemetry.io/otel v1.6.2/go.mod h1:MUBZHaB2cm6CahEBHQPq9Anos7IXynP/noVpjsxQTSc= +go.opentelemetry.io/otel v1.6.3 h1:FLOfo8f9JzFVFVyU+MSRJc2HdEAXQgm7pIv2uFKRSZE= +go.opentelemetry.io/otel v1.6.3/go.mod h1:7BgNga5fNlF/iZjG06hM3yofffp0ofKCDwSXx1GC4dI= +go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM= +go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= go.opentelemetry.io/otel/internal/metric v0.24.0 h1:O5lFy6kAl0LMWBjzy3k//M8VjEaTDWL9DPJuqZmWIAA= go.opentelemetry.io/otel/internal/metric v0.24.0/go.mod h1:PSkQG+KuApZjBpC6ea6082ZrWUUy/w132tJ/LOU3TXk= go.opentelemetry.io/otel/metric v0.24.0 h1:Rg4UYHS6JKR1Sw1TxnI13z7q/0p/XAbgIqUTagvLJuU= 
go.opentelemetry.io/otel/metric v0.24.0/go.mod h1:tpMFnCD9t+BEGiWY2bWF5+AwjuAdM0lSowQ4SBA3/K4= +go.opentelemetry.io/otel/metric v0.28.0 h1:o5YNh+jxACMODoAo1bI7OES0RUW4jAMae0Vgs2etWAQ= go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= go.opentelemetry.io/otel/sdk v1.6.0/go.mod h1:PjLRUfDsoPy0zl7yrDGSUqjj43tL7rEtFdCEiGlxXRM= go.opentelemetry.io/otel/sdk v1.6.1 h1:ZmcNyMhcuAYIb/Nr6QhBPTMopMTbov/47wHt1gibkoY= go.opentelemetry.io/otel/sdk v1.6.1/go.mod h1:IVYrddmFZ+eJqu2k38qD3WezFR2pymCzm8tdxyh3R4E= +go.opentelemetry.io/otel/sdk v1.6.2 h1:wxY+YrfpGJfjxtm7SFBMJp9APDMZjDG+ErZOs/wkubg= +go.opentelemetry.io/otel/sdk v1.6.2/go.mod h1:M2r4VCm1Yurk4E+fWtP2p+QzFDHMFEqhGdbtQ7zRf+k= +go.opentelemetry.io/otel/sdk v1.6.3/go.mod h1:A4iWF7HTXa+GWL/AaqESz28VuSBIcZ+0CV+IzJ5NMiQ= +go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0= +go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU= go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= go.opentelemetry.io/otel/trace v1.6.0/go.mod h1:qs7BrU5cZ8dXQHBGxHMOxwME/27YH2qEp4/+tZLLwJE= go.opentelemetry.io/otel/trace v1.6.1 h1:f8c93l5tboBYZna1nWk0W9DYyMzJXDWdZcJZ0Kb400U= go.opentelemetry.io/otel/trace v1.6.1/go.mod h1:RkFRM1m0puWIq10oxImnGEduNBzxiN7TXluRBtE+5j0= +go.opentelemetry.io/otel/trace v1.6.2/go.mod h1:RMqfw8Mclba1p7sXDmEDBvrB8jw65F6GOoN1fyyXTzk= +go.opentelemetry.io/otel/trace v1.6.3 h1:IqN4L+5b0mPNjdXIiZ90Ni4Bl5BRkDQywePLWemd9bc= +go.opentelemetry.io/otel/trace v1.6.3/go.mod h1:GNJQusJlUgZl9/TQBPKU/Y/ty+0iVB5fjhKeJGZPGFs= +go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o= +go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= 
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= @@ -569,9 +638,12 @@ golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 h1:tkVvjkPTB7pnW3jnid7kNyAMPVWllTNOf/qKDze4p9o= golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88 h1:Tgea0cVUD0ivh5ADBX4WwuI12DUd2to3nCYe2eayMIw= +golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -584,6 +656,8 @@ golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EH golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 h1:K3x+yU+fbot38x5bQbU2QqUAVyYLEktdNH2GxZLnM3U= golang.org/x/exp 
v0.0.0-20220407100705-7b9b53b0aca4/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= +golang.org/x/exp v0.0.0-20220428152302-39d4317da171 h1:TfdoLivD44QwvssI9Sv1xwa5DcL5XQr4au4sZ2F2NV4= +golang.org/x/exp v0.0.0-20220428152302-39d4317da171/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -655,6 +729,9 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220325170049-de3da57026de h1:pZB1TWnKi+o4bENlbzAgLrEbY4RMYmUIRobMcSmfeYc= golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4 h1:HVyaeDAYux4pnY+D/SiwmLOR36ewZ4iGQIIrtnuCjFA= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20170912212905-13449ad91cb2/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -676,6 +753,8 @@ golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a 
h1:qfl7ob3DIEs3Ml9oLuPwY2N04gymzAW04WsUQHIClgM= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20170517211232-f52d1811a629/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -767,6 +846,9 @@ golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886 h1:eJv7u3ksNXoLbGSKuv2s/SIO4tJVxc/A+MTpzxDgz/Q= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6 h1:nonptSpoQ4vQjyraW20DXPAglgQfVnM9ZC6MmNLMR60= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -856,6 +938,8 @@ golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.0.0-20170921000349-586095a6e407/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -899,6 +983,9 @@ google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/S google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= google.golang.org/api v0.74.0 h1:ExR2D+5TYIrMphWgs5JCgwRhEDlPDXXrLwHHMgPHTXE= google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.79.0 h1:vaOcm0WdXvhGkci9a0+CcQVZqSRjN8ksSBlWv99f8Pg= +google.golang.org/api v0.79.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -950,6 +1037,7 @@ google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= @@ -991,6 +1079,14 @@ google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2 google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb h1:0m9wktIpOxGw+SSKmydXWB3Z3GTfcPP6+q75HCQa6HI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf h1:JTjwKJX9erVpsw17w+OIPP7iAgEkN/r8urhWSunEDTs= +google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 h1:q1kiSVscqoDeqTF27eQ2NnLLDmqF0I373qQNXYMy0fo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/grpc v1.2.1-0.20170921194603-d4b75ebd4f9f/go.mod 
h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1021,6 +1117,8 @@ google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1036,6 +1134,8 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From b98406caaad5cd041ff31674c234097862217b2f Mon Sep 17 00:00:00 2001 From: mimoham24 
<69579255+mimoham24@users.noreply.github.com> Date: Fri, 13 May 2022 12:06:45 +0400 Subject: [PATCH 214/253] fix createTagItem mutation (#147) --- internal/usecase/interactor/tag.go | 2 +- pkg/tag/group.go | 7 ++++ pkg/tag/group_test.go | 53 ++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 1 deletion(-) diff --git a/internal/usecase/interactor/tag.go b/internal/usecase/interactor/tag.go index f8b6a8db9..c37aa5aeb 100644 --- a/internal/usecase/interactor/tag.go +++ b/internal/usecase/interactor/tag.go @@ -70,7 +70,7 @@ func (i *Tag) CreateItem(ctx context.Context, inp interfaces.CreateTagItemParam, } if parent != nil { - parent.Tags().Add(item.ID()) + parent.AddTag(item.ID()) } itemt := tag.Tag(item) diff --git a/pkg/tag/group.go b/pkg/tag/group.go index 0a5302615..138e650d4 100644 --- a/pkg/tag/group.go +++ b/pkg/tag/group.go @@ -18,3 +18,10 @@ func (g *Group) RemoveTag(ids ...ID) { } g.tags = g.tags.Delete(ids...) } + +func (g *Group) AddTag(ids ...ID) { + if g == nil { + return + } + g.tags = g.tags.Add(ids...) +} diff --git a/pkg/tag/group_test.go b/pkg/tag/group_test.go index c979dce2b..d9b1281dc 100644 --- a/pkg/tag/group_test.go +++ b/pkg/tag/group_test.go @@ -3,6 +3,8 @@ package tag import ( "testing" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" ) @@ -107,3 +109,54 @@ func TestGroupBuilder_Build(t *testing.T) { }) } } + +func TestGroup_AddTag(t *testing.T) { + sid := id.NewSceneID() + tid := id.NewTagID() + tests := []struct { + name string + tag *Group + input IDList + expected IDList + }{ + { + name: "should add a tag", + tag: NewGroup().NewID().Scene(sid).Label("foo").MustBuild(), + input: IDList{tid}, + expected: IDList{tid}, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.tag.AddTag(tc.input...) 
+ assert.Equal(tt, tc.tag.tags, tc.expected) + }) + } +} + +func TestGroup_RemoveTag(t *testing.T) { + sid := id.NewSceneID() + tid := id.NewTagID() + tid2 := id.NewTagID() + tests := []struct { + name string + tag *Group + input IDList + expected IDList + }{ + { + name: "should remove a tag", + tag: NewGroup().NewID().Scene(sid).Label("foo").Tags(IDList{tid, tid2}).MustBuild(), + input: IDList{tid2}, + expected: IDList{tid}, + }, + } + for _, tc := range tests { + t.Run(tc.name, func(tt *testing.T) { + tt.Parallel() + tc.tag.RemoveTag(tc.input...) + assert.Equal(tt, tc.tag.tags, tc.expected) + }) + } +} From 84fe1323bacaee826ae37c662e3071a39e22b42d Mon Sep 17 00:00:00 2001 From: lavalse Date: Tue, 17 May 2022 09:26:00 +0000 Subject: [PATCH 215/253] v0.7.0 --- CHANGELOG.md | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e59172bf5..e5121edd8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,33 @@ # Changelog All notable changes to this project will be documented in this file. 
+## 0.7.0 - 2022-05-13 + +### ๐Ÿš€ Features + +- Add an opacity slider to map tiles ([#138](https://github.com/reearth/reearth-backend/pull/138)) [`4f72b8`](https://github.com/reearth/reearth-backend/commit/4f72b8) + +### ๐Ÿ”ง Bug Fixes + +- Signup api requires password field [`a79376`](https://github.com/reearth/reearth-backend/commit/a79376) +- "$in needs an array" error from mongo FindByIDs ([#142](https://github.com/reearth/reearth-backend/pull/142)) [`58e1b0`](https://github.com/reearth/reearth-backend/commit/58e1b0) +- Name field is available again in signup api ([#144](https://github.com/reearth/reearth-backend/pull/144)) [`651852`](https://github.com/reearth/reearth-backend/commit/651852) + +### โœจ Refactor + +- Retry mongo lock ([#145](https://github.com/reearth/reearth-backend/pull/145)) [`ddaeaa`](https://github.com/reearth/reearth-backend/commit/ddaeaa) + +### ๐Ÿงช Testing + +- Add Mongo Asset's [`FindByID`](https://github.com/reearth/reearth-backend/commit/FindByID) unit testing ([#139](https://github.com/reearth/reearth-backend/pull/139)) [`35f9db`](https://github.com/reearth/reearth-backend/commit/35f9db) +- Refactor mongo connect helper function [`751e66`](https://github.com/reearth/reearth-backend/commit/751e66) +- Util.SyncMap.Range test sometimes fails ([#143](https://github.com/reearth/reearth-backend/pull/143)) [`c2b969`](https://github.com/reearth/reearth-backend/commit/c2b969) + +### Miscellaneous Tasks + +- Typo [`secrit`](https://github.com/reearth/reearth-backend/commit/secrit) on env example ([#137](https://github.com/reearth/reearth-backend/pull/137)) [`2c0220`](https://github.com/reearth/reearth-backend/commit/2c0220) +- Update the go modules ([#146](https://github.com/reearth/reearth-backend/pull/146)) [`89009b`](https://github.com/reearth/reearth-backend/commit/89009b) + ## 0.6.1 - 2022-04-20 ### ๐Ÿ”ง Bug Fixes From a21ace6d407dd7dd164ce38a08b65a3c1244d2b5 Mon Sep 17 00:00:00 2001 From: lby Date: Fri, 20 May 2022 16:39:59 +0800 
Subject: [PATCH 216/253] Feat: add the property of scene mode (#148) * feat: add sceneMode * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> --- pkg/builtin/manifest.yml | 12 ++++++++++++ pkg/builtin/manifest_ja.yml | 3 +++ 2 files changed, 15 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 5caf26b22..aa064b9e8 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -78,6 +78,18 @@ extensions: type: string title: Cesium Ion API access token description: Cesium Ion account users may use their personal API keys to be able to use their Cesium Ion assets(tile data, 3D data, etc) with their project. + - id: sceneMode + type: string + title: Scene mode + description: Specify scene mode. + defaultValue: scene3d + choices: + - key: scene3d + label: Scene 3D + - key: scene2d + label: Scene 2D + - key: columbus + label: Columbus View - id: cameraLimiter title: Camera Limiter description: Set the camera limiting box. 
diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index cea488d93..93346de67 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -35,6 +35,9 @@ extensions: ion: title: Cesium Icon APIใ‚ขใ‚ฏใ‚ปใ‚นใƒˆใƒผใ‚ฏใƒณ description: ่‡ช่บซใฎCesium Ionใ‚ขใ‚ซใ‚ฆใƒณใƒˆใ‹ใ‚‰APIใ‚ญใƒผใ‚’็™บ่กŒใ—ใ€ใ“ใ“ใซ่จญๅฎšใ—ใพใ™ใ€‚Cesium Ionใฎใ‚ขใ‚ปใƒƒใƒˆ๏ผˆใ‚ฟใ‚คใƒซใƒ‡ใƒผใ‚ฟใ€3Dใƒ‡ใƒผใ‚ฟใชใฉ๏ผ‰ใฎไฝฟ็”จใŒๅฏ่ƒฝใซใชใ‚‹ใŸใ‚ใ€่จญๅฎšใ‚’ๆŽจๅฅจใ—ใพใ™ใ€‚ + sceneMode: + title: ใ‚ทใƒผใƒณใƒขใƒผใƒ‰ + description: ๅœฐ็ƒๅ„€ใฎ่กจ็คบใ‚’2Dใ€2.5Dใ€3Dใ‹ใ‚‰้ธๆŠžใ—ใพใ™ใ€‚ cameraLimiter: title: ใ‚ซใƒกใƒฉ็ฏ„ๅ›ฒๅˆถ้™ description: ใ‚ซใƒกใƒฉใฎ็งปๅ‹•ใงใใ‚‹็ฏ„ๅ›ฒใ‚’ๅˆถ้™ใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ From 6372bce3eb499b598a24f10e57d578af26575bfc Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Tue, 24 May 2022 09:33:31 +0300 Subject: [PATCH 217/253] chore: update go modules (#150) --- go.mod | 20 ++++++++++---------- go.sum | 23 +++++++++++++++++++++++ 2 files changed, 33 insertions(+), 10 deletions(-) diff --git a/go.mod b/go.mod index cd5c125d5..78dfbbd51 100644 --- a/go.mod +++ b/go.mod @@ -1,10 +1,10 @@ module github.com/reearth/reearth-backend require ( - cloud.google.com/go/profiler v0.2.0 - cloud.google.com/go/storage v1.22.0 + cloud.google.com/go/profiler v0.3.0 + cloud.google.com/go/storage v1.22.1 github.com/99designs/gqlgen v0.17.5 - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1 + github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0 github.com/auth0/go-jwt-middleware/v2 v2.0.1 github.com/blang/semver v3.5.1+incompatible github.com/caos/oidc v1.2.0 @@ -42,11 +42,11 @@ require ( go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0 go.opentelemetry.io/otel v1.7.0 go.opentelemetry.io/otel/sdk v1.7.0 - golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88 - golang.org/x/exp 
v0.0.0-20220428152302-39d4317da171 + golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898 + golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf golang.org/x/text v0.3.7 golang.org/x/tools v0.1.10 - google.golang.org/api v0.79.0 + google.golang.org/api v0.80.0 gopkg.in/go-playground/colors.v1 v1.2.0 gopkg.in/h2non/gock.v1 v1.1.2 gopkg.in/square/go-jose.v2 v2.6.0 @@ -57,7 +57,7 @@ require ( cloud.google.com/go/compute v1.6.1 // indirect cloud.google.com/go/iam v0.3.0 // indirect cloud.google.com/go/trace v1.2.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0 // indirect github.com/HdrHistogram/hdrhistogram-go v1.0.1 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect @@ -78,8 +78,8 @@ require ( github.com/golang/protobuf v1.5.2 // indirect github.com/golang/snappy v0.0.3 // indirect github.com/google/go-cmp v0.5.8 // indirect - github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 // indirect - github.com/googleapis/gax-go/v2 v2.3.0 // indirect + github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f // indirect + github.com/googleapis/gax-go/v2 v2.4.0 // indirect github.com/googleapis/go-type-adapters v1.0.0 // indirect github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 // indirect github.com/gorilla/handlers v1.5.1 // indirect @@ -119,7 +119,7 @@ require ( golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 // indirect + google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 // indirect google.golang.org/grpc v1.46.0 // indirect google.golang.org/protobuf v1.28.0 // indirect 
gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect diff --git a/go.sum b/go.sum index d4b7c3add..287c3e1d2 100644 --- a/go.sum +++ b/go.sum @@ -54,6 +54,8 @@ cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/profiler v0.2.0 h1:TZEKR39niWTuvpak6VNg+D8J5qTzJnyaD1Yl4BOU+d8= cloud.google.com/go/profiler v0.2.0/go.mod h1:Rn0g4ZAbYR1sLVP7GAmCZxid4dmtD/nURxcaxf6pngI= +cloud.google.com/go/profiler v0.3.0 h1:R6y/xAeifaUXxd2x6w+jIwKxoKl8Cv5HJvcvASTPWJo= +cloud.google.com/go/profiler v0.3.0/go.mod h1:9wYk9eY4iZHsev8TQb61kh3wiOiSyz/xOYixWPzweCU= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -69,6 +71,8 @@ cloud.google.com/go/storage v1.21.0 h1:HwnT2u2D309SFDHQII6m18HlrCi3jAXhUMTLOWXYH cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA= cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= +cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= cloud.google.com/go/trace v1.2.0 h1:oIaB4KahkIUOpLSAAjEJ8y2desbjY/x/RfP4O3KAtTI= @@ -84,8 +88,12 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0/go.mod h1:Pe8G2QFgCaohbU/zHRBjn0YaFh9z8/HtuEDh/Lyo04E= 
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1 h1:LNl8Tg6N6qChoiM6lKC1Z44Z+CkPAdYx45L/lJDgThs= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1/go.mod h1:ZFRHg4zuQuc6sQyx16A53fB7kxPvPguSeE8GFVpT6rQ= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0 h1:8vpIORQCKkwM0r/IZ1faAddG56t7byhqSxATphc+8MI= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0/go.mod h1:HuFNmMWVYJDj2IxyIlUOW2vguRBM8ct9mOuAtWRU2EQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1 h1:JftlZfpIvptFTzSD/BXuoK9i0nT3D1GSz1EEBr+jjuM= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1/go.mod h1:j+FS9VBW3mwtHBmm9KOJEy5Tq68fCp7fE/R9bV/flIM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0 h1:tfaeStvrph8eJEmo1iji3A4DXen3s6ZMM17nQmvo0WA= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0/go.mod h1:j+FS9VBW3mwtHBmm9KOJEy5Tq68fCp7fE/R9bV/flIM= github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= @@ -303,6 +311,8 @@ github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 h1:hQb7P4XOakoaN+LET7TJ7PNoBsGm8Tf4lNtAdNwkxDE= github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f h1:VrKTY4lquiy1oJzVZgXrauku9Jx9P+POv/gTLakG4Wk= +github.com/google/pprof 
v0.0.0-20220412212628-83db2b799d1f/go.mod h1:Pt31oes+eGImORns3McJn8zHefuQl2rG8l6xQjGYB4U= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -317,6 +327,8 @@ github.com/googleapis/gax-go/v2 v2.2.0 h1:s7jOdKSaksJVOxE0Y/S32otcfiP+UQ0cL8/GTK github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI= github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= @@ -644,6 +656,8 @@ golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 h1:tkVvjkPTB7pnW3jnid7kNy golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88 h1:Tgea0cVUD0ivh5ADBX4WwuI12DUd2to3nCYe2eayMIw= golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898 h1:SLP7Q4Di66FONjDJbCYrCRrh97focO6sLogHO7/g8F0= +golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= 
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -658,6 +672,8 @@ golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 h1:K3x+yU+fbot38x5bQbU2QqUAV golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/exp v0.0.0-20220428152302-39d4317da171 h1:TfdoLivD44QwvssI9Sv1xwa5DcL5XQr4au4sZ2F2NV4= golang.org/x/exp v0.0.0-20220428152302-39d4317da171/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= +golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf h1:oXVg4h2qJDd9htKxb5SCpFBHLipW6hXmL3qpUixS2jw= +golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf/go.mod h1:yh0Ynu2b5ZUe3MQfp2nM0ecK7wsgouWTDN0FNeJuIys= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -847,6 +863,7 @@ golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886 h1:eJv7u3ksNXoLbGSKuv2s/SIO4tJVxc/A+MTpzxDgz/Q= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6 h1:nonptSpoQ4vQjyraW20DXPAglgQfVnM9ZC6MmNLMR60= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod 
h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= @@ -984,8 +1001,11 @@ google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc google.golang.org/api v0.74.0 h1:ExR2D+5TYIrMphWgs5JCgwRhEDlPDXXrLwHHMgPHTXE= google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= google.golang.org/api v0.79.0 h1:vaOcm0WdXvhGkci9a0+CcQVZqSRjN8ksSBlWv99f8Pg= google.golang.org/api v0.79.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.80.0 h1:IQWaGVCYnsm4MO3hh+WtSXMzMzuyFx/fuR8qkN3A0Qo= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1085,8 +1105,11 @@ google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 h1:q1kiSVscqoDeqTF27eQ2NnLLDmqF0I373qQNXYMy0fo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 h1:2D0OT6tPVdrQTOnVe1VQjfJPTED6EZ7fdJ/f6Db6OsY= 
+google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/grpc v1.2.1-0.20170921194603-d4b75ebd4f9f/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= From 696f35b243ffcf7b6ea6dac39ddd052f665fd5b3 Mon Sep 17 00:00:00 2001 From: lby Date: Fri, 27 May 2022 09:09:11 +0800 Subject: [PATCH 218/253] Refactor: Update scene mode key (#152) * update scene mode keys * update scene mode default value --- pkg/builtin/manifest.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index aa064b9e8..6cc00d3ed 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -82,11 +82,11 @@ extensions: type: string title: Scene mode description: Specify scene mode. - defaultValue: scene3d + defaultValue: 3d choices: - - key: scene3d + - key: 3d label: Scene 3D - - key: scene2d + - key: 2d label: Scene 2D - key: columbus label: Columbus View From c6e98c0f5855bef98524cfee6e28e1058c44bfbf Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 27 May 2022 20:02:03 +0900 Subject: [PATCH 219/253] refactor: declarative description of use case structure (asset only) (#151) Co-authored-by: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> --- internal/app/usecase.go | 3 +- internal/infrastructure/memory/transaction.go | 40 +++- .../infrastructure/memory/transaction_test.go | 38 ++++ .../infrastructure/mongo/mongodoc/client.go | 4 + internal/usecase/interactor/asset.go | 134 +++++------- internal/usecase/interactor/common.go | 4 +- internal/usecase/interactor/usecase.go | 121 +++++++++++ internal/usecase/interactor/usecase_test.go | 202 ++++++++++++++++++ internal/usecase/repo/container.go | 7 +- internal/usecase/repo/transaction.go | 1 + 10 files changed, 461 
insertions(+), 93 deletions(-) create mode 100644 internal/infrastructure/memory/transaction_test.go create mode 100644 internal/usecase/interactor/usecase.go create mode 100644 internal/usecase/interactor/usecase_test.go diff --git a/internal/app/usecase.go b/internal/app/usecase.go index 69b68572c..ac3a1800c 100644 --- a/internal/app/usecase.go +++ b/internal/app/usecase.go @@ -15,11 +15,10 @@ func UsecaseMiddleware(r *repo.Container, g *gateway.Container, config interacto var r2 *repo.Container if op := adapter.Operator(ctx); op != nil && r != nil { // apply filters to repos - r3 := r.Filtered( + r2 = r.Filtered( repo.TeamFilterFromOperator(op), repo.SceneFilterFromOperator(op), ) - r2 = &r3 } else { r2 = r } diff --git a/internal/infrastructure/memory/transaction.go b/internal/infrastructure/memory/transaction.go index c0ff8b77b..c66250ccc 100644 --- a/internal/infrastructure/memory/transaction.go +++ b/internal/infrastructure/memory/transaction.go @@ -6,22 +6,52 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" ) -type Transaction struct{} +type Transaction struct { + committed int + beginerror error + enderror error +} -type Tx struct{} +type Tx struct { + t *Transaction + committed bool + enderror error +} func NewTransaction() *Transaction { return &Transaction{} } +func (t *Transaction) SetBeginError(err error) { + t.beginerror = err +} + +func (t *Transaction) SetEndError(err error) { + t.enderror = err +} + +func (t *Transaction) Committed() int { + return t.committed +} + func (t *Transaction) Begin() (repo.Tx, error) { - return &Tx{}, nil + if t.beginerror != nil { + return nil, t.beginerror + } + return &Tx{t: t, enderror: t.enderror}, nil } func (t *Tx) Commit() { - // do nothing + t.committed = true + if t.t != nil { + t.t.committed++ + } } func (t *Tx) End(_ context.Context) error { - return nil + return t.enderror +} + +func (t *Tx) IsCommitted() bool { + return t.committed } diff --git 
a/internal/infrastructure/memory/transaction_test.go b/internal/infrastructure/memory/transaction_test.go new file mode 100644 index 000000000..b8df60e2a --- /dev/null +++ b/internal/infrastructure/memory/transaction_test.go @@ -0,0 +1,38 @@ +package memory + +import ( + "context" + "errors" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTransaction_Committed(t *testing.T) { + tr := NewTransaction() + tx, err := tr.Begin() + assert.NoError(t, err) + assert.Equal(t, 0, tr.Committed()) + tx.Commit() + assert.Equal(t, 1, tr.Committed()) + assert.NoError(t, tx.End(context.Background())) + assert.NoError(t, err) +} + +func TestTransaction_SetBeginError(t *testing.T) { + err := errors.New("a") + tr := NewTransaction() + tr.SetBeginError(err) + tx, err2 := tr.Begin() + assert.Nil(t, tx) + assert.Same(t, err, err2) +} + +func TestTransaction_SetEndError(t *testing.T) { + err := errors.New("a") + tr := NewTransaction() + tr.SetEndError(err) + tx, err2 := tr.Begin() + assert.NoError(t, err2) + assert.Same(t, err, tx.End(context.Background())) +} diff --git a/internal/infrastructure/mongo/mongodoc/client.go b/internal/infrastructure/mongo/mongodoc/client.go index 7257c2fd5..7c651998a 100644 --- a/internal/infrastructure/mongo/mongodoc/client.go +++ b/internal/infrastructure/mongo/mongodoc/client.go @@ -433,3 +433,7 @@ func (t *Tx) End(ctx context.Context) error { t.session.EndSession(ctx) return nil } + +func (t *Tx) IsCommitted() bool { + return t.commit +} diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index 1bf1331d7..5d90c94a9 100644 --- a/internal/usecase/interactor/asset.go +++ b/internal/usecase/interactor/asset.go @@ -14,7 +14,6 @@ import ( ) type Asset struct { - common repos *repo.Container gateways *gateway.Container } @@ -31,95 +30,66 @@ func (i *Asset) Fetch(ctx context.Context, assets []id.AssetID, operator *usecas } func (i *Asset) FindByTeam(ctx context.Context, tid id.TeamID, keyword *string, sort 
*asset.SortType, p *usecase.Pagination, operator *usecase.Operator) ([]*asset.Asset, *usecase.PageInfo, error) { - if err := i.CanReadTeam(tid, operator); err != nil { - return nil, nil, err - } - - return i.repos.Asset.FindByTeam(ctx, tid, repo.AssetFilter{ - Sort: sort, - Keyword: keyword, - Pagination: p, - }) + return Run2( + ctx, operator, i.repos, + Usecase().WithReadableTeams(tid), + func() ([]*asset.Asset, *usecase.PageInfo, error) { + return i.repos.Asset.FindByTeam(ctx, tid, repo.AssetFilter{ + Sort: sort, + Keyword: keyword, + Pagination: p, + }) + }, + ) } func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, operator *usecase.Operator) (result *asset.Asset, err error) { - if err := i.CanWriteTeam(inp.TeamID, operator); err != nil { - return nil, err - } - if inp.File == nil { return nil, interfaces.ErrFileNotIncluded } - tx, err := i.repos.Transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - url, err := i.gateways.File.UploadAsset(ctx, inp.File) - if err != nil { - return nil, err - } - - result, err = asset.New(). - NewID(). - Team(inp.TeamID). - Name(path.Base(inp.File.Path)). - Size(inp.File.Size). - URL(url.String()). - Build() - if err != nil { - return nil, err - } - - if err = i.repos.Asset.Save(ctx, result); err != nil { - return - } - - tx.Commit() - return + return Run1( + ctx, operator, i.repos, + Usecase(). + WithReadableTeams(inp.TeamID). + Transaction(), + func() (*asset.Asset, error) { + url, err := i.gateways.File.UploadAsset(ctx, inp.File) + if err != nil { + return nil, err + } + + return asset.New(). + NewID(). + Team(inp.TeamID). + Name(path.Base(inp.File.Path)). + Size(inp.File.Size). + URL(url.String()). 
+ Build() + }) } func (i *Asset) Remove(ctx context.Context, aid id.AssetID, operator *usecase.Operator) (result id.AssetID, err error) { - asset, err := i.repos.Asset.FindByID(ctx, aid) - if err != nil { - return aid, err - } - - tx, err := i.repos.Transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - team, err := i.repos.Team.FindByID(ctx, asset.Team()) - if err != nil { - return aid, err - } - - if !team.Members().ContainsUser(operator.User) { - return aid, interfaces.ErrOperationDenied - } - - if url, _ := url.Parse(asset.URL()); url != nil { - if err = i.gateways.File.RemoveAsset(ctx, url); err != nil { - return aid, err - } - } - - if err = i.repos.Asset.Remove(ctx, aid); err != nil { - return - } - - tx.Commit() - return aid, nil + return Run1( + ctx, operator, i.repos, + Usecase().Transaction(), + func() (id.AssetID, error) { + asset, err := i.repos.Asset.FindByID(ctx, aid) + if err != nil { + return aid, err + } + + if ok := operator.IsWritableTeam(asset.Team()); !ok { + return aid, interfaces.ErrOperationDenied + } + + if url, _ := url.Parse(asset.URL()); url != nil { + if err := i.gateways.File.RemoveAsset(ctx, url); err != nil { + return aid, err + } + } + + return aid, i.repos.Asset.Remove(ctx, aid) + }, + ) } diff --git a/internal/usecase/interactor/common.go b/internal/usecase/interactor/common.go index 15b134497..10abb9960 100644 --- a/internal/usecase/interactor/common.go +++ b/internal/usecase/interactor/common.go @@ -5,10 +5,9 @@ import ( "errors" "net/url" + "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/interfaces" - - "github.com/reearth/reearth-backend/internal/usecase" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/project" @@ -46,6 
+45,7 @@ func NewContainer(r *repo.Container, g *gateway.Container, config ContainerConfi } } +// Deprecated: common will be deprecated. Please use the Usecase function instead. type common struct{} func (common) OnlyOperator(op *usecase.Operator) error { diff --git a/internal/usecase/interactor/usecase.go b/internal/usecase/interactor/usecase.go new file mode 100644 index 000000000..bd3c5f4d2 --- /dev/null +++ b/internal/usecase/interactor/usecase.go @@ -0,0 +1,121 @@ +package interactor + +import ( + "context" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" +) + +type uc struct { + tx bool + readableTeams id.TeamIDList + writableTeams id.TeamIDList + readableScenes id.SceneIDList + writableScenes id.SceneIDList +} + +func Usecase() *uc { + return &uc{} +} + +func (u *uc) WithReadableTeams(ids ...id.TeamID) *uc { + u.readableTeams = id.TeamIDList(ids).Clone() + return u +} + +func (u *uc) WithWritableTeams(ids ...id.TeamID) *uc { + u.writableTeams = id.TeamIDList(ids).Clone() + return u +} + +func (u *uc) WithReadablScenes(ids ...id.SceneID) *uc { + u.readableScenes = id.SceneIDList(ids).Clone() + return u +} + +func (u *uc) WithWritableScenes(ids ...id.SceneID) *uc { + u.writableScenes = id.SceneIDList(ids).Clone() + return u +} + +func (u *uc) Transaction() *uc { + u.tx = true + return u +} + +func Run0(ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() error) (err error) { + _, _, _, err = Run3( + ctx, op, r, e, + func() (_, _, _ any, err error) { + err = f() + return + }) + return +} + +func Run1[A any](ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() (A, error)) (a A, err error) { + a, _, _, err = Run3( + ctx, op, r, e, + func() (a A, _, _ any, err error) { + a, err = f() + return + }) + return +} + +func Run2[A, B any](ctx 
context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() (A, B, error)) (a A, b B, err error) { + a, b, _, err = Run3( + ctx, op, r, e, + func() (a A, b B, _ any, err error) { + a, b, err = f() + return + }) + return +} + +func Run3[A, B, C any](ctx context.Context, op *usecase.Operator, r *repo.Container, e *uc, f func() (A, B, C, error)) (_ A, _ B, _ C, err error) { + if err = e.checkPermission(op); err != nil { + return + } + + if e.tx && r.Transaction != nil { + tx, err2 := r.Transaction.Begin() + if err2 != nil { + err = err2 + return + } + defer func() { + if err == nil { + tx.Commit() + } + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + } + + return f() +} + +func (u *uc) checkPermission(op *usecase.Operator) error { + ok := true + if u.readableTeams != nil { + ok = op.IsReadableTeam(u.readableTeams...) + } + if ok && u.writableTeams != nil { + ok = op.IsWritableTeam(u.writableTeams...) + } + if ok && u.readableScenes != nil { + ok = op.IsReadableScene(u.readableScenes...) + } + if ok && u.writableScenes != nil { + ok = op.IsWritableScene(u.writableScenes...) 
+ } + if !ok { + return interfaces.ErrOperationDenied + } + return nil +} diff --git a/internal/usecase/interactor/usecase_test.go b/internal/usecase/interactor/usecase_test.go new file mode 100644 index 000000000..114deae46 --- /dev/null +++ b/internal/usecase/interactor/usecase_test.go @@ -0,0 +1,202 @@ +package interactor + +import ( + "context" + "errors" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" +) + +func TestUc_checkPermission(t *testing.T) { + tid := id.NewTeamID() + sid := id.NewSceneID() + + tests := []struct { + name string + op *usecase.Operator + readableTeams id.TeamIDList + writableTeams id.TeamIDList + readableScenes id.SceneIDList + writableScenes id.SceneIDList + wantErr bool + }{ + { + name: "nil operator", + wantErr: false, + }, + { + name: "nil operator 2", + readableTeams: id.TeamIDList{id.NewTeamID()}, + wantErr: false, + }, + { + name: "can read a team", + readableTeams: id.TeamIDList{tid}, + op: &usecase.Operator{ + ReadableTeams: id.TeamIDList{tid}, + }, + wantErr: true, + }, + { + name: "cannot read a team", + readableTeams: id.TeamIDList{id.NewTeamID()}, + op: &usecase.Operator{ + ReadableTeams: id.TeamIDList{}, + }, + wantErr: true, + }, + { + name: "can write a team", + writableTeams: id.TeamIDList{tid}, + op: &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + }, + wantErr: true, + }, + { + name: "cannot write a team", + writableTeams: id.TeamIDList{tid}, + op: &usecase.Operator{ + WritableTeams: id.TeamIDList{}, + }, + wantErr: true, + }, + { + name: "can read a scene", + readableScenes: id.SceneIDList{sid}, + op: &usecase.Operator{ + ReadableScenes: id.SceneIDList{sid}, + }, + wantErr: true, + }, + { + name: "cannot read 
a scene", + readableScenes: id.SceneIDList{sid}, + op: &usecase.Operator{ + ReadableScenes: id.SceneIDList{}, + }, + wantErr: true, + }, + { + name: "can write a scene", + writableScenes: id.SceneIDList{sid}, + op: &usecase.Operator{ + WritableScenes: id.SceneIDList{sid}, + }, + wantErr: true, + }, + { + name: "cannot write a scene", + writableScenes: id.SceneIDList{sid}, + op: &usecase.Operator{ + WritableScenes: id.SceneIDList{}, + }, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + e := &uc{ + readableTeams: tt.readableTeams, + writableTeams: tt.writableTeams, + readableScenes: tt.readableScenes, + writableScenes: tt.writableScenes, + } + got := e.checkPermission(tt.op) + if tt.wantErr { + assert.Equal(t, interfaces.ErrOperationDenied, got) + } else { + assert.Nil(t, got) + } + }) + } +} + +func TestUc(t *testing.T) { + teams := id.TeamIDList{id.NewTeamID(), id.NewTeamID(), id.NewTeamID()} + scenes := id.SceneIDList{id.NewSceneID(), id.NewSceneID(), id.NewSceneID()} + assert.Equal(t, &uc{}, Usecase()) + assert.Equal(t, &uc{readableTeams: teams}, (&uc{}).WithReadableTeams(teams...)) + assert.Equal(t, &uc{writableTeams: teams}, (&uc{}).WithWritableTeams(teams...)) + assert.Equal(t, &uc{readableScenes: scenes}, (&uc{}).WithReadablScenes(scenes...)) + assert.Equal(t, &uc{writableScenes: scenes}, (&uc{}).WithWritableScenes(scenes...)) + assert.Equal(t, &uc{tx: true}, (&uc{}).Transaction()) +} + +func TestRun(t *testing.T) { + ctx := context.Background() + err := errors.New("test") + a, b, c := &struct{}{}, &struct{}{}, &struct{}{} + tr := memory.NewTransaction() + r := &repo.Container{Transaction: tr} + + // regular1: without tx + gota, gotb, gotc, goterr := Run3( + ctx, nil, r, + Usecase(), + func() (any, any, any, error) { + return a, b, c, nil + }, + ) + assert.Same(t, a, gota) + assert.Same(t, b, gotb) + assert.Same(t, c, gotc) + assert.Nil(t, goterr) + assert.Equal(t, 0, tr.Committed()) // not 
committed + + // regular2: with tx + _ = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return nil + }, + ) + assert.Equal(t, 1, tr.Committed()) // committed + + // iregular1: the usecase returns an error + goterr = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return err + }, + ) + assert.Same(t, err, goterr) + assert.Equal(t, 1, tr.Committed()) // not committed + + // iregular2: tx.Begin returns an error + tr.SetBeginError(err) + tr.SetEndError(nil) + goterr = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return nil + }, + ) + assert.Same(t, err, goterr) + assert.Equal(t, 1, tr.Committed()) // not committed + + // iregular3: tx.End returns an error + tr.SetBeginError(nil) + tr.SetEndError(err) + goterr = Run0( + ctx, nil, r, + Usecase().Transaction(), + func() error { + return nil + }, + ) + assert.Same(t, err, goterr) + assert.Equal(t, 2, tr.Committed()) // committed but fails +} diff --git a/internal/usecase/repo/container.go b/internal/usecase/repo/container.go index 6f991c05d..7f42d17ce 100644 --- a/internal/usecase/repo/container.go +++ b/internal/usecase/repo/container.go @@ -32,8 +32,11 @@ type Container struct { User User } -func (c Container) Filtered(team TeamFilter, scene SceneFilter) Container { - return Container{ +func (c *Container) Filtered(team TeamFilter, scene SceneFilter) *Container { + if c == nil { + return c + } + return &Container{ Asset: c.Asset.Filtered(team), AuthRequest: c.AuthRequest, Config: c.Config, diff --git a/internal/usecase/repo/transaction.go b/internal/usecase/repo/transaction.go index 80175e237..6627b24d6 100644 --- a/internal/usecase/repo/transaction.go +++ b/internal/usecase/repo/transaction.go @@ -13,4 +13,5 @@ type Tx interface { // End finishes the transaction and do commit if Commit() was called once, or else do rollback. // This method is supposed to be called in the uscase layer using defer. 
End(context.Context) error + IsCommitted() bool } From 602ec07ce1829f35b7395e38dc5d59b58f50a31f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 31 May 2022 11:49:55 +0900 Subject: [PATCH 220/253] feat: add timeline settings to scene property (#153) * update manifest * update * add tests * fix property desc * add test --- go.mod | 2 +- go.sum | 147 +- internal/adapter/gql/generated.go | 27863 +++++++++++----- internal/adapter/gql/gqlmodel/models_gen.go | 4 +- pkg/builtin/manifest.yml | 62 +- pkg/builtin/manifest_ja.yml | 33 +- .../manifest/parser_translation_test.go | 21 +- pkg/plugin/manifest/schema_translation.go | 21 +- .../manifest/schema_translation_test.go | 45 +- pkg/plugin/manifest/testdata/translation.yml | 2 + pkg/property/schema_field_ui.go | 2 + schema.graphql | 1 + schemas/plugin_manifest.json | 5 +- schemas/plugin_manifest_translation.json | 12 + 14 files changed, 19362 insertions(+), 8858 deletions(-) diff --git a/go.mod b/go.mod index 78dfbbd51..52897d13f 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/99designs/gqlgen v0.17.5 github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0 github.com/auth0/go-jwt-middleware/v2 v2.0.1 + github.com/avast/retry-go/v4 v4.0.4 github.com/blang/semver v3.5.1+incompatible github.com/caos/oidc v1.2.0 github.com/goccy/go-yaml v1.9.5 @@ -62,7 +63,6 @@ require ( github.com/agnivade/levenshtein v1.1.1 // indirect github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect - github.com/avast/retry-go/v4 v4.0.4 // indirect github.com/caos/logging v0.0.2 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect diff --git a/go.sum b/go.sum index 287c3e1d2..d88e0b90b 100644 --- a/go.sum +++ b/go.sum @@ -29,7 +29,6 @@ cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+Y cloud.google.com/go v0.94.1/go.mod 
h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= @@ -39,21 +38,15 @@ cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUM cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= -cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw= cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= -cloud.google.com/go/compute v1.5.0 h1:b1zWmYuuHz7gO9kDcM/EpHGr06UgsYNRpNJzI2kFiLM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc= cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= -cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= cloud.google.com/go/iam 
v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= -cloud.google.com/go/profiler v0.2.0 h1:TZEKR39niWTuvpak6VNg+D8J5qTzJnyaD1Yl4BOU+d8= -cloud.google.com/go/profiler v0.2.0/go.mod h1:Rn0g4ZAbYR1sLVP7GAmCZxid4dmtD/nURxcaxf6pngI= cloud.google.com/go/profiler v0.3.0 h1:R6y/xAeifaUXxd2x6w+jIwKxoKl8Cv5HJvcvASTPWJo= cloud.google.com/go/profiler v0.3.0/go.mod h1:9wYk9eY4iZHsev8TQb61kh3wiOiSyz/xOYixWPzweCU= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= @@ -66,32 +59,17 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -cloud.google.com/go/storage v1.18.2/go.mod h1:AiIj7BWXyhO5gGVmYJ+S8tbkCx3yb0IMjua8Aw4naVM= -cloud.google.com/go/storage v1.21.0 h1:HwnT2u2D309SFDHQII6m18HlrCi3jAXhUMTLOWXYH14= -cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA= -cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= -cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= cloud.google.com/go/storage v1.22.1 h1:F6IlQJZrZM++apn9V5/VfS3gbTUYg98PS3EMQAzqtfg= cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= -cloud.google.com/go/trace v1.0.0 h1:laKx2y7IWMjguCe5zZx6n7qLtREk4kyE69SXVC0VSN8= -cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= cloud.google.com/go/trace v1.2.0 h1:oIaB4KahkIUOpLSAAjEJ8y2desbjY/x/RfP4O3KAtTI= cloud.google.com/go/trace v1.2.0/go.mod h1:Wc8y/uYyOhPy12KEnXG9XGrvfMz5F5SrYecQlbW1rwM= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.17.2 
h1:yczvlwMsfcVu/JtejqfrLwXuSP0yZFhmcss3caEvHw8= -github.com/99designs/gqlgen v0.17.2/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o= github.com/99designs/gqlgen v0.17.5 h1:bTgv7FQz3+NROg6ooHtlkaJ82Uqrp6e5sAziXTBo1hc= github.com/99designs/gqlgen v0.17.5/go.mod h1:SNpLVzaF37rRLSAXtu8FKVp5I4zycneMmFX6NT4XGSU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0 h1:JLLDOHEcoREA54hzOnjr8KQcZCvX0E8KhosjE0F1jaQ= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.3.0/go.mod h1:Pe8G2QFgCaohbU/zHRBjn0YaFh9z8/HtuEDh/Lyo04E= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1 h1:LNl8Tg6N6qChoiM6lKC1Z44Z+CkPAdYx45L/lJDgThs= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.6.1/go.mod h1:ZFRHg4zuQuc6sQyx16A53fB7kxPvPguSeE8GFVpT6rQ= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0 h1:8vpIORQCKkwM0r/IZ1faAddG56t7byhqSxATphc+8MI= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.7.0/go.mod h1:HuFNmMWVYJDj2IxyIlUOW2vguRBM8ct9mOuAtWRU2EQ= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1 h1:JftlZfpIvptFTzSD/BXuoK9i0nT3D1GSz1EEBr+jjuM= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.30.1/go.mod h1:j+FS9VBW3mwtHBmm9KOJEy5Tq68fCp7fE/R9bV/flIM= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0 h1:tfaeStvrph8eJEmo1iji3A4DXen3s6ZMM17nQmvo0WA= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.31.0/go.mod h1:j+FS9VBW3mwtHBmm9KOJEy5Tq68fCp7fE/R9bV/flIM= github.com/HdrHistogram/hdrhistogram-go v1.0.1 h1:GX8GAYDuhlFQnI2fRDHQhTlkHMz8bEn0jTI6LJU0mpw= @@ -101,7 
+79,6 @@ github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF0 github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= -github.com/agnivade/levenshtein v1.1.0/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8= github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= @@ -142,7 +119,6 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -176,12 +152,9 @@ github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeME github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.1/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/stdr v1.2.0/go.mod h1:YkVgnZu1ZjjL7xTxrfm/LLZBfkhTqSR1ydtm6jTKKwI= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= @@ -282,7 +255,6 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -309,8 +281,6 @@ github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00 h1:hQb7P4XOakoaN+LET7TJ7PNoBsGm8Tf4lNtAdNwkxDE= -github.com/google/pprof v0.0.0-20220113144219-d25a53d42d00/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f h1:VrKTY4lquiy1oJzVZgXrauku9Jx9P+POv/gTLakG4Wk= github.com/google/pprof v0.0.0-20220412212628-83db2b799d1f/go.mod h1:Pt31oes+eGImORns3McJn8zHefuQl2rG8l6xQjGYB4U= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= @@ -323,9 +293,7 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= -github.com/googleapis/gax-go/v2 v2.2.0 h1:s7jOdKSaksJVOxE0Y/S32otcfiP+UQ0cL8/GTKaONwE= github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= -github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI= github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= @@ -333,7 +301,6 @@ github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2 github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3 h1:eHv/jVY/JNop1xg2J9cBb4EzyMpWZoNCP1BslSAIkOI= 
-github.com/googleinterns/cloud-operations-api-mock v0.0.0-20200709193332-a1e58c29bdd3/go.mod h1:h/KNeRx7oYU4SpA4SoY7W2/NxDKEEVuwA6j9A27L4OI= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -345,8 +312,6 @@ github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= -github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gregjones/httpcache v0.0.0-20170920190843-316c5e0ff04e/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= @@ -369,8 +334,6 @@ github.com/idubinskiy/schematyper v0.0.0-20190118213059-f71b40dac30d/go.mod h1:x github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/log15 v0.0.0-20170622235902-74a0988b5f80/go.mod h1:cOaXtrgN4ScfRrD9Bre7U1thNq5RtJ8ZoP4iXVGRj6o= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE= -github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jarcoal/httpmock v1.2.0 
h1:gSvTxxFR/MEMfsGrvRbdfpRUMBStovlSRLw0Ep1bwwc= github.com/jarcoal/httpmock v1.2.0/go.mod h1:oCoTsnAz4+UoOUIf5lJOWV2QQIW5UoeUI6aM2YnWAZk= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= @@ -415,28 +378,21 @@ github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxA github.com/magiconair/properties v1.7.4-0.20170902060319-8d7837e64d3c/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/matryer/moq v0.2.3 h1:Q06vEqnBYjjfx5KKgHfYRKE/lvlRu+Nj+xodG4YdHnU= -github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= github.com/matryer/moq v0.2.7 h1:RtpiPUM8L7ZSCbSwK+QcZH/E9tgqAkFjKQxsRs25b4w= github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk= github.com/mattn/go-colorable v0.0.10-0.20170816031813-ad5389df28cd/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs= github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.2/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14 
h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/maxatome/go-testdeep v1.11.0 h1:Tgh5efyCYyJFGUYiT0qxBSIDeXw0F5zSoatlou685kk= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/mapstructure v0.0.0-20170523030023-d0303fe80992/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.2.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= -github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= @@ -462,8 +418,6 @@ github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qR github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/ravilushqa/otelgqlgen v0.6.0 h1:SbBmlE1/6Z4NDjCn96Ksbb41BIEhIf3dBT1WlGzNOr4= -github.com/ravilushqa/otelgqlgen v0.6.0/go.mod h1:QP2vU3CSdJ2kYJkZl1zvTZWGgd2qEUxSWp1agqZjZ9A= github.com/ravilushqa/otelgqlgen v0.6.1 h1:KoRURWiQfthje/G6hG6zDF9QjoEFrmHgb3mAe5kZZ7k= github.com/ravilushqa/otelgqlgen v0.6.1/go.mod h1:2SUPOCCsJdvbyfLtZI81C/1Q76jSFNFs/2NrtyTI2AQ= github.com/robertkrimen/godocdown v0.0.0-20130622164427-0bfa04905481/go.mod 
h1:C9WhFzY47SzYBIvzFqSvHIR6ROgDo4TtdTuRaOMjF/s= @@ -471,11 +425,8 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/samber/lo v1.11.0 h1:JfeYozXL1xfkhRUFOfH13ociyeiLSC/GRJjGKI668xM= -github.com/samber/lo v1.11.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A= github.com/samber/lo v1.21.0 h1:FSby8pJQtX4KmyddTCCGhc3JvnnIVrDA+NW37rG+7G8= github.com/samber/lo v1.21.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A= github.com/sendgrid/rest v2.6.6+incompatible h1:3rO5UTPhLQo6fjytWwdwRWclP101CqErg2klf8LneB4= @@ -484,7 +435,6 @@ github.com/sendgrid/sendgrid-go v3.11.1+incompatible h1:ai0+woZ3r/+tKLQExznak5Xe github.com/sendgrid/sendgrid-go v3.11.1+incompatible/go.mod h1:QRQt+LX/NmgVEvmdRw0VT/QgUn499+iza2FnDca9fg8= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -532,8 +482,6 @@ github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaO 
github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg= github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= -github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= -github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/cli/v2 v2.4.0 h1:m2pxjjDFgDxSPtO8WSdbndj17Wu2y8vOT86wE/tjr+I= github.com/urfave/cli/v2 v2.4.0/go.mod h1:NX9W0zmTvedE5oDoOMs2RTC8RvdK98NTYZE5LbaEYPg= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= @@ -542,9 +490,6 @@ github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52 github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= -github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= -github.com/vektah/gqlparser/v2 v2.4.1 h1:QOyEn8DAPMUMARGMeshKDkDgNmVoEaEGiDB0uWxcSlQ= -github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/vektah/gqlparser/v2 v2.4.2 h1:29TGc6QmhEUq5fll+2FPoTmhUhR65WEKN4VK/jo0OlM= github.com/vektah/gqlparser/v2 v2.4.2/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c= @@ -566,8 +511,6 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.mongodb.org/mongo-driver v1.4.2/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= 
-go.mongodb.org/mongo-driver v1.8.4/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= -go.mongodb.org/mongo-driver v1.9.0 h1:f3aLGJvQmBl8d9S40IL+jEyBC6hfLPbJjv9t5hEM9ck= go.mongodb.org/mongo-driver v1.9.0/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= go.mongodb.org/mongo-driver v1.9.1 h1:m078y9v7sBItkt1aaoe2YlvWEXcD263e1a4E1fBrJ1c= go.mongodb.org/mongo-driver v1.9.1/go.mod h1:0sQWfOeY63QTntERDJJ/0SuKK0T1uVSgKCuAROlKEPY= @@ -579,59 +522,20 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= -go.opentelemetry.io/contrib v1.4.0 h1:o+obgKZArn1GbM8zPCLYU9LZCI7lL6GcTZArn0qz1yw= -go.opentelemetry.io/contrib v1.4.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= go.opentelemetry.io/contrib v1.6.0 h1:xJawAzMuR3s4Au5p/ABHqYFychHjK2AHB9JvkBuBbTA= go.opentelemetry.io/contrib v1.6.0/go.mod h1:FlyPNX9s4U6MCsWEc5YAK4KzKNHFDsjrDUZijJiXvy8= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0 h1:Qj331G/6VDsXEYMgeRRV7d+zGa6KKDkJtP6q8DdGeZg= -go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.31.0/go.mod h1:3k7OUuzfLbCMIVZybXejR2TzJcog/5/HSnUj3m0ISso= go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0 h1:bkyJgifVcPo1w8HYf1K0ExtgdmNgxyVa02o/yFDrSAA= go.opentelemetry.io/contrib/instrumentation/github.com/labstack/echo/otelecho v0.32.0/go.mod h1:rmdIBqEgyXERsERn9CjVXXPL9qAinIsID+X9AhBnzOQ= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0 h1:401vSW2p/bBvNuAyy8AIT7PoLHQCtuuGVK+ttC5FmwQ= -go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.31.0/go.mod h1:OfY26sPTH7bTcD8Fxwj/nlC7wmCCP7SR996JVh93sys= 
go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0 h1:gNKQHn+q326vsi+kOskx9FCz9Jkz2fvxlf1y46dTN14= go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo v0.32.0/go.mod h1:9WqBmOJ4AOChNHtnRBSCGlKN4PQf1coLTCK57fyXE/s= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0 h1:sdwza9BScvbOFaZLhvKDQc54vQ8CWM8jD9BO2t+rP4E= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.26.0/go.mod h1:4vatbW3QwS11DK0H0SB7FR31/VbthXcYorswdkVXdyg= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.31.0 h1:woM+Mb4d0A+Dxa3rYPenSN5ZeS9qHUvE8rlObiLRXTY= -go.opentelemetry.io/contrib/propagators/b3 v1.6.0 h1:rHeNbko1wNe1Sazpw5IJD83x43lfzMnDb8vckdKxRu8= -go.opentelemetry.io/contrib/propagators/b3 v1.6.0/go.mod h1:6kJAkL2/nNqP9AYhm/8j4dzVU8BfpcvYr2cy25RGBak= go.opentelemetry.io/contrib/propagators/b3 v1.7.0 h1:oRAenUhj+GFttfIp3gj7HYVzBhPOHgq/dWPDSmLCXSY= go.opentelemetry.io/contrib/propagators/b3 v1.7.0/go.mod h1:gXx7AhL4xXCF42gpm9dQvdohoDa2qeyEx4eIIxqK+h4= -go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU= -go.opentelemetry.io/otel v1.1.0/go.mod h1:7cww0OW51jQ8IaZChIEdqLwgh+44+7uiTdWsAL0wQpA= -go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs= -go.opentelemetry.io/otel v1.6.0/go.mod h1:bfJD2DZVw0LBxghOTlgnlI0CV3hLDu9XF/QKOUXMTQQ= -go.opentelemetry.io/otel v1.6.1 h1:6r1YrcTenBvYa1x491d0GGpTVBsNECmrc/K6b+zDeis= -go.opentelemetry.io/otel v1.6.1/go.mod h1:blzUabWHkX6LJewxvadmzafgh/wnvBSDBdOuwkAtrWQ= -go.opentelemetry.io/otel v1.6.2/go.mod h1:MUBZHaB2cm6CahEBHQPq9Anos7IXynP/noVpjsxQTSc= -go.opentelemetry.io/otel v1.6.3 h1:FLOfo8f9JzFVFVyU+MSRJc2HdEAXQgm7pIv2uFKRSZE= -go.opentelemetry.io/otel v1.6.3/go.mod h1:7BgNga5fNlF/iZjG06hM3yofffp0ofKCDwSXx1GC4dI= go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM= go.opentelemetry.io/otel v1.7.0/go.mod 
h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= -go.opentelemetry.io/otel/internal/metric v0.24.0 h1:O5lFy6kAl0LMWBjzy3k//M8VjEaTDWL9DPJuqZmWIAA= -go.opentelemetry.io/otel/internal/metric v0.24.0/go.mod h1:PSkQG+KuApZjBpC6ea6082ZrWUUy/w132tJ/LOU3TXk= -go.opentelemetry.io/otel/metric v0.24.0 h1:Rg4UYHS6JKR1Sw1TxnI13z7q/0p/XAbgIqUTagvLJuU= -go.opentelemetry.io/otel/metric v0.24.0/go.mod h1:tpMFnCD9t+BEGiWY2bWF5+AwjuAdM0lSowQ4SBA3/K4= go.opentelemetry.io/otel/metric v0.28.0 h1:o5YNh+jxACMODoAo1bI7OES0RUW4jAMae0Vgs2etWAQ= -go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= -go.opentelemetry.io/otel/sdk v1.6.0/go.mod h1:PjLRUfDsoPy0zl7yrDGSUqjj43tL7rEtFdCEiGlxXRM= -go.opentelemetry.io/otel/sdk v1.6.1 h1:ZmcNyMhcuAYIb/Nr6QhBPTMopMTbov/47wHt1gibkoY= -go.opentelemetry.io/otel/sdk v1.6.1/go.mod h1:IVYrddmFZ+eJqu2k38qD3WezFR2pymCzm8tdxyh3R4E= -go.opentelemetry.io/otel/sdk v1.6.2 h1:wxY+YrfpGJfjxtm7SFBMJp9APDMZjDG+ErZOs/wkubg= -go.opentelemetry.io/otel/sdk v1.6.2/go.mod h1:M2r4VCm1Yurk4E+fWtP2p+QzFDHMFEqhGdbtQ7zRf+k= -go.opentelemetry.io/otel/sdk v1.6.3/go.mod h1:A4iWF7HTXa+GWL/AaqESz28VuSBIcZ+0CV+IzJ5NMiQ= go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0= go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU= -go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk= -go.opentelemetry.io/otel/trace v1.1.0/go.mod h1:i47XtdcBQiktu5IsrPqOHe8w+sBmnLwwHt8wiUsWGTI= -go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= -go.opentelemetry.io/otel/trace v1.6.0/go.mod h1:qs7BrU5cZ8dXQHBGxHMOxwME/27YH2qEp4/+tZLLwJE= -go.opentelemetry.io/otel/trace v1.6.1 h1:f8c93l5tboBYZna1nWk0W9DYyMzJXDWdZcJZ0Kb400U= -go.opentelemetry.io/otel/trace v1.6.1/go.mod h1:RkFRM1m0puWIq10oxImnGEduNBzxiN7TXluRBtE+5j0= -go.opentelemetry.io/otel/trace v1.6.2/go.mod h1:RMqfw8Mclba1p7sXDmEDBvrB8jw65F6GOoN1fyyXTzk= 
-go.opentelemetry.io/otel/trace v1.6.3 h1:IqN4L+5b0mPNjdXIiZ90Ni4Bl5BRkDQywePLWemd9bc= -go.opentelemetry.io/otel/trace v1.6.3/go.mod h1:GNJQusJlUgZl9/TQBPKU/Y/ty+0iVB5fjhKeJGZPGFs= go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o= go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= @@ -652,10 +556,6 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29 h1:tkVvjkPTB7pnW3jnid7kNyAMPVWllTNOf/qKDze4p9o= -golang.org/x/crypto v0.0.0-20220331220935-ae2d96664a29/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88 h1:Tgea0cVUD0ivh5ADBX4WwuI12DUd2to3nCYe2eayMIw= -golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898 h1:SLP7Q4Di66FONjDJbCYrCRrh97focO6sLogHO7/g8F0= golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -668,10 +568,6 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp 
v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4 h1:K3x+yU+fbot38x5bQbU2QqUAVyYLEktdNH2GxZLnM3U= -golang.org/x/exp v0.0.0-20220407100705-7b9b53b0aca4/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= -golang.org/x/exp v0.0.0-20220428152302-39d4317da171 h1:TfdoLivD44QwvssI9Sv1xwa5DcL5XQr4au4sZ2F2NV4= -golang.org/x/exp v0.0.0-20220428152302-39d4317da171/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE= golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf h1:oXVg4h2qJDd9htKxb5SCpFBHLipW6hXmL3qpUixS2jw= golang.org/x/exp v0.0.0-20220518171630-0b5c67f07fdf/go.mod h1:yh0Ynu2b5ZUe3MQfp2nM0ecK7wsgouWTDN0FNeJuIys= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= @@ -725,7 +621,6 @@ golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= @@ -743,7 +638,6 @@ golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= 
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220325170049-de3da57026de h1:pZB1TWnKi+o4bENlbzAgLrEbY4RMYmUIRobMcSmfeYc= golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4 h1:HVyaeDAYux4pnY+D/SiwmLOR36ewZ4iGQIIrtnuCjFA= @@ -764,10 +658,8 @@ golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a h1:qfl7ob3DIEs3Ml9oLuPwY2N04gymzAW04WsUQHIClgM= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= @@ -789,7 +681,6 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -844,7 +735,6 @@ golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -853,14 +743,10 @@ golang.org/x/sys v0.0.0-20211107104306-e0b2ad06fe42/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220204135822-1c1b9b1eba6a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220207234003-57398862261d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886 h1:eJv7u3ksNXoLbGSKuv2s/SIO4tJVxc/A+MTpzxDgz/Q= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -929,10 +815,8 @@ golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200701151220-7cb253f4c4f8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200815165600-90abf76919f3/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -953,7 +837,6 @@ golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U= golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -987,23 +870,14 @@ google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6 google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E= google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= -google.golang.org/api v0.65.0/go.mod h1:ArYhxgGadlWmqO1IqVujw6Cs8IdD33bTmzKo2Sh+cbg= -google.golang.org/api v0.66.0/go.mod 
h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= -google.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7TH8= -google.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80= google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= -google.golang.org/api v0.74.0 h1:ExR2D+5TYIrMphWgs5JCgwRhEDlPDXXrLwHHMgPHTXE= google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= -google.golang.org/api v0.79.0 h1:vaOcm0WdXvhGkci9a0+CcQVZqSRjN8ksSBlWv99f8Pg= -google.golang.org/api v0.79.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= google.golang.org/api v0.80.0 h1:IQWaGVCYnsm4MO3hh+WtSXMzMzuyFx/fuR8qkN3A0Qo= google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= @@ -1041,7 +915,6 @@ google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200605102947-12044bf5ea91/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -1074,39 +947,23 @@ google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEc google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211016002631-37fc39342514/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod 
h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220204002441-d6cc3cc0770e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220207185906-7721543eae58/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb h1:0m9wktIpOxGw+SSKmydXWB3Z3GTfcPP6+q75HCQa6HI= google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf h1:JTjwKJX9erVpsw17w+OIPP7iAgEkN/r8urhWSunEDTs= -google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod 
h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 h1:q1kiSVscqoDeqTF27eQ2NnLLDmqF0I373qQNXYMy0fo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335 h1:2D0OT6tPVdrQTOnVe1VQjfJPTED6EZ7fdJ/f6Db6OsY= google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= @@ -1138,7 +995,6 @@ google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnD google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= @@ -1155,7 +1011,6 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 
h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index b28f972bc..8cb4415b2 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -6178,6 +6178,75 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { rc := graphql.GetOperationContext(ctx) ec := executionContext{rc, e} + inputUnmarshalMap := graphql.BuildUnmarshalerMap( + ec.unmarshalInputAddClusterInput, + ec.unmarshalInputAddDatasetSchemaInput, + ec.unmarshalInputAddDynamicDatasetInput, + ec.unmarshalInputAddDynamicDatasetSchemaInput, + ec.unmarshalInputAddInfoboxFieldInput, + ec.unmarshalInputAddLayerGroupInput, + ec.unmarshalInputAddLayerItemInput, + ec.unmarshalInputAddMemberToTeamInput, + ec.unmarshalInputAddPropertyItemInput, + ec.unmarshalInputAddWidgetInput, + ec.unmarshalInputAttachTagItemToGroupInput, + ec.unmarshalInputAttachTagToLayerInput, + ec.unmarshalInputCreateAssetInput, + ec.unmarshalInputCreateInfoboxInput, + ec.unmarshalInputCreateProjectInput, + ec.unmarshalInputCreateSceneInput, + ec.unmarshalInputCreateTagGroupInput, + ec.unmarshalInputCreateTagItemInput, + ec.unmarshalInputCreateTeamInput, + ec.unmarshalInputDeleteMeInput, + ec.unmarshalInputDeleteProjectInput, + ec.unmarshalInputDeleteTeamInput, + ec.unmarshalInputDetachTagFromLayerInput, + ec.unmarshalInputDetachTagItemFromGroupInput, + ec.unmarshalInputImportDatasetFromGoogleSheetInput, + ec.unmarshalInputImportDatasetInput, + ec.unmarshalInputImportLayerInput, + ec.unmarshalInputInstallPluginInput, + ec.unmarshalInputLinkDatasetToPropertyValueInput, + 
ec.unmarshalInputMoveInfoboxFieldInput, + ec.unmarshalInputMoveLayerInput, + ec.unmarshalInputMovePropertyItemInput, + ec.unmarshalInputPagination, + ec.unmarshalInputPublishProjectInput, + ec.unmarshalInputRemoveAssetInput, + ec.unmarshalInputRemoveClusterInput, + ec.unmarshalInputRemoveDatasetSchemaInput, + ec.unmarshalInputRemoveInfoboxFieldInput, + ec.unmarshalInputRemoveInfoboxInput, + ec.unmarshalInputRemoveLayerInput, + ec.unmarshalInputRemoveMemberFromTeamInput, + ec.unmarshalInputRemoveMyAuthInput, + ec.unmarshalInputRemovePropertyFieldInput, + ec.unmarshalInputRemovePropertyItemInput, + ec.unmarshalInputRemoveTagInput, + ec.unmarshalInputRemoveWidgetInput, + ec.unmarshalInputSignupInput, + ec.unmarshalInputSyncDatasetInput, + ec.unmarshalInputUninstallPluginInput, + ec.unmarshalInputUnlinkPropertyValueInput, + ec.unmarshalInputUpdateClusterInput, + ec.unmarshalInputUpdateDatasetSchemaInput, + ec.unmarshalInputUpdateLayerInput, + ec.unmarshalInputUpdateMeInput, + ec.unmarshalInputUpdateMemberOfTeamInput, + ec.unmarshalInputUpdateProjectInput, + ec.unmarshalInputUpdatePropertyItemInput, + ec.unmarshalInputUpdatePropertyItemOperationInput, + ec.unmarshalInputUpdatePropertyValueInput, + ec.unmarshalInputUpdateTagInput, + ec.unmarshalInputUpdateTeamInput, + ec.unmarshalInputUpdateWidgetAlignSystemInput, + ec.unmarshalInputUpdateWidgetInput, + ec.unmarshalInputUpgradePluginInput, + ec.unmarshalInputUploadFileToPropertyInput, + ec.unmarshalInputUploadPluginInput, + ec.unmarshalInputWidgetLocationInput, + ) first := true switch rc.Operation.Operation { @@ -6187,6 +6256,7 @@ func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { return nil } first = false + ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap) data := ec._Query(ctx, rc.Operation.SelectionSet) var buf bytes.Buffer data.MarshalGQL(&buf) @@ -6201,6 +6271,7 @@ func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { return nil } first = false + ctx = 
graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap) data := ec._Mutation(ctx, rc.Operation.SelectionSet) var buf bytes.Buffer data.MarshalGQL(&buf) @@ -6713,6 +6784,7 @@ enum PropertySchemaFieldUI { VIDEO FILE CAMERA_POSE + DATETIME } type PropertySchemaFieldChoice { @@ -9674,21 +9746,17 @@ func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArg // region **************************** field.gotpl ***************************** func (ec *executionContext) _AddClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddClusterPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddClusterPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -9708,22 +9776,71 @@ func (ec *executionContext) _AddClusterPayload_scene(ctx context.Context, field return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddClusterPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) 
+ case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddClusterPayload_cluster(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddClusterPayload_cluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddClusterPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Cluster, nil @@ -9743,22 +9860,41 @@ func (ec *executionContext) _AddClusterPayload_cluster(ctx context.Context, fiel return ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddClusterPayload_cluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Cluster_id(ctx, field) + case "name": + return ec.fieldContext_Cluster_name(ctx, field) + case "propertyId": + return ec.fieldContext_Cluster_propertyId(ctx, field) + case "property": + return ec.fieldContext_Cluster_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Cluster", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDatasetSchemaPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddDatasetSchemaPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children 
return obj.DatasetSchema, nil @@ -9775,22 +9911,53 @@ func (ec *executionContext) _AddDatasetSchemaPayload_datasetSchema(ctx context.C return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDynamicDatasetPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer 
func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddDynamicDatasetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchema, nil @@ -9807,22 +9974,53 @@ func (ec *executionContext) _AddDynamicDatasetPayload_datasetSchema(ctx context. return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddDynamicDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDynamicDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q 
was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDynamicDatasetPayload_dataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddDynamicDatasetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Dataset, nil @@ -9839,22 +10037,45 @@ func (ec *executionContext) _AddDynamicDatasetPayload_dataset(ctx context.Contex return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddDynamicDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDynamicDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field 
named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddDynamicDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddDynamicDatasetSchemaPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchema, nil @@ -9871,22 +10092,53 @@ func (ec *executionContext) _AddDynamicDatasetSchemaPayload_datasetSchema(ctx co return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddDynamicDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddDynamicDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case 
"representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddInfoboxFieldPayload_infoboxField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddInfoboxFieldPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.InfoboxField, nil @@ -9906,22 +10158,65 @@ func (ec *executionContext) _AddInfoboxFieldPayload_infoboxField(ctx context.Con return ec.marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddInfoboxFieldPayload_infoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_InfoboxField_id(ctx, field) + case "sceneId": + return ec.fieldContext_InfoboxField_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_InfoboxField_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_InfoboxField_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_InfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_InfoboxField_extensionId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_InfoboxField_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_InfoboxField_layer(ctx, field) + case "infobox": + return ec.fieldContext_InfoboxField_infobox(ctx, field) + case "property": + return ec.fieldContext_InfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_InfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_InfoboxField_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_InfoboxField_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_InfoboxField_merged(ctx, field) + case "scene": + return ec.fieldContext_InfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_InfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type InfoboxField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddInfoboxFieldPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddInfoboxFieldPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: 
false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -9941,22 +10236,31 @@ func (ec *executionContext) _AddInfoboxFieldPayload_layer(ctx context.Context, f return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerGroupPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddLayerGroupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -9976,22 +10280,75 @@ func (ec *executionContext) _AddLayerGroupPayload_layer(ctx context.Context, fie return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, 
field.Selections, res) } +func (ec *executionContext) fieldContext_AddLayerGroupPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return 
ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerGroupPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddLayerGroupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentLayer, nil @@ -10011,22 +10368,75 @@ func (ec *executionContext) _AddLayerGroupPayload_parentLayer(ctx context.Contex return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddLayerGroupPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return 
ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerGroupPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddLayerGroupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = 
graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Index, nil @@ -10043,22 +10453,31 @@ func (ec *executionContext) _AddLayerGroupPayload_index(ctx context.Context, fie return ec.marshalOInt2แš–int(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddLayerGroupPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerItemPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddLayerItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -10078,22 +10497,71 @@ func (ec *executionContext) _AddLayerItemPayload_layer(ctx context.Context, fiel return ec.marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddLayerItemPayload_layer(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerItem_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerItem_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerItem_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerItem_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerItem_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerItem_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerItem_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerItem_parentId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_LayerItem_linkedDatasetId(ctx, field) + case "tags": + return ec.fieldContext_LayerItem_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerItem_parent(ctx, field) + case "property": + return ec.fieldContext_LayerItem_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerItem_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerItem_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_LayerItem_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_LayerItem_merged(ctx, field) + case "scene": + return ec.fieldContext_LayerItem_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerItem_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerItem", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerItemPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddLayerItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentLayer, nil @@ -10113,22 +10581,75 @@ func (ec *executionContext) _AddLayerItemPayload_parentLayer(ctx context.Context return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddLayerItemPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case 
"linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddLayerItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddLayerItemPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddLayerItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Index, nil @@ -10145,22 +10666,31 @@ func (ec *executionContext) _AddLayerItemPayload_index(ctx context.Context, fiel return ec.marshalOInt2แš–int(ctx, field.Selections, res) } 
+func (ec *executionContext) fieldContext_AddLayerItemPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddLayerItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddMemberToTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddMemberToTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddMemberToTeamPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Team, nil @@ -10180,22 +10710,45 @@ func (ec *executionContext) _AddMemberToTeamPayload_team(ctx context.Context, fi return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddMemberToTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddMemberToTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + 
return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddWidgetPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -10215,22 +10768,71 @@ func (ec *executionContext) _AddWidgetPayload_scene(ctx context.Context, field g return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddWidgetPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": 
+ return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AddWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AddWidgetPayload_sceneWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AddWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneWidget, nil @@ -10250,22 +10852,51 @@ func (ec *executionContext) _AddWidgetPayload_sceneWidget(ctx context.Context, f return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AddWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AddWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := 
&graphql.FieldContext{ - Object: "Asset", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -10285,22 +10916,31 @@ func (ec *executionContext) _Asset_id(ctx context.Context, field graphql.Collect return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Asset_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Asset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_createdAt(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.CreatedAt, nil @@ -10320,22 +10960,31 @@ func (ec *executionContext) _Asset_createdAt(ctx context.Context, field graphql. 
return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Asset_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Asset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_teamId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TeamID, nil @@ -10355,22 +11004,31 @@ func (ec *executionContext) _Asset_teamId(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Asset_teamId(ctx context.Context, 
field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Asset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -10390,22 +11048,31 @@ func (ec *executionContext) _Asset_name(ctx context.Context, field graphql.Colle return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Asset_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Asset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err 
:= ec.fieldContext_Asset_size(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Size, nil @@ -10425,22 +11092,31 @@ func (ec *executionContext) _Asset_size(ctx context.Context, field graphql.Colle return ec.marshalNFileSize2int64(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Asset_size(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Asset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type FileSize does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_url(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.URL, nil @@ -10460,22 +11136,31 @@ func (ec *executionContext) _Asset_url(ctx context.Context, field graphql.Collec 
return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Asset_url(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Asset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_contentType(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ContentType, nil @@ -10495,22 +11180,31 @@ func (ec *executionContext) _Asset_contentType(ctx context.Context, field graphq return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Asset_contentType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + 
} + return fc, nil +} + func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Asset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Asset_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Asset", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Asset().Team(rctx, obj) @@ -10527,22 +11221,45 @@ func (ec *executionContext) _Asset_team(ctx context.Context, field graphql.Colle return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Asset_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Asset", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AssetConnection_edges(ctx context.Context, field graphql.CollectedField, 
obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AssetConnection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Edges, nil @@ -10562,22 +11279,37 @@ func (ec *executionContext) _AssetConnection_edges(ctx context.Context, field gr return ec.marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_AssetConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "AssetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_AssetEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_AssetEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AssetEdge", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Nodes, nil @@ -10597,22 +11329,49 @@ func (ec *executionContext) _AssetConnection_nodes(ctx context.Context, field gr return ec.marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_AssetConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "AssetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Asset_id(ctx, field) + case "createdAt": + return ec.fieldContext_Asset_createdAt(ctx, field) + case "teamId": + return ec.fieldContext_Asset_teamId(ctx, field) + case "name": + return ec.fieldContext_Asset_name(ctx, field) + case "size": + return ec.fieldContext_Asset_size(ctx, field) + case "url": + return ec.fieldContext_Asset_url(ctx, field) + case "contentType": + return ec.fieldContext_Asset_contentType(ctx, field) + case "team": + return ec.fieldContext_Asset_team(ctx, field) + } + return nil, 
fmt.Errorf("no field named %q was found under type Asset", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PageInfo, nil @@ -10632,22 +11391,41 @@ func (ec *executionContext) _AssetConnection_pageInfo(ctx context.Context, field return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } -func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_AssetConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "AssetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + 
} + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TotalCount, nil @@ -10667,22 +11445,31 @@ func (ec *executionContext) _AssetConnection_totalCount(ctx context.Context, fie return ec.marshalNInt2int(ctx, field.Selections, res) } -func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "AssetEdge", +func (ec *executionContext) fieldContext_AssetConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { 
+ if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Cursor, nil @@ -10702,22 +11489,31 @@ func (ec *executionContext) _AssetEdge_cursor(ctx context.Context, field graphql return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_AssetEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "AssetEdge", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AssetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AssetEdge_node(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Node, nil @@ -10734,22 +11530,49 @@ func (ec *executionContext) _AssetEdge_node(ctx context.Context, field graphql.C return 
ec.marshalOAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AssetEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AssetEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Asset_id(ctx, field) + case "createdAt": + return ec.fieldContext_Asset_createdAt(ctx, field) + case "teamId": + return ec.fieldContext_Asset_teamId(ctx, field) + case "name": + return ec.fieldContext_Asset_name(ctx, field) + case "size": + return ec.fieldContext_Asset_size(ctx, field) + case "url": + return ec.fieldContext_Asset_url(ctx, field) + case "contentType": + return ec.fieldContext_Asset_contentType(ctx, field) + case "team": + return ec.fieldContext_Asset_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Asset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AttachTagItemToGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AttachTagItemToGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AttachTagItemToGroupPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AttachTagItemToGroupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tag, nil @@ -10769,22 +11592,47 @@ func 
(ec *executionContext) _AttachTagItemToGroupPayload_tag(ctx context.Context return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AttachTagItemToGroupPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AttachTagItemToGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _AttachTagToLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.AttachTagToLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_AttachTagToLayerPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "AttachTagToLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack 
in children return obj.Layer, nil @@ -10804,22 +11652,31 @@ func (ec *executionContext) _AttachTagToLayerPayload_layer(ctx context.Context, return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_AttachTagToLayerPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "AttachTagToLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_lat(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Camera", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lat, nil @@ -10839,22 +11696,31 @@ func (ec *executionContext) _Camera_lat(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Camera_lat(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Camera", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_lng(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lng, nil @@ -10874,22 +11740,31 @@ func (ec *executionContext) _Camera_lng(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_altitude(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Camera_lng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Camera", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Camera_altitude(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_altitude(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Altitude, nil @@ -10909,22 +11784,31 @@ func (ec *executionContext) _Camera_altitude(ctx context.Context, field graphql. return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Camera_altitude(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Camera", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_heading(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Heading, 
nil @@ -10944,22 +11828,31 @@ func (ec *executionContext) _Camera_heading(ctx context.Context, field graphql.C return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Camera_heading(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Camera", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_pitch(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Pitch, nil @@ -10979,22 +11872,31 @@ func (ec *executionContext) _Camera_pitch(ctx context.Context, field graphql.Col return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Camera_pitch(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Camera", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_roll(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Roll, nil @@ -11014,22 +11916,31 @@ func (ec *executionContext) _Camera_roll(ctx context.Context, field graphql.Coll return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Camera) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Camera_roll(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Camera", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.CollectedField, 
obj *gqlmodel.Camera) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Camera_fov(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fov, nil @@ -11049,22 +11960,31 @@ func (ec *executionContext) _Camera_fov(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Camera_fov(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Camera", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Cluster_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Cluster", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -11084,22 +12004,31 @@ func (ec *executionContext) _Cluster_id(ctx context.Context, field graphql.Colle return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Cluster_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Cluster", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -11119,22 +12048,31 @@ func (ec *executionContext) _Cluster_name(ctx context.Context, field graphql.Col return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Cluster_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Cluster_name(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Cluster", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Cluster_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -11154,22 +12092,31 @@ func (ec *executionContext) _Cluster_propertyId(ctx context.Context, field graph return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Cluster_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Cluster", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Cluster) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Cluster_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != 
nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Cluster", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Cluster().Property(rctx, obj) @@ -11186,22 +12133,45 @@ func (ec *executionContext) _Cluster_property(ctx context.Context, field graphql return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Cluster_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Cluster", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateAssetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateAssetPayload_asset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) 
ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateAssetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Asset, nil @@ -11221,22 +12191,49 @@ func (ec *executionContext) _CreateAssetPayload_asset(ctx context.Context, field return ec.marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateAssetPayload_asset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateAssetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Asset_id(ctx, field) + case "createdAt": + return ec.fieldContext_Asset_createdAt(ctx, field) + case "teamId": + return ec.fieldContext_Asset_teamId(ctx, field) + case "name": + return ec.fieldContext_Asset_name(ctx, field) + case "size": + return ec.fieldContext_Asset_size(ctx, field) + case "url": + return ec.fieldContext_Asset_url(ctx, field) + case "contentType": + return ec.fieldContext_Asset_contentType(ctx, field) + case "team": + return ec.fieldContext_Asset_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Asset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateInfoboxPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateInfoboxPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) 
defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateInfoboxPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -11256,22 +12253,31 @@ func (ec *executionContext) _CreateInfoboxPayload_layer(ctx context.Context, fie return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateInfoboxPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateScenePayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateScenePayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateScenePayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -11291,22 +12297,71 
@@ func (ec *executionContext) _CreateScenePayload_scene(ctx context.Context, field return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateScenePayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateScenePayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, 
field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _CreateTagGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTagGroupPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateTagGroupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tag, nil @@ -11326,22 +12381,47 @@ func (ec *executionContext) _CreateTagGroupPayload_tag(ctx context.Context, fiel return ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateTagGroupPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTagGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return 
ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _CreateTagItemPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTagItemPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateTagItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tag, nil @@ -11361,22 +12441,57 @@ func (ec *executionContext) _CreateTagItemPayload_tag(ctx context.Context, field return ec.marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateTagItemPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTagItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagItem_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagItem_label(ctx, field) + case "parentId": + return ec.fieldContext_TagItem_parentId(ctx, field) + case "linkedDatasetID": + return ec.fieldContext_TagItem_linkedDatasetID(ctx, field) + case "linkedDatasetSchemaID": + return 
ec.fieldContext_TagItem_linkedDatasetSchemaID(ctx, field) + case "linkedDatasetFieldID": + return ec.fieldContext_TagItem_linkedDatasetFieldID(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_TagItem_linkedDatasetSchema(ctx, field) + case "linkedDataset": + return ec.fieldContext_TagItem_linkedDataset(ctx, field) + case "linkedDatasetField": + return ec.fieldContext_TagItem_linkedDatasetField(ctx, field) + case "parent": + return ec.fieldContext_TagItem_parent(ctx, field) + case "layers": + return ec.fieldContext_TagItem_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagItem", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _CreateTagItemPayload_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTagItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTagItemPayload_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateTagItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Parent, nil @@ -11393,22 +12508,47 @@ func (ec *executionContext) _CreateTagItemPayload_parent(ctx context.Context, fi return ec.marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateTagItemPayload_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTagItemPayload", + Field: field, + IsMethod: false, + 
IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.CreateTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_CreateTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "CreateTeamPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Team, nil @@ -11428,22 +12568,45 @@ func (ec *executionContext) _CreateTeamPayload_team(ctx context.Context, field g return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_CreateTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "CreateTeamPayload", + Field: field, + 
IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Dataset", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -11463,22 +12626,31 @@ func (ec *executionContext) _Dataset_id(ctx context.Context, field graphql.Colle return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Dataset_id(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Dataset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_source(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Source, nil @@ -11498,22 +12670,31 @@ func (ec *executionContext) _Dataset_source(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Dataset_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Dataset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -11533,22 +12714,31 @@ func (ec *executionContext) _Dataset_schemaId(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Dataset_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Dataset", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from 
middleware stack in children return obj.Fields, nil @@ -11568,22 +12758,49 @@ func (ec *executionContext) _Dataset_fields(ctx context.Context, field graphql.C return ec.marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetFieldแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Dataset_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_DatasetField_fieldId(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetField_schemaId(ctx, field) + case "source": + return ec.fieldContext_DatasetField_source(ctx, field) + case "type": + return ec.fieldContext_DatasetField_type(ctx, field) + case "value": + return ec.fieldContext_DatasetField_value(ctx, field) + case "schema": + return ec.fieldContext_DatasetField_schema(ctx, field) + case "field": + return ec.fieldContext_DatasetField_field(ctx, field) + case "valueRef": + return ec.fieldContext_DatasetField_valueRef(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Dataset", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Dataset().Schema(rctx, obj) @@ -11600,22 +12817,53 @@ func (ec *executionContext) _Dataset_schema(ctx context.Context, field graphql.C return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Dataset_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Dataset_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Dataset) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Dataset_name(ctx, field) + if err != nil { + return graphql.Null + } + 
ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Dataset", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Dataset().Name(rctx, obj) @@ -11632,22 +12880,31 @@ func (ec *executionContext) _Dataset_name(ctx context.Context, field graphql.Col return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ - Object: "DatasetConnection", +func (ec *executionContext) fieldContext_Dataset_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Dataset", Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Edges, nil @@ -11667,22 +12924,37 @@ func (ec *executionContext) _DatasetConnection_edges(ctx context.Context, field return ec.marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_DatasetEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_DatasetEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetEdge", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Nodes, nil @@ -11702,22 
+12974,45 @@ func (ec *executionContext) _DatasetConnection_nodes(ctx context.Context, field return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PageInfo, nil @@ -11737,22 +13032,41 @@ func (ec *executionContext) _DatasetConnection_pageInfo(ctx context.Context, fie return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TotalCount, nil @@ -11772,22 +13086,31 @@ func (ec *executionContext) _DatasetConnection_totalCount(ctx context.Context, f return ec.marshalNInt2int(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetEdge", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Cursor, nil @@ -11807,22 +13130,31 @@ func (ec *executionContext) _DatasetEdge_cursor(ctx context.Context, field graph return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetEdge", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetEdge_node(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Node, nil @@ -11839,22 +13171,45 @@ func (ec *executionContext) _DatasetEdge_node(ctx context.Context, field graphql return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case 
"fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FieldID, nil @@ -11874,22 +13229,31 @@ func (ec *executionContext) _DatasetField_fieldId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + 
return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -11909,22 +13273,31 @@ func (ec *executionContext) _DatasetField_schemaId(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_source(ctx, field) + if err != nil { + return graphql.Null + } ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Source, nil @@ -11944,22 +13317,31 @@ func (ec *executionContext) _DatasetField_source(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetField_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Type, nil @@ -11979,22 +13361,31 @@ func (ec *executionContext) _DatasetField_type(ctx context.Context, field graphq return 
ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_value(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Value, nil @@ -12011,22 +13402,31 @@ func (ec *executionContext) _DatasetField_value(ctx context.Context, field graph return ec.marshalOAny2interface(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetField_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetField().Schema(rctx, obj) @@ -12043,22 +13443,53 @@ func (ec *executionContext) _DatasetField_schema(ctx context.Context, field grap return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + 
return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_field(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetField().Field(rctx, obj) @@ -12075,22 +13506,49 @@ func (ec *executionContext) _DatasetField_field(ctx context.Context, field graph return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetField_field(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return 
ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetField_valueRef(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetField().ValueRef(rctx, obj) @@ -12107,22 +13565,45 @@ func (ec *executionContext) _DatasetField_valueRef(ctx context.Context, field gr return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetField_valueRef(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetField", + Field: 
field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchema", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -12142,22 +13623,31 @@ func (ec *executionContext) _DatasetSchema_id(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec 
*executionContext) fieldContext_DatasetSchema_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_source(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Source, nil @@ -12177,22 +13667,31 @@ func (ec *executionContext) _DatasetSchema_source(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func 
(ec *executionContext) _DatasetSchema_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -12212,22 +13711,31 @@ func (ec *executionContext) _DatasetSchema_name(ctx context.Context, field graph return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -12247,22 +13755,31 @@ func (ec *executionContext) _DatasetSchema_sceneId(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fields, nil @@ -12282,22 +13799,49 @@ func (ec *executionContext) _DatasetSchema_fields(ctx context.Context, field gra return ec.marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaFieldแš„(ctx, 
field.Selections, res) } -func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.RepresentativeFieldID, nil @@ -12314,22 +13858,31 @@ func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Con return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_dynamic(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Dynamic, nil @@ -12346,32 +13899,34 @@ func (ec *executionContext) _DatasetSchema_dynamic(ctx context.Context, field gr return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_datasets(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_dynamic(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_DatasetSchema_datasets_args(ctx, rawArgs) +func (ec *executionContext) _DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_datasets(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.DatasetSchema().Datasets(rctx, obj, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.DatasetSchema().Datasets(rctx, obj, fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -12388,22 +13943,52 @@ func (ec *executionContext) _DatasetSchema_datasets(ctx 
context.Context, field g return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchema_datasets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchema", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetConnection", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_DatasetSchema_datasets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_scene(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetSchema().Scene(rctx, obj) @@ -12420,22 +14005,71 @@ func (ec *executionContext) _DatasetSchema_scene(ctx context.Context, field grap return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchema_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case 
"datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_representativeField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchema", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetSchema().RepresentativeField(rctx, obj) @@ -12452,22 +14086,49 @@ func (ec *executionContext) _DatasetSchema_representativeField(ctx context.Conte return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchema_representativeField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return 
ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaConnection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Edges, nil @@ -12487,22 +14148,37 @@ func (ec *executionContext) _DatasetSchemaConnection_edges(ctx context.Context, return ec.marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret 
= graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchemaConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "cursor": + return ec.fieldContext_DatasetSchemaEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_DatasetSchemaEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaEdge", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Nodes, nil @@ -12522,22 +14198,53 @@ func (ec *executionContext) _DatasetSchemaConnection_nodes(ctx context.Context, return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchemaConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaConnection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PageInfo, nil @@ -12557,22 +14264,41 @@ func (ec *executionContext) _DatasetSchemaConnection_pageInfo(ctx context.Contex return 
ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchemaConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaConnection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TotalCount, nil @@ -12592,22 +14318,31 @@ func (ec *executionContext) _DatasetSchemaConnection_totalCount(ctx context.Cont return ec.marshalNInt2int(ctx, field.Selections, res) } +func (ec 
*executionContext) fieldContext_DatasetSchemaConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaEdge", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Cursor, nil @@ -12627,22 +14362,31 @@ func (ec *executionContext) _DatasetSchemaEdge_cursor(ctx context.Context, field return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: 
"DatasetSchemaEdge", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaEdge_node(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Node, nil @@ -12659,22 +14403,53 @@ func (ec *executionContext) _DatasetSchemaEdge_node(ctx context.Context, field g return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchemaEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return 
ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -12694,22 +14469,31 @@ func (ec *executionContext) _DatasetSchemaField_id(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaField_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) 
{ + fc = &graphql.FieldContext{ Object: "DatasetSchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_source(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Source, nil @@ -12729,22 +14513,31 @@ func (ec *executionContext) _DatasetSchemaField_source(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaField_source(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField, 
obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -12764,22 +14557,31 @@ func (ec *executionContext) _DatasetSchemaField_name(ctx context.Context, field return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaField_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use 
context from middleware stack in children return obj.Type, nil @@ -12799,22 +14601,31 @@ func (ec *executionContext) _DatasetSchemaField_type(ctx context.Context, field return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -12834,22 +14645,31 @@ func (ec *executionContext) _DatasetSchemaField_schemaId(ctx context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec 
*executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_DatasetSchemaField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "DatasetSchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_refId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.RefID, nil @@ -12866,22 +14686,31 @@ func (ec *executionContext) _DatasetSchemaField_refId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchemaField_refId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetSchemaField().Schema(rctx, obj) @@ -12898,22 +14727,53 @@ func (ec *executionContext) _DatasetSchemaField_schema(ctx context.Context, fiel return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchemaField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case 
"representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchemaField_ref(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DatasetSchemaField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.DatasetSchemaField().Ref(rctx, obj) @@ -12930,22 +14790,53 @@ func (ec *executionContext) _DatasetSchemaField_ref(ctx context.Context, field g return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DatasetSchemaField_ref(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + 
switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteMePayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DeleteMePayload_userId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DeleteMePayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.UserID, nil @@ -12965,22 +14856,31 @@ func (ec *executionContext) _DeleteMePayload_userId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) 
fieldContext_DeleteMePayload_userId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DeleteMePayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteProjectPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DeleteProjectPayload_projectId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DeleteProjectPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ProjectID, nil @@ -13000,22 +14900,31 @@ func (ec *executionContext) _DeleteProjectPayload_projectId(ctx context.Context, return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DeleteProjectPayload_projectId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DeleteProjectPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) 
_DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DeleteTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DeleteTeamPayload_teamId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DeleteTeamPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TeamID, nil @@ -13035,22 +14944,31 @@ func (ec *executionContext) _DeleteTeamPayload_teamId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DeleteTeamPayload_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DeleteTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DetachTagFromLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DetachTagFromLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DetachTagFromLayerPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DetachTagFromLayerPayload", - Field: field, - 
Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -13070,22 +14988,31 @@ func (ec *executionContext) _DetachTagFromLayerPayload_layer(ctx context.Context return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DetachTagFromLayerPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DetachTagFromLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _DetachTagItemFromGroupPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DetachTagItemFromGroupPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DetachTagItemFromGroupPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "DetachTagItemFromGroupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tag, nil @@ -13105,22 +15032,47 @@ func (ec *executionContext) _DetachTagItemFromGroupPayload_tag(ctx context.Conte return 
ec.marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_DetachTagItemFromGroupPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DetachTagItemFromGroupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ImportDatasetPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ImportDatasetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchema, nil @@ -13140,22 +15092,53 @@ func 
(ec *executionContext) _ImportDatasetPayload_datasetSchema(ctx context.Cont return ec.marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ImportDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ImportDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ImportLayerPayload_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - 
fc := &graphql.FieldContext{ - Object: "ImportLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layers, nil @@ -13175,22 +15158,31 @@ func (ec *executionContext) _ImportLayerPayload_layers(ctx context.Context, fiel return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) } -func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_ImportLayerPayload_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "ImportLayerPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } + return fc, nil +} +func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ImportLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ImportLayerPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware 
stack in children return obj.ParentLayer, nil @@ -13210,22 +15202,75 @@ func (ec *executionContext) _ImportLayerPayload_parentLayer(ctx context.Context, return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ImportLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ImportLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + 
case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Infobox", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -13245,22 +15290,31 @@ func (ec *executionContext) _Infobox_sceneId(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Infobox_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Infobox", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_layerId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LayerID, nil @@ -13280,22 +15334,31 @@ func (ec *executionContext) _Infobox_layerId(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Infobox_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Infobox", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -13315,22 +15378,31 @@ func (ec *executionContext) _Infobox_propertyId(ctx context.Context, field graph return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := 
&graphql.FieldContext{ +func (ec *executionContext) fieldContext_Infobox_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Infobox", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fields, nil @@ -13350,22 +15422,65 @@ func (ec *executionContext) _Infobox_fields(ctx context.Context, field graphql.C return ec.marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxFieldแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Infobox_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_InfoboxField_id(ctx, field) + case "sceneId": + return ec.fieldContext_InfoboxField_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_InfoboxField_layerId(ctx, field) + case "propertyId": + return 
ec.fieldContext_InfoboxField_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_InfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_InfoboxField_extensionId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_InfoboxField_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_InfoboxField_layer(ctx, field) + case "infobox": + return ec.fieldContext_InfoboxField_infobox(ctx, field) + case "property": + return ec.fieldContext_InfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_InfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_InfoboxField_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_InfoboxField_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_InfoboxField_merged(ctx, field) + case "scene": + return ec.fieldContext_InfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_InfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type InfoboxField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Infobox", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetID, nil @@ -13382,22 +15497,31 @@ func (ec *executionContext) _Infobox_linkedDatasetId(ctx context.Context, field 
return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Infobox_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Infobox", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Infobox().Layer(rctx, obj) @@ -13417,22 +15541,31 @@ func (ec *executionContext) _Infobox_layer(ctx context.Context, field graphql.Co return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Infobox_layer(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Infobox", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } + return fc, nil +} +func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_property(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Infobox().Property(rctx, obj) @@ -13449,22 +15582,45 @@ func (ec *executionContext) _Infobox_property(ctx context.Context, field graphql return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Infobox_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return 
ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Infobox", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Infobox().LinkedDataset(rctx, obj) @@ -13481,22 +15637,45 @@ func (ec *executionContext) _Infobox_linkedDataset(ctx context.Context, field gr return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Infobox_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + 
return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_merged(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Infobox", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Infobox().Merged(rctx, obj) @@ -13513,22 +15692,41 @@ func (ec *executionContext) _Infobox_merged(ctx context.Context, field graphql.C return ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx, field.Selections, res) } -func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Infobox_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Infobox", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneID": + return ec.fieldContext_MergedInfobox_sceneID(ctx, field) + case "property": + return ec.fieldContext_MergedInfobox_property(ctx, field) + case "fields": + return 
ec.fieldContext_MergedInfobox_fields(ctx, field) + case "scene": + return ec.fieldContext_MergedInfobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfobox", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Infobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Infobox_scene(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Infobox().Scene(rctx, obj) @@ -13545,22 +15743,71 @@ func (ec *executionContext) _Infobox_scene(ctx context.Context, field graphql.Co return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Infobox_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Infobox", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return 
ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { + fc, err := ec.fieldContext_InfoboxField_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -13580,22 +15827,31 @@ func (ec *executionContext) _InfoboxField_id(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -13615,22 +15871,31 @@ func (ec *executionContext) _InfoboxField_sceneId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = 
graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_layerId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LayerID, nil @@ -13650,22 +15915,31 @@ func (ec *executionContext) _InfoboxField_layerId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -13685,22 +15959,31 @@ func (ec *executionContext) _InfoboxField_propertyId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_pluginId(ctx, field) + 
if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -13720,22 +16003,31 @@ func (ec *executionContext) _InfoboxField_pluginId(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_extensionId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ExtensionID, 
nil @@ -13755,22 +16047,31 @@ func (ec *executionContext) _InfoboxField_extensionId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetID, nil @@ -13787,22 +16088,31 @@ func (ec *executionContext) _InfoboxField_linkedDatasetId(ctx context.Context, f return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_linkedDatasetId(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Layer(rctx, obj) @@ -13822,22 +16132,31 @@ func (ec *executionContext) _InfoboxField_layer(ctx context.Context, field graph return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_InfoboxField_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "InfoboxField", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } + return fc, nil +} +func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_infobox(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Infobox(rctx, obj) @@ -13857,22 +16176,53 @@ func (ec *executionContext) _InfoboxField_infobox(ctx context.Context, field gra return ec.marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_Infobox_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_Infobox_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_Infobox_propertyId(ctx, field) + case "fields": + return ec.fieldContext_Infobox_fields(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_Infobox_layer(ctx, field) + case "property": + return ec.fieldContext_Infobox_property(ctx, field) + case "linkedDataset": + 
return ec.fieldContext_Infobox_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_Infobox_merged(ctx, field) + case "scene": + return ec.fieldContext_Infobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Infobox", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Property(rctx, obj) @@ -13889,22 +16239,45 @@ func (ec *executionContext) _InfoboxField_property(ctx context.Context, field gr return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return 
ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Plugin(rctx, obj) @@ -13921,22 +16294,65 @@ func (ec *executionContext) _InfoboxField_plugin(ctx context.Context, field grap return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case 
"description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Extension(rctx, obj) @@ -13953,22 +16369,67 @@ func (ec *executionContext) _InfoboxField_extension(ctx context.Context, 
field g return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return 
ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().LinkedDataset(rctx, obj) @@ -13985,22 +16446,45 @@ func (ec *executionContext) _InfoboxField_linkedDataset(ctx context.Context, fie return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case 
"name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_merged(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Merged(rctx, obj) @@ -14017,22 +16501,51 @@ func (ec *executionContext) _InfoboxField_merged(ctx context.Context, field grap return ec.marshalOMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedInfoboxField_originalId(ctx, field) + case "sceneID": + return ec.fieldContext_MergedInfoboxField_sceneID(ctx, field) + case "pluginId": + return ec.fieldContext_MergedInfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_MergedInfoboxField_extensionId(ctx, field) + case "property": + return 
ec.fieldContext_MergedInfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_MergedInfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_MergedInfoboxField_extension(ctx, field) + case "scene": + return ec.fieldContext_MergedInfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_MergedInfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfoboxField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().Scene(rctx, obj) @@ -14049,22 +16562,71 @@ func (ec *executionContext) _InfoboxField_scene(ctx context.Context, field graph return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + 
case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InfoboxField_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = 
graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.InfoboxField().ScenePlugin(rctx, obj) @@ -14081,22 +16643,41 @@ func (ec *executionContext) _InfoboxField_scenePlugin(ctx context.Context, field return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InstallPluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InstallPluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -14116,22 +16697,71 @@ func (ec *executionContext) _InstallPluginPayload_scene(ctx context.Context, fie return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return 
ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.InstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_InstallPluginPayload_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "InstallPluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ScenePlugin, nil @@ -14151,22 +16781,41 @@ func (ec *executionContext) _InstallPluginPayload_scenePlugin(ctx context.Contex return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_InstallPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "InstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + 
case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLng_lat(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LatLng", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lat, nil @@ -14186,22 +16835,31 @@ func (ec *executionContext) _LatLng_lat(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LatLng_lat(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LatLng", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLng) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_LatLng_lng(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lng, nil @@ -14221,22 +16879,31 @@ func (ec *executionContext) _LatLng_lng(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LatLng_lng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLng", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLngHeight_lat(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LatLngHeight", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lat, nil @@ -14256,22 +16923,31 @@ func (ec *executionContext) _LatLngHeight_lat(ctx context.Context, field graphql return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLngHeight_lng(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LatLngHeight_lat(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LatLngHeight", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLngHeight_lng(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lng, nil @@ -14291,22 +16967,31 @@ func (ec *executionContext) _LatLngHeight_lng(ctx context.Context, field graphql return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LatLngHeight_lng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LatLngHeight", Field: field, - Args: nil, 
IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LatLngHeight) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LatLngHeight_height(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Height, nil @@ -14326,22 +17011,31 @@ func (ec *executionContext) _LatLngHeight_height(ctx context.Context, field grap return ec.marshalNFloat2float64(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LatLngHeight_height(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LatLngHeight", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = 
graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -14361,22 +17055,31 @@ func (ec *executionContext) _LayerGroup_id(ctx context.Context, field graphql.Co return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -14396,22 +17099,31 @@ func (ec *executionContext) _LayerGroup_sceneId(ctx context.Context, field graph return 
ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -14431,22 +17143,31 @@ func (ec *executionContext) _LayerGroup_name(ctx context.Context, field graphql. 
return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_isVisible(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsVisible, nil @@ -14466,22 +17187,31 @@ func (ec *executionContext) _LayerGroup_isVisible(ctx context.Context, field gra return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_isVisible(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -14498,22 +17228,31 @@ func (ec *executionContext) _LayerGroup_propertyId(ctx context.Context, field gr return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func 
(ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_pluginId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -14530,22 +17269,31 @@ func (ec *executionContext) _LayerGroup_pluginId(ctx context.Context, field grap return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_extensionId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ExtensionID, nil @@ -14562,22 +17310,31 @@ func (ec *executionContext) _LayerGroup_extensionId(ctx context.Context, field g return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_infobox(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Infobox, nil @@ -14594,22 +17351,53 @@ func (ec *executionContext) _LayerGroup_infobox(ctx context.Context, field graph return 
ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_Infobox_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_Infobox_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_Infobox_propertyId(ctx, field) + case "fields": + return ec.fieldContext_Infobox_fields(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_Infobox_layer(ctx, field) + case "property": + return ec.fieldContext_Infobox_property(ctx, field) + case "linkedDataset": + return ec.fieldContext_Infobox_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_Infobox_merged(ctx, field) + case "scene": + return ec.fieldContext_Infobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Infobox", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -14626,22 +17414,31 @@ func (ec *executionContext) _LayerGroup_parentId(ctx context.Context, field grap return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetSchemaID, nil @@ -14658,22 +17455,31 @@ func (ec *executionContext) _LayerGroup_linkedDatasetSchemaId(ctx context.Contex return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_linkedDatasetSchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_root(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Root, nil @@ -14693,22 +17499,31 @@ func (ec *executionContext) _LayerGroup_root(ctx context.Context, field graphql. 
return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_root(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_layerIds(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LayerIds, nil @@ -14728,22 +17543,31 @@ func (ec *executionContext) _LayerGroup_layerIds(ctx context.Context, field grap return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_LayerGroup_layerIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_tags(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tags, nil @@ -14763,22 +17587,31 @@ func (ec *executionContext) _LayerGroup_tags(ctx context.Context, field graphql. 
return ec.marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().Parent(rctx, obj) @@ -14795,22 +17628,75 @@ func (ec *executionContext) _LayerGroup_parent(ctx context.Context, field graphq return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_property(ctx, field) 
+ if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().Property(rctx, obj) @@ -14827,22 +17713,45 @@ func (ec *executionContext) _LayerGroup_property(ctx context.Context, field grap return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx 
= graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().Plugin(rctx, obj) @@ -14859,22 +17768,65 @@ func (ec *executionContext) _LayerGroup_plugin(ctx context.Context, field graphq return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case 
"allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().Extension(rctx, obj) @@ -14891,22 +17843,67 @@ func (ec *executionContext) _LayerGroup_extension(ctx context.Context, field gra return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + 
return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().LinkedDatasetSchema(rctx, obj) @@ -14923,22 +17920,53 @@ func (ec *executionContext) _LayerGroup_linkedDatasetSchema(ctx context.Context, return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + 
return fc, nil +} + func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().Layers(rctx, obj) @@ -14958,22 +17986,31 @@ func (ec *executionContext) _LayerGroup_layers(ctx context.Context, field graphq return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerGroup_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerGroup", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } + return fc, nil +} +func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_LayerGroup_scene(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().Scene(rctx, obj) @@ -14990,22 +18027,71 @@ func (ec *executionContext) _LayerGroup_scene(ctx context.Context, field graphql return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return 
ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerGroup().ScenePlugin(rctx, obj) @@ -15022,22 +18108,41 @@ func (ec *executionContext) _LayerGroup_scenePlugin(ctx context.Context, field g return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerGroup_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) 
{ + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -15057,22 +18162,31 @@ func (ec *executionContext) _LayerItem_id(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -15092,22 +18206,31 @@ func (ec *executionContext) _LayerItem_sceneId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_name(ctx, field) + if err != nil { + 
return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -15127,22 +18250,31 @@ func (ec *executionContext) _LayerItem_name(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_isVisible(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsVisible, nil @@ -15162,22 +18294,31 @@ func (ec *executionContext) _LayerItem_isVisible(ctx context.Context, field grap return 
ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_isVisible(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -15194,22 +18335,31 @@ func (ec *executionContext) _LayerItem_propertyId(ctx context.Context, field gra return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_LayerItem_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_pluginId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -15226,22 +18376,31 @@ func (ec *executionContext) _LayerItem_pluginId(ctx context.Context, field graph return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child 
fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_extensionId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ExtensionID, nil @@ -15258,22 +18417,31 @@ func (ec *executionContext) _LayerItem_extensionId(ctx context.Context, field gr return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_infobox(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + 
ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Infobox, nil @@ -15290,22 +18458,53 @@ func (ec *executionContext) _LayerItem_infobox(ctx context.Context, field graphq return ec.marshalOInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_Infobox_sceneId(ctx, field) + case "layerId": + return ec.fieldContext_Infobox_layerId(ctx, field) + case "propertyId": + return ec.fieldContext_Infobox_propertyId(ctx, field) + case "fields": + return ec.fieldContext_Infobox_fields(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_Infobox_linkedDatasetId(ctx, field) + case "layer": + return ec.fieldContext_Infobox_layer(ctx, field) + case "property": + return ec.fieldContext_Infobox_property(ctx, field) + case "linkedDataset": + return ec.fieldContext_Infobox_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_Infobox_merged(ctx, field) + case "scene": + return ec.fieldContext_Infobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Infobox", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_parentId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -15322,22 +18521,31 @@ func (ec *executionContext) _LayerItem_parentId(ctx context.Context, field graph return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetID, nil @@ -15354,22 +18562,31 @@ func (ec *executionContext) _LayerItem_linkedDatasetId(ctx context.Context, fiel return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_tags(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tags, nil @@ -15389,22 +18606,31 @@ func (ec *executionContext) _LayerItem_tags(ctx context.Context, field graphql.C return ec.marshalNLayerTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagแš„(ctx, field.Selections, res) } +func (ec *executionContext) 
fieldContext_LayerItem_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().Parent(rctx, obj) @@ -15421,22 +18647,75 @@ func (ec *executionContext) _LayerItem_parent(ctx context.Context, field graphql return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": 
+ return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := 
&graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().Property(rctx, obj) @@ -15453,22 +18732,45 @@ func (ec *executionContext) _LayerItem_property(ctx context.Context, field graph return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: 
field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().Plugin(rctx, obj) @@ -15485,22 +18787,65 @@ func (ec *executionContext) _LayerItem_plugin(ctx context.Context, field graphql return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _LayerItem_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerItem_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerItem", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case 
"allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} +func (ec *executionContext) _LayerItem_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_extension(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().Extension(rctx, obj) @@ -15517,22 +18862,67 @@ func (ec *executionContext) _LayerItem_extension(ctx context.Context, field grap return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + 
case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: 
"LayerItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().LinkedDataset(rctx, obj) @@ -15549,22 +18939,45 @@ func (ec *executionContext) _LayerItem_linkedDataset(ctx context.Context, field return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_merged(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: 
true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().Merged(rctx, obj) @@ -15581,22 +18994,49 @@ func (ec *executionContext) _LayerItem_merged(ctx context.Context, field graphql return ec.marshalOMergedLayer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedLayer_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedLayer_parentId(ctx, field) + case "sceneID": + return ec.fieldContext_MergedLayer_sceneID(ctx, field) + case "property": + return ec.fieldContext_MergedLayer_property(ctx, field) + case "infobox": + return ec.fieldContext_MergedLayer_infobox(ctx, field) + case "original": + return ec.fieldContext_MergedLayer_original(ctx, field) + case "parent": + return ec.fieldContext_MergedLayer_parent(ctx, field) + case "scene": + return ec.fieldContext_MergedLayer_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedLayer", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().Scene(rctx, obj) @@ -15613,22 +19053,71 @@ func (ec *executionContext) _LayerItem_scene(ctx context.Context, field graphql. return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + 
case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerItem_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerItem().ScenePlugin(rctx, obj) @@ -15645,22 +19134,41 @@ func (ec *executionContext) _LayerItem_scenePlugin(ctx context.Context, field gr return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerItem_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + 
return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerTagGroup_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagGroup_tagId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerTagGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TagID, nil @@ -15680,22 +19188,31 @@ func (ec *executionContext) _LayerTagGroup_tagId(ctx context.Context, field grap return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _LayerTagGroup_children(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_LayerTagGroup_tagId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "LayerTagGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _LayerTagGroup_children(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagGroup_children(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Children, nil @@ -15715,22 +19232,37 @@ func (ec *executionContext) _LayerTagGroup_children(ctx context.Context, field g return ec.marshalNLayerTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItemแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerTagGroup_children(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tagId": + return ec.fieldContext_LayerTagItem_tagId(ctx, field) + case "tag": + return ec.fieldContext_LayerTagItem_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerTagItem", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _LayerTagGroup_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagGroup_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerTagGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerTagGroup().Tag(rctx, obj) @@ -15747,22 +19279,31 @@ func (ec *executionContext) _LayerTagGroup_tag(ctx context.Context, field graphq return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerTagGroup_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _LayerTagItem_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagItem_tagId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerTagItem", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TagID, nil @@ -15782,22 
+19323,31 @@ func (ec *executionContext) _LayerTagItem_tagId(ctx context.Context, field graph return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerTagItem_tagId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _LayerTagItem_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.LayerTagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_LayerTagItem_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "LayerTagItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.LayerTagItem().Tag(rctx, obj) @@ -15814,22 +19364,31 @@ func (ec *executionContext) _LayerTagItem_tag(ctx context.Context, field graphql return ec.marshalOTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_LayerTagItem_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "LayerTagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _Me_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Me", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -15849,22 +19408,31 @@ func (ec *executionContext) _Me_id(ctx context.Context, field graphql.CollectedF return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Me_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Me_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Me", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Me_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + 
fc, err := ec.fieldContext_Me_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -15884,22 +19452,31 @@ func (ec *executionContext) _Me_name(ctx context.Context, field graphql.Collecte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Me_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Me_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Me", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Me_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_email(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Email, nil @@ -15919,22 +19496,31 @@ func (ec *executionContext) _Me_email(ctx context.Context, field graphql.Collect return 
ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Me_lang(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Me_email(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Me", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Me_lang(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_lang(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Lang, nil @@ -15954,22 +19540,31 @@ func (ec *executionContext) _Me_lang(ctx context.Context, field graphql.Collecte return ec.marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTag(ctx, field.Selections, res) } -func (ec *executionContext) _Me_theme(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Me_lang(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ 
Object: "Me", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Lang does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Me_theme(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_theme(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Theme, nil @@ -15989,22 +19584,31 @@ func (ec *executionContext) _Me_theme(ctx context.Context, field graphql.Collect return ec.marshalNTheme2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTheme(ctx, field.Selections, res) } -func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Me_theme(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Me", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Theme does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_myTeamId(ctx, 
field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.MyTeamID, nil @@ -16024,22 +19628,31 @@ func (ec *executionContext) _Me_myTeamId(ctx context.Context, field graphql.Coll return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Me_myTeamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Me", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Me_auths(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_auths(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Auths, nil @@ -16059,22 +19672,31 @@ func (ec *executionContext) _Me_auths(ctx 
context.Context, field graphql.Collect return ec.marshalNString2แš•stringแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Me_auths(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Me_teams(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_teams(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Me", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Me().Teams(rctx, obj) @@ -16094,22 +19716,45 @@ func (ec *executionContext) _Me_teams(ctx context.Context, field graphql.Collect return ec.marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Me_teams(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return 
ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Me_myTeam(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Me) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Me_myTeam(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Me", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Me().MyTeam(rctx, obj) @@ -16129,22 +19774,45 @@ func (ec *executionContext) _Me_myTeam(ctx context.Context, field graphql.Collec return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Me_myTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Me", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return 
ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_sceneID(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedInfobox", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -16164,22 +19832,31 @@ func (ec *executionContext) _MergedInfobox_sceneID(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedInfobox_sceneID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfobox", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_property(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Property, nil @@ -16196,22 +19873,51 @@ func (ec *executionContext) _MergedInfobox_property(ctx context.Context, field g return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedInfobox_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfobox", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case 
"original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fields, nil @@ -16231,22 +19937,51 @@ func (ec *executionContext) _MergedInfobox_fields(ctx context.Context, field gra return ec.marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxFieldแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedInfobox_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedInfoboxField_originalId(ctx, field) + case "sceneID": + return ec.fieldContext_MergedInfoboxField_sceneID(ctx, field) + case "pluginId": + return 
ec.fieldContext_MergedInfoboxField_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_MergedInfoboxField_extensionId(ctx, field) + case "property": + return ec.fieldContext_MergedInfoboxField_property(ctx, field) + case "plugin": + return ec.fieldContext_MergedInfoboxField_plugin(ctx, field) + case "extension": + return ec.fieldContext_MergedInfoboxField_extension(ctx, field) + case "scene": + return ec.fieldContext_MergedInfoboxField_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_MergedInfoboxField_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfoboxField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfobox) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfobox_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedInfobox", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedInfobox().Scene(rctx, obj) @@ -16263,22 +19998,71 @@ func (ec *executionContext) _MergedInfobox_scene(ctx context.Context, field grap return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedInfobox_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfobox", + Field: field, + IsMethod: true, + IsResolver: true, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_originalId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { 
ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.OriginalID, nil @@ -16298,22 +20082,31 @@ func (ec *executionContext) _MergedInfoboxField_originalId(ctx context.Context, return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedInfoboxField_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_sceneID(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx 
// use context from middleware stack in children return obj.SceneID, nil @@ -16333,22 +20126,31 @@ func (ec *executionContext) _MergedInfoboxField_sceneID(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedInfoboxField_sceneID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_pluginId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -16368,22 +20170,31 @@ func (ec *executionContext) _MergedInfoboxField_pluginId(ctx context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec 
*executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedInfoboxField_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_extensionId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ExtensionID, nil @@ -16403,22 +20214,31 @@ func (ec *executionContext) _MergedInfoboxField_extensionId(ctx context.Context, return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec 
*executionContext) fieldContext_MergedInfoboxField_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfoboxField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_property(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Property, nil @@ -16435,22 +20255,51 @@ func (ec *executionContext) _MergedInfoboxField_property(ctx context.Context, fi return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedInfoboxField_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedInfoboxField", Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - + IsMethod: false, + IsResolver: 
false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_plugin(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedInfoboxField().Plugin(rctx, obj) @@ -16467,22 +20316,65 @@ func (ec *executionContext) _MergedInfoboxField_plugin(ctx context.Context, fiel return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedInfoboxField_plugin(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedInfoboxField().Extension(rctx, obj) @@ -16499,22 +20391,67 @@ func (ec *executionContext) _MergedInfoboxField_extension(ctx context.Context, f return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedInfoboxField_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return 
ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedInfoboxField_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedInfoboxField().Scene(rctx, obj) @@ -16531,22 +20468,71 @@ func (ec *executionContext) _MergedInfoboxField_scene(ctx context.Context, field return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedInfoboxField_scene(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedInfoboxField) (ret graphql.Marshaler) { + 
fc, err := ec.fieldContext_MergedInfoboxField_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedInfoboxField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedInfoboxField().ScenePlugin(rctx, obj) @@ -16563,22 +20549,41 @@ func (ec *executionContext) _MergedInfoboxField_scenePlugin(ctx context.Context, return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedInfoboxField_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedInfoboxField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_originalId(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedLayer", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.OriginalID, nil @@ -16598,22 +20603,31 @@ func (ec *executionContext) _MergedLayer_originalId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedLayer_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedLayer", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_parentId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -16630,22 +20644,31 @@ func (ec *executionContext) _MergedLayer_parentId(ctx context.Context, field gra return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedLayer_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedLayer", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_sceneID(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -16665,22 +20688,31 @@ func (ec *executionContext) _MergedLayer_sceneID(ctx context.Context, field grap return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec 
*executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedLayer_sceneID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedLayer", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedLayer_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_property(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Property, nil @@ -16697,22 +20729,51 @@ func (ec *executionContext) _MergedLayer_property(ctx context.Context, field gra return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } -func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedLayer_property(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedLayer", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_infobox(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Infobox, nil @@ -16729,22 +20790,41 @@ func (ec *executionContext) _MergedLayer_infobox(ctx context.Context, field grap return 
ec.marshalOMergedInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfobox(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedLayer_infobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneID": + return ec.fieldContext_MergedInfobox_sceneID(ctx, field) + case "property": + return ec.fieldContext_MergedInfobox_property(ctx, field) + case "fields": + return ec.fieldContext_MergedInfobox_fields(ctx, field) + case "scene": + return ec.fieldContext_MergedInfobox_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedInfobox", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedLayer_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_original(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedLayer", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedLayer().Original(rctx, obj) @@ -16761,22 +20841,71 @@ func (ec *executionContext) _MergedLayer_original(ctx context.Context, field gra return ec.marshalOLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx, 
field.Selections, res) } +func (ec *executionContext) fieldContext_MergedLayer_original(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerItem_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerItem_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerItem_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerItem_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerItem_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerItem_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerItem_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerItem_parentId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_LayerItem_linkedDatasetId(ctx, field) + case "tags": + return ec.fieldContext_LayerItem_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerItem_parent(ctx, field) + case "property": + return ec.fieldContext_LayerItem_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerItem_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerItem_extension(ctx, field) + case "linkedDataset": + return ec.fieldContext_LayerItem_linkedDataset(ctx, field) + case "merged": + return ec.fieldContext_LayerItem_merged(ctx, field) + case "scene": + return ec.fieldContext_LayerItem_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerItem_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerItem", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) 
_MergedLayer_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedLayer", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedLayer().Parent(rctx, obj) @@ -16793,22 +20922,75 @@ func (ec *executionContext) _MergedLayer_parent(ctx context.Context, field graph return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedLayer_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case 
"parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedLayer_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedLayer) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedLayer_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedLayer", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedLayer().Scene(rctx, obj) @@ -16825,22 +21007,71 @@ func (ec *executionContext) _MergedLayer_scene(ctx context.Context, 
field graphq return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedLayer_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedLayer", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", 
field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_originalId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedProperty", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.OriginalID, nil @@ -16857,22 +21088,31 @@ func (ec *executionContext) _MergedProperty_originalId(ctx context.Context, fiel return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedProperty_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedProperty", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_parentId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -16889,22 +21129,31 @@ func (ec *executionContext) _MergedProperty_parentId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedProperty_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedProperty", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -16921,22 +21170,31 @@ func (ec *executionContext) _MergedProperty_schemaId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedProperty_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedProperty", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetID, nil @@ -16953,22 +21211,31 @@ func (ec *executionContext) _MergedProperty_linkedDatasetId(ctx context.Context, return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } 
+func (ec *executionContext) fieldContext_MergedProperty_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _MergedProperty_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_original(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedProperty", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedProperty().Original(rctx, obj) @@ -16985,22 +21252,45 @@ func (ec *executionContext) _MergedProperty_original(ctx context.Context, field return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedProperty_original(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case 
"schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedProperty_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedProperty", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedProperty().Parent(rctx, obj) @@ -17017,22 +21307,45 @@ func (ec *executionContext) _MergedProperty_parent(ctx context.Context, field gr return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedProperty_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return 
ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedProperty_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedProperty", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedProperty().Schema(rctx, obj) @@ -17049,22 +21362,39 @@ func (ec *executionContext) _MergedProperty_schema(ctx context.Context, field gr return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedProperty_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ Object: "MergedProperty", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedProperty().LinkedDataset(rctx, obj) @@ -17081,22 +21411,45 @@ func (ec *executionContext) _MergedProperty_linkedDataset(ctx context.Context, f return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedProperty_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return 
ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedProperty) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedProperty_groups(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedProperty", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedProperty().Groups(rctx, obj) @@ -17116,22 +21469,63 @@ func (ec *executionContext) _MergedProperty_groups(ctx context.Context, field gr return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedProperty_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedProperty", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalPropertyId": + return 
ec.fieldContext_MergedPropertyGroup_originalPropertyId(ctx, field) + case "parentPropertyId": + return ec.fieldContext_MergedPropertyGroup_parentPropertyId(ctx, field) + case "originalId": + return ec.fieldContext_MergedPropertyGroup_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedPropertyGroup_parentId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_MergedPropertyGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedPropertyGroup_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedPropertyGroup_linkedDatasetId(ctx, field) + case "fields": + return ec.fieldContext_MergedPropertyGroup_fields(ctx, field) + case "groups": + return ec.fieldContext_MergedPropertyGroup_groups(ctx, field) + case "originalProperty": + return ec.fieldContext_MergedPropertyGroup_originalProperty(ctx, field) + case "parentProperty": + return ec.fieldContext_MergedPropertyGroup_parentProperty(ctx, field) + case "original": + return ec.fieldContext_MergedPropertyGroup_original(ctx, field) + case "parent": + return ec.fieldContext_MergedPropertyGroup_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedPropertyGroup_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedPropertyGroup_linkedDataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedPropertyGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyField", - Field: field, - Args: nil, - IsMethod: false, 
- IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -17151,22 +21545,31 @@ func (ec *executionContext) _MergedPropertyField_schemaId(ctx context.Context, f return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FieldID, nil @@ -17186,22 +21589,31 @@ func (ec *executionContext) _MergedPropertyField_fieldId(ctx 
context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_value(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Value, nil @@ -17218,22 +21630,31 @@ func (ec *executionContext) _MergedPropertyField_value(ctx context.Context, fiel return ec.marshalOAny2interface(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = 
graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyField_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Type, nil @@ -17253,22 +21674,31 @@ func (ec *executionContext) _MergedPropertyField_type(ctx context.Context, field return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + 
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_links(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Links, nil @@ -17285,22 +21715,47 @@ func (ec *executionContext) _MergedPropertyField_links(ctx context.Context, fiel return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyField_links(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetId": + return ec.fieldContext_PropertyFieldLink_datasetId(ctx, field) + case "datasetSchemaId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaId(ctx, field) 
+ case "datasetSchemaFieldId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx, field) + case "dataset": + return ec.fieldContext_PropertyFieldLink_dataset(ctx, field) + case "datasetField": + return ec.fieldContext_PropertyFieldLink_datasetField(ctx, field) + case "datasetSchema": + return ec.fieldContext_PropertyFieldLink_datasetSchema(ctx, field) + case "datasetSchemaField": + return ec.fieldContext_PropertyFieldLink_datasetSchemaField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldLink", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_overridden(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Overridden, nil @@ -17320,22 +21775,31 @@ func (ec *executionContext) _MergedPropertyField_overridden(ctx context.Context, return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyField_overridden(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyField().Schema(rctx, obj) @@ -17352,26 +21816,43 @@ func (ec *executionContext) _MergedPropertyField_schema(ctx context.Context, fie return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyField", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type 
PropertySchema", field.Name) + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.MergedPropertyField().Field(rctx, obj) - }) +func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_field(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.MergedPropertyField().Field(rctx, obj) + }) if err != nil { ec.Error(ctx, err) return graphql.Null @@ -17384,22 +21865,65 @@ func (ec *executionContext) _MergedPropertyField_field(ctx context.Context, fiel return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyField_field(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return 
ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyField_actualValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx 
// use context from middleware stack in children return ec.resolvers.MergedPropertyField().ActualValue(rctx, obj) @@ -17416,22 +21940,31 @@ func (ec *executionContext) _MergedPropertyField_actualValue(ctx context.Context return ec.marshalOAny2interface(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyField_actualValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_originalPropertyId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.OriginalPropertyID, nil @@ -17448,22 +21981,31 @@ func (ec *executionContext) _MergedPropertyGroup_originalPropertyId(ctx context. 
return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_originalPropertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parentPropertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentPropertyID, nil @@ -17480,22 +22022,31 @@ func (ec *executionContext) _MergedPropertyGroup_parentPropertyId(ctx context.Co return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret 
graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_parentPropertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_originalId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.OriginalID, nil @@ -17512,22 +22063,31 @@ func (ec *executionContext) _MergedPropertyGroup_originalId(ctx context.Context, return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_originalId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, 
err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parentId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -17544,22 +22104,31 @@ func (ec *executionContext) _MergedPropertyGroup_parentId(ctx context.Context, f return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, 
nil +} +func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaGroupID, nil @@ -17579,22 +22148,31 @@ func (ec *executionContext) _MergedPropertyGroup_schemaGroupId(ctx context.Conte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -17611,22 +22189,31 @@ func (ec *executionContext) _MergedPropertyGroup_schemaId(ctx context.Context, f return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_linkedDatasetId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return 
obj.LinkedDatasetID, nil @@ -17643,22 +22230,31 @@ func (ec *executionContext) _MergedPropertyGroup_linkedDatasetId(ctx context.Con return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_linkedDatasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fields, nil @@ -17678,22 +22274,51 @@ func (ec *executionContext) _MergedPropertyGroup_fields(ctx context.Context, fie return ec.marshalNMergedPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyFieldแš„(ctx, field.Selections, res) } -func (ec 
*executionContext) _MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaId": + return ec.fieldContext_MergedPropertyField_schemaId(ctx, field) + case "fieldId": + return ec.fieldContext_MergedPropertyField_fieldId(ctx, field) + case "value": + return ec.fieldContext_MergedPropertyField_value(ctx, field) + case "type": + return ec.fieldContext_MergedPropertyField_type(ctx, field) + case "links": + return ec.fieldContext_MergedPropertyField_links(ctx, field) + case "overridden": + return ec.fieldContext_MergedPropertyField_overridden(ctx, field) + case "schema": + return ec.fieldContext_MergedPropertyField_schema(ctx, field) + case "field": + return ec.fieldContext_MergedPropertyField_field(ctx, field) + case "actualValue": + return ec.fieldContext_MergedPropertyField_actualValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedPropertyField", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_groups(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Groups, nil @@ -17713,22 +22338,63 @@ func (ec *executionContext) _MergedPropertyGroup_groups(ctx context.Context, fie return ec.marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroupแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyGroup_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalPropertyId": + return ec.fieldContext_MergedPropertyGroup_originalPropertyId(ctx, field) + case "parentPropertyId": + return ec.fieldContext_MergedPropertyGroup_parentPropertyId(ctx, field) + case "originalId": + return ec.fieldContext_MergedPropertyGroup_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedPropertyGroup_parentId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_MergedPropertyGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedPropertyGroup_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedPropertyGroup_linkedDatasetId(ctx, field) + case "fields": + return ec.fieldContext_MergedPropertyGroup_fields(ctx, field) + case "groups": + return ec.fieldContext_MergedPropertyGroup_groups(ctx, field) + case "originalProperty": + return ec.fieldContext_MergedPropertyGroup_originalProperty(ctx, field) + case "parentProperty": + return ec.fieldContext_MergedPropertyGroup_parentProperty(ctx, field) + case "original": + return ec.fieldContext_MergedPropertyGroup_original(ctx, field) + case 
"parent": + return ec.fieldContext_MergedPropertyGroup_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedPropertyGroup_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedPropertyGroup_linkedDataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedPropertyGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_originalProperty(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyGroup().OriginalProperty(rctx, obj) @@ -17745,22 +22411,45 @@ func (ec *executionContext) _MergedPropertyGroup_originalProperty(ctx context.Co return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyGroup_originalProperty(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return 
ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parentProperty(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyGroup().ParentProperty(rctx, obj) @@ -17777,22 +22466,45 @@ func (ec *executionContext) _MergedPropertyGroup_parentProperty(ctx context.Cont return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyGroup_parentProperty(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return 
ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_original(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyGroup().Original(rctx, obj) @@ -17809,22 +22521,45 @@ func (ec *executionContext) _MergedPropertyGroup_original(ctx context.Context, f return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyGroup_original(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch 
field.Name { + case "id": + return ec.fieldContext_PropertyGroup_id(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyGroup_schemaId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + case "fields": + return ec.fieldContext_PropertyGroup_fields(ctx, field) + case "schema": + return ec.fieldContext_PropertyGroup_schema(ctx, field) + case "schemaGroup": + return ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyGroup().Parent(rctx, obj) @@ -17841,22 +22576,45 @@ func (ec *executionContext) _MergedPropertyGroup_parent(ctx context.Context, fie return ec.marshalOPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyGroup_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyGroup_id(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyGroup_schemaId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + case "fields": + return ec.fieldContext_PropertyGroup_fields(ctx, field) + case "schema": + return ec.fieldContext_PropertyGroup_schema(ctx, field) + case "schemaGroup": + return ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MergedPropertyGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyGroup().Schema(rctx, obj) @@ -17873,22 +22631,39 @@ func (ec *executionContext) _MergedPropertyGroup_schema(ctx context.Context, fie return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := 
recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MergedPropertyGroup_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MergedPropertyGroup", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MergedPropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MergedPropertyGroup_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.MergedPropertyGroup().LinkedDataset(rctx, obj) @@ -17905,22 +22680,45 @@ func (ec *executionContext) _MergedPropertyGroup_linkedDataset(ctx context.Conte return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MergedPropertyGroup_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "MergedPropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MoveInfoboxFieldPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.InfoboxFieldID, nil @@ -17940,22 +22738,31 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_infoboxFieldId(ctx context. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MoveInfoboxFieldPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveInfoboxFieldPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -17975,22 +22782,31 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_layer(ctx context.Context, return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret 
graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MoveInfoboxFieldPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } + return fc, nil +} +func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveInfoboxFieldPayload_index(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Index, nil @@ -18010,22 +22826,31 @@ func (ec *executionContext) _MoveInfoboxFieldPayload_index(ctx context.Context, return ec.marshalNInt2int(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MoveInfoboxFieldPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _MoveLayerPayload_layerId(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_layerId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MoveLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LayerID, nil @@ -18045,22 +22870,31 @@ func (ec *executionContext) _MoveLayerPayload_layerId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_MoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "MoveLayerPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_fromParentLayer(ctx, field) 
+ if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FromParentLayer, nil @@ -18080,22 +22914,75 @@ func (ec *executionContext) _MoveLayerPayload_fromParentLayer(ctx context.Contex return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MoveLayerPayload_fromParentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return 
ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_toParentLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MoveLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ToParentLayer, nil @@ -18115,22 +23002,75 @@ func (ec *executionContext) _MoveLayerPayload_toParentLayer(ctx context.Context, return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MoveLayerPayload_toParentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err 
error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec 
*executionContext) _MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.MoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_MoveLayerPayload_index(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "MoveLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Index, nil @@ -18150,32 +23090,34 @@ func (ec *executionContext) _MoveLayerPayload_index(ctx context.Context, field g return ec.marshalNInt2int(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_MoveLayerPayload_index(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "MoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createAsset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Mutation", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err 
:= ec.field_Mutation_createAsset_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateAsset(rctx, args["input"].(gqlmodel.CreateAssetInput)) + return ec.resolvers.Mutation().CreateAsset(rctx, fc.Args["input"].(gqlmodel.CreateAssetInput)) }) if err != nil { ec.Error(ctx, err) @@ -18189,32 +23131,49 @@ func (ec *executionContext) _Mutation_createAsset(ctx context.Context, field gra return ec.marshalOCreateAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateAssetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_createAsset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "asset": + return ec.fieldContext_CreateAssetPayload_asset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateAssetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeAsset_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_createAsset_args(ctx, 
field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeAsset(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveAsset(rctx, args["input"].(gqlmodel.RemoveAssetInput)) + return ec.resolvers.Mutation().RemoveAsset(rctx, fc.Args["input"].(gqlmodel.RemoveAssetInput)) }) if err != nil { ec.Error(ctx, err) @@ -18228,32 +23187,49 @@ func (ec *executionContext) _Mutation_removeAsset(ctx context.Context, field gra return ec.marshalORemoveAssetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveAssetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeAsset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "assetId": + return ec.fieldContext_RemoveAssetPayload_assetId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveAssetPayload", 
field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_signup_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeAsset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_signup(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().Signup(rctx, args["input"].(gqlmodel.SignupInput)) + return ec.resolvers.Mutation().Signup(rctx, fc.Args["input"].(gqlmodel.SignupInput)) }) if err != nil { ec.Error(ctx, err) @@ -18267,32 +23243,51 @@ func (ec *executionContext) _Mutation_signup(ctx context.Context, field graphql. 
return ec.marshalOSignupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSignupPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_signup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "user": + return ec.fieldContext_SignupPayload_user(ctx, field) + case "team": + return ec.fieldContext_SignupPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SignupPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateMe_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_signup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) - return graphql.Null + return } - fc.Args = args - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateMe(rctx, args["input"].(gqlmodel.UpdateMeInput)) + return fc, nil +} + +func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateMe(ctx, field) + if err != nil { + return graphql.Null + } + 
ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Mutation().UpdateMe(rctx, fc.Args["input"].(gqlmodel.UpdateMeInput)) }) if err != nil { ec.Error(ctx, err) @@ -18306,32 +23301,49 @@ func (ec *executionContext) _Mutation_updateMe(ctx context.Context, field graphq return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateMe(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "me": + return ec.fieldContext_UpdateMePayload_me(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateMePayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeMyAuth_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateMe_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec 
*executionContext) _Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeMyAuth(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveMyAuth(rctx, args["input"].(gqlmodel.RemoveMyAuthInput)) + return ec.resolvers.Mutation().RemoveMyAuth(rctx, fc.Args["input"].(gqlmodel.RemoveMyAuthInput)) }) if err != nil { ec.Error(ctx, err) @@ -18345,32 +23357,49 @@ func (ec *executionContext) _Mutation_removeMyAuth(ctx context.Context, field gr return ec.marshalOUpdateMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMePayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeMyAuth(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "me": + return ec.fieldContext_UpdateMePayload_me(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateMePayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() 
ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_deleteMe_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeMyAuth_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_deleteMe(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DeleteMe(rctx, args["input"].(gqlmodel.DeleteMeInput)) + return ec.resolvers.Mutation().DeleteMe(rctx, fc.Args["input"].(gqlmodel.DeleteMeInput)) }) if err != nil { ec.Error(ctx, err) @@ -18384,32 +23413,49 @@ func (ec *executionContext) _Mutation_deleteMe(ctx context.Context, field graphq return ec.marshalODeleteMePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteMePayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_deleteMe(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) 
{ + switch field.Name { + case "userId": + return ec.fieldContext_DeleteMePayload_userId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DeleteMePayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createTeam_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_deleteMe_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createTeam(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateTeam(rctx, args["input"].(gqlmodel.CreateTeamInput)) + return ec.resolvers.Mutation().CreateTeam(rctx, fc.Args["input"].(gqlmodel.CreateTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -18423,32 +23469,49 @@ func (ec *executionContext) _Mutation_createTeam(ctx context.Context, field grap return ec.marshalOCreateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTeamPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Mutation_createTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_CreateTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateTeamPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_deleteTeam_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_createTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_deleteTeam(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DeleteTeam(rctx, args["input"].(gqlmodel.DeleteTeamInput)) + return ec.resolvers.Mutation().DeleteTeam(rctx, fc.Args["input"].(gqlmodel.DeleteTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -18462,32 +23525,49 @@ func (ec *executionContext) _Mutation_deleteTeam(ctx context.Context, field grap return 
ec.marshalODeleteTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteTeamPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_deleteTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "teamId": + return ec.fieldContext_DeleteTeamPayload_teamId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DeleteTeamPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateTeam_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_deleteTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateTeam(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - 
return ec.resolvers.Mutation().UpdateTeam(rctx, args["input"].(gqlmodel.UpdateTeamInput)) + return ec.resolvers.Mutation().UpdateTeam(rctx, fc.Args["input"].(gqlmodel.UpdateTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -18501,32 +23581,49 @@ func (ec *executionContext) _Mutation_updateTeam(ctx context.Context, field grap return ec.marshalOUpdateTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTeamPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_UpdateTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateTeamPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addMemberToTeam_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addMemberToTeam(ctx, field) + if err != nil 
{ return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddMemberToTeam(rctx, args["input"].(gqlmodel.AddMemberToTeamInput)) + return ec.resolvers.Mutation().AddMemberToTeam(rctx, fc.Args["input"].(gqlmodel.AddMemberToTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -18540,32 +23637,49 @@ func (ec *executionContext) _Mutation_addMemberToTeam(ctx context.Context, field return ec.marshalOAddMemberToTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddMemberToTeamPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addMemberToTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_AddMemberToTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddMemberToTeamPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := 
ec.field_Mutation_removeMemberFromTeam_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addMemberToTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeMemberFromTeam(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveMemberFromTeam(rctx, args["input"].(gqlmodel.RemoveMemberFromTeamInput)) + return ec.resolvers.Mutation().RemoveMemberFromTeam(rctx, fc.Args["input"].(gqlmodel.RemoveMemberFromTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -18579,32 +23693,49 @@ func (ec *executionContext) _Mutation_removeMemberFromTeam(ctx context.Context, return ec.marshalORemoveMemberFromTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveMemberFromTeamPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeMemberFromTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_RemoveMemberFromTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveMemberFromTeamPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateMemberOfTeam_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeMemberFromTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateMemberOfTeam(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateMemberOfTeam(rctx, args["input"].(gqlmodel.UpdateMemberOfTeamInput)) + return ec.resolvers.Mutation().UpdateMemberOfTeam(rctx, fc.Args["input"].(gqlmodel.UpdateMemberOfTeamInput)) }) if err != nil { ec.Error(ctx, err) @@ -18618,32 +23749,49 @@ func (ec *executionContext) _Mutation_updateMemberOfTeam(ctx context.Context, fi return ec.marshalOUpdateMemberOfTeamPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateMemberOfTeamPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_createProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); 
r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateMemberOfTeam(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "team": + return ec.fieldContext_UpdateMemberOfTeamPayload_team(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateMemberOfTeamPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createProject_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateMemberOfTeam_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createProject(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateProject(rctx, args["input"].(gqlmodel.CreateProjectInput)) + return ec.resolvers.Mutation().CreateProject(rctx, fc.Args["input"].(gqlmodel.CreateProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -18657,32 +23805,49 @@ func (ec *executionContext) 
_Mutation_createProject(ctx context.Context, field g return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_createProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "project": + return ec.fieldContext_ProjectPayload_project(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateProject_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_createProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateProject(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateProject(rctx, args["input"].(gqlmodel.UpdateProjectInput)) + return ec.resolvers.Mutation().UpdateProject(rctx, fc.Args["input"].(gqlmodel.UpdateProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -18696,32 +23861,49 @@ func (ec *executionContext) _Mutation_updateProject(ctx context.Context, field g return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "project": + return ec.fieldContext_ProjectPayload_project(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_publishProject_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + 
fc, err := ec.fieldContext_Mutation_publishProject(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().PublishProject(rctx, args["input"].(gqlmodel.PublishProjectInput)) + return ec.resolvers.Mutation().PublishProject(rctx, fc.Args["input"].(gqlmodel.PublishProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -18735,32 +23917,49 @@ func (ec *executionContext) _Mutation_publishProject(ctx context.Context, field return ec.marshalOProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_publishProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "project": + return ec.fieldContext_ProjectPayload_project(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := 
ec.field_Mutation_deleteProject_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_publishProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_deleteProject(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DeleteProject(rctx, args["input"].(gqlmodel.DeleteProjectInput)) + return ec.resolvers.Mutation().DeleteProject(rctx, fc.Args["input"].(gqlmodel.DeleteProjectInput)) }) if err != nil { ec.Error(ctx, err) @@ -18774,32 +23973,49 @@ func (ec *executionContext) _Mutation_deleteProject(ctx context.Context, field g return ec.marshalODeleteProjectPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDeleteProjectPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_deleteProject(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "projectId": + 
return ec.fieldContext_DeleteProjectPayload_projectId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DeleteProjectPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createScene_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_deleteProject_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createScene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createScene(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateScene(rctx, args["input"].(gqlmodel.CreateSceneInput)) + return ec.resolvers.Mutation().CreateScene(rctx, fc.Args["input"].(gqlmodel.CreateSceneInput)) }) if err != nil { ec.Error(ctx, err) @@ -18813,32 +24029,49 @@ func (ec *executionContext) _Mutation_createScene(ctx context.Context, field gra return ec.marshalOCreateScenePayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateScenePayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Mutation_createScene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_CreateScenePayload_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateScenePayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addWidget_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_createScene_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addWidget(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddWidget(rctx, args["input"].(gqlmodel.AddWidgetInput)) + return ec.resolvers.Mutation().AddWidget(rctx, fc.Args["input"].(gqlmodel.AddWidgetInput)) }) if err != nil { ec.Error(ctx, err) @@ -18852,32 +24085,51 @@ func (ec *executionContext) _Mutation_addWidget(ctx context.Context, field graph return 
ec.marshalOAddWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddWidgetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_AddWidgetPayload_scene(ctx, field) + case "sceneWidget": + return ec.fieldContext_AddWidgetPayload_sceneWidget(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddWidgetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateWidget_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateWidget(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateWidget(rctx, args["input"].(gqlmodel.UpdateWidgetInput)) + return ec.resolvers.Mutation().UpdateWidget(rctx, fc.Args["input"].(gqlmodel.UpdateWidgetInput)) }) if err != nil { ec.Error(ctx, err) @@ -18891,32 +24143,51 @@ func (ec *executionContext) _Mutation_updateWidget(ctx context.Context, field gr return ec.marshalOUpdateWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateWidgetAlignSystem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpdateWidgetPayload_scene(ctx, field) + case "sceneWidget": + return ec.fieldContext_UpdateWidgetPayload_sceneWidget(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateWidgetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateWidgetAlignSystem_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + 
return fc, nil +} + +func (ec *executionContext) _Mutation_updateWidgetAlignSystem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateWidgetAlignSystem(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateWidgetAlignSystem(rctx, args["input"].(gqlmodel.UpdateWidgetAlignSystemInput)) + return ec.resolvers.Mutation().UpdateWidgetAlignSystem(rctx, fc.Args["input"].(gqlmodel.UpdateWidgetAlignSystemInput)) }) if err != nil { ec.Error(ctx, err) @@ -18930,32 +24201,49 @@ func (ec *executionContext) _Mutation_updateWidgetAlignSystem(ctx context.Contex return ec.marshalOUpdateWidgetAlignSystemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateWidgetAlignSystemPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateWidgetAlignSystem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpdateWidgetAlignSystemPayload_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was 
found under type UpdateWidgetAlignSystemPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeWidget_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateWidgetAlignSystem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeWidget(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveWidget(rctx, args["input"].(gqlmodel.RemoveWidgetInput)) + return ec.resolvers.Mutation().RemoveWidget(rctx, fc.Args["input"].(gqlmodel.RemoveWidgetInput)) }) if err != nil { ec.Error(ctx, err) @@ -18969,32 +24257,51 @@ func (ec *executionContext) _Mutation_removeWidget(ctx context.Context, field gr return ec.marshalORemoveWidgetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveWidgetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeWidget(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_RemoveWidgetPayload_scene(ctx, field) + case "widgetId": + return ec.fieldContext_RemoveWidgetPayload_widgetId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveWidgetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_installPlugin_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_installPlugin(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().InstallPlugin(rctx, args["input"].(gqlmodel.InstallPluginInput)) + return ec.resolvers.Mutation().InstallPlugin(rctx, fc.Args["input"].(gqlmodel.InstallPluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -19008,32 +24315,51 @@ func (ec *executionContext) _Mutation_installPlugin(ctx context.Context, field g return 
ec.marshalOInstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInstallPluginPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_installPlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_InstallPluginPayload_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_InstallPluginPayload_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type InstallPluginPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_uninstallPlugin_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_installPlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_uninstallPlugin(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UninstallPlugin(rctx, args["input"].(gqlmodel.UninstallPluginInput)) + return ec.resolvers.Mutation().UninstallPlugin(rctx, fc.Args["input"].(gqlmodel.UninstallPluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -19047,32 +24373,51 @@ func (ec *executionContext) _Mutation_uninstallPlugin(ctx context.Context, field return ec.marshalOUninstallPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUninstallPluginPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_uninstallPlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_UninstallPluginPayload_pluginId(ctx, field) + case "scene": + return ec.fieldContext_UninstallPluginPayload_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UninstallPluginPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_uploadPlugin_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_uninstallPlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != 
nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_uploadPlugin(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UploadPlugin(rctx, args["input"].(gqlmodel.UploadPluginInput)) + return ec.resolvers.Mutation().UploadPlugin(rctx, fc.Args["input"].(gqlmodel.UploadPluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -19086,32 +24431,53 @@ func (ec *executionContext) _Mutation_uploadPlugin(ctx context.Context, field gr return ec.marshalOUploadPluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUploadPluginPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_uploadPlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "plugin": + return ec.fieldContext_UploadPluginPayload_plugin(ctx, field) + case "scene": + return ec.fieldContext_UploadPluginPayload_scene(ctx, field) + case "scenePlugin": + return 
ec.fieldContext_UploadPluginPayload_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UploadPluginPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_upgradePlugin_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_uploadPlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_upgradePlugin(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpgradePlugin(rctx, args["input"].(gqlmodel.UpgradePluginInput)) + return ec.resolvers.Mutation().UpgradePlugin(rctx, fc.Args["input"].(gqlmodel.UpgradePluginInput)) }) if err != nil { ec.Error(ctx, err) @@ -19125,32 +24491,51 @@ func (ec *executionContext) _Mutation_upgradePlugin(ctx context.Context, field g return ec.marshalOUpgradePluginPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpgradePluginPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Mutation_upgradePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpgradePluginPayload_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_UpgradePluginPayload_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpgradePluginPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addCluster_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_upgradePlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addCluster(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddCluster(rctx, args["input"].(gqlmodel.AddClusterInput)) + return ec.resolvers.Mutation().AddCluster(rctx, fc.Args["input"].(gqlmodel.AddClusterInput)) }) if err != nil { ec.Error(ctx, err) @@ -19164,32 +24549,51 @@ func (ec *executionContext) _Mutation_addCluster(ctx context.Context, field grap return 
ec.marshalOAddClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddClusterPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addCluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_AddClusterPayload_scene(ctx, field) + case "cluster": + return ec.fieldContext_AddClusterPayload_cluster(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddClusterPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateCluster_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addCluster_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateCluster(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateCluster(rctx, args["input"].(gqlmodel.UpdateClusterInput)) + return ec.resolvers.Mutation().UpdateCluster(rctx, fc.Args["input"].(gqlmodel.UpdateClusterInput)) }) if err != nil { ec.Error(ctx, err) @@ -19203,32 +24607,51 @@ func (ec *executionContext) _Mutation_updateCluster(ctx context.Context, field g return ec.marshalOUpdateClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateClusterPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateCluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_UpdateClusterPayload_scene(ctx, field) + case "cluster": + return ec.fieldContext_UpdateClusterPayload_cluster(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateClusterPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeCluster_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateCluster_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + 
+func (ec *executionContext) _Mutation_removeCluster(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeCluster(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveCluster(rctx, args["input"].(gqlmodel.RemoveClusterInput)) + return ec.resolvers.Mutation().RemoveCluster(rctx, fc.Args["input"].(gqlmodel.RemoveClusterInput)) }) if err != nil { ec.Error(ctx, err) @@ -19242,32 +24665,51 @@ func (ec *executionContext) _Mutation_removeCluster(ctx context.Context, field g return ec.marshalORemoveClusterPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveClusterPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeCluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "scene": + return ec.fieldContext_RemoveClusterPayload_scene(ctx, field) + case "clusterId": + return ec.fieldContext_RemoveClusterPayload_clusterId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type 
RemoveClusterPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateDatasetSchema_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeCluster_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateDatasetSchema(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateDatasetSchema(rctx, args["input"].(gqlmodel.UpdateDatasetSchemaInput)) + return ec.resolvers.Mutation().UpdateDatasetSchema(rctx, fc.Args["input"].(gqlmodel.UpdateDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -19281,32 +24723,49 @@ func (ec *executionContext) _Mutation_updateDatasetSchema(ctx context.Context, f return ec.marshalOUpdateDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateDatasetSchemaPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateDatasetSchema(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_UpdateDatasetSchemaPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateDatasetSchemaPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_syncDataset_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_syncDataset(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().SyncDataset(rctx, args["input"].(gqlmodel.SyncDatasetInput)) + return ec.resolvers.Mutation().SyncDataset(rctx, fc.Args["input"].(gqlmodel.SyncDatasetInput)) }) if err != nil { ec.Error(ctx, err) @@ -19320,32 +24779,55 @@ func (ec *executionContext) _Mutation_syncDataset(ctx context.Context, field gra return ec.marshalOSyncDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSyncDatasetPayload(ctx, 
field.Selections, res) } -func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_syncDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "sceneId": + return ec.fieldContext_SyncDatasetPayload_sceneId(ctx, field) + case "url": + return ec.fieldContext_SyncDatasetPayload_url(ctx, field) + case "datasetSchema": + return ec.fieldContext_SyncDatasetPayload_datasetSchema(ctx, field) + case "dataset": + return ec.fieldContext_SyncDatasetPayload_dataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SyncDatasetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addDynamicDatasetSchema_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_syncDataset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addDynamicDatasetSchema(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddDynamicDatasetSchema(rctx, args["input"].(gqlmodel.AddDynamicDatasetSchemaInput)) + return ec.resolvers.Mutation().AddDynamicDatasetSchema(rctx, fc.Args["input"].(gqlmodel.AddDynamicDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -19359,32 +24841,49 @@ func (ec *executionContext) _Mutation_addDynamicDatasetSchema(ctx context.Contex return ec.marshalOAddDynamicDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetSchemaPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addDynamicDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddDynamicDatasetSchemaPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addDynamicDataset_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = 
ec.field_Mutation_addDynamicDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addDynamicDataset(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddDynamicDataset(rctx, args["input"].(gqlmodel.AddDynamicDatasetInput)) + return ec.resolvers.Mutation().AddDynamicDataset(rctx, fc.Args["input"].(gqlmodel.AddDynamicDatasetInput)) }) if err != nil { ec.Error(ctx, err) @@ -19398,32 +24897,51 @@ func (ec *executionContext) _Mutation_addDynamicDataset(ctx context.Context, fie return ec.marshalOAddDynamicDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDynamicDatasetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addDynamicDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return 
ec.fieldContext_AddDynamicDatasetPayload_datasetSchema(ctx, field) + case "dataset": + return ec.fieldContext_AddDynamicDatasetPayload_dataset(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddDynamicDatasetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeDatasetSchema_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addDynamicDataset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeDatasetSchema(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveDatasetSchema(rctx, args["input"].(gqlmodel.RemoveDatasetSchemaInput)) + return ec.resolvers.Mutation().RemoveDatasetSchema(rctx, fc.Args["input"].(gqlmodel.RemoveDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -19437,32 +24955,49 @@ func (ec *executionContext) _Mutation_removeDatasetSchema(ctx context.Context, f return ec.marshalORemoveDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveDatasetSchemaPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - 
if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaId": + return ec.fieldContext_RemoveDatasetSchemaPayload_schemaId(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveDatasetSchemaPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_importDataset_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_importDataset(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().ImportDataset(rctx, args["input"].(gqlmodel.ImportDatasetInput)) + return ec.resolvers.Mutation().ImportDataset(rctx, fc.Args["input"].(gqlmodel.ImportDatasetInput)) }) if err != nil { ec.Error(ctx, err) @@ -19476,32 +25011,49 
@@ func (ec *executionContext) _Mutation_importDataset(ctx context.Context, field g return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_importDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_ImportDatasetPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ImportDatasetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_importDatasetFromGoogleSheet_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_importDataset_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_importDatasetFromGoogleSheet(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().ImportDatasetFromGoogleSheet(rctx, args["input"].(gqlmodel.ImportDatasetFromGoogleSheetInput)) + return ec.resolvers.Mutation().ImportDatasetFromGoogleSheet(rctx, fc.Args["input"].(gqlmodel.ImportDatasetFromGoogleSheetInput)) }) if err != nil { ec.Error(ctx, err) @@ -19515,32 +25067,49 @@ func (ec *executionContext) _Mutation_importDatasetFromGoogleSheet(ctx context.C return ec.marshalOImportDatasetPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportDatasetPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_importDatasetFromGoogleSheet(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return ec.fieldContext_ImportDatasetPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ImportDatasetPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addDatasetSchema_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = 
ec.field_Mutation_importDatasetFromGoogleSheet_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addDatasetSchema(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddDatasetSchema(rctx, args["input"].(gqlmodel.AddDatasetSchemaInput)) + return ec.resolvers.Mutation().AddDatasetSchema(rctx, fc.Args["input"].(gqlmodel.AddDatasetSchemaInput)) }) if err != nil { ec.Error(ctx, err) @@ -19554,32 +25123,49 @@ func (ec *executionContext) _Mutation_addDatasetSchema(ctx context.Context, fiel return ec.marshalOAddDatasetSchemaPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddDatasetSchemaPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetSchema": + return 
ec.fieldContext_AddDatasetSchemaPayload_datasetSchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddDatasetSchemaPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updatePropertyValue_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addDatasetSchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updatePropertyValue(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyValue(rctx, args["input"].(gqlmodel.UpdatePropertyValueInput)) + return ec.resolvers.Mutation().UpdatePropertyValue(rctx, fc.Args["input"].(gqlmodel.UpdatePropertyValueInput)) }) if err != nil { ec.Error(ctx, err) @@ -19593,32 +25179,51 @@ func (ec *executionContext) _Mutation_updatePropertyValue(ctx context.Context, f return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() 
- fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updatePropertyValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removePropertyField_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updatePropertyValue_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removePropertyField(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemovePropertyField(rctx, args["input"].(gqlmodel.RemovePropertyFieldInput)) + return ec.resolvers.Mutation().RemovePropertyField(rctx, fc.Args["input"].(gqlmodel.RemovePropertyFieldInput)) }) if err != nil { ec.Error(ctx, 
err) @@ -19632,32 +25237,51 @@ func (ec *executionContext) _Mutation_removePropertyField(ctx context.Context, f return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removePropertyField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_uploadFileToProperty_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removePropertyField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_uploadFileToProperty(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UploadFileToProperty(rctx, args["input"].(gqlmodel.UploadFileToPropertyInput)) + return ec.resolvers.Mutation().UploadFileToProperty(rctx, fc.Args["input"].(gqlmodel.UploadFileToPropertyInput)) }) if err != nil { ec.Error(ctx, err) @@ -19671,32 +25295,51 @@ func (ec *executionContext) _Mutation_uploadFileToProperty(ctx context.Context, return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_uploadFileToProperty(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := 
field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_linkDatasetToPropertyValue_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_uploadFileToProperty_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_linkDatasetToPropertyValue(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().LinkDatasetToPropertyValue(rctx, args["input"].(gqlmodel.LinkDatasetToPropertyValueInput)) + return ec.resolvers.Mutation().LinkDatasetToPropertyValue(rctx, fc.Args["input"].(gqlmodel.LinkDatasetToPropertyValueInput)) }) if err != nil { ec.Error(ctx, err) @@ -19710,32 +25353,51 @@ func (ec *executionContext) _Mutation_linkDatasetToPropertyValue(ctx context.Con return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_linkDatasetToPropertyValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: 
true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_unlinkPropertyValue_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_linkDatasetToPropertyValue_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_unlinkPropertyValue(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UnlinkPropertyValue(rctx, args["input"].(gqlmodel.UnlinkPropertyValueInput)) + return ec.resolvers.Mutation().UnlinkPropertyValue(rctx, fc.Args["input"].(gqlmodel.UnlinkPropertyValueInput)) }) if err != nil { ec.Error(ctx, err) @@ -19749,32 +25411,51 @@ func (ec *executionContext) _Mutation_unlinkPropertyValue(ctx context.Context, f return ec.marshalOPropertyFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldPayload(ctx, 
field.Selections, res) } -func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_unlinkPropertyValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyFieldPayload_property(ctx, field) + case "propertyField": + return ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addPropertyItem_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_unlinkPropertyValue_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addPropertyItem(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in 
children - return ec.resolvers.Mutation().AddPropertyItem(rctx, args["input"].(gqlmodel.AddPropertyItemInput)) + return ec.resolvers.Mutation().AddPropertyItem(rctx, fc.Args["input"].(gqlmodel.AddPropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -19788,32 +25469,51 @@ func (ec *executionContext) _Mutation_addPropertyItem(ctx context.Context, field return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addPropertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_movePropertyItem_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addPropertyItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_movePropertyItem(ctx 
context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_movePropertyItem(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().MovePropertyItem(rctx, args["input"].(gqlmodel.MovePropertyItemInput)) + return ec.resolvers.Mutation().MovePropertyItem(rctx, fc.Args["input"].(gqlmodel.MovePropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -19827,32 +25527,51 @@ func (ec *executionContext) _Mutation_movePropertyItem(ctx context.Context, fiel return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_movePropertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, } - 
+ defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removePropertyItem_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_movePropertyItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removePropertyItem(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemovePropertyItem(rctx, args["input"].(gqlmodel.RemovePropertyItemInput)) + return ec.resolvers.Mutation().RemovePropertyItem(rctx, fc.Args["input"].(gqlmodel.RemovePropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -19866,32 +25585,51 @@ func (ec *executionContext) _Mutation_removePropertyItem(ctx context.Context, fi return ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removePropertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { 
+ fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updatePropertyItems_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removePropertyItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updatePropertyItems(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdatePropertyItems(rctx, args["input"].(gqlmodel.UpdatePropertyItemInput)) + return ec.resolvers.Mutation().UpdatePropertyItems(rctx, fc.Args["input"].(gqlmodel.UpdatePropertyItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -19905,32 +25643,51 @@ func (ec *executionContext) _Mutation_updatePropertyItems(ctx context.Context, f return 
ec.marshalOPropertyItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updatePropertyItems(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "property": + return ec.fieldContext_PropertyItemPayload_property(ctx, field) + case "propertyItem": + return ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyItemPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addLayerItem_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updatePropertyItems_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addLayerItem(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := 
ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddLayerItem(rctx, args["input"].(gqlmodel.AddLayerItemInput)) + return ec.resolvers.Mutation().AddLayerItem(rctx, fc.Args["input"].(gqlmodel.AddLayerItemInput)) }) if err != nil { ec.Error(ctx, err) @@ -19944,32 +25701,53 @@ func (ec *executionContext) _Mutation_addLayerItem(ctx context.Context, field gr return ec.marshalOAddLayerItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerItemPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addLayerItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_AddLayerItemPayload_layer(ctx, field) + case "parentLayer": + return ec.fieldContext_AddLayerItemPayload_parentLayer(ctx, field) + case "index": + return ec.fieldContext_AddLayerItemPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddLayerItemPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addLayerGroup_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = 
ec.field_Mutation_addLayerItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addLayerGroup(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddLayerGroup(rctx, args["input"].(gqlmodel.AddLayerGroupInput)) + return ec.resolvers.Mutation().AddLayerGroup(rctx, fc.Args["input"].(gqlmodel.AddLayerGroupInput)) }) if err != nil { ec.Error(ctx, err) @@ -19983,32 +25761,53 @@ func (ec *executionContext) _Mutation_addLayerGroup(ctx context.Context, field g return ec.marshalOAddLayerGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddLayerGroupPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addLayerGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_AddLayerGroupPayload_layer(ctx, field) + case "parentLayer": + return 
ec.fieldContext_AddLayerGroupPayload_parentLayer(ctx, field) + case "index": + return ec.fieldContext_AddLayerGroupPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddLayerGroupPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeLayer_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addLayerGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeLayer(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveLayer(rctx, args["input"].(gqlmodel.RemoveLayerInput)) + return ec.resolvers.Mutation().RemoveLayer(rctx, fc.Args["input"].(gqlmodel.RemoveLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -20022,32 +25821,51 @@ func (ec *executionContext) _Mutation_removeLayer(ctx context.Context, field gra return ec.marshalORemoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() 
- fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layerId": + return ec.fieldContext_RemoveLayerPayload_layerId(ctx, field) + case "parentLayer": + return ec.fieldContext_RemoveLayerPayload_parentLayer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveLayerPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateLayer_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateLayer(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateLayer(rctx, args["input"].(gqlmodel.UpdateLayerInput)) + return ec.resolvers.Mutation().UpdateLayer(rctx, fc.Args["input"].(gqlmodel.UpdateLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -20061,32 +25879,49 @@ func (ec *executionContext) _Mutation_updateLayer(ctx 
context.Context, field gra return ec.marshalOUpdateLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_UpdateLayerPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateLayerPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_moveLayer_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_moveLayer(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context 
from middleware stack in children - return ec.resolvers.Mutation().MoveLayer(rctx, args["input"].(gqlmodel.MoveLayerInput)) + return ec.resolvers.Mutation().MoveLayer(rctx, fc.Args["input"].(gqlmodel.MoveLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -20100,32 +25935,55 @@ func (ec *executionContext) _Mutation_moveLayer(ctx context.Context, field graph return ec.marshalOMoveLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_moveLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layerId": + return ec.fieldContext_MoveLayerPayload_layerId(ctx, field) + case "fromParentLayer": + return ec.fieldContext_MoveLayerPayload_fromParentLayer(ctx, field) + case "toParentLayer": + return ec.fieldContext_MoveLayerPayload_toParentLayer(ctx, field) + case "index": + return ec.fieldContext_MoveLayerPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MoveLayerPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createInfobox_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_moveLayer_args(ctx, 
field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createInfobox(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateInfobox(rctx, args["input"].(gqlmodel.CreateInfoboxInput)) + return ec.resolvers.Mutation().CreateInfobox(rctx, fc.Args["input"].(gqlmodel.CreateInfoboxInput)) }) if err != nil { ec.Error(ctx, err) @@ -20139,32 +25997,49 @@ func (ec *executionContext) _Mutation_createInfobox(ctx context.Context, field g return ec.marshalOCreateInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateInfoboxPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_createInfobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_CreateInfoboxPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type 
CreateInfoboxPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeInfobox_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_createInfobox_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeInfobox(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveInfobox(rctx, args["input"].(gqlmodel.RemoveInfoboxInput)) + return ec.resolvers.Mutation().RemoveInfobox(rctx, fc.Args["input"].(gqlmodel.RemoveInfoboxInput)) }) if err != nil { ec.Error(ctx, err) @@ -20178,32 +26053,49 @@ func (ec *executionContext) _Mutation_removeInfobox(ctx context.Context, field g return ec.marshalORemoveInfoboxPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeInfobox(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { 
+ fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_RemoveInfoboxPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveInfoboxPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_addInfoboxField_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeInfobox_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_addInfoboxField(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AddInfoboxField(rctx, args["input"].(gqlmodel.AddInfoboxFieldInput)) + return ec.resolvers.Mutation().AddInfoboxField(rctx, fc.Args["input"].(gqlmodel.AddInfoboxFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -20217,32 +26109,51 @@ func (ec *executionContext) _Mutation_addInfoboxField(ctx context.Context, field return ec.marshalOAddInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAddInfoboxFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) 
_Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_addInfoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "infoboxField": + return ec.fieldContext_AddInfoboxFieldPayload_infoboxField(ctx, field) + case "layer": + return ec.fieldContext_AddInfoboxFieldPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AddInfoboxFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_moveInfoboxField_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_addInfoboxField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_moveInfoboxField(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().MoveInfoboxField(rctx, 
args["input"].(gqlmodel.MoveInfoboxFieldInput)) + return ec.resolvers.Mutation().MoveInfoboxField(rctx, fc.Args["input"].(gqlmodel.MoveInfoboxFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -20256,32 +26167,53 @@ func (ec *executionContext) _Mutation_moveInfoboxField(ctx context.Context, fiel return ec.marshalOMoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMoveInfoboxFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_moveInfoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "infoboxFieldId": + return ec.fieldContext_MoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + case "layer": + return ec.fieldContext_MoveInfoboxFieldPayload_layer(ctx, field) + case "index": + return ec.fieldContext_MoveInfoboxFieldPayload_index(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MoveInfoboxFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeInfoboxField_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_moveInfoboxField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec 
*executionContext) _Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeInfoboxField(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveInfoboxField(rctx, args["input"].(gqlmodel.RemoveInfoboxFieldInput)) + return ec.resolvers.Mutation().RemoveInfoboxField(rctx, fc.Args["input"].(gqlmodel.RemoveInfoboxFieldInput)) }) if err != nil { ec.Error(ctx, err) @@ -20295,32 +26227,51 @@ func (ec *executionContext) _Mutation_removeInfoboxField(ctx context.Context, fi return ec.marshalORemoveInfoboxFieldPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveInfoboxFieldPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_removeInfoboxField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "infoboxFieldId": + return ec.fieldContext_RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + case "layer": + return ec.fieldContext_RemoveInfoboxFieldPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no 
field named %q was found under type RemoveInfoboxFieldPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_importLayer_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_removeInfoboxField_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_importLayer(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().ImportLayer(rctx, args["input"].(gqlmodel.ImportLayerInput)) + return ec.resolvers.Mutation().ImportLayer(rctx, fc.Args["input"].(gqlmodel.ImportLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -20334,32 +26285,51 @@ func (ec *executionContext) _Mutation_importLayer(ctx context.Context, field gra return ec.marshalOImportLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšImportLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_attachTagToLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_importLayer(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layers": + return ec.fieldContext_ImportLayerPayload_layers(ctx, field) + case "parentLayer": + return ec.fieldContext_ImportLayerPayload_parentLayer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ImportLayerPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_attachTagToLayer_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_importLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_attachTagToLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_attachTagToLayer(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AttachTagToLayer(rctx, args["input"].(gqlmodel.AttachTagToLayerInput)) + return ec.resolvers.Mutation().AttachTagToLayer(rctx, fc.Args["input"].(gqlmodel.AttachTagToLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -20373,32 +26343,49 @@ func (ec *executionContext) _Mutation_attachTagToLayer(ctx context.Context, fiel return 
ec.marshalOAttachTagToLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagToLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_detachTagFromLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_attachTagToLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_AttachTagToLayerPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AttachTagToLayerPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_detachTagFromLayer_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_attachTagToLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_detachTagFromLayer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_detachTagFromLayer(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DetachTagFromLayer(rctx, args["input"].(gqlmodel.DetachTagFromLayerInput)) + return ec.resolvers.Mutation().DetachTagFromLayer(rctx, fc.Args["input"].(gqlmodel.DetachTagFromLayerInput)) }) if err != nil { ec.Error(ctx, err) @@ -20412,71 +26399,107 @@ func (ec *executionContext) _Mutation_detachTagFromLayer(ctx context.Context, fi return ec.marshalODetachTagFromLayerPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagFromLayerPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_createTagItem(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_detachTagFromLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "layer": + return ec.fieldContext_DetachTagFromLayerPayload_layer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DetachTagFromLayerPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createTagItem_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_detachTagFromLayer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_createTagItem(ctx 
context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createTagItem(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateTagItem(rctx, args["input"].(gqlmodel.CreateTagItemInput)) + return ec.resolvers.Mutation().CreateTagItem(rctx, fc.Args["input"].(gqlmodel.CreateTagItemInput)) }) if err != nil { ec.Error(ctx, err) - return graphql.Null + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*gqlmodel.CreateTagItemPayload) + fc.Result = res + return ec.marshalOCreateTagItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemPayload(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Mutation_createTagItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_CreateTagItemPayload_tag(ctx, field) + case "parent": + return ec.fieldContext_CreateTagItemPayload_parent(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateTagItemPayload", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_createTagItem_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + 
ec.Error(ctx, err) + return } - if resTmp == nil { - return graphql.Null - } - res := resTmp.(*gqlmodel.CreateTagItemPayload) - fc.Result = res - return ec.marshalOCreateTagItemPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagItemPayload(ctx, field.Selections, res) + return fc, nil } func (ec *executionContext) _Mutation_createTagGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_createTagGroup(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Mutation", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_createTagGroup_args(ctx, rawArgs) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - fc.Args = args resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().CreateTagGroup(rctx, args["input"].(gqlmodel.CreateTagGroupInput)) + return ec.resolvers.Mutation().CreateTagGroup(rctx, fc.Args["input"].(gqlmodel.CreateTagGroupInput)) }) if err != nil { ec.Error(ctx, err) @@ -20490,32 +26513,49 @@ func (ec *executionContext) _Mutation_createTagGroup(ctx context.Context, field return ec.marshalOCreateTagGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCreateTagGroupPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_attachTagItemToGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = 
graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_createTagGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_CreateTagGroupPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type CreateTagGroupPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_attachTagItemToGroup_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_createTagGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_attachTagItemToGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_attachTagItemToGroup(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().AttachTagItemToGroup(rctx, args["input"].(gqlmodel.AttachTagItemToGroupInput)) + return ec.resolvers.Mutation().AttachTagItemToGroup(rctx, fc.Args["input"].(gqlmodel.AttachTagItemToGroupInput)) }) if err != nil { ec.Error(ctx, err) @@ -20529,32 +26569,49 @@ func (ec *executionContext) 
_Mutation_attachTagItemToGroup(ctx context.Context, return ec.marshalOAttachTagItemToGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAttachTagItemToGroupPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_detachTagItemFromGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_attachTagItemToGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_AttachTagItemToGroupPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AttachTagItemToGroupPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_detachTagItemFromGroup_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_attachTagItemToGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_detachTagItemFromGroup(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_detachTagItemFromGroup(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + 
} + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().DetachTagItemFromGroup(rctx, args["input"].(gqlmodel.DetachTagItemFromGroupInput)) + return ec.resolvers.Mutation().DetachTagItemFromGroup(rctx, fc.Args["input"].(gqlmodel.DetachTagItemFromGroupInput)) }) if err != nil { ec.Error(ctx, err) @@ -20568,32 +26625,49 @@ func (ec *executionContext) _Mutation_detachTagItemFromGroup(ctx context.Context return ec.marshalODetachTagItemFromGroupPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDetachTagItemFromGroupPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_updateTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_detachTagItemFromGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_DetachTagItemFromGroupPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DetachTagItemFromGroupPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_updateTag_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_detachTagItemFromGroup_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { 
ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_updateTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_updateTag(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().UpdateTag(rctx, args["input"].(gqlmodel.UpdateTagInput)) + return ec.resolvers.Mutation().UpdateTag(rctx, fc.Args["input"].(gqlmodel.UpdateTagInput)) }) if err != nil { ec.Error(ctx, err) @@ -20607,32 +26681,49 @@ func (ec *executionContext) _Mutation_updateTag(ctx context.Context, field graph return ec.marshalOUpdateTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUpdateTagPayload(ctx, field.Selections, res) } -func (ec *executionContext) _Mutation_removeTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Mutation_updateTag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Mutation", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tag": + return ec.fieldContext_UpdateTagPayload_tag(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type UpdateTagPayload", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = 
ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Mutation_removeTag_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Mutation_updateTag_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Mutation_removeTag(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Mutation_removeTag(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Mutation().RemoveTag(rctx, args["input"].(gqlmodel.RemoveTagInput)) + return ec.resolvers.Mutation().RemoveTag(rctx, fc.Args["input"].(gqlmodel.RemoveTagInput)) }) if err != nil { ec.Error(ctx, err) @@ -20646,22 +26737,48 @@ func (ec *executionContext) _Mutation_removeTag(ctx context.Context, field graph return ec.marshalORemoveTagPayload2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRemoveTagPayload(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_Mutation_removeTag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Mutation", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "tagId": + return 
ec.fieldContext_RemoveTagPayload_tagId(ctx, field) + case "updatedLayers": + return ec.fieldContext_RemoveTagPayload_updatedLayers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type RemoveTagPayload", field.Name) + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "PageInfo", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Mutation_removeTag_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_startCursor(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.StartCursor, nil @@ -20678,22 +26795,31 @@ func (ec *executionContext) _PageInfo_startCursor(ctx context.Context, field gra return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PageInfo_startCursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, 
err error) { + fc = &graphql.FieldContext{ Object: "PageInfo", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_endCursor(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.EndCursor, nil @@ -20710,22 +26836,31 @@ func (ec *executionContext) _PageInfo_endCursor(ctx context.Context, field graph return ec.marshalOCursor2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PageInfo_endCursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PageInfo", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_hasNextPage(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.HasNextPage, nil @@ -20745,22 +26880,31 @@ func (ec *executionContext) _PageInfo_hasNextPage(ctx context.Context, field gra return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PageInfo_hasNextPage(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PageInfo", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PageInfo) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context 
from middleware stack in children return obj.HasPreviousPage, nil @@ -20780,22 +26924,31 @@ func (ec *executionContext) _PageInfo_hasPreviousPage(ctx context.Context, field return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PageInfo_hasPreviousPage(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PageInfo", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Plugin", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -20815,22 +26968,31 @@ func (ec *executionContext) _Plugin_id(ctx context.Context, field graphql.Collec return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Plugin_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -20847,22 +27009,31 @@ func (ec *executionContext) _Plugin_sceneId(ctx context.Context, field graphql.C return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec 
*executionContext) _Plugin_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -20882,22 +27053,31 @@ func (ec *executionContext) _Plugin_name(ctx context.Context, field graphql.Coll return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_version(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from 
middleware stack in children return obj.Version, nil @@ -20917,22 +27097,31 @@ func (ec *executionContext) _Plugin_version(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_version(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_description(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description, nil @@ -20952,22 +27141,31 @@ func (ec *executionContext) _Plugin_description(ctx context.Context, field graph return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc 
:= &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_author(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Author, nil @@ -20987,22 +27185,31 @@ func (ec *executionContext) _Plugin_author(ctx context.Context, field graphql.Co return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_author(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec 
*executionContext) _Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_repositoryUrl(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.RepositoryURL, nil @@ -21022,22 +27229,31 @@ func (ec *executionContext) _Plugin_repositoryUrl(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_repositoryUrl(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_propertySchemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertySchemaID, nil @@ -21054,22 +27270,31 @@ func (ec *executionContext) _Plugin_propertySchemaId(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_propertySchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_extensions(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Extensions, nil @@ -21089,29 +27314,67 @@ func (ec *executionContext) _Plugin_extensions(ctx context.Context, field graphq return ec.marshalNPluginExtension2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionแš„(ctx, field.Selections, res) 
} -func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_extensions(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case 
"translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Plugin_scenePlugin_args(ctx, rawArgs) +func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_scenePlugin(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ScenePlugin, nil @@ -21128,22 +27391,52 @@ func (ec *executionContext) _Plugin_scenePlugin(ctx context.Context, field graph return ec.marshalOScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Plugin_scenePlugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedDescription, nil @@ -21160,22 +27453,31 @@ func (ec *executionContext) _Plugin_allTranslatedDescription(ctx context.Context return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_allTranslatedDescription(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_allTranslatedName(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedName, nil @@ -21192,22 +27494,31 @@ func (ec *executionContext) _Plugin_allTranslatedName(ctx context.Context, field return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Plugin_allTranslatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Plugin", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Plugin().Scene(rctx, obj) @@ -21224,32 +27535,74 @@ func (ec *executionContext) _Plugin_scene(ctx context.Context, field graphql.Col return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case 
"widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Plugin_translatedName_args(ctx, rawArgs) +func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_translatedName(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Plugin().TranslatedName(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.Plugin().TranslatedName(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21266,32 +27619,45 @@ func (ec *executionContext) _Plugin_translatedName(ctx context.Context, field gr return ec.marshalNString2string(ctx, field.Selections, res) } -func 
(ec *executionContext) _Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Plugin_translatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Plugin", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Plugin_translatedDescription_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Plugin_translatedName_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_translatedDescription(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Plugin().TranslatedDescription(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.Plugin().TranslatedDescription(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { 
ec.Error(ctx, err) @@ -21308,22 +27674,42 @@ func (ec *executionContext) _Plugin_translatedDescription(ctx context.Context, f return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Plugin_translatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Plugin_translatedDescription_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} + func (ec *executionContext) _Plugin_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Plugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Plugin_propertySchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Plugin", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Plugin().PropertySchema(rctx, obj) @@ -21340,22 +27726,39 @@ func (ec *executionContext) _Plugin_propertySchema(ctx context.Context, field gr return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, 
res) } +func (ec *executionContext) fieldContext_Plugin_propertySchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Plugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_extensionId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PluginExtension", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ExtensionID, nil @@ -21375,22 +27778,31 @@ func (ec *executionContext) _PluginExtension_extensionId(ctx context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_pluginId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -21410,22 +27822,31 @@ func (ec *executionContext) _PluginExtension_pluginId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, 
IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Type, nil @@ -21445,22 +27866,31 @@ func (ec *executionContext) _PluginExtension_type(ctx context.Context, field gra return ec.marshalNPluginExtensionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtensionType(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PluginExtensionType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_name(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -21480,22 +27910,31 @@ func (ec *executionContext) _PluginExtension_name(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_description(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context 
from middleware stack in children return obj.Description, nil @@ -21515,22 +27954,31 @@ func (ec *executionContext) _PluginExtension_description(ctx context.Context, fi return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_icon(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Icon, nil @@ -21550,22 +27998,31 @@ func (ec *executionContext) _PluginExtension_icon(ctx context.Context, field gra return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_singleOnly(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_icon(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_singleOnly(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_singleOnly(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SingleOnly, nil @@ -21582,22 +28039,31 @@ func (ec *executionContext) _PluginExtension_singleOnly(ctx context.Context, fie return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_singleOnly(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.WidgetLayout, nil @@ -21614,22 +28080,41 @@ func (ec *executionContext) _PluginExtension_widgetLayout(ctx context.Context, f return ec.marshalOWidgetLayout2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLayout(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_widgetLayout(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extendable": + return ec.fieldContext_WidgetLayout_extendable(ctx, field) + case "extended": + return ec.fieldContext_WidgetLayout_extended(ctx, field) + case "floating": + return ec.fieldContext_WidgetLayout_floating(ctx, field) + 
case "defaultLocation": + return ec.fieldContext_WidgetLayout_defaultLocation(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetLayout", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_visualizer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Visualizer, nil @@ -21646,22 +28131,31 @@ func (ec *executionContext) _PluginExtension_visualizer(ctx context.Context, fie return ec.marshalOVisualizer2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_visualizer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Visualizer does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertySchemaID, nil @@ -21681,22 +28175,31 @@ func (ec *executionContext) _PluginExtension_propertySchemaId(ctx context.Contex return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_propertySchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, 
r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedName, nil @@ -21713,22 +28216,31 @@ func (ec *executionContext) _PluginExtension_allTranslatedName(ctx context.Conte return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_allTranslatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedDescription, nil @@ -21745,22 +28257,31 @@ func (ec *executionContext) _PluginExtension_allTranslatedDescription(ctx contex return 
ec.marshalOTranslatedString2map(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PluginExtension_allTranslatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PluginExtension", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PluginExtension().Plugin(rctx, obj) @@ -21777,32 +28298,68 @@ func (ec *executionContext) _PluginExtension_plugin(ctx context.Context, field g return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_plugin(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PluginExtension_sceneWidget_args(ctx, rawArgs) +func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_sceneWidget(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PluginExtension().SceneWidget(rctx, obj, args["sceneId"].(gqlmodel.ID)) + return ec.resolvers.PluginExtension().SceneWidget(rctx, obj, fc.Args["sceneId"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -21816,22 +28373,62 @@ func (ec *executionContext) _PluginExtension_sceneWidget(ctx context.Context, fi return ec.marshalOSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_sceneWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return 
ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PluginExtension_sceneWidget_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} +func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_propertySchema(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PluginExtension().PropertySchema(rctx, obj) @@ -21848,32 +28445,42 @@ func (ec *executionContext) _PluginExtension_propertySchema(ctx context.Context, return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_propertySchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PluginExtension_translatedName_args(ctx, rawArgs) +func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_translatedName(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PluginExtension().TranslatedName(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.PluginExtension().TranslatedName(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21890,32 +28497,45 @@ func (ec *executionContext) _PluginExtension_translatedName(ctx context.Context, return 
ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginExtension_translatedName(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginExtension", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PluginExtension_translatedDescription_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_PluginExtension_translatedName_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginExtension) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PluginExtension().TranslatedDescription(rctx, obj, 
args["lang"].(*language.Tag)) + return ec.resolvers.PluginExtension().TranslatedDescription(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -21932,22 +28552,42 @@ func (ec *executionContext) _PluginExtension_translatedDescription(ctx context.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_PluginExtension_translatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginExtension", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "PluginMetadata", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PluginExtension_translatedDescription_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginMetadata_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use 
context from middleware stack in children return obj.Name, nil @@ -21967,22 +28607,31 @@ func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginMetadata_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginMetadata", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginMetadata_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginMetadata_description(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description, nil @@ -22002,22 +28651,31 @@ func (ec *executionContext) _PluginMetadata_description(ctx context.Context, fie return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != 
nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginMetadata_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginMetadata", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginMetadata_author(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Author, nil @@ -22037,22 +28695,31 @@ func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field gr return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginMetadata_author(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginMetadata", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginMetadata_thumbnailUrl(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ThumbnailURL, nil @@ -22072,22 +28739,31 @@ func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, fi return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PluginMetadata", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PluginMetadata_createdAt(ctx, field) + if err != nil { + return 
graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.CreatedAt, nil @@ -22107,22 +28783,31 @@ func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PluginMetadata", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Project", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -22142,22 +28827,31 @@ func (ec *executionContext) _Project_id(ctx context.Context, field graphql.Colle return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec 
*executionContext) _Project_isArchived(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_isArchived(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_isArchived(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsArchived, nil @@ -22177,22 +28871,31 @@ func (ec *executionContext) _Project_isArchived(ctx context.Context, field graph return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_isArchived(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", 
Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_isBasicAuthActive(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsBasicAuthActive, nil @@ -22212,22 +28915,31 @@ func (ec *executionContext) _Project_isBasicAuthActive(ctx context.Context, fiel return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_isBasicAuthActive(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_Project_basicAuthUsername(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.BasicAuthUsername, nil @@ -22242,27 +28954,36 @@ func (ec *executionContext) _Project_basicAuthUsername(ctx context.Context, fiel } return graphql.Null } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Project_basicAuthUsername(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil } func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_basicAuthPassword(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Project", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return 
obj.BasicAuthPassword, nil @@ -22282,22 +29003,31 @@ func (ec *executionContext) _Project_basicAuthPassword(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_basicAuthPassword(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_createdAt(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.CreatedAt, nil @@ -22317,22 +29047,31 @@ func (ec *executionContext) _Project_createdAt(ctx context.Context, field graphq return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := 
&graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_updatedAt(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.UpdatedAt, nil @@ -22352,22 +29091,31 @@ func (ec *executionContext) _Project_updatedAt(ctx context.Context, field graphq return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publishedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_updatedAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, } + 
return fc, nil +} +func (ec *executionContext) _Project_publishedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publishedAt(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PublishedAt, nil @@ -22384,22 +29132,31 @@ func (ec *executionContext) _Project_publishedAt(ctx context.Context, field grap return ec.marshalODateTime2แš–timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Project_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_publishedAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -22419,22 +29176,31 @@ func (ec *executionContext) _Project_name(ctx context.Context, field graphql.Col return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_description(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description, nil @@ -22454,22 +29220,31 @@ func (ec *executionContext) _Project_description(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { 
- ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_alias(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Alias, nil @@ -22489,22 +29264,31 @@ func (ec *executionContext) _Project_alias(ctx context.Context, field graphql.Co return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_alias(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type 
String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_publicTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicTitle(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PublicTitle, nil @@ -22524,22 +29308,31 @@ func (ec *executionContext) _Project_publicTitle(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_publicTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_publicDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicDescription(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PublicDescription, nil @@ -22559,22 +29352,31 @@ func (ec *executionContext) _Project_publicDescription(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicImage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_publicDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_publicImage(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicImage(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PublicImage, nil @@ -22594,22 +29396,31 @@ func (ec *executionContext) _Project_publicImage(ctx context.Context, field grap return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_publicImage(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publicNoIndex(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PublicNoIndex, nil @@ -22629,22 +29440,31 @@ func (ec *executionContext) _Project_publicNoIndex(ctx context.Context, field gr return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_publicNoIndex(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, 
IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_imageUrl(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ImageURL, nil @@ -22661,22 +29481,31 @@ func (ec *executionContext) _Project_imageUrl(ctx context.Context, field graphql return ec.marshalOURL2แš–netแš‹urlแšURL(ctx, field.Selections, res) } -func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_imageUrl(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type URL does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_teamId(ctx, field) + if err != nil { + return graphql.Null + } ctx = 
graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TeamID, nil @@ -22696,22 +29525,31 @@ func (ec *executionContext) _Project_teamId(ctx context.Context, field graphql.C return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_visualizer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_visualizer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Visualizer, nil @@ -22731,22 +29569,31 @@ func (ec *executionContext) _Project_visualizer(ctx 
context.Context, field graph return ec.marshalNVisualizer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšVisualizer(ctx, field.Selections, res) } -func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Project_visualizer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Project", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Visualizer does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_publishmentStatus(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PublishmentStatus, nil @@ -22766,22 +29613,31 @@ func (ec *executionContext) _Project_publishmentStatus(ctx context.Context, fiel return ec.marshalNPublishmentStatus2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPublishmentStatus(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Project_publishmentStatus(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + 
fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PublishmentStatus does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Project_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Project", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Project().Team(rctx, obj) @@ -22798,22 +29654,45 @@ func (ec *executionContext) _Project_team(ctx context.Context, field graphql.Col return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Project_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return 
ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Project_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Project", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Project().Scene(rctx, obj) @@ -22830,22 +29709,71 @@ func (ec *executionContext) _Project_scene(ctx context.Context, field graphql.Co return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Project_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Project", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return 
ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ProjectAliasAvailability_alias(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectAliasAvailability_alias(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ProjectAliasAvailability", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Alias, nil @@ -22865,22 +29793,31 @@ func (ec *executionContext) 
_ProjectAliasAvailability_alias(ctx context.Context, return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectAliasAvailability_available(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_ProjectAliasAvailability_alias(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "ProjectAliasAvailability", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _ProjectAliasAvailability_available(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectAliasAvailability) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectAliasAvailability_available(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Available, nil @@ -22900,22 +29837,31 @@ func (ec *executionContext) _ProjectAliasAvailability_available(ctx context.Cont return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ProjectAliasAvailability_available(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectAliasAvailability", + Field: field, + 
IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_edges(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ProjectConnection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Edges, nil @@ -22935,22 +29881,37 @@ func (ec *executionContext) _ProjectConnection_edges(ctx context.Context, field return ec.marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdgeแš„(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_ProjectConnection_edges(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "ProjectConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + 
case "cursor": + return ec.fieldContext_ProjectEdge_cursor(ctx, field) + case "node": + return ec.fieldContext_ProjectEdge_node(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectEdge", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_nodes(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Nodes, nil @@ -22970,22 +29931,75 @@ func (ec *executionContext) _ProjectConnection_nodes(ctx context.Context, field return ec.marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ProjectConnection_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, 
field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_pageInfo(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ProjectConnection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from 
middleware stack in children return obj.PageInfo, nil @@ -23005,22 +30019,41 @@ func (ec *executionContext) _ProjectConnection_pageInfo(ctx context.Context, fie return ec.marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_ProjectConnection_pageInfo(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "ProjectConnection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "startCursor": + return ec.fieldContext_PageInfo_startCursor(ctx, field) + case "endCursor": + return ec.fieldContext_PageInfo_endCursor(ctx, field) + case "hasNextPage": + return ec.fieldContext_PageInfo_hasNextPage(ctx, field) + case "hasPreviousPage": + return ec.fieldContext_PageInfo_hasPreviousPage(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PageInfo", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectConnection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectConnection_totalCount(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, 
error) { ctx = rctx // use context from middleware stack in children return obj.TotalCount, nil @@ -23040,22 +30073,31 @@ func (ec *executionContext) _ProjectConnection_totalCount(ctx context.Context, f return ec.marshalNInt2int(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ProjectConnection_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectConnection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectEdge_cursor(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ProjectEdge", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Cursor, nil @@ -23075,22 +30117,31 @@ func (ec *executionContext) _ProjectEdge_cursor(ctx context.Context, field graph return ec.marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹usecaseแšCursor(ctx, field.Selections, res) } -func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := 
&graphql.FieldContext{ +func (ec *executionContext) fieldContext_ProjectEdge_cursor(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "ProjectEdge", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Cursor does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectEdge) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectEdge_node(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Node, nil @@ -23107,22 +30158,75 @@ func (ec *executionContext) _ProjectEdge_node(ctx context.Context, field graphql return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ProjectEdge_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectEdge", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return 
ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ProjectPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ProjectPayload_project(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ProjectPayload", - Field: field, - Args: nil, - IsMethod: 
false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Project, nil @@ -23142,22 +30246,75 @@ func (ec *executionContext) _ProjectPayload_project(ctx context.Context, field g return ec.marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ProjectPayload_project(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ProjectPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) 
+ case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Property_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Property", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -23177,22 +30334,31 @@ func (ec *executionContext) _Property_id(ctx context.Context, field graphql.Coll return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Property_id(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Property", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -23212,22 +30378,31 @@ func (ec *executionContext) _Property_schemaId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Property_items(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Property_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Property", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) 
_Property_items(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_items(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Items, nil @@ -23247,22 +30422,31 @@ func (ec *executionContext) _Property_items(ctx context.Context, field graphql.C return ec.marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItemแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Property_items(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PropertyItem does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Property_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Property", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children 
return ec.resolvers.Property().Schema(rctx, obj) @@ -23279,22 +30463,39 @@ func (ec *executionContext) _Property_schema(ctx context.Context, field graphql. return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Property_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Property", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _Property_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_layer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Property().Layer(rctx, obj) @@ -23311,22 +30512,31 @@ func (ec 
*executionContext) _Property_layer(ctx context.Context, field graphql.C return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _Property_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Property_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Property", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } + return fc, nil +} +func (ec *executionContext) _Property_merged(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Property) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Property_merged(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Property().Merged(rctx, obj) @@ -23343,22 +30553,51 @@ func (ec *executionContext) _Property_merged(ctx context.Context, field graphql. 
return ec.marshalOMergedProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Property_merged(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Property", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "originalId": + return ec.fieldContext_MergedProperty_originalId(ctx, field) + case "parentId": + return ec.fieldContext_MergedProperty_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_MergedProperty_schemaId(ctx, field) + case "linkedDatasetId": + return ec.fieldContext_MergedProperty_linkedDatasetId(ctx, field) + case "original": + return ec.fieldContext_MergedProperty_original(ctx, field) + case "parent": + return ec.fieldContext_MergedProperty_parent(ctx, field) + case "schema": + return ec.fieldContext_MergedProperty_schema(ctx, field) + case "linkedDataset": + return ec.fieldContext_MergedProperty_linkedDataset(ctx, field) + case "groups": + return ec.fieldContext_MergedProperty_groups(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type MergedProperty", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyCondition_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyCondition", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = 
graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FieldID, nil @@ -23378,22 +30617,31 @@ func (ec *executionContext) _PropertyCondition_fieldId(ctx context.Context, fiel return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyCondition_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyCondition", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyCondition_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Type, nil @@ -23413,22 +30661,31 @@ func (ec *executionContext) _PropertyCondition_type(ctx context.Context, field g return 
ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyCondition_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyCondition", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyCondition) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyCondition_value(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Value, nil @@ -23445,22 +30702,31 @@ func (ec *executionContext) _PropertyCondition_value(ctx context.Context, field return ec.marshalOAny2interface(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyCondition_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyCondition", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -23480,22 +30746,31 @@ func (ec *executionContext) _PropertyField_id(ctx context.Context, field graphql return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyField_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_parentId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -23515,22 +30790,31 @@ func (ec *executionContext) _PropertyField_parentId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyField_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx 
context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -23550,22 +30834,31 @@ func (ec *executionContext) _PropertyField_schemaId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyField_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FieldID, nil @@ -23585,22 +30878,31 @@ func (ec *executionContext) _PropertyField_fieldId(ctx context.Context, field gr return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec 
*executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_links(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_links(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Links, nil @@ -23617,22 +30919,47 @@ func (ec *executionContext) _PropertyField_links(ctx context.Context, field grap return ec.marshalOPropertyFieldLink2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLinkแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_PropertyField_links(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "datasetId": + return ec.fieldContext_PropertyFieldLink_datasetId(ctx, field) + case "datasetSchemaId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaId(ctx, field) + case "datasetSchemaFieldId": + return ec.fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx, field) + case "dataset": + return ec.fieldContext_PropertyFieldLink_dataset(ctx, field) + case "datasetField": + return ec.fieldContext_PropertyFieldLink_datasetField(ctx, field) + case "datasetSchema": + return ec.fieldContext_PropertyFieldLink_datasetSchema(ctx, field) + case "datasetSchemaField": + return ec.fieldContext_PropertyFieldLink_datasetSchemaField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyFieldLink", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Type, nil @@ -23652,22 +30979,31 @@ func (ec *executionContext) _PropertyField_type(ctx context.Context, field graph return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func 
(ec *executionContext) _PropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_value(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_value(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Value, nil @@ -23684,22 +31020,31 @@ func (ec *executionContext) _PropertyField_value(ctx context.Context, field grap return ec.marshalOAny2interface(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyField_value(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + 
func (ec *executionContext) _PropertyField_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyField().Parent(rctx, obj) @@ -23716,22 +31061,45 @@ func (ec *executionContext) _PropertyField_parent(ctx context.Context, field gra return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyField_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) 
_PropertyField_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyField().Schema(rctx, obj) @@ -23748,22 +31116,39 @@ func (ec *executionContext) _PropertyField_schema(ctx context.Context, field gra return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyField_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyField", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field 
named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertyField_field(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_field(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyField().Field(rctx, obj) @@ -23780,22 +31165,65 @@ func (ec *executionContext) _PropertyField_field(ctx context.Context, field grap return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyField_field(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return 
ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyField_actualValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyField", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyField().ActualValue(rctx, obj) @@ -23812,22 +31240,31 @@ func (ec *executionContext) _PropertyField_actualValue(ctx context.Context, fiel return ec.marshalOAny2interface(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyField_actualValue(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyFieldLink", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetID, nil @@ -23844,22 +31281,31 @@ func (ec *executionContext) _PropertyFieldLink_datasetId(ctx context.Context, fi return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyFieldLink", Field: field, - Args: nil, 
IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchemaID, nil @@ -23879,22 +31325,31 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaId(ctx context.Conte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyFieldLink", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchemaFieldID, nil @@ -23914,22 +31369,31 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaFieldId(ctx context. return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchemaFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_dataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyFieldLink", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyFieldLink().Dataset(rctx, obj) @@ -23946,22 +31410,45 @@ func (ec *executionContext) _PropertyFieldLink_dataset(ctx context.Context, fiel return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyFieldLink_dataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyFieldLink", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyFieldLink().DatasetField(rctx, obj) @@ -23978,22 +31465,49 @@ func (ec *executionContext) _PropertyFieldLink_datasetField(ctx context.Context, return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_DatasetField_fieldId(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetField_schemaId(ctx, field) + case "source": + return ec.fieldContext_DatasetField_source(ctx, field) + case "type": + return ec.fieldContext_DatasetField_type(ctx, field) + case "value": + return ec.fieldContext_DatasetField_value(ctx, field) + case "schema": + return ec.fieldContext_DatasetField_schema(ctx, field) + case "field": + return ec.fieldContext_DatasetField_field(ctx, field) + case "valueRef": + return ec.fieldContext_DatasetField_valueRef(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: 
"PropertyFieldLink", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyFieldLink().DatasetSchema(rctx, obj) @@ -24010,22 +31524,53 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchema(ctx context.Context return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) 
_PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldLink) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldLink_datasetSchemaField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyFieldLink", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyFieldLink().DatasetSchemaField(rctx, obj) @@ -24042,22 +31587,49 @@ func (ec *executionContext) _PropertyFieldLink_datasetSchemaField(ctx context.Co return ec.marshalODatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchemaField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldLink", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchemaField_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchemaField_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchemaField_name(ctx, field) + case "type": + return ec.fieldContext_DatasetSchemaField_type(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetSchemaField_schemaId(ctx, field) + case "refId": + return ec.fieldContext_DatasetSchemaField_refId(ctx, field) + case "schema": + return 
ec.fieldContext_DatasetSchemaField_schema(ctx, field) + case "ref": + return ec.fieldContext_DatasetSchemaField_ref(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldPayload_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyFieldPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Property, nil @@ -24077,22 +31649,45 @@ func (ec *executionContext) _PropertyFieldPayload_property(ctx context.Context, return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyFieldPayload_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyFieldPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyFieldPayload_propertyField(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyField, nil @@ -24109,22 +31704,55 @@ func (ec *executionContext) _PropertyFieldPayload_propertyField(ctx context.Cont return ec.marshalOPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyFieldPayload_propertyField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyField_id(ctx, field) + case "parentId": + return 
ec.fieldContext_PropertyField_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyField_schemaId(ctx, field) + case "fieldId": + return ec.fieldContext_PropertyField_fieldId(ctx, field) + case "links": + return ec.fieldContext_PropertyField_links(ctx, field) + case "type": + return ec.fieldContext_PropertyField_type(ctx, field) + case "value": + return ec.fieldContext_PropertyField_value(ctx, field) + case "parent": + return ec.fieldContext_PropertyField_parent(ctx, field) + case "schema": + return ec.fieldContext_PropertyField_schema(ctx, field) + case "field": + return ec.fieldContext_PropertyField_field(ctx, field) + case "actualValue": + return ec.fieldContext_PropertyField_actualValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -24144,22 +31772,31 @@ func (ec *executionContext) _PropertyGroup_id(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret 
graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroup_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -24174,27 +31811,36 @@ func (ec *executionContext) _PropertyGroup_schemaId(ctx context.Context, field g } return graphql.Null } - res := resTmp.(gqlmodel.ID) - fc.Result = res - return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) + res := resTmp.(gqlmodel.ID) + fc.Result = res + return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_PropertyGroup_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: false, + 
IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil } func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaGroupID, nil @@ -24214,22 +31860,31 @@ func (ec *executionContext) _PropertyGroup_schemaGroupId(ctx context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } 
+ return fc, nil +} +func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fields, nil @@ -24249,22 +31904,55 @@ func (ec *executionContext) _PropertyGroup_fields(ctx context.Context, field gra return ec.marshalNPropertyField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyGroup_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyField_id(ctx, field) + case "parentId": + return ec.fieldContext_PropertyField_parentId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyField_schemaId(ctx, field) + case "fieldId": + return ec.fieldContext_PropertyField_fieldId(ctx, field) + case "links": + return ec.fieldContext_PropertyField_links(ctx, field) + case "type": + return ec.fieldContext_PropertyField_type(ctx, field) + case "value": + return ec.fieldContext_PropertyField_value(ctx, field) + case "parent": + return ec.fieldContext_PropertyField_parent(ctx, field) + case "schema": + return ec.fieldContext_PropertyField_schema(ctx, field) + case "field": + return 
ec.fieldContext_PropertyField_field(ctx, field) + case "actualValue": + return ec.fieldContext_PropertyField_actualValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyGroup().Schema(rctx, obj) @@ -24281,22 +31969,39 @@ func (ec *executionContext) _PropertyGroup_schema(ctx context.Context, field gra return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroup_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroup", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + 
switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyGroup().SchemaGroup(rctx, obj) @@ -24313,22 +32018,55 @@ func (ec *executionContext) _PropertyGroup_schemaGroup(ctx context.Context, fiel return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyGroup_schemaGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaGroupId": + return ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + case "fields": + return ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + case "isList": + return 
ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaGroup_title(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + case "representativeFieldId": + return ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + case "representativeField": + return ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + case "schema": + return ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyGroupList", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -24348,22 +32086,31 @@ func (ec *executionContext) _PropertyGroupList_id(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroupList_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroupList", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -24383,22 +32130,31 @@ func (ec *executionContext) _PropertyGroupList_schemaId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroupList_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, 
err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroupList", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaGroupID, nil @@ -24418,22 +32174,31 @@ func (ec *executionContext) _PropertyGroupList_schemaGroupId(ctx context.Context return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroupList_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroupList", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} 
+func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_groups(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Groups, nil @@ -24453,22 +32218,45 @@ func (ec *executionContext) _PropertyGroupList_groups(ctx context.Context, field return ec.marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroupแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyGroupList_groups(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertyGroup_id(ctx, field) + case "schemaId": + return ec.fieldContext_PropertyGroup_schemaId(ctx, field) + case "schemaGroupId": + return ec.fieldContext_PropertyGroup_schemaGroupId(ctx, field) + case "fields": + return ec.fieldContext_PropertyGroup_fields(ctx, field) + case "schema": + return ec.fieldContext_PropertyGroup_schema(ctx, field) + case "schemaGroup": + return ec.fieldContext_PropertyGroup_schemaGroup(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyGroupList", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyGroupList().Schema(rctx, obj) @@ -24485,22 +32273,39 @@ func (ec *executionContext) _PropertyGroupList_schema(ctx context.Context, field return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyGroupList_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyGroupList", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type 
PropertySchema", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyGroupList) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyGroupList_schemaGroup(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyGroupList().SchemaGroup(rctx, obj) @@ -24517,22 +32322,55 @@ func (ec *executionContext) _PropertyGroupList_schemaGroup(ctx context.Context, return ec.marshalOPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyGroupList_schemaGroup(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyGroupList", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaGroupId": + return ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + case "fields": + return ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + case "isList": + return ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaGroup_title(ctx, field) + case "allTranslatedTitle": + return 
ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + case "representativeFieldId": + return ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + case "representativeField": + return ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + case "schema": + return ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyItemPayload_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyItemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Property, nil @@ -24552,22 +32390,45 @@ func (ec *executionContext) _PropertyItemPayload_property(ctx context.Context, f return ec.marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_PropertyItemPayload_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyItemPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyItemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyItemPayload_propertyItem(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyItem, nil @@ -24584,22 +32445,31 @@ func (ec *executionContext) _PropertyItemPayload_propertyItem(ctx context.Contex return ec.marshalOPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyItemPayload_propertyItem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + 
Object: "PropertyItemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PropertyItem does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyLinkableFields", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -24619,22 +32489,31 @@ func (ec *executionContext) _PropertyLinkableFields_schemaId(ctx context.Context return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyLinkableFields_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyLinkableFields", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_latlng(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Latlng, nil @@ -24651,22 +32530,31 @@ func (ec *executionContext) _PropertyLinkableFields_latlng(ctx context.Context, return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertyLinkableFields_latlng(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertyLinkableFields", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) 
{ + fc, err := ec.fieldContext_PropertyLinkableFields_url(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.URL, nil @@ -24683,22 +32571,31 @@ func (ec *executionContext) _PropertyLinkableFields_url(ctx context.Context, fie return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyLinkableFields_url(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_latlngField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyLinkableFields", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return 
ec.resolvers.PropertyLinkableFields().LatlngField(rctx, obj) @@ -24715,22 +32612,65 @@ func (ec *executionContext) _PropertyLinkableFields_latlngField(ctx context.Cont return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyLinkableFields_latlngField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return 
ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_urlField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyLinkableFields", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyLinkableFields().URLField(rctx, obj) @@ -24747,22 +32687,65 @@ func (ec *executionContext) _PropertyLinkableFields_urlField(ctx context.Context return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyLinkableFields_urlField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case 
"title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertyLinkableFields) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertyLinkableFields_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertyLinkableFields", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, 
fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertyLinkableFields().Schema(rctx, obj) @@ -24779,22 +32762,39 @@ func (ec *executionContext) _PropertyLinkableFields_schema(ctx context.Context, return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertyLinkableFields_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertyLinkableFields", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchema_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertySchema", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return 
obj.ID, nil @@ -24814,22 +32814,31 @@ func (ec *executionContext) _PropertySchema_id(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchema_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchema", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchema_groups(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Groups, nil @@ -24849,22 +32858,55 @@ func (ec *executionContext) _PropertySchema_groups(ctx context.Context, field gr return ec.marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroupแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertySchema_groups(ctx context.Context, 
field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaGroupId": + return ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + case "schemaId": + return ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + case "fields": + return ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + case "isList": + return ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaGroup_title(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + case "representativeFieldId": + return ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + case "representativeField": + return ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + case "schema": + return ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchema_linkableFields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertySchema", - Field: field, - Args: nil, - IsMethod: false, - 
IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkableFields, nil @@ -24884,22 +32926,45 @@ func (ec *executionContext) _PropertySchema_linkableFields(ctx context.Context, return ec.marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertySchema_linkableFields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchema", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "schemaId": + return ec.fieldContext_PropertyLinkableFields_schemaId(ctx, field) + case "latlng": + return ec.fieldContext_PropertyLinkableFields_latlng(ctx, field) + case "url": + return ec.fieldContext_PropertyLinkableFields_url(ctx, field) + case "latlngField": + return ec.fieldContext_PropertyLinkableFields_latlngField(ctx, field) + case "urlField": + return ec.fieldContext_PropertyLinkableFields_urlField(ctx, field) + case "schema": + return ec.fieldContext_PropertyLinkableFields_schema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyLinkableFields", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = 
graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FieldID, nil @@ -24919,22 +32984,31 @@ func (ec *executionContext) _PropertySchemaField_fieldId(ctx context.Context, fi return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_fieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in 
children return obj.Type, nil @@ -24954,22 +33028,31 @@ func (ec *executionContext) _PropertySchemaField_type(ctx context.Context, field return ec.marshalNValueType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšValueType(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ValueType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_title(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Title, nil @@ -24989,22 +33072,31 @@ func (ec *executionContext) _PropertySchemaField_title(ctx context.Context, fiel return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_title(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_description(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description, nil @@ -25024,22 +33116,31 @@ func (ec *executionContext) _PropertySchemaField_description(ctx context.Context return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ 
Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_prefix(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Prefix, nil @@ -25056,22 +33157,31 @@ func (ec *executionContext) _PropertySchemaField_prefix(ctx context.Context, fie return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_prefix(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_suffix(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Suffix, nil @@ -25088,22 +33198,31 @@ func (ec *executionContext) _PropertySchemaField_suffix(ctx context.Context, fie return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_suffix(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DefaultValue, nil @@ -25120,22 +33239,31 @@ func (ec *executionContext) _PropertySchemaField_defaultValue(ctx context.Contex return ec.marshalOAny2interface(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertySchemaField_defaultValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Any does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_ui(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaField", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.UI, nil @@ -25152,22 +33280,31 @@ func (ec *executionContext) _PropertySchemaField_ui(ctx context.Context, field g return ec.marshalOPropertySchemaFieldUI2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldUI(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer 
func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_ui(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type PropertySchemaFieldUI does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_min(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Min, nil @@ -25184,22 +33321,31 @@ func (ec *executionContext) _PropertySchemaField_min(ctx context.Context, field return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_min(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: 
false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_max(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Max, nil @@ -25216,22 +33362,31 @@ func (ec *executionContext) _PropertySchemaField_max(ctx context.Context, field return ec.marshalOFloat2แš–float64(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_max(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_PropertySchemaField_choices(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Choices, nil @@ -25248,22 +33403,43 @@ func (ec *executionContext) _PropertySchemaField_choices(ctx context.Context, fi return ec.marshalOPropertySchemaFieldChoice2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoiceแš„(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_choices(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "key": + return ec.fieldContext_PropertySchemaFieldChoice_key(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaFieldChoice_title(ctx, field) + case "icon": + return ec.fieldContext_PropertySchemaFieldChoice_icon(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaFieldChoice_allTranslatedTitle(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaFieldChoice_translatedTitle(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type 
PropertySchemaFieldChoice", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsAvailableIf, nil @@ -25280,22 +33456,39 @@ func (ec *executionContext) _PropertySchemaField_isAvailableIf(ctx context.Conte return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_isAvailableIf(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertyCondition_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertyCondition_type(ctx, field) + case "value": + return ec.fieldContext_PropertyCondition_value(ctx, field) + } + return nil, fmt.Errorf("no field named %q 
was found under type PropertyCondition", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedTitle, nil @@ -25312,22 +33505,31 @@ func (ec *executionContext) _PropertySchemaField_allTranslatedTitle(ctx context. return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_allTranslatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedDescription, nil @@ -25344,32 +33546,34 @@ func (ec *executionContext) _PropertySchemaField_allTranslatedDescription(ctx co return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_allTranslatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PropertySchemaField_translatedTitle_args(ctx, rawArgs) +func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null 
} - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaField().TranslatedTitle(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.PropertySchemaField().TranslatedTitle(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25386,32 +33590,45 @@ func (ec *executionContext) _PropertySchemaField_translatedTitle(ctx context.Con return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaField_translatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaField", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PropertySchemaField_translatedDescription_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_PropertySchemaField_translatedTitle_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + 
return fc, nil +} + +func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaField().TranslatedDescription(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.PropertySchemaField().TranslatedDescription(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25428,22 +33645,42 @@ func (ec *executionContext) _PropertySchemaField_translatedDescription(ctx conte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_PropertySchemaField_translatedDescription(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaField", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaFieldChoice", - Field: field, - Args: nil, - IsMethod: 
false, - IsResolver: false, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaField_translatedDescription_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_key(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Key, nil @@ -25463,22 +33700,31 @@ func (ec *executionContext) _PropertySchemaFieldChoice_key(ctx context.Context, return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_key(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaFieldChoice", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_title(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Title, nil @@ -25498,22 +33744,31 @@ func (ec *executionContext) _PropertySchemaFieldChoice_title(ctx context.Context return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_title(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaFieldChoice", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_icon(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Icon, nil @@ -25530,22 +33785,31 @@ func (ec *executionContext) _PropertySchemaFieldChoice_icon(ctx context.Context, return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_icon(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaFieldChoice", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_allTranslatedTitle(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedTitle, nil @@ -25562,32 +33826,34 @@ func (ec *executionContext) _PropertySchemaFieldChoice_allTranslatedTitle(ctx co return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec 
*executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_allTranslatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaFieldChoice", Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PropertySchemaFieldChoice_translatedTitle_args(ctx, rawArgs) +func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaFieldChoice) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaFieldChoice_translatedTitle(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaFieldChoice().TranslatedTitle(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.PropertySchemaFieldChoice().TranslatedTitle(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25604,22 +33870,42 @@ 
func (ec *executionContext) _PropertySchemaFieldChoice_translatedTitle(ctx conte return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_PropertySchemaFieldChoice_translatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaFieldChoice", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "PropertySchemaGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaFieldChoice_translatedTitle_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_schemaGroupId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaGroupID, nil @@ -25639,22 +33925,31 @@ func (ec *executionContext) 
_PropertySchemaGroup_schemaGroupId(ctx context.Conte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_schemaGroupId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_schemaId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -25674,22 +33969,31 @@ func (ec *executionContext) _PropertySchemaGroup_schemaId(ctx context.Context, f return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_fields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Fields, nil @@ -25709,22 +34013,65 @@ func (ec *executionContext) _PropertySchemaGroup_fields(ctx context.Context, fie return ec.marshalNPropertySchemaField2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertySchemaGroup_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case 
"fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_isList(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := 
&graphql.FieldContext{ - Object: "PropertySchemaGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsList, nil @@ -25744,22 +34091,31 @@ func (ec *executionContext) _PropertySchemaGroup_isList(ctx context.Context, fie return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_isList(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_isAvailableIf(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsAvailableIf, nil @@ -25776,22 +34132,39 @@ func (ec 
*executionContext) _PropertySchemaGroup_isAvailableIf(ctx context.Conte return ec.marshalOPropertyCondition2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyCondition(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_isAvailableIf(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_PropertyCondition_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertyCondition_type(ctx, field) + case "value": + return ec.fieldContext_PropertyCondition_value(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertyCondition", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_title(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Title, nil @@ -25808,22 +34181,31 @@ func (ec *executionContext) 
_PropertySchemaGroup_title(ctx context.Context, fiel return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_title(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AllTranslatedTitle, nil @@ -25840,22 +34222,31 @@ func (ec *executionContext) _PropertySchemaGroup_allTranslatedTitle(ctx context. 
return ec.marshalOTranslatedString2map(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_allTranslatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TranslatedString does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_representativeFieldId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.RepresentativeFieldID, nil @@ -25872,22 +34263,31 @@ func (ec *executionContext) _PropertySchemaGroup_representativeFieldId(ctx conte return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - 
defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_representativeFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_representativeField(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.RepresentativeField, nil @@ -25904,22 +34304,65 @@ func (ec *executionContext) _PropertySchemaGroup_representativeField(ctx context return ec.marshalOPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_PropertySchemaGroup_representativeField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case 
"fieldId": + return ec.fieldContext_PropertySchemaField_fieldId(ctx, field) + case "type": + return ec.fieldContext_PropertySchemaField_type(ctx, field) + case "title": + return ec.fieldContext_PropertySchemaField_title(ctx, field) + case "description": + return ec.fieldContext_PropertySchemaField_description(ctx, field) + case "prefix": + return ec.fieldContext_PropertySchemaField_prefix(ctx, field) + case "suffix": + return ec.fieldContext_PropertySchemaField_suffix(ctx, field) + case "defaultValue": + return ec.fieldContext_PropertySchemaField_defaultValue(ctx, field) + case "ui": + return ec.fieldContext_PropertySchemaField_ui(ctx, field) + case "min": + return ec.fieldContext_PropertySchemaField_min(ctx, field) + case "max": + return ec.fieldContext_PropertySchemaField_max(ctx, field) + case "choices": + return ec.fieldContext_PropertySchemaField_choices(ctx, field) + case "isAvailableIf": + return ec.fieldContext_PropertySchemaField_isAvailableIf(ctx, field) + case "allTranslatedTitle": + return ec.fieldContext_PropertySchemaField_allTranslatedTitle(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PropertySchemaField_allTranslatedDescription(ctx, field) + case "translatedTitle": + return ec.fieldContext_PropertySchemaField_translatedTitle(ctx, field) + case "translatedDescription": + return ec.fieldContext_PropertySchemaField_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchemaField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := 
&graphql.FieldContext{ - Object: "PropertySchemaGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.PropertySchemaGroup().Schema(rctx, obj) @@ -25936,32 +34379,42 @@ func (ec *executionContext) _PropertySchemaGroup_schema(ctx context.Context, fie return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_PropertySchemaGroup_schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "PropertySchemaGroup", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_PropertySchemaGroup_translatedTitle_args(ctx, rawArgs) +func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Context, 
field graphql.CollectedField, obj *gqlmodel.PropertySchemaGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_PropertySchemaGroup_translatedTitle(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.PropertySchemaGroup().TranslatedTitle(rctx, obj, args["lang"].(*language.Tag)) + return ec.resolvers.PropertySchemaGroup().TranslatedTitle(rctx, obj, fc.Args["lang"].(*language.Tag)) }) if err != nil { ec.Error(ctx, err) @@ -25978,22 +34431,42 @@ func (ec *executionContext) _PropertySchemaGroup_translatedTitle(ctx context.Con return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Query_me(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_PropertySchemaGroup_translatedTitle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "PropertySchemaGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "Query", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_PropertySchemaGroup_translatedTitle_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + 
ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _Query_me(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_me(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Query().Me(rctx) @@ -26010,32 +34483,54 @@ func (ec *executionContext) _Query_me(ctx context.Context, field graphql.Collect return ec.marshalOMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx, field.Selections, res) } -func (ec *executionContext) _Query_node(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_me(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Me_id(ctx, field) + case "name": + return ec.fieldContext_Me_name(ctx, field) + case "email": + return ec.fieldContext_Me_email(ctx, field) + case "lang": + return ec.fieldContext_Me_lang(ctx, field) + case "theme": + return ec.fieldContext_Me_theme(ctx, field) + case "myTeamId": + return ec.fieldContext_Me_myTeamId(ctx, field) + case "auths": + return ec.fieldContext_Me_auths(ctx, field) + case "teams": + return ec.fieldContext_Me_teams(ctx, field) + case "myTeam": 
+ return ec.fieldContext_Me_myTeam(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Me", field.Name) + }, + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_node_args(ctx, rawArgs) +func (ec *executionContext) _Query_node(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_node(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Node(rctx, args["id"].(gqlmodel.ID), args["type"].(gqlmodel.NodeType)) + return ec.resolvers.Query().Node(rctx, fc.Args["id"].(gqlmodel.ID), fc.Args["type"].(gqlmodel.NodeType)) }) if err != nil { ec.Error(ctx, err) @@ -26049,32 +34544,45 @@ func (ec *executionContext) _Query_node(ctx context.Context, field graphql.Colle return ec.marshalONode2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, field.Selections, res) } -func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_node(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_nodes_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_node_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_nodes(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Nodes(rctx, args["id"].([]gqlmodel.ID), args["type"].(gqlmodel.NodeType)) + return ec.resolvers.Query().Nodes(rctx, fc.Args["id"].([]gqlmodel.ID), fc.Args["type"].(gqlmodel.NodeType)) }) if err != nil { ec.Error(ctx, err) @@ -26091,32 +34599,45 @@ func (ec *executionContext) _Query_nodes(ctx context.Context, field graphql.Coll return ec.marshalNNode2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšNode(ctx, field.Selections, res) } -func (ec *executionContext) _Query_propertySchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_nodes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: 
"Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_propertySchema_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_nodes_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_propertySchema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_propertySchema(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().PropertySchema(rctx, args["id"].(gqlmodel.ID)) + return ec.resolvers.Query().PropertySchema(rctx, fc.Args["id"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26130,32 +34651,53 @@ func (ec *executionContext) _Query_propertySchema(ctx context.Context, field gra return ec.marshalOPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx, field.Selections, res) } -func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec 
*executionContext) fieldContext_Query_propertySchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_propertySchemas_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_propertySchema_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_propertySchemas(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().PropertySchemas(rctx, args["id"].([]gqlmodel.ID)) + return ec.resolvers.Query().PropertySchemas(rctx, fc.Args["id"].([]gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26172,32 +34714,53 @@ func (ec *executionContext) _Query_propertySchemas(ctx 
context.Context, field gr return ec.marshalNPropertySchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_propertySchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_PropertySchema_id(ctx, field) + case "groups": + return ec.fieldContext_PropertySchema_groups(ctx, field) + case "linkableFields": + return ec.fieldContext_PropertySchema_linkableFields(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PropertySchema", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_plugin_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_propertySchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_plugin(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } 
+ }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Plugin(rctx, args["id"].(gqlmodel.ID)) + return ec.resolvers.Query().Plugin(rctx, fc.Args["id"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26211,32 +34774,79 @@ func (ec *executionContext) _Query_plugin(ctx context.Context, field graphql.Col return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } -func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case 
"allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_plugin_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_plugins_args(ctx, rawArgs) +func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_plugins(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Plugins(rctx, args["id"].([]gqlmodel.ID)) + return ec.resolvers.Query().Plugins(rctx, fc.Args["id"].([]gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26253,32 +34863,79 @@ func (ec *executionContext) _Query_plugins(ctx context.Context, field graphql.Co return 
ec.marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_plugins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + 
return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_plugins_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_layer_args(ctx, rawArgs) +func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_layer(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Layer(rctx, args["id"].(gqlmodel.ID)) + return ec.resolvers.Query().Layer(rctx, fc.Args["id"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26292,32 +34949,45 @@ func (ec *executionContext) _Query_layer(ctx context.Context, field graphql.Coll return ec.marshalOLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } -func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + 
fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_scene_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_layer_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_scene(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Scene(rctx, args["projectId"].(gqlmodel.ID)) + return ec.resolvers.Query().Scene(rctx, fc.Args["projectId"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26331,32 +35001,85 @@ func (ec *executionContext) _Query_scene(ctx context.Context, field graphql.Coll return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } -func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) 
fieldContext_Query_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = 
ec.field_Query_scene_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_assets_args(ctx, rawArgs) +func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_assets(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Assets(rctx, args["teamId"].(gqlmodel.ID), args["keyword"].(*string), args["sort"].(*gqlmodel.AssetSortType), args["pagination"].(*gqlmodel.Pagination)) + return ec.resolvers.Query().Assets(rctx, fc.Args["teamId"].(gqlmodel.ID), fc.Args["keyword"].(*string), fc.Args["sort"].(*gqlmodel.AssetSortType), fc.Args["pagination"].(*gqlmodel.Pagination)) }) if err != nil { ec.Error(ctx, err) @@ -26373,32 +35096,55 @@ func (ec *executionContext) _Query_assets(ctx context.Context, field graphql.Col return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_assets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, 
- Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_AssetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_AssetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_AssetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_AssetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AssetConnection", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_projects_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_assets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_projects(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Projects(rctx, args["teamId"].(gqlmodel.ID), args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().Projects(rctx, fc.Args["teamId"].(gqlmodel.ID), fc.Args["includeArchived"].(*bool), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), 
fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -26415,32 +35161,55 @@ func (ec *executionContext) _Query_projects(ctx context.Context, field graphql.C return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_projects(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_ProjectConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_ProjectConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_ProjectConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_ProjectConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectConnection", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_datasetSchemas_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_projects_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, 
err := ec.fieldContext_Query_datasetSchemas(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().DatasetSchemas(rctx, args["sceneId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().DatasetSchemas(rctx, fc.Args["sceneId"].(gqlmodel.ID), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -26457,32 +35226,55 @@ func (ec *executionContext) _Query_datasetSchemas(ctx context.Context, field gra return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_datasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetSchemaConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetSchemaConnection_nodes(ctx, field) + case "pageInfo": + return 
ec.fieldContext_DatasetSchemaConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchemaConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaConnection", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_datasets_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_datasetSchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_datasets(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().Datasets(rctx, args["datasetSchemaId"].(gqlmodel.ID), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Query().Datasets(rctx, fc.Args["datasetSchemaId"].(gqlmodel.ID), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -26499,32 +35291,55 @@ func (ec *executionContext) _Query_datasets(ctx context.Context, field graphql.C return ec.marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx, field.Selections, res) } -func (ec *executionContext) 
_Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_datasets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetConnection", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_dynamicDatasetSchemas_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_datasets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_dynamicDatasetSchemas(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().DynamicDatasetSchemas(rctx, args["sceneId"].(gqlmodel.ID)) + return ec.resolvers.Query().DynamicDatasetSchemas(rctx, fc.Args["sceneId"].(gqlmodel.ID)) }) if err != nil { ec.Error(ctx, err) @@ -26541,32 +35356,67 @@ func (ec *executionContext) _Query_dynamicDatasetSchemas(ctx context.Context, fi return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return 
ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_searchUser_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_dynamicDatasetSchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_searchUser(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().SearchUser(rctx, args["nameOrEmail"].(string)) + return ec.resolvers.Query().SearchUser(rctx, fc.Args["nameOrEmail"].(string)) }) if err != nil { ec.Error(ctx, err) @@ -26580,32 +35430,53 @@ func (ec *executionContext) _Query_searchUser(ctx context.Context, field graphql return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } -func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_searchUser(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_User_id(ctx, field) + case "name": + return ec.fieldContext_User_name(ctx, field) + case "email": + return ec.fieldContext_User_email(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type User", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query_checkProjectAlias_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Query_searchUser_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + return fc, nil +} + +func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_checkProjectAlias(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Query().CheckProjectAlias(rctx, args["alias"].(string)) + return ec.resolvers.Query().CheckProjectAlias(rctx, fc.Args["alias"].(string)) }) if err != nil { ec.Error(ctx, err) @@ -26622,22 +35493,48 @@ func (ec *executionContext) _Query_checkProjectAlias(ctx context.Context, field return 
ec.marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx, field.Selections, res) } -func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_checkProjectAlias(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "alias": + return ec.fieldContext_ProjectAliasAvailability_alias(ctx, field) + case "available": + return ec.fieldContext_ProjectAliasAvailability_available(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectAliasAvailability", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query_checkProjectAlias_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_installablePlugins(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children 
return ec.resolvers.Query().InstallablePlugins(rctx) @@ -26657,32 +35554,46 @@ func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field return ec.marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, - IsResolver: false, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext_PluginMetadata_name(ctx, field) + case "description": + return ec.fieldContext_PluginMetadata_description(ctx, field) + case "author": + return ec.fieldContext_PluginMetadata_author(ctx, field) + case "thumbnailUrl": + return ec.fieldContext_PluginMetadata_thumbnailUrl(ctx, field) + case "createdAt": + return ec.fieldContext_PluginMetadata_createdAt(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginMetadata", field.Name) + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Query___type_args(ctx, rawArgs) +func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___type(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := 
recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.introspectType(args["name"].(string)) + return ec.introspectType(fc.Args["name"].(string)) }) if err != nil { ec.Error(ctx, err) @@ -26696,22 +35607,64 @@ func (ec *executionContext) _Query___type(ctx context.Context, field graphql.Col return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } -func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Query___type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Query", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } 
+ return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query___type_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___schema(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.introspectSchema() @@ -26728,22 +35681,45 @@ func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.C return ec.marshalO__Schema2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Query___schema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "description": + return ec.fieldContext___Schema_description(ctx, field) + case "types": + return ec.fieldContext___Schema_types(ctx, field) + case "queryType": + return ec.fieldContext___Schema_queryType(ctx, field) + case "mutationType": + return ec.fieldContext___Schema_mutationType(ctx, field) + case "subscriptionType": + return ec.fieldContext___Schema_subscriptionType(ctx, field) + case "directives": + return 
ec.fieldContext___Schema_directives(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Schema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Rect_west(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_west(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Rect", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.West, nil @@ -26763,22 +35739,31 @@ func (ec *executionContext) _Rect_west(ctx context.Context, field graphql.Collec return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Rect_west(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Rect", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_south(ctx, field) + if err != nil 
{ + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.South, nil @@ -26798,22 +35783,31 @@ func (ec *executionContext) _Rect_south(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Rect_south(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Rect", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_east(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.East, nil @@ -26833,22 +35827,31 @@ func (ec *executionContext) _Rect_east(ctx context.Context, field graphql.Collec return ec.marshalNFloat2float64(ctx, field.Selections, res) } -func 
(ec *executionContext) _Rect_north(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Rect_east(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Rect", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Rect) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Rect_north(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.North, nil @@ -26868,22 +35871,31 @@ func (ec *executionContext) _Rect_north(ctx context.Context, field graphql.Colle return ec.marshalNFloat2float64(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Rect_north(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Rect", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Float does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.RemoveAssetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveAssetPayload_assetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveAssetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.AssetID, nil @@ -26903,22 +35915,31 @@ func (ec *executionContext) _RemoveAssetPayload_assetId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveAssetPayload_assetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveAssetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveClusterPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveClusterPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = 
graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -26938,22 +35959,71 @@ func (ec *executionContext) _RemoveClusterPayload_scene(ctx context.Context, fie return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveClusterPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return 
ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveClusterPayload_clusterId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveClusterPayload_clusterId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveClusterPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ClusterID, nil @@ -26973,22 +36043,31 @@ func (ec *executionContext) _RemoveClusterPayload_clusterId(ctx context.Context, return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveClusterPayload_clusterId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.RemoveDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveDatasetSchemaPayload_schemaId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveDatasetSchemaPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SchemaID, nil @@ -27008,22 +36087,31 @@ func (ec *executionContext) _RemoveDatasetSchemaPayload_schemaId(ctx context.Con return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveDatasetSchemaPayload_schemaId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveInfoboxFieldPayload", - 
Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.InfoboxFieldID, nil @@ -27043,22 +36131,31 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload_infoboxFieldId(ctx contex return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_RemoveInfoboxFieldPayload_infoboxFieldId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "RemoveInfoboxFieldPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxFieldPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveInfoboxFieldPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ 
-27078,22 +36175,31 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload_layer(ctx context.Context return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveInfoboxFieldPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveInfoboxFieldPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveInfoboxPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveInfoboxPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveInfoboxPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -27113,22 +36219,31 @@ func (ec *executionContext) _RemoveInfoboxPayload_layer(ctx context.Context, fie return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveInfoboxPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"RemoveInfoboxPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveLayerPayload_layerId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LayerID, nil @@ -27148,22 +36263,31 @@ func (ec *executionContext) _RemoveLayerPayload_layerId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_RemoveLayerPayload_layerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "RemoveLayerPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveLayerPayload_parentLayer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentLayer, nil @@ -27183,22 +36307,75 @@ func (ec *executionContext) _RemoveLayerPayload_parentLayer(ctx context.Context, return ec.marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveLayerPayload_parentLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case "pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return 
ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveMemberFromTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveMemberFromTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveMemberFromTeamPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Team, nil @@ 
-27218,22 +36395,45 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload_team(ctx context.Contex return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveMemberFromTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveMemberFromTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveTagPayload_tagId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveTagPayload_tagId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveTagPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TagID, nil @@ -27253,22 +36453,31 @@ func (ec *executionContext) 
_RemoveTagPayload_tagId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_RemoveTagPayload_tagId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "RemoveTagPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveTagPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveTagPayload_updatedLayers(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.UpdatedLayers, nil @@ -27288,22 +36497,31 @@ func (ec *executionContext) _RemoveTagPayload_updatedLayers(ctx context.Context, return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveTagPayload_updatedLayers(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveTagPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveWidgetPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -27323,22 +36541,71 @@ func (ec *executionContext) _RemoveWidgetPayload_scene(ctx context.Context, fiel return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveWidgetPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case 
"propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _RemoveWidgetPayload_widgetId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.RemoveWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_RemoveWidgetPayload_widgetId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "RemoveWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
ctx = rctx // use context from middleware stack in children return obj.WidgetID, nil @@ -27358,22 +36625,31 @@ func (ec *executionContext) _RemoveWidgetPayload_widgetId(ctx context.Context, f return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_RemoveWidgetPayload_widgetId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "RemoveWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -27393,22 +36669,31 @@ func (ec *executionContext) _Scene_id(ctx context.Context, field graphql.Collect return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - 
ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_projectId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ProjectID, nil @@ -27428,22 +36713,31 @@ func (ec *executionContext) _Scene_projectId(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_projectId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_teamId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TeamID, nil @@ -27463,22 +36757,31 @@ func (ec *executionContext) _Scene_teamId(ctx context.Context, field graphql.Col return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec 
*executionContext) fieldContext_Scene_teamId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -27498,22 +36801,31 @@ func (ec *executionContext) _Scene_propertyId(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + 
return fc, nil +} +func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_createdAt(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.CreatedAt, nil @@ -27533,22 +36845,31 @@ func (ec *executionContext) _Scene_createdAt(ctx context.Context, field graphql. return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_updatedAt(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.UpdatedAt, nil @@ -27568,22 +36889,31 @@ func (ec *executionContext) _Scene_updatedAt(ctx context.Context, field graphql. return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_updatedAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type DateTime does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_rootLayerId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.RootLayerID, nil @@ -27603,22 +36933,31 @@ func (ec *executionContext) _Scene_rootLayerId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret 
graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_rootLayerId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_widgets(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Widgets, nil @@ -27638,22 +36977,51 @@ func (ec *executionContext) _Scene_widgets(ctx context.Context, field graphql.Co return ec.marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidgetแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_widgets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, 
IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_plugins(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Plugins, nil @@ -27673,22 +37041,41 @@ func (ec *executionContext) _Scene_plugins(ctx context.Context, field graphql.Co return ec.marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePluginแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_widgetAlignSystem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_plugins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _Scene_widgetAlignSystem(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.WidgetAlignSystem, nil @@ -27705,22 +37092,37 @@ func (ec *executionContext) _Scene_widgetAlignSystem(ctx context.Context, field return ec.marshalOWidgetAlignSystem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAlignSystem(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_widgetAlignSystem(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "inner": + return ec.fieldContext_WidgetAlignSystem_inner(ctx, field) + case "outer": + return ec.fieldContext_WidgetAlignSystem_outer(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetAlignSystem", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DynamicDatasetSchemas, nil @@ -27740,22 +37142,53 @@ func (ec *executionContext) _Scene_dynamicDatasetSchemas(ctx context.Context, fi return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Scene_dynamicDatasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_project(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Scene().Project(rctx, obj) @@ -27772,22 +37205,75 @@ func (ec *executionContext) _Scene_project(ctx context.Context, field graphql.Co return ec.marshalOProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx, field.Selections, res) } +func (ec 
*executionContext) fieldContext_Scene_project(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Project_id(ctx, field) + case "isArchived": + return ec.fieldContext_Project_isArchived(ctx, field) + case "isBasicAuthActive": + return ec.fieldContext_Project_isBasicAuthActive(ctx, field) + case "basicAuthUsername": + return ec.fieldContext_Project_basicAuthUsername(ctx, field) + case "basicAuthPassword": + return ec.fieldContext_Project_basicAuthPassword(ctx, field) + case "createdAt": + return ec.fieldContext_Project_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Project_updatedAt(ctx, field) + case "publishedAt": + return ec.fieldContext_Project_publishedAt(ctx, field) + case "name": + return ec.fieldContext_Project_name(ctx, field) + case "description": + return ec.fieldContext_Project_description(ctx, field) + case "alias": + return ec.fieldContext_Project_alias(ctx, field) + case "publicTitle": + return ec.fieldContext_Project_publicTitle(ctx, field) + case "publicDescription": + return ec.fieldContext_Project_publicDescription(ctx, field) + case "publicImage": + return ec.fieldContext_Project_publicImage(ctx, field) + case "publicNoIndex": + return ec.fieldContext_Project_publicNoIndex(ctx, field) + case "imageUrl": + return ec.fieldContext_Project_imageUrl(ctx, field) + case "teamId": + return ec.fieldContext_Project_teamId(ctx, field) + case "visualizer": + return ec.fieldContext_Project_visualizer(ctx, field) + case "publishmentStatus": + return ec.fieldContext_Project_publishmentStatus(ctx, field) + case "team": + return ec.fieldContext_Project_team(ctx, field) + case "scene": + return ec.fieldContext_Project_scene(ctx, field) + } + return 
nil, fmt.Errorf("no field named %q was found under type Project", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Scene().Team(rctx, obj) @@ -27804,22 +37290,45 @@ func (ec *executionContext) _Scene_team(ctx context.Context, field graphql.Colle return ec.marshalOTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Scene_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec 
*executionContext) _Scene_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Scene().Property(rctx, obj) @@ -27836,22 +37345,45 @@ func (ec *executionContext) _Scene_property(ctx context.Context, field graphql.C return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Scene_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field 
graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_rootLayer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Scene().RootLayer(rctx, obj) @@ -27868,32 +37400,78 @@ func (ec *executionContext) _Scene_rootLayer(ctx context.Context, field graphql. return ec.marshalOLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Scene_rootLayer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Scene", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_LayerGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_LayerGroup_sceneId(ctx, field) + case "name": + return ec.fieldContext_LayerGroup_name(ctx, field) + case "isVisible": + return ec.fieldContext_LayerGroup_isVisible(ctx, field) + case "propertyId": + return ec.fieldContext_LayerGroup_propertyId(ctx, field) + case 
"pluginId": + return ec.fieldContext_LayerGroup_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_LayerGroup_extensionId(ctx, field) + case "infobox": + return ec.fieldContext_LayerGroup_infobox(ctx, field) + case "parentId": + return ec.fieldContext_LayerGroup_parentId(ctx, field) + case "linkedDatasetSchemaId": + return ec.fieldContext_LayerGroup_linkedDatasetSchemaId(ctx, field) + case "root": + return ec.fieldContext_LayerGroup_root(ctx, field) + case "layerIds": + return ec.fieldContext_LayerGroup_layerIds(ctx, field) + case "tags": + return ec.fieldContext_LayerGroup_tags(ctx, field) + case "parent": + return ec.fieldContext_LayerGroup_parent(ctx, field) + case "property": + return ec.fieldContext_LayerGroup_property(ctx, field) + case "plugin": + return ec.fieldContext_LayerGroup_plugin(ctx, field) + case "extension": + return ec.fieldContext_LayerGroup_extension(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_LayerGroup_linkedDatasetSchema(ctx, field) + case "layers": + return ec.fieldContext_LayerGroup_layers(ctx, field) + case "scene": + return ec.fieldContext_LayerGroup_scene(ctx, field) + case "scenePlugin": + return ec.fieldContext_LayerGroup_scenePlugin(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type LayerGroup", field.Name) + }, + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Scene_datasetSchemas_args(ctx, rawArgs) +func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_datasetSchemas(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Scene().DatasetSchemas(rctx, obj, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Scene().DatasetSchemas(rctx, obj, fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -27910,22 +37488,52 @@ func (ec *executionContext) _Scene_datasetSchemas(ctx context.Context, field gra return ec.marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Scene_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_Scene_datasetSchemas(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_DatasetSchemaConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_DatasetSchemaConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_DatasetSchemaConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchemaConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchemaConnection", field.Name) + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - 
IsMethod: false, - IsResolver: false, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Scene_datasetSchemas_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _Scene_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_tagIds(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TagIds, nil @@ -27945,22 +37553,31 @@ func (ec *executionContext) _Scene_tagIds(ctx context.Context, field graphql.Col return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Scene_tagIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _Scene_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_tags(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: true, - 
IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.Scene().Tags(rctx, obj) @@ -27980,22 +37597,31 @@ func (ec *executionContext) _Scene_tags(ctx context.Context, field graphql.Colle return ec.marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Scene_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _Scene_clusters(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Scene) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Scene_clusters(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Scene", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Clusters, nil @@ -28015,22 +37641,41 @@ func (ec *executionContext) _Scene_clusters(ctx context.Context, field graphql.C return ec.marshalNCluster2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšClusterแš„(ctx, field.Selections, res) } +func (ec *executionContext) 
fieldContext_Scene_clusters(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Scene", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Cluster_id(ctx, field) + case "name": + return ec.fieldContext_Cluster_name(ctx, field) + case "propertyId": + return ec.fieldContext_Cluster_propertyId(ctx, field) + case "property": + return ec.fieldContext_Cluster_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Cluster", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ScenePlugin", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -28050,22 +37695,31 @@ func (ec *executionContext) _ScenePlugin_pluginId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = 
graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_ScenePlugin_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "ScenePlugin", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -28082,22 +37736,31 @@ func (ec *executionContext) _ScenePlugin_propertyId(ctx context.Context, field g return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ScenePlugin_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err 
:= ec.fieldContext_ScenePlugin_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ScenePlugin", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.ScenePlugin().Plugin(rctx, obj) @@ -28114,22 +37777,65 @@ func (ec *executionContext) _ScenePlugin_plugin(ctx context.Context, field graph return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ScenePlugin_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case 
"allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _ScenePlugin_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.ScenePlugin) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_ScenePlugin_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "ScenePlugin", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.ScenePlugin().Property(rctx, obj) @@ -28146,22 +37852,45 @@ func (ec *executionContext) _ScenePlugin_property(ctx context.Context, field gra return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_ScenePlugin_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "ScenePlugin", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "SceneWidget", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -28181,22 +37910,31 @@ func (ec *executionContext) _SceneWidget_id(ctx context.Context, field graphql.C return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SceneWidget_id(ctx context.Context, field 
graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SceneWidget", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_pluginId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -28216,22 +37954,31 @@ func (ec *executionContext) _SceneWidget_pluginId(ctx context.Context, field gra return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SceneWidget_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SceneWidget", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec 
*executionContext) _SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_extensionId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ExtensionID, nil @@ -28251,22 +37998,31 @@ func (ec *executionContext) _SceneWidget_extensionId(ctx context.Context, field return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SceneWidget_extensionId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SceneWidget", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_propertyId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) 
+ ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PropertyID, nil @@ -28286,22 +38042,31 @@ func (ec *executionContext) _SceneWidget_propertyId(ctx context.Context, field g return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SceneWidget_propertyId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SceneWidget", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_enabled(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Enabled, nil @@ -28321,22 +38086,31 @@ func (ec *executionContext) _SceneWidget_enabled(ctx context.Context, field grap return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec 
*executionContext) _SceneWidget_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SceneWidget_enabled(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SceneWidget", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _SceneWidget_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_extended(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Extended, nil @@ -28356,22 +38130,31 @@ func (ec *executionContext) _SceneWidget_extended(ctx context.Context, field gra return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SceneWidget_extended(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + 
func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_plugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "SceneWidget", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.SceneWidget().Plugin(rctx, obj) @@ -28388,22 +38171,65 @@ func (ec *executionContext) _SceneWidget_plugin(ctx context.Context, field graph return ec.marshalOPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SceneWidget_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case 
"extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case "allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_extension(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "SceneWidget", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.SceneWidget().Extension(rctx, obj) @@ -28420,22 +38246,67 @@ func (ec *executionContext) _SceneWidget_extension(ctx context.Context, field gr return ec.marshalOPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SceneWidget_extension(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err 
error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "extensionId": + return ec.fieldContext_PluginExtension_extensionId(ctx, field) + case "pluginId": + return ec.fieldContext_PluginExtension_pluginId(ctx, field) + case "type": + return ec.fieldContext_PluginExtension_type(ctx, field) + case "name": + return ec.fieldContext_PluginExtension_name(ctx, field) + case "description": + return ec.fieldContext_PluginExtension_description(ctx, field) + case "icon": + return ec.fieldContext_PluginExtension_icon(ctx, field) + case "singleOnly": + return ec.fieldContext_PluginExtension_singleOnly(ctx, field) + case "widgetLayout": + return ec.fieldContext_PluginExtension_widgetLayout(ctx, field) + case "visualizer": + return ec.fieldContext_PluginExtension_visualizer(ctx, field) + case "propertySchemaId": + return ec.fieldContext_PluginExtension_propertySchemaId(ctx, field) + case "allTranslatedName": + return ec.fieldContext_PluginExtension_allTranslatedName(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_PluginExtension_allTranslatedDescription(ctx, field) + case "plugin": + return ec.fieldContext_PluginExtension_plugin(ctx, field) + case "sceneWidget": + return ec.fieldContext_PluginExtension_sceneWidget(ctx, field) + case "propertySchema": + return ec.fieldContext_PluginExtension_propertySchema(ctx, field) + case "translatedName": + return ec.fieldContext_PluginExtension_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_PluginExtension_translatedDescription(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type PluginExtension", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _SceneWidget_property(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SceneWidget) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext_SceneWidget_property(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "SceneWidget", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.SceneWidget().Property(rctx, obj) @@ -28452,22 +38323,45 @@ func (ec *executionContext) _SceneWidget_property(ctx context.Context, field gra return ec.marshalOProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SceneWidget_property(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SceneWidget", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Property_id(ctx, field) + case "schemaId": + return ec.fieldContext_Property_schemaId(ctx, field) + case "items": + return ec.fieldContext_Property_items(ctx, field) + case "schema": + return ec.fieldContext_Property_schema(ctx, field) + case "layer": + return ec.fieldContext_Property_layer(ctx, field) + case "merged": + return ec.fieldContext_Property_merged(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Property", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_SignupPayload_user(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "SignupPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.User, nil @@ -28487,22 +38381,39 @@ func (ec *executionContext) _SignupPayload_user(ctx context.Context, field graph return ec.marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } -func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SignupPayload_user(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SignupPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_User_id(ctx, field) + case "name": + return ec.fieldContext_User_name(ctx, field) + case "email": + return ec.fieldContext_User_email(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type User", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SignupPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_SignupPayload_team(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Team, nil @@ -28522,22 +38433,45 @@ func (ec *executionContext) _SignupPayload_team(ctx context.Context, field graph return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SignupPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SignupPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_sceneId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc 
:= &graphql.FieldContext{ - Object: "SyncDatasetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -28557,22 +38491,31 @@ func (ec *executionContext) _SyncDatasetPayload_sceneId(ctx context.Context, fie return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SyncDatasetPayload_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SyncDatasetPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_url(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.URL, nil @@ -28592,22 
+38535,31 @@ func (ec *executionContext) _SyncDatasetPayload_url(ctx context.Context, field g return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_SyncDatasetPayload_url(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "SyncDatasetPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchema, nil @@ -28627,22 +38579,53 @@ func (ec *executionContext) _SyncDatasetPayload_datasetSchema(ctx context.Contex return ec.marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SyncDatasetPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.SyncDatasetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_SyncDatasetPayload_dataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "SyncDatasetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Dataset, nil @@ -28662,22 
+38645,45 @@ func (ec *executionContext) _SyncDatasetPayload_dataset(ctx context.Context, fie return ec.marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_SyncDatasetPayload_dataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "SyncDatasetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _TagGroup_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagGroup", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -28697,22 +38703,31 @@ func (ec *executionContext) _TagGroup_id(ctx context.Context, field 
graphql.Coll return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagGroup_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -28732,22 +38747,31 @@ func (ec *executionContext) _TagGroup_sceneId(ctx context.Context, field graphql return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := 
&graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagGroup_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_label(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Label, nil @@ -28767,22 +38791,31 @@ func (ec *executionContext) _TagGroup_label(ctx context.Context, field graphql.C return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _TagGroup_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagGroup_label(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagGroup", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec 
*executionContext) _TagGroup_tagIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_tagIds(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TagIds, nil @@ -28799,22 +38832,31 @@ func (ec *executionContext) _TagGroup_tagIds(ctx context.Context, field graphql. return ec.marshalOID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagGroup_tagIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_tags(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return 
ec.resolvers.TagGroup().Tags(rctx, obj) @@ -28834,22 +38876,57 @@ func (ec *executionContext) _TagGroup_tags(ctx context.Context, field graphql.Co return ec.marshalNTagItem2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItemแš„(ctx, field.Selections, res) } -func (ec *executionContext) _TagGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagGroup_tags(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagGroup", Field: field, - Args: nil, IsMethod: true, IsResolver: true, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagItem_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagItem_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagItem_label(ctx, field) + case "parentId": + return ec.fieldContext_TagItem_parentId(ctx, field) + case "linkedDatasetID": + return ec.fieldContext_TagItem_linkedDatasetID(ctx, field) + case "linkedDatasetSchemaID": + return ec.fieldContext_TagItem_linkedDatasetSchemaID(ctx, field) + case "linkedDatasetFieldID": + return ec.fieldContext_TagItem_linkedDatasetFieldID(ctx, field) + case "linkedDatasetSchema": + return ec.fieldContext_TagItem_linkedDatasetSchema(ctx, field) + case "linkedDataset": + return ec.fieldContext_TagItem_linkedDataset(ctx, field) + case "linkedDatasetField": + return ec.fieldContext_TagItem_linkedDatasetField(ctx, field) + case "parent": + return ec.fieldContext_TagItem_parent(ctx, field) + case "layers": + return ec.fieldContext_TagItem_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found 
under type TagItem", field.Name) + }, + } + return fc, nil +} +func (ec *executionContext) _TagGroup_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_scene(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagGroup().Scene(rctx, obj) @@ -28866,22 +38943,71 @@ func (ec *executionContext) _TagGroup_scene(ctx context.Context, field graphql.C return ec.marshalOScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagGroup_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return 
ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _TagGroup_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagGroup) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagGroup_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagGroup", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagGroup().Layers(rctx, obj) @@ -28901,22 +39027,31 @@ func (ec *executionContext) _TagGroup_layers(ctx context.Context, field graphql. 
return ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagGroup_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagGroup", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _TagItem_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagItem", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -28936,22 +39071,31 @@ func (ec *executionContext) _TagItem_id(ctx context.Context, field graphql.Colle return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagItem_id(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_sceneId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneID, nil @@ -28971,22 +39115,31 @@ func (ec *executionContext) _TagItem_sceneId(ctx context.Context, field graphql. 
return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagItem_sceneId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_label(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Label, nil @@ -29006,22 +39159,31 @@ func (ec *executionContext) _TagItem_label(ctx context.Context, field graphql.Co return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _TagItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagItem_label(ctx context.Context, 
field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagItem_parentId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_parentId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ParentID, nil @@ -29038,22 +39200,31 @@ func (ec *executionContext) _TagItem_parentId(ctx context.Context, field graphql return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagItem_parentId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec 
*executionContext) _TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetID(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetID, nil @@ -29070,22 +39241,31 @@ func (ec *executionContext) _TagItem_linkedDatasetID(ctx context.Context, field return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagItem_linkedDatasetID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetSchemaID(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, 
ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetSchemaID, nil @@ -29102,22 +39282,31 @@ func (ec *executionContext) _TagItem_linkedDatasetSchemaID(ctx context.Context, return ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TagItem_linkedDatasetSchemaID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "TagItem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetFieldID(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.LinkedDatasetFieldID, nil @@ -29134,22 +39323,31 @@ func (ec *executionContext) _TagItem_linkedDatasetFieldID(ctx context.Context, f return 
ec.marshalOID2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagItem_linkedDatasetFieldID(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _TagItem_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagItem().LinkedDatasetSchema(rctx, obj) @@ -29166,22 +39364,53 @@ func (ec *executionContext) _TagItem_linkedDatasetSchema(ctx context.Context, fi return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagItem_linkedDatasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) 
(*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _TagItem_linkedDataset(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDataset(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagItem().LinkedDataset(rctx, obj) @@ -29198,22 +39427,45 @@ func (ec *executionContext) _TagItem_linkedDataset(ctx context.Context, field gr return ec.marshalODataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx, 
field.Selections, res) } +func (ec *executionContext) fieldContext_TagItem_linkedDataset(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Dataset_id(ctx, field) + case "source": + return ec.fieldContext_Dataset_source(ctx, field) + case "schemaId": + return ec.fieldContext_Dataset_schemaId(ctx, field) + case "fields": + return ec.fieldContext_Dataset_fields(ctx, field) + case "schema": + return ec.fieldContext_Dataset_schema(ctx, field) + case "name": + return ec.fieldContext_Dataset_name(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Dataset", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _TagItem_linkedDatasetField(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_linkedDatasetField(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagItem().LinkedDatasetField(rctx, obj) @@ -29230,22 +39482,49 @@ func (ec *executionContext) _TagItem_linkedDatasetField(ctx context.Context, fie return ec.marshalODatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx, field.Selections, res) } +func 
(ec *executionContext) fieldContext_TagItem_linkedDatasetField(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "fieldId": + return ec.fieldContext_DatasetField_fieldId(ctx, field) + case "schemaId": + return ec.fieldContext_DatasetField_schemaId(ctx, field) + case "source": + return ec.fieldContext_DatasetField_source(ctx, field) + case "type": + return ec.fieldContext_DatasetField_type(ctx, field) + case "value": + return ec.fieldContext_DatasetField_value(ctx, field) + case "schema": + return ec.fieldContext_DatasetField_schema(ctx, field) + case "field": + return ec.fieldContext_DatasetField_field(ctx, field) + case "valueRef": + return ec.fieldContext_DatasetField_valueRef(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetField", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _TagItem_parent(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_parent(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagItem().Parent(rctx, obj) @@ -29262,22 +39541,47 @@ func (ec *executionContext) _TagItem_parent(ctx context.Context, field graphql.C return 
ec.marshalOTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagItem_parent(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_TagGroup_id(ctx, field) + case "sceneId": + return ec.fieldContext_TagGroup_sceneId(ctx, field) + case "label": + return ec.fieldContext_TagGroup_label(ctx, field) + case "tagIds": + return ec.fieldContext_TagGroup_tagIds(ctx, field) + case "tags": + return ec.fieldContext_TagGroup_tags(ctx, field) + case "scene": + return ec.fieldContext_TagGroup_scene(ctx, field) + case "layers": + return ec.fieldContext_TagGroup_layers(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TagGroup", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _TagItem_layers(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TagItem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TagItem_layers(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "TagItem", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TagItem().Layers(rctx, obj) @@ -29297,22 +39601,31 @@ func (ec *executionContext) _TagItem_layers(ctx context.Context, field graphql.C return 
ec.marshalNLayer2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TagItem_layers(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TagItem", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _Team_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Team", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -29332,22 +39645,31 @@ func (ec *executionContext) _Team_id(ctx context.Context, field graphql.Collecte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Team_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ Object: "Team", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Team_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -29367,22 +39689,31 @@ func (ec *executionContext) _Team_name(ctx context.Context, field graphql.Collec return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _Team_members(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Team_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Team", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Team_members(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_members(ctx, field) + if err != nil { + return 
graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Members, nil @@ -29402,22 +39733,39 @@ func (ec *executionContext) _Team_members(ctx context.Context, field graphql.Col return ec.marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMemberแš„(ctx, field.Selections, res) } -func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Team_members(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Team", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "userId": + return ec.fieldContext_TeamMember_userId(ctx, field) + case "role": + return ec.fieldContext_TeamMember_role(ctx, field) + case "user": + return ec.fieldContext_TeamMember_user(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type TeamMember", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_personal(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } 
+ }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Personal, nil @@ -29437,32 +39785,34 @@ func (ec *executionContext) _Team_personal(ctx context.Context, field graphql.Co return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Team_personal(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Team", Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Team_assets_args(ctx, rawArgs) +func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_assets(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Team().Assets(rctx, obj, args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), 
args["before"].(*usecase.Cursor)) + return ec.resolvers.Team().Assets(rctx, obj, fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -29479,32 +39829,55 @@ func (ec *executionContext) _Team_assets(ctx context.Context, field graphql.Coll return ec.marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx, field.Selections, res) } -func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Team_assets(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Team", Field: field, - Args: nil, IsMethod: true, IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_AssetConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_AssetConnection_nodes(ctx, field) + case "pageInfo": + return ec.fieldContext_AssetConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_AssetConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type AssetConnection", field.Name) + }, } - + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field_Team_projects_args(ctx, rawArgs) - if err != nil { + if fc.Args, err = ec.field_Team_assets_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { ec.Error(ctx, err) + return + } + 
return fc, nil +} + +func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Team) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Team_projects(ctx, field) + if err != nil { return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return ec.resolvers.Team().Projects(rctx, obj, args["includeArchived"].(*bool), args["first"].(*int), args["last"].(*int), args["after"].(*usecase.Cursor), args["before"].(*usecase.Cursor)) + return ec.resolvers.Team().Projects(rctx, obj, fc.Args["includeArchived"].(*bool), fc.Args["first"].(*int), fc.Args["last"].(*int), fc.Args["after"].(*usecase.Cursor), fc.Args["before"].(*usecase.Cursor)) }) if err != nil { ec.Error(ctx, err) @@ -29521,22 +39894,52 @@ func (ec *executionContext) _Team_projects(ctx context.Context, field graphql.Co return ec.marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx, field.Selections, res) } -func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { +func (ec *executionContext) fieldContext_Team_projects(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Team", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "edges": + return ec.fieldContext_ProjectConnection_edges(ctx, field) + case "nodes": + return ec.fieldContext_ProjectConnection_nodes(ctx, field) + case "pageInfo": + return 
ec.fieldContext_ProjectConnection_pageInfo(ctx, field) + case "totalCount": + return ec.fieldContext_ProjectConnection_totalCount(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ProjectConnection", field.Name) + }, + } defer func() { if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null + err = ec.Recover(ctx, r) + ec.Error(ctx, err) } }() - fc := &graphql.FieldContext{ - Object: "TeamMember", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Team_projects_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TeamMember_userId(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.UserID, nil @@ -29556,22 +39959,31 @@ func (ec *executionContext) _TeamMember_userId(ctx context.Context, field graphq return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_TeamMember_userId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) 
{ + fc = &graphql.FieldContext{ Object: "TeamMember", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TeamMember_role(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Role, nil @@ -29591,22 +40003,31 @@ func (ec *executionContext) _TeamMember_role(ctx context.Context, field graphql. return ec.marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšRole(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TeamMember_role(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Role does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.TeamMember) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_TeamMember_user(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - 
fc := &graphql.FieldContext{ - Object: "TeamMember", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: true, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return ec.resolvers.TeamMember().User(rctx, obj) @@ -29623,22 +40044,39 @@ func (ec *executionContext) _TeamMember_user(ctx context.Context, field graphql. return ec.marshalOUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_TeamMember_user(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "TeamMember", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_User_id(ctx, field) + case "name": + return ec.fieldContext_User_name(ctx, field) + case "email": + return ec.fieldContext_User_email(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type User", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_fontFamily(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Typography", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context 
from middleware stack in children return obj.FontFamily, nil @@ -29655,22 +40093,31 @@ func (ec *executionContext) _Typography_fontFamily(ctx context.Context, field gr return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Typography_fontFamily(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Typography", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_fontWeight(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FontWeight, nil @@ -29687,22 +40134,31 @@ func (ec *executionContext) _Typography_fontWeight(ctx context.Context, field gr return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_fontSize(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, 
ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Typography_fontWeight(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Typography", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Typography_fontSize(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_fontSize(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.FontSize, nil @@ -29719,22 +40175,31 @@ func (ec *executionContext) _Typography_fontSize(ctx context.Context, field grap return ec.marshalOInt2แš–int(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_color(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Typography_fontSize(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Typography", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type Int does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Typography_color(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_color(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Color, nil @@ -29751,22 +40216,31 @@ func (ec *executionContext) _Typography_color(ctx context.Context, field graphql return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_textAlign(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Typography_color(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Typography", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Typography_textAlign(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_textAlign(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.TextAlign, nil @@ -29778,27 +40252,36 @@ func (ec *executionContext) _Typography_textAlign(ctx context.Context, field gra if resTmp == nil { return graphql.Null } - res := resTmp.(*gqlmodel.TextAlign) - fc.Result = res - return ec.marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx, field.Selections, res) + res := resTmp.(*gqlmodel.TextAlign) + fc.Result = res + return ec.marshalOTextAlign2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTextAlign(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Typography_textAlign(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type TextAlign does not have child fields") + }, + } + return fc, nil } func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_bold(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "Typography", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Bold, nil @@ -29815,22 
+40298,31 @@ func (ec *executionContext) _Typography_bold(ctx context.Context, field graphql. return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_italic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_Typography_bold(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Typography", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Typography_italic(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_italic(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Italic, nil @@ -29847,22 +40339,31 @@ func (ec *executionContext) _Typography_italic(ctx context.Context, field graphq return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } -func (ec *executionContext) _Typography_underline(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec 
*executionContext) fieldContext_Typography_italic(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "Typography", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _Typography_underline(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Typography) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Typography_underline(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Underline, nil @@ -29879,22 +40380,31 @@ func (ec *executionContext) _Typography_underline(ctx context.Context, field gra return ec.marshalOBoolean2แš–bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_Typography_underline(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Typography", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UninstallPluginPayload_pluginId(ctx, field) + if err != nil { + return graphql.Null + } + ctx = 
graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UninstallPluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PluginID, nil @@ -29914,22 +40424,31 @@ func (ec *executionContext) _UninstallPluginPayload_pluginId(ctx context.Context return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_UninstallPluginPayload_pluginId(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "UninstallPluginPayload", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UninstallPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UninstallPluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -29949,22 +40468,71 @@ func (ec *executionContext) _UninstallPluginPayload_scene(ctx context.Context, f return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UninstallPluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UninstallPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return 
ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateClusterPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateClusterPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateClusterPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -29984,22 +40552,71 @@ func (ec *executionContext) _UpdateClusterPayload_scene(ctx context.Context, fie return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateClusterPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, 
field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateClusterPayload_cluster(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateClusterPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateClusterPayload_cluster(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateClusterPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Cluster, nil @@ -30019,22 +40636,41 @@ func (ec *executionContext) _UpdateClusterPayload_cluster(ctx context.Context, f return ec.marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateClusterPayload_cluster(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateClusterPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Cluster_id(ctx, field) + case "name": + return ec.fieldContext_Cluster_name(ctx, field) + case "propertyId": + return ec.fieldContext_Cluster_propertyId(ctx, field) + case "property": + return ec.fieldContext_Cluster_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Cluster", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateDatasetSchemaPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateDatasetSchemaPayload_datasetSchema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateDatasetSchemaPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DatasetSchema, nil @@ 
-30051,22 +40687,53 @@ func (ec *executionContext) _UpdateDatasetSchemaPayload_datasetSchema(ctx contex return ec.marshalODatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateDatasetSchemaPayload_datasetSchema(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateDatasetSchemaPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_DatasetSchema_id(ctx, field) + case "source": + return ec.fieldContext_DatasetSchema_source(ctx, field) + case "name": + return ec.fieldContext_DatasetSchema_name(ctx, field) + case "sceneId": + return ec.fieldContext_DatasetSchema_sceneId(ctx, field) + case "fields": + return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "representativeFieldId": + return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) + case "dynamic": + return ec.fieldContext_DatasetSchema_dynamic(ctx, field) + case "datasets": + return ec.fieldContext_DatasetSchema_datasets(ctx, field) + case "scene": + return ec.fieldContext_DatasetSchema_scene(ctx, field) + case "representativeField": + return ec.fieldContext_DatasetSchema_representativeField(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type DatasetSchema", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateLayerPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateLayerPayload_layer(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, 
ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateLayerPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Layer, nil @@ -30086,22 +40753,31 @@ func (ec *executionContext) _UpdateLayerPayload_layer(ctx context.Context, field return ec.marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateLayerPayload_layer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateLayerPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateMePayload_me(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMePayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateMePayload_me(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateMePayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Me, nil @@ -30121,22 +40797,51 @@ func (ec *executionContext) _UpdateMePayload_me(ctx context.Context, field graph 
return ec.marshalNMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateMePayload_me(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateMePayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Me_id(ctx, field) + case "name": + return ec.fieldContext_Me_name(ctx, field) + case "email": + return ec.fieldContext_Me_email(ctx, field) + case "lang": + return ec.fieldContext_Me_lang(ctx, field) + case "theme": + return ec.fieldContext_Me_theme(ctx, field) + case "myTeamId": + return ec.fieldContext_Me_myTeamId(ctx, field) + case "auths": + return ec.fieldContext_Me_auths(ctx, field) + case "teams": + return ec.fieldContext_Me_teams(ctx, field) + case "myTeam": + return ec.fieldContext_Me_myTeam(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Me", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateMemberOfTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateMemberOfTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateMemberOfTeamPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Team, nil 
@@ -30156,22 +40861,45 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload_team(ctx context.Context, return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateMemberOfTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateMemberOfTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateTagPayload_tag(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTagPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateTagPayload_tag(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateTagPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Tag, nil @@ -30191,22 +40919,31 @@ func (ec *executionContext) 
_UpdateTagPayload_tag(ctx context.Context, field gra return ec.marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateTagPayload_tag(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateTagPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("FieldContext.Child cannot be called on type INTERFACE") + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateTeamPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateTeamPayload_team(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateTeamPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Team, nil @@ -30226,22 +40963,45 @@ func (ec *executionContext) _UpdateTeamPayload_team(ctx context.Context, field g return ec.marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateTeamPayload_team(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateTeamPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, 
field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Team_id(ctx, field) + case "name": + return ec.fieldContext_Team_name(ctx, field) + case "members": + return ec.fieldContext_Team_members(ctx, field) + case "personal": + return ec.fieldContext_Team_personal(ctx, field) + case "assets": + return ec.fieldContext_Team_assets(ctx, field) + case "projects": + return ec.fieldContext_Team_projects(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Team", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateWidgetAlignSystemPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetAlignSystemPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateWidgetAlignSystemPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateWidgetAlignSystemPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -30261,22 +41021,71 @@ func (ec *executionContext) _UpdateWidgetAlignSystemPayload_scene(ctx context.Co return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateWidgetAlignSystemPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateWidgetAlignSystemPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateWidgetPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, 
r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -30296,22 +41105,71 @@ func (ec *executionContext) _UpdateWidgetPayload_scene(ctx context.Context, fiel return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateWidgetPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case 
"property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpdateWidgetPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpdateWidgetPayload_sceneWidget(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpdateWidgetPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SceneWidget, nil @@ -30331,22 +41189,51 @@ func (ec *executionContext) _UpdateWidgetPayload_sceneWidget(ctx context.Context return ec.marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpdateWidgetPayload_sceneWidget(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpdateWidgetPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch 
field.Name { + case "id": + return ec.fieldContext_SceneWidget_id(ctx, field) + case "pluginId": + return ec.fieldContext_SceneWidget_pluginId(ctx, field) + case "extensionId": + return ec.fieldContext_SceneWidget_extensionId(ctx, field) + case "propertyId": + return ec.fieldContext_SceneWidget_propertyId(ctx, field) + case "enabled": + return ec.fieldContext_SceneWidget_enabled(ctx, field) + case "extended": + return ec.fieldContext_SceneWidget_extended(ctx, field) + case "plugin": + return ec.fieldContext_SceneWidget_plugin(ctx, field) + case "extension": + return ec.fieldContext_SceneWidget_extension(ctx, field) + case "property": + return ec.fieldContext_SceneWidget_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type SceneWidget", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpgradePluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UpgradePluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpgradePluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -30366,22 +41253,71 @@ func (ec *executionContext) _UpgradePluginPayload_scene(ctx context.Context, fie return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpgradePluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpgradePluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UpgradePluginPayload) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_UpgradePluginPayload_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UpgradePluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ScenePlugin, nil @@ -30401,22 +41337,41 @@ func (ec *executionContext) _UpgradePluginPayload_scenePlugin(ctx context.Contex return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UpgradePluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UpgradePluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UploadPluginPayload_plugin(ctx, field) + if err != nil { + return graphql.Null + } + 
ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UploadPluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Plugin, nil @@ -30436,22 +41391,65 @@ func (ec *executionContext) _UploadPluginPayload_plugin(ctx context.Context, fie return ec.marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UploadPluginPayload_plugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return ec.fieldContext_Plugin_id(ctx, field) + case "sceneId": + return ec.fieldContext_Plugin_sceneId(ctx, field) + case "name": + return ec.fieldContext_Plugin_name(ctx, field) + case "version": + return ec.fieldContext_Plugin_version(ctx, field) + case "description": + return ec.fieldContext_Plugin_description(ctx, field) + case "author": + return ec.fieldContext_Plugin_author(ctx, field) + case "repositoryUrl": + return ec.fieldContext_Plugin_repositoryUrl(ctx, field) + case "propertySchemaId": + return ec.fieldContext_Plugin_propertySchemaId(ctx, field) + case "extensions": + return ec.fieldContext_Plugin_extensions(ctx, field) + case "scenePlugin": + return ec.fieldContext_Plugin_scenePlugin(ctx, field) + case "allTranslatedDescription": + return ec.fieldContext_Plugin_allTranslatedDescription(ctx, field) + case 
"allTranslatedName": + return ec.fieldContext_Plugin_allTranslatedName(ctx, field) + case "scene": + return ec.fieldContext_Plugin_scene(ctx, field) + case "translatedName": + return ec.fieldContext_Plugin_translatedName(ctx, field) + case "translatedDescription": + return ec.fieldContext_Plugin_translatedDescription(ctx, field) + case "propertySchema": + return ec.fieldContext_Plugin_propertySchema(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Plugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UploadPluginPayload_scene(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UploadPluginPayload_scene(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UploadPluginPayload", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Scene, nil @@ -30471,22 +41469,71 @@ func (ec *executionContext) _UploadPluginPayload_scene(ctx context.Context, fiel return ec.marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UploadPluginPayload_scene(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "id": + return 
ec.fieldContext_Scene_id(ctx, field) + case "projectId": + return ec.fieldContext_Scene_projectId(ctx, field) + case "teamId": + return ec.fieldContext_Scene_teamId(ctx, field) + case "propertyId": + return ec.fieldContext_Scene_propertyId(ctx, field) + case "createdAt": + return ec.fieldContext_Scene_createdAt(ctx, field) + case "updatedAt": + return ec.fieldContext_Scene_updatedAt(ctx, field) + case "rootLayerId": + return ec.fieldContext_Scene_rootLayerId(ctx, field) + case "widgets": + return ec.fieldContext_Scene_widgets(ctx, field) + case "plugins": + return ec.fieldContext_Scene_plugins(ctx, field) + case "widgetAlignSystem": + return ec.fieldContext_Scene_widgetAlignSystem(ctx, field) + case "dynamicDatasetSchemas": + return ec.fieldContext_Scene_dynamicDatasetSchemas(ctx, field) + case "project": + return ec.fieldContext_Scene_project(ctx, field) + case "team": + return ec.fieldContext_Scene_team(ctx, field) + case "property": + return ec.fieldContext_Scene_property(ctx, field) + case "rootLayer": + return ec.fieldContext_Scene_rootLayer(ctx, field) + case "datasetSchemas": + return ec.fieldContext_Scene_datasetSchemas(ctx, field) + case "tagIds": + return ec.fieldContext_Scene_tagIds(ctx, field) + case "tags": + return ec.fieldContext_Scene_tags(ctx, field) + case "clusters": + return ec.fieldContext_Scene_clusters(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type Scene", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.UploadPluginPayload) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_UploadPluginPayload_scenePlugin(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "UploadPluginPayload", - Field: field, - 
Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ScenePlugin, nil @@ -30506,22 +41553,41 @@ func (ec *executionContext) _UploadPluginPayload_scenePlugin(ctx context.Context return ec.marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_UploadPluginPayload_scenePlugin(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "UploadPluginPayload", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "pluginId": + return ec.fieldContext_ScenePlugin_pluginId(ctx, field) + case "propertyId": + return ec.fieldContext_ScenePlugin_propertyId(ctx, field) + case "plugin": + return ec.fieldContext_ScenePlugin_plugin(ctx, field) + case "property": + return ec.fieldContext_ScenePlugin_property(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type ScenePlugin", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _User_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_User_id(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "User", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.ID, nil @@ -30541,22 +41607,31 @@ func (ec *executionContext) _User_id(ctx context.Context, field graphql.Collecte return ec.marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšID(ctx, field.Selections, res) } -func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_User_id(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "User", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _User_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_User_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -30576,22 +41651,31 @@ func (ec *executionContext) _User_name(ctx context.Context, field graphql.Collec return ec.marshalNString2string(ctx, field.Selections, res) } -func (ec *executionContext) _User_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - 
ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_User_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "User", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _User_email(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.User) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_User_email(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Email, nil @@ -30611,22 +41695,31 @@ func (ec *executionContext) _User_email(ctx context.Context, field graphql.Colle return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_User_email(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "User", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _WidgetAlignSystem_inner(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetAlignSystem_inner(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "WidgetAlignSystem", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Inner, nil @@ -30643,22 +41736,39 @@ func (ec *executionContext) _WidgetAlignSystem_inner(ctx context.Context, field return ec.marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetAlignSystem_outer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetAlignSystem_inner(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetAlignSystem", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "left": + return ec.fieldContext_WidgetZone_left(ctx, field) + case "center": + return ec.fieldContext_WidgetZone_center(ctx, field) + case "right": + return ec.fieldContext_WidgetZone_right(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetZone", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _WidgetAlignSystem_outer(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetAlignSystem) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext_WidgetAlignSystem_outer(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Outer, nil @@ -30675,22 +41785,39 @@ func (ec *executionContext) _WidgetAlignSystem_outer(ctx context.Context, field return ec.marshalOWidgetZone2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZone(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_WidgetAlignSystem_outer(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetAlignSystem", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "left": + return ec.fieldContext_WidgetZone_left(ctx, field) + case "center": + return ec.fieldContext_WidgetZone_center(ctx, field) + case "right": + return ec.fieldContext_WidgetZone_right(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetZone", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _WidgetArea_widgetIds(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetArea_widgetIds(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "WidgetArea", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.WidgetIds, nil @@ -30710,22 +41837,31 @@ func (ec *executionContext) _WidgetArea_widgetIds(ctx context.Context, field gra return ec.marshalNID2แš•githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšIDแš„(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetArea_align(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetArea_widgetIds(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetArea", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type ID does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _WidgetArea_align(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetArea) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetArea_align(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Align, nil @@ -30740,27 +41876,36 @@ func (ec *executionContext) _WidgetArea_align(ctx context.Context, field graphql } return graphql.Null } - res := resTmp.(gqlmodel.WidgetAreaAlign) - fc.Result = res - return 
ec.marshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx, field.Selections, res) + res := resTmp.(gqlmodel.WidgetAreaAlign) + fc.Result = res + return ec.marshalNWidgetAreaAlign2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaAlign(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_WidgetArea_align(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetArea", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetAreaAlign does not have child fields") + }, + } + return fc, nil } func (ec *executionContext) _WidgetExtendable_vertically(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetExtendable_vertically(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "WidgetExtendable", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Vertically, nil @@ -30780,22 +41925,31 @@ func (ec *executionContext) _WidgetExtendable_vertically(ctx context.Context, fi return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetExtendable_horizontally(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { - defer func() { - if r := 
recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetExtendable_vertically(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetExtendable", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _WidgetExtendable_horizontally(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetExtendable) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetExtendable_horizontally(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Horizontally, nil @@ -30815,22 +41969,31 @@ func (ec *executionContext) _WidgetExtendable_horizontally(ctx context.Context, return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_WidgetExtendable_horizontally(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetExtendable", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _WidgetLayout_extendable(ctx context.Context, field graphql.CollectedField, obj 
*gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_extendable(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "WidgetLayout", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Extendable, nil @@ -30850,22 +42013,37 @@ func (ec *executionContext) _WidgetLayout_extendable(ctx context.Context, field return ec.marshalNWidgetExtendable2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetExtendable(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetLayout_extended(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetLayout_extendable(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetLayout", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "vertically": + return ec.fieldContext_WidgetExtendable_vertically(ctx, field) + case "horizontally": + return ec.fieldContext_WidgetExtendable_horizontally(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetExtendable", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _WidgetLayout_extended(ctx 
context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_extended(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Extended, nil @@ -30885,22 +42063,31 @@ func (ec *executionContext) _WidgetLayout_extended(ctx context.Context, field gr return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetLayout_floating(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetLayout_extended(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetLayout", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _WidgetLayout_floating(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_floating(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { 
ctx = rctx // use context from middleware stack in children return obj.Floating, nil @@ -30920,22 +42107,31 @@ func (ec *executionContext) _WidgetLayout_floating(ctx context.Context, field gr return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetLayout_defaultLocation(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetLayout_floating(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetLayout", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _WidgetLayout_defaultLocation(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLayout) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLayout_defaultLocation(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DefaultLocation, nil @@ -30952,22 +42148,39 @@ func (ec *executionContext) _WidgetLayout_defaultLocation(ctx context.Context, f return ec.marshalOWidgetLocation2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetLocation(ctx, field.Selections, res) } +func (ec *executionContext) 
fieldContext_WidgetLayout_defaultLocation(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLayout", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "zone": + return ec.fieldContext_WidgetLocation_zone(ctx, field) + case "section": + return ec.fieldContext_WidgetLocation_section(ctx, field) + case "area": + return ec.fieldContext_WidgetLocation_area(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetLocation", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _WidgetLocation_zone(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLocation_zone(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "WidgetLocation", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Zone, nil @@ -30987,22 +42200,31 @@ func (ec *executionContext) _WidgetLocation_zone(ctx context.Context, field grap return ec.marshalNWidgetZoneType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetZoneType(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetLocation_section(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = 
graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetLocation_zone(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetLocation", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetZoneType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _WidgetLocation_section(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLocation_section(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Section, nil @@ -31022,22 +42244,31 @@ func (ec *executionContext) _WidgetLocation_section(ctx context.Context, field g return ec.marshalNWidgetSectionType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSectionType(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetLocation_area(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetLocation_section(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetLocation", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: 
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetSectionType does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) _WidgetLocation_area(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetLocation) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetLocation_area(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Area, nil @@ -31057,22 +42288,31 @@ func (ec *executionContext) _WidgetLocation_area(ctx context.Context, field grap return ec.marshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetAreaType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_WidgetLocation_area(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetLocation", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type WidgetAreaType does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _WidgetSection_top(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetSection_top(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: 
"WidgetSection", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Top, nil @@ -31089,22 +42329,37 @@ func (ec *executionContext) _WidgetSection_top(ctx context.Context, field graphq return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetSection_middle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetSection_top(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetSection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "widgetIds": + return ec.fieldContext_WidgetArea_widgetIds(ctx, field) + case "align": + return ec.fieldContext_WidgetArea_align(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetArea", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _WidgetSection_middle(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetSection_middle(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Middle, nil @@ -31121,22 +42376,37 @@ func (ec *executionContext) _WidgetSection_middle(ctx context.Context, field gra return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetSection_bottom(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetSection_middle(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetSection", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "widgetIds": + return ec.fieldContext_WidgetArea_widgetIds(ctx, field) + case "align": + return ec.fieldContext_WidgetArea_align(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetArea", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _WidgetSection_bottom(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetSection) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetSection_bottom(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Bottom, nil @@ -31153,22 +42423,37 @@ func (ec 
*executionContext) _WidgetSection_bottom(ctx context.Context, field gra return ec.marshalOWidgetArea2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetArea(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_WidgetSection_bottom(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetSection", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "widgetIds": + return ec.fieldContext_WidgetArea_widgetIds(ctx, field) + case "align": + return ec.fieldContext_WidgetArea_align(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetArea", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) _WidgetZone_left(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetZone_left(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "WidgetZone", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Left, nil @@ -31185,22 +42470,39 @@ func (ec *executionContext) _WidgetZone_left(ctx context.Context, field graphql. 
return ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetZone_center(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetZone_left(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetZone", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "top": + return ec.fieldContext_WidgetSection_top(ctx, field) + case "middle": + return ec.fieldContext_WidgetSection_middle(ctx, field) + case "bottom": + return ec.fieldContext_WidgetSection_bottom(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetSection", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _WidgetZone_center(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetZone_center(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Center, nil @@ -31217,22 +42519,39 @@ func (ec *executionContext) _WidgetZone_center(ctx context.Context, field graphq return 
ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) } -func (ec *executionContext) _WidgetZone_right(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext_WidgetZone_center(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "WidgetZone", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "top": + return ec.fieldContext_WidgetSection_top(ctx, field) + case "middle": + return ec.fieldContext_WidgetSection_middle(ctx, field) + case "bottom": + return ec.fieldContext_WidgetSection_bottom(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetSection", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) _WidgetZone_right(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.WidgetZone) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_WidgetZone_right(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Right, nil @@ -31249,22 +42568,39 @@ func (ec *executionContext) _WidgetZone_right(ctx context.Context, field graphql return 
ec.marshalOWidgetSection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetSection(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext_WidgetZone_right(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "WidgetZone", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "top": + return ec.fieldContext_WidgetSection_top(ctx, field) + case "middle": + return ec.fieldContext_WidgetSection_middle(ctx, field) + case "bottom": + return ec.fieldContext_WidgetSection_bottom(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type WidgetSection", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Directive", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -31284,22 +42620,31 @@ func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Directive_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Directive", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description(), nil @@ -31316,22 +42661,31 @@ func (ec *executionContext) ___Directive_description(ctx context.Context, field return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Directive_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_locations(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) 
defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Directive", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Locations, nil @@ -31351,22 +42705,31 @@ func (ec *executionContext) ___Directive_locations(ctx context.Context, field gr return ec.marshalN__DirectiveLocation2แš•stringแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Directive_locations(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Directive", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type __DirectiveLocation does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_args(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return 
obj.Args, nil @@ -31386,22 +42749,41 @@ func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Directive_args(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Directive", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_isRepeatable(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsRepeatable, nil 
@@ -31421,22 +42803,31 @@ func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field return ec.marshalNBoolean2bool(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__EnumValue", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -31456,22 +42847,31 @@ func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___EnumValue_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, 
+ } + return fc, nil +} + func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__EnumValue", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description(), nil @@ -31488,22 +42888,31 @@ func (ec *executionContext) ___EnumValue_description(ctx context.Context, field return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___EnumValue_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__EnumValue", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_isDeprecated(ctx, field) + if err != nil { + 
return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsDeprecated(), nil @@ -31523,22 +42932,31 @@ func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__EnumValue", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_deprecationReason(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DeprecationReason(), nil @@ -31555,22 +42973,31 @@ func (ec *executionContext) 
___EnumValue_deprecationReason(ctx context.Context, return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Field", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -31590,22 +43017,31 @@ func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.Col return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Field_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) 
___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Field", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description(), nil @@ -31622,22 +43058,31 @@ func (ec *executionContext) ___Field_description(ctx context.Context, field grap return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Field_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_args(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Field", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) 
(interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Args, nil @@ -31657,22 +43102,41 @@ func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.Col return ec.marshalN__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Field_args(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Field", Field: field, - Args: nil, IsMethod: false, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_type(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in 
children return obj.Type, nil @@ -31692,22 +43156,53 @@ func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.Col return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Field_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_isDeprecated(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Field", - Field: field, - Args: nil, - IsMethod: true, - 
IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.IsDeprecated(), nil @@ -31727,22 +43222,31 @@ func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field gra return ec.marshalNBoolean2bool(ctx, field.Selections, res) } -func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Field_isDeprecated(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Field", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_deprecationReason(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DeprecationReason(), nil @@ -31759,22 +43263,31 @@ func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, fiel return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec 
*executionContext) fieldContext___Field_deprecationReason(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__InputValue", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name, nil @@ -31794,22 +43307,31 @@ func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphq return ec.marshalNString2string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___InputValue_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___InputValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__InputValue", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description(), nil @@ -31826,22 +43348,31 @@ func (ec *executionContext) ___InputValue_description(ctx context.Context, field return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___InputValue_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__InputValue", - Field: field, - Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in 
children return obj.Type, nil @@ -31861,22 +43392,53 @@ func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphq return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___InputValue_type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_defaultValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__InputValue", - Field: field, - 
Args: nil, - IsMethod: false, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.DefaultValue, nil @@ -31893,22 +43455,31 @@ func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, fiel return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) ___Schema_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Schema", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description(), nil @@ -31925,22 +43496,31 @@ func (ec *executionContext) ___Schema_description(ctx context.Context, field gra return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret 
graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Schema_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Schema", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_types(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Types(), nil @@ -31960,22 +43540,53 @@ func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.C return ec.marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Schema_types(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return 
ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_queryType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Schema", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.QueryType(), nil @@ -31995,22 +43606,53 @@ func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graph return ec.marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Schema_queryType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_mutationType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Schema", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.MutationType(), nil @@ -32027,22 +43669,53 @@ func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field gr return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Schema_mutationType(ctx 
context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_subscriptionType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Schema", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SubscriptionType(), nil @@ -32059,22 +43732,53 @@ func (ec *executionContext) 
___Schema_subscriptionType(ctx context.Context, fiel return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_directives(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Schema", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) 
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Directives(), nil @@ -32094,22 +43798,43 @@ func (ec *executionContext) ___Schema_directives(ctx context.Context, field grap return ec.marshalN__Directive2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšDirectiveแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Schema_directives(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Directive_name(ctx, field) + case "description": + return ec.fieldContext___Directive_description(ctx, field) + case "locations": + return ec.fieldContext___Directive_locations(ctx, field) + case "args": + return ec.fieldContext___Directive_args(ctx, field) + case "isRepeatable": + return ec.fieldContext___Directive_isRepeatable(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Directive", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_kind(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Type", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from 
middleware stack in children return obj.Kind(), nil @@ -32129,22 +43854,31 @@ func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.Coll return ec.marshalN__TypeKind2string(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_kind(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type __TypeKind does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_name(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Name(), nil @@ -32161,22 +43895,31 @@ func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.Coll return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := 
&graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} +func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_description(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Description(), nil @@ -32193,32 +43936,34 @@ func (ec *executionContext) ___Type_description(ctx context.Context, field graph return ec.marshalOString2แš–string(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, } + return fc, nil +} 
- ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field___Type_fields_args(ctx, rawArgs) +func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_fields(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children - return obj.Fields(args["includeDeprecated"].(bool)), nil + return obj.Fields(fc.Args["includeDeprecated"].(bool)), nil }) if err != nil { ec.Error(ctx, err) @@ -32232,22 +43977,56 @@ func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.Co return ec.marshalO__Field2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšFieldแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Field_name(ctx, field) + case "description": + return ec.fieldContext___Field_description(ctx, field) + case "args": + return 
ec.fieldContext___Field_args(ctx, field) + case "type": + return ec.fieldContext___Field_type(ctx, field) + case "isDeprecated": + return ec.fieldContext___Field_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___Field_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Field", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_fields_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_interfaces(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.Interfaces(), nil @@ -32264,22 +44043,53 @@ func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphq return ec.marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Type_interfaces(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return 
ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_possibleTypes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Type", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.PossibleTypes(), nil @@ -32296,32 +44106,56 @@ func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field gra return ec.marshalO__Type2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšTypeแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - 
ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_possibleTypes(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, - } + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} - ctx = graphql.WithFieldContext(ctx, fc) - rawArgs := field.ArgumentMap(ec.Variables) - args, err := ec.field___Type_enumValues_args(ctx, rawArgs) +func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_enumValues(ctx, field) if err != nil { - ec.Error(ctx, err) return graphql.Null } - fc.Args = args + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx 
// use context from middleware stack in children - return obj.EnumValues(args["includeDeprecated"].(bool)), nil + return obj.EnumValues(fc.Args["includeDeprecated"].(bool)), nil }) if err != nil { ec.Error(ctx, err) @@ -32335,22 +44169,52 @@ func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphq return ec.marshalO__EnumValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšEnumValueแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_enumValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___EnumValue_name(ctx, field) + case "description": + return ec.fieldContext___EnumValue_description(ctx, field) + case "isDeprecated": + return ec.fieldContext___EnumValue_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___EnumValue_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __EnumValue", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_enumValues_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return } + return fc, nil +} +func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj 
*introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_inputFields(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.InputFields(), nil @@ -32367,22 +44231,41 @@ func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graph return ec.marshalO__InputValue2แš•githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšInputValueแš„(ctx, field.Selections, res) } -func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - fc := &graphql.FieldContext{ +func (ec *executionContext) fieldContext___Type_inputFields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ Object: "__Type", Field: field, - Args: nil, IsMethod: true, IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, } + return fc, nil +} +func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err 
:= ec.fieldContext___Type_ofType(ctx, field) + if err != nil { + return graphql.Null + } ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.OfType(), nil @@ -32399,22 +44282,53 @@ func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.Co return ec.marshalO__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Type_ofType(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + func (ec *executionContext) ___Type_specifiedByURL(ctx context.Context, field graphql.CollectedField, obj 
*introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_specifiedByURL(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) defer func() { if r := recover(); r != nil { ec.Error(ctx, ec.Recover(ctx, r)) ret = graphql.Null } }() - fc := &graphql.FieldContext{ - Object: "__Type", - Field: field, - Args: nil, - IsMethod: true, - IsResolver: false, - } - - ctx = graphql.WithFieldContext(ctx, fc) resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { ctx = rctx // use context from middleware stack in children return obj.SpecifiedByURL(), nil @@ -32431,6 +44345,19 @@ func (ec *executionContext) ___Type_specifiedByURL(ctx context.Context, field gr return ec.marshalOString2แš–string(ctx, field.Selections, res) } +func (ec *executionContext) fieldContext___Type_specifiedByURL(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + // endregion **************************** field.gotpl ***************************** // region **************************** input.gotpl ***************************** @@ -35383,21 +47310,15 @@ func (ec *executionContext) _AddClusterPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("AddClusterPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddClusterPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddClusterPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "cluster": - innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { - return ec._AddClusterPayload_cluster(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddClusterPayload_cluster(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35424,11 +47345,8 @@ func (ec *executionContext) _AddDatasetSchemaPayload(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("AddDatasetSchemaPayload") case "datasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddDatasetSchemaPayload_datasetSchema(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddDatasetSchemaPayload_datasetSchema(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35452,18 +47370,12 @@ func (ec *executionContext) _AddDynamicDatasetPayload(ctx context.Context, sel a case "__typename": out.Values[i] = graphql.MarshalString("AddDynamicDatasetPayload") case "datasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddDynamicDatasetPayload_datasetSchema(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddDynamicDatasetPayload_datasetSchema(ctx, field, obj) case "dataset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddDynamicDatasetPayload_dataset(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddDynamicDatasetPayload_dataset(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35487,11 +47399,8 @@ func (ec *executionContext) _AddDynamicDatasetSchemaPayload(ctx context.Context, case "__typename": out.Values[i] = graphql.MarshalString("AddDynamicDatasetSchemaPayload") case "datasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddDynamicDatasetSchemaPayload_datasetSchema(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddDynamicDatasetSchemaPayload_datasetSchema(ctx, 
field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35515,21 +47424,15 @@ func (ec *executionContext) _AddInfoboxFieldPayload(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("AddInfoboxFieldPayload") case "infoboxField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddInfoboxFieldPayload_infoboxField(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddInfoboxFieldPayload_infoboxField(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddInfoboxFieldPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddInfoboxFieldPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35556,31 +47459,22 @@ func (ec *executionContext) _AddLayerGroupPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("AddLayerGroupPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddLayerGroupPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddLayerGroupPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddLayerGroupPayload_parentLayer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddLayerGroupPayload_parentLayer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "index": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddLayerGroupPayload_index(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddLayerGroupPayload_index(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35604,31 +47498,22 @@ func (ec 
*executionContext) _AddLayerItemPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("AddLayerItemPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddLayerItemPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddLayerItemPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddLayerItemPayload_parentLayer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddLayerItemPayload_parentLayer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "index": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddLayerItemPayload_index(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddLayerItemPayload_index(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35652,11 +47537,8 @@ func (ec *executionContext) _AddMemberToTeamPayload(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("AddMemberToTeamPayload") case "team": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddMemberToTeamPayload_team(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddMemberToTeamPayload_team(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35683,21 +47565,15 @@ func (ec *executionContext) _AddWidgetPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("AddWidgetPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddWidgetPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddWidgetPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case 
"sceneWidget": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AddWidgetPayload_sceneWidget(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AddWidgetPayload_sceneWidget(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35724,71 +47600,50 @@ func (ec *executionContext) _Asset(ctx context.Context, sel ast.SelectionSet, ob case "__typename": out.Values[i] = graphql.MarshalString("Asset") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Asset_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "createdAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_createdAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Asset_createdAt(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "teamId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_teamId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Asset_teamId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Asset_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "size": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_size(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Asset_size(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "url": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_url(ctx, field, obj) - } - out.Values[i] 
= innerFunc(ctx) + out.Values[i] = ec._Asset_url(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "contentType": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Asset_contentType(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Asset_contentType(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -35832,41 +47687,29 @@ func (ec *executionContext) _AssetConnection(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("AssetConnection") case "edges": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AssetConnection_edges(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AssetConnection_edges(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "nodes": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AssetConnection_nodes(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AssetConnection_nodes(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AssetConnection_pageInfo(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AssetConnection_pageInfo(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AssetConnection_totalCount(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AssetConnection_totalCount(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35893,21 +47736,15 @@ func (ec *executionContext) _AssetEdge(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("AssetEdge") case "cursor": - innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { - return ec._AssetEdge_cursor(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AssetEdge_cursor(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "node": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AssetEdge_node(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AssetEdge_node(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -35931,11 +47768,8 @@ func (ec *executionContext) _AttachTagItemToGroupPayload(ctx context.Context, se case "__typename": out.Values[i] = graphql.MarshalString("AttachTagItemToGroupPayload") case "tag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AttachTagItemToGroupPayload_tag(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AttachTagItemToGroupPayload_tag(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35962,11 +47796,8 @@ func (ec *executionContext) _AttachTagToLayerPayload(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("AttachTagToLayerPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._AttachTagToLayerPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._AttachTagToLayerPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -35993,71 +47824,50 @@ func (ec *executionContext) _Camera(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Camera") case "lat": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_lat(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_lat(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "lng": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_lng(ctx, 
field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_lng(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "altitude": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_altitude(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_altitude(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "heading": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_heading(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_heading(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "pitch": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_pitch(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_pitch(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "roll": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_roll(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_roll(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "fov": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Camera_fov(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Camera_fov(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36084,31 +47894,22 @@ func (ec *executionContext) _Cluster(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Cluster") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Cluster_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Cluster_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._Cluster_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Cluster_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Cluster_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Cluster_propertyId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -36152,11 +47953,8 @@ func (ec *executionContext) _CreateAssetPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("CreateAssetPayload") case "asset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateAssetPayload_asset(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateAssetPayload_asset(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36183,11 +47981,8 @@ func (ec *executionContext) _CreateInfoboxPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("CreateInfoboxPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateInfoboxPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateInfoboxPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36214,11 +48009,8 @@ func (ec *executionContext) _CreateScenePayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("CreateScenePayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateScenePayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateScenePayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36245,11 +48037,8 @@ func (ec *executionContext) _CreateTagGroupPayload(ctx context.Context, sel 
ast. case "__typename": out.Values[i] = graphql.MarshalString("CreateTagGroupPayload") case "tag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateTagGroupPayload_tag(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateTagGroupPayload_tag(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36276,21 +48065,15 @@ func (ec *executionContext) _CreateTagItemPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("CreateTagItemPayload") case "tag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateTagItemPayload_tag(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateTagItemPayload_tag(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "parent": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateTagItemPayload_parent(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateTagItemPayload_parent(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36314,11 +48097,8 @@ func (ec *executionContext) _CreateTeamPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("CreateTeamPayload") case "team": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._CreateTeamPayload_team(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._CreateTeamPayload_team(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36345,41 +48125,29 @@ func (ec *executionContext) _Dataset(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Dataset") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Dataset_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Dataset_id(ctx, field, obj) if 
out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Dataset_source(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Dataset_source(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Dataset_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Dataset_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Dataset_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Dataset_fields(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -36440,41 +48208,29 @@ func (ec *executionContext) _DatasetConnection(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("DatasetConnection") case "edges": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetConnection_edges(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetConnection_edges(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "nodes": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetConnection_nodes(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetConnection_nodes(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetConnection_pageInfo(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetConnection_pageInfo(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetConnection_totalCount(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetConnection_totalCount(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36501,21 +48257,15 @@ func (ec *executionContext) _DatasetEdge(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("DatasetEdge") case "cursor": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetEdge_cursor(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetEdge_cursor(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "node": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetEdge_node(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetEdge_node(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36539,51 +48289,36 @@ func (ec *executionContext) _DatasetField(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("DatasetField") case "fieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetField_fieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetField_fieldId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetField_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetField_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetField_source(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._DatasetField_source(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetField_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetField_type(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "value": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetField_value(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetField_value(ctx, field, obj) case "schema": field := field @@ -36658,68 +48393,47 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchema") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_source(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_source(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_fields(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "representativeFieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_representativeFieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_representativeFieldId(ctx, field, obj) case "dynamic": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchema_dynamic(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchema_dynamic(ctx, field, obj) case "datasets": field := field @@ -36797,41 +48511,29 @@ func (ec *executionContext) _DatasetSchemaConnection(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchemaConnection") case "edges": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaConnection_edges(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaConnection_edges(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "nodes": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaConnection_nodes(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaConnection_nodes(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaConnection_pageInfo(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaConnection_pageInfo(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaConnection_totalCount(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaConnection_totalCount(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -36858,21 +48560,15 @@ func (ec *executionContext) _DatasetSchemaEdge(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchemaEdge") case "cursor": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaEdge_cursor(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaEdge_cursor(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "node": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaEdge_node(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaEdge_node(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -36896,61 +48592,43 @@ func (ec *executionContext) _DatasetSchemaField(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("DatasetSchemaField") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaField_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaField_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "source": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaField_source(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaField_source(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaField_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + 
out.Values[i] = ec._DatasetSchemaField_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaField_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaField_type(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaField_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaField_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "refId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DatasetSchemaField_refId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DatasetSchemaField_refId(ctx, field, obj) case "schema": field := field @@ -37008,11 +48686,8 @@ func (ec *executionContext) _DeleteMePayload(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("DeleteMePayload") case "userId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DeleteMePayload_userId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DeleteMePayload_userId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37039,11 +48714,8 @@ func (ec *executionContext) _DeleteProjectPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("DeleteProjectPayload") case "projectId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DeleteProjectPayload_projectId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DeleteProjectPayload_projectId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37070,11 +48742,8 @@ func (ec 
*executionContext) _DeleteTeamPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("DeleteTeamPayload") case "teamId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DeleteTeamPayload_teamId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DeleteTeamPayload_teamId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37101,11 +48770,8 @@ func (ec *executionContext) _DetachTagFromLayerPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("DetachTagFromLayerPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DetachTagFromLayerPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DetachTagFromLayerPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37132,11 +48798,8 @@ func (ec *executionContext) _DetachTagItemFromGroupPayload(ctx context.Context, case "__typename": out.Values[i] = graphql.MarshalString("DetachTagItemFromGroupPayload") case "tag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._DetachTagItemFromGroupPayload_tag(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._DetachTagItemFromGroupPayload_tag(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37163,11 +48826,8 @@ func (ec *executionContext) _ImportDatasetPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("ImportDatasetPayload") case "datasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ImportDatasetPayload_datasetSchema(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ImportDatasetPayload_datasetSchema(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37194,21 +48854,15 @@ func (ec *executionContext) _ImportLayerPayload(ctx 
context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("ImportLayerPayload") case "layers": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ImportLayerPayload_layers(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ImportLayerPayload_layers(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ImportLayerPayload_parentLayer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ImportLayerPayload_parentLayer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37235,51 +48889,36 @@ func (ec *executionContext) _Infobox(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Infobox") case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Infobox_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Infobox_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "layerId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Infobox_layerId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Infobox_layerId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Infobox_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Infobox_propertyId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Infobox_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Infobox_fields(ctx, field, obj) if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "linkedDatasetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Infobox_linkedDatasetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Infobox_linkedDatasetId(ctx, field, obj) case "layer": field := field @@ -37391,71 +49030,50 @@ func (ec *executionContext) _InfoboxField(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("InfoboxField") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "layerId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_layerId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_layerId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_propertyId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extensionId": 
- innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_extensionId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_extensionId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "linkedDatasetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InfoboxField_linkedDatasetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InfoboxField_linkedDatasetId(ctx, field, obj) case "layer": field := field @@ -37638,21 +49256,15 @@ func (ec *executionContext) _InstallPluginPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("InstallPluginPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InstallPluginPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InstallPluginPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "scenePlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._InstallPluginPayload_scenePlugin(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._InstallPluginPayload_scenePlugin(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37679,21 +49291,15 @@ func (ec *executionContext) _LatLng(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("LatLng") case "lat": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LatLng_lat(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LatLng_lat(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "lng": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LatLng_lng(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LatLng_lng(ctx, field, obj) if 
out.Values[i] == graphql.Null { invalids++ @@ -37720,31 +49326,22 @@ func (ec *executionContext) _LatLngHeight(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("LatLngHeight") case "lat": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LatLngHeight_lat(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LatLngHeight_lat(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "lng": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LatLngHeight_lng(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LatLngHeight_lng(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "height": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LatLngHeight_height(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LatLngHeight_height(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -37771,113 +49368,74 @@ func (ec *executionContext) _LayerGroup(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("LayerGroup") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_name(ctx, field, 
obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isVisible": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_isVisible(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_isVisible(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_propertyId(ctx, field, obj) case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_pluginId(ctx, field, obj) case "extensionId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_extensionId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_extensionId(ctx, field, obj) case "infobox": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_infobox(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_infobox(ctx, field, obj) case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_parentId(ctx, field, obj) case "linkedDatasetSchemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_linkedDatasetSchemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_linkedDatasetSchemaId(ctx, field, obj) case "root": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_root(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._LayerGroup_root(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "layerIds": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_layerIds(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_layerIds(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tags": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerGroup_tags(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerGroup_tags(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -38043,93 +49601,60 @@ func (ec *executionContext) _LayerItem(ctx context.Context, sel ast.SelectionSet case "__typename": out.Values[i] = graphql.MarshalString("LayerItem") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isVisible": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_isVisible(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_isVisible(ctx, field, obj) if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_propertyId(ctx, field, obj) case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_pluginId(ctx, field, obj) case "extensionId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_extensionId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_extensionId(ctx, field, obj) case "infobox": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_infobox(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_infobox(ctx, field, obj) case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_parentId(ctx, field, obj) case "linkedDatasetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_linkedDatasetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_linkedDatasetId(ctx, field, obj) case "tags": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerItem_tags(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerItem_tags(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -38292,21 +49817,15 @@ func (ec *executionContext) _LayerTagGroup(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("LayerTagGroup") case "tagId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._LayerTagGroup_tagId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerTagGroup_tagId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "children": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerTagGroup_children(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerTagGroup_children(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -38350,11 +49869,8 @@ func (ec *executionContext) _LayerTagItem(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("LayerTagItem") case "tagId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._LayerTagItem_tagId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._LayerTagItem_tagId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -38398,71 +49914,50 @@ func (ec *executionContext) _Me(ctx context.Context, sel ast.SelectionSet, obj * case "__typename": out.Values[i] = graphql.MarshalString("Me") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "email": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_email(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_email(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "lang": - innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { - return ec._Me_lang(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_lang(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "theme": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_theme(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_theme(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "myTeamId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_myTeamId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_myTeamId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "auths": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Me_auths(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Me_auths(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -38529,28 +50024,19 @@ func (ec *executionContext) _MergedInfobox(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("MergedInfobox") case "sceneID": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfobox_sceneID(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfobox_sceneID(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfobox_property(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfobox_property(ctx, field, obj) case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfobox_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfobox_fields(ctx, 
field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -38594,51 +50080,36 @@ func (ec *executionContext) _MergedInfoboxField(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("MergedInfoboxField") case "originalId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfoboxField_originalId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfoboxField_originalId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneID": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfoboxField_sceneID(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfoboxField_sceneID(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfoboxField_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfoboxField_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extensionId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfoboxField_extensionId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfoboxField_extensionId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedInfoboxField_property(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedInfoboxField_property(ctx, field, obj) case "plugin": field := field @@ -38730,45 +50201,30 @@ func (ec *executionContext) _MergedLayer(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("MergedLayer") case 
"originalId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedLayer_originalId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedLayer_originalId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedLayer_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedLayer_parentId(ctx, field, obj) case "sceneID": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedLayer_sceneID(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedLayer_sceneID(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "property": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedLayer_property(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedLayer_property(ctx, field, obj) case "infobox": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedLayer_infobox(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedLayer_infobox(ctx, field, obj) case "original": field := field @@ -38843,32 +50299,20 @@ func (ec *executionContext) _MergedProperty(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("MergedProperty") case "originalId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedProperty_originalId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedProperty_originalId(ctx, field, obj) case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedProperty_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedProperty_parentId(ctx, field, obj) case 
"schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedProperty_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedProperty_schemaId(ctx, field, obj) case "linkedDatasetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedProperty_linkedDatasetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedProperty_linkedDatasetId(ctx, field, obj) case "original": field := field @@ -38980,55 +50424,37 @@ func (ec *executionContext) _MergedPropertyField(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("MergedPropertyField") case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyField_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyField_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyField_fieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyField_fieldId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "value": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyField_value(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyField_value(ctx, field, obj) case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyField_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyField_type(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "links": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._MergedPropertyField_links(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyField_links(ctx, field, obj) case "overridden": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyField_overridden(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyField_overridden(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -39106,73 +50532,46 @@ func (ec *executionContext) _MergedPropertyGroup(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("MergedPropertyGroup") case "originalPropertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_originalPropertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_originalPropertyId(ctx, field, obj) case "parentPropertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_parentPropertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_parentPropertyId(ctx, field, obj) case "originalId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_originalId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_originalId(ctx, field, obj) case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_parentId(ctx, field, obj) case "schemaGroupId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_schemaGroupId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_schemaGroupId(ctx, field, obj) if out.Values[i] == 
graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_schemaId(ctx, field, obj) case "linkedDatasetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_linkedDatasetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_linkedDatasetId(ctx, field, obj) case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_fields(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "groups": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MergedPropertyGroup_groups(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MergedPropertyGroup_groups(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -39301,31 +50700,22 @@ func (ec *executionContext) _MoveInfoboxFieldPayload(ctx context.Context, sel as case "__typename": out.Values[i] = graphql.MarshalString("MoveInfoboxFieldPayload") case "infoboxFieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveInfoboxFieldPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveInfoboxFieldPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case 
"index": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveInfoboxFieldPayload_index(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveInfoboxFieldPayload_index(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -39352,41 +50742,29 @@ func (ec *executionContext) _MoveLayerPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("MoveLayerPayload") case "layerId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveLayerPayload_layerId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveLayerPayload_layerId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "fromParentLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveLayerPayload_fromParentLayer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveLayerPayload_fromParentLayer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "toParentLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveLayerPayload_toParentLayer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveLayerPayload_toParentLayer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "index": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._MoveLayerPayload_index(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._MoveLayerPayload_index(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -39422,452 +50800,388 @@ func (ec *executionContext) _Mutation(ctx context.Context, sel ast.SelectionSet) case "__typename": out.Values[i] = graphql.MarshalString("Mutation") case "createAsset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createAsset(ctx, field) - } - out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createAsset(ctx, field) + }) case "removeAsset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeAsset(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeAsset(ctx, field) + }) case "signup": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_signup(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_signup(ctx, field) + }) case "updateMe": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateMe(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateMe(ctx, field) + }) case "removeMyAuth": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeMyAuth(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeMyAuth(ctx, field) + }) case "deleteMe": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_deleteMe(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, 
innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteMe(ctx, field) + }) case "createTeam": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createTeam(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTeam(ctx, field) + }) case "deleteTeam": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_deleteTeam(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteTeam(ctx, field) + }) case "updateTeam": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateTeam(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateTeam(ctx, field) + }) case "addMemberToTeam": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addMemberToTeam(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addMemberToTeam(ctx, field) + }) case "removeMemberFromTeam": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeMemberFromTeam(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + 
out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeMemberFromTeam(ctx, field) + }) case "updateMemberOfTeam": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateMemberOfTeam(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateMemberOfTeam(ctx, field) + }) case "createProject": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createProject(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createProject(ctx, field) + }) case "updateProject": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateProject(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateProject(ctx, field) + }) case "publishProject": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_publishProject(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_publishProject(ctx, field) + }) case "deleteProject": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_deleteProject(ctx, field) - } - out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_deleteProject(ctx, field) + }) case "createScene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createScene(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createScene(ctx, field) + }) case "addWidget": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addWidget(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addWidget(ctx, field) + }) case "updateWidget": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateWidget(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateWidget(ctx, field) + }) case "updateWidgetAlignSystem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateWidgetAlignSystem(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateWidgetAlignSystem(ctx, field) + }) case "removeWidget": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeWidget(ctx, field) - } - out.Values[i] 
= ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeWidget(ctx, field) + }) case "installPlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_installPlugin(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_installPlugin(ctx, field) + }) case "uninstallPlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_uninstallPlugin(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uninstallPlugin(ctx, field) + }) case "uploadPlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_uploadPlugin(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uploadPlugin(ctx, field) + }) case "upgradePlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_upgradePlugin(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_upgradePlugin(ctx, field) + }) case "addCluster": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addCluster(ctx, field) - } - out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addCluster(ctx, field) + }) case "updateCluster": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateCluster(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateCluster(ctx, field) + }) case "removeCluster": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeCluster(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeCluster(ctx, field) + }) case "updateDatasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateDatasetSchema(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateDatasetSchema(ctx, field) + }) case "syncDataset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_syncDataset(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_syncDataset(ctx, field) + }) case "addDynamicDatasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addDynamicDatasetSchema(ctx, 
field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDynamicDatasetSchema(ctx, field) + }) case "addDynamicDataset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addDynamicDataset(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDynamicDataset(ctx, field) + }) case "removeDatasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeDatasetSchema(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeDatasetSchema(ctx, field) + }) case "importDataset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_importDataset(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importDataset(ctx, field) + }) case "importDatasetFromGoogleSheet": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_importDatasetFromGoogleSheet(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importDatasetFromGoogleSheet(ctx, field) + }) case "addDatasetSchema": - innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { - return ec._Mutation_addDatasetSchema(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addDatasetSchema(ctx, field) + }) case "updatePropertyValue": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updatePropertyValue(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updatePropertyValue(ctx, field) + }) case "removePropertyField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removePropertyField(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removePropertyField(ctx, field) + }) case "uploadFileToProperty": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_uploadFileToProperty(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_uploadFileToProperty(ctx, field) + }) case "linkDatasetToPropertyValue": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_linkDatasetToPropertyValue(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return 
ec._Mutation_linkDatasetToPropertyValue(ctx, field) + }) case "unlinkPropertyValue": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_unlinkPropertyValue(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_unlinkPropertyValue(ctx, field) + }) case "addPropertyItem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addPropertyItem(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addPropertyItem(ctx, field) + }) case "movePropertyItem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_movePropertyItem(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_movePropertyItem(ctx, field) + }) case "removePropertyItem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removePropertyItem(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removePropertyItem(ctx, field) + }) case "updatePropertyItems": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updatePropertyItems(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updatePropertyItems(ctx, field) + }) case "addLayerItem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addLayerItem(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addLayerItem(ctx, field) + }) case "addLayerGroup": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addLayerGroup(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addLayerGroup(ctx, field) + }) case "removeLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeLayer(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeLayer(ctx, field) + }) case "updateLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateLayer(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateLayer(ctx, field) + }) case "moveLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_moveLayer(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_moveLayer(ctx, field) + }) case "createInfobox": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createInfobox(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createInfobox(ctx, field) + }) case "removeInfobox": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeInfobox(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeInfobox(ctx, field) + }) case "addInfoboxField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_addInfoboxField(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_addInfoboxField(ctx, field) + }) case "moveInfoboxField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_moveInfoboxField(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_moveInfoboxField(ctx, field) + }) case "removeInfoboxField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeInfoboxField(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + 
out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeInfoboxField(ctx, field) + }) case "importLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_importLayer(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_importLayer(ctx, field) + }) case "attachTagToLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_attachTagToLayer(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_attachTagToLayer(ctx, field) + }) case "detachTagFromLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_detachTagFromLayer(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_detachTagFromLayer(ctx, field) + }) case "createTagItem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createTagItem(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTagItem(ctx, field) + }) case "createTagGroup": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_createTagGroup(ctx, field) - } - out.Values[i] = 
ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_createTagGroup(ctx, field) + }) case "attachTagItemToGroup": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_attachTagItemToGroup(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_attachTagItemToGroup(ctx, field) + }) case "detachTagItemFromGroup": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_detachTagItemFromGroup(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_detachTagItemFromGroup(ctx, field) + }) case "updateTag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_updateTag(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_updateTag(ctx, field) + }) case "removeTag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Mutation_removeTag(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Mutation_removeTag(ctx, field) + }) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -39891,35 +51205,23 @@ func (ec *executionContext) _PageInfo(ctx 
context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("PageInfo") case "startCursor": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PageInfo_startCursor(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PageInfo_startCursor(ctx, field, obj) case "endCursor": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PageInfo_endCursor(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PageInfo_endCursor(ctx, field, obj) case "hasNextPage": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PageInfo_hasNextPage(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PageInfo_hasNextPage(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "hasPreviousPage": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PageInfo_hasPreviousPage(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PageInfo_hasPreviousPage(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -39946,109 +51248,73 @@ func (ec *executionContext) _Plugin(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("Plugin") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_sceneId(ctx, field, obj) case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._Plugin_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "version": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_version(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_version(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_description(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "author": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_author(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_author(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "repositoryUrl": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_repositoryUrl(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_repositoryUrl(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertySchemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_propertySchemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_propertySchemaId(ctx, field, obj) case "extensions": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_extensions(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_extensions(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "scenePlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_scenePlugin(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + 
out.Values[i] = ec._Plugin_scenePlugin(ctx, field, obj) case "allTranslatedDescription": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_allTranslatedDescription(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_allTranslatedDescription(ctx, field, obj) case "allTranslatedName": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Plugin_allTranslatedName(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Plugin_allTranslatedName(ctx, field, obj) case "scene": field := field @@ -40146,109 +51412,73 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("PluginExtension") case "extensionId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_extensionId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_extensionId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_type(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 
1) } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_description(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "icon": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_icon(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_icon(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "singleOnly": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_singleOnly(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_singleOnly(ctx, field, obj) case "widgetLayout": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_widgetLayout(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_widgetLayout(ctx, field, obj) case "visualizer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_visualizer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_visualizer(ctx, field, obj) case "propertySchemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_propertySchemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_propertySchemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "allTranslatedName": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_allTranslatedName(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_allTranslatedName(ctx, field, obj) case "allTranslatedDescription": - 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginExtension_allTranslatedDescription(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginExtension_allTranslatedDescription(ctx, field, obj) case "plugin": field := field @@ -40363,51 +51593,36 @@ func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("PluginMetadata") case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginMetadata_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginMetadata_name(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginMetadata_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginMetadata_description(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "author": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginMetadata_author(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginMetadata_author(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "thumbnailUrl": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginMetadata_thumbnailUrl(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginMetadata_thumbnailUrl(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "createdAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PluginMetadata_createdAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PluginMetadata_createdAt(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -40434,185 +51649,128 @@ func (ec *executionContext) _Project(ctx context.Context, sel 
ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Project") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isArchived": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_isArchived(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_isArchived(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isBasicAuthActive": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_isBasicAuthActive(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_isBasicAuthActive(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "basicAuthUsername": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_basicAuthUsername(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_basicAuthUsername(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "basicAuthPassword": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_basicAuthPassword(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_basicAuthPassword(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "createdAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_createdAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_createdAt(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "updatedAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - 
return ec._Project_updatedAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_updatedAt(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publishedAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_publishedAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_publishedAt(ctx, field, obj) case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_description(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "alias": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_alias(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_alias(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicTitle": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_publicTitle(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_publicTitle(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicDescription": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_publicDescription(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_publicDescription(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicImage": - innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { - return ec._Project_publicImage(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_publicImage(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publicNoIndex": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_publicNoIndex(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_publicNoIndex(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "imageUrl": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_imageUrl(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_imageUrl(ctx, field, obj) case "teamId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_teamId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_teamId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "visualizer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_visualizer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_visualizer(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "publishmentStatus": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Project_publishmentStatus(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Project_publishmentStatus(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -40673,21 +51831,15 @@ func (ec *executionContext) _ProjectAliasAvailability(ctx context.Context, sel a case "__typename": out.Values[i] = graphql.MarshalString("ProjectAliasAvailability") case "alias": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectAliasAvailability_alias(ctx, field, 
obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectAliasAvailability_alias(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "available": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectAliasAvailability_available(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectAliasAvailability_available(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -40714,41 +51866,29 @@ func (ec *executionContext) _ProjectConnection(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("ProjectConnection") case "edges": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectConnection_edges(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectConnection_edges(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "nodes": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectConnection_nodes(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectConnection_nodes(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "pageInfo": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectConnection_pageInfo(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectConnection_pageInfo(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "totalCount": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectConnection_totalCount(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectConnection_totalCount(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -40775,21 +51915,15 @@ func (ec *executionContext) _ProjectEdge(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("ProjectEdge") case "cursor": 
- innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectEdge_cursor(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectEdge_cursor(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "node": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectEdge_node(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectEdge_node(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40813,11 +51947,8 @@ func (ec *executionContext) _ProjectPayload(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("ProjectPayload") case "project": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ProjectPayload_project(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ProjectPayload_project(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -40844,31 +51975,22 @@ func (ec *executionContext) _Property(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("Property") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Property_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Property_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Property_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Property_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "items": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Property_items(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Property_items(ctx, field, obj) if out.Values[i] == graphql.Null { 
atomic.AddUint32(&invalids, 1) @@ -40946,31 +52068,22 @@ func (ec *executionContext) _PropertyCondition(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("PropertyCondition") case "fieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyCondition_fieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyCondition_fieldId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyCondition_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyCondition_type(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "value": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyCondition_value(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyCondition_value(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -40994,68 +52107,47 @@ func (ec *executionContext) _PropertyField(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("PropertyField") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_parentId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_schemaId(ctx, field, obj) - } - 
out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_fieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_fieldId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "links": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_links(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_links(ctx, field, obj) case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_type(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "value": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyField_value(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyField_value(ctx, field, obj) case "parent": field := field @@ -41147,28 +52239,19 @@ func (ec *executionContext) _PropertyFieldLink(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("PropertyFieldLink") case "datasetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyFieldLink_datasetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyFieldLink_datasetId(ctx, field, obj) case "datasetSchemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyFieldLink_datasetSchemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyFieldLink_datasetSchemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 
1) } case "datasetSchemaFieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyFieldLink_datasetSchemaFieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyFieldLink_datasetSchemaFieldId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -41263,21 +52346,15 @@ func (ec *executionContext) _PropertyFieldPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("PropertyFieldPayload") case "property": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyFieldPayload_property(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyFieldPayload_property(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "propertyField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyFieldPayload_propertyField(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyFieldPayload_propertyField(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -41301,41 +52378,29 @@ func (ec *executionContext) _PropertyGroup(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("PropertyGroup") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroup_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroup_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroup_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroup_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaGroupId": - innerFunc := func(ctx context.Context) 
(res graphql.Marshaler) { - return ec._PropertyGroup_schemaGroupId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroup_schemaGroupId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroup_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroup_fields(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -41396,41 +52461,29 @@ func (ec *executionContext) _PropertyGroupList(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("PropertyGroupList") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroupList_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroupList_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroupList_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroupList_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaGroupId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroupList_schemaGroupId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroupList_schemaGroupId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "groups": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyGroupList_groups(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyGroupList_groups(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -41491,21 +52544,15 @@ 
func (ec *executionContext) _PropertyItemPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("PropertyItemPayload") case "property": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyItemPayload_property(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyItemPayload_property(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "propertyItem": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyItemPayload_propertyItem(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyItemPayload_propertyItem(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -41529,28 +52576,19 @@ func (ec *executionContext) _PropertyLinkableFields(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("PropertyLinkableFields") case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyLinkableFields_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyLinkableFields_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "latlng": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyLinkableFields_latlng(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyLinkableFields_latlng(ctx, field, obj) case "url": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertyLinkableFields_url(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertyLinkableFields_url(ctx, field, obj) case "latlngField": field := field @@ -41625,31 +52663,22 @@ func (ec *executionContext) _PropertySchema(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("PropertySchema") case 
"id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchema_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchema_id(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "groups": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchema_groups(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchema_groups(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "linkableFields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchema_linkableFields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchema_linkableFields(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -41676,114 +52705,72 @@ func (ec *executionContext) _PropertySchemaField(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("PropertySchemaField") case "fieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_fieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_fieldId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_type(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "title": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_title(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_title(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "description": - innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_description(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "prefix": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_prefix(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_prefix(ctx, field, obj) case "suffix": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_suffix(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_suffix(ctx, field, obj) case "defaultValue": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_defaultValue(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_defaultValue(ctx, field, obj) case "ui": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_ui(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_ui(ctx, field, obj) case "min": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_min(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_min(ctx, field, obj) case "max": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_max(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_max(ctx, field, obj) case "choices": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_choices(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_choices(ctx, field, obj) case "isAvailableIf": - 
innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_isAvailableIf(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_isAvailableIf(ctx, field, obj) case "allTranslatedTitle": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_allTranslatedTitle(ctx, field, obj) case "allTranslatedDescription": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaField_allTranslatedDescription(ctx, field, obj) case "translatedTitle": field := field @@ -41847,38 +52834,26 @@ func (ec *executionContext) _PropertySchemaFieldChoice(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("PropertySchemaFieldChoice") case "key": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaFieldChoice_key(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaFieldChoice_key(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "title": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaFieldChoice_title(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaFieldChoice_title(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "icon": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaFieldChoice_icon(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaFieldChoice_icon(ctx, field, obj) case "allTranslatedTitle": - innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { - return ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaFieldChoice_allTranslatedTitle(ctx, field, obj) case "translatedTitle": field := field @@ -41922,79 +52897,52 @@ func (ec *executionContext) _PropertySchemaGroup(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("PropertySchemaGroup") case "schemaGroupId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_schemaGroupId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_schemaGroupId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_fields(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isList": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_isList(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_isList(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "isAvailableIf": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_isAvailableIf(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_isAvailableIf(ctx, field, obj) case "title": - innerFunc 
:= func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_title(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_title(ctx, field, obj) case "allTranslatedTitle": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_allTranslatedTitle(ctx, field, obj) case "representativeFieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_representativeFieldId(ctx, field, obj) case "representativeField": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._PropertySchemaGroup_representativeField(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._PropertySchemaGroup_representativeField(ctx, field, obj) case "schema": field := field @@ -42434,18 +53382,16 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr return rrm(innerCtx) }) case "__type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Query___type(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___type(ctx, field) + }) case "__schema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Query___schema(ctx, field) - } - out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, innerFunc) + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___schema(ctx, field) + }) default: panic("unknown field 
" + strconv.Quote(field.Name)) @@ -42469,41 +53415,29 @@ func (ec *executionContext) _Rect(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Rect") case "west": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Rect_west(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Rect_west(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "south": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Rect_south(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Rect_south(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "east": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Rect_east(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Rect_east(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "north": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Rect_north(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Rect_north(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42530,11 +53464,8 @@ func (ec *executionContext) _RemoveAssetPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveAssetPayload") case "assetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveAssetPayload_assetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveAssetPayload_assetId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42561,21 +53492,15 @@ func (ec *executionContext) _RemoveClusterPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("RemoveClusterPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._RemoveClusterPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveClusterPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "clusterId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveClusterPayload_clusterId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveClusterPayload_clusterId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42602,11 +53527,8 @@ func (ec *executionContext) _RemoveDatasetSchemaPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveDatasetSchemaPayload") case "schemaId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveDatasetSchemaPayload_schemaId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveDatasetSchemaPayload_schemaId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42633,21 +53555,15 @@ func (ec *executionContext) _RemoveInfoboxFieldPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveInfoboxFieldPayload") case "infoboxFieldId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveInfoboxFieldPayload_infoboxFieldId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveInfoboxFieldPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveInfoboxFieldPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42674,11 +53590,8 @@ func (ec *executionContext) _RemoveInfoboxPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = 
graphql.MarshalString("RemoveInfoboxPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveInfoboxPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveInfoboxPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42705,21 +53618,15 @@ func (ec *executionContext) _RemoveLayerPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("RemoveLayerPayload") case "layerId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveLayerPayload_layerId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveLayerPayload_layerId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "parentLayer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveLayerPayload_parentLayer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveLayerPayload_parentLayer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42746,11 +53653,8 @@ func (ec *executionContext) _RemoveMemberFromTeamPayload(ctx context.Context, se case "__typename": out.Values[i] = graphql.MarshalString("RemoveMemberFromTeamPayload") case "team": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveMemberFromTeamPayload_team(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveMemberFromTeamPayload_team(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42777,21 +53681,15 @@ func (ec *executionContext) _RemoveTagPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("RemoveTagPayload") case "tagId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveTagPayload_tagId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._RemoveTagPayload_tagId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "updatedLayers": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveTagPayload_updatedLayers(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveTagPayload_updatedLayers(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42818,21 +53716,15 @@ func (ec *executionContext) _RemoveWidgetPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("RemoveWidgetPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveWidgetPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveWidgetPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "widgetId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._RemoveWidgetPayload_widgetId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._RemoveWidgetPayload_widgetId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -42859,108 +53751,75 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob case "__typename": out.Values[i] = graphql.MarshalString("Scene") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "projectId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_projectId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_projectId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "teamId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._Scene_teamId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_teamId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_propertyId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "createdAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_createdAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_createdAt(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "updatedAt": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_updatedAt(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_updatedAt(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "rootLayerId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_rootLayerId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_rootLayerId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "widgets": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_widgets(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_widgets(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "plugins": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_plugins(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_plugins(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "widgetAlignSystem": - innerFunc := func(ctx 
context.Context) (res graphql.Marshaler) { - return ec._Scene_widgetAlignSystem(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_widgetAlignSystem(ctx, field, obj) case "dynamicDatasetSchemas": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_dynamicDatasetSchemas(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_dynamicDatasetSchemas(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -43054,11 +53913,8 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob }) case "tagIds": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_tagIds(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_tagIds(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -43084,11 +53940,8 @@ func (ec *executionContext) _Scene(ctx context.Context, sel ast.SelectionSet, ob }) case "clusters": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Scene_clusters(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Scene_clusters(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -43115,21 +53968,15 @@ func (ec *executionContext) _ScenePlugin(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("ScenePlugin") case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ScenePlugin_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._ScenePlugin_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._ScenePlugin_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._ScenePlugin_propertyId(ctx, field, obj) case "plugin": field := field @@ -43187,61 +54034,43 @@ func (ec *executionContext) _SceneWidget(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("SceneWidget") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SceneWidget_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SceneWidget_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SceneWidget_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SceneWidget_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extensionId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SceneWidget_extensionId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SceneWidget_extensionId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "propertyId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SceneWidget_propertyId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SceneWidget_propertyId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "enabled": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SceneWidget_enabled(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SceneWidget_enabled(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "extended": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SceneWidget_extended(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SceneWidget_extended(ctx, field, obj) if out.Values[i] == 
graphql.Null { atomic.AddUint32(&invalids, 1) @@ -43319,21 +54148,15 @@ func (ec *executionContext) _SignupPayload(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("SignupPayload") case "user": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SignupPayload_user(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SignupPayload_user(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "team": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SignupPayload_team(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SignupPayload_team(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -43360,41 +54183,29 @@ func (ec *executionContext) _SyncDatasetPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("SyncDatasetPayload") case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SyncDatasetPayload_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SyncDatasetPayload_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "url": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SyncDatasetPayload_url(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SyncDatasetPayload_url(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "datasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SyncDatasetPayload_datasetSchema(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._SyncDatasetPayload_datasetSchema(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "dataset": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._SyncDatasetPayload_dataset(ctx, field, obj) - } - out.Values[i] = 
innerFunc(ctx) + out.Values[i] = ec._SyncDatasetPayload_dataset(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -43421,41 +54232,29 @@ func (ec *executionContext) _TagGroup(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("TagGroup") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagGroup_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagGroup_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagGroup_sceneId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagGroup_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "label": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagGroup_label(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagGroup_label(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "tagIds": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagGroup_tagIds(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagGroup_tagIds(ctx, field, obj) case "tags": field := field @@ -43536,62 +54335,41 @@ func (ec *executionContext) _TagItem(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("TagItem") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagItem_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "sceneId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_sceneId(ctx, field, obj) - } - out.Values[i] 
= innerFunc(ctx) + out.Values[i] = ec._TagItem_sceneId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "label": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_label(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagItem_label(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "parentId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_parentId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagItem_parentId(ctx, field, obj) case "linkedDatasetID": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_linkedDatasetID(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagItem_linkedDatasetID(ctx, field, obj) case "linkedDatasetSchemaID": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_linkedDatasetSchemaID(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagItem_linkedDatasetSchemaID(ctx, field, obj) case "linkedDatasetFieldID": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TagItem_linkedDatasetFieldID(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TagItem_linkedDatasetFieldID(ctx, field, obj) case "linkedDatasetSchema": field := field @@ -43703,41 +54481,29 @@ func (ec *executionContext) _Team(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("Team") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Team_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Team_id(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._Team_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Team_name(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "members": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Team_members(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Team_members(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "personal": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Team_personal(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Team_personal(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -43804,21 +54570,15 @@ func (ec *executionContext) _TeamMember(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("TeamMember") case "userId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TeamMember_userId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TeamMember_userId(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } case "role": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._TeamMember_role(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._TeamMember_role(ctx, field, obj) if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) @@ -43862,60 +54622,36 @@ func (ec *executionContext) _Typography(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("Typography") case "fontFamily": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_fontFamily(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_fontFamily(ctx, field, obj) case "fontWeight": - innerFunc := func(ctx context.Context) (res 
graphql.Marshaler) { - return ec._Typography_fontWeight(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_fontWeight(ctx, field, obj) case "fontSize": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_fontSize(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_fontSize(ctx, field, obj) case "color": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_color(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_color(ctx, field, obj) case "textAlign": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_textAlign(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_textAlign(ctx, field, obj) case "bold": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_bold(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_bold(ctx, field, obj) case "italic": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_italic(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_italic(ctx, field, obj) case "underline": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._Typography_underline(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._Typography_underline(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -43939,21 +54675,15 @@ func (ec *executionContext) _UninstallPluginPayload(ctx context.Context, sel ast case "__typename": out.Values[i] = graphql.MarshalString("UninstallPluginPayload") case "pluginId": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UninstallPluginPayload_pluginId(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._UninstallPluginPayload_pluginId(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UninstallPluginPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UninstallPluginPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -43980,21 +54710,15 @@ func (ec *executionContext) _UpdateClusterPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("UpdateClusterPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateClusterPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateClusterPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "cluster": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateClusterPayload_cluster(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateClusterPayload_cluster(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44021,11 +54745,8 @@ func (ec *executionContext) _UpdateDatasetSchemaPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("UpdateDatasetSchemaPayload") case "datasetSchema": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateDatasetSchemaPayload_datasetSchema(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateDatasetSchemaPayload_datasetSchema(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44049,11 +54770,8 @@ func (ec *executionContext) _UpdateLayerPayload(ctx context.Context, sel ast.Sel case "__typename": out.Values[i] = graphql.MarshalString("UpdateLayerPayload") case "layer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec._UpdateLayerPayload_layer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateLayerPayload_layer(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44080,11 +54798,8 @@ func (ec *executionContext) _UpdateMePayload(ctx context.Context, sel ast.Select case "__typename": out.Values[i] = graphql.MarshalString("UpdateMePayload") case "me": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateMePayload_me(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateMePayload_me(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44111,11 +54826,8 @@ func (ec *executionContext) _UpdateMemberOfTeamPayload(ctx context.Context, sel case "__typename": out.Values[i] = graphql.MarshalString("UpdateMemberOfTeamPayload") case "team": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateMemberOfTeamPayload_team(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateMemberOfTeamPayload_team(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44142,11 +54854,8 @@ func (ec *executionContext) _UpdateTagPayload(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("UpdateTagPayload") case "tag": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateTagPayload_tag(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateTagPayload_tag(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44173,11 +54882,8 @@ func (ec *executionContext) _UpdateTeamPayload(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("UpdateTeamPayload") case "team": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateTeamPayload_team(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateTeamPayload_team(ctx, field, obj) 
if out.Values[i] == graphql.Null { invalids++ @@ -44204,11 +54910,8 @@ func (ec *executionContext) _UpdateWidgetAlignSystemPayload(ctx context.Context, case "__typename": out.Values[i] = graphql.MarshalString("UpdateWidgetAlignSystemPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateWidgetAlignSystemPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateWidgetAlignSystemPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44235,21 +54938,15 @@ func (ec *executionContext) _UpdateWidgetPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("UpdateWidgetPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateWidgetPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateWidgetPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "sceneWidget": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpdateWidgetPayload_sceneWidget(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpdateWidgetPayload_sceneWidget(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44276,21 +54973,15 @@ func (ec *executionContext) _UpgradePluginPayload(ctx context.Context, sel ast.S case "__typename": out.Values[i] = graphql.MarshalString("UpgradePluginPayload") case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpgradePluginPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UpgradePluginPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "scenePlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UpgradePluginPayload_scenePlugin(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + 
out.Values[i] = ec._UpgradePluginPayload_scenePlugin(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44317,31 +55008,22 @@ func (ec *executionContext) _UploadPluginPayload(ctx context.Context, sel ast.Se case "__typename": out.Values[i] = graphql.MarshalString("UploadPluginPayload") case "plugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UploadPluginPayload_plugin(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UploadPluginPayload_plugin(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "scene": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UploadPluginPayload_scene(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UploadPluginPayload_scene(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "scenePlugin": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._UploadPluginPayload_scenePlugin(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._UploadPluginPayload_scenePlugin(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44368,31 +55050,22 @@ func (ec *executionContext) _User(ctx context.Context, sel ast.SelectionSet, obj case "__typename": out.Values[i] = graphql.MarshalString("User") case "id": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._User_id(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._User_id(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._User_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._User_name(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "email": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._User_email(ctx, field, obj) - } - out.Values[i] 
= innerFunc(ctx) + out.Values[i] = ec._User_email(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44419,18 +55092,12 @@ func (ec *executionContext) _WidgetAlignSystem(ctx context.Context, sel ast.Sele case "__typename": out.Values[i] = graphql.MarshalString("WidgetAlignSystem") case "inner": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetAlignSystem_inner(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetAlignSystem_inner(ctx, field, obj) case "outer": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetAlignSystem_outer(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetAlignSystem_outer(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44454,21 +55121,15 @@ func (ec *executionContext) _WidgetArea(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("WidgetArea") case "widgetIds": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetArea_widgetIds(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetArea_widgetIds(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "align": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetArea_align(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetArea_align(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44495,21 +55156,15 @@ func (ec *executionContext) _WidgetExtendable(ctx context.Context, sel ast.Selec case "__typename": out.Values[i] = graphql.MarshalString("WidgetExtendable") case "vertically": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetExtendable_vertically(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetExtendable_vertically(ctx, field, obj) if 
out.Values[i] == graphql.Null { invalids++ } case "horizontally": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetExtendable_horizontally(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetExtendable_horizontally(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44536,41 +55191,29 @@ func (ec *executionContext) _WidgetLayout(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("WidgetLayout") case "extendable": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLayout_extendable(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLayout_extendable(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "extended": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLayout_extended(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLayout_extended(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "floating": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLayout_floating(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLayout_floating(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "defaultLocation": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLayout_defaultLocation(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLayout_defaultLocation(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44594,31 +55237,22 @@ func (ec *executionContext) _WidgetLocation(ctx context.Context, sel ast.Selecti case "__typename": out.Values[i] = graphql.MarshalString("WidgetLocation") case "zone": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLocation_zone(ctx, field, 
obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLocation_zone(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "section": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLocation_section(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLocation_section(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "area": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetLocation_area(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetLocation_area(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44645,25 +55279,16 @@ func (ec *executionContext) _WidgetSection(ctx context.Context, sel ast.Selectio case "__typename": out.Values[i] = graphql.MarshalString("WidgetSection") case "top": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetSection_top(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetSection_top(ctx, field, obj) case "middle": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetSection_middle(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetSection_middle(ctx, field, obj) case "bottom": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetSection_bottom(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetSection_bottom(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44687,25 +55312,16 @@ func (ec *executionContext) _WidgetZone(ctx context.Context, sel ast.SelectionSe case "__typename": out.Values[i] = graphql.MarshalString("WidgetZone") case "left": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetZone_left(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec._WidgetZone_left(ctx, field, obj) case "center": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetZone_center(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetZone_center(ctx, field, obj) case "right": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec._WidgetZone_right(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec._WidgetZone_right(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44729,48 +55345,33 @@ func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("__Directive") case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Directive_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Directive_name(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Directive_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Directive_description(ctx, field, obj) case "locations": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Directive_locations(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Directive_locations(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "args": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Directive_args(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Directive_args(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "isRepeatable": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Directive_isRepeatable(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = 
ec.___Directive_isRepeatable(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -44797,38 +55398,26 @@ func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionS case "__typename": out.Values[i] = graphql.MarshalString("__EnumValue") case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___EnumValue_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___EnumValue_name(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___EnumValue_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___EnumValue_description(ctx, field, obj) case "isDeprecated": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___EnumValue_isDeprecated(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "deprecationReason": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___EnumValue_deprecationReason(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44852,58 +55441,40 @@ func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("__Field") case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Field_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Field_name(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Field_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) 
+ out.Values[i] = ec.___Field_description(ctx, field, obj) case "args": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Field_args(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Field_args(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Field_type(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Field_type(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "isDeprecated": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Field_isDeprecated(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "deprecationReason": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Field_deprecationReason(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44927,38 +55498,26 @@ func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.Selection case "__typename": out.Values[i] = graphql.MarshalString("__InputValue") case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___InputValue_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___InputValue_name(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___InputValue_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___InputValue_description(ctx, field, obj) case "type": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___InputValue_type(ctx, field, obj) - } 
- out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___InputValue_type(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "defaultValue": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___InputValue_defaultValue(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -44982,52 +55541,34 @@ func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, case "__typename": out.Values[i] = graphql.MarshalString("__Schema") case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Schema_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Schema_description(ctx, field, obj) case "types": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Schema_types(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Schema_types(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "queryType": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Schema_queryType(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Schema_queryType(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "mutationType": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Schema_mutationType(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) case "subscriptionType": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Schema_subscriptionType(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) case "directives": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return 
ec.___Schema_directives(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Schema_directives(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ @@ -45054,77 +55595,47 @@ func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, o case "__typename": out.Values[i] = graphql.MarshalString("__Type") case "kind": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_kind(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_kind(ctx, field, obj) if out.Values[i] == graphql.Null { invalids++ } case "name": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_name(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_name(ctx, field, obj) case "description": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_description(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_description(ctx, field, obj) case "fields": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_fields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_fields(ctx, field, obj) case "interfaces": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_interfaces(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_interfaces(ctx, field, obj) case "possibleTypes": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_possibleTypes(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) case "enumValues": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_enumValues(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_enumValues(ctx, field, obj) case "inputFields": - innerFunc := 
func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_inputFields(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_inputFields(ctx, field, obj) case "ofType": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_ofType(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_ofType(ctx, field, obj) case "specifiedByURL": - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - return ec.___Type_specifiedByURL(ctx, field, obj) - } - out.Values[i] = innerFunc(ctx) + out.Values[i] = ec.___Type_specifiedByURL(ctx, field, obj) default: panic("unknown field " + strconv.Quote(field.Name)) @@ -45232,7 +55743,7 @@ func (ec *executionContext) marshalNAsset2แš•แš–githubแš—comแš‹reearthแš‹reearth func (ec *executionContext) marshalNAsset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAsset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Asset) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45246,7 +55757,7 @@ func (ec *executionContext) marshalNAssetConnection2githubแš—comแš‹reearthแš‹ree func (ec *executionContext) marshalNAssetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45300,7 +55811,7 @@ func (ec *executionContext) marshalNAssetEdge2แš•แš–githubแš—comแš‹reearthแš‹ree func (ec *executionContext) 
marshalNAssetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšAssetEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.AssetEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45326,7 +55837,7 @@ func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.Se res := graphql.MarshalBoolean(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -45379,7 +55890,7 @@ func (ec *executionContext) marshalNCluster2แš•แš–githubแš—comแš‹reearthแš‹reear func (ec *executionContext) marshalNCluster2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšCluster(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Cluster) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45430,7 +55941,7 @@ func (ec *executionContext) marshalNCursor2githubแš—comแš‹reearthแš‹reearthแš‘ba res := gqlmodel.MarshalCursor(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -45521,7 +56032,7 @@ func (ec *executionContext) marshalNDataset2แš•แš–githubแš—comแš‹reearthแš‹reear func (ec *executionContext) marshalNDataset2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDataset(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Dataset) 
graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45535,7 +56046,7 @@ func (ec *executionContext) marshalNDatasetConnection2githubแš—comแš‹reearthแš‹r func (ec *executionContext) marshalNDatasetConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45589,7 +56100,7 @@ func (ec *executionContext) marshalNDatasetEdge2แš•แš–githubแš—comแš‹reearthแš‹r func (ec *executionContext) marshalNDatasetEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45643,7 +56154,7 @@ func (ec *executionContext) marshalNDatasetField2แš•แš–githubแš—comแš‹reearthแš‹ func (ec *executionContext) marshalNDatasetField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45735,7 +56246,7 @@ func (ec *executionContext) 
marshalNDatasetSchema2แš•แš–githubแš—comแš‹reearth func (ec *executionContext) marshalNDatasetSchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchema) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45749,7 +56260,7 @@ func (ec *executionContext) marshalNDatasetSchemaConnection2githubแš—comแš‹reear func (ec *executionContext) marshalNDatasetSchemaConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45803,7 +56314,7 @@ func (ec *executionContext) marshalNDatasetSchemaEdge2แš•แš–githubแš—comแš‹reear func (ec *executionContext) marshalNDatasetSchemaEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.DatasetSchemaEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45857,7 +56368,7 @@ func (ec *executionContext) marshalNDatasetSchemaField2แš•แš–githubแš—comแš‹reea func (ec *executionContext) marshalNDatasetSchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšDatasetSchemaField(ctx context.Context, sel ast.SelectionSet, v 
*gqlmodel.DatasetSchemaField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -45873,7 +56384,7 @@ func (ec *executionContext) marshalNDateTime2timeแšTime(ctx context.Context, se res := graphql.MarshalTime(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -45913,7 +56424,7 @@ func (ec *executionContext) marshalNFileSize2int64(ctx context.Context, sel ast. res := graphql.MarshalInt64(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -45928,7 +56439,7 @@ func (ec *executionContext) marshalNFloat2float64(ctx context.Context, sel ast.S res := graphql.MarshalFloatContext(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return graphql.WrapContextMarshaler(ctx, res) @@ -45944,7 +56455,7 @@ func (ec *executionContext) marshalNID2githubแš—comแš‹reearthแš‹reearthแš‘backen res := graphql.MarshalString(string(v)) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -46004,7 +56515,7 @@ func (ec *executionContext) marshalNInfobox2githubแš—comแš‹reearthแš‹reearthแš‘b func (ec *executionContext) marshalNInfobox2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfobox(ctx 
context.Context, sel ast.SelectionSet, v *gqlmodel.Infobox) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46058,7 +56569,7 @@ func (ec *executionContext) marshalNInfoboxField2แš•แš–githubแš—comแš‹reearthแš‹ func (ec *executionContext) marshalNInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.InfoboxField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46079,7 +56590,7 @@ func (ec *executionContext) marshalNInt2int(ctx context.Context, sel ast.Selecti res := graphql.MarshalInt(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -46094,7 +56605,7 @@ func (ec *executionContext) marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTa res := gqlmodel.MarshalLang(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -46103,7 +56614,7 @@ func (ec *executionContext) marshalNLang2golangแš—orgแš‹xแš‹textแš‹languageแšTa func (ec *executionContext) marshalNLayer2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayer(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Layer) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be 
null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46205,7 +56716,7 @@ func (ec *executionContext) marshalNLayerEncodingFormat2githubแš—comแš‹reearth func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46215,7 +56726,7 @@ func (ec *executionContext) marshalNLayerGroup2แš–githubแš—comแš‹reearthแš‹reear func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46225,7 +56736,7 @@ func (ec *executionContext) marshalNLayerItem2แš–githubแš—comแš‹reearthแš‹reeart func (ec *executionContext) marshalNLayerTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.LayerTag) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46323,7 +56834,7 @@ func (ec *executionContext) marshalNLayerTagItem2แš•แš–githubแš—comแš‹reearthแš‹ func (ec *executionContext) 
marshalNLayerTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšLayerTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.LayerTagItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46348,7 +56859,7 @@ func (ec *executionContext) marshalNListOperation2githubแš—comแš‹reearthแš‹reear func (ec *executionContext) marshalNMe2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMe(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Me) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46402,7 +56913,7 @@ func (ec *executionContext) marshalNMergedInfoboxField2แš•แš–githubแš—comแš‹reea func (ec *executionContext) marshalNMergedInfoboxField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedInfoboxField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedInfoboxField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46456,7 +56967,7 @@ func (ec *executionContext) marshalNMergedPropertyField2แš•แš–githubแš—comแš‹ree func (ec *executionContext) marshalNMergedPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, 
"must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46510,7 +57021,7 @@ func (ec *executionContext) marshalNMergedPropertyGroup2แš•แš–githubแš—comแš‹ree func (ec *executionContext) marshalNMergedPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšMergedPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.MergedPropertyGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46583,7 +57094,7 @@ func (ec *executionContext) marshalNNodeType2githubแš—comแš‹reearthแš‹reearthแš‘ func (ec *executionContext) marshalNPageInfo2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPageInfo(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PageInfo) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46637,7 +57148,7 @@ func (ec *executionContext) marshalNPlugin2แš•แš–githubแš—comแš‹reearthแš‹reeart func (ec *executionContext) marshalNPlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Plugin) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46691,7 +57202,7 @@ func (ec *executionContext) marshalNPluginExtension2แš•แš–githubแš—comแš‹reearth func (ec *executionContext) 
marshalNPluginExtension2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginExtension(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginExtension) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46755,7 +57266,7 @@ func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearth func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginMetadata) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46803,7 +57314,7 @@ func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reear func (ec *executionContext) marshalNProject2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Project) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46817,7 +57328,7 @@ func (ec *executionContext) marshalNProjectAliasAvailability2githubแš—comแš‹reea func (ec *executionContext) marshalNProjectAliasAvailability2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectAliasAvailability(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectAliasAvailability) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46831,7 +57342,7 @@ func (ec *executionContext) marshalNProjectConnection2githubแš—comแš‹reearthแš‹r func (ec *executionContext) marshalNProjectConnection2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectConnection(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectConnection) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46885,7 +57396,7 @@ func (ec *executionContext) marshalNProjectEdge2แš•แš–githubแš—comแš‹reearthแš‹r func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProjectEdge(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ProjectEdge) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46895,7 +57406,7 @@ func (ec *executionContext) marshalNProjectEdge2แš–githubแš—comแš‹reearthแš‹reea func (ec *executionContext) marshalNProperty2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProperty(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Property) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46949,7 +57460,7 @@ func (ec *executionContext) marshalNPropertyField2แš•แš–githubแš—comแš‹reearth func (ec *executionContext) 
marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -46959,7 +57470,7 @@ func (ec *executionContext) marshalNPropertyField2แš–githubแš—comแš‹reearthแš‹re func (ec *executionContext) marshalNPropertyFieldLink2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyFieldLink(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyFieldLink) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47013,7 +57524,7 @@ func (ec *executionContext) marshalNPropertyGroup2แš•แš–githubแš—comแš‹reearth func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47023,7 +57534,7 @@ func (ec *executionContext) marshalNPropertyGroup2แš–githubแš—comแš‹reearthแš‹re func (ec *executionContext) marshalNPropertyItem2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyItem(ctx context.Context, sel ast.SelectionSet, v gqlmodel.PropertyItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - 
ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47077,7 +57588,7 @@ func (ec *executionContext) marshalNPropertyItem2แš•githubแš—comแš‹reearthแš‹ree func (ec *executionContext) marshalNPropertyLinkableFields2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertyLinkableFields(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertyLinkableFields) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47131,7 +57642,7 @@ func (ec *executionContext) marshalNPropertySchema2แš•แš–githubแš—comแš‹reearth func (ec *executionContext) marshalNPropertySchema2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchema(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchema) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47185,7 +57696,7 @@ func (ec *executionContext) marshalNPropertySchemaField2แš•แš–githubแš—comแš‹ree func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaField(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaField) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47195,7 +57706,7 @@ func (ec *executionContext) marshalNPropertySchemaField2แš–githubแš—comแš‹reeart func (ec 
*executionContext) marshalNPropertySchemaFieldChoice2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaFieldChoice(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaFieldChoice) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47249,7 +57760,7 @@ func (ec *executionContext) marshalNPropertySchemaGroup2แš•แš–githubแš—comแš‹ree func (ec *executionContext) marshalNPropertySchemaGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPropertySchemaGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PropertySchemaGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47344,7 +57855,7 @@ func (ec *executionContext) marshalNRole2githubแš—comแš‹reearthแš‹reearthแš‘back func (ec *executionContext) marshalNScene2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScene(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Scene) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47398,7 +57909,7 @@ func (ec *executionContext) marshalNScenePlugin2แš•แš–githubแš—comแš‹reearthแš‹r func (ec *executionContext) marshalNScenePlugin2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšScenePlugin(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.ScenePlugin) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47452,7 +57963,7 @@ func (ec *executionContext) marshalNSceneWidget2แš•แš–githubแš—comแš‹reearthแš‹r func (ec *executionContext) marshalNSceneWidget2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšSceneWidget(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.SceneWidget) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47473,7 +57984,7 @@ func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.S res := graphql.MarshalString(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -47519,7 +58030,7 @@ func (ec *executionContext) unmarshalNSyncDatasetInput2githubแš—comแš‹reearthแš‹ func (ec *executionContext) marshalNTag2githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTag(ctx context.Context, sel ast.SelectionSet, v gqlmodel.Tag) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47573,7 +58084,7 @@ func (ec *executionContext) marshalNTag2แš•githubแš—comแš‹reearthแš‹reearthแš‘ba func (ec *executionContext) marshalNTagGroup2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagGroup(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagGroup) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, 
graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47627,7 +58138,7 @@ func (ec *executionContext) marshalNTagItem2แš•แš–githubแš—comแš‹reearthแš‹reear func (ec *executionContext) marshalNTagItem2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTagItem(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TagItem) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47685,7 +58196,7 @@ func (ec *executionContext) marshalNTeam2แš•แš–githubแš—comแš‹reearthแš‹reearth func (ec *executionContext) marshalNTeam2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeam(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.Team) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47739,7 +58250,7 @@ func (ec *executionContext) marshalNTeamMember2แš•แš–githubแš—comแš‹reearthแš‹re func (ec *executionContext) marshalNTeamMember2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšTeamMember(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.TeamMember) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47862,7 +58373,7 @@ func (ec *executionContext) marshalNUpload2githubแš—comแš‹99designsแš‹gqlgenแš‹g res := graphql.MarshalUpload(v) if res == graphql.Null { if 
!graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -47881,7 +58392,7 @@ func (ec *executionContext) unmarshalNUploadPluginInput2githubแš—comแš‹reearth func (ec *executionContext) marshalNUser2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšUser(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.User) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -47931,7 +58442,7 @@ func (ec *executionContext) marshalNWidgetAreaType2githubแš—comแš‹reearthแš‹reea func (ec *executionContext) marshalNWidgetExtendable2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšWidgetExtendable(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.WidgetExtendable) graphql.Marshaler { if v == nil { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -48020,7 +58531,7 @@ func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Conte res := graphql.MarshalString(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res @@ -48194,7 +58705,7 @@ func (ec *executionContext) marshalN__Type2แš•githubแš—comแš‹99designsแš‹gqlgen func (ec *executionContext) marshalN__Type2แš–githubแš—comแš‹99designsแš‹gqlgenแš‹graphqlแš‹introspectionแšType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { if v == nil { if 
!graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } return graphql.Null } @@ -48210,7 +58721,7 @@ func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel a res := graphql.MarshalString(v) if res == graphql.Null { if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "must not be null") + ec.Errorf(ctx, "the requested element is null which the schema does not allow") } } return res diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index b4de11063..226d7495c 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -1633,6 +1633,7 @@ const ( PropertySchemaFieldUIVideo PropertySchemaFieldUI = "VIDEO" PropertySchemaFieldUIFile PropertySchemaFieldUI = "FILE" PropertySchemaFieldUICameraPose PropertySchemaFieldUI = "CAMERA_POSE" + PropertySchemaFieldUIDatetime PropertySchemaFieldUI = "DATETIME" ) var AllPropertySchemaFieldUI = []PropertySchemaFieldUI{ @@ -1646,11 +1647,12 @@ var AllPropertySchemaFieldUI = []PropertySchemaFieldUI{ PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, PropertySchemaFieldUICameraPose, + PropertySchemaFieldUIDatetime, } func (e PropertySchemaFieldUI) IsValid() bool { switch e { - case PropertySchemaFieldUILayer, PropertySchemaFieldUIMultiline, PropertySchemaFieldUISelection, PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, PropertySchemaFieldUISlider, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, PropertySchemaFieldUICameraPose: + case PropertySchemaFieldUILayer, PropertySchemaFieldUIMultiline, PropertySchemaFieldUISelection, PropertySchemaFieldUIColor, PropertySchemaFieldUIRange, PropertySchemaFieldUISlider, PropertySchemaFieldUIImage, PropertySchemaFieldUIVideo, PropertySchemaFieldUIFile, 
PropertySchemaFieldUICameraPose, PropertySchemaFieldUIDatetime: return true } return false diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 6cc00d3ed..342da3a2c 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -292,7 +292,67 @@ extensions: type: bool title: Animation defaultValue: false - description: Enables anmation. If enabled, each 3D models can animate. + description: Enables animation play. If enabled, each 3D models can animate. + - id: visible + type: bool + title: Timeline + description: Whether the timeline UI is displayed or not + - id: current + type: string + title: Current time + ui: datetime + - id: start + type: string + title: Start time + description: If nothing is set, it will be set automatically according to the data being displayed. + ui: datetime + - id: stop + type: string + title: Stop time + description: If nothing is set, it will be set automatically according to the data being displayed. + ui: datetime + - id: stepType + type: string + title: Tick type + defaultValue: rate + description: How to specify the playback speed + choices: + - key: rate + label: Rate + - key: fixed + label: Fixed + - id: multiplier + type: number + title: Multiplier + description: Specifies the playback speed as a multiple of the real time speed. Negative values can also be specified. Default is 1x. + defaultValue: 1 + prefix: x + availableIf: + field: stepType + type: string + value: rate + - id: step + type: number + title: Step + description: Specifies the playback speed in seconds. Each time the screen is repeatedly drawn, it advances by the specified specified number of seconds. Negative values can also be specified. The default is 1 second. + defaultValue: 1 + suffix: s + availableIf: + field: stepType + type: string + value: fixed + - id: rangeType + type: string + title: Range + description: Specifies the playback speed in seconds. Negative values can also be specified. 
+ defaultValue: unbounded + choices: + - key: unbounded + label: Unbounded + - key: clamped + label: Clamped + - key: bounced + label: Bounced - id: googleAnalytics title: Google Analytics description: Set your Google Analytics tracking ID and analyze how your published project is being viewed. diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 93346de67..6bec66bfa 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -121,7 +121,38 @@ extensions: fields: animation: title: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณ - description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚3Dใƒขใƒ‡ใƒซใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใŒๅ†็”Ÿใ•ใ‚Œใ‚‹ใ‚ˆใ†ใซใชใ‚Šใพใ™ใ€‚ + description: ใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใฎๅ†็”Ÿใ‚’ๆœ‰ๅŠนใซใ—ใพใ™ใ€‚3Dใƒขใƒ‡ใƒซใฎใ‚ขใƒ‹ใƒกใƒผใ‚ทใƒงใƒณใŒๅ†็”Ÿใ•ใ‚Œใ‚‹ใ‚ˆใ†ใซใชใ‚Šใพใ™ใ€‚ + visible: + title: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณ + description: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณใฎUIใ‚’่กจ็คบใ—ใพใ™ใ€‚ + current: + title: ็พๅœจๆ™‚ๅˆป + start: + title: ้–‹ๅง‹ๆ™‚ๅˆป + description: ไฝ•ใ‚‚่จญๅฎšใ•ใ‚Œใฆใ„ใชใ„ๅ ดๅˆใฏ่กจ็คบไธญใฎใƒ‡ใƒผใ‚ฟใซใ‚ˆใฃใฆ่‡ชๅ‹•็š„ใซ่จญๅฎšใ•ใ‚Œใพใ™ใ€‚ + stop: + title: ็ต‚ไบ†ๆ™‚ๅˆป + description: ไฝ•ใ‚‚่จญๅฎšใ•ใ‚Œใฆใ„ใชใ„ๅ ดๅˆใฏ่กจ็คบไธญใฎใƒ‡ใƒผใ‚ฟใซใ‚ˆใฃใฆ่‡ชๅ‹•็š„ใซ่จญๅฎšใ•ใ‚Œใพใ™ใ€‚ + stepType: + title: ๅ†็”Ÿ้€Ÿๅบฆใฎ็จฎ้กž + description: ๅ†็”Ÿ้€ŸๅบฆใฎๆŒ‡ๅฎšๆ–นๆณ•ใงใ™ใ€‚ + choices: + rate: ๅ€็Ž‡ + fixed: ๅ›บๅฎš + multiplier: + title: ๅ†็”Ÿ้€Ÿๅบฆ + description: ๅ†็”Ÿ้€Ÿๅบฆใ‚’็พๅฎŸใฎๆ™‚้–“ใฎ้€Ÿใ•ใซๅฏพใ™ใ‚‹ๅ€็Ž‡ใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚ใƒžใ‚คใƒŠใ‚นๅ€คใ‚‚ๆŒ‡ๅฎšๅฏ่ƒฝใงใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ1ๅ€ใงใ™ใ€‚ + step: + title: ๅ†็”Ÿ้€Ÿๅบฆ + description: ๅ†็”Ÿ้€Ÿๅบฆใ‚’็ง’ใงๆŒ‡ๅฎšใ—ใพใ™ใ€‚็”ป้ขใฎๆ็”ปใŒ็นฐใ‚Š่ฟ”ใ•ใ‚Œใ‚‹ๅบฆใซๆŒ‡ๅฎšใ—ใŸๆŒ‡ๅฎšใ—ใŸ็ง’ๆ•ฐๅˆ†้€ฒใฟใพใ™ใ€‚ใƒžใ‚คใƒŠใ‚นๅ€คใ‚‚ๆŒ‡ๅฎšๅฏ่ƒฝใงใ™ใ€‚ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆใฏ1็ง’ใงใ™ใ€‚ + suffix: ็ง’ + rangeType: + title: ๅ†็”Ÿ็ต‚ไบ†ๆ™‚ + description: ็พๅœจๆ™‚ๅˆปใŒ็ต‚ไบ†ๆ™‚ๅˆปใซ้”ใ—ใŸๆ™‚ใฎๅ‡ฆ็†ๆ–นๆณ•ใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + 
choices: + unbounded: ๅ†็”Ÿใ‚’็ถšใ‘ใ‚‹ + clamped: ๅœๆญขใ™ใ‚‹ + bounced: ใƒใ‚ฆใƒณใ‚น googleAnalytics: title: Google Analytics description: Google Analyticsใ‚’ๆœ‰ๅŠนใซใ™ใ‚‹ใ“ใจใงใ€ๅ…ฌ้–‹ใƒšใƒผใ‚ธใŒใฉใฎใ‚ˆใ†ใซ้–ฒ่ฆงใ•ใ‚Œใฆใ„ใ‚‹ใ‹ใ‚’ๅˆ†ๆžใ™ใ‚‹ใ“ใจใŒๅฏ่ƒฝใงใ™ใ€‚ diff --git a/pkg/plugin/manifest/parser_translation_test.go b/pkg/plugin/manifest/parser_translation_test.go index 84ff8066a..db565f2e2 100644 --- a/pkg/plugin/manifest/parser_translation_test.go +++ b/pkg/plugin/manifest/parser_translation_test.go @@ -5,33 +5,36 @@ import ( "strings" "testing" + "github.com/samber/lo" "github.com/stretchr/testify/assert" ) //go:embed testdata/translation.yml var translatedManifest string var expected = TranslationRoot{ - Description: sr("test plugin desc"), + Description: lo.ToPtr("test plugin desc"), Extensions: map[string]TranslationExtension{ "test_ext": { - Name: sr("test ext name"), + Name: lo.ToPtr("test ext name"), PropertySchema: TranslationPropertySchema{ "test_ps": TranslationPropertySchemaGroup{ - Description: sr("test ps desc"), + Description: lo.ToPtr("test ps desc"), Fields: map[string]TranslationPropertySchemaField{ "test_field": { Choices: map[string]string{ "test_key": "test choice value"}, - Description: sr("test field desc"), - Title: sr("test field name"), + Description: lo.ToPtr("test field desc"), + Title: lo.ToPtr("test field name"), + Prefix: lo.ToPtr("P"), + Suffix: lo.ToPtr("S"), }, }, - Title: sr("test ps title"), + Title: lo.ToPtr("test ps title"), }, }, }, }, - Name: sr("test plugin name"), + Name: lo.ToPtr("test plugin name"), Schema: nil, } @@ -144,7 +147,3 @@ func TestMustParseTransSystemFromBytes(t *testing.T) { }) } } - -func sr(s string) *string { - return &s -} diff --git a/pkg/plugin/manifest/schema_translation.go b/pkg/plugin/manifest/schema_translation.go index acd606e2b..5ece56ac1 100644 --- a/pkg/plugin/manifest/schema_translation.go +++ b/pkg/plugin/manifest/schema_translation.go @@ -12,8 +12,10 @@ type 
TranslationPropertySchema map[string]TranslationPropertySchemaGroup type TranslationPropertySchemaField struct { Choices map[string]string `json:"choices,omitempty"` - Description *string `json:"description,omitempty"` Title *string `json:"title,omitempty"` + Description *string `json:"description,omitempty"` + Prefix *string `json:"prefix,omitempty"` + Suffix *string `json:"suffix,omitempty"` } type TranslationPropertySchemaGroup struct { @@ -43,6 +45,8 @@ type TranslatedPropertySchemaField struct { Choices map[string]i18n.String Description i18n.String Title i18n.String + Prefix i18n.String + Suffix i18n.String } type TranslatedPropertySchemaGroup struct { @@ -102,6 +106,7 @@ func (tm TranslationMap) TranslatedRef() *TranslatedRoot { t := tm.Translated() return &t } + func (t TranslationRoot) propertySchema(eid string) (res TranslationPropertySchema) { if eid == "" { return t.Schema @@ -239,6 +244,20 @@ func (t *TranslatedPropertySchema) setPropertySchema(schemas map[string]Translat tf.Description[l] = *f.Description } + if f.Prefix != nil { + if tf.Prefix == nil { + tf.Prefix = i18n.String{} + } + tf.Prefix[l] = *f.Prefix + } + + if f.Suffix != nil { + if tf.Suffix == nil { + tf.Suffix = i18n.String{} + } + tf.Suffix[l] = *f.Suffix + } + if len(f.Choices) > 0 { if tf.Choices == nil { tf.Choices = map[string]i18n.String{} diff --git a/pkg/plugin/manifest/schema_translation_test.go b/pkg/plugin/manifest/schema_translation_test.go index ba158d2c3..b1934cc3d 100644 --- a/pkg/plugin/manifest/schema_translation_test.go +++ b/pkg/plugin/manifest/schema_translation_test.go @@ -4,22 +4,28 @@ import ( "testing" "github.com/reearth/reearth-backend/pkg/i18n" + "github.com/samber/lo" "github.com/stretchr/testify/assert" ) func TestTranslationMap_Translated(t *testing.T) { m := TranslationMap{ "en": TranslationRoot{ - Name: sr("Name"), - Description: sr("desc"), + Name: lo.ToPtr("Name"), + Description: lo.ToPtr("desc"), Extensions: map[string]TranslationExtension{ "a": { - 
Name: sr("ext"), + Name: lo.ToPtr("ext"), PropertySchema: TranslationPropertySchema{ "default": { Fields: map[string]TranslationPropertySchemaField{ - "foo": {Title: sr("foo"), Choices: map[string]string{"A": "AAA", "B": "BBB"}}, - "hoge": {Title: sr("hoge")}, + "foo": { + Title: lo.ToPtr("foo"), + Choices: map[string]string{"A": "AAA", "B": "BBB"}, + Prefix: lo.ToPtr("P"), + Suffix: lo.ToPtr("S"), + }, + "hoge": {Title: lo.ToPtr("hoge")}, }, }, }, @@ -34,42 +40,43 @@ func TestTranslationMap_Translated(t *testing.T) { }, }, "ja": TranslationRoot{ - Name: sr("ๅๅ‰"), + Name: lo.ToPtr("ๅๅ‰"), Extensions: map[string]TranslationExtension{ "a": { - Name: sr("extJA"), - Description: sr("DESC!"), + Name: lo.ToPtr("extJA"), + Description: lo.ToPtr("DESC!"), PropertySchema: TranslationPropertySchema{ "default": { Fields: map[string]TranslationPropertySchemaField{ "foo": { - Title: sr("foo!"), - Description: sr("DESC"), + Title: lo.ToPtr("foo!"), + Description: lo.ToPtr("DESC"), Choices: map[string]string{"B": "BBB!", "C": "CCC!"}, + Prefix: lo.ToPtr("p"), }, - "bar": {Title: sr("bar!")}, + "bar": {Title: lo.ToPtr("bar!")}, }, }, }, }, "b": { - Name: sr("ext2"), + Name: lo.ToPtr("ext2"), PropertySchema: TranslationPropertySchema{}, }, }, Schema: TranslationPropertySchema{ "default": { Fields: map[string]TranslationPropertySchemaField{ - "a": {Title: sr("ใ‚")}, + "a": {Title: lo.ToPtr("ใ‚")}, }, }, }, }, "zh-CN": TranslationRoot{ - Name: sr("ๅ‘ฝๅ"), + Name: lo.ToPtr("ๅ‘ฝๅ"), Schema: TranslationPropertySchema{ "another": { - Description: sr("ๆ่ฟฐ"), + Description: lo.ToPtr("ๆ่ฟฐ"), }, }, }, @@ -93,6 +100,8 @@ func TestTranslationMap_Translated(t *testing.T) { "B": {"en": "BBB", "ja": "BBB!"}, "C": {"ja": "CCC!"}, }, + Prefix: i18n.String{"en": "P", "ja": "p"}, + Suffix: i18n.String{"en": "S"}, }, "hoge": { Title: i18n.String{"en": "hoge"}, @@ -177,12 +186,12 @@ func TestTranslatedPropertySchema_setPropertySchema(t *testing.T) { "en": { "a": { Fields: 
map[string]TranslationPropertySchemaField{ - "f": {Title: sr("F")}, + "f": {Title: lo.ToPtr("F")}, }, }, - "b": {Title: sr("B")}, + "b": {Title: lo.ToPtr("B")}, }, - "ja": {"a": {Title: sr("A")}}, + "ja": {"a": {Title: lo.ToPtr("A")}}, }) assert.Equal(t, expected, target) } diff --git a/pkg/plugin/manifest/testdata/translation.yml b/pkg/plugin/manifest/testdata/translation.yml index 553a4a067..d7bbfded9 100644 --- a/pkg/plugin/manifest/testdata/translation.yml +++ b/pkg/plugin/manifest/testdata/translation.yml @@ -19,6 +19,8 @@ "title": "test field name", "description": "test field desc", "choices": { "test_key": "test choice value" }, + "prefix": "P", + "suffix": "S", }, }, }, diff --git a/pkg/property/schema_field_ui.go b/pkg/property/schema_field_ui.go index 9deab26ad..1bdddcb40 100644 --- a/pkg/property/schema_field_ui.go +++ b/pkg/property/schema_field_ui.go @@ -13,6 +13,7 @@ const ( SchemaFieldUIFile SchemaFieldUI = "file" SchemaFieldUILayer SchemaFieldUI = "layer" SchemaFieldUICameraPose SchemaFieldUI = "camera_pose" + SchemaFieldUIDatetTime SchemaFieldUI = "datetime" // DON'T FORGET ADDING A NEW UI TO schemaFieldUIs ALSO! ) @@ -28,6 +29,7 @@ var ( SchemaFieldUIFile, SchemaFieldUILayer, SchemaFieldUICameraPose, + SchemaFieldUIDatetTime, // DON'T FORGET ADDING A NEW UI HERE ALSO! 
} ) diff --git a/schema.graphql b/schema.graphql index 5c43a54a2..9722657c5 100644 --- a/schema.graphql +++ b/schema.graphql @@ -476,6 +476,7 @@ enum PropertySchemaFieldUI { VIDEO FILE CAMERA_POSE + DATETIME } type PropertySchemaFieldChoice { diff --git a/schemas/plugin_manifest.json b/schemas/plugin_manifest.json index b681c9202..0d2b089fd 100644 --- a/schemas/plugin_manifest.json +++ b/schemas/plugin_manifest.json @@ -143,7 +143,8 @@ "image", "video", "file", - "camera_pose" + "camera_pose", + "datetime" ] }, "min": { @@ -439,4 +440,4 @@ } }, "$ref": "#/definitions/root" -} \ No newline at end of file +} diff --git a/schemas/plugin_manifest_translation.json b/schemas/plugin_manifest_translation.json index 30dad9d84..42e752458 100644 --- a/schemas/plugin_manifest_translation.json +++ b/schemas/plugin_manifest_translation.json @@ -20,6 +20,18 @@ "null" ] }, + "prefix": { + "type": [ + "string", + "null" + ] + }, + "suffix": { + "type": [ + "string", + "null" + ] + }, "choices": { "type": "object", "additionalProperties": false, From e444e4e4ca2d8230f170862c2b32a55db4a4371e Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 3 Jun 2022 18:47:48 +0900 Subject: [PATCH 221/253] fix: assets are not saved when files are uploaded (#155) --- internal/infrastructure/memory/container.go | 6 +- internal/infrastructure/memory/transaction.go | 11 +-- .../infrastructure/memory/transaction_test.go | 4 +- internal/usecase/interactor/asset.go | 14 +++- internal/usecase/interactor/asset_test.go | 69 +++++++++++++++++++ internal/usecase/interactor/layer_test.go | 2 +- .../usecase/interactor/plugin_upload_test.go | 6 +- internal/usecase/interactor/property_test.go | 6 +- internal/usecase/interactor/team_test.go | 2 +- internal/usecase/interactor/usecase_test.go | 2 +- .../usecase/interactor/user_signup_test.go | 4 +- pkg/util/now.go | 10 +++ 12 files changed, 112 insertions(+), 24 deletions(-) create mode 100644 internal/usecase/interactor/asset_test.go create mode 100644 
pkg/util/now.go diff --git a/internal/infrastructure/memory/container.go b/internal/infrastructure/memory/container.go index 888479b8f..c8ec4fe41 100644 --- a/internal/infrastructure/memory/container.go +++ b/internal/infrastructure/memory/container.go @@ -4,10 +4,8 @@ import ( "github.com/reearth/reearth-backend/internal/usecase/repo" ) -func InitRepos(c *repo.Container) *repo.Container { - if c == nil { - c = &repo.Container{} - } +func New() *repo.Container { + c := &repo.Container{} c.Asset = NewAsset() c.Config = NewConfig() c.DatasetSchema = NewDatasetSchema() diff --git a/internal/infrastructure/memory/transaction.go b/internal/infrastructure/memory/transaction.go index c66250ccc..6c57908db 100644 --- a/internal/infrastructure/memory/transaction.go +++ b/internal/infrastructure/memory/transaction.go @@ -43,13 +43,16 @@ func (t *Transaction) Begin() (repo.Tx, error) { func (t *Tx) Commit() { t.committed = true - if t.t != nil { - t.t.committed++ - } } func (t *Tx) End(_ context.Context) error { - return t.enderror + if t.enderror != nil { + return t.enderror + } + if t.t != nil && t.committed { + t.t.committed++ + } + return nil } func (t *Tx) IsCommitted() bool { diff --git a/internal/infrastructure/memory/transaction_test.go b/internal/infrastructure/memory/transaction_test.go index b8df60e2a..47a3bebce 100644 --- a/internal/infrastructure/memory/transaction_test.go +++ b/internal/infrastructure/memory/transaction_test.go @@ -14,9 +14,9 @@ func TestTransaction_Committed(t *testing.T) { assert.NoError(t, err) assert.Equal(t, 0, tr.Committed()) tx.Commit() - assert.Equal(t, 1, tr.Committed()) + assert.Equal(t, 0, tr.Committed()) assert.NoError(t, tx.End(context.Background())) - assert.NoError(t, err) + assert.Equal(t, 1, tr.Committed()) } func TestTransaction_SetBeginError(t *testing.T) { diff --git a/internal/usecase/interactor/asset.go b/internal/usecase/interactor/asset.go index 5d90c94a9..288ca548a 100644 --- a/internal/usecase/interactor/asset.go +++ 
b/internal/usecase/interactor/asset.go @@ -47,11 +47,10 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope if inp.File == nil { return nil, interfaces.ErrFileNotIncluded } - return Run1( ctx, operator, i.repos, Usecase(). - WithReadableTeams(inp.TeamID). + WithWritableTeams(inp.TeamID). Transaction(), func() (*asset.Asset, error) { url, err := i.gateways.File.UploadAsset(ctx, inp.File) @@ -59,13 +58,22 @@ func (i *Asset) Create(ctx context.Context, inp interfaces.CreateAssetParam, ope return nil, err } - return asset.New(). + a, err := asset.New(). NewID(). Team(inp.TeamID). Name(path.Base(inp.File.Path)). Size(inp.File.Size). URL(url.String()). Build() + if err != nil { + return nil, err + } + + if err := i.repos.Asset.Save(ctx, a); err != nil { + return nil, err + } + + return a, nil }) } diff --git a/internal/usecase/interactor/asset_test.go b/internal/usecase/interactor/asset_test.go new file mode 100644 index 000000000..2570db134 --- /dev/null +++ b/internal/usecase/interactor/asset_test.go @@ -0,0 +1,69 @@ +package interactor + +import ( + "bytes" + "context" + "io" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/asset" + "github.com/reearth/reearth-backend/pkg/file" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestAsset_Create(t *testing.T) { + ctx := context.Background() + tid := asset.NewTeamID() + aid := asset.NewID() + newID := asset.NewID + asset.NewID = func() asset.ID { return aid } + t.Cleanup(func() { asset.NewID = newID }) + + mfs := afero.NewMemMapFs() + f, _ := fs.NewFile(mfs, "") + repos := memory.New() + 
transaction := memory.NewTransaction() + repos.Transaction = transaction + uc := &Asset{ + repos: repos, + gateways: &gateway.Container{ + File: f, + }, + } + buf := bytes.NewBufferString("Hello") + buflen := int64(buf.Len()) + res, err := uc.Create(ctx, interfaces.CreateAssetParam{ + TeamID: tid, + File: &file.File{ + Content: io.NopCloser(buf), + Path: "hoge.txt", + ContentType: "", + Size: buflen, + }, + }, &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + }) + + want := asset.New(). + ID(aid). + Team(tid). + URL(res.URL()). + CreatedAt(aid.Timestamp()). + Name("hoge.txt"). + Size(buflen). + ContentType(""). + MustBuild() + + assert.NoError(t, err) + assert.Equal(t, want, res) + assert.Equal(t, 1, transaction.Committed()) + a, _ := repos.Asset.FindByID(ctx, aid) + assert.Equal(t, want, a) +} diff --git a/internal/usecase/interactor/layer_test.go b/internal/usecase/interactor/layer_test.go index 7076886e6..1dcef48cc 100644 --- a/internal/usecase/interactor/layer_test.go +++ b/internal/usecase/interactor/layer_test.go @@ -15,7 +15,7 @@ import ( func TestCreateInfobox(t *testing.T) { ctx := context.Background() - db := memory.InitRepos(nil) + db := memory.New() scene, _ := scene.New().NewID().Team(id.NewTeamID()).Project(id.NewProjectID()).RootLayer(id.NewLayerID()).Build() _ = db.Scene.Save(ctx, scene) il := NewLayer(db) diff --git a/internal/usecase/interactor/plugin_upload_test.go b/internal/usecase/interactor/plugin_upload_test.go index 5817ed732..1f07795e4 100644 --- a/internal/usecase/interactor/plugin_upload_test.go +++ b/internal/usecase/interactor/plugin_upload_test.go @@ -89,7 +89,7 @@ func TestPlugin_Upload_New(t *testing.T) { sid := id.NewSceneID() pid := mockPluginID.WithScene(sid.Ref()) - repos := memory.InitRepos(nil) + repos := memory.New() mfs := mockFS(nil) files, err := fs.NewFile(mfs, "") assert.NoError(t, err) @@ -148,7 +148,7 @@ func TestPlugin_Upload_SameVersion(t *testing.T) { eid2 := id.PluginExtensionID("widget2") wid1 := 
id.NewWidgetID() - repos := memory.InitRepos(nil) + repos := memory.New() mfs := mockFS(map[string]string{ "plugins/" + pid.String() + "/hogehoge": "foobar", }) @@ -263,7 +263,7 @@ func TestPlugin_Upload_DiffVersion(t *testing.T) { nlpsid2 := id.NewPropertySchemaID(pid, eid2.String()) wid := id.NewWidgetID() - repos := memory.InitRepos(nil) + repos := memory.New() mfs := mockFS(map[string]string{ "plugins/" + oldpid.String() + "/hogehoge": "foobar", }) diff --git a/internal/usecase/interactor/property_test.go b/internal/usecase/interactor/property_test.go index 0db04fdf9..4377b324a 100644 --- a/internal/usecase/interactor/property_test.go +++ b/internal/usecase/interactor/property_test.go @@ -15,7 +15,7 @@ import ( func TestProperty_AddItem(t *testing.T) { ctx := context.Background() - memory := memory.InitRepos(nil) + memory := memory.New() team := id.NewTeamID() scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() @@ -67,7 +67,7 @@ func TestProperty_AddItem(t *testing.T) { func TestProperty_RemoveItem(t *testing.T) { ctx := context.Background() - memory := memory.InitRepos(nil) + memory := memory.New() team := id.NewTeamID() scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() @@ -112,7 +112,7 @@ func TestProperty_RemoveItem(t *testing.T) { func TestProperty_UpdateValue_FieldOfGroupInList(t *testing.T) { ctx := context.Background() - memory := memory.InitRepos(nil) + memory := memory.New() team := id.NewTeamID() scene := scene.New().NewID().Team(team).RootLayer(id.NewLayerID()).MustBuild() diff --git a/internal/usecase/interactor/team_test.go b/internal/usecase/interactor/team_test.go index 739bbbe3c..7a046a236 100644 --- a/internal/usecase/interactor/team_test.go +++ b/internal/usecase/interactor/team_test.go @@ -14,7 +14,7 @@ import ( func TestCreateTeam(t *testing.T) { ctx := context.Background() - db := memory.InitRepos(nil) + db := memory.New() u := 
user.New().NewID().Email("aaa@bbb.com").Team(id.NewTeamID()).MustBuild() teamUC := NewTeam(db) diff --git a/internal/usecase/interactor/usecase_test.go b/internal/usecase/interactor/usecase_test.go index 114deae46..be271148e 100644 --- a/internal/usecase/interactor/usecase_test.go +++ b/internal/usecase/interactor/usecase_test.go @@ -198,5 +198,5 @@ func TestRun(t *testing.T) { }, ) assert.Same(t, err, goterr) - assert.Equal(t, 2, tr.Committed()) // committed but fails + assert.Equal(t, 1, tr.Committed()) // fails } diff --git a/internal/usecase/interactor/user_signup_test.go b/internal/usecase/interactor/user_signup_test.go index 23b7cc520..ca0c50406 100644 --- a/internal/usecase/interactor/user_signup_test.go +++ b/internal/usecase/interactor/user_signup_test.go @@ -263,7 +263,7 @@ func TestUser_Signup(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { // t.Parallel() cannot be used - r := memory.InitRepos(nil) + r := memory.New() if tt.createUserBefore != nil { assert.NoError(t, r.User.Save( context.Background(), @@ -516,7 +516,7 @@ func TestUser_SignupOIDC(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { // t.Parallel() cannot be used - r := memory.InitRepos(nil) + r := memory.New() if tt.createUserBefore != nil { assert.NoError(t, r.User.Save( context.Background(), diff --git a/pkg/util/now.go b/pkg/util/now.go new file mode 100644 index 000000000..b4904054b --- /dev/null +++ b/pkg/util/now.go @@ -0,0 +1,10 @@ +package util + +import "time" + +var Now = time.Now + +func MockNow(t time.Time) func() { + Now = func() time.Time { return t } + return func() { Now = time.Now } +} From ab6334a13bf07d41d10832b85eb18e60a439b82b Mon Sep 17 00:00:00 2001 From: keiya sasaki <34934510+keiya01@users.noreply.github.com> Date: Fri, 3 Jun 2022 13:12:07 +0000 Subject: [PATCH 222/253] feat: add totalCount field to DatasetSchema type of GraphQL schema (#154) * feat: impl totalCount resolver to count dataset * chore: run `go generate` * Update 
internal/infrastructure/memory/dataset.go Co-authored-by: rot1024 * Update internal/infrastructure/mongo/dataset.go Co-authored-by: rot1024 Co-authored-by: rot1024 --- gqlgen.yml | 4 + internal/adapter/gql/generated.go | 109 ++++++++++++++++++ internal/adapter/gql/gqlmodel/models_gen.go | 1 + internal/adapter/gql/loader_dataset.go | 14 +++ .../adapter/gql/resolver_dataset_schema.go | 4 + internal/infrastructure/memory/dataset.go | 15 +++ internal/infrastructure/mongo/dataset.go | 10 ++ internal/usecase/interactor/dataset.go | 4 + internal/usecase/interfaces/dataset.go | 1 + internal/usecase/repo/dataset.go | 1 + schema.graphql | 1 + 11 files changed, 164 insertions(+) diff --git a/gqlgen.yml b/gqlgen.yml index cece7ccde..a0cb9a345 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -27,3 +27,7 @@ models: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.Lang ID: model: github.com/reearth/reearth-backend/internal/adapter/gql/gqlmodel.ID + DatasetSchema: + fields: + totalCount: + resolver: true diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index 8cb4415b2..c593ad301 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -246,6 +246,7 @@ type ComplexityRoot struct { Scene func(childComplexity int) int SceneID func(childComplexity int) int Source func(childComplexity int) int + TotalCount func(childComplexity int) int } DatasetSchemaConnection struct { @@ -1080,6 +1081,8 @@ type DatasetFieldResolver interface { ValueRef(ctx context.Context, obj *gqlmodel.DatasetField) (*gqlmodel.Dataset, error) } type DatasetSchemaResolver interface { + TotalCount(ctx context.Context, obj *gqlmodel.DatasetSchema) (int, error) + Datasets(ctx context.Context, obj *gqlmodel.DatasetSchema, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) (*gqlmodel.DatasetConnection, error) Scene(ctx context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.Scene, error) RepresentativeField(ctx 
context.Context, obj *gqlmodel.DatasetSchema) (*gqlmodel.DatasetSchemaField, error) @@ -1938,6 +1941,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.DatasetSchema.Source(childComplexity), true + case "DatasetSchema.totalCount": + if e.complexity.DatasetSchema.TotalCount == nil { + break + } + + return e.complexity.DatasetSchema.TotalCount(childComplexity), true + case "DatasetSchemaConnection.edges": if e.complexity.DatasetSchemaConnection.Edges == nil { break @@ -6906,6 +6916,7 @@ type DatasetSchema implements Node { name: String! sceneId: ID! fields: [DatasetSchemaField!]! + totalCount: Int! representativeFieldId: ID dynamic: Boolean datasets( @@ -9929,6 +9940,8 @@ func (ec *executionContext) fieldContext_AddDatasetSchemaPayload_datasetSchema(c return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -9992,6 +10005,8 @@ func (ec *executionContext) fieldContext_AddDynamicDatasetPayload_datasetSchema( return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -10110,6 +10125,8 @@ func (ec *executionContext) fieldContext_AddDynamicDatasetSchemaPayload_datasetS return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case 
"dynamic": @@ -12835,6 +12852,8 @@ func (ec *executionContext) fieldContext_Dataset_schema(ctx context.Context, fie return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -13461,6 +13480,8 @@ func (ec *executionContext) fieldContext_DatasetField_schema(ctx context.Context return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -13830,6 +13851,50 @@ func (ec *executionContext) fieldContext_DatasetSchema_fields(ctx context.Contex return fc, nil } +func (ec *executionContext) _DatasetSchema_totalCount(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_DatasetSchema_totalCount(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.DatasetSchema().TotalCount(rctx, obj) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(int) + fc.Result = res + return ec.marshalNInt2int(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext_DatasetSchema_totalCount(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "DatasetSchema", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Int does not have child fields") + }, + } + return fc, nil +} + func (ec *executionContext) _DatasetSchema_representativeFieldId(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.DatasetSchema) (ret graphql.Marshaler) { fc, err := ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) if err != nil { @@ -14216,6 +14281,8 @@ func (ec *executionContext) fieldContext_DatasetSchemaConnection_nodes(ctx conte return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -14421,6 +14488,8 @@ func (ec *executionContext) fieldContext_DatasetSchemaEdge_node(ctx context.Cont return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -14745,6 +14814,8 @@ func (ec *executionContext) fieldContext_DatasetSchemaField_schema(ctx context.C return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case 
"dynamic": @@ -14808,6 +14879,8 @@ func (ec *executionContext) fieldContext_DatasetSchemaField_ref(ctx context.Cont return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -15110,6 +15183,8 @@ func (ec *executionContext) fieldContext_ImportDatasetPayload_datasetSchema(ctx return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -17938,6 +18013,8 @@ func (ec *executionContext) fieldContext_LayerGroup_linkedDatasetSchema(ctx cont return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -31542,6 +31619,8 @@ func (ec *executionContext) fieldContext_PropertyFieldLink_datasetSchema(ctx con return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -35374,6 +35453,8 @@ func (ec *executionContext) fieldContext_Query_dynamicDatasetSchemas(ctx context return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + 
return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -37160,6 +37241,8 @@ func (ec *executionContext) fieldContext_Scene_dynamicDatasetSchemas(ctx context return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -38597,6 +38680,8 @@ func (ec *executionContext) fieldContext_SyncDatasetPayload_datasetSchema(ctx co return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -39382,6 +39467,8 @@ func (ec *executionContext) fieldContext_TagItem_linkedDatasetSchema(ctx context return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -40705,6 +40792,8 @@ func (ec *executionContext) fieldContext_UpdateDatasetSchemaPayload_datasetSchem return ec.fieldContext_DatasetSchema_sceneId(ctx, field) case "fields": return ec.fieldContext_DatasetSchema_fields(ctx, field) + case "totalCount": + return ec.fieldContext_DatasetSchema_totalCount(ctx, field) case "representativeFieldId": return ec.fieldContext_DatasetSchema_representativeFieldId(ctx, field) case "dynamic": @@ -48427,6 +48516,26 @@ func (ec *executionContext) _DatasetSchema(ctx context.Context, 
sel ast.Selectio if out.Values[i] == graphql.Null { atomic.AddUint32(&invalids, 1) } + case "totalCount": + field := field + + innerFunc := func(ctx context.Context) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._DatasetSchema_totalCount(ctx, field, obj) + if res == graphql.Null { + atomic.AddUint32(&invalids, 1) + } + return res + } + + out.Concurrently(i, func() graphql.Marshaler { + return innerFunc(ctx) + + }) case "representativeFieldId": out.Values[i] = ec._DatasetSchema_representativeFieldId(ctx, field, obj) diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 226d7495c..56cedf1a2 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -316,6 +316,7 @@ type DatasetSchema struct { Name string `json:"name"` SceneID ID `json:"sceneId"` Fields []*DatasetSchemaField `json:"fields"` + TotalCount int `json:"totalCount"` RepresentativeFieldID *ID `json:"representativeFieldId"` Dynamic *bool `json:"dynamic"` Datasets *DatasetConnection `json:"datasets"` diff --git a/internal/adapter/gql/loader_dataset.go b/internal/adapter/gql/loader_dataset.go index ca476e7c7..d310aed8c 100644 --- a/internal/adapter/gql/loader_dataset.go +++ b/internal/adapter/gql/loader_dataset.go @@ -177,6 +177,20 @@ func (c *DatasetLoader) FindBySchema(ctx context.Context, dsid gqlmodel.ID, firs return conn, nil } +func (c *DatasetLoader) CountBySchema(ctx context.Context, dsid gqlmodel.ID) (int, error) { + id, err := gqlmodel.ToID[id.DatasetSchema](dsid) + if err != nil { + return 0, err + } + + cnt, err := c.usecase.CountBySchema(ctx, id) + if err != nil { + return 0, err + } + + return cnt, nil +} + // data loader type DatasetDataLoader interface { diff --git a/internal/adapter/gql/resolver_dataset_schema.go b/internal/adapter/gql/resolver_dataset_schema.go index b106e9bb9..dd6729a1c 100644 --- 
a/internal/adapter/gql/resolver_dataset_schema.go +++ b/internal/adapter/gql/resolver_dataset_schema.go @@ -38,6 +38,10 @@ func (r *datasetSchemaResolver) Datasets(ctx context.Context, obj *gqlmodel.Data return loaders(ctx).Dataset.FindBySchema(ctx, obj.ID, first, last, before, after) } +func (r *datasetSchemaResolver) TotalCount(ctx context.Context, obj *gqlmodel.DatasetSchema) (int, error) { + return loaders(ctx).Dataset.CountBySchema(ctx, obj.ID) +} + type datasetSchemaFieldResolver struct{ *Resolver } func (r *datasetSchemaFieldResolver) Schema(ctx context.Context, obj *gqlmodel.DatasetSchemaField) (*gqlmodel.DatasetSchema, error) { diff --git a/internal/infrastructure/memory/dataset.go b/internal/infrastructure/memory/dataset.go index 71634f88a..af3525b64 100644 --- a/internal/infrastructure/memory/dataset.go +++ b/internal/infrastructure/memory/dataset.go @@ -87,6 +87,21 @@ func (r *Dataset) FindBySchema(ctx context.Context, id id.DatasetSchemaID, p *us ), nil } +func (r *Dataset) CountBySchema(ctx context.Context, id id.DatasetSchemaID) (int, error) { + r.lock.Lock() + defer r.lock.Unlock() + + n := 0 + for _, dataset := range r.data { + if dataset.Schema() == id { + if r.f.CanRead(dataset.Scene()) { + n++ + } + } + } + return n, nil +} + func (r *Dataset) FindBySchemaAll(ctx context.Context, id id.DatasetSchemaID) (dataset.List, error) { r.lock.Lock() defer r.lock.Unlock() diff --git a/internal/infrastructure/mongo/dataset.go b/internal/infrastructure/mongo/dataset.go index eb53d48a0..1344fdcb7 100644 --- a/internal/infrastructure/mongo/dataset.go +++ b/internal/infrastructure/mongo/dataset.go @@ -69,6 +69,16 @@ func (r *datasetRepo) FindBySchema(ctx context.Context, schemaID id.DatasetSchem }, pagination) } +func (r *datasetRepo) CountBySchema(ctx context.Context, id id.DatasetSchemaID) (int, error) { + res, err := r.client.Count(ctx, r.readFilter(bson.M{ + "schema": id.String(), + })) + if err != nil { + return 0, err + } + return int(res), nil +} + func 
(r *datasetRepo) FindBySchemaAll(ctx context.Context, schemaID id.DatasetSchemaID) (dataset.List, error) { return r.find(ctx, nil, bson.M{ "schema": schemaID.String(), diff --git a/internal/usecase/interactor/dataset.go b/internal/usecase/interactor/dataset.go index 9ec5d54cd..46f4a5521 100644 --- a/internal/usecase/interactor/dataset.go +++ b/internal/usecase/interactor/dataset.go @@ -405,6 +405,10 @@ func (i *Dataset) FindBySchema(ctx context.Context, ds id.DatasetSchemaID, p *us return i.datasetRepo.FindBySchema(ctx, ds, p) } +func (i *Dataset) CountBySchema(ctx context.Context, id id.DatasetSchemaID) (int, error) { + return i.datasetRepo.CountBySchema(ctx, id) +} + func (i *Dataset) FindSchemaByScene(ctx context.Context, sid id.SceneID, p *usecase.Pagination, operator *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) { if err := i.CanReadScene(sid, operator); err != nil { return nil, nil, err diff --git a/internal/usecase/interfaces/dataset.go b/internal/usecase/interfaces/dataset.go index 4328d2f7a..2e39fde06 100644 --- a/internal/usecase/interfaces/dataset.go +++ b/internal/usecase/interfaces/dataset.go @@ -69,6 +69,7 @@ type Dataset interface { AddDynamicDatasetSchema(context.Context, AddDynamicDatasetSchemaParam) (*dataset.Schema, error) AddDynamicDataset(context.Context, AddDynamicDatasetParam) (*dataset.Schema, *dataset.Dataset, error) FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination, *usecase.Operator) (dataset.List, *usecase.PageInfo, error) + CountBySchema(context.Context, id.DatasetSchemaID) (int, error) FindSchemaByScene(context.Context, id.SceneID, *usecase.Pagination, *usecase.Operator) (dataset.SchemaList, *usecase.PageInfo, error) FindDynamicSchemaByScene(context.Context, id.SceneID) (dataset.SchemaList, error) RemoveDatasetSchema(context.Context, RemoveDatasetSchemaParam, *usecase.Operator) (id.DatasetSchemaID, error) diff --git a/internal/usecase/repo/dataset.go b/internal/usecase/repo/dataset.go index 
572234f75..afcc41f0b 100644 --- a/internal/usecase/repo/dataset.go +++ b/internal/usecase/repo/dataset.go @@ -13,6 +13,7 @@ type Dataset interface { FindByID(context.Context, id.DatasetID) (*dataset.Dataset, error) FindByIDs(context.Context, id.DatasetIDList) (dataset.List, error) FindBySchema(context.Context, id.DatasetSchemaID, *usecase.Pagination) (dataset.List, *usecase.PageInfo, error) + CountBySchema(context.Context, id.DatasetSchemaID) (int, error) FindBySchemaAll(context.Context, id.DatasetSchemaID) (dataset.List, error) FindGraph(context.Context, id.DatasetID, id.DatasetFieldIDList) (dataset.List, error) Save(context.Context, *dataset.Dataset) error diff --git a/schema.graphql b/schema.graphql index 9722657c5..243951e3d 100644 --- a/schema.graphql +++ b/schema.graphql @@ -598,6 +598,7 @@ type DatasetSchema implements Node { name: String! sceneId: ID! fields: [DatasetSchemaField!]! + totalCount: Int! representativeFieldId: ID dynamic: Boolean datasets( From 04e8e437e3d9ca38d981e29bd3472c5fbeda31e5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Mon, 13 Jun 2022 19:16:59 +0900 Subject: [PATCH 223/253] ci: set up release workflow (#156) --- .github/changelog.json | 18 ++++++++++ .github/cliff.toml | 58 -------------------------------- .github/semantic.yml | 1 - .github/workflows/build.yml | 25 +++++++------- .github/workflows/ci.yml | 8 +++-- .github/workflows/pr_title.yml | 35 ++++++++++++++++++++ .github/workflows/release.yml | 60 ++++++++++------------------------ .github/workflows/stage.yml | 24 ++++++++++++++ 8 files changed, 114 insertions(+), 115 deletions(-) create mode 100644 .github/changelog.json delete mode 100644 .github/cliff.toml delete mode 100644 .github/semantic.yml create mode 100644 .github/workflows/pr_title.yml create mode 100644 .github/workflows/stage.yml diff --git a/.github/changelog.json b/.github/changelog.json new file mode 100644 index 000000000..f43699ebf --- /dev/null +++ b/.github/changelog.json @@ -0,0 +1,18 @@ +{ + "prefix": { 
+ "feat": "๐Ÿš€ Features", + "fix": "๐Ÿ”ง Bug Fixes", + "docs": "๐Ÿ“– Documentation", + "doc": "๐Ÿ“– Documentation", + "perf": "โšก๏ธ Performance", + "refactor": "โœจ Refactor", + "style": "๐ŸŽจ Styling", + "test": "๐Ÿงช Testing", + "chore": "Miscellaneous Tasks", + "build": "Miscellaneous Tasks", + "deps": "Miscellaneous Tasks", + "ci": false, + "revert": false + }, + "titleVersionPrefix": "remove" +} diff --git a/.github/cliff.toml b/.github/cliff.toml deleted file mode 100644 index fe239aa14..000000000 --- a/.github/cliff.toml +++ /dev/null @@ -1,58 +0,0 @@ -# configuration file for git-cliff (0.1.0) - -[changelog] -# changelog header -header = """ -# Changelog -All notable changes to this project will be documented in this file.\n -""" -# template for the changelog body -# https://tera.netlify.app/docs/#introduction -body = """ -{% if version %}\ - ## {{ version | replace(from="v", to="") }} - {{ timestamp | date(format="%Y-%m-%d") }} -{% else %}\ - ## Unreleased -{% endif %}\ -{% for group, commits in commits | group_by(attribute="group") %} - ### {{ group }} - {% for commit in commits %} - - {{ commit.message | upper_first }} `{{ commit.id | split(pat="") | slice(end=7) | join(sep="") }}`\ - {% endfor %} -{% endfor %}\n -""" -# remove the leading and trailing whitespaces from the template -trim = true -# changelog footer -footer = """ - -""" - -[git] -# allow only conventional commits -# https://www.conventionalcommits.org -conventional_commits = true -# regex for parsing and grouping commits -commit_parsers = [ - { message = "^feat", group = "๐Ÿš€ Features"}, - { message = "^fix", group = "๐Ÿ”ง Bug Fixes"}, - { message = "^docs", group = "๐Ÿ“– Documentation"}, - { message = "^doc", group = "๐Ÿ“– Documentation"}, - { message = "^perf", group = "โšก๏ธ Performance"}, - { message = "^refactor", group = "โœจ Refactor"}, - { message = "^style", group = "๐ŸŽจ Styling"}, - { message = "^test", group = "๐Ÿงช Testing"}, - { body = ".*security", group = "๐Ÿ”’ 
Security"}, - { message = "^chore", group = "Miscellaneous Tasks"}, - { message = "^build", group = "Miscellaneous Tasks"}, - { message = "^deps", group = "Miscellaneous Tasks"}, - { message = "^ci", skip = true}, - { message = "^revert", skip = true}, - { message = "^v[0-9]+", skip = true}, -] -# filter out the commits that are not matched by commit parsers -filter_commits = false -# glob pattern for matching git tags -tag_pattern = "v[0-9]*" -# regex for skipping tags -skip_tags = "v0.1.0-beta.1" diff --git a/.github/semantic.yml b/.github/semantic.yml deleted file mode 100644 index fd160e519..000000000 --- a/.github/semantic.yml +++ /dev/null @@ -1 +0,0 @@ -titleOnly: true diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6a95b9b1b..3d5b03313 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -3,7 +3,7 @@ on: workflow_run: workflows: [ci] type: [completed] - branches: [main] + branches: [main, release] jobs: info: name: Collect information @@ -13,6 +13,7 @@ jobs: sha_short: ${{ steps.info.outputs.sha_short }} new_tag: ${{ steps.info.outputs.new_tag }} new_tag_short: ${{ steps.info.outputs.new_tag_short }} + branch: ${{ steps.info.outputs.branch }} steps: - name: checkout uses: actions/checkout@v3 @@ -28,12 +29,14 @@ jobs: echo "::set-output name=new_tag::$TAG" echo "::set-output name=new_tag_short::${TAG#v}" fi + echo "::set-output name=branch::`git branch --show-current`" - name: Show info env: SHA_SHORT: ${{ steps.info.outputs.sha_short }} NEW_TAG: ${{ steps.info.outputs.new_tag }} NEW_TAG_SHORT: ${{ steps.info.outputs.new_tag_short }} - run: echo "sha_short=$SHA_SHORT, new_tag=$NEW_TAG, new_tag_short=$NEW_TAG_SHORT" + BRANCH: ${{ steps.info.outputs.branch }} + run: echo "sha_short=$SHA_SHORT, new_tag=$NEW_TAG, new_tag_short=$NEW_TAG_SHORT, branch=$BRANCH" build: name: Build and release runs-on: ubuntu-latest @@ -51,17 +54,17 @@ jobs: with: go-version: 1.18 - name: Run GoReleaser for nightly - if: 
"!needs.info.outputs.new_tag" + if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag }} uses: goreleaser/goreleaser-action@v2 with: args: release --rm-dist --snapshot env: GORELEASER_CURRENT_TAG: 0.0.0 - name: Rename artifacts - if: "!needs.info.outputs.new_tag" + if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag }} run: for f in dist/${NAME}_*.*; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_nightly/'); done - name: Create GitHub release for nightly - if: "!needs.info.outputs.new_tag" + if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag" }} uses: ncipollo/release-action@v1 with: artifacts: dist/${{ env.NAME }}_*.* @@ -72,20 +75,20 @@ jobs: prerelease: true allowUpdates: true - name: Run GoReleaser - if: needs.info.outputs.new_tag + if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} uses: goreleaser/goreleaser-action@v2 with: args: release --rm-dist env: GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.new_tag }} - name: Download latest changelog - if: needs.info.outputs.new_tag + if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} uses: dawidd6/action-download-artifact@v2 with: workflow: release.yml name: changelog-${{ needs.info.outputs.new_tag }} - name: Create GitHub release - if: needs.info.outputs.new_tag + if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} uses: ncipollo/release-action@v1 with: artifacts: dist/${{ env.NAME }}_*.* @@ -113,7 +116,7 @@ jobs: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push nightly - if: "!needs.info.outputs.new_tag" + if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag }} id: docker_build uses: docker/build-push-action@v2 with: @@ -126,7 +129,7 @@ jobs: cache-to: type=gha,mode=max - name: Get Docker tags id: tags - if: needs.info.outputs.new_tag + if: ${{ needs.info.outputs.branch == 
'release' && needs.info.outputs.new_tag }} env: TAG: ${{ needs.info.outputs.new_tag_short }} run: | @@ -138,7 +141,7 @@ jobs: fi echo "::set-output name=tags::$TAGS" - name: Build and push release - if: needs.info.outputs.new_tag + if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} uses: docker/build-push-action@v2 with: context: . diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eb440a0dd..96d8d6fcf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,9 +1,12 @@ name: CI on: push: - branches: [main] + branches: [main, release] tags-ignore: ["*"] pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: ci: name: CI @@ -18,7 +21,6 @@ jobs: uses: actions/setup-go@v3 with: go-version: 1.18 - id: go - name: checkout uses: actions/checkout@v3 - name: cache @@ -31,7 +33,7 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v3 with: - version: v1.45 + version: v1.45 # v1.46 reports an error args: --timeout=10m - name: test run: go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic -timeout 10m diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml new file mode 100644 index 000000000..db688eb90 --- /dev/null +++ b/.github/workflows/pr_title.yml @@ -0,0 +1,35 @@ +name: PR Title Checker +on: + pull_request: + types: + - opened + - edited + - synchronize + - labeled + - unlabeled +jobs: + pr_title: + runs-on: ubuntu-latest + steps: + - uses: actions/github-script@v6 + env: + PR_TITLE_PREFIX: feat fix docs style refactor pref test build ci chore revert + # PR_TITLE_GROUP: + PR_TITLE_IGNORE_LABEL: meta + with: + script: | + const prefixes = process.env.PR_TITLE_PREFIX?.split(" "); + if (!prefixes?.length) return; + + const ignoreLabels = process.env.PR_TITLE_IGNORE_LABEL?.split(" "); + + const title = context.payload.pull_request.title; + const labels = context.payload.pull_request.labels; + if (ignoreLabels?.length && labels?.some(l => !ignoreLabels?.includes(l.name))) return; + + const groups = process.env.PR_TITLE_GROUP?.split(" "); + const reg = new RegExp(`^(?:${prefixes.join("|")})${groups?.length ? `(?:\\((?:${groups.join("|")})\\))?` : ""}: `, "i"); + if (!reg.test(title)) { + console.error("The pull request title does not follow the conventions."); + process.exit(1); + } diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a3edea4f0..07c6a56db 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,14 +2,20 @@ name: Release on: workflow_dispatch: inputs: - custom_tag: + version: required: false - description: Specify version only when you want to increment the minor and major version (e.g. 
1.1.0) + description: 'Next version (NOTE: Switch the branch to "release"!)' + type: choice + default: minor + options: + - patch + - minor + - major jobs: release: name: Release runs-on: ubuntu-latest - if: github.event.repository.full_name == 'reearth/reearth-backend' + if: github.ref == 'refs/heads/release' steps: - name: Set up git config run: | @@ -20,51 +26,21 @@ jobs: with: fetch-depth: 0 token: ${{ secrets.GPT }} - - name: Bump tag version - id: tag - uses: mathieudutour/github-tag-action@v6.0 + - id: changelog + name: Generate CHANGELOG + uses: reearth/changelog-action@main with: - github_token: ${{ secrets.GITHUB_TOKEN }} - custom_tag: ${{ github.event.inputs.custom_tag }} - dry_run: true - - name: Prepare git-cliff - run: touch CHANGELOG.md - - name: Generate changelog - uses: orhun/git-cliff-action@v1 - env: - OUTPUT: CHANGELOG.md - with: - config: .github/cliff.toml - args: --verbose --tag ${{ steps.tag.outputs.new_tag }} - - name: Format changelogs - env: - URL: ${{ github.event.repository.html_url }} - run: | - URL=${URL//\//\\\/} - sed -i -E 's///g; s/\(#([0-9]+)\)/([#\1]('"$URL"'\/pull\/\1))/g; s/`([a-zA-Z0-9]+)`/[`\1`]('"$URL"'\/commit\/\1)/g' CHANGELOG.md - - name: Generate CHANGELOG_latest.md - uses: actions/github-script@v6 - with: - script: | - const fs = require("fs"); - const changelog = fs.readFileSync("CHANGELOG.md", "utf8"); - const lines = changelog.split("\n"); - const h = lines - .map((l, i) => [l, i]) - .filter(l => l[0].startsWith("## ")) - .map(l => l[1]) - .slice(0, 2); - if (!h.length) throw new Error("failed to get the changelog of the latest version"); - const m = lines.slice(h[0] + 1, h[1]).join("\n").trim(); - fs.writeFileSync("CHANGELOG_latest.md", m); - - name: Upload latest changelog + version: ${{ github.event.inputs.version }} + repo: ${{ github.repository }} + latest: true + - name: Upload latest CHANGELOG uses: actions/upload-artifact@v3 with: - name: changelog-${{ steps.tag.outputs.new_tag }} + name: changelog-${{ 
steps.changelog.outputs.version }} path: CHANGELOG_latest.md - name: Commit & push env: - TAG: ${{ steps.tag.outputs.new_tag }} + TAG: ${{ steps.changelog.outputs.version }} run: | rm CHANGELOG_latest.md git add CHANGELOG.md diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml new file mode 100644 index 000000000..55f8e616d --- /dev/null +++ b/.github/workflows/stage.yml @@ -0,0 +1,24 @@ +name: Stage +on: + workflow_dispatch: +jobs: + stage: + runs-on: ubuntu-latest + if: github.ref === "refs/heads/main" + steps: + - name: git config + env: + GPT_USER: ${{ secrets.GPT_USER }} + run: | + git config --global user.name $GPT_USER + git config --global pull.rebase false + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + token: ${{ secrets.GPT }} + - name: Checkout release branch + run: git checkout release + - name: Merge main branch to release branch + run: git merge -X theirs main + - name: Git push + run: git push origin release From 170783081ce6c5f5ce131400ccc8adaecab39ed6 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 16:55:50 +0900 Subject: [PATCH 224/253] ci: create rc release for release branch (#157) --- .github/workflows/build.yml | 128 +++++++++++++++--------------- .github/workflows/deploy_test.yml | 3 + .github/workflows/pr_title.yml | 32 +++----- .github/workflows/release.yml | 3 + .vscode/settings.json | 3 +- 5 files changed, 85 insertions(+), 84 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 3d5b03313..8b9f099b2 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -4,6 +4,9 @@ on: workflows: [ci] type: [completed] branches: [main, release] +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: info: name: Collect information @@ -11,9 +14,10 @@ jobs: if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' outputs: sha_short: ${{ 
steps.info.outputs.sha_short }} - new_tag: ${{ steps.info.outputs.new_tag }} - new_tag_short: ${{ steps.info.outputs.new_tag_short }} + tag: ${{ steps.info.outputs.tag }} + tag_short: ${{ steps.info.outputs.tag_short }} branch: ${{ steps.info.outputs.branch }} + name: ${{ steps.info.outputs.name }} steps: - name: checkout uses: actions/checkout@v3 @@ -26,24 +30,32 @@ jobs: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" TAG=$(git tag --points-at HEAD) if [[ ! -z "$TAG" ]]; then - echo "::set-output name=new_tag::$TAG" - echo "::set-output name=new_tag_short::${TAG#v}" + echo "::set-output name=tag::$TAG" + echo "::set-output name=tag_short::${TAG#v}" + fi + BRANCH=$(git branch --show-current) + echo "::set-output name=branch::$BRANCH" + if [[ "$BRANCH" = "release" ]]; then + echo "::set-output name=name::rc" + else + echo "::set-output name=name::nightly" fi - echo "::set-output name=branch::`git branch --show-current`" - name: Show info env: SHA_SHORT: ${{ steps.info.outputs.sha_short }} - NEW_TAG: ${{ steps.info.outputs.new_tag }} - NEW_TAG_SHORT: ${{ steps.info.outputs.new_tag_short }} + TAG: ${{ steps.info.outputs.tag }} + TAG_SHORT: ${{ steps.info.outputs.tag_short }} BRANCH: ${{ steps.info.outputs.branch }} - run: echo "sha_short=$SHA_SHORT, new_tag=$NEW_TAG, new_tag_short=$NEW_TAG_SHORT, branch=$BRANCH" + NAME: ${{ steps.info.outputs.name }} + run: echo "sha_short=$SHA_SHORT, tag=$TAG, tag_short=$TAG_SHORT, branch=$BRANCH, name=$NAME" build: name: Build and release runs-on: ubuntu-latest needs: - info + if: needs.info.outputs.branch == 'main' || needs.info.outputs.branch == 'release' env: - NAME: reearth-backend + ARTIFACTS: dist/reearth-backend_*.* steps: - name: Checkout uses: actions/checkout@v3 @@ -53,101 +65,93 @@ jobs: uses: actions/setup-go@v3 with: go-version: 1.18 - - name: Run GoReleaser for nightly - if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag }} + - name: Run GoReleaser for nightly/rc uses: 
goreleaser/goreleaser-action@v2 with: args: release --rm-dist --snapshot env: - GORELEASER_CURRENT_TAG: 0.0.0 + GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || needs.info.outputs.sha_short }} - name: Rename artifacts - if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag }} - run: for f in dist/${NAME}_*.*; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_nightly/'); done - - name: Create GitHub release for nightly - if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag" }} + if: "!needs.info.outputs.tag" + run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done + - name: Release nightly/rc + if: "!needs.info.outputs.tag" uses: ncipollo/release-action@v1 with: - artifacts: dist/${{ env.NAME }}_*.* + allowUpdates: true + artifacts: ${{ env.ARTIFACTS }} commit: ${{ github.sha }} - name: Nightly - tag: nightly + name: ${{ needs.info.outputs.name }} + tag: ${{ needs.info.outputs.name }} body: ${{ github.sha }} prerelease: true - allowUpdates: true - - name: Run GoReleaser - if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} - uses: goreleaser/goreleaser-action@v2 - with: - args: release --rm-dist - env: - GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.new_tag }} - name: Download latest changelog - if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} + if: needs.info.outputs.branch == 'release' && needs.info.outputs.tag uses: dawidd6/action-download-artifact@v2 with: workflow: release.yml - name: changelog-${{ needs.info.outputs.new_tag }} + name: changelog-${{ needs.info.outputs.tag }} - name: Create GitHub release - if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} + if: needs.info.outputs.branch == 'release' && needs.info.outputs.tag uses: ncipollo/release-action@v1 with: - artifacts: dist/${{ env.NAME }}_*.* + artifacts: ${{ env.ARTIFACTS }} commit: ${{ 
github.sha }} - name: ${{ needs.info.outputs.new_tag }} - tag: ${{ needs.info.outputs.new_tag }} + name: ${{ needs.info.outputs.tag }} + tag: ${{ needs.info.outputs.tag }} bodyFile: CHANGELOG_latest.md docker: name: Build and push Docker image runs-on: ubuntu-latest needs: - info + if: needs.info.outputs.branch == 'main' || needs.info.outputs.branch == 'release' env: IMAGE_NAME: reearth/reearth-backend steps: - name: Checkout uses: actions/checkout@v3 - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v2 - name: Login to DockerHub - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build and push nightly - if: ${{ needs.info.outputs.branch == 'main' && !needs.info.outputs.new_tag }} - id: docker_build - uses: docker/build-push-action@v2 - with: - context: . - platforms: linux/amd64,linux/arm64 - push: true - build-args: VERSION=0.0.0-SNAPSHOT-${{ needs.info.outputs.sha_short }} - tags: ${{ env.IMAGE_NAME }}:nightly - cache-from: type=gha - cache-to: type=gha,mode=max - name: Get Docker tags id: tags - if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} + if: needs.info.outputs.branch == 'release' && needs.info.outputs.tag env: - TAG: ${{ needs.info.outputs.new_tag_short }} + TAG: ${{ needs.info.outputs.tag_short }} + NAME: ${{ needs.info.outputs.name }} + SHA: ${{ needs.info.outputs.sha_short }} run: | - TAGS=$IMAGE_NAME:$TAG - if [[ ! $TAG =~ '-' ]]; then - TAGS+=,${IMAGE_NAME}:${TAG%.*} - TAGS+=,${IMAGE_NAME}:${TAG%%.*} - TAGS+=,${IMAGE_NAME}:latest + if [[ -z $TAG ]]; then + PLATFORMS=linux/amd64,linux/arm64 + VERSION=$TAG + TAGS=$IMAGE_NAME:$TAG + if [[ ! 
$TAG =~ '-' ]]; then + TAGS+=,${IMAGE_NAME}:${TAG%.*} + TAGS+=,${IMAGE_NAME}:${TAG%%.*} + TAGS+=,${IMAGE_NAME}:latest + fi + else + PLATFORMS=linux/amd64 + VERSION=$SHA + TAGS=$IMAGE_NAME:$NAME fi + echo "::set-output name=platforms::$PLATFORMS" + echo "::set-output name=version::$VERSION" echo "::set-output name=tags::$TAGS" - - name: Build and push release - if: ${{ needs.info.outputs.branch == 'release' && needs.info.outputs.new_tag }} - uses: docker/build-push-action@v2 + - name: Build and push docker image + uses: docker/build-push-action@v3 with: context: . - platforms: linux/amd64,linux/arm64 + platforms: ${{ steps.tags.outputs.platforms }} push: true - build-args: VERSION=${{ needs.info.outputs.new_tag_short }} + build-args: VERSION=${{ steps.tags.outputs.version }} tags: ${{ steps.tags.outputs.tags }} - cache-from: type=registry,ref=${IMAGE_NAME}:latest - cache-to: type=inline + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 88d97d808..8d0bae6d9 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -4,6 +4,9 @@ on: workflows: [build] types: [completed] branches: [main] +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true env: IMAGE: reearth/reearth-backend:nightly IMAGE_GCP: us.gcr.io/reearth-oss/reearth-backend:nightly diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml index db688eb90..f6472679b 100644 --- a/.github/workflows/pr_title.yml +++ b/.github/workflows/pr_title.yml @@ -11,25 +11,15 @@ jobs: pr_title: runs-on: ubuntu-latest steps: - - uses: actions/github-script@v6 + - uses: amannn/action-semantic-pull-request@v4 env: - PR_TITLE_PREFIX: feat fix docs style refactor pref test build ci chore revert - # PR_TITLE_GROUP: - PR_TITLE_IGNORE_LABEL: meta - with: - script: | - const prefixes = process.env.PR_TITLE_PREFIX?.split(" "); - if (!prefixes?.length) return; 
- - const ignoreLabels = process.env.PR_TITLE_IGNORE_LABEL?.split(" "); - - const title = context.payload.pull_request.title; - const labels = context.payload.pull_request.labels; - if (ignoreLabels?.length && labels?.some(l => !ignoreLabels?.includes(l.name))) return; - - const groups = process.env.PR_TITLE_GROUP?.split(" "); - const reg = new RegExp(`^(?:${prefixes.join("|")})${groups?.length ? `(?:\\((?:${groups.join("|")})\\))?` : ""}: `, "i"); - if (!reg.test(title)) { - console.error("The pull request title does not follow the conventions."); - process.exit(1); - } + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + ignoreLabels: meta + scopes: | + web + server + subjectPattern: ^(?![A-Z]).+$ + subjectPatternError: | + The subject "{subject}" found in the pull request title "{title}" + didn't match the configured pattern. Please ensure that the subject + doesn't start with an uppercase character. diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 07c6a56db..140ddf159 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,6 +11,9 @@ on: - patch - minor - major +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: release: name: Release diff --git a/.vscode/settings.json b/.vscode/settings.json index 7f4bc2ddc..1463f5115 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -11,7 +11,8 @@ ], "./schemas/plugin_manifest_translation.json": [ "/pkg/builtin/manifest_*.yml" - ] + ], + "https://json.schemastore.org/github-workflow.json": ".github/workflows/build.yml" }, "json.schemas": [ { From dea1b0b35559706da80f361348aa7e636839c1d7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 17:14:29 +0900 Subject: [PATCH 225/253] ci: fix build workflow --- .github/workflows/build.yml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8b9f099b2..19168105e 100644 
--- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -65,12 +65,12 @@ jobs: uses: actions/setup-go@v3 with: go-version: 1.18 - - name: Run GoReleaser for nightly/rc + - name: Run GoReleaser uses: goreleaser/goreleaser-action@v2 with: args: release --rm-dist --snapshot env: - GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || needs.info.outputs.sha_short }} + GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts if: "!needs.info.outputs.tag" run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done @@ -120,9 +120,8 @@ jobs: with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get Docker tags - id: tags - if: needs.info.outputs.branch == 'release' && needs.info.outputs.tag + - name: Get options + id: options env: TAG: ${{ needs.info.outputs.tag_short }} NAME: ${{ needs.info.outputs.name }} @@ -149,9 +148,9 @@ jobs: uses: docker/build-push-action@v3 with: context: . 
- platforms: ${{ steps.tags.outputs.platforms }} + platforms: ${{ steps.options.outputs.platforms }} push: true - build-args: VERSION=${{ steps.tags.outputs.version }} - tags: ${{ steps.tags.outputs.tags }} + build-args: VERSION=${{ steps.options.outputs.version }} + tags: ${{ steps.options.outputs.tags }} cache-from: type=gha cache-to: type=gha,mode=max From 5b0c43f71494abcf4ec37c6da4160607572994cb Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 17:30:06 +0900 Subject: [PATCH 226/253] ci: fix build workflow --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 19168105e..0e0d6d9b8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -127,7 +127,7 @@ jobs: NAME: ${{ needs.info.outputs.name }} SHA: ${{ needs.info.outputs.sha_short }} run: | - if [[ -z $TAG ]]; then + if [[ -n $TAG ]]; then PLATFORMS=linux/amd64,linux/arm64 VERSION=$TAG TAGS=$IMAGE_NAME:$TAG From be3fc2a30239f1976ca02ed5e4edaa81946f565c Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 17:42:25 +0900 Subject: [PATCH 227/253] ci: fix stage workflow --- .github/workflows/ci.yml | 2 +- .github/workflows/deploy_test.yml | 2 +- .github/workflows/stage.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 96d8d6fcf..d5179e1b2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,7 +53,7 @@ jobs: steps: - name: Slack Notification uses: Gamesight/slack-workflow-status@master - if: always() + if: success() || failure() with: repo_token: ${{ secrets.GITHUB_TOKEN }} slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 8d0bae6d9..691a8aead 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -46,7 +46,7 @@ jobs: steps: - name: Slack 
Notification uses: Gamesight/slack-workflow-status@master - if: always() + if: success() || failure() with: repo_token: ${{ secrets.GITHUB_TOKEN }} slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index 55f8e616d..d4ca58693 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -4,7 +4,7 @@ on: jobs: stage: runs-on: ubuntu-latest - if: github.ref === "refs/heads/main" + if: github.ref == "refs/heads/main" steps: - name: git config env: From 00b5c0ee31dd8db6c36898338bbafe853e352a40 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 17:43:28 +0900 Subject: [PATCH 228/253] ci: fix stage workflow --- .github/workflows/stage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index d4ca58693..32101fb1d 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -4,7 +4,7 @@ on: jobs: stage: runs-on: ubuntu-latest - if: github.ref == "refs/heads/main" + if: github.ref == 'refs/heads/main' steps: - name: git config env: From 84dc50692c5f1e6c0ec9581da6cb68c6fddb2d5d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 19:38:50 +0900 Subject: [PATCH 229/253] ci: update build and deploy_test workflows --- .github/workflows/build.yml | 29 ++++++++++++++++++----------- .github/workflows/deploy_test.yml | 20 ++++++++------------ .github/workflows/release.yml | 2 +- .github/workflows/stage.yml | 2 +- 4 files changed, 28 insertions(+), 25 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0e0d6d9b8..5cec5479d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,26 +5,29 @@ on: type: [completed] branches: [main, release] concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + group: ${{ github.workflow }}-${{ github.event.workflow_run.head_branch }} cancel-in-progress: true jobs: info: name: Collect 
information runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' + if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-web' && (github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'release') outputs: sha_short: ${{ steps.info.outputs.sha_short }} tag: ${{ steps.info.outputs.tag }} tag_short: ${{ steps.info.outputs.tag_short }} - branch: ${{ steps.info.outputs.branch }} name: ${{ steps.info.outputs.name }} steps: - name: checkout uses: actions/checkout@v3 + with: + ref: ${{ github.event.workflow_run.head_sha }} - name: Fetch tags run: git fetch --prune --unshallow --tags - name: Get info id: info + env: + BRANCH: ${{ github.event.workflow_run.head_branch }} # The tag name should be retrieved lazily, as tagging may be delayed. run: | echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" @@ -33,8 +36,6 @@ jobs: echo "::set-output name=tag::$TAG" echo "::set-output name=tag_short::${TAG#v}" fi - BRANCH=$(git branch --show-current) - echo "::set-output name=branch::$BRANCH" if [[ "$BRANCH" = "release" ]]; then echo "::set-output name=name::rc" else @@ -45,15 +46,13 @@ jobs: SHA_SHORT: ${{ steps.info.outputs.sha_short }} TAG: ${{ steps.info.outputs.tag }} TAG_SHORT: ${{ steps.info.outputs.tag_short }} - BRANCH: ${{ steps.info.outputs.branch }} NAME: ${{ steps.info.outputs.name }} - run: echo "sha_short=$SHA_SHORT, tag=$TAG, tag_short=$TAG_SHORT, branch=$BRANCH, name=$NAME" + run: echo "sha_short=$SHA_SHORT, tag=$TAG, tag_short=$TAG_SHORT, name=$NAME" build: name: Build and release runs-on: ubuntu-latest needs: - info - if: needs.info.outputs.branch == 'main' || needs.info.outputs.branch == 'release' env: ARTIFACTS: dist/reearth-backend_*.* steps: @@ -61,6 +60,7 @@ jobs: uses: actions/checkout@v3 with: fetch-depth: 0 + ref: ${{ github.event.workflow_run.head_sha }} - name: 
Set up Go uses: actions/setup-go@v3 with: @@ -86,13 +86,13 @@ jobs: body: ${{ github.sha }} prerelease: true - name: Download latest changelog - if: needs.info.outputs.branch == 'release' && needs.info.outputs.tag + if: github.event.workflow_run.head_branch == 'release' && needs.info.outputs.tag uses: dawidd6/action-download-artifact@v2 with: workflow: release.yml name: changelog-${{ needs.info.outputs.tag }} - name: Create GitHub release - if: needs.info.outputs.branch == 'release' && needs.info.outputs.tag + if: github.event.workflow_run.head_branch == 'release' && needs.info.outputs.tag uses: ncipollo/release-action@v1 with: artifacts: ${{ env.ARTIFACTS }} @@ -105,12 +105,13 @@ jobs: runs-on: ubuntu-latest needs: - info - if: needs.info.outputs.branch == 'main' || needs.info.outputs.branch == 'release' env: IMAGE_NAME: reearth/reearth-backend steps: - name: Checkout uses: actions/checkout@v3 + with: + ref: ${{ github.event.workflow_run.head_sha }} - name: Set up QEMU uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx @@ -154,3 +155,9 @@ jobs: tags: ${{ steps.options.outputs.tags }} cache-from: type=gha cache-to: type=gha,mode=max + - name: Invoke deploy_test workflow + uses: benc-uk/workflow-dispatch@v1 + if: needs.info.outputs.name == 'nightly' + with: + workflow: deploy_test + token: ${{ secrets.GPT }} diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 691a8aead..2cd61b1f2 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -1,11 +1,8 @@ name: deploy_test on: - workflow_run: - workflows: [build] - types: [completed] - branches: [main] + workflow_dispatch: concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + group: ${{ github.workflow }} cancel-in-progress: true env: IMAGE: reearth/reearth-backend:nightly @@ -17,11 +14,11 @@ jobs: runs-on: ubuntu-latest if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 
'reearth/reearth-backend' steps: - - uses: google-github-actions/setup-gcloud@v0 + - uses: google-github-actions/auth@v0 with: - project_id: ${{ secrets.GCP_PROJECT }} - service_account_key: ${{ secrets.GCP_SA_KEY }} - export_default_credentials: true + credentials_json: ${{ secrets.GCP_SA_KEY }} + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v0 - name: Configure docker run: gcloud auth configure-docker --quiet - name: docker push @@ -33,20 +30,19 @@ jobs: run: | gcloud run deploy reearth-backend \ --image $IMAGE_GCP \ - --project ${{ secrets.GCP_PROJECT }} \ --region $GCP_REGION \ --platform managed \ --quiet slack-notification: name: Slack Notification - if: github.event.repository.full_name == 'reearth/reearth-backend' && always() + if: github.event.repository.full_name == 'reearth/reearth-backend' && (success() || failure()) needs: - deploy_test runs-on: ubuntu-latest steps: - name: Slack Notification uses: Gamesight/slack-workflow-status@master - if: success() || failure() + if: always() with: repo_token: ${{ secrets.GITHUB_TOKEN }} slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 140ddf159..40bfa6f61 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ on: - minor - major concurrency: - group: ${{ github.workflow }}-${{ github.ref }} + group: ${{ github.workflow }} cancel-in-progress: true jobs: release: diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index 32101fb1d..9fb75349c 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -17,7 +17,7 @@ jobs: fetch-depth: 0 token: ${{ secrets.GPT }} - name: Checkout release branch - run: git checkout release + run: git switch release || git switch -c release - name: Merge main branch to release branch run: git merge -X theirs main - name: Git push From 5cf0f9bd04a1f2e86eecf84cae548c2cfbfb991d Mon Sep 17 00:00:00 2001 From: 
rot1024 Date: Tue, 14 Jun 2022 19:47:30 +0900 Subject: [PATCH 230/253] ci: fix build workflow --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5cec5479d..6da23da6e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,7 @@ jobs: info: name: Collect information runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-web' && (github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'release') + if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' && (github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'release') outputs: sha_short: ${{ steps.info.outputs.sha_short }} tag: ${{ steps.info.outputs.tag }} From e9ed0f0d67d16d578c7d19e88333f52e635986c9 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 14 Jun 2022 20:02:40 +0900 Subject: [PATCH 231/253] ci: fix deploy_test workflow --- .github/workflows/deploy_test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 2cd61b1f2..7f0897c1f 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -12,7 +12,7 @@ jobs: deploy_test: name: Deploy app to test env runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' + if: github.event.repository.full_name == 'reearth/reearth-backend' steps: - uses: google-github-actions/auth@v0 with: From 647eb4feaebeb5f8995a81fba5c89361d1403b3f Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 17:10:00 +0900 Subject: [PATCH 232/253] ci: fix workflows --- .github/workflows/build.yml | 4 ++-- 
.github/workflows/ci.yml | 6 +++--- .github/workflows/release.yml | 12 +++++------- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6da23da6e..be55fb83f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,7 +2,7 @@ name: build on: workflow_run: workflows: [ci] - type: [completed] + types: [completed] branches: [main, release] concurrency: group: ${{ github.workflow }}-${{ github.event.workflow_run.head_branch }} @@ -11,7 +11,7 @@ jobs: info: name: Collect information runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion == 'success' && github.event.repository.full_name == 'reearth/reearth-backend' && (github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'release') + if: github.event.workflow_run.conclusion != 'failure' && github.event.repository.full_name == 'reearth/reearth-backend' && (github.event.workflow_run.head_branch == 'main' && !startsWith(github.event.head_commit.message, 'v') || github.event.workflow_run.head_branch == 'release') outputs: sha_short: ${{ steps.info.outputs.sha_short }} tag: ${{ steps.info.outputs.tag }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d5179e1b2..b153b1d19 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,6 @@ name: CI on: push: branches: [main, release] - tags-ignore: ["*"] pull_request: concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -11,6 +10,7 @@ jobs: ci: name: CI runs-on: ubuntu-latest + if: "!startsWith(github.event.head_commit.message, 'v')" services: mongo: image: mongo:4.4-focal @@ -45,7 +45,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt slack-notification: - if: github.event.repository.full_name == 'reearth/reearth-backend' && always() + if: github.event.repository.full_name == 'reearth/reearth-backend' && success() || failure() name: Slack Notification needs: - ci @@ 
-53,7 +53,7 @@ jobs: steps: - name: Slack Notification uses: Gamesight/slack-workflow-status@master - if: success() || failure() + if: always() with: repo_token: ${{ secrets.GITHUB_TOKEN }} slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 40bfa6f61..d47650680 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,9 +11,6 @@ on: - patch - minor - major -concurrency: - group: ${{ github.workflow }} - cancel-in-progress: true jobs: release: name: Release @@ -35,13 +32,13 @@ jobs: with: version: ${{ github.event.inputs.version }} repo: ${{ github.repository }} - latest: true + latest: CHANGELOG_latest.md - name: Upload latest CHANGELOG uses: actions/upload-artifact@v3 with: name: changelog-${{ steps.changelog.outputs.version }} path: CHANGELOG_latest.md - - name: Commit & push + - name: Commit & push to release env: TAG: ${{ steps.changelog.outputs.version }} run: | @@ -49,5 +46,6 @@ jobs: git add CHANGELOG.md git commit -am "$TAG" git tag $TAG - git push - git push --tags + git push --atomic origin release $TAG + - name: Commit & push to main + run: git switch main && git cherry-pick release && git push From 5181985ddc60829a351043a17c2630ebbf194255 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 17:48:50 +0900 Subject: [PATCH 233/253] ci: fix build workflow --- .github/workflows/build.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index be55fb83f..ec32adebb 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -51,6 +51,7 @@ jobs: build: name: Build and release runs-on: ubuntu-latest + if: github.event.workflow_run.head_branch == 'main' && !startsWith(github.event.head_commit.message, 'v') || github.event.workflow_run.head_branch == 'release' needs: - info env: @@ -72,8 +73,9 @@ jobs: env: GORELEASER_CURRENT_TAG: ${{ 
needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts - if: "!needs.info.outputs.tag" - run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done + run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${NAME}/'); done + env: + NAME: ${{ needs.info.outputs.tag || needs.info.outputs.name }} - name: Release nightly/rc if: "!needs.info.outputs.tag" uses: ncipollo/release-action@v1 @@ -105,6 +107,7 @@ jobs: runs-on: ubuntu-latest needs: - info + if: github.event.workflow_run.head_branch == 'main' && !startsWith(github.event.head_commit.message, 'v') || github.event.workflow_run.head_branch == 'release' env: IMAGE_NAME: reearth/reearth-backend steps: From 6d0872ac889aab095628f2e1724d95728d89fb16 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 18:37:02 +0900 Subject: [PATCH 234/253] ci: fix ci and build workflow --- .github/workflows/build.yml | 6 ++++-- .github/workflows/ci.yml | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ec32adebb..6dfbc9550 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -69,13 +69,15 @@ jobs: - name: Run GoReleaser uses: goreleaser/goreleaser-action@v2 with: - args: release --rm-dist --snapshot + args: release --rm-dist $SNAPSHOT env: + SNAPSHOT: ${{ !needs.info.outputs.tag && '--snapshot' || '' }} GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts + if: "!needs.info.outputs.tag" run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${NAME}/'); done env: - NAME: ${{ needs.info.outputs.tag || needs.info.outputs.name }} + NAME: ${{ needs.info.outputs.name }} - name: Release nightly/rc if: "!needs.info.outputs.tag" uses: ncipollo/release-action@v1 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b153b1d19..6ee636549 100644 --- 
a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ jobs: ci: name: CI runs-on: ubuntu-latest - if: "!startsWith(github.event.head_commit.message, 'v')" + if: github.event_name != 'push' || !startsWith(github.event.head_commit.message, 'v') services: mongo: image: mongo:4.4-focal From 362d4181e4458289f934a5b281cbc773dbda681e Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 18:53:47 +0900 Subject: [PATCH 235/253] ci: fix build workflow --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6dfbc9550..f74c60f74 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -69,7 +69,7 @@ jobs: - name: Run GoReleaser uses: goreleaser/goreleaser-action@v2 with: - args: release --rm-dist $SNAPSHOT + args: release --rm-dist ${{ env.SNAPSHOT }} env: SNAPSHOT: ${{ !needs.info.outputs.tag && '--snapshot' || '' }} GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} From 413ba3f27530107a400629f40fd4df0527ae7b1a Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 19:08:26 +0900 Subject: [PATCH 236/253] ci: fix build workflow --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f74c60f74..5a9ba97e4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -75,7 +75,7 @@ jobs: GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts if: "!needs.info.outputs.tag" - run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${NAME}/'); done + run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ env.NAME }}/'); done env: NAME: ${{ needs.info.outputs.name }} - name: Release nightly/rc From a6553e23625e6955ab409fe3dae085f747ac0da5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 
19:26:15 +0900 Subject: [PATCH 237/253] ci: fix build workflow --- .github/workflows/build.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5a9ba97e4..b5827cd80 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -75,9 +75,9 @@ jobs: GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts if: "!needs.info.outputs.tag" - run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ env.NAME }}/'); done - env: - NAME: ${{ needs.info.outputs.name }} + run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done + - name: List artifacts + run: ls -l dist - name: Release nightly/rc if: "!needs.info.outputs.tag" uses: ncipollo/release-action@v1 From 64d6464f7b59741e49c02199b0b2b5477fc5ef17 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 10:39:47 +0000 Subject: [PATCH 238/253] v0.8.0 --- CHANGELOG.md | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e5121edd8..1cf4aca68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,25 @@ # Changelog All notable changes to this project will be documented in this file. 
+## 0.8.0 - 2022-06-17 + +### ๐Ÿš€ Features + +- Add totalCount field to DatasetSchema type of GraphQL schema ([#154](https://github.com/reearth/reearth-backend/pull/154)) [`ab6334`](https://github.com/reearth/reearth-backend/commit/ab6334) +- Add timeline settings to scene property ([#153](https://github.com/reearth/reearth-backend/pull/153)) [`602ec0`](https://github.com/reearth/reearth-backend/commit/602ec0) + +### ๐Ÿ”ง Bug Fixes + +- Assets are not saved when files are uploaded ([#155](https://github.com/reearth/reearth-backend/pull/155)) [`e444e4`](https://github.com/reearth/reearth-backend/commit/e444e4) + +### โœจ Refactor + +- Declarative description of use case structure (asset only) ([#151](https://github.com/reearth/reearth-backend/pull/151)) [`c6e98c`](https://github.com/reearth/reearth-backend/commit/c6e98c) + +### Miscellaneous Tasks + +- Update go modules ([#150](https://github.com/reearth/reearth-backend/pull/150)) [`6372bc`](https://github.com/reearth/reearth-backend/commit/6372bc) + ## 0.7.0 - 2022-05-13 ### ๐Ÿš€ Features @@ -294,4 +313,4 @@ All notable changes to this project will be documented in this file. 
- Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) - Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - + \ No newline at end of file From 164eba49b5fd7483945f18dd2fc60660b670f1a5 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 17 Jun 2022 20:08:20 +0900 Subject: [PATCH 239/253] ci: simplify if in build workflow --- .github/workflows/build.yml | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b5827cd80..4b5505647 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,7 @@ jobs: info: name: Collect information runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion != 'failure' && github.event.repository.full_name == 'reearth/reearth-backend' && (github.event.workflow_run.head_branch == 'main' && !startsWith(github.event.head_commit.message, 'v') || github.event.workflow_run.head_branch == 'release') + if: github.event.workflow_run.conclusion != 'failure' && github.event.repository.full_name == 'reearth/reearth-backend' && (github.event.workflow_run.head_branch == 'release' || !startsWith(github.event.head_commit.message, 'v')) outputs: sha_short: ${{ steps.info.outputs.sha_short }} tag: ${{ steps.info.outputs.tag }} @@ -31,13 +31,14 @@ jobs: # The tag name should be retrieved lazily, as tagging may be delayed. run: | echo "::set-output name=sha_short::$(git rev-parse --short HEAD)" - TAG=$(git tag --points-at HEAD) - if [[ ! -z "$TAG" ]]; then - echo "::set-output name=tag::$TAG" - echo "::set-output name=tag_short::${TAG#v}" - fi if [[ "$BRANCH" = "release" ]]; then - echo "::set-output name=name::rc" + TAG=$(git tag --points-at HEAD) + if [[ ! 
-z "$TAG" ]]; then + echo "::set-output name=tag::$TAG" + echo "::set-output name=tag_short::${TAG#v}" + else + echo "::set-output name=name::rc" + fi else echo "::set-output name=name::nightly" fi @@ -51,9 +52,9 @@ jobs: build: name: Build and release runs-on: ubuntu-latest - if: github.event.workflow_run.head_branch == 'main' && !startsWith(github.event.head_commit.message, 'v') || github.event.workflow_run.head_branch == 'release' needs: - info + if: needs.info.outputs.name || needs.info.outputs.tag env: ARTIFACTS: dist/reearth-backend_*.* steps: @@ -74,12 +75,12 @@ jobs: SNAPSHOT: ${{ !needs.info.outputs.tag && '--snapshot' || '' }} GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts - if: "!needs.info.outputs.tag" + if: needs.info.outputs.name run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done - name: List artifacts run: ls -l dist - name: Release nightly/rc - if: "!needs.info.outputs.tag" + if: needs.info.outputs.name uses: ncipollo/release-action@v1 with: allowUpdates: true @@ -90,13 +91,13 @@ jobs: body: ${{ github.sha }} prerelease: true - name: Download latest changelog - if: github.event.workflow_run.head_branch == 'release' && needs.info.outputs.tag + if: needs.info.outputs.tag uses: dawidd6/action-download-artifact@v2 with: workflow: release.yml name: changelog-${{ needs.info.outputs.tag }} - name: Create GitHub release - if: github.event.workflow_run.head_branch == 'release' && needs.info.outputs.tag + if: needs.info.outputs.tag uses: ncipollo/release-action@v1 with: artifacts: ${{ env.ARTIFACTS }} @@ -109,7 +110,7 @@ jobs: runs-on: ubuntu-latest needs: - info - if: github.event.workflow_run.head_branch == 'main' && !startsWith(github.event.head_commit.message, 'v') || github.event.workflow_run.head_branch == 'release' + if: needs.info.outputs.name || needs.info.outputs.tag env: IMAGE_NAME: reearth/reearth-backend steps: From 
5afc81be11a88b787e122c9d365b33c8c177dbe5 Mon Sep 17 00:00:00 2001 From: harada Date: Mon, 20 Jun 2022 16:25:23 +0900 Subject: [PATCH 240/253] test: unit test for mongo auth request repo (#159) * add mongo auth request test * add process of checking error * modify fnc name --- .../infrastructure/mongo/auth_request_test.go | 72 +++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 internal/infrastructure/mongo/auth_request_test.go diff --git a/internal/infrastructure/mongo/auth_request_test.go b/internal/infrastructure/mongo/auth_request_test.go new file mode 100644 index 000000000..6d6065e26 --- /dev/null +++ b/internal/infrastructure/mongo/auth_request_test.go @@ -0,0 +1,72 @@ +package mongo + +import ( + "context" + "testing" + + "github.com/caos/oidc/pkg/oidc" + "github.com/reearth/reearth-backend/pkg/auth" + "github.com/stretchr/testify/assert" +) + +func TestAuthRequestRepo(t *testing.T) { + tests := []struct { + Name string + Expected struct { + Name string + AuthRequest *auth.Request + } + }{ + { + Expected: struct { + Name string + AuthRequest *auth.Request + }{ + AuthRequest: auth.NewRequest(). + NewID(). + ClientID("client id"). + State("state"). + ResponseType("response type"). + Scopes([]string{"scope"}). + Audiences([]string{"audience"}). + RedirectURI("redirect uri"). + Nonce("nonce"). + CodeChallenge(&oidc.CodeChallenge{ + Challenge: "challenge", + Method: "S256", + }). + AuthorizedAt(nil). 
+ MustBuild(), + }, + }, + } + + init := connect(t) + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + client := init(t) + + repo := NewAuthRequest(client) + + ctx := context.Background() + err := repo.Save(ctx, tt.Expected.AuthRequest) + assert.NoError(t, err) + + got, err := repo.FindByID(ctx, tt.Expected.AuthRequest.ID()) + assert.NoError(t, err) + assert.Equal(t, tt.Expected.AuthRequest.ID(), got.ID()) + assert.Equal(t, tt.Expected.AuthRequest.GetClientID(), got.GetClientID()) + assert.Equal(t, tt.Expected.AuthRequest.GetState(), got.GetState()) + assert.Equal(t, tt.Expected.AuthRequest.GetResponseType(), got.GetResponseType()) + assert.Equal(t, tt.Expected.AuthRequest.GetScopes(), got.GetScopes()) + assert.Equal(t, tt.Expected.AuthRequest.GetAudience(), got.GetAudience()) + assert.Equal(t, tt.Expected.AuthRequest.GetRedirectURI(), got.GetRedirectURI()) + assert.Equal(t, tt.Expected.AuthRequest.GetNonce(), got.GetNonce()) + assert.Equal(t, tt.Expected.AuthRequest.GetCodeChallenge(), got.GetCodeChallenge()) + assert.Equal(t, tt.Expected.AuthRequest.AuthorizedAt(), got.AuthorizedAt()) + }) + } +} From 0267f1d15c4537b9944a78baac0ca02d9fd1bbe3 Mon Sep 17 00:00:00 2001 From: nina992 <89770889+nina992@users.noreply.github.com> Date: Mon, 4 Jul 2022 11:01:03 +0300 Subject: [PATCH 241/253] feat: change layer indicators from preset list from backend side (#158) * change indicator * change scale name * change-feild- name * change the description * add-japanese-translation * Update pkg/builtin/manifest_ja.yml Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> Co-authored-by: nina992 Co-authored-by: HideBa <49897538+HideBa@users.noreply.github.com> --- pkg/builtin/manifest.yml | 35 +++++++++++++++++++++++++++++++++++ pkg/builtin/manifest_ja.yml | 15 +++++++++++++++ 2 files changed, 50 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 342da3a2c..8901d0aca 100644 --- 
a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -178,6 +178,37 @@ extensions: ui: slider min: 0 max: 1 + - id: indicator + title: Indicator + description: Set the style of indicator shown when selecting a layer on the map. + fields: + - id: indicator_type + type: string + title: Type + defaultValue: default + description: Choose how the indicator will look. + choices: + - key: default + label: Default + - key: crosshair + label: Crosshair + - key: custom + label: Custom + - id: indicator_image + type: url + title: Image URL + ui: image + availableIf: + field: indicator_type + type: string + value: custom + - id: indicator_image_scale + type: number + title: Image scale + availableIf: + field: indicator_type + type: string + value: custom - id: theme title: Publish Theme description: Set your theme. @@ -366,6 +397,10 @@ extensions: type: string title: Tracking ID description: Paste your Google Analytics tracking ID here. This will be embedded in your published project. + linkable: + url: + schemaGroupId: indicator + fieldId: indicator_image - id: infobox name: Infobox type: infobox diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 6bec66bfa..2f53cedab 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -85,6 +85,21 @@ extensions: tile_opacity: title: ไธ้€ๆ˜Žๆ€ง description: NEEDS DESCRIPTION + indicator: + title: ใ‚คใƒณใƒ‡ใ‚ฃใ‚ฑใƒผใ‚ฟใƒผ + description: ใƒฌใ‚คใƒคใƒผ้ธๆŠžๆ™‚ใซ่กจ็คบใ•ใ‚Œใ‚‹ใ‚คใƒณใƒ‡ใ‚ฃใ‚ฑใƒผใ‚ฟใƒผใฎใ‚นใ‚ฟใ‚คใƒซใ‚’่จญๅฎšใ—ใพใ™ + fields: + indicator_type: + title: ใ‚ฟใ‚คใƒ— + description: ใ‚คใƒณใƒ‡ใ‚ฃใ‚ฑใƒผใ‚ฟใƒผใฎ่ฆ‹ใŸ็›ฎใ‚’่จญๅฎšใ—ใพใ™ + choices: + default: ใƒ‡ใƒ•ใ‚ฉใƒซใƒˆ + crosshair: ๅๅญ—็ทš + custom: ใ‚ซใ‚นใ‚ฟใƒ  + indicator_image: + title: ็”ปๅƒURL + indicator_image_scale: + title: ็”ปๅƒใ‚ตใ‚คใ‚บ atmosphere: title: ๅคงๆฐ— description: ๅœฐ็ƒใ‚’่ฆ†ใ†ๅคงๆฐ—ๅœใฎ่จญๅฎšใŒใงใใพใ™ใ€‚ From 67780bca4d1a3cb0372075def77a5410c941124f Mon Sep 17 00:00:00 2001 From: 
rot1024 Date: Thu, 7 Jul 2022 12:51:54 +0900 Subject: [PATCH 242/253] chore: update Makefile to remove unused targets --- Makefile | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/Makefile b/Makefile index 6fb6cdeb9..58f11bda7 100644 --- a/Makefile +++ b/Makefile @@ -11,21 +11,9 @@ run-app: go run ./cmd/reearth run-db: - docker-compose up -d reearth-mongo + docker compose up -d reearth-mongo -gen: - go generate ./... - -gen/gql: +gql: go generate ./internal/adapter/gql -gen/builtin: - go generate ./pkg/builtin - -gen/manifest: - go generate ./pkg/plugin/manifest - -gen/id: - go generate ./pkg/id - -.PHONY: lint test build run-app run-db gen gen/gql gen/builtin gen/manifest gen/id +.PHONY: lint test build run-app run-db gql From 62dede2cb1a942eca44fb7d4d62b1d6a1dbab8ad Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 19 Jul 2022 16:28:28 +0900 Subject: [PATCH 243/253] ci: update changelog config --- .github/changelog.json | 18 ------------------ .github/changelog.yml | 15 +++++++++++++++ 2 files changed, 15 insertions(+), 18 deletions(-) delete mode 100644 .github/changelog.json create mode 100644 .github/changelog.yml diff --git a/.github/changelog.json b/.github/changelog.json deleted file mode 100644 index f43699ebf..000000000 --- a/.github/changelog.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "prefix": { - "feat": "๐Ÿš€ Features", - "fix": "๐Ÿ”ง Bug Fixes", - "docs": "๐Ÿ“– Documentation", - "doc": "๐Ÿ“– Documentation", - "perf": "โšก๏ธ Performance", - "refactor": "โœจ Refactor", - "style": "๐ŸŽจ Styling", - "test": "๐Ÿงช Testing", - "chore": "Miscellaneous Tasks", - "build": "Miscellaneous Tasks", - "deps": "Miscellaneous Tasks", - "ci": false, - "revert": false - }, - "titleVersionPrefix": "remove" -} diff --git a/.github/changelog.yml b/.github/changelog.yml new file mode 100644 index 000000000..a03adf520 --- /dev/null +++ b/.github/changelog.yml @@ -0,0 +1,15 @@ +prefixes: + feat: ๐Ÿš€ Features + fix: ๐Ÿ”ง Bug Fixes + docs: 
๐Ÿ“– Documentation + doc: ๐Ÿ“– Documentation + perf: โšก๏ธ Performance + refactor: โœจ Refactor + style: ๐ŸŽจ Styling + test: ๐Ÿงช Testing + chore: Miscellaneous Tasks + build: Miscellaneous Tasks + deps: Miscellaneous Tasks + ci: false + revert: false +titleVersionPrefix: remove From 358237ba21b3f78a2b94e5b8d85c3cb3196582fc Mon Sep 17 00:00:00 2001 From: yk-eukarya <81808708+yk-eukarya@users.noreply.github.com> Date: Tue, 19 Jul 2022 12:19:23 +0300 Subject: [PATCH 244/253] fix: property fields in a property list cannot be removed (#160) * fix * fix pr comments --- pkg/property/pointer.go | 2 +- pkg/property/property.go | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pkg/property/pointer.go b/pkg/property/pointer.go index 990eb73f6..f844bcc1a 100644 --- a/pkg/property/pointer.go +++ b/pkg/property/pointer.go @@ -216,7 +216,7 @@ func (p *Pointer) Test(sg SchemaGroupID, i ItemID, f FieldID) bool { } func (p *Pointer) TestItem(sg SchemaGroupID, i ItemID) bool { - return p.TestSchemaGroup(sg) && (p.item == nil || *p.item == i) + return p.TestSchemaGroup(sg) && (p.item == nil || p.item.Equal(i)) } func (p *Pointer) TestSchemaGroup(sg SchemaGroupID) bool { diff --git a/pkg/property/property.go b/pkg/property/property.go index c72f6c5aa..add864f59 100644 --- a/pkg/property/property.go +++ b/pkg/property/property.go @@ -266,8 +266,11 @@ func (p *Property) RemoveField(ptr *Pointer) { return } - if group := ToGroup(p.Item(ptr)); group != nil { + item := p.Item(ptr) + if group := ToGroup(item); group != nil { group.RemoveField(fid) + } else if groupList := ToGroupList(item); groupList != nil { + groupList.RemoveFields(ptr) } } From 0e2daacab7920ead71de36266a1d8ea8f5db09a7 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Tue, 19 Jul 2022 19:42:17 +0900 Subject: [PATCH 245/253] ci: fix release workflow --- .github/workflows/release.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml 
b/.github/workflows/release.yml index d47650680..9b7ff0f9d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,10 +17,12 @@ jobs: runs-on: ubuntu-latest if: github.ref == 'refs/heads/release' steps: - - name: Set up git config + - name: git config + env: + GPT_USER: ${{ secrets.GPT_USER }} run: | - git config --global user.name "${{ github.actor }}" - git config --global user.email "${{ github.actor }}@users.noreply.github.com" + git config --global user.name $GPT_USER + git config --global pull.rebase false - name: Checkout uses: actions/checkout@v3 with: From db6d1321ce049889ec05c8e1cd27f4d1fcb9b101 Mon Sep 17 00:00:00 2001 From: reearth-bot Date: Wed, 20 Jul 2022 03:24:08 +0000 Subject: [PATCH 246/253] v0.9.0 --- CHANGELOG.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1cf4aca68..84d70a896 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,34 @@ # Changelog All notable changes to this project will be documented in this file. 
+## 0.9.0 - 2022-07-20 + +### chore + +- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) + +### ci + +- Fix release workflow [`0e2daa`](https://github.com/reearth/reearth-backend/commit/0e2daa) +- Update changelog config [`62dede`](https://github.com/reearth/reearth-backend/commit/62dede) +- Simplify if in build workflow [`164eba`](https://github.com/reearth/reearth-backend/commit/164eba) + +### feat + +- Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) + +### fix + +- Property fields in a property list cannot be removed ([#160](https://github.com/reearth/reearth-backend/pull/160)) [`358237`](https://github.com/reearth/reearth-backend/commit/358237) + +### test + +- Unit test for mongo auth request repo ([#159](https://github.com/reearth/reearth-backend/pull/159)) [`5afc81`](https://github.com/reearth/reearth-backend/commit/5afc81) + +### + +- V0.8.0 [`64d646`](https://github.com/reearth/reearth-backend/commit/64d646) + ## 0.8.0 - 2022-06-17 ### ๐Ÿš€ Features From 895a6482cefb9ae9007e02788ac74dddd64b1b80 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 20 Jul 2022 13:08:36 +0900 Subject: [PATCH 247/253] chore: fix changelog [skip ci] --- CHANGELOG.md | 22 ++++++---------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 84d70a896..945ceef63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,31 +3,21 @@ All notable changes to this project will be documented in this file. 
## 0.9.0 - 2022-07-20 -### chore - -- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) - -### ci - -- Fix release workflow [`0e2daa`](https://github.com/reearth/reearth-backend/commit/0e2daa) -- Update changelog config [`62dede`](https://github.com/reearth/reearth-backend/commit/62dede) -- Simplify if in build workflow [`164eba`](https://github.com/reearth/reearth-backend/commit/164eba) - -### feat +### ๐Ÿš€ Features - Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) -### fix +### ๐Ÿ”ง Bug Fixes - Property fields in a property list cannot be removed ([#160](https://github.com/reearth/reearth-backend/pull/160)) [`358237`](https://github.com/reearth/reearth-backend/commit/358237) -### test +### ๐Ÿงช Testing - Unit test for mongo auth request repo ([#159](https://github.com/reearth/reearth-backend/pull/159)) [`5afc81`](https://github.com/reearth/reearth-backend/commit/5afc81) -### +### Miscellaneous Tasks -- V0.8.0 [`64d646`](https://github.com/reearth/reearth-backend/commit/64d646) +- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) ## 0.8.0 - 2022-06-17 @@ -341,4 +331,4 @@ All notable changes to this project will be documented in this file. 
- Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) - Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - \ No newline at end of file + From 70fed0ba5f92e93f8ea7ef0254b9b25d5854b16e Mon Sep 17 00:00:00 2001 From: KaWaite Date: Wed, 3 Aug 2022 11:00:41 +0900 Subject: [PATCH 248/253] chore: add new frontend endpoint (for Vite@3) --- internal/app/app.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/app/app.go b/internal/app/app.go index 8b007d4f4..95d423316 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -139,7 +139,7 @@ func allowedOrigins(cfg *ServerConfig) []string { } origins := append([]string{}, cfg.Config.Origins...) if cfg.Debug { - origins = append(origins, "http://localhost:3000", "http://localhost:8080") + origins = append(origins, "http://localhost:3000", "http://127.0.0.1:3000", "http://localhost:8080") } return origins } From 61b03abd90f9f4ba01c882ab1ba1f9f8f3757bc8 Mon Sep 17 00:00:00 2001 From: rot1024 Date: Wed, 3 Aug 2022 19:01:21 +0900 Subject: [PATCH 249/253] feat: configurable server host --- internal/app/config.go | 1 + internal/app/main.go | 11 ++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/internal/app/config.go b/internal/app/config.go index 0cd45a16a..606fa11ef 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -18,6 +18,7 @@ const configPrefix = "reearth" type Config struct { Port string `default:"8080" envconfig:"PORT"` + ServerHost string Host string `default:"http://localhost:8080"` Host_Web string Dev bool diff --git a/internal/app/main.go b/internal/app/main.go index 4750ab597..18415a05e 100644 --- a/internal/app/main.go +++ b/internal/app/main.go @@ -66,10 +66,15 @@ func NewServer(ctx context.Context, cfg *ServerConfig) *WebServer { port = "8080" } - address := "0.0.0.0:" + port - if cfg.Debug { - address = "localhost:" + port 
+ host := cfg.Config.ServerHost + if host == "" { + if cfg.Debug { + host = "localhost" + } else { + host = "0.0.0.0" + } } + address := host + ":" + port w := &WebServer{ address: address, From 897976c438ef67ee2c5f6010a669fbdcb852caad Mon Sep 17 00:00:00 2001 From: keiya sasaki <34934510+keiya01@users.noreply.github.com> Date: Tue, 9 Aug 2022 08:31:02 +0900 Subject: [PATCH 250/253] feat: setup timeline buitlin widget (#161) * feat: setup timeline buitlin widget * fix: enable singleOnly flag * fix: improve description * Update pkg/builtin/manifest_ja.yml Co-authored-by: rot1024 Co-authored-by: rot1024 --- pkg/builtin/manifest.yml | 12 ++++++++++++ pkg/builtin/manifest_ja.yml | 3 +++ 2 files changed, 15 insertions(+) diff --git a/pkg/builtin/manifest.yml b/pkg/builtin/manifest.yml index 8901d0aca..1cb30f101 100644 --- a/pkg/builtin/manifest.yml +++ b/pkg/builtin/manifest.yml @@ -1918,3 +1918,15 @@ extensions: title: Layer type: ref ui: layer + - id: timeline + type: widget + name: Timeline + description: A timeline widget that allows for viewing layers and data at specific points in time. 
+ singleOnly: true + widgetLayout: + extendable: + horizontally: true + defaultLocation: + zone: outer + section: center + area: bottom diff --git a/pkg/builtin/manifest_ja.yml b/pkg/builtin/manifest_ja.yml index 2f53cedab..56694679e 100644 --- a/pkg/builtin/manifest_ja.yml +++ b/pkg/builtin/manifest_ja.yml @@ -904,3 +904,6 @@ extensions: clusterLabelTypography: title: ใƒฉใƒ™ใƒซ description: ใƒฉใƒ™ใƒซใฎใ‚นใ‚ฟใ‚คใƒซใ‚’ๆŒ‡ๅฎšใ—ใพใ™ใ€‚ + timeline: + name: ใ‚ฟใ‚คใƒ ใƒฉใ‚คใƒณ + description: ๆ™‚็ณปๅˆ—ใƒ‡ใƒผใ‚ฟใ‚’่กจ็คบใ™ใ‚‹ใŸใ‚ใซใ€ๆ™‚ๅˆปใ‚’ๅค‰ๆ›ดใ—ใŸใ‚Šๆ™‚้–“ใ‚’ๅ†็”Ÿใ—ใŸใ‚Šใ™ใ‚‹ใ“ใจใŒใงใใพใ™ใ€‚ From 12c6dfb576d0d4a71dd38eea17eacb4a730e7f21 Mon Sep 17 00:00:00 2001 From: reearth-bot Date: Wed, 10 Aug 2022 10:52:01 +0000 Subject: [PATCH 251/253] v0.10.0 --- CHANGELOG.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 945ceef63..dd57e5f05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,17 @@ # Changelog All notable changes to this project will be documented in this file. +## 0.10.0 - 2022-08-10 + +### ๐Ÿš€ Features + +- Configurable server host [`61b03a`](https://github.com/reearth/reearth-backend/commit/61b03a) + +### Miscellaneous Tasks + +- Add new frontend endpoint (for Vite@3) [`70fed0`](https://github.com/reearth/reearth-backend/commit/70fed0) +- Fix changelog [skip ci] [`895a64`](https://github.com/reearth/reearth-backend/commit/895a64) + ## 0.9.0 - 2022-07-20 ### ๐Ÿš€ Features @@ -331,4 +342,4 @@ All notable changes to this project will be documented in this file. 
- Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) - Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - + \ No newline at end of file From 276ef5f50090391639efeed7cb93d0da9979f39d Mon Sep 17 00:00:00 2001 From: rot1024 Date: Fri, 12 Aug 2022 13:31:50 +0900 Subject: [PATCH 252/253] feat: installing plugins from marketplace (#162) --- internal/adapter/gql/generated.go | 470 ------------------ .../adapter/gql/gqlmodel/convert_plugin.go | 14 - internal/adapter/gql/gqlmodel/models_gen.go | 8 - internal/adapter/gql/loader_plugin.go | 9 - .../adapter/gql/resolver_mutation_layer.go | 25 +- .../adapter/gql/resolver_mutation_scene.go | 4 +- internal/adapter/gql/resolver_query.go | 4 - internal/app/config.go | 34 ++ internal/app/repo.go | 18 +- .../infrastructure/auth0/authenticator.go | 121 +---- .../auth0/authenticator_test.go | 195 +++----- .../infrastructure/fs/plugin_repository.go | 30 -- internal/infrastructure/github/fetcher.go | 26 - .../infrastructure/github/fetcher_test.go | 69 --- .../infrastructure/github/plugin_registry.go | 35 -- .../github/plugin_registry_test.go | 60 --- .../infrastructure/marketplace/marketplace.go | 112 +++++ .../marketplace/marketplace_test.go | 108 ++++ .../marketplace/testdata/test.zip | Bin 0 -> 1804 bytes internal/infrastructure/memory/layer.go | 8 + internal/infrastructure/memory/plugin.go | 8 + internal/infrastructure/memory/property.go | 8 + .../infrastructure/memory/property_schema.go | 8 + internal/infrastructure/memory/scene.go | 8 + internal/usecase/gateway/container.go | 13 +- internal/usecase/gateway/plugin_registry.go | 5 +- internal/usecase/gateway/plugin_repository.go | 19 - internal/usecase/interactor/layer.go | 18 +- internal/usecase/interactor/plugin.go | 18 +- internal/usecase/interactor/plugin_common.go | 76 +++ internal/usecase/interactor/plugin_upload.go | 27 +- 
internal/usecase/interactor/published.go | 18 +- internal/usecase/interactor/scene.go | 318 ++---------- internal/usecase/interactor/scene_plugin.go | 234 +++++++++ .../usecase/interactor/scene_plugin_test.go | 390 +++++++++++++++ internal/usecase/interactor/user_signup.go | 9 +- internal/usecase/interfaces/layer.go | 2 - internal/usecase/interfaces/plugin.go | 1 - internal/usecase/interfaces/scene.go | 4 +- pkg/cache/cache.go | 18 +- pkg/cache/cache_test.go | 6 +- pkg/id/plugin.go | 13 + pkg/id/plugin_test.go | 60 +++ pkg/layer/layerops/processor.go | 25 +- pkg/layer/layerops/processor_test.go | 14 +- pkg/plugin/builder.go | 4 +- pkg/plugin/builder_test.go | 12 +- pkg/plugin/plugin_test.go | 10 +- pkg/rerror/error.go | 16 +- pkg/rerror/error_test.go | 10 + pkg/scene/sceneops/plugin_migrator.go | 2 +- pkg/util/map.go | 31 ++ pkg/util/map_test.go | 31 ++ schema.graphql | 9 - 54 files changed, 1388 insertions(+), 1407 deletions(-) delete mode 100644 internal/infrastructure/fs/plugin_repository.go delete mode 100644 internal/infrastructure/github/fetcher.go delete mode 100644 internal/infrastructure/github/fetcher_test.go delete mode 100644 internal/infrastructure/github/plugin_registry.go delete mode 100644 internal/infrastructure/github/plugin_registry_test.go create mode 100644 internal/infrastructure/marketplace/marketplace.go create mode 100644 internal/infrastructure/marketplace/marketplace_test.go create mode 100644 internal/infrastructure/marketplace/testdata/test.zip delete mode 100644 internal/usecase/gateway/plugin_repository.go create mode 100644 internal/usecase/interactor/plugin_common.go create mode 100644 internal/usecase/interactor/scene_plugin.go create mode 100644 internal/usecase/interactor/scene_plugin_test.go diff --git a/internal/adapter/gql/generated.go b/internal/adapter/gql/generated.go index c593ad301..33c28edd2 100644 --- a/internal/adapter/gql/generated.go +++ b/internal/adapter/gql/generated.go @@ -616,14 +616,6 @@ type ComplexityRoot 
struct { WidgetLayout func(childComplexity int) int } - PluginMetadata struct { - Author func(childComplexity int) int - CreatedAt func(childComplexity int) int - Description func(childComplexity int) int - Name func(childComplexity int) int - ThumbnailURL func(childComplexity int) int - } - Project struct { Alias func(childComplexity int) int BasicAuthPassword func(childComplexity int) int @@ -798,7 +790,6 @@ type ComplexityRoot struct { DatasetSchemas func(childComplexity int, sceneID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int Datasets func(childComplexity int, datasetSchemaID gqlmodel.ID, first *int, last *int, after *usecase.Cursor, before *usecase.Cursor) int DynamicDatasetSchemas func(childComplexity int, sceneID gqlmodel.ID) int - InstallablePlugins func(childComplexity int) int Layer func(childComplexity int, id gqlmodel.ID) int Me func(childComplexity int) int Node func(childComplexity int, id gqlmodel.ID, typeArg gqlmodel.NodeType) int @@ -1314,7 +1305,6 @@ type QueryResolver interface { DynamicDatasetSchemas(ctx context.Context, sceneID gqlmodel.ID) ([]*gqlmodel.DatasetSchema, error) SearchUser(ctx context.Context, nameOrEmail string) (*gqlmodel.User, error) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) - InstallablePlugins(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) } type SceneResolver interface { Project(ctx context.Context, obj *gqlmodel.Scene) (*gqlmodel.Project, error) @@ -4195,41 +4185,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.PluginExtension.WidgetLayout(childComplexity), true - case "PluginMetadata.author": - if e.complexity.PluginMetadata.Author == nil { - break - } - - return e.complexity.PluginMetadata.Author(childComplexity), true - - case "PluginMetadata.createdAt": - if e.complexity.PluginMetadata.CreatedAt == nil { - break - } - - return 
e.complexity.PluginMetadata.CreatedAt(childComplexity), true - - case "PluginMetadata.description": - if e.complexity.PluginMetadata.Description == nil { - break - } - - return e.complexity.PluginMetadata.Description(childComplexity), true - - case "PluginMetadata.name": - if e.complexity.PluginMetadata.Name == nil { - break - } - - return e.complexity.PluginMetadata.Name(childComplexity), true - - case "PluginMetadata.thumbnailUrl": - if e.complexity.PluginMetadata.ThumbnailURL == nil { - break - } - - return e.complexity.PluginMetadata.ThumbnailURL(childComplexity), true - case "Project.alias": if e.complexity.Project.Alias == nil { break @@ -5108,13 +5063,6 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in return e.complexity.Query.DynamicDatasetSchemas(childComplexity, args["sceneId"].(gqlmodel.ID)), true - case "Query.installablePlugins": - if e.complexity.Query.InstallablePlugins == nil { - break - } - - return e.complexity.Query.InstallablePlugins(childComplexity), true - case "Query.layer": if e.complexity.Query.Layer == nil { break @@ -6586,14 +6534,6 @@ type Plugin { propertySchema: PropertySchema @goField(forceResolver: true) } -type PluginMetadata { - name: String! - description: String! - author: String! - thumbnailUrl: String! - createdAt: DateTime! -} - enum WidgetAreaAlign { START CENTERED @@ -7910,7 +7850,6 @@ type Query { dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! searchUser(nameOrEmail: String!): User checkProjectAlias(alias: String!): ProjectAliasAvailability! - installablePlugins: [PluginMetadata!]! 
} # Mutation @@ -28653,226 +28592,6 @@ func (ec *executionContext) fieldContext_PluginExtension_translatedDescription(c return fc, nil } -func (ec *executionContext) _PluginMetadata_name(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_PluginMetadata_name(ctx, field) - if err != nil { - return graphql.Null - } - ctx = graphql.WithFieldContext(ctx, fc) - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Name, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - -func (ec *executionContext) fieldContext_PluginMetadata_name(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { - fc = &graphql.FieldContext{ - Object: "PluginMetadata", - Field: field, - IsMethod: false, - IsResolver: false, - Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") - }, - } - return fc, nil -} - -func (ec *executionContext) _PluginMetadata_description(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_PluginMetadata_description(ctx, field) - if err != nil { - return graphql.Null - } - ctx = graphql.WithFieldContext(ctx, fc) - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - resTmp, err := ec.ResolverMiddleware(ctx, 
func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Description, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - -func (ec *executionContext) fieldContext_PluginMetadata_description(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { - fc = &graphql.FieldContext{ - Object: "PluginMetadata", - Field: field, - IsMethod: false, - IsResolver: false, - Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") - }, - } - return fc, nil -} - -func (ec *executionContext) _PluginMetadata_author(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_PluginMetadata_author(ctx, field) - if err != nil { - return graphql.Null - } - ctx = graphql.WithFieldContext(ctx, fc) - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.Author, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - -func (ec *executionContext) fieldContext_PluginMetadata_author(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { - fc 
= &graphql.FieldContext{ - Object: "PluginMetadata", - Field: field, - IsMethod: false, - IsResolver: false, - Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") - }, - } - return fc, nil -} - -func (ec *executionContext) _PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_PluginMetadata_thumbnailUrl(ctx, field) - if err != nil { - return graphql.Null - } - ctx = graphql.WithFieldContext(ctx, fc) - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.ThumbnailURL, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(string) - fc.Result = res - return ec.marshalNString2string(ctx, field.Selections, res) -} - -func (ec *executionContext) fieldContext_PluginMetadata_thumbnailUrl(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { - fc = &graphql.FieldContext{ - Object: "PluginMetadata", - Field: field, - IsMethod: false, - IsResolver: false, - Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type String does not have child fields") - }, - } - return fc, nil -} - -func (ec *executionContext) _PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.PluginMetadata) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_PluginMetadata_createdAt(ctx, field) - if err != nil { - return graphql.Null - } - 
ctx = graphql.WithFieldContext(ctx, fc) - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from middleware stack in children - return obj.CreatedAt, nil - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.(time.Time) - fc.Result = res - return ec.marshalNDateTime2timeแšTime(ctx, field.Selections, res) -} - -func (ec *executionContext) fieldContext_PluginMetadata_createdAt(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { - fc = &graphql.FieldContext{ - Object: "PluginMetadata", - Field: field, - IsMethod: false, - IsResolver: false, - Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - return nil, errors.New("field of type DateTime does not have child fields") - }, - } - return fc, nil -} - func (ec *executionContext) _Project_id(ctx context.Context, field graphql.CollectedField, obj *gqlmodel.Project) (ret graphql.Marshaler) { fc, err := ec.fieldContext_Project_id(ctx, field) if err != nil { @@ -35604,62 +35323,6 @@ func (ec *executionContext) fieldContext_Query_checkProjectAlias(ctx context.Con return fc, nil } -func (ec *executionContext) _Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { - fc, err := ec.fieldContext_Query_installablePlugins(ctx, field) - if err != nil { - return graphql.Null - } - ctx = graphql.WithFieldContext(ctx, fc) - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = graphql.Null - } - }() - resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { - ctx = rctx // use context from 
middleware stack in children - return ec.resolvers.Query().InstallablePlugins(rctx) - }) - if err != nil { - ec.Error(ctx, err) - return graphql.Null - } - if resTmp == nil { - if !graphql.HasFieldError(ctx, fc) { - ec.Errorf(ctx, "must not be null") - } - return graphql.Null - } - res := resTmp.([]*gqlmodel.PluginMetadata) - fc.Result = res - return ec.marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx, field.Selections, res) -} - -func (ec *executionContext) fieldContext_Query_installablePlugins(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { - fc = &graphql.FieldContext{ - Object: "Query", - Field: field, - IsMethod: true, - IsResolver: true, - Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { - switch field.Name { - case "name": - return ec.fieldContext_PluginMetadata_name(ctx, field) - case "description": - return ec.fieldContext_PluginMetadata_description(ctx, field) - case "author": - return ec.fieldContext_PluginMetadata_author(ctx, field) - case "thumbnailUrl": - return ec.fieldContext_PluginMetadata_thumbnailUrl(ctx, field) - case "createdAt": - return ec.fieldContext_PluginMetadata_createdAt(ctx, field) - } - return nil, fmt.Errorf("no field named %q was found under type PluginMetadata", field.Name) - }, - } - return fc, nil -} - func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { fc, err := ec.fieldContext_Query___type(ctx, field) if err != nil { @@ -51691,62 +51354,6 @@ func (ec *executionContext) _PluginExtension(ctx context.Context, sel ast.Select return out } -var pluginMetadataImplementors = []string{"PluginMetadata"} - -func (ec *executionContext) _PluginMetadata(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.PluginMetadata) graphql.Marshaler { - fields := graphql.CollectFields(ec.OperationContext, sel, 
pluginMetadataImplementors) - out := graphql.NewFieldSet(fields) - var invalids uint32 - for i, field := range fields { - switch field.Name { - case "__typename": - out.Values[i] = graphql.MarshalString("PluginMetadata") - case "name": - - out.Values[i] = ec._PluginMetadata_name(ctx, field, obj) - - if out.Values[i] == graphql.Null { - invalids++ - } - case "description": - - out.Values[i] = ec._PluginMetadata_description(ctx, field, obj) - - if out.Values[i] == graphql.Null { - invalids++ - } - case "author": - - out.Values[i] = ec._PluginMetadata_author(ctx, field, obj) - - if out.Values[i] == graphql.Null { - invalids++ - } - case "thumbnailUrl": - - out.Values[i] = ec._PluginMetadata_thumbnailUrl(ctx, field, obj) - - if out.Values[i] == graphql.Null { - invalids++ - } - case "createdAt": - - out.Values[i] = ec._PluginMetadata_createdAt(ctx, field, obj) - - if out.Values[i] == graphql.Null { - invalids++ - } - default: - panic("unknown field " + strconv.Quote(field.Name)) - } - } - out.Dispatch() - if invalids > 0 { - return graphql.Null - } - return out -} - var projectImplementors = []string{"Project", "Node"} func (ec *executionContext) _Project(ctx context.Context, sel ast.SelectionSet, obj *gqlmodel.Project) graphql.Marshaler { @@ -53464,29 +53071,6 @@ func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) gr return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) } - out.Concurrently(i, func() graphql.Marshaler { - return rrm(innerCtx) - }) - case "installablePlugins": - field := field - - innerFunc := func(ctx context.Context) (res graphql.Marshaler) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - } - }() - res = ec._Query_installablePlugins(ctx, field) - if res == graphql.Null { - atomic.AddUint32(&invalids, 1) - } - return res - } - - rrm := func(ctx context.Context) graphql.Marshaler { - return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc) - } - out.Concurrently(i, 
func() graphql.Marshaler { return rrm(innerCtx) }) @@ -57328,60 +56912,6 @@ func (ec *executionContext) marshalNPluginExtensionType2githubแš—comแš‹reearth return v } -func (ec *executionContext) marshalNPluginMetadata2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadataแš„(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.PluginMetadata) graphql.Marshaler { - ret := make(graphql.Array, len(v)) - var wg sync.WaitGroup - isLen1 := len(v) == 1 - if !isLen1 { - wg.Add(len(v)) - } - for i := range v { - i := i - fc := &graphql.FieldContext{ - Index: &i, - Result: &v[i], - } - ctx := graphql.WithFieldContext(ctx, fc) - f := func(i int) { - defer func() { - if r := recover(); r != nil { - ec.Error(ctx, ec.Recover(ctx, r)) - ret = nil - } - }() - if !isLen1 { - defer wg.Done() - } - ret[i] = ec.marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx, sel, v[i]) - } - if isLen1 { - f(i) - } else { - go f(i) - } - - } - wg.Wait() - - for _, e := range ret { - if e == graphql.Null { - return graphql.Null - } - } - - return ret -} - -func (ec *executionContext) marshalNPluginMetadata2แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšPluginMetadata(ctx context.Context, sel ast.SelectionSet, v *gqlmodel.PluginMetadata) graphql.Marshaler { - if v == nil { - if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { - ec.Errorf(ctx, "the requested element is null which the schema does not allow") - } - return graphql.Null - } - return ec._PluginMetadata(ctx, sel, v) -} - func (ec *executionContext) marshalNProject2แš•แš–githubแš—comแš‹reearthแš‹reearthแš‘backendแš‹internalแš‹adapterแš‹gqlแš‹gqlmodelแšProject(ctx context.Context, sel ast.SelectionSet, v []*gqlmodel.Project) graphql.Marshaler { ret := make(graphql.Array, len(v)) var wg sync.WaitGroup diff --git a/internal/adapter/gql/gqlmodel/convert_plugin.go 
b/internal/adapter/gql/gqlmodel/convert_plugin.go index 9b5b7596c..485d2fb4e 100644 --- a/internal/adapter/gql/gqlmodel/convert_plugin.go +++ b/internal/adapter/gql/gqlmodel/convert_plugin.go @@ -57,20 +57,6 @@ func ToPluginExtensionType(t plugin.ExtensionType) PluginExtensionType { return PluginExtensionType("") } -func ToPluginMetadata(t *plugin.Metadata) *PluginMetadata { - if t == nil { - return nil - } - - return &PluginMetadata{ - Name: t.Name, - Description: t.Description, - ThumbnailURL: t.ThumbnailUrl, - Author: t.Author, - CreatedAt: t.CreatedAt, - } -} - func ToPluginWidgetLayout(wl *plugin.WidgetLayout) *WidgetLayout { if wl == nil { return nil diff --git a/internal/adapter/gql/gqlmodel/models_gen.go b/internal/adapter/gql/gqlmodel/models_gen.go index 56cedf1a2..965599ea3 100644 --- a/internal/adapter/gql/gqlmodel/models_gen.go +++ b/internal/adapter/gql/gqlmodel/models_gen.go @@ -719,14 +719,6 @@ type PluginExtension struct { TranslatedDescription string `json:"translatedDescription"` } -type PluginMetadata struct { - Name string `json:"name"` - Description string `json:"description"` - Author string `json:"author"` - ThumbnailURL string `json:"thumbnailUrl"` - CreatedAt time.Time `json:"createdAt"` -} - type Project struct { ID ID `json:"id"` IsArchived bool `json:"isArchived"` diff --git a/internal/adapter/gql/loader_plugin.go b/internal/adapter/gql/loader_plugin.go index d103ddd0a..1ec7ac0b4 100644 --- a/internal/adapter/gql/loader_plugin.go +++ b/internal/adapter/gql/loader_plugin.go @@ -36,15 +36,6 @@ func (c *PluginLoader) Fetch(ctx context.Context, ids []gqlmodel.ID) ([]*gqlmode return plugins, nil } -func (c *PluginLoader) FetchPluginMetadata(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) { - res, err := c.usecase.FetchPluginMetadata(ctx, getOperator(ctx)) - if err != nil { - return nil, err - } - - return util.Map(res, gqlmodel.ToPluginMetadata), nil -} - // data loader type PluginDataLoader interface { diff --git 
a/internal/adapter/gql/resolver_mutation_layer.go b/internal/adapter/gql/resolver_mutation_layer.go index 0a244ff02..358715c75 100644 --- a/internal/adapter/gql/resolver_mutation_layer.go +++ b/internal/adapter/gql/resolver_mutation_layer.go @@ -15,18 +15,20 @@ func (r *mutationResolver) AddLayerItem(ctx context.Context, input gqlmodel.AddL return nil, err } - pid, err := gqlmodel.ToPluginID(input.PluginID) - if err != nil { - return nil, err - } + // layers are no longer extendable with plugins + // pid, err := gqlmodel.ToPluginID(input.PluginID) + // if err != nil { + // return nil, err + // } layer, parent, err := usecases(ctx).Layer.AddItem(ctx, interfaces.AddLayerItemInput{ ParentLayerID: lid, - PluginID: &pid, - ExtensionID: lo.ToPtr(id.PluginExtensionID(input.ExtensionID)), - Index: input.Index, - Name: gqlmodel.RefToString(input.Name), - LatLng: gqlmodel.ToPropertyLatLng(input.Lat, input.Lng), + // layers are no longer extendable with plugins + // PluginID: &pid, + ExtensionID: lo.ToPtr(id.PluginExtensionID(input.ExtensionID)), + Index: input.Index, + Name: gqlmodel.RefToString(input.Name), + LatLng: gqlmodel.ToPropertyLatLng(input.Lat, input.Lng), // LinkedDatasetID: input.LinkedDatasetID, }, getOperator(ctx)) if err != nil { @@ -47,8 +49,9 @@ func (r *mutationResolver) AddLayerGroup(ctx context.Context, input gqlmodel.Add } layer, parent, err := usecases(ctx).Layer.AddGroup(ctx, interfaces.AddLayerGroupInput{ - ParentLayerID: lid, - PluginID: gqlmodel.ToPluginIDRef(input.PluginID), + ParentLayerID: lid, + // layers are no longer extendable with plugins + // PluginID: gqlmodel.ToPluginIDRef(input.PluginID), ExtensionID: gqlmodel.ToStringIDRef[id.PluginExtension](input.ExtensionID), Index: input.Index, Name: gqlmodel.RefToString(input.Name), diff --git a/internal/adapter/gql/resolver_mutation_scene.go b/internal/adapter/gql/resolver_mutation_scene.go index 8c4e5ee55..2578ea7d6 100644 --- a/internal/adapter/gql/resolver_mutation_scene.go +++ 
b/internal/adapter/gql/resolver_mutation_scene.go @@ -131,14 +131,14 @@ func (r *mutationResolver) InstallPlugin(ctx context.Context, input gqlmodel.Ins return nil, err } - scene, pl, pr, err := usecases(ctx).Scene.InstallPlugin(ctx, sid, pid, getOperator(ctx)) + scene, pr, err := usecases(ctx).Scene.InstallPlugin(ctx, sid, pid, getOperator(ctx)) if err != nil { return nil, err } return &gqlmodel.InstallPluginPayload{ Scene: gqlmodel.ToScene(scene), ScenePlugin: &gqlmodel.ScenePlugin{ - PluginID: gqlmodel.IDFromPluginID(pl), + PluginID: input.PluginID, PropertyID: gqlmodel.IDFromRef(pr), }, }, nil diff --git a/internal/adapter/gql/resolver_query.go b/internal/adapter/gql/resolver_query.go index f3258f931..90aaa1611 100644 --- a/internal/adapter/gql/resolver_query.go +++ b/internal/adapter/gql/resolver_query.go @@ -259,7 +259,3 @@ func (r *queryResolver) SearchUser(ctx context.Context, nameOrEmail string) (*gq func (r *queryResolver) CheckProjectAlias(ctx context.Context, alias string) (*gqlmodel.ProjectAliasAvailability, error) { return loaders(ctx).Project.CheckAlias(ctx, alias) } - -func (r *queryResolver) InstallablePlugins(ctx context.Context) ([]*gqlmodel.PluginMetadata, error) { - return loaders(ctx).Plugin.FetchPluginMetadata(ctx) -} diff --git a/internal/app/config.go b/internal/app/config.go index 606fa11ef..15e5c61a5 100644 --- a/internal/app/config.go +++ b/internal/app/config.go @@ -12,6 +12,8 @@ import ( "github.com/kelseyhightower/envconfig" "github.com/reearth/reearth-backend/pkg/auth" "github.com/reearth/reearth-backend/pkg/log" + "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" ) const configPrefix = "reearth" @@ -33,6 +35,7 @@ type Config struct { Tracer string TracerSample float64 GCS GCSConfig + Marketplace MarketplaceConfig AssetBaseURL string `default:"http://localhost:8080/assets"` Origins []string Web WebConfig @@ -301,3 +304,34 @@ func addHTTPScheme(host string) string { } return host } + +type MarketplaceConfig struct { + 
Endpoint string + OAuth OAuthClientCredentialsConfig +} + +type OAuthClientCredentialsConfig struct { + ClientID string + ClientSecret string + TokenURL string + Scopes []string + Audience []string +} + +func (c OAuthClientCredentialsConfig) Config() clientcredentials.Config { + var params url.Values + if len(c.Audience) > 0 { + params = url.Values{ + "audience": c.Audience, + } + } + + return clientcredentials.Config{ + ClientID: c.ClientID, + ClientSecret: c.ClientSecret, + TokenURL: c.TokenURL, + Scopes: c.Scopes, + AuthStyle: oauth2.AuthStyleInParams, + EndpointParams: params, + } +} diff --git a/internal/app/repo.go b/internal/app/repo.go index 4399297ed..d3620e513 100644 --- a/internal/app/repo.go +++ b/internal/app/repo.go @@ -5,19 +5,17 @@ import ( "fmt" "time" - "github.com/reearth/reearth-backend/internal/infrastructure/mailer" - - "github.com/reearth/reearth-backend/internal/infrastructure/github" - "github.com/reearth/reearth-backend/internal/infrastructure/google" - "github.com/spf13/afero" - "github.com/reearth/reearth-backend/internal/infrastructure/auth0" "github.com/reearth/reearth-backend/internal/infrastructure/fs" "github.com/reearth/reearth-backend/internal/infrastructure/gcs" + "github.com/reearth/reearth-backend/internal/infrastructure/google" + "github.com/reearth/reearth-backend/internal/infrastructure/mailer" + "github.com/reearth/reearth-backend/internal/infrastructure/marketplace" mongorepo "github.com/reearth/reearth-backend/internal/infrastructure/mongo" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/internal/usecase/repo" "github.com/reearth/reearth-backend/pkg/log" + "github.com/spf13/afero" "go.mongodb.org/mongo-driver/mongo" "go.mongodb.org/mongo-driver/mongo/options" "go.opentelemetry.io/contrib/instrumentation/go.mongodb.org/mongo-driver/mongo/otelmongo" @@ -66,15 +64,17 @@ func initReposAndGateways(ctx context.Context, conf *Config, debug bool) (*repo. 
// Auth0 gateways.Authenticator = auth0.New(conf.Auth0.Domain, conf.Auth0.ClientID, conf.Auth0.ClientSecret) - // github - gateways.PluginRegistry = github.NewPluginRegistry() - // google gateways.Google = google.NewGoogle() // mailer gateways.Mailer = initMailer(conf) + // Marketplace + if conf.Marketplace.Endpoint != "" { + gateways.PluginRegistry = marketplace.New(conf.Marketplace.Endpoint, conf.Marketplace.OAuth.Config()) + } + // release lock of all scenes if err := repos.SceneLock.ReleaseAllLock(context.Background()); err != nil { log.Fatalln(fmt.Sprintf("repo initialization error: %+v", err)) diff --git a/internal/infrastructure/auth0/authenticator.go b/internal/infrastructure/auth0/authenticator.go index b5ac5c0bb..2d825779b 100644 --- a/internal/infrastructure/auth0/authenticator.go +++ b/internal/infrastructure/auth0/authenticator.go @@ -2,34 +2,27 @@ package auth0 import ( "bytes" + "context" "encoding/json" "errors" "io" "net/http" + "net/url" "strings" - "sync" - "time" "github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/reearth/reearth-backend/pkg/log" + "github.com/reearth/reearth-backend/pkg/rerror" + "golang.org/x/oauth2" + "golang.org/x/oauth2/clientcredentials" ) type Auth0 struct { - domain string + base string client *http.Client - clientID string - clientSecret string - token string - expireAt time.Time - lock sync.Mutex - current func() time.Time disableLogging bool } -func currentTime() time.Time { - return time.Now() -} - type response struct { ID string `json:"user_id"` Name string `json:"name"` @@ -37,9 +30,6 @@ type response struct { Email string `json:"email"` EmailVerified bool `json:"email_verified"` Message string `json:"message"` - Token string `json:"access_token"` - Scope string `json:"scope"` - ExpiresIn int64 `json:"expires_in"` ErrorDescription string `json:"error_description"` } @@ -65,15 +55,24 @@ func (u response) Error() string { } func New(domain, clientID, clientSecret string) *Auth0 { + base := 
urlFromDomain(domain) + conf := clientcredentials.Config{ + ClientID: clientID, + ClientSecret: clientSecret, + TokenURL: base + "oauth/token", + Scopes: []string{"read:users", "update:users"}, + AuthStyle: oauth2.AuthStyleInParams, + EndpointParams: url.Values{ + "audience": []string{base + "api/v2/"}, + }, + } return &Auth0{ - domain: urlFromDomain(domain), - clientID: clientID, - clientSecret: clientSecret, + base: base, + client: conf.Client(context.Background()), } } func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway.AuthenticatorUser, err error) { - err = a.updateToken() if err != nil { return } @@ -94,12 +93,9 @@ func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway } var r response - r, err = a.exec(http.MethodPatch, "api/v2/users/"+p.ID, a.token, payload) + r, err = a.exec(http.MethodPatch, "api/v2/users/"+p.ID, payload) if err != nil { - if !a.disableLogging { - log.Errorf("auth0: update user: %+v", err) - } - err = errors.New("failed to update user") + err = rerror.ErrInternalByWith("failed to update user", err) return } @@ -107,70 +103,11 @@ func (a *Auth0) UpdateUser(p gateway.AuthenticatorUpdateUserParam) (data gateway return } -func (a *Auth0) needsFetchToken() bool { - if a == nil { - return false - } - if a.current == nil { - a.current = currentTime - } - return a.expireAt.IsZero() || a.expireAt.Sub(a.current()) <= time.Hour -} - -func (a *Auth0) updateToken() error { - if a == nil || !a.needsFetchToken() { - return nil - } - - if a.clientID == "" || a.clientSecret == "" || a.domain == "" { - return errors.New("auth0 is not set up") - } - - a.lock.Lock() - defer a.lock.Unlock() - - if !a.needsFetchToken() { - return nil - } - - r, err := a.exec(http.MethodPost, "oauth/token", "", map[string]string{ - "client_id": a.clientID, - "client_secret": a.clientSecret, - "audience": urlFromDomain(a.domain) + "api/v2/", - "grant_type": "client_credentials", - "scope": "read:users update:users", - }) - 
if err != nil { - if !a.disableLogging { - log.Errorf("auth0: access token error: %+v", err) - } - return errors.New("failed to auth") - } - - if a.current == nil { - a.current = currentTime - } - - if r.Token == "" { - if !a.disableLogging { - log.Errorf("auth0: no token: %+v", r) - } - return errors.New("failed to auth") - } - a.token = r.Token - a.expireAt = a.current().Add(time.Duration(r.ExpiresIn * int64(time.Second))) - - return nil -} - -func (a *Auth0) exec(method, path, token string, b interface{}) (r response, err error) { - if a == nil || a.domain == "" { +func (a *Auth0) exec(method, path string, b any) (r response, err error) { + if a == nil || a.base == "" { err = errors.New("auth0: domain is not set") return } - if a.client == nil { - a.client = http.DefaultClient - } var body io.Reader = nil if b != nil { @@ -187,16 +124,11 @@ func (a *Auth0) exec(method, path, token string, b interface{}) (r response, err } var req *http.Request - req, err = http.NewRequest(method, urlFromDomain(a.domain)+path, body) + req, err = http.NewRequest(method, a.base+path, body) if err != nil { return } - req.Header.Set("Content-Type", "application/json") - if token != "" { - req.Header.Set("Authorization", "Bearer "+token) - } - resp, err := a.client.Do(req) if err != nil { return @@ -233,8 +165,5 @@ func urlFromDomain(path string) string { if !strings.HasPrefix(path, "http://") && !strings.HasPrefix(path, "https://") { path = "https://" + path } - if path[len(path)-1] != '/' { - path += "/" - } - return path + return strings.TrimSuffix(path, "/") + "/" } diff --git a/internal/infrastructure/auth0/authenticator_test.go b/internal/infrastructure/auth0/authenticator_test.go index 3ab188526..4bbb35680 100644 --- a/internal/infrastructure/auth0/authenticator_test.go +++ b/internal/infrastructure/auth0/authenticator_test.go @@ -1,22 +1,20 @@ package auth0 import ( - "bytes" "encoding/json" - "io" "net/http" "strings" "testing" - "time" + "github.com/jarcoal/httpmock" 
"github.com/reearth/reearth-backend/internal/usecase/gateway" "github.com/stretchr/testify/assert" ) const ( token = "a" - clientID = "b" - clientSecret = "c" + clientID = "clientclient" + clientSecret = "secretsecret" domain = "https://reearth-dev.auth0.com/" userID = "x" expiresIn = 24 * 60 * 60 @@ -24,31 +22,70 @@ const ( userEmail = "e" ) -var ( - current = time.Date(2020, time.April, 1, 0, 0, 0, 0, time.UTC) - current2 = time.Date(2020, time.April, 1, 23, 0, 0, 0, time.UTC) -) - -func TestURLFromDomain(t *testing.T) { - assert.Equal(t, "https://a/", urlFromDomain("a")) - assert.Equal(t, "https://a/", urlFromDomain("a/")) -} - func TestAuth0(t *testing.T) { a := New(domain, clientID, clientSecret) - a.client = client(t) // inject mock - a.current = func() time.Time { return current } a.disableLogging = true - assert.True(t, a.needsFetchToken()) - assert.NoError(t, a.updateToken()) - assert.Equal(t, token, a.token) - assert.Equal(t, current.Add(time.Second*expiresIn), a.expireAt) - assert.False(t, a.needsFetchToken()) + httpmock.Activate() + defer httpmock.Deactivate() + + httpmock.RegisterResponder("POST", domain+"oauth/token", func(req *http.Request) (*http.Response, error) { + _ = req.ParseForm() + assert.Equal(t, domain+"api/v2/", req.Form.Get("audience")) + assert.Equal(t, "client_credentials", req.Form.Get("grant_type")) + assert.Equal(t, "read:users update:users", req.Form.Get("scope")) + assert.Equal(t, clientID, req.Form.Get("client_id")) + assert.Equal(t, clientSecret, req.Form.Get("client_secret")) + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "access_token": token, + "token_type": "Bearer", + "scope": "read:users update:users", + "expires_in": expiresIn, + }) + }) + + httpmock.RegisterResponder("GET", domain+"api/v2/users/"+userID, func(req *http.Request) (*http.Response, error) { + if token != strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") { + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + 
"message": "Unauthorized", + }) + } + + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "user_id": userID, + "username": userName, + "email": userEmail, + "email_verified": true, + }) + }) - a.current = func() time.Time { return current2 } - assert.True(t, a.needsFetchToken()) - a.current = func() time.Time { return current } + httpmock.RegisterResponder("PATCH", domain+"api/v2/users/"+userID, func(req *http.Request) (*http.Response, error) { + if token != strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") { + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "message": "Unauthorized", + }) + } + + var body map[string]string + _ = json.NewDecoder(req.Body).Decode(&body) + + resEmail := body["email"] + if resEmail == "" { + resEmail = userEmail + } + + resUsername := body["username"] + if resUsername == "" { + resUsername = userName + } + + return httpmock.NewJsonResponse(http.StatusOK, map[string]any{ + "user_id": userID, + "username": resUsername, + "email": resEmail, + "email_verified": true, + }) + }) newEmail := "xxxxx" r, err := a.UpdateUser(gateway.AuthenticatorUpdateUserParam{ @@ -62,111 +99,9 @@ func TestAuth0(t *testing.T) { EmailVerified: true, Name: userName, }, r) - - a.current = func() time.Time { return current2 } -} - -func res(i interface{}) io.ReadCloser { - b, _ := json.Marshal(i) - return io.NopCloser(bytes.NewBuffer(b)) } -func client(t *testing.T) *http.Client { - t.Helper() - - return &http.Client{ - Transport: RoundTripFunc(func(req *http.Request) *http.Response { - p := req.URL.Path - var body map[string]string - if req.Body != nil { - _ = json.NewDecoder(req.Body).Decode(&body) - } - - if req.Method == http.MethodPost && p == "/oauth/token" { - assert.Equal(t, domain+"api/v2/", body["audience"]) - assert.Equal(t, "client_credentials", body["grant_type"]) - assert.Equal(t, clientID, body["client_id"]) - assert.Equal(t, clientSecret, body["client_secret"]) - return &http.Response{ - StatusCode: 
http.StatusOK, - Body: res(map[string]interface{}{ - "access_token": token, - "scope": "read:users update:users", - "expires_in": expiresIn, - }), - Header: make(http.Header), - } - } - - if req.Method == http.MethodGet && p == "/api/v2/users/"+userID { - tok := strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") - if token != tok { - return &http.Response{ - StatusCode: http.StatusUnauthorized, - Body: res(map[string]interface{}{ - "message": "Unauthorized", - }), - Header: make(http.Header), - } - } - - return &http.Response{ - StatusCode: http.StatusOK, - Body: res(map[string]interface{}{ - "user_id": userID, - "username": userName, - "email": userEmail, - "email_verified": true, - }), - Header: make(http.Header), - } - } - - if req.Method == http.MethodPatch && p == "/api/v2/users/"+userID { - tok := strings.TrimPrefix(req.Header.Get("Authorization"), "Bearer ") - if token != tok { - return &http.Response{ - StatusCode: http.StatusUnauthorized, - Body: res(map[string]interface{}{ - "message": "Unauthorized", - }), - Header: make(http.Header), - } - } - - username := userName - email := userEmail - if body["username"] != "" { - username = body["username"] - } - if body["email"] != "" { - email = body["email"] - } - return &http.Response{ - StatusCode: http.StatusOK, - Body: res(map[string]interface{}{ - "user_id": userID, - "username": username, - "email": email, - "email_verified": true, - }), - Header: make(http.Header), - } - } - - return &http.Response{ - StatusCode: http.StatusNotFound, - Body: res(map[string]interface{}{ - "message": "Not found", - }), - Header: make(http.Header), - } - }), - } -} - -type RoundTripFunc func(req *http.Request) *http.Response - -func (f RoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) { - return f(req), nil +func TestURLFromDomain(t *testing.T) { + assert.Equal(t, "https://a/", urlFromDomain("a")) + assert.Equal(t, "https://a/", urlFromDomain("a/")) } diff --git 
a/internal/infrastructure/fs/plugin_repository.go b/internal/infrastructure/fs/plugin_repository.go deleted file mode 100644 index 04c8083ab..000000000 --- a/internal/infrastructure/fs/plugin_repository.go +++ /dev/null @@ -1,30 +0,0 @@ -package fs - -import ( - "context" - "path/filepath" - - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/pkg/file" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin/manifest" - "github.com/spf13/afero" -) - -type pluginRepository struct { - fs afero.Fs -} - -func NewPluginRepository(fs afero.Fs) gateway.PluginRepository { - return &pluginRepository{ - fs: fs, - } -} - -func (r *pluginRepository) Data(ctx context.Context, id id.PluginID) (file.Iterator, error) { - return file.NewFsIterator(afero.NewBasePathFs(r.fs, filepath.Join(pluginDir, id.String()))) -} - -func (r *pluginRepository) Manifest(ctx context.Context, id id.PluginID) (*manifest.Manifest, error) { - return readPluginManifest(r.fs, id) -} diff --git a/internal/infrastructure/github/fetcher.go b/internal/infrastructure/github/fetcher.go deleted file mode 100644 index 01e9b9158..000000000 --- a/internal/infrastructure/github/fetcher.go +++ /dev/null @@ -1,26 +0,0 @@ -package github - -import ( - "context" - "fmt" - "io" - "net/http" -) - -func fetchURL(ctx context.Context, url string) (io.ReadCloser, error) { - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, err - } - - res, err := http.DefaultClient.Do(req) - if err != nil { - return nil, err - } - - if res.StatusCode != http.StatusOK { - return nil, fmt.Errorf("StatusCode=%d", res.StatusCode) - } - - return res.Body, nil -} diff --git a/internal/infrastructure/github/fetcher_test.go b/internal/infrastructure/github/fetcher_test.go deleted file mode 100644 index ddee96c34..000000000 --- a/internal/infrastructure/github/fetcher_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package github 
- -import ( - "context" - "net/http" - "net/http/httptest" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFetchURL(t *testing.T) { - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - _, err := rw.Write([]byte(`OK`)) - assert.NoError(t, err) - })) - server2 := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - rw.WriteHeader(http.StatusBadRequest) - })) - - defer func() { - server.Close() - server2.Close() - }() - - tests := []struct { - Name, URL string - Ctx context.Context - ExpectedErr string - }{ - { - Name: "Fail: nil context", - Ctx: nil, - URL: server.URL, - ExpectedErr: "net/http: nil Context", - }, - { - Name: "Fail: nil unsupported protocol scheme", - Ctx: context.Background(), - URL: "", - ExpectedErr: "Get \"\": unsupported protocol scheme \"\"", - }, - { - Name: "Fail: bad request ", - Ctx: context.Background(), - URL: server2.URL, - ExpectedErr: "StatusCode=400", - }, - { - Name: "Success", - Ctx: context.Background(), - URL: server.URL, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.Name, func(t *testing.T) { - body, err := fetchURL(tc.Ctx, tc.URL) - if tc.ExpectedErr != "" { - assert.EqualError(t, err, tc.ExpectedErr) - } else { - _ = body.Close() - assert.NotNil(t, body) - } - }) - } - -} diff --git a/internal/infrastructure/github/plugin_registry.go b/internal/infrastructure/github/plugin_registry.go deleted file mode 100644 index 417783f35..000000000 --- a/internal/infrastructure/github/plugin_registry.go +++ /dev/null @@ -1,35 +0,0 @@ -package github - -import ( - "context" - "encoding/json" - - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/pkg/log" - "github.com/reearth/reearth-backend/pkg/plugin" -) - -type pluginRegistry struct{} - -func NewPluginRegistry() gateway.PluginRegistry { - return &pluginRegistry{} -} - -const source = 
`https://raw.githubusercontent.com/reearth/plugins/main/plugins.json` - -func (d *pluginRegistry) FetchMetadata(ctx context.Context) ([]*plugin.Metadata, error) { - response, err := fetchURL(ctx, source) - if err != nil { - return nil, err - } - - defer func() { err = response.Close() }() - - var result []*plugin.Metadata - err = json.NewDecoder(response).Decode(&result) - if err != nil { - log.Errorf("plugin_registry: error: %s", err) - return nil, gateway.ErrFailedToFetchDataFromPluginRegistry - } - return result, nil -} diff --git a/internal/infrastructure/github/plugin_registry_test.go b/internal/infrastructure/github/plugin_registry_test.go deleted file mode 100644 index 8f763632b..000000000 --- a/internal/infrastructure/github/plugin_registry_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package github - -import ( - "context" - "testing" - "time" - - "github.com/jarcoal/httpmock" - "github.com/reearth/reearth-backend/internal/usecase/gateway" - "github.com/reearth/reearth-backend/pkg/plugin" - "github.com/stretchr/testify/assert" -) - -func TestNewPluginRegistry(t *testing.T) { - d := NewPluginRegistry() - assert.NotNil(t, d) -} - -func TestPluginRegistry_FetchMetadata(t *testing.T) { - httpmock.Activate() - defer httpmock.DeactivateAndReset() - - httpmock.RegisterResponder( - "GET", - "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", - httpmock.NewStringResponder( - 200, - `[{"name": "reearth","description": "Official Plugin", "author": "reearth", "thumbnailUrl": "", "createdAt": "2021-03-16T04:19:57.592Z"}]`, - ), - ) - - d := NewPluginRegistry() - res, err := d.FetchMetadata(context.Background()) - tm, _ := time.Parse(time.RFC3339, "2021-03-16T04:19:57.592Z") - - assert.Equal(t, res, []*plugin.Metadata{ - { - Name: "reearth", - Description: "Official Plugin", - Author: "reearth", - ThumbnailUrl: "", - CreatedAt: tm, - }, - }) - assert.NoError(t, err) - - // fail: bad request - httpmock.RegisterResponder("GET", 
"https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", - httpmock.NewStringResponder(400, `mock bad request`)) - _, err = d.FetchMetadata(context.Background()) - - assert.EqualError(t, err, "StatusCode=400") - - // fail: unable to marshal - httpmock.RegisterResponder("GET", "https://raw.githubusercontent.com/reearth/plugins/main/plugins.json", - httpmock.NewStringResponder(200, `{"hoge": "test"}`)) - _, err = d.FetchMetadata(context.Background()) - - assert.Equal(t, gateway.ErrFailedToFetchDataFromPluginRegistry, err) -} diff --git a/internal/infrastructure/marketplace/marketplace.go b/internal/infrastructure/marketplace/marketplace.go new file mode 100644 index 000000000..0dac3946d --- /dev/null +++ b/internal/infrastructure/marketplace/marketplace.go @@ -0,0 +1,112 @@ +package marketplace + +import ( + "context" + "fmt" + "net/http" + "strings" + + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" + "github.com/reearth/reearth-backend/pkg/rerror" + "golang.org/x/oauth2/clientcredentials" +) + +var pluginPackageSizeLimit int64 = 10 * 1024 * 1024 // 10MB + +type Marketplace struct { + endpoint string + conf clientcredentials.Config + client *http.Client +} + +func New(endpoint string, conf clientcredentials.Config) *Marketplace { + return &Marketplace{ + endpoint: strings.TrimSuffix(endpoint, "/"), + client: conf.Client(context.Background()), + } +} + +func (m *Marketplace) FetchPluginPackage(ctx context.Context, pid id.PluginID) (*pluginpack.Package, error) { + purl, err := m.getPluginURL(ctx, pid) + if err != nil { + return nil, err + } + return m.downloadPluginPackage(ctx, purl) +} + +func (m *Marketplace) getPluginURL(_ context.Context, pid id.PluginID) (string, error) { + return fmt.Sprintf("%s/api/plugins/%s/%s.zip", m.endpoint, pid.Name(), pid.Version().String()), nil +} + +/* +func (m *Marketplace) getPluginURL(ctx context.Context, pid id.PluginID) (string, error) { + body := 
strings.NewReader(fmt.Sprintf( + `{"query":"query { node(id:"%s" type:PLUGIN) { ...Plugin { url } } }"}`, + pid.Name(), + )) + req, err := http.NewRequestWithContext(ctx, "POST", m.endpoint+"/graphql", body) + if err != nil { + return "", rerror.ErrInternalBy(err) + } + req.Header.Set("Content-Type", "application/json") + + res, err := m.client.Do(req) + if err != nil { + return "", rerror.ErrInternalBy(err) + } + if res.StatusCode != http.StatusOK { + return "", rerror.ErrNotFound + } + defer func() { + _ = res.Body.Close() + }() + var pluginRes response + if err := json.NewDecoder(res.Body).Decode(&pluginRes); err != nil { + return "", rerror.ErrInternalBy(err) + } + if pluginRes.Errors != nil { + return "", rerror.ErrInternalBy(fmt.Errorf("gql returns errors: %v", pluginRes.Errors)) + } + + purl := pluginRes.PluginURL() + if purl == "" { + return "", rerror.ErrNotFound + } + return purl, nil +} + +type response struct { + Data pluginNodeQueryData `json:"data"` + Errors any `json:"errors"` +} + +func (r response) PluginURL() string { + return r.Data.Node.URL +} + +type pluginNodeQueryData struct { + Node plugin +} + +type plugin struct { + URL string `json:"url"` +} +*/ + +func (m *Marketplace) downloadPluginPackage(ctx context.Context, url string) (*pluginpack.Package, error) { + res, err := m.client.Get(url) + if err != nil { + return nil, rerror.ErrInternalBy(err) + } + defer func() { + _ = res.Body.Close() + }() + if res.StatusCode == http.StatusNotFound { + return nil, rerror.ErrNotFound + } + if res.StatusCode != http.StatusOK { + return nil, rerror.ErrInternalBy(fmt.Errorf("status code is %s", res.Status)) + } + return pluginpack.PackageFromZip(res.Body, nil, pluginPackageSizeLimit) +} diff --git a/internal/infrastructure/marketplace/marketplace_test.go b/internal/infrastructure/marketplace/marketplace_test.go new file mode 100644 index 000000000..1876c9b4b --- /dev/null +++ b/internal/infrastructure/marketplace/marketplace_test.go @@ -0,0 +1,108 @@ 
+package marketplace + +import ( + "context" + "io" + "net/http" + "net/url" + "os" + "testing" + + "github.com/jarcoal/httpmock" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/stretchr/testify/assert" + "golang.org/x/oauth2/clientcredentials" +) + +func TestMarketplace_FetchPluginPackage(t *testing.T) { + ac := "xxxxx" + pid := id.MustPluginID("testplugin~1.0.1") + + f, err := os.Open("testdata/test.zip") + assert.NoError(t, err) + defer func() { + _ = f.Close() + }() + z, err := io.ReadAll(f) + assert.NoError(t, err) + + httpmock.Activate() + defer httpmock.Deactivate() + + httpmock.RegisterResponder( + "POST", "https://marketplace.example.com/oauth/token", + func(req *http.Request) (*http.Response, error) { + _ = req.ParseForm() + if req.Form.Get("grant_type") != "client_credentials" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Form.Get("audience") != "d" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Form.Get("client_id") != "x" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Form.Get("client_secret") != "y" { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + + resp, err := httpmock.NewJsonResponse(200, map[string]any{ + "access_token": ac, + "token_type": "Bearer", + "expires_in": 86400, + }) + if err != nil { + return httpmock.NewStringResponse(http.StatusInternalServerError, ""), nil + } + return resp, nil + }, + ) + + /* + httpmock.RegisterResponder( + "POST", "https://marketplace.example.com/graphql", + func(req *http.Request) (*http.Response, error) { + if req.Header.Get("Authorization") != "Bearer "+ac { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + if req.Header.Get("Content-Type") != "application/json" { + return httpmock.NewStringResponse(http.StatusBadRequest, ""), nil + } + resp, err := httpmock.NewJsonResponse(200, map[string]any{ + "data": map[string]any{ + "node": 
map[string]string{ + "url": "https://marketplace.example.com/aaa.zip", + }, + }, + }) + if err != nil { + return httpmock.NewStringResponse(http.StatusInternalServerError, ""), nil + } + return resp, nil + }, + ) + */ + + httpmock.RegisterResponder( + "GET", "https://marketplace.example.com/api/plugins/testplugin/1.0.1.zip", + func(req *http.Request) (*http.Response, error) { + if req.Header.Get("Authorization") != "Bearer "+ac { + return httpmock.NewStringResponse(http.StatusUnauthorized, ""), nil + } + return httpmock.NewBytesResponse(http.StatusOK, z), nil + }, + ) + + m := New("https://marketplace.example.com/", clientcredentials.Config{ + ClientID: "x", + ClientSecret: "y", + TokenURL: "https://marketplace.example.com/oauth/token", + EndpointParams: url.Values{ + "audience": []string{"d"}, + }, + }) + got, err := m.FetchPluginPackage(context.Background(), pid) + assert.NoError(t, err) + // no need to test pluginpack in detail here + assert.Equal(t, id.MustPluginID("testplugin~1.0.1"), got.Manifest.Plugin.ID()) +} diff --git a/internal/infrastructure/marketplace/testdata/test.zip b/internal/infrastructure/marketplace/testdata/test.zip new file mode 100644 index 0000000000000000000000000000000000000000..0d371acbe428c4d8d123d08cbb4d70400e987e89 GIT binary patch literal 1804 zcmWIWW@h1H0D;&!o*`fclwf6$VJJy0F3}GS;bdUGG=n+m3J{l8a5FHnd}U-{U=aZ- z3;=2e(Hsoxy={YUmg^Z;f}|uE82Dh?^<0AEgG=&@Qo-hAZ4OSnG!?^~w=>TBH5*7A z*l!flVOV5Pcsy>;+XE%DDhw1Y`0x1`RLw|hc97u=GM>?`)OP)X`6tdA+3O!O1GYCW z*M0X}_3V0G-KX43>KPc)`2VUkFfvF@2`ib=)%H@{>!{DY!_undaVmQXf9C$$+rjF* z&ssEyL!oiYuYY@XYxo3Ru1WXujg5@GToQZP{7d$7eK)^X%MKqYW0#GzD!#n0{b#Y_ zqV;=T$F>|?k)0#kZBcsILc+(Yz0M%7s8sUUK8c%262Co{7bO}6TwjrXb6=)e#pREn7@qV}`d1PWoe!{1NLM)Ct z+7_%?uRquFG%1J)SlSnCY1Q*G-m`AORWEtwvlqp7X`J0Kw<+wR)Gdc%kksQPM?ZWx zSdh}DE|Rvu^47256D%7|u6Rin{k5N2`%j(Yk^JE++b(ZQ{AMq>^V+AvSG8pg@1+^n z1aDj$)Af34Ro}J?u9>$u1dii@>7P)Wo8ajQFfX zy~^Aia4h}+#vllzrG{z+rM$%4R3$3~rN_-}kDGQrZti&8JpFO=%EwI`9ycv{+`L&y 
zp_U7jXdre*6;2EWS_KDOFgpf~A@pjnPUEC4eP8V>M4z~QphVF9On0yhN(XnE@DX`ayZ zJ%1+nQec3Vx31Q?Gv}E-C03$r^j zFD11?FRK{tkGq)uNY2kI&d*8J%gImIP|8Tn$;nqJ&o9bJQPQ+V3i51VHeZ@6ogfV~ z9)!hUR=}dUDnr-V5AMmAz&sDaXu*#Z)6ceczgR!@S^uObySG7Nmyt=18CRJCEE&K+ zfZ+fraA743E2M-$vn9YAWE8IQ1!fcj!;(gOm{CZj4bV(bX@k{FTuB~cCa?$tn#sTf zbsj8aU>N{tFqXs*aV)NM4Rfpj!&}D^n8DCg4m1yI3P*$^C|#hYab$f->yCBV=NVgd00kS+UJ literal 0 HcmV?d00001 diff --git a/internal/infrastructure/memory/layer.go b/internal/infrastructure/memory/layer.go index c15939edb..00ada3ad6 100644 --- a/internal/infrastructure/memory/layer.go +++ b/internal/infrastructure/memory/layer.go @@ -22,6 +22,14 @@ func NewLayer() repo.Layer { } } +func NewLayerWith(items ...layer.Layer) repo.Layer { + r := NewLayer() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + func (r *Layer) Filtered(f repo.SceneFilter) repo.Layer { return &Layer{ // note data is shared between the source repo and mutex cannot work well diff --git a/internal/infrastructure/memory/plugin.go b/internal/infrastructure/memory/plugin.go index 68b8a8ad3..4e3db19df 100644 --- a/internal/infrastructure/memory/plugin.go +++ b/internal/infrastructure/memory/plugin.go @@ -24,6 +24,14 @@ func NewPlugin() repo.Plugin { } } +func NewPluginWith(items ...*plugin.Plugin) repo.Plugin { + r := NewPlugin() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + func (r *Plugin) Filtered(f repo.SceneFilter) repo.Plugin { return &Plugin{ // note data is shared between the source repo and mutex cannot work well diff --git a/internal/infrastructure/memory/property.go b/internal/infrastructure/memory/property.go index 2d8c74571..2e63513d4 100644 --- a/internal/infrastructure/memory/property.go +++ b/internal/infrastructure/memory/property.go @@ -23,6 +23,14 @@ func NewProperty() repo.Property { } } +func NewPropertyWith(items ...*property.Property) repo.Property { + r := NewProperty() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + func (r *Property) 
Filtered(f repo.SceneFilter) repo.Property { return &Property{ // note data is shared between the source repo and mutex cannot work well diff --git a/internal/infrastructure/memory/property_schema.go b/internal/infrastructure/memory/property_schema.go index e67b39630..f0e22b183 100644 --- a/internal/infrastructure/memory/property_schema.go +++ b/internal/infrastructure/memory/property_schema.go @@ -22,6 +22,14 @@ func NewPropertySchema() repo.PropertySchema { return &PropertySchema{} } +func NewPropertySchemaWith(items ...*property.Schema) repo.PropertySchema { + r := NewPropertySchema() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + func (r *PropertySchema) initMap() { if r.data == nil { r.data = map[string]*property.Schema{} diff --git a/internal/infrastructure/memory/scene.go b/internal/infrastructure/memory/scene.go index 826f041bc..4f29b502d 100644 --- a/internal/infrastructure/memory/scene.go +++ b/internal/infrastructure/memory/scene.go @@ -24,6 +24,14 @@ func NewScene() repo.Scene { } } +func NewSceneWith(items ...*scene.Scene) repo.Scene { + r := NewScene() + for _, i := range items { + _ = r.Save(nil, i) + } + return r +} + func (r *Scene) Filtered(f repo.TeamFilter) repo.Scene { return &Scene{ // note data is shared between the source repo and mutex cannot work well diff --git a/internal/usecase/gateway/container.go b/internal/usecase/gateway/container.go index 149e422b5..1ed7743fc 100644 --- a/internal/usecase/gateway/container.go +++ b/internal/usecase/gateway/container.go @@ -1,11 +1,10 @@ package gateway type Container struct { - Authenticator Authenticator - Mailer Mailer - PluginRepository PluginRepository - DataSource DataSource - PluginRegistry PluginRegistry - File File - Google Google + Authenticator Authenticator + Mailer Mailer + DataSource DataSource + PluginRegistry PluginRegistry + File File + Google Google } diff --git a/internal/usecase/gateway/plugin_registry.go b/internal/usecase/gateway/plugin_registry.go index 
a4e81e834..dc349a247 100644 --- a/internal/usecase/gateway/plugin_registry.go +++ b/internal/usecase/gateway/plugin_registry.go @@ -4,11 +4,12 @@ import ( "context" "errors" - "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" ) var ErrFailedToFetchDataFromPluginRegistry = errors.New("failed to fetch data from the plugin registry") type PluginRegistry interface { - FetchMetadata(ctx context.Context) ([]*plugin.Metadata, error) + FetchPluginPackage(context.Context, id.PluginID) (*pluginpack.Package, error) } diff --git a/internal/usecase/gateway/plugin_repository.go b/internal/usecase/gateway/plugin_repository.go deleted file mode 100644 index afa1fd0eb..000000000 --- a/internal/usecase/gateway/plugin_repository.go +++ /dev/null @@ -1,19 +0,0 @@ -package gateway - -import ( - "context" - "errors" - - "github.com/reearth/reearth-backend/pkg/file" - "github.com/reearth/reearth-backend/pkg/id" - "github.com/reearth/reearth-backend/pkg/plugin/manifest" -) - -var ( - ErrFailedToFetchPluiginRepositoryData error = errors.New("failed to fetch repository data") -) - -type PluginRepository interface { - Manifest(context.Context, id.PluginID) (*manifest.Manifest, error) - Data(context.Context, id.PluginID) (file.Iterator, error) -} diff --git a/internal/usecase/interactor/layer.go b/internal/usecase/interactor/layer.go index 026584a8e..1424a1440 100644 --- a/internal/usecase/interactor/layer.go +++ b/internal/usecase/interactor/layer.go @@ -184,7 +184,11 @@ func (i *Layer) AddItem(ctx context.Context, inp interfaces.AddLayerItemInput, o return nil, nil, interfaces.ErrCannotAddLayerToLinkedLayerGroup } - plugin, extension, err := i.getPlugin(ctx, parentLayer.Scene(), inp.PluginID, inp.ExtensionID) + var pid *id.PluginID + if inp.ExtensionID != nil { + pid = &id.OfficialPluginID + } + plugin, extension, err := i.getPlugin(ctx, parentLayer.Scene(), pid, inp.ExtensionID) if err 
!= nil { return nil, nil, err } @@ -264,7 +268,11 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, var extensionSchemaID id.PropertySchemaID var propertySchema *property.Schema - plug, extension, err := i.getPlugin(ctx, parentLayer.Scene(), inp.PluginID, inp.ExtensionID) + var pid *id.PluginID + if inp.ExtensionID != nil { + pid = &id.OfficialPluginID + } + plug, extension, err := i.getPlugin(ctx, parentLayer.Scene(), pid, inp.ExtensionID) if err != nil { return nil, nil, err } @@ -301,10 +309,8 @@ func (i *Layer) AddGroup(ctx context.Context, inp interfaces.AddLayerGroupInput, } else { builder = builder.Name(inp.Name) } - if inp.PluginID != nil { - builder = builder.Plugin(inp.PluginID) - } - if inp.PluginID != nil && inp.ExtensionID != nil { + if inp.ExtensionID != nil { + builder = builder.Plugin(&id.OfficialPluginID) propertySchema, err = i.propertySchemaRepo.FindByID(ctx, extensionSchemaID) if err != nil { return nil, nil, err diff --git a/internal/usecase/interactor/plugin.go b/internal/usecase/interactor/plugin.go index 86fa504f1..d3c02bba3 100644 --- a/internal/usecase/interactor/plugin.go +++ b/internal/usecase/interactor/plugin.go @@ -19,7 +19,6 @@ type Plugin struct { propertyRepo repo.Property layerRepo repo.Layer file gateway.File - pluginRepository gateway.PluginRepository transaction repo.Transaction pluginRegistry gateway.PluginRegistry } @@ -32,19 +31,20 @@ func NewPlugin(r *repo.Container, gr *gateway.Container) interfaces.Plugin { propertySchemaRepo: r.PropertySchema, propertyRepo: r.Property, transaction: r.Transaction, - pluginRepository: gr.PluginRepository, file: gr.File, pluginRegistry: gr.PluginRegistry, } } -func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) { - return i.pluginRepo.FindByIDs(ctx, ids) +func (i *Plugin) pluginCommon() *pluginCommon { + return &pluginCommon{ + pluginRepo: i.pluginRepo, + propertySchemaRepo: 
i.propertySchemaRepo, + file: i.file, + pluginRegistry: i.pluginRegistry, + } } -func (i *Plugin) FetchPluginMetadata(ctx context.Context, operator *usecase.Operator) ([]*plugin.Metadata, error) { - if err := i.OnlyOperator(operator); err != nil { - return nil, err - } - return i.pluginRegistry.FetchMetadata(ctx) +func (i *Plugin) Fetch(ctx context.Context, ids []id.PluginID, operator *usecase.Operator) ([]*plugin.Plugin, error) { + return i.pluginRepo.FindByIDs(ctx, ids) } diff --git a/internal/usecase/interactor/plugin_common.go b/internal/usecase/interactor/plugin_common.go new file mode 100644 index 000000000..8613222e6 --- /dev/null +++ b/internal/usecase/interactor/plugin_common.go @@ -0,0 +1,76 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase/gateway" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/plugin/pluginpack" + "github.com/reearth/reearth-backend/pkg/rerror" +) + +type pluginCommon struct { + pluginRepo repo.Plugin + propertySchemaRepo repo.PropertySchema + file gateway.File + pluginRegistry gateway.PluginRegistry +} + +func (i *pluginCommon) SavePluginPack(ctx context.Context, p *pluginpack.Package) error { + for { + f, err := p.Files.Next() + if err != nil { + return interfaces.ErrInvalidPluginPackage + } + if f == nil { + break + } + if err := i.file.UploadPluginFile(ctx, p.Manifest.Plugin.ID(), f); err != nil { + return rerror.ErrInternalBy(err) + } + } + + // save plugin and property schemas + if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { + return err + } + } + + if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { + return err + } + + return nil +} + +func (i *pluginCommon) 
GetOrDownloadPlugin(ctx context.Context, pid id.PluginID) (*plugin.Plugin, error) { + if pid.IsNil() || pid.Equal(id.OfficialPluginID) { + return nil, rerror.ErrNotFound + } + + if plugin, err := i.pluginRepo.FindByID(ctx, pid); err != nil && !errors.Is(err, rerror.ErrNotFound) { + return nil, err + } else if plugin != nil { + return plugin, nil + } + + if !pid.Scene().IsNil() || i.pluginRegistry == nil { + return nil, rerror.ErrNotFound + } + + pack, err := i.pluginRegistry.FetchPluginPackage(ctx, pid) + if err != nil { + return nil, err + } + + if err := i.SavePluginPack(ctx, pack); err != nil { + return nil, err + } + + return pack.Manifest.Plugin, nil +} diff --git a/internal/usecase/interactor/plugin_upload.go b/internal/usecase/interactor/plugin_upload.go index c5a574ef5..50f947137 100644 --- a/internal/usecase/interactor/plugin_upload.go +++ b/internal/usecase/interactor/plugin_upload.go @@ -56,7 +56,10 @@ func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneI if err != nil { return nil, nil, interfaces.ErrInvalidPluginPackage } - if res.StatusCode != 200 { + defer func() { + _ = res.Body.Close() + }() + if res.StatusCode != http.StatusOK { return nil, nil, interfaces.ErrInvalidPluginPackage } @@ -66,7 +69,6 @@ func (i *Plugin) UploadFromRemote(ctx context.Context, u *url.URL, sid id.SceneI return nil, nil, interfaces.ErrInvalidPluginPackage } - _ = res.Body.Close() return i.upload(ctx, p, sid, operator) } @@ -117,25 +119,8 @@ func (i *Plugin) upload(ctx context.Context, p *pluginpack.Package, sid id.Scene } } - // uploads files - for { - f, err := p.Files.Next() - if err != nil { - return nil, nil, interfaces.ErrInvalidPluginPackage - } - if f == nil { - break - } - if err := i.file.UploadPluginFile(ctx, p.Manifest.Plugin.ID(), f); err != nil { - return nil, nil, rerror.ErrInternalBy(err) - } - } - - // save plugin and property schemas - if ps := p.Manifest.PropertySchemas(); len(ps) > 0 { - if err := 
i.propertySchemaRepo.SaveAll(ctx, ps); err != nil { - return nil, nil, err - } + if err := i.pluginCommon().SavePluginPack(ctx, p); err != nil { + return nil, nil, err } if err := i.pluginRepo.Save(ctx, p.Manifest.Plugin); err != nil { diff --git a/internal/usecase/interactor/published.go b/internal/usecase/interactor/published.go index 515b7824b..1ed460ca0 100644 --- a/internal/usecase/interactor/published.go +++ b/internal/usecase/interactor/published.go @@ -24,7 +24,7 @@ import ( type Published struct { project repo.Project file gateway.File - indexHTML *cache.Cache + indexHTML *cache.Cache[string] indexHTMLStr string } @@ -40,23 +40,23 @@ func NewPublishedWithURL(project repo.Project, file gateway.File, indexHTMLURL * return &Published{ project: project, file: file, - indexHTML: cache.New(func(c context.Context, i interface{}) (interface{}, error) { + indexHTML: cache.New(func(c context.Context, i string) (string, error) { req, err := http.NewRequestWithContext(c, http.MethodGet, indexHTMLURL.String(), nil) if err != nil { - return nil, err + return "", err } res, err := http.DefaultClient.Do(req) if err != nil { log.Errorf("published index: conn err: %s", err) - return nil, errors.New("failed to fetch HTML") - } - if res.StatusCode >= 300 { - log.Errorf("published index: status err: %d", res.StatusCode) - return nil, errors.New("failed to fetch HTML") + return "", errors.New("failed to fetch HTML") } defer func() { _ = res.Body.Close() }() + if res.StatusCode >= 300 { + log.Errorf("published index: status err: %d", res.StatusCode) + return "", errors.New("failed to fetch HTML") + } str, err := io.ReadAll(res.Body) if err != nil { log.Errorf("published index: read err: %s", err) @@ -99,7 +99,7 @@ func (i *Published) Index(ctx context.Context, name string, u *url.URL) (string, if err != nil { return "", err } - html = htmli.(string) + html = htmli } return renderIndex(html, u.String(), md), nil } diff --git a/internal/usecase/interactor/scene.go 
b/internal/usecase/interactor/scene.go index f45c5eed4..be34ce58e 100644 --- a/internal/usecase/interactor/scene.go +++ b/internal/usecase/interactor/scene.go @@ -11,20 +11,16 @@ import ( "github.com/reearth/reearth-backend/pkg/builtin" "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" - "github.com/reearth/reearth-backend/pkg/layer/layerops" "github.com/reearth/reearth-backend/pkg/plugin" "github.com/reearth/reearth-backend/pkg/property" "github.com/reearth/reearth-backend/pkg/rerror" "github.com/reearth/reearth-backend/pkg/scene" - "github.com/reearth/reearth-backend/pkg/scene/sceneops" "github.com/reearth/reearth-backend/pkg/visualizer" ) type Scene struct { common - commonSceneLock sceneRepo repo.Scene - sceneLockRepo repo.SceneLock propertyRepo repo.Property propertySchemaRepo repo.PropertySchema projectRepo repo.Project @@ -33,13 +29,12 @@ type Scene struct { datasetRepo repo.Dataset transaction repo.Transaction file gateway.File + pluginRegistry gateway.PluginRegistry } func NewScene(r *repo.Container, g *gateway.Container) interfaces.Scene { return &Scene{ - commonSceneLock: commonSceneLock{sceneLockRepo: r.SceneLock}, sceneRepo: r.Scene, - sceneLockRepo: r.SceneLock, propertyRepo: r.Property, propertySchemaRepo: r.PropertySchema, projectRepo: r.Project, @@ -48,6 +43,16 @@ func NewScene(r *repo.Container, g *gateway.Container) interfaces.Scene { datasetRepo: r.Dataset, transaction: r.Transaction, file: g.File, + pluginRegistry: g.PluginRegistry, + } +} + +func (i *Scene) pluginCommon() *pluginCommon { + return &pluginCommon{ + pluginRepo: i.pluginRepo, + propertySchemaRepo: i.propertySchemaRepo, + file: i.file, + pluginRegistry: i.pluginRegistry, } } @@ -136,13 +141,6 @@ func (i *Scene) Create(ctx context.Context, pid id.ProjectID, operator *usecase. 
return res, err } -func (s *Scene) FetchLock(ctx context.Context, ids []id.SceneID, operator *usecase.Operator) ([]scene.LockMode, error) { - if err := s.OnlyOperator(operator); err != nil { - return nil, err - } - return s.sceneLockRepo.GetAllLock(ctx, ids) -} - func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, eid id.PluginExtensionID, operator *usecase.Operator) (_ *scene.Scene, widget *scene.Widget, err error) { tx, err := i.transaction.Begin() if err != nil { @@ -162,15 +160,19 @@ func (i *Scene) AddWidget(ctx context.Context, sid id.SceneID, pid id.PluginID, return nil, nil, err } - // check scene lock - if err := i.CheckSceneLock(ctx, sid); err != nil { + pr, err := i.pluginRepo.FindByID(ctx, pid) + if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return nil, nil, interfaces.ErrPluginNotFound + } return nil, nil, err } - _, extension, err := i.getPlugin(ctx, sid, pid, eid) - if err != nil { - return nil, nil, err + extension := pr.Extension(eid) + if extension == nil { + return nil, nil, interfaces.ErrExtensionNotFound } + if extension.Type() != plugin.ExtensionTypeWidget { return nil, nil, interfaces.ErrExtensionTypeMustBeWidget } @@ -254,21 +256,25 @@ func (i *Scene) UpdateWidget(ctx context.Context, param interfaces.UpdateWidgetP return nil, nil, err } - // check scene lock - if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { - return nil, nil, err - } - widget := scene.Widgets().Widget(param.WidgetID) if widget == nil { return nil, nil, rerror.ErrNotFound } _, location := scene.Widgets().Alignment().Find(param.WidgetID) - _, extension, err := i.getPlugin(ctx, scene.ID(), widget.Plugin(), widget.Extension()) + pr, err := i.pluginRepo.FindByID(ctx, widget.Plugin()) if err != nil { + if errors.Is(err, rerror.ErrNotFound) { + return nil, nil, interfaces.ErrPluginNotFound + } return nil, nil, err } + + extension := pr.Extension(widget.Extension()) + if extension == nil { + return nil, nil, 
interfaces.ErrExtensionNotFound + } + if extension.Type() != plugin.ExtensionTypeWidget { return nil, nil, interfaces.ErrExtensionTypeMustBeWidget } @@ -332,11 +338,6 @@ func (i *Scene) UpdateWidgetAlignSystem(ctx context.Context, param interfaces.Up return nil, err } - // check scene lock - if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { - return nil, err - } - area := scene.Widgets().Alignment().Area(param.Location) if area == nil { @@ -374,11 +375,6 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID return nil, err } - // check scene lock - if err := i.CheckSceneLock(ctx, id); err != nil { - return nil, err - } - ws := scene.Widgets() widget := ws.Widget(wid) @@ -403,248 +399,6 @@ func (i *Scene) RemoveWidget(ctx context.Context, id id.SceneID, wid id.WidgetID return scene, nil } -func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, _ id.PluginID, _ *id.PropertyID, err error) { - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - s, err2 := i.sceneRepo.FindByID(ctx, sid) - if err2 != nil { - return nil, pid, nil, err2 - } - if err := i.CanWriteTeam(s.Team(), operator); err != nil { - return nil, pid, nil, err - } - - // check scene lock - if err2 := i.CheckSceneLock(ctx, sid); err2 != nil { - return nil, pid, nil, err2 - } - - if s.Plugins().HasPlugin(pid) { - return nil, pid, nil, interfaces.ErrPluginAlreadyInstalled - } - - plugin, err := i.pluginRepo.FindByID(ctx, pid) - if err != nil { - if errors.Is(err2, rerror.ErrNotFound) { - return nil, pid, nil, interfaces.ErrPluginNotFound - } - return nil, pid, nil, err - } - if psid := plugin.ID().Scene(); psid != nil && *psid != sid { - return nil, pid, nil, interfaces.ErrPluginNotFound - } - - var p *property.Property - var propertyID *id.PropertyID - schema := plugin.Schema() - if 
schema != nil { - pr, err := property.New().NewID().Schema(*schema).Scene(sid).Build() - if err != nil { - return nil, pid, nil, err - } - prid := pr.ID() - p = pr - propertyID = &prid - } - - s.Plugins().Add(scene.NewPlugin(pid, propertyID)) - - if p != nil { - if err := i.propertyRepo.Save(ctx, p); err != nil { - return nil, pid, nil, err2 - } - } - - if err := i.sceneRepo.Save(ctx, s); err != nil { - return nil, pid, nil, err2 - } - - tx.Commit() - return s, pid, propertyID, nil -} - -func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { - if pid.System() { - return nil, rerror.ErrNotFound - } - - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - scene, err := i.sceneRepo.FindByID(ctx, sid) - if err != nil { - return nil, err - } - if err := i.CanWriteTeam(scene.Team(), operator); err != nil { - return nil, err - } - - pl, err := i.pluginRepo.FindByID(ctx, pid) - if err != nil { - return nil, err - } - - // check scene lock - if err := i.CheckSceneLock(ctx, sid); err != nil { - return nil, err - } - - ps := scene.Plugins() - if !ps.Has(pid) { - return nil, interfaces.ErrPluginNotInstalled - } - - removedProperties := []id.PropertyID{} - - // remove plugin - if p := ps.Property(pid); p != nil { - removedProperties = append(removedProperties, *p) - } - ps.Remove(pid) - - // remove widgets - removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid, nil)...) - - // remove layers and blocks - res, err := layerops.Processor{ - LayerLoader: repo.LayerLoaderFrom(i.layerRepo), - RootLayerID: scene.RootLayer(), - }.UninstallPlugin(ctx, pid) - if err != nil { - return nil, err - } - - removedProperties = append(removedProperties, res.RemovedProperties...) 
- - // save - if len(res.ModifiedLayers) > 0 { - if err := i.layerRepo.SaveAll(ctx, res.ModifiedLayers); err != nil { - return nil, err - } - } - - if res.RemovedLayers.LayerCount() > 0 { - if err := i.layerRepo.RemoveAll(ctx, res.RemovedLayers.Layers()); err != nil { - return nil, err - } - } - - if len(removedProperties) > 0 { - if err := i.propertyRepo.RemoveAll(ctx, removedProperties); err != nil { - return nil, err - } - } - - if err := i.sceneRepo.Save(ctx, scene); err != nil { - return nil, err - } - - // if the plugin is private, uninstall it - if psid := pid.Scene(); psid != nil && *psid == sid { - if err := i.pluginRepo.Remove(ctx, pl.ID()); err != nil { - return nil, err - } - if ps := pl.PropertySchemas(); len(ps) > 0 { - if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { - return nil, err - } - } - if err := i.file.RemovePlugin(ctx, pl.ID()); err != nil { - return nil, err - } - } - - tx.Commit() - return scene, nil -} - -func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, newPluginID id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { - tx, err := i.transaction.Begin() - if err != nil { - return - } - defer func() { - if err2 := tx.End(ctx); err == nil && err2 != nil { - err = err2 - } - }() - - s, err := i.sceneRepo.FindByID(ctx, sid) - if err != nil { - return nil, err - } - if err := i.CanWriteTeam(s.Team(), operator); err != nil { - return nil, err - } - - if err := i.UpdateSceneLock(ctx, sid, scene.LockModeFree, scene.LockModePluginUpgrading); err != nil { - return nil, err - } - - defer i.ReleaseSceneLock(ctx, sid) - - pluginMigrator := sceneops.PluginMigrator{ - Property: repo.PropertyLoaderFrom(i.propertyRepo), - PropertySchema: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), - Dataset: repo.DatasetLoaderFrom(i.datasetRepo), - Layer: repo.LayerLoaderBySceneFrom(i.layerRepo), - Plugin: repo.PluginLoaderFrom(i.pluginRepo), - } - - result, err := pluginMigrator.MigratePlugins(ctx, 
s, oldPluginID, newPluginID) - - if err := i.sceneRepo.Save(ctx, result.Scene); err != nil { - return nil, err - } - if err := i.propertyRepo.SaveAll(ctx, result.Properties); err != nil { - return nil, err - } - if err := i.layerRepo.SaveAll(ctx, result.Layers); err != nil { - return nil, err - } - if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers); err != nil { - return nil, err - } - if err := i.propertyRepo.RemoveAll(ctx, result.RemovedProperties); err != nil { - return nil, err - } - - tx.Commit() - return result.Scene, err -} - -func (i *Scene) getPlugin(ctx context.Context, sid id.SceneID, p id.PluginID, e id.PluginExtensionID) (*plugin.Plugin, *plugin.Extension, error) { - plugin, err2 := i.pluginRepo.FindByID(ctx, p) - if err2 != nil { - if errors.Is(err2, rerror.ErrNotFound) { - return nil, nil, interfaces.ErrPluginNotFound - } - return nil, nil, err2 - } - - extension := plugin.Extension(e) - if extension == nil { - return nil, nil, interfaces.ErrExtensionNotFound - } - - return plugin, extension, nil -} - func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, operator *usecase.Operator) (*scene.Scene, *scene.Cluster, error) { tx, err := i.transaction.Begin() if err != nil { @@ -664,10 +418,6 @@ func (i *Scene) AddCluster(ctx context.Context, sceneID id.SceneID, name string, return nil, nil, err } - if err := i.CheckSceneLock(ctx, sceneID); err != nil { - return nil, nil, err - } - prop, err := property.New().NewID().Schema(id.MustPropertySchemaID("reearth/cluster")).Scene(sceneID).Build() if err != nil { return nil, nil, err @@ -712,10 +462,6 @@ func (i *Scene) UpdateCluster(ctx context.Context, param interfaces.UpdateCluste return nil, nil, err } - if err := i.CheckSceneLock(ctx, param.SceneID); err != nil { - return nil, nil, err - } - cluster := s.Clusters().Get(param.ClusterID) if cluster == nil { return nil, nil, rerror.ErrNotFound @@ -754,10 +500,6 @@ func (i *Scene) RemoveCluster(ctx context.Context, sceneID 
id.SceneID, clusterID return nil, err } - if err := i.CheckSceneLock(ctx, sceneID); err != nil { - return nil, err - } - s.Clusters().Remove(clusterID) if err := i.sceneRepo.Save(ctx, s); err != nil { diff --git a/internal/usecase/interactor/scene_plugin.go b/internal/usecase/interactor/scene_plugin.go new file mode 100644 index 000000000..cd0b56036 --- /dev/null +++ b/internal/usecase/interactor/scene_plugin.go @@ -0,0 +1,234 @@ +package interactor + +import ( + "context" + "errors" + + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/internal/usecase/repo" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer/layerops" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/reearth/reearth-backend/pkg/scene/sceneops" +) + +func (i *Scene) InstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, _ *id.PropertyID, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, nil, err + } + + if s.Plugins().HasPlugin(pid) { + return nil, nil, interfaces.ErrPluginAlreadyInstalled + } + + plugin, err := i.pluginCommon().GetOrDownloadPlugin(ctx, pid) + if err != nil { + if errors.Is(rerror.ErrNotFound, err) { + return nil, nil, interfaces.ErrPluginNotFound + } + return nil, nil, err + } + if plugin == nil { + return nil, nil, interfaces.ErrPluginNotFound + } + if psid := plugin.ID().Scene(); psid != nil && *psid != sid { + return nil, nil, interfaces.ErrPluginNotFound + } + + var p 
*property.Property + if schema := plugin.Schema(); schema != nil { + p, err = property.New().NewID().Schema(*schema).Scene(sid).Build() + if err != nil { + return nil, nil, err + } + } + + s.Plugins().Add(scene.NewPlugin(pid, p.IDRef())) + + if p != nil { + if err := i.propertyRepo.Save(ctx, p); err != nil { + return nil, nil, err + } + } + + if err := i.sceneRepo.Save(ctx, s); err != nil { + return nil, nil, err + } + + tx.Commit() + return s, p.IDRef(), nil +} + +func (i *Scene) UninstallPlugin(ctx context.Context, sid id.SceneID, pid id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + if pid.System() { + return nil, rerror.ErrNotFound + } + + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + scene, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(scene.Team(), operator); err != nil { + return nil, err + } + + pl, err := i.pluginRepo.FindByID(ctx, pid) + if err != nil { + if errors.Is(rerror.ErrNotFound, err) { + return nil, interfaces.ErrPluginNotFound + } + return nil, err + } + + ps := scene.Plugins() + if !ps.Has(pid) { + return nil, interfaces.ErrPluginNotInstalled + } + + removedProperties := []id.PropertyID{} + + // remove plugin + if p := ps.Property(pid); p != nil { + removedProperties = append(removedProperties, *p) + } + ps.Remove(pid) + + // remove widgets + removedProperties = append(removedProperties, scene.Widgets().RemoveAllByPlugin(pid, nil)...) + + // remove blocks + res, err := layerops.Processor{ + LayerLoader: repo.LayerLoaderFrom(i.layerRepo), + RootLayerID: scene.RootLayer(), + }.UninstallPlugin(ctx, pid) + if err != nil { + return nil, err + } + + removedProperties = append(removedProperties, res.RemovedProperties...) 
+ + // save + if len(res.ModifiedLayers) > 0 { + if err := i.layerRepo.SaveAll(ctx, res.ModifiedLayers); err != nil { + return nil, err + } + } + + if len(removedProperties) > 0 { + if err := i.propertyRepo.RemoveAll(ctx, removedProperties); err != nil { + return nil, err + } + } + + if err := i.sceneRepo.Save(ctx, scene); err != nil { + return nil, err + } + + // if the plugin is private, uninstall it + if psid := pid.Scene(); psid != nil && *psid == sid { + if err := i.pluginRepo.Remove(ctx, pl.ID()); err != nil { + return nil, err + } + if ps := pl.PropertySchemas(); len(ps) > 0 { + if err := i.propertySchemaRepo.RemoveAll(ctx, ps); err != nil { + return nil, err + } + } + if err := i.file.RemovePlugin(ctx, pl.ID()); err != nil { + return nil, err + } + } + + tx.Commit() + return scene, nil +} + +func (i *Scene) UpgradePlugin(ctx context.Context, sid id.SceneID, oldPluginID, newPluginID id.PluginID, operator *usecase.Operator) (_ *scene.Scene, err error) { + tx, err := i.transaction.Begin() + if err != nil { + return + } + defer func() { + if err2 := tx.End(ctx); err == nil && err2 != nil { + err = err2 + } + }() + + s, err := i.sceneRepo.FindByID(ctx, sid) + if err != nil { + return nil, err + } + if err := i.CanWriteTeam(s.Team(), operator); err != nil { + return nil, err + } + + if oldPluginID.IsNil() || newPluginID.IsNil() || oldPluginID.Equal(newPluginID) || !oldPluginID.NameEqual(newPluginID) { + return nil, interfaces.ErrCannotUpgradeToPlugin + } + + if !s.Plugins().Has(oldPluginID) { + return nil, interfaces.ErrPluginNotInstalled + } + + if plugin, err := i.pluginCommon().GetOrDownloadPlugin(ctx, newPluginID); err != nil { + return nil, err + } else if plugin == nil { + return nil, interfaces.ErrPluginNotFound + } + + pluginMigrator := sceneops.PluginMigrator{ + Property: repo.PropertyLoaderFrom(i.propertyRepo), + PropertySchema: repo.PropertySchemaLoaderFrom(i.propertySchemaRepo), + Dataset: repo.DatasetLoaderFrom(i.datasetRepo), + Layer: 
repo.LayerLoaderBySceneFrom(i.layerRepo), + Plugin: repo.PluginLoaderFrom(i.pluginRepo), + } + + result, err := pluginMigrator.MigratePlugins(ctx, s, oldPluginID, newPluginID) + + if err := i.sceneRepo.Save(ctx, result.Scene); err != nil { + return nil, err + } + if err := i.propertyRepo.SaveAll(ctx, result.Properties); err != nil { + return nil, err + } + if err := i.layerRepo.SaveAll(ctx, result.Layers); err != nil { + return nil, err + } + if err := i.layerRepo.RemoveAll(ctx, result.RemovedLayers); err != nil { + return nil, err + } + if err := i.propertyRepo.RemoveAll(ctx, result.RemovedProperties); err != nil { + return nil, err + } + + tx.Commit() + return result.Scene, err +} diff --git a/internal/usecase/interactor/scene_plugin_test.go b/internal/usecase/interactor/scene_plugin_test.go new file mode 100644 index 000000000..65b75e310 --- /dev/null +++ b/internal/usecase/interactor/scene_plugin_test.go @@ -0,0 +1,390 @@ +package interactor + +import ( + "context" + "testing" + + "github.com/reearth/reearth-backend/internal/infrastructure/fs" + "github.com/reearth/reearth-backend/internal/infrastructure/memory" + "github.com/reearth/reearth-backend/internal/usecase" + "github.com/reearth/reearth-backend/internal/usecase/interfaces" + "github.com/reearth/reearth-backend/pkg/id" + "github.com/reearth/reearth-backend/pkg/layer" + "github.com/reearth/reearth-backend/pkg/plugin" + "github.com/reearth/reearth-backend/pkg/property" + "github.com/reearth/reearth-backend/pkg/rerror" + "github.com/reearth/reearth-backend/pkg/scene" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestScene_InstallPlugin(t *testing.T) { + type args struct { + pluginID plugin.ID + operator *usecase.Operator + } + + type test struct { + name string + installedScenePlugins []*scene.Plugin + args args + wantErr error + } + + sid := scene.NewID() + pid := plugin.MustID("plugin~1.0.0") + pid2 := plugin.MustID("plugin~1.0.1") + pid3 := 
plugin.MustID("plugin~1.0.1").WithScene(&sid) + pid4 := plugin.MustID("plugin~1.0.1").WithScene(scene.NewID().Ref()) + + tests := []test{ + { + name: "should install a plugin", + args: args{ + pluginID: pid, + }, + }, + { + name: "should install a private plugin with property schema", + args: args{ + pluginID: pid3, + }, + }, + { + name: "already installed", + installedScenePlugins: []*scene.Plugin{ + scene.NewPlugin(pid, nil), + }, + args: args{ + pluginID: pid, + }, + wantErr: interfaces.ErrPluginAlreadyInstalled, + }, + { + name: "not found", + args: args{ + pluginID: pid2, + }, + wantErr: interfaces.ErrPluginNotFound, + }, + { + name: "diff scene", + args: args{ + pluginID: pid4, + }, + wantErr: interfaces.ErrPluginNotFound, + }, + { + name: "operation denied", + args: args{ + operator: &usecase.Operator{}, + }, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert := assert.New(t) + ctx := context.Background() + + tid := id.NewTeamID() + sc := scene.New().ID(sid).RootLayer(id.NewLayerID()).Team(tid).MustBuild() + for _, p := range tt.installedScenePlugins { + sc.Plugins().Add(p) + } + sr := memory.NewSceneWith(sc) + + pl := plugin.New().ID(pid).MustBuild() + pl2 := plugin.New().ID(pid3).Schema(id.NewPropertySchemaID(pid3, "@").Ref()).MustBuild() + pl3 := plugin.New().ID(pid4).MustBuild() + pr := memory.NewPluginWith(pl, pl2, pl3) + + prr := memory.NewProperty() + + uc := &Scene{ + sceneRepo: sr, + pluginRepo: pr, + propertyRepo: prr, + transaction: memory.NewTransaction(), + } + + o := tt.args.operator + if o == nil { + o = &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + } + } + gotSc, gotPrid, err := uc.InstallPlugin(ctx, sid, tt.args.pluginID, o) + + if tt.wantErr != nil { + assert.Equal(tt.wantErr, err) + assert.Nil(gotSc) + assert.True(gotPrid.IsNil()) + } else { + assert.NoError(err) + assert.Same(sc, gotSc) + if tt.args.pluginID.Equal(pl2.ID()) { + 
assert.False(gotPrid.IsNil()) + gotPr, _ := prr.FindByID(ctx, *gotPrid) + assert.Equal(*pl2.Schema(), gotPr.Schema()) + } else { + assert.True(gotPrid.IsNil()) + } + assert.True(gotSc.Plugins().Has(tt.args.pluginID)) + } + }) + } +} + +func TestScene_UninstallPlugin(t *testing.T) { + type args struct { + pluginID plugin.ID + operator *usecase.Operator + } + + type test struct { + name string + args args + wantErr error + } + + sid := scene.NewID() + pid := plugin.MustID("plugin~1.0.0") + pid2 := plugin.MustID("plugin~1.0.1") + pid3 := plugin.MustID("plugin~1.0.2") + pid4 := plugin.MustID("plugin2~1.0.3").WithScene(&sid) + + tests := []test{ + { + name: "should uninstall a plugin", + args: args{ + pluginID: pid, + }, + }, + { + name: "should uninstall a private plugin", + args: args{ + pluginID: pid4, + }, + }, + { + name: "not installed plugin", + args: args{ + pluginID: pid2, + }, + wantErr: interfaces.ErrPluginNotInstalled, + }, + { + name: "not found", + args: args{ + pluginID: pid3, + }, + wantErr: interfaces.ErrPluginNotFound, + }, + { + name: "operation denied", + args: args{ + operator: &usecase.Operator{}, + }, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert := assert.New(t) + ctx := context.Background() + + psid := id.NewPropertySchemaID(pid, "@") + pl3ps := property.NewSchema().ID(psid).MustBuild() + psr := memory.NewPropertySchemaWith(pl3ps) + + pl1 := plugin.New().ID(pid).MustBuild() + pl2 := plugin.New().ID(pid2).MustBuild() + pl3 := plugin.New().ID(pid4).Schema(&psid).MustBuild() + pr := memory.NewPluginWith(pl1, pl2, pl3) + + ppr := property.New().NewID().Scene(sid).Schema(*pl3.Schema()).MustBuild() + ppr2 := property.New().NewID().Scene(sid).Schema(id.NewPropertySchemaID(pid, "a")).MustBuild() + prr := memory.NewPropertyWith(ppr, ppr2) + + ibf := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(id.NewPropertyID()).MustBuild() + ib := 
layer.NewInfobox([]*layer.InfoboxField{ibf}, id.NewPropertyID()) + l1 := layer.New().NewID().Scene(sid).Infobox(ib).Item().MustBuild() + l2 := layer.New().NewID().Scene(sid).Group().Layers(layer.NewIDList([]layer.ID{l1.ID()})).MustBuild() + lr := memory.NewLayerWith(l1, l2) + + tid := id.NewTeamID() + sc := scene.New().ID(sid).RootLayer(id.NewLayerID()).Team(tid).MustBuild() + sc.Plugins().Add(scene.NewPlugin(pid, nil)) + sc.Plugins().Add(scene.NewPlugin(pid4, ppr.ID().Ref())) + sw, _ := scene.NewWidget(scene.NewWidgetID(), pid, "a", ppr2.ID(), true, false) + sc.Widgets().Add(sw) + sr := memory.NewSceneWith(sc) + + fsg, _ := fs.NewFile(afero.NewMemMapFs(), "") + + uc := &Scene{ + sceneRepo: sr, + pluginRepo: pr, + propertyRepo: prr, + layerRepo: lr, + propertySchemaRepo: psr, + file: fsg, + transaction: memory.NewTransaction(), + } + + o := tt.args.operator + if o == nil { + o = &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + } + } + gotSc, err := uc.UninstallPlugin(ctx, sid, tt.args.pluginID, o) + + if tt.wantErr != nil { + assert.Equal(tt.wantErr, err) + assert.Nil(gotSc) + } else { + assert.NoError(err) + assert.Same(sc, gotSc) + assert.False(gotSc.Plugins().Has(tt.args.pluginID)) + + if tt.args.pluginID.Equal(pid) { + assert.False(sc.Widgets().Has(sw.ID())) + _, err = prr.FindByID(ctx, ppr2.ID()) + assert.Equal(rerror.ErrNotFound, err) + } + + if tt.args.pluginID.Equal(pid4) { + _, err = prr.FindByID(ctx, ppr.ID()) + assert.Equal(rerror.ErrNotFound, err) + } + + if !tt.args.pluginID.Scene().IsNil() { + if tt.args.pluginID.Equal(pid4) { + _, err := psr.FindByID(ctx, ppr.Schema()) + assert.Equal(rerror.ErrNotFound, err) + } + + _, err = pr.FindByID(ctx, tt.args.pluginID) + assert.Equal(rerror.ErrNotFound, err) + } + } + }) + } +} + +func TestScene_UpgradePlugin(t *testing.T) { + type args struct { + old plugin.ID + new plugin.ID + operator *usecase.Operator + } + + type test struct { + name string + args args + wantErr error + } + + sid := scene.NewID() 
+ pid1 := plugin.MustID("plugin~1.0.0") + pid2 := plugin.MustID("plugin~1.0.1") + pid3 := plugin.MustID("plugin~1.0.2") + pid4 := plugin.MustID("pluginx~1.0.2") + + tests := []test{ + { + name: "should upgrade a plugin", + args: args{ + old: pid1, + new: pid2, + }, + }, + { + name: "not installed", + args: args{ + old: pid2, + new: pid3, + }, + wantErr: interfaces.ErrPluginNotInstalled, + }, + { + name: "diff names", + args: args{ + old: pid1, + new: pid4, + }, + wantErr: interfaces.ErrCannotUpgradeToPlugin, + }, + { + name: "operation denied", + args: args{ + operator: &usecase.Operator{}, + }, + wantErr: interfaces.ErrOperationDenied, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + assert := assert.New(t) + ctx := context.Background() + + pl1ps := property.NewSchema().ID(id.NewPropertySchemaID(pid1, "@")).MustBuild() + pl2ps := property.NewSchema().ID(id.NewPropertySchemaID(pid2, "@")).MustBuild() + psr := memory.NewPropertySchemaWith(pl1ps, pl2ps) + + pl1 := plugin.New().ID(pid1).Schema(pl1ps.ID().Ref()).MustBuild() + pl2 := plugin.New().ID(pid2).Schema(pl2ps.ID().Ref()).MustBuild() + pr := memory.NewPluginWith(pl1, pl2) + + pl1p := property.New().NewID().Scene(sid).Schema(*pl1.Schema()).MustBuild() + prr := memory.NewPropertyWith(pl1p) + + lr := memory.NewLayerWith() + + dsr := memory.NewDataset() + + tid := id.NewTeamID() + sc := scene.New().ID(sid).RootLayer(id.NewLayerID()).Team(tid).MustBuild() + sc.Plugins().Add(scene.NewPlugin(pid1, pl1p.ID().Ref())) + sr := memory.NewSceneWith(sc) + + uc := &Scene{ + sceneRepo: sr, + pluginRepo: pr, + propertyRepo: prr, + propertySchemaRepo: psr, + layerRepo: lr, + datasetRepo: dsr, + transaction: memory.NewTransaction(), + } + + o := tt.args.operator + if o == nil { + o = &usecase.Operator{ + WritableTeams: id.TeamIDList{tid}, + } + } + gotSc, err := uc.UpgradePlugin(ctx, sid, tt.args.old, tt.args.new, o) + + if tt.wantErr != nil { + assert.Equal(tt.wantErr, err) + 
assert.Nil(gotSc) + } else { + assert.NoError(err) + assert.Same(sc, gotSc) + assert.False(gotSc.Plugins().Has(tt.args.old)) + assert.True(gotSc.Plugins().Has(tt.args.new)) + p, _ := prr.FindByID(ctx, *gotSc.Plugins().Plugin(tt.args.new).Property()) + assert.Equal(*pl2.Schema(), p.Schema()) + } + }) + } +} diff --git a/internal/usecase/interactor/user_signup.go b/internal/usecase/interactor/user_signup.go index 8450a1450..daedd3998 100644 --- a/internal/usecase/interactor/user_signup.go +++ b/internal/usecase/interactor/user_signup.go @@ -256,17 +256,17 @@ func getOpenIDConfiguration(ctx context.Context, iss string) (c OpenIDConfigurat err = err2 return } - + defer func() { + _ = res.Body.Close() + }() if res.StatusCode != http.StatusOK { err = errors.New("could not get user info") return } - if err2 := json.NewDecoder(res.Body).Decode(&c); err2 != nil { err = fmt.Errorf("could not get user info: %w", err2) return } - return } @@ -295,6 +295,9 @@ func getUserInfo(ctx context.Context, url, accessToken string) (ui UserInfo, err err = err2 return } + defer func() { + _ = res.Body.Close() + }() if res.StatusCode != http.StatusOK { err = errors.New("could not get user info") diff --git a/internal/usecase/interfaces/layer.go b/internal/usecase/interfaces/layer.go index f43779c4a..afd5deb93 100644 --- a/internal/usecase/interfaces/layer.go +++ b/internal/usecase/interfaces/layer.go @@ -15,7 +15,6 @@ import ( type AddLayerItemInput struct { ParentLayerID id.LayerID - PluginID *id.PluginID ExtensionID *id.PluginExtensionID Index *int LinkedDatasetID *id.DatasetID @@ -25,7 +24,6 @@ type AddLayerItemInput struct { type AddLayerGroupInput struct { ParentLayerID id.LayerID - PluginID *id.PluginID ExtensionID *id.PluginExtensionID Index *int LinkedDatasetSchemaID *id.DatasetSchemaID diff --git a/internal/usecase/interfaces/plugin.go b/internal/usecase/interfaces/plugin.go index 0de5c4a89..115859c34 100644 --- a/internal/usecase/interfaces/plugin.go +++ 
b/internal/usecase/interfaces/plugin.go @@ -21,5 +21,4 @@ type Plugin interface { Fetch(context.Context, []id.PluginID, *usecase.Operator) ([]*plugin.Plugin, error) Upload(context.Context, io.Reader, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) UploadFromRemote(context.Context, *url.URL, id.SceneID, *usecase.Operator) (*plugin.Plugin, *scene.Scene, error) - FetchPluginMetadata(context.Context, *usecase.Operator) ([]*plugin.Metadata, error) } diff --git a/internal/usecase/interfaces/scene.go b/internal/usecase/interfaces/scene.go index 9f9b2e310..116664d58 100644 --- a/internal/usecase/interfaces/scene.go +++ b/internal/usecase/interfaces/scene.go @@ -12,19 +12,19 @@ import ( var ( ErrPluginAlreadyInstalled error = errors.New("plugin already installed") ErrPluginNotInstalled error = errors.New("plugin not installed") + ErrCannotUpgradeToPlugin error = errors.New("cannot upgrade to such plugin") ErrExtensionTypeMustBeWidget error = errors.New("extension type must be widget") ) type Scene interface { Fetch(context.Context, []id.SceneID, *usecase.Operator) ([]*scene.Scene, error) FindByProject(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error) - FetchLock(context.Context, []id.SceneID, *usecase.Operator) ([]scene.LockMode, error) Create(context.Context, id.ProjectID, *usecase.Operator) (*scene.Scene, error) AddWidget(context.Context, id.SceneID, id.PluginID, id.PluginExtensionID, *usecase.Operator) (*scene.Scene, *scene.Widget, error) UpdateWidget(context.Context, UpdateWidgetParam, *usecase.Operator) (*scene.Scene, *scene.Widget, error) UpdateWidgetAlignSystem(context.Context, UpdateWidgetAlignSystemParam, *usecase.Operator) (*scene.Scene, error) RemoveWidget(context.Context, id.SceneID, id.WidgetID, *usecase.Operator) (*scene.Scene, error) - InstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, id.PluginID, *id.PropertyID, error) + InstallPlugin(context.Context, id.SceneID, id.PluginID, 
*usecase.Operator) (*scene.Scene, *id.PropertyID, error) UninstallPlugin(context.Context, id.SceneID, id.PluginID, *usecase.Operator) (*scene.Scene, error) UpgradePlugin(context.Context, id.SceneID, id.PluginID, id.PluginID, *usecase.Operator) (*scene.Scene, error) AddCluster(context.Context, id.SceneID, string, *usecase.Operator) (*scene.Scene, *scene.Cluster, error) diff --git a/pkg/cache/cache.go b/pkg/cache/cache.go index b7f8e310d..dee12b692 100644 --- a/pkg/cache/cache.go +++ b/pkg/cache/cache.go @@ -7,22 +7,22 @@ import ( ) // Cache holds data can be accessed synchronously. The data will be automatically updated when it expires. -type Cache struct { - updater func(context.Context, interface{}) (interface{}, error) +type Cache[T any] struct { + updater func(context.Context, T) (T, error) expiresIn time.Duration updatedAt time.Time lock sync.Mutex - data interface{} + data T now func() time.Time } -func New(updater func(context.Context, interface{}) (interface{}, error), expiresIn time.Duration) *Cache { - return &Cache{updater: updater, expiresIn: expiresIn} +func New[T any](updater func(context.Context, T) (T, error), expiresIn time.Duration) *Cache[T] { + return &Cache[T]{updater: updater, expiresIn: expiresIn} } -func (c *Cache) Get(ctx context.Context) (interface{}, error) { +func (c *Cache[T]) Get(ctx context.Context) (res T, _ error) { if c == nil { - return nil, nil + return } c.lock.Lock() @@ -36,7 +36,7 @@ func (c *Cache) Get(ctx context.Context) (interface{}, error) { return c.data, nil } -func (c *Cache) update(ctx context.Context) error { +func (c *Cache[T]) update(ctx context.Context) error { var err error data, err := c.updater(ctx, c.data) if err != nil { @@ -48,7 +48,7 @@ func (c *Cache) update(ctx context.Context) error { return nil } -func (c *Cache) currentTime() time.Time { +func (c *Cache[T]) currentTime() time.Time { if c.now == nil { return time.Now() } diff --git a/pkg/cache/cache_test.go b/pkg/cache/cache_test.go index 
daa6cc2da..30b149a62 100644 --- a/pkg/cache/cache_test.go +++ b/pkg/cache/cache_test.go @@ -13,14 +13,14 @@ func TestCache_Get(t *testing.T) { ctx := context.Background() data := &struct{}{} err := errors.New("err!") - var cache *Cache + var cache *Cache[*struct{}] called := 0 res, e := cache.Get(ctx) // nil cache assert.NoError(t, e) assert.Nil(t, res) - cache = New(func(c context.Context, i interface{}) (interface{}, error) { + cache = New(func(c context.Context, i *struct{}) (*struct{}, error) { assert.Same(t, ctx, c) if called == 0 { assert.Nil(t, i) @@ -56,7 +56,7 @@ func TestCache_Get2(t *testing.T) { now := time.Date(2022, 6, 4, 0, 0, 0, 0, time.UTC) called := 0 - cache := New(func(_ context.Context, _ interface{}) (interface{}, error) { + cache := New(func(_ context.Context, _ *struct{}) (*struct{}, error) { called++ return data, nil }, time.Second) diff --git a/pkg/id/plugin.go b/pkg/id/plugin.go index 052e0986a..7b339215a 100644 --- a/pkg/id/plugin.go +++ b/pkg/id/plugin.go @@ -211,6 +211,19 @@ func (d PluginID) Equal(d2 PluginID) bool { return d.name == d2.name && d.version == d2.version } +// NameEqual returns true if names of two IDs are equal. 
+func (d PluginID) NameEqual(d2 PluginID) bool { + if d.sys { + return d2.sys + } + if d.scene != nil { + if d2.scene == nil || *d.scene != *d2.scene { + return false + } + } + return d.name == d2.name +} + // MarshalText implements encoding.TextMarshaler interface func (d *PluginID) MarshalText() ([]byte, error) { return []byte(d.String()), nil diff --git a/pkg/id/plugin_test.go b/pkg/id/plugin_test.go index 69f242507..7a9a24694 100644 --- a/pkg/id/plugin_test.go +++ b/pkg/id/plugin_test.go @@ -568,7 +568,67 @@ func TestPluginID_Equal(t *testing.T) { assert.Equal(t, tc.expected, tc.input2.Equal(tc.input1)) }) } +} +func TestPluginID_NameEqual(t *testing.T) { + tests := []struct { + name string + input1 PluginID + input2 PluginID + expected bool + }{ + { + name: "system", + input1: MustPluginID("reearth"), + input2: MustPluginID("reearth"), + expected: true, + }, + { + name: "system and normal", + input1: MustPluginID("reearth"), + input2: MustPluginID("Test~1.0.0"), + expected: false, + }, + { + name: "same", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.0"), + expected: true, + }, + { + name: "diff version", + input1: MustPluginID("Test~1.0.0"), + input2: MustPluginID("Test~1.0.1"), + expected: true, + }, + { + name: "diff name", + input1: MustPluginID("Test0~1.0.0"), + input2: MustPluginID("Test1~1.0.0"), + expected: false, + }, + { + name: "same scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + expected: true, + }, + { + name: "diff scene", + input1: MustPluginID("01fbprc3j929w0a3h16nh8rqy6~Test~1.0.0"), + input2: MustPluginID("01fbprc3j929w0a3h16nh8rqy7~Test~1.0.0"), + expected: false, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, tc.input1.NameEqual(tc.input2)) + assert.Equal(t, tc.expected, tc.input2.NameEqual(tc.input1)) + }) + } } func 
TestPluginID_MarshalText(t *testing.T) { diff --git a/pkg/layer/layerops/processor.go b/pkg/layer/layerops/processor.go index 786037a3b..46b7da580 100644 --- a/pkg/layer/layerops/processor.go +++ b/pkg/layer/layerops/processor.go @@ -13,33 +13,16 @@ type Processor struct { type UninstallPluginResult struct { ModifiedLayers layer.List - RemovedLayers *layer.IDList RemovedProperties []layer.PropertyID } func (p Processor) UninstallPlugin(ctx context.Context, pluginID layer.PluginID) (res UninstallPluginResult, err error) { err = p.LayerLoader.Walk(ctx, func(l layer.Layer, parents layer.GroupList) error { - parent := parents.Last() - parentRemoved := parent != nil && res.RemovedLayers.HasLayer(parent.ID()) - - if !parentRemoved { - if pid := l.Plugin(); pid == nil || !pid.Equal(pluginID) { - // delete infobox fields - removedProperties := l.Infobox().RemoveAllByPlugin(pluginID, nil) - if len(removedProperties) > 0 { - res.RemovedProperties = append(res.RemovedProperties, removedProperties...) - res.ModifiedLayers = append(res.ModifiedLayers, &l) - } - return nil - } - - parent.Layers().RemoveLayer(l.ID()) - res.ModifiedLayers = append(res.ModifiedLayers, parent.LayerRef()) + // delete infobox fields + if removedProperties := l.Infobox().RemoveAllByPlugin(pluginID, nil); len(removedProperties) > 0 { + res.RemovedProperties = append(res.RemovedProperties, removedProperties...) + res.ModifiedLayers = append(res.ModifiedLayers, &l) } - - res.RemovedLayers = res.RemovedLayers.AppendLayers(l.ID()) - res.RemovedProperties = append(res.RemovedProperties, l.Properties()...) 
- res.ModifiedLayers = res.ModifiedLayers.Remove(l.ID()) return nil }, []layer.ID{p.RootLayerID}) diff --git a/pkg/layer/layerops/processor_test.go b/pkg/layer/layerops/processor_test.go index 858ee63a5..c9e499e08 100644 --- a/pkg/layer/layerops/processor_test.go +++ b/pkg/layer/layerops/processor_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/reearth/reearth-backend/pkg/id" "github.com/reearth/reearth-backend/pkg/layer" "github.com/stretchr/testify/assert" ) @@ -15,9 +16,9 @@ func TestProcessor_UninstallPlugin(t *testing.T) { ibf1 := layer.NewInfoboxField().NewID().Plugin(pid).Extension("a").Property(layer.NewPropertyID()).MustBuild() ibf2 := layer.NewInfoboxField().NewID().Plugin(pid2).Extension("a").Property(layer.NewPropertyID()).MustBuild() ib := layer.NewInfobox([]*layer.InfoboxField{ibf1, ibf2}, layer.NewPropertyID()) - l1 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&pid).MustBuild() - l2 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&pid2).MustBuild() - l3 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&pid2).Infobox(ib).MustBuild() + l1 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&id.OfficialPluginID).MustBuild() + l2 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&id.OfficialPluginID).MustBuild() + l3 := layer.NewItem().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Plugin(&id.OfficialPluginID).Infobox(ib).MustBuild() l4 := layer.NewGroup().NewID().Scene(sid).Property(layer.NewPropertyID().Ref()).Layers(layer.NewIDList([]layer.ID{l1.ID(), l2.ID()})).MustBuild() l5 := layer.NewGroup().NewID().Scene(sid).Layers(layer.NewIDList([]layer.ID{l3.ID(), l4.ID()})).MustBuild() @@ -28,11 +29,8 @@ func TestProcessor_UninstallPlugin(t *testing.T) { assert.NoError(t, err) assert.Equal(t, UninstallPluginResult{ - ModifiedLayers: layer.List{l3.LayerRef(), 
l4.LayerRef()}, - RemovedLayers: layer.NewIDList([]layer.ID{l1.ID()}), - RemovedProperties: []layer.PropertyID{ibf1.Property(), *l1.Property()}, + ModifiedLayers: layer.List{l3.LayerRef()}, + RemovedProperties: []layer.PropertyID{ibf1.Property()}, }, res) - - assert.Equal(t, layer.NewIDList([]layer.ID{l2.ID()}), l4.Layers()) assert.Equal(t, []*layer.InfoboxField{ibf2}, ib.Fields()) } diff --git a/pkg/plugin/builder.go b/pkg/plugin/builder.go index 6fad423d5..1a3364df7 100644 --- a/pkg/plugin/builder.go +++ b/pkg/plugin/builder.go @@ -13,7 +13,9 @@ func New() *Builder { } func (b *Builder) Build() (*Plugin, error) { - // TODO: check extensions duplication ...etc + if b.p.id.IsNil() { + return nil, ErrInvalidID + } return b.p, nil } diff --git a/pkg/plugin/builder_test.go b/pkg/plugin/builder_test.go index 6252d27e4..46ea26a3e 100644 --- a/pkg/plugin/builder_test.go +++ b/pkg/plugin/builder_test.go @@ -15,19 +15,19 @@ func TestBuilder_ID(t *testing.T) { func TestBuilder_Name(t *testing.T) { var b = New() - res := b.Name(i18n.StringFrom("fooo")).MustBuild() + res := b.ID(MustID("aaa~1.1.1")).Name(i18n.StringFrom("fooo")).MustBuild() assert.Equal(t, i18n.StringFrom("fooo"), res.Name()) } func TestBuilder_Author(t *testing.T) { var b = New() - res := b.Author("xxx").MustBuild() + res := b.ID(MustID("aaa~1.1.1")).Author("xxx").MustBuild() assert.Equal(t, "xxx", res.Author()) } func TestBuilder_Description(t *testing.T) { var b = New() - res := b.Description(i18n.StringFrom("ddd")).MustBuild() + res := b.ID(MustID("aaa~1.1.1")).Description(i18n.StringFrom("ddd")).MustBuild() assert.Equal(t, i18n.StringFrom("ddd"), res.Description()) } @@ -52,7 +52,7 @@ func TestBuilder_Schema(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() - res := New().Schema(tt.sid).MustBuild() + res := New().ID(MustID("aaa~1.1.1")).Schema(tt.sid).MustBuild() assert.Equal(t, tt.expected, res.Schema()) }) } @@ -64,13 +64,13 @@ func TestBuilder_Extensions(t *testing.T) { 
NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild(), } - res := b.Extensions(ext).MustBuild() + res := b.ID(MustID("aaa~1.1.1")).Extensions(ext).MustBuild() assert.Equal(t, ext, res.Extensions()) } func TestBuilder_RepositoryURL(t *testing.T) { var b = New() - res := b.RepositoryURL("hoge").MustBuild() + res := b.ID(MustID("aaa~1.1.1")).RepositoryURL("hoge").MustBuild() assert.Equal(t, "hoge", res.RepositoryURL()) } diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index 9b01cda42..386ef2b1b 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -17,13 +17,13 @@ func TestPlugin_Extension(t *testing.T) { { name: "exiting extension", key: "yyy", - plugin: New().Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), + plugin: New().ID(MustID("aaa~1.1.1")).Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), expected: NewExtension().ID("yyy").MustBuild(), }, { name: "not exiting extension", key: "zzz", - plugin: New().Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), + plugin: New().ID(MustID("aaa~1.1.1")).Extensions([]*Extension{NewExtension().ID("xxx").MustBuild(), NewExtension().ID("yyy").MustBuild()}).MustBuild(), expected: nil, }, { @@ -55,12 +55,12 @@ func TestPlugin_PropertySchemas(t *testing.T) { }{ { name: "normal", - plugin: New().Schema(&ps1).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), + plugin: New().ID(MustID("aaa~1.1.1")).Schema(&ps1).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), expected: []PropertySchemaID{ps1, ps2, ps3}, }, { name: "no plugin property schema", - plugin: 
New().Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), + plugin: New().ID(MustID("aaa~1.1.1")).Extensions([]*Extension{NewExtension().ID("xxx").Schema(ps2).MustBuild(), NewExtension().ID("yyy").Schema(ps3).MustBuild()}).MustBuild(), expected: []PropertySchemaID{ps2, ps3}, }, { @@ -80,7 +80,7 @@ func TestPlugin_PropertySchemas(t *testing.T) { } func TestPlugin_Author(t *testing.T) { - p := New().Author("xx").MustBuild() + p := New().ID(MustID("aaa~1.1.1")).Author("xx").MustBuild() assert.Equal(t, "xx", p.Author()) } diff --git a/pkg/rerror/error.go b/pkg/rerror/error.go index cbaea7f75..a9ca38a7b 100644 --- a/pkg/rerror/error.go +++ b/pkg/rerror/error.go @@ -19,10 +19,22 @@ var ( ) func ErrInternalBy(err error) error { - log.Errorf("internal error: %s", err.Error()) + return errInternalBy(errInternal, err) +} + +func ErrInternalByWith(label string, err error) error { + return errInternalBy(errors.New(label), err) +} + +func ErrInternalByWithError(label, err error) error { + return errInternalBy(label, err) +} + +func errInternalBy(label, err error) *Error { + log.Errorf("%s: %s", label.Error(), err.Error()) debug.PrintStack() return &Error{ - Label: errInternal, + Label: label, Err: err, Hidden: true, } diff --git a/pkg/rerror/error_test.go b/pkg/rerror/error_test.go index ff4746b5f..ef6846007 100644 --- a/pkg/rerror/error_test.go +++ b/pkg/rerror/error_test.go @@ -14,6 +14,16 @@ func TestErrInternal(t *testing.T) { assert.EqualError(t, err, "internal") assert.IsType(t, err, &Error{}) assert.Same(t, werr, errors.Unwrap(err)) + + err2 := ErrInternalByWith("a", werr) + assert.EqualError(t, err2, "a") + assert.IsType(t, err2, &Error{}) + assert.Same(t, werr, errors.Unwrap(err2)) + + err3 := ErrInternalByWithError(errors.New("x"), werr) + assert.EqualError(t, err3, "x") + assert.IsType(t, err3, &Error{}) + assert.Same(t, werr, errors.Unwrap(err3)) } func TestError(t *testing.T) { 
diff --git a/pkg/scene/sceneops/plugin_migrator.go b/pkg/scene/sceneops/plugin_migrator.go index 6550c8eb7..802ff72a4 100644 --- a/pkg/scene/sceneops/plugin_migrator.go +++ b/pkg/scene/sceneops/plugin_migrator.go @@ -38,7 +38,7 @@ func (s *PluginMigrator) MigratePlugins(ctx context.Context, sc *scene.Scene, ol return MigratePluginsResult{}, rerror.ErrInternalBy(errors.New("scene is nil")) } - if oldPluginID.Equal(newPluginID) || oldPluginID.Name() != newPluginID.Name() { + if oldPluginID.Equal(newPluginID) || !oldPluginID.NameEqual(newPluginID) { return MigratePluginsResult{}, ErrInvalidPlugins } diff --git a/pkg/util/map.go b/pkg/util/map.go index 6d40dd47b..21cd1322a 100644 --- a/pkg/util/map.go +++ b/pkg/util/map.go @@ -36,6 +36,14 @@ func (m *SyncMap[K, V]) LoadOrStore(key K, value V) (vv V, _ bool) { return vv, ok } +func (m *SyncMap[K, V]) LoadAndDelete(key K) (vv V, ok bool) { + v, ok := m.m.LoadAndDelete(key) + if ok { + vv = v.(V) + } + return vv, ok +} + func (m *SyncMap[K, V]) Delete(key K) { m.m.Delete(key) } @@ -124,3 +132,26 @@ func (m *SyncMap[K, V]) Len() (i int) { }) return } + +type LockMap[T any] struct { + m SyncMap[T, *sync.Mutex] +} + +func (m *LockMap[T]) Lock(k T) func() { + nl := &sync.Mutex{} + l, ok := m.m.LoadOrStore(k, nl) + if ok { + l.Lock() + } else { + nl.Lock() + } + return func() { + m.Unlock(k) + } +} + +func (m *LockMap[T]) Unlock(k T) { + if l, ok := m.m.LoadAndDelete(k); ok { + l.Unlock() + } +} diff --git a/pkg/util/map_test.go b/pkg/util/map_test.go index d4fed78ea..cd71d24dc 100644 --- a/pkg/util/map_test.go +++ b/pkg/util/map_test.go @@ -1,6 +1,7 @@ package util import ( + "sync" "testing" "github.com/stretchr/testify/assert" @@ -178,3 +179,33 @@ func TestSyncMap_Len(t *testing.T) { s.Store("b", 2) assert.Equal(t, 2, s.Len()) } + +func TestLockMap(t *testing.T) { + m := LockMap[string]{} + res := []string{} + wg := sync.WaitGroup{} + + wg.Add(3) + go func() { + u := m.Lock("a") + res = append(res, "a") + u() + wg.Done() + 
}() + go func() { + u := m.Lock("b") + res = append(res, "b") + u() + wg.Done() + }() + go func() { + u := m.Lock("a") + res = append(res, "c") + u() + wg.Done() + }() + + wg.Wait() + slices.Sort(res) + assert.Equal(t, []string{"a", "b", "c"}, res) +} diff --git a/schema.graphql b/schema.graphql index 243951e3d..75910a0c9 100644 --- a/schema.graphql +++ b/schema.graphql @@ -268,14 +268,6 @@ type Plugin { propertySchema: PropertySchema @goField(forceResolver: true) } -type PluginMetadata { - name: String! - description: String! - author: String! - thumbnailUrl: String! - createdAt: DateTime! -} - enum WidgetAreaAlign { START CENTERED @@ -1592,7 +1584,6 @@ type Query { dynamicDatasetSchemas(sceneId: ID!): [DatasetSchema!]! searchUser(nameOrEmail: String!): User checkProjectAlias(alias: String!): ProjectAliasAvailability! - installablePlugins: [PluginMetadata!]! } # Mutation From 6c6edf3baaa343922c2d042454dcccc87e16d096 Mon Sep 17 00:00:00 2001 From: KeisukeYamashita <19yamashita15@gmail.com> Date: Sat, 13 Aug 2022 17:41:04 +0900 Subject: [PATCH 253/253] ci: fix github workflows, dockefile, vscode settings --- .github/CODEOWNERS | 4 +- .github/changelog.yml | 4 + .github/labeler.yml | 4 + .github/workflows/build.yml | 27 +- .github/workflows/ci.yml | 15 +- .github/workflows/deploy_test.yml | 19 +- .github/workflows/nightly.yml | 68 -- .github/workflows/pr_title.yml | 3 + .github/workflows/release.js | 53 +- .github/workflows/release.yml | 128 +-- .github/workflows/server-release.yml | 53 -- .gitignore | 4 +- .vscode/launch.json | 6 +- .vscode/settings.json | 16 +- CHANGELOG.md | 20 +- Dockerfile | 25 +- server/.goreleaser.yml | 2 +- server/CHANGELOG.md | 1122 -------------------------- server/Dockerfile | 36 - server/Makefile | 2 +- server/docker-compose.yml | 50 -- 21 files changed, 151 insertions(+), 1510 deletions(-) create mode 100644 .github/labeler.yml delete mode 100644 .github/workflows/nightly.yml delete mode 100644 .github/workflows/server-release.yml delete 
mode 100644 server/CHANGELOG.md delete mode 100644 server/Dockerfile delete mode 100644 server/docker-compose.yml diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 15f679477..bf50e3d09 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1,3 @@ * @rot1024 -/pkg/builtin/manifest.yml @HideBa -/pkg/builtin/manifest_ja.yml @HideBa +/server/pkg/builtin/manifest.yml @HideBa +/server/pkg/builtin/manifest_ja.yml @HideBa diff --git a/.github/changelog.yml b/.github/changelog.yml index a03adf520..910a90c37 100644 --- a/.github/changelog.yml +++ b/.github/changelog.yml @@ -12,4 +12,8 @@ prefixes: deps: Miscellaneous Tasks ci: false revert: false +scopes: + web: Web + server: Server + "": Misc titleVersionPrefix: remove diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 000000000..4be5e63df --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,4 @@ +web: + - web/**/* +server: + - server/**/* diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4b5505647..864dc1a51 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,7 @@ jobs: info: name: Collect information runs-on: ubuntu-latest - if: github.event.workflow_run.conclusion != 'failure' && github.event.repository.full_name == 'reearth/reearth-backend' && (github.event.workflow_run.head_branch == 'release' || !startsWith(github.event.head_commit.message, 'v')) + if: github.event.workflow_run.conclusion != 'failure' && github.event.repository.full_name == 'reearth/reearth' && (github.event.workflow_run.head_branch == 'release' || !startsWith(github.event.head_commit.message, 'v')) outputs: sha_short: ${{ steps.info.outputs.sha_short }} tag: ${{ steps.info.outputs.tag }} @@ -56,7 +56,7 @@ jobs: - info if: needs.info.outputs.name || needs.info.outputs.tag env: - ARTIFACTS: dist/reearth-backend_*.* + ARTIFACTS: server/dist/reearth_*.*,reearth-web_${{ needs.info.outputs.name }}.tar.gz steps: - name: Checkout uses: 
actions/checkout@v3 @@ -67,18 +67,27 @@ jobs: uses: actions/setup-go@v3 with: go-version: 1.18 + - name: Fetch reearth-web release + uses: dsaltares/fetch-gh-release-asset@master + with: + repo: reearth/reearth-web + version: tags/${{ needs.info.outputs.name }} + file: reearth-web_${{ needs.info.outputs.name }}.tar.gz + token: ${{ secrets.GITHUB_TOKEN }} - name: Run GoReleaser uses: goreleaser/goreleaser-action@v2 with: args: release --rm-dist ${{ env.SNAPSHOT }} + workdir: server env: SNAPSHOT: ${{ !needs.info.outputs.tag && '--snapshot' || '' }} GORELEASER_CURRENT_TAG: ${{ needs.info.outputs.tag || '0.0.0' }} - name: Rename artifacts if: needs.info.outputs.name run: for f in $ARTIFACTS; do mv $f $(echo $f | sed -E 's/_0\.0\.0-SNAPSHOT-[^_]*/_${{ needs.info.outputs.name }}/'); done + working-directory: server - name: List artifacts - run: ls -l dist + run: ls -l server/dist - name: Release nightly/rc if: needs.info.outputs.name uses: ncipollo/release-action@v1 @@ -112,7 +121,7 @@ jobs: - info if: needs.info.outputs.name || needs.info.outputs.tag env: - IMAGE_NAME: reearth/reearth-backend + IMAGE_NAME: reearth/reearth steps: - name: Checkout uses: actions/checkout@v3 @@ -151,10 +160,18 @@ jobs: echo "::set-output name=platforms::$PLATFORMS" echo "::set-output name=version::$VERSION" echo "::set-output name=tags::$TAGS" + - name: Fetch reearth-web release + uses: dsaltares/fetch-gh-release-asset@master + with: + repo: reearth/reearth-web + version: tags/${{ needs.info.outputs.name }} + file: reearth-web_${{ needs.info.outputs.name }}.tar.gz + token: ${{ secrets.GITHUB_TOKEN }} + - name: Extract reearth-web + run: tar -xvf reearth-web_${{ needs.info.outputs.name }}.tar.gz && mv reearth-web web/dist - name: Build and push docker image uses: docker/build-push-action@v3 with: - context: . 
platforms: ${{ steps.options.outputs.platforms }} push: true build-args: VERSION=${{ steps.options.outputs.version }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6ee636549..3425d022e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,25 +35,14 @@ jobs: with: version: v1.45 # v1.46 reports an error args: --timeout=10m + working-directory: server - name: test run: go test ./... -v -race -coverprofile=coverage.txt -covermode=atomic -timeout 10m env: REEARTH_DB: mongodb://localhost + working-directory: server - name: Send coverage report uses: codecov/codecov-action@v2 with: token: ${{ secrets.CODECOV_TOKEN }} file: coverage.txt - slack-notification: - if: github.event.repository.full_name == 'reearth/reearth-backend' && success() || failure() - name: Slack Notification - needs: - - ci - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/deploy_test.yml b/.github/workflows/deploy_test.yml index 7f0897c1f..44c48969a 100644 --- a/.github/workflows/deploy_test.yml +++ b/.github/workflows/deploy_test.yml @@ -5,14 +5,14 @@ concurrency: group: ${{ github.workflow }} cancel-in-progress: true env: - IMAGE: reearth/reearth-backend:nightly - IMAGE_GCP: us.gcr.io/reearth-oss/reearth-backend:nightly + IMAGE: reearth/reearth:nightly + IMAGE_GCP: us.gcr.io/reearth-oss/reearth:nightly GCP_REGION: us-central1 jobs: deploy_test: name: Deploy app to test env runs-on: ubuntu-latest - if: github.event.repository.full_name == 'reearth/reearth-backend' + if: github.event.repository.full_name == 'reearth/reearth' steps: - uses: google-github-actions/auth@v0 with: @@ -33,16 +33,3 @@ jobs: --region $GCP_REGION \ --platform managed \ --quiet - slack-notification: - name: Slack Notification - if: github.event.repository.full_name == 
'reearth/reearth-backend' && (success() || failure()) - needs: - - deploy_test - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml deleted file mode 100644 index 4bff71585..000000000 --- a/.github/workflows/nightly.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Nightly -on: - workflow_dispatch: - schedule: - - cron: '0 0 * * *' -env: - IMAGE: reearth/reearth - IMAGE_BASE: reearth/reearth-backend - TAG: nightly -jobs: - prenightly: - runs-on: ubuntu-latest - env: - EV: ${{ toJSON(github.event) }} - steps: - - run: echo ${{ github.event.repository.full_name }} - - run: echo $EV - nightly: - name: Nightly - runs-on: ubuntu-latest - if: github.event.repository.full_name == 'reearth/reearth' - steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Fetch reearth-web release - uses: dsaltares/fetch-gh-release-asset@master - with: - repo: reearth/reearth-web - version: tags/nightly - file: reearth-web_nightly.tar.gz - token: ${{ secrets.GITHUB_TOKEN }} - - name: Extract reearth-web - run: tar -xvf reearth-web_nightly.tar.gz && mv reearth-web web - - name: Build and push - id: docker_build - uses: docker/build-push-action@v2 - with: - context: . 
- platforms: linux/amd64,linux/arm64 - build-args: REEARTH_BACKEND_IMAGE=${{ env.IMAGE_BASE }}:${{ env.TAG }} - push: true - tags: ${{ env.IMAGE }}:${{ env.TAG }} - cache-from: type=gha - cache-to: type=gha,mode=max - - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} - slack-notification: - if: github.event.repository.full_name == 'reearth/reearth' && always() - name: Slack Notification - needs: - - nightly - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml index f6472679b..cdf267d2e 100644 --- a/.github/workflows/pr_title.yml +++ b/.github/workflows/pr_title.yml @@ -11,6 +11,9 @@ jobs: pr_title: runs-on: ubuntu-latest steps: + - uses: actions/labeler@v2 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} - uses: amannn/action-semantic-pull-request@v4 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.js b/.github/workflows/release.js index 86d41395c..eea04df74 100644 --- a/.github/workflows/release.js +++ b/.github/workflows/release.js @@ -1,47 +1,28 @@ const { readFileSync, writeFileSync } = require("fs"); -const repos = ["web", "backend"]; -const header = "# Changelog\nAll notable changes to this project will be documented in this file."; - -module.exports = async ({ github, tag }) => { - const newTag = removeVFromTag(tag); - const releases = await Promise.all(repos.map(r => github.rest.repos.getReleaseByTag({ +module.exports = async ({ github }) => { + const newTag = removeVFromTag(process.env.TAG); + const release = await github.rest.repos.getReleaseByTag({ owner: "reearth", - repo: "reearth-" + r, + repo: "reearth-web", tag: `v${newTag}`, - }))); + }); + const webChangelogLatest = "### Web\n\n" + release.data.body; - // generate CHANGELOG_latest.md - 
const changelogLatest = repos.flatMap((r, i) => - [`## reearth-${r}`, releases[i].data.body] - ).join("\n"); - writeFileSync("CHANGELOG_latest.md", changelogLatest); + const changelogLatest = readFileSync("CHANGELOG_latest.md", "utf8"); + const newChangelogLatest = webChangelogLatest + "\n\n" + changelogLatest; + writeFileSync("CHANGELOG_latest.md", newChangelogLatest); - // insert new changelog to CHANGELOG.md - let changelog = ""; - try { - changelog = readFileSync("CHANGELOG.md", "utf-8"); - } catch { - // ignore - } - const pos = changelog.indexOf("## "); // first version section - const newChangelog = `${formatHeader(tag)}\n\n${changelogLatest.replace(/^#/gm, "##")}`; - if (pos >= 0) { - changelog = changelog.slice(0, pos) + newChangelog + "\n\n" + changelog.slice(pos); - } else { - changelog = [header, newChangelog].join("\n\n") - } - writeFileSync("CHANGELOG.md", changelog); + const changelog = readFileSync("CHANGELOG.md", "utf8"); + const newChangelog = insert(webChangelogLatest + "\n\n", changelog, changelog.indexOf("### ")); + writeFileSync("CHANGELOG.md", newChangelog); }; -function formatHeader(version, date) { - return `## ${removeVFromTag(version)} - ${formatDate(date)}`; -} - -function formatDate(d = new Date()) { - return `${d.getUTCFullYear()}-${("0" + (d.getUTCMonth() + 1)).slice(-2)}-${("0" + d.getUTCDate()).slice(-2)}`; -} - function removeVFromTag(t) { return t.replace("v", ""); } + +function insert(insert, source, pos) { + if (pos < 0) pos = 0; + return source.slice(0, pos) + insert + source.slice(pos); +} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6adf60925..f4f40a347 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,112 +2,58 @@ name: Release on: workflow_dispatch: inputs: - custom_tag: + version: required: false - description: Specify version only when you want to increment the patch and major version (e.g. 
1.1.0) -env: - IMAGE: reearth/reearth - IMAGE_BASE: reearth/reearth-backend + description: 'Next version (NOTE: Switch the branch to "release"!)' + type: choice + default: minor + options: + - patch + - minor + - major jobs: release: name: Release runs-on: ubuntu-latest + if: github.ref == 'refs/heads/release' steps: + - name: git config + env: + GPT_USER: ${{ secrets.GPT_USER }} + run: | + git config --global user.name $GPT_USER + git config --global pull.rebase false - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 token: ${{ secrets.GPT }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Bump tag version - id: tag - uses: mathieudutour/github-tag-action@v5.6 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - custom_tag: ${{ github.event.inputs.custom_tag }} - default_bump: minor - dry_run: true - - name: Get Docker tags - id: tags - env: - TAG: ${{ steps.tag.outputs.new_tag }} - run: | - TAG2=${TAG#v} - TAGS=${IMAGE}:${TAG2} - if [[ ! 
$TAG2 =~ '-' ]]; then - TAGS+=,${IMAGE}:${TAG2%.*} - TAGS+=,${IMAGE}:${TAG2%%.*} - TAGS+=,${IMAGE}:latest - fi - echo "::set-output name=new_tag_short::$TAG2" - echo "::set-output name=tags::$TAGS" - - name: Fetch reearth-web release - uses: dsaltares/fetch-gh-release-asset@master + - id: changelog + name: Generate CHANGELOG + uses: reearth/changelog-action@main with: - repo: reearth/reearth-web - version: tags/${{ steps.tag.outputs.new_tag }} - file: reearth-web_${{ steps.tag.outputs.new_tag }}.tar.gz - token: ${{ secrets.GITHUB_TOKEN }} - - name: Extract reearth-web - run: tar -xvf reearth-web_${{ steps.tag.outputs.new_tag }}.tar.gz && mv reearth-web web - - name: Build and push - id: docker_build - uses: docker/build-push-action@v2 + version: ${{ github.event.inputs.version }} + repo: ${{ github.repository }} + latest: CHANGELOG_latest.md + - name: Insert reearth-web changelog + uses: actions/github-script@v6 with: - context: . - platforms: linux/amd64,linux/arm64 - build-args: REEARTH_BACKEND_IMAGE=${{ env.IMAGE_BASE }}:${{ steps.tags.outputs.new_tag_short }} - push: true - tags: ${{ steps.tags.outputs.tags }} - cache-from: type=gha - cache-to: type=gha,mode=max - - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} - - name: Generate changelog - uses: actions/github-script@v5 + script: 'require("./.github/workflows/release")({ github, context })' env: - TAG: ${{ steps.tag.outputs.new_tag }} + TAG: ${{ steps.changelog.outputs.version }} + - name: Upload latest CHANGELOG + uses: actions/upload-artifact@v3 with: - script: 'require("./.github/workflows/release")({ github, context, tag: process.env.TAG })' - - name: Commit and push + name: changelog-${{ steps.changelog.outputs.version }} + path: CHANGELOG_latest.md + - name: Commit & push to release env: - TAG: ${{ steps.tag.outputs.new_tag }} + TAG: ${{ steps.changelog.outputs.version }} run: | - git config --global user.name "${{ github.actor }}" - git config --global user.email "${{ 
github.actor }}@users.noreply.github.com" + rm CHANGELOG_latest.md git add CHANGELOG.md - git commit -m $TAG + git commit -am "$TAG" git tag $TAG - git push - git push --tags - - name: Get current SHA - id: sha - run: echo "::set-output name=sha::$(git rev-parse HEAD)" - - name: Create GitHub release - uses: ncipollo/release-action@v1 - with: - commit: ${{ steps.sha.outputs.sha }} - name: ${{ steps.tag.outputs.new_tag }} - tag: ${{ steps.tag.outputs.new_tag }} - bodyFile: CHANGELOG_latest.md - slack-notification: - if: always() - name: Slack Notification - needs: - - release - runs-on: ubuntu-latest - steps: - - name: Slack Notification - uses: Gamesight/slack-workflow-status@master - if: always() - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }} + git push --atomic origin release $TAG + - name: Commit & push to main + run: git switch main && git cherry-pick release && git push diff --git a/.github/workflows/server-release.yml b/.github/workflows/server-release.yml deleted file mode 100644 index 9b7ff0f9d..000000000 --- a/.github/workflows/server-release.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Release -on: - workflow_dispatch: - inputs: - version: - required: false - description: 'Next version (NOTE: Switch the branch to "release"!)' - type: choice - default: minor - options: - - patch - - minor - - major -jobs: - release: - name: Release - runs-on: ubuntu-latest - if: github.ref == 'refs/heads/release' - steps: - - name: git config - env: - GPT_USER: ${{ secrets.GPT_USER }} - run: | - git config --global user.name $GPT_USER - git config --global pull.rebase false - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - token: ${{ secrets.GPT }} - - id: changelog - name: Generate CHANGELOG - uses: reearth/changelog-action@main - with: - version: ${{ github.event.inputs.version }} - repo: ${{ github.repository }} - latest: CHANGELOG_latest.md - - name: Upload latest CHANGELOG - uses: 
actions/upload-artifact@v3 - with: - name: changelog-${{ steps.changelog.outputs.version }} - path: CHANGELOG_latest.md - - name: Commit & push to release - env: - TAG: ${{ steps.changelog.outputs.version }} - run: | - rm CHANGELOG_latest.md - git add CHANGELOG.md - git commit -am "$TAG" - git tag $TAG - git push --atomic origin release $TAG - - name: Commit & push to main - run: git switch main && git cherry-pick release && git push diff --git a/.gitignore b/.gitignore index d106a25bb..1ec8a2074 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,7 @@ +.DS_Store /mongo /data -/web +/server/web +/server/coverage.txt /.env /.env.* diff --git a/.vscode/launch.json b/.vscode/launch.json index 3ff903694..3dc0fd9fe 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -2,12 +2,12 @@ "version": "0.2.0", "configurations": [ { - "name": "Launch", + "name": "Launch server", "type": "go", "request": "launch", "mode": "auto", - "cwd": "${workspaceRoot}", - "program": "${workspaceRoot}/cmd/reearth", + "cwd": "${workspaceRoot}/server", + "program": "${workspaceRoot}/server/cmd/reearth", "env": {}, "args": [] } diff --git a/.vscode/settings.json b/.vscode/settings.json index 1463f5115..b041699da 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,26 +6,26 @@ "yaml.validate": true, "yaml.hover": true, "yaml.schemas": { - "./schemas/plugin_manifest.json": [ - "/pkg/builtin/manifest.yml" + "./server/schemas/plugin_manifest.json": [ + "/server/pkg/builtin/manifest.yml" ], - "./schemas/plugin_manifest_translation.json": [ - "/pkg/builtin/manifest_*.yml" + "./server/schemas/plugin_manifest_translation.json": [ + "/server/pkg/builtin/manifest_*.yml" ], "https://json.schemastore.org/github-workflow.json": ".github/workflows/build.yml" }, "json.schemas": [ { "fileMatch": [ - "/pkg/builtin/manifest.json" + "/server/pkg/builtin/manifest.json" ], - "url": "./schemas/plugin_manifest.json" + "url": "./server/schemas/plugin_manifest.json" }, { "fileMatch": [ - 
"/pkg/builtin/manifest_*.json" + "/server/pkg/builtin/manifest_*.json" ], - "url": "./schemas/plugin_manifest_translation.json" + "url": "./server/schemas/plugin_manifest_translation.json" } ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index 47aa7a1c8..aa8aa6af0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,11 @@ # Changelog + All notable changes to this project will be documented in this file. ## 0.10.0 - 2022-08-10 ### reearth-web + #### ๐Ÿš€ Features - Add mouse events to plugin API ([#280](https://github.com/reearth/reearth-web/pull/280)) [`9445f0`](https://github.com/reearth/reearth-web/commit/9445f0) @@ -30,7 +32,9 @@ All notable changes to this project will be documented in this file. - Upgrade resium to v1.15.0 ([#281](https://github.com/reearth/reearth-web/pull/281)) [`bd3968`](https://github.com/reearth/reearth-web/commit/bd3968) - Cosme changelog [`05084e`](https://github.com/reearth/reearth-web/commit/05084e) - Fix changelog [`48de86`](https://github.com/reearth/reearth-web/commit/48de86) + ### reearth-backend + #### ๐Ÿš€ Features - Configurable server host [`61b03a`](https://github.com/reearth/reearth-backend/commit/61b03a) @@ -43,6 +47,7 @@ All notable changes to this project will be documented in this file. ## 0.9.0 - 2022-07-20 ### reearth-web + #### ๐Ÿš€ Features - Plugin API to add layers ([#258](https://github.com/reearth/reearth-web/pull/258)) [`6468e2`](https://github.com/reearth/reearth-web/commit/6468e2) @@ -82,6 +87,7 @@ All notable changes to this project will be documented in this file. 
- Upgrade to React 18 and switch to React Router ([#234](https://github.com/reearth/reearth-web/pull/234)) [`b0e8e6`](https://github.com/reearth/reearth-web/commit/b0e8e6) ### reearth-backend + #### ๐Ÿš€ Features - Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) @@ -102,6 +108,7 @@ All notable changes to this project will be documented in this file. ## 0.8.0 - 2022-06-17 ### reearth-web + #### ๐Ÿš€ Features - Add a basic timeline UI ([#232](https://github.com/reearth/reearth-web/pull/232)) [`fc9732`](https://github.com/reearth/reearth-web/commit/fc9732) @@ -136,7 +143,9 @@ All notable changes to this project will be documented in this file. - Upgrade dependency cesium-dnd to 1.1.0 ([#244](https://github.com/reearth/reearth-web/pull/244)) [`ba6b51`](https://github.com/reearth/reearth-web/commit/ba6b51) - Fix typos [`f98005`](https://github.com/reearth/reearth-web/commit/f98005) - Update config so extensionUrls can be declared in .env file for local development ([#237](https://github.com/reearth/reearth-web/pull/237)) [`545b9e`](https://github.com/reearth/reearth-web/commit/545b9e) + ### reearth-backend + #### ๐Ÿš€ Features - Add totalCount field to DatasetSchema type of GraphQL schema ([#154](https://github.com/reearth/reearth-backend/pull/154)) [`ab6334`](https://github.com/reearth/reearth-backend/commit/ab6334) @@ -190,6 +199,7 @@ All notable changes to this project will be documented in this file. 
- Cluster, dataset, infobox, layer, plugin and project gql query files ([#219](https://github.com/reearth/reearth-web/pull/219)) [`e4dae9`](https://github.com/reearth/reearth-web/commit/e4dae9) ### reearth-backend + #### ๐Ÿš€ Features - Add an opacity slider to map tiles ([#138](https://github.com/reearth/reearth-backend/pull/138)) [`4f72b8`](https://github.com/reearth/reearth-backend/commit/4f72b8) @@ -233,6 +243,7 @@ All notable changes to this project will be documented in this file. - Update all dependencies ([#210](https://github.com/reearth/reearth-web/pull/210)) [`c22b7a`](https://github.com/reearth/reearth-web/commit/c22b7a) ### reearth-backend + #### ๐Ÿ”ง Bug Fixes - Renovate bot not running on schedule ([#136](https://github.com/reearth/reearth-backend/pull/136)) [`82843f`](https://github.com/reearth/reearth-backend/commit/82843f) @@ -278,6 +289,7 @@ All notable changes to this project will be documented in this file. - Set default auth config to start app with zero configuration ([#191](https://github.com/reearth/reearth-web/pull/191)) [`d5a2aa`](https://github.com/reearth/reearth-web/commit/d5a2aa) ### reearth-backend + #### ๐Ÿš€ Features - Authentication system ([#108](https://github.com/reearth/reearth-backend/pull/108)) [`b89c32`](https://github.com/reearth/reearth-backend/commit/b89c32) @@ -352,6 +364,7 @@ All notable changes to this project will be documented in this file. - Upgrade dependencies ([#175](https://github.com/reearth/reearth-web/pull/175)) [`dba959`](https://github.com/reearth/reearth-web/commit/dba959) ### reearth-backend + #### ๐Ÿš€ Features - Implement property.Diff and plugin/manifest.Diff ([#107](https://github.com/reearth/reearth-backend/pull/107)) [`700269`](https://github.com/reearth/reearth-backend/commit/700269) @@ -379,6 +392,7 @@ All notable changes to this project will be documented in this file. 
## 0.4.0 - 2022-01-27 ### reearth-web + #### ๐Ÿš€ Features - Add "clamp to filed" option to file primitive ([#155](https://github.com/reearth/reearth-web/pull/155)) [`2e83ba`](https://github.com/reearth/reearth-web/commit/2e83ba) @@ -402,6 +416,7 @@ All notable changes to this project will be documented in this file. - Layer clustering feature (GraphQL) ([#159](https://github.com/reearth/reearth-web/pull/159)) [`4365b8`](https://github.com/reearth/reearth-web/commit/4365b8) ### reearth-backend + #### ๐Ÿš€ Features - Add "clamp to ground" option to file primitive ([#95](https://github.com/reearth/reearth-backend/pull/95)) [`559194`](https://github.com/reearth/reearth-backend/commit/559194) @@ -481,6 +496,7 @@ All notable changes to this project will be documented in this file. ## 0.2.0 - 2021-11-18 #### reearth-web + #### ๐Ÿš€ Features - Widget align system for mobile ([#115](https://github.com/reearth/reearth-web/pull/115)) [`afa4ba`](https://github.com/reearth/reearth-web/commit/afa4ba) @@ -506,6 +522,7 @@ All notable changes to this project will be documented in this file. ### reearth-backend + #### ๐Ÿš€ Features - Support opentelemetry ([#68](https://github.com/reearth/reearth-backend/pull/68)) [`25c581`](https://github.com/reearth/reearth-backend/commit/25c581) @@ -531,6 +548,7 @@ All notable changes to this project will be documented in this file. ## 0.1.0 - 2021-11-02 ### reearth-web + #### ๐Ÿš€ Features - Support Auth0 audience ([#2](https://github.com/reearth/reearth-web/pull/2)) [`0ad0f6`](https://github.com/reearth/reearth-web/commit/0ad0f6) @@ -667,6 +685,7 @@ All notable changes to this project will be documented in this file. ### reearth-backend + #### ๐Ÿš€ Features - Support Auth0 audience ([#3](https://github.com/reearth/reearth-backend/pull/3)) [`c3758e`](https://github.com/reearth/reearth-backend/commit/c3758e) @@ -770,4 +789,3 @@ All notable changes to this project will be documented in this file. 
- Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) - Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) - Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - diff --git a/Dockerfile b/Dockerfile index cb36a256d..d7c6b1ad1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,26 @@ -ARG REEARTH_BACKEND_IMAGE=reearth/reearth-backend:latest -FROM $REEARTH_BACKEND_IMAGE +FROM golang:1.18-alpine AS build +ARG TAG=release +ARG REV +ARG VERSION -COPY web /reearth/web +RUN apk add --update --no-cache git ca-certificates build-base +COPY server/go.mod server/go.sum server/main.go /reearth/ WORKDIR /reearth +RUN go mod download + +COPY server/cmd/ /reearth/cmd/ +COPY server/pkg/ /reearth/pkg/ +COPY server/internal/ /reearth/internal/ + +RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -w -buildid=" -trimpath ./cmd/reearth + +FROM scratch + +COPY --from=build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt +COPY --from=build /reearth/reearth /reearth/reearth +COPY web/dist* /reearth/web/ + +WORKDIR /reearth + CMD [ "./reearth" ] diff --git a/server/.goreleaser.yml b/server/.goreleaser.yml index 3c1454481..8cd9e5cbc 100644 --- a/server/.goreleaser.yml +++ b/server/.goreleaser.yml @@ -1,4 +1,4 @@ -project_name: reearth-backend +project_name: reearth before: hooks: - go mod tidy diff --git a/server/CHANGELOG.md b/server/CHANGELOG.md deleted file mode 100644 index 411de6244..000000000 --- a/server/CHANGELOG.md +++ /dev/null @@ -1,1122 +0,0 @@ -<<<<<<< HEAD -# Changelog -All notable changes to this project will be documented in this file. 
- -## 0.10.0 - 2022-08-10 - -### reearth-web -#### ๐Ÿš€ Features - -- Add mouse events to plugin API ([#280](https://github.com/reearth/reearth-web/pull/280)) [`9445f0`](https://github.com/reearth/reearth-web/commit/9445f0) - -#### ๐Ÿ”ง Bug Fixes - -- Select not working after pinch event on ipad ([#290](https://github.com/reearth/reearth-web/pull/290)) [`821504`](https://github.com/reearth/reearth-web/commit/821504) -- Translation for modal buttons [`7eead9`](https://github.com/reearth/reearth-web/commit/7eead9) -- Plugin widget&[#39](https://github.com/reearth/reearth-web/pull/39);s width using iframe&[#39](https://github.com/reearth/reearth-web/pull/39);s default ([#283](https://github.com/reearth/reearth-web/pull/283)) [`572da0`](https://github.com/reearth/reearth-web/commit/572da0) -- Pointer events issues around widgets ([#279](https://github.com/reearth/reearth-web/pull/279)) [`219ea4`](https://github.com/reearth/reearth-web/commit/219ea4) - -#### ๐ŸŽจ Styling - -- Fix icons of plugin install buttons ([#289](https://github.com/reearth/reearth-web/pull/289)) [`af7a1b`](https://github.com/reearth/reearth-web/commit/af7a1b) - -#### ๐Ÿงช Testing - -- Introduce vitest ([#284](https://github.com/reearth/reearth-web/pull/284)) [`2152e0`](https://github.com/reearth/reearth-web/commit/2152e0) - -#### Miscellaneous Tasks - -- Migrate to Vite, upgrade Cypress to v10 ([#287](https://github.com/reearth/reearth-web/pull/287)) [`50f2b6`](https://github.com/reearth/reearth-web/commit/50f2b6) -- Simplify ESLint config ([#282](https://github.com/reearth/reearth-web/pull/282)) [`b3570a`](https://github.com/reearth/reearth-web/commit/b3570a) -- Upgrade resium to v1.15.0 ([#281](https://github.com/reearth/reearth-web/pull/281)) [`bd3968`](https://github.com/reearth/reearth-web/commit/bd3968) -- Cosme changelog [`05084e`](https://github.com/reearth/reearth-web/commit/05084e) -- Fix changelog [`48de86`](https://github.com/reearth/reearth-web/commit/48de86) -### reearth-backend 
-#### ๐Ÿš€ Features - -- Configurable server host [`61b03a`](https://github.com/reearth/reearth-backend/commit/61b03a) - -#### Miscellaneous Tasks - -- Add new frontend endpoint (for Vite@3) [`70fed0`](https://github.com/reearth/reearth-backend/commit/70fed0) -- Fix changelog [skip ci] [`895a64`](https://github.com/reearth/reearth-backend/commit/895a64) - -## 0.9.0 - 2022-07-20 - -### reearth-web -#### ๐Ÿš€ Features - -- Plugin API to add layers ([#258](https://github.com/reearth/reearth-web/pull/258)) [`6468e2`](https://github.com/reearth/reearth-web/commit/6468e2) -- Change layer indicators from preset list ([#245](https://github.com/reearth/reearth-web/pull/245)) [`db185e`](https://github.com/reearth/reearth-web/commit/db185e) - -#### ๐Ÿ”ง Bug Fixes - -- Some menu not displayed at sidebar in proejct setting page [`7c0705`](https://github.com/reearth/reearth-web/commit/7c0705) -- Nothing displayed at project setting page when there are many projects [`0a6744`](https://github.com/reearth/reearth-web/commit/0a6744) -- Plugins do not work as expected, update quickjs-emscripten ([#276](https://github.com/reearth/reearth-web/pull/276)) [`9336e6`](https://github.com/reearth/reearth-web/commit/9336e6) -- Plugin editor changes do not take effect until run button is clicked ([#274](https://github.com/reearth/reearth-web/pull/274)) [`39fdb2`](https://github.com/reearth/reearth-web/commit/39fdb2) -- Storytelling widget does not get layers&[#39](https://github.com/reearth/reearth-web/pull/39); title ([#273](https://github.com/reearth/reearth-web/pull/273)) [`5ff72b`](https://github.com/reearth/reearth-web/commit/5ff72b) -- Dataset icon not showing in layer list ([#275](https://github.com/reearth/reearth-web/pull/275)) [`8dbc88`](https://github.com/reearth/reearth-web/commit/8dbc88) -- Show full camera values in camera property field popup ([#270](https://github.com/reearth/reearth-web/pull/270)) [`7d3eac`](https://github.com/reearth/reearth-web/commit/7d3eac) -- Plugin 
dimensions and iframe issues ([#271](https://github.com/reearth/reearth-web/pull/271)) [`f3a52a`](https://github.com/reearth/reearth-web/commit/f3a52a) -- Camera jump not working ([#269](https://github.com/reearth/reearth-web/pull/269)) [`48bbfe`](https://github.com/reearth/reearth-web/commit/48bbfe) -- Layer select state not update properly ([#268](https://github.com/reearth/reearth-web/pull/268)) [`5f7c69`](https://github.com/reearth/reearth-web/commit/5f7c69) -- Unselect layer not work properly ([#266](https://github.com/reearth/reearth-web/pull/266)) [`eb41da`](https://github.com/reearth/reearth-web/commit/eb41da) -- Layer drag and drop does not work with indicators ([#265](https://github.com/reearth/reearth-web/pull/265)) [`12ae04`](https://github.com/reearth/reearth-web/commit/12ae04) -- Testing-library react 18 warnings ([#263](https://github.com/reearth/reearth-web/pull/263)) [`4c9076`](https://github.com/reearth/reearth-web/commit/4c9076) -- Auto fetch more items in dashboard page , project list , dataset page for big screens ([#255](https://github.com/reearth/reearth-web/pull/255)) [`fb8bf9`](https://github.com/reearth/reearth-web/commit/fb8bf9) -- Asset modal flushes when camera limiter is enabled ([#261](https://github.com/reearth/reearth-web/pull/261)) [`204629`](https://github.com/reearth/reearth-web/commit/204629) -- Not being able to override an image from the asset modal ([#260](https://github.com/reearth/reearth-web/pull/260)) [`1d3c3f`](https://github.com/reearth/reearth-web/commit/1d3c3f) -- Layers pane does not update after move layer or create folder ([#259](https://github.com/reearth/reearth-web/pull/259)) [`336d98`](https://github.com/reearth/reearth-web/commit/336d98) -- Cesium flashes on camera change ([#257](https://github.com/reearth/reearth-web/pull/257)) [`ad2c0e`](https://github.com/reearth/reearth-web/commit/ad2c0e) -- Router typos ([#252](https://github.com/reearth/reearth-web/pull/252)) 
[`19fcb6`](https://github.com/reearth/reearth-web/commit/19fcb6) -- Dataset page showing errors on page refreshing ([#253](https://github.com/reearth/reearth-web/pull/253)) [`3f48e9`](https://github.com/reearth/reearth-web/commit/3f48e9) - -#### ๐Ÿงช Testing - -- Fix test coverage target ([#272](https://github.com/reearth/reearth-web/pull/272)) [`b9db10`](https://github.com/reearth/reearth-web/commit/b9db10) - -#### Miscellaneous Tasks - -- Update dependency cesium to ^1.95.0 ([#262](https://github.com/reearth/reearth-web/pull/262)) [`845e2a`](https://github.com/reearth/reearth-web/commit/845e2a) -- Upgrade cesium [`363071`](https://github.com/reearth/reearth-web/commit/363071) -- Upgrade to React 18 and switch to React Router ([#234](https://github.com/reearth/reearth-web/pull/234)) [`b0e8e6`](https://github.com/reearth/reearth-web/commit/b0e8e6) - -### reearth-backend -#### ๐Ÿš€ Features - -- Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) - -#### ๐Ÿ”ง Bug Fixes - -- Property fields in a property list cannot be removed ([#160](https://github.com/reearth/reearth-backend/pull/160)) [`358237`](https://github.com/reearth/reearth-backend/commit/358237) - -#### ๐Ÿงช Testing - -- Unit test for mongo auth request repo ([#159](https://github.com/reearth/reearth-backend/pull/159)) [`5afc81`](https://github.com/reearth/reearth-backend/commit/5afc81) - -#### Miscellaneous Tasks - -- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) - - -## 0.8.0 - 2022-06-17 - -### reearth-web -#### ๐Ÿš€ Features - -- Add a basic timeline UI ([#232](https://github.com/reearth/reearth-web/pull/232)) [`fc9732`](https://github.com/reearth/reearth-web/commit/fc9732) -- Add infinite scroll for project lists and datasets in dashboard and setting pages 
([#225](https://github.com/reearth/reearth-web/pull/225)) [`28d377`](https://github.com/reearth/reearth-web/commit/28d377) -- Adapt camera field to support 2d mode ([#233](https://github.com/reearth/reearth-web/pull/233)) [`172de5`](https://github.com/reearth/reearth-web/commit/172de5) -- Add scene property overriding to Re:Earth API ([#224](https://github.com/reearth/reearth-web/pull/224)) [`b07603`](https://github.com/reearth/reearth-web/commit/b07603) - -#### ๐Ÿ”ง Bug Fixes - -- Some plugin APIs were missing ([#248](https://github.com/reearth/reearth-web/pull/248)) [`c83262`](https://github.com/reearth/reearth-web/commit/c83262) -- Slight shift when capture a new position ([#246](https://github.com/reearth/reearth-web/pull/246)) [`182406`](https://github.com/reearth/reearth-web/commit/182406) -- Dataset counts are displayed incorrectly in dataset pane ([#235](https://github.com/reearth/reearth-web/pull/235)) [`45a0c8`](https://github.com/reearth/reearth-web/commit/45a0c8) -- Labeling hidden by marker symbol ([#238](https://github.com/reearth/reearth-web/pull/238)) [`99c378`](https://github.com/reearth/reearth-web/commit/99c378) -- Vertical position style in infobox image block ([#236](https://github.com/reearth/reearth-web/pull/236)) [`647cf8`](https://github.com/reearth/reearth-web/commit/647cf8) -- Unexpected values for theme and lang props of extension components [`723486`](https://github.com/reearth/reearth-web/commit/723486) -- Wait until all extensions are loaded [`dfe2aa`](https://github.com/reearth/reearth-web/commit/dfe2aa) -- Iframe not correctly sizing to plugin ([#230](https://github.com/reearth/reearth-web/pull/230)) [`500ce8`](https://github.com/reearth/reearth-web/commit/500ce8) -- Plugin API cameramove event is not emitted in published pages ([#227](https://github.com/reearth/reearth-web/pull/227)) [`7a11b3`](https://github.com/reearth/reearth-web/commit/7a11b3) - -#### โœจ Refactor - -- Migrate to react-intl from react-i18next 
([#240](https://github.com/reearth/reearth-web/pull/240)) [`404743`](https://github.com/reearth/reearth-web/commit/404743) - -#### ๐Ÿงช Testing - -- Disable util/raf tests that do not always succeed [`45a450`](https://github.com/reearth/reearth-web/commit/45a450) -- Fix unit test for utils/raf [`a060d9`](https://github.com/reearth/reearth-web/commit/a060d9) -- Fix Cypress login test fails ([#241](https://github.com/reearth/reearth-web/pull/241)) [`a5dbfb`](https://github.com/reearth/reearth-web/commit/a5dbfb) - -#### Miscellaneous Tasks - -- Upgrade dependency cesium-dnd to 1.1.0 ([#244](https://github.com/reearth/reearth-web/pull/244)) [`ba6b51`](https://github.com/reearth/reearth-web/commit/ba6b51) -- Fix typos [`f98005`](https://github.com/reearth/reearth-web/commit/f98005) -- Update config so extensionUrls can be declared in .env file for local development ([#237](https://github.com/reearth/reearth-web/pull/237)) [`545b9e`](https://github.com/reearth/reearth-web/commit/545b9e) -### reearth-backend -#### ๐Ÿš€ Features - -- Add totalCount field to DatasetSchema type of GraphQL schema ([#154](https://github.com/reearth/reearth-backend/pull/154)) [`ab6334`](https://github.com/reearth/reearth-backend/commit/ab6334) -- Add timeline settings to scene property ([#153](https://github.com/reearth/reearth-backend/pull/153)) [`602ec0`](https://github.com/reearth/reearth-backend/commit/602ec0) - -#### ๐Ÿ”ง Bug Fixes - -- Assets are not saved when files are uploaded ([#155](https://github.com/reearth/reearth-backend/pull/155)) [`e444e4`](https://github.com/reearth/reearth-backend/commit/e444e4) - -#### โœจ Refactor - -- Declarative description of use case structure (asset only) ([#151](https://github.com/reearth/reearth-backend/pull/151)) [`c6e98c`](https://github.com/reearth/reearth-backend/commit/c6e98c) - -#### Miscellaneous Tasks - -- Update go modules ([#150](https://github.com/reearth/reearth-backend/pull/150)) 
[`6372bc`](https://github.com/reearth/reearth-backend/commit/6372bc) - -## 0.7.0 - 2022-05-17 - -### reearth-web - -#### ๐Ÿš€ Features - -- Implementation of the avatar feature in workspaces screens ([#206](https://github.com/reearth/reearth-web/pull/206)) [`42d7aa`](https://github.com/reearth/reearth-web/commit/42d7aa) -- Update placehoder for color field ([#215](https://github.com/reearth/reearth-web/pull/215)) [`c6c6e3`](https://github.com/reearth/reearth-web/commit/c6c6e3) -- Add opacity field to map tiles ([#220](https://github.com/reearth/reearth-web/pull/220)) [`006a8d`](https://github.com/reearth/reearth-web/commit/006a8d) - -#### ๐Ÿ”ง Bug Fixes - -- Dropdown styles in right panel break when selected item's name is too long [`9a5993`](https://github.com/reearth/reearth-web/commit/9a5993) -- Dashboard not updating on project creation [`4b5478`](https://github.com/reearth/reearth-web/commit/4b5478) -- Query names in refetchQueries not updated ([#222](https://github.com/reearth/reearth-web/pull/222)) [`711712`](https://github.com/reearth/reearth-web/commit/711712) -- Published page uses GraphQL and reports errors [`3e3e45`](https://github.com/reearth/reearth-web/commit/3e3e45) - -#### โœจ Refactor - -- Queries/mutation code into a single directory ([#208](https://github.com/reearth/reearth-web/pull/208)) [`2afc16`](https://github.com/reearth/reearth-web/commit/2afc16) -- Property, scene, tag, user, widget and workspace gql query files ([#221](https://github.com/reearth/reearth-web/pull/221)) [`3bf421`](https://github.com/reearth/reearth-web/commit/3bf421) - -#### Miscellaneous Tasks - -- Introduce i18next ([#212](https://github.com/reearth/reearth-web/pull/212)) [`0ac0c2`](https://github.com/reearth/reearth-web/commit/0ac0c2) -- Add reference to style guide in README [`e29024`](https://github.com/reearth/reearth-web/commit/e29024) -- Add useT hook to i18n ([#223](https://github.com/reearth/reearth-web/pull/223)) 
[`b96177`](https://github.com/reearth/reearth-web/commit/b96177) -- Update dependency cesium to ^1.93.0 ([#216](https://github.com/reearth/reearth-web/pull/216)) [`06b563`](https://github.com/reearth/reearth-web/commit/06b563) -- Update all dependencies ([#226](https://github.com/reearth/reearth-web/pull/226)) [`36fb79`](https://github.com/reearth/reearth-web/commit/36fb79) - -#### Refactor - -- Clean gql pt1 asset ([#217](https://github.com/reearth/reearth-web/pull/217)) [`b88a8c`](https://github.com/reearth/reearth-web/commit/b88a8c) -- Cluster, dataset, infobox, layer, plugin and project gql query files ([#219](https://github.com/reearth/reearth-web/pull/219)) [`e4dae9`](https://github.com/reearth/reearth-web/commit/e4dae9) - -### reearth-backend -#### ๐Ÿš€ Features - -- Add an opacity slider to map tiles ([#138](https://github.com/reearth/reearth-backend/pull/138)) [`4f72b8`](https://github.com/reearth/reearth-backend/commit/4f72b8) - -#### ๐Ÿ”ง Bug Fixes - -- Signup api requires password field [`a79376`](https://github.com/reearth/reearth-backend/commit/a79376) -- "$in needs an array" error from mongo FindByIDs ([#142](https://github.com/reearth/reearth-backend/pull/142)) [`58e1b0`](https://github.com/reearth/reearth-backend/commit/58e1b0) -- Name field is available again in signup api ([#144](https://github.com/reearth/reearth-backend/pull/144)) [`651852`](https://github.com/reearth/reearth-backend/commit/651852) - -#### โœจ Refactor - -- Retry mongo lock ([#145](https://github.com/reearth/reearth-backend/pull/145)) [`ddaeaa`](https://github.com/reearth/reearth-backend/commit/ddaeaa) - -#### ๐Ÿงช Testing - -- Add Mongo Asset's [`FindByID`](https://github.com/reearth/reearth-backend/commit/FindByID) unit testing ([#139](https://github.com/reearth/reearth-backend/pull/139)) [`35f9db`](https://github.com/reearth/reearth-backend/commit/35f9db) -- Refactor mongo connect helper function [`751e66`](https://github.com/reearth/reearth-backend/commit/751e66) -- 
Util.SyncMap.Range test sometimes fails ([#143](https://github.com/reearth/reearth-backend/pull/143)) [`c2b969`](https://github.com/reearth/reearth-backend/commit/c2b969) - -#### Miscellaneous Tasks - -- Typo [`secrit`](https://github.com/reearth/reearth-backend/commit/secrit) on env example ([#137](https://github.com/reearth/reearth-backend/pull/137)) [`2c0220`](https://github.com/reearth/reearth-backend/commit/2c0220) -- Update the go modules ([#146](https://github.com/reearth/reearth-backend/pull/146)) [`89009b`](https://github.com/reearth/reearth-backend/commit/89009b) - -## 0.6.1 - 2022-04-20 - -### reearth-web - -#### 🚀 Features - -- Extend project publish settings and dataset import modal functionality through extension API ([#200](https://github.com/reearth/reearth-web/pull/200)) [`96aa56`](https://github.com/reearth/reearth-web/commit/96aa56) - -#### 🔧 Bug Fixes - -- Redirect after esc button in any setting page ([#193](https://github.com/reearth/reearth-web/pull/193)) [`c8ec35`](https://github.com/reearth/reearth-web/commit/c8ec35) - -#### Miscellaneous Tasks - -- Follow GraphQL schema updates ([#209](https://github.com/reearth/reearth-web/pull/209)) [`8d9e75`](https://github.com/reearth/reearth-web/commit/8d9e75) -- Update all dependencies ([#210](https://github.com/reearth/reearth-web/pull/210)) [`c22b7a`](https://github.com/reearth/reearth-web/commit/c22b7a) - -### reearth-backend -#### 🔧 Bug Fixes - -- Renovate bot not running on schedule ([#136](https://github.com/reearth/reearth-backend/pull/136)) [`82843f`](https://github.com/reearth/reearth-backend/commit/82843f) -- Aud was changed and jwt could not be validated correctly [`985100`](https://github.com/reearth/reearth-backend/commit/985100) -- Auth audiences were unintentionally required [`7ec76a`](https://github.com/reearth/reearth-backend/commit/7ec76a) - -#### ✨ Refactor - -- Introduce generics, reorganize GraphQL schema ([#135](https://github.com/reearth/reearth-backend/pull/135)) 
[`04a098`](https://github.com/reearth/reearth-backend/commit/04a098) - -#### Miscellaneous Tasks - -- Update dependencies ([#134](https://github.com/reearth/reearth-backend/pull/134)) [`1b9b6b`](https://github.com/reearth/reearth-backend/commit/1b9b6b) - -## 0.6.0 - 2022-04-08 - -### reearth-web - -#### 🚀 Features - -- Add a plugin API to resize iframe by plugins ([#181](https://github.com/reearth/reearth-web/pull/181)) [`7c1019`](https://github.com/reearth/reearth-web/commit/7c1019) -- Authentication ([#121](https://github.com/reearth/reearth-web/pull/121)) [`b63018`](https://github.com/reearth/reearth-web/commit/b63018) -- Infinite scroll on assets ([#130](https://github.com/reearth/reearth-web/pull/130)) [`11f2f2`](https://github.com/reearth/reearth-web/commit/11f2f2) -- Basic plugin editor ([#184](https://github.com/reearth/reearth-web/pull/184)) [`1c4e09`](https://github.com/reearth/reearth-web/commit/1c4e09) - -#### 🔧 Bug Fixes - -- Unable to type RGBA values ([#180](https://github.com/reearth/reearth-web/pull/180)) [`f7345c`](https://github.com/reearth/reearth-web/commit/f7345c) -- Small height of block plugins [`8070a3`](https://github.com/reearth/reearth-web/commit/8070a3) -- Button widget squishing its text & infobox mask click away ([#185](https://github.com/reearth/reearth-web/pull/185)) [`ac7ef0`](https://github.com/reearth/reearth-web/commit/ac7ef0) -- Cannot select layers that activate infobox mask ([#186](https://github.com/reearth/reearth-web/pull/186)) [`d824b6`](https://github.com/reearth/reearth-web/commit/d824b6) -- Display error messages from auth server ([#187](https://github.com/reearth/reearth-web/pull/187)) [`e19fab`](https://github.com/reearth/reearth-web/commit/e19fab) -- Duplicate asset results ([#188](https://github.com/reearth/reearth-web/pull/188)) [`b3eb7f`](https://github.com/reearth/reearth-web/commit/b3eb7f) -- Workspace name cannot be changed, error displayed when deleting assets 
([#189](https://github.com/reearth/reearth-web/pull/189)) [`a99cf3`](https://github.com/reearth/reearth-web/commit/a99cf3) -- Multiple assets in infinite scroll and datasets not showing in DatasetPane ([#192](https://github.com/reearth/reearth-web/pull/192)) [`6f5c93`](https://github.com/reearth/reearth-web/commit/6f5c93) -- Asset modal showing only image-based assets ([#196](https://github.com/reearth/reearth-web/pull/196)) [`83a6bf`](https://github.com/reearth/reearth-web/commit/83a6bf) -- Screen becomes inoperable when errors occur in sign up [`820a04`](https://github.com/reearth/reearth-web/commit/820a04) -- Add missing translations [`a4c237`](https://github.com/reearth/reearth-web/commit/a4c237) - -#### Miscellaneous Tasks - -- Update dependency cesium to ^1.91.0 ([#182](https://github.com/reearth/reearth-web/pull/182)) [`603a5c`](https://github.com/reearth/reearth-web/commit/603a5c) -- Set default auth config to start app with zero configuration ([#191](https://github.com/reearth/reearth-web/pull/191)) [`d5a2aa`](https://github.com/reearth/reearth-web/commit/d5a2aa) - -### reearth-backend -#### 🚀 Features - -- Authentication system ([#108](https://github.com/reearth/reearth-backend/pull/108)) [`b89c32`](https://github.com/reearth/reearth-backend/commit/b89c32) -- Default mailer that outputs mails into stdout [`aab26c`](https://github.com/reearth/reearth-backend/commit/aab26c) -- Assets filtering & pagination ([#81](https://github.com/reearth/reearth-backend/pull/81)) [`739943`](https://github.com/reearth/reearth-backend/commit/739943) -- Support sign up with information provided by OIDC providers ([#130](https://github.com/reearth/reearth-backend/pull/130)) [`fef60e`](https://github.com/reearth/reearth-backend/commit/fef60e) - -#### 🔧 Bug Fixes - -- Load auth client domain from config ([#124](https://github.com/reearth/reearth-backend/pull/124)) [`9bde8a`](https://github.com/reearth/reearth-backend/commit/9bde8a) -- Signup fails when password is not set 
[`27c2f0`](https://github.com/reearth/reearth-backend/commit/27c2f0) -- Logger panics [`d1e3a8`](https://github.com/reearth/reearth-backend/commit/d1e3a8) -- Set auth server dev mode automatically [`83a66a`](https://github.com/reearth/reearth-backend/commit/83a66a) -- Auth server bugs and auth client bugs ([#125](https://github.com/reearth/reearth-backend/pull/125)) [`ce2309`](https://github.com/reearth/reearth-backend/commit/ce2309) -- Auth0 setting is not used by JWT verification middleware [`232e75`](https://github.com/reearth/reearth-backend/commit/232e75) -- Invalid mongo queries of pagination [`7caf68`](https://github.com/reearth/reearth-backend/commit/7caf68) -- Auth config not loaded expectedly [`570fe7`](https://github.com/reearth/reearth-backend/commit/570fe7) -- Users cannot creates a new team and scene [`5df25f`](https://github.com/reearth/reearth-backend/commit/5df25f) -- Auth server certificate is not saved as pem format [`982a71`](https://github.com/reearth/reearth-backend/commit/982a71) -- Repo filters are not merged expectedly [`f4cc3f`](https://github.com/reearth/reearth-backend/commit/f4cc3f) -- Auth is no longer required for GraphQL endpoint [`58a6d1`](https://github.com/reearth/reearth-backend/commit/58a6d1) -- Rename auth srv default client ID ([#128](https://github.com/reearth/reearth-backend/pull/128)) [`89adc3`](https://github.com/reearth/reearth-backend/commit/89adc3) -- Signup API is disabled when auth server is disabled, users and auth requests in mongo cannot be deleted ([#132](https://github.com/reearth/reearth-backend/pull/132)) [`47be6a`](https://github.com/reearth/reearth-backend/commit/47be6a) -- Auth to work with zero config ([#131](https://github.com/reearth/reearth-backend/pull/131)) [`3cbb45`](https://github.com/reearth/reearth-backend/commit/3cbb45) -- Property.SchemaListMap.List test fails [`3e6dff`](https://github.com/reearth/reearth-backend/commit/3e6dff) -- Errors when auth srv domain is not specified 
[`10691a`](https://github.com/reearth/reearth-backend/commit/10691a) -- Errors when auth srv domain is not specified [`648073`](https://github.com/reearth/reearth-backend/commit/648073) -- Login redirect does not work [`cb6ca4`](https://github.com/reearth/reearth-backend/commit/cb6ca4) -- Enable auth srv dev mode when no domain is specified [`0c0e28`](https://github.com/reearth/reearth-backend/commit/0c0e28) -- Add a trailing slash to jwt audiences [`e96f78`](https://github.com/reearth/reearth-backend/commit/e96f78) -- Allow separate auth server ui domain [`0ce79f`](https://github.com/reearth/reearth-backend/commit/0ce79f) - -#### ⚡️ Performance - -- Reduce database queries to obtain scene IDs ([#119](https://github.com/reearth/reearth-backend/pull/119)) [`784332`](https://github.com/reearth/reearth-backend/commit/784332) - -#### ✨ Refactor - -- Remove filter args from repos to prevent implementation errors in the use case layer ([#122](https://github.com/reearth/reearth-backend/pull/122)) [`82cf28`](https://github.com/reearth/reearth-backend/commit/82cf28) -- Http api to export layers [`3f2582`](https://github.com/reearth/reearth-backend/commit/3f2582) - -#### Miscellaneous Tasks - -- Update dependencies ([#117](https://github.com/reearth/reearth-backend/pull/117)) [`d1a38e`](https://github.com/reearth/reearth-backend/commit/d1a38e) -- Update docker-compose config [`83f9b1`](https://github.com/reearth/reearth-backend/commit/83f9b1) -- Add log for GraphQL Playground endpoint ([#133](https://github.com/reearth/reearth-backend/pull/133)) [`adeda4`](https://github.com/reearth/reearth-backend/commit/adeda4) - -## 0.5.0 - 2022-02-24 - -### reearth-web - -#### 🚀 Features - -- Allowing widget and block plugins to resize when they are expandable ([#170](https://github.com/reearth/reearth-web/pull/170)) [`4fdf5f`](https://github.com/reearth/reearth-web/commit/4fdf5f) -- Plugin APIs to get camera viewport and layers in the viewport 
([#165](https://github.com/reearth/reearth-web/pull/165)) [`f1f95a`](https://github.com/reearth/reearth-web/commit/f1f95a) -- Improving the Infobox style ([#176](https://github.com/reearth/reearth-web/pull/176)) [`f1ddda`](https://github.com/reearth/reearth-web/commit/f1ddda) - -#### 🔧 Bug Fixes - -- Plugin blocks cannot be deleted ([#164](https://github.com/reearth/reearth-web/pull/164)) [`a4f17f`](https://github.com/reearth/reearth-web/commit/a4f17f) -- Support tree-structured layers and tags in published pages ([#168](https://github.com/reearth/reearth-web/pull/168)) [`17d968`](https://github.com/reearth/reearth-web/commit/17d968) -- Workspace settings does not refresh ([#167](https://github.com/reearth/reearth-web/pull/167)) [`0f3654`](https://github.com/reearth/reearth-web/commit/0f3654) -- Plugin layersInViewport API returns errors for layers that have no location fields [`e52b44`](https://github.com/reearth/reearth-web/commit/e52b44) - -#### ✨ Refactor - -- Format codes [`219ac6`](https://github.com/reearth/reearth-web/commit/219ac6) -- Format codes [`4e5b61`](https://github.com/reearth/reearth-web/commit/4e5b61) - -#### Miscellaneous Tasks - -- Upgrade dependencies ([#175](https://github.com/reearth/reearth-web/pull/175)) [`dba959`](https://github.com/reearth/reearth-web/commit/dba959) - -### reearth-backend -#### 🚀 Features - -- Implement property.Diff and plugin/manifest.Diff ([#107](https://github.com/reearth/reearth-backend/pull/107)) [`700269`](https://github.com/reearth/reearth-backend/commit/700269) -- Support 3rd party plugin translation ([#109](https://github.com/reearth/reearth-backend/pull/109)) [`67a618`](https://github.com/reearth/reearth-backend/commit/67a618) -- Improve the Infobox style (manifest) ([#110](https://github.com/reearth/reearth-backend/pull/110)) [`7aebcd`](https://github.com/reearth/reearth-backend/commit/7aebcd) -- Overwrite installation of new plug-ins without removing (automatic property migration) 
([#113](https://github.com/reearth/reearth-backend/pull/113)) [`2dc192`](https://github.com/reearth/reearth-backend/commit/2dc192) -- Update infobox style fields ([#115](https://github.com/reearth/reearth-backend/pull/115)) [`608436`](https://github.com/reearth/reearth-backend/commit/608436) - -#### 🔧 Bug Fixes - -- Scene exporter should export layers and tags while maintaining the tree structure ([#104](https://github.com/reearth/reearth-backend/pull/104)) [`805d78`](https://github.com/reearth/reearth-backend/commit/805d78) -- Property field in groups in list cannot be updated correctly [`5009c5`](https://github.com/reearth/reearth-backend/commit/5009c5) -- Scenes and properties are not updated properly when plugin is updated [`861c4b`](https://github.com/reearth/reearth-backend/commit/861c4b) -- Scene widgets and blocks are not update properly when plugin is updated [`f66f9a`](https://github.com/reearth/reearth-backend/commit/f66f9a) - -#### ✨ Refactor - -- Graphql resolvers ([#105](https://github.com/reearth/reearth-backend/pull/105)) [`01a4e6`](https://github.com/reearth/reearth-backend/commit/01a4e6) - -#### Miscellaneous Tasks - -- Update all dependencies ([#111](https://github.com/reearth/reearth-backend/pull/111)) [`173881`](https://github.com/reearth/reearth-backend/commit/173881) -- Increase batch size of db migration [ci skip] [`fbbca4`](https://github.com/reearth/reearth-backend/commit/fbbca4) - -## 0.4.0 - 2022-01-27 - -### reearth-web -#### 🚀 Features - -- Add "clamp to filed" option to file primitive ([#155](https://github.com/reearth/reearth-web/pull/155)) [`2e83ba`](https://github.com/reearth/reearth-web/commit/2e83ba) -- Infobox padding ([#158](https://github.com/reearth/reearth-web/pull/158)) [`90084f`](https://github.com/reearth/reearth-web/commit/90084f) -- Support tags in plugin API ([#153](https://github.com/reearth/reearth-web/pull/153)) [`1031c5`](https://github.com/reearth/reearth-web/commit/1031c5) - -#### 🔧 Bug Fixes - -- 
Enable to select blocks of plugins ([#162](https://github.com/reearth/reearth-web/pull/162)) [`458402`](https://github.com/reearth/reearth-web/commit/458402) -- Cesium Ion acces token is not set expectedly ([#160](https://github.com/reearth/reearth-web/pull/160)) [`e8e183`](https://github.com/reearth/reearth-web/commit/e8e183) -- Cluster styling issue ([#161](https://github.com/reearth/reearth-web/pull/161)) [`c78872`](https://github.com/reearth/reearth-web/commit/c78872) -- Clusters and layers are not displayed correctly [`4fc124`](https://github.com/reearth/reearth-web/commit/4fc124) -- Type error [`b01bc7`](https://github.com/reearth/reearth-web/commit/b01bc7) -- The style of infobox block dropdown list is broken ([#163](https://github.com/reearth/reearth-web/pull/163)) [`6e02a9`](https://github.com/reearth/reearth-web/commit/6e02a9) -- Plugin blocks protrude from the infobox [`6cf0d3`](https://github.com/reearth/reearth-web/commit/6cf0d3) - -#### ✨ Refactor - -- Layer clustering feature ([#157](https://github.com/reearth/reearth-web/pull/157)) [`db6e6c`](https://github.com/reearth/reearth-web/commit/db6e6c) -- Camera limiter ([#149](https://github.com/reearth/reearth-web/pull/149)) [`105428`](https://github.com/reearth/reearth-web/commit/105428) -- Layer clustering feature (GraphQL) ([#159](https://github.com/reearth/reearth-web/pull/159)) [`4365b8`](https://github.com/reearth/reearth-web/commit/4365b8) - -### reearth-backend -#### 🚀 Features - -- Add "clamp to ground" option to file primitive ([#95](https://github.com/reearth/reearth-backend/pull/95)) [`559194`](https://github.com/reearth/reearth-backend/commit/559194) -- Infobox and text block padding ([#100](https://github.com/reearth/reearth-backend/pull/100)) [`ddd0db`](https://github.com/reearth/reearth-backend/commit/ddd0db) - -#### ⚡️ Performance - -- Add indexes of mongo collections ([#98](https://github.com/reearth/reearth-backend/pull/98)) 
[`691cb7`](https://github.com/reearth/reearth-backend/commit/691cb7) - -#### ✨ Refactor - -- Pkg/id, use ID aliases, move JSON schemas ([#97](https://github.com/reearth/reearth-backend/pull/97)) [`1265ac`](https://github.com/reearth/reearth-backend/commit/1265ac) -- Unit tests ([#99](https://github.com/reearth/reearth-backend/pull/99)) [`0d112c`](https://github.com/reearth/reearth-backend/commit/0d112c) -- Pkg/property, pkg/layer, pkg/plugin ([#101](https://github.com/reearth/reearth-backend/pull/101)) [`17a463`](https://github.com/reearth/reearth-backend/commit/17a463) - -## 0.3.0 - 2022-01-11 - -### reearth-web - -#### 🚀 Features - -- Enhance terrain feature (type selection, exaggeration) ([#138](https://github.com/reearth/reearth-web/pull/138)) [`dae137`](https://github.com/reearth/reearth-web/commit/dae137) -- Clustering layers ([#143](https://github.com/reearth/reearth-web/pull/143)) [`3439cc`](https://github.com/reearth/reearth-web/commit/3439cc) -- Camera limiter ([#142](https://github.com/reearth/reearth-web/pull/142)) [`dec1dd`](https://github.com/reearth/reearth-web/commit/dec1dd) -- Tagging of layers ([#144](https://github.com/reearth/reearth-web/pull/144)) [`4d0a40`](https://github.com/reearth/reearth-web/commit/4d0a40) - -#### 🔧 Bug Fixes - -- Indicator is not displayed on selecting of clustered layer ([#146](https://github.com/reearth/reearth-web/pull/146)) [`e41f67`](https://github.com/reearth/reearth-web/commit/e41f67) -- Use data URL for marker images [`576ea4`](https://github.com/reearth/reearth-web/commit/576ea4) -- Layer clusters do not updated correctly [`ec74f6`](https://github.com/reearth/reearth-web/commit/ec74f6) -- Position label in front of billboard ([#147](https://github.com/reearth/reearth-web/pull/147)) [`81c533`](https://github.com/reearth/reearth-web/commit/81c533) -- Public pages do not work due to clustering feature [`48d8b3`](https://github.com/reearth/reearth-web/commit/48d8b3) -- Photooverlay transition does not work in 
Android ([#154](https://github.com/reearth/reearth-web/pull/154)) [`decbfe`](https://github.com/reearth/reearth-web/commit/decbfe) - -#### 🎨 Styling - -- Fix the height of the header [`9d6acc`](https://github.com/reearth/reearth-web/commit/9d6acc) - -#### Miscellaneous Tasks - -- Upgrade dependencies ([#134](https://github.com/reearth/reearth-web/pull/134)) [`740821`](https://github.com/reearth/reearth-web/commit/740821) -- Update dependency cesium to ^1.88.0 ([#139](https://github.com/reearth/reearth-web/pull/139)) [`7afdfb`](https://github.com/reearth/reearth-web/commit/7afdfb) -- Fix webpack dev server config [`8d06fa`](https://github.com/reearth/reearth-web/commit/8d06fa) -- Update dependency cesium to ^1.89.0 ([#156](https://github.com/reearth/reearth-web/pull/156)) [`d436ce`](https://github.com/reearth/reearth-web/commit/d436ce) - -### reearth-backend - -#### 🚀 Features - -- Clusters for scenes ([#75](https://github.com/reearth/reearth-backend/pull/75)) [`3512c0`](https://github.com/reearth/reearth-backend/commit/3512c0) -- Add fields of scene property for terrain [`8693b4`](https://github.com/reearth/reearth-backend/commit/8693b4) -- Camera limiter ([#87](https://github.com/reearth/reearth-backend/pull/87)) [`63c582`](https://github.com/reearth/reearth-backend/commit/63c582) - -#### 🔧 Bug Fixes - -- Terrain fields of scene property [`5e3d25`](https://github.com/reearth/reearth-backend/commit/5e3d25) -- Numbers are not decoded from gql to value [`2ddbc8`](https://github.com/reearth/reearth-backend/commit/2ddbc8) -- Layers have their own tags separate from the scene ([#90](https://github.com/reearth/reearth-backend/pull/90)) [`c4fb9a`](https://github.com/reearth/reearth-backend/commit/c4fb9a) -- Return property with clusters data ([#89](https://github.com/reearth/reearth-backend/pull/89)) [`1b99c6`](https://github.com/reearth/reearth-backend/commit/1b99c6) -- Cast values, rename value.OptionalValue 
([#93](https://github.com/reearth/reearth-backend/pull/93)) [`ba4b18`](https://github.com/reearth/reearth-backend/commit/ba4b18) -- Synchronize mongo migration ([#94](https://github.com/reearth/reearth-backend/pull/94)) [`db4cea`](https://github.com/reearth/reearth-backend/commit/db4cea) - -#### 📖 Documentation - -- Add pkg.go.dev badge to readme [`91f9b3`](https://github.com/reearth/reearth-backend/commit/91f9b3) - -#### ✨ Refactor - -- Make property.Value and dataset.Value independent in pkg/value ([#77](https://github.com/reearth/reearth-backend/pull/77)) [`73143b`](https://github.com/reearth/reearth-backend/commit/73143b) - -#### Miscellaneous Tasks - -- Fix plugin manifest JSON schema [`2b57b1`](https://github.com/reearth/reearth-backend/commit/2b57b1) - - -## 0.2.0 - 2021-11-18 - -#### reearth-web -#### 🚀 Features - -- Widget align system for mobile ([#115](https://github.com/reearth/reearth-web/pull/115)) [`afa4ba`](https://github.com/reearth/reearth-web/commit/afa4ba) -- Support dataset schema preview and create layer group from selected primitive type ([#74](https://github.com/reearth/reearth-web/pull/74)) [`769b86`](https://github.com/reearth/reearth-web/commit/769b86) - -#### 🔧 Bug Fixes - -- Markdown background color is not transparent ([#123](https://github.com/reearth/reearth-web/pull/123)) [`f16706`](https://github.com/reearth/reearth-web/commit/f16706) -- Layers would not be marshalled correctly ([#126](https://github.com/reearth/reearth-web/pull/126)) [`886302`](https://github.com/reearth/reearth-web/commit/886302) -- Widget align system issues ([#124](https://github.com/reearth/reearth-web/pull/124)) [`3bc9fa`](https://github.com/reearth/reearth-web/commit/3bc9fa) -- Project setting page does not display correctly after creating a new project ([#127](https://github.com/reearth/reearth-web/pull/127)) [`c120dc`](https://github.com/reearth/reearth-web/commit/c120dc) -- Dataset info pane shows its property though after selected dataset 
schema is deleted ([#131](https://github.com/reearth/reearth-web/pull/131)) [`2307d8`](https://github.com/reearth/reearth-web/commit/2307d8) - -#### Miscellaneous Tasks - -- Disable storybook workflow for release commit [`80f4d2`](https://github.com/reearth/reearth-web/commit/80f4d2) -- Change semantic commit type of renovate PRs, omit ci skip in changelog [`4a3e9e`](https://github.com/reearth/reearth-web/commit/4a3e9e) -- Follow backend GraphQL schema update ([#120](https://github.com/reearth/reearth-web/pull/120)) [`aeee1f`](https://github.com/reearth/reearth-web/commit/aeee1f) -- Load local reearth-config.json for debugging ([#119](https://github.com/reearth/reearth-web/pull/119)) [`6115ee`](https://github.com/reearth/reearth-web/commit/6115ee) -- Update dependency cesium to ^1.87.0 ([#118](https://github.com/reearth/reearth-web/pull/118)) [`7c65d0`](https://github.com/reearth/reearth-web/commit/7c65d0) -- Update dependency cesium to ^1.87.1 ([#128](https://github.com/reearth/reearth-web/pull/128)) [`a63aa7`](https://github.com/reearth/reearth-web/commit/a63aa7) -- Update codecov.yml to add ignored files [`b72f17`](https://github.com/reearth/reearth-web/commit/b72f17) - - -### reearth-backend -#### 🚀 Features - -- Support opentelemetry ([#68](https://github.com/reearth/reearth-backend/pull/68)) [`25c581`](https://github.com/reearth/reearth-backend/commit/25c581) - -#### 🔧 Bug Fixes - -- Add an index to mongo project collection to prevent creating projects whose alias is duplicated [`10f745`](https://github.com/reearth/reearth-backend/commit/10f745) -- Check project alias duplication on project update [`443f2c`](https://github.com/reearth/reearth-backend/commit/443f2c) - -#### ✨ Refactor - -- Add PropertySchemaGroupID to pkg/id ([#70](https://github.com/reearth/reearth-backend/pull/70)) [`9ece9e`](https://github.com/reearth/reearth-backend/commit/9ece9e) - -#### Miscellaneous Tasks - -- Fix typo in github actions 
[`4a9dc5`](https://github.com/reearth/reearth-backend/commit/4a9dc5) -- Clean up unused code [`b5b01b`](https://github.com/reearth/reearth-backend/commit/b5b01b) -- Update codecov.yml to add ignored files [`d54309`](https://github.com/reearth/reearth-backend/commit/d54309) -- Ignore generated files in codecov [`9d3822`](https://github.com/reearth/reearth-backend/commit/9d3822) -- Upgrade dependencies [`215947`](https://github.com/reearth/reearth-backend/commit/215947) - - -## 0.1.0 - 2021-11-02 - -### reearth-web -#### 🚀 Features - -- Support Auth0 audience ([#2](https://github.com/reearth/reearth-web/pull/2)) [`0ad0f6`](https://github.com/reearth/reearth-web/commit/0ad0f6) -- Asset modal redesign ([#1](https://github.com/reearth/reearth-web/pull/1)) [`f71117`](https://github.com/reearth/reearth-web/commit/f71117) -- Basic auth for projects ([#3](https://github.com/reearth/reearth-web/pull/3)) [`372c4e`](https://github.com/reearth/reearth-web/commit/372c4e) -- Google analytics ([#6](https://github.com/reearth/reearth-web/pull/6)) [`01aadf`](https://github.com/reearth/reearth-web/commit/01aadf) -- Refine setting page ([#19](https://github.com/reearth/reearth-web/pull/19)) [`d06ee7`](https://github.com/reearth/reearth-web/commit/d06ee7) -- Add delete assets confirm modal and fix bugs ([#25](https://github.com/reearth/reearth-web/pull/25)) [`0310f5`](https://github.com/reearth/reearth-web/commit/0310f5) -- Update link system and UI ([#12](https://github.com/reearth/reearth-web/pull/12)) [`51de77`](https://github.com/reearth/reearth-web/commit/51de77) -- Import google sheet dataset ([#14](https://github.com/reearth/reearth-web/pull/14)) [`21b167`](https://github.com/reearth/reearth-web/commit/21b167) -- Support multi-line infobox titles ([#40](https://github.com/reearth/reearth-web/pull/40)) [`4cddcc`](https://github.com/reearth/reearth-web/commit/4cddcc) -- Public settings page ([#32](https://github.com/reearth/reearth-web/pull/32)) 
[`ebfd41`](https://github.com/reearth/reearth-web/commit/ebfd41) -- Refine readme ([#51](https://github.com/reearth/reearth-web/pull/51)) [`41ddb3`](https://github.com/reearth/reearth-web/commit/41ddb3) -- Add light theme ([#52](https://github.com/reearth/reearth-web/pull/52)) [`26159b`](https://github.com/reearth/reearth-web/commit/26159b) -- Add a short discription for light theme ([#56](https://github.com/reearth/reearth-web/pull/56)) [`8b092d`](https://github.com/reearth/reearth-web/commit/8b092d) -- Plugins settings page, install/uninstall plugins ([#22](https://github.com/reearth/reearth-web/pull/22)) [`018674`](https://github.com/reearth/reearth-web/commit/018674) -- Plugin system, refactoring visualizer ([#50](https://github.com/reearth/reearth-web/pull/50)) [`172939`](https://github.com/reearth/reearth-web/commit/172939) -- 3D tileset, model, rectangle primitive, more properties for marker and scene ([#63](https://github.com/reearth/reearth-web/pull/63)) [`a88600`](https://github.com/reearth/reearth-web/commit/a88600) -- Graphiql page ([#70](https://github.com/reearth/reearth-web/pull/70)) [`aa5d10`](https://github.com/reearth/reearth-web/commit/aa5d10) -- Enable to set theme for the scene ([#67](https://github.com/reearth/reearth-web/pull/67)) [`58e670`](https://github.com/reearth/reearth-web/commit/58e670) -- Notification system update ([#73](https://github.com/reearth/reearth-web/pull/73)) [`92cdbb`](https://github.com/reearth/reearth-web/commit/92cdbb) -- Widget align system ([#61](https://github.com/reearth/reearth-web/pull/61)) [`ed2940`](https://github.com/reearth/reearth-web/commit/ed2940) -- Plugin system beta ([#87](https://github.com/reearth/reearth-web/pull/87)) [`d76f1c`](https://github.com/reearth/reearth-web/commit/d76f1c) -- Enhance extended field of widget in plugin API ([#90](https://github.com/reearth/reearth-web/pull/90)) [`06cb14`](https://github.com/reearth/reearth-web/commit/06cb14) -- Add overrideProperty to plugin layers API 
([#92](https://github.com/reearth/reearth-web/pull/92)) [`563f88`](https://github.com/reearth/reearth-web/commit/563f88) -- Add a fallback icon for extensions that do not have an icon ([#98](https://github.com/reearth/reearth-web/pull/98)) [`50de1f`](https://github.com/reearth/reearth-web/commit/50de1f) -- Add password validation ([#86](https://github.com/reearth/reearth-web/pull/86)) [`2017aa`](https://github.com/reearth/reearth-web/commit/2017aa) -- Draggable layer ([#58](https://github.com/reearth/reearth-web/pull/58)) [`25a217`](https://github.com/reearth/reearth-web/commit/25a217) -- Multi widgets ([#99](https://github.com/reearth/reearth-web/pull/99)) [`bea1a3`](https://github.com/reearth/reearth-web/commit/bea1a3) -- Front-end for new authentication system ([#102](https://github.com/reearth/reearth-web/pull/102)) [`964d92`](https://github.com/reearth/reearth-web/commit/964d92) -- Add layers.overriddenProperties, find, findAll, walk ([#110](https://github.com/reearth/reearth-web/pull/110)) [`ebe131`](https://github.com/reearth/reearth-web/commit/ebe131) - -#### 🔧 Bug Fixes - -- Reorganize config [`f2e947`](https://github.com/reearth/reearth-web/commit/f2e947) -- Update gql schema [`0905b6`](https://github.com/reearth/reearth-web/commit/0905b6) -- Update dependency cesium to ^1.82.1 ([#4](https://github.com/reearth/reearth-web/pull/4)) [`0627bf`](https://github.com/reearth/reearth-web/commit/0627bf) -- Google analytics ([#7](https://github.com/reearth/reearth-web/pull/7)) [`7505ca`](https://github.com/reearth/reearth-web/commit/7505ca) -- Sprint15 bugs ([#8](https://github.com/reearth/reearth-web/pull/8)) [`e2fe0a`](https://github.com/reearth/reearth-web/commit/e2fe0a) -- Google analytics typo ([#9](https://github.com/reearth/reearth-web/pull/9)) [`943b5e`](https://github.com/reearth/reearth-web/commit/943b5e) -- Ga-typo2 ([#10](https://github.com/reearth/reearth-web/pull/10)) [`b498de`](https://github.com/reearth/reearth-web/commit/b498de) -- Force logout 
when me query returns null ([#15](https://github.com/reearth/reearth-web/pull/15)) [`339d61`](https://github.com/reearth/reearth-web/commit/339d61) -- Infinit logout loop ([#17](https://github.com/reearth/reearth-web/pull/17)) [`0d510f`](https://github.com/reearth/reearth-web/commit/0d510f) -- Change data.json path [`38a69a`](https://github.com/reearth/reearth-web/commit/38a69a) -- Menu button width ([#21](https://github.com/reearth/reearth-web/pull/21)) [`d08eba`](https://github.com/reearth/reearth-web/commit/d08eba) -- Menu widget bugs ([#37](https://github.com/reearth/reearth-web/pull/37)) [`5d5483`](https://github.com/reearth/reearth-web/commit/5d5483) -- Marker label position is oposite to actual display ([#39](https://github.com/reearth/reearth-web/pull/39)) [`38de46`](https://github.com/reearth/reearth-web/commit/38de46) -- Disable default cesium mouse event ([#42](https://github.com/reearth/reearth-web/pull/42)) [`129ae3`](https://github.com/reearth/reearth-web/commit/129ae3) -- Show layers in storytelling without names ([#45](https://github.com/reearth/reearth-web/pull/45)) [`00ae3c`](https://github.com/reearth/reearth-web/commit/00ae3c) -- Infobox colors ([#47](https://github.com/reearth/reearth-web/pull/47)) [`2a6a36`](https://github.com/reearth/reearth-web/commit/2a6a36) -- Project public image ([#48](https://github.com/reearth/reearth-web/pull/48)) [`91b5ee`](https://github.com/reearth/reearth-web/commit/91b5ee) -- Auth0 redirect uri [`8336a3`](https://github.com/reearth/reearth-web/commit/8336a3) -- Storybook ([#54](https://github.com/reearth/reearth-web/pull/54)) [`fde0c0`](https://github.com/reearth/reearth-web/commit/fde0c0) -- Published data url [`e3d5b0`](https://github.com/reearth/reearth-web/commit/e3d5b0) -- Icon background ([#64](https://github.com/reearth/reearth-web/pull/64)) [`9c69a4`](https://github.com/reearth/reearth-web/commit/9c69a4) -- Prevent extra render, cannot rename layers, cannot display infobox on dataset layers 
([#65](https://github.com/reearth/reearth-web/pull/65)) [`e3d618`](https://github.com/reearth/reearth-web/commit/e3d618) -- Remove visibility icon from layer actions [`0ad8aa`](https://github.com/reearth/reearth-web/commit/0ad8aa) -- Default published url, rename layer when focus is removed from text box [`f9accc`](https://github.com/reearth/reearth-web/commit/f9accc) -- Storybook error ([#75](https://github.com/reearth/reearth-web/pull/75)) [`f27f9b`](https://github.com/reearth/reearth-web/commit/f27f9b) -- Showing members section for personal workspace ([#85](https://github.com/reearth/reearth-web/pull/85)) [`8e78f9`](https://github.com/reearth/reearth-web/commit/8e78f9) -- Widget bugs, language ([#89](https://github.com/reearth/reearth-web/pull/89)) [`9de9df`](https://github.com/reearth/reearth-web/commit/9de9df) -- Update dependency cesium to ^1.86.0 ([#93](https://github.com/reearth/reearth-web/pull/93)) [`7ca298`](https://github.com/reearth/reearth-web/commit/7ca298) -- Show properties of 3D tile features on infobox ([#95](https://github.com/reearth/reearth-web/pull/95)) [`a9cc23`](https://github.com/reearth/reearth-web/commit/a9cc23) -- Navigator.language should be used as fallback lang ([#91](https://github.com/reearth/reearth-web/pull/91)) [`15df16`](https://github.com/reearth/reearth-web/commit/15df16) -- Camera property panel bugs ([#96](https://github.com/reearth/reearth-web/pull/96)) [`2c3eaa`](https://github.com/reearth/reearth-web/commit/2c3eaa) -- Camera flight bugs ([#97](https://github.com/reearth/reearth-web/pull/97)) [`b4f1ae`](https://github.com/reearth/reearth-web/commit/b4f1ae) -- Storytelling image crop does not work [`9c23b3`](https://github.com/reearth/reearth-web/commit/9c23b3) -- Export pane is not displayed [`58ceda`](https://github.com/reearth/reearth-web/commit/58ceda) -- 1st bug hunt of october ([#100](https://github.com/reearth/reearth-web/pull/100)) [`1b9032`](https://github.com/reearth/reearth-web/commit/1b9032) -- Layers 
disappearing when in nested folders ([#101](https://github.com/reearth/reearth-web/pull/101)) [`778395`](https://github.com/reearth/reearth-web/commit/778395) -- Update dependency cesium to ^1.86.1 ([#103](https://github.com/reearth/reearth-web/pull/103)) [`385582`](https://github.com/reearth/reearth-web/commit/385582) -- Bug bounty #2 ([#105](https://github.com/reearth/reearth-web/pull/105)) [`da4815`](https://github.com/reearth/reearth-web/commit/da4815) -- Button widget ([#111](https://github.com/reearth/reearth-web/pull/111)) [`b93485`](https://github.com/reearth/reearth-web/commit/b93485) -- Create team redirect + translations update ([#112](https://github.com/reearth/reearth-web/pull/112)) [`bafcfd`](https://github.com/reearth/reearth-web/commit/bafcfd) -- 3d tile styles not updating sometimes ([#109](https://github.com/reearth/reearth-web/pull/109)) [`1e92b8`](https://github.com/reearth/reearth-web/commit/1e92b8) -- Layers.overrideProperty property merging and rerendering ([#108](https://github.com/reearth/reearth-web/pull/108)) [`e5c255`](https://github.com/reearth/reearth-web/commit/e5c255) -- Password policy conversion in config ([#113](https://github.com/reearth/reearth-web/pull/113)) [`5d57c4`](https://github.com/reearth/reearth-web/commit/5d57c4) -- Password validation, add autofocus ([#117](https://github.com/reearth/reearth-web/pull/117)) [`348454`](https://github.com/reearth/reearth-web/commit/348454) -- Password verification, add better feedback [`bd1725`](https://github.com/reearth/reearth-web/commit/bd1725) - -#### โœจ Refactor - -- Use jotai instead of redux ([#68](https://github.com/reearth/reearth-web/pull/68)) [`ea980c`](https://github.com/reearth/reearth-web/commit/ea980c) -- Replace deprecated gql fields, pass widgetId to widget mutations ([#72](https://github.com/reearth/reearth-web/pull/72)) [`f36c86`](https://github.com/reearth/reearth-web/commit/f36c86) - -#### ๐ŸŽจ Styling - -- Refine font 
([#49](https://github.com/reearth/reearth-web/pull/49)) [`8b3755`](https://github.com/reearth/reearth-web/commit/8b3755) -- Refine color vo.1 ([#59](https://github.com/reearth/reearth-web/pull/59)) [`ab7bce`](https://github.com/reearth/reearth-web/commit/ab7bce) - -#### ๐Ÿงช Testing - -- Fix e2e test [`3bcd2d`](https://github.com/reearth/reearth-web/commit/3bcd2d) -- Fix e2e test [`b3e512`](https://github.com/reearth/reearth-web/commit/b3e512) -- Fix e2e test [`277f4e`](https://github.com/reearth/reearth-web/commit/277f4e) -- Fix e2e test [`396f71`](https://github.com/reearth/reearth-web/commit/396f71) -- Fix e2e test [`a8bd0c`](https://github.com/reearth/reearth-web/commit/a8bd0c) -- Fix e2e test [`fd7cf5`](https://github.com/reearth/reearth-web/commit/fd7cf5) -- Fix e2e test [`8c300b`](https://github.com/reearth/reearth-web/commit/8c300b) -- Fix e2e test [`ea5050`](https://github.com/reearth/reearth-web/commit/ea5050) -- Fix e2e test [`866c8c`](https://github.com/reearth/reearth-web/commit/866c8c) -- Support display name in e2e test [`0edf58`](https://github.com/reearth/reearth-web/commit/0edf58) - -#### Miscellaneous Tasks - -- Update workflows, set up nightly release [`0ea0ff`](https://github.com/reearth/reearth-web/commit/0ea0ff) -- Fix nightly release workflow [`d7d1d3`](https://github.com/reearth/reearth-web/commit/d7d1d3) -- Fix config [`7a6ed3`](https://github.com/reearth/reearth-web/commit/7a6ed3) -- Set up cd workflows [`a6f0f5`](https://github.com/reearth/reearth-web/commit/a6f0f5) -- Fix workflows [`97ecf8`](https://github.com/reearth/reearth-web/commit/97ecf8) -- Fix workflows [`a4d451`](https://github.com/reearth/reearth-web/commit/a4d451) -- Fix workflows [`d77b53`](https://github.com/reearth/reearth-web/commit/d77b53) -- Remove unused deps [`81d0eb`](https://github.com/reearth/reearth-web/commit/81d0eb) -- Update cesium [`414b37`](https://github.com/reearth/reearth-web/commit/414b37) -- Update renovate config 
[`b36740`](https://github.com/reearth/reearth-web/commit/b36740) -- Use .env instead of .env.local [`0b8720`](https://github.com/reearth/reearth-web/commit/0b8720) -- Add storybook workflow [`c624bd`](https://github.com/reearth/reearth-web/commit/c624bd) -- Set up sentry ([#18](https://github.com/reearth/reearth-web/pull/18)) [`8a2d38`](https://github.com/reearth/reearth-web/commit/8a2d38) -- Testable published page ([#43](https://github.com/reearth/reearth-web/pull/43)) [`90c37d`](https://github.com/reearth/reearth-web/commit/90c37d) -- Update netlify.toml [`230e12`](https://github.com/reearth/reearth-web/commit/230e12) -- Add gql sclar types [`09fb76`](https://github.com/reearth/reearth-web/commit/09fb76) -- Update cesium and resium ([#79](https://github.com/reearth/reearth-web/pull/79)) [`c41601`](https://github.com/reearth/reearth-web/commit/c41601) -- Update eslint, enable eslint-plugin-import, perform formatting ([#82](https://github.com/reearth/reearth-web/pull/82)) [`117bab`](https://github.com/reearth/reearth-web/commit/117bab) -- Upgrade dependencies [`4924f9`](https://github.com/reearth/reearth-web/commit/4924f9) -- Fix cypress and unit test [`97f74e`](https://github.com/reearth/reearth-web/commit/97f74e) -- Support for dotenv switching ([#106](https://github.com/reearth/reearth-web/pull/106)) [`cd1974`](https://github.com/reearth/reearth-web/commit/cd1974) -- Upgrade modules oct ([#107](https://github.com/reearth/reearth-web/pull/107)) [`24c145`](https://github.com/reearth/reearth-web/commit/24c145) -- Upgrade react-align ([#116](https://github.com/reearth/reearth-web/pull/116)) [`7f4b98`](https://github.com/reearth/reearth-web/commit/7f4b98) -- Add github workflows to release [`331afb`](https://github.com/reearth/reearth-web/commit/331afb) -- Update translations + format ([#114](https://github.com/reearth/reearth-web/pull/114)) [`7f191e`](https://github.com/reearth/reearth-web/commit/7f191e) -- Lock file maintenance 
([#66](https://github.com/reearth/reearth-web/pull/66)) [`6d2a2d`](https://github.com/reearth/reearth-web/commit/6d2a2d) -- Fix slack notifications in workflow [skip ci] [`b4fa35`](https://github.com/reearth/reearth-web/commit/b4fa35) -- Fix sed in release workflow [skip ci] [`f3cd74`](https://github.com/reearth/reearth-web/commit/f3cd74) - - -### reearth-backend -#### ๐Ÿš€ Features - -- Support Auth0 audience ([#3](https://github.com/reearth/reearth-backend/pull/3)) [`c3758e`](https://github.com/reearth/reearth-backend/commit/c3758e) -- Basic auth for projects ([#6](https://github.com/reearth/reearth-backend/pull/6)) [`5db065`](https://github.com/reearth/reearth-backend/commit/5db065) -- Google analytics for scene ([#10](https://github.com/reearth/reearth-backend/pull/10)) [`b44249`](https://github.com/reearth/reearth-backend/commit/b44249) -- Create installable plugins ([#1](https://github.com/reearth/reearth-backend/pull/1)) [`5b7a5f`](https://github.com/reearth/reearth-backend/commit/5b7a5f) -- Add thumbnail, author fields on plugin metadata query ([#15](https://github.com/reearth/reearth-backend/pull/15)) [`888fe0`](https://github.com/reearth/reearth-backend/commit/888fe0) -- Published page api ([#11](https://github.com/reearth/reearth-backend/pull/11)) [`aebac3`](https://github.com/reearth/reearth-backend/commit/aebac3) -- Import dataset from google sheets ([#16](https://github.com/reearth/reearth-backend/pull/16)) [`2ef7ef`](https://github.com/reearth/reearth-backend/commit/2ef7ef) -- Add scenePlugin resolver to layers ([#20](https://github.com/reearth/reearth-backend/pull/20)) [`5213f3`](https://github.com/reearth/reearth-backend/commit/5213f3) -- Marker label position [`bb9e4c`](https://github.com/reearth/reearth-backend/commit/bb9e4c) -- Refine dataset import ([#26](https://github.com/reearth/reearth-backend/pull/26)) [`5dd3db`](https://github.com/reearth/reearth-backend/commit/5dd3db) -- Plugin upload and deletion 
([#33](https://github.com/reearth/reearth-backend/pull/33)) [`8742db`](https://github.com/reearth/reearth-backend/commit/8742db) -- New primitives, new properties on primitives [`108711`](https://github.com/reearth/reearth-backend/commit/108711) -- Set scene theme ([#35](https://github.com/reearth/reearth-backend/pull/35)) [`2e4f52`](https://github.com/reearth/reearth-backend/commit/2e4f52) -- Widget align system ([#19](https://github.com/reearth/reearth-backend/pull/19)) [`94611f`](https://github.com/reearth/reearth-backend/commit/94611f) -- Tag system ([#67](https://github.com/reearth/reearth-backend/pull/67)) [`163fcf`](https://github.com/reearth/reearth-backend/commit/163fcf) - -#### ๐Ÿ”ง Bug Fixes - -- Add mutex for each memory repo ([#2](https://github.com/reearth/reearth-backend/pull/2)) [`f4c3b0`](https://github.com/reearth/reearth-backend/commit/f4c3b0) -- Auth0 audience in reearth_config.json [`72e3ed`](https://github.com/reearth/reearth-backend/commit/72e3ed) -- Auth0 domain and multiple auds [`835a02`](https://github.com/reearth/reearth-backend/commit/835a02) -- Signing up and deleting user [`f17b9d`](https://github.com/reearth/reearth-backend/commit/f17b9d) -- Deleting user [`e9b8c9`](https://github.com/reearth/reearth-backend/commit/e9b8c9) -- Sign up and update user [`e5ab87`](https://github.com/reearth/reearth-backend/commit/e5ab87) -- Make gql mutation payloads optional [`9b1c4a`](https://github.com/reearth/reearth-backend/commit/9b1c4a) -- Auth0 [`6a27c6`](https://github.com/reearth/reearth-backend/commit/6a27c6) -- Errors are overwritten by tx [`2d08c5`](https://github.com/reearth/reearth-backend/commit/2d08c5) -- Deleting user [`f531bd`](https://github.com/reearth/reearth-backend/commit/f531bd) -- Always enable dev mode in debug [`0815d3`](https://github.com/reearth/reearth-backend/commit/0815d3) -- User deletion [`a5eeae`](https://github.com/reearth/reearth-backend/commit/a5eeae) -- Invisible layer issue in published project
([#7](https://github.com/reearth/reearth-backend/pull/7)) [`06cd44`](https://github.com/reearth/reearth-backend/commit/06cd44) -- Dataset link merge bug #378 ([#18](https://github.com/reearth/reearth-backend/pull/18)) [`25da0d`](https://github.com/reearth/reearth-backend/commit/25da0d) -- Ogp image for published page ([#17](https://github.com/reearth/reearth-backend/pull/17)) [`dcb4b0`](https://github.com/reearth/reearth-backend/commit/dcb4b0) -- Change default value of marker label position [`a2059e`](https://github.com/reearth/reearth-backend/commit/a2059e) -- Import dataset from google sheet bug ([#23](https://github.com/reearth/reearth-backend/pull/23)) [`077558`](https://github.com/reearth/reearth-backend/commit/077558) -- Public api param [`846957`](https://github.com/reearth/reearth-backend/commit/846957) -- Replace strings.Split() with strings.field() ([#25](https://github.com/reearth/reearth-backend/pull/25)) [`ba7d16`](https://github.com/reearth/reearth-backend/commit/ba7d16) -- Project public image type [`e82b54`](https://github.com/reearth/reearth-backend/commit/e82b54) -- Published API ([#27](https://github.com/reearth/reearth-backend/pull/27)) [`8ad1f8`](https://github.com/reearth/reearth-backend/commit/8ad1f8) -- Plugin manifest parser bugs ([#32](https://github.com/reearth/reearth-backend/pull/32)) [`78ac13`](https://github.com/reearth/reearth-backend/commit/78ac13) -- Dataset layers are not exported correctly ([#36](https://github.com/reearth/reearth-backend/pull/36)) [`0b8c00`](https://github.com/reearth/reearth-backend/commit/0b8c00) -- Hide parent infobox fields when child infobox is not nil ([#37](https://github.com/reearth/reearth-backend/pull/37)) [`d8c8cd`](https://github.com/reearth/reearth-backend/commit/d8c8cd) -- Mongo.PropertySchema.FindByIDs, propertySchemaID.Equal [`be00da`](https://github.com/reearth/reearth-backend/commit/be00da) -- Gql propertySchemaGroup.translatedTitle resolver 
[`a4770e`](https://github.com/reearth/reearth-backend/commit/a4770e) -- Use PropertySchemaID.Equal [`8a6459`](https://github.com/reearth/reearth-backend/commit/8a6459) -- Use PropertySchemaID.Equal [`1c3cf1`](https://github.com/reearth/reearth-backend/commit/1c3cf1) -- Tweak field names of model primitive [`080ab9`](https://github.com/reearth/reearth-backend/commit/080ab9) -- Layer importing bug ([#41](https://github.com/reearth/reearth-backend/pull/41)) [`02b17f`](https://github.com/reearth/reearth-backend/commit/02b17f) -- Skip nil geometries ([#42](https://github.com/reearth/reearth-backend/pull/42)) [`90c327`](https://github.com/reearth/reearth-backend/commit/90c327) -- Validate widget extended when moved [`a7daf7`](https://github.com/reearth/reearth-backend/commit/a7daf7) -- Widget extended validation [`98db7e`](https://github.com/reearth/reearth-backend/commit/98db7e) -- Nil error in mongodoc plugin [`d236be`](https://github.com/reearth/reearth-backend/commit/d236be) -- Add widget to default location [`eb1db4`](https://github.com/reearth/reearth-backend/commit/eb1db4) -- Invalid extension data from GraphQL, plugin manifest schema improvement, more friendly error from manifest parser ([#56](https://github.com/reearth/reearth-backend/pull/56)) [`92d137`](https://github.com/reearth/reearth-backend/commit/92d137) -- Translated fields in plugin gql [`0a658a`](https://github.com/reearth/reearth-backend/commit/0a658a) -- Fallback widgetLocation [`579b7a`](https://github.com/reearth/reearth-backend/commit/579b7a) - -#### ๐Ÿ“– Documentation - -- Refine readme ([#28](https://github.com/reearth/reearth-backend/pull/28)) [`a9d209`](https://github.com/reearth/reearth-backend/commit/a9d209) -- Add badges to readme [skip ci] [`cc63cd`](https://github.com/reearth/reearth-backend/commit/cc63cd) - -#### โœจ Refactor - -- Remove unused code [`37b2c2`](https://github.com/reearth/reearth-backend/commit/37b2c2) -- Pkg/error 
([#31](https://github.com/reearth/reearth-backend/pull/31)) [`a3f8b6`](https://github.com/reearth/reearth-backend/commit/a3f8b6) -- Graphql adapter ([#40](https://github.com/reearth/reearth-backend/pull/40)) [`2a1d4f`](https://github.com/reearth/reearth-backend/commit/2a1d4f) -- Reorganize graphql schema ([#43](https://github.com/reearth/reearth-backend/pull/43)) [`d3360b`](https://github.com/reearth/reearth-backend/commit/d3360b) - -#### ๐Ÿงช Testing - -- Pkg/shp ([#5](https://github.com/reearth/reearth-backend/pull/5)) [`72ed8e`](https://github.com/reearth/reearth-backend/commit/72ed8e) -- Pkg/id ([#4](https://github.com/reearth/reearth-backend/pull/4)) [`c31bdb`](https://github.com/reearth/reearth-backend/commit/c31bdb) - -#### Miscellaneous Tasks - -- Enable nightly release workflow [`16c037`](https://github.com/reearth/reearth-backend/commit/16c037) -- Set up workflows [`819639`](https://github.com/reearth/reearth-backend/commit/819639) -- Fix workflows [`c022a4`](https://github.com/reearth/reearth-backend/commit/c022a4) -- Print config [`0125aa`](https://github.com/reearth/reearth-backend/commit/0125aa) -- Load .env instead of .env.local [`487a73`](https://github.com/reearth/reearth-backend/commit/487a73) -- Add godoc workflow [`9629dd`](https://github.com/reearth/reearth-backend/commit/9629dd) -- Fix godoc workflow [`cc45b5`](https://github.com/reearth/reearth-backend/commit/cc45b5) -- Fix godoc workflow [`0db163`](https://github.com/reearth/reearth-backend/commit/0db163) -- Fix godoc workflow [`9b78fc`](https://github.com/reearth/reearth-backend/commit/9b78fc) -- Fix godoc workflow [`f1e5a7`](https://github.com/reearth/reearth-backend/commit/f1e5a7) -- Fix godoc workflow [`f7866c`](https://github.com/reearth/reearth-backend/commit/f7866c) -- Fix godoc workflow [`5bc089`](https://github.com/reearth/reearth-backend/commit/5bc089) -- Fix godoc workflow [`5f808b`](https://github.com/reearth/reearth-backend/commit/5f808b) -- Fix godoc workflow 
[`9f8e11`](https://github.com/reearth/reearth-backend/commit/9f8e11) -- Fix godoc workflow [`150550`](https://github.com/reearth/reearth-backend/commit/150550) -- Use go:embed ([#24](https://github.com/reearth/reearth-backend/pull/24)) [`f7866e`](https://github.com/reearth/reearth-backend/commit/f7866e) -- Add internal error log [`41c377`](https://github.com/reearth/reearth-backend/commit/41c377) -- Support multiple platform docker image [`3651e2`](https://github.com/reearth/reearth-backend/commit/3651e2) -- Stop using upx as it doesn't work on arm64 [`3b5f93`](https://github.com/reearth/reearth-backend/commit/3b5f93) -- Update golang version and modules ([#51](https://github.com/reearth/reearth-backend/pull/51)) [`33f4c7`](https://github.com/reearth/reearth-backend/commit/33f4c7) -- Updating modules ([#62](https://github.com/reearth/reearth-backend/pull/62)) [`65ae32`](https://github.com/reearth/reearth-backend/commit/65ae32) -- Add github workflows to release [`fbcdef`](https://github.com/reearth/reearth-backend/commit/fbcdef) -- Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) -- Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) -- Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - -||||||| empty tree -======= -# Changelog -All notable changes to this project will be documented in this file. 
- -## 0.10.0 - 2022-08-10 - -### ๐Ÿš€ Features - -- Configurable server host [`61b03a`](https://github.com/reearth/reearth-backend/commit/61b03a) - -### Miscellaneous Tasks - -- Add new frontend endpoint (for Vite@3) [`70fed0`](https://github.com/reearth/reearth-backend/commit/70fed0) -- Fix changelog [skip ci] [`895a64`](https://github.com/reearth/reearth-backend/commit/895a64) - -## 0.9.0 - 2022-07-20 - -### ๐Ÿš€ Features - -- Change layer indicators from preset list from backend side ([#158](https://github.com/reearth/reearth-backend/pull/158)) [`0267f1`](https://github.com/reearth/reearth-backend/commit/0267f1) - -### ๐Ÿ”ง Bug Fixes - -- Property fields in a property list cannot be removed ([#160](https://github.com/reearth/reearth-backend/pull/160)) [`358237`](https://github.com/reearth/reearth-backend/commit/358237) - -### ๐Ÿงช Testing - -- Unit test for mongo auth request repo ([#159](https://github.com/reearth/reearth-backend/pull/159)) [`5afc81`](https://github.com/reearth/reearth-backend/commit/5afc81) - -### Miscellaneous Tasks - -- Update Makefile to remove unused targets [`67780b`](https://github.com/reearth/reearth-backend/commit/67780b) - -## 0.8.0 - 2022-06-17 - -### ๐Ÿš€ Features - -- Add totalCount field to DatasetSchema type of GraphQL schema ([#154](https://github.com/reearth/reearth-backend/pull/154)) [`ab6334`](https://github.com/reearth/reearth-backend/commit/ab6334) -- Add timeline settings to scene property ([#153](https://github.com/reearth/reearth-backend/pull/153)) [`602ec0`](https://github.com/reearth/reearth-backend/commit/602ec0) - -### ๐Ÿ”ง Bug Fixes - -- Assets are not saved when files are uploaded ([#155](https://github.com/reearth/reearth-backend/pull/155)) [`e444e4`](https://github.com/reearth/reearth-backend/commit/e444e4) - -### โœจ Refactor - -- Declarative description of use case structure (asset only) ([#151](https://github.com/reearth/reearth-backend/pull/151)) 
[`c6e98c`](https://github.com/reearth/reearth-backend/commit/c6e98c) - -### Miscellaneous Tasks - -- Update go modules ([#150](https://github.com/reearth/reearth-backend/pull/150)) [`6372bc`](https://github.com/reearth/reearth-backend/commit/6372bc) - -## 0.7.0 - 2022-05-13 - -### ๐Ÿš€ Features - -- Add an opacity slider to map tiles ([#138](https://github.com/reearth/reearth-backend/pull/138)) [`4f72b8`](https://github.com/reearth/reearth-backend/commit/4f72b8) - -### ๐Ÿ”ง Bug Fixes - -- Signup api requires password field [`a79376`](https://github.com/reearth/reearth-backend/commit/a79376) -- "$in needs an array" error from mongo FindByIDs ([#142](https://github.com/reearth/reearth-backend/pull/142)) [`58e1b0`](https://github.com/reearth/reearth-backend/commit/58e1b0) -- Name field is available again in signup api ([#144](https://github.com/reearth/reearth-backend/pull/144)) [`651852`](https://github.com/reearth/reearth-backend/commit/651852) - -### โœจ Refactor - -- Retry mongo lock ([#145](https://github.com/reearth/reearth-backend/pull/145)) [`ddaeaa`](https://github.com/reearth/reearth-backend/commit/ddaeaa) - -### ๐Ÿงช Testing - -- Add Mongo Asset's [`FindByID`](https://github.com/reearth/reearth-backend/commit/FindByID) unit testing ([#139](https://github.com/reearth/reearth-backend/pull/139)) [`35f9db`](https://github.com/reearth/reearth-backend/commit/35f9db) -- Refactor mongo connect helper function [`751e66`](https://github.com/reearth/reearth-backend/commit/751e66) -- Util.SyncMap.Range test sometimes fails ([#143](https://github.com/reearth/reearth-backend/pull/143)) [`c2b969`](https://github.com/reearth/reearth-backend/commit/c2b969) - -### Miscellaneous Tasks - -- Typo [`secrit`](https://github.com/reearth/reearth-backend/commit/secrit) on env example ([#137](https://github.com/reearth/reearth-backend/pull/137)) [`2c0220`](https://github.com/reearth/reearth-backend/commit/2c0220) -- Update the go modules 
([#146](https://github.com/reearth/reearth-backend/pull/146)) [`89009b`](https://github.com/reearth/reearth-backend/commit/89009b) - -## 0.6.1 - 2022-04-20 - -### ๐Ÿ”ง Bug Fixes - -- Renovate bot not running on schedule ([#136](https://github.com/reearth/reearth-backend/pull/136)) [`82843f`](https://github.com/reearth/reearth-backend/commit/82843f) -- Aud was changed and jwt could not be validated correctly [`985100`](https://github.com/reearth/reearth-backend/commit/985100) -- Auth audiences were unintentionally required [`7ec76a`](https://github.com/reearth/reearth-backend/commit/7ec76a) - -### โœจ Refactor - -- Introduce generics, reorganize GraphQL schema ([#135](https://github.com/reearth/reearth-backend/pull/135)) [`04a098`](https://github.com/reearth/reearth-backend/commit/04a098) - -### Miscellaneous Tasks - -- Update dependencies ([#134](https://github.com/reearth/reearth-backend/pull/134)) [`1b9b6b`](https://github.com/reearth/reearth-backend/commit/1b9b6b) - -## 0.6.0 - 2022-04-08 - -### ๐Ÿš€ Features - -- Authentication system ([#108](https://github.com/reearth/reearth-backend/pull/108)) [`b89c32`](https://github.com/reearth/reearth-backend/commit/b89c32) -- Default mailer that outputs mails into stdout [`aab26c`](https://github.com/reearth/reearth-backend/commit/aab26c) -- Assets filtering & pagination ([#81](https://github.com/reearth/reearth-backend/pull/81)) [`739943`](https://github.com/reearth/reearth-backend/commit/739943) -- Support sign up with information provided by OIDC providers ([#130](https://github.com/reearth/reearth-backend/pull/130)) [`fef60e`](https://github.com/reearth/reearth-backend/commit/fef60e) - -### ๐Ÿ”ง Bug Fixes - -- Load auth client domain from config ([#124](https://github.com/reearth/reearth-backend/pull/124)) [`9bde8a`](https://github.com/reearth/reearth-backend/commit/9bde8a) -- Signup fails when password is not set [`27c2f0`](https://github.com/reearth/reearth-backend/commit/27c2f0) -- Logger panics 
[`d1e3a8`](https://github.com/reearth/reearth-backend/commit/d1e3a8) -- Set auth server dev mode automatically [`83a66a`](https://github.com/reearth/reearth-backend/commit/83a66a) -- Auth server bugs and auth client bugs ([#125](https://github.com/reearth/reearth-backend/pull/125)) [`ce2309`](https://github.com/reearth/reearth-backend/commit/ce2309) -- Auth0 setting is not used by JWT verification middleware [`232e75`](https://github.com/reearth/reearth-backend/commit/232e75) -- Invalid mongo queries of pagination [`7caf68`](https://github.com/reearth/reearth-backend/commit/7caf68) -- Auth config not loaded expectedly [`570fe7`](https://github.com/reearth/reearth-backend/commit/570fe7) -- Users cannot creates a new team and scene [`5df25f`](https://github.com/reearth/reearth-backend/commit/5df25f) -- Auth server certificate is not saved as pem format [`982a71`](https://github.com/reearth/reearth-backend/commit/982a71) -- Repo filters are not merged expectedly [`f4cc3f`](https://github.com/reearth/reearth-backend/commit/f4cc3f) -- Auth is no longer required for GraphQL endpoint [`58a6d1`](https://github.com/reearth/reearth-backend/commit/58a6d1) -- Rename auth srv default client ID ([#128](https://github.com/reearth/reearth-backend/pull/128)) [`89adc3`](https://github.com/reearth/reearth-backend/commit/89adc3) -- Signup API is disabled when auth server is disabled, users and auth requests in mongo cannot be deleted ([#132](https://github.com/reearth/reearth-backend/pull/132)) [`47be6a`](https://github.com/reearth/reearth-backend/commit/47be6a) -- Auth to work with zero config ([#131](https://github.com/reearth/reearth-backend/pull/131)) [`3cbb45`](https://github.com/reearth/reearth-backend/commit/3cbb45) -- Property.SchemaListMap.List test fails [`3e6dff`](https://github.com/reearth/reearth-backend/commit/3e6dff) -- Errors when auth srv domain is not specified [`10691a`](https://github.com/reearth/reearth-backend/commit/10691a) -- Errors when auth srv domain is not 
specified [`648073`](https://github.com/reearth/reearth-backend/commit/648073) -- Login redirect does not work [`cb6ca4`](https://github.com/reearth/reearth-backend/commit/cb6ca4) -- Enable auth srv dev mode when no domain is specified [`0c0e28`](https://github.com/reearth/reearth-backend/commit/0c0e28) -- Add a trailing slash to jwt audiences [`e96f78`](https://github.com/reearth/reearth-backend/commit/e96f78) -- Allow separate auth server ui domain [`0ce79f`](https://github.com/reearth/reearth-backend/commit/0ce79f) - -### โšก๏ธ Performance - -- Reduce database queries to obtain scene IDs ([#119](https://github.com/reearth/reearth-backend/pull/119)) [`784332`](https://github.com/reearth/reearth-backend/commit/784332) - -### โœจ Refactor - -- Remove filter args from repos to prevent implementation errors in the use case layer ([#122](https://github.com/reearth/reearth-backend/pull/122)) [`82cf28`](https://github.com/reearth/reearth-backend/commit/82cf28) -- Http api to export layers [`3f2582`](https://github.com/reearth/reearth-backend/commit/3f2582) - -### Miscellaneous Tasks - -- Update dependencies ([#117](https://github.com/reearth/reearth-backend/pull/117)) [`d1a38e`](https://github.com/reearth/reearth-backend/commit/d1a38e) -- Update docker-compose config [`83f9b1`](https://github.com/reearth/reearth-backend/commit/83f9b1) -- Add log for GraphQL Playground endpoint ([#133](https://github.com/reearth/reearth-backend/pull/133)) [`adeda4`](https://github.com/reearth/reearth-backend/commit/adeda4) - -## 0.5.0 - 2022-02-24 - -### ๐Ÿš€ Features - -- Implement property.Diff and plugin/manifest.Diff ([#107](https://github.com/reearth/reearth-backend/pull/107)) [`700269`](https://github.com/reearth/reearth-backend/commit/700269) -- Support 3rd party plugin translation ([#109](https://github.com/reearth/reearth-backend/pull/109)) [`67a618`](https://github.com/reearth/reearth-backend/commit/67a618) -- Improve the Infobox style (manifest) 
([#110](https://github.com/reearth/reearth-backend/pull/110)) [`7aebcd`](https://github.com/reearth/reearth-backend/commit/7aebcd) -- Overwrite installation of new plug-ins without removing (automatic property migration) ([#113](https://github.com/reearth/reearth-backend/pull/113)) [`2dc192`](https://github.com/reearth/reearth-backend/commit/2dc192) -- Update infobox style fields ([#115](https://github.com/reearth/reearth-backend/pull/115)) [`608436`](https://github.com/reearth/reearth-backend/commit/608436) - -### ๐Ÿ”ง Bug Fixes - -- Scene exporter should export layers and tags while maintaining the tree structure ([#104](https://github.com/reearth/reearth-backend/pull/104)) [`805d78`](https://github.com/reearth/reearth-backend/commit/805d78) -- Property field in groups in list cannot be updated correctly [`5009c5`](https://github.com/reearth/reearth-backend/commit/5009c5) -- Scenes and properties are not updated properly when plugin is updated [`861c4b`](https://github.com/reearth/reearth-backend/commit/861c4b) -- Scene widgets and blocks are not update properly when plugin is updated [`f66f9a`](https://github.com/reearth/reearth-backend/commit/f66f9a) - -### โœจ Refactor - -- Graphql resolvers ([#105](https://github.com/reearth/reearth-backend/pull/105)) [`01a4e6`](https://github.com/reearth/reearth-backend/commit/01a4e6) - -### Miscellaneous Tasks - -- Update all dependencies ([#111](https://github.com/reearth/reearth-backend/pull/111)) [`173881`](https://github.com/reearth/reearth-backend/commit/173881) -- Increase batch size of db migration [ci skip] [`fbbca4`](https://github.com/reearth/reearth-backend/commit/fbbca4) - -## 0.4.0 - 2022-01-27 - -### ๐Ÿš€ Features - -- Add "clamp to ground" option to file primitive ([#95](https://github.com/reearth/reearth-backend/pull/95)) [`559194`](https://github.com/reearth/reearth-backend/commit/559194) -- Infobox and text block padding ([#100](https://github.com/reearth/reearth-backend/pull/100)) 
[`ddd0db`](https://github.com/reearth/reearth-backend/commit/ddd0db) - -### โšก๏ธ Performance - -- Add indexes of mongo collections ([#98](https://github.com/reearth/reearth-backend/pull/98)) [`691cb7`](https://github.com/reearth/reearth-backend/commit/691cb7) - -### โœจ Refactor - -- Pkg/id, use ID aliases, move JSON schemas ([#97](https://github.com/reearth/reearth-backend/pull/97)) [`1265ac`](https://github.com/reearth/reearth-backend/commit/1265ac) -- Unit tests ([#99](https://github.com/reearth/reearth-backend/pull/99)) [`0d112c`](https://github.com/reearth/reearth-backend/commit/0d112c) -- Pkg/property, pkg/layer, pkg/plugin ([#101](https://github.com/reearth/reearth-backend/pull/101)) [`17a463`](https://github.com/reearth/reearth-backend/commit/17a463) - -## 0.3.0 - 2022-01-11 - -### ๐Ÿš€ Features - -- Clusters for scenes ([#75](https://github.com/reearth/reearth-backend/pull/75)) [`3512c0`](https://github.com/reearth/reearth-backend/commit/3512c0) -- Add fields of scene property for terrain [`8693b4`](https://github.com/reearth/reearth-backend/commit/8693b4) -- Camera limiter ([#87](https://github.com/reearth/reearth-backend/pull/87)) [`63c582`](https://github.com/reearth/reearth-backend/commit/63c582) - -### ๐Ÿ”ง Bug Fixes - -- Terrain fields of scene property [`5e3d25`](https://github.com/reearth/reearth-backend/commit/5e3d25) -- Numbers are not decoded from gql to value [`2ddbc8`](https://github.com/reearth/reearth-backend/commit/2ddbc8) -- Layers have their own tags separate from the scene ([#90](https://github.com/reearth/reearth-backend/pull/90)) [`c4fb9a`](https://github.com/reearth/reearth-backend/commit/c4fb9a) -- Return property with clusters data ([#89](https://github.com/reearth/reearth-backend/pull/89)) [`1b99c6`](https://github.com/reearth/reearth-backend/commit/1b99c6) -- Cast values, rename value.OptionalValue ([#93](https://github.com/reearth/reearth-backend/pull/93)) [`ba4b18`](https://github.com/reearth/reearth-backend/commit/ba4b18) -- 
Synchronize mongo migration ([#94](https://github.com/reearth/reearth-backend/pull/94)) [`db4cea`](https://github.com/reearth/reearth-backend/commit/db4cea) - -### ๐Ÿ“– Documentation - -- Add pkg.go.dev badge to readme [`91f9b3`](https://github.com/reearth/reearth-backend/commit/91f9b3) - -### โœจ Refactor - -- Make property.Value and dataset.Value independent in pkg/value ([#77](https://github.com/reearth/reearth-backend/pull/77)) [`73143b`](https://github.com/reearth/reearth-backend/commit/73143b) - -### Miscellaneous Tasks - -- Fix plugin manifest JSON schema [`2b57b1`](https://github.com/reearth/reearth-backend/commit/2b57b1) - -## 0.2.0 - 2021-11-18 - -### ๐Ÿš€ Features - -- Support opentelemetry ([#68](https://github.com/reearth/reearth-backend/pull/68)) [`25c581`](https://github.com/reearth/reearth-backend/commit/25c581) - -### ๐Ÿ”ง Bug Fixes - -- Add an index to mongo project collection to prevent creating projects whose alias is duplicated [`10f745`](https://github.com/reearth/reearth-backend/commit/10f745) -- Check project alias duplication on project update [`443f2c`](https://github.com/reearth/reearth-backend/commit/443f2c) - -### โœจ Refactor - -- Add PropertySchemaGroupID to pkg/id ([#70](https://github.com/reearth/reearth-backend/pull/70)) [`9ece9e`](https://github.com/reearth/reearth-backend/commit/9ece9e) - -### Miscellaneous Tasks - -- Fix typo in github actions [`4a9dc5`](https://github.com/reearth/reearth-backend/commit/4a9dc5) -- Clean up unused code [`b5b01b`](https://github.com/reearth/reearth-backend/commit/b5b01b) -- Update codecov.yml to add ignored files [`d54309`](https://github.com/reearth/reearth-backend/commit/d54309) -- Ignore generated files in codecov [`9d3822`](https://github.com/reearth/reearth-backend/commit/9d3822) -- Upgrade dependencies [`215947`](https://github.com/reearth/reearth-backend/commit/215947) - -## 0.1.0 - 2021-11-01 - -### ๐Ÿš€ Features - -- Support Auth0 audience 
([#3](https://github.com/reearth/reearth-backend/pull/3)) [`c3758e`](https://github.com/reearth/reearth-backend/commit/c3758e) -- Basic auth for projects ([#6](https://github.com/reearth/reearth-backend/pull/6)) [`5db065`](https://github.com/reearth/reearth-backend/commit/5db065) -- Google analytics for scene ([#10](https://github.com/reearth/reearth-backend/pull/10)) [`b44249`](https://github.com/reearth/reearth-backend/commit/b44249) -- Create installable plugins ([#1](https://github.com/reearth/reearth-backend/pull/1)) [`5b7a5f`](https://github.com/reearth/reearth-backend/commit/5b7a5f) -- Add thumbnail, author fields on plugin metadata query ([#15](https://github.com/reearth/reearth-backend/pull/15)) [`888fe0`](https://github.com/reearth/reearth-backend/commit/888fe0) -- Published page api ([#11](https://github.com/reearth/reearth-backend/pull/11)) [`aebac3`](https://github.com/reearth/reearth-backend/commit/aebac3) -- Import dataset from google sheets ([#16](https://github.com/reearth/reearth-backend/pull/16)) [`2ef7ef`](https://github.com/reearth/reearth-backend/commit/2ef7ef) -- Add scenePlugin resolver to layers ([#20](https://github.com/reearth/reearth-backend/pull/20)) [`5213f3`](https://github.com/reearth/reearth-backend/commit/5213f3) -- Marker label position [`bb9e4c`](https://github.com/reearth/reearth-backend/commit/bb9e4c) -- Refine dataset import ([#26](https://github.com/reearth/reearth-backend/pull/26)) [`5dd3db`](https://github.com/reearth/reearth-backend/commit/5dd3db) -- Plugin upload and deletion ([#33](https://github.com/reearth/reearth-backend/pull/33)) [`8742db`](https://github.com/reearth/reearth-backend/commit/8742db) -- New primitives, new properties on primitives [`108711`](https://github.com/reearth/reearth-backend/commit/108711) -- Set scene theme ([#35](https://github.com/reearth/reearth-backend/pull/35)) [`2e4f52`](https://github.com/reearth/reearth-backend/commit/2e4f52) -- Widget align system 
([#19](https://github.com/reearth/reearth-backend/pull/19)) [`94611f`](https://github.com/reearth/reearth-backend/commit/94611f) -- Tag system ([#67](https://github.com/reearth/reearth-backend/pull/67)) [`163fcf`](https://github.com/reearth/reearth-backend/commit/163fcf) - -### ๐Ÿ”ง Bug Fixes - -- Add mutex for each memory repo ([#2](https://github.com/reearth/reearth-backend/pull/2)) [`f4c3b0`](https://github.com/reearth/reearth-backend/commit/f4c3b0) -- Auth0 audience in reearth_config.json [`72e3ed`](https://github.com/reearth/reearth-backend/commit/72e3ed) -- Auth0 domain and multiple auds [`835a02`](https://github.com/reearth/reearth-backend/commit/835a02) -- Signing up and deleting user [`f17b9d`](https://github.com/reearth/reearth-backend/commit/f17b9d) -- Deleting user [`e9b8c9`](https://github.com/reearth/reearth-backend/commit/e9b8c9) -- Sign up and update user [`e5ab87`](https://github.com/reearth/reearth-backend/commit/e5ab87) -- Make gql mutation payloads optional [`9b1c4a`](https://github.com/reearth/reearth-backend/commit/9b1c4a) -- Auth0 [`6a27c6`](https://github.com/reearth/reearth-backend/commit/6a27c6) -- Errors are be overwriten by tx [`2d08c5`](https://github.com/reearth/reearth-backend/commit/2d08c5) -- Deleting user [`f531bd`](https://github.com/reearth/reearth-backend/commit/f531bd) -- Always enable dev mode in debug [`0815d3`](https://github.com/reearth/reearth-backend/commit/0815d3) -- User deletion [`a5eeae`](https://github.com/reearth/reearth-backend/commit/a5eeae) -- Invisible layer issue in published project ([#7](https://github.com/reearth/reearth-backend/pull/7)) [`06cd44`](https://github.com/reearth/reearth-backend/commit/06cd44) -- Dataset link merge bug #378 ([#18](https://github.com/reearth/reearth-backend/pull/18)) [`25da0d`](https://github.com/reearth/reearth-backend/commit/25da0d) -- Ogp image for published page ([#17](https://github.com/reearth/reearth-backend/pull/17)) 
[`dcb4b0`](https://github.com/reearth/reearth-backend/commit/dcb4b0) -- Change default value of marker label position [`a2059e`](https://github.com/reearth/reearth-backend/commit/a2059e) -- Import dataset from google sheet bug ([#23](https://github.com/reearth/reearth-backend/pull/23)) [`077558`](https://github.com/reearth/reearth-backend/commit/077558) -- Public api param [`846957`](https://github.com/reearth/reearth-backend/commit/846957) -- Replace strings.Split() with strings.field() ([#25](https://github.com/reearth/reearth-backend/pull/25)) [`ba7d16`](https://github.com/reearth/reearth-backend/commit/ba7d16) -- Project public image type [`e82b54`](https://github.com/reearth/reearth-backend/commit/e82b54) -- Published API ([#27](https://github.com/reearth/reearth-backend/pull/27)) [`8ad1f8`](https://github.com/reearth/reearth-backend/commit/8ad1f8) -- Plugin manifest parser bugs ([#32](https://github.com/reearth/reearth-backend/pull/32)) [`78ac13`](https://github.com/reearth/reearth-backend/commit/78ac13) -- Dataset layers are not exported correctly ([#36](https://github.com/reearth/reearth-backend/pull/36)) [`0b8c00`](https://github.com/reearth/reearth-backend/commit/0b8c00) -- Hide parent infobox fields when child infobox is not nil ([#37](https://github.com/reearth/reearth-backend/pull/37)) [`d8c8cd`](https://github.com/reearth/reearth-backend/commit/d8c8cd) -- Mongo.PropertySchema.FindByIDs, propertySchemaID.Equal [`be00da`](https://github.com/reearth/reearth-backend/commit/be00da) -- Gql propertySchemaGroup.translatedTitle resolver [`a4770e`](https://github.com/reearth/reearth-backend/commit/a4770e) -- Use PropertySchemaID.Equal [`8a6459`](https://github.com/reearth/reearth-backend/commit/8a6459) -- Use PropertySchemaID.Equal [`1c3cf1`](https://github.com/reearth/reearth-backend/commit/1c3cf1) -- Tweak field names of model primitive [`080ab9`](https://github.com/reearth/reearth-backend/commit/080ab9) -- Layer importing bug 
([#41](https://github.com/reearth/reearth-backend/pull/41)) [`02b17f`](https://github.com/reearth/reearth-backend/commit/02b17f) -- Skip nil geometries ([#42](https://github.com/reearth/reearth-backend/pull/42)) [`90c327`](https://github.com/reearth/reearth-backend/commit/90c327) -- Validate widget extended when moved [`a7daf7`](https://github.com/reearth/reearth-backend/commit/a7daf7) -- Widget extended validation [`98db7e`](https://github.com/reearth/reearth-backend/commit/98db7e) -- Nil error in mongodoc plugin [`d236be`](https://github.com/reearth/reearth-backend/commit/d236be) -- Add widget to default location [`eb1db4`](https://github.com/reearth/reearth-backend/commit/eb1db4) -- Invalid extension data from GraphQL, plugin manifest schema improvement, more friendly error from manifest parser ([#56](https://github.com/reearth/reearth-backend/pull/56)) [`92d137`](https://github.com/reearth/reearth-backend/commit/92d137) -- Translated fields in plugin gql [`0a658a`](https://github.com/reearth/reearth-backend/commit/0a658a) -- Fallback widgetLocation [`579b7a`](https://github.com/reearth/reearth-backend/commit/579b7a) - -### ๐Ÿ“– Documentation - -- Refine readme ([#28](https://github.com/reearth/reearth-backend/pull/28)) [`a9d209`](https://github.com/reearth/reearth-backend/commit/a9d209) -- Add badges to readme [skip ci] [`cc63cd`](https://github.com/reearth/reearth-backend/commit/cc63cd) - -### โœจ Refactor - -- Remove unused code [`37b2c2`](https://github.com/reearth/reearth-backend/commit/37b2c2) -- Pkg/error ([#31](https://github.com/reearth/reearth-backend/pull/31)) [`a3f8b6`](https://github.com/reearth/reearth-backend/commit/a3f8b6) -- Graphql adapter ([#40](https://github.com/reearth/reearth-backend/pull/40)) [`2a1d4f`](https://github.com/reearth/reearth-backend/commit/2a1d4f) -- Reorganize graphql schema ([#43](https://github.com/reearth/reearth-backend/pull/43)) [`d3360b`](https://github.com/reearth/reearth-backend/commit/d3360b) - -### ๐Ÿงช Testing - 
-- Pkg/shp ([#5](https://github.com/reearth/reearth-backend/pull/5)) [`72ed8e`](https://github.com/reearth/reearth-backend/commit/72ed8e) -- Pkg/id ([#4](https://github.com/reearth/reearth-backend/pull/4)) [`c31bdb`](https://github.com/reearth/reearth-backend/commit/c31bdb) - -### Miscellaneous Tasks - -- Enable nightly release workflow [`16c037`](https://github.com/reearth/reearth-backend/commit/16c037) -- Set up workflows [`819639`](https://github.com/reearth/reearth-backend/commit/819639) -- Fix workflows [`c022a4`](https://github.com/reearth/reearth-backend/commit/c022a4) -- Print config [`0125aa`](https://github.com/reearth/reearth-backend/commit/0125aa) -- Load .env instead of .env.local [`487a73`](https://github.com/reearth/reearth-backend/commit/487a73) -- Add godoc workflow [`9629dd`](https://github.com/reearth/reearth-backend/commit/9629dd) -- Fix godoc workflow [`cc45b5`](https://github.com/reearth/reearth-backend/commit/cc45b5) -- Fix godoc workflow [`0db163`](https://github.com/reearth/reearth-backend/commit/0db163) -- Fix godoc workflow [`9b78fc`](https://github.com/reearth/reearth-backend/commit/9b78fc) -- Fix godoc workflow [`f1e5a7`](https://github.com/reearth/reearth-backend/commit/f1e5a7) -- Fix godoc workflow [`f7866c`](https://github.com/reearth/reearth-backend/commit/f7866c) -- Fix godoc workflow [`5bc089`](https://github.com/reearth/reearth-backend/commit/5bc089) -- Fix godoc workflow [`5f808b`](https://github.com/reearth/reearth-backend/commit/5f808b) -- Fix godoc workflow [`9f8e11`](https://github.com/reearth/reearth-backend/commit/9f8e11) -- Fix godoc workflow [`150550`](https://github.com/reearth/reearth-backend/commit/150550) -- Use go:embed ([#24](https://github.com/reearth/reearth-backend/pull/24)) [`f7866e`](https://github.com/reearth/reearth-backend/commit/f7866e) -- Add internal error log [`41c377`](https://github.com/reearth/reearth-backend/commit/41c377) -- Support multiple platform docker image 
[`3651e2`](https://github.com/reearth/reearth-backend/commit/3651e2) -- Stop using upx as it doesn't work on arm64 [`3b5f93`](https://github.com/reearth/reearth-backend/commit/3b5f93) -- Update golang version and modules ([#51](https://github.com/reearth/reearth-backend/pull/51)) [`33f4c7`](https://github.com/reearth/reearth-backend/commit/33f4c7) -- Updating modules ([#62](https://github.com/reearth/reearth-backend/pull/62)) [`65ae32`](https://github.com/reearth/reearth-backend/commit/65ae32) -- Add github workflows to release [`fbcdef`](https://github.com/reearth/reearth-backend/commit/fbcdef) -- Fix release workflow, fix build comment [skip ci] [`cfc79a`](https://github.com/reearth/reearth-backend/commit/cfc79a) -- Fix renaming file names in release workflow [`96f0b3`](https://github.com/reearth/reearth-backend/commit/96f0b3) -- Fix and refactor release workflow [skip ci] [`d5466b`](https://github.com/reearth/reearth-backend/commit/d5466b) - - ->>>>>>> 276ef5f50090391639efeed7cb93d0da9979f39d diff --git a/server/Dockerfile b/server/Dockerfile deleted file mode 100644 index 04314f165..000000000 --- a/server/Dockerfile +++ /dev/null @@ -1,36 +0,0 @@ -<<<<<<< HEAD -ARG REEARTH_BACKEND_IMAGE=reearth/reearth-backend:latest -FROM $REEARTH_BACKEND_IMAGE - -COPY web /reearth/web - -WORKDIR /reearth -CMD [ "./reearth" ] -||||||| empty tree -======= -FROM golang:1.18-alpine AS build -ARG TAG=release -ARG REV -ARG VERSION - -RUN apk add --update --no-cache git ca-certificates build-base - -COPY go.mod go.sum main.go /reearth/ -WORKDIR /reearth -RUN go mod download - -COPY cmd/ /reearth/cmd/ -COPY pkg/ /reearth/pkg/ -COPY internal/ /reearth/internal/ - -RUN CGO_ENABLED=0 go build -tags "${TAG}" "-ldflags=-X main.version=${VERSION} -s -w -buildid=" -trimpath ./cmd/reearth - -FROM scratch - -COPY --from=build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ca-certificates.crt -COPY --from=build /reearth/reearth /reearth/reearth - -WORKDIR /reearth - -CMD [ "./reearth" ] 
->>>>>>> 276ef5f50090391639efeed7cb93d0da9979f39d diff --git a/server/Makefile b/server/Makefile index 58f11bda7..0b0232bcb 100644 --- a/server/Makefile +++ b/server/Makefile @@ -11,7 +11,7 @@ run-app: go run ./cmd/reearth run-db: - docker compose up -d reearth-mongo + docker compose up -f ../docker-compose.yml -d reearth-mongo gql: go generate ./internal/adapter/gql diff --git a/server/docker-compose.yml b/server/docker-compose.yml deleted file mode 100644 index 0247366c4..000000000 --- a/server/docker-compose.yml +++ /dev/null @@ -1,50 +0,0 @@ -<<<<<<< HEAD -version: '3' -services: - reearth: - image: reearth/reearth:latest - environment: - REEARTH_DB: mongodb://reearth-mongo - ports: - - 8080:8080 - # env_file: - # - ./.env - links: - - reearth-mongo - depends_on: - - reearth-mongo - volumes: - - ./data:/reearth/data - reearth-mongo: - image: mongo:4.4-focal - ports: - - 27017:27017 - volumes: - - ./mongo:/data/db -||||||| empty tree -======= -version: '3' -services: - reearth-backend: - build: - context: . - image: reearth/reearth-backend:nightly - environment: - REEARTH_DB: mongodb://reearth-mongo - ports: - - '8080:8080' - # env_file: - # - ./.env - links: - - reearth-mongo - depends_on: - - reearth-mongo - volumes: - - ./data:/reearth/data - reearth-mongo: - image: mongo:4.4-focal - ports: - - 27017:27017 - volumes: - - ./mongo:/data/db ->>>>>>> 276ef5f50090391639efeed7cb93d0da9979f39d